🔀 Sync master

This commit is contained in:
ricardo 2021-04-01 21:53:11 -04:00
commit 8db3734794
1619 changed files with 152921 additions and 16926 deletions

.dockerignore

@@ -0,0 +1,3 @@
node_modules
packages/*/node_modules
packages/*/dist


@@ -0,0 +1,49 @@
name: Docker Nightly Image CI

on:
  schedule:
    - cron: "0 1 * * *"
  workflow_dispatch:
    inputs:
      branch:
        description: 'Name of the GitHub branch to create image off.'
        required: true
        default: 'master'
      tag:
        description: 'Name of the docker tag to create.'
        required: true
        default: 'nightly'

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      -
        name: Checkout
        uses: actions/checkout@v2
        with:
          ref: ${{ github.event.inputs.branch || 'master' }}
      -
        name: Set up QEMU
        uses: docker/setup-qemu-action@v1
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      -
        name: Login to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      -
        name: Build and push
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./docker/images/n8n-custom/Dockerfile
          platforms: linux/amd64
          push: true
          tags: n8nio/n8n:${{ github.event.inputs.tag || 'nightly' }}
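Because the workflow above declares `workflow_dispatch` inputs, it can also be triggered manually. A sketch using the GitHub CLI (assuming a reasonably recent `gh`; the workflow is referenced by the `name:` defined above):

```
gh workflow run "Docker Nightly Image CI" -f branch=master -f tag=nightly
```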


@@ -4,6 +4,12 @@ on:
   push:
     tags:
       - n8n@*
+  workflow_dispatch:
+    inputs:
+      version:
+        description: 'n8n version to build docker image for.'
+        required: true
+        default: '0.112.0'

 jobs:
   armv7_job:
@@ -19,14 +25,16 @@ jobs:
         run: docker login -u ${{ secrets.DOCKER_USERNAME }} -p ${{ secrets.DOCKER_PASSWORD }}
       - name: Set up Docker Buildx
-        uses: crazy-max/ghaction-docker-buildx@v1
+        uses: crazy-max/ghaction-docker-buildx@v3
         with:
-          version: latest
+          buildx-version: latest
+          qemu-version: latest
       - name: Run Buildx (push image)
         if: success()
         run: |
-          docker buildx build --platform linux/arm/v7 --build-arg N8N_VERSION=${{steps.vars.outputs.tag}} -t n8nio/n8n:${{steps.vars.outputs.tag}}-rpi --output type=image,push=true docker/images/n8n-rpi
-      - name: Tag Docker image with latest
-        run: docker tag n8nio/n8n:${{steps.vars.outputs.tag}}-rpi n8nio/n8n:latest-rpi
-      - name: Push docker images of latest
-        run: docker push n8nio/n8n:latest-rpi
+          docker buildx build \
+            --platform linux/arm/v7 \
+            --build-arg N8N_VERSION=${{github.event.inputs.version || steps.vars.outputs.tag}} \
+            -t ${{ secrets.DOCKER_USERNAME }}/n8n:${{github.event.inputs.version || steps.vars.outputs.tag}}-rpi \
+            -t ${{ secrets.DOCKER_USERNAME }}/n8n:latest-rpi \
+            --output type=image,push=true docker/images/n8n-rpi


@@ -28,7 +28,11 @@ jobs:
       - name: Push docker images of latest
         run: docker push n8nio/n8n:latest
-      - name: Build the Docker image of version (Ubuntu)
-        run: docker build --build-arg N8N_VERSION=${{steps.vars.outputs.tag}} -t n8nio/n8n:${{steps.vars.outputs.tag}}-ubuntu docker/images/n8n-ubuntu
-      - name: Push Docker image of version (Ubuntu)
-        run: docker push n8nio/n8n:${{steps.vars.outputs.tag}}-ubuntu
+      - name: Build the Docker image of version (Debian)
+        run: docker build --build-arg N8N_VERSION=${{steps.vars.outputs.tag}} -t n8nio/n8n:${{steps.vars.outputs.tag}}-debian docker/images/n8n-debian
+      - name: Push Docker image of version (Debian)
+        run: docker push n8nio/n8n:${{steps.vars.outputs.tag}}-debian
+      - name: Tag Docker image with latest (Debian)
+        run: docker tag n8nio/n8n:${{steps.vars.outputs.tag}}-debian n8nio/n8n:latest-debian
+      - name: Push docker images of latest (Debian)
+        run: docker push n8nio/n8n:latest-debian


@@ -9,7 +9,7 @@ jobs:
     strategy:
       matrix:
-        node-version: [10.x, 12.x, 14.x]
+        node-version: [12.x, 14.x]

     steps:
       - uses: actions/checkout@v1
@@ -23,5 +23,6 @@ jobs:
         npm run bootstrap
         npm run build --if-present
         npm test
+        npm run tslint
       env:
         CI: true


@@ -30,7 +30,7 @@ n8n is split up in different modules which are all in a single mono repository.
 The most important directories:

-- [/docker/image](/docker/image) - Dockerfiles to create n8n containers
+- [/docker/image](/docker/images) - Dockerfiles to create n8n containers
 - [/docker/compose](/docker/compose) - Examples Docker Setups
 - [/packages](/packages) - The different n8n modules
 - [/packages/cli](/packages/cli) - CLI code to run front- & backend
@@ -57,11 +57,16 @@ dependencies are installed and the packages get linked correctly. Here a short g
 The packages which n8n uses depend on a few build tools:

-Linux:
+Debian/Ubuntu:
 ```
 apt-get install -y build-essential python
 ```

+CentOS:
+```
+yum install gcc gcc-c++ make
+```
+
 Windows:
 ```
 npm install -g windows-build-tools
@@ -119,6 +124,10 @@ To start n8n execute:
 npm run start
 ```

+To start n8n with tunnel:
+```
+./packages/cli/bin/n8n start --tunnel
+```
+
 ## Development Cycle
@@ -213,23 +222,7 @@ If you'd like to submit a new node, please go through the following checklist. T
 ## Extend Documentation

-All the files which get used in the n8n documentation on [https://docs.n8n.io](https://docs.n8n.io)
-can be found in the [/docs](https://github.com/n8n-io/n8n/tree/master/docs) folder. So all changes
-and additions can directly be made in there
-
-That the markdown docs look pretty we use [docsify](https://docsify.js.org). It is possible to test
-locally how it looks like rendered with the following commands:
-
-```bash
-# 1. Install docisify
-npm i docsify-cli -g
-
-# 2. Go into n8n folder (the same folder which contains this file). For example:
-cd /data/n8n
-
-# 3. Start docsificy
-docsify serve ./docs
-```
+The repository for the n8n documentation on https://docs.n8n.io can be found [here](https://github.com/n8n-io/n8n-docs).

 ## Contributor License Agreement


@@ -19,7 +19,7 @@ Condition notice.
 Software: n8n

-License: Apache 2.0
+License: Apache 2.0 with Commons Clause

 Licensor: n8n GmbH


@@ -2,7 +2,7 @@
 ![n8n.io - Workflow Automation](https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-logo.png)

-n8n is a free and open [fair-code](http://faircode.io) licensed node based Workflow Automation Tool. It can be self-hosted, easily extended, and so also used with internal tools.
+n8n is an extendable workflow automation tool. With a [fair-code](http://faircode.io) distribution model, n8n will always have visible source code, be available to self-host, and allow you to add your own custom functions, logic and apps. n8n's node-based approach makes it highly versatile, enabling you to connect anything to everything.

 <a href="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png"><img src="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png" width="550" alt="n8n.io - Screenshot"></a>
@@ -16,7 +16,7 @@ received or lost a star.
 ## Available integrations

-n8n has 100+ different nodes to automate workflows. The list can be found on: [https://n8n.io/nodes](https://n8n.io/nodes)
+n8n has 200+ different nodes to automate workflows. The list can be found on: [https://n8n.io/nodes](https://n8n.io/nodes)

 ## Documentation
@@ -25,6 +25,8 @@ The official n8n documentation can be found under: [https://docs.n8n.io](https:/
 Additional information and example workflows on the n8n.io website: [https://n8n.io](https://n8n.io)

+The changelog can be found [here](https://docs.n8n.io/reference/changelog.html) and the list of breaking changes [here](https://github.com/n8n-io/n8n/blob/master/packages/cli/BREAKING-CHANGES.md).
+
 ## Usage
@@ -39,10 +41,14 @@ Execute: `npm run start`
-## Hosted n8n
+## n8n.cloud

-If you are interested in a hosted version of n8n on our infrastructure please contact us via:
-[hosting@n8n.io](mailto:hosting@n8n.io)
+Sign-up for an [n8n.cloud](https://www.n8n.cloud/) account.
+
+While n8n.cloud and n8n are the same in terms of features, n8n.cloud provides certain conveniences such as:
+- Not having to set up and maintain your n8n instance
+- Managed OAuth for authentication
+- Easily upgrading to the newer n8n versions
@@ -63,20 +69,19 @@ check out our job posts:
-## What does n8n mean and how do you pronounce it
+## What does n8n mean and how do you pronounce it?

-**Short answer:** It means "nodemation"
+**Short answer:** It means "nodemation" and it is pronounced as n-eight-n.

-**Long answer:** I get that question quite often (more often than I expected)
+**Long answer:** "I get that question quite often (more often than I expected)
 so I decided it is probably best to answer it here. While looking for a
 good name for the project with a free domain I realized very quickly that all the
 good ones I could think of were already taken. So, in the end, I chose
-nodemation. "node-" in the sense that it uses a Node-View and that it uses
-Node.js and "-mation" for "automation" which is what the project is supposed to help with.
+nodemation. 'node-' in the sense that it uses a Node-View and that it uses
+Node.js and '-mation' for 'automation' which is what the project is supposed to help with.
 However, I did not like how long the name was and I could not imagine writing
 something that long every time in the CLI. That is when I then ended up on
-"n8n". Sure does not work perfectly but does neither for Kubernetes (k8s) and
-did not hear anybody complain there. So I guess it should be ok.
+'n8n'." - **Jan Oberhauser, Founder and CEO, n8n.io**
@@ -88,6 +93,6 @@ Have you found a bug :bug: ? Or maybe you have a nice feature :sparkles: to cont
 ## License

-n8n is [fair-code](http://faircode.io) licensed under [**Apache 2.0 with Commons Clause**](https://github.com/n8n-io/n8n/blob/master/packages/cli/LICENSE.md)
+n8n is [fair-code](http://faircode.io) distributed under [**Apache 2.0 with Commons Clause**](https://github.com/n8n-io/n8n/blob/master/packages/cli/LICENSE.md) license.

-Additional information about license can be found in the [FAQ](https://docs.n8n.io/#/faq?id=license)
+Additional information about license can be found in the [FAQ](https://docs.n8n.io/#/faq?id=license).


@@ -20,7 +20,3 @@ To stop it execute:
 ```
 docker-compose stop
 ```
-
-## Configuration
-
-The default name of the database, user and password for MongoDB can be changed in the `.env` file in the current directory.


@@ -49,8 +49,7 @@ services:
       - N8N_PROTOCOL=https
       - NODE_ENV=production
      - N8N_PATH
-      - WEBHOOK_TUNNEL_URL=https://${DOMAIN_NAME}${N8N_PATH}
-      - VUE_APP_URL_BASE_API=https://${DOMAIN_NAME}${N8N_PATH}
+      - WEBHOOK_URL=https://${DOMAIN_NAME}${N8N_PATH}
     volumes:
       - /var/run/docker.sock:/var/run/docker.sock
-      - ${DATA_FOLDER}/.n8n:/root/.n8n
+      - ${DATA_FOLDER}/.n8n:/home/node/.n8n


@@ -1,9 +0,0 @@
MONGO_INITDB_ROOT_USERNAME=changeUser
MONGO_INITDB_ROOT_PASSWORD=changePassword
MONGO_INITDB_DATABASE=n8n
MONGO_NON_ROOT_USERNAME=changeUser
MONGO_NON_ROOT_PASSWORD=changePassword
N8N_BASIC_AUTH_USER=changeUser
N8N_BASIC_AUTH_PASSWORD=changePassword


@@ -1,26 +0,0 @@
# n8n with MongoDB
Starts n8n with MongoDB as database.
## Start
To start n8n with MongoDB simply start docker-compose by executing the following
command in the current folder.
**IMPORTANT:** But before you do that change the default users and passwords in the `.env` file!
```
docker-compose up -d
```
To stop it execute:
```
docker-compose stop
```
## Configuration
The default name of the database, user and password for MongoDB can be changed in the `.env` file in the current directory.


@@ -1,34 +0,0 @@
version: '3.1'

services:

  mongo:
    image: mongo:4.0
    restart: always
    environment:
      - MONGO_INITDB_ROOT_USERNAME
      - MONGO_INITDB_ROOT_PASSWORD
      - MONGO_INITDB_DATABASE
      - MONGO_NON_ROOT_USERNAME
      - MONGO_NON_ROOT_PASSWORD
    volumes:
      - ./init-data.sh:/docker-entrypoint-initdb.d/init-data.sh

  n8n:
    image: n8nio/n8n
    restart: always
    environment:
      - DB_TYPE=mongodb
      - DB_MONGODB_CONNECTION_URL=mongodb://${MONGO_NON_ROOT_USERNAME}:${MONGO_NON_ROOT_PASSWORD}@mongo:27017/${MONGO_INITDB_DATABASE}
      - N8N_BASIC_AUTH_ACTIVE=true
      - N8N_BASIC_AUTH_USER
      - N8N_BASIC_AUTH_PASSWORD
    ports:
      - 5678:5678
    links:
      - mongo
    volumes:
      - ~/.n8n:/root/.n8n
    # Wait 5 seconds to start n8n to make sure that MongoDB is ready
    # when n8n tries to connect to it
    command: /bin/sh -c "sleep 5; n8n start"


@@ -1,17 +0,0 @@
#!/bin/bash
set -e;

# Create a default non-root role
MONGO_NON_ROOT_ROLE="${MONGO_NON_ROOT_ROLE:-readWrite}"

if [ -n "${MONGO_NON_ROOT_USERNAME:-}" ] && [ -n "${MONGO_NON_ROOT_PASSWORD:-}" ]; then
	"${mongo[@]}" "$MONGO_INITDB_DATABASE" <<-EOJS
		db.createUser({
			user: $(_js_escape "$MONGO_NON_ROOT_USERNAME"),
			pwd: $(_js_escape "$MONGO_NON_ROOT_PASSWORD"),
			roles: [ { role: $(_js_escape "$MONGO_NON_ROOT_ROLE"), db: $(_js_escape "$MONGO_INITDB_DATABASE") } ]
		})
	EOJS
else
	echo "SETUP INFO: No Environment variables given!"
fi


@@ -32,7 +32,7 @@ services:
     links:
       - postgres
     volumes:
-      - ~/.n8n:/root/.n8n
+      - ~/.n8n:/home/node/.n8n
     # Wait 5 seconds to start n8n to make sure that PostgreSQL is ready
     # when n8n tries to connect to it
     command: /bin/sh -c "sleep 5; n8n start"


@@ -1,5 +1,5 @@
 # 1. Create an image to build n8n
-FROM node:12.16-alpine as builder
+FROM node:14.15-alpine as builder

 # Update everything and install needed dependencies
 USER root
@@ -25,7 +25,7 @@ RUN npm run build

 # 2. Start with a new clean image with just the code that is needed to run n8n
-FROM node:12.16-alpine
+FROM node:14.15-alpine

 USER root
@@ -36,6 +36,13 @@ WORKDIR /data

 # Install all needed dependencies
 RUN npm_config_user=root npm install -g full-icu

+# Install fonts
+RUN apk --no-cache add --virtual fonts msttcorefonts-installer fontconfig && \
+	update-ms-fonts && \
+	fc-cache -f && \
+	apk del fonts && \
+	find /usr/share/fonts/truetype/msttcorefonts/ -type l -exec unlink {} \;
+
 ENV NODE_ICU_DATA /usr/local/lib/node_modules/full-icu

 COPY --from=builder /data ./


@@ -1,49 +0,0 @@
FROM node:12.16-alpine as builder
# FROM node:12.16-alpine
# Update everything and install needed dependencies
RUN apk add --update graphicsmagick tzdata git tini su-exec
USER root
# Install all needed dependencies
RUN apk --update add --virtual build-dependencies python build-base ca-certificates && \
npm_config_user=root npm install -g full-icu lerna
ENV NODE_ICU_DATA /usr/local/lib/node_modules/full-icu
WORKDIR /data
COPY lerna.json .
COPY package.json .
COPY packages/cli/ ./packages/cli/
COPY packages/core/ ./packages/core/
COPY packages/editor-ui/ ./packages/editor-ui/
COPY packages/nodes-base/ ./packages/nodes-base/
COPY packages/workflow/ ./packages/workflow/
RUN rm -rf node_modules packages/*/node_modules packages/*/dist
RUN npm install --loglevel notice
RUN lerna bootstrap --hoist
RUN npm run build
FROM node:12.16-alpine
WORKDIR /data
# Install all needed dependencies
RUN npm_config_user=root npm install -g full-icu
USER root
ENV NODE_ICU_DATA /usr/local/lib/node_modules/full-icu
COPY --from=builder /data ./
RUN apk add --update graphicsmagick tzdata git tini su-exec
COPY docker/images/n8n-dev/docker-entrypoint.sh /docker-entrypoint.sh
ENTRYPOINT ["tini", "--", "/docker-entrypoint.sh"]
EXPOSE 5678/tcp


@@ -1,4 +1,4 @@
-FROM node:12.16
+FROM node:14.15

 ARG N8N_VERSION


@@ -1,6 +1,6 @@
-## n8n - Ubuntu Docker Image
+## n8n - Debian Docker Image

-Dockerfile to build n8n with Ubuntu.
+Dockerfile to build n8n with Debian.

 For information about how to run n8n with Docker check the generic
 [Docker-Readme](https://github.com/n8n-io/n8n/tree/master/docker/images/n8n/README.md)
@@ -10,12 +10,12 @@ For information about how to run n8n with Docker check the generic
 docker build --build-arg N8N_VERSION=<VERSION> -t n8nio/n8n:<VERSION> .

 # For example:
-docker build --build-arg N8N_VERSION=0.43.0 -t n8nio/n8n:0.43.0-ubuntu .
+docker build --build-arg N8N_VERSION=0.43.0 -t n8nio/n8n:0.43.0-debian .
 ```

 ```
 docker run -it --rm \
 	--name n8n \
 	-p 5678:5678 \
-	n8nio/n8n:0.43.0-ubuntu
+	n8nio/n8n:0.43.0-debian
 ```


@@ -1,4 +1,4 @@
-FROM arm32v7/node:12.16
+FROM arm32v7/node:14.15

 ARG N8N_VERSION
@@ -15,6 +15,7 @@ ENV NODE_ENV production
 WORKDIR /data

-USER node
+USER root

-CMD n8n
+CMD chown -R node:node /home/node/.n8n \
+	&& gosu node n8n


@@ -17,5 +17,6 @@ docker build --build-arg N8N_VERSION=0.43.0 -t n8nio/n8n:0.43.0-rpi .
 docker run -it --rm \
 	--name n8n \
 	-p 5678:5678 \
+	-v ~/.n8n:/home/node/.n8n \
 	n8nio/n8n:0.70.0-rpi
 ```


@@ -1,4 +1,4 @@
-FROM node:12.16-alpine
+FROM node:14.15-alpine

 ARG N8N_VERSION
@@ -16,6 +16,13 @@ RUN apk --update add --virtual build-dependencies python build-base ca-certifica
 	npm_config_user=root npm install -g full-icu n8n@${N8N_VERSION} && \
 	apk del build-dependencies

+# Install fonts
+RUN apk --no-cache add --virtual fonts msttcorefonts-installer fontconfig && \
+	update-ms-fonts && \
+	fc-cache -f && \
+	apk del fonts && \
+	find /usr/share/fonts/truetype/msttcorefonts/ -type l -exec unlink {} \;
+
 ENV NODE_ICU_DATA /usr/local/lib/node_modules/full-icu

 WORKDIR /data


@@ -2,7 +2,7 @@
 ![n8n.io - Workflow Automation](https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-logo.png)

-n8n is a free and open [fair-code](http://faircode.io) licensed node based Workflow Automation Tool. It can be self-hosted, easily extended, and so also used with internal tools.
+n8n is a free and open [fair-code](http://faircode.io) distributed node based Workflow Automation Tool. It can be self-hosted, easily extended, and so also used with internal tools.

 <a href="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png"><img src="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png" width="550" alt="n8n.io - Screenshot"></a>
@@ -33,7 +33,7 @@ Slack notification every time a Github repository received or lost a star.
 ## Available integrations

-n8n has 100+ different nodes to automate workflows. The list can be found on: [https://n8n.io/nodes](https://n8n.io/nodes)
+n8n has 200+ different nodes to automate workflows. The list can be found on: [https://n8n.io/nodes](https://n8n.io/nodes)

 ## Documentation
@@ -71,7 +71,7 @@ To use it simply start n8n with `--tunnel`
 docker run -it --rm \
 	--name n8n \
 	-p 5678:5678 \
-	-v ~/.n8n:/root/.n8n \
+	-v ~/.n8n:/home/node/.n8n \
 	n8nio/n8n \
 	n8n start --tunnel
 ```
@@ -93,56 +93,30 @@ N8N_BASIC_AUTH_PASSWORD=<PASSWORD>
 ## Persist data

 The workflow data gets by default saved in an SQLite database in the user
-folder (`/root/.n8n`). That folder also additionally contains the
+folder (`/home/node/.n8n`). That folder also additionally contains the
 settings like webhook URL and encryption key.

 ```
 docker run -it --rm \
 	--name n8n \
 	-p 5678:5678 \
-	-v ~/.n8n:/root/.n8n \
+	-v ~/.n8n:/home/node/.n8n \
 	n8nio/n8n
 ```

 ### Start with other Database

 By default n8n uses SQLite to save credentials, past executions and workflows.
-n8n however also supports MongoDB and PostgresDB. To use them simply a few
+n8n however also supports PostgresDB, MySQL and MariaDB. To use them simply a few
 environment variables have to be set.

-It is important to still persist the data in the `/root/.n8` folder. The reason
+It is important to still persist the data in the `/root/.n8n` folder. The reason
 is that it contains n8n user data. That is the name of the webhook
 (in case) the n8n tunnel gets used and even more important the encryption key
 for the credentials. If none gets found n8n creates automatically one on
 startup. In case credentials are already saved with a different encryption key
 it can not be used anymore as encrypting it is not possible anymore.

-#### Use with MongoDB
-
-> **WARNING**: Use Postgres if possible! Mongo has problems with saving large
-> amounts of data in a document and causes also other problems. So support will
-> may be dropped in the future.
-
-Replace the following placeholders with the actual data:
-- MONGO_DATABASE
-- MONGO_HOST
-- MONGO_PORT
-- MONGO_USER
-- MONGO_PASSWORD
-
-```
-docker run -it --rm \
-	--name n8n \
-	-p 5678:5678 \
-	-e DB_TYPE=mongodb \
-	-e DB_MONGODB_CONNECTION_URL="mongodb://<MONGO_USER>:<MONGO_PASSWORD>@<MONGO_SERVER>:<MONGO_PORT>/<MONGO_DATABASE>" \
-	-v ~/.n8n:/root/.n8n \
-	n8nio/n8n \
-	n8n start
-```
-
-A full working setup with docker-compose can be found [here](https://github.com/n8n-io/n8n/blob/master/docker/compose/withMongo/README.md)
-
 #### Use with PostgresDB

 Replace the following placeholders with the actual data:
@@ -164,7 +138,7 @@ docker run -it --rm \
 	-e DB_POSTGRESDB_USER=<POSTGRES_USER> \
 	-e DB_POSTGRESDB_SCHEMA=<POSTGRES_SCHEMA> \
 	-e DB_POSTGRESDB_PASSWORD=<POSTGRES_PASSWORD> \
-	-v ~/.n8n:/root/.n8n \
+	-v ~/.n8n:/home/node/.n8n \
 	n8nio/n8n \
 	n8n start
 ```
@@ -190,7 +164,7 @@ docker run -it --rm \
 	-e DB_MYSQLDB_PORT=<MYSQLDB_PORT> \
 	-e DB_MYSQLDB_USER=<MYSQLDB_USER> \
 	-e DB_MYSQLDB_PASSWORD=<MYSQLDB_PASSWORD> \
-	-v ~/.n8n:/root/.n8n \
+	-v ~/.n8n:/home/node/.n8n \
 	n8nio/n8n \
 	n8n start
 ```
@@ -203,7 +177,6 @@ with the given name. That makes it possible to load data easily from
 Docker- and Kubernetes-Secrets.

 The following environment variables support file input:
-- DB_MONGODB_CONNECTION_URL_FILE
 - DB_POSTGRESDB_DATABASE_FILE
 - DB_POSTGRESDB_HOST_FILE
 - DB_POSTGRESDB_PASSWORD_FILE
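As an illustration of the `_FILE` convention described above, the database password could be supplied through a mounted secret file instead of a plain environment variable; a minimal sketch (the secret paths are hypothetical):

```
docker run -it --rm \
	--name n8n \
	-p 5678:5678 \
	-e DB_TYPE=postgresdb \
	-e DB_POSTGRESDB_PASSWORD_FILE=/run/secrets/postgres_password \
	-v /path/to/postgres_password:/run/secrets/postgres_password:ro \
	-v ~/.n8n:/home/node/.n8n \
	n8nio/n8n
```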
@@ -260,9 +233,9 @@ docker build --build-arg N8N_VERSION=0.18.1 -t n8nio/n8n:0.18.1 .
 ```

-## What does n8n mean and how do you pronounce it
+## What does n8n mean and how do you pronounce it?

-**Short answer:** It means "nodemation"
+**Short answer:** It means "nodemation" and it is pronounced as n-eight-n.

 **Long answer:** I get that question quite often (more often than I expected)
 so I decided it is probably best to answer it here. While looking for a
@@ -305,6 +278,6 @@ Before you upgrade to the latest version make sure to check here if there are an
 ## License

-n8n is [fair-code](http://faircode.io) licensed under [**Apache 2.0 with Commons Clause**](https://github.com/n8n-io/n8n/blob/master/packages/cli/LICENSE.md)
+n8n is [fair-code](http://faircode.io) distributed under [**Apache 2.0 with Commons Clause**](https://github.com/n8n-io/n8n/blob/master/packages/cli/LICENSE.md) license

 Additional information about license can be found in the [FAQ](https://docs.n8n.io/#/faq?id=license)


@@ -6,6 +6,8 @@ if [ -d /root/.n8n ] ; then
 	ln -s /root/.n8n /home/node/
 fi

+chown -R node /home/node
+
 if [ "$#" -gt 0 ]; then
 	# Got started with arguments
 	exec su-exec node "$@"


@@ -2,6 +2,225 @@
 This list shows all the versions which include breaking changes and how to upgrade.
## 0.113.0
### What changed?
In the Dropbox node, both credential types (Access Token & OAuth2) have a new parameter called "APP Access Type".
### When is action necessary?
If you are using a Dropbox app with the permission type "App Folder".
### How to upgrade:
Open your Dropbox node's credentials and set the "APP Access Type" parameter to "App Folder".
## 0.111.0
### What changed?
In the Dropbox node, all operations are now performed relative to the user's root directory.
### When is action necessary?
If you are using any resource/operation with OAuth2 authentication.
If you are using the `folder:list` operation with the parameter `Folder Path` empty (root path) and have a Team Space in your Dropbox account.
### How to upgrade:
Open the Dropbox node, go to the OAuth2 credential you are using and reconnect it again.
Also, if you are using the `folder:list` operation, make sure your logic is taking into account the team folders in the response.
## 0.105.0
### What changed?
In the Hubspot Trigger, multiple events can now be provided, and the field `App ID` was moved to the credentials.
### When is action necessary?
If you are using the Hubspot Trigger node.
### How to upgrade:
Open the Hubspot Trigger and set the events again. Also open the credentials `Hubspot Developer API` and set your App ID.
## 0.104.0
### What changed?
Support for MongoDB as a database for n8n has been dropped as MongoDB had problems saving large amounts of data in a document, among other issues.
### When is action necessary?
If you have been using MongoDB as a database for n8n. Please note that this is not related to the MongoDB node.
### How to upgrade:
Before upgrading, you can [export](https://docs.n8n.io/reference/start-workflows-via-cli.html#export-workflows-and-credentials) all your credentials and workflows using the CLI.
```
n8n export:workflow --backup --output=backups/latest/
n8n export:credentials --backup --output=backups/latest/
```
You can then change the database to one of the supported databases mentioned [here](https://docs.n8n.io/reference/data/database.html). Finally, you can upgrade n8n and [import](https://docs.n8n.io/reference/start-workflows-via-cli.html#import-workflows-and-credentials) all your credentials and workflows back into n8n.
```
n8n import:workflow --separate --input=backups/latest/
n8n import:credentials --separate --input=backups/latest/
```
## 0.102.0
### What changed?
- The `As User` property and the `User Name` field got combined and renamed to `Send as User`. It also got moved under “Add Options”.
- The `Ephemeral` property got removed. To send an ephemeral message, you have to select the "Post (Ephemeral)" operation.
### When is action necessary?
If you are using the following fields or properties in the Slack node:
- As User
- Ephemeral
- User Name
### How to upgrade:
Open the Slack node and set them again to the appropriate values.
----------------------------
### What changed?
If you have a question in Typeform that uses a previously answered question as part of its text, the question text would look like this in the Typeform Trigger node:
`You have chosen {{field:23234242}} as your answer. Is this correct?`
Those curly braces broke the expression editor. The change makes it now display like this:
`You have chosen [field:23234242] as your answer. Is this correct?`
### When is action necessary?
If you are using the Typeform Trigger node with questions using the [Recall information](https://help.typeform.com/hc/en-us/articles/360050447072-What-is-Recall-information-) feature.
### How to upgrade:
In workflows using the Typeform Trigger node, nodes that reference such key names (questions that use a previously answered question as part of its text) will need to be updated.
## 0.95.0
### What changed?
In the Harvest Node, we moved the account field from the credentials to the node parameters. This allows you to work with multiple accounts without having to create multiple credentials.
### When is action necessary?
If you are using the Harvest Node.
### How to upgrade:
Open the node and set the parameter `Account ID`.
## 0.94.0
### What changed?
In the Segment Node, we have changed how the properties 'traits' and 'properties' are defined. Now, key/value pairs can be provided, allowing you to send custom traits/properties.
### When is action necessary?
When the properties 'traits' or 'properties' are set, and one of the following resources/operations is used:
| Resource | Operation |
|--|--|
| Identify | Create |
| Track | Event |
| Track | Page |
| Group | Add |
### How to upgrade:
Open the affected resource/operation and set the parameters 'traits' or 'properties' again.
## 0.93.0
### What changed?
Change in naming of the Authentication field for the Pipedrive Trigger node.
### When is action necessary?
If you had set "Basic Auth" for the "Authentication" field in the node.
### How to upgrade:
The "Authentication" field has been renamed to "Incoming Authentication". Please set the parameter “Incoming Authentication” to “Basic Auth” to activate it again.
## 0.90.0
### What changed?
Node.js version 12.9 or newer is required to run n8n.
### When is action necessary?
If you are running Node.js version older than 12.9.
### How to upgrade:
You can download and install the latest version of Node.js from [here](https://nodejs.org/en/download/).
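If you are unsure which version is currently installed, you can check it before upgrading:

```
node --version
```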
## 0.87.0
### What changed?
The link.fish node got removed because the service is shutting down.
### When is action necessary?
If you are actively using the link.fish node.
### How to upgrade:
Unfortunately, that's not possible. We recommend looking for an alternative service.
## 0.83.0
### What changed?
In the Active Campaign Node, we have changed how the `getAll` operation works with various resources for the sake of consistency. To achieve this, a new parameter called 'Simple' has been added.
### When is action necessary?
When one of the following resources/operations is used:
| Resource | Operation |
|--|--|
| Deal | Get All |
| Connector | Get All |
| E-commerce Order | Get All |
| E-commerce Customer | Get All |
| E-commerce Order Products | Get All |
### How to upgrade:
Open the affected resource/operation and set the parameter `Simple` to false.
## 0.79.0
### What changed?
We have renamed the operations in the Todoist Node for consistency with the codebase. We also deleted the `close_match` and `delete_match` operations as these can be accomplished using the following operations: `getAll`, `close`, and `delete`.
### When is action necessary?
When one of the following operations is used:
- close_by
- close_match
- delete_id
- delete_match
### How to upgrade:
After upgrading, open all workflows which contain the Todoist Node. Set the corresponding operation, and then save the workflow.
If the operations `close_match` or `delete_match` are used, recreate them using the operations: `getAll`, `delete`, and `close`.
## 0.69.0


@@ -19,7 +19,7 @@ Condition notice.
 Software: n8n

-License: Apache 2.0
+License: Apache 2.0 with Commons Clause

 Licensor: n8n GmbH


@@ -2,7 +2,7 @@
 ![n8n.io - Workflow Automation](https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-logo.png)

-n8n is a free and open [fair-code](http://faircode.io) licensed node based Workflow Automation Tool. It can be self-hosted, easily extended, and so also used with internal tools.
+n8n is a free and open [fair-code](http://faircode.io) distributed node based Workflow Automation Tool. It can be self-hosted, easily extended, and so also used with internal tools.

 <a href="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png"><img src="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png" width="550" alt="n8n.io - Screenshot"></a>
@@ -32,7 +32,7 @@ Slack notification every time a Github repository received or lost a star.
 ## Available integrations

-n8n has 100+ different nodes to automate workflows. The list can be found on: [https://n8n.io/nodes](https://n8n.io/nodes)
+n8n has 200+ different nodes to automate workflows. The list can be found on: [https://n8n.io/nodes](https://n8n.io/nodes)

 ## Documentation
@@ -60,9 +60,9 @@ If you are interested in a hosted version of n8n on our infrastructure please co
-## What does n8n mean and how do you pronounce it
+## What does n8n mean and how do you pronounce it?

-**Short answer:** It means "nodemation"
+**Short answer:** It means "nodemation" and it is pronounced as n-eight-n.

 **Long answer:** I get that question quite often (more often than I expected)
 so I decided it is probably best to answer it here. While looking for a
@@ -100,7 +100,7 @@ Before you upgrade to the latest version make sure to check here if there are an
 ## License

-n8n is [fair-code](http://faircode.io) licensed under [**Apache 2.0 with Commons Clause**](https://github.com/n8n-io/n8n/blob/master/packages/cli/LICENSE.md)
+n8n is [fair-code](http://faircode.io) distributed under [**Apache 2.0 with Commons Clause**](https://github.com/n8n-io/n8n/blob/master/packages/cli/LICENSE.md) license

 Additional information about license can be found in the [FAQ](https://docs.n8n.io/#/faq?id=license)


@@ -10,7 +10,7 @@ process.env.NODE_CONFIG_DIR = process.env.NODE_CONFIG_DIR || path.join(__dirname
 var versionFlags = [ // tslint:disable-line:no-var-keyword
 	'-v',
 	'-V',
-	'--version'
+	'--version',
 ];
 if (versionFlags.includes(process.argv.slice(-1)[0])) {
 	console.log(require('../package').version);
@@ -22,23 +22,10 @@ if (process.argv.length === 2) {
 	process.argv.push('start');
 }

-var command = process.argv[2]; // tslint:disable-line:no-var-keyword
-
-// Check if the command the user did enter is supported else stop
-var supportedCommands = [ // tslint:disable-line:no-var-keyword
-	'execute',
-	'help',
-	'start',
-];
-
-if (!supportedCommands.includes(command)) {
-	console.log('\nThe command "' + command + '" is not known!\n');
-	process.argv.pop();
-	process.argv.push('--help');
-}
-
-if (parseInt(process.versions.node.split('.')[0], 10) < 10) {
-	console.log('\nThe Node.js version is too old to run n8n. Please use version 10 or later!\n');
+var nodeVersion = process.versions.node.split('.');
+if (parseInt(nodeVersion[0], 10) < 12 || parseInt(nodeVersion[0], 10) === 12 && parseInt(nodeVersion[1], 10) < 9) {
+	console.log(`\nYour Node.js version (${process.versions.node}) is too old to run n8n.\nPlease update to version 12.9 or later!\n`);
 	process.exit(0);
 }


@@ -10,6 +10,7 @@ import {
 import {
 	ActiveExecutions,
 	CredentialsOverwrites,
+	CredentialTypes,
 	Db,
 	ExternalHooks,
 	GenericHelpers,
@@ -20,7 +21,7 @@ import {
 	WorkflowCredentials,
 	WorkflowHelpers,
 	WorkflowRunner,
-} from "../src";
+} from '../src';

 export class Execute extends Command {
@@ -116,6 +117,8 @@ export class Execute extends Command {
 		// Add the found types to an instance other parts of the application can use
 		const nodeTypes = NodeTypes();
 		await nodeTypes.init(loadNodesAndCredentials.nodeTypes);
+		const credentialTypes = CredentialTypes();
+		await credentialTypes.init(loadNodesAndCredentials.credentialTypes);

 		if (!WorkflowHelpers.isWorkflowIdValid(workflowId)) {
 			workflowId = undefined;
@@ -124,7 +127,7 @@ export class Execute extends Command {
 		// Check if the workflow contains the required "Start" node
 		// "requiredNodeTypes" are also defined in editor-ui/views/NodeView.vue
 		const requiredNodeTypes = ['n8n-nodes-base.start'];
-		let startNode: INode | undefined= undefined;
+		let startNode: INode | undefined = undefined;
 		for (const node of workflowData!.nodes) {
 			if (requiredNodeTypes.includes(node.type)) {
 				startNode = node;


@@ -0,0 +1,161 @@
import {
Command,
flags,
} from '@oclif/command';
import {
Credentials,
UserSettings,
} from 'n8n-core';
import {
IDataObject
} from 'n8n-workflow';
import {
Db,
GenericHelpers,
ICredentialsDecryptedDb,
} from '../../src';
import * as fs from 'fs';
import * as path from 'path';
export class ExportCredentialsCommand extends Command {
static description = 'Export credentials';
static examples = [
`$ n8n export:credentials --all`,
`$ n8n export:credentials --id=5 --output=file.json`,
`$ n8n export:credentials --all --output=backups/latest.json`,
`$ n8n export:credentials --backup --output=backups/latest/`,
`$ n8n export:credentials --all --decrypted --output=backups/decrypted.json`,
];
static flags = {
help: flags.help({ char: 'h' }),
all: flags.boolean({
description: 'Export all credentials',
}),
backup: flags.boolean({
description: 'Sets --all --pretty --separate for simple backups. Only --output has to be set additionally.',
}),
id: flags.string({
description: 'The ID of the credential to export',
}),
output: flags.string({
char: 'o',
description: 'Output file name or directory if using separate files',
}),
pretty: flags.boolean({
description: 'Format the output in an easier to read fashion',
}),
separate: flags.boolean({
description: 'Exports one file per credential (useful for versioning). A directory must be provided via --output.',
}),
decrypted: flags.boolean({
description: 'Exports data decrypted / in plain text. ALL SENSITIVE INFORMATION WILL BE VISIBLE IN THE FILES. Use to migrate from one installation to another that has a different secret key (in the config file).',
}),
};
async run() {
const { flags } = this.parse(ExportCredentialsCommand);
if (flags.backup) {
flags.all = true;
flags.pretty = true;
flags.separate = true;
}
if (!flags.all && !flags.id) {
GenericHelpers.logOutput(`Either option "--all" or "--id" has to be set!`);
return;
}
if (flags.all && flags.id) {
GenericHelpers.logOutput(`You should either use "--all" or "--id" but never both!`);
return;
}
if (flags.separate) {
try {
if (!flags.output) {
GenericHelpers.logOutput(`You must provide an output directory via --output when using --separate`);
return;
}
if (fs.existsSync(flags.output)) {
if (!fs.lstatSync(flags.output).isDirectory()) {
GenericHelpers.logOutput(`The parameter --output must be a directory`);
return;
}
} else {
fs.mkdirSync(flags.output, { recursive: true });
}
} catch (e) {
console.error('\nFILESYSTEM ERROR');
console.log('====================================');
console.error(e.message);
console.error(e.stack);
this.exit(1);
}
} else if (flags.output) {
if (fs.existsSync(flags.output)) {
if (fs.lstatSync(flags.output).isDirectory()) {
GenericHelpers.logOutput(`The parameter --output must be a writable file`);
return;
}
}
}
try {
await Db.init();
const findQuery: IDataObject = {};
if (flags.id) {
findQuery.id = flags.id;
}
const credentials = await Db.collections.Credentials!.find(findQuery);
if (flags.decrypted) {
const encryptionKey = await UserSettings.getEncryptionKey();
if (encryptionKey === undefined) {
throw new Error('No encryption key was found to decrypt the credentials!');
}
for (let i = 0; i < credentials.length; i++) {
const { name, type, nodesAccess, data } = credentials[i];
const credential = new Credentials(name, type, nodesAccess, data);
const plainData = credential.getData(encryptionKey);
(credentials[i] as ICredentialsDecryptedDb).data = plainData;
}
}
if (credentials.length === 0) {
throw new Error('No credentials found with specified filters.');
}
if (flags.separate) {
let fileContents: string, i: number;
for (i = 0; i < credentials.length; i++) {
fileContents = JSON.stringify(credentials[i], null, flags.pretty ? 2 : undefined);
const filename = (flags.output!.endsWith(path.sep) ? flags.output! : flags.output + path.sep) + credentials[i].id + '.json';
fs.writeFileSync(filename, fileContents);
}
console.log('Successfully exported', i, 'credentials.');
} else {
const fileContents = JSON.stringify(credentials, null, flags.pretty ? 2 : undefined);
if (flags.output) {
fs.writeFileSync(flags.output!, fileContents);
console.log('Successfully exported', credentials.length, 'credentials.');
} else {
console.log(fileContents);
}
}
} catch (error) {
this.error(error.message);
this.exit(1);
}
}
}


@@ -0,0 +1,137 @@
import {
Command,
flags,
} from '@oclif/command';
import {
IDataObject
} from 'n8n-workflow';
import {
Db,
GenericHelpers,
} from '../../src';
import * as fs from 'fs';
import * as path from 'path';
export class ExportWorkflowsCommand extends Command {
static description = 'Export workflows';
static examples = [
`$ n8n export:workflow --all`,
`$ n8n export:workflow --id=5 --output=file.json`,
`$ n8n export:workflow --all --output=backups/latest/`,
`$ n8n export:workflow --backup --output=backups/latest/`,
];
static flags = {
help: flags.help({ char: 'h' }),
all: flags.boolean({
description: 'Export all workflows',
}),
backup: flags.boolean({
description: 'Sets --all --pretty --separate for simple backups. Only --output has to be set additionally.',
}),
id: flags.string({
description: 'The ID of the workflow to export',
}),
output: flags.string({
char: 'o',
description: 'Output file name or directory if using separate files',
}),
pretty: flags.boolean({
description: 'Format the output in an easier to read fashion',
}),
separate: flags.boolean({
description: 'Exports one file per workflow (useful for versioning). A directory must be provided via --output.',
}),
};
async run() {
const { flags } = this.parse(ExportWorkflowsCommand);
if (flags.backup) {
flags.all = true;
flags.pretty = true;
flags.separate = true;
}
if (!flags.all && !flags.id) {
GenericHelpers.logOutput(`Either option "--all" or "--id" has to be set!`);
return;
}
if (flags.all && flags.id) {
GenericHelpers.logOutput(`You should either use "--all" or "--id" but never both!`);
return;
}
if (flags.separate) {
try {
if (!flags.output) {
GenericHelpers.logOutput(`You must provide an output directory via --output when using --separate`);
return;
}
if (fs.existsSync(flags.output)) {
if (!fs.lstatSync(flags.output).isDirectory()) {
GenericHelpers.logOutput(`The parameter --output must be a directory`);
return;
}
} else {
fs.mkdirSync(flags.output, { recursive: true });
}
} catch (e) {
console.error('\nFILESYSTEM ERROR');
console.log('====================================');
console.error(e.message);
console.error(e.stack);
this.exit(1);
}
} else if (flags.output) {
if (fs.existsSync(flags.output)) {
if (fs.lstatSync(flags.output).isDirectory()) {
GenericHelpers.logOutput(`The parameter --output must be a writable file`);
return;
}
}
}
try {
await Db.init();
const findQuery: IDataObject = {};
if (flags.id) {
findQuery.id = flags.id;
}
const workflows = await Db.collections.Workflow!.find(findQuery);
if (workflows.length === 0) {
throw new Error('No workflows found with specified filters.');
}
if (flags.separate) {
let fileContents: string, i: number;
for (i = 0; i < workflows.length; i++) {
fileContents = JSON.stringify(workflows[i], null, flags.pretty ? 2 : undefined);
const filename = (flags.output!.endsWith(path.sep) ? flags.output! : flags.output + path.sep) + workflows[i].id + '.json';
fs.writeFileSync(filename, fileContents);
}
console.log('Successfully exported', i, 'workflows.');
} else {
const fileContents = JSON.stringify(workflows, null, flags.pretty ? 2 : undefined);
if (flags.output) {
fs.writeFileSync(flags.output!, fileContents);
console.log('Successfully exported', workflows.length, workflows.length === 1 ? 'workflow.' : 'workflows.');
} else {
console.log(fileContents);
}
}
} catch (error) {
this.error(error.message);
this.exit(1);
}
}
}


@@ -0,0 +1,98 @@
import {
Command,
flags,
} from '@oclif/command';
import {
Credentials,
UserSettings,
} from 'n8n-core';
import {
Db,
GenericHelpers,
} from '../../src';
import * as fs from 'fs';
import * as glob from 'glob-promise';
import * as path from 'path';
export class ImportCredentialsCommand extends Command {
static description = 'Import credentials';
static examples = [
`$ n8n import:credentials --input=file.json`,
`$ n8n import:credentials --separate --input=backups/latest/`,
];
static flags = {
help: flags.help({ char: 'h' }),
input: flags.string({
char: 'i',
description: 'Input file name or directory if --separate is used',
}),
separate: flags.boolean({
description: 'Imports *.json files from directory provided by --input',
}),
};
async run() {
const { flags } = this.parse(ImportCredentialsCommand);
if (!flags.input) {
GenericHelpers.logOutput(`An input file or directory must be provided via --input`);
return;
}
if (flags.separate) {
if (fs.existsSync(flags.input)) {
if (!fs.lstatSync(flags.input).isDirectory()) {
GenericHelpers.logOutput(`The parameter --input must be a directory`);
return;
}
}
}
try {
await Db.init();
let i;
const encryptionKey = await UserSettings.getEncryptionKey();
if (encryptionKey === undefined) {
throw new Error('No encryption key was found to encrypt the credentials!');
}
if (flags.separate) {
const files = await glob((flags.input.endsWith(path.sep) ? flags.input : flags.input + path.sep) + '*.json');
for (i = 0; i < files.length; i++) {
const credential = JSON.parse(fs.readFileSync(files[i], { encoding: 'utf8' }));
if (typeof credential.data === 'object') {
// plain data / decrypted input. Should be encrypted first.
Credentials.prototype.setData.call(credential, credential.data, encryptionKey);
}
await Db.collections.Credentials!.save(credential);
}
} else {
const fileContents = JSON.parse(fs.readFileSync(flags.input, { encoding: 'utf8' }));
if (!Array.isArray(fileContents)) {
throw new Error(`File does not seem to contain credentials.`);
}
for (i = 0; i < fileContents.length; i++) {
if (typeof fileContents[i].data === 'object') {
// plain data / decrypted input. Should be encrypted first.
Credentials.prototype.setData.call(fileContents[i], fileContents[i].data, encryptionKey);
}
await Db.collections.Credentials!.save(fileContents[i]);
}
}
console.log('Successfully imported', i, 'credentials.');
} catch (error) {
this.error(error.message);
this.exit(1);
}
}
}


@@ -0,0 +1,78 @@
import {
Command,
flags,
} from '@oclif/command';
import {
Db,
GenericHelpers,
} from '../../src';
import * as fs from 'fs';
import * as glob from 'glob-promise';
import * as path from 'path';
export class ImportWorkflowsCommand extends Command {
static description = 'Import workflows';
static examples = [
`$ n8n import:workflow --input=file.json`,
`$ n8n import:workflow --separate --input=backups/latest/`,
];
static flags = {
help: flags.help({ char: 'h' }),
input: flags.string({
char: 'i',
description: 'Input file name or directory if --separate is used',
}),
separate: flags.boolean({
description: 'Imports *.json files from directory provided by --input',
}),
};
async run() {
const { flags } = this.parse(ImportWorkflowsCommand);
if (!flags.input) {
GenericHelpers.logOutput(`An input file or directory must be provided via --input`);
return;
}
if (flags.separate) {
if (fs.existsSync(flags.input)) {
if (!fs.lstatSync(flags.input).isDirectory()) {
GenericHelpers.logOutput(`The parameter --input must be a directory`);
return;
}
}
}
try {
await Db.init();
let i;
if (flags.separate) {
const files = await glob((flags.input.endsWith(path.sep) ? flags.input : flags.input + path.sep) + '*.json');
for (i = 0; i < files.length; i++) {
const workflow = JSON.parse(fs.readFileSync(files[i], { encoding: 'utf8' }));
await Db.collections.Workflow!.save(workflow);
}
} else {
const fileContents = JSON.parse(fs.readFileSync(flags.input, { encoding: 'utf8' }));
if (!Array.isArray(fileContents)) {
throw new Error(`File does not seem to contain workflows.`);
}
for (i = 0; i < fileContents.length; i++) {
await Db.collections.Workflow!.save(fileContents[i]);
}
}
console.log('Successfully imported', i, i === 1 ? 'workflow.' : 'workflows.');
} catch (error) {
this.error(error.message);
this.exit(1);
}
}
}


@@ -5,20 +5,25 @@ import {
 } from 'n8n-core';

 import { Command, flags } from '@oclif/command';
 const open = require('open');
+import * as Redis from 'ioredis';

 import * as config from '../config';
 import {
+	ActiveExecutions,
 	ActiveWorkflowRunner,
-	CredentialTypes,
 	CredentialsOverwrites,
+	CredentialTypes,
+	DatabaseType,
 	Db,
 	ExternalHooks,
 	GenericHelpers,
+	IExecutionsCurrentSummary,
 	LoadNodesAndCredentials,
 	NodeTypes,
 	Server,
 	TestWebhooks,
-} from "../src";
+} from '../src';
+import { IDataObject } from 'n8n-workflow';

 let activeWorkflowRunner: ActiveWorkflowRunner.ActiveWorkflowRunner | undefined;
@@ -68,14 +73,20 @@ export class Start extends Command {
 	static async stopProcess() {
 		console.log(`\nStopping n8n...`);

+		try {
+			const externalHooks = ExternalHooks();
+			await externalHooks.run('n8n.stop', []);
+
 			setTimeout(() => {
 				// In case that something goes wrong with shutdown we
 				// kill after max. 30 seconds no matter what
 				process.exit(processExistCode);
 			}, 30000);

+			const skipWebhookDeregistration = config.get('endpoints.skipWebhoooksDeregistrationOnShutdown') as boolean;
+
 			const removePromises = [];
-		if (activeWorkflowRunner !== undefined) {
+			if (activeWorkflowRunner !== undefined && skipWebhookDeregistration !== true) {
 				removePromises.push(activeWorkflowRunner.removeAll());
 			}
@@ -85,6 +96,28 @@ export class Start extends Command {
 			await Promise.all(removePromises);

+			// Wait for active workflow executions to finish
+			const activeExecutionsInstance = ActiveExecutions.getInstance();
+			let executingWorkflows = activeExecutionsInstance.getActiveExecutions() as IExecutionsCurrentSummary[];
+
+			let count = 0;
+			while (executingWorkflows.length !== 0) {
+				if (count++ % 4 === 0) {
+					console.log(`Waiting for ${executingWorkflows.length} active executions to finish...`);
+					executingWorkflows.map(execution => {
+						console.log(` - Execution ID ${execution.id}, workflow ID: ${execution.workflowId}`);
+					});
+				}
+				await new Promise((resolve) => {
+					setTimeout(resolve, 500);
+				});
+				executingWorkflows = activeExecutionsInstance.getActiveExecutions();
+			}
+		} catch (error) {
+			console.error('There was an error shutting down n8n.', error);
+		}
+
 		process.exit(processExistCode);
 	}
@@ -97,10 +130,16 @@ export class Start extends Command {
 		const { flags } = this.parse(Start);

 		// Wrap that the process does not close but we can still use async
-		(async () => {
+		await (async () => {
 			try {
 				// Start directly with the init of the database to improve startup time
-				const startDbInitPromise = Db.init();
+				const startDbInitPromise = Db.init().catch((error: Error) => {
+					console.error(`There was an error initializing DB: ${error.message}`);
+
+					processExistCode = 1;
+					// @ts-ignore
+					process.emit('SIGINT');
+				});

 				// Make sure the settings exist
 				const userSettings = await UserSettings.prepareUserSettings();
@@ -126,6 +165,70 @@ export class Start extends Command {
 				// Wait till the database is ready
 				await startDbInitPromise;
if (config.get('executions.mode') === 'queue') {
const redisHost = config.get('queue.bull.redis.host');
const redisPassword = config.get('queue.bull.redis.password');
const redisPort = config.get('queue.bull.redis.port');
const redisDB = config.get('queue.bull.redis.db');
const redisConnectionTimeoutLimit = config.get('queue.bull.redis.timeoutThreshold');
let lastTimer = 0, cumulativeTimeout = 0;
const settings = {
retryStrategy: (times: number): number | null => {
const now = Date.now();
if (now - lastTimer > 30000) {
// Means we had no timeout at all or last timeout was temporary and we recovered
lastTimer = now;
cumulativeTimeout = 0;
} else {
cumulativeTimeout += now - lastTimer;
lastTimer = now;
if (cumulativeTimeout > redisConnectionTimeoutLimit) {
console.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + ' ms. Exiting process.');
process.exit(1);
}
}
return 500;
},
} as IDataObject;
if (redisHost) {
settings.host = redisHost;
}
if (redisPassword) {
settings.password = redisPassword;
}
if (redisPort) {
settings.port = redisPort;
}
if (redisDB) {
settings.db = redisDB;
}
// This connection is going to be our heartbeat
// IORedis automatically pings redis and tries to reconnect
// We will be using the retryStrategy above
// to control how and when to exit.
const redis = new Redis(settings);
redis.on('error', (error) => {
if (error.toString().includes('ECONNREFUSED') === true) {
console.warn('Redis unavailable - trying to reconnect...');
} else {
console.warn('Error with Redis: ', error);
}
});
}
const dbType = await GenericHelpers.getConfigValue('database.type') as DatabaseType;
if (dbType === 'sqlite') {
const shouldRunVacuum = config.get('database.sqlite.executeVacuumOnStartup') as boolean;
if (shouldRunVacuum) {
await Db.collections.Execution!.query('VACUUM;');
}
}
if (flags.tunnel === true) { if (flags.tunnel === true) {
this.log('\nWaiting for tunnel ...'); this.log('\nWaiting for tunnel ...');
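
The Redis retry strategy added above deserves a closer look: ioredis invokes retryStrategy after every failed connection attempt, and the number it returns is the delay in milliseconds before the next attempt. The code tracks cumulative downtime and aborts the process once it crosses queue.bull.redis.timeoutThreshold. A hedged, self-contained sketch of the same idea as a reusable factory:

import * as Redis from 'ioredis';

function makeRetryStrategy(timeoutThresholdMs: number): (times: number) => number {
	let lastTimer = 0;
	let cumulativeTimeout = 0;
	return (): number => {
		const now = Date.now();
		if (now - lastTimer > 30000) {
			// No failure in the last 30 s: treat this as a fresh outage
			lastTimer = now;
			cumulativeTimeout = 0;
		} else {
			// Consecutive failures: accumulate the downtime
			cumulativeTimeout += now - lastTimer;
			lastTimer = now;
			if (cumulativeTimeout > timeoutThresholdMs) {
				console.error('Unable to connect to Redis after ' + timeoutThresholdMs + ' ms. Exiting process.');
				process.exit(1);
			}
		}
		return 500; // retry every 500 ms until the threshold is hit
	};
}

const redis = new Redis({ host: 'localhost', port: 6379, retryStrategy: makeRetryStrategy(10000) });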
@ -156,8 +259,8 @@ export class Start extends Command {
// @ts-ignore // @ts-ignore
const webhookTunnel = await localtunnel(port, tunnelSettings); const webhookTunnel = await localtunnel(port, tunnelSettings);
process.env.WEBHOOK_TUNNEL_URL = webhookTunnel.url + '/'; process.env.WEBHOOK_URL = webhookTunnel.url + '/';
this.log(`Tunnel URL: ${process.env.WEBHOOK_TUNNEL_URL}\n`); this.log(`Tunnel URL: ${process.env.WEBHOOK_URL}\n`);
this.log('IMPORTANT! Do not share with anybody as it would give people access to your n8n instance!'); this.log('IMPORTANT! Do not share with anybody as it would give people access to your n8n instance!');
} }
@ -181,7 +284,7 @@ export class Start extends Command {
Start.openBrowser(); Start.openBrowser();
} }
this.log(`\nPress "o" to open in Browser.`); this.log(`\nPress "o" to open in Browser.`);
process.stdin.on("data", (key : string) => { process.stdin.on('data', (key: string) => {
if (key === 'o') { if (key === 'o') {
Start.openBrowser(); Start.openBrowser();
inputText = ''; inputText = '';

View file

@ -0,0 +1,85 @@
import {
Command, flags,
} from '@oclif/command';
import {
IDataObject
} from 'n8n-workflow';
import {
Db,
GenericHelpers,
} from '../../src';
export class UpdateWorkflowCommand extends Command {
static description = '\nUpdate workflows';
static examples = [
`$ n8n update:workflow --all --active=false`,
`$ n8n update:workflow --id=5 --active=true`,
];
static flags = {
help: flags.help({ char: 'h' }),
active: flags.string({
description: 'Active state the workflow/s should be set to',
}),
all: flags.boolean({
description: 'Operate on all workflows',
}),
id: flags.string({
description: 'The ID of the workflow to operate on',
}),
};
async run() {
const { flags } = this.parse(UpdateWorkflowCommand);
if (!flags.all && !flags.id) {
GenericHelpers.logOutput(`Either option "--all" or "--id" has to be set!`);
return;
}
if (flags.all && flags.id) {
GenericHelpers.logOutput(`Options "--all" and "--id" cannot be used together; set one or the other, never both!`);
return;
}
const updateQuery: IDataObject = {};
if (flags.active === undefined) {
GenericHelpers.logOutput(`No update flag like "--active=true" has been set!`);
return;
} else {
if (!['false', 'true'].includes(flags.active)) {
GenericHelpers.logOutput(`Valid values for flag "--active" are only "false" or "true"!`);
return;
}
updateQuery.active = flags.active === 'true';
}
try {
await Db.init();
const findQuery: IDataObject = {};
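// Select a single workflow by ID, or every currently active workflow when "--all" is used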
if (flags.id) {
console.log(`Updating workflow with ID: ${flags.id}`);
findQuery.id = flags.id;
} else {
console.log('Updating all active workflows');
findQuery.active = true;
}
await Db.collections.Workflow!.update(findQuery, updateQuery);
console.log('Done');
} catch (e) {
console.error('\nGOT ERROR');
console.log('====================================');
console.error(e.message);
console.error(e.stack);
this.exit(1);
}
this.exit();
}
}

View file

@ -0,0 +1,223 @@
import {
UserSettings,
} from 'n8n-core';
import { Command, flags } from '@oclif/command';
import * as Redis from 'ioredis';
import * as config from '../config';
import {
ActiveExecutions,
ActiveWorkflowRunner,
CredentialsOverwrites,
CredentialTypes,
Db,
ExternalHooks,
GenericHelpers,
LoadNodesAndCredentials,
NodeTypes,
TestWebhooks,
WebhookServer,
} from '../src';
import { IDataObject } from 'n8n-workflow';
let activeWorkflowRunner: ActiveWorkflowRunner.ActiveWorkflowRunner | undefined;
let processExistCode = 0;
export class Webhook extends Command {
static description = 'Starts n8n webhook process. Intercepts only production URLs.';
static examples = [
`$ n8n webhook`,
];
static flags = {
help: flags.help({ char: 'h' }),
};
/**
* Stops n8n in a graceful way.
* For example, it makes sure that all webhooks registered with
* third-party services get removed.
*/
static async stopProcess() {
console.log(`\nStopping n8n...`);
try {
const externalHooks = ExternalHooks();
await externalHooks.run('n8n.stop', []);
setTimeout(() => {
// In case that something goes wrong with shutdown we
// kill after max. 30 seconds no matter what
process.exit(processExistCode);
}, 30000);
const removePromises = [];
if (activeWorkflowRunner !== undefined) {
removePromises.push(activeWorkflowRunner.removeAll());
}
// Remove all test webhooks
const testWebhooks = TestWebhooks.getInstance();
removePromises.push(testWebhooks.removeAll());
await Promise.all(removePromises);
// Wait for active workflow executions to finish
const activeExecutionsInstance = ActiveExecutions.getInstance();
let executingWorkflows = activeExecutionsInstance.getActiveExecutions();
let count = 0;
while (executingWorkflows.length !== 0) {
if (count++ % 4 === 0) {
console.log(`Waiting for ${executingWorkflows.length} active executions to finish...`);
}
await new Promise((resolve) => {
setTimeout(resolve, 500);
});
executingWorkflows = activeExecutionsInstance.getActiveExecutions();
}
} catch (error) {
console.error('There was an error shutting down n8n.', error);
}
process.exit(processExistCode);
}
async run() {
// Make sure that n8n shuts down gracefully if possible
process.on('SIGTERM', Webhook.stopProcess);
process.on('SIGINT', Webhook.stopProcess);
const { flags } = this.parse(Webhook);
// Wrap that the process does not close but we can still use async
await (async () => {
if (config.get('executions.mode') !== 'queue') {
/**
* It is technically possible to run without queues, but
* there are two known bugs when running in this mode:
* - The executions list becomes unreliable, as the main process
* is not aware of executions running in the webhook processes
* and therefore displays them as errors, since it cannot
* determine whether they are still running or have crashed.
* - Currently executing jobs started by webhook processes cannot
* be stopped, as the main process has no way to tell the
* webhook processes to interrupt a workflow execution.
*/
this.error('Webhook processes can only run with execution mode as queue.');
}
try {
// Start directly with the init of the database to improve startup time
const startDbInitPromise = Db.init().catch(error => {
console.error(`There was an error initializing DB: ${error.message}`);
processExistCode = 1;
// @ts-ignore
process.emit('SIGINT');
});
// Make sure the settings exist
const userSettings = await UserSettings.prepareUserSettings();
// Load all node and credential types
const loadNodesAndCredentials = LoadNodesAndCredentials();
await loadNodesAndCredentials.init();
// Load the credentials overwrites if any exist
const credentialsOverwrites = CredentialsOverwrites();
await credentialsOverwrites.init();
// Load all external hooks
const externalHooks = ExternalHooks();
await externalHooks.init();
// Add the found types to an instance other parts of the application can use
const nodeTypes = NodeTypes();
await nodeTypes.init(loadNodesAndCredentials.nodeTypes);
const credentialTypes = CredentialTypes();
await credentialTypes.init(loadNodesAndCredentials.credentialTypes);
// Wait till the database is ready
await startDbInitPromise;
if (config.get('executions.mode') === 'queue') {
const redisHost = config.get('queue.bull.redis.host');
const redisPassword = config.get('queue.bull.redis.password');
const redisPort = config.get('queue.bull.redis.port');
const redisDB = config.get('queue.bull.redis.db');
const redisConnectionTimeoutLimit = config.get('queue.bull.redis.timeoutThreshold');
let lastTimer = 0, cumulativeTimeout = 0;
const settings = {
retryStrategy: (times: number): number | null => {
const now = Date.now();
if (now - lastTimer > 30000) {
// Means we had no timeout at all or last timeout was temporary and we recovered
lastTimer = now;
cumulativeTimeout = 0;
} else {
cumulativeTimeout += now - lastTimer;
lastTimer = now;
if (cumulativeTimeout > redisConnectionTimeoutLimit) {
console.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + ' ms. Exiting process.');
process.exit(1);
}
}
return 500;
},
} as IDataObject;
if (redisHost) {
settings.host = redisHost;
}
if (redisPassword) {
settings.password = redisPassword;
}
if (redisPort) {
settings.port = redisPort;
}
if (redisDB) {
settings.db = redisDB;
}
// This connection is going to be our heartbeat
// IORedis automatically pings redis and tries to reconnect
// We will be using the retryStrategy above
// to control how and when to exit.
const redis = new Redis(settings);
redis.on('error', (error) => {
if (error.toString().includes('ECONNREFUSED') === true) {
console.warn('Redis unavailable - trying to reconnect...');
} else {
console.warn('Error with Redis: ', error);
}
});
}
await WebhookServer.start();
// Prepare the runner to serve production webhooks (trigger and poll nodes are not started in this process)
activeWorkflowRunner = ActiveWorkflowRunner.getInstance();
await activeWorkflowRunner.initWebhooks();
const editorUrl = GenericHelpers.getBaseUrl();
this.log('Webhook listener waiting for requests.');
} catch (error) {
this.error(`There was an error: ${error.message}`);
processExistCode = 1;
// @ts-ignore
process.emit('SIGINT');
}
})();
}
}

View file

@ -0,0 +1,269 @@
import * as PCancelable from 'p-cancelable';
import { Command, flags } from '@oclif/command';
import {
UserSettings,
WorkflowExecute,
} from 'n8n-core';
import {
IDataObject,
INodeTypes,
IRun,
IWorkflowExecuteHooks,
Workflow,
WorkflowHooks,
} from 'n8n-workflow';
import {
FindOneOptions,
} from 'typeorm';
import {
ActiveExecutions,
CredentialsOverwrites,
CredentialTypes,
Db,
ExternalHooks,
GenericHelpers,
IBullJobData,
IBullJobResponse,
IExecutionFlattedDb,
IExecutionResponse,
LoadNodesAndCredentials,
NodeTypes,
ResponseHelper,
WorkflowCredentials,
WorkflowExecuteAdditionalData,
} from '../src';
import * as config from '../config';
import * as Bull from 'bull';
import * as Queue from '../src/Queue';
export class Worker extends Command {
static description = '\nStarts an n8n worker';
static examples = [
`$ n8n worker --concurrency=5`,
];
static flags = {
help: flags.help({ char: 'h' }),
concurrency: flags.integer({
default: 10,
description: 'How many jobs can run in parallel.',
}),
};
static runningJobs: {
[key: string]: PCancelable<IRun>;
} = {};
static jobQueue: Bull.Queue;
static processExistCode = 0;
// static activeExecutions = ActiveExecutions.getInstance();
/**
* Stops n8n in a graceful way.
* For example, it makes sure that all currently running jobs
* finish before the process exits.
*/
static async stopProcess() {
console.log(`\nStopping n8n...`);
// Stop accepting new jobs
await Worker.jobQueue.pause(true);
try {
const externalHooks = ExternalHooks();
await externalHooks.run('n8n.stop', []);
const maxStopTime = 30000;
const stopTime = new Date().getTime() + maxStopTime;
setTimeout(() => {
// In case that something goes wrong with shutdown we
// kill after max. 30 seconds no matter what
process.exit(Worker.processExistCode);
}, maxStopTime);
// Wait for active workflow executions to finish
let count = 0;
while (Object.keys(Worker.runningJobs).length !== 0) {
if (count++ % 4 === 0) {
const waitLeft = Math.ceil((stopTime - new Date().getTime()) / 1000);
console.log(`Waiting for ${Object.keys(Worker.runningJobs).length} active executions to finish... (wait ${waitLeft} more seconds)`);
}
await new Promise((resolve) => {
setTimeout(resolve, 500);
});
}
} catch (error) {
console.error('There was an error shutting down n8n.', error);
}
process.exit(Worker.processExistCode);
}
async runJob(job: Bull.Job, nodeTypes: INodeTypes): Promise<IBullJobResponse> {
const jobData = job.data as IBullJobData;
const executionDb = await Db.collections.Execution!.findOne(jobData.executionId) as IExecutionFlattedDb;
const currentExecutionDb = ResponseHelper.unflattenExecutionData(executionDb) as IExecutionResponse;
console.log(`Start job: ${job.id} (Workflow ID: ${currentExecutionDb.workflowData.id} | Execution: ${jobData.executionId})`);
let staticData = currentExecutionDb.workflowData!.staticData;
if (jobData.loadStaticData === true) {
const findOptions = {
select: ['id', 'staticData'],
} as FindOneOptions;
const workflowData = await Db.collections!.Workflow!.findOne(currentExecutionDb.workflowData.id, findOptions);
if (workflowData === undefined) {
throw new Error(`The workflow with the ID "${currentExecutionDb.workflowData.id}" could not be found`);
}
staticData = workflowData.staticData;
}
const workflow = new Workflow({ id: currentExecutionDb.workflowData.id as string, name: currentExecutionDb.workflowData.name, nodes: currentExecutionDb.workflowData!.nodes, connections: currentExecutionDb.workflowData!.connections, active: currentExecutionDb.workflowData!.active, nodeTypes, staticData, settings: currentExecutionDb.workflowData!.settings });
const credentials = await WorkflowCredentials(currentExecutionDb.workflowData.nodes);
const additionalData = await WorkflowExecuteAdditionalData.getBase(credentials);
additionalData.hooks = WorkflowExecuteAdditionalData.getWorkflowHooksWorkerExecuter(currentExecutionDb.mode, job.data.executionId, currentExecutionDb.workflowData, { retryOf: currentExecutionDb.retryOf as string });
let workflowExecute: WorkflowExecute;
let workflowRun: PCancelable<IRun>;
if (currentExecutionDb.data !== undefined) {
workflowExecute = new WorkflowExecute(additionalData, currentExecutionDb.mode, currentExecutionDb.data);
workflowRun = workflowExecute.processRunExecutionData(workflow);
} else {
// Execute all nodes
// Can execute without webhook so go on
workflowExecute = new WorkflowExecute(additionalData, currentExecutionDb.mode);
workflowRun = workflowExecute.run(workflow);
}
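// Register the cancelable run so the 'global:progress' cancel handler in run() can abort it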
Worker.runningJobs[job.id] = workflowRun;
// Wait till the execution is finished
const runData = await workflowRun;
delete Worker.runningJobs[job.id];
return {
success: true,
};
}
async run() {
console.log('Starting n8n worker...');
// Make sure that n8n shuts down gracefully if possible
process.on('SIGTERM', Worker.stopProcess);
process.on('SIGINT', Worker.stopProcess);
// Wrap that the process does not close but we can still use async
await (async () => {
try {
const { flags } = this.parse(Worker);
// Start directly with the init of the database to improve startup time
const startDbInitPromise = Db.init().catch(error => {
console.error(`There was an error initializing DB: ${error.message}`);
Worker.processExistCode = 1;
// @ts-ignore
process.emit('SIGINT');
});
// Make sure the settings exist
await UserSettings.prepareUserSettings();
// Load all node and credential types
const loadNodesAndCredentials = LoadNodesAndCredentials();
await loadNodesAndCredentials.init();
// Load the credentials overwrites if any exist
const credentialsOverwrites = CredentialsOverwrites();
await credentialsOverwrites.init();
// Load all external hooks
const externalHooks = ExternalHooks();
await externalHooks.init();
// Add the found types to an instance other parts of the application can use
const nodeTypes = NodeTypes();
await nodeTypes.init(loadNodesAndCredentials.nodeTypes);
const credentialTypes = CredentialTypes();
await credentialTypes.init(loadNodesAndCredentials.credentialTypes);
// Wait till the database is ready
await startDbInitPromise;
const redisConnectionTimeoutLimit = config.get('queue.bull.redis.timeoutThreshold');
Worker.jobQueue = Queue.getInstance().getBullObjectInstance();
Worker.jobQueue.process(flags.concurrency, (job) => this.runJob(job, nodeTypes));
const versions = await GenericHelpers.getVersions();
console.log('\nn8n worker is now ready');
console.log(` * Version: ${versions.cli}`);
console.log(` * Concurrency: ${flags.concurrency}`);
console.log('');
Worker.jobQueue.on('global:progress', (jobId, progress) => {
// Progress of a job got updated which does get used
// to communicate that a job got canceled.
if (progress === -1) {
// Job has to get canceled
if (Worker.runningJobs[jobId] !== undefined) {
// Job is processed by current worker so cancel
Worker.runningJobs[jobId].cancel();
delete Worker.runningJobs[jobId];
}
}
});
let lastTimer = 0, cumulativeTimeout = 0;
Worker.jobQueue.on('error', (error: Error) => {
if (error.toString().includes('ECONNREFUSED') === true) {
const now = Date.now();
if (now - lastTimer > 30000) {
// Means we had no timeout at all or last timeout was temporary and we recovered
lastTimer = now;
cumulativeTimeout = 0;
} else {
cumulativeTimeout += now - lastTimer;
lastTimer = now;
if (cumulativeTimeout > redisConnectionTimeoutLimit) {
console.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + ' ms. Exiting process.');
process.exit(1);
}
}
console.warn('Redis unavailable - trying to reconnect...');
} else if (error.toString().includes('Error initializing Lua scripts') === true) {
// This is a non-recoverable error
// Happens when worker starts and Redis is unavailable
// Even if Redis comes back online, worker will be zombie
console.error('Error initializing worker.');
process.exit(2);
} else {
console.error('Error from queue: ', error);
}
});
} catch (error) {
this.error(`There was an error: ${error.message}`);
Worker.processExistCode = 1;
// @ts-ignore
process.emit('SIGINT');
}
})();
}
}
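
The 'global:progress' handler above implements job cancellation: Bull has no native cancel command, so a progress value of -1 is used as a sentinel that every worker observes, and whichever worker owns the job cancels it. A sketch of the producer side (a hypothetical helper, assuming an already-created Bull queue):

import * as Bull from 'bull';

async function requestJobCancellation(queue: Bull.Queue, jobId: string): Promise<void> {
	const job = await queue.getJob(jobId);
	if (job !== null) {
		// -1 is the agreed-upon sentinel; the owning worker cancels the
		// run and removes it from its runningJobs map.
		await job.progress(-1);
	}
}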

View file

@ -8,60 +8,52 @@ const config = convict({
database: { database: {
type: { type: {
doc: 'Type of database to use', doc: 'Type of database to use',
format: ['sqlite', 'mariadb', 'mongodb', 'mysqldb', 'postgresdb'], format: ['sqlite', 'mariadb', 'mysqldb', 'postgresdb'],
default: 'sqlite', default: 'sqlite',
env: 'DB_TYPE' env: 'DB_TYPE',
},
mongodb: {
connectionUrl: {
doc: 'MongoDB Connection URL',
format: '*',
default: 'mongodb://user:password@localhost:27017/database',
env: 'DB_MONGODB_CONNECTION_URL'
}
}, },
tablePrefix: { tablePrefix: {
doc: 'Prefix for table names', doc: 'Prefix for table names',
format: '*', format: '*',
default: '', default: '',
env: 'DB_TABLE_PREFIX' env: 'DB_TABLE_PREFIX',
}, },
postgresdb: { postgresdb: {
database: { database: {
doc: 'PostgresDB Database', doc: 'PostgresDB Database',
format: String, format: String,
default: 'n8n', default: 'n8n',
env: 'DB_POSTGRESDB_DATABASE' env: 'DB_POSTGRESDB_DATABASE',
}, },
host: { host: {
doc: 'PostgresDB Host', doc: 'PostgresDB Host',
format: String, format: String,
default: 'localhost', default: 'localhost',
env: 'DB_POSTGRESDB_HOST' env: 'DB_POSTGRESDB_HOST',
}, },
password: { password: {
doc: 'PostgresDB Password', doc: 'PostgresDB Password',
format: String, format: String,
default: '', default: '',
env: 'DB_POSTGRESDB_PASSWORD' env: 'DB_POSTGRESDB_PASSWORD',
}, },
port: { port: {
doc: 'PostgresDB Port', doc: 'PostgresDB Port',
format: Number, format: Number,
default: 5432, default: 5432,
env: 'DB_POSTGRESDB_PORT' env: 'DB_POSTGRESDB_PORT',
}, },
user: { user: {
doc: 'PostgresDB User', doc: 'PostgresDB User',
format: String, format: String,
default: 'root', default: 'root',
env: 'DB_POSTGRESDB_USER' env: 'DB_POSTGRESDB_USER',
}, },
schema: { schema: {
doc: 'PostgresDB Schema', doc: 'PostgresDB Schema',
format: String, format: String,
default: 'public', default: 'public',
env: 'DB_POSTGRESDB_SCHEMA' env: 'DB_POSTGRESDB_SCHEMA',
}, },
ssl: { ssl: {
@ -89,7 +81,7 @@ const config = convict({
default: true, default: true,
env: 'DB_POSTGRESDB_SSL_REJECT_UNAUTHORIZED', env: 'DB_POSTGRESDB_SSL_REJECT_UNAUTHORIZED',
}, },
} },
}, },
mysqldb: { mysqldb: {
@ -97,31 +89,39 @@ const config = convict({
doc: 'MySQL Database', doc: 'MySQL Database',
format: String, format: String,
default: 'n8n', default: 'n8n',
env: 'DB_MYSQLDB_DATABASE' env: 'DB_MYSQLDB_DATABASE',
}, },
host: { host: {
doc: 'MySQL Host', doc: 'MySQL Host',
format: String, format: String,
default: 'localhost', default: 'localhost',
env: 'DB_MYSQLDB_HOST' env: 'DB_MYSQLDB_HOST',
}, },
password: { password: {
doc: 'MySQL Password', doc: 'MySQL Password',
format: String, format: String,
default: '', default: '',
env: 'DB_MYSQLDB_PASSWORD' env: 'DB_MYSQLDB_PASSWORD',
}, },
port: { port: {
doc: 'MySQL Port', doc: 'MySQL Port',
format: Number, format: Number,
default: 3306, default: 3306,
env: 'DB_MYSQLDB_PORT' env: 'DB_MYSQLDB_PORT',
}, },
user: { user: {
doc: 'MySQL User', doc: 'MySQL User',
format: String, format: String,
default: 'root', default: 'root',
env: 'DB_MYSQLDB_USER' env: 'DB_MYSQLDB_USER',
},
},
sqlite: {
executeVacuumOnStartup: {
doc: 'Runs the VACUUM operation on startup to rebuild the database. Reduces file size and optimizes indexes. WARNING: This is a long-running, blocking operation and will increase start-up time.',
format: Boolean,
default: false,
env: 'DB_SQLITE_VACUUM_ON_STARTUP',
}, },
}, },
}, },
@ -136,7 +136,7 @@ const config = convict({
doc: 'Overwrites for credentials', doc: 'Overwrites for credentials',
format: '*', format: '*',
default: '{}', default: '{}',
env: 'CREDENTIALS_OVERWRITE_DATA' env: 'CREDENTIALS_OVERWRITE_DATA',
}, },
endpoint: { endpoint: {
doc: 'Fetch credentials from API', doc: 'Fetch credentials from API',
@ -156,7 +156,14 @@ const config = convict({
doc: 'In what process workflows should be executed', doc: 'In what process workflows should be executed',
format: ['main', 'own'], format: ['main', 'own'],
default: 'own', default: 'own',
env: 'EXECUTIONS_PROCESS' env: 'EXECUTIONS_PROCESS',
},
mode: {
doc: 'Whether executions should run directly or via queue',
format: ['regular', 'queue'],
default: 'regular',
env: 'EXECUTIONS_MODE',
}, },
// A Workflow times out and gets canceled after this time (seconds). // A Workflow times out and gets canceled after this time (seconds).
@ -174,13 +181,13 @@ const config = convict({
doc: 'Max run time (seconds) before stopping the workflow execution', doc: 'Max run time (seconds) before stopping the workflow execution',
format: Number, format: Number,
default: -1, default: -1,
env: 'EXECUTIONS_TIMEOUT' env: 'EXECUTIONS_TIMEOUT',
}, },
maxTimeout: { maxTimeout: {
doc: 'Max execution time (seconds) that can be set for a workflow individually', doc: 'Max execution time (seconds) that can be set for a workflow individually',
format: Number, format: Number,
default: 3600, default: 3600,
env: 'EXECUTIONS_TIMEOUT_MAX' env: 'EXECUTIONS_TIMEOUT_MAX',
}, },
// If a workflow executes all the data gets saved by default. This // If a workflow executes all the data gets saved by default. This
@ -193,13 +200,19 @@ const config = convict({
doc: 'What workflow execution data to save on error', doc: 'What workflow execution data to save on error',
format: ['all', 'none'], format: ['all', 'none'],
default: 'all', default: 'all',
env: 'EXECUTIONS_DATA_SAVE_ON_ERROR' env: 'EXECUTIONS_DATA_SAVE_ON_ERROR',
}, },
saveDataOnSuccess: { saveDataOnSuccess: {
doc: 'What workflow execution data to save on success', doc: 'What workflow execution data to save on success',
format: ['all', 'none'], format: ['all', 'none'],
default: 'all', default: 'all',
env: 'EXECUTIONS_DATA_SAVE_ON_SUCCESS' env: 'EXECUTIONS_DATA_SAVE_ON_SUCCESS',
},
saveExecutionProgress: {
doc: 'Whether or not to save progress for each node executed',
format: 'Boolean',
default: false,
env: 'EXECUTIONS_DATA_SAVE_ON_PROGRESS',
}, },
// If the executions of workflows which got started via the editor // If the executions of workflows which got started via the editor
@ -211,7 +224,7 @@ const config = convict({
doc: 'Save data of executions when started manually via editor', doc: 'Save data of executions when started manually via editor',
format: 'Boolean', format: 'Boolean',
default: false, default: false,
env: 'EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS' env: 'EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS',
}, },
// To not exceed the database's capacity and keep its size moderate // To not exceed the database's capacity and keep its size moderate
@ -223,22 +236,70 @@ const config = convict({
doc: 'Delete data of past executions on a rolling basis', doc: 'Delete data of past executions on a rolling basis',
format: 'Boolean', format: 'Boolean',
default: false, default: false,
env: 'EXECUTIONS_DATA_PRUNE' env: 'EXECUTIONS_DATA_PRUNE',
}, },
pruneDataMaxAge: { pruneDataMaxAge: {
doc: 'How old (hours) the execution data has to be to get deleted', doc: 'How old (hours) the execution data has to be to get deleted',
format: Number, format: Number,
default: 336, default: 336,
env: 'EXECUTIONS_DATA_MAX_AGE' env: 'EXECUTIONS_DATA_MAX_AGE',
}, },
pruneDataTimeout: { pruneDataTimeout: {
doc: 'Timeout (seconds) after execution data has been pruned', doc: 'Timeout (seconds) after execution data has been pruned',
format: Number, format: Number,
default: 3600, default: 3600,
env: 'EXECUTIONS_DATA_PRUNE_TIMEOUT' env: 'EXECUTIONS_DATA_PRUNE_TIMEOUT',
}, },
}, },
queue: {
bull: {
prefix: {
doc: 'Prefix for all queue keys',
format: String,
default: '',
env: 'QUEUE_BULL_PREFIX',
},
redis: {
db: {
doc: 'Redis DB',
format: Number,
default: 0,
env: 'QUEUE_BULL_REDIS_DB',
},
host: {
doc: 'Redis Host',
format: String,
default: 'localhost',
env: 'QUEUE_BULL_REDIS_HOST',
},
password: {
doc: 'Redis Password',
format: String,
default: '',
env: 'QUEUE_BULL_REDIS_PASSWORD',
},
port: {
doc: 'Redis Port',
format: Number,
default: 6379,
env: 'QUEUE_BULL_REDIS_PORT',
},
timeoutThreshold: {
doc: 'Redis timeout threshold',
format: Number,
default: 10000,
env: 'QUEUE_BULL_REDIS_TIMEOUT_THRESHOLD',
},
},
queueRecoveryInterval: {
doc: 'If > 0, enables active polling of the queue that can recover from Redis crashes. Given in seconds; 0 disables it. May increase Redis traffic significantly.',
format: Number,
default: 60,
env: 'QUEUE_RECOVERY_INTERVAL',
},
},
},
generic: { generic: {
// The timezone to use. Is important for nodes like "Cron" which start the // The timezone to use. Is important for nodes like "Cron" which start the
// workflow automatically at a specified time. This setting can also be // workflow automatically at a specified time. This setting can also be
@ -248,7 +309,7 @@ const config = convict({
doc: 'The timezone to use', doc: 'The timezone to use',
format: '*', format: '*',
default: 'America/New_York', default: 'America/New_York',
env: 'GENERIC_TIMEZONE' env: 'GENERIC_TIMEZONE',
}, },
}, },
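
The queue settings above follow the same convict pattern as the rest of the file: each value can be supplied via its environment variable and read back with config.get() on the dotted path. A small sketch (env values are examples only):

import * as config from '../config';

// With EXECUTIONS_MODE=queue and QUEUE_BULL_REDIS_HOST=redis.internal exported,
// convict resolves the dotted paths defined above:
const mode = config.get('executions.mode');                 // 'queue'
const redisHost = config.get('queue.bull.redis.host');      // 'redis.internal'
const recovery = config.get('queue.queueRecoveryInterval'); // 60 (seconds)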
@ -258,66 +319,78 @@ const config = convict({
default: '/', default: '/',
arg: 'path', arg: 'path',
env: 'N8N_PATH', env: 'N8N_PATH',
doc: 'Path n8n is deployed to' doc: 'Path n8n is deployed to',
}, },
host: { host: {
format: String, format: String,
default: 'localhost', default: 'localhost',
arg: 'host', arg: 'host',
env: 'N8N_HOST', env: 'N8N_HOST',
doc: 'Host name n8n can be reached' doc: 'Host name n8n can be reached',
}, },
port: { port: {
format: Number, format: Number,
default: 5678, default: 5678,
arg: 'port', arg: 'port',
env: 'N8N_PORT', env: 'N8N_PORT',
doc: 'HTTP port n8n can be reached' doc: 'HTTP port n8n can be reached',
}, },
listen_address: { listen_address: {
format: String, format: String,
default: '0.0.0.0', default: '0.0.0.0',
env: 'N8N_LISTEN_ADDRESS', env: 'N8N_LISTEN_ADDRESS',
doc: 'IP address n8n should listen on' doc: 'IP address n8n should listen on',
}, },
protocol: { protocol: {
format: ['http', 'https'], format: ['http', 'https'],
default: 'http', default: 'http',
env: 'N8N_PROTOCOL', env: 'N8N_PROTOCOL',
doc: 'HTTP Protocol via which n8n can be reached' doc: 'HTTP Protocol via which n8n can be reached',
}, },
ssl_key: { ssl_key: {
format: String, format: String,
default: '', default: '',
env: 'N8N_SSL_KEY', env: 'N8N_SSL_KEY',
doc: 'SSL Key for HTTPS Protocol' doc: 'SSL Key for HTTPS Protocol',
}, },
ssl_cert: { ssl_cert: {
format: String, format: String,
default: '', default: '',
env: 'N8N_SSL_CERT', env: 'N8N_SSL_CERT',
doc: 'SSL Cert for HTTPS Protocol' doc: 'SSL Cert for HTTPS Protocol',
}, },
security: { security: {
excludeEndpoints: {
doc: 'Additional endpoints to exclude from auth checks. Multiple endpoints can be separated by colon (":")',
format: String,
default: '',
env: 'N8N_AUTH_EXCLUDE_ENDPOINTS',
},
basicAuth: { basicAuth: {
active: { active: {
format: 'Boolean', format: 'Boolean',
default: false, default: false,
env: 'N8N_BASIC_AUTH_ACTIVE', env: 'N8N_BASIC_AUTH_ACTIVE',
doc: 'If basic auth should be activated for editor and REST-API' doc: 'If basic auth should be activated for editor and REST-API',
}, },
user: { user: {
format: String, format: String,
default: '', default: '',
env: 'N8N_BASIC_AUTH_USER', env: 'N8N_BASIC_AUTH_USER',
doc: 'The name of the basic auth user' doc: 'The name of the basic auth user',
}, },
password: { password: {
format: String, format: String,
default: '', default: '',
env: 'N8N_BASIC_AUTH_PASSWORD', env: 'N8N_BASIC_AUTH_PASSWORD',
doc: 'The password of the basic auth user' doc: 'The password of the basic auth user',
},
hash: {
format: 'Boolean',
default: false,
env: 'N8N_BASIC_AUTH_HASH',
doc: 'If password for basic auth is hashed',
}, },
}, },
jwtAuth: { jwtAuth: {
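
The new hash option above pairs with the bcryptjs dependency added to package.json in this commit: when enabled, the configured basic-auth password is treated as a bcrypt hash instead of plaintext. A hedged sketch of the comparison (the actual check lives in the server middleware, which is outside this diff):

import { compareSync } from 'bcryptjs';

// Sketch: verify a supplied password against the configured value.
function basicAuthPasswordMatches(supplied: string, stored: string, storedIsHash: boolean): boolean {
	return storedIsHash
		? compareSync(supplied, stored) // stored value is a bcrypt hash
		: supplied === stored;          // stored value is plaintext
}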
@ -325,71 +398,109 @@ const config = convict({
format: 'Boolean', format: 'Boolean',
default: false, default: false,
env: 'N8N_JWT_AUTH_ACTIVE', env: 'N8N_JWT_AUTH_ACTIVE',
doc: 'If JWT auth should be activated for editor and REST-API' doc: 'If JWT auth should be activated for editor and REST-API',
}, },
jwtHeader: { jwtHeader: {
format: String, format: String,
default: '', default: '',
env: 'N8N_JWT_AUTH_HEADER', env: 'N8N_JWT_AUTH_HEADER',
doc: 'The request header containing a signed JWT' doc: 'The request header containing a signed JWT',
}, },
jwtHeaderValuePrefix: { jwtHeaderValuePrefix: {
format: String, format: String,
default: '', default: '',
env: 'N8N_JWT_AUTH_HEADER_VALUE_PREFIX', env: 'N8N_JWT_AUTH_HEADER_VALUE_PREFIX',
doc: 'The request header value prefix to strip (optional)' doc: 'The request header value prefix to strip (optional)',
}, },
jwksUri: { jwksUri: {
format: String, format: String,
default: '', default: '',
env: 'N8N_JWKS_URI', env: 'N8N_JWKS_URI',
doc: 'The URI to fetch JWK Set for JWT authentication' doc: 'The URI to fetch JWK Set for JWT authentication',
}, },
jwtIssuer: { jwtIssuer: {
format: String, format: String,
default: '', default: '',
env: 'N8N_JWT_ISSUER', env: 'N8N_JWT_ISSUER',
doc: 'JWT issuer to expect (optional)' doc: 'JWT issuer to expect (optional)',
}, },
jwtNamespace: { jwtNamespace: {
format: String, format: String,
default: '', default: '',
env: 'N8N_JWT_NAMESPACE', env: 'N8N_JWT_NAMESPACE',
doc: 'JWT namespace to expect (optional)' doc: 'JWT namespace to expect (optional)',
}, },
jwtAllowedTenantKey: { jwtAllowedTenantKey: {
format: String, format: String,
default: '', default: '',
env: 'N8N_JWT_ALLOWED_TENANT_KEY', env: 'N8N_JWT_ALLOWED_TENANT_KEY',
doc: 'JWT tenant key name to inspect within JWT namespace (optional)' doc: 'JWT tenant key name to inspect within JWT namespace (optional)',
}, },
jwtAllowedTenant: { jwtAllowedTenant: {
format: String, format: String,
default: '', default: '',
env: 'N8N_JWT_ALLOWED_TENANT', env: 'N8N_JWT_ALLOWED_TENANT',
doc: 'JWT tenant to allow (optional)' doc: 'JWT tenant to allow (optional)',
}, },
}, },
}, },
endpoints: { endpoints: {
metrics: {
enable: {
format: 'Boolean',
default: false,
env: 'N8N_METRICS',
doc: 'Enable metrics endpoint',
},
prefix: {
format: String,
default: 'n8n_',
env: 'N8N_METRICS_PREFIX',
doc: 'An optional prefix for metric names. Default: n8n_',
},
},
rest: { rest: {
format: String, format: String,
default: 'rest', default: 'rest',
env: 'N8N_ENDPOINT_REST', env: 'N8N_ENDPOINT_REST',
doc: 'Path for rest endpoint' doc: 'Path for rest endpoint',
}, },
webhook: { webhook: {
format: String, format: String,
default: 'webhook', default: 'webhook',
env: 'N8N_ENDPOINT_WEBHOOK', env: 'N8N_ENDPOINT_WEBHOOK',
doc: 'Path for webhook endpoint' doc: 'Path for webhook endpoint',
}, },
webhookTest: { webhookTest: {
format: String, format: String,
default: 'webhook-test', default: 'webhook-test',
env: 'N8N_ENDPOINT_WEBHOOK_TEST', env: 'N8N_ENDPOINT_WEBHOOK_TEST',
doc: 'Path for test-webhook endpoint' doc: 'Path for test-webhook endpoint',
},
disableProductionWebhooksOnMainProcess: {
format: Boolean,
default: false,
env: 'N8N_DISABLE_PRODUCTION_MAIN_PROCESS',
doc: 'Disable production webhooks on the main process. This ensures no HTTP traffic load reaches the main process when using webhook-specific processes.',
},
skipWebhoooksDeregistrationOnShutdown: {
/**
* Longer explanation: n8n deregisters webhooks on shutdown / deactivation
* and registers on startup / activation. If we skip
* deactivation on shutdown, webhooks will remain active on 3rd party services.
* We don't have to worry about startup as it always
* checks if webhooks already exist.
* If users want to upgrade n8n, it is possible to run
* two instances simultaneously without downtime, similar
* to blue/green deployment.
* WARNING: Trigger nodes (like Cron) will cause duplication
* of work, so be aware when using.
*/
doc: 'Deregister webhooks on external services only when workflows are deactivated.',
format: Boolean,
default: false,
env: 'N8N_SKIP_WEBHOOK_DEREGISTRATION_SHUTDOWN',
}, },
}, },
@ -397,10 +508,34 @@ const config = convict({
doc: 'Files containing external hooks. Multiple files can be separated by colon (":")', doc: 'Files containing external hooks. Multiple files can be separated by colon (":")',
format: String, format: String,
default: '', default: '',
env: 'EXTERNAL_HOOK_FILES' env: 'EXTERNAL_HOOK_FILES',
}, },
nodes: { nodes: {
include: {
doc: 'Nodes to load',
format: function check(rawValue) {
if (rawValue === '') {
return;
}
try {
const values = JSON.parse(rawValue);
if (!Array.isArray(values)) {
throw new Error();
}
for (const value of values) {
if (typeof value !== 'string') {
throw new Error();
}
}
} catch (error) {
throw new TypeError(`The nodes to include are not a valid array of strings.`);
}
},
default: undefined,
env: 'NODES_INCLUDE',
},
exclude: { exclude: {
doc: 'Nodes not to load', doc: 'Nodes not to load',
format: function check(rawValue) { format: function check(rawValue) {
@ -421,13 +556,13 @@ const config = convict({
} }
}, },
default: '[]', default: '[]',
env: 'NODES_EXCLUDE' env: 'NODES_EXCLUDE',
}, },
errorTriggerType: { errorTriggerType: {
doc: 'Node Type to use as Error Trigger', doc: 'Node Type to use as Error Trigger',
format: String, format: String,
default: 'n8n-nodes-base.errorTrigger', default: 'n8n-nodes-base.errorTrigger',
env: 'NODES_ERROR_TRIGGER_TYPE' env: 'NODES_ERROR_TRIGGER_TYPE',
}, },
}, },
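
The include/exclude format checkers above accept only a JSON array of strings. For example (a sketch; the node names are illustrative):

// NODES_INCLUDE='["n8n-nodes-base.httpRequest","n8n-nodes-base.set"]'
const rawValue = '["n8n-nodes-base.httpRequest","n8n-nodes-base.set"]';
const values: unknown = JSON.parse(rawValue);
const valid = Array.isArray(values) && values.every((value) => typeof value === 'string');
console.log(valid); // true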

View file

@ -1,4 +1,4 @@
import {MongoDb, SQLite, MySQLDb, PostgresDb} from '../src/databases/index'; import { SQLite, MySQLDb, PostgresDb} from '../src/databases/index';
module.exports = [ module.exports = [
{ {
@ -19,25 +19,6 @@ module.exports = [
"subscribersDir": "./src/databases/sqlite/subscribers" "subscribersDir": "./src/databases/sqlite/subscribers"
} }
}, },
{
"name": "mongodb",
"type": "mongodb",
"logging": false,
"entities": Object.values(MongoDb),
"url": "mongodb://root:example@localhost:27017/n8n",
"authSource": 'admin',
"migrations": [
"./src/databases/mongodb/migrations/*.ts"
],
"subscribers": [
"src/subscriber/**/*.ts"
],
"cli": {
"entitiesDir": "./src/databases/mongodb",
"migrationsDir": "./src/databases/mongodb/Migrations",
"subscribersDir": "./src/databases/mongodb/Subscribers"
}
},
{ {
"name": "postgres", "name": "postgres",
"type": "postgres", "type": "postgres",

View file

@ -1,6 +1,6 @@
{ {
"name": "n8n", "name": "n8n",
"version": "0.78.0", "version": "0.113.0",
"description": "n8n Workflow Automation Tool", "description": "n8n Workflow Automation Tool",
"license": "SEE LICENSE IN LICENSE.md", "license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://n8n.io", "homepage": "https://n8n.io",
@ -28,6 +28,7 @@
"start:windows": "cd bin && n8n", "start:windows": "cd bin && n8n",
"test": "jest", "test": "jest",
"tslint": "tslint -p tsconfig.json -c tslint.json", "tslint": "tslint -p tsconfig.json -c tslint.json",
"tslintfix": "tslint --fix -p tsconfig.json -c tslint.json",
"watch": "tsc --watch", "watch": "tsc --watch",
"typeorm": "ts-node ./node_modules/typeorm/cli.js" "typeorm": "ts-node ./node_modules/typeorm/cli.js"
}, },
@ -43,7 +44,7 @@
"workflow" "workflow"
], ],
"engines": { "engines": {
"node": ">=8.0.0" "node": ">=12.0.0"
}, },
"files": [ "files": [
"bin", "bin",
@ -54,39 +55,43 @@
"devDependencies": { "devDependencies": {
"@oclif/dev-cli": "^1.22.2", "@oclif/dev-cli": "^1.22.2",
"@types/basic-auth": "^1.1.2", "@types/basic-auth": "^1.1.2",
"@types/bcryptjs": "^2.4.1",
"@types/bull": "^3.3.10",
"@types/compression": "1.0.1", "@types/compression": "1.0.1",
"@types/connect-history-api-fallback": "^1.3.1", "@types/connect-history-api-fallback": "^1.3.1",
"@types/convict": "^4.2.1", "@types/convict": "^4.2.1",
"@types/dotenv": "^8.2.0", "@types/dotenv": "^8.2.0",
"@types/express": "^4.17.6", "@types/express": "^4.17.6",
"@types/jest": "^25.2.1", "@types/jest": "^26.0.13",
"@types/localtunnel": "^1.9.0", "@types/localtunnel": "^1.9.0",
"@types/lodash.get": "^4.4.6", "@types/lodash.get": "^4.4.6",
"@types/node": "^14.0.27", "@types/node": "14.0.27",
"@types/open": "^6.1.0", "@types/open": "^6.1.0",
"@types/parseurl": "^1.3.1", "@types/parseurl": "^1.3.1",
"@types/request-promise-native": "~1.0.15", "@types/request-promise-native": "~1.0.15",
"concurrently": "^5.1.0", "concurrently": "^5.1.0",
"jest": "^24.9.0", "jest": "^26.4.2",
"nodemon": "^2.0.2", "nodemon": "^2.0.2",
"p-cancelable": "^2.0.0", "p-cancelable": "^2.0.0",
"run-script-os": "^1.0.7", "run-script-os": "^1.0.7",
"ts-jest": "^25.4.0", "ts-jest": "^26.3.0",
"ts-node": "^8.9.1",
"tslint": "^6.1.2", "tslint": "^6.1.2",
"typescript": "~3.7.4", "typescript": "~3.9.7"
"ts-node": "^8.9.1"
}, },
"dependencies": { "dependencies": {
"@oclif/command": "^1.5.18", "@oclif/command": "^1.5.18",
"@oclif/errors": "^1.2.2", "@oclif/errors": "^1.2.2",
"@types/jsonwebtoken": "^8.3.4", "@types/jsonwebtoken": "^8.3.4",
"basic-auth": "^2.0.1", "basic-auth": "^2.0.1",
"bcryptjs": "^2.4.3",
"body-parser": "^1.18.3", "body-parser": "^1.18.3",
"body-parser-xml": "^1.1.0", "body-parser-xml": "^1.1.0",
"bull": "^3.19.0",
"client-oauth2": "^4.2.5", "client-oauth2": "^4.2.5",
"compression": "^1.7.4", "compression": "^1.7.4",
"connect-history-api-fallback": "^1.6.0", "connect-history-api-fallback": "^1.6.0",
"convict": "^5.0.0", "convict": "^6.0.1",
"csrf": "^3.1.0", "csrf": "^3.1.0",
"dotenv": "^8.0.0", "dotenv": "^8.0.0",
"express": "^4.16.4", "express": "^4.16.4",
@ -95,23 +100,23 @@
"google-timezones-json": "^1.0.2", "google-timezones-json": "^1.0.2",
"inquirer": "^7.0.1", "inquirer": "^7.0.1",
"jsonwebtoken": "^8.5.1", "jsonwebtoken": "^8.5.1",
"jwks-rsa": "^1.6.0", "jwks-rsa": "~1.12.1",
"localtunnel": "^2.0.0", "localtunnel": "^2.0.0",
"lodash.get": "^4.4.2", "lodash.get": "^4.4.2",
"mongodb": "^3.5.5", "mysql2": "~2.1.0",
"mysql2": "^2.0.1", "n8n-core": "~0.66.0",
"n8n-core": "~0.43.0", "n8n-editor-ui": "~0.83.0",
"n8n-editor-ui": "~0.54.0", "n8n-nodes-base": "~0.110.0",
"n8n-nodes-base": "~0.73.0", "n8n-workflow": "~0.55.0",
"n8n-workflow": "~0.39.0",
"oauth-1.0a": "^2.2.6", "oauth-1.0a": "^2.2.6",
"open": "^7.0.0", "open": "^7.0.0",
"pg": "^8.3.0", "pg": "^8.3.0",
"prom-client": "^13.1.0",
"request-promise-native": "^1.0.7", "request-promise-native": "^1.0.7",
"sqlite3": "^4.2.0", "sqlite3": "^5.0.1",
"sse-channel": "^3.1.1", "sse-channel": "^3.1.1",
"tslib": "1.11.2", "tslib": "1.11.2",
"typeorm": "^0.2.24" "typeorm": "^0.2.30"
}, },
"jest": { "jest": {
"transform": { "transform": {

View file

@ -7,17 +7,22 @@ import {
} from 'n8n-core'; } from 'n8n-core';
import { import {
IExecutionsCurrentSummary, Db,
IExecutingWorkflowData, IExecutingWorkflowData,
IExecutionDb,
IExecutionFlattedDb,
IExecutionsCurrentSummary,
IWorkflowExecutionDataProcess, IWorkflowExecutionDataProcess,
ResponseHelper,
WorkflowHelpers,
} from '.'; } from '.';
import { ChildProcess } from 'child_process'; import { ChildProcess } from 'child_process';
import * as PCancelable from 'p-cancelable'; import * as PCancelable from 'p-cancelable';
import { ObjectID } from 'typeorm';
export class ActiveExecutions { export class ActiveExecutions {
private nextId = 1;
private activeExecutions: { private activeExecutions: {
[index: string]: IExecutingWorkflowData; [index: string]: IExecutingWorkflowData;
} = {}; } = {};
@ -31,8 +36,30 @@ export class ActiveExecutions {
* @returns {string} * @returns {string}
* @memberof ActiveExecutions * @memberof ActiveExecutions
*/ */
add(executionData: IWorkflowExecutionDataProcess, process?: ChildProcess): string { async add(executionData: IWorkflowExecutionDataProcess, process?: ChildProcess): Promise<string> {
const executionId = this.nextId++;
const fullExecutionData: IExecutionDb = {
data: executionData.executionData!,
mode: executionData.executionMode,
finished: false,
startedAt: new Date(),
workflowData: executionData.workflowData,
};
if (executionData.retryOf !== undefined) {
fullExecutionData.retryOf = executionData.retryOf.toString();
}
if (executionData.workflowData.id !== undefined && WorkflowHelpers.isWorkflowIdValid(executionData.workflowData.id.toString()) === true) {
fullExecutionData.workflowId = executionData.workflowData.id.toString();
}
const execution = ResponseHelper.flattenExecutionData(fullExecutionData);
// Save the Execution in DB
const executionResult = await Db.collections.Execution!.save(execution as IExecutionFlattedDb);
const executionId = typeof executionResult.id === 'object' ? executionResult.id.toString() : executionResult.id + '';
this.activeExecutions[executionId] = { this.activeExecutions[executionId] = {
executionData, executionData,
@ -41,7 +68,7 @@ export class ActiveExecutions {
postExecutePromises: [], postExecutePromises: [],
}; };
return executionId.toString(); return executionId;
} }
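
The add() rewrite above swaps the in-memory counter for the database-generated ID, so execution IDs remain unique even when the main, webhook, and worker processes insert concurrently. The final coercion handles drivers that return the ID as an object rather than a number; isolated as a sketch:

// Sketch: normalize whatever the DB driver returns into a string ID.
function normalizeExecutionId(id: number | string | { toString(): string }): string {
	return typeof id === 'object' ? id.toString() : `${id}`;
}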

View file

@ -1,17 +1,17 @@
import { import {
IActivationError,
Db, Db,
NodeTypes, IActivationError,
IResponseCallbackData, IResponseCallbackData,
IWebhookDb,
IWorkflowDb, IWorkflowDb,
IWorkflowExecutionDataProcess, IWorkflowExecutionDataProcess,
NodeTypes,
ResponseHelper, ResponseHelper,
WebhookHelpers, WebhookHelpers,
WorkflowCredentials, WorkflowCredentials,
WorkflowExecuteAdditionalData,
WorkflowHelpers, WorkflowHelpers,
WorkflowRunner, WorkflowRunner,
WorkflowExecuteAdditionalData,
IWebhookDb,
} from './'; } from './';
import { import {
@ -20,16 +20,18 @@ import {
} from 'n8n-core'; } from 'n8n-core';
import { import {
IDataObject,
IExecuteData, IExecuteData,
IGetExecutePollFunctions, IGetExecutePollFunctions,
IGetExecuteTriggerFunctions, IGetExecuteTriggerFunctions,
INode, INode,
INodeExecutionData, INodeExecutionData,
IRunExecutionData, IRunExecutionData,
NodeHelpers,
IWorkflowExecuteAdditionalData as IWorkflowExecuteAdditionalDataWorkflow, IWorkflowExecuteAdditionalData as IWorkflowExecuteAdditionalDataWorkflow,
NodeHelpers,
WebhookHttpMethod, WebhookHttpMethod,
Workflow, Workflow,
WorkflowActivateMode,
WorkflowExecuteMode, WorkflowExecuteMode,
} from 'n8n-workflow'; } from 'n8n-workflow';
@ -52,6 +54,9 @@ export class ActiveWorkflowRunner {
// so instead of pulling all the active webhooks just pull the actives that have a trigger // so instead of pulling all the active webhooks just pull the actives that have a trigger
const workflowsData: IWorkflowDb[] = await Db.collections.Workflow!.find({ active: true }) as IWorkflowDb[]; const workflowsData: IWorkflowDb[] = await Db.collections.Workflow!.find({ active: true }) as IWorkflowDb[];
// Clear up active workflow table
await Db.collections.Webhook?.clear();
this.activeWorkflows = new ActiveWorkflows(); this.activeWorkflows = new ActiveWorkflows();
if (workflowsData.length !== 0) { if (workflowsData.length !== 0) {
@ -59,17 +64,10 @@ export class ActiveWorkflowRunner {
console.log(' Start Active Workflows:'); console.log(' Start Active Workflows:');
console.log(' ================================'); console.log(' ================================');
const nodeTypes = NodeTypes();
for (const workflowData of workflowsData) { for (const workflowData of workflowsData) {
const workflow = new Workflow({ id: workflowData.id.toString(), name: workflowData.name, nodes: workflowData.nodes, connections: workflowData.connections, active: workflowData.active, nodeTypes, staticData: workflowData.staticData, settings: workflowData.settings});
if (workflow.getTriggerNodes().length !== 0
|| workflow.getPollNodes().length !== 0) {
console.log(` - ${workflowData.name}`); console.log(` - ${workflowData.name}`);
try { try {
await this.add(workflowData.id.toString(), workflowData); await this.add(workflowData.id.toString(), 'init', workflowData);
console.log(` => Started`); console.log(` => Started`);
} catch (error) { } catch (error) {
console.log(` => ERROR: Workflow could not be activated:`); console.log(` => ERROR: Workflow could not be activated:`);
@ -78,6 +76,9 @@ export class ActiveWorkflowRunner {
} }
} }
} }
async initWebhooks() {
this.activeWorkflows = new ActiveWorkflows();
} }
/** /**
@ -87,14 +88,18 @@ export class ActiveWorkflowRunner {
* @memberof ActiveWorkflowRunner * @memberof ActiveWorkflowRunner
*/ */
async removeAll(): Promise<void> { async removeAll(): Promise<void> {
if (this.activeWorkflows === null) { const activeWorkflowId: string[] = [];
return;
if (this.activeWorkflows !== null) {
// TODO: This should be renamed!
activeWorkflowId.push.apply(activeWorkflowId, this.activeWorkflows.allActiveWorkflows());
} }
const activeWorkflows = this.activeWorkflows.allActiveWorkflows(); const activeWorkflows = await this.getActiveWorkflows();
activeWorkflowId.push.apply(activeWorkflowId, activeWorkflows.map(workflow => workflow.id));
const removePromises = []; const removePromises = [];
for (const workflowId of activeWorkflows) { for (const workflowId of activeWorkflowId) {
removePromises.push(this.remove(workflowId)); removePromises.push(this.remove(workflowId));
} }
@ -117,14 +122,59 @@ export class ActiveWorkflowRunner {
throw new ResponseHelper.ResponseError('The "activeWorkflows" instance did not get initialized yet.', 404, 404); throw new ResponseHelper.ResponseError('The "activeWorkflows" instance did not get initialized yet.', 404, 404);
} }
const webhook = await Db.collections.Webhook?.findOne({ webhookPath: path, method: httpMethod }) as IWebhookDb; // Reset request parameters
req.params = {};
// check if something exist // Remove trailing slash
if (path.endsWith('/')) {
path = path.slice(0, -1);
}
let webhook = await Db.collections.Webhook?.findOne({ webhookPath: path, method: httpMethod }) as IWebhookDb;
let webhookId: string | undefined;
// check if path is dynamic
if (webhook === undefined) { if (webhook === undefined) {
// check if a dynamic webhook path exists
const pathElements = path.split('/');
webhookId = pathElements.shift();
const dynamicWebhooks = await Db.collections.Webhook?.find({ webhookId, method: httpMethod, pathLength: pathElements.length });
if (dynamicWebhooks === undefined || dynamicWebhooks.length === 0) {
// The requested webhook is not registered // The requested webhook is not registered
throw new ResponseHelper.ResponseError(`The requested webhook "${httpMethod} ${path}" is not registered.`, 404, 404); throw new ResponseHelper.ResponseError(`The requested webhook "${httpMethod} ${path}" is not registered.`, 404, 404);
} }
let maxMatches = 0;
const pathElementsSet = new Set(pathElements);
// check if static elements match in path
// if more results have been returned choose the one with the most static-route matches
dynamicWebhooks.forEach(dynamicWebhook => {
const staticElements = dynamicWebhook.webhookPath.split('/').filter(ele => !ele.startsWith(':'));
const allStaticExist = staticElements.every(staticEle => pathElementsSet.has(staticEle));
if (allStaticExist && staticElements.length > maxMatches) {
maxMatches = staticElements.length;
webhook = dynamicWebhook;
}
// handle routes with no static elements
else if (staticElements.length === 0 && !webhook) {
webhook = dynamicWebhook;
}
});
if (webhook === undefined) {
throw new ResponseHelper.ResponseError(`The requested webhook "${httpMethod} ${path}" is not registered.`, 404, 404);
}
path = webhook!.webhookPath;
// extracting params from path
webhook!.webhookPath.split('/').forEach((ele, index) => {
if (ele.startsWith(':')) {
// write params to req.params
req.params[ele.slice(1)] = pathElements[index];
}
});
}
const workflowData = await Db.collections.Workflow!.findOne(webhook.workflowId); const workflowData = await Db.collections.Workflow!.findOne(webhook.workflowId);
if (workflowData === undefined) { if (workflowData === undefined) {
throw new ResponseHelper.ResponseError(`Could not find workflow with id "${webhook.workflowId}"`, 404, 404); throw new ResponseHelper.ResponseError(`Could not find workflow with id "${webhook.workflowId}"`, 404, 404);
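
The dynamic-path resolution above can be restated in isolation: candidate webhooks are fetched by webhookId, method, and segment count; the candidate whose static segments all appear in the request path wins, preferring the most static matches; and :-prefixed segments become request parameters. A simplified, self-contained sketch:

interface WebhookCandidate {
	webhookPath: string; // e.g. 'orders/:orderId/status' (webhookId already stripped)
}

function pickWebhook(pathElements: string[], candidates: WebhookCandidate[]):
	{ webhook: WebhookCandidate; params: { [key: string]: string } } | undefined {
	const pathElementsSet = new Set(pathElements);
	let matched: WebhookCandidate | undefined;
	let maxMatches = 0;
	for (const candidate of candidates) {
		const staticElements = candidate.webhookPath.split('/').filter(ele => !ele.startsWith(':'));
		const allStaticExist = staticElements.every(staticEle => pathElementsSet.has(staticEle));
		if (allStaticExist && staticElements.length > maxMatches) {
			// Prefer the candidate with the most matching static segments
			maxMatches = staticElements.length;
			matched = candidate;
		} else if (staticElements.length === 0 && matched === undefined) {
			// Fully dynamic route: only wins if nothing better matched
			matched = candidate;
		}
	}
	if (matched === undefined) {
		return undefined;
	}
	// Fill :params positionally from the request path
	const params: { [key: string]: string } = {};
	matched.webhookPath.split('/').forEach((ele, index) => {
		if (ele.startsWith(':')) {
			params[ele.slice(1)] = pathElements[index];
		}
	});
	return { webhook: matched, params };
}

// pickWebhook(['orders', '42', 'status'], [{ webhookPath: 'orders/:orderId/status' }])
// => { webhook: ..., params: { orderId: '42' } }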
@ -182,8 +232,9 @@ export class ActiveWorkflowRunner {
* @returns {string[]} * @returns {string[]}
* @memberof ActiveWorkflowRunner * @memberof ActiveWorkflowRunner
*/ */
getActiveWorkflows(): Promise<IWorkflowDb[]> { async getActiveWorkflows(): Promise<IWorkflowDb[]> {
return Db.collections.Workflow?.find({ select: ['id'] }) as Promise<IWorkflowDb[]>; const activeWorkflows = await Db.collections.Workflow?.find({ where: { active: true }, select: ['id'] }) as IWorkflowDb[];
return activeWorkflows.filter(workflow => this.activationErrors[workflow.id.toString()] === undefined);
} }
@ -223,7 +274,7 @@ export class ActiveWorkflowRunner {
* @returns {Promise<void>} * @returns {Promise<void>}
* @memberof ActiveWorkflowRunner * @memberof ActiveWorkflowRunner
*/ */
async addWorkflowWebhooks(workflow: Workflow, additionalData: IWorkflowExecuteAdditionalDataWorkflow, mode: WorkflowExecuteMode): Promise<void> { async addWorkflowWebhooks(workflow: Workflow, additionalData: IWorkflowExecuteAdditionalDataWorkflow, mode: WorkflowExecuteMode, activation: WorkflowActivateMode): Promise<void> {
const webhooks = WebhookHelpers.getWorkflowWebhooks(workflow, additionalData); const webhooks = WebhookHelpers.getWorkflowWebhooks(workflow, additionalData);
let path = '' as string | undefined; let path = '' as string | undefined;
@ -235,7 +286,7 @@ export class ActiveWorkflowRunner {
path = node.parameters.path as string; path = node.parameters.path as string;
if (node.parameters.path === undefined) { if (node.parameters.path === undefined) {
path = workflow.getSimpleParameterValue(node, webhookData.webhookDescription['path']) as string | undefined; path = workflow.expression.getSimpleParameterValue(node, webhookData.webhookDescription['path'], mode) as string | undefined;
if (path === undefined) { if (path === undefined) {
// TODO: Use a proper logger // TODO: Use a proper logger
@ -244,7 +295,7 @@ export class ActiveWorkflowRunner {
} }
} }
const isFullPath: boolean = workflow.getSimpleParameterValue(node, webhookData.webhookDescription['isFullPath'], false) as boolean; const isFullPath: boolean = workflow.expression.getSimpleParameterValue(node, webhookData.webhookDescription['isFullPath'], mode, false) as boolean;
const webhook = { const webhook = {
workflowId: webhookData.workflowId, workflowId: webhookData.workflowId,
@ -253,29 +304,42 @@ export class ActiveWorkflowRunner {
method: webhookData.httpMethod, method: webhookData.httpMethod,
} as IWebhookDb; } as IWebhookDb;
if (webhook.webhookPath.startsWith('/')) {
webhook.webhookPath = webhook.webhookPath.slice(1);
}
if (webhook.webhookPath.endsWith('/')) {
webhook.webhookPath = webhook.webhookPath.slice(0, -1);
}
if ((path.startsWith(':') || path.includes('/:')) && node.webhookId) {
webhook.webhookId = node.webhookId;
webhook.pathLength = webhook.webhookPath.split('/').length;
}
try { try {
await Db.collections.Webhook?.insert(webhook); await Db.collections.Webhook?.insert(webhook);
const webhookExists = await workflow.runWebhookMethod('checkExists', webhookData, NodeExecuteFunctions, mode, false); const webhookExists = await workflow.runWebhookMethod('checkExists', webhookData, NodeExecuteFunctions, mode, activation, false);
if (webhookExists === false) { if (webhookExists !== true) {
// If webhook does not exist yet create it // If webhook does not exist yet create it
await workflow.runWebhookMethod('create', webhookData, NodeExecuteFunctions, mode, false); await workflow.runWebhookMethod('create', webhookData, NodeExecuteFunctions, mode, activation, false);
} }
} catch (error) { } catch (error) {
try {
await this.removeWorkflowWebhooks(workflow.id as string);
} catch (error) {
console.error(`Could not remove webhooks of workflow "${workflow.id}" because of error: "${error.message}"`);
}
let errorMessage = ''; let errorMessage = '';
await Db.collections.Webhook?.delete({ workflowId: workflow.id });
// if it's an error from the insert // if it's an error from the insert
// TODO check if there is standard error code for deplicate key violation that works // TODO check if there is standard error code for duplicate key violation that works
// with all databases // with all databases
if (error.name === 'MongoError' || error.name === 'QueryFailedError') { if (error.name === 'QueryFailedError') {
errorMessage = `The webhook path [${webhook.webhookPath}] and method [${webhook.method}] already exist.`; errorMessage = `The webhook path [${webhook.webhookPath}] and method [${webhook.method}] already exist.`;
} else if (error.detail) { } else if (error.detail) {
// it's an error running the webhook methods (checkExists, create) // it's an error running the webhook methods (checkExists, create)
errorMessage = error.detail; errorMessage = error.detail;
@ -315,13 +379,10 @@ export class ActiveWorkflowRunner {
const webhooks = WebhookHelpers.getWorkflowWebhooks(workflow, additionalData); const webhooks = WebhookHelpers.getWorkflowWebhooks(workflow, additionalData);
for (const webhookData of webhooks) { for (const webhookData of webhooks) {
await workflow.runWebhookMethod('delete', webhookData, NodeExecuteFunctions, mode, false); await workflow.runWebhookMethod('delete', webhookData, NodeExecuteFunctions, mode, 'update', false);
} }
// if it's a mongo objectId convert it to string await WorkflowHelpers.saveStaticData(workflow);
if (typeof workflowData.id === 'object') {
workflowData.id = workflowData.id.toString();
}
const webhook = { const webhook = {
workflowId: workflowData.id, workflowId: workflowData.id,
@ -347,8 +408,8 @@ export class ActiveWorkflowRunner {
node, node,
data: { data: {
main: data, main: data,
} },
} },
]; ];
const executionData: IRunExecutionData = { const executionData: IRunExecutionData = {
@ -386,9 +447,9 @@ export class ActiveWorkflowRunner {
* @returns {IGetExecutePollFunctions} * @returns {IGetExecutePollFunctions}
* @memberof ActiveWorkflowRunner * @memberof ActiveWorkflowRunner
*/ */
getExecutePollFunctions(workflowData: IWorkflowDb, additionalData: IWorkflowExecuteAdditionalDataWorkflow, mode: WorkflowExecuteMode): IGetExecutePollFunctions { getExecutePollFunctions(workflowData: IWorkflowDb, additionalData: IWorkflowExecuteAdditionalDataWorkflow, mode: WorkflowExecuteMode, activation: WorkflowActivateMode): IGetExecutePollFunctions {
return ((workflow: Workflow, node: INode) => { return ((workflow: Workflow, node: INode) => {
const returnFunctions = NodeExecuteFunctions.getExecutePollFunctions(workflow, node, additionalData, mode); const returnFunctions = NodeExecuteFunctions.getExecutePollFunctions(workflow, node, additionalData, mode, activation);
returnFunctions.__emit = (data: INodeExecutionData[][]): void => { returnFunctions.__emit = (data: INodeExecutionData[][]): void => {
this.runWorkflow(workflowData, node, data, additionalData, mode); this.runWorkflow(workflowData, node, data, additionalData, mode);
}; };
@ -407,12 +468,12 @@ export class ActiveWorkflowRunner {
* @returns {IGetExecuteTriggerFunctions} * @returns {IGetExecuteTriggerFunctions}
* @memberof ActiveWorkflowRunner * @memberof ActiveWorkflowRunner
*/ */
getExecuteTriggerFunctions(workflowData: IWorkflowDb, additionalData: IWorkflowExecuteAdditionalDataWorkflow, mode: WorkflowExecuteMode): IGetExecuteTriggerFunctions{ getExecuteTriggerFunctions(workflowData: IWorkflowDb, additionalData: IWorkflowExecuteAdditionalDataWorkflow, mode: WorkflowExecuteMode, activation: WorkflowActivateMode): IGetExecuteTriggerFunctions{
return ((workflow: Workflow, node: INode) => { return ((workflow: Workflow, node: INode) => {
const returnFunctions = NodeExecuteFunctions.getExecuteTriggerFunctions(workflow, node, additionalData, mode); const returnFunctions = NodeExecuteFunctions.getExecuteTriggerFunctions(workflow, node, additionalData, mode, activation);
returnFunctions.emit = (data: INodeExecutionData[][]): void => { returnFunctions.emit = (data: INodeExecutionData[][]): void => {
WorkflowHelpers.saveStaticData(workflow); WorkflowHelpers.saveStaticData(workflow);
this.runWorkflow(workflowData, node, data, additionalData, mode); this.runWorkflow(workflowData, node, data, additionalData, mode).catch((err) => console.error(err));
}; };
return returnFunctions; return returnFunctions;
}); });
@ -426,7 +487,7 @@ export class ActiveWorkflowRunner {
* @returns {Promise<void>} * @returns {Promise<void>}
* @memberof ActiveWorkflowRunner * @memberof ActiveWorkflowRunner
*/ */
async add(workflowId: string, workflowData?: IWorkflowDb): Promise<void> { async add(workflowId: string, activation: WorkflowActivateMode, workflowData?: IWorkflowDb): Promise<void> {
if (this.activeWorkflows === null) { if (this.activeWorkflows === null) {
throw new Error(`The "activeWorkflows" instance did not get initialized yet.`); throw new Error(`The "activeWorkflows" instance did not get initialized yet.`);
} }
@ -451,15 +512,15 @@ export class ActiveWorkflowRunner {
const mode = 'trigger'; const mode = 'trigger';
const credentials = await WorkflowCredentials(workflowData.nodes); const credentials = await WorkflowCredentials(workflowData.nodes);
const additionalData = await WorkflowExecuteAdditionalData.getBase(credentials); const additionalData = await WorkflowExecuteAdditionalData.getBase(credentials);
const getTriggerFunctions = this.getExecuteTriggerFunctions(workflowData, additionalData, mode); const getTriggerFunctions = this.getExecuteTriggerFunctions(workflowData, additionalData, mode, activation);
const getPollFunctions = this.getExecutePollFunctions(workflowData, additionalData, mode); const getPollFunctions = this.getExecutePollFunctions(workflowData, additionalData, mode, activation);
// Add the workflows which have webhooks defined // Add the workflows which have webhooks defined
await this.addWorkflowWebhooks(workflowInstance, additionalData, mode); await this.addWorkflowWebhooks(workflowInstance, additionalData, mode, activation);
if (workflowInstance.getTriggerNodes().length !== 0 if (workflowInstance.getTriggerNodes().length !== 0
|| workflowInstance.getPollNodes().length !== 0) { || workflowInstance.getPollNodes().length !== 0) {
await this.activeWorkflows.add(workflowId, workflowInstance, additionalData, getTriggerFunctions, getPollFunctions); await this.activeWorkflows.add(workflowId, workflowInstance, additionalData, mode, activation, getTriggerFunctions, getPollFunctions);
} }
if (this.activationErrors[workflowId] !== undefined) { if (this.activationErrors[workflowId] !== undefined) {
@ -496,7 +557,11 @@ export class ActiveWorkflowRunner {
if (this.activeWorkflows !== null) { if (this.activeWorkflows !== null) {
// Remove all the webhooks of the workflow // Remove all the webhooks of the workflow
try {
await this.removeWorkflowWebhooks(workflowId); await this.removeWorkflowWebhooks(workflowId);
} catch (error) {
console.error(`Could not remove webhooks of workflow "${workflowId}" because of error: "${error.message}"`);
}
if (this.activationErrors[workflowId] !== undefined) { if (this.activationErrors[workflowId] !== undefined) {
// If there were any activation errors delete them // If there were any activation errors delete them

View file
@ -4,10 +4,17 @@ import {
import { import {
ICredentialDataDecryptedObject, ICredentialDataDecryptedObject,
ICredentialsExpressionResolveValues,
ICredentialsHelper, ICredentialsHelper,
INode,
INodeParameters, INodeParameters,
INodeProperties, INodeProperties,
INodeType,
INodeTypeData,
INodeTypes,
NodeHelpers, NodeHelpers,
Workflow,
WorkflowExecuteMode,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { import {
@ -18,6 +25,19 @@ import {
} from './'; } from './';
const mockNodeTypes: INodeTypes = {
nodeTypes: {},
init: async (nodeTypes?: INodeTypeData): Promise<void> => { },
getAll: (): INodeType[] => {
// Does not get used in Workflow, so no need to return anything
return [];
},
getByName: (nodeType: string): INodeType | undefined => {
return undefined;
},
};
export class CredentialsHelper extends ICredentialsHelper { export class CredentialsHelper extends ICredentialsHelper {
/** /**
@ -82,7 +102,7 @@ export class CredentialsHelper extends ICredentialsHelper {
* @returns {ICredentialDataDecryptedObject} * @returns {ICredentialDataDecryptedObject}
* @memberof CredentialsHelper * @memberof CredentialsHelper
*/ */
getDecrypted(name: string, type: string, raw?: boolean): ICredentialDataDecryptedObject { getDecrypted(name: string, type: string, mode: WorkflowExecuteMode, raw?: boolean, expressionResolveValues?: ICredentialsExpressionResolveValues): ICredentialDataDecryptedObject {
const credentials = this.getCredentials(name, type); const credentials = this.getCredentials(name, type);
const decryptedDataOriginal = credentials.getData(this.encryptionKey); const decryptedDataOriginal = credentials.getData(this.encryptionKey);
@ -91,7 +111,7 @@ export class CredentialsHelper extends ICredentialsHelper {
return decryptedDataOriginal; return decryptedDataOriginal;
} }
return this.applyDefaultsAndOverwrites(decryptedDataOriginal, type); return this.applyDefaultsAndOverwrites(decryptedDataOriginal, type, mode, expressionResolveValues);
} }
@ -103,11 +123,11 @@ export class CredentialsHelper extends ICredentialsHelper {
* @returns {ICredentialDataDecryptedObject} * @returns {ICredentialDataDecryptedObject}
* @memberof CredentialsHelper * @memberof CredentialsHelper
*/ */
applyDefaultsAndOverwrites(decryptedDataOriginal: ICredentialDataDecryptedObject, type: string): ICredentialDataDecryptedObject { applyDefaultsAndOverwrites(decryptedDataOriginal: ICredentialDataDecryptedObject, type: string, mode: WorkflowExecuteMode, expressionResolveValues?: ICredentialsExpressionResolveValues): ICredentialDataDecryptedObject {
const credentialsProperties = this.getCredentialsProperties(type); const credentialsProperties = this.getCredentialsProperties(type);
// Add the default credential values // Add the default credential values
const decryptedData = NodeHelpers.getNodeParameters(credentialsProperties, decryptedDataOriginal as INodeParameters, true, false) as ICredentialDataDecryptedObject; let decryptedData = NodeHelpers.getNodeParameters(credentialsProperties, decryptedDataOriginal as INodeParameters, true, false) as ICredentialDataDecryptedObject;
if (decryptedDataOriginal.oauthTokenData !== undefined) { if (decryptedDataOriginal.oauthTokenData !== undefined) {
// The OAuth data gets removed as it is not defined specifically as a parameter // The OAuth data gets removed as it is not defined specifically as a parameter
@ -115,6 +135,29 @@ export class CredentialsHelper extends ICredentialsHelper {
decryptedData.oauthTokenData = decryptedDataOriginal.oauthTokenData; decryptedData.oauthTokenData = decryptedDataOriginal.oauthTokenData;
} }
if (expressionResolveValues) {
try {
const workflow = new Workflow({ nodes: Object.values(expressionResolveValues.workflow.nodes), connections: expressionResolveValues.workflow.connectionsBySourceNode, active: false, nodeTypes: expressionResolveValues.workflow.nodeTypes });
decryptedData = workflow.expression.getParameterValue(decryptedData as INodeParameters, expressionResolveValues.runExecutionData, expressionResolveValues.runIndex, expressionResolveValues.itemIndex, expressionResolveValues.node.name, expressionResolveValues.connectionInputData, mode, false, decryptedData) as ICredentialDataDecryptedObject;
} catch (e) {
e.message += ' [Error resolving credentials]';
throw e;
}
} else {
const node = {
name: '',
typeVersion: 1,
type: 'mock',
position: [0, 0],
parameters: {} as INodeParameters,
} as INode;
const workflow = new Workflow({ nodes: [node!], connections: {}, active: false, nodeTypes: mockNodeTypes });
// Resolve expressions if any are set
decryptedData = workflow.expression.getComplexParameterValue(node!, decryptedData as INodeParameters, mode, undefined, decryptedData) as ICredentialDataDecryptedObject;
}
// Load and apply the credentials overwrites if any exist // Load and apply the credentials overwrites if any exist
const credentialsOverwrites = CredentialsOverwrites(); const credentialsOverwrites = CredentialsOverwrites();
return credentialsOverwrites.applyOverwrite(type, decryptedData); return credentialsOverwrites.applyOverwrite(type, decryptedData);
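The two branches above make credential fields expression-aware: with expressionResolveValues the values are resolved against the live run data of the executing workflow, otherwise against a throwaway workflow containing a single mock node. A standalone sketch of the convention this enables (values starting with '=' are expressions; this toy resolver is only an illustration, not n8n's resolver):

// Toy resolver: values of the form '={{ $env.NAME }}' are replaced from a
// lookup table; everything else passes through unchanged.
type Params = { [key: string]: string };

function resolveExpressions(params: Params, env: Params): Params {
	const out: Params = {};
	for (const [key, value] of Object.entries(params)) {
		if (value.startsWith('={{') && value.endsWith('}}')) {
			const name = value.slice(3, -2).trim().replace(/^\$env\./, '');
			out[key] = env[name] ?? '';
		} else {
			out[key] = value;
		}
	}
	return out;
}

// A credential whose baseUrl is an expression and whose apiKey is static:
const decrypted = resolveExpressions(
	{ baseUrl: '={{ $env.BASE_URL }}', apiKey: 'abc123' },
	{ BASE_URL: 'https://api.example.com' },
);
// decrypted.baseUrl === 'https://api.example.com'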
View file
@ -3,32 +3,53 @@ import {
} from 'n8n-workflow'; } from 'n8n-workflow';
import { import {
ICredentialsOverwrite, CredentialTypes,
GenericHelpers, GenericHelpers,
ICredentialsOverwrite,
} from './'; } from './';
class CredentialsOverwritesClass { class CredentialsOverwritesClass {
private credentialTypes = CredentialTypes();
private overwriteData: ICredentialsOverwrite = {}; private overwriteData: ICredentialsOverwrite = {};
private resolvedTypes: string[] = [];
async init(overwriteData?: ICredentialsOverwrite) { async init(overwriteData?: ICredentialsOverwrite) {
if (overwriteData !== undefined) { if (overwriteData !== undefined) {
// If data is already given it can directly be set instead of // If data is already given it can directly be set instead of
// loaded from environment // loaded from environment
this.overwriteData = overwriteData; this.__setData(JSON.parse(JSON.stringify(overwriteData)));
return; return;
} }
const data = await GenericHelpers.getConfigValue('credentials.overwrite.data') as string; const data = await GenericHelpers.getConfigValue('credentials.overwrite.data') as string;
try { try {
this.overwriteData = JSON.parse(data); const overwriteData = JSON.parse(data);
this.__setData(overwriteData);
} catch (error) { } catch (error) {
throw new Error(`The credentials-overwrite is not valid JSON.`); throw new Error(`The credentials-overwrite is not valid JSON.`);
} }
} }
__setData(overwriteData: ICredentialsOverwrite) {
this.overwriteData = overwriteData;
for (const credentialTypeData of this.credentialTypes.getAll()) {
const type = credentialTypeData.name;
const overwrites = this.__getExtended(type);
if (overwrites && Object.keys(overwrites).length) {
this.overwriteData[type] = overwrites;
}
}
}
applyOverwrite(type: string, data: ICredentialDataDecryptedObject) { applyOverwrite(type: string, data: ICredentialDataDecryptedObject) {
const overwrites = this.get(type); const overwrites = this.get(type);
@ -38,15 +59,55 @@ class CredentialsOverwritesClass {
} }
const returnData = JSON.parse(JSON.stringify(data)); const returnData = JSON.parse(JSON.stringify(data));
Object.assign(returnData, overwrites); // Overwrite only if there is currently no data set
for (const key of Object.keys(overwrites)) {
if ([null, undefined, ''].includes(returnData[key])) {
returnData[key] = overwrites[key];
}
}
return returnData; return returnData;
} }
__getExtended(type: string): ICredentialDataDecryptedObject | undefined {
if (this.resolvedTypes.includes(type)) {
// Type was already resolved, so it can be returned directly
return this.overwriteData[type];
}
const credentialTypeData = this.credentialTypes.getByName(type);
if (credentialTypeData === undefined) {
throw new Error(`The credentials of type "${type}" are not known.`);
}
if (credentialTypeData.extends === undefined) {
this.resolvedTypes.push(type);
return this.overwriteData[type];
}
const overwrites: ICredentialDataDecryptedObject = {};
for (const credentialsTypeName of credentialTypeData.extends) {
Object.assign(overwrites, this.__getExtended(credentialsTypeName));
}
if (this.overwriteData[type] !== undefined) {
Object.assign(overwrites, this.overwriteData[type]);
}
this.resolvedTypes.push(type);
return overwrites;
}
get(type: string): ICredentialDataDecryptedObject | undefined { get(type: string): ICredentialDataDecryptedObject | undefined {
return this.overwriteData[type]; return this.overwriteData[type];
} }
getAll(): ICredentialsOverwrite { getAll(): ICredentialsOverwrite {
return this.overwriteData; return this.overwriteData;
} }
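Taken together, __getExtended and the non-destructive applyOverwrite give behavior like the following worked sketch (the credential type names and the parent/child "extends" relationship are made up for illustration):

// Configured overwrite data: a base type and a type that extends it.
const overwriteData: { [type: string]: { [key: string]: string } } = {
	oAuth2Api: { clientId: 'shared-id', clientSecret: 'shared-secret' },
	slackOAuth2Api: { clientId: 'slack-id' },
};

// __setData resolves the "extends" chain, so the effective overwrites for the
// child type are the parent values merged with (and shadowed by) its own:
const effective: { [key: string]: string } = { ...overwriteData.oAuth2Api, ...overwriteData.slackOAuth2Api };
// -> { clientId: 'slack-id', clientSecret: 'shared-secret' }

// applyOverwrite then only fills fields the user left empty:
const userData: { [key: string]: string | null } = { clientId: 'my-own-id', clientSecret: '' };
const isEmpty = (value: unknown) => value === null || value === undefined || value === '';
for (const key of Object.keys(effective)) {
	if (isEmpty(userData[key])) {
		userData[key] = effective[key];
	}
}
// -> { clientId: 'my-own-id', clientSecret: 'shared-secret' }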
View file
@ -19,7 +19,6 @@ import { TlsOptions } from 'tls';
import * as config from '../config'; import * as config from '../config';
import { import {
MongoDb,
MySQLDb, MySQLDb,
PostgresDb, PostgresDb,
SQLite, SQLite,
@ -32,29 +31,9 @@ export let collections: IDatabaseCollections = {
Webhook: null, Webhook: null,
}; };
import { import { postgresMigrations } from './databases/postgresdb/migrations';
InitialMigration1587669153312, import { mysqlMigrations } from './databases/mysqldb/migrations';
WebhookModel1589476000887, import { sqliteMigrations } from './databases/sqlite/migrations';
CreateIndexStoppedAt1594828256133,
} from './databases/postgresdb/migrations';
import {
InitialMigration1587563438936,
WebhookModel1592679094242,
CreateIndexStoppedAt1594910478695,
} from './databases/mongodb/migrations';
import {
InitialMigration1588157391238,
WebhookModel1592447867632,
CreateIndexStoppedAt1594902918301,
} from './databases/mysqldb/migrations';
import {
InitialMigration1588102412422,
WebhookModel1592445003908,
CreateIndexStoppedAt1594825041918,
} from './databases/sqlite/migrations';
import * as path from 'path'; import * as path from 'path';
@ -68,23 +47,6 @@ export async function init(): Promise<IDatabaseCollections> {
const entityPrefix = config.get('database.tablePrefix'); const entityPrefix = config.get('database.tablePrefix');
switch (dbType) { switch (dbType) {
case 'mongodb':
entities = MongoDb;
connectionOptions = {
type: 'mongodb',
entityPrefix,
url: await GenericHelpers.getConfigValue('database.mongodb.connectionUrl') as string,
useNewUrlParser: true,
migrations: [
InitialMigration1587563438936,
WebhookModel1592679094242,
CreateIndexStoppedAt1594910478695,
],
migrationsRun: true,
migrationsTableName: `${entityPrefix}migrations`,
};
break;
case 'postgresdb': case 'postgresdb':
entities = PostgresDb; entities = PostgresDb;
@ -112,11 +74,7 @@ export async function init(): Promise<IDatabaseCollections> {
port: await GenericHelpers.getConfigValue('database.postgresdb.port') as number, port: await GenericHelpers.getConfigValue('database.postgresdb.port') as number,
username: await GenericHelpers.getConfigValue('database.postgresdb.user') as string, username: await GenericHelpers.getConfigValue('database.postgresdb.user') as string,
schema: config.get('database.postgresdb.schema'), schema: config.get('database.postgresdb.schema'),
migrations: [ migrations: postgresMigrations,
InitialMigration1587669153312,
WebhookModel1589476000887,
CreateIndexStoppedAt1594828256133,
],
migrationsRun: true, migrationsRun: true,
migrationsTableName: `${entityPrefix}migrations`, migrationsTableName: `${entityPrefix}migrations`,
ssl, ssl,
@ -135,11 +93,7 @@ export async function init(): Promise<IDatabaseCollections> {
password: await GenericHelpers.getConfigValue('database.mysqldb.password') as string, password: await GenericHelpers.getConfigValue('database.mysqldb.password') as string,
port: await GenericHelpers.getConfigValue('database.mysqldb.port') as number, port: await GenericHelpers.getConfigValue('database.mysqldb.port') as number,
username: await GenericHelpers.getConfigValue('database.mysqldb.user') as string, username: await GenericHelpers.getConfigValue('database.mysqldb.user') as string,
migrations: [ migrations: mysqlMigrations,
InitialMigration1588157391238,
WebhookModel1592447867632,
CreateIndexStoppedAt1594902918301,
],
migrationsRun: true, migrationsRun: true,
migrationsTableName: `${entityPrefix}migrations`, migrationsTableName: `${entityPrefix}migrations`,
}; };
@ -151,12 +105,8 @@ export async function init(): Promise<IDatabaseCollections> {
type: 'sqlite', type: 'sqlite',
database: path.join(n8nFolder, 'database.sqlite'), database: path.join(n8nFolder, 'database.sqlite'),
entityPrefix, entityPrefix,
migrations: [ migrations: sqliteMigrations,
InitialMigration1588102412422, migrationsRun: false, // migrations for sqlite will be run manually for now; see below
WebhookModel1592445003908,
CreateIndexStoppedAt1594825041918
],
migrationsRun: true,
migrationsTableName: `${entityPrefix}migrations`, migrationsTableName: `${entityPrefix}migrations`,
}; };
break; break;
@ -171,12 +121,31 @@ export async function init(): Promise<IDatabaseCollections> {
logging: false, logging: false,
}); });
const connection = await createConnection(connectionOptions); let connection = await createConnection(connectionOptions);
if (dbType === 'sqlite') {
// This specific migration changes database metadata.
// A field is now nullable. We need to reconnect so that
// n8n knows it has changed. Happens only on sqlite.
let migrations = [];
try {
migrations = await connection.query(`SELECT id FROM ${entityPrefix}migrations where name = "MakeStoppedAtNullable1607431743769"`);
} catch(error) {
// Migration table does not exist yet - it will be created after migrations run for the first time.
}
// If you remove this call, remember to turn back on the
// setting to run migrations automatically above.
await connection.runMigrations({ await connection.runMigrations({
transaction: 'none', transaction: 'none',
}); });
if (migrations.length === 0) {
await connection.close();
connection = await createConnection(connectionOptions);
}
}
collections.Credentials = getRepository(entities.CredentialsEntity); collections.Credentials = getRepository(entities.CredentialsEntity);
collections.Execution = getRepository(entities.ExecutionEntity); collections.Execution = getRepository(entities.ExecutionEntity);
collections.Workflow = getRepository(entities.WorkflowEntity); collections.Workflow = getRepository(entities.WorkflowEntity);
View file
@ -1,7 +1,8 @@
import { import {
Db, Db,
IExternalHooksFunctions,
IExternalHooksClass, IExternalHooksClass,
IExternalHooksFileData,
IExternalHooksFunctions,
} from './'; } from './';
import * as config from '../config'; import * as config from '../config';
@ -20,6 +21,24 @@ class ExternalHooksClass implements IExternalHooksClass {
return; return;
} }
await this.loadHooksFiles();
this.initDidRun = true;
}
async reload(externalHooks?: IExternalHooksFileData) {
this.externalHooks = {};
if (externalHooks === undefined) {
await this.loadHooksFiles(true);
} else {
this.loadHooks(externalHooks);
}
}
async loadHooksFiles(reload = false) {
const externalHookFiles = config.get('externalHookFiles').split(':'); const externalHookFiles = config.get('externalHookFiles').split(':');
// Load all the provided hook-files // Load all the provided hook-files
@ -27,10 +46,24 @@ class ExternalHooksClass implements IExternalHooksClass {
hookFilePath = hookFilePath.trim(); hookFilePath = hookFilePath.trim();
if (hookFilePath !== '') { if (hookFilePath !== '') {
try { try {
const hookFile = require(hookFilePath);
for (const resource of Object.keys(hookFile)) { if (reload === true) {
for (const operation of Object.keys(hookFile[resource])) { delete require.cache[require.resolve(hookFilePath)];
}
const hookFile = require(hookFilePath) as IExternalHooksFileData;
this.loadHooks(hookFile);
} catch (error) {
throw new Error(`Problem loading external hook file "${hookFilePath}": ${error.message}`);
}
}
}
}
loadHooks(hookFileData: IExternalHooksFileData) {
for (const resource of Object.keys(hookFileData)) {
for (const operation of Object.keys(hookFileData[resource])) {
// Save all the hook functions directly under their string // Save all the hook functions directly under their string
// format in an array // format in an array
const hookString = `${resource}.${operation}`; const hookString = `${resource}.${operation}`;
@ -38,17 +71,11 @@ class ExternalHooksClass implements IExternalHooksClass {
this.externalHooks[hookString] = []; this.externalHooks[hookString] = [];
} }
this.externalHooks[hookString].push.apply(this.externalHooks[hookString], hookFile[resource][operation]); this.externalHooks[hookString].push.apply(this.externalHooks[hookString], hookFileData[resource][operation]);
}
}
} catch (error) {
throw new Error(`Problem loading external hook file "${hookFilePath}": ${error.message}`);
} }
} }
} }
this.initDidRun = true;
}
async run(hookName: string, hookParameters?: any[]): Promise<void> { // tslint:disable-line:no-any async run(hookName: string, hookParameters?: any[]): Promise<void> { // tslint:disable-line:no-any
const externalHookFunctions: IExternalHooksFunctions = { const externalHookFunctions: IExternalHooksFunctions = {
@ -64,6 +91,11 @@ class ExternalHooksClass implements IExternalHooksClass {
} }
} }
exists(hookName: string): boolean {
return !!this.externalHooks[hookName];
}
} }
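A hook file loaded by loadHooksFiles is a plain CommonJS module whose shape matches IExternalHooksFileData. A minimal sketch (the workflow.afterUpdate and workflow.afterDelete hook names come from calls added elsewhere in this commit; the file path and the EXTERNAL_HOOK_FILES variable name for the externalHookFiles config key are assumptions):

// my-hooks.ts, compiled to JS and referenced via a colon-separated list,
// e.g. EXTERNAL_HOOK_FILES=/data/hooks/my-hooks.js
module.exports = {
	workflow: {
		afterUpdate: [
			async function (workflowData: { id?: string | number }): Promise<void> {
				console.log(`workflow ${workflowData.id} was updated`);
			},
		],
		afterDelete: [
			async function (id: string): Promise<void> {
				console.log(`workflow ${id} was deleted`);
			},
		],
	},
};

Because reload() deletes the require cache entry before re-requiring the file, edits to such a module can be picked up without restarting the process.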
View file
@ -95,14 +95,15 @@ export async function getConfigValue(configKey: string): Promise<string | boolea
// Get the environment variable // Get the environment variable
const configSchema = config.getSchema(); const configSchema = config.getSchema();
let currentSchema = configSchema.properties as IDataObject; // @ts-ignore
let currentSchema = configSchema._cvtProperties as IDataObject;
for (const key of configKeyParts) { for (const key of configKeyParts) {
if (currentSchema[key] === undefined) { if (currentSchema[key] === undefined) {
throw new Error(`Key "${key}" of ConfigKey "${configKey}" does not exist`); throw new Error(`Key "${key}" of ConfigKey "${configKey}" does not exist`);
} else if ((currentSchema[key]! as IDataObject).properties === undefined) { } else if ((currentSchema[key]! as IDataObject)._cvtProperties === undefined) {
currentSchema = currentSchema[key] as IDataObject; currentSchema = currentSchema[key] as IDataObject;
} else { } else {
currentSchema = (currentSchema[key] as IDataObject).properties as IDataObject; currentSchema = (currentSchema[key] as IDataObject)._cvtProperties as IDataObject;
} }
} }
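The change tracks convict's internal schema layout, which newer convict versions expose under _cvtProperties instead of properties. Callers are unaffected; a usage sketch (import path assumed from this package's exports, config keys taken from the database setup above):

import { getConfigValue } from './GenericHelpers';

async function readDbConfig(): Promise<void> {
	// Walks the schema one key segment at a time and throws if a segment
	// does not exist, exactly as the loop above does.
	const host = await getConfigValue('database.postgresdb.host') as string;
	const port = await getConfigValue('database.postgresdb.port') as number;
	console.log(`${host}:${port}`);
}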
View file
@ -33,6 +33,15 @@ export interface IActivationError {
}; };
} }
export interface IBullJobData {
executionId: string;
loadStaticData: boolean;
}
export interface IBullJobResponse {
success: boolean;
}
export interface ICustomRequest extends Request { export interface ICustomRequest extends Request {
parsedUrl: Url | undefined; parsedUrl: Url | undefined;
} }
@ -57,6 +66,8 @@ export interface IWebhookDb {
webhookPath: string; webhookPath: string;
method: string; method: string;
node: string; node: string;
webhookId?: string;
pathLength?: number;
} }
export interface IWorkflowBase extends IWorkflowBaseWorkflow { export interface IWorkflowBase extends IWorkflowBaseWorkflow {
@ -103,14 +114,14 @@ export interface ICredentialsDecryptedResponse extends ICredentialsDecryptedDb {
id: string; id: string;
} }
export type DatabaseType = 'mariadb' | 'mongodb' | 'postgresdb' | 'mysqldb' | 'sqlite'; export type DatabaseType = 'mariadb' | 'postgresdb' | 'mysqldb' | 'sqlite';
export type SaveExecutionDataType = 'all' | 'none'; export type SaveExecutionDataType = 'all' | 'none';
export interface IExecutionBase { export interface IExecutionBase {
id?: number | string | ObjectID; id?: number | string | ObjectID;
mode: WorkflowExecuteMode; mode: WorkflowExecuteMode;
startedAt: Date; startedAt: Date;
stoppedAt: Date; stoppedAt?: Date; // empty value means execution is still running
workflowId?: string; // To be able to filter executions easily // workflowId?: string; // To be able to filter executions easily //
finished: boolean; finished: boolean;
retryOf?: number | string | ObjectID; // If it is a retry, the id of the execution it is a retry of. retryOf?: number | string | ObjectID; // If it is a retry, the id of the execution it is a retry of.
@ -164,12 +175,11 @@ export interface IExecutionsStopData {
finished?: boolean; finished?: boolean;
mode: WorkflowExecuteMode; mode: WorkflowExecuteMode;
startedAt: Date; startedAt: Date;
stoppedAt: Date; stoppedAt?: Date;
} }
export interface IExecutionsSummary { export interface IExecutionsSummary {
id?: string; // executionIdDb id: string;
idActive?: string; // executionIdActive
finished?: boolean; finished?: boolean;
mode: WorkflowExecuteMode; mode: WorkflowExecuteMode;
retryOf?: string; retryOf?: string;
@ -219,6 +229,12 @@ export interface IExternalHooks {
}; };
} }
export interface IExternalHooksFileData {
[key: string]: {
[key: string]: Array<(...args: any[]) => Promise<void>>; //tslint:disable-line:no-any
};
}
export interface IExternalHooksFunctions { export interface IExternalHooksFunctions {
dbCollections: IDatabaseCollections; dbCollections: IDatabaseCollections;
} }
@ -241,9 +257,6 @@ export interface IN8nConfig {
export interface IN8nConfigDatabase { export interface IN8nConfigDatabase {
type: DatabaseType; type: DatabaseType;
mongodb: {
connectionUrl: string;
};
postgresdb: { postgresdb: {
host: string; host: string;
password: string; password: string;
@ -288,9 +301,16 @@ export interface IN8nUISettings {
saveManualExecutions: boolean; saveManualExecutions: boolean;
executionTimeout: number; executionTimeout: number;
maxExecutionTimeout: number; maxExecutionTimeout: number;
oauthCallbackUrls: {
oauth1: string;
oauth2: string;
};
timezone: string; timezone: string;
urlBaseWebhook: string; urlBaseWebhook: string;
versionCli: string; versionCli: string;
n8nMetadata?: {
[key: string]: string | number | undefined;
};
} }
export interface IPackageVersions { export interface IPackageVersions {
@ -306,8 +326,7 @@ export type IPushDataType = 'executionFinished' | 'executionStarted' | 'nodeExec
export interface IPushDataExecutionFinished { export interface IPushDataExecutionFinished {
data: IRun; data: IRun;
executionIdActive: string; executionId: string;
executionIdDb?: string;
retryOf?: string; retryOf?: string;
} }
View file
@ -33,6 +33,7 @@ class LoadNodesAndCredentialsClass {
} = {}; } = {};
excludeNodes: string[] | undefined = undefined; excludeNodes: string[] | undefined = undefined;
includeNodes: string[] | undefined = undefined;
nodeModulesPath = ''; nodeModulesPath = '';
@ -63,6 +64,7 @@ class LoadNodesAndCredentialsClass {
} }
this.excludeNodes = config.get('nodes.exclude'); this.excludeNodes = config.get('nodes.exclude');
this.includeNodes = config.get('nodes.include');
// Get all the installed packages which contain n8n nodes // Get all the installed packages which contain n8n nodes
const packages = await this.getN8nNodePackages(); const packages = await this.getN8nNodePackages();
@ -175,6 +177,10 @@ class LoadNodesAndCredentialsClass {
tempNode.description.icon = 'file:' + path.join(path.dirname(filePath), tempNode.description.icon.substr(5)); tempNode.description.icon = 'file:' + path.join(path.dirname(filePath), tempNode.description.icon.substr(5));
} }
if (this.includeNodes !== undefined && !this.includeNodes.includes(fullNodeName)) {
return;
}
// Check if the node should be skipped // Check if the node should be skipped
if (this.excludeNodes !== undefined && this.excludeNodes.includes(fullNodeName)) { if (this.excludeNodes !== undefined && this.excludeNodes.includes(fullNodeName)) {
return; return;
View file
@ -1,7 +1,7 @@
import { import {
INodeType, INodeType,
INodeTypes,
INodeTypeData, INodeTypeData,
INodeTypes,
NodeHelpers, NodeHelpers,
} from 'n8n-workflow'; } from 'n8n-workflow';

packages/cli/src/Queue.ts Normal file
View file
@ -0,0 +1,67 @@
import * as Bull from 'bull';
import * as config from '../config';
import { IBullJobData } from './Interfaces';
export class Queue {
private jobQueue: Bull.Queue;
constructor() {
const prefix = config.get('queue.bull.prefix') as string;
const redisOptions = config.get('queue.bull.redis') as object;
// Disabling the ready check is necessary as it allows the worker to
// quickly reconnect to Redis if Redis crashes or is unreachable
// for some time. With it enabled, the worker might take minutes to realize
// Redis is back up and resume working.
// More here: https://github.com/OptimalBits/bull/issues/890
// @ts-ignore
this.jobQueue = new Bull('jobs', { prefix, redis: redisOptions, enableReadyCheck: false });
}
async add(jobData: IBullJobData, jobOptions: object): Promise<Bull.Job> {
return await this.jobQueue.add(jobData, jobOptions);
}
async getJob(jobId: Bull.JobId): Promise<Bull.Job | null> {
return await this.jobQueue.getJob(jobId);
}
async getJobs(jobTypes: Bull.JobStatus[]): Promise<Bull.Job[]> {
return await this.jobQueue.getJobs(jobTypes);
}
getBullObjectInstance(): Bull.Queue {
return this.jobQueue;
}
/**
*
* @param job A Bull.Job instance
* @returns boolean true if we were able to safely stop the job
*/
async stopJob(job: Bull.Job): Promise<boolean> {
if (await job.isActive()) {
// Job is already running so tell it to stop
await job.progress(-1);
return true;
} else {
// Job did not get started yet so remove from queue
try {
await job.remove();
return true;
} catch (e) {
await job.progress(-1);
}
}
return false;
}
}
let activeQueueInstance: Queue | undefined;
export function getInstance(): Queue {
if (activeQueueInstance === undefined) {
activeQueueInstance = new Queue();
}
return activeQueueInstance;
}
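On the consuming side, a worker process attaches a processor to the same queue and watches for the progress(-1) signal that stopJob() sends for active jobs. A minimal sketch (the 'jobs' queue name and the IBullJobData shape come from this file; the Redis options, concurrency value, and handler bodies are illustrative):

import * as Bull from 'bull';

interface IBullJobData {
	executionId: string;
	loadStaticData: boolean;
}

const queue = new Bull('jobs', { prefix: 'bull', redis: { host: 'localhost', port: 6379 } });

// Pull jobs off the queue, up to 10 at a time.
queue.process(10, async (job: Bull.Job<IBullJobData>) => {
	// ...load the execution identified by job.data.executionId and run it...
	return { success: true };
});

// stopJob() reports -1 as progress on active jobs; a worker listening for the
// global event can abort the matching execution.
queue.on('global:progress', (jobId: Bull.JobId, progress: number) => {
	if (progress === -1) {
		// ...cancel the running execution that belongs to jobId...
	}
});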
View file
@ -64,10 +64,14 @@ export function sendSuccessResponse(res: Response, data: any, raw?: boolean, res
} }
if (raw === true) { if (raw === true) {
if (typeof data === 'string') {
res.send(data);
} else {
res.json(data); res.json(data);
}
} else { } else {
res.json({ res.json({
data data,
}); });
} }
} }
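The new string branch matters for raw responses, since a plain string is now sent verbatim instead of being JSON-encoded. A sketch of the three resulting shapes (wiring is illustrative; the import path is assumed):

import { Response } from 'express';
import { sendSuccessResponse } from './ResponseHelper';

function respond(res: Response, metricsText: string): void {
	// Default: wrapped in an envelope, i.e. {"data": ...}
	// sendSuccessResponse(res, { status: 'ok' });
	// raw object: bare JSON, i.e. {"status":"ok"}
	// sendSuccessResponse(res, { status: 'ok' }, true);
	// raw string: sent verbatim, which the /metrics endpoint below relies on
	sendSuccessResponse(res, metricsText, true, 200);
}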
@ -183,7 +187,8 @@ export function unflattenExecutionData(fullExecutionData: IExecutionFlattedDb):
mode: fullExecutionData.mode, mode: fullExecutionData.mode,
startedAt: fullExecutionData.startedAt, startedAt: fullExecutionData.startedAt,
stoppedAt: fullExecutionData.stoppedAt, stoppedAt: fullExecutionData.stoppedAt,
finished: fullExecutionData.finished ? fullExecutionData.finished : false finished: fullExecutionData.finished ? fullExecutionData.finished : false,
workflowId: fullExecutionData.workflowId,
}); });
return returnData; return returnData;
View file
@ -8,7 +8,9 @@ import {
resolve as pathResolve, resolve as pathResolve,
} from 'path'; } from 'path';
import { import {
getConnection,
getConnectionManager, getConnectionManager,
In,
} from 'typeorm'; } from 'typeorm';
import * as bodyParser from 'body-parser'; import * as bodyParser from 'body-parser';
require('body-parser-xml')(bodyParser); require('body-parser-xml')(bodyParser);
@ -20,25 +22,31 @@ import { RequestOptions } from 'oauth-1.0a';
import * as csrf from 'csrf'; import * as csrf from 'csrf';
import * as requestPromise from 'request-promise-native'; import * as requestPromise from 'request-promise-native';
import { createHmac } from 'crypto'; import { createHmac } from 'crypto';
import { compare } from 'bcryptjs';
import * as promClient from 'prom-client';
import { import {
ActiveExecutions, ActiveExecutions,
ActiveWorkflowRunner, ActiveWorkflowRunner,
CredentialsHelper, CredentialsHelper,
CredentialsOverwrites,
CredentialTypes, CredentialTypes,
Db, Db,
ExternalHooks, ExternalHooks,
GenericHelpers,
IActivationError, IActivationError,
ICustomRequest,
ICredentialsDb, ICredentialsDb,
ICredentialsDecryptedDb, ICredentialsDecryptedDb,
ICredentialsDecryptedResponse, ICredentialsDecryptedResponse,
ICredentialsOverwrite,
ICredentialsResponse, ICredentialsResponse,
ICustomRequest,
IExecutionDeleteFilter, IExecutionDeleteFilter,
IExecutionFlatted, IExecutionFlatted,
IExecutionFlattedDb, IExecutionFlattedDb,
IExecutionFlattedResponse, IExecutionFlattedResponse,
IExecutionPushResponse, IExecutionPushResponse,
IExecutionResponse,
IExecutionsListResponse, IExecutionsListResponse,
IExecutionsStopData, IExecutionsStopData,
IExecutionsSummary, IExecutionsSummary,
@ -46,21 +54,19 @@ import {
IN8nUISettings, IN8nUISettings,
IPackageVersions, IPackageVersions,
IWorkflowBase, IWorkflowBase,
IWorkflowShortResponse,
IWorkflowResponse,
IWorkflowExecutionDataProcess, IWorkflowExecutionDataProcess,
IWorkflowResponse,
IWorkflowShortResponse,
LoadNodesAndCredentials,
NodeTypes, NodeTypes,
Push, Push,
ResponseHelper, ResponseHelper,
TestWebhooks, TestWebhooks,
WorkflowCredentials,
WebhookHelpers, WebhookHelpers,
WebhookServer,
WorkflowCredentials,
WorkflowExecuteAdditionalData, WorkflowExecuteAdditionalData,
WorkflowRunner, WorkflowRunner,
GenericHelpers,
CredentialsOverwrites,
ICredentialsOverwrite,
LoadNodesAndCredentials,
} from './'; } from './';
import { import {
@ -74,18 +80,18 @@ import {
ICredentialType, ICredentialType,
IDataObject, IDataObject,
INodeCredentials, INodeCredentials,
INodeTypeDescription,
INodeParameters, INodeParameters,
INodePropertyOptions, INodePropertyOptions,
INodeTypeDescription,
IRunData, IRunData,
IWorkflowCredentials, IWorkflowCredentials,
Workflow, Workflow,
WorkflowExecuteMode,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { import {
FindManyOptions, FindManyOptions,
FindOneOptions, FindOneOptions,
LessThan,
LessThanOrEqual, LessThanOrEqual,
Not, Not,
} from 'typeorm'; } from 'typeorm';
@ -99,7 +105,9 @@ import * as jwks from 'jwks-rsa';
import * as timezones from 'google-timezones-json'; import * as timezones from 'google-timezones-json';
import * as parseUrl from 'parseurl'; import * as parseUrl from 'parseurl';
import * as querystring from 'querystring'; import * as querystring from 'querystring';
import * as Queue from '../src/Queue';
import { OptionsWithUrl } from 'request-promise-native'; import { OptionsWithUrl } from 'request-promise-native';
import { Registry } from 'prom-client';
class App { class App {
@ -120,7 +128,7 @@ class App {
push: Push.Push; push: Push.Push;
versions: IPackageVersions | undefined; versions: IPackageVersions | undefined;
restEndpoint: string; restEndpoint: string;
frontendSettings: IN8nUISettings;
protocol: string; protocol: string;
sslKey: string; sslKey: string;
sslCert: string; sslCert: string;
@ -154,6 +162,25 @@ class App {
this.presetCredentialsLoaded = false; this.presetCredentialsLoaded = false;
this.endpointPresetCredentials = config.get('credentials.overwrite.endpoint') as string; this.endpointPresetCredentials = config.get('credentials.overwrite.endpoint') as string;
const urlBaseWebhook = WebhookHelpers.getWebhookBaseUrl();
this.frontendSettings = {
endpointWebhook: this.endpointWebhook,
endpointWebhookTest: this.endpointWebhookTest,
saveDataErrorExecution: this.saveDataErrorExecution,
saveDataSuccessExecution: this.saveDataSuccessExecution,
saveManualExecutions: this.saveManualExecutions,
executionTimeout: this.executionTimeout,
maxExecutionTimeout: this.maxExecutionTimeout,
timezone: this.timezone,
urlBaseWebhook,
versionCli: '',
oauthCallbackUrls: {
'oauth1': urlBaseWebhook + `${this.restEndpoint}/oauth1-credential/callback`,
'oauth2': urlBaseWebhook + `${this.restEndpoint}/oauth2-credential/callback`,
},
};
} }
@ -170,8 +197,27 @@ class App {
async config(): Promise<void> { async config(): Promise<void> {
const enableMetrics = config.get('endpoints.metrics.enable') as boolean;
let register: Registry;
if (enableMetrics === true) {
const prefix = config.get('endpoints.metrics.prefix') as string;
register = new promClient.Registry();
register.setDefaultLabels({ prefix });
promClient.collectDefaultMetrics({ register });
}
this.versions = await GenericHelpers.getVersions(); this.versions = await GenericHelpers.getVersions();
const authIgnoreRegex = new RegExp(`^\/(healthz|${this.endpointWebhook}|${this.endpointWebhookTest})\/?.*$`); this.frontendSettings.versionCli = this.versions.cli;
await this.externalHooks.run('frontend.settings', [this.frontendSettings]);
const excludeEndpoints = config.get('security.excludeEndpoints') as string;
const ignoredEndpoints = ['healthz', 'metrics', this.endpointWebhook, this.endpointWebhookTest, this.endpointPresetCredentials];
ignoredEndpoints.push.apply(ignoredEndpoints, excludeEndpoints.split(':'));
const authIgnoreRegex = new RegExp(`^\/(${_(ignoredEndpoints).compact().join('|')})\/?.*$`);
// Check for basic auth credentials if activated // Check for basic auth credentials if activated
const basicAuthActive = config.get('security.basicAuth.active') as boolean; const basicAuthActive = config.get('security.basicAuth.active') as boolean;
@ -186,7 +232,11 @@ class App {
throw new Error('Basic auth is activated but no password got defined. Please set one!'); throw new Error('Basic auth is activated but no password got defined. Please set one!');
} }
this.app.use((req: express.Request, res: express.Response, next: express.NextFunction) => { const basicAuthHashEnabled = await GenericHelpers.getConfigValue('security.basicAuth.hash') as boolean;
let validPassword: null | string = null;
this.app.use(async (req: express.Request, res: express.Response, next: express.NextFunction) => {
if (req.url.match(authIgnoreRegex)) { if (req.url.match(authIgnoreRegex)) {
return next(); return next();
} }
@ -198,12 +248,27 @@ class App {
return ResponseHelper.basicAuthAuthorizationError(res, realm, 'Authorization is required!'); return ResponseHelper.basicAuthAuthorizationError(res, realm, 'Authorization is required!');
} }
if (basicAuthData.name !== basicAuthUser || basicAuthData.pass !== basicAuthPassword) { if (basicAuthData.name === basicAuthUser) {
// Provided authentication data is wrong if (basicAuthHashEnabled === true) {
return ResponseHelper.basicAuthAuthorizationError(res, realm, 'Authorization data is wrong!'); if (validPassword === null && await compare(basicAuthData.pass, basicAuthPassword)) {
// Password is valid so save for future requests
validPassword = basicAuthData.pass;
} }
next(); if (validPassword === basicAuthData.pass && validPassword !== null) {
// Provided hash is correct
return next();
}
} else {
if (basicAuthData.pass === basicAuthPassword) {
// Provided password is correct
return next();
}
}
}
// Provided authentication data is wrong
return ResponseHelper.basicAuthAuthorizationError(res, realm, 'Authorization data is wrong!');
}); });
} }
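With the hash option enabled, the configured password is expected to be a bcrypt hash, and compare() from bcryptjs checks each submitted password against it; the plaintext that passed is cached in validPassword so later requests skip the costly comparison. A hash can be produced like this (a sketch; the salt-round count is an arbitrary common choice):

import { hash } from 'bcryptjs';

async function generateBasicAuthHash(plainPassword: string): Promise<string> {
	return hash(plainPassword, 10); // 10 salt rounds
}

generateBasicAuthHash('my-password').then(h => console.log(h));
// Store the printed hash as the basic-auth password and enable the hash setting.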
@ -224,12 +289,12 @@ class App {
const jwtAllowedTenantKey = await GenericHelpers.getConfigValue('security.jwtAuth.jwtAllowedTenantKey') as string; const jwtAllowedTenantKey = await GenericHelpers.getConfigValue('security.jwtAuth.jwtAllowedTenantKey') as string;
const jwtAllowedTenant = await GenericHelpers.getConfigValue('security.jwtAuth.jwtAllowedTenant') as string; const jwtAllowedTenant = await GenericHelpers.getConfigValue('security.jwtAuth.jwtAllowedTenant') as string;
function isTenantAllowed(decodedToken: object): Boolean { function isTenantAllowed(decodedToken: object): boolean {
if (jwtNamespace === '' || jwtAllowedTenantKey === '' || jwtAllowedTenant === '') return true; if (jwtNamespace === '' || jwtAllowedTenantKey === '' || jwtAllowedTenant === '') return true;
else { else {
for (let [k, v] of Object.entries(decodedToken)) { for (const [k, v] of Object.entries(decodedToken)) {
if (k === jwtNamespace) { if (k === jwtNamespace) {
for (let [kn, kv] of Object.entries(v)) { for (const [kn, kv] of Object.entries(v)) {
if (kn === jwtAllowedTenantKey && kv === jwtAllowedTenant) { if (kn === jwtAllowedTenantKey && kv === jwtAllowedTenant) {
return true; return true;
} }
@ -245,11 +310,11 @@ class App {
return next(); return next();
} }
var token = req.header(jwtAuthHeader) as string; let token = req.header(jwtAuthHeader) as string;
if (token === undefined || token === '') { if (token === undefined || token === '') {
return ResponseHelper.jwtAuthAuthorizationError(res, "Missing token"); return ResponseHelper.jwtAuthAuthorizationError(res, "Missing token");
} }
if (jwtHeaderValuePrefix != '' && token.startsWith(jwtHeaderValuePrefix)) { if (jwtHeaderValuePrefix !== '' && token.startsWith(jwtHeaderValuePrefix)) {
token = token.replace(jwtHeaderValuePrefix + ' ', '').trimLeft(); token = token.replace(jwtHeaderValuePrefix + ' ', '').trimLeft();
} }
@ -263,10 +328,11 @@ class App {
}); });
} }
var jwtVerifyOptions: jwt.VerifyOptions = { const jwtVerifyOptions: jwt.VerifyOptions = {
issuer: jwtIssuer != '' ? jwtIssuer : undefined, issuer: jwtIssuer !== '' ? jwtIssuer : undefined,
ignoreExpiration: false ignoreExpiration: false,
} };
jwt.verify(token, getKey, jwtVerifyOptions, (err: jwt.VerifyErrors, decoded: object) => { jwt.verify(token, getKey, jwtVerifyOptions, (err: jwt.VerifyErrors, decoded: object) => {
if (err) ResponseHelper.jwtAuthAuthorizationError(res, 'Invalid token'); if (err) ResponseHelper.jwtAuthAuthorizationError(res, 'Invalid token');
else if (!isTenantAllowed(decoded)) ResponseHelper.jwtAuthAuthorizationError(res, 'Tenant not allowed'); else if (!isTenantAllowed(decoded)) ResponseHelper.jwtAuthAuthorizationError(res, 'Tenant not allowed');
@ -296,6 +362,8 @@ class App {
// Make sure that each request has the "parsedUrl" parameter // Make sure that each request has the "parsedUrl" parameter
this.app.use((req: express.Request, res: express.Response, next: express.NextFunction) => { this.app.use((req: express.Request, res: express.Response, next: express.NextFunction) => {
(req as ICustomRequest).parsedUrl = parseUrl(req); (req as ICustomRequest).parsedUrl = parseUrl(req);
// @ts-ignore
req.rawBody = Buffer.from('', 'base64');
next(); next();
}); });
@ -304,38 +372,46 @@ class App {
limit: '16mb', verify: (req, res, buf) => { limit: '16mb', verify: (req, res, buf) => {
// @ts-ignore // @ts-ignore
req.rawBody = buf; req.rawBody = buf;
} },
})); }));
// Support application/xml type post data // Support application/xml type post data
// @ts-ignore // @ts-ignore
this.app.use(bodyParser.xml({ limit: '16mb', xmlParseOptions: { this.app.use(bodyParser.xml({
limit: '16mb', xmlParseOptions: {
normalize: true, // Trim whitespace inside text nodes normalize: true, // Trim whitespace inside text nodes
normalizeTags: true, // Transform tags to lowercase normalizeTags: true, // Transform tags to lowercase
explicitArray: false, // Only put properties in array if length > 1 explicitArray: false, // Only put properties in array if length > 1
} })); },
}));
this.app.use(bodyParser.text({ this.app.use(bodyParser.text({
limit: '16mb', verify: (req, res, buf) => { limit: '16mb', verify: (req, res, buf) => {
// @ts-ignore // @ts-ignore
req.rawBody = buf; req.rawBody = buf;
} },
})); }));
// Make sure that Vue history mode works properly // Make sure that Vue history mode works properly
this.app.use(history({ this.app.use(history({
rewrites: [ rewrites: [
{ {
from: new RegExp(`^\/(${this.restEndpoint}|healthz|css|js|${this.endpointWebhook}|${this.endpointWebhookTest})\/?.*$`), from: new RegExp(`^\/(${this.restEndpoint}|healthz|metrics|css|js|${this.endpointWebhook}|${this.endpointWebhookTest})\/?.*$`),
to: (context) => { to: (context) => {
return context.parsedUrl!.pathname!.toString(); return context.parsedUrl!.pathname!.toString();
} },
} },
] ],
})); }));
// Support application/x-www-form-urlencoded post data // Support application/x-www-form-urlencoded post data
this.app.use(bodyParser.urlencoded({ extended: false })); this.app.use(bodyParser.urlencoded({
extended: false,
verify: (req, res, buf) => {
// @ts-ignore
req.rawBody = buf;
},
}));
if (process.env['NODE_ENV'] !== 'production') { if (process.env['NODE_ENV'] !== 'production') {
this.app.use((req: express.Request, res: express.Response, next: express.NextFunction) => { this.app.use((req: express.Request, res: express.Response, next: express.NextFunction) => {
@ -388,7 +464,16 @@ class App {
ResponseHelper.sendSuccessResponse(res, responseData, true, 200); ResponseHelper.sendSuccessResponse(res, responseData, true, 200);
}); });
// ----------------------------------------
// Metrics
// ----------------------------------------
if (enableMetrics === true) {
this.app.get('/metrics', async (req: express.Request, res: express.Response) => {
const response = await register.metrics();
res.setHeader('Content-Type', register.contentType);
ResponseHelper.sendSuccessResponse(res, response, true, 200);
});
}
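The register created in config() only carries prom-client's default process metrics; custom metrics could be attached to the same registry so they appear on /metrics too. A sketch (the counter name and the place it would be incremented are made up):

import * as promClient from 'prom-client';

const register = new promClient.Registry();
promClient.collectDefaultMetrics({ register });

// A hypothetical counter served by the /metrics handler above.
const webhookCounter = new promClient.Counter({
	name: 'n8n_webhook_requests_total',
	help: 'Number of webhook requests received',
	registers: [register],
});

webhookCounter.inc(); // call wherever a webhook request is handled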
// ---------------------------------------- // ----------------------------------------
// Workflow // Workflow
@ -522,6 +607,7 @@ class App {
newWorkflowData.updatedAt = this.getCurrentDate(); newWorkflowData.updatedAt = this.getCurrentDate();
await Db.collections.Workflow!.update(id, newWorkflowData); await Db.collections.Workflow!.update(id, newWorkflowData);
await this.externalHooks.run('workflow.afterUpdate', [newWorkflowData]);
// We sadly get nothing back from "update". Neither if it updated a record // We sadly get nothing back from "update". Neither if it updated a record
// nor the new value. So query now the hopefully updated entry. // nor the new value. So query now the hopefully updated entry.
@ -536,7 +622,7 @@ class App {
try { try {
await this.externalHooks.run('workflow.activate', [responseData]); await this.externalHooks.run('workflow.activate', [responseData]);
await this.activeWorkflowRunner.add(id); await this.activeWorkflowRunner.add(id, isActive ? 'update' : 'activate');
} catch (error) { } catch (error) {
// If workflow could not be activated set it again to inactive // If workflow could not be activated set it again to inactive
newWorkflowData.active = false; newWorkflowData.active = false;
@ -570,6 +656,7 @@ class App {
} }
await Db.collections.Workflow!.delete(id); await Db.collections.Workflow!.delete(id);
await this.externalHooks.run('workflow.afterDelete', [id]);
return true; return true;
})); }));
@ -581,6 +668,7 @@ class App {
const startNodes: string[] | undefined = req.body.startNodes; const startNodes: string[] | undefined = req.body.startNodes;
const destinationNode: string | undefined = req.body.destinationNode; const destinationNode: string | undefined = req.body.destinationNode;
const executionMode = 'manual'; const executionMode = 'manual';
const activationMode = 'manual';
const sessionId = GenericHelpers.getSessionId(req); const sessionId = GenericHelpers.getSessionId(req);
@ -590,7 +678,7 @@ class App {
const additionalData = await WorkflowExecuteAdditionalData.getBase(credentials); const additionalData = await WorkflowExecuteAdditionalData.getBase(credentials);
const nodeTypes = NodeTypes(); const nodeTypes = NodeTypes();
const workflowInstance = new Workflow({ id: workflowData.id, name: workflowData.name, nodes: workflowData.nodes, connections: workflowData.connections, active: false, nodeTypes, staticData: undefined, settings: workflowData.settings }); const workflowInstance = new Workflow({ id: workflowData.id, name: workflowData.name, nodes: workflowData.nodes, connections: workflowData.connections, active: false, nodeTypes, staticData: undefined, settings: workflowData.settings });
const needsWebhook = await this.testWebhooks.needsWebhookData(workflowData, workflowInstance, additionalData, executionMode, sessionId, destinationNode); const needsWebhook = await this.testWebhooks.needsWebhookData(workflowData, workflowInstance, additionalData, executionMode, activationMode, sessionId, destinationNode);
if (needsWebhook === true) { if (needsWebhook === true) {
return { return {
waitingForWebhook: true, waitingForWebhook: true,
@ -655,13 +743,36 @@ class App {
const allNodes = nodeTypes.getAll(); const allNodes = nodeTypes.getAll();
allNodes.forEach((nodeData) => { allNodes.forEach((nodeData) => {
returnData.push(nodeData.description); // Make a copy of the object. If we don't do this, then when
// the method below is called, the properties are removed for good.
// This happens because nodes are returned by reference.
const nodeInfo: INodeTypeDescription = { ...nodeData.description };
if (req.query.includeProperties !== 'true') {
// @ts-ignore
delete nodeInfo.properties;
}
returnData.push(nodeInfo);
}); });
return returnData; return returnData;
})); }));
// Returns node information based on names
this.app.post(`/${this.restEndpoint}/node-types`, ResponseHelper.send(async (req: express.Request, res: express.Response): Promise<INodeTypeDescription[]> => {
const nodeNames = _.get(req, 'body.nodeNames', []) as string[];
const nodeTypes = NodeTypes();
return nodeNames.map(name => {
try {
return nodeTypes.getByName(name);
} catch (e) {
return undefined;
}
}).filter(nodeData => !!nodeData).map(nodeData => nodeData!.description);
}));
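A client can use this endpoint to fetch full descriptions for a known set of nodes, now that the GET list strips properties by default. A sketch (assumes n8n's default port 5678, the default 'rest' endpoint prefix, and a fetch-capable runtime):

async function fetchNodeDescriptions(nodeNames: string[]): Promise<unknown[]> {
	const response = await fetch('http://localhost:5678/rest/node-types', {
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		body: JSON.stringify({ nodeNames }),
	});
	// ResponseHelper wraps the payload in a { data } envelope.
	const { data } = await response.json() as { data: unknown[] };
	return data; // one description per name that could be resolved
}

fetchNodeDescriptions(['n8n-nodes-base.httpRequest']).then(console.log);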
// ---------------------------------------- // ----------------------------------------
// Node-Types // Node-Types
@ -993,13 +1104,14 @@ class App {
[result.name as string]: result as ICredentialsEncrypted, [result.name as string]: result as ICredentialsEncrypted,
}, },
}; };
const mode: WorkflowExecuteMode = 'internal';
const credentialsHelper = new CredentialsHelper(workflowCredentials, encryptionKey); const credentialsHelper = new CredentialsHelper(workflowCredentials, encryptionKey);
const decryptedDataOriginal = credentialsHelper.getDecrypted(result.name, result.type, true); const decryptedDataOriginal = credentialsHelper.getDecrypted(result.name, result.type, mode, true);
const oauthCredentials = credentialsHelper.applyDefaultsAndOverwrites(decryptedDataOriginal, result.type); const oauthCredentials = credentialsHelper.applyDefaultsAndOverwrites(decryptedDataOriginal, result.type, mode);
const signatureMethod = _.get(oauthCredentials, 'signatureMethod') as string; const signatureMethod = _.get(oauthCredentials, 'signatureMethod') as string;
const oauth = new clientOAuth1({ const oAuthOptions: clientOAuth1.Options = {
consumer: { consumer: {
key: _.get(oauthCredentials, 'consumerKey') as string, key: _.get(oauthCredentials, 'consumerKey') as string,
secret: _.get(oauthCredentials, 'consumerSecret') as string, secret: _.get(oauthCredentials, 'consumerSecret') as string,
@ -1011,16 +1123,20 @@ class App {
.update(base) .update(base)
.digest('base64'); .digest('base64');
}, },
}); };
const callback = `${WebhookHelpers.getWebhookBaseUrl()}${this.restEndpoint}/oauth1-credential/callback?cid=${req.query.id}`; const oauthRequestData = {
oauth_callback: `${WebhookHelpers.getWebhookBaseUrl()}${this.restEndpoint}/oauth1-credential/callback?cid=${req.query.id}`,
};
await this.externalHooks.run('oauth1.authenticate', [oAuthOptions, oauthRequestData]);
const oauth = new clientOAuth1(oAuthOptions);
const options: RequestOptions = { const options: RequestOptions = {
method: 'POST', method: 'POST',
url: (_.get(oauthCredentials, 'requestTokenUrl') as string), url: (_.get(oauthCredentials, 'requestTokenUrl') as string),
data: { data: oauthRequestData,
oauth_callback: callback,
},
}; };
const data = oauth.toHeader(oauth.authorize(options as RequestOptions)); const data = oauth.toHeader(oauth.authorize(options as RequestOptions));
@ -1079,9 +1195,10 @@ class App {
[result.name as string]: result as ICredentialsEncrypted, [result.name as string]: result as ICredentialsEncrypted,
}, },
}; };
const mode: WorkflowExecuteMode = 'internal';
const credentialsHelper = new CredentialsHelper(workflowCredentials, encryptionKey); const credentialsHelper = new CredentialsHelper(workflowCredentials, encryptionKey);
const decryptedDataOriginal = credentialsHelper.getDecrypted(result.name, result.type, true); const decryptedDataOriginal = credentialsHelper.getDecrypted(result.name, result.type, mode, true);
const oauthCredentials = credentialsHelper.applyDefaultsAndOverwrites(decryptedDataOriginal, result.type); const oauthCredentials = credentialsHelper.applyDefaultsAndOverwrites(decryptedDataOriginal, result.type, mode);
const options: OptionsWithUrl = { const options: OptionsWithUrl = {
method: 'POST', method: 'POST',
@ -1089,7 +1206,7 @@ class App {
qs: { qs: {
oauth_token, oauth_token,
oauth_verifier, oauth_verifier,
} },
}; };
let oauthToken; let oauthToken;
@ -1150,20 +1267,21 @@ class App {
[result.name as string]: result as ICredentialsEncrypted, [result.name as string]: result as ICredentialsEncrypted,
}, },
}; };
const mode: WorkflowExecuteMode = 'internal';
const credentialsHelper = new CredentialsHelper(workflowCredentials, encryptionKey); const credentialsHelper = new CredentialsHelper(workflowCredentials, encryptionKey);
const decryptedDataOriginal = credentialsHelper.getDecrypted(result.name, result.type, true); const decryptedDataOriginal = credentialsHelper.getDecrypted(result.name, result.type, mode, true);
const oauthCredentials = credentialsHelper.applyDefaultsAndOverwrites(decryptedDataOriginal, result.type); const oauthCredentials = credentialsHelper.applyDefaultsAndOverwrites(decryptedDataOriginal, result.type, mode);
const token = new csrf(); const token = new csrf();
// Generate a CSRF prevention token and send it as an OAuth2 state string // Generate a CSRF prevention token and send it as an OAuth2 state string
const csrfSecret = token.secretSync(); const csrfSecret = token.secretSync();
const state = { const state = {
token: token.create(csrfSecret), token: token.create(csrfSecret),
cid: req.query.id cid: req.query.id,
}; };
const stateEncodedStr = Buffer.from(JSON.stringify(state)).toString('base64') as string; const stateEncodedStr = Buffer.from(JSON.stringify(state)).toString('base64') as string;
const oAuthObj = new clientOAuth2({ const oAuthOptions: clientOAuth2.Options = {
clientId: _.get(oauthCredentials, 'clientId') as string, clientId: _.get(oauthCredentials, 'clientId') as string,
clientSecret: _.get(oauthCredentials, 'clientSecret', '') as string, clientSecret: _.get(oauthCredentials, 'clientSecret', '') as string,
accessTokenUri: _.get(oauthCredentials, 'accessTokenUrl', '') as string, accessTokenUri: _.get(oauthCredentials, 'accessTokenUrl', '') as string,
@ -1171,7 +1289,11 @@ class App {
redirectUri: `${WebhookHelpers.getWebhookBaseUrl()}${this.restEndpoint}/oauth2-credential/callback`, redirectUri: `${WebhookHelpers.getWebhookBaseUrl()}${this.restEndpoint}/oauth2-credential/callback`,
scopes: _.split(_.get(oauthCredentials, 'scope', 'openid,') as string, ','), scopes: _.split(_.get(oauthCredentials, 'scope', 'openid,') as string, ','),
state: stateEncodedStr, state: stateEncodedStr,
}); };
await this.externalHooks.run('oauth2.authenticate', [oAuthOptions]);
const oAuthObj = new clientOAuth2(oAuthOptions);
// Encrypt the data // Encrypt the data
const credentials = new Credentials(result.name, result.type, result.nodesAccess); const credentials = new Credentials(result.name, result.type, result.nodesAccess);
@ -1209,7 +1331,9 @@ class App {
// Verify and store app code. Generate access tokens and store for respective credential. // Verify and store app code. Generate access tokens and store for respective credential.
this.app.get(`/${this.restEndpoint}/oauth2-credential/callback`, async (req: express.Request, res: express.Response) => { this.app.get(`/${this.restEndpoint}/oauth2-credential/callback`, async (req: express.Request, res: express.Response) => {
const {code, state: stateEncoded } = req.query;
// realmId is currently only used for the QuickBooks OAuth2 flow
const { code, state: stateEncoded } = req.query;
if (code === undefined || stateEncoded === undefined) { if (code === undefined || stateEncoded === undefined) {
const errorResponse = new ResponseHelper.ResponseError('Insufficient parameters for OAuth2 callback. Received following query parameters: ' + JSON.stringify(req.query), undefined, 503); const errorResponse = new ResponseHelper.ResponseError('Insufficient parameters for OAuth2 callback. Received following query parameters: ' + JSON.stringify(req.query), undefined, 503);
@ -1243,9 +1367,10 @@ class App {
[result.name as string]: result as ICredentialsEncrypted, [result.name as string]: result as ICredentialsEncrypted,
}, },
}; };
const mode: WorkflowExecuteMode = 'internal';
const credentialsHelper = new CredentialsHelper(workflowCredentials, encryptionKey); const credentialsHelper = new CredentialsHelper(workflowCredentials, encryptionKey);
const decryptedDataOriginal = credentialsHelper.getDecrypted(result.name, result.type, true); const decryptedDataOriginal = credentialsHelper.getDecrypted(result.name, result.type, mode, true);
const oauthCredentials = credentialsHelper.applyDefaultsAndOverwrites(decryptedDataOriginal, result.type); const oauthCredentials = credentialsHelper.applyDefaultsAndOverwrites(decryptedDataOriginal, result.type, mode);
const token = new csrf(); const token = new csrf();
if (decryptedDataOriginal.csrfSecret === undefined || !token.verify(decryptedDataOriginal.csrfSecret as string, state.token)) { if (decryptedDataOriginal.csrfSecret === undefined || !token.verify(decryptedDataOriginal.csrfSecret as string, state.token)) {
@ -1257,11 +1382,11 @@ class App {
const oAuth2Parameters = { const oAuth2Parameters = {
clientId: _.get(oauthCredentials, 'clientId') as string, clientId: _.get(oauthCredentials, 'clientId') as string,
clientSecret: _.get(oauthCredentials, 'clientSecret', '') as string, clientSecret: _.get(oauthCredentials, 'clientSecret', '') as string | undefined,
accessTokenUri: _.get(oauthCredentials, 'accessTokenUrl', '') as string, accessTokenUri: _.get(oauthCredentials, 'accessTokenUrl', '') as string,
authorizationUri: _.get(oauthCredentials, 'authUrl', '') as string, authorizationUri: _.get(oauthCredentials, 'authUrl', '') as string,
redirectUri: `${WebhookHelpers.getWebhookBaseUrl()}${this.restEndpoint}/oauth2-credential/callback`, redirectUri: `${WebhookHelpers.getWebhookBaseUrl()}${this.restEndpoint}/oauth2-credential/callback`,
scopes: _.split(_.get(oauthCredentials, 'scope', 'openid,') as string, ',') scopes: _.split(_.get(oauthCredentials, 'scope', 'openid,') as string, ','),
}; };
if (_.get(oauthCredentials, 'authentication', 'header') as string === 'body') { if (_.get(oauthCredentials, 'authentication', 'header') as string === 'body') {
@ -1273,13 +1398,18 @@ class App {
}; };
delete oAuth2Parameters.clientSecret; delete oAuth2Parameters.clientSecret;
} }
const redirectUri = `${WebhookHelpers.getWebhookBaseUrl()}${this.restEndpoint}/oauth2-credential/callback`;
await this.externalHooks.run('oauth2.callback', [oAuth2Parameters]);
const oAuthObj = new clientOAuth2(oAuth2Parameters); const oAuthObj = new clientOAuth2(oAuth2Parameters);
const queryParameters = req.originalUrl.split('?').splice(1, 1).join(''); const queryParameters = req.originalUrl.split('?').splice(1, 1).join('');
const oauthToken = await oAuthObj.code.getToken(`${redirectUri}?${queryParameters}`, options); const oauthToken = await oAuthObj.code.getToken(`${oAuth2Parameters.redirectUri}?${queryParameters}`, options);
if (Object.keys(req.query).length > 2) {
_.set(oauthToken.data, 'callbackQueryString', _.omit(req.query, 'state', 'code'));
}
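The exchange above goes through the `client-oauth2` package's `code.getToken`, which swaps the callback's authorization code for tokens. Standalone, the flow looks roughly like this; all ids and URLs are placeholders, not values from this commit:

import * as ClientOAuth2 from 'client-oauth2';

const client = new ClientOAuth2({
	clientId: 'my-client-id',                // placeholder values
	clientSecret: 'my-client-secret',
	accessTokenUri: 'https://auth.example.com/oauth2/token',
	authorizationUri: 'https://auth.example.com/oauth2/authorize',
	redirectUri: 'https://n8n.example.com/rest/oauth2-credential/callback',
	scopes: ['openid'],
});

// `callbackUrl` is the full redirect URL including ?code=...&state=...
async function exchange(callbackUrl: string) {
	const token = await client.code.getToken(callbackUrl);
	// token.data holds the raw token response; extra callback query
	// parameters can be merged into it, as done above with `callbackQueryString`.
	return token.data;
}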
if (oauthToken === undefined) { if (oauthToken === undefined) {
const errorResponse = new ResponseHelper.ResponseError('Unable to get access tokens!', undefined, 404); const errorResponse = new ResponseHelper.ResponseError('Unable to get access tokens!', undefined, 404);
@ -1327,29 +1457,48 @@ class App {
limit = parseInt(req.query.limit as string, 10); limit = parseInt(req.query.limit as string, 10);
} }
const countFilter = JSON.parse(JSON.stringify(filter)); const executingWorkflowIds: string[] = [];
if (req.query.lastId) {
filter.id = LessThan(req.query.lastId);
}
countFilter.select = ['id'];
const resultsPromise = Db.collections.Execution!.find({ if (config.get('executions.mode') === 'queue') {
select: [ const currentJobs = await Queue.getInstance().getJobs(['active', 'waiting']);
'id', executingWorkflowIds.push(...currentJobs.map(job => job.data.executionId) as string[]);
'finished', }
'mode', // We may have manual executions even with queue so we must account for these.
'retryOf', executingWorkflowIds.push(...this.activeExecutionsInstance.getActiveExecutions().map(execution => execution.id.toString()) as string[]);
'retrySuccessId',
'startedAt', const countFilter = JSON.parse(JSON.stringify(filter));
'stoppedAt', countFilter.select = ['id'];
'workflowData', countFilter.where = {id: Not(In(executingWorkflowIds))};
],
where: filter, const resultsQuery = await Db.collections.Execution!
order: { .createQueryBuilder("execution")
id: 'DESC', .select([
}, 'execution.id',
take: limit, 'execution.finished',
'execution.mode',
'execution.retryOf',
'execution.retrySuccessId',
'execution.startedAt',
'execution.stoppedAt',
'execution.workflowData',
])
.orderBy('execution.id', 'DESC')
.take(limit);
Object.keys(filter).forEach((filterField) => {
resultsQuery.andWhere(`execution.${filterField} = :${filterField}`, {[filterField]: filter[filterField]});
}); });
if (req.query.lastId) {
resultsQuery.andWhere(`execution.id < :lastId`, {lastId: req.query.lastId});
}
if (req.query.firstId) {
resultsQuery.andWhere(`execution.id > :firstId`, {firstId: req.query.firstId});
}
if (executingWorkflowIds.length > 0) {
resultsQuery.andWhere(`execution.id NOT IN (:...ids)`, {ids: executingWorkflowIds});
}
const resultsPromise = resultsQuery.getMany();
const countPromise = Db.collections.Execution!.count(countFilter); const countPromise = Db.collections.Execution!.count(countFilter);
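The listing endpoint switched from a plain `find()` to a TypeORM query builder so that it can exclude still-running executions and page by id cursors (`lastId`/`firstId`) instead of offsets. The same pattern in isolation; the `ExecutionEntity` shape and variable names here are assumptions for illustration:

import { Column, Entity, PrimaryGeneratedColumn, getRepository } from 'typeorm';

@Entity()
class ExecutionEntity {
	@PrimaryGeneratedColumn()
	id: number;

	@Column()
	finished: boolean;

	@Column()
	startedAt: Date;

	@Column({ nullable: true })
	stoppedAt: Date;
}

async function listFinishedExecutions(lastId?: string, firstId?: string, runningIds: string[] = [], limit = 20) {
	const query = getRepository(ExecutionEntity)
		.createQueryBuilder('execution')
		.select(['execution.id', 'execution.finished', 'execution.startedAt', 'execution.stoppedAt'])
		.orderBy('execution.id', 'DESC')
		.take(limit);

	// Keyset pagination: `lastId` pages further back, `firstId` fetches newer rows.
	if (lastId) query.andWhere('execution.id < :lastId', { lastId });
	if (firstId) query.andWhere('execution.id > :firstId', { firstId });

	// Hide executions that are still active or waiting in the queue.
	if (runningIds.length > 0) query.andWhere('execution.id NOT IN (:...ids)', { ids: runningIds });

	return query.getMany();
}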
@ -1380,16 +1529,21 @@ class App {
// Returns a specific execution // Returns a specific execution
this.app.get(`/${this.restEndpoint}/executions/:id`, ResponseHelper.send(async (req: express.Request, res: express.Response): Promise<IExecutionFlattedResponse | undefined> => { this.app.get(`/${this.restEndpoint}/executions/:id`, ResponseHelper.send(async (req: express.Request, res: express.Response): Promise<IExecutionResponse | IExecutionFlattedResponse | undefined> => {
const result = await Db.collections.Execution!.findOne(req.params.id); const result = await Db.collections.Execution!.findOne(req.params.id);
if (result === undefined) { if (result === undefined) {
return undefined; return undefined;
} }
if (req.query.unflattedResponse === 'true') {
const fullExecutionData = ResponseHelper.unflattenExecutionData(result);
return fullExecutionData as IExecutionResponse;
} else {
// Convert to response format in which the id is a string // Convert to response format in which the id is a string
(result as IExecutionFlatted as IExecutionFlattedResponse).id = result.id.toString(); (result as IExecutionFlatted as IExecutionFlattedResponse).id = result.id.toString();
return result as IExecutionFlatted as IExecutionFlattedResponse; return result as IExecutionFlatted as IExecutionFlattedResponse;
}
})); }));
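The new `unflattedResponse=true` flag returns the execution parsed back into objects. Execution data is stored in a circular-reference-safe string form; a sketch of that round-trip, assuming the `flatted` package that the `ResponseHelper.flattenExecutionData`/`unflattenExecutionData` names suggest:

import { parse, stringify } from 'flatted';

const runData: any = { resultData: { runData: {} } };
runData.self = runData;                   // circular: plain JSON.stringify would throw

const flat = stringify(runData);          // string form, safe to store in the data column
const restored = parse(flat);             // what `unflattedResponse=true` hands back
console.log(restored.self === restored);  // true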
@ -1423,27 +1577,39 @@ class App {
workflowData: fullExecutionData.workflowData, workflowData: fullExecutionData.workflowData,
}; };
const lastNodeExecuted = data!.executionData!.resultData.lastNodeExecuted as string; const lastNodeExecuted = data!.executionData!.resultData.lastNodeExecuted as string | undefined;
if (lastNodeExecuted) {
// Remove the old error and the data of the last run of the node so that it can be replaced // Remove the old error and the data of the last run of the node so that it can be replaced
delete data!.executionData!.resultData.error; delete data!.executionData!.resultData.error;
const length = data!.executionData!.resultData.runData[lastNodeExecuted].length;
if (length > 0 && data!.executionData!.resultData.runData[lastNodeExecuted][length - 1].error !== undefined) {
// Remove results only if it is an error.
// If we are retrying due to a crash, the information is simply success info from last node
data!.executionData!.resultData.runData[lastNodeExecuted].pop(); data!.executionData!.resultData.runData[lastNodeExecuted].pop();
// Stack will determine what to run next
}
}
if (req.body.loadWorkflow === true) { if (req.body.loadWorkflow === true) {
// Loads the currently saved workflow to execute instead of the // Loads the currently saved workflow to execute instead of the
// one saved at the time of the execution. // one saved at the time of the execution.
const workflowId = fullExecutionData.workflowData.id; const workflowId = fullExecutionData.workflowData.id;
data.workflowData = await Db.collections.Workflow!.findOne(workflowId) as IWorkflowBase; const workflowData = await Db.collections.Workflow!.findOne(workflowId) as IWorkflowBase;
if (data.workflowData === undefined) { if (workflowData === undefined) {
throw new Error(`The workflow with the ID "${workflowId}" could not be found and so the data could not be loaded for the retry.`); throw new Error(`The workflow with the ID "${workflowId}" could not be found and so the data could not be loaded for the retry.`);
} }
data.workflowData = workflowData;
const nodeTypes = NodeTypes();
const workflowInstance = new Workflow({ id: workflowData.id as string, name: workflowData.name, nodes: workflowData.nodes, connections: workflowData.connections, active: false, nodeTypes, staticData: undefined, settings: workflowData.settings });
// Replace all of the nodes in the execution stack with the ones of the new workflow // Replace all of the nodes in the execution stack with the ones of the new workflow
for (const stack of data!.executionData!.executionData!.nodeExecutionStack) { for (const stack of data!.executionData!.executionData!.nodeExecutionStack) {
// Find the data of the last executed node in the new workflow // Find the data of the last executed node in the new workflow
const node = data.workflowData.nodes.find(node => node.name === stack.node.name); const node = workflowInstance.getNode(stack.node.name);
if (node === undefined) { if (node === null) {
throw new Error(`Could not find the node "${stack.node.name}" in workflow. It probably got deleted or renamed. Without it the workflow can sadly not be retried.`); throw new Error(`Could not find the node "${stack.node.name}" in workflow. It probably got deleted or renamed. Without it the workflow can sadly not be retried.`);
} }
@ -1496,6 +1662,51 @@ class App {
// Returns all the currently working executions // Returns all the currently working executions
this.app.get(`/${this.restEndpoint}/executions-current`, ResponseHelper.send(async (req: express.Request, res: express.Response): Promise<IExecutionsSummary[]> => { this.app.get(`/${this.restEndpoint}/executions-current`, ResponseHelper.send(async (req: express.Request, res: express.Response): Promise<IExecutionsSummary[]> => {
if (config.get('executions.mode') === 'queue') {
const currentJobs = await Queue.getInstance().getJobs(['active', 'waiting']);
const currentlyRunningQueueIds = currentJobs.map(job => job.data.executionId);
const currentlyRunningManualExecutions = this.activeExecutionsInstance.getActiveExecutions();
const manualExecutionIds = currentlyRunningManualExecutions.map(execution => execution.id);
const currentlyRunningExecutionIds = currentlyRunningQueueIds.concat(manualExecutionIds);
if (currentlyRunningExecutionIds.length === 0) {
return [];
}
const resultsQuery = await Db.collections.Execution!
.createQueryBuilder("execution")
.select([
'execution.id',
'execution.workflowId',
'execution.mode',
'execution.retryOf',
'execution.startedAt',
])
.orderBy('execution.id', 'DESC')
.andWhere(`execution.id IN (:...ids)`, {ids: currentlyRunningExecutionIds});
if (req.query.filter) {
const filter = JSON.parse(req.query.filter as string);
if (filter.workflowId !== undefined) {
resultsQuery.andWhere('execution.workflowId = :workflowId', {workflowId: filter.workflowId});
}
}
const results = await resultsQuery.getMany();
return results.map(result => {
return {
id: result.id,
workflowId: result.workflowId,
mode: result.mode,
retryOf: result.retryOf !== null ? result.retryOf : undefined,
startedAt: new Date(result.startedAt),
} as IExecutionsSummary;
});
} else {
const executingWorkflows = this.activeExecutionsInstance.getActiveExecutions(); const executingWorkflows = this.activeExecutionsInstance.getActiveExecutions();
const returnData: IExecutionsSummary[] = []; const returnData: IExecutionsSummary[] = [];
@ -1511,8 +1722,8 @@ class App {
} }
returnData.push( returnData.push(
{ {
idActive: data.id.toString(), id: data.id.toString(),
workflowId: data.workflowId.toString(), workflowId: data.workflowId === undefined ? '' : data.workflowId.toString(),
mode: data.mode, mode: data.mode,
retryOf: data.retryOf, retryOf: data.retryOf,
startedAt: new Date(data.startedAt), startedAt: new Date(data.startedAt),
@ -1521,10 +1732,49 @@ class App {
} }
return returnData; return returnData;
}
})); }));
// Forces the execution to stop // Forces the execution to stop
this.app.post(`/${this.restEndpoint}/executions-current/:id/stop`, ResponseHelper.send(async (req: express.Request, res: express.Response): Promise<IExecutionsStopData> => { this.app.post(`/${this.restEndpoint}/executions-current/:id/stop`, ResponseHelper.send(async (req: express.Request, res: express.Response): Promise<IExecutionsStopData> => {
if (config.get('executions.mode') === 'queue') {
// Manual executions should still be stoppable, so
// try notifying the `activeExecutions` to stop it.
const result = await this.activeExecutionsInstance.stopExecution(req.params.id);
if (result !== undefined) {
const returnData: IExecutionsStopData = {
mode: result.mode,
startedAt: new Date(result.startedAt),
stoppedAt: result.stoppedAt ? new Date(result.stoppedAt) : undefined,
finished: result.finished,
};
return returnData;
}
const currentJobs = await Queue.getInstance().getJobs(['active', 'waiting']);
const job = currentJobs.find(job => job.data.executionId.toString() === req.params.id);
if (!job) {
throw new Error(`Could not stop "${req.params.id}" as it is no longer in queue.`);
} else {
await Queue.getInstance().stopJob(job);
}
const executionDb = await Db.collections.Execution?.findOne(req.params.id) as IExecutionFlattedDb;
const fullExecutionData = ResponseHelper.unflattenExecutionData(executionDb) as IExecutionResponse;
const returnData: IExecutionsStopData = {
mode: fullExecutionData.mode,
startedAt: new Date(fullExecutionData.startedAt),
stoppedAt: fullExecutionData.stoppedAt ? new Date(fullExecutionData.stoppedAt) : undefined,
finished: fullExecutionData.finished,
};
return returnData;
} else {
const executionId = req.params.id; const executionId = req.params.id;
// Stop the execution and wait till it is done and we have the data // Stop the execution and wait till it is done and we have the data
@ -1537,11 +1787,12 @@ class App {
const returnData: IExecutionsStopData = { const returnData: IExecutionsStopData = {
mode: result.mode, mode: result.mode,
startedAt: new Date(result.startedAt), startedAt: new Date(result.startedAt),
stoppedAt: new Date(result.stoppedAt), stoppedAt: result.stoppedAt ? new Date(result.stoppedAt) : undefined,
finished: result.finished, finished: result.finished,
}; };
return returnData; return returnData;
}
})); }));
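In queue mode the stop request first tries the local `activeExecutions` (manual runs always execute in the main process), then falls back to locating the job on the queue. A sketch of the queue lookup with the `bull` package; the queue name, Redis settings, and `executionId` job field are assumptions:

import * as Bull from 'bull';

const jobQueue = new Bull('jobs', { redis: { host: 'localhost', port: 6379 } });

async function findJobByExecutionId(executionId: string): Promise<Bull.Job | undefined> {
	// Only active and waiting jobs can still be stopped.
	const jobs = await jobQueue.getJobs(['active', 'waiting']);
	return jobs.find((job) => job.data.executionId.toString() === executionId);
}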
@ -1572,18 +1823,7 @@ class App {
// Returns the settings which are needed in the UI // Returns the settings which are needed in the UI
this.app.get(`/${this.restEndpoint}/settings`, ResponseHelper.send(async (req: express.Request, res: express.Response): Promise<IN8nUISettings> => { this.app.get(`/${this.restEndpoint}/settings`, ResponseHelper.send(async (req: express.Request, res: express.Response): Promise<IN8nUISettings> => {
return { return this.frontendSettings;
endpointWebhook: this.endpointWebhook,
endpointWebhookTest: this.endpointWebhookTest,
saveDataErrorExecution: this.saveDataErrorExecution,
saveDataSuccessExecution: this.saveDataSuccessExecution,
saveManualExecutions: this.saveManualExecutions,
executionTimeout: this.executionTimeout,
maxExecutionTimeout: this.maxExecutionTimeout,
timezone: this.timezone,
urlBaseWebhook: WebhookHelpers.getWebhookBaseUrl(),
versionCli: this.versions!.cli,
};
})); }));
@ -1592,89 +1832,10 @@ class App {
// Webhooks // Webhooks
// ---------------------------------------- // ----------------------------------------
// HEAD webhook requests if (config.get('endpoints.disableProductionWebhooksOnMainProcess') !== true) {
this.app.head(`/${this.endpointWebhook}/*`, async (req: express.Request, res: express.Response) => { WebhookServer.registerProductionWebhooks.apply(this);
// Cut away the "/webhook/" to get the registred part of the url
const requestUrl = (req as ICustomRequest).parsedUrl!.pathname!.slice(this.endpointWebhook.length + 2);
let response;
try {
response = await this.activeWorkflowRunner.executeWebhook('HEAD', requestUrl, req, res);
} catch (error) {
ResponseHelper.sendErrorResponse(res, error);
return;
} }
if (response.noWebhookResponse === true) {
// Nothing else to do as the response got already sent
return;
}
ResponseHelper.sendSuccessResponse(res, response.data, true, response.responseCode);
});
// OPTIONS webhook requests
this.app.options(`/${this.endpointWebhook}/*`, async (req: express.Request, res: express.Response) => {
// Cut away the "/webhook/" to get the registred part of the url
const requestUrl = (req as ICustomRequest).parsedUrl!.pathname!.slice(this.endpointWebhook.length + 2);
let allowedMethods: string[];
try {
allowedMethods = await this.activeWorkflowRunner.getWebhookMethods(requestUrl);
allowedMethods.push('OPTIONS');
// Add custom "Allow" header to satisfy OPTIONS response.
res.append('Allow', allowedMethods);
} catch (error) {
ResponseHelper.sendErrorResponse(res, error);
return;
}
ResponseHelper.sendSuccessResponse(res, {}, true, 204);
});
// GET webhook requests
this.app.get(`/${this.endpointWebhook}/*`, async (req: express.Request, res: express.Response) => {
// Cut away the "/webhook/" to get the registred part of the url
const requestUrl = (req as ICustomRequest).parsedUrl!.pathname!.slice(this.endpointWebhook.length + 2);
let response;
try {
response = await this.activeWorkflowRunner.executeWebhook('GET', requestUrl, req, res);
} catch (error) {
ResponseHelper.sendErrorResponse(res, error);
return ;
}
if (response.noWebhookResponse === true) {
// Nothing else to do as the response got already sent
return;
}
ResponseHelper.sendSuccessResponse(res, response.data, true, response.responseCode);
});
// POST webhook requests
this.app.post(`/${this.endpointWebhook}/*`, async (req: express.Request, res: express.Response) => {
// Cut away the "/webhook/" to get the registred part of the url
const requestUrl = (req as ICustomRequest).parsedUrl!.pathname!.slice(this.endpointWebhook.length + 2);
let response;
try {
response = await this.activeWorkflowRunner.executeWebhook('POST', requestUrl, req, res);
} catch (error) {
ResponseHelper.sendErrorResponse(res, error);
return;
}
if (response.noWebhookResponse === true) {
// Nothing else to do as the response got already sent
return;
}
ResponseHelper.sendSuccessResponse(res, response.data, true, response.responseCode);
});
// HEAD webhook requests (test for UI) // HEAD webhook requests (test for UI)
this.app.head(`/${this.endpointWebhookTest}/*`, async (req: express.Request, res: express.Response) => { this.app.head(`/${this.endpointWebhookTest}/*`, async (req: express.Request, res: express.Response) => {
// Cut away the "/webhook-test/" to get the registred part of the url // Cut away the "/webhook-test/" to get the registred part of the url
@ -1819,7 +1980,7 @@ class App {
// got used // got used
res.setHeader('Last-Modified', startTime); res.setHeader('Last-Modified', startTime);
} }
} },
})); }));
} }
@ -1835,13 +1996,13 @@ export async function start(): Promise<void> {
let server; let server;
if (app.protocol === 'https' && app.sslKey && app.sslCert){ if (app.protocol === 'https' && app.sslKey && app.sslCert) {
const https = require('https'); const https = require('https');
const privateKey = readFileSync(app.sslKey, 'utf8'); const privateKey = readFileSync(app.sslKey, 'utf8');
const cert = readFileSync(app.sslCert, 'utf8'); const cert = readFileSync(app.sslCert, 'utf8');
const credentials = { key: privateKey,cert }; const credentials = { key: privateKey, cert };
server = https.createServer(credentials,app.app); server = https.createServer(credentials, app.app);
}else{ } else {
const http = require('http'); const http = require('http');
server = http.createServer(app.app); server = http.createServer(app.app);
} }
@ -1850,5 +2011,7 @@ export async function start(): Promise<void> {
const versions = await GenericHelpers.getVersions(); const versions = await GenericHelpers.getVersions();
console.log(`n8n ready on ${ADDRESS}, port ${PORT}`); console.log(`n8n ready on ${ADDRESS}, port ${PORT}`);
console.log(`Version: ${versions.cli}`); console.log(`Version: ${versions.cli}`);
await app.externalHooks.run('n8n.ready', [app]);
}); });
} }

View file

@ -3,11 +3,9 @@ import * as express from 'express';
import { import {
IResponseCallbackData, IResponseCallbackData,
IWorkflowDb, IWorkflowDb,
NodeTypes,
Push, Push,
ResponseHelper, ResponseHelper,
WebhookHelpers, WebhookHelpers,
WorkflowHelpers,
} from './'; } from './';
import { import {
@ -19,6 +17,7 @@ import {
IWorkflowExecuteAdditionalData, IWorkflowExecuteAdditionalData,
WebhookHttpMethod, WebhookHttpMethod,
Workflow, Workflow,
WorkflowActivateMode,
WorkflowExecuteMode, WorkflowExecuteMode,
} from 'n8n-workflow'; } from 'n8n-workflow';
@ -31,6 +30,7 @@ export class TestWebhooks {
sessionId?: string; sessionId?: string;
timeout: NodeJS.Timeout, timeout: NodeJS.Timeout,
workflowData: IWorkflowDb; workflowData: IWorkflowDb;
workflow: Workflow;
}; };
} = {}; } = {};
private activeWebhooks: ActiveWebhooks | null = null; private activeWebhooks: ActiveWebhooks | null = null;
@ -55,19 +55,45 @@ export class TestWebhooks {
* @memberof TestWebhooks * @memberof TestWebhooks
*/ */
async callTestWebhook(httpMethod: WebhookHttpMethod, path: string, request: express.Request, response: express.Response): Promise<IResponseCallbackData> { async callTestWebhook(httpMethod: WebhookHttpMethod, path: string, request: express.Request, response: express.Response): Promise<IResponseCallbackData> {
const webhookData: IWebhookData | undefined = this.activeWebhooks!.get(httpMethod, path); // Reset request parameters
request.params = {};
// Remove trailing slash
if (path.endsWith('/')) {
path = path.slice(0, -1);
}
let webhookData: IWebhookData | undefined = this.activeWebhooks!.get(httpMethod, path);
// check if path is dynamic
if (webhookData === undefined) {
const pathElements = path.split('/');
const webhookId = pathElements.shift();
webhookData = this.activeWebhooks!.get(httpMethod, pathElements.join('/'), webhookId);
if (webhookData === undefined) { if (webhookData === undefined) {
// The requested webhook is not registered // The requested webhook is not registered
throw new ResponseHelper.ResponseError(`The requested webhook "${httpMethod} ${path}" is not registered.`, 404, 404); throw new ResponseHelper.ResponseError(`The requested webhook "${httpMethod} ${path}" is not registered.`, 404, 404);
} }
const webhookKey = this.activeWebhooks!.getWebhookKey(webhookData.httpMethod, webhookData.path); path = webhookData.path;
// extracting params from path
path.split('/').forEach((ele, index) => {
if (ele.startsWith(':')) {
// write params to req.params
request.params[ele.slice(1)] = pathElements[index];
}
});
}
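The dynamic lookup above works segment-wise: the first path element is treated as the webhook id, the remainder is matched against the registered path, and `:param` segments are copied into `req.params`. The same matching in isolation; `matchDynamicWebhook` is a hypothetical helper, not part of n8n's API:

function matchDynamicWebhook(requestPath: string, registeredPath: string) {
	const params: { [key: string]: string } = {};
	const requestElements = requestPath.split('/');
	const webhookId = requestElements.shift();

	registeredPath.split('/').forEach((element, index) => {
		if (element.startsWith(':')) {
			// Write the concrete request segment under the parameter name.
			params[element.slice(1)] = requestElements[index];
		}
	});

	return { webhookId, params };
}

// matchDynamicWebhook('a1b2c3/users/42', 'users/:userId')
// => { webhookId: 'a1b2c3', params: { userId: '42' } }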
const workflowData = this.testWebhookData[webhookKey].workflowData; const webhookKey = this.activeWebhooks!.getWebhookKey(webhookData.httpMethod, webhookData.path, webhookData.webhookId) + `|${webhookData.workflowId}`;
const nodeTypes = NodeTypes(); // TODO: Clean that duplication up one day and improve code generally
const workflow = new Workflow({ id: webhookData.workflowId, name: workflowData.name, nodes: workflowData.nodes, connections: workflowData.connections, active: workflowData.active, nodeTypes, staticData: workflowData.staticData, settings: workflowData.settings}); if (this.testWebhookData[webhookKey] === undefined) {
// The requested webhook is not registered
throw new ResponseHelper.ResponseError(`The requested webhook "${httpMethod} ${path}" is not registered.`, 404, 404);
}
const workflow = this.testWebhookData[webhookKey].workflow;
// Get the node which has the webhook defined to know where to start from and to // Get the node which has the webhook defined to know where to start from and to
// get additional data // get additional data
@ -79,7 +105,7 @@ export class TestWebhooks {
return new Promise(async (resolve, reject) => { return new Promise(async (resolve, reject) => {
try { try {
const executionMode = 'manual'; const executionMode = 'manual';
const executionId = await WebhookHelpers.executeWebhook(workflow, webhookData, this.testWebhookData[webhookKey].workflowData, workflowStartNode, executionMode, this.testWebhookData[webhookKey].sessionId, request, response, (error: Error | null, data: IResponseCallbackData) => { const executionId = await WebhookHelpers.executeWebhook(workflow, webhookData!, this.testWebhookData[webhookKey].workflowData, workflowStartNode, executionMode, this.testWebhookData[webhookKey].sessionId, request, response, (error: Error | null, data: IResponseCallbackData) => {
if (error !== null) { if (error !== null) {
return reject(error); return reject(error);
} }
@ -96,7 +122,7 @@ export class TestWebhooks {
// Inform editor-ui that webhook got received // Inform editor-ui that webhook got received
if (this.testWebhookData[webhookKey].sessionId !== undefined) { if (this.testWebhookData[webhookKey].sessionId !== undefined) {
const pushInstance = Push.getInstance(); const pushInstance = Push.getInstance();
pushInstance.send('testWebhookReceived', { workflowId: webhookData.workflowId, executionId }, this.testWebhookData[webhookKey].sessionId!); pushInstance.send('testWebhookReceived', { workflowId: webhookData!.workflowId, executionId }, this.testWebhookData[webhookKey].sessionId!);
} }
} catch (error) { } catch (error) {
@ -136,7 +162,7 @@ export class TestWebhooks {
* @returns {(Promise<IExecutionDb | undefined>)} * @returns {(Promise<IExecutionDb | undefined>)}
* @memberof TestWebhooks * @memberof TestWebhooks
*/ */
async needsWebhookData(workflowData: IWorkflowDb, workflow: Workflow, additionalData: IWorkflowExecuteAdditionalData, mode: WorkflowExecuteMode, sessionId?: string, destinationNode?: string): Promise<boolean> { async needsWebhookData(workflowData: IWorkflowDb, workflow: Workflow, additionalData: IWorkflowExecuteAdditionalData, mode: WorkflowExecuteMode, activation: WorkflowActivateMode, sessionId?: string, destinationNode?: string): Promise<boolean> {
const webhooks = WebhookHelpers.getWorkflowWebhooks(workflow, additionalData, destinationNode); const webhooks = WebhookHelpers.getWorkflowWebhooks(workflow, additionalData, destinationNode);
if (webhooks.length === 0) { if (webhooks.length === 0) {
@ -154,19 +180,26 @@ export class TestWebhooks {
}, 120000); }, 120000);
let key: string; let key: string;
const activatedKey: string[] = [];
for (const webhookData of webhooks) { for (const webhookData of webhooks) {
key = this.activeWebhooks!.getWebhookKey(webhookData.httpMethod, webhookData.path); key = this.activeWebhooks!.getWebhookKey(webhookData.httpMethod, webhookData.path, webhookData.webhookId) + `|${workflowData.id}`;
await this.activeWebhooks!.add(workflow, webhookData, mode); activatedKey.push(key);
this.testWebhookData[key] = { this.testWebhookData[key] = {
sessionId, sessionId,
timeout, timeout,
workflow,
workflowData, workflowData,
}; };
// Save static data! try {
this.testWebhookData[key].workflowData.staticData = workflow.staticData; await this.activeWebhooks!.add(workflow, webhookData, mode, activation);
} catch (error) {
activatedKey.forEach(deleteKey => delete this.testWebhookData[deleteKey] );
await this.activeWebhooks!.removeWorkflow(workflow);
throw error;
}
} }
return true; return true;
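Registration above is all-or-nothing: if adding any webhook throws, the keys registered so far are deleted and all of the workflow's webhooks removed before the error is rethrown. The pattern, reduced to a sketch with illustrative names:

async function registerAllOrNone(
	keys: string[],
	register: (key: string) => Promise<void>,
	rollback: (activated: string[]) => Promise<void>,
): Promise<void> {
	const activated: string[] = [];
	for (const key of keys) {
		try {
			await register(key);
			activated.push(key);
		} catch (error) {
			// Undo the partially registered webhooks, then surface the error.
			await rollback(activated);
			throw error;
		}
	}
}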
@ -181,8 +214,6 @@ export class TestWebhooks {
* @memberof TestWebhooks * @memberof TestWebhooks
*/ */
cancelTestWebhook(workflowId: string): boolean { cancelTestWebhook(workflowId: string): boolean {
const nodeTypes = NodeTypes();
let foundWebhook = false; let foundWebhook = false;
for (const webhookKey of Object.keys(this.testWebhookData)) { for (const webhookKey of Object.keys(this.testWebhookData)) {
const webhookData = this.testWebhookData[webhookKey]; const webhookData = this.testWebhookData[webhookKey];
@ -191,8 +222,6 @@ export class TestWebhooks {
continue; continue;
} }
foundWebhook = true;
clearTimeout(this.testWebhookData[webhookKey].timeout); clearTimeout(this.testWebhookData[webhookKey].timeout);
// Inform editor-ui that webhook got received // Inform editor-ui that webhook got received
@ -205,14 +234,19 @@ export class TestWebhooks {
} }
} }
const workflowData = webhookData.workflowData; const workflow = this.testWebhookData[webhookKey].workflow;
const workflow = new Workflow({ id: workflowData.id.toString(), name: workflowData.name, nodes: workflowData.nodes, connections: workflowData.connections, active: workflowData.active, nodeTypes, staticData: workflowData.staticData, settings: workflowData.settings });
// Remove the webhook // Remove the webhook
delete this.testWebhookData[webhookKey]; delete this.testWebhookData[webhookKey];
if (foundWebhook === false) {
// As this removes all webhooks of the workflow, execute it only once
this.activeWebhooks!.removeWorkflow(workflow); this.activeWebhooks!.removeWorkflow(workflow);
} }
foundWebhook = true;
}
return foundWebhook; return foundWebhook;
} }
@ -225,14 +259,10 @@ export class TestWebhooks {
return; return;
} }
const nodeTypes = NodeTypes();
let workflowData: IWorkflowDb;
let workflow: Workflow; let workflow: Workflow;
const workflows: Workflow[] = []; const workflows: Workflow[] = [];
for (const webhookKey of Object.keys(this.testWebhookData)) { for (const webhookKey of Object.keys(this.testWebhookData)) {
workflowData = this.testWebhookData[webhookKey].workflowData; workflow = this.testWebhookData[webhookKey].workflow;
workflow = new Workflow({ id: workflowData.id.toString(), name: workflowData.name, nodes: workflowData.nodes, connections: workflowData.connections, active: workflowData.active, nodeTypes, staticData: workflowData.staticData, settings: workflowData.settings });
workflows.push(workflow); workflows.push(workflow);
} }

View file

@ -3,16 +3,17 @@ import { get } from 'lodash';
import { import {
ActiveExecutions, ActiveExecutions,
ExternalHooks,
GenericHelpers, GenericHelpers,
IExecutionDb, IExecutionDb,
IResponseCallbackData, IResponseCallbackData,
IWorkflowDb, IWorkflowDb,
IWorkflowExecutionDataProcess, IWorkflowExecutionDataProcess,
ResponseHelper, ResponseHelper,
WorkflowHelpers,
WorkflowRunner,
WorkflowCredentials, WorkflowCredentials,
WorkflowExecuteAdditionalData, WorkflowExecuteAdditionalData,
WorkflowHelpers,
WorkflowRunner,
} from './'; } from './';
import { import {
@ -114,8 +115,8 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
} }
// Get the responseMode // Get the responseMode
const responseMode = workflow.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseMode'], 'onReceived'); const responseMode = workflow.expression.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseMode'], executionMode, 'onReceived');
const responseCode = workflow.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseCode'], 200) as number; const responseCode = workflow.expression.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseCode'], executionMode, 200) as number;
if (!['onReceived', 'lastNode'].includes(responseMode as string)) { if (!['onReceived', 'lastNode'].includes(responseMode as string)) {
// If the mode is not known we error. Is probably best like that instead of using // If the mode is not known we error. Is probably best like that instead of using
@ -173,7 +174,7 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
await WorkflowHelpers.saveStaticData(workflow); await WorkflowHelpers.saveStaticData(workflow);
if (webhookData.webhookDescription['responseHeaders'] !== undefined) { if (webhookData.webhookDescription['responseHeaders'] !== undefined) {
const responseHeaders = workflow.getComplexParameterValue(workflowStartNode, webhookData.webhookDescription['responseHeaders'], undefined) as { const responseHeaders = workflow.expression.getComplexParameterValue(workflowStartNode, webhookData.webhookDescription['responseHeaders'], executionMode, undefined) as {
entries?: Array<{ entries?: Array<{
name: string; name: string;
value: string; value: string;
@ -221,7 +222,7 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
return; return;
} }
// Now that we know that the workflow should run we can return the default respons // Now that we know that the workflow should run we can return the default response
// directly if responseMode is set to "onReceived" and a response should be sent // directly if responseMode is set to "onReceived" and a response should be sent
if (responseMode === 'onReceived' && didSendResponse === false) { if (responseMode === 'onReceived' && didSendResponse === false) {
// Return response directly and do not wait for the workflow to finish // Return response directly and do not wait for the workflow to finish
@ -251,7 +252,7 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
data: { data: {
main: webhookResultData.workflowData, main: webhookResultData.workflowData,
}, },
}, }
); );
const runExecutionData: IRunExecutionData = { const runExecutionData: IRunExecutionData = {
@ -282,7 +283,7 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
// Start now to run the workflow // Start now to run the workflow
const workflowRunner = new WorkflowRunner(); const workflowRunner = new WorkflowRunner();
const executionId = await workflowRunner.run(runData, true); const executionId = await workflowRunner.run(runData, true, !didSendResponse);
// Get a promise which resolves once the workflow did execute, and then send the response // Get a promise which resolves once the workflow did execute, and then send the response
const executePromise = activeExecutions.getPostExecutePromise(executionId) as Promise<IExecutionDb | undefined>; const executePromise = activeExecutions.getPostExecutePromise(executionId) as Promise<IExecutionDb | undefined>;
@ -301,18 +302,7 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
} }
const returnData = WorkflowHelpers.getDataLastExecutedNodeData(data); const returnData = WorkflowHelpers.getDataLastExecutedNodeData(data);
if (returnData === undefined) { if(data.data.resultData.error || returnData?.error !== undefined) {
if (didSendResponse === false) {
responseCallback(null, {
data: {
message: 'Workflow did execute successfully but the last node did not return any data.',
},
responseCode,
});
}
didSendResponse = true;
return data;
} else if (returnData.error !== undefined) {
if (didSendResponse === false) { if (didSendResponse === false) {
responseCallback(null, { responseCallback(null, {
data: { data: {
@ -325,7 +315,20 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
return data; return data;
} }
const responseData = workflow.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseData'], 'firstEntryJson'); if (returnData === undefined) {
if (didSendResponse === false) {
responseCallback(null, {
data: {
message: 'Workflow did execute successfully but the last node did not return any data.',
},
responseCode,
});
}
didSendResponse = true;
return data;
}
const responseData = workflow.expression.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseData'], executionMode, 'firstEntryJson');
if (didSendResponse === false) { if (didSendResponse === false) {
let data: IDataObject | IDataObject[]; let data: IDataObject | IDataObject[];
@ -340,13 +343,13 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
data = returnData.data!.main[0]![0].json; data = returnData.data!.main[0]![0].json;
const responsePropertyName = workflow.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responsePropertyName'], undefined); const responsePropertyName = workflow.expression.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responsePropertyName'], executionMode, undefined);
if (responsePropertyName !== undefined) { if (responsePropertyName !== undefined) {
data = get(data, responsePropertyName as string) as IDataObject; data = get(data, responsePropertyName as string) as IDataObject;
} }
const responseContentType = workflow.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseContentType'], undefined); const responseContentType = workflow.expression.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseContentType'], executionMode, undefined);
if (responseContentType !== undefined) { if (responseContentType !== undefined) {
// Send the webhook response manually to be able to set the content-type // Send the webhook response manually to be able to set the content-type
@ -379,7 +382,7 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
didSendResponse = true; didSendResponse = true;
} }
const responseBinaryPropertyName = workflow.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseBinaryPropertyName'], 'data'); const responseBinaryPropertyName = workflow.expression.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseBinaryPropertyName'], executionMode, 'data');
if (responseBinaryPropertyName === undefined && didSendResponse === false) { if (responseBinaryPropertyName === undefined && didSendResponse === false) {
responseCallback(new Error('No "responseBinaryPropertyName" is set.'), {}); responseCallback(new Error('No "responseBinaryPropertyName" is set.'), {});
@ -450,8 +453,11 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
export function getWebhookBaseUrl() { export function getWebhookBaseUrl() {
let urlBaseWebhook = GenericHelpers.getBaseUrl(); let urlBaseWebhook = GenericHelpers.getBaseUrl();
if (process.env.WEBHOOK_TUNNEL_URL !== undefined) { // We renamed WEBHOOK_TUNNEL_URL to WEBHOOK_URL. This is here to maintain
urlBaseWebhook = process.env.WEBHOOK_TUNNEL_URL; // backward compatibility. Will be deprecated and removed in the future.
if (process.env.WEBHOOK_TUNNEL_URL !== undefined || process.env.WEBHOOK_URL !== undefined) {
// @ts-ignore
urlBaseWebhook = process.env.WEBHOOK_TUNNEL_URL || process.env.WEBHOOK_URL;
} }
return urlBaseWebhook; return urlBaseWebhook;
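Note the precedence: the deprecated `WEBHOOK_TUNNEL_URL` still wins when both variables are set. The whole function condenses to this fallback chain:

const urlBaseWebhook =
	process.env.WEBHOOK_TUNNEL_URL || process.env.WEBHOOK_URL || GenericHelpers.getBaseUrl();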

View file

@ -0,0 +1,306 @@
import * as express from 'express';
import {
readFileSync,
} from 'fs';
import {
getConnectionManager,
} from 'typeorm';
import * as bodyParser from 'body-parser';
require('body-parser-xml')(bodyParser);
import * as _ from 'lodash';
import {
ActiveExecutions,
ActiveWorkflowRunner,
Db,
ExternalHooks,
GenericHelpers,
ICustomRequest,
IExternalHooksClass,
IPackageVersions,
ResponseHelper,
} from './';
import * as compression from 'compression';
import * as config from '../config';
import * as parseUrl from 'parseurl';
export function registerProductionWebhooks() {
// HEAD webhook requests
this.app.head(`/${this.endpointWebhook}/*`, async (req: express.Request, res: express.Response) => {
// Cut away the "/webhook/" to get the registred part of the url
const requestUrl = (req as ICustomRequest).parsedUrl!.pathname!.slice(this.endpointWebhook.length + 2);
let response;
try {
response = await this.activeWorkflowRunner.executeWebhook('HEAD', requestUrl, req, res);
} catch (error) {
ResponseHelper.sendErrorResponse(res, error);
return;
}
if (response.noWebhookResponse === true) {
// Nothing else to do as the response got already sent
return;
}
ResponseHelper.sendSuccessResponse(res, response.data, true, response.responseCode);
});
// OPTIONS webhook requests
this.app.options(`/${this.endpointWebhook}/*`, async (req: express.Request, res: express.Response) => {
// Cut away the "/webhook/" to get the registred part of the url
const requestUrl = (req as ICustomRequest).parsedUrl!.pathname!.slice(this.endpointWebhook.length + 2);
let allowedMethods: string[];
try {
allowedMethods = await this.activeWorkflowRunner.getWebhookMethods(requestUrl);
allowedMethods.push('OPTIONS');
// Add custom "Allow" header to satisfy OPTIONS response.
res.append('Allow', allowedMethods);
} catch (error) {
ResponseHelper.sendErrorResponse(res, error);
return;
}
ResponseHelper.sendSuccessResponse(res, {}, true, 204);
});
// GET webhook requests
this.app.get(`/${this.endpointWebhook}/*`, async (req: express.Request, res: express.Response) => {
// Cut away the "/webhook/" to get the registred part of the url
const requestUrl = (req as ICustomRequest).parsedUrl!.pathname!.slice(this.endpointWebhook.length + 2);
let response;
try {
response = await this.activeWorkflowRunner.executeWebhook('GET', requestUrl, req, res);
} catch (error) {
ResponseHelper.sendErrorResponse(res, error);
return;
}
if (response.noWebhookResponse === true) {
// Nothing else to do as the response got already sent
return;
}
ResponseHelper.sendSuccessResponse(res, response.data, true, response.responseCode);
});
// POST webhook requests
this.app.post(`/${this.endpointWebhook}/*`, async (req: express.Request, res: express.Response) => {
// Cut away the "/webhook/" to get the registred part of the url
const requestUrl = (req as ICustomRequest).parsedUrl!.pathname!.slice(this.endpointWebhook.length + 2);
let response;
try {
response = await this.activeWorkflowRunner.executeWebhook('POST', requestUrl, req, res);
} catch (error) {
ResponseHelper.sendErrorResponse(res, error);
return;
}
if (response.noWebhookResponse === true) {
// Nothing else to do as the response got already sent
return;
}
ResponseHelper.sendSuccessResponse(res, response.data, true, response.responseCode);
});
}
class App {
app: express.Application;
activeWorkflowRunner: ActiveWorkflowRunner.ActiveWorkflowRunner;
endpointWebhook: string;
endpointPresetCredentials: string;
externalHooks: IExternalHooksClass;
saveDataErrorExecution: string;
saveDataSuccessExecution: string;
saveManualExecutions: boolean;
executionTimeout: number;
maxExecutionTimeout: number;
timezone: string;
activeExecutionsInstance: ActiveExecutions.ActiveExecutions;
versions: IPackageVersions | undefined;
restEndpoint: string;
protocol: string;
sslKey: string;
sslCert: string;
presetCredentialsLoaded: boolean;
constructor() {
this.app = express();
this.endpointWebhook = config.get('endpoints.webhook') as string;
this.saveDataErrorExecution = config.get('executions.saveDataOnError') as string;
this.saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string;
this.saveManualExecutions = config.get('executions.saveDataManualExecutions') as boolean;
this.executionTimeout = config.get('executions.timeout') as number;
this.maxExecutionTimeout = config.get('executions.maxTimeout') as number;
this.timezone = config.get('generic.timezone') as string;
this.restEndpoint = config.get('endpoints.rest') as string;
this.activeWorkflowRunner = ActiveWorkflowRunner.getInstance();
this.activeExecutionsInstance = ActiveExecutions.getInstance();
this.protocol = config.get('protocol');
this.sslKey = config.get('ssl_key');
this.sslCert = config.get('ssl_cert');
this.externalHooks = ExternalHooks();
this.presetCredentialsLoaded = false;
this.endpointPresetCredentials = config.get('credentials.overwrite.endpoint') as string;
}
/**
* Returns the current epoch time
*
* @returns {number}
* @memberof App
*/
getCurrentDate(): Date {
return new Date();
}
async config(): Promise<void> {
this.versions = await GenericHelpers.getVersions();
// Compress the response data
this.app.use(compression());
// Make sure that each request has the "parsedUrl" parameter
this.app.use((req: express.Request, res: express.Response, next: express.NextFunction) => {
(req as ICustomRequest).parsedUrl = parseUrl(req);
// @ts-ignore
req.rawBody = Buffer.from('', 'base64');
next();
});
// Support application/json type post data
this.app.use(bodyParser.json({
limit: '16mb', verify: (req, res, buf) => {
// @ts-ignore
req.rawBody = buf;
},
}));
// Support application/xml type post data
// @ts-ignore
this.app.use(bodyParser.xml({
limit: '16mb', xmlParseOptions: {
normalize: true, // Trim whitespace inside text nodes
normalizeTags: true, // Transform tags to lowercase
explicitArray: false, // Only put properties in array if length > 1
},
}));
this.app.use(bodyParser.text({
limit: '16mb', verify: (req, res, buf) => {
// @ts-ignore
req.rawBody = buf;
},
}));
// Support application/x-www-form-urlencoded post data
this.app.use(bodyParser.urlencoded({ extended: false,
verify: (req, res, buf) => {
// @ts-ignore
req.rawBody = buf;
},
}));
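All four parsers above capture the raw request bytes via the `verify` callback. That matters because many webhook providers sign the exact bytes they send, so signature checks must run on `req.rawBody`, never on re-serialized parsed JSON. A hypothetical verifier, assuming a hex-encoded SHA-256 HMAC header; the header format and secret handling are assumptions, not anything this commit defines:

import { createHmac, timingSafeEqual } from 'crypto';

function verifyWebhookSignature(rawBody: Buffer, signatureHex: string, secret: string): boolean {
	const expected = createHmac('sha256', secret).update(rawBody).digest();
	const received = Buffer.from(signatureHex, 'hex');
	// Constant-time comparison; lengths must match first or timingSafeEqual throws.
	return received.length === expected.length && timingSafeEqual(received, expected);
}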
if (process.env['NODE_ENV'] !== 'production') {
this.app.use((req: express.Request, res: express.Response, next: express.NextFunction) => {
// Allow access also from frontend when developing
res.header('Access-Control-Allow-Origin', 'http://localhost:8080');
res.header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS, PUT, PATCH, DELETE');
res.header('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept, sessionid');
next();
});
}
this.app.use((req: express.Request, res: express.Response, next: express.NextFunction) => {
if (Db.collections.Workflow === null) {
const error = new ResponseHelper.ResponseError('Database is not ready!', undefined, 503);
return ResponseHelper.sendErrorResponse(res, error);
}
next();
});
// ----------------------------------------
// Healthcheck
// ----------------------------------------
// Does very basic health check
this.app.get('/healthz', async (req: express.Request, res: express.Response) => {
const connectionManager = getConnectionManager();
if (connectionManager.connections.length === 0) {
const error = new ResponseHelper.ResponseError('No Database connection found!', undefined, 503);
return ResponseHelper.sendErrorResponse(res, error);
}
if (connectionManager.connections[0].isConnected === false) {
// Connection is not active
const error = new ResponseHelper.ResponseError('Database connection not active!', undefined, 503);
return ResponseHelper.sendErrorResponse(res, error);
}
// Everything fine
const responseData = {
status: 'ok',
};
ResponseHelper.sendSuccessResponse(res, responseData, true, 200);
});
registerProductionWebhooks.apply(this);
}
}
export async function start(): Promise<void> {
const PORT = config.get('port');
const ADDRESS = config.get('listen_address');
const app = new App();
await app.config();
let server;
if (app.protocol === 'https' && app.sslKey && app.sslCert) {
const https = require('https');
const privateKey = readFileSync(app.sslKey, 'utf8');
const cert = readFileSync(app.sslCert, 'utf8');
const credentials = { key: privateKey, cert };
server = https.createServer(credentials, app.app);
} else {
const http = require('http');
server = http.createServer(app.app);
}
server.listen(PORT, ADDRESS, async () => {
const versions = await GenericHelpers.getVersions();
console.log(`n8n ready on ${ADDRESS}, port ${PORT}`);
console.log(`Version: ${versions.cli}`);
await app.externalHooks.run('n8n.ready', [app]);
});
}

View file

@ -1,9 +1,11 @@
import { import {
ActiveExecutions,
CredentialsHelper, CredentialsHelper,
Db, Db,
ExternalHooks, ExternalHooks,
IExecutionDb, IExecutionDb,
IExecutionFlattedDb, IExecutionFlattedDb,
IExecutionResponse,
IPushDataExecutionFinished, IPushDataExecutionFinished,
IWorkflowBase, IWorkflowBase,
IWorkflowExecutionDataProcess, IWorkflowExecutionDataProcess,
@ -25,8 +27,8 @@ import {
IExecuteData, IExecuteData,
IExecuteWorkflowInfo, IExecuteWorkflowInfo,
INode, INode,
INodeParameters,
INodeExecutionData, INodeExecutionData,
INodeParameters,
IRun, IRun,
IRunExecutionData, IRunExecutionData,
ITaskData, ITaskData,
@ -43,9 +45,11 @@ import * as config from '../config';
import { LessThanOrEqual } from "typeorm"; import { LessThanOrEqual } from "typeorm";
const ERROR_TRIGGER_TYPE = config.get('nodes.errorTriggerType') as string;
/** /**
* Checks if there was an error and if errorWorkflow is defined. If so it collects * Checks if there was an error and if errorWorkflow or a trigger is defined. If so it collects
* all the data and executes it * all the data and executes it
* *
* @param {IWorkflowBase} workflowData The workflow which got executed * @param {IWorkflowBase} workflowData The workflow which got executed
@ -54,14 +58,14 @@ import { LessThanOrEqual } from "typeorm";
* @param {string} [executionId] The id the execution got saved as * @param {string} [executionId] The id the execution got saved as
*/ */
function executeErrorWorkflow(workflowData: IWorkflowBase, fullRunData: IRun, mode: WorkflowExecuteMode, executionId?: string, retryOf?: string): void { function executeErrorWorkflow(workflowData: IWorkflowBase, fullRunData: IRun, mode: WorkflowExecuteMode, executionId?: string, retryOf?: string): void {
// Check if there was an error and if so if an errorWorkflow is set // Check if there was an error and if so if an errorWorkflow or a trigger is set
let pastExecutionUrl: string | undefined = undefined; let pastExecutionUrl: string | undefined = undefined;
if (executionId !== undefined) { if (executionId !== undefined) {
pastExecutionUrl = `${WebhookHelpers.getWebhookBaseUrl()}execution/${executionId}`; pastExecutionUrl = `${WebhookHelpers.getWebhookBaseUrl()}execution/${executionId}`;
} }
if (fullRunData.data.resultData.error !== undefined && workflowData.settings !== undefined && workflowData.settings.errorWorkflow) { if (fullRunData.data.resultData.error !== undefined) {
const workflowErrorData = { const workflowErrorData = {
execution: { execution: {
id: executionId, id: executionId,
@ -74,10 +78,18 @@ function executeErrorWorkflow(workflowData: IWorkflowBase, fullRunData: IRun, mo
workflow: { workflow: {
id: workflowData.id !== undefined ? workflowData.id.toString() as string : undefined, id: workflowData.id !== undefined ? workflowData.id.toString() as string : undefined,
name: workflowData.name, name: workflowData.name,
} },
}; };
// Run the error workflow // Run the error workflow
// To avoid an infinite loop do not run the error workflow again if the error-workflow itself failed and it is its own error-workflow.
if (workflowData.settings !== undefined && workflowData.settings.errorWorkflow && !(mode === 'error' && workflowData.id && workflowData.settings.errorWorkflow.toString() === workflowData.id.toString())) {
// If a specific error workflow is set run only that one
WorkflowHelpers.executeErrorWorkflow(workflowData.settings.errorWorkflow as string, workflowErrorData); WorkflowHelpers.executeErrorWorkflow(workflowData.settings.errorWorkflow as string, workflowErrorData);
} else if (mode !== 'error' && workflowData.id !== undefined && workflowData.nodes.some((node) => node.type === ERROR_TRIGGER_TYPE)) {
// If the workflow contains an Error Trigger node, run the workflow itself as its own error workflow
WorkflowHelpers.executeErrorWorkflow(workflowData.id.toString(), workflowErrorData);
}
} }
} }
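Condensed, the dispatch above follows two rules: an explicitly configured error workflow wins, unless the failing workflow is its own error workflow and is already running in 'error' mode; otherwise a workflow containing an Error Trigger node handles its own failures. As a sketch with simplified types:

function pickErrorWorkflow(
	mode: string,
	workflowId: string | undefined,
	errorWorkflow: string | undefined,
	hasErrorTrigger: boolean,
): string | undefined {
	// Never let a workflow re-trigger itself while already running as an error workflow.
	if (errorWorkflow && !(mode === 'error' && workflowId && errorWorkflow === workflowId)) {
		return errorWorkflow;         // explicit setting wins
	}
	if (mode !== 'error' && workflowId && hasErrorTrigger) {
		return workflowId;            // the workflow handles its own errors via an Error Trigger node
	}
	return undefined;                 // nothing to run
}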
@ -106,42 +118,6 @@ function pruneExecutionData(): void {
} }
/**
* Pushes the execution out to all connected clients
*
* @param {WorkflowExecuteMode} mode The mode in which the workflow got started in
* @param {IRun} fullRunData The RunData of the finished execution
* @param {string} executionIdActive The id of the finished execution
* @param {string} [executionIdDb] The database id of finished execution
*/
export function pushExecutionFinished(mode: WorkflowExecuteMode, fullRunData: IRun, executionIdActive: string, executionIdDb?: string, retryOf?: string) {
// Clone the object except the runData. That one is not supposed
// to be sent. Because that data got sent piece by piece after
// each node which finished executing
const pushRunData = {
...fullRunData,
data: {
...fullRunData.data,
resultData: {
...fullRunData.data.resultData,
runData: {},
},
},
};
// Push data to editor-ui once workflow finished
const sendData: IPushDataExecutionFinished = {
executionIdActive,
executionIdDb,
data: pushRunData,
retryOf,
};
const pushInstance = Push.getInstance();
pushInstance.send('executionFinished', sendData);
}
/** /**
* Returns hook functions to push data to Editor-UI * Returns hook functions to push data to Editor-UI
* *
@ -181,7 +157,10 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
], ],
workflowExecuteBefore: [ workflowExecuteBefore: [
async function (this: WorkflowHooks): Promise<void> { async function (this: WorkflowHooks): Promise<void> {
// Push data to editor-ui once workflow finished // Push data to session which started the workflow
if (this.sessionId === undefined) {
return;
}
const pushInstance = Push.getInstance(); const pushInstance = Push.getInstance();
pushInstance.send('executionStarted', { pushInstance.send('executionStarted', {
executionId: this.executionId, executionId: this.executionId,
@ -190,14 +169,127 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
retryOf: this.retryOf, retryOf: this.retryOf,
workflowId: this.workflowData.id as string, workflowId: this.workflowData.id as string,
workflowName: this.workflowData.name, workflowName: this.workflowData.name,
}); }, this.sessionId);
} },
], ],
workflowExecuteAfter: [ workflowExecuteAfter: [
async function (this: WorkflowHooks, fullRunData: IRun, newStaticData: IDataObject): Promise<void> { async function (this: WorkflowHooks, fullRunData: IRun, newStaticData: IDataObject): Promise<void> {
pushExecutionFinished(this.mode, fullRunData, this.executionId, undefined, this.retryOf); // Push data to session which started the workflow
if (this.sessionId === undefined) {
return;
}
// Clone the object except the runData. That one is not supposed
// to be sent. Because that data got sent piece by piece after
// each node which finished executing
const pushRunData = {
...fullRunData,
data: {
...fullRunData.data,
resultData: {
...fullRunData.data.resultData,
runData: {},
}, },
] },
};
// Push data to editor-ui once workflow finished
// TODO: Look at this again
const sendData: IPushDataExecutionFinished = {
executionId: this.executionId,
data: pushRunData,
retryOf: this.retryOf,
};
const pushInstance = Push.getInstance();
pushInstance.send('executionFinished', sendData, this.sessionId);
},
],
};
}
export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowExecuteHooks {
const externalHooks = ExternalHooks();
return {
workflowExecuteBefore: [
async function (this: WorkflowHooks, workflow: Workflow): Promise<void> {
await externalHooks.run('workflow.preExecute', [workflow, this.mode]);
},
],
nodeExecuteAfter: [
async function (nodeName: string, data: ITaskData, executionData: IRunExecutionData): Promise<void> {
if (this.workflowData.settings !== undefined) {
if (this.workflowData.settings.saveExecutionProgress === false) {
return;
} else if (this.workflowData.settings.saveExecutionProgress !== true && !config.get('executions.saveExecutionProgress') as boolean) {
return;
}
} else if (!config.get('executions.saveExecutionProgress') as boolean) {
return;
}
try {
const execution = await Db.collections.Execution!.findOne(this.executionId);
if (execution === undefined) {
// Something went badly wrong if this happens.
// This check is here mostly to make typescript happy.
return undefined;
}
const fullExecutionData: IExecutionResponse = ResponseHelper.unflattenExecutionData(execution);
if (fullExecutionData.finished) {
// We already received the `workflowExecuteAfter` hook, so this is just an async call
// that was left behind. We skip saving because the other call should have saved everything,
// so this one is safe to ignore.
return;
}
if (fullExecutionData.data === undefined) {
fullExecutionData.data = {
startData: {
},
resultData: {
runData: {},
},
executionData: {
contextData: {},
nodeExecutionStack: [],
waitingExecution: {},
},
};
}
if (Array.isArray(fullExecutionData.data.resultData.runData[nodeName])) {
// Append data if array exists
fullExecutionData.data.resultData.runData[nodeName].push(data);
} else {
// Initialize array and save data
fullExecutionData.data.resultData.runData[nodeName] = [data];
}
fullExecutionData.data.executionData = executionData.executionData;
// Set last executed node so that it may resume on failure
fullExecutionData.data.resultData.lastNodeExecuted = nodeName;
const flattenedExecutionData = ResponseHelper.flattenExecutionData(fullExecutionData);
await Db.collections.Execution!.update(this.executionId, flattenedExecutionData as IExecutionFlattedDb);
} catch (err) {
// TODO: Improve in the future!
// Errors here might happen because of database access
// For busy machines, we may get "Database is locked" errors.
// We do this to prevent crashes and executions ending in `unknown` state.
console.log(`Failed saving execution progress to database for execution ID ${this.executionId}`, err);
}
},
],
}; };
} }
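The guard at the top of `nodeExecuteAfter` resolves the effective flag: a per-workflow `saveExecutionProgress` setting overrides the global `executions.saveExecutionProgress` config. A sketch of that resolution; the function name is illustrative:

function shouldSaveProgress(workflowSetting: boolean | undefined, globalDefault: boolean): boolean {
	if (workflowSetting === false) return false;  // explicitly disabled on the workflow
	if (workflowSetting === true) return true;    // explicitly enabled on the workflow
	return globalDefault;                         // fall back to the config value
}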
@ -240,6 +332,8 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
} }
if (isManualMode && saveManualExecutions === false) { if (isManualMode && saveManualExecutions === false) {
// Data is always saved, so we remove from database
await Db.collections.Execution!.delete(this.executionId);
return; return;
} }
@ -258,6 +352,8 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
if (!isManualMode) { if (!isManualMode) {
executeErrorWorkflow(this.workflowData, fullRunData, this.mode, undefined, this.retryOf); executeErrorWorkflow(this.workflowData, fullRunData, this.mode, undefined, this.retryOf);
} }
// Data is always saved, so we remove from database
await Db.collections.Execution!.delete(this.executionId);
return; return;
} }
@ -281,16 +377,16 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
const executionData = ResponseHelper.flattenExecutionData(fullExecutionData); const executionData = ResponseHelper.flattenExecutionData(fullExecutionData);
// Save the Execution in DB // Save the Execution in DB
const executionResult = await Db.collections.Execution!.save(executionData as IExecutionFlattedDb); await Db.collections.Execution!.update(this.executionId, executionData as IExecutionFlattedDb);
if (fullRunData.finished === true && this.retryOf !== undefined) { if (fullRunData.finished === true && this.retryOf !== undefined) {
// If the retry was successful, save the reference to it on the original execution // If the retry was successful, save the reference to it on the original execution
// await Db.collections.Execution!.save(executionData as IExecutionFlattedDb); // await Db.collections.Execution!.save(executionData as IExecutionFlattedDb);
await Db.collections.Execution!.update(this.retryOf, { retrySuccessId: executionResult.id }); await Db.collections.Execution!.update(this.retryOf, { retrySuccessId: this.executionId });
} }
if (!isManualMode) { if (!isManualMode) {
executeErrorWorkflow(this.workflowData, fullRunData, this.mode, executionResult ? executionResult.id as string : undefined, this.retryOf); executeErrorWorkflow(this.workflowData, fullRunData, this.mode, this.executionId, this.retryOf);
} }
} catch (error) { } catch (error) {
if (!isManualMode) { if (!isManualMode) {
@ -298,23 +394,148 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
} }
} }
}, },
] ],
}; };
} }
/**
 * Returns hook functions to save workflow execution and call error workflow
 * for running with queues. Manual executions should never run on queues as
 * they are always executed in the main process.
 *
 * @returns {IWorkflowExecuteHooks}
 */
function hookFunctionsSaveWorker(): IWorkflowExecuteHooks {
return {
nodeExecuteBefore: [],
nodeExecuteAfter: [],
workflowExecuteBefore: [],
workflowExecuteAfter: [
async function (this: WorkflowHooks, fullRunData: IRun, newStaticData: IDataObject): Promise<void> {
try {
if (WorkflowHelpers.isWorkflowIdValid(this.workflowData.id as string) === true && newStaticData) {
// Workflow is saved so update in database
try {
await WorkflowHelpers.saveStaticDataById(this.workflowData.id as string, newStaticData);
} catch (e) {
// TODO: Add proper logging!
console.error(`There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: ${e.message}`);
}
}
// Check config to know if execution should be saved or not
let saveDataErrorExecution = config.get('executions.saveDataOnError') as string;
if (this.workflowData.settings !== undefined) {
saveDataErrorExecution = (this.workflowData.settings.saveDataErrorExecution as string) || saveDataErrorExecution;
}
const workflowDidSucceed = !fullRunData.data.resultData.error;
if (workflowDidSucceed === false && saveDataErrorExecution === 'none') {
executeErrorWorkflow(this.workflowData, fullRunData, this.mode, undefined, this.retryOf);
}
const fullExecutionData: IExecutionDb = {
data: fullRunData.data,
mode: fullRunData.mode,
finished: fullRunData.finished ? fullRunData.finished : false,
startedAt: fullRunData.startedAt,
stoppedAt: fullRunData.stoppedAt,
workflowData: this.workflowData,
};
if (this.retryOf !== undefined) {
fullExecutionData.retryOf = this.retryOf.toString();
}
if (this.workflowData.id !== undefined && WorkflowHelpers.isWorkflowIdValid(this.workflowData.id.toString()) === true) {
fullExecutionData.workflowId = this.workflowData.id.toString();
}
const executionData = ResponseHelper.flattenExecutionData(fullExecutionData);
// Save the Execution in DB
await Db.collections.Execution!.update(this.executionId, executionData as IExecutionFlattedDb);
if (fullRunData.finished === true && this.retryOf !== undefined) {
// If the retry was successful save the reference to it on the original execution
// await Db.collections.Execution!.save(executionData as IExecutionFlattedDb);
await Db.collections.Execution!.update(this.retryOf, { retrySuccessId: this.executionId });
}
} catch (error) {
executeErrorWorkflow(this.workflowData, fullRunData, this.mode, undefined, this.retryOf);
}
},
],
};
}
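The worker variant above deliberately carries no push hooks and no manual-execution cleanup; it only persists results. A hedged sketch of how a caller could pick the right hook bundle per context, using the factory functions defined in this file (the `context` discriminator is invented):

function hooksForContext(context: 'integrated' | 'worker', mode: WorkflowExecuteMode, executionId: string, workflowData: IWorkflowBase): WorkflowHooks {
	// Sub-workflows get the integrated hooks; queued executions get the worker hooks
	return context === 'worker'
		? getWorkflowHooksWorkerExecuter(mode, executionId, workflowData)
		: getWorkflowHooksIntegrated(mode, executionId, workflowData);
}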
export async function getRunData(workflowData: IWorkflowBase, inputData?: INodeExecutionData[]): Promise<IWorkflowExecutionDataProcess> {
const mode = 'integrated';
// Find Start-Node
const requiredNodeTypes = ['n8n-nodes-base.start'];
let startNode: INode | undefined;
for (const node of workflowData!.nodes) {
if (requiredNodeTypes.includes(node.type)) {
startNode = node;
break;
}
}
if (startNode === undefined) {
// If the workflow does not contain a start-node we can not know what
// should be executed and with what data to start.
throw new Error(`The workflow does not contain a "Start" node and so cannot be executed.`);
}
// Always start with empty data if no inputData got supplied
inputData = inputData || [
{
json: {},
},
];
// Initialize the incoming data
const nodeExecutionStack: IExecuteData[] = [];
nodeExecutionStack.push(
{
node: startNode,
data: {
main: [inputData],
},
}
);
const runExecutionData: IRunExecutionData = {
startData: {
},
resultData: {
runData: {},
},
executionData: {
contextData: {},
nodeExecutionStack,
waitingExecution: {},
},
};
// Get the needed credentials for the current workflow as they will differ from the ones of the
// calling workflow.
const credentials = await WorkflowCredentials(workflowData!.nodes);
const runData: IWorkflowExecutionDataProcess = {
credentials,
executionMode: mode,
executionData: runExecutionData,
// @ts-ignore
workflowData,
};
return runData;
}
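Hypothetical usage of `getRunData` together with `getWorkflowData` (defined next): load a sub-workflow, build its run data from the caller's items, and register it as an active execution. The workflow ID and input values are invented:

const workflowData = await getWorkflowData({ id: '42' });
const runData = await getRunData(workflowData, [{ json: { city: 'Berlin' } }]);
const executionId = await ActiveExecutions.getInstance().add(runData);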
export async function getWorkflowData(workflowInfo: IExecuteWorkflowInfo): Promise<IWorkflowBase> {
if (workflowInfo.id === undefined && workflowInfo.code === undefined) {
throw new Error(`No information about the workflow to execute found. Please provide either the "id" or "code"!`);
}
@ -335,82 +556,76 @@ export async function executeWorkflow(workflowInfo: IExecuteWorkflowInfo, additi
workflowData = workflowInfo.code;
}
return workflowData!;
}
/**
* Executes the workflow with the given ID
*
* @export
* @param {string} workflowId The id of the workflow to execute
* @param {IWorkflowExecuteAdditionalData} additionalData
* @param {INodeExecutionData[]} [inputData]
* @returns {(Promise<Array<INodeExecutionData[] | null>>)}
*/
export async function executeWorkflow(workflowInfo: IExecuteWorkflowInfo, additionalData: IWorkflowExecuteAdditionalData, inputData?: INodeExecutionData[], parentExecutionId?: string, loadedWorkflowData?: IWorkflowBase, loadedRunData?: IWorkflowExecutionDataProcess): Promise<Array<INodeExecutionData[] | null> | IRun> {
const externalHooks = ExternalHooks();
await externalHooks.init();
const nodeTypes = NodeTypes();
const workflowData = loadedWorkflowData !== undefined ? loadedWorkflowData : await getWorkflowData(workflowInfo);
const workflowName = workflowData ? workflowData.name : undefined;
const workflow = new Workflow({ id: workflowInfo.id, name: workflowName, nodes: workflowData!.nodes, connections: workflowData!.connections, active: workflowData!.active, nodeTypes, staticData: workflowData!.staticData });
const runData = loadedRunData !== undefined ? loadedRunData : await getRunData(workflowData, inputData);
let executionId;
if (parentExecutionId !== undefined) {
executionId = parentExecutionId;
} else {
executionId = await ActiveExecutions.getInstance().add(runData);
}
const runExecutionData = runData.executionData as IRunExecutionData;
// Get the needed credentials for the current workflow as they will differ from the ones of the
// calling workflow.
const credentials = await WorkflowCredentials(workflowData!.nodes);
// Create new additionalData to have different workflow loaded and to call
// different webhooks
const additionalDataIntegrated = await getBase(credentials);
additionalDataIntegrated.hooks = getWorkflowHooksIntegrated(runData.executionMode, executionId, workflowData!, { parentProcessMode: additionalData.hooks!.mode });
// Make sure we pass on the original executeWorkflow function we received
// This one already contains changes to talk to parent process
// and get executionID from `activeExecutions` running on main process
additionalDataIntegrated.executeWorkflow = additionalData.executeWorkflow;
// Execute the workflow
const workflowExecute = new WorkflowExecute(additionalDataIntegrated, runData.executionMode, runExecutionData);
const data = await workflowExecute.processRunExecutionData(workflow);
await externalHooks.run('workflow.postExecute', [data, workflowData]);
if (data.finished === true) {
// Workflow did finish successfully
if (parentExecutionId !== undefined) {
return data;
} else {
await ActiveExecutions.getInstance().remove(executionId, data);
const returnData = WorkflowHelpers.getDataLastExecutedNodeData(data);
return returnData!.data!.main;
}
} else {
await ActiveExecutions.getInstance().remove(executionId, data);
// Workflow did fail
const error = new Error(data.data.resultData.error!.message);
error.stack = data.data.resultData.error!.stack;
@ -460,6 +675,52 @@ export async function getBase(credentials: IWorkflowCredentials, currentNodePara
export function getWorkflowHooksIntegrated(mode: WorkflowExecuteMode, executionId: string, workflowData: IWorkflowBase, optionalParameters?: IWorkflowHooksOptionalParameters): WorkflowHooks {
optionalParameters = optionalParameters || {};
const hookFunctions = hookFunctionsSave(optionalParameters.parentProcessMode);
const preExecuteFunctions = hookFunctionsPreExecute(optionalParameters.parentProcessMode);
for (const key of Object.keys(preExecuteFunctions)) {
if (hookFunctions[key] === undefined) {
hookFunctions[key] = [];
}
hookFunctions[key]!.push.apply(hookFunctions[key], preExecuteFunctions[key]);
}
return new WorkflowHooks(hookFunctions, mode, executionId, workflowData, optionalParameters);
}
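The merge loop above (and its repetitions below) appends hook functions instead of replacing them. A hypothetical generic helper, not present in the source, that captures the same idea:

function mergeHookFunctions(target: IWorkflowExecuteHooks, source: IWorkflowExecuteHooks): void {
	for (const key of Object.keys(source)) {
		if (target[key] === undefined) {
			target[key] = [];
		}
		// Append so that previously registered hooks keep running
		target[key]!.push(...source[key]!);
	}
}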
/**
* Returns WorkflowHooks instance for running integrated workflows
* (Workflows which get started inside of another workflow)
*/
export function getWorkflowHooksWorkerExecuter(mode: WorkflowExecuteMode, executionId: string, workflowData: IWorkflowBase, optionalParameters?: IWorkflowHooksOptionalParameters): WorkflowHooks {
optionalParameters = optionalParameters || {};
const hookFunctions = hookFunctionsSaveWorker();
const preExecuteFunctions = hookFunctionsPreExecute(optionalParameters.parentProcessMode);
for (const key of Object.keys(preExecuteFunctions)) {
if (hookFunctions[key] === undefined) {
hookFunctions[key] = [];
}
hookFunctions[key]!.push.apply(hookFunctions[key], preExecuteFunctions[key]);
}
return new WorkflowHooks(hookFunctions, mode, executionId, workflowData, optionalParameters);
}
/**
* Returns WorkflowHooks instance for main process if workflow runs via worker
*/
export function getWorkflowHooksWorkerMain(mode: WorkflowExecuteMode, executionId: string, workflowData: IWorkflowBase, optionalParameters?: IWorkflowHooksOptionalParameters): WorkflowHooks {
optionalParameters = optionalParameters || {};
const hookFunctions = hookFunctionsPush();
const preExecuteFunctions = hookFunctionsPreExecute(optionalParameters.parentProcessMode);
for (const key of Object.keys(preExecuteFunctions)) {
if (hookFunctions[key] === undefined) {
hookFunctions[key] = [];
}
hookFunctions[key]!.push.apply(hookFunctions[key], preExecuteFunctions[key]);
}
// When running with worker mode, main process executes
// Only workflowExecuteBefore + workflowExecuteAfter
// So to avoid confusion, we are removing other hooks.
hookFunctions.nodeExecuteBefore = [];
hookFunctions.nodeExecuteAfter = [];
return new WorkflowHooks(hookFunctions, mode, executionId, workflowData, optionalParameters);
}
@ -472,12 +733,26 @@ export function getWorkflowHooksIntegrated(mode: WorkflowExecuteMode, executionI
 * @param {string} executionId
 * @returns {WorkflowHooks}
 */
export function getWorkflowHooksMain(data: IWorkflowExecutionDataProcess, executionId: string, isMainProcess = false): WorkflowHooks {
const hookFunctions = hookFunctionsSave();
const pushFunctions = hookFunctionsPush();
for (const key of Object.keys(pushFunctions)) {
if (hookFunctions[key] === undefined) {
hookFunctions[key] = [];
}
hookFunctions[key]!.push.apply(hookFunctions[key], pushFunctions[key]);
}
if (isMainProcess) {
const preExecuteFunctions = hookFunctionsPreExecute();
for (const key of Object.keys(preExecuteFunctions)) {
if (hookFunctions[key] === undefined) {
hookFunctions[key] = [];
}
hookFunctions[key]!.push.apply(hookFunctions[key], preExecuteFunctions[key]);
}
}
return new WorkflowHooks(hookFunctions, data.executionMode, executionId, data.workflowData, { sessionId: data.sessionId, retryOf: data.retryOf as string });
}
View file
@ -3,8 +3,8 @@ import {
Db,
ICredentialsTypeData,
ITransferNodeTypes,
IWorkflowErrorData,
IWorkflowExecutionDataProcess,
NodeTypes,
WorkflowCredentials,
WorkflowRunner,
@ -120,12 +120,12 @@ export async function executeErrorWorkflow(workflowId: string, workflowErrorData
main: [
[
{
json: workflowErrorData,
},
],
],
},
}
);
const runExecutionData: IRunExecutionData = {
View file
@ -2,15 +2,21 @@ import {
ActiveExecutions,
CredentialsOverwrites,
CredentialTypes,
Db,
ExternalHooks,
IBullJobData,
IBullJobResponse,
ICredentialsOverwrite,
ICredentialsTypeData,
IExecutionFlattedDb,
IExecutionResponse,
IProcessMessageDataHook,
ITransferNodeTypes,
IWorkflowExecutionDataProcess,
IWorkflowExecutionDataProcessWithExecution,
NodeTypes,
Push,
ResponseHelper,
WorkflowExecuteAdditionalData,
WorkflowHelpers,
} from './';
@ -21,11 +27,12 @@ import {
} from 'n8n-core';
import {
IDataObject,
IExecutionError,
IRun,
Workflow,
WorkflowExecuteMode,
WorkflowHooks,
} from 'n8n-workflow';
import * as config from '../config';
@ -33,17 +40,26 @@ import * as PCancelable from 'p-cancelable';
import { join as pathJoin } from 'path';
import { fork } from 'child_process';
import * as Bull from 'bull';
import * as Queue from './Queue';
export class WorkflowRunner {
activeExecutions: ActiveExecutions.ActiveExecutions;
credentialsOverwrites: ICredentialsOverwrite;
push: Push.Push;
jobQueue: Bull.Queue;
constructor() {
this.push = Push.getInstance();
this.activeExecutions = ActiveExecutions.getInstance();
this.credentialsOverwrites = CredentialsOverwrites().getAll();
const executionsMode = config.get('executions.mode') as string;
if (executionsMode === 'queue') {
this.jobQueue = Queue.getInstance().getBullObjectInstance();
}
}
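For context, `Queue.getInstance().getBullObjectInstance()` above hands back a shared Bull queue. A hedged sketch of such a singleton wrapper, using Bull's real constructor but with invented queue name and connection values:

import * as Bull from 'bull';

let bullInstance: Bull.Queue | undefined;
export function getBullQueue(): Bull.Queue {
	if (bullInstance === undefined) {
		// One queue object per process; Redis settings are placeholders
		bullInstance = new Bull('jobs', { redis: { host: 'localhost', port: 6379 } });
	}
	return bullInstance;
}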
@ -85,9 +101,6 @@ export class WorkflowRunner {
// Remove from active execution with empty data. That will
// set the execution to failed.
this.activeExecutions.remove(executionId, fullRunData);
}
/**
@ -99,16 +112,33 @@ export class WorkflowRunner {
 * @returns {Promise<string>}
 * @memberof WorkflowRunner
 */
async run(data: IWorkflowExecutionDataProcess, loadStaticData?: boolean, realtime?: boolean): Promise<string> {
const executionsProcess = config.get('executions.process') as string;
const executionsMode = config.get('executions.mode') as string;
let executionId: string;
if (executionsMode === 'queue' && data.executionMode !== 'manual') {
// Do not run "manual" executions in bull because sending events to the
// frontend would not be possible
executionId = await this.runBull(data, loadStaticData, realtime);
} else if (executionsProcess === 'main') {
executionId = await this.runMainProcess(data, loadStaticData);
} else {
executionId = await this.runSubprocess(data, loadStaticData);
}
const externalHooks = ExternalHooks();
if (externalHooks.exists('workflow.postExecute')) {
this.activeExecutions.getPostExecutePromise(executionId)
.then(async (executionData) => {
await externalHooks.run('workflow.postExecute', [executionData, data.workflowData]);
})
.catch(error => {
console.error('There was a problem running hook "workflow.postExecute"', error);
});
}
return executionId;
}
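The `workflow.postExecute` lookup above means an operator-supplied hooks file can observe finished executions. A hedged sketch of such a file, loaded via n8n's external hook files configuration (the exact config/env name and payload shape are assumptions):

module.exports = {
	workflow: {
		postExecute: [
			async function (executionData, workflowData) {
				// executionData is the finished run, workflowData the workflow that ran
				console.log(`Workflow ${workflowData.id} finished`);
			},
		],
	},
};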
@ -132,9 +162,9 @@ export class WorkflowRunner {
const additionalData = await WorkflowExecuteAdditionalData.getBase(data.credentials);
// Register the active execution
const executionId = await this.activeExecutions.add(data, undefined);
additionalData.hooks = WorkflowExecuteAdditionalData.getWorkflowHooksMain(data, executionId, true);
let workflowExecution: PCancelable<IRun>;
if (data.executionData !== undefined) {
@ -179,6 +209,172 @@ export class WorkflowRunner {
return executionId;
}
async runBull(data: IWorkflowExecutionDataProcess, loadStaticData?: boolean, realtime?: boolean): Promise<string> {
// TODO: If "loadStaticData" is set to true it has to load data new on worker
// Register the active execution
const executionId = await this.activeExecutions.add(data, undefined);
const jobData: IBullJobData = {
executionId,
loadStaticData: !!loadStaticData,
};
let priority = 100;
if (realtime === true) {
// Jobs which require a direct response get a higher priority
priority = 50;
}
// TODO: For realtime jobs should probably also not do retry or not retry if they are older than x seconds.
// Check if they get retried by default and how often.
const jobOptions = {
priority,
removeOnComplete: true,
removeOnFail: true,
};
const job = await this.jobQueue.add(jobData, jobOptions);
console.log('Started with ID: ' + job.id.toString());
const hooks = WorkflowExecuteAdditionalData.getWorkflowHooksWorkerMain(data.executionMode, executionId, data.workflowData, { retryOf: data.retryOf ? data.retryOf.toString() : undefined });
// Normally the workflow would also be supplied here, but as it is only used for
// sending data to the editor UI it is not needed.
hooks.executeHookFunctions('workflowExecuteBefore', []);
const workflowExecution: PCancelable<IRun> = new PCancelable(async (resolve, reject, onCancel) => {
onCancel.shouldReject = false;
onCancel(async () => {
await Queue.getInstance().stopJob(job);
const fullRunData: IRun = {
data: {
resultData: {
error: {
message: 'Workflow has been canceled!',
} as IExecutionError,
runData: {},
},
},
mode: data.executionMode,
startedAt: new Date(),
stoppedAt: new Date(),
};
this.activeExecutions.remove(executionId, fullRunData);
resolve(fullRunData);
});
const jobData: Promise<IBullJobResponse> = job.finished();
const queueRecoveryInterval = config.get('queue.bull.queueRecoveryInterval') as number;
if (queueRecoveryInterval > 0) {
/*************************************************
* Long explanation about what this solves: *
* This only happens in a very specific scenario *
* when Redis crashes and recovers shortly *
* but during this time, some execution(s) *
* finished. The end result is that the main *
* process will wait indefinitely and never *
* get a response. This adds active polling of *
* the queue that allows us to identify that the *
* execution finished and get information from *
* the database. *
*************************************************/
let watchDogInterval: NodeJS.Timeout | undefined;
let resolved = false;
const watchDog = new Promise((res) => {
watchDogInterval = setInterval(async () => {
const currentJob = await this.jobQueue.getJob(job.id);
// A null job means it already finished (it is no longer in the queue)
if (currentJob === null) {
// Mimic worker's success message
res({success: true});
}
}, queueRecoveryInterval * 1000);
});
const clearWatchdogInterval = () => {
if (watchDogInterval) {
clearInterval(watchDogInterval);
watchDogInterval = undefined;
}
};
await new Promise((res, rej) => {
jobData.then((data) => {
if (!resolved) {
resolved = true;
clearWatchdogInterval();
res(data);
}
}).catch((e) => {
if (!resolved) {
resolved = true;
clearWatchdogInterval();
rej(e);
}
});
watchDog.then((data) => {
if (!resolved) {
resolved = true;
clearWatchdogInterval();
res(data);
}
});
});
} else {
await jobData;
}
const executionDb = await Db.collections.Execution!.findOne(executionId) as IExecutionFlattedDb;
const fullExecutionData = ResponseHelper.unflattenExecutionData(executionDb) as IExecutionResponse;
const runData = {
data: fullExecutionData.data,
finished: fullExecutionData.finished,
mode: fullExecutionData.mode,
startedAt: fullExecutionData.startedAt,
stoppedAt: fullExecutionData.stoppedAt,
} as IRun;
this.activeExecutions.remove(executionId, runData);
// Normally static data would also be supplied here, but as it is only used for
// sending data to the editor UI it is not needed.
hooks.executeHookFunctions('workflowExecuteAfter', [runData]);
try {
// Check if this execution data has to be removed from database
// based on workflow settings.
let saveDataErrorExecution = config.get('executions.saveDataOnError') as string;
let saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string;
if (data.workflowData.settings !== undefined) {
saveDataErrorExecution = (data.workflowData.settings.saveDataErrorExecution as string) || saveDataErrorExecution;
saveDataSuccessExecution = (data.workflowData.settings.saveDataSuccessExecution as string) || saveDataSuccessExecution;
}
const workflowDidSucceed = !runData.data.resultData.error;
if (workflowDidSucceed === true && saveDataSuccessExecution === 'none' ||
workflowDidSucceed === false && saveDataErrorExecution === 'none'
) {
await Db.collections.Execution!.delete(executionId);
}
} catch (err) {
// We don't want errors here to crash n8n. Just log and proceed.
console.log('Error removing saved execution from database. More details: ', err);
}
resolve(runData);
});
this.activeExecutions.attachWorkflowExecution(executionId, workflowExecution);
return executionId;
}
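runBull above only enqueues and waits; a worker elsewhere must consume the job. A minimal sketch of that consuming side, assuming the same queue instance and the IBullJobData/IBullJobResponse shapes used above (the actual worker command ships separately in this commit):

const jobQueue = Queue.getInstance().getBullObjectInstance();
jobQueue.process(async (job: Bull.Job): Promise<IBullJobResponse> => {
	// The main process persisted the execution before enqueueing, so the worker
	// can load everything it needs from the database by ID.
	const jobData = job.data as IBullJobData;
	const executionDb = await Db.collections.Execution!.findOne(jobData.executionId);
	if (executionDb === undefined) {
		throw new Error(`Execution with ID "${jobData.executionId}" could not be found`);
	}
	// ... run the workflow and update the execution record here ...
	return { success: true };
});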
/**
 * Run the workflow
 *
@ -197,7 +393,7 @@ export class WorkflowRunner {
}
// Register the active execution
const executionId = await this.activeExecutions.add(data, subprocess);
// Check if workflow contains a "executeWorkflow" Node as in this // Check if workflow contains a "executeWorkflow" Node as in this
// case we can not know which nodeTypes and credentialTypes will // case we can not know which nodeTypes and credentialTypes will
@ -212,6 +408,7 @@ export class WorkflowRunner {
let nodeTypeData: ITransferNodeTypes;
let credentialTypeData: ICredentialsTypeData;
let credentialsOverwrites = this.credentialsOverwrites;
if (loadAllNodeTypes === true) {
// Supply all nodeTypes and credentialTypes
@ -219,15 +416,22 @@ export class WorkflowRunner {
const credentialTypes = CredentialTypes();
credentialTypeData = credentialTypes.credentialTypes;
} else {
// Supply only nodeTypes, credentialTypes and overwrites that the workflow needs
nodeTypeData = WorkflowHelpers.getNodeTypeData(data.workflowData.nodes);
credentialTypeData = WorkflowHelpers.getCredentialsData(data.credentials);
credentialsOverwrites = {};
for (const credentialName of Object.keys(credentialTypeData)) {
if (this.credentialsOverwrites[credentialName] !== undefined) {
credentialsOverwrites[credentialName] = this.credentialsOverwrites[credentialName];
}
}
}
(data as unknown as IWorkflowExecutionDataProcessWithExecution).executionId = executionId;
(data as unknown as IWorkflowExecutionDataProcessWithExecution).nodeTypeData = nodeTypeData;
(data as unknown as IWorkflowExecutionDataProcessWithExecution).credentialsOverwrite = credentialsOverwrites;
(data as unknown as IWorkflowExecutionDataProcessWithExecution).credentialsTypeData = credentialTypeData; // TODO: Still needs correct value
const workflowHooks = WorkflowExecuteAdditionalData.getWorkflowHooksMain(data, executionId);
@ -253,7 +457,7 @@ export class WorkflowRunner {
// Listen to data from the subprocess
subprocess.on('message', async (message: IProcessMessage) => {
if (message.type === 'end') {
clearTimeout(executionTimeout);
this.activeExecutions.remove(executionId!, message.data.runData);
@ -270,6 +474,11 @@ export class WorkflowRunner {
const timeoutError = { message: 'Workflow execution timed out!' } as IExecutionError;
this.processError(timeoutError, startedAt, data.executionMode, executionId);
} else if (message.type === 'startExecution') {
const executionId = await this.activeExecutions.add(message.data.runData);
subprocess.send({ type: 'executionId', data: { executionId } } as IProcessMessage);
} else if (message.type === 'finishExecution') {
await this.activeExecutions.remove(message.data.executionId, message.data.result);
}
});
View file
@ -2,9 +2,12 @@
import {
CredentialsOverwrites,
CredentialTypes,
Db,
ExternalHooks,
IWorkflowExecutionDataProcessWithExecution,
NodeTypes,
WorkflowExecuteAdditionalData,
WorkflowHelpers,
} from './';
import {
@ -14,23 +17,42 @@ import {
import {
IDataObject,
IExecuteData,
IExecuteWorkflowInfo,
IExecutionError,
INodeExecutionData,
INodeType,
INodeTypeData,
IRun,
IRunExecutionData,
ITaskData,
IWorkflowExecuteAdditionalData,
IWorkflowExecuteHooks,
Workflow,
WorkflowHooks,
} from 'n8n-workflow';
import * as config from '../config';
export class WorkflowRunnerProcess {
data: IWorkflowExecutionDataProcessWithExecution | undefined;
startedAt = new Date();
workflow: Workflow | undefined;
workflowExecute: WorkflowExecute | undefined;
executionIdCallback: (executionId: string) => void | undefined;
static async stopProcess() {
setTimeout(() => {
// Attempt a graceful shutdown, giving executions 30 seconds to finish
process.exit(0);
}, 30000);
}
async runWorkflow(inputData: IWorkflowExecutionDataProcessWithExecution): Promise<IRun> {
process.on('SIGTERM', WorkflowRunnerProcess.stopProcess);
process.on('SIGINT', WorkflowRunnerProcess.stopProcess);
this.data = inputData;
let className: string;
let tempNode: INodeType;
@ -66,12 +88,54 @@ export class WorkflowRunnerProcess {
// Load the credentials overwrites if any exist
const credentialsOverwrites = CredentialsOverwrites();
await credentialsOverwrites.init(inputData.credentialsOverwrite);
// Load all external hooks
const externalHooks = ExternalHooks();
await externalHooks.init();
// This code has been split into 3 ifs just to make it easier to understand
// Can be made smaller but in the end it will make it impossible to read.
if (inputData.workflowData.settings !== undefined && inputData.workflowData.settings.saveExecutionProgress === true) {
// Workflow settings specifying it should save
await Db.init();
} else if (inputData.workflowData.settings !== undefined && inputData.workflowData.settings.saveExecutionProgress !== false && config.get('executions.saveExecutionProgress') as boolean) {
// Workflow settings not saying anything about saving but default settings says so
await Db.init();
} else if (inputData.workflowData.settings === undefined && config.get('executions.saveExecutionProgress') as boolean) {
// Workflow settings not saying anything about saving but default settings says so
await Db.init();
}
this.workflow = new Workflow({ id: this.data.workflowData.id as string | undefined, name: this.data.workflowData.name, nodes: this.data.workflowData!.nodes, connections: this.data.workflowData!.connections, active: this.data.workflowData!.active, nodeTypes, staticData: this.data.workflowData!.staticData, settings: this.data.workflowData!.settings });
const additionalData = await WorkflowExecuteAdditionalData.getBase(this.data.credentials);
additionalData.hooks = this.getProcessForwardHooks();
const executeWorkflowFunction = additionalData.executeWorkflow;
additionalData.executeWorkflow = async (workflowInfo: IExecuteWorkflowInfo, additionalData: IWorkflowExecuteAdditionalData, inputData?: INodeExecutionData[] | undefined): Promise<Array<INodeExecutionData[] | null> | IRun> => {
const workflowData = await WorkflowExecuteAdditionalData.getWorkflowData(workflowInfo);
const runData = await WorkflowExecuteAdditionalData.getRunData(workflowData, inputData);
await sendToParentProcess('startExecution', { runData });
const executionId: string = await new Promise((resolve) => {
this.executionIdCallback = (executionId: string) => {
resolve(executionId);
};
});
let result: IRun;
try {
result = await executeWorkflowFunction(workflowInfo, additionalData, inputData, executionId, workflowData, runData);
} catch (e) {
await sendToParentProcess('finishExecution', { executionId });
// Throw same error we had
throw e;
}
await sendToParentProcess('finishExecution', { executionId, result });
const returnData = WorkflowHelpers.getDataLastExecutedNodeData(result);
return returnData!.data!.main;
};
if (this.data.executionData !== undefined) {
this.workflowExecute = new WorkflowExecute(additionalData, this.data.executionMode, this.data.executionData);
return this.workflowExecute.processRunExecutionData(this.workflow);
@ -121,7 +185,7 @@ export class WorkflowRunnerProcess {
 * @returns
 */
getProcessForwardHooks(): WorkflowHooks {
const hookFunctions: IWorkflowExecuteHooks = {
nodeExecuteBefore: [
async (nodeName: string): Promise<void> => {
this.sendHookToParentProcess('nodeExecuteBefore', [nodeName]);
@ -135,15 +199,23 @@ export class WorkflowRunnerProcess {
workflowExecuteBefore: [
async (): Promise<void> => {
this.sendHookToParentProcess('workflowExecuteBefore', []);
},
],
workflowExecuteAfter: [
async (fullRunData: IRun, newStaticData?: IDataObject): Promise<void> => {
this.sendHookToParentProcess('workflowExecuteAfter', [fullRunData, newStaticData]);
},
],
};
const preExecuteFunctions = WorkflowExecuteAdditionalData.hookFunctionsPreExecute();
for (const key of Object.keys(preExecuteFunctions)) {
if (hookFunctions[key] === undefined) {
hookFunctions[key] = [];
}
hookFunctions[key]!.push.apply(hookFunctions[key], preExecuteFunctions[key]);
}
return new WorkflowHooks(hookFunctions, this.data!.executionMode, this.data!.executionId, this.data!.workflowData, { sessionId: this.data!.sessionId, retryOf: this.data!.retryOf as string });
}
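The startExecution/executionId/finishExecution messages in this file form a small IPC handshake with the parent process. A self-contained sketch of the child-process side of that pattern, using Node's child-process IPC (message shapes assumed to match the diff):

function requestExecutionId(runData: object): Promise<string> {
	return new Promise((resolve) => {
		const listener = (msg: { type: string; data: { executionId: string } }) => {
			if (msg.type === 'executionId') {
				process.removeListener('message', listener);
				resolve(msg.data.executionId);
			}
		};
		process.on('message', listener);
		// Ask the parent to register the execution and hand back its ID
		process.send!({ type: 'startExecution', data: { runData } });
	});
}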
@ -225,6 +297,8 @@ process.on('message', async (message: IProcessMessage) => {
// Stop process
process.exit();
} else if (message.type === 'executionId') {
workflowRunner.executionIdCallback(message.data.executionId);
}
} catch (error) {
// Catch all uncaught errors and forward them to parent process
View file
@ -1,10 +1,8 @@
import * as PostgresDb from './postgresdb';
import * as SQLite from './sqlite';
import * as MySQLDb from './mysqldb';
export {
PostgresDb,
SQLite,
MySQLDb,
View file
@ -1,41 +0,0 @@
import {
ICredentialNodeAccess,
} from 'n8n-workflow';
import {
ICredentialsDb,
} from '../../';
import {
Column,
Entity,
Index,
ObjectID,
ObjectIdColumn,
} from 'typeorm';
@Entity()
export class CredentialsEntity implements ICredentialsDb {
@ObjectIdColumn()
id: ObjectID;
@Column()
name: string;
@Column()
data: string;
@Index()
@Column()
type: string;
@Column('json')
nodesAccess: ICredentialNodeAccess[];
@Column('Date')
createdAt: Date;
@Column('Date')
updatedAt: Date;
}
View file
@ -1,52 +0,0 @@
import {
WorkflowExecuteMode,
} from 'n8n-workflow';
import {
IExecutionFlattedDb,
IWorkflowDb,
} from '../../';
import {
Column,
Entity,
Index,
ObjectID,
ObjectIdColumn,
} from 'typeorm';
@Entity()
export class ExecutionEntity implements IExecutionFlattedDb {
@ObjectIdColumn()
id: ObjectID;
@Column()
data: string;
@Column()
finished: boolean;
@Column()
mode: WorkflowExecuteMode;
@Column()
retryOf: string;
@Column()
retrySuccessId: string;
@Column('Date')
startedAt: Date;
@Index()
@Column('Date')
stoppedAt: Date;
@Column('json')
workflowData: IWorkflowDb;
@Index()
@Column()
workflowId: string;
}
View file
@ -1,30 +0,0 @@
import {
Column,
Entity,
Index,
ObjectID,
ObjectIdColumn,
} from 'typeorm';
import {
IWebhookDb,
} from '../../Interfaces';
@Entity()
export class WebhookEntity implements IWebhookDb {
@ObjectIdColumn()
id: ObjectID;
@Column()
workflowId: number;
@Column()
webhookPath: string;
@Column()
method: string;
@Column()
node: string;
}
View file
@ -1,48 +0,0 @@
import {
IConnections,
IDataObject,
INode,
IWorkflowSettings,
} from 'n8n-workflow';
import {
IWorkflowDb,
} from '../../';
import {
Column,
Entity,
ObjectID,
ObjectIdColumn,
} from 'typeorm';
@Entity()
export class WorkflowEntity implements IWorkflowDb {
@ObjectIdColumn()
id: ObjectID;
@Column()
name: string;
@Column()
active: boolean;
@Column('json')
nodes: INode[];
@Column('json')
connections: IConnections;
@Column('Date')
createdAt: Date;
@Column('Date')
updatedAt: Date;
@Column('json')
settings?: IWorkflowSettings;
@Column('json')
staticData?: IDataObject;
}
View file
@ -1,5 +0,0 @@
export * from './CredentialsEntity';
export * from './ExecutionEntity';
export * from './WorkflowEntity';
export * from './WebhookEntity';
View file
@ -1,22 +0,0 @@
import { MigrationInterface } from "typeorm";
import {
MongoQueryRunner,
} from 'typeorm/driver/mongodb/MongoQueryRunner';
import * as config from '../../../../config';
export class CreateIndexStoppedAt1594910478695 implements MigrationInterface {
name = 'CreateIndexStoppedAt1594910478695';
async up(queryRunner: MongoQueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.manager.createCollectionIndex(`${tablePrefix}execution_entity`, 'stoppedAt', { name: `IDX_${tablePrefix}execution_entity_stoppedAt` });
}
async down(queryRunner: MongoQueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.manager.dropCollectionIndex(`${tablePrefix}execution_entity`, `IDX_${tablePrefix}execution_entity_stoppedAt`);
}
}
View file
@ -1,11 +0,0 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
export class InitialMigration1587563438936 implements MigrationInterface {
async up(queryRunner: QueryRunner): Promise<void> {
}
async down(queryRunner: QueryRunner): Promise<void> {
}
}
View file
@ -1,57 +0,0 @@
import {
MigrationInterface,
} from 'typeorm';
import {
IWorkflowDb,
NodeTypes,
WebhookHelpers,
} from '../../..';
import {
Workflow,
} from 'n8n-workflow/dist/src/Workflow';
import {
IWebhookDb,
} from '../../../Interfaces';
import * as config from '../../../../config';
import {
MongoQueryRunner,
} from 'typeorm/driver/mongodb/MongoQueryRunner';
export class WebhookModel1592679094242 implements MigrationInterface {
name = 'WebhookModel1592679094242';
async up(queryRunner: MongoQueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
const workflows = await queryRunner.cursor( `${tablePrefix}workflow_entity`, { active: true }).toArray() as IWorkflowDb[];
const data: IWebhookDb[] = [];
const nodeTypes = NodeTypes();
for (const workflow of workflows) {
const workflowInstance = new Workflow({ id: workflow.id as string, name: workflow.name, nodes: workflow.nodes, connections: workflow.connections, active: workflow.active, nodeTypes, staticData: workflow.staticData, settings: workflow.settings });
const webhooks = WebhookHelpers.getWorkflowWebhooksBasic(workflowInstance);
for (const webhook of webhooks) {
data.push({
workflowId: workflowInstance.id as string,
webhookPath: webhook.path,
method: webhook.httpMethod,
node: webhook.node,
});
}
}
if (data.length !== 0) {
await queryRunner.manager.insertMany(`${tablePrefix}webhook_entity`, data);
}
await queryRunner.manager.createCollectionIndex(`${tablePrefix}webhook_entity`, ['webhookPath', 'method'], { unique: true, background: false });
}
async down(queryRunner: MongoQueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.dropTable(`${tablePrefix}webhook_entity`);
}
}
View file
@ -1,3 +0,0 @@
export * from './1587563438936-InitialMigration';
export * from './1592679094242-WebhookModel';
export * from './151594910478695-CreateIndexStoppedAt';
View file
@ -20,7 +20,7 @@ export class CredentialsEntity implements ICredentialsDb {
id: number;
@Column({
length: 128,
})
name: string;
@ -29,7 +29,7 @@ export class CredentialsEntity implements ICredentialsDb {
@Index()
@Column({
length: 32,
})
type: string;
View file
@ -40,7 +40,7 @@ export class ExecutionEntity implements IExecutionFlattedDb {
startedAt: Date;
@Index()
@Column('datetime', { nullable: true })
stoppedAt: Date;
@Column('json')
View file
@ -1,6 +1,7 @@
import {
Column,
Entity,
Index,
PrimaryColumn,
} from 'typeorm';
@ -9,6 +10,7 @@ import {
} from '../../Interfaces';
@Entity()
@Index(['webhookId', 'method', 'pathLength'])
export class WebhookEntity implements IWebhookDb {
@Column()
@ -22,4 +24,10 @@ export class WebhookEntity implements IWebhookDb {
@Column()
node: string;
@Column({ nullable: true })
webhookId: string;
@Column({ nullable: true })
pathLength: number;
}
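The composite index declared above exists to serve the hot lookup path for incoming webhook requests. A hedged sketch of such a query through the repository (filter values invented; find API per TypeORM):

const candidates = await Db.collections.Webhook!.find({
	where: { webhookId: 'abc123', method: 'POST', pathLength: 2 },
});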
View file
@ -22,7 +22,7 @@ export class WorkflowEntity implements IWorkflowDb {
id: number;
@Column({
length: 128,
})
name: string;
View file
@ -5,20 +5,6 @@ import {
import * as config from '../../../../config';
export class WebhookModel1592447867632 implements MigrationInterface {
name = 'WebhookModel1592447867632';
@ -26,30 +12,6 @@ export class WebhookModel1592447867632 implements MigrationInterface {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity (workflowId int NOT NULL, webhookPath varchar(255) NOT NULL, method varchar(255) NOT NULL, node varchar(255) NOT NULL, PRIMARY KEY (webhookPath, method)) ENGINE=InnoDB`);
}
async down(queryRunner: QueryRunner): Promise<void> {
View file
@ -0,0 +1,17 @@
import { MigrationInterface, QueryRunner } from "typeorm";
import * as config from '../../../../config';
export class MakeStoppedAtNullable1607431743767 implements MigrationInterface {
async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY `stoppedAt` datetime', undefined);
}
async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY `stoppedAt` datetime NOT NULL', undefined);
}
}
View file
@ -0,0 +1,24 @@
import {MigrationInterface, QueryRunner} from "typeorm";
import * as config from '../../../../config';
export class AddWebhookId1611149998770 implements MigrationInterface {
name = 'AddWebhookId1611149998770';
async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'webhook_entity` ADD `webhookId` varchar(255) NULL');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'webhook_entity` ADD `pathLength` int NULL');
await queryRunner.query('CREATE INDEX `IDX_' + tablePrefix + '742496f199721a057051acf4c2` ON `' + tablePrefix + 'webhook_entity` (`webhookId`, `method`, `pathLength`)');
}
async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query(
'DROP INDEX `IDX_' + tablePrefix + '742496f199721a057051acf4c2` ON `' + tablePrefix + 'webhook_entity`'
);
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'webhook_entity` DROP COLUMN `pathLength`');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'webhook_entity` DROP COLUMN `webhookId`');
}
}
View file
@ -0,0 +1,18 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';
export class ChangeDataSize1615306975123 implements MigrationInterface {
name = 'ChangeDataSize1615306975123';
async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY COLUMN `data` MEDIUMTEXT NOT NULL');
}
async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY COLUMN `data` TEXT NOT NULL');
}
}
View file
@ -1,3 +1,15 @@
import { InitialMigration1588157391238 } from './1588157391238-InitialMigration';
import { WebhookModel1592447867632 } from './1592447867632-WebhookModel';
import { CreateIndexStoppedAt1594902918301 } from './1594902918301-CreateIndexStoppedAt';
import { AddWebhookId1611149998770 } from './1611149998770-AddWebhookId';
import { MakeStoppedAtNullable1607431743767 } from './1607431743767-MakeStoppedAtNullable';
import { ChangeDataSize1615306975123 } from './1615306975123-ChangeDataSize';
export const mysqlMigrations = [
InitialMigration1588157391238,
WebhookModel1592447867632,
CreateIndexStoppedAt1594902918301,
AddWebhookId1611149998770,
MakeStoppedAtNullable1607431743767,
ChangeDataSize1615306975123,
];
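A hedged sketch of where an exported array like `mysqlMigrations` ends up: it is handed to TypeORM when the connection is created, so pending migrations can run at startup (option names per TypeORM 0.2; connection values are placeholders):

import { createConnection } from 'typeorm';

async function initDb() {
	await createConnection({
		type: 'mysql',
		host: 'localhost',
		port: 3306,
		username: 'n8n',
		password: 'secret',
		database: 'n8n',
		migrations: mysqlMigrations,
		migrationsRun: true,  // execute pending migrations on startup
	});
}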
View file
@ -20,7 +20,7 @@ export class CredentialsEntity implements ICredentialsDb {
id: number;
@Column({
length: 128,
})
name: string;
@ -29,7 +29,7 @@ export class CredentialsEntity implements ICredentialsDb {
@Index()
@Column({
length: 32,
})
type: string;
View file
@ -40,7 +40,7 @@ export class ExecutionEntity implements IExecutionFlattedDb {
startedAt: Date;
@Index()
@Column('timestamp', { nullable: true })
stoppedAt: Date;
@Column('json')
View file
@ -1,6 +1,7 @@
import {
Column,
Entity,
Index,
PrimaryColumn,
} from 'typeorm';
@ -9,6 +10,7 @@ import {
} from '../../';
@Entity()
@Index(['webhookId', 'method', 'pathLength'])
export class WebhookEntity implements IWebhookDb {
@Column()
@ -22,4 +24,10 @@ export class WebhookEntity implements IWebhookDb {
@Column()
node: string;
@Column({ nullable: true })
webhookId: string;
@Column({ nullable: true })
pathLength: number;
}
View file
@ -22,7 +22,7 @@ export class WorkflowEntity implements IWorkflowDb {
id: number;
@Column({
length: 128,
})
name: string;
View file
@ -3,20 +3,6 @@ import {
QueryRunner,
} from 'typeorm';
import * as config from '../../../../config';
export class WebhookModel1589476000887 implements MigrationInterface {
@ -30,31 +16,7 @@ export class WebhookModel1589476000887 implements MigrationInterface {
tablePrefix = schema + '.' + tablePrefix;
}
await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity ("workflowId" integer NOT NULL, "webhookPath" character varying NOT NULL, "method" character varying NOT NULL, "node" character varying NOT NULL, CONSTRAINT "PK_${tablePrefixIndex}b21ace2e13596ccd87dc9bf4ea6" PRIMARY KEY ("webhookPath", "method"))`, undefined);
}
async down(queryRunner: QueryRunner): Promise<void> {
View file
@ -0,0 +1,21 @@
import {MigrationInterface, QueryRunner} from "typeorm";
import * as config from '../../../../config';
export class MakeStoppedAtNullable1607431743768 implements MigrationInterface {
name = 'MakeStoppedAtNullable1607431743768';
async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix');
const schema = config.get('database.postgresdb.schema');
if (schema) {
tablePrefix = schema + '.' + tablePrefix;
}
await queryRunner.query('ALTER TABLE ' + tablePrefix + 'execution_entity ALTER COLUMN "stoppedAt" DROP NOT NULL', undefined);
}
async down(queryRunner: QueryRunner): Promise<void> {
// Cannot be undone as column might already have null values
}
}
View file
@@ -0,0 +1,33 @@
+import {MigrationInterface, QueryRunner} from "typeorm";
+
+import * as config from '../../../../config';
+
+export class AddWebhookId1611144599516 implements MigrationInterface {
+	name = 'AddWebhookId1611144599516';
+
+	async up(queryRunner: QueryRunner): Promise<void> {
+		let tablePrefix = config.get('database.tablePrefix');
+		const tablePrefixPure = tablePrefix;
+		const schema = config.get('database.postgresdb.schema');
+		if (schema) {
+			tablePrefix = schema + '.' + tablePrefix;
+		}
+
+		await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity ADD "webhookId" character varying`);
+		await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity ADD "pathLength" integer`);
+		await queryRunner.query(`CREATE INDEX IF NOT EXISTS IDX_${tablePrefixPure}16f4436789e804e3e1c9eeb240 ON ${tablePrefix}webhook_entity ("webhookId", "method", "pathLength") `);
+	}
+
+	async down(queryRunner: QueryRunner): Promise<void> {
+		let tablePrefix = config.get('database.tablePrefix');
+		const tablePrefixPure = tablePrefix;
+		const schema = config.get('database.postgresdb.schema');
+		if (schema) {
+			tablePrefix = schema + '.' + tablePrefix;
+		}
+
+		await queryRunner.query(`DROP INDEX IDX_${tablePrefixPure}16f4436789e804e3e1c9eeb240`);
+		await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity DROP COLUMN "pathLength"`);
+		await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity DROP COLUMN "webhookId"`);
+	}
+}


@@ -1,4 +1,13 @@
-export * from './1587669153312-InitialMigration';
-export * from './1589476000887-WebhookModel';
-export * from './1594828256133-CreateIndexStoppedAt';
+import { InitialMigration1587669153312 } from './1587669153312-InitialMigration';
+import { WebhookModel1589476000887 } from './1589476000887-WebhookModel';
+import { CreateIndexStoppedAt1594828256133 } from './1594828256133-CreateIndexStoppedAt';
+import { AddWebhookId1611144599516 } from './1611144599516-AddWebhookId';
+import { MakeStoppedAtNullable1607431743768 } from './1607431743768-MakeStoppedAtNullable';
+
+export const postgresMigrations = [
+	InitialMigration1587669153312,
+	WebhookModel1589476000887,
+	CreateIndexStoppedAt1594828256133,
+	AddWebhookId1611144599516,
+	MakeStoppedAtNullable1607431743768,
+];
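
Compared with `export *`, the named array keeps the complete migration set and its intended order visible in one place. A sketch of how such an array is typically consumed when building the connection, with the wrapper function and import path assumed rather than taken from this commit:

import { createConnection } from 'typeorm';
import { postgresMigrations } from './databases/postgresdb/migrations'; // path assumed

async function connect() {
	return createConnection({
		type: 'postgres',
		// host, credentials and entities elided
		migrations: postgresMigrations, // the explicit list replaces the old star re-exports
		migrationsRun: true,            // apply pending migrations on startup
	});
}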


@@ -20,7 +20,7 @@ export class CredentialsEntity implements ICredentialsDb {
 	id: number;
 
 	@Column({
-		length: 128
+		length: 128,
 	})
 	name: string;
@@ -29,7 +29,7 @@ export class CredentialsEntity implements ICredentialsDb {
 	@Index()
 	@Column({
-		length: 32
+		length: 32,
 	})
 	type: string;


@@ -40,7 +40,7 @@ export class ExecutionEntity implements IExecutionFlattedDb {
 	startedAt: Date;
 
 	@Index()
-	@Column()
+	@Column({ nullable: true })
 	stoppedAt: Date;
 
 	@Column('simple-json')


@@ -1,6 +1,7 @@
 import {
 	Column,
 	Entity,
+	Index,
 	PrimaryColumn,
 } from 'typeorm';
@@ -9,6 +10,7 @@ import {
 } from '../../Interfaces';
 
 @Entity()
+@Index(['webhookId', 'method', 'pathLength'])
 export class WebhookEntity implements IWebhookDb {
 
 	@Column()
@@ -22,4 +24,10 @@ export class WebhookEntity implements IWebhookDb {
 	@Column()
 	node: string;
+
+	@Column({ nullable: true })
+	webhookId: string;
+
+	@Column({ nullable: true })
+	pathLength: number;
 }
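
The new `webhookId` and `pathLength` columns plus the composite index point at lookups that can no longer key on the full path, e.g. paths containing parameters. A sketch of such a lookup; the repository handle and the segment-count convention are assumptions for illustration, not taken from this diff:

import { Repository } from 'typeorm';
import { WebhookEntity } from './WebhookEntity'; // path assumed

// Assumption: pathLength stores the number of path segments, so a
// concrete request path can be matched against a registered dynamic
// path without knowing the parameter values.
async function findDynamicWebhook(repo: Repository<WebhookEntity>, webhookId: string, method: string, requestPath: string) {
	const pathLength = requestPath.split('/').length;
	return repo.findOne({ where: { webhookId, method, pathLength } });
}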


@@ -22,7 +22,7 @@ export class WorkflowEntity implements IWorkflowDb {
 	id: number;
 
 	@Column({
-		length: 128
+		length: 128,
 	})
 	name: string;


@@ -5,20 +5,6 @@ import {
 import * as config from '../../../../config';
 
-import {
-	IWorkflowDb,
-	NodeTypes,
-	WebhookHelpers,
-} from '../../..';
-
-import {
-	Workflow,
-} from 'n8n-workflow';
-
-import {
-	IWebhookDb,
-} from '../../../Interfaces';
-
 export class WebhookModel1592445003908 implements MigrationInterface {
 	name = 'WebhookModel1592445003908';
@@ -26,34 +12,6 @@ export class WebhookModel1592445003908 implements MigrationInterface {
 		const tablePrefix = config.get('database.tablePrefix');
 
 		await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, PRIMARY KEY ("webhookPath", "method"))`);
-
-		const workflows = await queryRunner.query(`SELECT * FROM ${tablePrefix}workflow_entity WHERE active=true`) as IWorkflowDb[];
-		const data: IWebhookDb[] = [];
-		const nodeTypes = NodeTypes();
-
-		for (const workflow of workflows) {
-			workflow.nodes = JSON.parse(workflow.nodes as unknown as string);
-			workflow.connections = JSON.parse(workflow.connections as unknown as string);
-			workflow.staticData = JSON.parse(workflow.staticData as unknown as string);
-			workflow.settings = JSON.parse(workflow.settings as unknown as string);
-			const workflowInstance = new Workflow({ id: workflow.id as string, name: workflow.name, nodes: workflow.nodes, connections: workflow.connections, active: workflow.active, nodeTypes, staticData: workflow.staticData, settings: workflow.settings });
-
-			const webhooks = WebhookHelpers.getWorkflowWebhooksBasic(workflowInstance);
-
-			for (const webhook of webhooks) {
-				data.push({
-					workflowId: workflowInstance.id as string,
-					webhookPath: webhook.path,
-					method: webhook.httpMethod,
-					node: webhook.node,
-				});
-			}
-		}
-
-		if (data.length !== 0) {
-			await queryRunner.manager.createQueryBuilder()
-				.insert()
-				.into(`${tablePrefix}webhook_entity`)
-				.values(data)
-				.execute();
-		}
 	}
 
 	async down(queryRunner: QueryRunner): Promise<void> {


@@ -8,7 +8,7 @@ export class CreateIndexStoppedAt1594825041918 implements MigrationInterface {
 	async up(queryRunner: QueryRunner): Promise<void> {
 		const tablePrefix = config.get('database.tablePrefix');
 
-		await queryRunner.query(`CREATE INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1" ON "execution_entity" ("stoppedAt") `);
+		await queryRunner.query(`CREATE INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1" ON "${tablePrefix}execution_entity" ("stoppedAt") `);
 	}
 
 	async down(queryRunner: QueryRunner): Promise<void> {


@@ -0,0 +1,23 @@
+import {MigrationInterface, QueryRunner} from "typeorm";
+
+import * as config from '../../../../config';
+
+export class MakeStoppedAtNullable1607431743769 implements MigrationInterface {
+
+	async up(queryRunner: QueryRunner): Promise<void> {
+		const tablePrefix = config.get('database.tablePrefix');
+
+		// SQLite does not allow us to simply "alter column"
+		// We're hacking the way sqlite identifies tables
+		// Allowing a column to become nullable
+		// This is a very strict case when this can be done safely
+		// As no collateral effects exist.
+		await queryRunner.query(`PRAGMA writable_schema = 1; `, undefined);
+		await queryRunner.query(`UPDATE SQLITE_MASTER SET SQL = 'CREATE TABLE IF NOT EXISTS "${tablePrefix}execution_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" varchar NOT NULL, "retryOf" varchar, "retrySuccessId" varchar, "startedAt" datetime NOT NULL, "stoppedAt" datetime, "workflowData" text NOT NULL, "workflowId" varchar)' WHERE NAME = "${tablePrefix}execution_entity";`, undefined);
+		await queryRunner.query(`PRAGMA writable_schema = 0;`, undefined);
+	}
+
+	async down(queryRunner: QueryRunner): Promise<void> {
+		// This cannot be undone as the table might already have nullable values
+	}
+}
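
Because this trick edits the schema text stored in SQLITE_MASTER rather than rebuilding the table, a sanity check afterwards is cheap and worthwhile. A sketch reusing the queryRunner and tablePrefix from the migration above; the check itself is not part of this commit:

// After the migration, table_info should report notnull = 0 for stoppedAt.
const columns = await queryRunner.query(`PRAGMA table_info("${tablePrefix}execution_entity");`) as Array<{ name: string; notnull: number }>;
const stoppedAt = columns.find((column) => column.name === 'stoppedAt');
console.log(stoppedAt?.notnull); // expected: 0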


@@ -0,0 +1,26 @@
+import {MigrationInterface, QueryRunner} from "typeorm";
+
+import * as config from '../../../../config';
+
+export class AddWebhookId1611071044839 implements MigrationInterface {
+	name = 'AddWebhookId1611071044839';
+
+	async up(queryRunner: QueryRunner): Promise<void> {
+		const tablePrefix = config.get('database.tablePrefix');
+
+		await queryRunner.query(`CREATE TABLE "temporary_webhook_entity" ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, "webhookId" varchar, "pathLength" integer, PRIMARY KEY ("webhookPath", "method"))`);
+		await queryRunner.query(`INSERT INTO "temporary_webhook_entity"("workflowId", "webhookPath", "method", "node") SELECT "workflowId", "webhookPath", "method", "node" FROM "${tablePrefix}webhook_entity"`);
+		await queryRunner.query(`DROP TABLE "${tablePrefix}webhook_entity"`);
+		await queryRunner.query(`ALTER TABLE "temporary_webhook_entity" RENAME TO "${tablePrefix}webhook_entity"`);
+		await queryRunner.query(`CREATE INDEX "IDX_${tablePrefix}742496f199721a057051acf4c2" ON "${tablePrefix}webhook_entity" ("webhookId", "method", "pathLength") `);
+	}
+
+	async down(queryRunner: QueryRunner): Promise<void> {
+		const tablePrefix = config.get('database.tablePrefix');
+
+		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}742496f199721a057051acf4c2"`);
+		await queryRunner.query(`ALTER TABLE "${tablePrefix}webhook_entity" RENAME TO "temporary_webhook_entity"`);
+		await queryRunner.query(`CREATE TABLE "${tablePrefix}webhook_entity" ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, PRIMARY KEY ("webhookPath", "method"))`);
+		await queryRunner.query(`INSERT INTO "${tablePrefix}webhook_entity"("workflowId", "webhookPath", "method", "node") SELECT "workflowId", "webhookPath", "method", "node" FROM "temporary_webhook_entity"`);
+		await queryRunner.query(`DROP TABLE "temporary_webhook_entity"`);
+	}
+}


@@ -1,3 +1,13 @@
-export * from './1588102412422-InitialMigration';
-export * from './1592445003908-WebhookModel';
-export * from './1594825041918-CreateIndexStoppedAt';
+import { InitialMigration1588102412422 } from './1588102412422-InitialMigration';
+import { WebhookModel1592445003908 } from './1592445003908-WebhookModel';
+import { CreateIndexStoppedAt1594825041918 } from './1594825041918-CreateIndexStoppedAt';
+import { AddWebhookId1611071044839 } from './1611071044839-AddWebhookId';
+import { MakeStoppedAtNullable1607431743769 } from './1607431743769-MakeStoppedAtNullable';
+
+export const sqliteMigrations = [
+	InitialMigration1588102412422,
+	WebhookModel1592445003908,
+	CreateIndexStoppedAt1594825041918,
+	AddWebhookId1611071044839,
+	MakeStoppedAtNullable1607431743769,
+];


@@ -17,6 +17,7 @@ import * as ResponseHelper from './ResponseHelper';
 import * as Server from './Server';
 import * as TestWebhooks from './TestWebhooks';
 import * as WebhookHelpers from './WebhookHelpers';
+import * as WebhookServer from './WebhookServer';
 import * as WorkflowExecuteAdditionalData from './WorkflowExecuteAdditionalData';
 import * as WorkflowHelpers from './WorkflowHelpers';
 
 export {
@@ -29,6 +30,7 @@ export {
 	Server,
 	TestWebhooks,
 	WebhookHelpers,
+	WebhookServer,
 	WorkflowExecuteAdditionalData,
 	WorkflowHelpers,
 };


@@ -1,9 +1,9 @@
 <html>
 <script>
 	(function messageParent() {
 		window.opener.postMessage('success', '*');
 	}());
 </script>
 Got connected. The window can be closed now.
 </html>
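
This callback page only posts the literal string 'success' to the window that opened it; the receiving side is not shown in this diff. A sketch of what the opener's listener presumably looks like, with the origin check and comments illustrative rather than taken from this commit:

window.addEventListener('message', (event: MessageEvent) => {
	// The popup posts with targetOrigin '*', so the opener should
	// verify the sender before trusting the message.
	if (event.origin !== window.location.origin) {
		return;
	}
	if (event.data === 'success') {
		// e.g. mark the credential as connected and close the popup
	}
});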

Some files were not shown because too many files have changed in this diff.