🔀 Sync master

ricardo 2021-04-01 21:53:11 -04:00
commit 8db3734794
1619 changed files with 152921 additions and 16926 deletions

.dockerignore

@@ -0,0 +1,3 @@
node_modules
packages/*/node_modules
packages/*/dist


@@ -0,0 +1,49 @@
name: Docker Nightly Image CI
on:
schedule:
- cron: "0 1 * * *"
workflow_dispatch:
inputs:
branch:
description: 'Name of the GitHub branch to create the image from.'
required: true
default: 'master'
tag:
description: 'Name of the docker tag to create.'
required: true
default: 'nightly'
jobs:
build:
runs-on: ubuntu-latest
steps:
-
name: Checkout
uses: actions/checkout@v2
with:
ref: ${{ github.event.inputs.branch || 'master' }}
-
name: Set up QEMU
uses: docker/setup-qemu-action@v1
-
name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
-
name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
-
name: Build and push
uses: docker/build-push-action@v2
with:
context: .
file: ./docker/images/n8n-custom/Dockerfile
platforms: linux/amd64
push: true
tags: n8nio/n8n:${{ github.event.inputs.tag || 'nightly' }}
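Since this workflow adds a `workflow_dispatch` trigger with `branch` and `tag` inputs, it can also be started by hand. A minimal sketch using the GitHub CLI (assuming `gh` is installed and authenticated; the workflow is referenced by its display name, since the file path is not shown in this diff):
```
# Trigger the nightly image build manually, overriding both inputs
gh workflow run "Docker Nightly Image CI" -f branch=master -f tag=nightly
```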


@@ -4,6 +4,12 @@ on:
push:
tags:
- n8n@*
workflow_dispatch:
inputs:
version:
description: 'n8n version to build docker image for.'
required: true
default: '0.112.0'
jobs:
armv7_job:
@@ -19,14 +25,16 @@ jobs:
run: docker login -u ${{ secrets.DOCKER_USERNAME }} -p ${{ secrets.DOCKER_PASSWORD }}
- name: Set up Docker Buildx
uses: crazy-max/ghaction-docker-buildx@v1
uses: crazy-max/ghaction-docker-buildx@v3
with:
version: latest
buildx-version: latest
qemu-version: latest
- name: Run Buildx (push image)
if: success()
run: |
docker buildx build --platform linux/arm/v7 --build-arg N8N_VERSION=${{steps.vars.outputs.tag}} -t n8nio/n8n:${{steps.vars.outputs.tag}}-rpi --output type=image,push=true docker/images/n8n-rpi
- name: Tag Docker image with latest
run: docker tag n8nio/n8n:${{steps.vars.outputs.tag}}-rpi n8nio/n8n:latest-rpi
- name: Push docker images of latest
run: docker push n8nio/n8n:latest-rpi
docker buildx build \
--platform linux/arm/v7 \
--build-arg N8N_VERSION=${{github.event.inputs.version || steps.vars.outputs.tag}} \
-t ${{ secrets.DOCKER_USERNAME }}/n8n:${{github.event.inputs.version || steps.vars.outputs.tag}}-rpi \
-t ${{ secrets.DOCKER_USERNAME }}/n8n:latest-rpi \
--output type=image,push=true docker/images/n8n-rpi


@@ -28,7 +28,11 @@ jobs:
- name: Push docker images of latest
run: docker push n8nio/n8n:latest
- name: Build the Docker image of version (Ubuntu)
run: docker build --build-arg N8N_VERSION=${{steps.vars.outputs.tag}} -t n8nio/n8n:${{steps.vars.outputs.tag}}-ubuntu docker/images/n8n-ubuntu
- name: Push Docker image of version (Ubuntu)
run: docker push n8nio/n8n:${{steps.vars.outputs.tag}}-ubuntu
- name: Build the Docker image of version (Debian)
run: docker build --build-arg N8N_VERSION=${{steps.vars.outputs.tag}} -t n8nio/n8n:${{steps.vars.outputs.tag}}-debian docker/images/n8n-debian
- name: Push Docker image of version (Debian)
run: docker push n8nio/n8n:${{steps.vars.outputs.tag}}-debian
- name: Tag Docker image with latest (Debian)
run: docker tag n8nio/n8n:${{steps.vars.outputs.tag}}-debian n8nio/n8n:latest-debian
- name: Push docker images of latest (Debian)
run: docker push n8nio/n8n:latest-debian


@@ -9,7 +9,7 @@ jobs:
strategy:
matrix:
node-version: [10.x, 12.x, 14.x]
node-version: [12.x, 14.x]
steps:
- uses: actions/checkout@v1
@@ -23,5 +23,6 @@ jobs:
npm run bootstrap
npm run build --if-present
npm test
npm run tslint
env:
CI: true


@@ -30,7 +30,7 @@ n8n is split up in different modules which are all in a single mono repository.
The most important directories:
- [/docker/image](/docker/image) - Dockerfiles to create n8n containers
- [/docker/image](/docker/images) - Dockerfiles to create n8n containers
- [/docker/compose](/docker/compose) - Example Docker Setups
- [/packages](/packages) - The different n8n modules
- [/packages/cli](/packages/cli) - CLI code to run front- & backend
@@ -57,11 +57,16 @@ dependencies are installed and the packages get linked correctly. Here a short g
The packages which n8n uses depend on a few build tools:
Linux:
Debian/Ubuntu:
```
apt-get install -y build-essential python
```
CentOS:
```
yum install gcc gcc-c++ make
```
Windows:
```
npm install -g windows-build-tools
@@ -119,6 +124,10 @@ To start n8n execute:
npm run start
```
To start n8n with tunnel:
```
./packages/cli/bin/n8n start --tunnel
```
## Development Cycle
@@ -213,23 +222,7 @@ If you'd like to submit a new node, please go through the following checklist. T
## Extend Documentation
All the files which get used in the n8n documentation on [https://docs.n8n.io](https://docs.n8n.io)
can be found in the [/docs](https://github.com/n8n-io/n8n/tree/master/docs) folder. So all changes
and additions can directly be made in there.
So that the markdown docs look pretty we use [docsify](https://docsify.js.org). It is possible to test
locally how it looks when rendered with the following commands:
```bash
# 1. Install docsify
npm i docsify-cli -g
# 2. Go into n8n folder (the same folder which contains this file). For example:
cd /data/n8n
# 3. Start docsify
docsify serve ./docs
```
The repository for the n8n documentation on https://docs.n8n.io can be found [here](https://github.com/n8n-io/n8n-docs).
## Contributor License Agreement


@@ -19,7 +19,7 @@ Condition notice.
Software: n8n
License: Apache 2.0
License: Apache 2.0 with Commons Clause
Licensor: n8n GmbH


@@ -2,7 +2,7 @@
![n8n.io - Workflow Automation](https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-logo.png)
n8n is a free and open [fair-code](http://faircode.io) licensed node based Workflow Automation Tool. It can be self-hosted, easily extended, and so also used with internal tools.
n8n is an extendable workflow automation tool. With a [fair-code](http://faircode.io) distribution model, n8n will always have visible source code, be available to self-host, and allow you to add your own custom functions, logic and apps. n8n's node-based approach makes it highly versatile, enabling you to connect anything to everything.
<a href="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png"><img src="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png" width="550" alt="n8n.io - Screenshot"></a>
@@ -16,7 +16,7 @@ received or lost a star.
## Available integrations
n8n has 100+ different nodes to automate workflows. The list can be found on: [https://n8n.io/nodes](https://n8n.io/nodes)
n8n has 200+ different nodes to automate workflows. The list can be found on: [https://n8n.io/nodes](https://n8n.io/nodes)
## Documentation
@@ -25,6 +25,8 @@ The official n8n documentation can be found under: [https://docs.n8n.io](https:/
Additional information and example workflows on the n8n.io website: [https://n8n.io](https://n8n.io)
The changelog can be found [here](https://docs.n8n.io/reference/changelog.html) and the list of breaking changes [here](https://github.com/n8n-io/n8n/blob/master/packages/cli/BREAKING-CHANGES.md).
## Usage
@@ -39,10 +41,14 @@ Execute: `npm run start`
## Hosted n8n
## n8n.cloud
If you are interested in a hosted version of n8n on our infrastructure please contact us via:
[hosting@n8n.io](mailto:hosting@n8n.io)
Sign-up for an [n8n.cloud](https://www.n8n.cloud/) account.
While n8n.cloud and n8n are the same in terms of features, n8n.cloud provides certain conveniences such as:
- Not having to set up and maintain your n8n instance
- Managed OAuth for authentication
- Easily upgrading to the newer n8n versions
@@ -63,20 +69,19 @@ check out our job posts:
## What does n8n mean and how do you pronounce it
## What does n8n mean and how do you pronounce it?
**Short answer:** It means "nodemation"
**Short answer:** It means "nodemation" and it is pronounced as n-eight-n.
**Long answer:** I get that question quite often (more often than I expected)
**Long answer:** "I get that question quite often (more often than I expected)
so I decided it is probably best to answer it here. While looking for a
good name for the project with a free domain I realized very quickly that all the
good ones I could think of were already taken. So, in the end, I chose
nodemation. "node-" in the sense that it uses a Node-View and that it uses
Node.js and "-mation" for "automation" which is what the project is supposed to help with.
nodemation. 'node-' in the sense that it uses a Node-View and that it uses
Node.js and '-mation' for 'automation' which is what the project is supposed to help with.
However, I did not like how long the name was and I could not imagine writing
something that long every time in the CLI. That is when I then ended up on
"n8n". Sure does not work perfectly but does neither for Kubernetes (k8s) and
did not hear anybody complain there. So I guess it should be ok.
'n8n'." - **Jan Oberhauser, Founder and CEO, n8n.io**
@@ -88,6 +93,6 @@ Have you found a bug :bug: ? Or maybe you have a nice feature :sparkles: to cont
## License
n8n is [fair-code](http://faircode.io) licensed under [**Apache 2.0 with Commons Clause**](https://github.com/n8n-io/n8n/blob/master/packages/cli/LICENSE.md)
n8n is [fair-code](http://faircode.io) distributed under [**Apache 2.0 with Commons Clause**](https://github.com/n8n-io/n8n/blob/master/packages/cli/LICENSE.md) license.
Additional information about license can be found in the [FAQ](https://docs.n8n.io/#/faq?id=license)
Additional information about license can be found in the [FAQ](https://docs.n8n.io/#/faq?id=license).


@@ -20,7 +20,3 @@ To stop it execute:
```
docker-compose stop
```
## Configuration
The default name of the database, user and password for MongoDB can be changed in the `.env` file in the current directory.


@@ -49,8 +49,7 @@ services:
- N8N_PROTOCOL=https
- NODE_ENV=production
- N8N_PATH
- WEBHOOK_TUNNEL_URL=https://${DOMAIN_NAME}${N8N_PATH}
- VUE_APP_URL_BASE_API=https://${DOMAIN_NAME}${N8N_PATH}
- WEBHOOK_URL=https://${DOMAIN_NAME}${N8N_PATH}
volumes:
- /var/run/docker.sock:/var/run/docker.sock
- ${DATA_FOLDER}/.n8n:/root/.n8n
- ${DATA_FOLDER}/.n8n:/home/node/.n8n


@@ -1,9 +0,0 @@
MONGO_INITDB_ROOT_USERNAME=changeUser
MONGO_INITDB_ROOT_PASSWORD=changePassword
MONGO_INITDB_DATABASE=n8n
MONGO_NON_ROOT_USERNAME=changeUser
MONGO_NON_ROOT_PASSWORD=changePassword
N8N_BASIC_AUTH_USER=changeUser
N8N_BASIC_AUTH_PASSWORD=changePassword


@@ -1,26 +0,0 @@
# n8n with MongoDB
Starts n8n with MongoDB as database.
## Start
To start n8n with MongoDB simply start docker-compose by executing the following
command in the current folder.
**IMPORTANT:** Before you do that, change the default users and passwords in the `.env` file!
```
docker-compose up -d
```
To stop it execute:
```
docker-compose stop
```
## Configuration
The default name of the database, user and password for MongoDB can be changed in the `.env` file in the current directory.


@@ -1,34 +0,0 @@
version: '3.1'
services:
mongo:
image: mongo:4.0
restart: always
environment:
- MONGO_INITDB_ROOT_USERNAME
- MONGO_INITDB_ROOT_PASSWORD
- MONGO_INITDB_DATABASE
- MONGO_NON_ROOT_USERNAME
- MONGO_NON_ROOT_PASSWORD
volumes:
- ./init-data.sh:/docker-entrypoint-initdb.d/init-data.sh
n8n:
image: n8nio/n8n
restart: always
environment:
- DB_TYPE=mongodb
- DB_MONGODB_CONNECTION_URL=mongodb://${MONGO_NON_ROOT_USERNAME}:${MONGO_NON_ROOT_PASSWORD}@mongo:27017/${MONGO_INITDB_DATABASE}
- N8N_BASIC_AUTH_ACTIVE=true
- N8N_BASIC_AUTH_USER
- N8N_BASIC_AUTH_PASSWORD
ports:
- 5678:5678
links:
- mongo
volumes:
- ~/.n8n:/root/.n8n
# Wait 5 seconds to start n8n to make sure that MongoDB is ready
# when n8n tries to connect to it
command: /bin/sh -c "sleep 5; n8n start"


@@ -1,17 +0,0 @@
#!/bin/bash
set -e;
# Create a default non-root role
MONGO_NON_ROOT_ROLE="${MONGO_NON_ROOT_ROLE:-readWrite}"
if [ -n "${MONGO_NON_ROOT_USERNAME:-}" ] && [ -n "${MONGO_NON_ROOT_PASSWORD:-}" ]; then
"${mongo[@]}" "$MONGO_INITDB_DATABASE" <<-EOJS
db.createUser({
user: $(_js_escape "$MONGO_NON_ROOT_USERNAME"),
pwd: $(_js_escape "$MONGO_NON_ROOT_PASSWORD"),
roles: [ { role: $(_js_escape "$MONGO_NON_ROOT_ROLE"), db: $(_js_escape "$MONGO_INITDB_DATABASE") } ]
})
EOJS
else
echo "SETUP INFO: No Environment variables given!"
fi


@@ -32,7 +32,7 @@ services:
links:
- postgres
volumes:
- ~/.n8n:/root/.n8n
- ~/.n8n:/home/node/.n8n
# Wait 5 seconds to start n8n to make sure that PostgreSQL is ready
# when n8n tries to connect to it
command: /bin/sh -c "sleep 5; n8n start"


@@ -1,5 +1,5 @@
# 1. Create an image to build n8n
FROM node:12.16-alpine as builder
FROM node:14.15-alpine as builder
# Update everything and install needed dependencies
USER root
@@ -25,7 +25,7 @@ RUN npm run build
# 2. Start with a new clean image with just the code that is needed to run n8n
FROM node:12.16-alpine
FROM node:14.15-alpine
USER root
@@ -36,6 +36,13 @@ WORKDIR /data
# Install all needed dependencies
RUN npm_config_user=root npm install -g full-icu
# Install fonts
RUN apk --no-cache add --virtual fonts msttcorefonts-installer fontconfig && \
update-ms-fonts && \
fc-cache -f && \
apk del fonts && \
find /usr/share/fonts/truetype/msttcorefonts/ -type l -exec unlink {} \;
ENV NODE_ICU_DATA /usr/local/lib/node_modules/full-icu
COPY --from=builder /data ./


@@ -1,49 +0,0 @@
FROM node:12.16-alpine as builder
# FROM node:12.16-alpine
# Update everything and install needed dependencies
RUN apk add --update graphicsmagick tzdata git tini su-exec
USER root
# Install all needed dependencies
RUN apk --update add --virtual build-dependencies python build-base ca-certificates && \
npm_config_user=root npm install -g full-icu lerna
ENV NODE_ICU_DATA /usr/local/lib/node_modules/full-icu
WORKDIR /data
COPY lerna.json .
COPY package.json .
COPY packages/cli/ ./packages/cli/
COPY packages/core/ ./packages/core/
COPY packages/editor-ui/ ./packages/editor-ui/
COPY packages/nodes-base/ ./packages/nodes-base/
COPY packages/workflow/ ./packages/workflow/
RUN rm -rf node_modules packages/*/node_modules packages/*/dist
RUN npm install --loglevel notice
RUN lerna bootstrap --hoist
RUN npm run build
FROM node:12.16-alpine
WORKDIR /data
# Install all needed dependencies
RUN npm_config_user=root npm install -g full-icu
USER root
ENV NODE_ICU_DATA /usr/local/lib/node_modules/full-icu
COPY --from=builder /data ./
RUN apk add --update graphicsmagick tzdata git tini su-exec
COPY docker/images/n8n-dev/docker-entrypoint.sh /docker-entrypoint.sh
ENTRYPOINT ["tini", "--", "/docker-entrypoint.sh"]
EXPOSE 5678/tcp


@@ -1,4 +1,4 @@
FROM node:12.16
FROM node:14.15
ARG N8N_VERSION


@@ -1,6 +1,6 @@
## n8n - Ubuntu Docker Image
## n8n - Debian Docker Image
Dockerfile to build n8n with Ubuntu.
Dockerfile to build n8n with Debian.
For information about how to run n8n with Docker check the generic
[Docker-Readme](https://github.com/n8n-io/n8n/tree/master/docker/images/n8n/README.md)
@@ -10,12 +10,12 @@ For information about how to run n8n with Docker check the generic
docker build --build-arg N8N_VERSION=<VERSION> -t n8nio/n8n:<VERSION> .
# For example:
docker build --build-arg N8N_VERSION=0.43.0 -t n8nio/n8n:0.43.0-ubuntu .
docker build --build-arg N8N_VERSION=0.43.0 -t n8nio/n8n:0.43.0-debian .
```
```
docker run -it --rm \
--name n8n \
-p 5678:5678 \
n8nio/n8n:0.43.0-ubuntu
n8nio/n8n:0.43.0-debian
```


@@ -1,4 +1,4 @@
FROM arm32v7/node:12.16
FROM arm32v7/node:14.15
ARG N8N_VERSION
@@ -15,6 +15,7 @@ ENV NODE_ENV production
WORKDIR /data
USER node
USER root
CMD n8n
CMD chown -R node:node /home/node/.n8n \
&& gosu node n8n

View file

@@ -17,5 +17,6 @@ docker build --build-arg N8N_VERSION=0.43.0 -t n8nio/n8n:0.43.0-rpi .
docker run -it --rm \
--name n8n \
-p 5678:5678 \
-v ~/.n8n:/home/node/.n8n \
n8nio/n8n:0.43.0-rpi
```


@@ -1,4 +1,4 @@
FROM node:12.16-alpine
FROM node:14.15-alpine
ARG N8N_VERSION
@@ -16,6 +16,13 @@ RUN apk --update add --virtual build-dependencies python build-base ca-certifica
npm_config_user=root npm install -g full-icu n8n@${N8N_VERSION} && \
apk del build-dependencies
# Install fonts
RUN apk --no-cache add --virtual fonts msttcorefonts-installer fontconfig && \
update-ms-fonts && \
fc-cache -f && \
apk del fonts && \
find /usr/share/fonts/truetype/msttcorefonts/ -type l -exec unlink {} \;
ENV NODE_ICU_DATA /usr/local/lib/node_modules/full-icu
WORKDIR /data


@@ -2,7 +2,7 @@
![n8n.io - Workflow Automation](https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-logo.png)
n8n is a free and open [fair-code](http://faircode.io) licensed node based Workflow Automation Tool. It can be self-hosted, easily extended, and so also used with internal tools.
n8n is a free and open [fair-code](http://faircode.io) distributed node based Workflow Automation Tool. It can be self-hosted, easily extended, and so also used with internal tools.
<a href="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png"><img src="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png" width="550" alt="n8n.io - Screenshot"></a>
@@ -33,7 +33,7 @@ Slack notification every time a Github repository received or lost a star.
## Available integrations
n8n has 100+ different nodes to automate workflows. The list can be found on: [https://n8n.io/nodes](https://n8n.io/nodes)
n8n has 200+ different nodes to automate workflows. The list can be found on: [https://n8n.io/nodes](https://n8n.io/nodes)
## Documentation
@@ -71,7 +71,7 @@ To use it simply start n8n with `--tunnel`
docker run -it --rm \
--name n8n \
-p 5678:5678 \
-v ~/.n8n:/root/.n8n \
-v ~/.n8n:/home/node/.n8n \
n8nio/n8n \
n8n start --tunnel
```
@@ -93,56 +93,30 @@ N8N_BASIC_AUTH_PASSWORD=<PASSWORD>
## Persist data
The workflow data gets saved by default in an SQLite database in the user
folder (`/root/.n8n`). That folder additionally contains the
folder (`/home/node/.n8n`). That folder additionally contains the
settings like webhook URL and encryption key.
```
docker run -it --rm \
--name n8n \
-p 5678:5678 \
-v ~/.n8n:/root/.n8n \
-v ~/.n8n:/home/node/.n8n \
n8nio/n8n
```
### Start with other Database
By default n8n uses SQLite to save credentials, past executions and workflows.
n8n however also supports MongoDB and PostgresDB. To use them, only a few
n8n however also supports PostgresDB, MySQL and MariaDB. To use them, only a few
environment variables have to be set.
It is important to still persist the data in the `/root/.n8` folder. The reason
It is important to still persist the data in the `/root/.n8n` folder. The reason
is that it contains n8n user data: the name of the webhook (in case
the n8n tunnel gets used) and, even more important, the encryption key
for the credentials. If none is found, n8n automatically creates one on
startup. In case credentials were already saved with a different encryption key,
they can no longer be used, as decrypting them is not possible anymore.
#### Use with MongoDB
> **WARNING**: Use Postgres if possible! Mongo has problems with saving large
> amounts of data in a document and also causes other problems. So support
> may be dropped in the future.
Replace the following placeholders with the actual data:
- MONGO_DATABASE
- MONGO_HOST
- MONGO_PORT
- MONGO_USER
- MONGO_PASSWORD
```
docker run -it --rm \
--name n8n \
-p 5678:5678 \
-e DB_TYPE=mongodb \
-e DB_MONGODB_CONNECTION_URL="mongodb://<MONGO_USER>:<MONGO_PASSWORD>@<MONGO_SERVER>:<MONGO_PORT>/<MONGO_DATABASE>" \
-v ~/.n8n:/root/.n8n \
n8nio/n8n \
n8n start
```
A full working setup with docker-compose can be found [here](https://github.com/n8n-io/n8n/blob/master/docker/compose/withMongo/README.md)
#### Use with PostgresDB
Replace the following placeholders with the actual data:
@@ -164,7 +138,7 @@ docker run -it --rm \
-e DB_POSTGRESDB_USER=<POSTGRES_USER> \
-e DB_POSTGRESDB_SCHEMA=<POSTGRES_SCHEMA> \
-e DB_POSTGRESDB_PASSWORD=<POSTGRES_PASSWORD> \
-v ~/.n8n:/root/.n8n \
-v ~/.n8n:/home/node/.n8n \
n8nio/n8n \
n8n start
```
@@ -190,7 +164,7 @@ docker run -it --rm \
-e DB_MYSQLDB_PORT=<MYSQLDB_PORT> \
-e DB_MYSQLDB_USER=<MYSQLDB_USER> \
-e DB_MYSQLDB_PASSWORD=<MYSQLDB_PASSWORD> \
-v ~/.n8n:/root/.n8n \
-v ~/.n8n:/home/node/.n8n \
n8nio/n8n \
n8n start
```
@@ -203,7 +177,6 @@ with the given name. That makes it possible to load data easily from
Docker- and Kubernetes-Secrets.
The following environment variables support file input:
- DB_MONGODB_CONNECTION_URL_FILE
- DB_POSTGRESDB_DATABASE_FILE
- DB_POSTGRESDB_HOST_FILE
- DB_POSTGRESDB_PASSWORD_FILE
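For illustration, a sketch of how one of these `_FILE` variables might be used, assuming a password file mounted at `/run/secrets/pg_password` (the mount path is an assumption, e.g. via Docker secrets, not something from this README):
```
# Hypothetical: /run/secrets/pg_password is a mounted file containing only
# the database password; n8n reads it instead of a plain env variable
docker run -it --rm \
	--name n8n \
	-p 5678:5678 \
	-e DB_TYPE=postgresdb \
	-e DB_POSTGRESDB_PASSWORD_FILE=/run/secrets/pg_password \
	-v ~/.n8n:/home/node/.n8n \
	n8nio/n8n
```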
@@ -260,9 +233,9 @@ docker build --build-arg N8N_VERSION=0.18.1 -t n8nio/n8n:0.18.1 .
```
## What does n8n mean and how do you pronounce it
## What does n8n mean and how do you pronounce it?
**Short answer:** It means "nodemation"
**Short answer:** It means "nodemation" and it is pronounced as n-eight-n.
**Long answer:** I get that question quite often (more often than I expected)
so I decided it is probably best to answer it here. While looking for a
@@ -305,6 +278,6 @@ Before you upgrade to the latest version make sure to check here if there are an
## License
n8n is [fair-code](http://faircode.io) licensed under [**Apache 2.0 with Commons Clause**](https://github.com/n8n-io/n8n/blob/master/packages/cli/LICENSE.md)
n8n is [fair-code](http://faircode.io) distributed under [**Apache 2.0 with Commons Clause**](https://github.com/n8n-io/n8n/blob/master/packages/cli/LICENSE.md) license
Additional information about license can be found in the [FAQ](https://docs.n8n.io/#/faq?id=license)


@@ -6,6 +6,8 @@ if [ -d /root/.n8n ] ; then
ln -s /root/.n8n /home/node/
fi
chown -R node /home/node
if [ "$#" -gt 0 ]; then
# Got started with arguments
exec su-exec node "$@"


@@ -2,6 +2,225 @@
This list shows all the versions which include breaking changes and how to upgrade.
## 0.113.0
### What changed?
In the Dropbox node, both credential types (Access Token & OAuth2) have a new parameter called "APP Access Type".
### When is action necessary?
If you are using a Dropbox app with the permission type "App Folder".
### How to upgrade:
Open your Dropbox node's credentials and set the "APP Access Type" parameter to "App Folder".
## 0.111.0
### What changed?
In the Dropbox node, all operations are now performed relative to the user's root directory.
### When is action necessary?
If you are using any resource/operation with OAuth2 authentication.
If you are using the `folder:list` operation with the parameter `Folder Path` empty (root path) and have a Team Space in your Dropbox account.
### How to upgrade:
Open the Dropbox node, go to the OAuth2 credential you are using and reconnect it again.
Also, if you are using the `folder:list` operation, make sure your logic is taking into account the team folders in the response.
## 0.105.0
### What changed?
In the Hubspot Trigger, multiple events can now be provided, and the field `App ID` was moved to the credentials.
### When is action necessary?
If you are using the Hubspot Trigger node.
### How to upgrade:
Open the Hubspot Trigger and set the events again. Also open the credentials `Hubspot Developer API` and set your APP ID.
## 0.104.0
### What changed?
Support for MongoDB as a database for n8n has been dropped as MongoDB had problems saving large amounts of data in a document, among other issues.
### When is action necessary?
If you have been using MongoDB as a database for n8n. Please note that this is not related to the MongoDB node.
### How to upgrade:
Before upgrading, you can [export](https://docs.n8n.io/reference/start-workflows-via-cli.html#export-workflows-and-credentials) all your credentials and workflows using the CLI.
```
n8n export:workflow --backup --output=backups/latest/
n8n export:credentials --backup --output=backups/latest/
```
You can then change the database to one of the supported databases mentioned [here](https://docs.n8n.io/reference/data/database.html). Finally, you can upgrade n8n and [import](https://docs.n8n.io/reference/start-workflows-via-cli.html#import-workflows-and-credentials) all your credentials and workflows back into n8n.
```
n8n import:workflow --separate --input=backups/latest/
n8n import:credentials --separate --input=backups/latest/
```
## 0.102.0
### What changed?
- The `As User` property and the `User Name` field got combined and renamed to `Send as User`. It also got moved under “Add Options”.
- The `Ephemeral` property got removed. To send an ephemeral message, you have to select the "Post (Ephemeral)" operation.
### When is action necessary?
If you are using the following fields or properties in the Slack node:
- As User
- Ephemeral
- User Name
### How to upgrade:
Open the Slack node and set them again to the appropriate values.
----------------------------
### What changed?
If you have a question in Typeform that uses a previously answered question as part of its text, the question text would look like this in the Typeform Trigger node:
`You have chosen {{field:23234242}} as your answer. Is this correct?`
Those curly braces broke the expression editor. With this change, it now displays like this:
`You have chosen [field:23234242] as your answer. Is this correct?`
### When is action necessary?
If you are using the Typeform Trigger node with questions using the [Recall information](https://help.typeform.com/hc/en-us/articles/360050447072-What-is-Recall-information-) feature.
### How to upgrade:
In workflows using the Typeform Trigger node, nodes that reference such key names (questions that use a previously answered question as part of its text) will need to be updated.
## 0.95.0
### What changed?
In the Harvest Node, we moved the account field from the credentials to the node parameters. This will allow you to work with multiple accounts without having to create multiple credentials.
### When is action necessary?
If you are using the Harvest Node.
### How to upgrade:
Open the node and set the parameter `Account ID`.
## 0.94.0
### What changed?
In the Segment Node, we have changed how the properties 'traits' and 'properties' are defined. Now, key/value pairs can be provided, allowing you to send custom traits/properties.
### When is action necessary?
When the properties 'traits' or 'properties' are set, and one of the following resources/operations is used:
| Resource | Operation |
|--|--|
| Identify | Create |
| Track | Event |
| Track | Page |
| Group | Add |
### How to upgrade:
Open the affected resource/operation and set the parameters 'traits' or 'properties' again.
## 0.93.0
### What changed?
Change in naming of the Authentication field for the Pipedrive Trigger node.
### When is action necessary?
If you had set "Basic Auth" for the "Authentication" field in the node.
### How to upgrade:
The "Authentication" field has been renamed to "Incoming Authentication". Please set the parameter “Incoming Authentication” to “Basic Auth” to activate it again.
## 0.90.0
### What changed?
Node.js version 12.9 or newer is required to run n8n.
### When is action necessary?
If you are running Node.js version older than 12.9.
### How to upgrade:
You can download and install the latest version of Node.js from [here](https://nodejs.org/en/download/).
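To quickly check which Node.js version is currently installed (a trivial check, not part of the original notes):
```
node --version   # should print v12.9.0 or newer
```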
## 0.87.0
### What changed?
The link.fish node got removed because the service is shutting down.
### When is action necessary?
If you are actively using the link.fish node.
### How to upgrade:
Unfortunately, that's not possible. We'd recommend looking for an alternative service.
## 0.83.0
### What changed?
In the Active Campaign Node, we have changed how the `getAll` operation works with various resources for the sake of consistency. To achieve this, a new parameter called 'Simple' has been added.
### When is action necessary?
When one of the following resources/operations is used:
| Resource | Operation |
|--|--|
| Deal | Get All |
| Connector | Get All |
| E-commerce Order | Get All |
| E-commerce Customer | Get All |
| E-commerce Order Products | Get All |
### How to upgrade:
Open the affected resource/operation and set the parameter `Simple` to false.
## 0.79.0
### What changed?
We have renamed the operations in the Todoist Node for consistency with the codebase. We also deleted the `close_match` and `delete_match` operations as these can be accomplished using the following operations: `getAll`, `close`, and `delete`.
### When is action necessary?
When one of the following operations is used:
- close_by
- close_match
- delete_id
- delete_match
### How to upgrade:
After upgrading, open all workflows which contain the Todoist Node. Set the corresponding operation, and then save the workflow.
If the operations `close_match` or `delete_match` are used, recreate them using the operations: `getAll`, `delete`, and `close`.
## 0.69.0


@@ -19,7 +19,7 @@ Condition notice.
Software: n8n
License: Apache 2.0
License: Apache 2.0 with Commons Clause
Licensor: n8n GmbH


@@ -2,7 +2,7 @@
![n8n.io - Workflow Automation](https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-logo.png)
n8n is a free and open [fair-code](http://faircode.io) licensed node based Workflow Automation Tool. It can be self-hosted, easily extended, and so also used with internal tools.
n8n is a free and open [fair-code](http://faircode.io) distributed node based Workflow Automation Tool. It can be self-hosted, easily extended, and so also used with internal tools.
<a href="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png"><img src="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png" width="550" alt="n8n.io - Screenshot"></a>
@@ -32,7 +32,7 @@ Slack notification every time a Github repository received or lost a star.
## Available integrations
n8n has 100+ different nodes to automate workflows. The list can be found on: [https://n8n.io/nodes](https://n8n.io/nodes)
n8n has 200+ different nodes to automate workflows. The list can be found on: [https://n8n.io/nodes](https://n8n.io/nodes)
## Documentation
@@ -60,9 +60,9 @@ If you are interested in a hosted version of n8n on our infrastructure please co
## What does n8n mean and how do you pronounce it
## What does n8n mean and how do you pronounce it?
**Short answer:** It means "nodemation"
**Short answer:** It means "nodemation" and it is pronounced as n-eight-n.
**Long answer:** I get that question quite often (more often than I expected)
so I decided it is probably best to answer it here. While looking for a
@@ -100,7 +100,7 @@ Before you upgrade to the latest version make sure to check here if there are an
## License
n8n is [fair-code](http://faircode.io) licensed under [**Apache 2.0 with Commons Clause**](https://github.com/n8n-io/n8n/blob/master/packages/cli/LICENSE.md)
n8n is [fair-code](http://faircode.io) distributed under [**Apache 2.0 with Commons Clause**](https://github.com/n8n-io/n8n/blob/master/packages/cli/LICENSE.md) license
Additional information about license can be found in the [FAQ](https://docs.n8n.io/#/faq?id=license)


@@ -10,7 +10,7 @@ process.env.NODE_CONFIG_DIR = process.env.NODE_CONFIG_DIR || path.join(__dirname
var versionFlags = [ // tslint:disable-line:no-var-keyword
'-v',
'-V',
'--version'
'--version',
];
if (versionFlags.includes(process.argv.slice(-1)[0])) {
console.log(require('../package').version);
@@ -22,23 +22,10 @@ if (process.argv.length === 2) {
process.argv.push('start');
}
var command = process.argv[2]; // tslint:disable-line:no-var-keyword
var nodeVersion = process.versions.node.split('.');
// Check if the command the user did enter is supported else stop
var supportedCommands = [ // tslint:disable-line:no-var-keyword
'execute',
'help',
'start',
];
if (!supportedCommands.includes(command)) {
console.log('\nThe command "' + command + '" is not known!\n');
process.argv.pop();
process.argv.push('--help');
}
if (parseInt(process.versions.node.split('.')[0], 10) < 10) {
console.log('\nThe Node.js version is too old to run n8n. Please use version 10 or later!\n');
if (parseInt(nodeVersion[0], 10) < 12 || parseInt(nodeVersion[0], 10) === 12 && parseInt(nodeVersion[1], 10) < 9) {
console.log(`\nYour Node.js version (${process.versions.node}) is too old to run n8n.\nPlease update to version 12.9 or later!\n`);
process.exit(0);
}


@@ -10,6 +10,7 @@ import {
import {
ActiveExecutions,
CredentialsOverwrites,
CredentialTypes,
Db,
ExternalHooks,
GenericHelpers,
@@ -20,7 +21,7 @@ import {
WorkflowCredentials,
WorkflowHelpers,
WorkflowRunner,
} from "../src";
} from '../src';
export class Execute extends Command {
@@ -116,6 +117,8 @@ export class Execute extends Command {
// Add the found types to an instance other parts of the application can use
const nodeTypes = NodeTypes();
await nodeTypes.init(loadNodesAndCredentials.nodeTypes);
const credentialTypes = CredentialTypes();
await credentialTypes.init(loadNodesAndCredentials.credentialTypes);
if (!WorkflowHelpers.isWorkflowIdValid(workflowId)) {
workflowId = undefined;
@@ -124,7 +127,7 @@
// Check if the workflow contains the required "Start" node
// "requiredNodeTypes" are also defined in editor-ui/views/NodeView.vue
const requiredNodeTypes = ['n8n-nodes-base.start'];
let startNode: INode | undefined= undefined;
let startNode: INode | undefined = undefined;
for (const node of workflowData!.nodes) {
if (requiredNodeTypes.includes(node.type)) {
startNode = node;


@@ -0,0 +1,161 @@
import {
Command,
flags,
} from '@oclif/command';
import {
Credentials,
UserSettings,
} from 'n8n-core';
import {
IDataObject
} from 'n8n-workflow';
import {
Db,
GenericHelpers,
ICredentialsDecryptedDb,
} from '../../src';
import * as fs from 'fs';
import * as path from 'path';
export class ExportCredentialsCommand extends Command {
static description = 'Export credentials';
static examples = [
`$ n8n export:credentials --all`,
`$ n8n export:credentials --id=5 --output=file.json`,
`$ n8n export:credentials --all --output=backups/latest.json`,
`$ n8n export:credentials --backup --output=backups/latest/`,
`$ n8n export:credentials --all --decrypted --output=backups/decrypted.json`,
];
static flags = {
help: flags.help({ char: 'h' }),
all: flags.boolean({
description: 'Export all credentials',
}),
backup: flags.boolean({
description: 'Sets --all --pretty --separate for simple backups. Only --output has to be set additionally.',
}),
id: flags.string({
description: 'The ID of the credential to export',
}),
output: flags.string({
char: 'o',
description: 'Output file name or directory if using separate files',
}),
pretty: flags.boolean({
description: 'Format the output in an easier to read fashion',
}),
separate: flags.boolean({
description: 'Exports one file per credential (useful for versioning). A directory must be provided via --output.',
}),
decrypted: flags.boolean({
description: 'Exports data decrypted / in plain text. ALL SENSITIVE INFORMATION WILL BE VISIBLE IN THE FILES. Use it to migrate from one installation to another that has a different secret key (in the config file).',
}),
};
async run() {
const { flags } = this.parse(ExportCredentialsCommand);
if (flags.backup) {
flags.all = true;
flags.pretty = true;
flags.separate = true;
}
if (!flags.all && !flags.id) {
GenericHelpers.logOutput(`Either option "--all" or "--id" has to be set!`);
return;
}
if (flags.all && flags.id) {
GenericHelpers.logOutput(`You should either use "--all" or "--id" but never both!`);
return;
}
if (flags.separate) {
try {
if (!flags.output) {
GenericHelpers.logOutput(`You must provide an output directory via --output when using --separate`);
return;
}
if (fs.existsSync(flags.output)) {
if (!fs.lstatSync(flags.output).isDirectory()) {
GenericHelpers.logOutput(`The parameter --output must be a directory`);
return;
}
} else {
fs.mkdirSync(flags.output, { recursive: true });
}
} catch (e) {
console.error('\nFILESYSTEM ERROR');
console.log('====================================');
console.error(e.message);
console.error(e.stack);
this.exit(1);
}
} else if (flags.output) {
if (fs.existsSync(flags.output)) {
if (fs.lstatSync(flags.output).isDirectory()) {
GenericHelpers.logOutput(`The parameter --output must be a writable file`);
return;
}
}
}
try {
await Db.init();
const findQuery: IDataObject = {};
if (flags.id) {
findQuery.id = flags.id;
}
const credentials = await Db.collections.Credentials!.find(findQuery);
if (flags.decrypted) {
const encryptionKey = await UserSettings.getEncryptionKey();
if (encryptionKey === undefined) {
throw new Error('No encryption key was found to decrypt the credentials!');
}
for (let i = 0; i < credentials.length; i++) {
const { name, type, nodesAccess, data } = credentials[i];
const credential = new Credentials(name, type, nodesAccess, data);
const plainData = credential.getData(encryptionKey);
(credentials[i] as ICredentialsDecryptedDb).data = plainData;
}
}
if (credentials.length === 0) {
throw new Error('No credentials found with specified filters.');
}
if (flags.separate) {
let fileContents: string, i: number;
for (i = 0; i < credentials.length; i++) {
fileContents = JSON.stringify(credentials[i], null, flags.pretty ? 2 : undefined);
const filename = (flags.output!.endsWith(path.sep) ? flags.output! : flags.output + path.sep) + credentials[i].id + '.json';
fs.writeFileSync(filename, fileContents);
}
console.log('Successfully exported', i, 'credentials.');
} else {
const fileContents = JSON.stringify(credentials, null, flags.pretty ? 2 : undefined);
if (flags.output) {
fs.writeFileSync(flags.output!, fileContents);
console.log('Successfully exported', credentials.length, 'credentials.');
} else {
console.log(fileContents);
}
}
} catch (error) {
this.error(error.message);
this.exit(1);
}
}
}


@@ -0,0 +1,137 @@
import {
Command,
flags,
} from '@oclif/command';
import {
IDataObject
} from 'n8n-workflow';
import {
Db,
GenericHelpers,
} from '../../src';
import * as fs from 'fs';
import * as path from 'path';
export class ExportWorkflowsCommand extends Command {
static description = 'Export workflows';
static examples = [
`$ n8n export:workflow --all`,
`$ n8n export:workflow --id=5 --output=file.json`,
`$ n8n export:workflow --all --output=backups/latest/`,
`$ n8n export:workflow --backup --output=backups/latest/`,
];
static flags = {
help: flags.help({ char: 'h' }),
all: flags.boolean({
description: 'Export all workflows',
}),
backup: flags.boolean({
description: 'Sets --all --pretty --separate for simple backups. Only --output has to be set additionally.',
}),
id: flags.string({
description: 'The ID of the workflow to export',
}),
output: flags.string({
char: 'o',
description: 'Output file name or directory if using separate files',
}),
pretty: flags.boolean({
description: 'Format the output in an easier to read fashion',
}),
separate: flags.boolean({
description: 'Exports one file per workflow (useful for versioning). A directory must be provided via --output.',
}),
};
async run() {
const { flags } = this.parse(ExportWorkflowsCommand);
if (flags.backup) {
flags.all = true;
flags.pretty = true;
flags.separate = true;
}
if (!flags.all && !flags.id) {
GenericHelpers.logOutput(`Either option "--all" or "--id" has to be set!`);
return;
}
if (flags.all && flags.id) {
GenericHelpers.logOutput(`You should either use "--all" or "--id" but never both!`);
return;
}
if (flags.separate) {
try {
if (!flags.output) {
GenericHelpers.logOutput(`You must provide an output directory via --output when using --separate`);
return;
}
if (fs.existsSync(flags.output)) {
if (!fs.lstatSync(flags.output).isDirectory()) {
GenericHelpers.logOutput(`The parameter --output must be a directory`);
return;
}
} else {
fs.mkdirSync(flags.output, { recursive: true });
}
} catch (e) {
console.error('\nFILESYSTEM ERROR');
console.log('====================================');
console.error(e.message);
console.error(e.stack);
this.exit(1);
}
} else if (flags.output) {
if (fs.existsSync(flags.output)) {
if (fs.lstatSync(flags.output).isDirectory()) {
GenericHelpers.logOutput(`The parameter --output must be a writable file`);
return;
}
}
}
try {
await Db.init();
const findQuery: IDataObject = {};
if (flags.id) {
findQuery.id = flags.id;
}
const workflows = await Db.collections.Workflow!.find(findQuery);
if (workflows.length === 0) {
throw new Error('No workflows found with specified filters.');
}
if (flags.separate) {
let fileContents: string, i: number;
for (i = 0; i < workflows.length; i++) {
fileContents = JSON.stringify(workflows[i], null, flags.pretty ? 2 : undefined);
const filename = (flags.output!.endsWith(path.sep) ? flags.output! : flags.output + path.sep) + workflows[i].id + '.json';
fs.writeFileSync(filename, fileContents);
}
console.log('Successfully exported', i, 'workflows.');
} else {
const fileContents = JSON.stringify(workflows, null, flags.pretty ? 2 : undefined);
if (flags.output) {
fs.writeFileSync(flags.output!, fileContents);
console.log('Successfully exported', workflows.length, workflows.length === 1 ? 'workflow.' : 'workflows.');
} else {
console.log(fileContents);
}
}
} catch (error) {
this.error(error.message);
this.exit(1);
}
}
}


@@ -0,0 +1,98 @@
import {
Command,
flags,
} from '@oclif/command';
import {
Credentials,
UserSettings,
} from 'n8n-core';
import {
Db,
GenericHelpers,
} from '../../src';
import * as fs from 'fs';
import * as glob from 'glob-promise';
import * as path from 'path';
export class ImportCredentialsCommand extends Command {
static description = 'Import credentials';
static examples = [
`$ n8n import:credentials --input=file.json`,
`$ n8n import:credentials --separate --input=backups/latest/`,
];
static flags = {
help: flags.help({ char: 'h' }),
input: flags.string({
char: 'i',
description: 'Input file name or directory if --separate is used',
}),
separate: flags.boolean({
description: 'Imports *.json files from directory provided by --input',
}),
};
async run() {
const { flags } = this.parse(ImportCredentialsCommand);
if (!flags.input) {
GenericHelpers.logOutput(`An input file or directory with --input must be provided`);
return;
}
if (flags.separate) {
if (fs.existsSync(flags.input)) {
if (!fs.lstatSync(flags.input).isDirectory()) {
GenericHelpers.logOutput(`The parameter --input must be a directory`);
return;
}
}
}
try {
await Db.init();
let i;
const encryptionKey = await UserSettings.getEncryptionKey();
if (encryptionKey === undefined) {
throw new Error('No encryption key was found to encrypt the credentials!');
}
if (flags.separate) {
const files = await glob((flags.input.endsWith(path.sep) ? flags.input : flags.input + path.sep) + '*.json');
for (i = 0; i < files.length; i++) {
const credential = JSON.parse(fs.readFileSync(files[i], { encoding: 'utf8' }));
if (typeof credential.data === 'object') {
// plain data / decrypted input. Should be encrypted first.
Credentials.prototype.setData.call(credential, credential.data, encryptionKey);
}
await Db.collections.Credentials!.save(credential);
}
} else {
const fileContents = JSON.parse(fs.readFileSync(flags.input, { encoding: 'utf8' }));
if (!Array.isArray(fileContents)) {
throw new Error(`File does not seem to contain credentials.`);
}
for (i = 0; i < fileContents.length; i++) {
if (typeof fileContents[i].data === 'object') {
// plain data / decrypted input. Should be encrypted first.
Credentials.prototype.setData.call(fileContents[i], fileContents[i].data, encryptionKey);
}
await Db.collections.Credentials!.save(fileContents[i]);
}
}
console.log('Successfully imported', i, 'credentials.');
} catch (error) {
this.error(error.message);
this.exit(1);
}
}
}


@@ -0,0 +1,78 @@
import {
Command,
flags,
} from '@oclif/command';
import {
Db,
GenericHelpers,
} from '../../src';
import * as fs from 'fs';
import * as glob from 'glob-promise';
import * as path from 'path';
export class ImportWorkflowsCommand extends Command {
static description = 'Import workflows';
static examples = [
`$ n8n import:workflow --input=file.json`,
`$ n8n import:workflow --separate --input=backups/latest/`,
];
static flags = {
help: flags.help({ char: 'h' }),
input: flags.string({
char: 'i',
description: 'Input file name or directory if --separate is used',
}),
separate: flags.boolean({
description: 'Imports *.json files from directory provided by --input',
}),
};
async run() {
const { flags } = this.parse(ImportWorkflowsCommand);
if (!flags.input) {
GenericHelpers.logOutput(`An input file or directory with --input must be provided`);
return;
}
if (flags.separate) {
if (fs.existsSync(flags.input)) {
if (!fs.lstatSync(flags.input).isDirectory()) {
GenericHelpers.logOutput(`The parameter --input must be a directory`);
return;
}
}
}
try {
await Db.init();
let i;
if (flags.separate) {
const files = await glob((flags.input.endsWith(path.sep) ? flags.input : flags.input + path.sep) + '*.json');
for (i = 0; i < files.length; i++) {
const workflow = JSON.parse(fs.readFileSync(files[i], { encoding: 'utf8' }));
await Db.collections.Workflow!.save(workflow);
}
} else {
const fileContents = JSON.parse(fs.readFileSync(flags.input, { encoding: 'utf8' }));
if (!Array.isArray(fileContents)) {
throw new Error(`File does not seem to contain workflows.`);
}
for (i = 0; i < fileContents.length; i++) {
await Db.collections.Workflow!.save(fileContents[i]);
}
}
console.log('Successfully imported', i, i === 1 ? 'workflow.' : 'workflows.');
} catch (error) {
this.error(error.message);
this.exit(1);
}
}
}


@@ -5,20 +5,25 @@ import {
} from 'n8n-core';
import { Command, flags } from '@oclif/command';
const open = require('open');
import * as Redis from 'ioredis';
import * as config from '../config';
import {
ActiveExecutions,
ActiveWorkflowRunner,
CredentialTypes,
CredentialsOverwrites,
CredentialTypes,
DatabaseType,
Db,
ExternalHooks,
GenericHelpers,
IExecutionsCurrentSummary,
LoadNodesAndCredentials,
NodeTypes,
Server,
TestWebhooks,
} from "../src";
} from '../src';
import { IDataObject } from 'n8n-workflow';
let activeWorkflowRunner: ActiveWorkflowRunner.ActiveWorkflowRunner | undefined;
@@ -68,23 +73,51 @@
static async stopProcess() {
console.log(`\nStopping n8n...`);
setTimeout(() => {
// In case that something goes wrong with shutdown we
// kill after max. 30 seconds no matter what
process.exit(processExistCode);
}, 30000);
try {
const externalHooks = ExternalHooks();
await externalHooks.run('n8n.stop', []);
const removePromises = [];
if (activeWorkflowRunner !== undefined) {
removePromises.push(activeWorkflowRunner.removeAll());
setTimeout(() => {
// In case that something goes wrong with shutdown we
// kill after max. 30 seconds no matter what
process.exit(processExistCode);
}, 30000);
const skipWebhookDeregistration = config.get('endpoints.skipWebhoooksDeregistrationOnShutdown') as boolean;
const removePromises = [];
if (activeWorkflowRunner !== undefined && skipWebhookDeregistration !== true) {
removePromises.push(activeWorkflowRunner.removeAll());
}
// Remove all test webhooks
const testWebhooks = TestWebhooks.getInstance();
removePromises.push(testWebhooks.removeAll());
await Promise.all(removePromises);
// Wait for active workflow executions to finish
const activeExecutionsInstance = ActiveExecutions.getInstance();
let executingWorkflows = activeExecutionsInstance.getActiveExecutions() as IExecutionsCurrentSummary[];
let count = 0;
while (executingWorkflows.length !== 0) {
if (count++ % 4 === 0) {
console.log(`Waiting for ${executingWorkflows.length} active executions to finish...`);
executingWorkflows.map(execution => {
console.log(` - Execution ID ${execution.id}, workflow ID: ${execution.workflowId}`);
});
}
await new Promise((resolve) => {
setTimeout(resolve, 500);
});
executingWorkflows = activeExecutionsInstance.getActiveExecutions();
}
} catch (error) {
console.error('There was an error shutting down n8n.', error);
}
// Remove all test webhooks
const testWebhooks = TestWebhooks.getInstance();
removePromises.push(testWebhooks.removeAll());
await Promise.all(removePromises);
process.exit(processExistCode);
}
@@ -97,10 +130,16 @@
const { flags } = this.parse(Start);
// Wrap that the process does not close but we can still use async
(async () => {
await (async () => {
try {
// Start directly with the init of the database to improve startup time
const startDbInitPromise = Db.init();
const startDbInitPromise = Db.init().catch((error: Error) => {
console.error(`There was an error initializing DB: ${error.message}`);
processExistCode = 1;
// @ts-ignore
process.emit('SIGINT');
});
// Make sure the settings exist
const userSettings = await UserSettings.prepareUserSettings();
@@ -126,6 +165,70 @@
// Wait till the database is ready
await startDbInitPromise;
if (config.get('executions.mode') === 'queue') {
const redisHost = config.get('queue.bull.redis.host');
const redisPassword = config.get('queue.bull.redis.password');
const redisPort = config.get('queue.bull.redis.port');
const redisDB = config.get('queue.bull.redis.db');
const redisConnectionTimeoutLimit = config.get('queue.bull.redis.timeoutThreshold');
let lastTimer = 0, cumulativeTimeout = 0;
const settings = {
retryStrategy: (times: number): number | null => {
const now = Date.now();
if (now - lastTimer > 30000) {
// Means we had no timeout at all or last timeout was temporary and we recovered
lastTimer = now;
cumulativeTimeout = 0;
} else {
cumulativeTimeout += now - lastTimer;
lastTimer = now;
if (cumulativeTimeout > redisConnectionTimeoutLimit) {
console.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + '. Exiting process.');
process.exit(1);
}
}
return 500;
},
} as IDataObject;
if (redisHost) {
settings.host = redisHost;
}
if (redisPassword) {
settings.password = redisPassword;
}
if (redisPort) {
settings.port = redisPort;
}
if (redisDB) {
settings.db = redisDB;
}
// This connection is going to be our heartbeat
// IORedis automatically pings redis and tries to reconnect
// We will be using the retryStrategy above
// to control how and when to exit.
const redis = new Redis(settings);
redis.on('error', (error) => {
if (error.toString().includes('ECONNREFUSED') === true) {
console.warn('Redis unavailable - trying to reconnect...');
} else {
console.warn('Error with Redis: ', error);
}
});
}
const dbType = await GenericHelpers.getConfigValue('database.type') as DatabaseType;
if (dbType === 'sqlite') {
const shouldRunVacuum = config.get('database.sqlite.executeVacuumOnStartup') as number;
if (shouldRunVacuum) {
Db.collections.Execution!.query('VACUUM;');
}
}
if (flags.tunnel === true) {
this.log('\nWaiting for tunnel ...');
@@ -156,8 +259,8 @@
// @ts-ignore
const webhookTunnel = await localtunnel(port, tunnelSettings);
process.env.WEBHOOK_TUNNEL_URL = webhookTunnel.url + '/';
this.log(`Tunnel URL: ${process.env.WEBHOOK_TUNNEL_URL}\n`);
process.env.WEBHOOK_URL = webhookTunnel.url + '/';
this.log(`Tunnel URL: ${process.env.WEBHOOK_URL}\n`);
this.log('IMPORTANT! Do not share with anybody as it would give people access to your n8n instance!');
}
@@ -181,7 +284,7 @@
Start.openBrowser();
}
this.log(`\nPress "o" to open in Browser.`);
process.stdin.on("data", (key : string) => {
process.stdin.on('data', (key: string) => {
if (key === 'o') {
Start.openBrowser();
inputText = '';


@@ -0,0 +1,85 @@
import {
Command, flags,
} from '@oclif/command';
import {
IDataObject
} from 'n8n-workflow';
import {
Db,
GenericHelpers,
} from '../../src';
export class UpdateWorkflowCommand extends Command {
static description = '\nUpdate workflows';
static examples = [
`$ n8n update:workflow --all --active=false`,
`$ n8n update:workflow --id=5 --active=true`,
];
static flags = {
help: flags.help({ char: 'h' }),
active: flags.string({
description: 'Active state the workflow/s should be set to',
}),
all: flags.boolean({
description: 'Operate on all workflows',
}),
id: flags.string({
description: 'The ID of the workflow to operate on',
}),
};
async run() {
const { flags } = this.parse(UpdateWorkflowCommand);
if (!flags.all && !flags.id) {
GenericHelpers.logOutput(`Either option "--all" or "--id" has to be set!`);
return;
}
if (flags.all && flags.id) {
GenericHelpers.logOutput(`You should either use "--all" or "--id" but never both!`);
return;
}
const updateQuery: IDataObject = {};
if (flags.active === undefined) {
GenericHelpers.logOutput(`No update flag like "--active=true" has been set!`);
return;
} else {
if (!['false', 'true'].includes(flags.active)) {
GenericHelpers.logOutput(`Valid values for flag "--active" are only "false" or "true"!`);
return;
}
updateQuery.active = flags.active === 'true';
}
try {
await Db.init();
const findQuery: IDataObject = {};
if (flags.id) {
console.log(`Updating workflow with ID: ${flags.id}`);
findQuery.id = flags.id;
} else {
console.log('Updating all active workflows');
findQuery.active = true;
}
await Db.collections.Workflow!.update(findQuery, updateQuery);
console.log('Done');
} catch (e) {
console.error('\nGOT ERROR');
console.log('====================================');
console.error(e.message);
console.error(e.stack);
this.exit(1);
}
this.exit();
}
}


@@ -0,0 +1,223 @@
import {
UserSettings,
} from 'n8n-core';
import { Command, flags } from '@oclif/command';
import * as Redis from 'ioredis';
import * as config from '../config';
import {
ActiveExecutions,
ActiveWorkflowRunner,
CredentialsOverwrites,
CredentialTypes,
Db,
ExternalHooks,
GenericHelpers,
LoadNodesAndCredentials,
NodeTypes,
TestWebhooks,
WebhookServer,
} from '../src';
import { IDataObject } from 'n8n-workflow';
let activeWorkflowRunner: ActiveWorkflowRunner.ActiveWorkflowRunner | undefined;
let processExistCode = 0;
export class Webhook extends Command {
static description = 'Starts n8n webhook process. Intercepts only production URLs.';
static examples = [
`$ n8n webhook`,
];
static flags = {
help: flags.help({ char: 'h' }),
};
/**
* Stops the n8n in a graceful way.
* Make for example sure that all the webhooks from third party services
* get removed.
*/
static async stopProcess() {
console.log(`\nStopping n8n...`);
try {
const externalHooks = ExternalHooks();
await externalHooks.run('n8n.stop', []);
setTimeout(() => {
// In case that something goes wrong with shutdown we
// kill after max. 30 seconds no matter what
process.exit(processExistCode);
}, 30000);
const removePromises = [];
if (activeWorkflowRunner !== undefined) {
removePromises.push(activeWorkflowRunner.removeAll());
}
// Remove all test webhooks
const testWebhooks = TestWebhooks.getInstance();
removePromises.push(testWebhooks.removeAll());
await Promise.all(removePromises);
// Wait for active workflow executions to finish
const activeExecutionsInstance = ActiveExecutions.getInstance();
let executingWorkflows = activeExecutionsInstance.getActiveExecutions();
let count = 0;
while (executingWorkflows.length !== 0) {
if (count++ % 4 === 0) {
console.log(`Waiting for ${executingWorkflows.length} active executions to finish...`);
}
await new Promise((resolve) => {
setTimeout(resolve, 500);
});
executingWorkflows = activeExecutionsInstance.getActiveExecutions();
}
} catch (error) {
console.error('There was an error shutting down n8n.', error);
}
process.exit(processExistCode);
}
async run() {
// Make sure that n8n shuts down gracefully if possible
process.on('SIGTERM', Webhook.stopProcess);
process.on('SIGINT', Webhook.stopProcess);
const { flags } = this.parse(Webhook);
// Wrap that the process does not close but we can still use async
await (async () => {
if (config.get('executions.mode') !== 'queue') {
/**
* It is technically possible to run without queues but
* there are 2 known bugs when running in this mode:
* - Executions list will be problematic as the main process
* is not aware of current executions in the webhook processes
* and therefore will display all current executions as error
* as it is unable to determine if it is still running or crashed
* - You cannot stop currently executing jobs from webhook processes
* when running without queues as the main process cannot talk to
the webhook processes to communicate workflow execution interruption.
*/
this.error('Webhook processes can only run with execution mode as queue.');
}
try {
// Start directly with the init of the database to improve startup time
const startDbInitPromise = Db.init().catch(error => {
console.error(`There was an error initializing DB: ${error.message}`);
processExistCode = 1;
// @ts-ignore
process.emit('SIGINT');
});
// Make sure the settings exist
const userSettings = await UserSettings.prepareUserSettings();
// Load all node and credential types
const loadNodesAndCredentials = LoadNodesAndCredentials();
await loadNodesAndCredentials.init();
// Load the credentials overwrites if any exist
const credentialsOverwrites = CredentialsOverwrites();
await credentialsOverwrites.init();
// Load all external hooks
const externalHooks = ExternalHooks();
await externalHooks.init();
// Add the found types to an instance other parts of the application can use
const nodeTypes = NodeTypes();
await nodeTypes.init(loadNodesAndCredentials.nodeTypes);
const credentialTypes = CredentialTypes();
await credentialTypes.init(loadNodesAndCredentials.credentialTypes);
// Wait till the database is ready
await startDbInitPromise;
if (config.get('executions.mode') === 'queue') {
const redisHost = config.get('queue.bull.redis.host');
const redisPassword = config.get('queue.bull.redis.password');
const redisPort = config.get('queue.bull.redis.port');
const redisDB = config.get('queue.bull.redis.db');
const redisConnectionTimeoutLimit = config.get('queue.bull.redis.timeoutThreshold');
let lastTimer = 0, cumulativeTimeout = 0;
const settings = {
retryStrategy: (times: number): number | null => {
const now = Date.now();
if (now - lastTimer > 30000) {
// Means we had no timeout at all or last timeout was temporary and we recovered
lastTimer = now;
cumulativeTimeout = 0;
} else {
cumulativeTimeout += now - lastTimer;
lastTimer = now;
if (cumulativeTimeout > redisConnectionTimeoutLimit) {
console.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + ' ms. Exiting process.');
process.exit(1);
}
}
return 500;
},
} as IDataObject;
if (redisHost) {
settings.host = redisHost;
}
if (redisPassword) {
settings.password = redisPassword;
}
if (redisPort) {
settings.port = redisPort;
}
if (redisDB) {
settings.db = redisDB;
}
// This connection is going to be our heartbeat
// IORedis automatically pings redis and tries to reconnect
// We will be using the retryStrategy above
// to control how and when to exit.
const redis = new Redis(settings);
redis.on('error', (error) => {
if (error.toString().includes('ECONNREFUSED') === true) {
console.warn('Redis unavailable - trying to reconnect...');
} else {
console.warn('Error with Redis: ', error);
}
});
}
await WebhookServer.start();
// Start to get active workflows and run their triggers
activeWorkflowRunner = ActiveWorkflowRunner.getInstance();
await activeWorkflowRunner.initWebhooks();
const editorUrl = GenericHelpers.getBaseUrl();
this.log('Webhook listener waiting for requests.');
} catch (error) {
this.error(`There was an error: ${error.message}`);
processExistCode = 1;
// @ts-ignore
process.emit('SIGINT');
}
})();
}
}
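
The retryStrategy above is what turns a Redis outage into a bounded wait: every failed connection attempt schedules a retry in 500 ms, and the gaps between failures are accumulated until they cross queue.bull.redis.timeoutThreshold, at which point the process exits. A minimal standalone sketch of the same idea, assuming only the ioredis package (makeRetryStrategy is an illustrative name, not an n8n helper):

import * as Redis from 'ioredis';

// Builds an ioredis retryStrategy that retries every 500 ms but exits
// the process once failures have accumulated past `limitMs`.
function makeRetryStrategy(limitMs: number): () => number {
	let lastTimer = 0;
	let cumulativeTimeout = 0;
	return () => {
		const now = Date.now();
		if (now - lastTimer > 30000) {
			// First failure, or we recovered in the meantime: reset the window.
			lastTimer = now;
			cumulativeTimeout = 0;
		} else {
			cumulativeTimeout += now - lastTimer;
			lastTimer = now;
			if (cumulativeTimeout > limitMs) {
				console.error(`Unable to connect to Redis after ${limitMs} ms. Exiting process.`);
				process.exit(1);
			}
		}
		return 500;
	};
}

const redis = new Redis({ host: 'localhost', port: 6379, retryStrategy: makeRetryStrategy(10000) });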

View file

@ -0,0 +1,269 @@
import * as PCancelable from 'p-cancelable';
import { Command, flags } from '@oclif/command';
import {
UserSettings,
WorkflowExecute,
} from 'n8n-core';
import {
IDataObject,
INodeTypes,
IRun,
IWorkflowExecuteHooks,
Workflow,
WorkflowHooks,
} from 'n8n-workflow';
import {
FindOneOptions,
} from 'typeorm';
import {
ActiveExecutions,
CredentialsOverwrites,
CredentialTypes,
Db,
ExternalHooks,
GenericHelpers,
IBullJobData,
IBullJobResponse,
IExecutionFlattedDb,
IExecutionResponse,
LoadNodesAndCredentials,
NodeTypes,
ResponseHelper,
WorkflowCredentials,
WorkflowExecuteAdditionalData,
} from '../src';
import * as config from '../config';
import * as Bull from 'bull';
import * as Queue from '../src/Queue';
export class Worker extends Command {
static description = '\nStarts an n8n worker';
static examples = [
`$ n8n worker --concurrency=5`,
];
static flags = {
help: flags.help({ char: 'h' }),
concurrency: flags.integer({
default: 10,
description: 'How many jobs can run in parallel.',
}),
};
static runningJobs: {
[key: string]: PCancelable<IRun>;
} = {};
static jobQueue: Bull.Queue;
static processExistCode = 0;
// static activeExecutions = ActiveExecutions.getInstance();
/**
* Stops n8n gracefully.
* Makes sure, for example, that all the webhooks registered
* with third-party services get removed.
*/
static async stopProcess() {
console.log(`\nStopping n8n...`);
// Stop accepting new jobs
Worker.jobQueue.pause(true);
try {
const externalHooks = ExternalHooks();
await externalHooks.run('n8n.stop', []);
const maxStopTime = 30000;
const stopTime = new Date().getTime() + maxStopTime;
setTimeout(() => {
// In case something goes wrong with the shutdown we
// kill the process after max. 30 seconds no matter what
process.exit(Worker.processExistCode);
}, maxStopTime);
// Wait for active workflow executions to finish
let count = 0;
while (Object.keys(Worker.runningJobs).length !== 0) {
if (count++ % 4 === 0) {
const waitLeft = Math.ceil((stopTime - new Date().getTime()) / 1000);
console.log(`Waiting for ${Object.keys(Worker.runningJobs).length} active executions to finish... (wait ${waitLeft} more seconds)`);
}
await new Promise((resolve) => {
setTimeout(resolve, 500);
});
}
} catch (error) {
console.error('There was an error shutting down n8n.', error);
}
process.exit(Worker.processExistCode);
}
async runJob(job: Bull.Job, nodeTypes: INodeTypes): Promise<IBullJobResponse> {
const jobData = job.data as IBullJobData;
const executionDb = await Db.collections.Execution!.findOne(jobData.executionId) as IExecutionFlattedDb;
const currentExecutionDb = ResponseHelper.unflattenExecutionData(executionDb) as IExecutionResponse;
console.log(`Start job: ${job.id} (Workflow ID: ${currentExecutionDb.workflowData.id} | Execution: ${jobData.executionId})`);
let staticData = currentExecutionDb.workflowData!.staticData;
if (jobData.loadStaticData === true) {
const findOptions = {
select: ['id', 'staticData'],
} as FindOneOptions;
const workflowData = await Db.collections!.Workflow!.findOne(currentExecutionDb.workflowData.id, findOptions);
if (workflowData === undefined) {
throw new Error(`The workflow with the ID "${currentExecutionDb.workflowData.id}" could not be found`);
}
staticData = workflowData.staticData;
}
const workflow = new Workflow({ id: currentExecutionDb.workflowData.id as string, name: currentExecutionDb.workflowData.name, nodes: currentExecutionDb.workflowData!.nodes, connections: currentExecutionDb.workflowData!.connections, active: currentExecutionDb.workflowData!.active, nodeTypes, staticData, settings: currentExecutionDb.workflowData!.settings });
const credentials = await WorkflowCredentials(currentExecutionDb.workflowData.nodes);
const additionalData = await WorkflowExecuteAdditionalData.getBase(credentials);
additionalData.hooks = WorkflowExecuteAdditionalData.getWorkflowHooksWorkerExecuter(currentExecutionDb.mode, job.data.executionId, currentExecutionDb.workflowData, { retryOf: currentExecutionDb.retryOf as string });
let workflowExecute: WorkflowExecute;
let workflowRun: PCancelable<IRun>;
if (currentExecutionDb.data !== undefined) {
workflowExecute = new WorkflowExecute(additionalData, currentExecutionDb.mode, currentExecutionDb.data);
workflowRun = workflowExecute.processRunExecutionData(workflow);
} else {
// Execute all nodes
// Can execute without webhook so go on
workflowExecute = new WorkflowExecute(additionalData, currentExecutionDb.mode);
workflowRun = workflowExecute.run(workflow);
}
Worker.runningJobs[job.id] = workflowRun;
// Wait till the execution is finished
const runData = await workflowRun;
delete Worker.runningJobs[job.id];
return {
success: true,
};
}
async run() {
console.log('Starting n8n worker...');
// Make sure that n8n shuts down gracefully if possible
process.on('SIGTERM', Worker.stopProcess);
process.on('SIGINT', Worker.stopProcess);
// Wrap in an async IIFE so that the process does not close but we can still use await
await (async () => {
try {
const { flags } = this.parse(Worker);
// Start directly with the init of the database to improve startup time
const startDbInitPromise = Db.init().catch(error => {
console.error(`There was an error initializing DB: ${error.message}`);
Worker.processExistCode = 1;
// @ts-ignore
process.emit('SIGINT');
});
// Make sure the settings exist
await UserSettings.prepareUserSettings();
// Load all node and credential types
const loadNodesAndCredentials = LoadNodesAndCredentials();
await loadNodesAndCredentials.init();
// Load the credentials overwrites if any exist
const credentialsOverwrites = CredentialsOverwrites();
await credentialsOverwrites.init();
// Load all external hooks
const externalHooks = ExternalHooks();
await externalHooks.init();
// Add the found types to an instance other parts of the application can use
const nodeTypes = NodeTypes();
await nodeTypes.init(loadNodesAndCredentials.nodeTypes);
const credentialTypes = CredentialTypes();
await credentialTypes.init(loadNodesAndCredentials.credentialTypes);
// Wait till the database is ready
await startDbInitPromise;
const redisConnectionTimeoutLimit = config.get('queue.bull.redis.timeoutThreshold');
Worker.jobQueue = Queue.getInstance().getBullObjectInstance();
Worker.jobQueue.process(flags.concurrency, (job) => this.runJob(job, nodeTypes));
const versions = await GenericHelpers.getVersions();
console.log('\nn8n worker is now ready');
console.log(` * Version: ${versions.cli}`);
console.log(` * Concurrency: ${flags.concurrency}`);
console.log('');
Worker.jobQueue.on('global:progress', (jobId, progress) => {
// The progress of a job got updated, which is used
// to communicate that a job got canceled.
if (progress === -1) {
// Job has to get canceled
if (Worker.runningJobs[jobId] !== undefined) {
// Job is processed by current worker so cancel
Worker.runningJobs[jobId].cancel();
delete Worker.runningJobs[jobId];
}
}
});
let lastTimer = 0, cumulativeTimeout = 0;
Worker.jobQueue.on('error', (error: Error) => {
if (error.toString().includes('ECONNREFUSED') === true) {
const now = Date.now();
if (now - lastTimer > 30000) {
// Means we had no timeout at all or last timeout was temporary and we recovered
lastTimer = now;
cumulativeTimeout = 0;
} else {
cumulativeTimeout += now - lastTimer;
lastTimer = now;
if (cumulativeTimeout > redisConnectionTimeoutLimit) {
console.error('Unable to connect to Redis after ' + redisConnectionTimeoutLimit + ' ms. Exiting process.');
process.exit(1);
}
}
console.warn('Redis unavailable - trying to reconnect...');
} else if (error.toString().includes('Error initializing Lua scripts') === true) {
// This is a non-recoverable error
// It happens when the worker starts while Redis is unavailable
// Even if Redis comes back online, the worker will remain a zombie
console.error('Error initializing worker.');
process.exit(2);
} else {
console.error('Error from queue: ', error);
}
});
} catch (error) {
this.error(`There was an error: ${error.message}`);
Worker.processExistCode = 1;
// @ts-ignore
process.emit('SIGINT');
}
})();
}
}
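
Note how cancellation works: there is no direct message to a specific worker. Any process that can reach the queue sets the job's progress to -1, Bull broadcasts that on 'global:progress' to every worker, and the one holding the job in Worker.runningJobs cancels its PCancelable run. A hedged sketch of the sending side, assuming the same bull package (the queue name 'jobs' and the Redis options are illustrative; the real values come from the queue config):

import * as Bull from 'bull';

async function cancelExecution(jobId: Bull.JobId): Promise<void> {
	const queue = new Bull('jobs', { redis: { host: 'localhost', port: 6379 } });
	const job = await queue.getJob(jobId);
	if (job !== null) {
		// -1 is the sentinel value the workers listen for on 'global:progress'
		await job.progress(-1);
	}
	await queue.close();
}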

View file

@ -8,60 +8,52 @@ const config = convict({
database: {
type: {
doc: 'Type of database to use',
format: ['sqlite', 'mariadb', 'mongodb', 'mysqldb', 'postgresdb'],
format: ['sqlite', 'mariadb', 'mysqldb', 'postgresdb'],
default: 'sqlite',
env: 'DB_TYPE'
},
mongodb: {
connectionUrl: {
doc: 'MongoDB Connection URL',
format: '*',
default: 'mongodb://user:password@localhost:27017/database',
env: 'DB_MONGODB_CONNECTION_URL'
}
env: 'DB_TYPE',
},
tablePrefix: {
doc: 'Prefix for table names',
format: '*',
default: '',
env: 'DB_TABLE_PREFIX'
env: 'DB_TABLE_PREFIX',
},
postgresdb: {
database: {
doc: 'PostgresDB Database',
format: String,
default: 'n8n',
env: 'DB_POSTGRESDB_DATABASE'
env: 'DB_POSTGRESDB_DATABASE',
},
host: {
doc: 'PostgresDB Host',
format: String,
default: 'localhost',
env: 'DB_POSTGRESDB_HOST'
env: 'DB_POSTGRESDB_HOST',
},
password: {
doc: 'PostgresDB Password',
format: String,
default: '',
env: 'DB_POSTGRESDB_PASSWORD'
env: 'DB_POSTGRESDB_PASSWORD',
},
port: {
doc: 'PostgresDB Port',
format: Number,
default: 5432,
env: 'DB_POSTGRESDB_PORT'
env: 'DB_POSTGRESDB_PORT',
},
user: {
doc: 'PostgresDB User',
format: String,
default: 'root',
env: 'DB_POSTGRESDB_USER'
env: 'DB_POSTGRESDB_USER',
},
schema: {
doc: 'PostgresDB Schema',
format: String,
default: 'public',
env: 'DB_POSTGRESDB_SCHEMA'
env: 'DB_POSTGRESDB_SCHEMA',
},
ssl: {
@ -89,7 +81,7 @@ const config = convict({
default: true,
env: 'DB_POSTGRESDB_SSL_REJECT_UNAUTHORIZED',
},
}
},
},
mysqldb: {
@ -97,31 +89,39 @@ const config = convict({
doc: 'MySQL Database',
format: String,
default: 'n8n',
env: 'DB_MYSQLDB_DATABASE'
env: 'DB_MYSQLDB_DATABASE',
},
host: {
doc: 'MySQL Host',
format: String,
default: 'localhost',
env: 'DB_MYSQLDB_HOST'
env: 'DB_MYSQLDB_HOST',
},
password: {
doc: 'MySQL Password',
format: String,
default: '',
env: 'DB_MYSQLDB_PASSWORD'
env: 'DB_MYSQLDB_PASSWORD',
},
port: {
doc: 'MySQL Port',
format: Number,
default: 3306,
env: 'DB_MYSQLDB_PORT'
env: 'DB_MYSQLDB_PORT',
},
user: {
doc: 'MySQL User',
format: String,
default: 'root',
env: 'DB_MYSQLDB_USER'
env: 'DB_MYSQLDB_USER',
},
},
sqlite: {
executeVacuumOnStartup: {
doc: 'Runs the VACUUM operation on startup to rebuild the database. Reduces file size and optimizes indexes. WARNING: This is a long-running blocking operation and will increase start-up time.',
format: Boolean,
default: false,
env: 'DB_SQLITE_VACUUM_ON_STARTUP',
},
},
},
@ -136,7 +136,7 @@ const config = convict({
doc: 'Overwrites for credentials',
format: '*',
default: '{}',
env: 'CREDENTIALS_OVERWRITE_DATA'
env: 'CREDENTIALS_OVERWRITE_DATA',
},
endpoint: {
doc: 'Fetch credentials from API',
@ -156,7 +156,14 @@ const config = convict({
doc: 'In what process workflows should be executed',
format: ['main', 'own'],
default: 'own',
env: 'EXECUTIONS_PROCESS'
env: 'EXECUTIONS_PROCESS',
},
mode: {
doc: 'Whether executions should run directly or via queue',
format: ['regular', 'queue'],
default: 'regular',
env: 'EXECUTIONS_MODE',
},
// A Workflow times out and gets canceled after this time (seconds).
@ -174,13 +181,13 @@ const config = convict({
doc: 'Max run time (seconds) before stopping the workflow execution',
format: Number,
default: -1,
env: 'EXECUTIONS_TIMEOUT'
env: 'EXECUTIONS_TIMEOUT',
},
maxTimeout: {
doc: 'Max execution time (seconds) that can be set for a workflow individually',
format: Number,
default: 3600,
env: 'EXECUTIONS_TIMEOUT_MAX'
env: 'EXECUTIONS_TIMEOUT_MAX',
},
// If a workflow executes all the data gets saved by default. This
@ -193,13 +200,19 @@ const config = convict({
doc: 'What workflow execution data to save on error',
format: ['all', 'none'],
default: 'all',
env: 'EXECUTIONS_DATA_SAVE_ON_ERROR'
env: 'EXECUTIONS_DATA_SAVE_ON_ERROR',
},
saveDataOnSuccess: {
doc: 'What workflow execution data to save on success',
format: ['all', 'none'],
default: 'all',
env: 'EXECUTIONS_DATA_SAVE_ON_SUCCESS'
env: 'EXECUTIONS_DATA_SAVE_ON_SUCCESS',
},
saveExecutionProgress: {
doc: 'Whether or not to save progress for each node executed',
format: 'Boolean',
default: false,
env: 'EXECUTIONS_DATA_SAVE_ON_PROGRESS',
},
// If the executions of workflows which got started via the editor
@ -211,7 +224,7 @@ const config = convict({
doc: 'Save data of executions when started manually via editor',
format: 'Boolean',
default: false,
env: 'EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS'
env: 'EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS',
},
// To not exceed the database's capacity and keep its size moderate
@ -223,22 +236,70 @@ const config = convict({
doc: 'Delete data of past executions on a rolling basis',
format: 'Boolean',
default: false,
env: 'EXECUTIONS_DATA_PRUNE'
env: 'EXECUTIONS_DATA_PRUNE',
},
pruneDataMaxAge: {
doc: 'How old (hours) the execution data has to be to get deleted',
format: Number,
default: 336,
env: 'EXECUTIONS_DATA_MAX_AGE'
env: 'EXECUTIONS_DATA_MAX_AGE',
},
pruneDataTimeout: {
doc: 'Timeout (seconds) after execution data has been pruned',
format: Number,
default: 3600,
env: 'EXECUTIONS_DATA_PRUNE_TIMEOUT'
env: 'EXECUTIONS_DATA_PRUNE_TIMEOUT',
},
},
queue: {
bull: {
prefix: {
doc: 'Prefix for all queue keys',
format: String,
default: '',
env: 'QUEUE_BULL_PREFIX',
},
redis: {
db: {
doc: 'Redis DB',
format: Number,
default: 0,
env: 'QUEUE_BULL_REDIS_DB',
},
host: {
doc: 'Redis Host',
format: String,
default: 'localhost',
env: 'QUEUE_BULL_REDIS_HOST',
},
password: {
doc: 'Redis Password',
format: String,
default: '',
env: 'QUEUE_BULL_REDIS_PASSWORD',
},
port: {
doc: 'Redis Port',
format: Number,
default: 6379,
env: 'QUEUE_BULL_REDIS_PORT',
},
timeoutThreshold: {
doc: 'Redis timeout threshold',
format: Number,
default: 10000,
env: 'QUEUE_BULL_REDIS_TIMEOUT_THRESHOLD',
},
},
queueRecoveryInterval: {
doc: 'If > 0, enables active polling of the queue that can recover from Redis crashes. Given in seconds; 0 disables it. May increase Redis traffic significantly.',
format: Number,
default: 60,
env: 'QUEUE_RECOVERY_INTERVAL',
},
},
},
generic: {
// The timezone to use. Is important for nodes like "Cron" which start the
// workflow automatically at a specified time. This setting can also be
@ -248,7 +309,7 @@ const config = convict({
doc: 'The timezone to use',
format: '*',
default: 'America/New_York',
env: 'GENERIC_TIMEZONE'
env: 'GENERIC_TIMEZONE',
},
},
@ -258,66 +319,78 @@ const config = convict({
default: '/',
arg: 'path',
env: 'N8N_PATH',
doc: 'Path n8n is deployed to'
doc: 'Path n8n is deployed to',
},
host: {
format: String,
default: 'localhost',
arg: 'host',
env: 'N8N_HOST',
doc: 'Host name n8n can be reached'
doc: 'Host name n8n can be reached',
},
port: {
format: Number,
default: 5678,
arg: 'port',
env: 'N8N_PORT',
doc: 'HTTP port n8n can be reached'
doc: 'HTTP port n8n can be reached',
},
listen_address: {
format: String,
default: '0.0.0.0',
env: 'N8N_LISTEN_ADDRESS',
doc: 'IP address n8n should listen on'
doc: 'IP address n8n should listen on',
},
protocol: {
format: ['http', 'https'],
default: 'http',
env: 'N8N_PROTOCOL',
doc: 'HTTP Protocol via which n8n can be reached'
doc: 'HTTP Protocol via which n8n can be reached',
},
ssl_key: {
format: String,
default: '',
env: 'N8N_SSL_KEY',
doc: 'SSL Key for HTTPS Protocol'
doc: 'SSL Key for HTTPS Protocol',
},
ssl_cert: {
format: String,
default: '',
env: 'N8N_SSL_CERT',
doc: 'SSL Cert for HTTPS Protocol'
doc: 'SSL Cert for HTTPS Protocol',
},
security: {
excludeEndpoints: {
doc: 'Additional endpoints to exclude from auth checks. Multiple endpoints can be separated by colon (":")',
format: String,
default: '',
env: 'N8N_AUTH_EXCLUDE_ENDPOINTS',
},
basicAuth: {
active: {
format: 'Boolean',
default: false,
env: 'N8N_BASIC_AUTH_ACTIVE',
doc: 'If basic auth should be activated for editor and REST-API'
doc: 'If basic auth should be activated for editor and REST-API',
},
user: {
format: String,
default: '',
env: 'N8N_BASIC_AUTH_USER',
doc: 'The name of the basic auth user'
doc: 'The name of the basic auth user',
},
password: {
format: String,
default: '',
env: 'N8N_BASIC_AUTH_PASSWORD',
doc: 'The password of the basic auth user'
doc: 'The password of the basic auth user',
},
hash: {
format: 'Boolean',
default: false,
env: 'N8N_BASIC_AUTH_HASH',
doc: 'If password for basic auth is hashed',
},
},
jwtAuth: {
@ -325,71 +398,109 @@ const config = convict({
format: 'Boolean',
default: false,
env: 'N8N_JWT_AUTH_ACTIVE',
doc: 'If JWT auth should be activated for editor and REST-API'
doc: 'If JWT auth should be activated for editor and REST-API',
},
jwtHeader: {
format: String,
default: '',
env: 'N8N_JWT_AUTH_HEADER',
doc: 'The request header containing a signed JWT'
doc: 'The request header containing a signed JWT',
},
jwtHeaderValuePrefix: {
format: String,
default: '',
env: 'N8N_JWT_AUTH_HEADER_VALUE_PREFIX',
doc: 'The request header value prefix to strip (optional)'
doc: 'The request header value prefix to strip (optional)',
},
jwksUri: {
format: String,
default: '',
env: 'N8N_JWKS_URI',
doc: 'The URI to fetch JWK Set for JWT authentication'
doc: 'The URI to fetch JWK Set for JWT authentication',
},
jwtIssuer: {
format: String,
default: '',
env: 'N8N_JWT_ISSUER',
doc: 'JWT issuer to expect (optional)'
doc: 'JWT issuer to expect (optional)',
},
jwtNamespace: {
format: String,
default: '',
env: 'N8N_JWT_NAMESPACE',
doc: 'JWT namespace to expect (optional)'
doc: 'JWT namespace to expect (optional)',
},
jwtAllowedTenantKey: {
format: String,
default: '',
env: 'N8N_JWT_ALLOWED_TENANT_KEY',
doc: 'JWT tenant key name to inspect within JWT namespace (optional)'
doc: 'JWT tenant key name to inspect within JWT namespace (optional)',
},
jwtAllowedTenant: {
format: String,
default: '',
env: 'N8N_JWT_ALLOWED_TENANT',
doc: 'JWT tenant to allow (optional)'
doc: 'JWT tenant to allow (optional)',
},
},
},
endpoints: {
metrics: {
enable: {
format: 'Boolean',
default: false,
env: 'N8N_METRICS',
doc: 'Enable metrics endpoint',
},
prefix: {
format: String,
default: 'n8n_',
env: 'N8N_METRICS_PREFIX',
doc: 'An optional prefix for metric names. Default: n8n_',
},
},
rest: {
format: String,
default: 'rest',
env: 'N8N_ENDPOINT_REST',
doc: 'Path for rest endpoint'
doc: 'Path for rest endpoint',
},
webhook: {
format: String,
default: 'webhook',
env: 'N8N_ENDPOINT_WEBHOOK',
doc: 'Path for webhook endpoint'
doc: 'Path for webhook endpoint',
},
webhookTest: {
format: String,
default: 'webhook-test',
env: 'N8N_ENDPOINT_WEBHOOK_TEST',
doc: 'Path for test-webhook endpoint'
doc: 'Path for test-webhook endpoint',
},
disableProductionWebhooksOnMainProcess: {
format: Boolean,
default: false,
env: 'N8N_DISABLE_PRODUCTION_MAIN_PROCESS',
doc: 'Disable production webhooks from the main process. This helps ensure that no HTTP traffic load reaches the main process when using webhook-specific processes.',
},
skipWebhoooksDeregistrationOnShutdown: {
/**
* Longer explanation: n8n deregisters webhooks on shutdown / deactivation
* and registers on startup / activation. If we skip
* deactivation on shutdown, webhooks will remain active on 3rd party services.
* We don't have to worry about startup as it always
* checks if webhooks already exist.
* If users want to upgrade n8n, it is possible to run
* two instances simultaneously without downtime, similar
* to blue/green deployment.
* WARNING: Trigger nodes (like Cron) will cause duplication
* of work, so be aware when using.
*/
doc: 'Deregister webhooks on external services only when workflows are deactivated.',
format: Boolean,
default: false,
env: 'N8N_SKIP_WEBHOOK_DEREGISTRATION_SHUTDOWN',
},
},
@ -397,10 +508,34 @@ const config = convict({
doc: 'Files containing external hooks. Multiple files can be separated by colon (":")',
format: String,
default: '',
env: 'EXTERNAL_HOOK_FILES'
env: 'EXTERNAL_HOOK_FILES',
},
nodes: {
include: {
doc: 'Nodes to load',
format: function check(rawValue) {
if (rawValue === '') {
return;
}
try {
const values = JSON.parse(rawValue);
if (!Array.isArray(values)) {
throw new Error();
}
for (const value of values) {
if (typeof value !== 'string') {
throw new Error();
}
}
} catch (error) {
throw new TypeError(`The nodes to include are not a valid array of strings.`);
}
},
default: undefined,
env: 'NODES_INCLUDE',
},
exclude: {
doc: 'Nodes not to load',
format: function check(rawValue) {
@ -421,13 +556,13 @@ const config = convict({
}
},
default: '[]',
env: 'NODES_EXCLUDE'
env: 'NODES_EXCLUDE',
},
errorTriggerType: {
doc: 'Node Type to use as Error Trigger',
format: String,
default: 'n8n-nodes-base.errorTrigger',
env: 'NODES_ERROR_TRIGGER_TYPE'
env: 'NODES_ERROR_TRIGGER_TYPE',
},
},
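
Every key in this schema follows the same convict pattern: a doc string, a format, a default, and an env override, so the new queue block above can be driven entirely through QUEUE_BULL_* environment variables. As a small consumption sketch, this is roughly how the commands build ioredis options from those keys, copying only values that are actually set so ioredis defaults still apply (getRedisOptions is an illustrative helper, not part of n8n):

import * as config from '../config';

interface RedisOptions {
	host?: string;
	port?: number;
	db?: number;
	password?: string;
}

function getRedisOptions(): RedisOptions {
	const options: RedisOptions = {};
	const host = config.get('queue.bull.redis.host') as string;
	const port = config.get('queue.bull.redis.port') as number;
	const db = config.get('queue.bull.redis.db') as number;
	const password = config.get('queue.bull.redis.password') as string;
	if (host) { options.host = host; }
	if (port) { options.port = port; }
	if (db) { options.db = db; }
	if (password) { options.password = password; }
	return options;
}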

View file

@ -1,4 +1,4 @@
import {MongoDb, SQLite, MySQLDb, PostgresDb} from '../src/databases/index';
import { SQLite, MySQLDb, PostgresDb } from '../src/databases/index';
module.exports = [
{
@ -19,25 +19,6 @@ module.exports = [
"subscribersDir": "./src/databases/sqlite/subscribers"
}
},
{
"name": "mongodb",
"type": "mongodb",
"logging": false,
"entities": Object.values(MongoDb),
"url": "mongodb://root:example@localhost:27017/n8n",
"authSource": 'admin',
"migrations": [
"./src/databases/mongodb/migrations/*.ts"
],
"subscribers": [
"src/subscriber/**/*.ts"
],
"cli": {
"entitiesDir": "./src/databases/mongodb",
"migrationsDir": "./src/databases/mongodb/Migrations",
"subscribersDir": "./src/databases/mongodb/Subscribers"
}
},
{
"name": "postgres",
"type": "postgres",

View file

@ -1,133 +1,138 @@
{
"name": "n8n",
"version": "0.78.0",
"description": "n8n Workflow Automation Tool",
"license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://n8n.io",
"author": {
"name": "Jan Oberhauser",
"email": "jan@n8n.io"
"name": "n8n",
"version": "0.113.0",
"description": "n8n Workflow Automation Tool",
"license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://n8n.io",
"author": {
"name": "Jan Oberhauser",
"email": "jan@n8n.io"
},
"repository": {
"type": "git",
"url": "git+https://github.com/n8n-io/n8n.git"
},
"main": "dist/index",
"types": "dist/src/index.d.ts",
"oclif": {
"commands": "./dist/commands",
"bin": "n8n"
},
"scripts": {
"build": "tsc",
"dev": "concurrently -k -n \"TypeScript,Node\" -c \"yellow.bold,cyan.bold\" \"npm run watch\" \"nodemon\"",
"postpack": "rm -f oclif.manifest.json",
"prepack": "echo \"Building project...\" && rm -rf dist && tsc -b && oclif-dev manifest",
"start": "run-script-os",
"start:default": "cd bin && ./n8n",
"start:windows": "cd bin && n8n",
"test": "jest",
"tslint": "tslint -p tsconfig.json -c tslint.json",
"tslintfix": "tslint --fix -p tsconfig.json -c tslint.json",
"watch": "tsc --watch",
"typeorm": "ts-node ./node_modules/typeorm/cli.js"
},
"bin": {
"n8n": "./bin/n8n"
},
"keywords": [
"automate",
"automation",
"IaaS",
"iPaaS",
"n8n",
"workflow"
],
"engines": {
"node": ">=12.0.0"
},
"files": [
"bin",
"templates",
"dist",
"oclif.manifest.json"
],
"devDependencies": {
"@oclif/dev-cli": "^1.22.2",
"@types/basic-auth": "^1.1.2",
"@types/bcryptjs": "^2.4.1",
"@types/bull": "^3.3.10",
"@types/compression": "1.0.1",
"@types/connect-history-api-fallback": "^1.3.1",
"@types/convict": "^4.2.1",
"@types/dotenv": "^8.2.0",
"@types/express": "^4.17.6",
"@types/jest": "^26.0.13",
"@types/localtunnel": "^1.9.0",
"@types/lodash.get": "^4.4.6",
"@types/node": "14.0.27",
"@types/open": "^6.1.0",
"@types/parseurl": "^1.3.1",
"@types/request-promise-native": "~1.0.15",
"concurrently": "^5.1.0",
"jest": "^26.4.2",
"nodemon": "^2.0.2",
"p-cancelable": "^2.0.0",
"run-script-os": "^1.0.7",
"ts-jest": "^26.3.0",
"ts-node": "^8.9.1",
"tslint": "^6.1.2",
"typescript": "~3.9.7"
},
"dependencies": {
"@oclif/command": "^1.5.18",
"@oclif/errors": "^1.2.2",
"@types/jsonwebtoken": "^8.3.4",
"basic-auth": "^2.0.1",
"bcryptjs": "^2.4.3",
"body-parser": "^1.18.3",
"body-parser-xml": "^1.1.0",
"bull": "^3.19.0",
"client-oauth2": "^4.2.5",
"compression": "^1.7.4",
"connect-history-api-fallback": "^1.6.0",
"convict": "^6.0.1",
"csrf": "^3.1.0",
"dotenv": "^8.0.0",
"express": "^4.16.4",
"flatted": "^2.0.0",
"glob-promise": "^3.4.0",
"google-timezones-json": "^1.0.2",
"inquirer": "^7.0.1",
"jsonwebtoken": "^8.5.1",
"jwks-rsa": "~1.12.1",
"localtunnel": "^2.0.0",
"lodash.get": "^4.4.2",
"mysql2": "~2.1.0",
"n8n-core": "~0.66.0",
"n8n-editor-ui": "~0.83.0",
"n8n-nodes-base": "~0.110.0",
"n8n-workflow": "~0.55.0",
"oauth-1.0a": "^2.2.6",
"open": "^7.0.0",
"pg": "^8.3.0",
"prom-client": "^13.1.0",
"request-promise-native": "^1.0.7",
"sqlite3": "^5.0.1",
"sse-channel": "^3.1.1",
"tslib": "1.11.2",
"typeorm": "^0.2.30"
},
"jest": {
"transform": {
"^.+\\.tsx?$": "ts-jest"
},
"repository": {
"type": "git",
"url": "git+https://github.com/n8n-io/n8n.git"
},
"main": "dist/index",
"types": "dist/src/index.d.ts",
"oclif": {
"commands": "./dist/commands",
"bin": "n8n"
},
"scripts": {
"build": "tsc",
"dev": "concurrently -k -n \"TypeScript,Node\" -c \"yellow.bold,cyan.bold\" \"npm run watch\" \"nodemon\"",
"postpack": "rm -f oclif.manifest.json",
"prepack": "echo \"Building project...\" && rm -rf dist && tsc -b && oclif-dev manifest",
"start": "run-script-os",
"start:default": "cd bin && ./n8n",
"start:windows": "cd bin && n8n",
"test": "jest",
"tslint": "tslint -p tsconfig.json -c tslint.json",
"watch": "tsc --watch",
"typeorm": "ts-node ./node_modules/typeorm/cli.js"
},
"bin": {
"n8n": "./bin/n8n"
},
"keywords": [
"automate",
"automation",
"IaaS",
"iPaaS",
"n8n",
"workflow"
"testURL": "http://localhost/",
"testRegex": "(/__tests__/.*|(\\.|/)(test|spec))\\.(jsx?|tsx?)$",
"testPathIgnorePatterns": [
"/dist/",
"/node_modules/"
],
"engines": {
"node": ">=8.0.0"
},
"files": [
"bin",
"templates",
"dist",
"oclif.manifest.json"
],
"devDependencies": {
"@oclif/dev-cli": "^1.22.2",
"@types/basic-auth": "^1.1.2",
"@types/compression": "1.0.1",
"@types/connect-history-api-fallback": "^1.3.1",
"@types/convict": "^4.2.1",
"@types/dotenv": "^8.2.0",
"@types/express": "^4.17.6",
"@types/jest": "^25.2.1",
"@types/localtunnel": "^1.9.0",
"@types/lodash.get": "^4.4.6",
"@types/node": "^14.0.27",
"@types/open": "^6.1.0",
"@types/parseurl": "^1.3.1",
"@types/request-promise-native": "~1.0.15",
"concurrently": "^5.1.0",
"jest": "^24.9.0",
"nodemon": "^2.0.2",
"p-cancelable": "^2.0.0",
"run-script-os": "^1.0.7",
"ts-jest": "^25.4.0",
"tslint": "^6.1.2",
"typescript": "~3.7.4",
"ts-node": "^8.9.1"
},
"dependencies": {
"@oclif/command": "^1.5.18",
"@oclif/errors": "^1.2.2",
"@types/jsonwebtoken": "^8.3.4",
"basic-auth": "^2.0.1",
"body-parser": "^1.18.3",
"body-parser-xml": "^1.1.0",
"client-oauth2": "^4.2.5",
"compression": "^1.7.4",
"connect-history-api-fallback": "^1.6.0",
"convict": "^5.0.0",
"csrf": "^3.1.0",
"dotenv": "^8.0.0",
"express": "^4.16.4",
"flatted": "^2.0.0",
"glob-promise": "^3.4.0",
"google-timezones-json": "^1.0.2",
"inquirer": "^7.0.1",
"jsonwebtoken": "^8.5.1",
"jwks-rsa": "^1.6.0",
"localtunnel": "^2.0.0",
"lodash.get": "^4.4.2",
"mongodb": "^3.5.5",
"mysql2": "^2.0.1",
"n8n-core": "~0.43.0",
"n8n-editor-ui": "~0.54.0",
"n8n-nodes-base": "~0.73.0",
"n8n-workflow": "~0.39.0",
"oauth-1.0a": "^2.2.6",
"open": "^7.0.0",
"pg": "^8.3.0",
"request-promise-native": "^1.0.7",
"sqlite3": "^4.2.0",
"sse-channel": "^3.1.1",
"tslib": "1.11.2",
"typeorm": "^0.2.24"
},
"jest": {
"transform": {
"^.+\\.tsx?$": "ts-jest"
},
"testURL": "http://localhost/",
"testRegex": "(/__tests__/.*|(\\.|/)(test|spec))\\.(jsx?|tsx?)$",
"testPathIgnorePatterns": [
"/dist/",
"/node_modules/"
],
"moduleFileExtensions": [
"ts",
"tsx",
"js",
"json"
]
}
"moduleFileExtensions": [
"ts",
"tsx",
"js",
"json"
]
}
}

View file

@ -7,17 +7,22 @@ import {
} from 'n8n-core';
import {
IExecutionsCurrentSummary,
Db,
IExecutingWorkflowData,
IExecutionDb,
IExecutionFlattedDb,
IExecutionsCurrentSummary,
IWorkflowExecutionDataProcess,
ResponseHelper,
WorkflowHelpers,
} from '.';
import { ChildProcess } from 'child_process';
import * as PCancelable from 'p-cancelable';
import { ObjectID } from 'typeorm';
export class ActiveExecutions {
private nextId = 1;
private activeExecutions: {
[index: string]: IExecutingWorkflowData;
} = {};
@ -31,8 +36,30 @@ export class ActiveExecutions {
* @returns {string}
* @memberof ActiveExecutions
*/
add(executionData: IWorkflowExecutionDataProcess, process?: ChildProcess): string {
const executionId = this.nextId++;
async add(executionData: IWorkflowExecutionDataProcess, process?: ChildProcess): Promise<string> {
const fullExecutionData: IExecutionDb = {
data: executionData.executionData!,
mode: executionData.executionMode,
finished: false,
startedAt: new Date(),
workflowData: executionData.workflowData,
};
if (executionData.retryOf !== undefined) {
fullExecutionData.retryOf = executionData.retryOf.toString();
}
if (executionData.workflowData.id !== undefined && WorkflowHelpers.isWorkflowIdValid(executionData.workflowData.id.toString()) === true) {
fullExecutionData.workflowId = executionData.workflowData.id.toString();
}
const execution = ResponseHelper.flattenExecutionData(fullExecutionData);
// Save the Execution in DB
const executionResult = await Db.collections.Execution!.save(execution as IExecutionFlattedDb);
const executionId = typeof executionResult.id === "object" ? executionResult.id.toString() : executionResult.id + "";
this.activeExecutions[executionId] = {
executionData,
@ -41,7 +68,7 @@ export class ActiveExecutions {
postExecutePromises: [],
};
return executionId.toString();
return executionId;
}
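
The notable change in add() is that it is now async and persists the execution to the database up front, so the id it returns is the database id rather than an in-memory counter, and every caller has to await it. A hedged call-site sketch (executionData stands in for a fully prepared IWorkflowExecutionDataProcess):

import {
	ActiveExecutions,
	IWorkflowExecutionDataProcess,
} from '../src';

async function startTracking(executionData: IWorkflowExecutionDataProcess): Promise<string> {
	const activeExecutions = ActiveExecutions.getInstance();
	// Saves the execution to the database and returns its id as a string
	return activeExecutions.add(executionData);
}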

View file

@ -1,17 +1,17 @@
import {
IActivationError,
Db,
NodeTypes,
IActivationError,
IResponseCallbackData,
IWebhookDb,
IWorkflowDb,
IWorkflowExecutionDataProcess,
NodeTypes,
ResponseHelper,
WebhookHelpers,
WorkflowCredentials,
WorkflowExecuteAdditionalData,
WorkflowHelpers,
WorkflowRunner,
WorkflowExecuteAdditionalData,
IWebhookDb,
} from './';
import {
@ -20,16 +20,18 @@ import {
} from 'n8n-core';
import {
IDataObject,
IExecuteData,
IGetExecutePollFunctions,
IGetExecuteTriggerFunctions,
INode,
INodeExecutionData,
IRunExecutionData,
NodeHelpers,
IWorkflowExecuteAdditionalData as IWorkflowExecuteAdditionalDataWorkflow,
NodeHelpers,
WebhookHttpMethod,
Workflow,
WorkflowActivateMode,
WorkflowExecuteMode,
} from 'n8n-workflow';
@ -52,6 +54,9 @@ export class ActiveWorkflowRunner {
// so instead of pulling all the active webhooks just pull the active ones that have a trigger
const workflowsData: IWorkflowDb[] = await Db.collections.Workflow!.find({ active: true }) as IWorkflowDb[];
// Clear up the webhook table
await Db.collections.Webhook?.clear();
this.activeWorkflows = new ActiveWorkflows();
if (workflowsData.length !== 0) {
@ -59,27 +64,23 @@ export class ActiveWorkflowRunner {
console.log(' Start Active Workflows:');
console.log(' ================================');
const nodeTypes = NodeTypes();
for (const workflowData of workflowsData) {
const workflow = new Workflow({ id: workflowData.id.toString(), name: workflowData.name, nodes: workflowData.nodes, connections: workflowData.connections, active: workflowData.active, nodeTypes, staticData: workflowData.staticData, settings: workflowData.settings});
if (workflow.getTriggerNodes().length !== 0
|| workflow.getPollNodes().length !== 0) {
console.log(` - ${workflowData.name}`);
try {
await this.add(workflowData.id.toString(), workflowData);
console.log(` => Started`);
} catch (error) {
console.log(` => ERROR: Workflow could not be activated:`);
console.log(` ${error.message}`);
}
console.log(` - ${workflowData.name}`);
try {
await this.add(workflowData.id.toString(), 'init', workflowData);
console.log(` => Started`);
} catch (error) {
console.log(` => ERROR: Workflow could not be activated:`);
console.log(` ${error.message}`);
}
}
}
}
async initWebhooks() {
this.activeWorkflows = new ActiveWorkflows();
}
/**
* Removes all the currently active workflows
*
@ -87,14 +88,18 @@ export class ActiveWorkflowRunner {
* @memberof ActiveWorkflowRunner
*/
async removeAll(): Promise<void> {
if (this.activeWorkflows === null) {
return;
const activeWorkflowId: string[] = [];
if (this.activeWorkflows !== null) {
// TODO: This should be renamed!
activeWorkflowId.push.apply(activeWorkflowId, this.activeWorkflows.allActiveWorkflows());
}
const activeWorkflows = this.activeWorkflows.allActiveWorkflows();
const activeWorkflows = await this.getActiveWorkflows();
activeWorkflowId.push.apply(activeWorkflowId, activeWorkflows.map(workflow => workflow.id));
const removePromises = [];
for (const workflowId of activeWorkflows) {
for (const workflowId of activeWorkflowId) {
removePromises.push(this.remove(workflowId));
}
@ -117,12 +122,57 @@ export class ActiveWorkflowRunner {
throw new ResponseHelper.ResponseError('The "activeWorkflows" instance did not get initialized yet.', 404, 404);
}
const webhook = await Db.collections.Webhook?.findOne({ webhookPath: path, method: httpMethod }) as IWebhookDb;
// Reset request parameters
req.params = {};
// check if something exists
// Remove trailing slash
if (path.endsWith('/')) {
path = path.slice(0, -1);
}
let webhook = await Db.collections.Webhook?.findOne({ webhookPath: path, method: httpMethod }) as IWebhookDb;
let webhookId: string | undefined;
// check if path is dynamic
if (webhook === undefined) {
// The requested webhook is not registered
throw new ResponseHelper.ResponseError(`The requested webhook "${httpMethod} ${path}" is not registered.`, 404, 404);
// check if a dynamic webhook path exists
const pathElements = path.split('/');
webhookId = pathElements.shift();
const dynamicWebhooks = await Db.collections.Webhook?.find({ webhookId, method: httpMethod, pathLength: pathElements.length });
if (dynamicWebhooks === undefined || dynamicWebhooks.length === 0) {
// The requested webhook is not registered
throw new ResponseHelper.ResponseError(`The requested webhook "${httpMethod} ${path}" is not registered.`, 404, 404);
}
let maxMatches = 0;
const pathElementsSet = new Set(pathElements);
// check if static elements match in path
// if more results have been returned choose the one with the most static-route matches
dynamicWebhooks.forEach(dynamicWebhook => {
const staticElements = dynamicWebhook.webhookPath.split('/').filter(ele => !ele.startsWith(':'));
const allStaticExist = staticElements.every(staticEle => pathElementsSet.has(staticEle));
if (allStaticExist && staticElements.length > maxMatches) {
maxMatches = staticElements.length;
webhook = dynamicWebhook;
}
// handle routes with no static elements
else if (staticElements.length === 0 && !webhook) {
webhook = dynamicWebhook;
}
});
if (webhook === undefined) {
throw new ResponseHelper.ResponseError(`The requested webhook "${httpMethod} ${path}" is not registered.`, 404, 404);
}
path = webhook!.webhookPath;
// extracting params from path
webhook!.webhookPath.split('/').forEach((ele, index) => {
if (ele.startsWith(':')) {
// write params to req.params
req.params[ele.slice(1)] = pathElements[index];
}
});
}
const workflowData = await Db.collections.Workflow!.findOne(webhook.workflowId);
@ -182,8 +232,9 @@ export class ActiveWorkflowRunner {
* @returns {string[]}
* @memberof ActiveWorkflowRunner
*/
getActiveWorkflows(): Promise<IWorkflowDb[]> {
return Db.collections.Workflow?.find({ select: ['id'] }) as Promise<IWorkflowDb[]>;
async getActiveWorkflows(): Promise<IWorkflowDb[]> {
const activeWorkflows = await Db.collections.Workflow?.find({ where: { active: true }, select: ['id'] }) as IWorkflowDb[];
return activeWorkflows.filter(workflow => this.activationErrors[workflow.id.toString()] === undefined);
}
@ -223,7 +274,7 @@ export class ActiveWorkflowRunner {
* @returns {Promise<void>}
* @memberof ActiveWorkflowRunner
*/
async addWorkflowWebhooks(workflow: Workflow, additionalData: IWorkflowExecuteAdditionalDataWorkflow, mode: WorkflowExecuteMode): Promise<void> {
async addWorkflowWebhooks(workflow: Workflow, additionalData: IWorkflowExecuteAdditionalDataWorkflow, mode: WorkflowExecuteMode, activation: WorkflowActivateMode): Promise<void> {
const webhooks = WebhookHelpers.getWorkflowWebhooks(workflow, additionalData);
let path = '' as string | undefined;
@ -235,7 +286,7 @@ export class ActiveWorkflowRunner {
path = node.parameters.path as string;
if (node.parameters.path === undefined) {
path = workflow.getSimpleParameterValue(node, webhookData.webhookDescription['path']) as string | undefined;
path = workflow.expression.getSimpleParameterValue(node, webhookData.webhookDescription['path'], mode) as string | undefined;
if (path === undefined) {
// TODO: Use a proper logger
@ -244,7 +295,7 @@ export class ActiveWorkflowRunner {
}
}
const isFullPath: boolean = workflow.getSimpleParameterValue(node, webhookData.webhookDescription['isFullPath'], false) as boolean;
const isFullPath: boolean = workflow.expression.getSimpleParameterValue(node, webhookData.webhookDescription['isFullPath'], mode, false) as boolean;
const webhook = {
workflowId: webhookData.workflowId,
@ -253,29 +304,42 @@ export class ActiveWorkflowRunner {
method: webhookData.httpMethod,
} as IWebhookDb;
if (webhook.webhookPath.startsWith('/')) {
webhook.webhookPath = webhook.webhookPath.slice(1);
}
if (webhook.webhookPath.endsWith('/')) {
webhook.webhookPath = webhook.webhookPath.slice(0, -1);
}
if ((path.startsWith(':') || path.includes('/:')) && node.webhookId) {
webhook.webhookId = node.webhookId;
webhook.pathLength = webhook.webhookPath.split('/').length;
}
try {
await Db.collections.Webhook?.insert(webhook);
const webhookExists = await workflow.runWebhookMethod('checkExists', webhookData, NodeExecuteFunctions, mode, false);
if (webhookExists === false) {
const webhookExists = await workflow.runWebhookMethod('checkExists', webhookData, NodeExecuteFunctions, mode, activation, false);
if (webhookExists !== true) {
// If webhook does not exist yet create it
await workflow.runWebhookMethod('create', webhookData, NodeExecuteFunctions, mode, false);
await workflow.runWebhookMethod('create', webhookData, NodeExecuteFunctions, mode, activation, false);
}
} catch (error) {
try {
await this.removeWorkflowWebhooks(workflow.id as string);
} catch (error) {
console.error(`Could not remove webhooks of workflow "${workflow.id}" because of error: "${error.message}"`);
}
let errorMessage = '';
await Db.collections.Webhook?.delete({ workflowId: workflow.id });
// if it's a workflow from the insert
// TODO check if there is standard error code for deplicate key violation that works
// TODO check if there is standard error code for duplicate key violation that works
// with all databases
if (error.name === 'MongoError' || error.name === 'QueryFailedError') {
if (error.name === 'QueryFailedError') {
errorMessage = `The webhook path [${webhook.webhookPath}] and method [${webhook.method}] already exist.`;
} else if (error.detail) {
// it's an error running the webhook methods (checkExists, create)
errorMessage = error.detail;
@ -315,13 +379,10 @@ export class ActiveWorkflowRunner {
const webhooks = WebhookHelpers.getWorkflowWebhooks(workflow, additionalData);
for (const webhookData of webhooks) {
await workflow.runWebhookMethod('delete', webhookData, NodeExecuteFunctions, mode, false);
await workflow.runWebhookMethod('delete', webhookData, NodeExecuteFunctions, mode, 'update', false);
}
// if it's a mongo objectId convert it to string
if (typeof workflowData.id === 'object') {
workflowData.id = workflowData.id.toString();
}
await WorkflowHelpers.saveStaticData(workflow);
const webhook = {
workflowId: workflowData.id,
@ -347,8 +408,8 @@ export class ActiveWorkflowRunner {
node,
data: {
main: data,
}
}
},
},
];
const executionData: IRunExecutionData = {
@ -386,9 +447,9 @@ export class ActiveWorkflowRunner {
* @returns {IGetExecutePollFunctions}
* @memberof ActiveWorkflowRunner
*/
getExecutePollFunctions(workflowData: IWorkflowDb, additionalData: IWorkflowExecuteAdditionalDataWorkflow, mode: WorkflowExecuteMode): IGetExecutePollFunctions {
getExecutePollFunctions(workflowData: IWorkflowDb, additionalData: IWorkflowExecuteAdditionalDataWorkflow, mode: WorkflowExecuteMode, activation: WorkflowActivateMode): IGetExecutePollFunctions {
return ((workflow: Workflow, node: INode) => {
const returnFunctions = NodeExecuteFunctions.getExecutePollFunctions(workflow, node, additionalData, mode);
const returnFunctions = NodeExecuteFunctions.getExecutePollFunctions(workflow, node, additionalData, mode, activation);
returnFunctions.__emit = (data: INodeExecutionData[][]): void => {
this.runWorkflow(workflowData, node, data, additionalData, mode);
};
@ -407,12 +468,12 @@ export class ActiveWorkflowRunner {
* @returns {IGetExecuteTriggerFunctions}
* @memberof ActiveWorkflowRunner
*/
getExecuteTriggerFunctions(workflowData: IWorkflowDb, additionalData: IWorkflowExecuteAdditionalDataWorkflow, mode: WorkflowExecuteMode): IGetExecuteTriggerFunctions{
getExecuteTriggerFunctions(workflowData: IWorkflowDb, additionalData: IWorkflowExecuteAdditionalDataWorkflow, mode: WorkflowExecuteMode, activation: WorkflowActivateMode): IGetExecuteTriggerFunctions{
return ((workflow: Workflow, node: INode) => {
const returnFunctions = NodeExecuteFunctions.getExecuteTriggerFunctions(workflow, node, additionalData, mode);
const returnFunctions = NodeExecuteFunctions.getExecuteTriggerFunctions(workflow, node, additionalData, mode, activation);
returnFunctions.emit = (data: INodeExecutionData[][]): void => {
WorkflowHelpers.saveStaticData(workflow);
this.runWorkflow(workflowData, node, data, additionalData, mode);
this.runWorkflow(workflowData, node, data, additionalData, mode).catch((err) => console.error(err));
};
return returnFunctions;
});
@ -426,7 +487,7 @@ export class ActiveWorkflowRunner {
* @returns {Promise<void>}
* @memberof ActiveWorkflowRunner
*/
async add(workflowId: string, workflowData?: IWorkflowDb): Promise<void> {
async add(workflowId: string, activation: WorkflowActivateMode, workflowData?: IWorkflowDb): Promise<void> {
if (this.activeWorkflows === null) {
throw new Error(`The "activeWorkflows" instance did not get initialized yet.`);
}
@ -451,15 +512,15 @@ export class ActiveWorkflowRunner {
const mode = 'trigger';
const credentials = await WorkflowCredentials(workflowData.nodes);
const additionalData = await WorkflowExecuteAdditionalData.getBase(credentials);
const getTriggerFunctions = this.getExecuteTriggerFunctions(workflowData, additionalData, mode);
const getPollFunctions = this.getExecutePollFunctions(workflowData, additionalData, mode);
const getTriggerFunctions = this.getExecuteTriggerFunctions(workflowData, additionalData, mode, activation);
const getPollFunctions = this.getExecutePollFunctions(workflowData, additionalData, mode, activation);
// Add the workflows which have webhooks defined
await this.addWorkflowWebhooks(workflowInstance, additionalData, mode);
await this.addWorkflowWebhooks(workflowInstance, additionalData, mode, activation);
if (workflowInstance.getTriggerNodes().length !== 0
|| workflowInstance.getPollNodes().length !== 0) {
await this.activeWorkflows.add(workflowId, workflowInstance, additionalData, getTriggerFunctions, getPollFunctions);
await this.activeWorkflows.add(workflowId, workflowInstance, additionalData, mode, activation, getTriggerFunctions, getPollFunctions);
}
if (this.activationErrors[workflowId] !== undefined) {
@ -496,7 +557,11 @@ export class ActiveWorkflowRunner {
if (this.activeWorkflows !== null) {
// Remove all the webhooks of the workflow
await this.removeWorkflowWebhooks(workflowId);
try {
await this.removeWorkflowWebhooks(workflowId);
} catch (error) {
console.error(`Could not remove webhooks of workflow "${workflowId}" because of error: "${error.message}"`);
}
if (this.activationErrors[workflowId] !== undefined) {
// If there were any activation errors delete them
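
The dynamic webhook lookup above reduces to a small selection rule: treat the first path segment as the webhookId, fetch every registered webhook with that id, method and segment count, then pick the candidate whose static segments all occur in the request, preferring the one with the most static segments, and falling back to a fully dynamic route. A self-contained sketch of just that rule (simplified in the same way as the code above, matching static segments by set membership rather than by position):

interface RegisteredWebhook {
	webhookPath: string;
}

// Picks the best-matching registered path for request segments like
// ['123', 'posts'], where registered paths may contain ':param' segments.
function matchDynamicWebhook(requestSegments: string[], candidates: RegisteredWebhook[]): RegisteredWebhook | undefined {
	let matched: RegisteredWebhook | undefined;
	let maxMatches = 0;
	const segmentSet = new Set(requestSegments);
	for (const candidate of candidates) {
		const staticElements = candidate.webhookPath.split('/').filter(e => !e.startsWith(':'));
		const allStaticExist = staticElements.every(e => segmentSet.has(e));
		if (allStaticExist && staticElements.length > maxMatches) {
			maxMatches = staticElements.length;
			matched = candidate;
		} else if (staticElements.length === 0 && matched === undefined) {
			// fully dynamic route, e.g. ':id/:action'
			matched = candidate;
		}
	}
	return matched;
}

// matchDynamicWebhook(['123', 'posts'], [{ webhookPath: ':id/posts' }, { webhookPath: ':a/:b' }])
// returns { webhookPath: ':id/posts' }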

View file

@ -4,10 +4,17 @@ import {
import {
ICredentialDataDecryptedObject,
ICredentialsExpressionResolveValues,
ICredentialsHelper,
INode,
INodeParameters,
INodeProperties,
INodeType,
INodeTypeData,
INodeTypes,
NodeHelpers,
Workflow,
WorkflowExecuteMode,
} from 'n8n-workflow';
import {
@ -18,6 +25,19 @@ import {
} from './';
const mockNodeTypes: INodeTypes = {
nodeTypes: {},
init: async (nodeTypes?: INodeTypeData): Promise<void> => { },
getAll: (): INodeType[] => {
// Does not get used in Workflow so no need to return it
return [];
},
getByName: (nodeType: string): INodeType | undefined => {
return undefined;
},
};
export class CredentialsHelper extends ICredentialsHelper {
/**
@ -82,7 +102,7 @@ export class CredentialsHelper extends ICredentialsHelper {
* @returns {ICredentialDataDecryptedObject}
* @memberof CredentialsHelper
*/
getDecrypted(name: string, type: string, raw?: boolean): ICredentialDataDecryptedObject {
getDecrypted(name: string, type: string, mode: WorkflowExecuteMode, raw?: boolean, expressionResolveValues?: ICredentialsExpressionResolveValues): ICredentialDataDecryptedObject {
const credentials = this.getCredentials(name, type);
const decryptedDataOriginal = credentials.getData(this.encryptionKey);
@ -91,7 +111,7 @@ export class CredentialsHelper extends ICredentialsHelper {
return decryptedDataOriginal;
}
return this.applyDefaultsAndOverwrites(decryptedDataOriginal, type);
return this.applyDefaultsAndOverwrites(decryptedDataOriginal, type, mode, expressionResolveValues);
}
@ -103,11 +123,11 @@ export class CredentialsHelper extends ICredentialsHelper {
* @returns {ICredentialDataDecryptedObject}
* @memberof CredentialsHelper
*/
applyDefaultsAndOverwrites(decryptedDataOriginal: ICredentialDataDecryptedObject, type: string): ICredentialDataDecryptedObject {
applyDefaultsAndOverwrites(decryptedDataOriginal: ICredentialDataDecryptedObject, type: string, mode: WorkflowExecuteMode, expressionResolveValues?: ICredentialsExpressionResolveValues): ICredentialDataDecryptedObject {
const credentialsProperties = this.getCredentialsProperties(type);
// Add the default credential values
const decryptedData = NodeHelpers.getNodeParameters(credentialsProperties, decryptedDataOriginal as INodeParameters, true, false) as ICredentialDataDecryptedObject;
let decryptedData = NodeHelpers.getNodeParameters(credentialsProperties, decryptedDataOriginal as INodeParameters, true, false) as ICredentialDataDecryptedObject;
if (decryptedDataOriginal.oauthTokenData !== undefined) {
// The OAuth data gets removed as it is not defined specifically as a parameter
@ -115,6 +135,29 @@ export class CredentialsHelper extends ICredentialsHelper {
decryptedData.oauthTokenData = decryptedDataOriginal.oauthTokenData;
}
if (expressionResolveValues) {
try {
const workflow = new Workflow({ nodes: Object.values(expressionResolveValues.workflow.nodes), connections: expressionResolveValues.workflow.connectionsBySourceNode, active: false, nodeTypes: expressionResolveValues.workflow.nodeTypes });
decryptedData = workflow.expression.getParameterValue(decryptedData as INodeParameters, expressionResolveValues.runExecutionData, expressionResolveValues.runIndex, expressionResolveValues.itemIndex, expressionResolveValues.node.name, expressionResolveValues.connectionInputData, mode, false, decryptedData) as ICredentialDataDecryptedObject;
} catch (e) {
e.message += ' [Error resolving credentials]';
throw e;
}
} else {
const node = {
name: '',
typeVersion: 1,
type: 'mock',
position: [0, 0],
parameters: {} as INodeParameters,
} as INode;
const workflow = new Workflow({ nodes: [node!], connections: {}, active: false, nodeTypes: mockNodeTypes });
// Resolve expressions if any are set
decryptedData = workflow.expression.getComplexParameterValue(node!, decryptedData as INodeParameters, mode, undefined, decryptedData) as ICredentialDataDecryptedObject;
}
// Load and apply the credentials overwrites if any exist
const credentialsOverwrites = CredentialsOverwrites();
return credentialsOverwrites.applyOverwrite(type, decryptedData);

View file

@ -3,32 +3,53 @@ import {
} from 'n8n-workflow';
import {
ICredentialsOverwrite,
CredentialTypes,
GenericHelpers,
ICredentialsOverwrite,
} from './';
class CredentialsOverwritesClass {
private credentialTypes = CredentialTypes();
private overwriteData: ICredentialsOverwrite = {};
private resolvedTypes: string[] = [];
async init(overwriteData?: ICredentialsOverwrite) {
if (overwriteData !== undefined) {
// If data is already given it can directly be set instead of
// being loaded from the environment
this.overwriteData = overwriteData;
this.__setData(JSON.parse(JSON.stringify(overwriteData)));
return;
}
const data = await GenericHelpers.getConfigValue('credentials.overwrite.data') as string;
try {
this.overwriteData = JSON.parse(data);
const overwriteData = JSON.parse(data);
this.__setData(overwriteData);
} catch (error) {
throw new Error(`The credentials-overwrite is not valid JSON.`);
}
}
__setData(overwriteData: ICredentialsOverwrite) {
this.overwriteData = overwriteData;
for (const credentialTypeData of this.credentialTypes.getAll()) {
const type = credentialTypeData.name;
const overwrites = this.__getExtended(type);
if (overwrites && Object.keys(overwrites).length) {
this.overwriteData[type] = overwrites;
}
}
}
applyOverwrite(type: string, data: ICredentialDataDecryptedObject) {
const overwrites = this.get(type);
@ -38,15 +59,55 @@ class CredentialsOverwritesClass {
}
const returnData = JSON.parse(JSON.stringify(data));
Object.assign(returnData, overwrites);
// Overwrite only if there is currently no data set
for (const key of Object.keys(overwrites)) {
if ([null, undefined, ''].includes(returnData[key])) {
returnData[key] = overwrites[key];
}
}
return returnData;
}
__getExtended(type: string): ICredentialDataDecryptedObject | undefined {
if (this.resolvedTypes.includes(type)) {
// Type already got resolved and can thus be returned directly
return this.overwriteData[type];
}
const credentialTypeData = this.credentialTypes.getByName(type);
if (credentialTypeData === undefined) {
throw new Error(`The credentials of type "${type}" are not known.`);
}
if (credentialTypeData.extends === undefined) {
this.resolvedTypes.push(type);
return this.overwriteData[type];
}
const overwrites: ICredentialDataDecryptedObject = {};
for (const credentialsTypeName of credentialTypeData.extends) {
Object.assign(overwrites, this.__getExtended(credentialsTypeName));
}
if (this.overwriteData[type] !== undefined) {
Object.assign(overwrites, this.overwriteData[type]);
}
this.resolvedTypes.push(type);
return overwrites;
}
get(type: string): ICredentialDataDecryptedObject | undefined {
return this.overwriteData[type];
}
getAll(): ICredentialsOverwrite {
return this.overwriteData;
}
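
Two behaviours changed in this file: overwrites are now resolved along a credential type's extends chain, so a child type inherits its parents' overwrites with its own values taking precedence, and applyOverwrite only fills fields that are currently empty instead of clobbering values the user already set. A reduced sketch of both rules over plain maps, using an illustrative hierarchy (the type names and fields are made up; no n8n types involved):

type Data = { [key: string]: string | null | undefined };

const extendsMap: { [type: string]: string[] } = { googleSheetsOAuth2: ['googleOAuth2'] };
const overwriteData: { [type: string]: Data } = {
	googleOAuth2: { clientId: 'shared-id', clientSecret: 'shared-secret' },
	googleSheetsOAuth2: { clientSecret: 'sheets-secret' },
};

// Merge the parents' overwrites first, then the type's own, so children win.
function getExtended(type: string): Data {
	const result: Data = {};
	for (const parent of extendsMap[type] || []) {
		Object.assign(result, getExtended(parent));
	}
	return Object.assign(result, overwriteData[type]);
}

// Fill only the fields the user left empty.
function applyOverwrite(type: string, data: Data): Data {
	const result: Data = { ...data };
	const empty: Array<string | null | undefined> = [null, undefined, ''];
	for (const [key, value] of Object.entries(getExtended(type))) {
		if (empty.includes(result[key])) {
			result[key] = value;
		}
	}
	return result;
}

// applyOverwrite('googleSheetsOAuth2', { clientId: '', clientSecret: 'mine' })
// returns { clientId: 'shared-id', clientSecret: 'mine' }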

View file

@ -19,7 +19,6 @@ import { TlsOptions } from 'tls';
import * as config from '../config';
import {
MongoDb,
MySQLDb,
PostgresDb,
SQLite,
@ -32,29 +31,9 @@ export let collections: IDatabaseCollections = {
Webhook: null,
};
import {
InitialMigration1587669153312,
WebhookModel1589476000887,
CreateIndexStoppedAt1594828256133,
} from './databases/postgresdb/migrations';
import {
InitialMigration1587563438936,
WebhookModel1592679094242,
CreateIndexStoppedAt1594910478695,
} from './databases/mongodb/migrations';
import {
InitialMigration1588157391238,
WebhookModel1592447867632,
CreateIndexStoppedAt1594902918301,
} from './databases/mysqldb/migrations';
import {
InitialMigration1588102412422,
WebhookModel1592445003908,
CreateIndexStoppedAt1594825041918,
} from './databases/sqlite/migrations';
import { postgresMigrations } from './databases/postgresdb/migrations';
import { mysqlMigrations } from './databases/mysqldb/migrations';
import { sqliteMigrations } from './databases/sqlite/migrations';
import * as path from 'path';
@ -68,23 +47,6 @@ export async function init(): Promise<IDatabaseCollections> {
const entityPrefix = config.get('database.tablePrefix');
switch (dbType) {
case 'mongodb':
entities = MongoDb;
connectionOptions = {
type: 'mongodb',
entityPrefix,
url: await GenericHelpers.getConfigValue('database.mongodb.connectionUrl') as string,
useNewUrlParser: true,
migrations: [
InitialMigration1587563438936,
WebhookModel1592679094242,
CreateIndexStoppedAt1594910478695,
],
migrationsRun: true,
migrationsTableName: `${entityPrefix}migrations`,
};
break;
case 'postgresdb':
entities = PostgresDb;
@ -112,11 +74,7 @@ export async function init(): Promise<IDatabaseCollections> {
port: await GenericHelpers.getConfigValue('database.postgresdb.port') as number,
username: await GenericHelpers.getConfigValue('database.postgresdb.user') as string,
schema: config.get('database.postgresdb.schema'),
migrations: [
InitialMigration1587669153312,
WebhookModel1589476000887,
CreateIndexStoppedAt1594828256133,
],
migrations: postgresMigrations,
migrationsRun: true,
migrationsTableName: `${entityPrefix}migrations`,
ssl,
@ -135,11 +93,7 @@ export async function init(): Promise<IDatabaseCollections> {
password: await GenericHelpers.getConfigValue('database.mysqldb.password') as string,
port: await GenericHelpers.getConfigValue('database.mysqldb.port') as number,
username: await GenericHelpers.getConfigValue('database.mysqldb.user') as string,
migrations: [
InitialMigration1588157391238,
WebhookModel1592447867632,
CreateIndexStoppedAt1594902918301,
],
migrations: mysqlMigrations,
migrationsRun: true,
migrationsTableName: `${entityPrefix}migrations`,
};
@ -151,12 +105,8 @@ export async function init(): Promise<IDatabaseCollections> {
type: 'sqlite',
database: path.join(n8nFolder, 'database.sqlite'),
entityPrefix,
migrations: [
InitialMigration1588102412422,
WebhookModel1592445003908,
CreateIndexStoppedAt1594825041918
],
migrationsRun: true,
migrations: sqliteMigrations,
migrationsRun: false, // migrations for sqlite will be run manually for now; see below
migrationsTableName: `${entityPrefix}migrations`,
};
break;
@ -171,11 +121,30 @@ export async function init(): Promise<IDatabaseCollections> {
logging: false,
});
const connection = await createConnection(connectionOptions);
let connection = await createConnection(connectionOptions);
await connection.runMigrations({
transaction: 'none',
});
if (dbType === 'sqlite') {
// This specific migration changes database metadata.
// A field is now nullable. We need to reconnect so that
// n8n knows it has changed. Happens only on sqlite.
let migrations = [];
try {
migrations = await connection.query(`SELECT id FROM ${entityPrefix}migrations where name = "MakeStoppedAtNullable1607431743769"`);
} catch(error) {
// Migration table does not exist yet - it will be created after migrations run for the first time.
}
// If you remove this call, remember to turn back on the
// setting to run migrations automatically above.
await connection.runMigrations({
transaction: 'none',
});
if (migrations.length === 0) {
await connection.close();
connection = await createConnection(connectionOptions);
}
}
collections.Credentials = getRepository(entities.CredentialsEntity);
collections.Execution = getRepository(entities.ExecutionEntity);
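A note on the new migration imports: the inline migration lists above are replaced by per-database index modules (postgresMigrations, mysqlMigrations, sqliteMigrations). A hypothetical shape of the sqlite index module (the actual file is not part of this diff; the file paths are assumed):
import { InitialMigration1588102412422 } from './1588102412422-InitialMigration';
import { WebhookModel1592445003908 } from './1592445003908-WebhookModel';
import { CreateIndexStoppedAt1594825041918 } from './1594825041918-CreateIndexStoppedAt';
import { MakeStoppedAtNullable1607431743769 } from './1607431743769-MakeStoppedAtNullable';
export const sqliteMigrations = [
	InitialMigration1588102412422,
	WebhookModel1592445003908,
	CreateIndexStoppedAt1594825041918,
	MakeStoppedAtNullable1607431743769,
];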

View file

@ -1,7 +1,8 @@
import {
Db,
IExternalHooksFunctions,
IExternalHooksClass,
IExternalHooksFileData,
IExternalHooksFunctions,
} from './';
import * as config from '../config';
@ -20,6 +21,24 @@ class ExternalHooksClass implements IExternalHooksClass {
return;
}
await this.loadHooksFiles();
this.initDidRun = true;
}
async reload(externalHooks?: IExternalHooksFileData) {
this.externalHooks = {};
if (externalHooks === undefined) {
await this.loadHooksFiles(true);
} else {
this.loadHooks(externalHooks);
}
}
async loadHooksFiles(reload = false) {
const externalHookFiles = config.get('externalHookFiles').split(':');
// Load all the provided hook-files
@ -27,29 +46,37 @@ class ExternalHooksClass implements IExternalHooksClass {
hookFilePath = hookFilePath.trim();
if (hookFilePath !== '') {
try {
const hookFile = require(hookFilePath);
for (const resource of Object.keys(hookFile)) {
for (const operation of Object.keys(hookFile[resource])) {
// Save all the hook functions directly under their string
// format in an array
const hookString = `${resource}.${operation}`;
if (this.externalHooks[hookString] === undefined) {
this.externalHooks[hookString] = [];
}
this.externalHooks[hookString].push.apply(this.externalHooks[hookString], hookFile[resource][operation]);
}
if (reload === true) {
delete require.cache[require.resolve(hookFilePath)];
}
const hookFile = require(hookFilePath) as IExternalHooksFileData;
this.loadHooks(hookFile);
} catch (error) {
throw new Error(`Problem loading external hook file "${hookFilePath}": ${error.message}`);
}
}
}
this.initDidRun = true;
}
loadHooks(hookFileData: IExternalHooksFileData) {
for (const resource of Object.keys(hookFileData)) {
for (const operation of Object.keys(hookFileData[resource])) {
// Save all the hook functions directly under their string
// format in an array
const hookString = `${resource}.${operation}`;
if (this.externalHooks[hookString] === undefined) {
this.externalHooks[hookString] = [];
}
this.externalHooks[hookString].push.apply(this.externalHooks[hookString], hookFileData[resource][operation]);
}
}
}
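For reference, a hypothetical hook file matching IExternalHooksFileData (the resource and operation names here are examples only; real hook names depend on where ExternalHooks().run() is called):
module.exports = {
	workflow: {
		create: [
			async function (this: IExternalHooksFunctions, workflowData: object): Promise<void> {
				// invoked via ExternalHooks().run('workflow.create', [workflowData])
				console.log('workflow.create hook received', workflowData);
			},
		],
	},
};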
async run(hookName: string, hookParameters?: any[]): Promise<void> { // tslint:disable-line:no-any
const externalHookFunctions: IExternalHooksFunctions = {
dbCollections: Db.collections,
@ -64,6 +91,11 @@ class ExternalHooksClass implements IExternalHooksClass {
}
}
exists(hookName: string): boolean {
return !!this.externalHooks[hookName];
}
}

View file

@ -95,14 +95,15 @@ export async function getConfigValue(configKey: string): Promise<string | boolea
// Get the environment variable
const configSchema = config.getSchema();
let currentSchema = configSchema.properties as IDataObject;
// @ts-ignore
let currentSchema = configSchema._cvtProperties as IDataObject;
for (const key of configKeyParts) {
if (currentSchema[key] === undefined) {
throw new Error(`Key "${key}" of ConfigKey "${configKey}" does not exist`);
} else if ((currentSchema[key]! as IDataObject).properties === undefined) {
} else if ((currentSchema[key]! as IDataObject)._cvtProperties === undefined) {
currentSchema = currentSchema[key] as IDataObject;
} else {
currentSchema = (currentSchema[key] as IDataObject).properties as IDataObject;
currentSchema = (currentSchema[key] as IDataObject)._cvtProperties as IDataObject;
}
}
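The switch from .properties to ._cvtProperties tracks convict's runtime schema shape; a minimal sketch of the structure the loop walks (values illustrative):
const exampleSchema = {
	_cvtProperties: {
		database: {
			_cvtProperties: {
				type: { env: 'DB_TYPE', default: 'sqlite' }, // leaf entry: no _cvtProperties of its own
			},
		},
	},
};
// Resolving "database.type" descends through _cvtProperties at each level
// and stops at the first entry that has no nested _cvtProperties.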

View file

@ -33,6 +33,15 @@ export interface IActivationError {
};
}
export interface IBullJobData {
executionId: string;
loadStaticData: boolean;
}
export interface IBullJobResponse {
success: boolean;
}
export interface ICustomRequest extends Request {
parsedUrl: Url | undefined;
}
@ -57,6 +66,8 @@ export interface IWebhookDb {
webhookPath: string;
method: string;
node: string;
webhookId?: string;
pathLength?: number;
}
export interface IWorkflowBase extends IWorkflowBaseWorkflow {
@ -103,14 +114,14 @@ export interface ICredentialsDecryptedResponse extends ICredentialsDecryptedDb {
id: string;
}
export type DatabaseType = 'mariadb' | 'mongodb' | 'postgresdb' | 'mysqldb' | 'sqlite';
export type DatabaseType = 'mariadb' | 'postgresdb' | 'mysqldb' | 'sqlite';
export type SaveExecutionDataType = 'all' | 'none';
export interface IExecutionBase {
id?: number | string | ObjectID;
mode: WorkflowExecuteMode;
startedAt: Date;
stoppedAt: Date;
stoppedAt?: Date; // empty value means execution is still running
workflowId?: string; // To be able to filter executions easily //
finished: boolean;
retryOf?: number | string | ObjectID; // If it is a retry, the id of the execution it is a retry of.
@ -164,12 +175,11 @@ export interface IExecutionsStopData {
finished?: boolean;
mode: WorkflowExecuteMode;
startedAt: Date;
stoppedAt: Date;
stoppedAt?: Date;
}
export interface IExecutionsSummary {
id?: string; // executionIdDb
idActive?: string; // executionIdActive
id: string;
finished?: boolean;
mode: WorkflowExecuteMode;
retryOf?: string;
@ -219,6 +229,12 @@ export interface IExternalHooks {
};
}
export interface IExternalHooksFileData {
[key: string]: {
[key: string]: Array<(...args: any[]) => Promise<void>>; //tslint:disable-line:no-any
};
}
export interface IExternalHooksFunctions {
dbCollections: IDatabaseCollections;
}
@ -241,9 +257,6 @@ export interface IN8nConfig {
export interface IN8nConfigDatabase {
type: DatabaseType;
mongodb: {
connectionUrl: string;
};
postgresdb: {
host: string;
password: string;
@ -288,9 +301,16 @@ export interface IN8nUISettings {
saveManualExecutions: boolean;
executionTimeout: number;
maxExecutionTimeout: number;
oauthCallbackUrls: {
oauth1: string;
oauth2: string;
};
timezone: string;
urlBaseWebhook: string;
versionCli: string;
n8nMetadata?: {
[key: string]: string | number | undefined;
};
}
export interface IPackageVersions {
@ -306,8 +326,7 @@ export type IPushDataType = 'executionFinished' | 'executionStarted' | 'nodeExec
export interface IPushDataExecutionFinished {
data: IRun;
executionIdActive: string;
executionIdDb?: string;
executionId: string;
retryOf?: string;
}

View file

@ -33,6 +33,7 @@ class LoadNodesAndCredentialsClass {
} = {};
excludeNodes: string[] | undefined = undefined;
includeNodes: string[] | undefined = undefined;
nodeModulesPath = '';
@ -63,6 +64,7 @@ class LoadNodesAndCredentialsClass {
}
this.excludeNodes = config.get('nodes.exclude');
this.includeNodes = config.get('nodes.include');
// Get all the installed packages which contain n8n nodes
const packages = await this.getN8nNodePackages();
@ -175,6 +177,10 @@ class LoadNodesAndCredentialsClass {
tempNode.description.icon = 'file:' + path.join(path.dirname(filePath), tempNode.description.icon.substr(5));
}
if (this.includeNodes !== undefined && !this.includeNodes.includes(fullNodeName)) {
return;
}
// Check if the node should be skipped
if (this.excludeNodes !== undefined && this.excludeNodes.includes(fullNodeName)) {
return;

View file

@ -1,7 +1,7 @@
import {
INodeType,
INodeTypes,
INodeTypeData,
INodeTypes,
NodeHelpers,
} from 'n8n-workflow';

67
packages/cli/src/Queue.ts Normal file
View file

@ -0,0 +1,67 @@
import * as Bull from 'bull';
import * as config from '../config';
import { IBullJobData } from './Interfaces';
export class Queue {
private jobQueue: Bull.Queue;
constructor() {
const prefix = config.get('queue.bull.prefix') as string;
const redisOptions = config.get('queue.bull.redis') as object;
// Disabling ready check is necessary as it allows worker to
// quickly reconnect to Redis if Redis crashes or is unreachable
// for some time. With it enabled, worker might take minutes to realize
// redis is back up and resume working.
// More here: https://github.com/OptimalBits/bull/issues/890
// @ts-ignore
this.jobQueue = new Bull('jobs', { prefix, redis: redisOptions, enableReadyCheck: false });
}
async add(jobData: IBullJobData, jobOptions: object): Promise<Bull.Job> {
return await this.jobQueue.add(jobData, jobOptions);
}
async getJob(jobId: Bull.JobId): Promise<Bull.Job | null> {
return await this.jobQueue.getJob(jobId);
}
async getJobs(jobTypes: Bull.JobStatus[]): Promise<Bull.Job[]> {
return await this.jobQueue.getJobs(jobTypes);
}
getBullObjectInstance(): Bull.Queue {
return this.jobQueue;
}
/**
*
* @param job A Bull.Job instance
* @returns boolean true if we were able to securely stop the job
*/
async stopJob(job: Bull.Job): Promise<boolean> {
if (await job.isActive()) {
// Job is already running so tell it to stop
await job.progress(-1);
return true;
} else {
// Job did not get started yet so remove from queue
try {
await job.remove();
return true;
} catch (e) {
await job.progress(-1);
}
}
return false;
}
}
let activeQueueInstance: Queue | undefined;
export function getInstance(): Queue {
if (activeQueueInstance === undefined) {
activeQueueInstance = new Queue();
}
return activeQueueInstance;
}
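A minimal usage sketch of the queue wrapper above (assumes a worker process is consuming the 'jobs' queue elsewhere; job options are illustrative):
import * as Queue from './Queue';
async function enqueueExecution(executionId: string): Promise<void> {
	const queue = Queue.getInstance();
	const job = await queue.add({ executionId, loadStaticData: true }, { removeOnComplete: true });
	// stopJob() signals an active job via progress(-1), or removes it if it has not started yet
	const stopped = await queue.stopJob(job);
	console.log(`job ${job.id} stopped:`, stopped);
}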

View file

@ -64,10 +64,14 @@ export function sendSuccessResponse(res: Response, data: any, raw?: boolean, res
}
if (raw === true) {
res.json(data);
if (typeof data === 'string') {
res.send(data);
} else {
res.json(data);
}
} else {
res.json({
data
data,
});
}
}
@ -183,7 +187,8 @@ export function unflattenExecutionData(fullExecutionData: IExecutionFlattedDb):
mode: fullExecutionData.mode,
startedAt: fullExecutionData.startedAt,
stoppedAt: fullExecutionData.stoppedAt,
finished: fullExecutionData.finished ? fullExecutionData.finished : false
finished: fullExecutionData.finished ? fullExecutionData.finished : false,
workflowId: fullExecutionData.workflowId,
});
return returnData;

File diff suppressed because it is too large

View file

@ -3,11 +3,9 @@ import * as express from 'express';
import {
IResponseCallbackData,
IWorkflowDb,
NodeTypes,
Push,
ResponseHelper,
WebhookHelpers,
WorkflowHelpers,
} from './';
import {
@ -19,6 +17,7 @@ import {
IWorkflowExecuteAdditionalData,
WebhookHttpMethod,
Workflow,
WorkflowActivateMode,
WorkflowExecuteMode,
} from 'n8n-workflow';
@ -31,6 +30,7 @@ export class TestWebhooks {
sessionId?: string;
timeout: NodeJS.Timeout,
workflowData: IWorkflowDb;
workflow: Workflow;
};
} = {};
private activeWebhooks: ActiveWebhooks | null = null;
@ -55,19 +55,45 @@ export class TestWebhooks {
* @memberof TestWebhooks
*/
async callTestWebhook(httpMethod: WebhookHttpMethod, path: string, request: express.Request, response: express.Response): Promise<IResponseCallbackData> {
const webhookData: IWebhookData | undefined = this.activeWebhooks!.get(httpMethod, path);
// Reset request parameters
request.params = {};
// Remove trailing slash
if (path.endsWith('/')) {
path = path.slice(0, -1);
}
let webhookData: IWebhookData | undefined = this.activeWebhooks!.get(httpMethod, path);
// check if path is dynamic
if (webhookData === undefined) {
const pathElements = path.split('/');
const webhookId = pathElements.shift();
webhookData = this.activeWebhooks!.get(httpMethod, pathElements.join('/'), webhookId);
if (webhookData === undefined) {
// The requested webhook is not registered
throw new ResponseHelper.ResponseError(`The requested webhook "${httpMethod} ${path}" is not registered.`, 404, 404);
}
path = webhookData.path;
// extracting params from path
path.split('/').forEach((ele, index) => {
if (ele.startsWith(':')) {
// write params to req.params
request.params[ele.slice(1)] = pathElements[index];
}
});
}
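A standalone sketch of the dynamic-path matching above, with hypothetical values:
const registeredPath = ':userId/resource';    // webhookData.path as stored
const pathElements = ['abc123', 'resource'];  // request path split, webhookId already shifted off
const params: { [key: string]: string } = {};
registeredPath.split('/').forEach((ele, index) => {
	if (ele.startsWith(':')) {
		params[ele.slice(1)] = pathElements[index];
	}
});
// params is now { userId: 'abc123' }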
const webhookKey = this.activeWebhooks!.getWebhookKey(webhookData.httpMethod, webhookData.path, webhookData.webhookId) + `|${webhookData.workflowId}`;
// TODO: Clean that duplication up one day and improve code generally
if (this.testWebhookData[webhookKey] === undefined) {
// The requested webhook is not registered
throw new ResponseHelper.ResponseError(`The requested webhook "${httpMethod} ${path}" is not registered.`, 404, 404);
}
const webhookKey = this.activeWebhooks!.getWebhookKey(webhookData.httpMethod, webhookData.path);
const workflowData = this.testWebhookData[webhookKey].workflowData;
const nodeTypes = NodeTypes();
const workflow = new Workflow({ id: webhookData.workflowId, name: workflowData.name, nodes: workflowData.nodes, connections: workflowData.connections, active: workflowData.active, nodeTypes, staticData: workflowData.staticData, settings: workflowData.settings});
const workflow = this.testWebhookData[webhookKey].workflow;
// Get the node which has the webhook defined to know where to start from and to
// get additional data
@ -79,7 +105,7 @@ export class TestWebhooks {
return new Promise(async (resolve, reject) => {
try {
const executionMode = 'manual';
const executionId = await WebhookHelpers.executeWebhook(workflow, webhookData, this.testWebhookData[webhookKey].workflowData, workflowStartNode, executionMode, this.testWebhookData[webhookKey].sessionId, request, response, (error: Error | null, data: IResponseCallbackData) => {
const executionId = await WebhookHelpers.executeWebhook(workflow, webhookData!, this.testWebhookData[webhookKey].workflowData, workflowStartNode, executionMode, this.testWebhookData[webhookKey].sessionId, request, response, (error: Error | null, data: IResponseCallbackData) => {
if (error !== null) {
return reject(error);
}
@ -96,7 +122,7 @@ export class TestWebhooks {
// Inform editor-ui that webhook got received
if (this.testWebhookData[webhookKey].sessionId !== undefined) {
const pushInstance = Push.getInstance();
pushInstance.send('testWebhookReceived', { workflowId: webhookData.workflowId, executionId }, this.testWebhookData[webhookKey].sessionId!);
pushInstance.send('testWebhookReceived', { workflowId: webhookData!.workflowId, executionId }, this.testWebhookData[webhookKey].sessionId!);
}
} catch (error) {
@ -136,7 +162,7 @@ export class TestWebhooks {
* @returns {(Promise<IExecutionDb | undefined>)}
* @memberof TestWebhooks
*/
async needsWebhookData(workflowData: IWorkflowDb, workflow: Workflow, additionalData: IWorkflowExecuteAdditionalData, mode: WorkflowExecuteMode, sessionId?: string, destinationNode?: string): Promise<boolean> {
async needsWebhookData(workflowData: IWorkflowDb, workflow: Workflow, additionalData: IWorkflowExecuteAdditionalData, mode: WorkflowExecuteMode, activation: WorkflowActivateMode, sessionId?: string, destinationNode?: string): Promise<boolean> {
const webhooks = WebhookHelpers.getWorkflowWebhooks(workflow, additionalData, destinationNode);
if (webhooks.length === 0) {
@ -154,19 +180,26 @@ export class TestWebhooks {
}, 120000);
let key: string;
const activatedKey: string[] = [];
for (const webhookData of webhooks) {
key = this.activeWebhooks!.getWebhookKey(webhookData.httpMethod, webhookData.path);
key = this.activeWebhooks!.getWebhookKey(webhookData.httpMethod, webhookData.path, webhookData.webhookId) + `|${workflowData.id}`;
await this.activeWebhooks!.add(workflow, webhookData, mode);
activatedKey.push(key);
this.testWebhookData[key] = {
sessionId,
timeout,
workflow,
workflowData,
};
// Save static data!
this.testWebhookData[key].workflowData.staticData = workflow.staticData;
try {
await this.activeWebhooks!.add(workflow, webhookData, mode, activation);
} catch (error) {
activatedKey.forEach(deleteKey => delete this.testWebhookData[deleteKey] );
await this.activeWebhooks!.removeWorkflow(workflow);
throw error;
}
}
return true;
@ -181,8 +214,6 @@ export class TestWebhooks {
* @memberof TestWebhooks
*/
cancelTestWebhook(workflowId: string): boolean {
const nodeTypes = NodeTypes();
let foundWebhook = false;
for (const webhookKey of Object.keys(this.testWebhookData)) {
const webhookData = this.testWebhookData[webhookKey];
@ -191,8 +222,6 @@ export class TestWebhooks {
continue;
}
foundWebhook = true;
clearTimeout(this.testWebhookData[webhookKey].timeout);
// Inform editor-ui that webhook got received
@ -205,12 +234,17 @@ export class TestWebhooks {
}
}
const workflowData = webhookData.workflowData;
const workflow = new Workflow({ id: workflowData.id.toString(), name: workflowData.name, nodes: workflowData.nodes, connections: workflowData.connections, active: workflowData.active, nodeTypes, staticData: workflowData.staticData, settings: workflowData.settings });
const workflow = this.testWebhookData[webhookKey].workflow;
// Remove the webhook
delete this.testWebhookData[webhookKey];
this.activeWebhooks!.removeWorkflow(workflow);
if (foundWebhook === false) {
// As it removes all webhooks of the workflow execute only once
this.activeWebhooks!.removeWorkflow(workflow);
}
foundWebhook = true;
}
return foundWebhook;
@ -225,14 +259,10 @@ export class TestWebhooks {
return;
}
const nodeTypes = NodeTypes();
let workflowData: IWorkflowDb;
let workflow: Workflow;
const workflows: Workflow[] = [];
for (const webhookKey of Object.keys(this.testWebhookData)) {
workflowData = this.testWebhookData[webhookKey].workflowData;
workflow = new Workflow({ id: workflowData.id.toString(), name: workflowData.name, nodes: workflowData.nodes, connections: workflowData.connections, active: workflowData.active, nodeTypes, staticData: workflowData.staticData, settings: workflowData.settings });
workflow = this.testWebhookData[webhookKey].workflow;
workflows.push(workflow);
}

View file

@ -3,16 +3,17 @@ import { get } from 'lodash';
import {
ActiveExecutions,
ExternalHooks,
GenericHelpers,
IExecutionDb,
IResponseCallbackData,
IWorkflowDb,
IWorkflowExecutionDataProcess,
ResponseHelper,
WorkflowHelpers,
WorkflowRunner,
WorkflowCredentials,
WorkflowExecuteAdditionalData,
WorkflowHelpers,
WorkflowRunner,
} from './';
import {
@ -114,8 +115,8 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
}
// Get the responseMode
const responseMode = workflow.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseMode'], 'onReceived');
const responseCode = workflow.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseCode'], 200) as number;
const responseMode = workflow.expression.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseMode'], executionMode, 'onReceived');
const responseCode = workflow.expression.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseCode'], executionMode, 200) as number;
if (!['onReceived', 'lastNode'].includes(responseMode as string)) {
// If the mode is not known we error. Is probably best like that instead of using
@ -173,7 +174,7 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
await WorkflowHelpers.saveStaticData(workflow);
if (webhookData.webhookDescription['responseHeaders'] !== undefined) {
const responseHeaders = workflow.getComplexParameterValue(workflowStartNode, webhookData.webhookDescription['responseHeaders'], undefined) as {
const responseHeaders = workflow.expression.getComplexParameterValue(workflowStartNode, webhookData.webhookDescription['responseHeaders'], executionMode, undefined) as {
entries?: Array<{
name: string;
value: string;
@ -221,7 +222,7 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
return;
}
// Now that we know that the workflow should run we can return the default respons
// Now that we know that the workflow should run we can return the default response
// directly if responseMode is set to "onReceived" and a response should be sent
if (responseMode === 'onReceived' && didSendResponse === false) {
// Return response directly and do not wait for the workflow to finish
@ -251,7 +252,7 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
data: {
main: webhookResultData.workflowData,
},
},
}
);
const runExecutionData: IRunExecutionData = {
@ -282,7 +283,7 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
// Start now to run the workflow
const workflowRunner = new WorkflowRunner();
const executionId = await workflowRunner.run(runData, true);
const executionId = await workflowRunner.run(runData, true, !didSendResponse);
// Get a promise which resolves when the workflow did execute and send then response
const executePromise = activeExecutions.getPostExecutePromise(executionId) as Promise<IExecutionDb | undefined>;
@ -301,18 +302,7 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
}
const returnData = WorkflowHelpers.getDataLastExecutedNodeData(data);
if (returnData === undefined) {
if (didSendResponse === false) {
responseCallback(null, {
data: {
message: 'Workflow did execute sucessfully but the last node did not return any data.',
},
responseCode,
});
}
didSendResponse = true;
return data;
} else if (returnData.error !== undefined) {
if(data.data.resultData.error || returnData?.error !== undefined) {
if (didSendResponse === false) {
responseCallback(null, {
data: {
@ -325,7 +315,20 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
return data;
}
const responseData = workflow.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseData'], 'firstEntryJson');
if (returnData === undefined) {
if (didSendResponse === false) {
responseCallback(null, {
data: {
message: 'Workflow did execute successfully but the last node did not return any data.',
},
responseCode,
});
}
didSendResponse = true;
return data;
}
const responseData = workflow.expression.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseData'], executionMode, 'firstEntryJson');
if (didSendResponse === false) {
let data: IDataObject | IDataObject[];
@ -340,13 +343,13 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
data = returnData.data!.main[0]![0].json;
const responsePropertyName = workflow.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responsePropertyName'], undefined);
const responsePropertyName = workflow.expression.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responsePropertyName'], executionMode, undefined);
if (responsePropertyName !== undefined) {
data = get(data, responsePropertyName as string) as IDataObject;
}
const responseContentType = workflow.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseContentType'], undefined);
const responseContentType = workflow.expression.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseContentType'], executionMode, undefined);
if (responseContentType !== undefined) {
// Send the webhook response manually to be able to set the content-type
@ -379,7 +382,7 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
didSendResponse = true;
}
const responseBinaryPropertyName = workflow.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseBinaryPropertyName'], 'data');
const responseBinaryPropertyName = workflow.expression.getSimpleParameterValue(workflowStartNode, webhookData.webhookDescription['responseBinaryPropertyName'], executionMode, 'data');
if (responseBinaryPropertyName === undefined && didSendResponse === false) {
responseCallback(new Error('No "responseBinaryPropertyName" is set.'), {});
@ -450,8 +453,11 @@ export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
export function getWebhookBaseUrl() {
let urlBaseWebhook = GenericHelpers.getBaseUrl();
if (process.env.WEBHOOK_TUNNEL_URL !== undefined) {
urlBaseWebhook = process.env.WEBHOOK_TUNNEL_URL;
// We renamed WEBHOOK_TUNNEL_URL to WEBHOOK_URL. This is here to maintain
// backward compatibility. Will be deprecated and removed in the future.
if (process.env.WEBHOOK_TUNNEL_URL !== undefined || process.env.WEBHOOK_URL !== undefined) {
// @ts-ignore
urlBaseWebhook = process.env.WEBHOOK_TUNNEL_URL || process.env.WEBHOOK_URL;
}
return urlBaseWebhook;
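A hypothetical check of the fallback above (WEBHOOK_TUNNEL_URL still wins if both variables are set):
process.env.WEBHOOK_URL = 'https://n8n.example.com/';
delete process.env.WEBHOOK_TUNNEL_URL;
console.log(getWebhookBaseUrl()); // -> 'https://n8n.example.com/'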

View file

@ -0,0 +1,306 @@
import * as express from 'express';
import {
readFileSync,
} from 'fs';
import {
getConnectionManager,
} from 'typeorm';
import * as bodyParser from 'body-parser';
require('body-parser-xml')(bodyParser);
import * as _ from 'lodash';
import {
ActiveExecutions,
ActiveWorkflowRunner,
Db,
ExternalHooks,
GenericHelpers,
ICustomRequest,
IExternalHooksClass,
IPackageVersions,
ResponseHelper,
} from './';
import * as compression from 'compression';
import * as config from '../config';
import * as parseUrl from 'parseurl';
export function registerProductionWebhooks() {
// HEAD webhook requests
this.app.head(`/${this.endpointWebhook}/*`, async (req: express.Request, res: express.Response) => {
// Cut away the "/webhook/" to get the registred part of the url
const requestUrl = (req as ICustomRequest).parsedUrl!.pathname!.slice(this.endpointWebhook.length + 2);
let response;
try {
response = await this.activeWorkflowRunner.executeWebhook('HEAD', requestUrl, req, res);
} catch (error) {
ResponseHelper.sendErrorResponse(res, error);
return;
}
if (response.noWebhookResponse === true) {
// Nothing else to do as the response got already sent
return;
}
ResponseHelper.sendSuccessResponse(res, response.data, true, response.responseCode);
});
// OPTIONS webhook requests
this.app.options(`/${this.endpointWebhook}/*`, async (req: express.Request, res: express.Response) => {
// Cut away the "/webhook/" to get the registred part of the url
const requestUrl = (req as ICustomRequest).parsedUrl!.pathname!.slice(this.endpointWebhook.length + 2);
let allowedMethods: string[];
try {
allowedMethods = await this.activeWorkflowRunner.getWebhookMethods(requestUrl);
allowedMethods.push('OPTIONS');
// Add custom "Allow" header to satisfy OPTIONS response.
res.append('Allow', allowedMethods);
} catch (error) {
ResponseHelper.sendErrorResponse(res, error);
return;
}
ResponseHelper.sendSuccessResponse(res, {}, true, 204);
});
// GET webhook requests
this.app.get(`/${this.endpointWebhook}/*`, async (req: express.Request, res: express.Response) => {
// Cut away the "/webhook/" to get the registred part of the url
const requestUrl = (req as ICustomRequest).parsedUrl!.pathname!.slice(this.endpointWebhook.length + 2);
let response;
try {
response = await this.activeWorkflowRunner.executeWebhook('GET', requestUrl, req, res);
} catch (error) {
ResponseHelper.sendErrorResponse(res, error);
return;
}
if (response.noWebhookResponse === true) {
// Nothing else to do as the response got already sent
return;
}
ResponseHelper.sendSuccessResponse(res, response.data, true, response.responseCode);
});
// POST webhook requests
this.app.post(`/${this.endpointWebhook}/*`, async (req: express.Request, res: express.Response) => {
// Cut away the "/webhook/" to get the registred part of the url
const requestUrl = (req as ICustomRequest).parsedUrl!.pathname!.slice(this.endpointWebhook.length + 2);
let response;
try {
response = await this.activeWorkflowRunner.executeWebhook('POST', requestUrl, req, res);
} catch (error) {
ResponseHelper.sendErrorResponse(res, error);
return;
}
if (response.noWebhookResponse === true) {
// Nothing else to do as the response got already sent
return;
}
ResponseHelper.sendSuccessResponse(res, response.data, true, response.responseCode);
});
}
class App {
app: express.Application;
activeWorkflowRunner: ActiveWorkflowRunner.ActiveWorkflowRunner;
endpointWebhook: string;
endpointPresetCredentials: string;
externalHooks: IExternalHooksClass;
saveDataErrorExecution: string;
saveDataSuccessExecution: string;
saveManualExecutions: boolean;
executionTimeout: number;
maxExecutionTimeout: number;
timezone: string;
activeExecutionsInstance: ActiveExecutions.ActiveExecutions;
versions: IPackageVersions | undefined;
restEndpoint: string;
protocol: string;
sslKey: string;
sslCert: string;
presetCredentialsLoaded: boolean;
constructor() {
this.app = express();
this.endpointWebhook = config.get('endpoints.webhook') as string;
this.saveDataErrorExecution = config.get('executions.saveDataOnError') as string;
this.saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string;
this.saveManualExecutions = config.get('executions.saveDataManualExecutions') as boolean;
this.executionTimeout = config.get('executions.timeout') as number;
this.maxExecutionTimeout = config.get('executions.maxTimeout') as number;
this.timezone = config.get('generic.timezone') as string;
this.restEndpoint = config.get('endpoints.rest') as string;
this.activeWorkflowRunner = ActiveWorkflowRunner.getInstance();
this.activeExecutionsInstance = ActiveExecutions.getInstance();
this.protocol = config.get('protocol');
this.sslKey = config.get('ssl_key');
this.sslCert = config.get('ssl_cert');
this.externalHooks = ExternalHooks();
this.presetCredentialsLoaded = false;
this.endpointPresetCredentials = config.get('credentials.overwrite.endpoint') as string;
}
/**
* Returns the current epoch time
*
* @returns {number}
* @memberof App
*/
getCurrentDate(): Date {
return new Date();
}
async config(): Promise<void> {
this.versions = await GenericHelpers.getVersions();
// Compress the response data
this.app.use(compression());
// Make sure that each request has the "parsedUrl" parameter
this.app.use((req: express.Request, res: express.Response, next: express.NextFunction) => {
(req as ICustomRequest).parsedUrl = parseUrl(req);
// @ts-ignore
req.rawBody = Buffer.from('', 'base64');
next();
});
// Support application/json type post data
this.app.use(bodyParser.json({
limit: '16mb', verify: (req, res, buf) => {
// @ts-ignore
req.rawBody = buf;
},
}));
// Support application/xml type post data
// @ts-ignore
this.app.use(bodyParser.xml({
limit: '16mb', xmlParseOptions: {
normalize: true, // Trim whitespace inside text nodes
normalizeTags: true, // Transform tags to lowercase
explicitArray: false, // Only put properties in array if length > 1
},
}));
this.app.use(bodyParser.text({
limit: '16mb', verify: (req, res, buf) => {
// @ts-ignore
req.rawBody = buf;
},
}));
// Support application/x-www-form-urlencoded post data
this.app.use(bodyParser.urlencoded({ extended: false,
verify: (req, res, buf) => {
// @ts-ignore
req.rawBody = buf;
},
}));
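The verify callbacks above stash the unparsed request body on req.rawBody; a hypothetical consumer, e.g. HMAC signature verification for an incoming webhook (express is already imported above):
import { createHmac } from 'crypto';
function signatureMatches(req: express.Request, secret: string, expected: string): boolean {
	// @ts-ignore - rawBody is attached by the bodyParser verify callbacks above
	const rawBody = req.rawBody as Buffer;
	const digest = createHmac('sha256', secret).update(rawBody).digest('hex');
	return digest === expected;
}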
if (process.env['NODE_ENV'] !== 'production') {
this.app.use((req: express.Request, res: express.Response, next: express.NextFunction) => {
// Allow access also from frontend when developing
res.header('Access-Control-Allow-Origin', 'http://localhost:8080');
res.header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS, PUT, PATCH, DELETE');
res.header('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept, sessionid');
next();
});
}
this.app.use((req: express.Request, res: express.Response, next: express.NextFunction) => {
if (Db.collections.Workflow === null) {
const error = new ResponseHelper.ResponseError('Database is not ready!', undefined, 503);
return ResponseHelper.sendErrorResponse(res, error);
}
next();
});
// ----------------------------------------
// Healthcheck
// ----------------------------------------
// Does very basic health check
this.app.get('/healthz', async (req: express.Request, res: express.Response) => {
const connectionManager = getConnectionManager();
if (connectionManager.connections.length === 0) {
const error = new ResponseHelper.ResponseError('No Database connection found!', undefined, 503);
return ResponseHelper.sendErrorResponse(res, error);
}
if (connectionManager.connections[0].isConnected === false) {
// Connection is not active
const error = new ResponseHelper.ResponseError('Database connection not active!', undefined, 503);
return ResponseHelper.sendErrorResponse(res, error);
}
// Everything fine
const responseData = {
status: 'ok',
};
ResponseHelper.sendSuccessResponse(res, responseData, true, 200);
});
registerProductionWebhooks.apply(this);
}
}
export async function start(): Promise<void> {
const PORT = config.get('port');
const ADDRESS = config.get('listen_address');
const app = new App();
await app.config();
let server;
if (app.protocol === 'https' && app.sslKey && app.sslCert) {
const https = require('https');
const privateKey = readFileSync(app.sslKey, 'utf8');
const cert = readFileSync(app.sslCert, 'utf8');
const credentials = { key: privateKey, cert };
server = https.createServer(credentials, app.app);
} else {
const http = require('http');
server = http.createServer(app.app);
}
server.listen(PORT, ADDRESS, async () => {
const versions = await GenericHelpers.getVersions();
console.log(`n8n ready on ${ADDRESS}, port ${PORT}`);
console.log(`Version: ${versions.cli}`);
await app.externalHooks.run('n8n.ready', [app]);
});
}

View file

@ -1,9 +1,11 @@
import {
ActiveExecutions,
CredentialsHelper,
Db,
ExternalHooks,
IExecutionDb,
IExecutionFlattedDb,
IExecutionResponse,
IPushDataExecutionFinished,
IWorkflowBase,
IWorkflowExecutionDataProcess,
@ -25,8 +27,8 @@ import {
IExecuteData,
IExecuteWorkflowInfo,
INode,
INodeParameters,
INodeExecutionData,
INodeParameters,
IRun,
IRunExecutionData,
ITaskData,
@ -43,9 +45,11 @@ import * as config from '../config';
import { LessThanOrEqual } from "typeorm";
const ERROR_TRIGGER_TYPE = config.get('nodes.errorTriggerType') as string;
/**
* Checks if there was an error and if errorWorkflow is defined. If so it collects
* Checks if there was an error and if errorWorkflow or a trigger is defined. If so it collects
* all the data and executes it
*
* @param {IWorkflowBase} workflowData The workflow which got executed
@ -54,14 +58,14 @@ import { LessThanOrEqual } from "typeorm";
* @param {string} [executionId] The id the execution got saved as
*/
function executeErrorWorkflow(workflowData: IWorkflowBase, fullRunData: IRun, mode: WorkflowExecuteMode, executionId?: string, retryOf?: string): void {
// Check if there was an error and if so if an errorWorkflow is set
// Check if there was an error and if so if an errorWorkflow or a trigger is set
let pastExecutionUrl: string | undefined = undefined;
if (executionId !== undefined) {
pastExecutionUrl = `${WebhookHelpers.getWebhookBaseUrl()}execution/${executionId}`;
}
if (fullRunData.data.resultData.error !== undefined && workflowData.settings !== undefined && workflowData.settings.errorWorkflow) {
if (fullRunData.data.resultData.error !== undefined) {
const workflowErrorData = {
execution: {
id: executionId,
@ -74,10 +78,18 @@ function executeErrorWorkflow(workflowData: IWorkflowBase, fullRunData: IRun, mo
workflow: {
id: workflowData.id !== undefined ? workflowData.id.toString() as string : undefined,
name: workflowData.name,
}
},
};
// Run the error workflow
WorkflowHelpers.executeErrorWorkflow(workflowData.settings.errorWorkflow as string, workflowErrorData);
// To avoid an infinite loop do not run the error workflow again if the error-workflow itself failed and it is its own error-workflow.
if (workflowData.settings !== undefined && workflowData.settings.errorWorkflow && !(mode === 'error' && workflowData.id && workflowData.settings.errorWorkflow.toString() === workflowData.id.toString())) {
// If a specific error workflow is set run only that one
WorkflowHelpers.executeErrorWorkflow(workflowData.settings.errorWorkflow as string, workflowErrorData);
} else if (mode !== 'error' && workflowData.id !== undefined && workflowData.nodes.some((node) => node.type === ERROR_TRIGGER_TYPE)) {
// If the workflow contains an error trigger, run the workflow itself as its error workflow
WorkflowHelpers.executeErrorWorkflow(workflowData.id.toString(), workflowErrorData);
}
}
}
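A condensed sketch of the dispatch decision above (types simplified; hasErrorTrigger stands for the nodes.some() check):
function pickErrorWorkflow(
	wf: { id?: string; settings?: { errorWorkflow?: string }; hasErrorTrigger: boolean },
	mode: string,
): string | undefined {
	const own = wf.settings && wf.settings.errorWorkflow ? wf.settings.errorWorkflow.toString() : undefined;
	if (own !== undefined && !(mode === 'error' && wf.id !== undefined && own === wf.id.toString())) {
		return own; // an explicit error workflow runs, unless it would re-trigger itself
	}
	if (mode !== 'error' && wf.id !== undefined && wf.hasErrorTrigger) {
		return wf.id.toString(); // a workflow with an error trigger handles its own errors
	}
	return undefined;
}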
@ -97,51 +109,15 @@ function pruneExecutionData(): void {
// throttle just on success to allow for self healing on failure
Db.collections.Execution!.delete({ stoppedAt: LessThanOrEqual(date.toISOString()) })
.then(data =>
setTimeout(() => {
throttling = false;
}, timeout * 1000)
).catch(err => throttling = false);
.then(data =>
setTimeout(() => {
throttling = false;
}, timeout * 1000)
).catch(err => throttling = false);
}
}
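pruneExecutionData() throttles itself with a module-level flag; a condensed sketch of that pattern:
let throttling = false;
function throttledPrune(timeoutSeconds: number, doDelete: () => Promise<unknown>): void {
	if (throttling) {
		return;
	}
	throttling = true;
	// reset the flag after the timeout only on success, so a failed delete can retry immediately (self healing)
	doDelete()
		.then(() => setTimeout(() => { throttling = false; }, timeoutSeconds * 1000))
		.catch(() => throttling = false);
}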
/**
* Pushes the execution out to all connected clients
*
* @param {WorkflowExecuteMode} mode The mode in which the workflow got started in
* @param {IRun} fullRunData The RunData of the finished execution
* @param {string} executionIdActive The id of the finished execution
* @param {string} [executionIdDb] The database id of finished execution
*/
export function pushExecutionFinished(mode: WorkflowExecuteMode, fullRunData: IRun, executionIdActive: string, executionIdDb?: string, retryOf?: string) {
// Clone the object except the runData. That one is not supposed
// to be send. Because that data got send piece by piece after
// each node which finished executing
const pushRunData = {
...fullRunData,
data: {
...fullRunData.data,
resultData: {
...fullRunData.data.resultData,
runData: {},
},
},
};
// Push data to editor-ui once workflow finished
const sendData: IPushDataExecutionFinished = {
executionIdActive,
executionIdDb,
data: pushRunData,
retryOf,
};
const pushInstance = Push.getInstance();
pushInstance.send('executionFinished', sendData);
}
/**
* Returns hook functions to push data to Editor-UI
*
@ -181,7 +157,10 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
],
workflowExecuteBefore: [
async function (this: WorkflowHooks): Promise<void> {
// Push data to editor-ui once workflow finished
// Push data to session which started the workflow
if (this.sessionId === undefined) {
return;
}
const pushInstance = Push.getInstance();
pushInstance.send('executionStarted', {
executionId: this.executionId,
@ -190,14 +169,127 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
retryOf: this.retryOf,
workflowId: this.workflowData.id as string,
workflowName: this.workflowData.name,
});
}
}, this.sessionId);
},
],
workflowExecuteAfter: [
async function (this: WorkflowHooks, fullRunData: IRun, newStaticData: IDataObject): Promise<void> {
pushExecutionFinished(this.mode, fullRunData, this.executionId, undefined, this.retryOf);
// Push data to session which started the workflow
if (this.sessionId === undefined) {
return;
}
// Clone the object except the runData. That one is not supposed
// to be sent. Because that data got sent piece by piece after
// each node which finished executing
const pushRunData = {
...fullRunData,
data: {
...fullRunData.data,
resultData: {
...fullRunData.data.resultData,
runData: {},
},
},
};
// Push data to editor-ui once workflow finished
// TODO: Look at this again
const sendData: IPushDataExecutionFinished = {
executionId: this.executionId,
data: pushRunData,
retryOf: this.retryOf,
};
const pushInstance = Push.getInstance();
pushInstance.send('executionFinished', sendData, this.sessionId);
},
]
],
};
}
export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowExecuteHooks {
const externalHooks = ExternalHooks();
return {
workflowExecuteBefore: [
async function (this: WorkflowHooks, workflow: Workflow): Promise<void> {
await externalHooks.run('workflow.preExecute', [workflow, this.mode]);
},
],
nodeExecuteAfter: [
async function (nodeName: string, data: ITaskData, executionData: IRunExecutionData): Promise<void> {
if (this.workflowData.settings !== undefined) {
if (this.workflowData.settings.saveExecutionProgress === false) {
return;
} else if (this.workflowData.settings.saveExecutionProgress !== true && !config.get('executions.saveExecutionProgress') as boolean) {
return;
}
} else if (!config.get('executions.saveExecutionProgress') as boolean) {
return;
}
try {
const execution = await Db.collections.Execution!.findOne(this.executionId);
if (execution === undefined) {
// Something went badly wrong if this happens.
// This check is here mostly to make typescript happy.
return undefined;
}
const fullExecutionData: IExecutionResponse = ResponseHelper.unflattenExecutionData(execution);
if (fullExecutionData.finished) {
// We already received 'workflowExecuteAfter' webhook, so this is just an async call
// that was left behind. We skip saving because the other call should have saved everything
// so this one is safe to ignore
return;
}
if (fullExecutionData.data === undefined) {
fullExecutionData.data = {
startData: {
},
resultData: {
runData: {},
},
executionData: {
contextData: {},
nodeExecutionStack: [],
waitingExecution: {},
},
};
}
if (Array.isArray(fullExecutionData.data.resultData.runData[nodeName])) {
// Append data if array exists
fullExecutionData.data.resultData.runData[nodeName].push(data);
} else {
// Initialize array and save data
fullExecutionData.data.resultData.runData[nodeName] = [data];
}
fullExecutionData.data.executionData = executionData.executionData;
// Set last executed node so that it may resume on failure
fullExecutionData.data.resultData.lastNodeExecuted = nodeName;
const flattenedExecutionData = ResponseHelper.flattenExecutionData(fullExecutionData);
await Db.collections.Execution!.update(this.executionId, flattenedExecutionData as IExecutionFlattedDb);
} catch (err) {
// TODO: Improve in the future!
// Errors here might happen because of database access
// For busy machines, we may get "Database is locked" errors.
// We do this to prevent crashes and executions ending in `unknown` state.
console.log(`Failed saving execution progress to database for execution ID ${this.executionId}`, err);
}
},
],
};
}
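The early returns at the top of nodeExecuteAfter implement a per-workflow override with a global fallback; condensed:
function shouldSaveProgress(
	settings: { saveExecutionProgress?: boolean } | undefined,
	globalDefault: boolean, // config 'executions.saveExecutionProgress'
): boolean {
	if (settings !== undefined && settings.saveExecutionProgress === false) {
		return false;
	}
	if (settings !== undefined && settings.saveExecutionProgress === true) {
		return true;
	}
	return globalDefault;
}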
@ -240,6 +332,8 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
}
if (isManualMode && saveManualExecutions === false) {
// Data is always saved, so we remove from database
await Db.collections.Execution!.delete(this.executionId);
return;
}
@ -258,6 +352,8 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
if (!isManualMode) {
executeErrorWorkflow(this.workflowData, fullRunData, this.mode, undefined, this.retryOf);
}
// Data is always saved, so we remove from database
await Db.collections.Execution!.delete(this.executionId);
return;
}
@ -281,16 +377,16 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
const executionData = ResponseHelper.flattenExecutionData(fullExecutionData);
// Save the Execution in DB
const executionResult = await Db.collections.Execution!.save(executionData as IExecutionFlattedDb);
await Db.collections.Execution!.update(this.executionId, executionData as IExecutionFlattedDb);
if (fullRunData.finished === true && this.retryOf !== undefined) {
// If the retry was successful save a reference to it on the original execution
// await Db.collections.Execution!.save(executionData as IExecutionFlattedDb);
await Db.collections.Execution!.update(this.retryOf, { retrySuccessId: executionResult.id });
await Db.collections.Execution!.update(this.retryOf, { retrySuccessId: this.executionId });
}
if (!isManualMode) {
executeErrorWorkflow(this.workflowData, fullRunData, this.mode, executionResult ? executionResult.id as string : undefined, this.retryOf);
executeErrorWorkflow(this.workflowData, fullRunData, this.mode, this.executionId, this.retryOf);
}
} catch (error) {
if (!isManualMode) {
@ -298,23 +394,148 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
}
}
},
]
],
};
}
/**
* Executes the workflow with the given ID
* Returns hook functions to save workflow execution and call error workflow
* for running with queues. Manual executions should never run on queues as
* they are always executed in the main process.
*
* @export
* @param {string} workflowId The id of the workflow to execute
* @param {IWorkflowExecuteAdditionalData} additionalData
* @param {INodeExecutionData[]} [inputData]
* @returns {(Promise<Array<INodeExecutionData[] | null>>)}
* @returns {IWorkflowExecuteHooks}
*/
export async function executeWorkflow(workflowInfo: IExecuteWorkflowInfo, additionalData: IWorkflowExecuteAdditionalData, inputData?: INodeExecutionData[]): Promise<Array<INodeExecutionData[] | null>> {
function hookFunctionsSaveWorker(): IWorkflowExecuteHooks {
return {
nodeExecuteBefore: [],
nodeExecuteAfter: [],
workflowExecuteBefore: [],
workflowExecuteAfter: [
async function (this: WorkflowHooks, fullRunData: IRun, newStaticData: IDataObject): Promise<void> {
try {
if (WorkflowHelpers.isWorkflowIdValid(this.workflowData.id as string) === true && newStaticData) {
// Workflow is saved so update in database
try {
await WorkflowHelpers.saveStaticDataById(this.workflowData.id as string, newStaticData);
} catch (e) {
// TODO: Add proper logging!
console.error(`There was a problem saving the workflow with id "${this.workflowData.id}" to save changed staticData: ${e.message}`);
}
}
// Check config to know if execution should be saved or not
let saveDataErrorExecution = config.get('executions.saveDataOnError') as string;
if (this.workflowData.settings !== undefined) {
saveDataErrorExecution = (this.workflowData.settings.saveDataErrorExecution as string) || saveDataErrorExecution;
}
const workflowDidSucceed = !fullRunData.data.resultData.error;
if (workflowDidSucceed === false && saveDataErrorExecution === 'none') {
executeErrorWorkflow(this.workflowData, fullRunData, this.mode, undefined, this.retryOf);
}
const fullExecutionData: IExecutionDb = {
data: fullRunData.data,
mode: fullRunData.mode,
finished: fullRunData.finished ? fullRunData.finished : false,
startedAt: fullRunData.startedAt,
stoppedAt: fullRunData.stoppedAt,
workflowData: this.workflowData,
};
if (this.retryOf !== undefined) {
fullExecutionData.retryOf = this.retryOf.toString();
}
if (this.workflowData.id !== undefined && WorkflowHelpers.isWorkflowIdValid(this.workflowData.id.toString()) === true) {
fullExecutionData.workflowId = this.workflowData.id.toString();
}
const executionData = ResponseHelper.flattenExecutionData(fullExecutionData);
// Save the Execution in DB
await Db.collections.Execution!.update(this.executionId, executionData as IExecutionFlattedDb);
if (fullRunData.finished === true && this.retryOf !== undefined) {
// If the retry was successful save a reference to it on the original execution
// await Db.collections.Execution!.save(executionData as IExecutionFlattedDb);
await Db.collections.Execution!.update(this.retryOf, { retrySuccessId: this.executionId });
}
} catch (error) {
executeErrorWorkflow(this.workflowData, fullRunData, this.mode, undefined, this.retryOf);
}
},
],
};
}
export async function getRunData(workflowData: IWorkflowBase, inputData?: INodeExecutionData[]): Promise<IWorkflowExecutionDataProcess> {
const mode = 'integrated';
// Find Start-Node
const requiredNodeTypes = ['n8n-nodes-base.start'];
let startNode: INode | undefined;
for (const node of workflowData!.nodes) {
if (requiredNodeTypes.includes(node.type)) {
startNode = node;
break;
}
}
if (startNode === undefined) {
// If the workflow does not contain a start node we cannot know what
// should be executed and with what data to start.
throw new Error(`The workflow does not contain a "Start" node and so cannot be executed.`);
}
// Always start with empty data if no inputData got supplied
inputData = inputData || [
{
json: {},
},
];
// Initialize the incoming data
const nodeExecutionStack: IExecuteData[] = [];
nodeExecutionStack.push(
{
node: startNode,
data: {
main: [inputData],
},
}
);
const runExecutionData: IRunExecutionData = {
startData: {
},
resultData: {
runData: {},
},
executionData: {
contextData: {},
nodeExecutionStack,
waitingExecution: {},
},
};
// Get the needed credentials for the current workflow as they will differ from the ones of the
// calling workflow.
const credentials = await WorkflowCredentials(workflowData!.nodes);
const runData: IWorkflowExecutionDataProcess = {
credentials,
executionMode: mode,
executionData: runExecutionData,
// @ts-ignore
workflowData,
};
return runData;
}
export async function getWorkflowData(workflowInfo: IExecuteWorkflowInfo): Promise<IWorkflowBase> {
if (workflowInfo.id === undefined && workflowInfo.code === undefined) {
throw new Error(`No information about the workflow to execute found. Please provide either the "id" or "code"!`);
}
@ -335,82 +556,76 @@ export async function executeWorkflow(workflowInfo: IExecuteWorkflowInfo, additi
workflowData = workflowInfo.code;
}
return workflowData!;
}
/**
* Executes the workflow with the given ID
*
* @export
* @param {string} workflowId The id of the workflow to execute
* @param {IWorkflowExecuteAdditionalData} additionalData
* @param {INodeExecutionData[]} [inputData]
* @returns {(Promise<Array<INodeExecutionData[] | null>>)}
*/
export async function executeWorkflow(workflowInfo: IExecuteWorkflowInfo, additionalData: IWorkflowExecuteAdditionalData, inputData?: INodeExecutionData[], parentExecutionId?: string, loadedWorkflowData?: IWorkflowBase, loadedRunData?: IWorkflowExecutionDataProcess): Promise<Array<INodeExecutionData[] | null> | IRun> {
const externalHooks = ExternalHooks();
await externalHooks.init();
await externalHooks.run('workflow.execute', [workflowData, mode]);
const nodeTypes = NodeTypes();
const workflowData = loadedWorkflowData !== undefined ? loadedWorkflowData : await getWorkflowData(workflowInfo);
const workflowName = workflowData ? workflowData.name : undefined;
const workflow = new Workflow({ id: workflowInfo.id, name: workflowName, nodes: workflowData!.nodes, connections: workflowData!.connections, active: workflowData!.active, nodeTypes, staticData: workflowData!.staticData });
// Does not get used so set it simply to empty string
const executionId = '';
const runData = loadedRunData !== undefined ? loadedRunData : await getRunData(workflowData, inputData);
let executionId;
if (parentExecutionId !== undefined) {
executionId = parentExecutionId;
} else {
executionId = parentExecutionId !== undefined ? parentExecutionId : await ActiveExecutions.getInstance().add(runData);
}
const runExecutionData = runData.executionData as IRunExecutionData;
// Get the needed credentials for the current workflow as they will differ to the ones of the
// calling workflow.
const credentials = await WorkflowCredentials(workflowData!.nodes);
// Create new additionalData to have different workflow loaded and to call
// different webhooks
const additionalDataIntegrated = await getBase(credentials);
additionalDataIntegrated.hooks = getWorkflowHooksIntegrated(mode, executionId, workflowData!, { parentProcessMode: additionalData.hooks!.mode });
additionalDataIntegrated.hooks = getWorkflowHooksIntegrated(runData.executionMode, executionId, workflowData!, { parentProcessMode: additionalData.hooks!.mode });
// Make sure we pass on the original executeWorkflow function we received
// This one already contains changes to talk to parent process
// and get executionID from `activeExecutions` running on main process
additionalDataIntegrated.executeWorkflow = additionalData.executeWorkflow;
// Find Start-Node
const requiredNodeTypes = ['n8n-nodes-base.start'];
let startNode: INode | undefined;
for (const node of workflowData!.nodes) {
if (requiredNodeTypes.includes(node.type)) {
startNode = node;
break;
}
}
if (startNode === undefined) {
// If the workflow does not contain a start-node we can not know what
// should be executed and with what data to start.
throw new Error(`The workflow does not contain a "Start" node and can so not be executed.`);
}
// Always start with empty data if no inputData got supplied
inputData = inputData || [
{
json: {}
}
];
// Initialize the incoming data
const nodeExecutionStack: IExecuteData[] = [];
nodeExecutionStack.push(
{
node: startNode,
data: {
main: [inputData],
},
},
);
const runExecutionData: IRunExecutionData = {
startData: {
},
resultData: {
runData: {},
},
executionData: {
contextData: {},
nodeExecutionStack,
waitingExecution: {},
},
};
// Execute the workflow
const workflowExecute = new WorkflowExecute(additionalDataIntegrated, mode, runExecutionData);
const workflowExecute = new WorkflowExecute(additionalDataIntegrated, runData.executionMode, runExecutionData);
const data = await workflowExecute.processRunExecutionData(workflow);
await externalHooks.run('workflow.postExecute', [data, workflowData]);
if (data.finished === true) {
// Workflow did finish successfully
const returnData = WorkflowHelpers.getDataLastExecutedNodeData(data);
return returnData!.data!.main;
if (parentExecutionId !== undefined) {
return data;
} else {
await ActiveExecutions.getInstance().remove(executionId, data);
const returnData = WorkflowHelpers.getDataLastExecutedNodeData(data);
return returnData!.data!.main;
}
} else {
await ActiveExecutions.getInstance().remove(executionId, data);
// Workflow did fail
const error = new Error(data.data.resultData.error!.message);
error.stack = data.data.resultData.error!.stack;
@ -460,6 +675,52 @@ export async function getBase(credentials: IWorkflowCredentials, currentNodePara
export function getWorkflowHooksIntegrated(mode: WorkflowExecuteMode, executionId: string, workflowData: IWorkflowBase, optionalParameters?: IWorkflowHooksOptionalParameters): WorkflowHooks {
optionalParameters = optionalParameters || {};
const hookFunctions = hookFunctionsSave(optionalParameters.parentProcessMode);
const preExecuteFunctions = hookFunctionsPreExecute(optionalParameters.parentProcessMode);
for (const key of Object.keys(preExecuteFunctions)) {
if (hookFunctions[key] === undefined) {
hookFunctions[key] = [];
}
hookFunctions[key]!.push.apply(hookFunctions[key], preExecuteFunctions[key]);
}
return new WorkflowHooks(hookFunctions, mode, executionId, workflowData, optionalParameters);
}
/**
* Returns WorkflowHooks instance for running integrated workflows
* (Workflows which get started inside of another workflow)
*/
export function getWorkflowHooksWorkerExecuter(mode: WorkflowExecuteMode, executionId: string, workflowData: IWorkflowBase, optionalParameters?: IWorkflowHooksOptionalParameters): WorkflowHooks {
optionalParameters = optionalParameters || {};
const hookFunctions = hookFunctionsSaveWorker();
const preExecuteFunctions = hookFunctionsPreExecute(optionalParameters.parentProcessMode);
for (const key of Object.keys(preExecuteFunctions)) {
if (hookFunctions[key] === undefined) {
hookFunctions[key] = [];
}
hookFunctions[key]!.push.apply(hookFunctions[key], preExecuteFunctions[key]);
}
return new WorkflowHooks(hookFunctions, mode, executionId, workflowData, optionalParameters);
}
/**
* Returns WorkflowHooks instance for main process if workflow runs via worker
*/
export function getWorkflowHooksWorkerMain(mode: WorkflowExecuteMode, executionId: string, workflowData: IWorkflowBase, optionalParameters?: IWorkflowHooksOptionalParameters): WorkflowHooks {
optionalParameters = optionalParameters || {};
const hookFunctions = hookFunctionsPush();
const preExecuteFunctions = hookFunctionsPreExecute(optionalParameters.parentProcessMode);
for (const key of Object.keys(preExecuteFunctions)) {
if (hookFunctions[key] === undefined) {
hookFunctions[key] = [];
}
hookFunctions[key]!.push.apply(hookFunctions[key], preExecuteFunctions[key]);
}
// When running in worker mode, the main process executes
// only workflowExecuteBefore + workflowExecuteAfter,
// so to avoid confusion we remove the other hooks.
hookFunctions.nodeExecuteBefore = [];
hookFunctions.nodeExecuteAfter = [];
return new WorkflowHooks(hookFunctions, mode, executionId, workflowData, optionalParameters);
}
@ -472,12 +733,26 @@ export function getWorkflowHooksIntegrated(mode: WorkflowExecuteMode, executionI
* @param {string} executionId
* @returns {WorkflowHooks}
*/
export function getWorkflowHooksMain(data: IWorkflowExecutionDataProcess, executionId: string): WorkflowHooks {
export function getWorkflowHooksMain(data: IWorkflowExecutionDataProcess, executionId: string, isMainProcess = false): WorkflowHooks {
const hookFunctions = hookFunctionsSave();
const pushFunctions = hookFunctionsPush();
for (const key of Object.keys(pushFunctions)) {
if (hookFunctions[key] === undefined) {
hookFunctions[key] = [];
}
hookFunctions[key]!.push.apply(hookFunctions[key], pushFunctions[key]);
}
return new WorkflowHooks(hookFunctions, data.executionMode, executionId, data.workflowData, { sessionId: data.sessionId, retryOf: data.retryOf as string});
if (isMainProcess) {
const preExecuteFunctions = hookFunctionsPreExecute();
for (const key of Object.keys(preExecuteFunctions)) {
if (hookFunctions[key] === undefined) {
hookFunctions[key] = [];
}
hookFunctions[key]!.push.apply(hookFunctions[key], preExecuteFunctions[key]);
}
}
return new WorkflowHooks(hookFunctions, data.executionMode, executionId, data.workflowData, { sessionId: data.sessionId, retryOf: data.retryOf as string });
}
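All four getWorkflowHooks* factories above merge hook sets with the same push.apply pattern; condensed into a helper for clarity:
type HookMap = { [key: string]: Array<(...args: unknown[]) => Promise<void>> };
function mergeHooks(target: HookMap, source: HookMap): void {
	for (const key of Object.keys(source)) {
		if (target[key] === undefined) {
			target[key] = [];
		}
		target[key].push(...source[key]);
	}
}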

View file

@ -3,8 +3,8 @@ import {
Db,
ICredentialsTypeData,
ITransferNodeTypes,
IWorkflowExecutionDataProcess,
IWorkflowErrorData,
IWorkflowExecutionDataProcess,
NodeTypes,
WorkflowCredentials,
WorkflowRunner,
@ -120,12 +120,12 @@ export async function executeErrorWorkflow(workflowId: string, workflowErrorData
main: [
[
{
json: workflowErrorData
}
]
json: workflowErrorData,
},
],
],
},
},
}
);
const runExecutionData: IRunExecutionData = {

View file

@ -2,15 +2,21 @@ import {
ActiveExecutions,
CredentialsOverwrites,
CredentialTypes,
Db,
ExternalHooks,
IBullJobData,
IBullJobResponse,
ICredentialsOverwrite,
ICredentialsTypeData,
IExecutionFlattedDb,
IExecutionResponse,
IProcessMessageDataHook,
ITransferNodeTypes,
IWorkflowExecutionDataProcess,
IWorkflowExecutionDataProcessWithExecution,
NodeTypes,
Push,
ResponseHelper,
WorkflowExecuteAdditionalData,
WorkflowHelpers,
} from './';
@ -21,11 +27,12 @@ import {
} from 'n8n-core';
import {
IDataObject,
IExecutionError,
IRun,
Workflow,
WorkflowHooks,
WorkflowExecuteMode,
WorkflowHooks,
} from 'n8n-workflow';
import * as config from '../config';
@ -33,17 +40,26 @@ import * as PCancelable from 'p-cancelable';
import { join as pathJoin } from 'path';
import { fork } from 'child_process';
import * as Bull from 'bull';
import * as Queue from './Queue';
export class WorkflowRunner {
activeExecutions: ActiveExecutions.ActiveExecutions;
credentialsOverwrites: ICredentialsOverwrite;
push: Push.Push;
jobQueue: Bull.Queue;
constructor() {
this.push = Push.getInstance();
this.activeExecutions = ActiveExecutions.getInstance();
this.credentialsOverwrites = CredentialsOverwrites().getAll();
const executionsMode = config.get('executions.mode') as string;
if (executionsMode === 'queue') {
this.jobQueue = Queue.getInstance().getBullObjectInstance();
}
}
@ -85,9 +101,6 @@ export class WorkflowRunner {
// Remove from active execution with empty data. That will
// set the execution to failed.
this.activeExecutions.remove(executionId, fullRunData);
// Also send to Editor UI
WorkflowExecuteAdditionalData.pushExecutionFinished(executionMode, fullRunData, executionId);
}
/**
@ -99,16 +112,33 @@ export class WorkflowRunner {
* @returns {Promise<string>}
* @memberof WorkflowRunner
*/
async run(data: IWorkflowExecutionDataProcess, loadStaticData?: boolean): Promise<string> {
const externalHooks = ExternalHooks();
await externalHooks.run('workflow.execute', [data.workflowData, data.executionMode]);
async run(data: IWorkflowExecutionDataProcess, loadStaticData?: boolean, realtime?: boolean): Promise<string> {
const executionsProcess = config.get('executions.process') as string;
if (executionsProcess === 'main') {
return this.runMainProcess(data, loadStaticData);
const executionsMode = config.get('executions.mode') as string;
let executionId: string;
if (executionsMode === 'queue' && data.executionMode !== 'manual') {
// Do not run "manual" executions in bull because sending events to the
// frontend would not be possible
executionId = await this.runBull(data, loadStaticData, realtime);
} else if (executionsProcess === 'main') {
executionId = await this.runMainProcess(data, loadStaticData);
} else {
executionId = await this.runSubprocess(data, loadStaticData);
}
return this.runSubprocess(data, loadStaticData);
const externalHooks = ExternalHooks();
if (externalHooks.exists('workflow.postExecute')) {
this.activeExecutions.getPostExecutePromise(executionId)
.then(async (executionData) => {
await externalHooks.run('workflow.postExecute', [executionData, data.workflowData]);
})
.catch(error => {
console.error('There was a problem running hook "workflow.postExecute"', error);
});
}
return executionId;
}
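
A minimal usage sketch of the dispatch above (inside an async context, assuming a prepared IWorkflowExecutionDataProcess in `data`); note that run() resolves with the execution ID as soon as the execution is registered or queued, not when it finishes:

// Sketch: start an execution, then wait for its result separately.
const runner = new WorkflowRunner();
const executionId = await runner.run(data, true /* loadStaticData */, false /* realtime */);

// The result is awaited via ActiveExecutions, exactly as the
// 'workflow.postExecute' hook does above.
const result = await ActiveExecutions.getInstance().getPostExecutePromise(executionId);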
@ -132,9 +162,9 @@ export class WorkflowRunner {
const additionalData = await WorkflowExecuteAdditionalData.getBase(data.credentials);
// Register the active execution
const executionId = this.activeExecutions.add(data, undefined);
const executionId = await this.activeExecutions.add(data, undefined);
additionalData.hooks = WorkflowExecuteAdditionalData.getWorkflowHooksMain(data, executionId);
additionalData.hooks = WorkflowExecuteAdditionalData.getWorkflowHooksMain(data, executionId, true);
let workflowExecution: PCancelable<IRun>;
if (data.executionData !== undefined) {
@ -179,6 +209,172 @@ export class WorkflowRunner {
return executionId;
}
async runBull(data: IWorkflowExecutionDataProcess, loadStaticData?: boolean, realtime?: boolean): Promise<string> {
// TODO: If "loadStaticData" is set to true it has to load data new on worker
// Register the active execution
const executionId = await this.activeExecutions.add(data, undefined);
const jobData: IBullJobData = {
executionId,
loadStaticData: !!loadStaticData,
};
let priority = 100;
if (realtime === true) {
// Jobs that require a direct response get a higher priority (in Bull, a lower number means a higher priority)
priority = 50;
}
// TODO: Realtime jobs should probably not be retried at all, or at least not
// be retried if they are older than x seconds.
// Check if they get retried by default and how often.
const jobOptions = {
priority,
removeOnComplete: true,
removeOnFail: true,
};
const job = await this.jobQueue.add(jobData, jobOptions);
console.log('Started with ID: ' + job.id.toString());
const hooks = WorkflowExecuteAdditionalData.getWorkflowHooksWorkerMain(data.executionMode, executionId, data.workflowData, { retryOf: data.retryOf ? data.retryOf.toString() : undefined });
// Normally the workflow should also be supplied here, but as it is only used
// for sending data to the editor UI it is not needed.
hooks.executeHookFunctions('workflowExecuteBefore', []);
const workflowExecution: PCancelable<IRun> = new PCancelable(async (resolve, reject, onCancel) => {
onCancel.shouldReject = false;
onCancel(async () => {
await Queue.getInstance().stopJob(job);
const fullRunData: IRun = {
data: {
resultData: {
error: {
message: 'Workflow has been canceled!',
} as IExecutionError,
runData: {},
},
},
mode: data.executionMode,
startedAt: new Date(),
stoppedAt: new Date(),
};
this.activeExecutions.remove(executionId, fullRunData);
resolve(fullRunData);
});
const jobData: Promise<IBullJobResponse> = job.finished();
const queueRecoveryInterval = config.get('queue.bull.queueRecoveryInterval') as number;
if (queueRecoveryInterval > 0) {
/*************************************************
* Long explanation about what this solves:      *
* This only happens in a very specific scenario *
* when Redis crashes and recovers shortly       *
* afterwards, but during this window some       *
* execution(s) finished. The end result is that *
* the main process would wait indefinitely and  *
* never get a response. This adds active        *
* polling of the queue that allows us to detect *
* that the execution finished and to fetch its  *
* data from the database.                       *
*************************************************/
let watchDogInterval: NodeJS.Timeout | undefined;
let resolved = false;
const watchDog = new Promise((res) => {
watchDogInterval = setInterval(async () => {
const currentJob = await this.jobQueue.getJob(job.id);
// A null value means the job is finished (it is no longer found in the queue)
if (currentJob === null) {
// Mimic worker's success message
res({ success: true });
}
}, queueRecoveryInterval * 1000);
});
const clearWatchdogInterval = () => {
if (watchDogInterval) {
clearInterval(watchDogInterval);
watchDogInterval = undefined;
}
};
await new Promise((res, rej) => {
jobData.then((data) => {
if (!resolved) {
resolved = true;
clearWatchdogInterval();
res(data);
}
}).catch((e) => {
if (!resolved) {
resolved = true;
clearWatchdogInterval();
rej(e);
}
});
watchDog.then((data) => {
if (!resolved) {
resolved = true;
clearWatchdogInterval();
res(data);
}
});
});
} else {
await jobData;
}
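
Conceptually, the block above races the Bull job promise against the polling watchdog; a stripped-down sketch of the same race, assuming `job`, `jobData`, and `queueRecoveryInterval` as defined above:

// Sketch only: resolve as soon as either the worker reports completion
// or the job has disappeared from the queue (meaning it finished while
// Redis was briefly unavailable). The real code above additionally
// clears the polling interval on whichever branch settles first.
const watchdog = new Promise((res) => {
	setInterval(async () => {
		if (await this.jobQueue.getJob(job.id) === null) {
			res({ success: true });
		}
	}, queueRecoveryInterval * 1000);
});
await Promise.race([job.finished(), watchdog]);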
const executionDb = await Db.collections.Execution!.findOne(executionId) as IExecutionFlattedDb;
const fullExecutionData = ResponseHelper.unflattenExecutionData(executionDb) as IExecutionResponse;
const runData = {
data: fullExecutionData.data,
finished: fullExecutionData.finished,
mode: fullExecutionData.mode,
startedAt: fullExecutionData.startedAt,
stoppedAt: fullExecutionData.stoppedAt,
} as IRun;
this.activeExecutions.remove(executionId, runData);
// Normally the static data should also be supplied here, but as it is only
// used for sending data to the editor UI it is not needed.
hooks.executeHookFunctions('workflowExecuteAfter', [runData]);
try {
// Check if this execution data has to be removed from the database
// based on the workflow settings.
let saveDataErrorExecution = config.get('executions.saveDataOnError') as string;
let saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string;
if (data.workflowData.settings !== undefined) {
saveDataErrorExecution = (data.workflowData.settings.saveDataErrorExecution as string) || saveDataErrorExecution;
saveDataSuccessExecution = (data.workflowData.settings.saveDataSuccessExecution as string) || saveDataSuccessExecution;
}
const workflowDidSucceed = !runData.data.resultData.error;
if (workflowDidSucceed === true && saveDataSuccessExecution === 'none' ||
workflowDidSucceed === false && saveDataErrorExecution === 'none'
) {
await Db.collections.Execution!.delete(executionId);
}
} catch (err) {
// We don't want errors here to crash n8n. Just log and proceed.
console.log('Error removing saved execution from database. More details: ', err);
}
resolve(runData);
});
this.activeExecutions.attachWorkflowExecution(executionId, workflowExecution);
return executionId;
}
/**
* Run the workflow
*
@ -197,7 +393,7 @@ export class WorkflowRunner {
}
// Register the active execution
const executionId = this.activeExecutions.add(data, subprocess);
const executionId = await this.activeExecutions.add(data, subprocess);
// Check if workflow contains a "executeWorkflow" Node as in this
// case we can not know which nodeTypes and credentialTypes will
@ -212,6 +408,7 @@ export class WorkflowRunner {
let nodeTypeData: ITransferNodeTypes;
let credentialTypeData: ICredentialsTypeData;
let credentialsOverwrites = this.credentialsOverwrites;
if (loadAllNodeTypes === true) {
// Supply all nodeTypes and credentialTypes
@ -219,15 +416,22 @@ export class WorkflowRunner {
const credentialTypes = CredentialTypes();
credentialTypeData = credentialTypes.credentialTypes;
} else {
// Supply only nodeTypes and credentialTypes which the workflow needs
// Supply only nodeTypes, credentialTypes and overwrites that the workflow needs
nodeTypeData = WorkflowHelpers.getNodeTypeData(data.workflowData.nodes);
credentialTypeData = WorkflowHelpers.getCredentialsData(data.credentials);
credentialsOverwrites = {};
for (const credentialName of Object.keys(credentialTypeData)) {
if (this.credentialsOverwrites[credentialName] !== undefined) {
credentialsOverwrites[credentialName] = this.credentialsOverwrites[credentialName];
}
}
}
(data as unknown as IWorkflowExecutionDataProcessWithExecution).executionId = executionId;
(data as unknown as IWorkflowExecutionDataProcessWithExecution).nodeTypeData = nodeTypeData;
(data as unknown as IWorkflowExecutionDataProcessWithExecution).credentialsOverwrite = this.credentialsOverwrites;
(data as unknown as IWorkflowExecutionDataProcessWithExecution).credentialsOverwrite = credentialsOverwrites;
(data as unknown as IWorkflowExecutionDataProcessWithExecution).credentialsTypeData = credentialTypeData; // TODO: Still needs correct value
const workflowHooks = WorkflowExecuteAdditionalData.getWorkflowHooksMain(data, executionId);
@ -253,7 +457,7 @@ export class WorkflowRunner {
// Listen to data from the subprocess
subprocess.on('message', (message: IProcessMessage) => {
subprocess.on('message', async (message: IProcessMessage) => {
if (message.type === 'end') {
clearTimeout(executionTimeout);
this.activeExecutions.remove(executionId!, message.data.runData);
@ -270,6 +474,11 @@ export class WorkflowRunner {
const timeoutError = { message: 'Workflow execution timed out!' } as IExecutionError;
this.processError(timeoutError, startedAt, data.executionMode, executionId);
} else if (message.type === 'startExecution') {
const executionId = await this.activeExecutions.add(message.data.runData);
subprocess.send({ type: 'executionId', data: { executionId } } as IProcessMessage);
} else if (message.type === 'finishExecution') {
await this.activeExecutions.remove(message.data.executionId, message.data.result);
}
});

View file

@ -2,9 +2,12 @@
import {
CredentialsOverwrites,
CredentialTypes,
Db,
ExternalHooks,
IWorkflowExecutionDataProcessWithExecution,
NodeTypes,
WorkflowExecuteAdditionalData,
WorkflowHelpers,
} from './';
import {
@ -14,23 +17,42 @@ import {
import {
IDataObject,
IExecuteData,
IExecuteWorkflowInfo,
IExecutionError,
INodeExecutionData,
INodeType,
INodeTypeData,
IRun,
IRunExecutionData,
ITaskData,
IWorkflowExecuteAdditionalData,
IWorkflowExecuteHooks,
Workflow,
WorkflowHooks,
} from 'n8n-workflow';
import * as config from '../config';
export class WorkflowRunnerProcess {
data: IWorkflowExecutionDataProcessWithExecution | undefined;
startedAt = new Date();
workflow: Workflow | undefined;
workflowExecute: WorkflowExecute | undefined;
executionIdCallback: (executionId: string) => void | undefined;
static async stopProcess() {
setTimeout(() => {
// Grace period: give running executions 30 seconds to finish before exiting
process.exit(0);
}, 30000);
}
async runWorkflow(inputData: IWorkflowExecutionDataProcessWithExecution): Promise<IRun> {
process.on('SIGTERM', WorkflowRunnerProcess.stopProcess);
process.on('SIGINT', WorkflowRunnerProcess.stopProcess);
this.data = inputData;
let className: string;
let tempNode: INodeType;
@ -66,12 +88,54 @@ export class WorkflowRunnerProcess {
// Load the credentials overwrites if any exist
const credentialsOverwrites = CredentialsOverwrites();
await credentialsOverwrites.init();
await credentialsOverwrites.init(inputData.credentialsOverwrite);
this.workflow = new Workflow({ id: this.data.workflowData.id as string | undefined, name: this.data.workflowData.name, nodes: this.data.workflowData!.nodes, connections: this.data.workflowData!.connections, active: this.data.workflowData!.active, nodeTypes, staticData: this.data.workflowData!.staticData, settings: this.data.workflowData!.settings});
// Load all external hooks
const externalHooks = ExternalHooks();
await externalHooks.init();
// This code has been split into 3 ifs just to make it easier to understand.
// It could be made more compact, but in the end that would make it much harder to read.
if (inputData.workflowData.settings !== undefined && inputData.workflowData.settings.saveExecutionProgress === true) {
// Workflow settings specify that execution progress should be saved
await Db.init();
} else if (inputData.workflowData.settings !== undefined && inputData.workflowData.settings.saveExecutionProgress !== false && config.get('executions.saveExecutionProgress') as boolean) {
// Workflow settings do not specify saving, but the default settings say to save
await Db.init();
} else if (inputData.workflowData.settings === undefined && config.get('executions.saveExecutionProgress') as boolean) {
// No workflow settings exist, but the default settings say to save
await Db.init();
}
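
The three branches above collapse into a single condition; an equivalent sketch (the workflow-level setting wins, otherwise the global default applies):

// Equivalent single condition (sketch): optional chaining covers both
// the "settings undefined" and "setting not specified" branches.
const progressSetting = inputData.workflowData.settings?.saveExecutionProgress;
if (progressSetting === true
	|| (progressSetting !== false && (config.get('executions.saveExecutionProgress') as boolean))) {
	await Db.init();
}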
this.workflow = new Workflow({ id: this.data.workflowData.id as string | undefined, name: this.data.workflowData.name, nodes: this.data.workflowData!.nodes, connections: this.data.workflowData!.connections, active: this.data.workflowData!.active, nodeTypes, staticData: this.data.workflowData!.staticData, settings: this.data.workflowData!.settings });
const additionalData = await WorkflowExecuteAdditionalData.getBase(this.data.credentials);
additionalData.hooks = this.getProcessForwardHooks();
const executeWorkflowFunction = additionalData.executeWorkflow;
additionalData.executeWorkflow = async (workflowInfo: IExecuteWorkflowInfo, additionalData: IWorkflowExecuteAdditionalData, inputData?: INodeExecutionData[] | undefined): Promise<Array<INodeExecutionData[] | null> | IRun> => {
const workflowData = await WorkflowExecuteAdditionalData.getWorkflowData(workflowInfo);
const runData = await WorkflowExecuteAdditionalData.getRunData(workflowData, inputData);
await sendToParentProcess('startExecution', { runData });
const executionId: string = await new Promise((resolve) => {
this.executionIdCallback = (executionId: string) => {
resolve(executionId);
};
});
let result: IRun;
try {
result = await executeWorkflowFunction(workflowInfo, additionalData, inputData, executionId, workflowData, runData);
} catch (e) {
await sendToParentProcess('finishExecution', { executionId });
// Re-throw the same error we caught
throw e;
}
await sendToParentProcess('finishExecution', { executionId, result });
const returnData = WorkflowHelpers.getDataLastExecutedNodeData(result);
return returnData!.data!.main;
};
if (this.data.executionData !== undefined) {
this.workflowExecute = new WorkflowExecute(additionalData, this.data.executionMode, this.data.executionData);
return this.workflowExecute.processRunExecutionData(this.workflow);
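
The executeWorkflow override above relies on a small promise/callback bridge to await the parent's 'executionId' reply; a sketch of that bridge in isolation (inside an async context, names as in the class above):

// Sketch: a promise that is resolved later from the IPC 'message'
// handler, once the parent answers with the new execution ID.
let executionIdCallback: ((executionId: string) => void) | undefined;
const executionIdPromise = new Promise<string>((resolve) => {
	executionIdCallback = resolve;
});
// Later, in the process.on('message') handler at the bottom of this file:
// if (message.type === 'executionId') { executionIdCallback!(message.data.executionId); }
const executionId = await executionIdPromise;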
@ -121,7 +185,7 @@ export class WorkflowRunnerProcess {
* @returns
*/
getProcessForwardHooks(): WorkflowHooks {
const hookFunctions = {
const hookFunctions: IWorkflowExecuteHooks = {
nodeExecuteBefore: [
async (nodeName: string): Promise<void> => {
this.sendHookToParentProcess('nodeExecuteBefore', [nodeName]);
@ -135,15 +199,23 @@ export class WorkflowRunnerProcess {
workflowExecuteBefore: [
async (): Promise<void> => {
this.sendHookToParentProcess('workflowExecuteBefore', []);
}
},
],
workflowExecuteAfter: [
async (fullRunData: IRun, newStaticData?: IDataObject): Promise<void> => {
this.sendHookToParentProcess('workflowExecuteAfter', [fullRunData, newStaticData]);
},
]
],
};
const preExecuteFunctions = WorkflowExecuteAdditionalData.hookFunctionsPreExecute();
for (const key of Object.keys(preExecuteFunctions)) {
if (hookFunctions[key] === undefined) {
hookFunctions[key] = [];
}
hookFunctions[key]!.push.apply(hookFunctions[key], preExecuteFunctions[key]);
}
return new WorkflowHooks(hookFunctions, this.data!.executionMode, this.data!.executionId, this.data!.workflowData, { sessionId: this.data!.sessionId, retryOf: this.data!.retryOf as string });
}
@ -225,6 +297,8 @@ process.on('message', async (message: IProcessMessage) => {
// Stop process
process.exit();
} else if (message.type === 'executionId') {
workflowRunner.executionIdCallback(message.data.executionId);
}
} catch (error) {
// Catch all uncaught errors and forward them to parent process

View file

@ -1,10 +1,8 @@
import * as MongoDb from './mongodb';
import * as PostgresDb from './postgresdb';
import * as SQLite from './sqlite';
import * as MySQLDb from './mysqldb';
export {
MongoDb,
PostgresDb,
SQLite,
MySQLDb,

View file

@ -1,41 +0,0 @@
import {
ICredentialNodeAccess,
} from 'n8n-workflow';
import {
ICredentialsDb,
} from '../../';
import {
Column,
Entity,
Index,
ObjectID,
ObjectIdColumn,
} from 'typeorm';
@Entity()
export class CredentialsEntity implements ICredentialsDb {
@ObjectIdColumn()
id: ObjectID;
@Column()
name: string;
@Column()
data: string;
@Index()
@Column()
type: string;
@Column('json')
nodesAccess: ICredentialNodeAccess[];
@Column('Date')
createdAt: Date;
@Column('Date')
updatedAt: Date;
}

View file

@ -1,52 +0,0 @@
import {
WorkflowExecuteMode,
} from 'n8n-workflow';
import {
IExecutionFlattedDb,
IWorkflowDb,
} from '../../';
import {
Column,
Entity,
Index,
ObjectID,
ObjectIdColumn,
} from 'typeorm';
@Entity()
export class ExecutionEntity implements IExecutionFlattedDb {
@ObjectIdColumn()
id: ObjectID;
@Column()
data: string;
@Column()
finished: boolean;
@Column()
mode: WorkflowExecuteMode;
@Column()
retryOf: string;
@Column()
retrySuccessId: string;
@Column('Date')
startedAt: Date;
@Index()
@Column('Date')
stoppedAt: Date;
@Column('json')
workflowData: IWorkflowDb;
@Index()
@Column()
workflowId: string;
}

View file

@ -1,30 +0,0 @@
import {
Column,
Entity,
Index,
ObjectID,
ObjectIdColumn,
} from 'typeorm';
import {
IWebhookDb,
} from '../../Interfaces';
@Entity()
export class WebhookEntity implements IWebhookDb {
@ObjectIdColumn()
id: ObjectID;
@Column()
workflowId: number;
@Column()
webhookPath: string;
@Column()
method: string;
@Column()
node: string;
}

View file

@ -1,48 +0,0 @@
import {
IConnections,
IDataObject,
INode,
IWorkflowSettings,
} from 'n8n-workflow';
import {
IWorkflowDb,
} from '../../';
import {
Column,
Entity,
ObjectID,
ObjectIdColumn,
} from 'typeorm';
@Entity()
export class WorkflowEntity implements IWorkflowDb {
@ObjectIdColumn()
id: ObjectID;
@Column()
name: string;
@Column()
active: boolean;
@Column('json')
nodes: INode[];
@Column('json')
connections: IConnections;
@Column('Date')
createdAt: Date;
@Column('Date')
updatedAt: Date;
@Column('json')
settings?: IWorkflowSettings;
@Column('json')
staticData?: IDataObject;
}

View file

@ -1,5 +0,0 @@
export * from './CredentialsEntity';
export * from './ExecutionEntity';
export * from './WorkflowEntity';
export * from './WebhookEntity';

View file

@ -1,22 +0,0 @@
import { MigrationInterface } from "typeorm";
import {
MongoQueryRunner,
} from 'typeorm/driver/mongodb/MongoQueryRunner';
import * as config from '../../../../config';
export class CreateIndexStoppedAt1594910478695 implements MigrationInterface {
name = 'CreateIndexStoppedAt1594910478695';
async up(queryRunner: MongoQueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.manager.createCollectionIndex(`${tablePrefix}execution_entity`, 'stoppedAt', { name: `IDX_${tablePrefix}execution_entity_stoppedAt` });
}
async down(queryRunner: MongoQueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.manager.dropCollectionIndex
(`${tablePrefix}execution_entity`, `IDX_${tablePrefix}execution_entity_stoppedAt`);
}
}

View file

@ -1,11 +0,0 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
export class InitialMigration1587563438936 implements MigrationInterface {
async up(queryRunner: QueryRunner): Promise<void> {
}
async down(queryRunner: QueryRunner): Promise<void> {
}
}

View file

@ -1,57 +0,0 @@
import {
MigrationInterface,
} from 'typeorm';
import {
IWorkflowDb,
NodeTypes,
WebhookHelpers,
} from '../../..';
import {
Workflow,
} from 'n8n-workflow/dist/src/Workflow';
import {
IWebhookDb,
} from '../../../Interfaces';
import * as config from '../../../../config';
import {
MongoQueryRunner,
} from 'typeorm/driver/mongodb/MongoQueryRunner';
export class WebhookModel1592679094242 implements MigrationInterface {
name = 'WebhookModel1592679094242';
async up(queryRunner: MongoQueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
const workflows = await queryRunner.cursor( `${tablePrefix}workflow_entity`, { active: true }).toArray() as IWorkflowDb[];
const data: IWebhookDb[] = [];
const nodeTypes = NodeTypes();
for (const workflow of workflows) {
const workflowInstance = new Workflow({ id: workflow.id as string, name: workflow.name, nodes: workflow.nodes, connections: workflow.connections, active: workflow.active, nodeTypes, staticData: workflow.staticData, settings: workflow.settings });
const webhooks = WebhookHelpers.getWorkflowWebhooksBasic(workflowInstance);
for (const webhook of webhooks) {
data.push({
workflowId: workflowInstance.id as string,
webhookPath: webhook.path,
method: webhook.httpMethod,
node: webhook.node,
});
}
}
if (data.length !== 0) {
await queryRunner.manager.insertMany(`${tablePrefix}webhook_entity`, data);
}
await queryRunner.manager.createCollectionIndex(`${tablePrefix}webhook_entity`, ['webhookPath', 'method'], { unique: true, background: false });
}
async down(queryRunner: MongoQueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.dropTable(`${tablePrefix}webhook_entity`);
}
}

View file

@ -1,3 +0,0 @@
export * from './1587563438936-InitialMigration';
export * from './1592679094242-WebhookModel';
export * from './151594910478695-CreateIndexStoppedAt';

View file

@ -20,7 +20,7 @@ export class CredentialsEntity implements ICredentialsDb {
id: number;
@Column({
length: 128
length: 128,
})
name: string;
@ -29,7 +29,7 @@ export class CredentialsEntity implements ICredentialsDb {
@Index()
@Column({
length: 32
length: 32,
})
type: string;

View file

@ -40,7 +40,7 @@ export class ExecutionEntity implements IExecutionFlattedDb {
startedAt: Date;
@Index()
@Column('datetime')
@Column('datetime', { nullable: true })
stoppedAt: Date;
@Column('json')

View file

@ -1,6 +1,7 @@
import {
Column,
Entity,
Index,
PrimaryColumn,
} from 'typeorm';
@ -9,6 +10,7 @@ import {
} from '../../Interfaces';
@Entity()
@Index(['webhookId', 'method', 'pathLength'])
export class WebhookEntity implements IWebhookDb {
@Column()
@ -22,4 +24,10 @@ export class WebhookEntity implements IWebhookDb {
@Column()
node: string;
@Column({ nullable: true })
webhookId: string;
@Column({ nullable: true })
pathLength: number;
}

View file

@ -22,7 +22,7 @@ export class WorkflowEntity implements IWorkflowDb {
id: number;
@Column({
length: 128
length: 128,
})
name: string;

View file

@ -5,20 +5,6 @@ import {
import * as config from '../../../../config';
import {
IWorkflowDb,
NodeTypes,
WebhookHelpers,
} from '../../..';
import {
Workflow,
} from 'n8n-workflow';
import {
IWebhookDb,
} from '../../../Interfaces';
export class WebhookModel1592447867632 implements MigrationInterface {
name = 'WebhookModel1592447867632';
@ -26,30 +12,6 @@ export class WebhookModel1592447867632 implements MigrationInterface {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity (workflowId int NOT NULL, webhookPath varchar(255) NOT NULL, method varchar(255) NOT NULL, node varchar(255) NOT NULL, PRIMARY KEY (webhookPath, method)) ENGINE=InnoDB`);
const workflows = await queryRunner.query(`SELECT * FROM ${tablePrefix}workflow_entity WHERE active=true`) as IWorkflowDb[];
const data: IWebhookDb[] = [];
const nodeTypes = NodeTypes();
for (const workflow of workflows) {
const workflowInstance = new Workflow({ id: workflow.id as string, name: workflow.name, nodes: workflow.nodes, connections: workflow.connections, active: workflow.active, nodeTypes, staticData: workflow.staticData, settings: workflow.settings });
const webhooks = WebhookHelpers.getWorkflowWebhooksBasic(workflowInstance);
for (const webhook of webhooks) {
data.push({
workflowId: workflowInstance.id as string,
webhookPath: webhook.path,
method: webhook.httpMethod,
node: webhook.node,
});
}
}
if (data.length !== 0) {
await queryRunner.manager.createQueryBuilder()
.insert()
.into(`${tablePrefix}webhook_entity`)
.values(data)
.execute();
}
}
async down(queryRunner: QueryRunner): Promise<void> {

View file

@ -0,0 +1,17 @@
import { MigrationInterface, QueryRunner } from "typeorm";
import * as config from '../../../../config';
export class MakeStoppedAtNullable1607431743767 implements MigrationInterface {
async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY `stoppedAt` datetime', undefined);
}
async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY `stoppedAt` datetime NOT NULL', undefined);
}
}

View file

@ -0,0 +1,24 @@
import {MigrationInterface, QueryRunner} from "typeorm";
import * as config from '../../../../config';
export class AddWebhookId1611149998770 implements MigrationInterface {
name = 'AddWebhookId1611149998770';
async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'webhook_entity` ADD `webhookId` varchar(255) NULL');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'webhook_entity` ADD `pathLength` int NULL');
await queryRunner.query('CREATE INDEX `IDX_' + tablePrefix + '742496f199721a057051acf4c2` ON `' + tablePrefix + 'webhook_entity` (`webhookId`, `method`, `pathLength`)');
}
async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query(
'DROP INDEX `IDX_' + tablePrefix + '742496f199721a057051acf4c2` ON `' + tablePrefix + 'webhook_entity`'
);
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'webhook_entity` DROP COLUMN `pathLength`');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'webhook_entity` DROP COLUMN `webhookId`');
}
}

View file

@ -0,0 +1,18 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';
export class ChangeDataSize1615306975123 implements MigrationInterface {
name = 'ChangeDataSize1615306975123';
async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY COLUMN `data` MEDIUMTEXT NOT NULL');
}
async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY COLUMN `data` TEXT NOT NULL');
}
}

View file

@ -1,3 +1,15 @@
export * from './1588157391238-InitialMigration';
export * from './1592447867632-WebhookModel';
export * from './1594902918301-CreateIndexStoppedAt';
import { InitialMigration1588157391238 } from './1588157391238-InitialMigration';
import { WebhookModel1592447867632 } from './1592447867632-WebhookModel';
import { CreateIndexStoppedAt1594902918301 } from './1594902918301-CreateIndexStoppedAt';
import { AddWebhookId1611149998770 } from './1611149998770-AddWebhookId';
import { MakeStoppedAtNullable1607431743767 } from './1607431743767-MakeStoppedAtNullable';
import { ChangeDataSize1615306975123 } from './1615306975123-ChangeDataSize';
export const mysqlMigrations = [
InitialMigration1588157391238,
WebhookModel1592447867632,
CreateIndexStoppedAt1594902918301,
AddWebhookId1611149998770,
MakeStoppedAtNullable1607431743767,
ChangeDataSize1615306975123,
];
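
Replacing the wildcard exports with an explicitly ordered array makes the migration order deterministic; a sketch of how such an array plugs into a TypeORM connection (createConnection and the migrations/migrationsRun options are standard TypeORM; the import path and connection details are illustrative only):

import { createConnection } from 'typeorm';
import { mysqlMigrations } from './databases/mysqldb/migrations'; // illustrative path

// Sketch (inside an async context): run the migrations in array order
// on startup.
const connection = await createConnection({
	type: 'mysql',
	host: 'localhost',
	// ...credentials, entities, etc. omitted...
	migrations: mysqlMigrations,
	migrationsRun: true,
});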

View file

@ -20,7 +20,7 @@ export class CredentialsEntity implements ICredentialsDb {
id: number;
@Column({
length: 128
length: 128,
})
name: string;
@ -29,7 +29,7 @@ export class CredentialsEntity implements ICredentialsDb {
@Index()
@Column({
length: 32
length: 32,
})
type: string;

View file

@ -40,7 +40,7 @@ export class ExecutionEntity implements IExecutionFlattedDb {
startedAt: Date;
@Index()
@Column('timestamp')
@Column('timestamp', { nullable: true })
stoppedAt: Date;
@Column('json')

View file

@ -1,6 +1,7 @@
import {
Column,
Entity,
Index,
PrimaryColumn,
} from 'typeorm';
@ -9,6 +10,7 @@ import {
} from '../../';
@Entity()
@Index(['webhookId', 'method', 'pathLength'])
export class WebhookEntity implements IWebhookDb {
@Column()
@ -22,4 +24,10 @@ export class WebhookEntity implements IWebhookDb {
@Column()
node: string;
@Column({ nullable: true })
webhookId: string;
@Column({ nullable: true })
pathLength: number;
}

View file

@ -22,7 +22,7 @@ export class WorkflowEntity implements IWorkflowDb {
id: number;
@Column({
length: 128
length: 128,
})
name: string;

View file

@ -3,20 +3,6 @@ import {
QueryRunner,
} from 'typeorm';
import {
IWorkflowDb,
NodeTypes,
WebhookHelpers,
} from '../../..';
import {
Workflow,
} from 'n8n-workflow';
import {
IWebhookDb,
} from '../../../Interfaces';
import * as config from '../../../../config';
export class WebhookModel1589476000887 implements MigrationInterface {
@ -30,31 +16,7 @@ export class WebhookModel1589476000887 implements MigrationInterface {
tablePrefix = schema + '.' + tablePrefix;
}
await queryRunner.query(`CREATE TABLE ${tablePrefix}webhook_entity ("workflowId" integer NOT NULL, "webhookPath" character varying NOT NULL, "method" character varying NOT NULL, "node" character varying NOT NULL, CONSTRAINT "PK_${tablePrefixIndex}b21ace2e13596ccd87dc9bf4ea6" PRIMARY KEY ("webhookPath", "method"))`, undefined);
const workflows = await queryRunner.query(`SELECT * FROM ${tablePrefix}workflow_entity WHERE active=true`) as IWorkflowDb[];
const data: IWebhookDb[] = [];
const nodeTypes = NodeTypes();
for (const workflow of workflows) {
const workflowInstance = new Workflow({ id: workflow.id as string, name: workflow.name, nodes: workflow.nodes, connections: workflow.connections, active: workflow.active, nodeTypes, staticData: workflow.staticData, settings: workflow.settings });
const webhooks = WebhookHelpers.getWorkflowWebhooksBasic(workflowInstance);
for (const webhook of webhooks) {
data.push({
workflowId: workflowInstance.id as string,
webhookPath: webhook.path,
method: webhook.httpMethod,
node: webhook.node,
});
}
}
if (data.length !== 0) {
await queryRunner.manager.createQueryBuilder()
.insert()
.into(`${tablePrefix}webhook_entity`)
.values(data)
.execute();
}
await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity ("workflowId" integer NOT NULL, "webhookPath" character varying NOT NULL, "method" character varying NOT NULL, "node" character varying NOT NULL, CONSTRAINT "PK_${tablePrefixIndex}b21ace2e13596ccd87dc9bf4ea6" PRIMARY KEY ("webhookPath", "method"))`, undefined);
}
async down(queryRunner: QueryRunner): Promise<void> {

View file

@ -0,0 +1,21 @@
import {MigrationInterface, QueryRunner} from "typeorm";
import * as config from '../../../../config';
export class MakeStoppedAtNullable1607431743768 implements MigrationInterface {
name = 'MakeStoppedAtNullable1607431743768';
async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix');
const schema = config.get('database.postgresdb.schema');
if (schema) {
tablePrefix = schema + '.' + tablePrefix;
}
await queryRunner.query('ALTER TABLE ' + tablePrefix + 'execution_entity ALTER COLUMN "stoppedAt" DROP NOT NULL', undefined);
}
async down(queryRunner: QueryRunner): Promise<void> {
// Cannot be undone as the column might already contain null values
}
}

View file

@ -0,0 +1,33 @@
import {MigrationInterface, QueryRunner} from "typeorm";
import * as config from '../../../../config';
export class AddWebhookId1611144599516 implements MigrationInterface {
name = 'AddWebhookId1611144599516';
async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix');
const tablePrefixPure = tablePrefix;
const schema = config.get('database.postgresdb.schema');
if (schema) {
tablePrefix = schema + '.' + tablePrefix;
}
await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity ADD "webhookId" character varying`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity ADD "pathLength" integer`);
await queryRunner.query(`CREATE INDEX IF NOT EXISTS IDX_${tablePrefixPure}16f4436789e804e3e1c9eeb240 ON ${tablePrefix}webhook_entity ("webhookId", "method", "pathLength") `);
}
async down(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix');
const tablePrefixPure = tablePrefix;
const schema = config.get('database.postgresdb.schema');
if (schema) {
tablePrefix = schema + '.' + tablePrefix;
}
await queryRunner.query(`DROP INDEX IDX_${tablePrefixPure}16f4436789e804e3e1c9eeb240`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity DROP COLUMN "pathLength"`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity DROP COLUMN "webhookId"`);
}
}

View file

@ -1,4 +1,13 @@
export * from './1587669153312-InitialMigration';
export * from './1589476000887-WebhookModel';
export * from './1594828256133-CreateIndexStoppedAt';
import { InitialMigration1587669153312 } from './1587669153312-InitialMigration';
import { WebhookModel1589476000887 } from './1589476000887-WebhookModel';
import { CreateIndexStoppedAt1594828256133 } from './1594828256133-CreateIndexStoppedAt';
import { AddWebhookId1611144599516 } from './1611144599516-AddWebhookId';
import { MakeStoppedAtNullable1607431743768 } from './1607431743768-MakeStoppedAtNullable';
export const postgresMigrations = [
InitialMigration1587669153312,
WebhookModel1589476000887,
CreateIndexStoppedAt1594828256133,
AddWebhookId1611144599516,
MakeStoppedAtNullable1607431743768,
];

View file

@ -20,7 +20,7 @@ export class CredentialsEntity implements ICredentialsDb {
id: number;
@Column({
length: 128
length: 128,
})
name: string;
@ -29,7 +29,7 @@ export class CredentialsEntity implements ICredentialsDb {
@Index()
@Column({
length: 32
length: 32,
})
type: string;

View file

@ -40,7 +40,7 @@ export class ExecutionEntity implements IExecutionFlattedDb {
startedAt: Date;
@Index()
@Column()
@Column({ nullable: true })
stoppedAt: Date;
@Column('simple-json')

View file

@ -1,6 +1,7 @@
import {
Column,
Entity,
Index,
PrimaryColumn,
} from 'typeorm';
@ -9,6 +10,7 @@ import {
} from '../../Interfaces';
@Entity()
@Index(['webhookId', 'method', 'pathLength'])
export class WebhookEntity implements IWebhookDb {
@Column()
@ -22,4 +24,10 @@ export class WebhookEntity implements IWebhookDb {
@Column()
node: string;
@Column({ nullable: true })
webhookId: string;
@Column({ nullable: true })
pathLength: number;
}

View file

@ -22,7 +22,7 @@ export class WorkflowEntity implements IWorkflowDb {
id: number;
@Column({
length: 128
length: 128,
})
name: string;

View file

@ -5,20 +5,6 @@ import {
import * as config from '../../../../config';
import {
IWorkflowDb,
NodeTypes,
WebhookHelpers,
} from '../../..';
import {
Workflow,
} from 'n8n-workflow';
import {
IWebhookDb,
} from '../../../Interfaces';
export class WebhookModel1592445003908 implements MigrationInterface {
name = 'WebhookModel1592445003908';
@ -26,34 +12,6 @@ export class WebhookModel1592445003908 implements MigrationInterface {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, PRIMARY KEY ("webhookPath", "method"))`);
const workflows = await queryRunner.query(`SELECT * FROM ${tablePrefix}workflow_entity WHERE active=true`) as IWorkflowDb[];
const data: IWebhookDb[] = [];
const nodeTypes = NodeTypes();
for (const workflow of workflows) {
workflow.nodes = JSON.parse(workflow.nodes as unknown as string);
workflow.connections = JSON.parse(workflow.connections as unknown as string);
workflow.staticData = JSON.parse(workflow.staticData as unknown as string);
workflow.settings = JSON.parse(workflow.settings as unknown as string);
const workflowInstance = new Workflow({ id: workflow.id as string, name: workflow.name, nodes: workflow.nodes, connections: workflow.connections, active: workflow.active, nodeTypes, staticData: workflow.staticData, settings: workflow.settings });
const webhooks = WebhookHelpers.getWorkflowWebhooksBasic(workflowInstance);
for (const webhook of webhooks) {
data.push({
workflowId: workflowInstance.id as string,
webhookPath: webhook.path,
method: webhook.httpMethod,
node: webhook.node,
});
}
}
if (data.length !== 0) {
await queryRunner.manager.createQueryBuilder()
.insert()
.into(`${tablePrefix}webhook_entity`)
.values(data)
.execute();
}
}
async down(queryRunner: QueryRunner): Promise<void> {

View file

@ -8,7 +8,7 @@ export class CreateIndexStoppedAt1594825041918 implements MigrationInterface {
async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query(`CREATE INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1" ON "execution_entity" ("stoppedAt") `);
await queryRunner.query(`CREATE INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1" ON "${tablePrefix}execution_entity" ("stoppedAt") `);
}
async down(queryRunner: QueryRunner): Promise<void> {

View file

@ -0,0 +1,23 @@
import {MigrationInterface, QueryRunner} from "typeorm";
import * as config from '../../../../config';
export class MakeStoppedAtNullable1607431743769 implements MigrationInterface {
async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
// SQLite does not allow us to simply "alter column"
// We're hacking the way sqlite identifies tables
// Allowing a column to become nullable
// This is a very strict case when this can be done safely
// As no collateral effects exist.
await queryRunner.query(`PRAGMA writable_schema = 1; `, undefined);
await queryRunner.query(`UPDATE SQLITE_MASTER SET SQL = 'CREATE TABLE IF NOT EXISTS "${tablePrefix}execution_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" varchar NOT NULL, "retryOf" varchar, "retrySuccessId" varchar, "startedAt" datetime NOT NULL, "stoppedAt" datetime, "workflowData" text NOT NULL, "workflowId" varchar)' WHERE NAME = "${tablePrefix}execution_entity";`, undefined);
await queryRunner.query(`PRAGMA writable_schema = 0;`, undefined);
}
async down(queryRunner: QueryRunner): Promise<void> {
// This cannot be undone as the table might already contain null values
}
}
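
Worth noting: the PRAGMA writable_schema shortcut above only rewrites the CREATE TABLE text that SQLite stores for the table, which is safe here because making a column nullable does not change how existing rows are stored; for any change that does affect row storage, the rebuild-and-rename approach used by AddWebhookId1611071044839 below is the conventional SQLite route.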

View file

@ -0,0 +1,26 @@
import {MigrationInterface, QueryRunner} from "typeorm";
import * as config from '../../../../config';
export class AddWebhookId1611071044839 implements MigrationInterface {
name = 'AddWebhookId1611071044839';
async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query(`CREATE TABLE "temporary_webhook_entity" ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, "webhookId" varchar, "pathLength" integer, PRIMARY KEY ("webhookPath", "method"))`);
await queryRunner.query(`INSERT INTO "temporary_webhook_entity"("workflowId", "webhookPath", "method", "node") SELECT "workflowId", "webhookPath", "method", "node" FROM "${tablePrefix}webhook_entity"`);
await queryRunner.query(`DROP TABLE "${tablePrefix}webhook_entity"`);
await queryRunner.query(`ALTER TABLE "temporary_webhook_entity" RENAME TO "${tablePrefix}webhook_entity"`);
await queryRunner.query(`CREATE INDEX "IDX_${tablePrefix}742496f199721a057051acf4c2" ON "${tablePrefix}webhook_entity" ("webhookId", "method", "pathLength") `);
}
async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix');
await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}742496f199721a057051acf4c2"`);
await queryRunner.query(`ALTER TABLE "${tablePrefix}webhook_entity" RENAME TO "temporary_webhook_entity"`);
await queryRunner.query(`CREATE TABLE "${tablePrefix}webhook_entity" ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, PRIMARY KEY ("webhookPath", "method"))`);
await queryRunner.query(`INSERT INTO "${tablePrefix}webhook_entity"("workflowId", "webhookPath", "method", "node") SELECT "workflowId", "webhookPath", "method", "node" FROM "temporary_webhook_entity"`);
await queryRunner.query(`DROP TABLE "temporary_webhook_entity"`);
}
}

View file

@ -1,3 +1,13 @@
export * from './1588102412422-InitialMigration';
export * from './1592445003908-WebhookModel';
export * from './1594825041918-CreateIndexStoppedAt';
import { InitialMigration1588102412422 } from './1588102412422-InitialMigration';
import { WebhookModel1592445003908 } from './1592445003908-WebhookModel';
import { CreateIndexStoppedAt1594825041918 } from './1594825041918-CreateIndexStoppedAt';
import { AddWebhookId1611071044839 } from './1611071044839-AddWebhookId';
import { MakeStoppedAtNullable1607431743769 } from './1607431743769-MakeStoppedAtNullable';
export const sqliteMigrations = [
InitialMigration1588102412422,
WebhookModel1592445003908,
CreateIndexStoppedAt1594825041918,
AddWebhookId1611071044839,
MakeStoppedAtNullable1607431743769,
];

View file

@ -17,6 +17,7 @@ import * as ResponseHelper from './ResponseHelper';
import * as Server from './Server';
import * as TestWebhooks from './TestWebhooks';
import * as WebhookHelpers from './WebhookHelpers';
import * as WebhookServer from './WebhookServer';
import * as WorkflowExecuteAdditionalData from './WorkflowExecuteAdditionalData';
import * as WorkflowHelpers from './WorkflowHelpers';
export {
@ -29,6 +30,7 @@ export {
Server,
TestWebhooks,
WebhookHelpers,
WebhookServer,
WorkflowExecuteAdditionalData,
WorkflowHelpers,
};

View file

@ -1,9 +1,9 @@
<html>
<script>
(function messageParent() {
window.opener.postMessage('success', '*');
}());
</script>
<script>
(function messageParent() {
window.opener.postMessage('success', '*');
}());
</script>
Got connected. The window can be closed now.
Got connected. The window can be closed now.
</html>

Some files were not shown because too many files have changed in this diff