mirror of https://github.com/n8n-io/n8n.git
synced 2024-11-10 06:34:05 -08:00

Merge 'master' into 'ConvertKit'
commit 852c73e795
.github/workflows/docker-images-rpi.yml (vendored, new file, 32 lines added)

@@ -0,0 +1,32 @@
name: Docker Image CI - Rpi

on:
  push:
    tags:
      - n8n@*

jobs:
  armv7_job:
    runs-on: ubuntu-18.04
    name: Build on ARMv7 (Rpi)
    steps:
      - uses: actions/checkout@v1
      - name: Get the version
        id: vars
        run: echo ::set-output name=tag::$(echo ${GITHUB_REF:14})

      - name: Log in to Docker registry
        run: docker login -u ${{ secrets.DOCKER_USERNAME }} -p ${{ secrets.DOCKER_PASSWORD }}

      - name: Set up Docker Buildx
        uses: crazy-max/ghaction-docker-buildx@v1
        with:
          version: latest
      - name: Run Buildx (push image)
        if: success()
        run: |
          docker buildx build --platform linux/arm/v7 --build-arg N8N_VERSION=${{steps.vars.outputs.tag}} -t n8nio/n8n:${{steps.vars.outputs.tag}}-rpi --output type=image,push=true docker/images/n8n-rpi
      - name: Tag Docker image with latest
        run: docker tag n8nio/n8n:${{steps.vars.outputs.tag}}-rpi n8nio/n8n:latest-rpi
      - name: Push docker images of latest
        run: docker push n8nio/n8n:latest-rpi
.github/workflows/tests.yml (vendored, 2 lines changed)

@@ -9,7 +9,7 @@ jobs:
     strategy:
       matrix:
-        node-version: [10.x, 12.x]
+        node-version: [10.x, 12.x, 14.x]

     steps:
     - uses: actions/checkout@v1
.gitignore (vendored, 1 line added)

@@ -12,3 +12,4 @@ _START_PACKAGE
 .env
 .vscode
 .idea
+.prettierrc.js
@@ -1,10 +1,10 @@
 # n8n - Workflow Automation Tool

-![n8n.io - Workflow Automation](https://raw.githubusercontent.com/n8n-io/n8n/master/docs/images/n8n-logo.png)
+![n8n.io - Workflow Automation](https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-logo.png)

 n8n is a free and open [fair-code](http://faircode.io) licensed node based Workflow Automation Tool. It can be self-hosted, easily extended, and so also used with internal tools.

-<a href="https://raw.githubusercontent.com/n8n-io/n8n/master/docs/images/n8n-screenshot.png"><img src="https://raw.githubusercontent.com/n8n-io/n8n/master/docs/images/n8n-screenshot.png" width="550" alt="n8n.io - Screenshot"></a>
+<a href="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png"><img src="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png" width="550" alt="n8n.io - Screenshot"></a>

@@ -59,7 +59,7 @@ If you have problems or questions go to our forum, we will then try to help you
 If you are interested in working for n8n and so shape the future of the project
 check out our job posts:

-[https://jobs.n8n.io](https://jobs.n8n.io)
+[https://n8n.join.com](https://n8n.join.com)

(image file moved) Before Width: | Height: | Size: 2.6 KiB | After Width: | Height: | Size: 2.6 KiB
(image file moved) Before Width: | Height: | Size: 127 KiB | After Width: | Height: | Size: 127 KiB
docker/compose/subfolderWithSSL/.env (new file, 25 lines added)

@@ -0,0 +1,25 @@
# Folder where data should be saved
DATA_FOLDER=/root/n8n/

# The top level domain to serve from
DOMAIN_NAME=example.com

# The subfolder to serve from
SUBFOLDER=app1
N8N_PATH=/app1/

# DOMAIN_NAME and SUBFOLDER combined decide where n8n will be reachable from
# The above example would result in: https://example.com/app1/

# The user name to use for authentication - IMPORTANT: ALWAYS CHANGE!
N8N_BASIC_AUTH_USER=user

# The password to use for authentication - IMPORTANT: ALWAYS CHANGE!
N8N_BASIC_AUTH_PASSWORD=password

# Optional timezone to set which gets used by the Cron node by default
# If not set, New York time will be used
GENERIC_TIMEZONE=Europe/Berlin

# The email address to use for the SSL certificate creation
SSL_EMAIL=user@example.com
docker/compose/subfolderWithSSL/README.md (new file, 26 lines added)

@@ -0,0 +1,26 @@
# n8n on Subfolder with SSL

Starts n8n and deploys it on a subfolder.


## Start

To start n8n in a subfolder, simply start docker-compose by executing the following
command in the current folder.

**IMPORTANT:** Before you do that, change the default user and password in the `.env` file!

```
docker-compose up -d
```

To stop it, execute:

```
docker-compose stop
```

## Configuration

The default name of the database, user and password for MongoDB can be changed in the `.env` file in the current directory.
docker/compose/subfolderWithSSL/docker-compose.yml (new file, 57 lines added)

@@ -0,0 +1,57 @@
version: "3"

services:
  traefik:
    image: "traefik"
    command:
      - "--api=true"
      - "--api.insecure=true"
      - "--api.dashboard=true"
      - "--providers.docker=true"
      - "--providers.docker.exposedbydefault=false"
      - "--entrypoints.websecure.address=:443"
      - "--certificatesresolvers.mytlschallenge.acme.tlschallenge=true"
      - "--certificatesresolvers.mytlschallenge.acme.email=${SSL_EMAIL}"
      - "--certificatesresolvers.mytlschallenge.acme.storage=/letsencrypt/acme.json"
      - /home/jan/www/n8n/n8n:/data
    ports:
      - "443:443"
      - "80:80"
    volumes:
      - ${DATA_FOLDER}/letsencrypt:/letsencrypt
      - /var/run/docker.sock:/var/run/docker.sock:ro
  n8n:
    image: n8nio/n8n
    ports:
      - "127.0.0.1:5678:5678"
    labels:
      - traefik.enable=true
      - traefik.http.routers.n8n.rule=Host(`${DOMAIN_NAME}`)
      - traefik.http.routers.n8n.tls=true
      - traefik.http.routers.n8n.entrypoints=websecure
      - "traefik.http.routers.n8n.rule=PathPrefix(`/${SUBFOLDER}{regex:$$|/.*}`)"
      - "traefik.http.middlewares.n8n-stripprefix.stripprefix.prefixes=/${SUBFOLDER}"
      - "traefik.http.routers.n8n.middlewares=n8n-stripprefix"
      - traefik.http.routers.n8n.tls.certresolver=mytlschallenge
      - traefik.http.middlewares.n8n.headers.SSLRedirect=true
      - traefik.http.middlewares.n8n.headers.STSSeconds=315360000
      - traefik.http.middlewares.n8n.headers.browserXSSFilter=true
      - traefik.http.middlewares.n8n.headers.contentTypeNosniff=true
      - traefik.http.middlewares.n8n.headers.forceSTSHeader=true
      - traefik.http.middlewares.n8n.headers.SSLHost=${DOMAIN_NAME}
      - traefik.http.middlewares.n8n.headers.STSIncludeSubdomains=true
      - traefik.http.middlewares.n8n.headers.STSPreload=true
    environment:
      - N8N_BASIC_AUTH_ACTIVE=true
      - N8N_BASIC_AUTH_USER
      - N8N_BASIC_AUTH_PASSWORD
      - N8N_HOST=${DOMAIN_NAME}
      - N8N_PORT=5678
      - N8N_PROTOCOL=https
      - NODE_ENV=production
      - N8N_PATH
      - WEBHOOK_TUNNEL_URL=http://${DOMAIN_NAME}${N8N_PATH}
      - VUE_APP_URL_BASE_API=http://${DOMAIN_NAME}${N8N_PATH}
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - ${DATA_FOLDER}/.n8n:/root/.n8n
docker/images/n8n-custom/Dockerfile copy (new file, 49 lines added)

@@ -0,0 +1,49 @@
FROM node:12.16-alpine as builder
# FROM node:12.16-alpine

# Update everything and install needed dependencies
RUN apk add --update graphicsmagick tzdata git tini su-exec

USER root

# Install all needed dependencies
RUN apk --update add --virtual build-dependencies python build-base ca-certificates && \
	npm_config_user=root npm install -g full-icu lerna

ENV NODE_ICU_DATA /usr/local/lib/node_modules/full-icu

WORKDIR /data

COPY lerna.json .
COPY package.json .
COPY packages/cli/ ./packages/cli/
COPY packages/core/ ./packages/core/
COPY packages/editor-ui/ ./packages/editor-ui/
COPY packages/nodes-base/ ./packages/nodes-base/
COPY packages/workflow/ ./packages/workflow/
RUN rm -rf node_modules packages/*/node_modules packages/*/dist

RUN npm install --loglevel notice
RUN lerna bootstrap --hoist
RUN npm run build


FROM node:12.16-alpine

WORKDIR /data

# Install all needed dependencies
RUN npm_config_user=root npm install -g full-icu

USER root

ENV NODE_ICU_DATA /usr/local/lib/node_modules/full-icu

COPY --from=builder /data ./

RUN apk add --update graphicsmagick tzdata git tini su-exec

COPY docker/images/n8n-dev/docker-entrypoint.sh /docker-entrypoint.sh
ENTRYPOINT ["tini", "--", "/docker-entrypoint.sh"]

EXPOSE 5678/tcp
docker/images/n8n-rhel7/Dockerfile (new file, 23 lines added)

@@ -0,0 +1,23 @@
FROM richxsl/rhel7

ARG N8N_VERSION

RUN if [ -z "$N8N_VERSION" ] ; then echo "The N8N_VERSION argument is missing!" ; exit 1; fi

RUN \
	yum install -y gcc-c++ make

RUN \
	curl -sL https://rpm.nodesource.com/setup_12.x | sudo -E bash -

RUN \
	sudo yum install nodejs

# Set a custom user to not have n8n run as root
USER root

RUN npm_config_user=root npm install -g n8n@${N8N_VERSION}

WORKDIR /data

CMD "n8n"
docker/images/n8n-rhel7/README.md (new file, 16 lines added)

@@ -0,0 +1,16 @@
## Build Docker-Image

```
docker build --build-arg N8N_VERSION=<VERSION> -t n8nio/n8n:<VERSION> .

# For example:
docker build --build-arg N8N_VERSION=0.36.1 -t n8nio/n8n:0.36.1-rhel7 .
```


```
docker run -it --rm \
	--name n8n \
	-p 5678:5678 \
	n8nio/n8n:0.25.0-ubuntu
```
docker/images/n8n-rpi/Dockerfile (new file, 20 lines added)

@@ -0,0 +1,20 @@
FROM arm32v7/node:12.16

ARG N8N_VERSION

RUN if [ -z "$N8N_VERSION" ] ; then echo "The N8N_VERSION argument is missing!" ; exit 1; fi

RUN \
	apt-get update && \
	apt-get -y install graphicsmagick gosu

RUN npm_config_user=root npm install -g full-icu n8n@${N8N_VERSION}

ENV NODE_ICU_DATA /usr/local/lib/node_modules/full-icu
ENV NODE_ENV production

WORKDIR /data

USER node

CMD n8n
docker/images/n8n-rpi/README.md (new file, 21 lines added)

@@ -0,0 +1,21 @@
## n8n - Raspberry PI Docker Image

Dockerfile to build n8n for Raspberry PI.

For information about how to run n8n with Docker check the generic
[Docker-Readme](https://github.com/n8n-io/n8n/tree/master/docker/images/n8n/README.md)


```
docker build --build-arg N8N_VERSION=<VERSION> -t n8nio/n8n:<VERSION> .

# For example:
docker build --build-arg N8N_VERSION=0.43.0 -t n8nio/n8n:0.43.0-rpi .
```

```
docker run -it --rm \
	--name n8n \
	-p 5678:5678 \
	n8nio/n8n:0.70.0-rpi
```
@@ -1,30 +1,28 @@
 # n8n - Workflow Automation

-![n8n.io - Workflow Automation](https://raw.githubusercontent.com/n8n-io/n8n/master/docs/images/n8n-logo.png)
+![n8n.io - Workflow Automation](https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-logo.png)

 n8n is a free and open [fair-code](http://faircode.io) licensed node based Workflow Automation Tool. It can be self-hosted, easily extended, and so also used with internal tools.

-<a href="https://raw.githubusercontent.com/n8n-io/n8n/master/docs/images/n8n-screenshot.png"><img src="https://raw.githubusercontent.com/n8n-io/n8n/master/docs/images/n8n-screenshot.png" width="550" alt="n8n.io - Screenshot"></a>
+<a href="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png"><img src="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png" width="550" alt="n8n.io - Screenshot"></a>

 ## Contents

 - [Demo](#demo)
 - [Available integrations](#available-integrations)
 - [Documentation](#documentation)
 - [Start n8n in Docker](#start-n8n-in-docker)
 - [Start with tunnel](#start-with-tunnel)
 - [Securing n8n](#securing-n8n)
 - [Persist data](#persist-data)
 - [Passing Sensitive Data via File](#passing-sensitive-data-via-file)
 - [Updating a Running docker-compose Instance](#updating-a-running-docker-compose-instance)
 - [Example Setup with Lets Encrypt](#example-setup-with-lets-encrypt)
 - [What does n8n mean and how do you pronounce it](#what-does-n8n-mean-and-how-do-you-pronounce-it)
 - [Support](#support)
 - [Jobs](#jobs)
 - [Upgrading](#upgrading)
 - [License](#license)

 ## Demo

@@ -49,9 +47,9 @@ Additional information and example workflows on the n8n.io website: [https://n8n
 ```
 docker run -it --rm \
 	--name n8n \
 	-p 5678:5678 \
 	n8nio/n8n
 ```

 You can then access n8n by opening:

@@ -71,14 +69,13 @@ To use it simply start n8n with `--tunnel`
 ```
 docker run -it --rm \
 	--name n8n \
 	-p 5678:5678 \
 	-v ~/.n8n:/root/.n8n \
 	n8nio/n8n \
 	n8n start --tunnel
 ```

 ## Securing n8n

 By default n8n can be accessed by everybody. This is OK if you have it only running
@@ -93,7 +90,6 @@ N8N_BASIC_AUTH_USER=<USER>
 N8N_BASIC_AUTH_PASSWORD=<PASSWORD>
 ```

 ## Persist data

 The workflow data gets by default saved in an SQLite database in the user
@@ -102,10 +98,10 @@ settings like webhook URL and encryption key.
 ```
 docker run -it --rm \
 	--name n8n \
 	-p 5678:5678 \
 	-v ~/.n8n:/root/.n8n \
 	n8nio/n8n
 ```

 ### Start with other Database

@@ -121,7 +117,6 @@ for the credentials. If none gets found n8n creates automatically one on
 startup. In case credentials are already saved with a different encryption key
 it can not be used anymore as encrypting it is not possible anymore.

 #### Use with MongoDB

 > **WARNING**: Use Postgres if possible! Mongo has problems with saving large
@@ -129,40 +124,39 @@ it can not be used anymore as encrypting it is not possible anymore.
 > may be dropped in the future.

 Replace the following placeholders with the actual data:
-- <MONGO_DATABASE>
-- <MONGO_HOST>
-- <MONGO_PORT>
-- <MONGO_USER>
-- <MONGO_PASSWORD>
+- MONGO_DATABASE
+- MONGO_HOST
+- MONGO_PORT
+- MONGO_USER
+- MONGO_PASSWORD

 ```
 docker run -it --rm \
 	--name n8n \
 	-p 5678:5678 \
 	-e DB_TYPE=mongodb \
 	-e DB_MONGODB_CONNECTION_URL="mongodb://<MONGO_USER>:<MONGO_PASSWORD>@<MONGO_SERVER>:<MONGO_PORT>/<MONGO_DATABASE>" \
 	-v ~/.n8n:/root/.n8n \
 	n8nio/n8n \
 	n8n start
 ```

 A full working setup with docker-compose can be found [here](https://github.com/n8n-io/n8n/blob/master/docker/compose/withMongo/README.md)

 #### Use with PostgresDB

 Replace the following placeholders with the actual data:
-- <POSTGRES_DATABASE>
-- <POSTGRES_HOST>
-- <POSTGRES_PASSWORD>
-- <POSTGRES_PORT>
-- <POSTGRES_USER>
-- <POSTGRES_SCHEMA>
+- POSTGRES_DATABASE
+- POSTGRES_HOST
+- POSTGRES_PASSWORD
+- POSTGRES_PORT
+- POSTGRES_USER
+- POSTGRES_SCHEMA

 ```
 docker run -it --rm \
 	--name n8n \
 	-p 5678:5678 \
 	-e DB_TYPE=postgresdb \
 	-e DB_POSTGRESDB_DATABASE=<POSTGRES_DATABASE> \
 	-e DB_POSTGRESDB_HOST=<POSTGRES_HOST> \
@@ -170,39 +164,37 @@ docker run -it --rm \
 	-e DB_POSTGRESDB_USER=<POSTGRES_USER> \
 	-e DB_POSTGRESDB_SCHEMA=<POSTGRES_SCHEMA> \
 	-e DB_POSTGRESDB_PASSWORD=<POSTGRES_PASSWORD> \
 	-v ~/.n8n:/root/.n8n \
 	n8nio/n8n \
 	n8n start
 ```

 A full working setup with docker-compose can be found [here](https://github.com/n8n-io/n8n/blob/master/docker/compose/withPostgres/README.md)

 #### Use with MySQL

 Replace the following placeholders with the actual data:
-- <MYSQLDB_DATABASE>
-- <MYSQLDB_HOST>
-- <MYSQLDB_PASSWORD>
-- <MYSQLDB_PORT>
-- <MYSQLDB_USER>
+- MYSQLDB_DATABASE
+- MYSQLDB_HOST
+- MYSQLDB_PASSWORD
+- MYSQLDB_PORT
+- MYSQLDB_USER

 ```
 docker run -it --rm \
 	--name n8n \
 	-p 5678:5678 \
 	-e DB_TYPE=mysqldb \
 	-e DB_MYSQLDB_DATABASE=<MYSQLDB_DATABASE> \
 	-e DB_MYSQLDB_HOST=<MYSQLDB_HOST> \
 	-e DB_MYSQLDB_PORT=<MYSQLDB_PORT> \
 	-e DB_MYSQLDB_USER=<MYSQLDB_USER> \
 	-e DB_MYSQLDB_PASSWORD=<MYSQLDB_PASSWORD> \
 	-v ~/.n8n:/root/.n8n \
 	n8nio/n8n \
 	n8n start
 ```

 ## Passing Sensitive Data via File

 To avoid passing sensitive information via environment variables "_FILE" may be
@@ -211,16 +203,15 @@ with the given name. That makes it possible to load data easily from
 Docker- and Kubernetes-Secrets.

 The following environment variables support file input:
 - DB_MONGODB_CONNECTION_URL_FILE
 - DB_POSTGRESDB_DATABASE_FILE
 - DB_POSTGRESDB_HOST_FILE
 - DB_POSTGRESDB_PASSWORD_FILE
 - DB_POSTGRESDB_PORT_FILE
 - DB_POSTGRESDB_USER_FILE
 - DB_POSTGRESDB_SCHEMA_FILE
 - N8N_BASIC_AUTH_PASSWORD_FILE
 - N8N_BASIC_AUTH_USER_FILE
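For example, the basic-auth password could be read from a mounted secret file instead of being passed directly; the file paths below are only an illustration:

```
docker run -it --rm \
	--name n8n \
	-p 5678:5678 \
	-v /path/on/host/n8n-password.txt:/files/n8n-password.txt:ro \
	-e N8N_BASIC_AUTH_ACTIVE=true \
	-e N8N_BASIC_AUTH_USER=<USER> \
	-e N8N_BASIC_AUTH_PASSWORD_FILE=/files/n8n-password.txt \
	n8nio/n8n
```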
 ## Example Setup with Lets Encrypt

@@ -235,7 +226,7 @@ docker pull n8nio/n8n
 # Stop current setup
 sudo docker-compose stop
 # Delete it (will only delete the docker-containers, data is stored separately)
 sudo docker-compose rm
 # Then start it again
 sudo docker-compose up -d
 ```

@@ -251,11 +242,11 @@ the environment variable `TZ`.
 Example to use the same timezone for both:
 ```
 docker run -it --rm \
 	--name n8n \
 	-p 5678:5678 \
 	-e GENERIC_TIMEZONE="Europe/Berlin" \
 	-e TZ="Europe/Berlin" \
 	n8nio/n8n
 ```

@@ -300,7 +291,7 @@ If you have problems or questions go to our forum, we will then try to help you
 If you are interested in working for n8n and so shape the future of the project
 check out our job posts:

-[https://jobs.n8n.io](https://jobs.n8n.io)
+[https://n8n.join.com](https://n8n.join.com)
@@ -1 +0,0 @@
docs-old.n8n.io
@@ -1,10 +0,0 @@
# n8n Documentation

This is the documentation of n8n, a free and open [fair-code](http://faircode.io) licensed node-based Workflow Automation Tool.

It covers everything from setup to usage and development. It is still a work in progress and all contributions are welcome.

## What is n8n?

n8n (pronounced nodemation) helps you to interconnect every app with an API in the world with each other to share and manipulate its data without a single line of code. It is an easy-to-use, user-friendly and highly customizable service, which uses an intuitive user interface for you to design your unique workflows very fast. Hosted on your server and not based in the cloud, it keeps your sensitive data very secure in your own trusted database.
@@ -1,43 +0,0 @@
- Home

  - [Welcome](/)

- Getting started

  - [Key Components](key-components.md)
  - [Quick Start](quick-start.md)
  - [Setup](setup.md)
  - [Tutorials](tutorials.md)
  - [Docker](docker.md)

- Advanced

  - [Configuration](configuration.md)
  - [Data Structure](data-structure.md)
  - [Database](database.md)
  - [Keyboard Shortcuts](keyboard-shortcuts.md)
  - [Node Basics](node-basics.md)
  - [Nodes](nodes.md)
  - [Security](security.md)
  - [Sensitive Data](sensitive-data.md)
  - [Server Setup](server-setup.md)
  - [Start Workflows via CLI](start-workflows-via-cli.md)
  - [Workflow](workflow.md)

- Development

  - [Create Node](create-node.md)
  - [Development](development.md)

- Other

  - [FAQ](faq.md)
  - [License](license.md)
  - [Troubleshooting](troubleshooting.md)

- Links

  - [![Jobs](https://n8n.io/favicon.ico ':size=16')Jobs](https://jobs.n8n.io)
  - [![Website](https://n8n.io/favicon.ico ':size=16')n8n.io](https://n8n.io)
@@ -1,244 +0,0 @@
# Configuration

It is possible to change some of the n8n defaults via special environment variables.
The ones that currently exist are:

## Publish

Sets how n8n should be made available.

```bash
# The port n8n should be made available on
N8N_PORT=5678

# The IP address n8n should listen on
N8N_LISTEN_ADDRESS=0.0.0.0

# These are currently only important for the webhook URL creation.
# So if "WEBHOOK_TUNNEL_URL" is set, they get ignored. It is however
# encouraged to set them correctly anyway in case they become
# important in the future.
N8N_PROTOCOL=https
N8N_HOST=n8n.example.com
```

## Base URL

Tells the frontend how to reach the REST API of the backend.

```bash
export VUE_APP_URL_BASE_API="https://n8n.example.com/"
```

## Encryption Key

n8n creates a random encryption key automatically on the first launch and saves
it in the `~/.n8n` folder. That key is used to encrypt the credentials before
they get saved to the database. It is also possible to overwrite that key and
set it via an environment variable.

```bash
export N8N_ENCRYPTION_KEY="<SOME RANDOM STRING>"
```

## Execution Data Manual Runs

Normally, executions which got started via the Editor UI will not be saved, as
they are normally only for testing and debugging. That default can be changed
with this environment variable.

```bash
export EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS=true
```

This setting can also be overwritten on a per workflow basis in the workflow
settings in the Editor UI.

## Execution Data Error/Success

When a workflow gets executed, it will save the result in the database. That's
the case for executions that succeeded and for the ones that failed. The
default behavior can be changed like this:

```bash
export EXECUTIONS_DATA_SAVE_ON_ERROR=none
export EXECUTIONS_DATA_SAVE_ON_SUCCESS=none
```

Possible values are:
- **all**: Saves all data
- **none**: Does not save anything (recommended if a workflow runs very often and/or processes a lot of data; set up an "Error Workflow" instead)

These settings can also be overwritten on a per workflow basis in the workflow
settings in the Editor UI.

## Execute In Same Process

All workflows get executed in their own separate process. This ensures that all CPU cores
get used and that they do not block each other on CPU intensive tasks. Additionally, this makes sure that
the crash of one execution does not take down the whole application. The disadvantage is, however,
that it slows down the start-time considerably and uses much more memory. So in case the
workflows are not CPU intensive and they have to start very fast, it is possible to run them
all directly in the main process with this setting.

```bash
export EXECUTIONS_PROCESS=main
```

## Exclude Nodes

It is possible to not allow users to use nodes of a specific node type. For example, if you
do not want people to be able to write data to disk with the "n8n-nodes-base.writeBinaryFile"
node, or to execute commands with the "n8n-nodes-base.executeCommand" node, you can
set the following:

```bash
export NODES_EXCLUDE="[\"n8n-nodes-base.executeCommand\",\"n8n-nodes-base.writeBinaryFile\"]"
```

## Custom Nodes Location

Every user can add custom nodes that get loaded by n8n on startup. The default
location is in the subfolder `.n8n/custom` of the user who started n8n.
Additional folders can be defined with an environment variable.

```bash
export N8N_CUSTOM_EXTENSIONS="/home/jim/n8n/custom-nodes;/data/n8n/nodes"
```

## Use built-in and external modules in Function-Nodes

For security reasons, importing modules is restricted by default in the Function-Nodes.
It is, however, possible to lift that restriction for built-in and external modules by
setting the following environment variables:
- `NODE_FUNCTION_ALLOW_BUILTIN`: For builtin modules
- `NODE_FUNCTION_ALLOW_EXTERNAL`: For external modules sourced from the n8n/node_modules directory. External module support is disabled when the env variable is not set.

```bash
# Allows usage of all builtin modules
export NODE_FUNCTION_ALLOW_BUILTIN=*

# Allows usage of only crypto
export NODE_FUNCTION_ALLOW_BUILTIN=crypto

# Allows usage of only crypto and fs
export NODE_FUNCTION_ALLOW_BUILTIN=crypto,fs

# Allow usage of external npm modules. Wildcard matching is not supported.
export NODE_FUNCTION_ALLOW_EXTERNAL=moment,lodash
```

## SSL

It is possible to start n8n with SSL enabled by supplying a certificate to use:

```bash
export N8N_PROTOCOL=https
export N8N_SSL_KEY=/data/certs/server.key
export N8N_SSL_CERT=/data/certs/server.pem
```

## Timezone

The timezone is set by default to "America/New_York". For instance, it is used by the
Cron node to know at what time the workflow should be started. To set a different
default timezone simply set `GENERIC_TIMEZONE` to the appropriate value. For example,
if you want to set the timezone to Berlin (Germany):

```bash
export GENERIC_TIMEZONE="Europe/Berlin"
```

You can find the name of your timezone here:
[https://momentjs.com/timezone/](https://momentjs.com/timezone/)

## User Folder

User-specific data like the encryption key, SQLite database file, and
the ID of the tunnel (if used) gets saved by default in the subfolder
`.n8n` of the user who started n8n. It is possible to overwrite the
user folder via an environment variable.

```bash
export N8N_USER_FOLDER="/home/jim/n8n"
```

## Webhook URL

The webhook URL will normally be created automatically by combining
`N8N_PROTOCOL`, `N8N_HOST` and `N8N_PORT`. However, if n8n runs behind a
reverse proxy that would not work. That's because n8n runs internally
on port 5678 but is exposed to the web via the reverse proxy on port 443. In
that case, it is important to set the webhook URL manually so that it can be
displayed correctly in the Editor UI and, even more importantly, so that the correct
webhook URLs get registered with the external services.

```bash
export WEBHOOK_TUNNEL_URL="https://n8n.example.com/"
```

## Configuration via file

It is also possible to configure n8n using a configuration file.

It is not necessary to define all values but only the ones that should be
different from the defaults.

If needed, multiple files can also be supplied, for example to have generic
base settings and some specific ones depending on the environment.

The path to the JSON configuration file to use can be set using the environment
variable `N8N_CONFIG_FILES`.

```bash
# Single file
export N8N_CONFIG_FILES=/folder/my-config.json

# Multiple files can be comma-separated
export N8N_CONFIG_FILES=/folder/my-config.json,/folder/production.json
```

A possible configuration file could look like this:
```json
{
	"executions": {
		"process": "main",
		"saveDataOnSuccess": "none"
	},
	"generic": {
		"timezone": "Europe/Berlin"
	},
	"security": {
		"basicAuth": {
			"active": true,
			"user": "frank",
			"password": "some-secure-password"
		}
	},
	"nodes": {
		"exclude": "[\"n8n-nodes-base.executeCommand\",\"n8n-nodes-base.writeBinaryFile\"]"
	}
}
```

All possible values which can be set and their defaults can be found here:

[https://github.com/n8n-io/n8n/blob/master/packages/cli/config/index.ts](https://github.com/n8n-io/n8n/blob/master/packages/cli/config/index.ts)
@@ -1,145 +0,0 @@
# Create Node

It is quite easy to create your own nodes in n8n. Mainly three things have to be defined:

1. Generic information like name, description, image/icon
1. The parameters to display via which the user can interact with it
1. The code to run once the node gets executed

To simplify the development process, we created a very basic CLI which creates boilerplate code to get started, builds the node (as they are written in TypeScript), and copies it to the correct location.

## Create the first basic node

1. Install the n8n-node-dev CLI: `npm install -g n8n-node-dev`
1. Create and go into the newly created folder in which you want to keep the code of the node
1. Use the CLI to create boilerplate node code: `n8n-node-dev new`
1. Answer the questions (the "Execute" node type is the regular node type that you probably want to create).
   It will then create the node in the current folder.
1. Program… Add the functionality to the node
1. Build the node and copy it to the correct location: `n8n-node-dev build`
   That command will build the JavaScript version of the node from the TypeScript code and copy it to the user folder where custom nodes get read from: `~/.n8n/custom/`
1. Restart n8n and refresh the window so that the new node gets displayed

## Create own custom n8n-nodes-module

If you want to create multiple custom nodes which are either:

- Only for yourself/your company
- Only useful for a small number of people
- Require many or large dependencies

it is best to create your own `n8n-nodes-module` which can be installed separately.
That is a simple npm package that contains the nodes and is set up in a way
that n8n can automatically find and load them on startup.

When creating such a module, the following rules have to be followed so that n8n
can automatically find the nodes in the module:

- The name of the module has to start with `n8n-nodes-`
- The `package.json` file has to contain a key `n8n` with the paths to nodes and credentials (see the sketch below)
- The module has to be installed alongside n8n

An example starter module which contains one node and credentials and implements
the above can be found here:

[https://github.com/n8n-io/n8n-nodes-starter](https://github.com/n8n-io/n8n-nodes-starter)
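For illustration, the `n8n` key in `package.json` could look like the following sketch (module and file names are placeholders, not taken from the starter module):

```json
{
	"name": "n8n-nodes-my-custom-nodes",
	"version": "0.1.0",
	"main": "index.js",
	"n8n": {
		"credentials": [
			"dist/credentials/MyServiceApi.credentials.js"
		],
		"nodes": [
			"dist/nodes/MyService.node.js"
		]
	}
}
```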
### Setup to use n8n-nodes-module

To use a custom `n8n-nodes-module`, it simply has to be installed alongside n8n.
For example like this:

```bash
# Create folder for n8n installation
mkdir my-n8n
cd my-n8n

# Install n8n
npm install n8n

# Install custom nodes module
npm install n8n-nodes-my-custom-nodes

# Start n8n
n8n
```

### Development/Testing of custom n8n-nodes-module

This works in the same way as for any other npm module.

Execute in the folder which contains the code of the custom `n8n-nodes-module`
which should be loaded with n8n:

```bash
# Build the code
npm run build

# "Publish" the package locally
npm link
```

Then in the folder in which n8n is installed:

```bash
# "Install" the above locally published module
npm link n8n-nodes-my-custom-nodes

# Start n8n
n8n
```

## Node Development Guidelines

Please make sure that everything works correctly and that no unnecessary code gets added. It is important to follow these guidelines:

### Do not change incoming data

Never change the incoming data a node receives (which can be queried with `this.getInputData()`) as it gets shared by all nodes. If data has to get added, changed or deleted, it has to be cloned and the new data returned. If that is not done, sibling nodes which execute after the current one will operate on the altered data and would process different data than they were supposed to.
It is, however, not needed to always clone all the data. If a node, for example, changes only the binary data but not the JSON data, a new item can be created which reuses the reference to the JSON item.

An example can be seen in the code of the [ReadBinaryFile-Node](https://github.com/n8n-io/n8n/blob/master/packages/nodes-base/nodes/ReadBinaryFile.node.ts#L69-L83).

### Write nodes in TypeScript

All code of n8n is written in TypeScript and hence, the nodes should also be written in TypeScript. That makes development easier and faster, and avoids at least some bugs.

### Use the built-in request library

Some third-party services have their own libraries on npm which make it easier to create an integration. It can be quite tempting to use them. The problem with those is that you do not add just one dependency but also all the dependencies of the dependencies. This means more and more code gets added, has to get loaded, and can introduce security vulnerabilities, bugs and so on. So please use the built-in module which can be used like this:

```typescript
const response = await this.helpers.request(options);
```

That is simply using the npm package [`request-promise-native`](https://github.com/request/request-promise-native), which is the basic npm `request` module but with promises. For a full set of `options` consider looking at [the underlying `request` options documentation](https://github.com/request/request#requestoptions-callback).

### Reuse parameter names

When a node can perform multiple operations like edit and delete some kind of entity, both operations would need an entity-id. Do not call them "editId" and "deleteId"; simply call them "id". n8n can handle multiple parameters with the same name without a problem as long as only one is visible. To make sure that is the case, the "displayOptions" can be used. By keeping the same name, the value can be kept if a user switches the operation from "edit" to "delete".
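As an illustrative sketch (not taken from an existing node), two operations could share one "id" parameter like this, with "displayOptions" deciding which one is visible:

```typescript
import { INodeProperties } from 'n8n-workflow';

// Both operations use the parameter name "id"; only the one belonging to the
// currently selected operation is displayed.
const idParameters: INodeProperties[] = [
	{
		displayName: 'ID',
		name: 'id',
		type: 'string',
		default: '',
		description: 'ID of the entity to edit',
		displayOptions: {
			show: {
				operation: ['edit'],
			},
		},
	},
	{
		displayName: 'ID',
		name: 'id',
		type: 'string',
		default: '',
		description: 'ID of the entity to delete',
		displayOptions: {
			show: {
				operation: ['delete'],
			},
		},
	},
];
```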
### Create an "Options" parameter

Some nodes may need a lot of options. Add only the very important ones to the top level and, for all others, create an "Options" parameter where they can be added if needed. This ensures that the interface stays clean and does not unnecessarily confuse people. A good example of that would be the XML node.

### Follow existing parameter naming guideline

There is not much of a guideline yet, but if your node can do multiple things, call the parameter which sets the behavior either "mode" (like the "Merge" and "XML" nodes) or "operation" like most of the other ones. If these operations can be done on different resources (like "User" or "Order"), create a "resource" parameter (like the "Pipedrive" and "Trello" nodes).

### Node Icons

Check existing node icons as a reference when you create your own ones. An icon should have a resolution of 60x60px and be saved as PNG.
@@ -1,39 +0,0 @@
# Data Structure

For "basic usage" it is not necessarily needed to understand how the data that
gets passed from one node to another is structured. However, it becomes important if you want to:

- create your own node
- write custom expressions
- use the Function or Function Item node
- get the most out of n8n

In n8n, all the data that is passed between nodes is an array of objects. It has the following structure:

```json
[
	{
		// Each item has to contain a "json" property. But it can be an empty object like {}.
		// Any kind of JSON data is allowed. So arrays and the data being deeply nested is fine.
		json: { // The actual data n8n operates on (required)
			// This data is only an example; it could be any kind of JSON data
			jsonKeyName: 'keyValue',
			anotherJsonKey: {
				lowerLevelJsonKey: 1
			}
		},
		// Binary data of the item. Most items in n8n do not contain any (optional)
		binary: {
			// The key-name "binaryKeyName" is only an example. Any kind of key-name is possible.
			binaryKeyName: {
				data: '....', // Base64 encoded binary data (required)
				mimeType: 'image/png', // Optional but should be set if possible (optional)
				fileExtension: 'png', // Optional but should be set if possible (optional)
				fileName: 'example.png', // Optional but should be set if possible (optional)
			}
		}
	},
	...
]
```
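For illustration (a minimal sketch): the code pasted into a Function node receives exactly such an array as `items` and has to return one again.

```typescript
// Runs inside a Function node: "items" is the array described above.
// New items are returned instead of the incoming ones being modified.
return items.map((item) => {
	return {
		json: {
			...item.json,
			copiedValue: item.json.jsonKeyName,
		},
	};
});
```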
docs/database.md (109 lines removed)

@@ -1,109 +0,0 @@
# Database

By default, n8n uses SQLite to save credentials, past executions, and workflows. However,
n8n also supports MongoDB and PostgresDB.

## Shared Settings

The following environment variables get used by all databases:

- `DB_TABLE_PREFIX` (default: '') - Prefix for table names
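For example (an illustrative sketch), to give all n8n tables a common prefix:

```bash
export DB_TABLE_PREFIX=n8n_

n8n start
```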
## MongoDB

!> **WARNING**: Use PostgresDB, if possible! MongoDB has problems saving large
amounts of data in a document, among other issues. So, support
may be dropped in the future.

To use MongoDB as the database, you can provide the following environment variables like
in the example below:
- `DB_TYPE=mongodb`
- `DB_MONGODB_CONNECTION_URL=<CONNECTION_URL>`

Replace the following placeholders with the actual data:
- MONGO_DATABASE
- MONGO_HOST
- MONGO_PORT
- MONGO_USER
- MONGO_PASSWORD

```bash
export DB_TYPE=mongodb
export DB_MONGODB_CONNECTION_URL=mongodb://MONGO_USER:MONGO_PASSWORD@MONGO_HOST:MONGO_PORT/MONGO_DATABASE
n8n start
```

## PostgresDB

To use PostgresDB as the database, you can provide the following environment variables:
- `DB_TYPE=postgresdb`
- `DB_POSTGRESDB_DATABASE` (default: 'n8n')
- `DB_POSTGRESDB_HOST` (default: 'localhost')
- `DB_POSTGRESDB_PORT` (default: 5432)
- `DB_POSTGRESDB_USER` (default: 'root')
- `DB_POSTGRESDB_PASSWORD` (default: empty)
- `DB_POSTGRESDB_SCHEMA` (default: 'public')

```bash
export DB_TYPE=postgresdb
export DB_POSTGRESDB_DATABASE=n8n
export DB_POSTGRESDB_HOST=postgresdb
export DB_POSTGRESDB_PORT=5432
export DB_POSTGRESDB_USER=n8n
export DB_POSTGRESDB_PASSWORD=n8n
export DB_POSTGRESDB_SCHEMA=n8n

n8n start
```

## MySQL / MariaDB

The compatibility with MySQL/MariaDB has been tested. Even then, it is advisable to observe the operation of the application with this database as this option has been added only recently. If you spot any problems, feel free to submit a bug report or a pull request.

To use MySQL as database you can provide the following environment variables:
- `DB_TYPE=mysqldb` or `DB_TYPE=mariadb`
- `DB_MYSQLDB_DATABASE` (default: 'n8n')
- `DB_MYSQLDB_HOST` (default: 'localhost')
- `DB_MYSQLDB_PORT` (default: 3306)
- `DB_MYSQLDB_USER` (default: 'root')
- `DB_MYSQLDB_PASSWORD` (default: empty)

```bash
export DB_TYPE=mysqldb
export DB_MYSQLDB_DATABASE=n8n
export DB_MYSQLDB_HOST=mysqldb
export DB_MYSQLDB_PORT=3306
export DB_MYSQLDB_USER=n8n
export DB_MYSQLDB_PASSWORD=n8n

n8n start
```

## SQLite

This is the default database that gets used if nothing is defined.

The database file is located at:
`~/.n8n/database.sqlite`

## Other Databases

Currently, only the databases mentioned above are supported. n8n internally uses
[TypeORM](https://typeorm.io), so adding support for the following databases
should not be too much work:

- CockroachDB
- Microsoft SQL
- Oracle

If you cannot use any of the currently supported databases for some reason and
you can code, we'd appreciate your support in the form of a pull request. If not, you can request
support here:

[https://community.n8n.io/c/feature-requests/cli](https://community.n8n.io/c/feature-requests/cli)
@@ -1,3 +0,0 @@
# Development

Have you found a bug :bug:? Or maybe you have a nice feature :sparkles: to contribute? The [CONTRIBUTING guide](https://github.com/n8n-io/n8n/blob/master/CONTRIBUTING.md) will help you get your development environment ready in minutes.
@@ -1,7 +0,0 @@
# Docker

Detailed information about how to run n8n in Docker can be found in the README
of the [Docker Image](https://github.com/n8n-io/n8n/blob/master/docker/images/n8n/README.md).

A basic step by step example setup of n8n with docker-compose and Let's Encrypt is available on the
[Server Setup](server-setup.md) page.
docs/faq.md (47 lines removed)

@@ -1,47 +0,0 @@
# FAQ

## Integrations

### Can you create an integration for service X?

You can request new integrations to be added to our forum. There is a special section for that where
other users can also upvote it so that we know which integrations are important and should be
created next. Request a new feature [here](https://community.n8n.io/c/feature-requests/nodes).

### An integration exists already but a feature is missing. Can you add it?

Adding new functionality to an existing integration is normally not that complicated. So the chance is
high that we can do that quite fast. Post your feature request in the forum and we'll see
what we can do. Request a new feature [here](https://community.n8n.io/c/feature-requests/nodes).

### How can I create an integration myself?

Information about that can be found in the [CONTRIBUTING guide](https://github.com/n8n-io/n8n/blob/master/CONTRIBUTING.md).

## License

### Which license does n8n use?

n8n is [fair-code](http://faircode.io) licensed under [Apache 2.0 with Commons Clause](https://github.com/n8n-io/n8n/blob/master/packages/cli/LICENSE.md)

### Is n8n open-source?

No. The [Commons Clause](https://commonsclause.com) that is attached to the Apache 2.0 license takes away some rights. Hence, according to the definition of the [Open Source Initiative (OSI)](https://opensource.org/osd), n8n is not open-source. Nonetheless, the source code is open and everyone (individuals and companies) can use it for free. However, it is not allowed to make money directly with n8n.

For instance, one cannot charge others to host or support n8n. However, to make things simpler, we grant everyone (individuals and companies) the right to offer consulting or support without prior permission as long as it is less than 30,000 USD ($30k) per annum.
If your revenue from services based on n8n is greater than $30k per annum, we'd invite you to become a partner and apply for a license. If you have any questions about this, feel free to reach out to us at [license@n8n.io](mailto:license@n8n.io).

### Why is n8n not open-source but [fair-code](http://faircode.io) licensed instead?

We love open-source and the idea that everybody can freely use and extend what we wrote. Our community is at the heart of everything that we do and we understand that people who contribute to a project are the main drivers that push a project forward. So to make sure that the project continues to evolve and stay alive in the longer run, we decided to attach the Commons Clause. This ensures that no other person or company can make money directly with n8n. Especially if it competes with how we plan to finance our further development. For the greater majority of the people, it will not make any difference at all. At the same time, it protects the project.

As n8n itself depends on and uses a lot of other open-source projects, it is only fair that we support them back. That is why we have planned to contribute a certain percentage of revenue/profit every month to these projects.

We have already started with the first monthly contributions via [Open Collective](https://opencollective.com/n8n). It is not much yet, but we hope to be able to ramp that up substantially over time.
@ -1,53 +0,0 @@
<!DOCTYPE html>
<html lang="en">

<head>
  <meta charset="UTF-8">
  <title>n8n Documentation</title>
  <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1" />
  <meta name="description" content="Documentation of n8n - Open Source Workflow Automation Tool">
  <meta name="viewport"
    content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
  <link rel="stylesheet" href="//unpkg.com/docsify/lib/themes/vue.css">
  <style>
    .app-name-link img {
      width: 200px;
    }

    .sidebar-nav a img {
      position: relative;
      top: 3px;
      margin-right: 0.5em;
    }
  </style>
</head>

<body>
  <div id="app"></div>
  <script src="//unpkg.com/docsify-edit-on-github/index.js"></script>
  <script>
    window.$docsify = {
      auto2top: true,
      ga: 'UA-146470481-3',
      themeColor: '#ff0000',
      name: 'docs.n8n.io',
      logo: 'images/n8n-logo.png',
      loadSidebar: true,
      subMaxLevel: 2,
      repo: '',
      plugins: [
        EditOnGithubPlugin.create('https://github.com/n8n-io/n8n/tree/master/docs/')
      ]
    }
  </script>
  <script src="//unpkg.com/docsify/lib/docsify.min.js"></script>
  <script src="https://unpkg.com/docsify-copy-code@2"></script>
  <script src="//unpkg.com/prismjs/components/prism-bash.min.js"></script>
  <script src="//unpkg.com/prismjs/components/prism-yaml.min.js"></script>
  <script src="//unpkg.com/prismjs/components/prism-json.min.js"></script>
  <script src="//unpkg.com/prismjs/components/prism-typescript.min.js"></script>
  <script src="//unpkg.com/docsify/lib/plugins/ga.min.js"></script>
  <script src="//unpkg.com/docsify/lib/plugins/search.min.js"></script>
</body>

</html>
@ -1,25 +0,0 @@
# Key Components


## Connection

A connection establishes a link between nodes to route data through the workflow. Each node can have one or multiple connections.


## Node

A node is an entry point for retrieving data, a function to process data, or an exit for sending data. Data processing includes filtering, recomposing, and changing data. There can be one or several nodes for your API, service, or app. You can easily connect multiple nodes, which allows you to create simple and complex workflows intuitively.

For example, consider a Google Sheets node. It can be used to retrieve or write data to a Google Sheet.


## Trigger Node

A trigger node is a node that starts a workflow and supplies the initial data. What triggers it depends on the node: it could be the time, a webhook call, or an event from an external service.

For example, consider a Trello trigger node. When a Trello board gets updated, it will trigger a workflow to start using the data from the updated board.


## Workflow

A workflow is a canvas on which you can place and connect nodes. A workflow can be started manually or by trigger nodes. A workflow run ends when all active and connected nodes have processed their data.
@ -1,28 +0,0 @@
# Keyboard Shortcuts

The following keyboard shortcuts can currently be used:

## General

- **Ctrl + Left Mouse Button**: Move/Pan node view
- **Ctrl + a**: Select all nodes
- **Ctrl + Alt + n**: Create new workflow
- **Ctrl + o**: Open workflow
- **Ctrl + s**: Save the current workflow
- **Ctrl + v**: Paste nodes
- **Tab**: Open "Node Creator". Type to filter and navigate with arrow keys. To create, press "Enter"


## With node(s) selected

- **ArrowDown**: Select sibling node below the current one
- **ArrowLeft**: Select node left of the current one
- **ArrowRight**: Select node right of the current one
- **ArrowUp**: Select sibling node above the current one
- **Ctrl + c**: Copy nodes
- **Ctrl + x**: Cut nodes
- **d**: Deactivate nodes
- **Delete**: Delete nodes
- **F2**: Rename node
- **Shift + ArrowLeft**: Select all nodes left of the current one
- **Shift + ArrowRight**: Select all nodes right of the current one
@ -1,5 +0,0 @@
# License

n8n is [fair-code](http://faircode.io) licensed under [Apache 2.0 with Commons Clause](https://github.com/n8n-io/n8n/blob/master/packages/cli/LICENSE.md).

Additional information about the license can be found in the [FAQ](faq.md?id=license) and [fair-code](http://faircode.io).
@ -1,76 +0,0 @@
# Node Basics


## Types

There are two main node types in n8n: Trigger nodes and Regular nodes.


### Trigger Nodes

Trigger nodes start a workflow and supply the initial data. A workflow can contain multiple trigger nodes, but with each execution only one of them will execute, because the other trigger nodes would not have any input: they are the nodes from which the execution of the workflow starts.


### Regular Nodes

These nodes do the actual work. They can add, remove, and edit the data in the flow as well as request and send data to external APIs. They can do everything that is possible with Node.js in general.


## Credentials

External services need a way to identify and authenticate users. This data can range from an API key over an email/password combination to a very long multi-line private key, and can be saved in n8n as credentials.

Nodes in n8n can then request that credential information. As an additional layer of security, credentials can only be accessed by node types which specifically have the right to do so.

To make sure that the data is secure, it gets saved to the database encrypted. A random personal encryption key is used, which gets automatically generated on the first run of n8n and then saved under `~/.n8n/config`.


## Expressions

With the help of expressions, it is possible to set node parameters dynamically by referencing other data. That can be data from the flow, other nodes, the environment, or self-generated data. Expressions are normal text with placeholders (everything between `{{...}}`) that can execute JavaScript code, which offers access to special variables for accessing data.

An expression could look like this:

My name is: `{{$node["Webhook"].json["query"]["name"]}}`

This one would return "My name is: " and then attach the value that the node with the name "Webhook" outputs, there selecting the property "query" and its key "name". So if the node would output this data:

```json
{
  "query": {
    "name": "Jim"
  }
}
```

the value would be: "My name is: Jim"

The following special variables are available:

- **$binary**: Incoming binary data of a node
- **$evaluateExpression**: Evaluates a string as expression
- **$env**: Environment variables
- **$items**: Items of the current or parent nodes
- **$json**: Incoming JSON data of a node
- **$node**: Data of other nodes (binary, context, json, parameter, runIndex)
- **$parameters**: Parameters of the current node
- **$runIndex**: The current run index (the first time a node gets executed it is 0, the second time 1, ...)
- **$workflow**: Returns workflow metadata like: active, id, name
Normally it is not necessary to write these JavaScript variables manually, as they can be selected with the help of the Expression Editor.
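
For illustration, a hypothetical expression that combines two of these variables (the environment variable `BASE_URL` and the property `userId` are made up for this example) could look like this: `{{$env["BASE_URL"]}}/users/{{$json["userId"]}}`.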


## Parameters

Parameters can be set for most nodes in n8n. The values that get set define what exactly a node does.

Parameter values are static by default and are always the same no matter what kind of data the node processes. However, it is possible to set the values dynamically with the help of an expression. Using expressions, it is possible to make a parameter value dependent on other factors like the data of the flow or the parameters of other nodes.
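
For instance, the "Name" parameter of a node that creates Trello cards could be set to the expression `{{$json["name"]}}`, so that each created card gets named after the `name` property of the item currently being processed.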

More information about it can be found under [Expressions](#expressions).


## Pausing Node

Sometimes when creating and debugging a workflow, it is helpful to not execute specific nodes. To do that without disconnecting each node, you can pause them. When a node gets paused, the data passes through the node without being changed.

There are two ways to pause a node. You can either press the pause button which gets displayed above the node when hovering over it, or select the node and press "d".
247
docs/nodes.md
@ -1,247 +0,0 @@
# Nodes


## Function and Function Item Nodes

These are the most powerful nodes in n8n. With them, almost everything can be done if you know how to write JavaScript code. Both nodes work very similarly: they give you access to the incoming data and let you manipulate it.


### Difference between both nodes

The difference is that the code of the Function node gets executed only once. It receives the full items (JSON and binary data) as an array and expects an array of items as the return value. The returned items can be totally different from the incoming ones, so it is not only possible to remove and edit existing items but also to add or return totally new ones.

The code of the Function Item node, on the other hand, gets executed once for every item. It receives one item at a time as input, and only the JSON data. As a return value, it expects the JSON data of one single item. That makes it possible to add, remove, and edit JSON properties of items, but it is not possible to add new items or remove existing ones. Accessing and changing binary data is only possible via the methods `getBinaryData` and `setBinaryData`.

Both nodes support promises. So instead of returning the item or items directly, it is also possible to return a promise which resolves accordingly.
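
As a minimal sketch, returning a promise from a Function node could look like this (the property name `processedAt` is made up for illustration; in practice the promise would typically come from an asynchronous call, e.g. a request to an external API):

```typescript
// Return a promise that resolves to the (modified) items instead of
// returning the items directly. Here it resolves immediately.
return new Promise((resolve) => {
	items[0].json.processedAt = new Date().toISOString();
	resolve(items);
});
```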


### Function-Node

#### Variable: items

It contains all the items that the node received as input.

Information about how the data is structured can be found on the page [Data Structure](data-structure.md).

The data can be accessed and manipulated like this:

```typescript
// Sets the JSON data property "myFileName" of the first item to the name of the
// file which is set in the binary property "image" of the same item.
items[0].json.myFileName = items[0].binary.image.fileName;

return items;
```

This example creates 10 dummy items with the ids 0 to 9:

```typescript
const newItems = [];

for (let i = 0; i < 10; i++) {
	newItems.push({
		json: {
			id: i
		}
	});
}

return newItems;
```


#### Method: $item(index: number, runIndex?: number)

With `$item` it is possible to access the data of parent nodes. That can be the item data but also the parameters. It expects as input the index of the item the data should be returned for. This is needed because the returned data can be different for each item. That is probably obvious for the item data itself, but maybe less so for data like parameters. The reason the index is needed there as well is that parameters may contain an expression, and expressions always get executed in the context of an item. If that were not the case, the Email Send node, for example, would not be able to send multiple emails at once to different people; instead, the same person would receive multiple emails.

The index is 0-based. So `$item(0)` will return the first item, `$item(1)` the second one, and so on.

By default the item of the last run of the node will be returned. So if the referenced node ran 3 times (its last runIndex is 2) and the current node runs for the first time (its runIndex is 0), the data of runIndex 2 of the referenced node will be returned.

For more information about what data can be accessed via $node, check [here](#variable-node).

Example:

```typescript
// Returns the value of the JSON data property "myNumber" of Node "Set" (first item)
const myNumber = $item(0).$node["Set"].json["myNumber"];
// Like above but data of the 6th item
const myNumber = $item(5).$node["Set"].json["myNumber"];

// Returns the value of the parameter "channel" of Node "Slack".
// If it contains an expression the value will be resolved with the
// data of the first item.
const channel = $item(0).$node["Slack"].parameter["channel"];
// Like above but resolved with the value of the 10th item.
const channel = $item(9).$node["Slack"].parameter["channel"];
```


#### Method: $items(nodeName?: string, outputIndex?: number, runIndex?: number)

Gives access to all the items of the current or parent nodes. If no parameters get supplied, it returns all the items of the current node. If a node name is given, it returns the items that node output on its first output (index: 0; most nodes only have one output, exceptions are the IF and Switch nodes) on its last run.

Example:

```typescript
// Returns all the items of the current node and current run
const allItems = $items();

// Returns all items the node "IF" outputs (index: 0 which is Output "true" of its most recent run)
const allItems = $items("IF");

// Returns all items the node "IF" outputs (index: 0 which is Output "true" of the same run as current node)
const allItems = $items("IF", 0, $runIndex);

// Returns all items the node "IF" outputs (index: 1 which is Output "false" of run 0 which is the first run)
const allItems = $items("IF", 1, 0);
```


#### Variable: $node

Works exactly like `$item` with the difference that it will always return the data of the first item and the last run of the node.

```typescript
// Returns the fileName of binary property "data" of Node "HTTP Request"
const fileName = $node["HTTP Request"].binary["data"]["fileName"];

// Returns the context data "noItemsLeft" of Node "SplitInBatches"
const noItemsLeft = $node["SplitInBatches"].context["noItemsLeft"];

// Returns the value of the JSON data property "myNumber" of Node "Set"
const myNumber = $node["Set"].json["myNumber"];

// Returns the value of the parameter "channel" of Node "Slack"
const channel = $node["Slack"].parameter["channel"];

// Returns the index of the last run of Node "HTTP Request"
const runIndex = $node["HTTP Request"].runIndex;
```


#### Variable: $runIndex

Contains the index of the current run of the node.

```typescript
// Returns all items the node "IF" outputs (index: 0 which is Output "true" of the same run as current node)
const allItems = $items("IF", 0, $runIndex);
```


#### Variable: $workflow

Gives information about the current workflow.

```typescript
const isActive = $workflow.active;
const workflowId = $workflow.id;
const workflowName = $workflow.name;
```


#### Method: $evaluateExpression(expression: string, itemIndex: number)

Evaluates a given string as expression. If no `itemIndex` is provided, the Function node uses the data of item 0 by default, and the Function Item node uses the data of the current item.

Example:

```javascript
items[0].json.variable1 = $evaluateExpression('{{1+2}}');
items[0].json.variable2 = $evaluateExpression($node["Set"].json["myExpression"], 1);

return items;
```


#### Method: getWorkflowStaticData(type)

Gives access to the static workflow data. It is possible to save data directly with the workflow. This data should, however, be very small. A common use case is, for example, to save a timestamp of the last item that got processed from an RSS feed or a database. It will always return an object. Properties can then be read, deleted, or set on that object. When the workflow execution succeeds, n8n will automatically check if the data has changed and will save it, if necessary.

There are two types of static data, "global" and "node". Global static data is the same in the whole workflow, and every node in the workflow can access it. The node static data, however, is different for every node, and only the node which set it can retrieve it again.

Example:

```javascript
// Get the global workflow static data
const staticData = getWorkflowStaticData('global');
// Get the static data of the node
const staticData = getWorkflowStaticData('node');

// Access its data
const lastExecution = staticData.lastExecution;

// Update its data
staticData.lastExecution = new Date().getTime();

// Delete data
delete staticData.lastExecution;
```

It is important to know that the static data can not be read and written when testing via the UI. The data there will always be empty and the changes will not persist. The static data only gets saved when a workflow is active and gets called by a Trigger or Webhook.


### Function Item-Node

#### Variable: item

It contains the "json" data of the currently processed item.

The data can be accessed and manipulated like this:

```typescript
// Uses the data of an already existing key to create a new additional one
item.newIncrementedCounter = item.existingCounter + 1;
return item;
```


#### Method: getBinaryData()

Returns all the binary data (all keys) of the item which gets currently processed.


#### Method: setBinaryData(binaryData)

Sets all the binary data (all keys) of the item which gets currently processed.
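
As a rough sketch, the two methods could be combined in a Function Item node like this (the binary key names `data` and `copyOfData` are made up for illustration):

```typescript
// Read the incoming binary data, duplicate the key "data" under a new
// key "copyOfData" and write everything back.
const binaryData = getBinaryData();
binaryData.copyOfData = binaryData.data;
setBinaryData(binaryData);

// The JSON data of the item still has to be returned.
return item;
```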


#### Method: getWorkflowStaticData(type)

As described above for the Function node.
@ -1,43 +0,0 @@
# Quick Start


## Give n8n a spin

To spin up n8n, you can run:

```bash
npx n8n
```

It will download everything that is needed to start n8n.

You can then access n8n by opening: [http://localhost:5678](http://localhost:5678)


## Start with docker

To play around with n8n, you can also start it using docker:

```bash
docker run -it --rm \
  --name n8n \
  -p 5678:5678 \
  n8nio/n8n
```

Be aware that all the data will be lost once the docker container gets removed. To persist the data, mount the `~/.n8n` folder:

```bash
docker run -it --rm \
  --name n8n \
  -p 5678:5678 \
  -v ~/.n8n:/root/.n8n \
  n8nio/n8n
```

More information about the Docker setup can be found in the README file of the [Docker Image](https://github.com/n8n-io/n8n/blob/master/docker/images/n8n/README.md).

In case you run into issues, check out the [troubleshooting](troubleshooting.md) page or ask for help in the community [forum](https://community.n8n.io/).
@ -1,13 +0,0 @@
# Security

By default, n8n can be accessed by everybody. This is okay if you only have it running locally, but if you deploy it on a server which is accessible from the web, you have to make sure that n8n is protected. Right now we have very basic protection in place using basic auth. It can be activated by setting the following environment variables:

```bash
export N8N_BASIC_AUTH_ACTIVE=true
export N8N_BASIC_AUTH_USER=<USER>
export N8N_BASIC_AUTH_PASSWORD=<PASSWORD>
```
@ -1,18 +0,0 @@
# Sensitive Data via File

To avoid passing sensitive information via environment variables, "_FILE" may be appended to some environment variables. n8n will then load the data from a file with the given name. That makes it possible to load data easily from Docker and Kubernetes secrets (an example follows the list below).

The following environment variables support file input:

- DB_MONGODB_CONNECTION_URL_FILE
- DB_POSTGRESDB_DATABASE_FILE
- DB_POSTGRESDB_HOST_FILE
- DB_POSTGRESDB_PASSWORD_FILE
- DB_POSTGRESDB_PORT_FILE
- DB_POSTGRESDB_USER_FILE
- DB_POSTGRESDB_SCHEMA_FILE
- N8N_BASIC_AUTH_PASSWORD_FILE
- N8N_BASIC_AUTH_USER_FILE
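
For illustration, a minimal sketch of using the file variant for the basic-auth password (the file path is made up; a mounted Docker or Kubernetes secret works the same way):

```bash
# Write the password to a file; in practice this would typically be a
# secret mounted into the container by Docker or Kubernetes.
echo 'mySecretPassword' > /path/to/n8n_basic_auth_password

export N8N_BASIC_AUTH_ACTIVE=true
export N8N_BASIC_AUTH_USER=user
export N8N_BASIC_AUTH_PASSWORD_FILE=/path/to/n8n_basic_auth_password
```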
@ -1,183 +0,0 @@
# Server Setup

!> ***Important***: Make sure that you secure your n8n instance as described under [Security](security.md).


## Example setup with docker-compose

If you have already installed docker and docker-compose, you can directly start with step 4.


### 1. Install Docker

This can vary depending on the Linux distribution used. The example below is for Ubuntu:

```bash
sudo apt update
sudo apt install apt-transport-https ca-certificates curl software-properties-common
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu bionic stable"
sudo apt update
sudo apt install docker-ce -y
```

### 2. Optional: If Docker should run as a non-root user

Run the following when logged in as the user that should also be allowed to run docker:

```bash
sudo usermod -aG docker ${USER}
su - ${USER}
```

### 3. Install Docker-compose

This can vary depending on the Linux distribution used. The example below is for Ubuntu:

Check beforehand what the latest version is and replace "1.24.1" with that version accordingly:
https://github.com/docker/compose/releases

```bash
sudo curl -L https://github.com/docker/compose/releases/download/1.24.1/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose
sudo chmod +x /usr/local/bin/docker-compose
```


### 4. Setup DNS

Add an A record to route the subdomain accordingly.

```
Type: A
Name: n8n (or whatever the subdomain should be)
IP address: <IP_OF_YOUR_SERVER>
```


### 5. Create docker-compose file

Save this file as `docker-compose.yml`.

Normally no changes should be needed.

```yaml
version: "3"

services:
  traefik:
    image: "traefik"
    command:
      - "--api=true"
      - "--api.insecure=true"
      - "--providers.docker=true"
      - "--providers.docker.exposedbydefault=false"
      - "--entrypoints.websecure.address=:443"
      - "--certificatesresolvers.mytlschallenge.acme.tlschallenge=true"
      - "--certificatesresolvers.mytlschallenge.acme.email=${SSL_EMAIL}"
      - "--certificatesresolvers.mytlschallenge.acme.storage=/letsencrypt/acme.json"
    ports:
      - "443:443"
    volumes:
      - ${DATA_FOLDER}/letsencrypt:/letsencrypt
      - /var/run/docker.sock:/var/run/docker.sock:ro

  n8n:
    image: n8nio/n8n
    ports:
      - "127.0.0.1:5678:5678"
    labels:
      - traefik.enable=true
      - traefik.http.routers.n8n.rule=Host(`${SUBDOMAIN}.${DOMAIN_NAME}`)
      - traefik.http.routers.n8n.tls=true
      - traefik.http.routers.n8n.entrypoints=websecure
      - traefik.http.routers.n8n.tls.certresolver=mytlschallenge
      - traefik.http.middlewares.n8n.headers.SSLRedirect=true
      - traefik.http.middlewares.n8n.headers.STSSeconds=315360000
      - traefik.http.middlewares.n8n.headers.browserXSSFilter=true
      - traefik.http.middlewares.n8n.headers.contentTypeNosniff=true
      - traefik.http.middlewares.n8n.headers.forceSTSHeader=true
      - traefik.http.middlewares.n8n.headers.SSLHost=${DOMAIN_NAME}
      - traefik.http.middlewares.n8n.headers.STSIncludeSubdomains=true
      - traefik.http.middlewares.n8n.headers.STSPreload=true
    environment:
      - N8N_BASIC_AUTH_ACTIVE=true
      - N8N_BASIC_AUTH_USER
      - N8N_BASIC_AUTH_PASSWORD
      - N8N_HOST=${SUBDOMAIN}.${DOMAIN_NAME}
      - N8N_PORT=5678
      - N8N_LISTEN_ADDRESS=0.0.0.0
      - N8N_PROTOCOL=https
      - NODE_ENV=production
      - WEBHOOK_TUNNEL_URL=https://${SUBDOMAIN}.${DOMAIN_NAME}/
      - VUE_APP_URL_BASE_API=https://${SUBDOMAIN}.${DOMAIN_NAME}/
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - ${DATA_FOLDER}/.n8n:/root/.n8n
```


### 6. Create `.env` file

Create an `.env` file and change it accordingly.

```bash
# Folder where data should be saved
DATA_FOLDER=/root/n8n/

# The top level domain to serve from
DOMAIN_NAME=example.com

# The subdomain to serve from
SUBDOMAIN=n8n

# DOMAIN_NAME and SUBDOMAIN combined decide where n8n will be reachable from
# The above example would result in: https://n8n.example.com

# The user name to use for authentication - IMPORTANT ALWAYS CHANGE!
N8N_BASIC_AUTH_USER=user

# The password to use for authentication - IMPORTANT ALWAYS CHANGE!
N8N_BASIC_AUTH_PASSWORD=password

# Optional timezone which gets used by the Cron node by default
# If not set, New York time will be used
GENERIC_TIMEZONE=Europe/Berlin

# The email address to use for the SSL certificate creation
SSL_EMAIL=user@example.com
```


### 7. Create data folder

Create the folder which is defined as `DATA_FOLDER`. In the example above, it is `/root/n8n/`.

In that folder, the SQLite database file as well as the encryption key will be saved.

The folder can be created like this:

```
mkdir /root/n8n/
```


### 8. Start docker-compose setup

n8n can now be started via:

```bash
sudo docker-compose up -d
```

In case it should ever be stopped, that can be done with this command:

```bash
sudo docker-compose stop
```


### 9. Done

n8n will now be reachable via the above defined subdomain + domain combination. The above example would result in: https://n8n.example.com

n8n will only be reachable via https and not via http.
@ -1,35 +0,0 @@
# Setup


## Installation

To install n8n globally:

```bash
npm install n8n -g
```

## Start

After the installation, n8n can be started by simply typing in:

```bash
n8n
# or
n8n start
```


## Start with tunnel

!> **WARNING**: This is only meant for local development and testing. It should not be used in production!

To be able to use webhooks for trigger nodes of external services like GitHub, n8n has to be reachable from the web. To make that easy, n8n has a special tunnel service, which redirects requests from our servers to your local n8n instance (it uses this code: [https://github.com/localtunnel/localtunnel](https://github.com/localtunnel/localtunnel)).

To use it, simply start n8n with `--tunnel`:

```bash
n8n start --tunnel
```

In case you run into issues, check out the [troubleshooting](troubleshooting.md) page or ask for help in the community [forum](https://community.n8n.io/).
@ -1,15 +0,0 @@
# Start Workflows via CLI

Workflows cannot only be started by triggers, webhooks, or manually via the Editor. It is also possible to start them directly via the CLI.

Execute a saved workflow by its ID:

```bash
n8n execute --id <ID>
```

Execute a workflow from a workflow file:

```bash
n8n execute --file <WORKFLOW_FILE>
```
@ -1,3 +0,0 @@
# This is a simple test

with some text
@ -1,58 +0,0 @@
# Troubleshooting

## Windows

If you are experiencing issues running n8n with the typical flow of:

```powershell
npx n8n
```

### Requirements

Please ensure that you have the following requirements fulfilled:

- Install the latest version of [NodeJS](https://nodejs.org/en/download/)
- Install [Python 2.7](https://www.python.org/downloads/release/python-2717/) (it is okay to have multiple versions installed on the machine)
- Windows SDK
- C++ Desktop Development Tools
- Windows Build Tools

#### Install build tools

If you haven't satisfied the above, follow this procedure through your PowerShell (run with administrative privileges). This command installs the build tools, the Windows SDK, and the C++ development tools in one package.

```powershell
npm install --global --production windows-build-tools
```

#### Configure npm to use Python version 2.7

```powershell
npm config set python python2.7
```

#### Configure npm to use the correct msvs version

```powershell
npm config set msvs_version 2017 --global
```

### Lesser-known issues

#### mmmagic npm package when using MSBuild tools with Visual Studio

While installing this package, `node-gyp` gets run and it might fail with an error along the lines of:

```
gyp ERR! stack Error: spawn C:\Program Files (x86)\Microsoft Visual Studio\2019\**Enterprise**\MSBuild\Current\Bin\MSBuild.exe ENOENT
```

It is looking for `MSBuild.exe` in a directory that does not exist. If you are using Visual Studio Community instead of Enterprise (or vice versa), you can change the path to MSBuild with the command:

```powershell
npm config set msbuild_path "C:\Program Files (x86)\Microsoft Visual Studio\2019\**Community**\MSBuild\Current\Bin\MSBuild.exe"
```

Attempt to install the package again after running the command above.
@ -1,26 +0,0 @@
# Tutorials


## Examples

Example workflows which show what can be done with n8n can be found here: [https://n8n.io/workflows](https://n8n.io/workflows)

If you want to know how a node can get used in context, you can search for it here: [https://n8n.io/nodes](https://n8n.io/nodes). There it shows in which workflows the node got used.


## Videos

- [Slack Notification on Github Star](https://www.youtube.com/watch?v=3w7xIMKLVAg)
- [Typeform to Google Sheet and Slack or Email](https://www.youtube.com/watch?v=rn3-d4IiW44)


### Community Tutorials

- [n8n basics 1/3 - Getting Started](https://www.youtube.com/watch?v=JIaxjH2CyFc)
- [n8n basics 2/3 - Simple Workflow](https://www.youtube.com/watch?v=ovlxledZfM4)
- [n8n basics 3/3 - Transforming JSON](https://www.youtube.com/watch?v=wGAEAcfwV8w)
- [n8n Google Integration - Using Google Sheets and Google Api nodes](https://www.youtube.com/watch?v=KFqx8OmkqVE)
111
docs/workflow.md
@ -1,111 +0,0 @@
# Workflow


## Activate

Activating a workflow means that the Trigger and Webhook nodes get activated and can trigger a workflow to run. By default all newly created workflows are deactivated. That means that even if a Trigger node like the Cron node should start a workflow because a predefined time is reached, it will not do so unless the workflow gets activated. It is only possible to activate a workflow which contains a Trigger or a Webhook node.


## Data Flow

Nodes do not only process one "item", they process multiple ones. So if the Trello node is set to "Create-Card" and it has an expression set for "Name" that depends on the "name" property, it will create a card for each item, always choosing the name-property value of the current one.

This data would, for example, create two cards: one named "test1", the other one named "test2":

```json
[
  {
    "name": "test1"
  },
  {
    "name": "test2"
  }
]
```


## Error Workflows

For each workflow, an optional "Error Workflow" can be set. It gets executed in case the execution of the workflow fails. That makes it possible to, for instance, inform the user via Email or Slack if something goes wrong. The same "Error Workflow" can be set on multiple workflows.

The only difference between a regular workflow and an "Error Workflow" is that it contains an "Error Trigger" node. So it is important to make sure that this node gets created before setting a workflow as "Error Workflow".

The "Error Trigger" node will trigger in case the execution fails and receives information about it. The data looks like this:

```json
[
  {
    "execution": {
      "id": "231",
      "url": "https://n8n.example.com/execution/231",
      "retryOf": "34",
      "error": {
        "message": "Example Error Message",
        "stack": "Stacktrace"
      },
      "lastNodeExecuted": "Node With Error",
      "mode": "manual"
    },
    "workflow": {
      "id": "1",
      "name": "Example Workflow"
    }
  }
]
```

All information is always present except:

- **execution.id**: Only present when the execution gets saved in the database
- **execution.url**: Only present when the execution gets saved in the database
- **execution.retryOf**: Only present when the execution is a retry of a previously failed execution


### Setting Error Workflow

An "Error Workflow" can be set in the Workflow Settings, which can be accessed by pressing the "Workflow" button in the menu on the left side. The last option is "Settings". In the window that appears, the "Error Workflow" can be selected via the dropdown "Error Workflow".


## Share Workflows

All workflows are JSON and can be shared very easily.

There are multiple ways to download a workflow as JSON to then share it with other people via Email, Slack, Skype, Dropbox, …

1. Press the "Download" button under the Workflow menu in the sidebar on the left. It then downloads the workflow as a JSON file.
1. Select the nodes in the editor which should be exported and then copy them (Ctrl + c). The nodes then get saved as JSON in the clipboard and can be pasted wherever desired (Ctrl + v).

Importing that JSON representation again into n8n is just as easy and can also be done in different ways:

1. Press "Import from File" or "Import from URL" under the Workflow menu in the sidebar on the left.
1. Copy the JSON workflow to the clipboard (Ctrl + c) and then simply paste it directly into the editor (Ctrl + v).
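
To give a rough idea of what such a shared workflow looks like, here is a simplified, illustrative sketch of an exported workflow; a real export contains a few more fields per node (such as the type version):

```json
{
  "name": "Example Workflow",
  "active": false,
  "nodes": [
    {
      "name": "Start",
      "type": "n8n-nodes-base.start",
      "position": [250, 300],
      "parameters": {}
    }
  ],
  "connections": {}
}
```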


## Workflow Settings

On each workflow, it is possible to set some custom settings and overwrite some of the global default settings. Currently, the following settings can be set:


### Error Workflow

Workflow to run in case the execution of the current workflow fails. More information can be found in the section [Error Workflows](#error-workflows).


### Timezone

The timezone to use in the current workflow. If not set, the global timezone (by default "New York") gets used. This is important, for instance, for the Cron Trigger node.


### Save Data Error Execution

If the execution data of the workflow should be saved when it fails.


### Save Data Success Execution

If the execution data of the workflow should be saved when it succeeds.


### Save Manual Executions

If executions started from the Editor UI should be saved.
@ -11,6 +11,7 @@
|
||||||
"start:default": "cd packages/cli/bin && ./n8n",
|
"start:default": "cd packages/cli/bin && ./n8n",
|
||||||
"start:windows": "cd packages/cli/bin && n8n",
|
"start:windows": "cd packages/cli/bin && n8n",
|
||||||
"test": "lerna run test",
|
"test": "lerna run test",
|
||||||
|
"tslint": "lerna exec npm run tslint",
|
||||||
"watch": "lerna run --parallel watch"
|
"watch": "lerna run --parallel watch"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
# n8n - Workflow Automation Tool
|
# n8n - Workflow Automation Tool
|
||||||
|
|
||||||
![n8n.io - Workflow Automation](https://raw.githubusercontent.com/n8n-io/n8n/master/docs/images/n8n-logo.png)
|
![n8n.io - Workflow Automation](https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-logo.png)
|
||||||
|
|
||||||
n8n is a free and open [fair-code](http://faircode.io) licensed node based Workflow Automation Tool. It can be self-hosted, easily extended, and so also used with internal tools.
|
n8n is a free and open [fair-code](http://faircode.io) licensed node based Workflow Automation Tool. It can be self-hosted, easily extended, and so also used with internal tools.
|
||||||
|
|
||||||
<a href="https://raw.githubusercontent.com/n8n-io/n8n/master/docs/images/n8n-screenshot.png"><img src="https://raw.githubusercontent.com/n8n-io/n8n/master/docs/images/n8n-screenshot.png" width="550" alt="n8n.io - Screenshot"></a>
|
<a href="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png"><img src="https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-screenshot.png" width="550" alt="n8n.io - Screenshot"></a>
|
||||||
|
|
||||||
|
|
||||||
## Contents
|
## Contents
|
||||||
|
@ -89,8 +89,7 @@ If you have problems or questions go to our forum, we will then try to help you
|
||||||
If you are interested in working for n8n and so shape the future of the project
|
If you are interested in working for n8n and so shape the future of the project
|
||||||
check out our job posts:
|
check out our job posts:
|
||||||
|
|
||||||
[https://jobs.n8n.io](https://jobs.n8n.io)
|
[https://n8n.join.com](https://n8n.join.com)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Upgrading
|
## Upgrading
|
||||||
|
|
|
@ -128,15 +128,23 @@ const config = convict({
|
||||||
|
|
||||||
credentials: {
|
credentials: {
|
||||||
overwrite: {
|
overwrite: {
|
||||||
// Allows to set default values for credentials which
|
data: {
|
||||||
// get automatically prefilled and the user does not get
|
// Allows to set default values for credentials which
|
||||||
// displayed and can not change.
|
// get automatically prefilled and the user does not get
|
||||||
// Format: { CREDENTIAL_NAME: { PARAMTER: VALUE }}
|
// displayed and can not change.
|
||||||
doc: 'Overwrites for credentials',
|
// Format: { CREDENTIAL_NAME: { PARAMTER: VALUE }}
|
||||||
format: '*',
|
doc: 'Overwrites for credentials',
|
||||||
default: '{}',
|
format: '*',
|
||||||
env: 'CREDENTIALS_OVERWRITE'
|
default: '{}',
|
||||||
}
|
env: 'CREDENTIALS_OVERWRITE_DATA'
|
||||||
|
},
|
||||||
|
endpoint: {
|
||||||
|
doc: 'Fetch credentials from API',
|
||||||
|
format: String,
|
||||||
|
default: '',
|
||||||
|
env: 'CREDENTIALS_OVERWRITE_ENDPOINT',
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
|
|
||||||
executions: {
|
executions: {
|
||||||
|
@ -151,10 +159,34 @@ const config = convict({
|
||||||
env: 'EXECUTIONS_PROCESS'
|
env: 'EXECUTIONS_PROCESS'
|
||||||
},
|
},
|
||||||
|
|
||||||
|
// A Workflow times out and gets canceled after this time (seconds).
|
||||||
|
// If the workflow is executed in the main process a soft timeout
|
||||||
|
// is executed (takes effect after the current node finishes).
|
||||||
|
// If a workflow is running in its own process is a soft timeout
|
||||||
|
// tried first, before killing the process after waiting for an
|
||||||
|
// additional fifth of the given timeout duration.
|
||||||
|
//
|
||||||
|
// To deactivate timeout set it to -1
|
||||||
|
//
|
||||||
|
// Timeout is currently not activated by default which will change
|
||||||
|
// in a future version.
|
||||||
|
timeout: {
|
||||||
|
doc: 'Max run time (seconds) before stopping the workflow execution',
|
||||||
|
format: Number,
|
||||||
|
default: -1,
|
||||||
|
env: 'EXECUTIONS_TIMEOUT'
|
||||||
|
},
|
||||||
|
maxTimeout: {
|
||||||
|
doc: 'Max execution time (seconds) that can be set for a workflow individually',
|
||||||
|
format: Number,
|
||||||
|
default: 3600,
|
||||||
|
env: 'EXECUTIONS_TIMEOUT_MAX'
|
||||||
|
},
|
||||||
|
|
||||||
// If a workflow executes all the data gets saved by default. This
|
// If a workflow executes all the data gets saved by default. This
|
||||||
// could be a problem when a workflow gets executed a lot and processes
|
// could be a problem when a workflow gets executed a lot and processes
|
||||||
// a lot of data. To not write the database full it is possible to
|
// a lot of data. To not exceed the database's capacity it is possible to
|
||||||
// not save the execution at all.
|
// prune the database regularly or to not save the execution at all.
|
||||||
// Depending on if the execution did succeed or error a different
|
// Depending on if the execution did succeed or error a different
|
||||||
// save behaviour can be set.
|
// save behaviour can be set.
|
||||||
saveDataOnError: {
|
saveDataOnError: {
|
||||||
|
@ -177,9 +209,34 @@ const config = convict({
|
||||||
// in the editor.
|
// in the editor.
|
||||||
saveDataManualExecutions: {
|
saveDataManualExecutions: {
|
||||||
doc: 'Save data of executions when started manually via editor',
|
doc: 'Save data of executions when started manually via editor',
|
||||||
|
format: 'Boolean',
|
||||||
default: false,
|
default: false,
|
||||||
env: 'EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS'
|
env: 'EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS'
|
||||||
},
|
},
|
||||||
|
|
||||||
|
// To not exceed the database's capacity and keep its size moderate
|
||||||
|
// the execution data gets pruned regularly (default: 1 hour interval).
|
||||||
|
// All saved execution data older than the max age will be deleted.
|
||||||
|
// Pruning is currently not activated by default, which will change in
|
||||||
|
// a future version.
|
||||||
|
pruneData: {
|
||||||
|
doc: 'Delete data of past executions on a rolling basis',
|
||||||
|
format: 'Boolean',
|
||||||
|
default: false,
|
||||||
|
env: 'EXECUTIONS_DATA_PRUNE'
|
||||||
|
},
|
||||||
|
pruneDataMaxAge: {
|
||||||
|
doc: 'How old (hours) the execution data has to be to get deleted',
|
||||||
|
format: Number,
|
||||||
|
default: 336,
|
||||||
|
env: 'EXECUTIONS_DATA_MAX_AGE'
|
||||||
|
},
|
||||||
|
pruneDataTimeout: {
|
||||||
|
doc: 'Timeout (seconds) after execution data has been pruned',
|
||||||
|
format: Number,
|
||||||
|
default: 3600,
|
||||||
|
env: 'EXECUTIONS_DATA_PRUNE_TIMEOUT'
|
||||||
|
},
|
||||||
},
|
},
|
||||||
|
|
||||||
generic: {
|
generic: {
|
||||||
|
@ -196,6 +253,13 @@ const config = convict({
|
||||||
},
|
},
|
||||||
|
|
||||||
// How n8n can be reached (Editor & REST-API)
|
// How n8n can be reached (Editor & REST-API)
|
||||||
|
path: {
|
||||||
|
format: String,
|
||||||
|
default: '/',
|
||||||
|
arg: 'path',
|
||||||
|
env: 'N8N_PATH',
|
||||||
|
doc: 'Path n8n is deployed to'
|
||||||
|
},
|
||||||
host: {
|
host: {
|
||||||
format: String,
|
format: String,
|
||||||
default: 'localhost',
|
default: 'localhost',
|
||||||
|
|
|
@ -44,9 +44,9 @@ module.exports = [
|
||||||
"logging": false,
|
"logging": false,
|
||||||
"host": "localhost",
|
"host": "localhost",
|
||||||
"username": "postgres",
|
"username": "postgres",
|
||||||
"password": "docker",
|
"password": "",
|
||||||
"port": 5432,
|
"port": 5432,
|
||||||
"database": "postgres",
|
"database": "n8n",
|
||||||
"schema": "public",
|
"schema": "public",
|
||||||
"entities": Object.values(PostgresDb),
|
"entities": Object.values(PostgresDb),
|
||||||
"migrations": [
|
"migrations": [
|
||||||
|
@ -68,7 +68,7 @@ module.exports = [
|
||||||
"username": "root",
|
"username": "root",
|
||||||
"password": "password",
|
"password": "password",
|
||||||
"host": "localhost",
|
"host": "localhost",
|
||||||
"port": "3308",
|
"port": "3306",
|
||||||
"logging": false,
|
"logging": false,
|
||||||
"entities": Object.values(MySQLDb),
|
"entities": Object.values(MySQLDb),
|
||||||
"migrations": [
|
"migrations": [
|
||||||
|
@ -90,7 +90,7 @@ module.exports = [
|
||||||
"username": "root",
|
"username": "root",
|
||||||
"password": "password",
|
"password": "password",
|
||||||
"host": "localhost",
|
"host": "localhost",
|
||||||
"port": "3308",
|
"port": "3306",
|
||||||
"logging": false,
|
"logging": false,
|
||||||
"entities": Object.values(MySQLDb),
|
"entities": Object.values(MySQLDb),
|
||||||
"migrations": [
|
"migrations": [
|
||||||
|
@ -105,4 +105,4 @@ module.exports = [
|
||||||
"subscribersDir": "./src/databases/mysqldb/Subscribers"
|
"subscribersDir": "./src/databases/mysqldb/Subscribers"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
{
|
{
|
||||||
"name": "n8n",
|
"name": "n8n",
|
||||||
"version": "0.72.0",
|
"version": "0.76.0",
|
||||||
"description": "n8n Workflow Automation Tool",
|
"description": "n8n Workflow Automation Tool",
|
||||||
"license": "SEE LICENSE IN LICENSE.md",
|
"license": "SEE LICENSE IN LICENSE.md",
|
||||||
"homepage": "https://n8n.io",
|
"homepage": "https://n8n.io",
|
||||||
|
@ -100,13 +100,13 @@
|
||||||
"lodash.get": "^4.4.2",
|
"lodash.get": "^4.4.2",
|
||||||
"mongodb": "^3.5.5",
|
"mongodb": "^3.5.5",
|
||||||
"mysql2": "^2.0.1",
|
"mysql2": "^2.0.1",
|
||||||
"n8n-core": "~0.37.0",
|
"n8n-core": "~0.41.0",
|
||||||
"n8n-editor-ui": "~0.48.0",
|
"n8n-editor-ui": "~0.52.0",
|
||||||
"n8n-nodes-base": "~0.67.0",
|
"n8n-nodes-base": "~0.71.0",
|
||||||
"n8n-workflow": "~0.33.0",
|
"n8n-workflow": "~0.37.0",
|
||||||
"oauth-1.0a": "^2.2.6",
|
"oauth-1.0a": "^2.2.6",
|
||||||
"open": "^7.0.0",
|
"open": "^7.0.0",
|
||||||
"pg": "^7.11.0",
|
"pg": "^8.3.0",
|
||||||
"request-promise-native": "^1.0.7",
|
"request-promise-native": "^1.0.7",
|
||||||
"sqlite3": "^4.2.0",
|
"sqlite3": "^4.2.0",
|
||||||
"sse-channel": "^3.1.1",
|
"sse-channel": "^3.1.1",
|
||||||
|
|
|
@ -88,10 +88,11 @@ export class ActiveExecutions {
|
||||||
* Forces an execution to stop
|
* Forces an execution to stop
|
||||||
*
|
*
|
||||||
* @param {string} executionId The id of the execution to stop
|
* @param {string} executionId The id of the execution to stop
|
||||||
|
* @param {string} timeout String 'timeout' given if stop due to timeout
|
||||||
* @returns {(Promise<IRun | undefined>)}
|
* @returns {(Promise<IRun | undefined>)}
|
||||||
* @memberof ActiveExecutions
|
* @memberof ActiveExecutions
|
||||||
*/
|
*/
|
||||||
async stopExecution(executionId: string): Promise<IRun | undefined> {
|
async stopExecution(executionId: string, timeout?: string): Promise<IRun | undefined> {
|
||||||
if (this.activeExecutions[executionId] === undefined) {
|
if (this.activeExecutions[executionId] === undefined) {
|
||||||
// There is no execution running with that id
|
// There is no execution running with that id
|
||||||
return;
|
return;
|
||||||
|
@ -101,17 +102,17 @@ export class ActiveExecutions {
|
||||||
// returned that it gets then also resolved correctly.
|
// returned that it gets then also resolved correctly.
|
||||||
if (this.activeExecutions[executionId].process !== undefined) {
|
if (this.activeExecutions[executionId].process !== undefined) {
|
||||||
// Workflow is running in subprocess
|
// Workflow is running in subprocess
|
||||||
setTimeout(() => {
|
if (this.activeExecutions[executionId].process!.connected) {
|
||||||
if (this.activeExecutions[executionId].process!.connected) {
|
setTimeout(() => {
|
||||||
|
// execute on next event loop tick;
|
||||||
this.activeExecutions[executionId].process!.send({
|
this.activeExecutions[executionId].process!.send({
|
||||||
type: 'stopExecution'
|
type: timeout ? timeout : 'stopExecution',
|
||||||
});
|
});
|
||||||
}
|
}, 1);
|
||||||
|
}
|
||||||
}, 1);
|
|
||||||
} else {
|
} else {
|
||||||
// Workflow is running in current process
|
// Workflow is running in current process
|
||||||
this.activeExecutions[executionId].workflowExecution!.cancel('Canceled by user');
|
this.activeExecutions[executionId].workflowExecution!.cancel();
|
||||||
}
|
}
|
||||||
|
|
||||||
return this.getPostExecutePromise(executionId);
|
return this.getPostExecutePromise(executionId);
|
||||||
|
|
|
@@ -11,11 +11,11 @@ import {
 	WorkflowHelpers,
 	WorkflowRunner,
 	WorkflowExecuteAdditionalData,
+	IWebhookDb,
 } from './';

 import {
 	ActiveWorkflows,
-	ActiveWebhooks,
 	NodeExecuteFunctions,
 } from 'n8n-core';

@@ -26,7 +26,7 @@ import {
 	INode,
 	INodeExecutionData,
 	IRunExecutionData,
-	IWebhookData,
+	NodeHelpers,
 	IWorkflowExecuteAdditionalData as IWorkflowExecuteAdditionalDataWorkflow,
 	WebhookHttpMethod,
 	Workflow,

@@ -35,22 +35,23 @@ import {
 import * as express from 'express';

 export class ActiveWorkflowRunner {
 	private activeWorkflows: ActiveWorkflows | null = null;
-	private activeWebhooks: ActiveWebhooks | null = null;
 	private activationErrors: {
 		[key: string]: IActivationError;
 	} = {};

 	async init() {

 		// Get the active workflows from database

+		// NOTE
+		// Here I guess we can have a flag on the workflow table like hasTrigger
+		// so instead of pulling all the active webhooks just pull the active ones that have a trigger
 		const workflowsData: IWorkflowDb[] = await Db.collections.Workflow!.find({ active: true }) as IWorkflowDb[];

-		this.activeWebhooks = new ActiveWebhooks();
-
-		// Add them as active workflows
 		this.activeWorkflows = new ActiveWorkflows();

 		if (workflowsData.length !== 0) {

@@ -58,20 +59,27 @@ export class ActiveWorkflowRunner {
 			console.log('   Start Active Workflows:');
 			console.log(' ================================');

+			const nodeTypes = NodeTypes();
+
 			for (const workflowData of workflowsData) {
-				console.log(`   - ${workflowData.name}`);
-				try {
-					await this.add(workflowData.id.toString(), workflowData);
-					console.log(`     => Started`);
-				} catch (error) {
-					console.log(`     => ERROR: Workflow could not be activated:`);
-					console.log(`               ${error.message}`);
+				const workflow = new Workflow({ id: workflowData.id.toString(), name: workflowData.name, nodes: workflowData.nodes, connections: workflowData.connections, active: workflowData.active, nodeTypes, staticData: workflowData.staticData, settings: workflowData.settings});
+
+				if (workflow.getTriggerNodes().length !== 0
+					|| workflow.getPollNodes().length !== 0) {
+					console.log(`   - ${workflowData.name}`);
+					try {
+						await this.add(workflowData.id.toString(), workflowData);
+						console.log(`     => Started`);
+					} catch (error) {
+						console.log(`     => ERROR: Workflow could not be activated:`);
+						console.log(`               ${error.message}`);
+					}
 				}
 			}
 		}
 	}

@@ -94,7 +102,6 @@ export class ActiveWorkflowRunner {
 		return;
 	}

 	/**
 	 * Checks if a webhook for the given method and path exists and executes the workflow.
 	 *

@@ -110,30 +117,41 @@ export class ActiveWorkflowRunner {
 			throw new ResponseHelper.ResponseError('The "activeWorkflows" instance did not get initialized yet.', 404, 404);
 		}

-		const webhookData: IWebhookData | undefined = this.activeWebhooks!.get(httpMethod, path);
-
-		if (webhookData === undefined) {
+		const webhook = await Db.collections.Webhook?.findOne({ webhookPath: path, method: httpMethod }) as IWebhookDb;
+
+		// check if something exists
+		if (webhook === undefined) {
 			// The requested webhook is not registered
 			throw new ResponseHelper.ResponseError(`The requested webhook "${httpMethod} ${path}" is not registered.`, 404, 404);
 		}

-		const workflowData = await Db.collections.Workflow!.findOne(webhookData.workflowId);
+		const workflowData = await Db.collections.Workflow!.findOne(webhook.workflowId);
 		if (workflowData === undefined) {
-			throw new ResponseHelper.ResponseError(`Could not find workflow with id "${webhookData.workflowId}"`, 404, 404);
+			throw new ResponseHelper.ResponseError(`Could not find workflow with id "${webhook.workflowId}"`, 404, 404);
 		}

 		const nodeTypes = NodeTypes();
-		const workflow = new Workflow({ id: webhookData.workflowId, name: workflowData.name, nodes: workflowData.nodes, connections: workflowData.connections, active: workflowData.active, nodeTypes, staticData: workflowData.staticData, settings: workflowData.settings});
+		const workflow = new Workflow({ id: webhook.workflowId.toString(), name: workflowData.name, nodes: workflowData.nodes, connections: workflowData.connections, active: workflowData.active, nodeTypes, staticData: workflowData.staticData, settings: workflowData.settings});
+
+		const credentials = await WorkflowCredentials([workflow.getNode(webhook.node as string) as INode]);
+
+		const additionalData = await WorkflowExecuteAdditionalData.getBase(credentials);
+
+		const webhookData = NodeHelpers.getNodeWebhooks(workflow, workflow.getNode(webhook.node as string) as INode, additionalData).filter((webhook) => {
+			return (webhook.httpMethod === httpMethod && webhook.path === path);
+		})[0];

 		// Get the node which has the webhook defined to know where to start from and to
 		// get additional data
 		const workflowStartNode = workflow.getNode(webhookData.node);

 		if (workflowStartNode === null) {
 			throw new ResponseHelper.ResponseError('Could not find node to process webhook.', 404, 404);
 		}

 		return new Promise((resolve, reject) => {
 			const executionMode = 'webhook';
+			//@ts-ignore
 			WebhookHelpers.executeWebhook(workflow, webhookData, workflowData, workflowStartNode, executionMode, undefined, req, res, (error: Error | null, data: object) => {
 				if (error !== null) {
 					return reject(error);

@@ -143,6 +161,20 @@ export class ActiveWorkflowRunner {
 		});
 	}

+	/**
+	 * Gets all request methods associated with a single webhook
+	 *
+	 * @param {string} path webhook path
+	 * @returns {Promise<string[]>}
+	 * @memberof ActiveWorkflowRunner
+	 */
+	async getWebhookMethods(path: string): Promise<string[]> {
+		const webhooks = await Db.collections.Webhook?.find({ webhookPath: path }) as IWebhookDb[];
+
+		// Gather all request methods in string array
+		const webhookMethods: string[] = webhooks.map(webhook => webhook.method);
+		return webhookMethods;
+	}

 	/**
 	 * Returns the ids of the currently active workflows

@@ -150,12 +182,8 @@ export class ActiveWorkflowRunner {
 	 * @returns {string[]}
 	 * @memberof ActiveWorkflowRunner
 	 */
-	getActiveWorkflows(): string[] {
-		if (this.activeWorkflows === null) {
-			return [];
-		}
-
-		return this.activeWorkflows.allActiveWorkflows();
+	getActiveWorkflows(): Promise<IWorkflowDb[]> {
+		return Db.collections.Workflow?.find({ select: ['id'] }) as Promise<IWorkflowDb[]>;
 	}

@@ -166,15 +194,11 @@ export class ActiveWorkflowRunner {
 	 * @returns {boolean}
 	 * @memberof ActiveWorkflowRunner
 	 */
-	isActive(id: string): boolean {
-		if (this.activeWorkflows !== null) {
-			return this.activeWorkflows.isActive(id);
-		}
-
-		return false;
+	async isActive(id: string): Promise<boolean> {
+		const workflow = await Db.collections.Workflow?.findOne({ id }) as IWorkflowDb;
+		return workflow?.active as boolean;
 	}

 	/**
 	 * Return error if there was a problem activating the workflow
 	 *

@@ -190,7 +214,6 @@ export class ActiveWorkflowRunner {
 		return this.activationErrors[id];
 	}

 	/**
 	 * Adds all the webhooks of the workflow
 	 *

@@ -202,12 +225,69 @@ export class ActiveWorkflowRunner {
 	 */
 	async addWorkflowWebhooks(workflow: Workflow, additionalData: IWorkflowExecuteAdditionalDataWorkflow, mode: WorkflowExecuteMode): Promise<void> {
 		const webhooks = WebhookHelpers.getWorkflowWebhooks(workflow, additionalData);
+		let path = '' as string | undefined;

 		for (const webhookData of webhooks) {
-			await this.activeWebhooks!.add(workflow, webhookData, mode);
-			// Save static data!
-			await WorkflowHelpers.saveStaticData(workflow);
+			const node = workflow.getNode(webhookData.node) as INode;
+			node.name = webhookData.node;
+
+			path = node.parameters.path as string;
+
+			if (node.parameters.path === undefined) {
+				path = workflow.getSimpleParameterValue(node, webhookData.webhookDescription['path']) as string | undefined;
+
+				if (path === undefined) {
+					// TODO: Use a proper logger
+					console.error(`No webhook path could be found for node "${node.name}" in workflow "${workflow.id}".`);
+					continue;
+				}
+			}
+
+			const isFullPath: boolean = workflow.getSimpleParameterValue(node, webhookData.webhookDescription['isFullPath'], false) as boolean;
+
+			const webhook = {
+				workflowId: webhookData.workflowId,
+				webhookPath: NodeHelpers.getNodeWebhookPath(workflow.id as string, node, path, isFullPath),
+				node: node.name,
+				method: webhookData.httpMethod,
+			} as IWebhookDb;
+
+			try {
+				await Db.collections.Webhook?.insert(webhook);
+
+				const webhookExists = await workflow.runWebhookMethod('checkExists', webhookData, NodeExecuteFunctions, mode, false);
+				if (webhookExists === false) {
+					// If webhook does not exist yet create it
+					await workflow.runWebhookMethod('create', webhookData, NodeExecuteFunctions, mode, false);
+				}
+
+			} catch (error) {
+				let errorMessage = '';
+
+				await Db.collections.Webhook?.delete({ workflowId: workflow.id });
+
+				// if it's an error from the insert:
+				// TODO check if there is a standard error code for duplicate key violation that works
+				// with all databases
+				if (error.name === 'MongoError' || error.name === 'QueryFailedError') {
+					errorMessage = `The webhook path [${webhook.webhookPath}] and method [${webhook.method}] already exist.`;
+				} else if (error.detail) {
+					// it's an error running the webhook methods (checkExists, create)
+					errorMessage = error.detail;
+				} else {
+					errorMessage = error.message;
+				}
+
+				throw new Error(errorMessage);
+			}
 		}
+		// Save static data!
+		await WorkflowHelpers.saveStaticData(workflow);
 	}

@@ -227,13 +307,29 @@ export class ActiveWorkflowRunner {
 		const nodeTypes = NodeTypes();
 		const workflow = new Workflow({ id: workflowId, name: workflowData.name, nodes: workflowData.nodes, connections: workflowData.connections, active: workflowData.active, nodeTypes, staticData: workflowData.staticData, settings: workflowData.settings });

-		await this.activeWebhooks!.removeWorkflow(workflow);
-
-		// Save the static workflow data if needed
-		await WorkflowHelpers.saveStaticData(workflow);
+		const mode = 'internal';
+
+		const credentials = await WorkflowCredentials(workflowData.nodes);
+		const additionalData = await WorkflowExecuteAdditionalData.getBase(credentials);
+
+		const webhooks = WebhookHelpers.getWorkflowWebhooks(workflow, additionalData);
+
+		for (const webhookData of webhooks) {
+			await workflow.runWebhookMethod('delete', webhookData, NodeExecuteFunctions, mode, false);
+		}
+
+		// if it's a mongo objectId convert it to string
+		if (typeof workflowData.id === 'object') {
+			workflowData.id = workflowData.id.toString();
+		}
+
+		const webhook = {
+			workflowId: workflowData.id,
+		} as IWebhookDb;
+
+		await Db.collections.Webhook?.delete(webhook);
 	}

@@ -322,7 +418,6 @@ export class ActiveWorkflowRunner {
 		});
 	}

 	/**
 	 * Runs the given workflow
 	 *

@@ -361,7 +456,11 @@ export class ActiveWorkflowRunner {
 		// Add the workflows which have webhooks defined
 		await this.addWorkflowWebhooks(workflowInstance, additionalData, mode);
-		await this.activeWorkflows.add(workflowId, workflowInstance, additionalData, getTriggerFunctions, getPollFunctions);
+
+		if (workflowInstance.getTriggerNodes().length !== 0
+			|| workflowInstance.getPollNodes().length !== 0) {
+			await this.activeWorkflows.add(workflowId, workflowInstance, additionalData, getTriggerFunctions, getPollFunctions);
+		}

 		if (this.activationErrors[workflowId] !== undefined) {
 			// If there were activation errors delete them

@@ -386,7 +485,6 @@ export class ActiveWorkflowRunner {
 		await WorkflowHelpers.saveStaticData(workflowInstance!);
 	}

 	/**
 	 * Makes a workflow inactive
 	 *

@@ -395,6 +493,7 @@ export class ActiveWorkflowRunner {
 	 * @memberof ActiveWorkflowRunner
 	 */
 	async remove(workflowId: string): Promise<void> {
+
 		if (this.activeWorkflows !== null) {
 			// Remove all the webhooks of the workflow
 			await this.removeWorkflowWebhooks(workflowId);

@@ -404,8 +503,13 @@ export class ActiveWorkflowRunner {
 				delete this.activationErrors[workflowId];
 			}

-			// Remove the workflow from the "list" of active workflows
-			return this.activeWorkflows.remove(workflowId);
+			// if it's active in memory then it's a trigger,
+			// so remove it from the list of active workflows
+			if (this.activeWorkflows.isActive(workflowId)) {
+				this.activeWorkflows.remove(workflowId);
+			}
+
+			return;
 		}

 		throw new Error(`The "activeWorkflows" instance did not get initialized yet.`);
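Taken together, these changes move webhook registration from in-memory state into the database: addWorkflowWebhooks() inserts one row per webhook and executeWebhook() resolves an incoming request by path and method. A sketch of that flow, for illustration only and not part of the commit; the values are hypothetical and the field names follow the IWebhookDb interface added in this commit:

	// Illustration only.
	interface IWebhookDb {
		workflowId: number | string;
		webhookPath: string;
		method: string;
		node: string;
	}

	// Roughly what addWorkflowWebhooks() inserts for a Webhook node listening on POST my-path:
	const record: IWebhookDb = {
		workflowId: '1',
		webhookPath: 'my-path',
		method: 'POST',
		node: 'Webhook',
	};

	// Roughly the lookup executeWebhook() performs for an incoming request:
	// Db.collections.Webhook?.findOne({ webhookPath: path, method: httpMethod })
	console.log(record);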
@@ -20,7 +20,7 @@ class CredentialsOverwritesClass {
 			return;
 		}

-		const data = await GenericHelpers.getConfigValue('credentials.overwrite') as string;
+		const data = await GenericHelpers.getConfigValue('credentials.overwrite.data') as string;

 		try {
 			this.overwriteData = JSON.parse(data);

@@ -30,6 +30,7 @@ class CredentialsOverwritesClass {
 	}

 	applyOverwrite(type: string, data: ICredentialDataDecryptedObject) {

 		const overwrites = this.get(type);

 		if (overwrites === undefined) {
@@ -29,22 +29,31 @@ export let collections: IDatabaseCollections = {
 	Credentials: null,
 	Execution: null,
 	Workflow: null,
+	Webhook: null,
 };

 import {
-	InitialMigration1587669153312
+	InitialMigration1587669153312,
+	WebhookModel1589476000887,
+	CreateIndexStoppedAt1594828256133,
 } from './databases/postgresdb/migrations';

 import {
-	InitialMigration1587563438936
+	InitialMigration1587563438936,
+	WebhookModel1592679094242,
+	CreateIndexStoppedAt1594910478695,
 } from './databases/mongodb/migrations';

 import {
-	InitialMigration1588157391238
+	InitialMigration1588157391238,
+	WebhookModel1592447867632,
+	CreateIndexStoppedAt1594902918301,
 } from './databases/mysqldb/migrations';

 import {
-	InitialMigration1588102412422
+	InitialMigration1588102412422,
+	WebhookModel1592445003908,
+	CreateIndexStoppedAt1594825041918,
 } from './databases/sqlite/migrations';

 import * as path from 'path';

@@ -66,7 +75,11 @@ export async function init(): Promise<IDatabaseCollections> {
 		entityPrefix,
 		url: await GenericHelpers.getConfigValue('database.mongodb.connectionUrl') as string,
 		useNewUrlParser: true,
-		migrations: [InitialMigration1587563438936],
+		migrations: [
+			InitialMigration1587563438936,
+			WebhookModel1592679094242,
+			CreateIndexStoppedAt1594910478695,
+		],
 		migrationsRun: true,
 		migrationsTableName: `${entityPrefix}migrations`,
 	};

@@ -99,7 +112,11 @@ export async function init(): Promise<IDatabaseCollections> {
 		port: await GenericHelpers.getConfigValue('database.postgresdb.port') as number,
 		username: await GenericHelpers.getConfigValue('database.postgresdb.user') as string,
 		schema: config.get('database.postgresdb.schema'),
-		migrations: [InitialMigration1587669153312],
+		migrations: [
+			InitialMigration1587669153312,
+			WebhookModel1589476000887,
+			CreateIndexStoppedAt1594828256133,
+		],
 		migrationsRun: true,
 		migrationsTableName: `${entityPrefix}migrations`,
 		ssl,

@@ -118,7 +135,11 @@ export async function init(): Promise<IDatabaseCollections> {
 		password: await GenericHelpers.getConfigValue('database.mysqldb.password') as string,
 		port: await GenericHelpers.getConfigValue('database.mysqldb.port') as number,
 		username: await GenericHelpers.getConfigValue('database.mysqldb.user') as string,
-		migrations: [InitialMigration1588157391238],
+		migrations: [
+			InitialMigration1588157391238,
+			WebhookModel1592447867632,
+			CreateIndexStoppedAt1594902918301,
+		],
 		migrationsRun: true,
 		migrationsTableName: `${entityPrefix}migrations`,
 	};

@@ -130,7 +151,11 @@ export async function init(): Promise<IDatabaseCollections> {
 		type: 'sqlite',
 		database: path.join(n8nFolder, 'database.sqlite'),
 		entityPrefix,
-		migrations: [InitialMigration1588102412422],
+		migrations: [
+			InitialMigration1588102412422,
+			WebhookModel1592445003908,
+			CreateIndexStoppedAt1594825041918,
+		],
 		migrationsRun: true,
 		migrationsTableName: `${entityPrefix}migrations`,
 	};

@@ -155,6 +180,7 @@ export async function init(): Promise<IDatabaseCollections> {
 	collections.Credentials = getRepository(entities.CredentialsEntity);
 	collections.Execution = getRepository(entities.ExecutionEntity);
 	collections.Workflow = getRepository(entities.WorkflowEntity);
+	collections.Webhook = getRepository(entities.WebhookEntity);

 	return collections;
 }
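The WebhookEntity registered above is defined in the per-database entity files, which are not part of this diff. A hypothetical minimal TypeORM entity mirroring the IWebhookDb interface could look like the following; the decorators and column types are assumptions, not the actual entity:

	// Hypothetical sketch only; the real WebhookEntity is not shown in this commit.
	import { Column, Entity, PrimaryColumn } from 'typeorm';

	@Entity()
	export class WebhookEntity {
		@Column()
		workflowId: number;

		@PrimaryColumn()
		webhookPath: string;

		@PrimaryColumn()
		method: string;

		@Column()
		node: string;
	}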
@@ -40,11 +40,12 @@ export function getBaseUrl(): string {
 	const protocol = config.get('protocol') as string;
 	const host = config.get('host') as string;
 	const port = config.get('port') as number;
+	const path = config.get('path') as string;

 	if (protocol === 'http' && port === 80 || protocol === 'https' && port === 443) {
-		return `${protocol}://${host}/`;
+		return `${protocol}://${host}${path}`;
 	}
-	return `${protocol}://${host}:${port}/`;
+	return `${protocol}://${host}:${port}${path}`;
 }
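With the new path setting the base URL now carries the configured URL prefix instead of a bare trailing slash. A worked example, for illustration only and with hypothetical config values:

	// Illustration only, not part of the commit.
	const protocol: string = 'https';
	const host = 'n8n.example.com';
	const port: number = 443;
	const path = '/automation/';

	const baseUrl = (protocol === 'http' && port === 80) || (protocol === 'https' && port === 443)
		? `${protocol}://${host}${path}`           // https://n8n.example.com/automation/
		: `${protocol}://${host}:${port}${path}`;  // e.g. https://n8n.example.com:5678/automation/

	console.log(baseUrl);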
@@ -49,8 +49,15 @@ export interface IDatabaseCollections {
 	Credentials: Repository<ICredentialsDb> | null;
 	Execution: Repository<IExecutionFlattedDb> | null;
 	Workflow: Repository<IWorkflowDb> | null;
+	Webhook: Repository<IWebhookDb> | null;
 }

+export interface IWebhookDb {
+	workflowId: number | string | ObjectID;
+	webhookPath: string;
+	method: string;
+	node: string;
+}

 export interface IWorkflowBase extends IWorkflowBaseWorkflow {
 	id?: number | string | ObjectID;

@@ -279,17 +286,17 @@ export interface IN8nUISettings {
 	saveDataErrorExecution: string;
 	saveDataSuccessExecution: string;
 	saveManualExecutions: boolean;
+	executionTimeout: number;
+	maxExecutionTimeout: number;
 	timezone: string;
 	urlBaseWebhook: string;
 	versionCli: string;
 }

 export interface IPackageVersions {
 	cli: string;
 }

 export interface IPushData {
 	data: IPushDataExecutionFinished | IPushDataNodeExecuteAfter | IPushDataNodeExecuteBefore | IPushDataTestWebhook;
 	type: IPushDataType;

@@ -297,7 +304,6 @@ export interface IPushData {
 export type IPushDataType = 'executionFinished' | 'executionStarted' | 'nodeExecuteAfter' | 'nodeExecuteBefore' | 'testWebhookDeleted' | 'testWebhookReceived';

 export interface IPushDataExecutionFinished {
 	data: IRun;
 	executionIdActive: string;
@@ -29,6 +29,9 @@ class NodeTypesClass implements INodeTypes {
 	}

 	getByName(nodeType: string): INodeType | undefined {
+		if (this.nodeTypes[nodeType] === undefined) {
+			throw new Error(`The node-type "${nodeType}" is not known!`);
+		}
 		return this.nodeTypes[nodeType].type;
 	}
 }
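getByName() now fails with a descriptive error for an unknown node type instead of crashing while reading .type of undefined. A self-contained sketch of that behaviour, for illustration only, using a hypothetical registry with a string standing in for the real INodeType:

	// Illustration only, not part of the commit.
	const nodeTypes: { [key: string]: { type: string } } = {
		'n8n-nodes-base.webhook': { type: 'webhook (stand-in for the real INodeType)' },
	};

	function getByName(nodeType: string): string {
		if (nodeTypes[nodeType] === undefined) {
			throw new Error(`The node-type "${nodeType}" is not known!`);
		}
		return nodeTypes[nodeType].type;
	}

	console.log(getByName('n8n-nodes-base.webhook')); // resolves
	// getByName('does-not-exist');                   // now throws a descriptive error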
@@ -18,7 +18,7 @@ import * as clientOAuth2 from 'client-oauth2';
 import * as clientOAuth1 from 'oauth-1.0a';
 import { RequestOptions } from 'oauth-1.0a';
 import * as csrf from 'csrf';
 import * as requestPromise from 'request-promise-native';
 import { createHmac } from 'crypto';

 import {

@@ -58,6 +58,9 @@ import {
 	WorkflowExecuteAdditionalData,
 	WorkflowRunner,
 	GenericHelpers,
+	CredentialsOverwrites,
+	ICredentialsOverwrite,
+	LoadNodesAndCredentials,
 } from './';

 import {

@@ -105,10 +108,13 @@ class App {
 	testWebhooks: TestWebhooks.TestWebhooks;
 	endpointWebhook: string;
 	endpointWebhookTest: string;
+	endpointPresetCredentials: string;
 	externalHooks: IExternalHooksClass;
 	saveDataErrorExecution: string;
 	saveDataSuccessExecution: string;
 	saveManualExecutions: boolean;
+	executionTimeout: number;
+	maxExecutionTimeout: number;
 	timezone: string;
 	activeExecutionsInstance: ActiveExecutions.ActiveExecutions;
 	push: Push.Push;

@@ -116,9 +122,11 @@ class App {
 	restEndpoint: string;

 	protocol: string;
 	sslKey: string;
 	sslCert: string;

+	presetCredentialsLoaded: boolean;
+
 	constructor() {
 		this.app = express();

@@ -127,6 +135,8 @@ class App {
 		this.saveDataErrorExecution = config.get('executions.saveDataOnError') as string;
 		this.saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string;
 		this.saveManualExecutions = config.get('executions.saveDataManualExecutions') as boolean;
+		this.executionTimeout = config.get('executions.timeout') as number;
+		this.maxExecutionTimeout = config.get('executions.maxTimeout') as number;
 		this.timezone = config.get('generic.timezone') as string;
 		this.restEndpoint = config.get('endpoints.rest') as string;

@@ -137,10 +147,13 @@ class App {
 		this.activeExecutionsInstance = ActiveExecutions.getInstance();

 		this.protocol = config.get('protocol');
 		this.sslKey = config.get('ssl_key');
 		this.sslCert = config.get('ssl_cert');

 		this.externalHooks = ExternalHooks();
+
+		this.presetCredentialsLoaded = false;
+		this.endpointPresetCredentials = config.get('credentials.overwrite.endpoint') as string;
 	}

@@ -195,7 +208,7 @@ class App {
 		}

 		// Check for and validate JWT if configured
 		const jwtAuthActive = config.get('security.jwtAuth.active') as boolean;
 		if (jwtAuthActive === true) {
 			const jwtAuthHeader = await GenericHelpers.getConfigValue('security.jwtAuth.jwtHeader') as string;
 			if (jwtAuthHeader === '') {

@@ -273,7 +286,7 @@ class App {
 			normalize: true,      // Trim whitespace inside text nodes
 			normalizeTags: true,  // Transform tags to lowercase
 			explicitArray: false, // Only put properties in array if length > 1
 		} }));

 		this.app.use(bodyParser.text({
 			limit: '16mb', verify: (req, res, buf) => {

@@ -448,7 +461,9 @@ class App {
 			await this.externalHooks.run('workflow.update', [newWorkflowData]);

-			if (this.activeWorkflowRunner.isActive(id)) {
+			const isActive = await this.activeWorkflowRunner.isActive(id);
+
+			if (isActive) {
 				// When workflow gets saved always remove it as the triggers could have been
 				// changed and so the changes would not take effect
 				await this.activeWorkflowRunner.remove(id);

@@ -471,9 +486,12 @@ class App {
 					// Do not save when default got set
 					delete newWorkflowData.settings.saveManualExecutions;
 				}
+				if (parseInt(newWorkflowData.settings.executionTimeout as string, 10) === this.executionTimeout) {
+					// Do not save when default got set
+					delete newWorkflowData.settings.executionTimeout;
+				}
 			}

 			newWorkflowData.updatedAt = this.getCurrentDate();

 			await Db.collections.Workflow!.update(id, newWorkflowData);

@@ -517,7 +535,9 @@ class App {
 			await this.externalHooks.run('workflow.delete', [id]);

-			if (this.activeWorkflowRunner.isActive(id)) {
+			const isActive = await this.activeWorkflowRunner.isActive(id);
+
+			if (isActive) {
 				// Before deleting a workflow deactivate it
 				await this.activeWorkflowRunner.remove(id);
 			}

@@ -657,7 +677,8 @@ class App {
 		// Returns the active workflow ids
 		this.app.get(`/${this.restEndpoint}/active`, ResponseHelper.send(async (req: express.Request, res: express.Response): Promise<string[]> => {
-			return this.activeWorkflowRunner.getActiveWorkflows();
+			const activeWorkflows = await this.activeWorkflowRunner.getActiveWorkflows();
+			return activeWorkflows.map(workflow => workflow.id.toString()) as string[];
 		}));

@@ -922,7 +943,8 @@ class App {
 		// Authorize OAuth Data
 		this.app.get(`/${this.restEndpoint}/oauth1-credential/auth`, ResponseHelper.send(async (req: express.Request, res: express.Response): Promise<string> => {
 			if (req.query.id === undefined) {
-				throw new Error('Required credential id is missing!');
+				res.status(500).send('Required credential id is missing!');
+				return '';
 			}

 			const result = await Db.collections.Credentials!.findOne(req.query.id as string);

@@ -934,7 +956,8 @@ class App {
 			let encryptionKey = undefined;
 			encryptionKey = await UserSettings.getEncryptionKey();
 			if (encryptionKey === undefined) {
-				throw new Error('No encryption key got found to decrypt the credentials!');
+				res.status(500).send('No encryption key got found to decrypt the credentials!');
+				return '';
 			}

 			// Decrypt the currently saved credentials

@@ -965,7 +988,7 @@ class App {
 			const callback = `${WebhookHelpers.getWebhookBaseUrl()}${this.restEndpoint}/oauth1-credential/callback?cid=${req.query.id}`;

 			const options: RequestOptions = {
 				method: 'POST',
 				url: (_.get(oauthCredentials, 'requestTokenUrl') as string),
 				data: {

@@ -1006,7 +1029,8 @@ class App {
 			const { oauth_verifier, oauth_token, cid } = req.query;

 			if (oauth_verifier === undefined || oauth_token === undefined) {
-				throw new Error('Insufficient parameters for OAuth1 callback');
+				const errorResponse = new ResponseHelper.ResponseError('Insufficient parameters for OAuth1 callback. Received following query parameters: ' + JSON.stringify(req.query), undefined, 503);
+				return ResponseHelper.sendErrorResponse(res, errorResponse);
 			}

 			const result = await Db.collections.Credentials!.findOne(cid as any); // tslint:disable-line:no-any

@@ -1032,7 +1056,7 @@ class App {
 			const decryptedDataOriginal = credentialsHelper.getDecrypted(result.name, result.type, true);
 			const oauthCredentials = credentialsHelper.applyDefaultsAndOverwrites(decryptedDataOriginal, result.type);

 			const options: OptionsWithUrl = {
 				method: 'POST',
 				url: _.get(oauthCredentials, 'accessTokenUrl') as string,
 				qs: {

@@ -1076,7 +1100,8 @@ class App {
 		// Authorize OAuth Data
 		this.app.get(`/${this.restEndpoint}/oauth2-credential/auth`, ResponseHelper.send(async (req: express.Request, res: express.Response): Promise<string> => {
 			if (req.query.id === undefined) {
-				throw new Error('Required credential id is missing!');
+				res.status(500).send('Required credential id is missing.');
+				return '';
 			}

 			const result = await Db.collections.Credentials!.findOne(req.query.id as string);

@@ -1088,7 +1113,8 @@ class App {
 			let encryptionKey = undefined;
 			encryptionKey = await UserSettings.getEncryptionKey();
 			if (encryptionKey === undefined) {
-				throw new Error('No encryption key got found to decrypt the credentials!');
+				res.status(500).send('No encryption key got found to decrypt the credentials!');
+				return '';
 			}

 			// Decrypt the currently saved credentials

@@ -1136,6 +1162,13 @@ class App {
 			const authQueryParameters = _.get(oauthCredentials, 'authQueryParameters', '') as string;
 			let returnUri = oAuthObj.code.getUri();

+			// if the scope uses commas, rewrite it, as the library always returns it with spaces
+			if ((_.get(oauthCredentials, 'scope') as string).includes(',')) {
+				const data = querystring.parse(returnUri.split('?')[1] as string);
+				data.scope = _.get(oauthCredentials, 'scope') as string;
+				returnUri = `${_.get(oauthCredentials, 'authUrl', '')}?${querystring.stringify(data)}`;
+			}
+
 			if (authQueryParameters) {
 				returnUri += '&' + authQueryParameters;
 			}

@@ -1152,7 +1185,8 @@ class App {
 			const {code, state: stateEncoded } = req.query;

 			if (code === undefined || stateEncoded === undefined) {
-				throw new Error('Insufficient parameters for OAuth2 callback');
+				const errorResponse = new ResponseHelper.ResponseError('Insufficient parameters for OAuth2 callback. Received following query parameters: ' + JSON.stringify(req.query), undefined, 503);
+				return ResponseHelper.sendErrorResponse(res, errorResponse);
 			}

 			let state;

@@ -1202,17 +1236,20 @@ class App {
 				},
 			};
 			}
+			const redirectUri = `${WebhookHelpers.getWebhookBaseUrl()}${this.restEndpoint}/oauth2-credential/callback`;
+
 			const oAuthObj = new clientOAuth2({
 				clientId: _.get(oauthCredentials, 'clientId') as string,
 				clientSecret: _.get(oauthCredentials, 'clientSecret', '') as string,
 				accessTokenUri: _.get(oauthCredentials, 'accessTokenUrl', '') as string,
 				authorizationUri: _.get(oauthCredentials, 'authUrl', '') as string,
-				redirectUri: `${WebhookHelpers.getWebhookBaseUrl()}${this.restEndpoint}/oauth2-credential/callback`,
+				redirectUri,
 				scopes: _.split(_.get(oauthCredentials, 'scope', 'openid,') as string, ',')
 			});

-			const oauthToken = await oAuthObj.code.getToken(req.originalUrl, options);
+			const queryParameters = req.originalUrl.split('?').splice(1, 1).join('');
+
+			const oauthToken = await oAuthObj.code.getToken(`${redirectUri}?${queryParameters}`, options);

 			if (oauthToken === undefined) {
 				const errorResponse = new ResponseHelper.ResponseError('Unable to get access tokens!', undefined, 404);

@@ -1300,7 +1337,7 @@ class App {
 				retrySuccessId: result.retrySuccessId ? result.retrySuccessId.toString() : undefined,
 				startedAt: result.startedAt,
 				stoppedAt: result.stoppedAt,
-				workflowId: result.workflowData!.id!.toString(),
+				workflowId: result.workflowData!.id ? result.workflowData!.id!.toString() : '',
 				workflowName: result.workflowData!.name,
 			});
 		}

@@ -1511,6 +1548,8 @@ class App {
 			saveDataErrorExecution: this.saveDataErrorExecution,
 			saveDataSuccessExecution: this.saveDataSuccessExecution,
 			saveManualExecutions: this.saveManualExecutions,
+			executionTimeout: this.executionTimeout,
+			maxExecutionTimeout: this.maxExecutionTimeout,
 			timezone: this.timezone,
 			urlBaseWebhook: WebhookHelpers.getWebhookBaseUrl(),
 			versionCli: this.versions!.cli,

@@ -1544,6 +1583,26 @@ class App {
 			ResponseHelper.sendSuccessResponse(res, response.data, true, response.responseCode);
 		});

+		// OPTIONS webhook requests
+		this.app.options(`/${this.endpointWebhook}/*`, async (req: express.Request, res: express.Response) => {
+			// Cut away the "/webhook/" to get the registered part of the url
+			const requestUrl = (req as ICustomRequest).parsedUrl!.pathname!.slice(this.endpointWebhook.length + 2);
+
+			let allowedMethods: string[];
+			try {
+				allowedMethods = await this.activeWorkflowRunner.getWebhookMethods(requestUrl);
+				allowedMethods.push('OPTIONS');
+
+				// Add custom "Allow" header to satisfy OPTIONS response.
+				res.append('Allow', allowedMethods);
+			} catch (error) {
+				ResponseHelper.sendErrorResponse(res, error);
+				return;
+			}
+
+			ResponseHelper.sendSuccessResponse(res, {}, true, 204);
+		});
+
 		// GET webhook requests
 		this.app.get(`/${this.endpointWebhook}/*`, async (req: express.Request, res: express.Response) => {
 			// Cut away the "/webhook/" to get the registered part of the url

@@ -1607,6 +1666,26 @@ class App {
 			ResponseHelper.sendSuccessResponse(res, response.data, true, response.responseCode);
 		});

+		// OPTIONS webhook requests (test for UI)
+		this.app.options(`/${this.endpointWebhookTest}/*`, async (req: express.Request, res: express.Response) => {
+			// Cut away the "/webhook-test/" to get the registered part of the url
+			const requestUrl = (req as ICustomRequest).parsedUrl!.pathname!.slice(this.endpointWebhookTest.length + 2);
+
+			let allowedMethods: string[];
+			try {
+				allowedMethods = await this.testWebhooks.getWebhookMethods(requestUrl);
+				allowedMethods.push('OPTIONS');
+
+				// Add custom "Allow" header to satisfy OPTIONS response.
+				res.append('Allow', allowedMethods);
+			} catch (error) {
+				ResponseHelper.sendErrorResponse(res, error);
+				return;
+			}
+
+			ResponseHelper.sendSuccessResponse(res, {}, true, 204);
+		});
+
 		// GET webhook requests (test for UI)
 		this.app.get(`/${this.endpointWebhookTest}/*`, async (req: express.Request, res: express.Response) => {
 			// Cut away the "/webhook-test/" to get the registered part of the url

@@ -1650,9 +1729,57 @@ class App {
 		});

+		if (this.endpointPresetCredentials !== '') {
+
+			// POST endpoint to set preset credentials
+			this.app.post(`/${this.endpointPresetCredentials}`, async (req: express.Request, res: express.Response) => {
+
+				if (this.presetCredentialsLoaded === false) {
+
+					const body = req.body as ICredentialsOverwrite;
+
+					if (req.headers['content-type'] !== 'application/json') {
+						ResponseHelper.sendErrorResponse(res, new Error('Body must be valid JSON, make sure the content-type is application/json'));
+						return;
+					}
+
+					const loadNodesAndCredentials = LoadNodesAndCredentials();
+
+					const credentialsOverwrites = CredentialsOverwrites();
+
+					await credentialsOverwrites.init(body);
+
+					const credentialTypes = CredentialTypes();
+
+					await credentialTypes.init(loadNodesAndCredentials.credentialTypes);
+
+					this.presetCredentialsLoaded = true;
+
+					ResponseHelper.sendSuccessResponse(res, { success: true }, true, 200);
+
+				} else {
+					ResponseHelper.sendErrorResponse(res, new Error('Preset credentials can be set once'));
+				}
+			});
+		}
+
+		// Read the index file and replace the path placeholder
+		const editorUiPath = require.resolve('n8n-editor-ui');
+		const filePath = pathJoin(pathDirname(editorUiPath), 'dist', 'index.html');
+		const n8nPath = config.get('path');
+
+		let readIndexFile = readFileSync(filePath, 'utf8');
+		readIndexFile = readIndexFile.replace(/\/%BASE_PATH%\//g, n8nPath);
+		readIndexFile = readIndexFile.replace(/\/favicon.ico/g, `${n8nPath}/favicon.ico`);
+
+		// Serve the altered index.html file separately
+		this.app.get(`/index.html`, async (req: express.Request, res: express.Response) => {
+			res.send(readIndexFile);
+		});
+
 		// Serve the website
 		const startTime = (new Date()).toUTCString();
-		const editorUiPath = require.resolve('n8n-editor-ui');
 		this.app.use('/', express.static(pathJoin(pathDirname(editorUiPath), 'dist'), {
 			index: 'index.html',
 			setHeaders: (res, path) => {
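The new OPTIONS handlers let a client discover which HTTP methods a webhook path accepts before calling it: the response carries an Allow header built from the registered methods plus OPTIONS, with a 204 status. A sketch of the client side, for illustration only, using a hypothetical instance URL and webhook path:

	// Illustration only, not part of the commit.
	async function checkWebhookMethods(): Promise<void> {
		const response = await fetch('https://n8n.example.com/webhook/my-path', { method: 'OPTIONS' });
		// For a webhook registered for GET and POST this should print roughly: 204 'GET, POST, OPTIONS'
		console.log(response.status, response.headers.get('allow'));
	}

	checkWebhookMethods().catch(console.error);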
@@ -110,6 +110,21 @@ export class TestWebhooks {
 		});
 	}

+	/**
+	 * Gets all request methods associated with a single test webhook
+	 * @param path webhook path
+	 */
+	async getWebhookMethods(path: string): Promise<string[]> {
+		const webhookMethods: string[] = this.activeWebhooks!.getWebhookMethods(path);
+
+		if (webhookMethods === undefined) {
+			// The requested webhook is not registered
+			throw new ResponseHelper.ResponseError(`The requested webhook "${path}" is not registered.`, 404, 404);
+		}
+
+		return webhookMethods;
+	}

 	/**
 	 * Checks if it has to wait for webhook data to execute the workflow. If yes it waits

@@ -141,12 +156,14 @@ export class TestWebhooks {
 		let key: string;
 		for (const webhookData of webhooks) {
 			key = this.activeWebhooks!.getWebhookKey(webhookData.httpMethod, webhookData.path);
+
+			await this.activeWebhooks!.add(workflow, webhookData, mode);
+
 			this.testWebhookData[key] = {
 				sessionId,
 				timeout,
 				workflowData,
 			};
-			await this.activeWebhooks!.add(workflow, webhookData, mode);

 			// Save static data!
 			this.testWebhookData[key].workflowData.staticData = workflow.staticData;
@@ -69,6 +69,26 @@ export function getWorkflowWebhooks(workflow: Workflow, additionalData: IWorkflo
 	return returnData;
 }

+/**
+ * Returns all the webhooks which should be created for the given workflow
+ *
+ * @export
+ * @param {string} workflowId
+ * @param {Workflow} workflow
+ * @returns {IWebhookData[]}
+ */
+export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
+	// Check all the nodes in the workflow if they have webhooks
+
+	const returnData: IWebhookData[] = [];
+
+	for (const node of Object.values(workflow.nodes)) {
+		returnData.push.apply(returnData, NodeHelpers.getNodeWebhooksBasic(workflow, node));
+	}
+
+	return returnData;
+}

 /**
  * Executes a webhook
@@ -41,6 +41,8 @@ import {
 import * as config from '../config';

+import { LessThanOrEqual } from "typeorm";
+

 /**
  * Checks if there was an error and if errorWorkflow is defined. If so it collects

@@ -79,6 +81,30 @@ function executeErrorWorkflow(workflowData: IWorkflowBase, fullRunData: IRun, mo
 	}
 }

+/**
+ * Prunes saved executions which are older than configured.
+ * Throttled to be executed just once in the configured timeframe.
+ *
+ */
+let throttling = false;
+function pruneExecutionData(): void {
+	if (!throttling) {
+		throttling = true;
+		const timeout = config.get('executions.pruneDataTimeout') as number; // in seconds
+		const maxAge = config.get('executions.pruneDataMaxAge') as number; // in h
+		const date = new Date(); // today
+		date.setHours(date.getHours() - maxAge);
+
+		// throttle just on success to allow for self healing on failure
+		Db.collections.Execution!.delete({ stoppedAt: LessThanOrEqual(date.toISOString()) })
+			.then(data =>
+				setTimeout(() => {
+					throttling = false;
+				}, timeout * 1000)
+			).catch(err => throttling = false);
+	}
+}

 /**
  * Pushes the execution out to all connected clients

@@ -189,6 +215,11 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
 		workflowExecuteAfter: [
 			async function (this: WorkflowHooks, fullRunData: IRun, newStaticData: IDataObject): Promise<void> {
+
+				// Prune old execution data
+				if (config.get('executions.pruneData')) {
+					pruneExecutionData();
+				}
+
 				const isManualMode = [this.mode, parentProcessMode].includes('manual');

 				try {
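The pruning cutoff is simply "now minus executions.pruneDataMaxAge hours", and the throttle keeps the delete from running more than once per executions.pruneDataTimeout seconds. A worked example, for illustration only and with hypothetical config values:

	// Illustration only, not part of the commit.
	const maxAge = 336;   // executions.pruneDataMaxAge in hours (assumed: 14 days)
	const timeout = 3600; // executions.pruneDataTimeout in seconds (assumed: 1 hour)

	const cutoff = new Date();
	cutoff.setHours(cutoff.getHours() - maxAge);

	// Executions with stoppedAt <= this timestamp get deleted; the next prune run
	// is only allowed after `timeout` seconds have passed.
	console.log(cutoff.toISOString(), timeout);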
@@ -90,7 +90,6 @@ export class WorkflowRunner {
 		WorkflowExecuteAdditionalData.pushExecutionFinished(executionMode, fullRunData, executionId);
 	}

 	/**
 	 * Run the workflow
 	 *
@@ -155,7 +154,25 @@ export class WorkflowRunner {

 		this.activeExecutions.attachWorkflowExecution(executionId, workflowExecution);

+		// Soft timeout to stop workflow execution after current running node
+		let executionTimeout: NodeJS.Timeout;
+		let workflowTimeout = config.get('executions.timeout') as number > 0 && config.get('executions.timeout') as number; // initialize with default
+		if (data.workflowData.settings && data.workflowData.settings.executionTimeout) {
+			workflowTimeout = data.workflowData.settings!.executionTimeout as number > 0 && data.workflowData.settings!.executionTimeout as number; // preference on workflow setting
+		}
+
+		if (workflowTimeout) {
+			const timeout = Math.min(workflowTimeout, config.get('executions.maxTimeout') as number) * 1000; // as seconds
+			executionTimeout = setTimeout(() => {
+				this.activeExecutions.stopExecution(executionId, 'timeout');
+			}, timeout);
+		}
+
 		workflowExecution.then((fullRunData) => {
+			clearTimeout(executionTimeout);
+			if (workflowExecution.isCanceled) {
+				fullRunData.finished = false;
+			}
 			this.activeExecutions.remove(executionId, fullRunData);
 		});
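A note on the `config.get(...) as number > 0 && config.get(...) as number` expressions above: they evaluate to `false` when the configured timeout is zero or negative and to the timeout value otherwise, so the later `if (workflowTimeout)` guard skips arming the timer entirely when timeouts are disabled. A tiny illustration (not n8n code):

```typescript
// Illustration of how the timeout expression behaves for a few inputs.
function effectiveTimeout(configured: number): number | false {
	return configured > 0 && configured;
}

console.log(effectiveTimeout(0));    // false -> no timer is armed
console.log(effectiveTimeout(-1));   // false
console.log(effectiveTimeout(3600)); // 3600  -> timer armed for min(3600, executions.maxTimeout) seconds
```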
@@ -218,24 +235,54 @@ export class WorkflowRunner {
 		// Send all data to subprocess it needs to run the workflow
 		subprocess.send({ type: 'startWorkflow', data } as IProcessMessage);

+		// Start timeout for the execution
+		let executionTimeout: NodeJS.Timeout;
+		let workflowTimeout = config.get('executions.timeout') as number > 0 && config.get('executions.timeout') as number; // initialize with default
+		if (data.workflowData.settings && data.workflowData.settings.executionTimeout) {
+			workflowTimeout = data.workflowData.settings!.executionTimeout as number > 0 && data.workflowData.settings!.executionTimeout as number; // preference on workflow setting
+		}
+
+		if (workflowTimeout) {
+			const timeout = Math.min(workflowTimeout, config.get('executions.maxTimeout') as number) * 1000; // as seconds
+			executionTimeout = setTimeout(() => {
+				this.activeExecutions.stopExecution(executionId, 'timeout');
+
+				executionTimeout = setTimeout(() => subprocess.kill(), Math.max(timeout * 0.2, 5000)); // minimum 5 seconds
+			}, timeout);
+		}
+
 		// Listen to data from the subprocess
 		subprocess.on('message', (message: IProcessMessage) => {
 			if (message.type === 'end') {
+				clearTimeout(executionTimeout);
 				this.activeExecutions.remove(executionId!, message.data.runData);

 			} else if (message.type === 'processError') {
+				clearTimeout(executionTimeout);
 				const executionError = message.data.executionError as IExecutionError;

 				this.processError(executionError, startedAt, data.executionMode, executionId);

 			} else if (message.type === 'processHook') {
 				this.processHookMessage(workflowHooks, message.data as IProcessMessageDataHook);
+			} else if (message.type === 'timeout') {
+				// Execution timed out and its process has been terminated
+				const timeoutError = { message: 'Workflow execution timed out!' } as IExecutionError;
+
+				this.processError(timeoutError, startedAt, data.executionMode, executionId);
 			}
 		});

-		// Also get informed when the processes does exit especially when it did crash
+		// Also get informed when the processes does exit especially when it did crash or timed out
 		subprocess.on('exit', (code, signal) => {
-			if (code !== 0) {
+			if (signal === 'SIGTERM'){
+				// Execution timed out and its process has been terminated
+				const timeoutError = {
+					message: 'Workflow execution timed out!',
+				} as IExecutionError;
+
+				this.processError(timeoutError, startedAt, data.executionMode, executionId);
+			} else if (code !== 0) {
 				// Process did exit with error code, so something went wrong.
 				const executionError = {
 					message: 'Workflow execution process did crash for an unknown reason!',
@@ -243,6 +290,7 @@ export class WorkflowRunner {

 				this.processError(executionError, startedAt, data.executionMode, executionId);
 			}
+			clearTimeout(executionTimeout);
 		});

 		return executionId;
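Taken together, the subprocess path above implements a two-stage timeout: a soft stop at the configured limit, then a hard `subprocess.kill()` if the child is still alive after 20 % of the timeout (at least 5 seconds). A condensed sketch of that escalation, with hypothetical names (not the actual n8n API):

```typescript
import { ChildProcess } from 'child_process';

// Sketch of the soft-stop-then-kill escalation used above (hypothetical helper).
function armExecutionTimeout(child: ChildProcess, softStop: () => void, timeoutMs: number): NodeJS.Timeout {
	return setTimeout(() => {
		softStop();                                      // ask the execution to stop gracefully
		const graceMs = Math.max(timeoutMs * 0.2, 5000); // give it at least 5 seconds to finish
		setTimeout(() => child.kill(), graceMs);         // hard-kill if it has not exited by then
	}, timeoutMs);
}
```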
@@ -190,17 +190,18 @@ process.on('message', async (message: IProcessMessage) => {

 		// Once the workflow got executed make sure the process gets killed again
 		process.exit();
-	} else if (message.type === 'stopExecution') {
+	} else if (message.type === 'stopExecution' || message.type === 'timeout') {
 		// The workflow execution should be stopped
 		let runData: IRun;

 		if (workflowRunner.workflowExecute !== undefined) {
 			// Workflow started already executing

 			runData = workflowRunner.workflowExecute.getFullRunData(workflowRunner.startedAt);

-			// If there is any data send it to parent process
-			await workflowRunner.workflowExecute.processSuccessExecution(workflowRunner.startedAt, workflowRunner.workflow!);
+			const timeOutError = message.type === 'timeout' ? { message: 'Workflow execution timed out!' } as IExecutionError : undefined;
+
+			// If there is any data send it to parent process, if execution timedout add the error
+			await workflowRunner.workflowExecute.processSuccessExecution(workflowRunner.startedAt, workflowRunner.workflow!, timeOutError);
 		} else {
 			// Workflow did not get started yet
 			runData = {
@@ -209,7 +210,7 @@ process.on('message', async (message: IProcessMessage) => {
 					runData: {},
 				},
 			},
-			finished: true,
+			finished: message.type !== 'timeout',
 			mode: workflowRunner.data!.executionMode,
 			startedAt: workflowRunner.startedAt,
 			stoppedAt: new Date(),
@@ -218,7 +219,7 @@ process.on('message', async (message: IProcessMessage) => {
 			workflowRunner.sendHookToParentProcess('workflowExecuteAfter', [runData]);
 		}

-		await sendToParentProcess('end', {
+		await sendToParentProcess(message.type === 'timeout' ? message.type : 'end', {
 			runData,
 		});
@@ -39,6 +39,7 @@ export class ExecutionEntity implements IExecutionFlattedDb {
 	@Column('Date')
 	startedAt: Date;

+	@Index()
 	@Column('Date')
 	stoppedAt: Date;

packages/cli/src/databases/mongodb/WebhookEntity.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
import {
	Column,
	Entity,
	Index,
	ObjectID,
	ObjectIdColumn,
} from 'typeorm';

import {
	IWebhookDb,
} from '../../Interfaces';

@Entity()
export class WebhookEntity implements IWebhookDb {

	@ObjectIdColumn()
	id: ObjectID;

	@Column()
	workflowId: number;

	@Column()
	webhookPath: string;

	@Column()
	method: string;

	@Column()
	node: string;
}

@@ -1,3 +1,5 @@
 export * from './CredentialsEntity';
 export * from './ExecutionEntity';
 export * from './WorkflowEntity';
+
+export * from './WebhookEntity';

@@ -0,0 +1,22 @@
import { MigrationInterface } from "typeorm";
import {
	MongoQueryRunner,
} from 'typeorm/driver/mongodb/MongoQueryRunner';

import * as config from '../../../../config';

export class CreateIndexStoppedAt1594910478695 implements MigrationInterface {
	name = 'CreateIndexStoppedAt1594910478695';

	async up(queryRunner: MongoQueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		await queryRunner.manager.createCollectionIndex(`${tablePrefix}execution_entity`, 'stoppedAt', { name: `IDX_${tablePrefix}execution_entity_stoppedAt` });
	}

	async down(queryRunner: MongoQueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		await queryRunner.manager.dropCollectionIndex(`${tablePrefix}execution_entity`, `IDX_${tablePrefix}execution_entity_stoppedAt`);
	}

}

@@ -0,0 +1,57 @@
import {
	MigrationInterface,
} from 'typeorm';

import {
	IWorkflowDb,
	NodeTypes,
	WebhookHelpers,
} from '../../..';

import {
	Workflow,
} from 'n8n-workflow/dist/src/Workflow';

import {
	IWebhookDb,
} from '../../../Interfaces';

import * as config from '../../../../config';

import {
	MongoQueryRunner,
} from 'typeorm/driver/mongodb/MongoQueryRunner';

export class WebhookModel1592679094242 implements MigrationInterface {
	name = 'WebhookModel1592679094242';

	async up(queryRunner: MongoQueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		const workflows = await queryRunner.cursor(`${tablePrefix}workflow_entity`, { active: true }).toArray() as IWorkflowDb[];
		const data: IWebhookDb[] = [];
		const nodeTypes = NodeTypes();
		for (const workflow of workflows) {
			const workflowInstance = new Workflow({ id: workflow.id as string, name: workflow.name, nodes: workflow.nodes, connections: workflow.connections, active: workflow.active, nodeTypes, staticData: workflow.staticData, settings: workflow.settings });
			const webhooks = WebhookHelpers.getWorkflowWebhooksBasic(workflowInstance);
			for (const webhook of webhooks) {
				data.push({
					workflowId: workflowInstance.id as string,
					webhookPath: webhook.path,
					method: webhook.httpMethod,
					node: webhook.node,
				});
			}
		}

		if (data.length !== 0) {
			await queryRunner.manager.insertMany(`${tablePrefix}webhook_entity`, data);
		}

		await queryRunner.manager.createCollectionIndex(`${tablePrefix}webhook_entity`, ['webhookPath', 'method'], { unique: true, background: false });
	}

	async down(queryRunner: MongoQueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		await queryRunner.dropTable(`${tablePrefix}webhook_entity`);
	}
}

@@ -1 +1,3 @@
 export * from './1587563438936-InitialMigration';
+export * from './1592679094242-WebhookModel';
+export * from './151594910478695-CreateIndexStoppedAt';

@@ -39,6 +39,7 @@ export class ExecutionEntity implements IExecutionFlattedDb {
 	@Column('datetime')
 	startedAt: Date;

+	@Index()
 	@Column('datetime')
 	stoppedAt: Date;

packages/cli/src/databases/mysqldb/WebhookEntity.ts (new file, 25 lines)
@@ -0,0 +1,25 @@
import {
	Column,
	Entity,
	PrimaryColumn,
} from 'typeorm';

import {
	IWebhookDb,
} from '../../Interfaces';

@Entity()
export class WebhookEntity implements IWebhookDb {

	@Column()
	workflowId: number;

	@PrimaryColumn()
	webhookPath: string;

	@PrimaryColumn()
	method: string;

	@Column()
	node: string;
}
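On the relational back ends the new `webhook_entity` table uses the (`webhookPath`, `method`) pair as its primary key, so a given method and path can only be registered once. A hypothetical TypeORM lookup against this entity (illustration only; the CLI itself goes through its own `Db` abstraction):

```typescript
import { getRepository } from 'typeorm';

import { WebhookEntity } from './WebhookEntity';

// Hypothetical helper: resolve which workflow/node owns an incoming webhook call.
async function findWebhookOwner(webhookPath: string, method: string): Promise<WebhookEntity | undefined> {
	return getRepository(WebhookEntity).findOne({ webhookPath, method });
}
```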
@@ -1,3 +1,4 @@
 export * from './CredentialsEntity';
 export * from './ExecutionEntity';
 export * from './WorkflowEntity';
+export * from './WebhookEntity';

@@ -0,0 +1,59 @@
import {
	MigrationInterface,
	QueryRunner,
} from 'typeorm';

import * as config from '../../../../config';

import {
	IWorkflowDb,
	NodeTypes,
	WebhookHelpers,
} from '../../..';

import {
	Workflow,
} from 'n8n-workflow';

import {
	IWebhookDb,
} from '../../../Interfaces';

export class WebhookModel1592447867632 implements MigrationInterface {
	name = 'WebhookModel1592447867632';

	async up(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');

		await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity (workflowId int NOT NULL, webhookPath varchar(255) NOT NULL, method varchar(255) NOT NULL, node varchar(255) NOT NULL, PRIMARY KEY (webhookPath, method)) ENGINE=InnoDB`);

		const workflows = await queryRunner.query(`SELECT * FROM ${tablePrefix}workflow_entity WHERE active=true`) as IWorkflowDb[];
		const data: IWebhookDb[] = [];
		const nodeTypes = NodeTypes();
		for (const workflow of workflows) {
			const workflowInstance = new Workflow({ id: workflow.id as string, name: workflow.name, nodes: workflow.nodes, connections: workflow.connections, active: workflow.active, nodeTypes, staticData: workflow.staticData, settings: workflow.settings });
			const webhooks = WebhookHelpers.getWorkflowWebhooksBasic(workflowInstance);
			for (const webhook of webhooks) {
				data.push({
					workflowId: workflowInstance.id as string,
					webhookPath: webhook.path,
					method: webhook.httpMethod,
					node: webhook.node,
				});
			}
		}

		if (data.length !== 0) {
			await queryRunner.manager.createQueryBuilder()
				.insert()
				.into(`${tablePrefix}webhook_entity`)
				.values(data)
				.execute();
		}
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		await queryRunner.query(`DROP TABLE ${tablePrefix}webhook_entity`);
	}
}

@@ -0,0 +1,20 @@
import { MigrationInterface, QueryRunner } from "typeorm";

import * as config from '../../../../config';

export class CreateIndexStoppedAt1594902918301 implements MigrationInterface {
	name = 'CreateIndexStoppedAt1594902918301';

	async up(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');

		await queryRunner.query('CREATE INDEX `IDX_' + tablePrefix + 'cefb067df2402f6aed0638a6c1` ON `' + tablePrefix + 'execution_entity` (`stoppedAt`)');
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');

		await queryRunner.query('DROP INDEX `IDX_' + tablePrefix + 'cefb067df2402f6aed0638a6c1` ON `' + tablePrefix + 'execution_entity`');
	}

}

@@ -1 +1,3 @@
 export * from './1588157391238-InitialMigration';
+export * from './1592447867632-WebhookModel';
+export * from './1594902918301-CreateIndexStoppedAt';
@@ -39,6 +39,7 @@ export class ExecutionEntity implements IExecutionFlattedDb {
 	@Column('timestamp')
 	startedAt: Date;

+	@Index()
 	@Column('timestamp')
 	stoppedAt: Date;

packages/cli/src/databases/postgresdb/WebhookEntity.ts (new file, 25 lines)
@@ -0,0 +1,25 @@
import {
	Column,
	Entity,
	PrimaryColumn,
} from 'typeorm';

import {
	IWebhookDb,
} from '../../';

@Entity()
export class WebhookEntity implements IWebhookDb {

	@Column()
	workflowId: number;

	@PrimaryColumn()
	webhookPath: string;

	@PrimaryColumn()
	method: string;

	@Column()
	node: string;
}

@@ -1,3 +1,5 @@
 export * from './CredentialsEntity';
 export * from './ExecutionEntity';
 export * from './WorkflowEntity';
+
+export * from './WebhookEntity';

@@ -1,4 +1,5 @@
-import { MigrationInterface, QueryRunner } from 'typeorm';
+import {
+	MigrationInterface, QueryRunner } from 'typeorm';

 import * as config from '../../../../config';

@@ -0,0 +1,69 @@
import {
	MigrationInterface,
	QueryRunner,
} from 'typeorm';

import {
	IWorkflowDb,
	NodeTypes,
	WebhookHelpers,
} from '../../..';

import {
	Workflow,
} from 'n8n-workflow';

import {
	IWebhookDb,
} from '../../../Interfaces';

import * as config from '../../../../config';

export class WebhookModel1589476000887 implements MigrationInterface {
	name = 'WebhookModel1589476000887';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		const tablePrefixIndex = tablePrefix;
		const schema = config.get('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`CREATE TABLE ${tablePrefix}webhook_entity ("workflowId" integer NOT NULL, "webhookPath" character varying NOT NULL, "method" character varying NOT NULL, "node" character varying NOT NULL, CONSTRAINT "PK_${tablePrefixIndex}b21ace2e13596ccd87dc9bf4ea6" PRIMARY KEY ("webhookPath", "method"))`, undefined);

		const workflows = await queryRunner.query(`SELECT * FROM ${tablePrefix}workflow_entity WHERE active=true`) as IWorkflowDb[];
		const data: IWebhookDb[] = [];
		const nodeTypes = NodeTypes();
		for (const workflow of workflows) {
			const workflowInstance = new Workflow({ id: workflow.id as string, name: workflow.name, nodes: workflow.nodes, connections: workflow.connections, active: workflow.active, nodeTypes, staticData: workflow.staticData, settings: workflow.settings });
			const webhooks = WebhookHelpers.getWorkflowWebhooksBasic(workflowInstance);
			for (const webhook of webhooks) {
				data.push({
					workflowId: workflowInstance.id as string,
					webhookPath: webhook.path,
					method: webhook.httpMethod,
					node: webhook.node,
				});
			}
		}

		if (data.length !== 0) {
			await queryRunner.manager.createQueryBuilder()
				.insert()
				.into(`${tablePrefix}webhook_entity`)
				.values(data)
				.execute();
		}
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		const schema = config.get('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}
		await queryRunner.query(`DROP TABLE ${tablePrefix}webhook_entity`, undefined);
	}

}

@@ -0,0 +1,25 @@
import { MigrationInterface, QueryRunner } from "typeorm";

import * as config from '../../../../config';

export class CreateIndexStoppedAt1594828256133 implements MigrationInterface {
	name = 'CreateIndexStoppedAt1594828256133';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.get('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(`CREATE INDEX IF NOT EXISTS IDX_${tablePrefixPure}33228da131bb1112247cf52a42 ON ${tablePrefix}execution_entity ("stoppedAt") `);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');

		await queryRunner.query(`DROP INDEX IDX_${tablePrefix}33228da131bb1112247cf52a42`);
	}

}

@@ -1 +1,4 @@
 export * from './1587669153312-InitialMigration';
+export * from './1589476000887-WebhookModel';
+export * from './1594828256133-CreateIndexStoppedAt';
@@ -39,6 +39,7 @@ export class ExecutionEntity implements IExecutionFlattedDb {
 	@Column()
 	startedAt: Date;

+	@Index()
 	@Column()
 	stoppedAt: Date;

packages/cli/src/databases/sqlite/WebhookEntity.ts (new file, 25 lines)
@@ -0,0 +1,25 @@
import {
	Column,
	Entity,
	PrimaryColumn,
} from 'typeorm';

import {
	IWebhookDb,
} from '../../Interfaces';

@Entity()
export class WebhookEntity implements IWebhookDb {

	@Column()
	workflowId: number;

	@PrimaryColumn()
	webhookPath: string;

	@PrimaryColumn()
	method: string;

	@Column()
	node: string;
}

@@ -1,4 +1,4 @@
 export * from './CredentialsEntity';
 export * from './ExecutionEntity';
 export * from './WorkflowEntity';
+export * from './WebhookEntity';

@@ -1,4 +1,7 @@
-import { MigrationInterface, QueryRunner } from "typeorm";
+import {
+	MigrationInterface,
+	QueryRunner,
+} from 'typeorm';

 import * as config from '../../../../config';

@@ -0,0 +1,63 @@
import {
	MigrationInterface,
	QueryRunner,
} from 'typeorm';

import * as config from '../../../../config';

import {
	IWorkflowDb,
	NodeTypes,
	WebhookHelpers,
} from '../../..';

import {
	Workflow,
} from 'n8n-workflow';

import {
	IWebhookDb,
} from '../../../Interfaces';

export class WebhookModel1592445003908 implements MigrationInterface {
	name = 'WebhookModel1592445003908';

	async up(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');

		await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, PRIMARY KEY ("webhookPath", "method"))`);

		const workflows = await queryRunner.query(`SELECT * FROM ${tablePrefix}workflow_entity WHERE active=true`) as IWorkflowDb[];
		const data: IWebhookDb[] = [];
		const nodeTypes = NodeTypes();
		for (const workflow of workflows) {
			workflow.nodes = JSON.parse(workflow.nodes as unknown as string);
			workflow.connections = JSON.parse(workflow.connections as unknown as string);
			workflow.staticData = JSON.parse(workflow.staticData as unknown as string);
			workflow.settings = JSON.parse(workflow.settings as unknown as string);
			const workflowInstance = new Workflow({ id: workflow.id as string, name: workflow.name, nodes: workflow.nodes, connections: workflow.connections, active: workflow.active, nodeTypes, staticData: workflow.staticData, settings: workflow.settings });
			const webhooks = WebhookHelpers.getWorkflowWebhooksBasic(workflowInstance);
			for (const webhook of webhooks) {
				data.push({
					workflowId: workflowInstance.id as string,
					webhookPath: webhook.path,
					method: webhook.httpMethod,
					node: webhook.node,
				});
			}
		}

		if (data.length !== 0) {
			await queryRunner.manager.createQueryBuilder()
				.insert()
				.into(`${tablePrefix}webhook_entity`)
				.values(data)
				.execute();
		}
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		await queryRunner.query(`DROP TABLE ${tablePrefix}webhook_entity`);
	}
}

@@ -0,0 +1,20 @@
import { MigrationInterface, QueryRunner } from "typeorm";

import * as config from '../../../../config';

export class CreateIndexStoppedAt1594825041918 implements MigrationInterface {
	name = 'CreateIndexStoppedAt1594825041918';

	async up(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');

		await queryRunner.query(`CREATE INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1" ON "execution_entity" ("stoppedAt") `);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');

		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1"`);
	}

}

@@ -1 +1,3 @@
 export * from './1588102412422-InitialMigration';
+export * from './1592445003908-WebhookModel';
+export * from './1594825041918-CreateIndexStoppedAt';
@@ -1,6 +1,6 @@
 # n8n-core

-![n8n.io - Workflow Automation](https://raw.githubusercontent.com/n8n-io/n8n/master/docs/images/n8n-logo.png)
+![n8n.io - Workflow Automation](https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-logo.png)

 Core components for n8n

@@ -1,6 +1,6 @@
 {
   "name": "n8n-core",
-  "version": "0.37.0",
+  "version": "0.41.0",
   "description": "Core functionality of n8n",
   "license": "SEE LICENSE IN LICENSE.md",
   "homepage": "https://n8n.io",
@@ -30,7 +30,7 @@
   "@types/express": "^4.16.1",
   "@types/jest": "^24.0.18",
   "@types/lodash.get": "^4.4.6",
-  "@types/mmmagic": "^0.4.29",
+  "@types/mime-types": "^2.1.0",
   "@types/node": "^10.10.1",
   "@types/request-promise-native": "^1.0.15",
   "jest": "^24.9.0",
@@ -43,9 +43,10 @@
   "client-oauth2": "^4.2.5",
   "cron": "^1.7.2",
   "crypto-js": "3.1.9-1",
+  "file-type": "^14.6.2",
   "lodash.get": "^4.4.2",
-  "mmmagic": "^0.5.2",
+  "mime-types": "^2.1.27",
-  "n8n-workflow": "~0.32.0",
+  "n8n-workflow": "~0.37.0",
   "p-cancelable": "^2.0.0",
   "request": "^2.88.2",
   "request-promise-native": "^1.0.7"
@@ -35,18 +35,33 @@ export class ActiveWebhooks {
 			throw new Error('Webhooks can only be added for saved workflows as an id is needed!');
 		}

+		const webhookKey = this.getWebhookKey(webhookData.httpMethod, webhookData.path);
+
+		//check that there is not a webhook already registed with that path/method
+		if (this.webhookUrls[webhookKey] !== undefined) {
+			throw new Error(`Test-Webhook can not be activated because another one with the same method "${webhookData.httpMethod}" and path "${webhookData.path}" is already active!`);
+		}
+
 		if (this.workflowWebhooks[webhookData.workflowId] === undefined) {
 			this.workflowWebhooks[webhookData.workflowId] = [];
 		}

 		// Make the webhook available directly because sometimes to create it successfully
 		// it gets called
-		this.webhookUrls[this.getWebhookKey(webhookData.httpMethod, webhookData.path)] = webhookData;
+		this.webhookUrls[webhookKey] = webhookData;

 		const webhookExists = await workflow.runWebhookMethod('checkExists', webhookData, NodeExecuteFunctions, mode, this.testWebhooks);
 		if (webhookExists === false) {
 			// If webhook does not exist yet create it
-			await workflow.runWebhookMethod('create', webhookData, NodeExecuteFunctions, mode, this.testWebhooks);
+			try {
+				await workflow.runWebhookMethod('create', webhookData, NodeExecuteFunctions, mode, this.testWebhooks);
+			} catch (error) {
+				// If there was a problem unregister the webhook again
+				delete this.webhookUrls[webhookKey];
+				delete this.workflowWebhooks[webhookData.workflowId];
+
+				throw error;
+			}
 		}

 		this.workflowWebhooks[webhookData.workflowId].push(webhookData);
@@ -70,6 +85,21 @@ export class ActiveWebhooks {
 		return this.webhookUrls[webhookKey];
 	}

+	/**
+	 * Gets all request methods associated with a single webhook
+	 * @param path
+	 */
+	getWebhookMethods(path: string): string[] {
+		const methods : string[] = [];
+
+		Object.keys(this.webhookUrls)
+			.filter(key => key.includes(path))
+			.map(key => {
+				methods.push(key.split('|')[0]);
+			});
+
+		return methods;
+	}
+
 	/**
 	 * Returns the ids of all the workflows which have active webhooks
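The new `getWebhookMethods` relies on the registry keys combining HTTP method and path; elsewhere in the class (not shown in this diff) the key appears to be built as `"METHOD|path"`, which is why `key.split('|')[0]` recovers the method. A small self-contained illustration of that lookup:

```typescript
// Illustration only: registry keys of the assumed form "METHOD|path".
const webhookUrls: Record<string, object> = {
	'GET|my-hook': {},
	'POST|my-hook': {},
	'GET|other-hook': {},
};

const methods = Object.keys(webhookUrls)
	.filter(key => key.includes('my-hook'))
	.map(key => key.split('|')[0]);

console.log(methods); // [ 'GET', 'POST' ]
```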
@@ -15,6 +15,7 @@ import {
 	ITriggerResponse,
 	IWebhookFunctions as IWebhookFunctionsBase,
 	IWorkflowSettings as IWorkflowSettingsWorkflow,
+	IOAuth2Options,
 } from 'n8n-workflow';

@@ -36,7 +37,7 @@ export interface IExecuteFunctions extends IExecuteFunctionsBase {
 	helpers: {
 		prepareBinaryData(binaryData: Buffer, filePath?: string, mimeType?: string): Promise<IBinaryData>;
 		request: requestPromise.RequestPromiseAPI,
-		requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions): Promise<any>, // tslint:disable-line:no-any
+		requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, oAuth2Options?: IOAuth2Options): Promise<any>, // tslint:disable-line:no-any
 		requestOAuth1(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions): Promise<any>, // tslint:disable-line:no-any
 		returnJsonArray(jsonData: IDataObject | IDataObject[]): INodeExecutionData[];
 	};
@@ -47,7 +48,7 @@ export interface IExecuteSingleFunctions extends IExecuteSingleFunctionsBase {
 	helpers: {
 		prepareBinaryData(binaryData: Buffer, filePath?: string, mimeType?: string): Promise<IBinaryData>;
 		request: requestPromise.RequestPromiseAPI,
-		requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions): Promise<any>, // tslint:disable-line:no-any
+		requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, oAuth2Options?: IOAuth2Options): Promise<any>, // tslint:disable-line:no-any
 		requestOAuth1(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions): Promise<any>, // tslint:disable-line:no-any
 	};
 }
@@ -57,7 +58,7 @@ export interface IPollFunctions extends IPollFunctionsBase {
 	helpers: {
 		prepareBinaryData(binaryData: Buffer, filePath?: string, mimeType?: string): Promise<IBinaryData>;
 		request: requestPromise.RequestPromiseAPI,
-		requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions): Promise<any>, // tslint:disable-line:no-any
+		requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, oAuth2Options?: IOAuth2Options): Promise<any>, // tslint:disable-line:no-any
 		requestOAuth1(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions): Promise<any>, // tslint:disable-line:no-any
 		returnJsonArray(jsonData: IDataObject | IDataObject[]): INodeExecutionData[];
 	};
@@ -73,7 +74,7 @@ export interface ITriggerFunctions extends ITriggerFunctionsBase {
 	helpers: {
 		prepareBinaryData(binaryData: Buffer, filePath?: string, mimeType?: string): Promise<IBinaryData>;
 		request: requestPromise.RequestPromiseAPI,
-		requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions): Promise<any>, // tslint:disable-line:no-any
+		requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, oAuth2Options?: IOAuth2Options): Promise<any>, // tslint:disable-line:no-any
 		requestOAuth1(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions): Promise<any>, // tslint:disable-line:no-any
 		returnJsonArray(jsonData: IDataObject | IDataObject[]): INodeExecutionData[];
 	};
@@ -98,7 +99,7 @@ export interface IUserSettings {
 export interface ILoadOptionsFunctions extends ILoadOptionsFunctionsBase {
 	helpers: {
 		request?: requestPromise.RequestPromiseAPI,
-		requestOAuth2?: (this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions) => Promise<any>, // tslint:disable-line:no-any
+		requestOAuth2?: (this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, oAuth2Options?: IOAuth2Options) => Promise<any>, // tslint:disable-line:no-any
 		requestOAuth1?(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions): Promise<any>, // tslint:disable-line:no-any
 	};
 }
@@ -107,7 +108,7 @@ export interface ILoadOptionsFunctions extends ILoadOptionsFunctionsBase {
 export interface IHookFunctions extends IHookFunctionsBase {
 	helpers: {
 		request: requestPromise.RequestPromiseAPI,
-		requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions): Promise<any>, // tslint:disable-line:no-any
+		requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, oAuth2Options?: IOAuth2Options): Promise<any>, // tslint:disable-line:no-any
 		requestOAuth1(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions): Promise<any>, // tslint:disable-line:no-any
 	};
 }
@@ -117,7 +118,7 @@ export interface IWebhookFunctions extends IWebhookFunctionsBase {
 	helpers: {
 		prepareBinaryData(binaryData: Buffer, filePath?: string, mimeType?: string): Promise<IBinaryData>;
 		request: requestPromise.RequestPromiseAPI,
-		requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions): Promise<any>, // tslint:disable-line:no-any
+		requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, oAuth2Options?: IOAuth2Options): Promise<any>, // tslint:disable-line:no-any
 		requestOAuth1(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions): Promise<any>, // tslint:disable-line:no-any
 		returnJsonArray(jsonData: IDataObject | IDataObject[]): INodeExecutionData[];
 	};
@@ -34,6 +34,7 @@ import {
 	Workflow,
 	WorkflowDataProxy,
 	WorkflowExecuteMode,
+	IOAuth2Options,
 } from 'n8n-workflow';

 import * as clientOAuth1 from 'oauth-1.0a';
@@ -44,14 +45,9 @@ import * as express from 'express';
 import * as path from 'path';
 import { OptionsWithUrl, OptionsWithUri } from 'request';
 import * as requestPromise from 'request-promise-native';

-import { Magic, MAGIC_MIME_TYPE } from 'mmmagic';
-
 import { createHmac } from 'crypto';
+import { fromBuffer } from 'file-type';
+import { lookup } from 'mime-types';

-const magic = new Magic(MAGIC_MIME_TYPE);
-
 /**
@@ -66,18 +62,28 @@ const magic = new Magic(MAGIC_MIME_TYPE);
  */
 export async function prepareBinaryData(binaryData: Buffer, filePath?: string, mimeType?: string): Promise<IBinaryData> {
 	if (!mimeType) {
-		// If not mime type is given figure it out
-		mimeType = await new Promise<string>(
-			(resolve, reject) => {
-				magic.detect(binaryData, (err: Error, mimeType: string) => {
-					if (err) {
-						return reject(err);
-					}
-
-					return resolve(mimeType);
-				});
-			}
-		);
+		// If no mime type is given figure it out
+
+		if (filePath) {
+			// Use file path to guess mime type
+			const mimeTypeLookup = lookup(filePath);
+			if (mimeTypeLookup) {
+				mimeType = mimeTypeLookup;
+			}
+		}
+
+		if (!mimeType) {
+			// Use buffer to guess mime type
+			const fileTypeData = await fromBuffer(binaryData);
+			if (fileTypeData) {
+				mimeType = fileTypeData.mime;
+			}
+		}
+
+		if (!mimeType) {
+			// Fall back to text
+			mimeType = 'text/plain';
+		}
 	}

 	const returnData: IBinaryData = {
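The MIME detection now tries three things in order: the file extension via `lookup` from `mime-types`, the buffer's magic bytes via `fromBuffer` from `file-type`, and finally a `text/plain` fallback. A condensed standalone sketch of the same chain:

```typescript
import { fromBuffer } from 'file-type';
import { lookup } from 'mime-types';

// Standalone sketch of the detection order used by prepareBinaryData above.
async function guessMimeType(data: Buffer, filePath?: string): Promise<string> {
	if (filePath) {
		const byExtension = lookup(filePath); // string | false
		if (byExtension) {
			return byExtension;
		}
	}
	const byContent = await fromBuffer(data); // FileTypeResult | undefined
	if (byContent) {
		return byContent.mime;
	}
	return 'text/plain'; // nothing matched, treat it as text
}
```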
@@ -119,9 +125,10 @@ export async function prepareBinaryData(binaryData: Buffer, filePath?: string, m
  * @param {(OptionsWithUri | requestPromise.RequestPromiseOptions)} requestOptions
  * @param {INode} node
  * @param {IWorkflowExecuteAdditionalData} additionalData
+ *
  * @returns
  */
-export function requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, node: INode, additionalData: IWorkflowExecuteAdditionalData, tokenType?: string, property?: string) {
+export function requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, node: INode, additionalData: IWorkflowExecuteAdditionalData, oAuth2Options?: IOAuth2Options) {
 	const credentials = this.getCredentials(credentialsType) as ICredentialDataDecryptedObject;

 	if (credentials === undefined) {
@@ -140,7 +147,7 @@ export function requestOAuth2(this: IAllExecuteFunctions, credentialsType: strin

 	const oauthTokenData = credentials.oauthTokenData as clientOAuth2.Data;

-	const token = oAuthClient.createToken(get(oauthTokenData, property as string) || oauthTokenData.accessToken, oauthTokenData.refreshToken, tokenType || oauthTokenData.tokenType, oauthTokenData);
+	const token = oAuthClient.createToken(get(oauthTokenData, oAuth2Options?.property as string) || oauthTokenData.accessToken, oauthTokenData.refreshToken, oAuth2Options?.tokenType || oauthTokenData.tokenType, oauthTokenData);
 	// Signs the request by adding authorization headers or query parameters depending
 	// on the token-type used.
 	const newRequestOptions = token.sign(requestOptions as clientOAuth2.RequestObject);
@@ -151,7 +158,18 @@ export function requestOAuth2(this: IAllExecuteFunctions, credentialsType: strin
 		if (error.statusCode === 401) {
 			// TODO: Whole refresh process is not tested yet
 			// Token is probably not valid anymore. So try refresh it.
-			const newToken = await token.refresh();
+			const tokenRefreshOptions: IDataObject = {};
+
+			if (oAuth2Options?.includeCredentialsOnRefreshOnBody) {
+				const body: IDataObject = {
+					client_id: credentials.clientId as string,
+					client_secret: credentials.clientSecret as string,
+				};
+				tokenRefreshOptions.body = body;
+			}
+
+			const newToken = await token.refresh(tokenRefreshOptions);

 			credentials.oauthTokenData = newToken.data;
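With the interface changes above, nodes can pass an `IOAuth2Options` object through the helper instead of the old positional `tokenType`/`property` arguments. A hypothetical node excerpt showing how that might look (the credential name and API URL are made up; only the option fields that appear in this diff are used):

```typescript
import { IExecuteFunctions } from 'n8n-core';
import { INodeExecutionData } from 'n8n-workflow';

// Hypothetical node excerpt: passing the new options through the OAuth2 helper.
export async function execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
	const response = await this.helpers.requestOAuth2.call(this, 'exampleOAuth2Api', {
		method: 'GET',
		uri: 'https://api.example.com/v1/me',
		json: true,
	}, {
		tokenType: 'Bearer',                      // override the stored token type when signing
		property: 'id_token',                     // read the token from a non-default property
		includeCredentialsOnRefreshOnBody: true,  // send client_id/client_secret in the refresh body
	});

	return [this.helpers.returnJsonArray(response)];
}
```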
||||||
|
@ -183,7 +201,7 @@ export function requestOAuth2(this: IAllExecuteFunctions, credentialsType: strin
|
||||||
* @param {(OptionsWithUrl | requestPromise.RequestPromiseOptions)} requestOptionså
|
* @param {(OptionsWithUrl | requestPromise.RequestPromiseOptions)} requestOptionså
|
||||||
* @returns
|
* @returns
|
||||||
*/
|
*/
|
||||||
export function requestOAuth1(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions) {
|
export function requestOAuth1(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | OptionsWithUri | requestPromise.RequestPromiseOptions) {
|
||||||
const credentials = this.getCredentials(credentialsType) as ICredentialDataDecryptedObject;
|
const credentials = this.getCredentials(credentialsType) as ICredentialDataDecryptedObject;
|
||||||
|
|
||||||
if (credentials === undefined) {
|
if (credentials === undefined) {
|
||||||
|
@ -216,14 +234,22 @@ export function requestOAuth1(this: IAllExecuteFunctions, credentialsType: strin
|
||||||
};
|
};
|
||||||
|
|
||||||
const newRequestOptions = {
|
const newRequestOptions = {
|
||||||
//@ts-ignore
|
|
||||||
url: requestOptions.url,
|
|
||||||
method: requestOptions.method,
|
method: requestOptions.method,
|
||||||
data: { ...requestOptions.qs, ...requestOptions.body },
|
data: { ...requestOptions.qs, ...requestOptions.body },
|
||||||
json: requestOptions.json,
|
json: requestOptions.json,
|
||||||
};
|
};
|
||||||
|
|
||||||
if (Object.keys(requestOptions.qs).length !== 0) {
|
// Some RequestOptions have a URI and some have a URL
|
||||||
|
//@ts-ignores
|
||||||
|
if (requestOptions.url !== undefined) {
|
||||||
|
//@ts-ignore
|
||||||
|
newRequestOptions.url = requestOptions.url;
|
||||||
|
} else {
|
||||||
|
//@ts-ignore
|
||||||
|
newRequestOptions.url = requestOptions.uri;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (requestOptions.qs !== undefined) {
|
||||||
//@ts-ignore
|
//@ts-ignore
|
||||||
newRequestOptions.qs = oauth.authorize(newRequestOptions as RequestOptions, token);
|
newRequestOptions.qs = oauth.authorize(newRequestOptions as RequestOptions, token);
|
||||||
} else {
|
} else {
|
||||||
|
@ -413,7 +439,8 @@ export function getNodeWebhookUrl(name: string, workflow: Workflow, node: INode,
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
return NodeHelpers.getNodeWebhookUrl(baseUrl, workflow.id!, node, path.toString());
|
const isFullPath: boolean = workflow.getSimpleParameterValue(node, webhookDescription['isFullPath'], false) as boolean;
|
||||||
|
return NodeHelpers.getNodeWebhookUrl(baseUrl, workflow.id!, node, path.toString(), isFullPath);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -529,8 +556,8 @@ export function getExecutePollFunctions(workflow: Workflow, node: INode, additio
|
||||||
helpers: {
|
helpers: {
|
||||||
prepareBinaryData,
|
prepareBinaryData,
|
||||||
request: requestPromise,
|
request: requestPromise,
|
||||||
requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, tokenType?: string, property?: string): Promise<any> { // tslint:disable-line:no-any
|
requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, oAuth2Options?: IOAuth2Options): Promise<any> { // tslint:disable-line:no-any
|
||||||
return requestOAuth2.call(this, credentialsType, requestOptions, node, additionalData, tokenType, property);
|
return requestOAuth2.call(this, credentialsType, requestOptions, node, additionalData, oAuth2Options);
|
||||||
},
|
},
|
||||||
requestOAuth1(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions): Promise<any> { // tslint:disable-line:no-any
|
requestOAuth1(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions): Promise<any> { // tslint:disable-line:no-any
|
||||||
return requestOAuth1.call(this, credentialsType, requestOptions);
|
return requestOAuth1.call(this, credentialsType, requestOptions);
|
||||||
|
@ -592,8 +619,8 @@ export function getExecuteTriggerFunctions(workflow: Workflow, node: INode, addi
|
||||||
helpers: {
|
helpers: {
|
||||||
prepareBinaryData,
|
prepareBinaryData,
|
||||||
request: requestPromise,
|
request: requestPromise,
|
||||||
requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, tokenType?: string, property?: string): Promise<any> { // tslint:disable-line:no-any
|
requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, oAuth2Options?: IOAuth2Options): Promise<any> { // tslint:disable-line:no-any
|
||||||
return requestOAuth2.call(this, credentialsType, requestOptions, node, additionalData, tokenType, property);
|
return requestOAuth2.call(this, credentialsType, requestOptions, node, additionalData, oAuth2Options);
|
||||||
},
|
},
|
||||||
requestOAuth1(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions): Promise<any> { // tslint:disable-line:no-any
|
requestOAuth1(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions): Promise<any> { // tslint:disable-line:no-any
|
||||||
return requestOAuth1.call(this, credentialsType, requestOptions);
|
return requestOAuth1.call(this, credentialsType, requestOptions);
|
||||||
|
@@ -688,8 +715,8 @@ export function getExecuteFunctions(workflow: Workflow, runExecutionData: IRunEx
 		helpers: {
 			prepareBinaryData,
 			request: requestPromise,
-			requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, tokenType?: string, property?: string): Promise<any> { // tslint:disable-line:no-any
-				return requestOAuth2.call(this, credentialsType, requestOptions, node, additionalData, tokenType, property);
+			requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, oAuth2Options?: IOAuth2Options): Promise<any> { // tslint:disable-line:no-any
+				return requestOAuth2.call(this, credentialsType, requestOptions, node, additionalData, oAuth2Options);
 			},
 			requestOAuth1(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions): Promise<any> { // tslint:disable-line:no-any
 				return requestOAuth1.call(this, credentialsType, requestOptions);
@@ -786,8 +813,8 @@ export function getExecuteSingleFunctions(workflow: Workflow, runExecutionData:
 		helpers: {
 			prepareBinaryData,
 			request: requestPromise,
-			requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, tokenType?: string, property?: string): Promise<any> { // tslint:disable-line:no-any
-				return requestOAuth2.call(this, credentialsType, requestOptions, node, additionalData, tokenType, property);
+			requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, oAuth2Options?: IOAuth2Options): Promise<any> { // tslint:disable-line:no-any
+				return requestOAuth2.call(this, credentialsType, requestOptions, node, additionalData, oAuth2Options);
 			},
 			requestOAuth1(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions): Promise<any> { // tslint:disable-line:no-any
 				return requestOAuth1.call(this, credentialsType, requestOptions);
@@ -842,8 +869,8 @@ export function getLoadOptionsFunctions(workflow: Workflow, node: INode, additio
 		},
 		helpers: {
 			request: requestPromise,
-			requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, tokenType?: string, property?: string): Promise<any> { // tslint:disable-line:no-any
-				return requestOAuth2.call(this, credentialsType, requestOptions, node, additionalData, tokenType, property);
+			requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, oAuth2Options?: IOAuth2Options): Promise<any> { // tslint:disable-line:no-any
+				return requestOAuth2.call(this, credentialsType, requestOptions, node, additionalData, oAuth2Options);
 			},
 			requestOAuth1(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions): Promise<any> { // tslint:disable-line:no-any
 				return requestOAuth1.call(this, credentialsType, requestOptions);
@@ -909,8 +936,8 @@ export function getExecuteHookFunctions(workflow: Workflow, node: INode, additio
 		},
 		helpers: {
 			request: requestPromise,
-			requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, tokenType?: string, property?: string): Promise<any> { // tslint:disable-line:no-any
-				return requestOAuth2.call(this, credentialsType, requestOptions, node, additionalData, tokenType, property);
+			requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, oAuth2Options?: IOAuth2Options): Promise<any> { // tslint:disable-line:no-any
+				return requestOAuth2.call(this, credentialsType, requestOptions, node, additionalData, oAuth2Options);
 			},
 			requestOAuth1(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions): Promise<any> { // tslint:disable-line:no-any
 				return requestOAuth1.call(this, credentialsType, requestOptions);
@@ -1003,8 +1030,8 @@ export function getExecuteWebhookFunctions(workflow: Workflow, node: INode, addi
 		helpers: {
 			prepareBinaryData,
 			request: requestPromise,
-			requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, tokenType?: string, property?: string): Promise<any> { // tslint:disable-line:no-any
-				return requestOAuth2.call(this, credentialsType, requestOptions, node, additionalData, tokenType, property);
+			requestOAuth2(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUri | requestPromise.RequestPromiseOptions, oAuth2Options?: IOAuth2Options): Promise<any> { // tslint:disable-line:no-any
+				return requestOAuth2.call(this, credentialsType, requestOptions, node, additionalData, oAuth2Options);
 			},
 			requestOAuth1(this: IAllExecuteFunctions, credentialsType: string, requestOptions: OptionsWithUrl | requestPromise.RequestPromiseOptions): Promise<any> { // tslint:disable-line:no-any
 				return requestOAuth1.call(this, credentialsType, requestOptions);
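The same signature change is applied to every helper factory above: the separate `tokenType` and `property` arguments of `requestOAuth2` are folded into one optional `oAuth2Options` object. The sketch below is a hypothetical illustration of how a node implementation might call the updated helper; the credential name, URL and request options are invented for the example, and the `tokenType`/`property` fields are only assumed to be part of `IOAuth2Options` because they are the parameters it replaces.

// Hypothetical call from inside a node's execute() method (example values only).
import { IExecuteFunctions } from 'n8n-core';
import { IDataObject } from 'n8n-workflow';

export async function fetchProfile(this: IExecuteFunctions): Promise<IDataObject> {
	const options = {
		method: 'GET',
		uri: 'https://api.example.com/v1/me', // example endpoint, not a real API
		json: true,
	};

	// Previously: this.helpers.requestOAuth2!.call(this, 'exampleOAuth2Api', options, 'Bearer', 'access_token');
	return await this.helpers.requestOAuth2!.call(this, 'exampleOAuth2Api', options, {
		tokenType: 'Bearer',       // assumed IOAuth2Options field (former "tokenType" parameter)
		property: 'access_token',  // assumed IOAuth2Options field (former "property" parameter)
	});
}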
@@ -459,7 +459,7 @@ export class WorkflowExecute {
 		let executionData: IExecuteData;
 		let executionError: IExecutionError | undefined;
 		let executionNode: INode;
-		let nodeSuccessData: INodeExecutionData[][] | null;
+		let nodeSuccessData: INodeExecutionData[][] | null | undefined;
 		let runIndex: number;
 		let startTime: number;
 		let taskData: ITaskData;
@@ -708,6 +708,9 @@ export class WorkflowExecute {
 			return Promise.resolve();
 		})()
 		.then(async () => {
+			if (gotCancel && executionError === undefined) {
+				return this.processSuccessExecution(startedAt, workflow, { message: 'Workflow has been canceled!' } as IExecutionError);
+			}
 			return this.processSuccessExecution(startedAt, workflow, executionError);
 		})
 		.catch(async (error) => {
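The added branch is a simple cooperative-cancellation pattern: a flag set from outside the run is checked once the node loop resolves, and if nothing failed the run is finalized with a synthetic "canceled" error instead of a success result. A minimal, hypothetical sketch of that pattern follows; the names cancelRun, finalizeRun, runNodes and finish are invented for the example and are not part of n8n's API.

// Simplified, stand-alone sketch of the cancellation check shown above.
let gotCancel = false;

export function cancelRun(): void {
	gotCancel = true;
}

async function finalizeRun(runNodes: () => Promise<void>, finish: (error?: Error) => Promise<void>): Promise<void> {
	let executionError: Error | undefined;
	try {
		await runNodes();
	} catch (error) {
		executionError = error as Error;
	}
	if (gotCancel && executionError === undefined) {
		// Nothing failed, but a cancel was requested while the nodes were running.
		return finish(new Error('Workflow has been canceled!'));
	}
	return finish(executionError);
}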
@@ -1,6 +1,6 @@
 # n8n-editor-ui
 
-![n8n.io - Workflow Automation](https://n8n.io/n8n-logo.png)
+![n8n.io - Workflow Automation](https://raw.githubusercontent.com/n8n-io/n8n/master/assets/n8n-logo.png)
 
 The UI to create and update n8n workflows
 
@@ -1,6 +1,6 @@
 {
   "name": "n8n-editor-ui",
-  "version": "0.48.0",
+  "version": "0.52.0",
   "description": "Workflow Editor UI for n8n",
   "license": "SEE LICENSE IN LICENSE.md",
   "homepage": "https://n8n.io",
@@ -14,7 +14,7 @@
     "url": "git+https://github.com/n8n-io/n8n.git"
   },
   "scripts": {
-    "build": "vue-cli-service build",
+    "build": "cross-env VUE_APP_PUBLIC_PATH=\"/%BASE_PATH%/\" vue-cli-service build",
     "dev": "npm run serve",
     "lint": "vue-cli-service lint",
     "serve": "cross-env VUE_APP_URL_BASE_API=http://localhost:5678/ vue-cli-service serve",
@@ -23,7 +23,9 @@
     "test:e2e": "vue-cli-service test:e2e",
     "test:unit": "vue-cli-service test:unit"
   },
-  "dependencies": {},
+  "dependencies": {
+    "uuid": "^8.1.0"
+  },
   "devDependencies": {
     "@beyonk/google-fonts-webpack-plugin": "^1.2.3",
     "@fortawesome/fontawesome-svg-core": "^1.2.19",
@@ -64,7 +66,7 @@
     "lodash.debounce": "^4.0.8",
     "lodash.get": "^4.4.2",
     "lodash.set": "^4.3.2",
-    "n8n-workflow": "~0.33.0",
+    "n8n-workflow": "~0.37.0",
    "node-sass": "^4.12.0",
     "prismjs": "^1.17.1",
     "quill": "^2.0.0-dev.3",
@@ -4,12 +4,13 @@
 		<meta charset="utf-8">
 		<meta http-equiv="X-UA-Compatible" content="IE=edge">
 		<meta name="viewport" content="width=device-width,initial-scale=1.0">
-		<link rel="icon" href="<%= BASE_URL %>favicon.ico">
+		<link rel="icon" href="/favicon.ico">
+		<script type="text/javascript">window.BASE_PATH = "/%BASE_PATH%/";</script>
 		<title>n8n.io - Workflow Automation</title>
 	</head>
 	<body>
 		<noscript>
-			<strong>We're sorry but editor-ui-ts-default-lint doesn't work properly without JavaScript enabled. Please enable it to continue.</strong>
+			<strong>We're sorry but the n8n Editor-UI doesn't work properly without JavaScript enabled. Please enable it to continue.</strong>
 		</noscript>
 		<div id="app"></div>
 		<!-- built files will be auto injected -->
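Together with the changed build script (VUE_APP_PUBLIC_PATH="/%BASE_PATH%/"), this bakes a literal "%BASE_PATH%" placeholder into the built editor, so that the base path can be substituted when the page is served rather than at build time. The sketch below is a hypothetical illustration of one way a server could perform that substitution; it is not taken from the n8n codebase, and the "dist" location, port and "my-n8n" prefix are invented for the example.

// Hypothetical server-side substitution of the %BASE_PATH% placeholder.
import express from 'express';
import { promises as fs } from 'fs';
import * as path from 'path';

const app = express();
const editorDist = path.join(__dirname, 'dist'); // assumed location of the built editor
const basePath = 'my-n8n';                       // assumed deployment prefix

app.get(`/${basePath}/`, async (_req, res) => {
	// Replace the placeholder baked into index.html by the build step.
	const html = await fs.readFile(path.join(editorDist, 'index.html'), 'utf-8');
	res.type('html').send(html.replace(/%BASE_PATH%/g, basePath));
});

app.listen(5678);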
@@ -397,6 +397,8 @@ export interface IN8nUISettings {
 	saveDataSuccessExecution: string;
 	saveManualExecutions: boolean;
 	timezone: string;
+	executionTimeout: number;
+	maxExecutionTimeout: number;
 	urlBaseWebhook: string;
 	versionCli: string;
 }
@@ -407,4 +409,11 @@ export interface IWorkflowSettings extends IWorkflowSettingsWorkflow {
 	saveDataSuccessExecution?: string;
 	saveManualExecutions?: boolean;
 	timezone?: string;
+	executionTimeout?: number;
+}
+
+export interface ITimeoutHMS {
+	hours: number;
+	minutes: number;
+	seconds: number;
 }
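For illustration, here is a hypothetical helper (not part of this diff) showing how a timeout stored as a single number of seconds, which is the assumed unit of the new executionTimeout fields, could be split into the hours/minutes/seconds shape that ITimeoutHMS describes. The interface is re-declared locally so the sketch is self-contained.

// Same shape as the ITimeoutHMS interface added above.
interface ITimeoutHMS {
	hours: number;
	minutes: number;
	seconds: number;
}

// Hypothetical converter: split a timeout in seconds into hours, minutes and seconds.
function convertToHMS(timeoutSeconds: number): ITimeoutHMS {
	const hours = Math.floor(timeoutSeconds / 3600);
	const minutes = Math.floor((timeoutSeconds % 3600) / 60);
	const seconds = timeoutSeconds % 60;
	return { hours, minutes, seconds };
}

// Example: convertToHMS(3725) returns { hours: 1, minutes: 2, seconds: 5 }.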
@@ -1,5 +1,5 @@
 <template>
-	<transition name="el-fade-in">
+	<transition name="el-fade-in" @after-enter="showDocumentHelp = true">
 		<div class="data-display-wrapper close-on-click" v-show="node" @click="close">
 			<div class="data-display" >
 				<NodeSettings @valueChanged="valueChanged" />
@@ -7,9 +7,33 @@
 			<div class="close-button clickable close-on-click" title="Close">
 				<i class="el-icon-close close-on-click"></i>
 			</div>
+			<transition name="fade">
+				<div v-if="showDocumentHelp && nodeType" class="doc-help-wrapper">
+					<svg id="help-logo" v-if="showDocumentHelp && nodeType" :href="'https://docs.n8n.io/nodes/' + nodeType.name" target="_blank" width="18px" height="18px" viewBox="0 0 18 18" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
+						<title>Node Documentation</title>
+						<g id="MVP-Onboard-proposal" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
+							<g id="Node-modal-(docs-link)" transform="translate(-1127.000000, -836.000000)" fill-rule="nonzero">
+								<g id="Group" transform="translate(1117.000000, 825.000000)">
+									<g id="mdi-help-box" transform="translate(10.000000, 11.000000)">
+										<g id="Icon" transform="translate(2.250000, 2.250000)" fill="#FF6150">
+											<path d="M6,11.25 L7.5,11.25 L7.5,9.75 L6,9.75 L6,11.25 M6.75,2.25 C5.09314575,2.25 3.75,3.59314575 3.75,5.25 L5.25,5.25 C5.25,4.42157288 5.92157288,3.75 6.75,3.75 C7.57842712,3.75 8.25,4.42157288 8.25,5.25 C8.25,6.75 6,6.5625 6,9 L7.5,9 C7.5,7.3125 9.75,7.125 9.75,5.25 C9.75,3.59314575 8.40685425,2.25 6.75,2.25 M1.5,0 L12,0 C12.8284271,0 13.5,0.671572875 13.5,1.5 L13.5,12 C13.5,12.8284271 12.8284271,13.5 12,13.5 L1.5,13.5 C0.671572875,13.5 0,12.8284271 0,12 L0,1.5 C0,0.671572875 0.671572875,0 1.5,0 Z" id="Icon-Shape"></path>
+										</g>
+										<rect id="ViewBox" x="0" y="0" width="18" height="18"></rect>
+									</g>
+								</g>
+							</g>
+						</g>
+					</svg>
+
+					<div v-if="showDocumentHelp && nodeType" class="text">
+						Need help? <a id="doc-hyperlink" v-if="showDocumentHelp && nodeType" :href="'https://docs.n8n.io/nodes/' + nodeType.name + '?utm_source=n8n_app&utm_medium=node_settings_modal-credential_link&utm_campaign=' + nodeType.name" target="_blank">Open {{nodeType.displayName}} documentation</a>
+					</div>
+				</div>
+			</transition>
 		</div>
 	</div>
 </transition>
 
 </template>
 
 <script lang="ts">
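To make the link construction above concrete, the snippet below evaluates the same expression for a hypothetical node type name; the name "n8n-nodes-base.github" is only an example and is not asserted to exist in this diff.

// Worked example of the documentation URL built in the template above.
const nodeTypeName = 'n8n-nodes-base.github'; // example value for nodeType.name
const docsUrl = 'https://docs.n8n.io/nodes/' + nodeTypeName
	+ '?utm_source=n8n_app&utm_medium=node_settings_modal-credential_link&utm_campaign=' + nodeTypeName;
// => https://docs.n8n.io/nodes/n8n-nodes-base.github?utm_source=n8n_app&utm_medium=node_settings_modal-credential_link&utm_campaign=n8n-nodes-base.github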
@@ -18,6 +42,7 @@ import Vue from 'vue';
 
 import {
 	IRunData,
+	INodeTypeDescription,
 } from 'n8n-workflow';
 import {
 	INodeUi,
@@ -33,10 +58,24 @@ export default Vue.extend({
 		NodeSettings,
 		RunData,
 	},
+	data () {
+		return {
+			basePath: this.$store.getters.getBaseUrl,
+			showDocumentHelp: false,
+		};
+	},
 	computed: {
 		node (): INodeUi {
 			return this.$store.getters.activeNode;
 		},
+		nodeType (): INodeTypeDescription | null {
+			const activeNode = this.node;
+			if (this.node) {
+				return this.$store.getters.nodeType(this.node.type);
+			}
+
+			return null;
+		},
 	},
 	methods: {
 		valueChanged (parameterData: IUpdateInformation) {
@@ -48,15 +87,18 @@ export default Vue.extend({
 		close (e: MouseEvent) {
 			// @ts-ignore
 			if (e.target.className && e.target.className.includes && e.target.className.includes('close-on-click')) {
+				this.showDocumentHelp = false;
 				this.$store.commit('setActiveNode', null);
 			}
 		},
 	},
 });
 
 </script>
 
 <style lang="scss">
 
 .data-display-wrapper {
 	position: fixed;
 	top: 0;
@@ -101,7 +143,59 @@ export default Vue.extend({
 		margin: 1em auto;
 		height: 95%;
 	}
+
+	.fade-enter-active, .fade-enter-to, .fade-leave-active {
+		transition: all .75s ease;
+	}
+
+	.fade-enter, .fade-leave-to /* .fade-leave-active below version 2.1.8 */ {
+		opacity: 0;
+	}
+
+	.doc-help-wrapper {
+		transition-delay: 2s;
+		background-color: #fff;
+		margin-top: 1%;
+		box-sizing: border-box;
+		border: 1px solid #DCDFE6;
+		border-radius: 4px;
+		background-color: #FFFFFF;
+		box-shadow: 0 2px 7px 0 rgba(0,0,0,0.15);
+		min-width: 319px;
+		height: 40px;
+		float: right;
+		padding: 5px;
+		display: flex;
+		flex-direction: row;
+		padding-top: 10px;
+		padding-right: 12px;
+
+		#help-logo {
+			flex: 1;
+		}
+
+		.text {
+			margin-left: 5px;
+			flex: 9;
+			font-family: "Open Sans";
+			color: #666666;
+			font-size: 12px;
+			font-weight: 600;
+			letter-spacing: 0;
+			line-height: 17px;
+			white-space: nowrap;
+		}
+
+		#doc-hyperlink, #doc-hyperlink:visited, #doc-hyperlink:focus, #doc-hyperlink:active {
+			text-decoration: none;
+			color: #FF6150;
+		}
+	}
 }
 
 
 }
 
 
 
 </style>
@@ -122,6 +122,13 @@ export default mixins(
 			readOnly: !!this.resolvedValue,
 			modules: {
 				autoformat: {},
+				keyboard: {
+					bindings: {
+						'list autofill': {
+							prefix: /^$/,
+						},
+					},
+				},
 			},
 		});
 
@@ -16,7 +16,7 @@
 
 		<el-menu-item index="logo" class="logo-item">
 			<a href="https://n8n.io" target="_blank" class="logo">
-				<img src="/n8n-icon-small.png" class="icon" alt="n8n.io"/>
+				<img :src="basePath + 'n8n-icon-small.png'" class="icon" alt="n8n.io"/>
 				<span class="logo-text" slot="title">n8n.io</span>
 			</a>
 		</el-menu-item>
@@ -208,6 +208,8 @@ export default mixins(
 	data () {
 		return {
 			aboutDialogVisible: false,
+			// @ts-ignore
+			basePath: this.$store.getters.getBaseUrl,
 			isCollapsed: true,
 			credentialNewDialogVisible: false,
 			credentialOpenDialogVisible: false,
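The getBaseUrl getter referenced here is not shown in this diff. Purely as an assumption-laden sketch, one plausible shape for it is a store getter that exposes the BASE_PATH value injected by public/index.html and falls back to "/" when the placeholder was never substituted (for example during local development); the sketch below is hypothetical and not taken from the n8n store.

// Hypothetical "getBaseUrl" getter (illustrative only).
const getters = {
	getBaseUrl: (): string => {
		// @ts-ignore - BASE_PATH is attached to window by the inline script in public/index.html
		const basePath = window.BASE_PATH as string;
		return basePath === '/%BASE_PATH%/' ? '/' : basePath;
	},
};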
@@ -110,8 +110,9 @@ export default mixins(
 
 			const workflowId = this.$store.getters.workflowId;
 			const path = this.getValue(webhookData, 'path');
+			const isFullPath = this.getValue(webhookData, 'isFullPath') as unknown as boolean || false;
 
-			return NodeHelpers.getNodeWebhookUrl(baseUrl, workflowId, this.node, path);
+			return NodeHelpers.getNodeWebhookUrl(baseUrl, workflowId, this.node, path, isFullPath);
 		},
 	},
 	watch: {
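For illustration, the sketch below exercises the extended helper directly; every value is an example, and how exactly isFullPath changes the generated URL is decided inside NodeHelpers.getNodeWebhookUrl (presumably whether the configured path is treated as the complete webhook path), which is not shown in this diff.

// Hypothetical usage of the extended getNodeWebhookUrl helper (example values only).
import { INode, NodeHelpers } from 'n8n-workflow';

const node: INode = {
	name: 'Webhook',
	type: 'n8n-nodes-base.webhook',
	typeVersion: 1,
	position: [0, 0],
	parameters: {},
};

const webhookUrl = NodeHelpers.getNodeWebhookUrl(
	'https://n8n.example.com/webhook', // baseUrl (example)
	'42',                              // workflowId (example)
	node,
	'my-hook',                         // the node's "path" parameter (example)
	true,                              // isFullPath, as read from the new parameter above
);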
Some files were not shown because too many files have changed in this diff.