Mirror of https://github.com/n8n-io/n8n.git (synced 2025-03-05 20:50:17 -08:00)

Commit fa91b47452: 🔀 Merge master

.github/workflows/tests.yml (vendored, 2 changes)
@@ -11,7 +11,7 @@ jobs:
    strategy:
      matrix:
-        node-version: [16.x]
+        node-version: [14.x, 16.x]

    steps:
      - uses: actions/checkout@v1
CHANGELOG.md (72 changes)
@@ -1,3 +1,73 @@
# [0.171.0](https://github.com/n8n-io/n8n/compare/n8n@0.170.0...n8n@0.171.0) (2022-04-03)

### Bug Fixes

* **core:** Fix crash on webhook when last node did not return data ([c50d04a](https://github.com/n8n-io/n8n/commit/c50d04af9eb033d82860c336fc7350b5c3f22242))
* **EmailReadImap Node:** Fix issue that crashed process if node was configured wrong ([#3079](https://github.com/n8n-io/n8n/issues/3079)) ([85f15d4](https://github.com/n8n-io/n8n/commit/85f15d49896d876fa3ab84e9fa1846f856851274))
* **Google Tasks Node:** Fix "Show Completed" option and hide title field where not needed ([#2741](https://github.com/n8n-io/n8n/issues/2741)) ([9d703e3](https://github.com/n8n-io/n8n/commit/9d703e366b8e191e0f588469892ebb7b6d03c1d3))
* **NocoDB Node:** Fix pagination ([#3081](https://github.com/n8n-io/n8n/issues/3081)) ([5f44b0d](https://github.com/n8n-io/n8n/commit/5f44b0dad5254fe9f985b314db8f7d43ab48c712))
* **Salesforce Node:** Fix issue that "status" did not get used for Case => Create & Update ([#2212](https://github.com/n8n-io/n8n/issues/2212)) ([1018146](https://github.com/n8n-io/n8n/commit/1018146f21c47eda9f888bd19e92d1106c49267a))

### Features

* **editor:** Add download button for binary data ([#2992](https://github.com/n8n-io/n8n/issues/2992)) ([13a9db7](https://github.com/n8n-io/n8n/commit/13a9db774576a00d4e3ce1988557654d00067073))
* **Emelia Node:** Add Campaign > Duplicate functionality ([#3000](https://github.com/n8n-io/n8n/issues/3000)) ([0b08be1](https://github.com/n8n-io/n8n/commit/0b08be1c0b2961f235fc2446a36afe3995b4d847)), closes [#3065](https://github.com/n8n-io/n8n/issues/3065) [#2741](https://github.com/n8n-io/n8n/issues/2741) [#3075](https://github.com/n8n-io/n8n/issues/3075)
* **FTP Node:** Add option to recursively create directories on rename ([#3001](https://github.com/n8n-io/n8n/issues/3001)) ([39a6f41](https://github.com/n8n-io/n8n/commit/39a6f417203b76cfa2c68816c49e86dc7236aba4))
* **Mautic Node:** Add credential test and allow trailing slash in host ([#3080](https://github.com/n8n-io/n8n/issues/3080)) ([0a75539](https://github.com/n8n-io/n8n/commit/0a75539cc3d696a8946d7db5ff5842ff54835134))
* **Microsoft Teams Node:** Add chat message support ([#2635](https://github.com/n8n-io/n8n/issues/2635)) ([984f62d](https://github.com/n8n-io/n8n/commit/984f62df9ed92cdf297b3b56300c9f23bf128d2d))
* **Mocean Node:** Add "Delivery Report URL" option and credential tests ([#3075](https://github.com/n8n-io/n8n/issues/3075)) ([c89d2b1](https://github.com/n8n-io/n8n/commit/c89d2b10f2461ff8e90209b8f29c222f9430dba5))
* **ServiceNow Node:** Add basicAuth support and fix getColumns loadOptions ([#2712](https://github.com/n8n-io/n8n/issues/2712)) ([2c72584](https://github.com/n8n-io/n8n/commit/2c72584b55521b437baa20ddad7c919807fd9f8f)), closes [#2741](https://github.com/n8n-io/n8n/issues/2741) [#3075](https://github.com/n8n-io/n8n/issues/3075) [#3000](https://github.com/n8n-io/n8n/issues/3000) [#3065](https://github.com/n8n-io/n8n/issues/3065) [#2741](https://github.com/n8n-io/n8n/issues/2741) [#3075](https://github.com/n8n-io/n8n/issues/3075) [#3071](https://github.com/n8n-io/n8n/issues/3071) [#3001](https://github.com/n8n-io/n8n/issues/3001) [#2635](https://github.com/n8n-io/n8n/issues/2635) [#3080](https://github.com/n8n-io/n8n/issues/3080) [#3061](https://github.com/n8n-io/n8n/issues/3061) [#3081](https://github.com/n8n-io/n8n/issues/3081) [#2582](https://github.com/n8n-io/n8n/issues/2582) [#2212](https://github.com/n8n-io/n8n/issues/2212)
* **Strava Node:** Add "Get Streams" operation ([#2582](https://github.com/n8n-io/n8n/issues/2582)) ([6bbb4df](https://github.com/n8n-io/n8n/commit/6bbb4df05925362404f844a23a695f186d27b72e))

# [0.170.0](https://github.com/n8n-io/n8n/compare/n8n@0.169.0...n8n@0.170.0) (2022-03-27)

### Bug Fixes

- **core:** Add logs and error catches for possible failures in queue mode ([#3032](https://github.com/n8n-io/n8n/issues/3032)) ([3b4a97d](https://github.com/n8n-io/n8n/commit/3b4a97dd576bd3c2f53f958266964d3e02f01c96))
- **AWS Lambda Node:** Fix "Invocation Type" > "Continue Workflow" ([#3010](https://github.com/n8n-io/n8n/issues/3010)) ([9547a08](https://github.com/n8n-io/n8n/commit/9547a08f0344825e42f5580da035bb1f21c03368))
- **Supabase Node:** Fix Row > Get operation ([#3045](https://github.com/n8n-io/n8n/issues/3045)) ([b9aa440](https://github.com/n8n-io/n8n/commit/b9aa440be3d52bf412990b93cfc3758353fb4943))
- **Supabase Node:** Send token also via Authorization Bearer ([#2814](https://github.com/n8n-io/n8n/issues/2814)) ([5774dd8](https://github.com/n8n-io/n8n/commit/5774dd8885a87a1ebe70f4ef4a06a42013112afe))
- **Xero Node:** Fix some operations and add support for setting address and phone number ([#3048](https://github.com/n8n-io/n8n/issues/3048)) ([ab08c0d](https://github.com/n8n-io/n8n/commit/ab08c0df1599d44326b45c37f80918e5c107cc6a))
- **Wise Node:** Fix issue when executing a transfer ([#3039](https://github.com/n8n-io/n8n/issues/3039)) ([b90bf45](https://github.com/n8n-io/n8n/commit/b90bf4576c6e3f86000d61606f412ea0544b59ef))

### Features

- **Crypto Node:** Add Generate operation to generate random values ([#2541](https://github.com/n8n-io/n8n/issues/2541)) ([b5ecccb](https://github.com/n8n-io/n8n/commit/b5ecccb84080362880a307e3f9d76d429bd1d537))
- **HTTP Request Node:** Add support for OPTIONS method ([#3030](https://github.com/n8n-io/n8n/issues/3030)) ([bd9064c](https://github.com/n8n-io/n8n/commit/bd9064cd0ea8833b49a7e3860f12bfa37c286947))
- **Jira Node:** Add Simplify Output option to Issue > Get ([#2408](https://github.com/n8n-io/n8n/issues/2408)) ([016aeaa](https://github.com/n8n-io/n8n/commit/016aeaaa791205c5ee3d16eef25f856603cf0085))
- **Reddit Node:** Add possibility to query saved posts ([#3034](https://github.com/n8n-io/n8n/issues/3034)) ([5ba4c27](https://github.com/n8n-io/n8n/commit/5ba4c27d8c417964187af89a15d5dd4ce9f3271a))
- **Zendesk Node:** Add ticket status "On-hold" ([2b20a46](https://github.com/n8n-io/n8n/commit/2b20a460915655791647d62b48dde97dad3b2fd3))

# [0.169.0](https://github.com/n8n-io/n8n/compare/n8n@0.168.2...n8n@0.169.0) (2022-03-20)

### License change

From [Apache 2.0 with Commons Clause](https://github.com/n8n-io/n8n/blob/181ba3c4e236279b65d102a8a33ae6896f160487/LICENSE.md) to [Sustainable Use License](https://github.com/n8n-io/n8n/blob/master/LICENSE.md)

### Bug Fixes

- **GitHub Node:** Fix credential tests and File > List operation ([#2999](https://github.com/n8n-io/n8n/issues/2999)) ([ec618e2](https://github.com/n8n-io/n8n/commit/ec618e25bba5e36592ff37e7c560d738387c9112))
- **Telegram Node:** Fix sending binary data when disable notification is set ([#2990](https://github.com/n8n-io/n8n/issues/2990)) ([26a7c61](https://github.com/n8n-io/n8n/commit/26a7c61175c1aadc101e055067224aa0797db5c5))

### Features

- Add support for reading ids from file with executeBatch command ([#3008](https://github.com/n8n-io/n8n/issues/3008)) ([5658593](https://github.com/n8n-io/n8n/commit/5658593df4cde8615f3a8383f1045d8659fffb04))
- **HTTP Request Node:** Allow Delete requests with body ([#2900](https://github.com/n8n-io/n8n/issues/2900)) ([8a88f94](https://github.com/n8n-io/n8n/commit/8a88f948f2bb6ab780a58cd284c0f6d4f499f9c6))
- **KoBoToolbox Node:** Add KoBoToolbox Regular and Trigger Node ([#2765](https://github.com/n8n-io/n8n/issues/2765)) ([1a7f0a4](https://github.com/n8n-io/n8n/commit/1a7f0a42465574f46f00e4d9d50cf71d947dc2bc)), closes [#2510](https://github.com/n8n-io/n8n/issues/2510)
- **Linear Node:** Add Linear Node ([#2971](https://github.com/n8n-io/n8n/issues/2971)) ([8d04474](https://github.com/n8n-io/n8n/commit/8d04474e30dc9109ad84fc945cc734483d0d067b))
- **Mailjet Node:** Add credential tests and support for sandbox, JSON parameters & variables ([#2987](https://github.com/n8n-io/n8n/issues/2987)) ([d2756de](https://github.com/n8n-io/n8n/commit/d2756de090f2628f9025ba2f4436870e67576367))
- **Mattermost Node:** Add support for Channel Search ([#2687](https://github.com/n8n-io/n8n/issues/2687)) ([1b993e4](https://github.com/n8n-io/n8n/commit/1b993e402297ac400c5167d1bcfa78e9a73c07df))

## [0.168.2](https://github.com/n8n-io/n8n/compare/n8n@0.168.1...n8n@0.168.2) (2022-03-16)

### Bug Fixes

- Fix issue with n8n not authenticating oauth requests ([#2998](https://github.com/n8n-io/n8n/issues/2998))

## [0.168.1](https://github.com/n8n-io/n8n/compare/n8n@0.168.0...n8n@0.168.1) (2022-03-15)

### Bug Fixes

@@ -29,3 +99,5 @@
- **MongoDb Node:** Add Aggregate Operation ([2c9a06e](https://github.com/n8n-io/n8n/commit/2c9a06e86346a9e21f877cb508d13a1401c700a9))
- **Redis Node:** Add Redis Trigger node and publish operation to regular node ([5c2deb4](https://github.com/n8n-io/n8n/commit/5c2deb468867ec77a05d09ef324d4855210e17d4))
- **Wordpress Node:** Add Status option to Get All operation of Posts resource ([4d4db7f](https://github.com/n8n-io/n8n/commit/4d4db7f805673758dfb379c9e86e98815f265db2))

> **Note:** for changelogs before 0.167.0, refer to the [Release notes](https://docs.n8n.io/reference/release-notes.html) in the documentation.
LICENSE.md

@@ -12,7 +12,7 @@ The licensor grants you a non-exclusive, royalty-free, worldwide, non-sublicensa
## Limitations

-You may use or modify the software only for your own internal business purposes or for non-commercial or personal use
+You may use or modify the software only for your own internal business purposes or for non-commercial or personal use.
You may distribute the software or provide it to others only if you do so free of charge for non-commercial purposes.
You may not alter, remove, or obscure any licensing, copyright, or other notices of the licensor in the software. Any use of the licensor’s trademarks is subject to applicable law.
package-lock.json (generated, 75806 changes): file diff suppressed because it is too large.
BREAKING-CHANGES.md

@@ -2,13 +2,27 @@

This list shows all the versions which include breaking changes and how to upgrade.

## 0.171.0

### What changed?

The GraphQL node now errors when the response includes an error.

### When is action necessary?

If you are using the GraphQL node.

### How to upgrade:

Go to the workflows that use the GraphQL node and adjust them to the new behavior. If you want to continue even on error, you can set "Continue on Fail" to true.
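If a workflow must keep running despite GraphQL errors, enable the node's "Continue on Fail" setting. In an exported workflow JSON this corresponds roughly to a per-node flag, as in the sketch below (the exact serialization is an assumption here; in practice you toggle it in the node's settings in the editor):

```typescript
// Sketch of the relevant fragment of an exported workflow, written as a
// TypeScript object. The continueOnFail flag is an assumption for
// illustration, not confirmed by this diff.
const graphqlNode = {
	name: 'GraphQL',
	type: 'n8n-nodes-base.graphql',
	continueOnFail: true, // keep executing when the response contains errors
	parameters: { /* ... */ },
};
```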
## 0.165.0

### What changed?

The Hive node now correctly rejects invalid SSL certificates when the "Ignore SSL Issues" option is set to False.

-### When is action necassary?
+### When is action necessary?

If you are using a self signed certificate with The Hive.
@@ -28,7 +42,7 @@ If you are using the Hubspot Trigger.

### How to upgrade:

Create an app in HubSpot, use the Client ID, Client Secret, App ID, and the Developer Key, and complete the OAuth2 flow.

## 0.135.0

@@ -59,15 +73,14 @@ const credentials = await this.getCredentials(myNodeCredentials);
Example:

```typescript
const items = this.getInputData();

for (let i = 0; i < items.length; i++) {
	const item = items[i].binary as IBinaryKeyData;
	const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i) as string;
	const binaryData = item[binaryPropertyName] as IBinaryData;
	// Before 0.135.0:
	const binaryDataBuffer = Buffer.from(binaryData.data, BINARY_ENCODING);
	// From 0.135.0:
	const binaryDataBuffer = await this.helpers.getBinaryDataBuffer(i, binaryPropertyName);
}
```

@@ -106,17 +119,21 @@ If you are using `lead:create` with "Company" or "Address", reset the parameters
## 0.118.0

### What changed?

The minimum Node.js version required for n8n is now v14.

### When is action necessary?

If you're using n8n via npm or PM2 or if you're contributing to n8n.

### How to upgrade:

Update the Node.js version to v14 or above.

---

### What changed?

In the Postgres, CrateDB, QuestDB and TimescaleDB nodes the `Execute Query` operation returns the result from all queries executed instead of just one of the results.

### When is action necessary?

@@ -126,6 +143,7 @@ If you use any of the above mentioned nodes with the `Execute Query` operation a
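To make the `Execute Query` change concrete, here is a hedged sketch of the behavioral difference only (plain TypeScript, not n8n's actual node code):

```typescript
// Sketch: before 0.118.0 only one result set came back; from 0.118.0 the
// node returns the rows of every executed statement.
type Row = Record<string, unknown>;

function executeQueries(queries: string[], run: (q: string) => Row[]): Row[] {
	const resultSets = queries.map(run);
	// Before (illustrative): only a single result set was returned.
	// return resultSets[resultSets.length - 1];
	// From 0.118.0: the results of all executed queries are returned.
	return resultSets.flat();
}
```

Downstream nodes that assumed a single result set may therefore see more items than before and should be reviewed.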
## 0.117.0

### What changed?

Removed the "Activation Trigger" node. This node was replaced by two other nodes.

The "Activation Trigger" node was added in version 0.113.0 but was not fully compliant with UX, so we decided to refactor and change it ASAP so that it affects as few users as possible.

@@ -140,7 +158,7 @@ If you use the "Activation Trigger" in any of your workflows, please replace it

Remove the previous node and add the new ones according to your workflows.

-----------------------------
+---

Changed the behavior for nodes that use Postgres Wire Protocol: Postgres, QuestDB, CrateDB and TimescaleDB.

@@ -158,10 +176,10 @@ By default, all `insert` operations will have `Return fields: *` as the default,

Previously, the node would return all information it received, without taking into account what actually happened in the database.

## 0.113.0

### What changed?

In the Dropbox node, both credential types (Access Token & OAuth2) have a new parameter called "APP Access Type".

### When is action necessary?

@@ -175,6 +193,7 @@ Open your Dropbox node's credentials and set the "APP Access Type" parameter to
## 0.111.0

### What changed?

In the Dropbox node, now all operations are performed relative to the user's root directory.

### When is action necessary?

@@ -192,24 +211,29 @@ Also, if you are using the `folder:list` operation, make sure your logic is taki
## 0.105.0

### What changed?

In the Hubspot Trigger, now multiple events can be provided and the field `App ID` was moved to the credentials.

### When is action necessary?

If you are using the Hubspot Trigger node.

### How to upgrade:

Open the Hubspot Trigger and set the events again. Also open the credentials `Hubspot Developer API` and set your APP ID.

## 0.104.0

### What changed?

Support for MongoDB as a database for n8n has been dropped as MongoDB had problems saving large amounts of data in a document, among other issues.

### When is action necessary?

If you have been using MongoDB as a database for n8n. Please note that this is not related to the MongoDB node.

### How to upgrade:

Before upgrading, you can [export](https://docs.n8n.io/reference/start-workflows-via-cli.html#export-workflows-and-credentials) all your credentials and workflows using the CLI.

```
@@ -227,21 +251,26 @@ n8n import:credentials --separate --input=backups/latest/
## 0.102.0

### What changed?

- The `As User` property and the `User Name` field got combined and renamed to `Send as User`. It also got moved under “Add Options”.
- The `Ephemeral` property got removed. To send an ephemeral message, you have to select the "Post (Ephemeral)" operation.

### When is action necessary?

If you are using the following fields or properties in the Slack node:

- As User
- Ephemeral
- User Name

### How to upgrade:

Open the Slack node and set them again to the appropriate values.

-----------------------------
+---

### What changed?

If you have a question in Typeform that uses a previously answered question as part of its text, the question text would look like this in the Typeform Trigger node:

`You have chosen {{field:23234242}} as your answer. Is this correct?`

@@ -251,9 +280,11 @@ Those curly braces broke the expression editor. The change makes it now display
`You have chosen [field:23234242] as your answer. Is this correct?`

### When is action necessary?

If you are using the Typeform Trigger node with questions using the [Recall information](https://help.typeform.com/hc/en-us/articles/360050447072-What-is-Recall-information-) feature.

### How to upgrade:

In workflows using the Typeform Trigger node, nodes that reference such key names (questions that use a previously answered question as part of its text) will need to be updated.
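For illustration, a hedged sketch of automating that update (the helper and regex are assumptions, not part of n8n):

```typescript
// Hypothetical migration helper: rewrites old Typeform key references like
// "{{field:23234242}}" to the new "[field:23234242]" form in an expression.
function migrateTypeformKeys(expression: string): string {
	return expression.replace(/\{\{(field:[^}]+)\}\}/g, '[$1]');
}

// Prints: "You have chosen [field:23234242] as your answer."
console.log(migrateTypeformKeys('You have chosen {{field:23234242}} as your answer.'));
```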
## 0.95.0

@@ -281,11 +312,11 @@ In the Segment Node, we have changed how the properties 'traits' and 'properties
When the properties 'traits' or 'properties' are set, and one of the following resources/operations is used:

| Resource | Operation |
| -------- | --------- |
| Identify | Create    |
| Track    | Event     |
| Track    | Page      |
| Group    | Add       |

### How to upgrade:
@@ -305,7 +336,6 @@ If you had set "Basic Auth" for the "Authentication" field in the node.

The "Authentication" field has been renamed to "Incoming Authentication". Please set the parameter “Incoming Authentication” to “Basic Auth” to activate it again.

## 0.90.0

### What changed?

@@ -320,7 +350,6 @@ If you are running a Node.js version older than 12.9.

You can download and install the latest version of Node.js from [here](https://nodejs.org/en/download/).

## 0.87.0

### What changed?

@@ -335,7 +364,6 @@ If you are actively using the link.fish node.

Unfortunately, that's not possible. We'd recommend you look for an alternative service.

## 0.83.0

### What changed?

@@ -346,13 +374,13 @@ In the Active Campaign Node, we have changed how the `getAll` operation works wi

When one of the following resources/operations is used:

| Resource                   | Operation |
| -------------------------- | --------- |
| Deal                       | Get All   |
| Connector                  | Get All   |
| E-commerce Order           | Get All   |
| E-commerce Customer        | Get All   |
| E-commerce Order Products  | Get All   |

### How to upgrade:

@@ -393,7 +421,6 @@ If you have used the Attachments option in your Twitter nodes.

You'll need to re-create the attachments for the Twitter node.

## 0.68.0

### What changed?
@@ -413,36 +440,39 @@ All values that get referenced which were before under the property "channel" ar
This means that these expressions have to get adjusted.

Meaning if the expression used before was:

```
{{ $node["Slack"].data["channel"]["id"] }}
```

it has to get changed to:

```
{{ $node["Slack"].data["id"] }}
```

## 0.67.0

### What changed?

The names of the following nodes were not set correctly and got fixed:

- AMQP Sender
- Bitbucket-Trigger
- Coda
- Eventbrite-Trigger
- Flow
- Flow-Trigger
- Gumroad-Trigger
- Jira
- Mailchimp-Trigger
- PayPal Trigger
- Read PDF
- Rocketchat
- Shopify
- Shopify-Trigger
- Stripe-Trigger
- Toggl-Trigger

### When is action necessary?
@@ -454,32 +484,32 @@ For the nodes mentioned above, you'll need to give them access to the credential

**Simple**

- Note down the settings of the nodes before upgrading
- After upgrading, delete the nodes mentioned above from your workflow, and recreate them

**Advanced**

After upgrading, select the whole workflow in the editor, copy it, and paste it into a text editor. In the JSON, change the node types manually by replacing the values for "type" as follows (a scripted version of this replacement is sketched after this list):

- "n8n-nodes-base.amqpSender" -> "n8n-nodes-base.amqp"
- "n8n-nodes-base.bitbucket" -> "n8n-nodes-base.bitbucketTrigger"
- "n8n-nodes-base.Coda" -> "n8n-nodes-base.coda"
- "n8n-nodes-base.eventbrite" -> "n8n-nodes-base.eventbriteTrigger"
- "n8n-nodes-base.Flow" -> "n8n-nodes-base.flow"
- "n8n-nodes-base.flow" -> "n8n-nodes-base.flowTrigger"
- "n8n-nodes-base.gumroad" -> "n8n-nodes-base.gumroadTrigger"
- "n8n-nodes-base.Jira Software Cloud" -> "n8n-nodes-base.jira"
- "n8n-nodes-base.Mailchimp" -> "n8n-nodes-base.mailchimpTrigger"
- "n8n-nodes-base.PayPal" -> "n8n-nodes-base.payPalTrigger"
- "n8n-nodes-base.Read PDF" -> "n8n-nodes-base.readPDF"
- "n8n-nodes-base.Rocketchat" -> "n8n-nodes-base.rocketchat"
- "n8n-nodes-base.shopify" -> "n8n-nodes-base.shopifyTrigger"
- "n8n-nodes-base.shopifyNode" -> "n8n-nodes-base.shopify"
- "n8n-nodes-base.stripe" -> "n8n-nodes-base.stripeTrigger"
- "n8n-nodes-base.toggl" -> "n8n-nodes-base.togglTrigger"

Then delete all existing nodes, and then paste the changed JSON directly into n8n. It should then recreate all the nodes and connections again, this time with working nodes.
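Here is that scripted version: a hedged sketch that applies the mapping to an exported workflow file (the script, the file name, and the workflow JSON shape are assumptions for illustration):

```typescript
// Hypothetical migration script for the 0.67.0 node-type renames: rewrites
// each node's "type" using the mapping above and prints the updated JSON.
import { readFileSync } from 'fs';

const TYPE_RENAMES: Record<string, string> = {
	'n8n-nodes-base.amqpSender': 'n8n-nodes-base.amqp',
	'n8n-nodes-base.bitbucket': 'n8n-nodes-base.bitbucketTrigger',
	'n8n-nodes-base.Coda': 'n8n-nodes-base.coda',
	// ...remaining entries from the list above...
};

const workflow = JSON.parse(readFileSync('workflow.json', 'utf8'));
for (const node of workflow.nodes ?? []) {
	// A single lookup per node avoids the chained-replacement pitfall
	// (e.g. "Flow" -> "flow" must not then re-trigger "flow" -> "flowTrigger").
	const renamed = TYPE_RENAMES[node.type];
	if (renamed) node.type = renamed;
}
console.log(JSON.stringify(workflow, null, 2));
```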
## 0.62.0

### What changed?

@@ -496,7 +526,6 @@ If "evaluateExpression(...)" gets used in any Function or FunctionItem Node.

Simply replace the "evaluateExpression(...)" with "$evaluateExpression(...)".

## 0.52.0

### What changed?

@@ -517,7 +546,6 @@ Open the "Date & Time"-Nodes and reference the date that should be converted
via an expression. Also, set the "Property Name" to the name of the property the
converted date should be set on.

## 0.37.0

### What changed?

@@ -534,7 +562,6 @@ When you currently use the Rocketchat-Node.
Open the Rocketchat credentials and fill in the parameter `domain`. If you previously had the
subdomain "example" set, you now have to set "https://example.rocket.chat".

## 0.19.0

### What changed?

@@ -556,9 +583,7 @@ it and paste it in a text-editor, it will display all the data the node
contained). Then set the "Response Format" to "File". Everything will then
function again like before.

-----------------------------
+---

### What changed?

@@ -576,7 +601,6 @@ When "HTTP Request" nodes get used which have "Response Format" set to "String".
After upgrading, open all workflows which contain the concerning nodes and set
"Binary Property" to "response".

## 0.18.0

### What changed?

@@ -591,8 +615,7 @@ When Webhook-Nodes get used which have "Response Mode" set to "Last Node".

After upgrading, open all workflows which contain the concerning Webhook-Nodes and set "Response Mode" again manually to "Last Node".

-----------------------------
+---

### What changed?

@@ -603,6 +626,7 @@ packages with security vulnerabilities we had to switch to a different one.

When you currently start n8n in your setup directly via its JavaScript file.
For example like this:

```
/usr/local/bin/node ./dist/index.js start
```

@@ -610,6 +634,7 @@ For example like this:
### How to upgrade:

Change the path to its new location:

```
/usr/local/bin/node bin/n8n start
```
LICENSE.md (a second copy of the same license file in the repository)

@@ -12,7 +12,7 @@ The licensor grants you a non-exclusive, royalty-free, worldwide, non-sublicensa
## Limitations

-You may use or modify the software only for your own internal business purposes or for non-commercial or personal use
+You may use or modify the software only for your own internal business purposes or for non-commercial or personal use.
You may distribute the software or provide it to others only if you do so free of charge for non-commercial purposes.
You may not alter, remove, or obscure any licensing, copyright, or other notices of the licensor in the software. Any use of the licensor’s trademarks is subject to applicable law.
commands/execute.ts

@@ -2,13 +2,7 @@
/* eslint-disable no-console */
import { promises as fs } from 'fs';
import { Command, flags } from '@oclif/command';
-import {
-	BinaryDataManager,
-	IBinaryDataConfig,
-	UserSettings,
-	PLACEHOLDER_EMPTY_WORKFLOW_ID,
-} from 'n8n-core';
+import { BinaryDataManager, UserSettings, PLACEHOLDER_EMPTY_WORKFLOW_ID } from 'n8n-core';
import { INode, LoggerProxy } from 'n8n-workflow';

import {

@@ -27,7 +22,7 @@ import {
} from '../src';

import { getLogger } from '../src/Logger';
-import config = require('../config');
+import config from '../config';
import { getInstanceOwner } from '../src/UserManagement/UserManagementHelper';

export class Execute extends Command {

@@ -52,7 +47,7 @@ export class Execute extends Command {
	async run() {
		const logger = getLogger();
		LoggerProxy.init(logger);
-		const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+		const binaryDataConfig = config.getEnv('binaryDataManager');
		await BinaryDataManager.init(binaryDataConfig, true);

		// eslint-disable-next-line @typescript-eslint/no-shadow
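A pattern repeated across these command files is replacing `config.get('key') as T` with `config.getEnv('key')`, which implies the config object gained a schema-typed accessor. A minimal sketch of the idea follows; the wrapper and its key-to-type map are assumptions for illustration, not n8n's actual implementation:

```typescript
// Sketch: a typed accessor over convict so call sites need no "as" casts.
import convict from 'convict';

const config = convict({
	port: { format: Number, default: 5678, env: 'N8N_PORT' },
});

// Hypothetical map from config paths to their value types.
interface ConfigTypes {
	port: number;
	// ...one entry per schema path, e.g. 'queue.health.active': boolean
}

function getEnv<K extends keyof ConfigTypes>(key: K): ConfigTypes[K] {
	// convict's own get() is stringly typed; the wrapper narrows it per key.
	return config.get(key as string) as ConfigTypes[K];
}

const port = getEnv('port'); // inferred as number, no cast at the call site
```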
commands/executeBatch.ts

@@ -6,10 +6,10 @@
/* eslint-disable no-param-reassign */
/* eslint-disable @typescript-eslint/unbound-method */
/* eslint-disable no-console */
-import * as fs from 'fs';
+import fs from 'fs';
import { Command, flags } from '@oclif/command';

-import { BinaryDataManager, IBinaryDataConfig, UserSettings } from 'n8n-core';
+import { BinaryDataManager, UserSettings } from 'n8n-core';

// eslint-disable-next-line @typescript-eslint/no-unused-vars
import { INode, ITaskData, LoggerProxy } from 'n8n-workflow';

@@ -36,7 +36,7 @@ import {
	NodeTypes,
	WorkflowRunner,
} from '../src';
-import config = require('../config');
+import config from '../config';
import { User } from '../src/databases/entities/User';
import { getInstanceOwner } from '../src/UserManagement/UserManagementHelper';

@@ -196,7 +196,7 @@ export class ExecuteBatch extends Command {

		const logger = getLogger();
		LoggerProxy.init(logger);
-		const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+		const binaryDataConfig = config.getEnv('binaryDataManager');
		await BinaryDataManager.init(binaryDataConfig, true);

		// eslint-disable-next-line @typescript-eslint/no-shadow
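The recurring import rewrites (`import * as fs from 'fs'` to `import fs from 'fs'`, and `import config = require(...)` to `import config from ...`) are consistent with a switch to esModuleInterop-style default imports. The tsconfig change itself is not part of this diff, so treat that as an assumption. In miniature:

```typescript
// With "esModuleInterop": true (assumed), CommonJS modules can be consumed
// through default imports instead of namespace imports:
import fs from 'fs'; // new style used throughout this commit
// import * as fs from 'fs'; // old style being replaced

// Usage is unchanged either way:
const names = fs.readdirSync('.');
console.log(names.length);
```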
commands/export/credentials.ts

@@ -7,8 +7,8 @@ import { Credentials, UserSettings } from 'n8n-core';

import { IDataObject, LoggerProxy } from 'n8n-workflow';

-import * as fs from 'fs';
-import * as path from 'path';
+import fs from 'fs';
+import path from 'path';
import { getLogger } from '../../src/Logger';
import { Db, ICredentialsDecryptedDb } from '../../src';

commands/export/workflow.ts

@@ -4,8 +4,8 @@ import { Command, flags } from '@oclif/command';

import { IDataObject, LoggerProxy } from 'n8n-workflow';

-import * as fs from 'fs';
-import * as path from 'path';
+import fs from 'fs';
+import path from 'path';
import { getLogger } from '../../src/Logger';
import { Db } from '../../src';

commands/import/credentials.ts

@@ -12,9 +12,9 @@ import { Credentials, UserSettings } from 'n8n-core';

import { LoggerProxy } from 'n8n-workflow';

-import * as fs from 'fs';
-import * as glob from 'fast-glob';
-import * as path from 'path';
+import fs from 'fs';
+import glob from 'fast-glob';
+import path from 'path';
import { EntityManager, getConnection } from 'typeorm';
import { getLogger } from '../../src/Logger';
import { Db } from '../../src';

@@ -146,7 +146,7 @@ export class ImportCredentialsCommand extends Command {
	}

	private reportSuccess(total: number) {
-		console.info(`Successfully imported ${total} ${total === 1 ? 'workflow.' : 'workflows.'}`);
+		console.info(`Successfully imported ${total} ${total === 1 ? 'credential.' : 'credentials.'}`);
	}

	private async initOwnerCredentialRole() {

commands/import/workflow.ts

@@ -12,8 +12,8 @@ import { Command, flags } from '@oclif/command';

import { INode, INodeCredentialsDetails, LoggerProxy } from 'n8n-workflow';

-import * as fs from 'fs';
-import * as glob from 'fast-glob';
+import fs from 'fs';
+import glob from 'fast-glob';
import { UserSettings } from 'n8n-core';
import { EntityManager, getConnection } from 'typeorm';
import { getLogger } from '../../src/Logger';

commands/start.ts

@@ -6,15 +6,15 @@
/* eslint-disable no-console */
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
-import * as localtunnel from 'localtunnel';
-import { BinaryDataManager, IBinaryDataConfig, TUNNEL_SUBDOMAIN_ENV, UserSettings } from 'n8n-core';
+import localtunnel from 'localtunnel';
+import { BinaryDataManager, TUNNEL_SUBDOMAIN_ENV, UserSettings } from 'n8n-core';
import { Command, flags } from '@oclif/command';
// eslint-disable-next-line import/no-extraneous-dependencies
-import * as Redis from 'ioredis';
+import Redis from 'ioredis';

import { IDataObject, LoggerProxy } from 'n8n-workflow';
import { createHash } from 'crypto';
-import * as config from '../config';
+import config from '../config';
import {
	ActiveExecutions,
	ActiveWorkflowRunner,

@@ -100,9 +100,9 @@ export class Start extends Command {

		await InternalHooksManager.getInstance().onN8nStop();

-		const skipWebhookDeregistration = config.get(
+		const skipWebhookDeregistration = config.getEnv(
			'endpoints.skipWebhoooksDeregistrationOnShutdown',
-		) as boolean;
+		);

		const removePromises = [];
		if (activeWorkflowRunner !== undefined && !skipWebhookDeregistration) {

@@ -169,7 +169,7 @@ export class Start extends Command {
		// Make sure the settings exist
		const userSettings = await UserSettings.prepareUserSettings();

-		if (!config.get('userManagement.jwtSecret')) {
+		if (!config.getEnv('userManagement.jwtSecret')) {
			// If we don't have a JWT secret set, generate
			// one based and save to config.
			const encryptionKey = await UserSettings.getEncryptionKey();

@@ -222,12 +222,12 @@ export class Start extends Command {
			config.set(setting.key, JSON.parse(setting.value));
		});

-		if (config.get('executions.mode') === 'queue') {
-			const redisHost = config.get('queue.bull.redis.host');
-			const redisPassword = config.get('queue.bull.redis.password');
-			const redisPort = config.get('queue.bull.redis.port');
-			const redisDB = config.get('queue.bull.redis.db');
-			const redisConnectionTimeoutLimit = config.get('queue.bull.redis.timeoutThreshold');
+		if (config.getEnv('executions.mode') === 'queue') {
+			const redisHost = config.getEnv('queue.bull.redis.host');
+			const redisPassword = config.getEnv('queue.bull.redis.password');
+			const redisPort = config.getEnv('queue.bull.redis.port');
+			const redisDB = config.getEnv('queue.bull.redis.db');
+			const redisConnectionTimeoutLimit = config.getEnv('queue.bull.redis.timeoutThreshold');
			let lastTimer = 0;
			let cumulativeTimeout = 0;

@@ -285,7 +285,7 @@ export class Start extends Command {
		const dbType = (await GenericHelpers.getConfigValue('database.type')) as DatabaseType;

		if (dbType === 'sqlite') {
-			const shouldRunVacuum = config.get('database.sqlite.executeVacuumOnStartup') as number;
+			const shouldRunVacuum = config.getEnv('database.sqlite.executeVacuumOnStartup');
			if (shouldRunVacuum) {
				// eslint-disable-next-line @typescript-eslint/no-floating-promises, @typescript-eslint/no-non-null-assertion
				await Db.collections.Execution!.query('VACUUM;');

@@ -324,7 +324,7 @@ export class Start extends Command {
			subdomain: tunnelSubdomain,
		};

-		const port = config.get('port');
+		const port = config.getEnv('port');

		// @ts-ignore
		const webhookTunnel = await localtunnel(port, tunnelSettings);

@@ -340,7 +340,7 @@ export class Start extends Command {
		const { cli } = await GenericHelpers.getVersions();
		InternalHooksManager.init(instanceId, cli, nodeTypes);

-		const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+		const binaryDataConfig = config.getEnv('binaryDataManager');
		await BinaryDataManager.init(binaryDataConfig, true);

		await Server.start();

@@ -354,7 +354,7 @@ export class Start extends Command {
		const editorUrl = GenericHelpers.getBaseUrl();
		this.log(`\nEditor is now accessible via:\n${editorUrl}`);

-		const saveManualExecutions = config.get('executions.saveDataManualExecutions') as boolean;
+		const saveManualExecutions = config.getEnv('executions.saveDataManualExecutions');

		if (saveManualExecutions) {
			this.log('\nManual executions will be visible only for the owner');
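The queue-mode block above reads `queue.bull.redis.timeoutThreshold` and tracks `lastTimer` / `cumulativeTimeout`; the diff elides what those drive. As a generic reconstruction of that kind of guard (an assumption, not n8n's exact logic):

```typescript
// Sketch: give up on Redis reconnection once errors have accumulated past
// the configured threshold without a long enough quiet gap between them.
const redisConnectionTimeoutLimit = 10_000; // ms; from queue.bull.redis.timeoutThreshold

let lastTimer = 0;
let cumulativeTimeout = 0;

function onRedisError(now: number): void {
	if (lastTimer !== 0 && now - lastTimer < 30_000) {
		// Errors arriving close together: accumulate (the 30s window is assumed).
		cumulativeTimeout += now - lastTimer;
		if (cumulativeTimeout > redisConnectionTimeoutLimit) {
			throw new Error('Unable to connect to Redis within the timeout threshold.');
		}
	} else {
		cumulativeTimeout = 0; // a quiet period passed; reset the accumulator
	}
	lastTimer = now;
}
```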
commands/webhook.ts

@@ -3,13 +3,13 @@
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
/* eslint-disable @typescript-eslint/unbound-method */
-import { BinaryDataManager, IBinaryDataConfig, UserSettings } from 'n8n-core';
+import { BinaryDataManager, UserSettings } from 'n8n-core';
import { Command, flags } from '@oclif/command';
// eslint-disable-next-line import/no-extraneous-dependencies
-import * as Redis from 'ioredis';
+import Redis from 'ioredis';

import { IDataObject, LoggerProxy } from 'n8n-workflow';
-import * as config from '../config';
+import config from '../config';
import {
	ActiveExecutions,
	ActiveWorkflowRunner,

@@ -95,7 +95,7 @@ export class Webhook extends Command {

		// Wrap that the process does not close but we can still use async
		await (async () => {
-			if (config.get('executions.mode') !== 'queue') {
+			if (config.getEnv('executions.mode') !== 'queue') {
				/**
				 * It is technically possible to run without queues but
				 * there are 2 known bugs when running in this mode:

@@ -152,15 +152,15 @@ export class Webhook extends Command {
		const { cli } = await GenericHelpers.getVersions();
		InternalHooksManager.init(instanceId, cli, nodeTypes);

-		const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+		const binaryDataConfig = config.getEnv('binaryDataManager');
		await BinaryDataManager.init(binaryDataConfig);

-		if (config.get('executions.mode') === 'queue') {
-			const redisHost = config.get('queue.bull.redis.host');
-			const redisPassword = config.get('queue.bull.redis.password');
-			const redisPort = config.get('queue.bull.redis.port');
-			const redisDB = config.get('queue.bull.redis.db');
-			const redisConnectionTimeoutLimit = config.get('queue.bull.redis.timeoutThreshold');
+		if (config.getEnv('executions.mode') === 'queue') {
+			const redisHost = config.getEnv('queue.bull.redis.host');
+			const redisPassword = config.getEnv('queue.bull.redis.password');
+			const redisPort = config.getEnv('queue.bull.redis.port');
+			const redisDB = config.getEnv('queue.bull.redis.db');
+			const redisConnectionTimeoutLimit = config.getEnv('queue.bull.redis.timeoutThreshold');
			let lastTimer = 0;
			let cumulativeTimeout = 0;
commands/worker.ts

@@ -7,9 +7,9 @@
/* eslint-disable @typescript-eslint/restrict-template-expressions */
/* eslint-disable @typescript-eslint/no-unused-vars */
// eslint-disable-next-line import/no-extraneous-dependencies
-import * as express from 'express';
-import * as http from 'http';
-import * as PCancelable from 'p-cancelable';
+import express from 'express';
+import http from 'http';
+import PCancelable from 'p-cancelable';

import { Command, flags } from '@oclif/command';
import { BinaryDataManager, IBinaryDataConfig, UserSettings, WorkflowExecute } from 'n8n-core';

@@ -18,7 +18,7 @@ import { IExecuteResponsePromiseData, INodeTypes, IRun, Workflow, LoggerProxy }

import { FindOneOptions, getConnectionManager } from 'typeorm';

-import * as Bull from 'bull';
+import Bull from 'bull';
import {
	CredentialsOverwrites,
	CredentialTypes,

@@ -39,7 +39,7 @@ import {

import { getLogger } from '../src/Logger';

-import * as config from '../config';
+import config from '../config';
import * as Queue from '../src/Queue';
import {
	checkPermissionsForExecution,

@@ -119,9 +119,14 @@ export class Worker extends Command {

	async runJob(job: Bull.Job, nodeTypes: INodeTypes): Promise<IBullJobResponse> {
		const jobData = job.data as IBullJobData;
-		const executionDb = (await Db.collections.Execution!.findOne(
-			jobData.executionId,
-		)) as IExecutionFlattedDb;
+		const executionDb = await Db.collections.Execution!.findOne(jobData.executionId);
+
+		if (!executionDb) {
+			LoggerProxy.error('Worker failed to find execution data in database. Cannot continue.', {
+				executionId: jobData.executionId,
+			});
+			throw new Error('Unable to find execution data in database. Aborting execution.');
+		}
		const currentExecutionDb = ResponseHelper.unflattenExecutionData(executionDb);
		LoggerProxy.info(
			`Start job: ${job.id} (Workflow ID: ${currentExecutionDb.workflowData.id} | Execution: ${jobData.executionId})`,

@@ -139,6 +144,13 @@ export class Worker extends Command {
			findOptions,
		);
+		if (workflowData === undefined) {
+			LoggerProxy.error(
+				'Worker execution failed because workflow could not be found in database.',
+				{
+					workflowId: currentExecutionDb.workflowData.id,
+					executionId: jobData.executionId,
+				},
+			);
+			throw new Error(
+				`The workflow with the ID "${currentExecutionDb.workflowData.id}" could not be found`,
+			);

@@ -146,7 +158,7 @@ export class Worker extends Command {
			staticData = workflowData.staticData;
		}

-		let workflowTimeout = config.get('executions.timeout') as number; // initialize with default
+		let workflowTimeout = config.getEnv('executions.timeout'); // initialize with default
		if (
			// eslint-disable-next-line @typescript-eslint/prefer-optional-chain
			currentExecutionDb.workflowData.settings &&

@@ -157,7 +169,7 @@ export class Worker extends Command {

		let executionTimeoutTimestamp: number | undefined;
		if (workflowTimeout > 0) {
-			workflowTimeout = Math.min(workflowTimeout, config.get('executions.maxTimeout') as number);
+			workflowTimeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout'));
			executionTimeoutTimestamp = Date.now() + workflowTimeout * 1000;
		}

@@ -276,7 +288,7 @@ export class Worker extends Command {
		await startDbInitPromise;

		// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
-		const redisConnectionTimeoutLimit = config.get('queue.bull.redis.timeoutThreshold');
+		const redisConnectionTimeoutLimit = config.getEnv('queue.bull.redis.timeoutThreshold');

		Worker.jobQueue = Queue.getInstance().getBullObjectInstance();
		// eslint-disable-next-line @typescript-eslint/no-floating-promises

@@ -287,7 +299,7 @@ export class Worker extends Command {

		InternalHooksManager.init(instanceId, versions.cli, nodeTypes);

-		const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+		const binaryDataConfig = config.getEnv('binaryDataManager');
		await BinaryDataManager.init(binaryDataConfig);

		console.info('\nn8n worker is now ready');

@@ -340,8 +352,8 @@ export class Worker extends Command {
			}
		});

-		if (config.get('queue.health.active')) {
-			const port = config.get('queue.health.port') as number;
+		if (config.getEnv('queue.health.active')) {
+			const port = config.getEnv('queue.health.port');

			const app = express();
			const server = http.createServer(app);
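One pattern in these worker changes is worth calling out: a blind `as` cast on `findOne`'s possibly-undefined result is replaced by an explicit check that fails loudly. In miniature (a sketch with a generic repository shape, not n8n's types):

```typescript
// Sketch: fail with a clear error when a looked-up record is missing,
// instead of casting away the undefined and crashing later.
interface Repo<T> {
	findOne(id: string): Promise<T | undefined>;
}

async function loadOrThrow<T>(repo: Repo<T>, id: string): Promise<T> {
	const record = await repo.findOne(id);
	if (!record) {
		throw new Error(`Unable to find record "${id}" in database. Aborting.`);
	}
	return record; // typed as T; no cast needed
}
```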
@ -1,899 +1,14 @@
|
|||
/* eslint-disable @typescript-eslint/unbound-method */
|
||||
/* eslint-disable no-console */
|
||||
/* eslint-disable no-restricted-syntax */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
|
||||
import * as convict from 'convict';
|
||||
import * as dotenv from 'dotenv';
|
||||
import * as path from 'path';
|
||||
import * as core from 'n8n-core';
|
||||
import convict from 'convict';
|
||||
import dotenv from 'dotenv';
|
||||
import { schema } from './schema';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
const config = convict({
|
||||
database: {
|
||||
type: {
|
||||
doc: 'Type of database to use',
|
||||
format: ['sqlite', 'mariadb', 'mysqldb', 'postgresdb'],
|
||||
default: 'sqlite',
|
||||
env: 'DB_TYPE',
|
||||
},
|
||||
tablePrefix: {
|
||||
doc: 'Prefix for table names',
|
||||
format: '*',
|
||||
default: '',
|
||||
env: 'DB_TABLE_PREFIX',
|
||||
},
|
||||
logging: {
|
||||
enabled: {
|
||||
doc: 'Typeorm logging enabled flag.',
|
||||
format: 'Boolean',
|
||||
default: false,
|
||||
env: 'DB_LOGGING_ENABLED',
|
||||
},
|
||||
options: {
|
||||
doc: 'Logging level options, default is "error". Possible values: query,error,schema,warn,info,log. To enable all logging, specify "all"',
|
||||
format: String,
|
||||
default: 'error',
|
||||
env: 'DB_LOGGING_OPTIONS',
|
||||
},
|
||||
maxQueryExecutionTime: {
|
||||
doc: 'Maximum number of milliseconds query should be executed before logger logs a warning. Set 0 to disable long running query warning',
|
||||
format: Number,
|
||||
default: 1000,
|
||||
env: 'DB_LOGGING_MAX_EXECUTION_TIME',
|
||||
},
|
||||
},
|
||||
postgresdb: {
|
||||
database: {
|
||||
doc: 'PostgresDB Database',
|
||||
format: String,
|
||||
default: 'n8n',
|
||||
env: 'DB_POSTGRESDB_DATABASE',
|
||||
},
|
||||
host: {
|
||||
doc: 'PostgresDB Host',
|
||||
format: String,
|
||||
default: 'localhost',
|
||||
env: 'DB_POSTGRESDB_HOST',
|
||||
},
|
||||
password: {
|
||||
doc: 'PostgresDB Password',
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'DB_POSTGRESDB_PASSWORD',
|
||||
},
|
||||
port: {
|
||||
doc: 'PostgresDB Port',
|
||||
format: Number,
|
||||
default: 5432,
|
||||
env: 'DB_POSTGRESDB_PORT',
|
||||
},
|
||||
user: {
|
||||
doc: 'PostgresDB User',
|
||||
format: String,
|
||||
default: 'root',
|
||||
env: 'DB_POSTGRESDB_USER',
|
||||
},
|
||||
schema: {
|
||||
doc: 'PostgresDB Schema',
|
||||
format: String,
|
||||
default: 'public',
|
||||
env: 'DB_POSTGRESDB_SCHEMA',
|
||||
},
|
||||
const config = convict(schema);
|
||||
|
||||
ssl: {
|
||||
ca: {
|
||||
doc: 'SSL certificate authority',
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'DB_POSTGRESDB_SSL_CA',
|
||||
},
|
||||
cert: {
|
||||
doc: 'SSL certificate',
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'DB_POSTGRESDB_SSL_CERT',
|
||||
},
|
||||
key: {
|
||||
doc: 'SSL key',
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'DB_POSTGRESDB_SSL_KEY',
|
||||
},
|
||||
rejectUnauthorized: {
|
||||
doc: 'If unauthorized SSL connections should be rejected',
|
||||
format: 'Boolean',
|
||||
default: true,
|
||||
env: 'DB_POSTGRESDB_SSL_REJECT_UNAUTHORIZED',
|
||||
},
|
||||
},
|
||||
},
|
||||
mysqldb: {
|
||||
database: {
|
||||
doc: 'MySQL Database',
|
||||
format: String,
|
||||
default: 'n8n',
|
||||
env: 'DB_MYSQLDB_DATABASE',
|
||||
},
|
||||
host: {
|
||||
doc: 'MySQL Host',
|
||||
format: String,
|
||||
default: 'localhost',
|
||||
env: 'DB_MYSQLDB_HOST',
|
||||
},
|
||||
password: {
|
||||
doc: 'MySQL Password',
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'DB_MYSQLDB_PASSWORD',
|
||||
},
|
||||
port: {
|
||||
doc: 'MySQL Port',
|
||||
format: Number,
|
||||
default: 3306,
|
||||
env: 'DB_MYSQLDB_PORT',
|
||||
},
|
||||
user: {
|
||||
doc: 'MySQL User',
|
||||
format: String,
|
||||
default: 'root',
|
||||
env: 'DB_MYSQLDB_USER',
|
||||
},
|
||||
},
|
||||
sqlite: {
|
||||
executeVacuumOnStartup: {
|
||||
doc: 'Runs VACUUM operation on startup to rebuild the database. Reduces filesize and optimizes indexes. WARNING: This is a long running blocking operation. Will increase start-up time.',
|
||||
format: Boolean,
|
||||
default: false,
|
||||
env: 'DB_SQLITE_VACUUM_ON_STARTUP',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
credentials: {
|
||||
overwrite: {
|
||||
data: {
|
||||
// Allows to set default values for credentials which
|
||||
// get automatically prefilled and the user does not get
|
||||
// displayed and can not change.
|
||||
// Format: { CREDENTIAL_NAME: { PARAMTER: VALUE }}
|
||||
doc: 'Overwrites for credentials',
|
||||
format: '*',
|
||||
default: '{}',
|
||||
env: 'CREDENTIALS_OVERWRITE_DATA',
|
||||
},
|
||||
endpoint: {
|
||||
doc: 'Fetch credentials from API',
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'CREDENTIALS_OVERWRITE_ENDPOINT',
|
||||
},
|
||||
},
|
||||
defaultName: {
|
||||
doc: 'Default name for credentials',
|
||||
format: String,
|
||||
default: 'My credentials',
|
||||
env: 'CREDENTIALS_DEFAULT_NAME',
|
||||
},
|
||||
},
|
||||
|
||||
workflows: {
|
||||
defaultName: {
|
||||
doc: 'Default name for workflow',
|
||||
format: String,
|
||||
default: 'My workflow',
|
||||
env: 'WORKFLOWS_DEFAULT_NAME',
|
||||
},
|
||||
},
|
||||
|
||||
executions: {
|
||||
// By default workflows get always executed in their own process.
|
||||
// If this option gets set to "main" it will run them in the
|
||||
// main-process instead.
|
||||
process: {
|
||||
doc: 'In what process workflows should be executed',
|
||||
format: ['main', 'own'],
|
||||
default: 'own',
|
||||
env: 'EXECUTIONS_PROCESS',
|
||||
},
|
||||
|
||||
mode: {
|
||||
doc: 'If it should run executions directly or via queue',
|
||||
format: ['regular', 'queue'],
|
||||
default: 'regular',
|
||||
env: 'EXECUTIONS_MODE',
|
||||
},
|
||||
|
||||
// A Workflow times out and gets canceled after this time (seconds).
|
||||
// If the workflow is executed in the main process a soft timeout
|
||||
// is executed (takes effect after the current node finishes).
|
||||
// If a workflow is running in its own process is a soft timeout
|
||||
// tried first, before killing the process after waiting for an
|
||||
// additional fifth of the given timeout duration.
|
||||
//
|
||||
// To deactivate timeout set it to -1
|
||||
//
|
||||
// Timeout is currently not activated by default which will change
|
||||
// in a future version.
|
||||
timeout: {
|
||||
doc: 'Max run time (seconds) before stopping the workflow execution',
|
||||
format: Number,
|
||||
default: -1,
|
||||
env: 'EXECUTIONS_TIMEOUT',
|
||||
},
|
||||
maxTimeout: {
|
||||
doc: 'Max execution time (seconds) that can be set for a workflow individually',
|
||||
format: Number,
|
||||
default: 3600,
|
||||
env: 'EXECUTIONS_TIMEOUT_MAX',
|
||||
},
|
||||
|
||||
// If a workflow executes all the data gets saved by default. This
|
||||
// could be a problem when a workflow gets executed a lot and processes
|
||||
// a lot of data. To not exceed the database's capacity it is possible to
|
||||
// prune the database regularly or to not save the execution at all.
|
||||
// Depending on if the execution did succeed or error a different
|
||||
// save behaviour can be set.
|
||||
saveDataOnError: {
|
||||
doc: 'What workflow execution data to save on error',
|
||||
format: ['all', 'none'],
|
||||
default: 'all',
|
||||
env: 'EXECUTIONS_DATA_SAVE_ON_ERROR',
|
||||
},
|
||||
saveDataOnSuccess: {
|
||||
doc: 'What workflow execution data to save on success',
|
||||
format: ['all', 'none'],
|
||||
default: 'all',
|
||||
env: 'EXECUTIONS_DATA_SAVE_ON_SUCCESS',
|
||||
},
|
||||
saveExecutionProgress: {
|
||||
doc: 'Wether or not to save progress for each node executed',
|
||||
format: 'Boolean',
|
||||
default: false,
|
||||
env: 'EXECUTIONS_DATA_SAVE_ON_PROGRESS',
|
||||
},
|
||||
|
||||
// If the executions of workflows which got started via the editor
|
||||
// should be saved. By default they will not be saved as this runs
|
||||
// are normally only for testing and debugging. This setting can
|
||||
// also be overwritten on a per workflow basis in the workflow settings
|
||||
// in the editor.
|
||||
saveDataManualExecutions: {
|
||||
doc: 'Save data of executions when started manually via editor',
|
||||
format: 'Boolean',
|
||||
default: false,
|
||||
env: 'EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS',
|
||||
},
|
||||
|
||||
// To not exceed the database's capacity and keep its size moderate
|
||||
// the execution data gets pruned regularly (default: 1 hour interval).
|
||||
// All saved execution data older than the max age will be deleted.
|
||||
// Pruning is currently not activated by default, which will change in
|
||||
// a future version.
|
||||
pruneData: {
|
||||
doc: 'Delete data of past executions on a rolling basis',
|
||||
format: 'Boolean',
|
||||
default: false,
|
||||
env: 'EXECUTIONS_DATA_PRUNE',
|
||||
},
|
||||
pruneDataMaxAge: {
|
||||
doc: 'How old (hours) the execution data has to be to get deleted',
|
||||
format: Number,
|
||||
default: 336,
|
||||
env: 'EXECUTIONS_DATA_MAX_AGE',
|
||||
},
|
||||
pruneDataTimeout: {
|
||||
doc: 'Timeout (seconds) after execution data has been pruned',
|
||||
format: Number,
|
||||
default: 3600,
|
||||
env: 'EXECUTIONS_DATA_PRUNE_TIMEOUT',
|
||||
},
|
||||
},
|
||||
|
||||
queue: {
|
||||
health: {
|
||||
active: {
|
||||
doc: 'If health checks should be enabled',
|
||||
format: 'Boolean',
|
||||
default: false,
|
||||
env: 'QUEUE_HEALTH_CHECK_ACTIVE',
|
||||
},
|
||||
port: {
|
||||
doc: 'Port to serve health check on if activated',
|
||||
format: Number,
|
||||
default: 5678,
|
||||
env: 'QUEUE_HEALTH_CHECK_PORT',
|
||||
},
|
||||
},
|
||||
bull: {
|
||||
prefix: {
|
||||
doc: 'Prefix for all queue keys',
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'QUEUE_BULL_PREFIX',
|
||||
},
|
||||
redis: {
|
||||
db: {
|
||||
doc: 'Redis DB',
|
||||
format: Number,
|
||||
default: 0,
|
||||
env: 'QUEUE_BULL_REDIS_DB',
|
||||
},
|
||||
host: {
|
||||
doc: 'Redis Host',
|
||||
format: String,
|
||||
default: 'localhost',
|
||||
env: 'QUEUE_BULL_REDIS_HOST',
|
||||
},
|
||||
password: {
|
||||
doc: 'Redis Password',
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'QUEUE_BULL_REDIS_PASSWORD',
|
||||
},
|
||||
port: {
|
||||
doc: 'Redis Port',
|
||||
format: Number,
|
||||
default: 6379,
|
||||
env: 'QUEUE_BULL_REDIS_PORT',
|
||||
},
|
||||
timeoutThreshold: {
|
||||
doc: 'Redis timeout threshold',
|
||||
format: Number,
|
||||
default: 10000,
|
||||
env: 'QUEUE_BULL_REDIS_TIMEOUT_THRESHOLD',
|
||||
},
|
||||
},
|
||||
queueRecoveryInterval: {
|
||||
doc: 'If > 0 enables an active polling to the queue that can recover for Redis crashes. Given in seconds; 0 is disabled. May increase Redis traffic significantly.',
|
||||
format: Number,
|
||||
default: 60,
|
||||
env: 'QUEUE_RECOVERY_INTERVAL',
|
||||
},
|
||||
},
|
||||
},

	generic: {
		// The timezone to use. Is important for nodes like "Cron" which start the
		// workflow automatically at a specified time. This setting can also be
		// overwritten on a per-workflow basis in the workflow settings in the
		// editor.
		timezone: {
			doc: 'The timezone to use',
			format: '*',
			default: 'America/New_York',
			env: 'GENERIC_TIMEZONE',
		},
	},

	// How n8n can be reached (Editor & REST-API)
	path: {
		format: String,
		default: '/',
		arg: 'path',
		env: 'N8N_PATH',
		doc: 'Path n8n is deployed to',
	},
	host: {
		format: String,
		default: 'localhost',
		arg: 'host',
		env: 'N8N_HOST',
		doc: 'Host name under which n8n can be reached',
	},
	port: {
		format: Number,
		default: 5678,
		arg: 'port',
		env: 'N8N_PORT',
		doc: 'HTTP port on which n8n can be reached',
	},
	listen_address: {
		format: String,
		default: '0.0.0.0',
		env: 'N8N_LISTEN_ADDRESS',
		doc: 'IP address n8n should listen on',
	},
	protocol: {
		format: ['http', 'https'],
		default: 'http',
		env: 'N8N_PROTOCOL',
		doc: 'HTTP protocol via which n8n can be reached',
	},
	ssl_key: {
		format: String,
		default: '',
		env: 'N8N_SSL_KEY',
		doc: 'SSL key for HTTPS protocol',
	},
	ssl_cert: {
		format: String,
		default: '',
		env: 'N8N_SSL_CERT',
		doc: 'SSL cert for HTTPS protocol',
	},
	editorBaseUrl: {
		format: String,
		default: '',
		env: 'N8N_EDITOR_BASE_URL',
		doc: 'Public URL where the editor is accessible. Also used for emails sent from n8n.',
	},
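
With `protocol` set to `https`, the `ssl_key` and `ssl_cert` paths feed a standard Node TLS server. A minimal sketch under that assumption (not n8n's actual server bootstrap):

```ts
import { readFileSync } from 'fs';
import https from 'https';
import express from 'express';
import config from '../config';

const app = express(); // stand-in for the real request handler

// Serve over TLS when N8N_PROTOCOL=https; key and cert paths come from
// N8N_SSL_KEY and N8N_SSL_CERT.
if (config.getEnv('protocol') === 'https') {
	const server = https.createServer(
		{
			key: readFileSync(config.getEnv('ssl_key')),
			cert: readFileSync(config.getEnv('ssl_cert')),
		},
		app,
	);
	server.listen(config.getEnv('port'), config.getEnv('listen_address'));
}
```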

	security: {
		excludeEndpoints: {
			doc: 'Additional endpoints to exclude auth checks. Multiple endpoints can be separated by colon (":")',
			format: String,
			default: '',
			env: 'N8N_AUTH_EXCLUDE_ENDPOINTS',
		},
		basicAuth: {
			active: {
				format: 'Boolean',
				default: false,
				env: 'N8N_BASIC_AUTH_ACTIVE',
				doc: 'If basic auth should be activated for editor and REST-API',
			},
			user: {
				format: String,
				default: '',
				env: 'N8N_BASIC_AUTH_USER',
				doc: 'The name of the basic auth user',
			},
			password: {
				format: String,
				default: '',
				env: 'N8N_BASIC_AUTH_PASSWORD',
				doc: 'The password of the basic auth user',
			},
			hash: {
				format: 'Boolean',
				default: false,
				env: 'N8N_BASIC_AUTH_HASH',
				doc: 'If password for basic auth is hashed',
			},
		},
		jwtAuth: {
			active: {
				format: 'Boolean',
				default: false,
				env: 'N8N_JWT_AUTH_ACTIVE',
				doc: 'If JWT auth should be activated for editor and REST-API',
			},
			jwtHeader: {
				format: String,
				default: '',
				env: 'N8N_JWT_AUTH_HEADER',
				doc: 'The request header containing a signed JWT',
			},
			jwtHeaderValuePrefix: {
				format: String,
				default: '',
				env: 'N8N_JWT_AUTH_HEADER_VALUE_PREFIX',
				doc: 'The request header value prefix to strip (optional)',
			},
			jwksUri: {
				format: String,
				default: '',
				env: 'N8N_JWKS_URI',
				doc: 'The URI to fetch JWK Set for JWT authentication',
			},
			jwtIssuer: {
				format: String,
				default: '',
				env: 'N8N_JWT_ISSUER',
				doc: 'JWT issuer to expect (optional)',
			},
			jwtNamespace: {
				format: String,
				default: '',
				env: 'N8N_JWT_NAMESPACE',
				doc: 'JWT namespace to expect (optional)',
			},
			jwtAllowedTenantKey: {
				format: String,
				default: '',
				env: 'N8N_JWT_ALLOWED_TENANT_KEY',
				doc: 'JWT tenant key name to inspect within JWT namespace (optional)',
			},
			jwtAllowedTenant: {
				format: String,
				default: '',
				env: 'N8N_JWT_ALLOWED_TENANT',
				doc: 'JWT tenant to allow (optional)',
			},
		},
	},
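
The JWT options describe where the signed token arrives and how its value is trimmed before verification against the JWKS endpoint. A small sketch of just the extraction step, assuming an Express-style request object (verification itself is omitted):

```ts
import type { Request } from 'express';
import config from '../config';

// Pull the raw JWT out of the configured header, stripping the optional
// value prefix (e.g. "Bearer") declared in N8N_JWT_AUTH_HEADER_VALUE_PREFIX.
function extractJwt(req: Request): string | undefined {
	const header = config.getEnv('security.jwtAuth.jwtHeader'); // e.g. 'Authorization'
	const prefix = config.getEnv('security.jwtAuth.jwtHeaderValuePrefix');

	const raw = req.headers[header.toLowerCase()];
	if (typeof raw !== 'string') return undefined;

	return prefix && raw.startsWith(prefix) ? raw.slice(prefix.length).trim() : raw;
}
```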

	endpoints: {
		payloadSizeMax: {
			format: Number,
			default: 16,
			env: 'N8N_PAYLOAD_SIZE_MAX',
			doc: 'Maximum payload size in MB.',
		},
		metrics: {
			enable: {
				format: 'Boolean',
				default: false,
				env: 'N8N_METRICS',
				doc: 'Enable metrics endpoint',
			},
			prefix: {
				format: String,
				default: 'n8n_',
				env: 'N8N_METRICS_PREFIX',
				doc: 'An optional prefix for metric names. Default: n8n_',
			},
		},
		rest: {
			format: String,
			default: 'rest',
			env: 'N8N_ENDPOINT_REST',
			doc: 'Path for rest endpoint',
		},
		webhook: {
			format: String,
			default: 'webhook',
			env: 'N8N_ENDPOINT_WEBHOOK',
			doc: 'Path for webhook endpoint',
		},
		webhookWaiting: {
			format: String,
			default: 'webhook-waiting',
			env: 'N8N_ENDPOINT_WEBHOOK_WAIT',
			doc: 'Path for waiting-webhook endpoint',
		},
		webhookTest: {
			format: String,
			default: 'webhook-test',
			env: 'N8N_ENDPOINT_WEBHOOK_TEST',
			doc: 'Path for test-webhook endpoint',
		},
		disableUi: {
			format: Boolean,
			default: false,
			env: 'N8N_DISABLE_UI',
			doc: 'Disable N8N UI (Frontend).',
		},
		disableProductionWebhooksOnMainProcess: {
			format: Boolean,
			default: false,
			env: 'N8N_DISABLE_PRODUCTION_MAIN_PROCESS',
			doc: 'Disable production webhooks from main process. This helps ensure that no HTTP traffic load reaches the main process when using webhook-specific processes.',
		},
		skipWebhoooksDeregistrationOnShutdown: {
			/**
			 * Longer explanation: n8n deregisters webhooks on shutdown / deactivation
			 * and registers on startup / activation. If we skip
			 * deactivation on shutdown, webhooks will remain active on third-party services.
			 * We don't have to worry about startup, as it always
			 * checks if webhooks already exist.
			 * If users want to upgrade n8n, it is possible to run
			 * two instances simultaneously without downtime, similar
			 * to a blue/green deployment.
			 * WARNING: Trigger nodes (like Cron) will cause duplication
			 * of work, so be aware when using this.
			 */
			doc: 'Deregister webhooks on external services only when workflows are deactivated.',
			format: Boolean,
			default: false,
			env: 'N8N_SKIP_WEBHOOK_DEREGISTRATION_SHUTDOWN',
		},
	},
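
The endpoint path settings compose with the reachability settings above into the URLs n8n exposes. A hedged helper showing how a production webhook URL might be assembled from them; it mirrors the spirit of `getBaseUrl()` further down in this diff, where default ports (80/443) are dropped from the URL:

```ts
import config from '../config';

// Compose a production webhook URL from the reachability settings and the
// webhook endpoint path. Illustrative only.
function buildWebhookUrl(webhookPath: string): string {
	const protocol = config.getEnv('protocol');
	const host = config.getEnv('host');
	const port = config.getEnv('port');
	const basePath = config.getEnv('path'); // '/' by default
	const endpoint = config.getEnv('endpoints.webhook'); // 'webhook' by default

	const origin =
		(protocol === 'http' && port === 80) || (protocol === 'https' && port === 443)
			? `${protocol}://${host}`
			: `${protocol}://${host}:${port}`;

	return `${origin}${basePath}${endpoint}/${webhookPath}`;
}
```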

	publicApiEndpoints: {
		path: {
			format: String,
			default: 'api',
			env: 'N8N_PUBLIC_API_ENDPOINT',
			doc: 'Path for the public API endpoints',
		},
	},

	workflowTagsDisabled: {
		format: Boolean,
		default: false,
		env: 'N8N_WORKFLOW_TAGS_DISABLED',
		doc: 'Disable workflow tags.',
	},

	userManagement: {
		disabled: {
			doc: 'Disable user management and hide it completely.',
			format: Boolean,
			default: false,
			env: 'N8N_USER_MANAGEMENT_DISABLED',
		},
		jwtSecret: {
			doc: 'Set a specific JWT secret (optional - n8n can generate one)', // Generated @ start.ts
			format: String,
			default: '',
			env: 'N8N_USER_MANAGEMENT_JWT_SECRET',
		},
		emails: {
			mode: {
				doc: 'How to send emails',
				format: ['', 'smtp'],
				default: 'smtp',
				env: 'N8N_EMAIL_MODE',
			},
			smtp: {
				host: {
					doc: 'SMTP server host',
					format: String, // e.g. 'smtp.gmail.com'
					default: '',
					env: 'N8N_SMTP_HOST',
				},
				port: {
					doc: 'SMTP server port',
					format: Number,
					default: 465,
					env: 'N8N_SMTP_PORT',
				},
				secure: {
					doc: 'Whether or not to use SSL for SMTP',
					format: Boolean,
					default: true,
					env: 'N8N_SMTP_SSL',
				},
				auth: {
					user: {
						doc: 'SMTP login username',
						format: String, // e.g. 'you@gmail.com'
						default: '',
						env: 'N8N_SMTP_USER',
					},
					pass: {
						doc: 'SMTP login password',
						format: String,
						default: '',
						env: 'N8N_SMTP_PASS',
					},
				},
				sender: {
					doc: 'How to display sender name',
					format: String,
					default: '',
					env: 'N8N_SMTP_SENDER',
				},
			},
			templates: {
				invite: {
					doc: 'Overrides default HTML template for inviting new people (use full path)',
					format: String,
					default: '',
					env: 'N8N_UM_EMAIL_TEMPLATES_INVITE',
				},
				passwordReset: {
					doc: 'Overrides default HTML template for resetting password (use full path)',
					format: String,
					default: '',
					env: 'N8N_UM_EMAIL_TEMPLATES_PWRESET',
				},
			},
		},
	},
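
The SMTP block maps one-to-one onto a standard nodemailer transport. A minimal sketch of the wiring, assuming the `nodemailer` package (n8n's own mailer wraps this with the HTML templates configured above, plus error handling):

```ts
import nodemailer from 'nodemailer';
import config from '../config';

// Build a mail transport from the userManagement.emails.smtp settings.
const transport = nodemailer.createTransport({
	host: config.getEnv('userManagement.emails.smtp.host'),
	port: config.getEnv('userManagement.emails.smtp.port'),
	secure: config.getEnv('userManagement.emails.smtp.secure'), // N8N_SMTP_SSL
	auth: {
		user: config.getEnv('userManagement.emails.smtp.auth.user'),
		pass: config.getEnv('userManagement.emails.smtp.auth.pass'),
	},
});

// Usage: send an invite using the configured sender display name.
async function sendInvite(to: string): Promise<void> {
	await transport.sendMail({
		from: config.getEnv('userManagement.emails.smtp.sender'),
		to,
		subject: 'You have been invited to n8n',
		text: 'Follow the link in this email to set up your account.',
	});
}
```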

	externalHookFiles: {
		doc: 'Files containing external hooks. Multiple files can be separated by colon (":")',
		format: String,
		default: '',
		env: 'EXTERNAL_HOOK_FILES',
	},

	nodes: {
		include: {
			doc: 'Nodes to load',
			format: function check(rawValue) {
				if (rawValue === '') {
					return;
				}
				try {
					const values = JSON.parse(rawValue);
					if (!Array.isArray(values)) {
						throw new Error();
					}

					for (const value of values) {
						if (typeof value !== 'string') {
							throw new Error();
						}
					}
				} catch (error) {
					throw new TypeError(`The Nodes to include is not a valid Array of strings.`);
				}
			},
			default: undefined,
			env: 'NODES_INCLUDE',
		},
		exclude: {
			doc: 'Nodes not to load',
			format: function check(rawValue) {
				try {
					const values = JSON.parse(rawValue);
					if (!Array.isArray(values)) {
						throw new Error();
					}

					for (const value of values) {
						if (typeof value !== 'string') {
							throw new Error();
						}
					}
				} catch (error) {
					throw new TypeError(`The Nodes to exclude is not a valid Array of strings.`);
				}
			},
			default: '[]',
			env: 'NODES_EXCLUDE',
		},
		errorTriggerType: {
			doc: 'Node Type to use as Error Trigger',
			format: String,
			default: 'n8n-nodes-base.errorTrigger',
			env: 'NODES_ERROR_TRIGGER_TYPE',
		},
	},
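
Both `include` and `exclude` expect a JSON-encoded array of node-type names, as enforced by the `check` format functions above. For example (the excluded node name is illustrative):

```ts
// Valid: a JSON array of strings, e.g. in the environment:
//   NODES_EXCLUDE='["n8n-nodes-base.executeCommand"]'
//
// Invalid values make the check throw at startup:
//   NODES_EXCLUDE='n8n-nodes-base.executeCommand'  -> TypeError: not a valid Array of strings
//   NODES_EXCLUDE='[1, 2]'                         -> TypeError: not a valid Array of strings
```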

	logs: {
		level: {
			doc: 'Log output level',
			format: ['error', 'warn', 'info', 'verbose', 'debug'],
			default: 'info',
			env: 'N8N_LOG_LEVEL',
		},
		output: {
			doc: 'Where to output logs. Options are: console, file. Multiple can be separated by comma (",")',
			format: String,
			default: 'console',
			env: 'N8N_LOG_OUTPUT',
		},
		file: {
			fileCountMax: {
				doc: 'Maximum number of files to keep.',
				format: Number,
				default: 100,
				env: 'N8N_LOG_FILE_COUNT_MAX',
			},
			fileSizeMax: {
				doc: 'Maximum size for each log file in MB.',
				format: Number,
				default: 16,
				env: 'N8N_LOG_FILE_SIZE_MAX',
			},
			location: {
				doc: 'Log file location; only used if log output is set to file.',
				format: String,
				default: path.join(core.UserSettings.getUserN8nFolderPath(), 'logs/n8n.log'),
				env: 'N8N_LOG_FILE_LOCATION',
			},
		},
	},

	versionNotifications: {
		enabled: {
			doc: 'Whether feature is enabled to request notifications about new versions and security updates.',
			format: Boolean,
			default: true,
			env: 'N8N_VERSION_NOTIFICATIONS_ENABLED',
		},
		endpoint: {
			doc: 'Endpoint to retrieve version information from.',
			format: String,
			default: 'https://api.n8n.io/versions/',
			env: 'N8N_VERSION_NOTIFICATIONS_ENDPOINT',
		},
		infoUrl: {
			doc: `URL in New Versions Panel with more information on updating one's instance.`,
			format: String,
			default: 'https://docs.n8n.io/getting-started/installation/updating.html',
			env: 'N8N_VERSION_NOTIFICATIONS_INFO_URL',
		},
	},

	templates: {
		enabled: {
			doc: 'Whether templates feature is enabled to load workflow templates.',
			format: Boolean,
			default: true,
			env: 'N8N_TEMPLATES_ENABLED',
		},
		host: {
			doc: 'Endpoint host to retrieve workflow templates from endpoints.',
			format: String,
			default: 'https://api.n8n.io/',
			env: 'N8N_TEMPLATES_HOST',
		},
	},

	binaryDataManager: {
		availableModes: {
			format: String,
			default: 'filesystem',
			env: 'N8N_AVAILABLE_BINARY_DATA_MODES',
			doc: 'Available modes of binary data storage, as comma separated strings',
		},
		mode: {
			format: ['default', 'filesystem'],
			default: 'default',
			env: 'N8N_DEFAULT_BINARY_DATA_MODE',
			doc: 'Storage mode for binary data',
		},
		localStoragePath: {
			format: String,
			default: path.join(core.UserSettings.getUserN8nFolderPath(), 'binaryData'),
			env: 'N8N_BINARY_DATA_STORAGE_PATH',
			doc: 'Path for binary data storage in "filesystem" mode',
		},
		binaryDataTTL: {
			format: Number,
			default: 60,
			env: 'N8N_BINARY_DATA_TTL',
			doc: 'TTL for binary data of unsaved executions in minutes',
		},
		persistedBinaryDataTTL: {
			format: Number,
			default: 1440,
			env: 'N8N_PERSISTED_BINARY_DATA_TTL',
			doc: 'TTL for persisted binary data in minutes (binary data gets deleted if not persisted before TTL expires)',
		},
	},

	deployment: {
		type: {
			format: String,
			default: 'default',
			env: 'N8N_DEPLOYMENT_TYPE',
		},
	},

	hiringBanner: {
		enabled: {
			doc: 'Whether hiring banner in browser console is enabled.',
			format: Boolean,
			default: true,
			env: 'N8N_HIRING_BANNER_ENABLED',
		},
	},

	personalization: {
		enabled: {
			doc: 'Whether personalization is enabled.',
			format: Boolean,
			default: true,
			env: 'N8N_PERSONALIZATION_ENABLED',
		},
	},

	diagnostics: {
		enabled: {
			doc: 'Whether diagnostic mode is enabled.',
			format: Boolean,
			default: true,
			env: 'N8N_DIAGNOSTICS_ENABLED',
		},
		config: {
			frontend: {
				doc: 'Diagnostics config for frontend.',
				format: String,
				default: '1zPn9bgWPzlQc0p8Gj1uiK6DOTn;https://telemetry.n8n.io',
				env: 'N8N_DIAGNOSTICS_CONFIG_FRONTEND',
			},
			backend: {
				doc: 'Diagnostics config for backend.',
				format: String,
				default: '1zPn7YoGC3ZXE9zLeTKLuQCB4F6;https://telemetry.n8n.io/v1/batch',
				env: 'N8N_DIAGNOSTICS_CONFIG_BACKEND',
			},
		},
	},

	defaultLocale: {
		doc: 'Default locale for the UI',
		format: String,
		default: 'en',
		env: 'N8N_DEFAULT_LOCALE',
	},
});

config.getEnv = config.get;

// Overwrite default configuration with settings which got defined in
// optional configuration files
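
With the schema registered, convict resolves each key using its usual precedence (schema default, then optional configuration files, then the environment variable named in `env`). `config.getEnv` is simply an alias for `config.get`, and the `types.d.ts` file further below gives it a typed signature. A quick usage sketch:

```ts
import config from '../config';

// Dotted paths address nested schema keys; values fall back to the schema
// defaults unless overridden by a config file or environment variable.
const port = config.getEnv('port'); // 5678 unless N8N_PORT is set
const dbType = config.getEnv('database.type'); // 'sqlite' | 'mariadb' | 'mysqldb' | 'postgresdb'

if (config.getEnv('executions.mode') === 'queue') {
	// Scaled mode: executions are pushed to the Bull queue configured above.
}
```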
883  packages/cli/config/schema.ts  Normal file

@@ -0,0 +1,883 @@
/* eslint-disable no-restricted-syntax */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */

import path from 'path';
import * as core from 'n8n-core';

export const schema = {
	database: {
		type: {
			doc: 'Type of database to use',
			format: ['sqlite', 'mariadb', 'mysqldb', 'postgresdb'] as const,
			default: 'sqlite',
			env: 'DB_TYPE',
		},
		tablePrefix: {
			doc: 'Prefix for table names',
			format: '*',
			default: '',
			env: 'DB_TABLE_PREFIX',
		},
		logging: {
			enabled: {
				doc: 'Typeorm logging enabled flag.',
				format: Boolean,
				default: false,
				env: 'DB_LOGGING_ENABLED',
			},
			options: {
				doc: 'Logging level options, default is "error". Possible values: query,error,schema,warn,info,log. To enable all logging, specify "all"',
				format: String,
				default: 'error',
				env: 'DB_LOGGING_OPTIONS',
			},
			maxQueryExecutionTime: {
				doc: 'Maximum number of milliseconds query should be executed before logger logs a warning. Set 0 to disable long running query warning',
				format: Number,
				default: 1000,
				env: 'DB_LOGGING_MAX_EXECUTION_TIME',
			},
		},
		postgresdb: {
			database: {
				doc: 'PostgresDB Database',
				format: String,
				default: 'n8n',
				env: 'DB_POSTGRESDB_DATABASE',
			},
			host: {
				doc: 'PostgresDB Host',
				format: String,
				default: 'localhost',
				env: 'DB_POSTGRESDB_HOST',
			},
			password: {
				doc: 'PostgresDB Password',
				format: String,
				default: '',
				env: 'DB_POSTGRESDB_PASSWORD',
			},
			port: {
				doc: 'PostgresDB Port',
				format: Number,
				default: 5432,
				env: 'DB_POSTGRESDB_PORT',
			},
			user: {
				doc: 'PostgresDB User',
				format: String,
				default: 'root',
				env: 'DB_POSTGRESDB_USER',
			},
			schema: {
				doc: 'PostgresDB Schema',
				format: String,
				default: 'public',
				env: 'DB_POSTGRESDB_SCHEMA',
			},

			ssl: {
				ca: {
					doc: 'SSL certificate authority',
					format: String,
					default: '',
					env: 'DB_POSTGRESDB_SSL_CA',
				},
				cert: {
					doc: 'SSL certificate',
					format: String,
					default: '',
					env: 'DB_POSTGRESDB_SSL_CERT',
				},
				key: {
					doc: 'SSL key',
					format: String,
					default: '',
					env: 'DB_POSTGRESDB_SSL_KEY',
				},
				rejectUnauthorized: {
					doc: 'If unauthorized SSL connections should be rejected',
					format: 'Boolean',
					default: true,
					env: 'DB_POSTGRESDB_SSL_REJECT_UNAUTHORIZED',
				},
			},
		},
		mysqldb: {
			database: {
				doc: 'MySQL Database',
				format: String,
				default: 'n8n',
				env: 'DB_MYSQLDB_DATABASE',
			},
			host: {
				doc: 'MySQL Host',
				format: String,
				default: 'localhost',
				env: 'DB_MYSQLDB_HOST',
			},
			password: {
				doc: 'MySQL Password',
				format: String,
				default: '',
				env: 'DB_MYSQLDB_PASSWORD',
			},
			port: {
				doc: 'MySQL Port',
				format: Number,
				default: 3306,
				env: 'DB_MYSQLDB_PORT',
			},
			user: {
				doc: 'MySQL User',
				format: String,
				default: 'root',
				env: 'DB_MYSQLDB_USER',
			},
		},
		sqlite: {
			executeVacuumOnStartup: {
				doc: 'Runs VACUUM operation on startup to rebuild the database. Reduces file size and optimizes indexes. WARNING: This is a long-running blocking operation. Will increase start-up time.',
				format: Boolean,
				default: false,
				env: 'DB_SQLITE_VACUUM_ON_STARTUP',
			},
		},
	},

	credentials: {
		overwrite: {
			data: {
				// Allows setting default values for credentials which
				// get automatically prefilled, are not displayed to the
				// user, and cannot be changed.
				// Format: { CREDENTIAL_NAME: { PARAMETER: VALUE } }
				doc: 'Overwrites for credentials',
				format: '*',
				default: '{}',
				env: 'CREDENTIALS_OVERWRITE_DATA',
			},
			endpoint: {
				doc: 'Fetch credentials from API',
				format: String,
				default: '',
				env: 'CREDENTIALS_OVERWRITE_ENDPOINT',
			},
		},
		defaultName: {
			doc: 'Default name for credentials',
			format: String,
			default: 'My credentials',
			env: 'CREDENTIALS_DEFAULT_NAME',
		},
	},

	workflows: {
		defaultName: {
			doc: 'Default name for workflow',
			format: String,
			default: 'My workflow',
			env: 'WORKFLOWS_DEFAULT_NAME',
		},
	},

	executions: {
		// By default workflows always get executed in their own process.
		// If this option gets set to "main" they will run in the
		// main process instead.
		process: {
			doc: 'In what process workflows should be executed',
			format: ['main', 'own'] as const,
			default: 'own',
			env: 'EXECUTIONS_PROCESS',
		},

		mode: {
			doc: 'If it should run executions directly or via queue',
			format: ['regular', 'queue'] as const,
			default: 'regular',
			env: 'EXECUTIONS_MODE',
		},

		// A workflow times out and gets canceled after this time (seconds).
		// If the workflow is executed in the main process, a soft timeout
		// is executed (takes effect after the current node finishes).
		// If a workflow is running in its own process, a soft timeout is
		// tried first before the process gets killed, after waiting for an
		// additional fifth of the given timeout duration.
		//
		// To deactivate the timeout, set it to -1.
		//
		// Timeout is currently not activated by default, which will change
		// in a future version.
		timeout: {
			doc: 'Max run time (seconds) before stopping the workflow execution',
			format: Number,
			default: -1,
			env: 'EXECUTIONS_TIMEOUT',
		},
		maxTimeout: {
			doc: 'Max execution time (seconds) that can be set for a workflow individually',
			format: Number,
			default: 3600,
			env: 'EXECUTIONS_TIMEOUT_MAX',
		},

		// If a workflow executes, all the data gets saved by default. This
		// could be a problem when a workflow gets executed a lot and processes
		// a lot of data. To not exceed the database's capacity it is possible to
		// prune the database regularly or to not save the execution at all.
		// Depending on whether the execution succeeded or errored, a different
		// save behaviour can be set.
		saveDataOnError: {
			doc: 'What workflow execution data to save on error',
			format: ['all', 'none'] as const,
			default: 'all',
			env: 'EXECUTIONS_DATA_SAVE_ON_ERROR',
		},
		saveDataOnSuccess: {
			doc: 'What workflow execution data to save on success',
			format: ['all', 'none'] as const,
			default: 'all',
			env: 'EXECUTIONS_DATA_SAVE_ON_SUCCESS',
		},
		saveExecutionProgress: {
			doc: 'Whether or not to save progress for each node executed',
			format: 'Boolean',
			default: false,
			env: 'EXECUTIONS_DATA_SAVE_ON_PROGRESS',
		},

		// If the executions of workflows which got started via the editor
		// should be saved. By default they will not be saved, as these runs
		// are normally only for testing and debugging. This setting can
		// also be overwritten on a per-workflow basis in the workflow settings
		// in the editor.
		saveDataManualExecutions: {
			doc: 'Save data of executions when started manually via editor',
			format: 'Boolean',
			default: false,
			env: 'EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS',
		},

		// To not exceed the database's capacity and keep its size moderate
		// the execution data gets pruned regularly (default: 1 hour interval).
		// All saved execution data older than the max age will be deleted.
		// Pruning is currently not activated by default, which will change in
		// a future version.
		pruneData: {
			doc: 'Delete data of past executions on a rolling basis',
			format: 'Boolean',
			default: false,
			env: 'EXECUTIONS_DATA_PRUNE',
		},
		pruneDataMaxAge: {
			doc: 'How old (hours) the execution data has to be to get deleted',
			format: Number,
			default: 336,
			env: 'EXECUTIONS_DATA_MAX_AGE',
		},
		pruneDataTimeout: {
			doc: 'Timeout (seconds) after execution data has been pruned',
			format: Number,
			default: 3600,
			env: 'EXECUTIONS_DATA_PRUNE_TIMEOUT',
		},
	},

	queue: {
		health: {
			active: {
				doc: 'If health checks should be enabled',
				format: 'Boolean',
				default: false,
				env: 'QUEUE_HEALTH_CHECK_ACTIVE',
			},
			port: {
				doc: 'Port to serve health check on if activated',
				format: Number,
				default: 5678,
				env: 'QUEUE_HEALTH_CHECK_PORT',
			},
		},
		bull: {
			prefix: {
				doc: 'Prefix for all queue keys',
				format: String,
				default: '',
				env: 'QUEUE_BULL_PREFIX',
			},
			redis: {
				db: {
					doc: 'Redis DB',
					format: Number,
					default: 0,
					env: 'QUEUE_BULL_REDIS_DB',
				},
				host: {
					doc: 'Redis Host',
					format: String,
					default: 'localhost',
					env: 'QUEUE_BULL_REDIS_HOST',
				},
				password: {
					doc: 'Redis Password',
					format: String,
					default: '',
					env: 'QUEUE_BULL_REDIS_PASSWORD',
				},
				port: {
					doc: 'Redis Port',
					format: Number,
					default: 6379,
					env: 'QUEUE_BULL_REDIS_PORT',
				},
				timeoutThreshold: {
					doc: 'Redis timeout threshold',
					format: Number,
					default: 10000,
					env: 'QUEUE_BULL_REDIS_TIMEOUT_THRESHOLD',
				},
			},
			queueRecoveryInterval: {
				doc: 'If > 0 enables an active polling to the queue that can recover from Redis crashes. Given in seconds; 0 is disabled. May increase Redis traffic significantly.',
				format: Number,
				default: 60,
				env: 'QUEUE_RECOVERY_INTERVAL',
			},
		},
	},

	generic: {
		// The timezone to use. Is important for nodes like "Cron" which start the
		// workflow automatically at a specified time. This setting can also be
		// overwritten on a per-workflow basis in the workflow settings in the
		// editor.
		timezone: {
			doc: 'The timezone to use',
			format: '*',
			default: 'America/New_York',
			env: 'GENERIC_TIMEZONE',
		},
	},

	// How n8n can be reached (Editor & REST-API)
	path: {
		format: String,
		default: '/',
		arg: 'path',
		env: 'N8N_PATH',
		doc: 'Path n8n is deployed to',
	},
	host: {
		format: String,
		default: 'localhost',
		arg: 'host',
		env: 'N8N_HOST',
		doc: 'Host name under which n8n can be reached',
	},
	port: {
		format: Number,
		default: 5678,
		arg: 'port',
		env: 'N8N_PORT',
		doc: 'HTTP port on which n8n can be reached',
	},
	listen_address: {
		format: String,
		default: '0.0.0.0',
		env: 'N8N_LISTEN_ADDRESS',
		doc: 'IP address n8n should listen on',
	},
	protocol: {
		format: ['http', 'https'] as const,
		default: 'http',
		env: 'N8N_PROTOCOL',
		doc: 'HTTP protocol via which n8n can be reached',
	},
	ssl_key: {
		format: String,
		default: '',
		env: 'N8N_SSL_KEY',
		doc: 'SSL key for HTTPS protocol',
	},
	ssl_cert: {
		format: String,
		default: '',
		env: 'N8N_SSL_CERT',
		doc: 'SSL cert for HTTPS protocol',
	},
	editorBaseUrl: {
		format: String,
		default: '',
		env: 'N8N_EDITOR_BASE_URL',
		doc: 'Public URL where the editor is accessible. Also used for emails sent from n8n.',
	},

	security: {
		excludeEndpoints: {
			doc: 'Additional endpoints to exclude auth checks. Multiple endpoints can be separated by colon (":")',
			format: String,
			default: '',
			env: 'N8N_AUTH_EXCLUDE_ENDPOINTS',
		},
		basicAuth: {
			active: {
				format: 'Boolean',
				default: false,
				env: 'N8N_BASIC_AUTH_ACTIVE',
				doc: 'If basic auth should be activated for editor and REST-API',
			},
			user: {
				format: String,
				default: '',
				env: 'N8N_BASIC_AUTH_USER',
				doc: 'The name of the basic auth user',
			},
			password: {
				format: String,
				default: '',
				env: 'N8N_BASIC_AUTH_PASSWORD',
				doc: 'The password of the basic auth user',
			},
			hash: {
				format: 'Boolean',
				default: false,
				env: 'N8N_BASIC_AUTH_HASH',
				doc: 'If password for basic auth is hashed',
			},
		},
		jwtAuth: {
			active: {
				format: 'Boolean',
				default: false,
				env: 'N8N_JWT_AUTH_ACTIVE',
				doc: 'If JWT auth should be activated for editor and REST-API',
			},
			jwtHeader: {
				format: String,
				default: '',
				env: 'N8N_JWT_AUTH_HEADER',
				doc: 'The request header containing a signed JWT',
			},
			jwtHeaderValuePrefix: {
				format: String,
				default: '',
				env: 'N8N_JWT_AUTH_HEADER_VALUE_PREFIX',
				doc: 'The request header value prefix to strip (optional)',
			},
			jwksUri: {
				format: String,
				default: '',
				env: 'N8N_JWKS_URI',
				doc: 'The URI to fetch JWK Set for JWT authentication',
			},
			jwtIssuer: {
				format: String,
				default: '',
				env: 'N8N_JWT_ISSUER',
				doc: 'JWT issuer to expect (optional)',
			},
			jwtNamespace: {
				format: String,
				default: '',
				env: 'N8N_JWT_NAMESPACE',
				doc: 'JWT namespace to expect (optional)',
			},
			jwtAllowedTenantKey: {
				format: String,
				default: '',
				env: 'N8N_JWT_ALLOWED_TENANT_KEY',
				doc: 'JWT tenant key name to inspect within JWT namespace (optional)',
			},
			jwtAllowedTenant: {
				format: String,
				default: '',
				env: 'N8N_JWT_ALLOWED_TENANT',
				doc: 'JWT tenant to allow (optional)',
			},
		},
	},

	endpoints: {
		payloadSizeMax: {
			format: Number,
			default: 16,
			env: 'N8N_PAYLOAD_SIZE_MAX',
			doc: 'Maximum payload size in MB.',
		},
		metrics: {
			enable: {
				format: 'Boolean',
				default: false,
				env: 'N8N_METRICS',
				doc: 'Enable metrics endpoint',
			},
			prefix: {
				format: String,
				default: 'n8n_',
				env: 'N8N_METRICS_PREFIX',
				doc: 'An optional prefix for metric names. Default: n8n_',
			},
		},
		rest: {
			format: String,
			default: 'rest',
			env: 'N8N_ENDPOINT_REST',
			doc: 'Path for rest endpoint',
		},
		webhook: {
			format: String,
			default: 'webhook',
			env: 'N8N_ENDPOINT_WEBHOOK',
			doc: 'Path for webhook endpoint',
		},
		webhookWaiting: {
			format: String,
			default: 'webhook-waiting',
			env: 'N8N_ENDPOINT_WEBHOOK_WAIT',
			doc: 'Path for waiting-webhook endpoint',
		},
		webhookTest: {
			format: String,
			default: 'webhook-test',
			env: 'N8N_ENDPOINT_WEBHOOK_TEST',
			doc: 'Path for test-webhook endpoint',
		},
		disableUi: {
			format: Boolean,
			default: false,
			env: 'N8N_DISABLE_UI',
			doc: 'Disable N8N UI (Frontend).',
		},
		disableProductionWebhooksOnMainProcess: {
			format: Boolean,
			default: false,
			env: 'N8N_DISABLE_PRODUCTION_MAIN_PROCESS',
			doc: 'Disable production webhooks from main process. This helps ensure that no HTTP traffic load reaches the main process when using webhook-specific processes.',
		},
		skipWebhoooksDeregistrationOnShutdown: {
			/**
			 * Longer explanation: n8n deregisters webhooks on shutdown / deactivation
			 * and registers on startup / activation. If we skip
			 * deactivation on shutdown, webhooks will remain active on third-party services.
			 * We don't have to worry about startup, as it always
			 * checks if webhooks already exist.
			 * If users want to upgrade n8n, it is possible to run
			 * two instances simultaneously without downtime, similar
			 * to a blue/green deployment.
			 * WARNING: Trigger nodes (like Cron) will cause duplication
			 * of work, so be aware when using this.
			 */
			doc: 'Deregister webhooks on external services only when workflows are deactivated.',
			format: Boolean,
			default: false,
			env: 'N8N_SKIP_WEBHOOK_DEREGISTRATION_SHUTDOWN',
		},
	},

	workflowTagsDisabled: {
		format: Boolean,
		default: false,
		env: 'N8N_WORKFLOW_TAGS_DISABLED',
		doc: 'Disable workflow tags.',
	},

	userManagement: {
		disabled: {
			doc: 'Disable user management and hide it completely.',
			format: Boolean,
			default: false,
			env: 'N8N_USER_MANAGEMENT_DISABLED',
		},
		jwtSecret: {
			doc: 'Set a specific JWT secret (optional - n8n can generate one)', // Generated @ start.ts
			format: String,
			default: '',
			env: 'N8N_USER_MANAGEMENT_JWT_SECRET',
		},
		emails: {
			mode: {
				doc: 'How to send emails',
				format: ['', 'smtp'] as const,
				default: 'smtp',
				env: 'N8N_EMAIL_MODE',
			},
			smtp: {
				host: {
					doc: 'SMTP server host',
					format: String, // e.g. 'smtp.gmail.com'
					default: '',
					env: 'N8N_SMTP_HOST',
				},
				port: {
					doc: 'SMTP server port',
					format: Number,
					default: 465,
					env: 'N8N_SMTP_PORT',
				},
				secure: {
					doc: 'Whether or not to use SSL for SMTP',
					format: Boolean,
					default: true,
					env: 'N8N_SMTP_SSL',
				},
				auth: {
					user: {
						doc: 'SMTP login username',
						format: String, // e.g. 'you@gmail.com'
						default: '',
						env: 'N8N_SMTP_USER',
					},
					pass: {
						doc: 'SMTP login password',
						format: String,
						default: '',
						env: 'N8N_SMTP_PASS',
					},
				},
				sender: {
					doc: 'How to display sender name',
					format: String,
					default: '',
					env: 'N8N_SMTP_SENDER',
				},
			},
			templates: {
				invite: {
					doc: 'Overrides default HTML template for inviting new people (use full path)',
					format: String,
					default: '',
					env: 'N8N_UM_EMAIL_TEMPLATES_INVITE',
				},
				passwordReset: {
					doc: 'Overrides default HTML template for resetting password (use full path)',
					format: String,
					default: '',
					env: 'N8N_UM_EMAIL_TEMPLATES_PWRESET',
				},
			},
		},
	},

	externalHookFiles: {
		doc: 'Files containing external hooks. Multiple files can be separated by colon (":")',
		format: String,
		default: '',
		env: 'EXTERNAL_HOOK_FILES',
	},

	nodes: {
		include: {
			doc: 'Nodes to load',
			format: function check(rawValue: string): void {
				if (rawValue === '') {
					return;
				}
				try {
					const values = JSON.parse(rawValue);
					if (!Array.isArray(values)) {
						throw new Error();
					}

					for (const value of values) {
						if (typeof value !== 'string') {
							throw new Error();
						}
					}
				} catch (error) {
					throw new TypeError(`The Nodes to include is not a valid Array of strings.`);
				}
			},
			default: undefined,
			env: 'NODES_INCLUDE',
		},
		exclude: {
			doc: 'Nodes not to load',
			format: function check(rawValue: string): void {
				try {
					const values = JSON.parse(rawValue);
					if (!Array.isArray(values)) {
						throw new Error();
					}

					for (const value of values) {
						if (typeof value !== 'string') {
							throw new Error();
						}
					}
				} catch (error) {
					throw new TypeError(`The Nodes to exclude is not a valid Array of strings.`);
				}
			},
			default: '[]',
			env: 'NODES_EXCLUDE',
		},
		errorTriggerType: {
			doc: 'Node Type to use as Error Trigger',
			format: String,
			default: 'n8n-nodes-base.errorTrigger',
			env: 'NODES_ERROR_TRIGGER_TYPE',
		},
	},

	logs: {
		level: {
			doc: 'Log output level',
			format: ['error', 'warn', 'info', 'verbose', 'debug', 'silent'] as const,
			default: 'info',
			env: 'N8N_LOG_LEVEL',
		},
		output: {
			doc: 'Where to output logs. Options are: console, file. Multiple can be separated by comma (",")',
			format: String,
			default: 'console',
			env: 'N8N_LOG_OUTPUT',
		},
		file: {
			fileCountMax: {
				doc: 'Maximum number of files to keep.',
				format: Number,
				default: 100,
				env: 'N8N_LOG_FILE_COUNT_MAX',
			},
			fileSizeMax: {
				doc: 'Maximum size for each log file in MB.',
				format: Number,
				default: 16,
				env: 'N8N_LOG_FILE_SIZE_MAX',
			},
			location: {
				doc: 'Log file location; only used if log output is set to file.',
				format: String,
				default: path.join(core.UserSettings.getUserN8nFolderPath(), 'logs/n8n.log'),
				env: 'N8N_LOG_FILE_LOCATION',
			},
		},
	},

	versionNotifications: {
		enabled: {
			doc: 'Whether feature is enabled to request notifications about new versions and security updates.',
			format: Boolean,
			default: true,
			env: 'N8N_VERSION_NOTIFICATIONS_ENABLED',
		},
		endpoint: {
			doc: 'Endpoint to retrieve version information from.',
			format: String,
			default: 'https://api.n8n.io/versions/',
			env: 'N8N_VERSION_NOTIFICATIONS_ENDPOINT',
		},
		infoUrl: {
			doc: `URL in New Versions Panel with more information on updating one's instance.`,
			format: String,
			default: 'https://docs.n8n.io/getting-started/installation/updating.html',
			env: 'N8N_VERSION_NOTIFICATIONS_INFO_URL',
		},
	},

	templates: {
		enabled: {
			doc: 'Whether templates feature is enabled to load workflow templates.',
			format: Boolean,
			default: true,
			env: 'N8N_TEMPLATES_ENABLED',
		},
		host: {
			doc: 'Endpoint host to retrieve workflow templates from endpoints.',
			format: String,
			default: 'https://api.n8n.io/',
			env: 'N8N_TEMPLATES_HOST',
		},
	},

	binaryDataManager: {
		availableModes: {
			format: String,
			default: 'filesystem',
			env: 'N8N_AVAILABLE_BINARY_DATA_MODES',
			doc: 'Available modes of binary data storage, as comma separated strings',
		},
		mode: {
			format: ['default', 'filesystem'] as const,
			default: 'default',
			env: 'N8N_DEFAULT_BINARY_DATA_MODE',
			doc: 'Storage mode for binary data',
		},
		localStoragePath: {
			format: String,
			default: path.join(core.UserSettings.getUserN8nFolderPath(), 'binaryData'),
			env: 'N8N_BINARY_DATA_STORAGE_PATH',
			doc: 'Path for binary data storage in "filesystem" mode',
		},
		binaryDataTTL: {
			format: Number,
			default: 60,
			env: 'N8N_BINARY_DATA_TTL',
			doc: 'TTL for binary data of unsaved executions in minutes',
		},
		persistedBinaryDataTTL: {
			format: Number,
			default: 1440,
			env: 'N8N_PERSISTED_BINARY_DATA_TTL',
			doc: 'TTL for persisted binary data in minutes (binary data gets deleted if not persisted before TTL expires)',
		},
	},

	deployment: {
		type: {
			format: String,
			default: 'default',
			env: 'N8N_DEPLOYMENT_TYPE',
		},
	},

	hiringBanner: {
		enabled: {
			doc: 'Whether hiring banner in browser console is enabled.',
			format: Boolean,
			default: true,
			env: 'N8N_HIRING_BANNER_ENABLED',
		},
	},

	personalization: {
		enabled: {
			doc: 'Whether personalization is enabled.',
			format: Boolean,
			default: true,
			env: 'N8N_PERSONALIZATION_ENABLED',
		},
	},

	diagnostics: {
		enabled: {
			doc: 'Whether diagnostic mode is enabled.',
			format: Boolean,
			default: true,
			env: 'N8N_DIAGNOSTICS_ENABLED',
		},
		config: {
			frontend: {
				doc: 'Diagnostics config for frontend.',
				format: String,
				default: '1zPn9bgWPzlQc0p8Gj1uiK6DOTn;https://telemetry.n8n.io',
				env: 'N8N_DIAGNOSTICS_CONFIG_FRONTEND',
			},
			backend: {
				doc: 'Diagnostics config for backend.',
				format: String,
				default: '1zPn7YoGC3ZXE9zLeTKLuQCB4F6;https://telemetry.n8n.io/v1/batch',
				env: 'N8N_DIAGNOSTICS_CONFIG_BACKEND',
			},
		},
	},

	defaultLocale: {
		doc: 'Default locale for the UI',
		format: String,
		default: 'en',
		env: 'N8N_DEFAULT_LOCALE',
	},
};

132  packages/cli/config/types.d.ts  Normal file (vendored)

@@ -0,0 +1,132 @@
/* eslint-disable @typescript-eslint/no-unused-vars */

import { IBinaryDataConfig } from '../../core/dist/src';
import { schema } from './schema';

// -----------------------------------
//          transformers
// -----------------------------------

/**
 * Transform an object (convict schema) into a union of string arrays (path segments),
 * one for every valid path in the schema object, filtered by type.
 *
 * ```ts
 * ["port", "default"] | ["queue", "bull", "redis", "port", "default"] | ["queue", "bull", "redis", "db", "default"] | ["queue", "bull", "redis", "timeoutThreshold", "default"] | etc.
 * ```
 */
type GetPathSegments<Traversable, Filter> = Traversable extends Filter
	? []
	: {
			[K in ValidKeys<Traversable>]: [K, ...GetPathSegments<Traversable[K], Filter>];
	  }[ValidKeys<Traversable>];

/**
 * Transform a union of string arrays (path segments) into a union of strings (dotted paths).
 *
 * ```ts
 * "port" | "queue.bull.redis.port" | "queue.bull.redis.db" | "queue.bull.redis.timeoutThreshold" | etc.
 * ```
 */
type JoinByDotting<T extends string[]> = T extends [infer F]
	? F
	: T extends [infer F, ...infer R]
	? F extends string
		? R extends string[]
			? `${F}.${JoinByDotting<R>}`
			: never
		: never
	: string;

type ToDottedPath<T> = JoinByDotting<RemoveExcess<T>>;

type CollectPathsByType<T> = ToDottedPath<GetPathSegments<typeof schema, T>>;
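
To see what these transformers do, it helps to trace them over a toy schema by hand. A hedged illustration (not part of the file itself):

```ts
// A toy convict-style schema: every leaf entry ends in `format`/`default`.
const toySchema = {
	port: { format: Number, default: 5678 },
	queue: { bull: { prefix: { format: String, default: '' } } },
};

// Walking it the way GetPathSegments does, filtered by `number`, reaches the
// leaf values and records the segments on the way; RemoveExcess then strips
// the trailing 'format' | 'default' segment, and JoinByDotting glues the rest:
//
//   GetPathSegments<typeof toySchema, number>            ~>  ['port', 'default']
//   ToDottedPath<['port', 'default']>                    ~>  'port'
//   ToDottedPath<['queue', 'bull', 'prefix', 'default']> ~>  'queue.bull.prefix'
```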

// -----------------------------------
//     path-to-return-type mapper
// -----------------------------------

type NumericPath = CollectPathsByType<number>;

type BooleanPath = CollectPathsByType<boolean>;

type StringLiteralArrayPath = CollectPathsByType<Readonly<string[]>>;

type StringPath = CollectPathsByType<string>;

type ConfigOptionPath =
	| NumericPath
	| BooleanPath
	| StringPath
	| StringLiteralArrayPath
	| keyof ExceptionPaths;

type ToReturnType<T extends ConfigOptionPath> = T extends NumericPath
	? number
	: T extends BooleanPath
	? boolean
	: T extends StringLiteralArrayPath
	? StringLiteralMap[T]
	: T extends keyof ExceptionPaths
	? ExceptionPaths[T]
	: T extends StringPath
	? string
	: unknown;

type ExceptionPaths = {
	'queue.bull.redis': object;
	binaryDataManager: IBinaryDataConfig;
	'nodes.include': undefined;
	'userManagement.isInstanceOwnerSetUp': boolean;
	'userManagement.skipInstanceOwnerSetup': boolean;
};

// -----------------------------------
//       string literals map
// -----------------------------------

type GetPathSegmentsWithUnions<T> = T extends ReadonlyArray<infer C>
	? [C]
	: {
			[K in ValidKeys<T>]: [K, ...GetPathSegmentsWithUnions<T[K]>];
	  }[ValidKeys<T>];

type ToPathUnionPair<T extends string[]> = T extends [...infer Path, infer Union]
	? Path extends string[]
		? { path: ToDottedPath<Path>; union: Union }
		: never
	: never;

type ToStringLiteralMap<T extends { path: string; union: string }> = {
	[Path in T['path']]: Extract<T, { path: Path }>['union'];
};

type StringLiteralMap = ToStringLiteralMap<
	ToPathUnionPair<GetPathSegmentsWithUnions<typeof schema>>
>;

// -----------------------------------
//              utils
// -----------------------------------

type ValidKeys<T> = keyof T extends string
	? keyof T extends keyof NumberConstructor
		? never
		: keyof T
	: never;

type RemoveExcess<T> = T extends [...infer Path, 'format' | 'default']
	? Path extends string[]
		? Path
		: never
	: never;

// -----------------------------------
//        module augmentation
// -----------------------------------

declare module 'convict' {
	interface Config<T> {
		getEnv<Path extends ConfigOptionPath>(path: Path): ToReturnType<Path>;
	}
}
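
The net effect of this augmentation is that `config.getEnv` is typed directly from the schema object, which is what allows the diffs below to replace untyped `config.get` calls. A few hedged examples of the inferred types:

```ts
import config from '../config';

const port = config.getEnv('port');                  // inferred as number
const prune = config.getEnv('executions.pruneData'); // inferred as boolean
const mode = config.getEnv('executions.mode');       // inferred as 'regular' | 'queue'

// const bad = config.getEnv('no.such.path');        // compile-time error: invalid path
```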

@@ -1,4 +1,4 @@
-import * as path from 'path';
+import path from 'path';
 import { UserSettings } from 'n8n-core';
 import { entities } from '../src/databases/entities';
@@ -1,6 +1,6 @@
 {
   "name": "n8n",
-  "version": "0.169.0",
+  "version": "0.171.1",
   "description": "n8n Workflow Automation Tool",
   "license": "SEE LICENSE IN LICENSE.md",
   "homepage": "https://n8n.io",
@@ -30,9 +30,9 @@
   "start:default": "cd bin && ./n8n",
   "start:windows": "cd bin && n8n",
   "test": "npm run test:sqlite",
-  "test:sqlite": "export DB_TYPE=sqlite && jest --forceExit",
-  "test:postgres": "export DB_TYPE=postgresdb && jest",
-  "test:mysql": "export DB_TYPE=mysqldb && jest",
+  "test:sqlite": "export N8N_LOG_LEVEL='silent'; export DB_TYPE=sqlite; jest",
+  "test:postgres": "export N8N_LOG_LEVEL='silent'; export DB_TYPE=postgresdb && jest",
+  "test:mysql": "export N8N_LOG_LEVEL='silent'; export DB_TYPE=mysqldb && jest",
   "watch": "tsc --watch",
   "typeorm": "ts-node ../../node_modules/typeorm/cli.js"
 },
@@ -89,7 +89,7 @@
   "ts-jest": "^27.1.3",
   "ts-node": "^8.9.1",
   "tslint": "^6.1.2",
-  "typescript": "~4.3.5"
+  "typescript": "~4.6.0"
 },
 "dependencies": {
   "@apidevtools/swagger-parser": "^10.0.3",
@@ -127,10 +127,10 @@
   "lodash.get": "^4.4.2",
   "lodash.merge": "^4.6.2",
   "mysql2": "~2.3.0",
-  "n8n-core": "~0.110.0",
-  "n8n-editor-ui": "~0.136.0",
-  "n8n-nodes-base": "~0.167.0",
-  "n8n-workflow": "~0.92.0",
+  "n8n-core": "~0.112.0",
+  "n8n-editor-ui": "~0.138.0",
+  "n8n-nodes-base": "~0.169.1",
+  "n8n-workflow": "~0.94.0",
   "nodemailer": "^6.7.1",
   "oauth-1.0a": "^2.2.6",
   "open": "^7.0.0",
@@ -15,7 +15,7 @@ import {
 import { ChildProcess } from 'child_process';
 import { stringify } from 'flatted';
 // eslint-disable-next-line import/no-extraneous-dependencies
-import * as PCancelable from 'p-cancelable';
+import PCancelable from 'p-cancelable';
 // eslint-disable-next-line import/no-cycle
 import {
 	Db,
@@ -30,7 +30,7 @@ import {
 	LoggerProxy as Logger,
 } from 'n8n-workflow';

-import * as express from 'express';
+import express from 'express';

 // eslint-disable-next-line import/no-cycle
 import {
@@ -48,7 +48,7 @@ import {
 	WorkflowRunner,
 	ExternalHooks,
 } from '.';
-import config = require('../config');
+import config from '../config';
 import { User } from './databases/entities/User';
 import { whereClause } from './WorkflowHelpers';
 import { WorkflowEntity } from './databases/entities/WorkflowEntity';
@@ -74,7 +74,7 @@ export class ActiveWorkflowRunner {
 		relations: ['shared', 'shared.user', 'shared.user.globalRole'],
 	})) as IWorkflowDb[];

-	if (!config.get('endpoints.skipWebhoooksDeregistrationOnShutdown')) {
+	if (!config.getEnv('endpoints.skipWebhoooksDeregistrationOnShutdown')) {
 		// Do not clean up database when skip registration is done.
 		// This flag is set when n8n is running in scaled mode.
 		// Impact is minimal, but for a short while, n8n will stop accepting requests.
@@ -466,7 +466,7 @@ export class ActiveWorkflowRunner {
 	} catch (error) {
 		if (
 			activation === 'init' &&
-			config.get('endpoints.skipWebhoooksDeregistrationOnShutdown') &&
+			config.getEnv('endpoints.skipWebhoooksDeregistrationOnShutdown') &&
 			error.name === 'QueryFailedError'
 		) {
 			// When skipWebhoooksDeregistrationOnShutdown is enabled,
@@ -682,6 +682,15 @@ export class ActiveWorkflowRunner {
 				(error) => console.error(error),
 			);
 		};
+		returnFunctions.emitError = async (error: Error): Promise<void> => {
+			await this.activeWorkflows?.remove(workflowData.id.toString());
+			this.activationErrors[workflowData.id.toString()] = {
+				time: new Date().getTime(),
+				error: {
+					message: error.message,
+				},
+			};
+		};
 		return returnFunctions;
 	};
 }
@@ -79,6 +79,7 @@ export class CredentialsHelper extends ICredentialsHelper {
 		incomingRequestOptions: IHttpRequestOptions | IRequestOptionsSimplified,
 		workflow: Workflow,
 		node: INode,
+		defaultTimezone: string,
 	): Promise<IHttpRequestOptions> {
 		const requestOptions = incomingRequestOptions;
 		const credentialType = this.credentialTypes.getByName(typeName);
@@ -127,6 +128,7 @@ export class CredentialsHelper extends ICredentialsHelper {
 				{ $credentials: credentials },
 				workflow,
 				node,
+				defaultTimezone,
 			);

 			const value = this.resolveValue(
@@ -135,6 +137,7 @@ export class CredentialsHelper extends ICredentialsHelper {
 				{ $credentials: credentials },
 				workflow,
 				node,
+				defaultTimezone,
 			);
 			requestOptions.headers[key] = value;
 		} else if (authenticate.type === 'queryAuth') {
@@ -144,6 +147,7 @@ export class CredentialsHelper extends ICredentialsHelper {
 				{ $credentials: credentials },
 				workflow,
 				node,
+				defaultTimezone,
 			);

 			const value = this.resolveValue(
@@ -152,6 +156,7 @@ export class CredentialsHelper extends ICredentialsHelper {
 				{ $credentials: credentials },
 				workflow,
 				node,
+				defaultTimezone,
 			);
 			if (!requestOptions.qs) {
 				requestOptions.qs = {};
@@ -172,6 +177,7 @@ export class CredentialsHelper extends ICredentialsHelper {
 		additionalKeys: IWorkflowDataProxyAdditionalKeys,
 		workflow: Workflow,
 		node: INode,
+		defaultTimezone: string,
 	): string {
 		if (parameterValue.charAt(0) !== '=') {
 			return parameterValue;
@@ -181,6 +187,7 @@ export class CredentialsHelper extends ICredentialsHelper {
 			node,
 			parameterValue,
 			'internal',
+			defaultTimezone,
 			additionalKeys,
 			'',
 		);
@@ -293,6 +300,7 @@ export class CredentialsHelper extends ICredentialsHelper {
 		nodeCredentials: INodeCredentialsDetails,
 		type: string,
 		mode: WorkflowExecuteMode,
+		defaultTimezone: string,
 		raw?: boolean,
 		expressionResolveValues?: ICredentialsExpressionResolveValues,
 	): Promise<ICredentialDataDecryptedObject> {
@@ -307,6 +315,7 @@ export class CredentialsHelper extends ICredentialsHelper {
 			decryptedDataOriginal,
 			type,
 			mode,
+			defaultTimezone,
 			expressionResolveValues,
 		);
 	}
@@ -323,6 +332,7 @@ export class CredentialsHelper extends ICredentialsHelper {
 		decryptedDataOriginal: ICredentialDataDecryptedObject,
 		type: string,
 		mode: WorkflowExecuteMode,
+		defaultTimezone: string,
 		expressionResolveValues?: ICredentialsExpressionResolveValues,
 	): ICredentialDataDecryptedObject {
 		const credentialsProperties = this.getCredentialsProperties(type);
@@ -342,14 +352,11 @@ export class CredentialsHelper extends ICredentialsHelper {
 		}

 		if (expressionResolveValues) {
+			const timezone =
+				(expressionResolveValues.workflow.settings.timezone as string) || defaultTimezone;
+
 			try {
-				const workflow = new Workflow({
-					nodes: Object.values(expressionResolveValues.workflow.nodes),
-					connections: expressionResolveValues.workflow.connectionsBySourceNode,
-					active: false,
-					nodeTypes: expressionResolveValues.workflow.nodeTypes,
-				});
-				decryptedData = workflow.expression.getParameterValue(
+				decryptedData = expressionResolveValues.workflow.expression.getParameterValue(
 					decryptedData as INodeParameters,
 					expressionResolveValues.runExecutionData,
 					expressionResolveValues.runIndex,
@@ -357,6 +364,7 @@ export class CredentialsHelper extends ICredentialsHelper {
 					expressionResolveValues.node.name,
 					expressionResolveValues.connectionInputData,
 					mode,
+					timezone,
 					{},
 					false,
 					decryptedData,
@@ -387,6 +395,7 @@ export class CredentialsHelper extends ICredentialsHelper {
 				node,
 				decryptedData as INodeParameters,
 				mode,
+				defaultTimezone,
 				{},
 				undefined,
 				decryptedData,
@@ -15,11 +15,11 @@ import {
 	Repository,
 } from 'typeorm';
 import { TlsOptions } from 'tls';
-import * as path from 'path';
+import path from 'path';
 // eslint-disable-next-line import/no-cycle
 import { DatabaseType, GenericHelpers, IDatabaseCollections } from '.';

-import * as config from '../config';
+import config from '../config';

 // eslint-disable-next-line import/no-cycle
 import { entities } from './databases/entities';
@@ -59,7 +59,7 @@ export async function init(

 	let connectionOptions: ConnectionOptions;

-	const entityPrefix = config.get('database.tablePrefix');
+	const entityPrefix = config.getEnv('database.tablePrefix');

 	if (testConnectionOptions) {
 		connectionOptions = testConnectionOptions;
@@ -95,7 +95,7 @@ export async function init(
 			password: (await GenericHelpers.getConfigValue('database.postgresdb.password')) as string,
 			port: (await GenericHelpers.getConfigValue('database.postgresdb.port')) as number,
 			username: (await GenericHelpers.getConfigValue('database.postgresdb.user')) as string,
-			schema: config.get('database.postgresdb.schema'),
+			schema: config.getEnv('database.postgresdb.schema'),
 			migrations: postgresMigrations,
 			migrationsRun: true,
 			migrationsTableName: `${entityPrefix}migrations`,
@@ -4,7 +4,7 @@
 // eslint-disable-next-line import/no-cycle
 import { Db, IExternalHooksClass, IExternalHooksFileData, IExternalHooksFunctions } from '.';

-import * as config from '../config';
+import config from '../config';

 class ExternalHooksClass implements IExternalHooksClass {
 	externalHooks: {

@@ -34,7 +34,7 @@ class ExternalHooksClass implements IExternalHooksClass {
 	}

 	async loadHooksFiles(reload = false) {
-		const externalHookFiles = config.get('externalHookFiles').split(':');
+		const externalHookFiles = config.getEnv('externalHookFiles').split(':');

 		// Load all the provided hook-files
 		for (let hookFilePath of externalHookFiles) {
@@ -5,12 +5,12 @@
 /* eslint-disable no-param-reassign */
 /* eslint-disable no-underscore-dangle */
 /* eslint-disable @typescript-eslint/no-unsafe-member-access */
-import * as express from 'express';
+import express from 'express';
 import { join as pathJoin } from 'path';
 import { readFile as fsReadFile } from 'fs/promises';
 import { IDataObject } from 'n8n-workflow';
 import { validate } from 'class-validator';
-import * as config from '../config';
+import config from '../config';

 // eslint-disable-next-line import/no-cycle
 import { Db, ICredentialsDb, IPackageVersions, ResponseHelper } from '.';
@ -31,10 +31,10 @@ let versionCache: IPackageVersions | undefined;
|
|||
* @returns {string}
|
||||
*/
|
||||
export function getBaseUrl(): string {
|
||||
const protocol = config.get('protocol');
|
||||
const host = config.get('host');
|
||||
const port = config.get('port');
|
||||
const path = config.get('path');
|
||||
const protocol = config.getEnv('protocol');
|
||||
const host = config.getEnv('host');
|
||||
const port = config.getEnv('port');
|
||||
const path = config.getEnv('path');
|
||||
|
||||
if ((protocol === 'http' && port === 80) || (protocol === 'https' && port === 443)) {
|
||||
return `${protocol}://${host}${path}`;
|
||||
|
@ -117,14 +117,16 @@ export async function getConfigValue(
|
|||
// Check if environment variable is defined for config key
|
||||
if (currentSchema.env === undefined) {
|
||||
// No environment variable defined, so return value from config
|
||||
return config.get(configKey);
|
||||
// @ts-ignore
|
||||
return config.getEnv(configKey);
|
||||
}
|
||||
|
||||
// Check if special file enviroment variable exists
|
||||
const fileEnvironmentVariable = process.env[`${currentSchema.env}_FILE`];
|
||||
if (fileEnvironmentVariable === undefined) {
|
||||
// Does not exist, so return value from config
|
||||
return config.get(configKey);
|
||||
// @ts-ignore
|
||||
return config.getEnv(configKey);
|
||||
}
|
||||
|
||||
let data;
|
||||
|
|
|
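Note: the `config.get(...) as string` → `config.getEnv(...)` migration seen here and throughout the rest of this commit works because getEnv derives its return type from the config schema, so call sites can drop their type assertions. A self-contained sketch of the pattern (the key names are real config keys from this diff; the store and types are illustrative stand-ins for n8n's convict-backed config):

    type ConfigSchema = {
        'database.tablePrefix': string;
        'executions.mode': 'regular' | 'queue';
        'diagnostics.enabled': boolean;
    };

    // Stand-in for the convict-backed store; values are just defaults for the sketch.
    const store: ConfigSchema = {
        'database.tablePrefix': '',
        'executions.mode': 'regular',
        'diagnostics.enabled': true,
    };

    function getEnv<K extends keyof ConfigSchema>(key: K): ConfigSchema[K] {
        return store[key];
    }

    const mode = getEnv('executions.mode'); // inferred as 'regular' | 'queue', no cast needed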
@@ -21,7 +21,7 @@ import {
 import { WorkflowExecute } from 'n8n-core';

 // eslint-disable-next-line import/no-extraneous-dependencies
-import * as PCancelable from 'p-cancelable';
+import PCancelable from 'p-cancelable';
 import { Repository } from 'typeorm';

 import { ChildProcess } from 'child_process';
@@ -329,7 +329,7 @@ export interface IDiagnosticInfo {
 		};
 	};
 	executionVariables: {
-		[key: string]: string | number | undefined;
+		[key: string]: string | number | boolean | undefined;
 	};
 	deploymentType: string;
 	binaryDataMode: string;
@@ -458,7 +458,7 @@ export interface IN8nUISettings {
 	defaultLocale: string;
 	userManagement: IUserManagementSettings;
 	workflowTagsDisabled: boolean;
-	logLevel: 'info' | 'debug' | 'warn' | 'error' | 'verbose';
+	logLevel: 'info' | 'debug' | 'warn' | 'error' | 'verbose' | 'silent';
 	hiringBannerEnabled: boolean;
 	templates: {
 		enabled: boolean;
@@ -26,10 +26,10 @@ import {
 	readFile as fsReadFile,
 	stat as fsStat,
 } from 'fs/promises';
-import * as glob from 'fast-glob';
-import * as path from 'path';
+import glob from 'fast-glob';
+import path from 'path';
 import { getLogger } from './Logger';
-import * as config from '../config';
+import config from '../config';

 const CUSTOM_NODES_CATEGORY = 'Custom Nodes';

@@ -38,9 +38,9 @@ class LoadNodesAndCredentialsClass {

 	credentialTypes: ICredentialTypeData = {};

-	excludeNodes: string[] | undefined = undefined;
+	excludeNodes: string | undefined = undefined;

-	includeNodes: string[] | undefined = undefined;
+	includeNodes: string | undefined = undefined;

 	nodeModulesPath = '';

@@ -76,8 +76,8 @@ class LoadNodesAndCredentialsClass {
 			throw new Error('Could not find "node_modules" folder!');
 		}

-		this.excludeNodes = config.get('nodes.exclude');
-		this.includeNodes = config.get('nodes.include');
+		this.excludeNodes = config.getEnv('nodes.exclude');
+		this.includeNodes = config.getEnv('nodes.include');

 		// Get all the installed packages which contain n8n nodes
 		const packages = await this.getN8nNodePackages();
@@ -1,24 +1,28 @@
 /* eslint-disable @typescript-eslint/no-shadow */
 /* eslint-disable @typescript-eslint/no-unsafe-assignment */
-import * as winston from 'winston';
+import winston from 'winston';

 import { IDataObject, ILogger, LogTypes } from 'n8n-workflow';

-import * as callsites from 'callsites';
+import callsites from 'callsites';
 import { basename } from 'path';
-import config = require('../config');
+import config from '../config';

 class Logger implements ILogger {
 	private logger: winston.Logger;

 	constructor() {
-		// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
-		const level = config.get('logs.level');
-		// eslint-disable-next-line @typescript-eslint/no-shadow
-		const output = (config.get('logs.output') as string).split(',').map((output) => output.trim());
+		const level = config.getEnv('logs.level');
+
+		const output = config
+			.getEnv('logs.output')
+			.split(',')
+			.map((output) => output.trim());

 		this.logger = winston.createLogger({
-			// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
 			level,
+			silent: level === 'silent',
 		});

 		if (output.includes('console')) {
@@ -55,10 +59,10 @@ class Logger implements ILogger {
 			);
 			this.logger.add(
 				new winston.transports.File({
-					filename: config.get('logs.file.location'),
+					filename: config.getEnv('logs.file.location'),
 					format: fileLogFormat,
-					maxsize: (config.get('logs.file.fileSizeMax') as number) * 1048576, // config * 1mb
-					maxFiles: config.get('logs.file.fileCountMax'),
+					maxsize: config.getEnv('logs.file.fileSizeMax') * 1048576, // config * 1mb
+					maxFiles: config.getEnv('logs.file.fileCountMax'),
 				}),
 			);
 		}
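Note: winston has no built-in 'silent' level, which is why the constructor above pairs the configured level with a `silent` flag. Minimal standalone sketch (environment variable name is illustrative):

    import winston from 'winston';

    const level = process.env.LOG_LEVEL ?? 'info';

    const logger = winston.createLogger({
        level,
        // Drop all output when the configured level is 'silent'.
        silent: level === 'silent',
        transports: [new winston.transports.Console()],
    });

    logger.info('visible unless LOG_LEVEL=silent');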
@@ -1,8 +1,8 @@
 /* eslint-disable @typescript-eslint/no-unsafe-call */
 /* eslint-disable @typescript-eslint/no-unsafe-member-access */
 // @ts-ignore
-import * as sseChannel from 'sse-channel';
-import * as express from 'express';
+import sseChannel from 'sse-channel';
+import express from 'express';

 import { LoggerProxy as Logger } from 'n8n-workflow';
 // eslint-disable-next-line import/no-cycle
@@ -1,6 +1,6 @@
 /* eslint-disable @typescript-eslint/no-unsafe-member-access */
-import * as Bull from 'bull';
-import * as config from '../config';
+import Bull from 'bull';
+import config from '../config';
 // eslint-disable-next-line import/no-cycle
 import { IBullJobData, IBullWebhookResponse } from './Interfaces';
 // eslint-disable-next-line import/no-cycle
@@ -16,8 +16,8 @@ export class Queue {
 	constructor() {
 		this.activeExecutions = ActiveExecutions.getInstance();

-		const prefix = config.get('queue.bull.prefix') as string;
-		const redisOptions = config.get('queue.bull.redis') as object;
+		const prefix = config.getEnv('queue.bull.prefix');
+		const redisOptions = config.getEnv('queue.bull.redis');
 		// Disabling ready check is necessary as it allows worker to
 		// quickly reconnect to Redis if Redis crashes or is unreachable
 		// for some time. With it enabled, worker might take minutes to realize
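Note: the prefix and redis options read above are what Bull's constructor consumes; the actual `new Bull(...)` call sits outside this hunk, so the following is a sketch with illustrative values. The ready-check remark in the diff corresponds to ioredis's enableReadyCheck option:

    import Bull from 'bull';

    const prefix = 'bull';
    const redisOptions = { host: 'localhost', port: 6379 };

    const jobQueue = new Bull('jobs', {
        prefix,
        // Disable the ready check so workers reconnect quickly after a Redis outage.
        redis: { ...redisOptions, enableReadyCheck: false },
    });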
@@ -96,15 +96,13 @@ export function sendSuccessResponse(
 	}
 }

-export function sendErrorResponse(res: Response, error: ResponseError, shouldLog = true) {
+export function sendErrorResponse(res: Response, error: ResponseError) {
 	let httpStatusCode = 500;
 	if (error.httpStatusCode) {
 		httpStatusCode = error.httpStatusCode;
 	}

-	shouldLog = !process.argv[1].split('/').includes('jest');
-
-	if (process.env.NODE_ENV !== 'production' && shouldLog) {
+	if (!process.env.NODE_ENV || process.env.NODE_ENV === 'development') {
 		console.error('ERROR RESPONSE');
 		console.error(error);
 	}
@@ -1,3 +1,4 @@
+/* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */
 /* eslint-disable @typescript-eslint/no-unnecessary-type-assertion */
 /* eslint-disable @typescript-eslint/no-use-before-define */
 /* eslint-disable @typescript-eslint/await-thenable */
@@ -28,10 +29,10 @@
 /* eslint-disable import/no-dynamic-require */
 /* eslint-disable no-await-in-loop */

-import * as express from 'express';
+import express from 'express';
 import { readFileSync } from 'fs';
 import { readFile } from 'fs/promises';
-import { cloneDeep } from 'lodash';
+import _, { cloneDeep } from 'lodash';
 import { dirname as pathDirname, join as pathJoin, resolve as pathResolve } from 'path';
 import {
 	FindConditions,
@@ -46,22 +47,19 @@ import {
 	Not,
 	Raw,
 } from 'typeorm';
-import * as bodyParser from 'body-parser';
-import * as cookieParser from 'cookie-parser';
-import * as history from 'connect-history-api-fallback';
-import * as os from 'os';
+import bodyParser from 'body-parser';
+import cookieParser from 'cookie-parser';
+import history from 'connect-history-api-fallback';
+import os from 'os';
 // eslint-disable-next-line import/no-extraneous-dependencies
-import * as _ from 'lodash';
-import * as clientOAuth2 from 'client-oauth2';
-import * as clientOAuth1 from 'oauth-1.0a';
-import { RequestOptions } from 'oauth-1.0a';
-import * as csrf from 'csrf';
-import * as requestPromise from 'request-promise-native';
+import clientOAuth2 from 'client-oauth2';
+import clientOAuth1, { RequestOptions } from 'oauth-1.0a';
+import csrf from 'csrf';
+import requestPromise, { OptionsWithUrl } from 'request-promise-native';
 import { createHmac, randomBytes } from 'crypto';
 // IMPORTANT! Do not switch to anther bcrypt library unless really necessary and
 // tested with all possible systems like Windows, Alpine on ARM, FreeBSD, ...
 import { compare } from 'bcryptjs';
-import * as promClient from 'prom-client';

 import {
 	BinaryDataManager,
@@ -90,16 +88,15 @@ import {
 	WorkflowExecuteMode,
 } from 'n8n-workflow';

-import * as basicAuth from 'basic-auth';
-import * as compression from 'compression';
-import * as jwt from 'jsonwebtoken';
-import * as jwks from 'jwks-rsa';
+import basicAuth from 'basic-auth';
+import compression from 'compression';
+import jwt from 'jsonwebtoken';
+import jwks from 'jwks-rsa';
 // @ts-ignore
-import * as timezones from 'google-timezones-json';
-import * as parseUrl from 'parseurl';
-import * as querystring from 'querystring';
-import { OptionsWithUrl } from 'request-promise-native';
-import { Registry } from 'prom-client';
+import timezones from 'google-timezones-json';
+import parseUrl from 'parseurl';
+import querystring from 'querystring';
+import promClient, { Registry } from 'prom-client';
 import * as Queue from './Queue';
 import {
 	ActiveExecutions,
@@ -142,7 +139,7 @@ import {
 	WorkflowRunner,
 } from '.';

-import * as config from '../config';
+import config from '../config';

 import * as TagHelpers from './TagHelpers';

@@ -201,9 +198,9 @@ class App {

 	defaultCredentialsName: string;

-	saveDataErrorExecution: string;
+	saveDataErrorExecution: 'all' | 'none';

-	saveDataSuccessExecution: string;
+	saveDataSuccessExecution: 'all' | 'none';

 	saveManualExecutions: boolean;

@@ -240,12 +237,12 @@ class App {
 	constructor() {
 		this.app = express();

-		this.endpointWebhook = config.get('endpoints.webhook') as string;
-		this.endpointWebhookWaiting = config.get('endpoints.webhookWaiting') as string;
-		this.endpointWebhookTest = config.get('endpoints.webhookTest') as string;
+		this.endpointWebhook = config.getEnv('endpoints.webhook');
+		this.endpointWebhookWaiting = config.getEnv('endpoints.webhookWaiting');
+		this.endpointWebhookTest = config.getEnv('endpoints.webhookTest');

-		this.defaultWorkflowName = config.get('workflows.defaultName') as string;
-		this.defaultCredentialsName = config.get('credentials.defaultName') as string;
+		this.defaultWorkflowName = config.getEnv('workflows.defaultName');
+		this.defaultCredentialsName = config.getEnv('credentials.defaultName');

 		this.saveDataErrorExecution = config.get('executions.saveDataOnError') as string;
 		this.saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string;
@@ -264,22 +261,22 @@ class App {
 		this.activeExecutionsInstance = ActiveExecutions.getInstance();
 		this.waitTracker = WaitTracker();

-		this.protocol = config.get('protocol');
-		this.sslKey = config.get('ssl_key');
-		this.sslCert = config.get('ssl_cert');
+		this.protocol = config.getEnv('protocol');
+		this.sslKey = config.getEnv('ssl_key');
+		this.sslCert = config.getEnv('ssl_cert');

 		this.externalHooks = externalHooks;

 		this.presetCredentialsLoaded = false;
-		this.endpointPresetCredentials = config.get('credentials.overwrite.endpoint') as string;
+		this.endpointPresetCredentials = config.getEnv('credentials.overwrite.endpoint');

 		const urlBaseWebhook = WebhookHelpers.getWebhookBaseUrl();
 		const telemetrySettings: ITelemetrySettings = {
-			enabled: config.get('diagnostics.enabled') as boolean,
+			enabled: config.getEnv('diagnostics.enabled'),
 		};

 		if (telemetrySettings.enabled) {
-			const conf = config.get('diagnostics.config.frontend') as string;
+			const conf = config.getEnv('diagnostics.config.frontend');
 			const [key, url] = conf.split(';');

 			if (!key || !url) {
@@ -307,31 +304,31 @@ class App {
 				oauth2: `${urlBaseWebhook}${this.restEndpoint}/oauth2-credential/callback`,
 			},
 			versionNotifications: {
-				enabled: config.get('versionNotifications.enabled'),
-				endpoint: config.get('versionNotifications.endpoint'),
-				infoUrl: config.get('versionNotifications.infoUrl'),
+				enabled: config.getEnv('versionNotifications.enabled'),
+				endpoint: config.getEnv('versionNotifications.endpoint'),
+				infoUrl: config.getEnv('versionNotifications.infoUrl'),
 			},
 			instanceId: '',
 			telemetry: telemetrySettings,
 			personalizationSurveyEnabled:
-				config.get('personalization.enabled') && config.get('diagnostics.enabled'),
-			defaultLocale: config.get('defaultLocale'),
+				config.getEnv('personalization.enabled') && config.getEnv('diagnostics.enabled'),
+			defaultLocale: config.getEnv('defaultLocale'),
 			userManagement: {
 				enabled:
-					config.get('userManagement.disabled') === false ||
-					config.get('userManagement.isInstanceOwnerSetUp') === true,
+					config.getEnv('userManagement.disabled') === false ||
+					config.getEnv('userManagement.isInstanceOwnerSetUp') === true,
 				showSetupOnFirstLoad:
-					config.get('userManagement.disabled') === false &&
-					config.get('userManagement.isInstanceOwnerSetUp') === false &&
-					config.get('userManagement.skipInstanceOwnerSetup') === false,
+					config.getEnv('userManagement.disabled') === false &&
+					config.getEnv('userManagement.isInstanceOwnerSetUp') === false &&
+					config.getEnv('userManagement.skipInstanceOwnerSetup') === false,
 				smtpSetup: isEmailSetUp(),
 			},
-			workflowTagsDisabled: config.get('workflowTagsDisabled'),
-			logLevel: config.get('logs.level'),
-			hiringBannerEnabled: config.get('hiringBanner.enabled'),
+			workflowTagsDisabled: config.getEnv('workflowTagsDisabled'),
+			logLevel: config.getEnv('logs.level'),
+			hiringBannerEnabled: config.getEnv('hiringBanner.enabled'),
 			templates: {
-				enabled: config.get('templates.enabled'),
-				host: config.get('templates.host'),
+				enabled: config.getEnv('templates.enabled'),
+				host: config.getEnv('templates.host'),
 			},
 		};
 	}
@@ -353,23 +350,23 @@ class App {
 		// refresh user management status
 		Object.assign(this.frontendSettings.userManagement, {
 			enabled:
-				config.get('userManagement.disabled') === false ||
-				config.get('userManagement.isInstanceOwnerSetUp') === true,
+				config.getEnv('userManagement.disabled') === false ||
+				config.getEnv('userManagement.isInstanceOwnerSetUp') === true,
 			showSetupOnFirstLoad:
-				config.get('userManagement.disabled') === false &&
-				config.get('userManagement.isInstanceOwnerSetUp') === false &&
-				config.get('userManagement.skipInstanceOwnerSetup') === false,
+				config.getEnv('userManagement.disabled') === false &&
+				config.getEnv('userManagement.isInstanceOwnerSetUp') === false &&
+				config.getEnv('userManagement.skipInstanceOwnerSetup') === false,
 		});

 		return this.frontendSettings;
 	}

 	async config(): Promise<void> {
-		const enableMetrics = config.get('endpoints.metrics.enable') as boolean;
+		const enableMetrics = config.getEnv('endpoints.metrics.enable');
 		let register: Registry;

 		if (enableMetrics) {
-			const prefix = config.get('endpoints.metrics.prefix') as string;
+			const prefix = config.getEnv('endpoints.metrics.prefix');
 			register = new promClient.Registry();
 			register.setDefaultLabels({ prefix });
 			promClient.collectDefaultMetrics({ register });
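Note: the metrics branch above is standard prom-client usage; as a standalone sketch (prefix value illustrative):

    import promClient, { Registry } from 'prom-client';

    const register: Registry = new promClient.Registry();
    register.setDefaultLabels({ prefix: 'n8n_' });
    promClient.collectDefaultMetrics({ register });

    // register.metrics() later yields the Prometheus exposition text for the endpoint.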
@@ -382,7 +379,7 @@ class App {

 		await this.externalHooks.run('frontend.settings', [this.frontendSettings]);

-		const excludeEndpoints = config.get('security.excludeEndpoints') as string;
+		const excludeEndpoints = config.getEnv('security.excludeEndpoints');

 		const ignoredEndpoints = [
 			'healthz',
@@ -399,7 +396,7 @@ class App {
 		const authIgnoreRegex = new RegExp(`^\/(${_(ignoredEndpoints).compact().join('|')})\/?.*$`);

 		// Check for basic auth credentials if activated
-		const basicAuthActive = config.get('security.basicAuth.active') as boolean;
+		const basicAuthActive = config.getEnv('security.basicAuth.active');
 		if (basicAuthActive) {
 			const basicAuthUser = (await GenericHelpers.getConfigValue(
 				'security.basicAuth.user',
@@ -424,7 +421,10 @@ class App {
 			this.app.use(
 				async (req: express.Request, res: express.Response, next: express.NextFunction) => {
 					// Skip basic auth for a few listed endpoints or when instance owner has been setup
-					if (authIgnoreRegex.exec(req.url) || config.get('userManagement.isInstanceOwnerSetUp')) {
+					if (
+						authIgnoreRegex.exec(req.url) ||
+						config.getEnv('userManagement.isInstanceOwnerSetUp')
+					) {
 						return next();
 					}
 					const realm = 'n8n - Editor UI';
@@ -470,7 +470,7 @@ class App {
 			}

 			// Check for and validate JWT if configured
-			const jwtAuthActive = config.get('security.jwtAuth.active') as boolean;
+			const jwtAuthActive = config.getEnv('security.jwtAuth.active');
 			if (jwtAuthActive) {
 				const jwtAuthHeader = (await GenericHelpers.getConfigValue(
 					'security.jwtAuth.jwtHeader',
@@ -778,7 +778,7 @@ class App {

 				const { tags: tagIds } = req.body;

-				if (tagIds?.length && !config.get('workflowTagsDisabled')) {
+				if (tagIds?.length && !config.getEnv('workflowTagsDisabled')) {
 					newWorkflow.tags = await Db.collections.Tag!.findByIds(tagIds, {
 						select: ['id', 'name'],
 					});
@@ -812,7 +812,7 @@ class App {
 					throw new ResponseHelper.ResponseError('Failed to save workflow');
 				}

-				if (tagIds && !config.get('workflowTagsDisabled')) {
+				if (tagIds && !config.getEnv('workflowTagsDisabled')) {
 					savedWorkflow.tags = TagHelpers.sortByRequestOrder(savedWorkflow.tags, {
 						requestOrder: tagIds,
 					});
@@ -896,7 +896,7 @@ class App {
 					relations: ['tags'],
 				};

-				if (config.get('workflowTagsDisabled')) {
+				if (config.getEnv('workflowTagsDisabled')) {
 					delete query.relations;
 				}

@@ -956,7 +956,7 @@ class App {

 				let relations = ['workflow', 'workflow.tags'];

-				if (config.get('workflowTagsDisabled')) {
+				if (config.getEnv('workflowTagsDisabled')) {
 					relations = relations.filter((relation) => relation !== 'workflow.tags');
 				}

@@ -1066,8 +1066,8 @@ class App {

 				await Db.collections.Workflow!.update(workflowId, updateData);

-				if (tags && !config.get('workflowTagsDisabled')) {
-					const tablePrefix = config.get('database.tablePrefix');
+				if (tags && !config.getEnv('workflowTagsDisabled')) {
+					const tablePrefix = config.getEnv('database.tablePrefix');
 					await TagHelpers.removeRelations(workflowId, tablePrefix);

 					if (tags.length) {
@@ -1079,7 +1079,7 @@ class App {
 					relations: ['tags'],
 				};

-				if (config.get('workflowTagsDisabled')) {
+				if (config.getEnv('workflowTagsDisabled')) {
 					delete options.relations;
 				}

@@ -1261,11 +1261,11 @@ class App {
 				req: express.Request,
 				res: express.Response,
 			): Promise<TagEntity[] | ITagWithCountDb[]> => {
-				if (config.get('workflowTagsDisabled')) {
+				if (config.getEnv('workflowTagsDisabled')) {
 					throw new ResponseHelper.ResponseError('Workflow tags are disabled');
 				}
 				if (req.query.withUsageCount === 'true') {
-					const tablePrefix = config.get('database.tablePrefix');
+					const tablePrefix = config.getEnv('database.tablePrefix');
 					return TagHelpers.getTagsWithCountDb(tablePrefix);
 				}

@@ -1279,7 +1279,7 @@ class App {
 			`/${this.restEndpoint}/tags`,
 			ResponseHelper.send(
 				async (req: express.Request, res: express.Response): Promise<TagEntity | void> => {
-					if (config.get('workflowTagsDisabled')) {
+					if (config.getEnv('workflowTagsDisabled')) {
 						throw new ResponseHelper.ResponseError('Workflow tags are disabled');
 					}
 					const newTag = new TagEntity();
@@ -1302,7 +1302,7 @@ class App {
 			`/${this.restEndpoint}/tags/:id`,
 			ResponseHelper.send(
 				async (req: express.Request, res: express.Response): Promise<TagEntity | void> => {
-					if (config.get('workflowTagsDisabled')) {
+					if (config.getEnv('workflowTagsDisabled')) {
 						throw new ResponseHelper.ResponseError('Workflow tags are disabled');
 					}

@@ -1331,11 +1331,11 @@ class App {
 			`/${this.restEndpoint}/tags/:id`,
 			ResponseHelper.send(
 				async (req: TagsRequest.Delete, res: express.Response): Promise<boolean> => {
-					if (config.get('workflowTagsDisabled')) {
+					if (config.getEnv('workflowTagsDisabled')) {
 						throw new ResponseHelper.ResponseError('Workflow tags are disabled');
 					}
 					if (
-						config.get('userManagement.isInstanceOwnerSetUp') === true &&
+						config.getEnv('userManagement.isInstanceOwnerSetUp') === true &&
 						req.user.globalRole.name !== 'owner'
 					) {
 						throw new ResponseHelper.ResponseError(
@@ -1734,11 +1734,13 @@ class App {
 				}

 				const mode: WorkflowExecuteMode = 'internal';
+				const timezone = config.getEnv('generic.timezone');
 				const credentialsHelper = new CredentialsHelper(encryptionKey);
 				const decryptedDataOriginal = await credentialsHelper.getDecrypted(
 					credential as INodeCredentialsDetails,
 					credential.type,
 					mode,
+					timezone,
 					true,
 				);

@@ -1746,6 +1748,7 @@ class App {
 					decryptedDataOriginal,
 					credential.type,
 					mode,
+					timezone,
 				);

 				const signatureMethod = _.get(oauthCredentials, 'signatureMethod') as string;
@@ -1872,17 +1875,20 @@ class App {
 				}

 				const mode: WorkflowExecuteMode = 'internal';
+				const timezone = config.getEnv('generic.timezone');
 				const credentialsHelper = new CredentialsHelper(encryptionKey);
 				const decryptedDataOriginal = await credentialsHelper.getDecrypted(
 					credential as INodeCredentialsDetails,
 					credential.type,
 					mode,
+					timezone,
 					true,
 				);
 				const oauthCredentials = credentialsHelper.applyDefaultsAndOverwrites(
 					decryptedDataOriginal,
 					credential.type,
 					mode,
+					timezone,
 				);

 				const options: OptionsWithUrl = {
@@ -1987,11 +1993,13 @@ class App {
 				}

 				const mode: WorkflowExecuteMode = 'internal';
+				const timezone = config.getEnv('generic.timezone');
 				const credentialsHelper = new CredentialsHelper(encryptionKey);
 				const decryptedDataOriginal = await credentialsHelper.getDecrypted(
 					credential as INodeCredentialsDetails,
 					credential.type,
 					mode,
+					timezone,
 					true,
 				);

@@ -1999,6 +2007,7 @@ class App {
 					decryptedDataOriginal,
 					credential.type,
 					mode,
+					timezone,
 				);

 				const token = new csrf();
@@ -2127,17 +2136,20 @@ class App {
 				}

 				const mode: WorkflowExecuteMode = 'internal';
+				const timezone = config.getEnv('generic.timezone');
 				const credentialsHelper = new CredentialsHelper(encryptionKey);
 				const decryptedDataOriginal = await credentialsHelper.getDecrypted(
 					credential as INodeCredentialsDetails,
 					credential.type,
 					mode,
+					timezone,
 					true,
 				);
 				const oauthCredentials = credentialsHelper.applyDefaultsAndOverwrites(
 					decryptedDataOriginal,
 					credential.type,
 					mode,
+					timezone,
 				);

 				const token = new csrf();
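Note: the four OAuth handlers above all gain the same two additions; the repeated pattern, condensed from the visible hunks, is:

    const mode: WorkflowExecuteMode = 'internal';
    const timezone = config.getEnv('generic.timezone'); // new in this commit

    const decryptedDataOriginal = await credentialsHelper.getDecrypted(
        credential as INodeCredentialsDetails,
        credential.type,
        mode,
        timezone, // threaded into decryption...
        true,
    );
    const oauthCredentials = credentialsHelper.applyDefaultsAndOverwrites(
        decryptedDataOriginal,
        credential.type,
        mode,
        timezone, // ...and into defaults/overwrites resolution
    );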
@@ -2260,7 +2272,7 @@ class App {

 				const executingWorkflowIds: string[] = [];

-				if (config.get('executions.mode') === 'queue') {
+				if (config.getEnv('executions.mode') === 'queue') {
 					const currentJobs = await Queue.getInstance().getJobs(['active', 'waiting']);
 					executingWorkflowIds.push(...currentJobs.map(({ data }) => data.executionId));
 				}
@@ -2623,7 +2635,7 @@ class App {
 			`/${this.restEndpoint}/executions-current`,
 			ResponseHelper.send(
 				async (req: ExecutionRequest.GetAllCurrent): Promise<IExecutionsSummary[]> => {
-					if (config.get('executions.mode') === 'queue') {
+					if (config.getEnv('executions.mode') === 'queue') {
 						const currentJobs = await Queue.getInstance().getJobs(['active', 'waiting']);

 						const currentlyRunningQueueIds = currentJobs.map((job) => job.data.executionId);
@@ -2688,7 +2700,7 @@ class App {
 					for (const data of executingWorkflows) {
 						if (
 							(filter.workflowId !== undefined && filter.workflowId !== data.workflowId) ||
-							!sharedWorkflowIds.includes(data.workflowId)
+							!sharedWorkflowIds.includes(data.workflowId.toString())
 						) {
 							continue;
 						}
@@ -2732,7 +2744,7 @@ class App {
 					throw new ResponseHelper.ResponseError('Execution not found', undefined, 404);
 				}

-				if (config.get('executions.mode') === 'queue') {
+				if (config.getEnv('executions.mode') === 'queue') {
 					// Manual executions should still be stoppable, so
 					// try notifying the `activeExecutions` to stop it.
 					const result = await this.activeExecutionsInstance.stopExecution(req.params.id);
@@ -2859,7 +2871,7 @@ class App {
 		// Webhooks
 		// ----------------------------------------

-		if (config.get('endpoints.disableProductionWebhooksOnMainProcess') !== true) {
+		if (!config.getEnv('endpoints.disableProductionWebhooksOnMainProcess')) {
 			WebhookServer.registerProductionWebhooks.apply(this);
 		}

@@ -2954,11 +2966,11 @@ class App {
 			);
 		}

-		if (config.get('endpoints.disableUi') !== true) {
+		if (!config.getEnv('endpoints.disableUi')) {
 			// Read the index file and replace the path placeholder
 			const editorUiPath = require.resolve('n8n-editor-ui');
 			const filePath = pathJoin(pathDirname(editorUiPath), 'dist', 'index.html');
-			const n8nPath = config.get('path');
+			const n8nPath = config.getEnv('path');

 			let readIndexFile = readFileSync(filePath, 'utf8');
 			readIndexFile = readIndexFile.replace(/\/%BASE_PATH%\//g, n8nPath);
@@ -2990,8 +3002,8 @@ class App {
 }

 export async function start(): Promise<void> {
-	const PORT = config.get('port');
-	const ADDRESS = config.get('listen_address');
+	const PORT = config.getEnv('port');
+	const ADDRESS = config.getEnv('listen_address');

 	const app = new App();

@@ -3015,7 +3027,7 @@ export async function start(): Promise<void> {
 	console.log(`n8n ready on ${ADDRESS}, port ${PORT}`);
 	console.log(`Version: ${versions.cli}`);

-	const defaultLocale = config.get('defaultLocale');
+	const defaultLocale = config.getEnv('defaultLocale');

 	if (defaultLocale !== 'en') {
 		console.log(`Locale: ${defaultLocale}`);
@@ -3023,13 +3035,14 @@ export async function start(): Promise<void> {

 	await app.externalHooks.run('n8n.ready', [app]);
 	const cpus = os.cpus();
-	const binarDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+	const binarDataConfig = config.getEnv('binaryDataManager');
 	const diagnosticInfo: IDiagnosticInfo = {
-		basicAuthActive: config.get('security.basicAuth.active') as boolean,
+		basicAuthActive: config.getEnv('security.basicAuth.active'),
 		databaseType: (await GenericHelpers.getConfigValue('database.type')) as DatabaseType,
-		disableProductionWebhooksOnMainProcess:
-			config.get('endpoints.disableProductionWebhooksOnMainProcess') === true,
-		notificationsEnabled: config.get('versionNotifications.enabled') === true,
+		disableProductionWebhooksOnMainProcess: config.getEnv(
+			'endpoints.disableProductionWebhooksOnMainProcess',
+		),
+		notificationsEnabled: config.getEnv('versionNotifications.enabled'),
 		versionCli: versions.cli,
 		systemInfo: {
 			os: {
@@ -3044,24 +3057,26 @@ export async function start(): Promise<void> {
 			},
 		},
 		executionVariables: {
-			executions_process: config.get('executions.process'),
-			executions_mode: config.get('executions.mode'),
-			executions_timeout: config.get('executions.timeout'),
-			executions_timeout_max: config.get('executions.maxTimeout'),
-			executions_data_save_on_error: config.get('executions.saveDataOnError'),
-			executions_data_save_on_success: config.get('executions.saveDataOnSuccess'),
-			executions_data_save_on_progress: config.get('executions.saveExecutionProgress'),
-			executions_data_save_manual_executions: config.get('executions.saveDataManualExecutions'),
-			executions_data_prune: config.get('executions.pruneData'),
-			executions_data_max_age: config.get('executions.pruneDataMaxAge'),
-			executions_data_prune_timeout: config.get('executions.pruneDataTimeout'),
+			executions_process: config.getEnv('executions.process'),
+			executions_mode: config.getEnv('executions.mode'),
+			executions_timeout: config.getEnv('executions.timeout'),
+			executions_timeout_max: config.getEnv('executions.maxTimeout'),
+			executions_data_save_on_error: config.getEnv('executions.saveDataOnError'),
+			executions_data_save_on_success: config.getEnv('executions.saveDataOnSuccess'),
+			executions_data_save_on_progress: config.getEnv('executions.saveExecutionProgress'),
+			executions_data_save_manual_executions: config.getEnv(
+				'executions.saveDataManualExecutions',
+			),
+			executions_data_prune: config.getEnv('executions.pruneData'),
+			executions_data_max_age: config.getEnv('executions.pruneDataMaxAge'),
+			executions_data_prune_timeout: config.getEnv('executions.pruneDataTimeout'),
 		},
-		deploymentType: config.get('deployment.type'),
+		deploymentType: config.getEnv('deployment.type'),
 		binaryDataMode: binarDataConfig.mode,
 		n8n_multi_user_allowed:
-			config.get('userManagement.disabled') === false ||
-			config.get('userManagement.isInstanceOwnerSetUp') === true,
-		smtp_set_up: config.get('userManagement.emails.mode') === 'smtp',
+			config.getEnv('userManagement.disabled') === false ||
+			config.getEnv('userManagement.isInstanceOwnerSetUp') === true,
+		smtp_set_up: config.getEnv('userManagement.emails.mode') === 'smtp',
 	};

 	void Db.collections

@@ -1,7 +1,7 @@
 /* eslint-disable consistent-return */
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 /* eslint-disable no-param-reassign */
-import * as express from 'express';
+import express from 'express';

 import { ActiveWebhooks } from 'n8n-core';

@@ -2,14 +2,16 @@
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 /* eslint-disable import/no-cycle */
 import { Workflow } from 'n8n-workflow';
-import { In, IsNull, Not } from 'typeorm';
-import express = require('express');
+import { In } from 'typeorm';
+import express from 'express';
+import { compare, genSaltSync, hash } from 'bcryptjs';

 import { PublicUser } from './Interfaces';
-import { Db, GenericHelpers, ResponseHelper } from '..';
+import { Db, ResponseHelper } from '..';
 import { MAX_PASSWORD_LENGTH, MIN_PASSWORD_LENGTH, User } from '../databases/entities/User';
 import { Role } from '../databases/entities/Role';
 import { AuthenticatedRequest } from '../requests';
-import config = require('../../config');
+import * as config from '../../config';
 import { getWebhookBaseUrl } from '../WebhookHelpers';

 export async function getWorkflowOwner(workflowId: string | number): Promise<User> {
@@ -22,10 +24,10 @@ export async function getWorkflowOwner(workflowId: string | number): Promise<Use
 }

 export function isEmailSetUp(): boolean {
-	const smtp = config.get('userManagement.emails.mode') === 'smtp';
-	const host = !!config.get('userManagement.emails.smtp.host');
-	const user = !!config.get('userManagement.emails.smtp.auth.user');
-	const pass = !!config.get('userManagement.emails.smtp.auth.pass');
+	const smtp = config.getEnv('userManagement.emails.mode') === 'smtp';
+	const host = !!config.getEnv('userManagement.emails.smtp.host');
+	const user = !!config.getEnv('userManagement.emails.smtp.auth.user');
+	const pass = !!config.getEnv('userManagement.emails.smtp.auth.pass');

 	return smtp && host && user && pass;
 }
@@ -56,16 +58,11 @@ export async function getInstanceOwner(): Promise<User> {
 * Return the n8n instance base URL without trailing slash.
 */
export function getInstanceBaseUrl(): string {
-	const n8nBaseUrl = config.get('editorBaseUrl') || getWebhookBaseUrl();
+	const n8nBaseUrl = config.getEnv('editorBaseUrl') || getWebhookBaseUrl();

	return n8nBaseUrl.endsWith('/') ? n8nBaseUrl.slice(0, n8nBaseUrl.length - 1) : n8nBaseUrl;
}

-export async function isInstanceOwnerSetup(): Promise<boolean> {
-	const users = await Db.collections.User!.find({ email: Not(IsNull()) });
-	return users.length !== 0;
-}
-
 // TODO: Enforce at model level
 export function validatePassword(password?: string): string {
 	if (!password) {
@@ -199,7 +196,7 @@ export async function checkPermissionsForExecution(
 export function isAuthExcluded(url: string, ignoredEndpoints: string[]): boolean {
 	return !!ignoredEndpoints
 		.filter(Boolean) // skip empty paths
-		.find((ignoredEndpoint) => url.includes(ignoredEndpoint));
+		.find((ignoredEndpoint) => url.startsWith(`/${ignoredEndpoint}`));
 }

 /**
@@ -216,3 +213,23 @@ export function isPostUsersId(req: express.Request, restEndpoint: string): boole
 export function isAuthenticatedRequest(request: express.Request): request is AuthenticatedRequest {
 	return request.user !== undefined;
 }
+
+// ----------------------------------
+//            hashing
+// ----------------------------------
+
+export const hashPassword = async (validPassword: string): Promise<string> =>
+	hash(validPassword, genSaltSync(10));
+
+export async function compareHash(plaintext: string, hashed: string): Promise<boolean | undefined> {
+	try {
+		return await compare(plaintext, hashed);
+	} catch (error) {
+		if (error instanceof Error && error.message.includes('Invalid salt version')) {
+			error.message +=
+				'. Comparison against unhashed string. Please check that the value compared against has been hashed.';
+		}
+
+		throw new Error(error);
+	}
+}
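Note: usage sketch for the two hashing helpers added above — hashPassword on signup/reset, compareHash on login (bcryptjs returns promises when no callback is passed):

    import { compare, genSaltSync, hash } from 'bcryptjs';

    async function demo(): Promise<void> {
        const hashed = await hash('s3cret', genSaltSync(10)); // what hashPassword does
        const ok = await compare('s3cret', hashed); // what compareHash wraps with error handling
        console.log(ok); // true
    }
    void demo();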
@@ -1,14 +1,14 @@
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 /* eslint-disable import/no-cycle */

-import * as jwt from 'jsonwebtoken';
+import jwt from 'jsonwebtoken';
 import { Response } from 'express';
 import { createHash } from 'crypto';
 import { Db } from '../..';
 import { AUTH_COOKIE_NAME } from '../../constants';
 import { JwtPayload, JwtToken } from '../Interfaces';
 import { User } from '../../databases/entities/User';
-import config = require('../../../config');
+import * as config from '../../../config';

 export function issueJWT(user: User): JwtToken {
 	const { id, email, password } = user;
@@ -26,7 +26,7 @@ export function issueJWT(user: User): JwtToken {
 			.digest('hex');
 	}

-	const signedToken = jwt.sign(payload, config.get('userManagement.jwtSecret'), {
+	const signedToken = jwt.sign(payload, config.getEnv('userManagement.jwtSecret'), {
 		expiresIn: expiresIn / 1000 /* in seconds */,
 	});

@@ -57,7 +57,7 @@ export async function resolveJwtContent(jwtPayload: JwtPayload): Promise<User> {
 }

 export async function resolveJwt(token: string): Promise<User> {
-	const jwtPayload = jwt.verify(token, config.get('userManagement.jwtSecret')) as JwtPayload;
+	const jwtPayload = jwt.verify(token, config.getEnv('userManagement.jwtSecret')) as JwtPayload;
 	return resolveJwtContent(jwtPayload);
 }

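Note: round-trip sketch of the sign/verify pair above; the real secret comes from config.getEnv('userManagement.jwtSecret') and is stubbed here:

    import jwt from 'jsonwebtoken';

    const secret = 'jwt-secret-from-config'; // stand-in
    const token = jwt.sign({ id: '1' }, secret, { expiresIn: 7 * 86400 /* seconds */ });
    const payload = jwt.verify(token, secret); // throws if tampered with or expired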
@@ -1,7 +1,7 @@
 /* eslint-disable @typescript-eslint/no-unsafe-assignment */
 import { createTransport, Transporter } from 'nodemailer';
 import { LoggerProxy as Logger } from 'n8n-workflow';
-import config = require('../../../config');
+import * as config from '../../../config';
 import { MailData, SendEmailResult, UserManagementMailerImplementation } from './Interfaces';

 export class NodeMailer implements UserManagementMailerImplementation {
@@ -9,20 +9,20 @@ export class NodeMailer implements UserManagementMailerImplementation {

 	constructor() {
 		this.transport = createTransport({
-			host: config.get('userManagement.emails.smtp.host'),
-			port: config.get('userManagement.emails.smtp.port'),
-			secure: config.get('userManagement.emails.smtp.secure'),
+			host: config.getEnv('userManagement.emails.smtp.host'),
+			port: config.getEnv('userManagement.emails.smtp.port'),
+			secure: config.getEnv('userManagement.emails.smtp.secure'),
 			auth: {
-				user: config.get('userManagement.emails.smtp.auth.user'),
-				pass: config.get('userManagement.emails.smtp.auth.pass'),
+				user: config.getEnv('userManagement.emails.smtp.auth.user'),
+				pass: config.getEnv('userManagement.emails.smtp.auth.pass'),
 			},
 		});
 	}

 	async verifyConnection(): Promise<void> {
-		const host = config.get('userManagement.emails.smtp.host') as string;
-		const user = config.get('userManagement.emails.smtp.auth.user') as string;
-		const pass = config.get('userManagement.emails.smtp.auth.pass') as string;
+		const host = config.getEnv('userManagement.emails.smtp.host');
+		const user = config.getEnv('userManagement.emails.smtp.auth.user');
+		const pass = config.getEnv('userManagement.emails.smtp.auth.pass');

 		return new Promise((resolve, reject) => {
 			this.transport.verify((error: Error) => {
@@ -43,8 +43,8 @@ export class NodeMailer implements UserManagementMailerImplementation {
 	}

 	async sendMail(mailData: MailData): Promise<SendEmailResult> {
-		let sender = config.get('userManagement.emails.smtp.sender');
-		const user = config.get('userManagement.emails.smtp.auth.user') as string;
+		let sender = config.getEnv('userManagement.emails.smtp.sender');
+		const user = config.getEnv('userManagement.emails.smtp.auth.user');

 		if (!sender && user.includes('@')) {
 			sender = user;
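Note: standalone sketch of the SMTP transport configured above (values illustrative; the real ones come from the userManagement.emails.smtp.* config keys):

    import { createTransport } from 'nodemailer';

    const transport = createTransport({
        host: 'smtp.example.com',
        port: 465,
        secure: true,
        auth: { user: 'user@example.com', pass: 'app-password' },
    });

    // transport.verify() resolves only if the connection and credentials are valid.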
@@ -3,7 +3,7 @@ import { existsSync, readFileSync } from 'fs';
 import { IDataObject } from 'n8n-workflow';
 import { join as pathJoin } from 'path';
 import { GenericHelpers } from '../..';
-import config = require('../../../config');
+import * as config from '../../../config';
 import {
 	InviteEmailData,
 	PasswordResetData,
@@ -45,7 +45,7 @@ export class UserManagementMailer {

 	constructor() {
 		// Other implementations can be used in the future.
-		if (config.get('userManagement.emails.mode') === 'smtp') {
+		if (config.getEnv('userManagement.emails.mode') === 'smtp') {
 			this.mailer = new NodeMailer();
 		}
 	}

@@ -3,15 +3,15 @@
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 /* eslint-disable @typescript-eslint/no-unsafe-member-access */
 import { Request, Response } from 'express';
-import { compare } from 'bcryptjs';
 import { IDataObject } from 'n8n-workflow';
 import { Db, ResponseHelper } from '../..';
 import { AUTH_COOKIE_NAME } from '../../constants';
 import { issueCookie, resolveJwt } from '../auth/jwt';
 import { N8nApp, PublicUser } from '../Interfaces';
-import { isInstanceOwnerSetup, sanitizeUser } from '../UserManagementHelper';
+import { compareHash, sanitizeUser } from '../UserManagementHelper';
 import { User } from '../../databases/entities/User';
 import type { LoginRequest } from '../../requests';
+import config = require('../../../config');

 export function authenticationMethods(this: N8nApp): void {
 	/**
@@ -43,7 +43,8 @@ export function authenticationMethods(this: N8nApp): void {
 		} catch (error) {
 			throw new Error('Unable to access database.');
 		}
-		if (!user || !user.password || !(await compare(req.body.password, user.password))) {
+
+		if (!user || !user.password || !(await compareHash(req.body.password, user.password))) {
 			// password is empty until user signs up
 			const error = new Error('Wrong username or password. Do you have caps lock on?');
 			// @ts-ignore
@@ -71,13 +72,18 @@ export function authenticationMethods(this: N8nApp): void {
 			// If logged in, return user
 			try {
 				user = await resolveJwt(cookieContents);
+
+				if (!config.get('userManagement.isInstanceOwnerSetUp')) {
+					res.cookie(AUTH_COOKIE_NAME, cookieContents);
+				}
+
 				return sanitizeUser(user);
 			} catch (error) {
 				res.clearCookie(AUTH_COOKIE_NAME);
 			}
 		}

-		if (await isInstanceOwnerSetup()) {
+		if (config.get('userManagement.isInstanceOwnerSetUp')) {
 			const error = new Error('Not logged in');
 			// @ts-ignore
 			error.httpStatusCode = 401;
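Note: two behavioural changes above — the owner-setup check now reads the cached userManagement.isInstanceOwnerSetUp config flag instead of querying the users table via isInstanceOwnerSetup(), and while setup is incomplete the auth cookie is re-issued on each request so it survives the setup flow. Sketch of the cookie half (cookie name is a stand-in for AUTH_COOKIE_NAME):

    import { Response } from 'express';

    function refreshCookieDuringSetup(res: Response, cookieContents: string, ownerSetUp: boolean): void {
        if (!ownerSetUp) {
            res.cookie('n8n-auth', cookieContents);
        }
    }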
@@ -2,16 +2,16 @@
 /* eslint-disable @typescript-eslint/no-unsafe-call */
 /* eslint-disable @typescript-eslint/no-unsafe-return */
 /* eslint-disable import/no-cycle */
-import cookieParser = require('cookie-parser');
-import * as passport from 'passport';
+import cookieParser from 'cookie-parser';
+import passport from 'passport';
 import { Strategy } from 'passport-jwt';
 import { NextFunction, Request, Response } from 'express';
-import * as jwt from 'jsonwebtoken';
+import jwt from 'jsonwebtoken';
 import { LoggerProxy as Logger } from 'n8n-workflow';

 import { JwtPayload, N8nApp } from '../Interfaces';
 import { authenticationMethods } from './auth';
-import config = require('../../../config');
+import * as config from '../../../config';
 import { AUTH_COOKIE_NAME } from '../../constants';
 import { issueCookie, resolveJwtContent } from '../auth/jwt';
 import { meNamespace } from './me';
@@ -30,7 +30,7 @@ export function addRoutes(this: N8nApp, ignoredEndpoints: string[], restEndpoint
 			// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
 			return (req.cookies?.[AUTH_COOKIE_NAME] as string | undefined) ?? null;
 		},
-		secretOrKey: config.get('userManagement.jwtSecret') as string,
+		secretOrKey: config.getEnv('userManagement.jwtSecret'),
 	};

 	passport.use(

@@ -1,15 +1,14 @@
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 /* eslint-disable import/no-cycle */

-import { compare, genSaltSync, hashSync } from 'bcryptjs';
-import express = require('express');
+import express from 'express';
 import validator from 'validator';
 import { LoggerProxy as Logger } from 'n8n-workflow';

 import { Db, InternalHooksManager, ResponseHelper } from '../..';
 import { issueCookie } from '../auth/jwt';
 import { N8nApp, PublicUser } from '../Interfaces';
-import { validatePassword, sanitizeUser } from '../UserManagementHelper';
+import { validatePassword, sanitizeUser, compareHash, hashPassword } from '../UserManagementHelper';
 import type { AuthenticatedRequest, MeRequest } from '../../requests';
 import { validateEntity } from '../../GenericHelpers';
 import { User } from '../../databases/entities/User';
@@ -87,7 +86,7 @@ export function meNamespace(this: N8nApp): void {
 				throw new ResponseHelper.ResponseError('Requesting user not set up.');
 			}

-			const isCurrentPwCorrect = await compare(currentPassword, req.user.password);
+			const isCurrentPwCorrect = await compareHash(currentPassword, req.user.password);
 			if (!isCurrentPwCorrect) {
 				throw new ResponseHelper.ResponseError(
 					'Provided current password is incorrect.',
@@ -98,7 +97,7 @@ export function meNamespace(this: N8nApp): void {

 			const validPassword = validatePassword(newPassword);

-			req.user.password = hashSync(validPassword, genSaltSync(10));
+			req.user.password = await hashPassword(validPassword);

 			const user = await Db.collections.User!.save(req.user);
 			Logger.info('Password updated successfully', { userId: user.id });

@@ -1,17 +1,16 @@
 /* eslint-disable import/no-cycle */
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
-import { hashSync, genSaltSync } from 'bcryptjs';
-import * as express from 'express';
+import express from 'express';
 import validator from 'validator';
 import { LoggerProxy as Logger } from 'n8n-workflow';

 import { Db, InternalHooksManager, ResponseHelper } from '../..';
-import config = require('../../../config');
+import * as config from '../../../config';
 import { validateEntity } from '../../GenericHelpers';
 import { AuthenticatedRequest, OwnerRequest } from '../../requests';
 import { issueCookie } from '../auth/jwt';
 import { N8nApp } from '../Interfaces';
-import { sanitizeUser, validatePassword } from '../UserManagementHelper';
+import { hashPassword, sanitizeUser, validatePassword } from '../UserManagementHelper';

 export function ownerNamespace(this: N8nApp): void {
 	/**
@@ -24,7 +23,7 @@ export function ownerNamespace(this: N8nApp): void {
 			const { email, firstName, lastName, password } = req.body;
 			const { id: userId } = req.user;

-			if (config.get('userManagement.isInstanceOwnerSetUp')) {
+			if (config.getEnv('userManagement.isInstanceOwnerSetUp')) {
 				Logger.debug(
 					'Request to claim instance ownership failed because instance owner already exists',
 					{
@@ -74,7 +73,7 @@ export function ownerNamespace(this: N8nApp): void {
 				email,
 				firstName,
 				lastName,
-				password: hashSync(validPassword, genSaltSync(10)),
+				password: await hashPassword(validPassword),
 			});

 			await validateEntity(owner);

@@ -1,21 +1,20 @@
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 /* eslint-disable import/no-cycle */

-import express = require('express');
+import express from 'express';
 import { v4 as uuid } from 'uuid';
 import { URL } from 'url';
-import { genSaltSync, hashSync } from 'bcryptjs';
 import validator from 'validator';
 import { IsNull, MoreThanOrEqual, Not } from 'typeorm';
 import { LoggerProxy as Logger } from 'n8n-workflow';

 import { Db, InternalHooksManager, ResponseHelper } from '../..';
 import { N8nApp } from '../Interfaces';
-import { getInstanceBaseUrl, validatePassword } from '../UserManagementHelper';
+import { getInstanceBaseUrl, hashPassword, validatePassword } from '../UserManagementHelper';
 import * as UserManagementMailer from '../email';
 import type { PasswordResetRequest } from '../../requests';
 import { issueCookie } from '../auth/jwt';
-import config = require('../../../config');
+import * as config from '../../../config';

 export function passwordResetNamespace(this: N8nApp): void {
 	/**
@@ -26,7 +25,7 @@ export function passwordResetNamespace(this: N8nApp): void {
 	this.app.post(
 		`/${this.restEndpoint}/forgot-password`,
 		ResponseHelper.send(async (req: PasswordResetRequest.Email) => {
-			if (config.get('userManagement.emails.mode') === '') {
+			if (config.getEnv('userManagement.emails.mode') === '') {
 				Logger.debug('Request to send password reset email failed because emailing was not set up');
 				throw new ResponseHelper.ResponseError(
 					'Email sending must be set up in order to request a password reset email',
@@ -206,7 +205,7 @@ export function passwordResetNamespace(this: N8nApp): void {
 			}

 			await Db.collections.User!.update(userId, {
-				password: hashSync(validPassword, genSaltSync(10)),
+				password: await hashPassword(validPassword),
 				resetPasswordToken: null,
 				resetPasswordTokenExpiration: null,
 			});

@@ -3,7 +3,6 @@
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 import { Response } from 'express';
 import { In } from 'typeorm';
-import { genSaltSync, hashSync } from 'bcryptjs';
 import validator from 'validator';
 import { LoggerProxy as Logger } from 'n8n-workflow';

@@ -12,6 +11,7 @@ import { N8nApp, PublicUser } from '../Interfaces';
 import { UserRequest } from '../../requests';
 import {
 	getInstanceBaseUrl,
+	hashPassword,
 	isEmailSetUp,
 	sanitizeUser,
 	validatePassword,
@@ -21,7 +21,7 @@ import { SharedWorkflow } from '../../databases/entities/SharedWorkflow';
 import { SharedCredentials } from '../../databases/entities/SharedCredentials';
 import * as UserManagementMailer from '../email/UserManagementMailer';

-import config = require('../../../config');
+import * as config from '../../../config';
 import { issueCookie } from '../auth/jwt';

 export function usersNamespace(this: N8nApp): void {
@@ -31,7 +31,7 @@ export function usersNamespace(this: N8nApp): void {
 	this.app.post(
 		`/${this.restEndpoint}/users`,
 		ResponseHelper.send(async (req: UserRequest.Invite) => {
-			if (config.get('userManagement.emails.mode') === '') {
+			if (config.getEnv('userManagement.emails.mode') === '') {
 				Logger.debug(
 					'Request to send email invite(s) to user(s) failed because emailing was not set up',
 				);
@@ -56,14 +56,14 @@ export function usersNamespace(this: N8nApp): void {
 			}

 			// TODO: this should be checked in the middleware rather than here
-			if (config.get('userManagement.disabled')) {
+			if (config.getEnv('userManagement.disabled')) {
 				Logger.debug(
 					'Request to send email invite(s) to user(s) failed because user management is disabled',
 				);
 				throw new ResponseHelper.ResponseError('User management is disabled');
 			}

-			if (!config.get('userManagement.isInstanceOwnerSetUp')) {
+			if (!config.getEnv('userManagement.isInstanceOwnerSetUp')) {
 				Logger.debug(
 					'Request to send email invite(s) to user(s) failed because the owner account is not set up',
 				);
@@ -349,7 +349,7 @@ export function usersNamespace(this: N8nApp): void {

 			invitee.firstName = firstName;
 			invitee.lastName = lastName;
-			invitee.password = hashSync(validPassword, genSaltSync(10));
+			invitee.password = await hashPassword(validPassword);

 			const updatedUser = await Db.collections.User!.save(invitee);

@@ -12,7 +12,7 @@ import {
 	LoggerProxy as Logger,
 } from 'n8n-workflow';

-import * as express from 'express';
+import express from 'express';

 import {
 	Db,

@@ -13,7 +13,7 @@
 /* eslint-disable @typescript-eslint/restrict-template-expressions */
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 /* eslint-disable prefer-destructuring */
-import * as express from 'express';
+import express from 'express';
 // eslint-disable-next-line import/no-extraneous-dependencies
 import { get } from 'lodash';

@@ -132,26 +132,6 @@ export function encodeWebhookResponse(
 	return response;
 }

-/**
- * Returns all the webhooks which should be created for the give workflow
- *
- * @export
- * @param {string} workflowId
- * @param {Workflow} workflow
- * @returns {IWebhookData[]}
- */
-export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
-	// Check all the nodes in the workflow if they have webhooks
-
-	const returnData: IWebhookData[] = [];
-
-	for (const node of Object.values(workflow.nodes)) {
-		returnData.push.apply(returnData, NodeHelpers.getNodeWebhooksBasic(workflow, node));
-	}
-
-	return returnData;
-}
-
 /**
  * Executes a webhook
  *
@@ -194,39 +174,6 @@ export async function executeWebhook(
 		$executionId: executionId,
 	};

-	// Get the responseMode
-	const responseMode = workflow.expression.getSimpleParameterValue(
-		workflowStartNode,
-		webhookData.webhookDescription.responseMode,
-		executionMode,
-		additionalKeys,
-		'onReceived',
-	);
-	const responseCode = workflow.expression.getSimpleParameterValue(
-		workflowStartNode,
-		webhookData.webhookDescription.responseCode,
-		executionMode,
-		additionalKeys,
-		200,
-	) as number;
-
-	const responseData = workflow.expression.getSimpleParameterValue(
-		workflowStartNode,
-		webhookData.webhookDescription.responseData,
-		executionMode,
-		additionalKeys,
-		'firstEntryJson',
-	);
-
-	if (!['onReceived', 'lastNode', 'responseNode'].includes(responseMode as string)) {
-		// If the mode is not known we error. Is probably best like that instead of using
-		// the default that people know as early as possible (probably already testing phase)
-		// that something does not resolve properly.
-		const errorMessage = `The response mode ${responseMode} is not valid!`;
-		responseCallback(new Error(errorMessage), {});
-		throw new ResponseHelper.ResponseError(errorMessage, 500, 500);
-	}
-
 	let user: User;
 	if (
 		(workflowData as WorkflowEntity).shared?.length &&
@@ -244,6 +191,42 @@ export async function executeWebhook(
 	// Prepare everything that is needed to run the workflow
 	const additionalData = await WorkflowExecuteAdditionalData.getBase(user.id);

+	// Get the responseMode
+	const responseMode = workflow.expression.getSimpleParameterValue(
+		workflowStartNode,
+		webhookData.webhookDescription.responseMode,
+		executionMode,
+		additionalData.timezone,
+		additionalKeys,
+		'onReceived',
+	);
+	const responseCode = workflow.expression.getSimpleParameterValue(
+		workflowStartNode,
+		webhookData.webhookDescription.responseCode,
+		executionMode,
+		additionalData.timezone,
+		additionalKeys,
+		200,
+	) as number;
+
+	const responseData = workflow.expression.getSimpleParameterValue(
+		workflowStartNode,
+		webhookData.webhookDescription.responseData,
+		executionMode,
+		additionalData.timezone,
+		additionalKeys,
+		'firstEntryJson',
+	);
+
+	if (!['onReceived', 'lastNode', 'responseNode'].includes(responseMode as string)) {
+		// If the mode is not known we error. Is probably best like that instead of using
+		// the default that people know as early as possible (probably already testing phase)
+		// that something does not resolve properly.
+		const errorMessage = `The response mode ${responseMode} is not valid!`;
+		responseCallback(new Error(errorMessage), {});
+		throw new ResponseHelper.ResponseError(errorMessage, 500, 500);
+	}
+
 	// Add the Response and Request so that this data can be accessed in the node
 	additionalData.httpRequest = req;
 	additionalData.httpResponse = res;
@ -302,6 +285,7 @@ export async function executeWebhook(
|
|||
workflowStartNode,
|
||||
webhookData.webhookDescription.responseHeaders,
|
||||
executionMode,
|
||||
additionalData.timezone,
|
||||
additionalKeys,
|
||||
undefined,
|
||||
) as {
|
||||
|
@ -551,6 +535,7 @@ export async function executeWebhook(
|
|||
if (returnData.data!.main[0]![0] === undefined) {
|
||||
responseCallback(new Error('No item to return got found.'), {});
|
||||
didSendResponse = true;
|
||||
return undefined;
|
||||
}
|
||||
|
||||
data = returnData.data!.main[0]![0].json;
|
||||
|
@ -559,6 +544,7 @@ export async function executeWebhook(
|
|||
workflowStartNode,
|
||||
webhookData.webhookDescription.responsePropertyName,
|
||||
executionMode,
|
||||
additionalData.timezone,
|
||||
additionalKeys,
|
||||
undefined,
|
||||
);
|
||||
|
@ -571,6 +557,7 @@ export async function executeWebhook(
|
|||
workflowStartNode,
|
||||
webhookData.webhookDescription.responseContentType,
|
||||
executionMode,
|
||||
additionalData.timezone,
|
||||
additionalKeys,
|
||||
undefined,
|
||||
);
|
||||
|
@ -602,17 +589,20 @@ export async function executeWebhook(
|
|||
if (data === undefined) {
|
||||
responseCallback(new Error('No item to return got found.'), {});
|
||||
didSendResponse = true;
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (data.binary === undefined) {
|
||||
responseCallback(new Error('No binary data to return got found.'), {});
|
||||
didSendResponse = true;
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const responseBinaryPropertyName = workflow.expression.getSimpleParameterValue(
|
||||
workflowStartNode,
|
||||
webhookData.webhookDescription.responseBinaryPropertyName,
|
||||
executionMode,
|
||||
additionalData.timezone,
|
||||
additionalKeys,
|
||||
'data',
|
||||
);
|
||||
|
|
|
@@ -6,16 +6,16 @@
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
-import * as express from 'express';
+import express from 'express';
import { readFileSync } from 'fs';
import { getConnectionManager } from 'typeorm';
-import * as bodyParser from 'body-parser';
+import bodyParser from 'body-parser';
// eslint-disable-next-line import/no-extraneous-dependencies, @typescript-eslint/no-unused-vars
-import * as _ from 'lodash';
+import _ from 'lodash';

-import * as compression from 'compression';
+import compression from 'compression';
// eslint-disable-next-line import/no-extraneous-dependencies
-import * as parseUrl from 'parseurl';
+import parseUrl from 'parseurl';
import { WebhookHttpMethod } from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import {

@@ -31,7 +31,7 @@ import {
WaitingWebhooks,
} from '.';

-import * as config from '../config';
+import config from '../config';
// eslint-disable-next-line import/no-cycle
import { WEBHOOK_METHODS } from './WebhookHelpers';

@@ -193,28 +193,28 @@ class App {
constructor() {
this.app = express();

-this.endpointWebhook = config.get('endpoints.webhook') as string;
-this.endpointWebhookWaiting = config.get('endpoints.webhookWaiting') as string;
-this.saveDataErrorExecution = config.get('executions.saveDataOnError') as string;
-this.saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string;
-this.saveManualExecutions = config.get('executions.saveDataManualExecutions') as boolean;
-this.executionTimeout = config.get('executions.timeout') as number;
-this.maxExecutionTimeout = config.get('executions.maxTimeout') as number;
-this.timezone = config.get('generic.timezone') as string;
-this.restEndpoint = config.get('endpoints.rest') as string;
+this.endpointWebhook = config.getEnv('endpoints.webhook');
+this.endpointWebhookWaiting = config.getEnv('endpoints.webhookWaiting');
+this.saveDataErrorExecution = config.getEnv('executions.saveDataOnError');
+this.saveDataSuccessExecution = config.getEnv('executions.saveDataOnSuccess');
+this.saveManualExecutions = config.getEnv('executions.saveDataManualExecutions');
+this.executionTimeout = config.getEnv('executions.timeout');
+this.maxExecutionTimeout = config.getEnv('executions.maxTimeout');
+this.timezone = config.getEnv('generic.timezone');
+this.restEndpoint = config.getEnv('endpoints.rest');

this.activeWorkflowRunner = ActiveWorkflowRunner.getInstance();

this.activeExecutionsInstance = ActiveExecutions.getInstance();

-this.protocol = config.get('protocol');
-this.sslKey = config.get('ssl_key');
-this.sslCert = config.get('ssl_cert');
+this.protocol = config.getEnv('protocol');
+this.sslKey = config.getEnv('ssl_key');
+this.sslCert = config.getEnv('ssl_cert');

this.externalHooks = ExternalHooks();

this.presetCredentialsLoaded = false;
-this.endpointPresetCredentials = config.get('credentials.overwrite.endpoint') as string;
+this.endpointPresetCredentials = config.getEnv('credentials.overwrite.endpoint');
}

/**

@@ -342,8 +342,8 @@ class App {
}

export async function start(): Promise<void> {
-const PORT = config.get('port');
-const ADDRESS = config.get('listen_address');
+const PORT = config.getEnv('port');
+const ADDRESS = config.getEnv('listen_address');

const app = new App();

@@ -39,7 +39,7 @@ import {

import { LessThanOrEqual } from 'typeorm';
import { DateUtils } from 'typeorm/util/DateUtils';
-import * as config from '../config';
+import config from '../config';
import {
ActiveExecutions,
CredentialsHelper,

@@ -67,7 +67,7 @@ import {
} from './UserManagement/UserManagementHelper';
import { whereClause } from './WorkflowHelpers';

-const ERROR_TRIGGER_TYPE = config.get('nodes.errorTriggerType') as string;
+const ERROR_TRIGGER_TYPE = config.getEnv('nodes.errorTriggerType');

/**
* Checks if there was an error and if errorWorkflow or a trigger is defined. If so it collects

@@ -171,8 +171,8 @@ function pruneExecutionData(this: WorkflowHooks): void {
Logger.verbose('Pruning execution data from database');

throttling = true;
-const timeout = config.get('executions.pruneDataTimeout') as number; // in seconds
-const maxAge = config.get('executions.pruneDataMaxAge') as number; // in h
+const timeout = config.getEnv('executions.pruneDataTimeout'); // in seconds
+const maxAge = config.getEnv('executions.pruneDataMaxAge'); // in h
const date = new Date(); // today
date.setHours(date.getHours() - maxAge);

@@ -357,11 +357,11 @@ export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx
}
if (
this.workflowData.settings.saveExecutionProgress !== true &&
-!config.get('executions.saveExecutionProgress')
+!config.getEnv('executions.saveExecutionProgress')
) {
return;
}
-} else if (!config.get('executions.saveExecutionProgress')) {
+} else if (!config.getEnv('executions.saveExecutionProgress')) {
return;
}

@@ -466,7 +466,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
});

// Prune old execution data
-if (config.get('executions.pruneData')) {
+if (config.getEnv('executions.pruneData')) {
pruneExecutionData.call(this);
}

@@ -492,7 +492,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
}
}

-let saveManualExecutions = config.get('executions.saveDataManualExecutions') as boolean;
+let saveManualExecutions = config.getEnv('executions.saveDataManualExecutions');
if (
this.workflowData.settings !== undefined &&
this.workflowData.settings.saveManualExecutions !== undefined

@@ -512,8 +512,8 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
}

// Check config to know if execution should be saved or not
-let saveDataErrorExecution = config.get('executions.saveDataOnError') as string;
-let saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string;
+let saveDataErrorExecution = config.getEnv('executions.saveDataOnError') as string;
+let saveDataSuccessExecution = config.getEnv('executions.saveDataOnSuccess') as string;
if (this.workflowData.settings !== undefined) {
saveDataErrorExecution =
(this.workflowData.settings.saveDataErrorExecution as string) ||

@@ -800,7 +800,7 @@ export async function getWorkflowData(
const user = await getUserById(userId);
let relations = ['workflow', 'workflow.tags'];

-if (config.get('workflowTagsDisabled')) {
+if (config.getEnv('workflowTagsDisabled')) {
relations = relations.filter((relation) => relation !== 'workflow.tags');
}

@@ -1028,10 +1028,10 @@ export async function getBase(
): Promise<IWorkflowExecuteAdditionalData> {
const urlBaseWebhook = WebhookHelpers.getWebhookBaseUrl();

-const timezone = config.get('generic.timezone') as string;
-const webhookBaseUrl = urlBaseWebhook + config.get('endpoints.webhook');
-const webhookWaitingBaseUrl = urlBaseWebhook + config.get('endpoints.webhookWaiting');
-const webhookTestBaseUrl = urlBaseWebhook + config.get('endpoints.webhookTest');
+const timezone = config.getEnv('generic.timezone');
+const webhookBaseUrl = urlBaseWebhook + config.getEnv('endpoints.webhook');
+const webhookWaitingBaseUrl = urlBaseWebhook + config.getEnv('endpoints.webhookWaiting');
+const webhookTestBaseUrl = urlBaseWebhook + config.getEnv('endpoints.webhookTest');

const encryptionKey = await UserSettings.getEncryptionKey();
if (encryptionKey === undefined) {

@@ -1042,7 +1042,7 @@ export async function getBase(
credentialsHelper: new CredentialsHelper(encryptionKey),
encryptionKey,
executeWorkflow,
-restApiUrl: urlBaseWebhook + config.get('endpoints.rest'),
+restApiUrl: urlBaseWebhook + config.getEnv('endpoints.rest'),
timezone,
webhookBaseUrl,
webhookWaitingBaseUrl,

@@ -33,13 +33,13 @@ import {
WorkflowRunner,
} from '.';

-import * as config from '../config';
+import config from '../config';
// eslint-disable-next-line import/no-cycle
import { WorkflowEntity } from './databases/entities/WorkflowEntity';
import { User } from './databases/entities/User';
import { getWorkflowOwner } from './UserManagement/UserManagementHelper';

-const ERROR_TRIGGER_TYPE = config.get('nodes.errorTriggerType') as string;
+const ERROR_TRIGGER_TYPE = config.getEnv('nodes.errorTriggerType');

/**
* Returns the data of the last executed node

@@ -26,12 +26,12 @@ import {
} from 'n8n-workflow';

// eslint-disable-next-line import/no-extraneous-dependencies
-import * as PCancelable from 'p-cancelable';
+import PCancelable from 'p-cancelable';
import { join as pathJoin } from 'path';
import { fork } from 'child_process';

-import * as Bull from 'bull';
-import * as config from '../config';
+import Bull from 'bull';
+import config from '../config';
// eslint-disable-next-line import/no-cycle
import {
ActiveExecutions,

@@ -73,7 +73,7 @@ export class WorkflowRunner {
this.activeExecutions = ActiveExecutions.getInstance();
this.credentialsOverwrites = CredentialsOverwrites().getAll();

-const executionsMode = config.get('executions.mode') as string;
+const executionsMode = config.getEnv('executions.mode');

if (executionsMode === 'queue') {
this.jobQueue = Queue.getInstance().getBullObjectInstance();

@@ -150,8 +150,8 @@ export class WorkflowRunner {
executionId?: string,
responsePromise?: IDeferredPromise<IExecuteResponsePromiseData>,
): Promise<string> {
-const executionsProcess = config.get('executions.process') as string;
-const executionsMode = config.get('executions.mode') as string;
+const executionsProcess = config.getEnv('executions.process');
+const executionsMode = config.getEnv('executions.mode');

if (executionsMode === 'queue' && data.executionMode !== 'manual') {
// Do not run "manual" executions in bull because sending events to the

@@ -229,13 +229,13 @@ export class WorkflowRunner {
// Changes were made by adding the `workflowTimeout` to the `additionalData`
// So that the timeout will also work for executions with nested workflows.
let executionTimeout: NodeJS.Timeout;
-let workflowTimeout = config.get('executions.timeout') as number; // initialize with default
+let workflowTimeout = config.getEnv('executions.timeout'); // initialize with default
if (data.workflowData.settings && data.workflowData.settings.executionTimeout) {
workflowTimeout = data.workflowData.settings.executionTimeout as number; // preference on workflow setting
}

if (workflowTimeout > 0) {
-workflowTimeout = Math.min(workflowTimeout, config.get('executions.maxTimeout') as number);
+workflowTimeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout'));
}

const workflow = new Workflow({

@@ -326,8 +326,7 @@ export class WorkflowRunner {
this.activeExecutions.attachWorkflowExecution(executionId, workflowExecution);

if (workflowTimeout > 0) {
-const timeout =
-Math.min(workflowTimeout, config.get('executions.maxTimeout') as number) * 1000; // as seconds
+const timeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout')) * 1000; // as seconds
executionTimeout = setTimeout(() => {
this.activeExecutions.stopExecution(executionId, 'timeout');
}, timeout);

@@ -450,7 +449,7 @@ export class WorkflowRunner {

const jobData: Promise<IBullJobResponse> = job.finished();

-const queueRecoveryInterval = config.get('queue.bull.queueRecoveryInterval') as number;
+const queueRecoveryInterval = config.getEnv('queue.bull.queueRecoveryInterval');

const racingPromises: Array<Promise<IBullJobResponse | object>> = [jobData];

@@ -533,8 +532,8 @@ export class WorkflowRunner {
try {
// Check if this execution data has to be removed from database
// based on workflow settings.
-let saveDataErrorExecution = config.get('executions.saveDataOnError') as string;
-let saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string;
+let saveDataErrorExecution = config.getEnv('executions.saveDataOnError') as string;
+let saveDataSuccessExecution = config.getEnv('executions.saveDataOnSuccess') as string;
if (data.workflowData.settings !== undefined) {
saveDataErrorExecution =
(data.workflowData.settings.saveDataErrorExecution as string) ||

@@ -643,7 +642,7 @@ export class WorkflowRunner {

// Start timeout for the execution
let executionTimeout: NodeJS.Timeout;
-let workflowTimeout = config.get('executions.timeout') as number; // initialize with default
+let workflowTimeout = config.getEnv('executions.timeout'); // initialize with default
if (data.workflowData.settings && data.workflowData.settings.executionTimeout) {
workflowTimeout = data.workflowData.settings.executionTimeout as number; // preference on workflow setting
}

@@ -654,8 +653,7 @@ export class WorkflowRunner {
};

if (workflowTimeout > 0) {
-workflowTimeout =
-Math.min(workflowTimeout, config.get('executions.maxTimeout') as number) * 1000; // as seconds
+workflowTimeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout')) * 1000; // as seconds
// Start timeout already now but give process at least 5 seconds to start.
// Without it could would it be possible that the workflow executions times out before it even got started if
// the timeout time is very short as the process start time can be quite long.

@@ -5,13 +5,7 @@
/* eslint-disable @typescript-eslint/no-non-null-assertion */
/* eslint-disable @typescript-eslint/no-use-before-define */
/* eslint-disable @typescript-eslint/unbound-method */
-import {
-BinaryDataManager,
-IBinaryDataConfig,
-IProcessMessage,
-UserSettings,
-WorkflowExecute,
-} from 'n8n-core';
+import { BinaryDataManager, IProcessMessage, UserSettings, WorkflowExecute } from 'n8n-core';

import {
ExecutionError,

@@ -50,7 +44,7 @@ import {

import { getLogger } from './Logger';

-import * as config from '../config';
+import config from '../config';
import { InternalHooksManager } from './InternalHooksManager';
import { checkPermissionsForExecution } from './UserManagement/UserManagementHelper';

@@ -176,7 +170,7 @@ export class WorkflowRunnerProcess {
const { cli } = await GenericHelpers.getVersions();
InternalHooksManager.init(instanceId, cli, nodeTypes);

-const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+const binaryDataConfig = config.getEnv('binaryDataManager');
await BinaryDataManager.init(binaryDataConfig);

// Credentials should now be loaded from database.

@@ -204,27 +198,27 @@ export class WorkflowRunnerProcess {
} else if (
inputData.workflowData.settings !== undefined &&
inputData.workflowData.settings.saveExecutionProgress !== false &&
-(config.get('executions.saveExecutionProgress') as boolean)
+config.getEnv('executions.saveExecutionProgress')
) {
// Workflow settings not saying anything about saving but default settings says so
await Db.init();
} else if (
inputData.workflowData.settings === undefined &&
-(config.get('executions.saveExecutionProgress') as boolean)
+config.getEnv('executions.saveExecutionProgress')
) {
// Workflow settings not saying anything about saving but default settings says so
await Db.init();
}

// Start timeout for the execution
-let workflowTimeout = config.get('executions.timeout') as number; // initialize with default
+let workflowTimeout = config.getEnv('executions.timeout'); // initialize with default
// eslint-disable-next-line @typescript-eslint/prefer-optional-chain
if (this.data.workflowData.settings && this.data.workflowData.settings.executionTimeout) {
workflowTimeout = this.data.workflowData.settings.executionTimeout as number; // preference on workflow setting
}

if (workflowTimeout > 0) {
-workflowTimeout = Math.min(workflowTimeout, config.get('executions.maxTimeout') as number);
+workflowTimeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout'));
}

this.workflow = new Workflow({

@@ -3,7 +3,7 @@
/* eslint-disable no-restricted-syntax */
/* eslint-disable @typescript-eslint/no-non-null-assertion */
/* eslint-disable import/no-cycle */
-import express = require('express');
+import express from 'express';
import { In } from 'typeorm';
import { UserSettings, Credentials } from 'n8n-core';
import { INodeCredentialTestResult, LoggerProxy } from 'n8n-workflow';

@@ -24,7 +24,7 @@ import { CredentialsEntity } from '../databases/entities/CredentialsEntity';
import { SharedCredentials } from '../databases/entities/SharedCredentials';
import { validateEntity } from '../GenericHelpers';
import type { CredentialRequest } from '../requests';
-import config = require('../../config');
+import * as config from '../../config';
import { externalHooks } from '../Server';

export const credentialsController = express.Router();

@@ -99,7 +99,7 @@ credentialsController.get(
const { name: newName } = req.query;

return GenericHelpers.generateUniqueName(
-newName ?? config.get('credentials.defaultName'),
+newName ?? config.getEnv('credentials.defaultName'),
'credentials',
);
}),

@@ -14,12 +14,12 @@ import {
} from 'typeorm';

import { IsArray, IsObject, IsString, Length } from 'class-validator';
-import config = require('../../../config');
+import * as config from '../../../config';
import { DatabaseType, ICredentialsDb } from '../..';
import { SharedCredentials } from './SharedCredentials';

function resolveDataType(dataType: string) {
-const dbType = config.get('database.type') as DatabaseType;
+const dbType = config.getEnv('database.type');

const typeMap: { [key in DatabaseType]: { [key: string]: string } } = {
sqlite: {

@@ -37,7 +37,7 @@ function resolveDataType(dataType: string) {

// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
function getTimestampSyntax() {
-const dbType = config.get('database.type') as DatabaseType;
+const dbType = config.getEnv('database.type');

const map: { [key in DatabaseType]: string } = {
sqlite: `STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')`,

@@ -2,11 +2,11 @@
import { WorkflowExecuteMode } from 'n8n-workflow';

import { Column, ColumnOptions, Entity, Index, PrimaryGeneratedColumn } from 'typeorm';
-import config = require('../../../config');
+import * as config from '../../../config';
import { DatabaseType, IExecutionFlattedDb, IWorkflowDb } from '../..';

function resolveDataType(dataType: string) {
-const dbType = config.get('database.type') as DatabaseType;
+const dbType = config.getEnv('database.type');

const typeMap: { [key in DatabaseType]: { [key: string]: string } } = {
sqlite: {

@@ -11,7 +11,7 @@ import {
} from 'typeorm';
import { IsDate, IsOptional, IsString, Length } from 'class-validator';

-import config = require('../../../config');
+import * as config from '../../../config';
import { DatabaseType } from '../../index';
import { User } from './User';
import { SharedWorkflow } from './SharedWorkflow';

@@ -21,7 +21,7 @@ type RoleScopes = 'global' | 'workflow' | 'credential';

// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
function getTimestampSyntax() {
-const dbType = config.get('database.type') as DatabaseType;
+const dbType = config.getEnv('database.type');

const map: { [key in DatabaseType]: string } = {
sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",

@@ -9,7 +9,7 @@ import {
} from 'typeorm';
import { IsDate, IsOptional } from 'class-validator';

-import config = require('../../../config');
+import * as config from '../../../config';
import { DatabaseType } from '../../index';
import { CredentialsEntity } from './CredentialsEntity';
import { User } from './User';

@@ -17,7 +17,7 @@ import { Role } from './Role';

// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
function getTimestampSyntax() {
-const dbType = config.get('database.type') as DatabaseType;
+const dbType = config.getEnv('database.type');

const map: { [key in DatabaseType]: string } = {
sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",

@@ -9,7 +9,7 @@ import {
} from 'typeorm';
import { IsDate, IsOptional } from 'class-validator';

-import config = require('../../../config');
+import * as config from '../../../config';
import { DatabaseType } from '../../index';
import { WorkflowEntity } from './WorkflowEntity';
import { User } from './User';

@@ -17,7 +17,7 @@ import { Role } from './Role';

// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
function getTimestampSyntax() {
-const dbType = config.get('database.type') as DatabaseType;
+const dbType = config.getEnv('database.type');

const map: { [key in DatabaseType]: string } = {
sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",

@@ -13,7 +13,7 @@ import {
} from 'typeorm';
import { IsDate, IsOptional, IsString, Length } from 'class-validator';

-import config = require('../../../config');
+import * as config from '../../../config';
import { DatabaseType } from '../../index';
import { ITagDb } from '../../Interfaces';
import { idStringifier } from '../utils/transformers';

@@ -21,7 +21,7 @@ import { WorkflowEntity } from './WorkflowEntity';

// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
function getTimestampSyntax() {
-const dbType = config.get('database.type') as DatabaseType;
+const dbType = config.getEnv('database.type');

const map: { [key in DatabaseType]: string } = {
sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",

@@ -14,7 +14,7 @@ import {
UpdateDateColumn,
} from 'typeorm';
import { IsEmail, IsString, Length } from 'class-validator';
-import config = require('../../../config');
+import * as config from '../../../config';
import { DatabaseType, IPersonalizationSurveyAnswers } from '../..';
import { Role } from './Role';
import { SharedWorkflow } from './SharedWorkflow';

@@ -27,7 +27,7 @@ export const MIN_PASSWORD_LENGTH = 8;
export const MAX_PASSWORD_LENGTH = 64;

function resolveDataType(dataType: string) {
-const dbType = config.get('database.type') as DatabaseType;
+const dbType = config.getEnv('database.type');

const typeMap: { [key in DatabaseType]: { [key: string]: string } } = {
sqlite: {

@@ -45,7 +45,7 @@ function resolveDataType(dataType: string) {

// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
function getTimestampSyntax() {
-const dbType = config.get('database.type') as DatabaseType;
+const dbType = config.getEnv('database.type');

const map: { [key in DatabaseType]: string } = {
sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",

@@ -62,7 +62,7 @@ export class User {
@PrimaryGeneratedColumn('uuid')
id: string;

-@Column({ length: 254 })
+@Column({ length: 254, nullable: true })
@Index({ unique: true })
@IsEmail()
email: string;

@@ -81,7 +81,7 @@ export class User {

@Column({ nullable: true })
@IsString({ message: 'Password must be of type string.' })
-password?: string;
+password: string;

@Column({ type: String, nullable: true })
resetPasswordToken?: string | null;

@@ -18,13 +18,13 @@ import {
UpdateDateColumn,
} from 'typeorm';

-import config = require('../../../config');
+import * as config from '../../../config';
import { DatabaseType, IWorkflowDb } from '../..';
import { TagEntity } from './TagEntity';
import { SharedWorkflow } from './SharedWorkflow';

function resolveDataType(dataType: string) {
-const dbType = config.get('database.type') as DatabaseType;
+const dbType = config.getEnv('database.type');

const typeMap: { [key in DatabaseType]: { [key: string]: string } } = {
sqlite: {

@@ -42,7 +42,7 @@ function resolveDataType(dataType: string) {

// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
function getTimestampSyntax() {
-const dbType = config.get('database.type') as DatabaseType;
+const dbType = config.getEnv('database.type');

const map: { [key in DatabaseType]: string } = {
sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",

@@ -6,7 +6,7 @@ export class InitialMigration1588157391238 implements MigrationInterface {
name = 'InitialMigration1588157391238';

async up(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

await queryRunner.query('CREATE TABLE IF NOT EXISTS `' + tablePrefix + 'credentials_entity` (`id` int NOT NULL AUTO_INCREMENT, `name` varchar(128) NOT NULL, `data` text NOT NULL, `type` varchar(32) NOT NULL, `nodesAccess` json NOT NULL, `createdAt` datetime NOT NULL, `updatedAt` datetime NOT NULL, INDEX `IDX_' + tablePrefix + '07fde106c0b471d8cc80a64fc8` (`type`), PRIMARY KEY (`id`)) ENGINE=InnoDB', undefined);
await queryRunner.query('CREATE TABLE IF NOT EXISTS `' + tablePrefix + 'execution_entity` (`id` int NOT NULL AUTO_INCREMENT, `data` text NOT NULL, `finished` tinyint NOT NULL, `mode` varchar(255) NOT NULL, `retryOf` varchar(255) NULL, `retrySuccessId` varchar(255) NULL, `startedAt` datetime NOT NULL, `stoppedAt` datetime NOT NULL, `workflowData` json NOT NULL, `workflowId` varchar(255) NULL, INDEX `IDX_' + tablePrefix + 'c4d999a5e90784e8caccf5589d` (`workflowId`), PRIMARY KEY (`id`)) ENGINE=InnoDB', undefined);

@@ -14,7 +14,7 @@ export class InitialMigration1588157391238 implements MigrationInterface {
}

async down(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

await queryRunner.query('DROP TABLE `' + tablePrefix + 'workflow_entity`', undefined);
await queryRunner.query('DROP INDEX `IDX_' + tablePrefix + 'c4d999a5e90784e8caccf5589d` ON `' + tablePrefix + 'execution_entity`', undefined);

@@ -9,13 +9,13 @@ export class WebhookModel1592447867632 implements MigrationInterface {
name = 'WebhookModel1592447867632';

async up(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity (workflowId int NOT NULL, webhookPath varchar(255) NOT NULL, method varchar(255) NOT NULL, node varchar(255) NOT NULL, PRIMARY KEY (webhookPath, method)) ENGINE=InnoDB`);
}

async down(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query(`DROP TABLE ${tablePrefix}webhook_entity`);
}
}

@@ -6,13 +6,13 @@ export class CreateIndexStoppedAt1594902918301 implements MigrationInterface {
name = 'CreateIndexStoppedAt1594902918301';

async up(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

await queryRunner.query('CREATE INDEX `IDX_' + tablePrefix + 'cefb067df2402f6aed0638a6c1` ON `' + tablePrefix + 'execution_entity` (`stoppedAt`)');
}

async down(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

await queryRunner.query('DROP INDEX `IDX_' + tablePrefix + 'cefb067df2402f6aed0638a6c1` ON `' + tablePrefix + 'execution_entity`');
}

@@ -5,12 +5,12 @@ import * as config from '../../../../config';
export class MakeStoppedAtNullable1607431743767 implements MigrationInterface {

async up(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY `stoppedAt` datetime', undefined);
}

async down(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY `stoppedAt` datetime NOT NULL', undefined);
}

@@ -5,7 +5,7 @@ export class AddWebhookId1611149998770 implements MigrationInterface {
name = 'AddWebhookId1611149998770';

async up(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

await queryRunner.query('ALTER TABLE `' + tablePrefix + 'webhook_entity` ADD `webhookId` varchar(255) NULL');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'webhook_entity` ADD `pathLength` int NULL');

@@ -13,7 +13,7 @@ export class AddWebhookId1611149998770 implements MigrationInterface {
}

async down(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

await queryRunner.query(
'DROP INDEX `IDX_' + tablePrefix + '742496f199721a057051acf4c2` ON `' + tablePrefix + 'webhook_entity`'

@@ -5,13 +5,13 @@ export class ChangeDataSize1615306975123 implements MigrationInterface {
name = 'ChangeDataSize1615306975123';

async up(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY COLUMN `data` MEDIUMTEXT NOT NULL');
}

async down(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY COLUMN `data` TEXT NOT NULL');
}

@@ -5,7 +5,7 @@ export class CreateTagEntity1617268711084 implements MigrationInterface {
name = 'CreateTagEntity1617268711084';

async up(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

// create tags table + relationship with workflow entity

@@ -25,7 +25,7 @@ export class CreateTagEntity1617268711084 implements MigrationInterface {
}

async down(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

// `createdAt` and `updatedAt`

@@ -5,13 +5,13 @@ export class ChangeCredentialDataSize1620729500000 implements MigrationInterface
name = 'ChangeCredentialDataSize1620729500000';

async up(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

await queryRunner.query('ALTER TABLE `' + tablePrefix + 'credentials_entity` MODIFY COLUMN `type` varchar(128) NOT NULL');
}

async down(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

await queryRunner.query('ALTER TABLE `' + tablePrefix + 'credentials_entity` MODIFY COLUMN `type` varchar(32) NOT NULL');
}

@@ -1,11 +1,11 @@
import {MigrationInterface, QueryRunner} from "typeorm";
-import config = require("../../../../config");
+import * as config from '../../../../config';

export class UniqueWorkflowNames1620826335440 implements MigrationInterface {
name = 'UniqueWorkflowNames1620826335440';

async up(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

const workflowNames = await queryRunner.query(`
SELECT name

@@ -40,7 +40,7 @@ export class UniqueWorkflowNames1620826335440 implements MigrationInterface {
}

async down(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

await queryRunner.query('ALTER TABLE `' + tablePrefix + 'workflow_entity` DROP INDEX `IDX_' + tablePrefix + '943d8f922be094eb507cb9a7f9`');
}

@@ -1,12 +1,12 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
-import config = require('../../../../config');
+import * as config from '../../../../config';

export class CertifyCorrectCollation1623936588000 implements MigrationInterface {
name = 'CertifyCorrectCollation1623936588000';

async up(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
-const databaseType = config.get('database.type');
+const tablePrefix = config.getEnv('database.tablePrefix');
+const databaseType = config.getEnv('database.type');

if (databaseType === 'mariadb') {
// This applies to MySQL only.

@@ -19,7 +19,7 @@ export class CertifyCorrectCollation1623936588000 implements MigrationInterface
collation = 'utf8mb4_0900_ai_ci';
}

-const databaseName = config.get(`database.mysqldb.database`);
+const databaseName = config.getEnv(`database.mysqldb.database`);

await queryRunner.query(`ALTER DATABASE \`${databaseName}\` CHARACTER SET utf8mb4 COLLATE ${collation};`);

@@ -5,14 +5,14 @@ export class AddWaitColumnId1626183952959 implements MigrationInterface {
name = 'AddWaitColumnId1626183952959';

async up(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` ADD `waitTill` DATETIME NULL');
await queryRunner.query('CREATE INDEX `IDX_' + tablePrefix + 'ca4a71b47f28ac6ea88293a8e2` ON `' + tablePrefix + 'execution_entity` (`waitTill`)');
}

async down(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

await queryRunner.query(
'DROP INDEX `IDX_' + tablePrefix + 'ca4a71b47f28ac6ea88293a8e2` ON `' + tablePrefix + 'execution_entity`'

@@ -1,5 +1,5 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
-import config = require('../../../../config');
+import * as config from '../../../../config';
import { MigrationHelpers } from '../../MigrationHelpers';

// replacing the credentials in workflows and execution

@@ -9,7 +9,7 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac
name = 'UpdateWorkflowCredentials1630451444017';

public async up(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');
const helpers = new MigrationHelpers(queryRunner);

const credentialsEntities = await queryRunner.query(`

@@ -146,7 +146,7 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac
}

public async down(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');
const helpers = new MigrationHelpers(queryRunner);

const credentialsEntities = await queryRunner.query(`

@@ -5,7 +5,7 @@ export class AddExecutionEntityIndexes1644424784709 implements MigrationInterfac
name = 'AddExecutionEntityIndexes1644424784709';

public async up(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

await queryRunner.query(
'DROP INDEX `IDX_c4d999a5e90784e8caccf5589d` ON `' + tablePrefix + 'execution_entity`',

@@ -41,7 +41,7 @@ export class AddExecutionEntityIndexes1644424784709 implements MigrationInterfac
}

public async down(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query(
'DROP INDEX `IDX_81fc04c8a17de15835713505e4` ON `' + tablePrefix + 'execution_entity`',
);

@@ -1,13 +1,13 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import { v4 as uuid } from 'uuid';
-import config = require('../../../../config');
+import * as config from '../../../../config';
import { loadSurveyFromDisk } from '../../utils/migrationHelpers';

export class CreateUserManagement1646992772331 implements MigrationInterface {
name = 'CreateUserManagement1646992772331';

public async up(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

await queryRunner.query(
`CREATE TABLE ${tablePrefix}role (

@@ -156,7 +156,7 @@ export class CreateUserManagement1646992772331 implements MigrationInterface {
}

public async down(queryRunner: QueryRunner): Promise<void> {
-const tablePrefix = config.get('database.tablePrefix');
+const tablePrefix = config.getEnv('database.tablePrefix');

await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_entity ADD UNIQUE INDEX \`IDX_${tablePrefix}943d8f922be094eb507cb9a7f9\` (\`name\`)`,

@ -7,9 +7,9 @@ export class InitialMigration1587669153312 implements MigrationInterface {
|
|||
name = 'InitialMigration1587669153312';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const tablePrefixIndex = tablePrefix;
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
@ -22,9 +22,9 @@ export class InitialMigration1587669153312 implements MigrationInterface {
|
|||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const tablePrefixIndex = tablePrefix;
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
|
|
@ -9,9 +9,9 @@ export class WebhookModel1589476000887 implements MigrationInterface {
|
|||
name = 'WebhookModel1589476000887';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const tablePrefixIndex = tablePrefix;
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
@ -20,8 +20,8 @@ export class WebhookModel1589476000887 implements MigrationInterface {
|
|||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
|
|
@ -6,9 +6,9 @@ export class CreateIndexStoppedAt1594828256133 implements MigrationInterface {
|
|||
name = 'CreateIndexStoppedAt1594828256133';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const tablePrefixPure = tablePrefix;
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
@ -17,7 +17,7 @@ export class CreateIndexStoppedAt1594828256133 implements MigrationInterface {
|
|||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
await queryRunner.query(`DROP INDEX IDX_${tablePrefix}33228da131bb1112247cf52a42`);
|
||||
}
|
||||
|
|
|
@ -6,8 +6,8 @@ export class MakeStoppedAtNullable1607431743768 implements MigrationInterface {
|
|||
name = 'MakeStoppedAtNullable1607431743768';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
|
|
@ -5,9 +5,9 @@ export class AddWebhookId1611144599516 implements MigrationInterface {
|
|||
name = 'AddWebhookId1611144599516';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const tablePrefixPure = tablePrefix;
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
@ -18,9 +18,9 @@ export class AddWebhookId1611144599516 implements MigrationInterface {
|
|||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const tablePrefixPure = tablePrefix;
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
|
|
@ -5,9 +5,9 @@ export class CreateTagEntity1617270242566 implements MigrationInterface {
|
|||
name = 'CreateTagEntity1617270242566';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const tablePrefixPure = tablePrefix;
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
@ -40,9 +40,9 @@ export class CreateTagEntity1617270242566 implements MigrationInterface {
|
|||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const tablePrefixPure = tablePrefix;
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||
import config = require('../../../../config');
|
||||
import * as config from '../../../../config';
|
||||
|
||||
export class UniqueWorkflowNames1620824779533 implements MigrationInterface {
|
||||
name = 'UniqueWorkflowNames1620824779533';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const tablePrefixPure = tablePrefix;
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
@ -58,9 +58,9 @@ export class UniqueWorkflowNames1620824779533 implements MigrationInterface {
|
|||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const tablePrefixPure = tablePrefix;
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
|
|
@ -5,9 +5,9 @@ export class AddwaitTill1626176912946 implements MigrationInterface {
|
|||
name = 'AddwaitTill1626176912946';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const tablePrefixPure = tablePrefix;
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
@ -17,9 +17,9 @@ export class AddwaitTill1626176912946 implements MigrationInterface {
|
|||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const tablePrefixPure = tablePrefix;
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||
import config = require('../../../../config');
|
||||
import * as config from '../../../../config';
|
||||
import { MigrationHelpers } from '../../MigrationHelpers';
|
||||
|
||||
// replacing the credentials in workflows and execution
|
||||
|
@ -9,8 +9,8 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
|
|||
name = 'UpdateWorkflowCredentials1630419189837';
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
@ -152,8 +152,8 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
|
|||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
|
|
@@ -5,44 +5,44 @@ export class AddExecutionEntityIndexes1644422880309 implements MigrationInterface {
   name = 'AddExecutionEntityIndexes1644422880309';

   public async up(queryRunner: QueryRunner): Promise<void> {
-    let tablePrefix = config.get('database.tablePrefix');
+    let tablePrefix = config.getEnv('database.tablePrefix');
     const tablePrefixPure = tablePrefix;
-    const schema = config.get('database.postgresdb.schema');
+    const schema = config.getEnv('database.postgresdb.schema');

     if (schema) {
       tablePrefix = schema + '.' + tablePrefix;
     }

     await queryRunner.query(
-      `DROP INDEX "${schema}".IDX_${tablePrefixPure}c4d999a5e90784e8caccf5589d`,
+      `DROP INDEX IF EXISTS "${schema}".IDX_${tablePrefixPure}c4d999a5e90784e8caccf5589d`,
     );
     await queryRunner.query(
-      `DROP INDEX "${schema}".IDX_${tablePrefixPure}ca4a71b47f28ac6ea88293a8e2`,
+      `DROP INDEX IF EXISTS "${schema}".IDX_${tablePrefixPure}ca4a71b47f28ac6ea88293a8e2`,
     );
     await queryRunner.query(
-      `CREATE INDEX "IDX_${tablePrefixPure}33228da131bb1112247cf52a42" ON ${tablePrefix}execution_entity ("stoppedAt") `,
+      `CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}33228da131bb1112247cf52a42" ON ${tablePrefix}execution_entity ("stoppedAt") `,
     );
     await queryRunner.query(
-      `CREATE INDEX "IDX_${tablePrefixPure}58154df94c686818c99fb754ce" ON ${tablePrefix}execution_entity ("workflowId", "waitTill", "id") `,
+      `CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}58154df94c686818c99fb754ce" ON ${tablePrefix}execution_entity ("workflowId", "waitTill", "id") `,
     );
     await queryRunner.query(
-      `CREATE INDEX "IDX_${tablePrefixPure}4f474ac92be81610439aaad61e" ON ${tablePrefix}execution_entity ("workflowId", "finished", "id") `,
+      `CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}4f474ac92be81610439aaad61e" ON ${tablePrefix}execution_entity ("workflowId", "finished", "id") `,
     );
     await queryRunner.query(
-      `CREATE INDEX "IDX_${tablePrefixPure}72ffaaab9f04c2c1f1ea86e662" ON ${tablePrefix}execution_entity ("finished", "id") `,
+      `CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}72ffaaab9f04c2c1f1ea86e662" ON ${tablePrefix}execution_entity ("finished", "id") `,
     );
     await queryRunner.query(
-      `CREATE INDEX "IDX_${tablePrefixPure}85b981df7b444f905f8bf50747" ON ${tablePrefix}execution_entity ("waitTill", "id") `,
+      `CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}85b981df7b444f905f8bf50747" ON ${tablePrefix}execution_entity ("waitTill", "id") `,
     );
     await queryRunner.query(
-      `CREATE INDEX "IDX_${tablePrefixPure}d160d4771aba5a0d78943edbe3" ON ${tablePrefix}execution_entity ("workflowId", "id") `,
+      `CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}d160d4771aba5a0d78943edbe3" ON ${tablePrefix}execution_entity ("workflowId", "id") `,
     );
   }

   public async down(queryRunner: QueryRunner): Promise<void> {
-    let tablePrefix = config.get('database.tablePrefix');
+    let tablePrefix = config.getEnv('database.tablePrefix');
     const tablePrefixPure = tablePrefix;
-    const schema = config.get('database.postgresdb.schema');
+    const schema = config.getEnv('database.postgresdb.schema');

     if (schema) {
       tablePrefix = schema + '.' + tablePrefix;

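Beyond the config change, this hunk makes the Postgres index migration idempotent: `DROP INDEX IF EXISTS` and `CREATE INDEX IF NOT EXISTS` let the statements succeed even when an index was already removed or created, for example after a partially applied earlier run. A hedged, self-contained sketch of the pattern in a standalone TypeORM migration; the class and index names are invented for the example.

```typescript
import { MigrationInterface, QueryRunner } from 'typeorm';

// Illustrative idempotent index migration (names invented for the sketch).
export class AddStoppedAtIndexExample1650000000000 implements MigrationInterface {
  name = 'AddStoppedAtIndexExample1650000000000';

  public async up(queryRunner: QueryRunner): Promise<void> {
    // IF NOT EXISTS turns a re-run into a no-op instead of an error,
    // which matters when a previous run was interrupted halfway through.
    await queryRunner.query(
      `CREATE INDEX IF NOT EXISTS "IDX_example_stopped_at" ON execution_entity ("stoppedAt")`,
    );
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    // IF EXISTS keeps the rollback safe if the index was dropped out of band.
    await queryRunner.query(`DROP INDEX IF EXISTS "IDX_example_stopped_at"`);
  }
}
```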
@@ -3,13 +3,13 @@ import {
   QueryRunner,
 } from 'typeorm';

-import config = require('../../../../config');
+import * as config from '../../../../config';

 export class IncreaseTypeVarcharLimit1646834195327 implements MigrationInterface {
   name = 'IncreaseTypeVarcharLimit1646834195327';

   async up(queryRunner: QueryRunner): Promise<void> {
-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');
     await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN "type" TYPE VARCHAR(128)`);
   }

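Widening a varchar, as `IncreaseTypeVarcharLimit` does for the `type` column, is a cheap operation on Postgres: raising the length limit is a catalog-only change and does not rewrite existing rows. Narrowing is the risky direction, since rows may already exceed the smaller limit. A sketch follows; the previous limit shown in `down()` is an assumption for illustration.

```typescript
import { MigrationInterface, QueryRunner } from 'typeorm';

// Illustrative varchar-widening migration (table name unprefixed for brevity).
export class WidenTypeColumnExample implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    // Postgres treats a larger varchar limit as a metadata change: no row rewrite.
    await queryRunner.query(
      `ALTER TABLE credentials_entity ALTER COLUMN "type" TYPE VARCHAR(128)`,
    );
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    // Narrowing fails if any existing value is longer than the old limit
    // (32 is an assumed previous size), so a real rollback must check first.
    await queryRunner.query(
      `ALTER TABLE credentials_entity ALTER COLUMN "type" TYPE VARCHAR(32)`,
    );
  }
}
```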
@@ -1,15 +1,15 @@
 import { MigrationInterface, QueryRunner } from 'typeorm';
 import { v4 as uuid } from 'uuid';
-import config = require('../../../../config');
+import config from '../../../../config';
 import { loadSurveyFromDisk } from '../../utils/migrationHelpers';

 export class CreateUserManagement1646992772331 implements MigrationInterface {
   name = 'CreateUserManagement1646992772331';

   public async up(queryRunner: QueryRunner): Promise<void> {
-    let tablePrefix = config.get('database.tablePrefix');
+    let tablePrefix = config.getEnv('database.tablePrefix');
     const tablePrefixPure = tablePrefix;
-    const schema = config.get('database.postgresdb.schema');
+    const schema = config.getEnv('database.postgresdb.schema');
     if (schema) {
       tablePrefix = schema + '.' + tablePrefix;
     }
@@ -140,9 +140,9 @@ export class CreateUserManagement1646992772331 implements MigrationInterface {
   }

   public async down(queryRunner: QueryRunner): Promise<void> {
-    let tablePrefix = config.get('database.tablePrefix');
+    let tablePrefix = config.getEnv('database.tablePrefix');
     const tablePrefixPure = tablePrefix;
-    const schema = config.get('database.postgresdb.schema');
+    const schema = config.getEnv('database.postgresdb.schema');
     if (schema) {
       tablePrefix = schema + '.' + tablePrefix;
     }

@@ -8,7 +8,7 @@ export class InitialMigration1588102412422 implements MigrationInterface {
   async up(queryRunner: QueryRunner): Promise<void> {
     logMigrationStart(this.name);

-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');

     await queryRunner.query(
       `CREATE TABLE IF NOT EXISTS "${tablePrefix}credentials_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(128) NOT NULL, "data" text NOT NULL, "type" varchar(128) NOT NULL, "nodesAccess" text NOT NULL, "createdAt" datetime NOT NULL, "updatedAt" datetime NOT NULL)`,
@@ -35,7 +35,7 @@ export class InitialMigration1588102412422 implements MigrationInterface {
   }

   async down(queryRunner: QueryRunner): Promise<void> {
-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');

     await queryRunner.query(`DROP TABLE "${tablePrefix}workflow_entity"`, undefined);
     await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}c4d999a5e90784e8caccf5589d"`, undefined);

@@ -8,7 +8,7 @@ export class WebhookModel1592445003908 implements MigrationInterface {
   async up(queryRunner: QueryRunner): Promise<void> {
     logMigrationStart(this.name);

-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');

     await queryRunner.query(
       `CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, PRIMARY KEY ("webhookPath", "method"))`,
@@ -18,7 +18,7 @@ export class WebhookModel1592445003908 implements MigrationInterface {
   }

   async down(queryRunner: QueryRunner): Promise<void> {
-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');
     await queryRunner.query(`DROP TABLE ${tablePrefix}webhook_entity`);
   }
 }

@@ -8,7 +8,7 @@ export class CreateIndexStoppedAt1594825041918 implements MigrationInterface {
   async up(queryRunner: QueryRunner): Promise<void> {
     logMigrationStart(this.name);

-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');

     await queryRunner.query(
       `CREATE INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1" ON "${tablePrefix}execution_entity" ("stoppedAt") `,
@@ -18,7 +18,7 @@ export class CreateIndexStoppedAt1594825041918 implements MigrationInterface {
   }

   async down(queryRunner: QueryRunner): Promise<void> {
-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');

     await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1"`);
   }

@@ -8,7 +8,7 @@ export class MakeStoppedAtNullable1607431743769 implements MigrationInterface {
   async up(queryRunner: QueryRunner): Promise<void> {
     logMigrationStart(this.name);

-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');
     // SQLite does not allow us to simply "alter column"
     // We're hacking the way sqlite identifies tables
     // Allowing a column to become nullable

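The comments kept in this hunk explain the surrounding technique: SQLite has no `ALTER TABLE ... ALTER COLUMN`, so relaxing `stoppedAt` to nullable means rebuilding the table. The standard recipe, also visible in the `AddWebhookId` migration just below with its `temporary_webhook_entity`, is create-copy-drop-rename. A condensed, hedged sketch with the column list shortened:

```typescript
import { MigrationInterface, QueryRunner } from 'typeorm';

// Illustrative SQLite table rebuild; most execution_entity columns omitted.
export class MakeStoppedAtNullableExample implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    // 1. Create a replacement table whose "stoppedAt" no longer carries NOT NULL.
    await queryRunner.query(
      `CREATE TABLE "temporary_execution_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "startedAt" datetime NOT NULL, "stoppedAt" datetime)`,
    );
    // 2. Copy all rows across.
    await queryRunner.query(
      `INSERT INTO "temporary_execution_entity" ("id", "startedAt", "stoppedAt") SELECT "id", "startedAt", "stoppedAt" FROM "execution_entity"`,
    );
    // 3. Swap the rebuilt table into place.
    await queryRunner.query(`DROP TABLE "execution_entity"`);
    await queryRunner.query(
      `ALTER TABLE "temporary_execution_entity" RENAME TO "execution_entity"`,
    );
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    // Reversing repeats the same create-copy-drop-rename dance with NOT NULL restored.
  }
}
```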
@@ -8,7 +8,7 @@ export class AddWebhookId1611071044839 implements MigrationInterface {
   async up(queryRunner: QueryRunner): Promise<void> {
     logMigrationStart(this.name);

-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');

     await queryRunner.query(
       `CREATE TABLE "temporary_webhook_entity" ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, "webhookId" varchar, "pathLength" integer, PRIMARY KEY ("webhookPath", "method"))`,
@@ -28,7 +28,7 @@ export class AddWebhookId1611071044839 implements MigrationInterface {
   }

   async down(queryRunner: QueryRunner): Promise<void> {
-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');

     await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}742496f199721a057051acf4c2"`);
     await queryRunner.query(

@@ -8,7 +8,7 @@ export class CreateTagEntity1617213344594 implements MigrationInterface {
   async up(queryRunner: QueryRunner): Promise<void> {
     logMigrationStart(this.name);

-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');

     // create tags table + relationship with workflow entity

@@ -73,7 +73,7 @@ export class CreateTagEntity1617213344594 implements MigrationInterface {
   }

   async down(queryRunner: QueryRunner): Promise<void> {
-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');

     // `createdAt` and `updatedAt`

@@ -1,5 +1,5 @@
 import { MigrationInterface, QueryRunner } from 'typeorm';
-import config = require('../../../../config');
+import * as config from '../../../../config';
 import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';

 export class UniqueWorkflowNames1620821879465 implements MigrationInterface {
@@ -8,7 +8,7 @@ export class UniqueWorkflowNames1620821879465 implements MigrationInterface {
   async up(queryRunner: QueryRunner): Promise<void> {
     logMigrationStart(this.name);

-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');

     const workflowNames = await queryRunner.query(`
       SELECT name
@@ -58,7 +58,7 @@ export class UniqueWorkflowNames1620821879465 implements MigrationInterface {
   }

   async down(queryRunner: QueryRunner): Promise<void> {
-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');
     await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}943d8f922be094eb507cb9a7f9"`);
   }
 }

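`UniqueWorkflowNames` can only add its unique index after existing duplicate names are resolved, which is what the truncated `SELECT name` query above feeds into. A hedged sketch of the usual two-step: find duplicated names with `GROUP BY ... HAVING`, then rename all but the first occurrence. The suffixing scheme here is an assumption, not the migration's exact logic.

```typescript
import { QueryRunner } from 'typeorm';

// Illustrative deduplication pass run before creating a unique index on "name".
async function deduplicateWorkflowNames(queryRunner: QueryRunner, tablePrefix: string): Promise<void> {
  const duplicates: Array<{ name: string }> = await queryRunner.query(
    `SELECT name FROM "${tablePrefix}workflow_entity" GROUP BY name HAVING COUNT(*) > 1`,
  );
  for (const { name } of duplicates) {
    const rows: Array<{ id: number }> = await queryRunner.query(
      `SELECT id FROM "${tablePrefix}workflow_entity" WHERE name = ? ORDER BY id`,
      [name],
    );
    // Keep the first row as-is; suffix the rest: "My Flow 2", "My Flow 3", ...
    for (let index = 1; index < rows.length; index++) {
      await queryRunner.query(
        `UPDATE "${tablePrefix}workflow_entity" SET name = ? WHERE id = ?`,
        [`${name} ${index + 1}`, rows[index].id],
      );
    }
  }
}
```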
@@ -8,7 +8,7 @@ export class AddWaitColumn1621707690587 implements MigrationInterface {
   async up(queryRunner: QueryRunner): Promise<void> {
     logMigrationStart(this.name);

-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');

     await queryRunner.query(`DROP TABLE IF EXISTS "${tablePrefix}temporary_execution_entity"`);
     await queryRunner.query(
@@ -34,7 +34,7 @@ export class AddWaitColumn1621707690587 implements MigrationInterface {
   }

   async down(queryRunner: QueryRunner): Promise<void> {
-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');

     await queryRunner.query(
       `CREATE TABLE IF NOT EXISTS "${tablePrefix}temporary_execution_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" varchar NOT NULL, "retryOf" varchar, "retrySuccessId" varchar, "startedAt" datetime NOT NULL, "stoppedAt" datetime, "workflowData" text NOT NULL, "workflowId" varchar)`,

@@ -1,5 +1,5 @@
 import { MigrationInterface, QueryRunner } from 'typeorm';
-import config = require('../../../../config');
+import * as config from '../../../../config';
 import { MigrationHelpers } from '../../MigrationHelpers';
 import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';

@@ -12,7 +12,7 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterface {
   public async up(queryRunner: QueryRunner): Promise<void> {
     logMigrationStart(this.name);

-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');
     const helpers = new MigrationHelpers(queryRunner);

     const credentialsEntities = await queryRunner.query(`
@@ -152,7 +152,7 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterface {
   }

   public async down(queryRunner: QueryRunner): Promise<void> {
-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');
     const helpers = new MigrationHelpers(queryRunner);

     const credentialsEntities = await queryRunner.query(`

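`UpdateWorkflowCredentials` is a data migration rather than a schema migration: credential references live inside serialized workflow and execution JSON, so rows are read back, transformed in JavaScript, and written out again (the `MigrationHelpers` instance wraps that row-level work). A hedged sketch of the read-transform-write loop; the column name, JSON shape, and the name-string-to-object transform are assumptions for illustration.

```typescript
import { QueryRunner } from 'typeorm';

// Illustrative read-transform-write pass over serialized workflow nodes.
async function rewriteCredentialReferences(queryRunner: QueryRunner, tablePrefix: string): Promise<void> {
  const workflows: Array<{ id: number; nodes: string }> = await queryRunner.query(
    `SELECT id, nodes FROM ${tablePrefix}workflow_entity`,
  );
  for (const workflow of workflows) {
    const nodes = JSON.parse(workflow.nodes) as Array<{
      credentials?: Record<string, unknown>;
    }>;
    let changed = false;
    for (const node of nodes) {
      for (const [type, value] of Object.entries(node.credentials ?? {})) {
        if (typeof value === 'string') {
          // Assumed transform: a bare credential name becomes an { id, name } reference.
          node.credentials![type] = { id: null, name: value };
          changed = true;
        }
      }
    }
    if (changed) {
      await queryRunner.query(
        `UPDATE ${tablePrefix}workflow_entity SET nodes = ? WHERE id = ?`,
        [JSON.stringify(nodes), workflow.id],
      );
    }
  }
}
```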
@@ -7,32 +7,32 @@ export class AddExecutionEntityIndexes1644421939510 implements MigrationInterface {

   public async up(queryRunner: QueryRunner): Promise<void> {
     logMigrationStart(this.name);
-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');

     await queryRunner.query(`DROP INDEX IF EXISTS 'IDX_${tablePrefix}c4d999a5e90784e8caccf5589d'`);

-    await queryRunner.query(`DROP INDEX 'IDX_${tablePrefix}ca4a71b47f28ac6ea88293a8e2'`);
+    await queryRunner.query(`DROP INDEX IF EXISTS 'IDX_${tablePrefix}ca4a71b47f28ac6ea88293a8e2'`);

     await queryRunner.query(
-      `CREATE INDEX 'IDX_${tablePrefix}06da892aaf92a48e7d3e400003' ON '${tablePrefix}execution_entity' ('workflowId', 'waitTill', 'id') `,
+      `CREATE INDEX IF NOT EXISTS 'IDX_${tablePrefix}06da892aaf92a48e7d3e400003' ON '${tablePrefix}execution_entity' ('workflowId', 'waitTill', 'id') `,
     );
     await queryRunner.query(
-      `CREATE INDEX 'IDX_${tablePrefix}78d62b89dc1433192b86dce18a' ON '${tablePrefix}execution_entity' ('workflowId', 'finished', 'id') `,
+      `CREATE INDEX IF NOT EXISTS 'IDX_${tablePrefix}78d62b89dc1433192b86dce18a' ON '${tablePrefix}execution_entity' ('workflowId', 'finished', 'id') `,
     );
     await queryRunner.query(
-      `CREATE INDEX 'IDX_${tablePrefix}1688846335d274033e15c846a4' ON '${tablePrefix}execution_entity' ('finished', 'id') `,
+      `CREATE INDEX IF NOT EXISTS 'IDX_${tablePrefix}1688846335d274033e15c846a4' ON '${tablePrefix}execution_entity' ('finished', 'id') `,
     );
     await queryRunner.query(
-      `CREATE INDEX 'IDX_${tablePrefix}b94b45ce2c73ce46c54f20b5f9' ON '${tablePrefix}execution_entity' ('waitTill', 'id') `,
+      `CREATE INDEX IF NOT EXISTS 'IDX_${tablePrefix}b94b45ce2c73ce46c54f20b5f9' ON '${tablePrefix}execution_entity' ('waitTill', 'id') `,
     );
     await queryRunner.query(
-      `CREATE INDEX 'IDX_${tablePrefix}81fc04c8a17de15835713505e4' ON '${tablePrefix}execution_entity' ('workflowId', 'id') `,
+      `CREATE INDEX IF NOT EXISTS 'IDX_${tablePrefix}81fc04c8a17de15835713505e4' ON '${tablePrefix}execution_entity' ('workflowId', 'id') `,
     );
     logMigrationEnd(this.name);
   }

   public async down(queryRunner: QueryRunner): Promise<void> {
-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');

     await queryRunner.query(`DROP INDEX 'IDX_${tablePrefix}81fc04c8a17de15835713505e4'`);
     await queryRunner.query(`DROP INDEX 'IDX_${tablePrefix}b94b45ce2c73ce46c54f20b5f9'`);
@@ -43,7 +43,7 @@ export class AddExecutionEntityIndexes1644421939510 implements MigrationInterface {
       `CREATE INDEX 'IDX_${tablePrefix}ca4a71b47f28ac6ea88293a8e2' ON '${tablePrefix}execution_entity' ('waitTill') `,
     );
     await queryRunner.query(
-      `CREATE INDEX IF NOT EXISTS 'IDX_${tablePrefix}c4d999a5e90784e8caccf5589d' ON '${tablePrefix}execution_entity' ('workflowId') `,
+      `CREATE INDEX 'IDX_${tablePrefix}c4d999a5e90784e8caccf5589d' ON '${tablePrefix}execution_entity' ('workflowId') `,
     );
   }
 }

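The execution_entity indexes added here are composite and column-ordered to match the queries n8n runs most often: equality filters on `workflowId`, `finished`, or `waitTill` come first, with `id` last so the same index also serves ordering and pagination. A hedged example of a query shaped to exploit the `('workflowId', 'finished', 'id')` index; the query itself is ours, not from the commit.

```typescript
import { QueryRunner } from 'typeorm';

// Illustrative query served by the ('workflowId', 'finished', 'id') index:
// the leading columns satisfy the WHERE clause and the trailing "id"
// satisfies the ORDER BY, so the engine can walk the index without sorting.
async function latestFinishedExecutions(queryRunner: QueryRunner, workflowId: string) {
  return queryRunner.query(
    `SELECT id, startedAt, stoppedAt
       FROM execution_entity
      WHERE workflowId = ? AND finished = 1
      ORDER BY id DESC
      LIMIT 20`,
    [workflowId],
  );
}
```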
@@ -1,6 +1,6 @@
 import { MigrationInterface, QueryRunner } from 'typeorm';
 import { v4 as uuid } from 'uuid';
-import config = require('../../../../config');
+import * as config from '../../../../config';
 import {
   loadSurveyFromDisk,
   logMigrationEnd,
@@ -13,7 +13,7 @@ export class CreateUserManagement1646992772331 implements MigrationInterface {
   public async up(queryRunner: QueryRunner): Promise<void> {
     logMigrationStart(this.name);

-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');

     await queryRunner.query(
       `CREATE TABLE "${tablePrefix}role" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(32) NOT NULL, "scope" varchar NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), CONSTRAINT "UQ_${tablePrefix}5b49d0f504f7ef31045a1fb2eb8" UNIQUE ("scope", "name"))`,
@@ -104,7 +104,7 @@ export class CreateUserManagement1646992772331 implements MigrationInterface {
   }

   public async down(queryRunner: QueryRunner): Promise<void> {
-    const tablePrefix = config.get('database.tablePrefix');
+    const tablePrefix = config.getEnv('database.tablePrefix');
     await queryRunner.query(
       `CREATE UNIQUE INDEX "IDX_${tablePrefix}943d8f922be094eb507cb9a7f9" ON "${tablePrefix}workflow_entity" ("name") `,
     );

@@ -1,4 +1,4 @@
-import config = require('../../../../config');
+import config from '../../../../config';

 import { InitialMigration1588102412422 } from './1588102412422-InitialMigration';
 import { WebhookModel1592445003908 } from './1592445003908-WebhookModel';

2
packages/cli/src/requests.d.ts
vendored
@@ -1,5 +1,5 @@
 /* eslint-disable import/no-cycle */
-import express = require('express');
+import express from 'express';
 import {
   IConnections,
   ICredentialDataDecryptedObject,

@@ -1,9 +1,9 @@
 /* eslint-disable import/no-cycle */
 /* eslint-disable @typescript-eslint/no-unsafe-call */
 /* eslint-disable @typescript-eslint/no-unsafe-member-access */
-import TelemetryClient = require('@rudderstack/rudder-sdk-node');
+import TelemetryClient from '@rudderstack/rudder-sdk-node';
 import { IDataObject, LoggerProxy } from 'n8n-workflow';
-import config = require('../../config');
+import * as config from '../../config';
 import { getLogger } from '../Logger';

 type CountBufferItemKey =
@@ -58,10 +58,10 @@ export class Telemetry {
     this.instanceId = instanceId;
     this.versionCli = versionCli;

-    const enabled = config.get('diagnostics.enabled') as boolean;
-    const logLevel = config.get('logs.level') as boolean;
+    const enabled = config.getEnv('diagnostics.enabled');
+    const logLevel = config.getEnv('logs.level');
     if (enabled) {
-      const conf = config.get('diagnostics.config.backend') as string;
+      const conf = config.getEnv('diagnostics.config.backend');
       const [key, url] = conf.split(';');

       if (!key || !url) {

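The Telemetry hunk also fixes a latent type bug: the old code cast `config.get('logs.level')` to `boolean`, even though a log level is a string such as `'info'`. With the typed `config.getEnv`, both casts disappear and each value arrives correctly typed. A sketch of the resulting flow; the `getEnv` declarations stand in for n8n's config module, and the error message is invented.

```typescript
// Stand-ins for the typed config accessor (assumed signatures).
declare function getEnv(key: 'diagnostics.enabled'): boolean;
declare function getEnv(key: 'logs.level' | 'diagnostics.config.backend'): string;

const enabled = getEnv('diagnostics.enabled');
// A string such as 'info'; the removed `as boolean` cast mislabeled this value.
const logLevel = getEnv('logs.level');

if (enabled) {
  // The backend setting packs an API key and URL into one string: "<key>;<url>".
  const conf = getEnv('diagnostics.config.backend');
  const [key, url] = conf.split(';');
  if (!key || !url) {
    throw new Error(`Expected telemetry backend as "<key>;<url>", got "${conf}"`);
  }
  console.log(`Telemetry enabled (log level: ${logLevel}), sending to ${url}`);
}
```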
Some files were not shown because too many files have changed in this diff.