🔨 Infer typings for config schema (#2656)

* 🚚 Move schema to standalone file

* Add assertions to string literal arrays

* Infer typings for convict schema

* 🔥 Remove unneeded assertions

* 🔨 Fix errors surfaced by typings

* Type nodes.include/exclude per docs

* Account for types for exception paths

* Set method alias to flag incorrect paths

* Replace original with alias

* Make allowance for nodes.include

* Adjust leftover calls

* 🔀 Fix conflicts

* 🔥 Remove unneeded castings

* 📘 Simplify exception path type

* 📦 Update package-lock.json

* 🔥 Remove unneeded imports

* 🔥 Remove unrelated file

* Update schema

* Update interface

* 📦 Update package-lock.json

* 📦 Update package-lock.json

* 🔥 Remove leftover assertions

Co-authored-by: Jan Oberhauser <jan.oberhauser@gmail.com>
Iván Ovejero 2022-04-08 19:37:27 +02:00 committed by GitHub
parent 23f0501f4c
commit 37a6e329af
82 changed files with 1393 additions and 1256 deletions

View file

@@ -2,12 +2,7 @@
 /* eslint-disable no-console */
 import { promises as fs } from 'fs';
 import { Command, flags } from '@oclif/command';
-import {
-	BinaryDataManager,
-	IBinaryDataConfig,
-	UserSettings,
-	PLACEHOLDER_EMPTY_WORKFLOW_ID,
-} from 'n8n-core';
+import { BinaryDataManager, UserSettings, PLACEHOLDER_EMPTY_WORKFLOW_ID } from 'n8n-core';
 import { INode, LoggerProxy } from 'n8n-workflow';
 import {
@@ -52,7 +47,7 @@ export class Execute extends Command {
 	async run() {
 		const logger = getLogger();
 		LoggerProxy.init(logger);
-		const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+		const binaryDataConfig = config.getEnv('binaryDataManager');
 		await BinaryDataManager.init(binaryDataConfig, true);
 		// eslint-disable-next-line @typescript-eslint/no-shadow
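The change above is the pattern this commit applies across every CLI command: `config.get('path') as T` becomes `config.getEnv('path')`, with the return type inferred from the schema rather than asserted by hand. A minimal sketch of the difference, assuming the usual `config` import (the import path is illustrative):

```ts
import config from '../config'; // illustrative import path

// Before: convict's generic get() is effectively untyped, so every call
// site had to pin the result with a manual assertion.
const timeoutOld = config.get('executions.timeout') as number;

// After: getEnv() is typed via module augmentation (see config/types.d.ts
// below), so the path literal alone determines the return type.
const timeoutNew = config.getEnv('executions.timeout'); // inferred as number
```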

View file

@@ -9,7 +9,7 @@
 import * as fs from 'fs';
 import { Command, flags } from '@oclif/command';
-import { BinaryDataManager, IBinaryDataConfig, UserSettings } from 'n8n-core';
+import { BinaryDataManager, UserSettings } from 'n8n-core';
 // eslint-disable-next-line @typescript-eslint/no-unused-vars
 import { INode, ITaskData, LoggerProxy } from 'n8n-workflow';
@@ -196,7 +196,7 @@ export class ExecuteBatch extends Command {
 		const logger = getLogger();
 		LoggerProxy.init(logger);
-		const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+		const binaryDataConfig = config.getEnv('binaryDataManager');
 		await BinaryDataManager.init(binaryDataConfig, true);
 		// eslint-disable-next-line @typescript-eslint/no-shadow

View file

@@ -7,7 +7,7 @@
 /* eslint-disable @typescript-eslint/no-unsafe-call */
 /* eslint-disable @typescript-eslint/no-unsafe-member-access */
 import * as localtunnel from 'localtunnel';
-import { BinaryDataManager, IBinaryDataConfig, TUNNEL_SUBDOMAIN_ENV, UserSettings } from 'n8n-core';
+import { BinaryDataManager, TUNNEL_SUBDOMAIN_ENV, UserSettings } from 'n8n-core';
 import { Command, flags } from '@oclif/command';
 // eslint-disable-next-line import/no-extraneous-dependencies
 import * as Redis from 'ioredis';
@@ -100,9 +100,9 @@ export class Start extends Command {
 			await InternalHooksManager.getInstance().onN8nStop();
-			const skipWebhookDeregistration = config.get(
+			const skipWebhookDeregistration = config.getEnv(
 				'endpoints.skipWebhoooksDeregistrationOnShutdown',
-			) as boolean;
+			);
 			const removePromises = [];
 			if (activeWorkflowRunner !== undefined && !skipWebhookDeregistration) {
@@ -169,7 +169,7 @@ export class Start extends Command {
 		// Make sure the settings exist
 		const userSettings = await UserSettings.prepareUserSettings();
-		if (!config.get('userManagement.jwtSecret')) {
+		if (!config.getEnv('userManagement.jwtSecret')) {
 			// If we don't have a JWT secret set, generate
 			// one based and save to config.
 			const encryptionKey = await UserSettings.getEncryptionKey();
@@ -222,12 +222,12 @@ export class Start extends Command {
 			config.set(setting.key, JSON.parse(setting.value));
 		});
-		if (config.get('executions.mode') === 'queue') {
-			const redisHost = config.get('queue.bull.redis.host');
-			const redisPassword = config.get('queue.bull.redis.password');
-			const redisPort = config.get('queue.bull.redis.port');
-			const redisDB = config.get('queue.bull.redis.db');
-			const redisConnectionTimeoutLimit = config.get('queue.bull.redis.timeoutThreshold');
+		if (config.getEnv('executions.mode') === 'queue') {
+			const redisHost = config.getEnv('queue.bull.redis.host');
+			const redisPassword = config.getEnv('queue.bull.redis.password');
+			const redisPort = config.getEnv('queue.bull.redis.port');
+			const redisDB = config.getEnv('queue.bull.redis.db');
+			const redisConnectionTimeoutLimit = config.getEnv('queue.bull.redis.timeoutThreshold');
 			let lastTimer = 0;
 			let cumulativeTimeout = 0;
@@ -285,7 +285,7 @@ export class Start extends Command {
 		const dbType = (await GenericHelpers.getConfigValue('database.type')) as DatabaseType;
 		if (dbType === 'sqlite') {
-			const shouldRunVacuum = config.get('database.sqlite.executeVacuumOnStartup') as number;
+			const shouldRunVacuum = config.getEnv('database.sqlite.executeVacuumOnStartup');
 			if (shouldRunVacuum) {
 				// eslint-disable-next-line @typescript-eslint/no-floating-promises, @typescript-eslint/no-non-null-assertion
 				await Db.collections.Execution!.query('VACUUM;');
@@ -324,7 +324,7 @@ export class Start extends Command {
 				subdomain: tunnelSubdomain,
 			};
-			const port = config.get('port');
+			const port = config.getEnv('port');
 			// @ts-ignore
 			const webhookTunnel = await localtunnel(port, tunnelSettings);
@@ -340,7 +340,7 @@ export class Start extends Command {
 			const { cli } = await GenericHelpers.getVersions();
 			InternalHooksManager.init(instanceId, cli, nodeTypes);
-			const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+			const binaryDataConfig = config.getEnv('binaryDataManager');
 			await BinaryDataManager.init(binaryDataConfig, true);
 			await Server.start();
@@ -354,7 +354,7 @@ export class Start extends Command {
 			const editorUrl = GenericHelpers.getBaseUrl();
 			this.log(`\nEditor is now accessible via:\n${editorUrl}`);
-			const saveManualExecutions = config.get('executions.saveDataManualExecutions') as boolean;
+			const saveManualExecutions = config.getEnv('executions.saveDataManualExecutions');
 			if (saveManualExecutions) {
 				this.log('\nManual executions will be visible only for the owner');

View file

@@ -3,7 +3,7 @@
 /* eslint-disable @typescript-eslint/no-unsafe-member-access */
 /* eslint-disable @typescript-eslint/no-unsafe-assignment */
 /* eslint-disable @typescript-eslint/unbound-method */
-import { BinaryDataManager, IBinaryDataConfig, UserSettings } from 'n8n-core';
+import { BinaryDataManager, UserSettings } from 'n8n-core';
 import { Command, flags } from '@oclif/command';
 // eslint-disable-next-line import/no-extraneous-dependencies
 import * as Redis from 'ioredis';
@@ -95,7 +95,7 @@ export class Webhook extends Command {
 		// Wrap that the process does not close but we can still use async
 		await (async () => {
-			if (config.get('executions.mode') !== 'queue') {
+			if (config.getEnv('executions.mode') !== 'queue') {
 				/**
 				 * It is technically possible to run without queues but
 				 * there are 2 known bugs when running in this mode:
@@ -152,15 +152,15 @@ export class Webhook extends Command {
 			const { cli } = await GenericHelpers.getVersions();
 			InternalHooksManager.init(instanceId, cli, nodeTypes);
-			const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+			const binaryDataConfig = config.getEnv('binaryDataManager');
 			await BinaryDataManager.init(binaryDataConfig);
-			if (config.get('executions.mode') === 'queue') {
-				const redisHost = config.get('queue.bull.redis.host');
-				const redisPassword = config.get('queue.bull.redis.password');
-				const redisPort = config.get('queue.bull.redis.port');
-				const redisDB = config.get('queue.bull.redis.db');
-				const redisConnectionTimeoutLimit = config.get('queue.bull.redis.timeoutThreshold');
+			if (config.getEnv('executions.mode') === 'queue') {
+				const redisHost = config.getEnv('queue.bull.redis.host');
+				const redisPassword = config.getEnv('queue.bull.redis.password');
+				const redisPort = config.getEnv('queue.bull.redis.port');
+				const redisDB = config.getEnv('queue.bull.redis.db');
+				const redisConnectionTimeoutLimit = config.getEnv('queue.bull.redis.timeoutThreshold');
 				let lastTimer = 0;
 				let cumulativeTimeout = 0;

View file

@@ -158,7 +158,7 @@ export class Worker extends Command {
 			staticData = workflowData.staticData;
 		}
-		let workflowTimeout = config.get('executions.timeout') as number; // initialize with default
+		let workflowTimeout = config.getEnv('executions.timeout'); // initialize with default
 		if (
 			// eslint-disable-next-line @typescript-eslint/prefer-optional-chain
 			currentExecutionDb.workflowData.settings &&
@@ -169,7 +169,7 @@ export class Worker extends Command {
 		let executionTimeoutTimestamp: number | undefined;
 		if (workflowTimeout > 0) {
-			workflowTimeout = Math.min(workflowTimeout, config.get('executions.maxTimeout') as number);
+			workflowTimeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout'));
 			executionTimeoutTimestamp = Date.now() + workflowTimeout * 1000;
 		}
@@ -288,7 +288,7 @@ export class Worker extends Command {
 		await startDbInitPromise;
 		// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
-		const redisConnectionTimeoutLimit = config.get('queue.bull.redis.timeoutThreshold');
+		const redisConnectionTimeoutLimit = config.getEnv('queue.bull.redis.timeoutThreshold');
 		Worker.jobQueue = Queue.getInstance().getBullObjectInstance();
 		// eslint-disable-next-line @typescript-eslint/no-floating-promises
@@ -299,7 +299,7 @@ export class Worker extends Command {
 		InternalHooksManager.init(instanceId, versions.cli, nodeTypes);
-		const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+		const binaryDataConfig = config.getEnv('binaryDataManager');
 		await BinaryDataManager.init(binaryDataConfig);
 		console.info('\nn8n worker is now ready');
@@ -352,8 +352,8 @@ export class Worker extends Command {
 			}
 		});
-		if (config.get('queue.health.active')) {
-			const port = config.get('queue.health.port') as number;
+		if (config.getEnv('queue.health.active')) {
+			const port = config.getEnv('queue.health.port');
 			const app = express();
 			const server = http.createServer(app);

View file

@@ -1,890 +1,15 @@
-/* eslint-disable @typescript-eslint/unbound-method */
 /* eslint-disable no-console */
-/* eslint-disable no-restricted-syntax */
-/* eslint-disable @typescript-eslint/no-unsafe-assignment */
 import * as convict from 'convict';
 import * as dotenv from 'dotenv';
-import * as path from 'path';
-import * as core from 'n8n-core';
+import { schema } from './schema';
 dotenv.config();
-const config = convict({
+const config = convict(schema);
+config.getEnv = config.get;
database: {
type: {
doc: 'Type of database to use',
format: ['sqlite', 'mariadb', 'mysqldb', 'postgresdb'],
default: 'sqlite',
env: 'DB_TYPE',
},
tablePrefix: {
doc: 'Prefix for table names',
format: '*',
default: '',
env: 'DB_TABLE_PREFIX',
},
logging: {
enabled: {
doc: 'Typeorm logging enabled flag.',
format: 'Boolean',
default: false,
env: 'DB_LOGGING_ENABLED',
},
options: {
doc: 'Logging level options, default is "error". Possible values: query,error,schema,warn,info,log. To enable all logging, specify "all"',
format: String,
default: 'error',
env: 'DB_LOGGING_OPTIONS',
},
maxQueryExecutionTime: {
doc: 'Maximum number of milliseconds query should be executed before logger logs a warning. Set 0 to disable long running query warning',
format: Number,
default: 1000,
env: 'DB_LOGGING_MAX_EXECUTION_TIME',
},
},
postgresdb: {
database: {
doc: 'PostgresDB Database',
format: String,
default: 'n8n',
env: 'DB_POSTGRESDB_DATABASE',
},
host: {
doc: 'PostgresDB Host',
format: String,
default: 'localhost',
env: 'DB_POSTGRESDB_HOST',
},
password: {
doc: 'PostgresDB Password',
format: String,
default: '',
env: 'DB_POSTGRESDB_PASSWORD',
},
port: {
doc: 'PostgresDB Port',
format: Number,
default: 5432,
env: 'DB_POSTGRESDB_PORT',
},
user: {
doc: 'PostgresDB User',
format: String,
default: 'root',
env: 'DB_POSTGRESDB_USER',
},
schema: {
doc: 'PostgresDB Schema',
format: String,
default: 'public',
env: 'DB_POSTGRESDB_SCHEMA',
},
ssl: {
ca: {
doc: 'SSL certificate authority',
format: String,
default: '',
env: 'DB_POSTGRESDB_SSL_CA',
},
cert: {
doc: 'SSL certificate',
format: String,
default: '',
env: 'DB_POSTGRESDB_SSL_CERT',
},
key: {
doc: 'SSL key',
format: String,
default: '',
env: 'DB_POSTGRESDB_SSL_KEY',
},
rejectUnauthorized: {
doc: 'If unauthorized SSL connections should be rejected',
format: 'Boolean',
default: true,
env: 'DB_POSTGRESDB_SSL_REJECT_UNAUTHORIZED',
},
},
},
mysqldb: {
database: {
doc: 'MySQL Database',
format: String,
default: 'n8n',
env: 'DB_MYSQLDB_DATABASE',
},
host: {
doc: 'MySQL Host',
format: String,
default: 'localhost',
env: 'DB_MYSQLDB_HOST',
},
password: {
doc: 'MySQL Password',
format: String,
default: '',
env: 'DB_MYSQLDB_PASSWORD',
},
port: {
doc: 'MySQL Port',
format: Number,
default: 3306,
env: 'DB_MYSQLDB_PORT',
},
user: {
doc: 'MySQL User',
format: String,
default: 'root',
env: 'DB_MYSQLDB_USER',
},
},
sqlite: {
executeVacuumOnStartup: {
doc: 'Runs VACUUM operation on startup to rebuild the database. Reduces filesize and optimizes indexes. WARNING: This is a long running blocking operation. Will increase start-up time.',
format: Boolean,
default: false,
env: 'DB_SQLITE_VACUUM_ON_STARTUP',
},
},
},
credentials: {
overwrite: {
data: {
// Allows to set default values for credentials which
// get automatically prefilled and the user does not get
// displayed and can not change.
// Format: { CREDENTIAL_NAME: { PARAMTER: VALUE }}
doc: 'Overwrites for credentials',
format: '*',
default: '{}',
env: 'CREDENTIALS_OVERWRITE_DATA',
},
endpoint: {
doc: 'Fetch credentials from API',
format: String,
default: '',
env: 'CREDENTIALS_OVERWRITE_ENDPOINT',
},
},
defaultName: {
doc: 'Default name for credentials',
format: String,
default: 'My credentials',
env: 'CREDENTIALS_DEFAULT_NAME',
},
},
workflows: {
defaultName: {
doc: 'Default name for workflow',
format: String,
default: 'My workflow',
env: 'WORKFLOWS_DEFAULT_NAME',
},
},
executions: {
// By default workflows get always executed in their own process.
// If this option gets set to "main" it will run them in the
// main-process instead.
process: {
doc: 'In what process workflows should be executed',
format: ['main', 'own'],
default: 'own',
env: 'EXECUTIONS_PROCESS',
},
mode: {
doc: 'If it should run executions directly or via queue',
format: ['regular', 'queue'],
default: 'regular',
env: 'EXECUTIONS_MODE',
},
// A Workflow times out and gets canceled after this time (seconds).
// If the workflow is executed in the main process a soft timeout
// is executed (takes effect after the current node finishes).
// If a workflow is running in its own process is a soft timeout
// tried first, before killing the process after waiting for an
// additional fifth of the given timeout duration.
//
// To deactivate timeout set it to -1
//
// Timeout is currently not activated by default which will change
// in a future version.
timeout: {
doc: 'Max run time (seconds) before stopping the workflow execution',
format: Number,
default: -1,
env: 'EXECUTIONS_TIMEOUT',
},
maxTimeout: {
doc: 'Max execution time (seconds) that can be set for a workflow individually',
format: Number,
default: 3600,
env: 'EXECUTIONS_TIMEOUT_MAX',
},
// If a workflow executes all the data gets saved by default. This
// could be a problem when a workflow gets executed a lot and processes
// a lot of data. To not exceed the database's capacity it is possible to
// prune the database regularly or to not save the execution at all.
// Depending on if the execution did succeed or error a different
// save behaviour can be set.
saveDataOnError: {
doc: 'What workflow execution data to save on error',
format: ['all', 'none'],
default: 'all',
env: 'EXECUTIONS_DATA_SAVE_ON_ERROR',
},
saveDataOnSuccess: {
doc: 'What workflow execution data to save on success',
format: ['all', 'none'],
default: 'all',
env: 'EXECUTIONS_DATA_SAVE_ON_SUCCESS',
},
saveExecutionProgress: {
doc: 'Wether or not to save progress for each node executed',
format: 'Boolean',
default: false,
env: 'EXECUTIONS_DATA_SAVE_ON_PROGRESS',
},
// If the executions of workflows which got started via the editor
// should be saved. By default they will not be saved as this runs
// are normally only for testing and debugging. This setting can
// also be overwritten on a per workflow basis in the workflow settings
// in the editor.
saveDataManualExecutions: {
doc: 'Save data of executions when started manually via editor',
format: 'Boolean',
default: false,
env: 'EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS',
},
// To not exceed the database's capacity and keep its size moderate
// the execution data gets pruned regularly (default: 1 hour interval).
// All saved execution data older than the max age will be deleted.
// Pruning is currently not activated by default, which will change in
// a future version.
pruneData: {
doc: 'Delete data of past executions on a rolling basis',
format: 'Boolean',
default: false,
env: 'EXECUTIONS_DATA_PRUNE',
},
pruneDataMaxAge: {
doc: 'How old (hours) the execution data has to be to get deleted',
format: Number,
default: 336,
env: 'EXECUTIONS_DATA_MAX_AGE',
},
pruneDataTimeout: {
doc: 'Timeout (seconds) after execution data has been pruned',
format: Number,
default: 3600,
env: 'EXECUTIONS_DATA_PRUNE_TIMEOUT',
},
},
queue: {
health: {
active: {
doc: 'If health checks should be enabled',
format: 'Boolean',
default: false,
env: 'QUEUE_HEALTH_CHECK_ACTIVE',
},
port: {
doc: 'Port to serve health check on if activated',
format: Number,
default: 5678,
env: 'QUEUE_HEALTH_CHECK_PORT',
},
},
bull: {
prefix: {
doc: 'Prefix for all queue keys',
format: String,
default: '',
env: 'QUEUE_BULL_PREFIX',
},
redis: {
db: {
doc: 'Redis DB',
format: Number,
default: 0,
env: 'QUEUE_BULL_REDIS_DB',
},
host: {
doc: 'Redis Host',
format: String,
default: 'localhost',
env: 'QUEUE_BULL_REDIS_HOST',
},
password: {
doc: 'Redis Password',
format: String,
default: '',
env: 'QUEUE_BULL_REDIS_PASSWORD',
},
port: {
doc: 'Redis Port',
format: Number,
default: 6379,
env: 'QUEUE_BULL_REDIS_PORT',
},
timeoutThreshold: {
doc: 'Redis timeout threshold',
format: Number,
default: 10000,
env: 'QUEUE_BULL_REDIS_TIMEOUT_THRESHOLD',
},
},
queueRecoveryInterval: {
doc: 'If > 0 enables an active polling to the queue that can recover for Redis crashes. Given in seconds; 0 is disabled. May increase Redis traffic significantly.',
format: Number,
default: 60,
env: 'QUEUE_RECOVERY_INTERVAL',
},
},
},
generic: {
// The timezone to use. Is important for nodes like "Cron" which start the
// workflow automatically at a specified time. This setting can also be
// overwritten on a per worfklow basis in the workflow settings in the
// editor.
timezone: {
doc: 'The timezone to use',
format: '*',
default: 'America/New_York',
env: 'GENERIC_TIMEZONE',
},
},
// How n8n can be reached (Editor & REST-API)
path: {
format: String,
default: '/',
arg: 'path',
env: 'N8N_PATH',
doc: 'Path n8n is deployed to',
},
host: {
format: String,
default: 'localhost',
arg: 'host',
env: 'N8N_HOST',
doc: 'Host name n8n can be reached',
},
port: {
format: Number,
default: 5678,
arg: 'port',
env: 'N8N_PORT',
doc: 'HTTP port n8n can be reached',
},
listen_address: {
format: String,
default: '0.0.0.0',
env: 'N8N_LISTEN_ADDRESS',
doc: 'IP address n8n should listen on',
},
protocol: {
format: ['http', 'https'],
default: 'http',
env: 'N8N_PROTOCOL',
doc: 'HTTP Protocol via which n8n can be reached',
},
ssl_key: {
format: String,
default: '',
env: 'N8N_SSL_KEY',
doc: 'SSL Key for HTTPS Protocol',
},
ssl_cert: {
format: String,
default: '',
env: 'N8N_SSL_CERT',
doc: 'SSL Cert for HTTPS Protocol',
},
editorBaseUrl: {
format: String,
default: '',
env: 'N8N_EDITOR_BASE_URL',
doc: 'Public URL where the editor is accessible. Also used for emails sent from n8n.',
},
security: {
excludeEndpoints: {
doc: 'Additional endpoints to exclude auth checks. Multiple endpoints can be separated by colon (":")',
format: String,
default: '',
env: 'N8N_AUTH_EXCLUDE_ENDPOINTS',
},
basicAuth: {
active: {
format: 'Boolean',
default: false,
env: 'N8N_BASIC_AUTH_ACTIVE',
doc: 'If basic auth should be activated for editor and REST-API',
},
user: {
format: String,
default: '',
env: 'N8N_BASIC_AUTH_USER',
doc: 'The name of the basic auth user',
},
password: {
format: String,
default: '',
env: 'N8N_BASIC_AUTH_PASSWORD',
doc: 'The password of the basic auth user',
},
hash: {
format: 'Boolean',
default: false,
env: 'N8N_BASIC_AUTH_HASH',
doc: 'If password for basic auth is hashed',
},
},
jwtAuth: {
active: {
format: 'Boolean',
default: false,
env: 'N8N_JWT_AUTH_ACTIVE',
doc: 'If JWT auth should be activated for editor and REST-API',
},
jwtHeader: {
format: String,
default: '',
env: 'N8N_JWT_AUTH_HEADER',
doc: 'The request header containing a signed JWT',
},
jwtHeaderValuePrefix: {
format: String,
default: '',
env: 'N8N_JWT_AUTH_HEADER_VALUE_PREFIX',
doc: 'The request header value prefix to strip (optional)',
},
jwksUri: {
format: String,
default: '',
env: 'N8N_JWKS_URI',
doc: 'The URI to fetch JWK Set for JWT authentication',
},
jwtIssuer: {
format: String,
default: '',
env: 'N8N_JWT_ISSUER',
doc: 'JWT issuer to expect (optional)',
},
jwtNamespace: {
format: String,
default: '',
env: 'N8N_JWT_NAMESPACE',
doc: 'JWT namespace to expect (optional)',
},
jwtAllowedTenantKey: {
format: String,
default: '',
env: 'N8N_JWT_ALLOWED_TENANT_KEY',
doc: 'JWT tenant key name to inspect within JWT namespace (optional)',
},
jwtAllowedTenant: {
format: String,
default: '',
env: 'N8N_JWT_ALLOWED_TENANT',
doc: 'JWT tenant to allow (optional)',
},
},
},
endpoints: {
payloadSizeMax: {
format: Number,
default: 16,
env: 'N8N_PAYLOAD_SIZE_MAX',
doc: 'Maximum payload size in MB.',
},
metrics: {
enable: {
format: 'Boolean',
default: false,
env: 'N8N_METRICS',
doc: 'Enable metrics endpoint',
},
prefix: {
format: String,
default: 'n8n_',
env: 'N8N_METRICS_PREFIX',
doc: 'An optional prefix for metric names. Default: n8n_',
},
},
rest: {
format: String,
default: 'rest',
env: 'N8N_ENDPOINT_REST',
doc: 'Path for rest endpoint',
},
webhook: {
format: String,
default: 'webhook',
env: 'N8N_ENDPOINT_WEBHOOK',
doc: 'Path for webhook endpoint',
},
webhookWaiting: {
format: String,
default: 'webhook-waiting',
env: 'N8N_ENDPOINT_WEBHOOK_WAIT',
doc: 'Path for waiting-webhook endpoint',
},
webhookTest: {
format: String,
default: 'webhook-test',
env: 'N8N_ENDPOINT_WEBHOOK_TEST',
doc: 'Path for test-webhook endpoint',
},
disableUi: {
format: Boolean,
default: false,
env: 'N8N_DISABLE_UI',
doc: 'Disable N8N UI (Frontend).',
},
disableProductionWebhooksOnMainProcess: {
format: Boolean,
default: false,
env: 'N8N_DISABLE_PRODUCTION_MAIN_PROCESS',
doc: 'Disable production webhooks from main process. This helps ensures no http traffic load to main process when using webhook-specific processes.',
},
skipWebhoooksDeregistrationOnShutdown: {
/**
* Longer explanation: n8n deregisters webhooks on shutdown / deactivation
* and registers on startup / activation. If we skip
* deactivation on shutdown, webhooks will remain active on 3rd party services.
* We don't have to worry about startup as it always
* checks if webhooks already exist.
* If users want to upgrade n8n, it is possible to run
* two instances simultaneously without downtime, similar
* to blue/green deployment.
* WARNING: Trigger nodes (like Cron) will cause duplication
* of work, so be aware when using.
*/
doc: 'Deregister webhooks on external services only when workflows are deactivated.',
format: Boolean,
default: false,
env: 'N8N_SKIP_WEBHOOK_DEREGISTRATION_SHUTDOWN',
},
},
workflowTagsDisabled: {
format: Boolean,
default: false,
env: 'N8N_WORKFLOW_TAGS_DISABLED',
doc: 'Disable worfklow tags.',
},
userManagement: {
disabled: {
doc: 'Disable user management and hide it completely.',
format: Boolean,
default: false,
env: 'N8N_USER_MANAGEMENT_DISABLED',
},
jwtSecret: {
doc: 'Set a specific JWT secret (optional - n8n can generate one)', // Generated @ start.ts
format: String,
default: '',
env: 'N8N_USER_MANAGEMENT_JWT_SECRET',
},
emails: {
mode: {
doc: 'How to send emails',
format: ['', 'smtp'],
default: 'smtp',
env: 'N8N_EMAIL_MODE',
},
smtp: {
host: {
doc: 'SMTP server host',
format: String, // e.g. 'smtp.gmail.com'
default: '',
env: 'N8N_SMTP_HOST',
},
port: {
doc: 'SMTP server port',
format: Number,
default: 465,
env: 'N8N_SMTP_PORT',
},
secure: {
doc: 'Whether or not to use SSL for SMTP',
format: Boolean,
default: true,
env: 'N8N_SMTP_SSL',
},
auth: {
user: {
doc: 'SMTP login username',
format: String, // e.g.'you@gmail.com'
default: '',
env: 'N8N_SMTP_USER',
},
pass: {
doc: 'SMTP login password',
format: String,
default: '',
env: 'N8N_SMTP_PASS',
},
},
sender: {
doc: 'How to display sender name',
format: String,
default: '',
env: 'N8N_SMTP_SENDER',
},
},
templates: {
invite: {
doc: 'Overrides default HTML template for inviting new people (use full path)',
format: String,
default: '',
env: 'N8N_UM_EMAIL_TEMPLATES_INVITE',
},
passwordReset: {
doc: 'Overrides default HTML template for resetting password (use full path)',
format: String,
default: '',
env: 'N8N_UM_EMAIL_TEMPLATES_PWRESET',
},
},
},
},
externalHookFiles: {
doc: 'Files containing external hooks. Multiple files can be separated by colon (":")',
format: String,
default: '',
env: 'EXTERNAL_HOOK_FILES',
},
nodes: {
include: {
doc: 'Nodes to load',
format: function check(rawValue) {
if (rawValue === '') {
return;
}
try {
const values = JSON.parse(rawValue);
if (!Array.isArray(values)) {
throw new Error();
}
for (const value of values) {
if (typeof value !== 'string') {
throw new Error();
}
}
} catch (error) {
throw new TypeError(`The Nodes to include is not a valid Array of strings.`);
}
},
default: undefined,
env: 'NODES_INCLUDE',
},
exclude: {
doc: 'Nodes not to load',
format: function check(rawValue) {
try {
const values = JSON.parse(rawValue);
if (!Array.isArray(values)) {
throw new Error();
}
for (const value of values) {
if (typeof value !== 'string') {
throw new Error();
}
}
} catch (error) {
throw new TypeError(`The Nodes to exclude is not a valid Array of strings.`);
}
},
default: '[]',
env: 'NODES_EXCLUDE',
},
errorTriggerType: {
doc: 'Node Type to use as Error Trigger',
format: String,
default: 'n8n-nodes-base.errorTrigger',
env: 'NODES_ERROR_TRIGGER_TYPE',
},
},
logs: {
level: {
doc: 'Log output level',
format: ['error', 'warn', 'info', 'verbose', 'debug', 'silent'],
default: 'info',
env: 'N8N_LOG_LEVEL',
},
output: {
doc: 'Where to output logs. Options are: console, file. Multiple can be separated by comma (",")',
format: String,
default: 'console',
env: 'N8N_LOG_OUTPUT',
},
file: {
fileCountMax: {
doc: 'Maximum number of files to keep.',
format: Number,
default: 100,
env: 'N8N_LOG_FILE_COUNT_MAX',
},
fileSizeMax: {
doc: 'Maximum size for each log file in MB.',
format: Number,
default: 16,
env: 'N8N_LOG_FILE_SIZE_MAX',
},
location: {
doc: 'Log file location; only used if log output is set to file.',
format: String,
default: path.join(core.UserSettings.getUserN8nFolderPath(), 'logs/n8n.log'),
env: 'N8N_LOG_FILE_LOCATION',
},
},
},
versionNotifications: {
enabled: {
doc: 'Whether feature is enabled to request notifications about new versions and security updates.',
format: Boolean,
default: true,
env: 'N8N_VERSION_NOTIFICATIONS_ENABLED',
},
endpoint: {
doc: 'Endpoint to retrieve version information from.',
format: String,
default: 'https://api.n8n.io/versions/',
env: 'N8N_VERSION_NOTIFICATIONS_ENDPOINT',
},
infoUrl: {
doc: `Url in New Versions Panel with more information on updating one's instance.`,
format: String,
default: 'https://docs.n8n.io/getting-started/installation/updating.html',
env: 'N8N_VERSION_NOTIFICATIONS_INFO_URL',
},
},
templates: {
enabled: {
doc: 'Whether templates feature is enabled to load workflow templates.',
format: Boolean,
default: true,
env: 'N8N_TEMPLATES_ENABLED',
},
host: {
doc: 'Endpoint host to retrieve workflow templates from endpoints.',
format: String,
default: 'https://api.n8n.io/',
env: 'N8N_TEMPLATES_HOST',
},
},
binaryDataManager: {
availableModes: {
format: String,
default: 'filesystem',
env: 'N8N_AVAILABLE_BINARY_DATA_MODES',
doc: 'Available modes of binary data storage, as comma separated strings',
},
mode: {
format: ['default', 'filesystem'],
default: 'default',
env: 'N8N_DEFAULT_BINARY_DATA_MODE',
doc: 'Storage mode for binary data',
},
localStoragePath: {
format: String,
default: path.join(core.UserSettings.getUserN8nFolderPath(), 'binaryData'),
env: 'N8N_BINARY_DATA_STORAGE_PATH',
doc: 'Path for binary data storage in "filesystem" mode',
},
binaryDataTTL: {
format: Number,
default: 60,
env: 'N8N_BINARY_DATA_TTL',
doc: 'TTL for binary data of unsaved executions in minutes',
},
persistedBinaryDataTTL: {
format: Number,
default: 1440,
env: 'N8N_PERSISTED_BINARY_DATA_TTL',
doc: 'TTL for persisted binary data in minutes (binary data gets deleted if not persisted before TTL expires)',
},
},
deployment: {
type: {
format: String,
default: 'default',
env: 'N8N_DEPLOYMENT_TYPE',
},
},
hiringBanner: {
enabled: {
doc: 'Whether hiring banner in browser console is enabled.',
format: Boolean,
default: true,
env: 'N8N_HIRING_BANNER_ENABLED',
},
},
personalization: {
enabled: {
doc: 'Whether personalization is enabled.',
format: Boolean,
default: true,
env: 'N8N_PERSONALIZATION_ENABLED',
},
},
diagnostics: {
enabled: {
doc: 'Whether diagnostic mode is enabled.',
format: Boolean,
default: true,
env: 'N8N_DIAGNOSTICS_ENABLED',
},
config: {
frontend: {
doc: 'Diagnostics config for frontend.',
format: String,
default: '1zPn9bgWPzlQc0p8Gj1uiK6DOTn;https://telemetry.n8n.io',
env: 'N8N_DIAGNOSTICS_CONFIG_FRONTEND',
},
backend: {
doc: 'Diagnostics config for backend.',
format: String,
default: '1zPn7YoGC3ZXE9zLeTKLuQCB4F6;https://telemetry.n8n.io/v1/batch',
env: 'N8N_DIAGNOSTICS_CONFIG_BACKEND',
},
},
},
defaultLocale: {
doc: 'Default locale for the UI',
format: String,
default: 'en',
env: 'N8N_DEFAULT_LOCALE',
},
-});
 // Overwrite default configuration with settings which got defined in
 // optional configuration files
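The `config.getEnv = config.get` assignment is the commit's central trick (the "Set method alias to flag incorrect paths" and "Replace original with alias" bullets above). Module augmentation can only add overloads; it cannot remove convict's loose `get<T>(name?: string): T`, so a misspelled path passed to `get` would still type-check through the stock signature. Aliasing the same runtime method under a fresh name gives the augmentation in `types.d.ts` (below) a slot that carries only the strict signature. A reduced sketch of the idea, with a deliberately loose `unknown` return standing in for the real per-path typings:

```ts
import * as convict from 'convict';

// Augment only the new name; convict's own get() keeps its loose signature.
declare module 'convict' {
	interface Config<T> {
		getEnv(path: string): unknown; // the real typings narrow this per path
	}
}

const config = convict({
	port: { format: Number, default: 5678, env: 'N8N_PORT' },
});

// Same function at runtime; only the compile-time face changes.
config.getEnv = config.get;
```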

View file

@@ -0,0 +1,883 @@
/* eslint-disable no-restricted-syntax */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import * as path from 'path';
import * as core from 'n8n-core';
export const schema = {
database: {
type: {
doc: 'Type of database to use',
format: ['sqlite', 'mariadb', 'mysqldb', 'postgresdb'] as const,
default: 'sqlite',
env: 'DB_TYPE',
},
tablePrefix: {
doc: 'Prefix for table names',
format: '*',
default: '',
env: 'DB_TABLE_PREFIX',
},
logging: {
enabled: {
doc: 'Typeorm logging enabled flag.',
format: Boolean,
default: false,
env: 'DB_LOGGING_ENABLED',
},
options: {
doc: 'Logging level options, default is "error". Possible values: query,error,schema,warn,info,log. To enable all logging, specify "all"',
format: String,
default: 'error',
env: 'DB_LOGGING_OPTIONS',
},
maxQueryExecutionTime: {
doc: 'Maximum number of milliseconds query should be executed before logger logs a warning. Set 0 to disable long running query warning',
format: Number,
default: 1000,
env: 'DB_LOGGING_MAX_EXECUTION_TIME',
},
},
postgresdb: {
database: {
doc: 'PostgresDB Database',
format: String,
default: 'n8n',
env: 'DB_POSTGRESDB_DATABASE',
},
host: {
doc: 'PostgresDB Host',
format: String,
default: 'localhost',
env: 'DB_POSTGRESDB_HOST',
},
password: {
doc: 'PostgresDB Password',
format: String,
default: '',
env: 'DB_POSTGRESDB_PASSWORD',
},
port: {
doc: 'PostgresDB Port',
format: Number,
default: 5432,
env: 'DB_POSTGRESDB_PORT',
},
user: {
doc: 'PostgresDB User',
format: String,
default: 'root',
env: 'DB_POSTGRESDB_USER',
},
schema: {
doc: 'PostgresDB Schema',
format: String,
default: 'public',
env: 'DB_POSTGRESDB_SCHEMA',
},
ssl: {
ca: {
doc: 'SSL certificate authority',
format: String,
default: '',
env: 'DB_POSTGRESDB_SSL_CA',
},
cert: {
doc: 'SSL certificate',
format: String,
default: '',
env: 'DB_POSTGRESDB_SSL_CERT',
},
key: {
doc: 'SSL key',
format: String,
default: '',
env: 'DB_POSTGRESDB_SSL_KEY',
},
rejectUnauthorized: {
doc: 'If unauthorized SSL connections should be rejected',
format: 'Boolean',
default: true,
env: 'DB_POSTGRESDB_SSL_REJECT_UNAUTHORIZED',
},
},
},
mysqldb: {
database: {
doc: 'MySQL Database',
format: String,
default: 'n8n',
env: 'DB_MYSQLDB_DATABASE',
},
host: {
doc: 'MySQL Host',
format: String,
default: 'localhost',
env: 'DB_MYSQLDB_HOST',
},
password: {
doc: 'MySQL Password',
format: String,
default: '',
env: 'DB_MYSQLDB_PASSWORD',
},
port: {
doc: 'MySQL Port',
format: Number,
default: 3306,
env: 'DB_MYSQLDB_PORT',
},
user: {
doc: 'MySQL User',
format: String,
default: 'root',
env: 'DB_MYSQLDB_USER',
},
},
sqlite: {
executeVacuumOnStartup: {
doc: 'Runs VACUUM operation on startup to rebuild the database. Reduces filesize and optimizes indexes. WARNING: This is a long running blocking operation. Will increase start-up time.',
format: Boolean,
default: false,
env: 'DB_SQLITE_VACUUM_ON_STARTUP',
},
},
},
credentials: {
overwrite: {
data: {
// Allows to set default values for credentials which
// get automatically prefilled and the user does not get
// displayed and can not change.
// Format: { CREDENTIAL_NAME: { PARAMTER: VALUE }}
doc: 'Overwrites for credentials',
format: '*',
default: '{}',
env: 'CREDENTIALS_OVERWRITE_DATA',
},
endpoint: {
doc: 'Fetch credentials from API',
format: String,
default: '',
env: 'CREDENTIALS_OVERWRITE_ENDPOINT',
},
},
defaultName: {
doc: 'Default name for credentials',
format: String,
default: 'My credentials',
env: 'CREDENTIALS_DEFAULT_NAME',
},
},
workflows: {
defaultName: {
doc: 'Default name for workflow',
format: String,
default: 'My workflow',
env: 'WORKFLOWS_DEFAULT_NAME',
},
},
executions: {
// By default workflows get always executed in their own process.
// If this option gets set to "main" it will run them in the
// main-process instead.
process: {
doc: 'In what process workflows should be executed',
format: ['main', 'own'] as const,
default: 'own',
env: 'EXECUTIONS_PROCESS',
},
mode: {
doc: 'If it should run executions directly or via queue',
format: ['regular', 'queue'] as const,
default: 'regular',
env: 'EXECUTIONS_MODE',
},
// A Workflow times out and gets canceled after this time (seconds).
// If the workflow is executed in the main process a soft timeout
// is executed (takes effect after the current node finishes).
// If a workflow is running in its own process is a soft timeout
// tried first, before killing the process after waiting for an
// additional fifth of the given timeout duration.
//
// To deactivate timeout set it to -1
//
// Timeout is currently not activated by default which will change
// in a future version.
timeout: {
doc: 'Max run time (seconds) before stopping the workflow execution',
format: Number,
default: -1,
env: 'EXECUTIONS_TIMEOUT',
},
maxTimeout: {
doc: 'Max execution time (seconds) that can be set for a workflow individually',
format: Number,
default: 3600,
env: 'EXECUTIONS_TIMEOUT_MAX',
},
// If a workflow executes all the data gets saved by default. This
// could be a problem when a workflow gets executed a lot and processes
// a lot of data. To not exceed the database's capacity it is possible to
// prune the database regularly or to not save the execution at all.
// Depending on if the execution did succeed or error a different
// save behaviour can be set.
saveDataOnError: {
doc: 'What workflow execution data to save on error',
format: ['all', 'none'] as const,
default: 'all',
env: 'EXECUTIONS_DATA_SAVE_ON_ERROR',
},
saveDataOnSuccess: {
doc: 'What workflow execution data to save on success',
format: ['all', 'none'] as const,
default: 'all',
env: 'EXECUTIONS_DATA_SAVE_ON_SUCCESS',
},
saveExecutionProgress: {
doc: 'Wether or not to save progress for each node executed',
format: 'Boolean',
default: false,
env: 'EXECUTIONS_DATA_SAVE_ON_PROGRESS',
},
// If the executions of workflows which got started via the editor
// should be saved. By default they will not be saved as this runs
// are normally only for testing and debugging. This setting can
// also be overwritten on a per workflow basis in the workflow settings
// in the editor.
saveDataManualExecutions: {
doc: 'Save data of executions when started manually via editor',
format: 'Boolean',
default: false,
env: 'EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS',
},
// To not exceed the database's capacity and keep its size moderate
// the execution data gets pruned regularly (default: 1 hour interval).
// All saved execution data older than the max age will be deleted.
// Pruning is currently not activated by default, which will change in
// a future version.
pruneData: {
doc: 'Delete data of past executions on a rolling basis',
format: 'Boolean',
default: false,
env: 'EXECUTIONS_DATA_PRUNE',
},
pruneDataMaxAge: {
doc: 'How old (hours) the execution data has to be to get deleted',
format: Number,
default: 336,
env: 'EXECUTIONS_DATA_MAX_AGE',
},
pruneDataTimeout: {
doc: 'Timeout (seconds) after execution data has been pruned',
format: Number,
default: 3600,
env: 'EXECUTIONS_DATA_PRUNE_TIMEOUT',
},
},
queue: {
health: {
active: {
doc: 'If health checks should be enabled',
format: 'Boolean',
default: false,
env: 'QUEUE_HEALTH_CHECK_ACTIVE',
},
port: {
doc: 'Port to serve health check on if activated',
format: Number,
default: 5678,
env: 'QUEUE_HEALTH_CHECK_PORT',
},
},
bull: {
prefix: {
doc: 'Prefix for all queue keys',
format: String,
default: '',
env: 'QUEUE_BULL_PREFIX',
},
redis: {
db: {
doc: 'Redis DB',
format: Number,
default: 0,
env: 'QUEUE_BULL_REDIS_DB',
},
host: {
doc: 'Redis Host',
format: String,
default: 'localhost',
env: 'QUEUE_BULL_REDIS_HOST',
},
password: {
doc: 'Redis Password',
format: String,
default: '',
env: 'QUEUE_BULL_REDIS_PASSWORD',
},
port: {
doc: 'Redis Port',
format: Number,
default: 6379,
env: 'QUEUE_BULL_REDIS_PORT',
},
timeoutThreshold: {
doc: 'Redis timeout threshold',
format: Number,
default: 10000,
env: 'QUEUE_BULL_REDIS_TIMEOUT_THRESHOLD',
},
},
queueRecoveryInterval: {
doc: 'If > 0 enables an active polling to the queue that can recover for Redis crashes. Given in seconds; 0 is disabled. May increase Redis traffic significantly.',
format: Number,
default: 60,
env: 'QUEUE_RECOVERY_INTERVAL',
},
},
},
generic: {
// The timezone to use. Is important for nodes like "Cron" which start the
// workflow automatically at a specified time. This setting can also be
// overwritten on a per worfklow basis in the workflow settings in the
// editor.
timezone: {
doc: 'The timezone to use',
format: '*',
default: 'America/New_York',
env: 'GENERIC_TIMEZONE',
},
},
// How n8n can be reached (Editor & REST-API)
path: {
format: String,
default: '/',
arg: 'path',
env: 'N8N_PATH',
doc: 'Path n8n is deployed to',
},
host: {
format: String,
default: 'localhost',
arg: 'host',
env: 'N8N_HOST',
doc: 'Host name n8n can be reached',
},
port: {
format: Number,
default: 5678,
arg: 'port',
env: 'N8N_PORT',
doc: 'HTTP port n8n can be reached',
},
listen_address: {
format: String,
default: '0.0.0.0',
env: 'N8N_LISTEN_ADDRESS',
doc: 'IP address n8n should listen on',
},
protocol: {
format: ['http', 'https'] as const,
default: 'http',
env: 'N8N_PROTOCOL',
doc: 'HTTP Protocol via which n8n can be reached',
},
ssl_key: {
format: String,
default: '',
env: 'N8N_SSL_KEY',
doc: 'SSL Key for HTTPS Protocol',
},
ssl_cert: {
format: String,
default: '',
env: 'N8N_SSL_CERT',
doc: 'SSL Cert for HTTPS Protocol',
},
editorBaseUrl: {
format: String,
default: '',
env: 'N8N_EDITOR_BASE_URL',
doc: 'Public URL where the editor is accessible. Also used for emails sent from n8n.',
},
security: {
excludeEndpoints: {
doc: 'Additional endpoints to exclude auth checks. Multiple endpoints can be separated by colon (":")',
format: String,
default: '',
env: 'N8N_AUTH_EXCLUDE_ENDPOINTS',
},
basicAuth: {
active: {
format: 'Boolean',
default: false,
env: 'N8N_BASIC_AUTH_ACTIVE',
doc: 'If basic auth should be activated for editor and REST-API',
},
user: {
format: String,
default: '',
env: 'N8N_BASIC_AUTH_USER',
doc: 'The name of the basic auth user',
},
password: {
format: String,
default: '',
env: 'N8N_BASIC_AUTH_PASSWORD',
doc: 'The password of the basic auth user',
},
hash: {
format: 'Boolean',
default: false,
env: 'N8N_BASIC_AUTH_HASH',
doc: 'If password for basic auth is hashed',
},
},
jwtAuth: {
active: {
format: 'Boolean',
default: false,
env: 'N8N_JWT_AUTH_ACTIVE',
doc: 'If JWT auth should be activated for editor and REST-API',
},
jwtHeader: {
format: String,
default: '',
env: 'N8N_JWT_AUTH_HEADER',
doc: 'The request header containing a signed JWT',
},
jwtHeaderValuePrefix: {
format: String,
default: '',
env: 'N8N_JWT_AUTH_HEADER_VALUE_PREFIX',
doc: 'The request header value prefix to strip (optional)',
},
jwksUri: {
format: String,
default: '',
env: 'N8N_JWKS_URI',
doc: 'The URI to fetch JWK Set for JWT authentication',
},
jwtIssuer: {
format: String,
default: '',
env: 'N8N_JWT_ISSUER',
doc: 'JWT issuer to expect (optional)',
},
jwtNamespace: {
format: String,
default: '',
env: 'N8N_JWT_NAMESPACE',
doc: 'JWT namespace to expect (optional)',
},
jwtAllowedTenantKey: {
format: String,
default: '',
env: 'N8N_JWT_ALLOWED_TENANT_KEY',
doc: 'JWT tenant key name to inspect within JWT namespace (optional)',
},
jwtAllowedTenant: {
format: String,
default: '',
env: 'N8N_JWT_ALLOWED_TENANT',
doc: 'JWT tenant to allow (optional)',
},
},
},
endpoints: {
payloadSizeMax: {
format: Number,
default: 16,
env: 'N8N_PAYLOAD_SIZE_MAX',
doc: 'Maximum payload size in MB.',
},
metrics: {
enable: {
format: 'Boolean',
default: false,
env: 'N8N_METRICS',
doc: 'Enable metrics endpoint',
},
prefix: {
format: String,
default: 'n8n_',
env: 'N8N_METRICS_PREFIX',
doc: 'An optional prefix for metric names. Default: n8n_',
},
},
rest: {
format: String,
default: 'rest',
env: 'N8N_ENDPOINT_REST',
doc: 'Path for rest endpoint',
},
webhook: {
format: String,
default: 'webhook',
env: 'N8N_ENDPOINT_WEBHOOK',
doc: 'Path for webhook endpoint',
},
webhookWaiting: {
format: String,
default: 'webhook-waiting',
env: 'N8N_ENDPOINT_WEBHOOK_WAIT',
doc: 'Path for waiting-webhook endpoint',
},
webhookTest: {
format: String,
default: 'webhook-test',
env: 'N8N_ENDPOINT_WEBHOOK_TEST',
doc: 'Path for test-webhook endpoint',
},
disableUi: {
format: Boolean,
default: false,
env: 'N8N_DISABLE_UI',
doc: 'Disable N8N UI (Frontend).',
},
disableProductionWebhooksOnMainProcess: {
format: Boolean,
default: false,
env: 'N8N_DISABLE_PRODUCTION_MAIN_PROCESS',
doc: 'Disable production webhooks from main process. This helps ensures no http traffic load to main process when using webhook-specific processes.',
},
skipWebhoooksDeregistrationOnShutdown: {
/**
* Longer explanation: n8n deregisters webhooks on shutdown / deactivation
* and registers on startup / activation. If we skip
* deactivation on shutdown, webhooks will remain active on 3rd party services.
* We don't have to worry about startup as it always
* checks if webhooks already exist.
* If users want to upgrade n8n, it is possible to run
* two instances simultaneously without downtime, similar
* to blue/green deployment.
* WARNING: Trigger nodes (like Cron) will cause duplication
* of work, so be aware when using.
*/
doc: 'Deregister webhooks on external services only when workflows are deactivated.',
format: Boolean,
default: false,
env: 'N8N_SKIP_WEBHOOK_DEREGISTRATION_SHUTDOWN',
},
},
workflowTagsDisabled: {
format: Boolean,
default: false,
env: 'N8N_WORKFLOW_TAGS_DISABLED',
doc: 'Disable worfklow tags.',
},
userManagement: {
disabled: {
doc: 'Disable user management and hide it completely.',
format: Boolean,
default: false,
env: 'N8N_USER_MANAGEMENT_DISABLED',
},
jwtSecret: {
doc: 'Set a specific JWT secret (optional - n8n can generate one)', // Generated @ start.ts
format: String,
default: '',
env: 'N8N_USER_MANAGEMENT_JWT_SECRET',
},
emails: {
mode: {
doc: 'How to send emails',
format: ['', 'smtp'] as const,
default: 'smtp',
env: 'N8N_EMAIL_MODE',
},
smtp: {
host: {
doc: 'SMTP server host',
format: String, // e.g. 'smtp.gmail.com'
default: '',
env: 'N8N_SMTP_HOST',
},
port: {
doc: 'SMTP server port',
format: Number,
default: 465,
env: 'N8N_SMTP_PORT',
},
secure: {
doc: 'Whether or not to use SSL for SMTP',
format: Boolean,
default: true,
env: 'N8N_SMTP_SSL',
},
auth: {
user: {
doc: 'SMTP login username',
format: String, // e.g.'you@gmail.com'
default: '',
env: 'N8N_SMTP_USER',
},
pass: {
doc: 'SMTP login password',
format: String,
default: '',
env: 'N8N_SMTP_PASS',
},
},
sender: {
doc: 'How to display sender name',
format: String,
default: '',
env: 'N8N_SMTP_SENDER',
},
},
templates: {
invite: {
doc: 'Overrides default HTML template for inviting new people (use full path)',
format: String,
default: '',
env: 'N8N_UM_EMAIL_TEMPLATES_INVITE',
},
passwordReset: {
doc: 'Overrides default HTML template for resetting password (use full path)',
format: String,
default: '',
env: 'N8N_UM_EMAIL_TEMPLATES_PWRESET',
},
},
},
},
externalHookFiles: {
doc: 'Files containing external hooks. Multiple files can be separated by colon (":")',
format: String,
default: '',
env: 'EXTERNAL_HOOK_FILES',
},
nodes: {
include: {
doc: 'Nodes to load',
format: function check(rawValue: string): void {
if (rawValue === '') {
return;
}
try {
const values = JSON.parse(rawValue);
if (!Array.isArray(values)) {
throw new Error();
}
for (const value of values) {
if (typeof value !== 'string') {
throw new Error();
}
}
} catch (error) {
throw new TypeError(`The Nodes to include is not a valid Array of strings.`);
}
},
default: undefined,
env: 'NODES_INCLUDE',
},
exclude: {
doc: 'Nodes not to load',
format: function check(rawValue: string): void {
try {
const values = JSON.parse(rawValue);
if (!Array.isArray(values)) {
throw new Error();
}
for (const value of values) {
if (typeof value !== 'string') {
throw new Error();
}
}
} catch (error) {
throw new TypeError(`The Nodes to exclude is not a valid Array of strings.`);
}
},
default: '[]',
env: 'NODES_EXCLUDE',
},
errorTriggerType: {
doc: 'Node Type to use as Error Trigger',
format: String,
default: 'n8n-nodes-base.errorTrigger',
env: 'NODES_ERROR_TRIGGER_TYPE',
},
},
logs: {
level: {
doc: 'Log output level',
format: ['error', 'warn', 'info', 'verbose', 'debug', 'silent'] as const,
default: 'info',
env: 'N8N_LOG_LEVEL',
},
output: {
doc: 'Where to output logs. Options are: console, file. Multiple can be separated by comma (",")',
format: String,
default: 'console',
env: 'N8N_LOG_OUTPUT',
},
file: {
fileCountMax: {
doc: 'Maximum number of files to keep.',
format: Number,
default: 100,
env: 'N8N_LOG_FILE_COUNT_MAX',
},
fileSizeMax: {
doc: 'Maximum size for each log file in MB.',
format: Number,
default: 16,
env: 'N8N_LOG_FILE_SIZE_MAX',
},
location: {
doc: 'Log file location; only used if log output is set to file.',
format: String,
default: path.join(core.UserSettings.getUserN8nFolderPath(), 'logs/n8n.log'),
env: 'N8N_LOG_FILE_LOCATION',
},
},
},
versionNotifications: {
enabled: {
doc: 'Whether feature is enabled to request notifications about new versions and security updates.',
format: Boolean,
default: true,
env: 'N8N_VERSION_NOTIFICATIONS_ENABLED',
},
endpoint: {
doc: 'Endpoint to retrieve version information from.',
format: String,
default: 'https://api.n8n.io/versions/',
env: 'N8N_VERSION_NOTIFICATIONS_ENDPOINT',
},
infoUrl: {
doc: `Url in New Versions Panel with more information on updating one's instance.`,
format: String,
default: 'https://docs.n8n.io/getting-started/installation/updating.html',
env: 'N8N_VERSION_NOTIFICATIONS_INFO_URL',
},
},
templates: {
enabled: {
doc: 'Whether templates feature is enabled to load workflow templates.',
format: Boolean,
default: true,
env: 'N8N_TEMPLATES_ENABLED',
},
host: {
doc: 'Endpoint host to retrieve workflow templates from endpoints.',
format: String,
default: 'https://api.n8n.io/',
env: 'N8N_TEMPLATES_HOST',
},
},
binaryDataManager: {
availableModes: {
format: String,
default: 'filesystem',
env: 'N8N_AVAILABLE_BINARY_DATA_MODES',
doc: 'Available modes of binary data storage, as comma separated strings',
},
mode: {
format: ['default', 'filesystem'] as const,
default: 'default',
env: 'N8N_DEFAULT_BINARY_DATA_MODE',
doc: 'Storage mode for binary data',
},
localStoragePath: {
format: String,
default: path.join(core.UserSettings.getUserN8nFolderPath(), 'binaryData'),
env: 'N8N_BINARY_DATA_STORAGE_PATH',
doc: 'Path for binary data storage in "filesystem" mode',
},
binaryDataTTL: {
format: Number,
default: 60,
env: 'N8N_BINARY_DATA_TTL',
doc: 'TTL for binary data of unsaved executions in minutes',
},
persistedBinaryDataTTL: {
format: Number,
default: 1440,
env: 'N8N_PERSISTED_BINARY_DATA_TTL',
doc: 'TTL for persisted binary data in minutes (binary data gets deleted if not persisted before TTL expires)',
},
},
deployment: {
type: {
format: String,
default: 'default',
env: 'N8N_DEPLOYMENT_TYPE',
},
},
hiringBanner: {
enabled: {
doc: 'Whether hiring banner in browser console is enabled.',
format: Boolean,
default: true,
env: 'N8N_HIRING_BANNER_ENABLED',
},
},
personalization: {
enabled: {
doc: 'Whether personalization is enabled.',
format: Boolean,
default: true,
env: 'N8N_PERSONALIZATION_ENABLED',
},
},
diagnostics: {
enabled: {
doc: 'Whether diagnostic mode is enabled.',
format: Boolean,
default: true,
env: 'N8N_DIAGNOSTICS_ENABLED',
},
config: {
frontend: {
doc: 'Diagnostics config for frontend.',
format: String,
default: '1zPn9bgWPzlQc0p8Gj1uiK6DOTn;https://telemetry.n8n.io',
env: 'N8N_DIAGNOSTICS_CONFIG_FRONTEND',
},
backend: {
doc: 'Diagnostics config for backend.',
format: String,
default: '1zPn7YoGC3ZXE9zLeTKLuQCB4F6;https://telemetry.n8n.io/v1/batch',
env: 'N8N_DIAGNOSTICS_CONFIG_BACKEND',
},
},
},
defaultLocale: {
doc: 'Default locale for the UI',
format: String,
default: 'en',
env: 'N8N_DEFAULT_LOCALE',
},
};
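A note on the `as const` assertions added throughout this file: without them, TypeScript widens each enum-style `format` array to `string[]`, erasing the literal members that the dotted-path typings in `types.d.ts` (below) need. A small self-contained sketch of the effect:

```ts
// Widened: the literals are lost, only string[] survives.
const widened = ['regular', 'queue']; // type: string[]

// Asserted: a readonly tuple that preserves the literal members.
const literal = ['regular', 'queue'] as const; // type: readonly ['regular', 'queue']

// Indexing the tuple by number recovers the union that getEnv() can
// then return for config paths such as 'executions.mode'.
type ExecutionsMode = (typeof literal)[number]; // 'regular' | 'queue'
```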

packages/cli/config/types.d.ts (vendored, new file, 132 lines)
View file

@@ -0,0 +1,132 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
import { IBinaryDataConfig } from '../../core/dist/src';
import { schema } from './schema';
// -----------------------------------
// transformers
// -----------------------------------
/**
* Transform an object (convict schema) into a union of string arrays (path segments),
* one for every valid path in the schema object, filtered by type.
*
* ```ts
* ["port", "default"] | ["queue", "bull", "redis", "port", "default"] | ["queue", "bull", "redis", "db", "default"] | ["queue", "bull", "redis", "timeoutThreshold", "default"] | etc
* ```
*/
type GetPathSegments<Traversable, Filter> = Traversable extends Filter
? []
: {
[K in ValidKeys<Traversable>]: [K, ...GetPathSegments<Traversable[K], Filter>];
}[ValidKeys<Traversable>];
/**
* Transform a union of string arrays (path segments) into a union of strings (dotted paths).
*
* ```ts
* "port" | "queue.bull.redis.port" | "queue.bull.redis.db" | "queue.bull.redis.timeoutThreshold" | etc
* ```
*/
type JoinByDotting<T extends string[]> = T extends [infer F]
? F
: T extends [infer F, ...infer R]
? F extends string
? R extends string[]
? `${F}.${JoinByDotting<R>}`
: never
: never
: string;
type ToDottedPath<T> = JoinByDotting<RemoveExcess<T>>;
type CollectPathsByType<T> = ToDottedPath<GetPathSegments<typeof schema, T>>;
// -----------------------------------
// path-to-return-type mapper
// -----------------------------------
type NumericPath = CollectPathsByType<number>;
type BooleanPath = CollectPathsByType<boolean>;
type StringLiteralArrayPath = CollectPathsByType<Readonly<string[]>>;
type StringPath = CollectPathsByType<string>;
type ConfigOptionPath =
| NumericPath
| BooleanPath
| StringPath
| StringLiteralArrayPath
| keyof ExceptionPaths;
type ToReturnType<T extends ConfigOptionPath> = T extends NumericPath
? number
: T extends BooleanPath
? boolean
: T extends StringLiteralArrayPath
? StringLiteralMap[T]
: T extends keyof ExceptionPaths
? ExceptionPaths[T]
: T extends StringPath
? string
: unknown;
type ExceptionPaths = {
'queue.bull.redis': object;
binaryDataManager: IBinaryDataConfig;
'nodes.include': undefined;
'userManagement.isInstanceOwnerSetUp': boolean;
'userManagement.skipInstanceOwnerSetup': boolean;
};
// -----------------------------------
// string literals map
// -----------------------------------
type GetPathSegmentsWithUnions<T> = T extends ReadonlyArray<infer C>
? [C]
: {
[K in ValidKeys<T>]: [K, ...GetPathSegmentsWithUnions<T[K]>];
}[ValidKeys<T>];
type ToPathUnionPair<T extends string[]> = T extends [...infer Path, infer Union]
? Path extends string[]
? { path: ToDottedPath<Path>; union: Union }
: never
: never;
type ToStringLiteralMap<T extends { path: string; union: string }> = {
[Path in T['path']]: Extract<T, { path: Path }>['union'];
};
type StringLiteralMap = ToStringLiteralMap<
ToPathUnionPair<GetPathSegmentsWithUnions<typeof schema>>
>;
// -----------------------------------
// utils
// -----------------------------------
type ValidKeys<T> = keyof T extends string
? keyof T extends keyof NumberConstructor
? never
: keyof T
: never;
type RemoveExcess<T> = T extends [...infer Path, 'format' | 'default']
? Path extends string[]
? Path
: never
: never;
// -----------------------------------
// module augmentation
// -----------------------------------
declare module 'convict' {
interface Config<T> {
getEnv<Path extends ConfigOptionPath>(path: Path): ToReturnType<Path>;
}
}
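With this module augmentation, `config.getEnv` infers its return type from the dotted path: plain paths resolve through `NumericPath`, `BooleanPath`, and `StringPath`, string-literal unions resolve through `StringLiteralMap`, and the hard cases resolve through `ExceptionPaths`. That is what lets the rest of this commit delete the `as string`, `as number`, and `as boolean` casts. A hedged sketch of the resulting call sites, using paths that appear in the schema:

```ts
// Return types are inferred from the schema, so no casts are needed:
const port = config.getEnv('port'); // number
const locale = config.getEnv('defaultLocale'); // string
const tagsDisabled = config.getEnv('workflowTagsDisabled'); // boolean
const binaryData = config.getEnv('binaryDataManager'); // IBinaryDataConfig, via ExceptionPaths

// Mistyped paths fail at compile time instead of returning undefined at runtime:
// config.getEnv('defaultLocal'); // error: not assignable to ConfigOptionPath
```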

View file

@ -74,7 +74,7 @@ export class ActiveWorkflowRunner {
relations: ['shared', 'shared.user', 'shared.user.globalRole'], relations: ['shared', 'shared.user', 'shared.user.globalRole'],
})) as IWorkflowDb[]; })) as IWorkflowDb[];
if (!config.get('endpoints.skipWebhoooksDeregistrationOnShutdown')) { if (!config.getEnv('endpoints.skipWebhoooksDeregistrationOnShutdown')) {
// Do not clean up database when skip registration is done. // Do not clean up database when skip registration is done.
// This flag is set when n8n is running in scaled mode. // This flag is set when n8n is running in scaled mode.
// Impact is minimal, but for a short while, n8n will stop accepting requests. // Impact is minimal, but for a short while, n8n will stop accepting requests.
@ -466,7 +466,7 @@ export class ActiveWorkflowRunner {
} catch (error) { } catch (error) {
if ( if (
activation === 'init' && activation === 'init' &&
config.get('endpoints.skipWebhoooksDeregistrationOnShutdown') && config.getEnv('endpoints.skipWebhoooksDeregistrationOnShutdown') &&
error.name === 'QueryFailedError' error.name === 'QueryFailedError'
) { ) {
// When skipWebhoooksDeregistrationOnShutdown is enabled, // When skipWebhoooksDeregistrationOnShutdown is enabled,

View file

@ -59,7 +59,7 @@ export async function init(
let connectionOptions: ConnectionOptions; let connectionOptions: ConnectionOptions;
const entityPrefix = config.get('database.tablePrefix'); const entityPrefix = config.getEnv('database.tablePrefix');
if (testConnectionOptions) { if (testConnectionOptions) {
connectionOptions = testConnectionOptions; connectionOptions = testConnectionOptions;
@ -95,7 +95,7 @@ export async function init(
password: (await GenericHelpers.getConfigValue('database.postgresdb.password')) as string, password: (await GenericHelpers.getConfigValue('database.postgresdb.password')) as string,
port: (await GenericHelpers.getConfigValue('database.postgresdb.port')) as number, port: (await GenericHelpers.getConfigValue('database.postgresdb.port')) as number,
username: (await GenericHelpers.getConfigValue('database.postgresdb.user')) as string, username: (await GenericHelpers.getConfigValue('database.postgresdb.user')) as string,
schema: config.get('database.postgresdb.schema'), schema: config.getEnv('database.postgresdb.schema'),
migrations: postgresMigrations, migrations: postgresMigrations,
migrationsRun: true, migrationsRun: true,
migrationsTableName: `${entityPrefix}migrations`, migrationsTableName: `${entityPrefix}migrations`,

View file

@ -34,7 +34,7 @@ class ExternalHooksClass implements IExternalHooksClass {
} }
async loadHooksFiles(reload = false) { async loadHooksFiles(reload = false) {
const externalHookFiles = config.get('externalHookFiles').split(':'); const externalHookFiles = config.getEnv('externalHookFiles').split(':');
// Load all the provided hook-files // Load all the provided hook-files
for (let hookFilePath of externalHookFiles) { for (let hookFilePath of externalHookFiles) {

View file

@ -31,10 +31,10 @@ let versionCache: IPackageVersions | undefined;
* @returns {string} * @returns {string}
*/ */
export function getBaseUrl(): string { export function getBaseUrl(): string {
const protocol = config.get('protocol'); const protocol = config.getEnv('protocol');
const host = config.get('host'); const host = config.getEnv('host');
const port = config.get('port'); const port = config.getEnv('port');
const path = config.get('path'); const path = config.getEnv('path');
if ((protocol === 'http' && port === 80) || (protocol === 'https' && port === 443)) { if ((protocol === 'http' && port === 80) || (protocol === 'https' && port === 443)) {
return `${protocol}://${host}${path}`; return `${protocol}://${host}${path}`;
@ -117,14 +117,16 @@ export async function getConfigValue(
// Check if environment variable is defined for config key // Check if environment variable is defined for config key
if (currentSchema.env === undefined) { if (currentSchema.env === undefined) {
// No environment variable defined, so return value from config // No environment variable defined, so return value from config
return config.get(configKey); // @ts-ignore
return config.getEnv(configKey);
} }
// Check if special file environment variable exists // Check if special file environment variable exists
const fileEnvironmentVariable = process.env[`${currentSchema.env}_FILE`]; const fileEnvironmentVariable = process.env[`${currentSchema.env}_FILE`];
if (fileEnvironmentVariable === undefined) { if (fileEnvironmentVariable === undefined) {
// Does not exist, so return value from config // Does not exist, so return value from config
return config.get(configKey); // @ts-ignore
return config.getEnv(configKey);
} }
let data; let data;
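The lookup above implements the `_FILE` convention: for any config key bound to an env var, a sibling `<ENV>_FILE` variable may point to a file whose contents supply the value (the usual pattern for Docker secrets). A simplified sketch of that precedence, with a hypothetical helper name:

```ts
import { promises as fs } from 'fs';

// Sketch only: the file-based env var wins, then the plain env var, then the schema default.
async function resolveSecret(envName: string, schemaDefault: string): Promise<string> {
  const filePath = process.env[`${envName}_FILE`];
  if (filePath !== undefined) {
    // e.g. N8N_BASIC_AUTH_PASSWORD_FILE=/run/secrets/n8n-password
    return (await fs.readFile(filePath, 'utf-8')).trim();
  }
  return process.env[envName] ?? schemaDefault;
}
```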

View file

@ -329,7 +329,7 @@ export interface IDiagnosticInfo {
}; };
}; };
executionVariables: { executionVariables: {
[key: string]: string | number | undefined; [key: string]: string | number | boolean | undefined;
}; };
deploymentType: string; deploymentType: string;
binaryDataMode: string; binaryDataMode: string;
@ -458,7 +458,7 @@ export interface IN8nUISettings {
defaultLocale: string; defaultLocale: string;
userManagement: IUserManagementSettings; userManagement: IUserManagementSettings;
workflowTagsDisabled: boolean; workflowTagsDisabled: boolean;
logLevel: 'info' | 'debug' | 'warn' | 'error' | 'verbose'; logLevel: 'info' | 'debug' | 'warn' | 'error' | 'verbose' | 'silent';
hiringBannerEnabled: boolean; hiringBannerEnabled: boolean;
templates: { templates: {
enabled: boolean; enabled: boolean;

View file

@ -38,9 +38,9 @@ class LoadNodesAndCredentialsClass {
credentialTypes: ICredentialTypeData = {}; credentialTypes: ICredentialTypeData = {};
excludeNodes: string[] | undefined = undefined; excludeNodes: string | undefined = undefined;
includeNodes: string[] | undefined = undefined; includeNodes: string | undefined = undefined;
nodeModulesPath = ''; nodeModulesPath = '';
@ -76,8 +76,8 @@ class LoadNodesAndCredentialsClass {
throw new Error('Could not find "node_modules" folder!'); throw new Error('Could not find "node_modules" folder!');
} }
this.excludeNodes = config.get('nodes.exclude'); this.excludeNodes = config.getEnv('nodes.exclude');
this.includeNodes = config.get('nodes.include'); this.includeNodes = config.getEnv('nodes.include');
// Get all the installed packages which contain n8n nodes // Get all the installed packages which contain n8n nodes
const packages = await this.getN8nNodePackages(); const packages = await this.getN8nNodePackages();

View file

@ -1,3 +1,4 @@
/* eslint-disable @typescript-eslint/no-shadow */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */ /* eslint-disable @typescript-eslint/no-unsafe-assignment */
import * as winston from 'winston'; import * as winston from 'winston';
@ -11,10 +12,12 @@ class Logger implements ILogger {
private logger: winston.Logger; private logger: winston.Logger;
constructor() { constructor() {
const level = config.get('logs.level') as string; const level = config.getEnv('logs.level');
// eslint-disable-next-line @typescript-eslint/no-shadow const output = config
const output = (config.get('logs.output') as string).split(',').map((output) => output.trim()); .getEnv('logs.output')
.split(',')
.map((output) => output.trim());
this.logger = winston.createLogger({ this.logger = winston.createLogger({
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
@ -56,10 +59,10 @@ class Logger implements ILogger {
); );
this.logger.add( this.logger.add(
new winston.transports.File({ new winston.transports.File({
filename: config.get('logs.file.location'), filename: config.getEnv('logs.file.location'),
format: fileLogFormat, format: fileLogFormat,
maxsize: (config.get('logs.file.fileSizeMax') as number) * 1048576, // config * 1mb maxsize: config.getEnv('logs.file.fileSizeMax') * 1048576, // config * 1mb
maxFiles: config.get('logs.file.fileCountMax'), maxFiles: config.getEnv('logs.file.fileCountMax'),
}), }),
); );
} }

View file

@ -16,8 +16,8 @@ export class Queue {
constructor() { constructor() {
this.activeExecutions = ActiveExecutions.getInstance(); this.activeExecutions = ActiveExecutions.getInstance();
const prefix = config.get('queue.bull.prefix') as string; const prefix = config.getEnv('queue.bull.prefix');
const redisOptions = config.get('queue.bull.redis') as object; const redisOptions = config.getEnv('queue.bull.redis');
// Disabling ready check is necessary as it allows worker to // Disabling ready check is necessary as it allows worker to
// quickly reconnect to Redis if Redis crashes or is unreachable // quickly reconnect to Redis if Redis crashes or is unreachable
// for some time. With it enabled, worker might take minutes to realize // for some time. With it enabled, worker might take minutes to realize

View file

@ -1,3 +1,4 @@
/* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */
/* eslint-disable @typescript-eslint/no-unnecessary-type-assertion */ /* eslint-disable @typescript-eslint/no-unnecessary-type-assertion */
/* eslint-disable @typescript-eslint/no-use-before-define */ /* eslint-disable @typescript-eslint/no-use-before-define */
/* eslint-disable @typescript-eslint/await-thenable */ /* eslint-disable @typescript-eslint/await-thenable */
@ -200,9 +201,9 @@ class App {
defaultCredentialsName: string; defaultCredentialsName: string;
saveDataErrorExecution: string; saveDataErrorExecution: 'all' | 'none';
saveDataSuccessExecution: string; saveDataSuccessExecution: 'all' | 'none';
saveManualExecutions: boolean; saveManualExecutions: boolean;
@ -237,21 +238,21 @@ class App {
constructor() { constructor() {
this.app = express(); this.app = express();
this.endpointWebhook = config.get('endpoints.webhook') as string; this.endpointWebhook = config.getEnv('endpoints.webhook');
this.endpointWebhookWaiting = config.get('endpoints.webhookWaiting') as string; this.endpointWebhookWaiting = config.getEnv('endpoints.webhookWaiting');
this.endpointWebhookTest = config.get('endpoints.webhookTest') as string; this.endpointWebhookTest = config.getEnv('endpoints.webhookTest');
this.defaultWorkflowName = config.get('workflows.defaultName') as string; this.defaultWorkflowName = config.getEnv('workflows.defaultName');
this.defaultCredentialsName = config.get('credentials.defaultName') as string; this.defaultCredentialsName = config.getEnv('credentials.defaultName');
this.saveDataErrorExecution = config.get('executions.saveDataOnError') as string; this.saveDataErrorExecution = config.getEnv('executions.saveDataOnError');
this.saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string; this.saveDataSuccessExecution = config.getEnv('executions.saveDataOnSuccess');
this.saveManualExecutions = config.get('executions.saveDataManualExecutions') as boolean; this.saveManualExecutions = config.getEnv('executions.saveDataManualExecutions');
this.executionTimeout = config.get('executions.timeout') as number; this.executionTimeout = config.getEnv('executions.timeout');
this.maxExecutionTimeout = config.get('executions.maxTimeout') as number; this.maxExecutionTimeout = config.getEnv('executions.maxTimeout');
this.payloadSizeMax = config.get('endpoints.payloadSizeMax') as number; this.payloadSizeMax = config.getEnv('endpoints.payloadSizeMax');
this.timezone = config.get('generic.timezone') as string; this.timezone = config.getEnv('generic.timezone');
this.restEndpoint = config.get('endpoints.rest') as string; this.restEndpoint = config.getEnv('endpoints.rest');
this.activeWorkflowRunner = ActiveWorkflowRunner.getInstance(); this.activeWorkflowRunner = ActiveWorkflowRunner.getInstance();
this.testWebhooks = TestWebhooks.getInstance(); this.testWebhooks = TestWebhooks.getInstance();
@ -260,22 +261,22 @@ class App {
this.activeExecutionsInstance = ActiveExecutions.getInstance(); this.activeExecutionsInstance = ActiveExecutions.getInstance();
this.waitTracker = WaitTracker(); this.waitTracker = WaitTracker();
this.protocol = config.get('protocol'); this.protocol = config.getEnv('protocol');
this.sslKey = config.get('ssl_key'); this.sslKey = config.getEnv('ssl_key');
this.sslCert = config.get('ssl_cert'); this.sslCert = config.getEnv('ssl_cert');
this.externalHooks = externalHooks; this.externalHooks = externalHooks;
this.presetCredentialsLoaded = false; this.presetCredentialsLoaded = false;
this.endpointPresetCredentials = config.get('credentials.overwrite.endpoint') as string; this.endpointPresetCredentials = config.getEnv('credentials.overwrite.endpoint');
const urlBaseWebhook = WebhookHelpers.getWebhookBaseUrl(); const urlBaseWebhook = WebhookHelpers.getWebhookBaseUrl();
const telemetrySettings: ITelemetrySettings = { const telemetrySettings: ITelemetrySettings = {
enabled: config.get('diagnostics.enabled') as boolean, enabled: config.getEnv('diagnostics.enabled'),
}; };
if (telemetrySettings.enabled) { if (telemetrySettings.enabled) {
const conf = config.get('diagnostics.config.frontend') as string; const conf = config.getEnv('diagnostics.config.frontend');
const [key, url] = conf.split(';'); const [key, url] = conf.split(';');
if (!key || !url) { if (!key || !url) {
@ -303,31 +304,31 @@ class App {
oauth2: `${urlBaseWebhook}${this.restEndpoint}/oauth2-credential/callback`, oauth2: `${urlBaseWebhook}${this.restEndpoint}/oauth2-credential/callback`,
}, },
versionNotifications: { versionNotifications: {
enabled: config.get('versionNotifications.enabled'), enabled: config.getEnv('versionNotifications.enabled'),
endpoint: config.get('versionNotifications.endpoint'), endpoint: config.getEnv('versionNotifications.endpoint'),
infoUrl: config.get('versionNotifications.infoUrl'), infoUrl: config.getEnv('versionNotifications.infoUrl'),
}, },
instanceId: '', instanceId: '',
telemetry: telemetrySettings, telemetry: telemetrySettings,
personalizationSurveyEnabled: personalizationSurveyEnabled:
config.get('personalization.enabled') && config.get('diagnostics.enabled'), config.getEnv('personalization.enabled') && config.getEnv('diagnostics.enabled'),
defaultLocale: config.get('defaultLocale'), defaultLocale: config.getEnv('defaultLocale'),
userManagement: { userManagement: {
enabled: enabled:
config.get('userManagement.disabled') === false || config.getEnv('userManagement.disabled') === false ||
config.get('userManagement.isInstanceOwnerSetUp') === true, config.getEnv('userManagement.isInstanceOwnerSetUp') === true,
showSetupOnFirstLoad: showSetupOnFirstLoad:
config.get('userManagement.disabled') === false && config.getEnv('userManagement.disabled') === false &&
config.get('userManagement.isInstanceOwnerSetUp') === false && config.getEnv('userManagement.isInstanceOwnerSetUp') === false &&
config.get('userManagement.skipInstanceOwnerSetup') === false, config.getEnv('userManagement.skipInstanceOwnerSetup') === false,
smtpSetup: isEmailSetUp(), smtpSetup: isEmailSetUp(),
}, },
workflowTagsDisabled: config.get('workflowTagsDisabled'), workflowTagsDisabled: config.getEnv('workflowTagsDisabled'),
logLevel: config.get('logs.level'), logLevel: config.getEnv('logs.level'),
hiringBannerEnabled: config.get('hiringBanner.enabled'), hiringBannerEnabled: config.getEnv('hiringBanner.enabled'),
templates: { templates: {
enabled: config.get('templates.enabled'), enabled: config.getEnv('templates.enabled'),
host: config.get('templates.host'), host: config.getEnv('templates.host'),
}, },
}; };
} }
@ -349,23 +350,23 @@ class App {
// refresh user management status // refresh user management status
Object.assign(this.frontendSettings.userManagement, { Object.assign(this.frontendSettings.userManagement, {
enabled: enabled:
config.get('userManagement.disabled') === false || config.getEnv('userManagement.disabled') === false ||
config.get('userManagement.isInstanceOwnerSetUp') === true, config.getEnv('userManagement.isInstanceOwnerSetUp') === true,
showSetupOnFirstLoad: showSetupOnFirstLoad:
config.get('userManagement.disabled') === false && config.getEnv('userManagement.disabled') === false &&
config.get('userManagement.isInstanceOwnerSetUp') === false && config.getEnv('userManagement.isInstanceOwnerSetUp') === false &&
config.get('userManagement.skipInstanceOwnerSetup') === false, config.getEnv('userManagement.skipInstanceOwnerSetup') === false,
}); });
return this.frontendSettings; return this.frontendSettings;
} }
async config(): Promise<void> { async config(): Promise<void> {
const enableMetrics = config.get('endpoints.metrics.enable') as boolean; const enableMetrics = config.getEnv('endpoints.metrics.enable');
let register: Registry; let register: Registry;
if (enableMetrics) { if (enableMetrics) {
const prefix = config.get('endpoints.metrics.prefix') as string; const prefix = config.getEnv('endpoints.metrics.prefix');
register = new promClient.Registry(); register = new promClient.Registry();
register.setDefaultLabels({ prefix }); register.setDefaultLabels({ prefix });
promClient.collectDefaultMetrics({ register }); promClient.collectDefaultMetrics({ register });
@ -378,7 +379,7 @@ class App {
await this.externalHooks.run('frontend.settings', [this.frontendSettings]); await this.externalHooks.run('frontend.settings', [this.frontendSettings]);
const excludeEndpoints = config.get('security.excludeEndpoints') as string; const excludeEndpoints = config.getEnv('security.excludeEndpoints');
const ignoredEndpoints = [ const ignoredEndpoints = [
'healthz', 'healthz',
@ -394,7 +395,7 @@ class App {
const authIgnoreRegex = new RegExp(`^\/(${_(ignoredEndpoints).compact().join('|')})\/?.*$`); const authIgnoreRegex = new RegExp(`^\/(${_(ignoredEndpoints).compact().join('|')})\/?.*$`);
// Check for basic auth credentials if activated // Check for basic auth credentials if activated
const basicAuthActive = config.get('security.basicAuth.active') as boolean; const basicAuthActive = config.getEnv('security.basicAuth.active');
if (basicAuthActive) { if (basicAuthActive) {
const basicAuthUser = (await GenericHelpers.getConfigValue( const basicAuthUser = (await GenericHelpers.getConfigValue(
'security.basicAuth.user', 'security.basicAuth.user',
@ -419,7 +420,10 @@ class App {
this.app.use( this.app.use(
async (req: express.Request, res: express.Response, next: express.NextFunction) => { async (req: express.Request, res: express.Response, next: express.NextFunction) => {
// Skip basic auth for a few listed endpoints or when instance owner has been set up // Skip basic auth for a few listed endpoints or when instance owner has been set up
if (authIgnoreRegex.exec(req.url) || config.get('userManagement.isInstanceOwnerSetUp')) { if (
authIgnoreRegex.exec(req.url) ||
config.getEnv('userManagement.isInstanceOwnerSetUp')
) {
return next(); return next();
} }
const realm = 'n8n - Editor UI'; const realm = 'n8n - Editor UI';
@ -465,7 +469,7 @@ class App {
} }
// Check for and validate JWT if configured // Check for and validate JWT if configured
const jwtAuthActive = config.get('security.jwtAuth.active') as boolean; const jwtAuthActive = config.getEnv('security.jwtAuth.active');
if (jwtAuthActive) { if (jwtAuthActive) {
const jwtAuthHeader = (await GenericHelpers.getConfigValue( const jwtAuthHeader = (await GenericHelpers.getConfigValue(
'security.jwtAuth.jwtHeader', 'security.jwtAuth.jwtHeader',
@ -750,7 +754,7 @@ class App {
const { tags: tagIds } = req.body; const { tags: tagIds } = req.body;
if (tagIds?.length && !config.get('workflowTagsDisabled')) { if (tagIds?.length && !config.getEnv('workflowTagsDisabled')) {
newWorkflow.tags = await Db.collections.Tag!.findByIds(tagIds, { newWorkflow.tags = await Db.collections.Tag!.findByIds(tagIds, {
select: ['id', 'name'], select: ['id', 'name'],
}); });
@ -784,7 +788,7 @@ class App {
throw new ResponseHelper.ResponseError('Failed to save workflow'); throw new ResponseHelper.ResponseError('Failed to save workflow');
} }
if (tagIds && !config.get('workflowTagsDisabled')) { if (tagIds && !config.getEnv('workflowTagsDisabled')) {
savedWorkflow.tags = TagHelpers.sortByRequestOrder(savedWorkflow.tags, { savedWorkflow.tags = TagHelpers.sortByRequestOrder(savedWorkflow.tags, {
requestOrder: tagIds, requestOrder: tagIds,
}); });
@ -868,7 +872,7 @@ class App {
relations: ['tags'], relations: ['tags'],
}; };
if (config.get('workflowTagsDisabled')) { if (config.getEnv('workflowTagsDisabled')) {
delete query.relations; delete query.relations;
} }
@ -928,7 +932,7 @@ class App {
let relations = ['workflow', 'workflow.tags']; let relations = ['workflow', 'workflow.tags'];
if (config.get('workflowTagsDisabled')) { if (config.getEnv('workflowTagsDisabled')) {
relations = relations.filter((relation) => relation !== 'workflow.tags'); relations = relations.filter((relation) => relation !== 'workflow.tags');
} }
@ -1038,8 +1042,8 @@ class App {
await Db.collections.Workflow!.update(workflowId, updateData); await Db.collections.Workflow!.update(workflowId, updateData);
if (tags && !config.get('workflowTagsDisabled')) { if (tags && !config.getEnv('workflowTagsDisabled')) {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await TagHelpers.removeRelations(workflowId, tablePrefix); await TagHelpers.removeRelations(workflowId, tablePrefix);
if (tags.length) { if (tags.length) {
@ -1051,7 +1055,7 @@ class App {
relations: ['tags'], relations: ['tags'],
}; };
if (config.get('workflowTagsDisabled')) { if (config.getEnv('workflowTagsDisabled')) {
delete options.relations; delete options.relations;
} }
@ -1233,11 +1237,11 @@ class App {
req: express.Request, req: express.Request,
res: express.Response, res: express.Response,
): Promise<TagEntity[] | ITagWithCountDb[]> => { ): Promise<TagEntity[] | ITagWithCountDb[]> => {
if (config.get('workflowTagsDisabled')) { if (config.getEnv('workflowTagsDisabled')) {
throw new ResponseHelper.ResponseError('Workflow tags are disabled'); throw new ResponseHelper.ResponseError('Workflow tags are disabled');
} }
if (req.query.withUsageCount === 'true') { if (req.query.withUsageCount === 'true') {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
return TagHelpers.getTagsWithCountDb(tablePrefix); return TagHelpers.getTagsWithCountDb(tablePrefix);
} }
@ -1251,7 +1255,7 @@ class App {
`/${this.restEndpoint}/tags`, `/${this.restEndpoint}/tags`,
ResponseHelper.send( ResponseHelper.send(
async (req: express.Request, res: express.Response): Promise<TagEntity | void> => { async (req: express.Request, res: express.Response): Promise<TagEntity | void> => {
if (config.get('workflowTagsDisabled')) { if (config.getEnv('workflowTagsDisabled')) {
throw new ResponseHelper.ResponseError('Workflow tags are disabled'); throw new ResponseHelper.ResponseError('Workflow tags are disabled');
} }
const newTag = new TagEntity(); const newTag = new TagEntity();
@ -1274,7 +1278,7 @@ class App {
`/${this.restEndpoint}/tags/:id`, `/${this.restEndpoint}/tags/:id`,
ResponseHelper.send( ResponseHelper.send(
async (req: express.Request, res: express.Response): Promise<TagEntity | void> => { async (req: express.Request, res: express.Response): Promise<TagEntity | void> => {
if (config.get('workflowTagsDisabled')) { if (config.getEnv('workflowTagsDisabled')) {
throw new ResponseHelper.ResponseError('Workflow tags are disabled'); throw new ResponseHelper.ResponseError('Workflow tags are disabled');
} }
@ -1303,11 +1307,11 @@ class App {
`/${this.restEndpoint}/tags/:id`, `/${this.restEndpoint}/tags/:id`,
ResponseHelper.send( ResponseHelper.send(
async (req: TagsRequest.Delete, res: express.Response): Promise<boolean> => { async (req: TagsRequest.Delete, res: express.Response): Promise<boolean> => {
if (config.get('workflowTagsDisabled')) { if (config.getEnv('workflowTagsDisabled')) {
throw new ResponseHelper.ResponseError('Workflow tags are disabled'); throw new ResponseHelper.ResponseError('Workflow tags are disabled');
} }
if ( if (
config.get('userManagement.isInstanceOwnerSetUp') === true && config.getEnv('userManagement.isInstanceOwnerSetUp') === true &&
req.user.globalRole.name !== 'owner' req.user.globalRole.name !== 'owner'
) { ) {
throw new ResponseHelper.ResponseError( throw new ResponseHelper.ResponseError(
@ -2232,7 +2236,7 @@ class App {
const executingWorkflowIds: string[] = []; const executingWorkflowIds: string[] = [];
if (config.get('executions.mode') === 'queue') { if (config.getEnv('executions.mode') === 'queue') {
const currentJobs = await Queue.getInstance().getJobs(['active', 'waiting']); const currentJobs = await Queue.getInstance().getJobs(['active', 'waiting']);
executingWorkflowIds.push(...currentJobs.map(({ data }) => data.executionId)); executingWorkflowIds.push(...currentJobs.map(({ data }) => data.executionId));
} }
@ -2595,7 +2599,7 @@ class App {
`/${this.restEndpoint}/executions-current`, `/${this.restEndpoint}/executions-current`,
ResponseHelper.send( ResponseHelper.send(
async (req: ExecutionRequest.GetAllCurrent): Promise<IExecutionsSummary[]> => { async (req: ExecutionRequest.GetAllCurrent): Promise<IExecutionsSummary[]> => {
if (config.get('executions.mode') === 'queue') { if (config.getEnv('executions.mode') === 'queue') {
const currentJobs = await Queue.getInstance().getJobs(['active', 'waiting']); const currentJobs = await Queue.getInstance().getJobs(['active', 'waiting']);
const currentlyRunningQueueIds = currentJobs.map((job) => job.data.executionId); const currentlyRunningQueueIds = currentJobs.map((job) => job.data.executionId);
@ -2704,7 +2708,7 @@ class App {
throw new ResponseHelper.ResponseError('Execution not found', undefined, 404); throw new ResponseHelper.ResponseError('Execution not found', undefined, 404);
} }
if (config.get('executions.mode') === 'queue') { if (config.getEnv('executions.mode') === 'queue') {
// Manual executions should still be stoppable, so // Manual executions should still be stoppable, so
// try notifying the `activeExecutions` to stop it. // try notifying the `activeExecutions` to stop it.
const result = await this.activeExecutionsInstance.stopExecution(req.params.id); const result = await this.activeExecutionsInstance.stopExecution(req.params.id);
@ -2831,7 +2835,7 @@ class App {
// Webhooks // Webhooks
// ---------------------------------------- // ----------------------------------------
if (config.get('endpoints.disableProductionWebhooksOnMainProcess') !== true) { if (!config.getEnv('endpoints.disableProductionWebhooksOnMainProcess')) {
WebhookServer.registerProductionWebhooks.apply(this); WebhookServer.registerProductionWebhooks.apply(this);
} }
@ -2926,11 +2930,11 @@ class App {
); );
} }
if (config.get('endpoints.disableUi') !== true) { if (!config.getEnv('endpoints.disableUi')) {
// Read the index file and replace the path placeholder // Read the index file and replace the path placeholder
const editorUiPath = require.resolve('n8n-editor-ui'); const editorUiPath = require.resolve('n8n-editor-ui');
const filePath = pathJoin(pathDirname(editorUiPath), 'dist', 'index.html'); const filePath = pathJoin(pathDirname(editorUiPath), 'dist', 'index.html');
const n8nPath = config.get('path'); const n8nPath = config.getEnv('path');
let readIndexFile = readFileSync(filePath, 'utf8'); let readIndexFile = readFileSync(filePath, 'utf8');
readIndexFile = readIndexFile.replace(/\/%BASE_PATH%\//g, n8nPath); readIndexFile = readIndexFile.replace(/\/%BASE_PATH%\//g, n8nPath);
@ -2962,8 +2966,8 @@ class App {
} }
export async function start(): Promise<void> { export async function start(): Promise<void> {
const PORT = config.get('port'); const PORT = config.getEnv('port');
const ADDRESS = config.get('listen_address'); const ADDRESS = config.getEnv('listen_address');
const app = new App(); const app = new App();
@ -2987,7 +2991,7 @@ export async function start(): Promise<void> {
console.log(`n8n ready on ${ADDRESS}, port ${PORT}`); console.log(`n8n ready on ${ADDRESS}, port ${PORT}`);
console.log(`Version: ${versions.cli}`); console.log(`Version: ${versions.cli}`);
const defaultLocale = config.get('defaultLocale'); const defaultLocale = config.getEnv('defaultLocale');
if (defaultLocale !== 'en') { if (defaultLocale !== 'en') {
console.log(`Locale: ${defaultLocale}`); console.log(`Locale: ${defaultLocale}`);
@ -2995,13 +2999,14 @@ export async function start(): Promise<void> {
await app.externalHooks.run('n8n.ready', [app]); await app.externalHooks.run('n8n.ready', [app]);
const cpus = os.cpus(); const cpus = os.cpus();
const binarDataConfig = config.get('binaryDataManager') as IBinaryDataConfig; const binarDataConfig = config.getEnv('binaryDataManager');
const diagnosticInfo: IDiagnosticInfo = { const diagnosticInfo: IDiagnosticInfo = {
basicAuthActive: config.get('security.basicAuth.active') as boolean, basicAuthActive: config.getEnv('security.basicAuth.active'),
databaseType: (await GenericHelpers.getConfigValue('database.type')) as DatabaseType, databaseType: (await GenericHelpers.getConfigValue('database.type')) as DatabaseType,
disableProductionWebhooksOnMainProcess: disableProductionWebhooksOnMainProcess: config.getEnv(
config.get('endpoints.disableProductionWebhooksOnMainProcess') === true, 'endpoints.disableProductionWebhooksOnMainProcess',
notificationsEnabled: config.get('versionNotifications.enabled') === true, ),
notificationsEnabled: config.getEnv('versionNotifications.enabled'),
versionCli: versions.cli, versionCli: versions.cli,
systemInfo: { systemInfo: {
os: { os: {
@ -3016,24 +3021,26 @@ export async function start(): Promise<void> {
}, },
}, },
executionVariables: { executionVariables: {
executions_process: config.get('executions.process'), executions_process: config.getEnv('executions.process'),
executions_mode: config.get('executions.mode'), executions_mode: config.getEnv('executions.mode'),
executions_timeout: config.get('executions.timeout'), executions_timeout: config.getEnv('executions.timeout'),
executions_timeout_max: config.get('executions.maxTimeout'), executions_timeout_max: config.getEnv('executions.maxTimeout'),
executions_data_save_on_error: config.get('executions.saveDataOnError'), executions_data_save_on_error: config.getEnv('executions.saveDataOnError'),
executions_data_save_on_success: config.get('executions.saveDataOnSuccess'), executions_data_save_on_success: config.getEnv('executions.saveDataOnSuccess'),
executions_data_save_on_progress: config.get('executions.saveExecutionProgress'), executions_data_save_on_progress: config.getEnv('executions.saveExecutionProgress'),
executions_data_save_manual_executions: config.get('executions.saveDataManualExecutions'), executions_data_save_manual_executions: config.getEnv(
executions_data_prune: config.get('executions.pruneData'), 'executions.saveDataManualExecutions',
executions_data_max_age: config.get('executions.pruneDataMaxAge'), ),
executions_data_prune_timeout: config.get('executions.pruneDataTimeout'), executions_data_prune: config.getEnv('executions.pruneData'),
executions_data_max_age: config.getEnv('executions.pruneDataMaxAge'),
executions_data_prune_timeout: config.getEnv('executions.pruneDataTimeout'),
}, },
deploymentType: config.get('deployment.type'), deploymentType: config.getEnv('deployment.type'),
binaryDataMode: binarDataConfig.mode, binaryDataMode: binarDataConfig.mode,
n8n_multi_user_allowed: n8n_multi_user_allowed:
config.get('userManagement.disabled') === false || config.getEnv('userManagement.disabled') === false ||
config.get('userManagement.isInstanceOwnerSetUp') === true, config.getEnv('userManagement.isInstanceOwnerSetUp') === true,
smtp_set_up: config.get('userManagement.emails.mode') === 'smtp', smtp_set_up: config.getEnv('userManagement.emails.mode') === 'smtp',
}; };
void Db.collections void Db.collections

View file

@ -24,10 +24,10 @@ export async function getWorkflowOwner(workflowId: string | number): Promise<Use
} }
export function isEmailSetUp(): boolean { export function isEmailSetUp(): boolean {
const smtp = config.get('userManagement.emails.mode') === 'smtp'; const smtp = config.getEnv('userManagement.emails.mode') === 'smtp';
const host = !!config.get('userManagement.emails.smtp.host'); const host = !!config.getEnv('userManagement.emails.smtp.host');
const user = !!config.get('userManagement.emails.smtp.auth.user'); const user = !!config.getEnv('userManagement.emails.smtp.auth.user');
const pass = !!config.get('userManagement.emails.smtp.auth.pass'); const pass = !!config.getEnv('userManagement.emails.smtp.auth.pass');
return smtp && host && user && pass; return smtp && host && user && pass;
} }
@ -58,7 +58,7 @@ export async function getInstanceOwner(): Promise<User> {
* Return the n8n instance base URL without trailing slash. * Return the n8n instance base URL without trailing slash.
*/ */
export function getInstanceBaseUrl(): string { export function getInstanceBaseUrl(): string {
const n8nBaseUrl = config.get('editorBaseUrl') || getWebhookBaseUrl(); const n8nBaseUrl = config.getEnv('editorBaseUrl') || getWebhookBaseUrl();
return n8nBaseUrl.endsWith('/') ? n8nBaseUrl.slice(0, n8nBaseUrl.length - 1) : n8nBaseUrl; return n8nBaseUrl.endsWith('/') ? n8nBaseUrl.slice(0, n8nBaseUrl.length - 1) : n8nBaseUrl;
} }

View file

@ -26,7 +26,7 @@ export function issueJWT(user: User): JwtToken {
.digest('hex'); .digest('hex');
} }
const signedToken = jwt.sign(payload, config.get('userManagement.jwtSecret'), { const signedToken = jwt.sign(payload, config.getEnv('userManagement.jwtSecret'), {
expiresIn: expiresIn / 1000 /* in seconds */, expiresIn: expiresIn / 1000 /* in seconds */,
}); });
@ -57,7 +57,7 @@ export async function resolveJwtContent(jwtPayload: JwtPayload): Promise<User> {
} }
export async function resolveJwt(token: string): Promise<User> { export async function resolveJwt(token: string): Promise<User> {
const jwtPayload = jwt.verify(token, config.get('userManagement.jwtSecret')) as JwtPayload; const jwtPayload = jwt.verify(token, config.getEnv('userManagement.jwtSecret')) as JwtPayload;
return resolveJwtContent(jwtPayload); return resolveJwtContent(jwtPayload);
} }

View file

@ -9,20 +9,20 @@ export class NodeMailer implements UserManagementMailerImplementation {
constructor() { constructor() {
this.transport = createTransport({ this.transport = createTransport({
host: config.get('userManagement.emails.smtp.host'), host: config.getEnv('userManagement.emails.smtp.host'),
port: config.get('userManagement.emails.smtp.port'), port: config.getEnv('userManagement.emails.smtp.port'),
secure: config.get('userManagement.emails.smtp.secure'), secure: config.getEnv('userManagement.emails.smtp.secure'),
auth: { auth: {
user: config.get('userManagement.emails.smtp.auth.user'), user: config.getEnv('userManagement.emails.smtp.auth.user'),
pass: config.get('userManagement.emails.smtp.auth.pass'), pass: config.getEnv('userManagement.emails.smtp.auth.pass'),
}, },
}); });
} }
async verifyConnection(): Promise<void> { async verifyConnection(): Promise<void> {
const host = config.get('userManagement.emails.smtp.host') as string; const host = config.getEnv('userManagement.emails.smtp.host');
const user = config.get('userManagement.emails.smtp.auth.user') as string; const user = config.getEnv('userManagement.emails.smtp.auth.user');
const pass = config.get('userManagement.emails.smtp.auth.pass') as string; const pass = config.getEnv('userManagement.emails.smtp.auth.pass');
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
this.transport.verify((error: Error) => { this.transport.verify((error: Error) => {
@ -43,8 +43,8 @@ export class NodeMailer implements UserManagementMailerImplementation {
} }
async sendMail(mailData: MailData): Promise<SendEmailResult> { async sendMail(mailData: MailData): Promise<SendEmailResult> {
let sender = config.get('userManagement.emails.smtp.sender'); let sender = config.getEnv('userManagement.emails.smtp.sender');
const user = config.get('userManagement.emails.smtp.auth.user') as string; const user = config.getEnv('userManagement.emails.smtp.auth.user');
if (!sender && user.includes('@')) { if (!sender && user.includes('@')) {
sender = user; sender = user;

View file

@ -45,7 +45,7 @@ export class UserManagementMailer {
constructor() { constructor() {
// Other implementations can be used in the future. // Other implementations can be used in the future.
if (config.get('userManagement.emails.mode') === 'smtp') { if (config.getEnv('userManagement.emails.mode') === 'smtp') {
this.mailer = new NodeMailer(); this.mailer = new NodeMailer();
} }
} }

View file

@ -30,7 +30,7 @@ export function addRoutes(this: N8nApp, ignoredEndpoints: string[], restEndpoint
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
return (req.cookies?.[AUTH_COOKIE_NAME] as string | undefined) ?? null; return (req.cookies?.[AUTH_COOKIE_NAME] as string | undefined) ?? null;
}, },
secretOrKey: config.get('userManagement.jwtSecret') as string, secretOrKey: config.getEnv('userManagement.jwtSecret'),
}; };
passport.use( passport.use(

View file

@ -23,7 +23,7 @@ export function ownerNamespace(this: N8nApp): void {
const { email, firstName, lastName, password } = req.body; const { email, firstName, lastName, password } = req.body;
const { id: userId } = req.user; const { id: userId } = req.user;
if (config.get('userManagement.isInstanceOwnerSetUp')) { if (config.getEnv('userManagement.isInstanceOwnerSetUp')) {
Logger.debug( Logger.debug(
'Request to claim instance ownership failed because instance owner already exists', 'Request to claim instance ownership failed because instance owner already exists',
{ {

View file

@ -25,7 +25,7 @@ export function passwordResetNamespace(this: N8nApp): void {
this.app.post( this.app.post(
`/${this.restEndpoint}/forgot-password`, `/${this.restEndpoint}/forgot-password`,
ResponseHelper.send(async (req: PasswordResetRequest.Email) => { ResponseHelper.send(async (req: PasswordResetRequest.Email) => {
if (config.get('userManagement.emails.mode') === '') { if (config.getEnv('userManagement.emails.mode') === '') {
Logger.debug('Request to send password reset email failed because emailing was not set up'); Logger.debug('Request to send password reset email failed because emailing was not set up');
throw new ResponseHelper.ResponseError( throw new ResponseHelper.ResponseError(
'Email sending must be set up in order to request a password reset email', 'Email sending must be set up in order to request a password reset email',

View file

@ -31,7 +31,7 @@ export function usersNamespace(this: N8nApp): void {
this.app.post( this.app.post(
`/${this.restEndpoint}/users`, `/${this.restEndpoint}/users`,
ResponseHelper.send(async (req: UserRequest.Invite) => { ResponseHelper.send(async (req: UserRequest.Invite) => {
if (config.get('userManagement.emails.mode') === '') { if (config.getEnv('userManagement.emails.mode') === '') {
Logger.debug( Logger.debug(
'Request to send email invite(s) to user(s) failed because emailing was not set up', 'Request to send email invite(s) to user(s) failed because emailing was not set up',
); );
@ -56,14 +56,14 @@ export function usersNamespace(this: N8nApp): void {
} }
// TODO: this should be checked in the middleware rather than here // TODO: this should be checked in the middleware rather than here
if (config.get('userManagement.disabled')) { if (config.getEnv('userManagement.disabled')) {
Logger.debug( Logger.debug(
'Request to send email invite(s) to user(s) failed because user management is disabled', 'Request to send email invite(s) to user(s) failed because user management is disabled',
); );
throw new ResponseHelper.ResponseError('User management is disabled'); throw new ResponseHelper.ResponseError('User management is disabled');
} }
if (!config.get('userManagement.isInstanceOwnerSetUp')) { if (!config.getEnv('userManagement.isInstanceOwnerSetUp')) {
Logger.debug( Logger.debug(
'Request to send email invite(s) to user(s) failed because the owner account is not set up', 'Request to send email invite(s) to user(s) failed because the owner account is not set up',
); );

View file

@ -193,28 +193,28 @@ class App {
constructor() { constructor() {
this.app = express(); this.app = express();
this.endpointWebhook = config.get('endpoints.webhook') as string; this.endpointWebhook = config.getEnv('endpoints.webhook');
this.endpointWebhookWaiting = config.get('endpoints.webhookWaiting') as string; this.endpointWebhookWaiting = config.getEnv('endpoints.webhookWaiting');
this.saveDataErrorExecution = config.get('executions.saveDataOnError') as string; this.saveDataErrorExecution = config.getEnv('executions.saveDataOnError');
this.saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string; this.saveDataSuccessExecution = config.getEnv('executions.saveDataOnSuccess');
this.saveManualExecutions = config.get('executions.saveDataManualExecutions') as boolean; this.saveManualExecutions = config.getEnv('executions.saveDataManualExecutions');
this.executionTimeout = config.get('executions.timeout') as number; this.executionTimeout = config.getEnv('executions.timeout');
this.maxExecutionTimeout = config.get('executions.maxTimeout') as number; this.maxExecutionTimeout = config.getEnv('executions.maxTimeout');
this.timezone = config.get('generic.timezone') as string; this.timezone = config.getEnv('generic.timezone');
this.restEndpoint = config.get('endpoints.rest') as string; this.restEndpoint = config.getEnv('endpoints.rest');
this.activeWorkflowRunner = ActiveWorkflowRunner.getInstance(); this.activeWorkflowRunner = ActiveWorkflowRunner.getInstance();
this.activeExecutionsInstance = ActiveExecutions.getInstance(); this.activeExecutionsInstance = ActiveExecutions.getInstance();
this.protocol = config.get('protocol'); this.protocol = config.getEnv('protocol');
this.sslKey = config.get('ssl_key'); this.sslKey = config.getEnv('ssl_key');
this.sslCert = config.get('ssl_cert'); this.sslCert = config.getEnv('ssl_cert');
this.externalHooks = ExternalHooks(); this.externalHooks = ExternalHooks();
this.presetCredentialsLoaded = false; this.presetCredentialsLoaded = false;
this.endpointPresetCredentials = config.get('credentials.overwrite.endpoint') as string; this.endpointPresetCredentials = config.getEnv('credentials.overwrite.endpoint');
} }
/** /**
@ -342,8 +342,8 @@ class App {
} }
export async function start(): Promise<void> { export async function start(): Promise<void> {
const PORT = config.get('port'); const PORT = config.getEnv('port');
const ADDRESS = config.get('listen_address'); const ADDRESS = config.getEnv('listen_address');
const app = new App(); const app = new App();

View file

@ -67,7 +67,7 @@ import {
} from './UserManagement/UserManagementHelper'; } from './UserManagement/UserManagementHelper';
import { whereClause } from './WorkflowHelpers'; import { whereClause } from './WorkflowHelpers';
const ERROR_TRIGGER_TYPE = config.get('nodes.errorTriggerType') as string; const ERROR_TRIGGER_TYPE = config.getEnv('nodes.errorTriggerType');
/** /**
* Checks if there was an error and if errorWorkflow or a trigger is defined. If so it collects * Checks if there was an error and if errorWorkflow or a trigger is defined. If so it collects
@ -171,8 +171,8 @@ function pruneExecutionData(this: WorkflowHooks): void {
Logger.verbose('Pruning execution data from database'); Logger.verbose('Pruning execution data from database');
throttling = true; throttling = true;
const timeout = config.get('executions.pruneDataTimeout') as number; // in seconds const timeout = config.getEnv('executions.pruneDataTimeout'); // in seconds
const maxAge = config.get('executions.pruneDataMaxAge') as number; // in h const maxAge = config.getEnv('executions.pruneDataMaxAge'); // in h
const date = new Date(); // today const date = new Date(); // today
date.setHours(date.getHours() - maxAge); date.setHours(date.getHours() - maxAge);
@ -357,11 +357,11 @@ export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx
} }
if ( if (
this.workflowData.settings.saveExecutionProgress !== true && this.workflowData.settings.saveExecutionProgress !== true &&
!config.get('executions.saveExecutionProgress') !config.getEnv('executions.saveExecutionProgress')
) { ) {
return; return;
} }
} else if (!config.get('executions.saveExecutionProgress')) { } else if (!config.getEnv('executions.saveExecutionProgress')) {
return; return;
} }
@ -466,7 +466,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
}); });
// Prune old execution data // Prune old execution data
if (config.get('executions.pruneData')) { if (config.getEnv('executions.pruneData')) {
pruneExecutionData.call(this); pruneExecutionData.call(this);
} }
@ -492,7 +492,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
} }
} }
let saveManualExecutions = config.get('executions.saveDataManualExecutions') as boolean; let saveManualExecutions = config.getEnv('executions.saveDataManualExecutions');
if ( if (
this.workflowData.settings !== undefined && this.workflowData.settings !== undefined &&
this.workflowData.settings.saveManualExecutions !== undefined this.workflowData.settings.saveManualExecutions !== undefined
@ -512,8 +512,8 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
} }
// Check config to know if execution should be saved or not // Check config to know if execution should be saved or not
let saveDataErrorExecution = config.get('executions.saveDataOnError') as string; let saveDataErrorExecution = config.getEnv('executions.saveDataOnError') as string;
let saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string; let saveDataSuccessExecution = config.getEnv('executions.saveDataOnSuccess') as string;
if (this.workflowData.settings !== undefined) { if (this.workflowData.settings !== undefined) {
saveDataErrorExecution = saveDataErrorExecution =
(this.workflowData.settings.saveDataErrorExecution as string) || (this.workflowData.settings.saveDataErrorExecution as string) ||
@ -800,7 +800,7 @@ export async function getWorkflowData(
const user = await getUserById(userId); const user = await getUserById(userId);
let relations = ['workflow', 'workflow.tags']; let relations = ['workflow', 'workflow.tags'];
if (config.get('workflowTagsDisabled')) { if (config.getEnv('workflowTagsDisabled')) {
relations = relations.filter((relation) => relation !== 'workflow.tags'); relations = relations.filter((relation) => relation !== 'workflow.tags');
} }
@ -1028,10 +1028,10 @@ export async function getBase(
): Promise<IWorkflowExecuteAdditionalData> { ): Promise<IWorkflowExecuteAdditionalData> {
const urlBaseWebhook = WebhookHelpers.getWebhookBaseUrl(); const urlBaseWebhook = WebhookHelpers.getWebhookBaseUrl();
const timezone = config.get('generic.timezone') as string; const timezone = config.getEnv('generic.timezone');
const webhookBaseUrl = urlBaseWebhook + config.get('endpoints.webhook'); const webhookBaseUrl = urlBaseWebhook + config.getEnv('endpoints.webhook');
const webhookWaitingBaseUrl = urlBaseWebhook + config.get('endpoints.webhookWaiting'); const webhookWaitingBaseUrl = urlBaseWebhook + config.getEnv('endpoints.webhookWaiting');
const webhookTestBaseUrl = urlBaseWebhook + config.get('endpoints.webhookTest'); const webhookTestBaseUrl = urlBaseWebhook + config.getEnv('endpoints.webhookTest');
const encryptionKey = await UserSettings.getEncryptionKey(); const encryptionKey = await UserSettings.getEncryptionKey();
if (encryptionKey === undefined) { if (encryptionKey === undefined) {
@ -1042,7 +1042,7 @@ export async function getBase(
credentialsHelper: new CredentialsHelper(encryptionKey), credentialsHelper: new CredentialsHelper(encryptionKey),
encryptionKey, encryptionKey,
executeWorkflow, executeWorkflow,
restApiUrl: urlBaseWebhook + config.get('endpoints.rest'), restApiUrl: urlBaseWebhook + config.getEnv('endpoints.rest'),
timezone, timezone,
webhookBaseUrl, webhookBaseUrl,
webhookWaitingBaseUrl, webhookWaitingBaseUrl,

View file

@ -39,7 +39,7 @@ import { WorkflowEntity } from './databases/entities/WorkflowEntity';
import { User } from './databases/entities/User'; import { User } from './databases/entities/User';
import { getWorkflowOwner } from './UserManagement/UserManagementHelper'; import { getWorkflowOwner } from './UserManagement/UserManagementHelper';
const ERROR_TRIGGER_TYPE = config.get('nodes.errorTriggerType') as string; const ERROR_TRIGGER_TYPE = config.getEnv('nodes.errorTriggerType');
/** /**
* Returns the data of the last executed node * Returns the data of the last executed node

View file

@ -73,7 +73,7 @@ export class WorkflowRunner {
this.activeExecutions = ActiveExecutions.getInstance(); this.activeExecutions = ActiveExecutions.getInstance();
this.credentialsOverwrites = CredentialsOverwrites().getAll(); this.credentialsOverwrites = CredentialsOverwrites().getAll();
const executionsMode = config.get('executions.mode') as string; const executionsMode = config.getEnv('executions.mode');
if (executionsMode === 'queue') { if (executionsMode === 'queue') {
this.jobQueue = Queue.getInstance().getBullObjectInstance(); this.jobQueue = Queue.getInstance().getBullObjectInstance();
@ -150,8 +150,8 @@ export class WorkflowRunner {
executionId?: string, executionId?: string,
responsePromise?: IDeferredPromise<IExecuteResponsePromiseData>, responsePromise?: IDeferredPromise<IExecuteResponsePromiseData>,
): Promise<string> { ): Promise<string> {
const executionsProcess = config.get('executions.process') as string; const executionsProcess = config.getEnv('executions.process');
const executionsMode = config.get('executions.mode') as string; const executionsMode = config.getEnv('executions.mode');
if (executionsMode === 'queue' && data.executionMode !== 'manual') { if (executionsMode === 'queue' && data.executionMode !== 'manual') {
// Do not run "manual" executions in bull because sending events to the // Do not run "manual" executions in bull because sending events to the
@ -229,13 +229,13 @@ export class WorkflowRunner {
// Changes were made by adding the `workflowTimeout` to the `additionalData` // Changes were made by adding the `workflowTimeout` to the `additionalData`
// So that the timeout will also work for executions with nested workflows. // So that the timeout will also work for executions with nested workflows.
let executionTimeout: NodeJS.Timeout; let executionTimeout: NodeJS.Timeout;
let workflowTimeout = config.get('executions.timeout') as number; // initialize with default let workflowTimeout = config.getEnv('executions.timeout'); // initialize with default
if (data.workflowData.settings && data.workflowData.settings.executionTimeout) { if (data.workflowData.settings && data.workflowData.settings.executionTimeout) {
workflowTimeout = data.workflowData.settings.executionTimeout as number; // preference on workflow setting workflowTimeout = data.workflowData.settings.executionTimeout as number; // preference on workflow setting
} }
if (workflowTimeout > 0) { if (workflowTimeout > 0) {
workflowTimeout = Math.min(workflowTimeout, config.get('executions.maxTimeout') as number); workflowTimeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout'));
} }
const workflow = new Workflow({ const workflow = new Workflow({
@ -326,8 +326,7 @@ export class WorkflowRunner {
this.activeExecutions.attachWorkflowExecution(executionId, workflowExecution); this.activeExecutions.attachWorkflowExecution(executionId, workflowExecution);
if (workflowTimeout > 0) { if (workflowTimeout > 0) {
const timeout = const timeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout')) * 1000; // as seconds
Math.min(workflowTimeout, config.get('executions.maxTimeout') as number) * 1000; // as seconds
executionTimeout = setTimeout(() => { executionTimeout = setTimeout(() => {
this.activeExecutions.stopExecution(executionId, 'timeout'); this.activeExecutions.stopExecution(executionId, 'timeout');
}, timeout); }, timeout);
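Both timeout hunks apply the same rule: the per-workflow `executionTimeout` is clamped to the instance-wide `executions.maxTimeout`, and the result (in seconds) is converted to milliseconds for `setTimeout`. A worked sketch with illustrative values:

```ts
// Sketch only, with hypothetical values (both config values are in seconds):
const workflowTimeout = 7200; // per-workflow executionTimeout setting
const maxTimeout = 3600;      // executions.maxTimeout
const timeout = Math.min(workflowTimeout, maxTimeout) * 1000; // 3600000 ms for setTimeout
```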
@ -450,7 +449,7 @@ export class WorkflowRunner {
const jobData: Promise<IBullJobResponse> = job.finished(); const jobData: Promise<IBullJobResponse> = job.finished();
const queueRecoveryInterval = config.get('queue.bull.queueRecoveryInterval') as number; const queueRecoveryInterval = config.getEnv('queue.bull.queueRecoveryInterval');
const racingPromises: Array<Promise<IBullJobResponse | object>> = [jobData]; const racingPromises: Array<Promise<IBullJobResponse | object>> = [jobData];
@ -533,8 +532,8 @@ export class WorkflowRunner {
try { try {
// Check if this execution data has to be removed from database // Check if this execution data has to be removed from database
// based on workflow settings. // based on workflow settings.
let saveDataErrorExecution = config.get('executions.saveDataOnError') as string; let saveDataErrorExecution = config.getEnv('executions.saveDataOnError') as string;
let saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string; let saveDataSuccessExecution = config.getEnv('executions.saveDataOnSuccess') as string;
if (data.workflowData.settings !== undefined) { if (data.workflowData.settings !== undefined) {
saveDataErrorExecution = saveDataErrorExecution =
(data.workflowData.settings.saveDataErrorExecution as string) || (data.workflowData.settings.saveDataErrorExecution as string) ||
@ -643,7 +642,7 @@ export class WorkflowRunner {
// Start timeout for the execution // Start timeout for the execution
let executionTimeout: NodeJS.Timeout; let executionTimeout: NodeJS.Timeout;
let workflowTimeout = config.get('executions.timeout') as number; // initialize with default let workflowTimeout = config.getEnv('executions.timeout'); // initialize with default
if (data.workflowData.settings && data.workflowData.settings.executionTimeout) { if (data.workflowData.settings && data.workflowData.settings.executionTimeout) {
workflowTimeout = data.workflowData.settings.executionTimeout as number; // preference on workflow setting workflowTimeout = data.workflowData.settings.executionTimeout as number; // preference on workflow setting
} }
@ -654,8 +653,7 @@ export class WorkflowRunner {
}; };
if (workflowTimeout > 0) { if (workflowTimeout > 0) {
workflowTimeout = workflowTimeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout')) * 1000; // as seconds
Math.min(workflowTimeout, config.get('executions.maxTimeout') as number) * 1000; // as seconds
// Start timeout already now but give process at least 5 seconds to start. // Start timeout already now but give process at least 5 seconds to start.
// Without it, it would be possible that the workflow execution times out before it even got started if // Without it, it would be possible that the workflow execution times out before it even got started if
// the timeout time is very short as the process start time can be quite long. // the timeout time is very short as the process start time can be quite long.

View file

@ -5,13 +5,7 @@
/* eslint-disable @typescript-eslint/no-non-null-assertion */ /* eslint-disable @typescript-eslint/no-non-null-assertion */
/* eslint-disable @typescript-eslint/no-use-before-define */ /* eslint-disable @typescript-eslint/no-use-before-define */
/* eslint-disable @typescript-eslint/unbound-method */ /* eslint-disable @typescript-eslint/unbound-method */
import { import { BinaryDataManager, IProcessMessage, UserSettings, WorkflowExecute } from 'n8n-core';
BinaryDataManager,
IBinaryDataConfig,
IProcessMessage,
UserSettings,
WorkflowExecute,
} from 'n8n-core';
import { import {
ExecutionError, ExecutionError,
@ -176,7 +170,7 @@ export class WorkflowRunnerProcess {
const { cli } = await GenericHelpers.getVersions(); const { cli } = await GenericHelpers.getVersions();
InternalHooksManager.init(instanceId, cli, nodeTypes); InternalHooksManager.init(instanceId, cli, nodeTypes);
const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig; const binaryDataConfig = config.getEnv('binaryDataManager');
await BinaryDataManager.init(binaryDataConfig); await BinaryDataManager.init(binaryDataConfig);
// Credentials should now be loaded from database. // Credentials should now be loaded from database.
@ -204,27 +198,27 @@ export class WorkflowRunnerProcess {
} else if ( } else if (
inputData.workflowData.settings !== undefined && inputData.workflowData.settings !== undefined &&
inputData.workflowData.settings.saveExecutionProgress !== false && inputData.workflowData.settings.saveExecutionProgress !== false &&
(config.get('executions.saveExecutionProgress') as boolean) config.getEnv('executions.saveExecutionProgress')
) { ) {
// Workflow settings do not disable saving progress and default settings say to save // Workflow settings do not disable saving progress and default settings say to save
await Db.init(); await Db.init();
} else if ( } else if (
inputData.workflowData.settings === undefined && inputData.workflowData.settings === undefined &&
(config.get('executions.saveExecutionProgress') as boolean) config.getEnv('executions.saveExecutionProgress')
) { ) {
// Workflow settings say nothing about saving progress so default settings decide // Workflow settings say nothing about saving progress so default settings decide
await Db.init(); await Db.init();
} }
// Start timeout for the execution // Start timeout for the execution
let workflowTimeout = config.get('executions.timeout') as number; // initialize with default let workflowTimeout = config.getEnv('executions.timeout'); // initialize with default
// eslint-disable-next-line @typescript-eslint/prefer-optional-chain // eslint-disable-next-line @typescript-eslint/prefer-optional-chain
if (this.data.workflowData.settings && this.data.workflowData.settings.executionTimeout) { if (this.data.workflowData.settings && this.data.workflowData.settings.executionTimeout) {
workflowTimeout = this.data.workflowData.settings.executionTimeout as number; // workflow setting takes precedence workflowTimeout = this.data.workflowData.settings.executionTimeout as number; // workflow setting takes precedence
} }
if (workflowTimeout > 0) { if (workflowTimeout > 0) {
workflowTimeout = Math.min(workflowTimeout, config.get('executions.maxTimeout') as number); workflowTimeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout'));
} }
this.workflow = new Workflow({ this.workflow = new Workflow({
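Both runner variants apply the same clamp: a per-workflow executionTimeout may shorten the configured default but can never exceed executions.maxTimeout. A hedged restatement as a standalone helper, with a made-up name and sample values (the main runner additionally converts the result to milliseconds before arming the timer):

// Values are in seconds; a value <= 0 disables the timeout entirely.
function resolveWorkflowTimeout(
	defaultTimeout: number, // config.getEnv('executions.timeout')
	maxTimeout: number, // config.getEnv('executions.maxTimeout')
	workflowSetting?: number, // workflowData.settings.executionTimeout
): number {
	let timeout = workflowSetting ?? defaultTimeout;
	if (timeout > 0) {
		timeout = Math.min(timeout, maxTimeout);
	}
	return timeout;
}

console.log(resolveWorkflowTimeout(-1, 3600, 120)); // 120
console.log(resolveWorkflowTimeout(-1, 3600, 7200)); // 3600 (clamped to maxTimeout)
console.log(resolveWorkflowTimeout(-1, 3600)); // -1 (no timeout)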
@ -99,7 +99,7 @@ credentialsController.get(
const { name: newName } = req.query; const { name: newName } = req.query;
return GenericHelpers.generateUniqueName( return GenericHelpers.generateUniqueName(
newName ?? config.get('credentials.defaultName'), newName ?? config.getEnv('credentials.defaultName'),
'credentials', 'credentials',
); );
}), }),
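generateUniqueName itself is not part of this diff; only its now-typed default argument changed. Purely for illustration, a hypothetical sketch of the behaviour such a helper typically provides (the real one works against the database, not an in-memory set):

// Hypothetical: append the lowest free numeric suffix to a taken base name.
function generateUniqueNameSketch(base: string, taken: Set<string>): string {
	if (!taken.has(base)) return base;
	let suffix = 2;
	while (taken.has(`${base} ${suffix}`)) suffix += 1;
	return `${base} ${suffix}`;
}

console.log(generateUniqueNameSketch('My credentials', new Set(['My credentials'])));
// 'My credentials 2'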
@ -19,7 +19,7 @@ import { DatabaseType, ICredentialsDb } from '../..';
import { SharedCredentials } from './SharedCredentials'; import { SharedCredentials } from './SharedCredentials';
function resolveDataType(dataType: string) { function resolveDataType(dataType: string) {
const dbType = config.get('database.type') as DatabaseType; const dbType = config.getEnv('database.type');
const typeMap: { [key in DatabaseType]: { [key: string]: string } } = { const typeMap: { [key in DatabaseType]: { [key: string]: string } } = {
sqlite: { sqlite: {
@ -37,7 +37,7 @@ function resolveDataType(dataType: string) {
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
function getTimestampSyntax() { function getTimestampSyntax() {
const dbType = config.get('database.type') as DatabaseType; const dbType = config.getEnv('database.type');
const map: { [key in DatabaseType]: string } = { const map: { [key in DatabaseType]: string } = {
sqlite: `STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')`, sqlite: `STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')`,
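The resolveDataType/getTimestampSyntax pattern repeated across the entity files typechecks without a cast once database.type comes back as the DatabaseType union rather than string. A self-contained sketch; the union members mirror n8n's DatabaseType, but the non-sqlite syntax strings here are illustrative:

type DatabaseType = 'sqlite' | 'mariadb' | 'mysqldb' | 'postgresdb';

function timestampSyntaxFor(dbType: DatabaseType): string {
	// Keyed on the union, so adding a DatabaseType member forces a new entry here.
	const map: { [key in DatabaseType]: string } = {
		sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",
		mariadb: 'CURRENT_TIMESTAMP(3)',
		mysqldb: 'CURRENT_TIMESTAMP(3)',
		postgresdb: 'CURRENT_TIMESTAMP(3)',
	};
	return map[dbType];
}

console.log(timestampSyntaxFor('sqlite')); // STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')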
@ -6,7 +6,7 @@ import config = require('../../../config');
import { DatabaseType, IExecutionFlattedDb, IWorkflowDb } from '../..'; import { DatabaseType, IExecutionFlattedDb, IWorkflowDb } from '../..';
function resolveDataType(dataType: string) { function resolveDataType(dataType: string) {
const dbType = config.get('database.type') as DatabaseType; const dbType = config.getEnv('database.type');
const typeMap: { [key in DatabaseType]: { [key: string]: string } } = { const typeMap: { [key in DatabaseType]: { [key: string]: string } } = {
sqlite: { sqlite: {
@ -21,7 +21,7 @@ type RoleScopes = 'global' | 'workflow' | 'credential';
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
function getTimestampSyntax() { function getTimestampSyntax() {
const dbType = config.get('database.type') as DatabaseType; const dbType = config.getEnv('database.type');
const map: { [key in DatabaseType]: string } = { const map: { [key in DatabaseType]: string } = {
sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')", sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",
@ -17,7 +17,7 @@ import { Role } from './Role';
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
function getTimestampSyntax() { function getTimestampSyntax() {
const dbType = config.get('database.type') as DatabaseType; const dbType = config.getEnv('database.type');
const map: { [key in DatabaseType]: string } = { const map: { [key in DatabaseType]: string } = {
sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')", sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",
@ -17,7 +17,7 @@ import { Role } from './Role';
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
function getTimestampSyntax() { function getTimestampSyntax() {
const dbType = config.get('database.type') as DatabaseType; const dbType = config.getEnv('database.type');
const map: { [key in DatabaseType]: string } = { const map: { [key in DatabaseType]: string } = {
sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')", sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",
@ -21,7 +21,7 @@ import { WorkflowEntity } from './WorkflowEntity';
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
function getTimestampSyntax() { function getTimestampSyntax() {
const dbType = config.get('database.type') as DatabaseType; const dbType = config.getEnv('database.type');
const map: { [key in DatabaseType]: string } = { const map: { [key in DatabaseType]: string } = {
sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')", sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",
@ -27,7 +27,7 @@ export const MIN_PASSWORD_LENGTH = 8;
export const MAX_PASSWORD_LENGTH = 64; export const MAX_PASSWORD_LENGTH = 64;
function resolveDataType(dataType: string) { function resolveDataType(dataType: string) {
const dbType = config.get('database.type') as DatabaseType; const dbType = config.getEnv('database.type');
const typeMap: { [key in DatabaseType]: { [key: string]: string } } = { const typeMap: { [key in DatabaseType]: { [key: string]: string } } = {
sqlite: { sqlite: {
@ -45,7 +45,7 @@ function resolveDataType(dataType: string) {
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
function getTimestampSyntax() { function getTimestampSyntax() {
const dbType = config.get('database.type') as DatabaseType; const dbType = config.getEnv('database.type');
const map: { [key in DatabaseType]: string } = { const map: { [key in DatabaseType]: string } = {
sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')", sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",
@ -24,7 +24,7 @@ import { TagEntity } from './TagEntity';
import { SharedWorkflow } from './SharedWorkflow'; import { SharedWorkflow } from './SharedWorkflow';
function resolveDataType(dataType: string) { function resolveDataType(dataType: string) {
const dbType = config.get('database.type') as DatabaseType; const dbType = config.getEnv('database.type');
const typeMap: { [key in DatabaseType]: { [key: string]: string } } = { const typeMap: { [key in DatabaseType]: { [key: string]: string } } = {
sqlite: { sqlite: {
@ -42,7 +42,7 @@ function resolveDataType(dataType: string) {
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
function getTimestampSyntax() { function getTimestampSyntax() {
const dbType = config.get('database.type') as DatabaseType; const dbType = config.getEnv('database.type');
const map: { [key in DatabaseType]: string } = { const map: { [key in DatabaseType]: string } = {
sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')", sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",
@ -6,7 +6,7 @@ export class InitialMigration1588157391238 implements MigrationInterface {
name = 'InitialMigration1588157391238'; name = 'InitialMigration1588157391238';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query('CREATE TABLE IF NOT EXISTS `' + tablePrefix + 'credentials_entity` (`id` int NOT NULL AUTO_INCREMENT, `name` varchar(128) NOT NULL, `data` text NOT NULL, `type` varchar(32) NOT NULL, `nodesAccess` json NOT NULL, `createdAt` datetime NOT NULL, `updatedAt` datetime NOT NULL, INDEX `IDX_' + tablePrefix + '07fde106c0b471d8cc80a64fc8` (`type`), PRIMARY KEY (`id`)) ENGINE=InnoDB', undefined); await queryRunner.query('CREATE TABLE IF NOT EXISTS `' + tablePrefix + 'credentials_entity` (`id` int NOT NULL AUTO_INCREMENT, `name` varchar(128) NOT NULL, `data` text NOT NULL, `type` varchar(32) NOT NULL, `nodesAccess` json NOT NULL, `createdAt` datetime NOT NULL, `updatedAt` datetime NOT NULL, INDEX `IDX_' + tablePrefix + '07fde106c0b471d8cc80a64fc8` (`type`), PRIMARY KEY (`id`)) ENGINE=InnoDB', undefined);
await queryRunner.query('CREATE TABLE IF NOT EXISTS `' + tablePrefix + 'execution_entity` (`id` int NOT NULL AUTO_INCREMENT, `data` text NOT NULL, `finished` tinyint NOT NULL, `mode` varchar(255) NOT NULL, `retryOf` varchar(255) NULL, `retrySuccessId` varchar(255) NULL, `startedAt` datetime NOT NULL, `stoppedAt` datetime NOT NULL, `workflowData` json NOT NULL, `workflowId` varchar(255) NULL, INDEX `IDX_' + tablePrefix + 'c4d999a5e90784e8caccf5589d` (`workflowId`), PRIMARY KEY (`id`)) ENGINE=InnoDB', undefined); await queryRunner.query('CREATE TABLE IF NOT EXISTS `' + tablePrefix + 'execution_entity` (`id` int NOT NULL AUTO_INCREMENT, `data` text NOT NULL, `finished` tinyint NOT NULL, `mode` varchar(255) NOT NULL, `retryOf` varchar(255) NULL, `retrySuccessId` varchar(255) NULL, `startedAt` datetime NOT NULL, `stoppedAt` datetime NOT NULL, `workflowData` json NOT NULL, `workflowId` varchar(255) NULL, INDEX `IDX_' + tablePrefix + 'c4d999a5e90784e8caccf5589d` (`workflowId`), PRIMARY KEY (`id`)) ENGINE=InnoDB', undefined);
@ -14,7 +14,7 @@ export class InitialMigration1588157391238 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query('DROP TABLE `' + tablePrefix + 'workflow_entity`', undefined); await queryRunner.query('DROP TABLE `' + tablePrefix + 'workflow_entity`', undefined);
await queryRunner.query('DROP INDEX `IDX_' + tablePrefix + 'c4d999a5e90784e8caccf5589d` ON `' + tablePrefix + 'execution_entity`', undefined); await queryRunner.query('DROP INDEX `IDX_' + tablePrefix + 'c4d999a5e90784e8caccf5589d` ON `' + tablePrefix + 'execution_entity`', undefined);
@ -9,13 +9,13 @@ export class WebhookModel1592447867632 implements MigrationInterface {
name = 'WebhookModel1592447867632'; name = 'WebhookModel1592447867632';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity (workflowId int NOT NULL, webhookPath varchar(255) NOT NULL, method varchar(255) NOT NULL, node varchar(255) NOT NULL, PRIMARY KEY (webhookPath, method)) ENGINE=InnoDB`); await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity (workflowId int NOT NULL, webhookPath varchar(255) NOT NULL, method varchar(255) NOT NULL, node varchar(255) NOT NULL, PRIMARY KEY (webhookPath, method)) ENGINE=InnoDB`);
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query(`DROP TABLE ${tablePrefix}webhook_entity`); await queryRunner.query(`DROP TABLE ${tablePrefix}webhook_entity`);
} }
} }
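Every migration touched by this commit follows the same two-step shape: read the typed table prefix once, then interpolate it into raw SQL in both up() and down(). A minimal sketch of that shape under stated assumptions: the migration name, table, and config import path are hypothetical, while QueryRunner and MigrationInterface come from typeorm as in the files above.

import { MigrationInterface, QueryRunner } from 'typeorm';
import config = require('../../../config');

export class ExampleMigration1650000000000 implements MigrationInterface {
	name = 'ExampleMigration1650000000000';

	async up(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.getEnv('database.tablePrefix'); // inferred as string
		await queryRunner.query(`CREATE TABLE ${tablePrefix}example (id int NOT NULL, PRIMARY KEY (id))`);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.getEnv('database.tablePrefix');
		await queryRunner.query(`DROP TABLE ${tablePrefix}example`);
	}
}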
@ -6,13 +6,13 @@ export class CreateIndexStoppedAt1594902918301 implements MigrationInterface {
name = 'CreateIndexStoppedAt1594902918301'; name = 'CreateIndexStoppedAt1594902918301';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query('CREATE INDEX `IDX_' + tablePrefix + 'cefb067df2402f6aed0638a6c1` ON `' + tablePrefix + 'execution_entity` (`stoppedAt`)'); await queryRunner.query('CREATE INDEX `IDX_' + tablePrefix + 'cefb067df2402f6aed0638a6c1` ON `' + tablePrefix + 'execution_entity` (`stoppedAt`)');
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query('DROP INDEX `IDX_' + tablePrefix + 'cefb067df2402f6aed0638a6c1` ON `' + tablePrefix + 'execution_entity`'); await queryRunner.query('DROP INDEX `IDX_' + tablePrefix + 'cefb067df2402f6aed0638a6c1` ON `' + tablePrefix + 'execution_entity`');
} }
@ -5,12 +5,12 @@ import * as config from '../../../../config';
export class MakeStoppedAtNullable1607431743767 implements MigrationInterface { export class MakeStoppedAtNullable1607431743767 implements MigrationInterface {
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY `stoppedAt` datetime', undefined); await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY `stoppedAt` datetime', undefined);
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY `stoppedAt` datetime NOT NULL', undefined); await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY `stoppedAt` datetime NOT NULL', undefined);
} }
@ -5,7 +5,7 @@ export class AddWebhookId1611149998770 implements MigrationInterface {
name = 'AddWebhookId1611149998770'; name = 'AddWebhookId1611149998770';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'webhook_entity` ADD `webhookId` varchar(255) NULL'); await queryRunner.query('ALTER TABLE `' + tablePrefix + 'webhook_entity` ADD `webhookId` varchar(255) NULL');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'webhook_entity` ADD `pathLength` int NULL'); await queryRunner.query('ALTER TABLE `' + tablePrefix + 'webhook_entity` ADD `pathLength` int NULL');
@ -13,7 +13,7 @@ export class AddWebhookId1611149998770 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query( await queryRunner.query(
'DROP INDEX `IDX_' + tablePrefix + '742496f199721a057051acf4c2` ON `' + tablePrefix + 'webhook_entity`' 'DROP INDEX `IDX_' + tablePrefix + '742496f199721a057051acf4c2` ON `' + tablePrefix + 'webhook_entity`'
@ -5,13 +5,13 @@ export class ChangeDataSize1615306975123 implements MigrationInterface {
name = 'ChangeDataSize1615306975123'; name = 'ChangeDataSize1615306975123';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY COLUMN `data` MEDIUMTEXT NOT NULL'); await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY COLUMN `data` MEDIUMTEXT NOT NULL');
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY COLUMN `data` TEXT NOT NULL'); await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY COLUMN `data` TEXT NOT NULL');
} }
@ -5,7 +5,7 @@ export class CreateTagEntity1617268711084 implements MigrationInterface {
name = 'CreateTagEntity1617268711084'; name = 'CreateTagEntity1617268711084';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
// create tags table + relationship with workflow entity // create tags table + relationship with workflow entity
@ -25,7 +25,7 @@ export class CreateTagEntity1617268711084 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
// `createdAt` and `updatedAt` // `createdAt` and `updatedAt`
@ -5,13 +5,13 @@ export class ChangeCredentialDataSize1620729500000 implements MigrationInterface
name = 'ChangeCredentialDataSize1620729500000'; name = 'ChangeCredentialDataSize1620729500000';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'credentials_entity` MODIFY COLUMN `type` varchar(128) NOT NULL'); await queryRunner.query('ALTER TABLE `' + tablePrefix + 'credentials_entity` MODIFY COLUMN `type` varchar(128) NOT NULL');
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'credentials_entity` MODIFY COLUMN `type` varchar(32) NOT NULL'); await queryRunner.query('ALTER TABLE `' + tablePrefix + 'credentials_entity` MODIFY COLUMN `type` varchar(32) NOT NULL');
} }
@ -5,7 +5,7 @@ export class UniqueWorkflowNames1620826335440 implements MigrationInterface {
name = 'UniqueWorkflowNames1620826335440'; name = 'UniqueWorkflowNames1620826335440';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
const workflowNames = await queryRunner.query(` const workflowNames = await queryRunner.query(`
SELECT name SELECT name
@ -40,7 +40,7 @@ export class UniqueWorkflowNames1620826335440 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'workflow_entity` DROP INDEX `IDX_' + tablePrefix + '943d8f922be094eb507cb9a7f9`'); await queryRunner.query('ALTER TABLE `' + tablePrefix + 'workflow_entity` DROP INDEX `IDX_' + tablePrefix + '943d8f922be094eb507cb9a7f9`');
} }
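Before adding the unique index, the UniqueWorkflowNames migrations rename duplicate workflow names with a numeric suffix. The actual work happens in SQL; this in-memory sketch only illustrates the suffixing rule and is not the migration's code:

function dedupeNames(names: string[]): string[] {
	const seen = new Map<string, number>();
	return names.map((name) => {
		const count = seen.get(name) ?? 0;
		seen.set(name, count + 1);
		return count === 0 ? name : `${name} ${count + 1}`; // first duplicate becomes "name 2"
	});
}

console.log(dedupeNames(['My workflow', 'My workflow', 'Other']));
// ['My workflow', 'My workflow 2', 'Other']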
@ -5,8 +5,8 @@ export class CertifyCorrectCollation1623936588000 implements MigrationInterface
name = 'CertifyCorrectCollation1623936588000'; name = 'CertifyCorrectCollation1623936588000';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
const databaseType = config.get('database.type'); const databaseType = config.getEnv('database.type');
if (databaseType === 'mariadb') { if (databaseType === 'mariadb') {
// This applies to MySQL only. // This applies to MySQL only.
@ -19,7 +19,7 @@ export class CertifyCorrectCollation1623936588000 implements MigrationInterface
collation = 'utf8mb4_0900_ai_ci'; collation = 'utf8mb4_0900_ai_ci';
} }
const databaseName = config.get(`database.mysqldb.database`); const databaseName = config.getEnv(`database.mysqldb.database`);
await queryRunner.query(`ALTER DATABASE \`${databaseName}\` CHARACTER SET utf8mb4 COLLATE ${collation};`); await queryRunner.query(`ALTER DATABASE \`${databaseName}\` CHARACTER SET utf8mb4 COLLATE ${collation};`);
@ -5,14 +5,14 @@ export class AddWaitColumnId1626183952959 implements MigrationInterface {
name = 'AddWaitColumnId1626183952959'; name = 'AddWaitColumnId1626183952959';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` ADD `waitTill` DATETIME NULL'); await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` ADD `waitTill` DATETIME NULL');
await queryRunner.query('CREATE INDEX `IDX_' + tablePrefix + 'ca4a71b47f28ac6ea88293a8e2` ON `' + tablePrefix + 'execution_entity` (`waitTill`)'); await queryRunner.query('CREATE INDEX `IDX_' + tablePrefix + 'ca4a71b47f28ac6ea88293a8e2` ON `' + tablePrefix + 'execution_entity` (`waitTill`)');
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query( await queryRunner.query(
'DROP INDEX `IDX_' + tablePrefix + 'ca4a71b47f28ac6ea88293a8e2` ON `' + tablePrefix + 'execution_entity`' 'DROP INDEX `IDX_' + tablePrefix + 'ca4a71b47f28ac6ea88293a8e2` ON `' + tablePrefix + 'execution_entity`'
@ -9,7 +9,7 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac
name = 'UpdateWorkflowCredentials1630451444017'; name = 'UpdateWorkflowCredentials1630451444017';
public async up(queryRunner: QueryRunner): Promise<void> { public async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
const helpers = new MigrationHelpers(queryRunner); const helpers = new MigrationHelpers(queryRunner);
const credentialsEntities = await queryRunner.query(` const credentialsEntities = await queryRunner.query(`
@ -146,7 +146,7 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac
} }
public async down(queryRunner: QueryRunner): Promise<void> { public async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
const helpers = new MigrationHelpers(queryRunner); const helpers = new MigrationHelpers(queryRunner);
const credentialsEntities = await queryRunner.query(` const credentialsEntities = await queryRunner.query(`
@ -5,7 +5,7 @@ export class AddExecutionEntityIndexes1644424784709 implements MigrationInterfac
name = 'AddExecutionEntityIndexes1644424784709'; name = 'AddExecutionEntityIndexes1644424784709';
public async up(queryRunner: QueryRunner): Promise<void> { public async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query( await queryRunner.query(
'DROP INDEX `IDX_c4d999a5e90784e8caccf5589d` ON `' + tablePrefix + 'execution_entity`', 'DROP INDEX `IDX_c4d999a5e90784e8caccf5589d` ON `' + tablePrefix + 'execution_entity`',
@ -41,7 +41,7 @@ export class AddExecutionEntityIndexes1644424784709 implements MigrationInterfac
} }
public async down(queryRunner: QueryRunner): Promise<void> { public async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query( await queryRunner.query(
'DROP INDEX `IDX_81fc04c8a17de15835713505e4` ON `' + tablePrefix + 'execution_entity`', 'DROP INDEX `IDX_81fc04c8a17de15835713505e4` ON `' + tablePrefix + 'execution_entity`',
); );
@ -7,7 +7,7 @@ export class CreateUserManagement1646992772331 implements MigrationInterface {
name = 'CreateUserManagement1646992772331'; name = 'CreateUserManagement1646992772331';
public async up(queryRunner: QueryRunner): Promise<void> { public async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query( await queryRunner.query(
`CREATE TABLE ${tablePrefix}role ( `CREATE TABLE ${tablePrefix}role (
@ -156,7 +156,7 @@ export class CreateUserManagement1646992772331 implements MigrationInterface {
} }
public async down(queryRunner: QueryRunner): Promise<void> { public async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query( await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflow_entity ADD UNIQUE INDEX \`IDX_${tablePrefix}943d8f922be094eb507cb9a7f9\` (\`name\`)`, `ALTER TABLE ${tablePrefix}workflow_entity ADD UNIQUE INDEX \`IDX_${tablePrefix}943d8f922be094eb507cb9a7f9\` (\`name\`)`,
@ -7,9 +7,9 @@ export class InitialMigration1587669153312 implements MigrationInterface {
name = 'InitialMigration1587669153312'; name = 'InitialMigration1587669153312';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixIndex = tablePrefix; const tablePrefixIndex = tablePrefix;
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
@ -22,9 +22,9 @@ export class InitialMigration1587669153312 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixIndex = tablePrefix; const tablePrefixIndex = tablePrefix;
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
@ -9,9 +9,9 @@ export class WebhookModel1589476000887 implements MigrationInterface {
name = 'WebhookModel1589476000887'; name = 'WebhookModel1589476000887';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixIndex = tablePrefix; const tablePrefixIndex = tablePrefix;
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
@ -20,8 +20,8 @@ export class WebhookModel1589476000887 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
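The Postgres migrations repeat one extra step: the raw prefix is kept for index identifiers (tablePrefixPure / tablePrefixIndex) while table references gain the configured schema qualifier. A hypothetical helper capturing that composition, with made-up sample values:

function resolvePostgresPrefixes(tablePrefix: string, schema: string) {
	const tablePrefixPure = tablePrefix; // used inside index names, which are not schema-qualified
	if (schema) {
		tablePrefix = `${schema}.${tablePrefix}`; // used for table references
	}
	return { tablePrefix, tablePrefixPure };
}

const prefixes = resolvePostgresPrefixes('n8n_', 'public');
console.log(prefixes); // { tablePrefix: 'public.n8n_', tablePrefixPure: 'n8n_' }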
@ -6,9 +6,9 @@ export class CreateIndexStoppedAt1594828256133 implements MigrationInterface {
name = 'CreateIndexStoppedAt1594828256133'; name = 'CreateIndexStoppedAt1594828256133';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix; const tablePrefixPure = tablePrefix;
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
@ -17,7 +17,7 @@ export class CreateIndexStoppedAt1594828256133 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query(`DROP INDEX IDX_${tablePrefix}33228da131bb1112247cf52a42`); await queryRunner.query(`DROP INDEX IDX_${tablePrefix}33228da131bb1112247cf52a42`);
} }
@ -6,8 +6,8 @@ export class MakeStoppedAtNullable1607431743768 implements MigrationInterface {
name = 'MakeStoppedAtNullable1607431743768'; name = 'MakeStoppedAtNullable1607431743768';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
@ -5,9 +5,9 @@ export class AddWebhookId1611144599516 implements MigrationInterface {
name = 'AddWebhookId1611144599516'; name = 'AddWebhookId1611144599516';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix; const tablePrefixPure = tablePrefix;
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
@ -18,9 +18,9 @@ export class AddWebhookId1611144599516 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix; const tablePrefixPure = tablePrefix;
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
@ -5,9 +5,9 @@ export class CreateTagEntity1617270242566 implements MigrationInterface {
name = 'CreateTagEntity1617270242566'; name = 'CreateTagEntity1617270242566';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix; const tablePrefixPure = tablePrefix;
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
@ -40,9 +40,9 @@ export class CreateTagEntity1617270242566 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix; const tablePrefixPure = tablePrefix;
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
@ -5,9 +5,9 @@ export class UniqueWorkflowNames1620824779533 implements MigrationInterface {
name = 'UniqueWorkflowNames1620824779533'; name = 'UniqueWorkflowNames1620824779533';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix; const tablePrefixPure = tablePrefix;
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
@ -58,9 +58,9 @@ export class UniqueWorkflowNames1620824779533 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix; const tablePrefixPure = tablePrefix;
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
@ -5,9 +5,9 @@ export class AddwaitTill1626176912946 implements MigrationInterface {
name = 'AddwaitTill1626176912946'; name = 'AddwaitTill1626176912946';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix; const tablePrefixPure = tablePrefix;
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
@ -17,9 +17,9 @@ export class AddwaitTill1626176912946 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix; const tablePrefixPure = tablePrefix;
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
@ -9,8 +9,8 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
name = 'UpdateWorkflowCredentials1630419189837'; name = 'UpdateWorkflowCredentials1630419189837';
public async up(queryRunner: QueryRunner): Promise<void> { public async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
@ -152,8 +152,8 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
} }
public async down(queryRunner: QueryRunner): Promise<void> { public async down(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
@ -5,9 +5,9 @@ export class AddExecutionEntityIndexes1644422880309 implements MigrationInterfac
name = 'AddExecutionEntityIndexes1644422880309'; name = 'AddExecutionEntityIndexes1644422880309';
public async up(queryRunner: QueryRunner): Promise<void> { public async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix; const tablePrefixPure = tablePrefix;
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
@ -40,9 +40,9 @@ export class AddExecutionEntityIndexes1644422880309 implements MigrationInterfac
} }
public async down(queryRunner: QueryRunner): Promise<void> { public async down(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix; const tablePrefixPure = tablePrefix;
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
@ -9,7 +9,7 @@ export class IncreaseTypeVarcharLimit1646834195327 implements MigrationInterface
name = 'IncreaseTypeVarcharLimit1646834195327'; name = 'IncreaseTypeVarcharLimit1646834195327';
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN "type" TYPE VARCHAR(128)`); await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN "type" TYPE VARCHAR(128)`);
} }
@ -7,9 +7,9 @@ export class CreateUserManagement1646992772331 implements MigrationInterface {
name = 'CreateUserManagement1646992772331'; name = 'CreateUserManagement1646992772331';
public async up(queryRunner: QueryRunner): Promise<void> { public async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix; const tablePrefixPure = tablePrefix;
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
@ -140,9 +140,9 @@ export class CreateUserManagement1646992772331 implements MigrationInterface {
} }
public async down(queryRunner: QueryRunner): Promise<void> { public async down(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.get('database.tablePrefix'); let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix; const tablePrefixPure = tablePrefix;
const schema = config.get('database.postgresdb.schema'); const schema = config.getEnv('database.postgresdb.schema');
if (schema) { if (schema) {
tablePrefix = schema + '.' + tablePrefix; tablePrefix = schema + '.' + tablePrefix;
} }
@ -8,7 +8,7 @@ export class InitialMigration1588102412422 implements MigrationInterface {
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
logMigrationStart(this.name); logMigrationStart(this.name);
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query( await queryRunner.query(
`CREATE TABLE IF NOT EXISTS "${tablePrefix}credentials_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(128) NOT NULL, "data" text NOT NULL, "type" varchar(128) NOT NULL, "nodesAccess" text NOT NULL, "createdAt" datetime NOT NULL, "updatedAt" datetime NOT NULL)`, `CREATE TABLE IF NOT EXISTS "${tablePrefix}credentials_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(128) NOT NULL, "data" text NOT NULL, "type" varchar(128) NOT NULL, "nodesAccess" text NOT NULL, "createdAt" datetime NOT NULL, "updatedAt" datetime NOT NULL)`,
@ -35,7 +35,7 @@ export class InitialMigration1588102412422 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query(`DROP TABLE "${tablePrefix}workflow_entity"`, undefined); await queryRunner.query(`DROP TABLE "${tablePrefix}workflow_entity"`, undefined);
await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}c4d999a5e90784e8caccf5589d"`, undefined); await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}c4d999a5e90784e8caccf5589d"`, undefined);
@ -8,7 +8,7 @@ export class WebhookModel1592445003908 implements MigrationInterface {
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
logMigrationStart(this.name); logMigrationStart(this.name);
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query( await queryRunner.query(
`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, PRIMARY KEY ("webhookPath", "method"))`, `CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, PRIMARY KEY ("webhookPath", "method"))`,
@ -18,7 +18,7 @@ export class WebhookModel1592445003908 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query(`DROP TABLE ${tablePrefix}webhook_entity`); await queryRunner.query(`DROP TABLE ${tablePrefix}webhook_entity`);
} }
} }
@ -8,7 +8,7 @@ export class CreateIndexStoppedAt1594825041918 implements MigrationInterface {
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
logMigrationStart(this.name); logMigrationStart(this.name);
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query( await queryRunner.query(
`CREATE INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1" ON "${tablePrefix}execution_entity" ("stoppedAt") `, `CREATE INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1" ON "${tablePrefix}execution_entity" ("stoppedAt") `,
@ -18,7 +18,7 @@ export class CreateIndexStoppedAt1594825041918 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1"`); await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1"`);
} }
@ -8,7 +8,7 @@ export class MakeStoppedAtNullable1607431743769 implements MigrationInterface {
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
logMigrationStart(this.name); logMigrationStart(this.name);
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
// SQLite does not allow us to simply "alter column" // SQLite does not allow us to simply "alter column"
// We're hacking the way sqlite identifies tables // We're hacking the way sqlite identifies tables
// Allowing a column to become nullable // Allowing a column to become nullable
@ -8,7 +8,7 @@ export class AddWebhookId1611071044839 implements MigrationInterface {
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
logMigrationStart(this.name); logMigrationStart(this.name);
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query( await queryRunner.query(
`CREATE TABLE "temporary_webhook_entity" ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, "webhookId" varchar, "pathLength" integer, PRIMARY KEY ("webhookPath", "method"))`, `CREATE TABLE "temporary_webhook_entity" ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, "webhookId" varchar, "pathLength" integer, PRIMARY KEY ("webhookPath", "method"))`,
@ -28,7 +28,7 @@ export class AddWebhookId1611071044839 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}742496f199721a057051acf4c2"`); await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}742496f199721a057051acf4c2"`);
await queryRunner.query( await queryRunner.query(
@ -8,7 +8,7 @@ export class CreateTagEntity1617213344594 implements MigrationInterface {
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
logMigrationStart(this.name); logMigrationStart(this.name);
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
// create tags table + relationship with workflow entity // create tags table + relationship with workflow entity
@ -73,7 +73,7 @@ export class CreateTagEntity1617213344594 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
// `createdAt` and `updatedAt` // `createdAt` and `updatedAt`
@ -8,7 +8,7 @@ export class UniqueWorkflowNames1620821879465 implements MigrationInterface {
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
logMigrationStart(this.name); logMigrationStart(this.name);
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
const workflowNames = await queryRunner.query(` const workflowNames = await queryRunner.query(`
SELECT name SELECT name
@ -58,7 +58,7 @@ export class UniqueWorkflowNames1620821879465 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}943d8f922be094eb507cb9a7f9"`); await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}943d8f922be094eb507cb9a7f9"`);
} }
} }
@ -8,7 +8,7 @@ export class AddWaitColumn1621707690587 implements MigrationInterface {
async up(queryRunner: QueryRunner): Promise<void> { async up(queryRunner: QueryRunner): Promise<void> {
logMigrationStart(this.name); logMigrationStart(this.name);
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query(`DROP TABLE IF EXISTS "${tablePrefix}temporary_execution_entity"`); await queryRunner.query(`DROP TABLE IF EXISTS "${tablePrefix}temporary_execution_entity"`);
await queryRunner.query( await queryRunner.query(
@ -34,7 +34,7 @@ export class AddWaitColumn1621707690587 implements MigrationInterface {
} }
async down(queryRunner: QueryRunner): Promise<void> { async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query( await queryRunner.query(
`CREATE TABLE IF NOT EXISTS "${tablePrefix}temporary_execution_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" varchar NOT NULL, "retryOf" varchar, "retrySuccessId" varchar, "startedAt" datetime NOT NULL, "stoppedAt" datetime, "workflowData" text NOT NULL, "workflowId" varchar)`, `CREATE TABLE IF NOT EXISTS "${tablePrefix}temporary_execution_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" varchar NOT NULL, "retryOf" varchar, "retrySuccessId" varchar, "startedAt" datetime NOT NULL, "stoppedAt" datetime, "workflowData" text NOT NULL, "workflowId" varchar)`,
@ -12,7 +12,7 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac
public async up(queryRunner: QueryRunner): Promise<void> { public async up(queryRunner: QueryRunner): Promise<void> {
logMigrationStart(this.name); logMigrationStart(this.name);
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
const helpers = new MigrationHelpers(queryRunner); const helpers = new MigrationHelpers(queryRunner);
const credentialsEntities = await queryRunner.query(` const credentialsEntities = await queryRunner.query(`
@ -152,7 +152,7 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac
} }
public async down(queryRunner: QueryRunner): Promise<void> { public async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
const helpers = new MigrationHelpers(queryRunner); const helpers = new MigrationHelpers(queryRunner);
const credentialsEntities = await queryRunner.query(` const credentialsEntities = await queryRunner.query(`
@ -7,7 +7,7 @@ export class AddExecutionEntityIndexes1644421939510 implements MigrationInterfac
public async up(queryRunner: QueryRunner): Promise<void> { public async up(queryRunner: QueryRunner): Promise<void> {
logMigrationStart(this.name); logMigrationStart(this.name);
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query(`DROP INDEX IF EXISTS 'IDX_${tablePrefix}c4d999a5e90784e8caccf5589d'`); await queryRunner.query(`DROP INDEX IF EXISTS 'IDX_${tablePrefix}c4d999a5e90784e8caccf5589d'`);
@ -32,7 +32,7 @@ export class AddExecutionEntityIndexes1644421939510 implements MigrationInterfac
} }
public async down(queryRunner: QueryRunner): Promise<void> { public async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query(`DROP INDEX 'IDX_${tablePrefix}81fc04c8a17de15835713505e4'`); await queryRunner.query(`DROP INDEX 'IDX_${tablePrefix}81fc04c8a17de15835713505e4'`);
await queryRunner.query(`DROP INDEX 'IDX_${tablePrefix}b94b45ce2c73ce46c54f20b5f9'`); await queryRunner.query(`DROP INDEX 'IDX_${tablePrefix}b94b45ce2c73ce46c54f20b5f9'`);
@ -13,7 +13,7 @@ export class CreateUserManagement1646992772331 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> { public async up(queryRunner: QueryRunner): Promise<void> {
logMigrationStart(this.name); logMigrationStart(this.name);
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query( await queryRunner.query(
`CREATE TABLE "${tablePrefix}role" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(32) NOT NULL, "scope" varchar NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), CONSTRAINT "UQ_${tablePrefix}5b49d0f504f7ef31045a1fb2eb8" UNIQUE ("scope", "name"))`, `CREATE TABLE "${tablePrefix}role" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(32) NOT NULL, "scope" varchar NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), CONSTRAINT "UQ_${tablePrefix}5b49d0f504f7ef31045a1fb2eb8" UNIQUE ("scope", "name"))`,
@ -104,7 +104,7 @@ export class CreateUserManagement1646992772331 implements MigrationInterface {
} }
public async down(queryRunner: QueryRunner): Promise<void> { public async down(queryRunner: QueryRunner): Promise<void> {
const tablePrefix = config.get('database.tablePrefix'); const tablePrefix = config.getEnv('database.tablePrefix');
await queryRunner.query( await queryRunner.query(
`CREATE UNIQUE INDEX "IDX_${tablePrefix}943d8f922be094eb507cb9a7f9" ON "${tablePrefix}workflow_entity" ("name") `, `CREATE UNIQUE INDEX "IDX_${tablePrefix}943d8f922be094eb507cb9a7f9" ON "${tablePrefix}workflow_entity" ("name") `,
); );
@ -58,10 +58,10 @@ export class Telemetry {
this.instanceId = instanceId; this.instanceId = instanceId;
this.versionCli = versionCli; this.versionCli = versionCli;
const enabled = config.get('diagnostics.enabled') as boolean; const enabled = config.getEnv('diagnostics.enabled');
const logLevel = config.get('logs.level') as boolean; const logLevel = config.getEnv('logs.level');
if (enabled) { if (enabled) {
const conf = config.get('diagnostics.config.backend') as string; const conf = config.getEnv('diagnostics.config.backend');
const [key, url] = conf.split(';'); const [key, url] = conf.split(';');
if (!key || !url) { if (!key || !url) {
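The diagnostics backend is configured as a single '<key>;<url>' string, and telemetry only starts when both halves are present. A sketch of that parse, with a made-up sample value:

function parseDiagnosticsBackend(conf: string): { key: string; url: string } | undefined {
	const [key, url] = conf.split(';');
	if (!key || !url) return undefined; // malformed config: telemetry stays disabled
	return { key, url };
}

console.log(parseDiagnosticsBackend('abc123;https://telemetry.example.com'));
// { key: 'abc123', url: 'https://telemetry.example.com' }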
@@ -81,7 +81,7 @@ test('POST /owner should create owner and enable isInstanceOwnerSetUp', async ()
 	expect(storedOwner.firstName).toBe(newOwnerData.firstName);
 	expect(storedOwner.lastName).toBe(newOwnerData.lastName);
-	const isInstanceOwnerSetUpConfig = config.get('userManagement.isInstanceOwnerSetUp');
+	const isInstanceOwnerSetUpConfig = config.getEnv('userManagement.isInstanceOwnerSetUp');
 	expect(isInstanceOwnerSetUpConfig).toBe(true);
 	const isInstanceOwnerSetUpSetting = await utils.isInstanceOwnerSetUp();
@@ -108,7 +108,7 @@ test('POST /owner/skip-setup should persist skipping setup to the DB', async ()
 	expect(response.statusCode).toBe(200);
-	const skipConfig = config.get('userManagement.skipInstanceOwnerSetup');
+	const skipConfig = config.getEnv('userManagement.skipInstanceOwnerSetup');
 	expect(skipConfig).toBe(true);
 	const { value } = await Db.collections.Settings!.findOneOrFail({


@@ -1,13 +1,13 @@
 import config = require('../../../config');
-export const REST_PATH_SEGMENT = config.get('endpoints.rest') as Readonly<string>;
+export const REST_PATH_SEGMENT = config.getEnv('endpoints.rest') as Readonly<string>;
 export const AUTHLESS_ENDPOINTS: Readonly<string[]> = [
 	'healthz',
 	'metrics',
-	config.get('endpoints.webhook') as string,
-	config.get('endpoints.webhookWaiting') as string,
-	config.get('endpoints.webhookTest') as string,
+	config.getEnv('endpoints.webhook'),
+	config.getEnv('endpoints.webhookWaiting'),
+	config.getEnv('endpoints.webhookTest'),
 ];
 export const SUCCESS_RESPONSE_BODY = {


@@ -22,7 +22,7 @@ import type { CollectionName, CredentialPayload } from './types';
  * Initialize one test DB per suite run, with bootstrap connection if needed.
  */
 export async function init() {
-	const dbType = config.get('database.type') as DatabaseType;
+	const dbType = config.getEnv('database.type');
 	if (dbType === 'sqlite') {
 		// no bootstrap connection required
@@ -73,7 +73,7 @@ export async function init() {
  * Drop test DB, closing bootstrap connection if existing.
  */
 export async function terminate(testDbName: string) {
-	const dbType = config.get('database.type') as DatabaseType;
+	const dbType = config.getEnv('database.type');
 	if (dbType === 'sqlite') {
 		await getConnection(testDbName).close();
@@ -103,7 +103,7 @@ export async function terminate(testDbName: string) {
  * @param testDbName Name of the test DB to truncate tables in.
  */
 export async function truncate(collections: CollectionName[], testDbName: string) {
-	const dbType = config.get('database.type');
+	const dbType = config.getEnv('database.type');
 	const testDb = getConnection(testDbName);
@@ -295,11 +295,11 @@ export const getSqliteOptions = ({ name }: { name: string }): ConnectionOptions
  * to create and drop test Postgres databases.
  */
 export const getBootstrapPostgresOptions = () => {
-	const username = config.get('database.postgresdb.user');
-	const password = config.get('database.postgresdb.password');
-	const host = config.get('database.postgresdb.host');
-	const port = config.get('database.postgresdb.port');
-	const schema = config.get('database.postgresdb.schema');
+	const username = config.getEnv('database.postgresdb.user');
+	const password = config.getEnv('database.postgresdb.password');
+	const host = config.getEnv('database.postgresdb.host');
+	const port = config.getEnv('database.postgresdb.port');
+	const schema = config.getEnv('database.postgresdb.schema');
 	return {
 		name: BOOTSTRAP_POSTGRES_CONNECTION_NAME,
@@ -314,11 +314,11 @@ export const getBootstrapPostgresOptions = () => {
 };
 export const getPostgresOptions = ({ name }: { name: string }): ConnectionOptions => {
-	const username = config.get('database.postgresdb.user');
-	const password = config.get('database.postgresdb.password');
-	const host = config.get('database.postgresdb.host');
-	const port = config.get('database.postgresdb.port');
-	const schema = config.get('database.postgresdb.schema');
+	const username = config.getEnv('database.postgresdb.user');
+	const password = config.getEnv('database.postgresdb.password');
+	const host = config.getEnv('database.postgresdb.host');
+	const port = config.getEnv('database.postgresdb.port');
+	const schema = config.getEnv('database.postgresdb.schema');
 	return {
 		name,
@@ -345,10 +345,10 @@ export const getPostgresOptions = ({ name }: { name: string }): ConnectionOption
  * to create and drop test MySQL databases.
  */
 export const getBootstrapMySqlOptions = (): ConnectionOptions => {
-	const username = config.get('database.mysqldb.user');
-	const password = config.get('database.mysqldb.password');
-	const host = config.get('database.mysqldb.host');
-	const port = config.get('database.mysqldb.port');
+	const username = config.getEnv('database.mysqldb.user');
+	const password = config.getEnv('database.mysqldb.password');
+	const host = config.getEnv('database.mysqldb.host');
+	const port = config.getEnv('database.mysqldb.port');
 	return {
 		name: BOOTSTRAP_MYSQL_CONNECTION_NAME,
@@ -366,10 +366,10 @@ export const getBootstrapMySqlOptions = (): ConnectionOptions => {
  * one per test suite run.
  */
 export const getMySqlOptions = ({ name }: { name: string }): ConnectionOptions => {
-	const username = config.get('database.mysqldb.user');
-	const password = config.get('database.mysqldb.password');
-	const host = config.get('database.mysqldb.host');
-	const port = config.get('database.mysqldb.port');
+	const username = config.getEnv('database.mysqldb.user');
+	const password = config.getEnv('database.mysqldb.password');
+	const host = config.getEnv('database.mysqldb.host');
+	const port = config.getEnv('database.mysqldb.port');
 	return {
 		name,
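For helpers like these the payoff is twofold: values such as `database.postgresdb.port` come back already typed, and the path argument itself is validated, so a typo no longer yields a silent `undefined` at runtime. A sketch, assuming the schema gives the port a numeric default:

// number, per the schema's numeric default for this path (assumed)
const port = config.getEnv('database.postgresdb.port');

// Mistyped paths now fail to compile instead of misbehaving at runtime:
// @ts-expect-error -- 'database.postgresdb.prot' is not a valid config path
config.getEnv('database.postgresdb.prot');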


@@ -3,16 +3,15 @@ import { promisify } from 'util';
 import config = require('../config');
 import { BOOTSTRAP_MYSQL_CONNECTION_NAME } from './integration/shared/constants';
-import { DatabaseType } from '../src';
 const exec = promisify(callbackExec);
-const dbType = config.get('database.type') as DatabaseType;
+const dbType = config.getEnv('database.type');
 if (dbType === 'mysqldb') {
-	const username = config.get('database.mysqldb.user');
-	const password = config.get('database.mysqldb.password');
-	const host = config.get('database.mysqldb.host');
+	const username = config.getEnv('database.mysqldb.user');
+	const password = config.getEnv('database.mysqldb.password');
+	const host = config.getEnv('database.mysqldb.host');
 	const passwordSegment = password ? `-p${password}` : '';


@@ -1,12 +1,11 @@
 import { createConnection } from 'typeorm';
 import config = require('../config');
 import { exec } from 'child_process';
-import { DatabaseType } from '../src';
 import { getBootstrapMySqlOptions, getBootstrapPostgresOptions } from './integration/shared/testDb';
 import { BOOTSTRAP_MYSQL_CONNECTION_NAME } from './integration/shared/constants';
 export default async () => {
-	const dbType = config.get('database.type') as DatabaseType;
+	const dbType = config.getEnv('database.type');
 	if (dbType === 'postgresdb') {
 		const bootstrapPostgres = await createConnection(getBootstrapPostgresOptions());
@@ -25,9 +24,9 @@ export default async () => {
 	}
 	if (dbType === 'mysqldb') {
-		const user = config.get('database.mysqldb.user');
-		const password = config.get('database.mysqldb.password');
-		const host = config.get('database.mysqldb.host');
+		const user = config.getEnv('database.mysqldb.user');
+		const password = config.getEnv('database.mysqldb.password');
+		const host = config.getEnv('database.mysqldb.host');
 		const bootstrapMySql = await createConnection(getBootstrapMySqlOptions());
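The dropped `import { DatabaseType } from '../src';` in these last two files follows from the same inference: the union that `DatabaseType` named now flows directly out of the schema, so the alias and its import are redundant at these call sites. Assuming a schema shaped like the earlier sketch:

// dbType: 'sqlite' | 'mariadb' | 'mysqldb' | 'postgresdb' -- the same union
// the DatabaseType alias spelled out, now derived from the schema (assumed)
const dbType = config.getEnv('database.type');
if (dbType === 'mysqldb') {
	// narrowing works as before, with no assertion and no extra import
}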