Mirror of https://github.com/n8n-io/n8n.git (synced 2024-12-26 21:19:43 -08:00)

commit 0ec553ea47
Merge branch 'master' of github.com:n8n-io/n8n into N8N-3194-output-panels

.github/workflows/tests.yml (vendored): 2 changed lines
@@ -11,7 +11,7 @@ jobs:
     strategy:
       matrix:
-        node-version: [16.x]
+        node-version: [14.x, 16.x]

     steps:
      - uses: actions/checkout@v1
package-lock.json (generated): 2804 changed lines
File diff suppressed because it is too large.
@@ -2,12 +2,7 @@
 /* eslint-disable no-console */
 import { promises as fs } from 'fs';
 import { Command, flags } from '@oclif/command';
-import {
-	BinaryDataManager,
-	IBinaryDataConfig,
-	UserSettings,
-	PLACEHOLDER_EMPTY_WORKFLOW_ID,
-} from 'n8n-core';
+import { BinaryDataManager, UserSettings, PLACEHOLDER_EMPTY_WORKFLOW_ID } from 'n8n-core';
 import { INode, LoggerProxy } from 'n8n-workflow';

 import {
@@ -27,7 +22,7 @@ import {
 } from '../src';

 import { getLogger } from '../src/Logger';
-import config = require('../config');
+import config from '../config';
 import { getInstanceOwner } from '../src/UserManagement/UserManagementHelper';

 export class Execute extends Command {
@@ -52,7 +47,7 @@ export class Execute extends Command {
 	async run() {
 		const logger = getLogger();
 		LoggerProxy.init(logger);
-		const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+		const binaryDataConfig = config.getEnv('binaryDataManager');
 		await BinaryDataManager.init(binaryDataConfig, true);

 		// eslint-disable-next-line @typescript-eslint/no-shadow
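The same two-step pattern repeats in every CLI command below: interop-style single-line imports, and config.get('...') as T casts replaced by config.getEnv('...'). A minimal sketch of the resulting call site, assuming the schema-derived getEnv typing introduced at the bottom of this commit:

import { BinaryDataManager } from 'n8n-core';
import config from '../config';

// The return type of getEnv is derived from the config schema, so the old
// `as IBinaryDataConfig` cast is no longer needed at the call site.
async function initBinaryDataManager(): Promise<void> {
	const binaryDataConfig = config.getEnv('binaryDataManager');
	await BinaryDataManager.init(binaryDataConfig, true);
}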
@@ -6,10 +6,10 @@
 /* eslint-disable no-param-reassign */
 /* eslint-disable @typescript-eslint/unbound-method */
 /* eslint-disable no-console */
-import * as fs from 'fs';
+import fs from 'fs';
 import { Command, flags } from '@oclif/command';

-import { BinaryDataManager, IBinaryDataConfig, UserSettings } from 'n8n-core';
+import { BinaryDataManager, UserSettings } from 'n8n-core';

 // eslint-disable-next-line @typescript-eslint/no-unused-vars
 import { INode, ITaskData, LoggerProxy } from 'n8n-workflow';
@@ -36,7 +36,7 @@ import {
 	NodeTypes,
 	WorkflowRunner,
 } from '../src';
-import config = require('../config');
+import config from '../config';
 import { User } from '../src/databases/entities/User';
 import { getInstanceOwner } from '../src/UserManagement/UserManagementHelper';

@@ -196,7 +196,7 @@ export class ExecuteBatch extends Command {

 		const logger = getLogger();
 		LoggerProxy.init(logger);
-		const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+		const binaryDataConfig = config.getEnv('binaryDataManager');
 		await BinaryDataManager.init(binaryDataConfig, true);

 		// eslint-disable-next-line @typescript-eslint/no-shadow
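The import * as fs → import fs rewrites in this and the following files imply that the package now compiles with esModuleInterop (or an equivalent flag) enabled; the tsconfig change itself is not part of this excerpt, so that flag is an assumption. A self-contained illustration:

// With esModuleInterop, a CommonJS module is consumed via a default import:
import fs from 'fs';

// Both import forms reach the same runtime object; only the typing and the
// emitted interop helper differ.
const pkg = fs.readFileSync('package.json', 'utf8');
console.log(pkg.length);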
@@ -7,8 +7,8 @@ import { Credentials, UserSettings } from 'n8n-core';

 import { IDataObject, LoggerProxy } from 'n8n-workflow';

-import * as fs from 'fs';
-import * as path from 'path';
+import fs from 'fs';
+import path from 'path';
 import { getLogger } from '../../src/Logger';
 import { Db, ICredentialsDecryptedDb } from '../../src';

@@ -4,8 +4,8 @@ import { Command, flags } from '@oclif/command';

 import { IDataObject, LoggerProxy } from 'n8n-workflow';

-import * as fs from 'fs';
-import * as path from 'path';
+import fs from 'fs';
+import path from 'path';
 import { getLogger } from '../../src/Logger';
 import { Db } from '../../src';

@@ -12,9 +12,9 @@ import { Credentials, UserSettings } from 'n8n-core';

 import { LoggerProxy } from 'n8n-workflow';

-import * as fs from 'fs';
-import * as glob from 'fast-glob';
-import * as path from 'path';
+import fs from 'fs';
+import glob from 'fast-glob';
+import path from 'path';
 import { EntityManager, getConnection } from 'typeorm';
 import { getLogger } from '../../src/Logger';
 import { Db } from '../../src';
@@ -12,8 +12,8 @@ import { Command, flags } from '@oclif/command';

 import { INode, INodeCredentialsDetails, LoggerProxy } from 'n8n-workflow';

-import * as fs from 'fs';
-import * as glob from 'fast-glob';
+import fs from 'fs';
+import glob from 'fast-glob';
 import { UserSettings } from 'n8n-core';
 import { EntityManager, getConnection } from 'typeorm';
 import { getLogger } from '../../src/Logger';
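fast-glob ships its API as the module's default export, which is why the namespace import could be swapped one-for-one here. A small usage sketch; the helper name and pattern are illustrative, not taken from the diff:

import glob from 'fast-glob';

// Resolve all workflow JSON files under a directory, as the import commands
// do with user-supplied paths.
async function listWorkflowFiles(dir: string): Promise<string[]> {
	return glob('*.json', { cwd: dir, absolute: true });
}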
@@ -6,15 +6,15 @@
 /* eslint-disable no-console */
 /* eslint-disable @typescript-eslint/no-unsafe-call */
 /* eslint-disable @typescript-eslint/no-unsafe-member-access */
-import * as localtunnel from 'localtunnel';
-import { BinaryDataManager, IBinaryDataConfig, TUNNEL_SUBDOMAIN_ENV, UserSettings } from 'n8n-core';
+import localtunnel from 'localtunnel';
+import { BinaryDataManager, TUNNEL_SUBDOMAIN_ENV, UserSettings } from 'n8n-core';
 import { Command, flags } from '@oclif/command';
 // eslint-disable-next-line import/no-extraneous-dependencies
-import * as Redis from 'ioredis';
+import Redis from 'ioredis';

 import { IDataObject, LoggerProxy } from 'n8n-workflow';
 import { createHash } from 'crypto';
-import * as config from '../config';
+import config from '../config';
 import {
 	ActiveExecutions,
 	ActiveWorkflowRunner,
@@ -100,9 +100,9 @@ export class Start extends Command {

 				await InternalHooksManager.getInstance().onN8nStop();

-				const skipWebhookDeregistration = config.get(
+				const skipWebhookDeregistration = config.getEnv(
 					'endpoints.skipWebhoooksDeregistrationOnShutdown',
-				) as boolean;
+				);

 				const removePromises = [];
 				if (activeWorkflowRunner !== undefined && !skipWebhookDeregistration) {
@@ -169,7 +169,7 @@ export class Start extends Command {
 			// Make sure the settings exist
 			const userSettings = await UserSettings.prepareUserSettings();

-			if (!config.get('userManagement.jwtSecret')) {
+			if (!config.getEnv('userManagement.jwtSecret')) {
 				// If we don't have a JWT secret set, generate
 				// one based and save to config.
 				const encryptionKey = await UserSettings.getEncryptionKey();
@@ -222,12 +222,12 @@ export class Start extends Command {
 				config.set(setting.key, JSON.parse(setting.value));
 			});

-			if (config.get('executions.mode') === 'queue') {
-				const redisHost = config.get('queue.bull.redis.host');
-				const redisPassword = config.get('queue.bull.redis.password');
-				const redisPort = config.get('queue.bull.redis.port');
-				const redisDB = config.get('queue.bull.redis.db');
-				const redisConnectionTimeoutLimit = config.get('queue.bull.redis.timeoutThreshold');
+			if (config.getEnv('executions.mode') === 'queue') {
+				const redisHost = config.getEnv('queue.bull.redis.host');
+				const redisPassword = config.getEnv('queue.bull.redis.password');
+				const redisPort = config.getEnv('queue.bull.redis.port');
+				const redisDB = config.getEnv('queue.bull.redis.db');
+				const redisConnectionTimeoutLimit = config.getEnv('queue.bull.redis.timeoutThreshold');
 				let lastTimer = 0;
 				let cumulativeTimeout = 0;

@@ -285,7 +285,7 @@ export class Start extends Command {
 			const dbType = (await GenericHelpers.getConfigValue('database.type')) as DatabaseType;

 			if (dbType === 'sqlite') {
-				const shouldRunVacuum = config.get('database.sqlite.executeVacuumOnStartup') as number;
+				const shouldRunVacuum = config.getEnv('database.sqlite.executeVacuumOnStartup');
 				if (shouldRunVacuum) {
 					// eslint-disable-next-line @typescript-eslint/no-floating-promises, @typescript-eslint/no-non-null-assertion
 					await Db.collections.Execution!.query('VACUUM;');
@@ -324,7 +324,7 @@ export class Start extends Command {
 					subdomain: tunnelSubdomain,
 				};

-				const port = config.get('port');
+				const port = config.getEnv('port');

 				// @ts-ignore
 				const webhookTunnel = await localtunnel(port, tunnelSettings);
@@ -340,7 +340,7 @@ export class Start extends Command {
 			const { cli } = await GenericHelpers.getVersions();
 			InternalHooksManager.init(instanceId, cli, nodeTypes);

-			const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+			const binaryDataConfig = config.getEnv('binaryDataManager');
 			await BinaryDataManager.init(binaryDataConfig, true);

 			await Server.start();
@@ -354,7 +354,7 @@ export class Start extends Command {
 		const editorUrl = GenericHelpers.getBaseUrl();
 		this.log(`\nEditor is now accessible via:\n${editorUrl}`);

-		const saveManualExecutions = config.get('executions.saveDataManualExecutions') as boolean;
+		const saveManualExecutions = config.getEnv('executions.saveDataManualExecutions');

 		if (saveManualExecutions) {
 			this.log('\nManual executions will be visible only for the owner');
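The hunk at @@ -169,7 +169,7 @@ generates a JWT secret from the instance encryption key when none is configured. The derivation itself is cut off in this excerpt, so the sketch below is only a plausible reconstruction built on the createHash import visible at the top of the file, not the exact code:

import { createHash } from 'crypto';

// Hypothetical derivation: hash instance-specific material into a stable
// secret that survives restarts without being stored in plain text.
function deriveJwtSecret(encryptionKey: string): string {
	return createHash('sha256').update(encryptionKey).digest('hex');
}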
@@ -3,13 +3,13 @@
 /* eslint-disable @typescript-eslint/no-unsafe-member-access */
 /* eslint-disable @typescript-eslint/no-unsafe-assignment */
 /* eslint-disable @typescript-eslint/unbound-method */
-import { BinaryDataManager, IBinaryDataConfig, UserSettings } from 'n8n-core';
+import { BinaryDataManager, UserSettings } from 'n8n-core';
 import { Command, flags } from '@oclif/command';
 // eslint-disable-next-line import/no-extraneous-dependencies
-import * as Redis from 'ioredis';
+import Redis from 'ioredis';

 import { IDataObject, LoggerProxy } from 'n8n-workflow';
-import * as config from '../config';
+import config from '../config';
 import {
 	ActiveExecutions,
 	ActiveWorkflowRunner,
@@ -95,7 +95,7 @@ export class Webhook extends Command {

 		// Wrap that the process does not close but we can still use async
 		await (async () => {
-			if (config.get('executions.mode') !== 'queue') {
+			if (config.getEnv('executions.mode') !== 'queue') {
 				/**
 				 * It is technically possible to run without queues but
 				 * there are 2 known bugs when running in this mode:
@@ -152,15 +152,15 @@ export class Webhook extends Command {
 			const { cli } = await GenericHelpers.getVersions();
 			InternalHooksManager.init(instanceId, cli, nodeTypes);

-			const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+			const binaryDataConfig = config.getEnv('binaryDataManager');
 			await BinaryDataManager.init(binaryDataConfig);

-			if (config.get('executions.mode') === 'queue') {
-				const redisHost = config.get('queue.bull.redis.host');
-				const redisPassword = config.get('queue.bull.redis.password');
-				const redisPort = config.get('queue.bull.redis.port');
-				const redisDB = config.get('queue.bull.redis.db');
-				const redisConnectionTimeoutLimit = config.get('queue.bull.redis.timeoutThreshold');
+			if (config.getEnv('executions.mode') === 'queue') {
+				const redisHost = config.getEnv('queue.bull.redis.host');
+				const redisPassword = config.getEnv('queue.bull.redis.password');
+				const redisPort = config.getEnv('queue.bull.redis.port');
+				const redisDB = config.getEnv('queue.bull.redis.db');
+				const redisConnectionTimeoutLimit = config.getEnv('queue.bull.redis.timeoutThreshold');
 				let lastTimer = 0;
 				let cumulativeTimeout = 0;

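Both the start and webhook commands read queue.bull.redis.timeoutThreshold next to lastTimer and cumulativeTimeout counters, which feed an ioredis retryStrategy. The surrounding logic falls outside this excerpt, so the body below is an assumption that matches the visible variable names:

import Redis from 'ioredis';

const redisConnectionTimeoutLimit = 10000; // config.getEnv('queue.bull.redis.timeoutThreshold')
let lastTimer = 0;
let cumulativeTimeout = 0;

const redis = new Redis({
	retryStrategy: () => {
		const now = Date.now();
		if (now - lastTimer > 30000) {
			// Long gap since the last attempt: treat this as a fresh outage.
			lastTimer = now;
			cumulativeTimeout = 0;
		} else {
			cumulativeTimeout += now - lastTimer;
			lastTimer = now;
			if (cumulativeTimeout > redisConnectionTimeoutLimit) {
				console.error('Unable to connect to Redis after timeout threshold was reached, exiting.');
				process.exit(1);
			}
		}
		return 500; // retry again in 500 ms
	},
});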
@@ -7,9 +7,9 @@
 /* eslint-disable @typescript-eslint/restrict-template-expressions */
 /* eslint-disable @typescript-eslint/no-unused-vars */
 // eslint-disable-next-line import/no-extraneous-dependencies
-import * as express from 'express';
-import * as http from 'http';
-import * as PCancelable from 'p-cancelable';
+import express from 'express';
+import http from 'http';
+import PCancelable from 'p-cancelable';

 import { Command, flags } from '@oclif/command';
 import { BinaryDataManager, IBinaryDataConfig, UserSettings, WorkflowExecute } from 'n8n-core';
@@ -18,7 +18,7 @@ import { IExecuteResponsePromiseData, INodeTypes, IRun, Workflow, LoggerProxy }

 import { FindOneOptions, getConnectionManager } from 'typeorm';

-import * as Bull from 'bull';
+import Bull from 'bull';
 import {
 	CredentialsOverwrites,
 	CredentialTypes,
@@ -39,7 +39,7 @@ import {

 import { getLogger } from '../src/Logger';

-import * as config from '../config';
+import config from '../config';
 import * as Queue from '../src/Queue';
 import {
 	checkPermissionsForExecution,
@@ -158,7 +158,7 @@ export class Worker extends Command {
 			staticData = workflowData.staticData;
 		}

-		let workflowTimeout = config.get('executions.timeout') as number; // initialize with default
+		let workflowTimeout = config.getEnv('executions.timeout'); // initialize with default
 		if (
 			// eslint-disable-next-line @typescript-eslint/prefer-optional-chain
 			currentExecutionDb.workflowData.settings &&
@@ -169,7 +169,7 @@ export class Worker extends Command {

 		let executionTimeoutTimestamp: number | undefined;
 		if (workflowTimeout > 0) {
-			workflowTimeout = Math.min(workflowTimeout, config.get('executions.maxTimeout') as number);
+			workflowTimeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout'));
 			executionTimeoutTimestamp = Date.now() + workflowTimeout * 1000;
 		}

@@ -288,7 +288,7 @@ export class Worker extends Command {
 		await startDbInitPromise;

 		// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
-		const redisConnectionTimeoutLimit = config.get('queue.bull.redis.timeoutThreshold');
+		const redisConnectionTimeoutLimit = config.getEnv('queue.bull.redis.timeoutThreshold');

 		Worker.jobQueue = Queue.getInstance().getBullObjectInstance();
 		// eslint-disable-next-line @typescript-eslint/no-floating-promises
@@ -299,7 +299,7 @@ export class Worker extends Command {

 		InternalHooksManager.init(instanceId, versions.cli, nodeTypes);

-		const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+		const binaryDataConfig = config.getEnv('binaryDataManager');
 		await BinaryDataManager.init(binaryDataConfig);

 		console.info('\nn8n worker is now ready');
@@ -352,8 +352,8 @@ export class Worker extends Command {
 			}
 		});

-		if (config.get('queue.health.active')) {
-			const port = config.get('queue.health.port') as number;
+		if (config.getEnv('queue.health.active')) {
+			const port = config.getEnv('queue.health.port');

 			const app = express();
 			const server = http.createServer(app);
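The final worker hunk gates an Express health-check server on queue.health.active. What the server actually serves is not shown in this excerpt; the sketch below wires the visible pieces together with an assumed /healthz route and response shape:

import express from 'express';
import http from 'http';

const app = express();

// Route path and response body are assumptions; only app and server creation
// plus the typed port lookup appear in the diff.
app.get('/healthz', (_req, res) => {
	res.json({ status: 'ok' });
});

const port = 5678; // config.getEnv('queue.health.port'), typed as number
const server = http.createServer(app);
server.listen(port);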
@@ -1,890 +1,14 @@
 /* eslint-disable @typescript-eslint/unbound-method */
 /* eslint-disable no-console */
 /* eslint-disable no-restricted-syntax */
 /* eslint-disable @typescript-eslint/no-unsafe-assignment */
-import * as convict from 'convict';
-import * as dotenv from 'dotenv';
-import * as path from 'path';
-import * as core from 'n8n-core';
+import convict from 'convict';
+import dotenv from 'dotenv';
+import { schema } from './schema';

 dotenv.config();

-const config = convict({
-	database: {
-		type: {
-			doc: 'Type of database to use',
-			format: ['sqlite', 'mariadb', 'mysqldb', 'postgresdb'],
-			default: 'sqlite',
-			env: 'DB_TYPE',
-		},
-		tablePrefix: {
-			doc: 'Prefix for table names',
-			format: '*',
-			default: '',
-			env: 'DB_TABLE_PREFIX',
-		},
-		logging: {
-			enabled: {
-				doc: 'Typeorm logging enabled flag.',
-				format: 'Boolean',
-				default: false,
-				env: 'DB_LOGGING_ENABLED',
-			},
-			options: {
-				doc: 'Logging level options, default is "error". Possible values: query,error,schema,warn,info,log. To enable all logging, specify "all"',
-				format: String,
-				default: 'error',
-				env: 'DB_LOGGING_OPTIONS',
-			},
-			maxQueryExecutionTime: {
-				doc: 'Maximum number of milliseconds query should be executed before logger logs a warning. Set 0 to disable long running query warning',
-				format: Number,
-				default: 1000,
-				env: 'DB_LOGGING_MAX_EXECUTION_TIME',
-			},
-		},
-		postgresdb: {
-			database: {
-				doc: 'PostgresDB Database',
-				format: String,
-				default: 'n8n',
-				env: 'DB_POSTGRESDB_DATABASE',
-			},
-			host: {
-				doc: 'PostgresDB Host',
-				format: String,
-				default: 'localhost',
-				env: 'DB_POSTGRESDB_HOST',
-			},
-			password: {
-				doc: 'PostgresDB Password',
-				format: String,
-				default: '',
-				env: 'DB_POSTGRESDB_PASSWORD',
-			},
-			port: {
-				doc: 'PostgresDB Port',
-				format: Number,
-				default: 5432,
-				env: 'DB_POSTGRESDB_PORT',
-			},
-			user: {
-				doc: 'PostgresDB User',
-				format: String,
-				default: 'root',
-				env: 'DB_POSTGRESDB_USER',
-			},
-			schema: {
-				doc: 'PostgresDB Schema',
-				format: String,
-				default: 'public',
-				env: 'DB_POSTGRESDB_SCHEMA',
-			},
+const config = convict(schema);
+
-
-			ssl: {
-				ca: {
-					doc: 'SSL certificate authority',
-					format: String,
-					default: '',
-					env: 'DB_POSTGRESDB_SSL_CA',
-				},
-				cert: {
-					doc: 'SSL certificate',
-					format: String,
-					default: '',
-					env: 'DB_POSTGRESDB_SSL_CERT',
-				},
-				key: {
-					doc: 'SSL key',
-					format: String,
-					default: '',
-					env: 'DB_POSTGRESDB_SSL_KEY',
-				},
-				rejectUnauthorized: {
-					doc: 'If unauthorized SSL connections should be rejected',
-					format: 'Boolean',
-					default: true,
-					env: 'DB_POSTGRESDB_SSL_REJECT_UNAUTHORIZED',
-				},
-			},
-		},
-		mysqldb: {
-			database: {
-				doc: 'MySQL Database',
-				format: String,
-				default: 'n8n',
-				env: 'DB_MYSQLDB_DATABASE',
-			},
-			host: {
-				doc: 'MySQL Host',
-				format: String,
-				default: 'localhost',
-				env: 'DB_MYSQLDB_HOST',
-			},
-			password: {
-				doc: 'MySQL Password',
-				format: String,
-				default: '',
-				env: 'DB_MYSQLDB_PASSWORD',
-			},
-			port: {
-				doc: 'MySQL Port',
-				format: Number,
-				default: 3306,
-				env: 'DB_MYSQLDB_PORT',
-			},
-			user: {
-				doc: 'MySQL User',
-				format: String,
-				default: 'root',
-				env: 'DB_MYSQLDB_USER',
-			},
-		},
-		sqlite: {
-			executeVacuumOnStartup: {
-				doc: 'Runs VACUUM operation on startup to rebuild the database. Reduces filesize and optimizes indexes. WARNING: This is a long running blocking operation. Will increase start-up time.',
-				format: Boolean,
-				default: false,
-				env: 'DB_SQLITE_VACUUM_ON_STARTUP',
-			},
-		},
-	},
-
-	credentials: {
-		overwrite: {
-			data: {
-				// Allows to set default values for credentials which
-				// get automatically prefilled and the user does not get
-				// displayed and can not change.
-				// Format: { CREDENTIAL_NAME: { PARAMTER: VALUE }}
-				doc: 'Overwrites for credentials',
-				format: '*',
-				default: '{}',
-				env: 'CREDENTIALS_OVERWRITE_DATA',
-			},
-			endpoint: {
-				doc: 'Fetch credentials from API',
-				format: String,
-				default: '',
-				env: 'CREDENTIALS_OVERWRITE_ENDPOINT',
-			},
-		},
-		defaultName: {
-			doc: 'Default name for credentials',
-			format: String,
-			default: 'My credentials',
-			env: 'CREDENTIALS_DEFAULT_NAME',
-		},
-	},
-
-	workflows: {
-		defaultName: {
-			doc: 'Default name for workflow',
-			format: String,
-			default: 'My workflow',
-			env: 'WORKFLOWS_DEFAULT_NAME',
-		},
-	},
-
-	executions: {
-		// By default workflows get always executed in their own process.
-		// If this option gets set to "main" it will run them in the
-		// main-process instead.
-		process: {
-			doc: 'In what process workflows should be executed',
-			format: ['main', 'own'],
-			default: 'own',
-			env: 'EXECUTIONS_PROCESS',
-		},
-
-		mode: {
-			doc: 'If it should run executions directly or via queue',
-			format: ['regular', 'queue'],
-			default: 'regular',
-			env: 'EXECUTIONS_MODE',
-		},
-
-		// A Workflow times out and gets canceled after this time (seconds).
-		// If the workflow is executed in the main process a soft timeout
-		// is executed (takes effect after the current node finishes).
-		// If a workflow is running in its own process is a soft timeout
-		// tried first, before killing the process after waiting for an
-		// additional fifth of the given timeout duration.
-		//
-		// To deactivate timeout set it to -1
-		//
-		// Timeout is currently not activated by default which will change
-		// in a future version.
-		timeout: {
-			doc: 'Max run time (seconds) before stopping the workflow execution',
-			format: Number,
-			default: -1,
-			env: 'EXECUTIONS_TIMEOUT',
-		},
-		maxTimeout: {
-			doc: 'Max execution time (seconds) that can be set for a workflow individually',
-			format: Number,
-			default: 3600,
-			env: 'EXECUTIONS_TIMEOUT_MAX',
-		},
-
-		// If a workflow executes all the data gets saved by default. This
-		// could be a problem when a workflow gets executed a lot and processes
-		// a lot of data. To not exceed the database's capacity it is possible to
-		// prune the database regularly or to not save the execution at all.
-		// Depending on if the execution did succeed or error a different
-		// save behaviour can be set.
-		saveDataOnError: {
-			doc: 'What workflow execution data to save on error',
-			format: ['all', 'none'],
-			default: 'all',
-			env: 'EXECUTIONS_DATA_SAVE_ON_ERROR',
-		},
-		saveDataOnSuccess: {
-			doc: 'What workflow execution data to save on success',
-			format: ['all', 'none'],
-			default: 'all',
-			env: 'EXECUTIONS_DATA_SAVE_ON_SUCCESS',
-		},
-		saveExecutionProgress: {
-			doc: 'Wether or not to save progress for each node executed',
-			format: 'Boolean',
-			default: false,
-			env: 'EXECUTIONS_DATA_SAVE_ON_PROGRESS',
-		},
-
-		// If the executions of workflows which got started via the editor
-		// should be saved. By default they will not be saved as this runs
-		// are normally only for testing and debugging. This setting can
-		// also be overwritten on a per workflow basis in the workflow settings
-		// in the editor.
-		saveDataManualExecutions: {
-			doc: 'Save data of executions when started manually via editor',
-			format: 'Boolean',
-			default: false,
-			env: 'EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS',
-		},
-
-		// To not exceed the database's capacity and keep its size moderate
-		// the execution data gets pruned regularly (default: 1 hour interval).
-		// All saved execution data older than the max age will be deleted.
-		// Pruning is currently not activated by default, which will change in
-		// a future version.
-		pruneData: {
-			doc: 'Delete data of past executions on a rolling basis',
-			format: 'Boolean',
-			default: false,
-			env: 'EXECUTIONS_DATA_PRUNE',
-		},
-		pruneDataMaxAge: {
-			doc: 'How old (hours) the execution data has to be to get deleted',
-			format: Number,
-			default: 336,
-			env: 'EXECUTIONS_DATA_MAX_AGE',
-		},
-		pruneDataTimeout: {
-			doc: 'Timeout (seconds) after execution data has been pruned',
-			format: Number,
-			default: 3600,
-			env: 'EXECUTIONS_DATA_PRUNE_TIMEOUT',
-		},
-	},
-
-	queue: {
-		health: {
-			active: {
-				doc: 'If health checks should be enabled',
-				format: 'Boolean',
-				default: false,
-				env: 'QUEUE_HEALTH_CHECK_ACTIVE',
-			},
-			port: {
-				doc: 'Port to serve health check on if activated',
-				format: Number,
-				default: 5678,
-				env: 'QUEUE_HEALTH_CHECK_PORT',
-			},
-		},
-		bull: {
-			prefix: {
-				doc: 'Prefix for all queue keys',
-				format: String,
-				default: '',
-				env: 'QUEUE_BULL_PREFIX',
-			},
-			redis: {
-				db: {
-					doc: 'Redis DB',
-					format: Number,
-					default: 0,
-					env: 'QUEUE_BULL_REDIS_DB',
-				},
-				host: {
-					doc: 'Redis Host',
-					format: String,
-					default: 'localhost',
-					env: 'QUEUE_BULL_REDIS_HOST',
-				},
-				password: {
-					doc: 'Redis Password',
-					format: String,
-					default: '',
-					env: 'QUEUE_BULL_REDIS_PASSWORD',
-				},
-				port: {
-					doc: 'Redis Port',
-					format: Number,
-					default: 6379,
-					env: 'QUEUE_BULL_REDIS_PORT',
-				},
-				timeoutThreshold: {
-					doc: 'Redis timeout threshold',
-					format: Number,
-					default: 10000,
-					env: 'QUEUE_BULL_REDIS_TIMEOUT_THRESHOLD',
-				},
-			},
-			queueRecoveryInterval: {
-				doc: 'If > 0 enables an active polling to the queue that can recover for Redis crashes. Given in seconds; 0 is disabled. May increase Redis traffic significantly.',
-				format: Number,
-				default: 60,
-				env: 'QUEUE_RECOVERY_INTERVAL',
-			},
-		},
-	},
-
-	generic: {
-		// The timezone to use. Is important for nodes like "Cron" which start the
-		// workflow automatically at a specified time. This setting can also be
-		// overwritten on a per worfklow basis in the workflow settings in the
-		// editor.
-		timezone: {
-			doc: 'The timezone to use',
-			format: '*',
-			default: 'America/New_York',
-			env: 'GENERIC_TIMEZONE',
-		},
-	},
-
-	// How n8n can be reached (Editor & REST-API)
-	path: {
-		format: String,
-		default: '/',
-		arg: 'path',
-		env: 'N8N_PATH',
-		doc: 'Path n8n is deployed to',
-	},
-	host: {
-		format: String,
-		default: 'localhost',
-		arg: 'host',
-		env: 'N8N_HOST',
-		doc: 'Host name n8n can be reached',
-	},
-	port: {
-		format: Number,
-		default: 5678,
-		arg: 'port',
-		env: 'N8N_PORT',
-		doc: 'HTTP port n8n can be reached',
-	},
-	listen_address: {
-		format: String,
-		default: '0.0.0.0',
-		env: 'N8N_LISTEN_ADDRESS',
-		doc: 'IP address n8n should listen on',
-	},
-	protocol: {
-		format: ['http', 'https'],
-		default: 'http',
-		env: 'N8N_PROTOCOL',
-		doc: 'HTTP Protocol via which n8n can be reached',
-	},
-	ssl_key: {
-		format: String,
-		default: '',
-		env: 'N8N_SSL_KEY',
-		doc: 'SSL Key for HTTPS Protocol',
-	},
-	ssl_cert: {
-		format: String,
-		default: '',
-		env: 'N8N_SSL_CERT',
-		doc: 'SSL Cert for HTTPS Protocol',
-	},
-	editorBaseUrl: {
-		format: String,
-		default: '',
-		env: 'N8N_EDITOR_BASE_URL',
-		doc: 'Public URL where the editor is accessible. Also used for emails sent from n8n.',
-	},
-
-	security: {
-		excludeEndpoints: {
-			doc: 'Additional endpoints to exclude auth checks. Multiple endpoints can be separated by colon (":")',
-			format: String,
-			default: '',
-			env: 'N8N_AUTH_EXCLUDE_ENDPOINTS',
-		},
-		basicAuth: {
-			active: {
-				format: 'Boolean',
-				default: false,
-				env: 'N8N_BASIC_AUTH_ACTIVE',
-				doc: 'If basic auth should be activated for editor and REST-API',
-			},
-			user: {
-				format: String,
-				default: '',
-				env: 'N8N_BASIC_AUTH_USER',
-				doc: 'The name of the basic auth user',
-			},
-			password: {
-				format: String,
-				default: '',
-				env: 'N8N_BASIC_AUTH_PASSWORD',
-				doc: 'The password of the basic auth user',
-			},
-			hash: {
-				format: 'Boolean',
-				default: false,
-				env: 'N8N_BASIC_AUTH_HASH',
-				doc: 'If password for basic auth is hashed',
-			},
-		},
-		jwtAuth: {
-			active: {
-				format: 'Boolean',
-				default: false,
-				env: 'N8N_JWT_AUTH_ACTIVE',
-				doc: 'If JWT auth should be activated for editor and REST-API',
-			},
-			jwtHeader: {
-				format: String,
-				default: '',
-				env: 'N8N_JWT_AUTH_HEADER',
-				doc: 'The request header containing a signed JWT',
-			},
-			jwtHeaderValuePrefix: {
-				format: String,
-				default: '',
-				env: 'N8N_JWT_AUTH_HEADER_VALUE_PREFIX',
-				doc: 'The request header value prefix to strip (optional)',
-			},
-			jwksUri: {
-				format: String,
-				default: '',
-				env: 'N8N_JWKS_URI',
-				doc: 'The URI to fetch JWK Set for JWT authentication',
-			},
-			jwtIssuer: {
-				format: String,
-				default: '',
-				env: 'N8N_JWT_ISSUER',
-				doc: 'JWT issuer to expect (optional)',
-			},
-			jwtNamespace: {
-				format: String,
-				default: '',
-				env: 'N8N_JWT_NAMESPACE',
-				doc: 'JWT namespace to expect (optional)',
-			},
-			jwtAllowedTenantKey: {
-				format: String,
-				default: '',
-				env: 'N8N_JWT_ALLOWED_TENANT_KEY',
-				doc: 'JWT tenant key name to inspect within JWT namespace (optional)',
-			},
-			jwtAllowedTenant: {
-				format: String,
-				default: '',
-				env: 'N8N_JWT_ALLOWED_TENANT',
-				doc: 'JWT tenant to allow (optional)',
-			},
-		},
-	},
-
-	endpoints: {
-		payloadSizeMax: {
-			format: Number,
-			default: 16,
-			env: 'N8N_PAYLOAD_SIZE_MAX',
-			doc: 'Maximum payload size in MB.',
-		},
-		metrics: {
-			enable: {
-				format: 'Boolean',
-				default: false,
-				env: 'N8N_METRICS',
-				doc: 'Enable metrics endpoint',
-			},
-			prefix: {
-				format: String,
-				default: 'n8n_',
-				env: 'N8N_METRICS_PREFIX',
-				doc: 'An optional prefix for metric names. Default: n8n_',
-			},
-		},
-		rest: {
-			format: String,
-			default: 'rest',
-			env: 'N8N_ENDPOINT_REST',
-			doc: 'Path for rest endpoint',
-		},
-		webhook: {
-			format: String,
-			default: 'webhook',
-			env: 'N8N_ENDPOINT_WEBHOOK',
-			doc: 'Path for webhook endpoint',
-		},
-		webhookWaiting: {
-			format: String,
-			default: 'webhook-waiting',
-			env: 'N8N_ENDPOINT_WEBHOOK_WAIT',
-			doc: 'Path for waiting-webhook endpoint',
-		},
-		webhookTest: {
-			format: String,
-			default: 'webhook-test',
-			env: 'N8N_ENDPOINT_WEBHOOK_TEST',
-			doc: 'Path for test-webhook endpoint',
-		},
-		disableUi: {
-			format: Boolean,
-			default: false,
-			env: 'N8N_DISABLE_UI',
-			doc: 'Disable N8N UI (Frontend).',
-		},
-		disableProductionWebhooksOnMainProcess: {
-			format: Boolean,
-			default: false,
-			env: 'N8N_DISABLE_PRODUCTION_MAIN_PROCESS',
-			doc: 'Disable production webhooks from main process. This helps ensures no http traffic load to main process when using webhook-specific processes.',
-		},
-		skipWebhoooksDeregistrationOnShutdown: {
-			/**
-			 * Longer explanation: n8n deregisters webhooks on shutdown / deactivation
-			 * and registers on startup / activation. If we skip
-			 * deactivation on shutdown, webhooks will remain active on 3rd party services.
-			 * We don't have to worry about startup as it always
-			 * checks if webhooks already exist.
-			 * If users want to upgrade n8n, it is possible to run
-			 * two instances simultaneously without downtime, similar
-			 * to blue/green deployment.
-			 * WARNING: Trigger nodes (like Cron) will cause duplication
-			 * of work, so be aware when using.
-			 */
-			doc: 'Deregister webhooks on external services only when workflows are deactivated.',
-			format: Boolean,
-			default: false,
-			env: 'N8N_SKIP_WEBHOOK_DEREGISTRATION_SHUTDOWN',
-		},
-	},
-
-	workflowTagsDisabled: {
-		format: Boolean,
-		default: false,
-		env: 'N8N_WORKFLOW_TAGS_DISABLED',
-		doc: 'Disable worfklow tags.',
-	},
-
-	userManagement: {
-		disabled: {
-			doc: 'Disable user management and hide it completely.',
-			format: Boolean,
-			default: false,
-			env: 'N8N_USER_MANAGEMENT_DISABLED',
-		},
-		jwtSecret: {
-			doc: 'Set a specific JWT secret (optional - n8n can generate one)', // Generated @ start.ts
-			format: String,
-			default: '',
-			env: 'N8N_USER_MANAGEMENT_JWT_SECRET',
-		},
-		emails: {
-			mode: {
-				doc: 'How to send emails',
-				format: ['', 'smtp'],
-				default: 'smtp',
-				env: 'N8N_EMAIL_MODE',
-			},
-			smtp: {
-				host: {
-					doc: 'SMTP server host',
-					format: String, // e.g. 'smtp.gmail.com'
-					default: '',
-					env: 'N8N_SMTP_HOST',
-				},
-				port: {
-					doc: 'SMTP server port',
-					format: Number,
-					default: 465,
-					env: 'N8N_SMTP_PORT',
-				},
-				secure: {
-					doc: 'Whether or not to use SSL for SMTP',
-					format: Boolean,
-					default: true,
-					env: 'N8N_SMTP_SSL',
-				},
-				auth: {
-					user: {
-						doc: 'SMTP login username',
-						format: String, // e.g.'you@gmail.com'
-						default: '',
-						env: 'N8N_SMTP_USER',
-					},
-					pass: {
-						doc: 'SMTP login password',
-						format: String,
-						default: '',
-						env: 'N8N_SMTP_PASS',
-					},
-				},
-				sender: {
-					doc: 'How to display sender name',
-					format: String,
-					default: '',
-					env: 'N8N_SMTP_SENDER',
-				},
-			},
-			templates: {
-				invite: {
-					doc: 'Overrides default HTML template for inviting new people (use full path)',
-					format: String,
-					default: '',
-					env: 'N8N_UM_EMAIL_TEMPLATES_INVITE',
-				},
-				passwordReset: {
-					doc: 'Overrides default HTML template for resetting password (use full path)',
-					format: String,
-					default: '',
-					env: 'N8N_UM_EMAIL_TEMPLATES_PWRESET',
-				},
-			},
-		},
-	},
-
-	externalHookFiles: {
-		doc: 'Files containing external hooks. Multiple files can be separated by colon (":")',
-		format: String,
-		default: '',
-		env: 'EXTERNAL_HOOK_FILES',
-	},
-
-	nodes: {
-		include: {
-			doc: 'Nodes to load',
-			format: function check(rawValue) {
-				if (rawValue === '') {
-					return;
-				}
-				try {
-					const values = JSON.parse(rawValue);
-					if (!Array.isArray(values)) {
-						throw new Error();
-					}
-
-					for (const value of values) {
-						if (typeof value !== 'string') {
-							throw new Error();
-						}
-					}
-				} catch (error) {
-					throw new TypeError(`The Nodes to include is not a valid Array of strings.`);
-				}
-			},
-			default: undefined,
-			env: 'NODES_INCLUDE',
-		},
-		exclude: {
-			doc: 'Nodes not to load',
-			format: function check(rawValue) {
-				try {
-					const values = JSON.parse(rawValue);
-					if (!Array.isArray(values)) {
-						throw new Error();
-					}
-
-					for (const value of values) {
-						if (typeof value !== 'string') {
-							throw new Error();
-						}
-					}
-				} catch (error) {
-					throw new TypeError(`The Nodes to exclude is not a valid Array of strings.`);
-				}
-			},
-			default: '[]',
-			env: 'NODES_EXCLUDE',
-		},
-		errorTriggerType: {
-			doc: 'Node Type to use as Error Trigger',
-			format: String,
-			default: 'n8n-nodes-base.errorTrigger',
-			env: 'NODES_ERROR_TRIGGER_TYPE',
-		},
-	},
-
-	logs: {
-		level: {
-			doc: 'Log output level',
-			format: ['error', 'warn', 'info', 'verbose', 'debug', 'silent'],
-			default: 'info',
-			env: 'N8N_LOG_LEVEL',
-		},
-		output: {
-			doc: 'Where to output logs. Options are: console, file. Multiple can be separated by comma (",")',
-			format: String,
-			default: 'console',
-			env: 'N8N_LOG_OUTPUT',
-		},
-		file: {
-			fileCountMax: {
-				doc: 'Maximum number of files to keep.',
-				format: Number,
-				default: 100,
-				env: 'N8N_LOG_FILE_COUNT_MAX',
-			},
-			fileSizeMax: {
-				doc: 'Maximum size for each log file in MB.',
-				format: Number,
-				default: 16,
-				env: 'N8N_LOG_FILE_SIZE_MAX',
-			},
-			location: {
-				doc: 'Log file location; only used if log output is set to file.',
-				format: String,
-				default: path.join(core.UserSettings.getUserN8nFolderPath(), 'logs/n8n.log'),
-				env: 'N8N_LOG_FILE_LOCATION',
-			},
-		},
-	},
-
-	versionNotifications: {
-		enabled: {
-			doc: 'Whether feature is enabled to request notifications about new versions and security updates.',
-			format: Boolean,
-			default: true,
-			env: 'N8N_VERSION_NOTIFICATIONS_ENABLED',
-		},
-		endpoint: {
-			doc: 'Endpoint to retrieve version information from.',
-			format: String,
-			default: 'https://api.n8n.io/versions/',
-			env: 'N8N_VERSION_NOTIFICATIONS_ENDPOINT',
-		},
-		infoUrl: {
-			doc: `Url in New Versions Panel with more information on updating one's instance.`,
-			format: String,
-			default: 'https://docs.n8n.io/getting-started/installation/updating.html',
-			env: 'N8N_VERSION_NOTIFICATIONS_INFO_URL',
-		},
-	},
-
-	templates: {
-		enabled: {
-			doc: 'Whether templates feature is enabled to load workflow templates.',
-			format: Boolean,
-			default: true,
-			env: 'N8N_TEMPLATES_ENABLED',
-		},
-		host: {
-			doc: 'Endpoint host to retrieve workflow templates from endpoints.',
-			format: String,
-			default: 'https://api.n8n.io/',
-			env: 'N8N_TEMPLATES_HOST',
-		},
-	},
-
-	binaryDataManager: {
-		availableModes: {
-			format: String,
-			default: 'filesystem',
-			env: 'N8N_AVAILABLE_BINARY_DATA_MODES',
-			doc: 'Available modes of binary data storage, as comma separated strings',
-		},
-		mode: {
-			format: ['default', 'filesystem'],
-			default: 'default',
-			env: 'N8N_DEFAULT_BINARY_DATA_MODE',
-			doc: 'Storage mode for binary data',
-		},
-		localStoragePath: {
-			format: String,
-			default: path.join(core.UserSettings.getUserN8nFolderPath(), 'binaryData'),
-			env: 'N8N_BINARY_DATA_STORAGE_PATH',
-			doc: 'Path for binary data storage in "filesystem" mode',
-		},
-		binaryDataTTL: {
-			format: Number,
-			default: 60,
-			env: 'N8N_BINARY_DATA_TTL',
-			doc: 'TTL for binary data of unsaved executions in minutes',
-		},
-		persistedBinaryDataTTL: {
-			format: Number,
-			default: 1440,
-			env: 'N8N_PERSISTED_BINARY_DATA_TTL',
-			doc: 'TTL for persisted binary data in minutes (binary data gets deleted if not persisted before TTL expires)',
-		},
-	},
-
-	deployment: {
-		type: {
-			format: String,
-			default: 'default',
-			env: 'N8N_DEPLOYMENT_TYPE',
-		},
-	},
-
-	hiringBanner: {
-		enabled: {
-			doc: 'Whether hiring banner in browser console is enabled.',
-			format: Boolean,
-			default: true,
-			env: 'N8N_HIRING_BANNER_ENABLED',
-		},
-	},
-
-	personalization: {
-		enabled: {
-			doc: 'Whether personalization is enabled.',
-			format: Boolean,
-			default: true,
-			env: 'N8N_PERSONALIZATION_ENABLED',
-		},
-	},
-
-	diagnostics: {
-		enabled: {
-			doc: 'Whether diagnostic mode is enabled.',
-			format: Boolean,
-			default: true,
-			env: 'N8N_DIAGNOSTICS_ENABLED',
-		},
-		config: {
-			frontend: {
-				doc: 'Diagnostics config for frontend.',
-				format: String,
-				default: '1zPn9bgWPzlQc0p8Gj1uiK6DOTn;https://telemetry.n8n.io',
-				env: 'N8N_DIAGNOSTICS_CONFIG_FRONTEND',
-			},
-			backend: {
-				doc: 'Diagnostics config for backend.',
-				format: String,
-				default: '1zPn7YoGC3ZXE9zLeTKLuQCB4F6;https://telemetry.n8n.io/v1/batch',
-				env: 'N8N_DIAGNOSTICS_CONFIG_BACKEND',
-			},
-		},
-	},
-
-	defaultLocale: {
-		doc: 'Default locale for the UI',
-		format: String,
-		default: 'en',
-		env: 'N8N_DEFAULT_LOCALE',
-	},
-});
+config.getEnv = config.get;

 // Overwrite default configuration with settings which got defined in
 // optional configuration files
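config.getEnv = config.get; shows that getEnv is a plain runtime alias; the payoff is purely in the types. With the schema moved to its own module and literal formats narrowed by as const, a declaration for getEnv can map each dotted path to its value type, which is what lets every `as` cast in the command files above disappear. The toy version below illustrates the idea only; it is not n8n's actual type definitions:

import convict from 'convict';

// Literal formats narrowed with `as const`, mirroring schema.ts.
const schema = {
	executions: {
		mode: {
			doc: 'If it should run executions directly or via queue',
			format: ['regular', 'queue'] as const,
			default: 'regular',
			env: 'EXECUTIONS_MODE',
		},
	},
};

const config = convict(schema);

// Hand-rolled typed accessor standing in for the declared getEnv signature.
function getEnv(path: 'executions.mode'): 'regular' | 'queue' {
	return config.get(path) as 'regular' | 'queue';
}

console.log(getEnv('executions.mode')); // 'regular' | 'queue', no cast at call sites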
packages/cli/config/schema.ts (new file): 883 lines

@@ -0,0 +1,883 @@
|
|||
/* eslint-disable no-restricted-syntax */
|
||||
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
|
||||
|
||||
import path from 'path';
|
||||
import * as core from 'n8n-core';
|
||||
|
||||
export const schema = {
|
||||
database: {
|
||||
type: {
|
||||
doc: 'Type of database to use',
|
||||
format: ['sqlite', 'mariadb', 'mysqldb', 'postgresdb'] as const,
|
||||
default: 'sqlite',
|
||||
env: 'DB_TYPE',
|
||||
},
|
||||
tablePrefix: {
|
||||
doc: 'Prefix for table names',
|
||||
format: '*',
|
||||
default: '',
|
||||
env: 'DB_TABLE_PREFIX',
|
||||
},
|
||||
logging: {
|
||||
enabled: {
|
||||
doc: 'Typeorm logging enabled flag.',
|
||||
format: Boolean,
|
||||
default: false,
|
||||
env: 'DB_LOGGING_ENABLED',
|
||||
},
|
||||
options: {
|
||||
doc: 'Logging level options, default is "error". Possible values: query,error,schema,warn,info,log. To enable all logging, specify "all"',
|
||||
format: String,
|
||||
default: 'error',
|
||||
env: 'DB_LOGGING_OPTIONS',
|
||||
},
|
||||
maxQueryExecutionTime: {
|
||||
doc: 'Maximum number of milliseconds query should be executed before logger logs a warning. Set 0 to disable long running query warning',
|
||||
format: Number,
|
||||
default: 1000,
|
||||
env: 'DB_LOGGING_MAX_EXECUTION_TIME',
|
||||
},
|
||||
},
|
||||
postgresdb: {
|
||||
database: {
|
||||
doc: 'PostgresDB Database',
|
||||
format: String,
|
||||
default: 'n8n',
|
||||
env: 'DB_POSTGRESDB_DATABASE',
|
||||
},
|
||||
host: {
|
||||
doc: 'PostgresDB Host',
|
||||
format: String,
|
||||
default: 'localhost',
|
||||
env: 'DB_POSTGRESDB_HOST',
|
||||
},
|
||||
password: {
|
||||
doc: 'PostgresDB Password',
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'DB_POSTGRESDB_PASSWORD',
|
||||
},
|
||||
port: {
|
||||
doc: 'PostgresDB Port',
|
||||
format: Number,
|
||||
default: 5432,
|
||||
env: 'DB_POSTGRESDB_PORT',
|
||||
},
|
||||
user: {
|
||||
doc: 'PostgresDB User',
|
||||
format: String,
|
||||
default: 'root',
|
||||
env: 'DB_POSTGRESDB_USER',
|
||||
},
|
||||
schema: {
|
||||
doc: 'PostgresDB Schema',
|
||||
format: String,
|
||||
default: 'public',
|
||||
env: 'DB_POSTGRESDB_SCHEMA',
|
||||
},
|
||||
|
||||
ssl: {
|
||||
ca: {
|
||||
doc: 'SSL certificate authority',
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'DB_POSTGRESDB_SSL_CA',
|
||||
},
|
||||
cert: {
|
||||
doc: 'SSL certificate',
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'DB_POSTGRESDB_SSL_CERT',
|
||||
},
|
||||
key: {
|
||||
doc: 'SSL key',
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'DB_POSTGRESDB_SSL_KEY',
|
||||
},
|
||||
rejectUnauthorized: {
|
||||
doc: 'If unauthorized SSL connections should be rejected',
|
||||
format: 'Boolean',
|
||||
default: true,
|
||||
env: 'DB_POSTGRESDB_SSL_REJECT_UNAUTHORIZED',
|
||||
},
|
||||
},
|
||||
},
|
||||
mysqldb: {
|
||||
database: {
|
||||
doc: 'MySQL Database',
|
||||
format: String,
|
||||
default: 'n8n',
|
||||
env: 'DB_MYSQLDB_DATABASE',
|
||||
},
|
||||
host: {
|
||||
doc: 'MySQL Host',
|
||||
format: String,
|
||||
default: 'localhost',
|
||||
env: 'DB_MYSQLDB_HOST',
|
||||
},
|
||||
password: {
|
||||
doc: 'MySQL Password',
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'DB_MYSQLDB_PASSWORD',
|
||||
},
|
||||
port: {
|
||||
doc: 'MySQL Port',
|
||||
format: Number,
|
||||
default: 3306,
|
||||
env: 'DB_MYSQLDB_PORT',
|
||||
},
|
||||
user: {
|
||||
doc: 'MySQL User',
|
||||
format: String,
|
||||
default: 'root',
|
||||
env: 'DB_MYSQLDB_USER',
|
||||
},
|
||||
},
|
||||
sqlite: {
|
||||
executeVacuumOnStartup: {
|
||||
doc: 'Runs VACUUM operation on startup to rebuild the database. Reduces filesize and optimizes indexes. WARNING: This is a long running blocking operation. Will increase start-up time.',
|
||||
format: Boolean,
|
||||
default: false,
|
||||
env: 'DB_SQLITE_VACUUM_ON_STARTUP',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
credentials: {
|
||||
overwrite: {
|
||||
data: {
|
||||
// Allows to set default values for credentials which
|
||||
// get automatically prefilled and the user does not get
|
||||
// displayed and can not change.
|
||||
// Format: { CREDENTIAL_NAME: { PARAMTER: VALUE }}
|
||||
doc: 'Overwrites for credentials',
|
||||
format: '*',
|
||||
default: '{}',
|
||||
env: 'CREDENTIALS_OVERWRITE_DATA',
|
||||
},
|
||||
endpoint: {
|
||||
doc: 'Fetch credentials from API',
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'CREDENTIALS_OVERWRITE_ENDPOINT',
|
||||
},
|
||||
},
|
||||
defaultName: {
|
||||
doc: 'Default name for credentials',
|
||||
format: String,
|
||||
default: 'My credentials',
|
||||
env: 'CREDENTIALS_DEFAULT_NAME',
|
||||
},
|
||||
},
|
||||
|
||||
workflows: {
|
||||
defaultName: {
|
||||
doc: 'Default name for workflow',
|
||||
format: String,
|
||||
default: 'My workflow',
|
||||
env: 'WORKFLOWS_DEFAULT_NAME',
|
||||
},
|
||||
},
|
||||
|
||||
executions: {
|
||||
// By default workflows get always executed in their own process.
|
||||
// If this option gets set to "main" it will run them in the
|
||||
// main-process instead.
|
||||
process: {
|
||||
doc: 'In what process workflows should be executed',
|
||||
format: ['main', 'own'] as const,
|
||||
default: 'own',
|
||||
env: 'EXECUTIONS_PROCESS',
|
||||
},
|
||||
|
||||
mode: {
|
||||
doc: 'If it should run executions directly or via queue',
|
||||
format: ['regular', 'queue'] as const,
|
||||
default: 'regular',
|
||||
env: 'EXECUTIONS_MODE',
|
||||
},
|
||||
|
||||
// A Workflow times out and gets canceled after this time (seconds).
|
||||
// If the workflow is executed in the main process a soft timeout
|
||||
// is executed (takes effect after the current node finishes).
|
||||
// If a workflow is running in its own process is a soft timeout
|
||||
// tried first, before killing the process after waiting for an
|
||||
// additional fifth of the given timeout duration.
|
||||
//
|
||||
// To deactivate timeout set it to -1
|
||||
//
|
||||
// Timeout is currently not activated by default which will change
|
||||
// in a future version.
|
||||
timeout: {
|
||||
doc: 'Max run time (seconds) before stopping the workflow execution',
|
||||
format: Number,
|
||||
default: -1,
|
||||
env: 'EXECUTIONS_TIMEOUT',
|
||||
},
|
||||
maxTimeout: {
|
||||
doc: 'Max execution time (seconds) that can be set for a workflow individually',
|
||||
format: Number,
|
||||
default: 3600,
|
||||
env: 'EXECUTIONS_TIMEOUT_MAX',
|
||||
},
|
||||
|
||||
// If a workflow executes all the data gets saved by default. This
|
||||
// could be a problem when a workflow gets executed a lot and processes
|
||||
// a lot of data. To not exceed the database's capacity it is possible to
|
||||
// prune the database regularly or to not save the execution at all.
|
||||
// Depending on if the execution did succeed or error a different
|
||||
// save behaviour can be set.
|
||||
saveDataOnError: {
|
||||
doc: 'What workflow execution data to save on error',
|
||||
format: ['all', 'none'] as const,
|
||||
default: 'all',
|
||||
env: 'EXECUTIONS_DATA_SAVE_ON_ERROR',
|
||||
},
|
||||
saveDataOnSuccess: {
|
||||
doc: 'What workflow execution data to save on success',
|
||||
format: ['all', 'none'] as const,
|
||||
default: 'all',
|
||||
env: 'EXECUTIONS_DATA_SAVE_ON_SUCCESS',
|
||||
},
|
||||
saveExecutionProgress: {
|
||||
doc: 'Wether or not to save progress for each node executed',
|
||||
format: 'Boolean',
|
||||
default: false,
|
||||
env: 'EXECUTIONS_DATA_SAVE_ON_PROGRESS',
|
||||
},
|
||||
|
||||
// If the executions of workflows which got started via the editor
|
||||
// should be saved. By default they will not be saved as this runs
|
||||
// are normally only for testing and debugging. This setting can
|
||||
// also be overwritten on a per workflow basis in the workflow settings
|
||||
// in the editor.
|
||||
saveDataManualExecutions: {
|
||||
doc: 'Save data of executions when started manually via editor',
|
||||
format: 'Boolean',
|
||||
default: false,
|
||||
env: 'EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS',
|
||||
},
|
||||
|
||||
// To not exceed the database's capacity and keep its size moderate
|
||||
// the execution data gets pruned regularly (default: 1 hour interval).
|
||||
// All saved execution data older than the max age will be deleted.
|
||||
// Pruning is currently not activated by default, which will change in
|
||||
// a future version.
|
||||
pruneData: {
|
||||
doc: 'Delete data of past executions on a rolling basis',
|
||||
format: 'Boolean',
|
||||
default: false,
|
||||
env: 'EXECUTIONS_DATA_PRUNE',
|
||||
},
|
||||
pruneDataMaxAge: {
|
||||
doc: 'How old (hours) the execution data has to be to get deleted',
|
||||
format: Number,
|
||||
default: 336,
|
||||
env: 'EXECUTIONS_DATA_MAX_AGE',
|
||||
},
|
||||
pruneDataTimeout: {
|
||||
doc: 'Timeout (seconds) after execution data has been pruned',
|
||||
format: Number,
|
||||
default: 3600,
|
||||
env: 'EXECUTIONS_DATA_PRUNE_TIMEOUT',
|
||||
},
|
||||
},
|
||||
|
||||
queue: {
|
||||
health: {
|
||||
active: {
|
||||
doc: 'If health checks should be enabled',
|
||||
format: 'Boolean',
|
||||
default: false,
|
||||
env: 'QUEUE_HEALTH_CHECK_ACTIVE',
|
||||
},
|
||||
port: {
|
||||
doc: 'Port to serve health check on if activated',
|
||||
format: Number,
|
||||
default: 5678,
|
||||
env: 'QUEUE_HEALTH_CHECK_PORT',
|
||||
},
|
||||
},
|
||||
bull: {
|
||||
prefix: {
|
||||
doc: 'Prefix for all queue keys',
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'QUEUE_BULL_PREFIX',
|
||||
},
|
||||
redis: {
|
||||
db: {
|
||||
doc: 'Redis DB',
|
||||
format: Number,
|
||||
default: 0,
|
||||
env: 'QUEUE_BULL_REDIS_DB',
|
||||
},
|
||||
host: {
|
||||
doc: 'Redis Host',
|
||||
format: String,
|
||||
default: 'localhost',
|
||||
env: 'QUEUE_BULL_REDIS_HOST',
|
||||
},
|
||||
password: {
|
||||
doc: 'Redis Password',
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'QUEUE_BULL_REDIS_PASSWORD',
|
||||
},
|
||||
port: {
|
||||
doc: 'Redis Port',
|
||||
format: Number,
|
||||
default: 6379,
|
||||
env: 'QUEUE_BULL_REDIS_PORT',
|
||||
},
|
||||
timeoutThreshold: {
|
||||
doc: 'Redis timeout threshold',
|
||||
format: Number,
|
||||
default: 10000,
|
||||
env: 'QUEUE_BULL_REDIS_TIMEOUT_THRESHOLD',
|
||||
},
|
||||
},
|
||||
queueRecoveryInterval: {
|
||||
doc: 'If > 0 enables an active polling to the queue that can recover for Redis crashes. Given in seconds; 0 is disabled. May increase Redis traffic significantly.',
|
||||
format: Number,
|
||||
default: 60,
|
||||
env: 'QUEUE_RECOVERY_INTERVAL',
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
generic: {
|
||||
// The timezone to use. Is important for nodes like "Cron" which start the
|
||||
// workflow automatically at a specified time. This setting can also be
|
||||
// overwritten on a per worfklow basis in the workflow settings in the
|
||||
// editor.
|
||||
timezone: {
|
||||
doc: 'The timezone to use',
|
||||
format: '*',
|
||||
default: 'America/New_York',
|
||||
env: 'GENERIC_TIMEZONE',
|
||||
},
|
||||
},
|
||||
|
||||
// How n8n can be reached (Editor & REST-API)
|
||||
path: {
|
||||
format: String,
|
||||
default: '/',
|
||||
arg: 'path',
|
||||
env: 'N8N_PATH',
|
||||
doc: 'Path n8n is deployed to',
|
||||
},
|
||||
host: {
|
||||
format: String,
|
||||
default: 'localhost',
|
||||
arg: 'host',
|
||||
env: 'N8N_HOST',
|
||||
doc: 'Host name n8n can be reached',
|
||||
},
|
||||
port: {
|
||||
format: Number,
|
||||
default: 5678,
|
||||
arg: 'port',
|
||||
env: 'N8N_PORT',
|
||||
doc: 'HTTP port n8n can be reached',
|
||||
},
|
||||
listen_address: {
|
||||
format: String,
|
||||
default: '0.0.0.0',
|
||||
env: 'N8N_LISTEN_ADDRESS',
|
||||
doc: 'IP address n8n should listen on',
|
||||
},
|
||||
protocol: {
|
||||
format: ['http', 'https'] as const,
|
||||
default: 'http',
|
||||
env: 'N8N_PROTOCOL',
|
||||
doc: 'HTTP Protocol via which n8n can be reached',
|
||||
},
|
||||
ssl_key: {
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'N8N_SSL_KEY',
|
||||
doc: 'SSL Key for HTTPS Protocol',
|
||||
},
|
||||
ssl_cert: {
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'N8N_SSL_CERT',
|
||||
doc: 'SSL Cert for HTTPS Protocol',
|
||||
},
|
||||
editorBaseUrl: {
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'N8N_EDITOR_BASE_URL',
|
||||
doc: 'Public URL where the editor is accessible. Also used for emails sent from n8n.',
|
||||
},
|
||||
|
||||
	security: {
		excludeEndpoints: {
			doc: 'Additional endpoints to exclude auth checks. Multiple endpoints can be separated by colon (":")',
			format: String,
			default: '',
			env: 'N8N_AUTH_EXCLUDE_ENDPOINTS',
		},
		basicAuth: {
			active: {
				format: 'Boolean',
				default: false,
				env: 'N8N_BASIC_AUTH_ACTIVE',
				doc: 'If basic auth should be activated for editor and REST-API',
			},
			user: {
				format: String,
				default: '',
				env: 'N8N_BASIC_AUTH_USER',
				doc: 'The name of the basic auth user',
			},
			password: {
				format: String,
				default: '',
				env: 'N8N_BASIC_AUTH_PASSWORD',
				doc: 'The password of the basic auth user',
			},
			hash: {
				format: 'Boolean',
				default: false,
				env: 'N8N_BASIC_AUTH_HASH',
				doc: 'If password for basic auth is hashed',
			},
		},
		jwtAuth: {
			active: {
				format: 'Boolean',
				default: false,
				env: 'N8N_JWT_AUTH_ACTIVE',
				doc: 'If JWT auth should be activated for editor and REST-API',
			},
			jwtHeader: {
				format: String,
				default: '',
				env: 'N8N_JWT_AUTH_HEADER',
				doc: 'The request header containing a signed JWT',
			},
			jwtHeaderValuePrefix: {
				format: String,
				default: '',
				env: 'N8N_JWT_AUTH_HEADER_VALUE_PREFIX',
				doc: 'The request header value prefix to strip (optional)',
			},
			jwksUri: {
				format: String,
				default: '',
				env: 'N8N_JWKS_URI',
				doc: 'The URI to fetch JWK Set for JWT authentication',
			},
			jwtIssuer: {
				format: String,
				default: '',
				env: 'N8N_JWT_ISSUER',
				doc: 'JWT issuer to expect (optional)',
			},
			jwtNamespace: {
				format: String,
				default: '',
				env: 'N8N_JWT_NAMESPACE',
				doc: 'JWT namespace to expect (optional)',
			},
			jwtAllowedTenantKey: {
				format: String,
				default: '',
				env: 'N8N_JWT_ALLOWED_TENANT_KEY',
				doc: 'JWT tenant key name to inspect within JWT namespace (optional)',
			},
			jwtAllowedTenant: {
				format: String,
				default: '',
				env: 'N8N_JWT_ALLOWED_TENANT',
				doc: 'JWT tenant to allow (optional)',
			},
		},
	},

	endpoints: {
		payloadSizeMax: {
			format: Number,
			default: 16,
			env: 'N8N_PAYLOAD_SIZE_MAX',
			doc: 'Maximum payload size in MB.',
		},
		metrics: {
			enable: {
				format: 'Boolean',
				default: false,
				env: 'N8N_METRICS',
				doc: 'Enable metrics endpoint',
			},
			prefix: {
				format: String,
				default: 'n8n_',
				env: 'N8N_METRICS_PREFIX',
				doc: 'An optional prefix for metric names. Default: n8n_',
			},
		},
		rest: {
			format: String,
			default: 'rest',
			env: 'N8N_ENDPOINT_REST',
			doc: 'Path for rest endpoint',
		},
		webhook: {
			format: String,
			default: 'webhook',
			env: 'N8N_ENDPOINT_WEBHOOK',
			doc: 'Path for webhook endpoint',
		},
		webhookWaiting: {
			format: String,
			default: 'webhook-waiting',
			env: 'N8N_ENDPOINT_WEBHOOK_WAIT',
			doc: 'Path for waiting-webhook endpoint',
		},
		webhookTest: {
			format: String,
			default: 'webhook-test',
			env: 'N8N_ENDPOINT_WEBHOOK_TEST',
			doc: 'Path for test-webhook endpoint',
		},
		disableUi: {
			format: Boolean,
			default: false,
			env: 'N8N_DISABLE_UI',
			doc: 'Disable n8n UI (frontend).',
		},
		disableProductionWebhooksOnMainProcess: {
			format: Boolean,
			default: false,
			env: 'N8N_DISABLE_PRODUCTION_MAIN_PROCESS',
			doc: 'Disable production webhooks from main process. This helps ensure no HTTP traffic load on the main process when using webhook-specific processes.',
		},
		skipWebhoooksDeregistrationOnShutdown: {
			/**
			 * Longer explanation: n8n deregisters webhooks on shutdown / deactivation
			 * and registers on startup / activation. If we skip
			 * deactivation on shutdown, webhooks will remain active on 3rd party services.
			 * We don't have to worry about startup as it always
			 * checks if webhooks already exist.
			 * If users want to upgrade n8n, it is possible to run
			 * two instances simultaneously without downtime, similar
			 * to blue/green deployment.
			 * WARNING: Trigger nodes (like Cron) will cause duplication
			 * of work, so be aware when using.
			 */
			doc: 'Deregister webhooks on external services only when workflows are deactivated.',
			format: Boolean,
			default: false,
			env: 'N8N_SKIP_WEBHOOK_DEREGISTRATION_SHUTDOWN',
		},
	},

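	// Illustrative note: the endpoint paths above are mounted under the base URL built
	// from the server block, e.g. a production webhook lives at a URL of the shape
	// {protocol}://{host}:{port}/{webhook}/<webhook-path> (the path shape is assumed here).
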
	workflowTagsDisabled: {
		format: Boolean,
		default: false,
		env: 'N8N_WORKFLOW_TAGS_DISABLED',
		doc: 'Disable workflow tags.',
	},

	userManagement: {
		disabled: {
			doc: 'Disable user management and hide it completely.',
			format: Boolean,
			default: false,
			env: 'N8N_USER_MANAGEMENT_DISABLED',
		},
		jwtSecret: {
			doc: 'Set a specific JWT secret (optional - n8n can generate one)', // Generated @ start.ts
			format: String,
			default: '',
			env: 'N8N_USER_MANAGEMENT_JWT_SECRET',
		},
		emails: {
			mode: {
				doc: 'How to send emails',
				format: ['', 'smtp'] as const,
				default: 'smtp',
				env: 'N8N_EMAIL_MODE',
			},
			smtp: {
				host: {
					doc: 'SMTP server host',
					format: String, // e.g. 'smtp.gmail.com'
					default: '',
					env: 'N8N_SMTP_HOST',
				},
				port: {
					doc: 'SMTP server port',
					format: Number,
					default: 465,
					env: 'N8N_SMTP_PORT',
				},
				secure: {
					doc: 'Whether or not to use SSL for SMTP',
					format: Boolean,
					default: true,
					env: 'N8N_SMTP_SSL',
				},
				auth: {
					user: {
						doc: 'SMTP login username',
						format: String, // e.g. 'you@gmail.com'
						default: '',
						env: 'N8N_SMTP_USER',
					},
					pass: {
						doc: 'SMTP login password',
						format: String,
						default: '',
						env: 'N8N_SMTP_PASS',
					},
				},
				sender: {
					doc: 'How to display sender name',
					format: String,
					default: '',
					env: 'N8N_SMTP_SENDER',
				},
			},
			templates: {
				invite: {
					doc: 'Overrides default HTML template for inviting new people (use full path)',
					format: String,
					default: '',
					env: 'N8N_UM_EMAIL_TEMPLATES_INVITE',
				},
				passwordReset: {
					doc: 'Overrides default HTML template for resetting password (use full path)',
					format: String,
					default: '',
					env: 'N8N_UM_EMAIL_TEMPLATES_PWRESET',
				},
			},
		},
	},

	externalHookFiles: {
		doc: 'Files containing external hooks. Multiple files can be separated by colon (":")',
		format: String,
		default: '',
		env: 'EXTERNAL_HOOK_FILES',
	},

	nodes: {
		include: {
			doc: 'Nodes to load',
			format: function check(rawValue: string): void {
				if (rawValue === '') {
					return;
				}
				try {
					const values = JSON.parse(rawValue);
					if (!Array.isArray(values)) {
						throw new Error();
					}

					for (const value of values) {
						if (typeof value !== 'string') {
							throw new Error();
						}
					}
				} catch (error) {
					throw new TypeError(`Nodes to include must be a valid array of strings.`);
				}
			},
			default: undefined,
			env: 'NODES_INCLUDE',
		},
		exclude: {
			doc: 'Nodes not to load',
			format: function check(rawValue: string): void {
				try {
					const values = JSON.parse(rawValue);
					if (!Array.isArray(values)) {
						throw new Error();
					}

					for (const value of values) {
						if (typeof value !== 'string') {
							throw new Error();
						}
					}
				} catch (error) {
					throw new TypeError(`Nodes to exclude must be a valid array of strings.`);
				}
			},
			default: '[]',
			env: 'NODES_EXCLUDE',
		},
		errorTriggerType: {
			doc: 'Node Type to use as Error Trigger',
			format: String,
			default: 'n8n-nodes-base.errorTrigger',
			env: 'NODES_ERROR_TRIGGER_TYPE',
		},
	},

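	// Illustrative values for the two validators above (node names are assumptions;
	// both variables expect a JSON array of strings):
	//   NODES_INCLUDE='["n8n-nodes-base.httpRequest"]'
	//   NODES_EXCLUDE='["n8n-nodes-base.executeCommand"]'
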
	logs: {
		level: {
			doc: 'Log output level',
			format: ['error', 'warn', 'info', 'verbose', 'debug', 'silent'] as const,
			default: 'info',
			env: 'N8N_LOG_LEVEL',
		},
		output: {
			doc: 'Where to output logs. Options are: console, file. Multiple can be separated by comma (",")',
			format: String,
			default: 'console',
			env: 'N8N_LOG_OUTPUT',
		},
		file: {
			fileCountMax: {
				doc: 'Maximum number of files to keep.',
				format: Number,
				default: 100,
				env: 'N8N_LOG_FILE_COUNT_MAX',
			},
			fileSizeMax: {
				doc: 'Maximum size for each log file in MB.',
				format: Number,
				default: 16,
				env: 'N8N_LOG_FILE_SIZE_MAX',
			},
			location: {
				doc: 'Log file location; only used if log output is set to file.',
				format: String,
				default: path.join(core.UserSettings.getUserN8nFolderPath(), 'logs/n8n.log'),
				env: 'N8N_LOG_FILE_LOCATION',
			},
		},
	},

	versionNotifications: {
		enabled: {
			doc: 'Whether to request notifications about new versions and security updates.',
			format: Boolean,
			default: true,
			env: 'N8N_VERSION_NOTIFICATIONS_ENABLED',
		},
		endpoint: {
			doc: 'Endpoint to retrieve version information from.',
			format: String,
			default: 'https://api.n8n.io/versions/',
			env: 'N8N_VERSION_NOTIFICATIONS_ENDPOINT',
		},
		infoUrl: {
			doc: `URL in the New Versions panel with more information on updating one's instance.`,
			format: String,
			default: 'https://docs.n8n.io/getting-started/installation/updating.html',
			env: 'N8N_VERSION_NOTIFICATIONS_INFO_URL',
		},
	},

	templates: {
		enabled: {
			doc: 'Whether the templates feature is enabled to load workflow templates.',
			format: Boolean,
			default: true,
			env: 'N8N_TEMPLATES_ENABLED',
		},
		host: {
			doc: 'Host of the endpoint to retrieve workflow templates from.',
			format: String,
			default: 'https://api.n8n.io/',
			env: 'N8N_TEMPLATES_HOST',
		},
	},

	binaryDataManager: {
		availableModes: {
			format: String,
			default: 'filesystem',
			env: 'N8N_AVAILABLE_BINARY_DATA_MODES',
			doc: 'Available modes of binary data storage, as comma separated strings',
		},
		mode: {
			format: ['default', 'filesystem'] as const,
			default: 'default',
			env: 'N8N_DEFAULT_BINARY_DATA_MODE',
			doc: 'Storage mode for binary data',
		},
		localStoragePath: {
			format: String,
			default: path.join(core.UserSettings.getUserN8nFolderPath(), 'binaryData'),
			env: 'N8N_BINARY_DATA_STORAGE_PATH',
			doc: 'Path for binary data storage in "filesystem" mode',
		},
		binaryDataTTL: {
			format: Number,
			default: 60,
			env: 'N8N_BINARY_DATA_TTL',
			doc: 'TTL for binary data of unsaved executions in minutes',
		},
		persistedBinaryDataTTL: {
			format: Number,
			default: 1440,
			env: 'N8N_PERSISTED_BINARY_DATA_TTL',
			doc: 'TTL for persisted binary data in minutes (binary data gets deleted if not persisted before TTL expires)',
		},
	},

	deployment: {
		type: {
			format: String,
			default: 'default',
			env: 'N8N_DEPLOYMENT_TYPE',
		},
	},

	hiringBanner: {
		enabled: {
			doc: 'Whether the hiring banner in the browser console is enabled.',
			format: Boolean,
			default: true,
			env: 'N8N_HIRING_BANNER_ENABLED',
		},
	},

	personalization: {
		enabled: {
			doc: 'Whether personalization is enabled.',
			format: Boolean,
			default: true,
			env: 'N8N_PERSONALIZATION_ENABLED',
		},
	},

	diagnostics: {
		enabled: {
			doc: 'Whether diagnostic mode is enabled.',
			format: Boolean,
			default: true,
			env: 'N8N_DIAGNOSTICS_ENABLED',
		},
		config: {
			frontend: {
				doc: 'Diagnostics config for frontend.',
				format: String,
				default: '1zPn9bgWPzlQc0p8Gj1uiK6DOTn;https://telemetry.n8n.io',
				env: 'N8N_DIAGNOSTICS_CONFIG_FRONTEND',
			},
			backend: {
				doc: 'Diagnostics config for backend.',
				format: String,
				default: '1zPn7YoGC3ZXE9zLeTKLuQCB4F6;https://telemetry.n8n.io/v1/batch',
				env: 'N8N_DIAGNOSTICS_CONFIG_BACKEND',
			},
		},
	},

	defaultLocale: {
		doc: 'Default locale for the UI',
		format: String,
		default: 'en',
		env: 'N8N_DEFAULT_LOCALE',
	},
};
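A minimal sketch of how a schema like the one above is consumed, assuming convict's
standard API and that getEnv is aliased to convict's get at runtime (the module
augmentation in the new types.d.ts below only adds the typing):

import convict from 'convict';
import { schema } from './schema';

const config = convict(schema);
config.validate({ allowed: 'strict' });

// With N8N_PORT=8080 exported, convict overrides the default of 5678:
const port = config.getEnv('port'); // typed as number via types.d.ts

export default config;
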
132
packages/cli/config/types.d.ts
vendored
Normal file

@@ -0,0 +1,132 @@
/* eslint-disable @typescript-eslint/no-unused-vars */

import { IBinaryDataConfig } from '../../core/dist/src';
import { schema } from './schema';

// -----------------------------------
//          transformers
// -----------------------------------

/**
 * Transform an object (convict schema) into a union of string arrays (path segments),
 * one for every valid path in the schema object, filtered by type.
 *
 * ```ts
 * ["port", "default"] | ["queue", "bull", "redis", "port", "default"] | ["queue", "bull", "redis", "db", "default"] | ["queue", "bull", "redis", "timeoutThreshold", "default"] | etc
 * ```
 */
type GetPathSegments<Traversable, Filter> = Traversable extends Filter
	? []
	: {
			[K in ValidKeys<Traversable>]: [K, ...GetPathSegments<Traversable[K], Filter>];
	  }[ValidKeys<Traversable>];

/**
 * Transform a union of string arrays (path segments) into a union of strings (dotted paths).
 *
 * ```ts
 * "port" | "queue.bull.redis.port" | "queue.bull.redis.db" | "queue.bull.redis.timeoutThreshold" | etc
 * ```
 */
type JoinByDotting<T extends string[]> = T extends [infer F]
	? F
	: T extends [infer F, ...infer R]
	? F extends string
		? R extends string[]
			? `${F}.${JoinByDotting<R>}`
			: never
		: never
	: string;

type ToDottedPath<T> = JoinByDotting<RemoveExcess<T>>;

type CollectPathsByType<T> = ToDottedPath<GetPathSegments<typeof schema, T>>;

// -----------------------------------
//    path-to-return-type mapper
// -----------------------------------

type NumericPath = CollectPathsByType<number>;

type BooleanPath = CollectPathsByType<boolean>;

type StringLiteralArrayPath = CollectPathsByType<Readonly<string[]>>;

type StringPath = CollectPathsByType<string>;

type ConfigOptionPath =
	| NumericPath
	| BooleanPath
	| StringPath
	| StringLiteralArrayPath
	| keyof ExceptionPaths;

type ToReturnType<T extends ConfigOptionPath> = T extends NumericPath
	? number
	: T extends BooleanPath
	? boolean
	: T extends StringLiteralArrayPath
	? StringLiteralMap[T]
	: T extends keyof ExceptionPaths
	? ExceptionPaths[T]
	: T extends StringPath
	? string
	: unknown;

type ExceptionPaths = {
	'queue.bull.redis': object;
	binaryDataManager: IBinaryDataConfig;
	'nodes.include': undefined;
	'userManagement.isInstanceOwnerSetUp': boolean;
	'userManagement.skipInstanceOwnerSetup': boolean;
};

// -----------------------------------
//      string literals map
// -----------------------------------

type GetPathSegmentsWithUnions<T> = T extends ReadonlyArray<infer C>
	? [C]
	: {
			[K in ValidKeys<T>]: [K, ...GetPathSegmentsWithUnions<T[K]>];
	  }[ValidKeys<T>];

type ToPathUnionPair<T extends string[]> = T extends [...infer Path, infer Union]
	? Path extends string[]
		? { path: ToDottedPath<Path>; union: Union }
		: never
	: never;

type ToStringLiteralMap<T extends { path: string; union: string }> = {
	[Path in T['path']]: Extract<T, { path: Path }>['union'];
};

type StringLiteralMap = ToStringLiteralMap<
	ToPathUnionPair<GetPathSegmentsWithUnions<typeof schema>>
>;

// -----------------------------------
//             utils
// -----------------------------------

type ValidKeys<T> = keyof T extends string
	? keyof T extends keyof NumberConstructor
		? never
		: keyof T
	: never;

type RemoveExcess<T> = T extends [...infer Path, 'format' | 'default']
	? Path extends string[]
		? Path
		: never
	: never;

// -----------------------------------
//      module augmentation
// -----------------------------------

declare module 'convict' {
	interface Config<T> {
		getEnv<Path extends ConfigOptionPath>(path: Path): ToReturnType<Path>;
	}
}
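A type-level sketch of what the augmentation above buys at call sites (the paths
come from the schema shown earlier; nothing here runs at runtime):

import config from '../config';

const port = config.getEnv('port'); // inferred as number
const protocol = config.getEnv('protocol'); // inferred as 'http' | 'https'
const binaryData = config.getEnv('binaryDataManager'); // IBinaryDataConfig via ExceptionPaths
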

@@ -1,4 +1,4 @@
import * as path from 'path';
import path from 'path';
import { UserSettings } from 'n8n-core';
import { entities } from '../src/databases/entities';

@@ -1,6 +1,6 @@
{
	"name": "n8n",
	"version": "0.171.0",
	"version": "0.171.1",
	"description": "n8n Workflow Automation Tool",
	"license": "SEE LICENSE IN LICENSE.md",
	"homepage": "https://n8n.io",

@@ -31,8 +31,8 @@
	"start:windows": "cd bin && n8n",
	"test": "npm run test:sqlite",
	"test:sqlite": "export N8N_LOG_LEVEL='silent'; export DB_TYPE=sqlite; jest",
	"test:postgres": "export DB_TYPE=postgresdb && jest",
	"test:mysql": "export DB_TYPE=mysqldb && jest",
	"test:postgres": "export N8N_LOG_LEVEL='silent'; export DB_TYPE=postgresdb && jest",
	"test:mysql": "export N8N_LOG_LEVEL='silent'; export DB_TYPE=mysqldb && jest",
	"watch": "tsc --watch",
	"typeorm": "ts-node ../../node_modules/typeorm/cli.js"
},

@@ -89,7 +89,7 @@
	"ts-jest": "^27.1.3",
	"ts-node": "^8.9.1",
	"tslint": "^6.1.2",
	"typescript": "~4.3.5"
	"typescript": "~4.6.0"
},
"dependencies": {
	"@oclif/command": "^1.5.18",

@@ -127,7 +127,7 @@
	"mysql2": "~2.3.0",
	"n8n-core": "~0.112.0",
	"n8n-editor-ui": "~0.138.0",
	"n8n-nodes-base": "~0.169.0",
	"n8n-nodes-base": "~0.169.1",
	"n8n-workflow": "~0.94.0",
	"nodemailer": "^6.7.1",
	"oauth-1.0a": "^2.2.6",

@@ -15,7 +15,7 @@ import {
import { ChildProcess } from 'child_process';
import { stringify } from 'flatted';
// eslint-disable-next-line import/no-extraneous-dependencies
import * as PCancelable from 'p-cancelable';
import PCancelable from 'p-cancelable';
// eslint-disable-next-line import/no-cycle
import {
	Db,

@@ -30,7 +30,7 @@ import {
	LoggerProxy as Logger,
} from 'n8n-workflow';

import * as express from 'express';
import express from 'express';

// eslint-disable-next-line import/no-cycle
import {

@@ -48,7 +48,7 @@ import {
	WorkflowRunner,
	ExternalHooks,
} from '.';
import config = require('../config');
import config from '../config';
import { User } from './databases/entities/User';
import { whereClause } from './WorkflowHelpers';
import { WorkflowEntity } from './databases/entities/WorkflowEntity';

@@ -74,7 +74,7 @@ export class ActiveWorkflowRunner {
	relations: ['shared', 'shared.user', 'shared.user.globalRole'],
})) as IWorkflowDb[];

if (!config.get('endpoints.skipWebhoooksDeregistrationOnShutdown')) {
if (!config.getEnv('endpoints.skipWebhoooksDeregistrationOnShutdown')) {
	// Do not clean up database when skip registration is done.
	// This flag is set when n8n is running in scaled mode.
	// Impact is minimal, but for a short while, n8n will stop accepting requests.

@@ -466,7 +466,7 @@ export class ActiveWorkflowRunner {
} catch (error) {
	if (
		activation === 'init' &&
		config.get('endpoints.skipWebhoooksDeregistrationOnShutdown') &&
		config.getEnv('endpoints.skipWebhoooksDeregistrationOnShutdown') &&
		error.name === 'QueryFailedError'
	) {
		// When skipWebhoooksDeregistrationOnShutdown is enabled,

@@ -79,6 +79,7 @@ export class CredentialsHelper extends ICredentialsHelper {
	incomingRequestOptions: IHttpRequestOptions | IRequestOptionsSimplified,
	workflow: Workflow,
	node: INode,
	defaultTimezone: string,
): Promise<IHttpRequestOptions> {
	const requestOptions = incomingRequestOptions;
	const credentialType = this.credentialTypes.getByName(typeName);

@@ -127,6 +128,7 @@ export class CredentialsHelper extends ICredentialsHelper {
	{ $credentials: credentials },
	workflow,
	node,
	defaultTimezone,
);

const value = this.resolveValue(

@@ -135,6 +137,7 @@ export class CredentialsHelper extends ICredentialsHelper {
	{ $credentials: credentials },
	workflow,
	node,
	defaultTimezone,
);
requestOptions.headers[key] = value;
} else if (authenticate.type === 'queryAuth') {

@@ -144,6 +147,7 @@ export class CredentialsHelper extends ICredentialsHelper {
	{ $credentials: credentials },
	workflow,
	node,
	defaultTimezone,
);

const value = this.resolveValue(

@@ -152,6 +156,7 @@ export class CredentialsHelper extends ICredentialsHelper {
	{ $credentials: credentials },
	workflow,
	node,
	defaultTimezone,
);
if (!requestOptions.qs) {
	requestOptions.qs = {};

@@ -172,6 +177,7 @@ export class CredentialsHelper extends ICredentialsHelper {
	additionalKeys: IWorkflowDataProxyAdditionalKeys,
	workflow: Workflow,
	node: INode,
	defaultTimezone: string,
): string {
	if (parameterValue.charAt(0) !== '=') {
		return parameterValue;

@@ -181,6 +187,7 @@ export class CredentialsHelper extends ICredentialsHelper {
	node,
	parameterValue,
	'internal',
	defaultTimezone,
	additionalKeys,
	'',
);

@@ -293,6 +300,7 @@ export class CredentialsHelper extends ICredentialsHelper {
	nodeCredentials: INodeCredentialsDetails,
	type: string,
	mode: WorkflowExecuteMode,
	defaultTimezone: string,
	raw?: boolean,
	expressionResolveValues?: ICredentialsExpressionResolveValues,
): Promise<ICredentialDataDecryptedObject> {

@@ -307,6 +315,7 @@ export class CredentialsHelper extends ICredentialsHelper {
	decryptedDataOriginal,
	type,
	mode,
	defaultTimezone,
	expressionResolveValues,
);
}

@@ -323,6 +332,7 @@ export class CredentialsHelper extends ICredentialsHelper {
	decryptedDataOriginal: ICredentialDataDecryptedObject,
	type: string,
	mode: WorkflowExecuteMode,
	defaultTimezone: string,
	expressionResolveValues?: ICredentialsExpressionResolveValues,
): ICredentialDataDecryptedObject {
	const credentialsProperties = this.getCredentialsProperties(type);

@@ -342,14 +352,11 @@ export class CredentialsHelper extends ICredentialsHelper {
}

if (expressionResolveValues) {
	const timezone =
		(expressionResolveValues.workflow.settings.timezone as string) || defaultTimezone;

	try {
		const workflow = new Workflow({
			nodes: Object.values(expressionResolveValues.workflow.nodes),
			connections: expressionResolveValues.workflow.connectionsBySourceNode,
			active: false,
			nodeTypes: expressionResolveValues.workflow.nodeTypes,
		});
		decryptedData = workflow.expression.getParameterValue(
		decryptedData = expressionResolveValues.workflow.expression.getParameterValue(
			decryptedData as INodeParameters,
			expressionResolveValues.runExecutionData,
			expressionResolveValues.runIndex,

@@ -357,6 +364,7 @@ export class CredentialsHelper extends ICredentialsHelper {
	expressionResolveValues.node.name,
	expressionResolveValues.connectionInputData,
	mode,
	timezone,
	{},
	false,
	decryptedData,

@@ -387,6 +395,7 @@ export class CredentialsHelper extends ICredentialsHelper {
	node,
	decryptedData as INodeParameters,
	mode,
	defaultTimezone,
	{},
	undefined,
	decryptedData,

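A hypothetical call site for the updated signature of getDecrypted above, showing
where the newly threaded defaultTimezone comes from (the credential type name is
illustrative):

const timezone = config.getEnv('generic.timezone');
const decrypted = await credentialsHelper.getDecrypted(
	nodeCredentials, // INodeCredentialsDetails
	'httpBasicAuth', // illustrative credential type
	'internal', // WorkflowExecuteMode
	timezone, // now passed through to expression resolution
	true, // raw
);
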
@@ -15,11 +15,11 @@ import {
	Repository,
} from 'typeorm';
import { TlsOptions } from 'tls';
import * as path from 'path';
import path from 'path';
// eslint-disable-next-line import/no-cycle
import { DatabaseType, GenericHelpers, IDatabaseCollections } from '.';

import * as config from '../config';
import config from '../config';

// eslint-disable-next-line import/no-cycle
import { entities } from './databases/entities';

@@ -59,7 +59,7 @@ export async function init(

let connectionOptions: ConnectionOptions;

const entityPrefix = config.get('database.tablePrefix');
const entityPrefix = config.getEnv('database.tablePrefix');

if (testConnectionOptions) {
	connectionOptions = testConnectionOptions;

@@ -95,7 +95,7 @@ export async function init(
	password: (await GenericHelpers.getConfigValue('database.postgresdb.password')) as string,
	port: (await GenericHelpers.getConfigValue('database.postgresdb.port')) as number,
	username: (await GenericHelpers.getConfigValue('database.postgresdb.user')) as string,
	schema: config.get('database.postgresdb.schema'),
	schema: config.getEnv('database.postgresdb.schema'),
	migrations: postgresMigrations,
	migrationsRun: true,
	migrationsTableName: `${entityPrefix}migrations`,

@@ -4,7 +4,7 @@
// eslint-disable-next-line import/no-cycle
import { Db, IExternalHooksClass, IExternalHooksFileData, IExternalHooksFunctions } from '.';

import * as config from '../config';
import config from '../config';

class ExternalHooksClass implements IExternalHooksClass {
	externalHooks: {

@@ -34,7 +34,7 @@ class ExternalHooksClass implements IExternalHooksClass {
}

async loadHooksFiles(reload = false) {
	const externalHookFiles = config.get('externalHookFiles').split(':');
	const externalHookFiles = config.getEnv('externalHookFiles').split(':');

	// Load all the provided hook-files
	for (let hookFilePath of externalHookFiles) {

@@ -5,12 +5,12 @@
/* eslint-disable no-param-reassign */
/* eslint-disable no-underscore-dangle */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
import * as express from 'express';
import express from 'express';
import { join as pathJoin } from 'path';
import { readFile as fsReadFile } from 'fs/promises';
import { IDataObject } from 'n8n-workflow';
import { validate } from 'class-validator';
import * as config from '../config';
import config from '../config';

// eslint-disable-next-line import/no-cycle
import { Db, ICredentialsDb, IPackageVersions, ResponseHelper } from '.';

@@ -31,10 +31,10 @@ let versionCache: IPackageVersions | undefined;
 * @returns {string}
 */
export function getBaseUrl(): string {
	const protocol = config.get('protocol');
	const host = config.get('host');
	const port = config.get('port');
	const path = config.get('path');
	const protocol = config.getEnv('protocol');
	const host = config.getEnv('host');
	const port = config.getEnv('port');
	const path = config.getEnv('path');

	if ((protocol === 'http' && port === 80) || (protocol === 'https' && port === 443)) {
		return `${protocol}://${host}${path}`;

@@ -117,14 +117,16 @@ export async function getConfigValue(
	// Check if environment variable is defined for config key
	if (currentSchema.env === undefined) {
		// No environment variable defined, so return value from config
		return config.get(configKey);
		// @ts-ignore
		return config.getEnv(configKey);
	}

	// Check if special file environment variable exists
	const fileEnvironmentVariable = process.env[`${currentSchema.env}_FILE`];
	if (fileEnvironmentVariable === undefined) {
		// Does not exist, so return value from config
		return config.get(configKey);
		// @ts-ignore
		return config.getEnv(configKey);
	}

	let data;

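The `${env}_FILE` check above follows the file-based secrets convention (for
example Docker secrets). A self-contained sketch of that convention, with a
hypothetical helper name:

import { readFile } from 'fs/promises';

// Prefer FOO_FILE over FOO: read the value from disk when a file path is given.
async function resolveEnv(name: string): Promise<string | undefined> {
	const filePath = process.env[`${name}_FILE`];
	if (filePath) return (await readFile(filePath, 'utf-8')).trim();
	return process.env[name];
}
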
@@ -21,7 +21,7 @@ import {
import { WorkflowExecute } from 'n8n-core';

// eslint-disable-next-line import/no-extraneous-dependencies
import * as PCancelable from 'p-cancelable';
import PCancelable from 'p-cancelable';
import { Repository } from 'typeorm';

import { ChildProcess } from 'child_process';

@@ -329,7 +329,7 @@ export interface IDiagnosticInfo {
	};
};
executionVariables: {
	[key: string]: string | number | undefined;
	[key: string]: string | number | boolean | undefined;
};
deploymentType: string;
binaryDataMode: string;

@@ -458,7 +458,7 @@ export interface IN8nUISettings {
	defaultLocale: string;
	userManagement: IUserManagementSettings;
	workflowTagsDisabled: boolean;
	logLevel: 'info' | 'debug' | 'warn' | 'error' | 'verbose';
	logLevel: 'info' | 'debug' | 'warn' | 'error' | 'verbose' | 'silent';
	hiringBannerEnabled: boolean;
	templates: {
		enabled: boolean;

@@ -26,10 +26,10 @@ import {
	readFile as fsReadFile,
	stat as fsStat,
} from 'fs/promises';
import * as glob from 'fast-glob';
import * as path from 'path';
import glob from 'fast-glob';
import path from 'path';
import { getLogger } from './Logger';
import * as config from '../config';
import config from '../config';

const CUSTOM_NODES_CATEGORY = 'Custom Nodes';

@@ -38,9 +38,9 @@ class LoadNodesAndCredentialsClass {

	credentialTypes: ICredentialTypeData = {};

	excludeNodes: string[] | undefined = undefined;
	excludeNodes: string | undefined = undefined;

	includeNodes: string[] | undefined = undefined;
	includeNodes: string | undefined = undefined;

	nodeModulesPath = '';

@@ -76,8 +76,8 @@ class LoadNodesAndCredentialsClass {
	throw new Error('Could not find "node_modules" folder!');
}

this.excludeNodes = config.get('nodes.exclude');
this.includeNodes = config.get('nodes.include');
this.excludeNodes = config.getEnv('nodes.exclude');
this.includeNodes = config.getEnv('nodes.include');

// Get all the installed packages which contain n8n nodes
const packages = await this.getN8nNodePackages();

@@ -1,20 +1,23 @@
/* eslint-disable @typescript-eslint/no-shadow */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import * as winston from 'winston';
import winston from 'winston';

import { IDataObject, ILogger, LogTypes } from 'n8n-workflow';

import * as callsites from 'callsites';
import callsites from 'callsites';
import { basename } from 'path';
import config = require('../config');
import config from '../config';

class Logger implements ILogger {
	private logger: winston.Logger;

	constructor() {
		const level = config.get('logs.level') as string;
		const level = config.getEnv('logs.level');

		// eslint-disable-next-line @typescript-eslint/no-shadow
		const output = (config.get('logs.output') as string).split(',').map((output) => output.trim());
		const output = config
			.getEnv('logs.output')
			.split(',')
			.map((output) => output.trim());

		this.logger = winston.createLogger({
			// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment

@@ -56,10 +59,10 @@ class Logger implements ILogger {
	);
	this.logger.add(
		new winston.transports.File({
			filename: config.get('logs.file.location'),
			filename: config.getEnv('logs.file.location'),
			format: fileLogFormat,
			maxsize: (config.get('logs.file.fileSizeMax') as number) * 1048576, // config * 1mb
			maxFiles: config.get('logs.file.fileCountMax'),
			maxsize: config.getEnv('logs.file.fileSizeMax') * 1048576, // config * 1mb
			maxFiles: config.getEnv('logs.file.fileCountMax'),
		}),
	);
}
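For reference, the transport wiring above is driven by two env-backed values from
the schema (a worked example, not additional code in the diff):

// N8N_LOG_OUTPUT='console,file' -> ['console', 'file'] after split/trim
const output = config.getEnv('logs.output').split(',').map((o) => o.trim());
// N8N_LOG_FILE_SIZE_MAX defaults to 16, i.e. 16 * 1048576 = 16,777,216 bytes per file
const maxsize = config.getEnv('logs.file.fileSizeMax') * 1048576;
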

@@ -1,8 +1,8 @@
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
// @ts-ignore
import * as sseChannel from 'sse-channel';
import * as express from 'express';
import sseChannel from 'sse-channel';
import express from 'express';

import { LoggerProxy as Logger } from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle

@@ -1,6 +1,6 @@
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
import * as Bull from 'bull';
import * as config from '../config';
import Bull from 'bull';
import config from '../config';
// eslint-disable-next-line import/no-cycle
import { IBullJobData, IBullWebhookResponse } from './Interfaces';
// eslint-disable-next-line import/no-cycle

@@ -16,8 +16,8 @@ export class Queue {
constructor() {
	this.activeExecutions = ActiveExecutions.getInstance();

	const prefix = config.get('queue.bull.prefix') as string;
	const redisOptions = config.get('queue.bull.redis') as object;
	const prefix = config.getEnv('queue.bull.prefix');
	const redisOptions = config.getEnv('queue.bull.redis');
	// Disabling ready check is necessary as it allows worker to
	// quickly reconnect to Redis if Redis crashes or is unreachable
	// for some time. With it enabled, worker might take minutes to realize
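A sketch of how the prefix and redis options above are typically handed to Bull
(the queue name is illustrative; disabling the ready check matches the comment in
the diff, and enableReadyCheck is an ioredis option):

import Bull from 'bull';

const jobQueue = new Bull('jobs', {
	prefix, // e.g. the value from queue.bull.prefix
	redis: { ...redisOptions, enableReadyCheck: false },
});
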

@@ -1,3 +1,4 @@
/* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */
/* eslint-disable @typescript-eslint/no-unnecessary-type-assertion */
/* eslint-disable @typescript-eslint/no-use-before-define */
/* eslint-disable @typescript-eslint/await-thenable */

@@ -28,10 +29,10 @@
/* eslint-disable import/no-dynamic-require */
/* eslint-disable no-await-in-loop */

import * as express from 'express';
import express from 'express';
import { readFileSync } from 'fs';
import { readFile } from 'fs/promises';
import { cloneDeep } from 'lodash';
import _, { cloneDeep } from 'lodash';
import { dirname as pathDirname, join as pathJoin, resolve as pathResolve } from 'path';
import {
	FindConditions,

@@ -46,22 +47,19 @@ import {
	Not,
	Raw,
} from 'typeorm';
import * as bodyParser from 'body-parser';
import * as cookieParser from 'cookie-parser';
import * as history from 'connect-history-api-fallback';
import * as os from 'os';
import bodyParser from 'body-parser';
import cookieParser from 'cookie-parser';
import history from 'connect-history-api-fallback';
import os from 'os';
// eslint-disable-next-line import/no-extraneous-dependencies
import * as _ from 'lodash';
import * as clientOAuth2 from 'client-oauth2';
import * as clientOAuth1 from 'oauth-1.0a';
import { RequestOptions } from 'oauth-1.0a';
import * as csrf from 'csrf';
import * as requestPromise from 'request-promise-native';
import clientOAuth2 from 'client-oauth2';
import clientOAuth1, { RequestOptions } from 'oauth-1.0a';
import csrf from 'csrf';
import requestPromise, { OptionsWithUrl } from 'request-promise-native';
import { createHmac } from 'crypto';
// IMPORTANT! Do not switch to another bcrypt library unless really necessary and
// tested with all possible systems like Windows, Alpine on ARM, FreeBSD, ...
import { compare } from 'bcryptjs';
import * as promClient from 'prom-client';

import {
	BinaryDataManager,

@@ -90,16 +88,15 @@ import {
	WorkflowExecuteMode,
} from 'n8n-workflow';

import * as basicAuth from 'basic-auth';
import * as compression from 'compression';
import * as jwt from 'jsonwebtoken';
import * as jwks from 'jwks-rsa';
import basicAuth from 'basic-auth';
import compression from 'compression';
import jwt from 'jsonwebtoken';
import jwks from 'jwks-rsa';
// @ts-ignore
import * as timezones from 'google-timezones-json';
import * as parseUrl from 'parseurl';
import * as querystring from 'querystring';
import { OptionsWithUrl } from 'request-promise-native';
import { Registry } from 'prom-client';
import timezones from 'google-timezones-json';
import parseUrl from 'parseurl';
import querystring from 'querystring';
import promClient, { Registry } from 'prom-client';
import * as Queue from './Queue';
import {
	ActiveExecutions,

@@ -142,7 +139,7 @@ import {
	getCredentialForUser,
} from '.';

import * as config from '../config';
import config from '../config';

import * as TagHelpers from './TagHelpers';

@@ -200,9 +197,9 @@ class App {

	defaultCredentialsName: string;

	saveDataErrorExecution: string;
	saveDataErrorExecution: 'all' | 'none';

	saveDataSuccessExecution: string;
	saveDataSuccessExecution: 'all' | 'none';

	saveManualExecutions: boolean;

@@ -237,21 +234,21 @@ class App {
constructor() {
	this.app = express();

	this.endpointWebhook = config.get('endpoints.webhook') as string;
	this.endpointWebhookWaiting = config.get('endpoints.webhookWaiting') as string;
	this.endpointWebhookTest = config.get('endpoints.webhookTest') as string;
	this.endpointWebhook = config.getEnv('endpoints.webhook');
	this.endpointWebhookWaiting = config.getEnv('endpoints.webhookWaiting');
	this.endpointWebhookTest = config.getEnv('endpoints.webhookTest');

	this.defaultWorkflowName = config.get('workflows.defaultName') as string;
	this.defaultCredentialsName = config.get('credentials.defaultName') as string;
	this.defaultWorkflowName = config.getEnv('workflows.defaultName');
	this.defaultCredentialsName = config.getEnv('credentials.defaultName');

	this.saveDataErrorExecution = config.get('executions.saveDataOnError') as string;
	this.saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string;
	this.saveManualExecutions = config.get('executions.saveDataManualExecutions') as boolean;
	this.executionTimeout = config.get('executions.timeout') as number;
	this.maxExecutionTimeout = config.get('executions.maxTimeout') as number;
	this.payloadSizeMax = config.get('endpoints.payloadSizeMax') as number;
	this.timezone = config.get('generic.timezone') as string;
	this.restEndpoint = config.get('endpoints.rest') as string;
	this.saveDataErrorExecution = config.getEnv('executions.saveDataOnError');
	this.saveDataSuccessExecution = config.getEnv('executions.saveDataOnSuccess');
	this.saveManualExecutions = config.getEnv('executions.saveDataManualExecutions');
	this.executionTimeout = config.getEnv('executions.timeout');
	this.maxExecutionTimeout = config.getEnv('executions.maxTimeout');
	this.payloadSizeMax = config.getEnv('endpoints.payloadSizeMax');
	this.timezone = config.getEnv('generic.timezone');
	this.restEndpoint = config.getEnv('endpoints.rest');

	this.activeWorkflowRunner = ActiveWorkflowRunner.getInstance();
	this.testWebhooks = TestWebhooks.getInstance();

@@ -260,22 +257,22 @@ class App {
	this.activeExecutionsInstance = ActiveExecutions.getInstance();
	this.waitTracker = WaitTracker();

	this.protocol = config.get('protocol');
	this.sslKey = config.get('ssl_key');
	this.sslCert = config.get('ssl_cert');
	this.protocol = config.getEnv('protocol');
	this.sslKey = config.getEnv('ssl_key');
	this.sslCert = config.getEnv('ssl_cert');

	this.externalHooks = externalHooks;

	this.presetCredentialsLoaded = false;
	this.endpointPresetCredentials = config.get('credentials.overwrite.endpoint') as string;
	this.endpointPresetCredentials = config.getEnv('credentials.overwrite.endpoint');

	const urlBaseWebhook = WebhookHelpers.getWebhookBaseUrl();
	const telemetrySettings: ITelemetrySettings = {
		enabled: config.get('diagnostics.enabled') as boolean,
		enabled: config.getEnv('diagnostics.enabled'),
	};

	if (telemetrySettings.enabled) {
		const conf = config.get('diagnostics.config.frontend') as string;
		const conf = config.getEnv('diagnostics.config.frontend');
		const [key, url] = conf.split(';');

		if (!key || !url) {

@@ -303,31 +300,31 @@ class App {
	oauth2: `${urlBaseWebhook}${this.restEndpoint}/oauth2-credential/callback`,
},
versionNotifications: {
	enabled: config.get('versionNotifications.enabled'),
	endpoint: config.get('versionNotifications.endpoint'),
	infoUrl: config.get('versionNotifications.infoUrl'),
	enabled: config.getEnv('versionNotifications.enabled'),
	endpoint: config.getEnv('versionNotifications.endpoint'),
	infoUrl: config.getEnv('versionNotifications.infoUrl'),
},
instanceId: '',
telemetry: telemetrySettings,
personalizationSurveyEnabled:
	config.get('personalization.enabled') && config.get('diagnostics.enabled'),
defaultLocale: config.get('defaultLocale'),
	config.getEnv('personalization.enabled') && config.getEnv('diagnostics.enabled'),
defaultLocale: config.getEnv('defaultLocale'),
userManagement: {
	enabled:
		config.get('userManagement.disabled') === false ||
		config.get('userManagement.isInstanceOwnerSetUp') === true,
		config.getEnv('userManagement.disabled') === false ||
		config.getEnv('userManagement.isInstanceOwnerSetUp') === true,
	showSetupOnFirstLoad:
		config.get('userManagement.disabled') === false &&
		config.get('userManagement.isInstanceOwnerSetUp') === false &&
		config.get('userManagement.skipInstanceOwnerSetup') === false,
		config.getEnv('userManagement.disabled') === false &&
		config.getEnv('userManagement.isInstanceOwnerSetUp') === false &&
		config.getEnv('userManagement.skipInstanceOwnerSetup') === false,
	smtpSetup: isEmailSetUp(),
},
workflowTagsDisabled: config.get('workflowTagsDisabled'),
logLevel: config.get('logs.level'),
hiringBannerEnabled: config.get('hiringBanner.enabled'),
workflowTagsDisabled: config.getEnv('workflowTagsDisabled'),
logLevel: config.getEnv('logs.level'),
hiringBannerEnabled: config.getEnv('hiringBanner.enabled'),
templates: {
	enabled: config.get('templates.enabled'),
	host: config.get('templates.host'),
	enabled: config.getEnv('templates.enabled'),
	host: config.getEnv('templates.host'),
},
};
}

@@ -349,23 +346,23 @@ class App {
// refresh user management status
Object.assign(this.frontendSettings.userManagement, {
	enabled:
		config.get('userManagement.disabled') === false ||
		config.get('userManagement.isInstanceOwnerSetUp') === true,
		config.getEnv('userManagement.disabled') === false ||
		config.getEnv('userManagement.isInstanceOwnerSetUp') === true,
	showSetupOnFirstLoad:
		config.get('userManagement.disabled') === false &&
		config.get('userManagement.isInstanceOwnerSetUp') === false &&
		config.get('userManagement.skipInstanceOwnerSetup') === false,
		config.getEnv('userManagement.disabled') === false &&
		config.getEnv('userManagement.isInstanceOwnerSetUp') === false &&
		config.getEnv('userManagement.skipInstanceOwnerSetup') === false,
});

return this.frontendSettings;
}

async config(): Promise<void> {
	const enableMetrics = config.get('endpoints.metrics.enable') as boolean;
	const enableMetrics = config.getEnv('endpoints.metrics.enable');
	let register: Registry;

	if (enableMetrics) {
		const prefix = config.get('endpoints.metrics.prefix') as string;
		const prefix = config.getEnv('endpoints.metrics.prefix');
		register = new promClient.Registry();
		register.setDefaultLabels({ prefix });
		promClient.collectDefaultMetrics({ register });

@@ -378,7 +375,7 @@ class App {

	await this.externalHooks.run('frontend.settings', [this.frontendSettings]);

	const excludeEndpoints = config.get('security.excludeEndpoints') as string;
	const excludeEndpoints = config.getEnv('security.excludeEndpoints');

	const ignoredEndpoints = [
		'healthz',

@@ -394,7 +391,7 @@ class App {
	const authIgnoreRegex = new RegExp(`^\/(${_(ignoredEndpoints).compact().join('|')})\/?.*$`);

	// Check for basic auth credentials if activated
	const basicAuthActive = config.get('security.basicAuth.active') as boolean;
	const basicAuthActive = config.getEnv('security.basicAuth.active');
	if (basicAuthActive) {
		const basicAuthUser = (await GenericHelpers.getConfigValue(
			'security.basicAuth.user',

@@ -419,7 +416,10 @@ class App {
	this.app.use(
		async (req: express.Request, res: express.Response, next: express.NextFunction) => {
			// Skip basic auth for a few listed endpoints or when instance owner has been setup
			if (authIgnoreRegex.exec(req.url) || config.get('userManagement.isInstanceOwnerSetUp')) {
			if (
				authIgnoreRegex.exec(req.url) ||
				config.getEnv('userManagement.isInstanceOwnerSetUp')
			) {
				return next();
			}
			const realm = 'n8n - Editor UI';
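A sketch of the password check behind the two basic-auth modes configured in the
schema (the helper name is illustrative; bcryptjs is the library imported in this
file):

import { compare } from 'bcryptjs';

async function passwordMatches(supplied: string, configured: string, hashed: boolean) {
	// With N8N_BASIC_AUTH_HASH=true the configured value is a bcrypt hash.
	return hashed ? compare(supplied, configured) : supplied === configured;
}
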
@@ -465,7 +465,7 @@ class App {
}

// Check for and validate JWT if configured
const jwtAuthActive = config.get('security.jwtAuth.active') as boolean;
const jwtAuthActive = config.getEnv('security.jwtAuth.active');
if (jwtAuthActive) {
	const jwtAuthHeader = (await GenericHelpers.getConfigValue(
		'security.jwtAuth.jwtHeader',

@@ -750,7 +750,7 @@ class App {

const { tags: tagIds } = req.body;

if (tagIds?.length && !config.get('workflowTagsDisabled')) {
if (tagIds?.length && !config.getEnv('workflowTagsDisabled')) {
	newWorkflow.tags = await Db.collections.Tag!.findByIds(tagIds, {
		select: ['id', 'name'],
	});

@@ -784,7 +784,7 @@ class App {
	throw new ResponseHelper.ResponseError('Failed to save workflow');
}

if (tagIds && !config.get('workflowTagsDisabled')) {
if (tagIds && !config.getEnv('workflowTagsDisabled')) {
	savedWorkflow.tags = TagHelpers.sortByRequestOrder(savedWorkflow.tags, {
		requestOrder: tagIds,
	});

@@ -868,7 +868,7 @@ class App {
	relations: ['tags'],
};

if (config.get('workflowTagsDisabled')) {
if (config.getEnv('workflowTagsDisabled')) {
	delete query.relations;
}

@@ -928,7 +928,7 @@ class App {

let relations = ['workflow', 'workflow.tags'];

if (config.get('workflowTagsDisabled')) {
if (config.getEnv('workflowTagsDisabled')) {
	relations = relations.filter((relation) => relation !== 'workflow.tags');
}

@@ -1038,8 +1038,8 @@ class App {

await Db.collections.Workflow!.update(workflowId, updateData);

if (tags && !config.get('workflowTagsDisabled')) {
	const tablePrefix = config.get('database.tablePrefix');
if (tags && !config.getEnv('workflowTagsDisabled')) {
	const tablePrefix = config.getEnv('database.tablePrefix');
	await TagHelpers.removeRelations(workflowId, tablePrefix);

	if (tags.length) {

@@ -1051,7 +1051,7 @@ class App {
	relations: ['tags'],
};

if (config.get('workflowTagsDisabled')) {
if (config.getEnv('workflowTagsDisabled')) {
	delete options.relations;
}

@@ -1233,11 +1233,11 @@ class App {
	req: express.Request,
	res: express.Response,
): Promise<TagEntity[] | ITagWithCountDb[]> => {
	if (config.get('workflowTagsDisabled')) {
	if (config.getEnv('workflowTagsDisabled')) {
		throw new ResponseHelper.ResponseError('Workflow tags are disabled');
	}
	if (req.query.withUsageCount === 'true') {
		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');
		return TagHelpers.getTagsWithCountDb(tablePrefix);
	}

@@ -1251,7 +1251,7 @@ class App {
	`/${this.restEndpoint}/tags`,
	ResponseHelper.send(
		async (req: express.Request, res: express.Response): Promise<TagEntity | void> => {
			if (config.get('workflowTagsDisabled')) {
			if (config.getEnv('workflowTagsDisabled')) {
				throw new ResponseHelper.ResponseError('Workflow tags are disabled');
			}
			const newTag = new TagEntity();

@@ -1274,7 +1274,7 @@ class App {
	`/${this.restEndpoint}/tags/:id`,
	ResponseHelper.send(
		async (req: express.Request, res: express.Response): Promise<TagEntity | void> => {
			if (config.get('workflowTagsDisabled')) {
			if (config.getEnv('workflowTagsDisabled')) {
				throw new ResponseHelper.ResponseError('Workflow tags are disabled');
			}

@@ -1303,11 +1303,11 @@ class App {
	`/${this.restEndpoint}/tags/:id`,
	ResponseHelper.send(
		async (req: TagsRequest.Delete, res: express.Response): Promise<boolean> => {
			if (config.get('workflowTagsDisabled')) {
			if (config.getEnv('workflowTagsDisabled')) {
				throw new ResponseHelper.ResponseError('Workflow tags are disabled');
			}
			if (
				config.get('userManagement.isInstanceOwnerSetUp') === true &&
				config.getEnv('userManagement.isInstanceOwnerSetUp') === true &&
				req.user.globalRole.name !== 'owner'
			) {
				throw new ResponseHelper.ResponseError(
@@ -1706,11 +1706,13 @@ class App {
}

const mode: WorkflowExecuteMode = 'internal';
const timezone = config.getEnv('generic.timezone');
const credentialsHelper = new CredentialsHelper(encryptionKey);
const decryptedDataOriginal = await credentialsHelper.getDecrypted(
	credential as INodeCredentialsDetails,
	credential.type,
	mode,
	timezone,
	true,
);

@@ -1718,6 +1720,7 @@ class App {
	decryptedDataOriginal,
	credential.type,
	mode,
	timezone,
);

const signatureMethod = _.get(oauthCredentials, 'signatureMethod') as string;

@@ -1844,17 +1847,20 @@ class App {
}

const mode: WorkflowExecuteMode = 'internal';
const timezone = config.getEnv('generic.timezone');
const credentialsHelper = new CredentialsHelper(encryptionKey);
const decryptedDataOriginal = await credentialsHelper.getDecrypted(
	credential as INodeCredentialsDetails,
	credential.type,
	mode,
	timezone,
	true,
);
const oauthCredentials = credentialsHelper.applyDefaultsAndOverwrites(
	decryptedDataOriginal,
	credential.type,
	mode,
	timezone,
);

const options: OptionsWithUrl = {

@@ -1959,11 +1965,13 @@ class App {
}

const mode: WorkflowExecuteMode = 'internal';
const timezone = config.getEnv('generic.timezone');
const credentialsHelper = new CredentialsHelper(encryptionKey);
const decryptedDataOriginal = await credentialsHelper.getDecrypted(
	credential as INodeCredentialsDetails,
	credential.type,
	mode,
	timezone,
	true,
);

@@ -1971,6 +1979,7 @@ class App {
	decryptedDataOriginal,
	credential.type,
	mode,
	timezone,
);

const token = new csrf();

@@ -2099,17 +2108,20 @@ class App {
}

const mode: WorkflowExecuteMode = 'internal';
const timezone = config.getEnv('generic.timezone');
const credentialsHelper = new CredentialsHelper(encryptionKey);
const decryptedDataOriginal = await credentialsHelper.getDecrypted(
	credential as INodeCredentialsDetails,
	credential.type,
	mode,
	timezone,
	true,
);
const oauthCredentials = credentialsHelper.applyDefaultsAndOverwrites(
	decryptedDataOriginal,
	credential.type,
	mode,
	timezone,
);

const token = new csrf();
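The csrf usage begun above follows the csrf package's token API; a minimal sketch
of the round trip (state handling is illustrative):

import csrf from 'csrf';

const token = new csrf();
const secret = token.secretSync(); // kept server-side alongside the credential
const state = token.create(secret); // sent to the provider, returned on callback
// later: token.verify(secret, state) === true confirms the callback is ours
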
@@ -2232,7 +2244,7 @@ class App {

const executingWorkflowIds: string[] = [];

if (config.get('executions.mode') === 'queue') {
if (config.getEnv('executions.mode') === 'queue') {
	const currentJobs = await Queue.getInstance().getJobs(['active', 'waiting']);
	executingWorkflowIds.push(...currentJobs.map(({ data }) => data.executionId));
}

@@ -2595,7 +2607,7 @@ class App {
	`/${this.restEndpoint}/executions-current`,
	ResponseHelper.send(
		async (req: ExecutionRequest.GetAllCurrent): Promise<IExecutionsSummary[]> => {
			if (config.get('executions.mode') === 'queue') {
			if (config.getEnv('executions.mode') === 'queue') {
				const currentJobs = await Queue.getInstance().getJobs(['active', 'waiting']);

				const currentlyRunningQueueIds = currentJobs.map((job) => job.data.executionId);

@@ -2660,7 +2672,7 @@ class App {
	for (const data of executingWorkflows) {
		if (
			(filter.workflowId !== undefined && filter.workflowId !== data.workflowId) ||
			!sharedWorkflowIds.includes(data.workflowId)
			!sharedWorkflowIds.includes(data.workflowId.toString())
		) {
			continue;
		}

@@ -2704,7 +2716,7 @@ class App {
	throw new ResponseHelper.ResponseError('Execution not found', undefined, 404);
}

if (config.get('executions.mode') === 'queue') {
if (config.getEnv('executions.mode') === 'queue') {
	// Manual executions should still be stoppable, so
	// try notifying the `activeExecutions` to stop it.
	const result = await this.activeExecutionsInstance.stopExecution(req.params.id);

@@ -2831,7 +2843,7 @@ class App {
// Webhooks
// ----------------------------------------

if (config.get('endpoints.disableProductionWebhooksOnMainProcess') !== true) {
if (!config.getEnv('endpoints.disableProductionWebhooksOnMainProcess')) {
	WebhookServer.registerProductionWebhooks.apply(this);
}

@@ -2926,11 +2938,11 @@ class App {
	);
}

if (config.get('endpoints.disableUi') !== true) {
if (!config.getEnv('endpoints.disableUi')) {
	// Read the index file and replace the path placeholder
	const editorUiPath = require.resolve('n8n-editor-ui');
	const filePath = pathJoin(pathDirname(editorUiPath), 'dist', 'index.html');
	const n8nPath = config.get('path');
	const n8nPath = config.getEnv('path');

	let readIndexFile = readFileSync(filePath, 'utf8');
	readIndexFile = readIndexFile.replace(/\/%BASE_PATH%\//g, n8nPath);

@@ -2962,8 +2974,8 @@ class App {
}

export async function start(): Promise<void> {
	const PORT = config.get('port');
	const ADDRESS = config.get('listen_address');
	const PORT = config.getEnv('port');
	const ADDRESS = config.getEnv('listen_address');

	const app = new App();

@@ -2987,7 +2999,7 @@ export async function start(): Promise<void> {
	console.log(`n8n ready on ${ADDRESS}, port ${PORT}`);
	console.log(`Version: ${versions.cli}`);

	const defaultLocale = config.get('defaultLocale');
	const defaultLocale = config.getEnv('defaultLocale');

	if (defaultLocale !== 'en') {
		console.log(`Locale: ${defaultLocale}`);
@ -2995,13 +3007,14 @@ export async function start(): Promise<void> {
|
|||
|
||||
await app.externalHooks.run('n8n.ready', [app]);
|
||||
const cpus = os.cpus();
|
||||
const binarDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
|
||||
const binarDataConfig = config.getEnv('binaryDataManager');
|
||||
const diagnosticInfo: IDiagnosticInfo = {
|
||||
basicAuthActive: config.get('security.basicAuth.active') as boolean,
|
||||
basicAuthActive: config.getEnv('security.basicAuth.active'),
|
||||
databaseType: (await GenericHelpers.getConfigValue('database.type')) as DatabaseType,
|
||||
disableProductionWebhooksOnMainProcess:
|
||||
config.get('endpoints.disableProductionWebhooksOnMainProcess') === true,
|
||||
notificationsEnabled: config.get('versionNotifications.enabled') === true,
|
||||
disableProductionWebhooksOnMainProcess: config.getEnv(
|
||||
'endpoints.disableProductionWebhooksOnMainProcess',
|
||||
),
|
||||
notificationsEnabled: config.getEnv('versionNotifications.enabled'),
|
||||
versionCli: versions.cli,
|
||||
systemInfo: {
|
||||
os: {
|
||||
|
@ -3016,24 +3029,26 @@ export async function start(): Promise<void> {
|
|||
},
|
||||
},
|
||||
executionVariables: {
|
||||
executions_process: config.get('executions.process'),
|
||||
executions_mode: config.get('executions.mode'),
|
||||
executions_timeout: config.get('executions.timeout'),
|
||||
executions_timeout_max: config.get('executions.maxTimeout'),
|
||||
executions_data_save_on_error: config.get('executions.saveDataOnError'),
|
||||
executions_data_save_on_success: config.get('executions.saveDataOnSuccess'),
|
||||
executions_data_save_on_progress: config.get('executions.saveExecutionProgress'),
|
||||
executions_data_save_manual_executions: config.get('executions.saveDataManualExecutions'),
|
||||
executions_data_prune: config.get('executions.pruneData'),
|
||||
executions_data_max_age: config.get('executions.pruneDataMaxAge'),
|
||||
executions_data_prune_timeout: config.get('executions.pruneDataTimeout'),
|
||||
executions_process: config.getEnv('executions.process'),
|
||||
executions_mode: config.getEnv('executions.mode'),
|
||||
executions_timeout: config.getEnv('executions.timeout'),
|
||||
executions_timeout_max: config.getEnv('executions.maxTimeout'),
|
||||
executions_data_save_on_error: config.getEnv('executions.saveDataOnError'),
|
||||
executions_data_save_on_success: config.getEnv('executions.saveDataOnSuccess'),
|
||||
executions_data_save_on_progress: config.getEnv('executions.saveExecutionProgress'),
|
||||
executions_data_save_manual_executions: config.getEnv(
|
||||
'executions.saveDataManualExecutions',
|
||||
),
|
||||
executions_data_prune: config.getEnv('executions.pruneData'),
|
||||
executions_data_max_age: config.getEnv('executions.pruneDataMaxAge'),
|
||||
executions_data_prune_timeout: config.getEnv('executions.pruneDataTimeout'),
|
||||
},
|
||||
deploymentType: config.get('deployment.type'),
|
||||
deploymentType: config.getEnv('deployment.type'),
|
||||
binaryDataMode: binarDataConfig.mode,
|
||||
n8n_multi_user_allowed:
|
||||
config.get('userManagement.disabled') === false ||
|
||||
config.get('userManagement.isInstanceOwnerSetUp') === true,
|
||||
smtp_set_up: config.get('userManagement.emails.mode') === 'smtp',
|
||||
config.getEnv('userManagement.disabled') === false ||
|
||||
config.getEnv('userManagement.isInstanceOwnerSetUp') === true,
|
||||
smtp_set_up: config.getEnv('userManagement.emails.mode') === 'smtp',
|
||||
};
|
||||
|
||||
void Db.collections
|
||||
|
|
|
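The recurring change in these hunks swaps `config.get('key') as T` casts for a typed `config.getEnv('key')`. A minimal sketch of why a schema-typed accessor removes the per-call casts; the schema keys and defaults below are illustrative assumptions, not n8n's actual config module:

// Illustrative sketch only: the schema and defaults are assumptions,
// not n8n's real config module.
interface ConfigSchema {
	'executions.mode': 'regular' | 'queue';
	'executions.timeout': number;
	'endpoints.disableUi': boolean;
}

const defaults: { [K in keyof ConfigSchema]: ConfigSchema[K] } = {
	'executions.mode': 'regular',
	'executions.timeout': -1,
	'endpoints.disableUi': false,
};

function getEnv<K extends keyof ConfigSchema>(key: K): ConfigSchema[K] {
	// One typed lookup instead of `config.get(key) as T` at every call site.
	return defaults[key];
}

const mode = getEnv('executions.mode'); // inferred as 'regular' | 'queue'
const timeout = getEnv('executions.timeout'); // inferred as number
console.log(mode, timeout);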
@@ -1,7 +1,7 @@
 /* eslint-disable consistent-return */
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 /* eslint-disable no-param-reassign */
-import * as express from 'express';
+import express from 'express';

 import { ActiveWebhooks } from 'n8n-core';
@@ -2,16 +2,16 @@
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 /* eslint-disable import/no-cycle */
 import { Workflow } from 'n8n-workflow';
-import { In, IsNull, Not } from 'typeorm';
-import express = require('express');
-import { compare } from 'bcryptjs';
+import { In } from 'typeorm';
+import express from 'express';
+import { compare, genSaltSync, hash } from 'bcryptjs';

 import { PublicUser } from './Interfaces';
 import { Db, ResponseHelper } from '..';
 import { MAX_PASSWORD_LENGTH, MIN_PASSWORD_LENGTH, User } from '../databases/entities/User';
 import { Role } from '../databases/entities/Role';
 import { AuthenticatedRequest } from '../requests';
-import config = require('../../config');
+import * as config from '../../config';
 import { getWebhookBaseUrl } from '../WebhookHelpers';

 export async function getWorkflowOwner(workflowId: string | number): Promise<User> {

@@ -24,10 +24,10 @@ export async function getWorkflowOwner(workflowId: string | number): Promise<Use
 }

 export function isEmailSetUp(): boolean {
-	const smtp = config.get('userManagement.emails.mode') === 'smtp';
-	const host = !!config.get('userManagement.emails.smtp.host');
-	const user = !!config.get('userManagement.emails.smtp.auth.user');
-	const pass = !!config.get('userManagement.emails.smtp.auth.pass');
+	const smtp = config.getEnv('userManagement.emails.mode') === 'smtp';
+	const host = !!config.getEnv('userManagement.emails.smtp.host');
+	const user = !!config.getEnv('userManagement.emails.smtp.auth.user');
+	const pass = !!config.getEnv('userManagement.emails.smtp.auth.pass');

 	return smtp && host && user && pass;
 }

@@ -58,16 +58,11 @@ export async function getInstanceOwner(): Promise<User> {
  * Return the n8n instance base URL without trailing slash.
  */
 export function getInstanceBaseUrl(): string {
-	const n8nBaseUrl = config.get('editorBaseUrl') || getWebhookBaseUrl();
+	const n8nBaseUrl = config.getEnv('editorBaseUrl') || getWebhookBaseUrl();

 	return n8nBaseUrl.endsWith('/') ? n8nBaseUrl.slice(0, n8nBaseUrl.length - 1) : n8nBaseUrl;
 }

-export async function isInstanceOwnerSetup(): Promise<boolean> {
-	const users = await Db.collections.User!.find({ email: Not(IsNull()) });
-	return users.length !== 0;
-}
-
 // TODO: Enforce at model level
 export function validatePassword(password?: string): string {
 	if (!password) {

@@ -201,7 +196,7 @@ export async function checkPermissionsForExecution(
 export function isAuthExcluded(url: string, ignoredEndpoints: string[]): boolean {
 	return !!ignoredEndpoints
 		.filter(Boolean) // skip empty paths
-		.find((ignoredEndpoint) => url.includes(ignoredEndpoint));
+		.find((ignoredEndpoint) => url.startsWith(`/${ignoredEndpoint}`));
 }

 /**

@@ -223,9 +218,12 @@ export function isAuthenticatedRequest(request: express.Request): request is Aut
 // hashing
 // ----------------------------------

-export async function compareHash(str: string, hash: string): Promise<boolean | undefined> {
+export const hashPassword = async (validPassword: string): Promise<string> =>
+	hash(validPassword, genSaltSync(10));
+
+export async function compareHash(plaintext: string, hashed: string): Promise<boolean | undefined> {
 	try {
-		return await compare(str, hash);
+		return await compare(plaintext, hashed);
 	} catch (error) {
 		if (error instanceof Error && error.message.includes('Invalid salt version')) {
 			error.message +=
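The new `hashPassword` helper and the renamed `compareHash` parameters centralize bcryptjs usage. A standalone sketch of the same round trip, using only the calls shown above:

import { compare, genSaltSync, hash } from 'bcryptjs';

// Same calls as the helpers above, shown end to end.
const hashPassword = async (validPassword: string): Promise<string> =>
	hash(validPassword, genSaltSync(10)); // cost factor 10, as in the diff

async function demo(): Promise<void> {
	const stored = await hashPassword('s3cret-password');
	// bcrypt embeds the salt in `stored`, so comparison needs no extra state.
	console.log(await compare('s3cret-password', stored)); // true
	console.log(await compare('wrong-password', stored)); // false
}

void demo();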
@@ -1,14 +1,14 @@
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 /* eslint-disable import/no-cycle */

-import * as jwt from 'jsonwebtoken';
+import jwt from 'jsonwebtoken';
 import { Response } from 'express';
 import { createHash } from 'crypto';
 import { Db } from '../..';
 import { AUTH_COOKIE_NAME } from '../../constants';
 import { JwtToken, JwtPayload } from '../Interfaces';
 import { User } from '../../databases/entities/User';
-import config = require('../../../config');
+import * as config from '../../../config';

 export function issueJWT(user: User): JwtToken {
 	const { id, email, password } = user;

@@ -26,7 +26,7 @@ export function issueJWT(user: User): JwtToken {
 			.digest('hex');
 	}

-	const signedToken = jwt.sign(payload, config.get('userManagement.jwtSecret'), {
+	const signedToken = jwt.sign(payload, config.getEnv('userManagement.jwtSecret'), {
 		expiresIn: expiresIn / 1000 /* in seconds */,
 	});

@@ -57,7 +57,7 @@ export async function resolveJwtContent(jwtPayload: JwtPayload): Promise<User> {
 }

 export async function resolveJwt(token: string): Promise<User> {
-	const jwtPayload = jwt.verify(token, config.get('userManagement.jwtSecret')) as JwtPayload;
+	const jwtPayload = jwt.verify(token, config.getEnv('userManagement.jwtSecret')) as JwtPayload;
 	return resolveJwtContent(jwtPayload);
 }
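`issueJWT` and `resolveJwt` sign and verify with the same secret, and `expiresIn` is converted to seconds before signing. A standalone sketch of that round trip; the secret and lifetime are placeholder values, not n8n's configuration:

import jwt from 'jsonwebtoken';

// Placeholder secret and lifetime for illustration only.
const secret = 'example-jwt-secret';
const expiresIn = 7 * 24 * 60 * 60 * 1000; // 7 days in milliseconds

interface Payload {
	id: string;
	email: string;
}

function issue(payload: Payload): string {
	// jsonwebtoken interprets a numeric `expiresIn` as seconds, hence / 1000.
	return jwt.sign(payload, secret, { expiresIn: expiresIn / 1000 });
}

function resolve(token: string): Payload {
	// Throws on a bad signature or an expired token.
	return jwt.verify(token, secret) as Payload;
}

const token = issue({ id: '1', email: 'owner@example.com' });
console.log(resolve(token).email); // owner@example.com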
@@ -1,7 +1,7 @@
 /* eslint-disable @typescript-eslint/no-unsafe-assignment */
 import { createTransport, Transporter } from 'nodemailer';
 import { LoggerProxy as Logger } from 'n8n-workflow';
-import config = require('../../../config');
+import * as config from '../../../config';
 import { MailData, SendEmailResult, UserManagementMailerImplementation } from './Interfaces';

 export class NodeMailer implements UserManagementMailerImplementation {

@@ -9,20 +9,20 @@ export class NodeMailer implements UserManagementMailerImplementation {

 	constructor() {
 		this.transport = createTransport({
-			host: config.get('userManagement.emails.smtp.host'),
-			port: config.get('userManagement.emails.smtp.port'),
-			secure: config.get('userManagement.emails.smtp.secure'),
+			host: config.getEnv('userManagement.emails.smtp.host'),
+			port: config.getEnv('userManagement.emails.smtp.port'),
+			secure: config.getEnv('userManagement.emails.smtp.secure'),
 			auth: {
-				user: config.get('userManagement.emails.smtp.auth.user'),
-				pass: config.get('userManagement.emails.smtp.auth.pass'),
+				user: config.getEnv('userManagement.emails.smtp.auth.user'),
+				pass: config.getEnv('userManagement.emails.smtp.auth.pass'),
 			},
 		});
 	}

 	async verifyConnection(): Promise<void> {
-		const host = config.get('userManagement.emails.smtp.host') as string;
-		const user = config.get('userManagement.emails.smtp.auth.user') as string;
-		const pass = config.get('userManagement.emails.smtp.auth.pass') as string;
+		const host = config.getEnv('userManagement.emails.smtp.host');
+		const user = config.getEnv('userManagement.emails.smtp.auth.user');
+		const pass = config.getEnv('userManagement.emails.smtp.auth.pass');

 		return new Promise((resolve, reject) => {
 			this.transport.verify((error: Error) => {

@@ -43,8 +43,8 @@ export class NodeMailer implements UserManagementMailerImplementation {
 	}

 	async sendMail(mailData: MailData): Promise<SendEmailResult> {
-		let sender = config.get('userManagement.emails.smtp.sender');
-		const user = config.get('userManagement.emails.smtp.auth.user') as string;
+		let sender = config.getEnv('userManagement.emails.smtp.sender');
+		const user = config.getEnv('userManagement.emails.smtp.auth.user');

 		if (!sender && user.includes('@')) {
 			sender = user;
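The `NodeMailer` constructor maps the SMTP settings straight into nodemailer's `createTransport`. A standalone sketch with placeholder values (not n8n defaults); note that nodemailer's `verify()` also has a promise form, so the callback wrapper in `verifyConnection` could be awaited directly:

import { createTransport } from 'nodemailer';

// Placeholder SMTP settings for illustration; not n8n defaults.
const transport = createTransport({
	host: 'smtp.example.com',
	port: 465,
	secure: true,
	auth: { user: 'mailer@example.com', pass: 'app-password' },
});

async function checkSmtp(): Promise<void> {
	await transport.verify(); // rejects if the connection or login fails
	await transport.sendMail({
		from: 'mailer@example.com',
		to: 'user@example.com',
		subject: 'Test',
		text: 'Hello from the sketch',
	});
}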
@@ -3,7 +3,7 @@ import { existsSync, readFileSync } from 'fs';
 import { IDataObject } from 'n8n-workflow';
 import { join as pathJoin } from 'path';
 import { GenericHelpers } from '../..';
-import config = require('../../../config');
+import * as config from '../../../config';
 import {
 	InviteEmailData,
 	PasswordResetData,

@@ -45,7 +45,7 @@ export class UserManagementMailer {

 	constructor() {
 		// Other implementations can be used in the future.
-		if (config.get('userManagement.emails.mode') === 'smtp') {
+		if (config.getEnv('userManagement.emails.mode') === 'smtp') {
 			this.mailer = new NodeMailer();
 		}
 	}
@@ -8,9 +8,10 @@ import { Db, ResponseHelper } from '../..';
 import { AUTH_COOKIE_NAME } from '../../constants';
 import { issueCookie, resolveJwt } from '../auth/jwt';
 import { N8nApp, PublicUser } from '../Interfaces';
-import { compareHash, isInstanceOwnerSetup, sanitizeUser } from '../UserManagementHelper';
+import { compareHash, sanitizeUser } from '../UserManagementHelper';
 import { User } from '../../databases/entities/User';
 import type { LoginRequest } from '../../requests';
+import config = require('../../../config');

 export function authenticationMethods(this: N8nApp): void {
 	/**

@@ -71,13 +72,18 @@ export function authenticationMethods(this: N8nApp): void {
 			// If logged in, return user
 			try {
 				user = await resolveJwt(cookieContents);
+
+				if (!config.get('userManagement.isInstanceOwnerSetUp')) {
+					res.cookie(AUTH_COOKIE_NAME, cookieContents);
+				}
+
 				return sanitizeUser(user);
 			} catch (error) {
 				res.clearCookie(AUTH_COOKIE_NAME);
 			}
 		}

-		if (await isInstanceOwnerSetup()) {
+		if (config.get('userManagement.isInstanceOwnerSetUp')) {
 			const error = new Error('Not logged in');
 			// @ts-ignore
 			error.httpStatusCode = 401;
@@ -2,16 +2,16 @@
 /* eslint-disable @typescript-eslint/no-unsafe-call */
 /* eslint-disable @typescript-eslint/no-unsafe-return */
 /* eslint-disable import/no-cycle */
-import cookieParser = require('cookie-parser');
-import * as passport from 'passport';
+import cookieParser from 'cookie-parser';
+import passport from 'passport';
 import { Strategy } from 'passport-jwt';
 import { NextFunction, Request, Response } from 'express';
-import * as jwt from 'jsonwebtoken';
+import jwt from 'jsonwebtoken';
 import { LoggerProxy as Logger } from 'n8n-workflow';

 import { JwtPayload, N8nApp } from '../Interfaces';
 import { authenticationMethods } from './auth';
-import config = require('../../../config');
+import * as config from '../../../config';
 import { AUTH_COOKIE_NAME } from '../../constants';
 import { issueCookie, resolveJwtContent } from '../auth/jwt';
 import { meNamespace } from './me';

@@ -30,7 +30,7 @@ export function addRoutes(this: N8nApp, ignoredEndpoints: string[], restEndpoint
 			// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
 			return (req.cookies?.[AUTH_COOKIE_NAME] as string | undefined) ?? null;
 		},
-		secretOrKey: config.get('userManagement.jwtSecret') as string,
+		secretOrKey: config.getEnv('userManagement.jwtSecret'),
 	};

 	passport.use(
@@ -1,15 +1,14 @@
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 /* eslint-disable import/no-cycle */

-import { compare, genSaltSync, hashSync } from 'bcryptjs';
-import express = require('express');
+import express from 'express';
 import validator from 'validator';
 import { LoggerProxy as Logger } from 'n8n-workflow';

 import { Db, InternalHooksManager, ResponseHelper } from '../..';
 import { issueCookie } from '../auth/jwt';
 import { N8nApp, PublicUser } from '../Interfaces';
-import { validatePassword, sanitizeUser } from '../UserManagementHelper';
+import { validatePassword, sanitizeUser, compareHash, hashPassword } from '../UserManagementHelper';
 import type { AuthenticatedRequest, MeRequest } from '../../requests';
 import { validateEntity } from '../../GenericHelpers';
 import { User } from '../../databases/entities/User';

@@ -87,7 +86,7 @@ export function meNamespace(this: N8nApp): void {
 				throw new ResponseHelper.ResponseError('Requesting user not set up.');
 			}

-			const isCurrentPwCorrect = await compare(currentPassword, req.user.password);
+			const isCurrentPwCorrect = await compareHash(currentPassword, req.user.password);
 			if (!isCurrentPwCorrect) {
 				throw new ResponseHelper.ResponseError(
 					'Provided current password is incorrect.',

@@ -98,7 +97,7 @@ export function meNamespace(this: N8nApp): void {

 			const validPassword = validatePassword(newPassword);

-			req.user.password = hashSync(validPassword, genSaltSync(10));
+			req.user.password = await hashPassword(validPassword);

 			const user = await Db.collections.User!.save(req.user);
 			Logger.info('Password updated successfully', { userId: user.id });
@@ -1,17 +1,16 @@
 /* eslint-disable import/no-cycle */
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
-import { hashSync, genSaltSync } from 'bcryptjs';
-import * as express from 'express';
+import express from 'express';
 import validator from 'validator';
 import { LoggerProxy as Logger } from 'n8n-workflow';

 import { Db, InternalHooksManager, ResponseHelper } from '../..';
-import config = require('../../../config');
+import * as config from '../../../config';
 import { validateEntity } from '../../GenericHelpers';
 import { AuthenticatedRequest, OwnerRequest } from '../../requests';
 import { issueCookie } from '../auth/jwt';
 import { N8nApp } from '../Interfaces';
-import { sanitizeUser, validatePassword } from '../UserManagementHelper';
+import { hashPassword, sanitizeUser, validatePassword } from '../UserManagementHelper';

 export function ownerNamespace(this: N8nApp): void {
 	/**

@@ -24,7 +23,7 @@ export function ownerNamespace(this: N8nApp): void {
 			const { email, firstName, lastName, password } = req.body;
 			const { id: userId } = req.user;

-			if (config.get('userManagement.isInstanceOwnerSetUp')) {
+			if (config.getEnv('userManagement.isInstanceOwnerSetUp')) {
 				Logger.debug(
 					'Request to claim instance ownership failed because instance owner already exists',
 					{

@@ -74,7 +73,7 @@ export function ownerNamespace(this: N8nApp): void {
 				email,
 				firstName,
 				lastName,
-				password: hashSync(validPassword, genSaltSync(10)),
+				password: await hashPassword(validPassword),
 			});

 			await validateEntity(owner);
@@ -1,21 +1,20 @@
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 /* eslint-disable import/no-cycle */

-import express = require('express');
+import express from 'express';
 import { v4 as uuid } from 'uuid';
 import { URL } from 'url';
-import { genSaltSync, hashSync } from 'bcryptjs';
 import validator from 'validator';
 import { IsNull, MoreThanOrEqual, Not } from 'typeorm';
 import { LoggerProxy as Logger } from 'n8n-workflow';

 import { Db, InternalHooksManager, ResponseHelper } from '../..';
 import { N8nApp } from '../Interfaces';
-import { getInstanceBaseUrl, validatePassword } from '../UserManagementHelper';
+import { getInstanceBaseUrl, hashPassword, validatePassword } from '../UserManagementHelper';
 import * as UserManagementMailer from '../email';
 import type { PasswordResetRequest } from '../../requests';
 import { issueCookie } from '../auth/jwt';
-import config = require('../../../config');
+import * as config from '../../../config';

 export function passwordResetNamespace(this: N8nApp): void {
 	/**

@@ -26,7 +25,7 @@ export function passwordResetNamespace(this: N8nApp): void {
 	this.app.post(
 		`/${this.restEndpoint}/forgot-password`,
 		ResponseHelper.send(async (req: PasswordResetRequest.Email) => {
-			if (config.get('userManagement.emails.mode') === '') {
+			if (config.getEnv('userManagement.emails.mode') === '') {
 				Logger.debug('Request to send password reset email failed because emailing was not set up');
 				throw new ResponseHelper.ResponseError(
 					'Email sending must be set up in order to request a password reset email',

@@ -206,7 +205,7 @@ export function passwordResetNamespace(this: N8nApp): void {
 			}

 			await Db.collections.User!.update(userId, {
-				password: hashSync(validPassword, genSaltSync(10)),
+				password: await hashPassword(validPassword),
 				resetPasswordToken: null,
 				resetPasswordTokenExpiration: null,
 			});
@@ -3,7 +3,6 @@
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 import { Response } from 'express';
 import { In } from 'typeorm';
-import { genSaltSync, hashSync } from 'bcryptjs';
 import validator from 'validator';
 import { LoggerProxy as Logger } from 'n8n-workflow';

@@ -12,6 +11,7 @@ import { N8nApp, PublicUser } from '../Interfaces';
 import { UserRequest } from '../../requests';
 import {
 	getInstanceBaseUrl,
+	hashPassword,
 	isEmailSetUp,
 	sanitizeUser,
 	validatePassword,

@@ -21,7 +21,7 @@ import { SharedWorkflow } from '../../databases/entities/SharedWorkflow';
 import { SharedCredentials } from '../../databases/entities/SharedCredentials';
 import * as UserManagementMailer from '../email/UserManagementMailer';

-import config = require('../../../config');
+import * as config from '../../../config';
 import { issueCookie } from '../auth/jwt';

 export function usersNamespace(this: N8nApp): void {

@@ -31,7 +31,7 @@ export function usersNamespace(this: N8nApp): void {
 	this.app.post(
 		`/${this.restEndpoint}/users`,
 		ResponseHelper.send(async (req: UserRequest.Invite) => {
-			if (config.get('userManagement.emails.mode') === '') {
+			if (config.getEnv('userManagement.emails.mode') === '') {
 				Logger.debug(
 					'Request to send email invite(s) to user(s) failed because emailing was not set up',
 				);

@@ -56,14 +56,14 @@ export function usersNamespace(this: N8nApp): void {
 			}

 			// TODO: this should be checked in the middleware rather than here
-			if (config.get('userManagement.disabled')) {
+			if (config.getEnv('userManagement.disabled')) {
 				Logger.debug(
 					'Request to send email invite(s) to user(s) failed because user management is disabled',
 				);
 				throw new ResponseHelper.ResponseError('User management is disabled');
 			}

-			if (!config.get('userManagement.isInstanceOwnerSetUp')) {
+			if (!config.getEnv('userManagement.isInstanceOwnerSetUp')) {
 				Logger.debug(
 					'Request to send email invite(s) to user(s) failed because the owner account is not set up',
 				);

@@ -349,7 +349,7 @@ export function usersNamespace(this: N8nApp): void {

 			invitee.firstName = firstName;
 			invitee.lastName = lastName;
-			invitee.password = hashSync(validPassword, genSaltSync(10));
+			invitee.password = await hashPassword(validPassword);

 			const updatedUser = await Db.collections.User!.save(invitee);
@@ -12,7 +12,7 @@ import {
 	LoggerProxy as Logger,
 } from 'n8n-workflow';

-import * as express from 'express';
+import express from 'express';

 import {
 	Db,
@@ -13,7 +13,7 @@
 /* eslint-disable @typescript-eslint/restrict-template-expressions */
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 /* eslint-disable prefer-destructuring */
-import * as express from 'express';
+import express from 'express';
 // eslint-disable-next-line import/no-extraneous-dependencies
 import { get } from 'lodash';

@@ -132,26 +132,6 @@ export function encodeWebhookResponse(
 	return response;
 }

-/**
- * Returns all the webhooks which should be created for the give workflow
- *
- * @export
- * @param {string} workflowId
- * @param {Workflow} workflow
- * @returns {IWebhookData[]}
- */
-export function getWorkflowWebhooksBasic(workflow: Workflow): IWebhookData[] {
-	// Check all the nodes in the workflow if they have webhooks
-
-	const returnData: IWebhookData[] = [];
-
-	for (const node of Object.values(workflow.nodes)) {
-		returnData.push.apply(returnData, NodeHelpers.getNodeWebhooksBasic(workflow, node));
-	}
-
-	return returnData;
-}
-
 /**
  * Executes a webhook
  *

@@ -194,39 +174,6 @@ export async function executeWebhook(
 		$executionId: executionId,
 	};

-	// Get the responseMode
-	const responseMode = workflow.expression.getSimpleParameterValue(
-		workflowStartNode,
-		webhookData.webhookDescription.responseMode,
-		executionMode,
-		additionalKeys,
-		'onReceived',
-	);
-	const responseCode = workflow.expression.getSimpleParameterValue(
-		workflowStartNode,
-		webhookData.webhookDescription.responseCode,
-		executionMode,
-		additionalKeys,
-		200,
-	) as number;
-
-	const responseData = workflow.expression.getSimpleParameterValue(
-		workflowStartNode,
-		webhookData.webhookDescription.responseData,
-		executionMode,
-		additionalKeys,
-		'firstEntryJson',
-	);
-
-	if (!['onReceived', 'lastNode', 'responseNode'].includes(responseMode as string)) {
-		// If the mode is not known we error. Is probably best like that instead of using
-		// the default that people know as early as possible (probably already testing phase)
-		// that something does not resolve properly.
-		const errorMessage = `The response mode ${responseMode} is not valid!`;
-		responseCallback(new Error(errorMessage), {});
-		throw new ResponseHelper.ResponseError(errorMessage, 500, 500);
-	}
-
 	let user: User;
 	if (
 		(workflowData as WorkflowEntity).shared?.length &&

@@ -244,6 +191,42 @@ export async function executeWebhook(
 	// Prepare everything that is needed to run the workflow
 	const additionalData = await WorkflowExecuteAdditionalData.getBase(user.id);

+	// Get the responseMode
+	const responseMode = workflow.expression.getSimpleParameterValue(
+		workflowStartNode,
+		webhookData.webhookDescription.responseMode,
+		executionMode,
+		additionalData.timezone,
+		additionalKeys,
+		'onReceived',
+	);
+	const responseCode = workflow.expression.getSimpleParameterValue(
+		workflowStartNode,
+		webhookData.webhookDescription.responseCode,
+		executionMode,
+		additionalData.timezone,
+		additionalKeys,
+		200,
+	) as number;
+
+	const responseData = workflow.expression.getSimpleParameterValue(
+		workflowStartNode,
+		webhookData.webhookDescription.responseData,
+		executionMode,
+		additionalData.timezone,
+		additionalKeys,
+		'firstEntryJson',
+	);
+
+	if (!['onReceived', 'lastNode', 'responseNode'].includes(responseMode as string)) {
+		// If the mode is not known we error. Is probably best like that instead of using
+		// the default that people know as early as possible (probably already testing phase)
+		// that something does not resolve properly.
+		const errorMessage = `The response mode ${responseMode} is not valid!`;
+		responseCallback(new Error(errorMessage), {});
+		throw new ResponseHelper.ResponseError(errorMessage, 500, 500);
+	}
+
 	// Add the Response and Request so that this data can be accessed in the node
 	additionalData.httpRequest = req;
 	additionalData.httpResponse = res;

@@ -302,6 +285,7 @@ export async function executeWebhook(
 			workflowStartNode,
 			webhookData.webhookDescription.responseHeaders,
 			executionMode,
+			additionalData.timezone,
 			additionalKeys,
 			undefined,
 		) as {

@@ -560,6 +544,7 @@ export async function executeWebhook(
 						workflowStartNode,
 						webhookData.webhookDescription.responsePropertyName,
 						executionMode,
+						additionalData.timezone,
 						additionalKeys,
 						undefined,
 					);

@@ -572,6 +557,7 @@ export async function executeWebhook(
 						workflowStartNode,
 						webhookData.webhookDescription.responseContentType,
 						executionMode,
+						additionalData.timezone,
 						additionalKeys,
 						undefined,
 					);

@@ -616,6 +602,7 @@ export async function executeWebhook(
 						workflowStartNode,
 						webhookData.webhookDescription.responseBinaryPropertyName,
 						executionMode,
+						additionalData.timezone,
 						additionalKeys,
 						'data',
 					);
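The large move above is a data-dependency fix rather than a behavior change: the `getSimpleParameterValue` calls gained an `additionalData.timezone` argument, so they must now run after `WorkflowExecuteAdditionalData.getBase()` has produced that object. A schematic sketch of the dependency; the types and function bodies here are simplified stand-ins, not n8n's real signatures:

interface AdditionalData {
	timezone: string;
}

// Simplified stand-ins for getBase() and getSimpleParameterValue().
async function getBase(userId: string): Promise<AdditionalData> {
	return { timezone: 'America/New_York' }; // placeholder value
}

function resolveParameter(raw: unknown, timezone: string, fallback: unknown): unknown {
	// A real implementation would evaluate expressions in `timezone`.
	return raw ?? fallback;
}

async function executeWebhook(userId: string, rawResponseMode?: string): Promise<unknown> {
	const additionalData = await getBase(userId); // must happen first now
	return resolveParameter(rawResponseMode, additionalData.timezone, 'onReceived');
}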
@@ -6,16 +6,16 @@
 /* eslint-disable @typescript-eslint/no-unsafe-call */
 /* eslint-disable @typescript-eslint/no-unsafe-assignment */
 /* eslint-disable @typescript-eslint/no-unsafe-member-access */
-import * as express from 'express';
+import express from 'express';
 import { readFileSync } from 'fs';
 import { getConnectionManager } from 'typeorm';
-import * as bodyParser from 'body-parser';
+import bodyParser from 'body-parser';
 // eslint-disable-next-line import/no-extraneous-dependencies, @typescript-eslint/no-unused-vars
-import * as _ from 'lodash';
+import _ from 'lodash';

-import * as compression from 'compression';
+import compression from 'compression';
 // eslint-disable-next-line import/no-extraneous-dependencies
-import * as parseUrl from 'parseurl';
+import parseUrl from 'parseurl';
 import { WebhookHttpMethod } from 'n8n-workflow';
 // eslint-disable-next-line import/no-cycle
 import {

@@ -31,7 +31,7 @@ import {
 	WaitingWebhooks,
 } from '.';

-import * as config from '../config';
+import config from '../config';
 // eslint-disable-next-line import/no-cycle
 import { WEBHOOK_METHODS } from './WebhookHelpers';

@@ -193,28 +193,28 @@ class App {
 	constructor() {
 		this.app = express();

-		this.endpointWebhook = config.get('endpoints.webhook') as string;
-		this.endpointWebhookWaiting = config.get('endpoints.webhookWaiting') as string;
-		this.saveDataErrorExecution = config.get('executions.saveDataOnError') as string;
-		this.saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string;
-		this.saveManualExecutions = config.get('executions.saveDataManualExecutions') as boolean;
-		this.executionTimeout = config.get('executions.timeout') as number;
-		this.maxExecutionTimeout = config.get('executions.maxTimeout') as number;
-		this.timezone = config.get('generic.timezone') as string;
-		this.restEndpoint = config.get('endpoints.rest') as string;
+		this.endpointWebhook = config.getEnv('endpoints.webhook');
+		this.endpointWebhookWaiting = config.getEnv('endpoints.webhookWaiting');
+		this.saveDataErrorExecution = config.getEnv('executions.saveDataOnError');
+		this.saveDataSuccessExecution = config.getEnv('executions.saveDataOnSuccess');
+		this.saveManualExecutions = config.getEnv('executions.saveDataManualExecutions');
+		this.executionTimeout = config.getEnv('executions.timeout');
+		this.maxExecutionTimeout = config.getEnv('executions.maxTimeout');
+		this.timezone = config.getEnv('generic.timezone');
+		this.restEndpoint = config.getEnv('endpoints.rest');

 		this.activeWorkflowRunner = ActiveWorkflowRunner.getInstance();

 		this.activeExecutionsInstance = ActiveExecutions.getInstance();

-		this.protocol = config.get('protocol');
-		this.sslKey = config.get('ssl_key');
-		this.sslCert = config.get('ssl_cert');
+		this.protocol = config.getEnv('protocol');
+		this.sslKey = config.getEnv('ssl_key');
+		this.sslCert = config.getEnv('ssl_cert');

 		this.externalHooks = ExternalHooks();

 		this.presetCredentialsLoaded = false;
-		this.endpointPresetCredentials = config.get('credentials.overwrite.endpoint') as string;
+		this.endpointPresetCredentials = config.getEnv('credentials.overwrite.endpoint');
 	}

 	/**

@@ -342,8 +342,8 @@ class App {
 }

 export async function start(): Promise<void> {
-	const PORT = config.get('port');
-	const ADDRESS = config.get('listen_address');
+	const PORT = config.getEnv('port');
+	const ADDRESS = config.getEnv('listen_address');

 	const app = new App();
@@ -39,7 +39,7 @@ import {

 import { LessThanOrEqual } from 'typeorm';
 import { DateUtils } from 'typeorm/util/DateUtils';
-import * as config from '../config';
+import config from '../config';
 import {
 	ActiveExecutions,
 	CredentialsHelper,

@@ -67,7 +67,7 @@ import {
 } from './UserManagement/UserManagementHelper';
 import { whereClause } from './WorkflowHelpers';

-const ERROR_TRIGGER_TYPE = config.get('nodes.errorTriggerType') as string;
+const ERROR_TRIGGER_TYPE = config.getEnv('nodes.errorTriggerType');

 /**
  * Checks if there was an error and if errorWorkflow or a trigger is defined. If so it collects

@@ -171,8 +171,8 @@ function pruneExecutionData(this: WorkflowHooks): void {
 		Logger.verbose('Pruning execution data from database');

 		throttling = true;
-		const timeout = config.get('executions.pruneDataTimeout') as number; // in seconds
-		const maxAge = config.get('executions.pruneDataMaxAge') as number; // in h
+		const timeout = config.getEnv('executions.pruneDataTimeout'); // in seconds
+		const maxAge = config.getEnv('executions.pruneDataMaxAge'); // in h
 		const date = new Date(); // today
 		date.setHours(date.getHours() - maxAge);

@@ -357,11 +357,11 @@ export function hookFunctionsPreExecute(parentProcessMode?: string): IWorkflowEx
 					}
 					if (
 						this.workflowData.settings.saveExecutionProgress !== true &&
-						!config.get('executions.saveExecutionProgress')
+						!config.getEnv('executions.saveExecutionProgress')
 					) {
 						return;
 					}
-				} else if (!config.get('executions.saveExecutionProgress')) {
+				} else if (!config.getEnv('executions.saveExecutionProgress')) {
 					return;
 				}

@@ -466,7 +466,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
 				});

 				// Prune old execution data
-				if (config.get('executions.pruneData')) {
+				if (config.getEnv('executions.pruneData')) {
 					pruneExecutionData.call(this);
 				}

@@ -492,7 +492,7 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
 					}
 				}

-				let saveManualExecutions = config.get('executions.saveDataManualExecutions') as boolean;
+				let saveManualExecutions = config.getEnv('executions.saveDataManualExecutions');
 				if (
 					this.workflowData.settings !== undefined &&
 					this.workflowData.settings.saveManualExecutions !== undefined

@@ -512,8 +512,8 @@ function hookFunctionsSave(parentProcessMode?: string): IWorkflowExecuteHooks {
 				}

 				// Check config to know if execution should be saved or not
-				let saveDataErrorExecution = config.get('executions.saveDataOnError') as string;
-				let saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string;
+				let saveDataErrorExecution = config.getEnv('executions.saveDataOnError') as string;
+				let saveDataSuccessExecution = config.getEnv('executions.saveDataOnSuccess') as string;
 				if (this.workflowData.settings !== undefined) {
 					saveDataErrorExecution =
 						(this.workflowData.settings.saveDataErrorExecution as string) ||

@@ -800,7 +800,7 @@ export async function getWorkflowData(
 	const user = await getUserById(userId);
 	let relations = ['workflow', 'workflow.tags'];

-	if (config.get('workflowTagsDisabled')) {
+	if (config.getEnv('workflowTagsDisabled')) {
 		relations = relations.filter((relation) => relation !== 'workflow.tags');
 	}

@@ -1028,10 +1028,10 @@ export async function getBase(
 ): Promise<IWorkflowExecuteAdditionalData> {
 	const urlBaseWebhook = WebhookHelpers.getWebhookBaseUrl();

-	const timezone = config.get('generic.timezone') as string;
-	const webhookBaseUrl = urlBaseWebhook + config.get('endpoints.webhook');
-	const webhookWaitingBaseUrl = urlBaseWebhook + config.get('endpoints.webhookWaiting');
-	const webhookTestBaseUrl = urlBaseWebhook + config.get('endpoints.webhookTest');
+	const timezone = config.getEnv('generic.timezone');
+	const webhookBaseUrl = urlBaseWebhook + config.getEnv('endpoints.webhook');
+	const webhookWaitingBaseUrl = urlBaseWebhook + config.getEnv('endpoints.webhookWaiting');
+	const webhookTestBaseUrl = urlBaseWebhook + config.getEnv('endpoints.webhookTest');

 	const encryptionKey = await UserSettings.getEncryptionKey();
 	if (encryptionKey === undefined) {

@@ -1042,7 +1042,7 @@ export async function getBase(
 		credentialsHelper: new CredentialsHelper(encryptionKey),
 		encryptionKey,
 		executeWorkflow,
-		restApiUrl: urlBaseWebhook + config.get('endpoints.rest'),
+		restApiUrl: urlBaseWebhook + config.getEnv('endpoints.rest'),
 		timezone,
 		webhookBaseUrl,
 		webhookWaitingBaseUrl,
@@ -33,13 +33,13 @@
 	WorkflowRunner,
 } from '.';

-import * as config from '../config';
+import config from '../config';
 // eslint-disable-next-line import/no-cycle
 import { WorkflowEntity } from './databases/entities/WorkflowEntity';
 import { User } from './databases/entities/User';
 import { getWorkflowOwner } from './UserManagement/UserManagementHelper';

-const ERROR_TRIGGER_TYPE = config.get('nodes.errorTriggerType') as string;
+const ERROR_TRIGGER_TYPE = config.getEnv('nodes.errorTriggerType');

 /**
  * Returns the data of the last executed node
@@ -26,12 +26,12 @@ import {
 } from 'n8n-workflow';

 // eslint-disable-next-line import/no-extraneous-dependencies
-import * as PCancelable from 'p-cancelable';
+import PCancelable from 'p-cancelable';
 import { join as pathJoin } from 'path';
 import { fork } from 'child_process';

-import * as Bull from 'bull';
-import * as config from '../config';
+import Bull from 'bull';
+import config from '../config';
 // eslint-disable-next-line import/no-cycle
 import {
 	ActiveExecutions,

@@ -73,7 +73,7 @@ export class WorkflowRunner {
 		this.activeExecutions = ActiveExecutions.getInstance();
 		this.credentialsOverwrites = CredentialsOverwrites().getAll();

-		const executionsMode = config.get('executions.mode') as string;
+		const executionsMode = config.getEnv('executions.mode');

 		if (executionsMode === 'queue') {
 			this.jobQueue = Queue.getInstance().getBullObjectInstance();

@@ -150,8 +150,8 @@ export class WorkflowRunner {
 		executionId?: string,
 		responsePromise?: IDeferredPromise<IExecuteResponsePromiseData>,
 	): Promise<string> {
-		const executionsProcess = config.get('executions.process') as string;
-		const executionsMode = config.get('executions.mode') as string;
+		const executionsProcess = config.getEnv('executions.process');
+		const executionsMode = config.getEnv('executions.mode');

 		if (executionsMode === 'queue' && data.executionMode !== 'manual') {
 			// Do not run "manual" executions in bull because sending events to the

@@ -229,13 +229,13 @@ export class WorkflowRunner {
 		// Changes were made by adding the `workflowTimeout` to the `additionalData`
 		// So that the timeout will also work for executions with nested workflows.
 		let executionTimeout: NodeJS.Timeout;
-		let workflowTimeout = config.get('executions.timeout') as number; // initialize with default
+		let workflowTimeout = config.getEnv('executions.timeout'); // initialize with default
 		if (data.workflowData.settings && data.workflowData.settings.executionTimeout) {
 			workflowTimeout = data.workflowData.settings.executionTimeout as number; // preference on workflow setting
 		}

 		if (workflowTimeout > 0) {
-			workflowTimeout = Math.min(workflowTimeout, config.get('executions.maxTimeout') as number);
+			workflowTimeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout'));
 		}

 		const workflow = new Workflow({

@@ -326,8 +326,7 @@ export class WorkflowRunner {
 		this.activeExecutions.attachWorkflowExecution(executionId, workflowExecution);

 		if (workflowTimeout > 0) {
-			const timeout =
-				Math.min(workflowTimeout, config.get('executions.maxTimeout') as number) * 1000; // as seconds
+			const timeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout')) * 1000; // as seconds
 			executionTimeout = setTimeout(() => {
 				this.activeExecutions.stopExecution(executionId, 'timeout');
 			}, timeout);

@@ -450,7 +449,7 @@ export class WorkflowRunner {

 		const jobData: Promise<IBullJobResponse> = job.finished();

-		const queueRecoveryInterval = config.get('queue.bull.queueRecoveryInterval') as number;
+		const queueRecoveryInterval = config.getEnv('queue.bull.queueRecoveryInterval');

 		const racingPromises: Array<Promise<IBullJobResponse | object>> = [jobData];

@@ -533,8 +532,8 @@ export class WorkflowRunner {
 			try {
 				// Check if this execution data has to be removed from database
 				// based on workflow settings.
-				let saveDataErrorExecution = config.get('executions.saveDataOnError') as string;
-				let saveDataSuccessExecution = config.get('executions.saveDataOnSuccess') as string;
+				let saveDataErrorExecution = config.getEnv('executions.saveDataOnError') as string;
+				let saveDataSuccessExecution = config.getEnv('executions.saveDataOnSuccess') as string;
 				if (data.workflowData.settings !== undefined) {
 					saveDataErrorExecution =
 						(data.workflowData.settings.saveDataErrorExecution as string) ||

@@ -643,7 +642,7 @@ export class WorkflowRunner {

 		// Start timeout for the execution
 		let executionTimeout: NodeJS.Timeout;
-		let workflowTimeout = config.get('executions.timeout') as number; // initialize with default
+		let workflowTimeout = config.getEnv('executions.timeout'); // initialize with default
 		if (data.workflowData.settings && data.workflowData.settings.executionTimeout) {
 			workflowTimeout = data.workflowData.settings.executionTimeout as number; // preference on workflow setting
 		}

@@ -654,8 +653,7 @@ export class WorkflowRunner {
 		};

 		if (workflowTimeout > 0) {
-			workflowTimeout =
-				Math.min(workflowTimeout, config.get('executions.maxTimeout') as number) * 1000; // as seconds
+			workflowTimeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout')) * 1000; // as seconds
 			// Start timeout already now but give process at least 5 seconds to start.
 			// Without it could would it be possible that the workflow executions times out before it even got started if
 			// the timeout time is very short as the process start time can be quite long.
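Several hunks above repeat the same clamp: a per-workflow timeout is capped by the instance-wide `executions.maxTimeout` and then converted from seconds to milliseconds for `setTimeout`. The pattern, extracted as a standalone sketch (the numbers are illustrative):

// 0 or a negative value disables the timer, matching the `> 0` guards above.
function effectiveTimeoutMs(workflowTimeoutSec: number, maxTimeoutSec: number): number {
	if (workflowTimeoutSec <= 0) return 0;
	return Math.min(workflowTimeoutSec, maxTimeoutSec) * 1000; // seconds -> ms
}

// A workflow asking for 3600s on an instance capped at 300s gets 300000ms.
console.log(effectiveTimeoutMs(3600, 300)); // 300000
console.log(effectiveTimeoutMs(-1, 300)); // 0 (no timeout)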
@@ -5,13 +5,7 @@
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 /* eslint-disable @typescript-eslint/no-use-before-define */
 /* eslint-disable @typescript-eslint/unbound-method */
-import {
-	BinaryDataManager,
-	IBinaryDataConfig,
-	IProcessMessage,
-	UserSettings,
-	WorkflowExecute,
-} from 'n8n-core';
+import { BinaryDataManager, IProcessMessage, UserSettings, WorkflowExecute } from 'n8n-core';

 import {
 	ExecutionError,

@@ -50,7 +44,7 @@ import {

 import { getLogger } from './Logger';

-import * as config from '../config';
+import config from '../config';
 import { InternalHooksManager } from './InternalHooksManager';
 import { checkPermissionsForExecution } from './UserManagement/UserManagementHelper';

@@ -176,7 +170,7 @@ export class WorkflowRunnerProcess {
 		const { cli } = await GenericHelpers.getVersions();
 		InternalHooksManager.init(instanceId, cli, nodeTypes);

-		const binaryDataConfig = config.get('binaryDataManager') as IBinaryDataConfig;
+		const binaryDataConfig = config.getEnv('binaryDataManager');
 		await BinaryDataManager.init(binaryDataConfig);

 		// Credentials should now be loaded from database.

@@ -204,27 +198,27 @@ export class WorkflowRunnerProcess {
 		} else if (
 			inputData.workflowData.settings !== undefined &&
 			inputData.workflowData.settings.saveExecutionProgress !== false &&
-			(config.get('executions.saveExecutionProgress') as boolean)
+			config.getEnv('executions.saveExecutionProgress')
 		) {
 			// Workflow settings not saying anything about saving but default settings says so
 			await Db.init();
 		} else if (
 			inputData.workflowData.settings === undefined &&
-			(config.get('executions.saveExecutionProgress') as boolean)
+			config.getEnv('executions.saveExecutionProgress')
 		) {
 			// Workflow settings not saying anything about saving but default settings says so
 			await Db.init();
 		}

 		// Start timeout for the execution
-		let workflowTimeout = config.get('executions.timeout') as number; // initialize with default
+		let workflowTimeout = config.getEnv('executions.timeout'); // initialize with default
 		// eslint-disable-next-line @typescript-eslint/prefer-optional-chain
 		if (this.data.workflowData.settings && this.data.workflowData.settings.executionTimeout) {
 			workflowTimeout = this.data.workflowData.settings.executionTimeout as number; // preference on workflow setting
 		}

 		if (workflowTimeout > 0) {
-			workflowTimeout = Math.min(workflowTimeout, config.get('executions.maxTimeout') as number);
+			workflowTimeout = Math.min(workflowTimeout, config.getEnv('executions.maxTimeout'));
 		}

 		this.workflow = new Workflow({
@@ -3,7 +3,7 @@
 /* eslint-disable no-restricted-syntax */
 /* eslint-disable @typescript-eslint/no-non-null-assertion */
 /* eslint-disable import/no-cycle */
-import express = require('express');
+import express from 'express';
 import { In } from 'typeorm';
 import { UserSettings, Credentials } from 'n8n-core';
 import { INodeCredentialTestResult, LoggerProxy } from 'n8n-workflow';

@@ -24,7 +24,7 @@ import { CredentialsEntity } from '../databases/entities/CredentialsEntity';
 import { SharedCredentials } from '../databases/entities/SharedCredentials';
 import { validateEntity } from '../GenericHelpers';
 import type { CredentialRequest } from '../requests';
-import config = require('../../config');
+import * as config from '../../config';
 import { externalHooks } from '../Server';

 export const credentialsController = express.Router();

@@ -99,7 +99,7 @@ credentialsController.get(
 		const { name: newName } = req.query;

 		return GenericHelpers.generateUniqueName(
-			newName ?? config.get('credentials.defaultName'),
+			newName ?? config.getEnv('credentials.defaultName'),
 			'credentials',
 		);
 	}),
@@ -14,12 +14,12 @@ import {
 } from 'typeorm';

 import { IsArray, IsObject, IsString, Length } from 'class-validator';
-import config = require('../../../config');
+import * as config from '../../../config';
 import { DatabaseType, ICredentialsDb } from '../..';
 import { SharedCredentials } from './SharedCredentials';

 function resolveDataType(dataType: string) {
-	const dbType = config.get('database.type') as DatabaseType;
+	const dbType = config.getEnv('database.type');

 	const typeMap: { [key in DatabaseType]: { [key: string]: string } } = {
 		sqlite: {

@@ -37,7 +37,7 @@ function resolveDataType(dataType: string) {

 // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
 function getTimestampSyntax() {
-	const dbType = config.get('database.type') as DatabaseType;
+	const dbType = config.getEnv('database.type');

 	const map: { [key in DatabaseType]: string } = {
 		sqlite: `STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')`,
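Every entity file below repeats the same `resolveDataType`/`getTimestampSyntax` pattern: pick a SQL fragment keyed by the configured database type. A sketch of the shape; only the sqlite value is taken from the diff, and the other union members and the fallback are assumptions:

// Only the sqlite entry is taken from the diff; the union members are assumed.
type DatabaseType = 'sqlite' | 'mariadb' | 'mysqldb' | 'postgresdb';

function getTimestampSyntax(dbType: DatabaseType): string {
	const map: Partial<Record<DatabaseType, string>> = {
		sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",
		// mariadb / mysqldb / postgresdb values omitted: not shown in this diff.
	};
	// Illustrative fallback; the real map covers every DatabaseType.
	return map[dbType] ?? 'CURRENT_TIMESTAMP';
}

console.log(getTimestampSyntax('sqlite'));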
@@ -2,11 +2,11 @@
 import { WorkflowExecuteMode } from 'n8n-workflow';

 import { Column, ColumnOptions, Entity, Index, PrimaryGeneratedColumn } from 'typeorm';
-import config = require('../../../config');
+import * as config from '../../../config';
 import { DatabaseType, IExecutionFlattedDb, IWorkflowDb } from '../..';

 function resolveDataType(dataType: string) {
-	const dbType = config.get('database.type') as DatabaseType;
+	const dbType = config.getEnv('database.type');

 	const typeMap: { [key in DatabaseType]: { [key: string]: string } } = {
 		sqlite: {
@@ -11,7 +11,7 @@ import {
 } from 'typeorm';
 import { IsDate, IsOptional, IsString, Length } from 'class-validator';

-import config = require('../../../config');
+import * as config from '../../../config';
 import { DatabaseType } from '../../index';
 import { User } from './User';
 import { SharedWorkflow } from './SharedWorkflow';

@@ -21,7 +21,7 @@ type RoleScopes = 'global' | 'workflow' | 'credential';

 // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
 function getTimestampSyntax() {
-	const dbType = config.get('database.type') as DatabaseType;
+	const dbType = config.getEnv('database.type');

 	const map: { [key in DatabaseType]: string } = {
 		sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",
@@ -9,7 +9,7 @@ import {
 } from 'typeorm';
 import { IsDate, IsOptional } from 'class-validator';

-import config = require('../../../config');
+import * as config from '../../../config';
 import { DatabaseType } from '../../index';
 import { CredentialsEntity } from './CredentialsEntity';
 import { User } from './User';

@@ -17,7 +17,7 @@ import { Role } from './Role';

 // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
 function getTimestampSyntax() {
-	const dbType = config.get('database.type') as DatabaseType;
+	const dbType = config.getEnv('database.type');

 	const map: { [key in DatabaseType]: string } = {
 		sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",
@@ -9,7 +9,7 @@ import {
 } from 'typeorm';
 import { IsDate, IsOptional } from 'class-validator';

-import config = require('../../../config');
+import * as config from '../../../config';
 import { DatabaseType } from '../../index';
 import { WorkflowEntity } from './WorkflowEntity';
 import { User } from './User';

@@ -17,7 +17,7 @@ import { Role } from './Role';

 // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
 function getTimestampSyntax() {
-	const dbType = config.get('database.type') as DatabaseType;
+	const dbType = config.getEnv('database.type');

 	const map: { [key in DatabaseType]: string } = {
 		sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",
@@ -13,7 +13,7 @@ import {
 } from 'typeorm';
 import { IsDate, IsOptional, IsString, Length } from 'class-validator';

-import config = require('../../../config');
+import * as config from '../../../config';
 import { DatabaseType } from '../../index';
 import { ITagDb } from '../../Interfaces';
 import { idStringifier } from '../utils/transformers';

@@ -21,7 +21,7 @@ import { WorkflowEntity } from './WorkflowEntity';

 // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
 function getTimestampSyntax() {
-	const dbType = config.get('database.type') as DatabaseType;
+	const dbType = config.getEnv('database.type');

 	const map: { [key in DatabaseType]: string } = {
 		sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",
@@ -14,7 +14,7 @@ import {
 	UpdateDateColumn,
 } from 'typeorm';
 import { IsEmail, IsString, Length } from 'class-validator';
-import config = require('../../../config');
+import * as config from '../../../config';
 import { DatabaseType, IPersonalizationSurveyAnswers } from '../..';
 import { Role } from './Role';
 import { SharedWorkflow } from './SharedWorkflow';

@@ -27,7 +27,7 @@ export const MIN_PASSWORD_LENGTH = 8;
 export const MAX_PASSWORD_LENGTH = 64;

 function resolveDataType(dataType: string) {
-	const dbType = config.get('database.type') as DatabaseType;
+	const dbType = config.getEnv('database.type');

 	const typeMap: { [key in DatabaseType]: { [key: string]: string } } = {
 		sqlite: {

@@ -45,7 +45,7 @@ function resolveDataType(dataType: string) {

 // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
 function getTimestampSyntax() {
-	const dbType = config.get('database.type') as DatabaseType;
+	const dbType = config.getEnv('database.type');

 	const map: { [key in DatabaseType]: string } = {
 		sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",

@@ -62,7 +62,7 @@ export class User {
 	@PrimaryGeneratedColumn('uuid')
 	id: string;

-	@Column({ length: 254 })
+	@Column({ length: 254, nullable: true })
 	@Index({ unique: true })
 	@IsEmail()
 	email: string;

@@ -81,7 +81,7 @@ export class User {

 	@Column({ nullable: true })
 	@IsString({ message: 'Password must be of type string.' })
-	password?: string;
+	password: string;

 	@Column({ type: String, nullable: true })
 	resetPasswordToken?: string | null;
@@ -18,13 +18,13 @@ import {
 	UpdateDateColumn,
 } from 'typeorm';

-import config = require('../../../config');
+import * as config from '../../../config';
 import { DatabaseType, IWorkflowDb } from '../..';
 import { TagEntity } from './TagEntity';
 import { SharedWorkflow } from './SharedWorkflow';

 function resolveDataType(dataType: string) {
-	const dbType = config.get('database.type') as DatabaseType;
+	const dbType = config.getEnv('database.type');

 	const typeMap: { [key in DatabaseType]: { [key: string]: string } } = {
 		sqlite: {

@@ -42,7 +42,7 @@ function resolveDataType(dataType: string) {

 // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
 function getTimestampSyntax() {
-	const dbType = config.get('database.type') as DatabaseType;
+	const dbType = config.getEnv('database.type');

 	const map: { [key in DatabaseType]: string } = {
 		sqlite: "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')",
@@ -6,7 +6,7 @@ export class InitialMigration1588157391238 implements MigrationInterface {
 	name = 'InitialMigration1588157391238';

 	async up(queryRunner: QueryRunner): Promise<void> {
-		const tablePrefix = config.get('database.tablePrefix');
+		const tablePrefix = config.getEnv('database.tablePrefix');

 		await queryRunner.query('CREATE TABLE IF NOT EXISTS `' + tablePrefix + 'credentials_entity` (`id` int NOT NULL AUTO_INCREMENT, `name` varchar(128) NOT NULL, `data` text NOT NULL, `type` varchar(32) NOT NULL, `nodesAccess` json NOT NULL, `createdAt` datetime NOT NULL, `updatedAt` datetime NOT NULL, INDEX `IDX_' + tablePrefix + '07fde106c0b471d8cc80a64fc8` (`type`), PRIMARY KEY (`id`)) ENGINE=InnoDB', undefined);
 		await queryRunner.query('CREATE TABLE IF NOT EXISTS `' + tablePrefix + 'execution_entity` (`id` int NOT NULL AUTO_INCREMENT, `data` text NOT NULL, `finished` tinyint NOT NULL, `mode` varchar(255) NOT NULL, `retryOf` varchar(255) NULL, `retrySuccessId` varchar(255) NULL, `startedAt` datetime NOT NULL, `stoppedAt` datetime NOT NULL, `workflowData` json NOT NULL, `workflowId` varchar(255) NULL, INDEX `IDX_' + tablePrefix + 'c4d999a5e90784e8caccf5589d` (`workflowId`), PRIMARY KEY (`id`)) ENGINE=InnoDB', undefined);

@@ -14,7 +14,7 @@ export class InitialMigration1588157391238 implements MigrationInterface {
 	}

 	async down(queryRunner: QueryRunner): Promise<void> {
-		const tablePrefix = config.get('database.tablePrefix');
+		const tablePrefix = config.getEnv('database.tablePrefix');

 		await queryRunner.query('DROP TABLE `' + tablePrefix + 'workflow_entity`', undefined);
 		await queryRunner.query('DROP INDEX `IDX_' + tablePrefix + 'c4d999a5e90784e8caccf5589d` ON `' + tablePrefix + 'execution_entity`', undefined);
@ -9,13 +9,13 @@ export class WebhookModel1592447867632 implements MigrationInterface {
|
|||
name = 'WebhookModel1592447867632';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
await queryRunner.query(`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity (workflowId int NOT NULL, webhookPath varchar(255) NOT NULL, method varchar(255) NOT NULL, node varchar(255) NOT NULL, PRIMARY KEY (webhookPath, method)) ENGINE=InnoDB`);
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
await queryRunner.query(`DROP TABLE ${tablePrefix}webhook_entity`);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -6,13 +6,13 @@ export class CreateIndexStoppedAt1594902918301 implements MigrationInterface {
|
|||
name = 'CreateIndexStoppedAt1594902918301';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
await queryRunner.query('CREATE INDEX `IDX_' + tablePrefix + 'cefb067df2402f6aed0638a6c1` ON `' + tablePrefix + 'execution_entity` (`stoppedAt`)');
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
await queryRunner.query('DROP INDEX `IDX_' + tablePrefix + 'cefb067df2402f6aed0638a6c1` ON `' + tablePrefix + 'execution_entity`');
|
||||
}
|
||||
|
|
|
@ -5,12 +5,12 @@ import * as config from '../../../../config';
|
|||
export class MakeStoppedAtNullable1607431743767 implements MigrationInterface {
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY `stoppedAt` datetime', undefined);
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY `stoppedAt` datetime NOT NULL', undefined);
|
||||
}
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@ export class AddWebhookId1611149998770 implements MigrationInterface {
|
|||
name = 'AddWebhookId1611149998770';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'webhook_entity` ADD `webhookId` varchar(255) NULL');
|
||||
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'webhook_entity` ADD `pathLength` int NULL');
|
||||
|
@ -13,7 +13,7 @@ export class AddWebhookId1611149998770 implements MigrationInterface {
|
|||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
await queryRunner.query(
|
||||
'DROP INDEX `IDX_' + tablePrefix + '742496f199721a057051acf4c2` ON `' + tablePrefix + 'webhook_entity`'
|
||||
|
|
|
@ -5,13 +5,13 @@ export class ChangeDataSize1615306975123 implements MigrationInterface {
|
|||
name = 'ChangeDataSize1615306975123';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY COLUMN `data` MEDIUMTEXT NOT NULL');
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` MODIFY COLUMN `data` TEXT NOT NULL');
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@ export class CreateTagEntity1617268711084 implements MigrationInterface {
|
|||
name = 'CreateTagEntity1617268711084';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
// create tags table + relationship with workflow entity
|
||||
|
||||
|
@ -25,7 +25,7 @@ export class CreateTagEntity1617268711084 implements MigrationInterface {
|
|||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
// `createdAt` and `updatedAt`
|
||||
|
||||
|
|
|
@ -5,13 +5,13 @@ export class ChangeCredentialDataSize1620729500000 implements MigrationInterface
|
|||
name = 'ChangeCredentialDataSize1620729500000';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'credentials_entity` MODIFY COLUMN `type` varchar(128) NOT NULL');
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'credentials_entity` MODIFY COLUMN `type` varchar(32) NOT NULL');
|
||||
}
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
import {MigrationInterface, QueryRunner} from "typeorm";
|
||||
import config = require("../../../../config");
|
||||
import * as config from '../../../../config';
|
||||
|
||||
export class UniqueWorkflowNames1620826335440 implements MigrationInterface {
|
||||
name = 'UniqueWorkflowNames1620826335440';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
const workflowNames = await queryRunner.query(`
|
||||
SELECT name
|
||||
|
@ -40,7 +40,7 @@ export class UniqueWorkflowNames1620826335440 implements MigrationInterface {
|
|||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'workflow_entity` DROP INDEX `IDX_' + tablePrefix + '943d8f922be094eb507cb9a7f9`');
|
||||
}
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||
import config = require('../../../../config');
|
||||
import * as config from '../../../../config';
|
||||
|
||||
export class CertifyCorrectCollation1623936588000 implements MigrationInterface {
|
||||
name = 'CertifyCorrectCollation1623936588000';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const databaseType = config.get('database.type');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const databaseType = config.getEnv('database.type');
|
||||
|
||||
if (databaseType === 'mariadb') {
|
||||
// This applies to MySQL only.
|
||||
|
@ -19,7 +19,7 @@ export class CertifyCorrectCollation1623936588000 implements MigrationInterface
|
|||
collation = 'utf8mb4_0900_ai_ci';
|
||||
}
|
||||
|
||||
const databaseName = config.get(`database.mysqldb.database`);
|
||||
const databaseName = config.getEnv(`database.mysqldb.database`);
|
||||
|
||||
await queryRunner.query(`ALTER DATABASE \`${databaseName}\` CHARACTER SET utf8mb4 COLLATE ${collation};`);
|
||||
|
||||
|
|
|
@ -5,14 +5,14 @@ export class AddWaitColumnId1626183952959 implements MigrationInterface {
|
|||
name = 'AddWaitColumnId1626183952959';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
await queryRunner.query('ALTER TABLE `' + tablePrefix + 'execution_entity` ADD `waitTill` DATETIME NULL');
|
||||
await queryRunner.query('CREATE INDEX `IDX_' + tablePrefix + 'ca4a71b47f28ac6ea88293a8e2` ON `' + tablePrefix + 'execution_entity` (`waitTill`)');
|
||||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
await queryRunner.query(
|
||||
'DROP INDEX `IDX_' + tablePrefix + 'ca4a71b47f28ac6ea88293a8e2` ON `' + tablePrefix + 'execution_entity`'
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||
import config = require('../../../../config');
|
||||
import * as config from '../../../../config';
|
||||
import { MigrationHelpers } from '../../MigrationHelpers';
|
||||
|
||||
// replacing the credentials in workflows and execution
|
||||
|
@ -9,7 +9,7 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac
|
|||
name = 'UpdateWorkflowCredentials1630451444017';
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const helpers = new MigrationHelpers(queryRunner);
|
||||
|
||||
const credentialsEntities = await queryRunner.query(`
|
||||
|
@ -146,7 +146,7 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac
|
|||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const helpers = new MigrationHelpers(queryRunner);
|
||||
|
||||
const credentialsEntities = await queryRunner.query(`
|
||||
|
|
|
@ -5,7 +5,7 @@ export class AddExecutionEntityIndexes1644424784709 implements MigrationInterfac
|
|||
name = 'AddExecutionEntityIndexes1644424784709';
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
await queryRunner.query(
|
||||
'DROP INDEX `IDX_c4d999a5e90784e8caccf5589d` ON `' + tablePrefix + 'execution_entity`',
|
||||
|
@ -41,7 +41,7 @@ export class AddExecutionEntityIndexes1644424784709 implements MigrationInterfac
|
|||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
await queryRunner.query(
|
||||
'DROP INDEX `IDX_81fc04c8a17de15835713505e4` ON `' + tablePrefix + 'execution_entity`',
|
||||
);
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
import { MigrationInterface, QueryRunner } from 'typeorm';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
import config = require('../../../../config');
|
||||
import * as config from '../../../../config';
|
||||
import { loadSurveyFromDisk } from '../../utils/migrationHelpers';
|
||||
|
||||
export class CreateUserManagement1646992772331 implements MigrationInterface {
|
||||
name = 'CreateUserManagement1646992772331';
|
||||
|
||||
public async up(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
await queryRunner.query(
|
||||
`CREATE TABLE ${tablePrefix}role (
|
||||
|
@ -156,7 +156,7 @@ export class CreateUserManagement1646992772331 implements MigrationInterface {
|
|||
}
|
||||
|
||||
public async down(queryRunner: QueryRunner): Promise<void> {
|
||||
const tablePrefix = config.get('database.tablePrefix');
|
||||
const tablePrefix = config.getEnv('database.tablePrefix');
|
||||
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE ${tablePrefix}workflow_entity ADD UNIQUE INDEX \`IDX_${tablePrefix}943d8f922be094eb507cb9a7f9\` (\`name\`)`,
|
||||
|
|
|
@ -7,9 +7,9 @@ export class InitialMigration1587669153312 implements MigrationInterface {
|
|||
name = 'InitialMigration1587669153312';
|
||||
|
||||
async up(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const tablePrefixIndex = tablePrefix;
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
@ -22,9 +22,9 @@ export class InitialMigration1587669153312 implements MigrationInterface {
|
|||
}
|
||||
|
||||
async down(queryRunner: QueryRunner): Promise<void> {
|
||||
let tablePrefix = config.get('database.tablePrefix');
|
||||
let tablePrefix = config.getEnv('database.tablePrefix');
|
||||
const tablePrefixIndex = tablePrefix;
|
||||
const schema = config.get('database.postgresdb.schema');
|
||||
const schema = config.getEnv('database.postgresdb.schema');
|
||||
if (schema) {
|
||||
tablePrefix = schema + '.' + tablePrefix;
|
||||
}
|
||||
|
|
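Every Postgres migration above and below repeats the same prologue: read `database.tablePrefix`, keep an unqualified copy for index names, and prepend `database.postgresdb.schema` when one is set. A sketch of that boilerplate as a helper (the function name is hypothetical; the migrations inline this logic):

// Hypothetical helper; the Postgres migrations in this commit inline this logic.
function resolvePrefixes(tablePrefix: string, schema: string | undefined) {
	// Index identifiers must stay unqualified, so keep the bare prefix too.
	const tablePrefixPure = tablePrefix;
	const qualified = schema ? `${schema}.${tablePrefix}` : tablePrefix;
	return { tablePrefix: qualified, tablePrefixPure };
}

const { tablePrefix, tablePrefixPure } = resolvePrefixes('n8n_', 'public');
console.log(tablePrefix); // "public.n8n_"
console.log(tablePrefixPure); // "n8n_"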
@ -9,9 +9,9 @@ export class WebhookModel1589476000887 implements MigrationInterface {
	name = 'WebhookModel1589476000887';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixIndex = tablePrefix;
		const schema = config.get('database.postgresdb.schema');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

@ -20,8 +20,8 @@ export class WebhookModel1589476000887 implements MigrationInterface {
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		const schema = config.get('database.postgresdb.schema');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

@ -6,9 +6,9 @@ export class CreateIndexStoppedAt1594828256133 implements MigrationInterface {
	name = 'CreateIndexStoppedAt1594828256133';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.get('database.postgresdb.schema');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

@ -17,7 +17,7 @@ export class CreateIndexStoppedAt1594828256133 implements MigrationInterface {
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(`DROP INDEX IDX_${tablePrefix}33228da131bb1112247cf52a42`);
	}

@ -6,8 +6,8 @@ export class MakeStoppedAtNullable1607431743768 implements MigrationInterface {
	name = 'MakeStoppedAtNullable1607431743768';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		const schema = config.get('database.postgresdb.schema');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

@ -5,9 +5,9 @@ export class AddWebhookId1611144599516 implements MigrationInterface {
	name = 'AddWebhookId1611144599516';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.get('database.postgresdb.schema');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

@ -18,9 +18,9 @@ export class AddWebhookId1611144599516 implements MigrationInterface {
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.get('database.postgresdb.schema');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

@ -5,9 +5,9 @@ export class CreateTagEntity1617270242566 implements MigrationInterface {
	name = 'CreateTagEntity1617270242566';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.get('database.postgresdb.schema');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

@ -40,9 +40,9 @@ export class CreateTagEntity1617270242566 implements MigrationInterface {
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.get('database.postgresdb.schema');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

@ -1,13 +1,13 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import config = require('../../../../config');
import * as config from '../../../../config';

export class UniqueWorkflowNames1620824779533 implements MigrationInterface {
	name = 'UniqueWorkflowNames1620824779533';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.get('database.postgresdb.schema');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

@ -58,9 +58,9 @@ export class UniqueWorkflowNames1620824779533 implements MigrationInterface {
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.get('database.postgresdb.schema');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

@ -5,9 +5,9 @@ export class AddwaitTill1626176912946 implements MigrationInterface {
	name = 'AddwaitTill1626176912946';

	async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.get('database.postgresdb.schema');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

@ -17,9 +17,9 @@ export class AddwaitTill1626176912946 implements MigrationInterface {
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.get('database.postgresdb.schema');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

@ -1,5 +1,5 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import config = require('../../../../config');
import * as config from '../../../../config';
import { MigrationHelpers } from '../../MigrationHelpers';

// replacing the credentials in workflows and execution

@ -9,8 +9,8 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
	name = 'UpdateWorkflowCredentials1630419189837';

	public async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		const schema = config.get('database.postgresdb.schema');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

@ -152,8 +152,8 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
	}

	public async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		const schema = config.get('database.postgresdb.schema');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

@ -5,44 +5,44 @@ export class AddExecutionEntityIndexes1644422880309 implements MigrationInterfac
	name = 'AddExecutionEntityIndexes1644422880309';

	public async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.get('database.postgresdb.schema');
		const schema = config.getEnv('database.postgresdb.schema');

		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

		await queryRunner.query(
			`DROP INDEX "${schema}".IDX_${tablePrefixPure}c4d999a5e90784e8caccf5589d`,
			`DROP INDEX IF EXISTS "${schema}".IDX_${tablePrefixPure}c4d999a5e90784e8caccf5589d`,
		);
		await queryRunner.query(
			`DROP INDEX "${schema}".IDX_${tablePrefixPure}ca4a71b47f28ac6ea88293a8e2`,
			`DROP INDEX IF EXISTS "${schema}".IDX_${tablePrefixPure}ca4a71b47f28ac6ea88293a8e2`,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefixPure}33228da131bb1112247cf52a42" ON ${tablePrefix}execution_entity ("stoppedAt") `,
			`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}33228da131bb1112247cf52a42" ON ${tablePrefix}execution_entity ("stoppedAt") `,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefixPure}58154df94c686818c99fb754ce" ON ${tablePrefix}execution_entity ("workflowId", "waitTill", "id") `,
			`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}58154df94c686818c99fb754ce" ON ${tablePrefix}execution_entity ("workflowId", "waitTill", "id") `,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefixPure}4f474ac92be81610439aaad61e" ON ${tablePrefix}execution_entity ("workflowId", "finished", "id") `,
			`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}4f474ac92be81610439aaad61e" ON ${tablePrefix}execution_entity ("workflowId", "finished", "id") `,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefixPure}72ffaaab9f04c2c1f1ea86e662" ON ${tablePrefix}execution_entity ("finished", "id") `,
			`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}72ffaaab9f04c2c1f1ea86e662" ON ${tablePrefix}execution_entity ("finished", "id") `,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefixPure}85b981df7b444f905f8bf50747" ON ${tablePrefix}execution_entity ("waitTill", "id") `,
			`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}85b981df7b444f905f8bf50747" ON ${tablePrefix}execution_entity ("waitTill", "id") `,
		);
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefixPure}d160d4771aba5a0d78943edbe3" ON ${tablePrefix}execution_entity ("workflowId", "id") `,
			`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefixPure}d160d4771aba5a0d78943edbe3" ON ${tablePrefix}execution_entity ("workflowId", "id") `,
		);
	}

	public async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.get('database.postgresdb.schema');
		const schema = config.getEnv('database.postgresdb.schema');

		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
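The `IF EXISTS` / `IF NOT EXISTS` additions in the migration above make the DDL idempotent: re-running a partially applied migration no longer fails on an index that is already gone or already present. A sketch of the pattern against typeorm's QueryRunner (the index name here is an example, not one of the hashed names above):

import type { QueryRunner } from 'typeorm';

// Sketch: idempotent index recreation. Without IF EXISTS / IF NOT EXISTS,
// a second run of a half-applied migration throws on the first statement.
async function recreateIndex(queryRunner: QueryRunner, tablePrefix: string): Promise<void> {
	await queryRunner.query(`DROP INDEX IF EXISTS "IDX_${tablePrefix}example"`);
	await queryRunner.query(
		`CREATE INDEX IF NOT EXISTS "IDX_${tablePrefix}example" ON ${tablePrefix}execution_entity ("workflowId", "id")`,
	);
}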
@ -3,13 +3,13 @@ import {
	QueryRunner,
} from 'typeorm';

import config = require('../../../../config');
import * as config from '../../../../config';

export class IncreaseTypeVarcharLimit1646834195327 implements MigrationInterface {
	name = 'IncreaseTypeVarcharLimit1646834195327';

	async up(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');
		await queryRunner.query(`ALTER TABLE ${tablePrefix}credentials_entity ALTER COLUMN "type" TYPE VARCHAR(128)`);
	}

@ -1,15 +1,15 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import { v4 as uuid } from 'uuid';
import config = require('../../../../config');
import config from '../../../../config';
import { loadSurveyFromDisk } from '../../utils/migrationHelpers';

export class CreateUserManagement1646992772331 implements MigrationInterface {
	name = 'CreateUserManagement1646992772331';

	public async up(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.get('database.postgresdb.schema');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

@ -140,9 +140,9 @@ export class CreateUserManagement1646992772331 implements MigrationInterface {
	}

	public async down(queryRunner: QueryRunner): Promise<void> {
		let tablePrefix = config.get('database.tablePrefix');
		let tablePrefix = config.getEnv('database.tablePrefix');
		const tablePrefixPure = tablePrefix;
		const schema = config.get('database.postgresdb.schema');
		const schema = config.getEnv('database.postgresdb.schema');
		if (schema) {
			tablePrefix = schema + '.' + tablePrefix;
		}

@ -8,7 +8,7 @@ export class InitialMigration1588102412422 implements MigrationInterface {
	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(
			`CREATE TABLE IF NOT EXISTS "${tablePrefix}credentials_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(128) NOT NULL, "data" text NOT NULL, "type" varchar(128) NOT NULL, "nodesAccess" text NOT NULL, "createdAt" datetime NOT NULL, "updatedAt" datetime NOT NULL)`,

@ -35,7 +35,7 @@ export class InitialMigration1588102412422 implements MigrationInterface {
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(`DROP TABLE "${tablePrefix}workflow_entity"`, undefined);
		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}c4d999a5e90784e8caccf5589d"`, undefined);

@ -8,7 +8,7 @@ export class WebhookModel1592445003908 implements MigrationInterface {
	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(
			`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, PRIMARY KEY ("webhookPath", "method"))`,

@ -18,7 +18,7 @@ export class WebhookModel1592445003908 implements MigrationInterface {
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');
		await queryRunner.query(`DROP TABLE ${tablePrefix}webhook_entity`);
	}
}

@ -8,7 +8,7 @@ export class CreateIndexStoppedAt1594825041918 implements MigrationInterface {
	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1" ON "${tablePrefix}execution_entity" ("stoppedAt") `,

@ -18,7 +18,7 @@ export class CreateIndexStoppedAt1594825041918 implements MigrationInterface {
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}cefb067df2402f6aed0638a6c1"`);
	}

@ -8,7 +8,7 @@ export class MakeStoppedAtNullable1607431743769 implements MigrationInterface {
	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');
		// SQLite does not allow us to simply "alter column"
		// We're hacking the way sqlite identifies tables
		// Allowing a column to become nullable

@ -8,7 +8,7 @@ export class AddWebhookId1611071044839 implements MigrationInterface {
	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(
			`CREATE TABLE "temporary_webhook_entity" ("workflowId" integer NOT NULL, "webhookPath" varchar NOT NULL, "method" varchar NOT NULL, "node" varchar NOT NULL, "webhookId" varchar, "pathLength" integer, PRIMARY KEY ("webhookPath", "method"))`,

@ -28,7 +28,7 @@ export class AddWebhookId1611071044839 implements MigrationInterface {
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}742496f199721a057051acf4c2"`);
		await queryRunner.query(

@ -8,7 +8,7 @@ export class CreateTagEntity1617213344594 implements MigrationInterface {
	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');

		// create tags table + relationship with workflow entity

@ -73,7 +73,7 @@ export class CreateTagEntity1617213344594 implements MigrationInterface {
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');

		// `createdAt` and `updatedAt`

@ -1,5 +1,5 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import config = require('../../../../config');
import * as config from '../../../../config';
import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';

export class UniqueWorkflowNames1620821879465 implements MigrationInterface {

@ -8,7 +8,7 @@ export class UniqueWorkflowNames1620821879465 implements MigrationInterface {
	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');

		const workflowNames = await queryRunner.query(`
			SELECT name

@ -58,7 +58,7 @@ export class UniqueWorkflowNames1620821879465 implements MigrationInterface {
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');
		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}943d8f922be094eb507cb9a7f9"`);
	}
}

@ -8,7 +8,7 @@ export class AddWaitColumn1621707690587 implements MigrationInterface {
	async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(`DROP TABLE IF EXISTS "${tablePrefix}temporary_execution_entity"`);
		await queryRunner.query(

@ -34,7 +34,7 @@ export class AddWaitColumn1621707690587 implements MigrationInterface {
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(
			`CREATE TABLE IF NOT EXISTS "${tablePrefix}temporary_execution_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" varchar NOT NULL, "retryOf" varchar, "retrySuccessId" varchar, "startedAt" datetime NOT NULL, "stoppedAt" datetime, "workflowData" text NOT NULL, "workflowId" varchar)`,

@ -1,5 +1,5 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import config = require('../../../../config');
import * as config from '../../../../config';
import { MigrationHelpers } from '../../MigrationHelpers';
import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';

@ -12,7 +12,7 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac
	public async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');
		const helpers = new MigrationHelpers(queryRunner);

		const credentialsEntities = await queryRunner.query(`

@ -152,7 +152,7 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac
	}

	public async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');
		const helpers = new MigrationHelpers(queryRunner);

		const credentialsEntities = await queryRunner.query(`

@ -7,32 +7,32 @@ export class AddExecutionEntityIndexes1644421939510 implements MigrationInterfac

	public async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);
		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(`DROP INDEX IF EXISTS 'IDX_${tablePrefix}c4d999a5e90784e8caccf5589d'`);

		await queryRunner.query(`DROP INDEX 'IDX_${tablePrefix}ca4a71b47f28ac6ea88293a8e2'`);
		await queryRunner.query(`DROP INDEX IF EXISTS 'IDX_${tablePrefix}ca4a71b47f28ac6ea88293a8e2'`);

		await queryRunner.query(
			`CREATE INDEX 'IDX_${tablePrefix}06da892aaf92a48e7d3e400003' ON '${tablePrefix}execution_entity' ('workflowId', 'waitTill', 'id') `,
			`CREATE INDEX IF NOT EXISTS 'IDX_${tablePrefix}06da892aaf92a48e7d3e400003' ON '${tablePrefix}execution_entity' ('workflowId', 'waitTill', 'id') `,
		);
		await queryRunner.query(
			`CREATE INDEX 'IDX_${tablePrefix}78d62b89dc1433192b86dce18a' ON '${tablePrefix}execution_entity' ('workflowId', 'finished', 'id') `,
			`CREATE INDEX IF NOT EXISTS 'IDX_${tablePrefix}78d62b89dc1433192b86dce18a' ON '${tablePrefix}execution_entity' ('workflowId', 'finished', 'id') `,
		);
		await queryRunner.query(
			`CREATE INDEX 'IDX_${tablePrefix}1688846335d274033e15c846a4' ON '${tablePrefix}execution_entity' ('finished', 'id') `,
			`CREATE INDEX IF NOT EXISTS 'IDX_${tablePrefix}1688846335d274033e15c846a4' ON '${tablePrefix}execution_entity' ('finished', 'id') `,
		);
		await queryRunner.query(
			`CREATE INDEX 'IDX_${tablePrefix}b94b45ce2c73ce46c54f20b5f9' ON '${tablePrefix}execution_entity' ('waitTill', 'id') `,
			`CREATE INDEX IF NOT EXISTS 'IDX_${tablePrefix}b94b45ce2c73ce46c54f20b5f9' ON '${tablePrefix}execution_entity' ('waitTill', 'id') `,
		);
		await queryRunner.query(
			`CREATE INDEX 'IDX_${tablePrefix}81fc04c8a17de15835713505e4' ON '${tablePrefix}execution_entity' ('workflowId', 'id') `,
			`CREATE INDEX IF NOT EXISTS 'IDX_${tablePrefix}81fc04c8a17de15835713505e4' ON '${tablePrefix}execution_entity' ('workflowId', 'id') `,
		);
		logMigrationEnd(this.name);
	}

	public async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(`DROP INDEX 'IDX_${tablePrefix}81fc04c8a17de15835713505e4'`);
		await queryRunner.query(`DROP INDEX 'IDX_${tablePrefix}b94b45ce2c73ce46c54f20b5f9'`);

@ -43,7 +43,7 @@ export class AddExecutionEntityIndexes1644421939510 implements MigrationInterfac
			`CREATE INDEX 'IDX_${tablePrefix}ca4a71b47f28ac6ea88293a8e2' ON '${tablePrefix}execution_entity' ('waitTill') `,
		);
		await queryRunner.query(
			`CREATE INDEX IF NOT EXISTS 'IDX_${tablePrefix}c4d999a5e90784e8caccf5589d' ON '${tablePrefix}execution_entity' ('workflowId') `,
			`CREATE INDEX 'IDX_${tablePrefix}c4d999a5e90784e8caccf5589d' ON '${tablePrefix}execution_entity' ('workflowId') `,
		);
	}
}

@ -1,6 +1,6 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import { v4 as uuid } from 'uuid';
import config = require('../../../../config');
import * as config from '../../../../config';
import {
	loadSurveyFromDisk,
	logMigrationEnd,

@ -13,7 +13,7 @@ export class CreateUserManagement1646992772331 implements MigrationInterface {
	public async up(queryRunner: QueryRunner): Promise<void> {
		logMigrationStart(this.name);

		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');

		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}role" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(32) NOT NULL, "scope" varchar NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), CONSTRAINT "UQ_${tablePrefix}5b49d0f504f7ef31045a1fb2eb8" UNIQUE ("scope", "name"))`,

@ -104,7 +104,7 @@ export class CreateUserManagement1646992772331 implements MigrationInterface {
	}

	public async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		const tablePrefix = config.getEnv('database.tablePrefix');
		await queryRunner.query(
			`CREATE UNIQUE INDEX "IDX_${tablePrefix}943d8f922be094eb507cb9a7f9" ON "${tablePrefix}workflow_entity" ("name") `,
		);

@ -1,4 +1,4 @@
import config = require('../../../../config');
import config from '../../../../config';

import { InitialMigration1588102412422 } from './1588102412422-InitialMigration';
import { WebhookModel1592445003908 } from './1592445003908-WebhookModel';
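The import rewrites running through this commit (`import config = require(...)` becoming `import config from '...'` or `import * as config from '...'`) track TypeScript's module interop behavior. As a rough guide, assuming `esModuleInterop` is enabled in the package's tsconfig (all paths here are placeholders):

// Rough guide to the three import styles seen in this commit, assuming
// "esModuleInterop": true in tsconfig.json. Paths are placeholders.

// 1. TS-specific CommonJS import; works without esModuleInterop:
// import config = require('../config');

// 2. Standard ES default import; with esModuleInterop this consumes a
//    CommonJS `module.exports` (or an ES `export default`) directly:
// import config from '../config';

// 3. Namespace import; binds the named exports of a module that has no
//    usable default export:
// import * as config from '../config';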
2	packages/cli/src/requests.d.ts	vendored

@ -1,5 +1,5 @@
/* eslint-disable import/no-cycle */
import express = require('express');
import express from 'express';
import {
	IConnections,
	ICredentialDataDecryptedObject,

@ -1,9 +1,9 @@
/* eslint-disable import/no-cycle */
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
import TelemetryClient = require('@rudderstack/rudder-sdk-node');
import TelemetryClient from '@rudderstack/rudder-sdk-node';
import { IDataObject, LoggerProxy } from 'n8n-workflow';
import config = require('../../config');
import * as config from '../../config';
import { getLogger } from '../Logger';

type CountBufferItemKey =

@ -58,10 +58,10 @@ export class Telemetry {
		this.instanceId = instanceId;
		this.versionCli = versionCli;

		const enabled = config.get('diagnostics.enabled') as boolean;
		const logLevel = config.get('logs.level') as boolean;
		const enabled = config.getEnv('diagnostics.enabled');
		const logLevel = config.getEnv('logs.level');
		if (enabled) {
			const conf = config.get('diagnostics.config.backend') as string;
			const conf = config.getEnv('diagnostics.config.backend');
			const [key, url] = conf.split(';');

			if (!key || !url) {
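The telemetry hunk above splits a single `diagnostics.config.backend` string into a key and a URL. A small runnable sketch of that parsing, with the guard the diff shows (the function name and values are placeholders):

// Sketch of the "key;url" parsing in the Telemetry constructor above.
function parseTelemetryBackend(conf: string): { key: string; url: string } | undefined {
	const [key, url] = conf.split(';');
	// Both parts are required; otherwise telemetry stays disabled.
	if (!key || !url) return undefined;
	return { key, url };
}

console.log(parseTelemetryBackend('writeKey;https://telemetry.example.com'));
// { key: 'writeKey', url: 'https://telemetry.example.com' }
console.log(parseTelemetryBackend('missing-url')); // undefined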
283	packages/cli/test/integration/auth.api.test.ts	Normal file

@ -0,0 +1,283 @@
import express = require('express');
import validator from 'validator';

import config = require('../../config');
import * as utils from './shared/utils';
import { LOGGED_OUT_RESPONSE_BODY } from './shared/constants';
import { Db } from '../../src';
import type { Role } from '../../src/databases/entities/Role';
import { randomValidPassword } from './shared/random';
import * as testDb from './shared/testDb';
import { AUTH_COOKIE_NAME } from '../../src/constants';

jest.mock('../../src/telemetry');

let app: express.Application;
let testDbName = '';
let globalOwnerRole: Role;
let globalMemberRole: Role;

beforeAll(async () => {
	app = utils.initTestServer({ endpointGroups: ['auth'], applyAuth: true });
	const initResult = await testDb.init();
	testDbName = initResult.testDbName;

	globalOwnerRole = await testDb.getGlobalOwnerRole();
	globalMemberRole = await testDb.getGlobalMemberRole();
	utils.initTestLogger();
	utils.initTestTelemetry();
});

beforeEach(async () => {
	await testDb.truncate(['User'], testDbName);

	config.set('userManagement.isInstanceOwnerSetUp', true);

	await Db.collections.Settings!.update(
		{ key: 'userManagement.isInstanceOwnerSetUp' },
		{ value: JSON.stringify(true) },
	);
});

afterAll(async () => {
	await testDb.terminate(testDbName);
});

test('POST /login should log user in', async () => {
	const ownerPassword = randomValidPassword();
	const owner = await testDb.createUser({
		password: ownerPassword,
		globalRole: globalOwnerRole,
	});

	const authlessAgent = utils.createAgent(app);

	const response = await authlessAgent.post('/login').send({
		email: owner.email,
		password: ownerPassword,
	});

	expect(response.statusCode).toBe(200);

	const {
		id,
		email,
		firstName,
		lastName,
		password,
		personalizationAnswers,
		globalRole,
		resetPasswordToken,
	} = response.body.data;

	expect(validator.isUUID(id)).toBe(true);
	expect(email).toBe(owner.email);
	expect(firstName).toBe(owner.firstName);
	expect(lastName).toBe(owner.lastName);
	expect(password).toBeUndefined();
	expect(personalizationAnswers).toBeNull();
	expect(password).toBeUndefined();
	expect(resetPasswordToken).toBeUndefined();
	expect(globalRole).toBeDefined();
	expect(globalRole.name).toBe('owner');
	expect(globalRole.scope).toBe('global');

	const authToken = utils.getAuthToken(response);
	expect(authToken).toBeDefined();
});

test('GET /login should return 401 Unauthorized if no cookie', async () => {
	const authlessAgent = utils.createAgent(app);

	const response = await authlessAgent.get('/login');

	expect(response.statusCode).toBe(401);

	const authToken = utils.getAuthToken(response);
	expect(authToken).toBeUndefined();
});

test('GET /login should return cookie if UM is disabled', async () => {
	const ownerShell = await testDb.createUserShell(globalOwnerRole);

	config.set('userManagement.isInstanceOwnerSetUp', false);

	await Db.collections.Settings!.update(
		{ key: 'userManagement.isInstanceOwnerSetUp' },
		{ value: JSON.stringify(false) },
	);

	const authOwnerShellAgent = utils.createAgent(app, { auth: true, user: ownerShell });

	const response = await authOwnerShellAgent.get('/login');

	expect(response.statusCode).toBe(200);

	const authToken = utils.getAuthToken(response);
	expect(authToken).toBeDefined();
});

test('GET /login should return 401 Unauthorized if invalid cookie', async () => {
	const invalidAuthAgent = utils.createAgent(app);
	invalidAuthAgent.jar.setCookie(`${AUTH_COOKIE_NAME}=invalid`);

	const response = await invalidAuthAgent.get('/login');

	expect(response.statusCode).toBe(401);

	const authToken = utils.getAuthToken(response);
	expect(authToken).toBeUndefined();
});

test('GET /login should return logged-in owner shell', async () => {
	const ownerShell = await testDb.createUserShell(globalOwnerRole);
	const authMemberAgent = utils.createAgent(app, { auth: true, user: ownerShell });

	const response = await authMemberAgent.get('/login');

	expect(response.statusCode).toBe(200);

	const {
		id,
		email,
		firstName,
		lastName,
		password,
		personalizationAnswers,
		globalRole,
		resetPasswordToken,
	} = response.body.data;

	expect(validator.isUUID(id)).toBe(true);
	expect(email).toBeDefined();
	expect(firstName).toBeNull();
	expect(lastName).toBeNull();
	expect(password).toBeUndefined();
	expect(personalizationAnswers).toBeNull();
	expect(password).toBeUndefined();
	expect(resetPasswordToken).toBeUndefined();
	expect(globalRole).toBeDefined();
	expect(globalRole.name).toBe('owner');
	expect(globalRole.scope).toBe('global');

	const authToken = utils.getAuthToken(response);
	expect(authToken).toBeUndefined();
});

test('GET /login should return logged-in member shell', async () => {
	const memberShell = await testDb.createUserShell(globalMemberRole);
	const authMemberAgent = utils.createAgent(app, { auth: true, user: memberShell });

	const response = await authMemberAgent.get('/login');

	expect(response.statusCode).toBe(200);

	const {
		id,
		email,
		firstName,
		lastName,
		password,
		personalizationAnswers,
		globalRole,
		resetPasswordToken,
	} = response.body.data;

	expect(validator.isUUID(id)).toBe(true);
	expect(email).toBeDefined();
	expect(firstName).toBeNull();
	expect(lastName).toBeNull();
	expect(password).toBeUndefined();
	expect(personalizationAnswers).toBeNull();
	expect(password).toBeUndefined();
	expect(resetPasswordToken).toBeUndefined();
	expect(globalRole).toBeDefined();
	expect(globalRole.name).toBe('member');
	expect(globalRole.scope).toBe('global');

	const authToken = utils.getAuthToken(response);
	expect(authToken).toBeUndefined();
});

test('GET /login should return logged-in owner', async () => {
	const owner = await testDb.createUser({ globalRole: globalOwnerRole });
	const authOwnerAgent = utils.createAgent(app, { auth: true, user: owner });

	const response = await authOwnerAgent.get('/login');

	expect(response.statusCode).toBe(200);

	const {
		id,
		email,
		firstName,
		lastName,
		password,
		personalizationAnswers,
		globalRole,
		resetPasswordToken,
	} = response.body.data;

	expect(validator.isUUID(id)).toBe(true);
	expect(email).toBe(owner.email);
	expect(firstName).toBe(owner.firstName);
	expect(lastName).toBe(owner.lastName);
	expect(password).toBeUndefined();
	expect(personalizationAnswers).toBeNull();
	expect(password).toBeUndefined();
	expect(resetPasswordToken).toBeUndefined();
	expect(globalRole).toBeDefined();
	expect(globalRole.name).toBe('owner');
	expect(globalRole.scope).toBe('global');

	const authToken = utils.getAuthToken(response);
	expect(authToken).toBeUndefined();
});

test('GET /login should return logged-in member', async () => {
	const member = await testDb.createUser({ globalRole: globalMemberRole });
	const authMemberAgent = utils.createAgent(app, { auth: true, user: member });

	const response = await authMemberAgent.get('/login');

	expect(response.statusCode).toBe(200);

	const {
		id,
		email,
		firstName,
		lastName,
		password,
		personalizationAnswers,
		globalRole,
		resetPasswordToken,
	} = response.body.data;

	expect(validator.isUUID(id)).toBe(true);
	expect(email).toBe(member.email);
	expect(firstName).toBe(member.firstName);
	expect(lastName).toBe(member.lastName);
	expect(password).toBeUndefined();
	expect(personalizationAnswers).toBeNull();
	expect(password).toBeUndefined();
	expect(resetPasswordToken).toBeUndefined();
	expect(globalRole).toBeDefined();
	expect(globalRole.name).toBe('member');
	expect(globalRole.scope).toBe('global');

	const authToken = utils.getAuthToken(response);
	expect(authToken).toBeUndefined();
});

test('POST /logout should log user out', async () => {
	const owner = await testDb.createUser({ globalRole: globalOwnerRole });
	const authOwnerAgent = utils.createAgent(app, { auth: true, user: owner });

	const response = await authOwnerAgent.post('/logout');

	expect(response.statusCode).toBe(200);
	expect(response.body).toEqual(LOGGED_OUT_RESPONSE_BODY);

	const authToken = utils.getAuthToken(response);
	expect(authToken).toBeUndefined();
});
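The new test file leans on `utils.getAuthToken(response)` to assert whether an auth cookie was issued; its implementation is not part of this diff. A plausible sketch, assuming the helper scans the response's Set-Cookie header for the `AUTH_COOKIE_NAME` cookie (the cookie name and response shape here are assumptions for illustration):

// Hypothetical sketch of a getAuthToken test helper; not the code under test.
const AUTH_COOKIE_NAME = 'n8n-auth';

function getAuthToken(response: { headers: Record<string, string[] | undefined> }): string | undefined {
	const cookies = response.headers['set-cookie'] ?? [];
	const authCookie = cookies.find((c) => c.startsWith(`${AUTH_COOKIE_NAME}=`));
	// Drop the cookie name and attributes (Path, HttpOnly, ...) to get the token.
	return authCookie?.split(';')[0].split('=')[1];
}

console.log(getAuthToken({ headers: { 'set-cookie': ['n8n-auth=abc123; Path=/; HttpOnly'] } })); // "abc123"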
|
@ -1,9 +1,9 @@
|
|||
import { hashSync, genSaltSync } from 'bcryptjs';
|
||||
import express = require('express');
|
||||
import express from 'express';
|
||||
import validator from 'validator';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
|
||||
import config = require('../../config');
|
||||
import * as config from '../../config';
|
||||
import * as utils from './shared/utils';
|
||||
import { LOGGED_OUT_RESPONSE_BODY } from './shared/constants';
|
||||
import { Db } from '../../src';
|
||||
|
|
|
@@ -1,6 +1,6 @@
-import express = require('express');
+import express from 'express';
 
-import * as request from 'supertest';
+import request from 'supertest';
 import {
 	REST_PATH_SEGMENT,
 	ROUTES_REQUIRING_AUTHORIZATION,
@@ -8,11 +8,13 @@ import {
 } from './shared/constants';
 import * as utils from './shared/utils';
 import * as testDb from './shared/testDb';
+import type { Role } from '../../src/databases/entities/Role';
 
 jest.mock('../../src/telemetry');
 
 let app: express.Application;
 let testDbName = '';
+let globalMemberRole: Role;
 
 beforeAll(async () => {
 	app = utils.initTestServer({
@@ -21,6 +23,9 @@ beforeAll(async () => {
 	});
 	const initResult = await testDb.init();
 	testDbName = initResult.testDbName;
+
+	globalMemberRole = await testDb.getGlobalMemberRole();
+
 	utils.initTestLogger();
 	utils.initTestTelemetry();
 });
@@ -43,12 +48,9 @@ ROUTES_REQUIRING_AUTHORIZATION.forEach(async (route) => {
 	const [method, endpoint] = getMethodAndEndpoint(route);
 
 	test(`${route} should return 403 Forbidden for member`, async () => {
-		const member = await testDb.createUser();
+		const member = await testDb.createUser({ globalRole: globalMemberRole });
 		const authMemberAgent = utils.createAgent(app, { auth: true, user: member });
 		const response = await authMemberAgent[method](endpoint);
-		if (response.statusCode === 500) {
-			console.log(response);
-		}
 
 		expect(response.statusCode).toBe(403);
 	});
@@ -1,17 +1,20 @@
-import express = require('express');
+import express from 'express';
 import { UserSettings } from 'n8n-core';
 import { Db } from '../../src';
 import { randomName, randomString } from './shared/random';
 import * as utils from './shared/utils';
 import type { CredentialPayload, SaveCredentialFunction } from './shared/types';
-import { Role } from '../../src/databases/entities/Role';
-import { User } from '../../src/databases/entities/User';
+import type { Role } from '../../src/databases/entities/Role';
+import type { User } from '../../src/databases/entities/User';
 import * as testDb from './shared/testDb';
+import { CredentialsEntity } from '../../src/databases/entities/CredentialsEntity';
 
 jest.mock('../../src/telemetry');
 
 let app: express.Application;
 let testDbName = '';
+let globalOwnerRole: Role;
 let globalMemberRole: Role;
 let saveCredential: SaveCredentialFunction;
 
 beforeAll(async () => {
@@ -24,19 +27,17 @@ beforeAll(async () => {
 
 	utils.initConfigFile();
 
+	globalOwnerRole = await testDb.getGlobalOwnerRole();
 	globalMemberRole = await testDb.getGlobalMemberRole();
 	const credentialOwnerRole = await testDb.getCredentialOwnerRole();
 	saveCredential = affixRoleToSaveCredential(credentialOwnerRole);
 
 	utils.initTestLogger();
 	utils.initTestTelemetry();
 });
 
-beforeEach(async () => {
-	await testDb.createOwnerShell();
-});
-
 afterEach(async () => {
-	// do not combine calls - shared table must be cleared first and separately
-	await testDb.truncate(['SharedCredentials'], testDbName);
-	await testDb.truncate(['User', 'Credentials'], testDbName);
+	await testDb.truncate(['User', 'SharedCredentials', 'Credentials'], testDbName);
});
 
 afterAll(async () => {
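The afterEach hunk above collapses two ordered truncate calls into a single one. The real `truncate` lives in `./shared/testDb` and is not part of this diff, so the sketch below is only an assumption about its shape, included to show why a single call can replace the two ordered ones:

import { getConnection } from 'typeorm';

// Hypothetical sketch: delete all rows from each listed entity's table.
// The real helper presumably resolves foreign-key ordering internally
// (SharedCredentials references both User and Credentials), which is what
// lets the separate, carefully ordered calls collapse into one.
async function truncate(entityNames: string[], testDbName: string): Promise<void> {
	const connection = getConnection(testDbName);
	for (const name of entityNames) {
		// Delete-all via query builder; no criteria needed.
		await connection.createQueryBuilder().delete().from(name).execute();
	}
}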
@@ -44,8 +45,9 @@
 });
 
 test('POST /credentials should create cred', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const authOwnerAgent = utils.createAgent(app, { auth: true, user: owner });
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+
+	const authOwnerAgent = utils.createAgent(app, { auth: true, user: ownerShell });
 	const payload = credentialPayload();
 
 	const response = await authOwnerAgent.post('/credentials').send(payload);
@@ -71,26 +73,28 @@ test('POST /credentials should create cred', async () => {
 		where: { credentials: credential },
 	});
 
-	expect(sharedCredential.user.id).toBe(owner.id);
+	expect(sharedCredential.user.id).toBe(ownerShell.id);
 	expect(sharedCredential.credentials.name).toBe(payload.name);
 });
 
 test('POST /credentials should fail with invalid inputs', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const authOwnerAgent = utils.createAgent(app, { auth: true, user: owner });
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const authOwnerAgent = utils.createAgent(app, { auth: true, user: ownerShell });
 
-	for (const invalidPayload of INVALID_PAYLOADS) {
-		const response = await authOwnerAgent.post('/credentials').send(invalidPayload);
-		expect(response.statusCode).toBe(400);
-	}
+	await Promise.all(
+		INVALID_PAYLOADS.map(async (invalidPayload) => {
+			const response = await authOwnerAgent.post('/credentials').send(invalidPayload);
+			expect(response.statusCode).toBe(400);
+		}),
+	);
 });
 
 test('POST /credentials should fail with missing encryption key', async () => {
 	const mock = jest.spyOn(UserSettings, 'getEncryptionKey');
 	mock.mockResolvedValue(undefined);
 
-	const owner = await Db.collections.User!.findOneOrFail();
-	const authOwnerAgent = utils.createAgent(app, { auth: true, user: owner });
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const authOwnerAgent = utils.createAgent(app, { auth: true, user: ownerShell });
 
 	const response = await authOwnerAgent.post('/credentials').send(credentialPayload());
 
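The hunk above (and several below) swap a sequential for…of loop for `Promise.all` over `Array.prototype.map`, so independent HTTP assertions run concurrently instead of one after another. A minimal standalone sketch of the same pattern; the function name, `payloads` parameter, and endpoint are placeholders for this illustration, `expect` is Jest's global, and the agent type comes from supertest's typings:

import request from 'supertest';

// Fire all requests at once; Promise.all rejects (failing the test) if any
// individual assertion throws.
async function expectAllRejected(
	agent: request.SuperTest<request.Test>,
	payloads: unknown[],
): Promise<void> {
	await Promise.all(
		payloads.map(async (payload) => {
			const response = await agent.post('/credentials').send(payload);
			expect(response.statusCode).toBe(400); // each payload is checked independently
		}),
	);
}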
@@ -100,8 +104,8 @@ test('POST /credentials should fail with missing encryption key', async () => {
 });
 
 test('POST /credentials should ignore ID in payload', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const authOwnerAgent = utils.createAgent(app, { auth: true, user: owner });
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const authOwnerAgent = utils.createAgent(app, { auth: true, user: ownerShell });
 
 	const firstResponse = await authOwnerAgent
 		.post('/credentials')
@@ -117,9 +121,9 @@ test('POST /credentials should ignore ID in payload', async () => {
 });
 
 test('DELETE /credentials/:id should delete owned cred for owner', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const authOwnerAgent = utils.createAgent(app, { auth: true, user: owner });
-	const savedCredential = await saveCredential(credentialPayload(), { user: owner });
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const authOwnerAgent = utils.createAgent(app, { auth: true, user: ownerShell });
+	const savedCredential = await saveCredential(credentialPayload(), { user: ownerShell });
 
 	const response = await authOwnerAgent.delete(`/credentials/${savedCredential.id}`);
 
@@ -136,9 +140,9 @@ test('DELETE /credentials/:id should delete owned cred for owner', async () => {
 });
 
 test('DELETE /credentials/:id should delete non-owned cred for owner', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const authOwnerAgent = utils.createAgent(app, { auth: true, user: owner });
-	const member = await testDb.createUser();
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const authOwnerAgent = utils.createAgent(app, { auth: true, user: ownerShell });
+	const member = await testDb.createUser({ globalRole: globalMemberRole });
 	const savedCredential = await saveCredential(credentialPayload(), { user: member });
 
 	const response = await authOwnerAgent.delete(`/credentials/${savedCredential.id}`);
@@ -156,7 +160,7 @@ test('DELETE /credentials/:id should delete non-owned cred for owner', async () => {
 });
 
 test('DELETE /credentials/:id should delete owned cred for member', async () => {
-	const member = await testDb.createUser();
+	const member = await testDb.createUser({ globalRole: globalMemberRole });
 	const authMemberAgent = utils.createAgent(app, { auth: true, user: member });
 	const savedCredential = await saveCredential(credentialPayload(), { user: member });
 
@@ -175,10 +179,10 @@ test('DELETE /credentials/:id should delete owned cred for member', async () => {
 });
 
 test('DELETE /credentials/:id should not delete non-owned cred for member', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const member = await testDb.createUser();
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const member = await testDb.createUser({ globalRole: globalMemberRole });
 	const authMemberAgent = utils.createAgent(app, { auth: true, user: member });
-	const savedCredential = await saveCredential(credentialPayload(), { user: owner });
+	const savedCredential = await saveCredential(credentialPayload(), { user: ownerShell });
 
 	const response = await authMemberAgent.delete(`/credentials/${savedCredential.id}`);
 
@@ -194,8 +198,8 @@ test('DELETE /credentials/:id should not delete non-owned cred for member', async () => {
 });
 
 test('DELETE /credentials/:id should fail if cred not found', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const authOwnerAgent = utils.createAgent(app, { auth: true, user: owner });
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const authOwnerAgent = utils.createAgent(app, { auth: true, user: ownerShell });
 
 	const response = await authOwnerAgent.delete('/credentials/123');
 
@@ -203,9 +207,9 @@ test('DELETE /credentials/:id should fail if cred not found', async () => {
 });
 
 test('PATCH /credentials/:id should update owned cred for owner', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const authOwnerAgent = utils.createAgent(app, { auth: true, user: owner });
-	const savedCredential = await saveCredential(credentialPayload(), { user: owner });
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const authOwnerAgent = utils.createAgent(app, { auth: true, user: ownerShell });
+	const savedCredential = await saveCredential(credentialPayload(), { user: ownerShell });
 	const patchPayload = credentialPayload();
 
 	const response = await authOwnerAgent
@@ -237,9 +241,9 @@ test('PATCH /credentials/:id should update owned cred for owner', async () => {
 });
 
 test('PATCH /credentials/:id should update non-owned cred for owner', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const authOwnerAgent = utils.createAgent(app, { auth: true, user: owner });
-	const member = await testDb.createUser();
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const authOwnerAgent = utils.createAgent(app, { auth: true, user: ownerShell });
+	const member = await testDb.createUser({ globalRole: globalMemberRole });
 	const savedCredential = await saveCredential(credentialPayload(), { user: member });
 	const patchPayload = credentialPayload();
 
@@ -272,7 +276,7 @@ test('PATCH /credentials/:id should update non-owned cred for owner', async () => {
 });
 
 test('PATCH /credentials/:id should update owned cred for member', async () => {
-	const member = await testDb.createUser();
+	const member = await testDb.createUser({ globalRole: globalMemberRole });
 	const authMemberAgent = utils.createAgent(app, { auth: true, user: member });
 	const savedCredential = await saveCredential(credentialPayload(), { user: member });
 	const patchPayload = credentialPayload();
@@ -306,10 +310,10 @@ test('PATCH /credentials/:id should update owned cred for member', async () => {
 });
 
 test('PATCH /credentials/:id should not update non-owned cred for member', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const member = await testDb.createUser();
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const member = await testDb.createUser({ globalRole: globalMemberRole });
 	const authMemberAgent = utils.createAgent(app, { auth: true, user: member });
-	const savedCredential = await saveCredential(credentialPayload(), { user: owner });
+	const savedCredential = await saveCredential(credentialPayload(), { user: ownerShell });
 	const patchPayload = credentialPayload();
 
 	const response = await authMemberAgent
@@ -324,22 +328,24 @@ test('PATCH /credentials/:id should not update non-owned cred for member', async () => {
 });
 
 test('PATCH /credentials/:id should fail with invalid inputs', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const authOwnerAgent = utils.createAgent(app, { auth: true, user: owner });
-	const savedCredential = await saveCredential(credentialPayload(), { user: owner });
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const authOwnerAgent = utils.createAgent(app, { auth: true, user: ownerShell });
+	const savedCredential = await saveCredential(credentialPayload(), { user: ownerShell });
 
-	for (const invalidPayload of INVALID_PAYLOADS) {
-		const response = await authOwnerAgent
-			.patch(`/credentials/${savedCredential.id}`)
-			.send(invalidPayload);
-
-		expect(response.statusCode).toBe(400);
-	}
+	await Promise.all(
+		INVALID_PAYLOADS.map(async (invalidPayload) => {
+			const response = await authOwnerAgent
+				.patch(`/credentials/${savedCredential.id}`)
+				.send(invalidPayload);
+
+			expect(response.statusCode).toBe(400);
+		}),
+	);
 });
 
 test('PATCH /credentials/:id should fail if cred not found', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const authOwnerAgent = utils.createAgent(app, { auth: true, user: owner });
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const authOwnerAgent = utils.createAgent(app, { auth: true, user: ownerShell });
 
 	const response = await authOwnerAgent.patch('/credentials/123').send(credentialPayload());
 
@@ -350,8 +356,8 @@ test('PATCH /credentials/:id should fail with missing encryption key', async () => {
 	const mock = jest.spyOn(UserSettings, 'getEncryptionKey');
 	mock.mockResolvedValue(undefined);
 
-	const owner = await Db.collections.User!.findOneOrFail();
-	const authOwnerAgent = utils.createAgent(app, { auth: true, user: owner });
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const authOwnerAgent = utils.createAgent(app, { auth: true, user: ownerShell });
 
 	const response = await authOwnerAgent.post('/credentials').send(credentialPayload());
 
@@ -361,14 +367,14 @@ test('PATCH /credentials/:id should fail with missing encryption key', async () => {
 });
 
 test('GET /credentials should retrieve all creds for owner', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const authOwnerAgent = utils.createAgent(app, { auth: true, user: owner });
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const authOwnerAgent = utils.createAgent(app, { auth: true, user: ownerShell });
 
 	for (let i = 0; i < 3; i++) {
-		await saveCredential(credentialPayload(), { user: owner });
+		await saveCredential(credentialPayload(), { user: ownerShell });
 	}
 
-	const member = await testDb.createUser();
+	const member = await testDb.createUser({ globalRole: globalMemberRole });
 
 	await saveCredential(credentialPayload(), { user: member });
 
@@ -377,18 +383,20 @@ test('GET /credentials should retrieve all creds for owner', async () => {
 	expect(response.statusCode).toBe(200);
 	expect(response.body.data.length).toBe(4); // 3 owner + 1 member
 
-	for (const credential of response.body.data) {
-		const { name, type, nodesAccess, data: encryptedData } = credential;
-
-		expect(typeof name).toBe('string');
-		expect(typeof type).toBe('string');
-		expect(typeof nodesAccess[0].nodeType).toBe('string');
-		expect(encryptedData).toBeUndefined();
-	}
+	await Promise.all(
+		response.body.data.map(async (credential: CredentialsEntity) => {
+			const { name, type, nodesAccess, data: encryptedData } = credential;
+
+			expect(typeof name).toBe('string');
+			expect(typeof type).toBe('string');
+			expect(typeof nodesAccess[0].nodeType).toBe('string');
+			expect(encryptedData).toBeUndefined();
+		}),
+	);
 });
 
 test('GET /credentials should retrieve owned creds for member', async () => {
-	const member = await testDb.createUser();
+	const member = await testDb.createUser({ globalRole: globalMemberRole });
 	const authMemberAgent = utils.createAgent(app, { auth: true, user: member });
 
 	for (let i = 0; i < 3; i++) {
@@ -400,23 +408,25 @@ test('GET /credentials should retrieve owned creds for member', async () => {
 	expect(response.statusCode).toBe(200);
 	expect(response.body.data.length).toBe(3);
 
-	for (const credential of response.body.data) {
-		const { name, type, nodesAccess, data: encryptedData } = credential;
-
-		expect(typeof name).toBe('string');
-		expect(typeof type).toBe('string');
-		expect(typeof nodesAccess[0].nodeType).toBe('string');
-		expect(encryptedData).toBeUndefined();
-	}
+	await Promise.all(
+		response.body.data.map(async (credential: CredentialsEntity) => {
+			const { name, type, nodesAccess, data: encryptedData } = credential;
+
+			expect(typeof name).toBe('string');
+			expect(typeof type).toBe('string');
+			expect(typeof nodesAccess[0].nodeType).toBe('string');
+			expect(encryptedData).toBeUndefined();
+		}),
+	);
 });
 
 test('GET /credentials should not retrieve non-owned creds for member', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const member = await testDb.createUser();
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const member = await testDb.createUser({ globalRole: globalMemberRole });
 	const authMemberAgent = utils.createAgent(app, { auth: true, user: member });
 
 	for (let i = 0; i < 3; i++) {
-		await saveCredential(credentialPayload(), { user: owner });
+		await saveCredential(credentialPayload(), { user: ownerShell });
 	}
 
 	const response = await authMemberAgent.get('/credentials');
 
@@ -426,9 +436,9 @@ test('GET /credentials should not retrieve non-owned creds for member', async () => {
 });
 
 test('GET /credentials/:id should retrieve owned cred for owner', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const authOwnerAgent = utils.createAgent(app, { auth: true, user: owner });
-	const savedCredential = await saveCredential(credentialPayload(), { user: owner });
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const authOwnerAgent = utils.createAgent(app, { auth: true, user: ownerShell });
+	const savedCredential = await saveCredential(credentialPayload(), { user: ownerShell });
 
 	const firstResponse = await authOwnerAgent.get(`/credentials/${savedCredential.id}`);
 
@@ -451,7 +461,7 @@ test('GET /credentials/:id should retrieve owned cred for owner', async () => {
 });
 
 test('GET /credentials/:id should retrieve owned cred for member', async () => {
-	const member = await testDb.createUser();
+	const member = await testDb.createUser({ globalRole: globalMemberRole });
 	const authMemberAgent = utils.createAgent(app, { auth: true, user: member });
 	const savedCredential = await saveCredential(credentialPayload(), { user: member });
 
@@ -477,10 +487,10 @@ test('GET /credentials/:id should retrieve owned cred for member', async () => {
 });
 
 test('GET /credentials/:id should not retrieve non-owned cred for member', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const member = await testDb.createUser();
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const member = await testDb.createUser({ globalRole: globalMemberRole });
 	const authMemberAgent = utils.createAgent(app, { auth: true, user: member });
-	const savedCredential = await saveCredential(credentialPayload(), { user: owner });
+	const savedCredential = await saveCredential(credentialPayload(), { user: ownerShell });
 
 	const response = await authMemberAgent.get(`/credentials/${savedCredential.id}`);
 
@@ -489,9 +499,9 @@ test('GET /credentials/:id should not retrieve non-owned cred for member', async () => {
 });
 
 test('GET /credentials/:id should fail with missing encryption key', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const authOwnerAgent = utils.createAgent(app, { auth: true, user: owner });
-	const savedCredential = await saveCredential(credentialPayload(), { user: owner });
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const authOwnerAgent = utils.createAgent(app, { auth: true, user: ownerShell });
+	const savedCredential = await saveCredential(credentialPayload(), { user: ownerShell });
 
 	const mock = jest.spyOn(UserSettings, 'getEncryptionKey');
 	mock.mockResolvedValue(undefined);
 
@@ -506,8 +516,8 @@ test('GET /credentials/:id should fail with missing encryption key', async () => {
 });
 
 test('GET /credentials/:id should return 404 if cred not found', async () => {
-	const owner = await Db.collections.User!.findOneOrFail();
-	const authMemberAgent = utils.createAgent(app, { auth: true, user: owner });
+	const ownerShell = await testDb.createUserShell(globalOwnerRole);
+	const authMemberAgent = utils.createAgent(app, { auth: true, user: ownerShell });
 
 	const response = await authMemberAgent.get('/credentials/789');
 
@@ -1,12 +1,12 @@
 import { hashSync, genSaltSync } from 'bcryptjs';
-import express = require('express');
+import express from 'express';
 import validator from 'validator';
+import { IsNull } from 'typeorm';
 
-import config = require('../../config');
+import config from '../../config';
 import * as utils from './shared/utils';
 import { SUCCESS_RESPONSE_BODY } from './shared/constants';
 import { Db } from '../../src';
-import { Role } from '../../src/databases/entities/Role';
+import type { Role } from '../../src/databases/entities/Role';
 import { randomValidPassword, randomEmail, randomName, randomString } from './shared/random';
 import * as testDb from './shared/testDb';
@@ -15,6 +15,7 @@ jest.mock('../../src/telemetry');
 let app: express.Application;
 let testDbName = '';
+let globalOwnerRole: Role;
 let globalMemberRole: Role;
 
 beforeAll(async () => {
 	app = utils.initTestServer({ endpointGroups: ['me'], applyAuth: true });
@@ -22,6 +23,7 @@ beforeAll(async () => {
 	testDbName = initResult.testDbName;
 
+	globalOwnerRole = await testDb.getGlobalOwnerRole();
 	globalMemberRole = await testDb.getGlobalMemberRole();
 	utils.initTestLogger();
 	utils.initTestTelemetry();
 });
@@ -32,15 +34,11 @@ afterAll(async () => {
 
 describe('Owner shell', () => {
-	beforeEach(async () => {
-		await testDb.createOwnerShell();
-	});
-
 	afterEach(async () => {
 		await testDb.truncate(['User'], testDbName);
 	});
 
 	test('GET /me should return sanitized owner shell', async () => {
-		const ownerShell = await Db.collections.User!.findOneOrFail();
+		const ownerShell = await testDb.createUserShell(globalOwnerRole);
 		const authOwnerShellAgent = utils.createAgent(app, { auth: true, user: ownerShell });
 
 		const response = await authOwnerShellAgent.get('/me');
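As in the credentials hunks, the shared `beforeEach` that seeded one owner shell is dropped, and each test now calls `testDb.createUserShell(globalOwnerRole)` explicitly instead of grabbing an arbitrary row via `Db.collections.User!.findOneOrFail()`. The helper's implementation is outside this diff, so the sketch below is only a guess at its shape; a "shell" reads here as a user row that has a role but has not finished account setup:

import { Db } from '../../src';
import type { Role } from '../../src/databases/entities/Role';
import type { User } from '../../src/databases/entities/User';

// Hypothetical sketch of ./shared/testDb's createUserShell: persist a user
// carrying only a global role — no email, password, or name yet.
export async function createUserShell(globalRole: Role): Promise<User> {
	return Db.collections.User!.save({ globalRole });
}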
@@ -72,7 +70,7 @@ describe('Owner shell', () => {
 	});
 
 	test('PATCH /me should succeed with valid inputs', async () => {
-		const ownerShell = await Db.collections.User!.findOneOrFail();
+		const ownerShell = await testDb.createUserShell(globalOwnerRole);
 		const authOwnerShellAgent = utils.createAgent(app, { auth: true, user: ownerShell });
 
 		for (const validPayload of VALID_PATCH_ME_PAYLOADS) {
@@ -112,7 +110,7 @@ describe('Owner shell', () => {
 	});
 
 	test('PATCH /me should fail with invalid inputs', async () => {
-		const ownerShell = await Db.collections.User!.findOneOrFail();
+		const ownerShell = await testDb.createUserShell(globalOwnerRole);
 		const authOwnerShellAgent = utils.createAgent(app, { auth: true, user: ownerShell });
 
 		for (const invalidPayload of INVALID_PATCH_ME_PAYLOADS) {
@@ -127,7 +125,7 @@ describe('Owner shell', () => {
 	});
 
 	test('PATCH /me/password should fail for shell', async () => {
-		const ownerShell = await Db.collections.User!.findOneOrFail();
+		const ownerShell = await testDb.createUserShell(globalOwnerRole);
 		const authOwnerShellAgent = utils.createAgent(app, { auth: true, user: ownerShell });
 
 		const validPasswordPayload = {
@@ -135,9 +133,10 @@ describe('Owner shell', () => {
 			newPassword: randomValidPassword(),
 		};
 
-		const payloads = [validPasswordPayload, ...INVALID_PASSWORD_PAYLOADS];
+		const validPayloads = [validPasswordPayload, ...INVALID_PASSWORD_PAYLOADS];
 
-		for (const payload of payloads) {
+		await Promise.all(
+			validPayloads.map(async (payload) => {
 			const response = await authOwnerShellAgent.patch('/me/password').send(payload);
 			expect([400, 500].includes(response.statusCode)).toBe(true);
 
@@ -146,29 +145,34 @@ describe('Owner shell', () => {
 			if (payload.newPassword) {
 				expect(storedMember.password).not.toBe(payload.newPassword);
 			}
 
 			if (payload.currentPassword) {
 				expect(storedMember.password).not.toBe(payload.currentPassword);
 			}
-		}
+			}),
+		);
 
 		const storedOwnerShell = await Db.collections.User!.findOneOrFail();
 		expect(storedOwnerShell.password).toBeNull();
 	});
 
 	test('POST /me/survey should succeed with valid inputs', async () => {
-		const ownerShell = await Db.collections.User!.findOneOrFail();
+		const ownerShell = await testDb.createUserShell(globalOwnerRole);
 		const authOwnerShellAgent = utils.createAgent(app, { auth: true, user: ownerShell });
 
 		const validPayloads = [SURVEY, {}];
 
 		for (const validPayload of validPayloads) {
 			const response = await authOwnerShellAgent.post('/me/survey').send(validPayload);
 
 			expect(response.statusCode).toBe(200);
 			expect(response.body).toEqual(SUCCESS_RESPONSE_BODY);
 
-			const { personalizationAnswers: storedAnswers } = await Db.collections.User!.findOneOrFail();
+			const storedShellOwner = await Db.collections.User!.findOneOrFail({
+				where: { email: IsNull() },
+			});
 
-			expect(storedAnswers).toEqual(validPayload);
+			expect(storedShellOwner.personalizationAnswers).toEqual(validPayload);
 		}
 	});
 });
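The survey test above now imports `IsNull` from typeorm and filters on `email: IsNull()`: once tests create their own users, a bare `findOneOrFail()` could return any row, while the shell owner is distinguished by having no email yet. A minimal usage sketch of the operator, reusing the names from the tests above (the wrapper function itself is just for illustration):

import { IsNull } from 'typeorm';
import { Db } from '../../src';

// Match the row whose email column is SQL NULL — the not-yet-set-up shell owner.
async function findShellOwner() {
	return Db.collections.User!.findOneOrFail({
		where: { email: IsNull() },
	});
}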
@@ -188,7 +192,7 @@ describe('Member', () => {
 	});
 
 	test('GET /me should return sanitized member', async () => {
-		const member = await testDb.createUser();
+		const member = await testDb.createUser({ globalRole: globalMemberRole });
 		const authMemberAgent = utils.createAgent(app, { auth: true, user: member });
 
 		const response = await authMemberAgent.get('/me');
@@ -220,7 +224,7 @@ describe('Member', () => {
 	});
 
 	test('PATCH /me should succeed with valid inputs', async () => {
-		const member = await testDb.createUser();
+		const member = await testDb.createUser({ globalRole: globalMemberRole });
 		const authMemberAgent = utils.createAgent(app, { auth: true, user: member });
 
 		for (const validPayload of VALID_PATCH_ME_PAYLOADS) {
@@ -260,7 +264,7 @@ describe('Member', () => {
 	});
 
 	test('PATCH /me should fail with invalid inputs', async () => {
-		const member = await testDb.createUser();
+		const member = await testDb.createUser({ globalRole: globalMemberRole });
 		const authMemberAgent = utils.createAgent(app, { auth: true, user: member });
 
 		for (const invalidPayload of INVALID_PATCH_ME_PAYLOADS) {
@@ -278,6 +282,7 @@ describe('Member', () => {
 		const memberPassword = randomValidPassword();
 		const member = await testDb.createUser({
 			password: memberPassword,
+			globalRole: globalMemberRole,
 		});
 		const authMemberAgent = utils.createAgent(app, { auth: true, user: member });
 
@@ -296,7 +301,7 @@ describe('Member', () => {
 	});
 
 	test('PATCH /me/password should fail with invalid inputs', async () => {
-		const member = await testDb.createUser();
+		const member = await testDb.createUser({ globalRole: globalMemberRole });
 		const authMemberAgent = utils.createAgent(app, { auth: true, user: member });
 
 		for (const payload of INVALID_PASSWORD_PAYLOADS) {
@@ -315,7 +320,7 @@ describe('Member', () => {
 	});
 
 	test('POST /me/survey should succeed with valid inputs', async () => {
-		const member = await testDb.createUser();
+		const member = await testDb.createUser({ globalRole: globalMemberRole });
 		const authMemberAgent = utils.createAgent(app, { auth: true, user: member });
 
 		const validPayloads = [SURVEY, {}];