💥 Remove MongoDB support

Jan committed via GitHub on 2021-01-23 20:35:38 +01:00
parent d395498882 · commit b33a5fcd13
18 changed files with 9 additions and 339 deletions


@@ -20,7 +20,3 @@ To stop it execute:
 ```
 docker-compose stop
 ```
-## Configuration
-The default name of the database, user and password for MongoDB can be changed in the `.env` file in the current directory.


@@ -107,7 +107,7 @@ docker run -it --rm \
 ### Start with other Database
 By default n8n uses SQLite to save credentials, past executions and workflows.
-n8n however also supports MongoDB, PostgresDB and MySQL. To use them simply a few
+n8n however also supports PostgresDB, MySQL and MariaDB. To use them simply a few
 environment variables have to be set.
 It is important to still persist the data in the `/root/.n8n` folder. The reason
@@ -117,32 +117,6 @@ for the credentials. If none gets found n8n creates automatically one on
 startup. In case credentials are already saved with a different encryption key
 it can not be used anymore as encrypting it is not possible anymore.
-#### Use with MongoDB
-> **WARNING**: Use Postgres if possible! Mongo has problems with saving large
-> amounts of data in a document and causes also other problems. So support will
-> may be dropped in the future.
-Replace the following placeholders with the actual data:
-- MONGO_DATABASE
-- MONGO_HOST
-- MONGO_PORT
-- MONGO_USER
-- MONGO_PASSWORD
-```
-docker run -it --rm \
-	--name n8n \
-	-p 5678:5678 \
-	-e DB_TYPE=mongodb \
-	-e DB_MONGODB_CONNECTION_URL="mongodb://<MONGO_USER>:<MONGO_PASSWORD>@<MONGO_SERVER>:<MONGO_PORT>/<MONGO_DATABASE>" \
-	-v ~/.n8n:/home/node/.n8n \
-	n8nio/n8n \
-	n8n start
-```
-A full working setup with docker-compose can be found [here](https://github.com/n8n-io/n8n/blob/master/docker/compose/withMongo/README.md)
 #### Use with PostgresDB
 Replace the following placeholders with the actual data:
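The encryption-key rule stated at the top of this hunk amounts to a lookup-or-create step on startup: reuse the key persisted in the `.n8n` folder if one exists, otherwise generate and persist a new one. A minimal TypeScript sketch of that idea — hypothetical helper and file layout, not the actual n8n implementation:

```typescript
import { randomBytes } from 'crypto';
import { promises as fs } from 'fs';
import * as path from 'path';

// Look up the persisted encryption key, or create one on first start.
async function getEncryptionKey(userFolder: string): Promise<string> {
	const settingsFile = path.join(userFolder, '.n8n', 'config');
	try {
		const settings = JSON.parse(await fs.readFile(settingsFile, 'utf8'));
		if (settings.encryptionKey) {
			// A key already exists; credentials encrypted with any
			// other key can no longer be decrypted.
			return settings.encryptionKey;
		}
	} catch {
		// No settings file yet - fall through and create one.
	}
	const encryptionKey = randomBytes(24).toString('base64');
	await fs.mkdir(path.dirname(settingsFile), { recursive: true });
	await fs.writeFile(settingsFile, JSON.stringify({ encryptionKey }));
	return encryptionKey;
}
```

This is why losing the `/root/.n8n` volume is worse than losing the database: without the persisted key, a fresh one is generated and existing credentials become undecryptable.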
@@ -203,7 +177,6 @@ with the given name. That makes it possible to load data easily from
 Docker- and Kubernetes-Secrets.
 The following environment variables support file input:
-- DB_MONGODB_CONNECTION_URL_FILE
 - DB_POSTGRESDB_DATABASE_FILE
 - DB_POSTGRESDB_HOST_FILE
 - DB_POSTGRESDB_PASSWORD_FILE
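The file-input mechanism works the same way for the variables that remain: when `<VARIABLE>_FILE` is set, the value is read from the named file (for example a mounted Docker or Kubernetes secret) instead of the environment. A rough TypeScript sketch of the pattern, with a hypothetical helper name:

```typescript
import { readFileSync } from 'fs';

// Prefer VAR_FILE (path to a mounted secret) over the plain VAR value.
function getEnvOrFile(name: string): string | undefined {
	const filePath = process.env[`${name}_FILE`];
	if (filePath !== undefined) {
		return readFileSync(filePath, 'utf8').trim();
	}
	return process.env[name];
}

const dbPassword = getEnvOrFile('DB_POSTGRESDB_PASSWORD');
```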


@@ -8,18 +8,10 @@ const config = convict({
 	database: {
 		type: {
 			doc: 'Type of database to use',
-			format: ['sqlite', 'mariadb', 'mongodb', 'mysqldb', 'postgresdb'],
+			format: ['sqlite', 'mariadb', 'mysqldb', 'postgresdb'],
 			default: 'sqlite',
 			env: 'DB_TYPE',
 		},
-		mongodb: {
-			connectionUrl: {
-				doc: 'MongoDB Connection URL',
-				format: '*',
-				default: 'mongodb://user:password@localhost:27017/database',
-				env: 'DB_MONGODB_CONNECTION_URL',
-			},
-		},
 		tablePrefix: {
 			doc: 'Prefix for table names',
 			format: '*',
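Because convict validates `DB_TYPE` against the `format` array in this schema, `DB_TYPE=mongodb` now fails at startup rather than at connection time. A self-contained sketch of that mechanism, trimmed to the one setting (not the full n8n schema):

```typescript
import * as convict from 'convict';

const config = convict({
	database: {
		type: {
			doc: 'Type of database to use',
			format: ['sqlite', 'mariadb', 'mysqldb', 'postgresdb'],
			default: 'sqlite',
			env: 'DB_TYPE',
		},
	},
});

// With DB_TYPE=mongodb in the environment this throws a validation error,
// because the value is no longer in the allowed list.
config.validate();
```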


@@ -1,4 +1,4 @@
-import {MongoDb, SQLite, MySQLDb, PostgresDb} from '../src/databases/index';
+import { SQLite, MySQLDb, PostgresDb} from '../src/databases/index';
 module.exports = [
 	{
@@ -19,25 +19,6 @@ module.exports = [
 			"subscribersDir": "./src/databases/sqlite/subscribers"
 		}
 	},
-	{
-		"name": "mongodb",
-		"type": "mongodb",
-		"logging": false,
-		"entities": Object.values(MongoDb),
-		"url": "mongodb://root:example@localhost:27017/n8n",
-		"authSource": 'admin',
-		"migrations": [
-			"./src/databases/mongodb/migrations/*.ts"
-		],
-		"subscribers": [
-			"src/subscriber/**/*.ts"
-		],
-		"cli": {
-			"entitiesDir": "./src/databases/mongodb",
-			"migrationsDir": "./src/databases/mongodb/Migrations",
-			"subscribersDir": "./src/databases/mongodb/Subscribers"
-		}
-	},
 	{
 		"name": "postgres",
 		"type": "postgres",


@@ -101,7 +101,6 @@
 		"jwks-rsa": "~1.9.0",
 		"localtunnel": "^2.0.0",
 		"lodash.get": "^4.4.2",
-		"mongodb": "^3.5.5",
 		"mysql2": "~2.1.0",
 		"n8n-core": "~0.59.0",
 		"n8n-editor-ui": "~0.72.0",


@@ -20,6 +20,7 @@ import {
 } from 'n8n-core';
 import {
+	IDataObject,
 	IExecuteData,
 	IGetExecutePollFunctions,
 	IGetExecuteTriggerFunctions,
@@ -321,8 +322,10 @@ export class ActiveWorkflowRunner {
 			// if it's a workflow from the the insert
 			// TODO check if there is standard error code for duplicate key violation that works
 			// with all databases
-			if (error.name === 'MongoError' || error.name === 'QueryFailedError') {
-				errorMessage = `The webhook path [${webhook.webhookPath}] and method [${webhook.method}] already exist.`;
+			if (error.name === 'QueryFailedError') {
+				errorMessage = error.parameters.length === 5
+					? `Node [${webhook.node}] can't be saved, please duplicate [${webhook.node}] and delete the currently existing one.`
+					: `The webhook path [${webhook.webhookPath}] and method [${webhook.method}] already exist.`;
 			} else if (error.detail) {
 				// it's a error runnig the webhook methods (checkExists, create)
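With the Mongo branch gone, a duplicate webhook registration reaches this code only as TypeORM's `QueryFailedError`, raised when one of the webhook table's unique indexes is violated (the deleted `WebhookEntity` below declares two of them). A minimal sketch of that failure mode, using TypeORM's current `DataSource` API and a hypothetical setup rather than the n8n code:

```typescript
import { DataSource, QueryFailedError } from 'typeorm';

// Assumes `db` has a webhook entity registered as 'webhook_entity' with a
// unique index on (webhookPath, method), as in the deleted WebhookEntity.
async function registerWebhook(
	db: DataSource,
	webhook: { webhookPath: string; method: string },
): Promise<void> {
	try {
		await db.manager.insert('webhook_entity', webhook);
	} catch (error) {
		if (error instanceof QueryFailedError) {
			// Unique-constraint violation: this path/method pair is taken.
			throw new Error(`The webhook path [${webhook.webhookPath}] and method [${webhook.method}] already exist.`);
		}
		throw error;
	}
}
```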
@@ -368,11 +371,6 @@
 		await WorkflowHelpers.saveStaticData(workflow);
-		// if it's a mongo objectId convert it to string
-		if (typeof workflowData.id === 'object') {
-			workflowData.id = workflowData.id.toString();
-		}
 		const webhook = {
 			workflowId: workflowData.id,
 		} as IWebhookDb;


@@ -19,7 +19,6 @@ import { TlsOptions } from 'tls';
 import * as config from '../config';
 import {
-	MongoDb,
 	MySQLDb,
 	PostgresDb,
 	SQLite,
@@ -33,7 +32,6 @@ export let collections: IDatabaseCollections = {
 };
 import { postgresMigrations } from './databases/postgresdb/migrations';
-import { mongodbMigrations } from './databases/mongodb/migrations';
 import { mysqlMigrations } from './databases/mysqldb/migrations';
 import { sqliteMigrations } from './databases/sqlite/migrations';
@@ -49,19 +47,6 @@ export async function init(): Promise<IDatabaseCollections> {
 	const entityPrefix = config.get('database.tablePrefix');
 	switch (dbType) {
-		case 'mongodb':
-			entities = MongoDb;
-			connectionOptions = {
-				type: 'mongodb',
-				entityPrefix,
-				url: await GenericHelpers.getConfigValue('database.mongodb.connectionUrl') as string,
-				useNewUrlParser: true,
-				migrations: mongodbMigrations,
-				migrationsRun: true,
-				migrationsTableName: `${entityPrefix}migrations`,
-			};
-			break;
 		case 'postgresdb':
 			entities = PostgresDb;


@@ -105,7 +105,7 @@ export interface ICredentialsDecryptedResponse extends ICredentialsDecryptedDb {
 	id: string;
 }
-export type DatabaseType = 'mariadb' | 'mongodb' | 'postgresdb' | 'mysqldb' | 'sqlite';
+export type DatabaseType = 'mariadb' | 'postgresdb' | 'mysqldb' | 'sqlite';
 export type SaveExecutionDataType = 'all' | 'none';
 export interface IExecutionBase {
@@ -249,9 +249,6 @@ export interface IN8nConfig {
 export interface IN8nConfigDatabase {
 	type: DatabaseType;
-	mongodb: {
-		connectionUrl: string;
-	};
 	postgresdb: {
 		host: string;
 		password: string;


@@ -1,10 +1,8 @@
-import * as MongoDb from './mongodb';
 import * as PostgresDb from './postgresdb';
 import * as SQLite from './sqlite';
 import * as MySQLDb from './mysqldb';
 export {
-	MongoDb,
 	PostgresDb,
 	SQLite,
 	MySQLDb,


@@ -1,41 +0,0 @@
-import {
-	ICredentialNodeAccess,
-} from 'n8n-workflow';
-import {
-	ICredentialsDb,
-} from '../../';
-import {
-	Column,
-	Entity,
-	Index,
-	ObjectID,
-	ObjectIdColumn,
-} from 'typeorm';
-@Entity()
-export class CredentialsEntity implements ICredentialsDb {
-	@ObjectIdColumn()
-	id: ObjectID;
-	@Column()
-	name: string;
-	@Column()
-	data: string;
-	@Index()
-	@Column()
-	type: string;
-	@Column('json')
-	nodesAccess: ICredentialNodeAccess[];
-	@Column('Date')
-	createdAt: Date;
-	@Column('Date')
-	updatedAt: Date;
-}
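For comparison, the relational entities that remain follow the same shape but use a generated integer primary key instead of Mongo's `ObjectID` column. A simplified sketch of the counterpart (not the verbatim SQLite/Postgres entity from the repo):

```typescript
import {
	ICredentialNodeAccess,
} from 'n8n-workflow';
import {
	Column,
	Entity,
	Index,
	PrimaryGeneratedColumn,
} from 'typeorm';

@Entity()
export class CredentialsEntity {
	// Auto-incremented integer key replaces the Mongo ObjectID.
	@PrimaryGeneratedColumn()
	id: number;

	@Column()
	name: string;

	@Column()
	data: string;

	@Index()
	@Column()
	type: string;

	// 'simple-json' is portable across the SQLite/MySQL/Postgres drivers.
	@Column('simple-json')
	nodesAccess: ICredentialNodeAccess[];

	@Column()
	createdAt: Date;

	@Column()
	updatedAt: Date;
}
```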


@@ -1,52 +0,0 @@
-import {
-	WorkflowExecuteMode,
-} from 'n8n-workflow';
-import {
-	IExecutionFlattedDb,
-	IWorkflowDb,
-} from '../../';
-import {
-	Column,
-	Entity,
-	Index,
-	ObjectID,
-	ObjectIdColumn,
-} from 'typeorm';
-@Entity()
-export class ExecutionEntity implements IExecutionFlattedDb {
-	@ObjectIdColumn()
-	id: ObjectID;
-	@Column()
-	data: string;
-	@Column()
-	finished: boolean;
-	@Column()
-	mode: WorkflowExecuteMode;
-	@Column()
-	retryOf: string;
-	@Column()
-	retrySuccessId: string;
-	@Column('Date')
-	startedAt: Date;
-	@Index()
-	@Column('Date')
-	stoppedAt: Date;
-	@Column('json')
-	workflowData: IWorkflowDb;
-	@Index()
-	@Column()
-	workflowId: string;
-}


@@ -1,38 +0,0 @@
-import {
-	Column,
-	Entity,
-	Index,
-	ObjectID,
-	ObjectIdColumn,
-} from 'typeorm';
-import {
-	IWebhookDb,
-} from '../../Interfaces';
-@Entity()
-@Index(["webhookPath", "method"], { unique: true })
-@Index(["webhookId", "method"], { unique: true })
-export class WebhookEntity implements IWebhookDb {
-	@ObjectIdColumn()
-	id: ObjectID;
-	@Column()
-	workflowId: number;
-	@Column()
-	webhookPath: string;
-	@Column()
-	method: string;
-	@Column()
-	node: string;
-	@Column()
-	webhookId: string;
-	@Column({ nullable: true })
-	pathLength: number;
-}


@@ -1,48 +0,0 @@
-import {
-	IConnections,
-	IDataObject,
-	INode,
-	IWorkflowSettings,
-} from 'n8n-workflow';
-import {
-	IWorkflowDb,
-} from '../../';
-import {
-	Column,
-	Entity,
-	ObjectID,
-	ObjectIdColumn,
-} from 'typeorm';
-@Entity()
-export class WorkflowEntity implements IWorkflowDb {
-	@ObjectIdColumn()
-	id: ObjectID;
-	@Column()
-	name: string;
-	@Column()
-	active: boolean;
-	@Column('json')
-	nodes: INode[];
-	@Column('json')
-	connections: IConnections;
-	@Column('Date')
-	createdAt: Date;
-	@Column('Date')
-	updatedAt: Date;
-	@Column('json')
-	settings?: IWorkflowSettings;
-	@Column('json')
-	staticData?: IDataObject;
-}


@@ -1,5 +0,0 @@
-export * from './CredentialsEntity';
-export * from './ExecutionEntity';
-export * from './WorkflowEntity';
-export * from './WebhookEntity';


@@ -1,22 +0,0 @@
-import { MigrationInterface } from "typeorm";
-import {
-	MongoQueryRunner,
-} from 'typeorm/driver/mongodb/MongoQueryRunner';
-import * as config from '../../../../config';
-export class CreateIndexStoppedAt1594910478695 implements MigrationInterface {
-	name = 'CreateIndexStoppedAt1594910478695';
-	async up(queryRunner: MongoQueryRunner): Promise<void> {
-		const tablePrefix = config.get('database.tablePrefix');
-		await queryRunner.manager.createCollectionIndex(`${tablePrefix}execution_entity`, 'stoppedAt', { name: `IDX_${tablePrefix}execution_entity_stoppedAt` });
-	}
-	async down(queryRunner: MongoQueryRunner): Promise<void> {
-		const tablePrefix = config.get('database.tablePrefix');
-		await queryRunner.manager.dropCollectionIndex(`${tablePrefix}execution_entity`, `IDX_${tablePrefix}execution_entity_stoppedAt`);
-	}
-}
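The relational databases keep an equivalent `stoppedAt` index; expressed as a plain TypeORM migration it is roughly the following (a sketch in Postgres syntax with the same relative config import, not the exact per-database migration shipped in the repo):

```typescript
import { MigrationInterface, QueryRunner } from 'typeorm';
import * as config from '../../../../config';

export class CreateIndexStoppedAt implements MigrationInterface {
	async up(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		// Same index as the Mongo createCollectionIndex call above,
		// expressed as a SQL statement.
		await queryRunner.query(
			`CREATE INDEX "IDX_${tablePrefix}execution_entity_stoppedAt" ON "${tablePrefix}execution_entity" ("stoppedAt")`,
		);
	}

	async down(queryRunner: QueryRunner): Promise<void> {
		const tablePrefix = config.get('database.tablePrefix');
		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}execution_entity_stoppedAt"`);
	}
}
```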


@@ -1,11 +0,0 @@
-import { MigrationInterface, QueryRunner } from 'typeorm';
-export class InitialMigration1587563438936 implements MigrationInterface {
-	async up(queryRunner: QueryRunner): Promise<void> {
-	}
-	async down(queryRunner: QueryRunner): Promise<void> {
-	}
-}


@@ -1,23 +0,0 @@
-import {
-	MigrationInterface,
-} from 'typeorm';
-import * as config from '../../../../config';
-import {
-	MongoQueryRunner,
-} from 'typeorm/driver/mongodb/MongoQueryRunner';
-export class WebhookModel1592679094242 implements MigrationInterface {
-	name = 'WebhookModel1592679094242';
-	async up(queryRunner: MongoQueryRunner): Promise<void> {
-		const tablePrefix = config.get('database.tablePrefix');
-		await queryRunner.manager.createCollectionIndex(`${tablePrefix}webhook_entity`, ['webhookPath', 'method'], { unique: true, background: false });
-	}
-	async down(queryRunner: MongoQueryRunner): Promise<void> {
-		const tablePrefix = config.get('database.tablePrefix');
-		await queryRunner.dropTable(`${tablePrefix}webhook_entity`);
-	}
-}


@@ -1,9 +0,0 @@
-import { InitialMigration1587563438936 } from './1587563438936-InitialMigration';
-import { WebhookModel1592679094242 } from './1592679094242-WebhookModel';
-import { CreateIndexStoppedAt1594910478695 } from './151594910478695-CreateIndexStoppedAt';
-export const mongodbMigrations = [
-	InitialMigration1587563438936,
-	WebhookModel1592679094242,
-	CreateIndexStoppedAt1594910478695,
-];