🔀 Merge branch 'Programmatic-migrations' of https://github.com/Rupenieks/n8n into Rupenieks-Programmatic-migrations

Jan Oberhauser 2020-05-04 16:27:30 +02:00
commit 75f0a653e4
14 changed files with 235 additions and 33 deletions

Binary file not shown.

View file

@@ -0,0 +1,108 @@
import {MongoDb, SQLite, MySQLDb, PostgresDb} from '../src/databases/index';
module.exports = [
{
"name": "sqlite",
"type": "sqlite",
"logging": true,
"entities": Object.values(SQLite),
"database": "./packages/cli/database.sqlite",
"migrations": [
"./src/databases/sqlite/migrations/*.ts"
],
"subscribers": [
"./src/databases/sqlite/subscribers/*.ts"
],
"cli": {
"entitiesDir": "./src/databases/sqlite",
"migrationsDir": "./src/databases/sqlite/migrations",
"subscribersDir": "./src/databases/sqlite/subscribers"
}
},
{
"name": "mongodb",
"type": "mongodb",
"logging": false,
"entities": Object.values(MongoDb),
"url": "mongodb://root:example@localhost:27017/n8n",
"authSource": 'admin',
"migrations": [
"./src/databases/mongodb/migrations/*.ts"
],
"subscribers": [
"src/subscriber/**/*.ts"
],
"cli": {
"entitiesDir": "./src/databases/mongodb",
"migrationsDir": "./src/databases/mongodb/Migrations",
"subscribersDir": "./src/databases/mongodb/Subscribers"
}
},
{
"name": "postgres",
"type": "postgres",
"logging": false,
"host": "localhost",
"username": "postgres",
"password": "docker",
"port": 5432,
"database": "postgres",
"schema": "public",
"entities": Object.values(PostgresDb),
"migrations": [
"./src/databases/postgresdb/migrations/*.ts"
],
"subscribers": [
"src/subscriber/**/*.ts"
],
"cli": {
"entitiesDir": "./src/databases/postgresdb",
"migrationsDir": "./src/databases/postgresdb/migrations",
"subscribersDir": "./src/databases/postgresdb/subscribers"
}
},
{
"name": "mysql",
"type": "mysql",
"database": "n8n",
"username": "root",
"password": "password",
"host": "localhost",
"port": "3308",
"logging": false,
"entities": Object.values(MySQLDb),
"migrations": [
"./src/databases/mysqldb/migrations/*.ts"
],
"subscribers": [
"src/subscriber/**/*.ts"
],
"cli": {
"entitiesDir": "./src/databases/mysqldb",
"migrationsDir": "./src/databases/mysqldb/migrations",
"subscribersDir": "./src/databases/mysqldb/Subscribers"
}
},
{
"name": "mariadb",
"type": "mariadb",
"database": "n8n",
"username": "root",
"password": "password",
"host": "localhost",
"port": "3308",
"logging": false,
"entities": Object.values(MySQLDb),
"migrations": [
"./src/databases/mysqldb/migrations/*.ts"
],
"subscribers": [
"src/subscriber/**/*.ts"
],
"cli": {
"entitiesDir": "./src/databases/mysqldb",
"migrationsDir": "./src/databases/mysqldb/migrations",
"subscribersDir": "./src/databases/mysqldb/Subscribers"
}
},
];
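As an aside, a minimal sketch (not part of this commit) of how one of the named connections above can be opened programmatically with the typeorm ^0.2.x API pinned further down; getConnectionOptions resolves the entry by its "name" field:

import { createConnection, getConnectionOptions } from 'typeorm';

// Hypothetical helper, for illustration only: open the "sqlite" connection
// defined in the ormconfig above.
async function openNamedConnection() {
    // Reads the ormconfig and returns the entry whose "name" is "sqlite".
    const options = await getConnectionOptions('sqlite');
    // createConnection accepts the resolved options object unchanged.
    return createConnection(options);
}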

View file

@@ -28,7 +28,8 @@
"start:windows": "cd bin && n8n",
"test": "jest",
"tslint": "tslint -p tsconfig.json -c tslint.json",
"watch": "tsc --watch"
"watch": "tsc --watch",
"typeorm": "ts-node ./node_modules/typeorm/cli.js"
},
"bin": {
"n8n": "./bin/n8n"
@@ -71,7 +72,8 @@
"run-script-os": "^1.0.7",
"ts-jest": "^24.0.2",
"tslint": "^5.17.0",
"typescript": "~3.7.4"
"typescript": "~3.7.4",
"ts-node": "^8.9.1"
},
"dependencies": {
"@oclif/command": "^1.5.18",
@@ -102,9 +104,9 @@
"open": "^7.0.0",
"pg": "^7.11.0",
"request-promise-native": "^1.0.7",
"sqlite3": "^4.0.6",
"sqlite3": "^4.2.0",
"sse-channel": "^3.1.1",
"typeorm": "^0.2.16"
"typeorm": "^0.2.24"
},
"jest": {
"transform": {

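The new "typeorm" script runs the TypeORM CLI through ts-node so it can load the TypeScript ormconfig and entity files directly. With the named connections above, the migrations in this commit would typically be generated and executed with commands along the lines of `npm run typeorm -- migration:generate -c postgres -n InitialMigration` and `npm run typeorm -- migration:run -c postgres` (connection names and invocation shown here as assumed usage, not taken from the commit).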
View file

@@ -12,6 +12,7 @@ import {
ConnectionOptions,
createConnection,
getRepository,
Connection,
} from 'typeorm';
import {
@@ -27,14 +28,27 @@ export let collections: IDatabaseCollections = {
Workflow: null,
};
import {
InitialMigration1587669153312
} from './databases/postgresdb/migrations';
import {
InitialMigration1588157391238
} from './databases/mysqldb/migrations';
import {
InitialMigration1588102412422
} from './databases/sqlite/migrations';
import * as path from 'path';
export async function init(synchronize?: boolean): Promise<IDatabaseCollections> {
export async function init(): Promise<IDatabaseCollections> {
const dbType = await GenericHelpers.getConfigValue('database.type') as DatabaseType;
const n8nFolder = UserSettings.getUserN8nFolderPath();
let entities;
let connectionOptions: ConnectionOptions;
let connection;
let dbNotExistError: string | undefined;
switch (dbType) {
@@ -60,6 +74,8 @@ export async function init(synchronize?: boolean): Promise<IDatabaseCollections>
port: await GenericHelpers.getConfigValue('database.postgresdb.port') as number,
username: await GenericHelpers.getConfigValue('database.postgresdb.user') as string,
schema: await GenericHelpers.getConfigValue('database.postgresdb.schema') as string,
migrations: [InitialMigration1587669153312],
migrationsRun: true
};
break;
@@ -75,6 +91,8 @@ export async function init(synchronize?: boolean): Promise<IDatabaseCollections>
password: await GenericHelpers.getConfigValue('database.mysqldb.password') as string,
port: await GenericHelpers.getConfigValue('database.mysqldb.port') as number,
username: await GenericHelpers.getConfigValue('database.mysqldb.user') as string,
migrations: [InitialMigration1588157391238],
migrationsRun: true
};
break;
@@ -83,8 +101,10 @@ export async function init(synchronize?: boolean): Promise<IDatabaseCollections>
entities = SQLite;
connectionOptions = {
type: 'sqlite',
database: path.join(n8nFolder, 'database.sqlite'),
database: path.join(n8nFolder, 'database.sqlite'),
entityPrefix: await GenericHelpers.getConfigValue('database.tablePrefix') as string,
migrations: [InitialMigration1588102412422],
migrationsRun: true
};
break;
@@ -94,38 +114,30 @@ export async function init(synchronize?: boolean): Promise<IDatabaseCollections>
Object.assign(connectionOptions, {
entities: Object.values(entities),
synchronize: synchronize === true || process.env['NODE_ENV'] !== 'production',
logging: false
synchronize: false,
logging: true
});
const connection = await createConnection(connectionOptions);
// TODO: Fix that properly
// @ts-ignore
collections.Credentials = getRepository(entities.CredentialsEntity);
// @ts-ignore
collections.Execution = getRepository(entities.ExecutionEntity);
// @ts-ignore
collections.Workflow = getRepository(entities.WorkflowEntity);
// Make sure that database did already get initialized
try {
// Try a simple query, if it fails it is normally a sign that
// database did not get initialized
await collections.Workflow!.findOne({ id: 1 });
} catch (error) {
// If query errors and the problem is that the database does not exist
// run the init again with "synchronize: true"
if (dbNotExistError !== undefined && error.message.includes(dbNotExistError)) {
// Disconnect before we try to connect again
if (connection.isConnected) {
await connection.close();
}
connection = await createConnection(connectionOptions);
return init(true);
await connection.runMigrations({
transaction: 'none'
});
if (connection.isConnected) {
collections.Credentials = getRepository(entities.CredentialsEntity);
collections.Execution = getRepository(entities.ExecutionEntity);
collections.Workflow = getRepository(entities.WorkflowEntity);
} else {
init();
}
throw error;
} catch (e) {
console.log(e);
}
return collections;
}
};
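A hedged usage sketch of the reworked init() above (assumed caller, not taken from the commit):

import * as Db from './Db';

// Hypothetical start-up code, for illustration only.
async function start() {
    // init() connects, runs any pending migrations (migrationsRun / runMigrations),
    // and only then exposes the repositories.
    const collections = await Db.init();
    const workflowCount = await collections.Workflow!.count();
    console.log(`workflows in database: ${workflowCount}`);
}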

View file

@@ -0,0 +1,11 @@
import {MigrationInterface, QueryRunner} from "typeorm";
export class InitialMigration1587563438936 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<any> {
// MongoDB creates databases and collections implicitly on first write, so the initial migration needs no schema statements.
}
public async down(queryRunner: QueryRunner): Promise<any> {
// Nothing to revert.
}
}

View file

@@ -0,0 +1,20 @@
import {MigrationInterface, QueryRunner} from "typeorm";
export class InitialMigration1588157391238 implements MigrationInterface {
name = 'InitialMigration1588157391238'
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query("CREATE TABLE IF NOT EXISTS `credentials_entity` (`id` int NOT NULL AUTO_INCREMENT, `name` varchar(128) NOT NULL, `data` text NOT NULL, `type` varchar(32) NOT NULL, `nodesAccess` json NOT NULL, `createdAt` datetime NOT NULL, `updatedAt` datetime NOT NULL, INDEX `IDX_07fde106c0b471d8cc80a64fc8` (`type`), PRIMARY KEY (`id`)) ENGINE=InnoDB", undefined);
await queryRunner.query("CREATE TABLE IF NOT EXISTS `execution_entity` (`id` int NOT NULL AUTO_INCREMENT, `data` text NOT NULL, `finished` tinyint NOT NULL, `mode` varchar(255) NOT NULL, `retryOf` varchar(255) NULL, `retrySuccessId` varchar(255) NULL, `startedAt` datetime NOT NULL, `stoppedAt` datetime NOT NULL, `workflowData` json NOT NULL, `workflowId` varchar(255) NULL, INDEX `IDX_c4d999a5e90784e8caccf5589d` (`workflowId`), PRIMARY KEY (`id`)) ENGINE=InnoDB", undefined);
await queryRunner.query("CREATE TABLE IF NOT EXISTS `workflow_entity` (`id` int NOT NULL AUTO_INCREMENT, `name` varchar(128) NOT NULL, `active` tinyint NOT NULL, `nodes` json NOT NULL, `connections` json NOT NULL, `createdAt` datetime NOT NULL, `updatedAt` datetime NOT NULL, `settings` json NULL, `staticData` json NULL, PRIMARY KEY (`id`)) ENGINE=InnoDB", undefined);
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query("DROP TABLE `workflow_entity`", undefined);
await queryRunner.query("DROP INDEX `IDX_c4d999a5e90784e8caccf5589d` ON `execution_entity`", undefined);
await queryRunner.query("DROP TABLE `execution_entity`", undefined);
await queryRunner.query("DROP INDEX `IDX_07fde106c0b471d8cc80a64fc8` ON `credentials_entity`", undefined);
await queryRunner.query("DROP TABLE `credentials_entity`", undefined);
}
}
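All of the CREATE statements above use IF NOT EXISTS, presumably so that migrationsRun: true in Db.ts is a no-op on databases that were already initialized by the previous synchronize-based setup; on such installations the migration only adds a row to TypeORM's migrations bookkeeping table.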

View file

@@ -0,0 +1 @@
export * from './1588157391238-InitialMigration';

View file

@@ -41,4 +41,5 @@ export class CredentialsEntity implements ICredentialsDb {
@Column('timestamp')
updatedAt: Date;
}

View file

@@ -0,0 +1,22 @@
import {MigrationInterface, QueryRunner} from "typeorm";
export class InitialMigration1587669153312 implements MigrationInterface {
name = 'InitialMigration1587669153312'
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`CREATE TABLE IF NOT EXISTS credentials_entity ("id" SERIAL NOT NULL, "name" character varying(128) NOT NULL, "data" text NOT NULL, "type" character varying(32) NOT NULL, "nodesAccess" json NOT NULL, "createdAt" TIMESTAMP NOT NULL, "updatedAt" TIMESTAMP NOT NULL, CONSTRAINT PK_814c3d3c36e8a27fa8edb761b0e PRIMARY KEY ("id"))`, undefined);
await queryRunner.query(`CREATE INDEX IF NOT EXISTS IDX_07fde106c0b471d8cc80a64fc8 ON credentials_entity (type) `, undefined);
await queryRunner.query(`CREATE TABLE IF NOT EXISTS execution_entity ("id" SERIAL NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" character varying NOT NULL, "retryOf" character varying, "retrySuccessId" character varying, "startedAt" TIMESTAMP NOT NULL, "stoppedAt" TIMESTAMP NOT NULL, "workflowData" json NOT NULL, "workflowId" character varying, CONSTRAINT PK_e3e63bbf986767844bbe1166d4e PRIMARY KEY ("id"))`, undefined);
await queryRunner.query(`CREATE INDEX IF NOT EXISTS IDX_c4d999a5e90784e8caccf5589d ON execution_entity ("workflowId") `, undefined);
await queryRunner.query(`CREATE TABLE IF NOT EXISTS workflow_entity ("id" SERIAL NOT NULL, "name" character varying(128) NOT NULL, "active" boolean NOT NULL, "nodes" json NOT NULL, "connections" json NOT NULL, "createdAt" TIMESTAMP NOT NULL, "updatedAt" TIMESTAMP NOT NULL, "settings" json, "staticData" json, CONSTRAINT PK_eded7d72664448da7745d551207 PRIMARY KEY ("id"))`, undefined);
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`DROP TABLE workflow_entity`, undefined);
await queryRunner.query(`DROP INDEX IDX_c4d999a5e90784e8caccf5589d`, undefined);
await queryRunner.query(`DROP TABLE execution_entity`, undefined);
await queryRunner.query(`DROP INDEX IDX_07fde106c0b471d8cc80a64fc8`, undefined);
await queryRunner.query(`DROP TABLE credentials_entity`, undefined);
}
}
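For the down() path above, a minimal sketch of reverting the last applied migration programmatically (assumptions: the "postgres" connection name from the ormconfig above; not part of this commit):

import { createConnection } from 'typeorm';

// Hypothetical helper, for illustration only.
async function revertLastMigration() {
    const connection = await createConnection('postgres');
    // undoLastMigration() runs the down() of the most recently executed migration,
    // the programmatic counterpart of the CLI's migration:revert.
    await connection.undoLastMigration();
    await connection.close();
}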

View file

@@ -0,0 +1 @@
export * from './1587669153312-InitialMigration';

View file

@@ -1,3 +1,4 @@
export * from './CredentialsEntity';
export * from './ExecutionEntity';
export * from './WorkflowEntity';

View file

@@ -0,0 +1,22 @@
import {MigrationInterface, QueryRunner} from "typeorm";
export class InitialMigration1588102412422 implements MigrationInterface {
name = 'InitialMigration1588102412422'
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`CREATE TABLE IF NOT EXISTS "credentials_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(128) NOT NULL, "data" text NOT NULL, "type" varchar(32) NOT NULL, "nodesAccess" text NOT NULL, "createdAt" datetime NOT NULL, "updatedAt" datetime NOT NULL)`, undefined);
await queryRunner.query(`CREATE INDEX IF NOT EXISTS "IDX_07fde106c0b471d8cc80a64fc8" ON "credentials_entity" ("type") `, undefined);
await queryRunner.query(`CREATE TABLE IF NOT EXISTS "execution_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" varchar NOT NULL, "retryOf" varchar, "retrySuccessId" varchar, "startedAt" datetime NOT NULL, "stoppedAt" datetime NOT NULL, "workflowData" text NOT NULL, "workflowId" varchar)`, undefined);
await queryRunner.query(`CREATE INDEX IF NOT EXISTS "IDX_c4d999a5e90784e8caccf5589d" ON "execution_entity" ("workflowId") `, undefined);
await queryRunner.query(`CREATE TABLE IF NOT EXISTS "workflow_entity" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(128) NOT NULL, "active" boolean NOT NULL, "nodes" text NOT NULL, "connections" text NOT NULL, "createdAt" datetime NOT NULL, "updatedAt" datetime NOT NULL, "settings" text, "staticData" text)`, undefined);
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`DROP TABLE "workflow_entity"`, undefined);
await queryRunner.query(`DROP INDEX "IDX_c4d999a5e90784e8caccf5589d"`, undefined);
await queryRunner.query(`DROP TABLE "execution_entity"`, undefined);
await queryRunner.query(`DROP INDEX "IDX_07fde106c0b471d8cc80a64fc8"`, undefined);
await queryRunner.query(`DROP TABLE "credentials_entity"`, undefined);
}
}
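Worth noting: TypeORM tracks which of these migration classes have already run in a migrations table it creates alongside the entity tables, keyed by timestamp and class name, which is what makes migrationsRun: true in Db.ts safe to leave enabled across restarts.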

View file

@@ -0,0 +1 @@
export * from './1588102412422-InitialMigration';