mirror of https://github.com/n8n-io/n8n.git
synced 2025-01-11 04:47:29 -08:00

refactor(core): Improve DB directory setup (#3502)

This commit is contained in:
parent d18a29d588
commit 189009a8b7
@@ -1,75 +0,0 @@
-import path from 'path';
-import { UserSettings } from 'n8n-core';
-import { entities } from '../src/databases/entities';
-
-module.exports = [
-	{
-		name: 'sqlite',
-		type: 'sqlite',
-		logging: true,
-		entities: Object.values(entities),
-		database: path.join(UserSettings.getUserN8nFolderPath(), 'database.sqlite'),
-		migrations: ['./src/databases/sqlite/migrations/index.ts'],
-		subscribers: ['./src/databases/sqlite/subscribers/*.ts'],
-		cli: {
-			entitiesDir: './src/databases/entities',
-			migrationsDir: './src/databases/sqlite/migrations',
-			subscribersDir: './src/databases/sqlite/subscribers',
-		},
-	},
-	{
-		name: 'postgres',
-		type: 'postgres',
-		logging: false,
-		host: 'localhost',
-		username: 'postgres',
-		password: '',
-		port: 5432,
-		database: 'n8n',
-		schema: 'public',
-		entities: Object.values(entities),
-		migrations: ['./src/databases/postgresdb/migrations/index.ts'],
-		subscribers: ['src/subscriber/**/*.ts'],
-		cli: {
-			entitiesDir: './src/databases/entities',
-			migrationsDir: './src/databases/postgresdb/migrations',
-			subscribersDir: './src/databases/postgresdb/subscribers',
-		},
-	},
-	{
-		name: 'mysql',
-		type: 'mysql',
-		database: 'n8n',
-		username: 'root',
-		password: 'password',
-		host: 'localhost',
-		port: '3306',
-		logging: false,
-		entities: Object.values(entities),
-		migrations: ['./src/databases/mysqldb/migrations/index.ts'],
-		subscribers: ['src/subscriber/**/*.ts'],
-		cli: {
-			entitiesDir: './src/databases/entities',
-			migrationsDir: './src/databases/mysqldb/migrations',
-			subscribersDir: './src/databases/mysqldb/Subscribers',
-		},
-	},
-	{
-		name: 'mariadb',
-		type: 'mariadb',
-		database: 'n8n',
-		username: 'root',
-		password: 'password',
-		host: 'localhost',
-		port: '3306',
-		logging: false,
-		entities: Object.values(entities),
-		migrations: ['./src/databases/mysqldb/migrations/*.ts'],
-		subscribers: ['src/subscriber/**/*.ts'],
-		cli: {
-			entitiesDir: './src/databases/entities',
-			migrationsDir: './src/databases/mysqldb/migrations',
-			subscribersDir: './src/databases/mysqldb/Subscribers',
-		},
-	},
-];
@@ -25,9 +25,9 @@ import config from '../config';
 // eslint-disable-next-line import/no-cycle
 import { entities } from './databases/entities';
 
-import { postgresMigrations } from './databases/postgresdb/migrations';
+import { postgresMigrations } from './databases/migrations/postgresdb';
-import { mysqlMigrations } from './databases/mysqldb/migrations';
+import { mysqlMigrations } from './databases/migrations/mysqldb';
-import { sqliteMigrations } from './databases/sqlite/migrations';
+import { sqliteMigrations } from './databases/migrations/sqlite';
 
 export let isInitialized = false;
 export const collections = {} as IDatabaseCollections;
@@ -1,39 +0,0 @@
-import { QueryRunner } from 'typeorm';
-
-export class MigrationHelpers {
-	queryRunner: QueryRunner;
-
-	constructor(queryRunner: QueryRunner) {
-		this.queryRunner = queryRunner;
-	}
-
-	// runs an operation sequential on chunks of a query that returns a potentially large Array.
-	/* eslint-disable no-await-in-loop */
-	async runChunked(
-		query: string,
-		// eslint-disable-next-line @typescript-eslint/no-explicit-any
-		operation: (results: any[]) => Promise<void>,
-		limit = 100,
-	): Promise<void> {
-		let offset = 0;
-		let chunkedQuery: string;
-		let chunkedQueryResults: unknown[];
-
-		do {
-			chunkedQuery = this.chunkQuery(query, limit, offset);
-			chunkedQueryResults = (await this.queryRunner.query(chunkedQuery)) as unknown[];
-			// pass a copy to prevent errors from mutation
-			await operation([...chunkedQueryResults]);
-			offset += limit;
-		} while (chunkedQueryResults.length === limit);
-	}
-	/* eslint-enable no-await-in-loop */
-
-	private chunkQuery(query: string, limit: number, offset = 0): string {
-		return `
-			${query}
-			LIMIT ${limit}
-			OFFSET ${offset}
-		`;
-	}
-}
@@ -1,6 +1,6 @@
 import { MigrationInterface, QueryRunner } from 'typeorm';
 import * as config from '../../../../config';
-import { MigrationHelpers } from '../../MigrationHelpers';
+import { runChunked } from '../../utils/migrationHelpers';
 
 // replacing the credentials in workflows and execution
 // `nodeType: name` changes to `nodeType: { id, name }`
@@ -10,7 +10,6 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac
 
 	public async up(queryRunner: QueryRunner): Promise<void> {
 		const tablePrefix = config.getEnv('database.tablePrefix');
-		const helpers = new MigrationHelpers(queryRunner);
 
 		const credentialsEntities = await queryRunner.query(`
 			SELECT id, name, type
@@ -21,8 +20,9 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac
 			SELECT id, nodes
 			FROM ${tablePrefix}workflow_entity
 		`;
+
 		// @ts-ignore
-		await helpers.runChunked(workflowsQuery, (workflows) => {
+		await runChunked(queryRunner, workflowsQuery, (workflows) => {
 			workflows.forEach(async (workflow) => {
 				const nodes = workflow.nodes;
 				let credentialsUpdated = false;
@@ -65,7 +65,7 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac
 			WHERE waitTill IS NOT NULL AND finished = 0
 		`;
 		// @ts-ignore
-		await helpers.runChunked(waitingExecutionsQuery, (waitingExecutions) => {
+		await runChunked(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
 			waitingExecutions.forEach(async (execution) => {
 				const data = execution.workflowData;
 				let credentialsUpdated = false;
@@ -147,7 +147,6 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac
 
 	public async down(queryRunner: QueryRunner): Promise<void> {
 		const tablePrefix = config.getEnv('database.tablePrefix');
-		const helpers = new MigrationHelpers(queryRunner);
 
 		const credentialsEntities = await queryRunner.query(`
 			SELECT id, name, type
@@ -159,7 +158,7 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac
 			FROM ${tablePrefix}workflow_entity
 		`;
 		// @ts-ignore
-		await helpers.runChunked(workflowsQuery, (workflows) => {
+		await runChunked(queryRunner, workflowsQuery, (workflows) => {
 			workflows.forEach(async (workflow) => {
 				const nodes = workflow.nodes;
 				let credentialsUpdated = false;
@@ -207,7 +206,7 @@ export class UpdateWorkflowCredentials1630451444017 implements MigrationInterfac
 			WHERE waitTill IS NOT NULL AND finished = 0
 		`;
 		// @ts-ignore
-		await helpers.runChunked(waitingExecutionsQuery, (waitingExecutions) => {
+		await runChunked(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
 			waitingExecutions.forEach(async (execution) => {
 				const data = execution.workflowData;
 				let credentialsUpdated = false;
@@ -1,6 +1,6 @@
 import { MigrationInterface, QueryRunner } from 'typeorm';
 import * as config from '../../../../config';
-import { MigrationHelpers } from '../../MigrationHelpers';
+import { runChunked } from '../../utils/migrationHelpers';
 
 // replacing the credentials in workflows and execution
 // `nodeType: name` changes to `nodeType: { id, name }`
@@ -17,7 +17,6 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
 
 		await queryRunner.query(`SET search_path TO ${schema};`);
 
-		const helpers = new MigrationHelpers(queryRunner);
 
 		const credentialsEntities = await queryRunner.query(`
 			SELECT id, name, type
@@ -28,8 +27,9 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
 			SELECT id, nodes
 			FROM ${tablePrefix}workflow_entity
 		`;
+
 		// @ts-ignore
-		await helpers.runChunked(workflowsQuery, (workflows) => {
+		await runChunked(queryRunner, workflowsQuery, (workflows) => {
 			workflows.forEach(async (workflow) => {
 				const nodes = workflow.nodes;
 				let credentialsUpdated = false;
@@ -72,7 +72,7 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
 			WHERE "waitTill" IS NOT NULL AND finished = FALSE
 		`;
 		// @ts-ignore
-		await helpers.runChunked(waitingExecutionsQuery, (waitingExecutions) => {
+		await runChunked(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
 			waitingExecutions.forEach(async (execution) => {
 				const data = execution.workflowData;
 				let credentialsUpdated = false;
@@ -161,7 +161,6 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
 			tablePrefix = schema + '.' + tablePrefix;
 		}
 		await queryRunner.query(`SET search_path TO ${schema};`);
-		const helpers = new MigrationHelpers(queryRunner);
 
 		const credentialsEntities = await queryRunner.query(`
 			SELECT id, name, type
@@ -173,7 +172,7 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
 			FROM ${tablePrefix}workflow_entity
 		`;
 		// @ts-ignore
-		await helpers.runChunked(workflowsQuery, (workflows) => {
+		await runChunked(queryRunner, workflowsQuery, (workflows) => {
 			workflows.forEach(async (workflow) => {
 				const nodes = workflow.nodes;
 				let credentialsUpdated = false;
@@ -222,7 +221,7 @@ export class UpdateWorkflowCredentials1630419189837 implements MigrationInterfac
 			WHERE "waitTill" IS NOT NULL AND finished = FALSE
 		`;
 		// @ts-ignore
-		await helpers.runChunked(waitingExecutionsQuery, (waitingExecutions) => {
+		await runChunked(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
 			waitingExecutions.forEach(async (execution) => {
 				const data = execution.workflowData;
 				let credentialsUpdated = false;
@@ -1,7 +1,7 @@
 import { MigrationInterface, QueryRunner } from 'typeorm';
 import * as config from '../../../../config';
-import { MigrationHelpers } from '../../MigrationHelpers';
 import { logMigrationEnd, logMigrationStart } from '../../utils/migrationHelpers';
+import { runChunked } from '../../utils/migrationHelpers';
 
 // replacing the credentials in workflows and execution
 // `nodeType: name` changes to `nodeType: { id, name }`
@@ -13,7 +13,6 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac
 		logMigrationStart(this.name);
 
 		const tablePrefix = config.getEnv('database.tablePrefix');
-		const helpers = new MigrationHelpers(queryRunner);
 
 		const credentialsEntities = await queryRunner.query(`
 			SELECT id, name, type
@@ -26,7 +25,7 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac
 		`;
 
 		// @ts-ignore
-		await helpers.runChunked(workflowsQuery, (workflows) => {
+		await runChunked(queryRunner, workflowsQuery, (workflows) => {
 			workflows.forEach(async (workflow) => {
 				const nodes = JSON.parse(workflow.nodes);
 				let credentialsUpdated = false;
@@ -69,7 +68,7 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac
 			WHERE "waitTill" IS NOT NULL AND finished = 0
 		`;
 		// @ts-ignore
-		await helpers.runChunked(waitingExecutionsQuery, (waitingExecutions) => {
+		await runChunked(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
 			waitingExecutions.forEach(async (execution) => {
 				const data = JSON.parse(execution.workflowData);
 				let credentialsUpdated = false;
@@ -153,7 +152,6 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac
 
 	public async down(queryRunner: QueryRunner): Promise<void> {
 		const tablePrefix = config.getEnv('database.tablePrefix');
-		const helpers = new MigrationHelpers(queryRunner);
 
 		const credentialsEntities = await queryRunner.query(`
 			SELECT id, name, type
@@ -166,7 +164,7 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac
 		`;
 
 		// @ts-ignore
-		await helpers.runChunked(workflowsQuery, (workflows) => {
+		await runChunked(queryRunner, workflowsQuery, (workflows) => {
 			// @ts-ignore
 			workflows.forEach(async (workflow) => {
 				const nodes = JSON.parse(workflow.nodes);
@@ -216,7 +214,7 @@ export class UpdateWorkflowCredentials1630330987096 implements MigrationInterfac
 		`;
 
 		// @ts-ignore
-		await helpers.runChunked(waitingExecutionsQuery, (waitingExecutions) => {
+		await runChunked(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
 			// @ts-ignore
 			waitingExecutions.forEach(async (execution) => {
 				const data = JSON.parse(execution.workflowData);
@@ -1,5 +1,3 @@
-import config from '../../../../config';
-
 import { InitialMigration1588102412422 } from './1588102412422-InitialMigration';
 import { WebhookModel1592445003908 } from './1592445003908-WebhookModel';
 import { CreateIndexStoppedAt1594825041918 } from './1594825041918-CreateIndexStoppedAt';
packages/cli/src/databases/ormconfig.ts (new file, 67 lines)
@@ -0,0 +1,67 @@
+import path from 'path';
+import { UserSettings } from 'n8n-core';
+import { entities } from './entities';
+
+export default [
+	{
+		name: 'sqlite',
+		type: 'sqlite',
+		logging: true,
+		entities: Object.values(entities),
+		database: path.resolve(UserSettings.getUserN8nFolderPath(), 'database.sqlite'),
+		migrations: [path.resolve('migrations', 'sqlite', 'index.ts')],
+		cli: {
+			entitiesDir: path.resolve('entities'),
+			migrationsDir: path.resolve('migrations', 'sqlite'),
+		},
+	},
+	{
+		name: 'postgres',
+		type: 'postgres',
+		database: 'n8n',
+		schema: 'public',
+		username: 'postgres',
+		password: '',
+		host: 'localhost',
+		port: 5432,
+		logging: false,
+		entities: Object.values(entities),
+		migrations: [path.resolve('migrations', 'postgresdb', 'index.ts')],
+		cli: {
+			entitiesDir: path.resolve('entities'),
+			migrationsDir: path.resolve('migrations', 'postgresdb'),
+		},
+	},
+	{
+		name: 'mysql',
+		type: 'mysql',
+		database: 'n8n',
+		username: 'root',
+		password: 'password',
+		host: 'localhost',
+		port: 3306,
+		logging: false,
+		entities: Object.values(entities),
+		migrations: [path.resolve('migrations', 'mysqldb', 'index.ts')],
+		cli: {
+			entitiesDir: path.resolve('entities'),
+			migrationsDir: path.resolve('migrations', 'mysqldb'),
+		},
+	},
+	{
+		name: 'mariadb',
+		type: 'mariadb',
+		database: 'n8n',
+		username: 'root',
+		password: 'password',
+		host: 'localhost',
+		port: 3306,
+		logging: false,
+		entities: Object.values(entities),
+		migrations: [path.resolve('migrations', 'mysqldb', 'index.ts')],
+		cli: {
+			entitiesDir: path.resolve('entities'),
+			migrationsDir: path.resolve('migrations', 'mysqldb'),
+		},
+	},
+];
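Note: the file above is an array of named TypeORM connection configurations. As a rough, illustrative sketch only (not part of this commit), one entry could be picked out by name and opened with TypeORM's 0.2.x programmatic API; the helper name connectByName and the cast are assumptions made for the example.

	// Illustrative sketch: load one named entry from the ormconfig array.
	import { ConnectionOptions, createConnection } from 'typeorm';
	import ormconfig from './ormconfig';

	async function connectByName(name: string) {
		// The entries are plain object literals, so they are cast to TypeORM's option type here.
		const options = (ormconfig as unknown as ConnectionOptions[]).find((c) => c.name === name);
		if (!options) throw new Error(`No ormconfig entry named "${name}"`);
		return createConnection(options);
	}

	// e.g. await connectByName('sqlite');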
@@ -1,5 +1,7 @@
+/* eslint-disable no-await-in-loop */
 import { readFileSync, rmSync } from 'fs';
 import { UserSettings } from 'n8n-core';
+import { QueryRunner } from 'typeorm/query-runner/QueryRunner';
 import { getLogger } from '../../Logger';
 
 const PERSONALIZATION_SURVEY_FILENAME = 'personalizationSurvey.json';
@@ -53,3 +55,31 @@ export function logMigrationEnd(migrationName: string): void {
 		logger.warn('Migrations finished.');
 	}, 100);
 }
+
+export function chunkQuery(query: string, limit: number, offset = 0): string {
+	return `
+		${query}
+		LIMIT ${limit}
+		OFFSET ${offset}
+	`;
+}
+
+export async function runChunked(
+	queryRunner: QueryRunner,
+	query: string,
+	// eslint-disable-next-line @typescript-eslint/no-explicit-any
+	operation: (results: any[]) => Promise<void>,
+	limit = 100,
+): Promise<void> {
+	let offset = 0;
+	let chunkedQuery: string;
+	let chunkedQueryResults: unknown[];
+
+	do {
+		chunkedQuery = chunkQuery(query, limit, offset);
+		chunkedQueryResults = (await queryRunner.query(chunkedQuery)) as unknown[];
+		// pass a copy to prevent errors from mutation
+		await operation([...chunkedQueryResults]);
+		offset += limit;
+	} while (chunkedQueryResults.length === limit);
+}
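For reference, the migrations changed earlier in this commit now call this standalone runChunked helper instead of the old MigrationHelpers class. A minimal sketch of the call shape follows; the class name, timestamp, and query are illustrative and not taken from this diff.

	import { MigrationInterface, QueryRunner } from 'typeorm';
	import { runChunked } from '../../utils/migrationHelpers';

	export class ExampleChunkedMigration1650000000000 implements MigrationInterface {
		public async up(queryRunner: QueryRunner): Promise<void> {
			const workflowsQuery = `
				SELECT id, nodes
				FROM workflow_entity
			`;
			// runChunked appends LIMIT/OFFSET (100 rows per page by default) and
			// invokes the callback once per page until a short page is returned.
			await runChunked(queryRunner, workflowsQuery, async (workflows) => {
				// process one chunk of rows here
			});
		}

		public async down(): Promise<void> {
			// no-op for this sketch
		}
	}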
@@ -11,9 +11,9 @@ import { randomApiKey, randomEmail, randomName, randomString, randomValidPasswor
 import { CredentialsEntity } from '../../../src/databases/entities/CredentialsEntity';
 import { hashPassword } from '../../../src/UserManagement/UserManagementHelper';
 import { entities } from '../../../src/databases/entities';
-import { mysqlMigrations } from '../../../src/databases/mysqldb/migrations';
+import { mysqlMigrations } from '../../../src/databases/migrations/mysqldb';
-import { postgresMigrations } from '../../../src/databases/postgresdb/migrations';
+import { postgresMigrations } from '../../../src/databases/migrations/postgresdb';
-import { sqliteMigrations } from '../../../src/databases/sqlite/migrations';
+import { sqliteMigrations } from '../../../src/databases/migrations/sqlite';
 import { categorize, getPostgresSchemaSection } from './utils';
 import { createCredentiasFromCredentialsEntity } from '../../../src/CredentialsHelper';
 