mirror of
https://github.com/n8n-io/n8n.git
synced 2025-03-05 20:50:17 -08:00
fix(core): Improve the performance of last 2 sqlite migrations (#6522)
This commit is contained in:
parent
76aca62dd3
commit
31cba87d30
|
@@ -1,9 +1,19 @@
|
||||||
|
import { statSync } from 'fs';
|
||||||
|
import path from 'path';
|
||||||
|
import { UserSettings } from 'n8n-core';
|
||||||
import type { MigrationContext, IrreversibleMigration } from '@db/types';
|
import type { MigrationContext, IrreversibleMigration } from '@db/types';
|
||||||
|
import config from '@/config';
|
||||||
|
import { copyTable } from '@/databases/utils/migrationHelpers';
|
||||||
|
|
||||||
export class MigrateIntegerKeysToString1690000000002 implements IrreversibleMigration {
|
export class MigrateIntegerKeysToString1690000000002 implements IrreversibleMigration {
|
||||||
transaction = false as const;
|
transaction = false as const;
|
||||||
|
|
||||||
async up({ queryRunner, tablePrefix }: MigrationContext) {
|
async up(context: MigrationContext) {
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-use-before-define
|
||||||
|
await pruneExecutionsData(context);
|
||||||
|
|
||||||
|
const { queryRunner, tablePrefix } = context;
|
||||||
|
|
||||||
await queryRunner.query(`
|
await queryRunner.query(`
|
||||||
CREATE TABLE "${tablePrefix}TMP_workflow_entity" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(128) NOT NULL, "active" boolean NOT NULL, "nodes" text, "connections" text NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "settings" text, "staticData" text, "pinData" text, "versionId" varchar(36), "triggerCount" integer NOT NULL DEFAULT 0);`);
|
CREATE TABLE "${tablePrefix}TMP_workflow_entity" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(128) NOT NULL, "active" boolean NOT NULL, "nodes" text, "connections" text NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "settings" text, "staticData" text, "pinData" text, "versionId" varchar(36), "triggerCount" integer NOT NULL DEFAULT 0);`);
|
||||||
await queryRunner.query(
|
await queryRunner.query(
|
||||||
|
@@ -108,9 +118,7 @@ export class MigrateIntegerKeysToString1690000000002 implements IrreversibleMigr
|
||||||
"data" text NOT NULL, "status" varchar,
|
"data" text NOT NULL, "status" varchar,
|
||||||
FOREIGN KEY("workflowId") REFERENCES "${tablePrefix}workflow_entity" ("id") ON DELETE CASCADE
|
FOREIGN KEY("workflowId") REFERENCES "${tablePrefix}workflow_entity" ("id") ON DELETE CASCADE
|
||||||
);`);
|
);`);
|
||||||
await queryRunner.query(
|
await copyTable({ tablePrefix, queryRunner }, 'execution_entity', 'TMP_execution_entity');
|
||||||
`INSERT INTO "${tablePrefix}TMP_execution_entity" SELECT * FROM "${tablePrefix}execution_entity";`,
|
|
||||||
);
|
|
||||||
await queryRunner.query(`DROP TABLE "${tablePrefix}execution_entity";`);
|
await queryRunner.query(`DROP TABLE "${tablePrefix}execution_entity";`);
|
||||||
await queryRunner.query(
|
await queryRunner.query(
|
||||||
`ALTER TABLE "${tablePrefix}TMP_execution_entity" RENAME TO "${tablePrefix}execution_entity";`,
|
`ALTER TABLE "${tablePrefix}TMP_execution_entity" RENAME TO "${tablePrefix}execution_entity";`,
|
||||||
|
@@ -178,3 +186,44 @@ export class MigrateIntegerKeysToString1690000000002 implements IrreversibleMigr
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Target size for the sqlite database file after pruning: 1 GiB.
const DESIRED_DATABASE_FILE_SIZE = 1 * 1024 * 1024 * 1024; // 1 GB

// Pruning during migration is opt-in: it only runs when the
// MIGRATIONS_PRUNING_ENABLED environment variable is exactly 'true'.
const migrationsPruningEnabled = process.env.MIGRATIONS_PRUNING_ENABLED === 'true';
|
||||||
|
|
||||||
|
function getSqliteDbFileSize(): number {
|
||||||
|
const filename = path.resolve(
|
||||||
|
UserSettings.getUserN8nFolderPath(),
|
||||||
|
config.getEnv('database.sqlite.database'),
|
||||||
|
);
|
||||||
|
const { size } = statSync(filename);
|
||||||
|
return size;
|
||||||
|
}
|
||||||
|
|
||||||
|
const pruneExecutionsData = async ({ queryRunner, tablePrefix }: MigrationContext) => {
|
||||||
|
if (migrationsPruningEnabled) {
|
||||||
|
const dbFileSize = getSqliteDbFileSize();
|
||||||
|
if (dbFileSize < DESIRED_DATABASE_FILE_SIZE) {
|
||||||
|
console.log(`DB Size not large enough to prune: ${dbFileSize}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.time('pruningData');
|
||||||
|
const counting = (await queryRunner.query(
|
||||||
|
`select count(id) as rows from "${tablePrefix}execution_entity";`,
|
||||||
|
)) as Array<{ rows: number }>;
|
||||||
|
|
||||||
|
const averageExecutionSize = dbFileSize / counting[0].rows;
|
||||||
|
const numberOfExecutionsToKeep = Math.floor(DESIRED_DATABASE_FILE_SIZE / averageExecutionSize);
|
||||||
|
|
||||||
|
const query = `SELECT id FROM "${tablePrefix}execution_entity" ORDER BY id DESC limit ${numberOfExecutionsToKeep}, 1`;
|
||||||
|
const idToKeep = await queryRunner
|
||||||
|
.query(query)
|
||||||
|
.then((rows: Array<{ id: number }>) => rows[0].id);
|
||||||
|
|
||||||
|
const removalQuery = `DELETE FROM "${tablePrefix}execution_entity" WHERE id < ${idToKeep} and status IN ('success')`;
|
||||||
|
await queryRunner.query(removalQuery);
|
||||||
|
console.timeEnd('pruningData');
|
||||||
|
} else {
|
||||||
|
console.log('Pruning was requested, but was not enabled');
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
|
@@ -1,4 +1,5 @@
|
||||||
import type { MigrationContext, ReversibleMigration } from '@/databases/types';
|
import type { MigrationContext, ReversibleMigration } from '@/databases/types';
|
||||||
|
import { copyTable } from '@/databases/utils/migrationHelpers';
|
||||||
|
|
||||||
export class SeparateExecutionData1690000000010 implements ReversibleMigration {
|
export class SeparateExecutionData1690000000010 implements ReversibleMigration {
|
||||||
async up({ queryRunner, tablePrefix }: MigrationContext): Promise<void> {
|
async up({ queryRunner, tablePrefix }: MigrationContext): Promise<void> {
|
||||||
|
@@ -11,13 +12,12 @@ export class SeparateExecutionData1690000000010 implements ReversibleMigration {
|
||||||
)`,
|
)`,
|
||||||
);
|
);
|
||||||
|
|
||||||
await queryRunner.query(
|
await copyTable(
|
||||||
`INSERT INTO "${tablePrefix}execution_data" (
|
{ tablePrefix, queryRunner },
|
||||||
"executionId",
|
'execution_entity',
|
||||||
"workflowData",
|
'execution_data',
|
||||||
"data")
|
['id', 'workflowData', 'data'],
|
||||||
SELECT "id", "workflowData", "data" FROM "${tablePrefix}execution_entity"
|
['executionId', 'workflowData', 'data'],
|
||||||
`,
|
|
||||||
);
|
);
|
||||||
|
|
||||||
await queryRunner.query(
|
await queryRunner.query(
|
||||||
|
|
|
@@ -115,6 +115,37 @@ export const wrapMigration = (migration: Migration) => {
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export const copyTable = async (
|
||||||
|
{ tablePrefix, queryRunner }: Pick<MigrationContext, 'queryRunner' | 'tablePrefix'>,
|
||||||
|
fromTable: string,
|
||||||
|
toTable: string,
|
||||||
|
fromFields: string[] = [],
|
||||||
|
toFields: string[] = [],
|
||||||
|
batchSize = 10,
|
||||||
|
) => {
|
||||||
|
const driver = queryRunner.connection.driver;
|
||||||
|
fromTable = driver.escape(`${tablePrefix}${fromTable}`);
|
||||||
|
toTable = driver.escape(`${tablePrefix}${toTable}`);
|
||||||
|
const fromFieldsStr = fromFields.length
|
||||||
|
? fromFields.map((f) => driver.escape(f)).join(', ')
|
||||||
|
: '*';
|
||||||
|
const toFieldsStr = toFields.length
|
||||||
|
? `(${toFields.map((f) => driver.escape(f)).join(', ')})`
|
||||||
|
: '';
|
||||||
|
|
||||||
|
const total = await queryRunner
|
||||||
|
.query(`SELECT COUNT(*) as count from ${fromTable}`)
|
||||||
|
.then((rows: Array<{ count: number }>) => rows[0].count);
|
||||||
|
|
||||||
|
let migrated = 0;
|
||||||
|
while (migrated < total) {
|
||||||
|
await queryRunner.query(
|
||||||
|
`INSERT INTO ${toTable} ${toFieldsStr} SELECT ${fromFieldsStr} FROM ${fromTable} LIMIT ${migrated}, ${batchSize}`,
|
||||||
|
);
|
||||||
|
migrated += batchSize;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
function batchQuery(query: string, limit: number, offset = 0): string {
|
function batchQuery(query: string, limit: number, offset = 0): string {
|
||||||
return `
|
return `
|
||||||
${query}
|
${query}
|
||||||
|
|
Loading…
Reference in a new issue