diff --git a/packages/cli/src/databases/migrations/sqlite/1690000000002-MigrateIntegerKeysToString.ts b/packages/cli/src/databases/migrations/sqlite/1690000000002-MigrateIntegerKeysToString.ts index 80e8dcaed6..d3ea972cce 100644 --- a/packages/cli/src/databases/migrations/sqlite/1690000000002-MigrateIntegerKeysToString.ts +++ b/packages/cli/src/databases/migrations/sqlite/1690000000002-MigrateIntegerKeysToString.ts @@ -1,9 +1,19 @@ +import { statSync } from 'fs'; +import path from 'path'; +import { UserSettings } from 'n8n-core'; import type { MigrationContext, IrreversibleMigration } from '@db/types'; +import config from '@/config'; +import { copyTable } from '@/databases/utils/migrationHelpers'; export class MigrateIntegerKeysToString1690000000002 implements IrreversibleMigration { transaction = false as const; - async up({ queryRunner, tablePrefix }: MigrationContext) { + async up(context: MigrationContext) { + // eslint-disable-next-line @typescript-eslint/no-use-before-define + await pruneExecutionsData(context); + + const { queryRunner, tablePrefix } = context; + await queryRunner.query(` CREATE TABLE "${tablePrefix}TMP_workflow_entity" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(128) NOT NULL, "active" boolean NOT NULL, "nodes" text, "connections" text NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "settings" text, "staticData" text, "pinData" text, "versionId" varchar(36), "triggerCount" integer NOT NULL DEFAULT 0);`); await queryRunner.query( @@ -108,9 +118,7 @@ export class MigrateIntegerKeysToString1690000000002 implements IrreversibleMigr "data" text NOT NULL, "status" varchar, FOREIGN KEY("workflowId") REFERENCES "${tablePrefix}workflow_entity" ("id") ON DELETE CASCADE );`); - await queryRunner.query( - `INSERT INTO "${tablePrefix}TMP_execution_entity" SELECT * FROM "${tablePrefix}execution_entity";`, - ); + await 
copyTable({ tablePrefix, queryRunner }, 'execution_entity', 'TMP_execution_entity'); await queryRunner.query(`DROP TABLE "${tablePrefix}execution_entity";`); await queryRunner.query( `ALTER TABLE "${tablePrefix}TMP_execution_entity" RENAME TO "${tablePrefix}execution_entity";`, @@ -178,3 +186,44 @@ export class MigrateIntegerKeysToString1690000000002 implements IrreversibleMigr ); } } + +const DESIRED_DATABASE_FILE_SIZE = 1 * 1024 * 1024 * 1024; // 1 GB +const migrationsPruningEnabled = process.env.MIGRATIONS_PRUNING_ENABLED === 'true'; + +function getSqliteDbFileSize(): number { + const filename = path.resolve( + UserSettings.getUserN8nFolderPath(), + config.getEnv('database.sqlite.database'), + ); + const { size } = statSync(filename); + return size; +} + +const pruneExecutionsData = async ({ queryRunner, tablePrefix }: MigrationContext) => { + if (migrationsPruningEnabled) { + const dbFileSize = getSqliteDbFileSize(); + if (dbFileSize < DESIRED_DATABASE_FILE_SIZE) { + console.log(`DB Size not large enough to prune: ${dbFileSize}`); + return; + } + + console.time('pruningData'); + const counting = (await queryRunner.query( + `select count(id) as rows from "${tablePrefix}execution_entity";`, + )) as Array<{ rows: number }>; + + const averageExecutionSize = dbFileSize / counting[0].rows; + const numberOfExecutionsToKeep = Math.floor(DESIRED_DATABASE_FILE_SIZE / averageExecutionSize); + + const query = `SELECT id FROM "${tablePrefix}execution_entity" ORDER BY id DESC limit ${numberOfExecutionsToKeep}, 1`; + const idToKeep = await queryRunner + .query(query) + .then((rows: Array<{ id: number }>) => rows[0].id); + + const removalQuery = `DELETE FROM "${tablePrefix}execution_entity" WHERE id < ${idToKeep} and status IN ('success')`; + await queryRunner.query(removalQuery); + console.timeEnd('pruningData'); + } else { + console.log('Pruning was requested, but was not enabled'); + } +}; diff --git 
a/packages/cli/src/databases/migrations/sqlite/1690000000010-SeparateExecutionData.ts b/packages/cli/src/databases/migrations/sqlite/1690000000010-SeparateExecutionData.ts index 3d8943a7a6..55f3e74687 100644 --- a/packages/cli/src/databases/migrations/sqlite/1690000000010-SeparateExecutionData.ts +++ b/packages/cli/src/databases/migrations/sqlite/1690000000010-SeparateExecutionData.ts @@ -1,4 +1,5 @@ import type { MigrationContext, ReversibleMigration } from '@/databases/types'; +import { copyTable } from '@/databases/utils/migrationHelpers'; export class SeparateExecutionData1690000000010 implements ReversibleMigration { async up({ queryRunner, tablePrefix }: MigrationContext): Promise<void> { @@ -11,13 +12,12 @@ export class SeparateExecutionData1690000000010 implements ReversibleMigration { )`, ); - await queryRunner.query( - `INSERT INTO "${tablePrefix}execution_data" ( - "executionId", - "workflowData", - "data") - SELECT "id", "workflowData", "data" FROM "${tablePrefix}execution_entity" - `, + await copyTable( + { tablePrefix, queryRunner }, + 'execution_entity', + 'execution_data', + ['id', 'workflowData', 'data'], + ['executionId', 'workflowData', 'data'], ); await queryRunner.query( diff --git a/packages/cli/src/databases/utils/migrationHelpers.ts b/packages/cli/src/databases/utils/migrationHelpers.ts index 5c1a91763e..bacfcfc315 100644 --- a/packages/cli/src/databases/utils/migrationHelpers.ts +++ b/packages/cli/src/databases/utils/migrationHelpers.ts @@ -115,6 +115,37 @@ export const wrapMigration = (migration: Migration) => { }); }; +export const copyTable = async ( + { tablePrefix, queryRunner }: Pick<MigrationContext, 'queryRunner' | 'tablePrefix'>, + fromTable: string, + toTable: string, + fromFields: string[] = [], + toFields: string[] = [], + batchSize = 10, +) => { + const driver = queryRunner.connection.driver; + fromTable = driver.escape(`${tablePrefix}${fromTable}`); + toTable = driver.escape(`${tablePrefix}${toTable}`); + const fromFieldsStr = fromFields.length + ? 
fromFields.map((f) => driver.escape(f)).join(', ') + : '*'; + const toFieldsStr = toFields.length + ? `(${toFields.map((f) => driver.escape(f)).join(', ')})` + : ''; + + const total = await queryRunner + .query(`SELECT COUNT(*) as count from ${fromTable}`) + .then((rows: Array<{ count: number }>) => rows[0].count); + + let migrated = 0; + while (migrated < total) { + await queryRunner.query( + `INSERT INTO ${toTable} ${toFieldsStr} SELECT ${fromFieldsStr} FROM ${fromTable} LIMIT ${migrated}, ${batchSize}`, + ); + migrated += batchSize; + } +}; + function batchQuery(query: string, limit: number, offset = 0): string { return ` ${query}