From 47b296ae236f7a8ebb949323fdc5e51a0b44cea5 Mon Sep 17 00:00:00 2001
From: Danny Martini
Date: Thu, 19 Sep 2024 13:58:17 +0200
Subject: [PATCH] --wip--

---
 packages/cli/src/commands/export/all.ts | 30 ++++++--
 packages/cli/src/commands/import/all.ts | 94 +++++++++++++++++++++++++
 tasks.md                                |  6 ++
 3 files changed, 123 insertions(+), 7 deletions(-)

diff --git a/packages/cli/src/commands/export/all.ts b/packages/cli/src/commands/export/all.ts
index 2d83e2e432..51755fa6a3 100644
--- a/packages/cli/src/commands/export/all.ts
+++ b/packages/cli/src/commands/export/all.ts
@@ -5,6 +5,7 @@ import { join } from 'path';
 import Container from 'typedi';
 
 import { BaseCommand } from '../base-command';
+import { jsonColumnType } from '@/databases/entities/abstract-entity';
 
 export class ExportAllCommand extends BaseCommand {
   static description = 'Export Everything';
@@ -14,7 +15,6 @@ export class ExportAllCommand extends BaseCommand {
   // TODO: add `exportPath` flag
   static flags = {};
 
-  // eslint-disable-next-line complexity
   async run() {
     const connection = Container.get(DataSource);
     const excludeList = [
@@ -26,22 +26,38 @@ export class ExportAllCommand extends BaseCommand {
       'annotation_tag_entity',
     ];
     const tables = connection.entityMetadatas
-      .map((v) => v.tableName)
-      .filter((name) => !excludeList.includes(name));
+      .filter((v) => !excludeList.includes(v.tableName))
+      .map((v) => ({
+        name: v.tableName,
+        columns: v.columns,
+      }));
 
     const backupPath = '/tmp/backup';
     await fs.promises.mkdir(backupPath, { recursive: true });
 
-    for (const tableName of tables) {
+    for (const { name, columns } of tables) {
       // TODO: implement batching
       //const rows = await repo.find({ relations: [] });
-      const rows = await connection.query(`SELECT * from ${tableName}`);
+      const rows = await connection.query(`SELECT * from ${name}`);
 
-      const stream = fs.createWriteStream(join(backupPath, `${tableName}.jsonl`));
+      const stream = fs.createWriteStream(join(backupPath, `${name}.jsonl`));
       for (const row of rows) {
-        stream.write(JSON.stringify(row));
+        const data = JSON.stringify(row);
+
+        // TODO: fix the types
+        for (const column of columns) {
+          // TODO: only do this for sqlite
+          //
+          //
+          //TODO: STOPPED HERE
+          if (column.type === jsonColumnType) {
+            console.log(column.type);
+          }
+        }
+
+        stream.write(data);
         stream.write('\n');
       }
diff --git a/packages/cli/src/commands/import/all.ts b/packages/cli/src/commands/import/all.ts
index e69de29bb2..911ae8cc06 100644
--- a/packages/cli/src/commands/import/all.ts
+++ b/packages/cli/src/commands/import/all.ts
@@ -0,0 +1,94 @@
+import { DataSource, MigrationExecutor } from '@n8n/typeorm';
+import * as assert from 'assert/strict';
+import fs from 'fs';
+import { join } from 'path';
+import Container from 'typedi';
+
+import { BaseCommand } from '../base-command';
+import { ApplicationError } from 'n8n-workflow';
+
+// TODO: do this
+//const fs = require('fs');
+//const readline = require('readline');
+//
+//(async () => {
+//  const fileStream = fs.createReadStream(__dirname + '/test.jsonl');
+//  const lineStream = readline.createInterface({
+//    input: fileStream,
+//    crlfDelay: Infinity,
+//  });
+//
+//  for await (const line of lineStream) {
+//    console.log(JSON.parse(line));
+//  }
+//})();
+
+export class ImportAllCommand extends BaseCommand {
+  static description = 'Import Everything';
+
+  static examples = ['$ n8n import:all'];
+
+  // TODO: add `importPath` flag
+  // TODO: add `clean` flag
+  static flags = {};
+
+  // TODO: do batching
+  async run() {
+    // TODO:
+    // 1. check last migrations
+    const connection = Container.get(DataSource);
+    const migrationExecutor = new MigrationExecutor(connection);
+    const executedMigrations = await migrationExecutor.getExecutedMigrations();
+    const lastExecutedMigration = executedMigrations.at(0);
+
+    assert.ok(lastExecutedMigration, 'should have been run by db.ts');
+
+    const backupPath = '/tmp/backup';
+
+    const lastMigrationInBackup = (
+      await fs.promises.readFile(join(backupPath, 'lastMigration'), 'utf8')
+    ).trim();
+
+    if (lastMigrationInBackup !== lastExecutedMigration.name) {
+      throw new ApplicationError('Last migrations differ. Make sure to use the same n8n version.');
+    }
+
+    // (2. if clean truncate)
+    // (2. if no clean, check if tables are empty)
+    // 3. disable foreign keys
+
+    // 4. import each jsonl
+    const excludeList = [
+      'execution_annotation_tags',
+      'execution_annotations',
+      'execution_data',
+      'execution_entity',
+      'execution_metadata',
+      'annotation_tag_entity',
+    ];
+    const tables = connection.entityMetadatas
+      .filter((v) => !excludeList.includes(v.tableName))
+      .map((v) => ({ name: v.tableName, target: v.target }));
+
+    for (const { name, target } of tables) {
+      const repo = connection.getRepository(target);
+      await repo.delete({});
+
+      const rows = (await fs.promises.readFile(`${join(backupPath, name)}.jsonl`, 'utf8'))
+        .split('\n')
+        .filter((row) => row !== '');
+
+      for (const row of rows) {
+        await repo.insert(JSON.parse(row));
+      }
+    }
+
+    // 5. enable foreign keys
+  }
+
+  async catch(error: Error) {
+    console.log(error.stack);
+    this.logger.error('Error importing data. See log messages for details.');
+    this.logger.error(error.message);
+  }
+}
diff --git a/tasks.md b/tasks.md
index 986ccc9e26..0e9dda99b5 100644
--- a/tasks.md
+++ b/tasks.md
@@ -29,4 +29,10 @@
 * include only columns (and all of them) when serializing
   * ignoring virtual properties and including ignored things like passwords
+* make sure sequence counters are updated
+
+# Questions
+
+* what if one record fails?
+  * skip, abort?
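
The commented-out readline block at the top of import/all.ts sketches the streaming read that the readFile/split loop should eventually become. A minimal self-contained version, assuming one JSON object per line; the generator name readJsonLines is illustrative, not part of the patch:

import fs from 'fs';
import readline from 'readline';

// Stream a .jsonl file one line at a time instead of readFile + split,
// so large tables can be imported without holding the whole file in memory.
async function* readJsonLines(filePath: string): AsyncGenerator<unknown> {
  const lineStream = readline.createInterface({
    input: fs.createReadStream(filePath),
    crlfDelay: Infinity, // treat \r\n as a single line break
  });

  for await (const line of lineStream) {
    if (line !== '') yield JSON.parse(line);
  }
}

The import loop could then be `for await (const row of readJsonLines(path)) { await repo.insert(row); }`, which also gives a natural seam for the batching TODO.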
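
The export command currently stops at the sqlite JSON-column TODO. One plausible resolution, assuming sqlite hands back simple-json columns from a raw SELECT as strings keyed by database column name; the helper, its parameter names, and the 'simple-json' check (sqlite's variant of the imported jsonColumnType) are assumptions, not the committed implementation:

// Sketch: parse string-serialized JSON columns before writing the row to
// JSONL, so the backup holds real objects instead of double-encoded strings.
function normalizeJsonColumns(
  row: Record<string, unknown>,
  columns: Array<{ databaseName: string; type: unknown }>,
  dbType: string,
): Record<string, unknown> {
  if (dbType !== 'sqlite') return row; // other drivers return parsed JSON already (assumed)
  for (const column of columns) {
    const value = row[column.databaseName];
    if (column.type === 'simple-json' && typeof value === 'string') {
      row[column.databaseName] = JSON.parse(value);
    }
  }
  return row;
}

The export loop would call this on each row before JSON.stringify, replacing the console.log placeholder.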
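
Steps 3 and 5 of the import ('disable/enable foreign keys') are still comments, and the expansion is driver-specific. A sketch with the usual statements per database; none of this is in the patch, and the Postgres variant only affects constraints declared DEFERRABLE and must run inside a transaction:

import { DataSource } from '@n8n/typeorm';

// Sketch: toggle foreign-key enforcement around the import so rows can be
// inserted in any table order. sqlite's PRAGMA and mysql's flag are
// per-connection/per-session settings.
async function setForeignKeyChecks(connection: DataSource, enabled: boolean) {
  switch (connection.options.type) {
    case 'sqlite':
      await connection.query(`PRAGMA foreign_keys = ${enabled ? 'ON' : 'OFF'};`);
      break;
    case 'mysql':
    case 'mariadb':
      await connection.query(`SET FOREIGN_KEY_CHECKS = ${enabled ? 1 : 0};`);
      break;
    case 'postgres':
      await connection.query(enabled ? 'SET CONSTRAINTS ALL IMMEDIATE;' : 'SET CONSTRAINTS ALL DEFERRED;');
      break;
  }
}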
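
For the 'make sure sequence counters are updated' task in tasks.md, the usual Postgres approach is setval over pg_get_serial_sequence once the rows are in. A sketch, assuming Postgres and an integer id primary key; table and column names are interpolated only because they come from entity metadata, not user input:

// Sketch (Postgres): after inserting rows with explicit ids, advance the
// table's sequence so subsequent inserts don't collide with imported ids.
async function resetSequence(connection: DataSource, table: string, idColumn = 'id') {
  await connection.query(
    `SELECT setval(
       pg_get_serial_sequence('${table}', '${idColumn}'),
       (SELECT COALESCE(MAX(${idColumn}), 1) FROM ${table})
     );`,
  );
}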