mirror of https://github.com/n8n-io/n8n.git (synced 2025-03-05 20:50:17 -08:00)

init

parent 08ba9a36a4
commit 603cc67c64

68 packages/cli/src/commands/export/all.ts Normal file

@@ -0,0 +1,68 @@
import { DataSource, MigrationExecutor } from '@n8n/typeorm';
import * as assert from 'assert/strict';
import fs from 'fs';
import { join } from 'path';
import Container from 'typedi';

import { BaseCommand } from '../base-command';

export class ExportAllCommand extends BaseCommand {
	static description = 'Export Everything';

	static examples = ['$ n8n export:all'];

	// TODO: add `exportPath` flag
	static flags = {};

	// eslint-disable-next-line complexity
	async run() {
		const connection = Container.get(DataSource);
		const excludeList = [
			'execution_annotation_tags',
			'execution_annotations',
			'execution_data',
			'execution_entity',
			'execution_metadata',
			'annotation_tag_entity',
		];
		const tables = connection.entityMetadatas
			.map((v) => v.tableName)
			.filter((name) => !excludeList.includes(name));

		const backupPath = '/tmp/backup';
		await fs.promises.mkdir(backupPath, { recursive: true });

		for (const tableName of tables) {
			// TODO: implement batching
			//const rows = await repo.find({ relations: [] });

			const rows = await connection.query(`SELECT * from ${tableName}`);

			const stream = fs.createWriteStream(join(backupPath, `${tableName}.jsonl`));

			for (const row of rows) {
				stream.write(JSON.stringify(row));
				stream.write('\n');
			}

			stream.end();
		}

		const migrationExecutor = new MigrationExecutor(connection);
		const executedMigrations = await migrationExecutor.getExecutedMigrations();
		const lastExecutedMigration = executedMigrations.at(0);

		assert.ok(lastExecutedMigration, 'should have been run by db.ts');

		await fs.promises.writeFile(
			join(backupPath, 'lastMigration'),
			lastExecutedMigration.name,
			'utf8',
		);
	}

	async catch(error: Error) {
		this.logger.error('Error exporting workflows. See log messages for details.');
		this.logger.error(error.message);
	}
}
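
The `// TODO: implement batching` above means every table is currently read into memory with a single `SELECT *`. A minimal sketch of how LIMIT/OFFSET paging could look, assuming the target database accepts that syntax; the helper name and batch size are illustrative, not part of this commit:

```ts
import fs from 'fs';
import { DataSource } from '@n8n/typeorm';

const BATCH_SIZE = 500; // assumed page size, tune as needed

// Sketch: stream one table to JSONL in pages instead of a single big SELECT.
// Acceptable here because tasks.md requires all n8n instances to be shut down,
// so rows cannot shift between pages.
async function exportTableInBatches(connection: DataSource, tableName: string, filePath: string) {
	const stream = fs.createWriteStream(filePath);
	for (let offset = 0; ; offset += BATCH_SIZE) {
		const rows = await connection.query(
			`SELECT * FROM ${tableName} LIMIT ${BATCH_SIZE} OFFSET ${offset}`,
		);
		if (rows.length === 0) break; // no more rows: table fully exported
		for (const row of rows) {
			stream.write(JSON.stringify(row) + '\n');
		}
	}
	stream.end();
}
```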

0 packages/cli/src/commands/import/all.ts Normal file

32 tasks.md Normal file

@@ -0,0 +1,32 @@
# Goals

* export all data (except execution-related data) into a file
* import from said file
* support flag `clean` (default: false); see the sketch after this list
  * false: if the db is empty, commence; otherwise print an error
  * true: truncate all tables and commence
* foreign keys are retained
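
A minimal sketch of the `clean` behaviour described above; the helper name, the row-count query, and the use of `DELETE` rather than `TRUNCATE` (so it also works on SQLite) are assumptions, not existing n8n code:

```ts
import { DataSource } from '@n8n/typeorm';

// Sketch: prepare the target database before an import according to `clean`.
async function prepareDatabaseForImport(connection: DataSource, tables: string[], clean: boolean) {
	if (!clean) {
		// clean: false, so only proceed when every target table is empty
		for (const tableName of tables) {
			const [row] = await connection.query(`SELECT COUNT(*) AS count FROM ${tableName}`);
			if (Number(row.count) > 0) {
				throw new Error(`Table ${tableName} is not empty. Re-run with --clean to truncate it first.`);
			}
		}
		return;
	}
	// clean: true, so truncate all tables and commence the import
	for (const tableName of tables) {
		await connection.query(`DELETE FROM ${tableName}`);
	}
}
```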

# Future Goals

* make the export atomic
  * for now, users have to shut down all n8n instances while exporting and importing
* keep foreign keys enabled while importing (see the sketch below)
  * for now, we disable them before the import and re-enable them afterwards
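
How the temporary foreign-key toggle could look per database driver; the statements are the standard ones for SQLite, MySQL/MariaDB, and Postgres (where `session_replication_role` also requires sufficient privileges), but the helper itself is only a sketch:

```ts
import { DataSource, QueryRunner } from '@n8n/typeorm';

// Sketch: run an import callback with FK enforcement disabled, then restore it.
// A single QueryRunner is used so the session-level settings apply to the same
// connection that performs the import.
async function withForeignKeysDisabled(
	connection: DataSource,
	importFn: (queryRunner: QueryRunner) => Promise<void>,
) {
	const queryRunner = connection.createQueryRunner();
	await queryRunner.connect();
	const dbType = connection.options.type;
	try {
		if (dbType === 'sqlite') await queryRunner.query('PRAGMA foreign_keys = OFF');
		if (dbType === 'postgres') await queryRunner.query("SET session_replication_role = 'replica'");
		if (dbType === 'mysql' || dbType === 'mariadb') await queryRunner.query('SET FOREIGN_KEY_CHECKS = 0');

		await importFn(queryRunner);
	} finally {
		if (dbType === 'sqlite') await queryRunner.query('PRAGMA foreign_keys = ON');
		if (dbType === 'postgres') await queryRunner.query("SET session_replication_role = 'origin'");
		if (dbType === 'mysql' || dbType === 'mariadb') await queryRunner.query('SET FOREIGN_KEY_CHECKS = 1');
		await queryRunner.release();
	}
}
```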

# File Format

* 1 file per table
* 1 line per row
* format: JSONL

* metadata file
  * contains the name of the last migration that was run

* tarred/zipped in the end
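
On the import side, each `<table>.jsonl` file can be read back line by line; a minimal sketch using Node's `readline`, with the actual insert step left out:

```ts
import fs from 'fs';
import readline from 'readline';

// Sketch: parse one exported <table>.jsonl file back into plain row objects.
async function readTableFile(filePath: string): Promise<Array<Record<string, unknown>>> {
	const rows: Array<Record<string, unknown>> = [];
	const lineReader = readline.createInterface({
		input: fs.createReadStream(filePath),
		crlfDelay: Infinity, // treat \r\n as a single line break
	});
	for await (const line of lineReader) {
		if (line.trim() === '') continue; // skip blank lines
		rows.push(JSON.parse(line) as Record<string, unknown>);
	}
	return rows;
}
```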

# Tasks

* include only columns (and all of them) when serializing; see the sketch below
  * i.e. ignore virtual properties while still including otherwise-ignored columns like passwords
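
One way to do this is to build the SELECT column list from TypeORM's entity metadata instead of using `SELECT *`; `databaseName` is the column's name in the database, so virtual properties never appear while normally excluded columns such as password hashes still do. A sketch, not existing n8n code:

```ts
import { DataSource } from '@n8n/typeorm';

// Sketch: derive an explicit column list for a table from the entity metadata,
// so only real database columns (and all of them) are serialized.
function buildSelectForTable(connection: DataSource, tableName: string): string {
	const metadata = connection.entityMetadatas.find((m) => m.tableName === tableName);
	if (!metadata) throw new Error(`No entity metadata found for table ${tableName}`);
	const columnNames = metadata.columns.map((column) => column.databaseName);
	return `SELECT ${columnNames.join(', ')} FROM ${tableName}`;
}
```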