feat(core): Export and import all data across DB types

Iván Ovejero 2024-09-24 10:04:20 +02:00
parent 583d3a7acb
commit 87a40581bf
24 changed files with 881 additions and 30 deletions

View file

@@ -70,6 +70,7 @@
"@types/superagent": "^8.1.7",
"@types/swagger-ui-express": "^4.1.6",
"@types/syslog-client": "^1.1.2",
"@types/tar-stream": "^3.1.3",
"@types/uuid": "catalog:",
"@types/validator": "^13.7.0",
"@types/ws": "^8.5.4",
@@ -167,6 +168,7 @@
"sshpk": "1.17.0",
"swagger-ui-express": "5.0.0",
"syslog-client": "1.1.1",
"tar-stream": "^3.1.7",
"typedi": "catalog:",
"uuid": "catalog:",
"validator": "13.7.0",

View file

@@ -12,7 +12,7 @@ import { UserRepository } from '@/databases/repositories/user.repository';
import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import { generateNanoId } from '@/databases/utils/generators';
import type { IWorkflowToImport } from '@/interfaces';
import { ImportService } from '@/services/import.service';
import { WorkflowImportService } from '@/services/workflow-import.service';
import { BaseCommand } from '../base-command';
@@ -97,7 +97,7 @@ export class ImportWorkflowsCommand extends BaseCommand {
this.logger.info(`Importing ${workflows.length} workflows...`);
await Container.get(ImportService).importWorkflows(workflows, project.id);
await Container.get(WorkflowImportService).importWorkflows(workflows, project.id);
this.reportSuccess(workflows.length);
}

View file

@@ -14,6 +14,8 @@ import { ActiveExecutions } from '@/active-executions';
import { ActiveWorkflowManager } from '@/active-workflow-manager';
import config from '@/config';
import { EDITOR_UI_DIST_DIR, LICENSE_FEATURES } from '@/constants';
import { DatabaseExportService } from '@/databases/import-export/database-export.service';
import { DatabaseImportService } from '@/databases/import-export/database-import.service';
import { ExecutionRepository } from '@/databases/repositories/execution.repository';
import { SettingsRepository } from '@/databases/repositories/settings.repository';
import { FeatureNotLicensedError } from '@/errors/feature-not-licensed.error';
@@ -260,6 +262,13 @@ export class Start extends BaseCommand {
async run() {
const { flags } = await this.parse(Start);
// @TEMP
// await Container.get(DatabaseExportService).export();
Container.get(DatabaseImportService).setConfig({
importFilePath: '/tmp/backup/n8n-db-export-2024-09-24.tar.gz',
});
await Container.get(DatabaseImportService).import();
// Load settings from database and set them to config.
const databaseSettings = await Container.get(SettingsRepository).findBy({
loadOnStartup: true,

View file

@@ -0,0 +1,112 @@
import { GlobalConfig } from '@n8n/config';
import type { EntityMetadata } from '@n8n/typeorm';
import { DataSource, MigrationExecutor } from '@n8n/typeorm';
import { ApplicationError } from 'n8n-workflow';
import { strict } from 'node:assert';
import { Service } from 'typedi';
import { NonEmptyTableError } from '@/errors/non-empty-table.error';
import type { Sequence } from './import-export/types';
import { LastMigrationNotFoundError } from '../errors/last-migration-not-found.error';
/**
* Responsible for providing schema information about the connected DB.
*/
@Service()
export class DatabaseSchemaService {
private dbType: 'sqlite' | 'mariadb' | 'mysqldb' | 'postgresdb';
constructor(
private readonly dataSource: DataSource,
private readonly globalConfig: GlobalConfig,
) {
this.dbType = this.globalConfig.database.type;
}
/** Get the name of the last executed migration. */
async getLastMigration() {
const migrationExecutor = new MigrationExecutor(this.dataSource);
const executedMigrations = await migrationExecutor.getExecutedMigrations();
const lastExecutedMigration = executedMigrations.at(0);
if (!lastExecutedMigration) throw new LastMigrationNotFoundError();
return lastExecutedMigration.name;
}
getTables() {
return this.dataSource.entityMetadatas
.map((value) => ({
tableName: value.tableName,
columns: value.columns,
entityTarget: value.target,
}))
.filter(({ entityTarget }) => this.hasTable(entityTarget));
}
async checkAllTablesEmpty() {
for (const { tableName, entityTarget } of this.getTables()) {
if ((await this.dataSource.getRepository(entityTarget).count()) > 0) {
throw new NonEmptyTableError(tableName);
}
}
}
async disableForeignKeysPostgres() {
strict(this.dbType === 'postgresdb', 'Method only for Postgres');
await this.dataSource.query('SET session_replication_role = replica;');
}
async enableForeignKeysPostgres() {
strict(this.dbType === 'postgresdb', 'Method only for Postgres');
await this.dataSource.query('SET session_replication_role = origin;');
}
getDataSource() {
return this.dataSource;
}
/** Get the names and values of all incremental ID sequences. */
async getSequences() {
if (this.dbType === 'sqlite') {
return await this.dataSource.query<Sequence[]>(
"SELECT name, seq AS value FROM sqlite_sequence WHERE name != 'migrations';",
);
}
if (this.dbType === 'postgresdb') {
return await this.dataSource.query<Sequence[]>(
"SELECT sequencename AS name, start_value AS value FROM pg_sequences WHERE sequencename != 'migrations_id_seq';",
);
}
// @TODO: Does this work for MariaDB?
if (this.dbType === 'mysqldb' || this.dbType === 'mariadb') {
const schema = this.globalConfig.database.mysqldb.database; // @TODO: Why deprecated? How to filter otherwise?
return await this.dataSource.query<Sequence[]>(
`SELECT table_name AS name, ordinal_position AS value FROM information_schema.columns
WHERE table_schema = '${schema}' AND extra = 'auto_increment' AND table_name != 'migrations';`,
);
}
throw new ApplicationError('Unknown database type', { extra: { dbType: this.dbType } });
}
/** Check whether the given entity target has a corresponding table. */
private hasTable(entityTarget: EntityMetadata['target']) {
const prototype: unknown = Object.getPrototypeOf(entityTarget);
// target is an entity that does not extend a parent class, e.g. `ExecutionEntity`
// or that extends the `Derived` mixin, e.g. `WorkflowEntity` extends `WithTimestamps`
if (typeof prototype === 'function' && (!prototype.name || prototype.name === 'Derived')) {
return true; // @TODO: Too brittle to rely on `'Derived'`?
}
// target is an entity that extends another entity, e.g. `AuthUser` extends `User`,
// or is a string alias for an entity, e.g. `workflows_tags` for `WorkflowTagMapping`
return false;
}
}
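
For context on the Postgres-only methods above: setting session_replication_role to replica makes the session behave like a replication subscriber, so foreign-key triggers are not fired and tables can be loaded in any order. A minimal sketch of how a caller might pair the two methods, assuming the typedi wiring from this commit (the helper itself is hypothetical):

import { Container } from 'typedi';
import { DatabaseSchemaService } from '@/databases/database-schema.service';

/** Run `fn` with Postgres FK enforcement suspended, restoring it afterwards. */
async function withForeignKeysDisabled(fn: () => Promise<void>) {
  const schemaService = Container.get(DatabaseSchemaService);
  await schemaService.disableForeignKeysPostgres(); // SET session_replication_role = replica;
  try {
    await fn(); // inserts here skip FK checks, so table order does not matter
  } finally {
    await schemaService.enableForeignKeysPostgres(); // SET session_replication_role = origin;
  }
}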

View file

@@ -0,0 +1,5 @@
/**
* Name of the file containing metadata about exported files.
* Underscored to prevent an accidental match with a table name.
*/
export const MANIFEST_FILENAME = '_manifest.json';

View file

@@ -0,0 +1,205 @@
import { GlobalConfig } from '@n8n/config';
import type { ColumnMetadata } from '@n8n/typeorm/metadata/ColumnMetadata';
import { jsonParse } from 'n8n-workflow';
import { strict } from 'node:assert';
import fs from 'node:fs';
import path from 'node:path';
import { Service } from 'typedi';
import { Logger } from '@/logger';
import { MANIFEST_FILENAME } from './constants';
import type { Manifest } from './manifest.schema';
import type { DatabaseExportConfig, Row } from './types';
import { FilesystemService } from '../../filesystem/filesystem.service';
import { DatabaseSchemaService } from '../database-schema.service';
// @TODO: Check minimum version for each DB type?
@Service()
export class DatabaseExportService {
private config: DatabaseExportConfig = {
storageDirPath: '/tmp/backup',
tarballBaseFileName: 'n8n-db-export',
batchSize: 500,
};
/** Paths to the files to include in the tarball. */
private readonly exportFilePaths: string[] = [];
/** Number of rows in tables being exported. */
private readonly rowCounts: { [tableName: string]: number } = {};
constructor(
private readonly globalConfig: GlobalConfig,
private readonly fsService: FilesystemService,
private readonly schemaService: DatabaseSchemaService,
private readonly logger: Logger,
) {}
setConfig(config: Partial<DatabaseExportConfig>) {
this.config = { ...this.config, ...config };
}
// #region Export
/** Export DB tables into a tarball of `.jsonl` files plus a `.json` metadata file. */
async export() {
await this.fsService.ensureDir(this.config.storageDirPath);
this.logger.info('[ExportService] Starting export', {
dbType: this.globalConfig.database.type,
storageDirPath: this.config.storageDirPath,
});
await this.writeJsonlFiles();
if (this.exportFilePaths.length === 0) {
this.logger.info('[ExportService] Found no tables to export, aborted export');
return;
}
this.logger.info('[ExportService] Exported tables', { exportedTables: this.exportFilePaths });
await this.writeManifest();
const tarballPath = path.join(this.config.storageDirPath, this.tarballFileName());
await this.fsService.createTarball(tarballPath, this.exportFilePaths);
await this.postExportCleanup();
this.logger.info('[ExportService] Completed export', { tarballPath });
}
// #endregion
// #region Export steps
private async writeJsonlFiles() {
for (const { tableName, columns } of this.schemaService.getTables()) {
let offset = 0;
let totalRows = 0;
let writeStream: fs.WriteStream | undefined;
while (true) {
const rows = await this.schemaService
.getDataSource()
.query<Row[]>(
`SELECT * FROM ${tableName} LIMIT ${this.config.batchSize} OFFSET ${offset};`,
); // @TODO: Double-quotes for column in Postgres but not for other DB types?
if (rows.length === 0) break;
writeStream ??= fs.createWriteStream(
path.join(this.config.storageDirPath, tableName) + '.jsonl',
);
for (const row of rows) {
for (const column of columns) {
this.normalizeRow(row, { column, tableName });
}
const json = JSON.stringify(row);
writeStream.write(json);
writeStream.write('\n');
}
totalRows += rows.length;
offset += this.config.batchSize;
this.logger.info(`[ExportService] Exported ${totalRows} rows from ${tableName}`);
}
if (writeStream) {
writeStream.end();
const jsonlFilePath = path.join(this.config.storageDirPath, tableName + '.jsonl');
this.exportFilePaths.push(jsonlFilePath);
this.rowCounts[tableName] = totalRows;
}
}
}
/** Make values in SQLite and MySQL rows compatible with Postgres. */
private normalizeRow(
row: Row,
{ column, tableName }: { column: ColumnMetadata; tableName: string },
) {
const dbType = this.globalConfig.database.type;
if (dbType === 'postgresdb') return;
if (dbType === 'sqlite' && column.type === Boolean) {
const value = row[column.propertyName];
strict(
value === 1 || value === 0,
'Expected boolean column in sqlite to contain number `1` or `0`',
);
row[column.propertyName] = value === 1;
}
if (dbType === 'sqlite' && (this.isJson(column) || this.isPossiblyJson(tableName, column))) {
const value = row[column.propertyName];
if (typeof value === 'string') {
row[column.propertyName] = jsonParse(value, { fallbackValue: value });
}
}
// @TODO: MySQL and MariaDB normalizations
}
/** Write a manifest file describing the export. */
private async writeManifest() {
const manifestFilePath = path.join(this.config.storageDirPath, MANIFEST_FILENAME);
const manifest: Manifest = {
lastExecutedMigration: await this.schemaService.getLastMigration(),
sourceDbType: this.globalConfig.database.type,
exportedAt: new Date().toISOString(),
rowCounts: this.rowCounts,
sequences: await this.schemaService.getSequences(),
};
await fs.promises.writeFile(manifestFilePath, JSON.stringify(manifest, null, 2), 'utf8');
this.exportFilePaths.push(manifestFilePath);
this.logger.info('[ExportService] Wrote manifest', { metadata: manifest });
}
/** Clear all `.jsonl` and `.json` files from the storage dir. */
async postExportCleanup() {
await this.fsService.removeFiles(this.exportFilePaths);
this.exportFilePaths.length = 0;
}
// #endregion
// #region Utils
private isJson(column: ColumnMetadata) {
return this.globalConfig.database.type === 'sqlite'
? column.type === 'simple-json'
: column.type === 'json';
}
/** Check whether the column is not JSON-type but may contain JSON. */
private isPossiblyJson(tableName: string, column: ColumnMetadata) {
return tableName === 'settings' && column.propertyName === 'value';
}
private tarballFileName() {
const now = new Date();
const year = now.getFullYear();
const month = String(now.getMonth() + 1).padStart(2, '0');
const day = String(now.getDate()).padStart(2, '0');
return `${this.config.tarballBaseFileName}-${year}-${month}-${day}.tar.gz`;
}
// #endregion
}
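
The `.jsonl` layout above stores one JSON-encoded row per line, which is what lets the import side stream each table back with readline instead of buffering whole files. A small round-trip sketch under the same conventions (the rows and path are invented):

import fs from 'node:fs';
import { once } from 'node:events';
import readline from 'node:readline';

async function jsonlRoundTrip() {
  const rows = [{ id: 1, name: 'a' }, { id: 2, name: 'b' }]; // stand-in for one query batch

  // write: one JSON document per line, as writeJsonlFiles() does
  const ws = fs.createWriteStream('/tmp/example.jsonl');
  for (const row of rows) ws.write(JSON.stringify(row) + '\n');
  ws.end();
  await once(ws, 'finish'); // ensure all lines are flushed before reading

  // read: stream lines back without loading the whole file into memory
  const lineStream = readline.createInterface({
    input: fs.createReadStream('/tmp/example.jsonl'),
    crlfDelay: Infinity, // treat \r\n as a single line break
  });
  for await (const line of lineStream) {
    console.log(JSON.parse(line)); // { id: 1, name: 'a' }, then { id: 2, name: 'b' }
  }
}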

View file

@@ -0,0 +1,197 @@
import { GlobalConfig } from '@n8n/config';
import { ensureError, jsonParse } from 'n8n-workflow';
import fs from 'node:fs';
import path from 'node:path';
import readline from 'node:readline';
import { Service } from 'typedi';
import { NotObjectLiteralError } from '@/errors/not-object-literal.error';
import { RowCountMismatchError } from '@/errors/row-count-mismatch.error';
import { Logger } from '@/logger';
import { isObjectLiteral } from '@/utils';
import { MANIFEST_FILENAME } from './constants';
import type { Manifest } from './manifest.schema';
import { manifestSchema } from './manifest.schema';
import type { DatabaseImportConfig } from './types';
import { MalformedManifestError } from '../../errors/malformed-manifest.error';
import { MigrationsMismatchError } from '../../errors/migrations-mismatch.error';
import { UnsupportedDestinationError } from '../../errors/unsupported-destination.error';
import { FilesystemService } from '../../filesystem/filesystem.service';
import { DatabaseSchemaService } from '../database-schema.service';
// @TODO: Check minimum version for Postgres?
// @TODO: Make all info logs debug
@Service()
export class DatabaseImportService {
private config: DatabaseImportConfig = {
importFilePath: '',
extractDirPath: '/tmp/backup',
truncateDestination: true, // @TODO: Only for dev, default it to `false` later
};
/** Paths to files extracted from the tarball. */
private readonly extractFilePaths: string[] = [];
private manifest: Manifest;
constructor(
private readonly globalConfig: GlobalConfig,
private readonly fsService: FilesystemService,
private readonly schemaService: DatabaseSchemaService,
private readonly logger: Logger,
) {}
setConfig(config: Partial<DatabaseImportConfig>) {
this.config = { ...this.config, ...config };
}
// #region Import
/** Import DB tables from a tarball of `.jsonl` files in the storage dir. */
async import() {
this.logger.info('[ImportService] Starting import');
await this.preImportChecks();
try {
await this.schemaService.disableForeignKeysPostgres();
await this.adjustSequences();
await this.importFiles();
await this.checkImportsAgainstManifest();
} catch (error) {
this.logger.error('[ImportService] Import failed - changes rolled back', {
error: ensureError(error),
});
} finally {
await this.schemaService.enableForeignKeysPostgres();
await this.postImportCleanup();
}
this.logger.info('[ImportService] Completed import');
}
// #endregion
// #region Import steps
private async preImportChecks() {
await this.fsService.checkAccessible(this.config.extractDirPath);
const dbType = this.globalConfig.database.type;
if (dbType !== 'postgresdb') throw new UnsupportedDestinationError(dbType);
await this.fsService.extractTarball(this.config.importFilePath, this.config.extractDirPath);
this.manifest = await this.getManifest();
const destinationLastMigration = await this.schemaService.getLastMigration();
if (this.manifest.lastExecutedMigration !== destinationLastMigration) {
throw new MigrationsMismatchError(
this.manifest.lastExecutedMigration,
destinationLastMigration,
);
}
if (this.config.truncateDestination) {
for (const { entityTarget } of this.schemaService.getTables()) {
await this.schemaService.getDataSource().getRepository(entityTarget).delete({});
}
} else {
await this.schemaService.checkAllTablesEmpty();
}
this.logger.info('[ImportService] Pre-import checks passed');
}
private async getManifest() {
const manifestFilePath = path.join(this.config.extractDirPath, MANIFEST_FILENAME);
const manifestJson = await fs.promises.readFile(manifestFilePath, 'utf8');
try {
return manifestSchema.parse(jsonParse(manifestJson));
} catch (error) {
throw new MalformedManifestError(manifestFilePath, ensureError(error));
}
}
/** Insert rows from `.jsonl` files into DB tables in a transaction. */
private async importFiles() {
await this.schemaService.getDataSource().transaction(async (tx) => {
for (const { tableName, entityTarget } of this.schemaService.getTables()) {
const jsonlFilePath = path.join(this.config.extractDirPath, tableName) + '.jsonl';
try {
await fs.promises.access(jsonlFilePath);
} catch (e) {
const error = ensureError(e);
if ('code' in error && error.code === 'ENOENT') continue; // we only exported populated tables
throw error;
}
const lineStream = readline.createInterface({
input: fs.createReadStream(jsonlFilePath),
crlfDelay: Infinity, // treat \r\n as a single line break
});
const txRepository = tx.getRepository(entityTarget);
for await (const line of lineStream) {
const parsedLine = jsonParse(line);
if (!isObjectLiteral(parsedLine)) throw new NotObjectLiteralError(parsedLine);
const entity = txRepository.create(parsedLine);
await txRepository.insert(entity);
}
}
});
}
/**
* Adjust incremental ID sequences in Postgres to match the source database.
*/
private async adjustSequences() {
for (const { name, value } of this.manifest.sequences) {
// `execution_metadata` has an abnormally named and numbered sequence
const sequenceName = name === 'execution_metadata' ? `${name}_temp_id_seq` : `${name}_id_seq`;
const sequenceValue = value <= 0 ? 1 : value;
await this.schemaService
.getDataSource()
.query(`ALTER SEQUENCE "${sequenceName}" RESTART WITH ${sequenceValue};`);
}
}
private async checkImportsAgainstManifest() {
for (const { tableName, entityTarget } of this.schemaService.getTables()) {
const actualRows = await this.schemaService
.getDataSource()
.getRepository(entityTarget)
.count();
const expectedRows = this.manifest.rowCounts[tableName];
if (actualRows === 0 && expectedRows === undefined) continue; // manifest only contains populated tables
if (expectedRows !== actualRows) {
throw new RowCountMismatchError(tableName, expectedRows, actualRows);
}
}
this.logger.info('[ImportService] Imports match manifest');
}
private async postImportCleanup() {
await this.fsService.removeFiles(this.extractFilePaths);
this.extractFilePaths.length = 0;
}
// #endregion
}
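
One detail worth spelling out about adjustSequences(): Postgres does not advance a sequence when INSERTs supply explicit IDs, so without the ALTER SEQUENCE ... RESTART the first auto-generated ID after an import would collide with an imported row. A standalone sketch of the effect (table name and values invented):

import { Container } from 'typedi';
import { DataSource } from '@n8n/typeorm';

async function demoSequenceRestart() {
  const dataSource = Container.get(DataSource);
  // suppose imported rows carried explicit IDs up to 456 while the sequence still sits at its old value
  await dataSource.query('ALTER SEQUENCE "workflow_entity_id_seq" RESTART WITH 457;');
  // the next auto-generated ID no longer collides with an imported row
  const [row] = await dataSource.query<Array<{ next_id: string }>>(
    "SELECT nextval('workflow_entity_id_seq') AS next_id;", // returns '457'
  );
  console.log(row.next_id);
}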

View file

@@ -0,0 +1,38 @@
import { z } from 'zod';
export const manifestSchema = z.object({
/**
* Name of the last executed migration in the database the tarball was exported from.
* @example 'CreateAnnotationTables1724753530828'
*/
lastExecutedMigration: z.string(),
/** Type of database the tarball was exported from. */
sourceDbType: z.union([
z.literal('sqlite'),
z.literal('mariadb'),
z.literal('mysqldb'),
z.literal('postgresdb'),
]),
/**
* ISO-8601 timestamp of when the tarball was exported.
* @example '2021-01-01T11:11:11.111Z'
*/
exportedAt: z.string(),
/**
* Number of rows in each populated table being exported.
* @example { 'workflow_entity': 123, 'credentials_entity': 456 }
*/
rowCounts: z.record(z.string(), z.number()),
/**
* Incremental ID sequences in tables being exported.
* @example [ { name: 'workflow_entity', value: 123 }, { name: 'credentials_entity', value: 456 } ]
*/
sequences: z.array(z.object({ name: z.string(), value: z.number() })),
});
/** Manifest describing the export, included as JSON file in the tarball. */
export type Manifest = z.infer<typeof manifestSchema>;
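
Because manifestSchema.parse() throws on any shape mismatch, the importer can funnel both JSON syntax errors and schema violations into the single MalformedManifestError path shown earlier. A quick sketch of both outcomes (sample values invented):

import { manifestSchema, type Manifest } from './manifest.schema';

const manifest: Manifest = manifestSchema.parse({
  lastExecutedMigration: 'CreateAnnotationTables1724753530828',
  sourceDbType: 'sqlite',
  exportedAt: '2024-09-24T10:04:20.000Z',
  rowCounts: { workflow_entity: 123 },
  sequences: [{ name: 'workflow_entity', value: 123 }],
}); // valid: returns a typed Manifest

manifestSchema.parse({ sourceDbType: 'oracle' }); // throws ZodError: invalid union member, required fields missing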

View file

@@ -0,0 +1,44 @@
export type Row = Record<string, unknown>;
/** Name and value of an incremental ID sequence for a column. */
export type Sequence = { name: string; value: number }; // @TODO: Refactor as { [tableName: string]: number }
export type DatabaseExportConfig = {
/**
* Path to the dir to place the export in.
* @default '/tmp/backup'
*/
storageDirPath: string;
/**
* Base filename for the tarball, to be suffixed with `-{timestamp}.tar.gz`.
* @default 'n8n-db-export'
*/
tarballBaseFileName: string;
/**
* Number of rows to retrieve from DB and write to a `.jsonl` file at a time.
* @default 500
*/
batchSize: number;
};
export type DatabaseImportConfig = {
/**
* Path to the file to import. Unset by default.
* @example '/tmp/backup/n8n-db-export-2021-01-01.tar.gz'
*/
importFilePath: string;
/**
* Path to the directory to extract the tarball into.
* @default '/tmp/backup'
*/
extractDirPath: string;
/**
* Whether to truncate all tables in the destination DB.
* @default true // @TODO: Only for dev, change to `false` later
*/
truncateDestination: boolean;
};
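
Both services overlay Partial versions of these config objects onto the defaults via setConfig(), as the temporary wiring in the Start command above does for the importer. For example (paths and values invented):

import { Container } from 'typedi';
import { DatabaseExportService } from '@/databases/import-export/database-export.service';

const exportService = Container.get(DatabaseExportService);
// only the named fields change; tarballBaseFileName keeps its 'n8n-db-export' default
exportService.setConfig({ storageDirPath: '/data/backups', batchSize: 1000 });
await exportService.export(); // top-level await, assuming an ESM context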

View file

@@ -0,0 +1,9 @@
import { ApplicationError } from 'n8n-workflow';
export class LastMigrationNotFoundError extends ApplicationError {
constructor() {
super('Failed to find last executed migration. Please run migrations first.', {
level: 'warning',
});
}
}

View file

@@ -0,0 +1,10 @@
import { ApplicationError } from 'n8n-workflow';
export class MalformedManifestError extends ApplicationError {
constructor(manifestFilePath: string, error: Error) {
super(`Failed to parse manifest file at: ${manifestFilePath}`, {
level: 'warning',
cause: error,
});
}
}

View file

@@ -0,0 +1,10 @@
import { ApplicationError } from 'n8n-workflow';
export class MigrationsMismatchError extends ApplicationError {
constructor(lastSourceMigration: string, lastDestinationMigration: string) {
super(
`The last migration executed in the source database "${lastSourceMigration}" differs from the last migration executed in the destination database "${lastDestinationMigration}". Run the same migrations on both databases before importing.`,
{ level: 'warning' },
);
}
}

View file

@@ -0,0 +1,9 @@
import { ApplicationError } from 'n8n-workflow';
export class NonEmptyTableError extends ApplicationError {
constructor(tableName: string) {
super(`Found non-empty table "${tableName}" but expected it to be empty.`, {
level: 'warning',
});
}
}

View file

@@ -0,0 +1,10 @@
import { ApplicationError } from 'n8n-workflow';
export class NotObjectLiteralError extends ApplicationError {
constructor(value: unknown) {
super(`Expected object literal but found ${typeof value}`, {
extra: { value },
level: 'warning',
});
}
}

View file

@@ -0,0 +1,9 @@
import { ApplicationError } from 'n8n-workflow';
export class RowCountMismatchError extends ApplicationError {
constructor(tableName: string, expectedRows: number, actualRows: number) {
super(`Expected ${expectedRows} rows in table "${tableName}" but found ${actualRows}`, {
level: 'warning',
});
}
}

View file

@@ -0,0 +1,9 @@
import { ApplicationError } from 'n8n-workflow';
export class UnsupportedDestinationError extends ApplicationError {
constructor(dbType: string) {
super(`Importing into ${dbType} is not supported. Please import into a Postgres database.`, {
level: 'warning',
});
}
}

View file

@@ -0,0 +1 @@
// @TODO

View file

@@ -0,0 +1,104 @@
import { FileNotFoundError } from 'n8n-core';
import { ensureError } from 'n8n-workflow';
import fs from 'node:fs';
import path from 'node:path';
import { pipeline } from 'node:stream/promises';
import { createGzip, createGunzip } from 'node:zlib';
import tar from 'tar-stream';
import { Service } from 'typedi';
import { Logger } from '@/logger';
@Service()
export class FilesystemService {
constructor(private readonly logger: Logger) {}
/**
* Ensure a directory exists by checking or creating it.
* @param dirPath Path to the directory to check or create.
*/
async ensureDir(dirPath: string) {
try {
await fs.promises.access(dirPath);
} catch {
await fs.promises.mkdir(dirPath, { recursive: true });
}
}
/**
* Check if a file or dir exists and is accessible.
* @param checkPath Path to the file or dir to check.
*/
async checkAccessible(checkPath: string) {
try {
await fs.promises.access(checkPath);
} catch {
throw new FileNotFoundError(checkPath);
}
}
/**
* Remove files at the given paths, disregarding files not found.
* @param filePaths Paths to the files to remove.
*/
async removeFiles(filePaths: string[]) {
for (const filePath of filePaths) {
try {
await fs.promises.unlink(filePath);
} catch (e) {
const error = ensureError(e);
if ('code' in error && error.code === 'ENOENT') continue;
throw error;
}
}
}
/**
* Create a tarball from the given file paths.
* @param srcPaths Paths to the files to include in the tarball.
* @param tarballPath Path to the tarball file to create.
*/
async createTarball(tarballPath: string, srcPaths: string[]) {
const pack = tar.pack();
for (const filePath of srcPaths) {
const fileContent = await fs.promises.readFile(filePath); // @TODO: Read stream
pack.entry({ name: path.basename(filePath) }, fileContent);
}
pack.finalize();
await pipeline(pack, createGzip(), fs.createWriteStream(tarballPath));
this.logger.info('[FilesystemService] Created tarball', { tarballPath });
}
/**
* Extract a tarball to a given directory.
* @param tarballPath Path to the tarball file to extract.
* @param dstDir Path to the directory to extract the tarball into.
* @returns Paths to the extracted files.
*/
async extractTarball(tarballPath: string, dstDir: string) {
await this.checkAccessible(tarballPath);
const extractedFilePaths: string[] = [];
const extract = tar.extract();
extract.on('entry', async (header, stream, next) => {
const filePath = path.join(dstDir, header.name);
await pipeline(stream, fs.createWriteStream(filePath));
extractedFilePaths.push(filePath);
next();
});
await pipeline(fs.createReadStream(tarballPath), createGunzip(), extract);
this.logger.info('[FilesystemService] Extracted tarball', { tarballPath });
return extractedFilePaths;
}
}
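
The @TODO about streaming in createTarball() could plausibly be resolved by declaring each entry's size up front, since tar-stream returns a writable entry stream when the header includes a size. A sketch under that assumption, not part of this commit:

import fs from 'node:fs';
import path from 'node:path';
import { pipeline } from 'node:stream/promises';
import { createGzip } from 'node:zlib';
import tar from 'tar-stream';

async function createTarballStreaming(tarballPath: string, srcPaths: string[]) {
  const pack = tar.pack();
  // start draining the pack into the gzipped tarball before adding entries
  const done = pipeline(pack, createGzip(), fs.createWriteStream(tarballPath));
  for (const filePath of srcPaths) {
    const { size } = await fs.promises.stat(filePath); // tar headers need the size up front
    const entry = pack.entry({ name: path.basename(filePath), size });
    await pipeline(fs.createReadStream(filePath), entry); // stream the file instead of readFile()
  }
  pack.finalize();
  await done;
}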

View file

@@ -15,7 +15,7 @@ import { Logger } from '@/logger';
import { replaceInvalidCredentials } from '@/workflow-helpers';
@Service()
export class ImportService {
export class WorkflowImportService {
private dbCredentials: ICredentialsDb[] = [];
private dbTags: TagEntity[] = [];

View file

@@ -10,7 +10,7 @@ import { CredentialsRepository } from '@/databases/repositories/credentials.repo
import { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository';
import { TagRepository } from '@/databases/repositories/tag.repository';
import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import { ImportService } from '@/services/import.service';
import { WorkflowImportService } from '@/services/workflow-import.service';
import { getPersonalProject } from './shared/db/projects';
import { createMember, createOwner } from './shared/db/users';
@@ -24,7 +24,7 @@ import * as testDb from './shared/test-db';
import { mockInstance } from '../shared/mocking';
describe('ImportService', () => {
let importService: ImportService;
let importService: WorkflowImportService;
let tagRepository: TagRepository;
let owner: User;
let ownerPersonalProject: Project;
@@ -41,7 +41,7 @@
credentialsRepository.find.mockResolvedValue([]);
importService = new ImportService(mock(), credentialsRepository, tagRepository);
importService = new WorkflowImportService(mock(), credentialsRepository, tagRepository);
});
afterEach(async () => {

View file

@@ -1,4 +1,4 @@
/** Ensures `error` is an `Error */
/** Ensure `error` is an instance of `Error`. */
export function ensureError(error: unknown): Error {
return error instanceof Error
? error

View file

@@ -245,7 +245,7 @@ importers:
version: 4.0.7
axios:
specifier: 'catalog:'
version: 1.7.4(debug@4.3.6)
version: 1.7.4
dotenv:
specifier: 8.6.0
version: 8.6.0
@@ -322,7 +322,7 @@ importers:
dependencies:
axios:
specifier: 'catalog:'
version: 1.7.4(debug@4.3.6)
version: 1.7.4
packages/@n8n/codemirror-lang:
dependencies:
@@ -717,7 +717,7 @@ importers:
version: 1.11.0
axios:
specifier: 'catalog:'
version: 1.7.4(debug@4.3.6)
version: 1.7.4
bcryptjs:
specifier: 2.4.3
version: 2.4.3
@@ -916,6 +916,9 @@ importers:
syslog-client:
specifier: 1.1.1
version: 1.1.1
tar-stream:
specifier: ^3.1.7
version: 3.1.7
typedi:
specifier: 'catalog:'
version: 0.10.0(patch_hash=sk6omkefrosihg7lmqbzh7vfxe)
@@ -1004,6 +1007,9 @@ importers:
'@types/syslog-client':
specifier: ^1.1.2
version: 1.1.2
'@types/tar-stream':
specifier: ^3.1.3
version: 3.1.3
'@types/uuid':
specifier: 'catalog:'
version: 10.0.0
@@ -1051,7 +1057,7 @@ importers:
version: 1.11.0
axios:
specifier: 'catalog:'
version: 1.7.4(debug@4.3.6)
version: 1.7.4
concat-stream:
specifier: 2.0.0
version: 2.0.0
@@ -1335,7 +1341,7 @@ importers:
version: 10.11.0(vue@3.4.21(typescript@5.6.2))
axios:
specifier: 'catalog:'
version: 1.7.4(debug@4.3.6)
version: 1.7.4
bowser:
specifier: 2.11.0
version: 2.11.0
@@ -1809,7 +1815,7 @@ importers:
version: 0.15.2
axios:
specifier: 'catalog:'
version: 1.7.4(debug@4.3.6)
version: 1.7.4
callsites:
specifier: 3.1.0
version: 3.1.0
@@ -5149,6 +5155,9 @@ packages:
'@types/syslog-client@1.1.2':
resolution: {integrity: sha512-X8MwGedXYNmYltPDaZQCM9X6cSdfFbJZWhrU81gWKsg+Q6mSgRWs/12Mq9nHaUV4wqMYDNrnytbwbMUiVnWegw==}
'@types/tar-stream@3.1.3':
resolution: {integrity: sha512-Zbnx4wpkWBMBSu5CytMbrT5ZpMiF55qgM+EpHzR4yIDu7mv52cej8hTkOc6K+LzpkOAbxwn/m7j3iO+/l42YkQ==}
'@types/tedious@4.0.9':
resolution: {integrity: sha512-ipwFvfy9b2m0gjHsIX0D6NAAwGCKokzf5zJqUZHUGt+7uWVlBIy6n2eyMgiKQ8ChLFVxic/zwQUhjLYNzbHDRA==}
@@ -5840,12 +5849,18 @@ packages:
axios-retry@3.7.0:
resolution: {integrity: sha512-ZTnCkJbRtfScvwiRnoVskFAfvU0UG3xNcsjwTR0mawSbIJoothxn67gKsMaNAFHRXJ1RmuLhmZBzvyXi3+9WyQ==}
axios@1.7.3:
resolution: {integrity: sha512-Ar7ND9pU99eJ9GpoGQKhKf58GpUOgnzuaB7ueNQ5BMi0p+LZ5oaEnfF999fAArcTIBwXTCHAmGcHOZJaWPq9Nw==}
axios@1.7.4:
resolution: {integrity: sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==}
axios@1.7.7:
resolution: {integrity: sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q==}
b4a@1.6.6:
resolution: {integrity: sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg==}
babel-jest@29.6.2:
resolution: {integrity: sha512-BYCzImLos6J3BH/+HvUCHG1dTf2MzmAB4jaVxHV+29RZLjR29XuYTmsf2sdDwkrb+FczkGo3kOhE7ga6sI0P4A==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
@@ -5878,6 +5893,9 @@ packages:
balanced-match@1.0.2:
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
bare-events@2.4.2:
resolution: {integrity: sha512-qMKFd2qG/36aA4GwvKq8MxnPgCQAmBWmSyLWsJcbn8v03wvIPQ/hG1Ms8bPzndZxMDoHpxez5VOS+gC9Yi24/Q==}
base-64@1.0.0:
resolution: {integrity: sha512-kwDPIFCGx0NZHog36dj+tHiwP4QMzsZ3AgMViUBKI0+V5n4U0ufTCUMhnQ04diaRI8EX/QcPfql7zlhZ7j4zgg==}
@@ -7362,6 +7380,9 @@ packages:
fast-deep-equal@3.1.3:
resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==}
fast-fifo@1.3.2:
resolution: {integrity: sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==}
fast-glob@3.2.12:
resolution: {integrity: sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==}
engines: {node: '>=8.6.0'}
@@ -9420,10 +9441,6 @@ packages:
resolution: {integrity: sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==}
engines: {node: '>=8'}
minipass@7.0.2:
resolution: {integrity: sha512-eL79dXrE1q9dBbDCLg7xfn/vl7MS4F1gvJAgjJrQli/jbQWdUttuVawphqpffoIYfRdq78LHx6GP4bU/EQ2ATA==}
engines: {node: '>=16 || 14 >=14.17'}
minipass@7.1.2:
resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==}
engines: {node: '>=16 || 14 >=14.17'}
@@ -10601,6 +10618,9 @@ packages:
queue-microtask@1.2.3:
resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==}
queue-tick@1.0.1:
resolution: {integrity: sha512-kJt5qhMxoszgU/62PLP1CJytzd2NKetjSRnyuj31fDd3Rlcz3fzlFdFLD1SItunPwyqEOkca6GbV612BWfaBag==}
quoted-printable@1.0.1:
resolution: {integrity: sha512-cihC68OcGiQOjGiXuo5Jk6XHANTHl1K4JLk/xlEJRTIXfy19Sg6XzB95XonYgr+1rB88bCpr7WZE7D7AlZow4g==}
hasBin: true
@@ -11313,6 +11333,9 @@ packages:
resolution: {integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==}
engines: {node: '>=10.0.0'}
streamx@2.20.1:
resolution: {integrity: sha512-uTa0mU6WUC65iUvzKH4X9hEdvSW7rbPxPtwfWiLMSj3qTdQbAiUboZTxauKfpFuGIGa1C2BYijZ7wgdUXICJhA==}
strict-event-emitter-types@2.0.0:
resolution: {integrity: sha512-Nk/brWYpD85WlOgzw5h173aci0Teyv8YdIAEtV+N88nDB0dLlazZyJMIsN6eo1/AR61l+p6CJTG1JIyFaoNEEA==}
@@ -11487,6 +11510,9 @@ packages:
resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==}
engines: {node: '>=6'}
tar-stream@3.1.7:
resolution: {integrity: sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==}
tar@6.2.1:
resolution: {integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==}
engines: {node: '>=10'}
@@ -11526,6 +11552,9 @@ packages:
resolution: {integrity: sha512-pFYqmTw68LXVjeWJMST4+borgQP2AyMNbg1BpZh9LbyhUeNkeaPF9gzfPGUAnSMV3qPYdWUwDIjjCLiSDOl7vg==}
engines: {node: '>=18'}
text-decoder@1.2.0:
resolution: {integrity: sha512-n1yg1mOj9DNpk3NeZOx7T6jchTbyJS3i3cucbNN6FcdPriMZx7NsgrGpWWdWZZGxD7ES1XB+3uoqHMgOKaN+fg==}
text-hex@1.0.0:
resolution: {integrity: sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==}
@@ -15345,7 +15374,7 @@ snapshots:
'@n8n/localtunnel@3.0.0':
dependencies:
axios: 1.7.4(debug@4.3.6)
axios: 1.7.3(debug@4.3.6)
debug: 4.3.6(supports-color@8.1.1)
transitivePeerDependencies:
- supports-color
@@ -15758,7 +15787,7 @@ snapshots:
'@rudderstack/rudder-sdk-node@2.0.9(tslib@2.6.2)':
dependencies:
axios: 1.7.4(debug@4.3.6)
axios: 1.7.4
axios-retry: 3.7.0
component-type: 1.2.1
join-component: 1.1.0
@@ -17379,6 +17408,10 @@ snapshots:
dependencies:
'@types/node': 18.16.16
'@types/tar-stream@3.1.3':
dependencies:
'@types/node': 18.16.16
'@types/tedious@4.0.9':
dependencies:
'@types/node': 18.16.16
@@ -18212,7 +18245,15 @@ snapshots:
'@babel/runtime': 7.24.7
is-retry-allowed: 2.2.0
axios@1.7.4(debug@4.3.6):
axios@1.7.3(debug@4.3.6):
dependencies:
follow-redirects: 1.15.6(debug@4.3.6)
form-data: 4.0.0
proxy-from-env: 1.1.0
transitivePeerDependencies:
- debug
axios@1.7.4:
dependencies:
follow-redirects: 1.15.6(debug@4.3.6)
form-data: 4.0.0
@@ -18237,6 +18278,8 @@ snapshots:
transitivePeerDependencies:
- debug
b4a@1.6.6: {}
babel-jest@29.6.2(@babel/core@7.24.0):
dependencies:
'@babel/core': 7.24.0
@@ -18295,6 +18338,9 @@ snapshots:
balanced-match@1.0.2: {}
bare-events@2.4.2:
optional: true
base-64@1.0.0: {}
base64-js@1.5.1: {}
@@ -20147,6 +20193,8 @@ snapshots:
fast-deep-equal@3.1.3: {}
fast-fifo@1.3.2: {}
fast-glob@3.2.12:
dependencies:
'@nodelib/fs.stat': 2.0.5
@@ -20535,7 +20583,7 @@ snapshots:
foreground-child: 3.1.1
jackspeak: 2.3.6
minimatch: 9.0.5
minipass: 7.0.2
minipass: 7.1.2
path-scurry: 1.10.1
glob@10.3.3:
@@ -20543,7 +20591,7 @@ snapshots:
foreground-child: 3.1.1
jackspeak: 2.3.6
minimatch: 9.0.5
minipass: 7.0.2
minipass: 7.1.2
path-scurry: 1.10.1
glob@10.4.5:
@@ -20959,7 +21007,7 @@ snapshots:
infisical-node@1.3.0:
dependencies:
axios: 1.7.4(debug@4.3.6)
axios: 1.7.4
dotenv: 16.3.1
tweetnacl: 1.0.3
tweetnacl-util: 0.15.1
@@ -21911,7 +21959,7 @@ snapshots:
zod: 3.23.8
zod-to-json-schema: 3.23.2(zod@3.23.8)
optionalDependencies:
axios: 1.7.4(debug@4.3.6)
axios: 1.7.4
transitivePeerDependencies:
- encoding
- openai
@@ -21935,7 +21983,7 @@ snapshots:
zod: 3.23.8
zod-to-json-schema: 3.23.2(zod@3.23.8)
optionalDependencies:
axios: 1.7.4(debug@4.3.6)
axios: 1.7.4
transitivePeerDependencies:
- encoding
- openai
@@ -22563,8 +22611,6 @@ snapshots:
minipass@5.0.0: {}
minipass@7.0.2: {}
minipass@7.1.2: {}
minizlib@2.1.2:
@@ -23555,7 +23601,7 @@ snapshots:
path-scurry@1.10.1:
dependencies:
lru-cache: 10.2.2
minipass: 7.0.2
minipass: 7.1.2
path-scurry@1.11.1:
dependencies:
@@ -23780,7 +23826,7 @@ snapshots:
posthog-node@3.2.1:
dependencies:
axios: 1.7.4(debug@4.3.6)
axios: 1.7.4
rusha: 0.8.14
transitivePeerDependencies:
- debug
@@ -24041,6 +24087,8 @@ snapshots:
queue-microtask@1.2.3: {}
queue-tick@1.0.1: {}
quoted-printable@1.0.1:
dependencies:
utf8: 2.1.2
@@ -24765,7 +24813,7 @@ snapshots:
asn1.js: 5.4.1
asn1.js-rfc2560: 5.0.1(asn1.js@5.4.1)
asn1.js-rfc5280: 3.0.0
axios: 1.7.4(debug@4.3.6)
axios: 1.7.4
big-integer: 1.6.51
bignumber.js: 9.1.2
binascii: 0.0.2
@@ -24976,6 +25024,14 @@ snapshots:
streamsearch@1.1.0: {}
streamx@2.20.1:
dependencies:
fast-fifo: 1.3.2
queue-tick: 1.0.1
text-decoder: 1.2.0
optionalDependencies:
bare-events: 2.4.2
strict-event-emitter-types@2.0.0: {}
strict-uri-encode@2.0.0: {}
@@ -25215,6 +25271,12 @@ snapshots:
inherits: 2.0.4
readable-stream: 3.6.0
tar-stream@3.1.7:
dependencies:
b4a: 1.6.6
fast-fifo: 1.3.2
streamx: 2.20.1
tar@6.2.1:
dependencies:
chownr: 2.0.0
@@ -25286,6 +25348,10 @@ snapshots:
glob: 10.4.5
minimatch: 9.0.5
text-decoder@1.2.0:
dependencies:
b4a: 1.6.6
text-hex@1.0.0: {}
text-table@0.2.0: {}