fix(core): Use owners file to export wf owners (#6547)

* remove owner from exported workflow

* use owners file to export wf owners

* update sharedworkflow owners

* fix logic

* further update logic

* add updatedAt to local changes

* additional filter for cred export

* optimize query

* remove transactions and optimize query

* reduce array size and add updated at to tags status
Michael Auerswald 2023-06-28 11:06:40 +02:00 committed by GitHub
parent b19833d673
commit 4b755fb0b4
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 235 additions and 153 deletions
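
For context: this change drops the per-workflow owner field from each exported workflow JSON and instead writes a single owners.json file into the git work folder, mapping workflow id to owner email. A minimal sketch of that export step, with the entity shape reduced to an illustrative stub (SharedWorkflowStub is not a real n8n type; the real code works from SharedWorkflow records):

import { writeFile } from 'fs/promises';
import path from 'path';

// Illustrative stub of the data the exporter iterates over.
type SharedWorkflowStub = { workflowId: string; user: { email: string } };

async function writeOwnersFile(gitFolder: string, sharedWorkflows: SharedWorkflowStub[]) {
  const owners: Record<string, string> = {};
  sharedWorkflows.forEach((e) => (owners[e.workflowId] = e.user.email));
  // produces e.g. { "1001": "owner@example.com", "1002": "someone@example.com" }
  await writeFile(path.join(gitFolder, 'owners.json'), JSON.stringify(owners, null, 2));
}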

View file

@@ -5,6 +5,7 @@ export const SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER = 'workflows';
 export const SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER = 'credential_stubs';
 export const SOURCE_CONTROL_VARIABLES_EXPORT_FILE = 'variable_stubs.json';
 export const SOURCE_CONTROL_TAGS_EXPORT_FILE = 'tags.json';
+export const SOURCE_CONTROL_OWNERS_EXPORT_FILE = 'owners.json';
 export const SOURCE_CONTROL_SSH_FOLDER = 'ssh';
 export const SOURCE_CONTROL_SSH_KEY_NAME = 'key';
 export const SOURCE_CONTROL_DEFAULT_BRANCH = 'main';

View file

@@ -32,6 +32,8 @@ import type {
 import { SourceControlPreferencesService } from './sourceControlPreferences.service.ee';
 import { writeFileSync } from 'fs';
 import { SourceControlImportService } from './sourceControlImport.service.ee';
+import type { WorkflowEntity } from '../../databases/entities/WorkflowEntity';
+import type { CredentialsEntity } from '../../databases/entities/CredentialsEntity';
 @Service()
 export class SourceControlService {
   private sshKeyName: string;
@@ -252,6 +254,7 @@ export class SourceControlService {
       ...status.modified,
     ]);
   }
+  mergedFileNames.add(this.sourceControlExportService.getOwnersPath());
   const deletedFiles = new Set<string>(status.deleted);
   deletedFiles.forEach((e) => mergedFileNames.delete(e));
   await this.unstage();
@@ -285,6 +288,20 @@ export class SourceControlService {
   let conflict = false;
   let status: SourceControlledFileStatus = 'unknown';
   let type: SourceControlledFileType = 'file';
+  let updatedAt = '';
+
+  const allWorkflows: Map<string, WorkflowEntity> = new Map();
+  (await Db.collections.Workflow.find({ select: ['id', 'name', 'updatedAt'] })).forEach(
+    (workflow) => {
+      allWorkflows.set(workflow.id, workflow);
+    },
+  );
+  const allCredentials: Map<string, CredentialsEntity> = new Map();
+  (await Db.collections.Credentials.find({ select: ['id', 'name', 'updatedAt'] })).forEach(
+    (credential) => {
+      allCredentials.set(credential.id, credential);
+    },
+  );

   // initialize status from git status result
   if (statusResult.not_added.find((e) => e === fileName)) status = 'new';
@@ -303,14 +320,14 @@ export class SourceControlService {
       .replace(/[\/,\\]/, '')
       .replace('.json', '');
     if (location === 'remote') {
-      const existingWorkflow = await Db.collections.Workflow.find({
-        where: { id },
-      });
-      if (existingWorkflow?.length > 0) {
-        name = existingWorkflow[0].name;
+      const existingWorkflow = allWorkflows.get(id);
+      if (existingWorkflow) {
+        name = existingWorkflow.name;
+        updatedAt = existingWorkflow.updatedAt.toISOString();
       }
     } else {
       name = '(deleted)';
+      // todo: once we have audit log, this deletion date could be looked up
     }
   } else {
     const workflow = await this.sourceControlExportService.getWorkflowFromFile(fileName);
@@ -326,6 +343,11 @@ export class SourceControlService {
       id = workflow.id;
       name = workflow.name;
     }
+    const existingWorkflow = allWorkflows.get(id);
+    if (existingWorkflow) {
+      name = existingWorkflow.name;
+      updatedAt = existingWorkflow.updatedAt.toISOString();
+    }
   }
 }
 if (fileName.startsWith(SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER)) {
@@ -336,11 +358,10 @@ export class SourceControlService {
       .replace(/[\/,\\]/, '')
       .replace('.json', '');
     if (location === 'remote') {
-      const existingCredential = await Db.collections.Credentials.find({
-        where: { id },
-      });
-      if (existingCredential?.length > 0) {
-        name = existingCredential[0].name;
+      const existingCredential = allCredentials.get(id);
+      if (existingCredential) {
+        name = existingCredential.name;
+        updatedAt = existingCredential.updatedAt.toISOString();
       }
     } else {
       name = '(deleted)';
@@ -359,6 +380,11 @@ export class SourceControlService {
       id = credential.id;
       name = credential.name;
     }
+    const existingCredential = allCredentials.get(id);
+    if (existingCredential) {
+      name = existingCredential.name;
+      updatedAt = existingCredential.updatedAt.toISOString();
+    }
   }
 }

@@ -369,9 +395,15 @@ export class SourceControlService {
 }

 if (fileName.startsWith(SOURCE_CONTROL_TAGS_EXPORT_FILE)) {
+  const lastUpdatedTag = await Db.collections.Tag.find({
+    order: { updatedAt: 'DESC' },
+    take: 1,
+    select: ['updatedAt'],
+  });
   id = 'tags';
   name = 'tags';
   type = 'tags';
+  updatedAt = lastUpdatedTag[0]?.updatedAt.toISOString();
 }

 if (!id) return;
@@ -384,6 +416,7 @@ export class SourceControlService {
   status,
   location,
   conflict,
+  updatedAt,
 };
 }
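
Note on the change above: the status path no longer issues a Db.collections.Workflow.find()/Credentials.find() call per file; instead, two up-front queries are cached in Maps and each file's name and updatedAt come from a constant-time lookup. A minimal sketch of that pattern, using a simplified Stub type in place of the real entities:

type Stub = { id: string; name: string; updatedAt: Date };

// Prefetch once, then look up per file.
function buildLookup(rows: Stub[]): Map<string, Stub> {
  const lookup = new Map<string, Stub>();
  rows.forEach((row) => lookup.set(row.id, row));
  return lookup;
}

function resolveNameAndUpdatedAt(lookup: Map<string, Stub>, id: string) {
  const existing = lookup.get(id);
  return existing
    ? { name: existing.name, updatedAt: existing.updatedAt.toISOString() }
    : { name: '(deleted)', updatedAt: '' };
}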

View file

@@ -3,6 +3,7 @@ import path from 'path';
 import {
   SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER,
   SOURCE_CONTROL_GIT_FOLDER,
+  SOURCE_CONTROL_OWNERS_EXPORT_FILE,
   SOURCE_CONTROL_TAGS_EXPORT_FILE,
   SOURCE_CONTROL_VARIABLES_EXPORT_FILE,
   SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER,
@@ -50,6 +51,10 @@ export class SourceControlExportService {
     return path.join(this.gitFolder, SOURCE_CONTROL_TAGS_EXPORT_FILE);
   }

+  getOwnersPath(): string {
+    return path.join(this.gitFolder, SOURCE_CONTROL_OWNERS_EXPORT_FILE);
+  }
+
   getVariablesPath(): string {
     return path.join(this.gitFolder, SOURCE_CONTROL_VARIABLES_EXPORT_FILE);
   }
@@ -160,7 +165,6 @@
       connections: e.workflow?.connections,
       settings: e.workflow?.settings,
       triggerCount: e.workflow?.triggerCount,
-      owner: e.user.email,
       versionId: e.workflow?.versionId,
     };
     LoggerProxy.debug(`Writing workflow ${e.workflowId} to ${fileName}`);
@@ -186,6 +190,11 @@
     const removedFiles = await this.rmDeletedWorkflowsFromExportFolder(sharedWorkflows);
     // write the workflows to the export folder as json files
     await this.writeExportableWorkflowsToExportFolder(sharedWorkflows);
+    // write list of owners to file
+    const ownersFileName = this.getOwnersPath();
+    const owners: Record<string, string> = {};
+    sharedWorkflows.forEach((e) => (owners[e.workflowId] = e.user.email));
+    await fsWriteFile(ownersFileName, JSON.stringify(owners, null, 2));
     return {
       count: sharedWorkflows.length,
       folder: this.workflowExportFolder,
@@ -280,7 +289,10 @@
     } else if (typeof data[key] === 'object') {
       data[key] = this.replaceCredentialData(data[key] as ICredentialDataDecryptedObject);
     } else if (typeof data[key] === 'string') {
-      data[key] = (data[key] as string)?.startsWith('={{') ? data[key] : '';
+      data[key] =
+        (data[key] as string)?.startsWith('={{') && (data[key] as string)?.includes('$secret')
+          ? data[key]
+          : '';
     } else if (typeof data[key] === 'number') {
       // TODO: leaving numbers in for now, but maybe we should remove them
       continue;
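
Note on the tightened credential-stub filter above: a string value now survives export only if it is both an expression (starts with '={{') and references an external secret (contains '$secret'); everything else is blanked before the stub is committed. A standalone sketch of just that predicate (the example expression in the comments is illustrative, not taken from the commit):

// Keep only expression values that reference an external secret; blank everything else.
function stubValue(value: string): string {
  return value.startsWith('={{') && value.includes('$secret') ? value : '';
}

// stubValue("={{ $secrets.vault.apiKey }}") -> kept as-is
// stubValue("my-plaintext-api-key")         -> ''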

View file

@@ -3,6 +3,7 @@ import path from 'path';
 import {
   SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER,
   SOURCE_CONTROL_GIT_FOLDER,
+  SOURCE_CONTROL_OWNERS_EXPORT_FILE,
   SOURCE_CONTROL_TAGS_EXPORT_FILE,
   SOURCE_CONTROL_VARIABLES_EXPORT_FILE,
   SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER,
@@ -14,15 +15,12 @@ import { readFile as fsReadFile } from 'fs/promises';
 import { Credentials, UserSettings } from 'n8n-core';
 import type { IWorkflowToImport } from '@/Interfaces';
 import type { ExportableCredential } from './types/exportableCredential';
-import { SharedWorkflow } from '@/databases/entities/SharedWorkflow';
-import { CredentialsEntity } from '@/databases/entities/CredentialsEntity';
 import { Variables } from '@/databases/entities/Variables';
 import type { ImportResult } from './types/importResult';
 import { UM_FIX_INSTRUCTION } from '@/commands/BaseCommand';
 import { SharedCredentials } from '@/databases/entities/SharedCredentials';
-import { WorkflowEntity } from '@/databases/entities/WorkflowEntity';
-import { WorkflowTagMapping } from '@/databases/entities/WorkflowTagMapping';
-import { TagEntity } from '@/databases/entities/TagEntity';
+import type { WorkflowTagMapping } from '@/databases/entities/WorkflowTagMapping';
+import type { TagEntity } from '@/databases/entities/TagEntity';
 import { ActiveWorkflowRunner } from '../../ActiveWorkflowRunner';
 import type { SourceControllPullOptions } from './types/sourceControlPullWorkFolder';
 import { In } from 'typeorm';
@@ -94,7 +92,6 @@ export class SourceControlImportService {
     const ownerGlobalRole = await this.getOwnerGlobalRole();
     const encryptionKey = await UserSettings.getEncryptionKey();
     let importCredentialsResult: Array<{ id: string; name: string; type: string }> = [];
-    await Db.transaction(async (transactionManager) => {
     importCredentialsResult = await Promise.all(
       credentialFiles.map(async (file) => {
         LoggerProxy.debug(`Importing credentials file ${file}`);
@@ -122,7 +119,7 @@
         newCredentialObject.nodesAccess = nodesAccess || existingCredential?.nodesAccess || [];

         LoggerProxy.debug(`Updating credential id ${newCredentialObject.id as string}`);
-        await transactionManager.upsert(CredentialsEntity, newCredentialObject, ['id']);
+        await Db.collections.Credentials.upsert(newCredentialObject, ['id']);

         if (!sharedOwner) {
           const newSharedCredential = new SharedCredentials();
@@ -130,7 +127,7 @@
           newSharedCredential.userId = userId;
           newSharedCredential.roleId = ownerGlobalRole.id;

-          await transactionManager.upsert(SharedCredentials, { ...newSharedCredential }, [
+          await Db.collections.SharedCredentials.upsert({ ...newSharedCredential }, [
             'credentialsId',
             'userId',
           ]);
@@ -143,7 +140,6 @@
         };
       }),
     );
-    });

     return importCredentialsResult.filter((e) => e !== undefined);
   }
@@ -224,11 +220,9 @@
       ).map((e) => e.id),
     );

-    await Db.transaction(async (transactionManager) => {
     await Promise.all(
       mappedTags.tags.map(async (tag) => {
-        await transactionManager.upsert(
-          TagEntity,
+        await Db.collections.Tag.upsert(
           {
             ...tag,
           },
@@ -242,8 +236,7 @@
     await Promise.all(
       mappedTags.mappings.map(async (mapping) => {
         if (!existingWorkflowIds.has(String(mapping.workflowId))) return;
-        await transactionManager.upsert(
-          WorkflowTagMapping,
+        await Db.collections.WorkflowTagMapping.upsert(
           { tagId: String(mapping.tagId), workflowId: String(mapping.workflowId) },
           {
             skipUpdateIfNoValuesChanged: true,
@@ -252,7 +245,6 @@
         );
       }),
     );
-    });
     return mappedTags;
   }
   return { tags: [], mappings: [] };
@@ -273,20 +265,50 @@
     const ownerWorkflowRole = await this.getOwnerWorkflowRole();
     const workflowRunner = Container.get(ActiveWorkflowRunner);

-    let importWorkflowsResult = new Array<{ id: string; name: string }>();
-    await Db.transaction(async (transactionManager) => {
+    // read owner file if it exists and map workflow ids to owner emails
+    // then find existing users with those emails or fallback to passed in userId
+    const ownerRecords: Record<string, string> = {};
+    const ownersFile = await glob(SOURCE_CONTROL_OWNERS_EXPORT_FILE, {
+      cwd: this.gitFolder,
+      absolute: true,
+    });
+    if (ownersFile.length > 0) {
+      LoggerProxy.debug(`Reading workflow owners from file ${ownersFile[0]}`);
+      const ownerEmails = jsonParse<Record<string, string>>(
+        await fsReadFile(ownersFile[0], { encoding: 'utf8' }),
+        { fallbackValue: {} },
+      );
+      if (ownerEmails) {
+        const uniqueOwnerEmails = new Set(Object.values(ownerEmails));
+        const existingUsers = await Db.collections.User.find({
+          where: { email: In([...uniqueOwnerEmails]) },
+        });
+        Object.keys(ownerEmails).forEach((workflowId) => {
+          ownerRecords[workflowId] =
+            existingUsers.find((e) => e.email === ownerEmails[workflowId])?.id ?? userId;
+        });
+      }
+    }
+
+    let importWorkflowsResult = new Array<{ id: string; name: string } | undefined>();
+    const allSharedWorkflows = await Db.collections.SharedWorkflow.find({
+      select: ['workflowId', 'roleId', 'userId'],
+    });
+
     importWorkflowsResult = await Promise.all(
       workflowFiles.map(async (file) => {
         LoggerProxy.debug(`Parsing workflow file ${file}`);
         const importedWorkflow = jsonParse<IWorkflowToImport>(
           await fsReadFile(file, { encoding: 'utf8' }),
         );
+        if (!importedWorkflow?.id) {
+          return;
+        }
         const existingWorkflow = existingWorkflows.find((e) => e.id === importedWorkflow.id);
         if (existingWorkflow?.versionId === importedWorkflow.versionId) {
           LoggerProxy.debug(
-            `Skipping import of workflow ${
-              importedWorkflow.id ?? 'n/a'
-            } - versionId is up to date`,
+            `Skipping import of workflow ${importedWorkflow.id ?? 'n/a'} - versionId is up to date`,
           );
           return {
             id: importedWorkflow.id ?? 'n/a',
@@ -296,27 +318,41 @@
         LoggerProxy.debug(`Importing workflow ${importedWorkflow.id ?? 'n/a'}`);
         importedWorkflow.active = existingWorkflow?.active ?? false;
         LoggerProxy.debug(`Updating workflow id ${importedWorkflow.id ?? 'new'}`);
-        const upsertResult = await transactionManager.upsert(
-          WorkflowEntity,
-          { ...importedWorkflow },
-          ['id'],
-        );
+        const upsertResult = await Db.collections.Workflow.upsert({ ...importedWorkflow }, ['id']);
         if (upsertResult?.identifiers?.length !== 1) {
           throw new Error(`Failed to upsert workflow ${importedWorkflow.id ?? 'new'}`);
         }
-        // due to sequential Ids, this may have changed during the insert
-        // TODO: once IDs are unique and we removed autoincrement, remove this
-        const upsertedWorkflowId = upsertResult.identifiers[0].id as string;
-        await transactionManager.upsert(
-          SharedWorkflow,
+        // Update workflow owner to the user who exported the workflow, if that user exists
+        // in the instance, and the workflow doesn't already have an owner
+        const workflowOwnerId = ownerRecords[importedWorkflow.id] ?? userId;
+        const existingSharedWorkflowOwnerByRoleId = allSharedWorkflows.find(
+          (e) => e.workflowId === importedWorkflow.id && e.roleId === ownerWorkflowRole.id,
+        );
+        const existingSharedWorkflowOwnerByUserId = allSharedWorkflows.find(
+          (e) => e.workflowId === importedWorkflow.id && e.userId === workflowOwnerId,
+        );
+        if (!existingSharedWorkflowOwnerByUserId && !existingSharedWorkflowOwnerByRoleId) {
+          // no owner exists yet, so create one
+          await Db.collections.SharedWorkflow.insert({
+            workflowId: importedWorkflow.id,
+            userId: workflowOwnerId,
+            roleId: ownerWorkflowRole.id,
+          });
+        } else if (existingSharedWorkflowOwnerByRoleId) {
+          // skip, because the workflow already has a global owner
+        } else if (existingSharedWorkflowOwnerByUserId && !existingSharedWorkflowOwnerByRoleId) {
+          // if the worklflow has a non-global owner that is referenced by the owner file,
+          // and no existing global owner, update the owner to the user referenced in the owner file
+          await Db.collections.SharedWorkflow.update(
+            {
+              workflowId: importedWorkflow.id,
+              userId: workflowOwnerId,
+            },
             {
-              workflowId: upsertedWorkflowId,
-              userId,
               roleId: ownerWorkflowRole.id,
             },
-            ['workflowId', 'userId'],
           );
+        }
         if (existingWorkflow?.active) {
           try {
             // remove active pre-import workflow
@@ -326,10 +362,7 @@
             LoggerProxy.debug(`Reactivating workflow id ${existingWorkflow.id}`);
             await workflowRunner.add(existingWorkflow.id, 'activate');
           } catch (error) {
-            LoggerProxy.error(
-              `Failed to activate workflow ${existingWorkflow.id}`,
-              error as Error,
-            );
+            LoggerProxy.error(`Failed to activate workflow ${existingWorkflow.id}`, error as Error);
           }
         }
@@ -339,8 +372,11 @@
         };
       }),
     );
-    });

-    return importWorkflowsResult;
+    return importWorkflowsResult.filter((e) => e !== undefined) as Array<{
+      id: string;
+      name: string;
+    }>;
   }

   async importFromWorkFolder(options: SourceControllPullOptions): Promise<ImportResult> {
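
Note on the import side above: on pull, owners.json is read from the work folder, each owner email is resolved to a user id on the target instance, and the importing user's id is used as a fallback when no matching account exists. A minimal sketch of that resolution step, with the user entity reduced to an illustrative UserStub (the real code queries Db.collections.User by email):

// Illustrative stub; the real import uses the User entity.
type UserStub = { id: string; email: string };

function resolveOwnerRecords(
  ownerEmails: Record<string, string>, // contents of owners.json: workflow id -> email
  existingUsers: UserStub[],
  fallbackUserId: string,
): Record<string, string> {
  const ownerRecords: Record<string, string> = {};
  Object.keys(ownerEmails).forEach((workflowId) => {
    ownerRecords[workflowId] =
      existingUsers.find((u) => u.email === ownerEmails[workflowId])?.id ?? fallbackUserId;
  });
  return ownerRecords;
}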

View file

@@ -8,6 +8,5 @@ export interface ExportableWorkflow {
   connections: IConnections;
   settings?: IWorkflowSettings;
   triggerCount: number;
-  owner: string;
   versionId: string;
 }

View file

@@ -16,4 +16,5 @@ export type SourceControlledFile = {
   status: SourceControlledFileStatus;
   location: SourceControlledFileLocation;
   conflict: boolean;
+  updatedAt: string;
 };