Mirror of https://github.com/n8n-io/n8n.git (synced 2024-12-23 11:44:06 -08:00)
fix(core): Don't create multiple owners when importing credentials or workflows (#9112)

This commit is contained in:
parent 5433004d77
commit 3eb5be5f5a
@@ -64,67 +64,25 @@ export class ImportCredentialsCommand extends BaseCommand {
 			}
 		}
 
-		let totalImported = 0;
-
-		const cipher = Container.get(Cipher);
 		const user = flags.userId ? await this.getAssignee(flags.userId) : await this.getOwner();
 
-		if (flags.separate) {
-			let { input: inputPath } = flags;
-
-			if (process.platform === 'win32') {
-				inputPath = inputPath.replace(/\\/g, '/');
-			}
-
-			const files = await glob('*.json', {
-				cwd: inputPath,
-				absolute: true,
-			});
-
-			totalImported = files.length;
-
-			await Db.getConnection().transaction(async (transactionManager) => {
-				this.transactionManager = transactionManager;
-				for (const file of files) {
-					const credential = jsonParse<ICredentialsEncrypted>(
-						fs.readFileSync(file, { encoding: 'utf8' }),
-					);
-					if (typeof credential.data === 'object') {
-						// plain data / decrypted input. Should be encrypted first.
-						credential.data = cipher.encrypt(credential.data);
-					}
-					await this.storeCredential(credential, user);
-				}
-			});
-
-			this.reportSuccess(totalImported);
-			return;
-		}
-
-		const credentials = jsonParse<ICredentialsEncrypted[]>(
-			fs.readFileSync(flags.input, { encoding: 'utf8' }),
-		);
-
-		totalImported = credentials.length;
-
-		if (!Array.isArray(credentials)) {
-			throw new ApplicationError(
-				'File does not seem to contain credentials. Make sure the credentials are contained in an array.',
-			);
-		}
+		const credentials = await this.readCredentials(flags.input, flags.separate);
 
 		await Db.getConnection().transaction(async (transactionManager) => {
 			this.transactionManager = transactionManager;
+
+			const result = await this.checkRelations(credentials, flags.userId);
+
+			if (!result.success) {
+				throw new ApplicationError(result.message);
+			}
+
 			for (const credential of credentials) {
-				if (typeof credential.data === 'object') {
-					// plain data / decrypted input. Should be encrypted first.
-					credential.data = cipher.encrypt(credential.data);
-				}
 				await this.storeCredential(credential, user);
 			}
 		});
 
-		this.reportSuccess(totalImported);
+		this.reportSuccess(credentials.length);
 	}
 
 	async catch(error: Error) {
@@ -142,15 +100,23 @@ export class ImportCredentialsCommand extends BaseCommand {
 
 	private async storeCredential(credential: Partial<CredentialsEntity>, user: User) {
 		const result = await this.transactionManager.upsert(CredentialsEntity, credential, ['id']);
-		await this.transactionManager.upsert(
-			SharedCredentials,
-			{
-				credentialsId: result.identifiers[0].id as string,
-				userId: user.id,
-				role: 'credential:owner',
-			},
-			['credentialsId', 'userId'],
-		);
+
+		const sharingExists = await this.transactionManager.existsBy(SharedCredentials, {
+			credentialsId: credential.id,
+			role: 'credential:owner',
+		});
+
+		if (!sharingExists) {
+			await this.transactionManager.upsert(
+				SharedCredentials,
+				{
+					credentialsId: result.identifiers[0].id as string,
+					userId: user.id,
+					role: 'credential:owner',
+				},
+				['credentialsId', 'userId'],
+			);
+		}
 	}
 
 	private async getOwner() {
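The block above is the heart of the credentials fix: `storeCredential` now only creates a `credential:owner` sharing when none exists yet, so re-importing an existing credential can no longer reassign its owner. A minimal sketch of that rule, using hypothetical `Sharing`/`SharingStore` types rather than n8n's TypeORM entities and transaction manager:

// Hypothetical stand-ins for SharedCredentials and the transaction manager.
interface Sharing {
	credentialsId: string;
	userId: string;
	role: 'credential:owner';
}

interface SharingStore {
	findOwner(credentialsId: string): Promise<Sharing | undefined>;
	insert(sharing: Sharing): Promise<void>;
}

// Create an owner sharing only if the credential has none yet;
// an existing owner is left untouched.
async function ensureCredentialOwner(
	store: SharingStore,
	credentialsId: string,
	userId: string,
): Promise<Sharing> {
	const existing = await store.findOwner(credentialsId);
	if (existing) return existing;

	const sharing: Sharing = { credentialsId, userId, role: 'credential:owner' };
	await store.insert(sharing);
	return sharing;
}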
@@ -162,6 +128,84 @@ export class ImportCredentialsCommand extends BaseCommand {
 		return owner;
 	}
 
+	private async checkRelations(credentials: ICredentialsEncrypted[], userId?: string) {
+		if (!userId) {
+			return {
+				success: true as const,
+				message: undefined,
+			};
+		}
+
+		for (const credential of credentials) {
+			if (credential.id === undefined) {
+				continue;
+			}
+
+			if (!(await this.credentialExists(credential.id))) {
+				continue;
+			}
+
+			const ownerId = await this.getCredentialOwner(credential.id);
+			if (!ownerId) {
+				continue;
+			}
+
+			if (ownerId !== userId) {
+				return {
+					success: false as const,
+					message: `The credential with id "${credential.id}" is already owned by the user with the id "${ownerId}". It can't be re-owned by the user with the id "${userId}"`,
+				};
+			}
+		}
+
+		return {
+			success: true as const,
+			message: undefined,
+		};
+	}
+
+	private async readCredentials(path: string, separate: boolean): Promise<ICredentialsEncrypted[]> {
+		const cipher = Container.get(Cipher);
+
+		if (process.platform === 'win32') {
+			path = path.replace(/\\/g, '/');
+		}
+
+		let credentials: ICredentialsEncrypted[];
+
+		if (separate) {
+			const files = await glob('*.json', {
+				cwd: path,
+				absolute: true,
+			});
+
+			credentials = files.map((file) =>
+				jsonParse<ICredentialsEncrypted>(fs.readFileSync(file, { encoding: 'utf8' })),
+			);
+		} else {
+			const credentialsUnchecked = jsonParse<ICredentialsEncrypted[]>(
+				fs.readFileSync(path, { encoding: 'utf8' }),
+			);
+
+			if (!Array.isArray(credentialsUnchecked)) {
+				throw new ApplicationError(
+					'File does not seem to contain credentials. Make sure the credentials are contained in an array.',
+				);
+			}
+
+			credentials = credentialsUnchecked;
+		}
+
+		return credentials.map((credential) => {
+			if (typeof credential.data === 'object') {
+				// plain data / decrypted input. Should be encrypted first.
+				credential.data = cipher.encrypt(credential.data);
+			}
+
+			return credential;
+		});
+	}
+
 	private async getAssignee(userId: string) {
 		const user = await Container.get(UserRepository).findOneBy({ id: userId });
 
@@ -171,4 +215,17 @@ export class ImportCredentialsCommand extends BaseCommand {
 
 		return user;
 	}
+
+	private async getCredentialOwner(credentialsId: string) {
+		const sharedCredential = await this.transactionManager.findOneBy(SharedCredentials, {
+			credentialsId,
+			role: 'credential:owner',
+		});
+
+		return sharedCredential?.userId;
+	}
+
+	private async credentialExists(credentialId: string) {
+		return await this.transactionManager.existsBy(CredentialsEntity, { id: credentialId });
+	}
 }
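`checkRelations` returns plain result objects with `success: true as const` / `false as const`, so TypeScript narrows the union at the call site and `result.message` is only a `string` in the failure branch. A small self-contained sketch of the same pattern (generic names, not the n8n types):

type CheckResult =
	| { success: true; message: undefined }
	| { success: false; message: string };

function check(ownerId: string | undefined, userId: string): CheckResult {
	if (ownerId && ownerId !== userId) {
		return {
			success: false as const,
			message: `Already owned by "${ownerId}", cannot re-own for "${userId}".`,
		};
	}
	return { success: true as const, message: undefined };
}

const result = check('owner-1', 'member-1');
if (!result.success) {
	// Narrowed: message is a string here, mirroring the
	// `throw new ApplicationError(result.message)` call in the command.
	throw new Error(result.message);
}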
@@ -13,6 +13,7 @@ import { WorkflowRepository } from '@db/repositories/workflow.repository';
 import type { IWorkflowToImport } from '@/Interfaces';
 import { ImportService } from '@/services/import.service';
 import { BaseCommand } from '../BaseCommand';
+import { SharedWorkflowRepository } from '@db/repositories/sharedWorkflow.repository';
 
 function assertHasWorkflowsToImport(workflows: unknown): asserts workflows is IWorkflowToImport[] {
 	if (!Array.isArray(workflows)) {
@@ -78,53 +79,52 @@ export class ImportWorkflowsCommand extends BaseCommand {
 			}
 		}
 
-		const user = flags.userId ? await this.getAssignee(flags.userId) : await this.getOwner();
+		const owner = await this.getOwner();
 
-		let totalImported = 0;
+		const workflows = await this.readWorkflows(flags.input, flags.separate);
 
-		if (flags.separate) {
-			let { input: inputPath } = flags;
-
-			if (process.platform === 'win32') {
-				inputPath = inputPath.replace(/\\/g, '/');
-			}
-
-			const files = await glob('*.json', {
-				cwd: inputPath,
-				absolute: true,
-			});
-
-			totalImported = files.length;
-			this.logger.info(`Importing ${totalImported} workflows...`);
-
-			for (const file of files) {
-				const workflow = jsonParse<IWorkflowToImport>(fs.readFileSync(file, { encoding: 'utf8' }));
-				if (!workflow.id) {
-					workflow.id = generateNanoId();
-				}
-
-				const _workflow = Container.get(WorkflowRepository).create(workflow);
-
-				await Container.get(ImportService).importWorkflows([_workflow], user.id);
-			}
-
-			this.reportSuccess(totalImported);
-			process.exit();
+		const result = await this.checkRelations(workflows, flags.userId);
+		if (!result.success) {
+			throw new ApplicationError(result.message);
 		}
 
-		const workflows = jsonParse<IWorkflowToImport[]>(
-			fs.readFileSync(flags.input, { encoding: 'utf8' }),
-		);
+		this.logger.info(`Importing ${workflows.length} workflows...`);
 
-		const _workflows = workflows.map((w) => Container.get(WorkflowRepository).create(w));
+		await Container.get(ImportService).importWorkflows(workflows, flags.userId ?? owner.id);
 
-		assertHasWorkflowsToImport(workflows);
+		this.reportSuccess(workflows.length);
+	}
 
-		totalImported = workflows.length;
+	private async checkRelations(workflows: WorkflowEntity[], userId: string | undefined) {
+		if (!userId) {
+			return {
+				success: true as const,
+				message: undefined,
+			};
+		}
 
-		await Container.get(ImportService).importWorkflows(_workflows, user.id);
+		for (const workflow of workflows) {
+			if (!(await this.workflowExists(workflow))) {
+				continue;
+			}
 
-		this.reportSuccess(totalImported);
+			const ownerId = await this.getWorkflowOwner(workflow);
+			if (!ownerId) {
+				continue;
+			}
+
+			if (ownerId !== userId) {
+				return {
+					success: false as const,
+					message: `The credential with id "${workflow.id}" is already owned by the user with the id "${ownerId}". It can't be re-owned by the user with the id "${userId}"`,
+				};
+			}
+		}
+
+		return {
+			success: true as const,
+			message: undefined,
+		};
+	}
 
 	async catch(error: Error) {
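Both commands now funnel input handling through a single read step: with `--separate` the input is a directory of one-JSON-per-item files, otherwise a single file holding a JSON array. A rough standalone sketch of that split, using plain `fs`/`path` instead of the `glob` dependency used above:

import fs from 'node:fs';
import path from 'node:path';

// Read either a directory of *.json files (separate = true)
// or one file that contains a JSON array (separate = false).
function readJsonInput<T>(input: string, separate: boolean): T[] {
	if (separate) {
		return fs
			.readdirSync(input)
			.filter((name) => name.endsWith('.json'))
			.map((name) => JSON.parse(fs.readFileSync(path.join(input, name), 'utf8')) as T);
	}

	const parsed = JSON.parse(fs.readFileSync(input, 'utf8')) as unknown;
	if (!Array.isArray(parsed)) {
		throw new Error('File does not seem to contain an array of items.');
	}
	return parsed as T[];
}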
@@ -145,13 +145,48 @@ export class ImportWorkflowsCommand extends BaseCommand {
 		return owner;
 	}
 
-	private async getAssignee(userId: string) {
-		const user = await Container.get(UserRepository).findOneBy({ id: userId });
-
-		if (!user) {
-			throw new ApplicationError('Failed to find user', { extra: { userId } });
-		}
-
-		return user;
-	}
+	private async getWorkflowOwner(workflow: WorkflowEntity) {
+		const sharing = await Container.get(SharedWorkflowRepository).findOneBy({
+			workflowId: workflow.id,
+			role: 'workflow:owner',
+		});
+
+		return sharing?.userId;
+	}
+
+	private async workflowExists(workflow: WorkflowEntity) {
+		return await Container.get(WorkflowRepository).existsBy({ id: workflow.id });
+	}
+
+	private async readWorkflows(path: string, separate: boolean): Promise<WorkflowEntity[]> {
+		if (process.platform === 'win32') {
+			path = path.replace(/\\/g, '/');
+		}
+
+		if (separate) {
+			const files = await glob('*.json', {
+				cwd: path,
+				absolute: true,
+			});
+			const workflowInstances = files.map((file) => {
+				const workflow = jsonParse<IWorkflowToImport>(fs.readFileSync(file, { encoding: 'utf8' }));
+				if (!workflow.id) {
+					workflow.id = generateNanoId();
+				}
+
+				const workflowInstance = Container.get(WorkflowRepository).create(workflow);
+
+				return workflowInstance;
+			});
+
+			return workflowInstances;
+		} else {
+			const workflows = jsonParse<IWorkflowToImport[]>(fs.readFileSync(path, { encoding: 'utf8' }));
+
+			const workflowInstances = workflows.map((w) => Container.get(WorkflowRepository).create(w));
+			assertHasWorkflowsToImport(workflows);
+
+			return workflowInstances;
+		}
+	}
 }
@@ -53,14 +53,18 @@ export class ImportService {
 				this.logger.info(`Deactivating workflow "${workflow.name}". Remember to activate later.`);
 			}
 
-			const upsertResult = await tx.upsert(WorkflowEntity, workflow, ['id']);
+			const exists = workflow.id ? await tx.existsBy(WorkflowEntity, { id: workflow.id }) : false;
 
+			const upsertResult = await tx.upsert(WorkflowEntity, workflow, ['id']);
 			const workflowId = upsertResult.identifiers.at(0)?.id as string;
 
-			await tx.upsert(SharedWorkflow, { workflowId, userId, role: 'workflow:owner' }, [
-				'workflowId',
-				'userId',
-			]);
+			// Create relationship if the workflow was inserted instead of updated.
+			if (!exists) {
+				await tx.upsert(SharedWorkflow, { workflowId, userId, role: 'workflow:owner' }, [
+					'workflowId',
+					'userId',
+				]);
+			}
 
 			if (!workflow.tags?.length) continue;
 
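The service applies the same rule at the persistence layer: the workflow row is upserted either way, but the `workflow:owner` relation is only written when the row did not exist before. A reduced sketch of that flow, with in-memory maps standing in for the two tables:

interface WorkflowRow { id: string; name: string }
interface SharedRow { workflowId: string; userId: string; role: 'workflow:owner' }

const workflowTable = new Map<string, WorkflowRow>();
const sharedTable = new Map<string, SharedRow>(); // keyed by workflowId

function importWorkflow(workflow: WorkflowRow, userId: string): void {
	const existedBefore = workflowTable.has(workflow.id);

	// "Upsert": insert or update the workflow row itself in both cases.
	workflowTable.set(workflow.id, workflow);

	// Create the ownership relation only for genuinely new workflows,
	// so a re-import cannot hand the workflow to a second owner.
	if (!existedBefore) {
		sharedTable.set(workflow.id, { workflowId: workflow.id, userId, role: 'workflow:owner' });
	}
}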
@@ -6,10 +6,17 @@ import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials';
 import { mockInstance } from '../../shared/mocking';
 import * as testDb from '../shared/testDb';
-import { getAllCredentials } from '../shared/db/credentials';
+import { getAllCredentials, getAllSharedCredentials } from '../shared/db/credentials';
+import { createMember, createOwner } from '../shared/db/users';
 
 const oclifConfig = new Config({ root: __dirname });
 
+async function importCredential(argv: string[]) {
+	const importer = new ImportCredentialsCommand(argv, oclifConfig);
+	await importer.init();
+	await importer.run();
+}
+
 beforeAll(async () => {
 	mockInstance(InternalHooks);
 	mockInstance(LoadNodesAndCredentials);
@@ -17,7 +24,7 @@ beforeAll(async () => {
 });
 
 beforeEach(async () => {
-	await testDb.truncate(['Credentials']);
+	await testDb.truncate(['Credentials', 'SharedCredentials', 'User']);
 });
 
 afterAll(async () => {
@@ -25,25 +32,202 @@ afterAll(async () => {
 });
 
 test('import:credentials should import a credential', async () => {
-	const before = await getAllCredentials();
-	expect(before.length).toBe(0);
-	const importer = new ImportCredentialsCommand(
-		['--input=./test/integration/commands/importCredentials/credentials.json'],
-		oclifConfig,
-	);
-	const mockExit = jest.spyOn(process, 'exit').mockImplementation(() => {
-		throw new Error('process.exit');
-	});
-
-	await importer.init();
-	try {
-		await importer.run();
-	} catch (error) {
-		expect(error.message).toBe('process.exit');
-	}
-	const after = await getAllCredentials();
-	expect(after.length).toBe(1);
-	expect(after[0].name).toBe('cred-aws-test');
-	expect(after[0].id).toBe('123');
-	mockExit.mockRestore();
+	//
+	// ARRANGE
+	//
+	const owner = await createOwner();
+
+	//
+	// ACT
+	//
+	await importCredential([
+		'--input=./test/integration/commands/importCredentials/credentials.json',
+	]);
+
+	//
+	// ASSERT
+	//
+	const after = {
+		credentials: await getAllCredentials(),
+		sharings: await getAllSharedCredentials(),
+	};
+	expect(after).toMatchObject({
+		credentials: [expect.objectContaining({ id: '123', name: 'cred-aws-test' })],
+		sharings: [
+			expect.objectContaining({ credentialsId: '123', userId: owner.id, role: 'credential:owner' }),
+		],
+	});
+});
+
+test('import:credentials should import a credential from separated files', async () => {
+	//
+	// ARRANGE
+	//
+	const owner = await createOwner();
+
+	//
+	// ACT
+	//
+	// import credential the first time, assigning it to the owner
+	await importCredential([
+		'--separate',
+		'--input=./test/integration/commands/importCredentials/separate',
+	]);
+
+	//
+	// ASSERT
+	//
+	const after = {
+		credentials: await getAllCredentials(),
+		sharings: await getAllSharedCredentials(),
+	};
+
+	expect(after).toMatchObject({
+		credentials: [
+			expect.objectContaining({
+				id: '123',
+				name: 'cred-aws-test',
+			}),
+		],
+		sharings: [
+			expect.objectContaining({
+				credentialsId: '123',
+				userId: owner.id,
+				role: 'credential:owner',
+			}),
+		],
+	});
+});
+
+test('`import:credentials --userId ...` should fail if the credential exists already and is owned by somebody else', async () => {
+	//
+	// ARRANGE
+	//
+	const owner = await createOwner();
+	const member = await createMember();
+
+	// import credential the first time, assigning it to the owner
+	await importCredential([
+		'--input=./test/integration/commands/importCredentials/credentials.json',
+		`--userId=${owner.id}`,
+	]);
+
+	// making sure the import worked
+	const before = {
+		credentials: await getAllCredentials(),
+		sharings: await getAllSharedCredentials(),
+	};
+	expect(before).toMatchObject({
+		credentials: [expect.objectContaining({ id: '123', name: 'cred-aws-test' })],
+		sharings: [
+			expect.objectContaining({
+				credentialsId: '123',
+				userId: owner.id,
+				role: 'credential:owner',
+			}),
+		],
+	});
+
+	//
+	// ACT
+	//
+
+	// Import again while updating the name we try to assign the
+	// credential to another user.
+	await expect(
+		importCredential([
+			'--input=./test/integration/commands/importCredentials/credentials-updated.json',
+			`--userId=${member.id}`,
+		]),
+	).rejects.toThrowError(
+		`The credential with id "123" is already owned by the user with the id "${owner.id}". It can't be re-owned by the user with the id "${member.id}"`,
+	);
+
+	//
+	// ASSERT
+	//
+	const after = {
+		credentials: await getAllCredentials(),
+		sharings: await getAllSharedCredentials(),
+	};
+
+	expect(after).toMatchObject({
+		credentials: [
+			expect.objectContaining({
+				id: '123',
+				// only the name was updated
+				name: 'cred-aws-test',
+			}),
+		],
+		sharings: [
+			expect.objectContaining({
+				credentialsId: '123',
+				userId: owner.id,
+				role: 'credential:owner',
+			}),
+		],
+	});
+});
+
+test("only update credential, don't create or update owner if `--userId` is not passed", async () => {
+	//
+	// ARRANGE
+	//
+	await createOwner();
+	const member = await createMember();
+
+	// import credential the first time, assigning it to a member
+	await importCredential([
+		'--input=./test/integration/commands/importCredentials/credentials.json',
+		`--userId=${member.id}`,
+	]);
+
+	// making sure the import worked
+	const before = {
+		credentials: await getAllCredentials(),
+		sharings: await getAllSharedCredentials(),
+	};
+	expect(before).toMatchObject({
+		credentials: [expect.objectContaining({ id: '123', name: 'cred-aws-test' })],
+		sharings: [
+			expect.objectContaining({
+				credentialsId: '123',
+				userId: member.id,
+				role: 'credential:owner',
+			}),
+		],
+	});
+
+	//
+	// ACT
+	//
+	// Import again only updating the name and omitting `--userId`
+	await importCredential([
+		'--input=./test/integration/commands/importCredentials/credentials-updated.json',
+	]);
+
+	//
+	// ASSERT
+	//
+	const after = {
+		credentials: await getAllCredentials(),
+		sharings: await getAllSharedCredentials(),
+	};
+
+	expect(after).toMatchObject({
+		credentials: [
+			expect.objectContaining({
+				id: '123',
+				// only the name was updated
+				name: 'cred-aws-prod',
+			}),
+		],
+		sharings: [
+			expect.objectContaining({
+				credentialsId: '123',
+				userId: member.id,
+				role: 'credential:owner',
+			}),
+		],
+	});
 });
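The rewritten tests follow an arrange/act/assert layout and assert the failure path by awaiting the rejected promise. A minimal standalone Jest-style shape of that assertion, with a hypothetical `doImport` helper rather than the real command wrapper:

async function doImport(ownerOfRecord: string, requestedOwner: string): Promise<void> {
	if (ownerOfRecord !== requestedOwner) {
		throw new Error(`Already owned by "${ownerOfRecord}"`);
	}
}

test('re-importing for a different user is rejected', async () => {
	// ARRANGE: the record is owned by user "a".
	// ACT + ASSERT: importing it for user "b" must reject.
	await expect(doImport('a', 'b')).rejects.toThrowError('Already owned by "a"');

	// The happy path still resolves.
	await expect(doImport('a', 'a')).resolves.toBeUndefined();
});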
@@ -6,10 +6,17 @@ import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials';
 import { mockInstance } from '../../shared/mocking';
 import * as testDb from '../shared/testDb';
-import { getAllWorkflows } from '../shared/db/workflows';
+import { getAllSharedWorkflows, getAllWorkflows } from '../shared/db/workflows';
+import { createMember, createOwner } from '../shared/db/users';
 
 const oclifConfig = new Config({ root: __dirname });
 
+async function importWorkflow(argv: string[]) {
+	const importer = new ImportWorkflowsCommand(argv, oclifConfig);
+	await importer.init();
+	await importer.run();
+}
+
 beforeAll(async () => {
 	mockInstance(InternalHooks);
 	mockInstance(LoadNodesAndCredentials);
@@ -17,7 +24,7 @@ beforeAll(async () => {
 });
 
 beforeEach(async () => {
-	await testDb.truncate(['Workflow']);
+	await testDb.truncate(['Workflow', 'SharedWorkflow', 'User']);
 });
 
 afterAll(async () => {
@@ -25,53 +32,186 @@ afterAll(async () => {
 });
 
 test('import:workflow should import active workflow and deactivate it', async () => {
-	const before = await getAllWorkflows();
-	expect(before.length).toBe(0);
-	const importer = new ImportWorkflowsCommand(
-		['--separate', '--input=./test/integration/commands/importWorkflows/separate'],
-		oclifConfig,
-	);
-	const mockExit = jest.spyOn(process, 'exit').mockImplementation(() => {
-		throw new Error('process.exit');
-	});
-
-	await importer.init();
-	try {
-		await importer.run();
-	} catch (error) {
-		expect(error.message).toBe('process.exit');
-	}
-	const after = await getAllWorkflows();
-	expect(after.length).toBe(2);
-	expect(after[0].name).toBe('active-workflow');
-	expect(after[0].active).toBe(false);
-	expect(after[1].name).toBe('inactive-workflow');
-	expect(after[1].active).toBe(false);
-	mockExit.mockRestore();
+	//
+	// ARRANGE
+	//
+	const owner = await createOwner();
+
+	//
+	// ACT
+	//
+	await importWorkflow([
+		'--separate',
+		'--input=./test/integration/commands/importWorkflows/separate',
+	]);
+
+	//
+	// ASSERT
+	//
+	const after = {
+		workflows: await getAllWorkflows(),
+		sharings: await getAllSharedWorkflows(),
+	};
+	expect(after).toMatchObject({
+		workflows: [
+			expect.objectContaining({ name: 'active-workflow', active: false }),
+			expect.objectContaining({ name: 'inactive-workflow', active: false }),
+		],
+		sharings: [
+			expect.objectContaining({ workflowId: '998', userId: owner.id, role: 'workflow:owner' }),
+			expect.objectContaining({ workflowId: '999', userId: owner.id, role: 'workflow:owner' }),
+		],
+	});
 });
 
 test('import:workflow should import active workflow from combined file and deactivate it', async () => {
-	const before = await getAllWorkflows();
-	expect(before.length).toBe(0);
-	const importer = new ImportWorkflowsCommand(
-		['--input=./test/integration/commands/importWorkflows/combined/combined.json'],
-		oclifConfig,
-	);
-	const mockExit = jest.spyOn(process, 'exit').mockImplementation(() => {
-		throw new Error('process.exit');
-	});
-
-	await importer.init();
-	try {
-		await importer.run();
-	} catch (error) {
-		expect(error.message).toBe('process.exit');
-	}
-	const after = await getAllWorkflows();
-	expect(after.length).toBe(2);
-	expect(after[0].name).toBe('active-workflow');
-	expect(after[0].active).toBe(false);
-	expect(after[1].name).toBe('inactive-workflow');
-	expect(after[1].active).toBe(false);
-	mockExit.mockRestore();
+	//
+	// ARRANGE
+	//
+	const owner = await createOwner();
+
+	//
+	// ACT
+	//
+	await importWorkflow([
+		'--input=./test/integration/commands/importWorkflows/combined/combined.json',
+	]);
+
+	//
+	// ASSERT
+	//
+	const after = {
+		workflows: await getAllWorkflows(),
+		sharings: await getAllSharedWorkflows(),
+	};
+	expect(after).toMatchObject({
+		workflows: [
+			expect.objectContaining({ name: 'active-workflow', active: false }),
+			expect.objectContaining({ name: 'inactive-workflow', active: false }),
+		],
+		sharings: [
+			expect.objectContaining({ workflowId: '998', userId: owner.id, role: 'workflow:owner' }),
+			expect.objectContaining({ workflowId: '999', userId: owner.id, role: 'workflow:owner' }),
+		],
+	});
+});
+
+test('`import:workflow --userId ...` should fail if the workflow exists already and is owned by somebody else', async () => {
+	//
+	// ARRANGE
+	//
+	const owner = await createOwner();
+	const member = await createMember();
+
+	// Import workflow the first time, assigning it to a member.
+	await importWorkflow([
+		'--input=./test/integration/commands/importWorkflows/combined-with-update/original.json',
+		`--userId=${owner.id}`,
+	]);
+
+	const before = {
+		workflows: await getAllWorkflows(),
+		sharings: await getAllSharedWorkflows(),
+	};
+	// Make sure the workflow and sharing have been created.
+	expect(before).toMatchObject({
+		workflows: [expect.objectContaining({ id: '998', name: 'active-workflow' })],
+		sharings: [
+			expect.objectContaining({
+				workflowId: '998',
+				userId: owner.id,
+				role: 'workflow:owner',
+			}),
+		],
+	});
+
+	//
+	// ACT
+	//
+	// Import the same workflow again, with another name but the same ID, and try
+	// to assign it to the member.
+	await expect(
+		importWorkflow([
+			'--input=./test/integration/commands/importWorkflows/combined-with-update/updated.json',
+			`--userId=${member.id}`,
+		]),
+	).rejects.toThrowError(
+		`The credential with id "998" is already owned by the user with the id "${owner.id}". It can't be re-owned by the user with the id "${member.id}"`,
+	);
+
+	//
+	// ASSERT
+	//
+	const after = {
+		workflows: await getAllWorkflows(),
+		sharings: await getAllSharedWorkflows(),
+	};
+	// Make sure there is no new sharing and that the name DID NOT change.
+	expect(after).toMatchObject({
+		workflows: [expect.objectContaining({ id: '998', name: 'active-workflow' })],
+		sharings: [
+			expect.objectContaining({
+				workflowId: '998',
+				userId: owner.id,
+				role: 'workflow:owner',
+			}),
+		],
+	});
+});
+
+test("only update the workflow, don't create or update the owner if `--userId` is not passed", async () => {
+	//
+	// ARRANGE
+	//
+	await createOwner();
+	const member = await createMember();
+
+	// Import workflow the first time, assigning it to a member.
+	await importWorkflow([
+		'--input=./test/integration/commands/importWorkflows/combined-with-update/original.json',
+		`--userId=${member.id}`,
+	]);
+
+	const before = {
+		workflows: await getAllWorkflows(),
+		sharings: await getAllSharedWorkflows(),
+	};
+	// Make sure the workflow and sharing have been created.
+	expect(before).toMatchObject({
+		workflows: [expect.objectContaining({ id: '998', name: 'active-workflow' })],
+		sharings: [
+			expect.objectContaining({
+				workflowId: '998',
+				userId: member.id,
+				role: 'workflow:owner',
+			}),
+		],
+	});
+
+	//
+	// ACT
+	//
+	// Import the same workflow again, with another name but the same ID.
+	await importWorkflow([
+		'--input=./test/integration/commands/importWorkflows/combined-with-update/updated.json',
+	]);
+
+	//
+	// ASSERT
+	//
+	const after = {
+		workflows: await getAllWorkflows(),
+		sharings: await getAllSharedWorkflows(),
+	};
+	// Make sure there is no new sharing and that the name changed.
+	expect(after).toMatchObject({
+		workflows: [expect.objectContaining({ id: '998', name: 'active-workflow updated' })],
+		sharings: [
+			expect.objectContaining({
+				workflowId: '998',
+				userId: member.id,
+				role: 'workflow:owner',
+			}),
+		],
+	});
 });
@@ -0,0 +1,14 @@
+[
+	{
+		"createdAt": "2023-07-10T14:50:49.193Z",
+		"updatedAt": "2023-10-27T13:34:42.917Z",
+		"id": "123",
+		"name": "cred-aws-prod",
+		"data": {
+			"region": "eu-west-1",
+			"accessKeyId": "999999999999",
+			"secretAccessKey": "aaaaaaaaaaaaa"
+		},
+		"type": "aws"
+	}
+]

@@ -0,0 +1,12 @@
+{
+	"createdAt": "2023-07-10T14:50:49.193Z",
+	"updatedAt": "2023-10-27T13:34:42.917Z",
+	"id": "123",
+	"name": "cred-aws-test",
+	"data": {
+		"region": "eu-west-1",
+		"accessKeyId": "999999999999",
+		"secretAccessKey": "aaaaaaaaaaaaa"
+	},
+	"type": "aws"
+}
@@ -0,0 +1,81 @@
+[
+	{
+		"name": "active-workflow",
+		"nodes": [
+			{
+				"parameters": {
+					"path": "e20b4873-fcf7-4bce-88fc-a1a56d66b138",
+					"responseMode": "responseNode",
+					"options": {}
+				},
+				"id": "c26d8782-bd57-43d0-86dc-0c618a7e4024",
+				"name": "Webhook",
+				"type": "n8n-nodes-base.webhook",
+				"typeVersion": 1,
+				"position": [800, 580],
+				"webhookId": "e20b4873-fcf7-4bce-88fc-a1a56d66b138"
+			},
+			{
+				"parameters": {
+					"values": {
+						"boolean": [
+							{
+								"name": "hooked",
+								"value": true
+							}
+						]
+					},
+					"options": {}
+				},
+				"id": "9701b1ef-9ab0-432a-b086-cf76981b097d",
+				"name": "Set",
+				"type": "n8n-nodes-base.set",
+				"typeVersion": 1,
+				"position": [1020, 580]
+			},
+			{
+				"parameters": {
+					"options": {}
+				},
+				"id": "d0f086b8-c2b2-4404-b347-95d3f91e555a",
+				"name": "Respond to Webhook",
+				"type": "n8n-nodes-base.respondToWebhook",
+				"typeVersion": 1,
+				"position": [1240, 580]
+			}
+		],
+		"pinData": {},
+		"connections": {
+			"Webhook": {
+				"main": [
+					[
+						{
+							"node": "Set",
+							"type": "main",
+							"index": 0
+						}
+					]
+				]
+			},
+			"Set": {
+				"main": [
+					[
+						{
+							"node": "Respond to Webhook",
+							"type": "main",
+							"index": 0
+						}
+					]
+				]
+			}
+		},
+		"active": true,
+		"settings": {},
+		"versionId": "40a70df1-740f-47e7-8e16-50a0bcd5b70f",
+		"id": "998",
+		"meta": {
+			"instanceId": "95977dc4769098fc608439605527ee75d23f10d551aed6b87a3eea1a252c0ba9"
+		},
+		"tags": []
+	}
+]

@@ -0,0 +1,81 @@
+[
+	{
+		"name": "active-workflow updated",
+		"nodes": [
+			{
+				"parameters": {
+					"path": "e20b4873-fcf7-4bce-88fc-a1a56d66b138",
+					"responseMode": "responseNode",
+					"options": {}
+				},
+				"id": "c26d8782-bd57-43d0-86dc-0c618a7e4024",
+				"name": "Webhook",
+				"type": "n8n-nodes-base.webhook",
+				"typeVersion": 1,
+				"position": [800, 580],
+				"webhookId": "e20b4873-fcf7-4bce-88fc-a1a56d66b138"
+			},
+			{
+				"parameters": {
+					"values": {
+						"boolean": [
+							{
+								"name": "hooked",
+								"value": true
+							}
+						]
+					},
+					"options": {}
+				},
+				"id": "9701b1ef-9ab0-432a-b086-cf76981b097d",
+				"name": "Set",
+				"type": "n8n-nodes-base.set",
+				"typeVersion": 1,
+				"position": [1020, 580]
+			},
+			{
+				"parameters": {
+					"options": {}
+				},
+				"id": "d0f086b8-c2b2-4404-b347-95d3f91e555a",
+				"name": "Respond to Webhook",
+				"type": "n8n-nodes-base.respondToWebhook",
+				"typeVersion": 1,
+				"position": [1240, 580]
+			}
+		],
+		"pinData": {},
+		"connections": {
+			"Webhook": {
+				"main": [
+					[
+						{
+							"node": "Set",
+							"type": "main",
+							"index": 0
+						}
+					]
+				]
+			},
+			"Set": {
+				"main": [
+					[
+						{
+							"node": "Respond to Webhook",
+							"type": "main",
+							"index": 0
+						}
+					]
+				]
+			}
+		},
+		"active": true,
+		"settings": {},
+		"versionId": "40a70df1-740f-47e7-8e16-50a0bcd5b70f",
+		"id": "998",
+		"meta": {
+			"instanceId": "95977dc4769098fc608439605527ee75d23f10d551aed6b87a3eea1a252c0ba9"
+		},
+		"tags": []
+	}
+]
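The two workflow fixtures above differ only in `name` while sharing `id: "998"`, which is what lets the tests exercise the update path instead of a fresh insert. A loose TypeScript description of the fixture shape, simplified relative to n8n's real `IWorkflowToImport`:

interface WorkflowFixtureNode {
	id: string;
	name: string;
	type: string; // e.g. 'n8n-nodes-base.webhook'
	typeVersion: number;
	position: [number, number];
	parameters: Record<string, unknown>;
	webhookId?: string;
}

interface WorkflowFixture {
	id: string; // '998' in both files; the same id means "update", not "create"
	name: string; // the only field that differs between original.json and updated.json
	active: boolean; // true in the fixture, deactivated on import
	nodes: WorkflowFixtureNode[];
	connections: Record<string, { main: Array<Array<{ node: string; type: string; index: number }>> }>;
	settings: Record<string, unknown>;
	pinData: Record<string, unknown>;
	versionId: string;
	meta?: { instanceId: string };
	tags: unknown[];
}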
@@ -12,8 +12,13 @@ import { SharedWorkflowRepository } from '@/databases/repositories/sharedWorkflo
 import * as testDb from './shared/testDb';
 import { mockInstance } from '../shared/mocking';
-import { createOwner } from './shared/db/users';
-import { createWorkflow, getWorkflowById } from './shared/db/workflows';
+import { createMember, createOwner } from './shared/db/users';
+import {
+	createWorkflow,
+	getAllSharedWorkflows,
+	getWorkflowById,
+	newWorkflow,
+} from './shared/db/workflows';
 
 import type { User } from '@db/entities/User';
 
@@ -57,7 +62,7 @@ describe('ImportService', () => {
 	});
 
 	test('should make user owner of imported workflow', async () => {
-		const workflowToImport = await createWorkflow();
+		const workflowToImport = newWorkflow();
 
 		await importService.importWorkflows([workflowToImport], owner.id);
 
@@ -68,6 +73,23 @@ describe('ImportService', () => {
 		expect(dbSharing.userId).toBe(owner.id);
 	});
 
+	test('should not change the owner if it already exists', async () => {
+		const member = await createMember();
+		const workflowToImport = await createWorkflow(undefined, owner);
+
+		await importService.importWorkflows([workflowToImport], member.id);
+
+		const sharings = await getAllSharedWorkflows();
+
+		expect(sharings).toMatchObject([
+			expect.objectContaining({
+				workflowId: workflowToImport.id,
+				userId: owner.id,
+				role: 'workflow:owner',
+			}),
+		]);
+	});
+
 	test('should deactivate imported workflow if active', async () => {
 		const workflowToImport = await createWorkflow({ active: true });
 
@@ -91,3 +91,7 @@ export async function getAllCredentials() {
 
 export const getCredentialById = async (id: string) =>
 	await Container.get(CredentialsRepository).findOneBy({ id });
+
+export async function getAllSharedCredentials() {
+	return await Container.get(SharedCredentialsRepository).find();
+}

@@ -19,12 +19,7 @@ export async function createManyWorkflows(
 	return await Promise.all(workflowRequests);
 }
 
-/**
- * Store a workflow in the DB (without a trigger) and optionally assign it to a user.
- * @param attributes workflow attributes
- * @param user user to assign the workflow to
- */
-export async function createWorkflow(attributes: Partial<WorkflowEntity> = {}, user?: User) {
+export function newWorkflow(attributes: Partial<WorkflowEntity> = {}): WorkflowEntity {
 	const { active, name, nodes, connections, versionId } = attributes;
 
 	const workflowEntity = Container.get(WorkflowRepository).create({
@@ -45,7 +40,16 @@ export async function createWorkflow(attributes: Partial<WorkflowEntity> = {}, u
 		...attributes,
 	});
 
-	const workflow = await Container.get(WorkflowRepository).save(workflowEntity);
+	return workflowEntity;
+}
+
+/**
+ * Store a workflow in the DB (without a trigger) and optionally assign it to a user.
+ * @param attributes workflow attributes
+ * @param user user to assign the workflow to
+ */
+export async function createWorkflow(attributes: Partial<WorkflowEntity> = {}, user?: User) {
+	const workflow = await Container.get(WorkflowRepository).save(newWorkflow(attributes));
 
 	if (user) {
 		await Container.get(SharedWorkflowRepository).save({
@@ -121,5 +125,9 @@ export async function getAllWorkflows() {
 	return await Container.get(WorkflowRepository).find();
 }
 
+export async function getAllSharedWorkflows() {
+	return await Container.get(SharedWorkflowRepository).find();
+}
+
 export const getWorkflowById = async (id: string) =>
 	await Container.get(WorkflowRepository).findOneBy({ id });
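Splitting the helper into `newWorkflow` (build an unsaved entity) and `createWorkflow` (build and persist, optionally sharing it) lets tests choose whether the entity should already exist in the database. The same factory split in reduced form, with an in-memory array standing in for the repositories:

interface Workflow { id: string; name: string; active: boolean }

const db: Workflow[] = [];
let nextId = 1;

// Build an entity without touching the database.
function newWorkflow(attributes: Partial<Workflow> = {}): Workflow {
	return { id: String(nextId++), name: 'test-workflow', active: false, ...attributes };
}

// Build and persist in one step, as the original helper did.
function createWorkflow(attributes: Partial<Workflow> = {}): Workflow {
	const workflow = newWorkflow(attributes);
	db.push(workflow);
	return workflow;
}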