mirror of
https://github.com/n8n-io/n8n.git
synced 2024-11-14 08:34:07 -08:00
cd08c8e4c6
Based on #7065 | Story: https://linear.app/n8n/issue/PAY-771 n8n on filesystem mode marks binary data to delete on manual execution deletion, on unsaved execution completion, and on every execution pruning cycle. We later prune binary data in a separate cycle via these marker files, based on the configured TTL. In the context of introducing an S3 client to manage binary data, the filesystem mode's mark-and-prune setup is too tightly coupled to the general binary data management client interface. This PR... - Ensures the deletion of an execution causes the deletion of any binary data associated to it. This does away with the need for binary data TTL and simplifies the filesystem mode's mark-and-prune setup. - Refactors all execution deletions (including pruning) to cause soft deletions, hard-deletes soft-deleted executions based on the existing pruning config, and adjusts execution endpoints to filter out soft-deleted executions. This reduces DB load, and keeps binary data around long enough for users to access it when building workflows with unsaved executions. - Moves all execution pruning work from an execution lifecycle hook to `execution.repository.ts`. This keeps related logic in a single place. - Removes all marking logic from the binary data manager. This simplifies the interface that the S3 client will meet. - Adds basic sanity-check tests to pruning logic and execution deletion. Out of scope: - Improving existing pruning logic. - Improving existing execution repository logic. - Adjusting dir structure for filesystem mode. --------- Co-authored-by: कारतोफ्फेलस्क्रिप्ट™ <aditya@netroy.in>
644 lines
18 KiB
TypeScript
import { UserSettings } from 'n8n-core';
|
|
import type { DataSourceOptions as ConnectionOptions } from 'typeorm';
|
|
import { DataSource as Connection } from 'typeorm';
|
|
import { Container } from 'typedi';
|
|
|
|
import config from '@/config';
|
|
import * as Db from '@/Db';
|
|
import { createCredentialsFromCredentialsEntity } from '@/CredentialsHelper';
|
|
import { entities } from '@db/entities';
|
|
import { CredentialsEntity } from '@db/entities/CredentialsEntity';
|
|
import { mysqlMigrations } from '@db/migrations/mysqldb';
|
|
import { postgresMigrations } from '@db/migrations/postgresdb';
|
|
import { sqliteMigrations } from '@db/migrations/sqlite';
|
|
import { hashPassword } from '@/UserManagement/UserManagementHelper';
|
|
import { AuthIdentity } from '@db/entities/AuthIdentity';
|
|
import type { ExecutionEntity } from '@db/entities/ExecutionEntity';
|
|
import type { Role } from '@db/entities/Role';
|
|
import type { TagEntity } from '@db/entities/TagEntity';
|
|
import type { User } from '@db/entities/User';
|
|
import type { WorkflowEntity } from '@db/entities/WorkflowEntity';
|
|
import type { ICredentialsDb } from '@/Interfaces';
|
|
import { DB_INITIALIZATION_TIMEOUT } from './constants';
|
|
import { randomApiKey, randomEmail, randomName, randomString, randomValidPassword } from './random';
|
|
import type { CollectionName, CredentialPayload, PostgresSchemaSection } from './types';
|
|
import type { ExecutionData } from '@db/entities/ExecutionData';
|
|
import { generateNanoId } from '@db/utils/generators';
|
|
import { RoleService } from '@/services/role.service';
|
|
import { VariablesService } from '@/environments/variables/variables.service';
|
|
import { TagRepository, WorkflowTagMappingRepository } from '@/databases/repositories';
|
|
import { separate } from '@/utils';
|
|
|
|
import { randomPassword } from '@/Ldap/helpers';
|
|
import { TOTPService } from '@/Mfa/totp.service';
|
|
import { MfaService } from '@/Mfa/mfa.service';
|
|
|
|
/** Database engines (besides SQLite) the test setup can bootstrap. */
export type TestDBType = 'postgres' | 'mysql';

/** Common name prefix for per-suite test databases, so they are easy to identify and drop. */
export const testDbPrefix = 'n8n_test_';
|
|
|
|
export function getPostgresSchemaSection(
|
|
schema = config.getSchema(),
|
|
): PostgresSchemaSection | null {
|
|
for (const [key, value] of Object.entries(schema)) {
|
|
if (key === 'postgresdb') {
|
|
return value._cvtProperties;
|
|
}
|
|
}
|
|
return null;
|
|
}
|
|
|
|
/**
 * Initialize one test DB per suite run, with bootstrap connection if needed.
 *
 * Picks the engine from `database.type` config; generates a unique DB name
 * per run so parallel suites do not collide. Always runs migrations at the end.
 */
export async function init() {
	// DB setup (connect + migrate) can be slow; extend Jest's default timeout.
	jest.setTimeout(DB_INITIALIZATION_TIMEOUT);
	const dbType = config.getEnv('database.type');
	// Unique name: prefix + random string + timestamp.
	const testDbName = `${testDbPrefix}${randomString(6, 10)}_${Date.now()}`;

	if (dbType === 'sqlite') {
		// no bootstrap connection required
		await Db.init(getSqliteOptions({ name: testDbName }));
	} else if (dbType === 'postgresdb') {
		let bootstrapPostgres;
		const pgOptions = getBootstrapDBOptions('postgres');

		try {
			// Connect to the default 'postgres' DB just to issue CREATE DATABASE.
			bootstrapPostgres = await new Connection(pgOptions).initialize();
		} catch (error) {
			const pgConfig = getPostgresSchemaSection();

			if (!pgConfig) throw new Error("Failed to find config schema section for 'postgresdb'");

			// Build an actionable error listing current values and the env vars to fix them.
			// NOTE(review): this echoes the DB password to the console — acceptable for
			// a test helper, but confirm it never runs in CI logs that are public.
			const message = [
				"ERROR: Failed to connect to Postgres default DB 'postgres'",
				'Please review your Postgres connection options:',
				`host: ${pgOptions.host} | port: ${pgOptions.port} | schema: ${pgOptions.schema} | username: ${pgOptions.username} | password: ${pgOptions.password}`,
				'Fix by setting correct values via environment variables:',
				`${pgConfig.host.env} | ${pgConfig.port.env} | ${pgConfig.schema.env} | ${pgConfig.user.env} | ${pgConfig.password.env}`,
				'Otherwise, make sure your Postgres server is running.',
			].join('\n');

			console.error(message);

			// Abort the whole test process: nothing can run without a DB.
			process.exit(1);
		}

		await bootstrapPostgres.query(`CREATE DATABASE ${testDbName}`);
		await bootstrapPostgres.destroy();

		await Db.init(getDBOptions('postgres', testDbName));
	} else if (dbType === 'mysqldb' || dbType === 'mariadb') {
		const bootstrapMysql = await new Connection(getBootstrapDBOptions('mysql')).initialize();
		await bootstrapMysql.query(`CREATE DATABASE ${testDbName} DEFAULT CHARACTER SET utf8mb4`);
		await bootstrapMysql.destroy();

		await Db.init(getDBOptions('mysql', testDbName));
	}
	// NOTE(review): an unrecognized dbType falls through with no Db.init — the
	// migrate call below would then fail; confirm config validation rules this out.

	await Db.migrate();
}
|
|
|
|
/**
 * Drop test DB, closing bootstrap connection if existing.
 *
 * NOTE(review): despite the summary above, this only closes the main
 * connection — no DROP DATABASE is issued here, so non-sqlite test DBs
 * are left behind. Confirm whether that is intentional.
 */
export async function terminate() {
	await Db.close();
}
|
|
|
|
/**
|
|
* Truncate specific DB tables in a test DB.
|
|
*/
|
|
export async function truncate(collections: CollectionName[]) {
|
|
const [tag, rest] = separate(collections, (c) => c === 'Tag');
|
|
|
|
if (tag) {
|
|
await Container.get(TagRepository).delete({});
|
|
await Container.get(WorkflowTagMappingRepository).delete({});
|
|
}
|
|
|
|
for (const collection of rest) {
|
|
await Db.collections[collection].delete({});
|
|
}
|
|
}
|
|
|
|
// ----------------------------------
|
|
// credential creation
|
|
// ----------------------------------
|
|
|
|
/**
|
|
* Save a credential to the test DB, sharing it with a user.
|
|
*/
|
|
export async function saveCredential(
|
|
credentialPayload: CredentialPayload,
|
|
{ user, role }: { user: User; role: Role },
|
|
) {
|
|
const newCredential = new CredentialsEntity();
|
|
|
|
Object.assign(newCredential, credentialPayload);
|
|
|
|
const encryptedData = await encryptCredentialData(newCredential);
|
|
|
|
Object.assign(newCredential, encryptedData);
|
|
|
|
const savedCredential = await Db.collections.Credentials.save(newCredential);
|
|
|
|
savedCredential.data = newCredential.data;
|
|
|
|
await Db.collections.SharedCredentials.save({
|
|
user,
|
|
credentials: savedCredential,
|
|
role,
|
|
});
|
|
|
|
return savedCredential;
|
|
}
|
|
|
|
export async function shareCredentialWithUsers(credential: CredentialsEntity, users: User[]) {
|
|
const role = await Container.get(RoleService).findCredentialUserRole();
|
|
const newSharedCredentials = users.map((user) =>
|
|
Db.collections.SharedCredentials.create({
|
|
userId: user.id,
|
|
credentialsId: credential.id,
|
|
roleId: role?.id,
|
|
}),
|
|
);
|
|
return Db.collections.SharedCredentials.save(newSharedCredentials);
|
|
}
|
|
|
|
export function affixRoleToSaveCredential(role: Role) {
|
|
return async (credentialPayload: CredentialPayload, { user }: { user: User }) =>
|
|
saveCredential(credentialPayload, { user, role });
|
|
}
|
|
|
|
// ----------------------------------
|
|
// user creation
|
|
// ----------------------------------
|
|
|
|
/**
|
|
* Store a user in the DB, defaulting to a `member`.
|
|
*/
|
|
export async function createUser(attributes: Partial<User> = {}): Promise<User> {
|
|
const { email, password, firstName, lastName, globalRole, ...rest } = attributes;
|
|
const user: Partial<User> = {
|
|
email: email ?? randomEmail(),
|
|
password: await hashPassword(password ?? randomValidPassword()),
|
|
firstName: firstName ?? randomName(),
|
|
lastName: lastName ?? randomName(),
|
|
globalRoleId: (globalRole ?? (await getGlobalMemberRole())).id,
|
|
globalRole,
|
|
...rest,
|
|
};
|
|
|
|
return Db.collections.User.save(user);
|
|
}
|
|
|
|
export async function createLdapUser(attributes: Partial<User>, ldapId: string): Promise<User> {
|
|
const user = await createUser(attributes);
|
|
await Db.collections.AuthIdentity.save(AuthIdentity.create(user, ldapId, 'ldap'));
|
|
return user;
|
|
}
|
|
|
|
export async function createUserWithMfaEnabled(
|
|
data: { numberOfRecoveryCodes: number } = { numberOfRecoveryCodes: 10 },
|
|
) {
|
|
const encryptionKey = await UserSettings.getEncryptionKey();
|
|
|
|
const email = randomEmail();
|
|
const password = randomPassword();
|
|
|
|
const toptService = new TOTPService();
|
|
|
|
const secret = toptService.generateSecret();
|
|
|
|
const mfaService = new MfaService(Db.collections.User, toptService, encryptionKey);
|
|
|
|
const recoveryCodes = mfaService.generateRecoveryCodes(data.numberOfRecoveryCodes);
|
|
|
|
const { encryptedSecret, encryptedRecoveryCodes } = mfaService.encryptSecretAndRecoveryCodes(
|
|
secret,
|
|
recoveryCodes,
|
|
);
|
|
|
|
return {
|
|
user: await createUser({
|
|
mfaEnabled: true,
|
|
password,
|
|
email,
|
|
mfaSecret: encryptedSecret,
|
|
mfaRecoveryCodes: encryptedRecoveryCodes,
|
|
}),
|
|
rawPassword: password,
|
|
rawSecret: secret,
|
|
rawRecoveryCodes: recoveryCodes,
|
|
};
|
|
}
|
|
|
|
export async function createOwner() {
|
|
return createUser({ globalRole: await getGlobalOwnerRole() });
|
|
}
|
|
|
|
export async function createMember() {
|
|
return createUser({ globalRole: await getGlobalMemberRole() });
|
|
}
|
|
|
|
export async function createUserShell(globalRole: Role): Promise<User> {
|
|
if (globalRole.scope !== 'global') {
|
|
throw new Error(`Invalid role received: ${JSON.stringify(globalRole)}`);
|
|
}
|
|
|
|
const shell: Partial<User> = { globalRoleId: globalRole.id };
|
|
|
|
if (globalRole.name !== 'owner') {
|
|
shell.email = randomEmail();
|
|
}
|
|
|
|
return Db.collections.User.save(shell);
|
|
}
|
|
|
|
/**
|
|
* Create many users in the DB, defaulting to a `member`.
|
|
*/
|
|
export async function createManyUsers(
|
|
amount: number,
|
|
attributes: Partial<User> = {},
|
|
): Promise<User[]> {
|
|
let { email, password, firstName, lastName, globalRole, ...rest } = attributes;
|
|
if (!globalRole) {
|
|
globalRole = await getGlobalMemberRole();
|
|
}
|
|
|
|
const users = await Promise.all(
|
|
[...Array(amount)].map(async () =>
|
|
Db.collections.User.create({
|
|
email: email ?? randomEmail(),
|
|
password: await hashPassword(password ?? randomValidPassword()),
|
|
firstName: firstName ?? randomName(),
|
|
lastName: lastName ?? randomName(),
|
|
globalRole,
|
|
...rest,
|
|
}),
|
|
),
|
|
);
|
|
|
|
return Db.collections.User.save(users);
|
|
}
|
|
|
|
export async function addApiKey(user: User): Promise<User> {
|
|
user.apiKey = randomApiKey();
|
|
return Db.collections.User.save(user);
|
|
}
|
|
|
|
// ----------------------------------
|
|
// role fetchers
|
|
// ----------------------------------
|
|
|
|
export async function getGlobalOwnerRole() {
|
|
return Container.get(RoleService).findGlobalOwnerRole();
|
|
}
|
|
|
|
export async function getGlobalMemberRole() {
|
|
return Container.get(RoleService).findGlobalMemberRole();
|
|
}
|
|
|
|
export async function getWorkflowOwnerRole() {
|
|
return Container.get(RoleService).findWorkflowOwnerRole();
|
|
}
|
|
|
|
export async function getWorkflowEditorRole() {
|
|
return Container.get(RoleService).findWorkflowEditorRole();
|
|
}
|
|
|
|
export async function getCredentialOwnerRole() {
|
|
return Container.get(RoleService).findCredentialOwnerRole();
|
|
}
|
|
|
|
/**
 * Fetch the main roles in parallel, as a fixed 4-tuple:
 * global owner, global member, workflow owner, credential owner.
 *
 * NOTE(review): despite the name, the workflow editor role is not included —
 * confirm callers rely on exactly this 4-tuple before extending.
 */
export async function getAllRoles() {
	return Promise.all([
		getGlobalOwnerRole(),
		getGlobalMemberRole(),
		getWorkflowOwnerRole(),
		getCredentialOwnerRole(),
	]);
}
|
|
|
|
export const getAllUsers = async () =>
|
|
Db.collections.User.find({
|
|
relations: ['globalRole', 'authIdentities'],
|
|
});
|
|
|
|
export const getLdapIdentities = async () =>
|
|
Db.collections.AuthIdentity.find({
|
|
where: { providerType: 'ldap' },
|
|
relations: ['user'],
|
|
});
|
|
|
|
// ----------------------------------
|
|
// Execution helpers
|
|
// ----------------------------------
|
|
|
|
export async function createManyExecutions(
|
|
amount: number,
|
|
workflow: WorkflowEntity,
|
|
callback: (workflow: WorkflowEntity) => Promise<ExecutionEntity>,
|
|
) {
|
|
const executionsRequests = [...Array(amount)].map(async (_) => callback(workflow));
|
|
return Promise.all(executionsRequests);
|
|
}
|
|
|
|
/**
 * Store an execution in the DB and assign it to a workflow.
 *
 * Defaults produce a finished manual execution that started and stopped "now".
 * The execution payload is stored in the separate `ExecutionData` table.
 */
async function createExecution(
	attributes: Partial<ExecutionEntity & ExecutionData>,
	workflow: WorkflowEntity,
) {
	const { data, finished, mode, startedAt, stoppedAt, waitTill, status } = attributes;

	const execution = await Db.collections.Execution.save({
		finished: finished ?? true,
		mode: mode ?? 'manual',
		startedAt: startedAt ?? new Date(),
		// NOTE(review): `workflow` is typed as required, so this guard looks
		// purely defensive — confirm whether any caller passes undefined.
		...(workflow !== undefined && { workflowId: workflow.id }),
		stoppedAt: stoppedAt ?? new Date(),
		waitTill: waitTill ?? null,
		status,
	});

	// Payload rows are keyed by execution id; '[]' is the empty-data default.
	await Db.collections.ExecutionData.save({
		data: data ?? '[]',
		workflowData: workflow ?? {},
		executionId: execution.id,
	});

	return execution;
}
|
|
|
|
/**
|
|
* Store a successful execution in the DB and assign it to a workflow.
|
|
*/
|
|
export async function createSuccessfulExecution(workflow: WorkflowEntity) {
|
|
return createExecution({ finished: true, status: 'success' }, workflow);
|
|
}
|
|
|
|
/**
|
|
* Store an error execution in the DB and assign it to a workflow.
|
|
*/
|
|
export async function createErrorExecution(workflow: WorkflowEntity) {
|
|
return createExecution({ finished: false, stoppedAt: new Date(), status: 'failed' }, workflow);
|
|
}
|
|
|
|
/**
|
|
* Store a waiting execution in the DB and assign it to a workflow.
|
|
*/
|
|
export async function createWaitingExecution(workflow: WorkflowEntity) {
|
|
return createExecution({ finished: false, waitTill: new Date(), status: 'waiting' }, workflow);
|
|
}
|
|
|
|
// ----------------------------------
|
|
// Tags
|
|
// ----------------------------------
|
|
|
|
export async function createTag(attributes: Partial<TagEntity> = {}, workflow?: WorkflowEntity) {
|
|
const { name } = attributes;
|
|
|
|
const tag = await Container.get(TagRepository).save({
|
|
id: generateNanoId(),
|
|
name: name ?? randomName(),
|
|
...attributes,
|
|
});
|
|
|
|
if (workflow) {
|
|
const mappingRepository = Container.get(WorkflowTagMappingRepository);
|
|
|
|
const mapping = mappingRepository.create({ tagId: tag.id, workflowId: workflow.id });
|
|
|
|
await mappingRepository.save(mapping);
|
|
}
|
|
|
|
return tag;
|
|
}
|
|
|
|
// ----------------------------------
|
|
// Workflow helpers
|
|
// ----------------------------------
|
|
|
|
export async function createManyWorkflows(
|
|
amount: number,
|
|
attributes: Partial<WorkflowEntity> = {},
|
|
user?: User,
|
|
) {
|
|
const workflowRequests = [...Array(amount)].map(async (_) => createWorkflow(attributes, user));
|
|
return Promise.all(workflowRequests);
|
|
}
|
|
|
|
/**
|
|
* Store a workflow in the DB (without a trigger) and optionally assign it to a user.
|
|
* @param attributes workflow attributes
|
|
* @param user user to assign the workflow to
|
|
*/
|
|
export async function createWorkflow(attributes: Partial<WorkflowEntity> = {}, user?: User) {
|
|
const { active, name, nodes, connections } = attributes;
|
|
|
|
const workflowEntity = Db.collections.Workflow.create({
|
|
active: active ?? false,
|
|
name: name ?? 'test workflow',
|
|
nodes: nodes ?? [
|
|
{
|
|
id: 'uuid-1234',
|
|
name: 'Start',
|
|
parameters: {},
|
|
position: [-20, 260],
|
|
type: 'n8n-nodes-base.start',
|
|
typeVersion: 1,
|
|
},
|
|
],
|
|
connections: connections ?? {},
|
|
...attributes,
|
|
});
|
|
|
|
const workflow = await Db.collections.Workflow.save(workflowEntity);
|
|
|
|
if (user) {
|
|
await Db.collections.SharedWorkflow.save({
|
|
user,
|
|
workflow,
|
|
role: await getWorkflowOwnerRole(),
|
|
});
|
|
}
|
|
return workflow;
|
|
}
|
|
|
|
export async function shareWorkflowWithUsers(workflow: WorkflowEntity, users: User[]) {
|
|
const role = await getWorkflowEditorRole();
|
|
const sharedWorkflows = users.map((user) => ({
|
|
user,
|
|
workflow,
|
|
role,
|
|
}));
|
|
return Db.collections.SharedWorkflow.save(sharedWorkflows);
|
|
}
|
|
|
|
/**
 * Store a workflow in the DB (with a trigger) and optionally assign it to a user.
 *
 * Fixture shape: Start node, a Cron trigger firing every minute, and a Set
 * node wired after the Cron. Passed-in attributes override these defaults.
 *
 * @param user user to assign the workflow to
 */
export async function createWorkflowWithTrigger(
	attributes: Partial<WorkflowEntity> = {},
	user?: User,
) {
	const workflow = await createWorkflow(
		{
			nodes: [
				{
					id: 'uuid-1',
					parameters: {},
					name: 'Start',
					type: 'n8n-nodes-base.start',
					typeVersion: 1,
					position: [240, 300],
				},
				{
					// The trigger: fires once per minute.
					id: 'uuid-2',
					parameters: { triggerTimes: { item: [{ mode: 'everyMinute' }] } },
					name: 'Cron',
					type: 'n8n-nodes-base.cron',
					typeVersion: 1,
					position: [500, 300],
				},
				{
					id: 'uuid-3',
					parameters: { options: {} },
					name: 'Set',
					type: 'n8n-nodes-base.set',
					typeVersion: 1,
					position: [780, 300],
				},
			],
			// Only Cron → Set is connected; the Start node stays unwired.
			connections: { Cron: { main: [[{ node: 'Set', type: 'main', index: 0 }]] } },
			...attributes,
		},
		user,
	);

	return workflow;
}
|
|
|
|
export async function getAllWorkflows() {
|
|
return Db.collections.Workflow.find();
|
|
}
|
|
|
|
export async function getAllExecutions() {
|
|
return Db.collections.Execution.find();
|
|
}
|
|
|
|
// ----------------------------------
|
|
// workflow sharing
|
|
// ----------------------------------
|
|
|
|
export async function getWorkflowSharing(workflow: WorkflowEntity) {
|
|
return Db.collections.SharedWorkflow.findBy({
|
|
workflowId: workflow.id,
|
|
});
|
|
}
|
|
|
|
// ----------------------------------
|
|
// variables
|
|
// ----------------------------------
|
|
|
|
export async function createVariable(key: string, value: string) {
|
|
const result = await Db.collections.Variables.save({
|
|
id: generateNanoId(),
|
|
key,
|
|
value,
|
|
});
|
|
await Container.get(VariablesService).updateCache();
|
|
return result;
|
|
}
|
|
|
|
export async function getVariableByKey(key: string) {
|
|
return Db.collections.Variables.findOne({
|
|
where: {
|
|
key,
|
|
},
|
|
});
|
|
}
|
|
|
|
export async function getVariableById(id: string) {
|
|
return Db.collections.Variables.findOne({
|
|
where: {
|
|
id,
|
|
},
|
|
});
|
|
}
|
|
|
|
// ----------------------------------
|
|
// connection options
|
|
// ----------------------------------
|
|
|
|
/**
|
|
* Generate options for an in-memory sqlite database connection,
|
|
* one per test suite run.
|
|
*/
|
|
const getSqliteOptions = ({ name }: { name: string }): ConnectionOptions => {
|
|
return {
|
|
name,
|
|
type: 'sqlite',
|
|
database: ':memory:',
|
|
entityPrefix: config.getEnv('database.tablePrefix'),
|
|
dropSchema: true,
|
|
migrations: sqliteMigrations,
|
|
migrationsTableName: 'migrations',
|
|
migrationsRun: false,
|
|
enableWAL: config.getEnv('database.sqlite.enableWAL'),
|
|
};
|
|
};
|
|
|
|
const baseOptions = (type: TestDBType) => ({
|
|
host: config.getEnv(`database.${type}db.host`),
|
|
port: config.getEnv(`database.${type}db.port`),
|
|
username: config.getEnv(`database.${type}db.user`),
|
|
password: config.getEnv(`database.${type}db.password`),
|
|
entityPrefix: config.getEnv('database.tablePrefix'),
|
|
schema: type === 'postgres' ? config.getEnv('database.postgresdb.schema') : undefined,
|
|
});
|
|
|
|
/**
|
|
* Generate options for a bootstrap DB connection, to create and drop test databases.
|
|
*/
|
|
export const getBootstrapDBOptions = (type: TestDBType) => ({
|
|
type,
|
|
name: type,
|
|
database: type,
|
|
...baseOptions(type),
|
|
});
|
|
|
|
const getDBOptions = (type: TestDBType, name: string) => ({
|
|
type,
|
|
name,
|
|
database: name,
|
|
...baseOptions(type),
|
|
dropSchema: true,
|
|
migrations: type === 'postgres' ? postgresMigrations : mysqlMigrations,
|
|
migrationsRun: false,
|
|
migrationsTableName: 'migrations',
|
|
entities: Object.values(entities),
|
|
synchronize: false,
|
|
logging: false,
|
|
});
|
|
|
|
// ----------------------------------
|
|
// encryption
|
|
// ----------------------------------
|
|
|
|
/**
 * Run a credential entity's `data` through the core credential's `setData`
 * with the instance encryption key, returning the DB-ready representation.
 */
async function encryptCredentialData(credential: CredentialsEntity) {
	const encryptionKey = await UserSettings.getEncryptionKey();

	const coreCredential = createCredentialsFromCredentialsEntity(credential, true);

	// @ts-ignore
	// NOTE(review): the ignore above hides a type mismatch on setData's
	// arguments — prefer @ts-expect-error with a reason once confirmed.
	coreCredential.setData(credential.data, encryptionKey);

	return coreCredential.getDataToSave() as ICredentialsDb;
}
|