Mirror of https://github.com/n8n-io/n8n.git, synced 2024-11-10 06:34:05 -08:00
refactor(core): Delete more redundant code across migrations (Part 1) (no-changelog) (#6691)
This commit is contained in: parent 7b27fa5898, commit b7ca27afcf
@ -0,0 +1,48 @@
import type { WorkflowEntity } from '@db/entities/WorkflowEntity';
import type { MigrationContext, ReversibleMigration } from '@db/types';

export class UniqueWorkflowNames1620821879465 implements ReversibleMigration {
  protected indexSuffix = '943d8f922be094eb507cb9a7f9';

  async up({ isMysql, escape, runQuery }: MigrationContext) {
    const tableName = escape.tableName('workflow_entity');
    const workflowNames: Array<Pick<WorkflowEntity, 'name'>> = await runQuery(
      `SELECT name FROM ${tableName}`,
    );

    for (const { name } of workflowNames) {
      const duplicates: Array<Pick<WorkflowEntity, 'id' | 'name'>> = await runQuery(
        `SELECT id, name FROM ${tableName} WHERE name = :name ORDER BY createdAt ASC`,
        { name },
      );

      if (duplicates.length > 1) {
        await Promise.all(
          duplicates.map(async (workflow, index) => {
            if (index === 0) return;
            return runQuery(
              `UPDATE ${tableName} SET name = :name WHERE id = :id`,
              { name: `${workflow.name} ${index + 1}` },
              { id: workflow.id },
            );
          }),
        );
      }
    }

    const indexName = escape.indexName(this.indexSuffix);
    await runQuery(
      isMysql
        ? `ALTER TABLE ${tableName} ADD UNIQUE INDEX ${indexName} (${escape.columnName('name')})`
        : `CREATE UNIQUE INDEX ${indexName} ON ${tableName} ("name")`,
    );
  }

  async down({ isMysql, escape, runQuery }: MigrationContext) {
    const tableName = escape.tableName('workflow_entity');
    const indexName = escape.indexName(this.indexSuffix);
    await runQuery(
      isMysql ? `ALTER TABLE ${tableName} DROP INDEX ${indexName}` : `DROP INDEX ${indexName}`,
    );
  }
}
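As a quick illustration of the rename pass in `up()` (a hypothetical sketch, not part of the commit): duplicates are ordered by `createdAt`, the oldest keeps its name, and later ones get a numeric suffix before the unique index is created.

// Hypothetical sketch of the rename rule applied to three duplicates,
// already ordered by createdAt ASC. Index 0 is left untouched.
const duplicates = ['My Workflow', 'My Workflow', 'My Workflow'];
const renamed = duplicates.map((name, index) => (index === 0 ? name : `${name} ${index + 1}`));
console.log(renamed); // ['My Workflow', 'My Workflow 2', 'My Workflow 3']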
@ -0,0 +1,258 @@
import type { IWorkflowBase } from 'n8n-workflow';
import type { CredentialsEntity } from '@db/entities/CredentialsEntity';
import type { WorkflowEntity } from '@db/entities/WorkflowEntity';
import type { MigrationContext, ReversibleMigration } from '@db/types';

type Credential = Pick<CredentialsEntity, 'id' | 'name' | 'type'>;
type ExecutionWithData = { id: string; workflowData: string | IWorkflowBase };
type Workflow = Pick<WorkflowEntity, 'id'> & { nodes: string | WorkflowEntity['nodes'] };

// replacing the credentials in workflows and execution
// `nodeType: name` changes to `nodeType: { id, name }`

export class UpdateWorkflowCredentials1630330987096 implements ReversibleMigration {
  async up({ dbType, escape, parseJson, runQuery, runInBatches }: MigrationContext) {
    const credentialsTable = escape.tableName('credentials_entity');
    const workflowsTable = escape.tableName('workflow_entity');
    const executionsTable = escape.tableName('execution_entity');
    const dataColumn = escape.columnName('workflowData');
    const waitTillColumn = escape.columnName('waitTill');

    const credentialsEntities: Credential[] = await runQuery(
      `SELECT id, name, type FROM ${credentialsTable}`,
    );

    const workflowsQuery = `SELECT id, nodes FROM ${workflowsTable}`;
    await runInBatches<Workflow>(workflowsQuery, async (workflows) => {
      workflows.forEach(async (workflow) => {
        let credentialsUpdated = false;
        const nodes = parseJson(workflow.nodes);
        nodes.forEach((node) => {
          if (node.credentials) {
            const allNodeCredentials = Object.entries(node.credentials);
            for (const [type, name] of allNodeCredentials) {
              if (typeof name === 'string') {
                const matchingCredentials = credentialsEntities.find(
                  (credentials) => credentials.name === name && credentials.type === type,
                );
                node.credentials[type] = { id: matchingCredentials?.id ?? null, name };
                credentialsUpdated = true;
              }
            }
          }
        });
        if (credentialsUpdated) {
          await runQuery(
            `UPDATE ${workflowsTable} SET nodes = :nodes WHERE id = :id`,
            { nodes: JSON.stringify(nodes) },
            { id: workflow.id },
          );
        }
      });
    });

    const finishedValue = dbType === 'postgresdb' ? 'FALSE' : '0';
    const waitingExecutionsQuery = `
      SELECT id, ${dataColumn}
      FROM ${executionsTable}
      WHERE ${waitTillColumn} IS NOT NULL AND finished = ${finishedValue}
    `;
    await runInBatches<ExecutionWithData>(waitingExecutionsQuery, async (waitingExecutions) => {
      waitingExecutions.forEach(async (execution) => {
        let credentialsUpdated = false;
        const workflowData = parseJson(execution.workflowData);
        workflowData.nodes.forEach((node) => {
          if (node.credentials) {
            const allNodeCredentials = Object.entries(node.credentials);
            for (const [type, name] of allNodeCredentials) {
              if (typeof name === 'string') {
                const matchingCredentials = credentialsEntities.find(
                  (credentials) => credentials.name === name && credentials.type === type,
                );
                node.credentials[type] = { id: matchingCredentials?.id ?? null, name };
                credentialsUpdated = true;
              }
            }
          }
        });
        if (credentialsUpdated) {
          await runQuery(
            `UPDATE ${executionsTable}
            SET ${escape.columnName('workflowData')} = :data WHERE id = :id`,
            { data: JSON.stringify(workflowData) },
            { id: execution.id },
          );
        }
      });
    });

    const retryableExecutions: ExecutionWithData[] = await runQuery(`
      SELECT id, ${dataColumn}
      FROM ${executionsTable}
      WHERE ${waitTillColumn} IS NULL AND finished = ${finishedValue} AND mode != 'retry'
      ORDER BY ${escape.columnName('startedAt')} DESC
      LIMIT 200
    `);
    retryableExecutions.forEach(async (execution) => {
      let credentialsUpdated = false;
      const workflowData = parseJson(execution.workflowData);
      workflowData.nodes.forEach((node) => {
        if (node.credentials) {
          const allNodeCredentials = Object.entries(node.credentials);
          for (const [type, name] of allNodeCredentials) {
            if (typeof name === 'string') {
              const matchingCredentials = credentialsEntities.find(
                (credentials) => credentials.name === name && credentials.type === type,
              );
              node.credentials[type] = { id: matchingCredentials?.id ?? null, name };
              credentialsUpdated = true;
            }
          }
        }
      });
      if (credentialsUpdated) {
        await runQuery(
          `UPDATE ${executionsTable}
          SET ${escape.columnName('workflowData')} = :data WHERE id = :id`,
          { data: JSON.stringify(workflowData) },
          { id: execution.id },
        );
      }
    });
  }

  async down({ dbType, escape, parseJson, runQuery, runInBatches }: MigrationContext) {
    const credentialsTable = escape.tableName('credentials_entity');
    const workflowsTable = escape.tableName('workflow_entity');
    const executionsTable = escape.tableName('execution_entity');
    const dataColumn = escape.columnName('workflowData');
    const waitTillColumn = escape.columnName('waitTill');

    const credentialsEntities: Credential[] = await runQuery(
      `SELECT id, name, type FROM ${credentialsTable}`,
    );

    const workflowsQuery = `SELECT id, nodes FROM ${workflowsTable}`;
    await runInBatches<Workflow>(workflowsQuery, async (workflows) => {
      workflows.forEach(async (workflow) => {
        let credentialsUpdated = false;
        const nodes = parseJson(workflow.nodes);
        nodes.forEach((node) => {
          if (node.credentials) {
            const allNodeCredentials = Object.entries(node.credentials);
            for (const [type, creds] of allNodeCredentials) {
              if (typeof creds === 'object') {
                const matchingCredentials = credentialsEntities.find(
                  // double-equals because creds.id can be string or number
                  // eslint-disable-next-line eqeqeq
                  (credentials) => credentials.id == creds.id && credentials.type === type,
                );
                if (matchingCredentials) {
                  // @ts-ignore
                  node.credentials[type] = matchingCredentials.name;
                } else {
                  // @ts-ignore
                  node.credentials[type] = creds.name;
                }
                credentialsUpdated = true;
              }
            }
          }
        });
        if (credentialsUpdated) {
          await runQuery(
            `UPDATE ${workflowsTable} SET nodes = :nodes WHERE id = :id`,
            { nodes: JSON.stringify(nodes) },
            { id: workflow.id },
          );
        }
      });
    });

    const finishedValue = dbType === 'postgresdb' ? 'FALSE' : '0';
    const waitingExecutionsQuery = `
      SELECT id, ${dataColumn}
      FROM ${executionsTable}
      WHERE ${waitTillColumn} IS NOT NULL AND finished = ${finishedValue}
    `;

    await runInBatches<ExecutionWithData>(waitingExecutionsQuery, async (waitingExecutions) => {
      waitingExecutions.forEach(async (execution) => {
        let credentialsUpdated = false;
        const workflowData = parseJson(execution.workflowData);
        workflowData.nodes.forEach((node) => {
          if (node.credentials) {
            const allNodeCredentials = Object.entries(node.credentials);
            for (const [type, creds] of allNodeCredentials) {
              if (typeof creds === 'object') {
                const matchingCredentials = credentialsEntities.find(
                  // double-equals because creds.id can be string or number
                  // eslint-disable-next-line eqeqeq
                  (credentials) => credentials.id == creds.id && credentials.type === type,
                );
                if (matchingCredentials) {
                  // @ts-ignore
                  node.credentials[type] = matchingCredentials.name;
                } else {
                  // @ts-ignore
                  node.credentials[type] = creds.name;
                }
                credentialsUpdated = true;
              }
            }
          }
        });
        if (credentialsUpdated) {
          await runQuery(
            `UPDATE ${executionsTable}
            SET ${escape.columnName('workflowData')} = :data WHERE id = :id`,
            { data: JSON.stringify(workflowData) },
            { id: execution.id },
          );
        }
      });
    });

    const retryableExecutions: ExecutionWithData[] = await runQuery(`
      SELECT id, ${dataColumn}
      FROM ${executionsTable}
      WHERE ${waitTillColumn} IS NULL AND finished = ${finishedValue} AND mode != 'retry'
      ORDER BY ${escape.columnName('startedAt')} DESC
      LIMIT 200
    `);
    retryableExecutions.forEach(async (execution) => {
      let credentialsUpdated = false;
      const workflowData = parseJson(execution.workflowData);
      workflowData.nodes.forEach((node) => {
        if (node.credentials) {
          const allNodeCredentials = Object.entries(node.credentials);
          for (const [type, creds] of allNodeCredentials) {
            if (typeof creds === 'object') {
              const matchingCredentials = credentialsEntities.find(
                // double-equals because creds.id can be string or number
                // eslint-disable-next-line eqeqeq
                (credentials) => credentials.id == creds.id && credentials.type === type,
              );
              if (matchingCredentials) {
                // @ts-ignore
                node.credentials[type] = matchingCredentials.name;
              } else {
                // @ts-ignore
                node.credentials[type] = creds.name;
              }
              credentialsUpdated = true;
            }
          }
        }
      });
      if (credentialsUpdated) {
        await runQuery(
          `UPDATE ${executionsTable}
          SET ${escape.columnName('workflowData')} = :data WHERE id = :id`,
          { data: JSON.stringify(workflowData) },
          { id: execution.id },
        );
      }
    });
  }
}
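As a before/after sketch of the data change described in the comment at the top of this migration (the credential id and names below are hypothetical):

// Before up(): a node references its credential by name only.
const nodeBefore = { credentials: { httpBasicAuth: 'My Basic Auth' } };
// After up(): the reference becomes an object; id is null when no
// matching credentials_entity row was found.
const nodeAfter = { credentials: { httpBasicAuth: { id: '12', name: 'My Basic Auth' } } };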
@ -0,0 +1,44 @@
import type { INode } from 'n8n-workflow';
import type { WorkflowEntity } from '@db/entities/WorkflowEntity';
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { v4 as uuid } from 'uuid';

type Workflow = Pick<WorkflowEntity, 'id'> & { nodes: string | INode[] };

export class AddNodeIds1658930531669 implements ReversibleMigration {
  async up({ escape, runQuery, runInBatches, parseJson }: MigrationContext) {
    const tableName = escape.tableName('workflow_entity');
    const workflowsQuery = `SELECT id, nodes FROM ${tableName}`;
    await runInBatches<Workflow>(workflowsQuery, async (workflows) => {
      workflows.forEach(async (workflow) => {
        const nodes = parseJson(workflow.nodes);
        nodes.forEach((node: INode) => {
          if (!node.id) {
            node.id = uuid();
          }
        });

        await runQuery(
          `UPDATE ${tableName} SET nodes = :nodes WHERE id = :id`,
          { nodes: JSON.stringify(nodes) },
          { id: workflow.id },
        );
      });
    });
  }

  async down({ escape, runQuery, runInBatches, parseJson }: MigrationContext) {
    const tableName = escape.tableName('workflow_entity');
    const workflowsQuery = `SELECT id, nodes FROM ${tableName}`;
    await runInBatches<Workflow>(workflowsQuery, async (workflows) => {
      workflows.forEach(async (workflow) => {
        const nodes = parseJson(workflow.nodes).map(({ id, ...rest }) => rest);
        await runQuery(
          `UPDATE ${tableName} SET nodes = :nodes WHERE id = :id`,
          { nodes: JSON.stringify(nodes) },
          { id: workflow.id },
        );
      });
    });
  }
}
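A hypothetical sketch of what `up()` does to one workflow's nodes array (the UUID is invented; `down()` strips the property again):

import { v4 as uuid } from 'uuid';

// Hypothetical input: a node saved before node ids existed.
const nodes: Array<{ name: string; type: string; id?: string }> = [
  { name: 'Start', type: 'n8n-nodes-base.start' },
];
nodes.forEach((node) => {
  if (!node.id) node.id = uuid(); // e.g. '1f9a4f5d-...'
});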
@ -0,0 +1,84 @@
import type { IDataObject, INodeExecutionData } from 'n8n-workflow';
import type { MigrationContext, IrreversibleMigration } from '@db/types';

type OldPinnedData = { [nodeName: string]: IDataObject[] };
type NewPinnedData = { [nodeName: string]: INodeExecutionData[] };
type Workflow = { id: number; pinData: string | OldPinnedData };

function isObjectLiteral(item: unknown): item is { [key: string]: string } {
  return typeof item === 'object' && item !== null && !Array.isArray(item);
}

function isJsonKeyObject(item: unknown): item is {
  json: unknown;
  [keys: string]: unknown;
} {
  if (!isObjectLiteral(item)) return false;
  return Object.keys(item).includes('json');
}

/**
 * Convert TEXT-type `pinData` column in `workflow_entity` table from
 * `{ [nodeName: string]: IDataObject[] }` to `{ [nodeName: string]: INodeExecutionData[] }`
 */
export class AddJsonKeyPinData1659888469333 implements IrreversibleMigration {
  async up({ escape, runQuery, runInBatches }: MigrationContext) {
    const tableName = escape.tableName('workflow_entity');
    const columnName = escape.columnName('pinData');

    const selectQuery = `SELECT id, ${columnName} FROM ${tableName} WHERE ${columnName} IS NOT NULL`;
    await runInBatches<Workflow>(selectQuery, async (workflows) => {
      await Promise.all(
        this.makeUpdateParams(workflows).map(async (workflow) =>
          runQuery(`UPDATE ${tableName} SET ${columnName} = :pinData WHERE id = :id;`, {
            pinData: workflow.pinData,
            id: workflow.id,
          }),
        ),
      );
    });
  }

  private makeUpdateParams(fetchedWorkflows: Workflow[]) {
    return fetchedWorkflows.reduce<Workflow[]>((updateParams, { id, pinData: rawPinData }) => {
      let pinDataPerWorkflow: OldPinnedData | NewPinnedData;

      if (typeof rawPinData === 'string') {
        try {
          // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
          pinDataPerWorkflow = JSON.parse(rawPinData);
        } catch {
          pinDataPerWorkflow = {};
        }
      } else {
        pinDataPerWorkflow = rawPinData;
      }

      const newPinDataPerWorkflow = Object.keys(pinDataPerWorkflow).reduce<NewPinnedData>(
        // eslint-disable-next-line @typescript-eslint/no-shadow
        (newPinDataPerWorkflow, nodeName) => {
          let pinDataPerNode = pinDataPerWorkflow[nodeName];

          if (!Array.isArray(pinDataPerNode)) {
            pinDataPerNode = [pinDataPerNode];
          }

          if (pinDataPerNode.every((item) => item.json)) return newPinDataPerWorkflow;

          newPinDataPerWorkflow[nodeName] = pinDataPerNode.map((item) =>
            isJsonKeyObject(item) ? item : { json: item },
          );

          return newPinDataPerWorkflow;
        },
        {},
      );

      if (Object.keys(newPinDataPerWorkflow).length > 0) {
        updateParams.push({ id, pinData: JSON.stringify(newPinDataPerWorkflow) });
      }

      return updateParams;
    }, []);
  }
}
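Concretely, per the doc comment above, the stored JSON changes shape like this (hypothetical values):

// Hypothetical before/after for one workflow's pinData column.
const oldPinData = { 'Hacker News': [{ title: 'Item 1' }] };
const newPinData = { 'Hacker News': [{ json: { title: 'Item 1' } }] };
// Items that already carry a `json` key are left untouched, so the
// migration is safe to run against partially converted data.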
@ -0,0 +1,28 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { v4 as uuidv4 } from 'uuid';

type Workflow = { id: number };

export class AddWorkflowVersionIdColumn1669739707124 implements ReversibleMigration {
  async up({ escape, runQuery }: MigrationContext) {
    const tableName = escape.tableName('workflow_entity');
    const columnName = escape.columnName('versionId');

    await runQuery(`ALTER TABLE ${tableName} ADD COLUMN ${columnName} CHAR(36)`);

    const workflowIds: Workflow[] = await runQuery(`SELECT id FROM ${tableName}`);
    for (const { id } of workflowIds) {
      await runQuery(
        `UPDATE ${tableName} SET ${columnName} = :versionId WHERE id = :id`,
        { versionId: uuidv4() },
        { id },
      );
    }
  }

  async down({ escape, runQuery }: MigrationContext) {
    const tableName = escape.tableName('workflow_entity');
    const columnName = escape.columnName('versionId');
    await runQuery(`ALTER TABLE ${tableName} DROP COLUMN ${columnName}`);
  }
}
@ -0,0 +1,61 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { StatisticsNames } from '@db/entities/WorkflowStatistics';

export class RemoveWorkflowDataLoadedFlag1671726148419 implements ReversibleMigration {
  async up({ escape, dbType, runQuery }: MigrationContext) {
    const workflowTableName = escape.tableName('workflow_entity');
    const statisticsTableName = escape.tableName('workflow_statistics');
    const columnName = escape.columnName('dataLoaded');

    // If any existing workflow has dataLoaded set to true, insert the relevant information to the statistics table
    const workflowIds: Array<{ id: number; dataLoaded: boolean }> = await runQuery(
      `SELECT id, ${columnName} FROM ${workflowTableName}`,
    );

    const now =
      dbType === 'sqlite' ? "STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')" : 'CURRENT_TIMESTAMP(3)';

    await Promise.all(
      workflowIds.map(
        async ({ id, dataLoaded }) =>
          dataLoaded &&
          runQuery(
            `INSERT INTO ${statisticsTableName}
            (${escape.columnName('workflowId')}, name, count, ${escape.columnName('latestEvent')})
            VALUES (:id, :name, 1, ${now})`,
            { id, name: StatisticsNames.dataLoaded },
          ),
      ),
    );

    await runQuery(`ALTER TABLE ${workflowTableName} DROP COLUMN ${columnName}`);
  }

  async down({ escape, runQuery }: MigrationContext) {
    const workflowTableName = escape.tableName('workflow_entity');
    const statisticsTableName = escape.tableName('workflow_statistics');
    const columnName = escape.columnName('dataLoaded');

    await runQuery(
      `ALTER TABLE ${workflowTableName} ADD COLUMN ${columnName} BOOLEAN DEFAULT false`,
    );

    // Search through statistics for any workflows that have the dataLoaded stat
    const workflowsIds: Array<{ workflowId: string }> = await runQuery(
      `SELECT ${escape.columnName('workflowId')} FROM ${statisticsTableName} WHERE name = :name`,
      { name: StatisticsNames.dataLoaded },
    );

    await Promise.all(
      workflowsIds.map(async ({ workflowId }) =>
        runQuery(`UPDATE ${workflowTableName} SET ${columnName} = true WHERE id = :id`, {
          id: workflowId,
        }),
      ),
    );

    await runQuery(`DELETE FROM ${statisticsTableName} WHERE name = :name`, {
      name: StatisticsNames.dataLoaded,
    });
  }
}
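The effect of the insert in `up()` is one statistics row per workflow whose flag was set; a hypothetical sketch of its shape (the id is invented, `latestEvent` is filled in by the database via the dbType-specific `now` expression above):

// Hypothetical row written for workflow 42 with dataLoaded = true:
const insertedRow = {
  workflowId: 42,
  name: StatisticsNames.dataLoaded,
  count: 1,
};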
@ -0,0 +1,68 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { LDAP_DEFAULT_CONFIGURATION, LDAP_FEATURE_NAME } from '@/Ldap/constants';

export class CreateLdapEntities1674509946020 implements ReversibleMigration {
  async up({ escape, dbType, isMysql, runQuery }: MigrationContext) {
    const userTable = escape.tableName('user');
    await runQuery(`ALTER TABLE ${userTable} ADD COLUMN disabled BOOLEAN NOT NULL DEFAULT false;`);

    await runQuery(`
      INSERT INTO ${escape.tableName('settings')}
      (${escape.columnName('key')}, value, ${escape.columnName('loadOnStartup')})
      VALUES ('${LDAP_FEATURE_NAME}', '${JSON.stringify(LDAP_DEFAULT_CONFIGURATION)}', true)
    `);

    const uuidColumnType = dbType === 'postgresdb' ? 'UUID' : 'VARCHAR(36)';

    await runQuery(
      `CREATE TABLE IF NOT EXISTS ${escape.tableName('auth_identity')} (
        ${escape.columnName('userId')} ${uuidColumnType} REFERENCES ${userTable} (id),
        ${escape.columnName('providerId')} VARCHAR(64) NOT NULL,
        ${escape.columnName('providerType')} VARCHAR(32) NOT NULL,
        ${escape.columnName('createdAt')} timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
        ${escape.columnName('updatedAt')} timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
        PRIMARY KEY(${escape.columnName('providerId')}, ${escape.columnName('providerType')})
      )${isMysql ? "ENGINE='InnoDB'" : ''}`,
    );

    const idColumn =
      dbType === 'sqlite'
        ? 'INTEGER PRIMARY KEY AUTOINCREMENT'
        : dbType === 'postgresdb'
        ? 'SERIAL NOT NULL PRIMARY KEY'
        : 'INTEGER NOT NULL AUTO_INCREMENT';

    const timestampColumn =
      dbType === 'sqlite'
        ? 'DATETIME NOT NULL'
        : dbType === 'postgresdb'
        ? 'TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP'
        : 'DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP';

    await runQuery(
      `CREATE TABLE IF NOT EXISTS ${escape.tableName('auth_provider_sync_history')} (
        ${escape.columnName('id')} ${idColumn},
        ${escape.columnName('providerType')} VARCHAR(32) NOT NULL,
        ${escape.columnName('runMode')} TEXT NOT NULL,
        ${escape.columnName('status')} TEXT NOT NULL,
        ${escape.columnName('startedAt')} ${timestampColumn},
        ${escape.columnName('endedAt')} ${timestampColumn},
        ${escape.columnName('scanned')} INTEGER NOT NULL,
        ${escape.columnName('created')} INTEGER NOT NULL,
        ${escape.columnName('updated')} INTEGER NOT NULL,
        ${escape.columnName('disabled')} INTEGER NOT NULL,
        ${escape.columnName('error')} TEXT
        ${isMysql ? ',PRIMARY KEY (`id`)' : ''}
      )${isMysql ? "ENGINE='InnoDB'" : ''}`,
    );
  }

  async down({ escape, runQuery }: MigrationContext) {
    await runQuery(`DROP TABLE ${escape.tableName('auth_provider_sync_history')}`);
    await runQuery(`DROP TABLE ${escape.tableName('auth_identity')}`);
    await runQuery(
      `DELETE FROM ${escape.tableName('settings')} WHERE ${escape.columnName('key')} = :key`,
      { key: LDAP_FEATURE_NAME },
    );
    await runQuery(`ALTER TABLE ${escape.tableName('user')} DROP COLUMN disabled`);
  }
}
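For convenience, a compact view of what the `dbType` conditionals in `up()` resolve to (derived directly from the code above):

// dbType        idColumn                            timestampColumn
// 'sqlite'      INTEGER PRIMARY KEY AUTOINCREMENT   DATETIME NOT NULL
// 'postgresdb'  SERIAL NOT NULL PRIMARY KEY         TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
// (mysql)       INTEGER NOT NULL AUTO_INCREMENT     DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP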
@ -0,0 +1,58 @@
import type { WorkflowEntity } from '@db/entities/WorkflowEntity';
import type { MigrationContext, IrreversibleMigration } from '@db/types';

interface Workflow {
  id: number;
  nodes: WorkflowEntity['nodes'] | string;
  connections: WorkflowEntity['connections'] | string;
}

export class PurgeInvalidWorkflowConnections1675940580449 implements IrreversibleMigration {
  async up({ escape, parseJson, runQuery, nodeTypes }: MigrationContext) {
    const workflowsTable = escape.tableName('workflow_entity');
    const workflows: Workflow[] = await runQuery(
      `SELECT id, nodes, connections FROM ${workflowsTable}`,
    );

    await Promise.all(
      workflows.map(async (workflow) => {
        const connections = parseJson(workflow.connections);
        const nodes = parseJson(workflow.nodes);

        const nodesThatCannotReceiveInput = nodes.reduce<string[]>((acc, node) => {
          try {
            const nodeType = nodeTypes.getByNameAndVersion(node.type, node.typeVersion);
            if ((nodeType.description.inputs?.length ?? 0) === 0) {
              acc.push(node.name);
            }
          } catch (error) {
            // Unknown node type: leave its connections untouched
          }
          return acc;
        }, []);

        Object.keys(connections).forEach((sourceNodeName) => {
          const connection = connections[sourceNodeName];
          const outputs = Object.keys(connection);

          outputs.forEach((outputConnectionName /* Like `main` */) => {
            const outputConnection = connection[outputConnectionName];

            // It filters out all connections that are connected to a node that cannot receive input
            outputConnection.forEach((outputConnectionItem, outputConnectionItemIdx) => {
              outputConnection[outputConnectionItemIdx] = outputConnectionItem.filter(
                (outgoingConnections) =>
                  !nodesThatCannotReceiveInput.includes(outgoingConnections.node),
              );
            });
          });
        });

        // Update database with new connections
        return runQuery(
          `UPDATE ${workflowsTable} SET connections = :connections WHERE id = :id`,
          { connections: JSON.stringify(connections) },
          { id: workflow.id },
        );
      }),
    );
  }
}
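To make the filter concrete, a hypothetical sketch: a connection pointing at a node with no inputs (say a trigger) is dropped, while connections to ordinary nodes survive.

// Hypothetical data: 'Cron' was detected as unable to receive input.
const nodesThatCannotReceiveInput = ['Cron'];
const outputConnectionItem = [
  { node: 'Cron', type: 'main', index: 0 },
  { node: 'Set', type: 'main', index: 0 },
];
const kept = outputConnectionItem.filter(
  (connection) => !nodesThatCannotReceiveInput.includes(connection.node),
);
// kept === [{ node: 'Set', type: 'main', index: 0 }]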
@ -1,63 +1,3 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { UniqueWorkflowNames1620821879465 } from '../common/1620821879465-UniqueWorkflowNames';

export class UniqueWorkflowNames1620826335440 implements ReversibleMigration {
  async up({ queryRunner, tablePrefix }: MigrationContext) {
    const workflowNames = (await queryRunner.query(`
      SELECT name
      FROM ${tablePrefix}workflow_entity
    `)) as Array<{ name: string }>;

    for (const { name } of workflowNames) {
      const [duplicatesQuery, parameters] = queryRunner.connection.driver.escapeQueryWithParameters(
        `SELECT id, name
         FROM ${tablePrefix}workflow_entity
         WHERE name = :name
         ORDER BY createdAt ASC`,
        { name },
        {},
      );

      const duplicates = (await queryRunner.query(duplicatesQuery, parameters)) as Array<{
        id: number;
        name: string;
      }>;

      if (duplicates.length > 1) {
        await Promise.all(
          // eslint-disable-next-line @typescript-eslint/no-shadow
          duplicates.map(async ({ id, name }, index: number) => {
            if (index === 0) return;
            const [updateQuery, updateParams] =
              queryRunner.connection.driver.escapeQueryWithParameters(
                `UPDATE ${tablePrefix}workflow_entity
                 SET name = :name
                 WHERE id = '${id}'`,
                { name: `${name} ${index + 1}` },
                {},
              );

            return queryRunner.query(updateQuery, updateParams);
          }),
        );
      }
    }

    await queryRunner.query(
      'ALTER TABLE `' +
        tablePrefix +
        'workflow_entity` ADD UNIQUE INDEX `IDX_' +
        tablePrefix +
        '943d8f922be094eb507cb9a7f9` (`name`)',
    );
  }

  async down({ queryRunner, tablePrefix }: MigrationContext) {
    await queryRunner.query(
      'ALTER TABLE `' +
        tablePrefix +
        'workflow_entity` DROP INDEX `IDX_' +
        tablePrefix +
        '943d8f922be094eb507cb9a7f9`',
    );
  }
}
export class UniqueWorkflowNames1620826335440 extends UniqueWorkflowNames1620821879465 {}
@ -1,298 +1,3 @@
/* eslint-disable @typescript-eslint/prefer-nullish-coalescing */
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-argument */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { runInBatches } from '@db/utils/migrationHelpers';
import { UpdateWorkflowCredentials1630330987096 } from '../common/1630330987096-UpdateWorkflowCredentials';

// replacing the credentials in workflows and execution
// `nodeType: name` changes to `nodeType: { id, name }`

export class UpdateWorkflowCredentials1630451444017 implements ReversibleMigration {
  async up({ queryRunner, tablePrefix }: MigrationContext) {
    const credentialsEntities = (await queryRunner.query(`
      SELECT id, name, type
      FROM ${tablePrefix}credentials_entity
    `)) as Array<{ id: string; name: string; type: string }>;

    const workflowsQuery = `
      SELECT id, nodes
      FROM ${tablePrefix}workflow_entity
    `;

    // @ts-ignore
    await runInBatches(queryRunner, workflowsQuery, (workflows) => {
      workflows.forEach(async (workflow) => {
        const nodes = workflow.nodes;
        let credentialsUpdated = false;
        // @ts-ignore
        nodes.forEach((node) => {
          if (node.credentials) {
            const allNodeCredentials = Object.entries(node.credentials);
            for (const [type, name] of allNodeCredentials) {
              if (typeof name === 'string') {
                const matchingCredentials = credentialsEntities.find(
                  // @ts-ignore
                  (credentials) => credentials.name === name && credentials.type === type,
                );
                node.credentials[type] = { id: matchingCredentials?.id || null, name };
                credentialsUpdated = true;
              }
            }
          }
        });
        if (credentialsUpdated) {
          const [updateQuery, updateParams] =
            queryRunner.connection.driver.escapeQueryWithParameters(
              `
              UPDATE ${tablePrefix}workflow_entity
              SET nodes = :nodes
              WHERE id = '${workflow.id}'
              `,
              { nodes: JSON.stringify(nodes) },
              {},
            );

          await queryRunner.query(updateQuery, updateParams);
        }
      });
    });

    const waitingExecutionsQuery = `
      SELECT id, workflowData
      FROM ${tablePrefix}execution_entity
      WHERE waitTill IS NOT NULL AND finished = 0
    `;
    // @ts-ignore
    await runInBatches(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
      waitingExecutions.forEach(async (execution) => {
        const data = execution.workflowData;
        let credentialsUpdated = false;
        // @ts-ignore
        data.nodes.forEach((node) => {
          if (node.credentials) {
            const allNodeCredentials = Object.entries(node.credentials);
            for (const [type, name] of allNodeCredentials) {
              if (typeof name === 'string') {
                const matchingCredentials = credentialsEntities.find(
                  // @ts-ignore
                  (credentials) => credentials.name === name && credentials.type === type,
                );
                node.credentials[type] = { id: matchingCredentials?.id || null, name };
                credentialsUpdated = true;
              }
            }
          }
        });
        if (credentialsUpdated) {
          const [updateQuery, updateParams] =
            queryRunner.connection.driver.escapeQueryWithParameters(
              `
              UPDATE ${tablePrefix}execution_entity
              SET workflowData = :data
              WHERE id = '${execution.id}'
              `,
              { data: JSON.stringify(data) },
              {},
            );

          await queryRunner.query(updateQuery, updateParams);
        }
      });
    });

    const retryableExecutions = await queryRunner.query(`
      SELECT id, workflowData
      FROM ${tablePrefix}execution_entity
      WHERE waitTill IS NULL AND finished = 0 AND mode != 'retry'
      ORDER BY startedAt DESC
      LIMIT 200
    `);
    // @ts-ignore
    retryableExecutions.forEach(async (execution) => {
      const data = execution.workflowData;
      let credentialsUpdated = false;
      // @ts-ignore
      data.nodes.forEach((node) => {
        if (node.credentials) {
          const allNodeCredentials = Object.entries(node.credentials);
          for (const [type, name] of allNodeCredentials) {
            if (typeof name === 'string') {
              const matchingCredentials = credentialsEntities.find(
                // @ts-ignore
                (credentials) => credentials.name === name && credentials.type === type,
              );
              node.credentials[type] = { id: matchingCredentials?.id || null, name };
              credentialsUpdated = true;
            }
          }
        }
      });
      if (credentialsUpdated) {
        const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
          `
          UPDATE ${tablePrefix}execution_entity
          SET workflowData = :data
          WHERE id = '${execution.id}'
          `,
          { data: JSON.stringify(data) },
          {},
        );

        await queryRunner.query(updateQuery, updateParams);
      }
    });
  }

  async down({ queryRunner, tablePrefix }: MigrationContext) {
    const credentialsEntities = (await queryRunner.query(`
      SELECT id, name, type
      FROM ${tablePrefix}credentials_entity
    `)) as Array<{ id: string; name: string; type: string }>;

    const workflowsQuery = `
      SELECT id, nodes
      FROM ${tablePrefix}workflow_entity
    `;
    // @ts-ignore
    await runInBatches(queryRunner, workflowsQuery, (workflows) => {
      workflows.forEach(async (workflow) => {
        const nodes = workflow.nodes;
        let credentialsUpdated = false;
        // @ts-ignore
        nodes.forEach((node) => {
          if (node.credentials) {
            const allNodeCredentials = Object.entries(node.credentials);
            for (const [type, creds] of allNodeCredentials) {
              if (typeof creds === 'object') {
                const matchingCredentials = credentialsEntities.find(
                  // @ts-ignore
                  (credentials) => credentials.id === creds.id && credentials.type === type,
                );
                if (matchingCredentials) {
                  node.credentials[type] = matchingCredentials.name;
                } else {
                  // @ts-ignore
                  node.credentials[type] = creds.name;
                }
                credentialsUpdated = true;
              }
            }
          }
        });
        if (credentialsUpdated) {
          const [updateQuery, updateParams] =
            queryRunner.connection.driver.escapeQueryWithParameters(
              `
              UPDATE ${tablePrefix}workflow_entity
              SET nodes = :nodes
              WHERE id = '${workflow.id}'
              `,
              { nodes: JSON.stringify(nodes) },
              {},
            );

          await queryRunner.query(updateQuery, updateParams);
        }
      });
    });

    const waitingExecutionsQuery = `
      SELECT id, workflowData
      FROM ${tablePrefix}execution_entity
      WHERE waitTill IS NOT NULL AND finished = 0
    `;
    // @ts-ignore
    await runInBatches(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
      waitingExecutions.forEach(async (execution) => {
        const data = execution.workflowData;
        let credentialsUpdated = false;
        // @ts-ignore
        data.nodes.forEach((node) => {
          if (node.credentials) {
            const allNodeCredentials = Object.entries(node.credentials);
            for (const [type, creds] of allNodeCredentials) {
              if (typeof creds === 'object') {
                // @ts-ignore
                const matchingCredentials = credentialsEntities.find(
                  // @ts-ignore
                  (credentials) => credentials.id === creds.id && credentials.type === type,
                );
                if (matchingCredentials) {
                  node.credentials[type] = matchingCredentials.name;
                } else {
                  // @ts-ignore
                  node.credentials[type] = creds.name;
                }
                credentialsUpdated = true;
              }
            }
          }
        });
        if (credentialsUpdated) {
          const [updateQuery, updateParams] =
            queryRunner.connection.driver.escapeQueryWithParameters(
              `
              UPDATE ${tablePrefix}execution_entity
              SET workflowData = :data
              WHERE id = '${execution.id}'
              `,
              { data: JSON.stringify(data) },
              {},
            );

          await queryRunner.query(updateQuery, updateParams);
        }
      });
    });

    const retryableExecutions = await queryRunner.query(`
      SELECT id, workflowData
      FROM ${tablePrefix}execution_entity
      WHERE waitTill IS NULL AND finished = 0 AND mode != 'retry'
      ORDER BY startedAt DESC
      LIMIT 200
    `);
    // @ts-ignore
    retryableExecutions.forEach(async (execution) => {
      const data = execution.workflowData;
      let credentialsUpdated = false;
      // @ts-ignore
      data.nodes.forEach((node) => {
        if (node.credentials) {
          const allNodeCredentials = Object.entries(node.credentials);
          for (const [type, creds] of allNodeCredentials) {
            if (typeof creds === 'object') {
              // @ts-ignore
              const matchingCredentials = credentialsEntities.find(
                // @ts-ignore
                (credentials) => credentials.id === creds.id && credentials.type === type,
              );
              if (matchingCredentials) {
                node.credentials[type] = matchingCredentials.name;
              } else {
                // @ts-ignore
                node.credentials[type] = creds.name;
              }
              credentialsUpdated = true;
            }
          }
        }
      });
      if (credentialsUpdated) {
        const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
          `
          UPDATE ${tablePrefix}execution_entity
          SET workflowData = :data
          WHERE id = '${execution.id}'
          `,
          { data: JSON.stringify(data) },
          {},
        );

        await queryRunner.query(updateQuery, updateParams);
      }
    });
  }
}
export class UpdateWorkflowCredentials1630451444017 extends UpdateWorkflowCredentials1630330987096 {}
@ -1,9 +1,8 @@
import type { InsertResult, MigrationContext, ReversibleMigration } from '@db/types';
import { v4 as uuid } from 'uuid';
import { loadSurveyFromDisk } from '@db/utils/migrationHelpers';

export class CreateUserManagement1646992772331 implements ReversibleMigration {
  async up({ queryRunner, tablePrefix }: MigrationContext) {
  async up({ queryRunner, tablePrefix, loadSurveyFromDisk }: MigrationContext) {
    await queryRunner.query(
      `CREATE TABLE ${tablePrefix}role (
        \`id\` int NOT NULL AUTO_INCREMENT,
@ -1,75 +1,3 @@
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable n8n-local-rules/no-uncaught-json-parse */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { runInBatches } from '@db/utils/migrationHelpers';
import { v4 as uuid } from 'uuid';
import { AddNodeIds1658930531669 } from '../common/1658930531669-AddNodeIds';

// add node ids in workflow objects

export class AddNodeIds1658932910559 implements ReversibleMigration {
  async up({ queryRunner, tablePrefix }: MigrationContext) {
    const workflowsQuery = `
      SELECT id, nodes
      FROM ${tablePrefix}workflow_entity
    `;

    // @ts-ignore
    await runInBatches(queryRunner, workflowsQuery, (workflows) => {
      workflows.forEach(async (workflow) => {
        let nodes = workflow.nodes;
        if (typeof nodes === 'string') {
          nodes = JSON.parse(nodes);
        }

        // @ts-ignore
        nodes.forEach((node) => {
          if (!node.id) {
            node.id = uuid();
          }
        });

        const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
          `
          UPDATE ${tablePrefix}workflow_entity
          SET nodes = :nodes
          WHERE id = '${workflow.id}'
          `,
          { nodes: JSON.stringify(nodes) },
          {},
        );

        await queryRunner.query(updateQuery, updateParams);
      });
    });
  }

  async down({ queryRunner, tablePrefix }: MigrationContext) {
    const workflowsQuery = `
      SELECT id, nodes
      FROM ${tablePrefix}workflow_entity
    `;

    // @ts-ignore
    await runInBatches(queryRunner, workflowsQuery, (workflows) => {
      workflows.forEach(async (workflow) => {
        const nodes = workflow.nodes;
        // @ts-ignore
        nodes.forEach((node) => delete node.id);

        const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
          `
          UPDATE ${tablePrefix}workflow_entity
          SET nodes = :nodes
          WHERE id = '${workflow.id}'
          `,
          { nodes: JSON.stringify(nodes) },
          {},
        );

        await queryRunner.query(updateQuery, updateParams);
      });
    });
  }
}
export class AddNodeIds1658932910559 extends AddNodeIds1658930531669 {}
@ -1,32 +1,3 @@
import type { MigrationContext, IrreversibleMigration } from '@db/types';
import { runInBatches } from '@db/utils/migrationHelpers';
import { addJsonKeyToPinDataColumn } from '../sqlite/1659888469333-AddJsonKeyPinData';
import { AddJsonKeyPinData1659888469333 } from '../common/1659888469333-AddJsonKeyPinData';

/**
 * Convert JSON-type `pinData` column in `workflow_entity` table from
 * `{ [nodeName: string]: IDataObject[] }` to `{ [nodeName: string]: INodeExecutionData[] }`
 */
export class AddJsonKeyPinData1659895550980 implements IrreversibleMigration {
  async up(context: MigrationContext) {
    const { queryRunner, tablePrefix } = context;
    const workflowTable = `${tablePrefix}workflow_entity`;

    const PINDATA_SELECT_QUERY = `
      SELECT id, pinData
      FROM \`${workflowTable}\`
      WHERE pinData IS NOT NULL;
    `;

    const PINDATA_UPDATE_STATEMENT = `
      UPDATE \`${workflowTable}\`
      SET \`pinData\` = :pinData
      WHERE id = :id;
    `;

    await runInBatches(
      queryRunner,
      PINDATA_SELECT_QUERY,
      addJsonKeyToPinDataColumn(context, PINDATA_UPDATE_STATEMENT),
    );
  }
}
export class AddJsonKeyPinData1659895550980 extends AddJsonKeyPinData1659888469333 {}
@ -1,31 +1,3 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { v4 as uuidv4 } from 'uuid';
import { AddWorkflowVersionIdColumn1669739707124 } from '../common/1669739707124-AddWorkflowVersionIdColumn';

export class AddWorkflowVersionIdColumn1669739707125 implements ReversibleMigration {
  async up({ queryRunner, tablePrefix }: MigrationContext) {
    await queryRunner.query(
      `ALTER TABLE ${tablePrefix}workflow_entity ADD COLUMN versionId CHAR(36)`,
    );

    const workflowIds = (await queryRunner.query(`
      SELECT id
      FROM ${tablePrefix}workflow_entity
    `)) as Array<{ id: number }>;

    for (const { id } of workflowIds) {
      const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
        `UPDATE ${tablePrefix}workflow_entity
         SET versionId = :versionId
         WHERE id = '${id}'`,
        { versionId: uuidv4() },
        {},
      );

      await queryRunner.query(updateQuery, updateParams);
    }
  }

  async down({ queryRunner, tablePrefix }: MigrationContext) {
    await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity DROP COLUMN versionId`);
  }
}
export class AddWorkflowVersionIdColumn1669739707125 extends AddWorkflowVersionIdColumn1669739707124 {}
@ -1,55 +1,3 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { StatisticsNames } from '@db/entities/WorkflowStatistics';
import { RemoveWorkflowDataLoadedFlag1671726148419 } from '../common/1671726148419-RemoveWorkflowDataLoadedFlag';

export class RemoveWorkflowDataLoadedFlag1671726148420 implements ReversibleMigration {
  async up({ queryRunner, tablePrefix }: MigrationContext) {
    // If any existing workflow has dataLoaded set to true, insert the relevant information to the statistics table
    const workflowIds = (await queryRunner.query(`
      SELECT id, dataLoaded
      FROM ${tablePrefix}workflow_entity
    `)) as Array<{ id: number; dataLoaded: boolean }>;

    workflowIds.map(async ({ id, dataLoaded }) => {
      if (dataLoaded) {
        const [insertQuery, insertParams] = queryRunner.connection.driver.escapeQueryWithParameters(
          `
          INSERT INTO ${tablePrefix}workflow_statistics (workflowId, name, count, latestEvent) VALUES
          (:id, :name, 1, CURRENT_TIMESTAMP(3))
          `,
          { id, name: StatisticsNames.dataLoaded },
          {},
        );

        return queryRunner.query(insertQuery, insertParams);
      }
      return undefined;
    });

    await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity DROP COLUMN dataLoaded`);
  }

  async down({ queryRunner, tablePrefix }: MigrationContext) {
    await queryRunner.query(
      `ALTER TABLE ${tablePrefix}workflow_entity ADD COLUMN dataLoaded BOOLEAN DEFAULT false`,
    );

    // Search through statistics for any workflows that have the dataLoaded stat
    const workflowsIds = (await queryRunner.query(`
      SELECT workflowId
      FROM ${tablePrefix}workflow_statistics
      WHERE name = '${StatisticsNames.dataLoaded}'
    `)) as Array<{ workflowId: string }>;

    workflowsIds.map(async ({ workflowId }) =>
      queryRunner.query(`
        UPDATE ${tablePrefix}workflow_entity
        SET dataLoaded = true
        WHERE id = '${workflowId}'
      `),
    );

    await queryRunner.query(
      `DELETE FROM ${tablePrefix}workflow_statistics WHERE name = '${StatisticsNames.dataLoaded}'`,
    );
  }
}
export class RemoveWorkflowDataLoadedFlag1671726148420 extends RemoveWorkflowDataLoadedFlag1671726148419 {}
@ -1,53 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { LDAP_DEFAULT_CONFIGURATION, LDAP_FEATURE_NAME } from '@/Ldap/constants';

export class CreateLdapEntities1674509946020 implements ReversibleMigration {
  async up({ queryRunner, tablePrefix }: MigrationContext) {
    await queryRunner.query(
      `ALTER TABLE \`${tablePrefix}user\` ADD COLUMN disabled BOOLEAN NOT NULL DEFAULT false;`,
    );

    await queryRunner.query(`
      INSERT INTO ${tablePrefix}settings(\`key\`, value, loadOnStartup)
      VALUES ('${LDAP_FEATURE_NAME}', '${JSON.stringify(LDAP_DEFAULT_CONFIGURATION)}', 1);
    `);

    await queryRunner.query(
      `CREATE TABLE IF NOT EXISTS \`${tablePrefix}auth_identity\` (
        \`userId\` VARCHAR(36) REFERENCES \`${tablePrefix}user\` (id),
        \`providerId\` VARCHAR(64) NOT NULL,
        \`providerType\` VARCHAR(32) NOT NULL,
        \`createdAt\` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
        \`updatedAt\` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
        PRIMARY KEY(\`providerId\`, \`providerType\`)
      ) ENGINE='InnoDB';`,
    );

    await queryRunner.query(
      `CREATE TABLE IF NOT EXISTS \`${tablePrefix}auth_provider_sync_history\` (
        \`id\` INTEGER NOT NULL AUTO_INCREMENT,
        \`providerType\` VARCHAR(32) NOT NULL,
        \`runMode\` TEXT NOT NULL,
        \`status\` TEXT NOT NULL,
        \`startedAt\` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
        \`endedAt\` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
        \`scanned\` INTEGER NOT NULL,
        \`created\` INTEGER NOT NULL,
        \`updated\` INTEGER NOT NULL,
        \`disabled\` INTEGER NOT NULL,
        \`error\` TEXT,
        PRIMARY KEY (\`id\`)
      ) ENGINE='InnoDB';`,
    );
  }

  async down({ queryRunner, tablePrefix }: MigrationContext) {
    await queryRunner.query(`DROP TABLE \`${tablePrefix}auth_provider_sync_history\``);
    await queryRunner.query(`DROP TABLE \`${tablePrefix}auth_identity\``);

    await queryRunner.query(
      `DELETE FROM ${tablePrefix}settings WHERE \`key\` = '${LDAP_FEATURE_NAME}'`,
    );
    await queryRunner.query(`ALTER TABLE \`${tablePrefix}user\` DROP COLUMN disabled`);
  }
}
@ -1,69 +0,0 @@
import type { IConnections, INode } from 'n8n-workflow';
import { jsonParse } from 'n8n-workflow';
import type { MigrationContext, IrreversibleMigration } from '@db/types';
import { NodeTypes } from '@/NodeTypes';
import { Container } from 'typedi';

export class PurgeInvalidWorkflowConnections1675940580449 implements IrreversibleMigration {
  async up({ queryRunner, tablePrefix, migrationName, logger }: MigrationContext) {
    const workflows = (await queryRunner.query(`
      SELECT id, nodes, connections
      FROM \`${tablePrefix}workflow_entity\`
    `)) as Array<{
      id: number;
      nodes: INode[] | string;
      connections: IConnections | string;
    }>;

    const nodeTypes = Container.get(NodeTypes);

    workflows.forEach(async (workflow) => {
      const connections =
        typeof workflow.connections === 'string'
          ? jsonParse<IConnections>(workflow.connections)
          : workflow.connections;
      const nodes =
        typeof workflow.nodes === 'string' ? jsonParse<INode[]>(workflow.nodes) : workflow.nodes;

      const nodesThatCannotReceiveInput: string[] = nodes.reduce((acc, node) => {
        try {
          const nodeType = nodeTypes.getByNameAndVersion(node.type, node.typeVersion);
          if ((nodeType.description.inputs?.length ?? []) === 0) {
            acc.push(node.name);
          }
        } catch (error) {
          logger.warn(`Migration ${migrationName} failed with error: ${(error as Error).message}`);
        }
        return acc;
      }, [] as string[]);

      Object.keys(connections).forEach((sourceNodeName) => {
        const connection = connections[sourceNodeName];
        const outputs = Object.keys(connection);

        outputs.forEach((outputConnectionName /* Like `main` */) => {
          const outputConnection = connection[outputConnectionName];

          // It filters out all connections that are connected to a node that cannot receive input
          outputConnection.forEach((outputConnectionItem, outputConnectionItemIdx) => {
            outputConnection[outputConnectionItemIdx] = outputConnectionItem.filter(
              (outgoingConnections) =>
                !nodesThatCannotReceiveInput.includes(outgoingConnections.node),
            );
          });
        });
      });

      // Update database with new connections
      const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
        `UPDATE \`${tablePrefix}workflow_entity\`
         SET connections = :connections
         WHERE id = '${workflow.id}'`,
        { connections: JSON.stringify(connections) },
        {},
      );

      await queryRunner.query(updateQuery, updateParams);
    });
  }
}
@ -30,8 +30,8 @@ import { AddTriggerCountColumn1669823906994 } from './1669823906994-AddTriggerCo
import { RemoveWorkflowDataLoadedFlag1671726148420 } from './1671726148420-RemoveWorkflowDataLoadedFlag';
import { MessageEventBusDestinations1671535397530 } from './1671535397530-MessageEventBusDestinations';
import { DeleteExecutionsWithWorkflows1673268682475 } from './1673268682475-DeleteExecutionsWithWorkflows';
import { CreateLdapEntities1674509946020 } from './1674509946020-CreateLdapEntities';
import { PurgeInvalidWorkflowConnections1675940580449 } from './1675940580449-PurgeInvalidWorkflowConnections';
import { CreateLdapEntities1674509946020 } from '../common/1674509946020-CreateLdapEntities';
import { PurgeInvalidWorkflowConnections1675940580449 } from '../common/1675940580449-PurgeInvalidWorkflowConnections';
import { AddStatusToExecutions1674138566000 } from './1674138566000-AddStatusToExecutions';
import { MigrateExecutionStatus1676996103000 } from './1676996103000-MigrateExecutionStatus';
import { UpdateRunningExecutionStatus1677236788851 } from './1677236788851-UpdateRunningExecutionStatus';
@ -1,56 +1,5 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { UniqueWorkflowNames1620821879465 } from '../common/1620821879465-UniqueWorkflowNames';

export class UniqueWorkflowNames1620824779533 implements ReversibleMigration {
  async up({ queryRunner, tablePrefix }: MigrationContext) {
    const workflowNames = (await queryRunner.query(`
      SELECT name
      FROM ${tablePrefix}workflow_entity
    `)) as Array<{ name: string }>;

    for (const { name } of workflowNames) {
      const [duplicatesQuery, parameters] = queryRunner.connection.driver.escapeQueryWithParameters(
        `
        SELECT id, name
        FROM ${tablePrefix}workflow_entity
        WHERE name = :name
        ORDER BY "createdAt" ASC
        `,
        { name },
        {},
      );

      const duplicates = (await queryRunner.query(duplicatesQuery, parameters)) as Array<{
        id: number;
        name: string;
      }>;

      if (duplicates.length > 1) {
        await Promise.all(
          // eslint-disable-next-line @typescript-eslint/no-shadow
          duplicates.map(async ({ id, name }, index: number) => {
            if (index === 0) return;
            const [updateQuery, updateParams] =
              queryRunner.connection.driver.escapeQueryWithParameters(
                `UPDATE ${tablePrefix}workflow_entity
                 SET name = :name
                 WHERE id = '${id}'
                `,
                { name: `${name} ${index + 1}` },
                {},
              );

            return queryRunner.query(updateQuery, updateParams);
          }),
        );
      }
    }

    await queryRunner.query(
      `CREATE UNIQUE INDEX "IDX_${tablePrefix}a252c527c4c89237221fe2c0ab" ON ${tablePrefix}workflow_entity ("name") `,
    );
  }

  async down({ queryRunner, tablePrefix }: MigrationContext) {
    await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}a252c527c4c89237221fe2c0ab"`);
  }
}
export class UniqueWorkflowNames1620824779533 extends UniqueWorkflowNames1620821879465 {
  indexSuffix = 'a252c527c4c89237221fe2c0ab';
}
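Two shapes of the same consolidation pattern recur throughout this commit: a driver-specific migration either inherits the common one unchanged, or overrides only the per-database detail. Both lines below appear verbatim elsewhere in this diff; they are excerpts, not standalone code.

// Plain re-export under the historical timestamped class name:
export class UpdateWorkflowCredentials1630451444017 extends UpdateWorkflowCredentials1630330987096 {}
// Override of the one detail that differs per database, the index suffix:
export class UniqueWorkflowNames1620824779533 extends UniqueWorkflowNames1620821879465 {
  indexSuffix = 'a252c527c4c89237221fe2c0ab';
}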
@ -1,301 +1,3 @@
/* eslint-disable @typescript-eslint/prefer-nullish-coalescing */
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-argument */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { runInBatches } from '@db/utils/migrationHelpers';
import { UpdateWorkflowCredentials1630330987096 } from '../common/1630330987096-UpdateWorkflowCredentials';

// replacing the credentials in workflows and execution
// `nodeType: name` changes to `nodeType: { id, name }`

export class UpdateWorkflowCredentials1630419189837 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		const credentialsEntities = (await queryRunner.query(`
			SELECT id, name, type
			FROM ${tablePrefix}credentials_entity
		`)) as Array<{ id: string; name: string; type: string }>;

		const workflowsQuery = `
			SELECT id, nodes
			FROM ${tablePrefix}workflow_entity
		`;

		// @ts-ignore
		await runInBatches(queryRunner, workflowsQuery, (workflows) => {
			workflows.forEach(async (workflow) => {
				const nodes = workflow.nodes;
				let credentialsUpdated = false;
				// @ts-ignore
				nodes.forEach((node) => {
					if (node.credentials) {
						const allNodeCredentials = Object.entries(node.credentials);
						for (const [type, name] of allNodeCredentials) {
							if (typeof name === 'string') {
								const matchingCredentials = credentialsEntities.find(
									// @ts-ignore
									(credentials) => credentials.name === name && credentials.type === type,
								);
								node.credentials[type] = { id: matchingCredentials?.id || null, name };
								credentialsUpdated = true;
							}
						}
					}
				});
				if (credentialsUpdated) {
					const [updateQuery, updateParams] =
						queryRunner.connection.driver.escapeQueryWithParameters(
							`
							UPDATE ${tablePrefix}workflow_entity
							SET nodes = :nodes
							WHERE id = '${workflow.id}'
							`,
							{ nodes: JSON.stringify(nodes) },
							{},
						);

					await queryRunner.query(updateQuery, updateParams);
				}
			});
		});

		const waitingExecutionsQuery = `
			SELECT id, "workflowData"
			FROM ${tablePrefix}execution_entity
			WHERE "waitTill" IS NOT NULL AND finished = FALSE
		`;
		// @ts-ignore
		await runInBatches(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
			waitingExecutions.forEach(async (execution) => {
				const data = execution.workflowData;
				let credentialsUpdated = false;
				// @ts-ignore
				data.nodes.forEach((node) => {
					if (node.credentials) {
						const allNodeCredentials = Object.entries(node.credentials);
						for (const [type, name] of allNodeCredentials) {
							if (typeof name === 'string') {
								const matchingCredentials = credentialsEntities.find(
									// @ts-ignore
									(credentials) => credentials.name === name && credentials.type === type,
								);
								node.credentials[type] = { id: matchingCredentials?.id || null, name };
								credentialsUpdated = true;
							}
						}
					}
				});
				if (credentialsUpdated) {
					const [updateQuery, updateParams] =
						queryRunner.connection.driver.escapeQueryWithParameters(
							`
							UPDATE ${tablePrefix}execution_entity
							SET "workflowData" = :data
							WHERE id = '${execution.id}'
							`,
							{ data: JSON.stringify(data) },
							{},
						);

					await queryRunner.query(updateQuery, updateParams);
				}
			});
		});

		const retryableExecutions = await queryRunner.query(`
			SELECT id, "workflowData"
			FROM ${tablePrefix}execution_entity
			WHERE "waitTill" IS NULL AND finished = FALSE AND mode != 'retry'
			ORDER BY "startedAt" DESC
			LIMIT 200
		`);

		// @ts-ignore
		retryableExecutions.forEach(async (execution) => {
			const data = execution.workflowData;
			let credentialsUpdated = false;
			// @ts-ignore
			data.nodes.forEach((node) => {
				if (node.credentials) {
					const allNodeCredentials = Object.entries(node.credentials);
					for (const [type, name] of allNodeCredentials) {
						if (typeof name === 'string') {
							// @ts-ignore
							const matchingCredentials = credentialsEntities.find(
								// @ts-ignore
								(credentials) => credentials.name === name && credentials.type === type,
							);
							node.credentials[type] = { id: matchingCredentials?.id || null, name };
							credentialsUpdated = true;
						}
					}
				}
			});
			if (credentialsUpdated) {
				const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
					`
					UPDATE ${tablePrefix}execution_entity
					SET "workflowData" = :data
					WHERE id = '${execution.id}'
					`,
					{ data: JSON.stringify(data) },
					{},
				);

				await queryRunner.query(updateQuery, updateParams);
			}
		});
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		const credentialsEntities = (await queryRunner.query(`
			SELECT id, name, type
			FROM ${tablePrefix}credentials_entity
		`)) as Array<{ id: string; name: string; type: string }>;

		const workflowsQuery = `
			SELECT id, nodes
			FROM ${tablePrefix}workflow_entity
		`;
		// @ts-ignore
		await runInBatches(queryRunner, workflowsQuery, (workflows) => {
			workflows.forEach(async (workflow) => {
				const nodes = workflow.nodes;
				let credentialsUpdated = false;
				// @ts-ignore
				nodes.forEach((node) => {
					if (node.credentials) {
						const allNodeCredentials = Object.entries(node.credentials);
						for (const [type, creds] of allNodeCredentials) {
							if (typeof creds === 'object') {
								// @ts-ignore
								const matchingCredentials = credentialsEntities.find(
									// @ts-ignore
									(credentials) => credentials.id === creds.id && credentials.type === type,
								);
								if (matchingCredentials) {
									node.credentials[type] = matchingCredentials.name;
								} else {
									// @ts-ignore
									node.credentials[type] = creds.name;
								}
								credentialsUpdated = true;
							}
						}
					}
				});
				if (credentialsUpdated) {
					const [updateQuery, updateParams] =
						queryRunner.connection.driver.escapeQueryWithParameters(
							`
							UPDATE ${tablePrefix}workflow_entity
							SET nodes = :nodes
							WHERE id = '${workflow.id}'
							`,
							{ nodes: JSON.stringify(nodes) },
							{},
						);

					await queryRunner.query(updateQuery, updateParams);
				}
			});
		});

		const waitingExecutionsQuery = `
			SELECT id, "workflowData"
			FROM ${tablePrefix}execution_entity
			WHERE "waitTill" IS NOT NULL AND finished = FALSE
		`;
		// @ts-ignore
		await runInBatches(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
			waitingExecutions.forEach(async (execution) => {
				const data = execution.workflowData;
				let credentialsUpdated = false;
				// @ts-ignore
				data.nodes.forEach((node) => {
					if (node.credentials) {
						const allNodeCredentials = Object.entries(node.credentials);
						for (const [type, creds] of allNodeCredentials) {
							if (typeof creds === 'object') {
								// @ts-ignore
								const matchingCredentials = credentialsEntities.find(
									// @ts-ignore
									(credentials) => credentials.id === creds.id && credentials.type === type,
								);
								if (matchingCredentials) {
									node.credentials[type] = matchingCredentials.name;
								} else {
									// @ts-ignore
									node.credentials[type] = creds.name;
								}
								credentialsUpdated = true;
							}
						}
					}
				});
				if (credentialsUpdated) {
					const [updateQuery, updateParams] =
						queryRunner.connection.driver.escapeQueryWithParameters(
							`
							UPDATE ${tablePrefix}execution_entity
							SET "workflowData" = :data
							WHERE id = '${execution.id}'
							`,
							{ data: JSON.stringify(data) },
							{},
						);

					await queryRunner.query(updateQuery, updateParams);
				}
			});
		});

		const retryableExecutions = await queryRunner.query(`
			SELECT id, "workflowData"
			FROM ${tablePrefix}execution_entity
			WHERE "waitTill" IS NULL AND finished = FALSE AND mode != 'retry'
			ORDER BY "startedAt" DESC
			LIMIT 200
		`);
		// @ts-ignore
		retryableExecutions.forEach(async (execution) => {
			const data = execution.workflowData;
			let credentialsUpdated = false;
			// @ts-ignore
			data.nodes.forEach((node) => {
				if (node.credentials) {
					const allNodeCredentials = Object.entries(node.credentials);
					for (const [type, creds] of allNodeCredentials) {
						if (typeof creds === 'object') {
							// @ts-ignore
							const matchingCredentials = credentialsEntities.find(
								// @ts-ignore
								(credentials) => credentials.id === creds.id && credentials.type === type,
							);
							if (matchingCredentials) {
								node.credentials[type] = matchingCredentials.name;
							} else {
								// @ts-ignore
								node.credentials[type] = creds.name;
							}
							credentialsUpdated = true;
						}
					}
				}
			});
			if (credentialsUpdated) {
				const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
					`
					UPDATE ${tablePrefix}execution_entity
					SET "workflowData" = :data
					WHERE id = '${execution.id}'
					`,
					{ data: JSON.stringify(data) },
					{},
				);

				await queryRunner.query(updateQuery, updateParams);
			}
		});
	}
}
export class UpdateWorkflowCredentials1630419189837 extends UpdateWorkflowCredentials1630330987096 {}
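As the comment at the top of the file says, the migration rewrites each node's credential reference from a bare name to an object carrying the matching entity's id. Concretely, for an illustrative node fragment:

	// Before: credentials referenced by name only
	const before = { credentials: { httpBasicAuth: 'My Basic Auth' } };

	// After: paired with the matching credentials_entity row ('12' is illustrative)
	const after = { credentials: { httpBasicAuth: { id: '12', name: 'My Basic Auth' } } };

The `down` direction inverts this, falling back to `creds.name` when no entity with that id still exists.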
@ -1,9 +1,8 @@
import type { InsertResult, MigrationContext, ReversibleMigration } from '@db/types';
import { v4 as uuid } from 'uuid';
import { loadSurveyFromDisk } from '@db/utils/migrationHelpers';

export class CreateUserManagement1646992772331 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
	async up({ queryRunner, tablePrefix, loadSurveyFromDisk }: MigrationContext) {
		await queryRunner.query(
			`CREATE TABLE ${tablePrefix}role (
				"id" serial NOT NULL,
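Note the signature change above: `loadSurveyFromDisk` is no longer imported from `migrationHelpers` but injected through the migration context (the helper loses its `export` in the migrationHelpers hunk near the end of this diff). One practical effect, sketched under the assumption that the context is a plain object, is that tests can stub the helper without touching the filesystem:

	import type { MigrationContext } from '@db/types';

	// Hypothetical test stub; the survey payload shown is illustrative and
	// only the member used by the migration is provided.
	const contextStub = {
		loadSurveyFromDisk: () => JSON.stringify({ companySize: '<20' }),
	} as Partial<MigrationContext> as MigrationContext;

	const survey = contextStub.loadSurveyFromDisk(); // string | null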
@ -1,69 +1,3 @@
/* eslint-disable @typescript-eslint/no-unsafe-call */
import { AddNodeIds1658930531669 } from '../common/1658930531669-AddNodeIds';

/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { v4 as uuid } from 'uuid';
import { runInBatches } from '@db/utils/migrationHelpers';

// add node ids in workflow objects

export class AddNodeIds1658932090381 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		const workflowsQuery = `
			SELECT id, nodes
			FROM ${tablePrefix}workflow_entity
		`;

		// @ts-ignore
		await runInBatches(queryRunner, workflowsQuery, (workflows) => {
			workflows.forEach(async (workflow) => {
				const nodes = workflow.nodes;
				// @ts-ignore
				nodes.forEach((node) => {
					if (!node.id) {
						node.id = uuid();
					}
				});

				const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
					`
					UPDATE ${tablePrefix}workflow_entity
					SET nodes = :nodes
					WHERE id = '${workflow.id}'
					`,
					{ nodes: JSON.stringify(nodes) },
					{},
				);

				await queryRunner.query(updateQuery, updateParams);
			});
		});
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		const workflowsQuery = `
			SELECT id, nodes
			FROM ${tablePrefix}workflow_entity
		`;

		// @ts-ignore
		await runInBatches(queryRunner, workflowsQuery, (workflows) => {
			workflows.forEach(async (workflow) => {
				const nodes = workflow.nodes;
				// @ts-ignore
				nodes.forEach((node) => delete node.id);

				const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
					`UPDATE ${tablePrefix}workflow_entity
					SET nodes = :nodes
					WHERE id = '${workflow.id}'`,
					{ nodes: JSON.stringify(nodes) },
					{},
				);

				await queryRunner.query(updateQuery, updateParams);
			});
		});
	}
}
export class AddNodeIds1658932090381 extends AddNodeIds1658930531669 {}
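Notice that this variant reads `workflow.nodes` as an already-parsed array, while the SQLite version deleted further down calls `JSON.parse(workflow.nodes)`. A single shared migration has to tolerate both shapes, which is presumably what the `parseJson<T>(data: string | T): T` member of the new `MigrationContext` (see the types hunk near the end) is for. A plausible implementation, using the `jsonParse` helper that n8n-workflow already exports:

	import { jsonParse } from 'n8n-workflow';

	// Accept either a JSON string (SQLite TEXT columns) or an already-parsed
	// value (Postgres JSON columns) and return the parsed value in both cases.
	function parseJson<T>(data: string | T): T {
		return typeof data === 'string' ? jsonParse<T>(data) : data;
	}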
@ -1,32 +1,3 @@
import type { MigrationContext, IrreversibleMigration } from '@db/types';
import { runInBatches } from '@db/utils/migrationHelpers';
import { addJsonKeyToPinDataColumn } from '../sqlite/1659888469333-AddJsonKeyPinData';
import { AddJsonKeyPinData1659888469333 } from '../common/1659888469333-AddJsonKeyPinData';

/**
 * Convert JSON-type `pinData` column in `workflow_entity` table from
 * `{ [nodeName: string]: IDataObject[] }` to `{ [nodeName: string]: INodeExecutionData[] }`
 */
export class AddJsonKeyPinData1659902242948 implements IrreversibleMigration {
	async up(context: MigrationContext) {
		const { queryRunner, tablePrefix } = context;
		const workflowTable = `${tablePrefix}workflow_entity`;

		const PINDATA_SELECT_QUERY = `
			SELECT id, "pinData"
			FROM ${workflowTable}
			WHERE "pinData" IS NOT NULL;
		`;

		const PINDATA_UPDATE_STATEMENT = `
			UPDATE ${workflowTable}
			SET "pinData" = :pinData
			WHERE id = :id;
		`;

		await runInBatches(
			queryRunner,
			PINDATA_SELECT_QUERY,
			addJsonKeyToPinDataColumn(context, PINDATA_UPDATE_STATEMENT),
		);
	}
}
export class AddJsonKeyPinData1659902242948 extends AddJsonKeyPinData1659888469333 {}
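The conversion the base migration performs (its full body appears in the SQLite deletion later in this diff) wraps every pinned item that lacks a `json` key:

	import type { IDataObject, INodeExecutionData } from 'n8n-workflow';

	// Illustrative before/after for one node's pinData.
	const old: IDataObject[] = [{ name: 'Alice' }];
	const wrapped: INodeExecutionData[] = old.map((item) => ({ json: item }));
	// [{ name: 'Alice' }]  ->  [{ json: { name: 'Alice' } }]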
@ -1,31 +1,3 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { v4 as uuidv4 } from 'uuid';
import { AddWorkflowVersionIdColumn1669739707124 } from '../common/1669739707124-AddWorkflowVersionIdColumn';

export class AddWorkflowVersionIdColumn1669739707126 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflow_entity ADD COLUMN "versionId" CHAR(36)`,
		);

		const workflowIds = (await queryRunner.query(`
			SELECT id
			FROM ${tablePrefix}workflow_entity
		`)) as Array<{ id: number }>;

		for (const { id } of workflowIds) {
			const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
				`UPDATE ${tablePrefix}workflow_entity
				SET "versionId" = :versionId
				WHERE id = '${id}'`,
				{ versionId: uuidv4() },
				{},
			);

			await queryRunner.query(updateQuery, updateParams);
		}
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity DROP COLUMN "versionId"`);
	}
}
export class AddWorkflowVersionIdColumn1669739707126 extends AddWorkflowVersionIdColumn1669739707124 {}
@ -1,53 +1,3 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { StatisticsNames } from '@db/entities/WorkflowStatistics';
import { RemoveWorkflowDataLoadedFlag1671726148419 } from '../common/1671726148419-RemoveWorkflowDataLoadedFlag';

export class RemoveWorkflowDataLoadedFlag1671726148421 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		// If any existing workflow has dataLoaded set to true, insert the relevant information to the statistics table
		const workflowIds = (await queryRunner.query(`
			SELECT id, "dataLoaded"
			FROM ${tablePrefix}workflow_entity
		`)) as Array<{ id: number; dataLoaded: boolean }>;

		workflowIds.map(async ({ id, dataLoaded }) => {
			if (dataLoaded) {
				const [insertQuery, insertParams] = queryRunner.connection.driver.escapeQueryWithParameters(
					`
					INSERT INTO ${tablePrefix}workflow_statistics ("workflowId", name, count, "latestEvent") VALUES
					(:id, :name, 1, CURRENT_TIMESTAMP(3))
					`,
					{ id, name: StatisticsNames.dataLoaded },
					{},
				);

				return queryRunner.query(insertQuery, insertParams);
			}
			return undefined;
		});

		await queryRunner.query(`ALTER TABLE ${tablePrefix}workflow_entity DROP COLUMN "dataLoaded"`);
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}workflow_entity ADD COLUMN "dataLoaded" BOOLEAN DEFAULT false`,
		);

		// Search through statistics for any workflows that have the dataLoaded stat
		const workflowsIds = (await queryRunner.query(`
			SELECT "workflowId"
			FROM ${tablePrefix}workflow_statistics
			WHERE name = '${StatisticsNames.dataLoaded}'
		`)) as Array<{ workflowId: string }>;
		workflowsIds.map(async ({ workflowId }) => {
			return queryRunner.query(`
			UPDATE ${tablePrefix}workflow_entity
			SET "dataLoaded" = true
			WHERE id = '${workflowId}'`);
		});

		await queryRunner.query(
			`DELETE FROM ${tablePrefix}workflow_statistics WHERE name = '${StatisticsNames.dataLoaded}'`,
		);
	}
}
export class RemoveWorkflowDataLoadedFlag1671726148421 extends RemoveWorkflowDataLoadedFlag1671726148419 {}
@ -1,52 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { LDAP_DEFAULT_CONFIGURATION, LDAP_FEATURE_NAME } from '@/Ldap/constants';

export class CreateLdapEntities1674509946020 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(
			`ALTER TABLE "${tablePrefix}user" ADD COLUMN disabled BOOLEAN NOT NULL DEFAULT false;`,
		);

		await queryRunner.query(`
			INSERT INTO ${tablePrefix}settings (key, value, "loadOnStartup")
			VALUES ('${LDAP_FEATURE_NAME}', '${JSON.stringify(LDAP_DEFAULT_CONFIGURATION)}', true)
		`);

		await queryRunner.query(
			`CREATE TABLE IF NOT EXISTS "${tablePrefix}auth_identity" (
				"userId" uuid REFERENCES "${tablePrefix}user" (id),
				"providerId" VARCHAR(64) NOT NULL,
				"providerType" VARCHAR(32) NOT NULL,
				"createdAt" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
				"updatedAt" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
				PRIMARY KEY("providerId", "providerType")
			);`,
		);

		await queryRunner.query(
			`CREATE TABLE IF NOT EXISTS "${tablePrefix}auth_provider_sync_history" (
				"id" serial NOT NULL PRIMARY KEY,
				"providerType" VARCHAR(32) NOT NULL,
				"runMode" TEXT NOT NULL,
				"status" TEXT NOT NULL,
				"startedAt" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
				"endedAt" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
				"scanned" INTEGER NOT NULL,
				"created" INTEGER NOT NULL,
				"updated" INTEGER NOT NULL,
				"disabled" INTEGER NOT NULL,
				"error" TEXT
			);`,
		);
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(`DROP TABLE "${tablePrefix}auth_provider_sync_history"`);
		await queryRunner.query(`DROP TABLE "${tablePrefix}auth_identity"`);

		await queryRunner.query(
			`DELETE FROM ${tablePrefix}settings WHERE key = '${LDAP_FEATURE_NAME}'`,
		);
		await queryRunner.query(`ALTER TABLE "${tablePrefix}user" DROP COLUMN disabled`);
	}
}
@ -1,59 +0,0 @@
import type { IConnections, INode } from 'n8n-workflow';
import type { MigrationContext, IrreversibleMigration } from '@db/types';
import { NodeTypes } from '@/NodeTypes';
import { Container } from 'typedi';

export class PurgeInvalidWorkflowConnections1675940580449 implements IrreversibleMigration {
	async up({ queryRunner, tablePrefix, migrationName, logger }: MigrationContext) {
		const workflows = (await queryRunner.query(`
			SELECT id, nodes, connections
			FROM "${tablePrefix}workflow_entity"
		`)) as Array<{ id: number; nodes: INode[]; connections: IConnections }>;

		const nodeTypes = Container.get(NodeTypes);

		workflows.forEach(async (workflow) => {
			const { connections, nodes } = workflow;

			const nodesThatCannotReceiveInput: string[] = nodes.reduce((acc, node) => {
				try {
					const nodeType = nodeTypes.getByNameAndVersion(node.type, node.typeVersion);
					if ((nodeType.description.inputs?.length ?? []) === 0) {
						acc.push(node.name);
					}
				} catch (error) {
					logger.warn(`Migration ${migrationName} failed with error: ${(error as Error).message}`);
				}
				return acc;
			}, [] as string[]);

			Object.keys(connections).forEach((sourceNodeName) => {
				const connection = connections[sourceNodeName];
				const outputs = Object.keys(connection);

				outputs.forEach((outputConnectionName /* Like `main` */) => {
					const outputConnection = connection[outputConnectionName];

					// It filters out all connections that are connected to a node that cannot receive input
					outputConnection.forEach((outputConnectionItem, outputConnectionItemIdx) => {
						outputConnection[outputConnectionItemIdx] = outputConnectionItem.filter(
							(outgoingConnections) =>
								!nodesThatCannotReceiveInput.includes(outgoingConnections.node),
						);
					});
				});
			});

			// Update database with new connections
			const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
				`UPDATE "${tablePrefix}workflow_entity"
				SET connections = :connections
				WHERE id = '${workflow.id}'`,
				{ connections: JSON.stringify(connections) },
				{},
			);

			await queryRunner.query(updateQuery, updateParams);
		});
	}
}
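To make the purge concrete: `connections` maps each source node to per-output arrays of targets, and any target that resolves to a node type with zero inputs (a trigger, for example) is filtered out. An illustrative shape, not taken from a real workflow:

	import type { IConnections } from 'n8n-workflow';

	const connections: IConnections = {
		Start: {
			main: [
				[
					{ node: 'Some Trigger', type: 'main', index: 0 }, // invalid target: triggers take no input, dropped
					{ node: 'Set', type: 'main', index: 0 }, // valid target, kept
				],
			],
		},
	};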
@ -28,8 +28,8 @@ import { AddTriggerCountColumn1669823906995 } from './1669823906995-AddTriggerCo
import { RemoveWorkflowDataLoadedFlag1671726148421 } from './1671726148421-RemoveWorkflowDataLoadedFlag';
import { MessageEventBusDestinations1671535397530 } from './1671535397530-MessageEventBusDestinations';
import { DeleteExecutionsWithWorkflows1673268682475 } from './1673268682475-DeleteExecutionsWithWorkflows';
import { CreateLdapEntities1674509946020 } from './1674509946020-CreateLdapEntities';
import { PurgeInvalidWorkflowConnections1675940580449 } from './1675940580449-PurgeInvalidWorkflowConnections';
import { CreateLdapEntities1674509946020 } from '../common/1674509946020-CreateLdapEntities';
import { PurgeInvalidWorkflowConnections1675940580449 } from '../common/1675940580449-PurgeInvalidWorkflowConnections';
import { AddStatusToExecutions1674138566000 } from './1674138566000-AddStatusToExecutions';
import { MigrateExecutionStatus1676996103000 } from './1676996103000-MigrateExecutionStatus';
import { UpdateRunningExecutionStatus1677236854063 } from './1677236854063-UpdateRunningExecutionStatus';
@ -1,57 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';

export class UniqueWorkflowNames1620821879465 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		const workflowNames = (await queryRunner.query(`
			SELECT name
			FROM "${tablePrefix}workflow_entity"
		`)) as Array<{ name: string }>;

		for (const { name } of workflowNames) {
			const [duplicatesQuery, parameters] = queryRunner.connection.driver.escapeQueryWithParameters(
				`
				SELECT id, name
				FROM "${tablePrefix}workflow_entity"
				WHERE name = :name
				ORDER BY createdAt ASC
				`,
				{ name },
				{},
			);

			const duplicates = (await queryRunner.query(duplicatesQuery, parameters)) as Array<{
				id: number;
				name: string;
			}>;

			if (duplicates.length > 1) {
				await Promise.all(
					// eslint-disable-next-line @typescript-eslint/no-shadow
					duplicates.map(async ({ id, name }, index: number) => {
						if (index === 0) return;
						const [updateQuery, updateParams] =
							queryRunner.connection.driver.escapeQueryWithParameters(
								`
								UPDATE "${tablePrefix}workflow_entity"
								SET name = :name
								WHERE id = '${id}'
								`,
								{ name: `${name} ${index + 1}` },
								{},
							);

						return queryRunner.query(updateQuery, updateParams);
					}),
				);
			}
		}

		await queryRunner.query(
			`CREATE UNIQUE INDEX "IDX_${tablePrefix}943d8f922be094eb507cb9a7f9" ON "${tablePrefix}workflow_entity" ("name") `,
		);
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(`DROP INDEX "IDX_${tablePrefix}943d8f922be094eb507cb9a7f9"`);
	}
}
@ -1,309 +0,0 @@
/* eslint-disable n8n-local-rules/no-uncaught-json-parse */

/* eslint-disable @typescript-eslint/prefer-nullish-coalescing */
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-argument */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { runInBatches } from '@db/utils/migrationHelpers';

// replacing the credentials in workflows and execution
// `nodeType: name` changes to `nodeType: { id, name }`

export class UpdateWorkflowCredentials1630330987096 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		const credentialsEntities = (await queryRunner.query(`
			SELECT id, name, type
			FROM "${tablePrefix}credentials_entity"
		`)) as Array<{ id: string; name: string; type: string }>;

		const workflowsQuery = `
			SELECT id, nodes
			FROM "${tablePrefix}workflow_entity"
		`;

		// @ts-ignore
		await runInBatches(queryRunner, workflowsQuery, (workflows) => {
			workflows.forEach(async (workflow) => {
				const nodes = JSON.parse(workflow.nodes);
				let credentialsUpdated = false;
				// @ts-ignore
				nodes.forEach((node) => {
					if (node.credentials) {
						const allNodeCredentials = Object.entries(node.credentials);
						for (const [type, name] of allNodeCredentials) {
							if (typeof name === 'string') {
								const matchingCredentials = credentialsEntities.find(
									// @ts-ignore
									(credentials) => credentials.name === name && credentials.type === type,
								);
								node.credentials[type] = { id: matchingCredentials?.id || null, name };
								credentialsUpdated = true;
							}
						}
					}
				});
				if (credentialsUpdated) {
					const [updateQuery, updateParams] =
						queryRunner.connection.driver.escapeQueryWithParameters(
							`
							UPDATE "${tablePrefix}workflow_entity"
							SET nodes = :nodes
							WHERE id = '${workflow.id}'
							`,
							{ nodes: JSON.stringify(nodes) },
							{},
						);

					await queryRunner.query(updateQuery, updateParams);
				}
			});
		});

		const waitingExecutionsQuery = `
			SELECT id, "workflowData"
			FROM "${tablePrefix}execution_entity"
			WHERE "waitTill" IS NOT NULL AND finished = 0
		`;
		// @ts-ignore
		await runInBatches(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
			waitingExecutions.forEach(async (execution) => {
				const data = JSON.parse(execution.workflowData);
				let credentialsUpdated = false;
				// @ts-ignore
				data.nodes.forEach((node) => {
					if (node.credentials) {
						const allNodeCredentials = Object.entries(node.credentials);
						for (const [type, name] of allNodeCredentials) {
							if (typeof name === 'string') {
								const matchingCredentials = credentialsEntities.find(
									// @ts-ignore
									(credentials) => credentials.name === name && credentials.type === type,
								);
								node.credentials[type] = { id: matchingCredentials?.id || null, name };
								credentialsUpdated = true;
							}
						}
					}
				});
				if (credentialsUpdated) {
					const [updateQuery, updateParams] =
						queryRunner.connection.driver.escapeQueryWithParameters(
							`
							UPDATE "${tablePrefix}execution_entity"
							SET "workflowData" = :data
							WHERE id = '${execution.id}'
							`,
							{ data: JSON.stringify(data) },
							{},
						);

					await queryRunner.query(updateQuery, updateParams);
				}
			});
		});

		const retryableExecutions = await queryRunner.query(`
			SELECT id, "workflowData"
			FROM "${tablePrefix}execution_entity"
			WHERE "waitTill" IS NULL AND finished = 0 AND mode != 'retry'
			ORDER BY "startedAt" DESC
			LIMIT 200
		`);
		// @ts-ignore
		retryableExecutions.forEach(async (execution) => {
			const data = JSON.parse(execution.workflowData);
			let credentialsUpdated = false;
			// @ts-ignore
			data.nodes.forEach((node) => {
				if (node.credentials) {
					const allNodeCredentials = Object.entries(node.credentials);
					for (const [type, name] of allNodeCredentials) {
						if (typeof name === 'string') {
							const matchingCredentials = credentialsEntities.find(
								// @ts-ignore
								(credentials) => credentials.name === name && credentials.type === type,
							);
							node.credentials[type] = { id: matchingCredentials?.id || null, name };
							credentialsUpdated = true;
						}
					}
				}
			});
			if (credentialsUpdated) {
				const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
					`
					UPDATE "${tablePrefix}execution_entity"
					SET "workflowData" = :data
					WHERE id = '${execution.id}'
					`,
					{ data: JSON.stringify(data) },
					{},
				);

				await queryRunner.query(updateQuery, updateParams);
			}
		});
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		const credentialsEntities = (await queryRunner.query(`
			SELECT id, name, type
			FROM "${tablePrefix}credentials_entity"
		`)) as Array<{ id: string; name: string; type: string }>;

		const workflowsQuery = `
			SELECT id, nodes
			FROM "${tablePrefix}workflow_entity"
		`;

		// @ts-ignore
		await runInBatches(queryRunner, workflowsQuery, (workflows) => {
			// @ts-ignore
			workflows.forEach(async (workflow) => {
				const nodes = JSON.parse(workflow.nodes);
				let credentialsUpdated = false;
				// @ts-ignore
				nodes.forEach((node) => {
					if (node.credentials) {
						const allNodeCredentials = Object.entries(node.credentials);
						for (const [type, creds] of allNodeCredentials) {
							if (typeof creds === 'object') {
								const matchingCredentials = credentialsEntities.find(
									// @ts-ignore
									// double-equals because creds.id can be string or number
									// eslint-disable-next-line eqeqeq
									(credentials) => credentials.id == creds.id && credentials.type === type,
								);
								if (matchingCredentials) {
									node.credentials[type] = matchingCredentials.name;
								} else {
									// @ts-ignore
									node.credentials[type] = creds.name;
								}
								credentialsUpdated = true;
							}
						}
					}
				});
				if (credentialsUpdated) {
					const [updateQuery, updateParams] =
						queryRunner.connection.driver.escapeQueryWithParameters(
							`
							UPDATE "${tablePrefix}workflow_entity"
							SET nodes = :nodes
							WHERE id = '${workflow.id}'
							`,
							{ nodes: JSON.stringify(nodes) },
							{},
						);

					await queryRunner.query(updateQuery, updateParams);
				}
			});
		});

		const waitingExecutionsQuery = `
			SELECT id, "workflowData"
			FROM "${tablePrefix}execution_entity"
			WHERE "waitTill" IS NOT NULL AND finished = 0
		`;

		// @ts-ignore
		await runInBatches(queryRunner, waitingExecutionsQuery, (waitingExecutions) => {
			// @ts-ignore
			waitingExecutions.forEach(async (execution) => {
				const data = JSON.parse(execution.workflowData);
				let credentialsUpdated = false;
				// @ts-ignore
				data.nodes.forEach((node) => {
					if (node.credentials) {
						const allNodeCredentials = Object.entries(node.credentials);
						for (const [type, creds] of allNodeCredentials) {
							if (typeof creds === 'object') {
								const matchingCredentials = credentialsEntities.find(
									// @ts-ignore
									// double-equals because creds.id can be string or number
									// eslint-disable-next-line eqeqeq
									(credentials) => credentials.id == creds.id && credentials.type === type,
								);
								if (matchingCredentials) {
									node.credentials[type] = matchingCredentials.name;
								} else {
									// @ts-ignore
									node.credentials[type] = creds.name;
								}
								credentialsUpdated = true;
							}
						}
					}
				});
				if (credentialsUpdated) {
					const [updateQuery, updateParams] =
						queryRunner.connection.driver.escapeQueryWithParameters(
							`
							UPDATE "${tablePrefix}execution_entity"
							SET "workflowData" = :data
							WHERE id = '${execution.id}'
							`,
							{ data: JSON.stringify(data) },
							{},
						);

					await queryRunner.query(updateQuery, updateParams);
				}
			});
		});

		const retryableExecutions = await queryRunner.query(`
			SELECT id, "workflowData"
			FROM "${tablePrefix}execution_entity"
			WHERE "waitTill" IS NULL AND finished = 0 AND mode != 'retry'
			ORDER BY "startedAt" DESC
			LIMIT 200
		`);

		// @ts-ignore
		retryableExecutions.forEach(async (execution) => {
			const data = JSON.parse(execution.workflowData);
			let credentialsUpdated = false;
			// @ts-ignore
			data.nodes.forEach((node) => {
				if (node.credentials) {
					const allNodeCredentials = Object.entries(node.credentials);
					for (const [type, creds] of allNodeCredentials) {
						if (typeof creds === 'object') {
							const matchingCredentials = credentialsEntities.find(
								// @ts-ignore
								// double-equals because creds.id can be string or number
								// eslint-disable-next-line eqeqeq
								(credentials) => credentials.id == creds.id && credentials.type === type,
							);
							if (matchingCredentials) {
								node.credentials[type] = matchingCredentials.name;
							} else {
								// @ts-ignore
								node.credentials[type] = creds.name;
							}
							credentialsUpdated = true;
						}
					}
				}
			});
			if (credentialsUpdated) {
				const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
					`
					UPDATE "${tablePrefix}execution_entity"
					SET "workflowData" = :data
					WHERE id = '${execution.id}'
					`,
					{ data: JSON.stringify(data) },
					{},
				);

				await queryRunner.query(updateQuery, updateParams);
			}
		});
	}
}
@ -1,9 +1,8 @@
import type { InsertResult, MigrationContext, ReversibleMigration } from '@db/types';
import { v4 as uuid } from 'uuid';
import { loadSurveyFromDisk } from '@db/utils/migrationHelpers';

export class CreateUserManagement1646992772331 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
	async up({ queryRunner, tablePrefix, loadSurveyFromDisk }: MigrationContext) {
		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}role" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "name" varchar(32) NOT NULL, "scope" varchar NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), CONSTRAINT "UQ_${tablePrefix}5b49d0f504f7ef31045a1fb2eb8" UNIQUE ("scope", "name"))`,
		);
@ -1,73 +0,0 @@
/* eslint-disable @typescript-eslint/no-unsafe-argument */

/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable n8n-local-rules/no-uncaught-json-parse */
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
import type { INode } from 'n8n-workflow';
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { runInBatches } from '@db/utils/migrationHelpers';
import { v4 as uuid } from 'uuid';

// add node ids in workflow objects

export class AddNodeIds1658930531669 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		const workflowsQuery = `
			SELECT id, nodes
			FROM "${tablePrefix}workflow_entity"
		`;

		// @ts-ignore
		await runInBatches(queryRunner, workflowsQuery, (workflows) => {
			workflows.forEach(async (workflow) => {
				const nodes = JSON.parse(workflow.nodes);
				nodes.forEach((node: INode) => {
					if (!node.id) {
						node.id = uuid();
					}
				});

				const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
					`
					UPDATE "${tablePrefix}workflow_entity"
					SET nodes = :nodes
					WHERE id = '${workflow.id}'
					`,
					{ nodes: JSON.stringify(nodes) },
					{},
				);

				await queryRunner.query(updateQuery, updateParams);
			});
		});
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		const workflowsQuery = `
			SELECT id, nodes
			FROM "${tablePrefix}workflow_entity"
		`;

		// @ts-ignore
		await runInBatches(queryRunner, workflowsQuery, (workflows) => {
			workflows.forEach(async (workflow) => {
				const nodes = JSON.parse(workflow.nodes);
				// @ts-ignore
				nodes.forEach((node) => delete node.id);

				const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
					`
					UPDATE "${tablePrefix}workflow_entity"
					SET nodes = :nodes
					WHERE id = '${workflow.id}'
					`,
					{ nodes: JSON.stringify(nodes) },
					{},
				);

				await queryRunner.query(updateQuery, updateParams);
			});
		});
	}
}
@ -1,115 +0,0 @@
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
/* eslint-disable @typescript-eslint/no-use-before-define */
import type { IDataObject, INodeExecutionData } from 'n8n-workflow';
import type { MigrationContext, IrreversibleMigration } from '@db/types';
import { runInBatches, escapeQuery } from '@db/utils/migrationHelpers';

/**
 * Convert TEXT-type `pinData` column in `workflow_entity` table from
 * `{ [nodeName: string]: IDataObject[] }` to `{ [nodeName: string]: INodeExecutionData[] }`
 */
export class AddJsonKeyPinData1659888469333 implements IrreversibleMigration {
	async up(context: MigrationContext) {
		const { queryRunner, tablePrefix } = context;
		const workflowTable = `${tablePrefix}workflow_entity`;

		const PINDATA_SELECT_QUERY = `
			SELECT id, pinData
			FROM "${workflowTable}"
			WHERE pinData IS NOT NULL;
		`;

		const PINDATA_UPDATE_STATEMENT = `
			UPDATE "${workflowTable}"
			SET "pinData" = :pinData
			WHERE id = :id;
		`;

		await runInBatches(
			queryRunner,
			PINDATA_SELECT_QUERY,
			addJsonKeyToPinDataColumn(context, PINDATA_UPDATE_STATEMENT),
		);
	}
}

namespace PinData {
	export type Old = { [nodeName: string]: IDataObject[] };

	export type New = { [nodeName: string]: INodeExecutionData[] };

	export type FetchedWorkflow = { id: number; pinData: string | Old };
}

function isObjectLiteral(maybeObject: unknown): maybeObject is { [key: string]: string } {
	return typeof maybeObject === 'object' && maybeObject !== null && !Array.isArray(maybeObject);
}

function isJsonKeyObject(item: unknown): item is {
	json: unknown;
	[keys: string]: unknown;
} {
	if (!isObjectLiteral(item)) return false;
	return Object.keys(item).includes('json');
}

export const addJsonKeyToPinDataColumn =
	({ queryRunner }: MigrationContext, updateStatement: string) =>
	async (fetchedWorkflows: PinData.FetchedWorkflow[]) => {
		await Promise.all(
			makeUpdateParams(fetchedWorkflows).map(async (param) => {
				const params = {
					pinData: param.pinData,
					id: param.id,
				};

				const [escapedStatement, escapedParams] = escapeQuery(queryRunner, updateStatement, params);
				return queryRunner.query(escapedStatement, escapedParams);
			}),
		);
	};

function makeUpdateParams(fetchedWorkflows: PinData.FetchedWorkflow[]) {
	return fetchedWorkflows.reduce<PinData.FetchedWorkflow[]>(
		(updateParams, { id, pinData: rawPinData }) => {
			let pinDataPerWorkflow: PinData.Old | PinData.New;

			if (typeof rawPinData === 'string') {
				try {
					pinDataPerWorkflow = JSON.parse(rawPinData);
				} catch {
					pinDataPerWorkflow = {};
				}
			} else {
				pinDataPerWorkflow = rawPinData;
			}

			const newPinDataPerWorkflow = Object.keys(pinDataPerWorkflow).reduce<PinData.New>(
				// eslint-disable-next-line @typescript-eslint/no-shadow
				(newPinDataPerWorkflow, nodeName) => {
					let pinDataPerNode = pinDataPerWorkflow[nodeName];

					if (!Array.isArray(pinDataPerNode)) {
						pinDataPerNode = [pinDataPerNode];
					}

					if (pinDataPerNode.every((item) => item.json)) return newPinDataPerWorkflow;

					newPinDataPerWorkflow[nodeName] = pinDataPerNode.map((item) =>
						isJsonKeyObject(item) ? item : { json: item },
					);

					return newPinDataPerWorkflow;
				},
				{},
			);

			if (Object.keys(newPinDataPerWorkflow).length > 0) {
				updateParams.push({ id, pinData: JSON.stringify(newPinDataPerWorkflow) });
			}

			return updateParams;
		},
		[],
	);
}
@ -1,33 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { v4 as uuidv4 } from 'uuid';

export class AddWorkflowVersionIdColumn1669739707124 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(
			`ALTER TABLE \`${tablePrefix}workflow_entity\` ADD COLUMN "versionId" char(36)`,
		);

		const workflowIds = (await queryRunner.query(`
			SELECT id
			FROM "${tablePrefix}workflow_entity"
		`)) as Array<{ id: number }>;

		for (const { id } of workflowIds) {
			const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
				`UPDATE "${tablePrefix}workflow_entity"
				SET versionId = :versionId
				WHERE id = '${id}'`,
				{ versionId: uuidv4() },
				{},
			);

			await queryRunner.query(updateQuery, updateParams);
		}
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(
			`ALTER TABLE \`${tablePrefix}workflow_entity\` DROP COLUMN "versionId"`,
		);
	}
}
@ -1,55 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { StatisticsNames } from '@db/entities/WorkflowStatistics';

export class RemoveWorkflowDataLoadedFlag1671726148419 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		// If any existing workflow has dataLoaded set to true, insert the relevant information to the statistics table
		const workflowIds = (await queryRunner.query(`
			SELECT id, dataLoaded
			FROM "${tablePrefix}workflow_entity"
		`)) as Array<{ id: number; dataLoaded: boolean }>;

		workflowIds.map(async ({ id, dataLoaded }) => {
			if (dataLoaded) {
				const [insertQuery, insertParams] = queryRunner.connection.driver.escapeQueryWithParameters(
					`
					INSERT INTO "${tablePrefix}workflow_statistics" (workflowId, name, count, latestEvent) VALUES
					(:id, :name, 1, STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW'))
					`,
					{ id, name: StatisticsNames.dataLoaded },
					{},
				);

				return queryRunner.query(insertQuery, insertParams);
			}
			return undefined;
		});

		await queryRunner.query(
			`ALTER TABLE \`${tablePrefix}workflow_entity\` DROP COLUMN "dataLoaded"`,
		);
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(
			`ALTER TABLE \`${tablePrefix}workflow_entity\` ADD COLUMN "dataLoaded" BOOLEAN DEFAULT false`,
		);

		// Search through statistics for any workflows that have the dataLoaded stat
		const workflowsIds = (await queryRunner.query(`
			SELECT workflowId
			FROM "${tablePrefix}workflow_statistics"
			WHERE name = '${StatisticsNames.dataLoaded}'
		`)) as Array<{ workflowId: string }>;
		workflowsIds.map(async ({ workflowId }) => {
			return queryRunner.query(`
			UPDATE "${tablePrefix}workflow_entity"
			SET dataLoaded = true
			WHERE id = '${workflowId}'`);
		});

		await queryRunner.query(
			`DELETE FROM "${tablePrefix}workflow_statistics" WHERE name = '${StatisticsNames.dataLoaded}'`,
		);
	}
}
@ -1,52 +0,0 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { LDAP_DEFAULT_CONFIGURATION, LDAP_FEATURE_NAME } from '@/Ldap/constants';

export class CreateLdapEntities1674509946020 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(
			`ALTER TABLE ${tablePrefix}user ADD COLUMN disabled BOOLEAN NOT NULL DEFAULT false;`,
		);

		await queryRunner.query(`
			INSERT INTO "${tablePrefix}settings" (key, value, loadOnStartup)
			VALUES ('${LDAP_FEATURE_NAME}', '${JSON.stringify(LDAP_DEFAULT_CONFIGURATION)}', true)
		`);

		await queryRunner.query(
			`CREATE TABLE IF NOT EXISTS "${tablePrefix}auth_identity" (
				"userId" VARCHAR(36) REFERENCES "${tablePrefix}user" (id),
				"providerId" VARCHAR(64) NOT NULL,
				"providerType" VARCHAR(32) NOT NULL,
				"createdAt" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
				"updatedAt" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
				PRIMARY KEY("providerId", "providerType")
			);`,
		);

		await queryRunner.query(
			`CREATE TABLE IF NOT EXISTS "${tablePrefix}auth_provider_sync_history" (
				"id" INTEGER PRIMARY KEY AUTOINCREMENT,
				"providerType" VARCHAR(32) NOT NULL,
				"runMode" TEXT NOT NULL,
				"status" TEXT NOT NULL,
				"startedAt" DATETIME NOT NULL,
				"endedAt" DATETIME NOT NULL,
				"scanned" INTEGER NOT NULL,
				"created" INTEGER NOT NULL,
				"updated" INTEGER NOT NULL,
				"disabled" INTEGER NOT NULL,
				"error" TEXT
			);`,
		);
	}

	async down({ queryRunner, tablePrefix }: MigrationContext) {
		await queryRunner.query(`DROP TABLE "${tablePrefix}auth_provider_sync_history"`);
		await queryRunner.query(`DROP TABLE "${tablePrefix}auth_identity"`);

		await queryRunner.query(
			`DELETE FROM "${tablePrefix}settings" WHERE key = '${LDAP_FEATURE_NAME}'`,
		);
		await queryRunner.query(`ALTER TABLE "${tablePrefix}user" DROP COLUMN disabled`);
	}
}
@ -1,63 +0,0 @@
import type { IConnections, INode } from 'n8n-workflow';
import { jsonParse } from 'n8n-workflow';
import type { MigrationContext, IrreversibleMigration } from '@db/types';
import { NodeTypes } from '@/NodeTypes';
import { Container } from 'typedi';

export class PurgeInvalidWorkflowConnections1675940580449 implements IrreversibleMigration {
	async up({ queryRunner, tablePrefix, migrationName, logger }: MigrationContext) {
		const workflows = (await queryRunner.query(`
			SELECT id, nodes, connections
			FROM "${tablePrefix}workflow_entity"
		`)) as Array<{ id: number; nodes: string; connections: string }>;

		const nodeTypes = Container.get(NodeTypes);

		workflows.forEach(async (workflow) => {
			const connections = jsonParse<IConnections>(workflow.connections);
			const nodes = jsonParse<INode[]>(workflow.nodes);

			const nodesThatCannotReceiveInput: string[] = nodes.reduce((acc, node) => {
				try {
					const nodeType = nodeTypes.getByNameAndVersion(node.type, node.typeVersion);
					if ((nodeType.description.inputs?.length ?? []) === 0) {
						acc.push(node.name);
					}
				} catch (error) {
					logger.warn(`Migration ${migrationName} failed with error: ${(error as Error).message}`);
				}
				return acc;
			}, [] as string[]);

			Object.keys(connections).forEach((sourceNodeName) => {
				const connection = connections[sourceNodeName];
				const outputs = Object.keys(connection);

				outputs.forEach((outputConnectionName /* Like `main` */) => {
					const outputConnection = connection[outputConnectionName];

					// It filters out all connections that are connected to a node that cannot receive input
					outputConnection.forEach((outputConnectionItem, outputConnectionItemIdx) => {
						outputConnection[outputConnectionItemIdx] = outputConnectionItem.filter(
							(outgoingConnections) =>
								!nodesThatCannotReceiveInput.includes(outgoingConnections.node),
						);
					});
				});
			});

			// Update database with new connections
			const [updateQuery, updateParams] = queryRunner.connection.driver.escapeQueryWithParameters(
				`
				UPDATE "${tablePrefix}workflow_entity"
				SET connections = :connections
				WHERE id = '${workflow.id}'
				`,
				{ connections: JSON.stringify(connections) },
				{},
			);

			await queryRunner.query(updateQuery, updateParams);
		});
	}
}
@ -3,7 +3,6 @@ import path from 'path';
import { UserSettings } from 'n8n-core';
import type { MigrationContext, IrreversibleMigration } from '@db/types';
import config from '@/config';
import { copyTable } from '@db/utils/migrationHelpers';

export class MigrateIntegerKeysToString1690000000002 implements IrreversibleMigration {
	transaction = false as const;

@ -118,7 +117,7 @@ export class MigrateIntegerKeysToString1690000000002 implements IrreversibleMigr
	"data" text NOT NULL, "status" varchar,
	FOREIGN KEY("workflowId") REFERENCES "${tablePrefix}workflow_entity" ("id") ON DELETE CASCADE
);`);
		await copyTable({ tablePrefix, queryRunner }, 'execution_entity', 'TMP_execution_entity');
		await context.copyTable('execution_entity', 'TMP_execution_entity');
		await queryRunner.query(`DROP TABLE "${tablePrefix}execution_entity";`);
		await queryRunner.query(
			`ALTER TABLE "${tablePrefix}TMP_execution_entity" RENAME TO "${tablePrefix}execution_entity";`,
@ -1,8 +1,9 @@
import type { MigrationContext, ReversibleMigration } from '@db/types';
import { copyTable } from '@db/utils/migrationHelpers';

export class SeparateExecutionData1690000000010 implements ReversibleMigration {
	async up({ queryRunner, tablePrefix }: MigrationContext): Promise<void> {
	async up(context: MigrationContext): Promise<void> {
		const { queryRunner, tablePrefix } = context;

		await queryRunner.query(
			`CREATE TABLE "${tablePrefix}execution_data" (
				"executionId" int PRIMARY KEY NOT NULL,

@ -12,8 +13,7 @@ export class SeparateExecutionData1690000000010 implements ReversibleMigration {
			)`,
		);

		await copyTable(
			{ tablePrefix, queryRunner },
		await context.copyTable(
			'execution_entity',
			'execution_data',
			['id', 'workflowData', 'data'],
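The `copyTable` helper becomes a context method here, and its widened signature (declared in the types hunk below) takes optional source and target field lists plus a batch size. A hedged usage sketch; the `toFields` list and batch size shown are illustrative, not from the diff:

	import type { MigrationContext } from '@db/types';

	async function copyExecutionData(context: MigrationContext) {
		// Copy three columns into the new table, mapping `id` onto the new
		// primary-key column and copying in chunks of 200 rows.
		await context.copyTable(
			'execution_entity',
			'execution_data',
			['id', 'workflowData', 'data'],
			['executionId', 'workflowData', 'data'],
			200,
		);
	}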
@ -5,9 +5,9 @@ import { CreateIndexStoppedAt1594825041918 } from './1594825041918-CreateIndexSt
|
|||
import { AddWebhookId1611071044839 } from './1611071044839-AddWebhookId';
|
||||
import { MakeStoppedAtNullable1607431743769 } from './1607431743769-MakeStoppedAtNullable';
|
||||
import { CreateTagEntity1617213344594 } from './1617213344594-CreateTagEntity';
|
||||
import { UniqueWorkflowNames1620821879465 } from './1620821879465-UniqueWorkflowNames';
|
||||
import { UniqueWorkflowNames1620821879465 } from '../common/1620821879465-UniqueWorkflowNames';
|
||||
import { AddWaitColumn1621707690587 } from './1621707690587-AddWaitColumn';
|
||||
import { UpdateWorkflowCredentials1630330987096 } from './1630330987096-UpdateWorkflowCredentials';
|
||||
import { UpdateWorkflowCredentials1630330987096 } from '../common/1630330987096-UpdateWorkflowCredentials';
|
||||
import { AddExecutionEntityIndexes1644421939510 } from './1644421939510-AddExecutionEntityIndexes';
|
||||
import { CreateUserManagement1646992772331 } from './1646992772331-CreateUserManagement';
|
||||
import { LowerCaseUserEmail1648740597343 } from './1648740597343-LowerCaseUserEmail';
|
||||
|
@@ -15,20 +15,20 @@ import { AddUserSettings1652367743993 } from './1652367743993-AddUserSettings';
 import { CommunityNodes1652254514001 } from './1652254514001-CommunityNodes';
 import { AddAPIKeyColumn1652905585850 } from './1652905585850-AddAPIKeyColumn';
 import { IntroducePinData1654089251344 } from './1654089251344-IntroducePinData';
-import { AddNodeIds1658930531669 } from './1658930531669-AddNodeIds';
-import { AddJsonKeyPinData1659888469333 } from './1659888469333-AddJsonKeyPinData';
+import { AddNodeIds1658930531669 } from '../common/1658930531669-AddNodeIds';
+import { AddJsonKeyPinData1659888469333 } from '../common/1659888469333-AddJsonKeyPinData';
 import { CreateCredentialsUserRole1660062385367 } from './1660062385367-CreateCredentialsUserRole';
 import { CreateWorkflowsEditorRole1663755770892 } from './1663755770892-CreateWorkflowsUserRole';
 import { CreateCredentialUsageTable1665484192211 } from './1665484192211-CreateCredentialUsageTable';
 import { RemoveCredentialUsageTable1665754637024 } from './1665754637024-RemoveCredentialUsageTable';
-import { AddWorkflowVersionIdColumn1669739707124 } from './1669739707124-AddWorkflowVersionIdColumn';
+import { AddWorkflowVersionIdColumn1669739707124 } from '../common/1669739707124-AddWorkflowVersionIdColumn';
 import { WorkflowStatistics1664196174000 } from './1664196174000-WorkflowStatistics';
 import { AddTriggerCountColumn1669823906993 } from './1669823906993-AddTriggerCountColumn';
-import { RemoveWorkflowDataLoadedFlag1671726148419 } from './1671726148419-RemoveWorkflowDataLoadedFlag';
+import { RemoveWorkflowDataLoadedFlag1671726148419 } from '../common/1671726148419-RemoveWorkflowDataLoadedFlag';
 import { MessageEventBusDestinations1671535397530 } from './1671535397530-MessageEventBusDestinations';
 import { DeleteExecutionsWithWorkflows1673268682475 } from './1673268682475-DeleteExecutionsWithWorkflows';
-import { CreateLdapEntities1674509946020 } from './1674509946020-CreateLdapEntities';
-import { PurgeInvalidWorkflowConnections1675940580449 } from './1675940580449-PurgeInvalidWorkflowConnections';
+import { CreateLdapEntities1674509946020 } from '../common/1674509946020-CreateLdapEntities';
+import { PurgeInvalidWorkflowConnections1675940580449 } from '../common/1675940580449-PurgeInvalidWorkflowConnections';
 import { AddStatusToExecutions1674138566000 } from './1674138566000-AddStatusToExecutions';
 import { MigrateExecutionStatus1676996103000 } from './1676996103000-MigrateExecutionStatus';
 import { UpdateRunningExecutionStatus1677237073720 } from './1677237073720-UpdateRunningExecutionStatus';
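The rewired imports above are the visible half of the deduplication: migrations that previously existed once per database driver now live in a shared `common/` directory, and each driver's index re-exports them from there. A minimal sketch of what a driver-agnostic migration in that directory can look like — the class name, timestamp, and column are hypothetical; only `MigrationContext` and `ReversibleMigration` are the real types from `@db/types`:

import type { MigrationContext, ReversibleMigration } from '@db/types';

// Hypothetical illustration only: by going through `escape` and `runQuery`,
// the same class can serve sqlite, MySQL/MariaDB, and Postgres.
export class AddExampleColumn1680000000000 implements ReversibleMigration {
	async up({ escape, runQuery }: MigrationContext) {
		const tableName = escape.tableName('workflow_entity'); // adds the table prefix, then quotes
		const columnName = escape.columnName('example');
		await runQuery(`ALTER TABLE ${tableName} ADD COLUMN ${columnName} TEXT`);
	}

	async down({ escape, runQuery }: MigrationContext) {
		const tableName = escape.tableName('workflow_entity');
		const columnName = escape.columnName('example');
		await runQuery(`ALTER TABLE ${tableName} DROP COLUMN ${columnName}`);
	}
}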
@@ -1,5 +1,6 @@
 import type { Logger } from '@/Logger';
-import type { QueryRunner } from 'typeorm';
+import type { INodeTypes } from 'n8n-workflow';
+import type { QueryRunner, ObjectLiteral } from 'typeorm';
 
 export type DatabaseType = 'mariadb' | 'postgresdb' | 'mysqldb' | 'sqlite';
 
@@ -8,8 +9,35 @@ export interface MigrationContext {
 	queryRunner: QueryRunner;
 	tablePrefix: string;
 	dbType: DatabaseType;
+	isMysql: boolean;
 	dbName: string;
 	migrationName: string;
+	nodeTypes: INodeTypes;
+	loadSurveyFromDisk(): string | null;
+	parseJson<T>(data: string | T): T;
+	escape: {
+		columnName(name: string): string;
+		tableName(name: string): string;
+		indexName(name: string): string;
+	};
+	runQuery<T>(
+		sql: string,
+		unsafeParameters?: ObjectLiteral,
+		nativeParameters?: ObjectLiteral,
+	): Promise<T>;
+	runInBatches<T>(
+		query: string,
+		operation: (results: T[]) => Promise<void>,
+		limit?: number,
+	): Promise<void>;
-	copyTable(fromTable: string, toTable: string): Promise<void>;
+	copyTable(
+		fromTable: string,
+		toTable: string,
+		fromFields?: string[],
+		toFields?: string[],
+		batchSize?: number,
+	): Promise<void>;
 }
 
 export type MigrationFn = (ctx: MigrationContext) => Promise<void>;
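The widened `MigrationContext` means a migration never has to reach for the query runner or the TypeORM driver itself. A hypothetical use of `runQuery` following the parameter split declared above — values in the `unsafeParameters` bag are resolved as escaped `:placeholders`, while the second bag is handed to TypeORM as native parameters; nothing below is real n8n migration code:

// Sketch: rename one workflow through the context, using both parameter bags.
async function renameWorkflow(
	{ escape, runQuery }: MigrationContext,
	id: string,
	newName: string, // e.g. user-supplied, hence "unsafe"
) {
	const tableName = escape.tableName('workflow_entity');
	await runQuery(`UPDATE ${tableName} SET name = :name WHERE id = :id`, { name: newName }, { id });
}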
@@ -1,16 +1,20 @@
+import { Container } from 'typedi';
 import { readFileSync, rmSync } from 'fs';
 import { UserSettings } from 'n8n-core';
+import type { ObjectLiteral } from 'typeorm';
 import type { QueryRunner } from 'typeorm/query-runner/QueryRunner';
 import config from '@/config';
-import { getLogger } from '@/Logger';
 import { inTest } from '@/constants';
 import type { BaseMigration, Migration, MigrationContext, MigrationFn } from '@db/types';
+import { getLogger } from '@/Logger';
+import { NodeTypes } from '@/NodeTypes';
+import { jsonParse } from 'n8n-workflow';
 
 const logger = getLogger();
 
 const PERSONALIZATION_SURVEY_FILENAME = 'personalizationSurvey.json';
 
-export function loadSurveyFromDisk(): string | null {
+function loadSurveyFromDisk(): string | null {
 	const userSettingsPath = UserSettings.getUserN8nFolderPath();
 	try {
 		const filename = `${userSettingsPath}/${PERSONALIZATION_SURVEY_FILENAME}`;
@@ -21,8 +25,7 @@ export function loadSurveyFromDisk(): string | null {
 		if (!kvPairs.length) {
 			throw new Error('personalizationSurvey is empty');
 		} else {
-			// eslint-disable-next-line @typescript-eslint/naming-convention
-			const emptyKeys = kvPairs.reduce((acc, [_key, value]) => {
+			const emptyKeys = kvPairs.reduce((acc, [, value]) => {
 				if (!value || (Array.isArray(value) && !value.length)) {
 					return acc + 1;
 				}
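The `[, value]` form is ordinary array destructuring with the first slot skipped, which removes the unused `_key` binding and, with it, the lint suppression. A standalone illustration of what the reduce counts — the survey payload here is made up:

// Hypothetical survey payload: null and [] both count as empty answers.
const kvPairs = Object.entries({ companySize: null, workArea: [], role: 'dev' });
const emptyKeys = kvPairs.reduce((acc, [, value]) => {
	if (!value || (Array.isArray(value) && !value.length)) {
		return acc + 1;
	}
	return acc;
}, 0);
// emptyKeys === 2 here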
@@ -79,113 +82,120 @@ const runDisablingForeignKeys = async (
 	}
 };
 
-export const wrapMigration = (migration: Migration) => {
-	const dbType = config.getEnv('database.type');
-	const dbName = config.getEnv(`database.${dbType === 'mariadb' ? 'mysqldb' : dbType}.database`);
-	const tablePrefix = config.getEnv('database.tablePrefix');
-	const migrationName = migration.name;
-	const context: Omit<MigrationContext, 'queryRunner'> = {
-		tablePrefix,
-		dbType,
-		dbName,
-		migrationName,
-		logger,
-	};
+function parseJson<T>(data: string | T): T {
+	return typeof data === 'string' ? jsonParse<T>(data) : data;
+}
+
+const dbType = config.getEnv('database.type');
+const isMysql = ['mariadb', 'mysqldb'].includes(dbType);
+const dbName = config.getEnv(`database.${dbType === 'mariadb' ? 'mysqldb' : dbType}.database`);
+const tablePrefix = config.getEnv('database.tablePrefix');
+
+const createContext = (queryRunner: QueryRunner, migration: Migration): MigrationContext => ({
+	logger,
+	tablePrefix,
+	dbType,
+	isMysql,
+	dbName,
+	migrationName: migration.name,
+	queryRunner,
+	nodeTypes: Container.get(NodeTypes),
+	loadSurveyFromDisk,
+	parseJson,
+	escape: {
+		columnName: (name) => queryRunner.connection.driver.escape(name),
+		tableName: (name) => queryRunner.connection.driver.escape(`${tablePrefix}${name}`),
+		indexName: (name) => queryRunner.connection.driver.escape(`IDX_${tablePrefix}${name}`),
+	},
+	runQuery: async <T>(
+		sql: string,
+		unsafeParameters?: ObjectLiteral,
+		safeParameters?: ObjectLiteral,
+	) => {
+		if (unsafeParameters) {
+			const [query, parameters] = queryRunner.connection.driver.escapeQueryWithParameters(
+				sql,
+				unsafeParameters,
+				safeParameters ?? {},
+			);
+			return queryRunner.query(query, parameters) as Promise<T>;
+		} else {
+			return queryRunner.query(sql) as Promise<T>;
+		}
+	},
+	runInBatches: async <T>(
+		query: string,
+		operation: (results: T[]) => Promise<void>,
+		limit = 100,
+	) => {
+		let offset = 0;
+		let batchedQuery: string;
+		let batchedQueryResults: T[];
+
+		if (query.trim().endsWith(';')) query = query.trim().slice(0, -1);
+
+		do {
+			batchedQuery = `${query} LIMIT ${limit} OFFSET ${offset}`;
+			batchedQueryResults = (await queryRunner.query(batchedQuery)) as T[];
+			// pass a copy to prevent errors from mutation
+			await operation([...batchedQueryResults]);
+			offset += limit;
+		} while (batchedQueryResults.length === limit);
+	},
+	copyTable: async (
+		fromTable: string,
+		toTable: string,
+		fromFields?: string[],
+		toFields?: string[],
+		batchSize?: number,
+	) => {
+		const { driver } = queryRunner.connection;
+		fromTable = driver.escape(`${tablePrefix}${fromTable}`);
+		toTable = driver.escape(`${tablePrefix}${toTable}`);
+		const fromFieldsStr = fromFields?.length
+			? fromFields.map((f) => driver.escape(f)).join(', ')
+			: '*';
+		const toFieldsStr = toFields?.length
+			? `(${toFields.map((f) => driver.escape(f)).join(', ')})`
+			: '';
+
+		const total = await queryRunner
+			.query(`SELECT COUNT(*) AS count FROM ${fromTable}`)
+			.then((rows: Array<{ count: number }>) => rows[0].count);
+
+		batchSize = batchSize ?? 10;
+		let migrated = 0;
+		while (migrated < total) {
+			await queryRunner.query(
+				`INSERT INTO ${toTable} ${toFieldsStr} SELECT ${fromFieldsStr} FROM ${fromTable} LIMIT ${migrated}, ${batchSize}`,
+			);
+			migrated += batchSize;
+		}
+	},
+});
+
+export const wrapMigration = (migration: Migration) => {
 	const { up, down } = migration.prototype;
 	Object.assign(migration.prototype, {
 		async up(this: BaseMigration, queryRunner: QueryRunner) {
-			logMigrationStart(migrationName);
+			logMigrationStart(migration.name);
+			const context = createContext(queryRunner, migration);
 			if (this.transaction === false) {
-				await runDisablingForeignKeys(this, { queryRunner, ...context }, up);
+				await runDisablingForeignKeys(this, context, up);
 			} else {
-				await up.call(this, { queryRunner, ...context });
+				await up.call(this, context);
 			}
-			logMigrationEnd(migrationName);
+			logMigrationEnd(migration.name);
 		},
 		async down(this: BaseMigration, queryRunner: QueryRunner) {
 			if (down) {
+				const context = createContext(queryRunner, migration);
 				if (this.transaction === false) {
-					await runDisablingForeignKeys(this, { queryRunner, ...context }, up);
+					await runDisablingForeignKeys(this, context, down);
 				} else {
-					await down.call(this, { queryRunner, ...context });
+					await down.call(this, context);
 				}
 			}
 		},
 	});
 };
-
-export const copyTable = async (
-	{ tablePrefix, queryRunner }: Pick<MigrationContext, 'queryRunner' | 'tablePrefix'>,
-	fromTable: string,
-	toTable: string,
-	fromFields: string[] = [],
-	toFields: string[] = [],
-	batchSize = 10,
-) => {
-	const driver = queryRunner.connection.driver;
-	fromTable = driver.escape(`${tablePrefix}${fromTable}`);
-	toTable = driver.escape(`${tablePrefix}${toTable}`);
-	const fromFieldsStr = fromFields.length
-		? fromFields.map((f) => driver.escape(f)).join(', ')
-		: '*';
-	const toFieldsStr = toFields.length
-		? `(${toFields.map((f) => driver.escape(f)).join(', ')})`
-		: '';
-
-	const total = await queryRunner
-		.query(`SELECT COUNT(*) as count from ${fromTable}`)
-		.then((rows: Array<{ count: number }>) => rows[0].count);
-
-	let migrated = 0;
-	while (migrated < total) {
-		await queryRunner.query(
-			`INSERT INTO ${toTable} ${toFieldsStr} SELECT ${fromFieldsStr} FROM ${fromTable} LIMIT ${migrated}, ${batchSize}`,
-		);
-		migrated += batchSize;
-	}
-};
-
-function batchQuery(query: string, limit: number, offset = 0): string {
-	return `
-		${query}
-		LIMIT ${limit}
-		OFFSET ${offset}
-	`;
-}
-
-export async function runInBatches(
-	queryRunner: QueryRunner,
-	query: string,
-	// eslint-disable-next-line @typescript-eslint/no-explicit-any
-	operation: (results: any[]) => Promise<void>,
-	limit = 100,
-): Promise<void> {
-	let offset = 0;
-	let batchedQuery: string;
-	let batchedQueryResults: unknown[];
-
-	if (query.trim().endsWith(';')) query = query.trim().slice(0, -1);
-
-	do {
-		batchedQuery = batchQuery(query, limit, offset);
-		batchedQueryResults = (await queryRunner.query(batchedQuery)) as unknown[];
-		// pass a copy to prevent errors from mutation
-		await operation([...batchedQueryResults]);
-		offset += limit;
-	} while (batchedQueryResults.length === limit);
-}
-
-export const escapeQuery = (
-	queryRunner: QueryRunner,
-	query: string,
-	params: { [property: string]: unknown },
-): [string, unknown[]] =>
-	queryRunner.connection.driver.escapeQueryWithParameters(
-		query,
-		{
-			pinData: params.pinData,
-			id: params.id,
-		},
-		{},
-	);
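With `copyTable`, `runInBatches`, and `escapeQuery` folded into the context, the standalone exports disappear and migrations consume the closures that `createContext` builds instead. A hypothetical consumer follows; the table names and callback body are illustrative only. Two properties of the implementation worth keeping in mind: `runInBatches` pages with plain LIMIT/OFFSET, so an operation that mutates the rows its own query selects can skip or re-see rows between batches, and `copyTable`'s `LIMIT ${migrated}, ${batchSize}` form is MySQL/SQLite syntax.

// Sketch only, assuming a context built by createContext above.
async function exampleUsage(context: MigrationContext) {
	const executionsTable = context.escape.tableName('execution_entity');

	// Walk all executions, at most 100 ids per callback (the default limit).
	await context.runInBatches<{ id: string }>(
		`SELECT id FROM ${executionsTable}`,
		async (rows) => {
			for (const { id } of rows) {
				// ...process one execution id...
			}
		},
	);

	// Copy two columns into a hypothetical backup table, 10 rows per
	// INSERT ... SELECT round trip (the default batch size).
	await context.copyTable(
		'execution_entity',
		'execution_entity_backup',
		['id', 'workflowId'],
		['id', 'workflowId'],
	);
}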