fix(core): Make WorkflowStatistics tests pass on all databases (no-changelog) (#5909)

कारतोफ्फेलस्क्रिप्ट™ 2023-04-05 14:51:43 +02:00 committed by GitHub
parent d08c885734
commit e7aaa9425a
2 changed files with 46 additions and 26 deletions


@@ -19,21 +19,21 @@ async function upsertWorkflowStatistics(
   workflowId: string,
 ): Promise<StatisticsUpsertResult> {
   const dbType = config.getEnv('database.type');
-  const tablePrefix = config.getEnv('database.tablePrefix');
+  const { tableName } = Db.collections.WorkflowStatistics.metadata;
   try {
     if (dbType === 'sqlite') {
-      await Db.collections.WorkflowStatistics
-        .query(`INSERT INTO "${tablePrefix}workflow_statistics" ("count", "name", "workflowId", "latestEvent")
-          VALUES (1, "${eventName}", "${workflowId}", CURRENT_TIMESTAMP)
-          ON CONFLICT (workflowId, name) DO UPDATE SET
-            count = count + 1,
-            latestEvent = CURRENT_TIMESTAMP returning count
-        `);
+      await Db.collections.WorkflowStatistics.query(
+        `INSERT INTO "${tableName}" ("count", "name", "workflowId", "latestEvent")
+        VALUES (1, "${eventName}", "${workflowId}", CURRENT_TIMESTAMP)
+        ON CONFLICT (workflowId, name)
+        DO UPDATE SET count = count + 1, latestEvent = CURRENT_TIMESTAMP`,
+      );
       // SQLite does not offer a reliable way to know whether or not an insert or update happened.
       // We'll use a naive approach in this case. Query again after and it might cause us to miss the
       // first production execution sometimes due to concurrency, but it's the only way.
       const counter = await Db.collections.WorkflowStatistics.findOne({
+        select: ['count'],
         where: {
           name: eventName,
           workflowId,
@@ -45,10 +45,13 @@ async function upsertWorkflowStatistics(
       }
       return StatisticsUpsertResult.update;
     } else if (dbType === 'postgresdb') {
-      const queryResult = (await Db.collections.WorkflowStatistics
-        .query(`insert into "${tablePrefix}workflow_statistics" ("count", "name", "workflowId", "latestEvent")
-        values (1, '${eventName}', '${workflowId}', CURRENT_TIMESTAMP) on conflict ("name", "workflowId")
-        do update set "count" = "${tablePrefix}workflow_statistics"."count" + 1, "latestEvent" = CURRENT_TIMESTAMP returning *;`)) as Array<{
+      const queryResult = (await Db.collections.WorkflowStatistics.query(
+        `INSERT INTO "${tableName}" ("count", "name", "workflowId", "latestEvent")
+        VALUES (1, '${eventName}', '${workflowId}', CURRENT_TIMESTAMP)
+        ON CONFLICT ("name", "workflowId")
+        DO UPDATE SET "count" = "${tableName}"."count" + 1, "latestEvent" = CURRENT_TIMESTAMP
+        RETURNING *;`,
+      )) as Array<{
         count: number;
       }>;
       if (queryResult[0].count === 1) {
@@ -56,12 +59,12 @@ async function upsertWorkflowStatistics(
       }
       return StatisticsUpsertResult.update;
     } else {
-      const queryResult = (await Db.collections.WorkflowStatistics
-        .query(`insert into \`${tablePrefix}workflow_statistics\` (count,
-          latestEvent,
-          name,
-          workflowId)
-        values (1, NOW(), "${eventName}", "${workflowId}") ON DUPLICATE KEY UPDATE count = count + 1, latestEvent = NOW();`)) as {
+      const queryResult = (await Db.collections.WorkflowStatistics.query(
+        `INSERT INTO \`${tableName}\` (count, name, workflowId, latestEvent)
+        VALUES (1, "${eventName}", "${workflowId}", NOW())
+        ON DUPLICATE KEY
+        UPDATE count = count + 1, latestEvent = NOW();`,
+      )) as {
         affectedRows: number;
       };
       if (queryResult.affectedRows === 1) {
@@ -71,7 +74,8 @@ async function upsertWorkflowStatistics(
       return StatisticsUpsertResult.update;
     }
   } catch (error) {
-    return StatisticsUpsertResult.failed;
+    if (error instanceof QueryFailedError) return StatisticsUpsertResult.failed;
+    throw error;
   }
 }
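Pieced together from the new side of the hunks above, the updated helper reads roughly as follows. This is a sketch rather than the verbatim file: the import paths, the eventName parameter type, the redeclared StatisticsUpsertResult enum, and the insert/update returns that fall between hunks are assumptions based on the visible context.

// Sketch only: assembled from the new side of the diff; imports, types, and enum values are assumed.
import { QueryFailedError } from 'typeorm';
import config from '@/config';
import * as Db from '@/Db';

// The original file defines this enum; redeclared here so the sketch stands alone.
enum StatisticsUpsertResult {
  insert = 'insert',
  update = 'update',
  failed = 'failed',
}

async function upsertWorkflowStatistics(
  eventName: string, // the real code likely uses a statistics-name enum here
  workflowId: string,
): Promise<StatisticsUpsertResult> {
  const dbType = config.getEnv('database.type');
  // Table name now comes from repository metadata instead of a manually prefixed string.
  const { tableName } = Db.collections.WorkflowStatistics.metadata;
  try {
    if (dbType === 'sqlite') {
      await Db.collections.WorkflowStatistics.query(
        `INSERT INTO "${tableName}" ("count", "name", "workflowId", "latestEvent")
        VALUES (1, "${eventName}", "${workflowId}", CURRENT_TIMESTAMP)
        ON CONFLICT (workflowId, name)
        DO UPDATE SET count = count + 1, latestEvent = CURRENT_TIMESTAMP`,
      );
      // SQLite cannot report reliably whether the statement inserted or updated,
      // so re-read the counter; concurrency can occasionally hide the first production run.
      const counter = await Db.collections.WorkflowStatistics.findOne({
        select: ['count'],
        where: { name: eventName, workflowId },
      });
      return counter?.count === 1 ? StatisticsUpsertResult.insert : StatisticsUpsertResult.update;
    } else if (dbType === 'postgresdb') {
      const queryResult = (await Db.collections.WorkflowStatistics.query(
        `INSERT INTO "${tableName}" ("count", "name", "workflowId", "latestEvent")
        VALUES (1, '${eventName}', '${workflowId}', CURRENT_TIMESTAMP)
        ON CONFLICT ("name", "workflowId")
        DO UPDATE SET "count" = "${tableName}"."count" + 1, "latestEvent" = CURRENT_TIMESTAMP
        RETURNING *;`,
      )) as Array<{ count: number }>;
      return queryResult[0].count === 1 ? StatisticsUpsertResult.insert : StatisticsUpsertResult.update;
    } else {
      // Remaining (MySQL-compatible) database types.
      const queryResult = (await Db.collections.WorkflowStatistics.query(
        `INSERT INTO \`${tableName}\` (count, name, workflowId, latestEvent)
        VALUES (1, "${eventName}", "${workflowId}", NOW())
        ON DUPLICATE KEY
        UPDATE count = count + 1, latestEvent = NOW();`,
      )) as { affectedRows: number };
      return queryResult.affectedRows === 1 ? StatisticsUpsertResult.insert : StatisticsUpsertResult.update;
    }
  } catch (error) {
    // Only swallow database-level failures; rethrow anything unexpected.
    if (error instanceof QueryFailedError) return StatisticsUpsertResult.failed;
    throw error;
  }
}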


@@ -18,15 +18,14 @@ jest.mock('@/Db', () => {
   return {
     collections: {
       WorkflowStatistics: mock<WorkflowStatisticsRepository>({
-        findOne: jest.fn(() => ({
-          count: 1,
-        })),
+        metadata: { tableName: 'workflow_statistics' },
       }),
     },
   };
 });

 describe('Events', () => {
+  const dbType = config.getEnv('database.type');
   const fakeUser = Object.assign(new User(), { id: 'abcde-fghij' });
   const internalHooks = mockInstance(InternalHooks);
@@ -48,11 +47,28 @@ describe('Events', () => {
   });

   beforeEach(() => {
+    if (dbType === 'sqlite') {
+      workflowStatisticsRepository.findOne.mockClear();
+    } else {
+      workflowStatisticsRepository.query.mockClear();
+    }
     internalHooks.onFirstProductionWorkflowSuccess.mockClear();
     internalHooks.onFirstWorkflowDataLoad.mockClear();
   });

-  afterEach(() => {});
+  const mockDBCall = (count = 1) => {
+    if (dbType === 'sqlite') {
+      workflowStatisticsRepository.findOne.mockResolvedValueOnce(
+        mock<WorkflowStatistics>({ count }),
+      );
+    } else {
+      const result = dbType === 'postgresdb' ? [{ count }] : { affectedRows: count };
+      workflowStatisticsRepository.query.mockImplementationOnce(async (query) =>
+        query.startsWith('INSERT INTO') ? result : null,
+      );
+    }
+  };

   describe('workflowExecutionCompleted', () => {
     test('should create metrics for production successes', async () => {
@@ -73,6 +89,8 @@
         mode: 'internal' as WorkflowExecuteMode,
         startedAt: new Date(),
       };
+
+      mockDBCall();
       await workflowExecutionCompleted(workflow, runData);
       expect(internalHooks.onFirstProductionWorkflowSuccess).toBeCalledTimes(1);
       expect(internalHooks.onFirstProductionWorkflowSuccess).toHaveBeenNthCalledWith(1, {
@@ -105,9 +123,6 @@
     test('should not send metrics for updated entries', async () => {
       // Call the function with a fail insert, ensure update is called *and* metrics aren't sent
-      workflowStatisticsRepository.findOne.mockImplementationOnce(() => ({
-        count: 2,
-      }));
       const workflow = {
         id: '1',
         name: '',
@@ -124,6 +139,7 @@
         mode: 'internal' as WorkflowExecuteMode,
         startedAt: new Date(),
       };
+      mockDBCall(2);
       await workflowExecutionCompleted(workflow, runData);
       expect(internalHooks.onFirstProductionWorkflowSuccess).toBeCalledTimes(0);
     });
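The repository mock at the top of the test file now exposes metadata: { tableName: 'workflow_statistics' } instead of a canned findOne result, because the helper reads the table name from repository metadata rather than assembling it from tablePrefix. Below is a minimal standalone sketch of that mocking pattern with jest-mock-extended; the simplified entity interface and the plain TypeORM Repository type are stand-ins for n8n's actual classes, not the real ones.

import { mock } from 'jest-mock-extended';
import type { Repository } from 'typeorm';

// Simplified entity shape, for illustration only.
interface WorkflowStatistics {
  count: number;
  name: string;
  workflowId: string;
}

// Deep mock whose metadata.tableName matches what the production helper interpolates
// into its raw SQL, independent of any configured table prefix.
const workflowStatisticsRepository = mock<Repository<WorkflowStatistics>>({
  metadata: { tableName: 'workflow_statistics' },
});

// Per-test stubs then only need to cover the call made for the configured database,
// e.g. the Postgres branch, whose INSERT ... RETURNING * resolves to [{ count }]:
workflowStatisticsRepository.query.mockResolvedValueOnce([{ count: 1 }]);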