Mirror of https://github.com/n8n-io/n8n.git, synced 2024-11-14 00:24:07 -08:00
0da338f9b5
feat(editor): Usage and plan page (#4793)

* feat(editor): usage and plan page
* feat(editor): Update Usage and plan page (#4842)
* feat(editor): usage and plan store
* feat(editor): usage and plan page updates
* feat(editor): usage and plan add buttons and alert
* tes(editor): usage and plan store
* tes(editor): usage remove refresh button and add link to view plans
* tes(editor): usage use info tip
* tes(editor): usage info style
* feat(editor): Get quotas data (#4866)
* feat(editor): get quotas data
* feat(editor): In-app experience (#4875)
* feat: Add license quotas endpoint
* feat: Add trigger count to workflow activation process
* refactor: Get quotas from db
* feat: Add license information
* ✨ - finalised GET /license endpoint
* 🔨 - getActiveTriggerCount return 0 instead of null
* 🐛 - ignore manualTrigger when counting active triggers
* ✨ - add activation endpoint
* ✨ - added renew endpoint
* 🔨 - added return type interfaces
* 🔨 - handle license errors where methods are called
* 🔨 - rename function to match name from lib
* feat(editor): usage add plans buttons logic
* 🚨 - testing new License methods
* feat(editor): usage add more business logic
* chore(editor): code formatting
* 🚨 - added license api tests
* fix(editor): usage store
* fix(editor): usage update translations
* feat(editor): usage add license activation modal
* feat(editor): usage change subscription app url
* feat(editor): usage add contact us link
* feat(editor): usage fix modal width
* ✨ - Add renewal tracking metric
* ✨ - add license data to pulse event
* 🔨 - set default triggercount on entity model
* ✨ - add db migrations for mysql and postgres
* fix(editor): Usage api call data processing and error handling
* fix(editor): Usage fix activation query key
* 🚨 - add initDb to telemetry tests
* 🔨 - move getlicensedata to licenseservice
* 🔨 - return 403 instead of 404 to non owners
* 🔨 - move owner checking to middleware
* 🐛 - fixed incorrectly returned error from middleware
* 🐛 - using mock instead of test db for pulse tests
* fix(editor): Usage fix activation and add success messages
* fix(editor): Usage should not renew activation right after activation
* 🚨 - skipping failing pulse tests for now
* fix(editor): Usage add telemetry calls and apply design review outcomes
* feat(editor): Hide usage page according to BE flag
* feat(editor): Usage modify key activation flow
* feat(editor): Usage change subscription app url
* feat(editor): Usage add telemetry for manage plan
* feat(editor): Usage extend link url query params
* feat(editor): Usage add line chart if there is a workflow limit
* feat(editor): Usage remove query after key activation redirection
* fix(editor): Usage handle limit exceeded workflow chart, add focus to input when modal opened
* fix(editor): Usage activation can return router promise when removing query
* fix(editor): Usage and plan design review
* 🐛 - fix renew endpoint hanging issue
* 🐛 - fix license activation bug
* fix(editor): Usage proper translation for plans and/or editions
* fix(editor): Usage apply David's review results
* fix(editor): Usage page set as default and first under Settings
* fix(editor): Usage open subscription app in new tab
* fix(editor): Usage page having key query param a plan links
* test: Fix broken test
* fix(editor): Usage page address review
* 🧪 Flush promises on telemetry tests
* ⚡ Extract helper with `setImmediate`
* 🔥 Remove leftovers
* ⚡ Use Adi's helper
* refactor: Comment broken tests
* refactor: add Tenant id to settings
* feat: add environment to license endpoints
* refactor: Move license environment to general settings
* fix: fix routing bug
* fix(editor): Usage page some code review changes and formatting
* fix(editor): Usage page remove direct usage of reusable translation keys
* fix(editor): Usage page async await instead of then
* fix(editor): Usage page show some content only if network requests in component mounted were successful
* chore(editor): code formatting
* fix(editor): Usage checking license environment
* feat(editor): Improve license activation error messages (no-changelog) (#4958)
* fix(editor): Usage changing activation error title
* remove unnecessary import
* fix(editor): Usage refactor notification showing
* fix(editor): Usage using notification directly in store actions

Co-authored-by: Omar Ajoue <krynble@gmail.com>
Co-authored-by: freyamade <freya@n8n.io>
Co-authored-by: Iván Ovejero <ivov.src@gmail.com>
Co-authored-by: Mutasem <mutdmour@gmail.com>
Co-authored-by: Cornelius Suermann <cornelius@n8n.io>

* fix(editor): Usage change mounted lifecycle logic
* fix(editor): Usage return after successful activation in mounted
* fix: remove console log
* test: fix tests related to settings (#4979)

Co-authored-by: Omar Ajoue <krynble@gmail.com>
Co-authored-by: freyamade <freya@n8n.io>
Co-authored-by: Iván Ovejero <ivov.src@gmail.com>
Co-authored-by: Mutasem <mutdmour@gmail.com>
Co-authored-by: Cornelius Suermann <cornelius@n8n.io>
Co-authored-by: Mutasem Aldmour <4711238+mutdmour@users.noreply.github.com>
234 lines
6.5 KiB
TypeScript
import config from '@/config';
import { InternalHooksManager } from '../../src';
import { nodeFetchedData, workflowExecutionCompleted } from '../../src/events/WorkflowStatistics';
import { LoggerProxy, WorkflowExecuteMode } from 'n8n-workflow';
import { getLogger } from '@/Logger';

const FAKE_USER_ID = 'abcde-fghij';

const mockedFirstProductionWorkflowSuccess = jest.fn((...args) => {});
const mockedFirstWorkflowDataLoad = jest.fn((...args) => {});
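
// InternalHooksManager.getInstance() is stubbed so the two telemetry hooks exercised
// below resolve to the jest mocks above instead of the real InternalHooks instance.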
jest.spyOn(InternalHooksManager, 'getInstance').mockImplementation((...args) => {
	const actual = jest.requireActual('../../src/InternalHooks');
	return {
		...actual,
		onFirstProductionWorkflowSuccess: mockedFirstProductionWorkflowSuccess,
		onFirstWorkflowDataLoad: mockedFirstWorkflowDataLoad,
	};
});
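
// The database layer is stubbed out: Workflow.update reports one affected row only for
// id 1, and WorkflowStatistics.insert throws for workflow -1 to exercise the
// insert-failure fallback used by the "updated entries" test below.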
jest.mock('../../src/Db', () => {
	return {
		collections: {
			Workflow: {
				update: jest.fn(({ id, dataLoaded }, updateArgs) => {
					if (id === 1) return { affected: 1 };
					return { affected: 0 };
				}),
			},
			WorkflowStatistics: {
				insert: jest.fn(({ count, name, workflowId }) => {
					if (workflowId === -1) throw new Error('test error');
					return null;
				}),
				update: jest.fn((...args) => {}),
			},
		},
	};
});
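
// Every workflow resolves to the same fake owner so the telemetry payloads can be
// asserted against FAKE_USER_ID.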
jest.mock('../../src/UserManagement/UserManagementHelper', () => {
	return {
		getWorkflowOwner: jest.fn((workflowId) => {
			return { id: FAKE_USER_ID };
		}),
	};
});
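
// Suite-level setup: enable diagnostics, set a test deployment type, initialise the
// logger proxy, and reset both telemetry mocks between tests.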
describe('Events', () => {
	beforeAll(() => {
		config.set('diagnostics.enabled', true);
		config.set('deployment.type', 'n8n-testing');
		LoggerProxy.init(getLogger());
	});

	afterAll(() => {
		jest.clearAllTimers();
		jest.useRealTimers();
	});

	beforeEach(() => {
		mockedFirstProductionWorkflowSuccess.mockClear();
		mockedFirstWorkflowDataLoad.mockClear();
	});

	afterEach(() => {});
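
	// First-production-success telemetry emitted via workflowExecutionCompleted.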
	describe('workflowExecutionCompleted', () => {
		test('should fail with an invalid workflowId', async () => {
			const workflow = {
				id: 'abcde',
				name: '',
				active: false,
				createdAt: new Date(),
				updatedAt: new Date(),
				nodes: [],
				connections: {},
			};
			const runData = {
				finished: true,
				data: { resultData: { runData: {} } },
				mode: 'internal' as WorkflowExecuteMode,
				startedAt: new Date(),
			};
			await workflowExecutionCompleted(workflow, runData);
		});

		test('should create metrics for production successes', async () => {
			// Call the function with a production success result, ensure metrics hook gets called
			const workflow = {
				id: '1',
				name: '',
				active: false,
				createdAt: new Date(),
				updatedAt: new Date(),
				nodes: [],
				connections: {},
			};
			const runData = {
				finished: true,
				data: { resultData: { runData: {} } },
				mode: 'internal' as WorkflowExecuteMode,
				startedAt: new Date(),
			};
			await workflowExecutionCompleted(workflow, runData);
			expect(mockedFirstProductionWorkflowSuccess).toBeCalledTimes(1);
			expect(mockedFirstProductionWorkflowSuccess).toHaveBeenNthCalledWith(1, {
				user_id: FAKE_USER_ID,
				workflow_id: parseInt(workflow.id, 10),
			});
		});

		test('should only create metrics for production successes', async () => {
			// Call the function with a result that is not a production success, ensure the metrics hook is never called
			const workflow = {
				id: '1',
				name: '',
				active: false,
				createdAt: new Date(),
				updatedAt: new Date(),
				nodes: [],
				connections: {},
			};
			const runData = {
				finished: false,
				data: { resultData: { runData: {} } },
				mode: 'internal' as WorkflowExecuteMode,
				startedAt: new Date(),
			};
			await workflowExecutionCompleted(workflow, runData);
			expect(mockedFirstProductionWorkflowSuccess).toBeCalledTimes(0);
		});

		test('should not send metrics for updated entries', async () => {
			// Call the function with the id that causes insert to fail, ensure update is called *and* metrics aren't sent
			const workflow = {
				id: '-1',
				name: '',
				active: false,
				createdAt: new Date(),
				updatedAt: new Date(),
				nodes: [],
				connections: {},
			};
			const runData = {
				finished: true,
				data: { resultData: { runData: {} } },
				mode: 'internal' as WorkflowExecuteMode,
				startedAt: new Date(),
			};
			await workflowExecutionCompleted(workflow, runData);
			expect(mockedFirstProductionWorkflowSuccess).toBeCalledTimes(0);
		});
	});
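
	// First-data-load telemetry emitted via nodeFetchedData.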
	describe('nodeFetchedData', () => {
		test('should fail with an invalid workflowId', async () => {
			const workflowId = 'abcde';
			const node = {
				id: 'abcde',
				name: 'test node',
				typeVersion: 1,
				type: '',
				position: [0, 0] as [number, number],
				parameters: {},
			};
			await nodeFetchedData(workflowId, node);
		});

		test('should create metrics when the db is updated', async () => {
			// Call the function with a valid workflow id, ensure the data-loaded metrics hook gets called
			const workflowId = '1';
			const node = {
				id: 'abcde',
				name: 'test node',
				typeVersion: 1,
				type: '',
				position: [0, 0] as [number, number],
				parameters: {},
			};
			await nodeFetchedData(workflowId, node);
			expect(mockedFirstWorkflowDataLoad).toBeCalledTimes(1);
			expect(mockedFirstWorkflowDataLoad).toHaveBeenNthCalledWith(1, {
				user_id: FAKE_USER_ID,
				workflow_id: parseInt(workflowId, 10),
				node_type: node.type,
				node_id: node.id,
			});
		});

		test('should create metrics with credentials when the db is updated', async () => {
			// Call the function with a node that has credentials, ensure the metrics hook receives the credential info
			const workflowId = '1';
			const node = {
				id: 'abcde',
				name: 'test node',
				typeVersion: 1,
				type: '',
				position: [0, 0] as [number, number],
				parameters: {},
				credentials: {
					testCredentials: {
						id: '1',
						name: 'Test Credentials',
					},
				},
			};
			await nodeFetchedData(workflowId, node);
			expect(mockedFirstWorkflowDataLoad).toBeCalledTimes(1);
			expect(mockedFirstWorkflowDataLoad).toHaveBeenNthCalledWith(1, {
				user_id: FAKE_USER_ID,
				workflow_id: parseInt(workflowId, 10),
				node_type: node.type,
				node_id: node.id,
				credential_type: 'testCredentials',
				credential_id: node.credentials.testCredentials.id,
			});
		});

		test('should not send metrics for entries that already have the flag set', async () => {
			// Fetch data for workflow 2, which is set up in the mocks to not be altered
			const workflowId = '2';
			const node = {
				id: 'abcde',
				name: 'test node',
				typeVersion: 1,
				type: '',
				position: [0, 0] as [number, number],
				parameters: {},
			};
			await nodeFetchedData(workflowId, node);
			expect(mockedFirstWorkflowDataLoad).toBeCalledTimes(0);
		});
	});
});
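
// Note: the suite asserts only on the telemetry mocks. As a hypothetical extra check
// (not part of the original file), the Db stubs themselves could also be inspected,
// assuming the mocked module were imported as `import * as Db from '../../src/Db';`:
//
//   expect(Db.collections.WorkflowStatistics.insert).toHaveBeenCalled();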