Mirror of https://github.com/n8n-io/n8n.git (synced 2024-11-14 16:44:07 -08:00)
test: add tests

commit 8d5ec7d1db (parent 020d860360)
package.json (scripts):

@@ -25,6 +25,7 @@
 		"start:default": "cd bin && ./n8n",
 		"start:windows": "cd bin && n8n",
 		"test": "pnpm test:sqlite",
+		"test:dev": "N8N_LOG_LEVEL=silent DB_TYPE=sqlite jest --watch",
 		"test:sqlite": "N8N_LOG_LEVEL=silent DB_TYPE=sqlite jest",
 		"test:postgres": "N8N_LOG_LEVEL=silent DB_TYPE=postgresdb DB_POSTGRESDB_SCHEMA=alt_schema DB_TABLE_PREFIX=test_ jest --no-coverage",
 		"test:mysql": "N8N_LOG_LEVEL=silent DB_TYPE=mysqldb DB_TABLE_PREFIX=test_ jest --no-coverage",
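The only script change is the new `test:dev` entry, which runs the sqlite-backed suite in Jest watch mode; the existing `test`, `test:sqlite`, `test:postgres`, and `test:mysql` scripts keep selecting the database backend through the `DB_TYPE` (and related `DB_*`) environment variables. As a rough illustration of how test setup code can branch on that variable (the helper name and fallback below are assumptions for the example, not code from this commit):

```ts
// Illustrative sketch only: pick the backend from the DB_TYPE variable that the
// package.json scripts export before invoking jest.
type TestDbType = 'sqlite' | 'postgresdb' | 'mysqldb';

function resolveTestDbType(): TestDbType {
	const dbType = process.env.DB_TYPE;
	if (dbType === 'postgresdb' || dbType === 'mysqldb') return dbType;
	return 'sqlite'; // matches the default `test` script, which runs test:sqlite
}

console.log(`running tests against ${resolveTestDbType()}`);
```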
WorkflowExecuteAdditionalData test file:

@@ -1,9 +1,11 @@
 import { mock } from 'jest-mock-extended';
-import type {
-	IExecuteWorkflowInfo,
-	IWorkflowExecuteAdditionalData,
-	ExecuteWorkflowOptions,
-	IRun,
+import type { IWorkflowBase } from 'n8n-workflow';
+import {
+	type IExecuteWorkflowInfo,
+	type IWorkflowExecuteAdditionalData,
+	type ExecuteWorkflowOptions,
+	type IRun,
+	type INodeExecutionData,
 } from 'n8n-workflow';
 import type PCancelable from 'p-cancelable';
 import Container from 'typedi';
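The import block is rewritten so that `IWorkflowBase` gets its own type-only import while the remaining names use inline `type` modifiers in a single statement, making room for the newly needed `INodeExecutionData`. For readers unfamiliar with the syntax, a generic TypeScript illustration (standard library module, not n8n code):

```ts
// Inline `type` modifiers let one import statement mix type-only and value
// imports from the same module; the type entries are erased at compile time.
import { type Dirent, readdirSync } from 'fs';

const entries: Dirent[] = readdirSync('.', { withFileTypes: true });
console.log(entries.map((entry) => entry.name));
```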
@@ -21,43 +23,59 @@ import { WorkflowStatisticsService } from '@/services/workflow-statistics.servic
 import { SubworkflowPolicyChecker } from '@/subworkflows/subworkflow-policy-checker.service';
 import { Telemetry } from '@/telemetry';
 import { PermissionChecker } from '@/user-management/permission-checker';
-import { executeWorkflow, getBase } from '@/workflow-execute-additional-data';
+import { executeWorkflow, getBase, getRunData } from '@/workflow-execute-additional-data';
 import { mockInstance } from '@test/mocking';
 
-const run = mock<IRun>({
-	data: { resultData: {} },
-	finished: true,
-	mode: 'manual',
-	startedAt: new Date(),
-	status: 'new',
-});
-
-const cancelablePromise = mock<PCancelable<IRun>>({
-	then: jest
-		.fn()
-		.mockImplementation(async (onfulfilled) => await Promise.resolve(run).then(onfulfilled)),
-	catch: jest
-		.fn()
-		.mockImplementation(async (onrejected) => await Promise.resolve(run).catch(onrejected)),
-	finally: jest
-		.fn()
-		.mockImplementation(async (onfinally) => await Promise.resolve(run).finally(onfinally)),
-	[Symbol.toStringTag]: 'PCancelable',
-});
+const EXECUTION_ID = '123';
+const LAST_NODE_EXECUTED = 'Last node executed';
+
+const getMockRun = ({ lastNodeOutput }: { lastNodeOutput: Array<INodeExecutionData[] | null> }) =>
+	mock<IRun>({
+		data: {
+			resultData: {
+				runData: {
+					[LAST_NODE_EXECUTED]: [
+						{
+							startTime: 100,
+							data: {
+								main: lastNodeOutput,
+							},
+						},
+					],
+				},
+				lastNodeExecuted: LAST_NODE_EXECUTED,
+			},
+		},
+		finished: true,
+		mode: 'manual',
+		startedAt: new Date(),
+		status: 'new',
+	});
+
+const getCancelablePromise = async (run: IRun) =>
+	await mock<PCancelable<IRun>>({
+		then: jest
+			.fn()
+			.mockImplementation(async (onfulfilled) => await Promise.resolve(run).then(onfulfilled)),
+		catch: jest
+			.fn()
+			.mockImplementation(async (onrejected) => await Promise.resolve(run).catch(onrejected)),
+		finally: jest
+			.fn()
+			.mockImplementation(async (onfinally) => await Promise.resolve(run).finally(onfinally)),
+		[Symbol.toStringTag]: 'PCancelable',
+	});
+
+const processRunExecutionData = jest.fn();
 
 jest.mock('n8n-core', () => ({
 	__esModule: true,
 	...jest.requireActual('n8n-core'),
 	WorkflowExecute: jest.fn().mockImplementation(() => ({
-		processRunExecutionData: jest.fn().mockReturnValue(cancelablePromise),
+		processRunExecutionData,
 	})),
 }));
 
-jest.mock('../workflow-helpers', () => ({
-	...jest.requireActual('../workflow-helpers'),
-	getDataLastExecutedNodeData: jest.fn().mockReturnValue({ data: { main: [] } }),
-}));
-
 describe('WorkflowExecuteAdditionalData', () => {
 	const variablesService = mockInstance(VariablesService);
 	variablesService.getAllCached.mockResolvedValue([]);
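The shared fixtures are reworked: the fixed `run` and module-level `cancelablePromise` give way to a `getMockRun` factory (so each test controls the last node's output), a `getCancelablePromise` helper that wraps a run in a thenable `PCancelable` mock, and a `processRunExecutionData` jest.fn hoisted out of the `jest.mock('n8n-core', ...)` factory so individual tests can decide what the mocked `WorkflowExecute` returns; the now-unneeded `../workflow-helpers` mock is dropped. A minimal generic sketch of that hoisted-mock pattern, using `fs` as a stand-in module (names here are illustrative, not from the commit):

```ts
import { readFileSync } from 'fs';

// Module-scope jest.fn() that the mock factory closes over; each test can
// reconfigure it, just as the tests below reconfigure processRunExecutionData.
const mockReadFileSync = jest.fn();

jest.mock('fs', () => ({
	...jest.requireActual('fs'),
	readFileSync: (...args: unknown[]) => mockReadFileSync(...args),
}));

test('per-test return values', () => {
	mockReadFileSync.mockReturnValue('hello');
	expect(readFileSync('/any/path', 'utf8')).toBe('hello');
});
```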
@@ -95,17 +113,129 @@ describe('WorkflowExecuteAdditionalData', () => {
 		expect(eventService.emit).toHaveBeenCalledWith(eventName, payload);
 	});
 
-	it('`executeWorkflow` should set subworkflow execution as running', async () => {
-		const executionId = '123';
-		workflowRepository.get.mockResolvedValue(mock<WorkflowEntity>({ id: executionId, nodes: [] }));
-		activeExecutions.add.mockResolvedValue(executionId);
-
-		await executeWorkflow(
-			mock<IExecuteWorkflowInfo>(),
-			mock<IWorkflowExecuteAdditionalData>(),
-			mock<ExecuteWorkflowOptions>({ loadedWorkflowData: undefined }),
-		);
-
-		expect(executionRepository.setRunning).toHaveBeenCalledWith(executionId);
+	describe('executeWorkflow', () => {
+		const runWithData = getMockRun({ lastNodeOutput: [[{ json: { test: 1 } }]] });
+
+		beforeEach(() => {
+			workflowRepository.get.mockResolvedValue(
+				mock<WorkflowEntity>({ id: EXECUTION_ID, nodes: [] }),
+			);
+			activeExecutions.add.mockResolvedValue(EXECUTION_ID);
+			processRunExecutionData.mockReturnValue(getCancelablePromise(runWithData));
+		});
+
+		it('should execute workflow, return data and execution id', async () => {
+			const response = await executeWorkflow(
+				mock<IExecuteWorkflowInfo>(),
+				mock<IWorkflowExecuteAdditionalData>(),
+				mock<ExecuteWorkflowOptions>({ loadedWorkflowData: undefined, doNotWaitToFinish: false }),
+			);
+
+			expect(response).toEqual({
+				data: runWithData.data.resultData.runData[LAST_NODE_EXECUTED][0].data!.main,
+				executionId: EXECUTION_ID,
+			});
+		});
+
+		it('should execute workflow, skip waiting', async () => {
+			const response = await executeWorkflow(
+				mock<IExecuteWorkflowInfo>(),
+				mock<IWorkflowExecuteAdditionalData>(),
+				mock<ExecuteWorkflowOptions>({ loadedWorkflowData: undefined, doNotWaitToFinish: true }),
+			);
+
+			expect(response).toEqual({
+				data: [null],
+				executionId: EXECUTION_ID,
+			});
+		});
+
+		it('should set sub workflow execution as running', async () => {
+			await executeWorkflow(
+				mock<IExecuteWorkflowInfo>(),
+				mock<IWorkflowExecuteAdditionalData>(),
+				mock<ExecuteWorkflowOptions>({ loadedWorkflowData: undefined }),
+			);
+
+			expect(executionRepository.setRunning).toHaveBeenCalledWith(EXECUTION_ID);
+		});
+	});
+
+	describe('getRunData', () => {
+		it('should throw error to add trigger node', async () => {
+			const workflow = mock<IWorkflowBase>({
+				id: '1',
+				name: 'test',
+				nodes: [],
+				active: false,
+			});
+			await expect(getRunData(workflow)).rejects.toThrowError('Missing node to start execution');
+		});
+
+		const workflow = mock<IWorkflowBase>({
+			id: '1',
+			name: 'test',
+			nodes: [
+				{
+					type: 'n8n-nodes-base.executeWorkflowTrigger',
+				},
+			],
+			active: false,
+		});
+
+		it('should return default data', async () => {
+			expect(await getRunData(workflow)).toEqual({
+				executionData: {
+					executionData: {
+						contextData: {},
+						metadata: {},
+						nodeExecutionStack: [
+							{
+								data: { main: [[{ json: {} }]] },
+								metadata: { parentExecution: undefined },
+								node: workflow.nodes[0],
+								source: null,
+							},
+						],
+						waitingExecution: {},
+						waitingExecutionSource: {},
+					},
+					resultData: { runData: {} },
+					startData: {},
+				},
+				executionMode: 'integrated',
+				workflowData: workflow,
+			});
+		});
+
+		it('should return run data with input data and metadata', async () => {
+			const data = [{ json: { test: 1 } }];
+			const parentExecution = {
+				executionId: '123',
+				workflowId: '567',
+			};
+			expect(await getRunData(workflow, data, parentExecution)).toEqual({
+				executionData: {
+					executionData: {
+						contextData: {},
+						metadata: {},
+						nodeExecutionStack: [
+							{
+								data: { main: [data] },
+								metadata: { parentExecution },
+								node: workflow.nodes[0],
+								source: null,
+							},
+						],
+						waitingExecution: {},
+						waitingExecutionSource: {},
+					},
+					resultData: { runData: {} },
+					startData: {},
+				},
+				executionMode: 'integrated',
+				workflowData: workflow,
+			});
+		});
 	});
 });
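Taken together, the new `executeWorkflow` cases pin down the function's return contract: when the caller waits for the sub-workflow, the resolved value carries the main output of the last executed node plus the execution id, and with `doNotWaitToFinish: true` the data collapses to `[null]`. The `getRunData` cases document the 'integrated'-mode execution payload built around an Execute Workflow Trigger node. A small type sketch of the asserted result shape (the interface name is an assumption for illustration, not an export of n8n-workflow):

```ts
import type { INodeExecutionData } from 'n8n-workflow';

// Shape the tests expect executeWorkflow to resolve with.
interface SubworkflowResult {
	executionId: string;
	// One entry per output of the last executed node; [null] when the caller
	// chose not to wait for the sub-workflow to finish.
	data: Array<INodeExecutionData[] | null>;
}

const waited: SubworkflowResult = {
	executionId: '123',
	data: [[{ json: { test: 1 } }]],
};
const notWaited: SubworkflowResult = { executionId: '123', data: [null] };

console.log(waited, notWaited);
```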