test(core): Fix typing issues in tests (no-changelog) (#10244)
commit 3d23f2f333
parent 5b47c8b57b
@@ -13,7 +13,7 @@
 		"build:backend": "turbo run build:backend",
 		"build:frontend": "turbo run build:frontend",
 		"build:nodes": "turbo run build:nodes",
-		"typecheck": "turbo --filter=!n8n typecheck",
+		"typecheck": "turbo typecheck",
 		"dev": "turbo run dev --parallel --env-mode=loose --filter=!n8n-design-system --filter=!@n8n/chat",
 		"dev:ai": "turbo run dev --parallel --env-mode=loose --filter=@n8n/nodes-langchain --filter=n8n --filter=n8n-core",
 		"clean": "turbo run clean --parallel",

@@ -41,6 +41,9 @@ export class InternalHooks {
 		private readonly sharedWorkflowRepository: SharedWorkflowRepository,
 		workflowStatisticsService: WorkflowStatisticsService,
 		private readonly projectRelationRepository: ProjectRelationRepository,
+		// Can't use @ts-expect-error because only dev time tsconfig considers this as an error, but not build time
+		// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+		// @ts-ignore - needed until we decouple telemetry
 		private readonly _eventBus: MessageEventBus, // needed until we decouple telemetry
 	) {
 		workflowStatisticsService.on('telemetry.onFirstProductionWorkflowSuccess', (metrics) =>

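The comments added above explain why @ts-ignore is used instead of @ts-expect-error: the suppressed error only appears under the dev-time tsconfig, and @ts-expect-error itself becomes an error when the following line compiles cleanly. A standalone sketch of that difference (not code from this repository):

	const n = 1;

	// @ts-expect-error -- reported as an unused directive when the next line is actually valid.
	const a: number = n;

	// @ts-ignore -- silently accepted whether or not the next line has an error.
	const b: number = n;
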
@@ -6,7 +6,6 @@ import type { MessageEventBusLogWriterOptions } from './MessageEventBusLogWriter
 let logFileBasePath = '';
 let loggingPaused = true;
 let keepFiles = 10;
-let fileStatTimer: NodeJS.Timer;
 let maxLogFileSizeInKB = 102400;

 function setLogFileBasePath(basePath: string) {

@@ -117,7 +116,7 @@ if (!isMainThread) {
 	if (logFileBasePath) {
 		renameAndCreateLogs();
 		loggingPaused = false;
-		fileStatTimer = setInterval(async () => {
+		setInterval(async () => {
 			await checkFileSize(buildLogFileNameWithCounter());
 		}, 5000);
 	}

@@ -1,8 +1,16 @@
 import { filterListQueryMiddleware } from './filter';
 import { selectListQueryMiddleware } from './select';
 import { paginationListQueryMiddleware } from './pagination';
+import type { ListQuery } from '@/requests';
+import type { NextFunction, Response } from 'express';

-export const listQueryMiddleware = [
+export type ListQueryMiddleware = (
+	req: ListQuery.Request,
+	res: Response,
+	next: NextFunction,
+) => void;
+
+export const listQueryMiddleware: ListQueryMiddleware[] = [
 	filterListQueryMiddleware,
 	selectListQueryMiddleware,
 	paginationListQueryMiddleware,

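The new ListQueryMiddleware type gives every list-query middleware an explicit Express-style signature instead of relying on inference from the array. A sketch of a conforming middleware; the sortListQueryMiddleware name, its body, and the './index' import path are illustrative assumptions, not part of this change:

	import type { NextFunction, Response } from 'express';

	import type { ListQuery } from '@/requests';
	import type { ListQueryMiddleware } from './index';

	// Hypothetical middleware, shown only to demonstrate how the type is satisfied.
	export const sortListQueryMiddleware: ListQueryMiddleware = (
		req: ListQuery.Request,
		_res: Response,
		next: NextFunction,
	) => {
		// A real implementation would read a query parameter and attach it to the request here.
		next();
	};
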
@@ -200,7 +200,7 @@ test('should anonymize audit message to syslog ', async () => {
 		'message',
 		async function handler005(msg: { command: string; data: any }) {
 			if (msg.command === 'appendMessageToLog') {
-				const sent = await eventBus.getEventsAll();
+				await eventBus.getEventsAll();
 				await confirmIdInAll(testAuditMessage.id);
 				expect(mockedSyslogClientLog).toHaveBeenCalled();
 				eventBus.logWriter.worker?.removeListener('message', handler005);

@@ -217,7 +217,7 @@ test('should anonymize audit message to syslog ', async () => {
 		'message',
 		async function handler006(msg: { command: string; data: any }) {
 			if (msg.command === 'appendMessageToLog') {
-				const sent = await eventBus.getEventsAll();
+				await eventBus.getEventsAll();
 				await confirmIdInAll(testAuditMessage.id);
 				expect(mockedSyslogClientLog).toHaveBeenCalled();
 				syslogDestination.disable();

@@ -12,7 +12,10 @@ import { WaitTracker } from '@/WaitTracker';
 import { createTeamProject, linkUserToProject } from './shared/db/projects';

 mockInstance(WaitTracker);
-mockInstance(ConcurrencyControlService, { isEnabled: false });
+mockInstance(ConcurrencyControlService, {
+	// @ts-expect-error Private property
+	isEnabled: false,
+});

 const testServer = setupTestServer({ endpointGroups: ['executions'] });

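Since isEnabled is a private property of ConcurrencyControlService, setting it through the mock's partial object is a visibility error, so the object literal is expanded to attach a @ts-expect-error to that property. The same pattern in isolation, with generic names rather than the n8n classes:

	class Service {
		private enabled = true;

		isActive() {
			return this.enabled;
		}
	}

	const stub = new Service();
	// @ts-expect-error Private property, deliberately overridden for the test.
	stub.enabled = false;
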
@@ -31,7 +31,6 @@ describe('ProjectService', () => {
 	describe('when user has roles in projects where workflow is accessible', () => {
 		it('should return roles and project IDs', async () => {
 			const user = await createUser();
-			const secondUser = await createUser(); // @TODO: Needed only to satisfy index in legacy column

 			const firstProject = await createTeamProject('Project 1');
 			const secondProject = await createTeamProject('Project 2');

@@ -42,17 +41,15 @@ describe('ProjectService', () => {
 			const workflow = await createWorkflow();

 			await sharedWorkflowRepository.insert({
-				userId: user.id, // @TODO: Legacy column
 				projectId: firstProject.id,
 				workflowId: workflow.id,
 				role: 'workflow:owner',
 			});

 			await sharedWorkflowRepository.insert({
-				userId: secondUser.id, // @TODO: Legacy column
 				projectId: secondProject.id,
 				workflowId: workflow.id,
-				role: 'workflow:user',
+				role: 'workflow:owner',
 			});

 			const projectIds = await projectService.findProjectsWorkflowIsIn(workflow.id);

@@ -63,9 +60,6 @@ describe('ProjectService', () => {

 	describe('when user has no roles in projects where workflow is accessible', () => {
 		it('should return project IDs but no roles', async () => {
-			const user = await createUser();
-			const secondUser = await createUser(); // @TODO: Needed only to satisfy index in legacy column
-
 			const firstProject = await createTeamProject('Project 1');
 			const secondProject = await createTeamProject('Project 2');

@@ -74,17 +68,15 @@ describe('ProjectService', () => {
 			const workflow = await createWorkflow();

 			await sharedWorkflowRepository.insert({
-				userId: user.id, // @TODO: Legacy column
 				projectId: firstProject.id,
 				workflowId: workflow.id,
 				role: 'workflow:owner',
 			});

 			await sharedWorkflowRepository.insert({
-				userId: secondUser.id, // @TODO: Legacy column
 				projectId: secondProject.id,
 				workflowId: workflow.id,
-				role: 'workflow:user',
+				role: 'workflow:owner',
 			});

 			const projectIds = await projectService.findProjectsWorkflowIsIn(workflow.id);

@@ -98,7 +98,6 @@ describe('softDeleteOnPruningCycle()', () => {
 		});

 		test.each<[ExecutionStatus, Partial<ExecutionEntity>]>([
-			['warning', { startedAt: now, stoppedAt: now }],
 			['unknown', { startedAt: now, stoppedAt: now }],
 			['canceled', { startedAt: now, stoppedAt: now }],
 			['crashed', { startedAt: now, stoppedAt: now }],

@@ -191,7 +190,6 @@ describe('softDeleteOnPruningCycle()', () => {
 		});

 		test.each<[ExecutionStatus, Partial<ExecutionEntity>]>([
-			['warning', { startedAt: yesterday, stoppedAt: yesterday }],
 			['unknown', { startedAt: yesterday, stoppedAt: yesterday }],
 			['canceled', { startedAt: yesterday, stoppedAt: yesterday }],
 			['crashed', { startedAt: yesterday, stoppedAt: yesterday }],

@@ -14,6 +14,7 @@ import config from '@/config';
 import { generateNanoId } from '@db/utils/generators';
 import { WorkflowRepository } from '@db/repositories/workflow.repository';
 import Container from 'typedi';
+import { NodeConnectionType } from 'n8n-workflow';

 let securityAuditService: SecurityAuditService;

@@ -156,7 +157,7 @@ test('should not report webhooks validated by direct children', async () => {
 		[
 			{
 				node: 'My Node',
-				type: 'main',
+				type: NodeConnectionType.Main,
 				index: 0,
 			},
 		],

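NodeConnectionType is the string enum from n8n-workflow behind connection type fields, so the fixture now references the enum member rather than the raw 'main' string. The substitution in isolation:

	import { NodeConnectionType } from 'n8n-workflow';

	// The enum member carries the same string value the fixture previously hard-coded.
	const connection = {
		node: 'My Node',
		type: NodeConnectionType.Main,
		index: 0,
	};
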
@@ -9,6 +9,7 @@ import { WorkflowRepository } from '@db/repositories/workflow.repository';
 import type { SharedWorkflow, WorkflowSharingRole } from '@db/entities/SharedWorkflow';
 import { ProjectRepository } from '@/databases/repositories/project.repository';
 import { Project } from '@/databases/entities/Project';
+import { NodeConnectionType } from 'n8n-workflow';

 export async function createManyWorkflows(
 	amount: number,

@@ -157,7 +158,7 @@ export async function createWorkflowWithTrigger(
 				position: [780, 300],
 			},
 		],
-		connections: { Cron: { main: [[{ node: 'Set', type: 'main', index: 0 }]] } },
+		connections: { Cron: { main: [[{ node: 'Set', type: NodeConnectionType.Main, index: 0 }]] } },
 		...attributes,
 	},
 	user,

@@ -96,9 +96,10 @@ export async function initBinaryDataService(mode: 'default' | 'filesystem' = 'de
  * Extract the value (token) of the auth cookie in a response.
  */
 export function getAuthToken(response: request.Response, authCookieName = AUTH_COOKIE_NAME) {
-	const cookies: string[] = response.headers['set-cookie'];
+	const cookiesHeader = response.headers['set-cookie'];
+	if (!cookiesHeader) return undefined;

-	if (!cookies) return undefined;
+	const cookies = Array.isArray(cookiesHeader) ? cookiesHeader : [cookiesHeader];

 	const authCookie = cookies.find((c) => c.startsWith(`${authCookieName}=`));

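The set-cookie header may be absent, a single string, or an array of strings, so the helper now normalizes it before searching for the auth cookie. The normalization step in isolation; the cookie name and value below are made-up examples:

	// Normalize a header that may be missing, a single string, or an array of strings.
	function headerToArray(header: string | string[] | undefined): string[] {
		if (!header) return [];
		return Array.isArray(header) ? header : [header];
	}

	const cookies = headerToArray('n8n-auth=abc123; Path=/; HttpOnly');
	const authCookie = cookies.find((c) => c.startsWith('n8n-auth='));
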
@@ -30,6 +30,8 @@ describe('EnterpriseWorkflowService', () => {
 			Container.get(CredentialsRepository),
 			mock(),
 			mock(),
+			mock(),
+			mock(),
 		);
 	});

@@ -11,7 +11,6 @@ import { Telemetry } from '@/telemetry';
 mockInstance(Telemetry);

 let member: User;
-let anotherMember: User;

 const testServer = utils.setupTestServer({
 	endpointGroups: ['workflows'],

@@ -20,7 +19,6 @@ const testServer = utils.setupTestServer({

 beforeAll(async () => {
 	member = await createUser({ role: 'global:member' });
-	anotherMember = await createUser({ role: 'global:member' });

 	await utils.initNodeTypes();
 });

@@ -20,7 +20,10 @@ const executionRepository = mock<ExecutionRepository>({
 	createNewExecution,
 });

-const concurrencyControl = mockInstance(ConcurrencyControlService, { isEnabled: false });
+const concurrencyControl = mockInstance(ConcurrencyControlService, {
+	// @ts-expect-error Private property
+	isEnabled: false,
+});

 describe('ActiveExecutions', () => {
 	let activeExecutions: ActiveExecutions;

@@ -15,11 +15,6 @@ describe('Telemetry', () => {
 	const spyTrack = jest.spyOn(Telemetry.prototype, 'track').mockName('track');

 	const mockRudderStack = mock<RudderStack>();
-	mockRudderStack.track.mockImplementation(function (_, cb) {
-		cb?.();
-
-		return this;
-	});

 	let telemetry: Telemetry;
 	const instanceId = 'Telemetry unit test';

@@ -33,7 +33,7 @@ function setDefaultConfig() {
 	config.set('generic.instanceType', 'main');
 }

-const workerRestartEventbusResponse: RedisServiceWorkerResponseObject = {
+const workerRestartEventBusResponse: RedisServiceWorkerResponseObject = {
 	senderId: 'test',
 	workerId: 'test',
 	command: 'restartEventBus',

@@ -88,7 +88,7 @@ describe('Orchestration Service', () => {

 	test('should handle worker responses', async () => {
 		const response = await handleWorkerResponseMessageMain(
-			JSON.stringify(workerRestartEventbusResponse),
+			JSON.stringify(workerRestartEventBusResponse),
 		);
 		expect(response.command).toEqual('restartEventBus');
 	});

@@ -108,7 +108,7 @@ describe('Orchestration Service', () => {

 	test('should reject command messages from itself', async () => {
 		const response = await handleCommandMessageMain(
-			JSON.stringify({ ...workerRestartEventbusResponse, senderId: queueModeId }),
+			JSON.stringify({ ...workerRestartEventBusResponse, senderId: queueModeId }),
 		);
 		expect(response).toBeDefined();
 		expect(response!.command).toEqual('restartEventBus');

@@ -141,7 +141,7 @@ describe('Orchestration Service', () => {
 		);
 		expect(helpers.debounceMessageReceiver).toHaveBeenCalledTimes(2);
 		expect(res1!.payload).toBeUndefined();
-		expect(res2!.payload!.result).toEqual('debounced');
+		expect((res2!.payload as { result: string }).result).toEqual('debounced');
 	});

 	describe('shouldAddWebhooks', () => {

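The response payload is typed too loosely for TypeScript to know it carries a result field, so the test narrows it with an inline assertion. A standalone sketch of that narrowing, alongside the stricter type-guard alternative:

	const payload: unknown = { result: 'debounced' };

	// Inline assertion, as in the test above:
	const viaAssertion = (payload as { result: string }).result;

	// A type guard achieves the same narrowing without an unchecked cast:
	function hasResult(value: unknown): value is { result: string } {
		return (
			typeof value === 'object' &&
			value !== null &&
			typeof (value as { result?: unknown }).result === 'string'
		);
	}

	const viaGuard = hasResult(payload) ? payload.result : undefined;
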
@@ -118,7 +118,7 @@ describe('WorkflowStatisticsService', () => {
 		};
 		const runData: IRun = {
 			finished: false,
-			status: 'failed',
+			status: 'error',
 			data: { resultData: { runData: {} } },
 			mode: 'internal' as WorkflowExecuteMode,
 			startedAt: new Date(),

@@ -206,7 +206,7 @@ describe('WorkflowStatisticsService', () => {

 	test('should not send metrics for entries that already have the flag set', async () => {
 		// Fetch data for workflow 2 which is set up to not be altered in the mocks
-		entityManager.insert.mockRejectedValueOnce(new QueryFailedError('', undefined, ''));
+		entityManager.insert.mockRejectedValueOnce(new QueryFailedError('', undefined, new Error()));
 		const workflowId = '1';
 		const node = {
 			id: 'abcde',

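TypeORM's QueryFailedError takes the underlying driver error as its third constructor argument, so under the stricter typings an empty string no longer fits and the test passes an Error instance. A minimal sketch; the query text and message are placeholders:

	import { QueryFailedError } from 'typeorm';

	// Arguments: query, parameters, driverError -- the driver error must be an actual Error.
	const failure = new QueryFailedError('INSERT INTO example_table ...', undefined, new Error('duplicate key'));
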
@@ -2,7 +2,6 @@
 	"extends": ["../../tsconfig.json", "../../tsconfig.backend.json"],
 	"compilerOptions": {
 		"rootDir": ".",
-		"preserveSymlinks": true,
 		"emitDecoratorMetadata": true,
 		"experimentalDecorators": true,
 		"baseUrl": "src",