Mirror of https://github.com/n8n-io/n8n.git, synced 2025-01-11 04:47:29 -08:00
ci: Expand ESLint to tests in BE packages (no-changelog) (#6147)
* 🔧 Adjust base ESLint config
* 🔧 Adjust `lint` and `lintfix` in `nodes-base`
* 🔧 Include `test` and `utils` in `nodes-base`
* 📘 Convert JS tests to TS
* 👕 Apply lintfixes
This commit is contained in:
parent: c63181b317
commit: 06fa6f1fb3
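Most of the lintfix churn in the hunks below follows one recurring pattern: imports that are only used in type positions are rewritten as type-only imports, so the compiler can erase them from the emitted JavaScript. The diff itself does not name the rule that drives this; a rule such as `@typescript-eslint/consistent-type-imports` would produce exactly these rewrites. A minimal sketch of the pattern, reusing the `Role` entity import that appears in the test files below:

// Before the lintfix (value import, even though Role is only used as a type):
//   import { Role } from '@db/entities/Role';
// After the lintfix (type-only import, erased from the compiled output):
import type { Role } from '@db/entities/Role';

let globalOwnerRole: Role;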
@@ -6,9 +6,7 @@ const config = (module.exports = {
'node_modules/**',
'dist/**',
// TODO: remove these
'test/**',
'.eslintrc.js',
'jest.config.js',
'*.js',
],

plugins: [

@@ -452,6 +450,43 @@ const config = (module.exports = {
'@typescript-eslint/no-unused-vars': 'off',
},
},
{
files: ['test/**/*.ts'],
// TODO: Remove these
rules: {
'@typescript-eslint/await-thenable': 'off',
'@typescript-eslint/ban-ts-comment': 'off',
'@typescript-eslint/naming-convention': 'off',
'@typescript-eslint/no-duplicate-imports': 'off',
'@typescript-eslint/no-empty-function': 'off',
'@typescript-eslint/no-floating-promises': 'off',
'@typescript-eslint/no-loop-func': 'off',
'@typescript-eslint/no-non-null-assertion': 'off',
'@typescript-eslint/no-shadow': 'off',
'@typescript-eslint/no-throw-literal': 'off',
'@typescript-eslint/no-unsafe-argument': 'off',
'@typescript-eslint/no-unsafe-assignment': 'off',
'@typescript-eslint/no-unsafe-call': 'off',
'@typescript-eslint/no-unsafe-member-access': 'off',
'@typescript-eslint/no-unsafe-return': 'off',
'@typescript-eslint/no-unused-expressions': 'off',
'@typescript-eslint/no-unused-vars': 'off',
'@typescript-eslint/no-use-before-define': 'off',
'@typescript-eslint/no-var-requires': 'off',
'@typescript-eslint/prefer-nullish-coalescing': 'off',
'@typescript-eslint/prefer-optional-chain': 'off',
'@typescript-eslint/restrict-plus-operands': 'off',
'@typescript-eslint/restrict-template-expressions': 'off',
'@typescript-eslint/unbound-method': 'off',
'id-denylist': 'off',
'import/no-cycle': 'off',
'import/no-default-export': 'off',
'import/no-extraneous-dependencies': 'off',
'n8n-local-rules/no-uncaught-json-parse': 'off',
'prefer-const': 'off',
'prefer-spread': 'off',
},
},
],
});
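Taken together, the two hunks above change how test files are treated: `test/**` is no longer ignored outright, and a dedicated `overrides` entry relaxes a long list of rules for `test/**/*.ts` until they can be re-enabled one by one (hence the TODO). Structurally, an override like this scopes rules to a glob without affecting the rest of the package. A stripped-down sketch of the same shape (the rule shown is a placeholder, not the actual list above):

// Sketch only: an overrides entry applies on top of the base config,
// but only to files matching its glob.
module.exports = {
  ignorePatterns: ['node_modules/**', 'dist/**'],
  overrides: [
    {
      files: ['test/**/*.ts'],
      rules: {
        // temporarily relaxed for tests; to be tightened later
        '@typescript-eslint/no-unsafe-assignment': 'off',
      },
    },
  ],
};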
@@ -25,7 +25,7 @@ test('import:workflow should import active workflow and deactivate it', async ()
['--separate', '--input=./test/integration/commands/importWorkflows/separate'],
config,
);
const mockExit = jest.spyOn(process, 'exit').mockImplementation((number) => {
const mockExit = jest.spyOn(process, 'exit').mockImplementation(() => {
throw new Error('process.exit');
});

@@ -52,7 +52,7 @@ test('import:workflow should import active workflow from combined file and deact
['--input=./test/integration/commands/importWorkflows/combined/combined.json'],
config,
);
const mockExit = jest.spyOn(process, 'exit').mockImplementation((number) => {
const mockExit = jest.spyOn(process, 'exit').mockImplementation(() => {
throw new Error('process.exit');
});
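The only change in these two hunks is that the mock's unused `number` parameter is dropped. That is safe: the spy still replaces `process.exit`, and the thrown error short-circuits the command under test regardless of whatever exit code was passed in. A standalone sketch of the same mock (the surrounding assertions are hypothetical, not from this file):

// Replace process.exit so a CLI command under test cannot terminate Jest;
// the exit-code argument is simply ignored by the mock.
const mockExit = jest.spyOn(process, 'exit').mockImplementation(() => {
  throw new Error('process.exit');
});

// ...run the command under test, then assert and restore:
expect(mockExit).toHaveBeenCalled();
mockExit.mockRestore();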
@@ -415,7 +415,7 @@ describe('PUT /credentials/:id/share', () => {
test('should respond 403 for non-existing credentials', async () => {
const response = await authOwnerAgent
.put(`/credentials/1234567/share`)
.put('/credentials/1234567/share')
.send({ shareWithIds: [member.id] });
expect(response.statusCode).toBe(403);
@@ -11,9 +11,8 @@ import type { Role } from '@db/entities/Role';
import type { User } from '@db/entities/User';
import { randomCredentialPayload, randomName, randomString } from './shared/random';
import * as testDb from './shared/testDb';
import type { SaveCredentialFunction } from './shared/types';
import type { AuthAgent, SaveCredentialFunction } from './shared/types';
import * as utils from './shared/utils';
import type { AuthAgent } from './shared/types';

// mock that credentialsSharing is not enabled
const mockIsCredentialsSharingEnabled = jest.spyOn(UserManagementHelpers, 'isSharingEnabled');

@@ -124,7 +123,7 @@ describe('POST /credentials', () => {
expect(credential.name).toBe(payload.name);
expect(credential.type).toBe(payload.type);
expect(credential.nodesAccess[0].nodeType).toBe(payload.nodesAccess![0].nodeType);
expect(credential.nodesAccess[0].nodeType).toBe(payload.nodesAccess[0].nodeType);
expect(credential.data).not.toBe(payload.data);
const sharedCredential = await Db.collections.SharedCredentials.findOneOrFail({

@@ -278,7 +277,7 @@ describe('PATCH /credentials/:id', () => {
expect(credential.name).toBe(patchPayload.name);
expect(credential.type).toBe(patchPayload.type);
expect(credential.nodesAccess[0].nodeType).toBe(patchPayload.nodesAccess![0].nodeType);
expect(credential.nodesAccess[0].nodeType).toBe(patchPayload.nodesAccess[0].nodeType);
expect(credential.data).not.toBe(patchPayload.data);
const sharedCredential = await Db.collections.SharedCredentials.findOneOrFail({

@@ -315,7 +314,7 @@ describe('PATCH /credentials/:id', () => {
expect(credential.name).toBe(patchPayload.name);
expect(credential.type).toBe(patchPayload.type);
expect(credential.nodesAccess[0].nodeType).toBe(patchPayload.nodesAccess![0].nodeType);
expect(credential.nodesAccess[0].nodeType).toBe(patchPayload.nodesAccess[0].nodeType);
expect(credential.data).not.toBe(patchPayload.data);
const sharedCredential = await Db.collections.SharedCredentials.findOneOrFail({

@@ -352,7 +351,7 @@ describe('PATCH /credentials/:id', () => {
expect(credential.name).toBe(patchPayload.name);
expect(credential.type).toBe(patchPayload.type);
expect(credential.nodesAccess[0].nodeType).toBe(patchPayload.nodesAccess![0].nodeType);
expect(credential.nodesAccess[0].nodeType).toBe(patchPayload.nodesAccess[0].nodeType);
expect(credential.data).not.toBe(patchPayload.data);
const sharedCredential = await Db.collections.SharedCredentials.findOneOrFail({
@@ -1,4 +1,4 @@
import express from 'express';
import type express from 'express';
import config from '@/config';
import axios from 'axios';
import syslog from 'syslog-client';

@@ -7,23 +7,25 @@ import { Container } from 'typedi';
import type { SuperAgentTest } from 'supertest';
import * as utils from './shared/utils';
import * as testDb from './shared/testDb';
import { Role } from '@db/entities/Role';
import { User } from '@db/entities/User';
import {
defaultMessageEventBusDestinationSentryOptions,
defaultMessageEventBusDestinationSyslogOptions,
defaultMessageEventBusDestinationWebhookOptions,
import type { Role } from '@db/entities/Role';
import type { User } from '@db/entities/User';
import type {
MessageEventBusDestinationSentryOptions,
MessageEventBusDestinationSyslogOptions,
MessageEventBusDestinationWebhookOptions,
} from 'n8n-workflow';
import {
defaultMessageEventBusDestinationSentryOptions,
defaultMessageEventBusDestinationSyslogOptions,
defaultMessageEventBusDestinationWebhookOptions,
} from 'n8n-workflow';
import { eventBus } from '@/eventbus';
import { EventMessageGeneric } from '@/eventbus/EventMessageClasses/EventMessageGeneric';
import { MessageEventBusDestinationSyslog } from '@/eventbus/MessageEventBusDestination/MessageEventBusDestinationSyslog.ee';
import { MessageEventBusDestinationWebhook } from '@/eventbus/MessageEventBusDestination/MessageEventBusDestinationWebhook.ee';
import { MessageEventBusDestinationSentry } from '@/eventbus/MessageEventBusDestination/MessageEventBusDestinationSentry.ee';
import type { MessageEventBusDestinationSyslog } from '@/eventbus/MessageEventBusDestination/MessageEventBusDestinationSyslog.ee';
import type { MessageEventBusDestinationWebhook } from '@/eventbus/MessageEventBusDestination/MessageEventBusDestinationWebhook.ee';
import type { MessageEventBusDestinationSentry } from '@/eventbus/MessageEventBusDestination/MessageEventBusDestinationSentry.ee';
import { EventMessageAudit } from '@/eventbus/EventMessageClasses/EventMessageAudit';
import { EventNamesTypes } from '@/eventbus/EventMessageClasses';
import type { EventNamesTypes } from '@/eventbus/EventMessageClasses';
import { License } from '@/License';

jest.unmock('@/eventbus/MessageEventBus/MessageEventBus');

@@ -51,7 +53,7 @@ const testWebhookDestination: MessageEventBusDestinationWebhookOptions = {
...defaultMessageEventBusDestinationWebhookOptions,
id: '88be6560-bfb4-455c-8aa1-06971e9e5522',
url: 'http://localhost:3456',
method: `POST`,
method: 'POST',
label: 'Test Webhook',
enabled: false,
subscribedEvents: ['n8n.test.message', 'n8n.audit.user.updated'],
@@ -1,4 +1,4 @@
import express from 'express';
import type express from 'express';
import type { Entry as LdapUser } from 'ldapts';
import { Not } from 'typeorm';
import { Container } from 'typedi';
@@ -1,7 +1,7 @@
import type { SuperAgentTest } from 'supertest';
import config from '@/config';
import type { User } from '@db/entities/User';
import { ILicensePostResponse, ILicenseReadResponse } from '@/Interfaces';
import type { ILicensePostResponse, ILicenseReadResponse } from '@/Interfaces';
import { License } from '@/License';
import * as testDb from './shared/testDb';
import * as utils from './shared/utils';
@@ -23,7 +23,7 @@ let globalOwnerRole: Role;
let globalMemberRole: Role;
let owner: User;
let authlessAgent: SuperAgentTest;
let externalHooks = utils.mockInstance(ExternalHooks);
const externalHooks = utils.mockInstance(ExternalHooks);

beforeAll(async () => {
const app = await utils.initTestServer({ endpointGroups: ['passwordReset'] });
@@ -213,7 +213,7 @@ describe('GET /executions', () => {
await testDb.createErrorExecution(workflow);

const response = await authOwnerAgent.get(`/executions`).query({
const response = await authOwnerAgent.get('/executions').query({
status: 'success',
});

@@ -254,7 +254,7 @@ describe('GET /executions', () => {
await testDb.createErrorExecution(workflow);

const firstExecutionResponse = await authOwnerAgent.get(`/executions`).query({
const firstExecutionResponse = await authOwnerAgent.get('/executions').query({
status: 'success',
limit: 1,
});

@@ -263,7 +263,7 @@ describe('GET /executions', () => {
expect(firstExecutionResponse.body.data.length).toBe(1);
expect(firstExecutionResponse.body.nextCursor).toBeDefined();

const secondExecutionResponse = await authOwnerAgent.get(`/executions`).query({
const secondExecutionResponse = await authOwnerAgent.get('/executions').query({
status: 'success',
limit: 1,
cursor: firstExecutionResponse.body.nextCursor,

@@ -308,7 +308,7 @@ describe('GET /executions', () => {
const errorExecution = await testDb.createErrorExecution(workflow);

const response = await authOwnerAgent.get(`/executions`).query({
const response = await authOwnerAgent.get('/executions').query({
status: 'error',
});

@@ -348,7 +348,7 @@ describe('GET /executions', () => {
const waitingExecution = await testDb.createWaitingExecution(workflow);

const response = await authOwnerAgent.get(`/executions`).query({
const response = await authOwnerAgent.get('/executions').query({
status: 'waiting',
});

@@ -389,7 +389,7 @@ describe('GET /executions', () => {
);
await testDb.createManyExecutions(2, workflow2, testDb.createSuccessfulExecution);

const response = await authOwnerAgent.get(`/executions`).query({
const response = await authOwnerAgent.get('/executions').query({
workflowId: workflow.id,
});

@@ -439,7 +439,7 @@ describe('GET /executions', () => {
await testDb.createManyExecutions(2, firstWorkflowForUser2, testDb.createSuccessfulExecution);
await testDb.createManyExecutions(2, secondWorkflowForUser2, testDb.createSuccessfulExecution);

const response = await authOwnerAgent.get(`/executions`);
const response = await authOwnerAgent.get('/executions');

expect(response.statusCode).toBe(200);
expect(response.body.data.length).toBe(8);

@@ -463,7 +463,7 @@ describe('GET /executions', () => {
await testDb.createManyExecutions(2, firstWorkflowForUser2, testDb.createSuccessfulExecution);
await testDb.createManyExecutions(2, secondWorkflowForUser2, testDb.createSuccessfulExecution);

const response = await authUser1Agent.get(`/executions`);
const response = await authUser1Agent.get('/executions');

expect(response.statusCode).toBe(200);
expect(response.body.data.length).toBe(4);

@@ -489,7 +489,7 @@ describe('GET /executions', () => {
await testDb.shareWorkflowWithUsers(firstWorkflowForUser2, [user1]);

const response = await authUser1Agent.get(`/executions`);
const response = await authUser1Agent.get('/executions');

expect(response.statusCode).toBe(200);
expect(response.body.data.length).toBe(6);
@@ -309,7 +309,7 @@ describe('GET /workflows/:id', () => {
test('should fail due to invalid API Key', testWithAPIKey('get', '/workflows/2', 'abcXYZ'));

test('should fail due to non-existing workflow', async () => {
const response = await authOwnerAgent.get(`/workflows/2`);
const response = await authOwnerAgent.get('/workflows/2');
expect(response.statusCode).toBe(404);
});

@@ -375,7 +375,7 @@ describe('DELETE /workflows/:id', () => {
test('should fail due to invalid API Key', testWithAPIKey('delete', '/workflows/2', 'abcXYZ'));

test('should fail due to non-existing workflow', async () => {
const response = await authOwnerAgent.delete(`/workflows/2`);
const response = await authOwnerAgent.delete('/workflows/2');
expect(response.statusCode).toBe(404);
});

@@ -447,7 +447,7 @@ describe('POST /workflows/:id/activate', () => {
);

test('should fail due to non-existing workflow', async () => {
const response = await authOwnerAgent.post(`/workflows/2/activate`);
const response = await authOwnerAgent.post('/workflows/2/activate');
expect(response.statusCode).toBe(404);
});

@@ -549,7 +549,7 @@ describe('POST /workflows/:id/deactivate', () => {
);

test('should fail due to non-existing workflow', async () => {
const response = await authOwnerAgent.post(`/workflows/2/deactivate`);
const response = await authOwnerAgent.post('/workflows/2/deactivate');
expect(response.statusCode).toBe(404);
});

@@ -709,7 +709,7 @@ describe('PUT /workflows/:id', () => {
test('should fail due to invalid API Key', testWithAPIKey('put', '/workflows/1', 'abcXYZ'));

test('should fail due to non-existing workflow', async () => {
const response = await authOwnerAgent.put(`/workflows/1`).send({
const response = await authOwnerAgent.put('/workflows/1').send({
name: 'testing',
nodes: [
{

@@ -737,7 +737,7 @@ describe('PUT /workflows/:id', () => {
});

test('should fail due to invalid body', async () => {
const response = await authOwnerAgent.put(`/workflows/1`).send({
const response = await authOwnerAgent.put('/workflows/1').send({
nodes: [
{
id: 'uuid-1234',
@@ -11,8 +11,8 @@ import * as utils from '../shared/utils';
import { sampleConfig } from './sampleMetadata';
import { InternalHooks } from '@/InternalHooks';
import { SamlService } from '@/sso/saml/saml.service.ee';
import { SamlUserAttributes } from '@/sso/saml/types/samlUserAttributes';
import { AuthenticationMethod } from 'n8n-workflow';
import type { SamlUserAttributes } from '@/sso/saml/types/samlUserAttributes';
import type { AuthenticationMethod } from 'n8n-workflow';

let someUser: User;
let owner: User;
@@ -1,8 +1,8 @@
import config from '@/config';

export const REST_PATH_SEGMENT = config.getEnv('endpoints.rest') as Readonly<string>;
export const REST_PATH_SEGMENT = config.getEnv('endpoints.rest');

export const PUBLIC_API_REST_PATH_SEGMENT = config.getEnv('publicApi.path') as Readonly<string>;
export const PUBLIC_API_REST_PATH_SEGMENT = config.getEnv('publicApi.path');

export const AUTHLESS_ENDPOINTS: Readonly<string[]> = [
'healthz',
@@ -1,9 +1,6 @@
import { UserSettings } from 'n8n-core';
import {
DataSource as Connection,
DataSourceOptions as ConnectionOptions,
Repository,
} from 'typeorm';
import type { DataSourceOptions as ConnectionOptions, Repository } from 'typeorm';
import { DataSource as Connection } from 'typeorm';
import { Container } from 'typedi';

import config from '@/config';

@@ -24,7 +21,7 @@ import type { TagEntity } from '@db/entities/TagEntity';
import type { User } from '@db/entities/User';
import type { WorkflowEntity } from '@db/entities/WorkflowEntity';
import { RoleRepository } from '@db/repositories';
import { ICredentialsDb } from '@/Interfaces';
import type { ICredentialsDb } from '@/Interfaces';

import { DB_INITIALIZATION_TIMEOUT } from './constants';
import { randomApiKey, randomEmail, randomName, randomString, randomValidPassword } from './random';

@@ -211,6 +208,7 @@ export async function createManyUsers(
amount: number,
attributes: Partial<User> = {},
): Promise<User[]> {
// eslint-disable-next-line prefer-const
let { email, password, firstName, lastName, globalRole, ...rest } = attributes;
if (!globalRole) {
globalRole = await getGlobalMemberRole();
@@ -7,25 +7,23 @@ import { CronJob } from 'cron';
import express from 'express';
import set from 'lodash.set';
import { BinaryDataManager, UserSettings } from 'n8n-core';
import {
import type {
ICredentialType,
IDataObject,
IExecuteFunctions,
INode,
INodeExecutionData,
INodeParameters,
ITriggerFunctions,
ITriggerResponse,
LoggerProxy,
NodeHelpers,
toCronExpression,
TriggerTime,
} from 'n8n-workflow';
import superagent from 'superagent';
import { deepCopy } from 'n8n-workflow';
import { LoggerProxy, NodeHelpers, toCronExpression } from 'n8n-workflow';
import type superagent from 'superagent';
import request from 'supertest';
import { URL } from 'url';
import { mock } from 'jest-mock-extended';
import { DeepPartial } from 'ts-essentials';
import type { DeepPartial } from 'ts-essentials';
import config from '@/config';
import * as Db from '@/Db';
import { WorkflowEntity } from '@db/entities/WorkflowEntity';

@@ -368,7 +366,7 @@ export async function initNodeTypes() {
outputs: ['main'],
properties: [],
},
execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData();

return this.prepareOutputData(items);

@@ -571,7 +569,7 @@ export async function initNodeTypes() {
},
],
},
execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData();

if (items.length === 0) {

@@ -585,13 +583,13 @@ export async function initNodeTypes() {
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
keepOnlySet = this.getNodeParameter('keepOnlySet', itemIndex, false) as boolean;
item = items[itemIndex];
const options = this.getNodeParameter('options', itemIndex, {}) as IDataObject;
const options = this.getNodeParameter('options', itemIndex, {});

const newItem: INodeExecutionData = {
json: {},
};

if (keepOnlySet !== true) {
if (!keepOnlySet) {
if (item.binary !== undefined) {
// Create a shallow copy of the binary data so that the old
// data references which do not get changed still stay behind

@@ -600,7 +598,7 @@ export async function initNodeTypes() {
Object.assign(newItem.binary, item.binary);
}

newItem.json = JSON.parse(JSON.stringify(item.json));
newItem.json = deepCopy(item.json);
}

// Add boolean values

@@ -708,7 +706,7 @@ export function createAuthAgent(app: express.Application) {
* Example: http://127.0.0.1:62100/me/password → http://127.0.0.1:62100/rest/me/password
*/
export function prefix(pathSegment: string) {
return function (request: superagent.SuperAgentRequest) {
return async function (request: superagent.SuperAgentRequest) {
const url = new URL(request.url);

// enforce consistency at call sites
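Two fixes recur in this file and in the other Helpers/utils files below: functions that return a promise gain an explicit `async` keyword (the `execute` methods and the request-prefix helper), and `JSON.parse(JSON.stringify(...))` deep clones are replaced with n8n-workflow's `deepCopy`. The diff does not name the rule behind the first change; `@typescript-eslint/promise-function-async` produces exactly this rewrite. A small sketch of both, using a hypothetical item shape:

import { deepCopy } from 'n8n-workflow';

// A function that returns a Promise is marked async, so the signature alone
// makes clear it never throws synchronously.
async function loadItems(): Promise<Array<{ json: object }>> {
  return [{ json: { value: 1 } }];
}

// deepCopy replaces the JSON.parse(JSON.stringify(...)) round-trip clone.
const original = { json: { value: 1 } };
const clone = deepCopy(original.json);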
@@ -514,7 +514,7 @@ describe('UserManagementMailer expect NodeMailer.verifyConnection', () => {
test('not be called when SMTP not set up', async () => {
const userManagementMailer = new UserManagementMailer();
// NodeMailer.verifyConnection gets called only explicitly
expect(async () => await userManagementMailer.verifyConnection()).rejects.toThrow();
expect(async () => userManagementMailer.verifyConnection()).rejects.toThrow();

expect(NodeMailer.prototype.verifyConnection).toHaveBeenCalledTimes(0);
});

@@ -526,6 +526,6 @@ describe('UserManagementMailer expect NodeMailer.verifyConnection', () => {
const userManagementMailer = new UserManagementMailer();
// NodeMailer.verifyConnection gets called only explicitly
expect(async () => await userManagementMailer.verifyConnection()).not.toThrow();
expect(async () => userManagementMailer.verifyConnection()).not.toThrow();
});
});
@@ -5,7 +5,6 @@ import * as testDb from './shared/testDb';
import * as utils from './shared/utils';

import type { AuthAgent } from './shared/types';
import type { ClassLike, MockedClass } from 'jest-mock';
import { License } from '@/License';

// mock that credentialsSharing is not enabled

@@ -14,7 +13,7 @@ let ownerUser: User;
let memberUser: User;
let authAgent: AuthAgent;
let variablesSpy: jest.SpyInstance<boolean>;
let licenseLike = {
const licenseLike = {
isVariablesEnabled: jest.fn().mockReturnValue(true),
getVariablesLimit: jest.fn().mockReturnValue(-1),
};
@@ -1,4 +1,4 @@
import { SuperAgentTest } from 'supertest';
import type { SuperAgentTest } from 'supertest';
import type { IPinData } from 'n8n-workflow';

import type { User } from '@db/entities/User';
@@ -12,14 +12,14 @@ export default async () => {
const query =
dbType === 'postgres' ? 'SELECT datname as "Database" FROM pg_database' : 'SHOW DATABASES';
const results: { Database: string }[] = await connection.query(query);
const results: Array<{ Database: string }> = await connection.query(query);
const databases = results
.filter(
({ Database: dbName }) => dbName.startsWith(`${dbType}_`) && dbName.endsWith('_n8n_test'),
)
.map(({ Database: dbName }) => dbName);

const promises = databases.map((dbName) => connection.query(`DROP DATABASE ${dbName};`));
const promises = databases.map(async (dbName) => connection.query(`DROP DATABASE ${dbName};`));
await Promise.all(promises);
await connection.destroy();
};
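Two things change in this teardown: the query result type `{ Database: string }[]` becomes `Array<{ Database: string }>`, and the promise-returning `.map` callback is marked `async`. The first matches what `@typescript-eslint/array-type` does with its `array-simple` setting (the short `T[]` form is kept for simple element types elsewhere in this commit, e.g. `WorkflowTestData[]`), although the configuration itself is not visible in this diff. A sketch of that convention:

// Simple element types keep the short form...
const names: string[] = ['postgres_one_n8n_test'];

// ...while object or otherwise complex element types use the generic form.
const rows: Array<{ Database: string }> = [{ Database: 'postgres_one_n8n_test' }];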
@@ -3,13 +3,9 @@ import { ActiveExecutions } from '@/ActiveExecutions';
import { mocked } from 'jest-mock';
import PCancelable from 'p-cancelable';
import { v4 as uuid } from 'uuid';
import {
createDeferredPromise,
IDeferredPromise,
IExecuteResponsePromiseData,
IRun,
} from 'n8n-workflow';
import { IWorkflowExecutionDataProcess } from '@/Interfaces';
import type { IDeferredPromise, IExecuteResponsePromiseData, IRun } from 'n8n-workflow';
import { createDeferredPromise } from 'n8n-workflow';
import type { IWorkflowExecutionDataProcess } from '@/Interfaces';

const FAKE_EXECUTION_ID = '15';
const FAKE_SECOND_EXECUTION_ID = '20';

@@ -160,12 +156,12 @@ function mockFullRunData(): IRun {
};
}

function mockCancelablePromise(): PCancelable<IRun> {
async function mockCancelablePromise(): PCancelable<IRun> {
return new PCancelable(async (resolve) => {
resolve();
});
}

function mockDeferredPromise(): Promise<IDeferredPromise<IExecuteResponsePromiseData>> {
async function mockDeferredPromise(): Promise<IDeferredPromise<IExecuteResponsePromiseData>> {
return createDeferredPromise<IExecuteResponsePromiseData>();
}
@@ -1,13 +1,8 @@
import { v4 as uuid } from 'uuid';
import { mocked } from 'jest-mock';

import {
ICredentialTypes,
INodesAndCredentials,
LoggerProxy,
NodeOperationError,
Workflow,
} from 'n8n-workflow';
import type { ICredentialTypes, INodesAndCredentials } from 'n8n-workflow';
import { LoggerProxy, NodeOperationError, Workflow } from 'n8n-workflow';

import { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner';
import * as Db from '@/Db';

@@ -22,7 +17,7 @@ import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData'
import { WorkflowRunner } from '@/WorkflowRunner';
import { mock } from 'jest-mock-extended';
import { ExternalHooks } from '@/ExternalHooks';
import type { ExternalHooks } from '@/ExternalHooks';
import { Container } from 'typedi';
import { LoadNodesAndCredentials } from '@/LoadNodesAndCredentials';
import { mockInstance } from '../integration/shared/utils';
@@ -145,7 +145,7 @@ describe('executeCommand', () => {
);
});

await expect(async () => await executeCommand('ls')).rejects.toThrow(
await expect(async () => executeCommand('ls')).rejects.toThrow(
RESPONSE_ERROR_MESSAGES.PACKAGE_NOT_FOUND,
);
@@ -1,4 +1,4 @@
import {
import type {
IAuthenticateGeneric,
ICredentialDataDecryptedObject,
ICredentialType,

@@ -7,8 +7,9 @@ import {
INode,
INodeProperties,
INodesAndCredentials,
Workflow,
} from 'n8n-workflow';
import { deepCopy } from 'n8n-workflow';
import { Workflow } from 'n8n-workflow';
import { CredentialsHelper } from '@/CredentialsHelper';
import { CredentialTypes } from '@/CredentialTypes';
import { Container } from 'typedi';

@@ -82,7 +83,9 @@ describe('CredentialsHelper', () => {
},
credentialType: new (class TestApi implements ICredentialType {
name = 'testApi';

displayName = 'Test API';

properties: INodeProperties[] = [
{
displayName: 'User',

@@ -124,7 +127,9 @@ describe('CredentialsHelper', () => {
},
credentialType: new (class TestApi implements ICredentialType {
name = 'testApi';

displayName = 'Test API';

properties: INodeProperties[] = [
{
displayName: 'Access Token',

@@ -154,7 +159,9 @@ describe('CredentialsHelper', () => {
},
credentialType: new (class TestApi implements ICredentialType {
name = 'testApi';

displayName = 'Test API';

properties: INodeProperties[] = [
{
displayName: 'Access Token',

@@ -184,7 +191,9 @@ describe('CredentialsHelper', () => {
},
credentialType: new (class TestApi implements ICredentialType {
name = 'testApi';

displayName = 'Test API';

properties: INodeProperties[] = [
{
displayName: 'Access Token',

@@ -215,7 +224,9 @@ describe('CredentialsHelper', () => {
},
credentialType: new (class TestApi implements ICredentialType {
name = 'testApi';

displayName = 'Test API';

properties: INodeProperties[] = [
{
displayName: 'My Token',

@@ -229,8 +240,8 @@ describe('CredentialsHelper', () => {
credentials: ICredentialDataDecryptedObject,
requestOptions: IHttpRequestOptions,
): Promise<IHttpRequestOptions> {
requestOptions.headers!['Authorization'] = `Bearer ${credentials.accessToken}`;
requestOptions.qs!['user'] = credentials.user;
requestOptions.headers!.Authorization = `Bearer ${credentials.accessToken}`;
requestOptions.qs!.user = credentials.user;
return requestOptions;
}
})(),

@@ -287,7 +298,7 @@ describe('CredentialsHelper', () => {
const result = await credentialsHelper.authenticate(
testData.input.credentials,
testData.input.credentialType.name,
JSON.parse(JSON.stringify(incomingRequestOptions)),
deepCopy(incomingRequestOptions),
workflow,
node,
timezone,
@@ -17,7 +17,8 @@ describe('CurlConverterHelper', () => {
});

test('Should parse JSON content type correctly', () => {
const curl = `curl -X POST https://reqbin.com/echo/post/json -H 'Content-Type: application/json' -d '{"login":"my_login","password":"my_password"}'`;
const curl =
'curl -X POST https://reqbin.com/echo/post/json -H \'Content-Type: application/json\' -d \'{"login":"my_login","password":"my_password"}\'';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo/post/json');
expect(parameters.sendBody).toBe(true);

@@ -31,7 +32,8 @@ describe('CurlConverterHelper', () => {
});

test('Should parse multipart-form-data content type correctly', () => {
const curl = `curl -X POST https://reqbin.com/echo/post/json -v -F key1=value1 -F upload=@localfilename`;
const curl =
'curl -X POST https://reqbin.com/echo/post/json -v -F key1=value1 -F upload=@localfilename';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo/post/json');
expect(parameters.sendBody).toBe(true);

@@ -46,7 +48,8 @@ describe('CurlConverterHelper', () => {
});

test('Should parse binary request correctly', () => {
const curl = `curl --location --request POST 'https://www.website.com' --header 'Content-Type: image/png' --data-binary '@/Users/image.png`;
const curl =
"curl --location --request POST 'https://www.website.com' --header 'Content-Type: image/png' --data-binary '@/Users/image.png";
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://www.website.com');
expect(parameters.method).toBe('POST');

@@ -74,7 +77,8 @@ describe('CurlConverterHelper', () => {
});

test('Should parse header properties and keep the original case', () => {
const curl = `curl -X POST https://reqbin.com/echo/post/json -v -F key1=value1 -F upload=@localfilename -H "ACCEPT: text/javascript" -H "content-type: multipart/form-data"`;
const curl =
'curl -X POST https://reqbin.com/echo/post/json -v -F key1=value1 -F upload=@localfilename -H "ACCEPT: text/javascript" -H "content-type: multipart/form-data"';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo/post/json');
expect(parameters.sendBody).toBe(true);

@@ -91,7 +95,7 @@ describe('CurlConverterHelper', () => {
});

test('Should parse querystring properties', () => {
const curl = `curl -G -d 'q=kitties' -d 'count=20' https://google.com/search`;
const curl = "curl -G -d 'q=kitties' -d 'count=20' https://google.com/search";
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://google.com/search');
expect(parameters.sendBody).toBe(false);

@@ -105,7 +109,7 @@ describe('CurlConverterHelper', () => {
});

test('Should parse basic authentication property and keep the original case', () => {
const curl = `curl https://reqbin.com/echo -u "login:password"`;
const curl = 'curl https://reqbin.com/echo -u "login:password"';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo');
expect(parameters.sendBody).toBe(false);

@@ -119,7 +123,7 @@ describe('CurlConverterHelper', () => {
});

test('Should parse location flag with --location', () => {
const curl = `curl https://reqbin.com/echo -u "login:password" --location`;
const curl = 'curl https://reqbin.com/echo -u "login:password" --location';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo');
expect(parameters.sendBody).toBe(false);

@@ -134,7 +138,7 @@ describe('CurlConverterHelper', () => {
});

test('Should parse location flag with --L', () => {
const curl = `curl https://reqbin.com/echo -u "login:password" -L`;
const curl = 'curl https://reqbin.com/echo -u "login:password" -L';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo');
expect(parameters.sendBody).toBe(false);

@@ -149,7 +153,7 @@ describe('CurlConverterHelper', () => {
});

test('Should parse location and max redirects flags with --location and --max-redirs 10', () => {
const curl = `curl https://reqbin.com/echo -u "login:password" --location --max-redirs 10`;
const curl = 'curl https://reqbin.com/echo -u "login:password" --location --max-redirs 10';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo');
expect(parameters.sendBody).toBe(false);

@@ -165,7 +169,7 @@ describe('CurlConverterHelper', () => {
});

test('Should parse proxy flag -x', () => {
const curl = `curl https://reqbin.com/echo -u "login:password" -x https://google.com`;
const curl = 'curl https://reqbin.com/echo -u "login:password" -x https://google.com';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo');
expect(parameters.sendBody).toBe(false);

@@ -180,7 +184,7 @@ describe('CurlConverterHelper', () => {
});

test('Should parse proxy flag --proxy', () => {
const curl = `curl https://reqbin.com/echo -u "login:password" -x https://google.com`;
const curl = 'curl https://reqbin.com/echo -u "login:password" -x https://google.com';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo');
expect(parameters.sendBody).toBe(false);

@@ -195,7 +199,7 @@ describe('CurlConverterHelper', () => {
});

test('Should parse include headers on output flag --include', () => {
const curl = `curl https://reqbin.com/echo -u "login:password" --include -x https://google.com`;
const curl = 'curl https://reqbin.com/echo -u "login:password" --include -x https://google.com';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo');
expect(parameters.sendBody).toBe(false);

@@ -210,7 +214,7 @@ describe('CurlConverterHelper', () => {
});

test('Should parse include headers on output flag -i', () => {
const curl = `curl https://reqbin.com/echo -u "login:password" -x https://google.com -i`;
const curl = 'curl https://reqbin.com/echo -u "login:password" -x https://google.com -i';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo');
expect(parameters.sendBody).toBe(false);

@@ -225,7 +229,7 @@ describe('CurlConverterHelper', () => {
});

test('Should parse include request flag -X', () => {
const curl = `curl -X POST https://reqbin.com/echo -u "login:password" -x https://google.com`;
const curl = 'curl -X POST https://reqbin.com/echo -u "login:password" -x https://google.com';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo');
expect(parameters.method).toBe('POST');

@@ -233,7 +237,8 @@ describe('CurlConverterHelper', () => {
});

test('Should parse include request flag --request', () => {
const curl = `curl --request POST https://reqbin.com/echo -u "login:password" -x https://google.com`;
const curl =
'curl --request POST https://reqbin.com/echo -u "login:password" -x https://google.com';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo');
expect(parameters.method).toBe('POST');

@@ -241,7 +246,8 @@ describe('CurlConverterHelper', () => {
});

test('Should parse include timeout flag --connect-timeout', () => {
const curl = `curl --request POST https://reqbin.com/echo -u "login:password" --connect-timeout 20`;
const curl =
'curl --request POST https://reqbin.com/echo -u "login:password" --connect-timeout 20';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo');
expect(parameters.method).toBe('POST');

@@ -250,7 +256,7 @@ describe('CurlConverterHelper', () => {
});

test('Should parse download file flag -O', () => {
const curl = `curl --request POST https://reqbin.com/echo -u "login:password" -O`;
const curl = 'curl --request POST https://reqbin.com/echo -u "login:password" -O';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo');
expect(parameters.method).toBe('POST');

@@ -260,7 +266,7 @@ describe('CurlConverterHelper', () => {
});

test('Should parse download file flag -o', () => {
const curl = `curl --request POST https://reqbin.com/echo -u "login:password" -o`;
const curl = 'curl --request POST https://reqbin.com/echo -u "login:password" -o';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo');
expect(parameters.method).toBe('POST');

@@ -270,7 +276,7 @@ describe('CurlConverterHelper', () => {
});

test('Should parse ignore SSL flag -k', () => {
const curl = `curl --request POST https://reqbin.com/echo -u "login:password" -k`;
const curl = 'curl --request POST https://reqbin.com/echo -u "login:password" -k';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo');
expect(parameters.method).toBe('POST');

@@ -279,7 +285,7 @@ describe('CurlConverterHelper', () => {
});

test('Should parse ignore SSL flag --insecure', () => {
const curl = `curl --request POST https://reqbin.com/echo -u "login:password" --insecure`;
const curl = 'curl --request POST https://reqbin.com/echo -u "login:password" --insecure';
const parameters = toHttpNodeParameters(curl);
expect(parameters.url).toBe('https://reqbin.com/echo');
expect(parameters.method).toBe('POST');
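Every change in the file above is the same mechanical fix: template literals that contain no `${...}` interpolation are rewritten as ordinary quoted strings, with the longer ones wrapped onto a second line by the formatter. This is the behaviour of the standard `quotes` rule when backticks are not explicitly allowed, although the exact configuration is not visible in this diff. A small illustration:

// Flagged: a template literal with nothing to interpolate.
//   const curl = `curl https://reqbin.com/echo -u "login:password"`;

// Preferred: a plain string; single quotes here, since the inner quotes are double.
const curl = 'curl https://reqbin.com/echo -u "login:password"';

// Backticks stay when there is real interpolation.
const labelled = `command under test: ${curl}`;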
@@ -1,12 +1,13 @@
import { IRun, LoggerProxy, WorkflowExecuteMode } from 'n8n-workflow';
import type { IRun, WorkflowExecuteMode } from 'n8n-workflow';
import { LoggerProxy } from 'n8n-workflow';
import { QueryFailedError } from 'typeorm';
import { mock } from 'jest-mock-extended';

import config from '@/config';
import * as Db from '@/Db';
import { User } from '@db/entities/User';
import { WorkflowStatistics } from '@db/entities/WorkflowStatistics';
import { WorkflowStatisticsRepository } from '@db/repositories';
import type { WorkflowStatistics } from '@db/entities/WorkflowStatistics';
import type { WorkflowStatisticsRepository } from '@db/repositories';
import { nodeFetchedData, workflowExecutionCompleted } from '@/events/WorkflowStatistics';
import * as UserManagementHelper from '@/UserManagement/UserManagementHelper';
import { getLogger } from '@/Logger';
@@ -1,4 +1,4 @@
import { INodeTypeData } from 'n8n-workflow';
import type { INodeTypeData } from 'n8n-workflow';

/**
* Ensure all pending promises settle. The promise's `resolve` is placed in

@@ -29,7 +29,7 @@ export function mockNodeTypesData(
outputs: [],
properties: [],
},
trigger: options?.addTrigger ? () => Promise.resolve(undefined) : undefined,
trigger: options?.addTrigger ? async () => undefined : undefined,
},
}),
acc
@@ -1,6 +1,7 @@
import { v4 as uuid } from 'uuid';
import { Container } from 'typedi';
import { ICredentialTypes, INodeTypes, SubworkflowOperationError, Workflow } from 'n8n-workflow';
import type { ICredentialTypes, INodeTypes } from 'n8n-workflow';
import { SubworkflowOperationError, Workflow } from 'n8n-workflow';

import config from '@/config';
import * as Db from '@/Db';

@@ -79,7 +80,7 @@ describe('PermissionChecker.check()', () => {
],
});

expect(() => PermissionChecker.check(workflow, userId)).not.toThrow();
expect(async () => PermissionChecker.check(workflow, userId)).not.toThrow();
});

test('should allow if requesting user is instance owner', async () => {

@@ -109,7 +110,7 @@ describe('PermissionChecker.check()', () => {
],
});

expect(async () => await PermissionChecker.check(workflow, owner.id)).not.toThrow();
expect(async () => PermissionChecker.check(workflow, owner.id)).not.toThrow();
});

test('should allow if workflow creds are valid subset', async () => {

@@ -156,7 +157,7 @@ describe('PermissionChecker.check()', () => {
],
});

expect(async () => await PermissionChecker.check(workflow, owner.id)).not.toThrow();
expect(async () => PermissionChecker.check(workflow, owner.id)).not.toThrow();
});

test('should deny if workflow creds are not valid subset', async () => {
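In the file above (and in the mailer and role-repository tests), the lintfix strips the `await` from single-expression async arrows passed to `expect(...)`. Both forms hand Jest a function that returns a promise settling the same way, so the `await` adds nothing; this is the kind of pattern that `no-return-await`-style rules report, though the diff does not say which rule fired here. A sketch with a hypothetical `checkAccess` function standing in for the real call:

declare function checkAccess(userId: string): Promise<void>;

// Inside a single-expression async arrow the `await` is redundant: the arrow
// returns a promise that settles the same way with or without it.
const withAwait = async () => await checkAccess('user-1'); // flagged
const withoutAwait = async () => checkAccess('user-1'); // preferred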
@@ -24,7 +24,7 @@ describe('PostHog', () => {
const ph = new PostHogClient();
await ph.init(instanceId);

expect(PostHog.prototype.constructor).toHaveBeenCalledWith(apiKey, {host: apiHost});
expect(PostHog.prototype.constructor).toHaveBeenCalledWith(apiKey, { host: apiHost });
});

it('does not initialize or track if diagnostics are not enabled', async () => {

@@ -78,13 +78,10 @@ describe('PostHog', () => {
createdAt,
});

expect(PostHog.prototype.getAllFlags).toHaveBeenCalledWith(
`${instanceId}#${userId}`,
{
personProperties: {
created_at_timestamp: createdAt.getTime().toString(),
},
}
);
expect(PostHog.prototype.getAllFlags).toHaveBeenCalledWith(`${instanceId}#${userId}`, {
personProperties: {
created_at_timestamp: createdAt.getTime().toString(),
},
});
});
});
});
@@ -12,16 +12,16 @@ async function mockFind({
type: string;
}): Promise<IWorkflowCredentials | null> {
// Simple statement that maps a return value based on the `id` parameter
if (id === notFoundNode.credentials!!.test.id) {
if (id === notFoundNode.credentials!.test.id) {
return null;
}
// Otherwise just build some kind of credential object and return it
return {
[type]: {
[id]: {
id: id,
id,
name: type,
type: type,
type,
nodesAccess: [],
data: '',
},

@@ -49,7 +49,7 @@ describe('WorkflowCredentials', () => {
});

test('Should return an error if any node has no credential ID', () => {
const credentials = noIdNode.credentials!!.test;
const credentials = noIdNode.credentials!.test;
const expectedError = new Error(
`Credentials with name "${credentials.name}" for type "test" miss an ID.`,
);

@@ -58,7 +58,7 @@ describe('WorkflowCredentials', () => {
});

test('Should return an error if credentials cannot be found in the DB', () => {
const credentials = notFoundNode.credentials!!.test;
const credentials = notFoundNode.credentials!.test;
const expectedError = new Error(
`Could not find credentials for type "test" with ID "${credentials.id}".`,
);
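Two small mechanical fixes in this file: the doubled non-null assertion `!!` collapses to a single `!` (the autofix of `@typescript-eslint/no-extra-non-null-assertion`), and `{ id: id, type: type }` style properties become shorthand `{ id, type }` (the core `object-shorthand` rule). Both are behaviour-preserving. A sketch with a hypothetical credentials shape:

interface NodeLike {
  credentials?: { test: { id: string; name: string } };
}

const node: NodeLike = { credentials: { test: { id: '1', name: 'test' } } };

// `!!` asserted non-null twice; once is enough.
const id = node.credentials!.test.id;

// Property shorthand when the key and the variable share a name.
const type = 'test';
const summary = { id, type };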
@@ -1,4 +1,5 @@
import { INode, LoggerProxy } from 'n8n-workflow';
import type { INode } from 'n8n-workflow';
import { LoggerProxy } from 'n8n-workflow';
import { WorkflowEntity } from '@db/entities/WorkflowEntity';
import { CredentialsEntity } from '@db/entities/CredentialsEntity';
import { getNodesWithInaccessibleCreds, validateWorkflowCredentialUsage } from '@/WorkflowHelpers';
@@ -1,11 +1,10 @@
import { CookieOptions, Response } from 'express';
import type { Repository } from 'typeorm';
import type { CookieOptions, Response } from 'express';
import jwt from 'jsonwebtoken';
import { mock, anyObject, captor } from 'jest-mock-extended';
import type { ILogger } from 'n8n-workflow';
import type { IExternalHooksClass, IInternalHooksClass } from '@/Interfaces';
import type { User } from '@db/entities/User';
import { UserRepository } from '@db/repositories';
import type { UserRepository } from '@db/repositories';
import { MeController } from '@/controllers';
import { AUTH_COOKIE_NAME } from '@/constants';
import { BadRequestError } from '@/ResponseHelper';
@@ -1,9 +1,9 @@
import { mock } from 'jest-mock-extended';
import type { ICredentialTypes } from 'n8n-workflow';
import type { Config } from '@/config';
import type { TranslationRequest } from '@/controllers/translation.controller';
import {
TranslationController,
TranslationRequest,
CREDENTIAL_TRANSLATIONS_DIR,
} from '@/controllers/translation.controller';
import { BadRequestError } from '@/ResponseHelper';
@@ -1,7 +1,8 @@
import { Container } from 'typedi';
import { DataSource, EntityManager } from 'typeorm';
import { mock } from 'jest-mock-extended';
import { Role, RoleNames, RoleScopes } from '@db/entities/Role';
import type { RoleNames, RoleScopes } from '@db/entities/Role';
import { Role } from '@db/entities/Role';
import { RoleRepository } from '@db/repositories/role.repository';
import { mockInstance } from '../../integration/shared/utils';
import { randomInteger } from '../../integration/shared/random';

@@ -38,7 +39,7 @@ describe('RoleRepository', () => {
test('should throw otherwise', async () => {
entityManager.findOneOrFail.mockRejectedValueOnce(new Error());
expect(() => roleRepository.findRoleOrFail('global', 'owner')).rejects.toThrow();
expect(async () => roleRepository.findRoleOrFail('global', 'owner')).rejects.toThrow();
});
});
@@ -1,9 +1,7 @@
import set from 'lodash.set';

import {
import type {
ICredentialDataDecryptedObject,
ICredentialsHelper,
IDataObject,
IDeferredPromise,
IExecuteWorkflowInfo,
IHttpRequestHelper,

@@ -20,12 +18,12 @@ import {
IVersionedNodeType,
IWorkflowBase,
IWorkflowExecuteAdditionalData,
NodeHelpers,
NodeParameterValue,
WorkflowHooks,
} from 'n8n-workflow';
import { deepCopy } from 'n8n-workflow';
import { ICredentialsHelper, NodeHelpers, WorkflowHooks } from 'n8n-workflow';
import { Credentials } from '@/Credentials';
import { IExecuteFunctions } from '@/Interfaces';
import type { IExecuteFunctions } from '@/Interfaces';

export class CredentialsHelper extends ICredentialsHelper {
async authenticate(

@@ -381,12 +379,12 @@ class NodeTypesClass implements INodeTypes {
compareData.value2 as NodeParameterValue,
);

if (compareOperationResult === true && combineOperation === 'any') {
if (compareOperationResult && combineOperation === 'any') {
// If it passes and the operation is "any" we do not have to check any
// other ones as it should pass anyway. So go on with the next item.
returnDataTrue.push(item);
continue itemLoop;
} else if (compareOperationResult === false && combineOperation === 'all') {
} else if (!compareOperationResult && combineOperation === 'all') {
// If it fails and the operation is "all" we do not have to check any
// other ones as it should be not pass anyway. So go on with the next item.
returnDataFalse.push(item);

@@ -524,7 +522,7 @@ class NodeTypesClass implements INodeTypes {
outputs: ['main'],
properties: [],
},
execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData();
return this.prepareOutputData(items);
},

@@ -570,7 +568,7 @@ class NodeTypesClass implements INodeTypes {
},
],
},
execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData();
const returnData: INodeExecutionData[] = [];

@@ -702,13 +700,14 @@ class NodeTypesClass implements INodeTypes {
name: 'dotNotation',
type: 'boolean',
default: true,
description: `<p>By default, dot-notation is used in property names. This means that "a.b" will set the property "b" underneath "a" so { "a": { "b": value} }.</p><p>If that is not intended this can be deactivated, it will then set { "a.b": value } instead.</p>`,
description:
'<p>By default, dot-notation is used in property names. This means that "a.b" will set the property "b" underneath "a" so { "a": { "b": value} }.</p><p>If that is not intended this can be deactivated, it will then set { "a.b": value } instead.</p>',
},
],
},
],
},
execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData();

if (items.length === 0) {

@@ -722,13 +721,13 @@ class NodeTypesClass implements INodeTypes {
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
keepOnlySet = this.getNodeParameter('keepOnlySet', itemIndex, false) as boolean;
item = items[itemIndex];
const options = this.getNodeParameter('options', itemIndex, {}) as IDataObject;
const options = this.getNodeParameter('options', itemIndex, {});

const newItem: INodeExecutionData = {
json: {},
};

if (keepOnlySet !== true) {
if (!keepOnlySet) {
if (item.binary !== undefined) {
// Create a shallow copy of the binary data so that the old
// data references which do not get changed still stay behind

@@ -737,7 +736,7 @@ class NodeTypesClass implements INodeTypes {
Object.assign(newItem.binary, item.binary);
}

newItem.json = JSON.parse(JSON.stringify(item.json));
newItem.json = deepCopy(item.json);
}

// Add boolean values

@@ -797,7 +796,7 @@ class NodeTypesClass implements INodeTypes {
outputs: ['main'],
properties: [],
},
execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData();

return this.prepareOutputData(items);

@@ -851,6 +850,8 @@ export function WorkflowExecuteAdditionalData(
connections: {},
};

// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
return {
credentialsHelper: new CredentialsHelper(''),
hooks: new WorkflowHooks(hookFunctions, 'trigger', '1', workflowData),
@@ -28,7 +28,7 @@ describe('NodeExecuteFunctions', () => {
BinaryDataManager.instance = undefined;
});

test(`test getBinaryDataBuffer(...) & setBinaryDataBuffer(...) methods in 'default' mode`, async () => {
test("test getBinaryDataBuffer(...) & setBinaryDataBuffer(...) methods in 'default' mode", async () => {
// Setup a 'default' binary data manager instance
await BinaryDataManager.init({
mode: 'default',

@@ -39,8 +39,8 @@ describe('NodeExecuteFunctions', () => {
});

// Set our binary data buffer
let inputData: Buffer = Buffer.from('This is some binary data', 'utf8');
let setBinaryDataBufferResponse: IBinaryData = await setBinaryDataBuffer(
const inputData: Buffer = Buffer.from('This is some binary data', 'utf8');
const setBinaryDataBufferResponse: IBinaryData = await setBinaryDataBuffer(
{
mimeType: 'txt',
data: 'This should be overwritten by the actual payload in the response',

@@ -54,7 +54,7 @@ describe('NodeExecuteFunctions', () => {
// Now, re-fetch our data.
// An ITaskDataConnections object is used to share data between nodes. The top level property, 'main', represents the successful output object from a previous node.
let taskDataConnectionsInput: ITaskDataConnections = {
const taskDataConnectionsInput: ITaskDataConnections = {
main: [],
};

@@ -69,7 +69,7 @@ describe('NodeExecuteFunctions', () => {
]);

// Now, lets fetch our data! The item will be item index 0.
let getBinaryDataBufferResponse: Buffer = await getBinaryDataBuffer(
const getBinaryDataBufferResponse: Buffer = await getBinaryDataBuffer(
taskDataConnectionsInput,
0,
'data',

@@ -79,7 +79,7 @@ describe('NodeExecuteFunctions', () => {
expect(getBinaryDataBufferResponse).toEqual(inputData);
});

test(`test getBinaryDataBuffer(...) & setBinaryDataBuffer(...) methods in 'filesystem' mode`, async () => {
test("test getBinaryDataBuffer(...) & setBinaryDataBuffer(...) methods in 'filesystem' mode", async () => {
// Setup a 'filesystem' binary data manager instance
await BinaryDataManager.init({
mode: 'filesystem',

@@ -90,8 +90,8 @@ describe('NodeExecuteFunctions', () => {
});

// Set our binary data buffer
let inputData: Buffer = Buffer.from('This is some binary data', 'utf8');
let setBinaryDataBufferResponse: IBinaryData = await setBinaryDataBuffer(
const inputData: Buffer = Buffer.from('This is some binary data', 'utf8');
const setBinaryDataBufferResponse: IBinaryData = await setBinaryDataBuffer(
{
mimeType: 'txt',
data: 'This should be overwritten with the name of the configured data manager',

@@ -112,7 +112,7 @@ describe('NodeExecuteFunctions', () => {
// Now, re-fetch our data.
// An ITaskDataConnections object is used to share data between nodes. The top level property, 'main', represents the successful output object from a previous node.
let taskDataConnectionsInput: ITaskDataConnections = {
const taskDataConnectionsInput: ITaskDataConnections = {
main: [],
};

@@ -127,7 +127,7 @@ describe('NodeExecuteFunctions', () => {
]);

// Now, lets fetch our data! The item will be item index 0.
let getBinaryDataBufferResponse: Buffer = await getBinaryDataBuffer(
const getBinaryDataBufferResponse: Buffer = await getBinaryDataBuffer(
taskDataConnectionsInput,
0,
'data',
@ -1,4 +1,5 @@
|
|||
import { createDeferredPromise, IConnections, INode, IRun, Workflow } from 'n8n-workflow';
|
||||
import type { IConnections, INode, IRun } from 'n8n-workflow';
|
||||
import { createDeferredPromise, Workflow } from 'n8n-workflow';
|
||||
import { WorkflowExecute } from '@/WorkflowExecute';
|
||||
|
||||
import * as Helpers from './Helpers';
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import { ILogger, LoggerProxy } from 'n8n-workflow';
|
||||
import type { ILogger } from 'n8n-workflow';
|
||||
import { LoggerProxy } from 'n8n-workflow';
|
||||
|
||||
const fakeLogger = {
|
||||
log: () => {},
|
||||
|
|
|
@ -21,8 +21,8 @@
|
|||
"build:translations": "gulp build:translations",
|
||||
"build:metadata": "pnpm n8n-generate-known && pnpm n8n-generate-ui-types",
|
||||
"format": "prettier --write . --ignore-path ../../.prettierignore",
|
||||
"lint": "eslint --quiet nodes credentials",
|
||||
"lintfix": "eslint nodes credentials --fix",
|
||||
"lint": "eslint --quiet .",
|
||||
"lintfix": "eslint . --fix",
|
||||
"watch": "tsc-watch -p tsconfig.build.json --onSuccess \"pnpm n8n-generate-ui-types\"",
|
||||
"test": "jest"
|
||||
},
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import { executeWorkflow } from '../ExecuteWorkflow';
|
||||
import * as Helpers from '../Helpers';
|
||||
import { WorkflowTestData } from '../types';
|
||||
import type { WorkflowTestData } from '../types';
|
||||
import nock from 'nock';
|
||||
|
||||
const records = [
|
||||
|
@ -26,7 +26,7 @@ describe('Execute Airtable Node', () => {
|
|||
nock.restore();
|
||||
});
|
||||
|
||||
const tests: Array<WorkflowTestData> = [
|
||||
const tests: WorkflowTestData[] = [
|
||||
{
|
||||
description: 'List Airtable Records',
|
||||
input: {
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import { WorkflowExecute } from 'n8n-core';
|
||||
import { createDeferredPromise, INodeTypes, IRun, Workflow } from 'n8n-workflow';
|
||||
import type { INodeTypes, IRun } from 'n8n-workflow';
|
||||
import { createDeferredPromise, Workflow } from 'n8n-workflow';
|
||||
import * as Helpers from './Helpers';
|
||||
import type { WorkflowTestData } from './types';
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import { IDataObject } from 'n8n-workflow';
|
||||
import type { IDataObject } from 'n8n-workflow';
|
||||
|
||||
// If your test needs data from credentials, you can add it here.
|
||||
// as JSON.stringify({ id: 'credentials_ID', name: 'credentials_name' }) for specific credentials
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
import { readFileSync, readdirSync, mkdtempSync } from 'fs';
|
||||
import { BinaryDataManager, Credentials, constructExecutionMetaData } from 'n8n-core';
|
||||
import {
|
||||
import type {
|
||||
ICredentialDataDecryptedObject,
|
||||
ICredentialsHelper,
|
||||
IDataObject,
|
||||
IDeferredPromise,
|
||||
IExecuteFunctions,
|
||||
|
@ -22,12 +21,10 @@ import {
|
|||
IWorkflowBase,
|
||||
IWorkflowExecuteAdditionalData,
|
||||
LoadingDetails,
|
||||
LoggerProxy,
|
||||
NodeHelpers,
|
||||
WorkflowHooks,
|
||||
} from 'n8n-workflow';
|
||||
import { ICredentialsHelper, LoggerProxy, NodeHelpers, WorkflowHooks } from 'n8n-workflow';
|
||||
import { executeWorkflow } from './ExecuteWorkflow';
|
||||
import { WorkflowTestData } from './types';
|
||||
import type { WorkflowTestData } from './types';
|
||||
import path from 'path';
|
||||
import { tmpdir } from 'os';
|
||||
import { isEmpty } from 'lodash';
|
||||
|
@ -139,6 +136,7 @@ export function WorkflowExecuteAdditionalData(
|
|||
|
||||
class NodeTypesClass implements INodeTypes {
|
||||
nodeTypes: INodeTypeData = {};
|
||||
|
||||
getByName(nodeType: string): INodeType | IVersionedNodeType {
|
||||
return this.nodeTypes[nodeType].type;
|
||||
}
|
||||
|
@ -180,7 +178,7 @@ const loadKnownNodes = (): Record<string, LoadingDetails> => {
|
|||
return knownNodes!;
|
||||
};
|
||||
|
||||
export function createTemporaryDir(prefix: string = 'n8n') {
|
||||
export function createTemporaryDir(prefix = 'n8n') {
|
||||
return mkdtempSync(path.join(tmpdir(), prefix));
|
||||
}
|
||||
|
||||
|
@ -196,7 +194,7 @@ export async function initBinaryDataManager(mode: 'default' | 'filesystem' = 'de
|
|||
return temporaryDir;
|
||||
}
|
||||
|
||||
export function setup(testData: Array<WorkflowTestData> | WorkflowTestData) {
|
||||
export function setup(testData: WorkflowTestData[] | WorkflowTestData) {
|
||||
if (!Array.isArray(testData)) {
|
||||
testData = [testData];
|
||||
}
|
||||
|
@ -278,7 +276,7 @@ const preparePinData = (pinData: IDataObject) => {
|
|||
const returnData = Object.keys(pinData).reduce(
|
||||
(acc, key) => {
|
||||
const data = pinData[key] as IDataObject[];
|
||||
acc[key] = [data as IDataObject[]];
|
||||
acc[key] = [data];
|
||||
return acc;
|
||||
},
|
||||
{} as {
|
||||
|
|
|
@ -1,10 +1,12 @@
|
|||
const PostgresFun = require('../../../nodes/Postgres/v1/genericFunctions');
|
||||
const pgPromise = require('pg-promise');
|
||||
|
||||
type NodeParams = Record<string, string | {}>;
|
||||
|
||||
describe('pgUpdate', () => {
|
||||
it('runs query to update db', async () => {
|
||||
const updateItem = { id: 1234, name: 'test' };
|
||||
const nodeParams = {
|
||||
const nodeParams: NodeParams = {
|
||||
table: 'mytable',
|
||||
schema: 'myschema',
|
||||
updateKey: 'id',
|
||||
|
@ -12,7 +14,7 @@ describe('pgUpdate', () => {
|
|||
additionalFields: {},
|
||||
returnFields: '*',
|
||||
};
|
||||
const getNodeParam = (key) => nodeParams[key];
|
||||
const getNodeParam = (key: string) => nodeParams[key];
|
||||
const pgp = pgPromise();
|
||||
const any = jest.fn();
|
||||
const db = { any };
|
||||
|
@ -26,13 +28,13 @@ describe('pgUpdate', () => {
|
|||
await PostgresFun.pgUpdate(getNodeParam, pgp, db, items);
|
||||
|
||||
expect(db.any).toHaveBeenCalledWith(
|
||||
`update \"myschema\".\"mytable\" as t set \"id\"=v.\"id\",\"name\"=v.\"name\" from (values(1234,'test')) as v(\"id\",\"name\") WHERE v.\"id\" = t.\"id\" RETURNING *`,
|
||||
'update "myschema"."mytable" as t set "id"=v."id","name"=v."name" from (values(1234,\'test\')) as v("id","name") WHERE v."id" = t."id" RETURNING *',
|
||||
);
|
||||
});
|
||||
|
||||
it('runs query to update db if updateKey is not in columns', async () => {
|
||||
const updateItem = { id: 1234, name: 'test' };
|
||||
const nodeParams = {
|
||||
const nodeParams: NodeParams = {
|
||||
table: 'mytable',
|
||||
schema: 'myschema',
|
||||
updateKey: 'id',
|
||||
|
@ -40,7 +42,7 @@ describe('pgUpdate', () => {
|
|||
additionalFields: {},
|
||||
returnFields: '*',
|
||||
};
|
||||
const getNodeParam = (key) => nodeParams[key];
|
||||
const getNodeParam = (key: string) => nodeParams[key];
|
||||
const pgp = pgPromise();
|
||||
const any = jest.fn();
|
||||
const db = { any };
|
||||
|
@ -54,13 +56,13 @@ describe('pgUpdate', () => {
|
|||
const results = await PostgresFun.pgUpdate(getNodeParam, pgp, db, items);
|
||||
|
||||
expect(db.any).toHaveBeenCalledWith(
|
||||
`update \"myschema\".\"mytable\" as t set \"id\"=v.\"id\",\"name\"=v.\"name\" from (values(1234,'test')) as v(\"id\",\"name\") WHERE v.\"id\" = t.\"id\" RETURNING *`,
|
||||
'update "myschema"."mytable" as t set "id"=v."id","name"=v."name" from (values(1234,\'test\')) as v("id","name") WHERE v."id" = t."id" RETURNING *',
|
||||
);
|
||||
});
|
||||
|
||||
it('runs query to update db with cast as updateKey', async () => {
|
||||
const updateItem = { id: '1234', name: 'test' };
|
||||
const nodeParams = {
|
||||
const nodeParams: NodeParams = {
|
||||
table: 'mytable',
|
||||
schema: 'myschema',
|
||||
updateKey: 'id:uuid',
|
||||
|
@ -68,7 +70,7 @@ describe('pgUpdate', () => {
|
|||
additionalFields: {},
|
||||
returnFields: '*',
|
||||
};
|
||||
const getNodeParam = (key) => nodeParams[key];
|
||||
const getNodeParam = (key: string) => nodeParams[key];
|
||||
const pgp = pgPromise();
|
||||
const any = jest.fn();
|
||||
const db = { any };
|
||||
|
@ -82,13 +84,13 @@ describe('pgUpdate', () => {
|
|||
await PostgresFun.pgUpdate(getNodeParam, pgp, db, items);
|
||||
|
||||
expect(db.any).toHaveBeenCalledWith(
|
||||
`update \"myschema\".\"mytable\" as t set \"id\"=v.\"id\",\"name\"=v.\"name\" from (values('1234'::uuid,'test')) as v(\"id\",\"name\") WHERE v.\"id\" = t.\"id\" RETURNING *`,
|
||||
'update "myschema"."mytable" as t set "id"=v."id","name"=v."name" from (values(\'1234\'::uuid,\'test\')) as v("id","name") WHERE v."id" = t."id" RETURNING *',
|
||||
);
|
||||
});
|
||||
|
||||
it('runs query to update db with cast in target columns', async () => {
|
||||
const updateItem = { id: '1234', name: 'test' };
|
||||
const nodeParams = {
|
||||
const nodeParams: NodeParams = {
|
||||
table: 'mytable',
|
||||
schema: 'myschema',
|
||||
updateKey: 'id',
|
||||
|
@ -96,7 +98,7 @@ describe('pgUpdate', () => {
|
|||
additionalFields: {},
|
||||
returnFields: '*',
|
||||
};
|
||||
const getNodeParam = (key) => nodeParams[key];
|
||||
const getNodeParam = (key: string) => nodeParams[key];
|
||||
const pgp = pgPromise();
|
||||
const any = jest.fn();
|
||||
const db = { any };
|
||||
|
@ -110,7 +112,7 @@ describe('pgUpdate', () => {
|
|||
await PostgresFun.pgUpdate(getNodeParam, pgp, db, items);
|
||||
|
||||
expect(db.any).toHaveBeenCalledWith(
|
||||
`update \"myschema\".\"mytable\" as t set \"id\"=v.\"id\",\"name\"=v.\"name\" from (values('1234'::uuid,'test')) as v(\"id\",\"name\") WHERE v.\"id\" = t.\"id\" RETURNING *`,
|
||||
'update "myschema"."mytable" as t set "id"=v."id","name"=v."name" from (values(\'1234\'::uuid,\'test\')) as v("id","name") WHERE v."id" = t."id" RETURNING *',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
@ -118,14 +120,14 @@ describe('pgUpdate', () => {
|
|||
describe('pgInsert', () => {
|
||||
it('runs query to insert', async () => {
|
||||
const insertItem = { id: 1234, name: 'test', age: 34 };
|
||||
const nodeParams = {
|
||||
const nodeParams: NodeParams = {
|
||||
table: 'mytable',
|
||||
schema: 'myschema',
|
||||
columns: 'id,name,age',
|
||||
returnFields: '*',
|
||||
additionalFields: {},
|
||||
};
|
||||
const getNodeParam = (key) => nodeParams[key];
|
||||
const getNodeParam = (key: string) => nodeParams[key];
|
||||
const pgp = pgPromise();
|
||||
const any = jest.fn();
|
||||
const db = { any };
|
||||
|
@ -139,20 +141,20 @@ describe('pgInsert', () => {
|
|||
await PostgresFun.pgInsert(getNodeParam, pgp, db, items);
|
||||
|
||||
expect(db.any).toHaveBeenCalledWith(
|
||||
`insert into \"myschema\".\"mytable\"(\"id\",\"name\",\"age\") values(1234,'test',34) RETURNING *`,
|
||||
'insert into "myschema"."mytable"("id","name","age") values(1234,\'test\',34) RETURNING *',
|
||||
);
|
||||
});
|
||||
|
||||
it('runs query to insert with type casting', async () => {
|
||||
const insertItem = { id: 1234, name: 'test', age: 34 };
|
||||
const nodeParams = {
|
||||
const nodeParams: NodeParams = {
|
||||
table: 'mytable',
|
||||
schema: 'myschema',
|
||||
columns: 'id:int,name:text,age',
|
||||
returnFields: '*',
|
||||
additionalFields: {},
|
||||
};
|
||||
const getNodeParam = (key) => nodeParams[key];
|
||||
const getNodeParam = (key: string) => nodeParams[key];
|
||||
const pgp = pgPromise();
|
||||
const any = jest.fn();
|
||||
const db = { any };
|
||||
|
@ -166,7 +168,7 @@ describe('pgInsert', () => {
|
|||
await PostgresFun.pgInsert(getNodeParam, pgp, db, items);
|
||||
|
||||
expect(db.any).toHaveBeenCalledWith(
|
||||
`insert into \"myschema\".\"mytable\"(\"id\",\"name\",\"age\") values(1234::int,'test'::text,34) RETURNING *`,
|
||||
'insert into "myschema"."mytable"("id","name","age") values(1234::int,\'test\'::text,34) RETURNING *',
|
||||
);
|
||||
});
|
||||
});
|
|
@ -1,9 +1,9 @@
|
|||
import * as Helpers from '../Helpers';
|
||||
import { WorkflowTestData } from '../types';
|
||||
import type { WorkflowTestData } from '../types';
|
||||
import { executeWorkflow } from '../ExecuteWorkflow';
|
||||
|
||||
describe('Execute Start Node', () => {
|
||||
const tests: Array<WorkflowTestData> = [
|
||||
const tests: WorkflowTestData[] = [
|
||||
{
|
||||
description: 'should run start node',
|
||||
input: {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import { INode, IConnections } from 'n8n-workflow';
|
||||
import type { INode, IConnections } from 'n8n-workflow';
|
||||
|
||||
export interface WorkflowTestData {
|
||||
description: string;
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
"noUnusedLocals": false,
|
||||
"useUnknownInCatchVariables": false
|
||||
},
|
||||
"include": ["credentials/**/*.ts", "nodes/**/*.ts"],
|
||||
"include": ["credentials/**/*.ts", "nodes/**/*.ts", "test/**/*.ts", "utils/**/*.ts"],
|
||||
"references": [
|
||||
{ "path": "../workflow/tsconfig.build.json" },
|
||||
{ "path": "../core/tsconfig.build.json" }
|
||||
|
|
|
@ -6,7 +6,7 @@ describe('AugmentObject', () => {
|
|||
describe('augmentArray', () => {
|
||||
test('should work with arrays', () => {
|
||||
const originalObject = [1, 2, 3, 4, null];
|
||||
const copyOriginal = JSON.parse(JSON.stringify(originalObject));
|
||||
const copyOriginal = deepCopy(originalObject);
|
||||
|
||||
const augmentedObject = augmentArray(originalObject);
|
||||
|
||||
|
@ -70,7 +70,7 @@ describe('AugmentObject', () => {
|
|||
},
|
||||
],
|
||||
};
|
||||
const copyOriginal = JSON.parse(JSON.stringify(originalObject));
|
||||
const copyOriginal = deepCopy(originalObject);
|
||||
|
||||
const augmentedObject = augmentObject(originalObject);
|
||||
|
||||
|
@ -81,13 +81,13 @@ describe('AugmentObject', () => {
|
|||
|
||||
// Make sure that also array operations as push and length work as expected
|
||||
// On lower levels
|
||||
augmentedObject.a.b.c[0].a3!.b3.c3 = '033';
|
||||
expect(augmentedObject.a.b.c[0].a3!.b3.c3).toEqual('033');
|
||||
expect(originalObject.a.b.c[0].a3!.b3.c3).toEqual('03');
|
||||
augmentedObject.a.b.c[0].a3.b3.c3 = '033';
|
||||
expect(augmentedObject.a.b.c[0].a3.b3.c3).toEqual('033');
|
||||
expect(originalObject.a.b.c[0].a3.b3.c3).toEqual('03');
|
||||
|
||||
augmentedObject.a.b.c[1].a3!.b3.c3 = '133';
|
||||
expect(augmentedObject.a.b.c[1].a3!.b3.c3).toEqual('133');
|
||||
expect(originalObject.a.b.c[1].a3!.b3.c3).toEqual('13');
|
||||
augmentedObject.a.b.c[1].a3.b3.c3 = '133';
|
||||
expect(augmentedObject.a.b.c[1].a3.b3.c3).toEqual('133');
|
||||
expect(originalObject.a.b.c[1].a3.b3.c3).toEqual('13');
|
||||
|
||||
augmentedObject.a.b.c.push({
|
||||
a3: {
|
||||
|
@ -203,7 +203,7 @@ describe('AugmentObject', () => {
|
|||
d: date,
|
||||
r: regexp,
|
||||
};
|
||||
const copyOriginal = JSON.parse(JSON.stringify(originalObject));
|
||||
const copyOriginal = deepCopy(originalObject);
|
||||
|
||||
const augmentedObject = augmentObject(originalObject);
|
||||
|
||||
|
@ -248,30 +248,30 @@ describe('AugmentObject', () => {
|
|||
},
|
||||
aa: '1',
|
||||
};
|
||||
const copyOriginal = JSON.parse(JSON.stringify(originalObject));
|
||||
const copyOriginal = deepCopy(originalObject);
|
||||
|
||||
const augmentedObject = augmentObject(originalObject);
|
||||
|
||||
augmentedObject.a.bb = '92';
|
||||
expect(originalObject.a.bb).toEqual('2');
|
||||
expect(augmentedObject.a!.bb!).toEqual('92');
|
||||
expect(augmentedObject.a.bb).toEqual('92');
|
||||
|
||||
augmentedObject.a!.b!.cc = '93';
|
||||
augmentedObject.a.b.cc = '93';
|
||||
expect(originalObject.a.b.cc).toEqual('3');
|
||||
expect(augmentedObject.a!.b!.cc).toEqual('93');
|
||||
expect(augmentedObject.a.b.cc).toEqual('93');
|
||||
|
||||
// @ts-ignore
|
||||
augmentedObject.a!.b!.ccc = {
|
||||
augmentedObject.a.b.ccc = {
|
||||
d: '4',
|
||||
};
|
||||
|
||||
// @ts-ignore
|
||||
expect(augmentedObject.a!.b!.ccc).toEqual({ d: '4' });
|
||||
expect(augmentedObject.a.b.ccc).toEqual({ d: '4' });
|
||||
|
||||
// @ts-ignore
|
||||
augmentedObject.a!.b!.ccc.d = '94';
|
||||
augmentedObject.a.b.ccc.d = '94';
|
||||
// @ts-ignore
|
||||
expect(augmentedObject.a!.b!.ccc.d).toEqual('94');
|
||||
expect(augmentedObject.a.b.ccc.d).toEqual('94');
|
||||
|
||||
expect(originalObject).toEqual(copyOriginal);
|
||||
|
||||
|
@ -300,7 +300,7 @@ describe('AugmentObject', () => {
|
|||
},
|
||||
aa: '1',
|
||||
};
|
||||
const copyOriginal = JSON.parse(JSON.stringify(originalObject));
|
||||
const copyOriginal = deepCopy(originalObject);
|
||||
|
||||
const augmentedObject = augmentObject(originalObject);
|
||||
|
||||
|
@ -351,7 +351,7 @@ describe('AugmentObject', () => {
|
|||
},
|
||||
aa: '1' as string | undefined,
|
||||
};
|
||||
const copyOriginal = JSON.parse(JSON.stringify(originalObject));
|
||||
const copyOriginal = deepCopy(originalObject);
|
||||
|
||||
const augmentedObject = augmentObject(originalObject);
|
||||
|
||||
|
@ -423,7 +423,7 @@ describe('AugmentObject', () => {
|
|||
},
|
||||
aa: '1' as string | undefined,
|
||||
};
|
||||
const copyOriginal = JSON.parse(JSON.stringify(originalObject));
|
||||
const copyOriginal = deepCopy(originalObject);
|
||||
|
||||
const augmentedObject = augmentObject(originalObject);
|
||||
|
||||
|
|
|
@ -6,12 +6,9 @@ import { DateTime, Duration, Interval } from 'luxon';
|
|||
import { Expression } from '@/Expression';
|
||||
import { Workflow } from '@/Workflow';
|
||||
import * as Helpers from './Helpers';
|
||||
import {
|
||||
baseFixtures,
|
||||
ExpressionTestEvaluation,
|
||||
ExpressionTestTransform,
|
||||
} from './ExpressionFixtures/base';
|
||||
import { INodeExecutionData } from '@/Interfaces';
|
||||
import type { ExpressionTestEvaluation, ExpressionTestTransform } from './ExpressionFixtures/base';
|
||||
import { baseFixtures } from './ExpressionFixtures/base';
|
||||
import type { INodeExecutionData } from '@/Interfaces';
|
||||
import { extendSyntax } from '@/Extensions/ExpressionExtension';
|
||||
|
||||
describe('Expression', () => {
|
||||
|
@ -88,7 +85,7 @@ describe('Expression', () => {
|
|||
|
||||
expect(evaluate('={{new Object()}}')).toEqual(new Object());
|
||||
|
||||
expect(evaluate('={{new Array()}}')).toEqual(new Array());
|
||||
expect(evaluate('={{new Array()}}')).toEqual([]);
|
||||
expect(evaluate('={{new Int8Array()}}')).toEqual(new Int8Array());
|
||||
expect(evaluate('={{new Uint8Array()}}')).toEqual(new Uint8Array());
|
||||
expect(evaluate('={{new Uint8ClampedArray()}}')).toEqual(new Uint8ClampedArray());
|
||||
|
|
|
@ -25,9 +25,7 @@ describe('Data Transformation Functions', () => {
|
|||
{ value: 6, string: '6' },
|
||||
{ value: { something: 'else' } }
|
||||
].pluck("value") }}`),
|
||||
).toEqual(
|
||||
expect.arrayContaining([1, 2, 3, 4, 5, 6, { something: 'else' }]),
|
||||
);
|
||||
).toEqual(expect.arrayContaining([1, 2, 3, 4, 5, 6, { something: 'else' }]));
|
||||
});
|
||||
|
||||
test('.pluck() should work correctly for multiple values', () => {
|
||||
|
@ -50,7 +48,10 @@ describe('Data Transformation Functions', () => {
|
|||
}
|
||||
].pluck("firstName", "lastName") }}`),
|
||||
).toEqual(
|
||||
expect.arrayContaining([["John", "Doe"],["Jane", "Doe"]]),
|
||||
expect.arrayContaining([
|
||||
['John', 'Doe'],
|
||||
['Jane', 'Doe'],
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
|
@ -73,7 +74,7 @@ describe('Data Transformation Functions', () => {
|
|||
{ value: 4, string: '4' },
|
||||
{ value: 5, string: '5' },
|
||||
{ value: 6, string: '6' },
|
||||
{ value: { something: 'else' } }
|
||||
{ value: { something: 'else' } },
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
@ -86,10 +87,8 @@ describe('Data Transformation Functions', () => {
|
|||
|
||||
test('.unique() should work on an arrays containing nulls, objects and arrays', () => {
|
||||
expect(
|
||||
evaluate(
|
||||
'={{ [1, 2, 3, "as", {}, {}, 1, 2, [1,2], "[sad]", "[sad]", null].unique() }}',
|
||||
),
|
||||
).toEqual([1, 2, 3, "as", {}, [1,2], "[sad]", null]);
|
||||
evaluate('={{ [1, 2, 3, "as", {}, {}, 1, 2, [1,2], "[sad]", "[sad]", null].unique() }}'),
|
||||
).toEqual([1, 2, 3, 'as', {}, [1, 2], '[sad]', null]);
|
||||
});
|
||||
|
||||
test('.isEmpty() should work correctly on an array', () => {
|
||||
|
@ -113,7 +112,7 @@ describe('Data Transformation Functions', () => {
|
|||
evaluate(
|
||||
'={{ [{ test1: 1, test2: 2 }, { test1: 1, test3: 3 }].merge([{ test1: 2, test3: 3 }, { test4: 4 }]) }}',
|
||||
),
|
||||
).toEqual({"test1": 1, "test2": 2, "test3": 3, "test4": 4});
|
||||
).toEqual({ test1: 1, test2: 2, test3: 3, test4: 4 });
|
||||
});
|
||||
|
||||
test('.merge() should work correctly without arguments', () => {
|
||||
|
@ -121,7 +120,7 @@ describe('Data Transformation Functions', () => {
|
|||
evaluate(
|
||||
'={{ [{ a: 1, some: null }, { a: 2, c: "something" }, 2, "asds", { b: 23 }, null, [1, 2]].merge() }}',
|
||||
),
|
||||
).toEqual({"a": 1, "some": null, "c": "something", "b": 23});
|
||||
).toEqual({ a: 1, some: null, c: 'something', b: 23 });
|
||||
});
|
||||
|
||||
test('.smartJoin() should work correctly on an array of objects', () => {
|
||||
|
@ -175,11 +174,14 @@ describe('Data Transformation Functions', () => {
|
|||
});
|
||||
|
||||
test('.union() should work on an arrays containing nulls, objects and arrays', () => {
|
||||
expect(
|
||||
evaluate(
|
||||
'={{ [1, 2, "dd", {}, null].union([1, {}, null, 3]) }}',
|
||||
),
|
||||
).toEqual([1, 2, "dd", {}, null, 3]);
|
||||
expect(evaluate('={{ [1, 2, "dd", {}, null].union([1, {}, null, 3]) }}')).toEqual([
|
||||
1,
|
||||
2,
|
||||
'dd',
|
||||
{},
|
||||
null,
|
||||
3,
|
||||
]);
|
||||
});
|
||||
|
||||
test('.intersection() should work on an array of objects', () => {
|
||||
|
@ -191,11 +193,11 @@ describe('Data Transformation Functions', () => {
|
|||
});
|
||||
|
||||
test('.intersection() should work on an arrays containing nulls, objects and arrays', () => {
|
||||
expect(
|
||||
evaluate(
|
||||
'={{ [1, 2, "dd", {}, null].intersection([1, {}, null]) }}',
|
||||
),
|
||||
).toEqual([1, {}, null]);
|
||||
expect(evaluate('={{ [1, 2, "dd", {}, null].intersection([1, {}, null]) }}')).toEqual([
|
||||
1,
|
||||
{},
|
||||
null,
|
||||
]);
|
||||
});
|
||||
|
||||
test('.difference() should work on an array of objects', () => {
|
||||
|
@ -212,10 +214,8 @@ describe('Data Transformation Functions', () => {
|
|||
|
||||
test('.difference() should work on an arrays containing nulls, objects and arrays', () => {
|
||||
expect(
|
||||
evaluate(
|
||||
'={{ [1, 2, "dd", {}, null, ["a", 1]].difference([1, {}, null, ["a", 1]]) }}',
|
||||
),
|
||||
).toEqual([2, "dd"]);
|
||||
evaluate('={{ [1, 2, "dd", {}, null, ["a", 1]].difference([1, {}, null, ["a", 1]]) }}'),
|
||||
).toEqual([2, 'dd']);
|
||||
});
|
||||
|
||||
test('.compact() should work on an array', () => {
|
||||
|
|
|
@ -61,7 +61,6 @@ describe('Data Transformation Functions', () => {
|
|||
expect(evaluate('={{ DateTime.local(2023, 1, 20).extract() }}')).toEqual(3);
|
||||
});
|
||||
|
||||
|
||||
test('.format("yyyy LLL dd") should work correctly on a date', () => {
|
||||
expect(evaluate('={{ DateTime.local(2023, 1, 16).format("yyyy LLL dd") }}')).toEqual(
|
||||
'2023 Jan 16',
|
||||
|
@ -74,27 +73,29 @@ describe('Data Transformation Functions', () => {
|
|||
});
|
||||
|
||||
test('.inBetween() should work on string and Date', () => {
|
||||
expect(evaluate(`={{ $now.isBetween('2023-06-23'.toDate(), '2023-06-23') }}`)).toBeDefined();
|
||||
expect(evaluate("={{ $now.isBetween('2023-06-23'.toDate(), '2023-06-23') }}")).toBeDefined();
|
||||
});
|
||||
|
||||
test('.inBetween() should work on string and DateTime', () => {
|
||||
expect(evaluate(`={{ $now.isBetween($now, '2023-06-23') }}`)).toBeDefined();
|
||||
expect(evaluate("={{ $now.isBetween($now, '2023-06-23') }}")).toBeDefined();
|
||||
});
|
||||
|
||||
test('.inBetween() should not work for invalid strings', () => {
|
||||
expect(evaluate(`={{ $now.isBetween($now, 'invalid') }}`)).toBeUndefined();
|
||||
expect(evaluate("={{ $now.isBetween($now, 'invalid') }}")).toBeUndefined();
|
||||
});
|
||||
|
||||
test('.inBetween() should not work for numbers', () => {
|
||||
expect(evaluate(`={{ $now.isBetween($now, 1) }}`)).toBeUndefined();
|
||||
expect(evaluate('={{ $now.isBetween($now, 1) }}')).toBeUndefined();
|
||||
});
|
||||
|
||||
test('.inBetween() should not work for a single argument', () => {
|
||||
expect(() => evaluate(`={{ $now.isBetween($now) }}`)).toThrow();
|
||||
expect(() => evaluate('={{ $now.isBetween($now) }}')).toThrow();
|
||||
});
|
||||
|
||||
test('.inBetween() should not work for a more than two arguments', () => {
|
||||
expect(() => evaluate(`={{ $now.isBetween($now, '2023-06-23', '2023-09-21'.toDate()) }}`)).toThrow();
|
||||
expect(() =>
|
||||
evaluate("={{ $now.isBetween($now, '2023-06-23', '2023-09-21'.toDate()) }}"),
|
||||
).toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -2,6 +2,8 @@
|
|||
* @jest-environment jsdom
|
||||
*/
|
||||
|
||||
/* eslint-disable n8n-local-rules/no-interpolation-in-regular-string */
|
||||
|
||||
import { extendTransform } from '@/Extensions';
|
||||
import { joinExpression, splitExpression } from '@/Extensions/ExpressionParser';
|
||||
import { evaluate } from './Helpers';
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import { Expression, IDataObject, Workflow } from '../../src';
|
||||
import type { IDataObject } from '../../src';
|
||||
import { Expression, Workflow } from '../../src';
|
||||
import * as Helpers from '../Helpers';
|
||||
|
||||
export const TEST_TIMEZONE = 'America/New_York';
|
||||
|
@ -20,7 +21,7 @@ export const workflow = new Workflow({
|
|||
nodeTypes,
|
||||
settings: {
|
||||
timezone: TEST_TIMEZONE,
|
||||
}
|
||||
},
|
||||
});
|
||||
export const expression = new Expression(workflow);
|
||||
|
||||
|
|
|
@ -59,6 +59,7 @@ describe('Data Transformation Functions', () => {
|
|||
|
||||
describe('Multiple expressions', () => {
|
||||
test('Basic multiple expressions', () => {
|
||||
// eslint-disable-next-line n8n-local-rules/no-interpolation-in-regular-string
|
||||
expect(evaluate('={{ "test abc".toSnakeCase() }} you have ${{ (100).format() }}.')).toEqual(
|
||||
'test_abc you have $100.',
|
||||
);
|
||||
|
|
|
@ -37,8 +37,8 @@ describe('Data Transformation Functions', () => {
|
|||
});
|
||||
|
||||
test('.removeFieldsContaining should not work for empty string', () => {
|
||||
expect(
|
||||
() => evaluate(
|
||||
expect(() =>
|
||||
evaluate(
|
||||
'={{ ({ test1: "i exist", test2: "i should be removed", test3: "i should also be removed" }).removeFieldsContaining("") }}',
|
||||
),
|
||||
).toThrow();
|
||||
|
@ -65,8 +65,8 @@ describe('Data Transformation Functions', () => {
|
|||
});
|
||||
|
||||
test('.keepFieldsContaining should not work for empty string', () => {
|
||||
expect(
|
||||
() => evaluate(
|
||||
expect(() =>
|
||||
evaluate(
|
||||
'={{ ({ test1: "i exist", test2: "i should be removed", test3: "i should also be removed" }).keepFieldsContaining("") }}',
|
||||
),
|
||||
).toThrow();
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
*/
|
||||
|
||||
import { stringExtensions } from '@/Extensions/StringExtensions';
|
||||
import { dateExtensions } from '@/Extensions/DateExtensions';
|
||||
import { evaluate } from './Helpers';
|
||||
|
||||
describe('Data Transformation Functions', () => {
|
||||
|
@ -157,13 +156,29 @@ describe('Data Transformation Functions', () => {
|
|||
'={{ "I am a test with a url: https://example.net/ and I am a test with an email: test@example.org".extractUrl() }}',
|
||||
),
|
||||
).toEqual('https://example.net/');
|
||||
expect(evaluate('={{ "Check this out: https://subdomain.example.com:3000/path?q=1#hash".extractUrl() }}')).toEqual('https://subdomain.example.com:3000/path?q=1#hash');
|
||||
expect(
|
||||
evaluate(
|
||||
'={{ "Check this out: https://subdomain.example.com:3000/path?q=1#hash".extractUrl() }}',
|
||||
),
|
||||
).toEqual('https://subdomain.example.com:3000/path?q=1#hash');
|
||||
expect(evaluate('={{ "Invalid URL: http:///example.com".extractUrl() }}')).toEqual(undefined);
|
||||
expect(evaluate('={{ "Mixed content: https://www.example.com and http://www.example.org".extractUrl() }}')).toEqual('https://www.example.com');
|
||||
expect(evaluate('={{ "Text without URL: This is just a simple text".extractUrl() }}')).toEqual(undefined);
|
||||
expect(evaluate('={{ "URL with Unicode: http://www.xn--80aswg.xn--j1amh".extractUrl() }}')).toEqual('http://www.xn--80aswg.xn--j1amh');
|
||||
expect(evaluate('={{ "Localhost URL: http://localhost:8080/test?x=1".extractUrl() }}')).toEqual('http://localhost:8080/test?x=1');
|
||||
expect(evaluate('={{ "IP URL: http://192.168.1.1:8000/path?q=value#frag".extractUrl() }}')).toEqual('http://192.168.1.1:8000/path?q=value#frag');
|
||||
expect(
|
||||
evaluate(
|
||||
'={{ "Mixed content: https://www.example.com and http://www.example.org".extractUrl() }}',
|
||||
),
|
||||
).toEqual('https://www.example.com');
|
||||
expect(
|
||||
evaluate('={{ "Text without URL: This is just a simple text".extractUrl() }}'),
|
||||
).toEqual(undefined);
|
||||
expect(
|
||||
evaluate('={{ "URL with Unicode: http://www.xn--80aswg.xn--j1amh".extractUrl() }}'),
|
||||
).toEqual('http://www.xn--80aswg.xn--j1amh');
|
||||
expect(
|
||||
evaluate('={{ "Localhost URL: http://localhost:8080/test?x=1".extractUrl() }}'),
|
||||
).toEqual('http://localhost:8080/test?x=1');
|
||||
expect(
|
||||
evaluate('={{ "IP URL: http://192.168.1.1:8000/path?q=value#frag".extractUrl() }}'),
|
||||
).toEqual('http://192.168.1.1:8000/path?q=value#frag');
|
||||
});
|
||||
|
||||
test('.extractDomain should work on a string', () => {
|
||||
|
@ -175,24 +190,46 @@ describe('Data Transformation Functions', () => {
|
|||
expect(evaluate('={{ "google.com".extractDomain() }}')).toEqual('google.com');
|
||||
expect(evaluate('={{ "www.example.net".extractDomain() }}')).toEqual('www.example.net');
|
||||
expect(evaluate('={{ "//example.com".extractDomain() }}')).toEqual('example.com');
|
||||
expect(evaluate('={{ "mailto:john.doe@example.com".extractDomain() }}')).toEqual('example.com');
|
||||
expect(evaluate('={{ "mailto:john.doe@example.com".extractDomain() }}')).toEqual(
|
||||
'example.com',
|
||||
);
|
||||
expect(evaluate('={{ "tel:+1-555-123-4567".extractDomain() }}')).toEqual(undefined);
|
||||
expect(evaluate('={{ "jane.doe@example.org".extractDomain() }}')).toEqual('example.org');
|
||||
expect(evaluate('={{ "name+tag@example.com".extractDomain() }}')).toEqual('example.com');
|
||||
expect(evaluate('={{ "first.last@example.co.uk".extractDomain() }}')).toEqual('example.co.uk');
|
||||
expect(evaluate('={{ "user@subdomain.example.com".extractDomain() }}')).toEqual('subdomain.example.com');
|
||||
expect(evaluate('={{ "www.example.net?test=1213".extractDomain() }}')).toEqual('www.example.net');
|
||||
expect(evaluate('={{ "first.last@example.co.uk".extractDomain() }}')).toEqual(
|
||||
'example.co.uk',
|
||||
);
|
||||
expect(evaluate('={{ "user@subdomain.example.com".extractDomain() }}')).toEqual(
|
||||
'subdomain.example.com',
|
||||
);
|
||||
expect(evaluate('={{ "www.example.net?test=1213".extractDomain() }}')).toEqual(
|
||||
'www.example.net',
|
||||
);
|
||||
expect(evaluate('={{ "www.example.net?test".extractDomain() }}')).toEqual('www.example.net');
|
||||
expect(evaluate('={{ "www.example.net#tesdt123".extractDomain() }}')).toEqual('www.example.net');
|
||||
expect(evaluate('={{ "https://www.example.net?test=1213".extractDomain() }}')).toEqual('www.example.net');
|
||||
expect(evaluate('={{ "https://www.example.net?test".extractDomain() }}')).toEqual('www.example.net');
|
||||
expect(evaluate('={{ "https://www.example.net#tesdt123".extractDomain() }}')).toEqual('www.example.net');
|
||||
expect(evaluate('={{ "www.example.net#tesdt123".extractDomain() }}')).toEqual(
|
||||
'www.example.net',
|
||||
);
|
||||
expect(evaluate('={{ "https://www.example.net?test=1213".extractDomain() }}')).toEqual(
|
||||
'www.example.net',
|
||||
);
|
||||
expect(evaluate('={{ "https://www.example.net?test".extractDomain() }}')).toEqual(
|
||||
'www.example.net',
|
||||
);
|
||||
expect(evaluate('={{ "https://www.example.net#tesdt123".extractDomain() }}')).toEqual(
|
||||
'www.example.net',
|
||||
);
|
||||
expect(evaluate('={{ "https://192.168.1.1".extractDomain() }}')).toEqual('192.168.1.1');
|
||||
expect(evaluate('={{ "http://www.xn--80aswg.xn--j1amh".extractDomain() }}')).toEqual('www.xn--80aswg.xn--j1amh');
|
||||
expect(evaluate('={{ "http://www.xn--80aswg.xn--j1amh".extractDomain() }}')).toEqual(
|
||||
'www.xn--80aswg.xn--j1amh',
|
||||
);
|
||||
expect(evaluate('={{ "https://localhost".extractDomain() }}')).toEqual('localhost');
|
||||
expect(evaluate('={{ "https://localhost?test=123".extractDomain() }}')).toEqual('localhost');
|
||||
expect(evaluate('={{ "https://www.example_with_underscore.com".extractDomain() }}')).toEqual('www.example_with_underscore.com');
|
||||
expect(evaluate('={{ "https://www.example.com:8080".extractDomain() }}')).toEqual('www.example.com');
|
||||
expect(evaluate('={{ "https://www.example_with_underscore.com".extractDomain() }}')).toEqual(
|
||||
'www.example_with_underscore.com',
|
||||
);
|
||||
expect(evaluate('={{ "https://www.example.com:8080".extractDomain() }}')).toEqual(
|
||||
'www.example.com',
|
||||
);
|
||||
expect(evaluate('={{ "https://example.space".extractDomain() }}')).toEqual('example.space');
|
||||
});
|
||||
|
||||
|
|
|
@ -34,7 +34,7 @@ import type {
|
|||
WorkflowExecuteMode,
|
||||
} from '@/Interfaces';
|
||||
import { ICredentials, ICredentialsHelper } from '@/Interfaces';
|
||||
import { Workflow } from '@/Workflow';
|
||||
import type { Workflow } from '@/Workflow';
|
||||
import { WorkflowDataProxy } from '@/WorkflowDataProxy';
|
||||
import { WorkflowHooks } from '@/WorkflowHooks';
|
||||
import * as NodeHelpers from '@/NodeHelpers';
|
||||
|
@ -76,7 +76,7 @@ export class Credentials extends ICredentials {
|
|||
const fullData = this.getData(encryptionKey, nodeType);
|
||||
|
||||
if (fullData === null) {
|
||||
throw new Error(`No data was set.`);
|
||||
throw new Error('No data was set.');
|
||||
}
|
||||
|
||||
// eslint-disable-next-line no-prototype-builtins
|
||||
|
@ -89,7 +89,7 @@ export class Credentials extends ICredentials {
|
|||
|
||||
getDataToSave(): ICredentialsEncrypted {
|
||||
if (this.data === undefined) {
|
||||
throw new Error(`No credentials were set to save.`);
|
||||
throw new Error('No credentials were set to save.');
|
||||
}
|
||||
|
||||
return {
|
||||
|
@ -277,7 +277,7 @@ export function getExecuteFunctions(
|
|||
return mode;
|
||||
},
|
||||
getNode: () => {
|
||||
return JSON.parse(JSON.stringify(node));
|
||||
return deepCopy(node);
|
||||
},
|
||||
getRestApiUrl: (): string => {
|
||||
return additionalData.restApiUrl;
|
||||
|
@ -444,7 +444,7 @@ export function getExecuteSingleFunctions(
|
|||
return mode;
|
||||
},
|
||||
getNode: () => {
|
||||
return JSON.parse(JSON.stringify(node));
|
||||
return deepCopy(node);
|
||||
},
|
||||
getRestApiUrl: (): string => {
|
||||
return additionalData.restApiUrl;
|
||||
|
|
|
@ -5,7 +5,6 @@ import type {
|
|||
DeclarativeRestApiSettings,
|
||||
IRunExecutionData,
|
||||
INodeProperties,
|
||||
IDataObject,
|
||||
IExecuteSingleFunctions,
|
||||
IHttpRequestOptions,
|
||||
IN8nHttpFullResponse,
|
||||
|
@ -34,7 +33,7 @@ const preSendFunction1 = async function (
|
|||
this: IExecuteSingleFunctions,
|
||||
requestOptions: IHttpRequestOptions,
|
||||
): Promise<IHttpRequestOptions> {
|
||||
requestOptions.headers = (requestOptions.headers || {}) as IDataObject;
|
||||
requestOptions.headers = requestOptions.headers || {};
|
||||
requestOptions.headers.addedIn = 'preSendFunction1';
|
||||
return requestOptions;
|
||||
};
|
||||
|
@ -344,6 +343,7 @@ describe('RoutingNode', () => {
|
|||
type: 'string',
|
||||
routing: {
|
||||
send: {
|
||||
// eslint-disable-next-line n8n-local-rules/no-interpolation-in-regular-string
|
||||
property: '={{ `value${5+1}A` }}',
|
||||
type: 'query',
|
||||
value: '={{$value.toUpperCase()}}',
|
||||
|
@ -357,6 +357,7 @@ describe('RoutingNode', () => {
|
|||
type: 'string',
|
||||
routing: {
|
||||
send: {
|
||||
// eslint-disable-next-line n8n-local-rules/no-interpolation-in-regular-string
|
||||
property: '={{ `value${6+1}B` }}',
|
||||
type: 'body',
|
||||
value: "={{$value.split(',')}}",
|
||||
|
|
|
@ -751,7 +751,7 @@ describe('Workflow', () => {
|
|||
});
|
||||
|
||||
describe('getParameterValue', () => {
|
||||
const tests: {
|
||||
const tests: Array<{
|
||||
description: string;
|
||||
input: {
|
||||
[nodeName: string]: {
|
||||
|
@ -761,7 +761,7 @@ describe('Workflow', () => {
|
|||
};
|
||||
};
|
||||
output: Record<string, unknown>;
|
||||
}[] = [
|
||||
}> = [
|
||||
{
|
||||
description: 'read simple not expression value',
|
||||
input: {
|
||||
|
@ -1296,7 +1296,7 @@ describe('Workflow', () => {
|
|||
const itemIndex = 0;
|
||||
const runIndex = 0;
|
||||
const connectionInputData: INodeExecutionData[] =
|
||||
runExecutionData.resultData.runData!['Node1']![0]!.data!.main[0]!;
|
||||
runExecutionData.resultData.runData.Node1[0]!.data!.main[0]!;
|
||||
|
||||
for (const parameterName of Object.keys(testData.output)) {
|
||||
const parameterValue = nodes.find((node) => node.name === activeNodeName)!.parameters[
|
||||
|
@ -1451,7 +1451,7 @@ describe('Workflow', () => {
|
|||
const itemIndex = 0;
|
||||
const runIndex = 0;
|
||||
const connectionInputData: INodeExecutionData[] =
|
||||
runExecutionData.resultData.runData!['Node1']![0]!.data!.main[0]!;
|
||||
runExecutionData.resultData.runData.Node1[0]!.data!.main[0]!;
|
||||
const parameterName = 'values';
|
||||
|
||||
const parameterValue = nodes.find((node) => node.name === activeNodeName)!.parameters[
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import { IConnections, IExecuteData, INode, IRunExecutionData } from '@/Interfaces';
|
||||
import type { IConnections, IExecuteData, INode, IRunExecutionData } from '@/Interfaces';
|
||||
import { Workflow } from '@/Workflow';
|
||||
import { WorkflowDataProxy } from '@/WorkflowDataProxy';
|
||||
import * as Helpers from './Helpers';
|
||||
|
@ -234,7 +234,7 @@ describe('WorkflowDataProxy', () => {
|
|||
data: runExecutionData.resultData.runData[nameLastNode][0].data!,
|
||||
node: nodes.find((node) => node.name === nameLastNode) as INode,
|
||||
source: {
|
||||
main: runExecutionData.resultData.runData[nameLastNode][0].source!,
|
||||
main: runExecutionData.resultData.runData[nameLastNode][0].source,
|
||||
},
|
||||
};
|
||||
|
||||
|
|