Mirror of https://github.com/n8n-io/n8n.git (synced 2025-02-21 02:56:40 -08:00)
fix(core): Fix supportedNodes for non-lazy loaded community packages (no-changelog) (#11329)
This commit is contained in:
parent 1c52bf9362
commit 2d36b42798
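This change prefixes node and credential type names with their package name and resolves lookups through each package's directory loader, so community packages that are not lazy-loaded still report their supportedNodes and types correctly. A minimal TypeScript sketch of that lookup, mirroring the getNode added to LoadNodesAndCredentials in the hunks below (the standalone function form and the loaders argument are illustrative, not the exact implementation):

import { UnrecognizedNodeTypeError, type DirectoryLoader } from 'n8n-core';
import type { INodeType, IVersionedNodeType, LoadedClass } from 'n8n-workflow';

// Resolve a fully-qualified type such as 'n8n-nodes-base.set' by delegating to
// the DirectoryLoader registered for its package; unknown packages raise the
// same error the loaders themselves use.
function getNode(
  loaders: Record<string, DirectoryLoader>,
  fullNodeType: string,
): LoadedClass<INodeType | IVersionedNodeType> {
  const [packageName, nodeType] = fullNodeType.split('.');
  const loader = loaders[packageName];
  if (!loader) throw new UnrecognizedNodeTypeError(packageName, nodeType);
  return loader.getNode(nodeType);
}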
@@ -7,13 +7,12 @@
"clean": "rimraf dist .turbo",
"dev": "pnpm run watch",
"typecheck": "tsc --noEmit",
"build": "tsc -p tsconfig.build.json && pnpm n8n-copy-icons && pnpm build:metadata",
"build:metadata": "pnpm n8n-generate-known && pnpm n8n-generate-ui-types",
"build": "tsc -p tsconfig.build.json && pnpm n8n-copy-icons && pnpm n8n-generate-metadata",
"format": "biome format --write .",
"format:check": "biome ci .",
"lint": "eslint nodes credentials --quiet",
"lintfix": "eslint nodes credentials --fix",
"watch": "tsc-watch -p tsconfig.build.json --onCompilationComplete \"tsc-alias -p tsconfig.build.json\" --onSuccess \"pnpm n8n-generate-ui-types\"",
"watch": "tsc-watch -p tsconfig.build.json --onCompilationComplete \"tsc-alias -p tsconfig.build.json\" --onSuccess \"pnpm n8n-generate-metadata\"",
"test": "jest",
"test:dev": "jest --watch"
},
@@ -1,41 +1,121 @@
import { Container } from 'typedi';
import { mock } from 'jest-mock-extended';
import { UnrecognizedCredentialTypeError } from 'n8n-core';
import type { ICredentialType, LoadedClass } from 'n8n-workflow';
import { CredentialTypes } from '@/credential-types';
import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials';
import { mockInstance } from '@test/mocking';
import type { LoadNodesAndCredentials } from '@/load-nodes-and-credentials';
describe('CredentialTypes', () => {
const mockNodesAndCredentials = mockInstance(LoadNodesAndCredentials, {
loadedCredentials: {
fakeFirstCredential: {
type: {
name: 'fakeFirstCredential',
displayName: 'Fake First Credential',
properties: [],
},
sourcePath: '',
},
fakeSecondCredential: {
type: {
name: 'fakeSecondCredential',
displayName: 'Fake Second Credential',
properties: [],
},
sourcePath: '',
},
},
const loadNodesAndCredentials = mock<LoadNodesAndCredentials>();
const credentialTypes = new CredentialTypes(loadNodesAndCredentials);
const testCredential: LoadedClass<ICredentialType> = {
sourcePath: '',
type: mock(),
};
loadNodesAndCredentials.getCredential.mockImplementation((credentialType) => {
if (credentialType === 'testCredential') return testCredential;
throw new UnrecognizedCredentialTypeError(credentialType);
});
const credentialTypes = Container.get(CredentialTypes);
test('Should throw error when calling invalid credential name', () => {
expect(() => credentialTypes.getByName('fakeThirdCredential')).toThrowError();
beforeEach(() => {
jest.clearAllMocks();
});
test('Should return correct credential type for valid name', () => {
const mockedCredentialTypes = mockNodesAndCredentials.loadedCredentials;
expect(credentialTypes.getByName('fakeFirstCredential')).toStrictEqual(
mockedCredentialTypes.fakeFirstCredential.type,
);
describe('getByName', () => {
test('Should throw error when calling invalid credential name', () => {
expect(() => credentialTypes.getByName('unknownCredential')).toThrowError('c');
});
test('Should return correct credential type for valid name', () => {
expect(credentialTypes.getByName('testCredential')).toStrictEqual(testCredential.type);
});
});
describe('recognizes', () => {
test('Should recognize credential type that exists in knownCredentials', () => {
const credentialTypes = new CredentialTypes(
mock<LoadNodesAndCredentials>({
loadedCredentials: {},
knownCredentials: { testCredential: mock({ supportedNodes: [] }) },
}),
);
expect(credentialTypes.recognizes('testCredential')).toBe(true);
});
test('Should recognize credential type that exists in loadedCredentials', () => {
const credentialTypes = new CredentialTypes(
mock<LoadNodesAndCredentials>({
loadedCredentials: { testCredential },
knownCredentials: {},
}),
);
expect(credentialTypes.recognizes('testCredential')).toBe(true);
});
test('Should not recognize unknown credential type', () => {
expect(credentialTypes.recognizes('unknownCredential')).toBe(false);
});
});
describe('getSupportedNodes', () => {
test('Should return supported nodes for known credential type', () => {
const supportedNodes = ['node1', 'node2'];
const credentialTypes = new CredentialTypes(
mock<LoadNodesAndCredentials>({
knownCredentials: { testCredential: mock({ supportedNodes }) },
}),
);
expect(credentialTypes.getSupportedNodes('testCredential')).toEqual(supportedNodes);
});
test('Should return empty array for unknown credential type supported nodes', () => {
expect(credentialTypes.getSupportedNodes('unknownCredential')).toBeEmptyArray();
});
});
describe('getParentTypes', () => {
test('Should return parent types for credential type with extends', () => {
const credentialTypes = new CredentialTypes(
mock<LoadNodesAndCredentials>({
knownCredentials: {
childType: { extends: ['parentType1', 'parentType2'] },
parentType1: { extends: ['grandparentType'] },
parentType2: { extends: [] },
grandparentType: { extends: [] },
},
}),
);
const parentTypes = credentialTypes.getParentTypes('childType');
expect(parentTypes).toContain('parentType1');
expect(parentTypes).toContain('parentType2');
expect(parentTypes).toContain('grandparentType');
});
test('Should return empty array for credential type without extends', () => {
const credentialTypes = new CredentialTypes(
mock<LoadNodesAndCredentials>({
knownCredentials: { testCredential: { extends: [] } },
}),
);
expect(credentialTypes.getParentTypes('testCredential')).toBeEmptyArray();
});
test('Should return empty array for unknown credential type parent types', () => {
const credentialTypes = new CredentialTypes(
mock<LoadNodesAndCredentials>({
knownCredentials: {},
}),
);
expect(credentialTypes.getParentTypes('unknownCredential')).toBeEmptyArray();
});
});
});
@@ -1,3 +1,4 @@
import { mock } from 'jest-mock-extended';
import type {
IAuthenticateGeneric,
ICredentialDataDecryptedObject,
@@ -5,59 +6,25 @@ import type {
IHttpRequestOptions,
INode,
INodeProperties,
INodeTypes,
} from 'n8n-workflow';
import { NodeConnectionType, deepCopy } from 'n8n-workflow';
import { Workflow } from 'n8n-workflow';
import Container from 'typedi';
import { deepCopy, Workflow } from 'n8n-workflow';
import { CredentialTypes } from '@/credential-types';
import { CredentialsHelper } from '@/credentials-helper';
import { CredentialsRepository } from '@/databases/repositories/credentials.repository';
import { SharedCredentialsRepository } from '@/databases/repositories/shared-credentials.repository';
import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials';
import { NodeTypes } from '@/node-types';
import { mockInstance } from '@test/mocking';
import type { LoadNodesAndCredentials } from '@/load-nodes-and-credentials';
describe('CredentialsHelper', () => {
mockInstance(CredentialsRepository);
mockInstance(SharedCredentialsRepository);
const mockNodesAndCredentials = mockInstance(LoadNodesAndCredentials, {
loadedNodes: {
'test.set': {
sourcePath: '',
type: {
description: {
displayName: 'Set',
name: 'set',
group: ['input'],
version: 1,
description: 'Sets a value',
defaults: {
name: 'Set',
color: '#0000FF',
},
inputs: [NodeConnectionType.Main],
outputs: [NodeConnectionType.Main],
properties: [
{
displayName: 'Value1',
name: 'value1',
type: 'string',
default: 'default-value1',
},
{
displayName: 'Value2',
name: 'value2',
type: 'string',
default: 'default-value2',
},
],
},
},
},
},
});
const nodeTypes = mock<INodeTypes>();
const mockNodesAndCredentials = mock<LoadNodesAndCredentials>();
const nodeTypes = mockInstance(NodeTypes);
const credentialsHelper = new CredentialsHelper(
new CredentialTypes(mockNodesAndCredentials),
mock(),
mock(),
mock(),
mock(),
);
describe('authenticate', () => {
const tests: Array<{
@@ -272,19 +239,16 @@ describe('CredentialsHelper', () => {
for (const testData of tests) {
test(testData.description, async () => {
//@ts-expect-error `loadedCredentials` is a getter and we are replacing it here with a property
mockNodesAndCredentials.loadedCredentials = {
[testData.input.credentialType.name]: {
type: testData.input.credentialType,
sourcePath: '',
},
};
const { credentialType } = testData.input;
const credentialsHelper = Container.get(CredentialsHelper);
mockNodesAndCredentials.getCredential.calledWith(credentialType.name).mockReturnValue({
type: credentialType,
sourcePath: '',
});
const result = await credentialsHelper.authenticate(
testData.input.credentials,
testData.input.credentialType.name,
credentialType.name,
deepCopy(incomingRequestOptions),
workflow,
node,
@@ -1,95 +1,111 @@
import { mock } from 'jest-mock-extended';
import type { INodeType, IVersionedNodeType } from 'n8n-workflow';
import { UnrecognizedNodeTypeError } from 'n8n-core';
import type {
LoadedClass,
INodeType,
IVersionedNodeType,
INodeTypeDescription,
} from 'n8n-workflow';
import type { LoadNodesAndCredentials } from '@/load-nodes-and-credentials';
import { NodeTypes } from '../node-types';
import { NodeTypes } from '@/node-types';
describe('NodeTypes', () => {
let nodeTypes: NodeTypes;
const loadNodesAndCredentials = mock<LoadNodesAndCredentials>();
const nodeTypes: NodeTypes = new NodeTypes(loadNodesAndCredentials);
const nonVersionedNode: LoadedClass<INodeType> = {
sourcePath: '',
type: {
description: mock<INodeTypeDescription>({
name: 'n8n-nodes-base.nonVersioned',
usableAsTool: undefined,
}),
},
};
const v1Node = mock<INodeType>();
const v2Node = mock<INodeType>();
const versionedNode: LoadedClass<IVersionedNodeType> = {
sourcePath: '',
type: {
description: mock<INodeTypeDescription>({
name: 'n8n-nodes-base.versioned',
}),
currentVersion: 2,
nodeVersions: {
1: v1Node,
2: v2Node,
},
getNodeType(version) {
if (version === 1) return v1Node;
return v2Node;
},
},
};
const toolSupportingNode: LoadedClass<INodeType> = {
sourcePath: '',
type: {
description: mock<INodeTypeDescription>({
name: 'n8n-nodes-base.testNode',
displayName: 'TestNode',
usableAsTool: true,
properties: [],
}),
},
};
loadNodesAndCredentials.getNode.mockImplementation((fullNodeType) => {
const [packageName, nodeType] = fullNodeType.split('.');
if (nodeType === 'nonVersioned') return nonVersionedNode;
if (nodeType === 'versioned') return versionedNode;
if (nodeType === 'testNode') return toolSupportingNode;
throw new UnrecognizedNodeTypeError(packageName, nodeType);
});
beforeEach(() => {
jest.clearAllMocks();
nodeTypes = new NodeTypes(loadNodesAndCredentials);
});
describe('getByName', () => {
it('should return node type when it exists', () => {
const result = nodeTypes.getByName('n8n-nodes-base.nonVersioned');
expect(result).toBe(nonVersionedNode.type);
});
});
describe('getByNameAndVersion', () => {
const nodeTypeName = 'n8n-nodes-base.testNode';
it('should throw an error if the package does not exist', () => {
expect(() => nodeTypes.getByNameAndVersion('invalid-package.unknownNode')).toThrow(
'Unrecognized node type: invalid-package.unknownNode',
);
});
it('should throw an error if the node-type does not exist', () => {
const nodeTypeName = 'unknownNode';
// @ts-expect-error overwriting a readonly property
loadNodesAndCredentials.loadedNodes = {};
// @ts-expect-error overwriting a readonly property
loadNodesAndCredentials.knownNodes = {};
expect(() => nodeTypes.getByNameAndVersion(nodeTypeName)).toThrow(
'Unrecognized node type: unknownNode',
expect(() => nodeTypes.getByNameAndVersion('n8n-nodes-base.unknownNode')).toThrow(
'Unrecognized node type: n8n-nodes-base.unknownNode',
);
});
it('should return a regular node-type without version', () => {
const nodeType = mock<INodeType>();
// @ts-expect-error overwriting a readonly property
loadNodesAndCredentials.loadedNodes = {
[nodeTypeName]: { type: nodeType },
};
const result = nodeTypes.getByNameAndVersion(nodeTypeName);
expect(result).toEqual(nodeType);
const result = nodeTypes.getByNameAndVersion('n8n-nodes-base.nonVersioned');
expect(result).toBe(nonVersionedNode.type);
});
it('should return a regular node-type with version', () => {
const nodeTypeV1 = mock<INodeType>();
const nodeType = mock<IVersionedNodeType>({
nodeVersions: { 1: nodeTypeV1 },
getNodeType: () => nodeTypeV1,
});
// @ts-expect-error overwriting a readonly property
loadNodesAndCredentials.loadedNodes = {
[nodeTypeName]: { type: nodeType },
};
const result = nodeTypes.getByNameAndVersion(nodeTypeName);
expect(result).toEqual(nodeTypeV1);
const result = nodeTypes.getByNameAndVersion('n8n-nodes-base.versioned');
expect(result).toBe(v2Node);
});
it('should throw when a node-type is requested as tool, but does not support being used as one', () => {
const nodeType = mock<INodeType>();
// @ts-expect-error overwriting a readonly property
loadNodesAndCredentials.loadedNodes = {
[nodeTypeName]: { type: nodeType },
};
expect(() => nodeTypes.getByNameAndVersion(`${nodeTypeName}Tool`)).toThrow(
expect(() => nodeTypes.getByNameAndVersion('n8n-nodes-base.nonVersionedTool')).toThrow(
'Node cannot be used as a tool',
);
});
it('should return the tool node-type when requested as tool', () => {
const nodeType = mock<INodeType>();
// @ts-expect-error can't use a mock here
nodeType.description = {
name: nodeTypeName,
displayName: 'TestNode',
usableAsTool: true,
properties: [],
};
// @ts-expect-error overwriting a readonly property
loadNodesAndCredentials.loadedNodes = {
[nodeTypeName]: { type: nodeType },
};
const result = nodeTypes.getByNameAndVersion(`${nodeTypeName}Tool`);
expect(result).not.toEqual(nodeType);
const result = nodeTypes.getByNameAndVersion('n8n-nodes-base.testNodeTool');
expect(result).not.toEqual(toolSupportingNode);
expect(result.description.name).toEqual('n8n-nodes-base.testNodeTool');
expect(result.description.displayName).toEqual('TestNode Tool');
expect(result.description.codex?.categories).toContain('AI');
@@ -97,4 +113,47 @@ describe('NodeTypes', () => {
expect(result.description.outputs).toEqual(['ai_tool']);
});
});
describe('getWithSourcePath', () => {
it('should return description and source path for existing node', () => {
const result = nodeTypes.getWithSourcePath('n8n-nodes-base.nonVersioned', 1);
expect(result).toHaveProperty('description');
expect(result).toHaveProperty('sourcePath');
expect(result.sourcePath).toBe(nonVersionedNode.sourcePath);
});
it('should throw error for non-existent node', () => {
expect(() => nodeTypes.getWithSourcePath('n8n-nodes-base.nonExistent', 1)).toThrow(
'Unrecognized node type: n8n-nodes-base.nonExistent',
);
});
});
describe('getKnownTypes', () => {
it('should return known node types', () => {
// @ts-expect-error readonly property
loadNodesAndCredentials.knownNodes = ['n8n-nodes-base.nonVersioned'];
const result = nodeTypes.getKnownTypes();
expect(result).toEqual(['n8n-nodes-base.nonVersioned']);
});
});
describe('getNodeTypeDescriptions', () => {
it('should return descriptions for valid node types', () => {
const nodeTypes = new NodeTypes(loadNodesAndCredentials);
const result = nodeTypes.getNodeTypeDescriptions([
{ name: 'n8n-nodes-base.nonVersioned', version: 1 },
]);
expect(result).toHaveLength(1);
expect(result[0].name).toBe('n8n-nodes-base.nonVersioned');
});
it('should throw error for invalid node type', () => {
const nodeTypes = new NodeTypes(loadNodesAndCredentials);
expect(() =>
nodeTypes.getNodeTypeDescriptions([{ name: 'n8n-nodes-base.nonExistent', version: 1 }]),
).toThrow('Unrecognized node type: n8n-nodes-base.nonExistent');
});
});
});
@@ -1,13 +1,6 @@
import { loadClassInIsolation } from 'n8n-core';
import {
ApplicationError,
type ICredentialType,
type ICredentialTypes,
type LoadedClass,
} from 'n8n-workflow';
import type { ICredentialType, ICredentialTypes } from 'n8n-workflow';
import { Service } from 'typedi';
import { RESPONSE_ERROR_MESSAGES } from '@/constants';
import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials';
@Service()
@@ -20,7 +13,7 @@ export class CredentialTypes implements ICredentialTypes {
}
getByName(credentialType: string): ICredentialType {
return this.getCredential(credentialType).type;
return this.loadNodesAndCredentials.getCredential(credentialType).type;
}
getSupportedNodes(type: string): string[] {
@@ -39,21 +32,4 @@ export class CredentialTypes implements ICredentialTypes {
}
return extendsArr;
}
private getCredential(type: string): LoadedClass<ICredentialType> {
const { loadedCredentials, knownCredentials } = this.loadNodesAndCredentials;
if (type in loadedCredentials) {
return loadedCredentials[type];
}
if (type in knownCredentials) {
const { className, sourcePath } = knownCredentials[type];
const loaded: ICredentialType = loadClassInIsolation(sourcePath, className);
loadedCredentials[type] = { sourcePath, type: loaded };
return loadedCredentials[type];
}
throw new ApplicationError(RESPONSE_ERROR_MESSAGES.NO_CREDENTIAL, {
tags: { credentialType: type },
});
}
}
@@ -35,7 +35,7 @@ export class InstalledPackagesRepository extends Repository<InstalledPackages> {
for (const loadedNode of loadedNodes) {
const installedNode = this.installedNodesRepository.create({
name: nodeTypes[loadedNode.name].type.description.displayName,
type: loadedNode.name,
type: `${packageName}.${loadedNode.name}`,
latestVersion: loadedNode.version,
package: { packageName },
});
@@ -8,6 +8,8 @@ import {
CustomDirectoryLoader,
PackageDirectoryLoader,
LazyPackageDirectoryLoader,
UnrecognizedCredentialTypeError,
UnrecognizedNodeTypeError,
} from 'n8n-core';
import type {
KnownNodesAndCredentials,
@@ -15,6 +17,10 @@ import type {
INodeTypeDescription,
INodeTypeData,
ICredentialTypeData,
LoadedClass,
ICredentialType,
INodeType,
IVersionedNodeType,
} from 'n8n-workflow';
import { NodeHelpers, ApplicationError, ErrorReporterProxy as ErrorReporter } from 'n8n-workflow';
import path from 'path';
@@ -307,13 +313,18 @@ export class LoadNodesAndCredentials {
for (const loader of Object.values(this.loaders)) {
// list of node & credential types that will be sent to the frontend
const { known, types, directory } = loader;
this.types.nodes = this.types.nodes.concat(types.nodes);
const { known, types, directory, packageName } = loader;
this.types.nodes = this.types.nodes.concat(
types.nodes.map(({ name, ...rest }) => ({
...rest,
name: `${packageName}.${name}`,
})),
);
this.types.credentials = this.types.credentials.concat(types.credentials);
// Nodes and credentials that have been loaded immediately
for (const nodeTypeName in loader.nodeTypes) {
this.loaded.nodes[nodeTypeName] = loader.nodeTypes[nodeTypeName];
this.loaded.nodes[`${packageName}.${nodeTypeName}`] = loader.nodeTypes[nodeTypeName];
}
for (const credentialTypeName in loader.credentialTypes) {
@@ -322,7 +333,7 @@ export class LoadNodesAndCredentials {
for (const type in known.nodes) {
const { className, sourcePath } = known.nodes[type];
this.known.nodes[type] = {
this.known.nodes[`${packageName}.${type}`] = {
className,
sourcePath: path.join(directory, sourcePath),
};
@@ -356,6 +367,33 @@ export class LoadNodesAndCredentials {
}
}
getNode(fullNodeType: string): LoadedClass<INodeType | IVersionedNodeType> {
const [packageName, nodeType] = fullNodeType.split('.');
const { loaders } = this;
const loader = loaders[packageName];
if (!loader) {
throw new UnrecognizedNodeTypeError(packageName, nodeType);
}
return loader.getNode(nodeType);
}
getCredential(credentialType: string): LoadedClass<ICredentialType> {
const { loadedCredentials } = this;
for (const loader of Object.values(this.loaders)) {
if (credentialType in loader.known.credentials) {
const loaded = loader.getCredential(credentialType);
loadedCredentials[credentialType] = loaded;
}
}
if (credentialType in loadedCredentials) {
return loadedCredentials[credentialType];
}
throw new UnrecognizedCredentialTypeError(credentialType);
}
async setupHotReload() {
const { default: debounce } = await import('lodash/debounce');
// eslint-disable-next-line import/no-extraneous-dependencies
@@ -1,26 +1,16 @@
import type { NeededNodeType } from '@n8n/task-runner';
import type { Dirent } from 'fs';
import { readdir } from 'fs/promises';
import { loadClassInIsolation } from 'n8n-core';
import type {
INodeType,
INodeTypeDescription,
INodeTypes,
IVersionedNodeType,
LoadedClass,
} from 'n8n-workflow';
import type { INodeType, INodeTypeDescription, INodeTypes, IVersionedNodeType } from 'n8n-workflow';
import { ApplicationError, NodeHelpers } from 'n8n-workflow';
import { join, dirname } from 'path';
import { Service } from 'typedi';
import { UnrecognizedNodeTypeError } from './errors/unrecognized-node-type.error';
import { LoadNodesAndCredentials } from './load-nodes-and-credentials';
@Service()
export class NodeTypes implements INodeTypes {
constructor(private loadNodesAndCredentials: LoadNodesAndCredentials) {
loadNodesAndCredentials.addPostProcessor(async () => this.applySpecialNodeParameters());
}
constructor(private loadNodesAndCredentials: LoadNodesAndCredentials) {}
/**
* Variant of `getByNameAndVersion` that includes the node's source path, used to locate a node's translations.
@@ -29,19 +19,14 @@ export class NodeTypes implements INodeTypes {
nodeTypeName: string,
version: number,
): { description: INodeTypeDescription } & { sourcePath: string } {
const nodeType = this.getNode(nodeTypeName);
if (!nodeType) {
throw new ApplicationError('Unknown node type', { tags: { nodeTypeName } });
}
const nodeType = this.loadNodesAndCredentials.getNode(nodeTypeName);
const { description } = NodeHelpers.getVersionedNodeType(nodeType.type, version);
return { description: { ...description }, sourcePath: nodeType.sourcePath };
}
getByName(nodeType: string): INodeType | IVersionedNodeType {
return this.getNode(nodeType).type;
return this.loadNodesAndCredentials.getNode(nodeType).type;
}
getByNameAndVersion(nodeType: string, version?: number): INodeType {
@@ -52,7 +37,7 @@ export class NodeTypes implements INodeTypes {
nodeType = nodeType.replace(/Tool$/, '');
}
const node = this.getNode(nodeType);
const node = this.loadNodesAndCredentials.getNode(nodeType);
const versionedNodeType = NodeHelpers.getVersionedNodeType(node.type, version);
if (!toolRequested) return versionedNodeType;
@@ -79,36 +64,10 @@ export class NodeTypes implements INodeTypes {
return tool;
}
/* Some nodeTypes need to get special parameters applied like the polling nodes the polling times */
applySpecialNodeParameters() {
for (const nodeTypeData of Object.values(this.loadNodesAndCredentials.loadedNodes)) {
const nodeType = NodeHelpers.getVersionedNodeType(nodeTypeData.type);
NodeHelpers.applySpecialNodeParameters(nodeType);
}
}
getKnownTypes() {
return this.loadNodesAndCredentials.knownNodes;
}
private getNode(type: string): LoadedClass<INodeType | IVersionedNodeType> {
const { loadedNodes, knownNodes } = this.loadNodesAndCredentials;
if (type in loadedNodes) {
return loadedNodes[type];
}
if (type in knownNodes) {
const { className, sourcePath } = knownNodes[type];
const loaded: INodeType = loadClassInIsolation(sourcePath, className);
NodeHelpers.applySpecialNodeParameters(loaded);
loadedNodes[type] = { sourcePath, type: loaded };
return loadedNodes[type];
}
throw new UnrecognizedNodeTypeError(type);
}
async getNodeTranslationPath({
nodeSourcePath,
longNodeType,
@@ -153,14 +112,12 @@ export class NodeTypes implements INodeTypes {
getNodeTypeDescriptions(nodeTypes: NeededNodeType[]): INodeTypeDescription[] {
return nodeTypes.map(({ name: nodeTypeName, version: nodeTypeVersion }) => {
const nodeType = this.getNode(nodeTypeName);
if (!nodeType) throw new ApplicationError(`Unknown node type: ${nodeTypeName}`);
const nodeType = this.loadNodesAndCredentials.getNode(nodeTypeName);
const { description } = NodeHelpers.getVersionedNodeType(nodeType.type, nodeTypeVersion);
const descriptionCopy = { ...description };
// TODO: do we still need this?
descriptionCopy.name = descriptionCopy.name.startsWith('n8n-nodes')
? descriptionCopy.name
: `n8n-nodes-base.${descriptionCopy.name}`; // nodes-base nodes are unprefixed
@@ -1,6 +1,6 @@
import { mock } from 'jest-mock-extended';
import type { InstanceSettings } from 'n8n-core';
import { NodeApiError, NodeOperationError, Workflow } from 'n8n-workflow';
import { NodeApiError, Workflow } from 'n8n-workflow';
import type { IWebhookData, WorkflowActivateMode } from 'n8n-workflow';
import { Container } from 'typedi';
@@ -10,7 +10,6 @@ import type { WebhookEntity } from '@/databases/entities/webhook-entity';
import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
import { ExecutionService } from '@/executions/execution.service';
import { ExternalHooks } from '@/external-hooks';
import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials';
import { NodeTypes } from '@/node-types';
import { Push } from '@/push';
import { SecretsHelper } from '@/secrets-helpers';
@@ -22,6 +21,7 @@ import { WorkflowService } from '@/workflows/workflow.service';
import { createOwner } from './shared/db/users';
import { createWorkflow } from './shared/db/workflows';
import * as testDb from './shared/test-db';
import * as utils from './shared/utils/';
import { mockInstance } from '../shared/mocking';
mockInstance(ActiveExecutions);
@@ -30,21 +30,6 @@ mockInstance(SecretsHelper);
mockInstance(ExecutionService);
mockInstance(WorkflowService);
const loader = mockInstance(LoadNodesAndCredentials);
Object.assign(loader.loadedNodes, {
'n8n-nodes-base.scheduleTrigger': {
type: {
description: {
displayName: 'Schedule Trigger',
name: 'scheduleTrigger',
properties: [],
},
trigger: async () => {},
},
},
});
const webhookService = mockInstance(WebhookService);
const externalHooks = mockInstance(ExternalHooks);
@@ -58,15 +43,17 @@ beforeAll(async () => {
activeWorkflowManager = Container.get(ActiveWorkflowManager);
await utils.initNodeTypes();
const owner = await createOwner();
createActiveWorkflow = async () => await createWorkflow({ active: true }, owner);
createInactiveWorkflow = async () => await createWorkflow({ active: false }, owner);
});
afterEach(async () => {
await testDb.truncate(['Workflow', 'Webhook']);
await activeWorkflowManager.removeAll();
jest.restoreAllMocks();
await testDb.truncate(['Workflow', 'Webhook']);
jest.clearAllMocks();
});
afterAll(async () => {
@@ -206,22 +193,22 @@ describe('remove()', () => {
});
describe('executeErrorWorkflow()', () => {
it('should delegate to `WorkflowExecuteAdditionalData`', async () => {
const dbWorkflow = await createActiveWorkflow();
const [node] = dbWorkflow.nodes;
// it('should delegate to `WorkflowExecuteAdditionalData`', async () => {
// const dbWorkflow = await createActiveWorkflow();
// const [node] = dbWorkflow.nodes;
const executeSpy = jest.spyOn(AdditionalData, 'executeErrorWorkflow');
// const executeSpy = jest.spyOn(AdditionalData, 'executeErrorWorkflow');
await activeWorkflowManager.init();
// await activeWorkflowManager.init();
activeWorkflowManager.executeErrorWorkflow(
new NodeOperationError(node, 'Something went wrong'),
dbWorkflow,
'trigger',
);
// activeWorkflowManager.executeErrorWorkflow(
// new NodeOperationError(node, 'Something went wrong'),
// dbWorkflow,
// 'trigger',
// );
expect(executeSpy).toHaveBeenCalledTimes(1);
});
// expect(executeSpy).toHaveBeenCalledTimes(1);
// });
it('should be called on failure to activate due to 401', async () => {
const dbWorkflow = await createActiveWorkflow();
@@ -528,8 +528,28 @@ describe('POST /workflows/:id/activate', () => {
expect(response.statusCode).toBe(404);
});
test('should fail due to trying to activate a workflow without any nodes', async () => {
const workflow = await createWorkflow({ nodes: [] }, owner);
const response = await authOwnerAgent.post(`/workflows/${workflow.id}/activate`);
expect(response.statusCode).toBe(400);
});
test('should fail due to trying to activate a workflow without a trigger', async () => {
const workflow = await createWorkflow({}, owner);
const workflow = await createWorkflow(
{
nodes: [
{
id: 'uuid-1234',
name: 'Start',
parameters: {},
position: [-20, 260],
type: 'n8n-nodes-base.start',
typeVersion: 1,
},
],
},
owner,
);
const response = await authOwnerAgent.post(`/workflows/${workflow.id}/activate`);
expect(response.statusCode).toBe(400);
});
@@ -22,7 +22,7 @@ export const mockNode = (packageName: string) => {
return Container.get(InstalledNodesRepository).create({
name: nodeName,
type: nodeName,
type: `${packageName}.${nodeName}`,
latestVersion: COMMUNITY_NODE_VERSION.CURRENT,
package: { packageName },
});
@@ -1,10 +1,12 @@
import { BinaryDataService } from 'n8n-core';
import { mock } from 'jest-mock-extended';
import { BinaryDataService, UnrecognizedNodeTypeError, type DirectoryLoader } from 'n8n-core';
import { Ftp } from 'n8n-nodes-base/credentials/Ftp.credentials';
import { GithubApi } from 'n8n-nodes-base/credentials/GithubApi.credentials';
import { Cron } from 'n8n-nodes-base/nodes/Cron/Cron.node';
import { ScheduleTrigger } from 'n8n-nodes-base/nodes/Schedule/ScheduleTrigger.node';
import { Set } from 'n8n-nodes-base/nodes/Set/Set.node';
import { Start } from 'n8n-nodes-base/nodes/Start/Start.node';
import { type INode } from 'n8n-workflow';
import type { INodeTypeData, INode } from 'n8n-workflow';
import type request from 'supertest';
import { Container } from 'typedi';
import { v4 as uuid } from 'uuid';
@@ -62,7 +64,8 @@ export async function initCredentialsTypes(): Promise<void> {
* Initialize node types.
*/
export async function initNodeTypes() {
Container.get(LoadNodesAndCredentials).loaded.nodes = {
ScheduleTrigger.prototype.trigger = async () => ({});
const nodes: INodeTypeData = {
'n8n-nodes-base.start': {
type: new Start(),
sourcePath: '',
@@ -75,7 +78,21 @@ export async function initNodeTypes() {
type: new Set(),
sourcePath: '',
},
'n8n-nodes-base.scheduleTrigger': {
type: new ScheduleTrigger(),
sourcePath: '',
},
};
const loader = mock<DirectoryLoader>();
loader.getNode.mockImplementation((nodeType) => {
const node = nodes[`n8n-nodes-base.${nodeType}`];
if (!node) throw new UnrecognizedNodeTypeError('n8n-nodes-base', nodeType);
return node;
});
const loadNodesAndCredentials = Container.get(LoadNodesAndCredentials);
loadNodesAndCredentials.loaders = { 'n8n-nodes-base': loader };
loadNodesAndCredentials.loaded.nodes = nodes;
}
/**
@@ -1,99 +0,0 @@
#!/usr/bin/env node
const path = require('path');
const glob = require('fast-glob');
const uniq = require('lodash/uniq');
const { LoggerProxy, getCredentialsForNode } = require('n8n-workflow');
const { packageDir, writeJSON } = require('./common');
const { loadClassInIsolation } = require('../dist/ClassLoader');
LoggerProxy.init(console);
const loadClass = (sourcePath) => {
try {
const [className] = path.parse(sourcePath).name.split('.');
const filePath = path.resolve(packageDir, sourcePath);
const instance = loadClassInIsolation(filePath, className);
return { instance, sourcePath, className };
} catch (e) {
LoggerProxy.warn(`Failed to load ${sourcePath}: ${e.message}`);
}
};
const generateKnownNodes = async () => {
const nodeClasses = glob
.sync('dist/nodes/**/*.node.js', { cwd: packageDir })
.map(loadClass)
// Ignore node versions
.filter((nodeClass) => nodeClass && !/[vV]\d.node\.js$/.test(nodeClass.sourcePath));
const nodes = {};
const nodesByCredential = {};
for (const { className, sourcePath, instance } of nodeClasses) {
const nodeName = instance.description.name;
nodes[nodeName] = { className, sourcePath };
for (const credential of getCredentialsForNode(instance)) {
if (!nodesByCredential[credential.name]) {
nodesByCredential[credential.name] = [];
}
nodesByCredential[credential.name].push(nodeName);
}
}
LoggerProxy.info(`Detected ${Object.keys(nodes).length} nodes`);
await writeJSON('known/nodes.json', nodes);
return { nodes, nodesByCredential };
};
const generateKnownCredentials = async (nodesByCredential) => {
const credentialClasses = glob
.sync(`dist/credentials/**/*.credentials.js`, { cwd: packageDir })
.map(loadClass)
.filter((data) => !!data);
for (const { instance } of credentialClasses) {
if (Array.isArray(instance.extends)) {
for (const extendedCredential of instance.extends) {
nodesByCredential[extendedCredential] = [
...(nodesByCredential[extendedCredential] ?? []),
...(nodesByCredential[instance.name] ?? []),
];
}
}
}
const credentials = credentialClasses.reduce(
(credentials, { className, sourcePath, instance }) => {
const credentialName = instance.name;
const credential = {
className,
sourcePath,
};
if (Array.isArray(instance.extends)) {
credential.extends = instance.extends;
}
if (nodesByCredential[credentialName]) {
credential.supportedNodes = Array.from(new Set(nodesByCredential[credentialName]));
}
credentials[credentialName] = credential;
return credentials;
},
{},
);
LoggerProxy.info(`Detected ${Object.keys(credentials).length} credentials`);
await writeJSON('known/credentials.json', credentials);
return credentials;
};
(async () => {
const { nodesByCredential } = await generateKnownNodes();
await generateKnownCredentials(nodesByCredential);
})();
@@ -1,6 +1,6 @@
#!/usr/bin/env node
const { LoggerProxy, NodeHelpers } = require('n8n-workflow');
const { LoggerProxy } = require('n8n-workflow');
const { PackageDirectoryLoader } = require('../dist/DirectoryLoader');
const { packageDir, writeJSON } = require('./common');
@@ -33,7 +33,7 @@ function findReferencedMethods(obj, refs = {}, latestName = '') {
const loaderNodeTypes = Object.values(loader.nodeTypes);
const definedMethods = loaderNodeTypes.reduce((acc, cur) => {
NodeHelpers.getVersionedNodeTypeAll(cur.type).forEach((type) => {
loader.getVersionedNodeTypeAll(cur.type).forEach((type) => {
const methods = type.description?.__loadOptionsMethods;
if (!methods) return;
@@ -52,51 +52,21 @@ function findReferencedMethods(obj, refs = {}, latestName = '') {
}, {});
const nodeTypes = loaderNodeTypes
.map((data) => {
const nodeType = NodeHelpers.getVersionedNodeType(data.type);
NodeHelpers.applySpecialNodeParameters(nodeType);
return data.type;
})
.map(({ type }) => type)
.flatMap((nodeType) =>
NodeHelpers.getVersionedNodeTypeAll(nodeType).map((item) => {
loader.getVersionedNodeTypeAll(nodeType).map((item) => {
const { __loadOptionsMethods, ...rest } = item.description;
return rest;
}),
);
const knownCredentials = loader.known.credentials;
const credentialTypes = Object.values(loader.credentialTypes).map((data) => {
const credentialType = data.type;
const supportedNodes = knownCredentials[credentialType.name].supportedNodes ?? [];
if (supportedNodes.length > 0 && credentialType.httpRequestNode) {
credentialType.httpRequestNode.hidden = true;
}
credentialType.supportedNodes = supportedNodes;
if (!credentialType.iconUrl && !credentialType.icon) {
for (const supportedNode of supportedNodes) {
const nodeType = loader.nodeTypes[supportedNode]?.type.description;
if (!nodeType) continue;
if (nodeType.icon) {
credentialType.icon = nodeType.icon;
credentialType.iconColor = nodeType.iconColor;
break;
}
if (nodeType.iconUrl) {
credentialType.iconUrl = nodeType.iconUrl;
break;
}
}
}
return credentialType;
});
const credentialTypes = Object.values(loader.credentialTypes).map(({ type }) => type);
const referencedMethods = findReferencedMethods(nodeTypes);
await Promise.all([
writeJSON('known/nodes.json', loader.known.nodes),
writeJSON('known/credentials.json', loader.known.credentials),
writeJSON('types/credentials.json', credentialTypes),
writeJSON('types/nodes.json', nodeTypes),
writeJSON('methods/defined.json', definedMethods),
@@ -6,9 +6,8 @@
"types": "dist/index.d.ts",
"bin": {
"n8n-copy-icons": "./bin/copy-icons",
"n8n-generate-known": "./bin/generate-known",
"n8n-generate-translations": "./bin/generate-translations",
"n8n-generate-ui-types": "./bin/generate-ui-types"
"n8n-generate-metadata": "./bin/generate-metadata"
},
"scripts": {
"clean": "rimraf dist .turbo",
@@ -1,9 +1,11 @@
import glob from 'fast-glob';
import uniqBy from 'lodash/uniqBy';
import type {
CodexData,
DocumentationLink,
ICredentialType,
ICredentialTypeData,
INodeCredentialDescription,
INodeType,
INodeTypeBaseDescription,
INodeTypeData,
@@ -12,19 +14,15 @@ import type {
IVersionedNodeType,
KnownNodesAndCredentials,
} from 'n8n-workflow';
import {
ApplicationError,
LoggerProxy as Logger,
getCredentialsForNode,
getVersionedNodeTypeAll,
jsonParse,
} from 'n8n-workflow';
import { ApplicationError, LoggerProxy as Logger, NodeHelpers, jsonParse } from 'n8n-workflow';
import { readFileSync } from 'node:fs';
import { readFile } from 'node:fs/promises';
import * as path from 'path';
import { loadClassInIsolation } from './ClassLoader';
import { CUSTOM_NODES_CATEGORY } from './Constants';
import { UnrecognizedCredentialTypeError } from './errors/unrecognized-credential-type.error';
import { UnrecognizedNodeTypeError } from './errors/unrecognized-node-type.error';
import type { n8n } from './Interfaces';
function toJSON(this: ICredentialType) {
@@ -34,11 +32,25 @@ function toJSON(this: ICredentialType) {
};
}
type Codex = {
categories: string[];
subcategories: { [subcategory: string]: string[] };
resources: {
primaryDocumentation: DocumentationLink[];
credentialDocumentation: DocumentationLink[];
};
alias: string[];
};
export type Types = {
nodes: INodeTypeBaseDescription[];
credentials: ICredentialType[];
};
/**
* Base class for loading n8n nodes and credentials from a directory.
* Handles the common functionality for resolving paths, loading classes, and managing node and credential types.
*/
export abstract class DirectoryLoader {
isLazyLoaded = false;
@@ -58,7 +70,7 @@ export abstract class DirectoryLoader {
// Stores the different versions with their individual descriptions
types: Types = { nodes: [], credentials: [] };
protected nodesByCredential: Record<string, string[]> = {};
readonly nodesByCredential: Record<string, string[]> = {};
constructor(
readonly directory: string,
@@ -82,35 +94,40 @@ export abstract class DirectoryLoader {
return path.resolve(this.directory, file);
}
protected loadNodeFromFile(nodeName: string, filePath: string) {
let tempNode: INodeType | IVersionedNodeType;
let nodeVersion = 1;
const isCustom = this.packageName === 'CUSTOM';
private loadClass<T>(sourcePath: string) {
const filePath = this.resolvePath(sourcePath);
const [className] = path.parse(sourcePath).name.split('.');
try {
tempNode = loadClassInIsolation(filePath, nodeName);
this.addCodex({ node: tempNode, filePath, isCustom });
return loadClassInIsolation<T>(filePath, className);
} catch (error) {
Logger.error(
`Error loading node "${nodeName}" from: "${filePath}" - ${(error as Error).message}`,
);
throw error;
throw error instanceof TypeError
? new ApplicationError(
'Class could not be found. Please check if the class is named correctly.',
{ extra: { className } },
)
: error;
}
}
const fullNodeName = `${this.packageName}.${tempNode.description.name}`;
/** Loads a nodes class from a file, fixes icons, and augments the codex */
loadNodeFromFile(filePath: string) {
const tempNode = this.loadClass<INodeType | IVersionedNodeType>(filePath);
this.addCodex(tempNode, filePath);
if (this.includeNodes.length && !this.includeNodes.includes(fullNodeName)) {
const nodeType = tempNode.description.name;
const fullNodeType = `${this.packageName}.${nodeType}`;
if (this.includeNodes.length && !this.includeNodes.includes(fullNodeType)) {
return;
}
if (this.excludeNodes.includes(fullNodeName)) {
if (this.excludeNodes.includes(fullNodeType)) {
return;
}
tempNode.description.name = fullNodeName;
this.fixIconPaths(tempNode.description, filePath);
let nodeVersion = 1;
if ('nodeVersions' in tempNode) {
for (const versionNode of Object.values(tempNode.nodeVersions)) {
this.fixIconPaths(versionNode.description, filePath);
@@ -118,85 +135,93 @@ export abstract class DirectoryLoader {
for (const version of Object.values(tempNode.nodeVersions)) {
this.addLoadOptionsMethods(version);
NodeHelpers.applySpecialNodeParameters(version);
}
const currentVersionNode = tempNode.nodeVersions[tempNode.currentVersion];
this.addCodex({ node: currentVersionNode, filePath, isCustom });
this.addCodex(currentVersionNode, filePath);
nodeVersion = tempNode.currentVersion;
if (currentVersionNode.hasOwnProperty('executeSingle')) {
throw new ApplicationError(
'"executeSingle" has been removed. Please update the code of this node to use "execute" instead.',
{ extra: { nodeName: `${this.packageName}.${nodeName}` } },
{ extra: { nodeType: fullNodeType } },
);
}
} else {
this.addLoadOptionsMethods(tempNode);
// Short renaming to avoid type issues
NodeHelpers.applySpecialNodeParameters(tempNode);
// Short renaming to avoid type issues
nodeVersion = Array.isArray(tempNode.description.version)
? tempNode.description.version.slice(-1)[0]
: tempNode.description.version;
}
this.known.nodes[fullNodeName] = {
className: nodeName,
this.known.nodes[nodeType] = {
className: tempNode.constructor.name,
sourcePath: filePath,
};
this.nodeTypes[fullNodeName] = {
this.nodeTypes[nodeType] = {
type: tempNode,
sourcePath: filePath,
};
this.loadedNodes.push({
name: fullNodeName,
name: nodeType,
version: nodeVersion,
});
getVersionedNodeTypeAll(tempNode).forEach(({ description }) => {
this.getVersionedNodeTypeAll(tempNode).forEach(({ description }) => {
this.types.nodes.push(description);
});
for (const credential of getCredentialsForNode(tempNode)) {
for (const credential of this.getCredentialsForNode(tempNode)) {
if (!this.nodesByCredential[credential.name]) {
this.nodesByCredential[credential.name] = [];
}
this.nodesByCredential[credential.name].push(fullNodeName);
this.nodesByCredential[credential.name].push(nodeType);
}
}
protected loadCredentialFromFile(credentialClassName: string, filePath: string): void {
let tempCredential: ICredentialType;
try {
tempCredential = loadClassInIsolation(filePath, credentialClassName);
// Add serializer method "toJSON" to the class so that authenticate method (if defined)
// gets mapped to the authenticate attribute before it is sent to the client.
// The authenticate property is used by the client to decide whether or not to
// include the credential type in the predefined credentials (HTTP node)
Object.assign(tempCredential, { toJSON });
this.fixIconPaths(tempCredential, filePath);
} catch (e) {
if (e instanceof TypeError) {
throw new ApplicationError(
'Class could not be found. Please check if the class is named correctly.',
{ extra: { credentialClassName } },
);
} else {
throw e;
}
getNode(nodeType: string) {
const {
nodeTypes,
known: { nodes: knownNodes },
} = this;
if (!(nodeType in nodeTypes) && nodeType in knownNodes) {
const { sourcePath } = knownNodes[nodeType];
this.loadNodeFromFile(sourcePath);
}
this.known.credentials[tempCredential.name] = {
className: credentialClassName,
if (nodeType in nodeTypes) {
return nodeTypes[nodeType];
}
throw new UnrecognizedNodeTypeError(this.packageName, nodeType);
}
/** Loads a credential class from a file, and fixes icons */
loadCredentialFromFile(filePath: string): void {
const tempCredential = this.loadClass<ICredentialType>(filePath);
// Add serializer method "toJSON" to the class so that authenticate method (if defined)
// gets mapped to the authenticate attribute before it is sent to the client.
// The authenticate property is used by the client to decide whether or not to
// include the credential type in the predefined credentials (HTTP node)
Object.assign(tempCredential, { toJSON });
this.fixIconPaths(tempCredential, filePath);
const credentialType = tempCredential.name;
this.known.credentials[credentialType] = {
className: tempCredential.constructor.name,
sourcePath: filePath,
extends: tempCredential.extends,
supportedNodes: this.nodesByCredential[tempCredential.name],
supportedNodes: this.nodesByCredential[credentialType],
};
this.credentialTypes[tempCredential.name] = {
this.credentialTypes[credentialType] = {
type: tempCredential,
sourcePath: filePath,
};
@@ -204,40 +229,79 @@ export abstract class DirectoryLoader {
this.types.credentials.push(tempCredential);
}
getCredential(credentialType: string) {
const {
credentialTypes,
known: { credentials: knownCredentials },
} = this;
if (!(credentialType in credentialTypes) && credentialType in knownCredentials) {
const { sourcePath } = knownCredentials[credentialType];
this.loadCredentialFromFile(sourcePath);
}
if (credentialType in credentialTypes) {
return credentialTypes[credentialType];
}
throw new UnrecognizedCredentialTypeError(credentialType);
}
/**
* Returns an array of credential descriptions that are supported by a node.
* For versioned nodes, combines and deduplicates credentials from all versions.
*/
getCredentialsForNode(object: IVersionedNodeType | INodeType): INodeCredentialDescription[] {
if ('nodeVersions' in object) {
const credentials = Object.values(object.nodeVersions).flatMap(
({ description }) => description.credentials ?? [],
);
return uniqBy(credentials, 'name');
}
return object.description.credentials ?? [];
}
/**
* Returns an array of all versions of a node type.
* For non-versioned nodes, returns an array with just that node.
* For versioned nodes, returns all available versions.
*/
getVersionedNodeTypeAll(object: IVersionedNodeType | INodeType): INodeType[] {
if ('nodeVersions' in object) {
const nodeVersions = Object.values(object.nodeVersions).map((element) => {
element.description.name = object.description.name;
element.description.codex = object.description.codex;
return element;
});
return uniqBy(nodeVersions.reverse(), (node) => {
const { version } = node.description;
return Array.isArray(version) ? version.join(',') : version.toString();
});
}
return [object];
}
/**
* Retrieves `categories`, `subcategories` and alias (if defined)
* from the codex data for the node at the given file path.
*/
private getCodex(filePath: string): CodexData {
type Codex = {
categories: string[];
subcategories: { [subcategory: string]: string[] };
resources: {
primaryDocumentation: DocumentationLink[];
credentialDocumentation: DocumentationLink[];
};
alias: string[];
};
const codexFilePath = `${filePath}on`; // .js to .json
const codexFilePath = this.resolvePath(`${filePath}on`); // .js to .json
const {
categories,
subcategories,
resources: allResources,
resources: { primaryDocumentation, credentialDocumentation },
alias,
} = module.require(codexFilePath) as Codex;
const resources = {
primaryDocumentation: allResources.primaryDocumentation,
credentialDocumentation: allResources.credentialDocumentation,
};
return {
...(categories && { categories }),
...(subcategories && { subcategories }),
...(resources && { resources }),
...(alias && { alias }),
resources: {
primaryDocumentation,
credentialDocumentation,
},
};
}
@@ -245,15 +309,8 @@ export abstract class DirectoryLoader {
* Adds a node codex `categories` and `subcategories` (if defined)
* to a node description `codex` property.
*/
private addCodex({
node,
filePath,
isCustom,
}: {
node: INodeType | IVersionedNodeType;
filePath: string;
isCustom: boolean;
}) {
private addCodex(node: INodeType | IVersionedNodeType, filePath: string) {
const isCustom = this.packageName === 'CUSTOM';
try {
let codex;
@@ -273,7 +330,7 @@ export abstract class DirectoryLoader {

node.description.codex = codex;
} catch {
Logger.debug(`No codex available for: ${filePath.split('/').pop() ?? ''}`);
Logger.debug(`No codex available for: ${node.description.name}`);

if (isCustom) {
node.description.codex = {
@@ -291,8 +348,7 @@ export abstract class DirectoryLoader {

private getIconPath(icon: string, filePath: string) {
const iconPath = path.join(path.dirname(filePath), icon.replace('file:', ''));
const relativePath = path.relative(this.directory, iconPath);
return `icons/${this.packageName}/${relativePath}`;
return `icons/${this.packageName}/${iconPath}`;
}

private fixIconPaths(
@@ -305,14 +361,14 @@
if (typeof icon === 'string') {
if (icon.startsWith('file:')) {
obj.iconUrl = this.getIconPath(icon, filePath);
delete obj.icon;
obj.icon = undefined;
}
} else if (icon.light.startsWith('file:') && icon.dark.startsWith('file:')) {
obj.iconUrl = {
light: this.getIconPath(icon.light, filePath),
dark: this.getIconPath(icon.dark, filePath),
};
delete obj.icon;
obj.icon = undefined;
}
}
}
@@ -331,8 +387,7 @@ export class CustomDirectoryLoader extends DirectoryLoader {
});

for (const nodePath of nodes) {
const [fileName] = path.parse(nodePath).name.split('.');
this.loadNodeFromFile(fileName, nodePath);
this.loadNodeFromFile(nodePath);
}

const credentials = await glob('**/*.credentials.js', {
@@ -341,8 +396,7 @@ export class CustomDirectoryLoader extends DirectoryLoader {
});

for (const credentialPath of credentials) {
const [fileName] = path.parse(credentialPath).name.split('.');
this.loadCredentialFromFile(fileName, credentialPath);
this.loadCredentialFromFile(credentialPath);
}
}
}
@@ -363,33 +417,55 @@ export class PackageDirectoryLoader extends DirectoryLoader {
const { nodes, credentials } = n8n;

if (Array.isArray(nodes)) {
for (const node of nodes) {
const filePath = this.resolvePath(node);
const [nodeName] = path.parse(node).name.split('.');

this.loadNodeFromFile(nodeName, filePath);
for (const nodePath of nodes) {
this.loadNodeFromFile(nodePath);
}
}

if (Array.isArray(credentials)) {
for (const credential of credentials) {
const filePath = this.resolvePath(credential);
const [credentialName] = path.parse(credential).name.split('.');

this.loadCredentialFromFile(credentialName, filePath);
for (const credentialPath of credentials) {
this.loadCredentialFromFile(credentialPath);
}
}

this.inferSupportedNodes();

Logger.debug(`Loaded all credentials and nodes from ${this.packageName}`, {
credentials: credentials?.length ?? 0,
nodes: nodes?.length ?? 0,
});
}
protected readJSONSync<T>(file: string): T {
const filePath = this.resolvePath(file);
const fileString = readFileSync(filePath, 'utf8');
private inferSupportedNodes() {
const knownCredentials = this.known.credentials;
for (const { type: credentialType } of Object.values(this.credentialTypes)) {
const supportedNodes = knownCredentials[credentialType.name].supportedNodes ?? [];
if (supportedNodes.length > 0 && credentialType.httpRequestNode) {
credentialType.httpRequestNode.hidden = true;
}

credentialType.supportedNodes = supportedNodes;

if (!credentialType.iconUrl && !credentialType.icon) {
for (const supportedNode of supportedNodes) {
const nodeDescription = this.nodeTypes[supportedNode]?.type.description;

if (!nodeDescription) continue;
if (nodeDescription.icon) {
credentialType.icon = nodeDescription.icon;
credentialType.iconColor = nodeDescription.iconColor;
break;
}
if (nodeDescription.iconUrl) {
credentialType.iconUrl = nodeDescription.iconUrl;
break;
}
}
}
}
}
private parseJSON<T>(fileString: string, filePath: string): T {
try {
return jsonParse<T>(fileString);
} catch (error) {

@@ -397,15 +473,16 @@ export class PackageDirectoryLoader extends DirectoryLoader {
}
}

protected readJSONSync<T>(file: string): T {
const filePath = this.resolvePath(file);
const fileString = readFileSync(filePath, 'utf8');
return this.parseJSON<T>(fileString, filePath);
}

protected async readJSON<T>(file: string): Promise<T> {
const filePath = this.resolvePath(file);
const fileString = await readFile(filePath, 'utf8');

try {
return jsonParse<T>(fileString);
} catch (error) {
throw new ApplicationError('Failed to parse JSON', { extra: { filePath } });
}
return this.parseJSON<T>(fileString, filePath);
}
}
@@ -415,10 +492,7 @@ export class PackageDirectoryLoader extends DirectoryLoader {
export class LazyPackageDirectoryLoader extends PackageDirectoryLoader {
override async loadAll() {
try {
const knownNodes: typeof this.known.nodes = await this.readJSON('dist/known/nodes.json');
for (const nodeName in knownNodes) {
this.known.nodes[`${this.packageName}.${nodeName}`] = knownNodes[nodeName];
}
this.known.nodes = await this.readJSON('dist/known/nodes.json');
this.known.credentials = await this.readJSON('dist/known/credentials.json');

this.types.nodes = await this.readJSON('dist/types/nodes.json');
@@ -426,9 +500,10 @@ export class LazyPackageDirectoryLoader extends PackageDirectoryLoader {

if (this.includeNodes.length) {
const allowedNodes: typeof this.known.nodes = {};
for (const nodeName of this.includeNodes) {
if (nodeName in this.known.nodes) {
allowedNodes[nodeName] = this.known.nodes[nodeName];
for (const fullNodeType of this.includeNodes) {
const [packageName, nodeType] = fullNodeType.split('.');
if (packageName === this.packageName && nodeType in this.known.nodes) {
allowedNodes[nodeType] = this.known.nodes[nodeType];
}
}
this.known.nodes = allowedNodes;
@@ -439,8 +514,11 @@ export class LazyPackageDirectoryLoader extends PackageDirectoryLoader {
}

if (this.excludeNodes.length) {
for (const nodeName of this.excludeNodes) {
delete this.known.nodes[nodeName];
for (const fullNodeType of this.excludeNodes) {
const [packageName, nodeType] = fullNodeType.split('.');
if (packageName === this.packageName) {
delete this.known.nodes[nodeType];
}
}

this.types.nodes = this.types.nodes.filter(
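For orientation, a minimal sketch of how the reworked loader above might be driven end to end; the package directory and credential name are invented for illustration, and the call shapes follow the methods added in this hunk (loadAll, getCredential).

    import { LazyPackageDirectoryLoader } from 'n8n-core';

    // Hypothetical community package directory and credential name.
    async function inspectPackage() {
    	const loader = new LazyPackageDirectoryLoader('/data/custom/n8n-nodes-example');
    	await loader.loadAll();

    	// getCredential() loads the class on demand from `known.credentials`
    	// and throws UnrecognizedCredentialTypeError for unknown names.
    	const { type } = loader.getCredential('exampleApi');
    	console.log(type.displayName, type.supportedNodes);
    }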
@@ -3,3 +3,5 @@ export { DisallowedFilepathError } from './disallowed-filepath.error';
export { InvalidModeError } from './invalid-mode.error';
export { InvalidManagerError } from './invalid-manager.error';
export { InvalidExecutionMetadataError } from './invalid-execution-metadata.error';
export { UnrecognizedCredentialTypeError } from './unrecognized-credential-type.error';
export { UnrecognizedNodeTypeError } from './unrecognized-node-type.error';
@@ -0,0 +1,9 @@
import { ApplicationError } from 'n8n-workflow';

export class UnrecognizedCredentialTypeError extends ApplicationError {
severity = 'warning';

constructor(credentialType: string) {
super(`Unrecognized credential type: ${credentialType}`);
}
}
@@ -3,7 +3,7 @@ import { ApplicationError } from 'n8n-workflow';
export class UnrecognizedNodeTypeError extends ApplicationError {
severity = 'warning';

constructor(nodeType: string) {
super(`Unrecognized node type: ${nodeType}".`);
constructor(packageName: string, nodeType: string) {
super(`Unrecognized node type: ${packageName}.${nodeType}`);
}
}
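A hedged sketch of how a caller might react to the credential error class above; the loader parameter and credential name are placeholders, not part of the change itself.

    import type { DirectoryLoader } from 'n8n-core';
    import { UnrecognizedCredentialTypeError } from 'n8n-core';

    function findCredentialDisplayName(loader: DirectoryLoader, credentialType: string) {
    	try {
    		return loader.getCredential(credentialType).type.displayName;
    	} catch (error) {
    		// severity 'warning' keeps these out of error-level alerting.
    		if (error instanceof UnrecognizedCredentialTypeError) return undefined;
    		throw error;
    	}
    }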
@@ -5,7 +5,6 @@ export * from './ActiveWorkflows';
export * from './BinaryData/BinaryData.service';
export * from './BinaryData/types';
export { Cipher } from './Cipher';
export * from './ClassLoader';
export * from './Constants';
export * from './Credentials';
export * from './DirectoryLoader';
52 packages/core/test/ClassLoader.test.ts Normal file
@@ -0,0 +1,52 @@
import vm from 'vm';

import { loadClassInIsolation } from '@/ClassLoader';

describe('ClassLoader', () => {
const filePath = '/path/to/TestClass.js';
const className = 'TestClass';

class TestClass {
getValue(): string {
return 'test value';
}
}

jest.spyOn(vm, 'createContext').mockReturnValue({});

const runInContext = jest.fn().mockImplementation(() => new TestClass());
const scriptSpy = jest.spyOn(vm, 'Script').mockImplementation(function (this: vm.Script) {
this.runInContext = runInContext;
return this;
});

beforeEach(() => {
jest.clearAllMocks();
});

it('should create script with correct require statement', () => {
const instance = loadClassInIsolation<TestClass>(filePath, className);

expect(scriptSpy).toHaveBeenCalledWith(`new (require('${filePath}').${className})()`);
expect(instance.getValue()).toBe('test value');
});

it('should handle Windows-style paths', () => {
const originalPlatform = process.platform;
Object.defineProperty(process, 'platform', { value: 'win32' });

loadClassInIsolation('/path\\to\\TestClass.js', 'TestClass');

expect(scriptSpy).toHaveBeenCalledWith(`new (require('${filePath}').${className})()`);

Object.defineProperty(process, 'platform', { value: originalPlatform });
});

it('should throw error when script execution fails', () => {
runInContext.mockImplementationOnce(() => {
throw new Error('Script execution failed');
});

expect(() => loadClassInIsolation(filePath, className)).toThrow('Script execution failed');
});
});
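For context, a simplified sketch of the behaviour the test above pins down for loadClassInIsolation: the class is instantiated via a require expression evaluated in a fresh vm context. This is an assumption-level sketch, not the real implementation (the Windows path normalization exercised by the second test is omitted).

    import vm from 'vm';

    // Sketch only: evaluate `new (require(path).ClassName)()` in an isolated context.
    function loadClassSketch<T>(filePath: string, className: string): T {
    	const context = vm.createContext({ require });
    	const script = new vm.Script(`new (require('${filePath}').${className})()`);
    	return script.runInContext(context) as T;
    }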
781 packages/core/test/DirectoryLoader.test.ts Normal file
@ -0,0 +1,781 @@
|
|||
import { mock } from 'jest-mock-extended';
|
||||
import type {
|
||||
ICredentialType,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
IVersionedNodeType,
|
||||
} from 'n8n-workflow';
|
||||
import { deepCopy } from 'n8n-workflow';
|
||||
import fs from 'node:fs';
|
||||
import fsPromises from 'node:fs/promises';
|
||||
|
||||
jest.mock('node:fs');
|
||||
jest.mock('node:fs/promises');
|
||||
const mockFs = mock<typeof fs>();
|
||||
const mockFsPromises = mock<typeof fsPromises>();
|
||||
fs.readFileSync = mockFs.readFileSync;
|
||||
fsPromises.readFile = mockFsPromises.readFile;
|
||||
|
||||
jest.mock('fast-glob', () => async (pattern: string) => {
|
||||
return pattern.endsWith('.node.js')
|
||||
? ['dist/Node1/Node1.node.js', 'dist/Node2/Node2.node.js']
|
||||
: ['dist/Credential1.js'];
|
||||
});
|
||||
|
||||
import * as classLoader from '@/ClassLoader';
|
||||
import {
|
||||
CustomDirectoryLoader,
|
||||
PackageDirectoryLoader,
|
||||
LazyPackageDirectoryLoader,
|
||||
} from '@/DirectoryLoader';
|
||||
|
||||
describe('DirectoryLoader', () => {
|
||||
const directory = '/not/a/real/path';
|
||||
const packageJson = JSON.stringify({
|
||||
name: 'n8n-nodes-testing',
|
||||
n8n: {
|
||||
credentials: ['dist/Credential1.js'],
|
||||
nodes: ['dist/Node1/Node1.node.js', 'dist/Node2/Node2.node.js'],
|
||||
},
|
||||
});
|
||||
|
||||
const createNode = (name: string, credential?: string) =>
|
||||
mock<INodeType>({
|
||||
description: {
|
||||
name,
|
||||
version: 1,
|
||||
icon: `file:${name}.svg`,
|
||||
iconUrl: undefined,
|
||||
credentials: credential ? [{ name: credential }] : [],
|
||||
properties: [],
|
||||
},
|
||||
});
|
||||
|
||||
const createCredential = (name: string) =>
|
||||
mock<ICredentialType>({
|
||||
name,
|
||||
icon: `file:${name}.svg`,
|
||||
iconUrl: undefined,
|
||||
extends: undefined,
|
||||
properties: [],
|
||||
});
|
||||
|
||||
let mockCredential1: ICredentialType, mockNode1: INodeType, mockNode2: INodeType;
|
||||
|
||||
beforeEach(() => {
|
||||
mockCredential1 = createCredential('credential1');
|
||||
mockNode1 = createNode('node1', 'credential1');
|
||||
mockNode2 = createNode('node2');
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
//@ts-expect-error overwrite a readonly property
|
||||
classLoader.loadClassInIsolation = jest.fn((_: string, className: string) => {
|
||||
if (className === 'Node1') return mockNode1;
|
||||
if (className === 'Node2') return mockNode2;
|
||||
if (className === 'Credential1') return mockCredential1;
|
||||
throw new Error(`${className} is invalid`);
|
||||
});
|
||||
|
||||
describe('CustomDirectoryLoader', () => {
|
||||
it('should load custom nodes and credentials', async () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
expect(loader.packageName).toEqual('CUSTOM');
|
||||
|
||||
await loader.loadAll();
|
||||
|
||||
expect(loader.isLazyLoaded).toBe(false);
|
||||
expect(mockFsPromises.readFile).not.toHaveBeenCalled();
|
||||
expect(classLoader.loadClassInIsolation).toHaveBeenCalledTimes(3);
|
||||
|
||||
expect(loader.nodesByCredential).toEqual({ credential1: ['node1'] });
|
||||
expect(loader.credentialTypes).toEqual({
|
||||
credential1: { sourcePath: 'dist/Credential1.js', type: mockCredential1 },
|
||||
});
|
||||
expect(loader.nodeTypes).toEqual({
|
||||
node1: { sourcePath: 'dist/Node1/Node1.node.js', type: mockNode1 },
|
||||
node2: { sourcePath: 'dist/Node2/Node2.node.js', type: mockNode2 },
|
||||
});
|
||||
expect(mockCredential1.iconUrl).toBe('icons/CUSTOM/dist/credential1.svg');
|
||||
expect(mockNode1.description.iconUrl).toBe('icons/CUSTOM/dist/Node1/node1.svg');
|
||||
expect(mockNode2.description.iconUrl).toBe('icons/CUSTOM/dist/Node2/node2.svg');
|
||||
|
||||
expect(mockFs.readFileSync).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('PackageDirectoryLoader', () => {
|
||||
it('should load nodes and credentials from an installed package', async () => {
|
||||
mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson);
|
||||
|
||||
const loader = new PackageDirectoryLoader(directory);
|
||||
expect(loader.packageName).toEqual('n8n-nodes-testing');
|
||||
|
||||
await loader.loadAll();
|
||||
|
||||
expect(loader.isLazyLoaded).toBe(false);
|
||||
expect(mockFsPromises.readFile).not.toHaveBeenCalled();
|
||||
expect(classLoader.loadClassInIsolation).toHaveBeenCalledTimes(3);
|
||||
|
||||
expect(loader.nodesByCredential).toEqual({ credential1: ['node1'] });
|
||||
expect(loader.credentialTypes).toEqual({
|
||||
credential1: { sourcePath: 'dist/Credential1.js', type: mockCredential1 },
|
||||
});
|
||||
expect(loader.nodeTypes).toEqual({
|
||||
node1: { sourcePath: 'dist/Node1/Node1.node.js', type: mockNode1 },
|
||||
node2: { sourcePath: 'dist/Node2/Node2.node.js', type: mockNode2 },
|
||||
});
|
||||
expect(mockCredential1.iconUrl).toBe('icons/n8n-nodes-testing/dist/credential1.svg');
|
||||
expect(mockNode1.description.iconUrl).toBe('icons/n8n-nodes-testing/dist/Node1/node1.svg');
|
||||
expect(mockNode2.description.iconUrl).toBe('icons/n8n-nodes-testing/dist/Node2/node2.svg');
|
||||
});
|
||||
|
||||
it('should throw error when package.json is missing', async () => {
|
||||
mockFs.readFileSync.mockImplementationOnce(() => {
|
||||
throw new Error('ENOENT');
|
||||
});
|
||||
|
||||
expect(() => new PackageDirectoryLoader(directory)).toThrow();
|
||||
});
|
||||
|
||||
it('should throw error when package.json is invalid', async () => {
|
||||
mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue('invalid json');
|
||||
|
||||
expect(() => new PackageDirectoryLoader(directory)).toThrow('Failed to parse JSON');
|
||||
});
|
||||
|
||||
it('should do nothing if package.json has no n8n field', async () => {
|
||||
mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(
|
||||
JSON.stringify({
|
||||
name: 'n8n-nodes-testing',
|
||||
}),
|
||||
);
|
||||
|
||||
const loader = new PackageDirectoryLoader(directory);
|
||||
await loader.loadAll();
|
||||
|
||||
expect(loader.nodeTypes).toEqual({});
|
||||
expect(loader.credentialTypes).toEqual({});
|
||||
expect(classLoader.loadClassInIsolation).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should hide httpRequestNode property when credential has supported nodes', async () => {
|
||||
mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson);
|
||||
mockCredential1.httpRequestNode = mock<ICredentialType['httpRequestNode']>({ hidden: false });
|
||||
|
||||
const loader = new PackageDirectoryLoader(directory);
|
||||
await loader.loadAll();
|
||||
|
||||
expect(mockCredential1.httpRequestNode?.hidden).toBe(true);
|
||||
});
|
||||
|
||||
it('should not modify httpRequestNode when credential has no supported nodes', async () => {
|
||||
mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson);
|
||||
mockCredential1.httpRequestNode = mock<ICredentialType['httpRequestNode']>({ hidden: false });
|
||||
mockNode1.description.credentials = [];
|
||||
|
||||
const loader = new PackageDirectoryLoader(directory);
|
||||
await loader.loadAll();
|
||||
|
||||
expect(mockCredential1.httpRequestNode?.hidden).toBe(false);
|
||||
});
|
||||
|
||||
it('should inherit iconUrl from supported node when credential has no icon', async () => {
|
||||
mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson);
|
||||
mockCredential1.icon = undefined;
|
||||
|
||||
const loader = new PackageDirectoryLoader(directory);
|
||||
await loader.loadAll();
|
||||
|
||||
expect(mockCredential1.supportedNodes).toEqual(['node1']);
|
||||
expect(mockCredential1.iconUrl).toBe(mockNode1.description.iconUrl);
|
||||
});
|
||||
});
|
||||
|
||||
describe('LazyPackageDirectoryLoader', () => {
|
||||
it('should skip loading nodes and credentials from a lazy-loadable package', async () => {
|
||||
mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson);
|
||||
mockFsPromises.readFile.mockResolvedValue('[]');
|
||||
|
||||
const loader = new LazyPackageDirectoryLoader(directory);
|
||||
expect(loader.packageName).toEqual('n8n-nodes-testing');
|
||||
|
||||
await loader.loadAll();
|
||||
|
||||
expect(loader.isLazyLoaded).toBe(true);
|
||||
expect(mockFsPromises.readFile).toHaveBeenCalledTimes(4);
|
||||
expect(classLoader.loadClassInIsolation).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should fall back to non-lazy loading if any json file fails to parse', async () => {
|
||||
mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson);
|
||||
mockFsPromises.readFile.mockRejectedValue(new Error('Failed to read file'));
|
||||
|
||||
const loader = new LazyPackageDirectoryLoader(directory);
|
||||
await loader.loadAll();
|
||||
|
||||
expect(loader.isLazyLoaded).toBe(false);
|
||||
expect(mockFsPromises.readFile).toHaveBeenCalled();
|
||||
expect(classLoader.loadClassInIsolation).toHaveBeenCalledTimes(3);
|
||||
});
|
||||
|
||||
it('should only load included nodes when includeNodes is set', async () => {
|
||||
mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson);
|
||||
|
||||
mockFsPromises.readFile.mockImplementation(async (path) => {
|
||||
if (typeof path !== 'string') throw new Error('Invalid path');
|
||||
|
||||
if (path.endsWith('known/nodes.json')) {
|
||||
return JSON.stringify({
|
||||
node1: { className: 'Node1', sourcePath: 'dist/Node1/Node1.node.js' },
|
||||
node2: { className: 'Node2', sourcePath: 'dist/Node2/Node2.node.js' },
|
||||
});
|
||||
}
|
||||
if (path.endsWith('known/credentials.json')) {
|
||||
return JSON.stringify({});
|
||||
}
|
||||
if (path.endsWith('types/nodes.json')) {
|
||||
return JSON.stringify([
|
||||
{ name: 'n8n-nodes-testing.node1' },
|
||||
{ name: 'n8n-nodes-testing.node2' },
|
||||
]);
|
||||
}
|
||||
if (path.endsWith('types/credentials.json')) {
|
||||
return JSON.stringify([]);
|
||||
}
|
||||
throw new Error('File not found');
|
||||
});
|
||||
|
||||
const loader = new LazyPackageDirectoryLoader(directory, [], ['n8n-nodes-testing.node1']);
|
||||
await loader.loadAll();
|
||||
|
||||
expect(loader.isLazyLoaded).toBe(true);
|
||||
expect(loader.known.nodes).toEqual({
|
||||
node1: { className: 'Node1', sourcePath: 'dist/Node1/Node1.node.js' },
|
||||
});
|
||||
expect(loader.types.nodes).toHaveLength(1);
|
||||
expect(loader.types.nodes[0].name).toBe('n8n-nodes-testing.node1');
|
||||
expect(classLoader.loadClassInIsolation).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should load no nodes when includeNodes does not match any nodes', async () => {
|
||||
mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson);
|
||||
|
||||
mockFsPromises.readFile.mockImplementation(async (path) => {
|
||||
if (typeof path !== 'string') throw new Error('Invalid path');
|
||||
|
||||
if (path.endsWith('known/nodes.json')) {
|
||||
return JSON.stringify({
|
||||
node1: { className: 'Node1', sourcePath: 'dist/Node1/Node1.node.js' },
|
||||
node2: { className: 'Node2', sourcePath: 'dist/Node2/Node2.node.js' },
|
||||
});
|
||||
}
|
||||
if (path.endsWith('known/credentials.json')) {
|
||||
return JSON.stringify({});
|
||||
}
|
||||
if (path.endsWith('types/nodes.json')) {
|
||||
return JSON.stringify([
|
||||
{ name: 'n8n-nodes-testing.node1' },
|
||||
{ name: 'n8n-nodes-testing.node2' },
|
||||
]);
|
||||
}
|
||||
if (path.endsWith('types/credentials.json')) {
|
||||
return JSON.stringify([]);
|
||||
}
|
||||
throw new Error('File not found');
|
||||
});
|
||||
|
||||
const loader = new LazyPackageDirectoryLoader(
|
||||
directory,
|
||||
[],
|
||||
['n8n-nodes-testing.nonexistent'],
|
||||
);
|
||||
await loader.loadAll();
|
||||
|
||||
expect(loader.isLazyLoaded).toBe(true);
|
||||
expect(loader.known.nodes).toEqual({});
|
||||
expect(loader.types.nodes).toHaveLength(0);
|
||||
expect(classLoader.loadClassInIsolation).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should exclude specified nodes when excludeNodes is set', async () => {
|
||||
mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson);
|
||||
|
||||
mockFsPromises.readFile.mockImplementation(async (path) => {
|
||||
if (typeof path !== 'string') throw new Error('Invalid path');
|
||||
|
||||
if (path.endsWith('known/nodes.json')) {
|
||||
return JSON.stringify({
|
||||
node1: { className: 'Node1', sourcePath: 'dist/Node1/Node1.node.js' },
|
||||
node2: { className: 'Node2', sourcePath: 'dist/Node2/Node2.node.js' },
|
||||
});
|
||||
}
|
||||
if (path.endsWith('known/credentials.json')) {
|
||||
return JSON.stringify({});
|
||||
}
|
||||
if (path.endsWith('types/nodes.json')) {
|
||||
return JSON.stringify([
|
||||
{ name: 'n8n-nodes-testing.node1' },
|
||||
{ name: 'n8n-nodes-testing.node2' },
|
||||
]);
|
||||
}
|
||||
if (path.endsWith('types/credentials.json')) {
|
||||
return JSON.stringify([]);
|
||||
}
|
||||
throw new Error('File not found');
|
||||
});
|
||||
|
||||
const loader = new LazyPackageDirectoryLoader(directory, ['n8n-nodes-testing.node1']);
|
||||
await loader.loadAll();
|
||||
|
||||
expect(loader.isLazyLoaded).toBe(true);
|
||||
expect(loader.known.nodes).toEqual({
|
||||
node2: { className: 'Node2', sourcePath: 'dist/Node2/Node2.node.js' },
|
||||
});
|
||||
expect(loader.types.nodes).toHaveLength(1);
|
||||
expect(loader.types.nodes[0].name).toBe('n8n-nodes-testing.node2');
|
||||
expect(classLoader.loadClassInIsolation).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('reset()', () => {
|
||||
it('should reset all properties to their initial state', async () => {
|
||||
mockFs.readFileSync.calledWith(`${directory}/package.json`).mockReturnValue(packageJson);
|
||||
|
||||
const loader = new PackageDirectoryLoader(directory);
|
||||
await loader.loadAll();
|
||||
|
||||
// Verify loader has loaded data
|
||||
expect(loader.nodeTypes).not.toEqual({});
|
||||
expect(loader.credentialTypes).not.toEqual({});
|
||||
expect(loader.types.nodes.length).toBeGreaterThan(0);
|
||||
expect(loader.types.credentials.length).toBeGreaterThan(0);
|
||||
expect(loader.loadedNodes.length).toBeGreaterThan(0);
|
||||
expect(Object.keys(loader.known.nodes).length).toBeGreaterThan(0);
|
||||
expect(Object.keys(loader.known.credentials).length).toBeGreaterThan(0);
|
||||
|
||||
// Reset the loader
|
||||
loader.reset();
|
||||
|
||||
// Verify all properties are reset
|
||||
expect(loader.nodeTypes).toEqual({});
|
||||
expect(loader.credentialTypes).toEqual({});
|
||||
expect(loader.types.nodes).toEqual([]);
|
||||
expect(loader.types.credentials).toEqual([]);
|
||||
expect(loader.loadedNodes).toEqual([]);
|
||||
expect(loader.known.nodes).toEqual({});
|
||||
expect(loader.known.credentials).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('getVersionedNodeTypeAll', () => {
|
||||
it('should return array with single node for non-versioned node', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const node = createNode('node1');
|
||||
|
||||
const result = loader.getVersionedNodeTypeAll(node);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0]).toBe(node);
|
||||
});
|
||||
|
||||
it('should return all versions of a versioned node', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const nodeV1 = createNode('test');
|
||||
const nodeV2 = createNode('test');
|
||||
nodeV1.description.version = 1;
|
||||
nodeV2.description.version = 2;
|
||||
|
||||
const versionedNode = mock<IVersionedNodeType>({
|
||||
description: { name: 'test', codex: {} },
|
||||
currentVersion: 2,
|
||||
nodeVersions: {
|
||||
1: nodeV1,
|
||||
2: nodeV2,
|
||||
},
|
||||
});
|
||||
|
||||
const result = loader.getVersionedNodeTypeAll(versionedNode);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result).toEqual([nodeV2, nodeV1]);
|
||||
expect(result[0].description.name).toBe('test');
|
||||
expect(result[1].description.name).toBe('test');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getCredentialsForNode', () => {
|
||||
it('should return empty array if node has no credentials', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const node = createNode('node1');
|
||||
|
||||
const result = loader.getCredentialsForNode(node);
|
||||
|
||||
expect(Array.isArray(result)).toBe(true);
|
||||
expect(result.length).toEqual(0);
|
||||
});
|
||||
|
||||
it('should return credentials for non-versioned node', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const node = createNode('node1', 'testCred');
|
||||
|
||||
const result = loader.getCredentialsForNode(node);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe('testCred');
|
||||
});
|
||||
|
||||
it('should return unique credentials from all versions of a versioned node', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const nodeV1 = createNode('test', 'cred1');
|
||||
const nodeV2 = createNode('test', 'cred2');
|
||||
|
||||
const versionedNode = mock<IVersionedNodeType>({
|
||||
description: { name: 'test' },
|
||||
currentVersion: 2,
|
||||
nodeVersions: {
|
||||
1: nodeV1,
|
||||
2: nodeV2,
|
||||
},
|
||||
});
|
||||
|
||||
const result = loader.getCredentialsForNode(versionedNode);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result[0].name).toBe('cred1');
|
||||
expect(result[1].name).toBe('cred2');
|
||||
});
|
||||
|
||||
it('should remove duplicate credentials from different versions', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const nodeV1 = createNode('test', 'cred1');
|
||||
const nodeV2 = createNode('test', 'cred1'); // Same credential
|
||||
|
||||
const versionedNode = mock<IVersionedNodeType>({
|
||||
description: { name: 'test' },
|
||||
currentVersion: 2,
|
||||
nodeVersions: {
|
||||
1: nodeV1,
|
||||
2: nodeV2,
|
||||
},
|
||||
});
|
||||
|
||||
const result = loader.getCredentialsForNode(versionedNode);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].name).toBe('cred1');
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadCredentialFromFile', () => {
|
||||
it('should load credential and store it correctly', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const filePath = 'dist/Credential1.js';
|
||||
|
||||
loader.loadCredentialFromFile(filePath);
|
||||
|
||||
expect(loader.credentialTypes).toEqual({
|
||||
credential1: {
|
||||
type: mockCredential1,
|
||||
sourcePath: filePath,
|
||||
},
|
||||
});
|
||||
|
||||
expect(loader.known.credentials).toEqual({
|
||||
credential1: {
|
||||
className: mockCredential1.constructor.name,
|
||||
sourcePath: filePath,
|
||||
extends: undefined,
|
||||
supportedNodes: undefined,
|
||||
},
|
||||
});
|
||||
|
||||
expect(loader.types.credentials).toEqual([mockCredential1]);
|
||||
});
|
||||
|
||||
it('should update credential icon paths', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const filePath = 'dist/Credential1.js';
|
||||
|
||||
const credWithIcon = createCredential('credentialWithIcon');
|
||||
credWithIcon.icon = {
|
||||
light: 'file:light.svg',
|
||||
dark: 'file:dark.svg',
|
||||
};
|
||||
|
||||
jest.spyOn(classLoader, 'loadClassInIsolation').mockReturnValueOnce(credWithIcon);
|
||||
|
||||
loader.loadCredentialFromFile(filePath);
|
||||
|
||||
expect(credWithIcon.iconUrl).toEqual({
|
||||
light: 'icons/CUSTOM/dist/light.svg',
|
||||
dark: 'icons/CUSTOM/dist/dark.svg',
|
||||
});
|
||||
expect(credWithIcon.icon).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should add toJSON method to credential type', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const filePath = 'dist/Credential1.js';
|
||||
|
||||
const credWithAuth = createCredential('credWithAuth');
|
||||
credWithAuth.authenticate = jest.fn();
|
||||
|
||||
jest.spyOn(classLoader, 'loadClassInIsolation').mockReturnValueOnce(credWithAuth);
|
||||
|
||||
loader.loadCredentialFromFile(filePath);
|
||||
|
||||
const serialized = deepCopy(credWithAuth);
|
||||
expect(serialized.authenticate).toEqual({});
|
||||
});
|
||||
|
||||
it('should store credential extends and supported nodes info', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const filePath = 'dist/Credential1.js';
|
||||
|
||||
const extendingCred = createCredential('extendingCred');
|
||||
extendingCred.extends = ['baseCredential'];
|
||||
|
||||
jest.spyOn(classLoader, 'loadClassInIsolation').mockReturnValueOnce(extendingCred);
|
||||
|
||||
// Set up nodesByCredential before loading
|
||||
loader.nodesByCredential.extendingCred = ['node1', 'node2'];
|
||||
|
||||
loader.loadCredentialFromFile(filePath);
|
||||
|
||||
expect(loader.known.credentials.extendingCred).toEqual({
|
||||
className: extendingCred.constructor.name,
|
||||
sourcePath: filePath,
|
||||
extends: ['baseCredential'],
|
||||
supportedNodes: ['node1', 'node2'],
|
||||
});
|
||||
});
|
||||
|
||||
it('should throw error if credential class cannot be loaded', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const filePath = 'dist/InvalidCred.js';
|
||||
|
||||
jest.spyOn(classLoader, 'loadClassInIsolation').mockImplementationOnce(() => {
|
||||
throw new TypeError('Class not found');
|
||||
});
|
||||
|
||||
expect(() => loader.loadCredentialFromFile(filePath)).toThrow('Class could not be found');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getCredential', () => {
|
||||
it('should return existing loaded credential type', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const filePath = 'dist/Credential1.js';
|
||||
|
||||
loader.loadCredentialFromFile(filePath);
|
||||
|
||||
const result = loader.getCredential('credential1');
|
||||
expect(result).toEqual({
|
||||
type: mockCredential1,
|
||||
sourcePath: filePath,
|
||||
});
|
||||
});
|
||||
|
||||
it('should load credential from known credentials if not already loaded', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const filePath = 'dist/Credential1.js';
|
||||
|
||||
// Setup known credentials without loading
|
||||
loader.known.credentials.credential1 = {
|
||||
className: 'Credential1',
|
||||
sourcePath: filePath,
|
||||
};
|
||||
|
||||
const result = loader.getCredential('credential1');
|
||||
|
||||
expect(result).toEqual({
|
||||
type: mockCredential1,
|
||||
sourcePath: filePath,
|
||||
});
|
||||
expect(classLoader.loadClassInIsolation).toHaveBeenCalledWith(
|
||||
expect.stringContaining(filePath),
|
||||
'Credential1',
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw UnrecognizedCredentialTypeError if credential type is not found', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
|
||||
expect(() => loader.getCredential('nonexistent')).toThrow(
|
||||
'Unrecognized credential type: nonexistent',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadNodeFromFile', () => {
|
||||
it('should load node and store it correctly', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const filePath = 'dist/Node1/Node1.node.js';
|
||||
|
||||
loader.loadNodeFromFile(filePath);
|
||||
|
||||
expect(loader.nodeTypes).toEqual({
|
||||
node1: {
|
||||
type: mockNode1,
|
||||
sourcePath: filePath,
|
||||
},
|
||||
});
|
||||
|
||||
expect(loader.known.nodes).toEqual({
|
||||
node1: {
|
||||
className: mockNode1.constructor.name,
|
||||
sourcePath: filePath,
|
||||
},
|
||||
});
|
||||
|
||||
expect(loader.types.nodes).toEqual([mockNode1.description]);
|
||||
expect(loader.loadedNodes).toEqual([{ name: 'node1', version: 1 }]);
|
||||
});
|
||||
|
||||
it('should update node icon paths', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const filePath = 'dist/Node1/Node1.node.js';
|
||||
|
||||
const nodeWithIcon = createNode('nodeWithIcon');
|
||||
nodeWithIcon.description.icon = {
|
||||
light: 'file:light.svg',
|
||||
dark: 'file:dark.svg',
|
||||
};
|
||||
|
||||
jest.spyOn(classLoader, 'loadClassInIsolation').mockReturnValueOnce(nodeWithIcon);
|
||||
|
||||
loader.loadNodeFromFile(filePath);
|
||||
|
||||
expect(nodeWithIcon.description.iconUrl).toEqual({
|
||||
light: 'icons/CUSTOM/dist/Node1/light.svg',
|
||||
dark: 'icons/CUSTOM/dist/Node1/dark.svg',
|
||||
});
|
||||
expect(nodeWithIcon.description.icon).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should skip node if included in excludeNodes', () => {
|
||||
const loader = new CustomDirectoryLoader(directory, ['CUSTOM.node1']);
|
||||
const filePath = 'dist/Node1/Node1.node.js';
|
||||
|
||||
loader.loadNodeFromFile(filePath);
|
||||
|
||||
expect(loader.nodeTypes).toEqual({});
|
||||
expect(loader.known.nodes).toEqual({});
|
||||
expect(loader.types.nodes).toEqual([]);
|
||||
expect(loader.loadedNodes).toEqual([]);
|
||||
});
|
||||
|
||||
it('should skip node if not in includeNodes', () => {
|
||||
const loader = new CustomDirectoryLoader(directory, [], ['CUSTOM.other']);
|
||||
const filePath = 'dist/Node1/Node1.node.js';
|
||||
|
||||
loader.loadNodeFromFile(filePath);
|
||||
|
||||
expect(loader.nodeTypes).toEqual({});
|
||||
expect(loader.known.nodes).toEqual({});
|
||||
expect(loader.types.nodes).toEqual([]);
|
||||
expect(loader.loadedNodes).toEqual([]);
|
||||
});
|
||||
|
||||
it('should handle versioned nodes correctly', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const filePath = 'dist/Node1/Node1.node.js';
|
||||
|
||||
const nodeV1 = createNode('test');
|
||||
const nodeV2 = createNode('test');
|
||||
nodeV1.description.version = 1;
|
||||
nodeV2.description.version = 2;
|
||||
|
||||
const versionedNode = mock<IVersionedNodeType>({
|
||||
description: { name: 'test', codex: {}, iconUrl: undefined, icon: undefined },
|
||||
currentVersion: 2,
|
||||
nodeVersions: {
|
||||
1: nodeV1,
|
||||
2: nodeV2,
|
||||
},
|
||||
});
|
||||
|
||||
jest.spyOn(classLoader, 'loadClassInIsolation').mockReturnValueOnce(versionedNode);
|
||||
|
||||
loader.loadNodeFromFile(filePath);
|
||||
|
||||
expect(loader.loadedNodes).toEqual([{ name: 'test', version: 2 }]);
|
||||
|
||||
const nodes = loader.types.nodes as INodeTypeDescription[];
|
||||
expect(nodes).toHaveLength(2);
|
||||
expect(nodes[0]?.version).toBe(2);
|
||||
expect(nodes[1]?.version).toBe(1);
|
||||
});
|
||||
|
||||
it('should store credential associations correctly', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const filePath = 'dist/Node1/Node1.node.js';
|
||||
|
||||
const nodeWithCreds = createNode('testNode', 'testCred');
|
||||
jest.spyOn(classLoader, 'loadClassInIsolation').mockReturnValueOnce(nodeWithCreds);
|
||||
|
||||
loader.loadNodeFromFile(filePath);
|
||||
|
||||
expect(loader.nodesByCredential).toEqual({
|
||||
testCred: ['testNode'],
|
||||
});
|
||||
});
|
||||
|
||||
it('should throw error if node class cannot be loaded', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const filePath = 'dist/InvalidNode/InvalidNode.node.js';
|
||||
|
||||
jest.spyOn(classLoader, 'loadClassInIsolation').mockImplementationOnce(() => {
|
||||
throw new TypeError('Class not found');
|
||||
});
|
||||
|
||||
expect(() => loader.loadNodeFromFile(filePath)).toThrow('Class could not be found');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getNode', () => {
|
||||
it('should return existing loaded node type', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const filePath = 'dist/Node1/Node1.node.js';
|
||||
|
||||
loader.loadNodeFromFile(filePath);
|
||||
|
||||
const result = loader.getNode('node1');
|
||||
expect(result).toEqual({
|
||||
type: mockNode1,
|
||||
sourcePath: filePath,
|
||||
});
|
||||
});
|
||||
|
||||
it('should load node from known nodes if not already loaded', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
const filePath = 'dist/Node1/Node1.node.js';
|
||||
|
||||
// Setup known nodes without loading
|
||||
loader.known.nodes.node1 = {
|
||||
className: 'Node1',
|
||||
sourcePath: filePath,
|
||||
};
|
||||
|
||||
const result = loader.getNode('node1');
|
||||
|
||||
expect(result).toEqual({
|
||||
type: mockNode1,
|
||||
sourcePath: filePath,
|
||||
});
|
||||
expect(classLoader.loadClassInIsolation).toHaveBeenCalledWith(
|
||||
expect.stringContaining(filePath),
|
||||
'Node1',
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw UnrecognizedNodeTypeError if node type is not found', () => {
|
||||
const loader = new CustomDirectoryLoader(directory);
|
||||
|
||||
expect(() => loader.getNode('nonexistent')).toThrow(
|
||||
'Unrecognized node type: CUSTOM.nonexistent',
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -17,6 +17,8 @@ import type {
import { ApplicationError, NodeHelpers, WorkflowHooks } from 'n8n-workflow';
import path from 'path';

import { UnrecognizedNodeTypeError } from '@/errors';

import { predefinedNodesTypes } from './constants';

const BASE_DIR = path.resolve(__dirname, '../../..');

@@ -102,12 +104,9 @@ export function getNodeTypes(testData: WorkflowTestData[] | WorkflowTestData) {
);

for (const nodeName of nodeNames) {
if (!nodeName.startsWith('n8n-nodes-base.')) {
throw new ApplicationError('Unknown node type', { tags: { nodeType: nodeName } });
}
const loadInfo = knownNodes[nodeName.replace('n8n-nodes-base.', '')];
if (!loadInfo) {
throw new ApplicationError('Unknown node type', { tags: { nodeType: nodeName } });
throw new UnrecognizedNodeTypeError('n8n-nodes-base', nodeName);
}
const sourcePath = loadInfo.sourcePath.replace(/^dist\//, './').replace(/\.js$/, '.ts');
const nodeSourcePath = path.join(BASE_DIR, 'nodes-base', sourcePath);
@@ -7,13 +7,12 @@
"clean": "rimraf dist .turbo",
"dev": "pnpm watch",
"typecheck": "tsc --noEmit",
"build": "tsc -p tsconfig.build.json && tsc-alias -p tsconfig.build.json && pnpm n8n-copy-icons && pnpm n8n-generate-translations && pnpm build:metadata",
"build:metadata": "pnpm n8n-generate-known && pnpm n8n-generate-ui-types",
"build": "tsc -p tsconfig.build.json && tsc-alias -p tsconfig.build.json && pnpm n8n-copy-icons && pnpm n8n-generate-translations && pnpm n8n-generate-metadata",
"format": "biome format --write .",
"format:check": "biome ci .",
"lint": "eslint . --quiet && node ./scripts/validate-load-options-methods.js",
"lintfix": "eslint . --fix",
"watch": "tsc-watch -p tsconfig.build.json --onCompilationComplete \"tsc-alias -p tsconfig.build.json\" --onSuccess \"pnpm n8n-generate-ui-types\"",
"watch": "tsc-watch -p tsconfig.build.json --onCompilationComplete \"tsc-alias -p tsconfig.build.json\" --onSuccess \"pnpm n8n-generate-metadata\"",
"test": "jest"
},
"files": [
@@ -6,7 +6,7 @@ try {
definedMethods = require('../dist/methods/defined.json');
} catch (error) {
console.error(
'Failed to find methods to validate. Please run `npm run n8n-generate-ui-types` first.',
'Failed to find methods to validate. Please run `npm run n8n-generate-metadata` first.',
);
process.exit(1);
}
@@ -4,7 +4,12 @@ import { tmpdir } from 'os';
import nock from 'nock';
import { isEmpty } from 'lodash';
import { get } from 'lodash';
import { BinaryDataService, Credentials, constructExecutionMetaData } from 'n8n-core';
import {
BinaryDataService,
Credentials,
UnrecognizedNodeTypeError,
constructExecutionMetaData,
} from 'n8n-core';
import { Container } from 'typedi';
import { mock } from 'jest-mock-extended';
import type {

@@ -251,12 +256,9 @@ export function setup(testData: WorkflowTestData[] | WorkflowTestData) {

const nodeNames = nodes.map((n) => n.type);
for (const nodeName of nodeNames) {
if (!nodeName.startsWith('n8n-nodes-base.')) {
throw new ApplicationError(`Unknown node type: ${nodeName}`, { level: 'warning' });
}
const loadInfo = knownNodes[nodeName.replace('n8n-nodes-base.', '')];
if (!loadInfo) {
throw new ApplicationError(`Unknown node type: ${nodeName}`, { level: 'warning' });
throw new UnrecognizedNodeTypeError('n8n-nodes-base', nodeName);
}
const sourcePath = loadInfo.sourcePath.replace(/^dist\//, './').replace(/\.js$/, '.ts');
const nodeSourcePath = path.join(baseDir, sourcePath);
@@ -2015,7 +2015,6 @@ export interface IWebhookDescription {
responseData?: WebhookResponseData | string;
restartWebhook?: boolean;
isForm?: boolean;
hasLifecycleMethods?: boolean; // set automatically by generate-ui-types
ndvHideUrl?: string | boolean; // If true the webhook will not be displayed in the editor
ndvHideMethod?: string | boolean; // If true the method will not be displayed in the editor
}
@@ -1,14 +1,10 @@
/* eslint-disable @typescript-eslint/no-unsafe-argument */

/* eslint-disable @typescript-eslint/no-use-before-define */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
/* eslint-disable @typescript-eslint/prefer-nullish-coalescing */

/* eslint-disable prefer-spread */

import get from 'lodash/get';
import isEqual from 'lodash/isEqual';
import uniqBy from 'lodash/uniqBy';

import { SINGLE_EXECUTION_NODES } from './Constants';
import { ApplicationError } from './errors/application.error';

@@ -1972,40 +1968,6 @@ export function getVersionedNodeType(
return object;
}

export function getVersionedNodeTypeAll(object: IVersionedNodeType | INodeType): INodeType[] {
if ('nodeVersions' in object) {
return uniqBy(
Object.values(object.nodeVersions)
.map((element) => {
element.description.name = object.description.name;
element.description.codex = object.description.codex;
return element;
})
.reverse(),
(node) => {
const { version } = node.description;
return Array.isArray(version) ? version.join(',') : version.toString();
},
);
}
return [object];
}

export function getCredentialsForNode(
object: IVersionedNodeType | INodeType,
): INodeCredentialDescription[] {
if ('nodeVersions' in object) {
return uniqBy(
Object.values(object.nodeVersions).flatMap(
(version) => version.description.credentials ?? [],
),
'name',
);
}

return object.description.credentials ?? [];
}

export function isSingleExecution(type: string, parameters: INodeParameters): boolean {
const singleExecutionCase = SINGLE_EXECUTION_NODES[type];
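The getVersionedNodeTypeAll and getCredentialsForNode helpers removed from NodeHelpers above now live as instance methods on DirectoryLoader (see the earlier hunk). A minimal call sketch, where loader and node are placeholder values for any DirectoryLoader instance and any INodeType or IVersionedNodeType:

    // Same behaviour as the removed helpers, now resolved through the loader.
    const allVersions = loader.getVersionedNodeTypeAll(node);
    const credentials = loader.getCredentialsForNode(node);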