Mirror of https://github.com/n8n-io/n8n.git (synced 2025-03-05 20:50:17 -08:00)

Commit f426c41cba: Merge branch 'master' into ai-508-backend-cancel-test-run

# Conflicts:
#	packages/cli/src/evaluation.ee/test-runs.controller.ee.ts
#	packages/cli/test/integration/evaluation/test-runs.api.test.ts
.github/workflows/chromatic.yml (vendored, 6 changes)

@@ -1,6 +1,8 @@
 name: Chromatic

 on:
+  schedule:
+    - cron: '0 0 * * *'
   workflow_dispatch:
   pull_request_review:
     types: [submitted]

@@ -70,7 +72,7 @@ jobs:
         exitZeroOnChanges: false

       - name: Success comment
-        if: steps.chromatic_tests.outcome == 'success'
+        if: steps.chromatic_tests.outcome == 'success' && github.ref != 'refs/heads/master'
         uses: peter-evans/create-or-update-comment@v4.0.0
         with:
           issue-number: ${{ github.event.pull_request.number }}

@@ -80,7 +82,7 @@
           :white_check_mark: No visual regressions found.

       - name: Fail comment
-        if: steps.chromatic_tests.outcome != 'success'
+        if: steps.chromatic_tests.outcome != 'success' && github.ref != 'refs/heads/master'
        uses: peter-evans/create-or-update-comment@v4.0.0
         with:
           issue-number: ${{ github.event.pull_request.number }}
.github/workflows/ci-master.yml (vendored, 1 change)

@@ -47,6 +47,7 @@ jobs:
       nodeVersion: ${{ matrix.node-version }}
       cacheKey: ${{ github.sha }}-base:build
       collectCoverage: ${{ matrix.node-version == '20.x' }}
+      ignoreTurboCache: ${{ matrix.node-version == '20.x' }}
     secrets:
       CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/ci-postgres-mysql.yml (vendored, 2 changes)

@@ -106,7 +106,7 @@ jobs:
       - name: Test MariaDB
         working-directory: packages/cli
-        run: pnpm test:mariadb --testTimeout 20000
+        run: pnpm test:mariadb --testTimeout 30000

   postgres:
     name: Postgres
.github/workflows/units-tests-reusable.yml (vendored, 8 changes)

@@ -22,6 +22,10 @@ on:
         required: false
         default: false
         type: boolean
+      ignoreTurboCache:
+        required: false
+        default: false
+        type: boolean
     secrets:
       CODECOV_TOKEN:
         description: 'Codecov upload token.'

@@ -32,6 +36,7 @@ jobs:
     name: Unit tests
     runs-on: ubuntu-latest
     env:
+      TURBO_FORCE: ${{ inputs.ignoreTurboCache }}
       COVERAGE_ENABLED: ${{ inputs.collectCoverage }}
     steps:
       - uses: actions/checkout@v4.1.1

@@ -49,7 +54,6 @@ jobs:
         run: pnpm install --frozen-lockfile

       - name: Setup build cache
-        if: inputs.collectCoverage != true
        uses: rharkor/caching-for-turbo@v1.5

       - name: Build

@@ -74,6 +78,6 @@ jobs:
       - name: Upload coverage to Codecov
         if: inputs.collectCoverage
-        uses: codecov/codecov-action@v4.5.0
+        uses: codecov/codecov-action@v5.1.2
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -3,9 +3,11 @@
 Portions of this software are licensed as follows:

 - Content of branches other than the main branch (i.e. "master") are not licensed.
-- Source code files that contain ".ee." in their filename are NOT licensed under the Sustainable Use License.
-  To use source code files that contain ".ee." in their filename you must hold a valid n8n Enterprise License
-  specifically allowing you access to such source code files and as defined in "LICENSE_EE.md".
+- Source code files that contain ".ee." in their filename or ".ee" in their dirname are NOT licensed under
+  the Sustainable Use License.
+  To use source code files that contain ".ee." in their filename or ".ee" in their dirname you must hold a
+  valid n8n Enterprise License specifically allowing you access to such source code files and as defined
+  in "LICENSE_EE.md".
 - All third party components incorporated into the n8n Software are licensed under the original license
   provided by the owner of the applicable component.
 - Content outside of the above mentioned files or restrictions is available under the "Sustainable Use
@@ -18,11 +18,16 @@ n8n is a workflow automation platform that gives technical teams the flexibility
 Try n8n instantly with [npx](https://docs.n8n.io/hosting/installation/npm/) (requires [Node.js](https://nodejs.org/en/)):

-`npx n8n`
+```
+npx n8n
+```

 Or deploy with [Docker](https://docs.n8n.io/hosting/installation/docker/):

-`docker run -it --rm --name n8n -p 5678:5678 docker.n8n.io/n8n-io/n8n`
+```
+docker volume create n8n_data
+docker run -it --rm --name n8n -p 5678:5678 -v n8n_data:/home/node/.n8n docker.n8n.io/n8nio/n8n
+```

 Access the editor at http://localhost:5678
codecov.yml (new file, 60 lines)

@@ -0,0 +1,60 @@
codecov:
  max_report_age: off
  require_ci_to_pass: true

coverage:
  status:
    patch: false
    project:
      default:
        threshold: 0.5%

github_checks:
  annotations: false

flags:
  tests:
    paths:
      - "**"
    carryforward: true

component_management:
  default_rules:
    statuses:
      - type: project
        target: auto
        branches:
          - "!master"
  individual_components:
    - component_id: backend_packages
      name: Backend
      paths:
        - packages/@n8n/api-types/**
        - packages/@n8n/config/**
        - packages/@n8n/client-oauth2/**
        - packages/@n8n/di/**
        - packages/@n8n/imap/**
        - packages/@n8n/permissions/**
        - packages/@n8n/task-runner/**
        - packages/workflow/**
        - packages/core/**
        - packages/cli/**
    - component_id: frontend_packages
      name: Frontend
      paths:
        - packages/@n8n/chat/**
        - packages/@n8n/codemirror-lang/**
        - packages/design-system/**
        - packages/editor-ui/**
    - component_id: nodes_packages
      name: Nodes
      paths:
        - packages/node-dev/**
        - packages/nodes-base/**
        - packages/@n8n/json-schema-to-zod/**
        - packages/@n8n/nodes-langchain/**

ignore:
  - (?s:.*/[^\/]*\.spec\.ts.*)\Z
  - (?s:.*/[^\/]*\.test\.ts.*)\Z
  - (?s:.*/[^\/]*e2e[^\/]*\.ts.*)\Z
@@ -2,7 +2,7 @@
  * Getters
  */

-import { getVisibleSelect } from '../utils';
+import { getVisibleSelect } from '../utils/popper';

 export function getCredentialSelect(eq = 0) {
   return cy.getByTestId('node-credentials-select').eq(eq);
@@ -29,7 +29,11 @@ export const getAddProjectButton = () => {
   return cy.get('@button');
 };

 export const getAddFirstProjectButton = () => cy.getByTestId('add-first-project-button');
+export const getIconPickerButton = () => cy.getByTestId('icon-picker-button');
+export const getIconPickerTab = (tab: string) => cy.getByTestId('icon-picker-tabs').contains(tab);
+export const getIconPickerIcons = () => cy.getByTestId('icon-picker-icon');
+export const getIconPickerEmojis = () => cy.getByTestId('icon-picker-emoji');
 // export const getAddProjectButton = () =>
 // 	cy.getByTestId('universal-add').should('contain', 'Add project').should('be.visible');
 export const getProjectTabs = () => cy.getByTestId('project-tabs').find('a');
@@ -1,4 +1,5 @@
 import { getManualChatModal } from './modals/chat-modal';
+import { clickGetBackToCanvas, getParameterInputByName } from './ndv';
 import { ROUTES } from '../constants';

 /**

@@ -127,7 +128,7 @@ export function navigateToNewWorkflowPage(preventNodeViewUnload = true) {
   });
 }

-export function addSupplementalNodeToParent(
+function connectNodeToParent(
   nodeName: string,
   endpointType: EndpointType,
   parentNodeName: string,

@@ -141,6 +142,15 @@ export function addSupplementalNodeToParent(
   } else {
     getNodeCreatorItems().contains(nodeName).click();
   }
 }

+export function addSupplementalNodeToParent(
+  nodeName: string,
+  endpointType: EndpointType,
+  parentNodeName: string,
+  exactMatch = false,
+) {
+  connectNodeToParent(nodeName, endpointType, parentNodeName, exactMatch);
+  getConnectionBySourceAndTarget(parentNodeName, nodeName).should('exist');
+}

@@ -160,6 +170,15 @@ export function addToolNodeToParent(nodeName: string, parentNodeName: string) {
   addSupplementalNodeToParent(nodeName, 'ai_tool', parentNodeName);
 }

+export function addVectorStoreToolToParent(nodeName: string, parentNodeName: string) {
+  connectNodeToParent(nodeName, 'ai_tool', parentNodeName, false);
+  getParameterInputByName('mode')
+    .find('input')
+    .should('have.value', 'Retrieve Documents (As Tool for AI Agent)');
+  clickGetBackToCanvas();
+  getConnectionBySourceAndTarget(nodeName, parentNodeName).should('exist');
+}
+
 export function addOutputParserNodeToParent(nodeName: string, parentNodeName: string) {
   addSupplementalNodeToParent(nodeName, 'ai_outputParser', parentNodeName);
 }
@@ -41,7 +41,9 @@ describe('Data mapping', () => {
     ndv.actions.mapDataFromHeader(1, 'value');
     ndv.getters.inlineExpressionEditorInput().should('have.text', '{{ $json.timestamp }}');
     ndv.getters.inlineExpressionEditorInput().type('{esc}');
-    ndv.getters.parameterExpressionPreview('value').should('include.text', '2024');
+    ndv.getters
+      .parameterExpressionPreview('value')
+      .should('include.text', new Date().getFullYear());

     ndv.actions.mapDataFromHeader(2, 'value');
     ndv.getters

@@ -113,6 +115,8 @@ describe('Data mapping', () => {
   });

   it('maps expressions from json view', () => {
+    // ADO-3063 - followup to make this viewport global
+    cy.viewport('macbook-16');
     cy.fixture('Test_workflow_3.json').then((data) => {
       cy.get('body').paste(JSON.stringify(data));
     });

@@ -121,17 +125,17 @@ describe('Data mapping', () => {
     workflowPage.actions.openNode('Set');
     ndv.actions.switchInputMode('JSON');

+    ndv.getters.inputDataContainer().should('exist');
+
     ndv.getters
       .inputDataContainer()
-      .should('exist')
       .find('.json-data')
       .should(
         'have.text',
         '[{"input": [{"count": 0,"with space": "!!","with.dot": "!!","with"quotes": "!!"}]},{"input": [{"count": 1}]}]',
-      )
-      .find('span')
-      .contains('"count"')
-      .realMouseDown();
+      );
+
+    ndv.getters.inputDataContainer().find('span').contains('"count"').realMouseDown();

     ndv.actions.mapToParameter('value');
     ndv.getters.inlineExpressionEditorInput().should('have.text', '{{ $json.input[0].count }}');
@@ -15,7 +15,7 @@ import {
   NDV,
   MainSidebar,
 } from '../pages';
-import { clearNotifications } from '../pages/notifications';
+import { clearNotifications, successToast } from '../pages/notifications';
 import { getVisibleDropdown, getVisibleModalOverlay, getVisibleSelect } from '../utils';

 const workflowsPage = new WorkflowsPage();

@@ -830,4 +830,23 @@ describe('Projects', { disableAutoLogin: true }, () => {
       .should('not.have.length');
     });
   });
+
+  it('should set and update project icon', () => {
+    const DEFAULT_ICON = 'fa-layer-group';
+    const NEW_PROJECT_NAME = 'Test Project';
+
+    cy.signinAsAdmin();
+    cy.visit(workflowsPage.url);
+    projects.createProject(NEW_PROJECT_NAME);
+    // New project should have default icon
+    projects.getIconPickerButton().find('svg').should('have.class', DEFAULT_ICON);
+    // Choose another icon
+    projects.getIconPickerButton().click();
+    projects.getIconPickerTab('Emojis').click();
+    projects.getIconPickerEmojis().first().click();
+    // Project should be updated with new icon
+    successToast().contains('Project icon updated successfully');
+    projects.getIconPickerButton().should('contain', '😀');
+    projects.getMenuItems().contains(NEW_PROJECT_NAME).should('contain', '😀');
+  });
 });
@@ -1,10 +1,12 @@
+import { clickGetBackToCanvas } from '../composables/ndv';
 import {
   addNodeToCanvas,
   addRetrieverNodeToParent,
   addVectorStoreNodeToParent,
+  addVectorStoreToolToParent,
   getNodeCreatorItems,
 } from '../composables/workflow';
-import { IF_NODE_NAME } from '../constants';
+import { AGENT_NODE_NAME, IF_NODE_NAME, MANUAL_CHAT_TRIGGER_NODE_NAME } from '../constants';
 import { NodeCreator } from '../pages/features/node-creator';
 import { NDV } from '../pages/ndv';
 import { WorkflowPage as WorkflowPageClass } from '../pages/workflow';

@@ -536,7 +538,7 @@ describe('Node Creator', () => {
     });
   });

-  it('should add node directly for sub-connection', () => {
+  it('should add node directly for sub-connection as vector store', () => {
     addNodeToCanvas('Question and Answer Chain', true);
     addRetrieverNodeToParent('Vector Store Retriever', 'Question and Answer Chain');
     cy.realPress('Escape');

@@ -544,4 +546,12 @@ describe('Node Creator', () => {
     cy.realPress('Escape');
     WorkflowPage.getters.canvasNodes().should('have.length', 4);
   });
+
+  it('should add node directly for sub-connection as tool', () => {
+    addNodeToCanvas(MANUAL_CHAT_TRIGGER_NODE_NAME, true);
+    addNodeToCanvas(AGENT_NODE_NAME, true, true);
+    clickGetBackToCanvas();
+
+    addVectorStoreToolToParent('In-Memory Vector Store', AGENT_NODE_NAME);
+  });
 });
cypress/e2e/48-subworkflow-inputs.cy.ts (new file, 288 lines)

@@ -0,0 +1,288 @@
import { clickGetBackToCanvas, getOutputTableHeaders } from '../composables/ndv';
import {
  clickZoomToFit,
  navigateToNewWorkflowPage,
  openNode,
  pasteWorkflow,
  saveWorkflowOnButtonClick,
} from '../composables/workflow';
import SUB_WORKFLOW_INPUTS from '../fixtures/Test_Subworkflow-Inputs.json';
import { NDV, WorkflowsPage, WorkflowPage } from '../pages';
import { errorToast, successToast } from '../pages/notifications';
import { getVisiblePopper } from '../utils';

const ndv = new NDV();
const workflowsPage = new WorkflowsPage();
const workflow = new WorkflowPage();

const DEFAULT_WORKFLOW_NAME = 'My workflow';
const DEFAULT_SUBWORKFLOW_NAME_1 = 'My Sub-Workflow 1';
const DEFAULT_SUBWORKFLOW_NAME_2 = 'My Sub-Workflow 2';

type FieldRow = readonly string[];

const exampleFields = [
  ['aNumber', 'Number'],
  ['aString', 'String'],
  ['aArray', 'Array'],
  ['aObject', 'Object'],
  ['aAny', 'Allow Any Type'],
  // bool last since it's not an inputField so we'll skip it for some cases
  ['aBool', 'Boolean'],
] as const;

/**
 * Populate multiValue fixedCollections. Only supports fixedCollections for which all fields can be defined via keyboard typing
 *
 * @param items - 2D array of items to populate, i.e. [["myField1", "String"], [""]
 * @param collectionName - name of the fixedCollection to populate
 * @param offset - amount of 'parameter-input's before the fixedCollection under test
 * @returns
 */
function populateFixedCollection(
  items: readonly FieldRow[],
  collectionName: string,
  offset: number,
) {
  if (items.length === 0) return;
  const n = items[0].length;
  for (const [i, params] of items.entries()) {
    ndv.actions.addItemToFixedCollection(collectionName);
    for (const [j, param] of params.entries()) {
      ndv.getters
        .fixedCollectionParameter(collectionName)
        .getByTestId('parameter-input')
        .eq(offset + i * n + j)
        .type(`${param}{downArrow}{enter}`);
    }
  }
}

function makeExample(type: TypeField) {
  switch (type) {
    case 'String':
      return '"example"';
    case 'Number':
      return '42';
    case 'Boolean':
      return 'true';
    case 'Array':
      return '["example", 123, null]';
    case 'Object':
      return '{{}"example": [123]}';
    case 'Allow Any Type':
      return 'null';
  }
}

type TypeField = 'Allow Any Type' | 'String' | 'Number' | 'Boolean' | 'Array' | 'Object';

function populateFields(items: ReadonlyArray<readonly [string, TypeField]>) {
  populateFixedCollection(items, 'workflowInputs', 1);
}

function navigateWorkflowSelectionDropdown(index: number, expectedText: string) {
  ndv.getters.resourceLocator('workflowId').should('be.visible');
  ndv.getters.resourceLocatorInput('workflowId').click();

  getVisiblePopper().findChildByTestId('rlc-item').eq(0).should('exist');
  getVisiblePopper()
    .findChildByTestId('rlc-item')
    .eq(index)
    .find('span')
    .should('have.text', expectedText)
    .click();
}

function populateMapperFields(values: readonly string[], offset: number) {
  for (const [i, value] of values.entries()) {
    cy.getByTestId('parameter-input')
      .eq(offset + i)
      .type(value);

    // Click on a parent to dismiss the pop up hiding the field below.
    cy.getByTestId('parameter-input')
      .eq(offset + i)
      .parent()
      .parent()
      .click('topLeft');
  }
}

// This function starts off in the Child Workflow Input Trigger, assuming we just defined the input fields
// It then navigates back to the parent and validates output
function validateAndReturnToParent(targetChild: string, offset: number, fields: string[]) {
  ndv.actions.execute();

  // + 1 to account for formatting-only column
  getOutputTableHeaders().should('have.length', fields.length + 1);
  for (const [i, name] of fields.entries()) {
    getOutputTableHeaders().eq(i).should('have.text', name);
  }

  clickGetBackToCanvas();
  saveWorkflowOnButtonClick();

  cy.visit(workflowsPage.url);

  workflowsPage.getters.workflowCardContent(DEFAULT_WORKFLOW_NAME).click();

  openNode('Execute Workflow');

  // Note that outside of e2e tests this will be pre-selected correctly.
  // Due to our workaround to remain in the same tab we need to select the correct tab manually
  navigateWorkflowSelectionDropdown(offset, targetChild);

  // This fails, pointing to `usePushConnection` `const triggerNode = subWorkflow?.nodes.find` being `undefined.find()`, I think
  ndv.actions.execute();

  getOutputTableHeaders().should('have.length', fields.length + 1);
  for (const [i, name] of fields.entries()) {
    getOutputTableHeaders().eq(i).should('have.text', name);
  }

  // todo: verify the fields appear and show the correct types

  // todo: fill in the input fields (and mock previous node data in the json fixture to match)

  // todo: validate the actual output data
}

function setWorkflowInputFieldValue(index: number, value: string) {
  ndv.actions.addItemToFixedCollection('workflowInputs');
  ndv.actions.typeIntoFixedCollectionItem('workflowInputs', index, value);
}

describe('Sub-workflow creation and typed usage', () => {
  beforeEach(() => {
    navigateToNewWorkflowPage();
    pasteWorkflow(SUB_WORKFLOW_INPUTS);
    saveWorkflowOnButtonClick();
    clickZoomToFit();

    openNode('Execute Workflow');

    // Prevent sub-workflow from opening in new window
    cy.window().then((win) => {
      cy.stub(win, 'open').callsFake((url) => {
        cy.visit(url);
      });
    });
    navigateWorkflowSelectionDropdown(0, 'Create a new sub-workflow');
    // **************************
    // NAVIGATE TO CHILD WORKFLOW
    // **************************

    openNode('Workflow Input Trigger');
  });

  it('works with type-checked values', () => {
    populateFields(exampleFields);

    validateAndReturnToParent(
      DEFAULT_SUBWORKFLOW_NAME_1,
      1,
      exampleFields.map((f) => f[0]),
    );

    const values = [
      '-1', // number fields don't support `=` switch to expression, so let's test the Fixed case with it
      ...exampleFields.slice(1).map((x) => `={{}{{} $json.a${x[0]}`), // }} are added automatically
    ];

    // this matches with the pinned data provided in the fixture
    populateMapperFields(values, 2);

    ndv.actions.execute();

    // todo:
    // - validate output lines up
    // - change input to need casts
    // - run
    // - confirm error
    // - switch `attemptToConvertTypes` flag
    // - confirm success and changed output
    // - change input to be invalid despite cast
    // - run
    // - confirm error
    // - switch type option flags
    // - run
    // - confirm success
    // - turn off attempt to cast flag
    // - confirm a value was not cast
  });

  it('works with Fields input source into JSON input source', () => {
    ndv.getters.nodeOutputHint().should('exist');

    populateFields(exampleFields);

    validateAndReturnToParent(
      DEFAULT_SUBWORKFLOW_NAME_1,
      1,
      exampleFields.map((f) => f[0]),
    );

    cy.window().then((win) => {
      cy.stub(win, 'open').callsFake((url) => {
        cy.visit(url);
      });
    });
    navigateWorkflowSelectionDropdown(0, 'Create a new sub-workflow');

    openNode('Workflow Input Trigger');

    cy.getByTestId('parameter-input').eq(0).click();

    // Todo: Check if there's a better way to interact with option dropdowns
    // This PR would add this child testId
    getVisiblePopper()
      .getByTestId('parameter-input')
      .eq(0)
      .type('Using JSON Example{downArrow}{enter}');

    const exampleJson =
      '{{}' + exampleFields.map((x) => `"${x[0]}": ${makeExample(x[1])}`).join(',') + '}';
    cy.getByTestId('parameter-input-jsonExample')
      .find('.cm-line')
      .eq(0)
      .type(`{selectAll}{backspace}${exampleJson}{enter}`);

    // first one doesn't work for some reason, might need to wait for something?
    ndv.actions.execute();

    validateAndReturnToParent(
      DEFAULT_SUBWORKFLOW_NAME_2,
      2,
      exampleFields.map((f) => f[0]),
    );

    // test for either InputSource mode and options combinations:
    // + we're showing the notice in the output panel
    // + we start with no fields
    // + Test Step works and we create the fields
    // + create field of each type (string, number, boolean, object, array, any)
    // + exit ndv
    // + save
    // + go back to parent workflow
    // - verify fields appear [needs Ivan's PR]
    // - link fields [needs Ivan's PR]
    // + run parent
    // - verify output with `null` defaults exists
  });

  it('should show node issue when no fields are defined in manual mode', () => {
    ndv.getters.nodeExecuteButton().should('be.disabled');
    ndv.actions.close();
    // Executing the workflow should show an error toast
    workflow.actions.executeWorkflow();
    errorToast().should('contain', 'The workflow has issues');
    openNode('Workflow Input Trigger');
    // Add a field to the workflowInputs fixedCollection
    setWorkflowInputFieldValue(0, 'test');
    // Executing the workflow should not show error now
    ndv.actions.close();
    workflow.actions.executeWorkflow();
    successToast().should('contain', 'Workflow executed successfully');
  });
});
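For reference, the `eq(offset + i * n + j)` indexing in `populateFixedCollection` walks the flat list of `parameter-input` elements row by row. A quick worked example with the `workflowInputs` collection and `offset = 1`, as used by `populateFields`:

```
// n = 2 (one name input and one type input per row), offset = 1:
//   row 0: name -> eq(1), type -> eq(2)
//   row 1: name -> eq(3), type -> eq(4)
populateFixedCollection(
  [
    ['aNumber', 'Number'],
    ['aString', 'String'],
  ],
  'workflowInputs',
  1,
);
```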
70
cypress/fixtures/Test_Subworkflow-Inputs.json
Normal file
70
cypress/fixtures/Test_Subworkflow-Inputs.json
Normal file
|
@ -0,0 +1,70 @@
|
|||
{
|
||||
"meta": {
|
||||
"instanceId": "4d0676b62208d810ef035130bbfc9fd3afdc78d963ea8ccb9514dc89066efc94"
|
||||
},
|
||||
"nodes": [
|
||||
{
|
||||
"parameters": {},
|
||||
"id": "bb7f8bb3-840a-464c-a7de-d3a80538c2be",
|
||||
"name": "When clicking ‘Test workflow’",
|
||||
"type": "n8n-nodes-base.manualTrigger",
|
||||
"typeVersion": 1,
|
||||
"position": [0, 0]
|
||||
},
|
||||
{
|
||||
"parameters": {
|
||||
"workflowId": {},
|
||||
"workflowInputs": {
|
||||
"mappingMode": "defineBelow",
|
||||
"value": {},
|
||||
"matchingColumns": [],
|
||||
"schema": [],
|
||||
"ignoreTypeMismatchErrors": false,
|
||||
"attemptToConvertTypes": false,
|
||||
"convertFieldsToString": true
|
||||
},
|
||||
"options": {}
|
||||
},
|
||||
"type": "n8n-nodes-base.executeWorkflow",
|
||||
"typeVersion": 1.2,
|
||||
"position": [500, 240],
|
||||
"id": "6b6e2e34-c6ab-4083-b8e3-6b0d56be5453",
|
||||
"name": "Execute Workflow"
|
||||
}
|
||||
],
|
||||
"connections": {
|
||||
"When clicking ‘Test workflow’": {
|
||||
"main": [
|
||||
[
|
||||
{
|
||||
"node": "Execute Workflow",
|
||||
"type": "main",
|
||||
"index": 0
|
||||
}
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
"pinData": {
|
||||
"When clicking ‘Test workflow’": [
|
||||
{
|
||||
"aaString": "A String",
|
||||
"aaNumber": 1,
|
||||
"aaArray": [1, true, "3"],
|
||||
"aaObject": {
|
||||
"aKey": -1
|
||||
},
|
||||
"aaAny": {}
|
||||
},
|
||||
{
|
||||
"aaString": "Another String",
|
||||
"aaNumber": 2,
|
||||
"aaArray": [],
|
||||
"aaObject": {
|
||||
"aDifferentKey": -1
|
||||
},
|
||||
"aaAny": []
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
|
@@ -320,6 +320,11 @@ export class NDV extends BasePage {
     addItemToFixedCollection: (paramName: string) => {
       this.getters.fixedCollectionParameter(paramName).getByTestId('fixed-collection-add').click();
     },
+    typeIntoFixedCollectionItem: (fixedCollectionName: string, index: number, content: string) => {
+      this.getters.fixedCollectionParameter(fixedCollectionName).within(() => {
+        cy.getByTestId('parameter-input').eq(index).type(content);
+      });
+    },
     dragMainPanelToLeft: () => {
       cy.drag('[data-test-id=panel-drag-button]', [-1000, 0], { moveTwice: true });
     },
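The two fixedCollection actions are meant to be used together: add a row, then type into the parameter input at that row's index. A minimal usage sketch, mirroring `setWorkflowInputFieldValue` from the new sub-workflow spec above:

```
import { NDV } from '../pages/ndv';

const ndv = new NDV();

// Append one row to the 'workflowInputs' fixedCollection and fill it in.
function addWorkflowInputField(index: number, value: string) {
  ndv.actions.addItemToFixedCollection('workflowInputs');
  ndv.actions.typeIntoFixedCollectionItem('workflowInputs', index, value);
}
```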
@@ -84,7 +84,6 @@
     "ws": ">=8.17.1"
   },
   "patchedDependencies": {
-    "typedi@0.10.0": "patches/typedi@0.10.0.patch",
     "pkce-challenge@3.0.0": "patches/pkce-challenge@3.0.0.patch",
     "pyodide@0.23.4": "patches/pyodide@0.23.4.patch",
     "@types/express-serve-static-core@4.17.43": "patches/@types__express-serve-static-core@4.17.43.patch",
@@ -27,6 +27,6 @@
   "dependencies": {
     "xss": "catalog:",
     "zod": "catalog:",
-    "zod-class": "0.0.15"
+    "zod-class": "0.0.16"
   }
 }
@ -0,0 +1,36 @@
|
|||
import { AiApplySuggestionRequestDto } from '../ai-apply-suggestion-request.dto';
|
||||
|
||||
describe('AiApplySuggestionRequestDto', () => {
|
||||
it('should validate a valid suggestion application request', () => {
|
||||
const validRequest = {
|
||||
sessionId: 'session-123',
|
||||
suggestionId: 'suggestion-456',
|
||||
};
|
||||
|
||||
const result = AiApplySuggestionRequestDto.safeParse(validRequest);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should fail if sessionId is missing', () => {
|
||||
const invalidRequest = {
|
||||
suggestionId: 'suggestion-456',
|
||||
};
|
||||
|
||||
const result = AiApplySuggestionRequestDto.safeParse(invalidRequest);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.issues[0].path).toEqual(['sessionId']);
|
||||
});
|
||||
|
||||
it('should fail if suggestionId is missing', () => {
|
||||
const invalidRequest = {
|
||||
sessionId: 'session-123',
|
||||
};
|
||||
|
||||
const result = AiApplySuggestionRequestDto.safeParse(invalidRequest);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.issues[0].path).toEqual(['suggestionId']);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,252 @@
|
|||
import { AiAskRequestDto } from '../ai-ask-request.dto';
|
||||
|
||||
describe('AiAskRequestDto', () => {
|
||||
const validRequest = {
|
||||
question: 'How can I improve this workflow?',
|
||||
context: {
|
||||
schema: [
|
||||
{
|
||||
nodeName: 'TestNode',
|
||||
schema: {
|
||||
type: 'string',
|
||||
key: 'testKey',
|
||||
value: 'testValue',
|
||||
path: '/test/path',
|
||||
},
|
||||
},
|
||||
],
|
||||
inputSchema: {
|
||||
nodeName: 'InputNode',
|
||||
schema: {
|
||||
type: 'object',
|
||||
key: 'inputKey',
|
||||
value: [
|
||||
{
|
||||
type: 'string',
|
||||
key: 'nestedKey',
|
||||
value: 'nestedValue',
|
||||
path: '/nested/path',
|
||||
},
|
||||
],
|
||||
path: '/input/path',
|
||||
},
|
||||
},
|
||||
pushRef: 'push-123',
|
||||
ndvPushRef: 'ndv-push-456',
|
||||
},
|
||||
forNode: 'TestWorkflowNode',
|
||||
};
|
||||
|
||||
it('should validate a valid AI ask request', () => {
|
||||
const result = AiAskRequestDto.safeParse(validRequest);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should fail if question is missing', () => {
|
||||
const invalidRequest = {
|
||||
...validRequest,
|
||||
question: undefined,
|
||||
};
|
||||
|
||||
const result = AiAskRequestDto.safeParse(invalidRequest);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.issues[0].path).toEqual(['question']);
|
||||
});
|
||||
|
||||
it('should fail if context is invalid', () => {
|
||||
const invalidRequest = {
|
||||
...validRequest,
|
||||
context: {
|
||||
...validRequest.context,
|
||||
schema: [
|
||||
{
|
||||
nodeName: 'TestNode',
|
||||
schema: {
|
||||
type: 'invalid-type', // Invalid type
|
||||
value: 'testValue',
|
||||
path: '/test/path',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
const result = AiAskRequestDto.safeParse(invalidRequest);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should fail if forNode is missing', () => {
|
||||
const invalidRequest = {
|
||||
...validRequest,
|
||||
forNode: undefined,
|
||||
};
|
||||
|
||||
const result = AiAskRequestDto.safeParse(invalidRequest);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.issues[0].path).toEqual(['forNode']);
|
||||
});
|
||||
|
||||
it('should validate all possible schema types', () => {
|
||||
const allTypesRequest = {
|
||||
question: 'Test all possible types',
|
||||
context: {
|
||||
schema: [
|
||||
{
|
||||
nodeName: 'AllTypesNode',
|
||||
schema: {
|
||||
type: 'object',
|
||||
key: 'typesRoot',
|
||||
value: [
|
||||
{ type: 'string', key: 'stringType', value: 'string', path: '/types/string' },
|
||||
{ type: 'number', key: 'numberType', value: 'number', path: '/types/number' },
|
||||
{ type: 'boolean', key: 'booleanType', value: 'boolean', path: '/types/boolean' },
|
||||
{ type: 'bigint', key: 'bigintType', value: 'bigint', path: '/types/bigint' },
|
||||
{ type: 'symbol', key: 'symbolType', value: 'symbol', path: '/types/symbol' },
|
||||
{ type: 'array', key: 'arrayType', value: [], path: '/types/array' },
|
||||
{ type: 'object', key: 'objectType', value: [], path: '/types/object' },
|
||||
{
|
||||
type: 'function',
|
||||
key: 'functionType',
|
||||
value: 'function',
|
||||
path: '/types/function',
|
||||
},
|
||||
{ type: 'null', key: 'nullType', value: 'null', path: '/types/null' },
|
||||
{
|
||||
type: 'undefined',
|
||||
key: 'undefinedType',
|
||||
value: 'undefined',
|
||||
path: '/types/undefined',
|
||||
},
|
||||
],
|
||||
path: '/types/root',
|
||||
},
|
||||
},
|
||||
],
|
||||
inputSchema: {
|
||||
nodeName: 'InputNode',
|
||||
schema: {
|
||||
type: 'object',
|
||||
key: 'simpleInput',
|
||||
value: [
|
||||
{
|
||||
type: 'string',
|
||||
key: 'simpleKey',
|
||||
value: 'simpleValue',
|
||||
path: '/simple/path',
|
||||
},
|
||||
],
|
||||
path: '/simple/input/path',
|
||||
},
|
||||
},
|
||||
pushRef: 'push-types-123',
|
||||
ndvPushRef: 'ndv-push-types-456',
|
||||
},
|
||||
forNode: 'TypeCheckNode',
|
||||
};
|
||||
|
||||
const result = AiAskRequestDto.safeParse(allTypesRequest);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should fail with invalid type', () => {
|
||||
const invalidTypeRequest = {
|
||||
question: 'Test invalid type',
|
||||
context: {
|
||||
schema: [
|
||||
{
|
||||
nodeName: 'InvalidTypeNode',
|
||||
schema: {
|
||||
type: 'invalid-type', // This should fail
|
||||
key: 'invalidKey',
|
||||
value: 'invalidValue',
|
||||
path: '/invalid/path',
|
||||
},
|
||||
},
|
||||
],
|
||||
inputSchema: {
|
||||
nodeName: 'InputNode',
|
||||
schema: {
|
||||
type: 'object',
|
||||
key: 'simpleInput',
|
||||
value: [
|
||||
{
|
||||
type: 'string',
|
||||
key: 'simpleKey',
|
||||
value: 'simpleValue',
|
||||
path: '/simple/path',
|
||||
},
|
||||
],
|
||||
path: '/simple/input/path',
|
||||
},
|
||||
},
|
||||
pushRef: 'push-invalid-123',
|
||||
ndvPushRef: 'ndv-push-invalid-456',
|
||||
},
|
||||
forNode: 'InvalidTypeNode',
|
||||
};
|
||||
|
||||
const result = AiAskRequestDto.safeParse(invalidTypeRequest);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should validate multiple schema entries', () => {
|
||||
const multiSchemaRequest = {
|
||||
question: 'Multiple schema test',
|
||||
context: {
|
||||
schema: [
|
||||
{
|
||||
nodeName: 'FirstNode',
|
||||
schema: {
|
||||
type: 'string',
|
||||
key: 'firstKey',
|
||||
value: 'firstValue',
|
||||
path: '/first/path',
|
||||
},
|
||||
},
|
||||
{
|
||||
nodeName: 'SecondNode',
|
||||
schema: {
|
||||
type: 'object',
|
||||
key: 'secondKey',
|
||||
value: [
|
||||
{
|
||||
type: 'number',
|
||||
key: 'nestedKey',
|
||||
value: 'nestedValue',
|
||||
path: '/second/nested/path',
|
||||
},
|
||||
],
|
||||
path: '/second/path',
|
||||
},
|
||||
},
|
||||
],
|
||||
inputSchema: {
|
||||
nodeName: 'InputNode',
|
||||
schema: {
|
||||
type: 'object',
|
||||
key: 'simpleInput',
|
||||
value: [
|
||||
{
|
||||
type: 'string',
|
||||
key: 'simpleKey',
|
||||
value: 'simpleValue',
|
||||
path: '/simple/path',
|
||||
},
|
||||
],
|
||||
path: '/simple/input/path',
|
||||
},
|
||||
},
|
||||
pushRef: 'push-multi-123',
|
||||
ndvPushRef: 'ndv-push-multi-456',
|
||||
},
|
||||
forNode: 'MultiSchemaNode',
|
||||
};
|
||||
|
||||
const result = AiAskRequestDto.safeParse(multiSchemaRequest);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,34 @@
|
|||
import { AiChatRequestDto } from '../ai-chat-request.dto';
|
||||
|
||||
describe('AiChatRequestDto', () => {
|
||||
it('should validate a request with a payload and session ID', () => {
|
||||
const validRequest = {
|
||||
payload: { someKey: 'someValue' },
|
||||
sessionId: 'session-123',
|
||||
};
|
||||
|
||||
const result = AiChatRequestDto.safeParse(validRequest);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should validate a request with only a payload', () => {
|
||||
const validRequest = {
|
||||
payload: { complexObject: { nested: 'value' } },
|
||||
};
|
||||
|
||||
const result = AiChatRequestDto.safeParse(validRequest);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should fail if payload is missing', () => {
|
||||
const invalidRequest = {
|
||||
sessionId: 'session-123',
|
||||
};
|
||||
|
||||
const result = AiChatRequestDto.safeParse(invalidRequest);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,32 @@
|
|||
import { nanoId } from 'minifaker';
|
||||
|
||||
import { AiFreeCreditsRequestDto } from '../ai-free-credits-request.dto';
|
||||
import 'minifaker/locales/en';
|
||||
|
||||
describe('AiChatRequestDto', () => {
|
||||
it('should succeed if projectId is a valid nanoid', () => {
|
||||
const validRequest = {
|
||||
projectId: nanoId.nanoid(),
|
||||
};
|
||||
|
||||
const result = AiFreeCreditsRequestDto.safeParse(validRequest);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should succeed if no projectId is sent', () => {
|
||||
const result = AiFreeCreditsRequestDto.safeParse({});
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should fail is projectId invalid value', () => {
|
||||
const validRequest = {
|
||||
projectId: '',
|
||||
};
|
||||
|
||||
const result = AiFreeCreditsRequestDto.safeParse(validRequest);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
});
|
|
@@ -0,0 +1,7 @@
import { z } from 'zod';
import { Z } from 'zod-class';

export class AiApplySuggestionRequestDto extends Z.class({
  sessionId: z.string(),
  suggestionId: z.string(),
}) {}
packages/@n8n/api-types/src/dto/ai/ai-ask-request.dto.ts (new file, 53 lines)

@@ -0,0 +1,53 @@
import type { AiAssistantSDK, SchemaType } from '@n8n_io/ai-assistant-sdk';
import { z } from 'zod';
import { Z } from 'zod-class';

// Note: This is copied from the sdk, since this type is not exported
type Schema = {
  type: SchemaType;
  key?: string;
  value: string | Schema[];
  path: string;
};

// Create a lazy validator to handle the recursive type
const schemaValidator: z.ZodType<Schema> = z.lazy(() =>
  z.object({
    type: z.enum([
      'string',
      'number',
      'boolean',
      'bigint',
      'symbol',
      'array',
      'object',
      'function',
      'null',
      'undefined',
    ]),
    key: z.string().optional(),
    value: z.union([z.string(), z.lazy(() => schemaValidator.array())]),
    path: z.string(),
  }),
);

export class AiAskRequestDto
  extends Z.class({
    question: z.string(),
    context: z.object({
      schema: z.array(
        z.object({
          nodeName: z.string(),
          schema: schemaValidator,
        }),
      ),
      inputSchema: z.object({
        nodeName: z.string(),
        schema: schemaValidator,
      }),
      pushRef: z.string(),
      ndvPushRef: z.string(),
    }),
    forNode: z.string(),
  })
  implements AiAssistantSDK.AskAiRequestPayload {}
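Because `schemaValidator` is wrapped in `z.lazy()`, the `value` field can nest further `Schema` entries to arbitrary depth. A usage sketch based on the shapes exercised in the accompanying unit test:

```
import { AiAskRequestDto } from './ai-ask-request.dto';

const result = AiAskRequestDto.safeParse({
  question: 'How can I improve this workflow?',
  context: {
    schema: [
      {
        nodeName: 'TestNode',
        // `value` is either a string or an array of nested Schema entries
        schema: {
          type: 'object',
          key: 'root',
          value: [{ type: 'string', key: 'child', value: 'x', path: '/root/child' }],
          path: '/root',
        },
      },
    ],
    inputSchema: {
      nodeName: 'InputNode',
      schema: { type: 'string', key: 'k', value: 'v', path: '/k' },
    },
    pushRef: 'push-123',
    ndvPushRef: 'ndv-456',
  },
  forNode: 'TestNode',
});
// result.success === true; an unrecognised `type` such as 'invalid-type' would fail instead
```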
packages/@n8n/api-types/src/dto/ai/ai-chat-request.dto.ts (new file, 10 lines)

@@ -0,0 +1,10 @@
import type { AiAssistantSDK } from '@n8n_io/ai-assistant-sdk';
import { z } from 'zod';
import { Z } from 'zod-class';

export class AiChatRequestDto
  extends Z.class({
    payload: z.object({}).passthrough(), // Allow any object shape
    sessionId: z.string().optional(),
  })
  implements AiAssistantSDK.ChatRequestPayload {}
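`z.object({}).passthrough()` accepts any object and keeps unknown keys in the parsed output, so the chat payload is forwarded untouched while `sessionId` stays optional:

```
import { AiChatRequestDto } from './ai-chat-request.dto';

const parsed = AiChatRequestDto.safeParse({ payload: { role: 'user', text: 'hi' } });
// parsed.success === true and parsed.data?.payload still contains { role: 'user', text: 'hi' };
// omitting `payload` entirely would fail validation.
```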
@@ -0,0 +1,6 @@
import { z } from 'zod';
import { Z } from 'zod-class';

export class AiFreeCreditsRequestDto extends Z.class({
  projectId: z.string().min(1).optional(),
}) {}
@ -0,0 +1,93 @@
|
|||
import { LoginRequestDto } from '../login-request.dto';
|
||||
|
||||
describe('LoginRequestDto', () => {
|
||||
describe('Valid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'complete valid login request',
|
||||
request: {
|
||||
email: 'test@example.com',
|
||||
password: 'securePassword123',
|
||||
mfaCode: '123456',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'login request without optional MFA',
|
||||
request: {
|
||||
email: 'test@example.com',
|
||||
password: 'securePassword123',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'login request with both mfaCode and mfaRecoveryCode',
|
||||
request: {
|
||||
email: 'test@example.com',
|
||||
password: 'securePassword123',
|
||||
mfaCode: '123456',
|
||||
mfaRecoveryCode: 'recovery-code-123',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'login request with only mfaRecoveryCode',
|
||||
request: {
|
||||
email: 'test@example.com',
|
||||
password: 'securePassword123',
|
||||
mfaRecoveryCode: 'recovery-code-123',
|
||||
},
|
||||
},
|
||||
])('should validate $name', ({ request }) => {
|
||||
const result = LoginRequestDto.safeParse(request);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'invalid email',
|
||||
request: {
|
||||
email: 'invalid-email',
|
||||
password: 'securePassword123',
|
||||
},
|
||||
expectedErrorPath: ['email'],
|
||||
},
|
||||
{
|
||||
name: 'empty password',
|
||||
request: {
|
||||
email: 'test@example.com',
|
||||
password: '',
|
||||
},
|
||||
expectedErrorPath: ['password'],
|
||||
},
|
||||
{
|
||||
name: 'missing email',
|
||||
request: {
|
||||
password: 'securePassword123',
|
||||
},
|
||||
expectedErrorPath: ['email'],
|
||||
},
|
||||
{
|
||||
name: 'missing password',
|
||||
request: {
|
||||
email: 'test@example.com',
|
||||
},
|
||||
expectedErrorPath: ['password'],
|
||||
},
|
||||
{
|
||||
name: 'whitespace in email and password',
|
||||
request: {
|
||||
email: ' test@example.com ',
|
||||
password: ' securePassword123 ',
|
||||
},
|
||||
expectedErrorPath: ['email'],
|
||||
},
|
||||
])('should fail validation for $name', ({ request, expectedErrorPath }) => {
|
||||
const result = LoginRequestDto.safeParse(request);
|
||||
expect(result.success).toBe(false);
|
||||
|
||||
if (expectedErrorPath) {
|
||||
expect(result.error?.issues[0].path).toEqual(expectedErrorPath);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,87 @@
|
|||
import { ResolveSignupTokenQueryDto } from '../resolve-signup-token-query.dto';
|
||||
|
||||
describe('ResolveSignupTokenQueryDto', () => {
|
||||
const validUuid = '123e4567-e89b-12d3-a456-426614174000';
|
||||
|
||||
describe('Valid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'standard UUID',
|
||||
request: {
|
||||
inviterId: validUuid,
|
||||
inviteeId: validUuid,
|
||||
},
|
||||
},
|
||||
])('should validate $name', ({ request }) => {
|
||||
const result = ResolveSignupTokenQueryDto.safeParse(request);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'invalid inviterId UUID',
|
||||
request: {
|
||||
inviterId: 'not-a-valid-uuid',
|
||||
inviteeId: validUuid,
|
||||
},
|
||||
expectedErrorPath: ['inviterId'],
|
||||
},
|
||||
{
|
||||
name: 'invalid inviteeId UUID',
|
||||
request: {
|
||||
inviterId: validUuid,
|
||||
inviteeId: 'not-a-valid-uuid',
|
||||
},
|
||||
expectedErrorPath: ['inviteeId'],
|
||||
},
|
||||
{
|
||||
name: 'missing inviterId',
|
||||
request: {
|
||||
inviteeId: validUuid,
|
||||
},
|
||||
expectedErrorPath: ['inviterId'],
|
||||
},
|
||||
{
|
||||
name: 'missing inviteeId',
|
||||
request: {
|
||||
inviterId: validUuid,
|
||||
},
|
||||
expectedErrorPath: ['inviteeId'],
|
||||
},
|
||||
{
|
||||
name: 'UUID with invalid characters',
|
||||
request: {
|
||||
inviterId: '123e4567-e89b-12d3-a456-42661417400G',
|
||||
inviteeId: validUuid,
|
||||
},
|
||||
expectedErrorPath: ['inviterId'],
|
||||
},
|
||||
{
|
||||
name: 'UUID too long',
|
||||
request: {
|
||||
inviterId: '123e4567-e89b-12d3-a456-426614174001234',
|
||||
inviteeId: validUuid,
|
||||
},
|
||||
expectedErrorPath: ['inviterId'],
|
||||
},
|
||||
{
|
||||
name: 'UUID too short',
|
||||
request: {
|
||||
inviterId: '123e4567-e89b-12d3-a456',
|
||||
inviteeId: validUuid,
|
||||
},
|
||||
expectedErrorPath: ['inviterId'],
|
||||
},
|
||||
])('should fail validation for $name', ({ request, expectedErrorPath }) => {
|
||||
const result = ResolveSignupTokenQueryDto.safeParse(request);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
|
||||
if (expectedErrorPath) {
|
||||
expect(result.error?.issues[0].path).toEqual(expectedErrorPath);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -0,0 +1,9 @@
import { z } from 'zod';
import { Z } from 'zod-class';

export class LoginRequestDto extends Z.class({
  email: z.string().email(),
  password: z.string().min(1),
  mfaCode: z.string().optional(),
  mfaRecoveryCode: z.string().optional(),
}) {}
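A usage sketch of the DTO at a request boundary; how the controller consumes it is outside this diff, so the error handling below is illustrative only:

```
import { LoginRequestDto } from './login-request.dto';

const result = LoginRequestDto.safeParse({
  email: 'user@example.com',
  password: 'secret',
  mfaCode: '123456', // optional, as is mfaRecoveryCode
});

if (!result.success) {
  // result.error.issues[0].path names the offending field, e.g. ['email']
}
```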
@@ -0,0 +1,7 @@
import { z } from 'zod';
import { Z } from 'zod-class';

export class ResolveSignupTokenQueryDto extends Z.class({
  inviterId: z.string().uuid(),
  inviteeId: z.string().uuid(),
}) {}
@ -0,0 +1,55 @@
|
|||
import { CredentialsGetManyRequestQuery } from '../credentials-get-many-request.dto';
|
||||
|
||||
describe('CredentialsGetManyRequestQuery', () => {
|
||||
describe('should pass validation', () => {
|
||||
it('with empty object', () => {
|
||||
const data = {};
|
||||
|
||||
const result = CredentialsGetManyRequestQuery.safeParse(data);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
test.each([
|
||||
{ field: 'includeScopes', value: 'true' },
|
||||
{ field: 'includeScopes', value: 'false' },
|
||||
{ field: 'includeData', value: 'true' },
|
||||
{ field: 'includeData', value: 'false' },
|
||||
])('with $field set to $value', ({ field, value }) => {
|
||||
const data = { [field]: value };
|
||||
|
||||
const result = CredentialsGetManyRequestQuery.safeParse(data);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('with both parameters set', () => {
|
||||
const data = {
|
||||
includeScopes: 'true',
|
||||
includeData: 'true',
|
||||
};
|
||||
|
||||
const result = CredentialsGetManyRequestQuery.safeParse(data);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('should fail validation', () => {
|
||||
test.each([
|
||||
{ field: 'includeScopes', value: true },
|
||||
{ field: 'includeScopes', value: false },
|
||||
{ field: 'includeScopes', value: 'invalid' },
|
||||
{ field: 'includeData', value: true },
|
||||
{ field: 'includeData', value: false },
|
||||
{ field: 'includeData', value: 'invalid' },
|
||||
])('with invalid value $value for $field', ({ field, value }) => {
|
||||
const data = { [field]: value };
|
||||
|
||||
const result = CredentialsGetManyRequestQuery.safeParse(data);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.issues[0].path[0]).toBe(field);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,52 @@
|
|||
import { CredentialsGetOneRequestQuery } from '../credentials-get-one-request.dto';
|
||||
|
||||
describe('CredentialsGetManyRequestQuery', () => {
|
||||
describe('should pass validation', () => {
|
||||
it('with empty object', () => {
|
||||
const data = {};
|
||||
|
||||
const result = CredentialsGetOneRequestQuery.safeParse(data);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
// defaults to false
|
||||
expect(result.data?.includeData).toBe(false);
|
||||
});
|
||||
|
||||
test.each([
|
||||
{ field: 'includeData', value: 'true' },
|
||||
{ field: 'includeData', value: 'false' },
|
||||
])('with $field set to $value', ({ field, value }) => {
|
||||
const data = { [field]: value };
|
||||
|
||||
const result = CredentialsGetOneRequestQuery.safeParse(data);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('with both parameters set', () => {
|
||||
const data = {
|
||||
includeScopes: 'true',
|
||||
includeData: 'true',
|
||||
};
|
||||
|
||||
const result = CredentialsGetOneRequestQuery.safeParse(data);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('should fail validation', () => {
|
||||
test.each([
|
||||
{ field: 'includeData', value: true },
|
||||
{ field: 'includeData', value: false },
|
||||
{ field: 'includeData', value: 'invalid' },
|
||||
])('with invalid value $value for $field', ({ field, value }) => {
|
||||
const data = { [field]: value };
|
||||
|
||||
const result = CredentialsGetOneRequestQuery.safeParse(data);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error?.issues[0].path[0]).toBe(field);
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -0,0 +1,22 @@
import { Z } from 'zod-class';

import { booleanFromString } from '../../schemas/booleanFromString';

export class CredentialsGetManyRequestQuery extends Z.class({
  /**
   * Adds the `scopes` field to each credential which includes all scopes the
   * requesting user has in relation to the credential, e.g.
   * ['credential:read', 'credential:update']
   */
  includeScopes: booleanFromString.optional(),

  /**
   * Adds the decrypted `data` field to each credential.
   *
   * It only does this for credentials for which the user has the
   * `credential:update` scope.
   *
   * This switches `includeScopes` to true to be able to check for the scopes
   */
  includeData: booleanFromString.optional(),
}) {}
@@ -0,0 +1,13 @@
import { Z } from 'zod-class';

import { booleanFromString } from '../../schemas/booleanFromString';

export class CredentialsGetOneRequestQuery extends Z.class({
  /**
   * Adds the decrypted `data` field to each credential.
   *
   * It only does this for credentials for which the user has the
   * `credential:update` scope.
   */
  includeData: booleanFromString.optional().default('false'),
}) {}
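`booleanFromString` itself is not part of this diff. As an assumption for illustration, a Zod schema with the behaviour these DTOs rely on could look like the sketch below; the repo's actual implementation may differ:

```
import { z } from 'zod';

// Hypothetical stand-in for ../../schemas/booleanFromString:
// accepts the literal strings 'true'/'false' and yields a real boolean.
export const booleanFromString = z.enum(['true', 'false']).transform((value) => value === 'true');

// With `booleanFromString.optional().default('false')` (as in CredentialsGetOneRequestQuery),
// an omitted query parameter parses to `false`, matching the "defaults to false" unit test.
```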
@ -0,0 +1,81 @@
|
|||
import { ActionResultRequestDto } from '../action-result-request.dto';
|
||||
|
||||
describe('ActionResultRequestDto', () => {
|
||||
const baseValidRequest = {
|
||||
path: '/test/path',
|
||||
nodeTypeAndVersion: { name: 'TestNode', version: 1 },
|
||||
handler: 'testHandler',
|
||||
currentNodeParameters: {},
|
||||
};
|
||||
|
||||
describe('Valid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'minimal valid request',
|
||||
request: baseValidRequest,
|
||||
},
|
||||
{
|
||||
name: 'request with payload',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
payload: { key: 'value' },
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'request with credentials',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
credentials: { testCredential: { id: 'cred1', name: 'Test Cred' } },
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'request with current node parameters',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
currentNodeParameters: { param1: 'value1' },
|
||||
},
|
||||
},
|
||||
])('should validate $name', ({ request }) => {
|
||||
const result = ActionResultRequestDto.safeParse(request);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'missing path',
|
||||
request: {
|
||||
nodeTypeAndVersion: { name: 'TestNode', version: 1 },
|
||||
handler: 'testHandler',
|
||||
},
|
||||
expectedErrorPath: ['path'],
|
||||
},
|
||||
{
|
||||
name: 'missing handler',
|
||||
request: {
|
||||
path: '/test/path',
|
||||
currentNodeParameters: {},
|
||||
nodeTypeAndVersion: { name: 'TestNode', version: 1 },
|
||||
},
|
||||
expectedErrorPath: ['handler'],
|
||||
},
|
||||
{
|
||||
name: 'invalid node version',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
nodeTypeAndVersion: { name: 'TestNode', version: 0 },
|
||||
},
|
||||
expectedErrorPath: ['nodeTypeAndVersion', 'version'],
|
||||
},
|
||||
])('should fail validation for $name', ({ request, expectedErrorPath }) => {
|
||||
const result = ActionResultRequestDto.safeParse(request);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
|
||||
if (expectedErrorPath) {
|
||||
expect(result.error?.issues[0].path).toEqual(expectedErrorPath);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,90 @@
|
|||
import { OptionsRequestDto } from '../options-request.dto';
|
||||
|
||||
describe('OptionsRequestDto', () => {
|
||||
const baseValidRequest = {
|
||||
path: '/test/path',
|
||||
nodeTypeAndVersion: { name: 'TestNode', version: 1 },
|
||||
currentNodeParameters: {},
|
||||
};
|
||||
|
||||
describe('Valid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'minimal valid request',
|
||||
request: baseValidRequest,
|
||||
},
|
||||
{
|
||||
name: 'request with method name',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
methodName: 'testMethod',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'request with load options',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
loadOptions: {
|
||||
routing: {
|
||||
operations: { someOperation: 'test' },
|
||||
output: { someOutput: 'test' },
|
||||
request: { someRequest: 'test' },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'request with credentials',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
credentials: { testCredential: { id: 'cred1', name: 'Test Cred' } },
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'request with current node parameters',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
currentNodeParameters: { param1: 'value1' },
|
||||
},
|
||||
},
|
||||
])('should validate $name', ({ request }) => {
|
||||
const result = OptionsRequestDto.safeParse(request);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'missing path',
|
||||
request: {
|
||||
nodeTypeAndVersion: { name: 'TestNode', version: 1 },
|
||||
},
|
||||
expectedErrorPath: ['path'],
|
||||
},
|
||||
{
|
||||
name: 'missing node type and version',
|
||||
request: {
|
||||
path: '/test/path',
|
||||
},
|
||||
expectedErrorPath: ['nodeTypeAndVersion'],
|
||||
},
|
||||
{
|
||||
name: 'invalid node version',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
nodeTypeAndVersion: { name: 'TestNode', version: 0 },
|
||||
},
|
||||
expectedErrorPath: ['nodeTypeAndVersion', 'version'],
|
||||
},
|
||||
])('should fail validation for $name', ({ request, expectedErrorPath }) => {
|
||||
const result = OptionsRequestDto.safeParse(request);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
|
||||
if (expectedErrorPath) {
|
||||
expect(result.error?.issues[0].path).toEqual(expectedErrorPath);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,95 @@
|
|||
import { ResourceLocatorRequestDto } from '../resource-locator-request.dto';
|
||||
|
||||
describe('ResourceLocatorRequestDto', () => {
|
||||
const baseValidRequest = {
|
||||
path: '/test/path',
|
||||
nodeTypeAndVersion: { name: 'TestNode', version: 1 },
|
||||
methodName: 'testMethod',
|
||||
currentNodeParameters: {},
|
||||
};
|
||||
|
||||
describe('Valid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'minimal valid request',
|
||||
request: baseValidRequest,
|
||||
},
|
||||
{
|
||||
name: 'request with filter',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
filter: 'testFilter',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'request with pagination token',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
paginationToken: 'token123',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'request with credentials',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
credentials: { testCredential: { id: 'cred1', name: 'Test Cred' } },
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'request with current node parameters',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
currentNodeParameters: { param1: 'value1' },
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'request with a semver node version',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
nodeTypeAndVersion: { name: 'TestNode', version: 1.1 },
|
||||
},
|
||||
},
|
||||
])('should validate $name', ({ request }) => {
|
||||
const result = ResourceLocatorRequestDto.safeParse(request);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'missing path',
|
||||
request: {
|
||||
nodeTypeAndVersion: { name: 'TestNode', version: 1 },
|
||||
methodName: 'testMethod',
|
||||
},
|
||||
expectedErrorPath: ['path'],
|
||||
},
|
||||
{
|
||||
name: 'missing method name',
|
||||
request: {
|
||||
path: '/test/path',
|
||||
nodeTypeAndVersion: { name: 'TestNode', version: 1 },
|
||||
currentNodeParameters: {},
|
||||
},
|
||||
expectedErrorPath: ['methodName'],
|
||||
},
|
||||
{
|
||||
name: 'invalid node version',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
nodeTypeAndVersion: { name: 'TestNode', version: 0 },
|
||||
},
|
||||
expectedErrorPath: ['nodeTypeAndVersion', 'version'],
|
||||
},
|
||||
])('should fail validation for $name', ({ request, expectedErrorPath }) => {
|
||||
const result = ResourceLocatorRequestDto.safeParse(request);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
|
||||
if (expectedErrorPath) {
|
||||
expect(result.error?.issues[0].path).toEqual(expectedErrorPath);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,74 @@
|
|||
import { ResourceMapperFieldsRequestDto } from '../resource-mapper-fields-request.dto';
|
||||
|
||||
describe('ResourceMapperFieldsRequestDto', () => {
|
||||
const baseValidRequest = {
|
||||
path: '/test/path',
|
||||
nodeTypeAndVersion: { name: 'TestNode', version: 1 },
|
||||
methodName: 'testMethod',
|
||||
currentNodeParameters: {},
|
||||
};
|
||||
|
||||
describe('Valid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'minimal valid request',
|
||||
request: baseValidRequest,
|
||||
},
|
||||
{
|
||||
name: 'request with credentials',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
credentials: { testCredential: { id: 'cred1', name: 'Test Cred' } },
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'request with current node parameters',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
currentNodeParameters: { param1: 'value1' },
|
||||
},
|
||||
},
|
||||
])('should validate $name', ({ request }) => {
|
||||
const result = ResourceMapperFieldsRequestDto.safeParse(request);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'missing path',
|
||||
request: {
|
||||
nodeTypeAndVersion: { name: 'TestNode', version: 1 },
|
||||
methodName: 'testMethod',
|
||||
},
|
||||
expectedErrorPath: ['path'],
|
||||
},
|
||||
{
|
||||
name: 'missing method name',
|
||||
request: {
|
||||
path: '/test/path',
|
||||
nodeTypeAndVersion: { name: 'TestNode', version: 1 },
|
||||
currentNodeParameters: {},
|
||||
},
|
||||
expectedErrorPath: ['methodName'],
|
||||
},
|
||||
{
|
||||
name: 'invalid node version',
|
||||
request: {
|
||||
...baseValidRequest,
|
||||
nodeTypeAndVersion: { name: 'TestNode', version: 0 },
|
||||
},
|
||||
expectedErrorPath: ['nodeTypeAndVersion', 'version'],
|
||||
},
|
||||
])('should fail validation for $name', ({ request, expectedErrorPath }) => {
|
||||
const result = ResourceMapperFieldsRequestDto.safeParse(request);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
|
||||
if (expectedErrorPath) {
|
||||
expect(result.error?.issues[0].path).toEqual(expectedErrorPath);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,11 @@
import type { IDataObject } from 'n8n-workflow';
import { z } from 'zod';

import { BaseDynamicParametersRequestDto } from './base-dynamic-parameters-request.dto';

export class ActionResultRequestDto extends BaseDynamicParametersRequestDto.extend({
	handler: z.string(),
	payload: z
		.union([z.object({}).catchall(z.any()) satisfies z.ZodType<IDataObject>, z.string()])
		.optional(),
}) {}
@ -0,0 +1,18 @@
import type { INodeCredentials, INodeParameters, INodeTypeNameVersion } from 'n8n-workflow';
import { z } from 'zod';
import { Z } from 'zod-class';

import { nodeVersionSchema } from '../../schemas/nodeVersion.schema';

export class BaseDynamicParametersRequestDto extends Z.class({
	path: z.string(),
	nodeTypeAndVersion: z.object({
		name: z.string(),
		version: nodeVersionSchema,
	}) satisfies z.ZodType<INodeTypeNameVersion>,
	currentNodeParameters: z.record(z.string(), z.any()) satisfies z.ZodType<INodeParameters>,
	methodName: z.string().optional(),
	credentials: z.record(z.string(), z.any()).optional() satisfies z.ZodType<
		INodeCredentials | undefined
	>,
}) {}
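
For orientation, a minimal usage sketch of these zod-class DTOs, mirroring the `safeParse` calls exercised by the tests above; the import path and payload values are illustrative only.

```typescript
import { BaseDynamicParametersRequestDto } from './base-dynamic-parameters-request.dto';

// Controllers validate the raw request body and branch on the result,
// exactly as the DTO tests above do.
const result = BaseDynamicParametersRequestDto.safeParse({
	path: '/test/path',
	nodeTypeAndVersion: { name: 'TestNode', version: 1.1 },
	currentNodeParameters: { param1: 'value1' },
});

if (result.success) {
	// result.data carries the parsed, typed payload
	console.log(result.data.nodeTypeAndVersion.version); // 1.1
} else {
	// The first issue's path points at the offending field, e.g. ['nodeTypeAndVersion', 'version']
	console.error(result.error.issues[0].path);
}
```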
@ -0,0 +1,18 @@
import type { ILoadOptions } from 'n8n-workflow';
import { z } from 'zod';

import { BaseDynamicParametersRequestDto } from './base-dynamic-parameters-request.dto';

export class OptionsRequestDto extends BaseDynamicParametersRequestDto.extend({
	loadOptions: z
		.object({
			routing: z
				.object({
					operations: z.any().optional(),
					output: z.any().optional(),
					request: z.any().optional(),
				})
				.optional(),
		})
		.optional() as z.ZodType<ILoadOptions | undefined>,
}) {}
@ -0,0 +1,9 @@
import { z } from 'zod';

import { BaseDynamicParametersRequestDto } from './base-dynamic-parameters-request.dto';

export class ResourceLocatorRequestDto extends BaseDynamicParametersRequestDto.extend({
	methodName: z.string(),
	filter: z.string().optional(),
	paginationToken: z.string().optional(),
}) {}
@ -0,0 +1,7 @@
import { z } from 'zod';

import { BaseDynamicParametersRequestDto } from './base-dynamic-parameters-request.dto';

export class ResourceMapperFieldsRequestDto extends BaseDynamicParametersRequestDto.extend({
	methodName: z.string(),
}) {}
@ -1,6 +1,39 @@
export { AiAskRequestDto } from './ai/ai-ask-request.dto';
export { AiChatRequestDto } from './ai/ai-chat-request.dto';
export { AiApplySuggestionRequestDto } from './ai/ai-apply-suggestion-request.dto';
export { AiFreeCreditsRequestDto } from './ai/ai-free-credits-request.dto';

export { LoginRequestDto } from './auth/login-request.dto';
export { ResolveSignupTokenQueryDto } from './auth/resolve-signup-token-query.dto';

export { OptionsRequestDto } from './dynamic-node-parameters/options-request.dto';
export { ResourceLocatorRequestDto } from './dynamic-node-parameters/resource-locator-request.dto';
export { ResourceMapperFieldsRequestDto } from './dynamic-node-parameters/resource-mapper-fields-request.dto';
export { ActionResultRequestDto } from './dynamic-node-parameters/action-result-request.dto';

export { InviteUsersRequestDto } from './invitation/invite-users-request.dto';
export { AcceptInvitationRequestDto } from './invitation/accept-invitation-request.dto';

export { OwnerSetupRequestDto } from './owner/owner-setup-request.dto';
export { DismissBannerRequestDto } from './owner/dismiss-banner-request.dto';

export { ForgotPasswordRequestDto } from './password-reset/forgot-password-request.dto';
export { ResolvePasswordTokenQueryDto } from './password-reset/resolve-password-token-query.dto';
export { ChangePasswordRequestDto } from './password-reset/change-password-request.dto';

export { SamlAcsDto } from './saml/saml-acs.dto';
export { SamlPreferences } from './saml/saml-preferences.dto';
export { SamlToggleDto } from './saml/saml-toggle.dto';

export { PasswordUpdateRequestDto } from './user/password-update-request.dto';
export { RoleChangeRequestDto } from './user/role-change-request.dto';
export { SettingsUpdateRequestDto } from './user/settings-update-request.dto';
export { UserUpdateRequestDto } from './user/user-update-request.dto';

export { CommunityRegisteredRequestDto } from './license/community-registered-request.dto';

export { VariableListRequestDto } from './variables/variables-list-request.dto';
export { CredentialsGetOneRequestQuery } from './credentials/credentials-get-one-request.dto';
export { CredentialsGetManyRequestQuery } from './credentials/credentials-get-many-request.dto';

export { ImportWorkflowFromUrlDto } from './workflows/import-workflow-from-url.dto';
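
A hypothetical consumer-side sketch, assuming the barrel above is published under the `@n8n/api-types` name implied by the workspace path; the payload mirrors the resource-locator tests earlier in this diff.

```typescript
// Package name assumed from packages/@n8n/api-types; adjust if it differs.
import { ResourceLocatorRequestDto } from '@n8n/api-types';

const parsed = ResourceLocatorRequestDto.safeParse({
	path: '/test/path',
	nodeTypeAndVersion: { name: 'TestNode', version: 1 },
	methodName: 'testMethod',
	currentNodeParameters: {},
});

console.log(parsed.success); // true
```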
@ -0,0 +1,94 @@
|
|||
import { AcceptInvitationRequestDto } from '../accept-invitation-request.dto';
|
||||
|
||||
describe('AcceptInvitationRequestDto', () => {
|
||||
const validUuid = '123e4567-e89b-12d3-a456-426614174000';
|
||||
|
||||
describe('Valid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'complete valid invitation acceptance',
|
||||
request: {
|
||||
inviterId: validUuid,
|
||||
firstName: 'John',
|
||||
lastName: 'Doe',
|
||||
password: 'SecurePassword123',
|
||||
},
|
||||
},
|
||||
])('should validate $name', ({ request }) => {
|
||||
const result = AcceptInvitationRequestDto.safeParse(request);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'missing inviterId',
|
||||
request: {
|
||||
firstName: 'John',
|
||||
lastName: 'Doe',
|
||||
password: 'SecurePassword123',
|
||||
},
|
||||
expectedErrorPath: ['inviterId'],
|
||||
},
|
||||
{
|
||||
name: 'invalid inviterId',
|
||||
request: {
|
||||
inviterId: 'not-a-valid-uuid',
|
||||
firstName: 'John',
|
||||
lastName: 'Doe',
|
||||
password: 'SecurePassword123',
|
||||
},
|
||||
expectedErrorPath: ['inviterId'],
|
||||
},
|
||||
{
|
||||
name: 'missing first name',
|
||||
request: {
|
||||
inviterId: validUuid,
|
||||
firstName: '',
|
||||
lastName: 'Doe',
|
||||
password: 'SecurePassword123',
|
||||
},
|
||||
expectedErrorPath: ['firstName'],
|
||||
},
|
||||
{
|
||||
name: 'missing last name',
|
||||
request: {
|
||||
inviterId: validUuid,
|
||||
firstName: 'John',
|
||||
lastName: '',
|
||||
password: 'SecurePassword123',
|
||||
},
|
||||
expectedErrorPath: ['lastName'],
|
||||
},
|
||||
{
|
||||
name: 'password too short',
|
||||
request: {
|
||||
inviterId: validUuid,
|
||||
firstName: 'John',
|
||||
lastName: 'Doe',
|
||||
password: 'short',
|
||||
},
|
||||
expectedErrorPath: ['password'],
|
||||
},
|
||||
{
|
||||
name: 'password without number',
|
||||
request: {
|
||||
inviterId: validUuid,
|
||||
firstName: 'John',
|
||||
lastName: 'Doe',
|
||||
password: 'NoNumberPassword',
|
||||
},
|
||||
expectedErrorPath: ['password'],
|
||||
},
|
||||
])('should fail validation for $name', ({ request, expectedErrorPath }) => {
|
||||
const result = AcceptInvitationRequestDto.safeParse(request);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
|
||||
if (expectedErrorPath) {
|
||||
expect(result.error?.issues[0].path).toEqual(expectedErrorPath);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,60 @@
|
|||
import { InviteUsersRequestDto } from '../invite-users-request.dto';
|
||||
|
||||
describe('InviteUsersRequestDto', () => {
|
||||
describe('Valid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'empty array',
|
||||
request: [],
|
||||
},
|
||||
{
|
||||
name: 'single user invitation with default role',
|
||||
request: [{ email: 'user@example.com' }],
|
||||
},
|
||||
{
|
||||
name: 'multiple user invitations with different roles',
|
||||
request: [
|
||||
{ email: 'user1@example.com', role: 'global:member' },
|
||||
{ email: 'user2@example.com', role: 'global:admin' },
|
||||
],
|
||||
},
|
||||
])('should validate $name', ({ request }) => {
|
||||
const result = InviteUsersRequestDto.safeParse(request);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should default role to global:member', () => {
|
||||
const result = InviteUsersRequestDto.safeParse([{ email: 'user@example.com' }]);
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.[0].role).toBe('global:member');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'invalid email',
|
||||
request: [{ email: 'invalid-email' }],
|
||||
expectedErrorPath: [0, 'email'],
|
||||
},
|
||||
{
|
||||
name: 'invalid role',
|
||||
request: [
|
||||
{
|
||||
email: 'user@example.com',
|
||||
role: 'invalid-role',
|
||||
},
|
||||
],
|
||||
expectedErrorPath: [0, 'role'],
|
||||
},
|
||||
])('should fail validation for $name', ({ request, expectedErrorPath }) => {
|
||||
const result = InviteUsersRequestDto.safeParse(request);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
|
||||
if (expectedErrorPath) {
|
||||
expect(result.error?.issues[0].path).toEqual(expectedErrorPath);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,11 @@
import { z } from 'zod';
import { Z } from 'zod-class';

import { passwordSchema } from '../../schemas/password.schema';

export class AcceptInvitationRequestDto extends Z.class({
	inviterId: z.string().uuid(),
	firstName: z.string().min(1, 'First name is required'),
	lastName: z.string().min(1, 'Last name is required'),
	password: passwordSchema,
}) {}
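
A short sketch of the acceptance flow this DTO guards, based on the cases in the tests above; the UUID, names, and passwords are sample values.

```typescript
import { AcceptInvitationRequestDto } from './accept-invitation-request.dto';

// A well-formed acceptance passes; a weak password is rejected by passwordSchema.
const ok = AcceptInvitationRequestDto.safeParse({
	inviterId: '123e4567-e89b-12d3-a456-426614174000',
	firstName: 'John',
	lastName: 'Doe',
	password: 'SecurePassword123',
});
console.log(ok.success); // true

const weak = AcceptInvitationRequestDto.safeParse({
	inviterId: '123e4567-e89b-12d3-a456-426614174000',
	firstName: 'John',
	lastName: 'Doe',
	password: 'short',
});
console.log(weak.success, weak.error?.issues[0].path); // false ['password']
```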
@ -0,0 +1,16 @@
import { z } from 'zod';

const roleSchema = z.enum(['global:member', 'global:admin']);

const invitedUserSchema = z.object({
	email: z.string().email(),
	role: roleSchema.default('global:member'),
});

const invitationsSchema = z.array(invitedUserSchema);

export class InviteUsersRequestDto extends Array<z.infer<typeof invitedUserSchema>> {
	static safeParse(data: unknown) {
		return invitationsSchema.safeParse(data);
	}
}
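
A sketch of the role-defaulting behaviour asserted in the test above; the e-mail addresses are sample values.

```typescript
import { InviteUsersRequestDto } from './invite-users-request.dto';

// The role defaults to 'global:member' when omitted.
const result = InviteUsersRequestDto.safeParse([
	{ email: 'user@example.com' },
	{ email: 'admin@example.com', role: 'global:admin' },
]);

if (result.success) {
	console.log(result.data[0].role); // 'global:member'
	console.log(result.data[1].role); // 'global:admin'
}
```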
@ -0,0 +1,64 @@
|
|||
import { bannerNameSchema } from '../../../schemas/bannerName.schema';
|
||||
import { DismissBannerRequestDto } from '../dismiss-banner-request.dto';
|
||||
|
||||
describe('DismissBannerRequestDto', () => {
|
||||
describe('Valid requests', () => {
|
||||
test.each(
|
||||
bannerNameSchema.options.map((banner) => ({
|
||||
name: `valid banner: ${banner}`,
|
||||
request: { banner },
|
||||
})),
|
||||
)('should validate $name', ({ request }) => {
|
||||
const result = DismissBannerRequestDto.safeParse(request);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'invalid banner string',
|
||||
request: {
|
||||
banner: 'not-a-valid-banner',
|
||||
},
|
||||
expectedErrorPath: ['banner'],
|
||||
},
|
||||
{
|
||||
name: 'non-string banner',
|
||||
request: {
|
||||
banner: 123,
|
||||
},
|
||||
expectedErrorPath: ['banner'],
|
||||
},
|
||||
])('should fail validation for $name', ({ request, expectedErrorPath }) => {
|
||||
const result = DismissBannerRequestDto.safeParse(request);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
|
||||
if (expectedErrorPath) {
|
||||
expect(result.error?.issues[0].path).toEqual(expectedErrorPath);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Optional banner', () => {
|
||||
test('should validate empty request', () => {
|
||||
const result = DismissBannerRequestDto.safeParse({});
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Exhaustive banner name check', () => {
|
||||
test('should have all banner names defined', () => {
|
||||
const expectedBanners = [
|
||||
'V1',
|
||||
'TRIAL_OVER',
|
||||
'TRIAL',
|
||||
'NON_PRODUCTION_LICENSE',
|
||||
'EMAIL_CONFIRMATION',
|
||||
];
|
||||
|
||||
expect(bannerNameSchema.options).toEqual(expectedBanners);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,93 @@
|
|||
import { OwnerSetupRequestDto } from '../owner-setup-request.dto';
|
||||
|
||||
describe('OwnerSetupRequestDto', () => {
|
||||
describe('Valid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'complete valid setup request',
|
||||
request: {
|
||||
email: 'owner@example.com',
|
||||
firstName: 'John',
|
||||
lastName: 'Doe',
|
||||
password: 'SecurePassword123',
|
||||
},
|
||||
},
|
||||
])('should validate $name', ({ request }) => {
|
||||
const result = OwnerSetupRequestDto.safeParse(request);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'invalid email',
|
||||
request: {
|
||||
email: 'invalid-email',
|
||||
firstName: 'John',
|
||||
lastName: 'Doe',
|
||||
password: 'SecurePassword123',
|
||||
},
|
||||
expectedErrorPath: ['email'],
|
||||
},
|
||||
{
|
||||
name: 'missing first name',
|
||||
request: {
|
||||
email: 'owner@example.com',
|
||||
firstName: '',
|
||||
lastName: 'Doe',
|
||||
password: 'SecurePassword123',
|
||||
},
|
||||
expectedErrorPath: ['firstName'],
|
||||
},
|
||||
{
|
||||
name: 'missing last name',
|
||||
request: {
|
||||
email: 'owner@example.com',
|
||||
firstName: 'John',
|
||||
lastName: '',
|
||||
password: 'SecurePassword123',
|
||||
},
|
||||
expectedErrorPath: ['lastName'],
|
||||
},
|
||||
{
|
||||
name: 'password too short',
|
||||
request: {
|
||||
email: 'owner@example.com',
|
||||
firstName: 'John',
|
||||
lastName: 'Doe',
|
||||
password: 'short',
|
||||
},
|
||||
expectedErrorPath: ['password'],
|
||||
},
|
||||
{
|
||||
name: 'password without number',
|
||||
request: {
|
||||
email: 'owner@example.com',
|
||||
firstName: 'John',
|
||||
lastName: 'Doe',
|
||||
password: 'NoNumberPassword',
|
||||
},
|
||||
expectedErrorPath: ['password'],
|
||||
},
|
||||
{
|
||||
name: 'password without uppercase letter',
|
||||
request: {
|
||||
email: 'owner@example.com',
|
||||
firstName: 'John',
|
||||
lastName: 'Doe',
|
||||
password: 'nouppercasepassword123',
|
||||
},
|
||||
expectedErrorPath: ['password'],
|
||||
},
|
||||
])('should fail validation for $name', ({ request, expectedErrorPath }) => {
|
||||
const result = OwnerSetupRequestDto.safeParse(request);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
|
||||
if (expectedErrorPath) {
|
||||
expect(result.error?.issues[0].path).toEqual(expectedErrorPath);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,7 @@
import { Z } from 'zod-class';

import { bannerNameSchema } from '../../schemas/bannerName.schema';

export class DismissBannerRequestDto extends Z.class({
	banner: bannerNameSchema.optional(),
}) {}
@ -0,0 +1,11 @@
import { z } from 'zod';
import { Z } from 'zod-class';

import { passwordSchema } from '../../schemas/password.schema';

export class OwnerSetupRequestDto extends Z.class({
	email: z.string().email(),
	firstName: z.string().min(1, 'First name is required'),
	lastName: z.string().min(1, 'Last name is required'),
	password: passwordSchema,
}) {}
@ -0,0 +1,114 @@
|
|||
import { ChangePasswordRequestDto } from '../change-password-request.dto';
|
||||
|
||||
describe('ChangePasswordRequestDto', () => {
|
||||
describe('Valid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'valid password reset with token',
|
||||
request: {
|
||||
token: 'valid-reset-token-with-sufficient-length',
|
||||
password: 'newSecurePassword123',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'valid password reset with MFA code',
|
||||
request: {
|
||||
token: 'another-valid-reset-token',
|
||||
password: 'newSecurePassword123',
|
||||
mfaCode: '123456',
|
||||
},
|
||||
},
|
||||
])('should validate $name', ({ request }) => {
|
||||
const result = ChangePasswordRequestDto.safeParse(request);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'missing token',
|
||||
request: { password: 'newSecurePassword123' },
|
||||
expectedErrorPath: ['token'],
|
||||
},
|
||||
{
|
||||
name: 'empty token',
|
||||
request: { token: '', password: 'newSecurePassword123' },
|
||||
expectedErrorPath: ['token'],
|
||||
},
|
||||
{
|
||||
name: 'short token',
|
||||
request: { token: 'short', password: 'newSecurePassword123' },
|
||||
expectedErrorPath: ['token'],
|
||||
},
|
||||
{
|
||||
name: 'missing password',
|
||||
request: { token: 'valid-reset-token' },
|
||||
expectedErrorPath: ['password'],
|
||||
},
|
||||
{
|
||||
name: 'password too short',
|
||||
request: {
|
||||
token: 'valid-reset-token',
|
||||
password: 'short',
|
||||
},
|
||||
expectedErrorPath: ['password'],
|
||||
},
|
||||
{
|
||||
name: 'password too long',
|
||||
request: {
|
||||
token: 'valid-reset-token',
|
||||
password: 'a'.repeat(65),
|
||||
},
|
||||
expectedErrorPath: ['password'],
|
||||
},
|
||||
{
|
||||
name: 'password without number',
|
||||
request: {
|
||||
token: 'valid-reset-token',
|
||||
password: 'NoNumberPassword',
|
||||
},
|
||||
expectedErrorPath: ['password'],
|
||||
},
|
||||
{
|
||||
name: 'password without uppercase letter',
|
||||
request: {
|
||||
token: 'valid-reset-token',
|
||||
password: 'nouppercasepassword123',
|
||||
},
|
||||
expectedErrorPath: ['password'],
|
||||
},
|
||||
])('should fail validation for $name', ({ request, expectedErrorPath }) => {
|
||||
const result = ChangePasswordRequestDto.safeParse(request);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
|
||||
if (expectedErrorPath) {
|
||||
expect(result.error?.issues[0].path).toEqual(expectedErrorPath);
|
||||
}
|
||||
});
|
||||
|
||||
describe('Edge cases', () => {
|
||||
test('should handle optional MFA code correctly', () => {
|
||||
const validRequest = {
|
||||
token: 'valid-reset-token',
|
||||
password: 'newSecurePassword123',
|
||||
mfaCode: undefined,
|
||||
};
|
||||
|
||||
const result = ChangePasswordRequestDto.safeParse(validRequest);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
test('should handle token with special characters', () => {
|
||||
const validRequest = {
|
||||
token: 'valid-reset-token-with-special-!@#$%^&*()_+',
|
||||
password: 'newSecurePassword123',
|
||||
};
|
||||
|
||||
const result = ChangePasswordRequestDto.safeParse(validRequest);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,47 @@
|
|||
import { ForgotPasswordRequestDto } from '../forgot-password-request.dto';
|
||||
|
||||
describe('ForgotPasswordRequestDto', () => {
|
||||
describe('Valid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'valid email',
|
||||
request: { email: 'test@example.com' },
|
||||
},
|
||||
{
|
||||
name: 'email with subdomain',
|
||||
request: { email: 'user@sub.example.com' },
|
||||
},
|
||||
])('should validate $name', ({ request }) => {
|
||||
const result = ForgotPasswordRequestDto.safeParse(request);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'invalid email format',
|
||||
request: { email: 'invalid-email' },
|
||||
expectedErrorPath: ['email'],
|
||||
},
|
||||
{
|
||||
name: 'missing email',
|
||||
request: {},
|
||||
expectedErrorPath: ['email'],
|
||||
},
|
||||
{
|
||||
name: 'empty email',
|
||||
request: { email: '' },
|
||||
expectedErrorPath: ['email'],
|
||||
},
|
||||
])('should fail validation for $name', ({ request, expectedErrorPath }) => {
|
||||
const result = ForgotPasswordRequestDto.safeParse(request);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
|
||||
if (expectedErrorPath) {
|
||||
expect(result.error?.issues[0].path).toEqual(expectedErrorPath);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,42 @@
|
|||
import { ResolvePasswordTokenQueryDto } from '../resolve-password-token-query.dto';
|
||||
|
||||
describe('ResolvePasswordTokenQueryDto', () => {
|
||||
describe('Valid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'valid token',
|
||||
request: { token: 'valid-reset-token' },
|
||||
},
|
||||
{
|
||||
name: 'long token',
|
||||
request: { token: 'x'.repeat(50) },
|
||||
},
|
||||
])('should validate $name', ({ request }) => {
|
||||
const result = ResolvePasswordTokenQueryDto.safeParse(request);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'missing token',
|
||||
request: {},
|
||||
expectedErrorPath: ['token'],
|
||||
},
|
||||
{
|
||||
name: 'empty token',
|
||||
request: { token: '' },
|
||||
expectedErrorPath: ['token'],
|
||||
},
|
||||
])('should fail validation for $name', ({ request, expectedErrorPath }) => {
|
||||
const result = ResolvePasswordTokenQueryDto.safeParse(request);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
|
||||
if (expectedErrorPath) {
|
||||
expect(result.error?.issues[0].path).toEqual(expectedErrorPath);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,11 @@
import { z } from 'zod';
import { Z } from 'zod-class';

import { passwordSchema } from '../../schemas/password.schema';
import { passwordResetTokenSchema } from '../../schemas/passwordResetToken.schema';

export class ChangePasswordRequestDto extends Z.class({
	token: passwordResetTokenSchema,
	password: passwordSchema,
	mfaCode: z.string().optional(),
}) {}
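
A sketch of the token handling, based on the change-password tests above; the token strings are sample values. `mfaCode` is optional and may simply be omitted.

```typescript
import { ChangePasswordRequestDto } from './change-password-request.dto';

// The reset token must be at least 10 characters (passwordResetTokenSchema below).
const result = ChangePasswordRequestDto.safeParse({
	token: 'valid-reset-token',
	password: 'newSecurePassword123',
});
console.log(result.success); // true

const short = ChangePasswordRequestDto.safeParse({
	token: 'short',
	password: 'newSecurePassword123',
});
console.log(short.success, short.error?.issues[0].path); // false ['token']
```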
@ -0,0 +1,6 @@
import { z } from 'zod';
import { Z } from 'zod-class';

export class ForgotPasswordRequestDto extends Z.class({
	email: z.string().email(),
}) {}
@ -0,0 +1,7 @@
import { Z } from 'zod-class';

import { passwordResetTokenSchema } from '../../schemas/passwordResetToken.schema';

export class ResolvePasswordTokenQueryDto extends Z.class({
	token: passwordResetTokenSchema,
}) {}
@ -0,0 +1,155 @@
|
|||
import { SamlPreferences } from '../saml-preferences.dto';
|
||||
|
||||
describe('SamlPreferences', () => {
|
||||
describe('Valid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'valid minimal configuration',
|
||||
request: {
|
||||
mapping: {
|
||||
email: 'user@example.com',
|
||||
firstName: 'John',
|
||||
lastName: 'Doe',
|
||||
userPrincipalName: 'johndoe',
|
||||
},
|
||||
metadata: '<xml>metadata</xml>',
|
||||
metadataUrl: 'https://example.com/metadata',
|
||||
loginEnabled: true,
|
||||
loginLabel: 'Login with SAML',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'valid full configuration',
|
||||
request: {
|
||||
mapping: {
|
||||
email: 'user@example.com',
|
||||
firstName: 'John',
|
||||
lastName: 'Doe',
|
||||
userPrincipalName: 'johndoe',
|
||||
},
|
||||
metadata: '<xml>metadata</xml>',
|
||||
metadataUrl: 'https://example.com/metadata',
|
||||
ignoreSSL: true,
|
||||
loginBinding: 'post',
|
||||
loginEnabled: true,
|
||||
loginLabel: 'Login with SAML',
|
||||
authnRequestsSigned: true,
|
||||
wantAssertionsSigned: true,
|
||||
wantMessageSigned: true,
|
||||
acsBinding: 'redirect',
|
||||
signatureConfig: {
|
||||
prefix: 'ds',
|
||||
location: {
|
||||
reference: '/samlp:Response/saml:Issuer',
|
||||
action: 'after',
|
||||
},
|
||||
},
|
||||
relayState: 'https://example.com/relay',
|
||||
},
|
||||
},
|
||||
])('should validate $name', ({ request }) => {
|
||||
const result = SamlPreferences.safeParse(request);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'invalid loginBinding',
|
||||
request: {
|
||||
loginBinding: 'invalid',
|
||||
},
|
||||
expectedErrorPath: ['loginBinding'],
|
||||
},
|
||||
{
|
||||
name: 'invalid acsBinding',
|
||||
request: {
|
||||
acsBinding: 'invalid',
|
||||
},
|
||||
expectedErrorPath: ['acsBinding'],
|
||||
},
|
||||
{
|
||||
name: 'invalid signatureConfig location action',
|
||||
request: {
|
||||
signatureConfig: {
|
||||
prefix: 'ds',
|
||||
location: {
|
||||
reference: '/samlp:Response/saml:Issuer',
|
||||
action: 'invalid',
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedErrorPath: ['signatureConfig', 'location', 'action'],
|
||||
},
|
||||
{
|
||||
name: 'missing signatureConfig location reference',
|
||||
request: {
|
||||
signatureConfig: {
|
||||
prefix: 'ds',
|
||||
location: {
|
||||
action: 'after',
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedErrorPath: ['signatureConfig', 'location', 'reference'],
|
||||
},
|
||||
{
|
||||
name: 'invalid mapping email',
|
||||
request: {
|
||||
mapping: {
|
||||
email: 123,
|
||||
firstName: 'John',
|
||||
lastName: 'Doe',
|
||||
userPrincipalName: 'johndoe',
|
||||
},
|
||||
},
|
||||
expectedErrorPath: ['mapping', 'email'],
|
||||
},
|
||||
])('should fail validation for $name', ({ request, expectedErrorPath }) => {
|
||||
const result = SamlPreferences.safeParse(request);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
|
||||
if (expectedErrorPath) {
|
||||
expect(result.error?.issues[0].path).toEqual(expectedErrorPath);
|
||||
}
|
||||
});
|
||||
|
||||
describe('Edge cases', () => {
|
||||
test('should handle optional fields correctly', () => {
|
||||
const validRequest = {
|
||||
mapping: undefined,
|
||||
metadata: undefined,
|
||||
metadataUrl: undefined,
|
||||
loginEnabled: undefined,
|
||||
loginLabel: undefined,
|
||||
};
|
||||
|
||||
const result = SamlPreferences.safeParse(validRequest);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
test('should handle default values correctly', () => {
|
||||
const validRequest = {};
|
||||
|
||||
const result = SamlPreferences.safeParse(validRequest);
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.ignoreSSL).toBe(false);
|
||||
expect(result.data?.loginBinding).toBe('redirect');
|
||||
expect(result.data?.authnRequestsSigned).toBe(false);
|
||||
expect(result.data?.wantAssertionsSigned).toBe(true);
|
||||
expect(result.data?.wantMessageSigned).toBe(true);
|
||||
expect(result.data?.acsBinding).toBe('post');
|
||||
expect(result.data?.signatureConfig).toEqual({
|
||||
prefix: 'ds',
|
||||
location: {
|
||||
reference: '/samlp:Response/saml:Issuer',
|
||||
action: 'after',
|
||||
},
|
||||
});
|
||||
expect(result.data?.relayState).toBe('');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
6
packages/@n8n/api-types/src/dto/saml/saml-acs.dto.ts
Normal file
|
@ -0,0 +1,6 @@
import { z } from 'zod';
import { Z } from 'zod-class';

export class SamlAcsDto extends Z.class({
	RelayState: z.string().optional(),
}) {}
50
packages/@n8n/api-types/src/dto/saml/saml-preferences.dto.ts
Normal file
|
@ -0,0 +1,50 @@
import { z } from 'zod';
import { Z } from 'zod-class';

const SamlLoginBindingSchema = z.enum(['redirect', 'post']);

/** Schema for configuring the signature in SAML requests/responses. */
const SignatureConfigSchema = z.object({
	prefix: z.string().default('ds'),
	location: z.object({
		reference: z.string(),
		action: z.enum(['before', 'after', 'prepend', 'append']),
	}),
});

export class SamlPreferences extends Z.class({
	/** Mapping of SAML attributes to user fields. */
	mapping: z
		.object({
			email: z.string(),
			firstName: z.string(),
			lastName: z.string(),
			userPrincipalName: z.string(),
		})
		.optional(),
	/** SAML metadata in XML format. */
	metadata: z.string().optional(),
	metadataUrl: z.string().optional(),

	ignoreSSL: z.boolean().default(false),
	loginBinding: SamlLoginBindingSchema.default('redirect'),
	/** Whether SAML login is enabled. */
	loginEnabled: z.boolean().optional(),
	/** Label for the SAML login button on the Auth screen. */
	loginLabel: z.string().optional(),

	authnRequestsSigned: z.boolean().default(false),
	wantAssertionsSigned: z.boolean().default(true),
	wantMessageSigned: z.boolean().default(true),

	acsBinding: SamlLoginBindingSchema.default('post'),
	signatureConfig: SignatureConfigSchema.default({
		prefix: 'ds',
		location: {
			reference: '/samlp:Response/saml:Issuer',
			action: 'after',
		},
	}),

	relayState: z.string().default(''),
}) {}
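
A sketch of the defaults applied on an empty payload, matching the 'default values' test above.

```typescript
import { SamlPreferences } from './saml-preferences.dto';

// Parsing an empty object fills in every defaulted field.
const result = SamlPreferences.safeParse({});
if (result.success) {
	console.log(result.data.loginBinding); // 'redirect'
	console.log(result.data.acsBinding); // 'post'
	console.log(result.data.signatureConfig.location.action); // 'after'
	console.log(result.data.relayState); // ''
}
```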
6
packages/@n8n/api-types/src/dto/saml/saml-toggle.dto.ts
Normal file
|
@ -0,0 +1,6 @@
import { z } from 'zod';
import { Z } from 'zod-class';

export class SamlToggleDto extends Z.class({
	loginEnabled: z.boolean(),
}) {}
@ -0,0 +1,63 @@
|
|||
import { ImportWorkflowFromUrlDto } from '../import-workflow-from-url.dto';
|
||||
|
||||
describe('ImportWorkflowFromUrlDto', () => {
|
||||
describe('Valid requests', () => {
|
||||
test('should validate a valid workflow JSON URL', () => {
|
||||
const result = ImportWorkflowFromUrlDto.safeParse({
|
||||
url: 'https://example.com/workflow.json',
|
||||
});
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Invalid requests', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'invalid URL (not ending with .json)',
|
||||
url: 'https://example.com/workflow',
|
||||
expectedErrorPath: ['url'],
|
||||
},
|
||||
{
|
||||
name: 'invalid URL (missing protocol)',
|
||||
url: 'example.com/workflow.json',
|
||||
expectedErrorPath: ['url'],
|
||||
},
|
||||
{
|
||||
name: 'invalid URL (not a URL)',
|
||||
url: 'not-a-url',
|
||||
expectedErrorPath: ['url'],
|
||||
},
|
||||
{
|
||||
name: 'missing URL',
|
||||
url: undefined,
|
||||
expectedErrorPath: ['url'],
|
||||
},
|
||||
{
|
||||
name: 'null URL',
|
||||
url: null,
|
||||
expectedErrorPath: ['url'],
|
||||
},
|
||||
{
|
||||
name: 'invalid URL (ends with .json but not a valid URL)',
|
||||
url: 'not-a-url.json',
|
||||
expectedErrorPath: ['url'],
|
||||
},
|
||||
{
|
||||
name: 'valid URL with query parameters',
|
||||
url: 'https://example.com/workflow.json?param=value',
|
||||
},
|
||||
{
|
||||
name: 'valid URL with fragments',
|
||||
url: 'https://example.com/workflow.json#section',
|
||||
},
|
||||
])('should fail validation for $name', ({ url, expectedErrorPath }) => {
|
||||
const result = ImportWorkflowFromUrlDto.safeParse({ url });
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
|
||||
if (expectedErrorPath) {
|
||||
expect(result.error?.issues[0].path).toEqual(expectedErrorPath);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,6 @@
import { z } from 'zod';
import { Z } from 'zod-class';

export class ImportWorkflowFromUrlDto extends Z.class({
	url: z.string().url().endsWith('.json'),
}) {}
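
A sketch of the URL rules enforced here, matching the tests above: the value must be an absolute URL whose final characters are `.json`, so query strings or fragments after the extension are rejected.

```typescript
import { ImportWorkflowFromUrlDto } from './import-workflow-from-url.dto';

console.log(
	ImportWorkflowFromUrlDto.safeParse({ url: 'https://example.com/workflow.json' }).success,
); // true
console.log(
	ImportWorkflowFromUrlDto.safeParse({ url: 'https://example.com/workflow' }).success,
); // false
console.log(
	ImportWorkflowFromUrlDto.safeParse({ url: 'https://example.com/workflow.json?x=1' }).success,
); // false
```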
@ -163,6 +163,10 @@ export interface FrontendSettings {
|
|||
pruneTime: number;
|
||||
licensePruneTime: number;
|
||||
};
|
||||
aiCredits: {
|
||||
enabled: boolean;
|
||||
credits: number;
|
||||
};
|
||||
pruning?: {
|
||||
isEnabled: boolean;
|
||||
maxAge: number;
|
||||
|
|
|
@ -7,3 +7,6 @@ export type * from './user';
|
|||
|
||||
export type { Collaborator } from './push/collaboration';
|
||||
export type { SendWorkerStatusMessage } from './push/worker';
|
||||
|
||||
export type { BannerName } from './schemas/bannerName.schema';
|
||||
export { passwordSchema } from './schemas/password.schema';
|
||||
|
|
|
@ -0,0 +1,28 @@
|
|||
import { nodeVersionSchema } from '../nodeVersion.schema';
|
||||
|
||||
describe('nodeVersionSchema', () => {
|
||||
describe('valid versions', () => {
|
||||
test.each([
|
||||
[1, 'single digit'],
|
||||
[2, 'single digit'],
|
||||
[1.0, 'major.minor with zero minor'],
|
||||
[1.2, 'major.minor'],
|
||||
[10.5, 'major.minor with double digits'],
|
||||
])('should accept %s as a valid version (%s)', (version) => {
|
||||
const validated = nodeVersionSchema.parse(version);
|
||||
expect(validated).toBe(version);
|
||||
});
|
||||
});
|
||||
|
||||
describe('invalid versions', () => {
|
||||
test.each([
|
||||
['not-a-number', 'non-number input'],
|
||||
['1.2.3', 'more than two parts'],
|
||||
['1.a', 'non-numeric characters'],
|
||||
['1.2.3', 'more than two parts as string'],
|
||||
])('should reject %s as an invalid version (%s)', (version) => {
|
||||
const check = () => nodeVersionSchema.parse(version);
|
||||
expect(check).toThrowError();
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,54 @@
|
|||
import { passwordSchema } from '../password.schema';
|
||||
|
||||
describe('passwordSchema', () => {
|
||||
test('should throw on empty password', () => {
|
||||
const check = () => passwordSchema.parse('');
|
||||
|
||||
expect(check).toThrowError('Password must be 8 to 64 characters long');
|
||||
});
|
||||
|
||||
test('should return same password if valid', () => {
|
||||
const validPassword = 'abcd1234X';
|
||||
|
||||
const validated = passwordSchema.parse(validPassword);
|
||||
|
||||
expect(validated).toBe(validPassword);
|
||||
});
|
||||
|
||||
test('should require at least one uppercase letter', () => {
|
||||
const invalidPassword = 'abcd1234';
|
||||
|
||||
const failingCheck = () => passwordSchema.parse(invalidPassword);
|
||||
|
||||
expect(failingCheck).toThrowError('Password must contain at least 1 uppercase letter.');
|
||||
});
|
||||
|
||||
test('should require at least one number', () => {
|
||||
const validPassword = 'abcd1234X';
|
||||
const invalidPassword = 'abcdEFGH';
|
||||
|
||||
const validated = passwordSchema.parse(validPassword);
|
||||
|
||||
expect(validated).toBe(validPassword);
|
||||
|
||||
const check = () => passwordSchema.parse(invalidPassword);
|
||||
|
||||
expect(check).toThrowError('Password must contain at least 1 number.');
|
||||
});
|
||||
|
||||
test('should require a minimum length of 8 characters', () => {
|
||||
const invalidPassword = 'a'.repeat(7);
|
||||
|
||||
const check = () => passwordSchema.parse(invalidPassword);
|
||||
|
||||
expect(check).toThrowError('Password must be 8 to 64 characters long.');
|
||||
});
|
||||
|
||||
test('should require a maximum length of 64 characters', () => {
|
||||
const invalidPassword = 'a'.repeat(65);
|
||||
|
||||
const check = () => passwordSchema.parse(invalidPassword);
|
||||
|
||||
expect(check).toThrowError('Password must be 8 to 64 characters long.');
|
||||
});
|
||||
});
|
11
packages/@n8n/api-types/src/schemas/bannerName.schema.ts
Normal file
|
@ -0,0 +1,11 @@
import { z } from 'zod';

export const bannerNameSchema = z.enum([
	'V1',
	'TRIAL_OVER',
	'TRIAL',
	'NON_PRODUCTION_LICENSE',
	'EMAIL_CONFIRMATION',
]);

export type BannerName = z.infer<typeof bannerNameSchema>;
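
A small sketch, assuming the schema is imported from its relative path, of narrowing an untrusted string to the `BannerName` union.

```typescript
import { bannerNameSchema, type BannerName } from './bannerName.schema';

const parsed = bannerNameSchema.safeParse('TRIAL_OVER');
if (parsed.success) {
	const banner: BannerName = parsed.data; // 'V1' | 'TRIAL_OVER' | 'TRIAL' | ...
	console.log(banner);
}
```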
3
packages/@n8n/api-types/src/schemas/booleanFromString.ts
Normal file
|
@ -0,0 +1,3 @@
import { z } from 'zod';

export const booleanFromString = z.enum(['true', 'false']).transform((value) => value === 'true');
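
A tiny sketch of the transform; it accepts only the literal strings `'true'` and `'false'` and yields a real boolean, which is handy for query-string flags.

```typescript
import { booleanFromString } from './booleanFromString';

console.log(booleanFromString.parse('true')); // true
console.log(booleanFromString.parse('false')); // false
console.log(booleanFromString.safeParse('yes').success); // false
```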
17
packages/@n8n/api-types/src/schemas/nodeVersion.schema.ts
Normal file
|
@ -0,0 +1,17 @@
import { z } from 'zod';

export const nodeVersionSchema = z
	.number()
	.min(1)
	.refine(
		(val) => {
			const parts = String(val).split('.');
			return (
				(parts.length === 1 && !isNaN(Number(parts[0]))) ||
				(parts.length === 2 && !isNaN(Number(parts[0])) && !isNaN(Number(parts[1])))
			);
		},
		{
			message: 'Invalid node version. Must be in format: major.minor',
		},
	);
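
A brief sketch of which values pass, matching the nodeVersion.schema tests above: plain and major.minor numbers are accepted, anything else is rejected.

```typescript
import { nodeVersionSchema } from './nodeVersion.schema';

console.log(nodeVersionSchema.parse(1)); // 1
console.log(nodeVersionSchema.parse(1.2)); // 1.2
console.log(nodeVersionSchema.safeParse(0).success); // false (below min(1))
console.log(nodeVersionSchema.safeParse('1.2.3').success); // false (not a number)
```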
16
packages/@n8n/api-types/src/schemas/password.schema.ts
Normal file
|
@ -0,0 +1,16 @@
import { z } from 'zod';

// TODO: Delete these from `cli` after all password-validation code starts using this schema
const minLength = 8;
const maxLength = 64;

export const passwordSchema = z
	.string()
	.min(minLength, `Password must be ${minLength} to ${maxLength} characters long.`)
	.max(maxLength, `Password must be ${minLength} to ${maxLength} characters long.`)
	.refine((password) => /\d/.test(password), {
		message: 'Password must contain at least 1 number.',
	})
	.refine((password) => /[A-Z]/.test(password), {
		message: 'Password must contain at least 1 uppercase letter.',
	});
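
A quick sketch of the messages this schema produces, matching the password.schema tests above.

```typescript
import { passwordSchema } from './password.schema';

console.log(passwordSchema.safeParse('abcd1234X').success); // true
console.log(passwordSchema.safeParse('abcdEFGH').error?.issues[0].message);
// 'Password must contain at least 1 number.'
console.log(passwordSchema.safeParse('abcd1234').error?.issues[0].message);
// 'Password must contain at least 1 uppercase letter.'
```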
@ -0,0 +1,3 @@
import { z } from 'zod';

export const passwordResetTokenSchema = z.string().min(10, 'Token too short');
@ -21,7 +21,7 @@
|
|||
"dist/**/*"
|
||||
],
|
||||
"dependencies": {
|
||||
"reflect-metadata": "0.2.2",
|
||||
"typedi": "catalog:"
|
||||
"@n8n/di": "workspace:*",
|
||||
"reflect-metadata": "catalog:"
|
||||
}
|
||||
}
|
||||
|
|
8
packages/@n8n/config/src/configs/aiAssistant.config.ts
Normal file
|
@ -0,0 +1,8 @@
import { Config, Env } from '../decorators';

@Config
export class AiAssistantConfig {
	/** Base URL of the AI assistant service */
	@Env('N8N_AI_ASSISTANT_BASE_URL')
	baseUrl: string = '';
}
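
A hedged sketch of reading the new setting through the DI container, assuming the `@n8n/config` and `@n8n/di` package names implied by the workspace paths; the URL is a placeholder and the env var is assumed to be set before the config is first resolved.

```typescript
import { Container } from '@n8n/di';
import { GlobalConfig } from '@n8n/config'; // package name assumed from packages/@n8n/config

// Assumption: env is read when GlobalConfig is first resolved from the container.
process.env.N8N_AI_ASSISTANT_BASE_URL = 'https://ai.example.com';

const globalConfig = Container.get(GlobalConfig);
console.log(globalConfig.aiAssistant.baseUrl); // 'https://ai.example.com'
```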
@ -9,6 +9,7 @@ export const LOG_SCOPES = [
|
|||
'multi-main-setup',
|
||||
'pruning',
|
||||
'pubsub',
|
||||
'push',
|
||||
'redis',
|
||||
'scaling',
|
||||
'waiting-executions',
|
||||
|
@ -70,10 +71,13 @@ export class LoggingConfig {
|
|||
* - `external-secrets`
|
||||
* - `license`
|
||||
* - `multi-main-setup`
|
||||
* - `pruning`
|
||||
* - `pubsub`
|
||||
* - `push`
|
||||
* - `redis`
|
||||
* - `scaling`
|
||||
* - `waiting-executions`
|
||||
* - `task-runner`
|
||||
*
|
||||
* @example
|
||||
* `N8N_LOG_SCOPES=license`
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import 'reflect-metadata';
|
||||
import { Container, Service } from '@n8n/di';
|
||||
import { readFileSync } from 'fs';
|
||||
import { Container, Service } from 'typedi';
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types
|
||||
type Class = Function;
|
||||
|
@ -35,7 +35,7 @@ export const Config: ClassDecorator = (ConfigClass: Class) => {
|
|||
|
||||
for (const [key, { type, envName }] of classMetadata) {
|
||||
if (typeof type === 'function' && globalMetadata.has(type)) {
|
||||
config[key] = Container.get(type);
|
||||
config[key] = Container.get(type as Constructable);
|
||||
} else if (envName) {
|
||||
const value = readEnv(envName);
|
||||
if (value === undefined) continue;
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
import { AiAssistantConfig } from './configs/aiAssistant.config';
|
||||
import { CacheConfig } from './configs/cache.config';
|
||||
import { CredentialsConfig } from './configs/credentials.config';
|
||||
import { DatabaseConfig } from './configs/database.config';
|
||||
|
@ -121,4 +122,7 @@ export class GlobalConfig {
|
|||
|
||||
@Nested
|
||||
diagnostics: DiagnosticsConfig;
|
||||
|
||||
@Nested
|
||||
aiAssistant: AiAssistantConfig;
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import { Container } from '@n8n/di';
|
||||
import fs from 'fs';
|
||||
import { mock } from 'jest-mock-extended';
|
||||
import { Container } from 'typedi';
|
||||
|
||||
import { GlobalConfig } from '../src/index';
|
||||
|
||||
|
@ -289,6 +289,9 @@ describe('GlobalConfig', () => {
|
|||
apiHost: 'https://ph.n8n.io',
|
||||
},
|
||||
},
|
||||
aiAssistant: {
|
||||
baseUrl: '',
|
||||
},
|
||||
};
|
||||
|
||||
it('should use all default values when no env variables are defined', () => {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import { Container } from 'typedi';
|
||||
import { Container } from '@n8n/di';
|
||||
|
||||
import { Config, Env } from '../src/decorators';
|
||||
|
||||
|
|
|
@ -9,5 +9,6 @@
|
|||
"baseUrl": "src",
|
||||
"tsBuildInfoFile": "dist/typecheck.tsbuildinfo"
|
||||
},
|
||||
"include": ["src/**/*.ts", "test/**/*.ts"]
|
||||
"include": ["src/**/*.ts", "test/**/*.ts"],
|
||||
"references": [{ "path": "../di/tsconfig.build.json" }]
|
||||
}
|
||||
|
|
7
packages/@n8n/di/.eslintrc.js
Normal file
|
@ -0,0 +1,7 @@
|
|||
const sharedOptions = require('@n8n_io/eslint-config/shared');
|
||||
|
||||
/** @type {import('@types/eslint').ESLint.ConfigData} */
|
||||
module.exports = {
|
||||
extends: ['@n8n_io/eslint-config/base'],
|
||||
...sharedOptions(__dirname),
|
||||
};
|
52
packages/@n8n/di/README.md
Normal file
|
@ -0,0 +1,52 @@
|
|||
## @n8n/di
|
||||
|
||||
`@n8n/di` is a dependency injection (DI) container library, based on [`typedi`](https://github.com/typestack/typedi).
|
||||
|
||||
n8n no longer uses `typedi` because:
|
||||
|
||||
- `typedi` is no longer officially maintained
|
||||
- Need for future-proofing, e.g. stage-3 decorators
|
||||
- Small enough that it is worth the maintenance burden
|
||||
- Easier to customize, e.g. to simplify unit tests
|
||||
|
||||
### Usage
|
||||
|
||||
```typescript
|
||||
// from https://github.com/typestack/typedi/blob/develop/README.md
|
||||
import { Container, Service } from 'typedi';
|
||||
|
||||
@Service()
|
||||
class ExampleInjectedService {
|
||||
printMessage() {
|
||||
console.log('I am alive!');
|
||||
}
|
||||
}
|
||||
|
||||
@Service()
|
||||
class ExampleService {
|
||||
constructor(
|
||||
// because we annotated ExampleInjectedService with the @Service()
|
||||
// decorator TypeDI will automatically inject an instance of
|
||||
// ExampleInjectedService here when the ExampleService class is requested
|
||||
// from TypeDI.
|
||||
public injectedService: ExampleInjectedService
|
||||
) {}
|
||||
}
|
||||
|
||||
const serviceInstance = Container.get(ExampleService);
|
||||
// we request an instance of ExampleService from TypeDI
|
||||
|
||||
serviceInstance.injectedService.printMessage();
|
||||
// logs "I am alive!" to the console
|
||||
```
|
||||
|
||||
Requires enabling these flags in `tsconfig.json`:
|
||||
|
||||
```json
|
||||
{
|
||||
"compilerOptions": {
|
||||
"experimentalDecorators": true,
|
||||
"emitDecoratorMetadata": true
|
||||
}
|
||||
}
|
||||
```
|
2
packages/@n8n/di/jest.config.js
Normal file
|
@ -0,0 +1,2 @@
|
|||
/** @type {import('jest').Config} */
|
||||
module.exports = require('../../../jest.config');
|
26
packages/@n8n/di/package.json
Normal file
|
@ -0,0 +1,26 @@
|
|||
{
|
||||
"name": "@n8n/di",
|
||||
"version": "0.1.0",
|
||||
"scripts": {
|
||||
"clean": "rimraf dist .turbo",
|
||||
"dev": "pnpm watch",
|
||||
"typecheck": "tsc --noEmit",
|
||||
"build": "tsc -p tsconfig.build.json",
|
||||
"format": "biome format --write .",
|
||||
"format:check": "biome ci .",
|
||||
"lint": "eslint .",
|
||||
"lintfix": "eslint . --fix",
|
||||
"watch": "tsc -p tsconfig.build.json --watch",
|
||||
"test": "jest",
|
||||
"test:dev": "jest --watch"
|
||||
},
|
||||
"main": "dist/di.js",
|
||||
"module": "src/di.ts",
|
||||
"types": "dist/di.d.ts",
|
||||
"files": [
|
||||
"dist/**/*"
|
||||
],
|
||||
"dependencies": {
|
||||
"reflect-metadata": "catalog:"
|
||||
}
|
||||
}
|
287
packages/@n8n/di/src/__tests__/di.test.ts
Normal file
|
@ -0,0 +1,287 @@
|
|||
import { Container, Service } from '../di';
|
||||
|
||||
@Service()
|
||||
class SimpleService {
|
||||
getValue() {
|
||||
return 'simple';
|
||||
}
|
||||
}
|
||||
|
||||
@Service()
|
||||
class DependentService {
|
||||
constructor(readonly simple: SimpleService) {}
|
||||
|
||||
getValue() {
|
||||
return this.simple.getValue() + '-dependent';
|
||||
}
|
||||
}
|
||||
|
||||
class CustomFactory {
|
||||
getValue() {
|
||||
return 'factory-made';
|
||||
}
|
||||
}
|
||||
|
||||
@Service({ factory: () => new CustomFactory() })
|
||||
class FactoryService {
|
||||
getValue() {
|
||||
return 'should-not-be-called';
|
||||
}
|
||||
}
|
||||
|
||||
abstract class AbstractService {
|
||||
abstract getValue(): string;
|
||||
}
|
||||
|
||||
@Service()
|
||||
class ConcreteService extends AbstractService {
|
||||
getValue(): string {
|
||||
return 'concrete';
|
||||
}
|
||||
}
|
||||
|
||||
describe('DI Container', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
Container.reset();
|
||||
});
|
||||
|
||||
describe('basic functionality', () => {
|
||||
it('should create a simple instance', () => {
|
||||
const instance = Container.get(SimpleService);
|
||||
expect(instance).toBeInstanceOf(SimpleService);
|
||||
expect(instance.getValue()).toBe('simple');
|
||||
});
|
||||
|
||||
it('should return same instance on multiple gets', () => {
|
||||
const instance1 = Container.get(SimpleService);
|
||||
const instance2 = Container.get(SimpleService);
|
||||
expect(instance1).toBe(instance2);
|
||||
});
|
||||
|
||||
it('should handle classes with no dependencies (empty constructor)', () => {
|
||||
@Service()
|
||||
class EmptyConstructorService {}
|
||||
|
||||
const instance = Container.get(EmptyConstructorService);
|
||||
expect(instance).toBeInstanceOf(EmptyConstructorService);
|
||||
});
|
||||
|
||||
it('should throw when trying to resolve an undecorated class', () => {
|
||||
class UnDecoratedService {}
|
||||
|
||||
expect(() => Container.get(UnDecoratedService)).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('dependency injection', () => {
|
||||
it('should inject dependencies correctly', () => {
|
||||
const dependent = Container.get(DependentService);
|
||||
expect(dependent).toBeInstanceOf(DependentService);
|
||||
expect(dependent.getValue()).toBe('simple-dependent');
|
||||
expect(dependent.simple).toBeInstanceOf(SimpleService);
|
||||
});
|
||||
|
||||
it('should handle deep dependency chains', () => {
|
||||
@Service()
|
||||
class ServiceC {
|
||||
getValue() {
|
||||
return 'C';
|
||||
}
|
||||
}
|
||||
|
||||
@Service()
|
||||
class ServiceB {
|
||||
constructor(private c: ServiceC) {}
|
||||
|
||||
getValue() {
|
||||
return this.c.getValue() + 'B';
|
||||
}
|
||||
}
|
||||
|
||||
@Service()
|
||||
class ServiceA {
|
||||
constructor(private b: ServiceB) {}
|
||||
|
||||
getValue() {
|
||||
return this.b.getValue() + 'A';
|
||||
}
|
||||
}
|
||||
|
||||
const instance = Container.get(ServiceA);
|
||||
expect(instance.getValue()).toBe('CBA');
|
||||
});
|
||||
|
||||
it('should return undefined for non-decorated dependencies in resolution chain', () => {
|
||||
class NonDecoratedDep {}
|
||||
|
||||
@Service()
|
||||
class ServiceWithNonDecoratedDep {
|
||||
constructor(readonly dep: NonDecoratedDep) {}
|
||||
}
|
||||
|
||||
const instance = Container.get(ServiceWithNonDecoratedDep);
|
||||
expect(instance).toBeInstanceOf(ServiceWithNonDecoratedDep);
|
||||
expect(instance.dep).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('factory handling', () => {
|
||||
it('should use factory when provided', () => {
|
||||
const instance = Container.get(FactoryService);
|
||||
expect(instance).toBeInstanceOf(CustomFactory);
|
||||
expect(instance.getValue()).toBe('factory-made');
|
||||
});
|
||||
|
||||
it('should preserve factory metadata when setting instance', () => {
|
||||
const customInstance = new CustomFactory();
|
||||
Container.set(FactoryService, customInstance);
|
||||
const instance = Container.get(FactoryService);
|
||||
expect(instance).toBe(customInstance);
|
||||
});
|
||||
|
||||
it('should preserve factory when resetting container', () => {
|
||||
const factoryInstance1 = Container.get(FactoryService);
|
||||
Container.reset();
|
||||
const factoryInstance2 = Container.get(FactoryService);
|
||||
|
||||
expect(factoryInstance1).not.toBe(factoryInstance2);
|
||||
expect(factoryInstance2.getValue()).toBe('factory-made');
|
||||
});
|
||||
|
||||
it('should throw error when factory throws', () => {
|
||||
@Service({
|
||||
factory: () => {
|
||||
throw new Error('Factory error');
|
||||
},
|
||||
})
|
||||
class ErrorFactoryService {}
|
||||
|
||||
expect(() => Container.get(ErrorFactoryService)).toThrow('Factory error');
|
||||
});
|
||||
});
|
||||
|
||||
describe('instance management', () => {
|
||||
it('should allow manual instance setting', () => {
|
||||
const customInstance = new SimpleService();
|
||||
Container.set(SimpleService, customInstance);
|
||||
const instance = Container.get(SimpleService);
|
||||
expect(instance).toBe(customInstance);
|
||||
});
|
||||
});
|
||||
|
||||
describe('abstract classes', () => {
|
||||
it('should throw when trying to instantiate an abstract class directly', () => {
|
||||
@Service()
|
||||
abstract class TestAbstractClass {
|
||||
abstract doSomething(): void;
|
||||
|
||||
// Add a concrete method to make the class truly abstract at runtime
|
||||
constructor() {
|
||||
if (this.constructor === TestAbstractClass) {
|
||||
throw new TypeError('Abstract class "TestAbstractClass" cannot be instantiated');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
expect(() => Container.get(TestAbstractClass)).toThrow(
|
||||
'[DI] TestAbstractClass is an abstract class, and cannot be instantiated',
|
||||
);
|
||||
});
|
||||
|
||||
it('should allow setting an implementation for an abstract class', () => {
|
||||
const concrete = new ConcreteService();
|
||||
Container.set(AbstractService, concrete);
|
||||
|
||||
const instance = Container.get(AbstractService);
|
||||
expect(instance).toBe(concrete);
|
||||
expect(instance.getValue()).toBe('concrete');
|
||||
});
|
||||
|
||||
it('should allow factory for abstract class', () => {
|
||||
@Service({ factory: () => new ConcreteService() })
|
||||
abstract class FactoryAbstractService {
|
||||
abstract getValue(): string;
|
||||
}
|
||||
|
||||
const instance = Container.get(FactoryAbstractService);
|
||||
expect(instance).toBeInstanceOf(ConcreteService);
|
||||
expect(instance.getValue()).toBe('concrete');
|
||||
});
|
||||
});
|
||||
|
||||
describe('inheritance', () => {
|
||||
it('should handle inheritance in injectable classes', () => {
|
||||
@Service()
|
||||
class BaseService {
|
||||
getValue() {
|
||||
return 'base';
|
||||
}
|
||||
}
|
||||
|
||||
@Service()
|
||||
class DerivedService extends BaseService {
|
||||
getValue() {
|
||||
return 'derived-' + super.getValue();
|
||||
}
|
||||
}
|
||||
|
||||
const instance = Container.get(DerivedService);
|
||||
expect(instance.getValue()).toBe('derived-base');
|
||||
});
|
||||
|
||||
it('should maintain separate instances for base and derived classes', () => {
|
||||
@Service()
|
||||
class BaseService {
|
||||
getValue() {
|
||||
return 'base';
|
||||
}
|
||||
}
|
||||
|
||||
@Service()
|
||||
class DerivedService extends BaseService {}
|
||||
|
||||
const baseInstance = Container.get(BaseService);
|
||||
const derivedInstance = Container.get(DerivedService);
|
||||
|
||||
expect(baseInstance).not.toBe(derivedInstance);
|
||||
expect(baseInstance).toBeInstanceOf(BaseService);
|
||||
expect(derivedInstance).toBeInstanceOf(DerivedService);
|
||||
});
|
||||
});
|
||||
|
||||
describe('type registration checking', () => {
|
||||
it('should return true for registered classes', () => {
|
||||
expect(Container.has(SimpleService)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false for unregistered classes', () => {
|
||||
class UnregisteredService {}
|
||||
expect(Container.has(UnregisteredService)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true for abstract classes with implementations', () => {
|
||||
const concrete = new ConcreteService();
|
||||
Container.set(AbstractService, concrete);
|
||||
expect(Container.has(AbstractService)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true for factory-provided services before instantiation', () => {
|
||||
expect(Container.has(FactoryService)).toBe(true);
|
||||
});
|
||||
|
||||
it('should maintain registration after reset', () => {
|
||||
expect(Container.has(SimpleService)).toBe(true);
|
||||
Container.reset();
|
||||
expect(Container.has(SimpleService)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true after manual instance setting', () => {
|
||||
class ManualService {}
|
||||
expect(Container.has(ManualService)).toBe(false);
|
||||
|
||||
Container.set(ManualService, new ManualService());
|
||||
expect(Container.has(ManualService)).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
142
packages/@n8n/di/src/di.ts
Normal file
|
@ -0,0 +1,142 @@
|
|||
import 'reflect-metadata';
|
||||
|
||||
/**
|
||||
* Represents a class constructor type that can be instantiated with 'new'
|
||||
* @template T The type of instance the constructor creates
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
export type Constructable<T = unknown> = new (...args: any[]) => T;
|
||||
|
||||
type AbstractConstructable<T = unknown> = abstract new (...args: unknown[]) => T;
|
||||
|
||||
type ServiceIdentifier<T = unknown> = Constructable<T> | AbstractConstructable<T>;
|
||||
|
||||
interface Metadata<T = unknown> {
|
||||
instance?: T;
|
||||
factory?: () => T;
|
||||
}
|
||||
|
||||
interface Options<T> {
|
||||
factory?: () => T;
|
||||
}
|
||||
|
||||
const instances = new Map<ServiceIdentifier, Metadata>();
|
||||
|
||||
/**
|
||||
* Decorator that marks a class as available for dependency injection.
|
||||
* @param options Configuration options for the injectable class
|
||||
* @param options.factory Optional factory function to create instances of this class
|
||||
* @returns A class decorator to be applied to the target class
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types
|
||||
export function Service<T = unknown>(): Function;
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types
|
||||
export function Service<T = unknown>(options: Options<T>): Function;
|
||||
export function Service<T>({ factory }: Options<T> = {}) {
|
||||
return function (target: Constructable<T>) {
|
||||
instances.set(target, { factory });
|
||||
return target;
|
||||
};
|
||||
}
|
||||
|
||||
class DIError extends Error {
|
||||
constructor(message: string) {
|
||||
super(`[DI] ${message}`);
|
||||
}
|
||||
}
|
||||
|
||||
class ContainerClass {
|
||||
/** Stack to track types being resolved to detect circular dependencies */
|
||||
private readonly resolutionStack: ServiceIdentifier[] = [];
|
||||
|
||||
/**
|
||||
* Checks if a type is registered in the container
|
||||
* @template T The type to check for
|
||||
* @param type The constructor of the type to check
|
||||
* @returns True if the type is registered (has metadata), false otherwise
|
||||
*/
|
||||
has<T>(type: ServiceIdentifier<T>): boolean {
|
||||
return instances.has(type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves or creates an instance of the specified type from the container
|
||||
* @template T The type of instance to retrieve
|
||||
* @param type The constructor of the type to retrieve
|
||||
* @returns An instance of the specified type with all dependencies injected
|
||||
* @throws {DIError} If circular dependencies are detected or if the type is not injectable
|
||||
*/
|
||||
get<T>(type: ServiceIdentifier<T>): T {
|
||||
const { resolutionStack } = this;
|
||||
const metadata = instances.get(type) as Metadata<T>;
|
||||
if (!metadata) {
|
||||
// Special case: Allow undefined returns for non-decorated constructor params
|
||||
// when resolving a dependency chain (i.e., resolutionStack not empty)
|
||||
if (resolutionStack.length) return undefined as T;
|
||||
throw new DIError(`${type.name} is not decorated with ${Service.name}`);
|
||||
}
|
||||
|
||||
if (metadata?.instance) return metadata.instance as T;
|
||||
|
||||
// Check for circular dependencies before proceeding with instantiation
|
||||
if (resolutionStack.includes(type)) {
|
||||
throw new DIError(
|
||||
`Circular dependency detected. ${resolutionStack.map((t) => t.name).join(' -> ')}`,
|
||||
);
|
||||
}
|
||||
|
||||
// Add current type to resolution stack before resolving dependencies
|
||||
resolutionStack.push(type);
|
||||
|
||||
try {
|
||||
let instance: T;
|
||||
|
||||
if (metadata?.factory) {
|
||||
instance = metadata.factory();
|
||||
} else {
|
||||
const paramTypes = (Reflect.getMetadata('design:paramtypes', type) ??
|
||||
[]) as Constructable[];
|
||||
const dependencies = paramTypes.map(<P>(paramType: Constructable<P>) =>
|
||||
this.get(paramType),
|
||||
);
|
||||
// Create new instance with resolved dependencies
|
||||
instance = new (type as Constructable)(...dependencies) as T;
|
||||
}
|
||||
|
||||
instances.set(type, { ...metadata, instance });
|
||||
return instance;
|
||||
} catch (error) {
|
||||
if (error instanceof TypeError && error.message.toLowerCase().includes('abstract')) {
|
||||
throw new DIError(`${type.name} is an abstract class, and cannot be instantiated`);
|
||||
}
|
||||
throw error;
|
||||
} finally {
|
||||
resolutionStack.pop();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Manually sets an instance for a specific type in the container
|
||||
* @template T The type of instance being set
|
||||
* @param type The constructor of the type to set. This can also be an abstract class
|
||||
* @param instance The instance to store in the container
|
||||
*/
|
||||
set<T>(type: ServiceIdentifier<T>, instance: T): void {
|
||||
// Preserve any existing metadata (like factory) when setting new instance
|
||||
const metadata = instances.get(type) ?? {};
|
||||
instances.set(type, { ...metadata, instance });
|
||||
}
|
||||
|
||||
/** Clears all instantiated instances from the container while preserving type registrations */
|
||||
reset(): void {
|
||||
for (const metadata of instances.values()) {
|
||||
delete metadata.instance;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Global dependency injection container instance
|
||||
* Used to retrieve and manage class instances and their dependencies
|
||||
*/
|
||||
export const Container = new ContainerClass();
|
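For orientation, a minimal usage sketch of the container above (assuming the package entry re-exports Container and Service from src/di.ts, and that experimentalDecorators and emitDecoratorMetadata are enabled as in the tsconfig below; the class names are illustrative, not part of this commit):

// Illustrative sketch only, not part of this commit.
import { Container, Service } from './di';

@Service()
class Logger {
	info(message: string) {
		console.log(message);
	}
}

@Service()
class UserService {
	constructor(private readonly logger: Logger) {}

	greet(name: string) {
		this.logger.info(`Hello, ${name}`);
	}
}

// Constructor dependencies are resolved from the emitted design:paramtypes metadata.
Container.get(UserService).greet('n8n');

// Abstract classes cannot be instantiated, but an implementation can be bound via set().
abstract class Mailer {
	abstract send(to: string): void;
}
Container.set(Mailer, { send: (to: string) => console.log(`mail to ${to}`) } as Mailer);
console.log(Container.has(Mailer)); // true

// reset() drops cached instances but keeps the @Service registrations.
Container.reset();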
11	packages/@n8n/di/tsconfig.build.json	Normal file
@@ -0,0 +1,11 @@
{
	"extends": ["./tsconfig.json", "../../../tsconfig.build.json"],
	"compilerOptions": {
		"composite": true,
		"rootDir": "src",
		"outDir": "dist",
		"tsBuildInfoFile": "dist/build.tsbuildinfo"
	},
	"include": ["src/**/*.ts"],
	"exclude": ["src/**/__tests__/**"]
}
12	packages/@n8n/di/tsconfig.json	Normal file
@@ -0,0 +1,12 @@
{
	"extends": "../../../tsconfig.json",
	"compilerOptions": {
		"rootDir": ".",
		"types": ["node", "jest"],
		"baseUrl": "src",
		"tsBuildInfoFile": "dist/typecheck.tsbuildinfo",
		"experimentalDecorators": true,
		"emitDecoratorMetadata": true
	},
	"include": ["src/**/*.ts"]
}
|
|
@@ -15,15 +15,15 @@ import { getConnectionHintNoticeField } from '@utils/sharedFields';
|
|||
|
||||
export class ToolVectorStore implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Vector Store Tool',
|
||||
displayName: 'Vector Store Question Answer Tool',
|
||||
name: 'toolVectorStore',
|
||||
icon: 'fa:database',
|
||||
iconColor: 'black',
|
||||
group: ['transform'],
|
||||
version: [1],
|
||||
description: 'Retrieve context from vector store',
|
||||
description: 'Answer questions with a vector store',
|
||||
defaults: {
|
||||
name: 'Vector Store Tool',
|
||||
name: 'Answer questions with a vector store',
|
||||
},
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
|
@@ -60,20 +60,23 @@ export class ToolVectorStore implements INodeType {
|
|||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiAgent]),
|
||||
{
|
||||
displayName: 'Name',
|
||||
displayName: 'Data Name',
|
||||
name: 'name',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. company_knowledge_base',
|
||||
placeholder: 'e.g. users_info',
|
||||
validateType: 'string-alphanumeric',
|
||||
description: 'Name of the vector store',
|
||||
description:
|
||||
'Name of the data in vector store. This will be used to fill this tool description: Useful for when you need to answer questions about [name]. Whenever you need information about [data description], you should ALWAYS use this. Input should be a fully formed question.',
|
||||
},
|
||||
{
|
||||
displayName: 'Description',
|
||||
displayName: 'Description of Data',
|
||||
name: 'description',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'Retrieves data about [insert information about your data here]...',
|
||||
placeholder: "[Describe your data here, e.g. a user's name, email, etc.]",
|
||||
description:
|
||||
'Describe the data in vector store. This will be used to fill this tool description: Useful for when you need to answer questions about [name]. Whenever you need information about [data description], you should ALWAYS use this. Input should be a fully formed question.',
|
||||
typeOptions: {
|
||||
rows: 3,
|
||||
},
|
||||
|
|
|
@@ -1,567 +1,42 @@
|
|||
import type { CallbackManagerForToolRun } from '@langchain/core/callbacks/manager';
|
||||
import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools';
|
||||
import type { JSONSchema7 } from 'json-schema';
|
||||
import get from 'lodash/get';
|
||||
import isObject from 'lodash/isObject';
|
||||
import type { SetField, SetNodeOptions } from 'n8n-nodes-base/dist/nodes/Set/v2/helpers/interfaces';
|
||||
import * as manual from 'n8n-nodes-base/dist/nodes/Set/v2/manual.mode';
|
||||
import type {
|
||||
IExecuteWorkflowInfo,
|
||||
INodeExecutionData,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
IWorkflowBase,
|
||||
ISupplyDataFunctions,
|
||||
SupplyData,
|
||||
ExecutionError,
|
||||
ExecuteWorkflowData,
|
||||
IDataObject,
|
||||
INodeParameterResourceLocator,
|
||||
ITaskMetadata,
|
||||
} from 'n8n-workflow';
|
||||
import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow';
|
||||
import type { IVersionedNodeType, INodeTypeBaseDescription } from 'n8n-workflow';
|
||||
import { VersionedNodeType } from 'n8n-workflow';
|
||||
|
||||
import { jsonSchemaExampleField, schemaTypeField, inputSchemaField } from '@utils/descriptions';
|
||||
import { convertJsonSchemaToZod, generateSchema } from '@utils/schemaParsing';
|
||||
import { getConnectionHintNoticeField } from '@utils/sharedFields';
|
||||
import { ToolWorkflowV1 } from './v1/ToolWorkflowV1.node';
|
||||
import { ToolWorkflowV2 } from './v2/ToolWorkflowV2.node';
|
||||
|
||||
import type { DynamicZodObject } from '../../../types/zod.types';
|
||||
|
||||
export class ToolWorkflow implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Call n8n Workflow Tool',
|
||||
name: 'toolWorkflow',
|
||||
icon: 'fa:network-wired',
|
||||
iconColor: 'black',
|
||||
group: ['transform'],
|
||||
version: [1, 1.1, 1.2, 1.3],
|
||||
description: 'Uses another n8n workflow as a tool. Allows packaging any n8n node(s) as a tool.',
|
||||
defaults: {
|
||||
name: 'Call n8n Workflow Tool',
|
||||
},
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Tools'],
|
||||
Tools: ['Recommended Tools'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.toolworkflow/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiTool],
|
||||
outputNames: ['Tool'],
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiAgent]),
|
||||
{
|
||||
displayName:
|
||||
'See an example of a workflow to suggest meeting slots using AI <a href="/templates/1953" target="_blank">here</a>.',
|
||||
name: 'noticeTemplateExample',
|
||||
type: 'notice',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Name',
|
||||
name: 'name',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'My_Color_Tool',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'@version': [1],
|
||||
},
|
||||
export class ToolWorkflow extends VersionedNodeType {
|
||||
constructor() {
|
||||
const baseDescription: INodeTypeBaseDescription = {
|
||||
displayName: 'Call n8n Sub-Workflow Tool',
|
||||
name: 'toolWorkflow',
|
||||
icon: 'fa:network-wired',
|
||||
group: ['transform'],
|
||||
description:
|
||||
'Uses another n8n workflow as a tool. Allows packaging any n8n node(s) as a tool.',
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Tools'],
|
||||
Tools: ['Recommended Tools'],
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Name',
|
||||
name: 'name',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. My_Color_Tool',
|
||||
validateType: 'string-alphanumeric',
|
||||
description:
|
||||
'The name of the function to be called, could contain letters, numbers, and underscores only',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'@version': [{ _cnd: { gte: 1.1 } }],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Description',
|
||||
name: 'description',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder:
|
||||
'Call this tool to get a random color. The input should be a string with comma separated names of colors to exclude.',
|
||||
typeOptions: {
|
||||
rows: 3,
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
displayName:
|
||||
'This tool will call the workflow you define below, and look in the last node for the response. The workflow needs to start with an Execute Workflow trigger',
|
||||
name: 'executeNotice',
|
||||
type: 'notice',
|
||||
default: '',
|
||||
},
|
||||
|
||||
{
|
||||
displayName: 'Source',
|
||||
name: 'source',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
name: 'Database',
|
||||
value: 'database',
|
||||
description: 'Load the workflow from the database by ID',
|
||||
},
|
||||
{
|
||||
name: 'Define Below',
|
||||
value: 'parameter',
|
||||
description: 'Pass the JSON code of a workflow',
|
||||
},
|
||||
],
|
||||
default: 'database',
|
||||
description: 'Where to get the workflow to execute from',
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
// source:database
|
||||
// ----------------------------------
|
||||
{
|
||||
displayName: 'Workflow ID',
|
||||
name: 'workflowId',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
source: ['database'],
|
||||
'@version': [{ _cnd: { lte: 1.1 } }],
|
||||
},
|
||||
},
|
||||
default: '',
|
||||
required: true,
|
||||
description: 'The workflow to execute',
|
||||
hint: 'Can be found in the URL of the workflow',
|
||||
},
|
||||
|
||||
{
|
||||
displayName: 'Workflow',
|
||||
name: 'workflowId',
|
||||
type: 'workflowSelector',
|
||||
displayOptions: {
|
||||
show: {
|
||||
source: ['database'],
|
||||
'@version': [{ _cnd: { gte: 1.2 } }],
|
||||
},
|
||||
},
|
||||
default: '',
|
||||
required: true,
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
// source:parameter
|
||||
// ----------------------------------
|
||||
{
|
||||
displayName: 'Workflow JSON',
|
||||
name: 'workflowJson',
|
||||
type: 'json',
|
||||
typeOptions: {
|
||||
rows: 10,
|
||||
},
|
||||
displayOptions: {
|
||||
show: {
|
||||
source: ['parameter'],
|
||||
},
|
||||
},
|
||||
default: '\n\n\n\n\n\n\n\n\n',
|
||||
required: true,
|
||||
description: 'The workflow JSON code to execute',
|
||||
},
|
||||
// ----------------------------------
|
||||
// For all
|
||||
// ----------------------------------
|
||||
{
|
||||
displayName: 'Field to Return',
|
||||
name: 'responsePropertyName',
|
||||
type: 'string',
|
||||
default: 'response',
|
||||
required: true,
|
||||
hint: 'The field in the last-executed node of the workflow that contains the response',
|
||||
description:
|
||||
'Where to find the data that this tool should return. n8n will look in the output of the last-executed node of the workflow for a field with this name, and return its value.',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'@version': [{ _cnd: { lt: 1.3 } }],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Extra Workflow Inputs',
|
||||
name: 'fields',
|
||||
placeholder: 'Add Value',
|
||||
type: 'fixedCollection',
|
||||
description:
|
||||
"These will be output by the 'execute workflow' trigger of the workflow being called",
|
||||
typeOptions: {
|
||||
multipleValues: true,
|
||||
sortable: true,
|
||||
},
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
name: 'values',
|
||||
displayName: 'Values',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Name',
|
||||
name: 'name',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. fieldName',
|
||||
description:
|
||||
'Name of the field to set the value of. Supports dot-notation. Example: data.person[0].name.',
|
||||
requiresDataPath: 'single',
|
||||
},
|
||||
{
|
||||
displayName: 'Type',
|
||||
name: 'type',
|
||||
type: 'options',
|
||||
description: 'The field value type',
|
||||
// eslint-disable-next-line n8n-nodes-base/node-param-options-type-unsorted-items
|
||||
options: [
|
||||
{
|
||||
name: 'String',
|
||||
value: 'stringValue',
|
||||
},
|
||||
{
|
||||
name: 'Number',
|
||||
value: 'numberValue',
|
||||
},
|
||||
{
|
||||
name: 'Boolean',
|
||||
value: 'booleanValue',
|
||||
},
|
||||
{
|
||||
name: 'Array',
|
||||
value: 'arrayValue',
|
||||
},
|
||||
{
|
||||
name: 'Object',
|
||||
value: 'objectValue',
|
||||
},
|
||||
],
|
||||
default: 'stringValue',
|
||||
},
|
||||
{
|
||||
displayName: 'Value',
|
||||
name: 'stringValue',
|
||||
type: 'string',
|
||||
default: '',
|
||||
displayOptions: {
|
||||
show: {
|
||||
type: ['stringValue'],
|
||||
},
|
||||
},
|
||||
validateType: 'string',
|
||||
ignoreValidationDuringExecution: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Value',
|
||||
name: 'numberValue',
|
||||
type: 'string',
|
||||
default: '',
|
||||
displayOptions: {
|
||||
show: {
|
||||
type: ['numberValue'],
|
||||
},
|
||||
},
|
||||
validateType: 'number',
|
||||
ignoreValidationDuringExecution: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Value',
|
||||
name: 'booleanValue',
|
||||
type: 'options',
|
||||
default: 'true',
|
||||
options: [
|
||||
{
|
||||
name: 'True',
|
||||
value: 'true',
|
||||
},
|
||||
{
|
||||
name: 'False',
|
||||
value: 'false',
|
||||
},
|
||||
],
|
||||
displayOptions: {
|
||||
show: {
|
||||
type: ['booleanValue'],
|
||||
},
|
||||
},
|
||||
validateType: 'boolean',
|
||||
ignoreValidationDuringExecution: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Value',
|
||||
name: 'arrayValue',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. [ arrayItem1, arrayItem2, arrayItem3 ]',
|
||||
displayOptions: {
|
||||
show: {
|
||||
type: ['arrayValue'],
|
||||
},
|
||||
},
|
||||
validateType: 'array',
|
||||
ignoreValidationDuringExecution: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Value',
|
||||
name: 'objectValue',
|
||||
type: 'json',
|
||||
default: '={}',
|
||||
typeOptions: {
|
||||
rows: 2,
|
||||
},
|
||||
displayOptions: {
|
||||
show: {
|
||||
type: ['objectValue'],
|
||||
},
|
||||
},
|
||||
validateType: 'object',
|
||||
ignoreValidationDuringExecution: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
// ----------------------------------
|
||||
// Output Parsing
|
||||
// ----------------------------------
|
||||
{
|
||||
displayName: 'Specify Input Schema',
|
||||
name: 'specifyInputSchema',
|
||||
type: 'boolean',
|
||||
description:
|
||||
'Whether to specify the schema for the function. This would require the LLM to provide the input in the correct format and would validate it against the schema.',
|
||||
noDataExpression: true,
|
||||
default: false,
|
||||
},
|
||||
{ ...schemaTypeField, displayOptions: { show: { specifyInputSchema: [true] } } },
|
||||
jsonSchemaExampleField,
|
||||
inputSchemaField,
|
||||
],
|
||||
};
|
||||
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const workflowProxy = this.getWorkflowDataProxy(0);
|
||||
|
||||
const name = this.getNodeParameter('name', itemIndex) as string;
|
||||
const description = this.getNodeParameter('description', itemIndex) as string;
|
||||
|
||||
let subExecutionId: string | undefined;
|
||||
let subWorkflowId: string | undefined;
|
||||
|
||||
const useSchema = this.getNodeParameter('specifyInputSchema', itemIndex) as boolean;
|
||||
let tool: DynamicTool | DynamicStructuredTool | undefined = undefined;
|
||||
|
||||
const runFunction = async (
|
||||
query: string | IDataObject,
|
||||
runManager?: CallbackManagerForToolRun,
|
||||
): Promise<string> => {
|
||||
const source = this.getNodeParameter('source', itemIndex) as string;
|
||||
const workflowInfo: IExecuteWorkflowInfo = {};
|
||||
if (source === 'database') {
|
||||
// Read workflow from database
|
||||
const nodeVersion = this.getNode().typeVersion;
|
||||
if (nodeVersion <= 1.1) {
|
||||
workflowInfo.id = this.getNodeParameter('workflowId', itemIndex) as string;
|
||||
} else {
|
||||
const { value } = this.getNodeParameter(
|
||||
'workflowId',
|
||||
itemIndex,
|
||||
{},
|
||||
) as INodeParameterResourceLocator;
|
||||
workflowInfo.id = value as string;
|
||||
}
|
||||
|
||||
subWorkflowId = workflowInfo.id;
|
||||
} else if (source === 'parameter') {
|
||||
// Read workflow from parameter
|
||||
const workflowJson = this.getNodeParameter('workflowJson', itemIndex) as string;
|
||||
try {
|
||||
workflowInfo.code = JSON.parse(workflowJson) as IWorkflowBase;
|
||||
|
||||
// the subworkflow is the same as the parent workflow
|
||||
subWorkflowId = workflowProxy.$workflow.id;
|
||||
} catch (error) {
|
||||
throw new NodeOperationError(
|
||||
this.getNode(),
|
||||
`The provided workflow is not valid JSON: "${(error as Error).message}"`,
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
itemIndex,
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.toolworkflow/',
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const rawData: IDataObject = { query };
|
||||
|
||||
const workflowFieldsJson = this.getNodeParameter('fields.values', itemIndex, [], {
|
||||
rawExpressions: true,
|
||||
}) as SetField[];
|
||||
|
||||
// Copied from Set Node v2
|
||||
for (const entry of workflowFieldsJson) {
|
||||
if (entry.type === 'objectValue' && (entry.objectValue as string).startsWith('=')) {
|
||||
rawData[entry.name] = (entry.objectValue as string).replace(/^=+/, '');
|
||||
}
|
||||
}
|
||||
|
||||
const options: SetNodeOptions = {
|
||||
include: 'all',
|
||||
};
|
||||
|
||||
const newItem = await manual.execute.call(
|
||||
this,
|
||||
{ json: { query } },
|
||||
itemIndex,
|
||||
options,
|
||||
rawData,
|
||||
this.getNode(),
|
||||
);
|
||||
|
||||
const items = [newItem] as INodeExecutionData[];
|
||||
|
||||
let receivedData: ExecuteWorkflowData;
|
||||
try {
|
||||
receivedData = await this.executeWorkflow(workflowInfo, items, runManager?.getChild(), {
|
||||
parentExecution: {
|
||||
executionId: workflowProxy.$execution.id,
|
||||
workflowId: workflowProxy.$workflow.id,
|
||||
},
|
||||
});
|
||||
subExecutionId = receivedData.executionId;
|
||||
} catch (error) {
|
||||
// Make sure a valid error gets returned that can be JSON-serialized, else it will
|
||||
// not show up in the frontend
|
||||
throw new NodeOperationError(this.getNode(), error as Error);
|
||||
}
|
||||
|
||||
const response: string | undefined = get(receivedData, 'data[0][0].json') as
|
||||
| string
|
||||
| undefined;
|
||||
if (response === undefined) {
|
||||
throw new NodeOperationError(
|
||||
this.getNode(),
|
||||
'There was an error: "The workflow did not return a response"',
|
||||
);
|
||||
}
|
||||
|
||||
return response;
|
||||
],
|
||||
},
|
||||
},
|
||||
defaultVersion: 2,
|
||||
};
|
||||
|
||||
const toolHandler = async (
|
||||
query: string | IDataObject,
|
||||
runManager?: CallbackManagerForToolRun,
|
||||
): Promise<string> => {
|
||||
const { index } = this.addInputData(NodeConnectionType.AiTool, [[{ json: { query } }]]);
|
||||
|
||||
let response: string = '';
|
||||
let executionError: ExecutionError | undefined;
|
||||
try {
|
||||
response = await runFunction(query, runManager);
|
||||
} catch (error) {
|
||||
// TODO: Do some more testing. Issues here should actually fail the workflow
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
|
||||
executionError = error;
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
|
||||
response = `There was an error: "${error.message}"`;
|
||||
}
|
||||
|
||||
if (typeof response === 'number') {
|
||||
response = (response as number).toString();
|
||||
}
|
||||
|
||||
if (isObject(response)) {
|
||||
response = JSON.stringify(response, null, 2);
|
||||
}
|
||||
|
||||
if (typeof response !== 'string') {
|
||||
// TODO: Do some more testing. Issues here should actually fail the workflow
|
||||
executionError = new NodeOperationError(this.getNode(), 'Wrong output type returned', {
|
||||
description: `The response property should be a string, but it is an ${typeof response}`,
|
||||
});
|
||||
response = `There was an error: "${executionError.message}"`;
|
||||
}
|
||||
|
||||
let metadata: ITaskMetadata | undefined;
|
||||
if (subExecutionId && subWorkflowId) {
|
||||
metadata = {
|
||||
subExecution: {
|
||||
executionId: subExecutionId,
|
||||
workflowId: subWorkflowId,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (executionError) {
|
||||
void this.addOutputData(NodeConnectionType.AiTool, index, executionError, metadata);
|
||||
} else {
|
||||
// Output always needs to be an object
|
||||
// so we try to parse the response as JSON and if it fails we just return the string wrapped in an object
|
||||
const json = jsonParse<IDataObject>(response, { fallbackValue: { response } });
|
||||
void this.addOutputData(NodeConnectionType.AiTool, index, [[{ json }]], metadata);
|
||||
}
|
||||
return response;
|
||||
};
|
||||
|
||||
const functionBase = {
|
||||
name,
|
||||
description,
|
||||
func: toolHandler,
|
||||
};
|
||||
|
||||
if (useSchema) {
|
||||
try {
|
||||
// We initialize these even though one of them will always be empty
|
||||
// it makes it easier to navigate the ternary operator
|
||||
const jsonExample = this.getNodeParameter('jsonSchemaExample', itemIndex, '') as string;
|
||||
const inputSchema = this.getNodeParameter('inputSchema', itemIndex, '') as string;
|
||||
|
||||
const schemaType = this.getNodeParameter('schemaType', itemIndex) as 'fromJson' | 'manual';
|
||||
const jsonSchema =
|
||||
schemaType === 'fromJson'
|
||||
? generateSchema(jsonExample)
|
||||
: jsonParse<JSONSchema7>(inputSchema);
|
||||
|
||||
const zodSchema = convertJsonSchemaToZod<DynamicZodObject>(jsonSchema);
|
||||
|
||||
tool = new DynamicStructuredTool({
|
||||
schema: zodSchema,
|
||||
...functionBase,
|
||||
});
|
||||
} catch (error) {
|
||||
throw new NodeOperationError(
|
||||
this.getNode(),
|
||||
'Error during parsing of JSON Schema. \n ' + error,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
tool = new DynamicTool(functionBase);
|
||||
}
|
||||
|
||||
return {
|
||||
response: tool,
|
||||
const nodeVersions: IVersionedNodeType['nodeVersions'] = {
|
||||
1: new ToolWorkflowV1(baseDescription),
|
||||
1.1: new ToolWorkflowV1(baseDescription),
|
||||
1.2: new ToolWorkflowV1(baseDescription),
|
||||
1.3: new ToolWorkflowV1(baseDescription),
|
||||
2: new ToolWorkflowV2(baseDescription),
|
||||
};
|
||||
super(nodeVersions, baseDescription);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,241 @@
|
|||
import type { CallbackManagerForToolRun } from '@langchain/core/callbacks/manager';
|
||||
import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools';
|
||||
import type { JSONSchema7 } from 'json-schema';
|
||||
import get from 'lodash/get';
|
||||
import isObject from 'lodash/isObject';
|
||||
import type { SetField, SetNodeOptions } from 'n8n-nodes-base/dist/nodes/Set/v2/helpers/interfaces';
|
||||
import * as manual from 'n8n-nodes-base/dist/nodes/Set/v2/manual.mode';
|
||||
import type {
|
||||
IExecuteWorkflowInfo,
|
||||
INodeExecutionData,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
IWorkflowBase,
|
||||
ISupplyDataFunctions,
|
||||
SupplyData,
|
||||
ExecutionError,
|
||||
ExecuteWorkflowData,
|
||||
IDataObject,
|
||||
INodeParameterResourceLocator,
|
||||
ITaskMetadata,
|
||||
INodeTypeBaseDescription,
|
||||
} from 'n8n-workflow';
|
||||
import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow';
|
||||
|
||||
import { versionDescription } from './versionDescription';
|
||||
import type { DynamicZodObject } from '../../../../types/zod.types';
|
||||
import { convertJsonSchemaToZod, generateSchema } from '../../../../utils/schemaParsing';
|
||||
|
||||
export class ToolWorkflowV1 implements INodeType {
|
||||
description: INodeTypeDescription;
|
||||
|
||||
constructor(baseDescription: INodeTypeBaseDescription) {
|
||||
this.description = {
|
||||
...baseDescription,
|
||||
...versionDescription,
|
||||
};
|
||||
}
|
||||
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const workflowProxy = this.getWorkflowDataProxy(0);
|
||||
|
||||
const name = this.getNodeParameter('name', itemIndex) as string;
|
||||
const description = this.getNodeParameter('description', itemIndex) as string;
|
||||
|
||||
let subExecutionId: string | undefined;
|
||||
let subWorkflowId: string | undefined;
|
||||
|
||||
const useSchema = this.getNodeParameter('specifyInputSchema', itemIndex) as boolean;
|
||||
let tool: DynamicTool | DynamicStructuredTool | undefined = undefined;
|
||||
|
||||
const runFunction = async (
|
||||
query: string | IDataObject,
|
||||
runManager?: CallbackManagerForToolRun,
|
||||
): Promise<string> => {
|
||||
const source = this.getNodeParameter('source', itemIndex) as string;
|
||||
const workflowInfo: IExecuteWorkflowInfo = {};
|
||||
if (source === 'database') {
|
||||
// Read workflow from database
|
||||
const nodeVersion = this.getNode().typeVersion;
|
||||
if (nodeVersion <= 1.1) {
|
||||
workflowInfo.id = this.getNodeParameter('workflowId', itemIndex) as string;
|
||||
} else {
|
||||
const { value } = this.getNodeParameter(
|
||||
'workflowId',
|
||||
itemIndex,
|
||||
{},
|
||||
) as INodeParameterResourceLocator;
|
||||
workflowInfo.id = value as string;
|
||||
}
|
||||
|
||||
subWorkflowId = workflowInfo.id;
|
||||
} else if (source === 'parameter') {
|
||||
// Read workflow from parameter
|
||||
const workflowJson = this.getNodeParameter('workflowJson', itemIndex) as string;
|
||||
try {
|
||||
workflowInfo.code = JSON.parse(workflowJson) as IWorkflowBase;
|
||||
|
||||
// the subworkflow is the same as the parent workflow
|
||||
subWorkflowId = workflowProxy.$workflow.id;
|
||||
} catch (error) {
|
||||
throw new NodeOperationError(
|
||||
this.getNode(),
|
||||
`The provided workflow is not valid JSON: "${(error as Error).message}"`,
|
||||
{
|
||||
itemIndex,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const rawData: IDataObject = { query };
|
||||
|
||||
const workflowFieldsJson = this.getNodeParameter('fields.values', itemIndex, [], {
|
||||
rawExpressions: true,
|
||||
}) as SetField[];
|
||||
|
||||
// Copied from Set Node v2
|
||||
for (const entry of workflowFieldsJson) {
|
||||
if (entry.type === 'objectValue' && (entry.objectValue as string).startsWith('=')) {
|
||||
rawData[entry.name] = (entry.objectValue as string).replace(/^=+/, '');
|
||||
}
|
||||
}
|
||||
|
||||
const options: SetNodeOptions = {
|
||||
include: 'all',
|
||||
};
|
||||
|
||||
const newItem = await manual.execute.call(
|
||||
this,
|
||||
{ json: { query } },
|
||||
itemIndex,
|
||||
options,
|
||||
rawData,
|
||||
this.getNode(),
|
||||
);
|
||||
|
||||
const items = [newItem] as INodeExecutionData[];
|
||||
|
||||
let receivedData: ExecuteWorkflowData;
|
||||
try {
|
||||
receivedData = await this.executeWorkflow(workflowInfo, items, runManager?.getChild(), {
|
||||
parentExecution: {
|
||||
executionId: workflowProxy.$execution.id,
|
||||
workflowId: workflowProxy.$workflow.id,
|
||||
},
|
||||
});
|
||||
subExecutionId = receivedData.executionId;
|
||||
} catch (error) {
|
||||
// Make sure a valid error gets returned that can be JSON-serialized, else it will
|
||||
// not show up in the frontend
|
||||
throw new NodeOperationError(this.getNode(), error as Error);
|
||||
}
|
||||
|
||||
const response: string | undefined = get(receivedData, 'data[0][0].json') as
|
||||
| string
|
||||
| undefined;
|
||||
if (response === undefined) {
|
||||
throw new NodeOperationError(
|
||||
this.getNode(),
|
||||
'There was an error: "The workflow did not return a response"',
|
||||
);
|
||||
}
|
||||
|
||||
return response;
|
||||
};
|
||||
|
||||
const toolHandler = async (
|
||||
query: string | IDataObject,
|
||||
runManager?: CallbackManagerForToolRun,
|
||||
): Promise<string> => {
|
||||
const { index } = this.addInputData(NodeConnectionType.AiTool, [[{ json: { query } }]]);
|
||||
|
||||
let response: string = '';
|
||||
let executionError: ExecutionError | undefined;
|
||||
try {
|
||||
response = await runFunction(query, runManager);
|
||||
} catch (error) {
|
||||
// TODO: Do some more testing. Issues here should actually fail the workflow
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
|
||||
executionError = error;
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
|
||||
response = `There was an error: "${error.message}"`;
|
||||
}
|
||||
|
||||
if (typeof response === 'number') {
|
||||
response = (response as number).toString();
|
||||
}
|
||||
|
||||
if (isObject(response)) {
|
||||
response = JSON.stringify(response, null, 2);
|
||||
}
|
||||
|
||||
if (typeof response !== 'string') {
|
||||
// TODO: Do some more testing. Issues here should actually fail the workflow
|
||||
executionError = new NodeOperationError(this.getNode(), 'Wrong output type returned', {
|
||||
description: `The response property should be a string, but it is an ${typeof response}`,
|
||||
});
|
||||
response = `There was an error: "${executionError.message}"`;
|
||||
}
|
||||
|
||||
let metadata: ITaskMetadata | undefined;
|
||||
if (subExecutionId && subWorkflowId) {
|
||||
metadata = {
|
||||
subExecution: {
|
||||
executionId: subExecutionId,
|
||||
workflowId: subWorkflowId,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (executionError) {
|
||||
void this.addOutputData(NodeConnectionType.AiTool, index, executionError, metadata);
|
||||
} else {
|
||||
// Output always needs to be an object
|
||||
// so we try to parse the response as JSON and if it fails we just return the string wrapped in an object
|
||||
const json = jsonParse<IDataObject>(response, { fallbackValue: { response } });
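// e.g. a response of '{"answer":42}' becomes { answer: 42 }, while plain text such as
// 'done' falls back to { response: 'done' } (illustrative values, not from this commit)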
|
||||
void this.addOutputData(NodeConnectionType.AiTool, index, [[{ json }]], metadata);
|
||||
}
|
||||
return response;
|
||||
};
|
||||
|
||||
const functionBase = {
|
||||
name,
|
||||
description,
|
||||
func: toolHandler,
|
||||
};
|
||||
|
||||
if (useSchema) {
|
||||
try {
|
||||
// We initialize these even though one of them will always be empty
|
||||
// it makes it easier to navigate the ternary operator
|
||||
const jsonExample = this.getNodeParameter('jsonSchemaExample', itemIndex, '') as string;
|
||||
const inputSchema = this.getNodeParameter('inputSchema', itemIndex, '') as string;
|
||||
|
||||
const schemaType = this.getNodeParameter('schemaType', itemIndex) as 'fromJson' | 'manual';
|
||||
const jsonSchema =
|
||||
schemaType === 'fromJson'
|
||||
? generateSchema(jsonExample)
|
||||
: jsonParse<JSONSchema7>(inputSchema);
|
||||
|
||||
const zodSchema = convertJsonSchemaToZod<DynamicZodObject>(jsonSchema);
|
||||
|
||||
tool = new DynamicStructuredTool({
|
||||
schema: zodSchema,
|
||||
...functionBase,
|
||||
});
|
||||
} catch (error) {
|
||||
throw new NodeOperationError(
|
||||
this.getNode(),
|
||||
'Error during parsing of JSON Schema. \n ' + error,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
tool = new DynamicTool(functionBase);
|
||||
}
|
||||
|
||||
return {
|
||||
response: tool,
|
||||
};
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,345 @@
|
|||
/* eslint-disable n8n-nodes-base/node-filename-against-convention */
|
||||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import type { INodeTypeDescription } from 'n8n-workflow';
|
||||
import { NodeConnectionType } from 'n8n-workflow';
|
||||
|
||||
import {
|
||||
inputSchemaField,
|
||||
jsonSchemaExampleField,
|
||||
schemaTypeField,
|
||||
} from '../../../../utils/descriptions';
|
||||
import { getConnectionHintNoticeField } from '../../../../utils/sharedFields';
|
||||
|
||||
export const versionDescription: INodeTypeDescription = {
|
||||
displayName: 'Call n8n Workflow Tool',
|
||||
name: 'toolWorkflow',
|
||||
icon: 'fa:network-wired',
|
||||
iconColor: 'black',
|
||||
group: ['transform'],
|
||||
version: [1, 1.1, 1.2, 1.3],
|
||||
description: 'Uses another n8n workflow as a tool. Allows packaging any n8n node(s) as a tool.',
|
||||
defaults: {
|
||||
name: 'Call n8n Workflow Tool',
|
||||
},
|
||||
codex: {
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Tools'],
|
||||
Tools: ['Recommended Tools'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.toolworkflow/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [],
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
|
||||
outputs: [NodeConnectionType.AiTool],
|
||||
outputNames: ['Tool'],
|
||||
properties: [
|
||||
getConnectionHintNoticeField([NodeConnectionType.AiAgent]),
|
||||
{
|
||||
displayName:
|
||||
'See an example of a workflow to suggest meeting slots using AI <a href="/templates/1953" target="_blank">here</a>.',
|
||||
name: 'noticeTemplateExample',
|
||||
type: 'notice',
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Name',
|
||||
name: 'name',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'My_Color_Tool',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'@version': [1],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Name',
|
||||
name: 'name',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. My_Color_Tool',
|
||||
validateType: 'string-alphanumeric',
|
||||
description:
|
||||
'The name of the function to be called, could contain letters, numbers, and underscores only',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'@version': [{ _cnd: { gte: 1.1 } }],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Description',
|
||||
name: 'description',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder:
|
||||
'Call this tool to get a random color. The input should be a string with comma separated names of colors to exclude.',
|
||||
typeOptions: {
|
||||
rows: 3,
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
displayName:
|
||||
'This tool will call the workflow you define below, and look in the last node for the response. The workflow needs to start with an Execute Workflow trigger',
|
||||
name: 'executeNotice',
|
||||
type: 'notice',
|
||||
default: '',
|
||||
},
|
||||
|
||||
{
|
||||
displayName: 'Source',
|
||||
name: 'source',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
name: 'Database',
|
||||
value: 'database',
|
||||
description: 'Load the workflow from the database by ID',
|
||||
},
|
||||
{
|
||||
name: 'Define Below',
|
||||
value: 'parameter',
|
||||
description: 'Pass the JSON code of a workflow',
|
||||
},
|
||||
],
|
||||
default: 'database',
|
||||
description: 'Where to get the workflow to execute from',
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
// source:database
|
||||
// ----------------------------------
|
||||
{
|
||||
displayName: 'Workflow ID',
|
||||
name: 'workflowId',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
source: ['database'],
|
||||
'@version': [{ _cnd: { lte: 1.1 } }],
|
||||
},
|
||||
},
|
||||
default: '',
|
||||
required: true,
|
||||
description: 'The workflow to execute',
|
||||
hint: 'Can be found in the URL of the workflow',
|
||||
},
|
||||
|
||||
{
|
||||
displayName: 'Workflow',
|
||||
name: 'workflowId',
|
||||
type: 'workflowSelector',
|
||||
displayOptions: {
|
||||
show: {
|
||||
source: ['database'],
|
||||
'@version': [{ _cnd: { gte: 1.2 } }],
|
||||
},
|
||||
},
|
||||
default: '',
|
||||
required: true,
|
||||
},
|
||||
|
||||
// ----------------------------------
|
||||
// source:parameter
|
||||
// ----------------------------------
|
||||
{
|
||||
displayName: 'Workflow JSON',
|
||||
name: 'workflowJson',
|
||||
type: 'json',
|
||||
typeOptions: {
|
||||
rows: 10,
|
||||
},
|
||||
displayOptions: {
|
||||
show: {
|
||||
source: ['parameter'],
|
||||
},
|
||||
},
|
||||
default: '\n\n\n\n\n\n\n\n\n',
|
||||
required: true,
|
||||
description: 'The workflow JSON code to execute',
|
||||
},
|
||||
// ----------------------------------
|
||||
// For all
|
||||
// ----------------------------------
|
||||
{
|
||||
displayName: 'Field to Return',
|
||||
name: 'responsePropertyName',
|
||||
type: 'string',
|
||||
default: 'response',
|
||||
required: true,
|
||||
hint: 'The field in the last-executed node of the workflow that contains the response',
|
||||
description:
|
||||
'Where to find the data that this tool should return. n8n will look in the output of the last-executed node of the workflow for a field with this name, and return its value.',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'@version': [{ _cnd: { lt: 1.3 } }],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Extra Workflow Inputs',
|
||||
name: 'fields',
|
||||
placeholder: 'Add Value',
|
||||
type: 'fixedCollection',
|
||||
description:
|
||||
"These will be output by the 'execute workflow' trigger of the workflow being called",
|
||||
typeOptions: {
|
||||
multipleValues: true,
|
||||
sortable: true,
|
||||
},
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
name: 'values',
|
||||
displayName: 'Values',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Name',
|
||||
name: 'name',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. fieldName',
|
||||
description:
|
||||
'Name of the field to set the value of. Supports dot-notation. Example: data.person[0].name.',
|
||||
requiresDataPath: 'single',
|
||||
},
|
||||
{
|
||||
displayName: 'Type',
|
||||
name: 'type',
|
||||
type: 'options',
|
||||
description: 'The field value type',
|
||||
// eslint-disable-next-line n8n-nodes-base/node-param-options-type-unsorted-items
|
||||
options: [
|
||||
{
|
||||
name: 'String',
|
||||
value: 'stringValue',
|
||||
},
|
||||
{
|
||||
name: 'Number',
|
||||
value: 'numberValue',
|
||||
},
|
||||
{
|
||||
name: 'Boolean',
|
||||
value: 'booleanValue',
|
||||
},
|
||||
{
|
||||
name: 'Array',
|
||||
value: 'arrayValue',
|
||||
},
|
||||
{
|
||||
name: 'Object',
|
||||
value: 'objectValue',
|
||||
},
|
||||
],
|
||||
default: 'stringValue',
|
||||
},
|
||||
{
|
||||
displayName: 'Value',
|
||||
name: 'stringValue',
|
||||
type: 'string',
|
||||
default: '',
|
||||
displayOptions: {
|
||||
show: {
|
||||
type: ['stringValue'],
|
||||
},
|
||||
},
|
||||
validateType: 'string',
|
||||
ignoreValidationDuringExecution: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Value',
|
||||
name: 'numberValue',
|
||||
type: 'string',
|
||||
default: '',
|
||||
displayOptions: {
|
||||
show: {
|
||||
type: ['numberValue'],
|
||||
},
|
||||
},
|
||||
validateType: 'number',
|
||||
ignoreValidationDuringExecution: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Value',
|
||||
name: 'booleanValue',
|
||||
type: 'options',
|
||||
default: 'true',
|
||||
options: [
|
||||
{
|
||||
name: 'True',
|
||||
value: 'true',
|
||||
},
|
||||
{
|
||||
name: 'False',
|
||||
value: 'false',
|
||||
},
|
||||
],
|
||||
displayOptions: {
|
||||
show: {
|
||||
type: ['booleanValue'],
|
||||
},
|
||||
},
|
||||
validateType: 'boolean',
|
||||
ignoreValidationDuringExecution: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Value',
|
||||
name: 'arrayValue',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. [ arrayItem1, arrayItem2, arrayItem3 ]',
|
||||
displayOptions: {
|
||||
show: {
|
||||
type: ['arrayValue'],
|
||||
},
|
||||
},
|
||||
validateType: 'array',
|
||||
ignoreValidationDuringExecution: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Value',
|
||||
name: 'objectValue',
|
||||
type: 'json',
|
||||
default: '={}',
|
||||
typeOptions: {
|
||||
rows: 2,
|
||||
},
|
||||
displayOptions: {
|
||||
show: {
|
||||
type: ['objectValue'],
|
||||
},
|
||||
},
|
||||
validateType: 'object',
|
||||
ignoreValidationDuringExecution: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
// ----------------------------------
|
||||
// Output Parsing
|
||||
// ----------------------------------
|
||||
{
|
||||
displayName: 'Specify Input Schema',
|
||||
name: 'specifyInputSchema',
|
||||
type: 'boolean',
|
||||
description:
|
||||
'Whether to specify the schema for the function. This would require the LLM to provide the input in the correct format and would validate it against the schema.',
|
||||
noDataExpression: true,
|
||||
default: false,
|
||||
},
|
||||
{ ...schemaTypeField, displayOptions: { show: { specifyInputSchema: [true] } } },
|
||||
jsonSchemaExampleField,
|
||||
inputSchemaField,
|
||||
],
|
||||
};
|
|
@@ -0,0 +1,42 @@
|
|||
import { loadWorkflowInputMappings } from 'n8n-nodes-base/dist/utils/workflowInputsResourceMapping/GenericFunctions';
|
||||
import type {
|
||||
INodeTypeBaseDescription,
|
||||
ISupplyDataFunctions,
|
||||
SupplyData,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { WorkflowToolService } from './utils/WorkflowToolService';
|
||||
import { versionDescription } from './versionDescription';
|
||||
|
||||
export class ToolWorkflowV2 implements INodeType {
|
||||
description: INodeTypeDescription;
|
||||
|
||||
constructor(baseDescription: INodeTypeBaseDescription) {
|
||||
this.description = {
|
||||
...baseDescription,
|
||||
...versionDescription,
|
||||
};
|
||||
}
|
||||
|
||||
methods = {
|
||||
localResourceMapping: {
|
||||
loadWorkflowInputMappings,
|
||||
},
|
||||
};
|
||||
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const workflowToolService = new WorkflowToolService(this);
|
||||
const name = this.getNodeParameter('name', itemIndex) as string;
|
||||
const description = this.getNodeParameter('description', itemIndex) as string;
|
||||
|
||||
const tool = await workflowToolService.createTool({
|
||||
name,
|
||||
description,
|
||||
itemIndex,
|
||||
});
|
||||
|
||||
return { response: tool };
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,235 @@
|
|||
/* eslint-disable @typescript-eslint/dot-notation */ // Disabled to allow access to private methods
|
||||
import { DynamicTool } from '@langchain/core/tools';
|
||||
import { NodeOperationError } from 'n8n-workflow';
|
||||
import type {
|
||||
ISupplyDataFunctions,
|
||||
INodeExecutionData,
|
||||
IWorkflowDataProxyData,
|
||||
ExecuteWorkflowData,
|
||||
INode,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { WorkflowToolService } from './utils/WorkflowToolService';
|
||||
|
||||
// Mock ISupplyDataFunctions interface
|
||||
function createMockContext(overrides?: Partial<ISupplyDataFunctions>): ISupplyDataFunctions {
|
||||
return {
|
||||
getNodeParameter: jest.fn(),
|
||||
getWorkflowDataProxy: jest.fn(),
|
||||
getNode: jest.fn(),
|
||||
executeWorkflow: jest.fn(),
|
||||
addInputData: jest.fn(),
|
||||
addOutputData: jest.fn(),
|
||||
getCredentials: jest.fn(),
|
||||
getCredentialsProperties: jest.fn(),
|
||||
getInputData: jest.fn(),
|
||||
getMode: jest.fn(),
|
||||
getRestApiUrl: jest.fn(),
|
||||
getTimezone: jest.fn(),
|
||||
getWorkflow: jest.fn(),
|
||||
getWorkflowStaticData: jest.fn(),
|
||||
logger: {
|
||||
debug: jest.fn(),
|
||||
error: jest.fn(),
|
||||
info: jest.fn(),
|
||||
warn: jest.fn(),
|
||||
},
|
||||
...overrides,
|
||||
} as ISupplyDataFunctions;
|
||||
}
|
||||
|
||||
describe('WorkflowTool::WorkflowToolService', () => {
|
||||
let context: ISupplyDataFunctions;
|
||||
let service: WorkflowToolService;
|
||||
|
||||
beforeEach(() => {
|
||||
// Prepare essential mocks
|
||||
context = createMockContext();
|
||||
jest.spyOn(context, 'getNode').mockReturnValue({
|
||||
parameters: { workflowInputs: { schema: [] } },
|
||||
} as unknown as INode);
|
||||
service = new WorkflowToolService(context);
|
||||
});
|
||||
|
||||
describe('createTool', () => {
|
||||
it('should create a basic dynamic tool when schema is not used', async () => {
|
||||
const toolParams = {
|
||||
name: 'TestTool',
|
||||
description: 'Test Description',
|
||||
itemIndex: 0,
|
||||
};
|
||||
|
||||
const result = await service.createTool(toolParams);
|
||||
|
||||
expect(result).toBeInstanceOf(DynamicTool);
|
||||
expect(result).toHaveProperty('name', 'TestTool');
|
||||
expect(result).toHaveProperty('description', 'Test Description');
|
||||
});
|
||||
|
||||
it('should create a tool that can handle successful execution', async () => {
|
||||
const toolParams = {
|
||||
name: 'TestTool',
|
||||
description: 'Test Description',
|
||||
itemIndex: 0,
|
||||
};
|
||||
|
||||
const TEST_RESPONSE = { msg: 'test response' };
|
||||
|
||||
const mockExecuteWorkflowResponse: ExecuteWorkflowData = {
|
||||
data: [[{ json: TEST_RESPONSE }]],
|
||||
executionId: 'test-execution',
|
||||
};
|
||||
|
||||
jest.spyOn(context, 'executeWorkflow').mockResolvedValueOnce(mockExecuteWorkflowResponse);
|
||||
jest.spyOn(context, 'addInputData').mockReturnValue({ index: 0 });
|
||||
jest.spyOn(context, 'getNodeParameter').mockReturnValue('database');
|
||||
jest.spyOn(context, 'getWorkflowDataProxy').mockReturnValue({
|
||||
$execution: { id: 'exec-id' },
|
||||
$workflow: { id: 'workflow-id' },
|
||||
} as unknown as IWorkflowDataProxyData);
|
||||
|
||||
const tool = await service.createTool(toolParams);
|
||||
const result = await tool.func('test query');
|
||||
|
||||
expect(result).toBe(JSON.stringify(TEST_RESPONSE, null, 2));
|
||||
expect(context.addOutputData).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle errors during tool execution', async () => {
|
||||
const toolParams = {
|
||||
name: 'TestTool',
|
||||
description: 'Test Description',
|
||||
itemIndex: 0,
|
||||
};
|
||||
|
||||
jest
|
||||
.spyOn(context, 'executeWorkflow')
|
||||
.mockRejectedValueOnce(new Error('Workflow execution failed'));
|
||||
jest.spyOn(context, 'addInputData').mockReturnValue({ index: 0 });
|
||||
jest.spyOn(context, 'getNodeParameter').mockReturnValue('database');
|
||||
|
||||
const tool = await service.createTool(toolParams);
|
||||
const result = await tool.func('test query');
|
||||
|
||||
expect(result).toContain('There was an error');
|
||||
expect(context.addOutputData).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleToolResponse', () => {
|
||||
it('should handle number response', () => {
|
||||
const result = service['handleToolResponse'](42);
|
||||
|
||||
expect(result).toBe('42');
|
||||
});
|
||||
|
||||
it('should handle object response', () => {
|
||||
const obj = { test: 'value' };
|
||||
|
||||
const result = service['handleToolResponse'](obj);
|
||||
|
||||
expect(result).toBe(JSON.stringify(obj, null, 2));
|
||||
});
|
||||
|
||||
it('should handle string response', () => {
|
||||
const result = service['handleToolResponse']('test response');
|
||||
|
||||
expect(result).toBe('test response');
|
||||
});
|
||||
|
||||
it('should throw error for invalid response type', () => {
|
||||
expect(() => service['handleToolResponse'](undefined)).toThrow(NodeOperationError);
|
||||
});
|
||||
});
|
||||
|
||||
describe('executeSubWorkflow', () => {
|
||||
it('should successfully execute workflow and return response', async () => {
|
||||
const workflowInfo = { id: 'test-workflow' };
|
||||
const items: INodeExecutionData[] = [];
|
||||
const workflowProxyMock = {
|
||||
$execution: { id: 'exec-id' },
|
||||
$workflow: { id: 'workflow-id' },
|
||||
} as unknown as IWorkflowDataProxyData;
|
||||
|
||||
const TEST_RESPONSE = { msg: 'test response' };
|
||||
|
||||
const mockResponse: ExecuteWorkflowData = {
|
||||
data: [[{ json: TEST_RESPONSE }]],
|
||||
executionId: 'test-execution',
|
||||
};
|
||||
|
||||
jest.spyOn(context, 'executeWorkflow').mockResolvedValueOnce(mockResponse);
|
||||
|
||||
const result = await service['executeSubWorkflow'](workflowInfo, items, workflowProxyMock);
|
||||
|
||||
expect(result.response).toBe(TEST_RESPONSE);
|
||||
expect(result.subExecutionId).toBe('test-execution');
|
||||
});
|
||||
|
||||
it('should throw error when workflow execution fails', async () => {
|
||||
jest.spyOn(context, 'executeWorkflow').mockRejectedValueOnce(new Error('Execution failed'));
|
||||
|
||||
await expect(service['executeSubWorkflow']({}, [], {} as never)).rejects.toThrow(
|
||||
NodeOperationError,
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw error when workflow returns no response', async () => {
|
||||
const mockResponse: ExecuteWorkflowData = {
|
||||
data: [],
|
||||
executionId: 'test-execution',
|
||||
};
|
||||
|
||||
jest.spyOn(context, 'executeWorkflow').mockResolvedValueOnce(mockResponse);
|
||||
|
||||
await expect(service['executeSubWorkflow']({}, [], {} as never)).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getSubWorkflowInfo', () => {
|
||||
it('should handle database source correctly', async () => {
|
||||
const source = 'database';
|
||||
const itemIndex = 0;
|
||||
const workflowProxyMock = {
|
||||
$workflow: { id: 'proxy-id' },
|
||||
} as unknown as IWorkflowDataProxyData;
|
||||
|
||||
jest.spyOn(context, 'getNodeParameter').mockReturnValueOnce({ value: 'workflow-id' });
|
||||
|
||||
const result = await service['getSubWorkflowInfo'](source, itemIndex, workflowProxyMock);
|
||||
|
||||
expect(result.workflowInfo).toHaveProperty('id', 'workflow-id');
|
||||
expect(result.subWorkflowId).toBe('workflow-id');
|
||||
});
|
||||
|
||||
it('should handle parameter source correctly', async () => {
|
||||
const source = 'parameter';
|
||||
const itemIndex = 0;
|
||||
const workflowProxyMock = {
|
||||
$workflow: { id: 'proxy-id' },
|
||||
} as unknown as IWorkflowDataProxyData;
|
||||
const mockWorkflow = { id: 'test-workflow' };
|
||||
|
||||
jest.spyOn(context, 'getNodeParameter').mockReturnValueOnce(JSON.stringify(mockWorkflow));
|
||||
|
||||
const result = await service['getSubWorkflowInfo'](source, itemIndex, workflowProxyMock);
|
||||
|
||||
expect(result.workflowInfo.code).toEqual(mockWorkflow);
|
||||
expect(result.subWorkflowId).toBe('proxy-id');
|
||||
});
|
||||
|
||||
it('should throw error for invalid JSON in parameter source', async () => {
|
||||
const source = 'parameter';
|
||||
const itemIndex = 0;
|
||||
const workflowProxyMock = {
|
||||
$workflow: { id: 'proxy-id' },
|
||||
} as unknown as IWorkflowDataProxyData;
|
||||
|
||||
jest.spyOn(context, 'getNodeParameter').mockReturnValueOnce('invalid json');
|
||||
|
||||
await expect(
|
||||
service['getSubWorkflowInfo'](source, itemIndex, workflowProxyMock),
|
||||
).rejects.toThrow(NodeOperationError);
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -0,0 +1,284 @@
import type { ISupplyDataFunctions } from 'n8n-workflow';
import { jsonParse, NodeOperationError } from 'n8n-workflow';
import { z } from 'zod';

type AllowedTypes = 'string' | 'number' | 'boolean' | 'json';
export interface FromAIArgument {
	key: string;
	description?: string;
	type?: AllowedTypes;
	defaultValue?: string | number | boolean | Record<string, unknown>;
}

// TODO: This class was copied from the core package. Once the new node context work is merged, it will be available in the root node context and this file can be removed.
// Please apply any changes to both files.

/**
 * AIParametersParser
 *
 * This class encapsulates the logic for parsing node parameters, extracting $fromAI calls,
 * generating Zod schemas, and creating LangChain tools.
 */
export class AIParametersParser {
	private ctx: ISupplyDataFunctions;

	/**
	 * Constructs an instance of AIParametersParser.
	 * @param ctx The execution context.
	 */
	constructor(ctx: ISupplyDataFunctions) {
		this.ctx = ctx;
	}

	/**
	 * Generates a Zod schema based on the provided FromAIArgument placeholder.
	 * @param placeholder The FromAIArgument object containing key, type, description, and defaultValue.
	 * @returns A Zod schema corresponding to the placeholder's type and constraints.
	 */
	generateZodSchema(placeholder: FromAIArgument): z.ZodTypeAny {
		let schema: z.ZodTypeAny;

		switch (placeholder.type?.toLowerCase()) {
			case 'string':
				schema = z.string();
				break;
			case 'number':
				schema = z.number();
				break;
			case 'boolean':
				schema = z.boolean();
				break;
			case 'json':
				schema = z.record(z.any());
				break;
			default:
				schema = z.string();
		}

		if (placeholder.description) {
			schema = schema.describe(`${schema.description ?? ''} ${placeholder.description}`.trim());
		}

		if (placeholder.defaultValue !== undefined) {
			schema = schema.default(placeholder.defaultValue);
		}

		return schema;
	}

	/**
	 * Recursively traverses the nodeParameters object to find all $fromAI calls.
	 * @param payload The current object or value being traversed.
	 * @param collectedArgs The array collecting FromAIArgument objects.
	 */
	traverseNodeParameters(payload: unknown, collectedArgs: FromAIArgument[]) {
		if (typeof payload === 'string') {
			const fromAICalls = this.extractFromAICalls(payload);
			fromAICalls.forEach((call) => collectedArgs.push(call));
		} else if (Array.isArray(payload)) {
			payload.forEach((item: unknown) => this.traverseNodeParameters(item, collectedArgs));
		} else if (typeof payload === 'object' && payload !== null) {
			Object.values(payload).forEach((value) => this.traverseNodeParameters(value, collectedArgs));
		}
	}

	/**
	 * Extracts all $fromAI calls from a given string.
	 * @param str The string to search for $fromAI calls.
	 * @returns An array of FromAIArgument objects.
	 *
	 * This method uses a regular expression to find the start of each $fromAI function call
	 * in the input string. It then employs a character-by-character parsing approach to
	 * accurately extract the arguments of each call, handling nested parentheses and quoted strings.
	 *
	 * The parsing process:
	 * 1. Finds the starting position of a $fromAI call using regex.
	 * 2. Iterates through characters, keeping track of parentheses depth and quote status.
	 * 3. Handles escaped characters within quotes to avoid premature quote closing.
	 * 4. Builds the argument string until the matching closing parenthesis is found.
	 * 5. Parses the extracted argument string into a FromAIArgument object.
	 * 6. Repeats the process for all $fromAI calls in the input string.
	 */
	extractFromAICalls(str: string): FromAIArgument[] {
		const args: FromAIArgument[] = [];
		// Regular expression to match the start of a $fromAI function call
		const pattern = /\$fromAI\s*\(\s*/gi;
		let match: RegExpExecArray | null;

		while ((match = pattern.exec(str)) !== null) {
			const startIndex = match.index + match[0].length;
			let current = startIndex;
			let inQuotes = false;
			let quoteChar = '';
			let parenthesesCount = 1;
			let argsString = '';

			// Parse the arguments string, handling nested parentheses and quotes
			while (current < str.length && parenthesesCount > 0) {
				const char = str[current];

				if (inQuotes) {
					// Handle characters inside quotes, including escaped characters
					if (char === '\\' && current + 1 < str.length) {
						argsString += char + str[current + 1];
						current += 2;
						continue;
					}

					if (char === quoteChar) {
						inQuotes = false;
						quoteChar = '';
					}
					argsString += char;
				} else {
					// Handle characters outside quotes
					if (['"', "'", '`'].includes(char)) {
						inQuotes = true;
						quoteChar = char;
					} else if (char === '(') {
						parenthesesCount++;
					} else if (char === ')') {
						parenthesesCount--;
					}

					// Only add characters if we're still inside the main parentheses
					if (parenthesesCount > 0 || char !== ')') {
						argsString += char;
					}
				}

				current++;
			}

			// If parentheses are balanced, parse the arguments
			if (parenthesesCount === 0) {
				try {
					const parsedArgs = this.parseArguments(argsString);
					args.push(parsedArgs);
				} catch (error) {
					// If parsing fails, throw a NodeOperationError with details
					throw new NodeOperationError(
						this.ctx.getNode(),
						`Failed to parse $fromAI arguments: ${argsString}: ${error}`,
					);
				}
			} else {
				// Throw an error if parentheses are unbalanced
				throw new NodeOperationError(
					this.ctx.getNode(),
					`Unbalanced parentheses while parsing $fromAI call: ${str.slice(startIndex)}`,
				);
			}
		}

		return args;
	}

	/**
	 * Parses the arguments of a single $fromAI function call.
	 * @param argsString The string containing the function arguments.
	 * @returns A FromAIArgument object.
	 */
	parseArguments(argsString: string): FromAIArgument {
		// Split arguments by commas not inside quotes
		const args: string[] = [];
		let currentArg = '';
		let inQuotes = false;
		let quoteChar = '';
		let escapeNext = false;

		for (let i = 0; i < argsString.length; i++) {
			const char = argsString[i];

			if (escapeNext) {
				currentArg += char;
				escapeNext = false;
				continue;
			}

			if (char === '\\') {
				escapeNext = true;
				continue;
			}

			if (['"', "'", '`'].includes(char)) {
				if (!inQuotes) {
					inQuotes = true;
					quoteChar = char;
					currentArg += char;
				} else if (char === quoteChar) {
					inQuotes = false;
					quoteChar = '';
					currentArg += char;
				} else {
					currentArg += char;
				}
				continue;
			}

			if (char === ',' && !inQuotes) {
				args.push(currentArg.trim());
				currentArg = '';
				continue;
			}

			currentArg += char;
		}

		if (currentArg) {
			args.push(currentArg.trim());
		}

		// Remove surrounding quotes if present
		const cleanArgs = args.map((arg) => {
			const trimmed = arg.trim();
			if (
				(trimmed.startsWith("'") && trimmed.endsWith("'")) ||
				(trimmed.startsWith('`') && trimmed.endsWith('`')) ||
				(trimmed.startsWith('"') && trimmed.endsWith('"'))
			) {
				return trimmed
					.slice(1, -1)
					.replace(/\\'/g, "'")
					.replace(/\\`/g, '`')
					.replace(/\\"/g, '"')
					.replace(/\\\\/g, '\\');
			}
			return trimmed;
		});

		const type = cleanArgs?.[2] || 'string';

		if (!['string', 'number', 'boolean', 'json'].includes(type.toLowerCase())) {
			throw new NodeOperationError(this.ctx.getNode(), `Invalid type: ${type}`);
		}

		return {
			key: cleanArgs[0] || '',
			description: cleanArgs[1],
			type: (cleanArgs?.[2] ?? 'string') as AllowedTypes,
			defaultValue: this.parseDefaultValue(cleanArgs[3]),
		};
	}

	/**
	 * Parses the default value, preserving its original type.
	 * @param value The default value as a string.
	 * @returns The parsed default value in its appropriate type.
	 */
	parseDefaultValue(
		value: string | undefined,
	): string | number | boolean | Record<string, unknown> | undefined {
		if (value === undefined || value === '') return undefined;
		const lowerValue = value.toLowerCase();
		if (lowerValue === 'true') return true;
		if (lowerValue === 'false') return false;
		if (!isNaN(Number(value))) return Number(value);
		try {
			return jsonParse(value);
		} catch {
			return value;
		}
	}
}

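Below is a minimal usage sketch, assuming the parser is constructed with the node's ISupplyDataFunctions context; the helper name, the placeholder string, and the expected result shown in the comments are illustrative only and not taken from this diff.

// Hypothetical sketch, not part of the diff: extracting $fromAI placeholders from a
// parameter string and turning them into a Zod schema.
import type { ISupplyDataFunctions } from 'n8n-workflow';
import { z } from 'zod';

import { AIParametersParser } from './FromAIParser';

function buildSchemaFromParameter(ctx: ISupplyDataFunctions, parameterValue: string) {
	const parser = new AIParametersParser(ctx);

	// For "={{ $fromAI('city', 'The city to look up', 'string', 'Berlin') }}" this yields
	// [{ key: 'city', description: 'The city to look up', type: 'string', defaultValue: 'Berlin' }]
	const calls = parser.extractFromAICalls(parameterValue);

	// One Zod property per $fromAI key, each built by generateZodSchema()
	return z.object(
		Object.fromEntries(calls.map((arg) => [arg.key, parser.generateZodSchema(arg)])),
	);
}
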
@ -0,0 +1,313 @@
import type { CallbackManagerForToolRun } from '@langchain/core/callbacks/manager';
import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools';
import get from 'lodash/get';
import isObject from 'lodash/isObject';
import type { SetField, SetNodeOptions } from 'n8n-nodes-base/dist/nodes/Set/v2/helpers/interfaces';
import * as manual from 'n8n-nodes-base/dist/nodes/Set/v2/manual.mode';
import { getCurrentWorkflowInputData } from 'n8n-nodes-base/dist/utils/workflowInputsResourceMapping/GenericFunctions';
import type {
	ExecuteWorkflowData,
	ExecutionError,
	IDataObject,
	IExecuteWorkflowInfo,
	INodeExecutionData,
	INodeParameterResourceLocator,
	ISupplyDataFunctions,
	ITaskMetadata,
	IWorkflowBase,
	IWorkflowDataProxyData,
	ResourceMapperValue,
} from 'n8n-workflow';
import { jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import { z } from 'zod';

import type { FromAIArgument } from './FromAIParser';
import { AIParametersParser } from './FromAIParser';

/**
 * Main class for creating the Workflow tool.
 * Processes the node parameters and creates an AI Agent tool capable of executing n8n workflows.
 */
export class WorkflowToolService {
	// Determines if we should use the input schema when creating the tool
	private useSchema: boolean;

	// Sub-workflow id, pulled from the referenced sub-workflow
	private subWorkflowId: string | undefined;

	// Sub-workflow execution id, set after the sub-workflow is executed
	private subExecutionId: string | undefined;

	constructor(private context: ISupplyDataFunctions) {
		const subWorkflowInputs = this.context.getNode().parameters
			.workflowInputs as ResourceMapperValue;
		this.useSchema = (subWorkflowInputs?.schema ?? []).length > 0;
	}

	// Creates the tool based on the provided parameters
	async createTool({
		name,
		description,
		itemIndex,
	}: {
		name: string;
		description: string;
		itemIndex: number;
	}): Promise<DynamicTool | DynamicStructuredTool> {
		// Handler for the tool execution, called when the tool is invoked.
		// It executes the sub-workflow and returns the response.
		const toolHandler = async (
			query: string | IDataObject,
			runManager?: CallbackManagerForToolRun,
		): Promise<string> => {
			const { index } = this.context.addInputData(NodeConnectionType.AiTool, [
				[{ json: { query } }],
			]);

			try {
				const response = await this.runFunction(query, itemIndex, runManager);
				const processedResponse = this.handleToolResponse(response);

				// Once the sub-workflow has executed, add the output data to the context.
				// This is used to link the sub-workflow execution in the parent workflow.
				let metadata: ITaskMetadata | undefined;
				if (this.subExecutionId && this.subWorkflowId) {
					metadata = {
						subExecution: {
							executionId: this.subExecutionId,
							workflowId: this.subWorkflowId,
						},
					};
				}
				const json = jsonParse<IDataObject>(processedResponse, {
					fallbackValue: { response: processedResponse },
				});
				void this.context.addOutputData(NodeConnectionType.AiTool, index, [[{ json }]], metadata);

				return processedResponse;
			} catch (error) {
				const executionError = error as ExecutionError;
				const errorResponse = `There was an error: "${executionError.message}"`;
				void this.context.addOutputData(NodeConnectionType.AiTool, index, executionError);
				return errorResponse;
			}
		};

		// Create a structured tool if an input schema is provided
		return this.useSchema
			? await this.createStructuredTool(name, description, toolHandler)
			: new DynamicTool({ name, description, func: toolHandler });
	}

	private handleToolResponse(response: unknown): string {
		if (typeof response === 'number') {
			return response.toString();
		}

		if (isObject(response)) {
			return JSON.stringify(response, null, 2);
		}

		if (typeof response !== 'string') {
			throw new NodeOperationError(this.context.getNode(), 'Wrong output type returned', {
				description: `The response property should be a string, but it is an ${typeof response}`,
			});
		}

		return response;
	}

	/**
	 * Executes the specified sub-workflow with the provided inputs
	 */
	private async executeSubWorkflow(
		workflowInfo: IExecuteWorkflowInfo,
		items: INodeExecutionData[],
		workflowProxy: IWorkflowDataProxyData,
		runManager?: CallbackManagerForToolRun,
	): Promise<{ response: string; subExecutionId: string }> {
		let receivedData: ExecuteWorkflowData;
		try {
			receivedData = await this.context.executeWorkflow(
				workflowInfo,
				items,
				runManager?.getChild(),
				{
					parentExecution: {
						executionId: workflowProxy.$execution.id,
						workflowId: workflowProxy.$workflow.id,
					},
				},
			);
			// Set the sub-workflow execution id so it can be used in other places
			this.subExecutionId = receivedData.executionId;
		} catch (error) {
			throw new NodeOperationError(this.context.getNode(), error as Error);
		}

		const response: string | undefined = get(receivedData, 'data[0][0].json') as string | undefined;
		if (response === undefined) {
			throw new NodeOperationError(
				this.context.getNode(),
				'There was an error: "The workflow did not return a response"',
			);
		}

		return { response, subExecutionId: receivedData.executionId };
	}

	/**
	 * Gets the sub-workflow info based on the source and executes it.
	 * This function is called as part of the tool execution (from the toolHandler).
	 */
	private async runFunction(
		query: string | IDataObject,
		itemIndex: number,
		runManager?: CallbackManagerForToolRun,
	): Promise<string> {
		const source = this.context.getNodeParameter('source', itemIndex) as string;
		const workflowProxy = this.context.getWorkflowDataProxy(0);

		const { workflowInfo } = await this.getSubWorkflowInfo(source, itemIndex, workflowProxy);
		const rawData = this.prepareRawData(query, itemIndex);
		const items = await this.prepareWorkflowItems(query, itemIndex, rawData);

		this.subWorkflowId = workflowInfo.id;

		const { response } = await this.executeSubWorkflow(
			workflowInfo,
			items,
			workflowProxy,
			runManager,
		);
		return response;
	}

	/**
	 * Gets the sub-workflow info based on the source (database or parameter)
	 */
	private async getSubWorkflowInfo(
		source: string,
		itemIndex: number,
		workflowProxy: IWorkflowDataProxyData,
	): Promise<{
		workflowInfo: IExecuteWorkflowInfo;
		subWorkflowId: string;
	}> {
		const workflowInfo: IExecuteWorkflowInfo = {};
		let subWorkflowId: string;

		if (source === 'database') {
			const { value } = this.context.getNodeParameter(
				'workflowId',
				itemIndex,
				{},
			) as INodeParameterResourceLocator;
			workflowInfo.id = value as string;
			subWorkflowId = workflowInfo.id;
		} else if (source === 'parameter') {
			const workflowJson = this.context.getNodeParameter('workflowJson', itemIndex) as string;
			try {
				workflowInfo.code = JSON.parse(workflowJson) as IWorkflowBase;
				// The sub-workflow id is the same as the parent workflow id
				subWorkflowId = workflowProxy.$workflow.id;
			} catch (error) {
				throw new NodeOperationError(
					this.context.getNode(),
					`The provided workflow is not valid JSON: "${(error as Error).message}"`,
					{ itemIndex },
				);
			}
		}

		return { workflowInfo, subWorkflowId: subWorkflowId! };
	}

	private prepareRawData(query: string | IDataObject, itemIndex: number): IDataObject {
		const rawData: IDataObject = { query };
		const workflowFieldsJson = this.context.getNodeParameter('fields.values', itemIndex, [], {
			rawExpressions: true,
		}) as SetField[];

		// Copied from Set Node v2
		for (const entry of workflowFieldsJson) {
			if (entry.type === 'objectValue' && (entry.objectValue as string).startsWith('=')) {
				rawData[entry.name] = (entry.objectValue as string).replace(/^=+/, '');
			}
		}

		return rawData;
	}

	/**
	 * Prepares the sub-workflow items for execution
	 */
	private async prepareWorkflowItems(
		query: string | IDataObject,
		itemIndex: number,
		rawData: IDataObject,
	): Promise<INodeExecutionData[]> {
		const options: SetNodeOptions = { include: 'all' };
		let jsonData = typeof query === 'object' ? query : { query };

		if (this.useSchema) {
			const currentWorkflowInputs = getCurrentWorkflowInputData.call(this.context);
			jsonData = currentWorkflowInputs[itemIndex].json;
		}

		const newItem = await manual.execute.call(
			this.context,
			{ json: jsonData },
			itemIndex,
			options,
			rawData,
			this.context.getNode(),
		);

		return [newItem] as INodeExecutionData[];
	}

	/**
	 * Creates a structured tool by parsing the sub-workflow input schema
	 */
	private async createStructuredTool(
		name: string,
		description: string,
		func: (query: string | IDataObject, runManager?: CallbackManagerForToolRun) => Promise<string>,
	): Promise<DynamicStructuredTool | DynamicTool> {
		const fromAIParser = new AIParametersParser(this.context);
		const collectedArguments = await this.extractFromAIParameters(fromAIParser);

		// If there are no `fromAI` arguments, fall back to creating a simple tool
		if (collectedArguments.length === 0) {
			return new DynamicTool({ name, description, func });
		}

		// Otherwise, prepare the Zod schema and create a structured tool
		const schema = this.createZodSchema(collectedArguments, fromAIParser);
		return new DynamicStructuredTool({ schema, name, description, func });
	}

	private async extractFromAIParameters(
		fromAIParser: AIParametersParser,
	): Promise<FromAIArgument[]> {
		const collectedArguments: FromAIArgument[] = [];
		fromAIParser.traverseNodeParameters(this.context.getNode().parameters, collectedArguments);

		const uniqueArgsMap = new Map<string, FromAIArgument>();
		for (const arg of collectedArguments) {
			uniqueArgsMap.set(arg.key, arg);
		}

		return Array.from(uniqueArgsMap.values());
	}

	private createZodSchema(args: FromAIArgument[], parser: AIParametersParser): z.ZodObject<any> {
		const schemaObj = args.reduce((acc: Record<string, z.ZodTypeAny>, placeholder) => {
			acc[placeholder.key] = parser.generateZodSchema(placeholder);
			return acc;
		}, {});

		return z.object(schemaObj).required();
	}
}

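A minimal sketch, under assumptions, of how a node's supplyData() might wire this service up; the import path, the exported function shape, and the `{ response: tool }` return value are assumptions for illustration and are not shown in this diff.

// Hypothetical wiring sketch, not part of the diff.
import type { ISupplyDataFunctions, SupplyData } from 'n8n-workflow';

import { WorkflowToolService } from './WorkflowToolService';

export async function supplyData(
	this: ISupplyDataFunctions,
	itemIndex: number,
): Promise<SupplyData> {
	const workflowToolService = new WorkflowToolService(this);

	const name = this.getNodeParameter('name', itemIndex) as string;
	const description = this.getNodeParameter('description', itemIndex) as string;

	// Returns a DynamicTool, or a DynamicStructuredTool when workflow inputs are mapped
	const tool = await workflowToolService.createTool({ name, description, itemIndex });
	return { response: tool };
}
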
@ -0,0 +1,151 @@
/* eslint-disable n8n-nodes-base/node-filename-against-convention */
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import { NodeConnectionType, type INodeTypeDescription } from 'n8n-workflow';

import { getConnectionHintNoticeField } from '../../../../utils/sharedFields';

export const versionDescription: INodeTypeDescription = {
	displayName: 'Call n8n Workflow Tool',
	name: 'toolWorkflow',
	icon: 'fa:network-wired',
	group: ['transform'],
	description: 'Uses another n8n workflow as a tool. Allows packaging any n8n node(s) as a tool.',
	defaults: {
		name: 'Call n8n Workflow Tool',
	},
	version: [2],
	inputs: [],
	outputs: [NodeConnectionType.AiTool],
	outputNames: ['Tool'],
	properties: [
		getConnectionHintNoticeField([NodeConnectionType.AiAgent]),
		{
			displayName:
				'See an example of a workflow to suggest meeting slots using AI <a href="/templates/1953" target="_blank">here</a>.',
			name: 'noticeTemplateExample',
			type: 'notice',
			default: '',
		},
		{
			displayName: 'Name',
			name: 'name',
			type: 'string',
			default: '',
			placeholder: 'e.g. My_Color_Tool',
			validateType: 'string-alphanumeric',
			description:
				'The name of the function to be called, could contain letters, numbers, and underscores only',
		},
		{
			displayName: 'Description',
			name: 'description',
			type: 'string',
			default: '',
			placeholder:
				'Call this tool to get a random color. The input should be a string with comma separated names of colors to exclude.',
			typeOptions: {
				rows: 3,
			},
		},

		{
			displayName:
				'This tool will call the workflow you define below, and look in the last node for the response. The workflow needs to start with an Execute Workflow trigger',
			name: 'executeNotice',
			type: 'notice',
			default: '',
		},

		{
			displayName: 'Source',
			name: 'source',
			type: 'options',
			options: [
				{
					name: 'Database',
					value: 'database',
					description: 'Load the workflow from the database by ID',
				},
				{
					name: 'Define Below',
					value: 'parameter',
					description: 'Pass the JSON code of a workflow',
				},
			],
			default: 'database',
			description: 'Where to get the workflow to execute from',
		},

		// ----------------------------------
		//        source:database
		// ----------------------------------
		{
			displayName: 'Workflow',
			name: 'workflowId',
			type: 'workflowSelector',
			displayOptions: {
				show: {
					source: ['database'],
				},
			},
			default: '',
			required: true,
		},
		// -----------------------------------------------
		//     Resource mapper for workflow inputs
		// -----------------------------------------------
		{
			displayName: 'Workflow Inputs',
			name: 'workflowInputs',
			type: 'resourceMapper',
			noDataExpression: true,
			default: {
				mappingMode: 'defineBelow',
				value: null,
			},
			required: true,
			typeOptions: {
				loadOptionsDependsOn: ['workflowId.value'],
				resourceMapper: {
					localResourceMapperMethod: 'loadWorkflowInputMappings',
					valuesLabel: 'Workflow Inputs',
					mode: 'map',
					fieldWords: {
						singular: 'workflow input',
						plural: 'workflow inputs',
					},
					addAllFields: true,
					multiKeyMatch: false,
					supportAutoMap: false,
				},
			},
			displayOptions: {
				show: {
					source: ['database'],
				},
				hide: {
					workflowId: [''],
				},
			},
		},
		// ----------------------------------
		//        source:parameter
		// ----------------------------------
		{
			displayName: 'Workflow JSON',
			name: 'workflowJson',
			type: 'json',
			typeOptions: {
				rows: 10,
			},
			displayOptions: {
				show: {
					source: ['parameter'],
				},
			},
			default: '\n\n\n\n\n\n\n\n\n',
			required: true,
			description: 'The workflow JSON code to execute',
		},
	],
};

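For the 'Define Below' source, the `workflowJson` string is fed into JSON.parse by getSubWorkflowInfo above. A minimal, hypothetical value might look like the sketch below; the node layout, type version, and position are made up, and the sub-workflow is assumed to start with an Execute Workflow Trigger, as the notice requires.

// Hypothetical example value for the `workflowJson` parameter, not part of the diff.
const workflowJson = JSON.stringify({
	nodes: [
		{
			name: 'Execute Workflow Trigger',
			type: 'n8n-nodes-base.executeWorkflowTrigger',
			typeVersion: 1,
			position: [0, 0],
			parameters: {},
		},
	],
	connections: {},
});
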
@ -228,7 +228,7 @@ export class VectorStorePGVector extends createVectorStoreNode({
				testedBy: 'postgresConnectionTest',
			},
		],
		operationModes: ['load', 'insert', 'retrieve'],
		operationModes: ['load', 'insert', 'retrieve', 'retrieve-as-tool'],
	},
	sharedFields,
	insertFields,

@ -65,7 +65,7 @@ export class VectorStorePinecone extends createVectorStoreNode({
				required: true,
			},
		],
		operationModes: ['load', 'insert', 'retrieve', 'update'],
		operationModes: ['load', 'insert', 'retrieve', 'update', 'retrieve-as-tool'],
	},
	methods: { listSearch: { pineconeIndexSearch } },
	retrieveFields,
