Merge branch 'master' into ai-396-implement-split-pane

Oleg Ivaniv 2024-11-07 17:45:11 +01:00
commit 04744cc489
158 changed files with 8126 additions and 5886 deletions

View file

@ -65,6 +65,7 @@ jobs:
continue-on-error: true
with:
workingDir: packages/design-system
onlyChanged: true
projectToken: ${{ secrets.CHROMATIC_PROJECT_TOKEN }}
exitZeroOnChanges: false

View file

@ -42,7 +42,7 @@ jobs:
uses: actions/cache/save@v4.0.0
with:
path: ./packages/**/dist
key: ${{ github.sha }}-base:build
key: ${{ github.sha }}-release:build
- name: Dry-run publishing
run: pnpm publish -r --no-git-checks --dry-run
@ -126,7 +126,7 @@ jobs:
body: ${{github.event.pull_request.body}}
create-sentry-release:
name: Create release on Sentry
name: Create a Sentry Release
needs: [publish-to-npm, publish-to-docker-hub]
runs-on: ubuntu-latest
if: github.event.pull_request.merged == true
@ -136,18 +136,19 @@ jobs:
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
steps:
- uses: actions/checkout@v4.1.1
- name: Restore cached build artifacts
uses: actions/cache/restore@v4.0.0
with:
path: ./packages/**/dist
key: ${{ github.sha }}:db-tests
key: ${{ github.sha }}-release:build
- name: Create a frontend release
uses: getsentry/action-release@v1.7.0
continue-on-error: true
with:
projects: ${{ secrets.SENTRY_FRONTEND_PROJECT }}
version: {{ needs.publish-to-npm.outputs.release }}
version: ${{ needs.publish-to-npm.outputs.release }}
sourcemaps: packages/editor-ui/dist
- name: Create a backend release
@ -155,7 +156,7 @@ jobs:
continue-on-error: true
with:
projects: ${{ secrets.SENTRY_BACKEND_PROJECT }}
version: {{ needs.publish-to-npm.outputs.release }}
version: ${{ needs.publish-to-npm.outputs.release }}
sourcemaps: packages/cli/dist packages/core/dist packages/nodes-base/dist packages/@n8n/n8n-nodes-langchain/dist
trigger-release-note:

View file

@ -1,3 +1,39 @@
# [1.67.0](https://github.com/n8n-io/n8n/compare/n8n@1.66.0...n8n@1.67.0) (2024-11-06)
### Bug Fixes
* Bring back nodes panel telemetry events ([#11456](https://github.com/n8n-io/n8n/issues/11456)) ([130c942](https://github.com/n8n-io/n8n/commit/130c942f633788d1b2f937d6fea342d4450c6e3d))
* **core:** Account for double quotes in instance base URL ([#11495](https://github.com/n8n-io/n8n/issues/11495)) ([c5191e6](https://github.com/n8n-io/n8n/commit/c5191e697a9a9ebfa2b67587cd01b5835ebf6ea8))
* **core:** Do not delete waiting executions when saving of successful executions is disabled ([#11458](https://github.com/n8n-io/n8n/issues/11458)) ([e8757e5](https://github.com/n8n-io/n8n/commit/e8757e58f69e091ac3d2a2f8e8c8e33ac57c1e47))
* **core:** Don't send a `executionFinished` event to the browser with no run data if the execution has already been cleaned up ([#11502](https://github.com/n8n-io/n8n/issues/11502)) ([d1153f5](https://github.com/n8n-io/n8n/commit/d1153f51e80911cbc8f34ba5f038f349b75295c3))
* **core:** Include `projectId` in range query middleware ([#11590](https://github.com/n8n-io/n8n/issues/11590)) ([a6070af](https://github.com/n8n-io/n8n/commit/a6070afdda29631fd36e5213f52bf815268bcda4))
* **core:** Save execution progress for waiting executions, even when progress saving is disabled ([#11535](https://github.com/n8n-io/n8n/issues/11535)) ([6b9353c](https://github.com/n8n-io/n8n/commit/6b9353c80f61ab36945fff434d98242dc1cab7b3))
* **core:** Use the correct docs URL for regular nodes when used as tools ([#11529](https://github.com/n8n-io/n8n/issues/11529)) ([a092b8e](https://github.com/n8n-io/n8n/commit/a092b8e972e1253d92df416f19096a045858e7c1))
* **Edit Image Node:** Fix Text operation by setting Arial as default font ([#11125](https://github.com/n8n-io/n8n/issues/11125)) ([60c1ace](https://github.com/n8n-io/n8n/commit/60c1ace64be29d651ce7b777fbb576598e38b9d7))
* **editor:** Auto focus first fields on SignIn, SignUp and ForgotMyPassword views ([#11445](https://github.com/n8n-io/n8n/issues/11445)) ([5b5bd72](https://github.com/n8n-io/n8n/commit/5b5bd7291dde17880b7699f7e6832938599ffd8f))
* **editor:** Do not overwrite the webhookId in the new canvas ([#11562](https://github.com/n8n-io/n8n/issues/11562)) ([dfd785b](https://github.com/n8n-io/n8n/commit/dfd785bc0894257eb6e62b0dd8f71248c27aae53))
* **editor:** Ensure Enter key on Cancel button correctly cancels node rename ([#11563](https://github.com/n8n-io/n8n/issues/11563)) ([be05ae3](https://github.com/n8n-io/n8n/commit/be05ae36e7790156cb48b317fc254ae46a3b2d8c))
* **editor:** Fix emitting `n8nReady` notification via `postmessage` on new canvas ([#11558](https://github.com/n8n-io/n8n/issues/11558)) ([463d101](https://github.com/n8n-io/n8n/commit/463d101f3592e6df4afd66c4d0fde0cb4aec34cc))
* **editor:** Fix run index input for RunData view in sub-nodes ([#11538](https://github.com/n8n-io/n8n/issues/11538)) ([434d31c](https://github.com/n8n-io/n8n/commit/434d31ce928342d52b6ab8b78639afd7829216d4))
* **editor:** Fix selected credential being overwritten in NDV ([#11496](https://github.com/n8n-io/n8n/issues/11496)) ([a26c0e2](https://github.com/n8n-io/n8n/commit/a26c0e2c3c7da87bfaba9737a967aa0070810d85))
* **editor:** Keep workflow pristine after load on new canvas ([#11579](https://github.com/n8n-io/n8n/issues/11579)) ([7254359](https://github.com/n8n-io/n8n/commit/7254359855b89769613cd5cc24dbb4f45a7cc76f))
* Show Pinned data in demo mode ([#11490](https://github.com/n8n-io/n8n/issues/11490)) ([ca2a583](https://github.com/n8n-io/n8n/commit/ca2a583b5cbb0cba3ecb694261806de16547aa91))
* Toast not aligned to the bottom when AI Assistant is disabled ([#11549](https://github.com/n8n-io/n8n/issues/11549)) ([e80f7e0](https://github.com/n8n-io/n8n/commit/e80f7e0a02a972379f73af6a44de11768081086e))
### Features
* Add Rapid7 InsightVm credentials ([#11462](https://github.com/n8n-io/n8n/issues/11462)) ([46eceab](https://github.com/n8n-io/n8n/commit/46eceabc27ac219b11b85c16c533a2cff848c5dd))
* **AI Transform Node:** UX improvements ([#11280](https://github.com/n8n-io/n8n/issues/11280)) ([8a48407](https://github.com/n8n-io/n8n/commit/8a484077af3d3e1fe2d1b90b1ea9edf4ba41fcb6))
* **Anthropic Chat Model Node:** Add support for Haiku 3.5 ([#11551](https://github.com/n8n-io/n8n/issues/11551)) ([8b39825](https://github.com/n8n-io/n8n/commit/8b398256a81594a52f20f8eb8adf8ff205209bc1))
* **Convert to File Node:** Add delimiter option for converting to CSV ([#11556](https://github.com/n8n-io/n8n/issues/11556)) ([63d454b](https://github.com/n8n-io/n8n/commit/63d454b776c092ff8c6c521a7e083774adb8f649))
* **editor:** Update panning and selection keybindings on new canvas ([#11534](https://github.com/n8n-io/n8n/issues/11534)) ([5e2e205](https://github.com/n8n-io/n8n/commit/5e2e205394adf76faf02aee2d4f21df71848e1d4))
* **Gmail Trigger Node:** Add filter option to include drafts ([#11441](https://github.com/n8n-io/n8n/issues/11441)) ([7a2be77](https://github.com/n8n-io/n8n/commit/7a2be77f384a32ede3acad8fe24fb89227c058bf))
* **Intercom Node:** Update credential to new style ([#11485](https://github.com/n8n-io/n8n/issues/11485)) ([b137e13](https://github.com/n8n-io/n8n/commit/b137e13845f0714ebf7421c837f5ab104b66709b))
# [1.66.0](https://github.com/n8n-io/n8n/compare/n8n@1.65.0...n8n@1.66.0) (2024-10-31)

View file

@ -26,6 +26,22 @@ const nodeDetailsView = new NDV();
const NEW_CREDENTIAL_NAME = 'Something else';
const NEW_CREDENTIAL_NAME2 = 'Something else entirely';
function createNotionCredential() {
workflowPage.actions.addNodeToCanvas(NOTION_NODE_NAME);
workflowPage.actions.openNode(NOTION_NODE_NAME);
workflowPage.getters.nodeCredentialsSelect().click();
getVisibleSelect().find('li').last().click();
credentialsModal.actions.fillCredentialsForm();
cy.get('body').type('{esc}');
workflowPage.actions.deleteNode(NOTION_NODE_NAME);
}
function deleteSelectedCredential() {
workflowPage.getters.nodeCredentialsEditButton().click();
credentialsModal.getters.deleteButton().click();
cy.get('.el-message-box').find('button').contains('Yes').click();
}
describe('Credentials', () => {
beforeEach(() => {
cy.visit(credentialsPage.url);
@ -229,6 +245,40 @@ describe('Credentials', () => {
.should('have.value', NEW_CREDENTIAL_NAME);
});
it('should set a default credential when adding nodes', () => {
workflowPage.actions.visit();
createNotionCredential();
workflowPage.actions.addNodeToCanvas(NOTION_NODE_NAME, true, true);
workflowPage.getters
.nodeCredentialsSelect()
.find('input')
.should('have.value', NEW_NOTION_ACCOUNT_NAME);
deleteSelectedCredential();
});
it('should set a default credential when editing a node', () => {
workflowPage.actions.visit();
createNotionCredential();
workflowPage.actions.addNodeToCanvas(HTTP_REQUEST_NODE_NAME, true, true);
nodeDetailsView.getters.parameterInput('authentication').click();
getVisibleSelect().find('li').contains('Predefined').click();
nodeDetailsView.getters.parameterInput('nodeCredentialType').click();
getVisibleSelect().find('li').contains('Notion API').click();
workflowPage.getters
.nodeCredentialsSelect()
.find('input')
.should('have.value', NEW_NOTION_ACCOUNT_NAME);
deleteSelectedCredential();
});
it('should setup generic authentication for HTTP node', () => {
workflowPage.actions.visit();
workflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME);

View file

@ -4,6 +4,7 @@ import { clickCreateNewCredential, openCredentialSelect } from '../composables/n
import { GMAIL_NODE_NAME, SCHEDULE_TRIGGER_NODE_NAME } from '../constants';
import { CredentialsModal, CredentialsPage, NDV, WorkflowPage } from '../pages';
import { AIAssistant } from '../pages/features/ai-assistant';
import { NodeCreator } from '../pages/features/node-creator';
import { getVisibleSelect } from '../utils';
const wf = new WorkflowPage();
@ -11,6 +12,7 @@ const ndv = new NDV();
const aiAssistant = new AIAssistant();
const credentialsPage = new CredentialsPage();
const credentialsModal = new CredentialsModal();
const nodeCreatorFeature = new NodeCreator();
describe('AI Assistant::disabled', () => {
beforeEach(() => {
@ -280,6 +282,20 @@ describe('AI Assistant::enabled', () => {
wf.getters.isWorkflowSaved();
aiAssistant.getters.placeholderMessage().should('not.exist');
});
it('should send message via enter even with global NodeCreator panel opened', () => {
cy.intercept('POST', '/rest/ai/chat', {
statusCode: 200,
fixture: 'aiAssistant/responses/simple_message_response.json',
}).as('chatRequest');
wf.actions.addInitialNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME);
aiAssistant.actions.openChat();
nodeCreatorFeature.actions.openNodeCreator();
aiAssistant.getters.chatInput().type('Hello{Enter}');
aiAssistant.getters.placeholderMessage().should('not.exist');
});
});
describe('AI Assistant Credential Help', () => {

View file

@ -795,4 +795,46 @@ describe('NDV', () => {
.find('[data-test-id=run-data-schema-item]')
.should('contain.text', 'onlyOnItem3');
});
it('should keep search expanded after Test step node run', () => {
cy.createFixtureWorkflow('Test_ndv_search.json');
workflowPage.actions.zoomToFit();
workflowPage.actions.executeWorkflow();
workflowPage.actions.openNode('Edit Fields');
ndv.getters.outputPanel().should('be.visible');
ndv.getters.outputPanel().findChildByTestId('ndv-search').click().type('US');
ndv.getters.outputTableRow(1).find('mark').should('have.text', 'US');
ndv.actions.execute();
ndv.getters
.outputPanel()
.findChildByTestId('ndv-search')
.should('be.visible')
.should('have.value', 'US');
});
it('should not show items count when searching in schema view', () => {
cy.createFixtureWorkflow('Test_ndv_search.json');
workflowPage.actions.zoomToFit();
workflowPage.actions.openNode('Edit Fields');
ndv.getters.outputPanel().should('be.visible');
ndv.actions.execute();
ndv.actions.switchOutputMode('Schema');
ndv.getters.outputPanel().find('[data-test-id=ndv-search]').click().type('US');
ndv.getters.outputPanel().find('[data-test-id=ndv-items-count]').should('not.exist');
});
it('should show additional tooltip when searching in schema view if no matches', () => {
cy.createFixtureWorkflow('Test_ndv_search.json');
workflowPage.actions.zoomToFit();
workflowPage.actions.openNode('Edit Fields');
ndv.getters.outputPanel().should('be.visible');
ndv.actions.execute();
ndv.actions.switchOutputMode('Schema');
ndv.getters.outputPanel().find('[data-test-id=ndv-search]').click().type('foo');
ndv.getters
.outputPanel()
.contains('To search field contents rather than just names, use Table or JSON view')
.should('exist');
});
});

View file

@ -0,0 +1,135 @@
{
"name": "NDV search bugs (introduced by schema view?)",
"nodes": [
{
"parameters": {},
"id": "55635c7b-92ee-4d2d-a0c0-baff9ab071da",
"name": "When clicking Test workflow",
"type": "n8n-nodes-base.manualTrigger",
"position": [
800,
380
],
"typeVersion": 1
},
{
"parameters": {
"operation": "getAllPeople"
},
"id": "4737af43-e49b-4c92-b76f-32605c047114",
"name": "Customer Datastore (n8n training)",
"type": "n8n-nodes-base.n8nTrainingCustomerDatastore",
"typeVersion": 1,
"position": [
1020,
380
]
},
{
"parameters": {
"assignments": {
"assignments": []
},
"includeOtherFields": true,
"options": {}
},
"id": "8cc9b374-1856-4f3f-9315-08e6e27840d8",
"name": "Edit Fields",
"type": "n8n-nodes-base.set",
"typeVersion": 3.4,
"position": [
1240,
380
]
}
],
"pinData": {
"Customer Datastore (n8n training)": [
{
"json": {
"id": "23423532",
"name": "Jay Gatsby",
"email": "gatsby@west-egg.com",
"notes": "Keeps asking about a green light??",
"country": "US",
"created": "1925-04-10"
}
},
{
"json": {
"id": "23423533",
"name": "José Arcadio Buendía",
"email": "jab@macondo.co",
"notes": "Lots of people named after him. Very confusing",
"country": "CO",
"created": "1967-05-05"
}
},
{
"json": {
"id": "23423534",
"name": "Max Sendak",
"email": "info@in-and-out-of-weeks.org",
"notes": "Keeps rolling his terrible eyes",
"country": "US",
"created": "1963-04-09"
}
},
{
"json": {
"id": "23423535",
"name": "Zaphod Beeblebrox",
"email": "captain@heartofgold.com",
"notes": "Felt like I was talking to more than one person",
"country": null,
"created": "1979-10-12"
}
},
{
"json": {
"id": "23423536",
"name": "Edmund Pevensie",
"email": "edmund@narnia.gov",
"notes": "Passionate sailor",
"country": "UK",
"created": "1950-10-16"
}
}
]
},
"connections": {
"When clicking Test workflow": {
"main": [
[
{
"node": "Customer Datastore (n8n training)",
"type": "main",
"index": 0
}
]
]
},
"Customer Datastore (n8n training)": {
"main": [
[
{
"node": "Edit Fields",
"type": "main",
"index": 0
}
]
]
}
},
"active": false,
"settings": {
"executionOrder": "v1"
},
"versionId": "20178044-fb64-4443-88dd-e941517520d0",
"meta": {
"templateCredsSetupCompleted": true,
"instanceId": "be251a83c052a9862eeac953816fbb1464f89dfbf79d7ac490a8e336a8cc8bfd"
},
"id": "aBVnTRON9Y2cSmse",
"tags": []
}

View file

@ -1,6 +1,6 @@
{
"name": "n8n-monorepo",
"version": "1.66.0",
"version": "1.67.0",
"private": true,
"engines": {
"node": ">=20.15",
@ -46,6 +46,7 @@
"@types/jest": "^29.5.3",
"@types/node": "*",
"@types/supertest": "^6.0.2",
"cross-env": "^7.0.3",
"jest": "^29.6.2",
"jest-environment-jsdom": "^29.6.2",
"jest-expect-message": "^1.1.3",

View file

@ -1,11 +1,11 @@
{
"name": "@n8n/chat",
"version": "0.29.0",
"version": "0.30.0",
"scripts": {
"dev": "pnpm run storybook",
"build": "pnpm build:vite && pnpm build:bundle",
"build:vite": "vite build",
"build:bundle": "INCLUDE_VUE=true vite build",
"build:vite": "cross-env vite build",
"build:bundle": "cross-env INCLUDE_VUE=true vite build",
"preview": "vite preview",
"test:dev": "vitest",
"test": "vitest run",

View file

@ -1,6 +1,6 @@
{
"name": "@n8n/config",
"version": "1.16.0",
"version": "1.17.0",
"scripts": {
"clean": "rimraf dist .turbo",
"dev": "pnpm watch",

View file

@ -10,9 +10,8 @@ export type TaskRunnerMode = 'internal_childprocess' | 'internal_launcher' | 'ex
@Config
export class TaskRunnersConfig {
// Defaults to true for now
@Env('N8N_RUNNERS_DISABLED')
disabled: boolean = true;
@Env('N8N_RUNNERS_ENABLED')
enabled: boolean = false;
// Defaults to true for now
@Env('N8N_RUNNERS_MODE')
@ -50,4 +49,8 @@ export class TaskRunnersConfig {
/** How many concurrent tasks can a runner execute at a time */
@Env('N8N_RUNNERS_MAX_CONCURRENCY')
maxConcurrency: number = 5;
/** Should the output of deduplication be asserted for correctness */
@Env('N8N_RUNNERS_ASSERT_DEDUPLICATION_OUTPUT')
assertDeduplicationOutput: boolean = false;
}

View file

@ -222,7 +222,7 @@ describe('GlobalConfig', () => {
},
},
taskRunners: {
disabled: true,
enabled: false,
mode: 'internal_childprocess',
path: '/runners',
authToken: '',
@ -233,6 +233,7 @@ describe('GlobalConfig', () => {
launcherRunner: 'javascript',
maxOldSpaceSize: '',
maxConcurrency: 5,
assertDeduplicationOutput: false,
},
sentry: {
backendDsn: '',

View file

@ -206,10 +206,28 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeE
// If the steps are an AgentFinish and the outputParser is defined, the LLM didn't use the `format_final_response` tool, so we will try to parse the output manually
if (outputParser && typeof steps === 'object' && (steps as AgentFinish).returnValues) {
const finalResponse = (steps as AgentFinish).returnValues;
const returnValues = (await outputParser.parse(finalResponse as unknown as string)) as Record<
string,
unknown
>;
let parserInput: string;
if (finalResponse instanceof Object) {
if ('output' in finalResponse) {
try {
// If the output is an object, we will try to parse it as JSON
// this is because the parser expects a stringified JSON object like { "output": { ... } }
// so we try to parse the output before wrapping it and then stringify it
parserInput = JSON.stringify({ output: jsonParse(finalResponse.output) });
} catch (error) {
// If parsing of the output fails, we will use the raw output
parserInput = finalResponse.output;
}
} else {
// If the output is not an object, we will stringify it as it is
parserInput = JSON.stringify(finalResponse);
}
} else {
parserInput = finalResponse;
}
const returnValues = (await outputParser.parse(parserInput)) as Record<string, unknown>;
return handleParsedStepOutput(returnValues);
}
return handleAgentFinishOutput(steps);
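
The branching above is easier to see in isolation. Below is a condensed sketch of the same normalization, using `jsonParse` from `n8n-workflow` just as the code above does; the helper name is made up for illustration:

```ts
import { jsonParse } from 'n8n-workflow';

/** Coerce the agent's final response into the stringified `{ "output": ... }` shape the parser expects. */
function toParserInput(finalResponse: unknown): string {
	if (finalResponse instanceof Object) {
		if ('output' in finalResponse) {
			const output = (finalResponse as { output: string }).output;
			try {
				// The output may itself be stringified JSON, so parse it before re-wrapping
				return JSON.stringify({ output: jsonParse(output) });
			} catch {
				// If parsing fails, fall back to the raw output string
				return output;
			}
		}
		// No `output` key: stringify the whole object as-is
		return JSON.stringify(finalResponse);
	}
	// Not an object: pass the value through unchanged
	return finalResponse as string;
}
```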

View file

@ -1,5 +1,6 @@
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import { NodeConnectionType } from 'n8n-workflow';
import { PromptTemplate } from '@langchain/core/prompts';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type {
ISupplyDataFunctions,
INodeType,
@ -7,6 +8,7 @@ import type {
SupplyData,
} from 'n8n-workflow';
import { NAIVE_FIX_PROMPT } from './prompt';
import {
N8nOutputFixingParser,
type N8nStructuredOutputParser,
@ -65,6 +67,27 @@ export class OutputParserAutofixing implements INodeType {
default: '',
},
getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
{
displayName: 'Options',
name: 'options',
type: 'collection',
placeholder: 'Add Option',
default: {},
options: [
{
displayName: 'Retry Prompt',
name: 'prompt',
type: 'string',
default: NAIVE_FIX_PROMPT,
typeOptions: {
rows: 10,
},
hint: 'Should include "{error}", "{instructions}", and "{completion}" placeholders',
description:
'Prompt template used for fixing the output. Uses placeholders: "{instructions}" for parsing rules, "{completion}" for the failed attempt, and "{error}" for the validation error message.',
},
],
},
],
};
@ -77,8 +100,20 @@ export class OutputParserAutofixing implements INodeType {
NodeConnectionType.AiOutputParser,
itemIndex,
)) as N8nStructuredOutputParser;
const prompt = this.getNodeParameter('options.prompt', itemIndex, NAIVE_FIX_PROMPT) as string;
const parser = new N8nOutputFixingParser(this, model, outputParser);
if (prompt.length === 0 || !prompt.includes('{error}')) {
throw new NodeOperationError(
this.getNode(),
'Auto-fixing parser prompt has to contain {error} placeholder',
);
}
const parser = new N8nOutputFixingParser(
this,
model,
outputParser,
PromptTemplate.fromTemplate(prompt),
);
return {
response: parser,

View file

@ -0,0 +1,16 @@
export const NAIVE_FIX_PROMPT = `Instructions:
--------------
{instructions}
--------------
Completion:
--------------
{completion}
--------------
Above, the Completion did not satisfy the constraints given in the Instructions.
Error:
--------------
{error}
--------------
Please try again. Please only respond with an answer that satisfies the constraints laid out in the Instructions:`;
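
For orientation, here is a minimal sketch of how these placeholders get filled at retry time, mirroring how the node builds the template with `PromptTemplate.fromTemplate` and how `getRetryChain` uses it further below; the input values are illustrative:

```ts
import { PromptTemplate } from '@langchain/core/prompts';

import { NAIVE_FIX_PROMPT } from './prompt';

async function renderFixPrompt(): Promise<string> {
	const fixPrompt = PromptTemplate.fromTemplate(NAIVE_FIX_PROMPT);
	// The three placeholders map to: parsing rules, the failed attempt, and the validation error
	return await fixPrompt.format({
		instructions: 'Reply with a JSON object that matches the schema',
		completion: '{"name": "Alice"', // malformed model output
		error: 'Unexpected end of JSON input',
	});
}
```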

View file

@ -1,15 +1,19 @@
/* eslint-disable @typescript-eslint/unbound-method */
/* eslint-disable @typescript-eslint/no-unsafe-call */
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import { OutputParserException } from '@langchain/core/output_parsers';
import type { MockProxy } from 'jest-mock-extended';
import { mock } from 'jest-mock-extended';
import { normalizeItems } from 'n8n-core';
import type { IExecuteFunctions, IWorkflowDataProxyData } from 'n8n-workflow';
import { ApplicationError, NodeConnectionType } from 'n8n-workflow';
import { ApplicationError, NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import { N8nOutputFixingParser } from '../../../../utils/output_parsers/N8nOutputParser';
import type { N8nStructuredOutputParser } from '../../../../utils/output_parsers/N8nOutputParser';
import type {
N8nOutputFixingParser,
N8nStructuredOutputParser,
} from '../../../../utils/output_parsers/N8nOutputParser';
import { OutputParserAutofixing } from '../OutputParserAutofixing.node';
import { NAIVE_FIX_PROMPT } from '../prompt';
describe('OutputParserAutofixing', () => {
let outputParser: OutputParserAutofixing;
@ -34,6 +38,13 @@ describe('OutputParserAutofixing', () => {
throw new ApplicationError('Unexpected connection type');
});
thisArg.getNodeParameter.mockReset();
thisArg.getNodeParameter.mockImplementation((parameterName) => {
if (parameterName === 'options.prompt') {
return NAIVE_FIX_PROMPT;
}
throw new ApplicationError('Not implemented');
});
});
afterEach(() => {
@ -48,73 +59,132 @@ describe('OutputParserAutofixing', () => {
});
}
it('should successfully parse valid output without needing to fix it', async () => {
const validOutput = { name: 'Alice', age: 25 };
describe('Configuration', () => {
it('should throw error when prompt template does not contain {error} placeholder', async () => {
thisArg.getNodeParameter.mockImplementation((parameterName) => {
if (parameterName === 'options.prompt') {
return 'Invalid prompt without error placeholder';
}
throw new ApplicationError('Not implemented');
});
mockStructuredOutputParser.parse.mockResolvedValueOnce(validOutput);
await expect(outputParser.supplyData.call(thisArg, 0)).rejects.toThrow(
new NodeOperationError(
thisArg.getNode(),
'Auto-fixing parser prompt has to contain {error} placeholder',
),
);
});
const { response } = (await outputParser.supplyData.call(thisArg, 0)) as {
response: N8nOutputFixingParser;
};
it('should throw error when prompt template is empty', async () => {
thisArg.getNodeParameter.mockImplementation((parameterName) => {
if (parameterName === 'options.prompt') {
return '';
}
throw new ApplicationError('Not implemented');
});
// Ensure the response contains the output-fixing parser
expect(response).toBeDefined();
expect(response).toBeInstanceOf(N8nOutputFixingParser);
await expect(outputParser.supplyData.call(thisArg, 0)).rejects.toThrow(
new NodeOperationError(
thisArg.getNode(),
'Auto-fixing parser prompt has to contain {error} placeholder',
),
);
});
const result = await response.parse('{"name": "Alice", "age": 25}');
it('should use default prompt when none specified', async () => {
thisArg.getNodeParameter.mockImplementation((parameterName) => {
if (parameterName === 'options.prompt') {
return NAIVE_FIX_PROMPT;
}
throw new ApplicationError('Not implemented');
});
// Validate that the parser succeeds without retry
expect(result).toEqual(validOutput);
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(1); // Only one call to parse
const { response } = (await outputParser.supplyData.call(thisArg, 0)) as {
response: N8nOutputFixingParser;
};
expect(response).toBeDefined();
});
});
it('should throw an error when both structured parser and fixing parser fail', async () => {
mockStructuredOutputParser.parse
.mockRejectedValueOnce(new Error('Invalid JSON')) // First attempt fails
.mockRejectedValueOnce(new Error('Fixing attempt failed')); // Second attempt fails
describe('Parsing', () => {
it('should successfully parse valid output without needing to fix it', async () => {
const validOutput = { name: 'Alice', age: 25 };
const { response } = (await outputParser.supplyData.call(thisArg, 0)) as {
response: N8nOutputFixingParser;
};
mockStructuredOutputParser.parse.mockResolvedValueOnce(validOutput);
response.getRetryChain = getMockedRetryChain('{}');
const { response } = (await outputParser.supplyData.call(thisArg, 0)) as {
response: N8nOutputFixingParser;
};
await expect(response.parse('Invalid JSON string')).rejects.toThrow('Fixing attempt failed');
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2);
});
const result = await response.parse('{"name": "Alice", "age": 25}');
it('should reject on the first attempt and succeed on retry with the parsed content', async () => {
const validOutput = { name: 'Bob', age: 28 };
expect(result).toEqual(validOutput);
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(1);
});
mockStructuredOutputParser.parse.mockRejectedValueOnce(new Error('Invalid JSON'));
it('should not retry on non-OutputParserException errors', async () => {
const error = new Error('Some other error');
mockStructuredOutputParser.parse.mockRejectedValueOnce(error);
const { response } = (await outputParser.supplyData.call(thisArg, 0)) as {
response: N8nOutputFixingParser;
};
const { response } = (await outputParser.supplyData.call(thisArg, 0)) as {
response: N8nOutputFixingParser;
};
response.getRetryChain = getMockedRetryChain(JSON.stringify(validOutput));
await expect(response.parse('Invalid JSON string')).rejects.toThrow(error);
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(1);
});
mockStructuredOutputParser.parse.mockResolvedValueOnce(validOutput);
it('should retry on OutputParserException and succeed', async () => {
const validOutput = { name: 'Bob', age: 28 };
const result = await response.parse('Invalid JSON string');
mockStructuredOutputParser.parse
.mockRejectedValueOnce(new OutputParserException('Invalid JSON'))
.mockResolvedValueOnce(validOutput);
expect(result).toEqual(validOutput);
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2); // First fails, second succeeds
});
const { response } = (await outputParser.supplyData.call(thisArg, 0)) as {
response: N8nOutputFixingParser;
};
it('should handle non-JSON formatted response from fixing parser', async () => {
mockStructuredOutputParser.parse.mockRejectedValueOnce(new Error('Invalid JSON'));
response.getRetryChain = getMockedRetryChain(JSON.stringify(validOutput));
const { response } = (await outputParser.supplyData.call(thisArg, 0)) as {
response: N8nOutputFixingParser;
};
const result = await response.parse('Invalid JSON string');
response.getRetryChain = getMockedRetryChain('This is not JSON');
expect(result).toEqual(validOutput);
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2);
});
mockStructuredOutputParser.parse.mockRejectedValueOnce(new Error('Unexpected token'));
it('should handle failed retry attempt', async () => {
mockStructuredOutputParser.parse
.mockRejectedValueOnce(new OutputParserException('Invalid JSON'))
.mockRejectedValueOnce(new Error('Still invalid JSON'));
// Expect the structured parser to throw an error on invalid JSON from retry
await expect(response.parse('Invalid JSON string')).rejects.toThrow('Unexpected token');
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2); // First fails, second tries and fails
const { response } = (await outputParser.supplyData.call(thisArg, 0)) as {
response: N8nOutputFixingParser;
};
response.getRetryChain = getMockedRetryChain('Still not valid JSON');
await expect(response.parse('Invalid JSON string')).rejects.toThrow('Still invalid JSON');
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2);
});
it('should throw non-OutputParserException errors immediately without retry', async () => {
const customError = new Error('Database connection error');
const retryChainSpy = jest.fn();
mockStructuredOutputParser.parse.mockRejectedValueOnce(customError);
const { response } = (await outputParser.supplyData.call(thisArg, 0)) as {
response: N8nOutputFixingParser;
};
response.getRetryChain = retryChainSpy;
await expect(response.parse('Some input')).rejects.toThrow('Database connection error');
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(1);
expect(retryChainSpy).not.toHaveBeenCalled();
});
});
});

View file

@ -1,6 +1,6 @@
{
"name": "@n8n/n8n-nodes-langchain",
"version": "1.66.0",
"version": "1.67.0",
"description": "",
"main": "index.js",
"scripts": {

View file

@ -1,12 +1,12 @@
import type { Callbacks } from '@langchain/core/callbacks/manager';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { AIMessage } from '@langchain/core/messages';
import { BaseOutputParser } from '@langchain/core/output_parsers';
import { BaseOutputParser, OutputParserException } from '@langchain/core/output_parsers';
import type { PromptTemplate } from '@langchain/core/prompts';
import type { ISupplyDataFunctions } from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';
import type { N8nStructuredOutputParser } from './N8nStructuredOutputParser';
import { NAIVE_FIX_PROMPT } from './prompt';
import { logAiEvent } from '../helpers';
export class N8nOutputFixingParser extends BaseOutputParser {
@ -16,12 +16,13 @@ export class N8nOutputFixingParser extends BaseOutputParser {
private context: ISupplyDataFunctions,
private model: BaseLanguageModel,
private outputParser: N8nStructuredOutputParser,
private fixPromptTemplate: PromptTemplate,
) {
super();
}
getRetryChain() {
return NAIVE_FIX_PROMPT.pipe(this.model);
return this.fixPromptTemplate.pipe(this.model);
}
/**
@ -47,11 +48,14 @@ export class N8nOutputFixingParser extends BaseOutputParser {
return response;
} catch (error) {
if (!(error instanceof OutputParserException)) {
throw error;
}
try {
// Second attempt: use retry chain to fix the output
const result = (await this.getRetryChain().invoke({
completion,
error,
error: error.message,
instructions: this.getFormatInstructions(),
})) as AIMessage;

View file

@ -1,6 +1,6 @@
{
"name": "@n8n/task-runner",
"version": "1.4.0",
"version": "1.5.0",
"scripts": {
"clean": "rimraf dist .turbo",
"start": "node dist/start.js",

View file

@ -63,4 +63,35 @@ describe('TaskRunnerNodeTypes', () => {
});
});
});
describe('addNodeTypeDescriptions', () => {
it('should add new node types', () => {
const nodeTypes = new TaskRunnerNodeTypes(TYPES);
const nodeTypeDescriptions = [
{ name: 'new-type', version: 1 },
{ name: 'new-type', version: 2 },
] as INodeTypeDescription[];
nodeTypes.addNodeTypeDescriptions(nodeTypeDescriptions);
expect(nodeTypes.getByNameAndVersion('new-type', 1)).toEqual({
description: { name: 'new-type', version: 1 },
});
expect(nodeTypes.getByNameAndVersion('new-type', 2)).toEqual({
description: { name: 'new-type', version: 2 },
});
});
});
describe('onlyUnknown', () => {
it('should return only unknown node types', () => {
const nodeTypes = new TaskRunnerNodeTypes(TYPES);
const candidate = { name: 'unknown', version: 1 };
expect(nodeTypes.onlyUnknown([candidate])).toEqual([candidate]);
expect(nodeTypes.onlyUnknown([SINGLE_VERSIONED])).toEqual([]);
});
});
});

View file

@ -0,0 +1,29 @@
import type { IExecuteData, INodeExecutionData } from 'n8n-workflow';
import type { DataRequestResponse } from '@/runner-types';
/**
* Reconstructs data from a DataRequestResponse into the initial
* data structures.
*/
export class DataRequestResponseReconstruct {
/**
* Reconstructs `connectionInputData` from a DataRequestResponse
*/
reconstructConnectionInputData(
inputData: DataRequestResponse['inputData'],
): INodeExecutionData[] {
return inputData?.main?.[0] ?? [];
}
/**
* Reconstruct `executeData` from a DataRequestResponse
*/
reconstructExecuteData(response: DataRequestResponse): IExecuteData {
return {
data: response.inputData,
node: response.node,
source: response.connectionInputSource,
};
}
}
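
A short usage sketch, assuming the class is consumed roughly the way `reconstructTaskData` does further below; the `@/data-request/...` import path is inferred from the index export and may differ:

```ts
import { DataRequestResponseReconstruct } from '@/data-request/data-request-response-reconstruct';
import type { DataRequestResponse } from '@/runner-types';

function buildExecutionContext(response: DataRequestResponse) {
	const reconstruct = new DataRequestResponseReconstruct();
	// First `main` input branch, or an empty array when there is no input
	const connectionInputData = reconstruct.reconstructConnectionInputData(response.inputData);
	// Bundle of node, input data, and input source, as expected by the workflow data proxy
	const executeData = reconstruct.reconstructExecuteData(response);
	return { connectionInputData, executeData };
}
```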

View file

@ -1,3 +1,4 @@
export * from './task-runner';
export * from './runner-types';
export * from './message-types';
export * from './data-request/data-request-response-reconstruct';

View file

@ -3,15 +3,21 @@ import type { CodeExecutionMode, IDataObject } from 'n8n-workflow';
import fs from 'node:fs';
import { builtinModules } from 'node:module';
import type { JsRunnerConfig } from '@/config/js-runner-config';
import { MainConfig } from '@/config/main-config';
import { ExecutionError } from '@/js-task-runner/errors/execution-error';
import { ValidationError } from '@/js-task-runner/errors/validation-error';
import type { DataRequestResponse, JSExecSettings } from '@/js-task-runner/js-task-runner';
import type { JSExecSettings } from '@/js-task-runner/js-task-runner';
import { JsTaskRunner } from '@/js-task-runner/js-task-runner';
import type { DataRequestResponse } from '@/runner-types';
import type { Task } from '@/task-runner';
import { newCodeTaskData, newTaskWithSettings, withPairedItem, wrapIntoJson } from './test-data';
import type { JsRunnerConfig } from '../../config/js-runner-config';
import { MainConfig } from '../../config/main-config';
import { ExecutionError } from '../errors/execution-error';
import {
newDataRequestResponse,
newTaskWithSettings,
withPairedItem,
wrapIntoJson,
} from './test-data';
jest.mock('ws');
@ -68,7 +74,7 @@ describe('JsTaskRunner', () => {
nodeMode: 'runOnceForAllItems',
...settings,
}),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson)),
taskData: newDataRequestResponse(inputItems.map(wrapIntoJson)),
runner,
});
};
@ -91,7 +97,7 @@ describe('JsTaskRunner', () => {
nodeMode: 'runOnceForEachItem',
...settings,
}),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson)),
taskData: newDataRequestResponse(inputItems.map(wrapIntoJson)),
runner,
});
};
@ -108,7 +114,7 @@ describe('JsTaskRunner', () => {
await execTaskWithParams({
task,
taskData: newCodeTaskData([wrapIntoJson({})]),
taskData: newDataRequestResponse([wrapIntoJson({})]),
});
expect(defaultTaskRunner.makeRpcCall).toHaveBeenCalledWith(task.taskId, 'logNodeOutput', [
@ -243,7 +249,7 @@ describe('JsTaskRunner', () => {
code: 'return { val: $env.VAR1 }',
nodeMode: 'runOnceForAllItems',
}),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), {
envProviderState: {
isEnvAccessBlocked: false,
isProcessAvailable: true,
@ -262,7 +268,7 @@ describe('JsTaskRunner', () => {
code: 'return { val: $env.VAR1 }',
nodeMode: 'runOnceForAllItems',
}),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), {
envProviderState: {
isEnvAccessBlocked: true,
isProcessAvailable: true,
@ -279,7 +285,7 @@ describe('JsTaskRunner', () => {
code: 'return Object.values($env).concat(Object.keys($env))',
nodeMode: 'runOnceForAllItems',
}),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), {
envProviderState: {
isEnvAccessBlocked: false,
isProcessAvailable: true,
@ -298,7 +304,7 @@ describe('JsTaskRunner', () => {
code: 'return { val: $env.N8N_RUNNERS_N8N_URI }',
nodeMode: 'runOnceForAllItems',
}),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), {
envProviderState: undefined,
}),
});
@ -313,7 +319,7 @@ describe('JsTaskRunner', () => {
code: 'return { val: Buffer.from("test-buffer").toString() }',
nodeMode: 'runOnceForAllItems',
}),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), {
envProviderState: undefined,
}),
});
@ -325,7 +331,7 @@ describe('JsTaskRunner', () => {
code: 'return { val: Buffer.from("test-buffer").toString() }',
nodeMode: 'runOnceForEachItem',
}),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), {
envProviderState: undefined,
}),
});
@ -771,7 +777,7 @@ describe('JsTaskRunner', () => {
code: 'unknown',
nodeMode,
}),
taskData: newCodeTaskData([wrapIntoJson({ a: 1 })]),
taskData: newDataRequestResponse([wrapIntoJson({ a: 1 })]),
}),
).rejects.toThrow(ExecutionError);
},
@ -793,7 +799,7 @@ describe('JsTaskRunner', () => {
jest.spyOn(runner, 'sendOffers').mockImplementation(() => {});
jest
.spyOn(runner, 'requestData')
.mockResolvedValue(newCodeTaskData([wrapIntoJson({ a: 1 })]));
.mockResolvedValue(newDataRequestResponse([wrapIntoJson({ a: 1 })]));
await runner.receivedSettings(taskId, task.settings);

View file

@ -2,7 +2,8 @@ import type { IDataObject, INode, INodeExecutionData, ITaskData } from 'n8n-work
import { NodeConnectionType } from 'n8n-workflow';
import { nanoid } from 'nanoid';
import type { DataRequestResponse, JSExecSettings } from '@/js-task-runner/js-task-runner';
import type { JSExecSettings } from '@/js-task-runner/js-task-runner';
import type { DataRequestResponse } from '@/runner-types';
import type { Task } from '@/task-runner';
/**
@ -46,10 +47,10 @@ export const newTaskData = (opts: Partial<ITaskData> & Pick<ITaskData, 'source'>
});
/**
* Creates a new all code task data with the given options
* Creates a new data request response with the given options
*/
export const newCodeTaskData = (
codeNodeInputData: INodeExecutionData[],
export const newDataRequestResponse = (
inputData: INodeExecutionData[],
opts: Partial<DataRequestResponse> = {},
): DataRequestResponse => {
const codeNode = newNode({
@ -83,9 +84,8 @@ export const newCodeTaskData = (
nodes: [manualTriggerNode, codeNode],
},
inputData: {
main: [codeNodeInputData],
main: [inputData],
},
connectionInputData: codeNodeInputData,
node: codeNode,
runExecutionData: {
startData: {},
@ -95,7 +95,7 @@ export const newCodeTaskData = (
newTaskData({
source: [],
data: {
main: [codeNodeInputData],
main: [inputData],
},
}),
],
@ -137,14 +137,13 @@ export const newCodeTaskData = (
var: 'value',
},
},
executeData: {
node: codeNode,
data: {
main: [codeNodeInputData],
},
source: {
main: [{ previousNode: manualTriggerNode.name }],
},
connectionInputSource: {
main: [
{
previousNode: 'Trigger',
previousNodeOutput: 0,
},
],
},
...opts,
};

View file

@ -1,8 +1,13 @@
import { getAdditionalKeys } from 'n8n-core';
import type { IDataObject, INodeType, IWorkflowExecuteAdditionalData } from 'n8n-workflow';
import type {
IDataObject,
IExecuteData,
INodeType,
IWorkflowExecuteAdditionalData,
} from 'n8n-workflow';
import { Workflow, WorkflowDataProxy } from 'n8n-workflow';
import { newCodeTaskData } from '../../__tests__/test-data';
import { newDataRequestResponse } from '../../__tests__/test-data';
import { BuiltInsParser } from '../built-ins-parser';
import { BuiltInsParserState } from '../built-ins-parser-state';
@ -159,7 +164,12 @@ describe('BuiltInsParser', () => {
describe('WorkflowDataProxy built-ins', () => {
it('should have a known list of built-ins', () => {
const data = newCodeTaskData([]);
const data = newDataRequestResponse([]);
const executeData: IExecuteData = {
data: {},
node: data.node,
source: data.connectionInputSource,
};
const dataProxy = new WorkflowDataProxy(
new Workflow({
...data.workflow,
@ -179,7 +189,7 @@ describe('BuiltInsParser', () => {
data.runIndex,
0,
data.activeNodeName,
data.connectionInputData,
[],
data.siblingParameters,
data.mode,
getAdditionalKeys(
@ -187,7 +197,7 @@ describe('BuiltInsParser', () => {
data.mode,
data.runExecutionData,
),
data.executeData,
executeData,
data.defaultReturnRunIndex,
data.selfData,
data.contextNodeName,

View file

@ -1,27 +1,25 @@
import { getAdditionalKeys } from 'n8n-core';
import {
WorkflowDataProxy,
// type IWorkflowDataProxyAdditionalKeys,
Workflow,
} from 'n8n-workflow';
import { WorkflowDataProxy, Workflow } from 'n8n-workflow';
import type {
CodeExecutionMode,
INode,
ITaskDataConnections,
IWorkflowExecuteAdditionalData,
WorkflowParameters,
IDataObject,
IExecuteData,
INodeExecutionData,
INodeParameters,
IRunExecutionData,
WorkflowExecuteMode,
WorkflowParameters,
ITaskDataConnections,
INode,
IRunExecutionData,
EnvProviderState,
IExecuteData,
INodeTypeDescription,
} from 'n8n-workflow';
import * as a from 'node:assert';
import { runInNewContext, type Context } from 'node:vm';
import type { TaskResultData } from '@/runner-types';
import type { MainConfig } from '@/config/main-config';
import type { DataRequestResponse, PartialAdditionalData, TaskResultData } from '@/runner-types';
import { type Task, TaskRunner } from '@/task-runner';
import { BuiltInsParser } from './built-ins-parser/built-ins-parser';
@ -32,7 +30,7 @@ import { makeSerializable } from './errors/serializable-error';
import type { RequireResolver } from './require-resolver';
import { createRequireResolver } from './require-resolver';
import { validateRunForAllItemsOutput, validateRunForEachItemOutput } from './result-validation';
import type { MainConfig } from '../config/main-config';
import { DataRequestResponseReconstruct } from '../data-request/data-request-response-reconstruct';
export interface JSExecSettings {
code: string;
@ -44,34 +42,19 @@ export interface JSExecSettings {
mode: WorkflowExecuteMode;
}
export interface PartialAdditionalData {
executionId?: string;
restartExecutionId?: string;
restApiUrl: string;
instanceBaseUrl: string;
formWaitingBaseUrl: string;
webhookBaseUrl: string;
webhookWaitingBaseUrl: string;
webhookTestBaseUrl: string;
currentNodeParameters?: INodeParameters;
executionTimeoutTimestamp?: number;
userId?: string;
variables: IDataObject;
}
export interface DataRequestResponse {
export interface JsTaskData {
workflow: Omit<WorkflowParameters, 'nodeTypes'>;
inputData: ITaskDataConnections;
connectionInputData: INodeExecutionData[];
node: INode;
runExecutionData: IRunExecutionData;
runIndex: number;
itemIndex: number;
activeNodeName: string;
connectionInputData: INodeExecutionData[];
siblingParameters: INodeParameters;
mode: WorkflowExecuteMode;
envProviderState?: EnvProviderState;
envProviderState: EnvProviderState;
executeData?: IExecuteData;
defaultReturnRunIndex: number;
selfData: IDataObject;
@ -88,6 +71,8 @@ export class JsTaskRunner extends TaskRunner {
private readonly builtInsParser = new BuiltInsParser();
private readonly taskDataReconstruct = new DataRequestResponseReconstruct();
constructor(config: MainConfig, name = 'JS Task Runner') {
super({
taskType: 'javascript',
@ -114,11 +99,15 @@ export class JsTaskRunner extends TaskRunner {
? neededBuiltInsResult.result
: BuiltInsParserState.newNeedsAllDataState();
const data = await this.requestData<DataRequestResponse>(
const dataResponse = await this.requestData<DataRequestResponse>(
task.taskId,
neededBuiltIns.toDataRequestParams(),
);
const data = this.reconstructTaskData(dataResponse);
await this.requestNodeTypeIfNeeded(neededBuiltIns, data.workflow, task.taskId);
const workflowParams = data.workflow;
const workflow = new Workflow({
...workflowParams,
@ -177,7 +166,7 @@ export class JsTaskRunner extends TaskRunner {
private async runForAllItems(
taskId: string,
settings: JSExecSettings,
data: DataRequestResponse,
data: JsTaskData,
workflow: Workflow,
customConsole: CustomConsole,
): Promise<INodeExecutionData[]> {
@ -224,7 +213,7 @@ export class JsTaskRunner extends TaskRunner {
private async runForEachItem(
taskId: string,
settings: JSExecSettings,
data: DataRequestResponse,
data: JsTaskData,
workflow: Workflow,
customConsole: CustomConsole,
): Promise<INodeExecutionData[]> {
@ -291,7 +280,7 @@ export class JsTaskRunner extends TaskRunner {
return returnData;
}
private createDataProxy(data: DataRequestResponse, workflow: Workflow, itemIndex: number) {
private createDataProxy(data: JsTaskData, workflow: Workflow, itemIndex: number) {
return new WorkflowDataProxy(
workflow,
data.runExecutionData,
@ -335,4 +324,43 @@ export class JsTaskRunner extends TaskRunner {
return new ExecutionError({ message: JSON.stringify(error) });
}
private reconstructTaskData(response: DataRequestResponse): JsTaskData {
return {
...response,
connectionInputData: this.taskDataReconstruct.reconstructConnectionInputData(
response.inputData,
),
executeData: this.taskDataReconstruct.reconstructExecuteData(response),
};
}
private async requestNodeTypeIfNeeded(
neededBuiltIns: BuiltInsParserState,
workflow: JsTaskData['workflow'],
taskId: string,
) {
/**
* We request node types only when we know a task needs all nodes, because
* needing all nodes means that the task relies on paired item functionality,
* which is the same requirement for needing node types.
*/
if (neededBuiltIns.needsAllNodes) {
const uniqueNodeTypes = new Map(
workflow.nodes.map((node) => [
`${node.type}|${node.typeVersion}`,
{ name: node.type, version: node.typeVersion },
]),
);
const unknownNodeTypes = this.nodeTypes.onlyUnknown([...uniqueNodeTypes.values()]);
const nodeTypes = await this.requestNodeTypes<INodeTypeDescription[]>(
taskId,
unknownNodeTypes,
);
this.nodeTypes.addNodeTypeDescriptions(nodeTypes);
}
}
}

View file

@ -1,6 +1,11 @@
import type { INodeTypeBaseDescription } from 'n8n-workflow';
import type { RPC_ALLOW_LIST, TaskDataRequestParams, TaskResultData } from './runner-types';
import type {
NeededNodeType,
RPC_ALLOW_LIST,
TaskDataRequestParams,
TaskResultData,
} from './runner-types';
export namespace BrokerMessage {
export namespace ToRunner {
@ -47,6 +52,8 @@ export namespace BrokerMessage {
export interface NodeTypes {
type: 'broker:nodetypes';
taskId: string;
requestId: string;
nodeTypes: INodeTypeBaseDescription[];
}
@ -87,6 +94,13 @@ export namespace BrokerMessage {
requestParams: TaskDataRequestParams;
}
export interface NodeTypesRequest {
type: 'broker:nodetypesrequest';
taskId: string;
requestId: string;
requestParams: NeededNodeType[];
}
export interface RPC {
type: 'broker:rpc';
callId: string;
@ -95,7 +109,7 @@ export namespace BrokerMessage {
params: unknown[];
}
export type All = TaskReady | TaskDone | TaskError | TaskDataRequest | RPC;
export type All = TaskReady | TaskDone | TaskError | TaskDataRequest | NodeTypesRequest | RPC;
}
}
@ -120,6 +134,13 @@ export namespace RequesterMessage {
data: unknown;
}
export interface NodeTypesResponse {
type: 'requester:nodetypesresponse';
taskId: string;
requestId: string;
nodeTypes: INodeTypeBaseDescription[];
}
export interface RPCResponse {
type: 'requester:rpcresponse';
taskId: string;
@ -134,7 +155,13 @@ export namespace RequesterMessage {
taskType: string;
}
export type All = TaskSettings | TaskCancel | RPCResponse | TaskDataResponse | TaskRequest;
export type All =
| TaskSettings
| TaskCancel
| RPCResponse
| TaskDataResponse
| NodeTypesResponse
| TaskRequest;
}
}
@ -183,6 +210,25 @@ export namespace RunnerMessage {
requestParams: TaskDataRequestParams;
}
export interface NodeTypesRequest {
type: 'runner:nodetypesrequest';
taskId: string;
requestId: string;
/**
* Which node types should be included in the runner's node types request.
*
* Node types are needed only when the script relies on paired item functionality.
* If so, we need only the node types not already cached in the runner.
*
* TODO: In future we can trim this down to only node types in the paired item chain,
* rather than assuming we need all node types in the workflow.
*
* @example [{ name: 'n8n-nodes-base.httpRequest', version: 1 }]
*/
requestParams: NeededNodeType[];
}
export interface RPC {
type: 'runner:rpc';
callId: string;
@ -199,6 +245,7 @@ export namespace RunnerMessage {
| TaskRejected
| TaskOffer
| RPC
| TaskDataRequest;
| TaskDataRequest
| NodeTypesRequest;
}
}
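
An illustrative message pair for the new node-types round trip; the interface shapes come from this file, while the namespace paths, module path, and concrete values are assumptions for the example:

```ts
import type { BrokerMessage, RunnerMessage } from './message-types';

const request: RunnerMessage.ToBroker.NodeTypesRequest = {
	type: 'runner:nodetypesrequest',
	taskId: 'task-1',
	requestId: 'req-1',
	requestParams: [{ name: 'n8n-nodes-base.httpRequest', version: 1 }],
};

const response: BrokerMessage.ToRunner.NodeTypes = {
	type: 'broker:nodetypes',
	taskId: 'task-1',
	requestId: 'req-1', // correlates the response with the pending request in the runner
	nodeTypes: [], // INodeTypeBaseDescription[] payload from the broker
};
```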

View file

@ -7,6 +7,8 @@ import {
type IVersionedNodeType,
} from 'n8n-workflow';
import type { NeededNodeType } from './runner-types';
type VersionedTypes = Map<number, INodeTypeDescription>;
export const DEFAULT_NODETYPE_VERSION = 1;
@ -61,4 +63,30 @@ export class TaskRunnerNodeTypes implements INodeTypes {
getKnownTypes(): IDataObject {
throw new ApplicationError('Unimplemented `getKnownTypes`', { level: 'error' });
}
addNodeTypeDescriptions(nodeTypeDescriptions: INodeTypeDescription[]) {
const newNodeTypes = this.parseNodeTypes(nodeTypeDescriptions);
for (const [name, newVersions] of newNodeTypes.entries()) {
if (!this.nodeTypesByVersion.has(name)) {
this.nodeTypesByVersion.set(name, newVersions);
} else {
const existingVersions = this.nodeTypesByVersion.get(name)!;
for (const [version, nodeType] of newVersions.entries()) {
existingVersions.set(version, nodeType);
}
}
}
}
/** Filter out node type versions that are already registered. */
onlyUnknown(nodeTypes: NeededNodeType[]) {
return nodeTypes.filter(({ name, version }) => {
const existingVersions = this.nodeTypesByVersion.get(name);
if (!existingVersions) return true;
return !existingVersions.has(version);
});
}
}
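
A condensed sketch of the cache-then-fetch pattern these two methods enable; the real flow lives in `requestNodeTypeIfNeeded` above, and the import paths plus the broker fetch function are assumptions:

```ts
import type { INodeTypeDescription } from 'n8n-workflow';

import { TaskRunnerNodeTypes } from './node-types'; // path assumed
import type { NeededNodeType } from './runner-types';

async function ensureNodeTypes(
	nodeTypes: TaskRunnerNodeTypes,
	needed: NeededNodeType[],
	fetchFromBroker: (missing: NeededNodeType[]) => Promise<INodeTypeDescription[]>,
): Promise<void> {
	// Ask the broker only for versions that are not cached yet
	const missing = nodeTypes.onlyUnknown(needed);
	if (missing.length === 0) return;

	nodeTypes.addNodeTypeDescriptions(await fetchFromBroker(missing));
}
```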

View file

@ -8,6 +8,7 @@ import type {
INodeParameters,
IRunExecutionData,
ITaskDataConnections,
ITaskDataConnectionsSource,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowExecuteMode,
@ -29,17 +30,16 @@ export interface TaskDataRequestParams {
export interface DataRequestResponse {
workflow: Omit<WorkflowParameters, 'nodeTypes'>;
inputData: ITaskDataConnections;
connectionInputSource: ITaskDataConnectionsSource | null;
node: INode;
runExecutionData: IRunExecutionData;
runIndex: number;
itemIndex: number;
activeNodeName: string;
connectionInputData: INodeExecutionData[];
siblingParameters: INodeParameters;
mode: WorkflowExecuteMode;
envProviderState: EnvProviderState;
executeData?: IExecuteData;
defaultReturnRunIndex: number;
selfData: IDataObject;
contextNodeName: string;
@ -112,3 +112,6 @@ export const RPC_ALLOW_LIST = [
'helpers.httpRequest',
'logNodeOutput',
] as const;
/** Node types needed for the runner to execute a task. */
export type NeededNodeType = { name: string; version: number };

View file

@ -1,4 +1,4 @@
import { ApplicationError, type INodeTypeDescription } from 'n8n-workflow';
import { ApplicationError } from 'n8n-workflow';
import { nanoid } from 'nanoid';
import { type MessageEvent, WebSocket } from 'ws';
@ -25,6 +25,12 @@ interface DataRequest {
reject: (error: unknown) => void;
}
interface NodeTypesRequest {
requestId: string;
resolve: (data: unknown) => void;
reject: (error: unknown) => void;
}
interface RPCCall {
callId: string;
resolve: (data: unknown) => void;
@ -58,6 +64,8 @@ export abstract class TaskRunner {
dataRequests: Map<DataRequest['requestId'], DataRequest> = new Map();
nodeTypesRequests: Map<NodeTypesRequest['requestId'], NodeTypesRequest> = new Map();
rpcCalls: Map<RPCCall['callId'], RPCCall> = new Map();
nodeTypes: TaskRunnerNodeTypes = new TaskRunnerNodeTypes([]);
@ -168,15 +176,11 @@ export abstract class TaskRunner {
this.handleRpcResponse(message.callId, message.status, message.data);
break;
case 'broker:nodetypes':
this.setNodeTypes(message.nodeTypes as unknown as INodeTypeDescription[]);
this.processNodeTypesResponse(message.requestId, message.nodeTypes);
break;
}
}
setNodeTypes(nodeTypes: INodeTypeDescription[]) {
this.nodeTypes = new TaskRunnerNodeTypes(nodeTypes);
}
processDataResponse(requestId: string, data: unknown) {
const request = this.dataRequests.get(requestId);
if (!request) {
@ -187,6 +191,16 @@ export abstract class TaskRunner {
request.resolve(data);
}
processNodeTypesResponse(requestId: string, nodeTypes: unknown) {
const request = this.nodeTypesRequests.get(requestId);
if (!request) return;
// Deletion of the request is handled in `requestNodeTypes`, using a
// `finally` wrapped around the return
request.resolve(nodeTypes);
}
hasOpenTasks() {
return Object.values(this.runningTasks).length < this.maxConcurrency;
}
@ -282,6 +296,34 @@ export abstract class TaskRunner {
throw new ApplicationError('Unimplemented');
}
async requestNodeTypes<T = unknown>(
taskId: Task['taskId'],
requestParams: RunnerMessage.ToBroker.NodeTypesRequest['requestParams'],
) {
const requestId = nanoid();
const nodeTypesPromise = new Promise<T>((resolve, reject) => {
this.nodeTypesRequests.set(requestId, {
requestId,
resolve: resolve as (data: unknown) => void,
reject,
});
});
this.send({
type: 'runner:nodetypesrequest',
taskId,
requestId,
requestParams,
});
try {
return await nodeTypesPromise;
} finally {
this.nodeTypesRequests.delete(requestId);
}
}
async requestData<T = unknown>(
taskId: Task['taskId'],
requestParams: RunnerMessage.ToBroker.TaskDataRequest['requestParams'],

View file

@ -1,6 +1,6 @@
{
"name": "n8n",
"version": "1.66.0",
"version": "1.67.0",
"description": "n8n Workflow Automation Tool",
"main": "dist/index",
"types": "dist/index.d.ts",

View file

@ -221,7 +221,7 @@ export class Start extends BaseCommand {
}
const { taskRunners: taskRunnerConfig } = this.globalConfig;
if (!taskRunnerConfig.disabled) {
if (taskRunnerConfig.enabled) {
const { TaskRunnerModule } = await import('@/runners/task-runner-module');
const taskRunnerModule = Container.get(TaskRunnerModule);
await taskRunnerModule.start();

View file

@ -113,7 +113,7 @@ export class Worker extends BaseCommand {
);
const { taskRunners: taskRunnerConfig } = this.globalConfig;
if (!taskRunnerConfig.disabled) {
if (taskRunnerConfig.enabled) {
const { TaskRunnerModule } = await import('@/runners/task-runner-module');
const taskRunnerModule = Container.get(TaskRunnerModule);
await taskRunnerModule.start();

View file

@ -127,6 +127,9 @@ export const TIME = {
* Eventually this will supersede `TIME` above
*/
export const Time = {
milliseconds: {
toMinutes: 1 / (60 * 1000),
},
seconds: {
toMilliseconds: 1000,
},

View file

@ -50,8 +50,8 @@ export class CreateTable extends TableOperation {
ref: {
tableName: string;
columnName: string;
onDelete?: 'CASCADE';
onUpdate?: 'CASCADE';
onDelete?: 'RESTRICT' | 'CASCADE' | 'NO ACTION' | 'SET NULL';
onUpdate?: 'RESTRICT' | 'CASCADE' | 'NO ACTION' | 'SET NULL';
name?: string;
},
) {

View file

@ -20,6 +20,7 @@ import { Settings } from './settings';
import { SharedCredentials } from './shared-credentials';
import { SharedWorkflow } from './shared-workflow';
import { TagEntity } from './tag-entity';
import { TestDefinition } from './test-definition';
import { User } from './user';
import { Variables } from './variables';
import { WebhookEntity } from './webhook-entity';
@ -58,4 +59,5 @@ export const entities = {
ProjectRelation,
ApiKey,
ProcessedData,
TestDefinition,
};

View file

@ -0,0 +1,61 @@
import {
Column,
Entity,
Generated,
Index,
ManyToOne,
OneToOne,
PrimaryColumn,
RelationId,
} from '@n8n/typeorm';
import { Length } from 'class-validator';
import { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity.ee';
import { WorkflowEntity } from '@/databases/entities/workflow-entity';
import { WithTimestamps } from './abstract-entity';
/**
* Entity representing a Test Definition
* It combines:
* - the workflow under test
* - the workflow used to evaluate the results of test execution
* - the annotation tag used as a filter to select test cases from previous executions of the workflow under test
*/
@Entity()
@Index(['workflow'])
@Index(['evaluationWorkflow'])
export class TestDefinition extends WithTimestamps {
@Generated()
@PrimaryColumn()
id: number;
@Column({ length: 255 })
@Length(1, 255, { message: 'Test name must be $constraint1 to $constraint2 characters long.' })
name: string;
/**
* Relation to the workflow under test
*/
@ManyToOne('WorkflowEntity', 'tests')
workflow: WorkflowEntity;
@RelationId((test: TestDefinition) => test.workflow)
workflowId: string;
/**
* Relation to the workflow used to evaluate the results of test execution
*/
@ManyToOne('WorkflowEntity', 'evaluationTests')
evaluationWorkflow: WorkflowEntity;
@RelationId((test: TestDefinition) => test.evaluationWorkflow)
evaluationWorkflowId: string;
/**
* Relation to the annotation tag associated with the test
* This tag will be used to select the test cases to run from previous executions
*/
@OneToOne('AnnotationTagEntity', 'test')
annotationTag: AnnotationTagEntity;
}

View file

@ -0,0 +1,37 @@
import type { MigrationContext, ReversibleMigration } from '@/databases/types';
const testEntityTableName = 'test_definition';
export class CreateTestDefinitionTable1730386903556 implements ReversibleMigration {
async up({ schemaBuilder: { createTable, column } }: MigrationContext) {
await createTable(testEntityTableName)
.withColumns(
column('id').int.notNull.primary.autoGenerate,
column('name').varchar(255).notNull,
column('workflowId').varchar(36).notNull,
column('evaluationWorkflowId').varchar(36),
column('annotationTagId').varchar(16),
)
.withIndexOn('workflowId')
.withIndexOn('evaluationWorkflowId')
.withForeignKey('workflowId', {
tableName: 'workflow_entity',
columnName: 'id',
onDelete: 'CASCADE',
})
.withForeignKey('evaluationWorkflowId', {
tableName: 'workflow_entity',
columnName: 'id',
onDelete: 'SET NULL',
})
.withForeignKey('annotationTagId', {
tableName: 'annotation_tag_entity',
columnName: 'id',
onDelete: 'SET NULL',
}).withTimestamps;
}
async down({ schemaBuilder: { dropTable } }: MigrationContext) {
await dropTable(testEntityTableName);
}
}
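
Put together, a row created by this migration should look roughly like the sketch below (the `createdAt`/`updatedAt` column names are assumed from `.withTimestamps`):

interface TestDefinitionRow {
  id: number;                          // int, auto-generated primary key
  name: string;                        // varchar(255), required
  workflowId: string;                  // FK → workflow_entity.id, ON DELETE CASCADE
  evaluationWorkflowId: string | null; // FK → workflow_entity.id, ON DELETE SET NULL
  annotationTagId: string | null;      // FK → annotation_tag_entity.id, ON DELETE SET NULL
  createdAt: Date;                     // assumed, added by .withTimestamps
  updatedAt: Date;                     // assumed, added by .withTimestamps
}

Deleting the workflow under test removes its test definitions, while deleting the evaluation workflow or the annotation tag only clears the corresponding column.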

View file

@ -68,6 +68,7 @@ import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-C
import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart';
import { AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644 } from '../common/1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping';
import { UpdateProcessedDataValueColumnToText1729607673464 } from '../common/1729607673464-UpdateProcessedDataValueColumnToText';
import { CreateTestDefinitionTable1730386903556 } from '../common/1730386903556-CreateTestDefinitionTable';
export const mysqlMigrations: Migration[] = [
InitialMigration1588157391238,
@ -138,4 +139,5 @@ export const mysqlMigrations: Migration[] = [
CreateProcessedDataTable1726606152711,
AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644,
UpdateProcessedDataValueColumnToText1729607673464,
CreateTestDefinitionTable1730386903556,
];

View file

@ -68,6 +68,7 @@ import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-C
import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart';
import { AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644 } from '../common/1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping';
import { UpdateProcessedDataValueColumnToText1729607673464 } from '../common/1729607673464-UpdateProcessedDataValueColumnToText';
import { CreateTestDefinitionTable1730386903556 } from '../common/1730386903556-CreateTestDefinitionTable';
export const postgresMigrations: Migration[] = [
InitialMigration1587669153312,
@ -138,4 +139,5 @@ export const postgresMigrations: Migration[] = [
CreateProcessedDataTable1726606152711,
AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644,
UpdateProcessedDataValueColumnToText1729607673464,
CreateTestDefinitionTable1730386903556,
];

View file

@ -65,6 +65,7 @@ import { CreateAnnotationTables1724753530828 } from '../common/1724753530828-Cre
import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-CreateProcessedDataTable';
import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart';
import { UpdateProcessedDataValueColumnToText1729607673464 } from '../common/1729607673464-UpdateProcessedDataValueColumnToText';
import { CreateTestDefinitionTable1730386903556 } from '../common/1730386903556-CreateTestDefinitionTable';
const sqliteMigrations: Migration[] = [
InitialMigration1588102412422,
@ -132,6 +133,7 @@ const sqliteMigrations: Migration[] = [
CreateProcessedDataTable1726606152711,
AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644,
UpdateProcessedDataValueColumnToText1729607673464,
CreateTestDefinitionTable1730386903556,
];
export { sqliteMigrations };

View file

@ -108,6 +108,22 @@ describe('`parseRangeQuery` middleware', () => {
expect(nextFn).toBeCalledTimes(1);
});
test('should parse `projectId` field', () => {
const req = mock<ExecutionRequest.GetMany>({
query: {
filter: '{ "projectId": "123" }',
limit: undefined,
firstId: undefined,
lastId: undefined,
},
});
parseRangeQuery(req, res, nextFn);
expect(req.rangeQuery.projectId).toBe('123');
expect(nextFn).toBeCalledTimes(1);
});
test('should delete invalid fields', () => {
const req = mock<ExecutionRequest.GetMany>({
query: {

View file

@ -66,6 +66,7 @@ export const schemaGetExecutionsQueryFilter = {
startedBefore: { type: 'date-time' },
annotationTags: { type: 'array', items: { type: 'string' } },
vote: { type: 'string' },
projectId: { type: 'string' },
},
$defs: {
metadata: {

View file

@ -3,6 +3,7 @@ import { validate } from 'class-validator';
import type { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity.ee';
import type { CredentialsEntity } from '@/databases/entities/credentials-entity';
import type { TagEntity } from '@/databases/entities/tag-entity';
import type { TestDefinition } from '@/databases/entities/test-definition';
import type { User } from '@/databases/entities/user';
import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
@ -12,6 +13,7 @@ import { BadRequestError } from './errors/response-errors/bad-request.error';
export async function validateEntity(
entity:
| WorkflowEntity
| TestDefinition
| CredentialsEntity
| TagEntity
| AnnotationTagEntity

View file

@ -1,3 +1,4 @@
import type { NeededNodeType } from '@n8n/task-runner';
import type { Dirent } from 'fs';
import { readdir } from 'fs/promises';
import { loadClassInIsolation } from 'n8n-core';
@ -149,4 +150,22 @@ export class NodeTypes implements INodeTypes {
dirent.name.toLowerCase().startsWith('v')
);
}
getNodeTypeDescriptions(nodeTypes: NeededNodeType[]): INodeTypeDescription[] {
return nodeTypes.map(({ name: nodeTypeName, version: nodeTypeVersion }) => {
const nodeType = this.getNode(nodeTypeName);
if (!nodeType) throw new ApplicationError(`Unknown node type: ${nodeTypeName}`);
const { description } = NodeHelpers.getVersionedNodeType(nodeType.type, nodeTypeVersion);
const descriptionCopy = { ...description };
descriptionCopy.name = descriptionCopy.name.startsWith('n8n-nodes')
? descriptionCopy.name
: `n8n-nodes-base.${descriptionCopy.name}`; // nodes-base nodes are unprefixed
return descriptionCopy;
});
}
}
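
The prefixing rule in `getNodeTypeDescriptions` boils down to the small helper sketched below: names already carrying an `n8n-nodes` package prefix are left alone, while bare nodes-base names get prefixed.

const ensurePackagePrefix = (name: string): string =>
  name.startsWith('n8n-nodes') ? name : `n8n-nodes-base.${name}`;

console.log(ensurePackagePrefix('set'));                        // 'n8n-nodes-base.set'
console.log(ensurePackagePrefix('n8n-nodes-base.httpRequest')); // unchanged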

View file

@ -1,5 +1,6 @@
import type { RunnerMessage, TaskResultData } from '@n8n/task-runner';
import { mock } from 'jest-mock-extended';
import type { INodeTypeBaseDescription } from 'n8n-workflow';
import { TaskRejectError } from '../errors';
import { TaskBroker } from '../task-broker.service';
@ -11,7 +12,7 @@ describe('TaskBroker', () => {
let taskBroker: TaskBroker;
beforeEach(() => {
taskBroker = new TaskBroker(mock(), mock());
taskBroker = new TaskBroker(mock());
jest.restoreAllMocks();
});
@ -76,13 +77,6 @@ describe('TaskBroker', () => {
const messageCallback = jest.fn();
taskBroker.registerRunner(runner, messageCallback);
expect(messageCallback).toBeCalledWith({
type: 'broker:nodetypes',
// We're mocking the node types service, so this will
// be undefined.
nodeType: undefined,
});
});
});
@ -560,5 +554,68 @@ describe('TaskBroker', () => {
params: rpcParams,
});
});
it('should handle `runner:nodetypesrequest` message', async () => {
const runnerId = 'runner1';
const taskId = 'task1';
const requesterId = 'requester1';
const requestId = 'request1';
const requestParams = [
{
name: 'n8n-nodes-base.someNode',
version: 1,
},
];
const message: RunnerMessage.ToBroker.NodeTypesRequest = {
type: 'runner:nodetypesrequest',
taskId,
requestId,
requestParams,
};
const requesterMessageCallback = jest.fn();
taskBroker.registerRunner(mock<TaskRunner>({ id: runnerId }), jest.fn());
taskBroker.setTasks({
[taskId]: { id: taskId, runnerId, requesterId, taskType: 'test' },
});
taskBroker.registerRequester(requesterId, requesterMessageCallback);
await taskBroker.onRunnerMessage(runnerId, message);
expect(requesterMessageCallback).toHaveBeenCalledWith({
type: 'broker:nodetypesrequest',
taskId,
requestId,
requestParams,
});
});
});
describe('onRequesterMessage', () => {
it('should handle `requester:nodetypesresponse` message', async () => {
const runnerId = 'runner1';
const taskId = 'task1';
const requesterId = 'requester1';
const requestId = 'request1';
const nodeTypes = [mock<INodeTypeBaseDescription>(), mock<INodeTypeBaseDescription>()];
const runnerMessageCallback = jest.fn();
taskBroker.registerRunner(mock<TaskRunner>({ id: runnerId }), runnerMessageCallback);
taskBroker.setTasks({
[taskId]: { id: taskId, runnerId, requesterId, taskType: 'test' },
});
await taskBroker.handleRequesterNodeTypesResponse(taskId, requestId, nodeTypes);
expect(runnerMessageCallback).toHaveBeenCalledWith({
type: 'broker:nodetypes',
taskId,
requestId,
nodeTypes,
});
});
});
});
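
The two new tests above cover the same round trip; sketched end to end with minimal stand-in message shapes (not the real `@n8n/task-runner` union types):

type NeededNodeType = { name: string; version: number };

// 1. A runner asks the broker for node type descriptions it is missing:
const runnerRequest = {
  type: 'runner:nodetypesrequest' as const,
  taskId: 'task1',
  requestId: 'request1',
  requestParams: [{ name: 'n8n-nodes-base.someNode', version: 1 }] as NeededNodeType[],
};

// 2. The broker forwards the request to the task's requester...
const brokerToRequester = { ...runnerRequest, type: 'broker:nodetypesrequest' as const };

// 3. ...the requester answers with the resolved descriptions...
const requesterResponse = {
  type: 'requester:nodetypesresponse' as const,
  taskId: brokerToRequester.taskId,
  requestId: brokerToRequester.requestId,
  nodeTypes: [] as unknown[],
};

// 4. ...and the broker relays them back to the runner.
const brokerToRunner = { ...requesterResponse, type: 'broker:nodetypes' as const };
console.log(brokerToRunner.type); // 'broker:nodetypes'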

View file

@ -22,7 +22,7 @@ require('child_process').spawn = spawnMock;
describe('TaskRunnerProcess', () => {
const logger = mockInstance(Logger);
const runnerConfig = mockInstance(TaskRunnersConfig);
runnerConfig.disabled = false;
runnerConfig.enabled = true;
runnerConfig.mode = 'internal_childprocess';
const authService = mock<TaskRunnerAuthService>();
let taskRunnerProcess = new TaskRunnerProcess(logger, runnerConfig, authService);

View file

@ -5,18 +5,6 @@ import type WebSocket from 'ws';
import type { TaskRunner } from './task-broker.service';
import type { AuthlessRequest } from '../requests';
/**
* Specifies what data should be included for a task data request.
*/
export interface TaskDataRequestParams {
dataOfNodes: string[] | 'all';
prevNode: boolean;
/** Whether input data for the node should be included */
input: boolean;
/** Whether env provider's state should be included */
env: boolean;
}
export interface DisconnectAnalyzer {
determineDisconnectReason(runnerId: TaskRunner['id']): Promise<Error>;
}

View file

@ -70,7 +70,7 @@ export class TaskRunnerWsServer {
this.sendMessage.bind(this, id) as MessageCallback,
);
this.logger.info(`Runner "${message.name}"(${id}) has been registered`);
this.logger.info(`Runner "${message.name}" (${id}) has been registered`);
return;
}

View file

@ -8,7 +8,6 @@ import { ApplicationError } from 'n8n-workflow';
import { nanoid } from 'nanoid';
import { Service } from 'typedi';
import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials';
import { Logger } from '@/logging/logger.service';
import { TaskRejectError } from './errors';
@ -79,19 +78,7 @@ export class TaskBroker {
private pendingTaskRequests: TaskRequest[] = [];
constructor(
private readonly logger: Logger,
private readonly loadNodesAndCredentials: LoadNodesAndCredentials,
) {
this.loadNodesAndCredentials.addPostProcessor(this.updateNodeTypes);
}
updateNodeTypes = async () => {
await this.messageAllRunners({
type: 'broker:nodetypes',
nodeTypes: this.loadNodesAndCredentials.types.nodes,
});
};
constructor(private readonly logger: Logger) {}
expireTasks() {
const now = process.hrtime.bigint();
@ -105,10 +92,6 @@ export class TaskBroker {
registerRunner(runner: TaskRunner, messageCallback: MessageCallback) {
this.knownRunners.set(runner.id, { runner, messageCallback });
void this.knownRunners.get(runner.id)!.messageCallback({ type: 'broker:runnerregistered' });
void this.knownRunners.get(runner.id)!.messageCallback({
type: 'broker:nodetypes',
nodeTypes: this.loadNodesAndCredentials.types.nodes,
});
}
deregisterRunner(runnerId: string, error: Error) {
@ -145,14 +128,6 @@ export class TaskBroker {
await this.knownRunners.get(runnerId)?.messageCallback(message);
}
private async messageAllRunners(message: BrokerMessage.ToRunner.All) {
await Promise.allSettled(
[...this.knownRunners.values()].map(async (runner) => {
await runner.messageCallback(message);
}),
);
}
private async messageRequester(requesterId: string, message: BrokerMessage.ToRequester.All) {
await this.requesters.get(requesterId)?.(message);
}
@ -187,7 +162,9 @@ export class TaskBroker {
case 'runner:taskdatarequest':
await this.handleDataRequest(message.taskId, message.requestId, message.requestParams);
break;
case 'runner:nodetypesrequest':
await this.handleNodeTypesRequest(message.taskId, message.requestId, message.requestParams);
break;
case 'runner:rpc':
await this.handleRpcRequest(message.taskId, message.callId, message.name, message.params);
break;
@ -249,6 +226,23 @@ export class TaskBroker {
});
}
async handleNodeTypesRequest(
taskId: Task['id'],
requestId: RunnerMessage.ToBroker.NodeTypesRequest['requestId'],
requestParams: RunnerMessage.ToBroker.NodeTypesRequest['requestParams'],
) {
const task = this.tasks.get(taskId);
if (!task) {
return;
}
await this.messageRequester(task.requesterId, {
type: 'broker:nodetypesrequest',
taskId,
requestId,
requestParams,
});
}
async handleResponse(
taskId: Task['id'],
requestId: RunnerMessage.ToBroker.TaskDataRequest['requestId'],
@ -284,6 +278,13 @@ export class TaskBroker {
case 'requester:taskdataresponse':
await this.handleRequesterDataResponse(message.taskId, message.requestId, message.data);
break;
case 'requester:nodetypesresponse':
await this.handleRequesterNodeTypesResponse(
message.taskId,
message.requestId,
message.nodeTypes,
);
break;
case 'requester:rpcresponse':
await this.handleRequesterRpcResponse(
message.taskId,
@ -322,6 +323,21 @@ export class TaskBroker {
});
}
async handleRequesterNodeTypesResponse(
taskId: Task['id'],
requestId: RequesterMessage.ToBroker.NodeTypesResponse['requestId'],
nodeTypes: RequesterMessage.ToBroker.NodeTypesResponse['nodeTypes'],
) {
const runner = await this.getRunnerOrFailTask(taskId);
await this.messageRunner(runner.id, {
type: 'broker:nodetypes',
taskId,
requestId,
nodeTypes,
});
}
handleRequesterAccept(
taskId: Task['id'],
settings: RequesterMessage.ToBroker.TaskSettings['settings'],

View file

@ -1,42 +1,10 @@
import type { TaskData } from '@n8n/task-runner';
import type { PartialAdditionalData, TaskData } from '@n8n/task-runner';
import { mock } from 'jest-mock-extended';
import type { IExecuteFunctions, IWorkflowExecuteAdditionalData } from 'n8n-workflow';
import { type INode, type INodeExecutionData, type Workflow } from 'n8n-workflow';
import type { Workflow } from 'n8n-workflow';
import { DataRequestResponseBuilder } from '../data-request-response-builder';
const triggerNode: INode = mock<INode>({
name: 'Trigger',
});
const debugHelperNode: INode = mock<INode>({
name: 'DebugHelper',
});
const codeNode: INode = mock<INode>({
name: 'Code',
});
const workflow: TaskData['workflow'] = mock<Workflow>();
const debugHelperNodeOutItems: INodeExecutionData[] = [
{
json: {
uid: 'abb74fd4-bef2-4fae-9d53-ea24e9eb3032',
email: 'Dan.Schmidt31@yahoo.com',
firstname: 'Toni',
lastname: 'Schuster',
password: 'Q!D6C2',
},
pairedItem: {
item: 0,
},
},
];
const codeNodeInputItems: INodeExecutionData[] = debugHelperNodeOutItems;
const connectionInputData: TaskData['connectionInputData'] = codeNodeInputItems;
const envProviderState: TaskData['envProviderState'] = mock<TaskData['envProviderState']>({
env: {},
isEnvAccessBlocked: false,
isProcessAvailable: true,
});
const additionalData = mock<IWorkflowExecuteAdditionalData>({
const additionalData = mock<PartialAdditionalData>({
formWaitingBaseUrl: 'http://localhost:5678/form-waiting',
instanceBaseUrl: 'http://localhost:5678/',
restApiUrl: 'http://localhost:5678/rest',
@ -50,275 +18,57 @@ const additionalData = mock<IWorkflowExecuteAdditionalData>({
executionTimeoutTimestamp: undefined,
restartExecutionId: undefined,
});
const executeFunctions = mock<IExecuteFunctions>();
/**
* Drawn with https://asciiflow.com/#/
* Task data for an execution of the following workflow,
* where the Code node is the one currently being executed:
*
* Trigger ──► DebugHelper ──► Code
*/
const taskData: TaskData = {
executeFunctions,
workflow,
connectionInputData,
inputData: {
main: [codeNodeInputItems],
},
itemIndex: 0,
activeNodeName: codeNode.name,
contextNodeName: codeNode.name,
defaultReturnRunIndex: -1,
mode: 'manual',
envProviderState,
node: codeNode,
runExecutionData: {
startData: {
destinationNode: codeNode.name,
runNodeFilter: [triggerNode.name, debugHelperNode.name, codeNode.name],
},
resultData: {
runData: {
[triggerNode.name]: [
{
hints: [],
startTime: 1730313407328,
executionTime: 1,
source: [],
executionStatus: 'success',
data: {
main: [[]],
},
},
],
[debugHelperNode.name]: [
{
hints: [],
startTime: 1730313407330,
executionTime: 1,
source: [
{
previousNode: triggerNode.name,
},
],
executionStatus: 'success',
data: {
main: [debugHelperNodeOutItems],
},
},
],
},
pinData: {},
},
executionData: {
contextData: {},
nodeExecutionStack: [],
metadata: {},
waitingExecution: {
[codeNode.name]: {
'0': {
main: [codeNodeInputItems],
},
},
},
waitingExecutionSource: {
[codeNode.name]: {
'0': {
main: [
{
previousNode: debugHelperNode.name,
},
],
},
},
},
},
},
runIndex: 0,
selfData: {},
siblingParameters: {},
executeData: {
node: codeNode,
data: {
main: [codeNodeInputItems],
},
source: {
main: [
{
previousNode: debugHelperNode.name,
previousNodeOutput: 0,
},
],
},
},
const workflow: TaskData['workflow'] = mock<Workflow>({
id: '1',
name: 'Test Workflow',
active: true,
connectionsBySourceNode: {},
nodes: {},
pinData: {},
settings: {},
staticData: {},
});
const taskData = mock<TaskData>({
additionalData,
} as const;
workflow,
});
describe('DataRequestResponseBuilder', () => {
const allDataParam: DataRequestResponseBuilder['requestParams'] = {
dataOfNodes: 'all',
env: true,
input: true,
prevNode: true,
};
const builder = new DataRequestResponseBuilder();
const newRequestParam = (opts: Partial<DataRequestResponseBuilder['requestParams']>) => ({
...allDataParam,
...opts,
});
it('picks only specific properties for additional data', () => {
const result = builder.buildFromTaskData(taskData);
describe('all data', () => {
it('should build the runExecutionData as is when everything is requested', () => {
const dataRequestResponseBuilder = new DataRequestResponseBuilder(taskData, allDataParam);
const { runExecutionData } = dataRequestResponseBuilder.build();
expect(runExecutionData).toStrictEqual(taskData.runExecutionData);
expect(result.additionalData).toStrictEqual({
formWaitingBaseUrl: 'http://localhost:5678/form-waiting',
instanceBaseUrl: 'http://localhost:5678/',
restApiUrl: 'http://localhost:5678/rest',
variables: additionalData.variables,
webhookBaseUrl: 'http://localhost:5678/webhook',
webhookTestBaseUrl: 'http://localhost:5678/webhook-test',
webhookWaitingBaseUrl: 'http://localhost:5678/webhook-waiting',
executionId: '45844',
userId: '114984bc-44b3-4dd4-9b54-a4a8d34d51d5',
currentNodeParameters: undefined,
executionTimeoutTimestamp: undefined,
restartExecutionId: undefined,
});
});
describe('envProviderState', () => {
it("should filter out envProviderState when it's not requested", () => {
const dataRequestResponseBuilder = new DataRequestResponseBuilder(
taskData,
newRequestParam({
env: false,
}),
);
it('picks only specific properties for workflow', () => {
const result = builder.buildFromTaskData(taskData);
const result = dataRequestResponseBuilder.build();
expect(result.envProviderState).toStrictEqual({
env: {},
isEnvAccessBlocked: false,
isProcessAvailable: true,
});
});
});
describe('additionalData', () => {
it('picks only specific properties for additional data', () => {
const dataRequestResponseBuilder = new DataRequestResponseBuilder(taskData, allDataParam);
const result = dataRequestResponseBuilder.build();
expect(result.additionalData).toStrictEqual({
formWaitingBaseUrl: 'http://localhost:5678/form-waiting',
instanceBaseUrl: 'http://localhost:5678/',
restApiUrl: 'http://localhost:5678/rest',
webhookBaseUrl: 'http://localhost:5678/webhook',
webhookTestBaseUrl: 'http://localhost:5678/webhook-test',
webhookWaitingBaseUrl: 'http://localhost:5678/webhook-waiting',
executionId: '45844',
userId: '114984bc-44b3-4dd4-9b54-a4a8d34d51d5',
currentNodeParameters: undefined,
executionTimeoutTimestamp: undefined,
restartExecutionId: undefined,
variables: additionalData.variables,
});
});
});
describe('input data', () => {
const allExceptInputParam = newRequestParam({
input: false,
});
it('drops input data from executeData', () => {
const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build();
expect(result.executeData).toStrictEqual({
node: taskData.executeData!.node,
source: taskData.executeData!.source,
data: {},
});
});
it('drops input data from result', () => {
const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build();
expect(result.inputData).toStrictEqual({});
});
it('drops input data from result', () => {
const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build();
expect(result.inputData).toStrictEqual({});
});
it('drops input data from connectionInputData', () => {
const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build();
expect(result.connectionInputData).toStrictEqual([]);
});
});
describe('nodes', () => {
it('should return empty run data when only Code node is requested', () => {
const result = new DataRequestResponseBuilder(
taskData,
newRequestParam({ dataOfNodes: ['Code'], prevNode: false }),
).build();
expect(result.runExecutionData.resultData.runData).toStrictEqual({});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
it('should return empty run data when only Code node is requested', () => {
const result = new DataRequestResponseBuilder(
taskData,
newRequestParam({ dataOfNodes: [codeNode.name], prevNode: false }),
).build();
expect(result.runExecutionData.resultData.runData).toStrictEqual({});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
it("should return only DebugHelper's data when only DebugHelper node is requested", () => {
const result = new DataRequestResponseBuilder(
taskData,
newRequestParam({ dataOfNodes: [debugHelperNode.name], prevNode: false }),
).build();
expect(result.runExecutionData.resultData.runData).toStrictEqual({
[debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name],
});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
it("should return DebugHelper's data when only prevNode node is requested", () => {
const result = new DataRequestResponseBuilder(
taskData,
newRequestParam({ dataOfNodes: [], prevNode: true }),
).build();
expect(result.runExecutionData.resultData.runData).toStrictEqual({
[debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name],
});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
expect(result.workflow).toStrictEqual({
id: '1',
name: 'Test Workflow',
active: true,
connections: workflow.connectionsBySourceNode,
nodes: [],
pinData: workflow.pinData,
settings: workflow.settings,
staticData: workflow.staticData,
});
});
});

View file

@ -0,0 +1,300 @@
import type { DataRequestResponse, TaskDataRequestParams } from '@n8n/task-runner';
import { mock } from 'jest-mock-extended';
import type { IWorkflowExecuteAdditionalData } from 'n8n-workflow';
import { type INode, type INodeExecutionData } from 'n8n-workflow';
import { DataRequestResponseStripper } from '../data-request-response-stripper';
const triggerNode: INode = mock<INode>({
name: 'Trigger',
});
const debugHelperNode: INode = mock<INode>({
name: 'DebugHelper',
});
const codeNode: INode = mock<INode>({
name: 'Code',
});
const workflow: DataRequestResponse['workflow'] = mock<DataRequestResponse['workflow']>();
const debugHelperNodeOutItems: INodeExecutionData[] = [
{
json: {
uid: 'abb74fd4-bef2-4fae-9d53-ea24e9eb3032',
email: 'Dan.Schmidt31@yahoo.com',
firstname: 'Toni',
lastname: 'Schuster',
password: 'Q!D6C2',
},
pairedItem: {
item: 0,
},
},
];
const codeNodeInputItems: INodeExecutionData[] = debugHelperNodeOutItems;
const envProviderState: DataRequestResponse['envProviderState'] = mock<
DataRequestResponse['envProviderState']
>({
env: {},
isEnvAccessBlocked: false,
isProcessAvailable: true,
});
const additionalData = mock<IWorkflowExecuteAdditionalData>({
formWaitingBaseUrl: 'http://localhost:5678/form-waiting',
instanceBaseUrl: 'http://localhost:5678/',
restApiUrl: 'http://localhost:5678/rest',
variables: {},
webhookBaseUrl: 'http://localhost:5678/webhook',
webhookTestBaseUrl: 'http://localhost:5678/webhook-test',
webhookWaitingBaseUrl: 'http://localhost:5678/webhook-waiting',
executionId: '45844',
userId: '114984bc-44b3-4dd4-9b54-a4a8d34d51d5',
currentNodeParameters: undefined,
executionTimeoutTimestamp: undefined,
restartExecutionId: undefined,
});
/**
* Drawn with https://asciiflow.com/#/
* Task data for an execution of the following workflow,
* where the Code node is the one currently being executed:
*
* Trigger ──► DebugHelper ──► Code
*/
const taskData: DataRequestResponse = {
workflow,
inputData: {
main: [codeNodeInputItems],
},
itemIndex: 0,
activeNodeName: codeNode.name,
contextNodeName: codeNode.name,
defaultReturnRunIndex: -1,
mode: 'manual',
envProviderState,
node: codeNode,
runExecutionData: {
startData: {
destinationNode: codeNode.name,
runNodeFilter: [triggerNode.name, debugHelperNode.name, codeNode.name],
},
resultData: {
runData: {
[triggerNode.name]: [
{
hints: [],
startTime: 1730313407328,
executionTime: 1,
source: [],
executionStatus: 'success',
data: {
main: [[]],
},
},
],
[debugHelperNode.name]: [
{
hints: [],
startTime: 1730313407330,
executionTime: 1,
source: [
{
previousNode: triggerNode.name,
},
],
executionStatus: 'success',
data: {
main: [debugHelperNodeOutItems],
},
},
],
},
pinData: {},
},
executionData: {
contextData: {},
nodeExecutionStack: [],
metadata: {},
waitingExecution: {
[codeNode.name]: {
'0': {
main: [codeNodeInputItems],
},
},
},
waitingExecutionSource: {
[codeNode.name]: {
'0': {
main: [
{
previousNode: debugHelperNode.name,
},
],
},
},
},
},
},
runIndex: 0,
selfData: {},
siblingParameters: {},
connectionInputSource: {
main: [
{
previousNode: debugHelperNode.name,
previousNodeOutput: 0,
},
],
},
additionalData,
} as const;
describe('DataRequestResponseStripper', () => {
const allDataParam: TaskDataRequestParams = {
dataOfNodes: 'all',
env: true,
input: true,
prevNode: true,
};
const newRequestParam = (opts: Partial<TaskDataRequestParams>) => ({
...allDataParam,
...opts,
});
describe('all data', () => {
it('should build the runExecutionData as is when everything is requested', () => {
const dataRequestResponseBuilder = new DataRequestResponseStripper(taskData, allDataParam);
const { runExecutionData } = dataRequestResponseBuilder.strip();
expect(runExecutionData).toStrictEqual(taskData.runExecutionData);
});
});
describe('envProviderState', () => {
it("should filter out envProviderState when it's not requested", () => {
const dataRequestResponseBuilder = new DataRequestResponseStripper(
taskData,
newRequestParam({
env: false,
}),
);
const result = dataRequestResponseBuilder.strip();
expect(result.envProviderState).toStrictEqual({
env: {},
isEnvAccessBlocked: false,
isProcessAvailable: true,
});
});
});
describe('input data', () => {
const allExceptInputParam = newRequestParam({
input: false,
});
it('drops input data from result', () => {
const result = new DataRequestResponseStripper(taskData, allExceptInputParam).strip();
expect(result.inputData).toStrictEqual({});
});
it('drops input data from result', () => {
const result = new DataRequestResponseStripper(taskData, allExceptInputParam).strip();
expect(result.inputData).toStrictEqual({});
});
});
describe('nodes', () => {
it('should return empty run data when only Code node is requested', () => {
const result = new DataRequestResponseStripper(
taskData,
newRequestParam({ dataOfNodes: ['Code'], prevNode: false }),
).strip();
expect(result.runExecutionData.resultData.runData).toStrictEqual({});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
it('should return empty run data when only Code node is requested', () => {
const result = new DataRequestResponseStripper(
taskData,
newRequestParam({ dataOfNodes: [codeNode.name], prevNode: false }),
).strip();
expect(result.runExecutionData.resultData.runData).toStrictEqual({});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
it("should return only DebugHelper's data when only DebugHelper node is requested", () => {
const result = new DataRequestResponseStripper(
taskData,
newRequestParam({ dataOfNodes: [debugHelperNode.name], prevNode: false }),
).strip();
expect(result.runExecutionData.resultData.runData).toStrictEqual({
[debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name],
});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
it("should return DebugHelper's data when only prevNode node is requested", () => {
const result = new DataRequestResponseStripper(
taskData,
newRequestParam({ dataOfNodes: [], prevNode: true }),
).strip();
expect(result.runExecutionData.resultData.runData).toStrictEqual({
[debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name],
});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
});
describe('passthrough properties', () => {
test.each<Array<keyof DataRequestResponse>>([
['workflow'],
['connectionInputSource'],
['node'],
['runIndex'],
['itemIndex'],
['activeNodeName'],
['siblingParameters'],
['mode'],
['defaultReturnRunIndex'],
['selfData'],
['contextNodeName'],
['additionalData'],
])("it doesn't change %s", (propertyName) => {
const dataRequestResponseBuilder = new DataRequestResponseStripper(taskData, allDataParam);
const result = dataRequestResponseBuilder.strip();
expect(result[propertyName]).toBe(taskData[propertyName]);
});
});
});

View file

@ -1,63 +1,30 @@
import type {
DataRequestResponse,
BrokerMessage,
PartialAdditionalData,
TaskData,
} from '@n8n/task-runner';
import type {
EnvProviderState,
IExecuteData,
INodeExecutionData,
IPinData,
IRunData,
IRunExecutionData,
ITaskDataConnections,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowParameters,
} from 'n8n-workflow';
import type { DataRequestResponse, PartialAdditionalData, TaskData } from '@n8n/task-runner';
import type { IWorkflowExecuteAdditionalData, Workflow, WorkflowParameters } from 'n8n-workflow';
/**
* Builds the response to a data request coming from a Task Runner. Tries to minimize
* the amount of data that is sent to the runner by only providing what is requested.
* Transforms TaskData to DataRequestResponse. The main purpose of the
* transformation is to make sure there is no duplication in the data
* (e.g. connectionInputData and executeData.data can be derived from
* inputData).
*/
export class DataRequestResponseBuilder {
private requestedNodeNames = new Set<string>();
constructor(
private readonly taskData: TaskData,
private readonly requestParams: BrokerMessage.ToRequester.TaskDataRequest['requestParams'],
) {
this.requestedNodeNames = new Set(requestParams.dataOfNodes);
if (this.requestParams.prevNode && this.requestParams.dataOfNodes !== 'all') {
this.requestedNodeNames.add(this.determinePrevNodeName());
}
}
/**
* Builds a response to the data request
*/
build(): DataRequestResponse {
const { taskData: td } = this;
buildFromTaskData(taskData: TaskData): DataRequestResponse {
return {
workflow: this.buildWorkflow(td.workflow),
connectionInputData: this.buildConnectionInputData(td.connectionInputData),
inputData: this.buildInputData(td.inputData),
itemIndex: td.itemIndex,
activeNodeName: td.activeNodeName,
contextNodeName: td.contextNodeName,
defaultReturnRunIndex: td.defaultReturnRunIndex,
mode: td.mode,
envProviderState: this.buildEnvProviderState(td.envProviderState),
node: td.node, // The current node being executed
runExecutionData: this.buildRunExecutionData(td.runExecutionData),
runIndex: td.runIndex,
selfData: td.selfData,
siblingParameters: td.siblingParameters,
executeData: this.buildExecuteData(td.executeData),
additionalData: this.buildAdditionalData(td.additionalData),
workflow: this.buildWorkflow(taskData.workflow),
inputData: taskData.inputData,
connectionInputSource: taskData.executeData?.source ?? null,
itemIndex: taskData.itemIndex,
activeNodeName: taskData.activeNodeName,
contextNodeName: taskData.contextNodeName,
defaultReturnRunIndex: taskData.defaultReturnRunIndex,
mode: taskData.mode,
envProviderState: taskData.envProviderState,
node: taskData.node,
runExecutionData: taskData.runExecutionData,
runIndex: taskData.runIndex,
selfData: taskData.selfData,
siblingParameters: taskData.siblingParameters,
additionalData: this.buildAdditionalData(taskData.additionalData),
};
}
@ -80,86 +47,6 @@ export class DataRequestResponseBuilder {
};
}
private buildExecuteData(executeData: IExecuteData | undefined): IExecuteData | undefined {
if (executeData === undefined) {
return undefined;
}
return {
node: executeData.node, // The current node being executed
data: this.requestParams.input ? executeData.data : {},
source: executeData.source,
};
}
private buildRunExecutionData(runExecutionData: IRunExecutionData): IRunExecutionData {
if (this.requestParams.dataOfNodes === 'all') {
return runExecutionData;
}
return {
startData: runExecutionData.startData,
resultData: {
error: runExecutionData.resultData.error,
lastNodeExecuted: runExecutionData.resultData.lastNodeExecuted,
metadata: runExecutionData.resultData.metadata,
runData: this.buildRunData(runExecutionData.resultData.runData),
pinData: this.buildPinData(runExecutionData.resultData.pinData),
},
executionData: runExecutionData.executionData
? {
// TODO: Figure out what these two are and whether they can be filtered
contextData: runExecutionData.executionData?.contextData,
nodeExecutionStack: runExecutionData.executionData.nodeExecutionStack,
metadata: runExecutionData.executionData.metadata,
waitingExecution: runExecutionData.executionData.waitingExecution,
waitingExecutionSource: runExecutionData.executionData.waitingExecutionSource,
}
: undefined,
};
}
private buildRunData(runData: IRunData): IRunData {
return this.filterObjectByNodeNames(runData);
}
private buildPinData(pinData: IPinData | undefined): IPinData | undefined {
return pinData ? this.filterObjectByNodeNames(pinData) : undefined;
}
private buildEnvProviderState(envProviderState: EnvProviderState): EnvProviderState {
if (this.requestParams.env) {
// In case `isEnvAccessBlocked` = true, the provider state has already sanitized
// the environment variables and we can return it as is.
return envProviderState;
}
return {
env: {},
isEnvAccessBlocked: envProviderState.isEnvAccessBlocked,
isProcessAvailable: envProviderState.isProcessAvailable,
};
}
private buildInputData(inputData: ITaskDataConnections): ITaskDataConnections {
if (this.requestParams.input) {
return inputData;
}
return {};
}
private buildConnectionInputData(
connectionInputData: INodeExecutionData[],
): INodeExecutionData[] {
if (this.requestParams.input) {
return connectionInputData;
}
return [];
}
private buildWorkflow(workflow: Workflow): Omit<WorkflowParameters, 'nodeTypes'> {
return {
id: workflow.id,
@ -172,37 +59,4 @@ export class DataRequestResponseBuilder {
staticData: workflow.staticData,
};
}
/**
* Assuming the given `obj` is an object where the keys are node names,
* filters the object to only include the node names that are requested.
*/
private filterObjectByNodeNames<T extends Record<string, unknown>>(obj: T): T {
if (this.requestParams.dataOfNodes === 'all') {
return obj;
}
const filteredObj: T = {} as T;
for (const nodeName in obj) {
if (!Object.prototype.hasOwnProperty.call(obj, nodeName)) {
continue;
}
if (this.requestedNodeNames.has(nodeName)) {
filteredObj[nodeName] = obj[nodeName];
}
}
return filteredObj;
}
private determinePrevNodeName(): string {
const sourceData = this.taskData.executeData?.source?.main?.[0];
if (!sourceData) {
return '';
}
return sourceData.previousNode;
}
}
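
The de-duplication the builder relies on can be sketched as follows, assuming the reconstruction on the runner side amounts to taking the first `main` input, which is what `DataRequestResponseReconstruct` is used to verify further down in this diff:

type Item = { json: Record<string, unknown> };

const inputData: { main: Array<Item[] | null> } = {
  main: [[{ json: { uid: 'abb74fd4' } }]],
};

// connectionInputData is recoverable from inputData, so it is no longer sent separately.
const connectionInputData: Item[] = inputData.main[0] ?? [];
console.log(connectionInputData.length); // 1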

View file

@ -0,0 +1,131 @@
import type { DataRequestResponse, BrokerMessage } from '@n8n/task-runner';
import type {
EnvProviderState,
IPinData,
IRunData,
IRunExecutionData,
ITaskDataConnections,
} from 'n8n-workflow';
/**
* Strips data from a data request response based on the specified parameters
*/
export class DataRequestResponseStripper {
private requestedNodeNames = new Set<string>();
constructor(
private readonly dataResponse: DataRequestResponse,
private readonly stripParams: BrokerMessage.ToRequester.TaskDataRequest['requestParams'],
) {
this.requestedNodeNames = new Set(stripParams.dataOfNodes);
if (this.stripParams.prevNode && this.stripParams.dataOfNodes !== 'all') {
this.requestedNodeNames.add(this.determinePrevNodeName());
}
}
/**
* Strips the data request response down to the data specified by the request parameters
*/
strip(): DataRequestResponse {
const { dataResponse: dr } = this;
return {
...dr,
inputData: this.stripInputData(dr.inputData),
envProviderState: this.stripEnvProviderState(dr.envProviderState),
runExecutionData: this.stripRunExecutionData(dr.runExecutionData),
};
}
private stripRunExecutionData(runExecutionData: IRunExecutionData): IRunExecutionData {
if (this.stripParams.dataOfNodes === 'all') {
return runExecutionData;
}
return {
startData: runExecutionData.startData,
resultData: {
error: runExecutionData.resultData.error,
lastNodeExecuted: runExecutionData.resultData.lastNodeExecuted,
metadata: runExecutionData.resultData.metadata,
runData: this.stripRunData(runExecutionData.resultData.runData),
pinData: this.stripPinData(runExecutionData.resultData.pinData),
},
executionData: runExecutionData.executionData
? {
// TODO: Figure out what these two are and whether they can be stripped
contextData: runExecutionData.executionData?.contextData,
nodeExecutionStack: runExecutionData.executionData.nodeExecutionStack,
metadata: runExecutionData.executionData.metadata,
waitingExecution: runExecutionData.executionData.waitingExecution,
waitingExecutionSource: runExecutionData.executionData.waitingExecutionSource,
}
: undefined,
};
}
private stripRunData(runData: IRunData): IRunData {
return this.filterObjectByNodeNames(runData);
}
private stripPinData(pinData: IPinData | undefined): IPinData | undefined {
return pinData ? this.filterObjectByNodeNames(pinData) : undefined;
}
private stripEnvProviderState(envProviderState: EnvProviderState): EnvProviderState {
if (this.stripParams.env) {
// In case `isEnvAccessBlocked` = true, the provider state has already sanitized
// the environment variables and we can return it as is.
return envProviderState;
}
return {
env: {},
isEnvAccessBlocked: envProviderState.isEnvAccessBlocked,
isProcessAvailable: envProviderState.isProcessAvailable,
};
}
private stripInputData(inputData: ITaskDataConnections): ITaskDataConnections {
if (this.stripParams.input) {
return inputData;
}
return {};
}
/**
* Assuming the given `obj` is an object where the keys are node names,
* filters the object to only include the node names that are requested.
*/
private filterObjectByNodeNames<T extends Record<string, unknown>>(obj: T): T {
if (this.stripParams.dataOfNodes === 'all') {
return obj;
}
const filteredObj: T = {} as T;
for (const nodeName in obj) {
if (!Object.prototype.hasOwnProperty.call(obj, nodeName)) {
continue;
}
if (this.requestedNodeNames.has(nodeName)) {
filteredObj[nodeName] = obj[nodeName];
}
}
return filteredObj;
}
private determinePrevNodeName(): string {
const sourceData = this.dataResponse.connectionInputSource?.main?.[0];
if (!sourceData) {
return '';
}
return sourceData.previousNode;
}
}
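
The node-name filtering applied to `runData` and `pinData` is simple enough to sketch standalone:

const filterByNodeNames = <T extends Record<string, unknown>>(
  obj: T,
  dataOfNodes: 'all' | Set<string>,
): T => {
  if (dataOfNodes === 'all') return obj;
  const filtered = {} as T;
  for (const nodeName of Object.keys(obj) as Array<keyof T & string>) {
    if (dataOfNodes.has(nodeName)) filtered[nodeName] = obj[nodeName];
  }
  return filtered;
};

const runData = { Trigger: [], DebugHelper: [{ some: 'items' }], Code: [] };
console.log(filterByNodeNames(runData, new Set(['DebugHelper'])));
// => { DebugHelper: [ { some: 'items' } ] }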

View file

@ -1,17 +1,20 @@
import type { RequesterMessage } from '@n8n/task-runner';
import Container from 'typedi';
import Container, { Service } from 'typedi';
import { NodeTypes } from '@/node-types';
import { TaskManager } from './task-manager';
import type { RequesterMessageCallback } from '../task-broker.service';
import { TaskBroker } from '../task-broker.service';
@Service()
export class LocalTaskManager extends TaskManager {
taskBroker: TaskBroker;
id: string = 'single-main';
constructor() {
super();
constructor(nodeTypes: NodeTypes) {
super(nodeTypes);
this.registerRequester();
}

View file

@ -1,5 +1,6 @@
import { TaskRunnersConfig } from '@n8n/config';
import type { TaskResultData, RequesterMessage, BrokerMessage, TaskData } from '@n8n/task-runner';
import { RPC_ALLOW_LIST } from '@n8n/task-runner';
import { DataRequestResponseReconstruct, RPC_ALLOW_LIST } from '@n8n/task-runner';
import type {
EnvProviderState,
IExecuteFunctions,
@ -17,8 +18,13 @@ import type {
} from 'n8n-workflow';
import { createResultOk, createResultError } from 'n8n-workflow';
import { nanoid } from 'nanoid';
import * as a from 'node:assert/strict';
import Container, { Service } from 'typedi';
import { NodeTypes } from '@/node-types';
import { DataRequestResponseBuilder } from './data-request-response-builder';
import { DataRequestResponseStripper } from './data-request-response-stripper';
export type RequestAccept = (jobId: string) => void;
export type RequestReject = (reason: string) => void;
@ -43,7 +49,8 @@ interface ExecuteFunctionObject {
[name: string]: ((...args: unknown[]) => unknown) | ExecuteFunctionObject;
}
export class TaskManager {
@Service()
export abstract class TaskManager {
requestAcceptRejects: Map<string, { accept: RequestAccept; reject: RequestReject }> = new Map();
taskAcceptRejects: Map<string, { accept: TaskAccept; reject: TaskReject }> = new Map();
@ -52,6 +59,12 @@ export class TaskManager {
tasks: Map<string, Task> = new Map();
private readonly runnerConfig = Container.get(TaskRunnersConfig);
private readonly dataResponseBuilder = new DataRequestResponseBuilder();
constructor(private readonly nodeTypes: NodeTypes) {}
async startTask<TData, TError>(
additionalData: IWorkflowExecuteAdditionalData,
taskType: string,
@ -173,6 +186,9 @@ export class TaskManager {
case 'broker:taskdatarequest':
this.sendTaskData(message.taskId, message.requestId, message.requestParams);
break;
case 'broker:nodetypesrequest':
this.sendNodeTypes(message.taskId, message.requestId, message.requestParams);
break;
case 'broker:rpc':
void this.handleRpc(message.taskId, message.callId, message.name, message.params);
break;
@ -228,14 +244,45 @@ export class TaskManager {
return;
}
const dataRequestResponseBuilder = new DataRequestResponseBuilder(job.data, requestParams);
const requestedData = dataRequestResponseBuilder.build();
const dataRequestResponse = this.dataResponseBuilder.buildFromTaskData(job.data);
if (this.runnerConfig.assertDeduplicationOutput) {
const reconstruct = new DataRequestResponseReconstruct();
a.deepStrictEqual(
reconstruct.reconstructConnectionInputData(dataRequestResponse.inputData),
job.data.connectionInputData,
);
a.deepStrictEqual(
reconstruct.reconstructExecuteData(dataRequestResponse),
job.data.executeData,
);
}
const strippedData = new DataRequestResponseStripper(
dataRequestResponse,
requestParams,
).strip();
this.sendMessage({
type: 'requester:taskdataresponse',
taskId,
requestId,
data: requestedData,
data: strippedData,
});
}
sendNodeTypes(
taskId: string,
requestId: string,
neededNodeTypes: BrokerMessage.ToRequester.NodeTypesRequest['requestParams'],
) {
const nodeTypes = this.nodeTypes.getNodeTypeDescriptions(neededNodeTypes);
this.sendMessage({
type: 'requester:nodetypesresponse',
taskId,
requestId,
nodeTypes,
});
}
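
A hedged sketch of the `assertDeduplicationOutput` guard above: in debug setups the manager rebuilds the de-duplicated fields from the response and asserts they match the originals before sending the stripped payload. The reconstruction below is a stand-in, not the real `DataRequestResponseReconstruct`.

import * as a from 'node:assert/strict';

// Stand-ins for what the manager had originally vs. what it rebuilt from the response.
const original = { connectionInputData: [{ json: { x: 1 } }] };
const response = { inputData: { main: [[{ json: { x: 1 } }]] } };

const reconstructed = response.inputData.main[0] ?? [];
a.deepStrictEqual(reconstructed, original.connectionInputData); // throws if de-duplication lost data
console.log('de-duplicated payload round-trips cleanly');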

View file

@ -26,7 +26,7 @@ export class TaskRunnerModule {
constructor(private readonly runnerConfig: TaskRunnersConfig) {}
async start() {
a.ok(!this.runnerConfig.disabled, 'Task runner is disabled');
a.ok(this.runnerConfig.enabled, 'Task runner is disabled');
await this.loadTaskManager();
await this.loadTaskRunnerServer();
@ -54,7 +54,7 @@ export class TaskRunnerModule {
private async loadTaskManager() {
const { TaskManager } = await import('@/runners/task-managers/task-manager');
const { LocalTaskManager } = await import('@/runners/task-managers/local-task-manager');
this.taskManager = new LocalTaskManager();
this.taskManager = Container.get(LocalTaskManager);
Container.set(TaskManager, this.taskManager);
}

View file

@ -1,4 +1,5 @@
import { mock } from 'jest-mock-extended';
import { v4 as uuid } from 'uuid';
import { Project } from '@/databases/entities/project';
import { ProjectRelation } from '@/databases/entities/project-relation';
@ -13,12 +14,15 @@ import { OwnershipService } from '@/services/ownership.service';
import { mockCredential, mockProject } from '@test/mock-objects';
import { mockInstance } from '@test/mocking';
import { CacheService } from '../cache/cache.service';
describe('OwnershipService', () => {
const userRepository = mockInstance(UserRepository);
const sharedWorkflowRepository = mockInstance(SharedWorkflowRepository);
const projectRelationRepository = mockInstance(ProjectRelationRepository);
const cacheService = mockInstance(CacheService);
const ownershipService = new OwnershipService(
mock(),
cacheService,
userRepository,
mock(),
projectRelationRepository,
@ -52,22 +56,22 @@ describe('OwnershipService', () => {
});
});
describe('getProjectOwnerCached()', () => {
describe('getPersonalProjectOwnerCached()', () => {
test('should retrieve a project owner', async () => {
const mockProject = new Project();
const mockOwner = new User();
const projectRelation = Object.assign(new ProjectRelation(), {
role: 'project:personalOwner',
project: mockProject,
user: mockOwner,
});
// ARRANGE
const project = new Project();
const owner = new User();
const projectRelation = new ProjectRelation();
projectRelation.role = 'project:personalOwner';
projectRelation.project = project;
projectRelation.user = owner;
projectRelationRepository.getPersonalProjectOwners.mockResolvedValueOnce([projectRelation]);
// ACT
const returnedOwner = await ownershipService.getPersonalProjectOwnerCached('some-project-id');
expect(returnedOwner).toBe(mockOwner);
// ASSERT
expect(returnedOwner).toBe(owner);
});
test('should not throw if no project owner found, should return null instead', async () => {
@ -77,6 +81,29 @@ describe('OwnershipService', () => {
expect(owner).toBeNull();
});
test('should not use the repository if the owner was found in the cache', async () => {
// ARRANGE
const project = new Project();
project.id = uuid();
const owner = new User();
owner.id = uuid();
const projectRelation = new ProjectRelation();
projectRelation.role = 'project:personalOwner';
projectRelation.project = project;
projectRelation.user = owner;
cacheService.getHashValue.mockResolvedValueOnce(owner);
userRepository.create.mockReturnValueOnce(owner);
// ACT
const foundOwner = await ownershipService.getPersonalProjectOwnerCached(project.id);
// ASSERT
expect(cacheService.getHashValue).toHaveBeenCalledTimes(1);
expect(cacheService.getHashValue).toHaveBeenCalledWith('project-owner', project.id);
expect(projectRelationRepository.getPersonalProjectOwners).not.toHaveBeenCalled();
expect(foundOwner).toEqual(owner);
});
});
describe('getProjectOwnerCached()', () => {

View file

@ -20,7 +20,7 @@ export class OrchestrationService {
private subscriber: Subscriber;
protected isInitialized = false;
isInitialized = false;
private isMultiMainSetupLicensed = false;

View file

@ -45,13 +45,9 @@ export class OwnershipService {
* Personal project ownership is **immutable**.
*/
async getPersonalProjectOwnerCached(projectId: string): Promise<User | null> {
const cachedValue = await this.cacheService.getHashValue<User | null>(
'project-owner',
projectId,
);
const cachedValue = await this.cacheService.getHashValue<User>('project-owner', projectId);
if (cachedValue) this.userRepository.create(cachedValue);
if (cachedValue === null) return null;
if (cachedValue) return this.userRepository.create(cachedValue);
const ownerRel = await this.projectRelationRepository.getPersonalProjectOwners([projectId]);
const owner = ownerRel[0]?.user ?? null;
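
The cache-first pattern used here, sketched with in-memory stand-ins (the function and parameter names below are illustrative, not the real service API):

type User = { id: string };

async function getOwnerCached(
  projectId: string,
  cache: { getHashValue(key: string, field: string): Promise<User | undefined> },
  userRepository: { create(data: User): User },
  loadOwnerFromDb: (projectId: string) => Promise<User | null>,
): Promise<User | null> {
  const cached = await cache.getHashValue('project-owner', projectId);
  // Rehydrate the plain cached object into an entity instance before returning it.
  if (cached) return userRepository.create(cached);
  return await loadOwnerFromDb(projectId);
}

// Cache hit: the DB loader is never called.
void getOwnerCached(
  'project-1',
  { getHashValue: async () => ({ id: 'user-1' }) },
  { create: (u) => u },
  async () => null,
).then((owner) => console.log(owner)); // { id: 'user-1' }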

View file

@ -1,4 +1,4 @@
import type { GlobalConfig } from '@n8n/config';
import type { PruningConfig } from '@n8n/config';
import { mock } from 'jest-mock-extended';
import type { InstanceSettings } from 'n8n-core';
@ -8,9 +8,13 @@ import { mockLogger } from '@test/mocking';
import { PruningService } from '../pruning.service';
jest.mock('@/db', () => ({
connectionState: { migrated: true },
}));
describe('PruningService', () => {
describe('init', () => {
it('should start pruning if leader', () => {
it('should start pruning on main instance that is the leader', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: true }),
@ -29,7 +33,7 @@ describe('PruningService', () => {
expect(startPruningSpy).toHaveBeenCalled();
});
it('should not start pruning if follower', () => {
it('should not start pruning on main instance that is a follower', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: false }),
@ -48,7 +52,7 @@ describe('PruningService', () => {
expect(startPruningSpy).not.toHaveBeenCalled();
});
it('should register leadership events if multi-main setup is enabled', () => {
it('should register leadership events if main on multi-main setup', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: true }),
@ -88,13 +92,10 @@ describe('PruningService', () => {
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<GlobalConfig>({ pruning: { isEnabled: true } }),
mock<PruningConfig>({ isEnabled: true }),
);
// @ts-expect-error Private method
const isEnabled = pruningService.isEnabled();
expect(isEnabled).toBe(true);
expect(pruningService.isEnabled).toBe(true);
});
it('should return `false` based on config if leader main', () => {
@ -107,16 +108,13 @@ describe('PruningService', () => {
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<GlobalConfig>({ pruning: { isEnabled: false } }),
mock<PruningConfig>({ isEnabled: false }),
);
// @ts-expect-error Private method
const isEnabled = pruningService.isEnabled();
expect(isEnabled).toBe(false);
expect(pruningService.isEnabled).toBe(false);
});
it('should return `false` if non-main even if enabled', () => {
it('should return `false` if non-main even if config is enabled', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: false, instanceType: 'worker' }),
@ -126,16 +124,13 @@ describe('PruningService', () => {
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<GlobalConfig>({ pruning: { isEnabled: true } }),
mock<PruningConfig>({ isEnabled: true }),
);
// @ts-expect-error Private method
const isEnabled = pruningService.isEnabled();
expect(isEnabled).toBe(false);
expect(pruningService.isEnabled).toBe(false);
});
it('should return `false` if follower main even if enabled', () => {
it('should return `false` if follower main even if config is enabled', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: false, isFollower: true, instanceType: 'main' }),
@ -145,13 +140,10 @@ describe('PruningService', () => {
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<GlobalConfig>({ pruning: { isEnabled: true }, multiMainSetup: { enabled: true } }),
mock<PruningConfig>({ isEnabled: true }),
);
// @ts-expect-error Private method
const isEnabled = pruningService.isEnabled();
expect(isEnabled).toBe(false);
expect(pruningService.isEnabled).toBe(false);
});
});
@ -166,22 +158,25 @@ describe('PruningService', () => {
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<GlobalConfig>({ pruning: { isEnabled: false } }),
mock<PruningConfig>({ isEnabled: false }),
);
const scheduleRollingSoftDeletionsSpy = jest.spyOn(
pruningService,
// @ts-expect-error Private method
'scheduleRollingSoftDeletions',
);
// @ts-expect-error Private method
const setSoftDeletionInterval = jest.spyOn(pruningService, 'setSoftDeletionInterval');
// @ts-expect-error Private method
const scheduleHardDeletion = jest.spyOn(pruningService, 'scheduleHardDeletion');
const scheduleNextHardDeletionSpy = jest.spyOn(pruningService, 'scheduleNextHardDeletion');
pruningService.startPruning();
expect(setSoftDeletionInterval).not.toHaveBeenCalled();
expect(scheduleHardDeletion).not.toHaveBeenCalled();
expect(scheduleRollingSoftDeletionsSpy).not.toHaveBeenCalled();
expect(scheduleNextHardDeletionSpy).not.toHaveBeenCalled();
});
it('should start pruning if service is enabled', () => {
it('should start pruning if service is enabled and DB is migrated', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: true, instanceType: 'main' }),
@ -191,23 +186,23 @@ describe('PruningService', () => {
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<GlobalConfig>({ pruning: { isEnabled: true } }),
mock<PruningConfig>({ isEnabled: true }),
);
const setSoftDeletionInterval = jest
const scheduleRollingSoftDeletionsSpy = jest
// @ts-expect-error Private method
.spyOn(pruningService, 'setSoftDeletionInterval')
.spyOn(pruningService, 'scheduleRollingSoftDeletions')
.mockImplementation();
const scheduleHardDeletion = jest
const scheduleNextHardDeletionSpy = jest
// @ts-expect-error Private method
.spyOn(pruningService, 'scheduleHardDeletion')
.spyOn(pruningService, 'scheduleNextHardDeletion')
.mockImplementation();
pruningService.startPruning();
expect(setSoftDeletionInterval).toHaveBeenCalled();
expect(scheduleHardDeletion).toHaveBeenCalled();
expect(scheduleRollingSoftDeletionsSpy).toHaveBeenCalled();
expect(scheduleNextHardDeletionSpy).toHaveBeenCalled();
});
});
});
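
The `isEnabled` getter these tests exercise (defined in the service hunk that follows) reduces to the predicate sketched here:

const isPruningEnabled = (opts: {
  configEnabled: boolean;
  instanceType: 'main' | 'worker' | 'webhook';
  isLeader: boolean;
}): boolean => opts.configEnabled && opts.instanceType === 'main' && opts.isLeader;

console.log(isPruningEnabled({ configEnabled: true, instanceType: 'main', isLeader: true }));   // true
console.log(isPruningEnabled({ configEnabled: true, instanceType: 'main', isLeader: false }));  // false, follower
console.log(isPruningEnabled({ configEnabled: true, instanceType: 'worker', isLeader: true })); // false, not a main instance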

View file

@ -1,27 +1,37 @@
import { GlobalConfig } from '@n8n/config';
import { PruningConfig } from '@n8n/config';
import { BinaryDataService, InstanceSettings } from 'n8n-core';
import { jsonStringify } from 'n8n-workflow';
import { ensureError } from 'n8n-workflow';
import { strict } from 'node:assert';
import { Service } from 'typedi';
import { TIME } from '@/constants';
import { Time } from '@/constants';
import { ExecutionRepository } from '@/databases/repositories/execution.repository';
import { connectionState as dbConnectionState } from '@/db';
import { OnShutdown } from '@/decorators/on-shutdown';
import { Logger } from '@/logging/logger.service';
import { OrchestrationService } from '../orchestration.service';
/**
* Responsible for pruning executions from the database and their associated binary data
* from the filesystem, on a rolling basis. By default we soft-delete execution rows
* every cycle and hard-delete them and their binary data every 4th cycle.
*/
@Service()
export class PruningService {
private hardDeletionBatchSize = 100;
/** Timer for soft-deleting executions on a rolling basis. */
private softDeletionInterval: NodeJS.Timer | undefined;
private rates: Record<string, number> = {
softDeletion: this.globalConfig.pruning.softDeleteInterval * TIME.MINUTE,
hardDeletion: this.globalConfig.pruning.hardDeleteInterval * TIME.MINUTE,
/** Timeout for next hard-deletion of soft-deleted executions. */
private hardDeletionTimeout: NodeJS.Timeout | undefined;
private readonly rates = {
softDeletion: this.pruningConfig.softDeleteInterval * Time.minutes.toMilliseconds,
hardDeletion: this.pruningConfig.hardDeleteInterval * Time.minutes.toMilliseconds,
};
public softDeletionInterval: NodeJS.Timer | undefined;
public hardDeletionTimeout: NodeJS.Timeout | undefined;
/** Max number of executions to hard-delete in a cycle. */
private readonly batchSize = 100;
private isShuttingDown = false;
@ -31,103 +41,68 @@ export class PruningService {
private readonly executionRepository: ExecutionRepository,
private readonly binaryDataService: BinaryDataService,
private readonly orchestrationService: OrchestrationService,
private readonly globalConfig: GlobalConfig,
private readonly pruningConfig: PruningConfig,
) {
this.logger = this.logger.scoped('pruning');
}
/**
* @important Requires `OrchestrationService` to be initialized.
*/
init() {
const { isLeader } = this.instanceSettings;
const { isMultiMainSetupEnabled } = this.orchestrationService;
strict(this.instanceSettings.instanceRole !== 'unset', 'Instance role is not set');
if (isLeader) this.startPruning();
if (this.instanceSettings.isLeader) this.startPruning();
if (isMultiMainSetupEnabled) {
if (this.orchestrationService.isMultiMainSetupEnabled) {
this.orchestrationService.multiMainSetup.on('leader-takeover', () => this.startPruning());
this.orchestrationService.multiMainSetup.on('leader-stepdown', () => this.stopPruning());
}
}
private isEnabled() {
const { instanceType, isFollower } = this.instanceSettings;
if (!this.globalConfig.pruning.isEnabled || instanceType !== 'main') {
return false;
}
if (this.globalConfig.multiMainSetup.enabled && instanceType === 'main' && isFollower) {
return false;
}
return true;
get isEnabled() {
return (
this.pruningConfig.isEnabled &&
this.instanceSettings.instanceType === 'main' &&
this.instanceSettings.isLeader
);
}
/**
* @important Call this method only after DB migrations have completed.
*/
startPruning() {
if (!this.isEnabled()) return;
if (!this.isEnabled || !dbConnectionState.migrated || this.isShuttingDown) return;
if (this.isShuttingDown) {
this.logger.warn('Cannot start pruning while shutting down');
return;
}
this.logger.debug('Starting soft-deletion and hard-deletion timers');
this.setSoftDeletionInterval();
this.scheduleHardDeletion();
this.scheduleRollingSoftDeletions();
this.scheduleNextHardDeletion();
}
stopPruning() {
if (!this.isEnabled()) return;
this.logger.debug('Removing soft-deletion and hard-deletion timers');
if (!this.isEnabled) return;
clearInterval(this.softDeletionInterval);
clearTimeout(this.hardDeletionTimeout);
}
private setSoftDeletionInterval(rateMs = this.rates.softDeletion) {
const when = [rateMs / TIME.MINUTE, 'min'].join(' ');
private scheduleRollingSoftDeletions(rateMs = this.rates.softDeletion) {
this.softDeletionInterval = setInterval(
async () => await this.softDeleteOnPruningCycle(),
async () => await this.softDelete(),
this.rates.softDeletion,
);
this.logger.debug(`Soft-deletion scheduled every ${when}`);
this.logger.debug(`Soft-deletion every ${rateMs * Time.milliseconds.toMinutes} minutes`);
}
private scheduleHardDeletion(rateMs = this.rates.hardDeletion) {
const when = [rateMs / TIME.MINUTE, 'min'].join(' ');
private scheduleNextHardDeletion(rateMs = this.rates.hardDeletion) {
this.hardDeletionTimeout = setTimeout(() => {
this.hardDeleteOnPruningCycle()
.then((rate) => this.scheduleHardDeletion(rate))
this.hardDelete()
.then((rate) => this.scheduleNextHardDeletion(rate))
.catch((error) => {
this.scheduleHardDeletion(1 * TIME.SECOND);
const errorMessage =
error instanceof Error
? error.message
: jsonStringify(error, { replaceCircularRefs: true });
this.logger.error('Failed to hard-delete executions', { errorMessage });
this.scheduleNextHardDeletion(1_000);
this.logger.error('Failed to hard-delete executions', { error: ensureError(error) });
});
}, rateMs);
this.logger.debug(`Hard-deletion scheduled for next ${when}`);
this.logger.debug(`Hard-deletion in next ${rateMs * Time.milliseconds.toMinutes} minutes`);
}
/**
* Mark executions as deleted based on age and count, in a pruning cycle.
*/
async softDeleteOnPruningCycle() {
this.logger.debug('Starting soft-deletion of executions');
/** Soft-delete executions based on max age and/or max count. */
async softDelete() {
const result = await this.executionRepository.softDeletePrunableExecutions();
if (result.affected === 0) {
@ -145,10 +120,11 @@ export class PruningService {
}
/**
* Permanently remove all soft-deleted executions and their binary data, in a pruning cycle.
* @return Delay in ms after which the next cycle should be started
* Delete all soft-deleted executions and their binary data.
*
* @returns Delay in milliseconds until next hard-deletion
*/
private async hardDeleteOnPruningCycle() {
private async hardDelete(): Promise<number> {
const ids = await this.executionRepository.findSoftDeletedExecutions();
const executionIds = ids.map((o) => o.executionId);
@ -160,8 +136,6 @@ export class PruningService {
}
try {
this.logger.debug('Starting hard-deletion of executions', { executionIds });
await this.binaryDataService.deleteMany(ids);
await this.executionRepository.deleteByIds(executionIds);
@ -170,16 +144,13 @@ export class PruningService {
} catch (error) {
this.logger.error('Failed to hard-delete executions', {
executionIds,
error: error instanceof Error ? error.message : `${error}`,
error: ensureError(error),
});
}
/**
* For next batch, speed up hard-deletion cycle in high-volume case
* to prevent high concurrency from causing duplicate deletions.
*/
const isHighVolume = executionIds.length >= this.hardDeletionBatchSize;
// if high volume, speed up next hard-deletion
if (executionIds.length >= this.batchSize) return 1 * Time.seconds.toMilliseconds;
return isHighVolume ? 1 * TIME.SECOND : this.rates.hardDeletion;
return this.rates.hardDeletion;
}
}
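Note on the interval arithmetic above: the refactor replaces the old TIME constants with a Time helper and reads its knobs from PruningConfig. A minimal sketch of those assumptions follows; the Time shape and all concrete values are illustrative, not actual n8n defaults.

// Sketch only: assumed shape of the `Time` conversion constants referenced as
// `Time.minutes.toMilliseconds` and `Time.milliseconds.toMinutes` above.
const Time = {
	seconds: { toMilliseconds: 1_000 },
	minutes: { toMilliseconds: 60_000 },
	milliseconds: { toMinutes: 1 / 60_000 },
} as const;

// Sketch of the assumed `PruningConfig` fields this service and its tests touch
// (values are hypothetical; units inferred from the code, not confirmed).
const pruningConfig = {
	isEnabled: true,
	softDeleteInterval: 60, // minutes between soft-deletion cycles
	hardDeleteInterval: 15, // minutes between hard-deletion attempts
	maxAge: 336, // presumably hours an execution may live before becoming prunable
	maxCount: 10_000, // max executions to keep; 0 presumably disables count-based pruning
};

const rates = {
	softDeletion: pruningConfig.softDeleteInterval * Time.minutes.toMilliseconds, // 3_600_000 ms
	hardDeletion: pruningConfig.hardDeleteInterval * Time.minutes.toMilliseconds, //   900_000 ms
};

// The debug logs convert back for display:
console.log(`Soft-deletion every ${rates.softDeletion * Time.milliseconds.toMinutes} minutes`); // 60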


@ -2,22 +2,29 @@ import type express from 'express';
import { mock } from 'jest-mock-extended';
import type { IdentityProviderInstance, ServiceProviderInstance } from 'samlify';
import { SettingsRepository } from '@/databases/repositories/settings.repository';
import { Logger } from '@/logging/logger.service';
import { UrlService } from '@/services/url.service';
import * as samlHelpers from '@/sso/saml/saml-helpers';
import { SamlService } from '@/sso/saml/saml.service.ee';
import { mockInstance } from '@test/mocking';
import { SAML_PREFERENCES_DB_KEY } from '../constants';
import { InvalidSamlMetadataError } from '../errors/invalid-saml-metadata.error';
describe('SamlService', () => {
const logger = mockInstance(Logger);
const urlService = mockInstance(UrlService);
const samlService = new SamlService(logger, urlService);
const settingsRepository = mockInstance(SettingsRepository);
beforeEach(() => {
jest.restoreAllMocks();
});
describe('getAttributesFromLoginResponse', () => {
test('throws when any attribute is missing', async () => {
//
// ARRANGE
//
jest
.spyOn(samlService, 'getIdentityProviderInstance')
.mockReturnValue(mock<IdentityProviderInstance>());
@ -41,9 +48,7 @@ describe('SamlService', () => {
],
});
//
// ACT & ASSERT
//
await expect(
samlService.getAttributesFromLoginResponse(mock<express.Request>(), 'post'),
).rejects.toThrowError(
@ -51,4 +56,74 @@ describe('SamlService', () => {
);
});
});
describe('init', () => {
test('calls `reset` if an InvalidSamlMetadataError is thrown', async () => {
// ARRANGE
jest
.spyOn(samlService, 'loadFromDbAndApplySamlPreferences')
.mockRejectedValue(new InvalidSamlMetadataError());
jest.spyOn(samlService, 'reset');
// ACT
await samlService.init();
// ASSERT
expect(samlService.reset).toHaveBeenCalledTimes(1);
});
test('calls `reset` if a SyntaxError is thrown', async () => {
// ARRANGE
jest
.spyOn(samlService, 'loadFromDbAndApplySamlPreferences')
.mockRejectedValue(new SyntaxError());
jest.spyOn(samlService, 'reset');
// ACT
await samlService.init();
// ASSERT
expect(samlService.reset).toHaveBeenCalledTimes(1);
});
test('does not call reset and rethrows if another error is thrown', async () => {
// ARRANGE
jest
.spyOn(samlService, 'loadFromDbAndApplySamlPreferences')
.mockRejectedValue(new TypeError());
jest.spyOn(samlService, 'reset');
// ACT & ASSERT
await expect(samlService.init()).rejects.toThrowError(TypeError);
expect(samlService.reset).toHaveBeenCalledTimes(0);
});
test('does not call reset if no error is thrown', async () => {
// ARRANGE
jest.spyOn(samlService, 'reset');
// ACT
await samlService.init();
// ASSERT
expect(samlService.reset).toHaveBeenCalledTimes(0);
});
});
describe('reset', () => {
test('disables saml login and deletes the saml `features.saml` key in the db', async () => {
// ARRANGE
jest.spyOn(samlHelpers, 'setSamlLoginEnabled');
jest.spyOn(settingsRepository, 'delete');
// ACT
await samlService.reset();
// ASSERT
expect(samlHelpers.setSamlLoginEnabled).toHaveBeenCalledTimes(1);
expect(samlHelpers.setSamlLoginEnabled).toHaveBeenCalledWith(false);
expect(settingsRepository.delete).toHaveBeenCalledTimes(1);
expect(settingsRepository.delete).toHaveBeenCalledWith({ key: SAML_PREFERENCES_DB_KEY });
});
});
});


@ -0,0 +1,7 @@
import { ApplicationError } from 'n8n-workflow';
export class InvalidSamlMetadataError extends ApplicationError {
constructor() {
super('Invalid SAML metadata', { level: 'warning' });
}
}


@ -16,6 +16,7 @@ import { Logger } from '@/logging/logger.service';
import { UrlService } from '@/services/url.service';
import { SAML_PREFERENCES_DB_KEY } from './constants';
import { InvalidSamlMetadataError } from './errors/invalid-saml-metadata.error';
import {
createUserFromSamlAttributes,
getMappedSamlAttributesFromFlowResult,
@ -81,11 +82,24 @@ export class SamlService {
) {}
async init(): Promise<void> {
// load preferences first but do not apply so as to not load samlify unnecessarily
await this.loadFromDbAndApplySamlPreferences(false);
if (isSamlLicensedAndEnabled()) {
await this.loadSamlify();
await this.loadFromDbAndApplySamlPreferences(true);
try {
// load preferences first but do not apply so as to not load samlify unnecessarily
await this.loadFromDbAndApplySamlPreferences(false);
if (isSamlLicensedAndEnabled()) {
await this.loadSamlify();
await this.loadFromDbAndApplySamlPreferences(true);
}
} catch (error) {
// If the SAML configuration has been corrupted in the database we'll
// delete the corrupted configuration and enable email logins again.
if (error instanceof InvalidSamlMetadataError || error instanceof SyntaxError) {
this.logger.warn(
`SAML initialization failed because of invalid metadata in database: ${error.message}. IMPORTANT: Disabling SAML and switching to email-based login for all users. Please review your configuration and re-enable SAML.`,
);
await this.reset();
} else {
throw error;
}
}
}
@ -98,7 +112,7 @@ export class SamlService {
validate: async (response: string) => {
const valid = await validateResponse(response);
if (!valid) {
throw new ApplicationError('Invalid SAML response');
throw new InvalidSamlMetadataError();
}
},
});
@ -230,7 +244,7 @@ export class SamlService {
} else if (prefs.metadata) {
const validationResult = await validateMetadata(prefs.metadata);
if (!validationResult) {
throw new ApplicationError('Invalid SAML metadata');
throw new InvalidSamlMetadataError();
}
}
this.getIdentityProviderInstance(true);
@ -371,4 +385,13 @@ export class SamlService {
}
return attributes;
}
/**
* Disables SAML, switches to email based logins and deletes the SAML
* configuration from the database.
*/
async reset() {
await setSamlLoginEnabled(false);
await Container.get(SettingsRepository).delete({ key: SAML_PREFERENCES_DB_KEY });
}
}


@ -5,10 +5,10 @@ import type { AuthProviderType } from '@/databases/entities/auth-identity';
import { SettingsRepository } from '@/databases/repositories/settings.repository';
/**
* Only one authentication method can be active at a time. This function sets the current authentication method
* and saves it to the database.
* SSO methods should only switch to email and then to another method. Email can switch to any method.
* @param authenticationMethod
* Only one authentication method can be active at a time. This function sets
* the current authentication method and saves it to the database.
* SSO methods should only switch to email and then to another method. Email
* can switch to any method.
*/
export async function setCurrentAuthenticationMethod(
authenticationMethod: AuthProviderType,
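The function body continues beyond this hunk. As a rough illustration of the rule documented in the comment above (the guard below is hypothetical and not taken from the codebase):

// Hypothetical guard: SSO methods may only switch to 'email', while 'email' may
// switch to any method. The provider union here is illustrative.
type ProviderType = 'email' | 'ldap' | 'saml';

function isAllowedAuthMethodTransition(current: ProviderType, next: ProviderType): boolean {
	if (current === 'email') return true; // email can switch to any method
	return next === 'email'; // SSO methods must switch back to email first
}

// isAllowedAuthMethodTransition('saml', 'ldap') -> false
// isAllowedAuthMethodTransition('saml', 'email') -> true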


@ -1,6 +1,5 @@
import { GlobalConfig } from '@n8n/config';
import { type Workflow, type INode, type WorkflowSettings } from 'n8n-workflow';
import { strict as assert } from 'node:assert';
import { Service } from 'typedi';
import type { Project } from '@/databases/entities/project';
@ -68,11 +67,9 @@ export class SubworkflowPolicyChecker {
const owner = await this.ownershipService.getPersonalProjectOwnerCached(subworkflowProject.id);
assert(owner !== null); // only `null` if not personal
return {
hasReadAccess,
ownerName: owner.firstName + ' ' + owner.lastName,
ownerName: owner ? owner.firstName + ' ' + owner.lastName : 'No owner (team project)',
};
}


@ -26,7 +26,7 @@ import { mockInstance } from '../../shared/mocking';
config.set('executions.mode', 'queue');
config.set('binaryDataManager.availableModes', 'filesystem');
Container.get(TaskRunnersConfig).disabled = false;
Container.get(TaskRunnersConfig).enabled = true;
mockInstance(LoadNodesAndCredentials);
const binaryDataService = mockInstance(BinaryDataService);
const externalHooks = mockInstance(ExternalHooks);


@ -1,4 +1,4 @@
import { GlobalConfig } from '@n8n/config';
import { PruningConfig } from '@n8n/config';
import { mock } from 'jest-mock-extended';
import { BinaryDataService, InstanceSettings } from 'n8n-core';
import type { ExecutionStatus } from 'n8n-workflow';
@ -27,19 +27,19 @@ describe('softDeleteOnPruningCycle()', () => {
const now = new Date();
const yesterday = new Date(Date.now() - TIME.DAY);
let workflow: WorkflowEntity;
let globalConfig: GlobalConfig;
let pruningConfig: PruningConfig;
beforeAll(async () => {
await testDb.init();
globalConfig = Container.get(GlobalConfig);
pruningConfig = Container.get(PruningConfig);
pruningService = new PruningService(
mockLogger(),
instanceSettings,
Container.get(ExecutionRepository),
mockInstance(BinaryDataService),
mock(),
globalConfig,
pruningConfig,
);
workflow = await createWorkflow();
@ -62,8 +62,8 @@ describe('softDeleteOnPruningCycle()', () => {
describe('when EXECUTIONS_DATA_PRUNE_MAX_COUNT is set', () => {
beforeAll(() => {
globalConfig.pruning.maxAge = 336;
globalConfig.pruning.maxCount = 1;
pruningConfig.maxAge = 336;
pruningConfig.maxCount = 1;
});
test('should mark as deleted based on EXECUTIONS_DATA_PRUNE_MAX_COUNT', async () => {
@ -73,7 +73,7 @@ describe('softDeleteOnPruningCycle()', () => {
await createSuccessfulExecution(workflow),
];
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@ -92,7 +92,7 @@ describe('softDeleteOnPruningCycle()', () => {
await createSuccessfulExecution(workflow),
];
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@ -113,7 +113,7 @@ describe('softDeleteOnPruningCycle()', () => {
await createSuccessfulExecution(workflow),
];
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@ -132,7 +132,7 @@ describe('softDeleteOnPruningCycle()', () => {
await createSuccessfulExecution(workflow),
];
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@ -150,7 +150,7 @@ describe('softDeleteOnPruningCycle()', () => {
await annotateExecution(executions[0].id, { vote: 'up' }, [workflow.id]);
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@ -163,8 +163,8 @@ describe('softDeleteOnPruningCycle()', () => {
describe('when EXECUTIONS_DATA_MAX_AGE is set', () => {
beforeAll(() => {
globalConfig.pruning.maxAge = 1;
globalConfig.pruning.maxCount = 0;
pruningConfig.maxAge = 1;
pruningConfig.maxCount = 0;
});
test('should mark as deleted based on EXECUTIONS_DATA_MAX_AGE', async () => {
@ -179,7 +179,7 @@ describe('softDeleteOnPruningCycle()', () => {
),
];
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@ -203,7 +203,7 @@ describe('softDeleteOnPruningCycle()', () => {
await createSuccessfulExecution(workflow),
];
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@ -221,7 +221,7 @@ describe('softDeleteOnPruningCycle()', () => {
])('should prune %s executions', async (status, attributes) => {
const execution = await createExecution({ status, ...attributes }, workflow);
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@ -239,7 +239,7 @@ describe('softDeleteOnPruningCycle()', () => {
await createSuccessfulExecution(workflow),
];
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@ -266,7 +266,7 @@ describe('softDeleteOnPruningCycle()', () => {
await annotateExecution(executions[0].id, { vote: 'up' }, [workflow.id]);
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([


@ -18,14 +18,14 @@ describe('TaskRunnerModule in external mode', () => {
describe('start', () => {
it('should throw if the task runner is disabled', async () => {
runnerConfig.disabled = true;
runnerConfig.enabled = false;
// Act
await expect(module.start()).rejects.toThrow('Task runner is disabled');
});
it('should start the task runner', async () => {
runnerConfig.disabled = false;
runnerConfig.enabled = true;
// Act
await module.start();


@ -18,14 +18,14 @@ describe('TaskRunnerModule in internal_childprocess mode', () => {
describe('start', () => {
it('should throw if the task runner is disabled', async () => {
runnerConfig.disabled = true;
runnerConfig.enabled = false;
// Act
await expect(module.start()).rejects.toThrow('Task runner is disabled');
});
it('should start the task runner', async () => {
runnerConfig.disabled = false;
runnerConfig.enabled = true;
// Act
await module.start();


@ -10,7 +10,7 @@ import { retryUntil } from '@test-integration/retry-until';
describe('TaskRunnerProcess', () => {
const authToken = 'token';
const runnerConfig = Container.get(TaskRunnersConfig);
runnerConfig.disabled = false;
runnerConfig.enabled = true;
runnerConfig.mode = 'internal_childprocess';
runnerConfig.authToken = authToken;
runnerConfig.port = 0; // Use any port


@ -1,6 +1,6 @@
{
"name": "n8n-core",
"version": "1.66.0",
"version": "1.67.0",
"description": "Core functionality of n8n",
"main": "dist/index",
"types": "dist/index.d.ts",


@ -76,7 +76,6 @@ import type {
IPollFunctions,
IRequestOptions,
IRunExecutionData,
ISourceData,
ITaskData,
ITaskDataConnections,
ITriggerFunctions,
@ -109,6 +108,7 @@ import type {
AiEvent,
ISupplyDataFunctions,
WebhookType,
SchedulingFunctions,
} from 'n8n-workflow';
import {
NodeConnectionType,
@ -166,7 +166,14 @@ import { extractValue } from './ExtractValue';
import { InstanceSettings } from './InstanceSettings';
import type { ExtendedValidationResult, IResponseError } from './Interfaces';
// eslint-disable-next-line import/no-cycle
import { HookContext, PollContext, TriggerContext, WebhookContext } from './node-execution-context';
import {
ExecuteSingleContext,
HookContext,
PollContext,
TriggerContext,
WebhookContext,
} from './node-execution-context';
import { ScheduledTaskManager } from './ScheduledTaskManager';
import { getSecretsProxy } from './Secrets';
import { SSHClientsManager } from './SSHClientsManager';
@ -3018,7 +3025,7 @@ const executionCancellationFunctions = (
},
});
const getRequestHelperFunctions = (
export const getRequestHelperFunctions = (
workflow: Workflow,
node: INode,
additionalData: IWorkflowExecuteAdditionalData,
@ -3338,11 +3345,19 @@ const getRequestHelperFunctions = (
};
};
const getSSHTunnelFunctions = (): SSHTunnelFunctions => ({
export const getSSHTunnelFunctions = (): SSHTunnelFunctions => ({
getSSHClient: async (credentials) =>
await Container.get(SSHClientsManager).getClient(credentials),
});
export const getSchedulingFunctions = (workflow: Workflow): SchedulingFunctions => {
const scheduledTaskManager = Container.get(ScheduledTaskManager);
return {
registerCron: (cronExpression, onTick) =>
scheduledTaskManager.registerCron(workflow, cronExpression, onTick),
};
};
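Exporting getSSHTunnelFunctions and getSchedulingFunctions lets other execution contexts compose them into their helpers object. A hedged usage sketch; the function name and cron expression below are illustrative:

// Illustrative composition only: how a context could reuse the exported factories.
// `Workflow` is the type already imported at the top of this file.
function buildTriggerHelpers(workflow: Workflow) {
	return {
		...getSSHTunnelFunctions(),
		...getSchedulingFunctions(workflow),
	};
}

// A hypothetical trigger node could then register a cron via the helpers, with
// the ScheduledTaskManager presumably cleaning it up when the workflow stops:
// buildTriggerHelpers(workflow).registerCron('*/5 * * * * *', () => console.log('tick'));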
const getAllowedPaths = () => {
const restrictFileAccessTo = process.env[RESTRICT_FILE_ACCESS_TO];
if (!restrictFileAccessTo) {
@ -3409,7 +3424,7 @@ export function isFilePathBlocked(filePath: string): boolean {
return false;
}
const getFileSystemHelperFunctions = (node: INode): FileSystemHelperFunctions => ({
export const getFileSystemHelperFunctions = (node: INode): FileSystemHelperFunctions => ({
async createReadStream(filePath) {
try {
await fsAccess(filePath);
@ -3445,7 +3460,7 @@ const getFileSystemHelperFunctions = (node: INode): FileSystemHelperFunctions =>
},
});
const getNodeHelperFunctions = (
export const getNodeHelperFunctions = (
{ executionId }: IWorkflowExecuteAdditionalData,
workflowId: string,
): NodeHelperFunctions => ({
@ -3453,7 +3468,7 @@ const getNodeHelperFunctions = (
await copyBinaryFile(workflowId, executionId!, filePath, fileName, mimeType),
});
const getBinaryHelperFunctions = (
export const getBinaryHelperFunctions = (
{ executionId }: IWorkflowExecuteAdditionalData,
workflowId: string,
): BinaryHelperFunctions => ({
@ -3471,7 +3486,7 @@ const getBinaryHelperFunctions = (
},
});
const getCheckProcessedHelperFunctions = (
export const getCheckProcessedHelperFunctions = (
workflow: Workflow,
node: INode,
): DeduplicationHelperFunctions => ({
@ -4180,145 +4195,19 @@ export function getExecuteSingleFunctions(
mode: WorkflowExecuteMode,
abortSignal?: AbortSignal,
): IExecuteSingleFunctions {
return ((workflow, runExecutionData, connectionInputData, inputData, node, itemIndex) => {
return {
...getCommonWorkflowFunctions(workflow, node, additionalData),
...executionCancellationFunctions(abortSignal),
continueOnFail: () => continueOnFail(node),
evaluateExpression: (expression: string, evaluateItemIndex: number | undefined) => {
evaluateItemIndex = evaluateItemIndex === undefined ? itemIndex : evaluateItemIndex;
return workflow.expression.resolveSimpleParameterValue(
`=${expression}`,
{},
runExecutionData,
runIndex,
evaluateItemIndex,
node.name,
connectionInputData,
mode,
getAdditionalKeys(additionalData, mode, runExecutionData),
executeData,
);
},
getContext(type: ContextType): IContextObject {
return NodeHelpers.getContext(runExecutionData, type, node);
},
getCredentials: async (type) =>
await getCredentials(
workflow,
node,
type,
additionalData,
mode,
executeData,
runExecutionData,
runIndex,
connectionInputData,
itemIndex,
),
getInputData: (inputIndex = 0, inputName = 'main') => {
if (!inputData.hasOwnProperty(inputName)) {
// Return empty array because else it would throw error when nothing is connected to input
return { json: {} };
}
// TODO: Check if nodeType has input with that index defined
if (inputData[inputName].length < inputIndex) {
throw new ApplicationError('Could not get input index', {
extra: { inputIndex, inputName },
});
}
const allItems = inputData[inputName][inputIndex];
if (allItems === null) {
throw new ApplicationError('Input index was not set', {
extra: { inputIndex, inputName },
});
}
if (allItems[itemIndex] === null) {
throw new ApplicationError('Value of input with given index was not set', {
extra: { inputIndex, inputName, itemIndex },
});
}
return allItems[itemIndex];
},
getInputSourceData: (inputIndex = 0, inputName = 'main') => {
if (executeData?.source === null) {
// Should never happen as n8n sets it automatically
throw new ApplicationError('Source data is missing');
}
return executeData.source[inputName][inputIndex] as ISourceData;
},
getItemIndex: () => itemIndex,
getMode: () => mode,
getExecuteData: () => executeData,
getNodeParameter: (
parameterName: string,
fallbackValue?: any,
options?: IGetNodeParameterOptions,
): NodeParameterValueType | object => {
return getNodeParameter(
workflow,
runExecutionData,
runIndex,
connectionInputData,
node,
parameterName,
itemIndex,
mode,
getAdditionalKeys(additionalData, mode, runExecutionData),
executeData,
fallbackValue,
options,
);
},
getWorkflowDataProxy: (): IWorkflowDataProxyData => {
const dataProxy = new WorkflowDataProxy(
workflow,
runExecutionData,
runIndex,
itemIndex,
node.name,
connectionInputData,
{},
mode,
getAdditionalKeys(additionalData, mode, runExecutionData),
executeData,
);
return dataProxy.getDataProxy();
},
helpers: {
createDeferredPromise,
returnJsonArray,
...getRequestHelperFunctions(
workflow,
node,
additionalData,
runExecutionData,
connectionInputData,
),
...getBinaryHelperFunctions(additionalData, workflow.id),
assertBinaryData: (propertyName, inputIndex = 0) =>
assertBinaryData(inputData, node, itemIndex, propertyName, inputIndex),
getBinaryDataBuffer: async (propertyName, inputIndex = 0) =>
await getBinaryDataBuffer(inputData, itemIndex, propertyName, inputIndex),
},
logAiEvent: (eventName: AiEvent, msg: string) => {
return additionalData.logAiEvent(eventName, {
executionId: additionalData.executionId ?? 'unsaved-execution',
nodeName: node.name,
workflowName: workflow.name ?? 'Unnamed workflow',
nodeType: node.type,
workflowId: workflow.id ?? 'unsaved-workflow',
msg,
});
},
};
})(workflow, runExecutionData, connectionInputData, inputData, node, itemIndex);
return new ExecuteSingleContext(
workflow,
node,
additionalData,
mode,
runExecutionData,
runIndex,
connectionInputData,
inputData,
itemIndex,
executeData,
abortSignal,
);
}
export function getCredentialTestFunctions(): ICredentialTestFunctions {


@ -0,0 +1,301 @@
import { mock } from 'jest-mock-extended';
import type {
INode,
IWorkflowExecuteAdditionalData,
IRunExecutionData,
INodeExecutionData,
ITaskDataConnections,
IExecuteData,
Workflow,
WorkflowExecuteMode,
ICredentialsHelper,
Expression,
INodeType,
INodeTypes,
OnError,
ContextType,
IContextObject,
ICredentialDataDecryptedObject,
ISourceData,
} from 'n8n-workflow';
import { ApplicationError, NodeHelpers } from 'n8n-workflow';
import { ExecuteSingleContext } from '../execute-single-context';
describe('ExecuteSingleContext', () => {
const testCredentialType = 'testCredential';
const nodeType = mock<INodeType>({
description: {
credentials: [
{
name: testCredentialType,
required: true,
},
],
properties: [
{
name: 'testParameter',
required: true,
},
],
},
});
const nodeTypes = mock<INodeTypes>();
const expression = mock<Expression>();
const workflow = mock<Workflow>({ expression, nodeTypes });
const node = mock<INode>({
credentials: {
[testCredentialType]: {
id: 'testCredentialId',
},
},
});
node.parameters = {
testParameter: 'testValue',
};
const credentialsHelper = mock<ICredentialsHelper>();
const additionalData = mock<IWorkflowExecuteAdditionalData>({ credentialsHelper });
const mode: WorkflowExecuteMode = 'manual';
const runExecutionData = mock<IRunExecutionData>();
const connectionInputData = mock<INodeExecutionData[]>();
const inputData: ITaskDataConnections = { main: [[{ json: { test: 'data' } }]] };
const executeData = mock<IExecuteData>();
const runIndex = 0;
const itemIndex = 0;
const abortSignal = mock<AbortSignal>();
const executeSingleContext = new ExecuteSingleContext(
workflow,
node,
additionalData,
mode,
runExecutionData,
runIndex,
connectionInputData,
inputData,
itemIndex,
executeData,
abortSignal,
);
beforeEach(() => {
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
expression.getParameterValue.mockImplementation((value) => value);
});
describe('getExecutionCancelSignal', () => {
it('should return the abort signal', () => {
expect(executeSingleContext.getExecutionCancelSignal()).toBe(abortSignal);
});
});
describe('continueOnFail', () => {
afterEach(() => {
node.onError = undefined;
node.continueOnFail = false;
});
it('should return false for nodes by default', () => {
expect(executeSingleContext.continueOnFail()).toEqual(false);
});
it('should return true if node has continueOnFail set to true', () => {
node.continueOnFail = true;
expect(executeSingleContext.continueOnFail()).toEqual(true);
});
test.each([
['continueRegularOutput', true],
['continueErrorOutput', true],
['stopWorkflow', false],
])('if node has onError set to %s, it should return %s', (onError, expected) => {
node.onError = onError as OnError;
expect(executeSingleContext.continueOnFail()).toEqual(expected);
});
});
describe('evaluateExpression', () => {
it('should evaluate the expression correctly', () => {
const expression = '$json.test';
const expectedResult = 'data';
const resolveSimpleParameterValueSpy = jest.spyOn(
workflow.expression,
'resolveSimpleParameterValue',
);
resolveSimpleParameterValueSpy.mockReturnValue(expectedResult);
expect(executeSingleContext.evaluateExpression(expression, itemIndex)).toEqual(
expectedResult,
);
expect(resolveSimpleParameterValueSpy).toHaveBeenCalledWith(
`=${expression}`,
{},
runExecutionData,
runIndex,
itemIndex,
node.name,
connectionInputData,
mode,
expect.objectContaining({}),
executeData,
);
resolveSimpleParameterValueSpy.mockRestore();
});
});
describe('getContext', () => {
it('should return the context object', () => {
const contextType: ContextType = 'node';
const expectedContext = mock<IContextObject>();
const getContextSpy = jest.spyOn(NodeHelpers, 'getContext');
getContextSpy.mockReturnValue(expectedContext);
expect(executeSingleContext.getContext(contextType)).toEqual(expectedContext);
expect(getContextSpy).toHaveBeenCalledWith(runExecutionData, contextType, node);
getContextSpy.mockRestore();
});
});
describe('getInputData', () => {
const inputIndex = 0;
const inputName = 'main';
afterEach(() => {
inputData[inputName] = [[{ json: { test: 'data' } }]];
});
it('should return the input data correctly', () => {
const expectedData = { json: { test: 'data' } };
expect(executeSingleContext.getInputData(inputIndex, inputName)).toEqual(expectedData);
});
it('should return an empty object if the input name does not exist', () => {
const inputName = 'nonExistent';
const expectedData = { json: {} };
expect(executeSingleContext.getInputData(inputIndex, inputName)).toEqual(expectedData);
});
it('should throw an error if the input index is out of range', () => {
const inputIndex = 1;
expect(() => executeSingleContext.getInputData(inputIndex, inputName)).toThrow(
ApplicationError,
);
});
it('should throw an error if the input index was not set', () => {
inputData.main[inputIndex] = null;
expect(() => executeSingleContext.getInputData(inputIndex, inputName)).toThrow(
ApplicationError,
);
});
it('should throw an error if the value of input with given index was not set', () => {
delete inputData.main[inputIndex]![itemIndex];
expect(() => executeSingleContext.getInputData(inputIndex, inputName)).toThrow(
ApplicationError,
);
});
});
describe('getItemIndex', () => {
it('should return the item index correctly', () => {
expect(executeSingleContext.getItemIndex()).toEqual(itemIndex);
});
});
describe('getNodeParameter', () => {
beforeEach(() => {
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
expression.getParameterValue.mockImplementation((value) => value);
});
it('should return parameter value when it exists', () => {
const parameter = executeSingleContext.getNodeParameter('testParameter');
expect(parameter).toBe('testValue');
});
it('should return the fallback value when the parameter does not exist', () => {
const parameter = executeSingleContext.getNodeParameter('otherParameter', 'fallback');
expect(parameter).toBe('fallback');
});
});
describe('getCredentials', () => {
it('should get decrypted credentials', async () => {
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' });
const credentials =
await executeSingleContext.getCredentials<ICredentialDataDecryptedObject>(
testCredentialType,
);
expect(credentials).toEqual({ secret: 'token' });
});
});
describe('getExecuteData', () => {
it('should return the execute data correctly', () => {
expect(executeSingleContext.getExecuteData()).toEqual(executeData);
});
});
describe('getWorkflowDataProxy', () => {
it('should return the workflow data proxy correctly', () => {
const workflowDataProxy = executeSingleContext.getWorkflowDataProxy();
expect(workflowDataProxy.isProxy).toBe(true);
expect(Object.keys(workflowDataProxy.$input)).toEqual([
'all',
'context',
'first',
'item',
'last',
'params',
]);
});
});
describe('getInputSourceData', () => {
it('should return the input source data correctly', () => {
const inputSourceData = mock<ISourceData>();
executeData.source = { main: [inputSourceData] };
expect(executeSingleContext.getInputSourceData()).toEqual(inputSourceData);
});
it('should throw an error if the source data is missing', () => {
executeData.source = null;
expect(() => executeSingleContext.getInputSourceData()).toThrow(ApplicationError);
});
});
describe('logAiEvent', () => {
it('should log the AI event correctly', () => {
const eventName = 'ai-tool-called';
const msg = 'test message';
executeSingleContext.logAiEvent(eventName, msg);
expect(additionalData.logAiEvent).toHaveBeenCalledWith(eventName, {
executionId: additionalData.executionId,
nodeName: node.name,
workflowName: workflow.name,
nodeType: node.type,
workflowId: workflow.id,
msg,
});
});
});
});


@ -0,0 +1,218 @@
import type {
ICredentialDataDecryptedObject,
IGetNodeParameterOptions,
INode,
INodeExecutionData,
IRunExecutionData,
IExecuteSingleFunctions,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowExecuteMode,
ITaskDataConnections,
IExecuteData,
ContextType,
AiEvent,
ISourceData,
} from 'n8n-workflow';
import {
ApplicationError,
createDeferredPromise,
NodeHelpers,
WorkflowDataProxy,
} from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import {
assertBinaryData,
continueOnFail,
getAdditionalKeys,
getBinaryDataBuffer,
getBinaryHelperFunctions,
getCredentials,
getNodeParameter,
getRequestHelperFunctions,
returnJsonArray,
} from '@/NodeExecuteFunctions';
import { NodeExecutionContext } from './node-execution-context';
export class ExecuteSingleContext extends NodeExecutionContext implements IExecuteSingleFunctions {
readonly helpers: IExecuteSingleFunctions['helpers'];
constructor(
workflow: Workflow,
node: INode,
additionalData: IWorkflowExecuteAdditionalData,
mode: WorkflowExecuteMode,
private readonly runExecutionData: IRunExecutionData,
private readonly runIndex: number,
private readonly connectionInputData: INodeExecutionData[],
private readonly inputData: ITaskDataConnections,
private readonly itemIndex: number,
private readonly executeData: IExecuteData,
private readonly abortSignal?: AbortSignal,
) {
super(workflow, node, additionalData, mode);
this.helpers = {
createDeferredPromise,
returnJsonArray,
...getRequestHelperFunctions(
workflow,
node,
additionalData,
runExecutionData,
connectionInputData,
),
...getBinaryHelperFunctions(additionalData, workflow.id),
assertBinaryData: (propertyName, inputIndex = 0) =>
assertBinaryData(inputData, node, itemIndex, propertyName, inputIndex),
getBinaryDataBuffer: async (propertyName, inputIndex = 0) =>
await getBinaryDataBuffer(inputData, itemIndex, propertyName, inputIndex),
};
}
getExecutionCancelSignal() {
return this.abortSignal;
}
onExecutionCancellation(handler: () => unknown) {
const fn = () => {
this.abortSignal?.removeEventListener('abort', fn);
handler();
};
this.abortSignal?.addEventListener('abort', fn);
}
continueOnFail() {
return continueOnFail(this.node);
}
evaluateExpression(expression: string, evaluateItemIndex: number | undefined) {
evaluateItemIndex = evaluateItemIndex ?? this.itemIndex;
return this.workflow.expression.resolveSimpleParameterValue(
`=${expression}`,
{},
this.runExecutionData,
this.runIndex,
evaluateItemIndex,
this.node.name,
this.connectionInputData,
this.mode,
getAdditionalKeys(this.additionalData, this.mode, this.runExecutionData),
this.executeData,
);
}
getContext(type: ContextType) {
return NodeHelpers.getContext(this.runExecutionData, type, this.node);
}
getInputData(inputIndex = 0, inputName = 'main') {
if (!this.inputData.hasOwnProperty(inputName)) {
// Return an empty item, otherwise an error would be thrown when nothing is connected to the input
return { json: {} };
}
// TODO: Check if nodeType has input with that index defined
if (this.inputData[inputName].length < inputIndex) {
throw new ApplicationError('Could not get input index', {
extra: { inputIndex, inputName },
});
}
const allItems = this.inputData[inputName][inputIndex];
if (allItems === null || allItems === undefined) {
throw new ApplicationError('Input index was not set', {
extra: { inputIndex, inputName },
});
}
const data = allItems[this.itemIndex];
if (data === null || data === undefined) {
throw new ApplicationError('Value of input with given index was not set', {
extra: { inputIndex, inputName, itemIndex: this.itemIndex },
});
}
return data;
}
getItemIndex() {
return this.itemIndex;
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
getNodeParameter(parameterName: string, fallbackValue?: any, options?: IGetNodeParameterOptions) {
return getNodeParameter(
this.workflow,
this.runExecutionData,
this.runIndex,
this.connectionInputData,
this.node,
parameterName,
this.itemIndex,
this.mode,
getAdditionalKeys(this.additionalData, this.mode, this.runExecutionData),
this.executeData,
fallbackValue,
options,
);
}
// TODO: extract out in a BaseExecutionContext
async getCredentials<T extends object = ICredentialDataDecryptedObject>(type: string) {
return await getCredentials<T>(
this.workflow,
this.node,
type,
this.additionalData,
this.mode,
this.executeData,
this.runExecutionData,
this.runIndex,
this.connectionInputData,
this.itemIndex,
);
}
getExecuteData() {
return this.executeData;
}
getWorkflowDataProxy() {
return new WorkflowDataProxy(
this.workflow,
this.runExecutionData,
this.runIndex,
this.itemIndex,
this.node.name,
this.connectionInputData,
{},
this.mode,
getAdditionalKeys(this.additionalData, this.mode, this.runExecutionData),
this.executeData,
).getDataProxy();
}
getInputSourceData(inputIndex = 0, inputName = 'main'): ISourceData {
if (this.executeData?.source === null) {
// Should never happen as n8n sets it automatically
throw new ApplicationError('Source data is missing');
}
return this.executeData.source[inputName][inputIndex] as ISourceData;
}
logAiEvent(eventName: AiEvent, msg: string) {
return this.additionalData.logAiEvent(eventName, {
executionId: this.additionalData.executionId ?? 'unsaved-execution',
nodeName: this.node.name,
workflowName: this.workflow.name ?? 'Unnamed workflow',
nodeType: this.node.type,
workflowId: this.workflow.id ?? 'unsaved-workflow',
msg,
});
}
}
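Together with the change to getExecuteSingleFunctions above, callers keep the same IExecuteSingleFunctions surface while construction moves into this class. A short usage sketch; every argument below is a placeholder supplied by the caller, mirroring the test setup earlier in this diff:

// Sketch only: obtaining and using the context directly.
const ctx = new ExecuteSingleContext(
	workflow,
	node,
	additionalData,
	'manual',
	runExecutionData,
	0, // runIndex
	connectionInputData,
	inputData,
	0, // itemIndex
	executeData,
	abortSignal,
);

const item = ctx.getInputData(); // current item, e.g. { json: { ... } }
const value = ctx.getNodeParameter('someParameter', 'fallbackValue');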


@ -1,136 +0,0 @@
import FileType from 'file-type';
import { IncomingMessage, type ClientRequest } from 'http';
import { mock } from 'jest-mock-extended';
import type { Workflow, IWorkflowExecuteAdditionalData, IBinaryData } from 'n8n-workflow';
import type { Socket } from 'net';
import { Container } from 'typedi';
import { BinaryDataService } from '@/BinaryData/BinaryData.service';
import { BinaryHelpers } from '../binary-helpers';
jest.mock('file-type');
describe('BinaryHelpers', () => {
let binaryDataService = mock<BinaryDataService>();
Container.set(BinaryDataService, binaryDataService);
const workflow = mock<Workflow>({ id: '123' });
const additionalData = mock<IWorkflowExecuteAdditionalData>({ executionId: '456' });
const binaryHelpers = new BinaryHelpers(workflow, additionalData);
beforeEach(() => {
jest.clearAllMocks();
binaryDataService.store.mockImplementation(
async (_workflowId, _executionId, _buffer, value) => value,
);
});
describe('getBinaryPath', () => {
it('should call getPath method of BinaryDataService', () => {
binaryHelpers.getBinaryPath('mock-binary-data-id');
expect(binaryDataService.getPath).toHaveBeenCalledWith('mock-binary-data-id');
});
});
describe('getBinaryMetadata', () => {
it('should call getMetadata method of BinaryDataService', async () => {
await binaryHelpers.getBinaryMetadata('mock-binary-data-id');
expect(binaryDataService.getMetadata).toHaveBeenCalledWith('mock-binary-data-id');
});
});
describe('getBinaryStream', () => {
it('should call getStream method of BinaryDataService', async () => {
await binaryHelpers.getBinaryStream('mock-binary-data-id');
expect(binaryDataService.getAsStream).toHaveBeenCalledWith('mock-binary-data-id', undefined);
});
});
describe('prepareBinaryData', () => {
it('should guess the mime type and file extension if not provided', async () => {
const buffer = Buffer.from('test');
const fileTypeData = { mime: 'application/pdf', ext: 'pdf' };
(FileType.fromBuffer as jest.Mock).mockResolvedValue(fileTypeData);
const binaryData = await binaryHelpers.prepareBinaryData(buffer);
expect(binaryData.mimeType).toEqual('application/pdf');
expect(binaryData.fileExtension).toEqual('pdf');
expect(binaryData.fileType).toEqual('pdf');
expect(binaryData.fileName).toBeUndefined();
expect(binaryData.directory).toBeUndefined();
expect(binaryDataService.store).toHaveBeenCalledWith(
workflow.id,
additionalData.executionId!,
buffer,
binaryData,
);
});
it('should use the provided mime type and file extension if provided', async () => {
const buffer = Buffer.from('test');
const mimeType = 'application/octet-stream';
const binaryData = await binaryHelpers.prepareBinaryData(buffer, undefined, mimeType);
expect(binaryData.mimeType).toEqual(mimeType);
expect(binaryData.fileExtension).toEqual('bin');
expect(binaryData.fileType).toBeUndefined();
expect(binaryData.fileName).toBeUndefined();
expect(binaryData.directory).toBeUndefined();
expect(binaryDataService.store).toHaveBeenCalledWith(
workflow.id,
additionalData.executionId!,
buffer,
binaryData,
);
});
const mockSocket = mock<Socket>({ readableHighWaterMark: 0 });
it('should use the contentDisposition.filename, responseUrl, and contentType properties to set the fileName, directory, and mimeType properties of the binaryData object', async () => {
const incomingMessage = new IncomingMessage(mockSocket);
incomingMessage.contentDisposition = { filename: 'test.txt', type: 'attachment' };
incomingMessage.contentType = 'text/plain';
incomingMessage.responseUrl = 'https://example.com/test.txt';
const binaryData = await binaryHelpers.prepareBinaryData(incomingMessage);
expect(binaryData.fileName).toEqual('test.txt');
expect(binaryData.fileType).toEqual('text');
expect(binaryData.directory).toBeUndefined();
expect(binaryData.mimeType).toEqual('text/plain');
expect(binaryData.fileExtension).toEqual('txt');
});
it('should use the req.path property to set the fileName property of the binaryData object if contentDisposition.filename and responseUrl are not provided', async () => {
const incomingMessage = new IncomingMessage(mockSocket);
incomingMessage.contentType = 'text/plain';
incomingMessage.req = mock<ClientRequest>({ path: '/test.txt' });
const binaryData = await binaryHelpers.prepareBinaryData(incomingMessage);
expect(binaryData.fileName).toEqual('test.txt');
expect(binaryData.directory).toBeUndefined();
expect(binaryData.mimeType).toEqual('text/plain');
expect(binaryData.fileExtension).toEqual('txt');
});
});
describe('setBinaryDataBuffer', () => {
it('should call store method of BinaryDataService', async () => {
const binaryData = mock<IBinaryData>();
const bufferOrStream = mock<Buffer>();
await binaryHelpers.setBinaryDataBuffer(binaryData, bufferOrStream);
expect(binaryDataService.store).toHaveBeenCalledWith(
workflow.id,
additionalData.executionId,
bufferOrStream,
binaryData,
);
});
});
});


@ -1,33 +0,0 @@
import { mock } from 'jest-mock-extended';
import type { Workflow } from 'n8n-workflow';
import { Container } from 'typedi';
import { ScheduledTaskManager } from '@/ScheduledTaskManager';
import { SchedulingHelpers } from '../scheduling-helpers';
describe('SchedulingHelpers', () => {
const scheduledTaskManager = mock<ScheduledTaskManager>();
Container.set(ScheduledTaskManager, scheduledTaskManager);
const workflow = mock<Workflow>();
const schedulingHelpers = new SchedulingHelpers(workflow);
beforeEach(() => {
jest.clearAllMocks();
});
describe('registerCron', () => {
it('should call registerCron method of ScheduledTaskManager', () => {
const cronExpression = '* * * * * *';
const onTick = jest.fn();
schedulingHelpers.registerCron(cronExpression, onTick);
expect(scheduledTaskManager.registerCron).toHaveBeenCalledWith(
workflow,
cronExpression,
onTick,
);
});
});
});


@ -1,32 +0,0 @@
import { mock } from 'jest-mock-extended';
import type { SSHCredentials } from 'n8n-workflow';
import type { Client } from 'ssh2';
import { Container } from 'typedi';
import { SSHClientsManager } from '@/SSHClientsManager';
import { SSHTunnelHelpers } from '../ssh-tunnel-helpers';
describe('SSHTunnelHelpers', () => {
const sshClientsManager = mock<SSHClientsManager>();
Container.set(SSHClientsManager, sshClientsManager);
const sshTunnelHelpers = new SSHTunnelHelpers();
beforeEach(() => {
jest.clearAllMocks();
});
describe('getSSHClient', () => {
const credentials = mock<SSHCredentials>();
it('should call SSHClientsManager.getClient with the given credentials', async () => {
const mockClient = mock<Client>();
sshClientsManager.getClient.mockResolvedValue(mockClient);
const client = await sshTunnelHelpers.getSSHClient(credentials);
expect(sshClientsManager.getClient).toHaveBeenCalledWith(credentials);
expect(client).toBe(mockClient);
});
});
});


@ -1,148 +0,0 @@
import FileType from 'file-type';
import { IncomingMessage } from 'http';
import MimeTypes from 'mime-types';
import { ApplicationError, fileTypeFromMimeType } from 'n8n-workflow';
import type {
BinaryHelperFunctions,
IWorkflowExecuteAdditionalData,
Workflow,
IBinaryData,
} from 'n8n-workflow';
import path from 'path';
import type { Readable } from 'stream';
import Container from 'typedi';
import { BinaryDataService } from '@/BinaryData/BinaryData.service';
import { binaryToBuffer } from '@/BinaryData/utils';
// eslint-disable-next-line import/no-cycle
import { binaryToString } from '@/NodeExecuteFunctions';
export class BinaryHelpers {
private readonly binaryDataService = Container.get(BinaryDataService);
constructor(
private readonly workflow: Workflow,
private readonly additionalData: IWorkflowExecuteAdditionalData,
) {}
get exported(): BinaryHelperFunctions {
return {
getBinaryPath: this.getBinaryPath.bind(this),
getBinaryMetadata: this.getBinaryMetadata.bind(this),
getBinaryStream: this.getBinaryStream.bind(this),
binaryToBuffer,
binaryToString,
prepareBinaryData: this.prepareBinaryData.bind(this),
setBinaryDataBuffer: this.setBinaryDataBuffer.bind(this),
copyBinaryFile: this.copyBinaryFile.bind(this),
};
}
getBinaryPath(binaryDataId: string) {
return this.binaryDataService.getPath(binaryDataId);
}
async getBinaryMetadata(binaryDataId: string) {
return await this.binaryDataService.getMetadata(binaryDataId);
}
async getBinaryStream(binaryDataId: string, chunkSize?: number) {
return await this.binaryDataService.getAsStream(binaryDataId, chunkSize);
}
// eslint-disable-next-line complexity
async prepareBinaryData(binaryData: Buffer | Readable, filePath?: string, mimeType?: string) {
let fileExtension: string | undefined;
if (binaryData instanceof IncomingMessage) {
if (!filePath) {
try {
const { responseUrl } = binaryData;
filePath =
binaryData.contentDisposition?.filename ??
((responseUrl && new URL(responseUrl).pathname) ?? binaryData.req?.path)?.slice(1);
} catch {}
}
if (!mimeType) {
mimeType = binaryData.contentType;
}
}
if (!mimeType) {
// If no mime type is given figure it out
if (filePath) {
// Use file path to guess mime type
const mimeTypeLookup = MimeTypes.lookup(filePath);
if (mimeTypeLookup) {
mimeType = mimeTypeLookup;
}
}
if (!mimeType) {
if (Buffer.isBuffer(binaryData)) {
// Use buffer to guess mime type
const fileTypeData = await FileType.fromBuffer(binaryData);
if (fileTypeData) {
mimeType = fileTypeData.mime;
fileExtension = fileTypeData.ext;
}
} else if (binaryData instanceof IncomingMessage) {
mimeType = binaryData.headers['content-type'];
} else {
// TODO: detect filetype from other kind of streams
}
}
}
if (!fileExtension && mimeType) {
fileExtension = MimeTypes.extension(mimeType) || undefined;
}
if (!mimeType) {
// Fall back to text
mimeType = 'text/plain';
}
const returnData: IBinaryData = {
mimeType,
fileType: fileTypeFromMimeType(mimeType),
fileExtension,
data: '',
};
if (filePath) {
if (filePath.includes('?')) {
// Remove maybe present query parameters
filePath = filePath.split('?').shift();
}
const filePathParts = path.parse(filePath as string);
if (filePathParts.dir !== '') {
returnData.directory = filePathParts.dir;
}
returnData.fileName = filePathParts.base;
// Remove the dot
const extractedFileExtension = filePathParts.ext.slice(1);
if (extractedFileExtension) {
returnData.fileExtension = extractedFileExtension;
}
}
return await this.setBinaryDataBuffer(returnData, binaryData);
}
async setBinaryDataBuffer(binaryData: IBinaryData, bufferOrStream: Buffer | Readable) {
return await this.binaryDataService.store(
this.workflow.id,
this.additionalData.executionId!,
bufferOrStream,
binaryData,
);
}
async copyBinaryFile(): Promise<never> {
throw new ApplicationError('`copyBinaryFile` has been removed. Please upgrade this node.');
}
}
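The removed prepareBinaryData falls through several sources when guessing the mime type and extension. A tiny standalone illustration of the two mime-types calls it relies on; the file names are examples:

import MimeTypes from 'mime-types';

// Path-based lookup, used when a file path is available:
const fromPath = MimeTypes.lookup('report.pdf'); // 'application/pdf'

// Reverse direction, used to fill in a missing file extension:
const ext = MimeTypes.extension('text/plain'); // 'txt'

console.log(fromPath, ext);
// If neither the path nor the buffer yields anything, the helper above finally
// falls back to 'text/plain'.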


@ -1,381 +0,0 @@
import { createHash } from 'crypto';
import { pick } from 'lodash';
import { jsonParse, NodeOperationError, sleep } from 'n8n-workflow';
import type {
RequestHelperFunctions,
IAdditionalCredentialOptions,
IAllExecuteFunctions,
IExecuteData,
IHttpRequestOptions,
IN8nHttpFullResponse,
IN8nHttpResponse,
INode,
INodeExecutionData,
IOAuth2Options,
IRequestOptions,
IRunExecutionData,
IWorkflowDataProxyAdditionalKeys,
IWorkflowExecuteAdditionalData,
NodeParameterValueType,
PaginationOptions,
Workflow,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { Readable } from 'stream';
// eslint-disable-next-line import/no-cycle
import {
applyPaginationRequestData,
binaryToString,
httpRequest,
httpRequestWithAuthentication,
proxyRequestToAxios,
requestOAuth1,
requestOAuth2,
requestWithAuthentication,
validateUrl,
} from '@/NodeExecuteFunctions';
export class RequestHelpers {
constructor(
private readonly context: IAllExecuteFunctions,
private readonly workflow: Workflow,
private readonly node: INode,
private readonly additionalData: IWorkflowExecuteAdditionalData,
private readonly runExecutionData: IRunExecutionData | null = null,
private readonly connectionInputData: INodeExecutionData[] = [],
) {}
get exported(): RequestHelperFunctions {
return {
httpRequest,
httpRequestWithAuthentication: this.httpRequestWithAuthentication.bind(this),
requestWithAuthenticationPaginated: this.requestWithAuthenticationPaginated.bind(this),
request: this.request.bind(this),
requestWithAuthentication: this.requestWithAuthentication.bind(this),
requestOAuth1: this.requestOAuth1.bind(this),
requestOAuth2: this.requestOAuth2.bind(this),
};
}
get httpRequest() {
return httpRequest;
}
async httpRequestWithAuthentication(
credentialsType: string,
requestOptions: IHttpRequestOptions,
additionalCredentialOptions?: IAdditionalCredentialOptions,
) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
return await httpRequestWithAuthentication.call(
this.context,
credentialsType,
requestOptions,
this.workflow,
this.node,
this.additionalData,
additionalCredentialOptions,
);
}
// eslint-disable-next-line complexity
async requestWithAuthenticationPaginated(
requestOptions: IRequestOptions,
itemIndex: number,
paginationOptions: PaginationOptions,
credentialsType?: string,
additionalCredentialOptions?: IAdditionalCredentialOptions,
): Promise<unknown[]> {
const responseData = [];
if (!requestOptions.qs) {
requestOptions.qs = {};
}
requestOptions.resolveWithFullResponse = true;
requestOptions.simple = false;
let tempResponseData: IN8nHttpFullResponse;
let makeAdditionalRequest: boolean;
let paginateRequestData: PaginationOptions['request'];
const runIndex = 0;
const additionalKeys = {
$request: requestOptions,
$response: {} as IN8nHttpFullResponse,
$version: this.node.typeVersion,
$pageCount: 0,
};
const executeData: IExecuteData = {
data: {},
node: this.node,
source: null,
};
const hashData = {
identicalCount: 0,
previousLength: 0,
previousHash: '',
};
do {
paginateRequestData = this.getResolvedValue(
paginationOptions.request as unknown as NodeParameterValueType,
itemIndex,
runIndex,
executeData,
additionalKeys,
false,
) as object as PaginationOptions['request'];
const tempRequestOptions = applyPaginationRequestData(requestOptions, paginateRequestData);
if (!validateUrl(tempRequestOptions.uri as string)) {
throw new NodeOperationError(
this.node,
`'${paginateRequestData.url}' is not a valid URL.`,
{
itemIndex,
runIndex,
type: 'invalid_url',
},
);
}
if (credentialsType) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
tempResponseData = await this.requestWithAuthentication(
credentialsType,
tempRequestOptions,
additionalCredentialOptions,
);
} else {
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
tempResponseData = await this.request(tempRequestOptions);
}
const newResponse: IN8nHttpFullResponse = Object.assign(
{
body: {},
headers: {},
statusCode: 0,
},
pick(tempResponseData, ['body', 'headers', 'statusCode']),
);
let contentBody: Exclude<IN8nHttpResponse, Buffer>;
if (newResponse.body instanceof Readable && paginationOptions.binaryResult !== true) {
// Keep the original string version so that we can use it to hash if needed
contentBody = await binaryToString(newResponse.body as Buffer | Readable);
const responseContentType = newResponse.headers['content-type']?.toString() ?? '';
if (responseContentType.includes('application/json')) {
newResponse.body = jsonParse(contentBody, { fallbackValue: {} });
} else {
newResponse.body = contentBody;
}
tempResponseData.__bodyResolved = true;
tempResponseData.body = newResponse.body;
} else {
contentBody = newResponse.body;
}
if (paginationOptions.binaryResult !== true || tempResponseData.headers.etag) {
// If the data is not binary (and so not a stream), or an etag is present,
// we check via etag or hash if identical data is received
let contentLength = 0;
if ('content-length' in tempResponseData.headers) {
contentLength = parseInt(tempResponseData.headers['content-length'] as string) || 0;
}
if (hashData.previousLength === contentLength) {
let hash: string;
if (tempResponseData.headers.etag) {
// If an etag is provided, we use it as "hash"
hash = tempResponseData.headers.etag as string;
} else {
// If there is no etag, we calculate a hash from the data in the body
if (typeof contentBody !== 'string') {
contentBody = JSON.stringify(contentBody);
}
hash = createHash('md5').update(contentBody).digest('base64');
}
if (hashData.previousHash === hash) {
hashData.identicalCount += 1;
if (hashData.identicalCount > 2) {
// Length was identical 5x and hash 3x
throw new NodeOperationError(
this.node,
'The returned response was identical 5x, so requests got stopped',
{
itemIndex,
description:
'Check if "Pagination Completed When" has been configured correctly.',
},
);
}
} else {
hashData.identicalCount = 0;
}
hashData.previousHash = hash;
} else {
hashData.identicalCount = 0;
}
hashData.previousLength = contentLength;
}
responseData.push(tempResponseData);
additionalKeys.$response = newResponse;
additionalKeys.$pageCount = additionalKeys.$pageCount + 1;
const maxRequests = this.getResolvedValue(
paginationOptions.maxRequests,
itemIndex,
runIndex,
executeData,
additionalKeys,
false,
) as number;
if (maxRequests && additionalKeys.$pageCount >= maxRequests) {
break;
}
makeAdditionalRequest = this.getResolvedValue(
paginationOptions.continue,
itemIndex,
runIndex,
executeData,
additionalKeys,
false,
) as boolean;
if (makeAdditionalRequest) {
if (paginationOptions.requestInterval) {
const requestInterval = this.getResolvedValue(
paginationOptions.requestInterval,
itemIndex,
runIndex,
executeData,
additionalKeys,
false,
) as number;
await sleep(requestInterval);
}
if (tempResponseData.statusCode < 200 || tempResponseData.statusCode >= 300) {
// We let all requests pass regardless of the response code (via
// "requestOptions.simple = false") so that the request does not fail by default
// when pagination is, for example, configured to stop on 404 responses. For that
// reason we have to throw an error manually here if the response code is not a
// success code.
let data = tempResponseData.body;
if (data instanceof Readable && paginationOptions.binaryResult !== true) {
data = await binaryToString(data as Buffer | Readable);
} else if (typeof data === 'object') {
data = JSON.stringify(data);
}
throw Object.assign(new Error(`${tempResponseData.statusCode} - "${data?.toString()}"`), {
statusCode: tempResponseData.statusCode,
error: data,
isAxiosError: true,
response: {
headers: tempResponseData.headers,
status: tempResponseData.statusCode,
statusText: tempResponseData.statusMessage,
},
});
}
}
} while (makeAdditionalRequest);
return responseData;
}
async request(uriOrObject: string | IRequestOptions, options?: IRequestOptions) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
return await proxyRequestToAxios(
this.workflow,
this.additionalData,
this.node,
uriOrObject,
options,
);
}
async requestWithAuthentication(
credentialsType: string,
requestOptions: IRequestOptions,
additionalCredentialOptions?: IAdditionalCredentialOptions,
itemIndex?: number,
) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
return await requestWithAuthentication.call(
this.context,
credentialsType,
requestOptions,
this.workflow,
this.node,
this.additionalData,
additionalCredentialOptions,
itemIndex,
);
}
async requestOAuth1(credentialsType: string, requestOptions: IRequestOptions) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
return await requestOAuth1.call(this.context, credentialsType, requestOptions);
}
async requestOAuth2(
credentialsType: string,
requestOptions: IRequestOptions,
oAuth2Options?: IOAuth2Options,
) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
return await requestOAuth2.call(
this.context,
credentialsType,
requestOptions,
this.node,
this.additionalData,
oAuth2Options,
);
}
private getResolvedValue(
parameterValue: NodeParameterValueType,
itemIndex: number,
runIndex: number,
executeData: IExecuteData,
additionalKeys?: IWorkflowDataProxyAdditionalKeys,
returnObjectAsString = false,
): NodeParameterValueType {
const mode: WorkflowExecuteMode = 'internal';
if (
typeof parameterValue === 'object' ||
(typeof parameterValue === 'string' && parameterValue.charAt(0) === '=')
) {
return this.workflow.expression.getParameterValue(
parameterValue,
this.runExecutionData,
runIndex,
itemIndex,
this.node.name,
this.connectionInputData,
mode,
additionalKeys ?? {},
executeData,
returnObjectAsString,
);
}
return parameterValue;
}
}
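The dedupe guard above is easier to see outside the diff. Below is a minimal sketch of the same idea in isolation, assuming a plain fetch-style callback; paginateUntilRepeated, fetchPage, and maxRequests are illustrative names, not part of n8n's helpers.

// Minimal sketch, not n8n code: stop paginating when the response body,
// identified by its etag or an MD5 hash, keeps repeating.
import { createHash } from 'node:crypto';

async function paginateUntilRepeated(
	fetchPage: (page: number) => Promise<{ body: string; etag?: string }>,
	maxRequests = 100,
): Promise<string[]> {
	const pages: string[] = [];
	let previousHash: string | undefined;
	let identicalCount = 0;

	for (let page = 0; page < maxRequests; page++) {
		const { body, etag } = await fetchPage(page);
		pages.push(body);

		// Prefer the server-provided etag as the "hash"; otherwise hash the body.
		const hash = etag ?? createHash('md5').update(body).digest('base64');
		if (hash === previousHash) {
			identicalCount += 1;
			if (identicalCount >= 3) {
				// The same body has now come back four times in a row:
				// assume the pagination expression is not advancing and stop.
				break;
			}
		} else {
			identicalCount = 0;
		}
		previousHash = hash;
	}
	return pages;
}

The real implementation is stricter: it only compares hashes once the content-length already matches the previous response, and it throws a NodeOperationError instead of breaking out quietly.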

View file

@ -1,20 +0,0 @@
import type { CronExpression, Workflow, SchedulingFunctions } from 'n8n-workflow';
import { Container } from 'typedi';
import { ScheduledTaskManager } from '@/ScheduledTaskManager';
export class SchedulingHelpers {
private readonly scheduledTaskManager = Container.get(ScheduledTaskManager);
constructor(private readonly workflow: Workflow) {}
get exported(): SchedulingFunctions {
return {
registerCron: this.registerCron.bind(this),
};
}
registerCron(cronExpression: CronExpression, onTick: () => void) {
this.scheduledTaskManager.registerCron(this.workflow, cronExpression, onTick);
}
}
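This helper class is removed in favour of the getSchedulingFunctions factory used by the contexts further down. A hedged usage sketch of the replacement, assuming registerCron accepts a standard five-field cron string; scheduleHeartbeat is an invented name.

// Illustrative sketch only: the factory returns the same registerCron that
// this class used to expose via `exported`.
import type { Workflow } from 'n8n-workflow';
import { getSchedulingFunctions } from '@/NodeExecuteFunctions';

function scheduleHeartbeat(workflow: Workflow) {
	const { registerCron } = getSchedulingFunctions(workflow);
	// Fires every five minutes for as long as the workflow stays registered.
	registerCron('*/5 * * * *', () => console.log(`tick for workflow ${workflow.id}`));
}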

View file

@ -1,18 +0,0 @@
import type { SSHCredentials, SSHTunnelFunctions } from 'n8n-workflow';
import { Container } from 'typedi';
import { SSHClientsManager } from '@/SSHClientsManager';
export class SSHTunnelHelpers {
private readonly sshClientsManager = Container.get(SSHClientsManager);
get exported(): SSHTunnelFunctions {
return {
getSSHClient: this.getSSHClient.bind(this),
};
}
async getSSHClient(credentials: SSHCredentials) {
return await this.sshClientsManager.getClient(credentials);
}
}
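Likewise, this class gives way to the getSSHTunnelFunctions factory. A hedged sketch of how a caller would obtain a client through it; runOverTunnel is an invented name.

// Illustrative sketch only: clients come from the shared SSHClientsManager,
// so repeated calls with the same credentials can reuse a connection.
import type { SSHCredentials } from 'n8n-workflow';
import { getSSHTunnelFunctions } from '@/NodeExecuteFunctions';

async function runOverTunnel(credentials: SSHCredentials) {
	const { getSSHClient } = getSSHTunnelFunctions();
	const client = await getSSHClient(credentials);
	return client;
}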

View file

@ -21,10 +21,10 @@ import {
getCredentials,
getNodeParameter,
getNodeWebhookUrl,
getRequestHelperFunctions,
getWebhookDescription,
} from '@/NodeExecuteFunctions';
import { RequestHelpers } from './helpers/request-helpers';
import { NodeExecutionContext } from './node-execution-context';
export class HookContext extends NodeExecutionContext implements IHookFunctions {
@ -40,7 +40,7 @@ export class HookContext extends NodeExecutionContext implements IHookFunctions
) {
super(workflow, node, additionalData, mode);
this.helpers = new RequestHelpers(this, workflow, node, additionalData);
this.helpers = getRequestHelperFunctions(workflow, node, additionalData);
}
getActivationMode() {

View file

@ -1,4 +1,5 @@
// eslint-disable-next-line import/no-cycle
export { ExecuteSingleContext } from './execute-single-context';
export { HookContext } from './hook-context';
export { LoadOptionsContext } from './load-options-context';
export { PollContext } from './poll-context';

View file

@ -13,10 +13,14 @@ import type {
import { extractValue } from '@/ExtractValue';
// eslint-disable-next-line import/no-cycle
import { getAdditionalKeys, getCredentials, getNodeParameter } from '@/NodeExecuteFunctions';
import {
getAdditionalKeys,
getCredentials,
getNodeParameter,
getRequestHelperFunctions,
getSSHTunnelFunctions,
} from '@/NodeExecuteFunctions';
import { RequestHelpers } from './helpers/request-helpers';
import { SSHTunnelHelpers } from './helpers/ssh-tunnel-helpers';
import { NodeExecutionContext } from './node-execution-context';
export class LoadOptionsContext extends NodeExecutionContext implements ILoadOptionsFunctions {
@ -31,8 +35,8 @@ export class LoadOptionsContext extends NodeExecutionContext implements ILoadOpt
super(workflow, node, additionalData, 'internal');
this.helpers = {
...new RequestHelpers(this, workflow, node, additionalData).exported,
...new SSHTunnelHelpers().exported,
...getSSHTunnelFunctions(),
...getRequestHelperFunctions(workflow, node, additionalData),
};
}

View file

@ -16,14 +16,14 @@ import { ApplicationError, createDeferredPromise } from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import {
getAdditionalKeys,
getBinaryHelperFunctions,
getCredentials,
getNodeParameter,
getRequestHelperFunctions,
getSchedulingFunctions,
returnJsonArray,
} from '@/NodeExecuteFunctions';
import { BinaryHelpers } from './helpers/binary-helpers';
import { RequestHelpers } from './helpers/request-helpers';
import { SchedulingHelpers } from './helpers/scheduling-helpers';
import { NodeExecutionContext } from './node-execution-context';
const throwOnEmit = () => {
@ -51,9 +51,9 @@ export class PollContext extends NodeExecutionContext implements IPollFunctions
this.helpers = {
createDeferredPromise,
returnJsonArray,
...new BinaryHelpers(workflow, additionalData).exported,
...new RequestHelpers(this, workflow, node, additionalData).exported,
...new SchedulingHelpers(workflow).exported,
...getRequestHelperFunctions(workflow, node, additionalData),
...getBinaryHelperFunctions(additionalData, workflow.id),
...getSchedulingFunctions(workflow),
};
}

View file

@ -16,15 +16,15 @@ import { ApplicationError, createDeferredPromise } from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import {
getAdditionalKeys,
getBinaryHelperFunctions,
getCredentials,
getNodeParameter,
getRequestHelperFunctions,
getSchedulingFunctions,
getSSHTunnelFunctions,
returnJsonArray,
} from '@/NodeExecuteFunctions';
import { BinaryHelpers } from './helpers/binary-helpers';
import { RequestHelpers } from './helpers/request-helpers';
import { SchedulingHelpers } from './helpers/scheduling-helpers';
import { SSHTunnelHelpers } from './helpers/ssh-tunnel-helpers';
import { NodeExecutionContext } from './node-execution-context';
const throwOnEmit = () => {
@ -52,10 +52,10 @@ export class TriggerContext extends NodeExecutionContext implements ITriggerFunc
this.helpers = {
createDeferredPromise,
returnJsonArray,
...new BinaryHelpers(workflow, additionalData).exported,
...new RequestHelpers(this, workflow, node, additionalData).exported,
...new SchedulingHelpers(workflow).exported,
...new SSHTunnelHelpers().exported,
...getSSHTunnelFunctions(),
...getRequestHelperFunctions(workflow, node, additionalData),
...getBinaryHelperFunctions(additionalData, workflow.id),
...getSchedulingFunctions(workflow),
};
}
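The same pattern repeats across HookContext, LoadOptionsContext, PollContext, TriggerContext, and WebhookContext: the helpers object is assembled by spreading the plain objects returned by the get*Functions factories instead of instantiating per-concern helper classes. A minimal sketch of the composition, using an invented buildHelpers name and the factory signatures visible in the hunks above:

// Illustrative sketch only: compose a context's helpers from factory functions.
import type { INode, IWorkflowExecuteAdditionalData, Workflow } from 'n8n-workflow';
import {
	getBinaryHelperFunctions,
	getRequestHelperFunctions,
	getSchedulingFunctions,
	getSSHTunnelFunctions,
} from '@/NodeExecuteFunctions';

function buildHelpers(
	workflow: Workflow,
	node: INode,
	additionalData: IWorkflowExecuteAdditionalData,
) {
	return {
		...getSSHTunnelFunctions(),
		...getRequestHelperFunctions(workflow, node, additionalData),
		...getBinaryHelperFunctions(additionalData, workflow.id),
		...getSchedulingFunctions(workflow),
	};
}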

View file

@ -24,15 +24,15 @@ import { ApplicationError, createDeferredPromise } from 'n8n-workflow';
import {
copyBinaryFile,
getAdditionalKeys,
getBinaryHelperFunctions,
getCredentials,
getInputConnectionData,
getNodeParameter,
getNodeWebhookUrl,
getRequestHelperFunctions,
returnJsonArray,
} from '@/NodeExecuteFunctions';
import { BinaryHelpers } from './helpers/binary-helpers';
import { RequestHelpers } from './helpers/request-helpers';
import { NodeExecutionContext } from './node-execution-context';
export class WebhookContext extends NodeExecutionContext implements IWebhookFunctions {
@ -54,8 +54,8 @@ export class WebhookContext extends NodeExecutionContext implements IWebhookFunc
this.helpers = {
createDeferredPromise,
returnJsonArray,
...new BinaryHelpers(workflow, additionalData).exported,
...new RequestHelpers(this, workflow, node, additionalData).exported,
...getRequestHelperFunctions(workflow, node, additionalData),
...getBinaryHelperFunctions(additionalData, workflow.id),
};
this.nodeHelpers = {

View file

@ -1,6 +1,6 @@
{
"name": "n8n-design-system",
"version": "1.56.0",
"version": "1.57.0",
"main": "src/main.ts",
"import": "src/main.ts",
"scripts": {

View file

@ -317,6 +317,7 @@ async function onCopyButtonClick(content: string, e: MouseEvent) {
<textarea
ref="chatInput"
v-model="textInputValue"
class="ignore-key-press-node-creator ignore-key-press-canvas"
:disabled="sessionEnded"
:placeholder="t('assistantChat.inputPlaceholder')"
rows="1"

View file

@ -156,6 +156,7 @@ exports[`AskAssistantChat > does not render retry button if no error is present
data-test-id="chat-input-wrapper"
>
<textarea
class="ignore-key-press-node-creator ignore-key-press-canvas"
data-test-id="chat-input"
placeholder="Enter your response..."
rows="1"
@ -903,6 +904,7 @@ Testing more code
data-test-id="chat-input-wrapper"
>
<textarea
class="ignore-key-press-node-creator ignore-key-press-canvas"
data-test-id="chat-input"
placeholder="Enter your response..."
rows="1"
@ -1078,6 +1080,7 @@ exports[`AskAssistantChat > renders default placeholder chat correctly 1`] = `
data-test-id="chat-input-wrapper"
>
<textarea
class="ignore-key-press-node-creator ignore-key-press-canvas"
data-test-id="chat-input"
placeholder="Enter your response..."
rows="1"
@ -1323,6 +1326,7 @@ exports[`AskAssistantChat > renders end of session chat correctly 1`] = `
data-test-id="chat-input-wrapper"
>
<textarea
class="ignore-key-press-node-creator ignore-key-press-canvas"
data-test-id="chat-input"
disabled=""
placeholder="Enter your response..."
@ -1493,6 +1497,7 @@ exports[`AskAssistantChat > renders error message correctly with retry button 1`
data-test-id="chat-input-wrapper"
>
<textarea
class="ignore-key-press-node-creator ignore-key-press-canvas"
data-test-id="chat-input"
placeholder="Enter your response..."
rows="1"
@ -1737,6 +1742,7 @@ catch(e) {
data-test-id="chat-input-wrapper"
>
<textarea
class="ignore-key-press-node-creator ignore-key-press-canvas"
data-test-id="chat-input"
placeholder="Enter your response..."
rows="1"
@ -1913,6 +1919,7 @@ exports[`AskAssistantChat > renders streaming chat correctly 1`] = `
data-test-id="chat-input-wrapper"
>
<textarea
class="ignore-key-press-node-creator ignore-key-press-canvas"
data-test-id="chat-input"
placeholder="Enter your response..."
rows="1"

View file

@ -1,14 +1,14 @@
<script lang="ts" setup>
<script lang="ts" setup generic="Value extends string">
import RadioButton from './RadioButton.vue';
interface RadioOption {
label: string;
value: string;
value: Value;
disabled?: boolean;
}
interface RadioButtonsProps {
modelValue?: string;
modelValue?: Value;
options?: RadioOption[];
/** @default medium */
size?: 'small' | 'medium';
@ -22,11 +22,11 @@ const props = withDefaults(defineProps<RadioButtonsProps>(), {
});
const emit = defineEmits<{
'update:modelValue': [value: string, e: MouseEvent];
'update:modelValue': [value: Value, e: MouseEvent];
}>();
const onClick = (
option: { label: string; value: string; disabled?: boolean },
option: { label: string; value: Value; disabled?: boolean },
event: MouseEvent,
) => {
if (props.disabled || option.disabled) {

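With the generic in place, the options array and the emitted update:modelValue payload narrow to the same literal union instead of plain string. A hedged sketch of what a consumer gains from this; the EditorMode union and handler are invented for illustration.

// Illustrative sketch only: options and the change handler share one literal union.
type EditorMode = 'code' | 'preview';

const modeOptions: Array<{ label: string; value: EditorMode; disabled?: boolean }> = [
	{ label: 'Code', value: 'code' },
	{ label: 'Preview', value: 'preview' },
];

// Bound to the radio buttons component via :options="modeOptions" and
// @update:model-value="onModeChange", the handler now receives EditorMode
// rather than string, so no cast is needed.
function onModeChange(value: EditorMode, event: MouseEvent) {
	console.log('switched to', value, 'at', event.timeStamp);
}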
View file

@ -1,11 +1,11 @@
<script lang="ts" setup>
<script lang="ts" setup generic="Value extends string | number">
import { onMounted, onUnmounted, ref } from 'vue';
import type { RouteLocationRaw } from 'vue-router';
import N8nIcon from '../N8nIcon';
interface TabOptions {
value: string;
value: Value;
label?: string;
icon?: string;
href?: string;
@ -15,7 +15,7 @@ interface TabOptions {
}
interface TabsProps {
modelValue?: string;
modelValue?: Value;
options?: TabOptions[];
}
@ -56,12 +56,12 @@ onUnmounted(() => {
});
const emit = defineEmits<{
tooltipClick: [tab: string, e: MouseEvent];
'update:modelValue': [tab: string];
tooltipClick: [tab: Value, e: MouseEvent];
'update:modelValue': [tab: Value];
}>();
const handleTooltipClick = (tab: string, event: MouseEvent) => emit('tooltipClick', tab, event);
const handleTabClick = (tab: string) => emit('update:modelValue', tab);
const handleTooltipClick = (tab: Value, event: MouseEvent) => emit('tooltipClick', tab, event);
const handleTabClick = (tab: Value) => emit('update:modelValue', tab);
const scroll = (left: number) => {
const container = tabs.value;
@ -84,7 +84,7 @@ const scrollRight = () => scroll(50);
<div ref="tabs" :class="$style.tabs">
<div
v-for="option in options"
:id="option.value"
:id="option.value.toString()"
:key="option.value"
:class="{ [$style.alignRight]: option.align === 'right' }"
>

View file

@ -7,6 +7,7 @@ export default {
component: N8nTree,
};
// @ts-expect-error Storybook incorrect slot types
export const Default: StoryFn = (args, { argTypes }) => ({
setup: () => ({ args }),
props: Object.keys(argTypes),

View file

@ -1,19 +1,16 @@
<script lang="ts" setup>
<script lang="ts" setup generic="Value extends unknown = unknown">
import { computed, useCssModule } from 'vue';
interface TreeProps {
value?: Record<string, unknown>;
value?: Record<string, Value>;
path?: Array<string | number>;
depth?: number;
nodeClass?: string;
}
defineSlots<{
[key: string]: (props: {
label?: string;
path?: Array<string | number>;
value?: unknown;
}) => never;
label(props: { label: string; path: Array<string | number> }): never;
value(props: { value: Value }): never;
}>();
defineOptions({ name: 'N8nTree' });
@ -29,11 +26,11 @@ const classes = computed((): Record<string, boolean> => {
return { [props.nodeClass]: !!props.nodeClass, [$style.indent]: props.depth > 0 };
});
const isObject = (data: unknown): data is Record<string, unknown> => {
const isObject = (data: unknown): data is Record<string, Value> => {
return typeof data === 'object' && data !== null;
};
const isSimple = (data: unknown): boolean => {
const isSimple = (data: Value): boolean => {
if (data === null || data === undefined) {
return true;
}
@ -70,16 +67,21 @@ const getPath = (key: string): Array<string | number> => {
<div v-else>
<slot v-if="$slots.label" name="label" :label="label" :path="getPath(label)" />
<span v-else>{{ label }}</span>
<n8n-tree
<N8nTree
v-if="isObject(value[label])"
:path="getPath(label)"
:depth="depth + 1"
:value="value[label] as Record<string, unknown>"
:value="value[label]"
:node-class="nodeClass"
>
<template v-for="(_, name) in $slots" #[name]="data">
<slot :name="name" v-bind="data"></slot>
<template v-if="$slots.label" #label="data">
<slot name="label" v-bind="data" />
</template>
</n8n-tree>
<template v-if="$slots.value" #value="data">
<slot name="value" v-bind="data" />
</template>
</N8nTree>
</div>
</div>
</div>

View file

@ -1,6 +1,6 @@
{
"name": "n8n-editor-ui",
"version": "1.66.0",
"version": "1.67.0",
"description": "Workflow Editor UI for n8n",
"main": "index.js",
"scripts": {
@ -93,7 +93,6 @@
"@types/luxon": "^3.2.0",
"@types/uuid": "catalog:",
"@vitest/coverage-v8": "catalog:frontend",
"cross-env": "^7.0.3",
"miragejs": "^0.1.48",
"unplugin-icons": "^0.19.0",
"unplugin-vue-components": "^0.27.2",

View file

@ -1129,8 +1129,8 @@ export interface IInviteResponse {
error?: string;
}
export interface ITab {
value: string | number;
export interface ITab<Value extends string | number = string | number> {
value: Value;
label?: string;
href?: string;
icon?: string;

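Together with the generic N8nTabs change above, this lets a view pin its tab values to a literal union. A hedged sketch; the SettingsTab union and its values are invented, and the import path assumes editor-ui's usual @/Interface alias.

// Illustrative sketch only: typed tabs for a hypothetical settings view.
import type { ITab } from '@/Interface';

type SettingsTab = 'general' | 'members' | 'api';

const settingsTabs: Array<ITab<SettingsTab>> = [
	{ value: 'general', label: 'General' },
	{ value: 'members', label: 'Members' },
	{ value: 'api', label: 'API', icon: 'key' },
];

// `update:modelValue` from the tabs component now delivers SettingsTab
// rather than string | number, so this assignment type-checks directly.
let activeTab: SettingsTab = 'general';
function onTabSelected(tab: SettingsTab) {
	activeTab = tab;
}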
View file

@ -1,38 +1,61 @@
import { createComponentRenderer } from '@/__tests__/render';
import { SETTINGS_STORE_DEFAULT_STATE } from '@/__tests__/utils';
import NodeErrorView from '@/components/Error/NodeErrorView.vue';
import { STORES } from '@/constants';
import { createTestingPinia } from '@pinia/testing';
import { type INode } from 'n8n-workflow';
import type { NodeError } from 'n8n-workflow';
import { useAssistantStore } from '@/stores/assistant.store';
import { useNodeTypesStore } from '@/stores/nodeTypes.store';
import { mockedStore } from '@/__tests__/utils';
import userEvent from '@testing-library/user-event';
import { useNDVStore } from '@/stores/ndv.store';
const DEFAULT_SETUP = {
pinia: createTestingPinia({
initialState: {
[STORES.SETTINGS]: SETTINGS_STORE_DEFAULT_STATE,
},
}),
};
const renderComponent = createComponentRenderer(NodeErrorView);
const renderComponent = createComponentRenderer(NodeErrorView, DEFAULT_SETUP);
let mockAiAssistantStore: ReturnType<typeof mockedStore<typeof useAssistantStore>>;
let mockNodeTypeStore: ReturnType<typeof mockedStore<typeof useNodeTypesStore>>;
let mockNdvStore: ReturnType<typeof mockedStore<typeof useNDVStore>>;
describe('NodeErrorView.vue', () => {
let mockNode: INode;
afterEach(() => {
mockNode = {
parameters: {
mode: 'runOnceForAllItems',
language: 'javaScript',
jsCode: 'cons error = 9;',
notice: '',
let error: NodeError;
beforeEach(() => {
createTestingPinia();
mockAiAssistantStore = mockedStore(useAssistantStore);
mockNodeTypeStore = mockedStore(useNodeTypesStore);
mockNdvStore = mockedStore(useNDVStore);
//@ts-expect-error
error = {
name: 'NodeOperationError',
message: 'Test error message',
description: 'Test error description',
context: {
descriptionKey: 'noInputConnection',
nodeCause: 'Test node cause',
runIndex: '1',
itemIndex: '2',
parameter: 'testParameter',
data: { key: 'value' },
causeDetailed: 'Detailed cause',
},
id: 'd1ce5dc9-f9ae-4ac6-84e5-0696ba175dd9',
name: 'Code',
type: 'n8n-nodes-base.code',
typeVersion: 2,
position: [940, 240],
node: {
parameters: {
mode: 'runOnceForAllItems',
language: 'javaScript',
jsCode: 'cons error = 9;',
notice: '',
},
id: 'd1ce5dc9-f9ae-4ac6-84e5-0696ba175dd9',
name: 'ErrorCode',
type: 'n8n-nodes-base.code',
typeVersion: 2,
position: [940, 240],
},
stack: 'Test stack trace',
};
});
afterEach(() => {
vi.clearAllMocks();
});
@ -40,7 +63,7 @@ describe('NodeErrorView.vue', () => {
const { getByTestId } = renderComponent({
props: {
error: {
node: mockNode,
node: error.node,
messages: ['Unexpected identifier [line 1]'],
},
},
@ -55,7 +78,7 @@ describe('NodeErrorView.vue', () => {
const { getByTestId } = renderComponent({
props: {
error: {
node: mockNode,
node: error.node,
message: 'Unexpected identifier [line 1]',
},
},
@ -67,24 +90,20 @@ describe('NodeErrorView.vue', () => {
});
it('should not render AI assistant button when error happens in deprecated function node', async () => {
const aiAssistantStore = useAssistantStore(DEFAULT_SETUP.pinia);
const nodeTypeStore = useNodeTypesStore(DEFAULT_SETUP.pinia);
//@ts-expect-error
nodeTypeStore.getNodeType = vi.fn(() => ({
mockNodeTypeStore.getNodeType = vi.fn(() => ({
type: 'n8n-nodes-base.function',
typeVersion: 1,
hidden: true,
}));
//@ts-expect-error
aiAssistantStore.canShowAssistantButtonsOnCanvas = true;
mockAiAssistantStore.canShowAssistantButtonsOnCanvas = true;
const { queryByTestId } = renderComponent({
props: {
error: {
node: {
...mockNode,
...error.node,
type: 'n8n-nodes-base.function',
typeVersion: 1,
},
@ -96,4 +115,73 @@ describe('NodeErrorView.vue', () => {
expect(aiAssistantButton).toBeNull();
});
it('renders error message', () => {
const { getByTestId } = renderComponent({
props: { error },
});
expect(getByTestId('node-error-message').textContent).toContain('Test error message');
});
it('renders error description', () => {
const { getByTestId } = renderComponent({
props: { error },
});
expect(getByTestId('node-error-description').innerHTML).toContain(
'This node has no input data. Please make sure this node is connected to another node.',
);
});
it('renders stack trace', () => {
const { getByText } = renderComponent({
props: { error },
});
expect(getByText('Test stack trace')).toBeTruthy();
});
it('renders open node button when the error is in a sub node', () => {
const { getByTestId, queryByTestId } = renderComponent({
props: {
error: {
...error,
name: 'NodeOperationError',
functionality: 'configuration-node',
},
},
});
expect(getByTestId('node-error-view-open-node-button')).toHaveTextContent('Open errored node');
expect(queryByTestId('ask-assistant-button')).not.toBeInTheDocument();
});
it('does not render open node button when the error is not in a sub node', () => {
mockAiAssistantStore.canShowAssistantButtonsOnCanvas = true;
const { getByTestId, queryByTestId } = renderComponent({
props: {
error,
},
});
expect(queryByTestId('node-error-view-open-node-button')).not.toBeInTheDocument();
expect(getByTestId('ask-assistant-button')).toBeInTheDocument();
});
it('opens errored node details when the open node button is clicked', async () => {
const { getByTestId, emitted } = renderComponent({
props: {
error: {
...error,
name: 'NodeOperationError',
functionality: 'configuration-node',
},
},
});
await userEvent.click(getByTestId('node-error-view-open-node-button'));
expect(emitted().click).toHaveLength(1);
expect(mockNdvStore.activeNodeName).toBe(error.node.name);
});
});

View file

@ -117,7 +117,7 @@ const prepareRawMessages = computed(() => {
});
const isAskAssistantAvailable = computed(() => {
if (!node.value) {
if (!node.value || isSubNodeError.value) {
return false;
}
const isCustomNode = node.value.type === undefined || isCommunityPackageName(node.value.type);
@ -132,6 +132,13 @@ const assistantAlreadyAsked = computed(() => {
});
});
const isSubNodeError = computed(() => {
return (
props.error.name === 'NodeOperationError' &&
(props.error as NodeOperationError).functionality === 'configuration-node'
);
});
function nodeVersionTag(nodeType: NodeError['node']): string {
if (!nodeType || ('hidden' in nodeType && nodeType.hidden)) {
return i18n.baseText('nodeSettings.deprecated');
@ -153,19 +160,6 @@ function prepareDescription(description: string): string {
}
function getErrorDescription(): string {
const isSubNodeError =
props.error.name === 'NodeOperationError' &&
(props.error as NodeOperationError).functionality === 'configuration-node';
if (isSubNodeError) {
return prepareDescription(
props.error.description +
i18n.baseText('pushConnection.executionError.openNode', {
interpolate: { node: props.error.node.name },
}),
);
}
if (props.error.context?.descriptionKey) {
const interpolate = {
nodeCause: props.error.context.nodeCause as string,
@ -205,13 +199,10 @@ function addItemIndexSuffix(message: string): string {
function getErrorMessage(): string {
let message = '';
const isSubNodeError =
props.error.name === 'NodeOperationError' &&
(props.error as NodeOperationError).functionality === 'configuration-node';
const isNonEmptyString = (value?: unknown): value is string =>
!!value && typeof value === 'string';
if (isSubNodeError) {
if (isSubNodeError.value) {
message = i18n.baseText('nodeErrorView.errorSubNode', {
interpolate: { node: props.error.node.name },
});
@ -390,6 +381,10 @@ function nodeIsHidden() {
return nodeType?.hidden ?? false;
}
const onOpenErrorNodeDetailClick = () => {
ndvStore.activeNodeName = props.error.node.name;
};
async function onAskAssistantClick() {
const { message, lineNumber, description } = props.error;
const sessionInProgress = !assistantStore.isSessionEnded;
@ -428,14 +423,25 @@ async function onAskAssistantClick() {
</div>
</div>
<div
v-if="error.description || error.context?.descriptionKey"
v-if="(error.description || error.context?.descriptionKey) && !isSubNodeError"
data-test-id="node-error-description"
class="node-error-view__header-description"
v-n8n-html="getErrorDescription()"
></div>
<div v-if="isSubNodeError">
<n8n-button
icon="arrow-right"
type="secondary"
:label="i18n.baseText('pushConnection.executionError.openNode')"
class="node-error-view__button"
data-test-id="node-error-view-open-node-button"
@click="onOpenErrorNodeDetailClick"
/>
</div>
<div
v-if="isAskAssistantAvailable"
class="node-error-view__assistant-button"
class="node-error-view__button"
data-test-id="node-error-view-ask-assistant-button"
>
<InlineAskAssistantButton :asked="assistantAlreadyAsked" @click="onAskAssistantClick" />
@ -696,9 +702,14 @@ async function onAskAssistantClick() {
}
}
&__assistant-button {
&__button {
margin-left: var(--spacing-s);
margin-bottom: var(--spacing-xs);
flex-direction: row-reverse;
span {
margin-right: var(--spacing-5xs);
margin-left: var(--spacing-5xs);
}
}
&__debugging {
@ -831,7 +842,7 @@ async function onAskAssistantClick() {
}
}
.node-error-view__assistant-button {
.node-error-view__button {
margin-top: var(--spacing-xs);
}
</style>
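The new computed boils down to a small predicate over the error object; a standalone sketch for reference, not an export of the component:

// Illustrative sketch only: an error counts as a sub-node error when it is a
// NodeOperationError raised by a configuration node (e.g. an AI sub-node).
import type { NodeError, NodeOperationError } from 'n8n-workflow';

function isSubNodeError(error: NodeError): boolean {
	return (
		error.name === 'NodeOperationError' &&
		(error as NodeOperationError).functionality === 'configuration-node'
	);
}

When it is true, the view hides the regular description and the Ask Assistant button, and instead renders the "Open errored node" button wired to onOpenErrorNodeDetailClick.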

Some files were not shown because too many files have changed in this diff