Merge remote-tracking branch 'origin/master' into ADO-2728/feature-change-auto-add-of-chattrigger

Charlie Kolb 2024-11-07 10:28:34 +01:00
commit f2a63d78b0
289 changed files with 6786 additions and 4286 deletions

View file

@ -15,3 +15,4 @@
# refactor: Move test files alongside tested files (#11504)
7e58fc4fec468aca0b45d5bfe6150e1af632acbc
f32b13c6ed078be042a735bc8621f27e00dc3116

View file

@ -65,6 +65,7 @@ jobs:
continue-on-error: true
with:
workingDir: packages/design-system
onlyChanged: true
projectToken: ${{ secrets.CHROMATIC_PROJECT_TOKEN }}
exitZeroOnChanges: false

View file

@ -38,6 +38,12 @@ jobs:
- name: Build
run: pnpm build
- name: Cache build artifacts
uses: actions/cache/save@v4.0.0
with:
path: ./packages/**/dist
key: ${{ github.sha }}-base:build
- name: Dry-run publishing
run: pnpm publish -r --no-git-checks --dry-run
@ -119,6 +125,40 @@ jobs:
makeLatest: false
body: ${{github.event.pull_request.body}}
create-sentry-release:
name: Create a Sentry Release
needs: [publish-to-npm, publish-to-docker-hub]
runs-on: ubuntu-latest
if: github.event.pull_request.merged == true
timeout-minutes: 5
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
steps:
- uses: actions/checkout@v4.1.1
- name: Restore cached build artifacts
uses: actions/cache/restore@v4.0.0
with:
path: ./packages/**/dist
key: ${{ github.sha }}-base:build
- name: Create a frontend release
uses: getsentry/action-release@v1.7.0
continue-on-error: true
with:
projects: ${{ secrets.SENTRY_FRONTEND_PROJECT }}
version: ${{ needs.publish-to-npm.outputs.release }}
sourcemaps: packages/editor-ui/dist
- name: Create a backend release
uses: getsentry/action-release@v1.7.0
continue-on-error: true
with:
projects: ${{ secrets.SENTRY_BACKEND_PROJECT }}
version: ${{ needs.publish-to-npm.outputs.release }}
sourcemaps: packages/cli/dist packages/core/dist packages/nodes-base/dist packages/@n8n/n8n-nodes-langchain/dist
trigger-release-note:
name: Trigger a release note
needs: [publish-to-npm, create-github-release]

View file

@ -1,3 +1,39 @@
# [1.67.0](https://github.com/n8n-io/n8n/compare/n8n@1.66.0...n8n@1.67.0) (2024-11-06)
### Bug Fixes
* Bring back nodes panel telemetry events ([#11456](https://github.com/n8n-io/n8n/issues/11456)) ([130c942](https://github.com/n8n-io/n8n/commit/130c942f633788d1b2f937d6fea342d4450c6e3d))
* **core:** Account for double quotes in instance base URL ([#11495](https://github.com/n8n-io/n8n/issues/11495)) ([c5191e6](https://github.com/n8n-io/n8n/commit/c5191e697a9a9ebfa2b67587cd01b5835ebf6ea8))
* **core:** Do not delete waiting executions when saving of successful executions is disabled ([#11458](https://github.com/n8n-io/n8n/issues/11458)) ([e8757e5](https://github.com/n8n-io/n8n/commit/e8757e58f69e091ac3d2a2f8e8c8e33ac57c1e47))
* **core:** Don't send an `executionFinished` event to the browser with no run data if the execution has already been cleaned up ([#11502](https://github.com/n8n-io/n8n/issues/11502)) ([d1153f5](https://github.com/n8n-io/n8n/commit/d1153f51e80911cbc8f34ba5f038f349b75295c3))
* **core:** Include `projectId` in range query middleware ([#11590](https://github.com/n8n-io/n8n/issues/11590)) ([a6070af](https://github.com/n8n-io/n8n/commit/a6070afdda29631fd36e5213f52bf815268bcda4))
* **core:** Save execution progress for waiting executions, even when progress saving is disabled ([#11535](https://github.com/n8n-io/n8n/issues/11535)) ([6b9353c](https://github.com/n8n-io/n8n/commit/6b9353c80f61ab36945fff434d98242dc1cab7b3))
* **core:** Use the correct docs URL for regular nodes when used as tools ([#11529](https://github.com/n8n-io/n8n/issues/11529)) ([a092b8e](https://github.com/n8n-io/n8n/commit/a092b8e972e1253d92df416f19096a045858e7c1))
* **Edit Image Node:** Fix Text operation by setting Arial as default font ([#11125](https://github.com/n8n-io/n8n/issues/11125)) ([60c1ace](https://github.com/n8n-io/n8n/commit/60c1ace64be29d651ce7b777fbb576598e38b9d7))
* **editor:** Auto focus first fields on SignIn, SignUp and ForgotMyPassword views ([#11445](https://github.com/n8n-io/n8n/issues/11445)) ([5b5bd72](https://github.com/n8n-io/n8n/commit/5b5bd7291dde17880b7699f7e6832938599ffd8f))
* **editor:** Do not overwrite the webhookId in the new canvas ([#11562](https://github.com/n8n-io/n8n/issues/11562)) ([dfd785b](https://github.com/n8n-io/n8n/commit/dfd785bc0894257eb6e62b0dd8f71248c27aae53))
* **editor:** Ensure Enter key on Cancel button correctly cancels node rename ([#11563](https://github.com/n8n-io/n8n/issues/11563)) ([be05ae3](https://github.com/n8n-io/n8n/commit/be05ae36e7790156cb48b317fc254ae46a3b2d8c))
* **editor:** Fix emitting `n8nReady` notification via `postmessage` on new canvas ([#11558](https://github.com/n8n-io/n8n/issues/11558)) ([463d101](https://github.com/n8n-io/n8n/commit/463d101f3592e6df4afd66c4d0fde0cb4aec34cc))
* **editor:** Fix run index input for RunData view in sub-nodes ([#11538](https://github.com/n8n-io/n8n/issues/11538)) ([434d31c](https://github.com/n8n-io/n8n/commit/434d31ce928342d52b6ab8b78639afd7829216d4))
* **editor:** Fix selected credential being overwritten in NDV ([#11496](https://github.com/n8n-io/n8n/issues/11496)) ([a26c0e2](https://github.com/n8n-io/n8n/commit/a26c0e2c3c7da87bfaba9737a967aa0070810d85))
* **editor:** Keep workflow pristine after load on new canvas ([#11579](https://github.com/n8n-io/n8n/issues/11579)) ([7254359](https://github.com/n8n-io/n8n/commit/7254359855b89769613cd5cc24dbb4f45a7cc76f))
* Show Pinned data in demo mode ([#11490](https://github.com/n8n-io/n8n/issues/11490)) ([ca2a583](https://github.com/n8n-io/n8n/commit/ca2a583b5cbb0cba3ecb694261806de16547aa91))
* Toast not aligned to the bottom when AI assistant is disabled ([#11549](https://github.com/n8n-io/n8n/issues/11549)) ([e80f7e0](https://github.com/n8n-io/n8n/commit/e80f7e0a02a972379f73af6a44de11768081086e))
### Features
* Add Rapid7 InsightVm credentials ([#11462](https://github.com/n8n-io/n8n/issues/11462)) ([46eceab](https://github.com/n8n-io/n8n/commit/46eceabc27ac219b11b85c16c533a2cff848c5dd))
* **AI Transform Node:** UX improvements ([#11280](https://github.com/n8n-io/n8n/issues/11280)) ([8a48407](https://github.com/n8n-io/n8n/commit/8a484077af3d3e1fe2d1b90b1ea9edf4ba41fcb6))
* **Anthropic Chat Model Node:** Add support for Haiku 3.5 ([#11551](https://github.com/n8n-io/n8n/issues/11551)) ([8b39825](https://github.com/n8n-io/n8n/commit/8b398256a81594a52f20f8eb8adf8ff205209bc1))
* **Convert to File Node:** Add delimiter option for converting to CSV ([#11556](https://github.com/n8n-io/n8n/issues/11556)) ([63d454b](https://github.com/n8n-io/n8n/commit/63d454b776c092ff8c6c521a7e083774adb8f649))
* **editor:** Update panning and selection keybindings on new canvas ([#11534](https://github.com/n8n-io/n8n/issues/11534)) ([5e2e205](https://github.com/n8n-io/n8n/commit/5e2e205394adf76faf02aee2d4f21df71848e1d4))
* **Gmail Trigger Node:** Add filter option to include drafts ([#11441](https://github.com/n8n-io/n8n/issues/11441)) ([7a2be77](https://github.com/n8n-io/n8n/commit/7a2be77f384a32ede3acad8fe24fb89227c058bf))
* **Intercom Node:** Update credential to new style ([#11485](https://github.com/n8n-io/n8n/issues/11485)) ([b137e13](https://github.com/n8n-io/n8n/commit/b137e13845f0714ebf7421c837f5ab104b66709b))
# [1.66.0](https://github.com/n8n-io/n8n/compare/n8n@1.65.0...n8n@1.66.0) (2024-10-31)

View file

@ -26,6 +26,22 @@ const nodeDetailsView = new NDV();
const NEW_CREDENTIAL_NAME = 'Something else';
const NEW_CREDENTIAL_NAME2 = 'Something else entirely';
function createNotionCredential() {
workflowPage.actions.addNodeToCanvas(NOTION_NODE_NAME);
workflowPage.actions.openNode(NOTION_NODE_NAME);
workflowPage.getters.nodeCredentialsSelect().click();
getVisibleSelect().find('li').last().click();
credentialsModal.actions.fillCredentialsForm();
cy.get('body').type('{esc}');
workflowPage.actions.deleteNode(NOTION_NODE_NAME);
}
function deleteSelectedCredential() {
workflowPage.getters.nodeCredentialsEditButton().click();
credentialsModal.getters.deleteButton().click();
cy.get('.el-message-box').find('button').contains('Yes').click();
}
describe('Credentials', () => {
beforeEach(() => {
cy.visit(credentialsPage.url);
@ -229,6 +245,40 @@ describe('Credentials', () => {
.should('have.value', NEW_CREDENTIAL_NAME);
});
it('should set a default credential when adding nodes', () => {
workflowPage.actions.visit();
createNotionCredential();
workflowPage.actions.addNodeToCanvas(NOTION_NODE_NAME, true, true);
workflowPage.getters
.nodeCredentialsSelect()
.find('input')
.should('have.value', NEW_NOTION_ACCOUNT_NAME);
deleteSelectedCredential();
});
it('should set a default credential when editing a node', () => {
workflowPage.actions.visit();
createNotionCredential();
workflowPage.actions.addNodeToCanvas(HTTP_REQUEST_NODE_NAME, true, true);
nodeDetailsView.getters.parameterInput('authentication').click();
getVisibleSelect().find('li').contains('Predefined').click();
nodeDetailsView.getters.parameterInput('nodeCredentialType').click();
getVisibleSelect().find('li').contains('Notion API').click();
workflowPage.getters
.nodeCredentialsSelect()
.find('input')
.should('have.value', NEW_NOTION_ACCOUNT_NAME);
deleteSelectedCredential();
});
it('should setup generic authentication for HTTP node', () => {
workflowPage.actions.visit();
workflowPage.actions.addNodeToCanvas(SCHEDULE_TRIGGER_NODE_NAME);

View file

@ -44,6 +44,7 @@ import {
openNode,
getConnectionBySourceAndTarget,
} from '../composables/workflow';
import { NDV, WorkflowPage } from '../pages';
import { createMockNodeExecutionData, runMockWorkflowExecution } from '../utils';
describe('Langchain Integration', () => {
@ -232,12 +233,7 @@ describe('Langchain Integration', () => {
const inputMessage = 'Hello!';
const outputMessage = 'Hi there! How can I assist you today?';
runMockWorkflowExecution({
trigger: () => {
sendManualChatMessage(inputMessage);
},
runData: [
const runData = [
createMockNodeExecutionData(MANUAL_CHAT_TRIGGER_NODE_NAME, {
jsonData: {
main: { input: inputMessage },
@ -320,7 +316,13 @@ describe('Langchain Integration', () => {
main: { output: 'Hi there! How can I assist you today?' },
},
}),
],
];
runMockWorkflowExecution({
trigger: () => {
sendManualChatMessage(inputMessage);
},
runData,
lastNodeExecuted: AGENT_NODE_NAME,
});
@ -357,4 +359,56 @@ describe('Langchain Integration', () => {
getConnectionBySourceAndTarget(CHAT_TRIGGER_NODE_DISPLAY_NAME, AGENT_NODE_NAME).should('exist');
getNodes().should('have.length', 3);
});
it('should render runItems for sub-nodes and allow switching between them', () => {
const workflowPage = new WorkflowPage();
const ndv = new NDV();
cy.visit(workflowPage.url);
cy.createFixtureWorkflow('In_memory_vector_store_fake_embeddings.json');
workflowPage.actions.zoomToFit();
workflowPage.actions.executeNode('Populate VS');
cy.get('[data-label="25 items"]').should('exist');
const assertInputOutputText = (text: string, assertion: 'exist' | 'not.exist') => {
ndv.getters.outputPanel().contains(text).should(assertion);
ndv.getters.inputPanel().contains(text).should(assertion);
};
workflowPage.actions.openNode('Character Text Splitter');
ndv.getters.outputRunSelector().should('exist');
ndv.getters.inputRunSelector().should('exist');
ndv.getters.inputRunSelector().find('input').should('include.value', '3 of 3');
ndv.getters.outputRunSelector().find('input').should('include.value', '3 of 3');
assertInputOutputText('Kyiv', 'exist');
assertInputOutputText('Berlin', 'not.exist');
assertInputOutputText('Prague', 'not.exist');
ndv.actions.changeOutputRunSelector('2 of 3');
assertInputOutputText('Berlin', 'exist');
assertInputOutputText('Kyiv', 'not.exist');
assertInputOutputText('Prague', 'not.exist');
ndv.actions.changeOutputRunSelector('1 of 3');
assertInputOutputText('Prague', 'exist');
assertInputOutputText('Berlin', 'not.exist');
assertInputOutputText('Kyiv', 'not.exist');
ndv.actions.toggleInputRunLinking();
ndv.actions.changeOutputRunSelector('2 of 3');
ndv.getters.inputRunSelector().find('input').should('include.value', '1 of 3');
ndv.getters.outputRunSelector().find('input').should('include.value', '2 of 3');
ndv.getters.inputPanel().contains('Prague').should('exist');
ndv.getters.inputPanel().contains('Berlin').should('not.exist');
ndv.getters.outputPanel().contains('Berlin').should('exist');
ndv.getters.outputPanel().contains('Prague').should('not.exist');
ndv.actions.toggleInputRunLinking();
ndv.getters.inputRunSelector().find('input').should('include.value', '1 of 3');
ndv.getters.outputRunSelector().find('input').should('include.value', '1 of 3');
assertInputOutputText('Prague', 'exist');
assertInputOutputText('Berlin', 'not.exist');
assertInputOutputText('Kyiv', 'not.exist');
});
});

View file

@ -441,7 +441,9 @@ describe('Projects', { disableAutoLogin: true }, () => {
.should('contain.text', 'Notion account personal project');
});
it('should move resources between projects', () => {
// Skip flaky test
// eslint-disable-next-line n8n-local-rules/no-skipped-tests
it.skip('should move resources between projects', () => {
cy.signinAsOwner();
cy.visit(workflowsPage.url);
@ -684,7 +686,9 @@ describe('Projects', { disableAutoLogin: true }, () => {
.should('have.length', 1);
});
it('should allow to change inaccessible credential when the workflow was moved to a team project', () => {
// Skip flaky test
// eslint-disable-next-line n8n-local-rules/no-skipped-tests
it.skip('should allow to change inaccessible credential when the workflow was moved to a team project', () => {
cy.signinAsOwner();
cy.visit(workflowsPage.url);

File diff suppressed because one or more lines are too long

View file

@ -16,7 +16,7 @@ export function createMockNodeExecutionData(
return {
[name]: {
startTime: new Date().getTime(),
executionTime: 0,
executionTime: 1,
executionStatus,
data: jsonData
? Object.keys(jsonData).reduce((acc, key) => {
@ -33,6 +33,7 @@ export function createMockNodeExecutionData(
}, {} as ITaskDataConnections)
: data,
source: [null],
inputOverride,
...rest,
},
};

View file

@ -1,6 +1,6 @@
{
"name": "n8n-monorepo",
"version": "1.66.0",
"version": "1.67.0",
"private": true,
"engines": {
"node": ">=20.15",
@ -45,6 +45,7 @@
"@types/jest": "^29.5.3",
"@types/node": "*",
"@types/supertest": "^6.0.2",
"cross-env": "^7.0.3",
"jest": "^29.6.2",
"jest-environment-jsdom": "^29.6.2",
"jest-expect-message": "^1.1.3",
@ -83,7 +84,6 @@
},
"patchedDependencies": {
"typedi@0.10.0": "patches/typedi@0.10.0.patch",
"@sentry/cli@2.36.2": "patches/@sentry__cli@2.36.2.patch",
"pkce-challenge@3.0.0": "patches/pkce-challenge@3.0.0.patch",
"pyodide@0.23.4": "patches/pyodide@0.23.4.patch",
"@types/express-serve-static-core@4.17.43": "patches/@types__express-serve-static-core@4.17.43.patch",

View file

@ -1,11 +1,11 @@
{
"name": "@n8n/chat",
"version": "0.29.0",
"version": "0.30.0",
"scripts": {
"dev": "pnpm run storybook",
"build": "pnpm build:vite && pnpm build:bundle",
"build:vite": "vite build",
"build:bundle": "INCLUDE_VUE=true vite build",
"build:vite": "cross-env vite build",
"build:bundle": "cross-env INCLUDE_VUE=true vite build",
"preview": "vite preview",
"test:dev": "vitest",
"test": "vitest run",

View file

@ -1,6 +1,6 @@
{
"name": "@n8n/config",
"version": "1.16.0",
"version": "1.17.0",
"scripts": {
"clean": "rimraf dist .turbo",
"dev": "pnpm watch",

View file

@ -7,6 +7,7 @@ export const LOG_SCOPES = [
'external-secrets',
'license',
'multi-main-setup',
'pruning',
'pubsub',
'redis',
'scaling',

View file

@ -50,4 +50,8 @@ export class TaskRunnersConfig {
/** How many concurrent tasks can a runner execute at a time */
@Env('N8N_RUNNERS_MAX_CONCURRENCY')
maxConcurrency: number = 5;
/** Should the output of deduplication be asserted for correctness */
@Env('N8N_RUNNERS_ASSERT_DEDUPLICATION_OUTPUT')
assertDeduplicationOutput: boolean = false;
}
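
A sketch of how the new flag would typically be read, assuming the config class is resolved through the package's typedi Container as elsewhere in the monorepo (the retrieval pattern, not part of this diff, is an assumption):

import { Container } from 'typedi';
import { TaskRunnersConfig } from '@n8n/config';

// Enabled via N8N_RUNNERS_ASSERT_DEDUPLICATION_OUTPUT=true
const runnersConfig = Container.get(TaskRunnersConfig);

if (runnersConfig.assertDeduplicationOutput) {
  console.log('Deduplication output assertions are enabled');
}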

View file

@ -233,6 +233,7 @@ describe('GlobalConfig', () => {
launcherRunner: 'javascript',
maxOldSpaceSize: '',
maxConcurrency: 5,
assertDeduplicationOutput: false,
},
sentry: {
backendDsn: '',

View file

@ -206,10 +206,28 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeE
// If the steps are an AgentFinish and the outputParser is defined, it must mean that the LLM didn't use the `format_final_response` tool, so we will try to parse the output manually
if (outputParser && typeof steps === 'object' && (steps as AgentFinish).returnValues) {
const finalResponse = (steps as AgentFinish).returnValues;
const returnValues = (await outputParser.parse(finalResponse as unknown as string)) as Record<
string,
unknown
>;
let parserInput: string;
if (finalResponse instanceof Object) {
if ('output' in finalResponse) {
try {
// If the output is an object, we try to parse it as JSON first:
// the parser expects a stringified JSON object like { "output": { ... } },
// so we parse the output before wrapping and stringifying it again
parserInput = JSON.stringify({ output: jsonParse(finalResponse.output) });
} catch (error) {
// If parsing of the output fails, we will use the raw output
parserInput = finalResponse.output;
}
} else {
// If the output is not an object, we will stringify it as it is
parserInput = JSON.stringify(finalResponse);
}
} else {
parserInput = finalResponse;
}
const returnValues = (await outputParser.parse(parserInput)) as Record<string, unknown>;
return handleParsedStepOutput(returnValues);
}
return handleAgentFinishOutput(steps);
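
The branching above can be read as one small normalization step. A condensed sketch of the same logic (`toParserInput` is a hypothetical helper name, not part of the commit; `jsonParse` is the n8n-workflow helper used above, which throws on invalid input):

import { jsonParse } from 'n8n-workflow';

// The output parser expects a stringified object shaped like { "output": ... },
// so object results are re-wrapped, while a plain string passes through as-is.
function toParserInput(finalResponse: unknown): string {
  if (finalResponse instanceof Object) {
    if ('output' in finalResponse) {
      const output = (finalResponse as { output: string }).output;
      try {
        return JSON.stringify({ output: jsonParse(output) });
      } catch {
        return output; // not valid JSON, use the raw output
      }
    }
    return JSON.stringify(finalResponse);
  }
  return finalResponse as string;
}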

View file

@ -1,4 +1,7 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import { ChatAnthropic } from '@langchain/anthropic';
import type { LLMResult } from '@langchain/core/outputs';
import {
NodeConnectionType,
type INodePropertyOptions,
@ -9,8 +12,6 @@ import {
type SupplyData,
} from 'n8n-workflow';
import { ChatAnthropic } from '@langchain/anthropic';
import type { LLMResult } from '@langchain/core/outputs';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
@ -36,6 +37,10 @@ const modelField: INodeProperties = {
name: 'Claude 3 Sonnet(20240229)',
value: 'claude-3-sonnet-20240229',
},
{
name: 'Claude 3.5 Haiku(20241022)',
value: 'claude-3-5-haiku-20241022',
},
{
name: 'Claude 3 Haiku(20240307)',
value: 'claude-3-haiku-20240307',

View file

@ -1,5 +1,6 @@
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import { NodeConnectionType } from 'n8n-workflow';
import { PromptTemplate } from '@langchain/core/prompts';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type {
ISupplyDataFunctions,
INodeType,
@ -7,6 +8,7 @@ import type {
SupplyData,
} from 'n8n-workflow';
import { NAIVE_FIX_PROMPT } from './prompt';
import {
N8nOutputFixingParser,
type N8nStructuredOutputParser,
@ -65,6 +67,27 @@ export class OutputParserAutofixing implements INodeType {
default: '',
},
getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
{
displayName: 'Options',
name: 'options',
type: 'collection',
placeholder: 'Add Option',
default: {},
options: [
{
displayName: 'Retry Prompt',
name: 'prompt',
type: 'string',
default: NAIVE_FIX_PROMPT,
typeOptions: {
rows: 10,
},
hint: 'Should include "{error}", "{instructions}", and "{completion}" placeholders',
description:
'Prompt template used for fixing the output. Uses placeholders: "{instructions}" for parsing rules, "{completion}" for the failed attempt, and "{error}" for the validation error message.',
},
],
},
],
};
@ -77,8 +100,20 @@ export class OutputParserAutofixing implements INodeType {
NodeConnectionType.AiOutputParser,
itemIndex,
)) as N8nStructuredOutputParser;
const prompt = this.getNodeParameter('options.prompt', itemIndex, NAIVE_FIX_PROMPT) as string;
const parser = new N8nOutputFixingParser(this, model, outputParser);
if (prompt.length === 0 || !prompt.includes('{error}')) {
throw new NodeOperationError(
this.getNode(),
'Auto-fixing parser prompt has to contain {error} placeholder',
);
}
const parser = new N8nOutputFixingParser(
this,
model,
outputParser,
PromptTemplate.fromTemplate(prompt),
);
return {
response: parser,
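
For reference, a custom template supplied through the new `options.prompt` field only passes the check above if it keeps the `{error}` placeholder (and, per the hint, should also keep `{instructions}` and `{completion}`); the wording below is purely illustrative:

const customRetryPrompt = `You produced output that failed validation.

Parsing rules:
{instructions}

Your previous attempt:
{completion}

Validation error:
{error}

Respond again with output that satisfies the rules above.`;

// A template without "{error}" makes supplyData throw:
// "Auto-fixing parser prompt has to contain {error} placeholder"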

View file

@ -0,0 +1,16 @@
export const NAIVE_FIX_PROMPT = `Instructions:
--------------
{instructions}
--------------
Completion:
--------------
{completion}
--------------
Above, the Completion did not satisfy the constraints given in the Instructions.
Error:
--------------
{error}
--------------
Please try again. Please only respond with an answer that satisfies the constraints laid out in the Instructions:`;
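
The auto-fixing parser pipes this template (or a user-supplied one) into the model via `getRetryChain`. A minimal invocation sketch, assuming a LangChain chat model `model`, a structured parser `parser`, and the failed completion plus its parse error from the first attempt:

import { PromptTemplate } from '@langchain/core/prompts';
import { NAIVE_FIX_PROMPT } from './prompt';

const retryChain = PromptTemplate.fromTemplate(NAIVE_FIX_PROMPT).pipe(model);

// The placeholders map one-to-one onto the template above
const fixedMessage = await retryChain.invoke({
  instructions: parser.getFormatInstructions(),
  completion: failedCompletion,
  error: parseError.message,
});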

View file

@ -1,15 +1,19 @@
/* eslint-disable @typescript-eslint/unbound-method */
/* eslint-disable @typescript-eslint/no-unsafe-call */
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import { OutputParserException } from '@langchain/core/output_parsers';
import type { MockProxy } from 'jest-mock-extended';
import { mock } from 'jest-mock-extended';
import { normalizeItems } from 'n8n-core';
import type { IExecuteFunctions, IWorkflowDataProxyData } from 'n8n-workflow';
import { ApplicationError, NodeConnectionType } from 'n8n-workflow';
import { ApplicationError, NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import { N8nOutputFixingParser } from '../../../../utils/output_parsers/N8nOutputParser';
import type { N8nStructuredOutputParser } from '../../../../utils/output_parsers/N8nOutputParser';
import type {
N8nOutputFixingParser,
N8nStructuredOutputParser,
} from '../../../../utils/output_parsers/N8nOutputParser';
import { OutputParserAutofixing } from '../OutputParserAutofixing.node';
import { NAIVE_FIX_PROMPT } from '../prompt';
describe('OutputParserAutofixing', () => {
let outputParser: OutputParserAutofixing;
@ -34,6 +38,13 @@ describe('OutputParserAutofixing', () => {
throw new ApplicationError('Unexpected connection type');
});
thisArg.getNodeParameter.mockReset();
thisArg.getNodeParameter.mockImplementation((parameterName) => {
if (parameterName === 'options.prompt') {
return NAIVE_FIX_PROMPT;
}
throw new ApplicationError('Not implemented');
});
});
afterEach(() => {
@ -48,6 +59,56 @@ describe('OutputParserAutofixing', () => {
});
}
describe('Configuration', () => {
it('should throw error when prompt template does not contain {error} placeholder', async () => {
thisArg.getNodeParameter.mockImplementation((parameterName) => {
if (parameterName === 'options.prompt') {
return 'Invalid prompt without error placeholder';
}
throw new ApplicationError('Not implemented');
});
await expect(outputParser.supplyData.call(thisArg, 0)).rejects.toThrow(
new NodeOperationError(
thisArg.getNode(),
'Auto-fixing parser prompt has to contain {error} placeholder',
),
);
});
it('should throw error when prompt template is empty', async () => {
thisArg.getNodeParameter.mockImplementation((parameterName) => {
if (parameterName === 'options.prompt') {
return '';
}
throw new ApplicationError('Not implemented');
});
await expect(outputParser.supplyData.call(thisArg, 0)).rejects.toThrow(
new NodeOperationError(
thisArg.getNode(),
'Auto-fixing parser prompt has to contain {error} placeholder',
),
);
});
it('should use default prompt when none specified', async () => {
thisArg.getNodeParameter.mockImplementation((parameterName) => {
if (parameterName === 'options.prompt') {
return NAIVE_FIX_PROMPT;
}
throw new ApplicationError('Not implemented');
});
const { response } = (await outputParser.supplyData.call(thisArg, 0)) as {
response: N8nOutputFixingParser;
};
expect(response).toBeDefined();
});
});
describe('Parsing', () => {
it('should successfully parse valid output without needing to fix it', async () => {
const validOutput = { name: 'Alice', age: 25 };
@ -57,36 +118,30 @@ describe('OutputParserAutofixing', () => {
response: N8nOutputFixingParser;
};
// Ensure the response contains the output-fixing parser
expect(response).toBeDefined();
expect(response).toBeInstanceOf(N8nOutputFixingParser);
const result = await response.parse('{"name": "Alice", "age": 25}');
// Validate that the parser succeeds without retry
expect(result).toEqual(validOutput);
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(1); // Only one call to parse
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(1);
});
it('should throw an error when both structured parser and fixing parser fail', async () => {
mockStructuredOutputParser.parse
.mockRejectedValueOnce(new Error('Invalid JSON')) // First attempt fails
.mockRejectedValueOnce(new Error('Fixing attempt failed')); // Second attempt fails
it('should not retry on non-OutputParserException errors', async () => {
const error = new Error('Some other error');
mockStructuredOutputParser.parse.mockRejectedValueOnce(error);
const { response } = (await outputParser.supplyData.call(thisArg, 0)) as {
response: N8nOutputFixingParser;
};
response.getRetryChain = getMockedRetryChain('{}');
await expect(response.parse('Invalid JSON string')).rejects.toThrow('Fixing attempt failed');
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2);
await expect(response.parse('Invalid JSON string')).rejects.toThrow(error);
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(1);
});
it('should reject on the first attempt and succeed on retry with the parsed content', async () => {
it('should retry on OutputParserException and succeed', async () => {
const validOutput = { name: 'Bob', age: 28 };
mockStructuredOutputParser.parse.mockRejectedValueOnce(new Error('Invalid JSON'));
mockStructuredOutputParser.parse
.mockRejectedValueOnce(new OutputParserException('Invalid JSON'))
.mockResolvedValueOnce(validOutput);
const { response } = (await outputParser.supplyData.call(thisArg, 0)) as {
response: N8nOutputFixingParser;
@ -94,27 +149,42 @@ describe('OutputParserAutofixing', () => {
response.getRetryChain = getMockedRetryChain(JSON.stringify(validOutput));
mockStructuredOutputParser.parse.mockResolvedValueOnce(validOutput);
const result = await response.parse('Invalid JSON string');
expect(result).toEqual(validOutput);
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2); // First fails, second succeeds
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2);
});
it('should handle non-JSON formatted response from fixing parser', async () => {
mockStructuredOutputParser.parse.mockRejectedValueOnce(new Error('Invalid JSON'));
it('should handle failed retry attempt', async () => {
mockStructuredOutputParser.parse
.mockRejectedValueOnce(new OutputParserException('Invalid JSON'))
.mockRejectedValueOnce(new Error('Still invalid JSON'));
const { response } = (await outputParser.supplyData.call(thisArg, 0)) as {
response: N8nOutputFixingParser;
};
response.getRetryChain = getMockedRetryChain('This is not JSON');
response.getRetryChain = getMockedRetryChain('Still not valid JSON');
mockStructuredOutputParser.parse.mockRejectedValueOnce(new Error('Unexpected token'));
await expect(response.parse('Invalid JSON string')).rejects.toThrow('Still invalid JSON');
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2);
});
// Expect the structured parser to throw an error on invalid JSON from retry
await expect(response.parse('Invalid JSON string')).rejects.toThrow('Unexpected token');
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(2); // First fails, second tries and fails
it('should throw non-OutputParserException errors immediately without retry', async () => {
const customError = new Error('Database connection error');
const retryChainSpy = jest.fn();
mockStructuredOutputParser.parse.mockRejectedValueOnce(customError);
const { response } = (await outputParser.supplyData.call(thisArg, 0)) as {
response: N8nOutputFixingParser;
};
response.getRetryChain = retryChainSpy;
await expect(response.parse('Some input')).rejects.toThrow('Database connection error');
expect(mockStructuredOutputParser.parse).toHaveBeenCalledTimes(1);
expect(retryChainSpy).not.toHaveBeenCalled();
});
});
});

View file

@ -1,6 +1,6 @@
{
"name": "@n8n/n8n-nodes-langchain",
"version": "1.66.0",
"version": "1.67.0",
"description": "",
"main": "index.js",
"scripts": {

View file

@ -1,12 +1,12 @@
import type { Callbacks } from '@langchain/core/callbacks/manager';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { AIMessage } from '@langchain/core/messages';
import { BaseOutputParser } from '@langchain/core/output_parsers';
import { BaseOutputParser, OutputParserException } from '@langchain/core/output_parsers';
import type { PromptTemplate } from '@langchain/core/prompts';
import type { ISupplyDataFunctions } from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';
import type { N8nStructuredOutputParser } from './N8nStructuredOutputParser';
import { NAIVE_FIX_PROMPT } from './prompt';
import { logAiEvent } from '../helpers';
export class N8nOutputFixingParser extends BaseOutputParser {
@ -16,12 +16,13 @@ export class N8nOutputFixingParser extends BaseOutputParser {
private context: ISupplyDataFunctions,
private model: BaseLanguageModel,
private outputParser: N8nStructuredOutputParser,
private fixPromptTemplate: PromptTemplate,
) {
super();
}
getRetryChain() {
return NAIVE_FIX_PROMPT.pipe(this.model);
return this.fixPromptTemplate.pipe(this.model);
}
/**
@ -47,11 +48,14 @@ export class N8nOutputFixingParser extends BaseOutputParser {
return response;
} catch (error) {
if (!(error instanceof OutputParserException)) {
throw error;
}
try {
// Second attempt: use retry chain to fix the output
const result = (await this.getRetryChain().invoke({
completion,
error,
error: error.message,
instructions: this.getFormatInstructions(),
})) as AIMessage;

View file

@ -1,6 +1,6 @@
{
"name": "@n8n/task-runner",
"version": "1.4.0",
"version": "1.5.0",
"scripts": {
"clean": "rimraf dist .turbo",
"start": "node dist/start.js",
@ -17,10 +17,22 @@
},
"main": "dist/start.js",
"module": "src/start.ts",
"types": "dist/start.d.ts",
"types": "dist/index.d.ts",
"files": [
"dist/**/*"
],
"exports": {
"./start": {
"require": "./dist/start.js",
"import": "./src/start.ts",
"types": "./dist/start.d.ts"
},
".": {
"require": "./dist/index.js",
"import": "./src/index.ts",
"types": "./dist/index.d.ts"
}
},
"dependencies": {
"@n8n/config": "workspace:*",
"acorn": "8.14.0",

View file

@ -63,4 +63,35 @@ describe('TaskRunnerNodeTypes', () => {
});
});
});
describe('addNodeTypeDescriptions', () => {
it('should add new node types', () => {
const nodeTypes = new TaskRunnerNodeTypes(TYPES);
const nodeTypeDescriptions = [
{ name: 'new-type', version: 1 },
{ name: 'new-type', version: 2 },
] as INodeTypeDescription[];
nodeTypes.addNodeTypeDescriptions(nodeTypeDescriptions);
expect(nodeTypes.getByNameAndVersion('new-type', 1)).toEqual({
description: { name: 'new-type', version: 1 },
});
expect(nodeTypes.getByNameAndVersion('new-type', 2)).toEqual({
description: { name: 'new-type', version: 2 },
});
});
});
describe('onlyUnknown', () => {
it('should return only unknown node types', () => {
const nodeTypes = new TaskRunnerNodeTypes(TYPES);
const candidate = { name: 'unknown', version: 1 };
expect(nodeTypes.onlyUnknown([candidate])).toEqual([candidate]);
expect(nodeTypes.onlyUnknown([SINGLE_VERSIONED])).toEqual([]);
});
});
});

View file

@ -0,0 +1,29 @@
import type { IExecuteData, INodeExecutionData } from 'n8n-workflow';
import type { DataRequestResponse } from '@/runner-types';
/**
* Reconstructs data from a DataRequestResponse to the initial
* data structures.
*/
export class DataRequestResponseReconstruct {
/**
* Reconstructs `connectionInputData` from a DataRequestResponse
*/
reconstructConnectionInputData(
inputData: DataRequestResponse['inputData'],
): INodeExecutionData[] {
return inputData?.main?.[0] ?? [];
}
/**
* Reconstruct `executeData` from a DataRequestResponse
*/
reconstructExecuteData(response: DataRequestResponse): IExecuteData {
return {
data: response.inputData,
node: response.node,
source: response.connectionInputSource,
};
}
}
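
`reconstructTaskData` in the JS task runner (later in this commit) uses the helper roughly like this; `response` stands for a `DataRequestResponse` received from the broker:

const reconstruct = new DataRequestResponseReconstruct();

// Rebuild the pieces that are no longer shipped over the wire
const connectionInputData = reconstruct.reconstructConnectionInputData(response.inputData);
const executeData = reconstruct.reconstructExecuteData(response);

const taskData = { ...response, connectionInputData, executeData };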

View file

@ -1,2 +1,4 @@
export * from './task-runner';
export * from './runner-types';
export * from './message-types';
export * from './data-request/data-request-response-reconstruct';
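
Combined with the new `exports` map in the package.json above, consumers can import library code from the package root while the executable entry stays available under `./start`; a sketch, assuming the package is consumed under its published name:

// Root entry point ("."): library exports
import { TaskRunner, DataRequestResponseReconstruct } from '@n8n/task-runner';
import type { BrokerMessage, RunnerMessage } from '@n8n/task-runner';

// Separate executable entry point ("./start"):
// node -e "require('@n8n/task-runner/start')"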

View file

@ -3,15 +3,21 @@ import type { CodeExecutionMode, IDataObject } from 'n8n-workflow';
import fs from 'node:fs';
import { builtinModules } from 'node:module';
import type { JsRunnerConfig } from '@/config/js-runner-config';
import { MainConfig } from '@/config/main-config';
import { ExecutionError } from '@/js-task-runner/errors/execution-error';
import { ValidationError } from '@/js-task-runner/errors/validation-error';
import type { DataRequestResponse, JSExecSettings } from '@/js-task-runner/js-task-runner';
import type { JSExecSettings } from '@/js-task-runner/js-task-runner';
import { JsTaskRunner } from '@/js-task-runner/js-task-runner';
import type { DataRequestResponse } from '@/runner-types';
import type { Task } from '@/task-runner';
import { newCodeTaskData, newTaskWithSettings, withPairedItem, wrapIntoJson } from './test-data';
import type { JsRunnerConfig } from '../../config/js-runner-config';
import { MainConfig } from '../../config/main-config';
import { ExecutionError } from '../errors/execution-error';
import {
newDataRequestResponse,
newTaskWithSettings,
withPairedItem,
wrapIntoJson,
} from './test-data';
jest.mock('ws');
@ -68,7 +74,7 @@ describe('JsTaskRunner', () => {
nodeMode: 'runOnceForAllItems',
...settings,
}),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson)),
taskData: newDataRequestResponse(inputItems.map(wrapIntoJson)),
runner,
});
};
@ -91,7 +97,7 @@ describe('JsTaskRunner', () => {
nodeMode: 'runOnceForEachItem',
...settings,
}),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson)),
taskData: newDataRequestResponse(inputItems.map(wrapIntoJson)),
runner,
});
};
@ -108,7 +114,7 @@ describe('JsTaskRunner', () => {
await execTaskWithParams({
task,
taskData: newCodeTaskData([wrapIntoJson({})]),
taskData: newDataRequestResponse([wrapIntoJson({})]),
});
expect(defaultTaskRunner.makeRpcCall).toHaveBeenCalledWith(task.taskId, 'logNodeOutput', [
@ -243,7 +249,7 @@ describe('JsTaskRunner', () => {
code: 'return { val: $env.VAR1 }',
nodeMode: 'runOnceForAllItems',
}),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), {
envProviderState: {
isEnvAccessBlocked: false,
isProcessAvailable: true,
@ -262,7 +268,7 @@ describe('JsTaskRunner', () => {
code: 'return { val: $env.VAR1 }',
nodeMode: 'runOnceForAllItems',
}),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), {
envProviderState: {
isEnvAccessBlocked: true,
isProcessAvailable: true,
@ -279,7 +285,7 @@ describe('JsTaskRunner', () => {
code: 'return Object.values($env).concat(Object.keys($env))',
nodeMode: 'runOnceForAllItems',
}),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), {
envProviderState: {
isEnvAccessBlocked: false,
isProcessAvailable: true,
@ -298,7 +304,7 @@ describe('JsTaskRunner', () => {
code: 'return { val: $env.N8N_RUNNERS_N8N_URI }',
nodeMode: 'runOnceForAllItems',
}),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), {
envProviderState: undefined,
}),
});
@ -313,7 +319,7 @@ describe('JsTaskRunner', () => {
code: 'return { val: Buffer.from("test-buffer").toString() }',
nodeMode: 'runOnceForAllItems',
}),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), {
envProviderState: undefined,
}),
});
@ -325,7 +331,7 @@ describe('JsTaskRunner', () => {
code: 'return { val: Buffer.from("test-buffer").toString() }',
nodeMode: 'runOnceForEachItem',
}),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newDataRequestResponse(inputItems.map(wrapIntoJson), {
envProviderState: undefined,
}),
});
@ -771,7 +777,7 @@ describe('JsTaskRunner', () => {
code: 'unknown',
nodeMode,
}),
taskData: newCodeTaskData([wrapIntoJson({ a: 1 })]),
taskData: newDataRequestResponse([wrapIntoJson({ a: 1 })]),
}),
).rejects.toThrow(ExecutionError);
},
@ -793,7 +799,7 @@ describe('JsTaskRunner', () => {
jest.spyOn(runner, 'sendOffers').mockImplementation(() => {});
jest
.spyOn(runner, 'requestData')
.mockResolvedValue(newCodeTaskData([wrapIntoJson({ a: 1 })]));
.mockResolvedValue(newDataRequestResponse([wrapIntoJson({ a: 1 })]));
await runner.receivedSettings(taskId, task.settings);

View file

@ -2,7 +2,8 @@ import type { IDataObject, INode, INodeExecutionData, ITaskData } from 'n8n-work
import { NodeConnectionType } from 'n8n-workflow';
import { nanoid } from 'nanoid';
import type { DataRequestResponse, JSExecSettings } from '@/js-task-runner/js-task-runner';
import type { JSExecSettings } from '@/js-task-runner/js-task-runner';
import type { DataRequestResponse } from '@/runner-types';
import type { Task } from '@/task-runner';
/**
@ -46,10 +47,10 @@ export const newTaskData = (opts: Partial<ITaskData> & Pick<ITaskData, 'source'>
});
/**
* Creates a new all code task data with the given options
* Creates a new data request response with the given options
*/
export const newCodeTaskData = (
codeNodeInputData: INodeExecutionData[],
export const newDataRequestResponse = (
inputData: INodeExecutionData[],
opts: Partial<DataRequestResponse> = {},
): DataRequestResponse => {
const codeNode = newNode({
@ -83,9 +84,8 @@ export const newCodeTaskData = (
nodes: [manualTriggerNode, codeNode],
},
inputData: {
main: [codeNodeInputData],
main: [inputData],
},
connectionInputData: codeNodeInputData,
node: codeNode,
runExecutionData: {
startData: {},
@ -95,7 +95,7 @@ export const newCodeTaskData = (
newTaskData({
source: [],
data: {
main: [codeNodeInputData],
main: [inputData],
},
}),
],
@ -137,14 +137,13 @@ export const newCodeTaskData = (
var: 'value',
},
},
executeData: {
node: codeNode,
data: {
main: [codeNodeInputData],
},
source: {
main: [{ previousNode: manualTriggerNode.name }],
connectionInputSource: {
main: [
{
previousNode: 'Trigger',
previousNodeOutput: 0,
},
],
},
...opts,
};

View file

@ -1,8 +1,13 @@
import { getAdditionalKeys } from 'n8n-core';
import type { IDataObject, INodeType, IWorkflowExecuteAdditionalData } from 'n8n-workflow';
import type {
IDataObject,
IExecuteData,
INodeType,
IWorkflowExecuteAdditionalData,
} from 'n8n-workflow';
import { Workflow, WorkflowDataProxy } from 'n8n-workflow';
import { newCodeTaskData } from '../../__tests__/test-data';
import { newDataRequestResponse } from '../../__tests__/test-data';
import { BuiltInsParser } from '../built-ins-parser';
import { BuiltInsParserState } from '../built-ins-parser-state';
@ -159,7 +164,12 @@ describe('BuiltInsParser', () => {
describe('WorkflowDataProxy built-ins', () => {
it('should have a known list of built-ins', () => {
const data = newCodeTaskData([]);
const data = newDataRequestResponse([]);
const executeData: IExecuteData = {
data: {},
node: data.node,
source: data.connectionInputSource,
};
const dataProxy = new WorkflowDataProxy(
new Workflow({
...data.workflow,
@ -179,7 +189,7 @@ describe('BuiltInsParser', () => {
data.runIndex,
0,
data.activeNodeName,
data.connectionInputData,
[],
data.siblingParameters,
data.mode,
getAdditionalKeys(
@ -187,7 +197,7 @@ describe('BuiltInsParser', () => {
data.mode,
data.runExecutionData,
),
data.executeData,
executeData,
data.defaultReturnRunIndex,
data.selfData,
data.contextNodeName,

View file

@ -1,4 +1,4 @@
import type { N8nMessage } from '../../runner-types';
import type { BrokerMessage } from '@/message-types';
/**
* Class to keep track of which built-in variables are accessed in the code
@ -53,7 +53,7 @@ export class BuiltInsParserState {
this.needs$prevNode = true;
}
toDataRequestParams(): N8nMessage.ToRequester.TaskDataRequest['requestParams'] {
toDataRequestParams(): BrokerMessage.ToRequester.TaskDataRequest['requestParams'] {
return {
dataOfNodes: this.needsAllNodes ? 'all' : Array.from(this.neededNodeNames),
env: this.needs$env,

View file

@ -1,27 +1,25 @@
import { getAdditionalKeys } from 'n8n-core';
import {
WorkflowDataProxy,
// type IWorkflowDataProxyAdditionalKeys,
Workflow,
} from 'n8n-workflow';
import { WorkflowDataProxy, Workflow } from 'n8n-workflow';
import type {
CodeExecutionMode,
INode,
ITaskDataConnections,
IWorkflowExecuteAdditionalData,
WorkflowParameters,
IDataObject,
IExecuteData,
INodeExecutionData,
INodeParameters,
IRunExecutionData,
WorkflowExecuteMode,
WorkflowParameters,
ITaskDataConnections,
INode,
IRunExecutionData,
EnvProviderState,
IExecuteData,
INodeTypeDescription,
} from 'n8n-workflow';
import * as a from 'node:assert';
import { runInNewContext, type Context } from 'node:vm';
import type { TaskResultData } from '@/runner-types';
import type { MainConfig } from '@/config/main-config';
import type { DataRequestResponse, PartialAdditionalData, TaskResultData } from '@/runner-types';
import { type Task, TaskRunner } from '@/task-runner';
import { BuiltInsParser } from './built-ins-parser/built-ins-parser';
@ -32,7 +30,7 @@ import { makeSerializable } from './errors/serializable-error';
import type { RequireResolver } from './require-resolver';
import { createRequireResolver } from './require-resolver';
import { validateRunForAllItemsOutput, validateRunForEachItemOutput } from './result-validation';
import type { MainConfig } from '../config/main-config';
import { DataRequestResponseReconstruct } from '../data-request/data-request-response-reconstruct';
export interface JSExecSettings {
code: string;
@ -44,34 +42,19 @@ export interface JSExecSettings {
mode: WorkflowExecuteMode;
}
export interface PartialAdditionalData {
executionId?: string;
restartExecutionId?: string;
restApiUrl: string;
instanceBaseUrl: string;
formWaitingBaseUrl: string;
webhookBaseUrl: string;
webhookWaitingBaseUrl: string;
webhookTestBaseUrl: string;
currentNodeParameters?: INodeParameters;
executionTimeoutTimestamp?: number;
userId?: string;
variables: IDataObject;
}
export interface DataRequestResponse {
export interface JsTaskData {
workflow: Omit<WorkflowParameters, 'nodeTypes'>;
inputData: ITaskDataConnections;
connectionInputData: INodeExecutionData[];
node: INode;
runExecutionData: IRunExecutionData;
runIndex: number;
itemIndex: number;
activeNodeName: string;
connectionInputData: INodeExecutionData[];
siblingParameters: INodeParameters;
mode: WorkflowExecuteMode;
envProviderState?: EnvProviderState;
envProviderState: EnvProviderState;
executeData?: IExecuteData;
defaultReturnRunIndex: number;
selfData: IDataObject;
@ -88,6 +71,8 @@ export class JsTaskRunner extends TaskRunner {
private readonly builtInsParser = new BuiltInsParser();
private readonly taskDataReconstruct = new DataRequestResponseReconstruct();
constructor(config: MainConfig, name = 'JS Task Runner') {
super({
taskType: 'javascript',
@ -114,11 +99,15 @@ export class JsTaskRunner extends TaskRunner {
? neededBuiltInsResult.result
: BuiltInsParserState.newNeedsAllDataState();
const data = await this.requestData<DataRequestResponse>(
const dataResponse = await this.requestData<DataRequestResponse>(
task.taskId,
neededBuiltIns.toDataRequestParams(),
);
const data = this.reconstructTaskData(dataResponse);
await this.requestNodeTypeIfNeeded(neededBuiltIns, data.workflow, task.taskId);
const workflowParams = data.workflow;
const workflow = new Workflow({
...workflowParams,
@ -177,7 +166,7 @@ export class JsTaskRunner extends TaskRunner {
private async runForAllItems(
taskId: string,
settings: JSExecSettings,
data: DataRequestResponse,
data: JsTaskData,
workflow: Workflow,
customConsole: CustomConsole,
): Promise<INodeExecutionData[]> {
@ -224,7 +213,7 @@ export class JsTaskRunner extends TaskRunner {
private async runForEachItem(
taskId: string,
settings: JSExecSettings,
data: DataRequestResponse,
data: JsTaskData,
workflow: Workflow,
customConsole: CustomConsole,
): Promise<INodeExecutionData[]> {
@ -291,7 +280,7 @@ export class JsTaskRunner extends TaskRunner {
return returnData;
}
private createDataProxy(data: DataRequestResponse, workflow: Workflow, itemIndex: number) {
private createDataProxy(data: JsTaskData, workflow: Workflow, itemIndex: number) {
return new WorkflowDataProxy(
workflow,
data.runExecutionData,
@ -335,4 +324,43 @@ export class JsTaskRunner extends TaskRunner {
return new ExecutionError({ message: JSON.stringify(error) });
}
private reconstructTaskData(response: DataRequestResponse): JsTaskData {
return {
...response,
connectionInputData: this.taskDataReconstruct.reconstructConnectionInputData(
response.inputData,
),
executeData: this.taskDataReconstruct.reconstructExecuteData(response),
};
}
private async requestNodeTypeIfNeeded(
neededBuiltIns: BuiltInsParserState,
workflow: JsTaskData['workflow'],
taskId: string,
) {
/**
* We request node types only when we know a task needs all nodes, because
* needing all nodes means that the task relies on paired item functionality,
* which is the same requirement for needing node types.
*/
if (neededBuiltIns.needsAllNodes) {
const uniqueNodeTypes = new Map(
workflow.nodes.map((node) => [
`${node.type}|${node.typeVersion}`,
{ name: node.type, version: node.typeVersion },
]),
);
const unknownNodeTypes = this.nodeTypes.onlyUnknown([...uniqueNodeTypes.values()]);
const nodeTypes = await this.requestNodeTypes<INodeTypeDescription[]>(
taskId,
unknownNodeTypes,
);
this.nodeTypes.addNodeTypeDescriptions(nodeTypes);
}
}
}

View file

@ -0,0 +1,251 @@
import type { INodeTypeBaseDescription } from 'n8n-workflow';
import type {
NeededNodeType,
RPC_ALLOW_LIST,
TaskDataRequestParams,
TaskResultData,
} from './runner-types';
export namespace BrokerMessage {
export namespace ToRunner {
export interface InfoRequest {
type: 'broker:inforequest';
}
export interface RunnerRegistered {
type: 'broker:runnerregistered';
}
export interface TaskOfferAccept {
type: 'broker:taskofferaccept';
taskId: string;
offerId: string;
}
export interface TaskCancel {
type: 'broker:taskcancel';
taskId: string;
reason: string;
}
export interface TaskSettings {
type: 'broker:tasksettings';
taskId: string;
settings: unknown;
}
export interface RPCResponse {
type: 'broker:rpcresponse';
callId: string;
taskId: string;
status: 'success' | 'error';
data: unknown;
}
export interface TaskDataResponse {
type: 'broker:taskdataresponse';
taskId: string;
requestId: string;
data: unknown;
}
export interface NodeTypes {
type: 'broker:nodetypes';
taskId: string;
requestId: string;
nodeTypes: INodeTypeBaseDescription[];
}
export type All =
| InfoRequest
| TaskOfferAccept
| TaskCancel
| TaskSettings
| RunnerRegistered
| RPCResponse
| TaskDataResponse
| NodeTypes;
}
export namespace ToRequester {
export interface TaskReady {
type: 'broker:taskready';
requestId: string;
taskId: string;
}
export interface TaskDone {
type: 'broker:taskdone';
taskId: string;
data: TaskResultData;
}
export interface TaskError {
type: 'broker:taskerror';
taskId: string;
error: unknown;
}
export interface TaskDataRequest {
type: 'broker:taskdatarequest';
taskId: string;
requestId: string;
requestParams: TaskDataRequestParams;
}
export interface NodeTypesRequest {
type: 'broker:nodetypesrequest';
taskId: string;
requestId: string;
requestParams: NeededNodeType[];
}
export interface RPC {
type: 'broker:rpc';
callId: string;
taskId: string;
name: (typeof RPC_ALLOW_LIST)[number];
params: unknown[];
}
export type All = TaskReady | TaskDone | TaskError | TaskDataRequest | NodeTypesRequest | RPC;
}
}
export namespace RequesterMessage {
export namespace ToBroker {
export interface TaskSettings {
type: 'requester:tasksettings';
taskId: string;
settings: unknown;
}
export interface TaskCancel {
type: 'requester:taskcancel';
taskId: string;
reason: string;
}
export interface TaskDataResponse {
type: 'requester:taskdataresponse';
taskId: string;
requestId: string;
data: unknown;
}
export interface NodeTypesResponse {
type: 'requester:nodetypesresponse';
taskId: string;
requestId: string;
nodeTypes: INodeTypeBaseDescription[];
}
export interface RPCResponse {
type: 'requester:rpcresponse';
taskId: string;
callId: string;
status: 'success' | 'error';
data: unknown;
}
export interface TaskRequest {
type: 'requester:taskrequest';
requestId: string;
taskType: string;
}
export type All =
| TaskSettings
| TaskCancel
| RPCResponse
| TaskDataResponse
| NodeTypesResponse
| TaskRequest;
}
}
export namespace RunnerMessage {
export namespace ToBroker {
export interface Info {
type: 'runner:info';
name: string;
types: string[];
}
export interface TaskAccepted {
type: 'runner:taskaccepted';
taskId: string;
}
export interface TaskRejected {
type: 'runner:taskrejected';
taskId: string;
reason: string;
}
export interface TaskDone {
type: 'runner:taskdone';
taskId: string;
data: TaskResultData;
}
export interface TaskError {
type: 'runner:taskerror';
taskId: string;
error: unknown;
}
export interface TaskOffer {
type: 'runner:taskoffer';
offerId: string;
taskType: string;
validFor: number;
}
export interface TaskDataRequest {
type: 'runner:taskdatarequest';
taskId: string;
requestId: string;
requestParams: TaskDataRequestParams;
}
export interface NodeTypesRequest {
type: 'runner:nodetypesrequest';
taskId: string;
requestId: string;
/**
* Which node types should be included in the runner's node types request.
*
* Node types are needed only when the script relies on paired item functionality.
* If so, we need only the node types not already cached in the runner.
*
* TODO: In future we can trim this down to only node types in the paired item chain,
* rather than assuming we need all node types in the workflow.
*
* @example [{ name: 'n8n-nodes-base.httpRequest', version: 1 }]
*/
requestParams: NeededNodeType[];
}
export interface RPC {
type: 'runner:rpc';
callId: string;
taskId: string;
name: (typeof RPC_ALLOW_LIST)[number];
params: unknown[];
}
export type All =
| Info
| TaskDone
| TaskError
| TaskAccepted
| TaskRejected
| TaskOffer
| RPC
| TaskDataRequest
| NodeTypesRequest;
}
}
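
The renamed namespaces make each message's direction explicit. For example, a runner asking the broker for missing node type descriptions sends a `RunnerMessage.ToBroker.NodeTypesRequest` (the IDs below are placeholders):

const nodeTypesRequest: RunnerMessage.ToBroker.NodeTypesRequest = {
  type: 'runner:nodetypesrequest',
  taskId: 'task-123', // placeholder
  requestId: 'request-456', // placeholder
  requestParams: [{ name: 'n8n-nodes-base.httpRequest', version: 1 }],
};

// Sent over the runner's websocket, e.g. via TaskRunner.send(nodeTypesRequest)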

View file

@ -7,6 +7,8 @@ import {
type IVersionedNodeType,
} from 'n8n-workflow';
import type { NeededNodeType } from './runner-types';
type VersionedTypes = Map<number, INodeTypeDescription>;
export const DEFAULT_NODETYPE_VERSION = 1;
@ -61,4 +63,30 @@ export class TaskRunnerNodeTypes implements INodeTypes {
getKnownTypes(): IDataObject {
throw new ApplicationError('Unimplemented `getKnownTypes`', { level: 'error' });
}
addNodeTypeDescriptions(nodeTypeDescriptions: INodeTypeDescription[]) {
const newNodeTypes = this.parseNodeTypes(nodeTypeDescriptions);
for (const [name, newVersions] of newNodeTypes.entries()) {
if (!this.nodeTypesByVersion.has(name)) {
this.nodeTypesByVersion.set(name, newVersions);
} else {
const existingVersions = this.nodeTypesByVersion.get(name)!;
for (const [version, nodeType] of newVersions.entries()) {
existingVersions.set(version, nodeType);
}
}
}
}
/** Filter out node type versions that are already registered. */
onlyUnknown(nodeTypes: NeededNodeType[]) {
return nodeTypes.filter(({ name, version }) => {
const existingVersions = this.nodeTypesByVersion.get(name);
if (!existingVersions) return true;
return !existingVersions.has(version);
});
}
}
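
The two new methods work together so a runner only fetches node types it has not cached yet. A usage sketch mirroring `requestNodeTypeIfNeeded` in the JS task runner above (`nodeTypes` is a `TaskRunnerNodeTypes` instance, `requestNodeTypes` stands in for the runner's request helper, and `taskId` is assumed):

import type { INodeTypeDescription } from 'n8n-workflow';
import type { NeededNodeType } from './runner-types';

const needed: NeededNodeType[] = [
  { name: 'n8n-nodes-base.httpRequest', version: 1 },
  { name: 'n8n-nodes-base.set', version: 2 },
];

// Only ask the broker for versions the local cache does not know yet ...
const unknown = nodeTypes.onlyUnknown(needed);

// ... then merge whatever comes back into the cache.
const descriptions = await requestNodeTypes<INodeTypeDescription[]>(taskId, unknown);
nodeTypes.addNodeTypeDescriptions(descriptions);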

View file

@ -1,216 +1,90 @@
import type { INodeExecutionData, INodeTypeBaseDescription } from 'n8n-workflow';
import type {
EnvProviderState,
IDataObject,
IExecuteData,
IExecuteFunctions,
INode,
INodeExecutionData,
INodeParameters,
IRunExecutionData,
ITaskDataConnections,
ITaskDataConnectionsSource,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowExecuteMode,
WorkflowParameters,
} from 'n8n-workflow';
/**
* Specifies what data should be included for a task data request.
*/
export interface TaskDataRequestParams {
dataOfNodes: string[] | 'all';
prevNode: boolean;
/** Whether input data for the node should be included */
input: boolean;
/** Whether env provider's state should be included */
env: boolean;
}
export interface DataRequestResponse {
workflow: Omit<WorkflowParameters, 'nodeTypes'>;
inputData: ITaskDataConnections;
connectionInputSource: ITaskDataConnectionsSource | null;
node: INode;
runExecutionData: IRunExecutionData;
runIndex: number;
itemIndex: number;
activeNodeName: string;
siblingParameters: INodeParameters;
mode: WorkflowExecuteMode;
envProviderState: EnvProviderState;
defaultReturnRunIndex: number;
selfData: IDataObject;
contextNodeName: string;
additionalData: PartialAdditionalData;
}
export interface TaskResultData {
result: INodeExecutionData[];
customData?: Record<string, string>;
}
export namespace N8nMessage {
export namespace ToRunner {
export interface InfoRequest {
type: 'broker:inforequest';
}
export interface TaskData {
executeFunctions: IExecuteFunctions;
inputData: ITaskDataConnections;
node: INode;
export interface RunnerRegistered {
type: 'broker:runnerregistered';
}
export interface TaskOfferAccept {
type: 'broker:taskofferaccept';
taskId: string;
offerId: string;
}
export interface TaskCancel {
type: 'broker:taskcancel';
taskId: string;
reason: string;
}
export interface TaskSettings {
type: 'broker:tasksettings';
taskId: string;
settings: unknown;
}
export interface RPCResponse {
type: 'broker:rpcresponse';
callId: string;
taskId: string;
status: 'success' | 'error';
data: unknown;
}
export interface TaskDataResponse {
type: 'broker:taskdataresponse';
taskId: string;
requestId: string;
data: unknown;
}
export interface NodeTypes {
type: 'broker:nodetypes';
nodeTypes: INodeTypeBaseDescription[];
}
export type All =
| InfoRequest
| TaskOfferAccept
| TaskCancel
| TaskSettings
| RunnerRegistered
| RPCResponse
| TaskDataResponse
| NodeTypes;
}
export namespace ToRequester {
export interface TaskReady {
type: 'broker:taskready';
requestId: string;
taskId: string;
}
export interface TaskDone {
type: 'broker:taskdone';
taskId: string;
data: TaskResultData;
}
export interface TaskError {
type: 'broker:taskerror';
taskId: string;
error: unknown;
}
export interface TaskDataRequest {
type: 'broker:taskdatarequest';
taskId: string;
requestId: string;
requestParams: TaskDataRequestParams;
}
export interface RPC {
type: 'broker:rpc';
callId: string;
taskId: string;
name: (typeof RPC_ALLOW_LIST)[number];
params: unknown[];
}
export type All = TaskReady | TaskDone | TaskError | TaskDataRequest | RPC;
}
workflow: Workflow;
runExecutionData: IRunExecutionData;
runIndex: number;
itemIndex: number;
activeNodeName: string;
connectionInputData: INodeExecutionData[];
siblingParameters: INodeParameters;
mode: WorkflowExecuteMode;
envProviderState: EnvProviderState;
executeData?: IExecuteData;
defaultReturnRunIndex: number;
selfData: IDataObject;
contextNodeName: string;
additionalData: IWorkflowExecuteAdditionalData;
}
export namespace RequesterMessage {
export namespace ToN8n {
export interface TaskSettings {
type: 'requester:tasksettings';
taskId: string;
settings: unknown;
}
export interface TaskCancel {
type: 'requester:taskcancel';
taskId: string;
reason: string;
}
export interface TaskDataResponse {
type: 'requester:taskdataresponse';
taskId: string;
requestId: string;
data: unknown;
}
export interface RPCResponse {
type: 'requester:rpcresponse';
taskId: string;
callId: string;
status: 'success' | 'error';
data: unknown;
}
export interface TaskRequest {
type: 'requester:taskrequest';
requestId: string;
taskType: string;
}
export type All = TaskSettings | TaskCancel | RPCResponse | TaskDataResponse | TaskRequest;
}
}
export namespace RunnerMessage {
export namespace ToN8n {
export interface Info {
type: 'runner:info';
name: string;
types: string[];
}
export interface TaskAccepted {
type: 'runner:taskaccepted';
taskId: string;
}
export interface TaskRejected {
type: 'runner:taskrejected';
taskId: string;
reason: string;
}
export interface TaskDone {
type: 'runner:taskdone';
taskId: string;
data: TaskResultData;
}
export interface TaskError {
type: 'runner:taskerror';
taskId: string;
error: unknown;
}
export interface TaskOffer {
type: 'runner:taskoffer';
offerId: string;
taskType: string;
validFor: number;
}
export interface TaskDataRequest {
type: 'runner:taskdatarequest';
taskId: string;
requestId: string;
requestParams: TaskDataRequestParams;
}
export interface RPC {
type: 'runner:rpc';
callId: string;
taskId: string;
name: (typeof RPC_ALLOW_LIST)[number];
params: unknown[];
}
export type All =
| Info
| TaskDone
| TaskError
| TaskAccepted
| TaskRejected
| TaskOffer
| RPC
| TaskDataRequest;
}
export interface PartialAdditionalData {
executionId?: string;
restartExecutionId?: string;
restApiUrl: string;
instanceBaseUrl: string;
formWaitingBaseUrl: string;
webhookBaseUrl: string;
webhookWaitingBaseUrl: string;
webhookTestBaseUrl: string;
currentNodeParameters?: INodeParameters;
executionTimeoutTimestamp?: number;
userId?: string;
variables: IDataObject;
}
export const RPC_ALLOW_LIST = [
@ -238,3 +112,6 @@ export const RPC_ALLOW_LIST = [
'helpers.httpRequest',
'logNodeOutput',
] as const;
/** Node types needed for the runner to execute a task. */
export type NeededNodeType = { name: string; version: number };
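As a small illustration, a `NeededNodeType` pairs a node type name with the version a task relies on; the concrete values below are hypothetical:
const neededNodeTypes: NeededNodeType[] = [
  { name: 'n8n-nodes-base.httpRequest', version: 4 },
  { name: 'n8n-nodes-base.set', version: 3 },
];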

View file

@ -1,15 +1,11 @@
import { ApplicationError, type INodeTypeDescription } from 'n8n-workflow';
import { ApplicationError } from 'n8n-workflow';
import { nanoid } from 'nanoid';
import { type MessageEvent, WebSocket } from 'ws';
import type { BaseRunnerConfig } from './config/base-runner-config';
import { TaskRunnerNodeTypes } from './node-types';
import {
RPC_ALLOW_LIST,
type RunnerMessage,
type N8nMessage,
type TaskResultData,
} from './runner-types';
import type { BaseRunnerConfig } from '@/config/base-runner-config';
import type { BrokerMessage, RunnerMessage } from '@/message-types';
import { TaskRunnerNodeTypes } from '@/node-types';
import { RPC_ALLOW_LIST, type TaskResultData } from '@/runner-types';
export interface Task<T = unknown> {
taskId: string;
@ -29,6 +25,12 @@ interface DataRequest {
reject: (error: unknown) => void;
}
interface NodeTypesRequest {
requestId: string;
resolve: (data: unknown) => void;
reject: (error: unknown) => void;
}
interface RPCCall {
callId: string;
resolve: (data: unknown) => void;
@ -62,6 +64,8 @@ export abstract class TaskRunner {
dataRequests: Map<DataRequest['requestId'], DataRequest> = new Map();
nodeTypesRequests: Map<NodeTypesRequest['requestId'], NodeTypesRequest> = new Map();
rpcCalls: Map<RPCCall['callId'], RPCCall> = new Map();
nodeTypes: TaskRunnerNodeTypes = new TaskRunnerNodeTypes([]);
@ -90,7 +94,7 @@ export abstract class TaskRunner {
private receiveMessage = (message: MessageEvent) => {
// eslint-disable-next-line n8n-local-rules/no-uncaught-json-parse
const data = JSON.parse(message.data as string) as N8nMessage.ToRunner.All;
const data = JSON.parse(message.data as string) as BrokerMessage.ToRunner.All;
void this.onMessage(data);
};
@ -140,11 +144,11 @@ export abstract class TaskRunner {
}
}
send(message: RunnerMessage.ToN8n.All) {
send(message: RunnerMessage.ToBroker.All) {
this.ws.send(JSON.stringify(message));
}
onMessage(message: N8nMessage.ToRunner.All) {
onMessage(message: BrokerMessage.ToRunner.All) {
switch (message.type) {
case 'broker:inforequest':
this.send({
@ -172,15 +176,11 @@ export abstract class TaskRunner {
this.handleRpcResponse(message.callId, message.status, message.data);
break;
case 'broker:nodetypes':
this.setNodeTypes(message.nodeTypes as unknown as INodeTypeDescription[]);
this.processNodeTypesResponse(message.requestId, message.nodeTypes);
break;
}
}
setNodeTypes(nodeTypes: INodeTypeDescription[]) {
this.nodeTypes = new TaskRunnerNodeTypes(nodeTypes);
}
processDataResponse(requestId: string, data: unknown) {
const request = this.dataRequests.get(requestId);
if (!request) {
@ -191,6 +191,16 @@ export abstract class TaskRunner {
request.resolve(data);
}
processNodeTypesResponse(requestId: string, nodeTypes: unknown) {
const request = this.nodeTypesRequests.get(requestId);
if (!request) return;
// The request is deleted in `requestNodeTypes`, using a `finally` block
// wrapped around the return
request.resolve(nodeTypes);
}
hasOpenTasks() {
return Object.values(this.runningTasks).length < this.maxConcurrency;
}
@ -252,7 +262,7 @@ export abstract class TaskRunner {
this.sendOffers();
}
taskDone(taskId: string, data: RunnerMessage.ToN8n.TaskDone['data']) {
taskDone(taskId: string, data: RunnerMessage.ToBroker.TaskDone['data']) {
this.send({
type: 'runner:taskdone',
taskId,
@ -286,9 +296,37 @@ export abstract class TaskRunner {
throw new ApplicationError('Unimplemented');
}
async requestNodeTypes<T = unknown>(
taskId: Task['taskId'],
requestParams: RunnerMessage.ToBroker.NodeTypesRequest['requestParams'],
) {
const requestId = nanoid();
const nodeTypesPromise = new Promise<T>((resolve, reject) => {
this.nodeTypesRequests.set(requestId, {
requestId,
resolve: resolve as (data: unknown) => void,
reject,
});
});
this.send({
type: 'runner:nodetypesrequest',
taskId,
requestId,
requestParams,
});
try {
return await nodeTypesPromise;
} finally {
this.nodeTypesRequests.delete(requestId);
}
}
async requestData<T = unknown>(
taskId: Task['taskId'],
requestParams: RunnerMessage.ToN8n.TaskDataRequest['requestParams'],
requestParams: RunnerMessage.ToBroker.TaskDataRequest['requestParams'],
): Promise<T> {
const requestId = nanoid();
@ -314,7 +352,7 @@ export abstract class TaskRunner {
}
}
async makeRpcCall(taskId: string, name: RunnerMessage.ToN8n.RPC['name'], params: unknown[]) {
async makeRpcCall(taskId: string, name: RunnerMessage.ToBroker.RPC['name'], params: unknown[]) {
const callId = nanoid();
const dataPromise = new Promise((resolve, reject) => {
@ -342,7 +380,7 @@ export abstract class TaskRunner {
handleRpcResponse(
callId: string,
status: N8nMessage.ToRunner.RPCResponse['status'],
status: BrokerMessage.ToRunner.RPCResponse['status'],
data: unknown,
) {
const call = this.rpcCalls.get(callId);
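The new `requestNodeTypes` above follows the same correlation pattern as `requestData` and `makeRpcCall`: generate an id, park a pending promise in a map keyed by that id, send the request over the WebSocket, and resolve the promise when the matching broker response arrives, cleaning up afterwards. A minimal, self-contained sketch of that pattern with illustrative names (not the runner's actual API):
import { nanoid } from 'nanoid';
interface PendingRequest {
  resolve: (data: unknown) => void;
  reject: (error: unknown) => void;
}
class PendingRequestMap {
  private readonly pending = new Map<string, PendingRequest>();
  // Register a pending request and return its id plus the promise that settles
  // once `settle` is called with the same id.
  create<T = unknown>() {
    const requestId = nanoid();
    const promise = new Promise<T>((resolve, reject) => {
      this.pending.set(requestId, { resolve: resolve as (data: unknown) => void, reject });
    });
    return { requestId, promise };
  }
  // Resolve the matching pending request, if any, and forget it.
  settle(requestId: string, data: unknown) {
    this.pending.get(requestId)?.resolve(data);
    this.pending.delete(requestId);
  }
}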

View file

@ -1,6 +1,6 @@
{
"name": "n8n",
"version": "1.66.0",
"version": "1.67.0",
"description": "n8n Workflow Automation Tool",
"main": "dist/index",
"types": "dist/index.d.ts",

View file

@ -27,7 +27,7 @@ import { Subscriber } from '@/scaling/pubsub/subscriber.service';
import { Server } from '@/server';
import { OrchestrationService } from '@/services/orchestration.service';
import { OwnershipService } from '@/services/ownership.service';
import { PruningService } from '@/services/pruning.service';
import { PruningService } from '@/services/pruning/pruning.service';
import { UrlService } from '@/services/url.service';
import { WaitTracker } from '@/wait-tracker';
import { WorkflowRunner } from '@/workflow-runner';

View file

@ -127,6 +127,9 @@ export const TIME = {
* Eventually this will supersede `TIME` above
*/
export const Time = {
milliseconds: {
toMinutes: 1 / (60 * 1000),
},
seconds: {
toMilliseconds: 1000,
},
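As a quick check on the new conversion factor: multiplying a duration in milliseconds by `Time.milliseconds.toMinutes`, i.e. by 1 / (60 * 1000), yields the duration in minutes.
const fiveMinutesInMs = 5 * 60 * 1000;
const fiveMinutes = fiveMinutesInMs * Time.milliseconds.toMinutes; // 5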

View file

@ -50,8 +50,8 @@ export class CreateTable extends TableOperation {
ref: {
tableName: string;
columnName: string;
onDelete?: 'CASCADE';
onUpdate?: 'CASCADE';
onDelete?: 'RESTRICT' | 'CASCADE' | 'NO ACTION' | 'SET NULL';
onUpdate?: 'RESTRICT' | 'CASCADE' | 'NO ACTION' | 'SET NULL';
name?: string;
},
) {

View file

@ -20,6 +20,7 @@ import { Settings } from './settings';
import { SharedCredentials } from './shared-credentials';
import { SharedWorkflow } from './shared-workflow';
import { TagEntity } from './tag-entity';
import { TestDefinition } from './test-definition';
import { User } from './user';
import { Variables } from './variables';
import { WebhookEntity } from './webhook-entity';
@ -58,4 +59,5 @@ export const entities = {
ProjectRelation,
ApiKey,
ProcessedData,
TestDefinition,
};

View file

@ -0,0 +1,61 @@
import {
Column,
Entity,
Generated,
Index,
ManyToOne,
OneToOne,
PrimaryColumn,
RelationId,
} from '@n8n/typeorm';
import { Length } from 'class-validator';
import { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity.ee';
import { WorkflowEntity } from '@/databases/entities/workflow-entity';
import { WithTimestamps } from './abstract-entity';
/**
 * Entity representing a Test Definition.
 * It combines:
 * - the workflow under test
 * - the workflow used to evaluate the results of test execution
 * - the annotation tag used as a filter to select test cases from previous executions of the workflow under test
 */
@Entity()
@Index(['workflow'])
@Index(['evaluationWorkflow'])
export class TestDefinition extends WithTimestamps {
@Generated()
@PrimaryColumn()
id: number;
@Column({ length: 255 })
@Length(1, 255, { message: 'Test name must be $constraint1 to $constraint2 characters long.' })
name: string;
/**
* Relation to the workflow under test
*/
@ManyToOne('WorkflowEntity', 'tests')
workflow: WorkflowEntity;
@RelationId((test: TestDefinition) => test.workflow)
workflowId: string;
/**
* Relation to the workflow used to evaluate the results of test execution
*/
@ManyToOne('WorkflowEntity', 'evaluationTests')
evaluationWorkflow: WorkflowEntity;
@RelationId((test: TestDefinition) => test.evaluationWorkflow)
evaluationWorkflowId: string;
/**
* Relation to the annotation tag associated with the test
* This tag will be used to select the test cases to run from previous executions
*/
@OneToOne('AnnotationTagEntity', 'test')
annotationTag: AnnotationTagEntity;
}
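A rough usage sketch for the entity above, assuming a TypeORM `Repository<TestDefinition>` is available; the repository variable and all concrete values are hypothetical, not part of this changeset:
const testDefinition = testDefinitionRepository.create({
  name: 'Checkout flow regression',
  workflow: { id: 'workflow-under-test-id' },
  evaluationWorkflow: { id: 'evaluation-workflow-id' },
});
await testDefinitionRepository.save(testDefinition);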

View file

@ -0,0 +1,37 @@
import type { MigrationContext, ReversibleMigration } from '@/databases/types';
const testEntityTableName = 'test_definition';
export class CreateTestDefinitionTable1730386903556 implements ReversibleMigration {
async up({ schemaBuilder: { createTable, column } }: MigrationContext) {
await createTable(testEntityTableName)
.withColumns(
column('id').int.notNull.primary.autoGenerate,
column('name').varchar(255).notNull,
column('workflowId').varchar(36).notNull,
column('evaluationWorkflowId').varchar(36),
column('annotationTagId').varchar(16),
)
.withIndexOn('workflowId')
.withIndexOn('evaluationWorkflowId')
.withForeignKey('workflowId', {
tableName: 'workflow_entity',
columnName: 'id',
onDelete: 'CASCADE',
})
.withForeignKey('evaluationWorkflowId', {
tableName: 'workflow_entity',
columnName: 'id',
onDelete: 'SET NULL',
})
.withForeignKey('annotationTagId', {
tableName: 'annotation_tag_entity',
columnName: 'id',
onDelete: 'SET NULL',
}).withTimestamps;
}
async down({ schemaBuilder: { dropTable } }: MigrationContext) {
await dropTable(testEntityTableName);
}
}

View file

@ -68,6 +68,7 @@ import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-C
import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart';
import { AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644 } from '../common/1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping';
import { UpdateProcessedDataValueColumnToText1729607673464 } from '../common/1729607673464-UpdateProcessedDataValueColumnToText';
import { CreateTestDefinitionTable1730386903556 } from '../common/1730386903556-CreateTestDefinitionTable';
export const mysqlMigrations: Migration[] = [
InitialMigration1588157391238,
@ -138,4 +139,5 @@ export const mysqlMigrations: Migration[] = [
CreateProcessedDataTable1726606152711,
AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644,
UpdateProcessedDataValueColumnToText1729607673464,
CreateTestDefinitionTable1730386903556,
];

View file

@ -68,6 +68,7 @@ import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-C
import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart';
import { AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644 } from '../common/1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping';
import { UpdateProcessedDataValueColumnToText1729607673464 } from '../common/1729607673464-UpdateProcessedDataValueColumnToText';
import { CreateTestDefinitionTable1730386903556 } from '../common/1730386903556-CreateTestDefinitionTable';
export const postgresMigrations: Migration[] = [
InitialMigration1587669153312,
@ -138,4 +139,5 @@ export const postgresMigrations: Migration[] = [
CreateProcessedDataTable1726606152711,
AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644,
UpdateProcessedDataValueColumnToText1729607673464,
CreateTestDefinitionTable1730386903556,
];

View file

@ -65,6 +65,7 @@ import { CreateAnnotationTables1724753530828 } from '../common/1724753530828-Cre
import { CreateProcessedDataTable1726606152711 } from '../common/1726606152711-CreateProcessedDataTable';
import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart';
import { UpdateProcessedDataValueColumnToText1729607673464 } from '../common/1729607673464-UpdateProcessedDataValueColumnToText';
import { CreateTestDefinitionTable1730386903556 } from '../common/1730386903556-CreateTestDefinitionTable';
const sqliteMigrations: Migration[] = [
InitialMigration1588102412422,
@ -132,6 +133,7 @@ const sqliteMigrations: Migration[] = [
CreateProcessedDataTable1726606152711,
AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644,
UpdateProcessedDataValueColumnToText1729607673464,
CreateTestDefinitionTable1730386903556,
];
export { sqliteMigrations };

View file

@ -513,7 +513,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
.execute();
}
async hardDeleteSoftDeletedExecutions() {
async findSoftDeletedExecutions() {
const date = new Date();
date.setHours(date.getHours() - this.globalConfig.pruning.hardDeleteBuffer);

View file

@ -108,6 +108,22 @@ describe('`parseRangeQuery` middleware', () => {
expect(nextFn).toBeCalledTimes(1);
});
test('should parse `projectId` field', () => {
const req = mock<ExecutionRequest.GetMany>({
query: {
filter: '{ "projectId": "123" }',
limit: undefined,
firstId: undefined,
lastId: undefined,
},
});
parseRangeQuery(req, res, nextFn);
expect(req.rangeQuery.projectId).toBe('123');
expect(nextFn).toBeCalledTimes(1);
});
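For context, the `filter` arrives as a JSON string in the query, so a request exercising the new branch could be built roughly like this (the endpoint path is shown only for illustration):
const filter = encodeURIComponent(JSON.stringify({ projectId: '123' }));
// e.g. GET /rest/executions?filter=%7B%22projectId%22%3A%22123%22%7D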
test('should delete invalid fields', () => {
const req = mock<ExecutionRequest.GetMany>({
query: {

View file

@ -66,6 +66,7 @@ export const schemaGetExecutionsQueryFilter = {
startedBefore: { type: 'date-time' },
annotationTags: { type: 'array', items: { type: 'string' } },
vote: { type: 'string' },
projectId: { type: 'string' },
},
$defs: {
metadata: {

View file

@ -3,6 +3,7 @@ import { validate } from 'class-validator';
import type { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity.ee';
import type { CredentialsEntity } from '@/databases/entities/credentials-entity';
import type { TagEntity } from '@/databases/entities/tag-entity';
import type { TestDefinition } from '@/databases/entities/test-definition';
import type { User } from '@/databases/entities/user';
import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
@ -12,6 +13,7 @@ import { BadRequestError } from './errors/response-errors/bad-request.error';
export async function validateEntity(
entity:
| WorkflowEntity
| TestDefinition
| CredentialsEntity
| TagEntity
| AnnotationTagEntity

View file

@ -1,3 +1,4 @@
import type { NeededNodeType } from '@n8n/task-runner';
import type { Dirent } from 'fs';
import { readdir } from 'fs/promises';
import { loadClassInIsolation } from 'n8n-core';
@ -149,4 +150,22 @@ export class NodeTypes implements INodeTypes {
dirent.name.toLowerCase().startsWith('v')
);
}
getNodeTypeDescriptions(nodeTypes: NeededNodeType[]): INodeTypeDescription[] {
return nodeTypes.map(({ name: nodeTypeName, version: nodeTypeVersion }) => {
const nodeType = this.getNode(nodeTypeName);
if (!nodeType) throw new ApplicationError(`Unknown node type: ${nodeTypeName}`);
const { description } = NodeHelpers.getVersionedNodeType(nodeType.type, nodeTypeVersion);
const descriptionCopy = { ...description };
descriptionCopy.name = descriptionCopy.name.startsWith('n8n-nodes')
? descriptionCopy.name
: `n8n-nodes-base.${descriptionCopy.name}`; // nodes-base nodes are unprefixed
return descriptionCopy;
});
}
}
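A brief usage sketch for the new method, assuming a `NodeTypes` instance obtained through the DI container; the node name and version are illustrative:
const nodeTypes = Container.get(NodeTypes);
const descriptions = nodeTypes.getNodeTypeDescriptions([{ name: 'n8n-nodes-base.set', version: 3 }]);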

View file

@ -1,7 +1,8 @@
import type { RunnerMessage, TaskResultData } from '@n8n/task-runner';
import { mock } from 'jest-mock-extended';
import type { INodeTypeBaseDescription } from 'n8n-workflow';
import { TaskRejectError } from '../errors';
import type { RunnerMessage, TaskResultData } from '../runner-types';
import { TaskBroker } from '../task-broker.service';
import type { TaskOffer, TaskRequest, TaskRunner } from '../task-broker.service';
@ -11,7 +12,7 @@ describe('TaskBroker', () => {
let taskBroker: TaskBroker;
beforeEach(() => {
taskBroker = new TaskBroker(mock(), mock());
taskBroker = new TaskBroker(mock());
jest.restoreAllMocks();
});
@ -76,13 +77,6 @@ describe('TaskBroker', () => {
const messageCallback = jest.fn();
taskBroker.registerRunner(runner, messageCallback);
expect(messageCallback).toBeCalledWith({
type: 'broker:nodetypes',
// We're mocking the node types service, so this will
// be undefined.
nodeType: undefined,
});
});
});
@ -381,7 +375,7 @@ describe('TaskBroker', () => {
const runnerId = 'runner1';
const taskId = 'task1';
const message: RunnerMessage.ToN8n.TaskAccepted = {
const message: RunnerMessage.ToBroker.TaskAccepted = {
type: 'runner:taskaccepted',
taskId,
};
@ -406,7 +400,7 @@ describe('TaskBroker', () => {
const taskId = 'task1';
const rejectionReason = 'Task execution failed';
const message: RunnerMessage.ToN8n.TaskRejected = {
const message: RunnerMessage.ToBroker.TaskRejected = {
type: 'runner:taskrejected',
taskId,
reason: rejectionReason,
@ -433,7 +427,7 @@ describe('TaskBroker', () => {
const requesterId = 'requester1';
const data = mock<TaskResultData>();
const message: RunnerMessage.ToN8n.TaskDone = {
const message: RunnerMessage.ToBroker.TaskDone = {
type: 'runner:taskdone',
taskId,
data,
@ -464,7 +458,7 @@ describe('TaskBroker', () => {
const requesterId = 'requester1';
const errorMessage = 'Task execution failed';
const message: RunnerMessage.ToN8n.TaskError = {
const message: RunnerMessage.ToBroker.TaskError = {
type: 'runner:taskerror',
taskId,
error: errorMessage,
@ -494,14 +488,14 @@ describe('TaskBroker', () => {
const taskId = 'task1';
const requesterId = 'requester1';
const requestId = 'request1';
const requestParams: RunnerMessage.ToN8n.TaskDataRequest['requestParams'] = {
const requestParams: RunnerMessage.ToBroker.TaskDataRequest['requestParams'] = {
dataOfNodes: 'all',
env: true,
input: true,
prevNode: true,
};
const message: RunnerMessage.ToN8n.TaskDataRequest = {
const message: RunnerMessage.ToBroker.TaskDataRequest = {
type: 'runner:taskdatarequest',
taskId,
requestId,
@ -534,7 +528,7 @@ describe('TaskBroker', () => {
const rpcName = 'helpers.httpRequestWithAuthentication';
const rpcParams = ['param1', 'param2'];
const message: RunnerMessage.ToN8n.RPC = {
const message: RunnerMessage.ToBroker.RPC = {
type: 'runner:rpc',
taskId,
callId,
@ -560,5 +554,68 @@ describe('TaskBroker', () => {
params: rpcParams,
});
});
it('should handle `runner:nodetypesrequest` message', async () => {
const runnerId = 'runner1';
const taskId = 'task1';
const requesterId = 'requester1';
const requestId = 'request1';
const requestParams = [
{
name: 'n8n-nodes-base.someNode',
version: 1,
},
];
const message: RunnerMessage.ToBroker.NodeTypesRequest = {
type: 'runner:nodetypesrequest',
taskId,
requestId,
requestParams,
};
const requesterMessageCallback = jest.fn();
taskBroker.registerRunner(mock<TaskRunner>({ id: runnerId }), jest.fn());
taskBroker.setTasks({
[taskId]: { id: taskId, runnerId, requesterId, taskType: 'test' },
});
taskBroker.registerRequester(requesterId, requesterMessageCallback);
await taskBroker.onRunnerMessage(runnerId, message);
expect(requesterMessageCallback).toHaveBeenCalledWith({
type: 'broker:nodetypesrequest',
taskId,
requestId,
requestParams,
});
});
});
describe('onRequesterMessage', () => {
it('should handle `requester:nodetypesresponse` message', async () => {
const runnerId = 'runner1';
const taskId = 'task1';
const requesterId = 'requester1';
const requestId = 'request1';
const nodeTypes = [mock<INodeTypeBaseDescription>(), mock<INodeTypeBaseDescription>()];
const runnerMessageCallback = jest.fn();
taskBroker.registerRunner(mock<TaskRunner>({ id: runnerId }), runnerMessageCallback);
taskBroker.setTasks({
[taskId]: { id: taskId, runnerId, requesterId, taskType: 'test' },
});
await taskBroker.handleRequesterNodeTypesResponse(taskId, requestId, nodeTypes);
expect(runnerMessageCallback).toHaveBeenCalledWith({
type: 'broker:nodetypes',
taskId,
requestId,
nodeTypes,
});
});
});
});

View file

@ -1,22 +1,10 @@
import type { Response } from 'express';
import type { INodeExecutionData, INodeTypeBaseDescription } from 'n8n-workflow';
import type { INodeExecutionData } from 'n8n-workflow';
import type WebSocket from 'ws';
import type { TaskRunner } from './task-broker.service';
import type { AuthlessRequest } from '../requests';
/**
* Specifies what data should be included for a task data request.
*/
export interface TaskDataRequestParams {
dataOfNodes: string[] | 'all';
prevNode: boolean;
/** Whether input data for the node should be included */
input: boolean;
/** Whether env provider's state should be included */
env: boolean;
}
export interface DisconnectAnalyzer {
determineDisconnectReason(runnerId: TaskRunner['id']): Promise<Error>;
}
@ -34,230 +22,3 @@ export interface TaskRunnerServerInitRequest
}
export type TaskRunnerServerInitResponse = Response & { req: TaskRunnerServerInitRequest };
export namespace N8nMessage {
export namespace ToRunner {
export interface InfoRequest {
type: 'broker:inforequest';
}
export interface RunnerRegistered {
type: 'broker:runnerregistered';
}
export interface TaskOfferAccept {
type: 'broker:taskofferaccept';
taskId: string;
offerId: string;
}
export interface TaskCancel {
type: 'broker:taskcancel';
taskId: string;
reason: string;
}
export interface TaskSettings {
type: 'broker:tasksettings';
taskId: string;
settings: unknown;
}
export interface RPCResponse {
type: 'broker:rpcresponse';
callId: string;
taskId: string;
status: 'success' | 'error';
data: unknown;
}
export interface TaskDataResponse {
type: 'broker:taskdataresponse';
taskId: string;
requestId: string;
data: unknown;
}
export interface NodeTypes {
type: 'broker:nodetypes';
nodeTypes: INodeTypeBaseDescription[];
}
export type All =
| InfoRequest
| TaskOfferAccept
| TaskCancel
| TaskSettings
| RunnerRegistered
| RPCResponse
| TaskDataResponse
| NodeTypes;
}
export namespace ToRequester {
export interface TaskReady {
type: 'broker:taskready';
requestId: string;
taskId: string;
}
export interface TaskDone {
type: 'broker:taskdone';
taskId: string;
data: TaskResultData;
}
export interface TaskError {
type: 'broker:taskerror';
taskId: string;
error: unknown;
}
export interface TaskDataRequest {
type: 'broker:taskdatarequest';
taskId: string;
requestId: string;
requestParams: TaskDataRequestParams;
}
export interface RPC {
type: 'broker:rpc';
callId: string;
taskId: string;
name: (typeof RPC_ALLOW_LIST)[number];
params: unknown[];
}
export type All = TaskReady | TaskDone | TaskError | TaskDataRequest | RPC;
}
}
export namespace RequesterMessage {
export namespace ToN8n {
export interface TaskSettings {
type: 'requester:tasksettings';
taskId: string;
settings: unknown;
}
export interface TaskCancel {
type: 'requester:taskcancel';
taskId: string;
reason: string;
}
export interface TaskDataResponse {
type: 'requester:taskdataresponse';
taskId: string;
requestId: string;
data: unknown;
}
export interface RPCResponse {
type: 'requester:rpcresponse';
taskId: string;
callId: string;
status: 'success' | 'error';
data: unknown;
}
export interface TaskRequest {
type: 'requester:taskrequest';
requestId: string;
taskType: string;
}
export type All = TaskSettings | TaskCancel | RPCResponse | TaskDataResponse | TaskRequest;
}
}
export namespace RunnerMessage {
export namespace ToN8n {
export interface Info {
type: 'runner:info';
name: string;
types: string[];
}
export interface TaskAccepted {
type: 'runner:taskaccepted';
taskId: string;
}
export interface TaskRejected {
type: 'runner:taskrejected';
taskId: string;
reason: string;
}
export interface TaskDone {
type: 'runner:taskdone';
taskId: string;
data: TaskResultData;
}
export interface TaskError {
type: 'runner:taskerror';
taskId: string;
error: unknown;
}
export interface TaskOffer {
type: 'runner:taskoffer';
offerId: string;
taskType: string;
validFor: number;
}
export interface TaskDataRequest {
type: 'runner:taskdatarequest';
taskId: string;
requestId: string;
requestParams: TaskDataRequestParams;
}
export interface RPC {
type: 'runner:rpc';
callId: string;
taskId: string;
name: (typeof RPC_ALLOW_LIST)[number];
params: unknown[];
}
export type All =
| Info
| TaskDone
| TaskError
| TaskAccepted
| TaskRejected
| TaskOffer
| RPC
| TaskDataRequest;
}
}
export const RPC_ALLOW_LIST = [
'logNodeOutput',
'helpers.httpRequestWithAuthentication',
'helpers.requestWithAuthenticationPaginated',
// "helpers.normalizeItems"
// "helpers.constructExecutionMetaData"
// "helpers.assertBinaryData"
'helpers.getBinaryDataBuffer',
// "helpers.copyInputItems"
// "helpers.returnJsonArray"
'helpers.getSSHClient',
'helpers.createReadStream',
// "helpers.getStoragePath"
'helpers.writeContentToFile',
'helpers.prepareBinaryData',
'helpers.setBinaryDataBuffer',
'helpers.copyBinaryFile',
'helpers.binaryToBuffer',
// "helpers.binaryToString"
// "helpers.getBinaryPath"
'helpers.getBinaryStream',
'helpers.getBinaryMetadata',
'helpers.createDeferredPromise',
'helpers.httpRequest',
] as const;

View file

@ -1,3 +1,4 @@
import type { BrokerMessage, RunnerMessage } from '@n8n/task-runner';
import { Service } from 'typedi';
import type WebSocket from 'ws';
@ -5,11 +6,9 @@ import { Logger } from '@/logging/logger.service';
import { DefaultTaskRunnerDisconnectAnalyzer } from './default-task-runner-disconnect-analyzer';
import type {
RunnerMessage,
N8nMessage,
DisconnectAnalyzer,
TaskRunnerServerInitRequest,
TaskRunnerServerInitResponse,
DisconnectAnalyzer,
} from './runner-types';
import { TaskBroker, type MessageCallback, type TaskRunner } from './task-broker.service';
@ -35,7 +34,7 @@ export class TaskRunnerWsServer {
return this.disconnectAnalyzer;
}
sendMessage(id: TaskRunner['id'], message: N8nMessage.ToRunner.All) {
sendMessage(id: TaskRunner['id'], message: BrokerMessage.ToRunner.All) {
this.runnerConnections.get(id)?.send(JSON.stringify(message));
}
@ -49,9 +48,9 @@ export class TaskRunnerWsServer {
try {
const buffer = Array.isArray(data) ? Buffer.concat(data) : Buffer.from(data);
const message: RunnerMessage.ToN8n.All = JSON.parse(
const message: RunnerMessage.ToBroker.All = JSON.parse(
buffer.toString('utf8'),
) as RunnerMessage.ToN8n.All;
) as RunnerMessage.ToBroker.All;
if (!isConnected && message.type !== 'runner:info') {
return;
@ -71,7 +70,7 @@ export class TaskRunnerWsServer {
this.sendMessage.bind(this, id) as MessageCallback,
);
this.logger.info(`Runner "${message.name}"(${id}) has been registered`);
this.logger.info(`Runner "${message.name}" (${id}) has been registered`);
return;
}
@ -94,7 +93,7 @@ export class TaskRunnerWsServer {
connection.on('message', onMessage);
connection.send(
JSON.stringify({ type: 'broker:inforequest' } as N8nMessage.ToRunner.InfoRequest),
JSON.stringify({ type: 'broker:inforequest' } as BrokerMessage.ToRunner.InfoRequest),
);
}

View file

@ -1,12 +1,16 @@
import type {
BrokerMessage,
RequesterMessage,
RunnerMessage,
TaskResultData,
} from '@n8n/task-runner';
import { ApplicationError } from 'n8n-workflow';
import { nanoid } from 'nanoid';
import { Service } from 'typedi';
import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials';
import { Logger } from '@/logging/logger.service';
import { TaskRejectError } from './errors';
import type { N8nMessage, RunnerMessage, RequesterMessage, TaskResultData } from './runner-types';
export interface TaskRunner {
id: string;
@ -38,13 +42,15 @@ export interface TaskRequest {
acceptInProgress?: boolean;
}
export type MessageCallback = (message: N8nMessage.ToRunner.All) => Promise<void> | void;
export type MessageCallback = (message: BrokerMessage.ToRunner.All) => Promise<void> | void;
export type RequesterMessageCallback = (
message: N8nMessage.ToRequester.All,
message: BrokerMessage.ToRequester.All,
) => Promise<void> | void;
type RunnerAcceptCallback = () => void;
type RequesterAcceptCallback = (settings: RequesterMessage.ToN8n.TaskSettings['settings']) => void;
type RequesterAcceptCallback = (
settings: RequesterMessage.ToBroker.TaskSettings['settings'],
) => void;
type TaskRejectCallback = (reason: TaskRejectError) => void;
@Service()
@ -72,19 +78,7 @@ export class TaskBroker {
private pendingTaskRequests: TaskRequest[] = [];
constructor(
private readonly logger: Logger,
private readonly loadNodesAndCredentials: LoadNodesAndCredentials,
) {
this.loadNodesAndCredentials.addPostProcessor(this.updateNodeTypes);
}
updateNodeTypes = async () => {
await this.messageAllRunners({
type: 'broker:nodetypes',
nodeTypes: this.loadNodesAndCredentials.types.nodes,
});
};
constructor(private readonly logger: Logger) {}
expireTasks() {
const now = process.hrtime.bigint();
@ -98,10 +92,6 @@ export class TaskBroker {
registerRunner(runner: TaskRunner, messageCallback: MessageCallback) {
this.knownRunners.set(runner.id, { runner, messageCallback });
void this.knownRunners.get(runner.id)!.messageCallback({ type: 'broker:runnerregistered' });
void this.knownRunners.get(runner.id)!.messageCallback({
type: 'broker:nodetypes',
nodeTypes: this.loadNodesAndCredentials.types.nodes,
});
}
deregisterRunner(runnerId: string, error: Error) {
@ -134,23 +124,15 @@ export class TaskBroker {
this.requesters.delete(requesterId);
}
private async messageRunner(runnerId: TaskRunner['id'], message: N8nMessage.ToRunner.All) {
private async messageRunner(runnerId: TaskRunner['id'], message: BrokerMessage.ToRunner.All) {
await this.knownRunners.get(runnerId)?.messageCallback(message);
}
private async messageAllRunners(message: N8nMessage.ToRunner.All) {
await Promise.allSettled(
[...this.knownRunners.values()].map(async (runner) => {
await runner.messageCallback(message);
}),
);
}
private async messageRequester(requesterId: string, message: N8nMessage.ToRequester.All) {
private async messageRequester(requesterId: string, message: BrokerMessage.ToRequester.All) {
await this.requesters.get(requesterId)?.(message);
}
async onRunnerMessage(runnerId: TaskRunner['id'], message: RunnerMessage.ToN8n.All) {
async onRunnerMessage(runnerId: TaskRunner['id'], message: RunnerMessage.ToBroker.All) {
const runner = this.knownRunners.get(runnerId);
if (!runner) {
return;
@ -180,7 +162,9 @@ export class TaskBroker {
case 'runner:taskdatarequest':
await this.handleDataRequest(message.taskId, message.requestId, message.requestParams);
break;
case 'runner:nodetypesrequest':
await this.handleNodeTypesRequest(message.taskId, message.requestId, message.requestParams);
break;
case 'runner:rpc':
await this.handleRpcRequest(message.taskId, message.callId, message.name, message.params);
break;
@ -193,7 +177,7 @@ export class TaskBroker {
async handleRpcRequest(
taskId: Task['id'],
callId: string,
name: RunnerMessage.ToN8n.RPC['name'],
name: RunnerMessage.ToBroker.RPC['name'],
params: unknown[],
) {
const task = this.tasks.get(taskId);
@ -227,8 +211,8 @@ export class TaskBroker {
async handleDataRequest(
taskId: Task['id'],
requestId: RunnerMessage.ToN8n.TaskDataRequest['requestId'],
requestParams: RunnerMessage.ToN8n.TaskDataRequest['requestParams'],
requestId: RunnerMessage.ToBroker.TaskDataRequest['requestId'],
requestParams: RunnerMessage.ToBroker.TaskDataRequest['requestParams'],
) {
const task = this.tasks.get(taskId);
if (!task) {
@ -242,9 +226,26 @@ export class TaskBroker {
});
}
async handleNodeTypesRequest(
taskId: Task['id'],
requestId: RunnerMessage.ToBroker.NodeTypesRequest['requestId'],
requestParams: RunnerMessage.ToBroker.NodeTypesRequest['requestParams'],
) {
const task = this.tasks.get(taskId);
if (!task) {
return;
}
await this.messageRequester(task.requesterId, {
type: 'broker:nodetypesrequest',
taskId,
requestId,
requestParams,
});
}
async handleResponse(
taskId: Task['id'],
requestId: RunnerMessage.ToN8n.TaskDataRequest['requestId'],
requestId: RunnerMessage.ToBroker.TaskDataRequest['requestId'],
data: unknown,
) {
const task = this.tasks.get(taskId);
@ -259,7 +260,7 @@ export class TaskBroker {
});
}
async onRequesterMessage(requesterId: string, message: RequesterMessage.ToN8n.All) {
async onRequesterMessage(requesterId: string, message: RequesterMessage.ToBroker.All) {
switch (message.type) {
case 'requester:tasksettings':
this.handleRequesterAccept(message.taskId, message.settings);
@ -277,6 +278,13 @@ export class TaskBroker {
case 'requester:taskdataresponse':
await this.handleRequesterDataResponse(message.taskId, message.requestId, message.data);
break;
case 'requester:nodetypesresponse':
await this.handleRequesterNodeTypesResponse(
message.taskId,
message.requestId,
message.nodeTypes,
);
break;
case 'requester:rpcresponse':
await this.handleRequesterRpcResponse(
message.taskId,
@ -291,7 +299,7 @@ export class TaskBroker {
async handleRequesterRpcResponse(
taskId: string,
callId: string,
status: RequesterMessage.ToN8n.RPCResponse['status'],
status: RequesterMessage.ToBroker.RPCResponse['status'],
data: unknown,
) {
const runner = await this.getRunnerOrFailTask(taskId);
@ -315,9 +323,24 @@ export class TaskBroker {
});
}
async handleRequesterNodeTypesResponse(
taskId: Task['id'],
requestId: RequesterMessage.ToBroker.NodeTypesResponse['requestId'],
nodeTypes: RequesterMessage.ToBroker.NodeTypesResponse['nodeTypes'],
) {
const runner = await this.getRunnerOrFailTask(taskId);
await this.messageRunner(runner.id, {
type: 'broker:nodetypes',
taskId,
requestId,
nodeTypes,
});
}
handleRequesterAccept(
taskId: Task['id'],
settings: RequesterMessage.ToN8n.TaskSettings['settings'],
settings: RequesterMessage.ToBroker.TaskSettings['settings'],
) {
const acceptReject = this.requesterAcceptRejects.get(taskId);
if (acceptReject) {
@ -467,10 +490,12 @@ export class TaskBroker {
this.pendingTaskRequests.splice(requestIndex, 1);
try {
const acceptPromise = new Promise<RequesterMessage.ToN8n.TaskSettings['settings']>(
const acceptPromise = new Promise<RequesterMessage.ToBroker.TaskSettings['settings']>(
(resolve, reject) => {
this.requesterAcceptRejects.set(taskId, {
accept: resolve as (settings: RequesterMessage.ToN8n.TaskSettings['settings']) => void,
accept: resolve as (
settings: RequesterMessage.ToBroker.TaskSettings['settings'],
) => void,
reject,
});

View file

@ -1,42 +1,10 @@
import type { PartialAdditionalData, TaskData } from '@n8n/task-runner';
import { mock } from 'jest-mock-extended';
import type { IExecuteFunctions, IWorkflowExecuteAdditionalData } from 'n8n-workflow';
import { type INode, type INodeExecutionData, type Workflow } from 'n8n-workflow';
import type { Workflow } from 'n8n-workflow';
import { DataRequestResponseBuilder } from '../data-request-response-builder';
import type { TaskData } from '../task-manager';
const triggerNode: INode = mock<INode>({
name: 'Trigger',
});
const debugHelperNode: INode = mock<INode>({
name: 'DebugHelper',
});
const codeNode: INode = mock<INode>({
name: 'Code',
});
const workflow: TaskData['workflow'] = mock<Workflow>();
const debugHelperNodeOutItems: INodeExecutionData[] = [
{
json: {
uid: 'abb74fd4-bef2-4fae-9d53-ea24e9eb3032',
email: 'Dan.Schmidt31@yahoo.com',
firstname: 'Toni',
lastname: 'Schuster',
password: 'Q!D6C2',
},
pairedItem: {
item: 0,
},
},
];
const codeNodeInputItems: INodeExecutionData[] = debugHelperNodeOutItems;
const connectionInputData: TaskData['connectionInputData'] = codeNodeInputItems;
const envProviderState: TaskData['envProviderState'] = mock<TaskData['envProviderState']>({
env: {},
isEnvAccessBlocked: false,
isProcessAvailable: true,
});
const additionalData = mock<IWorkflowExecuteAdditionalData>({
const additionalData = mock<PartialAdditionalData>({
formWaitingBaseUrl: 'http://localhost:5678/form-waiting',
instanceBaseUrl: 'http://localhost:5678/',
restApiUrl: 'http://localhost:5678/rest',
@ -50,165 +18,34 @@ const additionalData = mock<IWorkflowExecuteAdditionalData>({
executionTimeoutTimestamp: undefined,
restartExecutionId: undefined,
});
const executeFunctions = mock<IExecuteFunctions>();
/**
 * Drawn with https://asciiflow.com/#/
 * Task data for an execution of the following WF,
 * where Code is the node currently being executed:
 *
 * Trigger --> DebugHelper --> Code
 */
const taskData: TaskData = {
executeFunctions,
workflow,
connectionInputData,
inputData: {
main: [codeNodeInputItems],
},
itemIndex: 0,
activeNodeName: codeNode.name,
contextNodeName: codeNode.name,
defaultReturnRunIndex: -1,
mode: 'manual',
envProviderState,
node: codeNode,
runExecutionData: {
startData: {
destinationNode: codeNode.name,
runNodeFilter: [triggerNode.name, debugHelperNode.name, codeNode.name],
},
resultData: {
runData: {
[triggerNode.name]: [
{
hints: [],
startTime: 1730313407328,
executionTime: 1,
source: [],
executionStatus: 'success',
data: {
main: [[]],
},
},
],
[debugHelperNode.name]: [
{
hints: [],
startTime: 1730313407330,
executionTime: 1,
source: [
{
previousNode: triggerNode.name,
},
],
executionStatus: 'success',
data: {
main: [debugHelperNodeOutItems],
},
},
],
},
const workflow: TaskData['workflow'] = mock<Workflow>({
id: '1',
name: 'Test Workflow',
active: true,
connectionsBySourceNode: {},
nodes: {},
pinData: {},
},
executionData: {
contextData: {},
nodeExecutionStack: [],
metadata: {},
waitingExecution: {
[codeNode.name]: {
'0': {
main: [codeNodeInputItems],
},
},
},
waitingExecutionSource: {
[codeNode.name]: {
'0': {
main: [
{
previousNode: debugHelperNode.name,
},
],
},
},
},
},
},
runIndex: 0,
selfData: {},
siblingParameters: {},
executeData: {
node: codeNode,
data: {
main: [codeNodeInputItems],
},
source: {
main: [
{
previousNode: debugHelperNode.name,
previousNodeOutput: 0,
},
],
},
},
settings: {},
staticData: {},
});
const taskData = mock<TaskData>({
additionalData,
} as const;
workflow,
});
describe('DataRequestResponseBuilder', () => {
const allDataParam: DataRequestResponseBuilder['requestParams'] = {
dataOfNodes: 'all',
env: true,
input: true,
prevNode: true,
};
const builder = new DataRequestResponseBuilder();
const newRequestParam = (opts: Partial<DataRequestResponseBuilder['requestParams']>) => ({
...allDataParam,
...opts,
});
describe('all data', () => {
it('should build the runExecutionData as is when everything is requested', () => {
const dataRequestResponseBuilder = new DataRequestResponseBuilder(taskData, allDataParam);
const { runExecutionData } = dataRequestResponseBuilder.build();
expect(runExecutionData).toStrictEqual(taskData.runExecutionData);
});
});
describe('envProviderState', () => {
it("should filter out envProviderState when it's not requested", () => {
const dataRequestResponseBuilder = new DataRequestResponseBuilder(
taskData,
newRequestParam({
env: false,
}),
);
const result = dataRequestResponseBuilder.build();
expect(result.envProviderState).toStrictEqual({
env: {},
isEnvAccessBlocked: false,
isProcessAvailable: true,
});
});
});
describe('additionalData', () => {
it('picks only specific properties for additional data', () => {
const dataRequestResponseBuilder = new DataRequestResponseBuilder(taskData, allDataParam);
const result = dataRequestResponseBuilder.build();
const result = builder.buildFromTaskData(taskData);
expect(result.additionalData).toStrictEqual({
formWaitingBaseUrl: 'http://localhost:5678/form-waiting',
instanceBaseUrl: 'http://localhost:5678/',
restApiUrl: 'http://localhost:5678/rest',
variables: additionalData.variables,
webhookBaseUrl: 'http://localhost:5678/webhook',
webhookTestBaseUrl: 'http://localhost:5678/webhook-test',
webhookWaitingBaseUrl: 'http://localhost:5678/webhook-waiting',
@ -217,108 +54,21 @@ describe('DataRequestResponseBuilder', () => {
currentNodeParameters: undefined,
executionTimeoutTimestamp: undefined,
restartExecutionId: undefined,
variables: additionalData.variables,
});
});
});
describe('input data', () => {
const allExceptInputParam = newRequestParam({
input: false,
});
it('drops input data from executeData', () => {
const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build();
expect(result.executeData).toStrictEqual({
node: taskData.executeData!.node,
source: taskData.executeData!.source,
data: {},
});
});
it('drops input data from result', () => {
const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build();
it('picks only specific properties for workflow', () => {
const result = builder.buildFromTaskData(taskData);
expect(result.inputData).toStrictEqual({});
});
it('drops input data from result', () => {
const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build();
expect(result.inputData).toStrictEqual({});
});
it('drops input data from connectionInputData', () => {
const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build();
expect(result.connectionInputData).toStrictEqual([]);
});
});
describe('nodes', () => {
it('should return empty run data when only Code node is requested', () => {
const result = new DataRequestResponseBuilder(
taskData,
newRequestParam({ dataOfNodes: ['Code'], prevNode: false }),
).build();
expect(result.runExecutionData.resultData.runData).toStrictEqual({});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
it('should return empty run data when only Code node is requested', () => {
const result = new DataRequestResponseBuilder(
taskData,
newRequestParam({ dataOfNodes: [codeNode.name], prevNode: false }),
).build();
expect(result.runExecutionData.resultData.runData).toStrictEqual({});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
it("should return only DebugHelper's data when only DebugHelper node is requested", () => {
const result = new DataRequestResponseBuilder(
taskData,
newRequestParam({ dataOfNodes: [debugHelperNode.name], prevNode: false }),
).build();
expect(result.runExecutionData.resultData.runData).toStrictEqual({
[debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name],
});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
it("should return DebugHelper's data when only prevNode node is requested", () => {
const result = new DataRequestResponseBuilder(
taskData,
newRequestParam({ dataOfNodes: [], prevNode: true }),
).build();
expect(result.runExecutionData.resultData.runData).toStrictEqual({
[debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name],
});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
expect(result.workflow).toStrictEqual({
id: '1',
name: 'Test Workflow',
active: true,
connections: workflow.connectionsBySourceNode,
nodes: [],
pinData: workflow.pinData,
settings: workflow.settings,
staticData: workflow.staticData,
});
});
});

View file

@ -0,0 +1,300 @@
import type { DataRequestResponse, TaskDataRequestParams } from '@n8n/task-runner';
import { mock } from 'jest-mock-extended';
import type { IWorkflowExecuteAdditionalData } from 'n8n-workflow';
import { type INode, type INodeExecutionData } from 'n8n-workflow';
import { DataRequestResponseStripper } from '../data-request-response-stripper';
const triggerNode: INode = mock<INode>({
name: 'Trigger',
});
const debugHelperNode: INode = mock<INode>({
name: 'DebugHelper',
});
const codeNode: INode = mock<INode>({
name: 'Code',
});
const workflow: DataRequestResponse['workflow'] = mock<DataRequestResponse['workflow']>();
const debugHelperNodeOutItems: INodeExecutionData[] = [
{
json: {
uid: 'abb74fd4-bef2-4fae-9d53-ea24e9eb3032',
email: 'Dan.Schmidt31@yahoo.com',
firstname: 'Toni',
lastname: 'Schuster',
password: 'Q!D6C2',
},
pairedItem: {
item: 0,
},
},
];
const codeNodeInputItems: INodeExecutionData[] = debugHelperNodeOutItems;
const envProviderState: DataRequestResponse['envProviderState'] = mock<
DataRequestResponse['envProviderState']
>({
env: {},
isEnvAccessBlocked: false,
isProcessAvailable: true,
});
const additionalData = mock<IWorkflowExecuteAdditionalData>({
formWaitingBaseUrl: 'http://localhost:5678/form-waiting',
instanceBaseUrl: 'http://localhost:5678/',
restApiUrl: 'http://localhost:5678/rest',
variables: {},
webhookBaseUrl: 'http://localhost:5678/webhook',
webhookTestBaseUrl: 'http://localhost:5678/webhook-test',
webhookWaitingBaseUrl: 'http://localhost:5678/webhook-waiting',
executionId: '45844',
userId: '114984bc-44b3-4dd4-9b54-a4a8d34d51d5',
currentNodeParameters: undefined,
executionTimeoutTimestamp: undefined,
restartExecutionId: undefined,
});
/**
 * Drawn with https://asciiflow.com/#/
 * Task data for an execution of the following WF,
 * where Code is the node currently being executed:
 *
 * Trigger --> DebugHelper --> Code
 */
const taskData: DataRequestResponse = {
workflow,
inputData: {
main: [codeNodeInputItems],
},
itemIndex: 0,
activeNodeName: codeNode.name,
contextNodeName: codeNode.name,
defaultReturnRunIndex: -1,
mode: 'manual',
envProviderState,
node: codeNode,
runExecutionData: {
startData: {
destinationNode: codeNode.name,
runNodeFilter: [triggerNode.name, debugHelperNode.name, codeNode.name],
},
resultData: {
runData: {
[triggerNode.name]: [
{
hints: [],
startTime: 1730313407328,
executionTime: 1,
source: [],
executionStatus: 'success',
data: {
main: [[]],
},
},
],
[debugHelperNode.name]: [
{
hints: [],
startTime: 1730313407330,
executionTime: 1,
source: [
{
previousNode: triggerNode.name,
},
],
executionStatus: 'success',
data: {
main: [debugHelperNodeOutItems],
},
},
],
},
pinData: {},
},
executionData: {
contextData: {},
nodeExecutionStack: [],
metadata: {},
waitingExecution: {
[codeNode.name]: {
'0': {
main: [codeNodeInputItems],
},
},
},
waitingExecutionSource: {
[codeNode.name]: {
'0': {
main: [
{
previousNode: debugHelperNode.name,
},
],
},
},
},
},
},
runIndex: 0,
selfData: {},
siblingParameters: {},
connectionInputSource: {
main: [
{
previousNode: debugHelperNode.name,
previousNodeOutput: 0,
},
],
},
additionalData,
} as const;
describe('DataRequestResponseStripper', () => {
const allDataParam: TaskDataRequestParams = {
dataOfNodes: 'all',
env: true,
input: true,
prevNode: true,
};
const newRequestParam = (opts: Partial<TaskDataRequestParams>) => ({
...allDataParam,
...opts,
});
describe('all data', () => {
it('should build the runExecutionData as is when everything is requested', () => {
const dataRequestResponseBuilder = new DataRequestResponseStripper(taskData, allDataParam);
const { runExecutionData } = dataRequestResponseBuilder.strip();
expect(runExecutionData).toStrictEqual(taskData.runExecutionData);
});
});
describe('envProviderState', () => {
it("should filter out envProviderState when it's not requested", () => {
const dataRequestResponseBuilder = new DataRequestResponseStripper(
taskData,
newRequestParam({
env: false,
}),
);
const result = dataRequestResponseBuilder.strip();
expect(result.envProviderState).toStrictEqual({
env: {},
isEnvAccessBlocked: false,
isProcessAvailable: true,
});
});
});
describe('input data', () => {
const allExceptInputParam = newRequestParam({
input: false,
});
it('drops input data from result', () => {
const result = new DataRequestResponseStripper(taskData, allExceptInputParam).strip();
expect(result.inputData).toStrictEqual({});
});
it('drops input data from result', () => {
const result = new DataRequestResponseStripper(taskData, allExceptInputParam).strip();
expect(result.inputData).toStrictEqual({});
});
});
describe('nodes', () => {
it('should return empty run data when only Code node is requested', () => {
const result = new DataRequestResponseStripper(
taskData,
newRequestParam({ dataOfNodes: ['Code'], prevNode: false }),
).strip();
expect(result.runExecutionData.resultData.runData).toStrictEqual({});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
it('should return empty run data when only Code node is requested', () => {
const result = new DataRequestResponseStripper(
taskData,
newRequestParam({ dataOfNodes: [codeNode.name], prevNode: false }),
).strip();
expect(result.runExecutionData.resultData.runData).toStrictEqual({});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
it("should return only DebugHelper's data when only DebugHelper node is requested", () => {
const result = new DataRequestResponseStripper(
taskData,
newRequestParam({ dataOfNodes: [debugHelperNode.name], prevNode: false }),
).strip();
expect(result.runExecutionData.resultData.runData).toStrictEqual({
[debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name],
});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
it("should return DebugHelper's data when only prevNode node is requested", () => {
const result = new DataRequestResponseStripper(
taskData,
newRequestParam({ dataOfNodes: [], prevNode: true }),
).strip();
expect(result.runExecutionData.resultData.runData).toStrictEqual({
[debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name],
});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
});
describe('passthrough properties', () => {
test.each<Array<keyof DataRequestResponse>>([
['workflow'],
['connectionInputSource'],
['node'],
['runIndex'],
['itemIndex'],
['activeNodeName'],
['siblingParameters'],
['mode'],
['defaultReturnRunIndex'],
['selfData'],
['contextNodeName'],
['additionalData'],
])("it doesn't change %s", (propertyName) => {
const dataRequestResponseBuilder = new DataRequestResponseStripper(taskData, allDataParam);
const result = dataRequestResponseBuilder.strip();
expect(result[propertyName]).toBe(taskData[propertyName]);
});
});
});

View file

@ -1,60 +1,30 @@
import type {
EnvProviderState,
IExecuteData,
INodeExecutionData,
IPinData,
IRunData,
IRunExecutionData,
ITaskDataConnections,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowParameters,
} from 'n8n-workflow';
import type { DataRequestResponse, PartialAdditionalData, TaskData } from './task-manager';
import type { N8nMessage } from '../runner-types';
import type { DataRequestResponse, PartialAdditionalData, TaskData } from '@n8n/task-runner';
import type { IWorkflowExecuteAdditionalData, Workflow, WorkflowParameters } from 'n8n-workflow';
/**
* Builds the response to a data request coming from a Task Runner. Tries to minimize
* the amount of data that is sent to the runner by only providing what is requested.
* Transforms TaskData to DataRequestResponse. The main purpose of the
* transformation is to make sure there is no duplication in the data
* (e.g. connectionInputData and executeData.data can be derived from
* inputData).
*/
export class DataRequestResponseBuilder {
private requestedNodeNames = new Set<string>();
constructor(
private readonly taskData: TaskData,
private readonly requestParams: N8nMessage.ToRequester.TaskDataRequest['requestParams'],
) {
this.requestedNodeNames = new Set(requestParams.dataOfNodes);
if (this.requestParams.prevNode && this.requestParams.dataOfNodes !== 'all') {
this.requestedNodeNames.add(this.determinePrevNodeName());
}
}
/**
* Builds a response to the data request
*/
build(): DataRequestResponse {
const { taskData: td } = this;
buildFromTaskData(taskData: TaskData): DataRequestResponse {
return {
workflow: this.buildWorkflow(td.workflow),
connectionInputData: this.buildConnectionInputData(td.connectionInputData),
inputData: this.buildInputData(td.inputData),
itemIndex: td.itemIndex,
activeNodeName: td.activeNodeName,
contextNodeName: td.contextNodeName,
defaultReturnRunIndex: td.defaultReturnRunIndex,
mode: td.mode,
envProviderState: this.buildEnvProviderState(td.envProviderState),
node: td.node, // The current node being executed
runExecutionData: this.buildRunExecutionData(td.runExecutionData),
runIndex: td.runIndex,
selfData: td.selfData,
siblingParameters: td.siblingParameters,
executeData: this.buildExecuteData(td.executeData),
additionalData: this.buildAdditionalData(td.additionalData),
workflow: this.buildWorkflow(taskData.workflow),
inputData: taskData.inputData,
connectionInputSource: taskData.executeData?.source ?? null,
itemIndex: taskData.itemIndex,
activeNodeName: taskData.activeNodeName,
contextNodeName: taskData.contextNodeName,
defaultReturnRunIndex: taskData.defaultReturnRunIndex,
mode: taskData.mode,
envProviderState: taskData.envProviderState,
node: taskData.node,
runExecutionData: taskData.runExecutionData,
runIndex: taskData.runIndex,
selfData: taskData.selfData,
siblingParameters: taskData.siblingParameters,
additionalData: this.buildAdditionalData(taskData.additionalData),
};
}
@ -77,86 +47,6 @@ export class DataRequestResponseBuilder {
};
}
private buildExecuteData(executeData: IExecuteData | undefined): IExecuteData | undefined {
if (executeData === undefined) {
return undefined;
}
return {
node: executeData.node, // The current node being executed
data: this.requestParams.input ? executeData.data : {},
source: executeData.source,
};
}
private buildRunExecutionData(runExecutionData: IRunExecutionData): IRunExecutionData {
if (this.requestParams.dataOfNodes === 'all') {
return runExecutionData;
}
return {
startData: runExecutionData.startData,
resultData: {
error: runExecutionData.resultData.error,
lastNodeExecuted: runExecutionData.resultData.lastNodeExecuted,
metadata: runExecutionData.resultData.metadata,
runData: this.buildRunData(runExecutionData.resultData.runData),
pinData: this.buildPinData(runExecutionData.resultData.pinData),
},
executionData: runExecutionData.executionData
? {
// TODO: Figure out what these two are and whether they can be filtered
contextData: runExecutionData.executionData?.contextData,
nodeExecutionStack: runExecutionData.executionData.nodeExecutionStack,
metadata: runExecutionData.executionData.metadata,
waitingExecution: runExecutionData.executionData.waitingExecution,
waitingExecutionSource: runExecutionData.executionData.waitingExecutionSource,
}
: undefined,
};
}
private buildRunData(runData: IRunData): IRunData {
return this.filterObjectByNodeNames(runData);
}
private buildPinData(pinData: IPinData | undefined): IPinData | undefined {
return pinData ? this.filterObjectByNodeNames(pinData) : undefined;
}
private buildEnvProviderState(envProviderState: EnvProviderState): EnvProviderState {
if (this.requestParams.env) {
// In case `isEnvAccessBlocked` = true, the provider state has already sanitized
// the environment variables and we can return it as is.
return envProviderState;
}
return {
env: {},
isEnvAccessBlocked: envProviderState.isEnvAccessBlocked,
isProcessAvailable: envProviderState.isProcessAvailable,
};
}
private buildInputData(inputData: ITaskDataConnections): ITaskDataConnections {
if (this.requestParams.input) {
return inputData;
}
return {};
}
private buildConnectionInputData(
connectionInputData: INodeExecutionData[],
): INodeExecutionData[] {
if (this.requestParams.input) {
return connectionInputData;
}
return [];
}
private buildWorkflow(workflow: Workflow): Omit<WorkflowParameters, 'nodeTypes'> {
return {
id: workflow.id,
@ -169,37 +59,4 @@ export class DataRequestResponseBuilder {
staticData: workflow.staticData,
};
}
/**
* Assuming the given `obj` is an object where the keys are node names,
* filters the object to only include the node names that are requested.
*/
private filterObjectByNodeNames<T extends Record<string, unknown>>(obj: T): T {
if (this.requestParams.dataOfNodes === 'all') {
return obj;
}
const filteredObj: T = {} as T;
for (const nodeName in obj) {
if (!Object.prototype.hasOwnProperty.call(obj, nodeName)) {
continue;
}
if (this.requestedNodeNames.has(nodeName)) {
filteredObj[nodeName] = obj[nodeName];
}
}
return filteredObj;
}
private determinePrevNodeName(): string {
const sourceData = this.taskData.executeData?.source?.main?.[0];
if (!sourceData) {
return '';
}
return sourceData.previousNode;
}
}

View file

@@ -0,0 +1,131 @@
import type { DataRequestResponse, BrokerMessage } from '@n8n/task-runner';
import type {
EnvProviderState,
IPinData,
IRunData,
IRunExecutionData,
ITaskDataConnections,
} from 'n8n-workflow';
/**
* Strips data from data request response based on the specified parameters
*/
export class DataRequestResponseStripper {
private requestedNodeNames = new Set<string>();
constructor(
private readonly dataResponse: DataRequestResponse,
private readonly stripParams: BrokerMessage.ToRequester.TaskDataRequest['requestParams'],
) {
this.requestedNodeNames = new Set(stripParams.dataOfNodes);
if (this.stripParams.prevNode && this.stripParams.dataOfNodes !== 'all') {
this.requestedNodeNames.add(this.determinePrevNodeName());
}
}
/**
* Strips the data response down to what the task's request parameters ask for
*/
strip(): DataRequestResponse {
const { dataResponse: dr } = this;
return {
...dr,
inputData: this.stripInputData(dr.inputData),
envProviderState: this.stripEnvProviderState(dr.envProviderState),
runExecutionData: this.stripRunExecutionData(dr.runExecutionData),
};
}
private stripRunExecutionData(runExecutionData: IRunExecutionData): IRunExecutionData {
if (this.stripParams.dataOfNodes === 'all') {
return runExecutionData;
}
return {
startData: runExecutionData.startData,
resultData: {
error: runExecutionData.resultData.error,
lastNodeExecuted: runExecutionData.resultData.lastNodeExecuted,
metadata: runExecutionData.resultData.metadata,
runData: this.stripRunData(runExecutionData.resultData.runData),
pinData: this.stripPinData(runExecutionData.resultData.pinData),
},
executionData: runExecutionData.executionData
? {
// TODO: Figure out what these two are and can they be stripped
contextData: runExecutionData.executionData?.contextData,
nodeExecutionStack: runExecutionData.executionData.nodeExecutionStack,
metadata: runExecutionData.executionData.metadata,
waitingExecution: runExecutionData.executionData.waitingExecution,
waitingExecutionSource: runExecutionData.executionData.waitingExecutionSource,
}
: undefined,
};
}
private stripRunData(runData: IRunData): IRunData {
return this.filterObjectByNodeNames(runData);
}
private stripPinData(pinData: IPinData | undefined): IPinData | undefined {
return pinData ? this.filterObjectByNodeNames(pinData) : undefined;
}
private stripEnvProviderState(envProviderState: EnvProviderState): EnvProviderState {
if (this.stripParams.env) {
// In case `isEnvAccessBlocked` = true, the provider state has already sanitized
// the environment variables and we can return it as is.
return envProviderState;
}
return {
env: {},
isEnvAccessBlocked: envProviderState.isEnvAccessBlocked,
isProcessAvailable: envProviderState.isProcessAvailable,
};
}
private stripInputData(inputData: ITaskDataConnections): ITaskDataConnections {
if (this.stripParams.input) {
return inputData;
}
return {};
}
/**
* Assuming the given `obj` is an object where the keys are node names,
* filters the object to only include the node names that are requested.
*/
private filterObjectByNodeNames<T extends Record<string, unknown>>(obj: T): T {
if (this.stripParams.dataOfNodes === 'all') {
return obj;
}
const filteredObj: T = {} as T;
for (const nodeName in obj) {
if (!Object.prototype.hasOwnProperty.call(obj, nodeName)) {
continue;
}
if (this.requestedNodeNames.has(nodeName)) {
filteredObj[nodeName] = obj[nodeName];
}
}
return filteredObj;
}
private determinePrevNodeName(): string {
const sourceData = this.dataResponse.connectionInputSource?.main?.[0];
if (!sourceData) {
return '';
}
return sourceData.previousNode;
}
}
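For orientation, a minimal usage sketch of the stripper, assuming a fully built `DataRequestResponse` is already available; the shape of `requestParams` (`dataOfNodes`, `prevNode`, `input`, `env`) is inferred from how `stripParams` is read in the class above rather than from the published type, and the node name is hypothetical.

```typescript
import type { BrokerMessage, DataRequestResponse } from '@n8n/task-runner';

import { DataRequestResponseStripper } from './data-request-response-stripper';

// Placeholder for a fully built response, normally produced by
// DataRequestResponseBuilder.buildFromTaskData() on the requester side.
declare const fullResponse: DataRequestResponse;

// Hypothetical request: keep the input items and the run data of a single node.
// The field names mirror how `stripParams` is read in the class above.
const requestParams: BrokerMessage.ToRequester.TaskDataRequest['requestParams'] = {
  dataOfNodes: ['HTTP Request'], // only these keys survive filterObjectByNodeNames()
  prevNode: true, // also keep the node that fed the current one
  input: true, // keep inputData instead of replacing it with {}
  env: false, // blank out env vars unless access is already blocked
};

const stripped = new DataRequestResponseStripper(fullResponse, requestParams).strip();
// stripped.runExecutionData.resultData.runData now only contains the requested
// node names, and stripped.envProviderState.env is {} because env was not requested.
```

Stripping happens on the requester side, so only the data a runner actually asked for is sent back over the broker.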

View file

@@ -1,17 +1,20 @@
import Container from 'typedi';
import type { RequesterMessage } from '@n8n/task-runner';
import Container, { Service } from 'typedi';
import { NodeTypes } from '@/node-types';
import { TaskManager } from './task-manager';
import type { RequesterMessage } from '../runner-types';
import type { RequesterMessageCallback } from '../task-broker.service';
import { TaskBroker } from '../task-broker.service';
@Service()
export class LocalTaskManager extends TaskManager {
taskBroker: TaskBroker;
id: string = 'single-main';
constructor() {
super();
constructor(nodeTypes: NodeTypes) {
super(nodeTypes);
this.registerRequester();
}
@@ -24,7 +27,7 @@ export class LocalTaskManager extends TaskManager {
);
}
sendMessage(message: RequesterMessage.ToN8n.All) {
sendMessage(message: RequesterMessage.ToBroker.All) {
void this.taskBroker.onRequesterMessage(this.id, message);
}
}

View file

@@ -1,30 +1,30 @@
import {
type EnvProviderState,
type IExecuteFunctions,
type Workflow,
type IRunExecutionData,
type INodeExecutionData,
type ITaskDataConnections,
type INode,
type WorkflowParameters,
type INodeParameters,
type WorkflowExecuteMode,
type IExecuteData,
type IDataObject,
type IWorkflowExecuteAdditionalData,
type Result,
createResultOk,
createResultError,
import { TaskRunnersConfig } from '@n8n/config';
import type { TaskResultData, RequesterMessage, BrokerMessage, TaskData } from '@n8n/task-runner';
import { DataRequestResponseReconstruct, RPC_ALLOW_LIST } from '@n8n/task-runner';
import type {
EnvProviderState,
IExecuteFunctions,
Workflow,
IRunExecutionData,
INodeExecutionData,
ITaskDataConnections,
INode,
INodeParameters,
WorkflowExecuteMode,
IExecuteData,
IDataObject,
IWorkflowExecuteAdditionalData,
Result,
} from 'n8n-workflow';
import { createResultOk, createResultError } from 'n8n-workflow';
import { nanoid } from 'nanoid';
import * as a from 'node:assert/strict';
import Container, { Service } from 'typedi';
import { NodeTypes } from '@/node-types';
import { DataRequestResponseBuilder } from './data-request-response-builder';
import {
RPC_ALLOW_LIST,
type TaskResultData,
type N8nMessage,
type RequesterMessage,
} from '../runner-types';
import { DataRequestResponseStripper } from './data-request-response-stripper';
export type RequestAccept = (jobId: string) => void;
export type RequestReject = (reason: string) => void;
@@ -32,62 +32,6 @@ export type RequestReject = (reason: string) => void;
export type TaskAccept = (data: TaskResultData) => void;
export type TaskReject = (error: unknown) => void;
export interface TaskData {
executeFunctions: IExecuteFunctions;
inputData: ITaskDataConnections;
node: INode;
workflow: Workflow;
runExecutionData: IRunExecutionData;
runIndex: number;
itemIndex: number;
activeNodeName: string;
connectionInputData: INodeExecutionData[];
siblingParameters: INodeParameters;
mode: WorkflowExecuteMode;
envProviderState: EnvProviderState;
executeData?: IExecuteData;
defaultReturnRunIndex: number;
selfData: IDataObject;
contextNodeName: string;
additionalData: IWorkflowExecuteAdditionalData;
}
export interface PartialAdditionalData {
executionId?: string;
restartExecutionId?: string;
restApiUrl: string;
instanceBaseUrl: string;
formWaitingBaseUrl: string;
webhookBaseUrl: string;
webhookWaitingBaseUrl: string;
webhookTestBaseUrl: string;
currentNodeParameters?: INodeParameters;
executionTimeoutTimestamp?: number;
userId?: string;
variables: IDataObject;
}
export interface DataRequestResponse {
workflow: Omit<WorkflowParameters, 'nodeTypes'>;
inputData: ITaskDataConnections;
node: INode;
runExecutionData: IRunExecutionData;
runIndex: number;
itemIndex: number;
activeNodeName: string;
connectionInputData: INodeExecutionData[];
siblingParameters: INodeParameters;
mode: WorkflowExecuteMode;
envProviderState: EnvProviderState;
executeData?: IExecuteData;
defaultReturnRunIndex: number;
selfData: IDataObject;
contextNodeName: string;
additionalData: PartialAdditionalData;
}
export interface TaskRequest {
requestId: string;
taskType: string;
@@ -105,7 +49,8 @@ interface ExecuteFunctionObject {
[name: string]: ((...args: unknown[]) => unknown) | ExecuteFunctionObject;
}
export class TaskManager {
@Service()
export abstract class TaskManager {
requestAcceptRejects: Map<string, { accept: RequestAccept; reject: RequestReject }> = new Map();
taskAcceptRejects: Map<string, { accept: TaskAccept; reject: TaskReject }> = new Map();
@@ -114,6 +59,12 @@ export class TaskManager {
tasks: Map<string, Task> = new Map();
private readonly runnerConfig = Container.get(TaskRunnersConfig);
private readonly dataResponseBuilder = new DataRequestResponseBuilder();
constructor(private readonly nodeTypes: NodeTypes) {}
async startTask<TData, TError>(
additionalData: IWorkflowExecuteAdditionalData,
taskType: string,
@@ -219,9 +170,9 @@ export class TaskManager {
}
}
sendMessage(_message: RequesterMessage.ToN8n.All) {}
sendMessage(_message: RequesterMessage.ToBroker.All) {}
onMessage(message: N8nMessage.ToRequester.All) {
onMessage(message: BrokerMessage.ToRequester.All) {
switch (message.type) {
case 'broker:taskready':
this.taskReady(message.requestId, message.taskId);
@@ -235,6 +186,9 @@ export class TaskManager {
case 'broker:taskdatarequest':
this.sendTaskData(message.taskId, message.requestId, message.requestParams);
break;
case 'broker:nodetypesrequest':
this.sendNodeTypes(message.taskId, message.requestId, message.requestParams);
break;
case 'broker:rpc':
void this.handleRpc(message.taskId, message.callId, message.name, message.params);
break;
@@ -282,7 +236,7 @@ export class TaskManager {
sendTaskData(
taskId: string,
requestId: string,
requestParams: N8nMessage.ToRequester.TaskDataRequest['requestParams'],
requestParams: BrokerMessage.ToRequester.TaskDataRequest['requestParams'],
) {
const job = this.tasks.get(taskId);
if (!job) {
@@ -290,21 +244,52 @@ export class TaskManager {
return;
}
const dataRequestResponseBuilder = new DataRequestResponseBuilder(job.data, requestParams);
const requestedData = dataRequestResponseBuilder.build();
const dataRequestResponse = this.dataResponseBuilder.buildFromTaskData(job.data);
if (this.runnerConfig.assertDeduplicationOutput) {
const reconstruct = new DataRequestResponseReconstruct();
a.deepStrictEqual(
reconstruct.reconstructConnectionInputData(dataRequestResponse.inputData),
job.data.connectionInputData,
);
a.deepStrictEqual(
reconstruct.reconstructExecuteData(dataRequestResponse),
job.data.executeData,
);
}
const strippedData = new DataRequestResponseStripper(
dataRequestResponse,
requestParams,
).strip();
this.sendMessage({
type: 'requester:taskdataresponse',
taskId,
requestId,
data: requestedData,
data: strippedData,
});
}
sendNodeTypes(
taskId: string,
requestId: string,
neededNodeTypes: BrokerMessage.ToRequester.NodeTypesRequest['requestParams'],
) {
const nodeTypes = this.nodeTypes.getNodeTypeDescriptions(neededNodeTypes);
this.sendMessage({
type: 'requester:nodetypesresponse',
taskId,
requestId,
nodeTypes,
});
}
async handleRpc(
taskId: string,
callId: string,
name: N8nMessage.ToRequester.RPC['name'],
name: BrokerMessage.ToRequester.RPC['name'],
params: unknown[],
) {
const job = this.tasks.get(taskId);

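Pulling the truncated hunk above together: `sendTaskData` now builds the full response once, optionally sanity-checks that the deduplicated fields can be reconstructed (behind `runnerConfig.assertDeduplicationOutput`), and only then strips it per request. A condensed sketch of that flow, with `buildTaskDataResponse` as a hypothetical free-function stand-in for the method body:

```typescript
import { strict as a } from 'node:assert';

import type { BrokerMessage, DataRequestResponse, TaskData } from '@n8n/task-runner';
import { DataRequestResponseReconstruct } from '@n8n/task-runner';

import { DataRequestResponseBuilder } from './data-request-response-builder';
import { DataRequestResponseStripper } from './data-request-response-stripper';

/**
 * Hypothetical free-function rendering of the sendTaskData body above:
 * build the full response once, optionally verify that the deduplicated
 * fields can be reconstructed, then strip it down to what was requested.
 * `assertDeduplication` stands in for `runnerConfig.assertDeduplicationOutput`.
 */
function buildTaskDataResponse(
  taskData: TaskData,
  requestParams: BrokerMessage.ToRequester.TaskDataRequest['requestParams'],
  assertDeduplication = false,
): DataRequestResponse {
  const full = new DataRequestResponseBuilder().buildFromTaskData(taskData);

  if (assertDeduplication) {
    const reconstruct = new DataRequestResponseReconstruct();
    a.deepStrictEqual(
      reconstruct.reconstructConnectionInputData(full.inputData),
      taskData.connectionInputData,
    );
    a.deepStrictEqual(reconstruct.reconstructExecuteData(full), taskData.executeData);
  }

  return new DataRequestResponseStripper(full, requestParams).strip();
}
```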
View file

@@ -54,7 +54,7 @@ export class TaskRunnerModule {
private async loadTaskManager() {
const { TaskManager } = await import('@/runners/task-managers/task-manager');
const { LocalTaskManager } = await import('@/runners/task-managers/local-task-manager');
this.taskManager = new LocalTaskManager();
this.taskManager = Container.get(LocalTaskManager);
Container.set(TaskManager, this.taskManager);
}

View file

@@ -92,7 +92,7 @@ export class TaskRunnerProcess extends TypedEmitter<TaskRunnerProcessEventMap> {
}
startNode(grantToken: string, n8nUri: string) {
const startScript = require.resolve('@n8n/task-runner');
const startScript = require.resolve('@n8n/task-runner/start');
return spawn('node', [startScript], {
env: this.getProcessEnvVars(grantToken, n8nUri),

View file

@@ -20,7 +20,7 @@ export class OrchestrationService {
private subscriber: Subscriber;
protected isInitialized = false;
isInitialized = false;
private isMultiMainSetupLicensed = false;

View file

@@ -1,183 +0,0 @@
import { GlobalConfig } from '@n8n/config';
import { BinaryDataService, InstanceSettings } from 'n8n-core';
import { jsonStringify } from 'n8n-workflow';
import { Service } from 'typedi';
import { inTest, TIME } from '@/constants';
import { ExecutionRepository } from '@/databases/repositories/execution.repository';
import { OnShutdown } from '@/decorators/on-shutdown';
import { Logger } from '@/logging/logger.service';
import { OrchestrationService } from './orchestration.service';
@Service()
export class PruningService {
private hardDeletionBatchSize = 100;
private rates: Record<string, number> = {
softDeletion: this.globalConfig.pruning.softDeleteInterval * TIME.MINUTE,
hardDeletion: this.globalConfig.pruning.hardDeleteInterval * TIME.MINUTE,
};
public softDeletionInterval: NodeJS.Timer | undefined;
public hardDeletionTimeout: NodeJS.Timeout | undefined;
private isShuttingDown = false;
constructor(
private readonly logger: Logger,
private readonly instanceSettings: InstanceSettings,
private readonly executionRepository: ExecutionRepository,
private readonly binaryDataService: BinaryDataService,
private readonly orchestrationService: OrchestrationService,
private readonly globalConfig: GlobalConfig,
) {}
/**
* @important Requires `OrchestrationService` to be initialized.
*/
init() {
const { isLeader } = this.instanceSettings;
const { isMultiMainSetupEnabled } = this.orchestrationService;
if (isLeader) this.startPruning();
if (isMultiMainSetupEnabled) {
this.orchestrationService.multiMainSetup.on('leader-takeover', () => this.startPruning());
this.orchestrationService.multiMainSetup.on('leader-stepdown', () => this.stopPruning());
}
}
private isPruningEnabled() {
const { instanceType, isFollower } = this.instanceSettings;
if (!this.globalConfig.pruning.isEnabled || inTest || instanceType !== 'main') {
return false;
}
if (this.globalConfig.multiMainSetup.enabled && instanceType === 'main' && isFollower) {
return false;
}
return true;
}
/**
* @important Call this method only after DB migrations have completed.
*/
startPruning() {
if (!this.isPruningEnabled()) return;
if (this.isShuttingDown) {
this.logger.warn('[Pruning] Cannot start pruning while shutting down');
return;
}
this.logger.debug('[Pruning] Starting soft-deletion and hard-deletion timers');
this.setSoftDeletionInterval();
this.scheduleHardDeletion();
}
stopPruning() {
if (!this.isPruningEnabled()) return;
this.logger.debug('[Pruning] Removing soft-deletion and hard-deletion timers');
clearInterval(this.softDeletionInterval);
clearTimeout(this.hardDeletionTimeout);
}
private setSoftDeletionInterval(rateMs = this.rates.softDeletion) {
const when = [rateMs / TIME.MINUTE, 'min'].join(' ');
this.softDeletionInterval = setInterval(
async () => await this.softDeleteOnPruningCycle(),
this.rates.softDeletion,
);
this.logger.debug(`[Pruning] Soft-deletion scheduled every ${when}`);
}
private scheduleHardDeletion(rateMs = this.rates.hardDeletion) {
const when = [rateMs / TIME.MINUTE, 'min'].join(' ');
this.hardDeletionTimeout = setTimeout(() => {
this.hardDeleteOnPruningCycle()
.then((rate) => this.scheduleHardDeletion(rate))
.catch((error) => {
this.scheduleHardDeletion(1 * TIME.SECOND);
const errorMessage =
error instanceof Error
? error.message
: jsonStringify(error, { replaceCircularRefs: true });
this.logger.error('[Pruning] Failed to hard-delete executions', { errorMessage });
});
}, rateMs);
this.logger.debug(`[Pruning] Hard-deletion scheduled for next ${when}`);
}
/**
* Mark executions as deleted based on age and count, in a pruning cycle.
*/
async softDeleteOnPruningCycle() {
this.logger.debug('[Pruning] Starting soft-deletion of executions');
const result = await this.executionRepository.softDeletePrunableExecutions();
if (result.affected === 0) {
this.logger.debug('[Pruning] Found no executions to soft-delete');
return;
}
this.logger.debug('[Pruning] Soft-deleted executions', { count: result.affected });
}
@OnShutdown()
shutdown(): void {
this.isShuttingDown = true;
this.stopPruning();
}
/**
* Permanently remove all soft-deleted executions and their binary data, in a pruning cycle.
* @return Delay in ms after which the next cycle should be started
*/
private async hardDeleteOnPruningCycle() {
const ids = await this.executionRepository.hardDeleteSoftDeletedExecutions();
const executionIds = ids.map((o) => o.executionId);
if (executionIds.length === 0) {
this.logger.debug('[Pruning] Found no executions to hard-delete');
return this.rates.hardDeletion;
}
try {
this.logger.debug('[Pruning] Starting hard-deletion of executions', { executionIds });
await this.binaryDataService.deleteMany(ids);
await this.executionRepository.deleteByIds(executionIds);
this.logger.debug('[Pruning] Hard-deleted executions', { executionIds });
} catch (error) {
this.logger.error('[Pruning] Failed to hard-delete executions', {
executionIds,
error: error instanceof Error ? error.message : `${error}`,
});
}
/**
* For next batch, speed up hard-deletion cycle in high-volume case
* to prevent high concurrency from causing duplicate deletions.
*/
const isHighVolume = executionIds.length >= this.hardDeletionBatchSize;
return isHighVolume ? 1 * TIME.SECOND : this.rates.hardDeletion;
}
}

View file

@@ -0,0 +1,208 @@
import type { PruningConfig } from '@n8n/config';
import { mock } from 'jest-mock-extended';
import type { InstanceSettings } from 'n8n-core';
import type { MultiMainSetup } from '@/scaling/multi-main-setup.ee';
import type { OrchestrationService } from '@/services/orchestration.service';
import { mockLogger } from '@test/mocking';
import { PruningService } from '../pruning.service';
jest.mock('@/db', () => ({
connectionState: { migrated: true },
}));
describe('PruningService', () => {
describe('init', () => {
it('should start pruning on main instance that is the leader', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: true }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock(),
);
const startPruningSpy = jest.spyOn(pruningService, 'startPruning');
pruningService.init();
expect(startPruningSpy).toHaveBeenCalled();
});
it('should not start pruning on main instance that is a follower', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: false }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock(),
);
const startPruningSpy = jest.spyOn(pruningService, 'startPruning');
pruningService.init();
expect(startPruningSpy).not.toHaveBeenCalled();
});
it('should register leadership events if main on multi-main setup', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: true }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>({ on: jest.fn() }),
}),
mock(),
);
pruningService.init();
// @ts-expect-error Private method
expect(pruningService.orchestrationService.multiMainSetup.on).toHaveBeenCalledWith(
'leader-takeover',
expect.any(Function),
);
// @ts-expect-error Private method
expect(pruningService.orchestrationService.multiMainSetup.on).toHaveBeenCalledWith(
'leader-stepdown',
expect.any(Function),
);
});
});
describe('isEnabled', () => {
it('should return `true` based on config if leader main', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: true, instanceType: 'main' }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<PruningConfig>({ isEnabled: true }),
);
expect(pruningService.isEnabled).toBe(true);
});
it('should return `false` based on config if leader main', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: true, instanceType: 'main' }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<PruningConfig>({ isEnabled: false }),
);
expect(pruningService.isEnabled).toBe(false);
});
it('should return `false` if non-main even if config is enabled', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: false, instanceType: 'worker' }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<PruningConfig>({ isEnabled: true }),
);
expect(pruningService.isEnabled).toBe(false);
});
it('should return `false` if follower main even if config is enabled', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: false, isFollower: true, instanceType: 'main' }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<PruningConfig>({ isEnabled: true }),
);
expect(pruningService.isEnabled).toBe(false);
});
});
describe('startPruning', () => {
it('should not start pruning if service is disabled', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: true, instanceType: 'main' }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<PruningConfig>({ isEnabled: false }),
);
const scheduleRollingSoftDeletionsSpy = jest.spyOn(
pruningService,
// @ts-expect-error Private method
'scheduleRollingSoftDeletions',
);
// @ts-expect-error Private method
const scheduleNextHardDeletionSpy = jest.spyOn(pruningService, 'scheduleNextHardDeletion');
pruningService.startPruning();
expect(scheduleRollingSoftDeletionsSpy).not.toHaveBeenCalled();
expect(scheduleNextHardDeletionSpy).not.toHaveBeenCalled();
});
it('should start pruning if service is enabled and DB is migrated', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: true, instanceType: 'main' }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<PruningConfig>({ isEnabled: true }),
);
const scheduleRollingSoftDeletionsSpy = jest
// @ts-expect-error Private method
.spyOn(pruningService, 'scheduleRollingSoftDeletions')
.mockImplementation();
const scheduleNextHardDeletionSpy = jest
// @ts-expect-error Private method
.spyOn(pruningService, 'scheduleNextHardDeletion')
.mockImplementation();
pruningService.startPruning();
expect(scheduleRollingSoftDeletionsSpy).toHaveBeenCalled();
expect(scheduleNextHardDeletionSpy).toHaveBeenCalled();
});
});
});

View file

@@ -0,0 +1,156 @@
import { PruningConfig } from '@n8n/config';
import { BinaryDataService, InstanceSettings } from 'n8n-core';
import { ensureError } from 'n8n-workflow';
import { strict } from 'node:assert';
import { Service } from 'typedi';
import { Time } from '@/constants';
import { ExecutionRepository } from '@/databases/repositories/execution.repository';
import { connectionState as dbConnectionState } from '@/db';
import { OnShutdown } from '@/decorators/on-shutdown';
import { Logger } from '@/logging/logger.service';
import { OrchestrationService } from '../orchestration.service';
/**
* Responsible for pruning executions from the database and their associated binary data
* from the filesystem, on a rolling basis. By default we soft-delete execution rows
* every cycle and hard-delete them and their binary data every 4th cycle.
*/
@Service()
export class PruningService {
/** Timer for soft-deleting executions on a rolling basis. */
private softDeletionInterval: NodeJS.Timer | undefined;
/** Timeout for next hard-deletion of soft-deleted executions. */
private hardDeletionTimeout: NodeJS.Timeout | undefined;
private readonly rates = {
softDeletion: this.pruningConfig.softDeleteInterval * Time.minutes.toMilliseconds,
hardDeletion: this.pruningConfig.hardDeleteInterval * Time.minutes.toMilliseconds,
};
/** Max number of executions to hard-delete in a cycle. */
private readonly batchSize = 100;
private isShuttingDown = false;
constructor(
private readonly logger: Logger,
private readonly instanceSettings: InstanceSettings,
private readonly executionRepository: ExecutionRepository,
private readonly binaryDataService: BinaryDataService,
private readonly orchestrationService: OrchestrationService,
private readonly pruningConfig: PruningConfig,
) {
this.logger = this.logger.scoped('pruning');
}
init() {
strict(this.instanceSettings.instanceRole !== 'unset', 'Instance role is not set');
if (this.instanceSettings.isLeader) this.startPruning();
if (this.orchestrationService.isMultiMainSetupEnabled) {
this.orchestrationService.multiMainSetup.on('leader-takeover', () => this.startPruning());
this.orchestrationService.multiMainSetup.on('leader-stepdown', () => this.stopPruning());
}
}
get isEnabled() {
return (
this.pruningConfig.isEnabled &&
this.instanceSettings.instanceType === 'main' &&
this.instanceSettings.isLeader
);
}
startPruning() {
if (!this.isEnabled || !dbConnectionState.migrated || this.isShuttingDown) return;
this.scheduleRollingSoftDeletions();
this.scheduleNextHardDeletion();
}
stopPruning() {
if (!this.isEnabled) return;
clearInterval(this.softDeletionInterval);
clearTimeout(this.hardDeletionTimeout);
}
private scheduleRollingSoftDeletions(rateMs = this.rates.softDeletion) {
this.softDeletionInterval = setInterval(
async () => await this.softDelete(),
this.rates.softDeletion,
);
this.logger.debug(`Soft-deletion every ${rateMs * Time.milliseconds.toMinutes} minutes`);
}
private scheduleNextHardDeletion(rateMs = this.rates.hardDeletion) {
this.hardDeletionTimeout = setTimeout(() => {
this.hardDelete()
.then((rate) => this.scheduleNextHardDeletion(rate))
.catch((error) => {
this.scheduleNextHardDeletion(1_000);
this.logger.error('Failed to hard-delete executions', { error: ensureError(error) });
});
}, rateMs);
this.logger.debug(`Hard-deletion in next ${rateMs * Time.milliseconds.toMinutes} minutes`);
}
/** Soft-delete executions based on max age and/or max count. */
async softDelete() {
const result = await this.executionRepository.softDeletePrunableExecutions();
if (result.affected === 0) {
this.logger.debug('Found no executions to soft-delete');
return;
}
this.logger.debug('Soft-deleted executions', { count: result.affected });
}
@OnShutdown()
shutdown(): void {
this.isShuttingDown = true;
this.stopPruning();
}
/**
* Delete all soft-deleted executions and their binary data.
*
* @returns Delay in milliseconds until next hard-deletion
*/
private async hardDelete(): Promise<number> {
const ids = await this.executionRepository.findSoftDeletedExecutions();
const executionIds = ids.map((o) => o.executionId);
if (executionIds.length === 0) {
this.logger.debug('Found no executions to hard-delete');
return this.rates.hardDeletion;
}
try {
await this.binaryDataService.deleteMany(ids);
await this.executionRepository.deleteByIds(executionIds);
this.logger.debug('Hard-deleted executions', { executionIds });
} catch (error) {
this.logger.error('Failed to hard-delete executions', {
executionIds,
error: ensureError(error),
});
}
// if high volume, speed up next hard-deletion
if (executionIds.length >= this.batchSize) return 1 * Time.seconds.toMilliseconds;
return this.rates.hardDeletion;
}
}
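The scheduling behind this service is worth spelling out: soft-deletion runs on a fixed interval, while hard-deletion is a self-rescheduling timeout whose next delay is whatever the previous run returned, dropping to roughly one second after a full batch or a failure. A standalone sketch of that pattern follows; the interval values and the stand-in function bodies are illustrative only, not n8n defaults.

```typescript
// Standalone sketch of the scheduling pattern: a fixed interval for soft-deletion
// plus a self-rescheduling timeout for hard-deletion whose next delay is decided
// by the previous run. Interval values are illustrative, not n8n defaults.
const SOFT_DELETE_EVERY_MS = 60 * 60 * 1000;
const HARD_DELETE_EVERY_MS = 15 * 60 * 1000;
const BATCH_SIZE = 100;

async function softDeleteOnce(): Promise<void> {
  // stand-in for executionRepository.softDeletePrunableExecutions()
}

async function hardDeleteOnce(): Promise<number> {
  const deletedCount = 0; // stand-in for deleting soft-deleted rows and their binary data

  // A full batch suggests a backlog, so come back almost immediately.
  return deletedCount >= BATCH_SIZE ? 1_000 : HARD_DELETE_EVERY_MS;
}

setInterval(() => void softDeleteOnce(), SOFT_DELETE_EVERY_MS);

function scheduleNextHardDeletion(delayMs: number): void {
  setTimeout(() => {
    hardDeleteOnce()
      .then((nextDelayMs) => scheduleNextHardDeletion(nextDelayMs))
      .catch(() => scheduleNextHardDeletion(1_000)); // retry soon on failure
  }, delayMs);
}

scheduleNextHardDeletion(HARD_DELETE_EVERY_MS);
```

Returning the next delay from the hard-deletion run, rather than using a second `setInterval`, keeps cycles from overlapping and lets a busy instance tighten the loop until the backlog clears.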

View file

@@ -1,4 +1,4 @@
import { GlobalConfig } from '@n8n/config';
import { PruningConfig } from '@n8n/config';
import { mock } from 'jest-mock-extended';
import { BinaryDataService, InstanceSettings } from 'n8n-core';
import type { ExecutionStatus } from 'n8n-workflow';
@@ -8,8 +8,7 @@ import { TIME } from '@/constants';
import type { ExecutionEntity } from '@/databases/entities/execution-entity';
import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
import { ExecutionRepository } from '@/databases/repositories/execution.repository';
import { Logger } from '@/logging/logger.service';
import { PruningService } from '@/services/pruning.service';
import { PruningService } from '@/services/pruning/pruning.service';
import {
annotateExecution,
@@ -18,7 +17,7 @@ import {
} from './shared/db/executions';
import { createWorkflow } from './shared/db/workflows';
import * as testDb from './shared/test-db';
import { mockInstance } from '../shared/mocking';
import { mockInstance, mockLogger } from '../shared/mocking';
describe('softDeleteOnPruningCycle()', () => {
let pruningService: PruningService;
@@ -28,19 +27,19 @@ describe('softDeleteOnPruningCycle()', () => {
const now = new Date();
const yesterday = new Date(Date.now() - TIME.DAY);
let workflow: WorkflowEntity;
let globalConfig: GlobalConfig;
let pruningConfig: PruningConfig;
beforeAll(async () => {
await testDb.init();
globalConfig = Container.get(GlobalConfig);
pruningConfig = Container.get(PruningConfig);
pruningService = new PruningService(
mockInstance(Logger),
mockLogger(),
instanceSettings,
Container.get(ExecutionRepository),
mockInstance(BinaryDataService),
mock(),
globalConfig,
pruningConfig,
);
workflow = await createWorkflow();
@@ -63,8 +62,8 @@ describe('softDeleteOnPruningCycle()', () => {
describe('when EXECUTIONS_DATA_PRUNE_MAX_COUNT is set', () => {
beforeAll(() => {
globalConfig.pruning.maxAge = 336;
globalConfig.pruning.maxCount = 1;
pruningConfig.maxAge = 336;
pruningConfig.maxCount = 1;
});
test('should mark as deleted based on EXECUTIONS_DATA_PRUNE_MAX_COUNT', async () => {
@@ -74,7 +73,7 @@ describe('softDeleteOnPruningCycle()', () => {
await createSuccessfulExecution(workflow),
];
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@@ -93,7 +92,7 @@ describe('softDeleteOnPruningCycle()', () => {
await createSuccessfulExecution(workflow),
];
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@@ -114,7 +113,7 @@ describe('softDeleteOnPruningCycle()', () => {
await createSuccessfulExecution(workflow),
];
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@@ -133,7 +132,7 @@ describe('softDeleteOnPruningCycle()', () => {
await createSuccessfulExecution(workflow),
];
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@@ -151,7 +150,7 @@ describe('softDeleteOnPruningCycle()', () => {
await annotateExecution(executions[0].id, { vote: 'up' }, [workflow.id]);
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@@ -164,8 +163,8 @@ describe('softDeleteOnPruningCycle()', () => {
describe('when EXECUTIONS_DATA_MAX_AGE is set', () => {
beforeAll(() => {
globalConfig.pruning.maxAge = 1;
globalConfig.pruning.maxCount = 0;
pruningConfig.maxAge = 1;
pruningConfig.maxCount = 0;
});
test('should mark as deleted based on EXECUTIONS_DATA_MAX_AGE', async () => {
@ -180,7 +179,7 @@ describe('softDeleteOnPruningCycle()', () => {
),
];
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@@ -204,7 +203,7 @@ describe('softDeleteOnPruningCycle()', () => {
await createSuccessfulExecution(workflow),
];
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@@ -222,7 +221,7 @@ describe('softDeleteOnPruningCycle()', () => {
])('should prune %s executions', async (status, attributes) => {
const execution = await createExecution({ status, ...attributes }, workflow);
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@@ -240,7 +239,7 @@ describe('softDeleteOnPruningCycle()', () => {
await createSuccessfulExecution(workflow),
];
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([
@@ -267,7 +266,7 @@ describe('softDeleteOnPruningCycle()', () => {
await annotateExecution(executions[0].id, { vote: 'up' }, [workflow.id]);
await pruningService.softDeleteOnPruningCycle();
await pruningService.softDelete();
const result = await findAllExecutions();
expect(result).toEqual([

View file

@@ -8,6 +8,7 @@ import { TaskRunnerWsServer } from '../../../src/runners/runner-ws-server';
describe('TaskRunnerModule in internal_childprocess mode', () => {
const runnerConfig = Container.get(TaskRunnersConfig);
runnerConfig.port = 0; // Random port
runnerConfig.mode = 'internal_childprocess';
const module = Container.get(TaskRunnerModule);

View file

@@ -1,6 +1,6 @@
{
"name": "n8n-core",
"version": "1.66.0",
"version": "1.67.0",
"description": "Core functionality of n8n",
"main": "dist/index",
"types": "dist/index.d.ts",

View file

@@ -76,7 +76,6 @@ import type {
IPollFunctions,
IRequestOptions,
IRunExecutionData,
ISourceData,
ITaskData,
ITaskDataConnections,
ITriggerFunctions,
@@ -166,7 +165,13 @@ import { extractValue } from './ExtractValue';
import { InstanceSettings } from './InstanceSettings';
import type { ExtendedValidationResult, IResponseError } from './Interfaces';
// eslint-disable-next-line import/no-cycle
import { HookContext, PollContext, TriggerContext, WebhookContext } from './node-execution-context';
import {
ExecuteSingleContext,
HookContext,
PollContext,
TriggerContext,
WebhookContext,
} from './node-execution-context';
import { getSecretsProxy } from './Secrets';
import { SSHClientsManager } from './SSHClientsManager';
@@ -4180,145 +4185,19 @@ export function getExecuteSingleFunctions(
mode: WorkflowExecuteMode,
abortSignal?: AbortSignal,
): IExecuteSingleFunctions {
return ((workflow, runExecutionData, connectionInputData, inputData, node, itemIndex) => {
return {
...getCommonWorkflowFunctions(workflow, node, additionalData),
...executionCancellationFunctions(abortSignal),
continueOnFail: () => continueOnFail(node),
evaluateExpression: (expression: string, evaluateItemIndex: number | undefined) => {
evaluateItemIndex = evaluateItemIndex === undefined ? itemIndex : evaluateItemIndex;
return workflow.expression.resolveSimpleParameterValue(
`=${expression}`,
{},
runExecutionData,
runIndex,
evaluateItemIndex,
node.name,
connectionInputData,
mode,
getAdditionalKeys(additionalData, mode, runExecutionData),
executeData,
);
},
getContext(type: ContextType): IContextObject {
return NodeHelpers.getContext(runExecutionData, type, node);
},
getCredentials: async (type) =>
await getCredentials(
workflow,
node,
type,
additionalData,
mode,
executeData,
runExecutionData,
runIndex,
connectionInputData,
itemIndex,
),
getInputData: (inputIndex = 0, inputName = 'main') => {
if (!inputData.hasOwnProperty(inputName)) {
// Return empty array because else it would throw error when nothing is connected to input
return { json: {} };
}
// TODO: Check if nodeType has input with that index defined
if (inputData[inputName].length < inputIndex) {
throw new ApplicationError('Could not get input index', {
extra: { inputIndex, inputName },
});
}
const allItems = inputData[inputName][inputIndex];
if (allItems === null) {
throw new ApplicationError('Input index was not set', {
extra: { inputIndex, inputName },
});
}
if (allItems[itemIndex] === null) {
throw new ApplicationError('Value of input with given index was not set', {
extra: { inputIndex, inputName, itemIndex },
});
}
return allItems[itemIndex];
},
getInputSourceData: (inputIndex = 0, inputName = 'main') => {
if (executeData?.source === null) {
// Should never happen as n8n sets it automatically
throw new ApplicationError('Source data is missing');
}
return executeData.source[inputName][inputIndex] as ISourceData;
},
getItemIndex: () => itemIndex,
getMode: () => mode,
getExecuteData: () => executeData,
getNodeParameter: (
parameterName: string,
fallbackValue?: any,
options?: IGetNodeParameterOptions,
): NodeParameterValueType | object => {
return getNodeParameter(
workflow,
runExecutionData,
runIndex,
connectionInputData,
node,
parameterName,
itemIndex,
mode,
getAdditionalKeys(additionalData, mode, runExecutionData),
executeData,
fallbackValue,
options,
);
},
getWorkflowDataProxy: (): IWorkflowDataProxyData => {
const dataProxy = new WorkflowDataProxy(
workflow,
runExecutionData,
runIndex,
itemIndex,
node.name,
connectionInputData,
{},
mode,
getAdditionalKeys(additionalData, mode, runExecutionData),
executeData,
);
return dataProxy.getDataProxy();
},
helpers: {
createDeferredPromise,
returnJsonArray,
...getRequestHelperFunctions(
return new ExecuteSingleContext(
workflow,
node,
additionalData,
mode,
runExecutionData,
runIndex,
connectionInputData,
),
...getBinaryHelperFunctions(additionalData, workflow.id),
assertBinaryData: (propertyName, inputIndex = 0) =>
assertBinaryData(inputData, node, itemIndex, propertyName, inputIndex),
getBinaryDataBuffer: async (propertyName, inputIndex = 0) =>
await getBinaryDataBuffer(inputData, itemIndex, propertyName, inputIndex),
},
logAiEvent: (eventName: AiEvent, msg: string) => {
return additionalData.logAiEvent(eventName, {
executionId: additionalData.executionId ?? 'unsaved-execution',
nodeName: node.name,
workflowName: workflow.name ?? 'Unnamed workflow',
nodeType: node.type,
workflowId: workflow.id ?? 'unsaved-workflow',
msg,
});
},
};
})(workflow, runExecutionData, connectionInputData, inputData, node, itemIndex);
inputData,
itemIndex,
executeData,
abortSignal,
);
}
export function getCredentialTestFunctions(): ICredentialTestFunctions {

View file

@@ -0,0 +1,301 @@
import { mock } from 'jest-mock-extended';
import type {
INode,
IWorkflowExecuteAdditionalData,
IRunExecutionData,
INodeExecutionData,
ITaskDataConnections,
IExecuteData,
Workflow,
WorkflowExecuteMode,
ICredentialsHelper,
Expression,
INodeType,
INodeTypes,
OnError,
ContextType,
IContextObject,
ICredentialDataDecryptedObject,
ISourceData,
} from 'n8n-workflow';
import { ApplicationError, NodeHelpers } from 'n8n-workflow';
import { ExecuteSingleContext } from '../execute-single-context';
describe('ExecuteSingleContext', () => {
const testCredentialType = 'testCredential';
const nodeType = mock<INodeType>({
description: {
credentials: [
{
name: testCredentialType,
required: true,
},
],
properties: [
{
name: 'testParameter',
required: true,
},
],
},
});
const nodeTypes = mock<INodeTypes>();
const expression = mock<Expression>();
const workflow = mock<Workflow>({ expression, nodeTypes });
const node = mock<INode>({
credentials: {
[testCredentialType]: {
id: 'testCredentialId',
},
},
});
node.parameters = {
testParameter: 'testValue',
};
const credentialsHelper = mock<ICredentialsHelper>();
const additionalData = mock<IWorkflowExecuteAdditionalData>({ credentialsHelper });
const mode: WorkflowExecuteMode = 'manual';
const runExecutionData = mock<IRunExecutionData>();
const connectionInputData = mock<INodeExecutionData[]>();
const inputData: ITaskDataConnections = { main: [[{ json: { test: 'data' } }]] };
const executeData = mock<IExecuteData>();
const runIndex = 0;
const itemIndex = 0;
const abortSignal = mock<AbortSignal>();
const executeSingleContext = new ExecuteSingleContext(
workflow,
node,
additionalData,
mode,
runExecutionData,
runIndex,
connectionInputData,
inputData,
itemIndex,
executeData,
abortSignal,
);
beforeEach(() => {
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
expression.getParameterValue.mockImplementation((value) => value);
});
describe('getExecutionCancelSignal', () => {
it('should return the abort signal', () => {
expect(executeSingleContext.getExecutionCancelSignal()).toBe(abortSignal);
});
});
describe('continueOnFail', () => {
afterEach(() => {
node.onError = undefined;
node.continueOnFail = false;
});
it('should return false for nodes by default', () => {
expect(executeSingleContext.continueOnFail()).toEqual(false);
});
it('should return true if node has continueOnFail set to true', () => {
node.continueOnFail = true;
expect(executeSingleContext.continueOnFail()).toEqual(true);
});
test.each([
['continueRegularOutput', true],
['continueErrorOutput', true],
['stopWorkflow', false],
])('if node has onError set to %s, it should return %s', (onError, expected) => {
node.onError = onError as OnError;
expect(executeSingleContext.continueOnFail()).toEqual(expected);
});
});
describe('evaluateExpression', () => {
it('should evaluate the expression correctly', () => {
const expression = '$json.test';
const expectedResult = 'data';
const resolveSimpleParameterValueSpy = jest.spyOn(
workflow.expression,
'resolveSimpleParameterValue',
);
resolveSimpleParameterValueSpy.mockReturnValue(expectedResult);
expect(executeSingleContext.evaluateExpression(expression, itemIndex)).toEqual(
expectedResult,
);
expect(resolveSimpleParameterValueSpy).toHaveBeenCalledWith(
`=${expression}`,
{},
runExecutionData,
runIndex,
itemIndex,
node.name,
connectionInputData,
mode,
expect.objectContaining({}),
executeData,
);
resolveSimpleParameterValueSpy.mockRestore();
});
});
describe('getContext', () => {
it('should return the context object', () => {
const contextType: ContextType = 'node';
const expectedContext = mock<IContextObject>();
const getContextSpy = jest.spyOn(NodeHelpers, 'getContext');
getContextSpy.mockReturnValue(expectedContext);
expect(executeSingleContext.getContext(contextType)).toEqual(expectedContext);
expect(getContextSpy).toHaveBeenCalledWith(runExecutionData, contextType, node);
getContextSpy.mockRestore();
});
});
describe('getInputData', () => {
const inputIndex = 0;
const inputName = 'main';
afterEach(() => {
inputData[inputName] = [[{ json: { test: 'data' } }]];
});
it('should return the input data correctly', () => {
const expectedData = { json: { test: 'data' } };
expect(executeSingleContext.getInputData(inputIndex, inputName)).toEqual(expectedData);
});
it('should return an empty object if the input name does not exist', () => {
const inputName = 'nonExistent';
const expectedData = { json: {} };
expect(executeSingleContext.getInputData(inputIndex, inputName)).toEqual(expectedData);
});
it('should throw an error if the input index is out of range', () => {
const inputIndex = 1;
expect(() => executeSingleContext.getInputData(inputIndex, inputName)).toThrow(
ApplicationError,
);
});
it('should throw an error if the input index was not set', () => {
inputData.main[inputIndex] = null;
expect(() => executeSingleContext.getInputData(inputIndex, inputName)).toThrow(
ApplicationError,
);
});
it('should throw an error if the value of input with given index was not set', () => {
delete inputData.main[inputIndex]![itemIndex];
expect(() => executeSingleContext.getInputData(inputIndex, inputName)).toThrow(
ApplicationError,
);
});
});
describe('getItemIndex', () => {
it('should return the item index correctly', () => {
expect(executeSingleContext.getItemIndex()).toEqual(itemIndex);
});
});
describe('getNodeParameter', () => {
beforeEach(() => {
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
expression.getParameterValue.mockImplementation((value) => value);
});
it('should return parameter value when it exists', () => {
const parameter = executeSingleContext.getNodeParameter('testParameter');
expect(parameter).toBe('testValue');
});
it('should return the fallback value when the parameter does not exist', () => {
const parameter = executeSingleContext.getNodeParameter('otherParameter', 'fallback');
expect(parameter).toBe('fallback');
});
});
describe('getCredentials', () => {
it('should get decrypted credentials', async () => {
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' });
const credentials =
await executeSingleContext.getCredentials<ICredentialDataDecryptedObject>(
testCredentialType,
);
expect(credentials).toEqual({ secret: 'token' });
});
});
describe('getExecuteData', () => {
it('should return the execute data correctly', () => {
expect(executeSingleContext.getExecuteData()).toEqual(executeData);
});
});
describe('getWorkflowDataProxy', () => {
it('should return the workflow data proxy correctly', () => {
const workflowDataProxy = executeSingleContext.getWorkflowDataProxy();
expect(workflowDataProxy.isProxy).toBe(true);
expect(Object.keys(workflowDataProxy.$input)).toEqual([
'all',
'context',
'first',
'item',
'last',
'params',
]);
});
});
describe('getInputSourceData', () => {
it('should return the input source data correctly', () => {
const inputSourceData = mock<ISourceData>();
executeData.source = { main: [inputSourceData] };
expect(executeSingleContext.getInputSourceData()).toEqual(inputSourceData);
});
it('should throw an error if the source data is missing', () => {
executeData.source = null;
expect(() => executeSingleContext.getInputSourceData()).toThrow(ApplicationError);
});
});
describe('logAiEvent', () => {
it('should log the AI event correctly', () => {
const eventName = 'ai-tool-called';
const msg = 'test message';
executeSingleContext.logAiEvent(eventName, msg);
expect(additionalData.logAiEvent).toHaveBeenCalledWith(eventName, {
executionId: additionalData.executionId,
nodeName: node.name,
workflowName: workflow.name,
nodeType: node.type,
workflowId: workflow.id,
msg,
});
});
});
});

View file

@@ -0,0 +1,212 @@
import type {
ICredentialDataDecryptedObject,
IGetNodeParameterOptions,
INode,
INodeExecutionData,
IRunExecutionData,
IExecuteSingleFunctions,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowExecuteMode,
ITaskDataConnections,
IExecuteData,
ContextType,
AiEvent,
ISourceData,
} from 'n8n-workflow';
import {
ApplicationError,
createDeferredPromise,
NodeHelpers,
WorkflowDataProxy,
} from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import {
assertBinaryData,
continueOnFail,
getAdditionalKeys,
getBinaryDataBuffer,
getCredentials,
getNodeParameter,
returnJsonArray,
} from '@/NodeExecuteFunctions';
import { BinaryHelpers } from './helpers/binary-helpers';
import { RequestHelpers } from './helpers/request-helpers';
import { NodeExecutionContext } from './node-execution-context';
export class ExecuteSingleContext extends NodeExecutionContext implements IExecuteSingleFunctions {
readonly helpers: IExecuteSingleFunctions['helpers'];
constructor(
workflow: Workflow,
node: INode,
additionalData: IWorkflowExecuteAdditionalData,
mode: WorkflowExecuteMode,
private readonly runExecutionData: IRunExecutionData,
private readonly runIndex: number,
private readonly connectionInputData: INodeExecutionData[],
private readonly inputData: ITaskDataConnections,
private readonly itemIndex: number,
private readonly executeData: IExecuteData,
private readonly abortSignal?: AbortSignal,
) {
super(workflow, node, additionalData, mode);
this.helpers = {
createDeferredPromise,
returnJsonArray,
...new BinaryHelpers(workflow, additionalData).exported,
...new RequestHelpers(this, workflow, node, additionalData).exported,
assertBinaryData: (propertyName, inputIndex = 0) =>
assertBinaryData(inputData, node, itemIndex, propertyName, inputIndex),
getBinaryDataBuffer: async (propertyName, inputIndex = 0) =>
await getBinaryDataBuffer(inputData, itemIndex, propertyName, inputIndex),
};
}
getExecutionCancelSignal() {
return this.abortSignal;
}
onExecutionCancellation(handler: () => unknown) {
const fn = () => {
this.abortSignal?.removeEventListener('abort', fn);
handler();
};
this.abortSignal?.addEventListener('abort', fn);
}
continueOnFail() {
return continueOnFail(this.node);
}
evaluateExpression(expression: string, evaluateItemIndex: number | undefined) {
evaluateItemIndex = evaluateItemIndex ?? this.itemIndex;
return this.workflow.expression.resolveSimpleParameterValue(
`=${expression}`,
{},
this.runExecutionData,
this.runIndex,
evaluateItemIndex,
this.node.name,
this.connectionInputData,
this.mode,
getAdditionalKeys(this.additionalData, this.mode, this.runExecutionData),
this.executeData,
);
}
getContext(type: ContextType) {
return NodeHelpers.getContext(this.runExecutionData, type, this.node);
}
getInputData(inputIndex = 0, inputName = 'main') {
if (!this.inputData.hasOwnProperty(inputName)) {
// Return an empty item, otherwise an error would be thrown when nothing is connected to the input
return { json: {} };
}
// TODO: Check if nodeType has input with that index defined
if (this.inputData[inputName].length < inputIndex) {
throw new ApplicationError('Could not get input index', {
extra: { inputIndex, inputName },
});
}
const allItems = this.inputData[inputName][inputIndex];
if (allItems === null || allItems === undefined) {
throw new ApplicationError('Input index was not set', {
extra: { inputIndex, inputName },
});
}
const data = allItems[this.itemIndex];
if (data === null || data === undefined) {
throw new ApplicationError('Value of input with given index was not set', {
extra: { inputIndex, inputName, itemIndex: this.itemIndex },
});
}
return data;
}
getItemIndex() {
return this.itemIndex;
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
getNodeParameter(parameterName: string, fallbackValue?: any, options?: IGetNodeParameterOptions) {
return getNodeParameter(
this.workflow,
this.runExecutionData,
this.runIndex,
this.connectionInputData,
this.node,
parameterName,
this.itemIndex,
this.mode,
getAdditionalKeys(this.additionalData, this.mode, this.runExecutionData),
this.executeData,
fallbackValue,
options,
);
}
// TODO: extract out in a BaseExecutionContext
async getCredentials<T extends object = ICredentialDataDecryptedObject>(type: string) {
return await getCredentials<T>(
this.workflow,
this.node,
type,
this.additionalData,
this.mode,
this.executeData,
this.runExecutionData,
this.runIndex,
this.connectionInputData,
this.itemIndex,
);
}
getExecuteData() {
return this.executeData;
}
getWorkflowDataProxy() {
return new WorkflowDataProxy(
this.workflow,
this.runExecutionData,
this.runIndex,
this.itemIndex,
this.node.name,
this.connectionInputData,
{},
this.mode,
getAdditionalKeys(this.additionalData, this.mode, this.runExecutionData),
this.executeData,
).getDataProxy();
}
getInputSourceData(inputIndex = 0, inputName = 'main'): ISourceData {
if (this.executeData?.source === null) {
// Should never happen as n8n sets it automatically
throw new ApplicationError('Source data is missing');
}
return this.executeData.source[inputName][inputIndex] as ISourceData;
}
logAiEvent(eventName: AiEvent, msg: string) {
return this.additionalData.logAiEvent(eventName, {
executionId: this.additionalData.executionId ?? 'unsaved-execution',
nodeName: this.node.name,
workflowName: this.workflow.name ?? 'Unnamed workflow',
nodeType: this.node.type,
workflowId: this.workflow.id ?? 'unsaved-workflow',
msg,
});
}
}

View file

@@ -1,4 +1,5 @@
// eslint-disable-next-line import/no-cycle
export { ExecuteSingleContext } from './execute-single-context';
export { HookContext } from './hook-context';
export { LoadOptionsContext } from './load-options-context';
export { PollContext } from './poll-context';

View file

@@ -1,6 +1,6 @@
{
"name": "n8n-design-system",
"version": "1.56.0",
"version": "1.57.0",
"main": "src/main.ts",
"import": "src/main.ts",
"scripts": {

View file

@@ -1,6 +1,6 @@
import { render } from '@testing-library/vue';
import AssistantAvatar from '../AssistantAvatar.vue';
import AssistantAvatar from './AssistantAvatar.vue';
describe('AskAssistantAvatar', () => {
it('renders small avatar correctly', () => {

View file

@@ -1,6 +1,6 @@
import { render } from '@testing-library/vue';
import AskAssistantButton from '../AskAssistantButton.vue';
import AskAssistantButton from './AskAssistantButton.vue';
describe('AskAssistantButton', () => {
it('renders default button correctly', () => {

View file

@@ -2,7 +2,7 @@ import { render } from '@testing-library/vue';
import { n8nHtml } from 'n8n-design-system/directives';
import AskAssistantChat from '../AskAssistantChat.vue';
import AskAssistantChat from './AskAssistantChat.vue';
const stubs = ['n8n-avatar', 'n8n-button', 'n8n-icon', 'n8n-icon-button'];

View file

@@ -1,6 +1,6 @@
import { render } from '@testing-library/vue';
import AssistantIcon from '../AssistantIcon.vue';
import AssistantIcon from './AssistantIcon.vue';
describe('AssistantIcon', () => {
it('renders default icon correctly', () => {

View file

@@ -1,6 +1,6 @@
import { render } from '@testing-library/vue';
import AssistantLoadingMessage from '../AssistantLoadingMessage.vue';
import AssistantLoadingMessage from './AssistantLoadingMessage.vue';
describe('AssistantLoadingMessage', () => {
it('renders loading message correctly', () => {

View file

@@ -1,6 +1,6 @@
import { render } from '@testing-library/vue';
import AssistantText from '../AssistantText.vue';
import AssistantText from './AssistantText.vue';
describe('AssistantText', () => {
it('renders default text correctly', () => {

View file

@@ -1,6 +1,6 @@
import { render } from '@testing-library/vue';
import BetaTag from '../BetaTag.vue';
import BetaTag from './BetaTag.vue';
describe('BetaTag', () => {
it('renders beta tag correctly', () => {

View file

@@ -1,6 +1,6 @@
import { render } from '@testing-library/vue';
import BlinkingCursor from '../BlinkingCursor.vue';
import BlinkingCursor from './BlinkingCursor.vue';
describe('BlinkingCursor', () => {
it('renders blinking cursor correctly', () => {

View file

@@ -1,6 +1,6 @@
import { render } from '@testing-library/vue';
import CodeDiff from '../CodeDiff.vue';
import CodeDiff from './CodeDiff.vue';
const stubs = ['n8n-button', 'n8n-icon'];

View file

@@ -2,7 +2,7 @@ import { render } from '@testing-library/vue';
import { beforeAll, describe } from 'vitest';
import { createRouter, createWebHistory } from 'vue-router';
import CondtionalRouterLink from '../CondtionalRouterLink.vue';
import CondtionalRouterLink from './CondtionalRouterLink.vue';
const slots = {
default: 'Button',

View file

@@ -1,6 +1,6 @@
import { render } from '@testing-library/vue';
import N8NActionBox from '../ActionBox.vue';
import N8NActionBox from './ActionBox.vue';
describe('N8NActionBox', () => {
it('should render correctly', () => {

View file

@@ -1,6 +1,6 @@
import { render } from '@testing-library/vue';
import N8nActionDropdown from '../ActionDropdown.vue';
import N8nActionDropdown from './ActionDropdown.vue';
describe('components', () => {
describe('N8nActionDropdown', () => {

View file

@@ -1,7 +1,7 @@
import { render, screen } from '@testing-library/vue';
import N8nIcon from '../../N8nIcon';
import N8nAlert from '../Alert.vue';
import N8nAlert from './Alert.vue';
import N8nIcon from '../N8nIcon';
describe('components', () => {
describe('N8nAlert', () => {

View file

@@ -1,6 +1,6 @@
import { render } from '@testing-library/vue';
import N8nAvatar from '../Avatar.vue';
import N8nAvatar from './Avatar.vue';
describe('components', () => {
describe('N8nAlert', () => {

Some files were not shown because too many files have changed in this diff.