Merge branch 'master' into ai-396-implement-split-pane

Oleg Ivaniv 2024-11-06 09:26:38 +01:00
commit 6f86529fb7
500 changed files with 10665 additions and 6163 deletions

View file

@ -11,3 +11,8 @@
# refactor: Run lintfix (no-changelog) (#7537)
62c096710fab2f7e886518abdbded34b55e93f62
# refactor: Move test files alongside tested files (#11504)
7e58fc4fec468aca0b45d5bfe6150e1af632acbc
f32b13c6ed078be042a735bc8621f27e00dc3116

View file

@ -49,6 +49,9 @@ jobs:
with:
ref: refs/pull/${{ github.event.pull_request.number }}/merge
cacheKey: ${{ github.sha }}-base:build
collectCoverage: true
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
lint:
name: Lint

View file

@ -38,6 +38,12 @@ jobs:
- name: Build
run: pnpm build
- name: Cache build artifacts
uses: actions/cache/save@v4.0.0
with:
path: ./packages/**/dist
key: ${{ github.sha }}-base:build
- name: Dry-run publishing
run: pnpm publish -r --no-git-checks --dry-run
@ -119,6 +125,39 @@ jobs:
makeLatest: false
body: ${{github.event.pull_request.body}}
create-sentry-release:
name: Create release on Sentry
needs: [publish-to-npm, publish-to-docker-hub]
runs-on: ubuntu-latest
if: github.event.pull_request.merged == true
timeout-minutes: 5
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
steps:
- name: Restore cached build artifacts
uses: actions/cache/restore@v4.0.0
with:
path: ./packages/**/dist
key: ${{ github.sha }}-base:build
- name: Create a frontend release
uses: getsentry/action-release@v1.7.0
continue-on-error: true
with:
projects: ${{ secrets.SENTRY_FRONTEND_PROJECT }}
version: ${{ needs.publish-to-npm.outputs.release }}
sourcemaps: packages/editor-ui/dist
- name: Create a backend release
uses: getsentry/action-release@v1.7.0
continue-on-error: true
with:
projects: ${{ secrets.SENTRY_BACKEND_PROJECT }}
version: ${{ needs.publish-to-npm.outputs.release }}
sourcemaps: packages/cli/dist packages/core/dist packages/nodes-base/dist packages/@n8n/n8n-nodes-langchain/dist
trigger-release-note:
name: Trigger a release note
needs: [publish-to-npm, create-github-release]

View file

@ -1,3 +1,45 @@
# [1.66.0](https://github.com/n8n-io/n8n/compare/n8n@1.65.0...n8n@1.66.0) (2024-10-31)
### Bug Fixes
* **Asana Node:** Fix issue with pagination ([#11415](https://github.com/n8n-io/n8n/issues/11415)) ([04c075a](https://github.com/n8n-io/n8n/commit/04c075a46bcc7b1964397f0244b0fde99476212d))
* **core:** Add 'user_id' to `license-community-plus-registered` telemetry event ([#11430](https://github.com/n8n-io/n8n/issues/11430)) ([7a8dafe](https://github.com/n8n-io/n8n/commit/7a8dafe9902fbc0d5001c50579c34959b95211ab))
* **core:** Add safeguard for command publishing ([#11337](https://github.com/n8n-io/n8n/issues/11337)) ([656439e](https://github.com/n8n-io/n8n/commit/656439e87138f9f96dea5a683cfdac3f661ffefb))
* **core:** Ensure `LoggerProxy` is not scoped ([#11379](https://github.com/n8n-io/n8n/issues/11379)) ([f4ea943](https://github.com/n8n-io/n8n/commit/f4ea943c9cb2321e41705de6c5c27535a0f5eae0))
* **core:** Ensure `remove-triggers-and-pollers` command is not debounced ([#11486](https://github.com/n8n-io/n8n/issues/11486)) ([529d4fc](https://github.com/n8n-io/n8n/commit/529d4fc3ef5206bd1b02d27634342cc50b45997e))
* **core:** Ensure job processor does not reprocess amended executions ([#11438](https://github.com/n8n-io/n8n/issues/11438)) ([c152a3a](https://github.com/n8n-io/n8n/commit/c152a3ac56f140a39eea4771a94f5a3082118df7))
* **core:** Fix Message Event Bus Metrics not counting up for labeled metrics ([#11396](https://github.com/n8n-io/n8n/issues/11396)) ([7fc3b25](https://github.com/n8n-io/n8n/commit/7fc3b25d21c6c4f1802f34b1ae065a65cac3001b))
* **core:** Fix resolving of $fromAI expression via `evaluateExpression` ([#11397](https://github.com/n8n-io/n8n/issues/11397)) ([2e64464](https://github.com/n8n-io/n8n/commit/2e6446454defbd3e5a47b66e6fd46d4f1b9fbd0f))
* **core:** Make execution and its data creation atomic ([#11392](https://github.com/n8n-io/n8n/issues/11392)) ([ed30d43](https://github.com/n8n-io/n8n/commit/ed30d43236bf3c6b657022636a02a41be01aa152))
* **core:** On unhandled rejections, extract the original exception correctly ([#11389](https://github.com/n8n-io/n8n/issues/11389)) ([8608bae](https://github.com/n8n-io/n8n/commit/8608baeb7ec302daddc8adca6e39778dcf7b2eda))
* **editor:** Fix TypeError: Cannot read properties of undefined (reading '0') ([#11399](https://github.com/n8n-io/n8n/issues/11399)) ([ae37c52](https://github.com/n8n-io/n8n/commit/ae37c520a91c75e353e818944b36a3619c0d8b4a))
* **editor:** Add Retry button for AI Assistant errors ([#11345](https://github.com/n8n-io/n8n/issues/11345)) ([7699240](https://github.com/n8n-io/n8n/commit/7699240073122cdef31cf109fd37fa66961f588a))
* **editor:** Change tooltip for workflow with execute workflow trigger ([#11374](https://github.com/n8n-io/n8n/issues/11374)) ([dcd6038](https://github.com/n8n-io/n8n/commit/dcd6038c3085135803cdaa546a239359a6d449eb))
* **editor:** Ensure toasts show above modal overlays ([#11410](https://github.com/n8n-io/n8n/issues/11410)) ([351134f](https://github.com/n8n-io/n8n/commit/351134f786af933f5f310bf8d9897269387635a0))
* **editor:** Fit view consistently after nodes are initialized on new canvas ([#11457](https://github.com/n8n-io/n8n/issues/11457)) ([497d637](https://github.com/n8n-io/n8n/commit/497d637fc5308b9c4a06bc764152fde1f1a9c130))
* **editor:** Fix adding connections when initializing workspace in templates view on new canvas ([#11451](https://github.com/n8n-io/n8n/issues/11451)) ([ea47b02](https://github.com/n8n-io/n8n/commit/ea47b025fb16c967d4fc73dcacc6e260d2aecd61))
* **editor:** Fix rendering of AI logs ([#11450](https://github.com/n8n-io/n8n/issues/11450)) ([73b0a80](https://github.com/n8n-io/n8n/commit/73b0a80ac92b4f4b5a300d0ec1c833b4395a222a))
* **editor:** Hide data mapping tooltip in credential edit modal ([#11356](https://github.com/n8n-io/n8n/issues/11356)) ([ff14dcb](https://github.com/n8n-io/n8n/commit/ff14dcb3a1ddaea4eca7c1ecd2e92c0abb0c413c))
* **editor:** Prevent running workflow that has issues if listening to webhook ([#11402](https://github.com/n8n-io/n8n/issues/11402)) ([8b0a48f](https://github.com/n8n-io/n8n/commit/8b0a48f53010378e497e4cc362fda75a958cf363))
* **editor:** Run external hooks after settings have been initialized ([#11423](https://github.com/n8n-io/n8n/issues/11423)) ([0ab24c8](https://github.com/n8n-io/n8n/commit/0ab24c814abd1787268750ba808993ab2735ac52))
* **editor:** Support middle click to scroll when using a mouse on new canvas ([#11384](https://github.com/n8n-io/n8n/issues/11384)) ([46f3b4a](https://github.com/n8n-io/n8n/commit/46f3b4a258f89f02e0d2bd1eef25a22e3a721167))
* **HTTP Request Tool Node:** Fix HTML response optimization issue ([#11439](https://github.com/n8n-io/n8n/issues/11439)) ([cf37e94](https://github.com/n8n-io/n8n/commit/cf37e94dd875e9f6ab1f189146fb34e7296af93c))
* **n8n Form Node:** Form Trigger does not wait in multi-form mode ([#11404](https://github.com/n8n-io/n8n/issues/11404)) ([151f4dd](https://github.com/n8n-io/n8n/commit/151f4dd7b8f800af424f8ae64cb8238975fb3cb8))
* Update required node js version in CONTRIBUTING.md ([#11437](https://github.com/n8n-io/n8n/issues/11437)) ([4f511aa](https://github.com/n8n-io/n8n/commit/4f511aab68651caa8fe47f70cd7cdb88bb06a3e2))
### Features
* **Anthropic Chat Model Node:** Add model claude-3-5-sonnet-20241022 ([#11465](https://github.com/n8n-io/n8n/issues/11465)) ([f6c8890](https://github.com/n8n-io/n8n/commit/f6c8890a8069de221b9b96e735418ecc9624cf7b))
* **core:** Handle nodes with multiple inputs and connections during partial executions ([#11376](https://github.com/n8n-io/n8n/issues/11376)) ([cb7c4d2](https://github.com/n8n-io/n8n/commit/cb7c4d29a6f042b590822e5b9c67fff0a8f0863d))
* **editor:** Add descriptive header to projects /workflow ([#11203](https://github.com/n8n-io/n8n/issues/11203)) ([5d19e8f](https://github.com/n8n-io/n8n/commit/5d19e8f2b45dc1abc5a8253f9e3a0fdacb1ebd91))
* **editor:** Improve placeholder for vector store tool ([#11483](https://github.com/n8n-io/n8n/issues/11483)) ([629e092](https://github.com/n8n-io/n8n/commit/629e09240785bc648ff6575f97910fbb4e77cdab))
* **editor:** Remove edge execution animation on new canvas ([#11446](https://github.com/n8n-io/n8n/issues/11446)) ([a701d87](https://github.com/n8n-io/n8n/commit/a701d87f5ba94ffc811e424b60e188b26ac6c1c5))
* **editor:** Update ownership pills ([#11155](https://github.com/n8n-io/n8n/issues/11155)) ([8147038](https://github.com/n8n-io/n8n/commit/8147038cf87dca657602e617e49698065bf1a63f))
# [1.65.0](https://github.com/n8n-io/n8n/compare/n8n@1.64.0...n8n@1.65.0) (2024-10-24)

View file

@ -40,6 +40,14 @@ export function getOutputPanelDataContainer() {
return getOutputPanel().getByTestId('ndv-data-container');
}
export function getOutputTableRows() {
return getOutputPanelDataContainer().find('table tr');
}
export function getOutputTableRow(row: number) {
return getOutputTableRows().eq(row);
}
export function getOutputPanelTable() {
return getOutputPanelDataContainer().get('table');
}

View file

@ -69,6 +69,13 @@ export function getNodeCreatorPlusButton() {
return cy.getByTestId('node-creator-plus-button');
}
export function getCanvasNodes() {
return cy.ifCanvasVersion(
() => cy.getByTestId('canvas-node'),
() => cy.getByTestId('canvas-node').not('[data-node-type="n8n-nodes-internal.addNodes"]'),
);
}
/**
* Actions
*/

View file

@ -44,6 +44,7 @@ import {
openNode,
getConnectionBySourceAndTarget,
} from '../composables/workflow';
import { NDV, WorkflowPage } from '../pages';
import { createMockNodeExecutionData, runMockWorkflowExecution } from '../utils';
describe('Langchain Integration', () => {
@ -232,95 +233,96 @@ describe('Langchain Integration', () => {
const inputMessage = 'Hello!';
const outputMessage = 'Hi there! How can I assist you today?';
const runData = [
createMockNodeExecutionData(MANUAL_CHAT_TRIGGER_NODE_NAME, {
jsonData: {
main: { input: inputMessage },
},
}),
createMockNodeExecutionData(AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME, {
jsonData: {
ai_languageModel: {
response: {
generations: [
{
text: `{
"action": "Final Answer",
"action_input": "${outputMessage}"
}`,
message: {
lc: 1,
type: 'constructor',
id: ['langchain', 'schema', 'AIMessage'],
kwargs: {
content: `{
"action": "Final Answer",
"action_input": "${outputMessage}"
}`,
additional_kwargs: {},
},
},
generationInfo: { finish_reason: 'stop' },
},
],
llmOutput: {
tokenUsage: {
completionTokens: 26,
promptTokens: 519,
totalTokens: 545,
},
},
},
},
},
metadata: {
subRun: [{ node: AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME, runIndex: 0 }],
},
inputOverride: {
ai_languageModel: [
[
{
json: {
messages: [
{
lc: 1,
type: 'constructor',
id: ['langchain', 'schema', 'SystemMessage'],
kwargs: {
content:
'Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist. However, above all else, all responses must adhere to the format of RESPONSE FORMAT INSTRUCTIONS.',
additional_kwargs: {},
},
},
{
lc: 1,
type: 'constructor',
id: ['langchain', 'schema', 'HumanMessage'],
kwargs: {
content:
'TOOLS\n------\nAssistant can ask the user to use tools to look up information that may be helpful in answering the users original question. The tools the human can use are:\n\n\n\nRESPONSE FORMAT INSTRUCTIONS\n----------------------------\n\nOutput a JSON markdown code snippet containing a valid JSON object in one of two formats:\n\n**Option 1:**\nUse this if you want the human to use a tool.\nMarkdown code snippet formatted in the following schema:\n\n```json\n{\n "action": string, // The action to take. Must be one of []\n "action_input": string // The input to the action. May be a stringified object.\n}\n```\n\n**Option #2:**\nUse this if you want to respond directly and conversationally to the human. Markdown code snippet formatted in the following schema:\n\n```json\n{\n "action": "Final Answer",\n "action_input": string // You should put what you want to return to use here and make sure to use valid json newline characters.\n}\n```\n\nFor both options, remember to always include the surrounding markdown code snippet delimiters (begin with "```json" and end with "```")!\n\n\nUSER\'S INPUT\n--------------------\nHere is the user\'s input (remember to respond with a markdown code snippet of a json blob with a single action, and NOTHING else):\n\nHello!',
additional_kwargs: {},
},
},
],
options: { stop: ['Observation:'], promptIndex: 0 },
},
},
],
],
},
}),
createMockNodeExecutionData(AGENT_NODE_NAME, {
jsonData: {
main: { output: 'Hi there! How can I assist you today?' },
},
}),
];
runMockWorkflowExecution({
trigger: () => {
sendManualChatMessage(inputMessage);
},
runData: [
createMockNodeExecutionData(MANUAL_CHAT_TRIGGER_NODE_NAME, {
jsonData: {
main: { input: inputMessage },
},
}),
createMockNodeExecutionData(AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME, {
jsonData: {
ai_languageModel: {
response: {
generations: [
{
text: `{
"action": "Final Answer",
"action_input": "${outputMessage}"
}`,
message: {
lc: 1,
type: 'constructor',
id: ['langchain', 'schema', 'AIMessage'],
kwargs: {
content: `{
"action": "Final Answer",
"action_input": "${outputMessage}"
}`,
additional_kwargs: {},
},
},
generationInfo: { finish_reason: 'stop' },
},
],
llmOutput: {
tokenUsage: {
completionTokens: 26,
promptTokens: 519,
totalTokens: 545,
},
},
},
},
},
metadata: {
subRun: [{ node: AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME, runIndex: 0 }],
},
inputOverride: {
ai_languageModel: [
[
{
json: {
messages: [
{
lc: 1,
type: 'constructor',
id: ['langchain', 'schema', 'SystemMessage'],
kwargs: {
content:
'Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist. However, above all else, all responses must adhere to the format of RESPONSE FORMAT INSTRUCTIONS.',
additional_kwargs: {},
},
},
{
lc: 1,
type: 'constructor',
id: ['langchain', 'schema', 'HumanMessage'],
kwargs: {
content:
'TOOLS\n------\nAssistant can ask the user to use tools to look up information that may be helpful in answering the users original question. The tools the human can use are:\n\n\n\nRESPONSE FORMAT INSTRUCTIONS\n----------------------------\n\nOutput a JSON markdown code snippet containing a valid JSON object in one of two formats:\n\n**Option 1:**\nUse this if you want the human to use a tool.\nMarkdown code snippet formatted in the following schema:\n\n```json\n{\n "action": string, // The action to take. Must be one of []\n "action_input": string // The input to the action. May be a stringified object.\n}\n```\n\n**Option #2:**\nUse this if you want to respond directly and conversationally to the human. Markdown code snippet formatted in the following schema:\n\n```json\n{\n "action": "Final Answer",\n "action_input": string // You should put what you want to return to use here and make sure to use valid json newline characters.\n}\n```\n\nFor both options, remember to always include the surrounding markdown code snippet delimiters (begin with "```json" and end with "```")!\n\n\nUSER\'S INPUT\n--------------------\nHere is the user\'s input (remember to respond with a markdown code snippet of a json blob with a single action, and NOTHING else):\n\nHello!',
additional_kwargs: {},
},
},
],
options: { stop: ['Observation:'], promptIndex: 0 },
},
},
],
],
},
}),
createMockNodeExecutionData(AGENT_NODE_NAME, {
jsonData: {
main: { output: 'Hi there! How can I assist you today?' },
},
}),
],
runData,
lastNodeExecuted: AGENT_NODE_NAME,
});
@ -357,4 +359,56 @@ describe('Langchain Integration', () => {
getConnectionBySourceAndTarget(CHAT_TRIGGER_NODE_DISPLAY_NAME, AGENT_NODE_NAME).should('exist');
getNodes().should('have.length', 3);
});
it('should render runItems for sub-nodes and allow switching between them', () => {
const workflowPage = new WorkflowPage();
const ndv = new NDV();
cy.visit(workflowPage.url);
cy.createFixtureWorkflow('In_memory_vector_store_fake_embeddings.json');
workflowPage.actions.zoomToFit();
workflowPage.actions.executeNode('Populate VS');
cy.get('[data-label="25 items"]').should('exist');
const assertInputOutputText = (text: string, assertion: 'exist' | 'not.exist') => {
ndv.getters.outputPanel().contains(text).should(assertion);
ndv.getters.inputPanel().contains(text).should(assertion);
};
workflowPage.actions.openNode('Character Text Splitter');
ndv.getters.outputRunSelector().should('exist');
ndv.getters.inputRunSelector().should('exist');
ndv.getters.inputRunSelector().find('input').should('include.value', '3 of 3');
ndv.getters.outputRunSelector().find('input').should('include.value', '3 of 3');
assertInputOutputText('Kyiv', 'exist');
assertInputOutputText('Berlin', 'not.exist');
assertInputOutputText('Prague', 'not.exist');
ndv.actions.changeOutputRunSelector('2 of 3');
assertInputOutputText('Berlin', 'exist');
assertInputOutputText('Kyiv', 'not.exist');
assertInputOutputText('Prague', 'not.exist');
ndv.actions.changeOutputRunSelector('1 of 3');
assertInputOutputText('Prague', 'exist');
assertInputOutputText('Berlin', 'not.exist');
assertInputOutputText('Kyiv', 'not.exist');
ndv.actions.toggleInputRunLinking();
ndv.actions.changeOutputRunSelector('2 of 3');
ndv.getters.inputRunSelector().find('input').should('include.value', '1 of 3');
ndv.getters.outputRunSelector().find('input').should('include.value', '2 of 3');
ndv.getters.inputPanel().contains('Prague').should('exist');
ndv.getters.inputPanel().contains('Berlin').should('not.exist');
ndv.getters.outputPanel().contains('Berlin').should('exist');
ndv.getters.outputPanel().contains('Prague').should('not.exist');
ndv.actions.toggleInputRunLinking();
ndv.getters.inputRunSelector().find('input').should('include.value', '1 of 3');
ndv.getters.outputRunSelector().find('input').should('include.value', '1 of 3');
assertInputOutputText('Prague', 'exist');
assertInputOutputText('Berlin', 'not.exist');
assertInputOutputText('Kyiv', 'not.exist');
});
});

View file

@ -1,21 +1,29 @@
import workflow from '../fixtures/Manual_wait_set.json';
import { getOutputTableRow } from '../composables/ndv';
import { getCanvasNodes, openNode } from '../composables/workflow';
import SIMPLE_WORKFLOW from '../fixtures/Manual_wait_set.json';
import WORKFLOW_WITH_PINNED from '../fixtures/Webhook_set_pinned.json';
import { importWorkflow, visitDemoPage } from '../pages/demo';
import { errorToast } from '../pages/notifications';
import { WorkflowPage } from '../pages/workflow';
const workflowPage = new WorkflowPage();
describe('Demo', () => {
beforeEach(() => {
cy.overrideSettings({ previewMode: true });
cy.signout();
});
it('can import template', () => {
visitDemoPage();
errorToast().should('not.exist');
importWorkflow(workflow);
workflowPage.getters.canvasNodes().should('have.length', 3);
importWorkflow(SIMPLE_WORKFLOW);
getCanvasNodes().should('have.length', 3);
});
it('can import workflow with pin data', () => {
visitDemoPage();
importWorkflow(WORKFLOW_WITH_PINNED);
getCanvasNodes().should('have.length', 2);
openNode('Webhook');
getOutputTableRow(0).should('include.text', 'headers');
getOutputTableRow(1).should('include.text', 'dragons');
});
it('can override theme to dark', () => {

View file

@ -441,7 +441,9 @@ describe('Projects', { disableAutoLogin: true }, () => {
.should('contain.text', 'Notion account personal project');
});
it('should move resources between projects', () => {
// Skip flaky test
// eslint-disable-next-line n8n-local-rules/no-skipped-tests
it.skip('should move resources between projects', () => {
cy.signinAsOwner();
cy.visit(workflowsPage.url);
@ -684,7 +686,9 @@ describe('Projects', { disableAutoLogin: true }, () => {
.should('have.length', 1);
});
it('should allow to change inaccessible credential when the workflow was moved to a team project', () => {
// Skip flaky test
// eslint-disable-next-line n8n-local-rules/no-skipped-tests
it.skip('should allow to change inaccessible credential when the workflow was moved to a team project', () => {
cy.signinAsOwner();
cy.visit(workflowsPage.url);

File diff suppressed because one or more lines are too long

View file

@ -16,7 +16,7 @@ export function createMockNodeExecutionData(
return {
[name]: {
startTime: new Date().getTime(),
executionTime: 0,
executionTime: 1,
executionStatus,
data: jsonData
? Object.keys(jsonData).reduce((acc, key) => {
@ -33,6 +33,7 @@ export function createMockNodeExecutionData(
}, {} as ITaskDataConnections)
: data,
source: [null],
inputOverride,
...rest,
},
};

View file

@ -1,6 +1,6 @@
{
"name": "n8n-monorepo",
"version": "1.65.0",
"version": "1.66.0",
"private": true,
"engines": {
"node": ">=20.15",
@ -84,13 +84,11 @@
},
"patchedDependencies": {
"typedi@0.10.0": "patches/typedi@0.10.0.patch",
"@sentry/cli@2.36.2": "patches/@sentry__cli@2.36.2.patch",
"pkce-challenge@3.0.0": "patches/pkce-challenge@3.0.0.patch",
"pyodide@0.23.4": "patches/pyodide@0.23.4.patch",
"@types/express-serve-static-core@4.17.43": "patches/@types__express-serve-static-core@4.17.43.patch",
"@types/ws@8.5.4": "patches/@types__ws@8.5.4.patch",
"@types/uuencode@0.0.3": "patches/@types__uuencode@0.0.3.patch",
"@langchain/core@0.3.3": "patches/@langchain__core@0.3.3.patch"
"@types/uuencode@0.0.3": "patches/@types__uuencode@0.0.3.patch"
}
}
}

View file

@ -1,6 +1,6 @@
{
"name": "@n8n/api-types",
"version": "0.5.0",
"version": "0.6.0",
"scripts": {
"clean": "rimraf dist .turbo",
"dev": "pnpm watch",

View file

@ -1,6 +1,6 @@
{
"name": "@n8n/config",
"version": "1.15.0",
"version": "1.16.0",
"scripts": {
"clean": "rimraf dist .turbo",
"dev": "pnpm watch",

View file

@ -7,6 +7,7 @@ export const LOG_SCOPES = [
'external-secrets',
'license',
'multi-main-setup',
'pruning',
'pubsub',
'redis',
'scaling',

View file

@ -0,0 +1,35 @@
import { Config, Env } from '../decorators';
@Config
export class PruningConfig {
/** Whether to delete past executions on a rolling basis. */
@Env('EXECUTIONS_DATA_PRUNE')
isEnabled: boolean = true;
/** How old (hours) a finished execution must be to qualify for soft-deletion. */
@Env('EXECUTIONS_DATA_MAX_AGE')
maxAge: number = 336;
/**
* Max number of finished executions to keep in database. Does not necessarily
* prune to the exact max number. `0` for unlimited.
*/
@Env('EXECUTIONS_DATA_PRUNE_MAX_COUNT')
maxCount: number = 10_000;
/**
* How old (hours) a finished execution must be to qualify for hard-deletion.
* This buffer by default excludes recent executions as the user may need
* them while building a workflow.
*/
@Env('EXECUTIONS_DATA_HARD_DELETE_BUFFER')
hardDeleteBuffer: number = 1;
/** How often (minutes) execution data should be hard-deleted. */
@Env('EXECUTIONS_DATA_PRUNE_HARD_DELETE_INTERVAL')
hardDeleteInterval: number = 15;
/** How often (minutes) execution data should be soft-deleted */
@Env('EXECUTIONS_DATA_PRUNE_SOFT_DELETE_INTERVAL')
softDeleteInterval: number = 60;
}
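A minimal sketch, not part of the diff, of how these settings could be read once `PruningConfig` is wired into `GlobalConfig` (next hunks); the `typedi` container access mirrors the pattern used elsewhere in the codebase, and the env values shown are hypothetical.

```ts
// Assuming the process was started with, for example:
//   EXECUTIONS_DATA_MAX_AGE=168 EXECUTIONS_DATA_PRUNE_MAX_COUNT=0
import { Container } from 'typedi';
import { GlobalConfig } from '@n8n/config';

const { pruning } = Container.get(GlobalConfig);
pruning.isEnabled; // true  (default)
pruning.maxAge;    // 168   (hours before soft-deletion, overridden via env)
pruning.maxCount;  // 0     (keep an unlimited number of finished executions)
```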

View file

@ -10,6 +10,7 @@ import { LicenseConfig } from './configs/license.config';
import { LoggingConfig } from './configs/logging.config';
import { MultiMainSetupConfig } from './configs/multi-main-setup.config';
import { NodesConfig } from './configs/nodes.config';
import { PruningConfig } from './configs/pruning.config';
import { PublicApiConfig } from './configs/public-api.config';
import { TaskRunnersConfig } from './configs/runners.config';
import { ScalingModeConfig } from './configs/scaling-mode.config';
@ -24,6 +25,7 @@ import { Config, Env, Nested } from './decorators';
export { Config, Env, Nested } from './decorators';
export { TaskRunnersConfig } from './configs/runners.config';
export { SecurityConfig } from './configs/security.config';
export { PruningConfig } from './configs/pruning.config';
export { FrontendBetaFeatures, FrontendConfig } from './configs/frontend.config';
export { LOG_SCOPES } from './configs/logging.config';
export type { LogScope } from './configs/logging.config';
@ -112,4 +114,7 @@ export class GlobalConfig {
@Nested
security: SecurityConfig;
@Nested
pruning: PruningConfig;
}

View file

@ -271,6 +271,14 @@ describe('GlobalConfig', () => {
blockFileAccessToN8nFiles: true,
daysAbandonedWorkflow: 90,
},
pruning: {
isEnabled: true,
maxAge: 336,
maxCount: 10_000,
hardDeleteBuffer: 1,
hardDeleteInterval: 15,
softDeleteInterval: 60,
},
};
it('should use all default values when no env variables are defined', () => {

View file

@ -1,4 +1,7 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import { ChatAnthropic } from '@langchain/anthropic';
import type { LLMResult } from '@langchain/core/outputs';
import {
NodeConnectionType,
type INodePropertyOptions,
@ -9,8 +12,6 @@ import {
type SupplyData,
} from 'n8n-workflow';
import { ChatAnthropic } from '@langchain/anthropic';
import type { LLMResult } from '@langchain/core/outputs';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
@ -36,6 +37,10 @@ const modelField: INodeProperties = {
name: 'Claude 3 Sonnet(20240229)',
value: 'claude-3-sonnet-20240229',
},
{
name: 'Claude 3.5 Haiku(20241022)',
value: 'claude-3-5-haiku-20241022',
},
{
name: 'Claude 3 Haiku(20240307)',
value: 'claude-3-haiku-20240307',

View file

@ -1,6 +1,6 @@
{
"name": "@n8n/n8n-nodes-langchain",
"version": "1.65.0",
"version": "1.66.0",
"description": "",
"main": "index.js",
"scripts": {
@ -137,22 +137,23 @@
"@google-cloud/resource-manager": "5.3.0",
"@google/generative-ai": "0.19.0",
"@huggingface/inference": "2.8.0",
"@langchain/anthropic": "0.3.1",
"@langchain/aws": "0.1.0",
"@langchain/cohere": "0.3.0",
"@langchain/community": "0.3.2",
"@langchain/anthropic": "0.3.7",
"@langchain/aws": "0.1.1",
"@langchain/cohere": "0.3.1",
"@langchain/community": "0.3.11",
"@langchain/core": "catalog:",
"@langchain/google-genai": "0.1.0",
"@langchain/google-genai": "0.1.2",
"@langchain/google-vertexai": "0.1.0",
"@langchain/groq": "0.1.2",
"@langchain/mistralai": "0.1.1",
"@langchain/ollama": "0.1.0",
"@langchain/openai": "0.3.0",
"@langchain/pinecone": "0.1.0",
"@langchain/ollama": "0.1.1",
"@langchain/openai": "0.3.11",
"@langchain/pinecone": "0.1.1",
"@langchain/qdrant": "0.1.0",
"@langchain/redis": "0.1.0",
"@langchain/textsplitters": "0.1.0",
"@mozilla/readability": "0.5.0",
"@n8n/json-schema-to-zod": "workspace:*",
"@n8n/typeorm": "0.3.20-12",
"@n8n/vm2": "3.9.25",
"@pinecone-database/pinecone": "3.0.3",
@ -168,14 +169,13 @@
"generate-schema": "2.6.0",
"html-to-text": "9.0.5",
"jsdom": "23.0.1",
"@n8n/json-schema-to-zod": "workspace:*",
"langchain": "0.3.2",
"langchain": "0.3.5",
"lodash": "catalog:",
"mammoth": "1.7.2",
"mime-types": "2.1.35",
"n8n-nodes-base": "workspace:*",
"n8n-workflow": "workspace:*",
"openai": "4.63.0",
"openai": "4.69.0",
"pdf-parse": "1.1.1",
"pg": "8.12.0",
"redis": "4.6.12",

View file

@ -1,6 +1,6 @@
{
"name": "@n8n/permissions",
"version": "0.15.0",
"version": "0.16.0",
"scripts": {
"clean": "rimraf dist .turbo",
"dev": "pnpm watch",

View file

@ -1,6 +1,6 @@
{
"name": "@n8n/task-runner",
"version": "1.3.0",
"version": "1.4.0",
"scripts": {
"clean": "rimraf dist .turbo",
"start": "node dist/start.js",
@ -17,14 +17,28 @@
},
"main": "dist/start.js",
"module": "src/start.ts",
"types": "dist/start.d.ts",
"types": "dist/index.d.ts",
"files": [
"dist/**/*"
],
"exports": {
"./start": {
"require": "./dist/start.js",
"import": "./src/start.ts",
"types": "./dist/start.d.ts"
},
".": {
"require": "./dist/index.js",
"import": "./src/index.ts",
"types": "./dist/index.d.ts"
}
},
"dependencies": {
"@n8n/config": "workspace:*",
"n8n-workflow": "workspace:*",
"acorn": "8.14.0",
"acorn-walk": "8.3.4",
"n8n-core": "workspace:*",
"n8n-workflow": "workspace:*",
"nanoid": "^3.3.6",
"typedi": "catalog:",
"ws": "^8.18.0"

View file

@ -1,2 +1,3 @@
export * from './task-runner';
export * from './runner-types';
export * from './message-types';
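As an assumed illustration of what the new `exports` map in package.json above plus this index enable for consumers of `@n8n/task-runner`: the library surface and the start entrypoint now resolve separately.

```ts
// Sketch only; actual consumers inside the monorepo may wire this differently.
import { TaskRunner } from '@n8n/task-runner'; // "." export -> dist/index.js (re-exported from ./task-runner)
// import '@n8n/task-runner/start';            // "./start" export -> dist/start.js, the runner process entrypoint
```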

View file

@ -4,14 +4,11 @@ import fs from 'node:fs';
import { builtinModules } from 'node:module';
import { ValidationError } from '@/js-task-runner/errors/validation-error';
import {
JsTaskRunner,
type AllCodeTaskData,
type JSExecSettings,
} from '@/js-task-runner/js-task-runner';
import type { DataRequestResponse, JSExecSettings } from '@/js-task-runner/js-task-runner';
import { JsTaskRunner } from '@/js-task-runner/js-task-runner';
import type { Task } from '@/task-runner';
import { newAllCodeTaskData, newTaskWithSettings, withPairedItem, wrapIntoJson } from './test-data';
import { newCodeTaskData, newTaskWithSettings, withPairedItem, wrapIntoJson } from './test-data';
import type { JsRunnerConfig } from '../../config/js-runner-config';
import { MainConfig } from '../../config/main-config';
import { ExecutionError } from '../errors/execution-error';
@ -43,7 +40,7 @@ describe('JsTaskRunner', () => {
runner = defaultTaskRunner,
}: {
task: Task<JSExecSettings>;
taskData: AllCodeTaskData;
taskData: DataRequestResponse;
runner?: JsTaskRunner;
}) => {
jest.spyOn(runner, 'requestData').mockResolvedValue(taskData);
@ -71,7 +68,7 @@ describe('JsTaskRunner', () => {
nodeMode: 'runOnceForAllItems',
...settings,
}),
taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson)),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson)),
runner,
});
};
@ -94,7 +91,7 @@ describe('JsTaskRunner', () => {
nodeMode: 'runOnceForEachItem',
...settings,
}),
taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson)),
taskData: newCodeTaskData(inputItems.map(wrapIntoJson)),
runner,
});
};
@ -111,7 +108,7 @@ describe('JsTaskRunner', () => {
await execTaskWithParams({
task,
taskData: newAllCodeTaskData([wrapIntoJson({})]),
taskData: newCodeTaskData([wrapIntoJson({})]),
});
expect(defaultTaskRunner.makeRpcCall).toHaveBeenCalledWith(task.taskId, 'logNodeOutput', [
@ -246,7 +243,7 @@ describe('JsTaskRunner', () => {
code: 'return { val: $env.VAR1 }',
nodeMode: 'runOnceForAllItems',
}),
taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
envProviderState: {
isEnvAccessBlocked: false,
isProcessAvailable: true,
@ -265,7 +262,7 @@ describe('JsTaskRunner', () => {
code: 'return { val: $env.VAR1 }',
nodeMode: 'runOnceForAllItems',
}),
taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
envProviderState: {
isEnvAccessBlocked: true,
isProcessAvailable: true,
@ -282,7 +279,7 @@ describe('JsTaskRunner', () => {
code: 'return Object.values($env).concat(Object.keys($env))',
nodeMode: 'runOnceForAllItems',
}),
taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
envProviderState: {
isEnvAccessBlocked: false,
isProcessAvailable: true,
@ -301,7 +298,7 @@ describe('JsTaskRunner', () => {
code: 'return { val: $env.N8N_RUNNERS_N8N_URI }',
nodeMode: 'runOnceForAllItems',
}),
taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
envProviderState: undefined,
}),
});
@ -316,7 +313,7 @@ describe('JsTaskRunner', () => {
code: 'return { val: Buffer.from("test-buffer").toString() }',
nodeMode: 'runOnceForAllItems',
}),
taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
envProviderState: undefined,
}),
});
@ -328,7 +325,7 @@ describe('JsTaskRunner', () => {
code: 'return { val: Buffer.from("test-buffer").toString() }',
nodeMode: 'runOnceForEachItem',
}),
taskData: newAllCodeTaskData(inputItems.map(wrapIntoJson), {
taskData: newCodeTaskData(inputItems.map(wrapIntoJson), {
envProviderState: undefined,
}),
});
@ -774,7 +771,7 @@ describe('JsTaskRunner', () => {
code: 'unknown',
nodeMode,
}),
taskData: newAllCodeTaskData([wrapIntoJson({ a: 1 })]),
taskData: newCodeTaskData([wrapIntoJson({ a: 1 })]),
}),
).rejects.toThrow(ExecutionError);
},
@ -796,7 +793,7 @@ describe('JsTaskRunner', () => {
jest.spyOn(runner, 'sendOffers').mockImplementation(() => {});
jest
.spyOn(runner, 'requestData')
.mockResolvedValue(newAllCodeTaskData([wrapIntoJson({ a: 1 })]));
.mockResolvedValue(newCodeTaskData([wrapIntoJson({ a: 1 })]));
await runner.receivedSettings(taskId, task.settings);

View file

@ -2,7 +2,7 @@ import type { IDataObject, INode, INodeExecutionData, ITaskData } from 'n8n-work
import { NodeConnectionType } from 'n8n-workflow';
import { nanoid } from 'nanoid';
import type { AllCodeTaskData, JSExecSettings } from '@/js-task-runner/js-task-runner';
import type { DataRequestResponse, JSExecSettings } from '@/js-task-runner/js-task-runner';
import type { Task } from '@/task-runner';
/**
@ -48,10 +48,10 @@ export const newTaskData = (opts: Partial<ITaskData> & Pick<ITaskData, 'source'>
/**
* Creates a new all code task data with the given options
*/
export const newAllCodeTaskData = (
export const newCodeTaskData = (
codeNodeInputData: INodeExecutionData[],
opts: Partial<AllCodeTaskData> = {},
): AllCodeTaskData => {
opts: Partial<DataRequestResponse> = {},
): DataRequestResponse => {
const codeNode = newNode({
name: 'JsCode',
parameters: {

View file

@ -0,0 +1,117 @@
import { BuiltInsParserState } from '../built-ins-parser-state';
describe('BuiltInsParserState', () => {
describe('toDataRequestSpecification', () => {
it('should return empty array when no properties are marked as needed', () => {
const state = new BuiltInsParserState();
expect(state.toDataRequestParams()).toEqual({
dataOfNodes: [],
env: false,
input: false,
prevNode: false,
});
});
it('should return all nodes and input when markNeedsAllNodes is called', () => {
const state = new BuiltInsParserState();
state.markNeedsAllNodes();
expect(state.toDataRequestParams()).toEqual({
dataOfNodes: 'all',
env: false,
input: true,
prevNode: false,
});
});
it('should return specific node names when nodes are marked as needed individually', () => {
const state = new BuiltInsParserState();
state.markNodeAsNeeded('Node1');
state.markNodeAsNeeded('Node2');
expect(state.toDataRequestParams()).toEqual({
dataOfNodes: ['Node1', 'Node2'],
env: false,
input: false,
prevNode: false,
});
});
it('should ignore individual nodes when needsAllNodes is marked as true', () => {
const state = new BuiltInsParserState();
state.markNodeAsNeeded('Node1');
state.markNeedsAllNodes();
state.markNodeAsNeeded('Node2'); // should be ignored since all nodes are needed
expect(state.toDataRequestParams()).toEqual({
dataOfNodes: 'all',
env: false,
input: true,
prevNode: false,
});
});
it('should mark env as needed when markEnvAsNeeded is called', () => {
const state = new BuiltInsParserState();
state.markEnvAsNeeded();
expect(state.toDataRequestParams()).toEqual({
dataOfNodes: [],
env: true,
input: false,
prevNode: false,
});
});
it('should mark input as needed when markInputAsNeeded is called', () => {
const state = new BuiltInsParserState();
state.markInputAsNeeded();
expect(state.toDataRequestParams()).toEqual({
dataOfNodes: [],
env: false,
input: true,
prevNode: false,
});
});
it('should mark prevNode as needed when markPrevNodeAsNeeded is called', () => {
const state = new BuiltInsParserState();
state.markPrevNodeAsNeeded();
expect(state.toDataRequestParams()).toEqual({
dataOfNodes: [],
env: false,
input: false,
prevNode: true,
});
});
it('should return correct specification when multiple properties are marked as needed', () => {
const state = new BuiltInsParserState();
state.markNeedsAllNodes();
state.markEnvAsNeeded();
state.markInputAsNeeded();
state.markPrevNodeAsNeeded();
expect(state.toDataRequestParams()).toEqual({
dataOfNodes: 'all',
env: true,
input: true,
prevNode: true,
});
});
it('should return correct specification when all properties are marked as needed', () => {
const state = BuiltInsParserState.newNeedsAllDataState();
expect(state.toDataRequestParams()).toEqual({
dataOfNodes: 'all',
env: true,
input: true,
prevNode: true,
});
});
});
});

View file

@ -0,0 +1,251 @@
import { getAdditionalKeys } from 'n8n-core';
import type { IDataObject, INodeType, IWorkflowExecuteAdditionalData } from 'n8n-workflow';
import { Workflow, WorkflowDataProxy } from 'n8n-workflow';
import { newCodeTaskData } from '../../__tests__/test-data';
import { BuiltInsParser } from '../built-ins-parser';
import { BuiltInsParserState } from '../built-ins-parser-state';
describe('BuiltInsParser', () => {
const parser = new BuiltInsParser();
const parseAndExpectOk = (code: string) => {
const result = parser.parseUsedBuiltIns(code);
if (!result.ok) {
fail(result.error);
}
return result.result;
};
describe('Env, input, execution and prevNode', () => {
const cases: Array<[string, BuiltInsParserState]> = [
['$env', new BuiltInsParserState({ needs$env: true })],
['$execution', new BuiltInsParserState({ needs$execution: true })],
['$prevNode', new BuiltInsParserState({ needs$prevNode: true })],
];
test.each(cases)("should identify built-ins in '%s'", (code, expected) => {
const state = parseAndExpectOk(code);
expect(state).toEqual(expected);
});
});
describe('Input', () => {
it('should mark input as needed when $input is used', () => {
const state = parseAndExpectOk(`
$input.item.json.age = 10 + Math.floor(Math.random() * 30);
$input.item.json.password = $input.item.json.password.split('').map(() => '*').join("")
delete $input.item.json.lastname
const emailParts = $input.item.json.email.split("@")
$input.item.json.emailData = {
user: emailParts[0],
domain: emailParts[1]
}
return $input.item;
`);
expect(state).toEqual(new BuiltInsParserState({ needs$input: true }));
});
it('should mark input as needed when $json is used', () => {
const state = parseAndExpectOk(`
$json.age = 10 + Math.floor(Math.random() * 30);
return $json;
`);
expect(state).toEqual(new BuiltInsParserState({ needs$input: true }));
});
});
describe('$(...)', () => {
const cases: Array<[string, BuiltInsParserState]> = [
[
'$("nodeName").first()',
new BuiltInsParserState({ neededNodeNames: new Set(['nodeName']) }),
],
[
'$("nodeName").all(); $("secondNode").matchingItem()',
new BuiltInsParserState({ neededNodeNames: new Set(['nodeName', 'secondNode']) }),
],
];
test.each(cases)("should identify nodes in '%s'", (code, expected) => {
const state = parseAndExpectOk(code);
expect(state).toEqual(expected);
});
it('should need all nodes when $() is called with a variable', () => {
const state = parseAndExpectOk('var n = "name"; $(n)');
expect(state).toEqual(new BuiltInsParserState({ needsAllNodes: true, needs$input: true }));
});
it('should require all nodes when there are multiple usages of $() and one is with a variable', () => {
const state = parseAndExpectOk(`
$("nodeName");
$("secondNode");
var n = "name";
$(n)
`);
expect(state).toEqual(new BuiltInsParserState({ needsAllNodes: true, needs$input: true }));
});
test.each([
['without parameters', '$()'],
['number literal', '$(123)'],
])('should ignore when $ is called %s', (_, code) => {
const state = parseAndExpectOk(code);
expect(state).toEqual(new BuiltInsParserState());
});
test.each([
'$("node").item',
'$("node")["item"]',
'$("node").pairedItem()',
'$("node")["pairedItem"]()',
'$("node").itemMatching(0)',
'$("node")["itemMatching"](0)',
'$("node")[variable]',
'var a = $("node")',
'let a = $("node")',
'const a = $("node")',
'a = $("node")',
])('should require all nodes if %s is used', (code) => {
const state = parseAndExpectOk(code);
expect(state).toEqual(new BuiltInsParserState({ needsAllNodes: true, needs$input: true }));
});
test.each(['$("node").first()', '$("node").last()', '$("node").all()', '$("node").params'])(
'should require only accessed node if %s is used',
(code) => {
const state = parseAndExpectOk(code);
expect(state).toEqual(
new BuiltInsParserState({
needsAllNodes: false,
neededNodeNames: new Set(['node']),
}),
);
},
);
});
describe('ECMAScript syntax', () => {
describe('ES2020', () => {
it('should parse optional chaining', () => {
parseAndExpectOk(`
const a = { b: { c: 1 } };
return a.b?.c;
`);
});
it('should parse nullish coalescing', () => {
parseAndExpectOk(`
const a = null;
return a ?? 1;
`);
});
});
describe('ES2021', () => {
it('should parse numeric separators', () => {
parseAndExpectOk(`
const a = 1_000_000;
return a;
`);
});
});
});
describe('WorkflowDataProxy built-ins', () => {
it('should have a known list of built-ins', () => {
const data = newCodeTaskData([]);
const dataProxy = new WorkflowDataProxy(
new Workflow({
...data.workflow,
nodeTypes: {
getByName() {
return undefined as unknown as INodeType;
},
getByNameAndVersion() {
return undefined as unknown as INodeType;
},
getKnownTypes() {
return undefined as unknown as IDataObject;
},
},
}),
data.runExecutionData,
data.runIndex,
0,
data.activeNodeName,
data.connectionInputData,
data.siblingParameters,
data.mode,
getAdditionalKeys(
data.additionalData as IWorkflowExecuteAdditionalData,
data.mode,
data.runExecutionData,
),
data.executeData,
data.defaultReturnRunIndex,
data.selfData,
data.contextNodeName,
// Make sure that even if we don't receive the envProviderState for
// whatever reason, we don't expose the task runner's env to the code
data.envProviderState ?? {
env: {},
isEnvAccessBlocked: false,
isProcessAvailable: true,
},
).getDataProxy({ throwOnMissingExecutionData: false });
/**
* NOTE! If you are adding new built-ins to the WorkflowDataProxy class
* make sure the built-ins parser and Task Runner handle them properly.
*/
expect(Object.keys(dataProxy)).toStrictEqual([
'$',
'$input',
'$binary',
'$data',
'$env',
'$evaluateExpression',
'$item',
'$fromAI',
'$fromai',
'$fromAi',
'$items',
'$json',
'$node',
'$self',
'$parameter',
'$prevNode',
'$runIndex',
'$mode',
'$workflow',
'$itemIndex',
'$now',
'$today',
'$jmesPath',
'DateTime',
'Interval',
'Duration',
'$execution',
'$vars',
'$secrets',
'$executionId',
'$resumeWebhookUrl',
'$getPairedItem',
'$jmespath',
'$position',
'$thisItem',
'$thisItemIndex',
'$thisRunIndex',
'$nodeVersion',
'$nodeId',
'$webhookId',
]);
});
});
});

View file

@ -0,0 +1,28 @@
import type {
AssignmentExpression,
Identifier,
Literal,
MemberExpression,
Node,
VariableDeclarator,
} from 'acorn';
export function isLiteral(node?: Node): node is Literal {
return node?.type === 'Literal';
}
export function isIdentifier(node?: Node): node is Identifier {
return node?.type === 'Identifier';
}
export function isMemberExpression(node?: Node): node is MemberExpression {
return node?.type === 'MemberExpression';
}
export function isVariableDeclarator(node?: Node): node is VariableDeclarator {
return node?.type === 'VariableDeclarator';
}
export function isAssignmentExpression(node?: Node): node is AssignmentExpression {
return node?.type === 'AssignmentExpression';
}

View file

@ -0,0 +1,74 @@
import type { BrokerMessage } from '@/message-types';
/**
* Class to keep track of which built-in variables are accessed in the code
*/
export class BuiltInsParserState {
neededNodeNames: Set<string> = new Set();
needsAllNodes = false;
needs$env = false;
needs$input = false;
needs$execution = false;
needs$prevNode = false;
constructor(opts: Partial<BuiltInsParserState> = {}) {
Object.assign(this, opts);
}
/**
* Marks that all nodes are needed, including input data
*/
markNeedsAllNodes() {
this.needsAllNodes = true;
this.needs$input = true;
this.neededNodeNames = new Set();
}
markNodeAsNeeded(nodeName: string) {
if (this.needsAllNodes) {
return;
}
this.neededNodeNames.add(nodeName);
}
markEnvAsNeeded() {
this.needs$env = true;
}
markInputAsNeeded() {
this.needs$input = true;
}
markExecutionAsNeeded() {
this.needs$execution = true;
}
markPrevNodeAsNeeded() {
this.needs$prevNode = true;
}
toDataRequestParams(): BrokerMessage.ToRequester.TaskDataRequest['requestParams'] {
return {
dataOfNodes: this.needsAllNodes ? 'all' : Array.from(this.neededNodeNames),
env: this.needs$env,
input: this.needs$input,
prevNode: this.needs$prevNode,
};
}
static newNeedsAllDataState() {
const obj = new BuiltInsParserState();
obj.markNeedsAllNodes();
obj.markEnvAsNeeded();
obj.markInputAsNeeded();
obj.markExecutionAsNeeded();
obj.markPrevNodeAsNeeded();
return obj;
}
}

View file

@ -0,0 +1,142 @@
import type { CallExpression, Identifier, Node, Program } from 'acorn';
import { parse } from 'acorn';
import { ancestor } from 'acorn-walk';
import type { Result } from 'n8n-workflow';
import { toResult } from 'n8n-workflow';
import {
isAssignmentExpression,
isIdentifier,
isLiteral,
isMemberExpression,
isVariableDeclarator,
} from './acorn-helpers';
import { BuiltInsParserState } from './built-ins-parser-state';
/**
* Class for parsing Code Node code to identify which built-in variables
* are accessed
*/
export class BuiltInsParser {
/**
* Parses which built-in variables are accessed in the given code
*/
public parseUsedBuiltIns(code: string): Result<BuiltInsParserState, Error> {
return toResult(() => {
const wrappedCode = `async function VmCodeWrapper() { ${code} }`;
const ast = parse(wrappedCode, { ecmaVersion: 2025, sourceType: 'module' });
return this.identifyBuiltInsByWalkingAst(ast);
});
}
/** Traverse the AST of the script and mark any data needed for it to run. */
private identifyBuiltInsByWalkingAst(ast: Program) {
const accessedBuiltIns = new BuiltInsParserState();
ancestor(
ast,
{
CallExpression: this.visitCallExpression,
Identifier: this.visitIdentifier,
},
undefined,
accessedBuiltIns,
);
return accessedBuiltIns;
}
private visitCallExpression = (
node: CallExpression,
state: BuiltInsParserState,
ancestors: Node[],
) => {
// $(...)
const isDollar = node.callee.type === 'Identifier' && node.callee.name === '$';
if (!isDollar) return;
// $(): This is not valid, ignore
if (node.arguments.length === 0) {
return;
}
const firstArg = node.arguments[0];
if (!isLiteral(firstArg)) {
// $(variable): Can't easily determine statically, mark all nodes as needed
state.markNeedsAllNodes();
return;
}
if (typeof firstArg.value !== 'string') {
// $(123): Static value, but not a string --> invalid code --> ignore
return;
}
// $("node"): Static value, mark 'nodeName' as needed
state.markNodeAsNeeded(firstArg.value);
// Determine how $("node") is used
this.handlePrevNodeCall(node, state, ancestors);
};
private handlePrevNodeCall(_node: CallExpression, state: BuiltInsParserState, ancestors: Node[]) {
// $("node").item, .pairedItem or .itemMatching: In a case like this, the execution
// engine will traverse back from the current node (i.e. the Code Node) to
// the "node" node and use `pairedItem`s to find which item is linked
// to the current item. So, we need to mark all nodes as needed.
// TODO: We could also mark all the nodes between the current node and
// the "node" node as needed, but that would require more complex logic.
const directParent = ancestors[ancestors.length - 2];
if (isMemberExpression(directParent)) {
const accessedProperty = directParent.property;
if (directParent.computed) {
// $("node")["item"], ["pairedItem"] or ["itemMatching"]
if (isLiteral(accessedProperty)) {
if (this.isPairedItemProperty(accessedProperty.value)) {
state.markNeedsAllNodes();
}
// Else: $("node")[123]: Static value, but not any of the ones above --> ignore
}
// $("node")[variable]
else if (isIdentifier(accessedProperty)) {
state.markNeedsAllNodes();
}
}
// $("node").item, .pairedItem or .itemMatching
else if (isIdentifier(accessedProperty) && this.isPairedItemProperty(accessedProperty.name)) {
state.markNeedsAllNodes();
}
} else if (isVariableDeclarator(directParent) || isAssignmentExpression(directParent)) {
// const variable = $("node") or variable = $("node"):
// In this case we would need to track down all the possible use sites
// of 'variable' and determine if `.item` is accessed on it. This is
// more complex and skipped for now.
// TODO: Optimize for this case
state.markNeedsAllNodes();
} else {
// Something other than the cases above. Mark all nodes as needed, as it
// could be a dynamic access.
state.markNeedsAllNodes();
}
}
private visitIdentifier = (node: Identifier, state: BuiltInsParserState) => {
if (node.name === '$env') {
state.markEnvAsNeeded();
} else if (node.name === '$input' || node.name === '$json') {
state.markInputAsNeeded();
} else if (node.name === '$execution') {
state.markExecutionAsNeeded();
} else if (node.name === '$prevNode') {
state.markPrevNodeAsNeeded();
}
};
private isPairedItemProperty(
property?: string | boolean | null | number | RegExp | bigint,
): boolean {
return property === 'item' || property === 'pairedItem' || property === 'itemMatching';
}
}
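A small usage sketch, assuming the names from this diff and mirroring how `JsTaskRunner.executeTask` (later hunks) consumes the parser: code that only calls `.first()` on one node yields a narrow data request, while a parse failure falls back to requesting everything.

```ts
const parser = new BuiltInsParser();
const result = parser.parseUsedBuiltIns('return $("Webhook").first().json;');
const state = result.ok ? result.result : BuiltInsParserState.newNeedsAllDataState();

state.toDataRequestParams();
// -> { dataOfNodes: ['Webhook'], env: false, input: false, prevNode: false }
```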

View file

@ -24,6 +24,8 @@ import { runInNewContext, type Context } from 'node:vm';
import type { TaskResultData } from '@/runner-types';
import { type Task, TaskRunner } from '@/task-runner';
import { BuiltInsParser } from './built-ins-parser/built-ins-parser';
import { BuiltInsParserState } from './built-ins-parser/built-ins-parser-state';
import { isErrorLike } from './errors/error-like';
import { ExecutionError } from './errors/execution-error';
import { makeSerializable } from './errors/serializable-error';
@ -57,7 +59,7 @@ export interface PartialAdditionalData {
variables: IDataObject;
}
export interface AllCodeTaskData {
export interface DataRequestResponse {
workflow: Omit<WorkflowParameters, 'nodeTypes'>;
inputData: ITaskDataConnections;
node: INode;
@ -84,6 +86,8 @@ type CustomConsole = {
export class JsTaskRunner extends TaskRunner {
private readonly requireResolver: RequireResolver;
private readonly builtInsParser = new BuiltInsParser();
constructor(config: MainConfig, name = 'JS Task Runner') {
super({
taskType: 'javascript',
@ -102,12 +106,20 @@ export class JsTaskRunner extends TaskRunner {
}
async executeTask(task: Task<JSExecSettings>): Promise<TaskResultData> {
const allData = await this.requestData<AllCodeTaskData>(task.taskId, 'all');
const settings = task.settings;
a.ok(settings, 'JS Code not sent to runner');
const workflowParams = allData.workflow;
const neededBuiltInsResult = this.builtInsParser.parseUsedBuiltIns(settings.code);
const neededBuiltIns = neededBuiltInsResult.ok
? neededBuiltInsResult.result
: BuiltInsParserState.newNeedsAllDataState();
const data = await this.requestData<DataRequestResponse>(
task.taskId,
neededBuiltIns.toDataRequestParams(),
);
const workflowParams = data.workflow;
const workflow = new Workflow({
...workflowParams,
nodeTypes: this.nodeTypes,
@ -126,12 +138,12 @@ export class JsTaskRunner extends TaskRunner {
const result =
settings.nodeMode === 'runOnceForAllItems'
? await this.runForAllItems(task.taskId, settings, allData, workflow, customConsole)
: await this.runForEachItem(task.taskId, settings, allData, workflow, customConsole);
? await this.runForAllItems(task.taskId, settings, data, workflow, customConsole)
: await this.runForEachItem(task.taskId, settings, data, workflow, customConsole);
return {
result,
customData: allData.runExecutionData.resultData.metadata,
customData: data.runExecutionData.resultData.metadata,
};
}
@ -165,12 +177,12 @@ export class JsTaskRunner extends TaskRunner {
private async runForAllItems(
taskId: string,
settings: JSExecSettings,
allData: AllCodeTaskData,
data: DataRequestResponse,
workflow: Workflow,
customConsole: CustomConsole,
): Promise<INodeExecutionData[]> {
const dataProxy = this.createDataProxy(allData, workflow, allData.itemIndex);
const inputItems = allData.connectionInputData;
const dataProxy = this.createDataProxy(data, workflow, data.itemIndex);
const inputItems = data.connectionInputData;
const context: Context = {
require: this.requireResolver,
@ -212,16 +224,16 @@ export class JsTaskRunner extends TaskRunner {
private async runForEachItem(
taskId: string,
settings: JSExecSettings,
allData: AllCodeTaskData,
data: DataRequestResponse,
workflow: Workflow,
customConsole: CustomConsole,
): Promise<INodeExecutionData[]> {
const inputItems = allData.connectionInputData;
const inputItems = data.connectionInputData;
const returnData: INodeExecutionData[] = [];
for (let index = 0; index < inputItems.length; index++) {
const item = inputItems[index];
const dataProxy = this.createDataProxy(allData, workflow, index);
const dataProxy = this.createDataProxy(data, workflow, index);
const context: Context = {
require: this.requireResolver,
module: {},
@ -279,33 +291,37 @@ export class JsTaskRunner extends TaskRunner {
return returnData;
}
private createDataProxy(allData: AllCodeTaskData, workflow: Workflow, itemIndex: number) {
private createDataProxy(data: DataRequestResponse, workflow: Workflow, itemIndex: number) {
return new WorkflowDataProxy(
workflow,
allData.runExecutionData,
allData.runIndex,
data.runExecutionData,
data.runIndex,
itemIndex,
allData.activeNodeName,
allData.connectionInputData,
allData.siblingParameters,
allData.mode,
data.activeNodeName,
data.connectionInputData,
data.siblingParameters,
data.mode,
getAdditionalKeys(
allData.additionalData as IWorkflowExecuteAdditionalData,
allData.mode,
allData.runExecutionData,
data.additionalData as IWorkflowExecuteAdditionalData,
data.mode,
data.runExecutionData,
),
allData.executeData,
allData.defaultReturnRunIndex,
allData.selfData,
allData.contextNodeName,
data.executeData,
data.defaultReturnRunIndex,
data.selfData,
data.contextNodeName,
// Make sure that even if we don't receive the envProviderState for
// whatever reason, we don't expose the task runner's env to the code
allData.envProviderState ?? {
data.envProviderState ?? {
env: {},
isEnvAccessBlocked: false,
isProcessAvailable: true,
},
).getDataProxy();
// Because we request only the data the code needs, the execution data
// may be only partially available. Assigning the available built-ins to
// the execution context runs the getter for '$json', which by default
// throws when no input data is present, so that check is disabled here.
).getDataProxy({ throwOnMissingExecutionData: false });
}
private toExecutionErrorIfNeeded(error: unknown): Error {

View file

@ -0,0 +1,204 @@
import type { INodeTypeBaseDescription } from 'n8n-workflow';
import type { RPC_ALLOW_LIST, TaskDataRequestParams, TaskResultData } from './runner-types';
export namespace BrokerMessage {
export namespace ToRunner {
export interface InfoRequest {
type: 'broker:inforequest';
}
export interface RunnerRegistered {
type: 'broker:runnerregistered';
}
export interface TaskOfferAccept {
type: 'broker:taskofferaccept';
taskId: string;
offerId: string;
}
export interface TaskCancel {
type: 'broker:taskcancel';
taskId: string;
reason: string;
}
export interface TaskSettings {
type: 'broker:tasksettings';
taskId: string;
settings: unknown;
}
export interface RPCResponse {
type: 'broker:rpcresponse';
callId: string;
taskId: string;
status: 'success' | 'error';
data: unknown;
}
export interface TaskDataResponse {
type: 'broker:taskdataresponse';
taskId: string;
requestId: string;
data: unknown;
}
export interface NodeTypes {
type: 'broker:nodetypes';
nodeTypes: INodeTypeBaseDescription[];
}
export type All =
| InfoRequest
| TaskOfferAccept
| TaskCancel
| TaskSettings
| RunnerRegistered
| RPCResponse
| TaskDataResponse
| NodeTypes;
}
export namespace ToRequester {
export interface TaskReady {
type: 'broker:taskready';
requestId: string;
taskId: string;
}
export interface TaskDone {
type: 'broker:taskdone';
taskId: string;
data: TaskResultData;
}
export interface TaskError {
type: 'broker:taskerror';
taskId: string;
error: unknown;
}
export interface TaskDataRequest {
type: 'broker:taskdatarequest';
taskId: string;
requestId: string;
requestParams: TaskDataRequestParams;
}
export interface RPC {
type: 'broker:rpc';
callId: string;
taskId: string;
name: (typeof RPC_ALLOW_LIST)[number];
params: unknown[];
}
export type All = TaskReady | TaskDone | TaskError | TaskDataRequest | RPC;
}
}
export namespace RequesterMessage {
export namespace ToBroker {
export interface TaskSettings {
type: 'requester:tasksettings';
taskId: string;
settings: unknown;
}
export interface TaskCancel {
type: 'requester:taskcancel';
taskId: string;
reason: string;
}
export interface TaskDataResponse {
type: 'requester:taskdataresponse';
taskId: string;
requestId: string;
data: unknown;
}
export interface RPCResponse {
type: 'requester:rpcresponse';
taskId: string;
callId: string;
status: 'success' | 'error';
data: unknown;
}
export interface TaskRequest {
type: 'requester:taskrequest';
requestId: string;
taskType: string;
}
export type All = TaskSettings | TaskCancel | RPCResponse | TaskDataResponse | TaskRequest;
}
}
export namespace RunnerMessage {
export namespace ToBroker {
export interface Info {
type: 'runner:info';
name: string;
types: string[];
}
export interface TaskAccepted {
type: 'runner:taskaccepted';
taskId: string;
}
export interface TaskRejected {
type: 'runner:taskrejected';
taskId: string;
reason: string;
}
export interface TaskDone {
type: 'runner:taskdone';
taskId: string;
data: TaskResultData;
}
export interface TaskError {
type: 'runner:taskerror';
taskId: string;
error: unknown;
}
export interface TaskOffer {
type: 'runner:taskoffer';
offerId: string;
taskType: string;
validFor: number;
}
export interface TaskDataRequest {
type: 'runner:taskdatarequest';
taskId: string;
requestId: string;
requestParams: TaskDataRequestParams;
}
export interface RPC {
type: 'runner:rpc';
callId: string;
taskId: string;
name: (typeof RPC_ALLOW_LIST)[number];
params: unknown[];
}
export type All =
| Info
| TaskDone
| TaskError
| TaskAccepted
| TaskRejected
| TaskOffer
| RPC
| TaskDataRequest;
}
}
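To make the direction of these unions concrete, here is a minimal sketch of one round trip between a runner and the broker; the IDs and parameter values are assumptions for the example, not fixtures from this change.

// Illustrative only: a runner asks the broker for trimmed task data...
const request: RunnerMessage.ToBroker.TaskDataRequest = {
  type: 'runner:taskdatarequest',
  taskId: 'task-1',
  requestId: 'req-1',
  requestParams: { dataOfNodes: 'all', prevNode: true, input: true, env: true },
};

// ...and the broker forwards the requester's answer back to the runner.
const response: BrokerMessage.ToRunner.TaskDataResponse = {
  type: 'broker:taskdataresponse',
  taskId: 'task-1',
  requestId: 'req-1',
  data: {}, // a DataRequestResponse payload in practice
};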

View file

@ -1,213 +1,90 @@
import type { INodeExecutionData, INodeTypeBaseDescription } from 'n8n-workflow';
import type {
EnvProviderState,
IDataObject,
IExecuteData,
IExecuteFunctions,
INode,
INodeExecutionData,
INodeParameters,
IRunExecutionData,
ITaskDataConnections,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowExecuteMode,
WorkflowParameters,
} from 'n8n-workflow';
export type DataRequestType = 'input' | 'node' | 'all';
/**
* Specifies what data should be included for a task data request.
*/
export interface TaskDataRequestParams {
dataOfNodes: string[] | 'all';
prevNode: boolean;
/** Whether input data for the node should be included */
input: boolean;
/** Whether env provider's state should be included */
env: boolean;
}
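As a concrete illustration of this interface, two typical parameter objects might look like the sketch below; the values are assumptions for the example, not taken from this change.

// Illustrative only: request everything the node could need.
const requestAll: TaskDataRequestParams = {
  dataOfNodes: 'all',
  prevNode: true,
  input: true,
  env: true,
};

// Illustrative only: request just the previous node's output, skipping
// input items and the env provider's state.
const requestPrevNodeOnly: TaskDataRequestParams = {
  dataOfNodes: [],
  prevNode: true,
  input: false,
  env: false,
};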
export interface DataRequestResponse {
workflow: Omit<WorkflowParameters, 'nodeTypes'>;
inputData: ITaskDataConnections;
node: INode;
runExecutionData: IRunExecutionData;
runIndex: number;
itemIndex: number;
activeNodeName: string;
connectionInputData: INodeExecutionData[];
siblingParameters: INodeParameters;
mode: WorkflowExecuteMode;
envProviderState: EnvProviderState;
executeData?: IExecuteData;
defaultReturnRunIndex: number;
selfData: IDataObject;
contextNodeName: string;
additionalData: PartialAdditionalData;
}
export interface TaskResultData {
result: INodeExecutionData[];
customData?: Record<string, string>;
}
export namespace N8nMessage {
export namespace ToRunner {
export interface InfoRequest {
type: 'broker:inforequest';
}
export interface TaskData {
executeFunctions: IExecuteFunctions;
inputData: ITaskDataConnections;
node: INode;
export interface RunnerRegistered {
type: 'broker:runnerregistered';
}
export interface TaskOfferAccept {
type: 'broker:taskofferaccept';
taskId: string;
offerId: string;
}
export interface TaskCancel {
type: 'broker:taskcancel';
taskId: string;
reason: string;
}
export interface TaskSettings {
type: 'broker:tasksettings';
taskId: string;
settings: unknown;
}
export interface RPCResponse {
type: 'broker:rpcresponse';
callId: string;
taskId: string;
status: 'success' | 'error';
data: unknown;
}
export interface TaskDataResponse {
type: 'broker:taskdataresponse';
taskId: string;
requestId: string;
data: unknown;
}
export interface NodeTypes {
type: 'broker:nodetypes';
nodeTypes: INodeTypeBaseDescription[];
}
export type All =
| InfoRequest
| TaskOfferAccept
| TaskCancel
| TaskSettings
| RunnerRegistered
| RPCResponse
| TaskDataResponse
| NodeTypes;
}
export namespace ToRequester {
export interface TaskReady {
type: 'broker:taskready';
requestId: string;
taskId: string;
}
export interface TaskDone {
type: 'broker:taskdone';
taskId: string;
data: TaskResultData;
}
export interface TaskError {
type: 'broker:taskerror';
taskId: string;
error: unknown;
}
export interface TaskDataRequest {
type: 'broker:taskdatarequest';
taskId: string;
requestId: string;
requestType: DataRequestType;
param?: string;
}
export interface RPC {
type: 'broker:rpc';
callId: string;
taskId: string;
name: (typeof RPC_ALLOW_LIST)[number];
params: unknown[];
}
export type All = TaskReady | TaskDone | TaskError | TaskDataRequest | RPC;
}
workflow: Workflow;
runExecutionData: IRunExecutionData;
runIndex: number;
itemIndex: number;
activeNodeName: string;
connectionInputData: INodeExecutionData[];
siblingParameters: INodeParameters;
mode: WorkflowExecuteMode;
envProviderState: EnvProviderState;
executeData?: IExecuteData;
defaultReturnRunIndex: number;
selfData: IDataObject;
contextNodeName: string;
additionalData: IWorkflowExecuteAdditionalData;
}
export namespace RequesterMessage {
export namespace ToN8n {
export interface TaskSettings {
type: 'requester:tasksettings';
taskId: string;
settings: unknown;
}
export interface TaskCancel {
type: 'requester:taskcancel';
taskId: string;
reason: string;
}
export interface TaskDataResponse {
type: 'requester:taskdataresponse';
taskId: string;
requestId: string;
data: unknown;
}
export interface RPCResponse {
type: 'requester:rpcresponse';
taskId: string;
callId: string;
status: 'success' | 'error';
data: unknown;
}
export interface TaskRequest {
type: 'requester:taskrequest';
requestId: string;
taskType: string;
}
export type All = TaskSettings | TaskCancel | RPCResponse | TaskDataResponse | TaskRequest;
}
}
export namespace RunnerMessage {
export namespace ToN8n {
export interface Info {
type: 'runner:info';
name: string;
types: string[];
}
export interface TaskAccepted {
type: 'runner:taskaccepted';
taskId: string;
}
export interface TaskRejected {
type: 'runner:taskrejected';
taskId: string;
reason: string;
}
export interface TaskDone {
type: 'runner:taskdone';
taskId: string;
data: TaskResultData;
}
export interface TaskError {
type: 'runner:taskerror';
taskId: string;
error: unknown;
}
export interface TaskOffer {
type: 'runner:taskoffer';
offerId: string;
taskType: string;
validFor: number;
}
export interface TaskDataRequest {
type: 'runner:taskdatarequest';
taskId: string;
requestId: string;
requestType: DataRequestType;
param?: string;
}
export interface RPC {
type: 'runner:rpc';
callId: string;
taskId: string;
name: (typeof RPC_ALLOW_LIST)[number];
params: unknown[];
}
export type All =
| Info
| TaskDone
| TaskError
| TaskAccepted
| TaskRejected
| TaskOffer
| RPC
| TaskDataRequest;
}
export interface PartialAdditionalData {
executionId?: string;
restartExecutionId?: string;
restApiUrl: string;
instanceBaseUrl: string;
formWaitingBaseUrl: string;
webhookBaseUrl: string;
webhookWaitingBaseUrl: string;
webhookTestBaseUrl: string;
currentNodeParameters?: INodeParameters;
executionTimeoutTimestamp?: number;
userId?: string;
variables: IDataObject;
}
export const RPC_ALLOW_LIST = [

View file

@ -2,14 +2,10 @@ import { ApplicationError, type INodeTypeDescription } from 'n8n-workflow';
import { nanoid } from 'nanoid';
import { type MessageEvent, WebSocket } from 'ws';
import type { BaseRunnerConfig } from './config/base-runner-config';
import { TaskRunnerNodeTypes } from './node-types';
import {
RPC_ALLOW_LIST,
type RunnerMessage,
type N8nMessage,
type TaskResultData,
} from './runner-types';
import type { BaseRunnerConfig } from '@/config/base-runner-config';
import type { BrokerMessage, RunnerMessage } from '@/message-types';
import { TaskRunnerNodeTypes } from '@/node-types';
import { RPC_ALLOW_LIST, type TaskResultData } from '@/runner-types';
export interface Task<T = unknown> {
taskId: string;
@ -90,7 +86,7 @@ export abstract class TaskRunner {
private receiveMessage = (message: MessageEvent) => {
// eslint-disable-next-line n8n-local-rules/no-uncaught-json-parse
const data = JSON.parse(message.data as string) as N8nMessage.ToRunner.All;
const data = JSON.parse(message.data as string) as BrokerMessage.ToRunner.All;
void this.onMessage(data);
};
@ -140,11 +136,11 @@ export abstract class TaskRunner {
}
}
send(message: RunnerMessage.ToN8n.All) {
send(message: RunnerMessage.ToBroker.All) {
this.ws.send(JSON.stringify(message));
}
onMessage(message: N8nMessage.ToRunner.All) {
onMessage(message: BrokerMessage.ToRunner.All) {
switch (message.type) {
case 'broker:inforequest':
this.send({
@ -252,7 +248,7 @@ export abstract class TaskRunner {
this.sendOffers();
}
taskDone(taskId: string, data: RunnerMessage.ToN8n.TaskDone['data']) {
taskDone(taskId: string, data: RunnerMessage.ToBroker.TaskDone['data']) {
this.send({
type: 'runner:taskdone',
taskId,
@ -288,8 +284,7 @@ export abstract class TaskRunner {
async requestData<T = unknown>(
taskId: Task['taskId'],
type: RunnerMessage.ToN8n.TaskDataRequest['requestType'],
param?: string,
requestParams: RunnerMessage.ToBroker.TaskDataRequest['requestParams'],
): Promise<T> {
const requestId = nanoid();
@ -305,8 +300,7 @@ export abstract class TaskRunner {
type: 'runner:taskdatarequest',
taskId,
requestId,
requestType: type,
param,
requestParams,
});
try {
@ -316,7 +310,7 @@ export abstract class TaskRunner {
}
}
async makeRpcCall(taskId: string, name: RunnerMessage.ToN8n.RPC['name'], params: unknown[]) {
async makeRpcCall(taskId: string, name: RunnerMessage.ToBroker.RPC['name'], params: unknown[]) {
const callId = nanoid();
const dataPromise = new Promise((resolve, reject) => {
@ -344,7 +338,7 @@ export abstract class TaskRunner {
handleRpcResponse(
callId: string,
status: N8nMessage.ToRunner.RPCResponse['status'],
status: BrokerMessage.ToRunner.RPCResponse['status'],
data: unknown,
) {
const call = this.rpcCalls.get(callId);

View file

@ -1,6 +1,6 @@
{
"name": "n8n",
"version": "1.65.0",
"version": "1.66.0",
"description": "n8n Workflow Automation Tool",
"main": "dist/index",
"types": "dist/index.d.ts",

View file

@ -4,6 +4,7 @@ import Container from 'typedi';
import { ActiveExecutions } from '@/active-executions';
import config from '@/config';
import type { User } from '@/databases/entities/user';
import { ExecutionNotFoundError } from '@/errors/execution-not-found-error';
import { Telemetry } from '@/telemetry';
import { WorkflowRunner } from '@/workflow-runner';
import { mockInstance } from '@test/mocking';
@ -64,6 +65,19 @@ test('processError should return early in Bull stalled edge case', async () => {
expect(watchedWorkflowExecuteAfter).toHaveBeenCalledTimes(0);
});
test('processError should return early if the error is `ExecutionNotFoundError`', async () => {
const workflow = await createWorkflow({}, owner);
const execution = await createExecution({ status: 'success', finished: true }, workflow);
await runner.processError(
new ExecutionNotFoundError(execution.id),
new Date(),
'webhook',
execution.id,
new WorkflowHooks(hookFunctions, 'webhook', execution.id, workflow),
);
expect(watchedWorkflowExecuteAfter).toHaveBeenCalledTimes(0);
});
test('processError should process error', async () => {
const workflow = await createWorkflow({}, owner);
const execution = await createExecution(

View file

@ -1,6 +1,12 @@
/* eslint-disable @typescript-eslint/no-unsafe-member-access */
import { ActiveWorkflows, InstanceSettings, NodeExecuteFunctions } from 'n8n-core';
import {
ActiveWorkflows,
InstanceSettings,
NodeExecuteFunctions,
PollContext,
TriggerContext,
} from 'n8n-core';
import type {
ExecutionError,
IDeferredPromise,
@ -274,18 +280,11 @@ export class ActiveWorkflowManager {
activation: WorkflowActivateMode,
): IGetExecutePollFunctions {
return (workflow: Workflow, node: INode) => {
const returnFunctions = NodeExecuteFunctions.getExecutePollFunctions(
workflow,
node,
additionalData,
mode,
activation,
);
returnFunctions.__emit = (
const __emit = (
data: INodeExecutionData[][],
responsePromise?: IDeferredPromise<IExecuteResponsePromiseData>,
donePromise?: IDeferredPromise<IRun | undefined>,
): void => {
) => {
this.logger.debug(`Received event to trigger execution for workflow "${workflow.name}"`);
void this.workflowStaticDataService.saveStaticData(workflow);
const executePromise = this.workflowExecutionService.runWorkflow(
@ -309,14 +308,15 @@ export class ActiveWorkflowManager {
}
};
returnFunctions.__emitError = (error: ExecutionError): void => {
const __emitError = (error: ExecutionError) => {
void this.executionService
.createErrorExecution(error, node, workflowData, workflow, mode)
.then(() => {
this.executeErrorWorkflow(error, workflowData, mode);
});
};
return returnFunctions;
return new PollContext(workflow, node, additionalData, mode, activation, __emit, __emitError);
};
}
@ -331,18 +331,11 @@ export class ActiveWorkflowManager {
activation: WorkflowActivateMode,
): IGetExecuteTriggerFunctions {
return (workflow: Workflow, node: INode) => {
const returnFunctions = NodeExecuteFunctions.getExecuteTriggerFunctions(
workflow,
node,
additionalData,
mode,
activation,
);
returnFunctions.emit = (
const emit = (
data: INodeExecutionData[][],
responsePromise?: IDeferredPromise<IExecuteResponsePromiseData>,
donePromise?: IDeferredPromise<IRun | undefined>,
): void => {
) => {
this.logger.debug(`Received trigger for workflow "${workflow.name}"`);
void this.workflowStaticDataService.saveStaticData(workflow);
@ -366,7 +359,7 @@ export class ActiveWorkflowManager {
executePromise.catch((error: Error) => this.logger.error(error.message, { error }));
}
};
returnFunctions.emitError = (error: Error): void => {
const emitError = (error: Error): void => {
this.logger.info(
`The trigger node "${node.name}" of workflow "${workflowData.name}" failed with the error: "${error.message}". Will try to reactivate.`,
{
@ -391,7 +384,7 @@ export class ActiveWorkflowManager {
this.addQueuedWorkflowActivation(activation, workflowData as WorkflowEntity);
};
return returnFunctions;
return new TriggerContext(workflow, node, additionalData, mode, activation, emit, emitError);
};
}

View file

@ -22,14 +22,12 @@ import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'
import { EventService } from '@/events/event.service';
import { ExecutionService } from '@/executions/execution.service';
import { License } from '@/license';
import { LocalTaskManager } from '@/runners/task-managers/local-task-manager';
import { TaskManager } from '@/runners/task-managers/task-manager';
import { PubSubHandler } from '@/scaling/pubsub/pubsub-handler';
import { Subscriber } from '@/scaling/pubsub/subscriber.service';
import { Server } from '@/server';
import { OrchestrationService } from '@/services/orchestration.service';
import { OwnershipService } from '@/services/ownership.service';
import { PruningService } from '@/services/pruning.service';
import { PruningService } from '@/services/pruning/pruning.service';
import { UrlService } from '@/services/url.service';
import { WaitTracker } from '@/wait-tracker';
import { WorkflowRunner } from '@/workflow-runner';
@ -224,19 +222,9 @@ export class Start extends BaseCommand {
const { taskRunners: taskRunnerConfig } = this.globalConfig;
if (!taskRunnerConfig.disabled) {
Container.set(TaskManager, new LocalTaskManager());
const { TaskRunnerServer } = await import('@/runners/task-runner-server');
const taskRunnerServer = Container.get(TaskRunnerServer);
await taskRunnerServer.start();
if (
taskRunnerConfig.mode === 'internal_childprocess' ||
taskRunnerConfig.mode === 'internal_launcher'
) {
const { TaskRunnerProcess } = await import('@/runners/task-runner-process');
const runnerProcess = Container.get(TaskRunnerProcess);
await runnerProcess.start();
}
const { TaskRunnerModule } = await import('@/runners/task-runner-module');
const taskRunnerModule = Container.get(TaskRunnerModule);
await taskRunnerModule.start();
}
}

View file

@ -8,8 +8,6 @@ import { EventMessageGeneric } from '@/eventbus/event-message-classes/event-mess
import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus';
import { LogStreamingEventRelay } from '@/events/relays/log-streaming.event-relay';
import { Logger } from '@/logging/logger.service';
import { LocalTaskManager } from '@/runners/task-managers/local-task-manager';
import { TaskManager } from '@/runners/task-managers/task-manager';
import { PubSubHandler } from '@/scaling/pubsub/pubsub-handler';
import { Subscriber } from '@/scaling/pubsub/subscriber.service';
import type { ScalingService } from '@/scaling/scaling.service';
@ -116,19 +114,9 @@ export class Worker extends BaseCommand {
const { taskRunners: taskRunnerConfig } = this.globalConfig;
if (!taskRunnerConfig.disabled) {
Container.set(TaskManager, new LocalTaskManager());
const { TaskRunnerServer } = await import('@/runners/task-runner-server');
const taskRunnerServer = Container.get(TaskRunnerServer);
await taskRunnerServer.start();
if (
taskRunnerConfig.mode === 'internal_childprocess' ||
taskRunnerConfig.mode === 'internal_launcher'
) {
const { TaskRunnerProcess } = await import('@/runners/task-runner-process');
const runnerProcess = Container.get(TaskRunnerProcess);
await runnerProcess.start();
}
const { TaskRunnerModule } = await import('@/runners/task-runner-module');
const taskRunnerModule = Container.get(TaskRunnerModule);
await taskRunnerModule.start();
}
}

View file

@ -98,54 +98,6 @@ export const schema = {
env: 'EXECUTIONS_DATA_SAVE_MANUAL_EXECUTIONS',
},
// To not exceed the database's capacity and keep its size moderate
// the execution data gets pruned regularly (default: 15 minute interval).
// All saved execution data older than the max age will be deleted.
// Pruning is currently not activated by default, which will change in
// a future version.
pruneData: {
doc: 'Delete data of past executions on a rolling basis',
format: Boolean,
default: true,
env: 'EXECUTIONS_DATA_PRUNE',
},
pruneDataMaxAge: {
doc: 'How old (hours) the finished execution data has to be to get soft-deleted',
format: Number,
default: 336,
env: 'EXECUTIONS_DATA_MAX_AGE',
},
pruneDataHardDeleteBuffer: {
doc: 'How old (hours) the finished execution data has to be to get hard-deleted. By default, this buffer excludes recent executions as the user may need them while building a workflow.',
format: Number,
default: 1,
env: 'EXECUTIONS_DATA_HARD_DELETE_BUFFER',
},
pruneDataIntervals: {
hardDelete: {
doc: 'How often (minutes) execution data should be hard-deleted',
format: Number,
default: 15,
env: 'EXECUTIONS_DATA_PRUNE_HARD_DELETE_INTERVAL',
},
softDelete: {
doc: 'How often (minutes) execution data should be soft-deleted',
format: Number,
default: 60,
env: 'EXECUTIONS_DATA_PRUNE_SOFT_DELETE_INTERVAL',
},
},
// Additional pruning option to delete executions if total count exceeds the configured max.
// Deletes the oldest entries first
// Set to 0 for No limit
pruneDataMaxCount: {
doc: "Maximum number of finished executions to keep in DB. Doesn't necessarily prune exactly to max number. 0 = no limit",
format: Number,
default: 10000,
env: 'EXECUTIONS_DATA_PRUNE_MAX_COUNT',
},
queueRecovery: {
interval: {
doc: 'How often (minutes) to check for queue recovery',

View file

@ -35,7 +35,6 @@ import type {
} from 'n8n-workflow';
import { Service } from 'typedi';
import config from '@/config';
import { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity.ee';
import { AnnotationTagMapping } from '@/databases/entities/annotation-tag-mapping.ee';
import { ExecutionAnnotation } from '@/databases/entities/execution-annotation.ee';
@ -460,8 +459,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
}
async softDeletePrunableExecutions() {
const maxAge = config.getEnv('executions.pruneDataMaxAge'); // in h
const maxCount = config.getEnv('executions.pruneDataMaxCount');
const { maxAge, maxCount } = this.globalConfig.pruning;
// Sub-query to exclude executions having annotations
const annotatedExecutionsSubQuery = this.manager
@ -515,9 +513,9 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
.execute();
}
async hardDeleteSoftDeletedExecutions() {
async findSoftDeletedExecutions() {
const date = new Date();
date.setHours(date.getHours() - config.getEnv('executions.pruneDataHardDeleteBuffer'));
date.setHours(date.getHours() - this.globalConfig.pruning.hardDeleteBuffer);
const workflowIdsAndExecutionIds = (
await this.find({

View file

@ -771,8 +771,8 @@ export class TelemetryEventRelay extends EventRelay {
executions_data_save_manual_executions: config.getEnv(
'executions.saveDataManualExecutions',
),
executions_data_prune: config.getEnv('executions.pruneData'),
executions_data_max_age: config.getEnv('executions.pruneDataMaxAge'),
executions_data_prune: this.globalConfig.pruning.isEnabled,
executions_data_max_age: this.globalConfig.pruning.maxAge,
},
n8n_deployment_type: config.getEnv('deployment.type'),
n8n_binary_data_mode: binaryDataConfig.mode,

View file

@ -1,4 +1,5 @@
import {
deepCopy,
ErrorReporterProxy,
type IRunExecutionData,
type ITaskData,
@ -57,7 +58,7 @@ test('should ignore on leftover async call', async () => {
expect(executionRepository.updateExistingExecution).not.toHaveBeenCalled();
});
test('should update execution', async () => {
test('should update execution when saving progress is enabled', async () => {
jest.spyOn(fnModule, 'toSaveSettings').mockReturnValue({
...commonSettings,
progress: true,
@ -86,6 +87,37 @@ test('should update execution', async () => {
expect(reporterSpy).not.toHaveBeenCalled();
});
test('should update execution when saving progress is disabled, but waitTill is defined', async () => {
jest.spyOn(fnModule, 'toSaveSettings').mockReturnValue({
...commonSettings,
progress: false,
});
const reporterSpy = jest.spyOn(ErrorReporterProxy, 'error');
executionRepository.findSingleExecution.mockResolvedValue({} as IExecutionResponse);
const args = deepCopy(commonArgs);
args[4].waitTill = new Date();
await saveExecutionProgress(...args);
expect(executionRepository.updateExistingExecution).toHaveBeenCalledWith('some-execution-id', {
data: {
executionData: undefined,
resultData: {
lastNodeExecuted: 'My Node',
runData: {
'My Node': [{}],
},
},
startData: {},
},
status: 'running',
});
expect(reporterSpy).not.toHaveBeenCalled();
});
test('should report error on failure', async () => {
jest.spyOn(fnModule, 'toSaveSettings').mockReturnValue({
...commonSettings,

View file

@ -16,7 +16,7 @@ export async function saveExecutionProgress(
) {
const saveSettings = toSaveSettings(workflowData.settings);
if (!saveSettings.progress) return;
if (!saveSettings.progress && !executionData.waitTill) return;
const logger = Container.get(Logger);

View file

@ -1,7 +1,7 @@
import type { RunnerMessage, TaskResultData } from '@n8n/task-runner';
import { mock } from 'jest-mock-extended';
import { TaskRejectError } from '../errors';
import type { RunnerMessage, TaskResultData } from '../runner-types';
import { TaskBroker } from '../task-broker.service';
import type { TaskOffer, TaskRequest, TaskRunner } from '../task-broker.service';
@ -381,7 +381,7 @@ describe('TaskBroker', () => {
const runnerId = 'runner1';
const taskId = 'task1';
const message: RunnerMessage.ToN8n.TaskAccepted = {
const message: RunnerMessage.ToBroker.TaskAccepted = {
type: 'runner:taskaccepted',
taskId,
};
@ -406,7 +406,7 @@ describe('TaskBroker', () => {
const taskId = 'task1';
const rejectionReason = 'Task execution failed';
const message: RunnerMessage.ToN8n.TaskRejected = {
const message: RunnerMessage.ToBroker.TaskRejected = {
type: 'runner:taskrejected',
taskId,
reason: rejectionReason,
@ -433,7 +433,7 @@ describe('TaskBroker', () => {
const requesterId = 'requester1';
const data = mock<TaskResultData>();
const message: RunnerMessage.ToN8n.TaskDone = {
const message: RunnerMessage.ToBroker.TaskDone = {
type: 'runner:taskdone',
taskId,
data,
@ -464,7 +464,7 @@ describe('TaskBroker', () => {
const requesterId = 'requester1';
const errorMessage = 'Task execution failed';
const message: RunnerMessage.ToN8n.TaskError = {
const message: RunnerMessage.ToBroker.TaskError = {
type: 'runner:taskerror',
taskId,
error: errorMessage,
@ -494,15 +494,18 @@ describe('TaskBroker', () => {
const taskId = 'task1';
const requesterId = 'requester1';
const requestId = 'request1';
const requestType = 'input';
const param = 'test_param';
const requestParams: RunnerMessage.ToBroker.TaskDataRequest['requestParams'] = {
dataOfNodes: 'all',
env: true,
input: true,
prevNode: true,
};
const message: RunnerMessage.ToN8n.TaskDataRequest = {
const message: RunnerMessage.ToBroker.TaskDataRequest = {
type: 'runner:taskdatarequest',
taskId,
requestId,
requestType,
param,
requestParams,
};
const requesterMessageCallback = jest.fn();
@ -519,8 +522,7 @@ describe('TaskBroker', () => {
type: 'broker:taskdatarequest',
taskId,
requestId,
requestType,
param,
requestParams,
});
});
@ -532,7 +534,7 @@ describe('TaskBroker', () => {
const rpcName = 'helpers.httpRequestWithAuthentication';
const rpcParams = ['param1', 'param2'];
const message: RunnerMessage.ToN8n.RPC = {
const message: RunnerMessage.ToBroker.RPC = {
type: 'runner:rpc',
taskId,
callId,

View file

@ -0,0 +1,16 @@
import { Service } from 'typedi';
import { TaskRunnerDisconnectedError } from './errors/task-runner-disconnected-error';
import type { DisconnectAnalyzer } from './runner-types';
import type { TaskRunner } from './task-broker.service';
/**
* Analyzes the disconnect reason of a task runner to provide a more
* meaningful error message to the user.
*/
@Service()
export class DefaultTaskRunnerDisconnectAnalyzer implements DisconnectAnalyzer {
async determineDisconnectReason(runnerId: TaskRunner['id']): Promise<Error> {
return new TaskRunnerDisconnectedError(runnerId);
}
}

View file

@ -3,7 +3,7 @@ import { Service } from 'typedi';
import config from '@/config';
import { TaskRunnerDisconnectedError } from './errors/task-runner-disconnected-error';
import { DefaultTaskRunnerDisconnectAnalyzer } from './default-task-runner-disconnect-analyzer';
import { TaskRunnerOomError } from './errors/task-runner-oom-error';
import { SlidingWindowSignal } from './sliding-window-signal';
import type { TaskRunner } from './task-broker.service';
@ -15,13 +15,19 @@ import { TaskRunnerProcess } from './task-runner-process';
* meaningful error message to the user.
*/
@Service()
export class TaskRunnerDisconnectAnalyzer {
export class InternalTaskRunnerDisconnectAnalyzer extends DefaultTaskRunnerDisconnectAnalyzer {
private get isCloudDeployment() {
return config.get('deployment.type') === 'cloud';
}
private readonly exitReasonSignal: SlidingWindowSignal<TaskRunnerProcessEventMap, 'exit'>;
constructor(
private readonly runnerConfig: TaskRunnersConfig,
private readonly taskRunnerProcess: TaskRunnerProcess,
) {
super();
// When the task runner process is running as a child process, there's
// no determinate time when it exits compared to when the runner disconnects
// (i.e. it's a race condition). Hence we use a sliding window to determine
@ -32,17 +38,13 @@ export class TaskRunnerDisconnectAnalyzer {
});
}
private get isCloudDeployment() {
return config.get('deployment.type') === 'cloud';
}
async determineDisconnectReason(runnerId: TaskRunner['id']): Promise<Error> {
const exitCode = await this.awaitExitSignal();
if (exitCode === 'oom') {
return new TaskRunnerOomError(runnerId, this.isCloudDeployment);
}
return new TaskRunnerDisconnectedError(runnerId);
return await super.determineDisconnectReason(runnerId);
}
private async awaitExitSignal(): Promise<ExitReason> {

View file

@ -1,10 +1,26 @@
import type { Response } from 'express';
import type { INodeExecutionData, INodeTypeBaseDescription } from 'n8n-workflow';
import type { INodeExecutionData } from 'n8n-workflow';
import type WebSocket from 'ws';
import type { TaskRunner } from './task-broker.service';
import type { AuthlessRequest } from '../requests';
/**
* Specifies what data should be included for a task data request.
*/
export interface TaskDataRequestParams {
dataOfNodes: string[] | 'all';
prevNode: boolean;
/** Whether input data for the node should be included */
input: boolean;
/** Whether env provider's state should be included */
env: boolean;
}
export interface DisconnectAnalyzer {
determineDisconnectReason(runnerId: TaskRunner['id']): Promise<Error>;
}
export type DataRequestType = 'input' | 'node' | 'all';
export interface TaskResultData {
@ -18,232 +34,3 @@ export interface TaskRunnerServerInitRequest
}
export type TaskRunnerServerInitResponse = Response & { req: TaskRunnerServerInitRequest };
export namespace N8nMessage {
export namespace ToRunner {
export interface InfoRequest {
type: 'broker:inforequest';
}
export interface RunnerRegistered {
type: 'broker:runnerregistered';
}
export interface TaskOfferAccept {
type: 'broker:taskofferaccept';
taskId: string;
offerId: string;
}
export interface TaskCancel {
type: 'broker:taskcancel';
taskId: string;
reason: string;
}
export interface TaskSettings {
type: 'broker:tasksettings';
taskId: string;
settings: unknown;
}
export interface RPCResponse {
type: 'broker:rpcresponse';
callId: string;
taskId: string;
status: 'success' | 'error';
data: unknown;
}
export interface TaskDataResponse {
type: 'broker:taskdataresponse';
taskId: string;
requestId: string;
data: unknown;
}
export interface NodeTypes {
type: 'broker:nodetypes';
nodeTypes: INodeTypeBaseDescription[];
}
export type All =
| InfoRequest
| TaskOfferAccept
| TaskCancel
| TaskSettings
| RunnerRegistered
| RPCResponse
| TaskDataResponse
| NodeTypes;
}
export namespace ToRequester {
export interface TaskReady {
type: 'broker:taskready';
requestId: string;
taskId: string;
}
export interface TaskDone {
type: 'broker:taskdone';
taskId: string;
data: TaskResultData;
}
export interface TaskError {
type: 'broker:taskerror';
taskId: string;
error: unknown;
}
export interface TaskDataRequest {
type: 'broker:taskdatarequest';
taskId: string;
requestId: string;
requestType: DataRequestType;
param?: string;
}
export interface RPC {
type: 'broker:rpc';
callId: string;
taskId: string;
name: (typeof RPC_ALLOW_LIST)[number];
params: unknown[];
}
export type All = TaskReady | TaskDone | TaskError | TaskDataRequest | RPC;
}
}
export namespace RequesterMessage {
export namespace ToN8n {
export interface TaskSettings {
type: 'requester:tasksettings';
taskId: string;
settings: unknown;
}
export interface TaskCancel {
type: 'requester:taskcancel';
taskId: string;
reason: string;
}
export interface TaskDataResponse {
type: 'requester:taskdataresponse';
taskId: string;
requestId: string;
data: unknown;
}
export interface RPCResponse {
type: 'requester:rpcresponse';
taskId: string;
callId: string;
status: 'success' | 'error';
data: unknown;
}
export interface TaskRequest {
type: 'requester:taskrequest';
requestId: string;
taskType: string;
}
export type All = TaskSettings | TaskCancel | RPCResponse | TaskDataResponse | TaskRequest;
}
}
export namespace RunnerMessage {
export namespace ToN8n {
export interface Info {
type: 'runner:info';
name: string;
types: string[];
}
export interface TaskAccepted {
type: 'runner:taskaccepted';
taskId: string;
}
export interface TaskRejected {
type: 'runner:taskrejected';
taskId: string;
reason: string;
}
export interface TaskDone {
type: 'runner:taskdone';
taskId: string;
data: TaskResultData;
}
export interface TaskError {
type: 'runner:taskerror';
taskId: string;
error: unknown;
}
export interface TaskOffer {
type: 'runner:taskoffer';
offerId: string;
taskType: string;
validFor: number;
}
export interface TaskDataRequest {
type: 'runner:taskdatarequest';
taskId: string;
requestId: string;
requestType: DataRequestType;
param?: string;
}
export interface RPC {
type: 'runner:rpc';
callId: string;
taskId: string;
name: (typeof RPC_ALLOW_LIST)[number];
params: unknown[];
}
export type All =
| Info
| TaskDone
| TaskError
| TaskAccepted
| TaskRejected
| TaskOffer
| RPC
| TaskDataRequest;
}
}
export const RPC_ALLOW_LIST = [
'logNodeOutput',
'helpers.httpRequestWithAuthentication',
'helpers.requestWithAuthenticationPaginated',
// "helpers.normalizeItems"
// "helpers.constructExecutionMetaData"
// "helpers.assertBinaryData"
'helpers.getBinaryDataBuffer',
// "helpers.copyInputItems"
// "helpers.returnJsonArray"
'helpers.getSSHClient',
'helpers.createReadStream',
// "helpers.getStoragePath"
'helpers.writeContentToFile',
'helpers.prepareBinaryData',
'helpers.setBinaryDataBuffer',
'helpers.copyBinaryFile',
'helpers.binaryToBuffer',
// "helpers.binaryToString"
// "helpers.getBinaryPath"
'helpers.getBinaryStream',
'helpers.getBinaryMetadata',
'helpers.createDeferredPromise',
'helpers.httpRequest',
] as const;
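Since the list is declared `as const` (and now lives in @n8n/task-runner), it can double as a runtime allow-list and a compile-time union. A small sketch of a guard built on top of it; the helper name is an assumption, not part of this change.

// Illustrative only: narrow an arbitrary string to an allow-listed RPC name.
type AllowedRpcMethod = (typeof RPC_ALLOW_LIST)[number];

function isAllowedRpcMethod(name: string): name is AllowedRpcMethod {
  return (RPC_ALLOW_LIST as readonly string[]).includes(name);
}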

View file

@ -1,32 +1,40 @@
import type { BrokerMessage, RunnerMessage } from '@n8n/task-runner';
import { Service } from 'typedi';
import type WebSocket from 'ws';
import { Logger } from '@/logging/logger.service';
import { DefaultTaskRunnerDisconnectAnalyzer } from './default-task-runner-disconnect-analyzer';
import type {
RunnerMessage,
N8nMessage,
DisconnectAnalyzer,
TaskRunnerServerInitRequest,
TaskRunnerServerInitResponse,
} from './runner-types';
import { TaskBroker, type MessageCallback, type TaskRunner } from './task-broker.service';
import { TaskRunnerDisconnectAnalyzer } from './task-runner-disconnect-analyzer';
function heartbeat(this: WebSocket) {
this.isAlive = true;
}
@Service()
export class TaskRunnerService {
export class TaskRunnerWsServer {
runnerConnections: Map<TaskRunner['id'], WebSocket> = new Map();
constructor(
private readonly logger: Logger,
private readonly taskBroker: TaskBroker,
private readonly disconnectAnalyzer: TaskRunnerDisconnectAnalyzer,
private disconnectAnalyzer: DefaultTaskRunnerDisconnectAnalyzer,
) {}
sendMessage(id: TaskRunner['id'], message: N8nMessage.ToRunner.All) {
setDisconnectAnalyzer(disconnectAnalyzer: DisconnectAnalyzer) {
this.disconnectAnalyzer = disconnectAnalyzer;
}
getDisconnectAnalyzer() {
return this.disconnectAnalyzer;
}
sendMessage(id: TaskRunner['id'], message: BrokerMessage.ToRunner.All) {
this.runnerConnections.get(id)?.send(JSON.stringify(message));
}
@ -40,9 +48,9 @@ export class TaskRunnerService {
try {
const buffer = Array.isArray(data) ? Buffer.concat(data) : Buffer.from(data);
const message: RunnerMessage.ToN8n.All = JSON.parse(
const message: RunnerMessage.ToBroker.All = JSON.parse(
buffer.toString('utf8'),
) as RunnerMessage.ToN8n.All;
) as RunnerMessage.ToBroker.All;
if (!isConnected && message.type !== 'runner:info') {
return;
@ -85,7 +93,7 @@ export class TaskRunnerService {
connection.on('message', onMessage);
connection.send(
JSON.stringify({ type: 'broker:inforequest' } as N8nMessage.ToRunner.InfoRequest),
JSON.stringify({ type: 'broker:inforequest' } as BrokerMessage.ToRunner.InfoRequest),
);
}
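The new setDisconnectAnalyzer hook above is what lets a deployment swap the default analyzer for the OOM-aware internal one. A minimal sketch of that wiring; where and when this is called is an assumption, not shown in this diff.

// Illustrative only: swap in the OOM-aware analyzer for internal runner mode.
const wsServer = Container.get(TaskRunnerWsServer);
wsServer.setDisconnectAnalyzer(Container.get(InternalTaskRunnerDisconnectAnalyzer));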

View file

@ -1,3 +1,9 @@
import type {
BrokerMessage,
RequesterMessage,
RunnerMessage,
TaskResultData,
} from '@n8n/task-runner';
import { ApplicationError } from 'n8n-workflow';
import { nanoid } from 'nanoid';
import { Service } from 'typedi';
@ -6,7 +12,6 @@ import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials';
import { Logger } from '@/logging/logger.service';
import { TaskRejectError } from './errors';
import type { N8nMessage, RunnerMessage, RequesterMessage, TaskResultData } from './runner-types';
export interface TaskRunner {
id: string;
@ -38,13 +43,15 @@ export interface TaskRequest {
acceptInProgress?: boolean;
}
export type MessageCallback = (message: N8nMessage.ToRunner.All) => Promise<void> | void;
export type MessageCallback = (message: BrokerMessage.ToRunner.All) => Promise<void> | void;
export type RequesterMessageCallback = (
message: N8nMessage.ToRequester.All,
message: BrokerMessage.ToRequester.All,
) => Promise<void> | void;
type RunnerAcceptCallback = () => void;
type RequesterAcceptCallback = (settings: RequesterMessage.ToN8n.TaskSettings['settings']) => void;
type RequesterAcceptCallback = (
settings: RequesterMessage.ToBroker.TaskSettings['settings'],
) => void;
type TaskRejectCallback = (reason: TaskRejectError) => void;
@Service()
@ -134,11 +141,11 @@ export class TaskBroker {
this.requesters.delete(requesterId);
}
private async messageRunner(runnerId: TaskRunner['id'], message: N8nMessage.ToRunner.All) {
private async messageRunner(runnerId: TaskRunner['id'], message: BrokerMessage.ToRunner.All) {
await this.knownRunners.get(runnerId)?.messageCallback(message);
}
private async messageAllRunners(message: N8nMessage.ToRunner.All) {
private async messageAllRunners(message: BrokerMessage.ToRunner.All) {
await Promise.allSettled(
[...this.knownRunners.values()].map(async (runner) => {
await runner.messageCallback(message);
@ -146,11 +153,11 @@ export class TaskBroker {
);
}
private async messageRequester(requesterId: string, message: N8nMessage.ToRequester.All) {
private async messageRequester(requesterId: string, message: BrokerMessage.ToRequester.All) {
await this.requesters.get(requesterId)?.(message);
}
async onRunnerMessage(runnerId: TaskRunner['id'], message: RunnerMessage.ToN8n.All) {
async onRunnerMessage(runnerId: TaskRunner['id'], message: RunnerMessage.ToBroker.All) {
const runner = this.knownRunners.get(runnerId);
if (!runner) {
return;
@ -178,12 +185,7 @@ export class TaskBroker {
await this.taskErrorHandler(message.taskId, message.error);
break;
case 'runner:taskdatarequest':
await this.handleDataRequest(
message.taskId,
message.requestId,
message.requestType,
message.param,
);
await this.handleDataRequest(message.taskId, message.requestId, message.requestParams);
break;
case 'runner:rpc':
@ -198,7 +200,7 @@ export class TaskBroker {
async handleRpcRequest(
taskId: Task['id'],
callId: string,
name: RunnerMessage.ToN8n.RPC['name'],
name: RunnerMessage.ToBroker.RPC['name'],
params: unknown[],
) {
const task = this.tasks.get(taskId);
@ -232,9 +234,8 @@ export class TaskBroker {
async handleDataRequest(
taskId: Task['id'],
requestId: RunnerMessage.ToN8n.TaskDataRequest['requestId'],
requestType: RunnerMessage.ToN8n.TaskDataRequest['requestType'],
param?: string,
requestId: RunnerMessage.ToBroker.TaskDataRequest['requestId'],
requestParams: RunnerMessage.ToBroker.TaskDataRequest['requestParams'],
) {
const task = this.tasks.get(taskId);
if (!task) {
@ -244,14 +245,13 @@ export class TaskBroker {
type: 'broker:taskdatarequest',
taskId,
requestId,
requestType,
param,
requestParams,
});
}
async handleResponse(
taskId: Task['id'],
requestId: RunnerMessage.ToN8n.TaskDataRequest['requestId'],
requestId: RunnerMessage.ToBroker.TaskDataRequest['requestId'],
data: unknown,
) {
const task = this.tasks.get(taskId);
@ -266,7 +266,7 @@ export class TaskBroker {
});
}
async onRequesterMessage(requesterId: string, message: RequesterMessage.ToN8n.All) {
async onRequesterMessage(requesterId: string, message: RequesterMessage.ToBroker.All) {
switch (message.type) {
case 'requester:tasksettings':
this.handleRequesterAccept(message.taskId, message.settings);
@ -298,7 +298,7 @@ export class TaskBroker {
async handleRequesterRpcResponse(
taskId: string,
callId: string,
status: RequesterMessage.ToN8n.RPCResponse['status'],
status: RequesterMessage.ToBroker.RPCResponse['status'],
data: unknown,
) {
const runner = await this.getRunnerOrFailTask(taskId);
@ -324,7 +324,7 @@ export class TaskBroker {
handleRequesterAccept(
taskId: Task['id'],
settings: RequesterMessage.ToN8n.TaskSettings['settings'],
settings: RequesterMessage.ToBroker.TaskSettings['settings'],
) {
const acceptReject = this.requesterAcceptRejects.get(taskId);
if (acceptReject) {
@ -474,10 +474,12 @@ export class TaskBroker {
this.pendingTaskRequests.splice(requestIndex, 1);
try {
const acceptPromise = new Promise<RequesterMessage.ToN8n.TaskSettings['settings']>(
const acceptPromise = new Promise<RequesterMessage.ToBroker.TaskSettings['settings']>(
(resolve, reject) => {
this.requesterAcceptRejects.set(taskId, {
accept: resolve as (settings: RequesterMessage.ToN8n.TaskSettings['settings']) => void,
accept: resolve as (
settings: RequesterMessage.ToBroker.TaskSettings['settings'],
) => void,
reject,
});

View file

@ -0,0 +1,324 @@
import type { TaskData } from '@n8n/task-runner';
import { mock } from 'jest-mock-extended';
import type { IExecuteFunctions, IWorkflowExecuteAdditionalData } from 'n8n-workflow';
import { type INode, type INodeExecutionData, type Workflow } from 'n8n-workflow';
import { DataRequestResponseBuilder } from '../data-request-response-builder';
const triggerNode: INode = mock<INode>({
name: 'Trigger',
});
const debugHelperNode: INode = mock<INode>({
name: 'DebugHelper',
});
const codeNode: INode = mock<INode>({
name: 'Code',
});
const workflow: TaskData['workflow'] = mock<Workflow>();
const debugHelperNodeOutItems: INodeExecutionData[] = [
{
json: {
uid: 'abb74fd4-bef2-4fae-9d53-ea24e9eb3032',
email: 'Dan.Schmidt31@yahoo.com',
firstname: 'Toni',
lastname: 'Schuster',
password: 'Q!D6C2',
},
pairedItem: {
item: 0,
},
},
];
const codeNodeInputItems: INodeExecutionData[] = debugHelperNodeOutItems;
const connectionInputData: TaskData['connectionInputData'] = codeNodeInputItems;
const envProviderState: TaskData['envProviderState'] = mock<TaskData['envProviderState']>({
env: {},
isEnvAccessBlocked: false,
isProcessAvailable: true,
});
const additionalData = mock<IWorkflowExecuteAdditionalData>({
formWaitingBaseUrl: 'http://localhost:5678/form-waiting',
instanceBaseUrl: 'http://localhost:5678/',
restApiUrl: 'http://localhost:5678/rest',
variables: {},
webhookBaseUrl: 'http://localhost:5678/webhook',
webhookTestBaseUrl: 'http://localhost:5678/webhook-test',
webhookWaitingBaseUrl: 'http://localhost:5678/webhook-waiting',
executionId: '45844',
userId: '114984bc-44b3-4dd4-9b54-a4a8d34d51d5',
currentNodeParameters: undefined,
executionTimeoutTimestamp: undefined,
restartExecutionId: undefined,
});
const executeFunctions = mock<IExecuteFunctions>();
/**
 * Task data for an execution of the following workflow, where the Code
 * node is the one currently being executed:
 *
 *   Trigger ──► DebugHelper ──► Code
 */
const taskData: TaskData = {
executeFunctions,
workflow,
connectionInputData,
inputData: {
main: [codeNodeInputItems],
},
itemIndex: 0,
activeNodeName: codeNode.name,
contextNodeName: codeNode.name,
defaultReturnRunIndex: -1,
mode: 'manual',
envProviderState,
node: codeNode,
runExecutionData: {
startData: {
destinationNode: codeNode.name,
runNodeFilter: [triggerNode.name, debugHelperNode.name, codeNode.name],
},
resultData: {
runData: {
[triggerNode.name]: [
{
hints: [],
startTime: 1730313407328,
executionTime: 1,
source: [],
executionStatus: 'success',
data: {
main: [[]],
},
},
],
[debugHelperNode.name]: [
{
hints: [],
startTime: 1730313407330,
executionTime: 1,
source: [
{
previousNode: triggerNode.name,
},
],
executionStatus: 'success',
data: {
main: [debugHelperNodeOutItems],
},
},
],
},
pinData: {},
},
executionData: {
contextData: {},
nodeExecutionStack: [],
metadata: {},
waitingExecution: {
[codeNode.name]: {
'0': {
main: [codeNodeInputItems],
},
},
},
waitingExecutionSource: {
[codeNode.name]: {
'0': {
main: [
{
previousNode: debugHelperNode.name,
},
],
},
},
},
},
},
runIndex: 0,
selfData: {},
siblingParameters: {},
executeData: {
node: codeNode,
data: {
main: [codeNodeInputItems],
},
source: {
main: [
{
previousNode: debugHelperNode.name,
previousNodeOutput: 0,
},
],
},
},
additionalData,
} as const;
describe('DataRequestResponseBuilder', () => {
const allDataParam: DataRequestResponseBuilder['requestParams'] = {
dataOfNodes: 'all',
env: true,
input: true,
prevNode: true,
};
const newRequestParam = (opts: Partial<DataRequestResponseBuilder['requestParams']>) => ({
...allDataParam,
...opts,
});
describe('all data', () => {
it('should build the runExecutionData as is when everything is requested', () => {
const dataRequestResponseBuilder = new DataRequestResponseBuilder(taskData, allDataParam);
const { runExecutionData } = dataRequestResponseBuilder.build();
expect(runExecutionData).toStrictEqual(taskData.runExecutionData);
});
});
describe('envProviderState', () => {
it("should filter out envProviderState when it's not requested", () => {
const dataRequestResponseBuilder = new DataRequestResponseBuilder(
taskData,
newRequestParam({
env: false,
}),
);
const result = dataRequestResponseBuilder.build();
expect(result.envProviderState).toStrictEqual({
env: {},
isEnvAccessBlocked: false,
isProcessAvailable: true,
});
});
});
describe('additionalData', () => {
it('picks only specific properties for additional data', () => {
const dataRequestResponseBuilder = new DataRequestResponseBuilder(taskData, allDataParam);
const result = dataRequestResponseBuilder.build();
expect(result.additionalData).toStrictEqual({
formWaitingBaseUrl: 'http://localhost:5678/form-waiting',
instanceBaseUrl: 'http://localhost:5678/',
restApiUrl: 'http://localhost:5678/rest',
webhookBaseUrl: 'http://localhost:5678/webhook',
webhookTestBaseUrl: 'http://localhost:5678/webhook-test',
webhookWaitingBaseUrl: 'http://localhost:5678/webhook-waiting',
executionId: '45844',
userId: '114984bc-44b3-4dd4-9b54-a4a8d34d51d5',
currentNodeParameters: undefined,
executionTimeoutTimestamp: undefined,
restartExecutionId: undefined,
variables: additionalData.variables,
});
});
});
describe('input data', () => {
const allExceptInputParam = newRequestParam({
input: false,
});
it('drops input data from executeData', () => {
const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build();
expect(result.executeData).toStrictEqual({
node: taskData.executeData!.node,
source: taskData.executeData!.source,
data: {},
});
});
it('drops input data from result', () => {
const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build();
expect(result.inputData).toStrictEqual({});
});
it('drops input data from connectionInputData', () => {
const result = new DataRequestResponseBuilder(taskData, allExceptInputParam).build();
expect(result.connectionInputData).toStrictEqual([]);
});
});
describe('nodes', () => {
it('should return empty run data when only Code node is requested', () => {
const result = new DataRequestResponseBuilder(
taskData,
newRequestParam({ dataOfNodes: ['Code'], prevNode: false }),
).build();
expect(result.runExecutionData.resultData.runData).toStrictEqual({});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
it('should return empty run data when only Code node is requested', () => {
const result = new DataRequestResponseBuilder(
taskData,
newRequestParam({ dataOfNodes: [codeNode.name], prevNode: false }),
).build();
expect(result.runExecutionData.resultData.runData).toStrictEqual({});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
it("should return only DebugHelper's data when only DebugHelper node is requested", () => {
const result = new DataRequestResponseBuilder(
taskData,
newRequestParam({ dataOfNodes: [debugHelperNode.name], prevNode: false }),
).build();
expect(result.runExecutionData.resultData.runData).toStrictEqual({
[debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name],
});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
it("should return DebugHelper's data when only prevNode node is requested", () => {
const result = new DataRequestResponseBuilder(
taskData,
newRequestParam({ dataOfNodes: [], prevNode: true }),
).build();
expect(result.runExecutionData.resultData.runData).toStrictEqual({
[debugHelperNode.name]: taskData.runExecutionData.resultData.runData[debugHelperNode.name],
});
expect(result.runExecutionData.resultData.pinData).toStrictEqual({});
// executionData & startData contain only metadata --> returned as is
expect(result.runExecutionData.startData).toStrictEqual(taskData.runExecutionData.startData);
expect(result.runExecutionData.executionData).toStrictEqual(
taskData.runExecutionData.executionData,
);
});
});
});

View file

@ -0,0 +1,208 @@
import type {
DataRequestResponse,
BrokerMessage,
PartialAdditionalData,
TaskData,
} from '@n8n/task-runner';
import type {
EnvProviderState,
IExecuteData,
INodeExecutionData,
IPinData,
IRunData,
IRunExecutionData,
ITaskDataConnections,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowParameters,
} from 'n8n-workflow';
/**
* Builds the response to a data request coming from a Task Runner. Tries to minimize
* the amount of data that is sent to the runner by only providing what is requested.
*/
export class DataRequestResponseBuilder {
private requestedNodeNames = new Set<string>();
constructor(
private readonly taskData: TaskData,
private readonly requestParams: BrokerMessage.ToRequester.TaskDataRequest['requestParams'],
) {
this.requestedNodeNames = new Set(requestParams.dataOfNodes);
if (this.requestParams.prevNode && this.requestParams.dataOfNodes !== 'all') {
this.requestedNodeNames.add(this.determinePrevNodeName());
}
}
/**
* Builds a response to the data request
*/
build(): DataRequestResponse {
const { taskData: td } = this;
return {
workflow: this.buildWorkflow(td.workflow),
connectionInputData: this.buildConnectionInputData(td.connectionInputData),
inputData: this.buildInputData(td.inputData),
itemIndex: td.itemIndex,
activeNodeName: td.activeNodeName,
contextNodeName: td.contextNodeName,
defaultReturnRunIndex: td.defaultReturnRunIndex,
mode: td.mode,
envProviderState: this.buildEnvProviderState(td.envProviderState),
node: td.node, // The current node being executed
runExecutionData: this.buildRunExecutionData(td.runExecutionData),
runIndex: td.runIndex,
selfData: td.selfData,
siblingParameters: td.siblingParameters,
executeData: this.buildExecuteData(td.executeData),
additionalData: this.buildAdditionalData(td.additionalData),
};
}
private buildAdditionalData(
additionalData: IWorkflowExecuteAdditionalData,
): PartialAdditionalData {
return {
formWaitingBaseUrl: additionalData.formWaitingBaseUrl,
instanceBaseUrl: additionalData.instanceBaseUrl,
restApiUrl: additionalData.restApiUrl,
variables: additionalData.variables,
webhookBaseUrl: additionalData.webhookBaseUrl,
webhookTestBaseUrl: additionalData.webhookTestBaseUrl,
webhookWaitingBaseUrl: additionalData.webhookWaitingBaseUrl,
currentNodeParameters: additionalData.currentNodeParameters,
executionId: additionalData.executionId,
executionTimeoutTimestamp: additionalData.executionTimeoutTimestamp,
restartExecutionId: additionalData.restartExecutionId,
userId: additionalData.userId,
};
}
private buildExecuteData(executeData: IExecuteData | undefined): IExecuteData | undefined {
if (executeData === undefined) {
return undefined;
}
return {
node: executeData.node, // The current node being executed
data: this.requestParams.input ? executeData.data : {},
source: executeData.source,
};
}
private buildRunExecutionData(runExecutionData: IRunExecutionData): IRunExecutionData {
if (this.requestParams.dataOfNodes === 'all') {
return runExecutionData;
}
return {
startData: runExecutionData.startData,
resultData: {
error: runExecutionData.resultData.error,
lastNodeExecuted: runExecutionData.resultData.lastNodeExecuted,
metadata: runExecutionData.resultData.metadata,
runData: this.buildRunData(runExecutionData.resultData.runData),
pinData: this.buildPinData(runExecutionData.resultData.pinData),
},
executionData: runExecutionData.executionData
? {
// TODO: Figure out what these two are and whether they can be filtered
contextData: runExecutionData.executionData?.contextData,
nodeExecutionStack: runExecutionData.executionData.nodeExecutionStack,
metadata: runExecutionData.executionData.metadata,
waitingExecution: runExecutionData.executionData.waitingExecution,
waitingExecutionSource: runExecutionData.executionData.waitingExecutionSource,
}
: undefined,
};
}
private buildRunData(runData: IRunData): IRunData {
return this.filterObjectByNodeNames(runData);
}
private buildPinData(pinData: IPinData | undefined): IPinData | undefined {
return pinData ? this.filterObjectByNodeNames(pinData) : undefined;
}
private buildEnvProviderState(envProviderState: EnvProviderState): EnvProviderState {
if (this.requestParams.env) {
// If `isEnvAccessBlocked` is true, the environment variables have already
// been sanitized out of the provider state, so we can return it as is.
return envProviderState;
}
return {
env: {},
isEnvAccessBlocked: envProviderState.isEnvAccessBlocked,
isProcessAvailable: envProviderState.isProcessAvailable,
};
}
private buildInputData(inputData: ITaskDataConnections): ITaskDataConnections {
if (this.requestParams.input) {
return inputData;
}
return {};
}
private buildConnectionInputData(
connectionInputData: INodeExecutionData[],
): INodeExecutionData[] {
if (this.requestParams.input) {
return connectionInputData;
}
return [];
}
private buildWorkflow(workflow: Workflow): Omit<WorkflowParameters, 'nodeTypes'> {
return {
id: workflow.id,
name: workflow.name,
active: workflow.active,
connections: workflow.connectionsBySourceNode,
nodes: Object.values(workflow.nodes),
pinData: workflow.pinData,
settings: workflow.settings,
staticData: workflow.staticData,
};
}
/**
* Assuming the given `obj` is an object where the keys are node names,
* filters the object to only include the node names that are requested.
*/
private filterObjectByNodeNames<T extends Record<string, unknown>>(obj: T): T {
if (this.requestParams.dataOfNodes === 'all') {
return obj;
}
const filteredObj: T = {} as T;
for (const nodeName in obj) {
if (!Object.prototype.hasOwnProperty.call(obj, nodeName)) {
continue;
}
if (this.requestedNodeNames.has(nodeName)) {
filteredObj[nodeName] = obj[nodeName];
}
}
return filteredObj;
}
private determinePrevNodeName(): string {
const sourceData = this.taskData.executeData?.source?.main?.[0];
if (!sourceData) {
return '';
}
return sourceData.previousNode;
}
}
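Putting the class to use mirrors the tests above: construct it with the full task data plus the runner's request parameters, then send back the trimmed result. A minimal sketch, with the surrounding variables assumed.

// Illustrative only: serve a runner's data request with a trimmed payload.
const builder = new DataRequestResponseBuilder(taskData, {
  dataOfNodes: ['DebugHelper'],
  prevNode: false,
  input: true,
  env: false,
});
const trimmedResponse = builder.build(); // DataRequestResponse with only the requested parts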

View file

@ -1,7 +1,7 @@
import type { RequesterMessage } from '@n8n/task-runner';
import Container from 'typedi';
import { TaskManager } from './task-manager';
import type { RequesterMessage } from '../runner-types';
import type { RequesterMessageCallback } from '../task-broker.service';
import { TaskBroker } from '../task-broker.service';
@ -24,7 +24,7 @@ export class LocalTaskManager extends TaskManager {
);
}
sendMessage(message: RequesterMessage.ToN8n.All) {
sendMessage(message: RequesterMessage.ToBroker.All) {
void this.taskBroker.onRequesterMessage(this.id, message);
}
}

View file

@ -1,29 +1,24 @@
import {
type EnvProviderState,
type IExecuteFunctions,
type Workflow,
type IRunExecutionData,
type INodeExecutionData,
type ITaskDataConnections,
type INode,
type WorkflowParameters,
type INodeParameters,
type WorkflowExecuteMode,
type IExecuteData,
type IDataObject,
type IWorkflowExecuteAdditionalData,
type Result,
createResultOk,
createResultError,
import type { TaskResultData, RequesterMessage, BrokerMessage, TaskData } from '@n8n/task-runner';
import { RPC_ALLOW_LIST } from '@n8n/task-runner';
import type {
EnvProviderState,
IExecuteFunctions,
Workflow,
IRunExecutionData,
INodeExecutionData,
ITaskDataConnections,
INode,
INodeParameters,
WorkflowExecuteMode,
IExecuteData,
IDataObject,
IWorkflowExecuteAdditionalData,
Result,
} from 'n8n-workflow';
import { createResultOk, createResultError } from 'n8n-workflow';
import { nanoid } from 'nanoid';
import {
RPC_ALLOW_LIST,
type TaskResultData,
type N8nMessage,
type RequesterMessage,
} from '../runner-types';
import { DataRequestResponseBuilder } from './data-request-response-builder';
export type RequestAccept = (jobId: string) => void;
export type RequestReject = (reason: string) => void;
@ -31,62 +26,6 @@ export type RequestReject = (reason: string) => void;
export type TaskAccept = (data: TaskResultData) => void;
export type TaskReject = (error: unknown) => void;
export interface TaskData {
executeFunctions: IExecuteFunctions;
inputData: ITaskDataConnections;
node: INode;
workflow: Workflow;
runExecutionData: IRunExecutionData;
runIndex: number;
itemIndex: number;
activeNodeName: string;
connectionInputData: INodeExecutionData[];
siblingParameters: INodeParameters;
mode: WorkflowExecuteMode;
envProviderState: EnvProviderState;
executeData?: IExecuteData;
defaultReturnRunIndex: number;
selfData: IDataObject;
contextNodeName: string;
additionalData: IWorkflowExecuteAdditionalData;
}
export interface PartialAdditionalData {
executionId?: string;
restartExecutionId?: string;
restApiUrl: string;
instanceBaseUrl: string;
formWaitingBaseUrl: string;
webhookBaseUrl: string;
webhookWaitingBaseUrl: string;
webhookTestBaseUrl: string;
currentNodeParameters?: INodeParameters;
executionTimeoutTimestamp?: number;
userId?: string;
variables: IDataObject;
}
export interface AllCodeTaskData {
workflow: Omit<WorkflowParameters, 'nodeTypes'>;
inputData: ITaskDataConnections;
node: INode;
runExecutionData: IRunExecutionData;
runIndex: number;
itemIndex: number;
activeNodeName: string;
connectionInputData: INodeExecutionData[];
siblingParameters: INodeParameters;
mode: WorkflowExecuteMode;
envProviderState: EnvProviderState;
executeData?: IExecuteData;
defaultReturnRunIndex: number;
selfData: IDataObject;
contextNodeName: string;
additionalData: PartialAdditionalData;
}
export interface TaskRequest {
requestId: string;
taskType: string;
@ -104,19 +43,6 @@ interface ExecuteFunctionObject {
[name: string]: ((...args: unknown[]) => unknown) | ExecuteFunctionObject;
}
const workflowToParameters = (workflow: Workflow): Omit<WorkflowParameters, 'nodeTypes'> => {
return {
id: workflow.id,
name: workflow.name,
active: workflow.active,
connections: workflow.connectionsBySourceNode,
nodes: Object.values(workflow.nodes),
pinData: workflow.pinData,
settings: workflow.settings,
staticData: workflow.staticData,
};
};
export class TaskManager {
requestAcceptRejects: Map<string, { accept: RequestAccept; reject: RequestReject }> = new Map();
@ -231,9 +157,9 @@ export class TaskManager {
}
}
sendMessage(_message: RequesterMessage.ToN8n.All) {}
sendMessage(_message: RequesterMessage.ToBroker.All) {}
onMessage(message: N8nMessage.ToRequester.All) {
onMessage(message: BrokerMessage.ToRequester.All) {
switch (message.type) {
case 'broker:taskready':
this.taskReady(message.requestId, message.taskId);
@ -245,7 +171,7 @@ export class TaskManager {
this.taskError(message.taskId, message.error);
break;
case 'broker:taskdatarequest':
this.sendTaskData(message.taskId, message.requestId, message.requestType);
this.sendTaskData(message.taskId, message.requestId, message.requestParams);
break;
case 'broker:rpc':
void this.handleRpc(message.taskId, message.callId, message.name, message.params);
@ -294,60 +220,29 @@ export class TaskManager {
sendTaskData(
taskId: string,
requestId: string,
requestType: N8nMessage.ToRequester.TaskDataRequest['requestType'],
requestParams: BrokerMessage.ToRequester.TaskDataRequest['requestParams'],
) {
const job = this.tasks.get(taskId);
if (!job) {
// TODO: logging
return;
}
if (requestType === 'all') {
const jd = job.data;
const ad = jd.additionalData;
const data: AllCodeTaskData = {
workflow: workflowToParameters(jd.workflow),
connectionInputData: jd.connectionInputData,
inputData: jd.inputData,
itemIndex: jd.itemIndex,
activeNodeName: jd.activeNodeName,
contextNodeName: jd.contextNodeName,
defaultReturnRunIndex: jd.defaultReturnRunIndex,
mode: jd.mode,
envProviderState: jd.envProviderState,
node: jd.node,
runExecutionData: jd.runExecutionData,
runIndex: jd.runIndex,
selfData: jd.selfData,
siblingParameters: jd.siblingParameters,
executeData: jd.executeData,
additionalData: {
formWaitingBaseUrl: ad.formWaitingBaseUrl,
instanceBaseUrl: ad.instanceBaseUrl,
restApiUrl: ad.restApiUrl,
variables: ad.variables,
webhookBaseUrl: ad.webhookBaseUrl,
webhookTestBaseUrl: ad.webhookTestBaseUrl,
webhookWaitingBaseUrl: ad.webhookWaitingBaseUrl,
currentNodeParameters: ad.currentNodeParameters,
executionId: ad.executionId,
executionTimeoutTimestamp: ad.executionTimeoutTimestamp,
restartExecutionId: ad.restartExecutionId,
userId: ad.userId,
},
};
this.sendMessage({
type: 'requester:taskdataresponse',
taskId,
requestId,
data,
});
}
const dataRequestResponseBuilder = new DataRequestResponseBuilder(job.data, requestParams);
const requestedData = dataRequestResponseBuilder.build();
this.sendMessage({
type: 'requester:taskdataresponse',
taskId,
requestId,
data: requestedData,
});
}
async handleRpc(
taskId: string,
callId: string,
name: N8nMessage.ToRequester.RPC['name'],
name: BrokerMessage.ToRequester.RPC['name'],
params: unknown[],
) {
const job = this.tasks.get(taskId);

View file

@ -0,0 +1,85 @@
import { TaskRunnersConfig } from '@n8n/config';
import * as a from 'node:assert/strict';
import Container, { Service } from 'typedi';
import type { TaskRunnerProcess } from '@/runners/task-runner-process';
import { TaskRunnerWsServer } from './runner-ws-server';
import type { LocalTaskManager } from './task-managers/local-task-manager';
import type { TaskRunnerServer } from './task-runner-server';
/**
* Module responsible for loading and starting the task runner. The task runner
* can run either internally (i.e. launched by n8n as a child process) or
* externally (i.e. launched by some other orchestrator).
*/
@Service()
export class TaskRunnerModule {
private taskRunnerHttpServer: TaskRunnerServer | undefined;
private taskRunnerWsServer: TaskRunnerWsServer | undefined;
private taskManager: LocalTaskManager | undefined;
private taskRunnerProcess: TaskRunnerProcess | undefined;
constructor(private readonly runnerConfig: TaskRunnersConfig) {}
async start() {
a.ok(!this.runnerConfig.disabled, 'Task runner is disabled');
await this.loadTaskManager();
await this.loadTaskRunnerServer();
if (
this.runnerConfig.mode === 'internal_childprocess' ||
this.runnerConfig.mode === 'internal_launcher'
) {
await this.startInternalTaskRunner();
}
}
async stop() {
if (this.taskRunnerProcess) {
await this.taskRunnerProcess.stop();
this.taskRunnerProcess = undefined;
}
if (this.taskRunnerHttpServer) {
await this.taskRunnerHttpServer.stop();
this.taskRunnerHttpServer = undefined;
}
}
private async loadTaskManager() {
const { TaskManager } = await import('@/runners/task-managers/task-manager');
const { LocalTaskManager } = await import('@/runners/task-managers/local-task-manager');
this.taskManager = new LocalTaskManager();
Container.set(TaskManager, this.taskManager);
}
private async loadTaskRunnerServer() {
// These are imported dynamically because we need to set the task manager
// instance before importing them
const { TaskRunnerServer } = await import('@/runners/task-runner-server');
this.taskRunnerHttpServer = Container.get(TaskRunnerServer);
this.taskRunnerWsServer = Container.get(TaskRunnerWsServer);
await this.taskRunnerHttpServer.start();
}
private async startInternalTaskRunner() {
a.ok(this.taskRunnerWsServer, 'Task Runner WS Server not loaded');
const { TaskRunnerProcess } = await import('@/runners/task-runner-process');
this.taskRunnerProcess = Container.get(TaskRunnerProcess);
await this.taskRunnerProcess.start();
const { InternalTaskRunnerDisconnectAnalyzer } = await import(
'@/runners/internal-task-runner-disconnect-analyzer'
);
this.taskRunnerWsServer.setDisconnectAnalyzer(
Container.get(InternalTaskRunnerDisconnectAnalyzer),
);
}
}
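As a rough usage sketch, a caller could gate startup on the runner config like this (hypothetical wiring, not the actual n8n bootstrap code):

import { TaskRunnersConfig } from '@n8n/config';
import Container from 'typedi';
import { TaskRunnerModule } from '@/runners/task-runner-module';

async function maybeStartTaskRunner() {
	const runnerConfig = Container.get(TaskRunnersConfig);
	// `start()` asserts that runners are not disabled, so check before calling it.
	if (runnerConfig.disabled) return;
	// In 'external' mode only the HTTP/WS server is started; the internal modes
	// additionally spawn the runner child process.
	await Container.get(TaskRunnerModule).start();
}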

View file

@ -69,8 +69,8 @@ export class TaskRunnerProcess extends TypedEmitter<TaskRunnerProcessEventMap> {
super();
a.ok(
this.runnerConfig.mode === 'internal_childprocess' ||
this.runnerConfig.mode === 'internal_launcher',
this.runnerConfig.mode !== 'external',
'Task Runner Process cannot be used in external mode',
);
this.logger = logger.scoped('task-runner');
@ -92,7 +92,7 @@ export class TaskRunnerProcess extends TypedEmitter<TaskRunnerProcessEventMap> {
}
startNode(grantToken: string, n8nUri: string) {
const startScript = require.resolve('@n8n/task-runner');
const startScript = require.resolve('@n8n/task-runner/start');
return spawn('node', [startScript], {
env: this.getProcessEnvVars(grantToken, n8nUri),

View file

@ -19,7 +19,7 @@ import type {
TaskRunnerServerInitRequest,
TaskRunnerServerInitResponse,
} from '@/runners/runner-types';
import { TaskRunnerService } from '@/runners/runner-ws-server';
import { TaskRunnerWsServer } from '@/runners/runner-ws-server';
/**
* Task Runner HTTP & WS server
@ -44,7 +44,7 @@ export class TaskRunnerServer {
private readonly logger: Logger,
private readonly globalConfig: GlobalConfig,
private readonly taskRunnerAuthController: TaskRunnerAuthController,
private readonly taskRunnerService: TaskRunnerService,
private readonly taskRunnerService: TaskRunnerWsServer,
) {
this.app = express();
this.app.disable('x-powered-by');

View file

@ -0,0 +1,43 @@
import type { GlobalConfig } from '@n8n/config';
import { mock } from 'jest-mock-extended';
import config from '@/config';
import { UrlService } from '../url.service';
describe('UrlService', () => {
beforeEach(() => {
process.env.WEBHOOK_URL = undefined;
config.load(config.default);
});
describe('getInstanceBaseUrl', () => {
it('should set URL from N8N_EDITOR_BASE_URL', () => {
config.set('editorBaseUrl', 'https://example.com/');
process.env.WEBHOOK_URL = undefined;
const urlService = new UrlService(mock<GlobalConfig>());
expect(urlService.getInstanceBaseUrl()).toBe('https://example.com');
});
it('should set URL from WEBHOOK_URL', () => {
config.set('editorBaseUrl', '');
process.env.WEBHOOK_URL = 'https://example.com/';
const urlService = new UrlService(mock<GlobalConfig>());
expect(urlService.getInstanceBaseUrl()).toBe('https://example.com');
});
it('should trim quotes when setting URL from N8N_EDITOR_BASE_URL', () => {
config.set('editorBaseUrl', '"https://example.com"');
process.env.WEBHOOK_URL = undefined;
const urlService = new UrlService(mock<GlobalConfig>());
expect(urlService.getInstanceBaseUrl()).toBe('https://example.com');
});
it('should trim quotes when setting URL from WEBHOOK_URL', () => {
config.set('editorBaseUrl', '');
process.env.WEBHOOK_URL = '"https://example.com/"';
const urlService = new UrlService(mock<GlobalConfig>());
expect(urlService.getInstanceBaseUrl()).toBe('https://example.com');
});
});
});

View file

@ -1,4 +1,4 @@
import { NodeExecuteFunctions } from 'n8n-core';
import { LoadOptionsContext, NodeExecuteFunctions } from 'n8n-core';
import type {
ILoadOptions,
ILoadOptionsFunctions,
@ -253,6 +253,6 @@ export class DynamicNodeParametersService {
workflow: Workflow,
) {
const node = workflow.nodes['Temp-Node'];
return NodeExecuteFunctions.getLoadOptionsFunctions(workflow, node, path, additionalData);
return new LoadOptionsContext(workflow, node, additionalData, path);
}
}

View file

@ -222,9 +222,9 @@ export class FrontendService {
licensePruneTime: -1,
},
pruning: {
isEnabled: config.getEnv('executions.pruneData'),
maxAge: config.getEnv('executions.pruneDataMaxAge'),
maxCount: config.getEnv('executions.pruneDataMaxCount'),
isEnabled: this.globalConfig.pruning.isEnabled,
maxAge: this.globalConfig.pruning.maxAge,
maxCount: this.globalConfig.pruning.maxCount,
},
security: {
blockFileAccessToN8nFiles: this.securityConfig.blockFileAccessToN8nFiles,

View file

@ -0,0 +1,213 @@
import type { GlobalConfig } from '@n8n/config';
import { mock } from 'jest-mock-extended';
import type { InstanceSettings } from 'n8n-core';
import type { MultiMainSetup } from '@/scaling/multi-main-setup.ee';
import type { OrchestrationService } from '@/services/orchestration.service';
import { mockLogger } from '@test/mocking';
import { PruningService } from '../pruning.service';
describe('PruningService', () => {
describe('init', () => {
it('should start pruning if leader', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: true }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock(),
);
const startPruningSpy = jest.spyOn(pruningService, 'startPruning');
pruningService.init();
expect(startPruningSpy).toHaveBeenCalled();
});
it('should not start pruning if follower', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: false }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock(),
);
const startPruningSpy = jest.spyOn(pruningService, 'startPruning');
pruningService.init();
expect(startPruningSpy).not.toHaveBeenCalled();
});
it('should register leadership events if multi-main setup is enabled', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: true }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>({ on: jest.fn() }),
}),
mock(),
);
pruningService.init();
// @ts-expect-error Private method
expect(pruningService.orchestrationService.multiMainSetup.on).toHaveBeenCalledWith(
'leader-takeover',
expect.any(Function),
);
// @ts-expect-error Private method
expect(pruningService.orchestrationService.multiMainSetup.on).toHaveBeenCalledWith(
'leader-stepdown',
expect.any(Function),
);
});
});
describe('isEnabled', () => {
it('should return `true` based on config if leader main', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: true, instanceType: 'main' }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<GlobalConfig>({ pruning: { isEnabled: true } }),
);
// @ts-expect-error Private method
const isEnabled = pruningService.isEnabled();
expect(isEnabled).toBe(true);
});
it('should return `false` based on config if leader main', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: true, instanceType: 'main' }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<GlobalConfig>({ pruning: { isEnabled: false } }),
);
// @ts-expect-error Private method
const isEnabled = pruningService.isEnabled();
expect(isEnabled).toBe(false);
});
it('should return `false` if non-main even if enabled', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: false, instanceType: 'worker' }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<GlobalConfig>({ pruning: { isEnabled: true } }),
);
// @ts-expect-error Private method
const isEnabled = pruningService.isEnabled();
expect(isEnabled).toBe(false);
});
it('should return `false` if follower main even if enabled', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: false, isFollower: true, instanceType: 'main' }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<GlobalConfig>({ pruning: { isEnabled: true }, multiMainSetup: { enabled: true } }),
);
// @ts-expect-error Private method
const isEnabled = pruningService.isEnabled();
expect(isEnabled).toBe(false);
});
});
describe('startPruning', () => {
it('should not start pruning if service is disabled', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: true, instanceType: 'main' }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<GlobalConfig>({ pruning: { isEnabled: false } }),
);
// @ts-expect-error Private method
const setSoftDeletionInterval = jest.spyOn(pruningService, 'setSoftDeletionInterval');
// @ts-expect-error Private method
const scheduleHardDeletion = jest.spyOn(pruningService, 'scheduleHardDeletion');
pruningService.startPruning();
expect(setSoftDeletionInterval).not.toHaveBeenCalled();
expect(scheduleHardDeletion).not.toHaveBeenCalled();
});
it('should start pruning if service is enabled', () => {
const pruningService = new PruningService(
mockLogger(),
mock<InstanceSettings>({ isLeader: true, instanceType: 'main' }),
mock(),
mock(),
mock<OrchestrationService>({
isMultiMainSetupEnabled: true,
multiMainSetup: mock<MultiMainSetup>(),
}),
mock<GlobalConfig>({ pruning: { isEnabled: true } }),
);
const setSoftDeletionInterval = jest
// @ts-expect-error Private method
.spyOn(pruningService, 'setSoftDeletionInterval')
.mockImplementation();
const scheduleHardDeletion = jest
// @ts-expect-error Private method
.spyOn(pruningService, 'scheduleHardDeletion')
.mockImplementation();
pruningService.startPruning();
expect(setSoftDeletionInterval).toHaveBeenCalled();
expect(scheduleHardDeletion).toHaveBeenCalled();
});
});
});

View file

@ -3,21 +3,20 @@ import { BinaryDataService, InstanceSettings } from 'n8n-core';
import { jsonStringify } from 'n8n-workflow';
import { Service } from 'typedi';
import config from '@/config';
import { inTest, TIME } from '@/constants';
import { TIME } from '@/constants';
import { ExecutionRepository } from '@/databases/repositories/execution.repository';
import { OnShutdown } from '@/decorators/on-shutdown';
import { Logger } from '@/logging/logger.service';
import { OrchestrationService } from './orchestration.service';
import { OrchestrationService } from '../orchestration.service';
@Service()
export class PruningService {
private hardDeletionBatchSize = 100;
private rates: Record<string, number> = {
softDeletion: config.getEnv('executions.pruneDataIntervals.softDelete') * TIME.MINUTE,
hardDeletion: config.getEnv('executions.pruneDataIntervals.hardDelete') * TIME.MINUTE,
softDeletion: this.globalConfig.pruning.softDeleteInterval * TIME.MINUTE,
hardDeletion: this.globalConfig.pruning.hardDeleteInterval * TIME.MINUTE,
};
public softDeletionInterval: NodeJS.Timer | undefined;
@ -33,7 +32,9 @@ export class PruningService {
private readonly binaryDataService: BinaryDataService,
private readonly orchestrationService: OrchestrationService,
private readonly globalConfig: GlobalConfig,
) {}
) {
this.logger = this.logger.scoped('pruning');
}
/**
* @important Requires `OrchestrationService` to be initialized.
@ -50,9 +51,9 @@ export class PruningService {
}
}
private isPruningEnabled() {
private isEnabled() {
const { instanceType, isFollower } = this.instanceSettings;
if (!config.getEnv('executions.pruneData') || inTest || instanceType !== 'main') {
if (!this.globalConfig.pruning.isEnabled || instanceType !== 'main') {
return false;
}
@ -67,23 +68,23 @@ export class PruningService {
* @important Call this method only after DB migrations have completed.
*/
startPruning() {
if (!this.isPruningEnabled()) return;
if (!this.isEnabled()) return;
if (this.isShuttingDown) {
this.logger.warn('[Pruning] Cannot start pruning while shutting down');
this.logger.warn('Cannot start pruning while shutting down');
return;
}
this.logger.debug('[Pruning] Starting soft-deletion and hard-deletion timers');
this.logger.debug('Starting soft-deletion and hard-deletion timers');
this.setSoftDeletionInterval();
this.scheduleHardDeletion();
}
stopPruning() {
if (!this.isPruningEnabled()) return;
if (!this.isEnabled()) return;
this.logger.debug('[Pruning] Removing soft-deletion and hard-deletion timers');
this.logger.debug('Removing soft-deletion and hard-deletion timers');
clearInterval(this.softDeletionInterval);
clearTimeout(this.hardDeletionTimeout);
@ -97,7 +98,7 @@ export class PruningService {
this.rates.softDeletion,
);
this.logger.debug(`[Pruning] Soft-deletion scheduled every ${when}`);
this.logger.debug(`Soft-deletion scheduled every ${when}`);
}
private scheduleHardDeletion(rateMs = this.rates.hardDeletion) {
@ -114,27 +115,27 @@ export class PruningService {
? error.message
: jsonStringify(error, { replaceCircularRefs: true });
this.logger.error('[Pruning] Failed to hard-delete executions', { errorMessage });
this.logger.error('Failed to hard-delete executions', { errorMessage });
});
}, rateMs);
this.logger.debug(`[Pruning] Hard-deletion scheduled for next ${when}`);
this.logger.debug(`Hard-deletion scheduled for next ${when}`);
}
/**
* Mark executions as deleted based on age and count, in a pruning cycle.
*/
async softDeleteOnPruningCycle() {
this.logger.debug('[Pruning] Starting soft-deletion of executions');
this.logger.debug('Starting soft-deletion of executions');
const result = await this.executionRepository.softDeletePrunableExecutions();
if (result.affected === 0) {
this.logger.debug('[Pruning] Found no executions to soft-delete');
this.logger.debug('Found no executions to soft-delete');
return;
}
this.logger.debug('[Pruning] Soft-deleted executions', { count: result.affected });
this.logger.debug('Soft-deleted executions', { count: result.affected });
}
@OnShutdown()
@ -148,26 +149,26 @@ export class PruningService {
* @return Delay in ms after which the next cycle should be started
*/
private async hardDeleteOnPruningCycle() {
const ids = await this.executionRepository.hardDeleteSoftDeletedExecutions();
const ids = await this.executionRepository.findSoftDeletedExecutions();
const executionIds = ids.map((o) => o.executionId);
if (executionIds.length === 0) {
this.logger.debug('[Pruning] Found no executions to hard-delete');
this.logger.debug('Found no executions to hard-delete');
return this.rates.hardDeletion;
}
try {
this.logger.debug('[Pruning] Starting hard-deletion of executions', { executionIds });
this.logger.debug('Starting hard-deletion of executions', { executionIds });
await this.binaryDataService.deleteMany(ids);
await this.executionRepository.deleteByIds(executionIds);
this.logger.debug('[Pruning] Hard-deleted executions', { executionIds });
this.logger.debug('Hard-deleted executions', { executionIds });
} catch (error) {
this.logger.error('[Pruning] Failed to hard-delete executions', {
this.logger.error('Failed to hard-delete executions', {
executionIds,
error: error instanceof Error ? error.message : `${error}`,
});
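As a back-of-the-envelope illustration of the interval math above (the interval values and the millisecond constant are assumptions for the example, not the real defaults):

const TIME = { MINUTE: 60 * 1000 }; // assumed ms value of the constant used above
const pruning = { softDeleteInterval: 60, hardDeleteInterval: 15 }; // illustrative config

const rates = {
	softDeletion: pruning.softDeleteInterval * TIME.MINUTE, // 3_600_000 ms: soft-delete sweep every hour
	hardDeletion: pruning.hardDeleteInterval * TIME.MINUTE, // 900_000 ms: hard-delete check every 15 minutes
};

Each soft-deletion cycle marks prunable executions as deleted; each hard-deletion cycle removes the binary data and rows of already soft-deleted executions and returns the delay until the next cycle.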

View file

@ -14,7 +14,7 @@ export class UrlService {
/** Returns the base URL of the webhooks */
getWebhookBaseUrl() {
let urlBaseWebhook = process.env.WEBHOOK_URL ?? this.baseUrl;
let urlBaseWebhook = this.trimQuotes(process.env.WEBHOOK_URL) || this.baseUrl;
if (!urlBaseWebhook.endsWith('/')) {
urlBaseWebhook += '/';
}
@ -23,7 +23,7 @@ export class UrlService {
/** Return the n8n instance base URL without trailing slash */
getInstanceBaseUrl(): string {
const n8nBaseUrl = config.getEnv('editorBaseUrl') || this.getWebhookBaseUrl();
const n8nBaseUrl = this.trimQuotes(config.getEnv('editorBaseUrl')) || this.getWebhookBaseUrl();
return n8nBaseUrl.endsWith('/') ? n8nBaseUrl.slice(0, n8nBaseUrl.length - 1) : n8nBaseUrl;
}
@ -36,4 +36,9 @@ export class UrlService {
}
return `${protocol}://${host}:${port}${path}`;
}
/** Remove leading and trailing double quotes from a URL. */
private trimQuotes(url?: string) {
return url?.replace(/^["]+|["]+$/g, '') ?? '';
}
}
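A quick worked example of the quote trimming above (values are illustrative):

const trimQuotes = (url?: string) => url?.replace(/^["]+|["]+$/g, '') ?? '';

trimQuotes('"https://hooks.example.com/"'); // -> 'https://hooks.example.com/'
trimQuotes(undefined);                      // -> '' (falls back to the other source)

getInstanceBaseUrl() prefers the trimmed N8N_EDITOR_BASE_URL and falls back to the webhook base URL, then strips any trailing slash, so '"https://example.com/"' ends up as 'https://example.com'.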

View file

@ -468,7 +468,7 @@ function hookFunctionsSave(): IWorkflowExecuteHooks {
(executionStatus === 'success' && !saveSettings.success) ||
(executionStatus !== 'success' && !saveSettings.error);
if (shouldNotSave) {
if (shouldNotSave && !fullRunData.waitTill) {
if (!fullRunData.waitTill && !isManualMode) {
executeErrorWorkflow(
this.workflowData,

View file

@ -35,6 +35,7 @@ import * as WorkflowHelpers from '@/workflow-helpers';
import { generateFailedExecutionFromError } from '@/workflow-helpers';
import { WorkflowStaticDataService } from '@/workflows/workflow-static-data.service';
import { ExecutionNotFoundError } from './errors/execution-not-found-error';
import { EventService } from './events/event.service';
@Service()
@ -57,12 +58,21 @@ export class WorkflowRunner {
/** The process did error */
async processError(
error: ExecutionError,
error: ExecutionError | ExecutionNotFoundError,
startedAt: Date,
executionMode: WorkflowExecuteMode,
executionId: string,
hooks?: WorkflowHooks,
) {
// This means the execution was probably cancelled and has already
// been cleaned up.
//
// FIXME: This is a quick fix. The proper fix would be to not remove
// the execution from the active executions while it's still running.
if (error instanceof ExecutionNotFoundError) {
return;
}
ErrorReporter.error(error, { executionId });
const isQueueMode = config.getEnv('executions.mode') === 'queue';

View file

@ -1,15 +1,14 @@
import { GlobalConfig } from '@n8n/config';
import { mock } from 'jest-mock-extended';
import { BinaryDataService, InstanceSettings } from 'n8n-core';
import type { ExecutionStatus } from 'n8n-workflow';
import Container from 'typedi';
import config from '@/config';
import { TIME } from '@/constants';
import type { ExecutionEntity } from '@/databases/entities/execution-entity';
import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
import { ExecutionRepository } from '@/databases/repositories/execution.repository';
import { Logger } from '@/logging/logger.service';
import { PruningService } from '@/services/pruning.service';
import { PruningService } from '@/services/pruning/pruning.service';
import {
annotateExecution,
@ -18,7 +17,7 @@ import {
} from './shared/db/executions';
import { createWorkflow } from './shared/db/workflows';
import * as testDb from './shared/test-db';
import { mockInstance } from '../shared/mocking';
import { mockInstance, mockLogger } from '../shared/mocking';
describe('softDeleteOnPruningCycle()', () => {
let pruningService: PruningService;
@ -28,17 +27,19 @@ describe('softDeleteOnPruningCycle()', () => {
const now = new Date();
const yesterday = new Date(Date.now() - TIME.DAY);
let workflow: WorkflowEntity;
let globalConfig: GlobalConfig;
beforeAll(async () => {
await testDb.init();
globalConfig = Container.get(GlobalConfig);
pruningService = new PruningService(
mockInstance(Logger),
mockLogger(),
instanceSettings,
Container.get(ExecutionRepository),
mockInstance(BinaryDataService),
mock(),
mock(),
globalConfig,
);
workflow = await createWorkflow();
@ -52,10 +53,6 @@ describe('softDeleteOnPruningCycle()', () => {
await testDb.terminate();
});
afterEach(() => {
config.load(config.default);
});
async function findAllExecutions() {
return await Container.get(ExecutionRepository).find({
order: { id: 'asc' },
@ -64,9 +61,9 @@ describe('softDeleteOnPruningCycle()', () => {
}
describe('when EXECUTIONS_DATA_PRUNE_MAX_COUNT is set', () => {
beforeEach(() => {
config.set('executions.pruneDataMaxCount', 1);
config.set('executions.pruneDataMaxAge', 336);
beforeAll(() => {
globalConfig.pruning.maxAge = 336;
globalConfig.pruning.maxCount = 1;
});
test('should mark as deleted based on EXECUTIONS_DATA_PRUNE_MAX_COUNT', async () => {
@ -165,9 +162,9 @@ describe('softDeleteOnPruningCycle()', () => {
});
describe('when EXECUTIONS_DATA_MAX_AGE is set', () => {
beforeEach(() => {
config.set('executions.pruneDataMaxAge', 1); // 1h
config.set('executions.pruneDataMaxCount', 0);
beforeAll(() => {
globalConfig.pruning.maxAge = 1;
globalConfig.pruning.maxCount = 0;
});
test('should mark as deleted based on EXECUTIONS_DATA_MAX_AGE', async () => {

View file

@ -0,0 +1,40 @@
import { TaskRunnersConfig } from '@n8n/config';
import Container from 'typedi';
import { TaskRunnerModule } from '@/runners/task-runner-module';
import { DefaultTaskRunnerDisconnectAnalyzer } from '../../../src/runners/default-task-runner-disconnect-analyzer';
import { TaskRunnerWsServer } from '../../../src/runners/runner-ws-server';
describe('TaskRunnerModule in external mode', () => {
const runnerConfig = Container.get(TaskRunnersConfig);
runnerConfig.mode = 'external';
runnerConfig.port = 0;
const module = Container.get(TaskRunnerModule);
afterEach(async () => {
await module.stop();
});
describe('start', () => {
it('should throw if the task runner is disabled', async () => {
runnerConfig.disabled = true;
// Act
await expect(module.start()).rejects.toThrow('Task runner is disabled');
});
it('should start the task runner', async () => {
runnerConfig.disabled = false;
// Act
await module.start();
});
it('should use DefaultTaskRunnerDisconnectAnalyzer', () => {
const wsServer = Container.get(TaskRunnerWsServer);
expect(wsServer.getDisconnectAnalyzer()).toBeInstanceOf(DefaultTaskRunnerDisconnectAnalyzer);
});
});
});

View file

@ -0,0 +1,40 @@
import { TaskRunnersConfig } from '@n8n/config';
import Container from 'typedi';
import { TaskRunnerModule } from '@/runners/task-runner-module';
import { InternalTaskRunnerDisconnectAnalyzer } from '../../../src/runners/internal-task-runner-disconnect-analyzer';
import { TaskRunnerWsServer } from '../../../src/runners/runner-ws-server';
describe('TaskRunnerModule in internal_childprocess mode', () => {
const runnerConfig = Container.get(TaskRunnersConfig);
runnerConfig.port = 0; // Random port
runnerConfig.mode = 'internal_childprocess';
const module = Container.get(TaskRunnerModule);
afterEach(async () => {
await module.stop();
});
describe('start', () => {
it('should throw if the task runner is disabled', async () => {
runnerConfig.disabled = true;
// Act
await expect(module.start()).rejects.toThrow('Task runner is disabled');
});
it('should start the task runner', async () => {
runnerConfig.disabled = false;
// Act
await module.start();
});
it('should use InternalTaskRunnerDisconnectAnalyzer', () => {
const wsServer = Container.get(TaskRunnerWsServer);
expect(wsServer.getDisconnectAnalyzer()).toBeInstanceOf(InternalTaskRunnerDisconnectAnalyzer);
});
});
});

View file

@ -1,7 +1,7 @@
import { TaskRunnersConfig } from '@n8n/config';
import Container from 'typedi';
import { TaskRunnerService } from '@/runners/runner-ws-server';
import { TaskRunnerWsServer } from '@/runners/runner-ws-server';
import { TaskBroker } from '@/runners/task-broker.service';
import { TaskRunnerProcess } from '@/runners/task-runner-process';
import { TaskRunnerServer } from '@/runners/task-runner-server';
@ -18,7 +18,7 @@ describe('TaskRunnerProcess', () => {
const runnerProcess = Container.get(TaskRunnerProcess);
const taskBroker = Container.get(TaskBroker);
const taskRunnerService = Container.get(TaskRunnerService);
const taskRunnerService = Container.get(TaskRunnerWsServer);
const startLauncherSpy = jest.spyOn(runnerProcess, 'startLauncher');
const startNodeSpy = jest.spyOn(runnerProcess, 'startNode');

View file

@ -1,6 +1,6 @@
{
"name": "n8n-core",
"version": "1.65.0",
"version": "1.66.0",
"description": "Core functionality of n8n",
"main": "dist/index",
"types": "dist/index.d.ts",
@ -13,13 +13,13 @@
"scripts": {
"clean": "rimraf dist .turbo",
"typecheck": "tsc --noEmit",
"build": "tsc -p tsconfig.build.json",
"build": "tsc -p tsconfig.build.json && tsc-alias -p tsconfig.build.json",
"dev": "pnpm watch",
"format": "biome format --write .",
"format:check": "biome ci .",
"lint": "eslint . --quiet",
"lintfix": "eslint . --fix",
"watch": "tsc -p tsconfig.build.json --watch",
"watch": "tsc-watch -p tsconfig.build.json --onCompilationComplete \"tsc-alias -p tsconfig.build.json\"",
"test": "jest"
},
"files": [

View file

@ -23,12 +23,11 @@ import type {
} from 'axios';
import axios from 'axios';
import crypto, { createHmac } from 'crypto';
import type { Request, Response } from 'express';
import FileType from 'file-type';
import FormData from 'form-data';
import { createReadStream } from 'fs';
import { access as fsAccess, writeFile as fsWriteFile } from 'fs/promises';
import { IncomingMessage, type IncomingHttpHeaders } from 'http';
import { IncomingMessage } from 'http';
import { Agent, type AgentOptions } from 'https';
import get from 'lodash/get';
import isEmpty from 'lodash/isEmpty';
@ -60,7 +59,6 @@ import type {
IGetNodeParameterOptions,
IHookFunctions,
IHttpRequestOptions,
ILoadOptionsFunctions,
IN8nHttpFullResponse,
IN8nHttpResponse,
INode,
@ -101,7 +99,6 @@ import type {
INodeParameters,
EnsureTypeOptions,
SSHTunnelFunctions,
SchedulingFunctions,
DeduplicationHelperFunctions,
IDeduplicationOutput,
IDeduplicationOutputItems,
@ -111,6 +108,7 @@ import type {
ICheckProcessedContextData,
AiEvent,
ISupplyDataFunctions,
WebhookType,
} from 'n8n-workflow';
import {
NodeConnectionType,
@ -167,7 +165,8 @@ import {
import { extractValue } from './ExtractValue';
import { InstanceSettings } from './InstanceSettings';
import type { ExtendedValidationResult, IResponseError } from './Interfaces';
import { ScheduledTaskManager } from './ScheduledTaskManager';
// eslint-disable-next-line import/no-cycle
import { HookContext, PollContext, TriggerContext, WebhookContext } from './node-execution-context';
import { getSecretsProxy } from './Secrets';
import { SSHClientsManager } from './SSHClientsManager';
@ -215,7 +214,7 @@ const createFormDataObject = (data: Record<string, unknown>) => {
return formData;
};
const validateUrl = (url?: string): boolean => {
export const validateUrl = (url?: string): boolean => {
if (!url) return false;
try {
@ -776,7 +775,7 @@ export function parseIncomingMessage(message: IncomingMessage) {
}
}
async function binaryToString(body: Buffer | Readable, encoding?: BufferEncoding) {
export async function binaryToString(body: Buffer | Readable, encoding?: BufferEncoding) {
const buffer = await binaryToBuffer(body);
if (!encoding && body instanceof IncomingMessage) {
parseIncomingMessage(body);
@ -1010,7 +1009,7 @@ export const removeEmptyBody = (requestOptions: IHttpRequestOptions | IRequestOp
}
};
async function httpRequest(
export async function httpRequest(
requestOptions: IHttpRequestOptions,
): Promise<IN8nHttpFullResponse | IN8nHttpResponse> {
removeEmptyBody(requestOptions);
@ -1205,7 +1204,7 @@ export async function copyBinaryFile(
* base64 and adds metadata.
*/
// eslint-disable-next-line complexity
async function prepareBinaryData(
export async function prepareBinaryData(
binaryData: Buffer | Readable,
executionId: string,
workflowId: string,
@ -1348,6 +1347,7 @@ export async function clearAllProcessedItems(
options,
);
}
export async function getProcessedDataCount(
scope: DeduplicationScope,
contextData: ICheckProcessedContextData,
@ -1359,7 +1359,8 @@ export async function getProcessedDataCount(
options,
);
}
function applyPaginationRequestData(
export function applyPaginationRequestData(
requestData: IRequestOptions,
paginationRequestData: PaginationOptions['request'],
): IRequestOptions {
@ -2628,7 +2629,7 @@ export function continueOnFail(node: INode): boolean {
*
*/
export function getNodeWebhookUrl(
name: string,
name: WebhookType,
workflow: Workflow,
node: INode,
additionalData: IWorkflowExecuteAdditionalData,
@ -2673,7 +2674,7 @@ export function getNodeWebhookUrl(
*
*/
export function getWebhookDescription(
name: string,
name: WebhookType,
workflow: Workflow,
node: INode,
): IWebhookDescription | undefined {
@ -2798,7 +2799,7 @@ const addExecutionDataFunctions = async (
}
};
async function getInputConnectionData(
export async function getInputConnectionData(
this: IAllExecuteFunctions,
workflow: Workflow,
runExecutionData: IRunExecutionData,
@ -3342,14 +3343,6 @@ const getSSHTunnelFunctions = (): SSHTunnelFunctions => ({
await Container.get(SSHClientsManager).getClient(credentials),
});
const getSchedulingFunctions = (workflow: Workflow): SchedulingFunctions => {
const scheduledTaskManager = Container.get(ScheduledTaskManager);
return {
registerCron: (cronExpression, onTick) =>
scheduledTaskManager.registerCron(workflow, cronExpression, onTick),
};
};
const getAllowedPaths = () => {
const restrictFileAccessTo = process.env[RESTRICT_FILE_ACCESS_TO];
if (!restrictFileAccessTo) {
@ -3553,57 +3546,7 @@ export function getExecutePollFunctions(
mode: WorkflowExecuteMode,
activation: WorkflowActivateMode,
): IPollFunctions {
return ((workflow: Workflow, node: INode) => {
return {
...getCommonWorkflowFunctions(workflow, node, additionalData),
__emit: (): void => {
throw new ApplicationError(
'Overwrite NodeExecuteFunctions.getExecutePollFunctions.__emit function',
);
},
__emitError() {
throw new ApplicationError(
'Overwrite NodeExecuteFunctions.getExecutePollFunctions.__emitError function',
);
},
getMode: () => mode,
getActivationMode: () => activation,
getCredentials: async (type) =>
await getCredentials(workflow, node, type, additionalData, mode),
getNodeParameter: (
parameterName: string,
fallbackValue?: any,
options?: IGetNodeParameterOptions,
): NodeParameterValueType | object => {
const runExecutionData: IRunExecutionData | null = null;
const itemIndex = 0;
const runIndex = 0;
const connectionInputData: INodeExecutionData[] = [];
return getNodeParameter(
workflow,
runExecutionData,
runIndex,
connectionInputData,
node,
parameterName,
itemIndex,
mode,
getAdditionalKeys(additionalData, mode, runExecutionData),
undefined,
fallbackValue,
options,
);
},
helpers: {
createDeferredPromise,
...getRequestHelperFunctions(workflow, node, additionalData),
...getBinaryHelperFunctions(additionalData, workflow.id),
...getSchedulingFunctions(workflow),
returnJsonArray,
},
};
})(workflow, node);
return new PollContext(workflow, node, additionalData, mode, activation);
}
/**
@ -3617,58 +3560,7 @@ export function getExecuteTriggerFunctions(
mode: WorkflowExecuteMode,
activation: WorkflowActivateMode,
): ITriggerFunctions {
return ((workflow: Workflow, node: INode) => {
return {
...getCommonWorkflowFunctions(workflow, node, additionalData),
emit: (): void => {
throw new ApplicationError(
'Overwrite NodeExecuteFunctions.getExecuteTriggerFunctions.emit function',
);
},
emitError: (): void => {
throw new ApplicationError(
'Overwrite NodeExecuteFunctions.getExecuteTriggerFunctions.emit function',
);
},
getMode: () => mode,
getActivationMode: () => activation,
getCredentials: async (type) =>
await getCredentials(workflow, node, type, additionalData, mode),
getNodeParameter: (
parameterName: string,
fallbackValue?: any,
options?: IGetNodeParameterOptions,
): NodeParameterValueType | object => {
const runExecutionData: IRunExecutionData | null = null;
const itemIndex = 0;
const runIndex = 0;
const connectionInputData: INodeExecutionData[] = [];
return getNodeParameter(
workflow,
runExecutionData,
runIndex,
connectionInputData,
node,
parameterName,
itemIndex,
mode,
getAdditionalKeys(additionalData, mode, runExecutionData),
undefined,
fallbackValue,
options,
);
},
helpers: {
createDeferredPromise,
...getSSHTunnelFunctions(),
...getRequestHelperFunctions(workflow, node, additionalData),
...getBinaryHelperFunctions(additionalData, workflow.id),
...getSchedulingFunctions(workflow),
returnJsonArray,
},
};
})(workflow, node);
return new TriggerContext(workflow, node, additionalData, mode, activation);
}
/**
@ -4400,6 +4292,7 @@ export function getExecuteSingleFunctions(
},
helpers: {
createDeferredPromise,
returnJsonArray,
...getRequestHelperFunctions(
workflow,
node,
@ -4439,85 +4332,6 @@ export function getCredentialTestFunctions(): ICredentialTestFunctions {
};
}
/**
* Returns the execute functions regular nodes have access to in load-options-function.
*/
export function getLoadOptionsFunctions(
workflow: Workflow,
node: INode,
path: string,
additionalData: IWorkflowExecuteAdditionalData,
): ILoadOptionsFunctions {
return ((workflow: Workflow, node: INode, path: string) => {
return {
...getCommonWorkflowFunctions(workflow, node, additionalData),
getCredentials: async (type) =>
await getCredentials(workflow, node, type, additionalData, 'internal'),
getCurrentNodeParameter: (
parameterPath: string,
options?: IGetNodeParameterOptions,
): NodeParameterValueType | object | undefined => {
const nodeParameters = additionalData.currentNodeParameters;
if (parameterPath.charAt(0) === '&') {
parameterPath = `${path.split('.').slice(1, -1).join('.')}.${parameterPath.slice(1)}`;
}
let returnData = get(nodeParameters, parameterPath);
// This is outside the try/catch because it throws errors with proper messages
if (options?.extractValue) {
const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion);
if (nodeType === undefined) {
throw new ApplicationError('Node type is not known so cannot return parameter value', {
tags: { nodeType: node.type },
});
}
returnData = extractValue(
returnData,
parameterPath,
node,
nodeType,
) as NodeParameterValueType;
}
return returnData;
},
getCurrentNodeParameters: () => additionalData.currentNodeParameters,
getNodeParameter: (
parameterName: string,
fallbackValue?: any,
options?: IGetNodeParameterOptions,
): NodeParameterValueType | object => {
const runExecutionData: IRunExecutionData | null = null;
const itemIndex = 0;
const runIndex = 0;
const mode = 'internal' as WorkflowExecuteMode;
const connectionInputData: INodeExecutionData[] = [];
return getNodeParameter(
workflow,
runExecutionData,
runIndex,
connectionInputData,
node,
parameterName,
itemIndex,
mode,
getAdditionalKeys(additionalData, mode, runExecutionData),
undefined,
fallbackValue,
options,
);
},
helpers: {
...getSSHTunnelFunctions(),
...getRequestHelperFunctions(workflow, node, additionalData),
},
};
})(workflow, node, path);
}
/**
* Returns the execute functions regular nodes have access to in hook-function.
*/
@ -4529,59 +4343,7 @@ export function getExecuteHookFunctions(
activation: WorkflowActivateMode,
webhookData?: IWebhookData,
): IHookFunctions {
return ((workflow: Workflow, node: INode) => {
return {
...getCommonWorkflowFunctions(workflow, node, additionalData),
getCredentials: async (type) =>
await getCredentials(workflow, node, type, additionalData, mode),
getMode: () => mode,
getActivationMode: () => activation,
getNodeParameter: (
parameterName: string,
fallbackValue?: any,
options?: IGetNodeParameterOptions,
): NodeParameterValueType | object => {
const runExecutionData: IRunExecutionData | null = null;
const itemIndex = 0;
const runIndex = 0;
const connectionInputData: INodeExecutionData[] = [];
return getNodeParameter(
workflow,
runExecutionData,
runIndex,
connectionInputData,
node,
parameterName,
itemIndex,
mode,
getAdditionalKeys(additionalData, mode, runExecutionData),
undefined,
fallbackValue,
options,
);
},
getNodeWebhookUrl: (name: string): string | undefined => {
return getNodeWebhookUrl(
name,
workflow,
node,
additionalData,
mode,
getAdditionalKeys(additionalData, mode, null),
webhookData?.isTest,
);
},
getWebhookName(): string {
if (webhookData === undefined) {
throw new ApplicationError('Only supported in webhook functions');
}
return webhookData.webhookDescription.name;
},
getWebhookDescription: (name) => getWebhookDescription(name, workflow, node),
helpers: getRequestHelperFunctions(workflow, node, additionalData),
};
})(workflow, node);
return new HookContext(workflow, node, additionalData, mode, activation, webhookData);
}
/**
@ -4597,170 +4359,13 @@ export function getExecuteWebhookFunctions(
closeFunctions: CloseFunction[],
runExecutionData: IRunExecutionData | null,
): IWebhookFunctions {
return ((workflow: Workflow, node: INode, runExecutionData: IRunExecutionData | null) => {
return {
...getCommonWorkflowFunctions(workflow, node, additionalData),
getBodyData(): IDataObject {
if (additionalData.httpRequest === undefined) {
throw new ApplicationError('Request is missing');
}
return additionalData.httpRequest.body;
},
getCredentials: async (type) =>
await getCredentials(workflow, node, type, additionalData, mode),
getHeaderData(): IncomingHttpHeaders {
if (additionalData.httpRequest === undefined) {
throw new ApplicationError('Request is missing');
}
return additionalData.httpRequest.headers;
},
async getInputConnectionData(
inputName: NodeConnectionType,
itemIndex: number,
): Promise<unknown> {
// To be able to use expressions like "$json.sessionId" set the
// body data the webhook received to what is normally used for
// incoming node data.
const connectionInputData: INodeExecutionData[] = [
{ json: additionalData.httpRequest?.body || {} },
];
const runExecutionData: IRunExecutionData = {
resultData: {
runData: {},
},
};
const executeData: IExecuteData = {
data: {
main: [connectionInputData],
},
node,
source: null,
};
const runIndex = 0;
return await getInputConnectionData.call(
this,
workflow,
runExecutionData,
runIndex,
connectionInputData,
{} as ITaskDataConnections,
additionalData,
executeData,
mode,
closeFunctions,
inputName,
itemIndex,
);
},
getMode: () => mode,
evaluateExpression: (expression: string, evaluateItemIndex?: number) => {
const itemIndex = evaluateItemIndex === undefined ? 0 : evaluateItemIndex;
const runIndex = 0;
let connectionInputData: INodeExecutionData[] = [];
let executionData: IExecuteData | undefined;
if (runExecutionData?.executionData !== undefined) {
executionData = runExecutionData.executionData.nodeExecutionStack[0];
if (executionData !== undefined) {
connectionInputData = executionData.data.main[0]!;
}
}
const additionalKeys = getAdditionalKeys(additionalData, mode, runExecutionData);
return workflow.expression.resolveSimpleParameterValue(
`=${expression}`,
{},
runExecutionData,
runIndex,
itemIndex,
node.name,
connectionInputData,
mode,
additionalKeys,
executionData,
);
},
getNodeParameter: (
parameterName: string,
fallbackValue?: any,
options?: IGetNodeParameterOptions,
): NodeParameterValueType | object => {
const itemIndex = 0;
const runIndex = 0;
let connectionInputData: INodeExecutionData[] = [];
let executionData: IExecuteData | undefined;
if (runExecutionData?.executionData !== undefined) {
executionData = runExecutionData.executionData.nodeExecutionStack[0];
if (executionData !== undefined) {
connectionInputData = executionData.data.main[0]!;
}
}
const additionalKeys = getAdditionalKeys(additionalData, mode, runExecutionData);
return getNodeParameter(
workflow,
runExecutionData,
runIndex,
connectionInputData,
node,
parameterName,
itemIndex,
mode,
additionalKeys,
executionData,
fallbackValue,
options,
);
},
getParamsData(): object {
if (additionalData.httpRequest === undefined) {
throw new ApplicationError('Request is missing');
}
return additionalData.httpRequest.params;
},
getQueryData(): object {
if (additionalData.httpRequest === undefined) {
throw new ApplicationError('Request is missing');
}
return additionalData.httpRequest.query;
},
getRequestObject(): Request {
if (additionalData.httpRequest === undefined) {
throw new ApplicationError('Request is missing');
}
return additionalData.httpRequest;
},
getResponseObject(): Response {
if (additionalData.httpResponse === undefined) {
throw new ApplicationError('Response is missing');
}
return additionalData.httpResponse;
},
getNodeWebhookUrl: (name: string): string | undefined =>
getNodeWebhookUrl(
name,
workflow,
node,
additionalData,
mode,
getAdditionalKeys(additionalData, mode, null),
),
getWebhookName: () => webhookData.webhookDescription.name,
helpers: {
createDeferredPromise,
...getRequestHelperFunctions(workflow, node, additionalData),
...getBinaryHelperFunctions(additionalData, workflow.id),
returnJsonArray,
},
nodeHelpers: getNodeHelperFunctions(additionalData, workflow.id),
};
})(workflow, node, runExecutionData);
return new WebhookContext(
workflow,
node,
additionalData,
mode,
webhookData,
closeFunctions,
runExecutionData,
);
}

View file

@ -45,6 +45,7 @@ import {
NodeExecutionOutput,
sleep,
ErrorReporterProxy,
ExecutionCancelledError,
} from 'n8n-workflow';
import PCancelable from 'p-cancelable';
@ -154,10 +155,6 @@ export class WorkflowExecute {
return this.processRunExecutionData(workflow);
}
static isAbortError(e?: ExecutionBaseError) {
return e?.message === 'AbortError';
}
forceInputNodeExecution(workflow: Workflow): boolean {
return workflow.settings.executionOrder !== 'v1';
}
@ -1479,7 +1476,7 @@ export class WorkflowExecute {
// Add the execution data again so that it can get restarted
this.runExecutionData.executionData!.nodeExecutionStack.unshift(executionData);
// Only execute the nodeExecuteAfter hook if the node did not get aborted
if (!WorkflowExecute.isAbortError(executionError)) {
if (!this.isCancelled) {
await this.executeHook('nodeExecuteAfter', [
executionNode.name,
taskData,
@ -1827,7 +1824,7 @@ export class WorkflowExecute {
return await this.processSuccessExecution(
startedAt,
workflow,
new WorkflowOperationError('Workflow has been canceled or timed out'),
new ExecutionCancelledError(this.additionalData.executionId ?? 'unknown'),
closeFunction,
);
}
@ -1928,7 +1925,7 @@ export class WorkflowExecute {
this.moveNodeMetadata();
// Prevent running the hook if the error is an abort error, as it was already handled
if (!WorkflowExecute.isAbortError(executionError)) {
if (!this.isCancelled) {
await this.executeHook('workflowExecuteAfter', [fullRunData, newStaticData]);
}
@ -1959,4 +1956,8 @@ export class WorkflowExecute {
return fullRunData;
}
private get isCancelled() {
return this.abortController.signal.aborted;
}
}
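A minimal standalone sketch of the AbortController-based cancellation check that replaces the old 'AbortError' message matching (node names and timing are illustrative only):

const controller = new AbortController();

async function runNodes(signal: AbortSignal) {
	for (const nodeName of ['Webhook', 'Edit Fields', 'HTTP Request']) {
		// Equivalent of the `isCancelled` getter above: inspect the signal instead of
		// matching on an error message.
		if (signal.aborted) return; // stop executing further nodes once cancelled
		console.log('executing', nodeName);
		await new Promise((resolve) => setTimeout(resolve, 10));
	}
}

void runNodes(controller.signal);
controller.abort(); // cancels the run; later checks of `signal.aborted` return true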

View file

@ -20,3 +20,4 @@ export { ObjectStoreService } from './ObjectStore/ObjectStore.service.ee';
export { BinaryData } from './BinaryData/types';
export { isStoredMode as isValidNonDefaultMode } from './BinaryData/utils';
export * from './ExecutionMetadata';
export * from './node-execution-context';

View file

@ -0,0 +1,168 @@
import { mock } from 'jest-mock-extended';
import type {
INode,
INodeExecutionData,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { Container } from 'typedi';
import { InstanceSettings } from '@/InstanceSettings';
import { NodeExecutionContext } from '../node-execution-context';
class TestContext extends NodeExecutionContext {}
describe('BaseContext', () => {
const instanceSettings = mock<InstanceSettings>({ instanceId: 'abc123' });
Container.set(InstanceSettings, instanceSettings);
const workflow = mock<Workflow>({
id: '123',
name: 'Test Workflow',
active: true,
nodeTypes: mock(),
timezone: 'UTC',
});
const node = mock<INode>();
let additionalData = mock<IWorkflowExecuteAdditionalData>({
credentialsHelper: mock(),
});
const mode: WorkflowExecuteMode = 'manual';
const testContext = new TestContext(workflow, node, additionalData, mode);
beforeEach(() => {
jest.clearAllMocks();
});
describe('getNode', () => {
it('should return a deep copy of the node', () => {
const result = testContext.getNode();
expect(result).not.toBe(node);
expect(JSON.stringify(result)).toEqual(JSON.stringify(node));
});
});
describe('getWorkflow', () => {
it('should return the id, name, and active properties of the workflow', () => {
const result = testContext.getWorkflow();
expect(result).toEqual({ id: '123', name: 'Test Workflow', active: true });
});
});
describe('getMode', () => {
it('should return the mode property', () => {
const result = testContext.getMode();
expect(result).toBe(mode);
});
});
describe('getWorkflowStaticData', () => {
it('should call getStaticData method of workflow', () => {
testContext.getWorkflowStaticData('testType');
expect(workflow.getStaticData).toHaveBeenCalledWith('testType', node);
});
});
describe('getChildNodes', () => {
it('should return an array of NodeTypeAndVersion objects for the child nodes of the given node', () => {
const childNode1 = mock<INode>({ name: 'Child Node 1', type: 'testType1', typeVersion: 1 });
const childNode2 = mock<INode>({ name: 'Child Node 2', type: 'testType2', typeVersion: 2 });
workflow.getChildNodes.mockReturnValue(['Child Node 1', 'Child Node 2']);
workflow.nodes = {
'Child Node 1': childNode1,
'Child Node 2': childNode2,
};
const result = testContext.getChildNodes('Test Node');
expect(result).toEqual([
{ name: 'Child Node 1', type: 'testType1', typeVersion: 1 },
{ name: 'Child Node 2', type: 'testType2', typeVersion: 2 },
]);
});
});
describe('getParentNodes', () => {
it('should return an array of NodeTypeAndVersion objects for the parent nodes of the given node', () => {
const parentNode1 = mock<INode>({ name: 'Parent Node 1', type: 'testType1', typeVersion: 1 });
const parentNode2 = mock<INode>({ name: 'Parent Node 2', type: 'testType2', typeVersion: 2 });
workflow.getParentNodes.mockReturnValue(['Parent Node 1', 'Parent Node 2']);
workflow.nodes = {
'Parent Node 1': parentNode1,
'Parent Node 2': parentNode2,
};
const result = testContext.getParentNodes('Test Node');
expect(result).toEqual([
{ name: 'Parent Node 1', type: 'testType1', typeVersion: 1 },
{ name: 'Parent Node 2', type: 'testType2', typeVersion: 2 },
]);
});
});
describe('getKnownNodeTypes', () => {
it('should call getKnownTypes method of workflow.nodeTypes', () => {
testContext.getKnownNodeTypes();
expect(workflow.nodeTypes.getKnownTypes).toHaveBeenCalled();
});
});
describe('getRestApiUrl', () => {
it('should return the restApiUrl property of additionalData', () => {
additionalData.restApiUrl = 'https://example.com/api';
const result = testContext.getRestApiUrl();
expect(result).toBe('https://example.com/api');
});
});
describe('getInstanceBaseUrl', () => {
it('should return the instanceBaseUrl property of additionalData', () => {
additionalData.instanceBaseUrl = 'https://example.com';
const result = testContext.getInstanceBaseUrl();
expect(result).toBe('https://example.com');
});
});
describe('getInstanceId', () => {
it('should return the instanceId property of instanceSettings', () => {
const result = testContext.getInstanceId();
expect(result).toBe('abc123');
});
});
describe('getTimezone', () => {
it('should return the timezone property of workflow', () => {
const result = testContext.getTimezone();
expect(result).toBe('UTC');
});
});
describe('getCredentialsProperties', () => {
it('should call getCredentialsProperties method of additionalData.credentialsHelper', () => {
testContext.getCredentialsProperties('testType');
expect(additionalData.credentialsHelper.getCredentialsProperties).toHaveBeenCalledWith(
'testType',
);
});
});
describe('prepareOutputData', () => {
it('should return the input array wrapped in another array', async () => {
const outputData = [mock<INodeExecutionData>(), mock<INodeExecutionData>()];
const result = await testContext.prepareOutputData(outputData);
expect(result).toEqual([outputData]);
});
});
});

View file

@ -0,0 +1,147 @@
import { mock } from 'jest-mock-extended';
import type {
Expression,
ICredentialDataDecryptedObject,
ICredentialsHelper,
INode,
INodeType,
INodeTypes,
IWebhookDescription,
IWebhookData,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowActivateMode,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { ApplicationError } from 'n8n-workflow';
import { HookContext } from '../hook-context';
describe('HookContext', () => {
const testCredentialType = 'testCredential';
const webhookDescription: IWebhookDescription = {
name: 'default',
httpMethod: 'GET',
responseMode: 'onReceived',
path: 'testPath',
};
const nodeType = mock<INodeType>({
description: {
credentials: [
{
name: testCredentialType,
required: true,
},
],
properties: [
{
name: 'testParameter',
required: true,
},
],
},
});
nodeType.description.webhooks = [webhookDescription];
const nodeTypes = mock<INodeTypes>();
const expression = mock<Expression>();
const workflow = mock<Workflow>({ expression, nodeTypes });
const node = mock<INode>({
credentials: {
[testCredentialType]: {
id: 'testCredentialId',
},
},
});
node.parameters = {
testParameter: 'testValue',
};
const credentialsHelper = mock<ICredentialsHelper>();
const additionalData = mock<IWorkflowExecuteAdditionalData>({ credentialsHelper });
const mode: WorkflowExecuteMode = 'manual';
const activation: WorkflowActivateMode = 'init';
const webhookData = mock<IWebhookData>({
webhookDescription: {
name: 'default',
isFullPath: true,
},
});
const hookContext = new HookContext(
workflow,
node,
additionalData,
mode,
activation,
webhookData,
);
beforeEach(() => {
jest.clearAllMocks();
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
expression.getParameterValue.mockImplementation((value) => value);
expression.getSimpleParameterValue.mockImplementation((_, value) => value);
});
describe('getActivationMode', () => {
it('should return the activation property', () => {
const result = hookContext.getActivationMode();
expect(result).toBe(activation);
});
});
describe('getCredentials', () => {
it('should get decrypted credentials', async () => {
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' });
const credentials =
await hookContext.getCredentials<ICredentialDataDecryptedObject>(testCredentialType);
expect(credentials).toEqual({ secret: 'token' });
});
});
describe('getNodeParameter', () => {
it('should return parameter value when it exists', () => {
const parameter = hookContext.getNodeParameter('testParameter');
expect(parameter).toBe('testValue');
});
});
describe('getNodeWebhookUrl', () => {
it('should return node webhook url', () => {
const url = hookContext.getNodeWebhookUrl('default');
expect(url).toContain('testPath');
});
});
describe('getWebhookName', () => {
it('should return webhook name', () => {
const name = hookContext.getWebhookName();
expect(name).toBe('default');
});
it('should throw an error if webhookData is undefined', () => {
const hookContextWithoutWebhookData = new HookContext(
workflow,
node,
additionalData,
mode,
activation,
);
expect(() => hookContextWithoutWebhookData.getWebhookName()).toThrow(ApplicationError);
});
});
describe('getWebhookDescription', () => {
it('should return webhook description', () => {
const description = hookContext.getWebhookDescription('default');
expect(description).toEqual<IWebhookDescription>(webhookDescription);
});
});
});

View file

@ -0,0 +1,102 @@
import { mock } from 'jest-mock-extended';
import type {
Expression,
ICredentialDataDecryptedObject,
ICredentialsHelper,
INode,
INodeType,
INodeTypes,
IWorkflowExecuteAdditionalData,
Workflow,
} from 'n8n-workflow';
import { LoadOptionsContext } from '../load-options-context';
describe('LoadOptionsContext', () => {
const testCredentialType = 'testCredential';
const nodeType = mock<INodeType>({
description: {
credentials: [
{
name: testCredentialType,
required: true,
},
],
properties: [
{
name: 'testParameter',
required: true,
},
],
},
});
const nodeTypes = mock<INodeTypes>();
const expression = mock<Expression>();
const workflow = mock<Workflow>({ expression, nodeTypes });
const node = mock<INode>({
credentials: {
[testCredentialType]: {
id: 'testCredentialId',
},
},
});
node.parameters = {
testParameter: 'testValue',
};
const credentialsHelper = mock<ICredentialsHelper>();
const additionalData = mock<IWorkflowExecuteAdditionalData>({ credentialsHelper });
const path = 'testPath';
const loadOptionsContext = new LoadOptionsContext(workflow, node, additionalData, path);
beforeEach(() => {
jest.clearAllMocks();
});
describe('getCredentials', () => {
it('should get decrypted credentials', async () => {
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' });
const credentials =
await loadOptionsContext.getCredentials<ICredentialDataDecryptedObject>(testCredentialType);
expect(credentials).toEqual({ secret: 'token' });
});
});
describe('getCurrentNodeParameter', () => {
beforeEach(() => {
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
});
it('should return parameter value when it exists', () => {
additionalData.currentNodeParameters = {
testParameter: 'testValue',
};
const parameter = loadOptionsContext.getCurrentNodeParameter('testParameter');
expect(parameter).toBe('testValue');
});
});
describe('getNodeParameter', () => {
beforeEach(() => {
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
expression.getParameterValue.mockImplementation((value) => value);
});
it('should return parameter value when it exists', () => {
const parameter = loadOptionsContext.getNodeParameter('testParameter');
expect(parameter).toBe('testValue');
});
it('should return the fallback value when the parameter does not exist', () => {
const parameter = loadOptionsContext.getNodeParameter('otherParameter', 'fallback');
expect(parameter).toBe('fallback');
});
});
});

View file

@ -0,0 +1,96 @@
import { mock } from 'jest-mock-extended';
import type {
Expression,
ICredentialDataDecryptedObject,
ICredentialsHelper,
INode,
INodeType,
INodeTypes,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowActivateMode,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { PollContext } from '../poll-context';
describe('PollContext', () => {
const testCredentialType = 'testCredential';
const nodeType = mock<INodeType>({
description: {
credentials: [
{
name: testCredentialType,
required: true,
},
],
properties: [
{
name: 'testParameter',
required: true,
},
],
},
});
const nodeTypes = mock<INodeTypes>();
const expression = mock<Expression>();
const workflow = mock<Workflow>({ expression, nodeTypes });
const node = mock<INode>({
credentials: {
[testCredentialType]: {
id: 'testCredentialId',
},
},
});
node.parameters = {
testParameter: 'testValue',
};
const credentialsHelper = mock<ICredentialsHelper>();
const additionalData = mock<IWorkflowExecuteAdditionalData>({ credentialsHelper });
const mode: WorkflowExecuteMode = 'manual';
const activation: WorkflowActivateMode = 'init';
const pollContext = new PollContext(workflow, node, additionalData, mode, activation);
beforeEach(() => {
jest.clearAllMocks();
});
describe('getActivationMode', () => {
it('should return the activation property', () => {
const result = pollContext.getActivationMode();
expect(result).toBe(activation);
});
});
describe('getCredentials', () => {
it('should get decrypted credentials', async () => {
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' });
const credentials =
await pollContext.getCredentials<ICredentialDataDecryptedObject>(testCredentialType);
expect(credentials).toEqual({ secret: 'token' });
});
});
describe('getNodeParameter', () => {
beforeEach(() => {
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
expression.getParameterValue.mockImplementation((value) => value);
});
it('should return parameter value when it exists', () => {
const parameter = pollContext.getNodeParameter('testParameter');
expect(parameter).toBe('testValue');
});
it('should return the fallback value when the parameter does not exist', () => {
const parameter = pollContext.getNodeParameter('otherParameter', 'fallback');
expect(parameter).toBe('fallback');
});
});
});

View file

@ -0,0 +1,96 @@
import { mock } from 'jest-mock-extended';
import type {
Expression,
ICredentialDataDecryptedObject,
ICredentialsHelper,
INode,
INodeType,
INodeTypes,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowActivateMode,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { TriggerContext } from '../trigger-context';
describe('TriggerContext', () => {
const testCredentialType = 'testCredential';
const nodeType = mock<INodeType>({
description: {
credentials: [
{
name: testCredentialType,
required: true,
},
],
properties: [
{
name: 'testParameter',
required: true,
},
],
},
});
const nodeTypes = mock<INodeTypes>();
const expression = mock<Expression>();
const workflow = mock<Workflow>({ expression, nodeTypes });
const node = mock<INode>({
credentials: {
[testCredentialType]: {
id: 'testCredentialId',
},
},
});
node.parameters = {
testParameter: 'testValue',
};
const credentialsHelper = mock<ICredentialsHelper>();
const additionalData = mock<IWorkflowExecuteAdditionalData>({ credentialsHelper });
const mode: WorkflowExecuteMode = 'manual';
const activation: WorkflowActivateMode = 'init';
const triggerContext = new TriggerContext(workflow, node, additionalData, mode, activation);
beforeEach(() => {
jest.clearAllMocks();
});
describe('getActivationMode', () => {
it('should return the activation property', () => {
const result = triggerContext.getActivationMode();
expect(result).toBe(activation);
});
});
describe('getCredentials', () => {
it('should get decrypted credentials', async () => {
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' });
const credentials =
await triggerContext.getCredentials<ICredentialDataDecryptedObject>(testCredentialType);
expect(credentials).toEqual({ secret: 'token' });
});
});
describe('getNodeParameter', () => {
beforeEach(() => {
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
expression.getParameterValue.mockImplementation((value) => value);
});
it('should return parameter value when it exists', () => {
const parameter = triggerContext.getNodeParameter('testParameter');
expect(parameter).toBe('testValue');
});
it('should return the fallback value when the parameter does not exist', () => {
const parameter = triggerContext.getNodeParameter('otherParameter', 'fallback');
expect(parameter).toBe('fallback');
});
});
});

View file

@ -0,0 +1,161 @@
import type { Request, Response } from 'express';
import { mock } from 'jest-mock-extended';
import type {
Expression,
ICredentialDataDecryptedObject,
ICredentialsHelper,
INode,
INodeType,
INodeTypes,
IWebhookData,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { WebhookContext } from '../webhook-context';
describe('WebhookContext', () => {
const testCredentialType = 'testCredential';
const nodeType = mock<INodeType>({
description: {
credentials: [
{
name: testCredentialType,
required: true,
},
],
properties: [
{
name: 'testParameter',
required: true,
},
],
},
});
const nodeTypes = mock<INodeTypes>();
const expression = mock<Expression>();
const workflow = mock<Workflow>({ expression, nodeTypes });
const node = mock<INode>({
credentials: {
[testCredentialType]: {
id: 'testCredentialId',
},
},
});
node.parameters = {
testParameter: 'testValue',
};
const credentialsHelper = mock<ICredentialsHelper>();
const additionalData = mock<IWorkflowExecuteAdditionalData>({
credentialsHelper,
});
additionalData.httpRequest = {
body: { test: 'body' },
headers: { test: 'header' },
params: { test: 'param' },
query: { test: 'query' },
} as unknown as Request;
additionalData.httpResponse = mock<Response>();
const mode: WorkflowExecuteMode = 'manual';
const webhookData = mock<IWebhookData>({
webhookDescription: {
name: 'default',
},
});
const runExecutionData = null;
const webhookContext = new WebhookContext(
workflow,
node,
additionalData,
mode,
webhookData,
[],
runExecutionData,
);
beforeEach(() => {
jest.clearAllMocks();
});
describe('getCredentials', () => {
it('should get decrypted credentials', async () => {
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
credentialsHelper.getDecrypted.mockResolvedValue({ secret: 'token' });
const credentials =
await webhookContext.getCredentials<ICredentialDataDecryptedObject>(testCredentialType);
expect(credentials).toEqual({ secret: 'token' });
});
});
describe('getBodyData', () => {
it('should return the body data of the request', () => {
const bodyData = webhookContext.getBodyData();
expect(bodyData).toEqual({ test: 'body' });
});
});
describe('getHeaderData', () => {
it('should return the header data of the request', () => {
const headerData = webhookContext.getHeaderData();
expect(headerData).toEqual({ test: 'header' });
});
});
describe('getParamsData', () => {
it('should return the params data of the request', () => {
const paramsData = webhookContext.getParamsData();
expect(paramsData).toEqual({ test: 'param' });
});
});
describe('getQueryData', () => {
it('should return the query data of the request', () => {
const queryData = webhookContext.getQueryData();
expect(queryData).toEqual({ test: 'query' });
});
});
describe('getRequestObject', () => {
it('should return the request object', () => {
const request = webhookContext.getRequestObject();
expect(request).toBe(additionalData.httpRequest);
});
});
describe('getResponseObject', () => {
it('should return the response object', () => {
const response = webhookContext.getResponseObject();
expect(response).toBe(additionalData.httpResponse);
});
});
describe('getWebhookName', () => {
it('should return the name of the webhook', () => {
const webhookName = webhookContext.getWebhookName();
expect(webhookName).toBe('default');
});
});
describe('getNodeParameter', () => {
beforeEach(() => {
nodeTypes.getByNameAndVersion.mockReturnValue(nodeType);
expression.getParameterValue.mockImplementation((value) => value);
});
it('should return parameter value when it exists', () => {
const parameter = webhookContext.getNodeParameter('testParameter');
expect(parameter).toBe('testValue');
});
it('should return the fallback value when the parameter does not exist', () => {
const parameter = webhookContext.getNodeParameter('otherParameter', 'fallback');
expect(parameter).toBe('fallback');
});
});
});

View file

@ -0,0 +1,136 @@
import FileType from 'file-type';
import { IncomingMessage, type ClientRequest } from 'http';
import { mock } from 'jest-mock-extended';
import type { Workflow, IWorkflowExecuteAdditionalData, IBinaryData } from 'n8n-workflow';
import type { Socket } from 'net';
import { Container } from 'typedi';
import { BinaryDataService } from '@/BinaryData/BinaryData.service';
import { BinaryHelpers } from '../binary-helpers';
jest.mock('file-type');
describe('BinaryHelpers', () => {
const binaryDataService = mock<BinaryDataService>();
Container.set(BinaryDataService, binaryDataService);
const workflow = mock<Workflow>({ id: '123' });
const additionalData = mock<IWorkflowExecuteAdditionalData>({ executionId: '456' });
const binaryHelpers = new BinaryHelpers(workflow, additionalData);
beforeEach(() => {
jest.clearAllMocks();
binaryDataService.store.mockImplementation(
async (_workflowId, _executionId, _buffer, value) => value,
);
});
describe('getBinaryPath', () => {
it('should call getPath method of BinaryDataService', () => {
binaryHelpers.getBinaryPath('mock-binary-data-id');
expect(binaryDataService.getPath).toHaveBeenCalledWith('mock-binary-data-id');
});
});
describe('getBinaryMetadata', () => {
it('should call getMetadata method of BinaryDataService', async () => {
await binaryHelpers.getBinaryMetadata('mock-binary-data-id');
expect(binaryDataService.getMetadata).toHaveBeenCalledWith('mock-binary-data-id');
});
});
describe('getBinaryStream', () => {
it('should call getStream method of BinaryDataService', async () => {
await binaryHelpers.getBinaryStream('mock-binary-data-id');
expect(binaryDataService.getAsStream).toHaveBeenCalledWith('mock-binary-data-id', undefined);
});
});
describe('prepareBinaryData', () => {
it('should guess the mime type and file extension if not provided', async () => {
const buffer = Buffer.from('test');
const fileTypeData = { mime: 'application/pdf', ext: 'pdf' };
(FileType.fromBuffer as jest.Mock).mockResolvedValue(fileTypeData);
const binaryData = await binaryHelpers.prepareBinaryData(buffer);
expect(binaryData.mimeType).toEqual('application/pdf');
expect(binaryData.fileExtension).toEqual('pdf');
expect(binaryData.fileType).toEqual('pdf');
expect(binaryData.fileName).toBeUndefined();
expect(binaryData.directory).toBeUndefined();
expect(binaryDataService.store).toHaveBeenCalledWith(
workflow.id,
additionalData.executionId!,
buffer,
binaryData,
);
});
it('should use the provided mime type and file extension if provided', async () => {
const buffer = Buffer.from('test');
const mimeType = 'application/octet-stream';
const binaryData = await binaryHelpers.prepareBinaryData(buffer, undefined, mimeType);
expect(binaryData.mimeType).toEqual(mimeType);
expect(binaryData.fileExtension).toEqual('bin');
expect(binaryData.fileType).toBeUndefined();
expect(binaryData.fileName).toBeUndefined();
expect(binaryData.directory).toBeUndefined();
expect(binaryDataService.store).toHaveBeenCalledWith(
workflow.id,
additionalData.executionId!,
buffer,
binaryData,
);
});
const mockSocket = mock<Socket>({ readableHighWaterMark: 0 });
it('should use the contentDisposition.filename, responseUrl, and contentType properties to set the fileName, directory, and mimeType properties of the binaryData object', async () => {
const incomingMessage = new IncomingMessage(mockSocket);
incomingMessage.contentDisposition = { filename: 'test.txt', type: 'attachment' };
incomingMessage.contentType = 'text/plain';
incomingMessage.responseUrl = 'https://example.com/test.txt';
const binaryData = await binaryHelpers.prepareBinaryData(incomingMessage);
expect(binaryData.fileName).toEqual('test.txt');
expect(binaryData.fileType).toEqual('text');
expect(binaryData.directory).toBeUndefined();
expect(binaryData.mimeType).toEqual('text/plain');
expect(binaryData.fileExtension).toEqual('txt');
});
it('should use the req.path property to set the fileName property of the binaryData object if contentDisposition.filename and responseUrl are not provided', async () => {
const incomingMessage = new IncomingMessage(mockSocket);
incomingMessage.contentType = 'text/plain';
incomingMessage.req = mock<ClientRequest>({ path: '/test.txt' });
const binaryData = await binaryHelpers.prepareBinaryData(incomingMessage);
expect(binaryData.fileName).toEqual('test.txt');
expect(binaryData.directory).toBeUndefined();
expect(binaryData.mimeType).toEqual('text/plain');
expect(binaryData.fileExtension).toEqual('txt');
});
});
describe('setBinaryDataBuffer', () => {
it('should call store method of BinaryDataService', async () => {
const binaryData = mock<IBinaryData>();
const bufferOrStream = mock<Buffer>();
await binaryHelpers.setBinaryDataBuffer(binaryData, bufferOrStream);
expect(binaryDataService.store).toHaveBeenCalledWith(
workflow.id,
additionalData.executionId,
bufferOrStream,
binaryData,
);
});
});
});

View file

@ -0,0 +1,33 @@
import { mock } from 'jest-mock-extended';
import type { Workflow } from 'n8n-workflow';
import { Container } from 'typedi';
import { ScheduledTaskManager } from '@/ScheduledTaskManager';
import { SchedulingHelpers } from '../scheduling-helpers';
describe('SchedulingHelpers', () => {
const scheduledTaskManager = mock<ScheduledTaskManager>();
Container.set(ScheduledTaskManager, scheduledTaskManager);
const workflow = mock<Workflow>();
const schedulingHelpers = new SchedulingHelpers(workflow);
beforeEach(() => {
jest.clearAllMocks();
});
describe('registerCron', () => {
it('should call registerCron method of ScheduledTaskManager', () => {
const cronExpression = '* * * * * *';
const onTick = jest.fn();
schedulingHelpers.registerCron(cronExpression, onTick);
expect(scheduledTaskManager.registerCron).toHaveBeenCalledWith(
workflow,
cronExpression,
onTick,
);
});
});
});

View file

@ -0,0 +1,32 @@
import { mock } from 'jest-mock-extended';
import type { SSHCredentials } from 'n8n-workflow';
import type { Client } from 'ssh2';
import { Container } from 'typedi';
import { SSHClientsManager } from '@/SSHClientsManager';
import { SSHTunnelHelpers } from '../ssh-tunnel-helpers';
describe('SSHTunnelHelpers', () => {
const sshClientsManager = mock<SSHClientsManager>();
Container.set(SSHClientsManager, sshClientsManager);
const sshTunnelHelpers = new SSHTunnelHelpers();
beforeEach(() => {
jest.clearAllMocks();
});
describe('getSSHClient', () => {
const credentials = mock<SSHCredentials>();
it('should call SSHClientsManager.getClient with the given credentials', async () => {
const mockClient = mock<Client>();
sshClientsManager.getClient.mockResolvedValue(mockClient);
const client = await sshTunnelHelpers.getSSHClient(credentials);
expect(sshClientsManager.getClient).toHaveBeenCalledWith(credentials);
expect(client).toBe(mockClient);
});
});
});

View file

@ -0,0 +1,148 @@
import FileType from 'file-type';
import { IncomingMessage } from 'http';
import MimeTypes from 'mime-types';
import { ApplicationError, fileTypeFromMimeType } from 'n8n-workflow';
import type {
BinaryHelperFunctions,
IWorkflowExecuteAdditionalData,
Workflow,
IBinaryData,
} from 'n8n-workflow';
import path from 'path';
import type { Readable } from 'stream';
import Container from 'typedi';
import { BinaryDataService } from '@/BinaryData/BinaryData.service';
import { binaryToBuffer } from '@/BinaryData/utils';
// eslint-disable-next-line import/no-cycle
import { binaryToString } from '@/NodeExecuteFunctions';
export class BinaryHelpers {
private readonly binaryDataService = Container.get(BinaryDataService);
constructor(
private readonly workflow: Workflow,
private readonly additionalData: IWorkflowExecuteAdditionalData,
) {}
get exported(): BinaryHelperFunctions {
return {
getBinaryPath: this.getBinaryPath.bind(this),
getBinaryMetadata: this.getBinaryMetadata.bind(this),
getBinaryStream: this.getBinaryStream.bind(this),
binaryToBuffer,
binaryToString,
prepareBinaryData: this.prepareBinaryData.bind(this),
setBinaryDataBuffer: this.setBinaryDataBuffer.bind(this),
copyBinaryFile: this.copyBinaryFile.bind(this),
};
}
getBinaryPath(binaryDataId: string) {
return this.binaryDataService.getPath(binaryDataId);
}
async getBinaryMetadata(binaryDataId: string) {
return await this.binaryDataService.getMetadata(binaryDataId);
}
async getBinaryStream(binaryDataId: string, chunkSize?: number) {
return await this.binaryDataService.getAsStream(binaryDataId, chunkSize);
}
// eslint-disable-next-line complexity
async prepareBinaryData(binaryData: Buffer | Readable, filePath?: string, mimeType?: string) {
let fileExtension: string | undefined;
if (binaryData instanceof IncomingMessage) {
if (!filePath) {
try {
const { responseUrl } = binaryData;
filePath =
binaryData.contentDisposition?.filename ??
((responseUrl && new URL(responseUrl).pathname) ?? binaryData.req?.path)?.slice(1);
} catch {}
}
if (!mimeType) {
mimeType = binaryData.contentType;
}
}
if (!mimeType) {
// If no mime type is given figure it out
if (filePath) {
// Use file path to guess mime type
const mimeTypeLookup = MimeTypes.lookup(filePath);
if (mimeTypeLookup) {
mimeType = mimeTypeLookup;
}
}
if (!mimeType) {
if (Buffer.isBuffer(binaryData)) {
// Use buffer to guess mime type
const fileTypeData = await FileType.fromBuffer(binaryData);
if (fileTypeData) {
mimeType = fileTypeData.mime;
fileExtension = fileTypeData.ext;
}
} else if (binaryData instanceof IncomingMessage) {
mimeType = binaryData.headers['content-type'];
} else {
// TODO: detect filetype from other kind of streams
}
}
}
if (!fileExtension && mimeType) {
fileExtension = MimeTypes.extension(mimeType) || undefined;
}
if (!mimeType) {
// Fall back to text
mimeType = 'text/plain';
}
const returnData: IBinaryData = {
mimeType,
fileType: fileTypeFromMimeType(mimeType),
fileExtension,
data: '',
};
if (filePath) {
if (filePath.includes('?')) {
// Remove any query parameters that may be present
filePath = filePath.split('?').shift();
}
const filePathParts = path.parse(filePath as string);
if (filePathParts.dir !== '') {
returnData.directory = filePathParts.dir;
}
returnData.fileName = filePathParts.base;
// Remove the dot
const extractedFileExtension = filePathParts.ext.slice(1);
if (extractedFileExtension) {
returnData.fileExtension = extractedFileExtension;
}
}
return await this.setBinaryDataBuffer(returnData, binaryData);
}
async setBinaryDataBuffer(binaryData: IBinaryData, bufferOrStream: Buffer | Readable) {
return await this.binaryDataService.store(
this.workflow.id,
this.additionalData.executionId!,
bufferOrStream,
binaryData,
);
}
async copyBinaryFile(): Promise<never> {
throw new ApplicationError('`copyBinaryFile` has been removed. Please upgrade this node.');
}
}
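For orientation, a minimal usage sketch of the binary helpers exported above, assuming a node's execute-functions context; the buffer contents and file name are illustrative, not taken from this change.
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
// Hypothetical node code; `this.helpers` is assumed to expose the functions from
// `BinaryHelpers.exported` (prepareBinaryData, setBinaryDataBuffer, ...).
async function execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
	const buffer = Buffer.from('hello world', 'utf8');
	// With a file path given, the mime type is looked up from the extension ('text/plain' here).
	const binaryData = await this.helpers.prepareBinaryData(buffer, 'hello.txt');
	return [[{ json: {}, binary: { data: binaryData } }]];
}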

View file

@ -0,0 +1,381 @@
import { createHash } from 'crypto';
import { pick } from 'lodash';
import { jsonParse, NodeOperationError, sleep } from 'n8n-workflow';
import type {
RequestHelperFunctions,
IAdditionalCredentialOptions,
IAllExecuteFunctions,
IExecuteData,
IHttpRequestOptions,
IN8nHttpFullResponse,
IN8nHttpResponse,
INode,
INodeExecutionData,
IOAuth2Options,
IRequestOptions,
IRunExecutionData,
IWorkflowDataProxyAdditionalKeys,
IWorkflowExecuteAdditionalData,
NodeParameterValueType,
PaginationOptions,
Workflow,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { Readable } from 'stream';
// eslint-disable-next-line import/no-cycle
import {
applyPaginationRequestData,
binaryToString,
httpRequest,
httpRequestWithAuthentication,
proxyRequestToAxios,
requestOAuth1,
requestOAuth2,
requestWithAuthentication,
validateUrl,
} from '@/NodeExecuteFunctions';
export class RequestHelpers {
constructor(
private readonly context: IAllExecuteFunctions,
private readonly workflow: Workflow,
private readonly node: INode,
private readonly additionalData: IWorkflowExecuteAdditionalData,
private readonly runExecutionData: IRunExecutionData | null = null,
private readonly connectionInputData: INodeExecutionData[] = [],
) {}
get exported(): RequestHelperFunctions {
return {
httpRequest,
httpRequestWithAuthentication: this.httpRequestWithAuthentication.bind(this),
requestWithAuthenticationPaginated: this.requestWithAuthenticationPaginated.bind(this),
request: this.request.bind(this),
requestWithAuthentication: this.requestWithAuthentication.bind(this),
requestOAuth1: this.requestOAuth1.bind(this),
requestOAuth2: this.requestOAuth2.bind(this),
};
}
get httpRequest() {
return httpRequest;
}
async httpRequestWithAuthentication(
credentialsType: string,
requestOptions: IHttpRequestOptions,
additionalCredentialOptions?: IAdditionalCredentialOptions,
) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
return await httpRequestWithAuthentication.call(
this.context,
credentialsType,
requestOptions,
this.workflow,
this.node,
this.additionalData,
additionalCredentialOptions,
);
}
// eslint-disable-next-line complexity
async requestWithAuthenticationPaginated(
requestOptions: IRequestOptions,
itemIndex: number,
paginationOptions: PaginationOptions,
credentialsType?: string,
additionalCredentialOptions?: IAdditionalCredentialOptions,
): Promise<unknown[]> {
const responseData = [];
if (!requestOptions.qs) {
requestOptions.qs = {};
}
requestOptions.resolveWithFullResponse = true;
requestOptions.simple = false;
let tempResponseData: IN8nHttpFullResponse;
let makeAdditionalRequest: boolean;
let paginateRequestData: PaginationOptions['request'];
const runIndex = 0;
const additionalKeys = {
$request: requestOptions,
$response: {} as IN8nHttpFullResponse,
$version: this.node.typeVersion,
$pageCount: 0,
};
const executeData: IExecuteData = {
data: {},
node: this.node,
source: null,
};
const hashData = {
identicalCount: 0,
previousLength: 0,
previousHash: '',
};
do {
paginateRequestData = this.getResolvedValue(
paginationOptions.request as unknown as NodeParameterValueType,
itemIndex,
runIndex,
executeData,
additionalKeys,
false,
) as object as PaginationOptions['request'];
const tempRequestOptions = applyPaginationRequestData(requestOptions, paginateRequestData);
if (!validateUrl(tempRequestOptions.uri as string)) {
throw new NodeOperationError(
this.node,
`'${paginateRequestData.url}' is not a valid URL.`,
{
itemIndex,
runIndex,
type: 'invalid_url',
},
);
}
if (credentialsType) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
tempResponseData = await this.requestWithAuthentication(
credentialsType,
tempRequestOptions,
additionalCredentialOptions,
);
} else {
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
tempResponseData = await this.request(tempRequestOptions);
}
const newResponse: IN8nHttpFullResponse = Object.assign(
{
body: {},
headers: {},
statusCode: 0,
},
pick(tempResponseData, ['body', 'headers', 'statusCode']),
);
let contentBody: Exclude<IN8nHttpResponse, Buffer>;
if (newResponse.body instanceof Readable && paginationOptions.binaryResult !== true) {
// Keep the original string version so that we can use it for hashing if needed
contentBody = await binaryToString(newResponse.body as Buffer | Readable);
const responseContentType = newResponse.headers['content-type']?.toString() ?? '';
if (responseContentType.includes('application/json')) {
newResponse.body = jsonParse(contentBody, { fallbackValue: {} });
} else {
newResponse.body = contentBody;
}
tempResponseData.__bodyResolved = true;
tempResponseData.body = newResponse.body;
} else {
contentBody = newResponse.body;
}
if (paginationOptions.binaryResult !== true || tempResponseData.headers.etag) {
// If the data is not binary (and so not a stream), or an etag is present,
// we check via etag or hash if identical data is received
let contentLength = 0;
if ('content-length' in tempResponseData.headers) {
contentLength = parseInt(tempResponseData.headers['content-length'] as string) || 0;
}
if (hashData.previousLength === contentLength) {
let hash: string;
if (tempResponseData.headers.etag) {
// If an etag is provided, we use it as "hash"
hash = tempResponseData.headers.etag as string;
} else {
// If there is no etag, we calculate a hash from the data in the body
if (typeof contentBody !== 'string') {
contentBody = JSON.stringify(contentBody);
}
hash = createHash('md5').update(contentBody).digest('base64');
}
if (hashData.previousHash === hash) {
hashData.identicalCount += 1;
if (hashData.identicalCount > 2) {
// Length was identical 5x and hash 3x
throw new NodeOperationError(
this.node,
'The returned response was identical 5x, so requests got stopped',
{
itemIndex,
description:
'Check if "Pagination Completed When" has been configured correctly.',
},
);
}
} else {
hashData.identicalCount = 0;
}
hashData.previousHash = hash;
} else {
hashData.identicalCount = 0;
}
hashData.previousLength = contentLength;
}
responseData.push(tempResponseData);
additionalKeys.$response = newResponse;
additionalKeys.$pageCount = additionalKeys.$pageCount + 1;
const maxRequests = this.getResolvedValue(
paginationOptions.maxRequests,
itemIndex,
runIndex,
executeData,
additionalKeys,
false,
) as number;
if (maxRequests && additionalKeys.$pageCount >= maxRequests) {
break;
}
makeAdditionalRequest = this.getResolvedValue(
paginationOptions.continue,
itemIndex,
runIndex,
executeData,
additionalKeys,
false,
) as boolean;
if (makeAdditionalRequest) {
if (paginationOptions.requestInterval) {
const requestInterval = this.getResolvedValue(
paginationOptions.requestInterval,
itemIndex,
runIndex,
executeData,
additionalKeys,
false,
) as number;
await sleep(requestInterval);
}
if (tempResponseData.statusCode < 200 || tempResponseData.statusCode >= 300) {
// We configure "requestOptions.simple = false" to let all requests pass no matter
// the response code, so that the request does not fail by default when pagination is,
// for example, configured to stop on 404 response codes. For that reason we have to
// throw an error manually here if the response code is not a success code.
let data = tempResponseData.body;
if (data instanceof Readable && paginationOptions.binaryResult !== true) {
data = await binaryToString(data as Buffer | Readable);
} else if (typeof data === 'object') {
data = JSON.stringify(data);
}
throw Object.assign(new Error(`${tempResponseData.statusCode} - "${data?.toString()}"`), {
statusCode: tempResponseData.statusCode,
error: data,
isAxiosError: true,
response: {
headers: tempResponseData.headers,
status: tempResponseData.statusCode,
statusText: tempResponseData.statusMessage,
},
});
}
}
} while (makeAdditionalRequest);
return responseData;
}
async request(uriOrObject: string | IRequestOptions, options?: IRequestOptions) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
return await proxyRequestToAxios(
this.workflow,
this.additionalData,
this.node,
uriOrObject,
options,
);
}
async requestWithAuthentication(
credentialsType: string,
requestOptions: IRequestOptions,
additionalCredentialOptions?: IAdditionalCredentialOptions,
itemIndex?: number,
) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
return await requestWithAuthentication.call(
this.context,
credentialsType,
requestOptions,
this.workflow,
this.node,
this.additionalData,
additionalCredentialOptions,
itemIndex,
);
}
async requestOAuth1(credentialsType: string, requestOptions: IRequestOptions) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
return await requestOAuth1.call(this.context, credentialsType, requestOptions);
}
async requestOAuth2(
credentialsType: string,
requestOptions: IRequestOptions,
oAuth2Options?: IOAuth2Options,
) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
return await requestOAuth2.call(
this.context,
credentialsType,
requestOptions,
this.node,
this.additionalData,
oAuth2Options,
);
}
private getResolvedValue(
parameterValue: NodeParameterValueType,
itemIndex: number,
runIndex: number,
executeData: IExecuteData,
additionalKeys?: IWorkflowDataProxyAdditionalKeys,
returnObjectAsString = false,
): NodeParameterValueType {
const mode: WorkflowExecuteMode = 'internal';
if (
typeof parameterValue === 'object' ||
(typeof parameterValue === 'string' && parameterValue.charAt(0) === '=')
) {
return this.workflow.expression.getParameterValue(
parameterValue,
this.runExecutionData,
runIndex,
itemIndex,
this.node.name,
this.connectionInputData,
mode,
additionalKeys ?? {},
executeData,
returnObjectAsString,
);
}
return parameterValue;
}
}
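For orientation, a sketch of how a node might call requestWithAuthenticationPaginated; the URL, credential type name, and pagination expressions are assumptions for illustration and would normally come from node parameters.
import type { IExecuteFunctions } from 'n8n-workflow';
// Hypothetical node code; `$pageCount` and `$response` are the additional keys
// the helper injects on every iteration of the pagination loop above.
async function fetchAllPages(this: IExecuteFunctions): Promise<unknown[]> {
	return await this.helpers.requestWithAuthenticationPaginated(
		{ method: 'GET', uri: 'https://api.example.com/items', json: true },
		0, // itemIndex
		{
			request: { qs: { page: '={{ $pageCount + 1 }}' } },
			continue: '={{ $response.body.nextPage !== undefined }}',
			requestInterval: 500, // wait 500 ms between pages
			maxRequests: 10, // hard upper bound on page requests
		},
		'httpHeaderAuth', // assumed credential type
	);
}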

View file

@ -0,0 +1,20 @@
import type { CronExpression, Workflow, SchedulingFunctions } from 'n8n-workflow';
import { Container } from 'typedi';
import { ScheduledTaskManager } from '@/ScheduledTaskManager';
export class SchedulingHelpers {
private readonly scheduledTaskManager = Container.get(ScheduledTaskManager);
constructor(private readonly workflow: Workflow) {}
get exported(): SchedulingFunctions {
return {
registerCron: this.registerCron.bind(this),
};
}
registerCron(cronExpression: CronExpression, onTick: () => void) {
this.scheduledTaskManager.registerCron(this.workflow, cronExpression, onTick);
}
}
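As a small, hypothetical sketch, a trigger node would reach registerCron through its helpers; the cron expression and emitted payload below are illustrative.
import type { ITriggerFunctions, ITriggerResponse } from 'n8n-workflow';
// Hypothetical trigger implementation using the scheduling helper.
async function trigger(this: ITriggerFunctions): Promise<ITriggerResponse> {
	this.helpers.registerCron('*/30 * * * * *', () => {
		// Emit one item every 30 seconds.
		this.emit([this.helpers.returnJsonArray([{ firedAt: new Date().toISOString() }])]);
	});
	return {};
}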

View file

@ -0,0 +1,18 @@
import type { SSHCredentials, SSHTunnelFunctions } from 'n8n-workflow';
import { Container } from 'typedi';
import { SSHClientsManager } from '@/SSHClientsManager';
export class SSHTunnelHelpers {
private readonly sshClientsManager = Container.get(SSHClientsManager);
get exported(): SSHTunnelFunctions {
return {
getSSHClient: this.getSSHClient.bind(this),
};
}
async getSSHClient(credentials: SSHCredentials) {
return await this.sshClientsManager.getClient(credentials);
}
}

View file

@ -0,0 +1,103 @@
import type {
ICredentialDataDecryptedObject,
IGetNodeParameterOptions,
INode,
INodeExecutionData,
IHookFunctions,
IRunExecutionData,
IWorkflowExecuteAdditionalData,
NodeParameterValueType,
Workflow,
WorkflowActivateMode,
WorkflowExecuteMode,
IWebhookData,
WebhookType,
} from 'n8n-workflow';
import { ApplicationError } from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import {
getAdditionalKeys,
getCredentials,
getNodeParameter,
getNodeWebhookUrl,
getWebhookDescription,
} from '@/NodeExecuteFunctions';
import { RequestHelpers } from './helpers/request-helpers';
import { NodeExecutionContext } from './node-execution-context';
export class HookContext extends NodeExecutionContext implements IHookFunctions {
readonly helpers: IHookFunctions['helpers'];
constructor(
workflow: Workflow,
node: INode,
additionalData: IWorkflowExecuteAdditionalData,
mode: WorkflowExecuteMode,
private readonly activation: WorkflowActivateMode,
private readonly webhookData?: IWebhookData,
) {
super(workflow, node, additionalData, mode);
this.helpers = new RequestHelpers(this, workflow, node, additionalData);
}
getActivationMode() {
return this.activation;
}
async getCredentials<T extends object = ICredentialDataDecryptedObject>(type: string) {
return await getCredentials<T>(this.workflow, this.node, type, this.additionalData, this.mode);
}
getNodeParameter(
parameterName: string,
// eslint-disable-next-line @typescript-eslint/no-explicit-any
fallbackValue?: any,
options?: IGetNodeParameterOptions,
): NodeParameterValueType | object {
const runExecutionData: IRunExecutionData | null = null;
const itemIndex = 0;
const runIndex = 0;
const connectionInputData: INodeExecutionData[] = [];
return getNodeParameter(
this.workflow,
runExecutionData,
runIndex,
connectionInputData,
this.node,
parameterName,
itemIndex,
this.mode,
getAdditionalKeys(this.additionalData, this.mode, runExecutionData),
undefined,
fallbackValue,
options,
);
}
getNodeWebhookUrl(name: WebhookType): string | undefined {
return getNodeWebhookUrl(
name,
this.workflow,
this.node,
this.additionalData,
this.mode,
getAdditionalKeys(this.additionalData, this.mode, null),
this.webhookData?.isTest,
);
}
getWebhookName(): string {
if (this.webhookData === undefined) {
throw new ApplicationError('Only supported in webhook functions');
}
return this.webhookData.webhookDescription.name;
}
getWebhookDescription(name: WebhookType) {
return getWebhookDescription(name, this.workflow, this.node);
}
}

View file

@ -0,0 +1,6 @@
// eslint-disable-next-line import/no-cycle
export { HookContext } from './hook-context';
export { LoadOptionsContext } from './load-options-context';
export { PollContext } from './poll-context';
export { TriggerContext } from './trigger-context';
export { WebhookContext } from './webhook-context';

View file

@ -0,0 +1,102 @@
import { get } from 'lodash';
import type {
ICredentialDataDecryptedObject,
IGetNodeParameterOptions,
INode,
INodeExecutionData,
ILoadOptionsFunctions,
IRunExecutionData,
IWorkflowExecuteAdditionalData,
NodeParameterValueType,
Workflow,
} from 'n8n-workflow';
import { extractValue } from '@/ExtractValue';
// eslint-disable-next-line import/no-cycle
import { getAdditionalKeys, getCredentials, getNodeParameter } from '@/NodeExecuteFunctions';
import { RequestHelpers } from './helpers/request-helpers';
import { SSHTunnelHelpers } from './helpers/ssh-tunnel-helpers';
import { NodeExecutionContext } from './node-execution-context';
export class LoadOptionsContext extends NodeExecutionContext implements ILoadOptionsFunctions {
readonly helpers: ILoadOptionsFunctions['helpers'];
constructor(
workflow: Workflow,
node: INode,
additionalData: IWorkflowExecuteAdditionalData,
private readonly path: string,
) {
super(workflow, node, additionalData, 'internal');
this.helpers = {
...new RequestHelpers(this, workflow, node, additionalData).exported,
...new SSHTunnelHelpers().exported,
};
}
async getCredentials<T extends object = ICredentialDataDecryptedObject>(type: string) {
return await getCredentials<T>(this.workflow, this.node, type, this.additionalData, this.mode);
}
getCurrentNodeParameter(
parameterPath: string,
options?: IGetNodeParameterOptions,
): NodeParameterValueType | object | undefined {
const nodeParameters = this.additionalData.currentNodeParameters;
if (parameterPath.charAt(0) === '&') {
parameterPath = `${this.path.split('.').slice(1, -1).join('.')}.${parameterPath.slice(1)}`;
}
let returnData = get(nodeParameters, parameterPath);
// This is outside the try/catch because it throws errors with proper messages
if (options?.extractValue) {
const nodeType = this.workflow.nodeTypes.getByNameAndVersion(
this.node.type,
this.node.typeVersion,
);
returnData = extractValue(
returnData,
parameterPath,
this.node,
nodeType,
) as NodeParameterValueType;
}
return returnData;
}
getCurrentNodeParameters() {
return this.additionalData.currentNodeParameters;
}
getNodeParameter(
parameterName: string,
// eslint-disable-next-line @typescript-eslint/no-explicit-any
fallbackValue?: any,
options?: IGetNodeParameterOptions,
): NodeParameterValueType | object {
const runExecutionData: IRunExecutionData | null = null;
const itemIndex = 0;
const runIndex = 0;
const connectionInputData: INodeExecutionData[] = [];
return getNodeParameter(
this.workflow,
runExecutionData,
runIndex,
connectionInputData,
this.node,
parameterName,
itemIndex,
this.mode,
getAdditionalKeys(this.additionalData, this.mode, runExecutionData),
undefined,
fallbackValue,
options,
);
}
}

View file

@ -0,0 +1,107 @@
import type {
FunctionsBase,
INode,
INodeExecutionData,
IWorkflowExecuteAdditionalData,
NodeTypeAndVersion,
Workflow,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { deepCopy, LoggerProxy } from 'n8n-workflow';
import { Container } from 'typedi';
import { InstanceSettings } from '@/InstanceSettings';
export abstract class NodeExecutionContext implements Omit<FunctionsBase, 'getCredentials'> {
protected readonly instanceSettings = Container.get(InstanceSettings);
constructor(
protected readonly workflow: Workflow,
protected readonly node: INode,
protected readonly additionalData: IWorkflowExecuteAdditionalData,
protected readonly mode: WorkflowExecuteMode,
) {}
get logger() {
return LoggerProxy;
}
getExecutionId() {
return this.additionalData.executionId!;
}
getNode(): INode {
return deepCopy(this.node);
}
getWorkflow() {
const { id, name, active } = this.workflow;
return { id, name, active };
}
getMode() {
return this.mode;
}
getWorkflowStaticData(type: string) {
return this.workflow.getStaticData(type, this.node);
}
getChildNodes(nodeName: string) {
const output: NodeTypeAndVersion[] = [];
const nodeNames = this.workflow.getChildNodes(nodeName);
for (const n of nodeNames) {
const node = this.workflow.nodes[n];
output.push({
name: node.name,
type: node.type,
typeVersion: node.typeVersion,
});
}
return output;
}
getParentNodes(nodeName: string) {
const output: NodeTypeAndVersion[] = [];
const nodeNames = this.workflow.getParentNodes(nodeName);
for (const n of nodeNames) {
const node = this.workflow.nodes[n];
output.push({
name: node.name,
type: node.type,
typeVersion: node.typeVersion,
});
}
return output;
}
getKnownNodeTypes() {
return this.workflow.nodeTypes.getKnownTypes();
}
getRestApiUrl() {
return this.additionalData.restApiUrl;
}
getInstanceBaseUrl() {
return this.additionalData.instanceBaseUrl;
}
getInstanceId() {
return this.instanceSettings.instanceId;
}
getTimezone() {
return this.workflow.timezone;
}
getCredentialsProperties(type: string) {
return this.additionalData.credentialsHelper.getCredentialsProperties(type);
}
async prepareOutputData(outputData: INodeExecutionData[]) {
return [outputData];
}
}

View file

@ -0,0 +1,94 @@
import type {
ICredentialDataDecryptedObject,
IGetNodeParameterOptions,
INode,
INodeExecutionData,
IPollFunctions,
IRunExecutionData,
IWorkflowExecuteAdditionalData,
NodeParameterValueType,
Workflow,
WorkflowActivateMode,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { ApplicationError, createDeferredPromise } from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import {
getAdditionalKeys,
getCredentials,
getNodeParameter,
returnJsonArray,
} from '@/NodeExecuteFunctions';
import { BinaryHelpers } from './helpers/binary-helpers';
import { RequestHelpers } from './helpers/request-helpers';
import { SchedulingHelpers } from './helpers/scheduling-helpers';
import { NodeExecutionContext } from './node-execution-context';
const throwOnEmit = () => {
throw new ApplicationError('Overwrite PollContext.__emit function');
};
const throwOnEmitError = () => {
throw new ApplicationError('Overwrite PollContext.__emitError function');
};
export class PollContext extends NodeExecutionContext implements IPollFunctions {
readonly helpers: IPollFunctions['helpers'];
constructor(
workflow: Workflow,
node: INode,
additionalData: IWorkflowExecuteAdditionalData,
mode: WorkflowExecuteMode,
private readonly activation: WorkflowActivateMode,
readonly __emit: IPollFunctions['__emit'] = throwOnEmit,
readonly __emitError: IPollFunctions['__emitError'] = throwOnEmitError,
) {
super(workflow, node, additionalData, mode);
this.helpers = {
createDeferredPromise,
returnJsonArray,
...new BinaryHelpers(workflow, additionalData).exported,
...new RequestHelpers(this, workflow, node, additionalData).exported,
...new SchedulingHelpers(workflow).exported,
};
}
getActivationMode() {
return this.activation;
}
async getCredentials<T extends object = ICredentialDataDecryptedObject>(type: string) {
return await getCredentials<T>(this.workflow, this.node, type, this.additionalData, this.mode);
}
getNodeParameter(
parameterName: string,
// eslint-disable-next-line @typescript-eslint/no-explicit-any
fallbackValue?: any,
options?: IGetNodeParameterOptions,
): NodeParameterValueType | object {
const runExecutionData: IRunExecutionData | null = null;
const itemIndex = 0;
const runIndex = 0;
const connectionInputData: INodeExecutionData[] = [];
return getNodeParameter(
this.workflow,
runExecutionData,
runIndex,
connectionInputData,
this.node,
parameterName,
itemIndex,
this.mode,
getAdditionalKeys(this.additionalData, this.mode, runExecutionData),
undefined,
fallbackValue,
options,
);
}
}

View file

@ -0,0 +1,96 @@
import type {
ICredentialDataDecryptedObject,
IGetNodeParameterOptions,
INode,
INodeExecutionData,
IRunExecutionData,
ITriggerFunctions,
IWorkflowExecuteAdditionalData,
NodeParameterValueType,
Workflow,
WorkflowActivateMode,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { ApplicationError, createDeferredPromise } from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import {
getAdditionalKeys,
getCredentials,
getNodeParameter,
returnJsonArray,
} from '@/NodeExecuteFunctions';
import { BinaryHelpers } from './helpers/binary-helpers';
import { RequestHelpers } from './helpers/request-helpers';
import { SchedulingHelpers } from './helpers/scheduling-helpers';
import { SSHTunnelHelpers } from './helpers/ssh-tunnel-helpers';
import { NodeExecutionContext } from './node-execution-context';
const throwOnEmit = () => {
throw new ApplicationError('Overwrite TriggerContext.emit function');
};
const throwOnEmitError = () => {
throw new ApplicationError('Overwrite TriggerContext.emitError function');
};
export class TriggerContext extends NodeExecutionContext implements ITriggerFunctions {
readonly helpers: ITriggerFunctions['helpers'];
constructor(
workflow: Workflow,
node: INode,
additionalData: IWorkflowExecuteAdditionalData,
mode: WorkflowExecuteMode,
private readonly activation: WorkflowActivateMode,
readonly emit: ITriggerFunctions['emit'] = throwOnEmit,
readonly emitError: ITriggerFunctions['emitError'] = throwOnEmitError,
) {
super(workflow, node, additionalData, mode);
this.helpers = {
createDeferredPromise,
returnJsonArray,
...new BinaryHelpers(workflow, additionalData).exported,
...new RequestHelpers(this, workflow, node, additionalData).exported,
...new SchedulingHelpers(workflow).exported,
...new SSHTunnelHelpers().exported,
};
}
getActivationMode() {
return this.activation;
}
async getCredentials<T extends object = ICredentialDataDecryptedObject>(type: string) {
return await getCredentials<T>(this.workflow, this.node, type, this.additionalData, this.mode);
}
getNodeParameter(
parameterName: string,
// eslint-disable-next-line @typescript-eslint/no-explicit-any
fallbackValue?: any,
options?: IGetNodeParameterOptions,
): NodeParameterValueType | object {
const runExecutionData: IRunExecutionData | null = null;
const itemIndex = 0;
const runIndex = 0;
const connectionInputData: INodeExecutionData[] = [];
return getNodeParameter(
this.workflow,
runExecutionData,
runIndex,
connectionInputData,
this.node,
parameterName,
itemIndex,
this.mode,
getAdditionalKeys(this.additionalData, this.mode, runExecutionData),
undefined,
fallbackValue,
options,
);
}
}

View file

@ -0,0 +1,233 @@
import type { Request, Response } from 'express';
import type {
CloseFunction,
ICredentialDataDecryptedObject,
IDataObject,
IExecuteData,
IGetNodeParameterOptions,
INode,
INodeExecutionData,
IRunExecutionData,
ITaskDataConnections,
IWebhookData,
IWebhookFunctions,
IWorkflowExecuteAdditionalData,
NodeConnectionType,
NodeParameterValueType,
WebhookType,
Workflow,
WorkflowExecuteMode,
} from 'n8n-workflow';
import { ApplicationError, createDeferredPromise } from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
import {
copyBinaryFile,
getAdditionalKeys,
getCredentials,
getInputConnectionData,
getNodeParameter,
getNodeWebhookUrl,
returnJsonArray,
} from '@/NodeExecuteFunctions';
import { BinaryHelpers } from './helpers/binary-helpers';
import { RequestHelpers } from './helpers/request-helpers';
import { NodeExecutionContext } from './node-execution-context';
export class WebhookContext extends NodeExecutionContext implements IWebhookFunctions {
readonly helpers: IWebhookFunctions['helpers'];
readonly nodeHelpers: IWebhookFunctions['nodeHelpers'];
constructor(
workflow: Workflow,
node: INode,
additionalData: IWorkflowExecuteAdditionalData,
mode: WorkflowExecuteMode,
private readonly webhookData: IWebhookData,
private readonly closeFunctions: CloseFunction[],
private readonly runExecutionData: IRunExecutionData | null,
) {
super(workflow, node, additionalData, mode);
this.helpers = {
createDeferredPromise,
returnJsonArray,
...new BinaryHelpers(workflow, additionalData).exported,
...new RequestHelpers(this, workflow, node, additionalData).exported,
};
this.nodeHelpers = {
copyBinaryFile: async (filePath, fileName, mimeType) =>
await copyBinaryFile(
this.workflow.id,
this.additionalData.executionId!,
filePath,
fileName,
mimeType,
),
};
}
async getCredentials<T extends object = ICredentialDataDecryptedObject>(type: string) {
return await getCredentials<T>(this.workflow, this.node, type, this.additionalData, this.mode);
}
getBodyData() {
return this.assertHttpRequest().body as IDataObject;
}
getHeaderData() {
return this.assertHttpRequest().headers;
}
getParamsData(): object {
return this.assertHttpRequest().params;
}
getQueryData(): object {
return this.assertHttpRequest().query;
}
getRequestObject(): Request {
return this.assertHttpRequest();
}
getResponseObject(): Response {
if (this.additionalData.httpResponse === undefined) {
throw new ApplicationError('Response is missing');
}
return this.additionalData.httpResponse;
}
private assertHttpRequest() {
const { httpRequest } = this.additionalData;
if (httpRequest === undefined) {
throw new ApplicationError('Request is missing');
}
return httpRequest;
}
getNodeWebhookUrl(name: WebhookType): string | undefined {
return getNodeWebhookUrl(
name,
this.workflow,
this.node,
this.additionalData,
this.mode,
getAdditionalKeys(this.additionalData, this.mode, null),
);
}
getWebhookName() {
return this.webhookData.webhookDescription.name;
}
async getInputConnectionData(inputName: NodeConnectionType, itemIndex: number): Promise<unknown> {
// To be able to use expressions like "$json.sessionId", set the body data the
// webhook received to what is normally used for incoming node data.
const connectionInputData: INodeExecutionData[] = [
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
{ json: this.additionalData.httpRequest?.body || {} },
];
const runExecutionData: IRunExecutionData = {
resultData: {
runData: {},
},
};
const executeData: IExecuteData = {
data: {
main: [connectionInputData],
},
node: this.node,
source: null,
};
const runIndex = 0;
return await getInputConnectionData.call(
this,
this.workflow,
runExecutionData,
runIndex,
connectionInputData,
{} as ITaskDataConnections,
this.additionalData,
executeData,
this.mode,
this.closeFunctions,
inputName,
itemIndex,
);
}
evaluateExpression(expression: string, evaluateItemIndex?: number) {
const itemIndex = evaluateItemIndex ?? 0;
const runIndex = 0;
let connectionInputData: INodeExecutionData[] = [];
let executionData: IExecuteData | undefined;
if (this.runExecutionData?.executionData !== undefined) {
executionData = this.runExecutionData.executionData.nodeExecutionStack[0];
if (executionData !== undefined) {
connectionInputData = executionData.data.main[0]!;
}
}
const additionalKeys = getAdditionalKeys(this.additionalData, this.mode, this.runExecutionData);
return this.workflow.expression.resolveSimpleParameterValue(
`=${expression}`,
{},
this.runExecutionData,
runIndex,
itemIndex,
this.node.name,
connectionInputData,
this.mode,
additionalKeys,
executionData,
);
}
getNodeParameter(
parameterName: string,
// eslint-disable-next-line @typescript-eslint/no-explicit-any
fallbackValue?: any,
options?: IGetNodeParameterOptions,
): NodeParameterValueType | object {
const itemIndex = 0;
const runIndex = 0;
let connectionInputData: INodeExecutionData[] = [];
let executionData: IExecuteData | undefined;
if (this.runExecutionData?.executionData !== undefined) {
executionData = this.runExecutionData.executionData.nodeExecutionStack[0];
if (executionData !== undefined) {
connectionInputData = executionData.data.main[0]!;
}
}
const additionalKeys = getAdditionalKeys(this.additionalData, this.mode, this.runExecutionData);
return getNodeParameter(
this.workflow,
this.runExecutionData,
runIndex,
connectionInputData,
this.node,
parameterName,
itemIndex,
this.mode,
additionalKeys,
executionData,
fallbackValue,
options,
);
}
}
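For orientation, a hypothetical webhook handler using this context's accessors; the parameter name and returned fields are assumptions for illustration.
import type { IWebhookFunctions, IWebhookResponseData } from 'n8n-workflow';
// Hypothetical node webhook method; the framework constructs a WebhookContext
// and invokes this method with it as `this`.
async function webhook(this: IWebhookFunctions): Promise<IWebhookResponseData> {
	const body = this.getBodyData(); // parsed request body
	const headers = this.getHeaderData(); // incoming HTTP headers
	const responseMode = this.getNodeParameter('responseMode', 'onReceived') as string; // falls back when unset
	return {
		workflowData: [
			this.helpers.returnJsonArray([{ body, responseMode, userAgent: headers['user-agent'] }]),
		],
	};
}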

View file

@ -1,6 +1,6 @@
{
"name": "n8n-design-system",
"version": "1.55.0",
"version": "1.56.0",
"main": "src/main.ts",
"import": "src/main.ts",
"scripts": {

View file

@ -1,6 +1,6 @@
import { render } from '@testing-library/vue';
import AssistantAvatar from '../AssistantAvatar.vue';
import AssistantAvatar from './AssistantAvatar.vue';
describe('AskAssistantAvatar', () => {
it('renders small avatar correctly', () => {

View file

@ -1,6 +1,6 @@
import { render } from '@testing-library/vue';
import AskAssistantButton from '../AskAssistantButton.vue';
import AskAssistantButton from './AskAssistantButton.vue';
describe('AskAssistantButton', () => {
it('renders default button correctly', () => {

View file

@ -2,7 +2,7 @@ import { render } from '@testing-library/vue';
import { n8nHtml } from 'n8n-design-system/directives';
import AskAssistantChat from '../AskAssistantChat.vue';
import AskAssistantChat from './AskAssistantChat.vue';
const stubs = ['n8n-avatar', 'n8n-button', 'n8n-icon', 'n8n-icon-button'];

Some files were not shown because too many files have changed in this diff.