Merge remote-tracking branch 'origin/master' into ADO-2729/feature-set-field-default-value-of-added-node-based-on-previous

This commit is contained in:
Charlie Kolb 2024-11-19 09:16:16 +01:00
commit 69e1667332
No known key found for this signature in database
215 changed files with 5965 additions and 1875 deletions

View file

@ -0,0 +1,29 @@
/**
 * Getters
 */

// Sidebar listing past executions of the current workflow.
export const getExecutionsSidebar = () => cy.getByTestId('executions-sidebar');

// Iframe rendering a read-only preview of the selected execution.
export const getWorkflowExecutionPreviewIframe = () => cy.getByTestId('workflow-preview-iframe');

// Body element inside the preview iframe, re-wrapped so further Cypress
// commands can be chained onto it.
export const getExecutionPreviewBody = () =>
	getWorkflowExecutionPreviewIframe()
		.its('0.contentDocument.body')
		.then((body) => cy.wrap(body));

// All canvas nodes rendered inside the execution preview.
export const getExecutionPreviewBodyNodes = () =>
	getExecutionPreviewBody().findChildByTestId('canvas-node');

// First canvas node in the preview whose `data-name` attribute matches `name`.
export const getExecutionPreviewBodyNodesByName = (name: string) =>
	getExecutionPreviewBodyNodes().filter(`[data-name="${name}"]`).eq(0);
// Link in the preview's output panel pointing at the related (parent or sub)
// execution. Declared as a const arrow for consistency with the sibling
// getters in this file, which all use the `export const … = () =>` form.
export const getExecutionPreviewOutputPanelRelatedExecutionLink = () =>
	getExecutionPreviewBody().findChildByTestId('related-execution-link');
/**
 * Actions
 */

// Open the NDV for the named node inside the execution preview by
// double-clicking it.
export const openExecutionPreviewNode = (name: string) => {
	return getExecutionPreviewBodyNodesByName(name).dblclick();
};

View file

@ -48,10 +48,38 @@ export function getOutputTableRow(row: number) {
return getOutputTableRows().eq(row); return getOutputTableRows().eq(row);
} }
// Header cells (`th`) of the output panel's data table.
export function getOutputTableHeaders() {
	const dataContainer = getOutputPanelDataContainer();
	return dataContainer.find('table thead th');
}

// Header cell of the output table whose text contains `text`.
export function getOutputTableHeaderByText(text: string) {
	const headers = getOutputTableHeaders();
	return headers.contains(text);
}

// Body cell of the output table at the given zero-based row and column.
export function getOutputTbodyCell(row: number, col: number) {
	const rowCells = getOutputTableRows().eq(row).find('td');
	return rowCells.eq(col);
}

// Run selector shown in the output panel when a node produced multiple runs.
export function getOutputRunSelector() {
	return getOutputPanel().findChildByTestId('run-selector');
}

// Text input element inside the output run selector.
export function getOutputRunSelectorInput() {
	const runSelector = getOutputRunSelector();
	return runSelector.find('input');
}
export function getOutputPanelTable() { export function getOutputPanelTable() {
return getOutputPanelDataContainer().get('table'); return getOutputPanelDataContainer().get('table');
} }
// Items-count label of the output panel (e.g. "2 items, 1 sub-execution").
export function getOutputPanelItemsCount() {
	const panel = getOutputPanel();
	return panel.getByTestId('ndv-items-count');
}

// Link in the output panel pointing at a related (sub or parent) execution.
export function getOutputPanelRelatedExecutionLink() {
	const panel = getOutputPanel();
	return panel.getByTestId('related-execution-link');
}
/** /**
* Actions * Actions
*/ */
@ -90,3 +118,8 @@ export function setParameterSelectByContent(name: string, content: string) {
getParameterInputByName(name).realClick(); getParameterInputByName(name).realClick();
getVisibleSelect().find('.option-headline').contains(content).click(); getVisibleSelect().find('.option-headline').contains(content).click();
} }
// Open the output run selector and pick the dropdown entry whose text
// contains `runName`.
export function changeOutputRunSelector(runName: string) {
	getOutputRunSelector().click();
	const options = getVisibleSelect().find('.el-select-dropdown__item');
	options.contains(runName).click();
}

View file

@ -76,6 +76,14 @@ export function getCanvasNodes() {
); );
} }
// Workflow "Save" button in the editor header.
export function getSaveButton() {
	const testId = 'workflow-save-button';
	return cy.getByTestId(testId);
}

// "Zoom to fit" control on the canvas.
export function getZoomToFitButton() {
	const testId = 'zoom-to-fit';
	return cy.getByTestId(testId);
}
/** /**
* Actions * Actions
*/ */
@ -170,3 +178,19 @@ export function clickManualChatButton() {
export function openNode(nodeName: string) { export function openNode(nodeName: string) {
getNodeByName(nodeName).dblclick(); getNodeByName(nodeName).dblclick();
} }
// Save the current workflow via the header button and wait until the UI
// confirms the save completed.
export function saveWorkflowOnButtonClick() {
	// NOTE(review): the '@createWorkflow' alias is registered but never
	// awaited in this function — presumably callers wait on it themselves;
	// confirm whether a cy.wait('@createWorkflow') is intended here.
	cy.intercept('POST', '/rest/workflows').as('createWorkflow');
	getSaveButton().should('contain', 'Save');
	getSaveButton().click();
	// The button label flips to "Saved" once the save request succeeds.
	getSaveButton().should('contain', 'Saved');
	// A successful first save navigates away from the '/new' route.
	cy.url().should('not.have.string', '/new');
}
// Paste a workflow object into the canvas by firing a paste event with its
// JSON serialization on the document body.
export function pasteWorkflow(workflow: object) {
	const serialized = JSON.stringify(workflow);
	cy.get('body').paste(serialized);
}

// Fit the entire workflow into the visible canvas area.
export function clickZoomToFit() {
	const zoomButton = getZoomToFitButton();
	zoomButton.click();
}

View file

@ -23,7 +23,6 @@ describe('Workflows', () => {
}); });
it('should create multiple new workflows using add workflow button', () => { it('should create multiple new workflows using add workflow button', () => {
cy.viewport(1920, 1080);
[...Array(multipleWorkflowsCount).keys()].forEach(() => { [...Array(multipleWorkflowsCount).keys()].forEach(() => {
cy.visit(WorkflowsPage.url); cy.visit(WorkflowsPage.url);
WorkflowsPage.getters.createWorkflowButton().click(); WorkflowsPage.getters.createWorkflowButton().click();
@ -36,7 +35,6 @@ describe('Workflows', () => {
}); });
it('should search for a workflow', () => { it('should search for a workflow', () => {
cy.viewport(1920, 1080);
// One Result // One Result
WorkflowsPage.getters.searchBar().type('Empty State Card Workflow'); WorkflowsPage.getters.searchBar().type('Empty State Card Workflow');
WorkflowsPage.getters.workflowCards().should('have.length', 1); WorkflowsPage.getters.workflowCards().should('have.length', 1);
@ -62,7 +60,6 @@ describe('Workflows', () => {
}); });
it('should delete all the workflows', () => { it('should delete all the workflows', () => {
cy.viewport(1920, 1080);
WorkflowsPage.getters.workflowCards().should('have.length', multipleWorkflowsCount + 1); WorkflowsPage.getters.workflowCards().should('have.length', multipleWorkflowsCount + 1);
WorkflowsPage.getters.workflowCards().each(($el) => { WorkflowsPage.getters.workflowCards().each(($el) => {
@ -78,7 +75,6 @@ describe('Workflows', () => {
}); });
it('should respect tag querystring filter when listing workflows', () => { it('should respect tag querystring filter when listing workflows', () => {
cy.viewport(1920, 1080);
WorkflowsPage.getters.newWorkflowButtonCard().click(); WorkflowsPage.getters.newWorkflowButtonCard().click();
cy.createFixtureWorkflow('Test_workflow_2.json', getUniqueWorkflowName('My New Workflow')); cy.createFixtureWorkflow('Test_workflow_2.json', getUniqueWorkflowName('My New Workflow'));

View file

@ -53,7 +53,6 @@ describe('Workflow tags', () => {
}); });
it('should detach a tag inline by clicking on X on tag pill', () => { it('should detach a tag inline by clicking on X on tag pill', () => {
cy.viewport(1920, 1080);
wf.getters.createTagButton().click(); wf.getters.createTagButton().click();
wf.actions.addTags(TEST_TAGS); wf.actions.addTags(TEST_TAGS);
wf.getters.nthTagPill(1).click(); wf.getters.nthTagPill(1).click();
@ -74,7 +73,6 @@ describe('Workflow tags', () => {
}); });
it('should not show non existing tag as a selectable option', () => { it('should not show non existing tag as a selectable option', () => {
cy.viewport(1920, 1080);
const NON_EXISTING_TAG = 'My Test Tag'; const NON_EXISTING_TAG = 'My Test Tag';
wf.getters.createTagButton().click(); wf.getters.createTagButton().click();

View file

@ -514,7 +514,6 @@ describe('Execution', () => {
}); });
it('should send proper payload for node rerun', () => { it('should send proper payload for node rerun', () => {
cy.viewport(1920, 1080);
cy.createFixtureWorkflow('Multiple_trigger_node_rerun.json', 'Multiple trigger node rerun'); cy.createFixtureWorkflow('Multiple_trigger_node_rerun.json', 'Multiple trigger node rerun');
workflowPage.getters.zoomToFitButton().click(); workflowPage.getters.zoomToFitButton().click();

View file

@ -101,7 +101,6 @@ describe('Workflow Executions', () => {
}); });
it('should show workflow data in executions tab after hard reload and modify name and tags', () => { it('should show workflow data in executions tab after hard reload and modify name and tags', () => {
cy.viewport(1920, 1080);
executionsTab.actions.switchToExecutionsTab(); executionsTab.actions.switchToExecutionsTab();
checkMainHeaderELements(); checkMainHeaderELements();
workflowPage.getters.saveButton().find('button').should('not.exist'); workflowPage.getters.saveButton().find('button').should('not.exist');

View file

@ -31,29 +31,31 @@ describe('NDV', () => {
ndv.getters.inputTableRow(1).invoke('attr', 'data-test-id').should('equal', 'hovering-item'); ndv.getters.inputTableRow(1).invoke('attr', 'data-test-id').should('equal', 'hovering-item');
ndv.getters.inputTableRow(1).realHover(); ndv.actions.dragMainPanelToRight();
ndv.getters.inputTableRow(1).realMouseMove(10, 1);
ndv.getters.outputTableRow(4).invoke('attr', 'data-test-id').should('equal', 'hovering-item'); ndv.getters.outputTableRow(4).invoke('attr', 'data-test-id').should('equal', 'hovering-item');
ndv.getters.inputTableRow(2).realHover(); ndv.getters.inputTableRow(2).realMouseMove(10, 1);
ndv.getters.outputTableRow(2).invoke('attr', 'data-test-id').should('equal', 'hovering-item'); ndv.getters.outputTableRow(2).invoke('attr', 'data-test-id').should('equal', 'hovering-item');
ndv.getters.inputTableRow(3).realHover(); ndv.getters.inputTableRow(3).realMouseMove(10, 1);
ndv.getters.outputTableRow(6).invoke('attr', 'data-test-id').should('equal', 'hovering-item'); ndv.getters.outputTableRow(6).invoke('attr', 'data-test-id').should('equal', 'hovering-item');
// output to input // output to input
ndv.getters.outputTableRow(1).realHover(); ndv.actions.dragMainPanelToLeft();
ndv.getters.outputTableRow(1).realMouseMove(10, 1);
ndv.getters.inputTableRow(4).invoke('attr', 'data-test-id').should('equal', 'hovering-item'); ndv.getters.inputTableRow(4).invoke('attr', 'data-test-id').should('equal', 'hovering-item');
ndv.getters.outputTableRow(4).realHover(); ndv.getters.outputTableRow(4).realMouseMove(10, 1);
ndv.getters.inputTableRow(1).invoke('attr', 'data-test-id').should('equal', 'hovering-item'); ndv.getters.inputTableRow(1).invoke('attr', 'data-test-id').should('equal', 'hovering-item');
ndv.getters.outputTableRow(2).realHover(); ndv.getters.outputTableRow(2).realMouseMove(10, 1);
ndv.getters.inputTableRow(2).invoke('attr', 'data-test-id').should('equal', 'hovering-item'); ndv.getters.inputTableRow(2).invoke('attr', 'data-test-id').should('equal', 'hovering-item');
ndv.getters.outputTableRow(6).realHover(); ndv.getters.outputTableRow(6).realMouseMove(10, 1);
ndv.getters.inputTableRow(3).invoke('attr', 'data-test-id').should('equal', 'hovering-item'); ndv.getters.inputTableRow(3).invoke('attr', 'data-test-id').should('equal', 'hovering-item');
ndv.getters.outputTableRow(1).realHover(); ndv.getters.outputTableRow(1).realMouseMove(10, 1);
ndv.getters.inputTableRow(4).invoke('attr', 'data-test-id').should('equal', 'hovering-item'); ndv.getters.inputTableRow(4).invoke('attr', 'data-test-id').should('equal', 'hovering-item');
}); });
@ -75,31 +77,32 @@ describe('NDV', () => {
ndv.actions.switchInputMode('Table'); ndv.actions.switchInputMode('Table');
ndv.actions.switchOutputMode('Table'); ndv.actions.switchOutputMode('Table');
ndv.getters.backToCanvas().realHover(); // reset to default hover ndv.getters.backToCanvas().realMouseMove(10, 1); // reset to default hover
ndv.getters.outputHoveringItem().should('not.exist'); ndv.getters.outputHoveringItem().should('not.exist');
ndv.getters.parameterExpressionPreview('value').should('include.text', '1111'); ndv.getters.parameterExpressionPreview('value').should('include.text', '1111');
ndv.actions.selectInputNode('Set1'); ndv.actions.selectInputNode('Set1');
ndv.getters.backToCanvas().realHover(); // reset to default hover ndv.getters.backToCanvas().realMouseMove(10, 1); // reset to default hover
ndv.getters.inputTableRow(1).should('have.text', '1000'); ndv.getters.inputTableRow(1).should('have.text', '1000');
ndv.getters.inputTableRow(1).invoke('attr', 'data-test-id').should('equal', 'hovering-item'); ndv.getters.inputTableRow(1).invoke('attr', 'data-test-id').should('equal', 'hovering-item');
ndv.getters.inputTableRow(1).realHover(); ndv.actions.dragMainPanelToRight();
cy.wait(50); ndv.getters.inputTbodyCell(1, 0).realMouseMove(10, 1);
ndv.getters.outputHoveringItem().should('have.text', '1000'); ndv.getters.outputHoveringItem().should('have.text', '1000');
ndv.getters.parameterExpressionPreview('value').should('include.text', '1000'); ndv.getters.parameterExpressionPreview('value').should('include.text', '1000');
ndv.actions.selectInputNode('Sort'); ndv.actions.selectInputNode('Sort');
ndv.actions.dragMainPanelToLeft();
ndv.actions.changeOutputRunSelector('1 of 2 (6 items)'); ndv.actions.changeOutputRunSelector('1 of 2 (6 items)');
ndv.getters.backToCanvas().realHover(); // reset to default hover ndv.getters.backToCanvas().realMouseMove(10, 1); // reset to default hover
ndv.getters.inputTableRow(1).should('have.text', '1111'); ndv.getters.inputTableRow(1).should('have.text', '1111');
ndv.getters.inputTableRow(1).invoke('attr', 'data-test-id').should('equal', 'hovering-item'); ndv.getters.inputTableRow(1).invoke('attr', 'data-test-id').should('equal', 'hovering-item');
ndv.getters.inputTableRow(1).realHover(); ndv.actions.dragMainPanelToRight();
cy.wait(50); ndv.getters.inputTbodyCell(1, 0).realMouseMove(10, 1);
ndv.getters.outputHoveringItem().should('have.text', '1111'); ndv.getters.outputHoveringItem().should('have.text', '1111');
ndv.getters.parameterExpressionPreview('value').should('include.text', '1111'); ndv.getters.parameterExpressionPreview('value').should('include.text', '1111');
}); });
@ -132,20 +135,22 @@ describe('NDV', () => {
ndv.getters.inputTableRow(1).should('have.text', '1111'); ndv.getters.inputTableRow(1).should('have.text', '1111');
ndv.getters.inputTableRow(1).invoke('attr', 'data-test-id').should('equal', 'hovering-item'); ndv.getters.inputTableRow(1).invoke('attr', 'data-test-id').should('equal', 'hovering-item');
ndv.actions.dragMainPanelToLeft();
ndv.getters.outputTableRow(1).should('have.text', '1111'); ndv.getters.outputTableRow(1).should('have.text', '1111');
ndv.getters.outputTableRow(1).realHover(); ndv.getters.outputTableRow(1).realMouseMove(10, 1);
ndv.getters.outputTableRow(3).should('have.text', '4444'); ndv.getters.outputTableRow(3).should('have.text', '4444');
ndv.getters.outputTableRow(3).realHover(); ndv.getters.outputTableRow(3).realMouseMove(10, 1);
ndv.getters.inputTableRow(3).should('have.text', '4444'); ndv.getters.inputTableRow(3).should('have.text', '4444');
ndv.getters.inputTableRow(3).invoke('attr', 'data-test-id').should('equal', 'hovering-item'); ndv.getters.inputTableRow(3).invoke('attr', 'data-test-id').should('equal', 'hovering-item');
ndv.actions.changeOutputRunSelector('2 of 2 (6 items)'); ndv.actions.changeOutputRunSelector('2 of 2 (6 items)');
cy.wait(50);
ndv.getters.inputTableRow(1).should('have.text', '1000'); ndv.getters.inputTableRow(1).should('have.text', '1000');
ndv.getters.inputTableRow(1).realHover(); ndv.actions.dragMainPanelToRight();
ndv.getters.inputTableRow(1).realMouseMove(10, 1);
ndv.getters.outputTableRow(1).should('have.text', '1000'); ndv.getters.outputTableRow(1).should('have.text', '1000');
ndv.getters ndv.getters
@ -155,7 +160,8 @@ describe('NDV', () => {
.should('equal', 'hovering-item'); .should('equal', 'hovering-item');
ndv.getters.outputTableRow(3).should('have.text', '2000'); ndv.getters.outputTableRow(3).should('have.text', '2000');
ndv.getters.outputTableRow(3).realHover(); ndv.actions.dragMainPanelToLeft();
ndv.getters.outputTableRow(3).realMouseMove(10, 1);
ndv.getters.inputTableRow(3).should('have.text', '2000'); ndv.getters.inputTableRow(3).should('have.text', '2000');
@ -175,14 +181,15 @@ describe('NDV', () => {
ndv.actions.switchOutputBranch('False Branch (2 items)'); ndv.actions.switchOutputBranch('False Branch (2 items)');
ndv.getters.outputTableRow(1).should('have.text', '8888'); ndv.getters.outputTableRow(1).should('have.text', '8888');
ndv.getters.outputTableRow(1).realHover(); ndv.actions.dragMainPanelToLeft();
ndv.getters.outputTableRow(1).realMouseMove(10, 1);
ndv.getters.inputTableRow(5).should('have.text', '8888'); ndv.getters.inputTableRow(5).should('have.text', '8888');
ndv.getters.inputTableRow(5).invoke('attr', 'data-test-id').should('equal', 'hovering-item'); ndv.getters.inputTableRow(5).invoke('attr', 'data-test-id').should('equal', 'hovering-item');
ndv.getters.outputTableRow(2).should('have.text', '9999'); ndv.getters.outputTableRow(2).should('have.text', '9999');
ndv.getters.outputTableRow(2).realHover(); ndv.getters.outputTableRow(2).realMouseMove(10, 1);
ndv.getters.inputTableRow(6).should('have.text', '9999'); ndv.getters.inputTableRow(6).should('have.text', '9999');
@ -192,29 +199,35 @@ describe('NDV', () => {
workflowPage.actions.openNode('Set5'); workflowPage.actions.openNode('Set5');
ndv.actions.dragMainPanelToRight();
ndv.actions.switchInputBranch('True Branch'); ndv.actions.switchInputBranch('True Branch');
ndv.actions.dragMainPanelToLeft();
ndv.actions.changeOutputRunSelector('1 of 2 (2 items)'); ndv.actions.changeOutputRunSelector('1 of 2 (2 items)');
ndv.getters.outputTableRow(1).should('have.text', '8888'); ndv.getters.outputTableRow(1).should('have.text', '8888');
ndv.getters.outputTableRow(1).realHover(); ndv.getters.outputTableRow(1).realMouseMove(10, 1);
cy.wait(100);
ndv.getters.inputHoveringItem().should('not.exist'); ndv.getters.inputHoveringItem().should('not.exist');
ndv.getters.inputTableRow(1).should('have.text', '1111'); ndv.getters.inputTableRow(1).should('have.text', '1111');
ndv.getters.inputTableRow(1).realHover();
cy.wait(100); ndv.actions.dragMainPanelToRight();
ndv.getters.inputTableRow(1).realMouseMove(10, 1);
ndv.getters.outputHoveringItem().should('not.exist'); ndv.getters.outputHoveringItem().should('not.exist');
ndv.actions.switchInputBranch('False Branch'); ndv.actions.switchInputBranch('False Branch');
ndv.getters.inputTableRow(1).should('have.text', '8888'); ndv.getters.inputTableRow(1).should('have.text', '8888');
ndv.getters.inputTableRow(1).realHover(); ndv.actions.dragMainPanelToRight();
ndv.getters.inputTableRow(1).realMouseMove(10, 1);
ndv.actions.dragMainPanelToLeft();
ndv.actions.changeOutputRunSelector('2 of 2 (4 items)'); ndv.actions.changeOutputRunSelector('2 of 2 (4 items)');
ndv.getters.outputTableRow(1).should('have.text', '1111'); ndv.getters.outputTableRow(1).should('have.text', '1111');
ndv.getters.outputTableRow(1).realHover(); ndv.getters.outputTableRow(1).realMouseMove(10, 1);
ndv.actions.changeOutputRunSelector('1 of 2 (2 items)'); ndv.actions.changeOutputRunSelector('1 of 2 (2 items)');
ndv.getters.inputTableRow(1).should('have.text', '8888'); ndv.getters.inputTableRow(1).should('have.text', '8888');
ndv.getters.inputTableRow(1).realHover(); ndv.actions.dragMainPanelToRight();
ndv.getters.inputTableRow(1).realMouseMove(10, 1);
ndv.getters.outputHoveringItem().should('have.text', '8888'); ndv.getters.outputHoveringItem().should('have.text', '8888');
// todo there's a bug here need to fix ADO-534 // todo there's a bug here need to fix ADO-534
// ndv.getters.outputHoveringItem().should('not.exist'); // ndv.getters.outputHoveringItem().should('not.exist');

View file

@ -56,10 +56,10 @@ describe('Template credentials setup', () => {
it('can be opened from template collection page', () => { it('can be opened from template collection page', () => {
visitTemplateCollectionPage(testData.ecommerceStarterPack); visitTemplateCollectionPage(testData.ecommerceStarterPack);
templateCredentialsSetupPage.enableTemplateCredentialSetupFeatureFlag(); templateCredentialsSetupPage.enableTemplateCredentialSetupFeatureFlag();
clickUseWorkflowButtonByTitle('Promote new Shopify products on Twitter and Telegram'); clickUseWorkflowButtonByTitle('Promote new Shopify products');
templateCredentialsSetupPage.getters templateCredentialsSetupPage.getters
.title("Set up 'Promote new Shopify products on Twitter and Telegram' template") .title("Set up 'Promote new Shopify products' template")
.should('be.visible'); .should('be.visible');
}); });
@ -67,7 +67,7 @@ describe('Template credentials setup', () => {
templateCredentialsSetupPage.visitTemplateCredentialSetupPage(testTemplate.id); templateCredentialsSetupPage.visitTemplateCredentialSetupPage(testTemplate.id);
templateCredentialsSetupPage.getters templateCredentialsSetupPage.getters
.title("Set up 'Promote new Shopify products on Twitter and Telegram' template") .title("Set up 'Promote new Shopify products' template")
.should('be.visible'); .should('be.visible');
templateCredentialsSetupPage.getters templateCredentialsSetupPage.getters
@ -182,7 +182,6 @@ describe('Template credentials setup', () => {
}); });
it('should fill credentials from workflow editor', () => { it('should fill credentials from workflow editor', () => {
cy.viewport(1920, 1080);
templateCredentialsSetupPage.visitTemplateCredentialSetupPage(testTemplate.id); templateCredentialsSetupPage.visitTemplateCredentialSetupPage(testTemplate.id);
templateCredentialsSetupPage.getters.skipLink().click(); templateCredentialsSetupPage.getters.skipLink().click();

View file

@ -36,7 +36,6 @@ describe('AI Assistant::enabled', () => {
}); });
it('renders placeholder UI', () => { it('renders placeholder UI', () => {
cy.viewport(1920, 1080);
aiAssistant.getters.askAssistantFloatingButton().should('be.visible'); aiAssistant.getters.askAssistantFloatingButton().should('be.visible');
aiAssistant.getters.askAssistantFloatingButton().click(); aiAssistant.getters.askAssistantFloatingButton().click();
aiAssistant.getters.askAssistantChat().should('be.visible'); aiAssistant.getters.askAssistantChat().should('be.visible');

View file

@ -0,0 +1,140 @@
import {
getExecutionPreviewOutputPanelRelatedExecutionLink,
getExecutionsSidebar,
getWorkflowExecutionPreviewIframe,
openExecutionPreviewNode,
} from '../composables/executions';
import {
changeOutputRunSelector,
getOutputPanelItemsCount,
getOutputPanelRelatedExecutionLink,
getOutputRunSelectorInput,
getOutputTableHeaders,
getOutputTableRows,
getOutputTbodyCell,
} from '../composables/ndv';
import {
clickExecuteWorkflowButton,
clickZoomToFit,
getCanvasNodes,
navigateToNewWorkflowPage,
openNode,
pasteWorkflow,
saveWorkflowOnButtonClick,
} from '../composables/workflow';
import SUBWORKFLOW_DEBUGGING_EXAMPLE from '../fixtures/Subworkflow-debugging-execute-workflow.json';
describe('Subworkflow debugging', () => {
beforeEach(() => {
	// Build the fixture from scratch before every test: fresh canvas,
	// paste the workflow JSON, save, then execute so each test starts
	// from a completed run with sub-executions available for inspection.
	navigateToNewWorkflowPage();
	pasteWorkflow(SUBWORKFLOW_DEBUGGING_EXAMPLE);
	saveWorkflowOnButtonClick();
	// Sanity check that the full fixture (11 nodes) was pasted.
	getCanvasNodes().should('have.length', 11);
	clickZoomToFit();
	clickExecuteWorkflowButton();
});
// Each test opens one "Execute Workflow" node configured with a different
// mode/wait combination and checks how its output panel reports the
// sub-execution(s).
describe('can inspect sub executed workflow', () => {
	it('(Run once with all items/ Wait for Sub-workflow completion) (default behavior)', () => {
		openNode('Execute Workflow with param');

		// A single aggregated sub-execution with a link to inspect it.
		getOutputPanelItemsCount().should('contain.text', '2 items, 1 sub-execution');
		getOutputPanelRelatedExecutionLink().should('contain.text', 'Inspect Sub-Execution');
		getOutputPanelRelatedExecutionLink().should('have.attr', 'href');

		// ensure workflow executed and waited on output
		getOutputTableHeaders().should('have.length', 2);
		getOutputTbodyCell(1, 0).should('have.text', 'world Natalie Moore');
	});

	it('(Run once for each item/ Wait for Sub-workflow completion)', () => {
		openNode('Execute Workflow with param1');

		// Per-item mode: one sub-execution per input item, no single
		// "inspect" link since there are multiple sub-executions.
		getOutputPanelItemsCount().should('contain.text', '2 items, 2 sub-execution');
		getOutputPanelRelatedExecutionLink().should('not.exist');

		// ensure workflow executed and waited on output
		getOutputTableHeaders().should('have.length', 3);
		// First column links each row to its own sub-execution.
		getOutputTbodyCell(1, 0).find('a').should('have.attr', 'href');
		getOutputTbodyCell(1, 1).should('have.text', 'world Natalie Moore');
	});

	it('(Run once with all items/ Wait for Sub-workflow completion)', () => {
		openNode('Execute Workflow with param2');

		// No items-count label in this configuration, but the sub-execution
		// link is present.
		getOutputPanelItemsCount().should('not.exist');
		getOutputPanelRelatedExecutionLink().should('contain.text', 'Inspect Sub-Execution');
		getOutputPanelRelatedExecutionLink().should('have.attr', 'href');

		// ensure workflow executed but returned same data as input
		getOutputRunSelectorInput().should('have.value', '2 of 2 (3 items, 1 sub-execution)');
		getOutputTableHeaders().should('have.length', 6);
		getOutputTableHeaders().eq(0).should('have.text', 'uid');
		getOutputTableRows().should('have.length', 4);
		getOutputTbodyCell(1, 1).should('include.text', 'Jon_Ebert@yahoo.com');

		// Switch to the first run and verify its distinct data set.
		changeOutputRunSelector('1 of 2 (2 items, 1 sub-execution)');
		getOutputRunSelectorInput().should('have.value', '1 of 2 (2 items, 1 sub-execution)');
		getOutputTableHeaders().should('have.length', 6);
		getOutputTableHeaders().eq(0).should('have.text', 'uid');
		getOutputTableRows().should('have.length', 3);
		getOutputTbodyCell(1, 1).should('include.text', 'Terry.Dach@hotmail.com');
	});

	it('(Run once for each item/ Wait for Sub-workflow completion)', () => {
		openNode('Execute Workflow with param3');

		// ensure workflow executed but returned same data as input
		getOutputRunSelectorInput().should('have.value', '2 of 2 (3 items, 3 sub-executions)');
		getOutputTableHeaders().should('have.length', 7);
		getOutputTableHeaders().eq(1).should('have.text', 'uid');
		getOutputTableRows().should('have.length', 4);
		// Per-item sub-execution link in the first column.
		getOutputTbodyCell(1, 0).find('a').should('have.attr', 'href');
		getOutputTbodyCell(1, 2).should('include.text', 'Jon_Ebert@yahoo.com');

		// Switch to the first run and verify its distinct data set.
		changeOutputRunSelector('1 of 2 (2 items, 2 sub-executions)');
		getOutputRunSelectorInput().should('have.value', '1 of 2 (2 items, 2 sub-executions)');
		getOutputTableHeaders().should('have.length', 7);
		getOutputTableHeaders().eq(1).should('have.text', 'uid');
		getOutputTableRows().should('have.length', 3);
		getOutputTbodyCell(1, 0).find('a').should('have.attr', 'href');
		getOutputTbodyCell(1, 2).should('include.text', 'Terry.Dach@hotmail.com');
	});
});
// Drill into a sub-execution's preview, follow the "Inspect Parent
// Execution" link, and verify it leads back to the original workflow URL.
it('can inspect parent executions', () => {
	cy.url().then((workflowUrl) => {
		openNode('Execute Workflow with param');

		getOutputPanelItemsCount().should('contain.text', '2 items, 1 sub-execution');
		getOutputPanelRelatedExecutionLink().should('contain.text', 'Inspect Sub-Execution');
		getOutputPanelRelatedExecutionLink().should('have.attr', 'href');

		// ensure workflow executed and waited on output
		getOutputTableHeaders().should('have.length', 2);
		getOutputTbodyCell(1, 0).should('have.text', 'world Natalie Moore');

		// cypress cannot handle new tabs so removing it
		getOutputPanelRelatedExecutionLink().invoke('removeAttr', 'target').click();

		getExecutionsSidebar().should('be.visible');
		getWorkflowExecutionPreviewIframe().should('be.visible');
		openExecutionPreviewNode('Execute Workflow Trigger');

		getExecutionPreviewOutputPanelRelatedExecutionLink().should(
			'include.text',
			'Inspect Parent Execution',
		);

		getExecutionPreviewOutputPanelRelatedExecutionLink()
			.invoke('removeAttr', 'target')
			.click({ force: true });

		cy.url().then((currentUrl) => {
			// Fix: the original `expect(currentUrl === workflowUrl);` evaluated
			// the comparison but asserted nothing (no chai chain), so the test
			// could never fail here. Assert real equality instead.
			expect(currentUrl).to.equal(workflowUrl);
		});
	});
});
});

View file

@ -66,7 +66,6 @@ describe('NDV', () => {
}); });
it('should disconect Switch outputs if rules order was changed', () => { it('should disconect Switch outputs if rules order was changed', () => {
cy.viewport(1920, 1080);
cy.createFixtureWorkflow('NDV-test-switch_reorder.json', 'NDV test switch reorder'); cy.createFixtureWorkflow('NDV-test-switch_reorder.json', 'NDV test switch reorder');
workflowPage.actions.zoomToFit(); workflowPage.actions.zoomToFit();
@ -233,7 +232,6 @@ describe('NDV', () => {
ndv.getters.outputPanel().find('[class*=_pagination]').should('exist'); ndv.getters.outputPanel().find('[class*=_pagination]').should('exist');
}); });
it('should display large schema', () => { it('should display large schema', () => {
cy.viewport(1920, 1080);
cy.createFixtureWorkflow( cy.createFixtureWorkflow(
'Test_workflow_schema_test_pinned_data.json', 'Test_workflow_schema_test_pinned_data.json',
'NDV test schema view 2', 'NDV test schema view 2',
@ -720,7 +718,6 @@ describe('NDV', () => {
}); });
it('Should open appropriate node creator after clicking on connection hint link', () => { it('Should open appropriate node creator after clicking on connection hint link', () => {
cy.viewport(1920, 1080);
const nodeCreator = new NodeCreator(); const nodeCreator = new NodeCreator();
const hintMapper = { const hintMapper = {
Memory: 'AI Nodes', Memory: 'AI Nodes',

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,354 @@
{
"meta": {
"instanceId": "08ce71ad998aeaade0abedb8dd96153d8eaa03fcb84cfccc1530095bf9ee478e"
},
"nodes": [
{
"parameters": {},
"id": "4535ce3e-280e-49b0-8854-373472ec86d1",
"name": "When clicking Test workflow",
"type": "n8n-nodes-base.manualTrigger",
"typeVersion": 1,
"position": [80, 860]
},
{
"parameters": {
"category": "randomData",
"randomDataSeed": "0",
"randomDataCount": 2
},
"id": "d7fba18a-d51f-4509-af45-68cd9425ac6b",
"name": "DebugHelper1",
"type": "n8n-nodes-base.debugHelper",
"typeVersion": 1,
"position": [280, 860]
},
{
"parameters": {
"source": "parameter",
"workflowJson": "{\n \"meta\": {\n \"instanceId\": \"a786b722078489c1fa382391a9f3476c2784761624deb2dfb4634827256d51a0\"\n },\n \"nodes\": [\n {\n \"parameters\": {},\n \"type\": \"n8n-nodes-base.executeWorkflowTrigger\",\n \"typeVersion\": 1,\n \"position\": [\n 0,\n 0\n ],\n \"id\": \"00600a51-e63a-4b6e-93f5-f01d50a21e0c\",\n \"name\": \"Execute Workflow Trigger\"\n },\n {\n \"parameters\": {\n \"assignments\": {\n \"assignments\": [\n {\n \"id\": \"87ff01af-2e28-48da-ae6c-304040200b15\",\n \"name\": \"hello\",\n \"value\": \"=world {{ $json.firstname }} {{ $json.lastname }}\",\n \"type\": \"string\"\n }\n ]\n },\n \"includeOtherFields\": false,\n \"options\": {}\n },\n \"type\": \"n8n-nodes-base.set\",\n \"typeVersion\": 3.4,\n \"position\": [\n 280,\n 0\n ],\n \"id\": \"642219a1-d655-4a30-af5c-fcccbb690322\",\n \"name\": \"Edit Fields\"\n }\n ],\n \"connections\": {\n \"Execute Workflow Trigger\": {\n \"main\": [\n [\n {\n \"node\": \"Edit Fields\",\n \"type\": \"main\",\n \"index\": 0\n }\n ]\n ]\n }\n },\n \"pinData\": {}\n}",
"mode": "each",
"options": {
"waitForSubWorkflow": false
}
},
"type": "n8n-nodes-base.executeWorkflow",
"typeVersion": 1.1,
"position": [680, 1540],
"id": "f90a25da-dd89-4bf8-8f5b-bf8ee1de0b70",
"name": "Execute Workflow with param3"
},
{
"parameters": {
"assignments": {
"assignments": [
{
"id": "c93f26bd-3489-467b-909e-6462e1463707",
"name": "uid",
"value": "={{ $json.uid }}",
"type": "string"
},
{
"id": "3dd706ce-d925-4219-8531-ad12369972fe",
"name": "email",
"value": "={{ $json.email }}",
"type": "string"
}
]
},
"options": {}
},
"type": "n8n-nodes-base.set",
"typeVersion": 3.4,
"position": [900, 1540],
"id": "3be57648-3be8-4b0f-abfa-8fdcafee804d",
"name": "Edit Fields8"
},
{
"parameters": {
"source": "parameter",
"workflowJson": "{\n \"meta\": {\n \"instanceId\": \"a786b722078489c1fa382391a9f3476c2784761624deb2dfb4634827256d51a0\"\n },\n \"nodes\": [\n {\n \"parameters\": {},\n \"type\": \"n8n-nodes-base.executeWorkflowTrigger\",\n \"typeVersion\": 1,\n \"position\": [\n 0,\n 0\n ],\n \"id\": \"00600a51-e63a-4b6e-93f5-f01d50a21e0c\",\n \"name\": \"Execute Workflow Trigger\"\n },\n {\n \"parameters\": {\n \"assignments\": {\n \"assignments\": [\n {\n \"id\": \"87ff01af-2e28-48da-ae6c-304040200b15\",\n \"name\": \"hello\",\n \"value\": \"=world {{ $json.firstname }} {{ $json.lastname }}\",\n \"type\": \"string\"\n }\n ]\n },\n \"includeOtherFields\": false,\n \"options\": {}\n },\n \"type\": \"n8n-nodes-base.set\",\n \"typeVersion\": 3.4,\n \"position\": [\n 280,\n 0\n ],\n \"id\": \"642219a1-d655-4a30-af5c-fcccbb690322\",\n \"name\": \"Edit Fields\"\n }\n ],\n \"connections\": {\n \"Execute Workflow Trigger\": {\n \"main\": [\n [\n {\n \"node\": \"Edit Fields\",\n \"type\": \"main\",\n \"index\": 0\n }\n ]\n ]\n }\n },\n \"pinData\": {}\n}",
"options": {
"waitForSubWorkflow": false
}
},
"type": "n8n-nodes-base.executeWorkflow",
"typeVersion": 1.1,
"position": [620, 1220],
"id": "dabc2356-3660-4d17-b305-936a002029ba",
"name": "Execute Workflow with param2"
},
{
"parameters": {
"assignments": {
"assignments": [
{
"id": "c93f26bd-3489-467b-909e-6462e1463707",
"name": "uid",
"value": "={{ $json.uid }}",
"type": "string"
},
{
"id": "3dd706ce-d925-4219-8531-ad12369972fe",
"name": "email",
"value": "={{ $json.email }}",
"type": "string"
}
]
},
"options": {}
},
"type": "n8n-nodes-base.set",
"typeVersion": 3.4,
"position": [840, 1220],
"id": "9d2a9dda-e2a1-43e8-a66f-a8a555692e5f",
"name": "Edit Fields7"
},
{
"parameters": {
"source": "parameter",
"workflowJson": "{\n \"meta\": {\n \"instanceId\": \"a786b722078489c1fa382391a9f3476c2784761624deb2dfb4634827256d51a0\"\n },\n \"nodes\": [\n {\n \"parameters\": {},\n \"type\": \"n8n-nodes-base.executeWorkflowTrigger\",\n \"typeVersion\": 1,\n \"position\": [\n 0,\n 0\n ],\n \"id\": \"00600a51-e63a-4b6e-93f5-f01d50a21e0c\",\n \"name\": \"Execute Workflow Trigger\"\n },\n {\n \"parameters\": {\n \"assignments\": {\n \"assignments\": [\n {\n \"id\": \"87ff01af-2e28-48da-ae6c-304040200b15\",\n \"name\": \"hello\",\n \"value\": \"=world {{ $json.firstname }} {{ $json.lastname }}\",\n \"type\": \"string\"\n }\n ]\n },\n \"includeOtherFields\": false,\n \"options\": {}\n },\n \"type\": \"n8n-nodes-base.set\",\n \"typeVersion\": 3.4,\n \"position\": [\n 280,\n 0\n ],\n \"id\": \"642219a1-d655-4a30-af5c-fcccbb690322\",\n \"name\": \"Edit Fields\"\n }\n ],\n \"connections\": {\n \"Execute Workflow Trigger\": {\n \"main\": [\n [\n {\n \"node\": \"Edit Fields\",\n \"type\": \"main\",\n \"index\": 0\n }\n ]\n ]\n }\n },\n \"pinData\": {}\n}",
"mode": "each",
"options": {
"waitForSubWorkflow": true
}
},
"type": "n8n-nodes-base.executeWorkflow",
"typeVersion": 1.1,
"position": [560, 900],
"id": "07e47f60-622a-484c-ab24-35f6f2280595",
"name": "Execute Workflow with param1"
},
{
"parameters": {
"assignments": {
"assignments": [
{
"id": "c93f26bd-3489-467b-909e-6462e1463707",
"name": "uid",
"value": "={{ $json.uid }}",
"type": "string"
},
{
"id": "3dd706ce-d925-4219-8531-ad12369972fe",
"name": "email",
"value": "={{ $json.email }}",
"type": "string"
}
]
},
"options": {}
},
"type": "n8n-nodes-base.set",
"typeVersion": 3.4,
"position": [760, 900],
"id": "80563d0a-0bab-444f-a04c-4041a505d78b",
"name": "Edit Fields6"
},
{
"parameters": {
"source": "parameter",
"workflowJson": "{\n \"meta\": {\n \"instanceId\": \"a786b722078489c1fa382391a9f3476c2784761624deb2dfb4634827256d51a0\"\n },\n \"nodes\": [\n {\n \"parameters\": {},\n \"type\": \"n8n-nodes-base.executeWorkflowTrigger\",\n \"typeVersion\": 1,\n \"position\": [\n 0,\n 0\n ],\n \"id\": \"00600a51-e63a-4b6e-93f5-f01d50a21e0c\",\n \"name\": \"Execute Workflow Trigger\"\n },\n {\n \"parameters\": {\n \"assignments\": {\n \"assignments\": [\n {\n \"id\": \"87ff01af-2e28-48da-ae6c-304040200b15\",\n \"name\": \"hello\",\n \"value\": \"=world {{ $json.firstname }} {{ $json.lastname }}\",\n \"type\": \"string\"\n }\n ]\n },\n \"includeOtherFields\": false,\n \"options\": {}\n },\n \"type\": \"n8n-nodes-base.set\",\n \"typeVersion\": 3.4,\n \"position\": [\n 280,\n 0\n ],\n \"id\": \"642219a1-d655-4a30-af5c-fcccbb690322\",\n \"name\": \"Edit Fields\"\n }\n ],\n \"connections\": {\n \"Execute Workflow Trigger\": {\n \"main\": [\n [\n {\n \"node\": \"Edit Fields\",\n \"type\": \"main\",\n \"index\": 0\n }\n ]\n ]\n }\n },\n \"pinData\": {}\n}",
"options": {
"waitForSubWorkflow": true
}
},
"type": "n8n-nodes-base.executeWorkflow",
"typeVersion": 1.1,
"position": [560, 580],
"id": "f04af481-f4d9-4d91-a60a-a377580e8393",
"name": "Execute Workflow with param"
},
{
"parameters": {
"assignments": {
"assignments": [
{
"id": "c93f26bd-3489-467b-909e-6462e1463707",
"name": "uid",
"value": "={{ $json.uid }}",
"type": "string"
},
{
"id": "3dd706ce-d925-4219-8531-ad12369972fe",
"name": "email",
"value": "={{ $json.email }}",
"type": "string"
}
]
},
"options": {}
},
"type": "n8n-nodes-base.set",
"typeVersion": 3.4,
"position": [760, 580],
"id": "80c10607-a0ac-4090-86a1-890da0a2aa52",
"name": "Edit Fields2"
},
{
"parameters": {
"content": "## Execute Workflow (Run once with all items/ DONT Wait for Sub-workflow completion)",
"height": 254.84308966329985,
"width": 457.58120569815793
},
"id": "534ef523-3453-4a16-9ff0-8ac9f025d47d",
"name": "Sticky Note5",
"type": "n8n-nodes-base.stickyNote",
"typeVersion": 1,
"position": [500, 1080]
},
{
"parameters": {
"content": "## Execute Workflow (Run once with for each item/ DONT Wait for Sub-workflow completion) ",
"height": 284.59778445962905,
"width": 457.58120569815793
},
"id": "838f0fa3-5ee4-4d1a-afb8-42e009f1aa9e",
"name": "Sticky Note4",
"type": "n8n-nodes-base.stickyNote",
"typeVersion": 1,
"position": [580, 1400]
},
{
"parameters": {
"category": "randomData",
"randomDataSeed": "1",
"randomDataCount": 3
},
"id": "86699a49-2aa7-488e-8ea9-828404c98f08",
"name": "DebugHelper",
"type": "n8n-nodes-base.debugHelper",
"typeVersion": 1,
"position": [320, 1120]
},
{
"parameters": {
"content": "## Execute Workflow (Run once with for each item/ Wait for Sub-workflow completion) ",
"height": 284.59778445962905,
"width": 457.58120569815793
},
"id": "885d35f0-8ae6-45ec-821b-a82c27e7577a",
"name": "Sticky Note3",
"type": "n8n-nodes-base.stickyNote",
"typeVersion": 1,
"position": [480, 760]
},
{
"parameters": {
"content": "## Execute Workflow (Run once with all items/ Wait for Sub-workflow completion) (default behavior)",
"height": 254.84308966329985,
"width": 457.58120569815793
},
"id": "505bd7f2-767e-41b8-9325-77300aed5883",
"name": "Sticky Note2",
"type": "n8n-nodes-base.stickyNote",
"typeVersion": 1,
"position": [460, 460]
}
],
"connections": {
"When clicking Test workflow": {
"main": [
[
{
"node": "DebugHelper1",
"type": "main",
"index": 0
},
{
"node": "DebugHelper",
"type": "main",
"index": 0
}
]
]
},
"DebugHelper1": {
"main": [
[
{
"node": "Execute Workflow with param3",
"type": "main",
"index": 0
},
{
"node": "Execute Workflow with param2",
"type": "main",
"index": 0
},
{
"node": "Execute Workflow with param1",
"type": "main",
"index": 0
},
{
"node": "Execute Workflow with param",
"type": "main",
"index": 0
}
]
]
},
"Execute Workflow with param3": {
"main": [
[
{
"node": "Edit Fields8",
"type": "main",
"index": 0
}
]
]
},
"Execute Workflow with param2": {
"main": [
[
{
"node": "Edit Fields7",
"type": "main",
"index": 0
}
]
]
},
"Execute Workflow with param1": {
"main": [
[
{
"node": "Edit Fields6",
"type": "main",
"index": 0
}
]
]
},
"Execute Workflow with param": {
"main": [
[
{
"node": "Edit Fields2",
"type": "main",
"index": 0
}
]
]
},
"DebugHelper": {
"main": [
[
{
"node": "Execute Workflow with param2",
"type": "main",
"index": 0
},
{
"node": "Execute Workflow with param3",
"type": "main",
"index": 0
}
]
]
}
},
"pinData": {}
}

View file

@ -1,7 +1,7 @@
{ {
"workflow": { "workflow": {
"id": 1205, "id": 1205,
"name": "Promote new Shopify products on Twitter and Telegram", "name": "Promote new Shopify products",
"views": 478, "views": 478,
"recentViews": 9880, "recentViews": 9880,
"totalViews": 478, "totalViews": 478,

View file

@ -1202,7 +1202,7 @@
}, },
{ {
"id": 1205, "id": 1205,
"name": "Promote New Shopify Products on Social Media (Twitter and Telegram)", "name": "Promote New Shopify Products",
"totalViews": 219, "totalViews": 219,
"recentViews": 0, "recentViews": 0,
"user": { "user": {

View file

@ -26,6 +26,7 @@
"cypress": "^13.14.2", "cypress": "^13.14.2",
"cypress-otp": "^1.0.3", "cypress-otp": "^1.0.3",
"cypress-real-events": "^1.13.0", "cypress-real-events": "^1.13.0",
"flatted": "catalog:",
"lodash": "catalog:", "lodash": "catalog:",
"nanoid": "catalog:", "nanoid": "catalog:",
"start-server-and-test": "^2.0.8" "start-server-and-test": "^2.0.8"

View file

@ -323,6 +323,12 @@ export class NDV extends BasePage {
addItemToFixedCollection: (paramName: string) => { addItemToFixedCollection: (paramName: string) => {
this.getters.fixedCollectionParameter(paramName).getByTestId('fixed-collection-add').click(); this.getters.fixedCollectionParameter(paramName).getByTestId('fixed-collection-add').click();
}, },
dragMainPanelToLeft: () => {
cy.drag('[data-test-id=panel-drag-button]', [-1000, 0], { moveTwice: true });
},
dragMainPanelToRight: () => {
cy.drag('[data-test-id=panel-drag-button]', [1000, 0], { moveTwice: true });
},
}; };
} }

View file

@ -17,7 +17,8 @@ export class WorkflowPage extends BasePage {
workflowTagsContainer: () => cy.getByTestId('workflow-tags-container'), workflowTagsContainer: () => cy.getByTestId('workflow-tags-container'),
workflowTagsInput: () => workflowTagsInput: () =>
this.getters.workflowTagsContainer().then(($el) => cy.wrap($el.find('input').first())), this.getters.workflowTagsContainer().then(($el) => cy.wrap($el.find('input').first())),
tagPills: () => cy.get('[data-test-id="workflow-tags-container"] span.el-tag'), tagPills: () =>
cy.get('[data-test-id="workflow-tags-container"] span.el-tag:not(.count-container)'),
nthTagPill: (n: number) => nthTagPill: (n: number) =>
cy.get(`[data-test-id="workflow-tags-container"] span.el-tag:nth-child(${n})`), cy.get(`[data-test-id="workflow-tags-container"] span.el-tag:nth-child(${n})`),
tagsDropdown: () => cy.getByTestId('workflow-tags-dropdown'), tagsDropdown: () => cy.getByTestId('workflow-tags-dropdown'),

View file

@ -177,6 +177,16 @@ Cypress.Commands.add('drag', (selector, pos, options) => {
pageY: newPosition.y, pageY: newPosition.y,
force: true, force: true,
}); });
if (options?.moveTwice) {
// first move like hover to trigger object to be visible
// like in main panel in ndv
element.trigger('mousemove', {
which: 1,
pageX: newPosition.x,
pageY: newPosition.y,
force: true,
});
}
if (options?.clickToFinish) { if (options?.clickToFinish) {
// Click to finish the drag // Click to finish the drag
// For some reason, mouseup isn't working when moving nodes // For some reason, mouseup isn't working when moving nodes

View file

@ -1,7 +1,7 @@
// Load type definitions that come with Cypress module // Load type definitions that come with Cypress module
/// <reference types="cypress" /> /// <reference types="cypress" />
import type { FrontendSettings } from '@n8n/api-types'; import type { FrontendSettings, PushPayload, PushType } from '@n8n/api-types';
Cypress.Keyboard.defaults({ Cypress.Keyboard.defaults({
keystrokeDelay: 0, keystrokeDelay: 0,
@ -59,14 +59,20 @@ declare global {
drag( drag(
selector: string | Chainable<JQuery<HTMLElement>>, selector: string | Chainable<JQuery<HTMLElement>>,
target: [number, number], target: [number, number],
options?: { abs?: boolean; index?: number; realMouse?: boolean; clickToFinish?: boolean }, options?: {
abs?: boolean;
index?: number;
realMouse?: boolean;
clickToFinish?: boolean;
moveTwice?: boolean;
},
): void; ): void;
draganddrop( draganddrop(
draggableSelector: string, draggableSelector: string,
droppableSelector: string, droppableSelector: string,
options?: Partial<DragAndDropOptions>, options?: Partial<DragAndDropOptions>,
): void; ): void;
push(type: string, data: unknown): void; push<Type extends PushType>(type: Type, data: PushPayload<Type>): void;
shouldNotHaveConsoleErrors(): void; shouldNotHaveConsoleErrors(): void;
window(): Chainable< window(): Chainable<
AUTWindow & { AUTWindow & {

View file

@ -1,5 +1,5 @@
import { stringify } from 'flatted';
import type { IDataObject, IPinData, ITaskData, ITaskDataConnections } from 'n8n-workflow'; import type { IDataObject, IPinData, ITaskData, ITaskDataConnections } from 'n8n-workflow';
import { nanoid } from 'nanoid';
import { clickExecuteWorkflowButton } from '../composables/workflow'; import { clickExecuteWorkflowButton } from '../composables/workflow';
@ -39,41 +39,35 @@ export function createMockNodeExecutionData(
}; };
} }
export function createMockWorkflowExecutionData({ function createMockWorkflowExecutionData({
executionId,
runData, runData,
pinData = {},
lastNodeExecuted, lastNodeExecuted,
}: { }: {
executionId: string;
runData: Record<string, ITaskData | ITaskData[]>; runData: Record<string, ITaskData | ITaskData[]>;
pinData?: IPinData; pinData?: IPinData;
lastNodeExecuted: string; lastNodeExecuted: string;
}) { }) {
return { return {
executionId, data: stringify({
data: { startData: {},
data: { resultData: {
startData: {}, runData,
resultData: { pinData: {},
runData, lastNodeExecuted,
pinData,
lastNodeExecuted,
},
executionData: {
contextData: {},
nodeExecutionStack: [],
metadata: {},
waitingExecution: {},
waitingExecutionSource: {},
},
}, },
mode: 'manual', executionData: {
startedAt: new Date().toISOString(), contextData: {},
stoppedAt: new Date().toISOString(), nodeExecutionStack: [],
status: 'success', metadata: {},
finished: true, waitingExecution: {},
}, waitingExecutionSource: {},
},
}),
mode: 'manual',
startedAt: new Date().toISOString(),
stoppedAt: new Date().toISOString(),
status: 'success',
finished: true,
}; };
} }
@ -81,14 +75,12 @@ export function runMockWorkflowExecution({
trigger, trigger,
lastNodeExecuted, lastNodeExecuted,
runData, runData,
workflowExecutionData,
}: { }: {
trigger?: () => void; trigger?: () => void;
lastNodeExecuted: string; lastNodeExecuted: string;
runData: Array<ReturnType<typeof createMockNodeExecutionData>>; runData: Array<ReturnType<typeof createMockNodeExecutionData>>;
workflowExecutionData?: ReturnType<typeof createMockWorkflowExecutionData>;
}) { }) {
const executionId = nanoid(8); const executionId = Math.floor(Math.random() * 1_000_000).toString();
cy.intercept('POST', '/rest/workflows/**/run?**', { cy.intercept('POST', '/rest/workflows/**/run?**', {
statusCode: 201, statusCode: 201,
@ -125,13 +117,17 @@ export function runMockWorkflowExecution({
resolvedRunData[nodeName] = nodeExecution[nodeName]; resolvedRunData[nodeName] = nodeExecution[nodeName];
}); });
cy.push( cy.intercept('GET', `/rest/executions/${executionId}`, {
'executionFinished', statusCode: 200,
createMockWorkflowExecutionData({ body: {
executionId, data: createMockWorkflowExecutionData({
lastNodeExecuted, lastNodeExecuted,
runData: resolvedRunData, runData: resolvedRunData,
...workflowExecutionData, }),
}), },
); }).as('getExecution');
cy.push('executionFinished', { executionId });
cy.wait('@getExecution');
} }

View file

@ -13,7 +13,11 @@
"N8N_RUNNERS_MAX_CONCURRENCY", "N8N_RUNNERS_MAX_CONCURRENCY",
"NODE_FUNCTION_ALLOW_BUILTIN", "NODE_FUNCTION_ALLOW_BUILTIN",
"NODE_FUNCTION_ALLOW_EXTERNAL", "NODE_FUNCTION_ALLOW_EXTERNAL",
"NODE_OPTIONS" "NODE_OPTIONS",
"N8N_SENTRY_DSN",
"N8N_VERSION",
"ENVIRONMENT",
"DEPLOYMENT_NAME"
], ],
"uid": 2000, "uid": 2000,
"gid": 2000 "gid": 2000

View file

@ -27,7 +27,8 @@ export interface IUserManagementSettings {
} }
export interface FrontendSettings { export interface FrontendSettings {
isDocker?: boolean; inE2ETests: boolean;
isDocker: boolean;
databaseType: 'sqlite' | 'mariadb' | 'mysqldb' | 'postgresdb'; databaseType: 'sqlite' | 'mariadb' | 'mysqldb' | 'postgresdb';
endpointForm: string; endpointForm: string;
endpointFormTest: string; endpointFormTest: string;

View file

@ -1,4 +1,4 @@
import type { IRun, ITaskData, WorkflowExecuteMode } from 'n8n-workflow'; import type { ITaskData, WorkflowExecuteMode } from 'n8n-workflow';
type ExecutionStarted = { type ExecutionStarted = {
type: 'executionStarted'; type: 'executionStarted';
@ -12,12 +12,17 @@ type ExecutionStarted = {
}; };
}; };
type ExecutionWaiting = {
type: 'executionWaiting';
data: {
executionId: string;
};
};
type ExecutionFinished = { type ExecutionFinished = {
type: 'executionFinished'; type: 'executionFinished';
data: { data: {
executionId: string; executionId: string;
data: IRun;
retryOf?: string;
}; };
}; };
@ -47,6 +52,7 @@ type NodeExecuteAfter = {
export type ExecutionPushMessage = export type ExecutionPushMessage =
| ExecutionStarted | ExecutionStarted
| ExecutionWaiting
| ExecutionFinished | ExecutionFinished
| ExecutionRecovered | ExecutionRecovered
| NodeExecuteBefore | NodeExecuteBefore

View file

@ -0,0 +1,30 @@
import { Config, Env, Nested } from '../decorators';
@Config
class PostHogConfig {
/** API key for PostHog. */
@Env('N8N_DIAGNOSTICS_POSTHOG_API_KEY')
apiKey: string = 'phc_4URIAm1uYfJO7j8kWSe0J8lc8IqnstRLS7Jx8NcakHo';
/** API host for PostHog. */
@Env('N8N_DIAGNOSTICS_POSTHOG_API_HOST')
apiHost: string = 'https://ph.n8n.io';
}
@Config
export class DiagnosticsConfig {
/** Whether diagnostics are enabled. */
@Env('N8N_DIAGNOSTICS_ENABLED')
enabled: boolean = false;
/** Diagnostics config for frontend. */
@Env('N8N_DIAGNOSTICS_CONFIG_FRONTEND')
frontendConfig: string = '1zPn9bgWPzlQc0p8Gj1uiK6DOTn;https://telemetry.n8n.io';
/** Diagnostics config for backend. */
@Env('N8N_DIAGNOSTICS_CONFIG_BACKEND')
backendConfig: string = '1zPn7YoGC3ZXE9zLeTKLuQCB4F6;https://telemetry.n8n.io';
@Nested
posthogConfig: PostHogConfig;
}

View file

@ -53,4 +53,12 @@ export class TaskRunnersConfig {
/** Should the output of deduplication be asserted for correctness */ /** Should the output of deduplication be asserted for correctness */
@Env('N8N_RUNNERS_ASSERT_DEDUPLICATION_OUTPUT') @Env('N8N_RUNNERS_ASSERT_DEDUPLICATION_OUTPUT')
assertDeduplicationOutput: boolean = false; assertDeduplicationOutput: boolean = false;
/** How long (in seconds) a task is allowed to take for completion, else the task will be aborted and the runner restarted. Must be greater than 0. */
@Env('N8N_RUNNERS_TASK_TIMEOUT')
taskTimeout: number = 60;
/** How often (in seconds) the runner must send a heartbeat to the broker, else the task will be aborted and the runner restarted. Must be greater than 0. */
@Env('N8N_RUNNERS_HEARTBEAT_INTERVAL')
heartbeatInterval: number = 30;
} }

View file

@ -1,6 +1,7 @@
import { CacheConfig } from './configs/cache.config'; import { CacheConfig } from './configs/cache.config';
import { CredentialsConfig } from './configs/credentials.config'; import { CredentialsConfig } from './configs/credentials.config';
import { DatabaseConfig } from './configs/database.config'; import { DatabaseConfig } from './configs/database.config';
import { DiagnosticsConfig } from './configs/diagnostics.config';
import { EndpointsConfig } from './configs/endpoints.config'; import { EndpointsConfig } from './configs/endpoints.config';
import { EventBusConfig } from './configs/event-bus.config'; import { EventBusConfig } from './configs/event-bus.config';
import { ExternalSecretsConfig } from './configs/external-secrets.config'; import { ExternalSecretsConfig } from './configs/external-secrets.config';
@ -117,4 +118,7 @@ export class GlobalConfig {
@Nested @Nested
pruning: PruningConfig; pruning: PruningConfig;
@Nested
diagnostics: DiagnosticsConfig;
} }

View file

@ -234,6 +234,8 @@ describe('GlobalConfig', () => {
maxOldSpaceSize: '', maxOldSpaceSize: '',
maxConcurrency: 5, maxConcurrency: 5,
assertDeduplicationOutput: false, assertDeduplicationOutput: false,
taskTimeout: 60,
heartbeatInterval: 30,
}, },
sentry: { sentry: {
backendDsn: '', backendDsn: '',
@ -280,6 +282,15 @@ describe('GlobalConfig', () => {
hardDeleteInterval: 15, hardDeleteInterval: 15,
softDeleteInterval: 60, softDeleteInterval: 60,
}, },
diagnostics: {
enabled: false,
frontendConfig: '1zPn9bgWPzlQc0p8Gj1uiK6DOTn;https://telemetry.n8n.io',
backendConfig: '1zPn7YoGC3ZXE9zLeTKLuQCB4F6;https://telemetry.n8n.io',
posthogConfig: {
apiKey: 'phc_4URIAm1uYfJO7j8kWSe0J8lc8IqnstRLS7Jx8NcakHo',
apiHost: 'https://ph.n8n.io',
},
},
}; };
it('should use all default values when no env variables are defined', () => { it('should use all default values when no env variables are defined', () => {

View file

@ -87,6 +87,36 @@ export class EmbeddingsAzureOpenAi implements INodeType {
'Maximum amount of time a request is allowed to take in seconds. Set to -1 for no timeout.', 'Maximum amount of time a request is allowed to take in seconds. Set to -1 for no timeout.',
type: 'number', type: 'number',
}, },
{
displayName: 'Dimensions',
name: 'dimensions',
default: undefined,
description:
'The number of dimensions the resulting output embeddings should have. Only supported in text-embedding-3 and later models.',
type: 'options',
options: [
{
name: '256',
value: 256,
},
{
name: '512',
value: 512,
},
{
name: '1024',
value: 1024,
},
{
name: '1536',
value: 1536,
},
{
name: '3072',
value: 3072,
},
],
},
], ],
}, },
], ],
@ -105,6 +135,7 @@ export class EmbeddingsAzureOpenAi implements INodeType {
batchSize?: number; batchSize?: number;
stripNewLines?: boolean; stripNewLines?: boolean;
timeout?: number; timeout?: number;
dimensions?: number | undefined;
}; };
if (options.timeout === -1) { if (options.timeout === -1) {

View file

@ -135,6 +135,36 @@ export class EmbeddingsOpenAi implements INodeType {
type: 'collection', type: 'collection',
default: {}, default: {},
options: [ options: [
{
displayName: 'Dimensions',
name: 'dimensions',
default: undefined,
description:
'The number of dimensions the resulting output embeddings should have. Only supported in text-embedding-3 and later models.',
type: 'options',
options: [
{
name: '256',
value: 256,
},
{
name: '512',
value: 512,
},
{
name: '1024',
value: 1024,
},
{
name: '1536',
value: 1536,
},
{
name: '3072',
value: 3072,
},
],
},
{ {
displayName: 'Base URL', displayName: 'Base URL',
name: 'baseURL', name: 'baseURL',
@ -179,6 +209,7 @@ export class EmbeddingsOpenAi implements INodeType {
batchSize?: number; batchSize?: number;
stripNewLines?: boolean; stripNewLines?: boolean;
timeout?: number; timeout?: number;
dimensions?: number | undefined;
}; };
if (options.timeout === -1) { if (options.timeout === -1) {

View file

@ -10,6 +10,7 @@ import type {
INodeTypeDescription, INodeTypeDescription,
SupplyData, SupplyData,
INodeParameterResourceLocator, INodeParameterResourceLocator,
ExecuteWorkflowData,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { BaseRetriever, type BaseRetrieverInput } from '@langchain/core/retrievers'; import { BaseRetriever, type BaseRetrieverInput } from '@langchain/core/retrievers';
@ -293,6 +294,8 @@ export class RetrieverWorkflow implements INodeType {
}; };
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> { async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const workflowProxy = this.getWorkflowDataProxy(0);
class WorkflowRetriever extends BaseRetriever { class WorkflowRetriever extends BaseRetriever {
lc_namespace = ['n8n-nodes-langchain', 'retrievers', 'workflow']; lc_namespace = ['n8n-nodes-langchain', 'retrievers', 'workflow'];
@ -349,6 +352,9 @@ export class RetrieverWorkflow implements INodeType {
}, },
); );
} }
// same as current workflow
baseMetadata.workflowId = workflowProxy.$workflow.id;
} }
const rawData: IDataObject = { query }; const rawData: IDataObject = { query };
@ -384,21 +390,29 @@ export class RetrieverWorkflow implements INodeType {
const items = [newItem] as INodeExecutionData[]; const items = [newItem] as INodeExecutionData[];
let receivedItems: INodeExecutionData[][]; let receivedData: ExecuteWorkflowData;
try { try {
receivedItems = (await this.executeFunctions.executeWorkflow( receivedData = await this.executeFunctions.executeWorkflow(
workflowInfo, workflowInfo,
items, items,
config?.getChild(), config?.getChild(),
)) as INodeExecutionData[][]; {
parentExecution: {
executionId: workflowProxy.$execution.id,
workflowId: workflowProxy.$workflow.id,
},
},
);
} catch (error) { } catch (error) {
// Make sure a valid error gets returned that can by json-serialized else it will // Make sure a valid error gets returned that can by json-serialized else it will
// not show up in the frontend // not show up in the frontend
throw new NodeOperationError(this.executeFunctions.getNode(), error as Error); throw new NodeOperationError(this.executeFunctions.getNode(), error as Error);
} }
const receivedItems = receivedData.data?.[0] ?? [];
const returnData: Document[] = []; const returnData: Document[] = [];
for (const [index, itemData] of receivedItems[0].entries()) { for (const [index, itemData] of receivedItems.entries()) {
const pageContent = objectToString(itemData.json); const pageContent = objectToString(itemData.json);
returnData.push( returnData.push(
new Document({ new Document({
@ -406,6 +420,7 @@ export class RetrieverWorkflow implements INodeType {
metadata: { metadata: {
...baseMetadata, ...baseMetadata,
itemIndex: index, itemIndex: index,
executionId: receivedData.executionId,
}, },
}), }),
); );

View file

@ -14,8 +14,10 @@ import type {
ISupplyDataFunctions, ISupplyDataFunctions,
SupplyData, SupplyData,
ExecutionError, ExecutionError,
ExecuteWorkflowData,
IDataObject, IDataObject,
INodeParameterResourceLocator, INodeParameterResourceLocator,
ITaskMetadata,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow';
@ -358,9 +360,14 @@ export class ToolWorkflow implements INodeType {
}; };
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> { async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const workflowProxy = this.getWorkflowDataProxy(0);
const name = this.getNodeParameter('name', itemIndex) as string; const name = this.getNodeParameter('name', itemIndex) as string;
const description = this.getNodeParameter('description', itemIndex) as string; const description = this.getNodeParameter('description', itemIndex) as string;
let subExecutionId: string | undefined;
let subWorkflowId: string | undefined;
const useSchema = this.getNodeParameter('specifyInputSchema', itemIndex) as boolean; const useSchema = this.getNodeParameter('specifyInputSchema', itemIndex) as boolean;
let tool: DynamicTool | DynamicStructuredTool | undefined = undefined; let tool: DynamicTool | DynamicStructuredTool | undefined = undefined;
@ -396,11 +403,16 @@ export class ToolWorkflow implements INodeType {
) as INodeParameterResourceLocator; ) as INodeParameterResourceLocator;
workflowInfo.id = value as string; workflowInfo.id = value as string;
} }
subWorkflowId = workflowInfo.id;
} else if (source === 'parameter') { } else if (source === 'parameter') {
// Read workflow from parameter // Read workflow from parameter
const workflowJson = this.getNodeParameter('workflowJson', itemIndex) as string; const workflowJson = this.getNodeParameter('workflowJson', itemIndex) as string;
try { try {
workflowInfo.code = JSON.parse(workflowJson) as IWorkflowBase; workflowInfo.code = JSON.parse(workflowJson) as IWorkflowBase;
// subworkflow is same as parent workflow
subWorkflowId = workflowProxy.$workflow.id;
} catch (error) { } catch (error) {
throw new NodeOperationError( throw new NodeOperationError(
this.getNode(), this.getNode(),
@ -440,13 +452,15 @@ export class ToolWorkflow implements INodeType {
const items = [newItem] as INodeExecutionData[]; const items = [newItem] as INodeExecutionData[];
let receivedData: INodeExecutionData; let receivedData: ExecuteWorkflowData;
try { try {
receivedData = (await this.executeWorkflow( receivedData = await this.executeWorkflow(workflowInfo, items, runManager?.getChild(), {
workflowInfo, parentExecution: {
items, executionId: workflowProxy.$execution.id,
runManager?.getChild(), workflowId: workflowProxy.$workflow.id,
)) as INodeExecutionData; },
});
subExecutionId = receivedData.executionId;
} catch (error) { } catch (error) {
// Make sure a valid error gets returned that can by json-serialized else it will // Make sure a valid error gets returned that can by json-serialized else it will
// not show up in the frontend // not show up in the frontend
@ -454,6 +468,7 @@ export class ToolWorkflow implements INodeType {
} }
const response: string | undefined = get(receivedData, [ const response: string | undefined = get(receivedData, [
'data',
0, 0,
0, 0,
'json', 'json',
@ -503,10 +518,25 @@ export class ToolWorkflow implements INodeType {
response = `There was an error: "${executionError.message}"`; response = `There was an error: "${executionError.message}"`;
} }
let metadata: ITaskMetadata | undefined;
if (subExecutionId && subWorkflowId) {
metadata = {
subExecution: {
executionId: subExecutionId,
workflowId: subWorkflowId,
},
};
}
if (executionError) { if (executionError) {
void this.addOutputData(NodeConnectionType.AiTool, index, executionError); void this.addOutputData(NodeConnectionType.AiTool, index, executionError, metadata);
} else { } else {
void this.addOutputData(NodeConnectionType.AiTool, index, [[{ json: { response } }]]); void this.addOutputData(
NodeConnectionType.AiTool,
index,
[[{ json: { response } }]],
metadata,
);
} }
return response; return response;
}; };

View file

@ -10,7 +10,12 @@ import type { Tool } from '@langchain/core/tools';
import { VectorStore } from '@langchain/core/vectorstores'; import { VectorStore } from '@langchain/core/vectorstores';
import { TextSplitter } from '@langchain/textsplitters'; import { TextSplitter } from '@langchain/textsplitters';
import type { BaseDocumentLoader } from 'langchain/dist/document_loaders/base'; import type { BaseDocumentLoader } from 'langchain/dist/document_loaders/base';
import type { IExecuteFunctions, INodeExecutionData, ISupplyDataFunctions } from 'n8n-workflow'; import type {
IExecuteFunctions,
INodeExecutionData,
ISupplyDataFunctions,
ITaskMetadata,
} from 'n8n-workflow';
import { NodeOperationError, NodeConnectionType } from 'n8n-workflow'; import { NodeOperationError, NodeConnectionType } from 'n8n-workflow';
import { logAiEvent, isToolsInstance, isBaseChatMemory, isBaseChatMessageHistory } from './helpers'; import { logAiEvent, isToolsInstance, isBaseChatMemory, isBaseChatMessageHistory } from './helpers';
@ -220,8 +225,24 @@ export function logWrapper(
arguments: [query, config], arguments: [query, config],
})) as Array<Document<Record<string, any>>>; })) as Array<Document<Record<string, any>>>;
const executionId: string | undefined = response[0]?.metadata?.executionId as string;
const workflowId: string | undefined = response[0]?.metadata?.workflowId as string;
const metadata: ITaskMetadata = {};
if (executionId && workflowId) {
metadata.subExecution = {
executionId,
workflowId,
};
}
logAiEvent(executeFunctions, 'ai-documents-retrieved', { query }); logAiEvent(executeFunctions, 'ai-documents-retrieved', { query });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); executeFunctions.addOutputData(
connectionType,
index,
[[{ json: { response } }]],
metadata,
);
return response; return response;
}; };
} }

View file

@ -35,6 +35,8 @@
}, },
"dependencies": { "dependencies": {
"@n8n/config": "workspace:*", "@n8n/config": "workspace:*",
"@sentry/integrations": "catalog:",
"@sentry/node": "catalog:",
"acorn": "8.14.0", "acorn": "8.14.0",
"acorn-walk": "8.3.4", "acorn-walk": "8.3.4",
"n8n-core": "workspace:*", "n8n-core": "workspace:*",

View file

@ -0,0 +1,31 @@
import { mock } from 'jest-mock-extended';
import { ApplicationError } from 'n8n-workflow';
import { ErrorReporter } from '../error-reporter';
describe('ErrorReporter', () => {
const errorReporting = new ErrorReporter(mock());
describe('beforeSend', () => {
it('should return null if originalException is an ApplicationError with level warning', () => {
const hint = { originalException: new ApplicationError('Test error', { level: 'warning' }) };
expect(errorReporting.beforeSend(mock(), hint)).toBeNull();
});
it('should return event if originalException is an ApplicationError with level error', () => {
const hint = { originalException: new ApplicationError('Test error', { level: 'error' }) };
expect(errorReporting.beforeSend(mock(), hint)).not.toBeNull();
});
it('should return null if originalException is an Error with a non-unique stack', () => {
const hint = { originalException: new Error('Test error') };
errorReporting.beforeSend(mock(), hint);
expect(errorReporting.beforeSend(mock(), hint)).toBeNull();
});
it('should return event if originalException is an Error with a unique stack', () => {
const hint = { originalException: new Error('Test error') };
expect(errorReporting.beforeSend(mock(), hint)).not.toBeNull();
});
});
});

View file

@ -1,4 +1,16 @@
import { Config, Env } from '@n8n/config'; import { Config, Env, Nested } from '@n8n/config';
@Config
class HealthcheckServerConfig {
@Env('N8N_RUNNERS_SERVER_ENABLED')
enabled: boolean = false;
@Env('N8N_RUNNERS_SERVER_HOST')
host: string = '127.0.0.1';
@Env('N8N_RUNNERS_SERVER_PORT')
port: number = 5680;
}
@Config @Config
export class BaseRunnerConfig { export class BaseRunnerConfig {
@ -13,4 +25,7 @@ export class BaseRunnerConfig {
@Env('N8N_RUNNERS_MAX_CONCURRENCY') @Env('N8N_RUNNERS_MAX_CONCURRENCY')
maxConcurrency: number = 5; maxConcurrency: number = 5;
@Nested
healthcheckServer!: HealthcheckServerConfig;
} }

View file

@ -2,6 +2,7 @@ import { Config, Nested } from '@n8n/config';
import { BaseRunnerConfig } from './base-runner-config'; import { BaseRunnerConfig } from './base-runner-config';
import { JsRunnerConfig } from './js-runner-config'; import { JsRunnerConfig } from './js-runner-config';
import { SentryConfig } from './sentry-config';
@Config @Config
export class MainConfig { export class MainConfig {
@ -10,4 +11,7 @@ export class MainConfig {
@Nested @Nested
jsRunnerConfig!: JsRunnerConfig; jsRunnerConfig!: JsRunnerConfig;
@Nested
sentryConfig!: SentryConfig;
} }

View file

@ -0,0 +1,21 @@
import { Config, Env } from '@n8n/config';
@Config
export class SentryConfig {
/** Sentry DSN */
@Env('N8N_SENTRY_DSN')
sentryDsn: string = '';
//#region Metadata about the environment
@Env('N8N_VERSION')
n8nVersion: string = '';
@Env('ENVIRONMENT')
environment: string = '';
@Env('DEPLOYMENT_NAME')
deploymentName: string = '';
//#endregion
}

View file

@ -0,0 +1,93 @@
import { RewriteFrames } from '@sentry/integrations';
import { init, setTag, captureException, close } from '@sentry/node';
import type { ErrorEvent, EventHint } from '@sentry/types';
import * as a from 'assert/strict';
import { createHash } from 'crypto';
import { ApplicationError } from 'n8n-workflow';
import type { SentryConfig } from '@/config/sentry-config';
/**
* Handles error reporting using Sentry
*/
export class ErrorReporter {
private isInitialized = false;
/** Hashes of error stack traces, to deduplicate error reports. */
private readonly seenErrors = new Set<string>();
private get dsn() {
return this.sentryConfig.sentryDsn;
}
constructor(private readonly sentryConfig: SentryConfig) {
a.ok(this.dsn, 'Sentry DSN is required to initialize Sentry');
}
async start() {
if (this.isInitialized) return;
// Collect longer stacktraces
Error.stackTraceLimit = 50;
process.on('uncaughtException', captureException);
const ENABLED_INTEGRATIONS = [
'InboundFilters',
'FunctionToString',
'LinkedErrors',
'OnUnhandledRejection',
'ContextLines',
];
setTag('server_type', 'task_runner');
init({
dsn: this.dsn,
release: this.sentryConfig.n8nVersion,
environment: this.sentryConfig.environment,
enableTracing: false,
serverName: this.sentryConfig.deploymentName,
beforeBreadcrumb: () => null,
beforeSend: async (event, hint) => await this.beforeSend(event, hint),
integrations: (integrations) => [
...integrations.filter(({ name }) => ENABLED_INTEGRATIONS.includes(name)),
new RewriteFrames({ root: process.cwd() }),
],
});
this.isInitialized = true;
}
async stop() {
if (!this.isInitialized) {
return;
}
await close(1000);
}
async beforeSend(event: ErrorEvent, { originalException }: EventHint) {
if (!originalException) return null;
if (originalException instanceof Promise) {
originalException = await originalException.catch((error) => error as Error);
}
if (originalException instanceof ApplicationError) {
const { level, extra, tags } = originalException;
if (level === 'warning') return null;
event.level = level;
if (extra) event.extra = { ...event.extra, ...extra };
if (tags) event.tags = { ...event.tags, ...tags };
}
if (originalException instanceof Error && originalException.stack) {
const eventHash = createHash('sha1').update(originalException.stack).digest('base64');
if (this.seenErrors.has(eventHash)) return null;
this.seenErrors.add(eventHash);
}
return event;
}
}

View file

@ -0,0 +1,38 @@
import { ApplicationError } from 'n8n-workflow';
import { createServer } from 'node:http';
export class HealthcheckServer {
private server = createServer((_, res) => {
res.writeHead(200);
res.end('OK');
});
async start(host: string, port: number) {
return await new Promise<void>((resolve, reject) => {
const portInUseErrorHandler = (error: NodeJS.ErrnoException) => {
if (error.code === 'EADDRINUSE') {
reject(new ApplicationError(`Port ${port} is already in use`));
} else {
reject(error);
}
};
this.server.on('error', portInUseErrorHandler);
this.server.listen(port, host, () => {
this.server.removeListener('error', portInUseErrorHandler);
console.log(`Healthcheck server listening on ${host}, port ${port}`);
resolve();
});
});
}
async stop() {
return await new Promise<void>((resolve, reject) => {
this.server.close((error) => {
if (error) reject(error);
else resolve();
});
});
}
}

View file

@ -36,6 +36,12 @@ describe('JsTaskRunner', () => {
...defaultConfig.jsRunnerConfig, ...defaultConfig.jsRunnerConfig,
...opts, ...opts,
}, },
sentryConfig: {
sentryDsn: '',
deploymentName: '',
environment: '',
n8nVersion: '',
},
}); });
const defaultTaskRunner = createRunnerWithOpts(); const defaultTaskRunner = createRunnerWithOpts();

View file

@ -2,10 +2,14 @@ import { ensureError } from 'n8n-workflow';
import Container from 'typedi'; import Container from 'typedi';
import { MainConfig } from './config/main-config'; import { MainConfig } from './config/main-config';
import type { ErrorReporter } from './error-reporter';
import type { HealthcheckServer } from './healthcheck-server';
import { JsTaskRunner } from './js-task-runner/js-task-runner'; import { JsTaskRunner } from './js-task-runner/js-task-runner';
let healthcheckServer: HealthcheckServer | undefined;
let runner: JsTaskRunner | undefined; let runner: JsTaskRunner | undefined;
let isShuttingDown = false; let isShuttingDown = false;
let errorReporter: ErrorReporter | undefined;
function createSignalHandler(signal: string) { function createSignalHandler(signal: string) {
return async function onSignal() { return async function onSignal() {
@ -20,11 +24,18 @@ function createSignalHandler(signal: string) {
if (runner) { if (runner) {
await runner.stop(); await runner.stop();
runner = undefined; runner = undefined;
void healthcheckServer?.stop();
}
if (errorReporter) {
await errorReporter.stop();
errorReporter = undefined;
} }
} catch (e) { } catch (e) {
const error = ensureError(e); const error = ensureError(e);
console.error('Error stopping task runner', { error }); console.error('Error stopping task runner', { error });
} finally { } finally {
console.log('Task runner stopped');
process.exit(0); process.exit(0);
} }
}; };
@ -33,8 +44,22 @@ function createSignalHandler(signal: string) {
void (async function start() { void (async function start() {
const config = Container.get(MainConfig); const config = Container.get(MainConfig);
if (config.sentryConfig.sentryDsn) {
const { ErrorReporter } = await import('@/error-reporter');
errorReporter = new ErrorReporter(config.sentryConfig);
await errorReporter.start();
}
runner = new JsTaskRunner(config); runner = new JsTaskRunner(config);
const { enabled, host, port } = config.baseRunnerConfig.healthcheckServer;
if (enabled) {
const { HealthcheckServer } = await import('./healthcheck-server');
healthcheckServer = new HealthcheckServer();
await healthcheckServer.start(host, port);
}
process.on('SIGINT', createSignalHandler('SIGINT')); process.on('SIGINT', createSignalHandler('SIGINT'));
process.on('SIGTERM', createSignalHandler('SIGTERM')); process.on('SIGTERM', createSignalHandler('SIGTERM'));
})().catch((e) => { })().catch((e) => {

View file

@ -25,6 +25,7 @@
"start:default": "cd bin && ./n8n", "start:default": "cd bin && ./n8n",
"start:windows": "cd bin && n8n", "start:windows": "cd bin && n8n",
"test": "pnpm test:sqlite", "test": "pnpm test:sqlite",
"test:dev": "N8N_LOG_LEVEL=silent DB_TYPE=sqlite jest --watch",
"test:sqlite": "N8N_LOG_LEVEL=silent DB_TYPE=sqlite jest", "test:sqlite": "N8N_LOG_LEVEL=silent DB_TYPE=sqlite jest",
"test:postgres": "N8N_LOG_LEVEL=silent DB_TYPE=postgresdb DB_POSTGRESDB_SCHEMA=alt_schema DB_TABLE_PREFIX=test_ jest --no-coverage", "test:postgres": "N8N_LOG_LEVEL=silent DB_TYPE=postgresdb DB_POSTGRESDB_SCHEMA=alt_schema DB_TABLE_PREFIX=test_ jest --no-coverage",
"test:mysql": "N8N_LOG_LEVEL=silent DB_TYPE=mysqldb DB_TABLE_PREFIX=test_ jest --no-coverage", "test:mysql": "N8N_LOG_LEVEL=silent DB_TYPE=mysqldb DB_TABLE_PREFIX=test_ jest --no-coverage",
@ -97,8 +98,8 @@
"@n8n_io/license-sdk": "2.13.1", "@n8n_io/license-sdk": "2.13.1",
"@oclif/core": "4.0.7", "@oclif/core": "4.0.7",
"@rudderstack/rudder-sdk-node": "2.0.9", "@rudderstack/rudder-sdk-node": "2.0.9",
"@sentry/integrations": "7.87.0", "@sentry/integrations": "catalog:",
"@sentry/node": "7.87.0", "@sentry/node": "catalog:",
"aws4": "1.11.0", "aws4": "1.11.0",
"axios": "catalog:", "axios": "catalog:",
"bcryptjs": "2.4.3", "bcryptjs": "2.4.3",
@ -122,7 +123,7 @@
"express-rate-limit": "7.2.0", "express-rate-limit": "7.2.0",
"fast-glob": "catalog:", "fast-glob": "catalog:",
"flat": "5.0.2", "flat": "5.0.2",
"flatted": "3.2.7", "flatted": "catalog:",
"formidable": "3.5.1", "formidable": "3.5.1",
"handlebars": "4.7.8", "handlebars": "4.7.8",
"helmet": "7.1.0", "helmet": "7.1.0",

View file

@ -1,9 +1,11 @@
import { mock } from 'jest-mock-extended'; import { mock } from 'jest-mock-extended';
import type { import type { IWorkflowBase } from 'n8n-workflow';
IExecuteWorkflowInfo, import {
IWorkflowExecuteAdditionalData, type IExecuteWorkflowInfo,
ExecuteWorkflowOptions, type IWorkflowExecuteAdditionalData,
IRun, type ExecuteWorkflowOptions,
type IRun,
type INodeExecutionData,
} from 'n8n-workflow'; } from 'n8n-workflow';
import type PCancelable from 'p-cancelable'; import type PCancelable from 'p-cancelable';
import Container from 'typedi'; import Container from 'typedi';
@ -21,43 +23,59 @@ import { WorkflowStatisticsService } from '@/services/workflow-statistics.servic
import { SubworkflowPolicyChecker } from '@/subworkflows/subworkflow-policy-checker.service'; import { SubworkflowPolicyChecker } from '@/subworkflows/subworkflow-policy-checker.service';
import { Telemetry } from '@/telemetry'; import { Telemetry } from '@/telemetry';
import { PermissionChecker } from '@/user-management/permission-checker'; import { PermissionChecker } from '@/user-management/permission-checker';
import { executeWorkflow, getBase } from '@/workflow-execute-additional-data'; import { executeWorkflow, getBase, getRunData } from '@/workflow-execute-additional-data';
import { mockInstance } from '@test/mocking'; import { mockInstance } from '@test/mocking';
const run = mock<IRun>({ const EXECUTION_ID = '123';
data: { resultData: {} }, const LAST_NODE_EXECUTED = 'Last node executed';
finished: true,
mode: 'manual',
startedAt: new Date(),
status: 'new',
});
const cancelablePromise = mock<PCancelable<IRun>>({ const getMockRun = ({ lastNodeOutput }: { lastNodeOutput: Array<INodeExecutionData[] | null> }) =>
then: jest mock<IRun>({
.fn() data: {
.mockImplementation(async (onfulfilled) => await Promise.resolve(run).then(onfulfilled)), resultData: {
catch: jest runData: {
.fn() [LAST_NODE_EXECUTED]: [
.mockImplementation(async (onrejected) => await Promise.resolve(run).catch(onrejected)), {
finally: jest startTime: 100,
.fn() data: {
.mockImplementation(async (onfinally) => await Promise.resolve(run).finally(onfinally)), main: lastNodeOutput,
[Symbol.toStringTag]: 'PCancelable', },
}); },
],
},
lastNodeExecuted: LAST_NODE_EXECUTED,
},
},
finished: true,
mode: 'manual',
startedAt: new Date(),
status: 'new',
});
const getCancelablePromise = async (run: IRun) =>
await mock<PCancelable<IRun>>({
then: jest
.fn()
.mockImplementation(async (onfulfilled) => await Promise.resolve(run).then(onfulfilled)),
catch: jest
.fn()
.mockImplementation(async (onrejected) => await Promise.resolve(run).catch(onrejected)),
finally: jest
.fn()
.mockImplementation(async (onfinally) => await Promise.resolve(run).finally(onfinally)),
[Symbol.toStringTag]: 'PCancelable',
});
const processRunExecutionData = jest.fn();
jest.mock('n8n-core', () => ({ jest.mock('n8n-core', () => ({
__esModule: true, __esModule: true,
...jest.requireActual('n8n-core'), ...jest.requireActual('n8n-core'),
WorkflowExecute: jest.fn().mockImplementation(() => ({ WorkflowExecute: jest.fn().mockImplementation(() => ({
processRunExecutionData: jest.fn().mockReturnValue(cancelablePromise), processRunExecutionData,
})), })),
})); }));
jest.mock('../workflow-helpers', () => ({
...jest.requireActual('../workflow-helpers'),
getDataLastExecutedNodeData: jest.fn().mockReturnValue({ data: { main: [] } }),
}));
describe('WorkflowExecuteAdditionalData', () => { describe('WorkflowExecuteAdditionalData', () => {
const variablesService = mockInstance(VariablesService); const variablesService = mockInstance(VariablesService);
variablesService.getAllCached.mockResolvedValue([]); variablesService.getAllCached.mockResolvedValue([]);
@ -95,17 +113,129 @@ describe('WorkflowExecuteAdditionalData', () => {
expect(eventService.emit).toHaveBeenCalledWith(eventName, payload); expect(eventService.emit).toHaveBeenCalledWith(eventName, payload);
}); });
it('`executeWorkflow` should set subworkflow execution as running', async () => { describe('executeWorkflow', () => {
const executionId = '123'; const runWithData = getMockRun({ lastNodeOutput: [[{ json: { test: 1 } }]] });
workflowRepository.get.mockResolvedValue(mock<WorkflowEntity>({ id: executionId, nodes: [] }));
activeExecutions.add.mockResolvedValue(executionId);
await executeWorkflow( beforeEach(() => {
mock<IExecuteWorkflowInfo>(), workflowRepository.get.mockResolvedValue(
mock<IWorkflowExecuteAdditionalData>(), mock<WorkflowEntity>({ id: EXECUTION_ID, nodes: [] }),
mock<ExecuteWorkflowOptions>({ loadedWorkflowData: undefined }), );
); activeExecutions.add.mockResolvedValue(EXECUTION_ID);
processRunExecutionData.mockReturnValue(getCancelablePromise(runWithData));
});
expect(executionRepository.setRunning).toHaveBeenCalledWith(executionId); it('should execute workflow, return data and execution id', async () => {
const response = await executeWorkflow(
mock<IExecuteWorkflowInfo>(),
mock<IWorkflowExecuteAdditionalData>(),
mock<ExecuteWorkflowOptions>({ loadedWorkflowData: undefined, doNotWaitToFinish: false }),
);
expect(response).toEqual({
data: runWithData.data.resultData.runData[LAST_NODE_EXECUTED][0].data!.main,
executionId: EXECUTION_ID,
});
});
it('should execute workflow, skip waiting', async () => {
const response = await executeWorkflow(
mock<IExecuteWorkflowInfo>(),
mock<IWorkflowExecuteAdditionalData>(),
mock<ExecuteWorkflowOptions>({ loadedWorkflowData: undefined, doNotWaitToFinish: true }),
);
expect(response).toEqual({
data: [null],
executionId: EXECUTION_ID,
});
});
it('should set sub workflow execution as running', async () => {
await executeWorkflow(
mock<IExecuteWorkflowInfo>(),
mock<IWorkflowExecuteAdditionalData>(),
mock<ExecuteWorkflowOptions>({ loadedWorkflowData: undefined }),
);
expect(executionRepository.setRunning).toHaveBeenCalledWith(EXECUTION_ID);
});
});
describe('getRunData', () => {
it('should throw error to add trigger ndoe', async () => {
const workflow = mock<IWorkflowBase>({
id: '1',
name: 'test',
nodes: [],
active: false,
});
await expect(getRunData(workflow)).rejects.toThrowError('Missing node to start execution');
});
const workflow = mock<IWorkflowBase>({
id: '1',
name: 'test',
nodes: [
{
type: 'n8n-nodes-base.executeWorkflowTrigger',
},
],
active: false,
});
it('should return default data', async () => {
expect(await getRunData(workflow)).toEqual({
executionData: {
executionData: {
contextData: {},
metadata: {},
nodeExecutionStack: [
{
data: { main: [[{ json: {} }]] },
metadata: { parentExecution: undefined },
node: workflow.nodes[0],
source: null,
},
],
waitingExecution: {},
waitingExecutionSource: {},
},
resultData: { runData: {} },
startData: {},
},
executionMode: 'integrated',
workflowData: workflow,
});
});
it('should return run data with input data and metadata', async () => {
const data = [{ json: { test: 1 } }];
const parentExecution = {
executionId: '123',
workflowId: '567',
};
expect(await getRunData(workflow, data, parentExecution)).toEqual({
executionData: {
executionData: {
contextData: {},
metadata: {},
nodeExecutionStack: [
{
data: { main: [data] },
metadata: { parentExecution },
node: workflow.nodes[0],
source: null,
},
],
waitingExecution: {},
waitingExecutionSource: {},
},
resultData: { runData: {} },
startData: {},
},
executionMode: 'integrated',
workflowData: workflow,
});
});
}); });
}); });

View file

@ -296,43 +296,6 @@ export const schema = {
}, },
}, },
diagnostics: {
enabled: {
doc: 'Whether diagnostic mode is enabled.',
format: Boolean,
default: true,
env: 'N8N_DIAGNOSTICS_ENABLED',
},
config: {
posthog: {
apiKey: {
doc: 'API key for PostHog',
format: String,
default: 'phc_4URIAm1uYfJO7j8kWSe0J8lc8IqnstRLS7Jx8NcakHo',
env: 'N8N_DIAGNOSTICS_POSTHOG_API_KEY',
},
apiHost: {
doc: 'API host for PostHog',
format: String,
default: 'https://ph.n8n.io',
env: 'N8N_DIAGNOSTICS_POSTHOG_API_HOST',
},
},
frontend: {
doc: 'Diagnostics config for frontend.',
format: String,
default: '1zPn9bgWPzlQc0p8Gj1uiK6DOTn;https://telemetry.n8n.io',
env: 'N8N_DIAGNOSTICS_CONFIG_FRONTEND',
},
backend: {
doc: 'Diagnostics config for backend.',
format: String,
default: '1zPn7YoGC3ZXE9zLeTKLuQCB4F6;https://telemetry.n8n.io',
env: 'N8N_DIAGNOSTICS_CONFIG_BACKEND',
},
},
},
defaultLocale: { defaultLocale: {
doc: 'Default locale for the UI', doc: 'Default locale for the UI',
format: String, format: String,

View file

@ -110,25 +110,12 @@ export const UM_FIX_INSTRUCTION =
'Please fix the database by running ./packages/cli/bin/n8n user-management:reset'; 'Please fix the database by running ./packages/cli/bin/n8n user-management:reset';
/** /**
* Units of time in milliseconds * Convert time from any time unit to any other unit
* @deprecated Please use constants.Time instead.
*/
export const TIME = {
SECOND: 1000,
MINUTE: 60 * 1000,
HOUR: 60 * 60 * 1000,
DAY: 24 * 60 * 60 * 1000,
} as const;
/**
* Convert time from any unit to any other unit
*
* Please amend conversions as necessary.
* Eventually this will superseed `TIME` above
*/ */
export const Time = { export const Time = {
milliseconds: { milliseconds: {
toMinutes: 1 / (60 * 1000), toMinutes: 1 / (60 * 1000),
toSeconds: 1 / 1000,
}, },
seconds: { seconds: {
toMilliseconds: 1000, toMilliseconds: 1000,
@ -150,9 +137,9 @@ export const MIN_PASSWORD_CHAR_LENGTH = 8;
export const MAX_PASSWORD_CHAR_LENGTH = 64; export const MAX_PASSWORD_CHAR_LENGTH = 64;
export const TEST_WEBHOOK_TIMEOUT = 2 * TIME.MINUTE; export const TEST_WEBHOOK_TIMEOUT = 2 * Time.minutes.toMilliseconds;
export const TEST_WEBHOOK_TIMEOUT_BUFFER = 30 * TIME.SECOND; export const TEST_WEBHOOK_TIMEOUT_BUFFER = 30 * Time.seconds.toMilliseconds;
export const GENERIC_OAUTH2_CREDENTIALS_WITH_EDITABLE_SCOPE = [ export const GENERIC_OAUTH2_CREDENTIALS_WITH_EDITABLE_SCOPE = [
'oAuth2Api', 'oAuth2Api',

View file

@ -1,18 +1,10 @@
import { import { Column, Entity, Index, ManyToOne, RelationId } from '@n8n/typeorm';
Column,
Entity,
Generated,
Index,
ManyToOne,
PrimaryColumn,
RelationId,
} from '@n8n/typeorm';
import { Length } from 'class-validator'; import { Length } from 'class-validator';
import { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity.ee'; import { AnnotationTagEntity } from '@/databases/entities/annotation-tag-entity.ee';
import { WorkflowEntity } from '@/databases/entities/workflow-entity'; import { WorkflowEntity } from '@/databases/entities/workflow-entity';
import { WithTimestamps } from './abstract-entity'; import { WithTimestampsAndStringId } from './abstract-entity';
/** /**
* Entity representing a Test Definition * Entity representing a Test Definition
@ -24,11 +16,7 @@ import { WithTimestamps } from './abstract-entity';
@Entity() @Entity()
@Index(['workflow']) @Index(['workflow'])
@Index(['evaluationWorkflow']) @Index(['evaluationWorkflow'])
export class TestDefinition extends WithTimestamps { export class TestDefinition extends WithTimestampsAndStringId {
@Generated()
@PrimaryColumn()
id: number;
@Column({ length: 255 }) @Column({ length: 255 })
@Length(1, 255, { @Length(1, 255, {
message: 'Test definition name must be $constraint1 to $constraint2 characters long.', message: 'Test definition name must be $constraint1 to $constraint2 characters long.',

View file

@ -0,0 +1,18 @@
import type { MigrationContext, IrreversibleMigration } from '@/databases/types';
export class MigrateTestDefinitionKeyToString1731582748663 implements IrreversibleMigration {
async up(context: MigrationContext) {
const { queryRunner, tablePrefix } = context;
await queryRunner.query(
`ALTER TABLE ${tablePrefix}test_definition CHANGE id tmp_id int NOT NULL AUTO_INCREMENT;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}test_definition ADD COLUMN id varchar(36) NOT NULL;`,
);
await queryRunner.query(`UPDATE ${tablePrefix}test_definition SET id = CONVERT(tmp_id, CHAR);`);
await queryRunner.query(
`CREATE INDEX \`TMP_idx_${tablePrefix}test_definition_id\` ON ${tablePrefix}test_definition (\`id\`);`,
);
}
}

View file

@ -43,6 +43,7 @@ import { MigrateIntegerKeysToString1690000000001 } from './1690000000001-Migrate
import { SeparateExecutionData1690000000030 } from './1690000000030-SeparateExecutionData'; import { SeparateExecutionData1690000000030 } from './1690000000030-SeparateExecutionData';
import { FixExecutionDataType1690000000031 } from './1690000000031-FixExecutionDataType'; import { FixExecutionDataType1690000000031 } from './1690000000031-FixExecutionDataType';
import { AddActivatedAtUserSetting1717498465931 } from './1717498465931-AddActivatedAtUserSetting'; import { AddActivatedAtUserSetting1717498465931 } from './1717498465931-AddActivatedAtUserSetting';
import { MigrateTestDefinitionKeyToString1731582748663 } from './1731582748663-MigrateTestDefinitionKeyToString';
import { CreateLdapEntities1674509946020 } from '../common/1674509946020-CreateLdapEntities'; import { CreateLdapEntities1674509946020 } from '../common/1674509946020-CreateLdapEntities';
import { PurgeInvalidWorkflowConnections1675940580449 } from '../common/1675940580449-PurgeInvalidWorkflowConnections'; import { PurgeInvalidWorkflowConnections1675940580449 } from '../common/1675940580449-PurgeInvalidWorkflowConnections';
import { RemoveResetPasswordColumns1690000000030 } from '../common/1690000000030-RemoveResetPasswordColumns'; import { RemoveResetPasswordColumns1690000000030 } from '../common/1690000000030-RemoveResetPasswordColumns';
@ -142,4 +143,5 @@ export const mysqlMigrations: Migration[] = [
UpdateProcessedDataValueColumnToText1729607673464, UpdateProcessedDataValueColumnToText1729607673464,
CreateTestDefinitionTable1730386903556, CreateTestDefinitionTable1730386903556,
AddDescriptionToTestDefinition1731404028106, AddDescriptionToTestDefinition1731404028106,
MigrateTestDefinitionKeyToString1731582748663,
]; ];

View file

@ -0,0 +1,30 @@
import type { MigrationContext, IrreversibleMigration } from '@/databases/types';
export class MigrateTestDefinitionKeyToString1731582748663 implements IrreversibleMigration {
async up(context: MigrationContext) {
const { queryRunner, tablePrefix } = context;
await queryRunner.query(
`ALTER TABLE ${tablePrefix}test_definition RENAME COLUMN id to tmp_id;`,
);
await queryRunner.query(`ALTER TABLE ${tablePrefix}test_definition ADD COLUMN id varchar(36);`);
await queryRunner.query(`UPDATE ${tablePrefix}test_definition SET id = tmp_id::text;`);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}test_definition ALTER COLUMN id SET NOT NULL;`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}test_definition ALTER COLUMN tmp_id DROP DEFAULT;`,
);
await queryRunner.query(`DROP SEQUENCE IF EXISTS ${tablePrefix}test_definition_id_seq;`);
await queryRunner.query(
`CREATE UNIQUE INDEX "pk_${tablePrefix}test_definition_id" ON ${tablePrefix}test_definition ("id");`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}test_definition DROP CONSTRAINT IF EXISTS "PK_${tablePrefix}245a0013672c8cdc7727afa9b99";`,
);
await queryRunner.query(`ALTER TABLE ${tablePrefix}test_definition DROP COLUMN tmp_id;`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}test_definition ADD PRIMARY KEY (id);`);
}
}

View file

@ -43,6 +43,7 @@ import { AddMissingPrimaryKeyOnExecutionData1690787606731 } from './169078760673
import { MigrateToTimestampTz1694091729095 } from './1694091729095-MigrateToTimestampTz'; import { MigrateToTimestampTz1694091729095 } from './1694091729095-MigrateToTimestampTz';
import { AddActivatedAtUserSetting1717498465931 } from './1717498465931-AddActivatedAtUserSetting'; import { AddActivatedAtUserSetting1717498465931 } from './1717498465931-AddActivatedAtUserSetting';
import { FixExecutionMetadataSequence1721377157740 } from './1721377157740-FixExecutionMetadataSequence'; import { FixExecutionMetadataSequence1721377157740 } from './1721377157740-FixExecutionMetadataSequence';
import { MigrateTestDefinitionKeyToString1731582748663 } from './1731582748663-MigrateTestDefinitionKeyToString';
import { CreateLdapEntities1674509946020 } from '../common/1674509946020-CreateLdapEntities'; import { CreateLdapEntities1674509946020 } from '../common/1674509946020-CreateLdapEntities';
import { PurgeInvalidWorkflowConnections1675940580449 } from '../common/1675940580449-PurgeInvalidWorkflowConnections'; import { PurgeInvalidWorkflowConnections1675940580449 } from '../common/1675940580449-PurgeInvalidWorkflowConnections';
import { RemoveResetPasswordColumns1690000000030 } from '../common/1690000000030-RemoveResetPasswordColumns'; import { RemoveResetPasswordColumns1690000000030 } from '../common/1690000000030-RemoveResetPasswordColumns';
@ -142,4 +143,5 @@ export const postgresMigrations: Migration[] = [
UpdateProcessedDataValueColumnToText1729607673464, UpdateProcessedDataValueColumnToText1729607673464,
CreateTestDefinitionTable1730386903556, CreateTestDefinitionTable1730386903556,
AddDescriptionToTestDefinition1731404028106, AddDescriptionToTestDefinition1731404028106,
MigrateTestDefinitionKeyToString1731582748663,
]; ];

View file

@ -0,0 +1,25 @@
import type { MigrationContext, IrreversibleMigration } from '@/databases/types';
export class MigrateTestDefinitionKeyToString1731582748663 implements IrreversibleMigration {
transaction = false as const;
async up(context: MigrationContext) {
const { queryRunner, tablePrefix } = context;
await queryRunner.query(`
CREATE TABLE "${tablePrefix}TMP_test_definition" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(255) NOT NULL, "workflowId" varchar(36) NOT NULL, "evaluationWorkflowId" varchar(36), "annotationTagId" varchar(16), "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "description" text, CONSTRAINT "FK_${tablePrefix}test_definition_annotation_tag" FOREIGN KEY ("annotationTagId") REFERENCES "annotation_tag_entity" ("id") ON DELETE SET NULL ON UPDATE NO ACTION, CONSTRAINT "FK_${tablePrefix}test_definition_evaluation_workflow_entity" FOREIGN KEY ("evaluationWorkflowId") REFERENCES "workflow_entity" ("id") ON DELETE SET NULL ON UPDATE NO ACTION, CONSTRAINT "FK_${tablePrefix}test_definition_workflow_entity" FOREIGN KEY ("workflowId") REFERENCES "workflow_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION);`);
await queryRunner.query(
`INSERT INTO "${tablePrefix}TMP_test_definition" SELECT * FROM "${tablePrefix}test_definition";`,
);
await queryRunner.query(`DROP TABLE "${tablePrefix}test_definition";`);
await queryRunner.query(
`ALTER TABLE "${tablePrefix}TMP_test_definition" RENAME TO "${tablePrefix}test_definition";`,
);
await queryRunner.query(
`CREATE INDEX "idx_${tablePrefix}test_definition_workflow_id" ON "${tablePrefix}test_definition" ("workflowId");`,
);
await queryRunner.query(
`CREATE INDEX "idx_${tablePrefix}test_definition_evaluation_workflow_id" ON "${tablePrefix}test_definition" ("evaluationWorkflowId");`,
);
}
}

View file

@ -40,6 +40,7 @@ import { AddActivatedAtUserSetting1717498465931 } from './1717498465931-AddActiv
import { AddApiKeysTable1724951148974 } from './1724951148974-AddApiKeysTable'; import { AddApiKeysTable1724951148974 } from './1724951148974-AddApiKeysTable';
import { AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644 } from './1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping'; import { AddMissingPrimaryKeyOnAnnotationTagMapping1728659839644 } from './1728659839644-AddMissingPrimaryKeyOnAnnotationTagMapping';
import { AddDescriptionToTestDefinition1731404028106 } from './1731404028106-AddDescriptionToTestDefinition'; import { AddDescriptionToTestDefinition1731404028106 } from './1731404028106-AddDescriptionToTestDefinition';
import { MigrateTestDefinitionKeyToString1731582748663 } from './1731582748663-MigrateTestDefinitionKeyToString';
import { UniqueWorkflowNames1620821879465 } from '../common/1620821879465-UniqueWorkflowNames'; import { UniqueWorkflowNames1620821879465 } from '../common/1620821879465-UniqueWorkflowNames';
import { UpdateWorkflowCredentials1630330987096 } from '../common/1630330987096-UpdateWorkflowCredentials'; import { UpdateWorkflowCredentials1630330987096 } from '../common/1630330987096-UpdateWorkflowCredentials';
import { AddNodeIds1658930531669 } from '../common/1658930531669-AddNodeIds'; import { AddNodeIds1658930531669 } from '../common/1658930531669-AddNodeIds';
@ -136,6 +137,7 @@ const sqliteMigrations: Migration[] = [
UpdateProcessedDataValueColumnToText1729607673464, UpdateProcessedDataValueColumnToText1729607673464,
CreateTestDefinitionTable1730386903556, CreateTestDefinitionTable1730386903556,
AddDescriptionToTestDefinition1731404028106, AddDescriptionToTestDefinition1731404028106,
MigrateTestDefinitionKeyToString1731582748663,
]; ];
export { sqliteMigrations }; export { sqliteMigrations };

View file

@ -37,7 +37,7 @@ export class TestDefinitionRepository extends Repository<TestDefinition> {
return { testDefinitions, count }; return { testDefinitions, count };
} }
async getOne(id: number, accessibleWorkflowIds: string[]) { async getOne(id: string, accessibleWorkflowIds: string[]) {
return await this.findOne({ return await this.findOne({
where: { where: {
id, id,
@ -49,7 +49,7 @@ export class TestDefinitionRepository extends Repository<TestDefinition> {
}); });
} }
async deleteById(id: number, accessibleWorkflowIds: string[]) { async deleteById(id: string, accessibleWorkflowIds: string[]) {
return await this.delete({ return await this.delete({
id, id,
workflow: { workflow: {

View file

@ -30,7 +30,7 @@ export class TestDefinitionService {
workflowId?: string; workflowId?: string;
evaluationWorkflowId?: string; evaluationWorkflowId?: string;
annotationTagId?: string; annotationTagId?: string;
id?: number; id?: string;
}) { }) {
const entity: TestDefinitionLike = {}; const entity: TestDefinitionLike = {};
@ -72,13 +72,13 @@ export class TestDefinitionService {
workflowId?: string; workflowId?: string;
evaluationWorkflowId?: string; evaluationWorkflowId?: string;
annotationTagId?: string; annotationTagId?: string;
id?: number; id?: string;
}) { }) {
const entity = this.toEntityLike(attrs); const entity = this.toEntityLike(attrs);
return this.testDefinitionRepository.create(entity); return this.testDefinitionRepository.create(entity);
} }
async findOne(id: number, accessibleWorkflowIds: string[]) { async findOne(id: string, accessibleWorkflowIds: string[]) {
return await this.testDefinitionRepository.getOne(id, accessibleWorkflowIds); return await this.testDefinitionRepository.getOne(id, accessibleWorkflowIds);
} }
@ -88,7 +88,7 @@ export class TestDefinitionService {
return await this.testDefinitionRepository.save(test); return await this.testDefinitionRepository.save(test);
} }
async update(id: number, attrs: TestDefinitionLike) { async update(id: string, attrs: TestDefinitionLike) {
if (attrs.name) { if (attrs.name) {
const updatedTest = this.toEntity(attrs); const updatedTest = this.toEntity(attrs);
await validateEntity(updatedTest); await validateEntity(updatedTest);
@ -115,7 +115,7 @@ export class TestDefinitionService {
} }
} }
async delete(id: number, accessibleWorkflowIds: string[]) { async delete(id: string, accessibleWorkflowIds: string[]) {
const deleteResult = await this.testDefinitionRepository.deleteById(id, accessibleWorkflowIds); const deleteResult = await this.testDefinitionRepository.deleteById(id, accessibleWorkflowIds);
if (deleteResult.affected === 0) { if (deleteResult.affected === 0) {

View file

@ -2,7 +2,6 @@ import express from 'express';
import assert from 'node:assert'; import assert from 'node:assert';
import { Get, Post, Patch, RestController, Delete } from '@/decorators'; import { Get, Post, Patch, RestController, Delete } from '@/decorators';
import { BadRequestError } from '@/errors/response-errors/bad-request.error';
import { ForbiddenError } from '@/errors/response-errors/forbidden.error'; import { ForbiddenError } from '@/errors/response-errors/forbidden.error';
import { NotFoundError } from '@/errors/response-errors/not-found.error'; import { NotFoundError } from '@/errors/response-errors/not-found.error';
import { import {
@ -11,21 +10,12 @@ import {
} from '@/evaluation/test-definition.schema'; } from '@/evaluation/test-definition.schema';
import { listQueryMiddleware } from '@/middlewares'; import { listQueryMiddleware } from '@/middlewares';
import { getSharedWorkflowIds } from '@/public-api/v1/handlers/workflows/workflows.service'; import { getSharedWorkflowIds } from '@/public-api/v1/handlers/workflows/workflows.service';
import { isPositiveInteger } from '@/utils';
import { TestDefinitionService } from './test-definition.service.ee'; import { TestDefinitionService } from './test-definition.service.ee';
import { TestDefinitionsRequest } from './test-definitions.types.ee'; import { TestDefinitionsRequest } from './test-definitions.types.ee';
@RestController('/evaluation/test-definitions') @RestController('/evaluation/test-definitions')
export class TestDefinitionsController { export class TestDefinitionsController {
private validateId(id: string) {
if (!isPositiveInteger(id)) {
throw new BadRequestError('Test ID is not a number');
}
return Number(id);
}
constructor(private readonly testDefinitionService: TestDefinitionService) {} constructor(private readonly testDefinitionService: TestDefinitionService) {}
@Get('/', { middlewares: listQueryMiddleware }) @Get('/', { middlewares: listQueryMiddleware })
@ -40,7 +30,7 @@ export class TestDefinitionsController {
@Get('/:id') @Get('/:id')
async getOne(req: TestDefinitionsRequest.GetOne) { async getOne(req: TestDefinitionsRequest.GetOne) {
const testDefinitionId = this.validateId(req.params.id); const { id: testDefinitionId } = req.params;
const userAccessibleWorkflowIds = await getSharedWorkflowIds(req.user, ['workflow:read']); const userAccessibleWorkflowIds = await getSharedWorkflowIds(req.user, ['workflow:read']);
@ -82,7 +72,7 @@ export class TestDefinitionsController {
@Delete('/:id') @Delete('/:id')
async delete(req: TestDefinitionsRequest.Delete) { async delete(req: TestDefinitionsRequest.Delete) {
const testDefinitionId = this.validateId(req.params.id); const { id: testDefinitionId } = req.params;
const userAccessibleWorkflowIds = await getSharedWorkflowIds(req.user, ['workflow:read']); const userAccessibleWorkflowIds = await getSharedWorkflowIds(req.user, ['workflow:read']);
@ -96,7 +86,7 @@ export class TestDefinitionsController {
@Patch('/:id') @Patch('/:id')
async patch(req: TestDefinitionsRequest.Patch, res: express.Response) { async patch(req: TestDefinitionsRequest.Patch, res: express.Response) {
const testDefinitionId = this.validateId(req.params.id); const { id: testDefinitionId } = req.params;
const bodyParseResult = testDefinitionPatchRequestBodySchema.safeParse(req.body); const bodyParseResult = testDefinitionPatchRequestBodySchema.safeParse(req.body);
if (!bodyParseResult.success) { if (!bodyParseResult.success) {

View file

@ -2,7 +2,6 @@ import type { GlobalConfig } from '@n8n/config';
import { mock } from 'jest-mock-extended'; import { mock } from 'jest-mock-extended';
import type { IWorkflowBase } from 'n8n-workflow'; import type { IWorkflowBase } from 'n8n-workflow';
import config from '@/config';
import { N8N_VERSION } from '@/constants'; import { N8N_VERSION } from '@/constants';
import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
import type { ProjectRelationRepository } from '@/databases/repositories/project-relation.repository'; import type { ProjectRelationRepository } from '@/databases/repositories/project-relation.repository';
@ -66,7 +65,7 @@ describe('TelemetryEventRelay', () => {
}); });
beforeEach(() => { beforeEach(() => {
config.set('diagnostics.enabled', true); globalConfig.diagnostics.enabled = true;
}); });
afterEach(() => { afterEach(() => {
@ -75,7 +74,7 @@ describe('TelemetryEventRelay', () => {
describe('init', () => { describe('init', () => {
it('with diagnostics enabled, should init telemetry and register listeners', async () => { it('with diagnostics enabled, should init telemetry and register listeners', async () => {
config.set('diagnostics.enabled', true); globalConfig.diagnostics.enabled = true;
const telemetryEventRelay = new TelemetryEventRelay( const telemetryEventRelay = new TelemetryEventRelay(
eventService, eventService,
telemetry, telemetry,
@ -96,7 +95,7 @@ describe('TelemetryEventRelay', () => {
}); });
it('with diagnostics disabled, should neither init telemetry nor register listeners', async () => { it('with diagnostics disabled, should neither init telemetry nor register listeners', async () => {
config.set('diagnostics.enabled', false); globalConfig.diagnostics.enabled = false;
const telemetryEventRelay = new TelemetryEventRelay( const telemetryEventRelay = new TelemetryEventRelay(
eventService, eventService,
telemetry, telemetry,

View file

@ -37,7 +37,7 @@ export class TelemetryEventRelay extends EventRelay {
} }
async init() { async init() {
if (!config.getEnv('diagnostics.enabled')) return; if (!this.globalConfig.diagnostics.enabled) return;
await this.telemetry.init(); await this.telemetry.init();

View file

@ -1,5 +1,4 @@
import { import {
deepCopy,
ErrorReporterProxy, ErrorReporterProxy,
type IRunExecutionData, type IRunExecutionData,
type ITaskData, type ITaskData,
@ -87,37 +86,6 @@ test('should update execution when saving progress is enabled', async () => {
expect(reporterSpy).not.toHaveBeenCalled(); expect(reporterSpy).not.toHaveBeenCalled();
}); });
test('should update execution when saving progress is disabled, but waitTill is defined', async () => {
jest.spyOn(fnModule, 'toSaveSettings').mockReturnValue({
...commonSettings,
progress: false,
});
const reporterSpy = jest.spyOn(ErrorReporterProxy, 'error');
executionRepository.findSingleExecution.mockResolvedValue({} as IExecutionResponse);
const args = deepCopy(commonArgs);
args[4].waitTill = new Date();
await saveExecutionProgress(...args);
expect(executionRepository.updateExistingExecution).toHaveBeenCalledWith('some-execution-id', {
data: {
executionData: undefined,
resultData: {
lastNodeExecuted: 'My Node',
runData: {
'My Node': [{}],
},
},
startData: {},
},
status: 'running',
});
expect(reporterSpy).not.toHaveBeenCalled();
});
test('should report error on failure', async () => { test('should report error on failure', async () => {
jest.spyOn(fnModule, 'toSaveSettings').mockReturnValue({ jest.spyOn(fnModule, 'toSaveSettings').mockReturnValue({
...commonSettings, ...commonSettings,

View file

@ -16,7 +16,7 @@ export async function saveExecutionProgress(
) { ) {
const saveSettings = toSaveSettings(workflowData.settings); const saveSettings = toSaveSettings(workflowData.settings);
if (!saveSettings.progress && !executionData.waitTill) return; if (!saveSettings.progress) return;
const logger = Container.get(Logger); const logger = Container.get(Logger);

View file

@ -18,20 +18,20 @@ export function toSaveSettings(workflowSettings: IWorkflowSettings = {}) {
PROGRESS: config.getEnv('executions.saveExecutionProgress'), PROGRESS: config.getEnv('executions.saveExecutionProgress'),
}; };
const {
saveDataErrorExecution = DEFAULTS.ERROR,
saveDataSuccessExecution = DEFAULTS.SUCCESS,
saveManualExecutions = DEFAULTS.MANUAL,
saveExecutionProgress = DEFAULTS.PROGRESS,
} = workflowSettings;
return { return {
error: workflowSettings.saveDataErrorExecution error: saveDataErrorExecution === 'DEFAULT' ? DEFAULTS.ERROR : saveDataErrorExecution === 'all',
? workflowSettings.saveDataErrorExecution !== 'none' success:
: DEFAULTS.ERROR !== 'none', saveDataSuccessExecution === 'DEFAULT'
success: workflowSettings.saveDataSuccessExecution ? DEFAULTS.SUCCESS
? workflowSettings.saveDataSuccessExecution !== 'none' : saveDataSuccessExecution === 'all',
: DEFAULTS.SUCCESS !== 'none', manual: saveManualExecutions === 'DEFAULT' ? DEFAULTS.MANUAL : saveManualExecutions,
manual: progress: saveExecutionProgress === 'DEFAULT' ? DEFAULTS.PROGRESS : saveExecutionProgress,
workflowSettings === undefined || workflowSettings.saveManualExecutions === 'DEFAULT'
? DEFAULTS.MANUAL
: (workflowSettings.saveManualExecutions ?? DEFAULTS.MANUAL),
progress:
workflowSettings === undefined || workflowSettings.saveExecutionProgress === 'DEFAULT'
? DEFAULTS.PROGRESS
: (workflowSettings.saveExecutionProgress ?? DEFAULTS.PROGRESS),
}; };
} }

View file

@ -1,6 +1,5 @@
import type { Scope } from '@n8n/permissions'; import type { Scope } from '@n8n/permissions';
import type { Application } from 'express'; import type { Application } from 'express';
import type { WorkflowExecute } from 'n8n-core';
import type { import type {
ExecutionError, ExecutionError,
ICredentialDataDecryptedObject, ICredentialDataDecryptedObject,
@ -14,7 +13,6 @@ import type {
ITelemetryTrackProperties, ITelemetryTrackProperties,
IWorkflowBase, IWorkflowBase,
CredentialLoadingDetails, CredentialLoadingDetails,
Workflow,
WorkflowExecuteMode, WorkflowExecuteMode,
ExecutionStatus, ExecutionStatus,
ExecutionSummary, ExecutionSummary,
@ -300,12 +298,6 @@ export interface IWorkflowErrorData {
}; };
} }
export interface IWorkflowExecuteProcess {
startedAt: Date;
workflow: Workflow;
workflowExecute: WorkflowExecute;
}
export interface IWorkflowStatisticsDataLoaded { export interface IWorkflowStatisticsDataLoaded {
dataLoaded: boolean; dataLoaded: boolean;
} }

View file

@ -3,7 +3,6 @@ import { mock } from 'jest-mock-extended';
import { InstanceSettings } from 'n8n-core'; import { InstanceSettings } from 'n8n-core';
import { PostHog } from 'posthog-node'; import { PostHog } from 'posthog-node';
import config from '@/config';
import { PostHogClient } from '@/posthog'; import { PostHogClient } from '@/posthog';
import { mockInstance } from '@test/mocking'; import { mockInstance } from '@test/mocking';
@ -20,12 +19,11 @@ describe('PostHog', () => {
const globalConfig = mock<GlobalConfig>({ logging: { level: 'debug' } }); const globalConfig = mock<GlobalConfig>({ logging: { level: 'debug' } });
beforeAll(() => { beforeAll(() => {
config.set('diagnostics.config.posthog.apiKey', apiKey); globalConfig.diagnostics.posthogConfig = { apiKey, apiHost };
config.set('diagnostics.config.posthog.apiHost', apiHost);
}); });
beforeEach(() => { beforeEach(() => {
config.set('diagnostics.enabled', true); globalConfig.diagnostics.enabled = true;
jest.resetAllMocks(); jest.resetAllMocks();
}); });
@ -37,7 +35,7 @@ describe('PostHog', () => {
}); });
it('does not initialize or track if diagnostics are not enabled', async () => { it('does not initialize or track if diagnostics are not enabled', async () => {
config.set('diagnostics.enabled', false); globalConfig.diagnostics.enabled = false;
const ph = new PostHogClient(instanceSettings, globalConfig); const ph = new PostHogClient(instanceSettings, globalConfig);
await ph.init(); await ph.init();

View file

@ -4,7 +4,6 @@ import type { FeatureFlags, ITelemetryTrackProperties } from 'n8n-workflow';
import type { PostHog } from 'posthog-node'; import type { PostHog } from 'posthog-node';
import { Service } from 'typedi'; import { Service } from 'typedi';
import config from '@/config';
import type { PublicUser } from '@/interfaces'; import type { PublicUser } from '@/interfaces';
@Service() @Service()
@ -17,14 +16,14 @@ export class PostHogClient {
) {} ) {}
async init() { async init() {
const enabled = config.getEnv('diagnostics.enabled'); const { enabled, posthogConfig } = this.globalConfig.diagnostics;
if (!enabled) { if (!enabled) {
return; return;
} }
const { PostHog } = await import('posthog-node'); const { PostHog } = await import('posthog-node');
this.postHog = new PostHog(config.getEnv('diagnostics.config.posthog.apiKey'), { this.postHog = new PostHog(posthogConfig.apiKey, {
host: config.getEnv('diagnostics.config.posthog.apiHost'), host: posthogConfig.apiHost,
}); });
const logLevel = this.globalConfig.logging.level; const logLevel = this.globalConfig.logging.level;

View file

@ -1,8 +1,12 @@
import type { TaskRunnersConfig } from '@n8n/config';
import type { RunnerMessage, TaskResultData } from '@n8n/task-runner'; import type { RunnerMessage, TaskResultData } from '@n8n/task-runner';
import { mock } from 'jest-mock-extended'; import { mock } from 'jest-mock-extended';
import type { INodeTypeBaseDescription } from 'n8n-workflow'; import { ApplicationError, type INodeTypeBaseDescription } from 'n8n-workflow';
import { Time } from '@/constants';
import { TaskRejectError } from '../errors'; import { TaskRejectError } from '../errors';
import type { RunnerLifecycleEvents } from '../runner-lifecycle-events';
import { TaskBroker } from '../task-broker.service'; import { TaskBroker } from '../task-broker.service';
import type { TaskOffer, TaskRequest, TaskRunner } from '../task-broker.service'; import type { TaskOffer, TaskRequest, TaskRunner } from '../task-broker.service';
@ -12,7 +16,7 @@ describe('TaskBroker', () => {
let taskBroker: TaskBroker; let taskBroker: TaskBroker;
beforeEach(() => { beforeEach(() => {
taskBroker = new TaskBroker(mock()); taskBroker = new TaskBroker(mock(), mock(), mock());
jest.restoreAllMocks(); jest.restoreAllMocks();
}); });
@ -618,4 +622,131 @@ describe('TaskBroker', () => {
}); });
}); });
}); });
describe('task timeouts', () => {
let taskBroker: TaskBroker;
let config: TaskRunnersConfig;
let runnerLifecycleEvents = mock<RunnerLifecycleEvents>();
beforeAll(() => {
jest.useFakeTimers();
config = mock<TaskRunnersConfig>({ taskTimeout: 30 });
taskBroker = new TaskBroker(mock(), config, runnerLifecycleEvents);
});
afterAll(() => {
jest.useRealTimers();
});
it('on sending task, we should set up task timeout', async () => {
jest.spyOn(global, 'setTimeout');
const taskId = 'task1';
const runnerId = 'runner1';
const runner = mock<TaskRunner>({ id: runnerId });
const runnerMessageCallback = jest.fn();
taskBroker.registerRunner(runner, runnerMessageCallback);
taskBroker.setTasks({
[taskId]: { id: taskId, runnerId, requesterId: 'requester1', taskType: 'test' },
});
await taskBroker.sendTaskSettings(taskId, {});
expect(setTimeout).toHaveBeenCalledWith(
expect.any(Function),
config.taskTimeout * Time.seconds.toMilliseconds,
);
});
it('on task completion, we should clear timeout', async () => {
jest.spyOn(global, 'clearTimeout');
const taskId = 'task1';
const runnerId = 'runner1';
const requesterId = 'requester1';
const requesterCallback = jest.fn();
taskBroker.registerRequester(requesterId, requesterCallback);
taskBroker.setTasks({
[taskId]: {
id: taskId,
runnerId,
requesterId,
taskType: 'test',
timeout: setTimeout(() => {}, config.taskTimeout * Time.seconds.toMilliseconds),
},
});
await taskBroker.taskDoneHandler(taskId, { result: [] });
expect(clearTimeout).toHaveBeenCalled();
expect(taskBroker.getTasks().get(taskId)).toBeUndefined();
});
it('on task error, we should clear timeout', async () => {
jest.spyOn(global, 'clearTimeout');
const taskId = 'task1';
const runnerId = 'runner1';
const requesterId = 'requester1';
const requesterCallback = jest.fn();
taskBroker.registerRequester(requesterId, requesterCallback);
taskBroker.setTasks({
[taskId]: {
id: taskId,
runnerId,
requesterId,
taskType: 'test',
timeout: setTimeout(() => {}, config.taskTimeout * Time.seconds.toMilliseconds),
},
});
await taskBroker.taskErrorHandler(taskId, new Error('Test error'));
expect(clearTimeout).toHaveBeenCalled();
expect(taskBroker.getTasks().get(taskId)).toBeUndefined();
});
it('on timeout, we should emit `runner:timed-out-during-task` event and send error to requester', async () => {
jest.spyOn(global, 'clearTimeout');
const taskId = 'task1';
const runnerId = 'runner1';
const requesterId = 'requester1';
const runner = mock<TaskRunner>({ id: runnerId });
const runnerCallback = jest.fn();
const requesterCallback = jest.fn();
taskBroker.registerRunner(runner, runnerCallback);
taskBroker.registerRequester(requesterId, requesterCallback);
taskBroker.setTasks({
[taskId]: { id: taskId, runnerId, requesterId, taskType: 'test' },
});
await taskBroker.sendTaskSettings(taskId, {});
jest.runAllTimers();
await Promise.resolve();
expect(runnerLifecycleEvents.emit).toHaveBeenCalledWith('runner:timed-out-during-task');
await Promise.resolve();
expect(clearTimeout).toHaveBeenCalled();
expect(requesterCallback).toHaveBeenCalledWith({
type: 'broker:taskerror',
taskId,
error: new ApplicationError(`Task execution timed out after ${config.taskTimeout} seconds`),
});
await Promise.resolve();
expect(taskBroker.getTasks().get(taskId)).toBeUndefined();
});
});
}); });

View file

@ -7,6 +7,8 @@ import type { TaskRunnerAuthService } from '@/runners/auth/task-runner-auth.serv
import { TaskRunnerProcess } from '@/runners/task-runner-process'; import { TaskRunnerProcess } from '@/runners/task-runner-process';
import { mockInstance } from '@test/mocking'; import { mockInstance } from '@test/mocking';
import type { RunnerLifecycleEvents } from '../runner-lifecycle-events';
const spawnMock = jest.fn(() => const spawnMock = jest.fn(() =>
mock<ChildProcess>({ mock<ChildProcess>({
stdout: { stdout: {
@ -25,7 +27,7 @@ describe('TaskRunnerProcess', () => {
runnerConfig.enabled = true; runnerConfig.enabled = true;
runnerConfig.mode = 'internal_childprocess'; runnerConfig.mode = 'internal_childprocess';
const authService = mock<TaskRunnerAuthService>(); const authService = mock<TaskRunnerAuthService>();
let taskRunnerProcess = new TaskRunnerProcess(logger, runnerConfig, authService); let taskRunnerProcess = new TaskRunnerProcess(logger, runnerConfig, authService, mock());
afterEach(async () => { afterEach(async () => {
spawnMock.mockClear(); spawnMock.mockClear();
@ -35,34 +37,59 @@ describe('TaskRunnerProcess', () => {
it('should throw if runner mode is external', () => { it('should throw if runner mode is external', () => {
runnerConfig.mode = 'external'; runnerConfig.mode = 'external';
expect(() => new TaskRunnerProcess(logger, runnerConfig, authService)).toThrow(); expect(() => new TaskRunnerProcess(logger, runnerConfig, authService, mock())).toThrow();
runnerConfig.mode = 'internal_childprocess'; runnerConfig.mode = 'internal_childprocess';
}); });
it('should register listener for `runner:failed-heartbeat-check` event', () => {
const runnerLifecycleEvents = mock<RunnerLifecycleEvents>();
new TaskRunnerProcess(logger, runnerConfig, authService, runnerLifecycleEvents);
expect(runnerLifecycleEvents.on).toHaveBeenCalledWith(
'runner:failed-heartbeat-check',
expect.any(Function),
);
});
it('should register listener for `runner:timed-out-during-task` event', () => {
const runnerLifecycleEvents = mock<RunnerLifecycleEvents>();
new TaskRunnerProcess(logger, runnerConfig, authService, runnerLifecycleEvents);
expect(runnerLifecycleEvents.on).toHaveBeenCalledWith(
'runner:timed-out-during-task',
expect.any(Function),
);
});
}); });
describe('start', () => { describe('start', () => {
beforeEach(() => { beforeEach(() => {
taskRunnerProcess = new TaskRunnerProcess(logger, runnerConfig, authService); taskRunnerProcess = new TaskRunnerProcess(logger, runnerConfig, authService, mock());
}); });
test.each(['PATH', 'NODE_FUNCTION_ALLOW_BUILTIN', 'NODE_FUNCTION_ALLOW_EXTERNAL'])( test.each([
'should propagate %s from env as is', 'PATH',
async (envVar) => { 'NODE_FUNCTION_ALLOW_BUILTIN',
jest.spyOn(authService, 'createGrantToken').mockResolvedValue('grantToken'); 'NODE_FUNCTION_ALLOW_EXTERNAL',
process.env[envVar] = 'custom value'; 'N8N_SENTRY_DSN',
'N8N_VERSION',
'ENVIRONMENT',
'DEPLOYMENT_NAME',
])('should propagate %s from env as is', async (envVar) => {
jest.spyOn(authService, 'createGrantToken').mockResolvedValue('grantToken');
process.env[envVar] = 'custom value';
await taskRunnerProcess.start(); await taskRunnerProcess.start();
// @ts-expect-error The type is not correct // @ts-expect-error The type is not correct
const options = spawnMock.mock.calls[0][2] as SpawnOptions; const options = spawnMock.mock.calls[0][2] as SpawnOptions;
expect(options.env).toEqual( expect(options.env).toEqual(
expect.objectContaining({ expect.objectContaining({
[envVar]: 'custom value', [envVar]: 'custom value',
}), }),
); );
}, });
);
it('should pass NODE_OPTIONS env if maxOldSpaceSize is configured', async () => { it('should pass NODE_OPTIONS env if maxOldSpaceSize is configured', async () => {
jest.spyOn(authService, 'createGrantToken').mockResolvedValue('grantToken'); jest.spyOn(authService, 'createGrantToken').mockResolvedValue('grantToken');

View file

@ -0,0 +1,45 @@
import type { TaskRunnersConfig } from '@n8n/config';
import { mock } from 'jest-mock-extended';
import { Time } from '@/constants';
import { TaskRunnerWsServer } from '@/runners/runner-ws-server';
describe('TaskRunnerWsServer', () => {
describe('heartbeat timer', () => {
it('should set up heartbeat timer on server start', async () => {
const setIntervalSpy = jest.spyOn(global, 'setInterval');
const server = new TaskRunnerWsServer(
mock(),
mock(),
mock(),
mock<TaskRunnersConfig>({ path: '/runners', heartbeatInterval: 30 }),
mock(),
);
expect(setIntervalSpy).toHaveBeenCalledWith(
expect.any(Function),
30 * Time.seconds.toMilliseconds,
);
await server.shutdown();
});
it('should clear heartbeat timer on server stop', async () => {
jest.spyOn(global, 'setInterval');
const clearIntervalSpy = jest.spyOn(global, 'clearInterval');
const server = new TaskRunnerWsServer(
mock(),
mock(),
mock(),
mock<TaskRunnersConfig>({ path: '/runners', heartbeatInterval: 30 }),
mock(),
);
await server.shutdown();
expect(clearIntervalSpy).toHaveBeenCalled();
});
});
});

View file

@ -1,8 +1,10 @@
import { Service } from 'typedi'; import { Service } from 'typedi';
import config from '@/config';
import { TaskRunnerDisconnectedError } from './errors/task-runner-disconnected-error'; import { TaskRunnerDisconnectedError } from './errors/task-runner-disconnected-error';
import type { DisconnectAnalyzer } from './runner-types'; import { TaskRunnerFailedHeartbeatError } from './errors/task-runner-failed-heartbeat.error';
import type { TaskRunner } from './task-broker.service'; import type { DisconnectAnalyzer, DisconnectErrorOptions } from './runner-types';
/** /**
* Analyzes the disconnect reason of a task runner to provide a more * Analyzes the disconnect reason of a task runner to provide a more
@ -10,7 +12,16 @@ import type { TaskRunner } from './task-broker.service';
*/ */
@Service() @Service()
export class DefaultTaskRunnerDisconnectAnalyzer implements DisconnectAnalyzer { export class DefaultTaskRunnerDisconnectAnalyzer implements DisconnectAnalyzer {
async determineDisconnectReason(runnerId: TaskRunner['id']): Promise<Error> { async toDisconnectError(opts: DisconnectErrorOptions): Promise<Error> {
return new TaskRunnerDisconnectedError(runnerId); const { reason, heartbeatInterval } = opts;
if (reason === 'failed-heartbeat-check' && heartbeatInterval) {
return new TaskRunnerFailedHeartbeatError(
heartbeatInterval,
config.get('deployment.type') !== 'cloud',
);
}
return new TaskRunnerDisconnectedError(opts.runnerId ?? 'Unknown runner ID');
} }
} }

View file

@ -0,0 +1,32 @@
import { ApplicationError } from 'n8n-workflow';
export class TaskRunnerFailedHeartbeatError extends ApplicationError {
description: string;
constructor(heartbeatInterval: number, isSelfHosted: boolean) {
super('Task execution aborted because runner became unresponsive');
const subtitle =
'The task runner failed to respond as expected, so it was considered unresponsive, and the task was aborted. You can try the following:';
const fixes = {
optimizeScript:
'Optimize your script to prevent CPU-intensive operations, e.g. by breaking them down into smaller chunks or batch processing.',
ensureTermination:
'Ensure that all paths in your script are able to terminate, i.e. no infinite loops.',
increaseInterval: `If your task can reasonably keep the task runner busy for more than ${heartbeatInterval} ${heartbeatInterval === 1 ? 'second' : 'seconds'}, increase the heartbeat interval using the N8N_RUNNERS_HEARTBEAT_INTERVAL environment variable.`,
};
const suggestions = [fixes.optimizeScript, fixes.ensureTermination];
if (isSelfHosted) suggestions.push(fixes.increaseInterval);
const suggestionsText = suggestions
.map((suggestion, index) => `${index + 1}. ${suggestion}`)
.join('<br/>');
const description = `${subtitle}<br/><br/>${suggestionsText}`;
this.description = description;
}
}

View file

@ -0,0 +1,34 @@
import { ApplicationError } from 'n8n-workflow';
export class TaskRunnerTimeoutError extends ApplicationError {
description: string;
constructor(taskTimeout: number, isSelfHosted: boolean) {
super(
`Task execution timed out after ${taskTimeout} ${taskTimeout === 1 ? 'second' : 'seconds'}`,
);
const subtitle =
'The task runner was taking too long on this task, so it was suspected of being unresponsive and restarted, and the task was aborted. You can try the following:';
const fixes = {
optimizeScript:
'Optimize your script to prevent long-running tasks, e.g. by processing data in smaller batches.',
ensureTermination:
'Ensure that all paths in your script are able to terminate, i.e. no infinite loops.',
increaseTimeout: `If your task can reasonably take more than ${taskTimeout} ${taskTimeout === 1 ? 'second' : 'seconds'}, increase the timeout using the N8N_RUNNERS_TASK_TIMEOUT environment variable.`,
};
const suggestions = [fixes.optimizeScript, fixes.ensureTermination];
if (isSelfHosted) suggestions.push(fixes.increaseTimeout);
const suggestionsText = suggestions
.map((suggestion, index) => `${index + 1}. ${suggestion}`)
.join('<br/>');
const description = `${subtitle}<br/><br/>${suggestionsText}`;
this.description = description;
}
}

View file

@ -5,8 +5,8 @@ import config from '@/config';
import { DefaultTaskRunnerDisconnectAnalyzer } from './default-task-runner-disconnect-analyzer'; import { DefaultTaskRunnerDisconnectAnalyzer } from './default-task-runner-disconnect-analyzer';
import { TaskRunnerOomError } from './errors/task-runner-oom-error'; import { TaskRunnerOomError } from './errors/task-runner-oom-error';
import type { DisconnectErrorOptions } from './runner-types';
import { SlidingWindowSignal } from './sliding-window-signal'; import { SlidingWindowSignal } from './sliding-window-signal';
import type { TaskRunner } from './task-broker.service';
import type { ExitReason, TaskRunnerProcessEventMap } from './task-runner-process'; import type { ExitReason, TaskRunnerProcessEventMap } from './task-runner-process';
import { TaskRunnerProcess } from './task-runner-process'; import { TaskRunnerProcess } from './task-runner-process';
@ -38,13 +38,13 @@ export class InternalTaskRunnerDisconnectAnalyzer extends DefaultTaskRunnerDisco
}); });
} }
async determineDisconnectReason(runnerId: TaskRunner['id']): Promise<Error> { async toDisconnectError(opts: DisconnectErrorOptions): Promise<Error> {
const exitCode = await this.awaitExitSignal(); const exitCode = await this.awaitExitSignal();
if (exitCode === 'oom') { if (exitCode === 'oom') {
return new TaskRunnerOomError(runnerId, this.isCloudDeployment); return new TaskRunnerOomError(opts.runnerId ?? 'Unknown runner ID', this.isCloudDeployment);
} }
return await super.determineDisconnectReason(runnerId); return await super.toDisconnectError(opts);
} }
private async awaitExitSignal(): Promise<ExitReason> { private async awaitExitSignal(): Promise<ExitReason> {

View file

@ -0,0 +1,11 @@
import { Service } from 'typedi';
import { TypedEmitter } from '@/typed-emitter';
type RunnerLifecycleEventMap = {
'runner:failed-heartbeat-check': never;
'runner:timed-out-during-task': never;
};
@Service()
export class RunnerLifecycleEvents extends TypedEmitter<RunnerLifecycleEventMap> {}

View file

@ -6,7 +6,7 @@ import type { TaskRunner } from './task-broker.service';
import type { AuthlessRequest } from '../requests'; import type { AuthlessRequest } from '../requests';
export interface DisconnectAnalyzer { export interface DisconnectAnalyzer {
determineDisconnectReason(runnerId: TaskRunner['id']): Promise<Error>; toDisconnectError(opts: DisconnectErrorOptions): Promise<Error>;
} }
export type DataRequestType = 'input' | 'node' | 'all'; export type DataRequestType = 'input' | 'node' | 'all';
@ -22,3 +22,11 @@ export interface TaskRunnerServerInitRequest
} }
export type TaskRunnerServerInitResponse = Response & { req: TaskRunnerServerInitRequest }; export type TaskRunnerServerInitResponse = Response & { req: TaskRunnerServerInitRequest };
export type DisconnectReason = 'shutting-down' | 'failed-heartbeat-check' | 'unknown';
export type DisconnectErrorOptions = {
runnerId?: TaskRunner['id'];
reason?: DisconnectReason;
heartbeatInterval?: number;
};

View file

@ -1,12 +1,17 @@
import { TaskRunnersConfig } from '@n8n/config';
import type { BrokerMessage, RunnerMessage } from '@n8n/task-runner'; import type { BrokerMessage, RunnerMessage } from '@n8n/task-runner';
import { ApplicationError } from 'n8n-workflow';
import { Service } from 'typedi'; import { Service } from 'typedi';
import type WebSocket from 'ws'; import type WebSocket from 'ws';
import { Time } from '@/constants';
import { Logger } from '@/logging/logger.service'; import { Logger } from '@/logging/logger.service';
import { DefaultTaskRunnerDisconnectAnalyzer } from './default-task-runner-disconnect-analyzer'; import { DefaultTaskRunnerDisconnectAnalyzer } from './default-task-runner-disconnect-analyzer';
import { RunnerLifecycleEvents } from './runner-lifecycle-events';
import type { import type {
DisconnectAnalyzer, DisconnectAnalyzer,
DisconnectReason,
TaskRunnerServerInitRequest, TaskRunnerServerInitRequest,
TaskRunnerServerInitResponse, TaskRunnerServerInitResponse,
} from './runner-types'; } from './runner-types';
@ -20,11 +25,50 @@ function heartbeat(this: WebSocket) {
export class TaskRunnerWsServer { export class TaskRunnerWsServer {
runnerConnections: Map<TaskRunner['id'], WebSocket> = new Map(); runnerConnections: Map<TaskRunner['id'], WebSocket> = new Map();
private heartbeatTimer: NodeJS.Timer | undefined;
constructor( constructor(
private readonly logger: Logger, private readonly logger: Logger,
private readonly taskBroker: TaskBroker, private readonly taskBroker: TaskBroker,
private disconnectAnalyzer: DefaultTaskRunnerDisconnectAnalyzer, private disconnectAnalyzer: DefaultTaskRunnerDisconnectAnalyzer,
) {} private readonly taskTunnersConfig: TaskRunnersConfig,
private readonly runnerLifecycleEvents: RunnerLifecycleEvents,
) {
this.startHeartbeatChecks();
}
private startHeartbeatChecks() {
const { heartbeatInterval } = this.taskTunnersConfig;
if (heartbeatInterval <= 0) {
throw new ApplicationError('Heartbeat interval must be greater than 0');
}
this.heartbeatTimer = setInterval(() => {
for (const [runnerId, connection] of this.runnerConnections.entries()) {
if (!connection.isAlive) {
void this.removeConnection(runnerId, 'failed-heartbeat-check');
this.runnerLifecycleEvents.emit('runner:failed-heartbeat-check');
return;
}
connection.isAlive = false;
connection.ping();
}
}, heartbeatInterval * Time.seconds.toMilliseconds);
}
async shutdown() {
if (this.heartbeatTimer) {
clearInterval(this.heartbeatTimer);
this.heartbeatTimer = undefined;
}
await Promise.all(
Array.from(this.runnerConnections.keys()).map(
async (id) => await this.removeConnection(id, 'shutting-down'),
),
);
}
setDisconnectAnalyzer(disconnectAnalyzer: DisconnectAnalyzer) { setDisconnectAnalyzer(disconnectAnalyzer: DisconnectAnalyzer) {
this.disconnectAnalyzer = disconnectAnalyzer; this.disconnectAnalyzer = disconnectAnalyzer;
@ -97,11 +141,15 @@ export class TaskRunnerWsServer {
); );
} }
async removeConnection(id: TaskRunner['id']) { async removeConnection(id: TaskRunner['id'], reason: DisconnectReason = 'unknown') {
const connection = this.runnerConnections.get(id); const connection = this.runnerConnections.get(id);
if (connection) { if (connection) {
const disconnectReason = await this.disconnectAnalyzer.determineDisconnectReason(id); const disconnectError = await this.disconnectAnalyzer.toDisconnectError({
this.taskBroker.deregisterRunner(id, disconnectReason); runnerId: id,
reason,
heartbeatInterval: this.taskTunnersConfig.heartbeatInterval,
});
this.taskBroker.deregisterRunner(id, disconnectError);
connection.close(); connection.close();
this.runnerConnections.delete(id); this.runnerConnections.delete(id);
} }

View file

@ -1,3 +1,4 @@
import { TaskRunnersConfig } from '@n8n/config';
import type { import type {
BrokerMessage, BrokerMessage,
RequesterMessage, RequesterMessage,
@ -8,9 +9,13 @@ import { ApplicationError } from 'n8n-workflow';
import { nanoid } from 'nanoid'; import { nanoid } from 'nanoid';
import { Service } from 'typedi'; import { Service } from 'typedi';
import config from '@/config';
import { Time } from '@/constants';
import { Logger } from '@/logging/logger.service'; import { Logger } from '@/logging/logger.service';
import { TaskRejectError } from './errors'; import { TaskRejectError } from './errors';
import { TaskRunnerTimeoutError } from './errors/task-runner-timeout.error';
import { RunnerLifecycleEvents } from './runner-lifecycle-events';
export interface TaskRunner { export interface TaskRunner {
id: string; id: string;
@ -24,6 +29,7 @@ export interface Task {
runnerId: TaskRunner['id']; runnerId: TaskRunner['id'];
requesterId: string; requesterId: string;
taskType: string; taskType: string;
timeout?: NodeJS.Timeout;
} }
export interface TaskOffer { export interface TaskOffer {
@ -78,7 +84,15 @@ export class TaskBroker {
private pendingTaskRequests: TaskRequest[] = []; private pendingTaskRequests: TaskRequest[] = [];
constructor(private readonly logger: Logger) {} constructor(
private readonly logger: Logger,
private readonly taskRunnersConfig: TaskRunnersConfig,
private readonly runnerLifecycleEvents: RunnerLifecycleEvents,
) {
if (this.taskRunnersConfig.taskTimeout <= 0) {
throw new ApplicationError('Task timeout must be greater than 0');
}
}
expireTasks() { expireTasks() {
const now = process.hrtime.bigint(); const now = process.hrtime.bigint();
@ -408,6 +422,14 @@ export class TaskBroker {
async sendTaskSettings(taskId: Task['id'], settings: unknown) { async sendTaskSettings(taskId: Task['id'], settings: unknown) {
const runner = await this.getRunnerOrFailTask(taskId); const runner = await this.getRunnerOrFailTask(taskId);
const task = this.tasks.get(taskId);
if (!task) return;
task.timeout = setTimeout(async () => {
await this.handleTaskTimeout(taskId);
}, this.taskRunnersConfig.taskTimeout * Time.seconds.toMilliseconds);
await this.messageRunner(runner.id, { await this.messageRunner(runner.id, {
type: 'broker:tasksettings', type: 'broker:tasksettings',
taskId, taskId,
@ -415,11 +437,27 @@ export class TaskBroker {
}); });
} }
private async handleTaskTimeout(taskId: Task['id']) {
const task = this.tasks.get(taskId);
if (!task) return;
this.runnerLifecycleEvents.emit('runner:timed-out-during-task');
await this.taskErrorHandler(
taskId,
new TaskRunnerTimeoutError(
this.taskRunnersConfig.taskTimeout,
config.getEnv('deployment.type') !== 'cloud',
),
);
}
async taskDoneHandler(taskId: Task['id'], data: TaskResultData) { async taskDoneHandler(taskId: Task['id'], data: TaskResultData) {
const task = this.tasks.get(taskId); const task = this.tasks.get(taskId);
if (!task) { if (!task) return;
return;
} clearTimeout(task.timeout);
await this.requesters.get(task.requesterId)?.({ await this.requesters.get(task.requesterId)?.({
type: 'broker:taskdone', type: 'broker:taskdone',
taskId: task.id, taskId: task.id,
@ -430,9 +468,10 @@ export class TaskBroker {
async taskErrorHandler(taskId: Task['id'], error: unknown) { async taskErrorHandler(taskId: Task['id'], error: unknown) {
const task = this.tasks.get(taskId); const task = this.tasks.get(taskId);
if (!task) { if (!task) return;
return;
} clearTimeout(task.timeout);
await this.requesters.get(task.requesterId)?.({ await this.requesters.get(task.requesterId)?.({
type: 'broker:taskerror', type: 'broker:taskerror',
taskId: task.id, taskId: task.id,

View file

@ -10,6 +10,7 @@ import { Logger } from '@/logging/logger.service';
import { TaskRunnerAuthService } from './auth/task-runner-auth.service'; import { TaskRunnerAuthService } from './auth/task-runner-auth.service';
import { forwardToLogger } from './forward-to-logger'; import { forwardToLogger } from './forward-to-logger';
import { NodeProcessOomDetector } from './node-process-oom-detector'; import { NodeProcessOomDetector } from './node-process-oom-detector';
import { RunnerLifecycleEvents } from './runner-lifecycle-events';
import { TypedEmitter } from '../typed-emitter'; import { TypedEmitter } from '../typed-emitter';
type ChildProcess = ReturnType<typeof spawn>; type ChildProcess = ReturnType<typeof spawn>;
@ -59,12 +60,18 @@ export class TaskRunnerProcess extends TypedEmitter<TaskRunnerProcessEventMap> {
'PATH', 'PATH',
'NODE_FUNCTION_ALLOW_BUILTIN', 'NODE_FUNCTION_ALLOW_BUILTIN',
'NODE_FUNCTION_ALLOW_EXTERNAL', 'NODE_FUNCTION_ALLOW_EXTERNAL',
'N8N_SENTRY_DSN',
// Metadata about the environment
'N8N_VERSION',
'ENVIRONMENT',
'DEPLOYMENT_NAME',
] as const; ] as const;
constructor( constructor(
logger: Logger, logger: Logger,
private readonly runnerConfig: TaskRunnersConfig, private readonly runnerConfig: TaskRunnersConfig,
private readonly authService: TaskRunnerAuthService, private readonly authService: TaskRunnerAuthService,
private readonly runnerLifecycleEvents: RunnerLifecycleEvents,
) { ) {
super(); super();
@ -74,6 +81,16 @@ export class TaskRunnerProcess extends TypedEmitter<TaskRunnerProcessEventMap> {
); );
this.logger = logger.scoped('task-runner'); this.logger = logger.scoped('task-runner');
this.runnerLifecycleEvents.on('runner:failed-heartbeat-check', () => {
this.logger.warn('Task runner failed heartbeat check, restarting...');
void this.forceRestart();
});
this.runnerLifecycleEvents.on('runner:timed-out-during-task', () => {
this.logger.warn('Task runner timed out during task, restarting...');
void this.forceRestart();
});
} }
async start() { async start() {
@ -111,9 +128,7 @@ export class TaskRunnerProcess extends TypedEmitter<TaskRunnerProcessEventMap> {
@OnShutdown() @OnShutdown()
async stop() { async stop() {
if (!this.process) { if (!this.process) return;
return;
}
this.isShuttingDown = true; this.isShuttingDown = true;
@ -128,10 +143,22 @@ export class TaskRunnerProcess extends TypedEmitter<TaskRunnerProcessEventMap> {
this.isShuttingDown = false; this.isShuttingDown = false;
} }
killNode() { /** Force-restart a runner suspected of being unresponsive. */
if (!this.process) { async forceRestart() {
return; if (!this.process) return;
if (this.useLauncher) {
await this.killLauncher(); // @TODO: Implement SIGKILL in launcher
} else {
this.process.kill('SIGKILL');
} }
await this._runPromise;
}
killNode() {
if (!this.process) return;
this.process.kill(); this.process.kill();
} }
@ -168,7 +195,6 @@ export class TaskRunnerProcess extends TypedEmitter<TaskRunnerProcessEventMap> {
this.emit('exit', { reason: this.oomDetector?.didProcessOom ? 'oom' : 'unknown' }); this.emit('exit', { reason: this.oomDetector?.didProcessOom ? 'oom' : 'unknown' });
resolveFn(); resolveFn();
// If we are not shutting down, restart the process
if (!this.isShuttingDown) { if (!this.isShuttingDown) {
setImmediate(async () => await this.start()); setImmediate(async () => await this.start());
} }

View file

@ -44,7 +44,7 @@ export class TaskRunnerServer {
private readonly logger: Logger, private readonly logger: Logger,
private readonly globalConfig: GlobalConfig, private readonly globalConfig: GlobalConfig,
private readonly taskRunnerAuthController: TaskRunnerAuthController, private readonly taskRunnerAuthController: TaskRunnerAuthController,
private readonly taskRunnerService: TaskRunnerWsServer, private readonly taskRunnerWsServer: TaskRunnerWsServer,
) { ) {
this.app = express(); this.app = express();
this.app.disable('x-powered-by'); this.app.disable('x-powered-by');
@ -148,7 +148,7 @@ export class TaskRunnerServer {
// eslint-disable-next-line @typescript-eslint/unbound-method // eslint-disable-next-line @typescript-eslint/unbound-method
this.taskRunnerAuthController.authMiddleware, this.taskRunnerAuthController.authMiddleware,
(req: TaskRunnerServerInitRequest, res: TaskRunnerServerInitResponse) => (req: TaskRunnerServerInitRequest, res: TaskRunnerServerInitResponse) =>
this.taskRunnerService.handleRequest(req, res), this.taskRunnerWsServer.handleRequest(req, res),
); );
const authEndpoint = `${this.getEndpointBasePath()}/auth`; const authEndpoint = `${this.getEndpointBasePath()}/auth`;

View file

@ -3,7 +3,7 @@ import { InstanceSettings } from 'n8n-core';
import { Service } from 'typedi'; import { Service } from 'typedi';
import config from '@/config'; import config from '@/config';
import { TIME } from '@/constants'; import { Time } from '@/constants';
import { Logger } from '@/logging/logger.service'; import { Logger } from '@/logging/logger.service';
import { Publisher } from '@/scaling/pubsub/publisher.service'; import { Publisher } from '@/scaling/pubsub/publisher.service';
import { RedisClientService } from '@/services/redis-client.service'; import { RedisClientService } from '@/services/redis-client.service';
@ -54,7 +54,7 @@ export class MultiMainSetup extends TypedEmitter<MultiMainEvents> {
this.leaderCheckInterval = setInterval(async () => { this.leaderCheckInterval = setInterval(async () => {
await this.checkLeader(); await this.checkLeader();
}, this.globalConfig.multiMainSetup.interval * TIME.SECOND); }, this.globalConfig.multiMainSetup.interval * Time.seconds.toMilliseconds);
} }
async shutdown() { async shutdown() {

View file

@ -103,7 +103,7 @@ export class InstanceRiskReporter implements RiskReporter {
}; };
settings.telemetry = { settings.telemetry = {
diagnosticsEnabled: config.getEnv('diagnostics.enabled'), diagnosticsEnabled: this.globalConfig.diagnostics.enabled,
}; };
return settings; return settings;

View file

@ -39,7 +39,7 @@ describe('WorkflowStatisticsService', () => {
}); });
Object.assign(entityManager, { connection: dataSource }); Object.assign(entityManager, { connection: dataSource });
config.set('diagnostics.enabled', true); globalConfig.diagnostics.enabled = true;
config.set('deployment.type', 'n8n-testing'); config.set('deployment.type', 'n8n-testing');
mocked(ownershipService.getWorkflowProjectCached).mockResolvedValue(fakeProject); mocked(ownershipService.getWorkflowProjectCached).mockResolvedValue(fakeProject);
mocked(ownershipService.getPersonalProjectOwnerCached).mockResolvedValue(fakeUser); mocked(ownershipService.getPersonalProjectOwnerCached).mockResolvedValue(fakeUser);

View file

@ -4,7 +4,7 @@ import { ApplicationError, jsonStringify } from 'n8n-workflow';
import Container, { Service } from 'typedi'; import Container, { Service } from 'typedi';
import config from '@/config'; import config from '@/config';
import { TIME } from '@/constants'; import { Time } from '@/constants';
import { MalformedRefreshValueError } from '@/errors/cache-errors/malformed-refresh-value.error'; import { MalformedRefreshValueError } from '@/errors/cache-errors/malformed-refresh-value.error';
import { UncacheableValueError } from '@/errors/cache-errors/uncacheable-value.error'; import { UncacheableValueError } from '@/errors/cache-errors/uncacheable-value.error';
import type { import type {
@ -160,7 +160,7 @@ export class CacheService extends TypedEmitter<CacheEvents> {
}); });
} }
await this.cache.store.expire(key, ttlMs / TIME.SECOND); await this.cache.store.expire(key, ttlMs * Time.milliseconds.toSeconds);
} }
// ---------------------------------- // ----------------------------------

View file

@ -10,7 +10,7 @@ import path from 'path';
import { Container, Service } from 'typedi'; import { Container, Service } from 'typedi';
import config from '@/config'; import config from '@/config';
import { LICENSE_FEATURES, N8N_VERSION } from '@/constants'; import { inE2ETests, LICENSE_FEATURES, N8N_VERSION } from '@/constants';
import { CredentialTypes } from '@/credential-types'; import { CredentialTypes } from '@/credential-types';
import { CredentialsOverwrites } from '@/credentials-overwrites'; import { CredentialsOverwrites } from '@/credentials-overwrites';
import { getVariablesLimit } from '@/environments/variables/environment-helpers'; import { getVariablesLimit } from '@/environments/variables/environment-helpers';
@ -66,11 +66,11 @@ export class FrontendService {
const restEndpoint = this.globalConfig.endpoints.rest; const restEndpoint = this.globalConfig.endpoints.rest;
const telemetrySettings: ITelemetrySettings = { const telemetrySettings: ITelemetrySettings = {
enabled: config.getEnv('diagnostics.enabled'), enabled: this.globalConfig.diagnostics.enabled,
}; };
if (telemetrySettings.enabled) { if (telemetrySettings.enabled) {
const conf = config.getEnv('diagnostics.config.frontend'); const conf = this.globalConfig.diagnostics.frontendConfig;
const [key, url] = conf.split(';'); const [key, url] = conf.split(';');
if (!key || !url) { if (!key || !url) {
@ -82,6 +82,7 @@ export class FrontendService {
} }
this.settings = { this.settings = {
inE2ETests,
isDocker: this.isDocker(), isDocker: this.isDocker(),
databaseType: this.globalConfig.database.type, databaseType: this.globalConfig.database.type,
previewMode: process.env.N8N_PREVIEW_MODE === 'true', previewMode: process.env.N8N_PREVIEW_MODE === 'true',
@ -121,15 +122,15 @@ export class FrontendService {
instanceId: this.instanceSettings.instanceId, instanceId: this.instanceSettings.instanceId,
telemetry: telemetrySettings, telemetry: telemetrySettings,
posthog: { posthog: {
enabled: config.getEnv('diagnostics.enabled'), enabled: this.globalConfig.diagnostics.enabled,
apiHost: config.getEnv('diagnostics.config.posthog.apiHost'), apiHost: this.globalConfig.diagnostics.posthogConfig.apiHost,
apiKey: config.getEnv('diagnostics.config.posthog.apiKey'), apiKey: this.globalConfig.diagnostics.posthogConfig.apiKey,
autocapture: false, autocapture: false,
disableSessionRecording: config.getEnv('deployment.type') !== 'cloud', disableSessionRecording: config.getEnv('deployment.type') !== 'cloud',
debug: this.globalConfig.logging.level === 'debug', debug: this.globalConfig.logging.level === 'debug',
}, },
personalizationSurveyEnabled: personalizationSurveyEnabled:
config.getEnv('personalization.enabled') && config.getEnv('diagnostics.enabled'), config.getEnv('personalization.enabled') && this.globalConfig.diagnostics.enabled,
defaultLocale: config.getEnv('defaultLocale'), defaultLocale: config.getEnv('defaultLocale'),
userManagement: { userManagement: {
quota: this.license.getUsersLimit(), quota: this.license.getUsersLimit(),

View file

@ -21,6 +21,10 @@ describe('Telemetry', () => {
const instanceId = 'Telemetry unit test'; const instanceId = 'Telemetry unit test';
const testDateTime = new Date('2022-01-01 00:00:00'); const testDateTime = new Date('2022-01-01 00:00:00');
const instanceSettings = mockInstance(InstanceSettings, { instanceId }); const instanceSettings = mockInstance(InstanceSettings, { instanceId });
const globalConfig = mock<GlobalConfig>({
diagnostics: { enabled: true },
logging: { level: 'info', outputs: ['console'] },
});
beforeAll(() => { beforeAll(() => {
// @ts-expect-error Spying on private method // @ts-expect-error Spying on private method
@ -28,7 +32,6 @@ describe('Telemetry', () => {
jest.useFakeTimers(); jest.useFakeTimers();
jest.setSystemTime(testDateTime); jest.setSystemTime(testDateTime);
config.set('diagnostics.enabled', true);
config.set('deployment.type', 'n8n-testing'); config.set('deployment.type', 'n8n-testing');
}); });
@ -45,14 +48,7 @@ describe('Telemetry', () => {
const postHog = new PostHogClient(instanceSettings, mock()); const postHog = new PostHogClient(instanceSettings, mock());
await postHog.init(); await postHog.init();
telemetry = new Telemetry( telemetry = new Telemetry(mock(), postHog, mock(), instanceSettings, mock(), globalConfig);
mock(),
postHog,
mock(),
instanceSettings,
mock(),
mock<GlobalConfig>({ logging: { level: 'info', outputs: ['console'] } }),
);
// @ts-expect-error Assigning to private property // @ts-expect-error Assigning to private property
telemetry.rudderStack = mockRudderStack; telemetry.rudderStack = mockRudderStack;
}); });

View file

@ -5,7 +5,6 @@ import { InstanceSettings } from 'n8n-core';
import type { ITelemetryTrackProperties } from 'n8n-workflow'; import type { ITelemetryTrackProperties } from 'n8n-workflow';
import { Container, Service } from 'typedi'; import { Container, Service } from 'typedi';
import config from '@/config';
import { LOWEST_SHUTDOWN_PRIORITY, N8N_VERSION } from '@/constants'; import { LOWEST_SHUTDOWN_PRIORITY, N8N_VERSION } from '@/constants';
import { ProjectRelationRepository } from '@/databases/repositories/project-relation.repository'; import { ProjectRelationRepository } from '@/databases/repositories/project-relation.repository';
import { ProjectRepository } from '@/databases/repositories/project.repository'; import { ProjectRepository } from '@/databases/repositories/project.repository';
@ -54,10 +53,9 @@ export class Telemetry {
) {} ) {}
async init() { async init() {
const enabled = config.getEnv('diagnostics.enabled'); const { enabled, backendConfig } = this.globalConfig.diagnostics;
if (enabled) { if (enabled) {
const conf = config.getEnv('diagnostics.config.backend'); const [key, dataPlaneUrl] = backendConfig.split(';');
const [key, dataPlaneUrl] = conf.split(';');
if (!key || !dataPlaneUrl) { if (!key || !dataPlaneUrl) {
this.logger.warn('Diagnostics backend config is invalid'); this.logger.warn('Diagnostics backend config is invalid');

View file

@ -464,6 +464,11 @@ export async function executeWebhook(
projectId: project?.id, projectId: project?.id,
}; };
// When resuming from a wait node, copy over the pushRef from the execution-data
if (!runData.pushRef) {
runData.pushRef = runExecutionData.pushRef;
}
let responsePromise: IDeferredPromise<IN8nHttpFullResponse> | undefined; let responsePromise: IDeferredPromise<IN8nHttpFullResponse> | undefined;
if (responseMode === 'responseNode') { if (responseMode === 'responseNode') {
responsePromise = createDeferredPromise<IN8nHttpFullResponse>(); responsePromise = createDeferredPromise<IN8nHttpFullResponse>();

View file

@ -36,6 +36,8 @@ import type {
ExecuteWorkflowOptions, ExecuteWorkflowOptions,
IWorkflowExecutionDataProcess, IWorkflowExecutionDataProcess,
EnvProviderState, EnvProviderState,
ExecuteWorkflowData,
RelatedExecution,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { Container } from 'typedi'; import { Container } from 'typedi';
@ -45,11 +47,7 @@ import { CredentialsHelper } from '@/credentials-helper';
import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { ExecutionRepository } from '@/databases/repositories/execution.repository';
import type { AiEventMap, AiEventPayload } from '@/events/maps/ai.event-map'; import type { AiEventMap, AiEventPayload } from '@/events/maps/ai.event-map';
import { ExternalHooks } from '@/external-hooks'; import { ExternalHooks } from '@/external-hooks';
import type { import type { IWorkflowErrorData, UpdateExecutionPayload } from '@/interfaces';
IWorkflowExecuteProcess,
IWorkflowErrorData,
UpdateExecutionPayload,
} from '@/interfaces';
import { NodeTypes } from '@/node-types'; import { NodeTypes } from '@/node-types';
import { Push } from '@/push'; import { Push } from '@/push';
import { WorkflowStatisticsService } from '@/services/workflow-statistics.service'; import { WorkflowStatisticsService } from '@/services/workflow-statistics.service';
@ -310,53 +308,19 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
], ],
workflowExecuteAfter: [ workflowExecuteAfter: [
async function (this: WorkflowHooks, fullRunData: IRun): Promise<void> { async function (this: WorkflowHooks, fullRunData: IRun): Promise<void> {
const { pushRef, executionId, retryOf } = this; const { pushRef, executionId } = this;
if (pushRef === undefined) return;
const { id: workflowId } = this.workflowData; const { id: workflowId } = this.workflowData;
logger.debug('Executing hook (hookFunctionsPush)', { logger.debug('Executing hook (hookFunctionsPush)', {
executionId, executionId,
pushRef, pushRef,
workflowId, workflowId,
}); });
// Push data to session which started the workflow
if (pushRef === undefined) {
return;
}
// Clone the object except the runData. That one is not supposed const pushType =
// to be send. Because that data got send piece by piece after fullRunData.status === 'waiting' ? 'executionWaiting' : 'executionFinished';
// each node which finished executing pushInstance.send(pushType, { executionId }, pushRef);
// Edit: we now DO send the runData to the UI if mode=manual so that it shows the point of crashes
let pushRunData;
if (fullRunData.mode === 'manual') {
pushRunData = fullRunData;
} else {
pushRunData = {
...fullRunData,
data: {
...fullRunData.data,
resultData: {
...fullRunData.data.resultData,
runData: {},
},
},
};
}
// Push data to editor-ui once workflow finished
logger.debug(`Save execution progress to database for execution ID ${executionId} `, {
executionId,
workflowId,
});
// TODO: Look at this again
pushInstance.send(
'executionFinished',
{
executionId,
data: pushRunData,
retryOf,
},
pushRef,
);
}, },
], ],
}; };
@ -468,22 +432,21 @@ function hookFunctionsSave(): IWorkflowExecuteHooks {
(executionStatus === 'success' && !saveSettings.success) || (executionStatus === 'success' && !saveSettings.success) ||
(executionStatus !== 'success' && !saveSettings.error); (executionStatus !== 'success' && !saveSettings.error);
if (shouldNotSave && !fullRunData.waitTill) { if (shouldNotSave && !fullRunData.waitTill && !isManualMode) {
if (!fullRunData.waitTill && !isManualMode) { executeErrorWorkflow(
executeErrorWorkflow( this.workflowData,
this.workflowData, fullRunData,
fullRunData, this.mode,
this.mode, this.executionId,
this.executionId, this.retryOf,
this.retryOf, );
);
await Container.get(ExecutionRepository).hardDelete({
workflowId: this.workflowData.id,
executionId: this.executionId,
});
return; await Container.get(ExecutionRepository).hardDelete({
} workflowId: this.workflowData.id,
executionId: this.executionId,
});
return;
} }
// Although it is treated as IWorkflowBase here, it's being instantiated elsewhere with properties that may be sensitive // Although it is treated as IWorkflowBase here, it's being instantiated elsewhere with properties that may be sensitive
@ -686,6 +649,7 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks {
export async function getRunData( export async function getRunData(
workflowData: IWorkflowBase, workflowData: IWorkflowBase,
inputData?: INodeExecutionData[], inputData?: INodeExecutionData[],
parentExecution?: RelatedExecution,
): Promise<IWorkflowExecutionDataProcess> { ): Promise<IWorkflowExecutionDataProcess> {
const mode = 'integrated'; const mode = 'integrated';
@ -705,6 +669,7 @@ export async function getRunData(
data: { data: {
main: [inputData], main: [inputData],
}, },
metadata: { parentExecution },
source: null, source: null,
}); });
@ -776,7 +741,41 @@ export async function executeWorkflow(
workflowInfo: IExecuteWorkflowInfo, workflowInfo: IExecuteWorkflowInfo,
additionalData: IWorkflowExecuteAdditionalData, additionalData: IWorkflowExecuteAdditionalData,
options: ExecuteWorkflowOptions, options: ExecuteWorkflowOptions,
): Promise<Array<INodeExecutionData[] | null> | IWorkflowExecuteProcess> { ): Promise<ExecuteWorkflowData> {
const activeExecutions = Container.get(ActiveExecutions);
const workflowData =
options.loadedWorkflowData ??
(await getWorkflowData(workflowInfo, options.parentWorkflowId, options.parentWorkflowSettings));
const runData =
options.loadedRunData ??
(await getRunData(workflowData, options.inputData, options.parentExecution));
const executionId = await activeExecutions.add(runData);
const executionPromise = startExecution(
additionalData,
options,
executionId,
runData,
workflowData,
);
if (options.doNotWaitToFinish) {
return { executionId, data: [null] };
}
return await executionPromise;
}
async function startExecution(
additionalData: IWorkflowExecuteAdditionalData,
options: ExecuteWorkflowOptions,
executionId: string,
runData: IWorkflowExecutionDataProcess,
workflowData: IWorkflowBase,
): Promise<ExecuteWorkflowData> {
const externalHooks = Container.get(ExternalHooks); const externalHooks = Container.get(ExternalHooks);
await externalHooks.init(); await externalHooks.init();
@ -785,10 +784,6 @@ export async function executeWorkflow(
const eventService = Container.get(EventService); const eventService = Container.get(EventService);
const executionRepository = Container.get(ExecutionRepository); const executionRepository = Container.get(ExecutionRepository);
const workflowData =
options.loadedWorkflowData ??
(await getWorkflowData(workflowInfo, options.parentWorkflowId, options.parentWorkflowSettings));
const workflowName = workflowData ? workflowData.name : undefined; const workflowName = workflowData ? workflowData.name : undefined;
const workflow = new Workflow({ const workflow = new Workflow({
id: workflowData.id, id: workflowData.id,
@ -801,10 +796,6 @@ export async function executeWorkflow(
settings: workflowData.settings, settings: workflowData.settings,
}); });
const runData = options.loadedRunData ?? (await getRunData(workflowData, options.inputData));
const executionId = await activeExecutions.add(runData);
/** /**
* A subworkflow execution in queue mode is not enqueued, but rather runs in the * A subworkflow execution in queue mode is not enqueued, but rather runs in the
* same worker process as the parent execution. Hence ensure the subworkflow * same worker process as the parent execution. Hence ensure the subworkflow
@ -926,7 +917,10 @@ export async function executeWorkflow(
activeExecutions.finalizeExecution(executionId, data); activeExecutions.finalizeExecution(executionId, data);
const returnData = WorkflowHelpers.getDataLastExecutedNodeData(data); const returnData = WorkflowHelpers.getDataLastExecutedNodeData(data);
return returnData!.data!.main; return {
executionId,
data: returnData!.data!.main,
};
} }
activeExecutions.finalizeExecution(executionId, data); activeExecutions.finalizeExecution(executionId, data);
@ -1117,6 +1111,9 @@ export function getWorkflowHooksWorkerMain(
hookFunctions.nodeExecuteAfter = []; hookFunctions.nodeExecuteAfter = [];
hookFunctions.workflowExecuteAfter = [ hookFunctions.workflowExecuteAfter = [
async function (this: WorkflowHooks, fullRunData: IRun): Promise<void> { async function (this: WorkflowHooks, fullRunData: IRun): Promise<void> {
// Don't delete executions before they are finished
if (!fullRunData.finished) return;
const executionStatus = determineFinalExecutionStatus(fullRunData); const executionStatus = determineFinalExecutionStatus(fullRunData);
const saveSettings = toSaveSettings(this.workflowData.settings); const saveSettings = toSaveSettings(this.workflowData.settings);

View file

@ -1,3 +0,0 @@
import { TIME } from '@/constants';
export const WORKFLOW_HISTORY_PRUNE_INTERVAL = 1 * TIME.HOUR;

View file

@ -1,9 +1,9 @@
import { DateTime } from 'luxon'; import { DateTime } from 'luxon';
import { Service } from 'typedi'; import { Service } from 'typedi';
import { Time } from '@/constants';
import { WorkflowHistoryRepository } from '@/databases/repositories/workflow-history.repository'; import { WorkflowHistoryRepository } from '@/databases/repositories/workflow-history.repository';
import { WORKFLOW_HISTORY_PRUNE_INTERVAL } from './constants';
import { import {
getWorkflowHistoryPruneTime, getWorkflowHistoryPruneTime,
isWorkflowHistoryEnabled, isWorkflowHistoryEnabled,
@ -20,7 +20,7 @@ export class WorkflowHistoryManager {
clearInterval(this.pruneTimer); clearInterval(this.pruneTimer);
} }
this.pruneTimer = setInterval(async () => await this.prune(), WORKFLOW_HISTORY_PRUNE_INTERVAL); this.pruneTimer = setInterval(async () => await this.prune(), 1 * Time.hours.toMilliseconds);
} }
shutdown() { shutdown() {

View file

@ -740,14 +740,6 @@
} }
return; return;
}).then(() => {
window.addEventListener('storage', function(event) {
if (event.key === 'n8n_redirect_to_next_form_test_page' && event.newValue) {
const newUrl = event.newValue;
localStorage.removeItem('n8n_redirect_to_next_form_test_page');
window.location.replace(newUrl);
}
});
}) })
.catch(function (error) { .catch(function (error) {
console.error('Error:', error); console.error('Error:', error);

View file

@ -4,7 +4,7 @@ import { BinaryDataService, InstanceSettings } from 'n8n-core';
import type { ExecutionStatus } from 'n8n-workflow'; import type { ExecutionStatus } from 'n8n-workflow';
import Container from 'typedi'; import Container from 'typedi';
import { TIME } from '@/constants'; import { Time } from '@/constants';
import type { ExecutionEntity } from '@/databases/entities/execution-entity'; import type { ExecutionEntity } from '@/databases/entities/execution-entity';
import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
import { ExecutionRepository } from '@/databases/repositories/execution.repository'; import { ExecutionRepository } from '@/databases/repositories/execution.repository';
@ -25,7 +25,7 @@ describe('softDeleteOnPruningCycle()', () => {
instanceSettings.markAsLeader(); instanceSettings.markAsLeader();
const now = new Date(); const now = new Date();
const yesterday = new Date(Date.now() - TIME.DAY); const yesterday = new Date(Date.now() - 1 * Time.days.toMilliseconds);
let workflow: WorkflowEntity; let workflow: WorkflowEntity;
let pruningConfig: PruningConfig; let pruningConfig: PruningConfig;

View file

@ -1,9 +1,9 @@
import { GlobalConfig } from '@n8n/config';
import { mock } from 'jest-mock-extended'; import { mock } from 'jest-mock-extended';
import { NodeConnectionType } from 'n8n-workflow'; import { NodeConnectionType } from 'n8n-workflow';
import Container from 'typedi'; import Container from 'typedi';
import { v4 as uuid } from 'uuid'; import { v4 as uuid } from 'uuid';
import config from '@/config';
import { WorkflowRepository } from '@/databases/repositories/workflow.repository'; import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import { generateNanoId } from '@/databases/utils/generators'; import { generateNanoId } from '@/databases/utils/generators';
import { INSTANCE_REPORT, WEBHOOK_VALIDATOR_NODE_TYPES } from '@/security-audit/constants'; import { INSTANCE_REPORT, WEBHOOK_VALIDATOR_NODE_TYPES } from '@/security-audit/constants';
@ -239,8 +239,7 @@ test('should not report outdated instance when up to date', async () => {
}); });
test('should report security settings', async () => { test('should report security settings', async () => {
config.set('diagnostics.enabled', true); Container.get(GlobalConfig).diagnostics.enabled = true;
const testAudit = await securityAuditService.run(['instance']); const testAudit = await securityAuditService.run(['instance']);
const section = getRiskSection( const section = getRiskSection(

View file

@ -20,7 +20,8 @@
"lint": "eslint . --quiet", "lint": "eslint . --quiet",
"lintfix": "eslint . --fix", "lintfix": "eslint . --fix",
"watch": "tsc-watch -p tsconfig.build.json --onCompilationComplete \"tsc-alias -p tsconfig.build.json\"", "watch": "tsc-watch -p tsconfig.build.json --onCompilationComplete \"tsc-alias -p tsconfig.build.json\"",
"test": "jest" "test": "jest",
"test:dev": "jest --watch"
}, },
"files": [ "files": [
"dist", "dist",

View file

@ -39,6 +39,7 @@ import type {
BinaryHelperFunctions, BinaryHelperFunctions,
CloseFunction, CloseFunction,
ContextType, ContextType,
ExecuteWorkflowData,
FieldType, FieldType,
FileSystemHelperFunctions, FileSystemHelperFunctions,
FunctionsBase, FunctionsBase,
@ -78,6 +79,7 @@ import type {
IRunExecutionData, IRunExecutionData,
ITaskData, ITaskData,
ITaskDataConnections, ITaskDataConnections,
ITaskMetadata,
ITriggerFunctions, ITriggerFunctions,
IWebhookData, IWebhookData,
IWebhookDescription, IWebhookDescription,
@ -109,6 +111,7 @@ import type {
ISupplyDataFunctions, ISupplyDataFunctions,
WebhookType, WebhookType,
SchedulingFunctions, SchedulingFunctions,
RelatedExecution,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { import {
NodeConnectionType, NodeConnectionType,
@ -2721,6 +2724,7 @@ const addExecutionDataFunctions = async (
sourceNodeName: string, sourceNodeName: string,
sourceNodeRunIndex: number, sourceNodeRunIndex: number,
currentNodeRunIndex: number, currentNodeRunIndex: number,
metadata?: ITaskMetadata,
): Promise<void> => { ): Promise<void> => {
if (connectionType === NodeConnectionType.Main) { if (connectionType === NodeConnectionType.Main) {
throw new ApplicationError('Setting type is not supported for main connection', { throw new ApplicationError('Setting type is not supported for main connection', {
@ -2746,6 +2750,7 @@ const addExecutionDataFunctions = async (
if (taskData === undefined) { if (taskData === undefined) {
return; return;
} }
taskData.metadata = metadata;
} }
taskData = taskData!; taskData = taskData!;
@ -3622,6 +3627,12 @@ export function getExecuteFunctions(
itemIndex, itemIndex,
), ),
getExecuteData: () => executeData, getExecuteData: () => executeData,
setMetadata: (metadata: ITaskMetadata): void => {
executeData.metadata = {
...(executeData.metadata ?? {}),
...metadata,
};
},
continueOnFail: () => { continueOnFail: () => {
return continueOnFail(node); return continueOnFail(node);
}, },
@ -3643,23 +3654,28 @@ export function getExecuteFunctions(
workflowInfo: IExecuteWorkflowInfo, workflowInfo: IExecuteWorkflowInfo,
inputData?: INodeExecutionData[], inputData?: INodeExecutionData[],
parentCallbackManager?: CallbackManager, parentCallbackManager?: CallbackManager,
): Promise<any> { options?: {
doNotWaitToFinish?: boolean;
parentExecution?: RelatedExecution;
},
): Promise<ExecuteWorkflowData> {
return await additionalData return await additionalData
.executeWorkflow(workflowInfo, additionalData, { .executeWorkflow(workflowInfo, additionalData, {
...options,
parentWorkflowId: workflow.id?.toString(), parentWorkflowId: workflow.id?.toString(),
inputData, inputData,
parentWorkflowSettings: workflow.settings, parentWorkflowSettings: workflow.settings,
node, node,
parentCallbackManager, parentCallbackManager,
}) })
.then( .then(async (result) => {
async (result) => const data = await Container.get(BinaryDataService).duplicateBinaryData(
await Container.get(BinaryDataService).duplicateBinaryData( workflow.id,
workflow.id, additionalData.executionId!,
additionalData.executionId!, result.data,
result, );
), return { ...result, data };
); });
}, },
getContext(type: ContextType): IContextObject { getContext(type: ContextType): IContextObject {
return NodeHelpers.getContext(runExecutionData, type, node); return NodeHelpers.getContext(runExecutionData, type, node);
@ -3853,6 +3869,7 @@ export function getExecuteFunctions(
connectionType: NodeConnectionType, connectionType: NodeConnectionType,
currentNodeRunIndex: number, currentNodeRunIndex: number,
data: INodeExecutionData[][] | ExecutionBaseError, data: INodeExecutionData[][] | ExecutionBaseError,
metadata?: ITaskMetadata,
): void { ): void {
addExecutionDataFunctions( addExecutionDataFunctions(
'output', 'output',
@ -3864,6 +3881,7 @@ export function getExecuteFunctions(
node.name, node.name,
runIndex, runIndex,
currentNodeRunIndex, currentNodeRunIndex,
metadata,
).catch((error) => { ).catch((error) => {
Logger.warn( Logger.warn(
`There was a problem logging output data of node "${this.getNode().name}": ${ `There was a problem logging output data of node "${this.getNode().name}": ${
@ -3972,7 +3990,11 @@ export function getSupplyDataFunctions(
workflowInfo: IExecuteWorkflowInfo, workflowInfo: IExecuteWorkflowInfo,
inputData?: INodeExecutionData[], inputData?: INodeExecutionData[],
parentCallbackManager?: CallbackManager, parentCallbackManager?: CallbackManager,
) => options?: {
doNotWaitToFinish?: boolean;
parentExecution?: RelatedExecution;
},
): Promise<ExecuteWorkflowData> =>
await additionalData await additionalData
.executeWorkflow(workflowInfo, additionalData, { .executeWorkflow(workflowInfo, additionalData, {
parentWorkflowId: workflow.id?.toString(), parentWorkflowId: workflow.id?.toString(),
@ -3980,15 +4002,16 @@ export function getSupplyDataFunctions(
parentWorkflowSettings: workflow.settings, parentWorkflowSettings: workflow.settings,
node, node,
parentCallbackManager, parentCallbackManager,
...options,
}) })
.then( .then(async (result) => {
async (result) => const data = await Container.get(BinaryDataService).duplicateBinaryData(
await Container.get(BinaryDataService).duplicateBinaryData( workflow.id,
workflow.id, additionalData.executionId!,
additionalData.executionId!, result.data,
result, );
), return { ...result, data };
), }),
getNodeOutputs() { getNodeOutputs() {
const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion); const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion);
return NodeHelpers.getNodeOutputs(workflow, node, nodeType.description).map((output) => { return NodeHelpers.getNodeOutputs(workflow, node, nodeType.description).map((output) => {
@ -4143,6 +4166,7 @@ export function getSupplyDataFunctions(
connectionType: NodeConnectionType, connectionType: NodeConnectionType,
currentNodeRunIndex: number, currentNodeRunIndex: number,
data: INodeExecutionData[][], data: INodeExecutionData[][],
metadata?: ITaskMetadata,
): void { ): void {
addExecutionDataFunctions( addExecutionDataFunctions(
'output', 'output',
@ -4154,6 +4178,7 @@ export function getSupplyDataFunctions(
node.name, node.name,
runIndex, runIndex,
currentNodeRunIndex, currentNodeRunIndex,
metadata,
).catch((error) => { ).catch((error) => {
Logger.warn( Logger.warn(
`There was a problem logging output data of node "${this.getNode().name}": ${ `There was a problem logging output data of node "${this.getNode().name}": ${

View file

@ -408,7 +408,10 @@ export class WorkflowExecute {
let metaRunData: ITaskMetadata; let metaRunData: ITaskMetadata;
for (const nodeName of Object.keys(metadata)) { for (const nodeName of Object.keys(metadata)) {
for ([index, metaRunData] of metadata[nodeName].entries()) { for ([index, metaRunData] of metadata[nodeName].entries()) {
runData[nodeName][index].metadata = metaRunData; runData[nodeName][index].metadata = {
...(runData[nodeName][index].metadata ?? {}),
...metaRunData,
};
} }
} }
} }
@ -913,7 +916,6 @@ export class WorkflowExecute {
let nodeSuccessData: INodeExecutionData[][] | null | undefined; let nodeSuccessData: INodeExecutionData[][] | null | undefined;
let runIndex: number; let runIndex: number;
let startTime: number; let startTime: number;
let taskData: ITaskData;
if (this.runExecutionData.startData === undefined) { if (this.runExecutionData.startData === undefined) {
this.runExecutionData.startData = {}; this.runExecutionData.startData = {};
@ -1443,12 +1445,13 @@ export class WorkflowExecute {
this.runExecutionData.resultData.runData[executionNode.name] = []; this.runExecutionData.resultData.runData[executionNode.name] = [];
} }
taskData = { const taskData: ITaskData = {
hints: executionHints, hints: executionHints,
startTime, startTime,
executionTime: new Date().getTime() - startTime, executionTime: new Date().getTime() - startTime,
source: !executionData.source ? [] : executionData.source.main, source: !executionData.source ? [] : executionData.source.main,
executionStatus: 'success', metadata: executionData.metadata,
executionStatus: this.runExecutionData.waitTill ? 'waiting' : 'success',
}; };
if (executionError !== undefined) { if (executionError !== undefined) {

View file

@ -17,6 +17,7 @@ import type {
IContextObject, IContextObject,
ICredentialDataDecryptedObject, ICredentialDataDecryptedObject,
ISourceData, ISourceData,
ITaskMetadata,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { ApplicationError, NodeHelpers } from 'n8n-workflow'; import { ApplicationError, NodeHelpers } from 'n8n-workflow';
@ -298,4 +299,33 @@ describe('ExecuteSingleContext', () => {
}); });
}); });
}); });
describe('setMetadata', () => {
it('sets metadata on execution data', () => {
const context = new ExecuteSingleContext(
workflow,
node,
additionalData,
mode,
runExecutionData,
runIndex,
connectionInputData,
inputData,
itemIndex,
executeData,
abortSignal,
);
const metadata: ITaskMetadata = {
subExecution: {
workflowId: '123',
executionId: 'xyz',
},
};
expect(context.getExecuteData().metadata?.subExecution).toEqual(undefined);
context.setMetadata(metadata);
expect(context.getExecuteData().metadata?.subExecution).toEqual(metadata.subExecution);
});
});
}); });

View file

@ -13,6 +13,7 @@ import type {
ContextType, ContextType,
AiEvent, AiEvent,
ISourceData, ISourceData,
ITaskMetadata,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { import {
ApplicationError, ApplicationError,
@ -85,6 +86,13 @@ export class ExecuteSingleContext extends NodeExecutionContext implements IExecu
this.abortSignal?.addEventListener('abort', fn); this.abortSignal?.addEventListener('abort', fn);
} }
setMetadata(metadata: ITaskMetadata): void {
this.executeData.metadata = {
...(this.executeData.metadata ?? {}),
...metadata,
};
}
continueOnFail() { continueOnFail() {
return continueOnFail(this.node); return continueOnFail(this.node);
} }

View file

@ -56,7 +56,7 @@
"esprima-next": "5.8.4", "esprima-next": "5.8.4",
"fast-json-stable-stringify": "^2.1.0", "fast-json-stable-stringify": "^2.1.0",
"file-saver": "^2.0.2", "file-saver": "^2.0.2",
"flatted": "^3.2.4", "flatted": "catalog:",
"highlight.js": "catalog:frontend", "highlight.js": "catalog:frontend",
"humanize-duration": "^3.27.2", "humanize-duration": "^3.27.2",
"jsonpath": "^1.1.1", "jsonpath": "^1.1.1",

View file

@ -182,6 +182,10 @@ export interface IAiDataContent {
metadata: { metadata: {
executionTime: number; executionTime: number;
startTime: number; startTime: number;
subExecution?: {
workflowId: string;
executionId: string;
};
}; };
} }
@ -202,6 +206,10 @@ export interface ITableData {
columns: string[]; columns: string[];
data: GenericValue[][]; data: GenericValue[][];
hasJson: { [key: string]: boolean }; hasJson: { [key: string]: boolean };
metadata: {
hasExecutionIds: boolean;
data: Array<INodeExecutionData['metadata'] | undefined>;
};
} }
// Simple version of n8n-workflow.Workflow // Simple version of n8n-workflow.Workflow
@ -392,15 +400,10 @@ export interface IExecutionsListResponse {
export interface IExecutionsCurrentSummaryExtended { export interface IExecutionsCurrentSummaryExtended {
id: string; id: string;
finished?: boolean;
status: ExecutionStatus; status: ExecutionStatus;
mode: WorkflowExecuteMode; mode: WorkflowExecuteMode;
retryOf?: string | null;
retrySuccessId?: string | null;
startedAt: Date; startedAt: Date;
stoppedAt?: Date;
workflowId: string; workflowId: string;
workflowName?: string;
} }
export interface IExecutionsStopData { export interface IExecutionsStopData {
@ -839,14 +842,12 @@ export interface IUsedCredential {
} }
export interface WorkflowsState { export interface WorkflowsState {
activeExecutions: IExecutionsCurrentSummaryExtended[];
activeWorkflows: string[]; activeWorkflows: string[];
activeWorkflowExecution: ExecutionSummary | null; activeWorkflowExecution: ExecutionSummary | null;
currentWorkflowExecutions: ExecutionSummary[]; currentWorkflowExecutions: ExecutionSummary[];
activeExecutionId: string | null; activeExecutionId: string | null;
executingNode: string[]; executingNode: string[];
executionWaitingForWebhook: boolean; executionWaitingForWebhook: boolean;
finishedExecutionsCount: number;
nodeMetadata: NodeMetadataMap; nodeMetadata: NodeMetadataMap;
subWorkflowExecutionError: Error | null; subWorkflowExecutionError: Error | null;
usedCredentials: Record<string, IUsedCredential>; usedCredentials: Record<string, IUsedCredential>;
@ -1083,11 +1084,6 @@ export interface IVersionsState {
currentVersion: IVersion | undefined; currentVersion: IVersion | undefined;
} }
export interface IWorkflowsState {
currentWorkflowExecutions: ExecutionSummary[];
activeWorkflowExecution: ExecutionSummary | null;
finishedExecutionsCount: number;
}
export interface IWorkflowsMap { export interface IWorkflowsMap {
[name: string]: IWorkflowDb; [name: string]: IWorkflowDb;
} }

View file

@ -1,6 +1,7 @@
import type { FrontendSettings } from '@n8n/api-types'; import type { FrontendSettings } from '@n8n/api-types';
export const defaultSettings: FrontendSettings = { export const defaultSettings: FrontendSettings = {
inE2ETests: false,
databaseType: 'sqlite', databaseType: 'sqlite',
isDocker: false, isDocker: false,
pruning: { pruning: {

Some files were not shown because too many files have changed in this diff Show more