Merge branch 'master' into node-1608-credential-parameters-tech-debt-project

Elias Meire 2024-10-08 09:57:23 +02:00
commit 1853069724
497 changed files with 15834 additions and 6689 deletions

View file

@ -1,3 +1,55 @@
## [1.62.1](https://github.com/n8n-io/n8n/compare/n8n@1.61.0...n8n@1.62.1) (2024-10-02)
### Bug Fixes
* **AI Agent Node:** Fix output parsing and empty tool input handling in AI Agent node ([#10970](https://github.com/n8n-io/n8n/issues/10970)) ([3a65bdc](https://github.com/n8n-io/n8n/commit/3a65bdc1f522932d463b4da0e67d29076887d06c))
* **API:** Fix workflow project transfer ([#10651](https://github.com/n8n-io/n8n/issues/10651)) ([5f89e3a](https://github.com/n8n-io/n8n/commit/5f89e3a01c1bbb3589ff0464fd5bc991426f55dc))
* **AwsS3 Node:** Fix search only using first input parameters ([#10998](https://github.com/n8n-io/n8n/issues/10998)) ([846cfde](https://github.com/n8n-io/n8n/commit/846cfde8dcaf7bf80f0a4ca7d65fc2a7b61d0e23))
* **Chat Trigger Node:** Fix Allowed Origins parameter ([#11011](https://github.com/n8n-io/n8n/issues/11011)) ([b5f4afe](https://github.com/n8n-io/n8n/commit/b5f4afe12ec77f527080a4b7f812e12f9f73f8df))
* **core:** Fix ownerless project case in statistics service ([#11051](https://github.com/n8n-io/n8n/issues/11051)) ([bdaadf1](https://github.com/n8n-io/n8n/commit/bdaadf10e058e2c0b1141289189d6526c030a2ca))
* **core:** Handle Redis disconnects gracefully ([#11007](https://github.com/n8n-io/n8n/issues/11007)) ([cd91648](https://github.com/n8n-io/n8n/commit/cd916480c2d2b55f2215c72309dc432340fc3f30))
* **core:** Prevent backend from loading duplicate copies of nodes packages ([#10979](https://github.com/n8n-io/n8n/issues/10979)) ([4584f22](https://github.com/n8n-io/n8n/commit/4584f22a9b16883779d8555cda309fd8bd113f6c))
* **core:** Upgrade @n8n/typeorm to address a rare mutex release issue ([#10993](https://github.com/n8n-io/n8n/issues/10993)) ([2af0fbf](https://github.com/n8n-io/n8n/commit/2af0fbf52f0b404697f5148f81ad0035c9ffb6b9))
* **editor:** Allow resources to move between personal and team projects ([#10683](https://github.com/n8n-io/n8n/issues/10683)) ([136d491](https://github.com/n8n-io/n8n/commit/136d49132567558b7d27069c857c0e0bfee70ce2))
* **editor:** Color scheme for markdown code blocks in dark mode ([#11008](https://github.com/n8n-io/n8n/issues/11008)) ([b20d2eb](https://github.com/n8n-io/n8n/commit/b20d2eb403f71fe1dc21c92df118adcebef51ffe))
* **editor:** Fix filter execution by "Queued" ([#10987](https://github.com/n8n-io/n8n/issues/10987)) ([819d20f](https://github.com/n8n-io/n8n/commit/819d20fa2eee314b88a7ce1c4db632afac514704))
* **editor:** Fix performance issue in credentials list ([#10988](https://github.com/n8n-io/n8n/issues/10988)) ([7073ec6](https://github.com/n8n-io/n8n/commit/7073ec6fe5384cc8c50dcb242212999a1fbc9041))
* **editor:** Fix schema view pill highlighting ([#10936](https://github.com/n8n-io/n8n/issues/10936)) ([1b973dc](https://github.com/n8n-io/n8n/commit/1b973dcd8dbce598e6ada490fd48fad52f7b4f3a))
* **editor:** Fix workflow executions list page redirection ([#10981](https://github.com/n8n-io/n8n/issues/10981)) ([fe7d060](https://github.com/n8n-io/n8n/commit/fe7d0605681dc963f5e5d1607f9d40c5173e0f9f))
* **editor:** Format action names properly when action is not defined ([#11030](https://github.com/n8n-io/n8n/issues/11030)) ([9c43fb3](https://github.com/n8n-io/n8n/commit/9c43fb301d1ccb82e42f46833e19587289803cd3))
* **Elasticsearch Node:** Fix issue with self signed certificates not working ([#10954](https://github.com/n8n-io/n8n/issues/10954)) ([79622b5](https://github.com/n8n-io/n8n/commit/79622b5f267f2a4a53f3eb48e228939d6e3a9caa))
* **Facebook Lead Ads Trigger Node:** Pagination fix in RLC ([#10956](https://github.com/n8n-io/n8n/issues/10956)) ([6322372](https://github.com/n8n-io/n8n/commit/632237261087ada0177b67922f9f48ca02ef1d9e))
* **Github Document Loader Node:** Pass through apiUrl from credentials & fix log output ([#11049](https://github.com/n8n-io/n8n/issues/11049)) ([a7af981](https://github.com/n8n-io/n8n/commit/a7af98183c47a5e215869c8269729b0fb2f318b5))
* **Google Sheets Node:** Updating on row_number using automatic matching ([#10940](https://github.com/n8n-io/n8n/issues/10940)) ([ed91495](https://github.com/n8n-io/n8n/commit/ed91495ebc1e09b89533ffef4b775eaa0139f365))
* **HTTP Request Tool Node:** Remove default user agent header ([#10971](https://github.com/n8n-io/n8n/issues/10971)) ([5a99e93](https://github.com/n8n-io/n8n/commit/5a99e93f8d2c66d7dbcef382478badd63bc4a0b5))
* **Postgres Node:** Falsy query parameters ignored ([#10960](https://github.com/n8n-io/n8n/issues/10960)) ([4a63cff](https://github.com/n8n-io/n8n/commit/4a63cff5ec722c810e3ff2bd7b0bb1e32f7f403b))
* **Respond to Webhook Node:** Node does not work with Wait node ([#10992](https://github.com/n8n-io/n8n/issues/10992)) ([2df5a5b](https://github.com/n8n-io/n8n/commit/2df5a5b649f8ba3b747782d6d5045820aa74955d))
* **RSS Feed Trigger Node:** Fix regression on missing timestamps ([#10991](https://github.com/n8n-io/n8n/issues/10991)) ([d2bc076](https://github.com/n8n-io/n8n/commit/d2bc0760e2b5c977fcc683f0a0281f099a9c538d))
* **Supabase Node:** Fix issue with delete not always working ([#10952](https://github.com/n8n-io/n8n/issues/10952)) ([1944b46](https://github.com/n8n-io/n8n/commit/1944b46fd472bb59552b5fbf7783168a622a2bd2))
* **Text Classifier Node:** Default system prompt template ([#11018](https://github.com/n8n-io/n8n/issues/11018)) ([77fec19](https://github.com/n8n-io/n8n/commit/77fec195d92e0fe23c60552a72e8c030cf7e5e5c))
* **Todoist Node:** Fix listSearch filter bug in Todoist Node ([#10989](https://github.com/n8n-io/n8n/issues/10989)) ([c4b3272](https://github.com/n8n-io/n8n/commit/c4b327248d7aa1352e8d6acec5627ff406aea3d4))
* **Todoist Node:** Make Section Name optional in Move Task operation ([#10732](https://github.com/n8n-io/n8n/issues/10732)) ([799006a](https://github.com/n8n-io/n8n/commit/799006a3cce6abe210469c839ae392d0c1aec486))
### Features
* Add more context to support chat ([#11014](https://github.com/n8n-io/n8n/issues/11014)) ([8a30f92](https://github.com/n8n-io/n8n/commit/8a30f92156d6a4fe73113bd3cdfb751b8c9ce4b4))
* Add Sysdig API credentials for SecOps ([#7033](https://github.com/n8n-io/n8n/issues/7033)) ([a8d1a1e](https://github.com/n8n-io/n8n/commit/a8d1a1ea854fb2c69643b0a5738440b389121ca3))
* **core:** Filter executions by project ID in internal API ([#10976](https://github.com/n8n-io/n8n/issues/10976)) ([06d749f](https://github.com/n8n-io/n8n/commit/06d749ffa7ced503141d8b07e22c47d971eb1623))
* **core:** Implement Dynamic Parameters within regular nodes used as AI Tools ([#10862](https://github.com/n8n-io/n8n/issues/10862)) ([ef5b7cf](https://github.com/n8n-io/n8n/commit/ef5b7cf9b77b653111eb5b1d9de8116c9f6b9f92))
* **editor:** Do not show error for remote options when credentials aren't specified ([#10944](https://github.com/n8n-io/n8n/issues/10944)) ([9fc3699](https://github.com/n8n-io/n8n/commit/9fc3699beb0c150909889ed17740a5cd9e0461c3))
* **editor:** Enable drag and drop in code editors (Code/SQL/HTML) ([#10888](https://github.com/n8n-io/n8n/issues/10888)) ([af9e227](https://github.com/n8n-io/n8n/commit/af9e227ad4848995b9d82c72f814dbf9d1de506f))
* **editor:** Overhaul document title management ([#10999](https://github.com/n8n-io/n8n/issues/10999)) ([bb28956](https://github.com/n8n-io/n8n/commit/bb2895689fb006897bc244271aca6f0bfa1839b9))
* **editor:** Remove execution annotation feature flag ([#11020](https://github.com/n8n-io/n8n/issues/11020)) ([e7199db](https://github.com/n8n-io/n8n/commit/e7199dbfccdbdf1c4273f916e3006ca610c230e9))
* **editor:** Support node-creator actions for vector store nodes ([#11032](https://github.com/n8n-io/n8n/issues/11032)) ([72b70d9](https://github.com/n8n-io/n8n/commit/72b70d9d98daeba654baf6785ff1ae234c73c977))
* **Google BigQuery Node:** Return numeric values as integers ([#10943](https://github.com/n8n-io/n8n/issues/10943)) ([d7c1d24](https://github.com/n8n-io/n8n/commit/d7c1d24f74648740b2f425640909037ba06c5030))
* **Invoice Ninja Node:** Add more query params to getAll requests ([#9238](https://github.com/n8n-io/n8n/issues/9238)) ([50b7238](https://github.com/n8n-io/n8n/commit/50b723836e70bbe405594f690b73057f9c33fbe4))
* **Iterable Node:** Add support for EDC and USDC selection ([#10908](https://github.com/n8n-io/n8n/issues/10908)) ([0ca9c07](https://github.com/n8n-io/n8n/commit/0ca9c076ca51d313392e45c3b013f2e83aaea843))
* **Question and Answer Chain Node:** Customize question and answer system prompt ([#10385](https://github.com/n8n-io/n8n/issues/10385)) ([08a27b3](https://github.com/n8n-io/n8n/commit/08a27b3148aac2282f64339ddc33ac7c90835d84))
# [1.61.0](https://github.com/n8n-io/n8n/compare/n8n@1.60.0...n8n@1.61.0) (2024-09-25)

View file

@ -32,8 +32,6 @@ export const addProjectMember = (email: string, role?: string) => {
}
};
export const getResourceMoveModal = () => cy.getByTestId('project-move-resource-modal');
export const getResourceMoveConfirmModal = () =>
cy.getByTestId('project-move-resource-confirm-modal');
export const getProjectMoveSelect = () => cy.getByTestId('project-move-resource-modal-select');
export function createProject(name: string) {

View file

@ -144,6 +144,12 @@ export function addToolNodeToParent(nodeName: string, parentNodeName: string) {
export function addOutputParserNodeToParent(nodeName: string, parentNodeName: string) {
addSupplementalNodeToParent(nodeName, 'ai_outputParser', parentNodeName);
}
export function addVectorStoreNodeToParent(nodeName: string, parentNodeName: string) {
addSupplementalNodeToParent(nodeName, 'ai_vectorStore', parentNodeName);
}
export function addRetrieverNodeToParent(nodeName: string, parentNodeName: string) {
addSupplementalNodeToParent(nodeName, 'ai_retriever', parentNodeName);
}
export function clickExecuteWorkflowButton() {
getExecuteWorkflowButton().click();

View file

@ -73,4 +73,28 @@ describe('Workflows', () => {
WorkflowsPage.getters.newWorkflowButtonCard().should('be.visible');
});
it('should respect tag querystring filter when listing workflows', () => {
WorkflowsPage.getters.newWorkflowButtonCard().click();
cy.createFixtureWorkflow('Test_workflow_2.json', getUniqueWorkflowName('My New Workflow'));
cy.visit(WorkflowsPage.url);
WorkflowsPage.getters.createWorkflowButton().click();
cy.createFixtureWorkflow('Test_workflow_1.json', 'Empty State Card Workflow');
cy.visit(WorkflowsPage.url);
WorkflowsPage.getters.workflowFilterButton().click();
WorkflowsPage.getters.workflowTagsDropdown().click();
WorkflowsPage.getters.workflowTagItem('some-tag-1').click();
cy.reload();
WorkflowsPage.getters.workflowCards().should('have.length', 1);
});
});

View file

@ -65,7 +65,7 @@ describe('Resource Locator', () => {
});
it('should show appropriate errors when search filter is required', () => {
workflowPage.actions.addNodeToCanvas('Github', true, true, 'On Pull Request');
workflowPage.actions.addNodeToCanvas('Github', true, true, 'On pull request');
ndv.getters.resourceLocator('owner').should('be.visible');
ndv.getters.resourceLocatorInput('owner').click();
ndv.getters.resourceLocatorErrorMessage().should('contain', NO_CREDENTIALS_MESSAGE);

View file

@ -1,5 +1,11 @@
import * as projects from '../composables/projects';
import { INSTANCE_MEMBERS, MANUAL_TRIGGER_NODE_NAME, NOTION_NODE_NAME } from '../constants';
import {
INSTANCE_ADMIN,
INSTANCE_MEMBERS,
INSTANCE_OWNER,
MANUAL_TRIGGER_NODE_NAME,
NOTION_NODE_NAME,
} from '../constants';
import {
WorkflowsPage,
WorkflowPage,
@ -481,44 +487,15 @@ describe('Projects', { disableAutoLogin: true }, () => {
projects
.getResourceMoveModal()
.should('be.visible')
.find('button:contains("Next")')
.find('button:contains("Move workflow")')
.should('be.disabled');
projects.getProjectMoveSelect().click();
getVisibleSelect()
.find('li')
.should('have.length', 2)
.first()
.should('contain.text', 'Project 1')
.click();
projects.getResourceMoveModal().find('button:contains("Next")').click();
projects
.getResourceMoveConfirmModal()
.should('be.visible')
.find('button:contains("Confirm")')
.should('be.disabled');
projects
.getResourceMoveConfirmModal()
.find('input[type="checkbox"]')
.first()
.parents('label')
.click();
projects
.getResourceMoveConfirmModal()
.find('button:contains("Confirm")')
.should('be.disabled');
projects
.getResourceMoveConfirmModal()
.find('input[type="checkbox"]')
.last()
.parents('label')
.click();
projects
.getResourceMoveConfirmModal()
.find('button:contains("Confirm")')
.should('not.be.disabled')
.should('have.length', 5)
.filter(':contains("Project 1")')
.click();
projects.getResourceMoveModal().find('button:contains("Move workflow")').click();
workflowsPage.getters
.workflowCards()
@ -526,9 +503,77 @@ describe('Projects', { disableAutoLogin: true }, () => {
.filter(':contains("Owned by me")')
.should('not.exist');
// Move the credential from Project 1 to Project 2
// Move the workflow from Project 1 to Project 2
projects.getMenuItems().first().click();
workflowsPage.getters.workflowCards().should('have.length', 2);
workflowsPage.getters.workflowCardActions('Workflow in Home project').click();
workflowsPage.getters.workflowMoveButton().click();
projects
.getResourceMoveModal()
.should('be.visible')
.find('button:contains("Move workflow")')
.should('be.disabled');
projects.getProjectMoveSelect().click();
getVisibleSelect()
.find('li')
.should('have.length', 5)
.filter(':contains("Project 2")')
.click();
projects.getResourceMoveModal().find('button:contains("Move workflow")').click();
// Move the workflow from Project 2 to a member user
projects.getMenuItems().last().click();
workflowsPage.getters.workflowCards().should('have.length', 2);
workflowsPage.getters.workflowCardActions('Workflow in Home project').click();
workflowsPage.getters.workflowMoveButton().click();
projects
.getResourceMoveModal()
.should('be.visible')
.find('button:contains("Move workflow")')
.should('be.disabled');
projects.getProjectMoveSelect().click();
getVisibleSelect()
.find('li')
.should('have.length', 5)
.filter(`:contains("${INSTANCE_MEMBERS[0].email}")`)
.click();
projects.getResourceMoveModal().find('button:contains("Move workflow")').click();
workflowsPage.getters.workflowCards().should('have.length', 1);
// Move the workflow from member user back to Home
projects.getHomeButton().click();
workflowsPage.getters
.workflowCards()
.should('have.length', 3)
.filter(':has(.n8n-badge:contains("Project"))')
.should('have.length', 2);
workflowsPage.getters.workflowCardActions('Workflow in Home project').click();
workflowsPage.getters.workflowMoveButton().click();
projects
.getResourceMoveModal()
.should('be.visible')
.find('button:contains("Move workflow")')
.should('be.disabled');
projects.getProjectMoveSelect().click();
getVisibleSelect()
.find('li')
.should('have.length', 5)
.filter(`:contains("${INSTANCE_OWNER.email}")`)
.click();
projects.getResourceMoveModal().find('button:contains("Move workflow")').click();
workflowsPage.getters
.workflowCards()
.should('have.length', 3)
.filter(':contains("Owned by me")')
.should('have.length', 1);
// Move the credential from Project 1 to Project 2
projects.getMenuItems().first().click();
projects.getProjectTabCredentials().click();
credentialsPage.getters.credentialCards().should('have.length', 1);
credentialsPage.getters.credentialCardActions('Credential in Project 1').click();
@ -537,48 +582,162 @@ describe('Projects', { disableAutoLogin: true }, () => {
projects
.getResourceMoveModal()
.should('be.visible')
.find('button:contains("Next")')
.find('button:contains("Move credential")')
.should('be.disabled');
projects.getProjectMoveSelect().click();
getVisibleSelect()
.find('li')
.should('have.length', 1)
.first()
.should('contain.text', 'Project 2')
.should('have.length', 5)
.filter(':contains("Project 2")')
.click();
projects.getResourceMoveModal().find('button:contains("Next")').click();
projects.getResourceMoveModal().find('button:contains("Move credential")').click();
projects
.getResourceMoveConfirmModal()
.should('be.visible')
.find('button:contains("Confirm")')
.should('be.disabled');
projects
.getResourceMoveConfirmModal()
.find('input[type="checkbox"]')
.first()
.parents('label')
.click();
projects
.getResourceMoveConfirmModal()
.find('button:contains("Confirm")')
.should('be.disabled');
projects
.getResourceMoveConfirmModal()
.find('input[type="checkbox"]')
.last()
.parents('label')
.click();
projects
.getResourceMoveConfirmModal()
.find('button:contains("Confirm")')
.should('not.be.disabled')
.click();
credentialsPage.getters.credentialCards().should('not.have.length');
// Move the credential from Project 2 to admin user
projects.getMenuItems().last().click();
projects.getProjectTabCredentials().click();
credentialsPage.getters.credentialCards().should('have.length', 2);
credentialsPage.getters.credentialCardActions('Credential in Project 1').click();
credentialsPage.getters.credentialMoveButton().click();
projects
.getResourceMoveModal()
.should('be.visible')
.find('button:contains("Move credential")')
.should('be.disabled');
projects.getProjectMoveSelect().click();
getVisibleSelect()
.find('li')
.should('have.length', 5)
.filter(`:contains("${INSTANCE_ADMIN.email}")`)
.click();
projects.getResourceMoveModal().find('button:contains("Move credential")').click();
credentialsPage.getters.credentialCards().should('have.length', 1);
// Move the credential from admin user back to instance owner
projects.getHomeButton().click();
projects.getProjectTabCredentials().click();
credentialsPage.getters.credentialCards().should('have.length', 3);
credentialsPage.getters.credentialCardActions('Credential in Project 1').click();
credentialsPage.getters.credentialMoveButton().click();
projects
.getResourceMoveModal()
.should('be.visible')
.find('button:contains("Move credential")')
.should('be.disabled');
projects.getProjectMoveSelect().click();
getVisibleSelect()
.find('li')
.should('have.length', 5)
.filter(`:contains("${INSTANCE_OWNER.email}")`)
.click();
projects.getResourceMoveModal().find('button:contains("Move credential")').click();
credentialsPage.getters
.credentialCards()
.should('have.length', 3)
.filter(':contains("Owned by me")')
.should('have.length', 2);
// Move the credential from admin user back to its original project (Project 1)
credentialsPage.getters.credentialCardActions('Credential in Project 1').click();
credentialsPage.getters.credentialMoveButton().click();
projects
.getResourceMoveModal()
.should('be.visible')
.find('button:contains("Move credential")')
.should('be.disabled');
projects.getProjectMoveSelect().click();
getVisibleSelect()
.find('li')
.should('have.length', 5)
.filter(':contains("Project 1")')
.click();
projects.getResourceMoveModal().find('button:contains("Move credential")').click();
projects.getMenuItems().first().click();
projects.getProjectTabCredentials().click();
credentialsPage.getters
.credentialCards()
.filter(':contains("Credential in Project 1")')
.should('have.length', 1);
});
it('should allow to change inaccessible credential when the workflow was moved to a team project', () => {
cy.signinAsOwner();
cy.visit(workflowsPage.url);
// Create a credential in the Home project
projects.getProjectTabCredentials().should('be.visible').click();
credentialsPage.getters.emptyListCreateCredentialButton().click();
projects.createCredential('Credential in Home project');
// Create a workflow in the Home project
projects.getHomeButton().click();
workflowsPage.getters.workflowCards().should('not.have.length');
workflowsPage.getters.newWorkflowButtonCard().click();
workflowsPage.getters.workflowCards().should('not.have.length');
workflowsPage.getters.newWorkflowButtonCard().click();
workflowPage.actions.addNodeToCanvas(MANUAL_TRIGGER_NODE_NAME);
workflowPage.actions.addNodeToCanvas(NOTION_NODE_NAME, true, true);
ndv.getters.backToCanvas().click();
workflowPage.actions.saveWorkflowOnButtonClick();
// Create a project and add a user to it
projects.createProject('Project 1');
projects.addProjectMember(INSTANCE_MEMBERS[0].email);
projects.getProjectSettingsSaveButton().click();
// Move the workflow from Home to Project 1
projects.getHomeButton().click();
workflowsPage.getters
.workflowCards()
.should('have.length', 1)
.filter(':contains("Owned by me")')
.should('exist');
workflowsPage.getters.workflowCardActions('My workflow').click();
workflowsPage.getters.workflowMoveButton().click();
projects
.getResourceMoveModal()
.should('be.visible')
.find('button:contains("Move workflow")')
.should('be.disabled');
projects.getProjectMoveSelect().click();
getVisibleSelect()
.find('li')
.should('have.length', 4)
.filter(':contains("Project 1")')
.click();
projects.getResourceMoveModal().find('button:contains("Move workflow")').click();
workflowsPage.getters
.workflowCards()
.should('have.length', 1)
.filter(':contains("Owned by me")')
.should('not.exist');
//Log out with instance owner and log in with the member user
mainSidebar.actions.openUserMenu();
cy.getByTestId('user-menu-item-logout').click();
cy.get('input[name="email"]').type(INSTANCE_MEMBERS[0].email);
cy.get('input[name="password"]').type(INSTANCE_MEMBERS[0].password);
cy.getByTestId('form-submit-button').click();
// Open the moved workflow
workflowsPage.getters.workflowCards().should('have.length', 1);
workflowsPage.getters.workflowCards().first().click();
// Check if the credential can be changed
workflowPage.getters.canvasNodeByName(NOTION_NODE_NAME).should('be.visible').dblclick();
ndv.getters.credentialInput().find('input').should('be.enabled');
});
it('should handle viewer role', () => {

View file

@ -1,3 +1,9 @@
import {
addNodeToCanvas,
addRetrieverNodeToParent,
addVectorStoreNodeToParent,
getNodeCreatorItems,
} from '../composables/workflow';
import { IF_NODE_NAME } from '../constants';
import { NodeCreator } from '../pages/features/node-creator';
import { NDV } from '../pages/ndv';
@ -504,4 +510,38 @@ describe('Node Creator', () => {
nodeCreatorFeature.getters.searchBar().find('input').clear().type('gith');
nodeCreatorFeature.getters.nodeItemName().first().should('have.text', 'GitHub');
});
it('should show vector stores actions', () => {
const actions = [
'Get ranked documents from vector store',
'Add documents to vector store',
'Retrieve documents for AI processing',
];
nodeCreatorFeature.actions.openNodeCreator();
nodeCreatorFeature.getters.searchBar().find('input').clear().type('Vector Store');
getNodeCreatorItems().then((items) => {
const vectorStores = items.map((_i, el) => el.innerText);
// Loop over all vector stores and check if they have the three actions
vectorStores.each((_i, vectorStore) => {
nodeCreatorFeature.getters.getCreatorItem(vectorStore).click();
actions.forEach((action) => {
nodeCreatorFeature.getters.getCreatorItem(action).should('be.visible');
});
cy.realPress('ArrowLeft');
});
});
});
it('should add node directly for sub-connection', () => {
addNodeToCanvas('Question and Answer Chain', true);
addRetrieverNodeToParent('Vector Store Retriever', 'Question and Answer Chain');
cy.realPress('Escape');
addVectorStoreNodeToParent('In-Memory Vector Store', 'Vector Store Retriever');
cy.realPress('Escape');
WorkflowPage.getters.canvasNodes().should('have.length', 4);
});
});

View file

@ -8,7 +8,7 @@ pre-commit:
- merge
- rebase
prettier_check:
glob: 'packages/**/*.{vue,yml,md}'
glob: 'packages/**/*.{vue,yml,md,css,scss}'
run: ./node_modules/.bin/prettier --write --ignore-unknown --no-error-on-unmatched-pattern {staged_files}
stage_fixed: true
skip:

View file

@ -1,6 +1,6 @@
{
"name": "n8n-monorepo",
"version": "1.61.0",
"version": "1.62.1",
"private": true,
"engines": {
"node": ">=20.15",
@ -15,7 +15,7 @@
"build:frontend": "turbo run build:frontend",
"build:nodes": "turbo run build:nodes",
"typecheck": "turbo typecheck",
"dev": "turbo run dev --parallel --env-mode=loose --filter=!n8n-design-system --filter=!@n8n/chat",
"dev": "turbo run dev --parallel --env-mode=loose --filter=!n8n-design-system --filter=!@n8n/chat --filter=!@n8n/task-runner",
"dev:ai": "turbo run dev --parallel --env-mode=loose --filter=@n8n/nodes-langchain --filter=n8n --filter=n8n-core",
"clean": "turbo run clean --parallel",
"reset": "node scripts/ensure-zx.mjs && zx scripts/reset.mjs",
@ -59,7 +59,7 @@
"ts-jest": "^29.1.1",
"tsc-alias": "^1.8.7",
"tsc-watch": "^6.0.4",
"turbo": "2.0.6",
"turbo": "2.1.2",
"typescript": "*",
"zx": "^8.1.4"
},

View file

@ -33,6 +33,7 @@ export interface FrontendSettings {
endpointFormWaiting: string;
endpointWebhook: string;
endpointWebhookTest: string;
endpointWebhookWaiting: string;
saveDataErrorExecution: WorkflowSettings.SaveDataExecution;
saveDataSuccessExecution: WorkflowSettings.SaveDataExecution;
saveManualExecutions: boolean;

View file

@ -1,6 +1,6 @@
{
"name": "@n8n/n8n-benchmark",
"version": "1.5.0",
"version": "1.6.1",
"description": "Cli for running benchmark tests for n8n",
"main": "dist/index",
"scripts": {

View file

@ -1,7 +0,0 @@
{
"$schema": "../scenario.schema.json",
"name": "CodeNodeJsOnceForEach",
"description": "A JS Code Node that runs once for each item and adds, modifies and removes properties. The data of 5 items is generated using DebugHelper Node, and returned with RespondToWebhook Node.",
"scenarioData": { "workflowFiles": ["js-code-node-once-for-each.json"] },
"scriptPath": "js-code-node-once-for-each.script.js"
}

View file

@ -1,9 +1,31 @@
{
"createdAt": "2024-08-06T12:19:51.268Z",
"updatedAt": "2024-08-06T12:20:45.000Z",
"name": "JS Code Node Once For Each",
"name": "JS Code Node",
"active": true,
"nodes": [
{
"parameters": {
"respondWith": "allIncomingItems",
"options": {}
},
"type": "n8n-nodes-base.respondToWebhook",
"typeVersion": 1.1,
"position": [1280, 460],
"id": "0067e317-09b8-478a-8c50-e19b4c9e294c",
"name": "Respond to Webhook"
},
{
"parameters": {
"mode": "runOnceForEachItem",
"jsCode": "// Add new field\n$input.item.json.age = 10 + Math.floor(Math.random() * 30);\n// Mutate existing field\n$input.item.json.password = $input.item.json.password.split('').map(() => '*').join(\"\")\n// Remove field\ndelete $input.item.json.lastname\n// New object field\nconst emailParts = $input.item.json.email.split(\"@\")\n$input.item.json.emailData = {\n user: emailParts[0],\n domain: emailParts[1]\n}\n\nreturn $input.item;"
},
"type": "n8n-nodes-base.code",
"typeVersion": 2,
"position": [1040, 460],
"id": "56d751c0-0d30-43c3-89fa-bebf3a9d436f",
"name": "OnceForEachItemJSCode"
},
{
"parameters": {
"httpMethod": "POST",
@ -13,68 +35,23 @@
},
"type": "n8n-nodes-base.webhook",
"typeVersion": 2,
"position": [0, 0],
"id": "849350b3-4212-4416-a462-1cf331157d37",
"position": [580, 460],
"id": "417d749d-156c-4ffe-86ea-336f702dc5da",
"name": "Webhook",
"webhookId": "34ca1895-ccf4-4a4a-8bb8-a042f5edb567"
},
{
"parameters": {
"respondWith": "allIncomingItems",
"options": {}
},
"type": "n8n-nodes-base.respondToWebhook",
"typeVersion": 1.1,
"position": [660, 0],
"id": "f0660aa1-8a65-490f-b5cd-f8d134070c13",
"name": "Respond to Webhook"
},
{
"parameters": {
"category": "randomData",
"randomDataCount": 5
},
"type": "n8n-nodes-base.debugHelper",
"typeVersion": 1,
"position": [220, 0],
"id": "50f1efe8-bd2d-4061-9f51-b38c0e3daeb2",
"name": "DebugHelper"
},
{
"parameters": {
"mode": "runOnceForEachItem",
"jsCode": "// Add new field\n$input.item.json.age = 10 + Math.floor(Math.random() * 30);\n// Mutate existing field\n$input.item.json.password = $input.item.json.password.split('').map(() => '*').join(\"\")\n// Remove field\ndelete $input.item.json.lastname\n// New object field\nconst emailParts = $input.item.json.email.split(\"@\")\n$input.item.json.emailData = {\n user: emailParts[0],\n domain: emailParts[1]\n}\n\nreturn $input.item;"
"jsCode": "const digits = '0123456789';\nconst uppercaseLetters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ';\nconst lowercaseLetters = uppercaseLetters.toLowerCase();\nconst alphabet = [digits, uppercaseLetters, lowercaseLetters].join('').split('')\n\nconst randomInt = (min, max) => Math.floor(Math.random() * (max - min + 1)) + min;\nconst randomItem = (arr) => arr.at(randomInt(0, arr.length - 1))\nconst randomString = (len) => Array.from({ length: len }).map(() => randomItem(alphabet)).join('')\n\nconst randomUid = () => [8,4,4,4,8].map(len => randomString(len)).join(\"-\")\nconst randomEmail = () => `${randomString(8)}@${randomString(10)}.com`\n\nconst randomPerson = () => ({\n uid: randomUid(),\n email: randomEmail(),\n firstname: randomString(5),\n lastname: randomString(12),\n password: randomString(10)\n})\n\nreturn Array.from({ length: 100 }).map(() => ({\n json: randomPerson()\n}))"
},
"id": "c30db155-73ca-48b9-8860-c3fe7a0926fb",
"name": "Code",
"type": "n8n-nodes-base.code",
"typeVersion": 2,
"position": [440, 0],
"id": "f9f2f865-e228-403d-8e47-72308359e207",
"name": "OnceForEachItemJSCode"
"position": [820, 460]
}
],
"connections": {
"Webhook": {
"main": [
[
{
"node": "DebugHelper",
"type": "main",
"index": 0
}
]
]
},
"DebugHelper": {
"main": [
[
{
"node": "OnceForEachItemJSCode",
"type": "main",
"index": 0
}
]
]
},
"OnceForEachItemJSCode": {
"main": [
[
@ -85,6 +62,28 @@
}
]
]
},
"Webhook": {
"main": [
[
{
"node": "Code",
"type": "main",
"index": 0
}
]
]
},
"Code": {
"main": [
[
{
"node": "OnceForEachItemJSCode",
"type": "main",
"index": 0
}
]
]
}
},
"settings": { "executionOrder": "v1" },

View file

@ -0,0 +1,7 @@
{
"$schema": "../scenario.schema.json",
"name": "CodeNodeJs",
"description": "A JS Code Node that first generates 100 items and then runs once for each item and adds, modifies and removes properties. The data returned with RespondToWebhook Node.",
"scenarioData": { "workflowFiles": ["js-code-node.json"] },
"scriptPath": "js-code-node.script.js"
}

View file

@ -12,7 +12,7 @@ export default function () {
try {
const body = JSON.parse(r.body);
return Array.isArray(body) ? body.length === 5 : false;
return Array.isArray(body) ? body.length === 100 : false;
} catch (error) {
console.error('Error parsing response body: ', error);
return false;

View file

@ -1,6 +1,6 @@
{
"name": "@n8n/chat",
"version": "0.26.0",
"version": "0.27.1",
"scripts": {
"dev": "pnpm run storybook",
"build": "pnpm build:vite && pnpm build:bundle",

View file

@ -1,4 +1,20 @@
@import 'highlight.js/styles/github.css';
@use 'sass:meta';
@include meta.load-css('highlight.js/styles/github.css');
@mixin hljs-dark-theme {
@include meta.load-css('highlight.js/styles/github-dark-dimmed.css');
}
body {
&[data-theme='dark'] {
@include hljs-dark-theme;
}
@media (prefers-color-scheme: dark) {
@include hljs-dark-theme;
}
}
// https://github.com/pxlrbt/markdown-css
.chat-message-markdown {
@ -561,7 +577,6 @@
kbd, /* different style for kbd? */
code {
background: #eee;
padding: 0.1em 0.25em;
border-radius: 0.2rem;
-webkit-box-decoration-break: clone;

View file

@ -1,6 +1,6 @@
{
"name": "@n8n/config",
"version": "1.11.0",
"version": "1.12.1",
"scripts": {
"clean": "rimraf dist .turbo",
"dev": "pnpm watch",

View file

@ -0,0 +1,47 @@
import { Config, Env, Nested } from '../decorators';
import { StringArray } from '../utils';
@Config
class FileLoggingConfig {
/**
* Max number of log files to keep, or max number of days to keep logs for.
* Once the limit is reached, the oldest log files will be rotated out.
* If using days, append a `d` suffix. Only for `file` log output.
*
* @example `N8N_LOG_FILE_COUNT_MAX=7` will keep at most 7 files.
* @example `N8N_LOG_FILE_COUNT_MAX=7d` will keep at most 7 days worth of files.
*/
@Env('N8N_LOG_FILE_COUNT_MAX')
fileCountMax: number = 100;
/** Max size (in MiB) for each log file. Only for `file` log output. */
@Env('N8N_LOG_FILE_SIZE_MAX')
fileSizeMax: number = 16;
/** Location of the log files inside `~/.n8n`. Only for `file` log output. */
@Env('N8N_LOG_FILE_LOCATION')
location: string = 'logs/n8n.log';
}
@Config
export class LoggingConfig {
/**
* Minimum level of logs to output. Logs with this or higher level will be output;
* logs with lower levels will not. Exception: `silent` disables all logging.
*
* @example `N8N_LOG_LEVEL=info` will output `error`, `warn` and `info` logs, but not `debug`.
*/
@Env('N8N_LOG_LEVEL')
level: 'error' | 'warn' | 'info' | 'debug' | 'silent' = 'info';
/**
* Where to output logs to. Options are: `console` or `file` or both in a comma separated list.
*
* @example `N8N_LOG_OUTPUT=console,file` will output to both console and file.
*/
@Env('N8N_LOG_OUTPUT')
outputs: StringArray<'console' | 'file'> = ['console'];
@Nested
file: FileLoggingConfig;
}
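A minimal usage sketch (not part of this diff) of how these settings are expected to resolve, assuming `GlobalConfig` is obtained through typedi's `Container` like the other `@n8n/config` classes:

import { Container } from 'typedi';
import { GlobalConfig } from '@n8n/config';

// With N8N_LOG_LEVEL=debug and N8N_LOG_OUTPUT=console,file set in the environment:
const { logging } = Container.get(GlobalConfig);
console.log(logging.level); // 'debug'
console.log(logging.outputs); // ['console', 'file'] (comma-separated value split into a StringArray)
console.log(logging.file.location); // 'logs/n8n.log', relative to ~/.n8n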

View file

@ -0,0 +1,22 @@
import { Config, Env } from '../decorators';
@Config
export class TaskRunnersConfig {
// Defaults to true for now
@Env('N8N_RUNNERS_DISABLED')
disabled: boolean = true;
@Env('N8N_RUNNERS_PATH')
path: string = '/runners';
@Env('N8N_RUNNERS_AUTH_TOKEN')
authToken: string = '';
/** Port the task runners server should listen on */
@Env('N8N_RUNNERS_SERVER_PORT')
port: number = 5679;
/** IP address the task runners server should listen on */
@Env('N8N_RUNNERS_SERVER_LISTEN_ADDRESS')
listen_address: string = '127.0.0.1';
}

View file

@ -6,9 +6,13 @@ class HealthConfig {
@Env('QUEUE_HEALTH_CHECK_ACTIVE')
active: boolean = false;
/** Port for worker to respond to health checks requests on, if enabled. */
/** Port for worker server to listen on. */
@Env('QUEUE_HEALTH_CHECK_PORT')
port: number = 5678;
/** IP address for worker server to listen on. */
@Env('N8N_WORKER_SERVER_ADDRESS')
address: string = '0.0.0.0';
}
@Config

View file

@ -5,8 +5,11 @@ import { EndpointsConfig } from './configs/endpoints.config';
import { EventBusConfig } from './configs/event-bus.config';
import { ExternalSecretsConfig } from './configs/external-secrets.config';
import { ExternalStorageConfig } from './configs/external-storage.config';
import { LoggingConfig } from './configs/logging.config';
import { NodesConfig } from './configs/nodes.config';
import { PublicApiConfig } from './configs/public-api.config';
import { TaskRunnersConfig } from './configs/runners.config';
export { TaskRunnersConfig } from './configs/runners.config';
import { ScalingModeConfig } from './configs/scaling-mode.config';
import { SentryConfig } from './configs/sentry.config';
import { TemplatesConfig } from './configs/templates.config';
@ -81,4 +84,10 @@ export class GlobalConfig {
@Nested
queue: ScalingModeConfig;
@Nested
logging: LoggingConfig;
@Nested
taskRunners: TaskRunnersConfig;
}

View file

@ -0,0 +1,7 @@
export class StringArray<T extends string> extends Array<T> {
constructor(str: string) {
super();
const parsed = str.split(',') as StringArray<T>;
return parsed.every((i) => typeof i === 'string') ? parsed : [];
}
}
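A brief usage sketch (assumption: this helper is fed raw comma-separated env values such as `N8N_LOG_OUTPUT` by the `@Env` decorator):

const outputs = new StringArray<'console' | 'file'>('console,file');
// outputs -> ['console', 'file']; a single value such as 'console' yields ['console']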

View file

@ -198,6 +198,7 @@ describe('GlobalConfig', () => {
health: {
active: false,
port: 5678,
address: '0.0.0.0',
},
bull: {
redis: {
@ -221,10 +222,26 @@ describe('GlobalConfig', () => {
},
},
},
taskRunners: {
disabled: true,
path: '/runners',
authToken: '',
listen_address: '127.0.0.1',
port: 5679,
},
sentry: {
backendDsn: '',
frontendDsn: '',
},
logging: {
level: 'info',
outputs: ['console'],
file: {
fileCountMax: 100,
fileSizeMax: 16,
location: 'logs/n8n.log',
},
},
};
it('should use all default values when no env variables are defined', () => {

View file

@ -1,27 +1,28 @@
import { BINARY_ENCODING, NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import type { AgentAction, AgentFinish } from 'langchain/agents';
import { AgentExecutor, createToolCallingAgent } from 'langchain/agents';
import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
import { HumanMessage } from '@langchain/core/messages';
import type { BaseMessage } from '@langchain/core/messages';
import type { BaseOutputParser, StructuredOutputParser } from '@langchain/core/output_parsers';
import type { BaseMessagePromptTemplateLike } from '@langchain/core/prompts';
import { ChatPromptTemplate } from '@langchain/core/prompts';
import { omit } from 'lodash';
import { RunnableSequence } from '@langchain/core/runnables';
import type { Tool } from '@langchain/core/tools';
import { DynamicStructuredTool } from '@langchain/core/tools';
import type { AgentAction, AgentFinish } from 'langchain/agents';
import { AgentExecutor, createToolCallingAgent } from 'langchain/agents';
import { OutputFixingParser } from 'langchain/output_parsers';
import { omit } from 'lodash';
import { BINARY_ENCODING, jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import type { ZodObject } from 'zod';
import { z } from 'zod';
import type { BaseOutputParser, StructuredOutputParser } from '@langchain/core/output_parsers';
import { OutputFixingParser } from 'langchain/output_parsers';
import { HumanMessage } from '@langchain/core/messages';
import { RunnableSequence } from '@langchain/core/runnables';
import { SYSTEM_MESSAGE } from './prompt';
import {
isChatInstance,
getPromptInputByType,
getOptionalOutputParsers,
getConnectedTools,
} from '../../../../../utils/helpers';
import { SYSTEM_MESSAGE } from './prompt';
function getOutputParserSchema(outputParser: BaseOutputParser): ZodObject<any, any, any, any> {
const parserType = outputParser.lc_namespace[outputParser.lc_namespace.length - 1];
@ -74,6 +75,39 @@ async function extractBinaryMessages(ctx: IExecuteFunctions) {
content: [...binaryMessages],
});
}
/**
* Fixes empty content messages in agent steps.
*
* This function is necessary when using RunnableSequence.from in LangChain.
* If a tool doesn't have any arguments, LangChain returns input: '' (empty string).
* This can throw an error for some providers (like Anthropic) which expect the input to always be an object.
* This function replaces empty string inputs with empty objects to prevent such errors.
*
* @param steps - The agent steps to fix
* @returns The fixed agent steps
*/
function fixEmptyContentMessage(steps: AgentFinish | AgentAction[]) {
if (!Array.isArray(steps)) return steps;
steps.forEach((step) => {
if ('messageLog' in step && step.messageLog !== undefined) {
if (Array.isArray(step.messageLog)) {
step.messageLog.forEach((message: BaseMessage) => {
if ('content' in message && Array.isArray(message.content)) {
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
(message.content as Array<{ input?: string | object }>).forEach((content) => {
if (content.input === '') {
content.input = {};
}
});
}
});
}
}
});
return steps;
}
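// Hypothetical illustration (not part of this change) of the shape the helper normalizes;
// the tool name is a placeholder:
//   before: step.messageLog[0].content === [{ type: 'tool_use', name: 'get_time', input: '' }]
//   after fixEmptyContentMessage(steps):
//           step.messageLog[0].content === [{ type: 'tool_use', name: 'get_time', input: {} }]
// so providers like Anthropic, which expect an object input, no longer reject the message.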
export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing Tools Agent');
@ -156,6 +190,14 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeE
// If the steps do not contain multiple outputs, return them as is
return agentFinishSteps;
}
// If memory is connected we need to stringify the returnValues so that it can be saved in the memory as a string
function handleParsedStepOutput(output: Record<string, unknown>) {
return {
returnValues: memory ? { output: JSON.stringify(output) } : output,
log: 'Final response formatted',
};
}
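// Illustrative round-trip (values are made up): with memory connected,
//   handleParsedStepOutput({ city: 'Berlin', temp: 21 })
// yields { returnValues: { output: '{"city":"Berlin","temp":21}' }, log: 'Final response formatted' },
// and the node later restores the object with jsonParse before pushing returnData (see further down in this diff).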
async function agentStepsParser(
steps: AgentFinish | AgentAction[],
): Promise<AgentFinish | AgentAction[]> {
@ -168,24 +210,18 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeE
unknown
>;
return {
returnValues,
log: 'Final response formatted',
};
return handleParsedStepOutput(returnValues);
}
}
// If the steps are an AgentFinish and the outputParser is defined it must mean that the LLM didn't use `format_final_response` tool so we will parse the output manually
// If the steps are an AgentFinish and the outputParser is defined it must mean that the LLM didn't use `format_final_response` tool so we will try to parse the output manually
if (outputParser && typeof steps === 'object' && (steps as AgentFinish).returnValues) {
const finalResponse = (steps as AgentFinish).returnValues;
const returnValues = (await outputParser.parse(finalResponse as unknown as string)) as Record<
string,
unknown
>;
return {
returnValues,
log: 'Final response formatted',
};
return handleParsedStepOutput(returnValues);
}
return handleAgentFinishOutput(steps);
}
@ -233,7 +269,7 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeE
});
agent.streamRunnable = false;
const runnableAgent = RunnableSequence.from([agent, agentStepsParser]);
const runnableAgent = RunnableSequence.from([agent, agentStepsParser, fixEmptyContentMessage]);
const executor = AgentExecutor.fromAgentAndTools({
agent: runnableAgent,
@ -273,6 +309,13 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeE
'IMPORTANT: Always call `format_final_response` to format your final response!',
});
if (memory && outputParser) {
const parsedOutput = jsonParse<{ output: Record<string, unknown> }>(
response.output as string,
);
response.output = parsedOutput?.output ?? parsedOutput;
}
returnData.push({
json: omit(
response,

View file

@ -1,3 +1,8 @@
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import { HumanMessage } from '@langchain/core/messages';
import { SystemMessagePromptTemplate, ChatPromptTemplate } from '@langchain/core/prompts';
import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers';
import { NodeOperationError, NodeConnectionType } from 'n8n-workflow';
import type {
IDataObject,
IExecuteFunctions,
@ -6,14 +11,8 @@ import type {
INodeType,
INodeTypeDescription,
} from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import { HumanMessage } from '@langchain/core/messages';
import { SystemMessagePromptTemplate, ChatPromptTemplate } from '@langchain/core/prompts';
import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers';
import { z } from 'zod';
import { getTracingConfig } from '../../../utils/tracing';
const SYSTEM_PROMPT_TEMPLATE =
@ -172,11 +171,15 @@ export class TextClassifier implements INodeType {
0,
)) as BaseLanguageModel;
const categories = this.getNodeParameter('categories.categories', 0) as Array<{
const categories = this.getNodeParameter('categories.categories', 0, []) as Array<{
category: string;
description: string;
}>;
if (categories.length === 0) {
throw new NodeOperationError(this.getNode(), 'At least one category must be defined');
}
const options = this.getNodeParameter('options', 0, {}) as {
multiClass: boolean;
fallback?: string;
@ -229,6 +232,7 @@ export class TextClassifier implements INodeType {
const systemPromptTemplateOpt = this.getNodeParameter(
'options.systemPromptTemplate',
itemIdx,
SYSTEM_PROMPT_TEMPLATE,
) as string;
const systemPromptTemplate = SystemMessagePromptTemplate.fromTemplate(
`${systemPromptTemplateOpt ?? SYSTEM_PROMPT_TEMPLATE}

View file

@ -109,17 +109,22 @@ export class DocumentGithubLoader implements INodeType {
0,
)) as CharacterTextSplitter | undefined;
const { index } = this.addInputData(NodeConnectionType.AiDocument, [
[{ json: { repository, branch, ignorePaths, recursive } }],
]);
const docs = new GithubRepoLoader(repository, {
branch,
ignorePaths: (ignorePaths ?? '').split(',').map((p) => p.trim()),
recursive,
accessToken: (credentials.accessToken as string) || '',
apiUrl: credentials.server as string,
});
const loadedDocs = textSplitter
? await textSplitter.splitDocuments(await docs.load())
: await docs.load();
this.addOutputData(NodeConnectionType.AiDocument, index, [[{ json: { loadedDocs } }]]);
return {
response: logWrapper(loadedDocs, this),
};

View file

@ -1,4 +1,6 @@
import { Node, NodeConnectionType } from 'n8n-workflow';
import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
import { pick } from 'lodash';
import { Node, NodeConnectionType, commonCORSParameters } from 'n8n-workflow';
import type {
IDataObject,
IWebhookFunctions,
@ -10,10 +12,8 @@ import type {
INodeProperties,
} from 'n8n-workflow';
import { pick } from 'lodash';
import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
import { createPage } from './templates';
import { validateAuth } from './GenericFunctions';
import { createPage } from './templates';
import type { LoadPreviousSessionChatOption } from './types';
const CHAT_TRIGGER_PATH_IDENTIFIER = 'chat';
@ -56,7 +56,6 @@ export class ChatTrigger extends Node {
],
},
},
supportsCORS: true,
maxNodes: 1,
inputs: `={{ (() => {
if (!['hostedChat', 'webhook'].includes($parameter.mode)) {
@ -241,6 +240,15 @@ export class ChatTrigger extends Node {
placeholder: 'Add Field',
default: {},
options: [
// CORS parameters are only valid for when chat is used in hosted or webhook mode
...commonCORSParameters.map((p) => ({
...p,
displayOptions: {
show: {
'/mode': ['hostedChat', 'webhook'],
},
},
})),
{
...allowFileUploadsOption,
displayOptions: {

View file

@ -1,14 +1,16 @@
import { type INodeProperties } from 'n8n-workflow';
import {
PGVectorStore,
type DistanceStrategy,
type PGVectorStoreArgs,
} from '@langchain/community/vectorstores/pgvector';
import { configurePostgres } from 'n8n-nodes-base/dist/nodes/Postgres/v2/transport';
import type { EmbeddingsInterface } from '@langchain/core/embeddings';
import type { PostgresNodeCredentials } from 'n8n-nodes-base/dist/nodes/Postgres/v2/helpers/interfaces';
import { configurePostgres } from 'n8n-nodes-base/dist/nodes/Postgres/v2/transport';
import type { INodeProperties } from 'n8n-workflow';
import type pg from 'pg';
import { createVectorStoreNode } from '../shared/createVectorStoreNode';
import { metadataFilterField } from '../../../utils/sharedFields';
import { createVectorStoreNode } from '../shared/createVectorStoreNode';
type CollectionOptions = {
useCollection?: boolean;
@ -177,13 +179,46 @@ const retrieveFields: INodeProperties[] = [
},
];
/**
* Extended PGVectorStore class to handle custom filtering.
* This wrapper is necessary because when used as a retriever,
* similaritySearchVectorWithScore should use this.filter instead of
* expecting it from the parameter
*/
class ExtendedPGVectorStore extends PGVectorStore {
static async initialize(
embeddings: EmbeddingsInterface,
args: PGVectorStoreArgs & { dimensions?: number },
): Promise<ExtendedPGVectorStore> {
const { dimensions, ...rest } = args;
const postgresqlVectorStore = new this(embeddings, rest);
await postgresqlVectorStore._initializeClient();
await postgresqlVectorStore.ensureTableInDatabase(dimensions);
if (postgresqlVectorStore.collectionTableName) {
await postgresqlVectorStore.ensureCollectionTableInDatabase();
}
return postgresqlVectorStore;
}
async similaritySearchVectorWithScore(
query: number[],
k: number,
filter?: PGVectorStore['FilterType'],
) {
const mergedFilter = { ...this.filter, ...filter };
return await super.similaritySearchVectorWithScore(query, k, mergedFilter);
}
}
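// Hedged usage sketch (store and queryVector are placeholders, not from this diff): when
// this.filter has been set for retriever usage, a call that passes no per-call filter still
// applies it, because similaritySearchVectorWithScore merges this.filter into the query:
//   await store.similaritySearchVectorWithScore(queryVector, 4);
// behaves like:
//   await store.similaritySearchVectorWithScore(queryVector, 4, store.filter);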
export const VectorStorePGVector = createVectorStoreNode({
meta: {
description: 'Work with your data in Postgresql with the PGVector extension',
icon: 'file:postgres.svg',
displayName: 'Postgres PGVector Store',
docsUrl:
'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoresupabase/',
'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstorepgvector/',
name: 'vectorStorePGVector',
credentials: [
{
@ -236,7 +271,7 @@ export const VectorStorePGVector = createVectorStoreNode({
'cosine',
) as DistanceStrategy;
return await PGVectorStore.initialize(embeddings, config);
return await ExtendedPGVectorStore.initialize(embeddings, config);
},
async populateVectorStore(context, embeddings, documents, itemIndex) {
// NOTE: if you are to create the HNSW index before use, you need to consider moving the distanceStrategy field to

View file

@ -88,25 +88,25 @@ function getOperationModeOptions(args: VectorStoreNodeConstructorArgs): INodePro
name: 'Get Many',
value: 'load',
description: 'Get many ranked documents from vector store for query',
action: 'Get many ranked documents from vector store for query',
action: 'Get ranked documents from vector store',
},
{
name: 'Insert Documents',
value: 'insert',
description: 'Insert documents into vector store',
action: 'Insert documents into vector store',
action: 'Add documents to vector store',
},
{
name: 'Retrieve Documents (For Agent/Chain)',
value: 'retrieve',
description: 'Retrieve documents from vector store to be used with AI nodes',
action: 'Retrieve documents from vector store to be used with AI nodes',
action: 'Retrieve documents for AI processing',
},
{
name: 'Update Documents',
value: 'update',
description: 'Update documents in vector store by ID',
action: 'Update documents in vector store by ID',
action: 'Update vector store documents',
},
];

View file

@ -1,30 +1,27 @@
import type { BaseMessage } from '@langchain/core/messages';
import { AgentExecutor } from 'langchain/agents';
import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant';
import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema';
import { OpenAI as OpenAIClient } from 'openai';
import {
ApplicationError,
NodeConnectionType,
NodeOperationError,
updateDisplayOptions,
} from 'n8n-workflow';
import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant';
import type { BufferWindowMemory } from 'langchain/memory';
import omit from 'lodash/omit';
import type {
IDataObject,
IExecuteFunctions,
INodeExecutionData,
INodeProperties,
} from 'n8n-workflow';
import type { BufferWindowMemory } from 'langchain/memory';
import omit from 'lodash/omit';
import type { BaseMessage } from '@langchain/core/messages';
import { formatToOpenAIAssistantTool } from '../../helpers/utils';
import { assistantRLC } from '../descriptions';
import {
ApplicationError,
NodeConnectionType,
NodeOperationError,
updateDisplayOptions,
} from 'n8n-workflow';
import { OpenAI as OpenAIClient } from 'openai';
import { getConnectedTools } from '../../../../../utils/helpers';
import { getTracingConfig } from '../../../../../utils/tracing';
import { formatToOpenAIAssistantTool } from '../../helpers/utils';
import { assistantRLC } from '../descriptions';
const properties: INodeProperties[] = [
assistantRLC,
@ -63,6 +60,46 @@ const properties: INodeProperties[] = [
},
},
},
{
displayName: 'Memory',
name: 'memory',
type: 'options',
options: [
{
// eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased
name: 'Use memory connector',
value: 'connector',
description: 'Connect one of the supported memory nodes',
},
{
// eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased
name: 'Use thread ID',
value: 'threadId',
description: 'Specify the ID of the thread to continue',
},
],
displayOptions: {
show: {
'@version': [{ _cnd: { gte: 1.6 } }],
},
},
default: 'connector',
},
{
displayName: 'Thread ID',
name: 'threadId',
type: 'string',
default: '',
placeholder: '',
description: 'The ID of the thread to continue. A new thread will be created if not specified',
hint: 'If the thread ID is empty or undefined, a new thread will be created and included in the response',
displayOptions: {
show: {
'@version': [{ _cnd: { gte: 1.6 } }],
memory: ['threadId'],
},
},
},
{
displayName: 'Connect your own custom n8n tools to this node on the canvas',
name: 'noticeTools',
@ -201,9 +238,19 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
tools: tools ?? [],
});
const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
| BufferWindowMemory
| undefined;
const useMemoryConnector =
nodeVersion >= 1.6 && this.getNodeParameter('memory', i) === 'connector';
const memory =
useMemoryConnector || nodeVersion < 1.6
? ((await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
| BufferWindowMemory
| undefined)
: undefined;
const threadId =
nodeVersion >= 1.6 && !useMemoryConnector
? (this.getNodeParameter('threadId', i) as string)
: undefined;
const chainValues: IDataObject = {
content: input,
@ -231,6 +278,8 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
chainValues.threadId = thread.id;
}
} else if (threadId) {
chainValues.threadId = threadId;
}
let filteredResponse: IDataObject = {};
@ -257,7 +306,8 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
tools: assistantTools,
});
}
filteredResponse = omit(response, ['signal', 'timeout']) as IDataObject;
// Remove configuration properties and runId added by Langchain that are not relevant to the user
filteredResponse = omit(response, ['signal', 'timeout', 'content', 'runId']) as IDataObject;
} catch (error) {
if (!(error instanceof ApplicationError)) {
throw new NodeOperationError(this.getNode(), error.message, { itemIndex: i });

View file

@ -1,5 +1,5 @@
/* eslint-disable n8n-nodes-base/node-filename-against-convention */
import type { INodeTypeDescription } from 'n8n-workflow';
import type { INodeInputConfiguration, INodeTypeDescription } from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';
import * as assistant from './assistant';
@ -42,13 +42,21 @@ const prettifyOperation = (resource: string, operation: string) => {
return `${capitalize(operation)} ${capitalize(resource)}`;
};
const configureNodeInputs = (resource: string, operation: string, hideTools: string) => {
const configureNodeInputs = (
resource: string,
operation: string,
hideTools: string,
memory: string | undefined,
) => {
if (resource === 'assistant' && operation === 'message') {
return [
const inputs: INodeInputConfiguration[] = [
{ type: NodeConnectionType.Main },
{ type: NodeConnectionType.AiMemory, displayName: 'Memory', maxConnections: 1 },
{ type: NodeConnectionType.AiTool, displayName: 'Tools' },
];
if (memory !== 'threadId') {
inputs.push({ type: NodeConnectionType.AiMemory, displayName: 'Memory', maxConnections: 1 });
}
return inputs;
}
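// Illustrative outcome (not from the diff): with resource='assistant', operation='message'
// and memory='threadId', the AiMemory input is omitted, leaving only Main and Tools;
// with memory='connector' (or when the parameter is undefined on older versions),
// the Memory input is included as before.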
if (resource === 'text' && operation === 'message') {
if (hideTools === 'hide') {
@ -69,7 +77,7 @@ export const versionDescription: INodeTypeDescription = {
name: 'openAi',
icon: { light: 'file:openAi.svg', dark: 'file:openAi.dark.svg' },
group: ['transform'],
version: [1, 1.1, 1.2, 1.3, 1.4, 1.5],
version: [1, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6],
subtitle: `={{(${prettifyOperation})($parameter.resource, $parameter.operation)}}`,
description: 'Message an assistant or GPT, analyze images, generate audio, etc.',
defaults: {
@ -89,7 +97,7 @@ export const versionDescription: INodeTypeDescription = {
],
},
},
inputs: `={{(${configureNodeInputs})($parameter.resource, $parameter.operation, $parameter.hideTools)}}`,
inputs: `={{(${configureNodeInputs})($parameter.resource, $parameter.operation, $parameter.hideTools, $parameter.memory ?? undefined)}}`,
outputs: [NodeConnectionType.Main],
credentials: [
{

View file

@ -1,6 +1,6 @@
{
"name": "@n8n/n8n-nodes-langchain",
"version": "1.61.0",
"version": "1.62.1",
"description": "",
"main": "index.js",
"scripts": {
@ -150,7 +150,7 @@
"@langchain/redis": "0.1.0",
"@langchain/textsplitters": "0.1.0",
"@mozilla/readability": "^0.5.0",
"@n8n/typeorm": "0.3.20-10",
"@n8n/typeorm": "0.3.20-12",
"@n8n/vm2": "3.9.25",
"@pinecone-database/pinecone": "3.0.3",
"@qdrant/js-client-rest": "1.11.0",

View file

@ -0,0 +1,19 @@
const sharedOptions = require('@n8n_io/eslint-config/shared');
/**
* @type {import('@types/eslint').ESLint.ConfigData}
*/
module.exports = {
extends: ['@n8n_io/eslint-config/node'],
...sharedOptions(__dirname),
ignorePatterns: ['jest.config.js'],
rules: {
'unicorn/filename-case': ['error', { case: 'kebabCase' }],
'@typescript-eslint/no-duplicate-imports': 'off',
complexity: 'error',
},
};

View file

@ -0,0 +1,5 @@
/** @type {import('jest').Config} */
module.exports = {
...require('../../../jest.config'),
testTimeout: 10_000,
};

View file

@ -0,0 +1,29 @@
{
"name": "@n8n/task-runner",
"version": "1.0.1",
"scripts": {
"clean": "rimraf dist .turbo",
"start": "node dist/start.js",
"dev": "pnpm build && pnpm start",
"typecheck": "tsc --noEmit",
"build": "tsc -p ./tsconfig.build.json",
"format": "biome format --write src",
"format:check": "biome ci src",
"test": "echo \"Error: no tests in this package\" && exit 0",
"lint": "eslint . --quiet",
"lintfix": "eslint . --fix",
"watch": "tsc -p tsconfig.build.json --watch"
},
"main": "dist/start.js",
"module": "src/start.ts",
"types": "dist/start.d.ts",
"files": [
"dist/**/*"
],
"dependencies": {
"n8n-workflow": "workspace:*",
"n8n-core": "workspace:*",
"nanoid": "^3.3.6",
"ws": "^8.18.0"
}
}

View file

@ -0,0 +1,47 @@
import { ApplicationError } from 'n8n-workflow';
import * as a from 'node:assert/strict';
export type AuthOpts = {
n8nUri: string;
authToken: string;
};
/**
* Requests a one-time token that can be used to establish a task runner connection
*/
export async function authenticate(opts: AuthOpts) {
try {
const authEndpoint = `http://${opts.n8nUri}/rest/runners/auth`;
const response = await fetch(authEndpoint, {
method: 'POST',
headers: {
// eslint-disable-next-line @typescript-eslint/naming-convention
'Content-Type': 'application/json',
},
body: JSON.stringify({
token: opts.authToken,
}),
});
if (!response.ok) {
throw new ApplicationError(
`Invalid response status ${response.status}: ${await response.text()}`,
);
}
const { data } = (await response.json()) as { data: { token: string } };
const grantToken = data.token;
a.ok(grantToken);
return grantToken;
} catch (e) {
console.error(e);
const error = e as Error;
throw new ApplicationError(
`Could not connect to n8n message broker ${opts.n8nUri}: ${error.message}`,
{
cause: error,
},
);
}
}
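For orientation, a minimal usage sketch of this helper follows; the host and token values are placeholders, and the real startup wiring lives in start.ts further below.

```ts
import { authenticate } from './authenticator';

// Exchange the shared runner secret for a one-time grant token.
// Both values are placeholders; in practice they come from env vars (see start.ts).
const grantToken = await authenticate({
  n8nUri: 'localhost:5678',
  authToken: 'shared-secret',
});

// The grant token is then sent as a Bearer header when the runner opens its WebSocket.
console.log('received grant token of length', grantToken.length);
```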

View file

@ -0,0 +1,147 @@
import { getAdditionalKeys } from 'n8n-core';
import {
type INode,
type INodeType,
type ITaskDataConnections,
type IWorkflowExecuteAdditionalData,
WorkflowDataProxy,
type WorkflowParameters,
type IDataObject,
type IExecuteData,
type INodeExecutionData,
type INodeParameters,
type IRunExecutionData,
// type IWorkflowDataProxyAdditionalKeys,
Workflow,
type WorkflowExecuteMode,
} from 'n8n-workflow';
import * as a from 'node:assert';
import { runInNewContext, type Context } from 'node:vm';
import type { TaskResultData } from './runner-types';
import { type Task, TaskRunner } from './task-runner';
interface JSExecSettings {
code: string;
// For workflow data proxy
mode: WorkflowExecuteMode;
}
export interface PartialAdditionalData {
executionId?: string;
restartExecutionId?: string;
restApiUrl: string;
instanceBaseUrl: string;
formWaitingBaseUrl: string;
webhookBaseUrl: string;
webhookWaitingBaseUrl: string;
webhookTestBaseUrl: string;
currentNodeParameters?: INodeParameters;
executionTimeoutTimestamp?: number;
userId?: string;
variables: IDataObject;
}
export interface AllCodeTaskData {
workflow: Omit<WorkflowParameters, 'nodeTypes'>;
inputData: ITaskDataConnections;
node: INode;
runExecutionData: IRunExecutionData;
runIndex: number;
itemIndex: number;
activeNodeName: string;
connectionInputData: INodeExecutionData[];
siblingParameters: INodeParameters;
mode: WorkflowExecuteMode;
executeData?: IExecuteData;
defaultReturnRunIndex: number;
selfData: IDataObject;
contextNodeName: string;
additionalData: PartialAdditionalData;
}
export class JsTaskRunner extends TaskRunner {
constructor(
taskType: string,
wsUrl: string,
grantToken: string,
maxConcurrency: number,
name?: string,
) {
super(taskType, wsUrl, grantToken, maxConcurrency, name ?? 'JS Task Runner');
}
async executeTask(task: Task<JSExecSettings>): Promise<TaskResultData> {
const allData = await this.requestData<AllCodeTaskData>(task.taskId, 'all');
const settings = task.settings;
a.ok(settings, 'JS Code not sent to runner');
const workflowParams = allData.workflow;
const workflow = new Workflow({
...workflowParams,
nodeTypes: {
getByNameAndVersion() {
return undefined as unknown as INodeType;
},
getByName() {
return undefined as unknown as INodeType;
},
getKnownTypes() {
return {};
},
},
});
const dataProxy = new WorkflowDataProxy(
workflow,
allData.runExecutionData,
allData.runIndex,
allData.itemIndex,
allData.activeNodeName,
allData.connectionInputData,
allData.siblingParameters,
settings.mode,
getAdditionalKeys(
allData.additionalData as IWorkflowExecuteAdditionalData,
allData.mode,
allData.runExecutionData,
),
allData.executeData,
allData.defaultReturnRunIndex,
allData.selfData,
allData.contextNodeName,
);
const customConsole = {
log: (...args: unknown[]) => {
const logOutput = args
.map((arg) => (typeof arg === 'object' && arg !== null ? JSON.stringify(arg) : arg))
.join(' ');
console.log('[JS Code]', logOutput);
void this.makeRpcCall(task.taskId, 'logNodeOutput', [logOutput]);
},
};
const context: Context = {
require,
module: {},
console: customConsole,
...dataProxy.getDataProxy(),
...this.buildRpcCallObject(task.taskId),
};
const result = (await runInNewContext(
`module.exports = async function() {${settings.code}\n}()`,
context,
)) as TaskResultData['result'];
return {
result,
customData: allData.runExecutionData.resultData.metadata,
};
}
}
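To make the sandboxing step concrete, here is a stripped-down sketch of how `executeTask` wraps and evaluates a user snippet with `runInNewContext`; the snippet itself is hypothetical, and the real call additionally injects the workflow data proxy, the custom console, and the RPC helpers into the context.

```ts
import { runInNewContext } from 'node:vm';

// Hypothetical user snippet, standing in for settings.code.
const code = "return [{ json: { greeting: 'hello from the task runner' } }];";

// Same wrapping as in executeTask: the snippet becomes the body of an async IIFE
// whose resolved value is what gets reported back as TaskResultData.result.
const result = await runInNewContext(`module.exports = async function() {${code}\n}()`, {
  module: {},
});

console.log(result); // [ { json: { greeting: 'hello from the task runner' } } ]
```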

View file

@ -0,0 +1,2 @@
export * from './task-runner';
export * from './runner-types';

View file

@ -0,0 +1,231 @@
import type { INodeExecutionData } from 'n8n-workflow';
export type DataRequestType = 'input' | 'node' | 'all';
export interface TaskResultData {
result: INodeExecutionData[];
customData?: Record<string, string>;
}
export namespace N8nMessage {
export namespace ToRunner {
export interface InfoRequest {
type: 'broker:inforequest';
}
export interface RunnerRegistered {
type: 'broker:runnerregistered';
}
export interface TaskOfferAccept {
type: 'broker:taskofferaccept';
taskId: string;
offerId: string;
}
export interface TaskCancel {
type: 'broker:taskcancel';
taskId: string;
reason: string;
}
export interface TaskSettings {
type: 'broker:tasksettings';
taskId: string;
settings: unknown;
}
export interface RPCResponse {
type: 'broker:rpcresponse';
callId: string;
taskId: string;
status: 'success' | 'error';
data: unknown;
}
export interface TaskDataResponse {
type: 'broker:taskdataresponse';
taskId: string;
requestId: string;
data: unknown;
}
export type All =
| InfoRequest
| TaskOfferAccept
| TaskCancel
| TaskSettings
| RunnerRegistered
| RPCResponse
| TaskDataResponse;
}
export namespace ToRequester {
export interface TaskReady {
type: 'broker:taskready';
requestId: string;
taskId: string;
}
export interface TaskDone {
type: 'broker:taskdone';
taskId: string;
data: TaskResultData;
}
export interface TaskError {
type: 'broker:taskerror';
taskId: string;
error: unknown;
}
export interface TaskDataRequest {
type: 'broker:taskdatarequest';
taskId: string;
requestId: string;
requestType: DataRequestType;
param?: string;
}
export interface RPC {
type: 'broker:rpc';
callId: string;
taskId: string;
name: (typeof RPC_ALLOW_LIST)[number];
params: unknown[];
}
export type All = TaskReady | TaskDone | TaskError | TaskDataRequest | RPC;
}
}
export namespace RequesterMessage {
export namespace ToN8n {
export interface TaskSettings {
type: 'requester:tasksettings';
taskId: string;
settings: unknown;
}
export interface TaskCancel {
type: 'requester:taskcancel';
taskId: string;
reason: string;
}
export interface TaskDataResponse {
type: 'requester:taskdataresponse';
taskId: string;
requestId: string;
data: unknown;
}
export interface RPCResponse {
type: 'requester:rpcresponse';
taskId: string;
callId: string;
status: 'success' | 'error';
data: unknown;
}
export interface TaskRequest {
type: 'requester:taskrequest';
requestId: string;
taskType: string;
}
export type All = TaskSettings | TaskCancel | RPCResponse | TaskDataResponse | TaskRequest;
}
}
export namespace RunnerMessage {
export namespace ToN8n {
export interface Info {
type: 'runner:info';
name: string;
types: string[];
}
export interface TaskAccepted {
type: 'runner:taskaccepted';
taskId: string;
}
export interface TaskRejected {
type: 'runner:taskrejected';
taskId: string;
reason: string;
}
export interface TaskDone {
type: 'runner:taskdone';
taskId: string;
data: TaskResultData;
}
export interface TaskError {
type: 'runner:taskerror';
taskId: string;
error: unknown;
}
export interface TaskOffer {
type: 'runner:taskoffer';
offerId: string;
taskType: string;
validFor: number;
}
export interface TaskDataRequest {
type: 'runner:taskdatarequest';
taskId: string;
requestId: string;
requestType: DataRequestType;
param?: string;
}
export interface RPC {
type: 'runner:rpc';
callId: string;
taskId: string;
name: (typeof RPC_ALLOW_LIST)[number];
params: unknown[];
}
export type All =
| Info
| TaskDone
| TaskError
| TaskAccepted
| TaskRejected
| TaskOffer
| RPC
| TaskDataRequest;
}
}
export const RPC_ALLOW_LIST = [
'helpers.httpRequestWithAuthentication',
'helpers.requestWithAuthenticationPaginated',
// "helpers.normalizeItems"
// "helpers.constructExecutionMetaData"
// "helpers.assertBinaryData"
'helpers.getBinaryDataBuffer',
// "helpers.copyInputItems"
// "helpers.returnJsonArray"
'helpers.getSSHClient',
'helpers.createReadStream',
// "helpers.getStoragePath"
'helpers.writeContentToFile',
'helpers.prepareBinaryData',
'helpers.setBinaryDataBuffer',
'helpers.copyBinaryFile',
'helpers.binaryToBuffer',
// "helpers.binaryToString"
// "helpers.getBinaryPath"
'helpers.getBinaryStream',
'helpers.getBinaryMetadata',
'helpers.createDeferredPromise',
'helpers.httpRequest',
'logNodeOutput',
] as const;
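As a quick orientation to how these unions fit together, the sketch below spells out the offer/accept handshake as typed message literals; the IDs are placeholders.

```ts
import type { N8nMessage, RunnerMessage } from './runner-types';

// 1. The runner advertises free capacity.
const offer: RunnerMessage.ToN8n.TaskOffer = {
  type: 'runner:taskoffer',
  offerId: 'offer-1',
  taskType: 'javascript',
  validFor: 1000, // ms; mirrors VALID_TIME_MS in task-runner.ts
};

// 2. The broker accepts the offer and assigns a task id.
const accept: N8nMessage.ToRunner.TaskOfferAccept = {
  type: 'broker:taskofferaccept',
  offerId: 'offer-1',
  taskId: 'task-1',
};

// 3. The runner confirms; task settings and data requests follow on the same socket.
const confirmation: RunnerMessage.ToN8n.TaskAccepted = {
  type: 'runner:taskaccepted',
  taskId: 'task-1',
};

void [offer, accept, confirmation];
```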

View file

@ -0,0 +1,48 @@
import { ApplicationError, ensureError } from 'n8n-workflow';
import * as a from 'node:assert/strict';
import { authenticate } from './authenticator';
import { JsTaskRunner } from './code';
type Config = {
n8nUri: string;
authToken?: string;
grantToken?: string;
};
function readAndParseConfig(): Config {
const authToken = process.env.N8N_RUNNERS_AUTH_TOKEN;
const grantToken = process.env.N8N_RUNNERS_GRANT_TOKEN;
if (!authToken && !grantToken) {
throw new ApplicationError(
'Missing task runner authentication. Use either N8N_RUNNERS_AUTH_TOKEN or N8N_RUNNERS_GRANT_TOKEN to configure it',
);
}
return {
n8nUri: process.env.N8N_RUNNERS_N8N_URI ?? 'localhost:5678',
authToken,
grantToken,
};
}
void (async function start() {
const config = readAndParseConfig();
let grantToken = config.grantToken;
if (!grantToken) {
a.ok(config.authToken);
grantToken = await authenticate({
authToken: config.authToken,
n8nUri: config.n8nUri,
});
}
const wsUrl = `ws://${config.n8nUri}/runners/_ws`;
new JsTaskRunner('javascript', wsUrl, grantToken, 5);
})().catch((e) => {
const error = ensureError(e);
console.error('Task runner failed to start', { error });
process.exit(1);
});
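In case the environment contract is not obvious from the parsing above, a small sketch with placeholder values: either the shared auth token or a pre-issued grant token must be set, and the broker URI defaults to localhost:5678.

```ts
// Placeholder values; set these before launching dist/start.js (or importing it, as here).
process.env.N8N_RUNNERS_N8N_URI = 'localhost:5678';
process.env.N8N_RUNNERS_AUTH_TOKEN = 'shared-secret';

// Importing the entrypoint runs the IIFE above and connects the JS task runner.
await import('./start');
```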

View file

@ -0,0 +1,362 @@
import { ApplicationError, ensureError } from 'n8n-workflow';
import { nanoid } from 'nanoid';
import { URL } from 'node:url';
import { type MessageEvent, WebSocket } from 'ws';
import {
RPC_ALLOW_LIST,
type RunnerMessage,
type N8nMessage,
type TaskResultData,
} from './runner-types';
export interface Task<T = unknown> {
taskId: string;
settings?: T;
active: boolean;
cancelled: boolean;
}
export interface TaskOffer {
offerId: string;
validUntil: bigint;
}
interface DataRequest {
requestId: string;
resolve: (data: unknown) => void;
reject: (error: unknown) => void;
}
interface RPCCall {
callId: string;
resolve: (data: unknown) => void;
reject: (error: unknown) => void;
}
export interface RPCCallObject {
[name: string]: ((...args: unknown[]) => Promise<unknown>) | RPCCallObject;
}
const VALID_TIME_MS = 1000;
const VALID_EXTRA_MS = 100;
export abstract class TaskRunner {
id: string = nanoid();
ws: WebSocket;
canSendOffers = false;
runningTasks: Map<Task['taskId'], Task> = new Map();
offerInterval: NodeJS.Timeout | undefined;
openOffers: Map<TaskOffer['offerId'], TaskOffer> = new Map();
dataRequests: Map<DataRequest['requestId'], DataRequest> = new Map();
rpcCalls: Map<RPCCall['callId'], RPCCall> = new Map();
constructor(
public taskType: string,
wsUrl: string,
grantToken: string,
private maxConcurrency: number,
public name?: string,
) {
const url = new URL(wsUrl);
url.searchParams.append('id', this.id);
this.ws = new WebSocket(url.toString(), {
headers: {
authorization: `Bearer ${grantToken}`,
},
});
this.ws.addEventListener('message', this.receiveMessage);
this.ws.addEventListener('close', this.stopTaskOffers);
}
private receiveMessage = (message: MessageEvent) => {
// eslint-disable-next-line n8n-local-rules/no-uncaught-json-parse
const data = JSON.parse(message.data as string) as N8nMessage.ToRunner.All;
void this.onMessage(data);
};
private stopTaskOffers = () => {
this.canSendOffers = false;
if (this.offerInterval) {
clearInterval(this.offerInterval);
this.offerInterval = undefined;
}
};
private startTaskOffers() {
this.canSendOffers = true;
if (this.offerInterval) {
clearInterval(this.offerInterval);
}
this.offerInterval = setInterval(() => this.sendOffers(), 250);
}
deleteStaleOffers() {
this.openOffers.forEach((offer, key) => {
if (offer.validUntil < process.hrtime.bigint()) {
this.openOffers.delete(key);
}
});
}
sendOffers() {
this.deleteStaleOffers();
const offersToSend = this.maxConcurrency - (this.openOffers.size + this.runningTasks.size);
for (let i = 0; i < offersToSend; i++) {
const offer: TaskOffer = {
offerId: nanoid(),
validUntil: process.hrtime.bigint() + BigInt((VALID_TIME_MS + VALID_EXTRA_MS) * 1_000_000), // Adding a little extra time to account for latency
};
this.openOffers.set(offer.offerId, offer);
this.send({
type: 'runner:taskoffer',
taskType: this.taskType,
offerId: offer.offerId,
validFor: VALID_TIME_MS,
});
}
}
send(message: RunnerMessage.ToN8n.All) {
this.ws.send(JSON.stringify(message));
}
onMessage(message: N8nMessage.ToRunner.All) {
switch (message.type) {
case 'broker:inforequest':
this.send({
type: 'runner:info',
name: this.name ?? 'Node.js Task Runner SDK',
types: [this.taskType],
});
break;
case 'broker:runnerregistered':
this.startTaskOffers();
break;
case 'broker:taskofferaccept':
this.offerAccepted(message.offerId, message.taskId);
break;
case 'broker:taskcancel':
this.taskCancelled(message.taskId);
break;
case 'broker:tasksettings':
void this.receivedSettings(message.taskId, message.settings);
break;
case 'broker:taskdataresponse':
this.processDataResponse(message.requestId, message.data);
break;
case 'broker:rpcresponse':
this.handleRpcResponse(message.callId, message.status, message.data);
}
}
processDataResponse(requestId: string, data: unknown) {
const request = this.dataRequests.get(requestId);
if (!request) {
return;
}
// Deletion of the request is handled in `requestData`, using a
// `finally` block wrapped around the return
request.resolve(data);
}
hasOpenTasks() {
return this.runningTasks.size < this.maxConcurrency;
}
offerAccepted(offerId: string, taskId: string) {
if (!this.hasOpenTasks()) {
this.send({
type: 'runner:taskrejected',
taskId,
reason: 'No open task slots',
});
return;
}
const offer = this.openOffers.get(offerId);
if (!offer) {
this.send({
type: 'runner:taskrejected',
taskId,
reason: 'Offer expired and no open task slots',
});
return;
} else {
this.openOffers.delete(offerId);
}
this.runningTasks.set(taskId, {
taskId,
active: false,
cancelled: false,
});
this.send({
type: 'runner:taskaccepted',
taskId,
});
}
taskCancelled(taskId: string) {
const task = this.runningTasks.get(taskId);
if (!task) {
return;
}
task.cancelled = true;
if (task.active) {
// TODO
} else {
this.runningTasks.delete(taskId);
}
this.sendOffers();
}
taskErrored(taskId: string, error: unknown) {
this.send({
type: 'runner:taskerror',
taskId,
error,
});
this.runningTasks.delete(taskId);
this.sendOffers();
}
taskDone(taskId: string, data: RunnerMessage.ToN8n.TaskDone['data']) {
this.send({
type: 'runner:taskdone',
taskId,
data,
});
this.runningTasks.delete(taskId);
this.sendOffers();
}
async receivedSettings(taskId: string, settings: unknown) {
const task = this.runningTasks.get(taskId);
if (!task) {
return;
}
if (task.cancelled) {
this.runningTasks.delete(taskId);
return;
}
task.settings = settings;
task.active = true;
try {
const data = await this.executeTask(task);
this.taskDone(taskId, data);
} catch (e) {
const error = ensureError(e);
this.taskErrored(taskId, error.message);
}
}
// eslint-disable-next-line @typescript-eslint/naming-convention
async executeTask(_task: Task): Promise<TaskResultData> {
throw new ApplicationError('Unimplemented');
}
async requestData<T = unknown>(
taskId: Task['taskId'],
type: RunnerMessage.ToN8n.TaskDataRequest['requestType'],
param?: string,
): Promise<T> {
const requestId = nanoid();
const p = new Promise<T>((resolve, reject) => {
this.dataRequests.set(requestId, {
requestId,
resolve: resolve as (data: unknown) => void,
reject,
});
});
this.send({
type: 'runner:taskdatarequest',
taskId,
requestId,
requestType: type,
param,
});
try {
return await p;
} finally {
this.dataRequests.delete(requestId);
}
}
async makeRpcCall(taskId: string, name: RunnerMessage.ToN8n.RPC['name'], params: unknown[]) {
const callId = nanoid();
const dataPromise = new Promise((resolve, reject) => {
this.rpcCalls.set(callId, {
callId,
resolve,
reject,
});
});
this.send({
type: 'runner:rpc',
callId,
taskId,
name,
params,
});
try {
return await dataPromise;
} finally {
this.rpcCalls.delete(callId);
}
}
handleRpcResponse(
callId: string,
status: N8nMessage.ToRunner.RPCResponse['status'],
data: unknown,
) {
const call = this.rpcCalls.get(callId);
if (!call) {
return;
}
if (status === 'success') {
call.resolve(data);
} else {
call.reject(typeof data === 'string' ? new Error(data) : data);
}
}
buildRpcCallObject(taskId: string) {
const rpcObject: RPCCallObject = {};
for (const r of RPC_ALLOW_LIST) {
const splitPath = r.split('.');
let obj = rpcObject;
splitPath.forEach((s, index) => {
if (index !== splitPath.length - 1) {
obj[s] ??= {}; // keep functions already added under the same prefix, e.g. other helpers.*
obj = obj[s];
return;
}
obj[s] = async (...args: unknown[]) => await this.makeRpcCall(taskId, r, args);
});
}
return rpcObject;
}
}
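For readers skimming `buildRpcCallObject`, this standalone sketch shows the same dotted-name expansion with the WebSocket transport replaced by a logging stub; the allow-list subset and task id are placeholders.

```ts
type StubRpcObject = { [name: string]: ((...args: unknown[]) => Promise<unknown>) | StubRpcObject };

// Subset of RPC_ALLOW_LIST, expanded the same way buildRpcCallObject expands it.
const allowList = ['helpers.httpRequest', 'helpers.prepareBinaryData', 'logNodeOutput'] as const;

function buildStubRpcObject(taskId: string): StubRpcObject {
  const rpcObject: StubRpcObject = {};
  for (const name of allowList) {
    const segments = name.split('.');
    let obj = rpcObject;
    segments.forEach((segment, index) => {
      if (index !== segments.length - 1) {
        obj[segment] ??= {}; // keep earlier entries that share the prefix
        obj = obj[segment] as StubRpcObject;
        return;
      }
      // In the real class this forwards to makeRpcCall(taskId, name, args).
      obj[segment] = async (...args: unknown[]) => {
        console.log(`[stub] runner:rpc ${name} for task ${taskId}`, args);
        return undefined;
      };
    });
  }
  return rpcObject;
}

// Inside the sandbox this surfaces as e.g. `await helpers.httpRequest(...)`.
void buildStubRpcObject('task-1');
```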

View file

@ -0,0 +1,11 @@
{
"extends": ["./tsconfig.json", "../../../tsconfig.build.json"],
"compilerOptions": {
"composite": true,
"rootDir": "src",
"outDir": "dist",
"tsBuildInfoFile": "dist/build.tsbuildinfo"
},
"include": ["src/**/*.ts"],
"exclude": ["src/**/__tests__/**"]
}

View file

@ -0,0 +1,12 @@
{
"extends": ["../../../tsconfig.json", "../../../tsconfig.backend.json"],
"compilerOptions": {
"rootDir": ".",
"baseUrl": "src",
"paths": {
"@/*": ["./*"]
},
"tsBuildInfoFile": "dist/typecheck.tsbuildinfo"
},
"include": ["src/**/*.ts"]
}

View file

@ -2,6 +2,26 @@
This list shows all the versions which include breaking changes and how to upgrade.
## 1.63.0
### What changed?
The worker server used to bind to IPv6 by default. It now binds to IPv4 by default.
### When is action necessary?
If you experience a port conflict when starting a worker server on its default port, set a different port with `QUEUE_HEALTH_CHECK_PORT`.
## 1.57.0
### What changed?
The `verbose` log level was merged into the `debug` log level.
### When is action necessary?
If you are setting the env var `N8N_LOG_LEVEL=verbose`, please update your log level to `N8N_LOG_LEVEL=debug`.
## 1.55.0
### What changed?

View file

@ -1,6 +1,6 @@
{
"name": "n8n",
"version": "1.61.0",
"version": "1.62.1",
"description": "n8n Workflow Automation Tool",
"main": "dist/index",
"types": "dist/index.d.ts",
@ -92,7 +92,8 @@
"@n8n/localtunnel": "3.0.0",
"@n8n/n8n-nodes-langchain": "workspace:*",
"@n8n/permissions": "workspace:*",
"@n8n/typeorm": "0.3.20-10",
"@n8n/task-runner": "workspace:*",
"@n8n/typeorm": "0.3.20-12",
"@n8n_io/ai-assistant-sdk": "1.9.4",
"@n8n_io/license-sdk": "2.13.1",
"@oclif/core": "4.0.7",
@ -170,7 +171,7 @@
"typedi": "catalog:",
"uuid": "catalog:",
"validator": "13.7.0",
"winston": "3.8.2",
"winston": "3.14.2",
"ws": "8.17.1",
"xml2js": "catalog:",
"xmllint-wasm": "3.0.1",

View file

@ -5,7 +5,7 @@ import type { InstanceSettings } from 'n8n-core';
import config from '@/config';
import { N8N_VERSION } from '@/constants';
import { License } from '@/license';
import type { Logger } from '@/logger';
import type { Logger } from '@/logging/logger.service';
jest.mock('@n8n_io/license-sdk');

View file

@ -1,21 +1,80 @@
import { mock } from 'jest-mock-extended';
import type {
IExecuteWorkflowInfo,
IWorkflowExecuteAdditionalData,
ExecuteWorkflowOptions,
IRun,
} from 'n8n-workflow';
import type PCancelable from 'p-cancelable';
import Container from 'typedi';
import { ActiveExecutions } from '@/active-executions';
import { CredentialsHelper } from '@/credentials-helper';
import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
import { ExecutionRepository } from '@/databases/repositories/execution.repository';
import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import { VariablesService } from '@/environments/variables/variables.service.ee';
import { EventService } from '@/events/event.service';
import { ExternalHooks } from '@/external-hooks';
import { SecretsHelper } from '@/secrets-helpers';
import { getBase } from '@/workflow-execute-additional-data';
import { WorkflowStatisticsService } from '@/services/workflow-statistics.service';
import { SubworkflowPolicyChecker } from '@/subworkflows/subworkflow-policy-checker.service';
import { Telemetry } from '@/telemetry';
import { PermissionChecker } from '@/user-management/permission-checker';
import { executeWorkflow, getBase } from '@/workflow-execute-additional-data';
import { mockInstance } from '@test/mocking';
const run = mock<IRun>({
data: { resultData: {} },
finished: true,
mode: 'manual',
startedAt: new Date(),
status: 'new',
});
const cancelablePromise = mock<PCancelable<IRun>>({
then: jest
.fn()
.mockImplementation(async (onfulfilled) => await Promise.resolve(run).then(onfulfilled)),
catch: jest
.fn()
.mockImplementation(async (onrejected) => await Promise.resolve(run).catch(onrejected)),
finally: jest
.fn()
.mockImplementation(async (onfinally) => await Promise.resolve(run).finally(onfinally)),
[Symbol.toStringTag]: 'PCancelable',
});
jest.mock('n8n-core', () => ({
__esModule: true,
...jest.requireActual('n8n-core'),
WorkflowExecute: jest.fn().mockImplementation(() => ({
processRunExecutionData: jest.fn().mockReturnValue(cancelablePromise),
})),
}));
jest.mock('../workflow-helpers', () => ({
...jest.requireActual('../workflow-helpers'),
getDataLastExecutedNodeData: jest.fn().mockReturnValue({ data: { main: [] } }),
}));
describe('WorkflowExecuteAdditionalData', () => {
const variablesService = mockInstance(VariablesService);
variablesService.getAllCached.mockResolvedValue([]);
const credentialsHelper = mockInstance(CredentialsHelper);
const secretsHelper = mockInstance(SecretsHelper);
const eventService = mockInstance(EventService);
mockInstance(ExternalHooks);
Container.set(VariablesService, variablesService);
Container.set(CredentialsHelper, credentialsHelper);
Container.set(SecretsHelper, secretsHelper);
const executionRepository = mockInstance(ExecutionRepository);
mockInstance(Telemetry);
const workflowRepository = mockInstance(WorkflowRepository);
const activeExecutions = mockInstance(ActiveExecutions);
mockInstance(PermissionChecker);
mockInstance(SubworkflowPolicyChecker);
mockInstance(WorkflowStatisticsService);
test('logAiEvent should call MessageEventBus', async () => {
const additionalData = await getBase('user-id');
@ -35,4 +94,18 @@ describe('WorkflowExecuteAdditionalData', () => {
expect(eventService.emit).toHaveBeenCalledTimes(1);
expect(eventService.emit).toHaveBeenCalledWith(eventName, payload);
});
it('`executeWorkflow` should set subworkflow execution as running', async () => {
const executionId = '123';
workflowRepository.get.mockResolvedValue(mock<WorkflowEntity>({ id: executionId, nodes: [] }));
activeExecutions.add.mockResolvedValue(executionId);
await executeWorkflow(
mock<IExecuteWorkflowInfo>(),
mock<IWorkflowExecuteAdditionalData>(),
mock<ExecuteWorkflowOptions>({ loadedWorkflowData: undefined }),
);
expect(executionRepository.setRunning).toHaveBeenCalledWith(executionId);
});
});

View file

@ -13,7 +13,7 @@ import { N8N_VERSION, TEMPLATES_DIR, inDevelopment, inTest } from '@/constants';
import * as Db from '@/db';
import { OnShutdown } from '@/decorators/on-shutdown';
import { ExternalHooks } from '@/external-hooks';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { rawBodyReader, bodyParser, corsMiddleware } from '@/middlewares';
import { send, sendErrorResponse } from '@/response-helper';
import { WaitingForms } from '@/waiting-forms';

View file

@ -13,12 +13,12 @@ import { Service } from 'typedi';
import { ExecutionRepository } from '@/databases/repositories/execution.repository';
import { ExecutionNotFoundError } from '@/errors/execution-not-found-error';
import type {
ExecutionPayload,
CreateExecutionPayload,
IExecutingWorkflowData,
IExecutionDb,
IExecutionsCurrentSummary,
} from '@/interfaces';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { isWorkflowIdValid } from '@/utils';
import { ConcurrencyControlService } from './concurrency/concurrency-control.service';
@ -52,11 +52,10 @@ export class ActiveExecutions {
if (executionId === undefined) {
// Is a new execution so save in DB
const fullExecutionData: ExecutionPayload = {
const fullExecutionData: CreateExecutionPayload = {
data: executionData.executionData!,
mode,
finished: false,
startedAt: new Date(),
workflowData: executionData.workflowData,
status: executionStatus,
workflowId: executionData.workflowData.id,
@ -74,7 +73,10 @@ export class ActiveExecutions {
executionId = await this.executionRepository.createNewExecution(fullExecutionData);
assert(executionId);
await this.concurrencyControl.throttle({ mode, executionId });
if (config.getEnv('executions.mode') === 'regular') {
await this.concurrencyControl.throttle({ mode, executionId });
await this.executionRepository.setRunning(executionId);
}
executionStatus = 'running';
} else {
// Is an existing execution we want to finish so update in DB
@ -86,6 +88,7 @@ export class ActiveExecutions {
data: executionData.executionData!,
waitTill: null,
status: executionStatus,
// this is resuming, so keep `startedAt` as it was
};
await this.executionRepository.updateExistingExecution(executionId, execution);

View file

@ -37,6 +37,7 @@ import { WorkflowRepository } from '@/databases/repositories/workflow.repository
import { OnShutdown } from '@/decorators/on-shutdown';
import { ExternalHooks } from '@/external-hooks';
import type { IWorkflowDb } from '@/interfaces';
import { Logger } from '@/logging/logger.service';
import { NodeTypes } from '@/node-types';
import { ActiveWorkflowsService } from '@/services/active-workflows.service';
import { OrchestrationService } from '@/services/orchestration.service';
@ -47,7 +48,6 @@ import { WorkflowExecutionService } from '@/workflows/workflow-execution.service
import { WorkflowStaticDataService } from '@/workflows/workflow-static-data.service';
import { ExecutionService } from './executions/execution.service';
import { Logger } from './logger';
interface QueuedActivation {
activationMode: WorkflowActivateMode;

View file

@ -12,7 +12,7 @@ import { UserRepository } from '@/databases/repositories/user.repository';
import { AuthError } from '@/errors/response-errors/auth.error';
import { ForbiddenError } from '@/errors/response-errors/forbidden.error';
import { License } from '@/license';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import type { AuthenticatedRequest } from '@/requests';
import { JwtService } from '@/services/jwt.service';
import { UrlService } from '@/services/url.service';

View file

@ -13,13 +13,13 @@ import { generateHostInstanceId } from '@/databases/utils/generators';
import * as Db from '@/db';
import { initErrorHandling } from '@/error-reporting';
import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus';
import { TelemetryEventRelay } from '@/events/telemetry-event-relay';
import { TelemetryEventRelay } from '@/events/relays/telemetry.event-relay';
import { initExpressionEvaluator } from '@/expression-evaluator';
import { ExternalHooks } from '@/external-hooks';
import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee';
import { License } from '@/license';
import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { NodeTypes } from '@/node-types';
import { PostHogClient } from '@/posthog';
import { ShutdownService } from '@/shutdown/shutdown.service';

View file

@ -4,7 +4,7 @@ import { mock } from 'jest-mock-extended';
import { main } from '@/commands/db/revert';
import type { IrreversibleMigration, ReversibleMigration } from '@/databases/types';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { mockInstance } from '@test/mocking';
const logger = mockInstance(Logger);

View file

@ -8,7 +8,7 @@ import { Container } from 'typedi';
import { getConnectionOptions } from '@/databases/config';
import type { Migration } from '@/databases/types';
import { wrapMigration } from '@/databases/utils/migration-helpers';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
// This function is extracted to make it easier to unit test it.
// Mocking turned into a mess due to this command using typeorm and the db

View file

@ -21,6 +21,8 @@ import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'
import { EventService } from '@/events/event.service';
import { ExecutionService } from '@/executions/execution.service';
import { License } from '@/license';
import { SingleMainTaskManager } from '@/runners/task-managers/single-main-task-manager';
import { TaskManager } from '@/runners/task-managers/task-manager';
import { Publisher } from '@/scaling/pubsub/publisher.service';
import { Server } from '@/server';
import { OrchestrationHandlerMainService } from '@/services/orchestration/main/orchestration.handler.main.service';
@ -220,6 +222,17 @@ export class Start extends BaseCommand {
if (!this.globalConfig.endpoints.disableUi) {
await this.generateStaticAssets();
}
if (!this.globalConfig.taskRunners.disabled) {
Container.set(TaskManager, new SingleMainTaskManager());
const { TaskRunnerServer } = await import('@/runners/task-runner-server');
const taskRunnerServer = Container.get(TaskRunnerServer);
await taskRunnerServer.start();
const { TaskRunnerProcess } = await import('@/runners/task-runner-process');
const runnerProcess = Container.get(TaskRunnerProcess);
await runnerProcess.start();
}
}
async initOrchestration() {
@ -365,10 +378,9 @@ export class Start extends BaseCommand {
if (executions.length === 0) return;
this.logger.debug(
'[Startup] Found enqueued executions to run',
executions.map((e) => e.id),
);
this.logger.debug('[Startup] Found enqueued executions to run', {
executionIds: executions.map((e) => e.id),
});
const ownershipService = Container.get(OwnershipService);
const workflowRunner = Container.get(WorkflowRunner);

View file

@ -4,7 +4,8 @@ import { Container } from 'typedi';
import { ActiveExecutions } from '@/active-executions';
import config from '@/config';
import { OrchestrationHandlerWebhookService } from '@/services/orchestration/webhook/orchestration.handler.webhook.service';
import { PubSubHandler } from '@/scaling/pubsub/pubsub-handler';
import { Subscriber } from '@/scaling/pubsub/subscriber.service';
import { OrchestrationWebhookService } from '@/services/orchestration/webhook/orchestration.webhook.service';
import { WebhookServer } from '@/webhooks/webhook-server';
@ -110,6 +111,11 @@ export class Webhook extends BaseCommand {
async initOrchestration() {
await Container.get(OrchestrationWebhookService).init();
await Container.get(OrchestrationHandlerWebhookService).init();
const subscriber = Container.get(Subscriber);
await subscriber.subscribe('n8n.commands');
subscriber.setCommandMessageHandler();
Container.get(PubSubHandler).init();
}
}

View file

@ -6,7 +6,7 @@ import config from '@/config';
import { N8N_VERSION, inTest } from '@/constants';
import { EventMessageGeneric } from '@/eventbus/event-message-classes/event-message-generic';
import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus';
import { LogStreamingEventRelay } from '@/events/log-streaming-event-relay';
import { LogStreamingEventRelay } from '@/events/relays/log-streaming.event-relay';
import { JobProcessor } from '@/scaling/job-processor';
import { Publisher } from '@/scaling/pubsub/publisher.service';
import type { ScalingService } from '@/scaling/scaling.service';

View file

@ -11,7 +11,7 @@ import type { ExecutionRepository } from '@/databases/repositories/execution.rep
import { InvalidConcurrencyLimitError } from '@/errors/invalid-concurrency-limit.error';
import type { EventService } from '@/events/event.service';
import type { IExecutingWorkflowData } from '@/interfaces';
import type { Logger } from '@/logger';
import type { Logger } from '@/logging/logger.service';
import type { Telemetry } from '@/telemetry';
import { ConcurrencyQueue } from '../concurrency-queue';

View file

@ -7,7 +7,8 @@ import { InvalidConcurrencyLimitError } from '@/errors/invalid-concurrency-limit
import { UnknownExecutionModeError } from '@/errors/unknown-execution-mode.error';
import { EventService } from '@/events/event.service';
import type { IExecutingWorkflowData } from '@/interfaces';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import type { LogMetadata } from '@/logging/types';
import { Telemetry } from '@/telemetry';
import { ConcurrencyQueue } from './concurrency-queue';
@ -70,7 +71,6 @@ export class ConcurrencyControlService {
this.productionQueue.on('execution-released', async (executionId) => {
this.log('Execution released', { executionId });
await this.executionRepository.resetStartedAt(executionId);
});
}
@ -171,8 +171,8 @@ export class ConcurrencyControlService {
throw new UnknownExecutionModeError(mode);
}
private log(message: string, meta?: object) {
this.logger.debug(['[Concurrency Control]', message].join(' '), meta);
private log(message: string, metadata?: LogMetadata) {
this.logger.debug(['[Concurrency Control]', message].join(' '), metadata);
}
private shouldReport(capacity: number) {

View file

@ -1,7 +1,6 @@
import { GlobalConfig } from '@n8n/config';
import convict from 'convict';
import { InstanceSettings } from 'n8n-core';
import { LOG_LEVELS } from 'n8n-workflow';
import path from 'path';
import { Container } from 'typedi';
@ -296,41 +295,6 @@ export const schema = {
env: 'EXTERNAL_HOOK_FILES',
},
logs: {
level: {
doc: 'Log output level',
format: LOG_LEVELS,
default: 'info',
env: 'N8N_LOG_LEVEL',
},
output: {
doc: 'Where to output logs. Options are: console, file. Multiple can be separated by comma (",")',
format: String,
default: 'console',
env: 'N8N_LOG_OUTPUT',
},
file: {
fileCountMax: {
doc: 'Maximum number of files to keep.',
format: Number,
default: 100,
env: 'N8N_LOG_FILE_COUNT_MAX',
},
fileSizeMax: {
doc: 'Maximum size for each log file in MB.',
format: Number,
default: 16,
env: 'N8N_LOG_FILE_SIZE_MAX',
},
location: {
doc: 'Log file location; only used if log output is set to file.',
format: String,
default: path.join(Container.get(InstanceSettings).n8nFolder, 'logs/n8n.log'),
env: 'N8N_LOG_FILE_LOCATION',
},
},
},
push: {
backend: {
format: ['sse', 'websocket'] as const,

View file

@ -168,6 +168,8 @@ export const ARTIFICIAL_TASK_DATA = {
],
};
/** Lowest priority, meaning shut down happens after other groups */
export const LOWEST_SHUTDOWN_PRIORITY = 0;
export const DEFAULT_SHUTDOWN_PRIORITY = 100;
/** Highest priority, meaning shut down happens before all other groups */
export const HIGHEST_SHUTDOWN_PRIORITY = 200;

View file

@ -0,0 +1,79 @@
import { mock } from 'jest-mock-extended';
import { randomString } from 'n8n-workflow';
import { Container } from 'typedi';
import type { ApiKey } from '@/databases/entities/api-key';
import type { User } from '@/databases/entities/user';
import { ApiKeyRepository } from '@/databases/repositories/api-key.repository';
import type { ApiKeysRequest, AuthenticatedRequest } from '@/requests';
import { API_KEY_PREFIX } from '@/services/public-api-key.service';
import { mockInstance } from '@test/mocking';
import { ApiKeysController } from '../api-keys.controller';
describe('ApiKeysController', () => {
const apiKeysRepository = mockInstance(ApiKeyRepository);
const controller = Container.get(ApiKeysController);
let req: AuthenticatedRequest;
beforeAll(() => {
req = mock<AuthenticatedRequest>({ user: mock<User>({ id: '123' }) });
});
describe('createAPIKey', () => {
it('should create and save an API key', async () => {
const apiKeyData = {
id: '123',
userId: '123',
label: 'My API Key',
apiKey: `${API_KEY_PREFIX}${randomString(42)}`,
createdAt: new Date(),
} as ApiKey;
apiKeysRepository.upsert.mockImplementation();
apiKeysRepository.findOneByOrFail.mockResolvedValue(apiKeyData);
const newApiKey = await controller.createAPIKey(req);
expect(apiKeysRepository.upsert).toHaveBeenCalled();
expect(apiKeyData).toEqual(newApiKey);
});
});
describe('getAPIKeys', () => {
it('should return the users api keys redacted', async () => {
const apiKeyData = {
id: '123',
userId: '123',
label: 'My API Key',
apiKey: `${API_KEY_PREFIX}${randomString(42)}`,
createdAt: new Date(),
} as ApiKey;
apiKeysRepository.findBy.mockResolvedValue([apiKeyData]);
const apiKeys = await controller.getAPIKeys(req);
expect(apiKeys[0].apiKey).not.toEqual(apiKeyData.apiKey);
expect(apiKeysRepository.findBy).toHaveBeenCalledWith({ userId: req.user.id });
});
});
describe('deleteAPIKey', () => {
it('should delete the API key', async () => {
const user = mock<User>({
id: '123',
password: 'password',
authIdentities: [],
role: 'global:member',
mfaEnabled: false,
});
const req = mock<ApiKeysRequest.DeleteAPIKey>({ user, params: { id: user.id } });
await controller.deleteAPIKey(req);
expect(apiKeysRepository.delete).toHaveBeenCalledWith({
userId: req.user.id,
id: req.params.id,
});
});
});
});

View file

@ -2,14 +2,11 @@ import { UserUpdateRequestDto } from '@n8n/api-types';
import type { Response } from 'express';
import { mock, anyObject } from 'jest-mock-extended';
import jwt from 'jsonwebtoken';
import { randomString } from 'n8n-workflow';
import { Container } from 'typedi';
import { AUTH_COOKIE_NAME } from '@/constants';
import { MeController } from '@/controllers/me.controller';
import type { ApiKey } from '@/databases/entities/api-key';
import type { User } from '@/databases/entities/user';
import { ApiKeyRepository } from '@/databases/repositories/api-key.repository';
import { AuthUserRepository } from '@/databases/repositories/auth-user.repository';
import { InvalidAuthTokenRepository } from '@/databases/repositories/invalid-auth-token.repository';
import { UserRepository } from '@/databases/repositories/user.repository';
@ -21,7 +18,6 @@ import type { PublicUser } from '@/interfaces';
import { License } from '@/license';
import { MfaService } from '@/mfa/mfa.service';
import type { AuthenticatedRequest, MeRequest } from '@/requests';
import { API_KEY_PREFIX } from '@/services/public-api-key.service';
import { UserService } from '@/services/user.service';
import { mockInstance } from '@test/mocking';
import { badPasswords } from '@test/test-data';
@ -34,7 +30,6 @@ describe('MeController', () => {
const userService = mockInstance(UserService);
const userRepository = mockInstance(UserRepository);
const mockMfaService = mockInstance(MfaService);
const apiKeysRepository = mockInstance(ApiKeyRepository);
mockInstance(AuthUserRepository);
mockInstance(InvalidAuthTokenRepository);
mockInstance(License).isWithinUsersLimit.mockReturnValue(true);
@ -413,68 +408,4 @@ describe('MeController', () => {
await expect(controller.storeSurveyAnswers(req)).rejects.toThrowError(BadRequestError);
});
});
describe('API Key methods', () => {
let req: AuthenticatedRequest;
beforeAll(() => {
req = mock<AuthenticatedRequest>({ user: mock<User>({ id: '123' }) });
});
describe('createAPIKey', () => {
it('should create and save an API key', async () => {
const apiKeyData = {
id: '123',
userId: '123',
label: 'My API Key',
apiKey: `${API_KEY_PREFIX}${randomString(42)}`,
createdAt: new Date(),
} as ApiKey;
apiKeysRepository.upsert.mockImplementation();
apiKeysRepository.findOneByOrFail.mockResolvedValue(apiKeyData);
const newApiKey = await controller.createAPIKey(req);
expect(apiKeysRepository.upsert).toHaveBeenCalled();
expect(apiKeyData).toEqual(newApiKey);
});
});
describe('getAPIKeys', () => {
it('should return the users api keys redacted', async () => {
const apiKeyData = {
id: '123',
userId: '123',
label: 'My API Key',
apiKey: `${API_KEY_PREFIX}${randomString(42)}`,
createdAt: new Date(),
} as ApiKey;
apiKeysRepository.findBy.mockResolvedValue([apiKeyData]);
const apiKeys = await controller.getAPIKeys(req);
expect(apiKeys[0].apiKey).not.toEqual(apiKeyData.apiKey);
expect(apiKeysRepository.findBy).toHaveBeenCalledWith({ userId: req.user.id });
});
});
describe('deleteAPIKey', () => {
it('should delete the API key', async () => {
const user = mock<User>({
id: '123',
password: 'password',
authIdentities: [],
role: 'global:member',
mfaEnabled: false,
});
const req = mock<MeRequest.DeleteAPIKey>({ user, params: { id: user.id } });
await controller.deleteAPIKey(req);
expect(apiKeysRepository.delete).toHaveBeenCalledWith({
userId: req.user.id,
id: req.params.id,
});
});
});
});
});

View file

@ -0,0 +1,56 @@
import { type RequestHandler } from 'express';
import { Delete, Get, Post, RestController } from '@/decorators';
import { EventService } from '@/events/event.service';
import { isApiEnabled } from '@/public-api';
import { ApiKeysRequest, AuthenticatedRequest } from '@/requests';
import { PublicApiKeyService } from '@/services/public-api-key.service';
export const isApiEnabledMiddleware: RequestHandler = (_, res, next) => {
if (isApiEnabled()) {
next();
} else {
res.status(404).end();
}
};
@RestController('/api-keys')
export class ApiKeysController {
constructor(
private readonly eventService: EventService,
private readonly publicApiKeyService: PublicApiKeyService,
) {}
/**
* Create an API Key
*/
@Post('/', { middlewares: [isApiEnabledMiddleware] })
async createAPIKey(req: AuthenticatedRequest) {
const newApiKey = await this.publicApiKeyService.createPublicApiKeyForUser(req.user);
this.eventService.emit('public-api-key-created', { user: req.user, publicApi: false });
return newApiKey;
}
/**
* Get API keys
*/
@Get('/', { middlewares: [isApiEnabledMiddleware] })
async getAPIKeys(req: AuthenticatedRequest) {
const apiKeys = await this.publicApiKeyService.getRedactedApiKeysForUser(req.user);
return apiKeys;
}
/**
* Delete an API Key
*/
@Delete('/:id', { middlewares: [isApiEnabledMiddleware] })
async deleteAPIKey(req: ApiKeysRequest.DeleteAPIKey) {
await this.publicApiKeyService.deleteApiKeyForUser(req.user, req.params.id);
this.eventService.emit('public-api-key-deleted', { user: req.user, publicApi: false });
return { success: true };
}
}
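A hedged sketch of calling the relocated endpoints from an authenticated browser session; it assumes cookie auth and the default `/rest` prefix that this changeset also uses for the runner auth endpoint.

```ts
// Assumes an authenticated browser session (cookie auth) and the default `/rest` prefix.
const base = 'http://localhost:5678/rest/api-keys';

// Create a key, then list the user's keys; the GET response is redacted by the service.
await fetch(base, { method: 'POST', credentials: 'include' });
const response = await fetch(base, { credentials: 'include' });
console.log(await response.json());
```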

View file

@ -14,7 +14,7 @@ import { ForbiddenError } from '@/errors/response-errors/forbidden.error';
import { EventService } from '@/events/event.service';
import type { PublicUser } from '@/interfaces';
import { License } from '@/license';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { MfaService } from '@/mfa/mfa.service';
import { PostHogClient } from '@/posthog';
import { AuthenticatedRequest, LoginRequest, UserRequest } from '@/requests';

View file

@ -14,7 +14,7 @@ import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus'
import type { BooleanLicenseFeature, NumericLicenseFeature } from '@/interfaces';
import type { FeatureReturnType } from '@/license';
import { License } from '@/license';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { MfaService } from '@/mfa/mfa.service';
import { Push } from '@/push';
import type { UserSetupPayload } from '@/requests';

View file

@ -12,7 +12,7 @@ import { ForbiddenError } from '@/errors/response-errors/forbidden.error';
import { EventService } from '@/events/event.service';
import { ExternalHooks } from '@/external-hooks';
import { License } from '@/license';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { PostHogClient } from '@/posthog';
import { UserRequest } from '@/requests';
import { PasswordUtility } from '@/services/password.utility';

View file

@ -4,37 +4,26 @@ import {
UserUpdateRequestDto,
} from '@n8n/api-types';
import { plainToInstance } from 'class-transformer';
import { type RequestHandler, Response } from 'express';
import { Response } from 'express';
import { AuthService } from '@/auth/auth.service';
import type { User } from '@/databases/entities/user';
import { UserRepository } from '@/databases/repositories/user.repository';
import { Body, Delete, Get, Patch, Post, RestController } from '@/decorators';
import { Body, Patch, Post, RestController } from '@/decorators';
import { BadRequestError } from '@/errors/response-errors/bad-request.error';
import { InvalidMfaCodeError } from '@/errors/response-errors/invalid-mfa-code.error';
import { EventService } from '@/events/event.service';
import { ExternalHooks } from '@/external-hooks';
import { validateEntity } from '@/generic-helpers';
import type { PublicUser } from '@/interfaces';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { MfaService } from '@/mfa/mfa.service';
import { isApiEnabled } from '@/public-api';
import { AuthenticatedRequest, MeRequest } from '@/requests';
import { PasswordUtility } from '@/services/password.utility';
import { PublicApiKeyService } from '@/services/public-api-key.service';
import { UserService } from '@/services/user.service';
import { isSamlLicensedAndEnabled } from '@/sso/saml/saml-helpers';
import { PersonalizationSurveyAnswersV4 } from './survey-answers.dto';
export const isApiEnabledMiddleware: RequestHandler = (_, res, next) => {
if (isApiEnabled()) {
next();
} else {
res.status(404).end();
}
};
@RestController('/me')
export class MeController {
constructor(
@ -46,7 +35,6 @@ export class MeController {
private readonly userRepository: UserRepository,
private readonly eventService: EventService,
private readonly mfaService: MfaService,
private readonly publicApiKeyService: PublicApiKeyService,
) {}
/**
@ -217,39 +205,6 @@ export class MeController {
return { success: true };
}
/**
* Create an API Key
*/
@Post('/api-keys', { middlewares: [isApiEnabledMiddleware] })
async createAPIKey(req: AuthenticatedRequest) {
const newApiKey = await this.publicApiKeyService.createPublicApiKeyForUser(req.user);
this.eventService.emit('public-api-key-created', { user: req.user, publicApi: false });
return newApiKey;
}
/**
* Get API keys
*/
@Get('/api-keys', { middlewares: [isApiEnabledMiddleware] })
async getAPIKeys(req: AuthenticatedRequest) {
const apiKeys = await this.publicApiKeyService.getRedactedApiKeysForUser(req.user);
return apiKeys;
}
/**
* Delete an API Key
*/
@Delete('/api-keys/:id', { middlewares: [isApiEnabledMiddleware] })
async deleteAPIKey(req: MeRequest.DeleteAPIKey) {
await this.publicApiKeyService.deleteApiKeyForUser(req.user, req.params.id);
this.eventService.emit('public-api-key-deleted', { user: req.user, publicApi: false });
return { success: true };
}
/**
* Update the logged-in user's settings.
*/

View file

@ -15,7 +15,7 @@ import { VariablesService } from '@/environments/variables/variables.service.ee'
import { BadRequestError } from '@/errors/response-errors/bad-request.error';
import { NotFoundError } from '@/errors/response-errors/not-found.error';
import { ExternalHooks } from '@/external-hooks';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import type { OAuthRequest } from '@/requests';
import { SecretsHelper } from '@/secrets-helpers';
import { mockInstance } from '@test/mocking';

View file

@ -15,7 +15,7 @@ import { VariablesService } from '@/environments/variables/variables.service.ee'
import { BadRequestError } from '@/errors/response-errors/bad-request.error';
import { NotFoundError } from '@/errors/response-errors/not-found.error';
import { ExternalHooks } from '@/external-hooks';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import type { OAuthRequest } from '@/requests';
import { SecretsHelper } from '@/secrets-helpers';
import { mockInstance } from '@test/mocking';

View file

@ -15,7 +15,7 @@ import { BadRequestError } from '@/errors/response-errors/bad-request.error';
import { NotFoundError } from '@/errors/response-errors/not-found.error';
import { ExternalHooks } from '@/external-hooks';
import type { ICredentialsDb } from '@/interfaces';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import type { OAuthRequest } from '@/requests';
import { UrlService } from '@/services/url.service';
import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data';

View file

@ -9,7 +9,7 @@ import { GlobalScope, Post, RestController } from '@/decorators';
import { BadRequestError } from '@/errors/response-errors/bad-request.error';
import { EventService } from '@/events/event.service';
import { validateEntity } from '@/generic-helpers';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { PostHogClient } from '@/posthog';
import { OwnerRequest } from '@/requests';
import { PasswordUtility } from '@/services/password.utility';

View file

@ -13,7 +13,7 @@ import { UnprocessableRequestError } from '@/errors/response-errors/unprocessabl
import { EventService } from '@/events/event.service';
import { ExternalHooks } from '@/external-hooks';
import { License } from '@/license';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { MfaService } from '@/mfa/mfa.service';
import { PasswordResetRequest } from '@/requests';
import { PasswordUtility } from '@/services/password.utility';

View file

@ -18,7 +18,7 @@ import { NotFoundError } from '@/errors/response-errors/not-found.error';
import { EventService } from '@/events/event.service';
import { ExternalHooks } from '@/external-hooks';
import type { PublicUser } from '@/interfaces';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { listQueryMiddleware } from '@/middlewares';
import { AuthenticatedRequest, ListQuery, UserRequest } from '@/requests';
import { ProjectService } from '@/services/project.service';

View file

@ -7,7 +7,7 @@ import { WorkflowStatisticsRepository } from '@/databases/repositories/workflow-
import { Get, Middleware, RestController } from '@/decorators';
import { NotFoundError } from '@/errors/response-errors/not-found.error';
import type { IWorkflowStatisticsDataLoaded } from '@/interfaces';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { StatisticsRequest } from './workflow-statistics.types';

View file

@ -6,7 +6,7 @@ import { join, dirname } from 'path';
import { Container } from 'typedi';
import { inProduction } from '@/constants';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
export const touchFile = async (filePath: string): Promise<void> => {
await mkdir(dirname(filePath), { recursive: true });

View file

@ -5,7 +5,7 @@ import { Service } from 'typedi';
import { CredentialTypes } from '@/credential-types';
import type { ICredentialsOverwrite } from '@/interfaces';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
@Service()
export class CredentialsOverwrites {

View file

@ -23,7 +23,7 @@ import { ForbiddenError } from '@/errors/response-errors/forbidden.error';
import { NotFoundError } from '@/errors/response-errors/not-found.error';
import { EventService } from '@/events/event.service';
import { License } from '@/license';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { listQueryMiddleware } from '@/middlewares';
import { CredentialRequest } from '@/requests';
import { NamingService } from '@/services/naming.service';

View file

@ -157,14 +157,6 @@ export class EnterpriseCredentialsService {
"You can't transfer a credential into the project that's already owning it.",
);
}
if (sourceProject.type !== 'team' && sourceProject.type !== 'personal') {
throw new TransferCredentialError(
'You can only transfer credentials out of personal or team projects.',
);
}
if (destinationProject.type !== 'team') {
throw new TransferCredentialError('You can only transfer credentials into team projects.');
}
await this.sharedCredentialsRepository.manager.transaction(async (trx) => {
// 6. transfer the credential

View file

@ -33,7 +33,7 @@ import { NotFoundError } from '@/errors/response-errors/not-found.error';
import { ExternalHooks } from '@/external-hooks';
import { validateEntity } from '@/generic-helpers';
import type { ICredentialsDb } from '@/interfaces';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { userHasScopes } from '@/permissions/check-access';
import type { CredentialRequest, ListQuery } from '@/requests';
import { CredentialsTester } from '@/services/credentials-tester.service';

View file

@ -47,7 +47,14 @@ export class ExecutionEntity {
status: ExecutionStatus;
@Column(datetimeColumnType)
startedAt: Date;
createdAt: Date;
/**
* Time when the processing of the execution actually started. This column
* is `null` when an execution is enqueued but has not started yet.
*/
@Column({ type: datetimeColumnType, nullable: true })
startedAt: Date | null;
@Index()
@Column({ type: datetimeColumnType, nullable: true })

View file

@ -0,0 +1,27 @@
import type { MigrationContext, ReversibleMigration } from '@/databases/types';
export class SeparateExecutionCreationFromStart1727427440136 implements ReversibleMigration {
async up({
schemaBuilder: { addColumns, column, dropNotNull },
runQuery,
escape,
}: MigrationContext) {
await addColumns('execution_entity', [
column('createdAt').notNull.timestamp().default('NOW()'),
]);
await dropNotNull('execution_entity', 'startedAt');
const executionEntity = escape.tableName('execution_entity');
const createdAt = escape.columnName('createdAt');
const startedAt = escape.columnName('startedAt');
// inaccurate for pre-migration rows but prevents `createdAt` from being nullable
await runQuery(`UPDATE ${executionEntity} SET ${createdAt} = ${startedAt};`);
}
async down({ schemaBuilder: { dropColumns, addNotNull } }: MigrationContext) {
await dropColumns('execution_entity', ['createdAt']);
await addNotNull('execution_entity', 'startedAt');
}
}

View file

@ -64,6 +64,7 @@ import { CreateInvalidAuthTokenTable1723627610222 } from '../common/172362761022
import { RefactorExecutionIndices1723796243146 } from '../common/1723796243146-RefactorExecutionIndices';
import { CreateAnnotationTables1724753530828 } from '../common/1724753530828-CreateExecutionAnnotationTables';
import { AddApiKeysTable1724951148974 } from '../common/1724951148974-AddApiKeysTable';
import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart';
export const mysqlMigrations: Migration[] = [
InitialMigration1588157391238,
@ -130,4 +131,5 @@ export const mysqlMigrations: Migration[] = [
RefactorExecutionIndices1723796243146,
CreateAnnotationTables1724753530828,
AddApiKeysTable1724951148974,
SeparateExecutionCreationFromStart1727427440136,
];

View file

@ -64,6 +64,7 @@ import { CreateInvalidAuthTokenTable1723627610222 } from '../common/172362761022
import { RefactorExecutionIndices1723796243146 } from '../common/1723796243146-RefactorExecutionIndices';
import { CreateAnnotationTables1724753530828 } from '../common/1724753530828-CreateExecutionAnnotationTables';
import { AddApiKeysTable1724951148974 } from '../common/1724951148974-AddApiKeysTable';
import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart';
export const postgresMigrations: Migration[] = [
InitialMigration1587669153312,
@ -130,4 +131,5 @@ export const postgresMigrations: Migration[] = [
RefactorExecutionIndices1723796243146,
CreateAnnotationTables1724753530828,
AddApiKeysTable1724951148974,
SeparateExecutionCreationFromStart1727427440136,
];

View file

@ -61,6 +61,7 @@ import { AddConstraintToExecutionMetadata1720101653148 } from '../common/1720101
import { CreateInvalidAuthTokenTable1723627610222 } from '../common/1723627610222-CreateInvalidAuthTokenTable';
import { RefactorExecutionIndices1723796243146 } from '../common/1723796243146-RefactorExecutionIndices';
import { CreateAnnotationTables1724753530828 } from '../common/1724753530828-CreateExecutionAnnotationTables';
import { SeparateExecutionCreationFromStart1727427440136 } from '../common/1727427440136-SeparateExecutionCreationFromStart';
const sqliteMigrations: Migration[] = [
InitialMigration1588102412422,
@ -124,6 +125,7 @@ const sqliteMigrations: Migration[] = [
RefactorExecutionIndices1723796243146,
CreateAnnotationTables1724753530828,
AddApiKeysTable1724951148974,
SeparateExecutionCreationFromStart1727427440136,
];
export { sqliteMigrations };

View file

@ -12,7 +12,7 @@ import { mockInstance, mockEntityManager } from '@test/mocking';
describe('ExecutionRepository', () => {
const entityManager = mockEntityManager(ExecutionEntity);
const globalConfig = mockInstance(GlobalConfig);
const globalConfig = mockInstance(GlobalConfig, { logging: { outputs: ['console'] } });
const binaryDataService = mockInstance(BinaryDataService);
const executionRepository = Container.get(ExecutionRepository);
const mockDate = new Date('2023-12-28 12:34:56.789Z');

View file

@@ -42,12 +42,12 @@ import { ExecutionAnnotation } from '@/databases/entities/execution-annotation.e
import { PostgresLiveRowsRetrievalError } from '@/errors/postgres-live-rows-retrieval.error';
import type { ExecutionSummaries } from '@/executions/execution.types';
import type {
ExecutionPayload,
CreateExecutionPayload,
IExecutionBase,
IExecutionFlattedDb,
IExecutionResponse,
} from '@/interfaces';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { separate } from '@/utils';
import { ExecutionDataRepository } from './execution-data.repository';
@@ -198,7 +198,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
return executions.map((execution) => {
const { executionData, ...rest } = execution;
return rest;
});
}) as IExecutionFlattedDb[] | IExecutionResponse[] | IExecutionBase[];
}
reportInvalidExecutions(executions: ExecutionEntity[]) {
@@ -297,15 +297,15 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
}),
...(options?.includeAnnotation &&
serializedAnnotation && { annotation: serializedAnnotation }),
};
} as IExecutionFlattedDb | IExecutionResponse | IExecutionBase;
}
/**
* Insert a new execution and its execution data using a transaction.
*/
async createNewExecution(execution: ExecutionPayload): Promise<string> {
async createNewExecution(execution: CreateExecutionPayload): Promise<string> {
const { data, workflowData, ...rest } = execution;
const { identifiers: inserted } = await this.insert(rest);
const { identifiers: inserted } = await this.insert({ ...rest, createdAt: new Date() });
const { id: executionId } = inserted[0] as { id: string };
const { connections, nodes, name, settings } = workflowData ?? {};
await this.executionDataRepository.insert({
@@ -340,20 +340,25 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
]);
}
async updateStatus(executionId: string, status: ExecutionStatus) {
await this.update({ id: executionId }, { status });
}
async setRunning(executionId: string) {
const startedAt = new Date();
async resetStartedAt(executionId: string) {
await this.update({ id: executionId }, { startedAt: new Date() });
await this.update({ id: executionId }, { status: 'running', startedAt });
return startedAt;
}
async updateExistingExecution(executionId: string, execution: Partial<IExecutionResponse>) {
// We isolate startedAt because it must be set when the execution starts and should never change.
// So we prevent updating it if it's sent (it usually is, and it causes problems for executions that
// are resumed after waiting for some time, as a new startedAt would be set)
const { id, data, workflowId, workflowData, startedAt, customData, ...executionInformation } =
execution;
const {
id,
data,
workflowId,
workflowData,
createdAt, // must never change
startedAt, // must never change
customData,
...executionInformation
} = execution;
if (Object.keys(executionInformation).length > 0) {
await this.update({ id: executionId }, executionInformation);
}
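Taken together, these changes split execution timestamps into `createdAt` (stamped once when the row is inserted, i.e. when the execution is enqueued) and `startedAt` (stamped by `setRunning` when the run actually begins), while `updateExistingExecution` discards both fields from incoming partial updates so resumed executions keep their original timestamps. A hypothetical caller-side sketch of that lifecycle follows; only the repository method names and the `CreateExecutionPayload` type come from this diff, the wrapper function and import paths are assumptions.

```ts
import type { ExecutionRepository } from '@/databases/repositories/execution.repository'; // path assumed
import type { CreateExecutionPayload } from '@/interfaces';

// Illustrative only: shows the intended ordering of the repository calls above.
async function runExecution(repo: ExecutionRepository, payload: CreateExecutionPayload) {
	// createdAt is set inside createNewExecution (insert time = enqueue time)
	const executionId = await repo.createNewExecution(payload);

	// ...later, when a worker actually picks the execution up:
	const startedAt = await repo.setRunning(executionId); // sets status 'running' and startedAt

	// subsequent updates must not touch createdAt/startedAt; updateExistingExecution
	// strips both fields out of the incoming partial before persisting it
	await repo.updateExistingExecution(executionId, { status: 'success', stoppedAt: new Date() });

	return startedAt;
}
```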
@@ -721,6 +726,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
mode: true,
retryOf: true,
status: true,
createdAt: true,
startedAt: true,
stoppedAt: true,
};
@@ -806,6 +812,7 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
// @tech_debt: These transformations should not be needed
private toSummary(execution: {
id: number | string;
createdAt?: Date | string;
startedAt?: Date | string;
stoppedAt?: Date | string;
waitTill?: Date | string | null;
@@ -817,6 +824,13 @@ export class ExecutionRepository extends Repository<ExecutionEntity> {
return date;
};
if (execution.createdAt) {
execution.createdAt =
execution.createdAt instanceof Date
? execution.createdAt.toISOString()
: normalizeDateString(execution.createdAt);
}
if (execution.startedAt) {
execution.startedAt =
execution.startedAt instanceof Date

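The summary mapper above normalizes `createdAt` (and, below it, `startedAt`/`stoppedAt`) to an ISO-8601 string whether the driver returns a `Date` object or a raw string. A standalone sketch of that pattern follows; the helper is hypothetical, and the repository's actual `normalizeDateString` (defined elsewhere in this file) may handle driver-specific formats differently.

```ts
// Hypothetical mirror of the normalization pattern above, not the repository's code:
// Date values are serialized directly, string values are parsed and re-serialized.
function toIsoTimestamp(value: Date | string): string {
	return value instanceof Date ? value.toISOString() : new Date(value).toISOString();
}

toIsoTimestamp(new Date('2023-12-28T12:34:56.789Z')); // => '2023-12-28T12:34:56.789Z'
```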
View file

@@ -3,7 +3,7 @@ import { EventSubscriber } from '@n8n/typeorm';
import { ApplicationError, ErrorReporterProxy } from 'n8n-workflow';
import { Container } from 'typedi';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { Project } from '../entities/project';
import { User } from '../entities/user';

View file

@@ -1,7 +1,7 @@
import type { QueryRunner, ObjectLiteral } from '@n8n/typeorm';
import type { INodeTypes } from 'n8n-workflow';
import type { Logger } from '@/logger';
import type { Logger } from '@/logging/logger.service';
import type { createSchemaBuilder } from './dsl';

View file

@@ -9,7 +9,7 @@ import { Container } from 'typedi';
import { inTest } from '@/constants';
import { createSchemaBuilder } from '@/databases/dsl';
import type { BaseMigration, Migration, MigrationContext, MigrationFn } from '@/databases/types';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { NodeTypes } from '@/node-types';
const PERSONALIZATION_SURVEY_FILENAME = 'personalizationSurvey.json';

View file

@@ -0,0 +1,37 @@
import debounce from 'lodash/debounce';

/**
 * Debounce a class method using `lodash/debounce`.
 *
 * @param waitMs - Number of milliseconds to debounce method by.
 *
 * @example
 * ```
 * class MyClass {
 *   @Debounce(1000)
 *   async myMethod() {
 *     // debounced
 *   }
 * }
 * ```
 */
export const Debounce =
	(waitMs: number): MethodDecorator =>
	<T>(
		_: object,
		methodName: string,
		originalDescriptor: PropertyDescriptor,
	): TypedPropertyDescriptor<T> => ({
		configurable: true,

		get() {
			const debouncedFn = debounce(originalDescriptor.value, waitMs);

			Object.defineProperty(this, methodName, {
				configurable: false,
				value: debouncedFn,
			});

			return debouncedFn as T;
		},
	});
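Because the getter installs the debounced function as an own property on first access, every instance debounces independently instead of sharing a single timer on the prototype. A small hypothetical usage sketch follows (the class and method are illustrative, the import path is assumed, and legacy `experimentalDecorators` is assumed to be enabled, as the three-argument `PropertyDescriptor` signature above implies).

```ts
import { Debounce } from './debounce'; // path assumed

// Illustrative usage of the Debounce decorator defined above.
class SettingsSaver {
	saveCount = 0;

	@Debounce(300)
	save() {
		this.saveCount += 1;
	}
}

const a = new SettingsSaver();
const b = new SettingsSaver();

a.save();
a.save(); // collapsed with the previous call; lodash debounce fires on the trailing edge
b.save(); // debounced separately, since the own property is created per instance

// after ~300 ms: a.saveCount === 1 and b.saveCount === 1
```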

View file

@@ -1,5 +1,5 @@
import { RedactableError } from '@/errors/redactable.error';
import type { UserLike } from '@/events/relay-event-map';
import type { UserLike } from '@/events/maps/relay.event-map';
function toRedactable(userLike: UserLike) {
return {

View file

@@ -11,7 +11,7 @@ import { SharedWorkflowRepository } from '@/databases/repositories/shared-workfl
import { TagRepository } from '@/databases/repositories/tag.repository';
import { WorkflowTagMappingRepository } from '@/databases/repositories/workflow-tag-mapping.repository';
import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import {
SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER,

View file

@@ -14,7 +14,7 @@ import type {
import { Service } from 'typedi';
import type { User } from '@/databases/entities/user';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { OwnershipService } from '@/services/ownership.service';
import {

View file

@@ -4,7 +4,7 @@ import path from 'path';
import { Container } from 'typedi';
import { License } from '@/license';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import {
SOURCE_CONTROL_GIT_KEY_COMMENT,

View file

@@ -23,7 +23,7 @@ import { VariablesRepository } from '@/databases/repositories/variables.reposito
import { WorkflowTagMappingRepository } from '@/databases/repositories/workflow-tag-mapping.repository';
import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import type { IWorkflowToImport } from '@/interfaces';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import { isUniqueConstraintError } from '@/response-helper';
import { assertNever } from '@/utils';

View file

@@ -9,7 +9,7 @@ import Container, { Service } from 'typedi';
import config from '@/config';
import { SettingsRepository } from '@/databases/repositories/settings.repository';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import {
SOURCE_CONTROL_SSH_FOLDER,

View file

@@ -10,7 +10,7 @@ import type { Variables } from '@/databases/entities/variables';
import { TagRepository } from '@/databases/repositories/tag.repository';
import { BadRequestError } from '@/errors/response-errors/bad-request.error';
import { EventService } from '@/events/event.service';
import { Logger } from '@/logger';
import { Logger } from '@/logging/logger.service';
import {
SOURCE_CONTROL_DEFAULT_EMAIL,

View file

@@ -1,9 +0,0 @@
import { ApplicationError } from 'n8n-workflow';
export class PortTakenError extends ApplicationError {
constructor(port: number) {
super(
`Port ${port} is already in use. Do you already have the n8n main process running on that port?`,
);
}
}

Some files were not shown because too many files have changed in this diff.