Mirror of https://github.com/n8n-io/n8n.git (synced 2025-03-05 20:50:17 -08:00)

Commit f003d939fc: Merge remote-tracking branch 'upstream/master' into patch-1

.npmrc (1 change)
@@ -7,4 +7,5 @@ prefer-workspace-packages = true
 link-workspace-packages = deep
 hoist = true
 shamefully-hoist = true
 hoist-workspace-packages = false
+loglevel = warn
CHANGELOG.md (31 changes)
@@ -1,3 +1,34 @@
+# [1.42.0](https://github.com/n8n-io/n8n/compare/n8n@1.41.0...n8n@1.42.0) (2024-05-15)
+
+
+### Bug Fixes
+
+* **Code Node:** Bind helper methods to the correct context ([#9380](https://github.com/n8n-io/n8n/issues/9380)) ([82c8801](https://github.com/n8n-io/n8n/commit/82c8801f25446085bc8da5055d9932eed4321f47))
+* **Cortex Node:** Fix issue with analyzer response not working for file observables ([#9374](https://github.com/n8n-io/n8n/issues/9374)) ([ed22dcd](https://github.com/n8n-io/n8n/commit/ed22dcd88ac7f8433b9ed5dc2139d8779b0e1d4c))
+* **editor:** Render backticks as code segments in error view ([#9352](https://github.com/n8n-io/n8n/issues/9352)) ([4ed5850](https://github.com/n8n-io/n8n/commit/4ed585040b20c50919e2ec2252216639c85194cb))
+* **Mattermost Node:** Fix issue when fetching reactions ([#9375](https://github.com/n8n-io/n8n/issues/9375)) ([78e7c7a](https://github.com/n8n-io/n8n/commit/78e7c7a9da96a293262cea5304509261ad10020c))
+
+
+### Features
+
+* **AI Agent Node:** Implement Tool calling agent ([#9339](https://github.com/n8n-io/n8n/issues/9339)) ([677f534](https://github.com/n8n-io/n8n/commit/677f534661634c74340f50723e55e241570d5a56))
+* **core:** Allow using custom certificates in docker containers ([#8705](https://github.com/n8n-io/n8n/issues/8705)) ([6059722](https://github.com/n8n-io/n8n/commit/6059722fbfeeca31addfc31ed287f79f40aaad18))
+* **core:** Node hints (warnings) system ([#8954](https://github.com/n8n-io/n8n/issues/8954)) ([da6088d](https://github.com/n8n-io/n8n/commit/da6088d0bbb952fcdf595a650e1e01b7b02a2b7e))
+* **core:** Node version available in expression ([#9350](https://github.com/n8n-io/n8n/issues/9350)) ([a00467c](https://github.com/n8n-io/n8n/commit/a00467c9fa57d740de9eccfcd136267bc9e9559d))
+* **editor:** Add examples for number & boolean, add new methods ([#9358](https://github.com/n8n-io/n8n/issues/9358)) ([7b45dc3](https://github.com/n8n-io/n8n/commit/7b45dc313f42317f894469c6aa8abecc55704e3a))
+* **editor:** Add examples for object and array expression methods ([#9360](https://github.com/n8n-io/n8n/issues/9360)) ([5293663](https://github.com/n8n-io/n8n/commit/52936633af9c71dff1957ee43a5eda48f7fc1bf1))
+* **editor:** Add item selector to expression output ([#9281](https://github.com/n8n-io/n8n/issues/9281)) ([dc5994b](https://github.com/n8n-io/n8n/commit/dc5994b18580b9326574c5208d9beaf01c746f33))
+* **editor:** Autocomplete info box: improve structure and add examples ([#9019](https://github.com/n8n-io/n8n/issues/9019)) ([c92c870](https://github.com/n8n-io/n8n/commit/c92c870c7335f4e2af63fa1c6bcfd086b2957ef8))
+* **editor:** Remove AI Error Debugging ([#9337](https://github.com/n8n-io/n8n/issues/9337)) ([cda062b](https://github.com/n8n-io/n8n/commit/cda062bde63bcbfdd599d0662ddbe89c27a71686))
+* **Slack Node:** Add block support for message updates ([#8925](https://github.com/n8n-io/n8n/issues/8925)) ([1081429](https://github.com/n8n-io/n8n/commit/1081429a4d0f7e2d1fc1841303448035b46e44d1))
+
+
+### Performance Improvements
+
+* Add tailwind to editor and design system ([#9032](https://github.com/n8n-io/n8n/issues/9032)) ([1c1e444](https://github.com/n8n-io/n8n/commit/1c1e4443f41dd39da8d5fa3951c8dffb0fbfce10))
+
+
+
 # [1.41.0](https://github.com/n8n-io/n8n/compare/n8n@1.40.0...n8n@1.41.0) (2024-05-08)
 
 
cypress/composables/projects.ts (new file, 18 lines)
@@ -0,0 +1,18 @@
+export const getHomeButton = () => cy.getByTestId('project-home-menu-item');
+export const getMenuItems = () => cy.getByTestId('project-menu-item');
+export const getAddProjectButton = () => cy.getByTestId('add-project-menu-item');
+export const getProjectTabs = () => cy.getByTestId('project-tabs').find('a');
+export const getProjectTabWorkflows = () => getProjectTabs().filter('a[href$="/workflows"]');
+export const getProjectTabCredentials = () => getProjectTabs().filter('a[href$="/credentials"]');
+export const getProjectTabSettings = () => getProjectTabs().filter('a[href$="/settings"]');
+export const getProjectSettingsSaveButton = () => cy.getByTestId('project-settings-save-button');
+export const getProjectSettingsCancelButton = () =>
+	cy.getByTestId('project-settings-cancel-button');
+export const getProjectSettingsDeleteButton = () =>
+	cy.getByTestId('project-settings-delete-button');
+export const getProjectMembersSelect = () => cy.getByTestId('project-members-select');
+
+export const addProjectMember = (email: string) => {
+	getProjectMembersSelect().click();
+	getProjectMembersSelect().get('.el-select-dropdown__item').contains(email.toLowerCase()).click();
+};
@@ -30,7 +30,7 @@ const workflowSharingModal = new WorkflowSharingModal();
 const ndv = new NDV();
 
 describe('Sharing', { disableAutoLogin: true }, () => {
-	before(() => cy.enableFeature('sharing', true));
+	before(() => cy.enableFeature('sharing'));
 
 	let workflowW2Url = '';
 	it('should create C1, W1, W2, share W1 with U3, as U2', () => {
@@ -171,11 +171,11 @@ describe('Sharing', { disableAutoLogin: true }, () => {
 		cy.get('input').should('not.have.length');
 		credentialsModal.actions.changeTab('Sharing');
 		cy.contains(
-			'You can view this credential because you have permission to read and share',
+			'Sharing a credential allows people to use it in their workflows. They cannot access credential details.',
 		).should('be.visible');
 
 		credentialsModal.getters.usersSelect().click();
-		cy.getByTestId('user-email')
+		cy.getByTestId('project-sharing-info')
 			.filter(':visible')
 			.should('have.length', 3)
 			.contains(INSTANCE_ADMIN.email)
@@ -501,7 +501,7 @@ describe('Execution', () => {
 
 		workflowPage.getters.clearExecutionDataButton().should('be.visible');
 
-		cy.intercept('POST', '/rest/workflows/run').as('workflowRun');
+		cy.intercept('POST', '/rest/workflows/**/run').as('workflowRun');
 
 		workflowPage.getters
 			.canvasNodeByName('do something with them')
@@ -525,7 +525,7 @@ describe('Execution', () => {
 
 		workflowPage.getters.zoomToFitButton().click();
 
-		cy.intercept('POST', '/rest/workflows/run').as('workflowRun');
+		cy.intercept('POST', '/rest/workflows/**/run').as('workflowRun');
 
 		workflowPage.getters
 			.canvasNodeByName('If')
@@ -545,7 +545,7 @@ describe('Execution', () => {
 
 		workflowPage.getters.clearExecutionDataButton().should('be.visible');
 
-		cy.intercept('POST', '/rest/workflows/run').as('workflowRun');
+		cy.intercept('POST', '/rest/workflows/**/run').as('workflowRun');
 
 		workflowPage.getters
 			.canvasNodeByName('NoOp2')
@@ -576,7 +576,7 @@ describe('Execution', () => {
 			'My test workflow',
 		);
 
-		cy.intercept('POST', '/rest/workflows/run').as('workflowRun');
+		cy.intercept('POST', '/rest/workflows/**/run').as('workflowRun');
 
 		workflowPage.getters.zoomToFitButton().click();
 		workflowPage.getters.executeWorkflowButton().click();
@@ -599,7 +599,7 @@ describe('Execution', () => {
 			'My test workflow',
 		);
 
-		cy.intercept('POST', '/rest/workflows/run').as('workflowRun');
+		cy.intercept('POST', '/rest/workflows/**/run').as('workflowRun');
 
 		workflowPage.getters.zoomToFitButton().click();
 		workflowPage.getters.executeWorkflowButton().click();
@@ -254,8 +254,9 @@ describe('Credentials', () => {
 		});
 
 		workflowPage.actions.visit(true);
-		workflowPage.actions.addNodeToCanvas('Slack');
-		workflowPage.actions.openNode('Slack');
+		workflowPage.actions.addNodeToCanvas('Manual');
+		workflowPage.actions.addNodeToCanvas('Slack', true, true, 'Get a channel');
+		workflowPage.getters.nodeCredentialsSelect().should('exist');
 		workflowPage.getters.nodeCredentialsSelect().click();
 		getVisibleSelect().find('li').last().click();
 		credentialsModal.getters.credentialAuthTypeRadioButtons().first().click();
@@ -4,7 +4,7 @@ const variablesPage = new VariablesPage();
 
 describe('Variables', () => {
 	it('should show the unlicensed action box when the feature is disabled', () => {
-		cy.disableFeature('variables', false);
+		cy.disableFeature('variables');
 		cy.visit(variablesPage.url);
 
 		variablesPage.getters.unavailableResourcesList().should('be.visible');
@@ -18,14 +18,15 @@ describe('Variables', () => {
 
 	beforeEach(() => {
 		cy.intercept('GET', '/rest/variables').as('loadVariables');
+		cy.intercept('GET', '/rest/login').as('login');
 
 		cy.visit(variablesPage.url);
-		cy.wait(['@loadVariables', '@loadSettings']);
+		cy.wait(['@loadVariables', '@loadSettings', '@login']);
 	});
 
 	it('should show the licensed action box when the feature is enabled', () => {
 		variablesPage.getters.emptyResourcesList().should('be.visible');
-		variablesPage.getters.createVariableButton().should('be.visible');
+		variablesPage.getters.emptyResourcesListNewVariableButton().should('be.visible');
 	});
 
 	it('should create a new variable using empty state row', () => {
@@ -19,7 +19,7 @@ describe('Debug', () => {
 	it('should be able to debug executions', () => {
 		cy.intercept('GET', '/rest/executions?filter=*').as('getExecutions');
 		cy.intercept('GET', '/rest/executions/*').as('getExecution');
-		cy.intercept('POST', '/rest/workflows/run').as('postWorkflowRun');
+		cy.intercept('POST', '/rest/workflows/**/run').as('postWorkflowRun');
 
 		cy.signin({ email: INSTANCE_OWNER.email, password: INSTANCE_OWNER.password });
 
@@ -10,7 +10,7 @@ describe('Workflow templates', () => {
 	beforeEach(() => {
 		cy.intercept('GET', '**/rest/settings', (req) => {
 			// Disable cache
-			delete req.headers['if-none-match']
+			delete req.headers['if-none-match'];
 			req.reply((res) => {
 				if (res.body.data) {
 					// Disable custom templates host if it has been overridden by another intercept
@@ -22,18 +22,27 @@ describe('Workflow templates', () => {
 
 	it('Opens website when clicking templates sidebar link', () => {
 		cy.visit(workflowsPage.url);
-		mainSidebar.getters.menuItem('Templates').should('be.visible');
+		mainSidebar.getters.templates().should('be.visible');
 		// Templates should be a link to the website
-		mainSidebar.getters.templates().parent('a').should('have.attr', 'href').and('include', 'https://n8n.io/workflows');
+		mainSidebar.getters
+			.templates()
+			.parent('a')
+			.should('have.attr', 'href')
+			.and('include', 'https://n8n.io/workflows');
 		// Link should contain instance address and n8n version
-		mainSidebar.getters.templates().parent('a').then(($a) => {
-			const href = $a.attr('href');
-			const params = new URLSearchParams(href);
-			// Link should have all mandatory parameters expected on the website
-			expect(decodeURIComponent(`${params.get('utm_instance')}`)).to.include(window.location.origin);
-			expect(params.get('utm_n8n_version')).to.match(/[0-9]+\.[0-9]+\.[0-9]+/);
-			expect(params.get('utm_awc')).to.match(/[0-9]+/);
-		});
+		mainSidebar.getters
+			.templates()
+			.parent('a')
+			.then(($a) => {
+				const href = $a.attr('href');
+				const params = new URLSearchParams(href);
+				// Link should have all mandatory parameters expected on the website
+				expect(decodeURIComponent(`${params.get('utm_instance')}`)).to.include(
+					window.location.origin,
+				);
+				expect(params.get('utm_n8n_version')).to.match(/[0-9]+\.[0-9]+\.[0-9]+/);
+				expect(params.get('utm_awc')).to.match(/[0-9]+/);
+			});
 		mainSidebar.getters.templates().parent('a').should('have.attr', 'target', '_blank');
 	});
 
@@ -41,6 +50,6 @@ describe('Workflow templates', () => {
 		cy.visit(templatesPage.url);
 		cy.origin('https://n8n.io', () => {
 			cy.url().should('include', 'https://n8n.io/workflows');
-		})
+		});
 	});
 });
@@ -148,7 +148,7 @@ describe('Editor actions should work', () => {
 	it('after switching between Editor and Debug', () => {
 		cy.intercept('GET', '/rest/executions?filter=*').as('getExecutions');
 		cy.intercept('GET', '/rest/executions/*').as('getExecution');
-		cy.intercept('POST', '/rest/workflows/run').as('postWorkflowRun');
+		cy.intercept('POST', '/rest/workflows/**/run').as('postWorkflowRun');
 
 		editWorkflowAndDeactivate();
 		workflowPage.actions.executeWorkflow();
@@ -196,9 +196,9 @@ describe('Editor zoom should work after route changes', () => {
 		cy.intercept('GET', '/rest/workflow-history/workflow/*/version/*').as('getVersion');
 		cy.intercept('GET', '/rest/workflow-history/workflow/*').as('getHistory');
 		cy.intercept('GET', '/rest/users').as('getUsers');
-		cy.intercept('GET', '/rest/workflows').as('getWorkflows');
+		cy.intercept('GET', '/rest/workflows?*').as('getWorkflows');
 		cy.intercept('GET', '/rest/active-workflows').as('getActiveWorkflows');
-		cy.intercept('GET', '/rest/credentials').as('getCredentials');
+		cy.intercept('GET', '/rest/credentials?*').as('getCredentials');
 
 		switchBetweenEditorAndHistory();
 		zoomInAndCheckNodes();
cypress/e2e/39-projects.cy.ts (new file, 151 lines)
@@ -0,0 +1,151 @@
+import { INSTANCE_ADMIN, INSTANCE_MEMBERS } from '../constants';
+import { WorkflowsPage, WorkflowPage, CredentialsModal, CredentialsPage } from '../pages';
+import * as projects from '../composables/projects';
+
+const workflowsPage = new WorkflowsPage();
+const workflowPage = new WorkflowPage();
+const credentialsPage = new CredentialsPage();
+const credentialsModal = new CredentialsModal();
+
+describe('Projects', () => {
+	beforeEach(() => {
+		cy.resetDatabase();
+		cy.enableFeature('advancedPermissions');
+		cy.enableFeature('projectRole:admin');
+		cy.enableFeature('projectRole:editor');
+		cy.changeQuota('maxTeamProjects', -1);
+	});
+
+	it('should handle workflows and credentials', () => {
+		cy.signin(INSTANCE_ADMIN);
+		cy.visit(workflowsPage.url);
+		workflowsPage.getters.workflowCards().should('not.have.length');
+
+		workflowsPage.getters.newWorkflowButtonCard().click();
+
+		cy.intercept('POST', '/rest/workflows').as('workflowSave');
+		workflowPage.actions.saveWorkflowOnButtonClick();
+
+		cy.wait('@workflowSave').then((interception) => {
+			expect(interception.request.body).not.to.have.property('projectId');
+		});
+
+		projects.getHomeButton().click();
+		projects.getProjectTabs().should('have.length', 2);
+
+		projects.getProjectTabCredentials().click();
+		credentialsPage.getters.credentialCards().should('not.have.length');
+
+		credentialsPage.getters.emptyListCreateCredentialButton().click();
+		credentialsModal.getters.newCredentialModal().should('be.visible');
+		credentialsModal.getters.newCredentialTypeSelect().should('be.visible');
+		credentialsModal.getters.newCredentialTypeOption('Notion API').click();
+		credentialsModal.getters.newCredentialTypeButton().click();
+		credentialsModal.getters.connectionParameter('Internal Integration Secret').type('1234567890');
+		credentialsModal.actions.setName('My awesome Notion account');
+
+		cy.intercept('POST', '/rest/credentials').as('credentialSave');
+		credentialsModal.actions.save();
+		cy.wait('@credentialSave').then((interception) => {
+			expect(interception.request.body).not.to.have.property('projectId');
+		});
+
+		credentialsModal.actions.close();
+		credentialsPage.getters.credentialCards().should('have.length', 1);
+
+		projects.getProjectTabWorkflows().click();
+		workflowsPage.getters.workflowCards().should('have.length', 1);
+
+		projects.getMenuItems().should('not.have.length');
+
+		cy.intercept('POST', '/rest/projects').as('projectCreate');
+		projects.getAddProjectButton().click();
+		cy.wait('@projectCreate');
+		projects.getMenuItems().should('have.length', 1);
+		projects.getProjectTabs().should('have.length', 3);
+
+		cy.get('input[name="name"]').type('Development');
+		projects.addProjectMember(INSTANCE_MEMBERS[0].email);
+
+		cy.intercept('PATCH', '/rest/projects/*').as('projectSettingsSave');
+		projects.getProjectSettingsSaveButton().click();
+		cy.wait('@projectSettingsSave').then((interception) => {
+			expect(interception.request.body).to.have.property('name').and.to.equal('Development');
+			expect(interception.request.body).to.have.property('relations').to.have.lengthOf(2);
+		});
+
+		projects.getMenuItems().first().click();
+		workflowsPage.getters.workflowCards().should('not.have.length');
+		projects.getProjectTabs().should('have.length', 3);
+
+		workflowsPage.getters.newWorkflowButtonCard().click();
+
+		cy.intercept('POST', '/rest/workflows').as('workflowSave');
+		workflowPage.actions.saveWorkflowOnButtonClick();
+
+		cy.wait('@workflowSave').then((interception) => {
+			expect(interception.request.body).to.have.property('projectId');
+		});
+
+		projects.getMenuItems().first().click();
+
+		projects.getProjectTabCredentials().click();
+		credentialsPage.getters.credentialCards().should('not.have.length');
+
+		credentialsPage.getters.emptyListCreateCredentialButton().click();
+		credentialsModal.getters.newCredentialModal().should('be.visible');
+		credentialsModal.getters.newCredentialTypeSelect().should('be.visible');
+		credentialsModal.getters.newCredentialTypeOption('Notion API').click();
+		credentialsModal.getters.newCredentialTypeButton().click();
+		credentialsModal.getters.connectionParameter('Internal Integration Secret').type('1234567890');
+		credentialsModal.actions.setName('My awesome Notion account');
+
+		cy.intercept('POST', '/rest/credentials').as('credentialSave');
+		credentialsModal.actions.save();
+		cy.wait('@credentialSave').then((interception) => {
+			expect(interception.request.body).to.have.property('projectId');
+		});
+		credentialsModal.actions.close();
+
+		projects.getAddProjectButton().click();
+		projects.getMenuItems().should('have.length', 2);
+
+		let projectId: string;
+		projects.getMenuItems().first().click();
+		cy.intercept('GET', '/rest/credentials*').as('credentialsList');
+		projects.getProjectTabCredentials().click();
+		cy.wait('@credentialsList').then((interception) => {
+			const url = new URL(interception.request.url);
+			const queryParams = new URLSearchParams(url.search);
+			const filter = queryParams.get('filter');
+			expect(filter).to.be.a('string').and.to.contain('projectId');
+
+			if (filter) {
+				projectId = JSON.parse(filter).projectId;
+			}
+		});
+
+		projects.getMenuItems().last().click();
+		cy.intercept('GET', '/rest/credentials*').as('credentialsList');
+		projects.getProjectTabCredentials().click();
+		cy.wait('@credentialsList').then((interception) => {
+			const url = new URL(interception.request.url);
+			const queryParams = new URLSearchParams(url.search);
+			const filter = queryParams.get('filter');
+			expect(filter).to.be.a('string').and.to.contain('projectId');
+
+			if (filter) {
+				expect(JSON.parse(filter).projectId).not.to.equal(projectId);
+			}
+		});
+
+		projects.getHomeButton().click();
+		workflowsPage.getters.workflowCards().should('have.length', 2);
+
+		cy.intercept('GET', '/rest/credentials*').as('credentialsList');
+		projects.getProjectTabCredentials().click();
+		cy.wait('@credentialsList').then((interception) => {
+			expect(interception.request.url).not.to.contain('filter');
+		});
+	});
+});
@@ -697,7 +697,7 @@ describe('NDV', () => {
 	});
 
 	it('Stop listening for trigger event from NDV', () => {
-		cy.intercept('POST', '/rest/workflows/run').as('workflowRun');
+		cy.intercept('POST', '/rest/workflows/**/run').as('workflowRun');
 		workflowPage.actions.addInitialNodeToCanvas('Local File Trigger', {
 			keepNdvOpen: true,
 			action: 'On Changes To A Specific File',
@@ -14,7 +14,7 @@
 		},
 		{
 			"parameters": {
-				"url": "https://random-data-api.com/api/v2/users?size=5",
+				"url": "https://internal.users.n8n.cloud/webhook/random-data-api",
 				"options": {}
 			},
 			"id": "22511d75-ab54-49e1-b8af-08b8b3372373",
@@ -28,7 +28,7 @@
 		},
 		{
 			"parameters": {
-				"jsCode": "// Loop over input items and add a new field called 'myNewField' to the JSON of each one\nfor (const item of $input.all()) {\n item.json.first_name_reversed = item.json = {\n firstName: item.json.first_name,\n firstnNameReversed: item.json.first_name_BUG.split(\"\").reverse().join(\"\")\n };\n}\n\nreturn $input.all();"
+				"jsCode": "// Loop over input items and add a new field called 'myNewField' to the JSON of each one\nfor (const item of $input.all()) {\n item.json.first_name_reversed = item.json = {\n firstName: item.json.firstname,\n firstnNameReversed: item.json.firstname.split(\"\").reverse().join(\"\")\n };\n}\n\nreturn $input.all();"
 			},
 			"id": "4b66b15a-1685-46c1-a5e3-ebf8cdb11d21",
 			"name": "do something with them",
@@ -130,4 +130,4 @@
 	},
 	"id": "PymcwIrbqgNh3O0K",
 	"tags": []
-}
+}
@@ -1,7 +1,7 @@
 import { BasePage } from './base';
 
 export class CredentialsPage extends BasePage {
-	url = '/credentials';
+	url = '/home/credentials';
 	getters = {
 		emptyListCreateCredentialButton: () => cy.getByTestId('empty-resources-list').find('button'),
 		createCredentialButton: () => cy.getByTestId('resources-list-add'),
@@ -25,7 +25,7 @@ export class CredentialsModal extends BasePage {
 		credentialInputs: () => cy.getByTestId('credential-connection-parameter'),
 		menu: () => this.getters.editCredentialModal().get('.menu-container'),
 		menuItem: (name: string) => this.getters.menu().get('.n8n-menu-item').contains(name),
-		usersSelect: () => cy.getByTestId('credential-sharing-modal-users-select'),
+		usersSelect: () => cy.getByTestId('project-sharing-select').filter(':visible'),
 		testSuccessTag: () => cy.getByTestId('credentials-config-container-test-success'),
 	};
 	actions = {
@@ -3,7 +3,7 @@ import { BasePage } from '../base';
 export class WorkflowSharingModal extends BasePage {
 	getters = {
 		modal: () => cy.getByTestId('workflowShare-modal', { timeout: 5000 }),
-		usersSelect: () => cy.getByTestId('workflow-sharing-modal-users-select'),
+		usersSelect: () => cy.getByTestId('project-sharing-select'),
 		saveButton: () => cy.getByTestId('workflow-sharing-modal-save-button'),
 		closeButton: () => this.getters.modal().find('.el-dialog__close').first(),
 	};
@@ -41,10 +41,10 @@ export class SettingsUsersPage extends BasePage {
 			workflowPage.actions.visit();
 			mainSidebar.actions.goToSettings();
 			if (isOwner) {
-				settingsSidebar.getters.menuItem('Users').click();
+				settingsSidebar.getters.users().click();
 				cy.url().should('match', new RegExp(this.url));
 			} else {
-				settingsSidebar.getters.menuItem('Users').should('not.exist');
+				settingsSidebar.getters.users().should('not.exist');
 				// Should be redirected to workflows page if trying to access UM url
 				cy.visit('/settings/users');
 				cy.url().should('match', new RegExp(workflowsPage.url));
@@ -5,14 +5,13 @@ const workflowsPage = new WorkflowsPage();
 
 export class MainSidebar extends BasePage {
 	getters = {
-		menuItem: (menuLabel: string) =>
-			cy.getByTestId('menu-item').filter(`:contains("${menuLabel}")`),
-		settings: () => this.getters.menuItem('Settings'),
-		templates: () => this.getters.menuItem('Templates'),
-		workflows: () => this.getters.menuItem('Workflows'),
-		credentials: () => this.getters.menuItem('Credentials'),
-		executions: () => this.getters.menuItem('Executions'),
-		adminPanel: () => this.getters.menuItem('Admin Panel'),
+		menuItem: (id: string) => cy.getByTestId('menu-item').get('#' + id),
+		settings: () => this.getters.menuItem('settings'),
+		templates: () => this.getters.menuItem('templates'),
+		workflows: () => this.getters.menuItem('workflows'),
+		credentials: () => this.getters.menuItem('credentials'),
+		executions: () => this.getters.menuItem('executions'),
+		adminPanel: () => this.getters.menuItem('cloud-admin'),
 		userMenu: () => cy.get('div[class="action-dropdown-container"]'),
 		logo: () => cy.getByTestId('n8n-logo'),
 	};
@@ -2,9 +2,8 @@ import { BasePage } from '../base';
 
 export class SettingsSidebar extends BasePage {
 	getters = {
-		menuItem: (menuLabel: string) =>
-			cy.getByTestId('menu-item').filter(`:contains("${menuLabel}")`),
-		users: () => this.getters.menuItem('Users'),
+		menuItem: (id: string) => cy.getByTestId('menu-item').get('#' + id),
+		users: () => this.getters.menuItem('settings-users'),
 		back: () => cy.getByTestId('settings-back'),
 	};
 	actions = {
@@ -35,7 +35,7 @@ export class VariablesPage extends BasePage {
 		deleteVariable: (key: string) => {
 			const row = this.getters.variableRow(key);
 			row.within(() => {
-				cy.getByTestId('variable-row-delete-button').click();
+				cy.getByTestId('variable-row-delete-button').should('not.be.disabled').click();
 			});
 
 			const modal = cy.get('[role="dialog"]');
@@ -53,7 +53,7 @@ export class VariablesPage extends BasePage {
 		editRow: (key: string) => {
 			const row = this.getters.variableRow(key);
 			row.within(() => {
-				cy.getByTestId('variable-row-edit-button').click();
+				cy.getByTestId('variable-row-edit-button').should('not.be.disabled').click();
 			});
 		},
 		setRowValue: (row: Chainable<JQuery<HTMLElement>>, field: 'key' | 'value', value: string) => {
@@ -32,7 +32,7 @@ export class WorkflowExecutionsTab extends BasePage {
 		},
 		createManualExecutions: (count: number) => {
 			for (let i = 0; i < count; i++) {
-				cy.intercept('POST', '/rest/workflows/run').as('workflowExecution');
+				cy.intercept('POST', '/rest/workflows/**/run').as('workflowExecution');
 				workflowPage.actions.executeWorkflow();
 				cy.wait('@workflowExecution');
 			}
@@ -1,7 +1,7 @@
 import { BasePage } from './base';
 
 export class WorkflowsPage extends BasePage {
-	url = '/workflows';
+	url = '/home/workflows';
 	getters = {
 		newWorkflowButtonCard: () => cy.getByTestId('new-workflow-card'),
 		newWorkflowTemplateCard: () => cy.getByTestId('new-workflow-template-card'),
@@ -65,7 +65,7 @@ Cypress.Commands.add('signout', () => {
 	cy.request({
 		method: 'POST',
 		url: `${BACKEND_BASE_URL}/rest/logout`,
-		headers: { 'browser-id': localStorage.getItem('n8n-browserId') }
+		headers: { 'browser-id': localStorage.getItem('n8n-browserId') },
 	});
 	cy.getCookie(N8N_AUTH_COOKIE).should('not.exist');
 });
@@ -80,12 +80,19 @@ const setFeature = (feature: string, enabled: boolean) =>
 		enabled,
 	});
 
+const setQuota = (feature: string, value: number) =>
+	cy.request('PATCH', `${BACKEND_BASE_URL}/rest/e2e/quota`, {
+		feature: `quota:${feature}`,
+		value,
+	});
+
 const setQueueMode = (enabled: boolean) =>
 	cy.request('PATCH', `${BACKEND_BASE_URL}/rest/e2e/queue-mode`, {
 		enabled,
 	});
 
 Cypress.Commands.add('enableFeature', (feature: string) => setFeature(feature, true));
+Cypress.Commands.add('changeQuota', (feature: string, value: number) => setQuota(feature, value));
 Cypress.Commands.add('disableFeature', (feature: string) => setFeature(feature, false));
 Cypress.Commands.add('enableQueueMode', () => setQueueMode(true));
 Cypress.Commands.add('disableQueueMode', () => setQueueMode(false));
@@ -30,6 +30,7 @@ declare global {
 			disableFeature(feature: string): void;
 			enableQueueMode(): void;
 			disableQueueMode(): void;
+			changeQuota(feature: string, value: number): void;
 			waitForLoad(waitForIntercepts?: boolean): void;
 			grantBrowserPermissions(...permissions: string[]): void;
 			readClipboard(): Chainable<string>;
@@ -29,7 +29,7 @@ export function createMockNodeExecutionData(
 					];
 
 					return acc;
-				}, {})
+				}, {})
 			: data,
 		source: [null],
 		...rest,
@@ -88,7 +88,7 @@ export function runMockWorkflowExcution({
 }) {
 	const executionId = Math.random().toString(36).substring(4);
 
-	cy.intercept('POST', '/rest/workflows/run', {
+	cy.intercept('POST', '/rest/workflows/**/run', {
 		statusCode: 201,
 		body: {
 			data: {
@@ -1,4 +1,11 @@
 #!/bin/sh
+if [ -d /opt/custom-certificates ]; then
+  echo "Trusting custom certificates from /opt/custom-certificates."
+  export NODE_OPTIONS=--use-openssl-ca $NODE_OPTIONS
+  export SSL_CERT_DIR=/opt/custom-certificates
+  c_rehash /opt/custom-certificates
+fi
+
 if [ "$#" -gt 0 ]; then
   # Got started with arguments
   exec n8n "$@"
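The block above only takes effect when a directory exists at /opt/custom-certificates inside the container. A minimal usage sketch (assuming the published n8nio/n8n image and a host directory of PEM-encoded CA certificates; the host path is a placeholder):

```sh
# Mount a host directory of CA certificates so the entrypoint trusts them at startup
docker run -it --rm \
  -p 5678:5678 \
  -v /path/to/ca-certs:/opt/custom-certificates \
  n8nio/n8n
```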
@@ -1,6 +1,6 @@
 {
 	"name": "n8n-monorepo",
-	"version": "1.41.0",
+	"version": "1.42.0",
 	"private": true,
 	"homepage": "https://n8n.io",
 	"engines": {
@@ -38,9 +38,6 @@
 		"test:e2e:dev": "scripts/run-e2e.js dev",
 		"test:e2e:all": "scripts/run-e2e.js all"
 	},
-	"dependencies": {
-		"n8n": "workspace:*"
-	},
 	"devDependencies": {
 		"@n8n_io/eslint-config": "workspace:*",
 		"@ngneat/falso": "^6.4.0",
@@ -95,7 +92,8 @@
 		"pyodide@0.23.4": "patches/pyodide@0.23.4.patch",
 		"@types/express-serve-static-core@4.17.43": "patches/@types__express-serve-static-core@4.17.43.patch",
 		"@types/ws@8.5.4": "patches/@types__ws@8.5.4.patch",
-		"vite-plugin-checker@0.6.4": "patches/vite-plugin-checker@0.6.4.patch"
+		"vite-plugin-checker@0.6.4": "patches/vite-plugin-checker@0.6.4.patch",
+		"@types/uuencode@0.0.3": "patches/@types__uuencode@0.0.3.patch"
 		}
 	}
 }
@@ -1,5 +1,4 @@
 <script setup lang="ts">
-// eslint-disable-next-line import/no-unresolved
 import Close from 'virtual:icons/mdi/close';
 import { computed, nextTick, onMounted } from 'vue';
 import Layout from '@n8n/chat/components/Layout.vue';
@@ -1,7 +1,5 @@
 <script lang="ts" setup>
-// eslint-disable-next-line import/no-unresolved
 import IconChat from 'virtual:icons/mdi/chat';
-// eslint-disable-next-line import/no-unresolved
 import IconChevronDown from 'virtual:icons/mdi/chevron-down';
 import { nextTick, ref } from 'vue';
 import Chat from '@n8n/chat/components/Chat.vue';
@@ -1,5 +1,4 @@
 <script setup lang="ts">
-// eslint-disable-next-line import/no-unresolved
 import IconSend from 'virtual:icons/mdi/send';
 import { computed, onMounted, ref } from 'vue';
 import { useI18n, useChat, useOptions } from '@n8n/chat/composables';
packages/@n8n/chat/src/types/icons.d.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
+declare module 'virtual:icons/*' {
+	import { FunctionalComponent, SVGAttributes } from 'vue';
+	const component: FunctionalComponent<SVGAttributes>;
+	export default component;
+}
@@ -14,6 +14,7 @@ const plugins = [
 	vue(),
 	icons({
 		compiler: 'vue3',
+		autoInstall: true,
 	}),
 	dts(),
 ];
@@ -10,6 +10,7 @@ export interface ClientOAuth2TokenData extends Record<string, string | undefined
 	expires_in?: string;
+	scope?: string | undefined;
 }
 
 /**
  * General purpose client token generator.
  */
@@ -74,18 +75,27 @@ export class ClientOAuth2Token {
 
 		if (!this.refreshToken) throw new Error('No refresh token');
 
+		const clientId = options.clientId;
+		const clientSecret = options.clientSecret;
+		const headers = { ...DEFAULT_HEADERS };
+		const body: Record<string, string> = {
+			refresh_token: this.refreshToken,
+			grant_type: 'refresh_token',
+		};
+
+		if (options.authentication === 'body') {
+			body.client_id = clientId;
+			body.client_secret = clientSecret;
+		} else {
+			headers.Authorization = auth(clientId, clientSecret);
+		}
+
 		const requestOptions = getRequestOptions(
 			{
 				url: options.accessTokenUri,
 				method: 'POST',
-				headers: {
-					...DEFAULT_HEADERS,
-					Authorization: auth(options.clientId, options.clientSecret),
-				},
-				body: {
-					refresh_token: this.refreshToken,
-					grant_type: 'refresh_token',
-				},
+				headers,
+				body,
 			},
 			options,
 		);
@ -130,8 +130,8 @@ describe('CredentialsFlow', () => {
|
|||
});
|
||||
|
||||
describe('#refresh', () => {
|
||||
const mockRefreshCall = () =>
|
||||
nock(config.baseUrl)
|
||||
const mockRefreshCall = async () => {
|
||||
const nockScope = nock(config.baseUrl)
|
||||
.post(
|
||||
'/login/oauth/access_token',
|
||||
({ refresh_token, grant_type }) =>
|
||||
|
@ -142,6 +142,15 @@ describe('CredentialsFlow', () => {
|
|||
access_token: config.refreshedAccessToken,
|
||||
refresh_token: config.refreshedRefreshToken,
|
||||
});
|
||||
return await new Promise<{ headers: Headers; body: unknown }>((resolve) => {
|
||||
nockScope.once('request', (req) => {
|
||||
resolve({
|
||||
headers: req.headers,
|
||||
body: req.requestBodyBuffers.toString('utf-8'),
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
it('should make a request to get a new access token', async () => {
|
||||
const authClient = createAuthClient({ scopes: ['notifications'] });
|
||||
|
@ -150,12 +159,55 @@ describe('CredentialsFlow', () => {
|
|||
const token = await authClient.credentials.getToken();
|
||||
expect(token.accessToken).toEqual(config.accessToken);
|
||||
|
||||
mockRefreshCall();
|
||||
const requestPromise = mockRefreshCall();
|
||||
const token1 = await token.refresh();
|
||||
await requestPromise;
|
||||
|
||||
expect(token1).toBeInstanceOf(ClientOAuth2Token);
|
||||
expect(token1.accessToken).toEqual(config.refreshedAccessToken);
|
||||
expect(token1.tokenType).toEqual('bearer');
|
||||
});
|
||||
|
||||
it('should make a request to get a new access token with authentication = "body"', async () => {
|
||||
const authClient = createAuthClient({ scopes: ['notifications'], authentication: 'body' });
|
||||
void mockTokenCall({ requestedScope: 'notifications' });
|
||||
|
||||
const token = await authClient.credentials.getToken();
|
||||
expect(token.accessToken).toEqual(config.accessToken);
|
||||
|
||||
const requestPromise = mockRefreshCall();
|
||||
const token1 = await token.refresh();
|
||||
const { headers, body } = await requestPromise;
|
||||
|
||||
expect(token1).toBeInstanceOf(ClientOAuth2Token);
|
||||
expect(token1.accessToken).toEqual(config.refreshedAccessToken);
|
||||
expect(token1.tokenType).toEqual('bearer');
|
||||
expect(headers?.authorization).toBe(undefined);
|
||||
expect(body).toEqual(
|
||||
'refresh_token=def456token&grant_type=refresh_token&client_id=abc&client_secret=123',
|
||||
);
|
||||
});
|
||||
|
||||
it('should make a request to get a new access token with authentication = "header"', async () => {
|
||||
const authClient = createAuthClient({
|
||||
scopes: ['notifications'],
|
||||
authentication: 'header',
|
||||
});
|
||||
void mockTokenCall({ requestedScope: 'notifications' });
|
||||
|
||||
const token = await authClient.credentials.getToken();
|
||||
expect(token.accessToken).toEqual(config.accessToken);
|
||||
|
||||
const requestPromise = mockRefreshCall();
|
||||
const token1 = await token.refresh();
|
||||
const { headers, body } = await requestPromise;
|
||||
|
||||
expect(token1).toBeInstanceOf(ClientOAuth2Token);
|
||||
expect(token1.accessToken).toEqual(config.refreshedAccessToken);
|
||||
expect(token1.tokenType).toEqual('bearer');
|
||||
expect(headers?.authorization).toBe('Basic YWJjOjEyMw==');
|
||||
expect(body).toEqual('refresh_token=def456token&grant_type=refresh_token');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@@ -24,3 +24,9 @@ export function n8nExpressionLanguageSupport() {
   return new LanguageSupport(n8nLanguage);
 }
 ```
+
+## Supported Unicode ranges
+
+- From `Basic Latin` up to and including `Currency Symbols`
+- `Miscellaneous Symbols and Pictographs`
+- `CJK Unified Ideographs`
@@ -15,7 +15,7 @@ entity { Plaintext | Resolvable }
 
 resolvableChar { unicodeChar | "}" ![}] | "\\}}" }
 
-unicodeChar { $[\u0000-\u007C] | $[\u007E-\u1FFF] | $[\u20A0-\u20CF] | $[\u{1F300}-\u{1F64F}] }
+unicodeChar { $[\u0000-\u007C] | $[\u007E-\u20CF] | $[\u{1F300}-\u{1F64F}] | $[\u4E00-\u9FFF] }
 }
 
 @detectDelim
@ -1,6 +1,5 @@
|
|||
// This file was generated by lezer-generator. You probably shouldn't edit it.
|
||||
import { LRParser } from '@lezer/lr';
|
||||
|
||||
export const parser = LRParser.deserialize({
|
||||
version: 14,
|
||||
states: "nQQOPOOOOOO'#Cb'#CbOOOO'#C`'#C`QQOPOOOOOO-E6^-E6^",
|
||||
|
@ -11,7 +10,7 @@ export const parser = LRParser.deserialize({
|
|||
skippedNodes: [0],
|
||||
repeatNodeCount: 1,
|
||||
tokenData:
|
||||
"&U~RTO#ob#o#p!h#p;'Sb;'S;=`!]<%lOb~gTQ~O#ob#o#pv#p;'Sb;'S;=`!]<%lOb~yUO#ob#p;'Sb;'S;=`!]<%l~b~Ob~~!c~!`P;=`<%lb~!hOQ~~!kVO#ob#o#p#Q#p;'Sb;'S;=`!]<%l~b~Ob~~!c~#TWO#O#Q#O#P#m#P#q#Q#q#r%Z#r$IS#Q$Lj$Ml#Q;(b;(c%x;(c;(d&O~#pWO#O#Q#O#P#m#P#q#Q#q#r$Y#r$IS#Q$Lj$Ml#Q;(b;(c%x;(c;(d&O~$]TO#q#Q#q#r$l#r;'S#Q;'S;=`%r<%lO#Q~$qWR~O#O#Q#O#P#m#P#q#Q#q#r%Z#r$IS#Q$Lj$Ml#Q;(b;(c%x;(c;(d&O~%^TO#q#Q#q#r%m#r;'S#Q;'S;=`%r<%lO#Q~%rOR~~%uP;=`<%l#Q~%{P;NQ<%l#Q~&RP;=`;JY#Q",
|
||||
"&U~RTO#ob#o#p!h#p;'Sb;'S;=`!]<%lOb~gTQ~O#ob#o#pv#p;'Sb;'S;=`!]<%lOb~yUO#ob#p;'Sb;'S;=`!]<%l~b~Ob~~!c~!`P;=`<%lb~!hOQ~~!kVO#ob#o#p#Q#p;'Sb;'S;=`!]<%l~b~Ob~~!c~#TWO#O#Q#O#P#m#P#q#Q#q#r%Z#r$Ml#Q*5S41d#Q;(b;(c%x;(c;(d&O~#pWO#O#Q#O#P#m#P#q#Q#q#r$Y#r$Ml#Q*5S41d#Q;(b;(c%x;(c;(d&O~$]TO#q#Q#q#r$l#r;'S#Q;'S;=`%r<%lO#Q~$qWR~O#O#Q#O#P#m#P#q#Q#q#r%Z#r$Ml#Q*5S41d#Q;(b;(c%x;(c;(d&O~%^TO#q#Q#q#r%m#r;'S#Q;'S;=`%r<%lO#Q~%rOR~~%uP;=`<%l#Q~%{P;NQ<%l#Q~&RP;=`;JY#Q",
|
||||
tokenizers: [0],
|
||||
topRules: { Program: [0, 1] },
|
||||
tokenPrec: 0,
|
||||
|
|
|
@@ -253,3 +253,27 @@ Program(Resolvable)
 ==>
 
 Program(Resolvable)
+
+# Resolvable with general punctuation char
+
+{{ '†' }}
+
+==>
+
+Program(Resolvable)
+
+# Resolvable with superscript char
+
+{{ '⁷' }}
+
+==>
+
+Program(Resolvable)
+
+# Resolvable with CJK char
+
+{{ '漢' }}
+
+==>
+
+Program(Resolvable)
@@ -10,7 +10,7 @@
 		"lint": "eslint . --quiet",
 		"lintfix": "eslint . --fix",
 		"watch": "tsc -p tsconfig.build.json --watch",
-		"test": "echo \"Error: no test created yet\""
+		"test": "jest"
 	},
 	"main": "dist/index.js",
 	"module": "src/index.ts",
@ -2,13 +2,10 @@
|
|||
import { EventEmitter } from 'events';
|
||||
import type Imap from 'imap';
|
||||
import { type ImapMessage } from 'imap';
|
||||
import * as qp from 'quoted-printable';
|
||||
import * as iconvlite from 'iconv-lite';
|
||||
import * as utf8 from 'utf8';
|
||||
import * as uuencode from 'uuencode';
|
||||
|
||||
import { getMessage } from './helpers/getMessage';
|
||||
import type { Message, MessagePart } from './types';
|
||||
import { PartData } from './PartData';
|
||||
|
||||
const IMAP_EVENTS = ['alert', 'mail', 'expunge', 'uidvalidity', 'update', 'close', 'end'] as const;
|
||||
|
||||
|
@ -124,7 +121,7 @@ export class ImapSimple extends EventEmitter {
|
|||
/** The message part to be downloaded, from the `message.attributes.struct` Array */
|
||||
part: MessagePart,
|
||||
) {
|
||||
return await new Promise<string>((resolve, reject) => {
|
||||
return await new Promise<PartData>((resolve, reject) => {
|
||||
const fetch = this.imap.fetch(message.attributes.uid, {
|
||||
bodies: [part.partID],
|
||||
struct: true,
|
||||
|
@ -138,43 +135,8 @@ export class ImapSimple extends EventEmitter {
|
|||
}
|
||||
|
||||
const data = result.parts[0].body as string;
|
||||
|
||||
const encoding = part.encoding.toUpperCase();
|
||||
|
||||
if (encoding === 'BASE64') {
|
||||
resolve(Buffer.from(data, 'base64').toString());
|
||||
return;
|
||||
}
|
||||
|
||||
if (encoding === 'QUOTED-PRINTABLE') {
|
||||
if (part.params?.charset?.toUpperCase() === 'UTF-8') {
|
||||
resolve(Buffer.from(utf8.decode(qp.decode(data))).toString());
|
||||
} else {
|
||||
resolve(Buffer.from(qp.decode(data)).toString());
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (encoding === '7BIT') {
|
||||
resolve(Buffer.from(data).toString('ascii'));
|
||||
return;
|
||||
}
|
||||
|
||||
if (encoding === '8BIT' || encoding === 'BINARY') {
|
||||
const charset = part.params?.charset ?? 'utf-8';
|
||||
resolve(iconvlite.decode(Buffer.from(data), charset));
|
||||
return;
|
||||
}
|
||||
|
||||
if (encoding === 'UUENCODE') {
|
||||
const parts = data.toString().split('\n'); // remove newline characters
|
||||
const merged = parts.splice(1, parts.length - 4).join(''); // remove excess lines and join lines with empty string
|
||||
resolve(uuencode.decode(merged));
|
||||
return;
|
||||
}
|
||||
|
||||
// if it gets here, the encoding is not currently supported
|
||||
reject(new Error('Unknown encoding ' + part.encoding));
|
||||
resolve(PartData.fromData(data, encoding));
|
||||
};
|
||||
|
||||
const fetchOnError = (error: Error) => {
|
||||
|
|
84
packages/@n8n/imap/src/PartData.ts
Normal file
84
packages/@n8n/imap/src/PartData.ts
Normal file
|
@ -0,0 +1,84 @@
|
|||
/* eslint-disable @typescript-eslint/no-use-before-define */
|
||||
import * as qp from 'quoted-printable';
|
||||
import * as iconvlite from 'iconv-lite';
|
||||
import * as utf8 from 'utf8';
|
||||
import * as uuencode from 'uuencode';
|
||||
|
||||
export abstract class PartData {
|
||||
constructor(readonly buffer: Buffer) {}
|
||||
|
||||
toString() {
|
||||
return this.buffer.toString();
|
||||
}
|
||||
|
||||
static fromData(data: string, encoding: string, charset?: string): PartData {
|
||||
if (encoding === 'BASE64') {
|
||||
return new Base64PartData(data);
|
||||
}
|
||||
|
||||
if (encoding === 'QUOTED-PRINTABLE') {
|
||||
return new QuotedPrintablePartData(data, charset);
|
||||
}
|
||||
|
||||
if (encoding === '7BIT') {
|
||||
return new SevenBitPartData(data);
|
||||
}
|
||||
|
||||
if (encoding === '8BIT' || encoding === 'BINARY') {
|
||||
return new BinaryPartData(data, charset);
|
||||
}
|
||||
|
||||
if (encoding === 'UUENCODE') {
|
||||
return new UuencodedPartData(data);
|
||||
}
|
||||
|
||||
// if it gets here, the encoding is not currently supported
|
||||
throw new Error('Unknown encoding ' + encoding);
|
||||
}
|
||||
}
|
||||
|
||||
export class Base64PartData extends PartData {
|
||||
constructor(data: string) {
|
||||
super(Buffer.from(data, 'base64'));
|
||||
}
|
||||
}
|
||||
|
||||
export class QuotedPrintablePartData extends PartData {
|
||||
constructor(data: string, charset?: string) {
|
||||
const decoded =
|
||||
charset?.toUpperCase() === 'UTF-8' ? utf8.decode(qp.decode(data)) : qp.decode(data);
|
||||
super(Buffer.from(decoded));
|
||||
}
|
||||
}
|
||||
|
||||
export class SevenBitPartData extends PartData {
|
||||
constructor(data: string) {
|
||||
super(Buffer.from(data));
|
||||
}
|
||||
|
||||
toString() {
|
||||
return this.buffer.toString('ascii');
|
||||
}
|
||||
}
|
||||
|
||||
export class BinaryPartData extends PartData {
|
||||
constructor(
|
||||
data: string,
|
||||
readonly charset: string = 'utf-8',
|
||||
) {
|
||||
super(Buffer.from(data));
|
||||
}
|
||||
|
||||
toString() {
|
||||
return iconvlite.decode(this.buffer, this.charset);
|
||||
}
|
||||
}
|
||||
|
||||
export class UuencodedPartData extends PartData {
|
||||
constructor(data: string) {
|
||||
const parts = data.split('\n'); // remove newline characters
|
||||
const merged = parts.splice(1, parts.length - 4).join(''); // remove excess lines and join lines with empty string
|
||||
const decoded = uuencode.decode(merged);
|
||||
super(decoded);
|
||||
}
|
||||
}
|
88
packages/@n8n/imap/test/PartData.test.ts
Normal file
88
packages/@n8n/imap/test/PartData.test.ts
Normal file
|
@ -0,0 +1,88 @@
|
|||
import {
|
||||
PartData,
|
||||
Base64PartData,
|
||||
QuotedPrintablePartData,
|
||||
SevenBitPartData,
|
||||
BinaryPartData,
|
||||
UuencodedPartData,
|
||||
} from '../src/PartData';
|
||||
|
||||
describe('PartData', () => {
|
||||
describe('fromData', () => {
|
||||
it('should return an instance of Base64PartData when encoding is BASE64', () => {
|
||||
const result = PartData.fromData('data', 'BASE64');
|
||||
expect(result).toBeInstanceOf(Base64PartData);
|
||||
});
|
||||
|
||||
it('should return an instance of QuotedPrintablePartData when encoding is QUOTED-PRINTABLE', () => {
|
||||
const result = PartData.fromData('data', 'QUOTED-PRINTABLE');
|
||||
expect(result).toBeInstanceOf(QuotedPrintablePartData);
|
||||
});
|
||||
|
||||
it('should return an instance of SevenBitPartData when encoding is 7BIT', () => {
|
||||
const result = PartData.fromData('data', '7BIT');
|
||||
expect(result).toBeInstanceOf(SevenBitPartData);
|
||||
});
|
||||
|
||||
it('should return an instance of BinaryPartData when encoding is 8BIT or BINARY', () => {
|
||||
let result = PartData.fromData('data', '8BIT');
|
||||
expect(result).toBeInstanceOf(BinaryPartData);
|
||||
result = PartData.fromData('data', 'BINARY');
|
||||
expect(result).toBeInstanceOf(BinaryPartData);
|
||||
});
|
||||
|
||||
it('should return an instance of UuencodedPartData when encoding is UUENCODE', () => {
|
||||
const result = PartData.fromData('data', 'UUENCODE');
|
||||
expect(result).toBeInstanceOf(UuencodedPartData);
|
||||
});
|
||||
|
||||
it('should throw an error when encoding is not supported', () => {
|
||||
expect(() => PartData.fromData('data', 'UNSUPPORTED')).toThrow(
|
||||
'Unknown encoding UNSUPPORTED',
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Base64PartData', () => {
|
||||
it('should correctly decode base64 data', () => {
|
||||
const data = Buffer.from('Hello, world!', 'utf-8').toString('base64');
|
||||
const partData = new Base64PartData(data);
|
||||
expect(partData.toString()).toBe('Hello, world!');
|
||||
});
|
||||
});
|
||||
|
||||
describe('QuotedPrintablePartData', () => {
|
||||
it('should correctly decode quoted-printable data', () => {
|
||||
const data = '=48=65=6C=6C=6F=2C=20=77=6F=72=6C=64=21'; // 'Hello, world!' in quoted-printable
|
||||
const partData = new QuotedPrintablePartData(data);
|
||||
expect(partData.toString()).toBe('Hello, world!');
|
||||
});
|
||||
});
|
||||
|
||||
describe('SevenBitPartData', () => {
|
||||
it('should correctly decode 7bit data', () => {
|
||||
const data = 'Hello, world!';
|
||||
const partData = new SevenBitPartData(data);
|
||||
expect(partData.toString()).toBe('Hello, world!');
|
||||
});
|
||||
});
|
||||
|
||||
describe('BinaryPartData', () => {
|
||||
it('should correctly decode binary data', () => {
|
||||
const data = Buffer.from('Hello, world!', 'utf-8').toString();
|
||||
const partData = new BinaryPartData(data);
|
||||
expect(partData.toString()).toBe('Hello, world!');
|
||||
});
|
||||
});
|
||||
|
||||
describe('UuencodedPartData', () => {
|
||||
it('should correctly decode uuencoded data', () => {
|
||||
const data = Buffer.from(
|
||||
'YmVnaW4gNjQ0IGRhdGEKLTImNUw7JlxMKCc9TzxGUUQoMGBgCmAKZW5kCg==',
|
||||
'base64',
|
||||
).toString('binary');
|
||||
const partData = new UuencodedPartData(data);
|
||||
expect(partData.toString()).toBe('Hello, world!');
|
||||
});
|
||||
});
|
|
@ -7,6 +7,7 @@ import type {
|
|||
INodeExecutionData,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
import { getTemplateNoticeField } from '../../../utils/sharedFields';
|
||||
import { promptTypeOptions, textInput } from '../../../utils/descriptions';
|
||||
|
@ -20,11 +21,13 @@ import { reActAgentAgentProperties } from './agents/ReActAgent/description';
|
|||
import { reActAgentAgentExecute } from './agents/ReActAgent/execute';
|
||||
import { sqlAgentAgentProperties } from './agents/SqlAgent/description';
|
||||
import { sqlAgentAgentExecute } from './agents/SqlAgent/execute';
|
||||
import { toolsAgentProperties } from './agents/ToolsAgent/description';
|
||||
import { toolsAgentExecute } from './agents/ToolsAgent/execute';
|
||||
|
||||
// Function used in the inputs expression to figure out which inputs to
|
||||
// display based on the agent type
|
||||
function getInputs(
|
||||
agent: 'conversationalAgent' | 'openAiFunctionsAgent' | 'reActAgent' | 'sqlAgent',
|
||||
agent: 'toolsAgent' | 'conversationalAgent' | 'openAiFunctionsAgent' | 'reActAgent' | 'sqlAgent',
|
||||
hasOutputParser?: boolean,
|
||||
): Array<ConnectionTypes | INodeInputConfiguration> {
|
||||
interface SpecialInput {
|
||||
|
@ -92,6 +95,31 @@ function getInputs(
|
|||
type: NodeConnectionType.AiOutputParser,
|
||||
},
|
||||
];
|
||||
} else if (agent === 'toolsAgent') {
|
||||
specialInputs = [
|
||||
{
|
||||
type: NodeConnectionType.AiLanguageModel,
|
||||
filter: {
|
||||
nodes: [
|
||||
'@n8n/n8n-nodes-langchain.lmChatAnthropic',
|
||||
'@n8n/n8n-nodes-langchain.lmChatAzureOpenAi',
|
||||
'@n8n/n8n-nodes-langchain.lmChatMistralCloud',
|
||||
'@n8n/n8n-nodes-langchain.lmChatOpenAi',
|
||||
'@n8n/n8n-nodes-langchain.lmChatGroq',
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
type: NodeConnectionType.AiMemory,
|
||||
},
|
||||
{
|
||||
type: NodeConnectionType.AiTool,
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
type: NodeConnectionType.AiOutputParser,
|
||||
},
|
||||
];
|
||||
} else if (agent === 'openAiFunctionsAgent') {
|
||||
specialInputs = [
|
||||
{
|
||||
|
@ -157,16 +185,60 @@ function getInputs(
|
|||
return [NodeConnectionType.Main, ...getInputData(specialInputs)];
|
||||
}
|
||||
|
||||
const agentTypeProperty: INodeProperties = {
|
||||
displayName: 'Agent',
|
||||
name: 'agent',
|
||||
type: 'options',
|
||||
noDataExpression: true,
|
||||
options: [
|
||||
{
|
||||
name: 'Conversational Agent',
|
||||
value: 'conversationalAgent',
|
||||
description:
|
||||
'Selects tools to accomplish its task and uses memory to recall previous conversations',
|
||||
},
|
||||
{
|
||||
name: 'OpenAI Functions Agent',
|
||||
value: 'openAiFunctionsAgent',
|
||||
description:
|
||||
"Utilizes OpenAI's Function Calling feature to select the appropriate tool and arguments for execution",
|
||||
},
|
||||
{
|
||||
name: 'Plan and Execute Agent',
|
||||
value: 'planAndExecuteAgent',
|
||||
description:
|
||||
'Plan and execute agents accomplish an objective by first planning what to do, then executing the sub tasks',
|
||||
},
|
||||
{
|
||||
name: 'ReAct Agent',
|
||||
value: 'reActAgent',
|
||||
description: 'Strategically select tools to accomplish a given task',
|
||||
},
|
||||
{
|
||||
name: 'SQL Agent',
|
||||
value: 'sqlAgent',
|
||||
description: 'Answers questions about data in an SQL database',
|
||||
},
|
||||
{
|
||||
name: 'Tools Agent',
|
||||
value: 'toolsAgent',
|
||||
description:
|
||||
'Utilized unified Tool calling interface to select the appropriate tools and argument for execution',
|
||||
},
|
||||
],
|
||||
default: '',
|
||||
};
|
||||
|
||||
export class Agent implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'AI Agent',
|
||||
name: 'agent',
|
||||
icon: 'fa:robot',
|
||||
group: ['transform'],
|
||||
version: [1, 1.1, 1.2, 1.3, 1.4, 1.5],
|
||||
version: [1, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6],
|
||||
description: 'Generates an action plan and executes it. Can use external tools.',
|
||||
subtitle:
|
||||
"={{ { conversationalAgent: 'Conversational Agent', openAiFunctionsAgent: 'OpenAI Functions Agent', reActAgent: 'ReAct Agent', sqlAgent: 'SQL Agent', planAndExecuteAgent: 'Plan and Execute Agent' }[$parameter.agent] }}",
|
||||
"={{ { toolsAgent: 'Tools Agent', conversationalAgent: 'Conversational Agent', openAiFunctionsAgent: 'OpenAI Functions Agent', reActAgent: 'ReAct Agent', sqlAgent: 'SQL Agent', planAndExecuteAgent: 'Plan and Execute Agent' }[$parameter.agent] }}",
|
||||
defaults: {
|
||||
name: 'AI Agent',
|
||||
color: '#404040',
|
||||
|
@ -225,43 +297,18 @@ export class Agent implements INodeType {
|
|||
},
|
||||
},
|
||||
},
|
||||
// Make Conversational Agent the default agent for versions 1.5 and below
|
||||
{
|
||||
displayName: 'Agent',
|
||||
name: 'agent',
|
||||
type: 'options',
|
||||
noDataExpression: true,
|
||||
options: [
|
||||
{
|
||||
name: 'Conversational Agent',
|
||||
value: 'conversationalAgent',
|
||||
description:
|
||||
'Selects tools to accomplish its task and uses memory to recall previous conversations',
|
||||
},
|
||||
{
|
||||
name: 'OpenAI Functions Agent',
|
||||
value: 'openAiFunctionsAgent',
|
||||
description:
|
||||
"Utilizes OpenAI's Function Calling feature to select the appropriate tool and arguments for execution",
|
||||
},
|
||||
{
|
||||
name: 'Plan and Execute Agent',
|
||||
value: 'planAndExecuteAgent',
|
||||
description:
|
||||
'Plan and execute agents accomplish an objective by first planning what to do, then executing the sub tasks',
|
||||
},
|
||||
{
|
||||
name: 'ReAct Agent',
|
||||
value: 'reActAgent',
|
||||
description: 'Strategically select tools to accomplish a given task',
|
||||
},
|
||||
{
|
||||
name: 'SQL Agent',
|
||||
value: 'sqlAgent',
|
||||
description: 'Answers questions about data in an SQL database',
|
||||
},
|
||||
],
|
||||
...agentTypeProperty,
|
||||
displayOptions: { show: { '@version': [{ _cnd: { lte: 1.5 } }] } },
|
||||
default: 'conversationalAgent',
|
||||
},
|
||||
// Make Tools Agent the default agent for versions 1.6 and above
|
||||
{
|
||||
...agentTypeProperty,
|
||||
displayOptions: { show: { '@version': [{ _cnd: { gte: 1.6 } }] } },
|
||||
default: 'toolsAgent',
|
||||
},
|
||||
{
|
||||
...promptTypeOptions,
|
||||
displayOptions: {
|
||||
|
@ -307,6 +354,7 @@ export class Agent implements INodeType {
|
|||
},
|
||||
},
|
||||
|
||||
...toolsAgentProperties,
|
||||
...conversationalAgentProperties,
|
||||
...openAiFunctionsAgentProperties,
|
||||
...reActAgentAgentProperties,
|
||||
|
@ -321,6 +369,8 @@ export class Agent implements INodeType {
|
|||
|
||||
if (agentType === 'conversationalAgent') {
|
||||
return await conversationalAgentExecute.call(this, nodeVersion);
|
||||
} else if (agentType === 'toolsAgent') {
|
||||
return await toolsAgentExecute.call(this, nodeVersion);
|
||||
} else if (agentType === 'openAiFunctionsAgent') {
|
||||
return await openAiFunctionsAgentExecute.call(this, nodeVersion);
|
||||
} else if (agentType === 'reActAgent') {
|
||||
|
|
|
@ -0,0 +1,43 @@
|
|||
import type { INodeProperties } from 'n8n-workflow';
|
||||
import { SYSTEM_MESSAGE } from './prompt';
|
||||
|
||||
export const toolsAgentProperties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
displayOptions: {
|
||||
show: {
|
||||
agent: ['toolsAgent'],
|
||||
},
|
||||
},
|
||||
default: {},
|
||||
placeholder: 'Add Option',
|
||||
options: [
|
||||
{
|
||||
displayName: 'System Message',
|
||||
name: 'systemMessage',
|
||||
type: 'string',
|
||||
default: SYSTEM_MESSAGE,
|
||||
description: 'The message that will be sent to the agent before the conversation starts',
|
||||
typeOptions: {
|
||||
rows: 6,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Max Iterations',
|
||||
name: 'maxIterations',
|
||||
type: 'number',
|
||||
default: 10,
|
||||
description: 'The maximum number of iterations the agent will run before stopping',
|
||||
},
|
||||
{
|
||||
displayName: 'Return Intermediate Steps',
|
||||
name: 'returnIntermediateSteps',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Whether the output should include intermediate steps the agent took',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
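// Editorial sketch (not part of this commit): at execution time this collection resolves to a
// plain object read via getNodeParameter('options', 0, {}), e.g. with user overrides:
//
//   const options = {
//     systemMessage: 'You are a helpful assistant specialised in invoices',
//     maxIterations: 15,
//     returnIntermediateSteps: true,
//   };
//
// Fields left unset fall back to the defaults declared above (SYSTEM_MESSAGE, 10, false).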
|
|
@ -0,0 +1,189 @@
|
|||
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
|
||||
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
|
||||
|
||||
import type { AgentAction, AgentFinish, AgentStep } from 'langchain/agents';
|
||||
import { AgentExecutor, createToolCallingAgent } from 'langchain/agents';
|
||||
import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
|
||||
import { ChatPromptTemplate } from '@langchain/core/prompts';
|
||||
import { omit } from 'lodash';
|
||||
import type { Tool } from '@langchain/core/tools';
|
||||
import { DynamicStructuredTool } from '@langchain/core/tools';
|
||||
import { RunnableSequence } from '@langchain/core/runnables';
|
||||
import type { ZodObject } from 'zod';
|
||||
import { z } from 'zod';
|
||||
import type { BaseOutputParser, StructuredOutputParser } from '@langchain/core/output_parsers';
|
||||
import { OutputFixingParser } from 'langchain/output_parsers';
|
||||
import {
|
||||
isChatInstance,
|
||||
getPromptInputByType,
|
||||
getOptionalOutputParsers,
|
||||
getConnectedTools,
|
||||
} from '../../../../../utils/helpers';
|
||||
import { SYSTEM_MESSAGE } from './prompt';
|
||||
|
||||
function getOutputParserSchema(outputParser: BaseOutputParser): ZodObject<any, any, any, any> {
|
||||
const parserType = outputParser.lc_namespace[outputParser.lc_namespace.length - 1];
|
||||
let schema: ZodObject<any, any, any, any>;
|
||||
|
||||
if (parserType === 'structured') {
|
||||
// If the output parser is a structured output parser, we will use the schema from the parser
|
||||
schema = (outputParser as StructuredOutputParser<ZodObject<any, any, any, any>>).schema;
|
||||
} else if (parserType === 'fix' && outputParser instanceof OutputFixingParser) {
|
||||
// If the output parser is a fixing parser, we will use the schema from the connected structured output parser
|
||||
schema = (outputParser.parser as StructuredOutputParser<ZodObject<any, any, any, any>>).schema;
|
||||
} else {
|
||||
// If the output parser is not a structured output parser, we will use a fallback schema
|
||||
schema = z.object({ text: z.string() });
|
||||
}
|
||||
|
||||
return schema;
|
||||
}
|
||||
|
||||
export async function toolsAgentExecute(
|
||||
this: IExecuteFunctions,
|
||||
nodeVersion: number,
|
||||
): Promise<INodeExecutionData[][]> {
|
||||
this.logger.verbose('Executing Tools Agent');
|
||||
const model = await this.getInputConnectionData(NodeConnectionType.AiLanguageModel, 0);
|
||||
|
||||
if (!isChatInstance(model) || !model.bindTools) {
|
||||
throw new NodeOperationError(
|
||||
this.getNode(),
|
||||
'Tools Agent requires a Chat Model that supports tool calling',
|
||||
);
|
||||
}
|
||||
|
||||
const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
|
||||
| BaseChatMemory
|
||||
| undefined;
|
||||
|
||||
const tools = (await getConnectedTools(this, true)) as Array<DynamicStructuredTool | Tool>;
|
||||
const outputParser = (await getOptionalOutputParsers(this))?.[0];
|
||||
let structuredOutputParserTool: DynamicStructuredTool | undefined;
|
||||
|
||||
async function agentStepsParser(
|
||||
steps: AgentFinish | AgentAction[],
|
||||
): Promise<AgentFinish | AgentAction[]> {
|
||||
if (Array.isArray(steps)) {
|
||||
const responseParserTool = steps.find((step) => step.tool === 'format_final_response');
|
||||
if (responseParserTool) {
|
||||
const toolInput = responseParserTool?.toolInput;
|
||||
const returnValues = (await outputParser.parse(toolInput as unknown as string)) as Record<
|
||||
string,
|
||||
unknown
|
||||
>;
|
||||
|
||||
return {
|
||||
returnValues,
|
||||
log: 'Final response formatted',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// If the steps are an AgentFinish and the outputParser is defined, the LLM didn't use the `format_final_response` tool, so we parse the output manually
|
||||
if (outputParser && typeof steps === 'object' && (steps as AgentFinish).returnValues) {
|
||||
const finalResponse = (steps as AgentFinish).returnValues;
|
||||
const returnValues = (await outputParser.parse(finalResponse as unknown as string)) as Record<
|
||||
string,
|
||||
unknown
|
||||
>;
|
||||
|
||||
return {
|
||||
returnValues,
|
||||
log: 'Final response formatted',
|
||||
};
|
||||
}
|
||||
return steps;
|
||||
}
|
||||
|
||||
if (outputParser) {
|
||||
const schema = getOutputParserSchema(outputParser);
|
||||
structuredOutputParserTool = new DynamicStructuredTool({
|
||||
schema,
|
||||
name: 'format_final_response',
|
||||
description:
|
||||
'Always use this tool for the final output to the user. It validates the output so only use it when you are sure the output is final.',
|
||||
// We will not use the function here as we will use the parser to intercept & parse the output in the agentStepsParser
|
||||
func: async () => '',
|
||||
});
|
||||
|
||||
tools.push(structuredOutputParserTool);
|
||||
}
|
||||
|
||||
const options = this.getNodeParameter('options', 0, {}) as {
|
||||
systemMessage?: string;
|
||||
maxIterations?: number;
|
||||
returnIntermediateSteps?: boolean;
|
||||
};
|
||||
|
||||
const prompt = ChatPromptTemplate.fromMessages([
|
||||
['system', `{system_message}${outputParser ? '\n\n{formatting_instructions}' : ''}`],
|
||||
['placeholder', '{chat_history}'],
|
||||
['human', '{input}'],
|
||||
['placeholder', '{agent_scratchpad}'],
|
||||
]);
|
||||
|
||||
const agent = createToolCallingAgent({
|
||||
llm: model,
|
||||
tools,
|
||||
prompt,
|
||||
streamRunnable: false,
|
||||
});
|
||||
agent.streamRunnable = false;
|
||||
|
||||
const runnableAgent = RunnableSequence.from<{
|
||||
steps: AgentStep[];
|
||||
}>([agent, agentStepsParser]);
|
||||
|
||||
const executor = AgentExecutor.fromAgentAndTools({
|
||||
agent: runnableAgent,
|
||||
memory,
|
||||
tools,
|
||||
returnIntermediateSteps: options.returnIntermediateSteps === true,
|
||||
maxIterations: options.maxIterations ?? 10,
|
||||
});
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
|
||||
const items = this.getInputData();
|
||||
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
|
||||
try {
|
||||
const input = getPromptInputByType({
|
||||
ctx: this,
|
||||
i: itemIndex,
|
||||
inputKey: 'text',
|
||||
promptTypeKey: 'promptType',
|
||||
});
|
||||
|
||||
if (input === undefined) {
|
||||
throw new NodeOperationError(this.getNode(), 'The ‘text’ parameter is empty.');
|
||||
}
|
||||
|
||||
const response = await executor.invoke({
|
||||
input,
|
||||
system_message: options.systemMessage ?? SYSTEM_MESSAGE,
|
||||
formatting_instructions:
|
||||
'IMPORTANT: Always call `format_final_response` to format your final response!', //outputParser?.getFormatInstructions(),
|
||||
});
|
||||
|
||||
returnData.push({
|
||||
json: omit(
|
||||
response,
|
||||
'system_message',
|
||||
'formatting_instructions',
|
||||
'input',
|
||||
'chat_history',
|
||||
'agent_scratchpad',
|
||||
),
|
||||
});
|
||||
} catch (error) {
|
||||
if (this.continueOnFail()) {
|
||||
returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
|
||||
continue;
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
return await this.prepareOutputData(returnData);
|
||||
}
|
|
@ -0,0 +1 @@
|
|||
export const SYSTEM_MESSAGE = 'You are a helpful assistant';
|
|
@ -133,6 +133,24 @@ const properties: INodeProperties[] = [
|
|||
type: 'collection',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Output Randomness (Temperature)',
|
||||
name: 'temperature',
|
||||
default: 1,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive. We generally recommend altering this or Top P but not both.',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Output Randomness (Top P)',
|
||||
name: 'topP',
|
||||
default: 1,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'An alternative to sampling with temperature, controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Fail if Assistant Already Exists',
|
||||
name: 'failIfExists',
|
||||
|
@ -176,7 +194,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
|
|||
do {
|
||||
const response = (await apiRequest.call(this, 'GET', '/assistants', {
|
||||
headers: {
|
||||
'OpenAI-Beta': 'assistants=v1',
|
||||
'OpenAI-Beta': 'assistants=v2',
|
||||
},
|
||||
qs: {
|
||||
limit: 100,
|
||||
|
@ -219,7 +237,6 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
|
|||
name,
|
||||
description: assistantDescription,
|
||||
instructions,
|
||||
file_ids,
|
||||
};
|
||||
|
||||
const tools = [];
|
||||
|
@ -228,12 +245,28 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
|
|||
tools.push({
|
||||
type: 'code_interpreter',
|
||||
});
|
||||
body.tool_resources = {
|
||||
...((body.tool_resources as object) ?? {}),
|
||||
code_interpreter: {
|
||||
file_ids,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (knowledgeRetrieval) {
|
||||
tools.push({
|
||||
type: 'retrieval',
|
||||
type: 'file_search',
|
||||
});
|
||||
body.tool_resources = {
|
||||
...((body.tool_resources as object) ?? {}),
|
||||
file_search: {
|
||||
vector_stores: [
|
||||
{
|
||||
file_ids,
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
}
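// Editorial sketch (not part of this commit): with both flags enabled, the Assistants v2
// request body assembled above ends up shaped roughly like this — `tools` declares the
// capabilities, while the attached files move from the v1 `file_ids` field into `tool_resources`:
//
//   {
//     tools: [{ type: 'code_interpreter' }, { type: 'file_search' }],
//     tool_resources: {
//       code_interpreter: { file_ids },
//       file_search: { vector_stores: [{ file_ids }] },
//     },
//   }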
|
||||
|
||||
if (tools.length) {
|
||||
|
@ -243,7 +276,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
|
|||
const response = await apiRequest.call(this, 'POST', '/assistants', {
|
||||
body,
|
||||
headers: {
|
||||
'OpenAI-Beta': 'assistants=v1',
|
||||
'OpenAI-Beta': 'assistants=v2',
|
||||
},
|
||||
});
|
||||
|
||||
|
|
|
@ -19,7 +19,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
|
|||
|
||||
const response = await apiRequest.call(this, 'DELETE', `/assistants/${assistantId}`, {
|
||||
headers: {
|
||||
'OpenAI-Beta': 'assistants=v1',
|
||||
'OpenAI-Beta': 'assistants=v2',
|
||||
},
|
||||
});
|
||||
|
||||
|
|
|
@ -30,7 +30,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
|
|||
do {
|
||||
const response = await apiRequest.call(this, 'GET', '/assistants', {
|
||||
headers: {
|
||||
'OpenAI-Beta': 'assistants=v1',
|
||||
'OpenAI-Beta': 'assistants=v2',
|
||||
},
|
||||
qs: {
|
||||
limit: 100,
|
||||
|
|
|
@ -4,9 +4,17 @@ import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant
|
|||
import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema';
|
||||
import { OpenAI as OpenAIClient } from 'openai';
|
||||
|
||||
import { NodeOperationError, updateDisplayOptions } from 'n8n-workflow';
|
||||
import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';
|
||||
import { NodeConnectionType, NodeOperationError, updateDisplayOptions } from 'n8n-workflow';
|
||||
import type {
|
||||
IDataObject,
|
||||
IExecuteFunctions,
|
||||
INodeExecutionData,
|
||||
INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import type { BufferWindowMemory } from 'langchain/memory';
|
||||
import omit from 'lodash/omit';
|
||||
import type { BaseMessage } from '@langchain/core/messages';
|
||||
import { formatToOpenAIAssistantTool } from '../../helpers/utils';
|
||||
import { assistantRLC } from '../descriptions';
|
||||
|
||||
|
@ -110,6 +118,12 @@ const displayOptions = {
|
|||
};
|
||||
|
||||
export const description = updateDisplayOptions(displayOptions, properties);
|
||||
const mapChatMessageToThreadMessage = (
|
||||
message: BaseMessage,
|
||||
): OpenAIClient.Beta.Threads.ThreadCreateParams.Message => ({
|
||||
role: message._getType() === 'ai' ? 'assistant' : 'user',
|
||||
content: message.content.toString(),
|
||||
});
|
||||
|
||||
export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
|
||||
const credentials = await this.getCredentials('openAiApi');
|
||||
|
@ -182,11 +196,47 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
|
|||
tools: tools ?? [],
|
||||
});
|
||||
|
||||
const response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({
|
||||
const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
|
||||
| BufferWindowMemory
|
||||
| undefined;
|
||||
|
||||
const chainValues: IDataObject = {
|
||||
content: input,
|
||||
signal: this.getExecutionCancelSignal(),
|
||||
timeout: options.timeout ?? 10000,
|
||||
});
|
||||
};
|
||||
let thread: OpenAIClient.Beta.Threads.Thread;
|
||||
if (memory) {
|
||||
const chatMessages = await memory.chatHistory.getMessages();
|
||||
|
||||
// Construct a new thread from the chat history to map the memory
|
||||
if (chatMessages.length) {
|
||||
const first32Messages = chatMessages.slice(0, 32);
|
||||
// There is an undocumented limit of 32 messages per thread when creating a thread with messages
|
||||
const mappedMessages: OpenAIClient.Beta.Threads.ThreadCreateParams.Message[] =
|
||||
first32Messages.map(mapChatMessageToThreadMessage);
|
||||
|
||||
thread = await client.beta.threads.create({ messages: mappedMessages });
|
||||
const overLimitMessages = chatMessages.slice(32).map(mapChatMessageToThreadMessage);
|
||||
|
||||
// Send the remaining messages that exceed the limit of 32 sequentially
|
||||
for (const message of overLimitMessages) {
|
||||
await client.beta.threads.messages.create(thread.id, message);
|
||||
}
|
||||
|
||||
chainValues.threadId = thread.id;
|
||||
}
|
||||
}
|
||||
|
||||
const response = await agentExecutor.withConfig(getTracingConfig(this)).invoke(chainValues);
|
||||
if (memory) {
|
||||
await memory.saveContext({ input }, { output: response.output });
|
||||
|
||||
if (response.threadId && response.runId) {
|
||||
const threadRun = await client.beta.threads.runs.retrieve(response.threadId, response.runId);
|
||||
response.usage = threadRun.usage;
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
options.preserveOriginalTools !== false &&
|
||||
|
@ -197,6 +247,6 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
|
|||
tools: assistantTools,
|
||||
});
|
||||
}
|
||||
|
||||
return [{ json: response, pairedItem: { item: i } }];
|
||||
const filteredResponse = omit(response, ['signal', 'timeout']);
|
||||
return [{ json: filteredResponse, pairedItem: { item: i } }];
|
||||
}
|
||||
|
|
|
@ -84,6 +84,25 @@ const properties: INodeProperties[] = [
|
|||
default: false,
|
||||
description: 'Whether to remove all custom tools (functions) from the assistant',
|
||||
},
|
||||
|
||||
{
|
||||
displayName: 'Output Randomness (Temperature)',
|
||||
name: 'temperature',
|
||||
default: 1,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive. We generally recommend altering this or Top P but not both.',
|
||||
type: 'number',
|
||||
},
|
||||
{
|
||||
displayName: 'Output Randomness (Top P)',
|
||||
name: 'topP',
|
||||
default: 1,
|
||||
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
|
||||
description:
|
||||
'An alternative to sampling with temperature, controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',
|
||||
type: 'number',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
@ -109,6 +128,8 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
|
|||
knowledgeRetrieval,
|
||||
file_ids,
|
||||
removeCustomTools,
|
||||
temperature,
|
||||
topP,
|
||||
} = options;
|
||||
|
||||
const assistantDescription = options.description as string;
|
||||
|
@ -128,7 +149,19 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
|
|||
);
|
||||
}
|
||||
|
||||
body.file_ids = files;
|
||||
body.tool_resources = {
|
||||
...((body.tool_resources as object) ?? {}),
|
||||
code_interpreter: {
|
||||
file_ids,
|
||||
},
|
||||
file_search: {
|
||||
vector_stores: [
|
||||
{
|
||||
file_ids,
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (modelId) {
|
||||
|
@ -147,11 +180,19 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
|
|||
body.instructions = instructions;
|
||||
}
|
||||
|
||||
if (temperature) {
|
||||
body.temperature = temperature;
|
||||
}
|
||||
|
||||
if (topP) {
|
||||
body.top_p = topP;
|
||||
}
|
||||
|
||||
let tools =
|
||||
((
|
||||
await apiRequest.call(this, 'GET', `/assistants/${assistantId}`, {
|
||||
headers: {
|
||||
'OpenAI-Beta': 'assistants=v1',
|
||||
'OpenAI-Beta': 'assistants=v2',
|
||||
},
|
||||
})
|
||||
).tools as IDataObject[]) || [];
|
||||
|
@ -166,14 +207,14 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
|
|||
tools = tools.filter((tool) => tool.type !== 'code_interpreter');
|
||||
}
|
||||
|
||||
if (knowledgeRetrieval && !tools.find((tool) => tool.type === 'retrieval')) {
|
||||
if (knowledgeRetrieval && !tools.find((tool) => tool.type === 'file_search')) {
|
||||
tools.push({
|
||||
type: 'retrieval',
|
||||
type: 'file_search',
|
||||
});
|
||||
}
|
||||
|
||||
if (knowledgeRetrieval === false && tools.find((tool) => tool.type === 'retrieval')) {
|
||||
tools = tools.filter((tool) => tool.type !== 'retrieval');
|
||||
if (knowledgeRetrieval === false && tools.find((tool) => tool.type === 'file_search')) {
|
||||
tools = tools.filter((tool) => tool.type !== 'file_search');
|
||||
}
|
||||
|
||||
if (removeCustomTools) {
|
||||
|
@ -185,7 +226,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
|
|||
const response = await apiRequest.call(this, 'POST', `/assistants/${assistantId}`, {
|
||||
body,
|
||||
headers: {
|
||||
'OpenAI-Beta': 'assistants=v1',
|
||||
'OpenAI-Beta': 'assistants=v2',
|
||||
},
|
||||
});
|
||||
|
||||
|
|
|
@ -46,6 +46,7 @@ const configureNodeInputs = (resource: string, operation: string, hideTools: str
|
|||
if (resource === 'assistant' && operation === 'message') {
|
||||
return [
|
||||
{ type: NodeConnectionType.Main },
|
||||
{ type: NodeConnectionType.AiMemory, displayName: 'Memory', maxConnections: 1 },
|
||||
{ type: NodeConnectionType.AiTool, displayName: 'Tools' },
|
||||
];
|
||||
}
|
||||
|
|
|
@ -78,7 +78,7 @@ export async function assistantSearch(
|
|||
): Promise<INodeListSearchResult> {
|
||||
const { data, has_more, last_id } = await apiRequest.call(this, 'GET', '/assistants', {
|
||||
headers: {
|
||||
'OpenAI-Beta': 'assistants=v1',
|
||||
'OpenAI-Beta': 'assistants=v2',
|
||||
},
|
||||
qs: {
|
||||
limit: 100,
|
||||
|
|
|
@ -84,13 +84,24 @@ describe('OpenAi, Assistant resource', () => {
|
|||
expect(transport.apiRequest).toHaveBeenCalledWith('POST', '/assistants', {
|
||||
body: {
|
||||
description: 'description',
|
||||
file_ids: [],
|
||||
instructions: 'some instructions',
|
||||
model: 'gpt-model',
|
||||
name: 'name',
|
||||
tools: [{ type: 'code_interpreter' }, { type: 'retrieval' }],
|
||||
tool_resources: {
|
||||
code_interpreter: {
|
||||
file_ids: [],
|
||||
},
|
||||
file_search: {
|
||||
vector_stores: [
|
||||
{
|
||||
file_ids: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
tools: [{ type: 'code_interpreter' }, { type: 'file_search' }],
|
||||
},
|
||||
headers: { 'OpenAI-Beta': 'assistants=v1' },
|
||||
headers: { 'OpenAI-Beta': 'assistants=v2' },
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -124,7 +135,7 @@ describe('OpenAi, Assistant resource', () => {
|
|||
);
|
||||
|
||||
expect(transport.apiRequest).toHaveBeenCalledWith('DELETE', '/assistants/assistant-id', {
|
||||
headers: { 'OpenAI-Beta': 'assistants=v1' },
|
||||
headers: { 'OpenAI-Beta': 'assistants=v2' },
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -185,17 +196,28 @@ describe('OpenAi, Assistant resource', () => {
|
|||
|
||||
expect(transport.apiRequest).toHaveBeenCalledTimes(2);
|
||||
expect(transport.apiRequest).toHaveBeenCalledWith('GET', '/assistants/assistant-id', {
|
||||
headers: { 'OpenAI-Beta': 'assistants=v1' },
|
||||
headers: { 'OpenAI-Beta': 'assistants=v2' },
|
||||
});
|
||||
expect(transport.apiRequest).toHaveBeenCalledWith('POST', '/assistants/assistant-id', {
|
||||
body: {
|
||||
file_ids: [],
|
||||
instructions: 'some instructions',
|
||||
model: 'gpt-model',
|
||||
name: 'name',
|
||||
tools: [{ type: 'existing_tool' }, { type: 'code_interpreter' }, { type: 'retrieval' }],
|
||||
tool_resources: {
|
||||
code_interpreter: {
|
||||
file_ids: [],
|
||||
},
|
||||
file_search: {
|
||||
vector_stores: [
|
||||
{
|
||||
file_ids: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
tools: [{ type: 'existing_tool' }, { type: 'code_interpreter' }, { type: 'file_search' }],
|
||||
},
|
||||
headers: { 'OpenAI-Beta': 'assistants=v1' },
|
||||
headers: { 'OpenAI-Beta': 'assistants=v2' },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@n8n/n8n-nodes-langchain",
|
||||
"version": "1.41.0",
|
||||
"version": "1.42.0",
|
||||
"description": "",
|
||||
"license": "SEE LICENSE IN LICENSE.md",
|
||||
"homepage": "https://n8n.io",
|
||||
|
|
|
@ -13,7 +13,6 @@ import type { Document } from '@langchain/core/documents';
|
|||
import { TextSplitter } from 'langchain/text_splitter';
|
||||
import { BaseChatMemory } from '@langchain/community/memory/chat_memory';
|
||||
import { BaseRetriever } from '@langchain/core/retrievers';
|
||||
import type { FormatInstructionsOptions } from '@langchain/core/output_parsers';
|
||||
import { BaseOutputParser, OutputParserException } from '@langchain/core/output_parsers';
|
||||
import { isObject } from 'lodash';
|
||||
import type { BaseDocumentLoader } from 'langchain/dist/document_loaders/base';
|
||||
|
@ -222,31 +221,7 @@ export function logWrapper(
|
|||
|
||||
// ========== BaseOutputParser ==========
|
||||
if (originalInstance instanceof BaseOutputParser) {
|
||||
if (prop === 'getFormatInstructions' && 'getFormatInstructions' in target) {
|
||||
return (options?: FormatInstructionsOptions): string => {
|
||||
connectionType = NodeConnectionType.AiOutputParser;
|
||||
const { index } = executeFunctions.addInputData(connectionType, [
|
||||
[{ json: { action: 'getFormatInstructions' } }],
|
||||
]);
|
||||
|
||||
// @ts-ignore
|
||||
const response = callMethodSync.call(target, {
|
||||
executeFunctions,
|
||||
connectionType,
|
||||
currentNodeRunIndex: index,
|
||||
method: target[prop],
|
||||
arguments: [options],
|
||||
}) as string;
|
||||
|
||||
executeFunctions.addOutputData(connectionType, index, [
|
||||
[{ json: { action: 'getFormatInstructions', response } }],
|
||||
]);
|
||||
void logAiEvent(executeFunctions, 'n8n.ai.output.parser.get.instructions', {
|
||||
response,
|
||||
});
|
||||
return response;
|
||||
};
|
||||
} else if (prop === 'parse' && 'parse' in target) {
|
||||
if (prop === 'parse' && 'parse' in target) {
|
||||
return async (text: string | Record<string, unknown>): Promise<unknown> => {
|
||||
connectionType = NodeConnectionType.AiOutputParser;
|
||||
const stringifiedText = isObject(text) ? JSON.stringify(text) : text;
|
||||
|
@ -254,19 +229,30 @@ export function logWrapper(
|
|||
[{ json: { action: 'parse', text: stringifiedText } }],
|
||||
]);
|
||||
|
||||
const response = (await callMethodAsync.call(target, {
|
||||
executeFunctions,
|
||||
connectionType,
|
||||
currentNodeRunIndex: index,
|
||||
method: target[prop],
|
||||
arguments: [stringifiedText],
|
||||
})) as object;
|
||||
try {
|
||||
const response = (await callMethodAsync.call(target, {
|
||||
executeFunctions,
|
||||
connectionType,
|
||||
currentNodeRunIndex: index,
|
||||
method: target[prop],
|
||||
arguments: [stringifiedText],
|
||||
})) as object;
|
||||
|
||||
void logAiEvent(executeFunctions, 'n8n.ai.output.parser.parsed', { text, response });
|
||||
executeFunctions.addOutputData(connectionType, index, [
|
||||
[{ json: { action: 'parse', response } }],
|
||||
]);
|
||||
return response;
|
||||
void logAiEvent(executeFunctions, 'n8n.ai.output.parser.parsed', { text, response });
|
||||
executeFunctions.addOutputData(connectionType, index, [
|
||||
[{ json: { action: 'parse', response } }],
|
||||
]);
|
||||
return response;
|
||||
} catch (error) {
|
||||
void logAiEvent(executeFunctions, 'n8n.ai.output.parser.parsed', {
|
||||
text,
|
||||
response: error.message ?? error,
|
||||
});
|
||||
executeFunctions.addOutputData(connectionType, index, [
|
||||
[{ json: { action: 'parse', response: error.message ?? error } }],
|
||||
]);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
packages/@n8n/permissions/src/combineScopes.ts (new file, 23 lines)
|
@ -0,0 +1,23 @@
|
|||
import type { Scope, ScopeLevels, GlobalScopes, MaskLevels } from './types';
|
||||
|
||||
export function combineScopes(userScopes: GlobalScopes, masks?: MaskLevels): Set<Scope>;
|
||||
export function combineScopes(userScopes: ScopeLevels, masks?: MaskLevels): Set<Scope>;
|
||||
export function combineScopes(
|
||||
userScopes: GlobalScopes | ScopeLevels,
|
||||
masks?: MaskLevels,
|
||||
): Set<Scope> {
|
||||
const maskedScopes: GlobalScopes | ScopeLevels = Object.fromEntries(
|
||||
Object.entries(userScopes).map((e) => [e[0], [...e[1]]]),
|
||||
) as GlobalScopes | ScopeLevels;
|
||||
|
||||
if (masks?.sharing) {
|
||||
if ('project' in maskedScopes) {
|
||||
maskedScopes.project = maskedScopes.project.filter((v) => masks.sharing.includes(v));
|
||||
}
|
||||
if ('resource' in maskedScopes) {
|
||||
maskedScopes.resource = maskedScopes.resource.filter((v) => masks.sharing.includes(v));
|
||||
}
|
||||
}
|
||||
|
||||
return new Set(Object.values(maskedScopes).flat());
|
||||
}
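// Editorial usage sketch (not part of this commit): combineScopes flattens a user's scope
// levels into a single Set, with the optional sharing mask filtering only the project and
// resource levels — global scopes always pass through. hasScope() below builds on this.
//
//   const scopes = combineScopes(
//     {
//       global: ['user:list'],
//       project: ['workflow:read', 'workflow:update'],
//       resource: [],
//     },
//     { sharing: ['workflow:read'] },
//   );
//   scopes.has('workflow:read');   // true  - allowed by the mask
//   scopes.has('workflow:update'); // false - masked out at the project level
//   scopes.has('user:list');       // true  - global scopes are never masked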
|
|
@ -1,25 +1,29 @@
|
|||
import type { Scope, ScopeLevels, GlobalScopes, ScopeOptions } from './types';
|
||||
import { combineScopes } from './combineScopes';
|
||||
import type { Scope, ScopeLevels, GlobalScopes, ScopeOptions, MaskLevels } from './types';
|
||||
|
||||
export function hasScope(
|
||||
scope: Scope | Scope[],
|
||||
userScopes: GlobalScopes,
|
||||
masks?: MaskLevels,
|
||||
options?: ScopeOptions,
|
||||
): boolean;
|
||||
export function hasScope(
|
||||
scope: Scope | Scope[],
|
||||
userScopes: ScopeLevels,
|
||||
masks?: MaskLevels,
|
||||
options?: ScopeOptions,
|
||||
): boolean;
|
||||
export function hasScope(
|
||||
scope: Scope | Scope[],
|
||||
userScopes: GlobalScopes | ScopeLevels,
|
||||
masks?: MaskLevels,
|
||||
options: ScopeOptions = { mode: 'oneOf' },
|
||||
): boolean {
|
||||
if (!Array.isArray(scope)) {
|
||||
scope = [scope];
|
||||
}
|
||||
|
||||
const userScopeSet = new Set(Object.values(userScopes).flat());
|
||||
const userScopeSet = combineScopes(userScopes, masks);
|
||||
|
||||
if (options.mode === 'allOf') {
|
||||
return !!scope.length && scope.every((s) => userScopeSet.has(s));
|
||||
|
|
|
@ -1,2 +1,3 @@
|
|||
export type * from './types';
|
||||
export * from './hasScope';
|
||||
export * from './combineScopes';
|
||||
|
|
|
@ -12,8 +12,10 @@ export type Resource =
|
|||
| 'license'
|
||||
| 'logStreaming'
|
||||
| 'orchestration'
|
||||
| 'sourceControl'
|
||||
| 'project'
|
||||
| 'saml'
|
||||
| 'securityAudit'
|
||||
| 'sourceControl'
|
||||
| 'tag'
|
||||
| 'user'
|
||||
| 'variable'
|
||||
|
@ -48,7 +50,9 @@ export type LdapScope = ResourceScope<'ldap', 'manage' | 'sync'>;
|
|||
export type LicenseScope = ResourceScope<'license', 'manage'>;
|
||||
export type LogStreamingScope = ResourceScope<'logStreaming', 'manage'>;
|
||||
export type OrchestrationScope = ResourceScope<'orchestration', 'read' | 'list'>;
|
||||
export type ProjectScope = ResourceScope<'project'>;
|
||||
export type SamlScope = ResourceScope<'saml', 'manage'>;
|
||||
export type SecurityAuditScope = ResourceScope<'securityAudit', 'generate'>;
|
||||
export type SourceControlScope = ResourceScope<'sourceControl', 'pull' | 'push' | 'manage'>;
|
||||
export type TagScope = ResourceScope<'tag'>;
|
||||
export type UserScope = ResourceScope<'user', DefaultOperations | 'resetPassword' | 'changeRole'>;
|
||||
|
@ -69,7 +73,9 @@ export type Scope =
|
|||
| LicenseScope
|
||||
| LogStreamingScope
|
||||
| OrchestrationScope
|
||||
| ProjectScope
|
||||
| SamlScope
|
||||
| SecurityAuditScope
|
||||
| SourceControlScope
|
||||
| TagScope
|
||||
| UserScope
|
||||
|
@ -84,5 +90,10 @@ export type ProjectScopes = GetScopeLevel<'project'>;
|
|||
export type ResourceScopes = GetScopeLevel<'resource'>;
|
||||
export type ScopeLevels = GlobalScopes & (ProjectScopes | (ProjectScopes & ResourceScopes));
|
||||
|
||||
export type MaskLevel = 'sharing';
|
||||
export type GetMaskLevel<T extends MaskLevel> = Record<T, Scope[]>;
|
||||
export type SharingMasks = GetMaskLevel<'sharing'>;
|
||||
export type MaskLevels = SharingMasks;
|
||||
|
||||
export type ScopeMode = 'oneOf' | 'allOf';
|
||||
export type ScopeOptions = { mode: ScopeMode };
|
||||
|
|
|
@ -33,6 +33,7 @@ describe('hasScope', () => {
|
|||
{
|
||||
global: memberPermissions,
|
||||
},
|
||||
undefined,
|
||||
{ mode: 'oneOf' },
|
||||
),
|
||||
).toBe(true);
|
||||
|
@ -43,6 +44,7 @@ describe('hasScope', () => {
|
|||
{
|
||||
global: memberPermissions,
|
||||
},
|
||||
undefined,
|
||||
{ mode: 'allOf' },
|
||||
),
|
||||
).toBe(true);
|
||||
|
@ -53,6 +55,7 @@ describe('hasScope', () => {
|
|||
{
|
||||
global: memberPermissions,
|
||||
},
|
||||
undefined,
|
||||
{ mode: 'oneOf' },
|
||||
),
|
||||
).toBe(false);
|
||||
|
@ -63,6 +66,7 @@ describe('hasScope', () => {
|
|||
{
|
||||
global: memberPermissions,
|
||||
},
|
||||
undefined,
|
||||
{ mode: 'allOf' },
|
||||
),
|
||||
).toBe(false);
|
||||
|
@ -95,6 +99,7 @@ describe('hasScope', () => {
|
|||
{
|
||||
global: ownerPermissions,
|
||||
},
|
||||
undefined,
|
||||
{ mode: 'allOf' },
|
||||
),
|
||||
).toBe(true);
|
||||
|
@ -105,6 +110,7 @@ describe('hasScope', () => {
|
|||
{
|
||||
global: memberPermissions,
|
||||
},
|
||||
undefined,
|
||||
{ mode: 'allOf' },
|
||||
),
|
||||
).toBe(false);
|
||||
|
@ -115,6 +121,7 @@ describe('hasScope', () => {
|
|||
{
|
||||
global: memberPermissions,
|
||||
},
|
||||
undefined,
|
||||
{ mode: 'allOf' },
|
||||
),
|
||||
).toBe(false);
|
||||
|
@ -125,8 +132,127 @@ describe('hasScope', () => {
|
|||
{
|
||||
global: memberPermissions,
|
||||
},
|
||||
undefined,
|
||||
{ mode: 'allOf' },
|
||||
),
|
||||
).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('hasScope masking', () => {
|
||||
test('should return true without mask when scopes present', () => {
|
||||
expect(
|
||||
hasScope('workflow:read', {
|
||||
global: ['user:list'],
|
||||
project: ['workflow:read'],
|
||||
resource: [],
|
||||
}),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
test('should return false without mask when scopes are not present', () => {
|
||||
expect(
|
||||
hasScope('workflow:update', {
|
||||
global: ['user:list'],
|
||||
project: ['workflow:read'],
|
||||
resource: [],
|
||||
}),
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
test('should return false when mask does not include scope but scopes list does contain required scope', () => {
|
||||
expect(
|
||||
hasScope(
|
||||
'workflow:update',
|
||||
{
|
||||
global: ['user:list'],
|
||||
project: ['workflow:read', 'workflow:update'],
|
||||
resource: [],
|
||||
},
|
||||
{
|
||||
sharing: ['workflow:read'],
|
||||
},
|
||||
),
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
test('should return true when mask does include scope and scope list includes scope', () => {
|
||||
expect(
|
||||
hasScope(
|
||||
'workflow:update',
|
||||
{
|
||||
global: ['user:list'],
|
||||
project: ['workflow:read', 'workflow:update'],
|
||||
resource: [],
|
||||
},
|
||||
{
|
||||
sharing: ['workflow:read', 'workflow:update'],
|
||||
},
|
||||
),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
test('should return true when mask does include scope and scopes list includes scope on multiple levels', () => {
|
||||
expect(
|
||||
hasScope(
|
||||
'workflow:update',
|
||||
{
|
||||
global: ['user:list'],
|
||||
project: ['workflow:read', 'workflow:update'],
|
||||
resource: ['workflow:update'],
|
||||
},
|
||||
{
|
||||
sharing: ['workflow:read', 'workflow:update'],
|
||||
},
|
||||
),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
test('should not mask out global scopes', () => {
|
||||
expect(
|
||||
hasScope(
|
||||
'workflow:update',
|
||||
{
|
||||
global: ['workflow:read', 'workflow:update'],
|
||||
project: ['workflow:read'],
|
||||
resource: ['workflow:read'],
|
||||
},
|
||||
{
|
||||
sharing: ['workflow:read'],
|
||||
},
|
||||
),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
test('should return false when scope is in neither mask nor scope list', () => {
|
||||
expect(
|
||||
hasScope(
|
||||
'workflow:update',
|
||||
{
|
||||
global: ['workflow:read'],
|
||||
project: ['workflow:read'],
|
||||
resource: ['workflow:read'],
|
||||
},
|
||||
{
|
||||
sharing: ['workflow:read'],
|
||||
},
|
||||
),
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
test('should return false when scope is in mask but not in scope list', () => {
|
||||
expect(
|
||||
hasScope(
|
||||
'workflow:update',
|
||||
{
|
||||
global: ['workflow:read'],
|
||||
project: ['workflow:read'],
|
||||
resource: ['workflow:read'],
|
||||
},
|
||||
{
|
||||
sharing: ['workflow:read', 'workflow:update'],
|
||||
},
|
||||
),
|
||||
).toBe(false);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -361,7 +361,7 @@ const config = (module.exports = {
|
|||
/**
|
||||
* https://github.com/import-js/eslint-plugin-import/blob/main/docs/rules/no-unresolved.md
|
||||
*/
|
||||
'import/no-unresolved': 'error',
|
||||
'import/no-unresolved': ['error', { ignore: ['^virtual:'] }],
|
||||
|
||||
/**
|
||||
* https://github.com/import-js/eslint-plugin-import/blob/master/docs/rules/order.md
|
||||
|
|
|
@ -35,4 +35,20 @@ module.exports = {
|
|||
'@typescript-eslint/no-unsafe-enum-comparison': 'warn',
|
||||
'@typescript-eslint/no-unsafe-declaration-merging': 'warn',
|
||||
},
|
||||
|
||||
overrides: [
|
||||
{
|
||||
files: ['./src/decorators/**/*.ts'],
|
||||
rules: {
|
||||
'@typescript-eslint/ban-types': [
|
||||
'warn',
|
||||
{
|
||||
types: {
|
||||
Function: false,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "n8n",
|
||||
"version": "1.41.0",
|
||||
"version": "1.42.0",
|
||||
"description": "n8n Workflow Automation Tool",
|
||||
"license": "SEE LICENSE IN LICENSE.md",
|
||||
"homepage": "https://n8n.io",
|
||||
|
|
|
@ -207,13 +207,6 @@ export abstract class AbstractServer {
|
|||
// Register a handler
|
||||
this.app.all(`/${this.endpointFormTest}/:path(*)`, webhookRequestHandler(testWebhooks));
|
||||
this.app.all(`/${this.endpointWebhookTest}/:path(*)`, webhookRequestHandler(testWebhooks));
|
||||
|
||||
// Removes a test webhook
|
||||
// TODO UM: check if this needs validation with user management.
|
||||
this.app.delete(
|
||||
`/${this.restEndpoint}/test-webhook/:id`,
|
||||
send(async (req) => await testWebhooks.cancelWebhook(req.params.id)),
|
||||
);
|
||||
}
|
||||
|
||||
// Block bots from scanning the application
|
||||
|
@ -230,6 +223,16 @@ export abstract class AbstractServer {
|
|||
this.setupDevMiddlewares();
|
||||
}
|
||||
|
||||
if (this.testWebhooksEnabled) {
|
||||
const testWebhooks = Container.get(TestWebhooks);
|
||||
// Removes a test webhook
|
||||
// TODO UM: check if this needs validation with user management.
|
||||
this.app.delete(
|
||||
`/${this.restEndpoint}/test-webhook/:id`,
|
||||
send(async (req) => await testWebhooks.cancelWebhook(req.params.id)),
|
||||
);
|
||||
}
|
||||
|
||||
// Setup body parsing middleware after the webhook handlers are setup
|
||||
this.app.use(bodyParser);
|
||||
|
||||
|
|
|
@ -84,7 +84,7 @@ export class ActiveWebhooks implements IWebhookManager {
|
|||
|
||||
const workflowData = await this.workflowRepository.findOne({
|
||||
where: { id: webhook.workflowId },
|
||||
relations: ['shared', 'shared.user'],
|
||||
relations: { shared: { project: { projectRelations: true } } },
|
||||
});
|
||||
|
||||
if (workflowData === null) {
|
||||
|
@ -102,9 +102,7 @@ export class ActiveWebhooks implements IWebhookManager {
|
|||
settings: workflowData.settings,
|
||||
});
|
||||
|
||||
const additionalData = await WorkflowExecuteAdditionalData.getBase(
|
||||
workflowData.shared[0].user.id,
|
||||
);
|
||||
const additionalData = await WorkflowExecuteAdditionalData.getBase();
|
||||
|
||||
const webhookData = NodeHelpers.getNodeWebhooks(
|
||||
workflow,
|
||||
|
|
|
@ -229,7 +229,6 @@ export class ActiveWorkflowManager {
|
|||
async clearWebhooks(workflowId: string) {
|
||||
const workflowData = await this.workflowRepository.findOne({
|
||||
where: { id: workflowId },
|
||||
relations: ['shared', 'shared.user'],
|
||||
});
|
||||
|
||||
if (workflowData === null) {
|
||||
|
@ -249,9 +248,7 @@ export class ActiveWorkflowManager {
|
|||
|
||||
const mode = 'internal';
|
||||
|
||||
const additionalData = await WorkflowExecuteAdditionalData.getBase(
|
||||
workflowData.shared[0].user.id,
|
||||
);
|
||||
const additionalData = await WorkflowExecuteAdditionalData.getBase();
|
||||
|
||||
const webhooks = WebhookHelpers.getWorkflowWebhooks(workflow, additionalData, undefined, true);
|
||||
|
||||
|
@ -570,13 +567,7 @@ export class ActiveWorkflowManager {
|
|||
);
|
||||
}
|
||||
|
||||
const sharing = dbWorkflow.shared.find((shared) => shared.role === 'workflow:owner');
|
||||
|
||||
if (!sharing) {
|
||||
throw new WorkflowActivationError(`Workflow ${dbWorkflow.display()} has no owner`);
|
||||
}
|
||||
|
||||
const additionalData = await WorkflowExecuteAdditionalData.getBase(sharing.user.id);
|
||||
const additionalData = await WorkflowExecuteAdditionalData.getBase();
|
||||
|
||||
if (shouldAddWebhooks) {
|
||||
await this.addWebhooks(workflow, additionalData, 'trigger', activationMode);
|
||||
|
@ -711,6 +702,7 @@ export class ActiveWorkflowManager {
|
|||
* @param {string} workflowId The id of the workflow to deactivate
|
||||
*/
|
||||
// TODO: this should happen in a transaction
|
||||
// maybe, see: https://github.com/n8n-io/n8n/pull/8904#discussion_r1530150510
|
||||
async remove(workflowId: string) {
|
||||
if (this.orchestrationService.isMultiMainSetupEnabled) {
|
||||
try {
|
||||
|
|
|
@ -30,15 +30,15 @@ import { ICredentialsHelper, NodeHelpers, Workflow, ApplicationError } from 'n8n
|
|||
import type { ICredentialsDb } from '@/Interfaces';
|
||||
|
||||
import type { CredentialsEntity } from '@db/entities/CredentialsEntity';
|
||||
import { NodeTypes } from '@/NodeTypes';
|
||||
import { CredentialTypes } from '@/CredentialTypes';
|
||||
import { CredentialsOverwrites } from '@/CredentialsOverwrites';
|
||||
import { RESPONSE_ERROR_MESSAGES } from './constants';
|
||||
|
||||
import { Logger } from '@/Logger';
|
||||
import { CredentialsRepository } from '@db/repositories/credentials.repository';
|
||||
import { SharedCredentialsRepository } from '@db/repositories/sharedCredentials.repository';
|
||||
import { CredentialNotFoundError } from './errors/credential-not-found.error';
|
||||
import { In } from '@n8n/typeorm';
|
||||
import { CacheService } from './services/cache/cache.service';
|
||||
|
||||
const mockNode = {
|
||||
name: '',
|
||||
|
@ -77,12 +77,11 @@ const mockNodeTypes: INodeTypes = {
|
|||
@Service()
|
||||
export class CredentialsHelper extends ICredentialsHelper {
|
||||
constructor(
|
||||
private readonly logger: Logger,
|
||||
private readonly credentialTypes: CredentialTypes,
|
||||
private readonly nodeTypes: NodeTypes,
|
||||
private readonly credentialsOverwrites: CredentialsOverwrites,
|
||||
private readonly credentialsRepository: CredentialsRepository,
|
||||
private readonly sharedCredentialsRepository: SharedCredentialsRepository,
|
||||
private readonly cacheService: CacheService,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
@ -245,7 +244,6 @@ export class CredentialsHelper extends ICredentialsHelper {
|
|||
async getCredentials(
|
||||
nodeCredential: INodeCredentialsDetails,
|
||||
type: string,
|
||||
userId?: string,
|
||||
): Promise<Credentials> {
|
||||
if (!nodeCredential.id) {
|
||||
throw new ApplicationError('Found credential with no ID.', {
|
||||
|
@ -257,14 +255,10 @@ export class CredentialsHelper extends ICredentialsHelper {
|
|||
let credential: CredentialsEntity;
|
||||
|
||||
try {
|
||||
credential = userId
|
||||
? await this.sharedCredentialsRepository
|
||||
.findOneOrFail({
|
||||
relations: ['credentials'],
|
||||
where: { credentials: { id: nodeCredential.id, type }, userId },
|
||||
})
|
||||
.then((shared) => shared.credentials)
|
||||
: await this.credentialsRepository.findOneByOrFail({ id: nodeCredential.id, type });
|
||||
credential = await this.credentialsRepository.findOneByOrFail({
|
||||
id: nodeCredential.id,
|
||||
type,
|
||||
});
|
||||
} catch (error) {
|
||||
throw new CredentialNotFoundError(nodeCredential.id, type);
|
||||
}
|
||||
|
@ -338,7 +332,7 @@ export class CredentialsHelper extends ICredentialsHelper {
|
|||
|
||||
await additionalData?.secretsHelpers?.waitForInit();
|
||||
|
||||
const canUseSecrets = await this.credentialOwnedByOwner(nodeCredentials);
|
||||
const canUseSecrets = await this.credentialCanUseExternalSecrets(nodeCredentials);
|
||||
|
||||
return this.applyDefaultsAndOverwrites(
|
||||
additionalData,
|
||||
|
@ -457,28 +451,39 @@ export class CredentialsHelper extends ICredentialsHelper {
|
|||
await this.credentialsRepository.update(findQuery, newCredentialsData);
|
||||
}
|
||||
|
||||
async credentialOwnedByOwner(nodeCredential: INodeCredentialsDetails): Promise<boolean> {
|
||||
async credentialCanUseExternalSecrets(nodeCredential: INodeCredentialsDetails): Promise<boolean> {
|
||||
if (!nodeCredential.id) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const credential = await this.sharedCredentialsRepository.findOne({
|
||||
where: {
|
||||
role: 'credential:owner',
|
||||
user: {
|
||||
role: 'global:owner',
|
||||
},
|
||||
credentials: {
|
||||
id: nodeCredential.id,
|
||||
},
|
||||
},
|
||||
});
|
||||
return (
|
||||
(await this.cacheService.get(`credential-can-use-secrets:${nodeCredential.id}`, {
|
||||
refreshFn: async () => {
|
||||
const credential = await this.sharedCredentialsRepository.findOne({
|
||||
where: {
|
||||
role: 'credential:owner',
|
||||
project: {
|
||||
projectRelations: {
|
||||
role: In(['project:personalOwner', 'project:admin']),
|
||||
user: {
|
||||
role: In(['global:owner', 'global:admin']),
|
||||
},
|
||||
},
|
||||
},
|
||||
credentials: {
|
||||
id: nodeCredential.id!,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!credential) {
|
||||
return false;
|
||||
}
|
||||
if (!credential) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
return true;
|
||||
},
|
||||
})) ?? false
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -535,7 +535,8 @@ export interface IWorkflowExecutionDataProcess {
|
|||
pushRef?: string;
|
||||
startNodes?: StartNodeData[];
|
||||
workflowData: IWorkflowBase;
|
||||
userId: string;
|
||||
userId?: string;
|
||||
projectId?: string;
|
||||
}
|
||||
|
||||
export interface IWorkflowExecuteProcess {
|
||||
|
|
|
@ -34,6 +34,10 @@ import { License } from '@/License';
|
|||
import { EventsService } from '@/services/events.service';
|
||||
import { NodeTypes } from '@/NodeTypes';
|
||||
import { Telemetry } from '@/telemetry';
|
||||
import type { Project } from '@db/entities/Project';
|
||||
import type { ProjectRole } from '@db/entities/ProjectRelation';
|
||||
import { ProjectRelationRepository } from './databases/repositories/projectRelation.repository';
|
||||
import { SharedCredentialsRepository } from './databases/repositories/sharedCredentials.repository';
|
||||
|
||||
function userToPayload(user: User): {
|
||||
userId: string;
|
||||
|
@ -62,6 +66,8 @@ export class InternalHooks {
|
|||
private readonly instanceSettings: InstanceSettings,
|
||||
private readonly eventBus: MessageEventBus,
|
||||
private readonly license: License,
|
||||
private readonly projectRelationRepository: ProjectRelationRepository,
|
||||
private readonly sharedCredentialsRepository: SharedCredentialsRepository,
|
||||
) {
|
||||
eventsService.on(
|
||||
'telemetry.onFirstProductionWorkflowSuccess',
|
||||
|
@ -164,7 +170,12 @@ export class InternalHooks {
|
|||
);
|
||||
}
|
||||
|
||||
async onWorkflowCreated(user: User, workflow: IWorkflowBase, publicApi: boolean): Promise<void> {
|
||||
async onWorkflowCreated(
|
||||
user: User,
|
||||
workflow: IWorkflowBase,
|
||||
project: Project,
|
||||
publicApi: boolean,
|
||||
): Promise<void> {
|
||||
const { nodeGraph } = TelemetryHelpers.generateNodesGraph(workflow, this.nodeTypes);
|
||||
void Promise.all([
|
||||
this.eventBus.sendAuditEvent({
|
||||
|
@ -180,6 +191,8 @@ export class InternalHooks {
|
|||
workflow_id: workflow.id,
|
||||
node_graph_string: JSON.stringify(nodeGraph),
|
||||
public_api: publicApi,
|
||||
project_id: project.id,
|
||||
project_type: project.type,
|
||||
}),
|
||||
]);
|
||||
}
|
||||
|
@ -208,19 +221,32 @@ export class InternalHooks {
|
|||
isCloudDeployment,
|
||||
});
|
||||
|
||||
let userRole: 'owner' | 'sharee' | 'member' | undefined = undefined;
|
||||
const role = await this.sharedWorkflowRepository.findSharingRole(user.id, workflow.id);
|
||||
if (role) {
|
||||
userRole = role === 'workflow:owner' ? 'owner' : 'sharee';
|
||||
} else {
|
||||
const workflowOwner = await this.sharedWorkflowRepository.getWorkflowOwningProject(
|
||||
workflow.id,
|
||||
);
|
||||
|
||||
if (workflowOwner) {
|
||||
const projectRole = await this.projectRelationRepository.findProjectRole({
|
||||
userId: user.id,
|
||||
projectId: workflowOwner.id,
|
||||
});
|
||||
|
||||
if (projectRole && projectRole !== 'project:personalOwner') {
|
||||
userRole = 'member';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const notesCount = Object.keys(nodeGraph.notes).length;
|
||||
const overlappingCount = Object.values(nodeGraph.notes).filter(
|
||||
(note) => note.overlapping,
|
||||
).length;
|
||||
|
||||
let userRole: 'owner' | 'sharee' | undefined = undefined;
|
||||
if (user.id && workflow.id) {
|
||||
const role = await this.sharedWorkflowRepository.findSharingRole(user.id, workflow.id);
|
||||
if (role) {
|
||||
userRole = role === 'workflow:owner' ? 'owner' : 'sharee';
|
||||
}
|
||||
}
|
||||
|
||||
void Promise.all([
|
||||
this.eventBus.sendAuditEvent({
|
||||
eventName: 'n8n.audit.workflow.updated',
|
||||
|
@ -865,6 +891,9 @@ export class InternalHooks {
|
|||
credential_id: string;
|
||||
public_api: boolean;
|
||||
}): Promise<void> {
|
||||
const project = await this.sharedCredentialsRepository.findCredentialOwningProject(
|
||||
userCreatedCredentialsData.credential_id,
|
||||
);
|
||||
void Promise.all([
|
||||
this.eventBus.sendAuditEvent({
|
||||
eventName: 'n8n.audit.user.credentials.created',
|
||||
|
@ -880,6 +909,8 @@ export class InternalHooks {
|
|||
credential_type: userCreatedCredentialsData.credential_type,
|
||||
credential_id: userCreatedCredentialsData.credential_id,
|
||||
instance_id: this.instanceSettings.instanceId,
|
||||
project_id: project?.id,
|
||||
project_type: project?.type,
|
||||
}),
|
||||
]);
|
||||
}
|
||||
|
@ -1207,4 +1238,27 @@ export class InternalHooks {
|
|||
}): Promise<void> {
|
||||
return await this.telemetry.track('User updated external secrets settings', saveData);
|
||||
}
|
||||
|
||||
async onTeamProjectCreated(data: { user_id: string; role: GlobalRole }) {
|
||||
return await this.telemetry.track('User created project', data);
|
||||
}
|
||||
|
||||
async onTeamProjectDeleted(data: {
|
||||
user_id: string;
|
||||
role: GlobalRole;
|
||||
project_id: string;
|
||||
removal_type: 'delete' | 'transfer';
|
||||
target_project_id?: string;
|
||||
}) {
|
||||
return await this.telemetry.track('User deleted project', data);
|
||||
}
|
||||
|
||||
async onTeamProjectUpdated(data: {
|
||||
user_id: string;
|
||||
role: GlobalRole;
|
||||
project_id: string;
|
||||
members: Array<{ user_id: string; role: ProjectRole }>;
|
||||
}) {
|
||||
return await this.telemetry.track('Project settings updated', data);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -93,7 +93,7 @@ export const getAuthIdentityByLdapId = async (
|
|||
idAttributeValue: string,
|
||||
): Promise<AuthIdentity | null> => {
|
||||
return await Container.get(AuthIdentityRepository).findOne({
|
||||
relations: ['user'],
|
||||
relations: { user: true },
|
||||
where: {
|
||||
providerId: idAttributeValue,
|
||||
providerType: 'ldap',
|
||||
|
@ -140,7 +140,7 @@ export const getLdapIds = async (): Promise<string[]> => {
|
|||
|
||||
export const getLdapUsers = async (): Promise<User[]> => {
|
||||
const identities = await Container.get(AuthIdentityRepository).find({
|
||||
relations: ['user'],
|
||||
relations: { user: true },
|
||||
where: {
|
||||
providerType: 'ldap',
|
||||
},
|
||||
|
@ -179,10 +179,15 @@ export const processUsers = async (
|
|||
toUpdateUsers: Array<[string, User]>,
|
||||
toDisableUsers: string[],
|
||||
): Promise<void> => {
|
||||
const userRepository = Container.get(UserRepository);
|
||||
await Db.transaction(async (transactionManager) => {
|
||||
return await Promise.all([
|
||||
...toCreateUsers.map(async ([ldapId, user]) => {
|
||||
const authIdentity = AuthIdentity.create(await transactionManager.save(user), ldapId);
|
||||
const { user: savedUser } = await userRepository.createUserWithProject(
|
||||
user,
|
||||
transactionManager,
|
||||
);
|
||||
const authIdentity = AuthIdentity.create(savedUser, ldapId);
|
||||
return await transactionManager.save(authIdentity);
|
||||
}),
|
||||
...toUpdateUsers.map(async ([ldapId, user]) => {
|
||||
|
@ -202,7 +207,13 @@ export const processUsers = async (
|
|||
providerId: ldapId,
|
||||
});
|
||||
if (authIdentity?.userId) {
|
||||
await transactionManager.update(User, { id: authIdentity?.userId }, { disabled: true });
|
||||
const user = await transactionManager.findOneBy(User, { id: authIdentity.userId });
|
||||
|
||||
if (user) {
|
||||
user.disabled = true;
|
||||
await transactionManager.save(user);
|
||||
}
|
||||
|
||||
await transactionManager.delete(AuthIdentity, { userId: authIdentity?.userId });
|
||||
}
|
||||
}),
|
||||
|
@ -266,14 +277,11 @@ export const createLdapAuthIdentity = async (user: User, ldapId: string) => {
|
|||
};
|
||||
|
||||
export const createLdapUserOnLocalDb = async (data: Partial<User>, ldapId: string) => {
|
||||
const user = await Container.get(UserRepository).save(
|
||||
{
|
||||
password: randomPassword(),
|
||||
role: 'global:member',
|
||||
...data,
|
||||
},
|
||||
{ transaction: false },
|
||||
);
|
||||
const { user } = await Container.get(UserRepository).createUserWithProject({
|
||||
password: randomPassword(),
|
||||
role: 'global:member',
|
||||
...data,
|
||||
});
|
||||
await createLdapAuthIdentity(user, ldapId);
|
||||
return user;
|
||||
};
|
||||
|
@ -281,7 +289,11 @@ export const createLdapUserOnLocalDb = async (data: Partial<User>, ldapId: strin
|
|||
export const updateLdapUserOnLocalDb = async (identity: AuthIdentity, data: Partial<User>) => {
|
||||
const userId = identity?.user?.id;
|
||||
if (userId) {
|
||||
await Container.get(UserRepository).update({ id: userId }, data);
|
||||
const user = await Container.get(UserRepository).findOneBy({ id: userId });
|
||||
|
||||
if (user) {
|
||||
await Container.get(UserRepository).save({ id: userId, ...data }, { transaction: true });
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -349,7 +349,7 @@ export class LdapService {
|
|||
localAdUsers,
|
||||
);
|
||||
|
||||
this.logger.debug('LDAP - Users processed', {
|
||||
this.logger.debug('LDAP - Users to process', {
|
||||
created: usersToCreate.length,
|
||||
updated: usersToUpdate.length,
|
||||
disabled: usersToDisable.length,
|
||||
|
|
|
@ -61,8 +61,8 @@ export class License {
|
|||
return autoRenewEnabled;
|
||||
}
|
||||
|
||||
async init(instanceType: N8nInstanceType = 'main') {
|
||||
if (this.manager) {
|
||||
async init(instanceType: N8nInstanceType = 'main', forceRecreate = false) {
|
||||
if (this.manager && !forceRecreate) {
|
||||
this.logger.warn('License manager already initialized or shutting down');
|
||||
return;
|
||||
}
|
||||
|
@ -289,6 +289,18 @@ export class License {
|
|||
return this.isFeatureEnabled(LICENSE_FEATURES.WORKER_VIEW);
|
||||
}
|
||||
|
||||
isProjectRoleAdminLicensed() {
|
||||
return this.isFeatureEnabled(LICENSE_FEATURES.PROJECT_ROLE_ADMIN);
|
||||
}
|
||||
|
||||
isProjectRoleEditorLicensed() {
|
||||
return this.isFeatureEnabled(LICENSE_FEATURES.PROJECT_ROLE_EDITOR);
|
||||
}
|
||||
|
||||
isProjectRoleViewerLicensed() {
|
||||
return this.isFeatureEnabled(LICENSE_FEATURES.PROJECT_ROLE_VIEWER);
|
||||
}
|
||||
|
||||
getCurrentEntitlements() {
|
||||
return this.manager?.getCurrentEntitlements() ?? [];
|
||||
}
|
||||
|
@ -341,6 +353,10 @@ export class License {
|
|||
);
|
||||
}
|
||||
|
||||
getTeamProjectLimit() {
|
||||
return this.getFeatureValue(LICENSE_QUOTAS.TEAM_PROJECT_LIMIT) ?? 0;
|
||||
}
|
||||
|
||||
getPlanName(): string {
|
||||
return this.getFeatureValue('planName') ?? 'Community';
|
||||
}
|
||||
|
@ -359,6 +375,6 @@ export class License {
|
|||
|
||||
async reinit() {
|
||||
this.manager?.reset();
|
||||
await this.init();
|
||||
await this.init('main', true);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -25,10 +25,16 @@ export class MfaService {
			secret,
			recoveryCodes,
		);
		return await this.userRepository.update(userId, {
			mfaSecret: encryptedSecret,
			mfaRecoveryCodes: encryptedRecoveryCodes,
		});

		const user = await this.userRepository.findOneBy({ id: userId });
		if (user) {
			Object.assign(user, {
				mfaSecret: encryptedSecret,
				mfaRecoveryCodes: encryptedRecoveryCodes,
			});

			await this.userRepository.save(user);
		}
	}

	public encryptSecretAndRecoveryCodes(rawSecret: string, rawRecoveryCodes: string[]) {

@@ -56,7 +62,12 @@ export class MfaService {
	}

	public async enableMfa(userId: string) {
		await this.userRepository.update(userId, { mfaEnabled: true });
		const user = await this.userRepository.findOneBy({ id: userId });
		if (user) {
			user.mfaEnabled = true;

			await this.userRepository.save(user);
		}
	}

	public encryptRecoveryCodes(mfaRecoveryCodes: string[]) {

@@ -64,10 +75,15 @@ export class MfaService {
	}

	public async disableMfa(userId: string) {
		await this.userRepository.update(userId, {
			mfaEnabled: false,
			mfaSecret: null,
			mfaRecoveryCodes: [],
		});
		const user = await this.userRepository.findOneBy({ id: userId });

		if (user) {
			Object.assign(user, {
				mfaEnabled: false,
				mfaSecret: null,
				mfaRecoveryCodes: [],
			});
			await this.userRepository.save(user);
		}
	}
}

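The hunks above swap `userRepository.update()` for a find-then-save round trip. Below is a hedged sketch of that pattern against a generic TypeORM-style repository; the usual reason to prefer `save()` is that it runs entity listeners and column transformers, which a bare `update()` bypasses, though that rationale is an assumption here rather than something the diff states.

```ts
import type { Repository } from 'typeorm';

interface UserLike {
	id: string;
	mfaEnabled: boolean;
}

// Loads the row, mutates it, and persists via save() instead of issuing a bare UPDATE.
async function enableMfaLike(repo: Repository<UserLike>, userId: string): Promise<void> {
	const user = await repo.findOneBy({ id: userId });
	if (user) {
		user.mfaEnabled = true;
		await repo.save(user);
	}
}
```
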
@@ -1,4 +1,5 @@
import type { IDataObject, ExecutionStatus } from 'n8n-workflow';
import type { ExecutionStatus, ICredentialDataDecryptedObject } from 'n8n-workflow';

import type { WorkflowEntity } from '@db/entities/WorkflowEntity';
import type { TagEntity } from '@db/entities/TagEntity';
import type { Risk } from '@/security-audit/types';

@@ -127,7 +128,14 @@ export declare namespace UserRequest {
}

export declare namespace CredentialRequest {
	type Create = AuthenticatedRequest<{}, {}, { type: string; name: string; data: IDataObject }, {}>;
	type Create = AuthenticatedRequest<
		{},
		{},
		{ type: string; name: string; data: ICredentialDataDecryptedObject },
		{}
	>;

	type Delete = AuthenticatedRequest<{ id: string }, {}, {}, Record<string, string>>;
}

export type OperationID = 'getUsers' | 'getUser';

@@ -1,11 +1,11 @@
import { authorize } from '@/PublicApi/v1/shared/middlewares/global.middleware';
import { globalScope } from '@/PublicApi/v1/shared/middlewares/global.middleware';
import type { Response } from 'express';
import type { AuditRequest } from '@/PublicApi/types';
import Container from 'typedi';

export = {
	generateAudit: [
		authorize(['global:owner', 'global:admin']),
		globalScope('securityAudit:generate'),
		async (req: AuditRequest.Generate, res: Response): Promise<Response> => {
			try {
				const { SecurityAuditService } = await import('@/security-audit/SecurityAudit.service');

@@ -4,9 +4,8 @@ import type express from 'express';
import { CredentialsHelper } from '@/CredentialsHelper';
import { CredentialTypes } from '@/CredentialTypes';
import type { CredentialsEntity } from '@db/entities/CredentialsEntity';
import type { CredentialRequest } from '@/requests';
import type { CredentialTypeRequest } from '../../../types';
import { authorize } from '../../shared/middlewares/global.middleware';
import type { CredentialTypeRequest, CredentialRequest } from '../../../types';
import { projectScope } from '../../shared/middlewares/global.middleware';
import { validCredentialsProperties, validCredentialType } from './credentials.middleware';

import {

@@ -23,7 +22,6 @@ import { Container } from 'typedi';

export = {
	createCredential: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		validCredentialType,
		validCredentialsProperties,
		async (

@@ -47,7 +45,7 @@ export = {
		},
	],
	deleteCredential: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		projectScope('credential:delete', 'credential'),
		async (
			req: CredentialRequest.Delete,
			res: express.Response,

@@ -75,7 +73,6 @@ export = {
	],

	getCredentialType: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		async (req: CredentialTypeRequest.Get, res: express.Response): Promise<express.Response> => {
			const { credentialTypeName } = req.params;

@@ -16,6 +16,7 @@ import type { CredentialRequest } from '@/requests';
import { Container } from 'typedi';
import { CredentialsRepository } from '@db/repositories/credentials.repository';
import { SharedCredentialsRepository } from '@db/repositories/sharedCredentials.repository';
import { ProjectRepository } from '@/databases/repositories/project.repository';
import { InternalHooks } from '@/InternalHooks';

export async function getCredentials(credentialId: string): Promise<ICredentialsDb | null> {

@@ -28,7 +29,7 @@ export async function getSharedCredentials(
): Promise<SharedCredentials | null> {
	return await Container.get(SharedCredentialsRepository).findOne({
		where: {
			userId,
			project: { projectRelations: { userId } },
			credentialsId: credentialId,
		},
		relations: ['credentials'],

@@ -66,10 +67,14 @@ export async function saveCredential(

	const newSharedCredential = new SharedCredentials();

	const personalProject = await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(
		user.id,
	);

	Object.assign(newSharedCredential, {
		role: 'credential:owner',
		user,
		credentials: savedCredential,
		projectId: personalProject.id,
	});

	await transactionManager.save<SharedCredentials>(newSharedCredential);

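The `saveCredential` hunk above attaches the sharing record to the creator's personal project rather than directly to the user. A hedged sketch of that ownership shape, reusing the entity and repository imports that appear in this diff; the helper name is made up for illustration.

```ts
import { Container } from 'typedi';
import { SharedCredentials } from '@db/entities/SharedCredentials';
import { ProjectRepository } from '@/databases/repositories/project.repository';

// Builds the sharing row that marks a personal project as the credential's owner.
async function buildOwnerSharing(userId: string, credentialsId: string): Promise<SharedCredentials> {
	const personalProject = await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(
		userId,
	);

	const sharing = new SharedCredentials();
	Object.assign(sharing, {
		role: 'credential:owner',
		credentialsId,
		projectId: personalProject.id,
	});
	return sharing;
}
```
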
@@ -3,7 +3,7 @@ import { Container } from 'typedi';
import { replaceCircularReferences } from 'n8n-workflow';

import { ActiveExecutions } from '@/ActiveExecutions';
import { authorize, validCursor } from '../../shared/middlewares/global.middleware';
import { validCursor } from '../../shared/middlewares/global.middleware';
import type { ExecutionRequest } from '../../../types';
import { getSharedWorkflowIds } from '../workflows/workflows.service';
import { encodeNextCursor } from '../../shared/services/pagination.service';

@@ -12,9 +12,8 @@ import { ExecutionRepository } from '@db/repositories/execution.repository';

export = {
	deleteExecution: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		async (req: ExecutionRequest.Delete, res: express.Response): Promise<express.Response> => {
			const sharedWorkflowsIds = await getSharedWorkflowIds(req.user);
			const sharedWorkflowsIds = await getSharedWorkflowIds(req.user, ['workflow:delete']);

			// user does not have workflows hence no executions
			// or the execution they are trying to access belongs to a workflow they do not own

@@ -44,9 +43,8 @@ export = {
		},
	],
	getExecution: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		async (req: ExecutionRequest.Get, res: express.Response): Promise<express.Response> => {
			const sharedWorkflowsIds = await getSharedWorkflowIds(req.user);
			const sharedWorkflowsIds = await getSharedWorkflowIds(req.user, ['workflow:read']);

			// user does not have workflows hence no executions
			// or the execution they are trying to access belongs to a workflow they do not own

@@ -75,7 +73,6 @@ export = {
		},
	],
	getExecutions: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		validCursor,
		async (req: ExecutionRequest.GetAll, res: express.Response): Promise<express.Response> => {
			const {

@@ -86,7 +83,7 @@ export = {
				workflowId = undefined,
			} = req.query;

			const sharedWorkflowsIds = await getSharedWorkflowIds(req.user);
			const sharedWorkflowsIds = await getSharedWorkflowIds(req.user, ['workflow:read']);

			// user does not have workflows hence no executions
			// or the execution they are trying to access belongs to a workflow they do not own

@@ -2,7 +2,7 @@ import type express from 'express';
import { Container } from 'typedi';
import type { StatusResult } from 'simple-git';
import type { PublicSourceControlRequest } from '../../../types';
import { authorize } from '../../shared/middlewares/global.middleware';
import { globalScope } from '../../shared/middlewares/global.middleware';
import type { ImportResult } from '@/environments/sourceControl/types/importResult';
import { SourceControlService } from '@/environments/sourceControl/sourceControl.service.ee';
import { SourceControlPreferencesService } from '@/environments/sourceControl/sourceControlPreferences.service.ee';

@@ -14,7 +14,7 @@ import { InternalHooks } from '@/InternalHooks';

export = {
	pull: [
		authorize(['global:owner', 'global:admin']),
		globalScope('sourceControl:pull'),
		async (
			req: PublicSourceControlRequest.Pull,
			res: express.Response,

@@ -1,7 +1,7 @@
import type express from 'express';

import type { TagEntity } from '@db/entities/TagEntity';
import { authorize, validCursor } from '../../shared/middlewares/global.middleware';
import { globalScope, validCursor } from '../../shared/middlewares/global.middleware';
import type { TagRequest } from '../../../types';
import { encodeNextCursor } from '../../shared/services/pagination.service';

@@ -12,7 +12,7 @@ import { TagService } from '@/services/tag.service';

export = {
	createTag: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		globalScope('tag:create'),
		async (req: TagRequest.Create, res: express.Response): Promise<express.Response> => {
			const { name } = req.body;

@@ -27,7 +27,7 @@ export = {
		},
	],
	updateTag: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		globalScope('tag:update'),
		async (req: TagRequest.Update, res: express.Response): Promise<express.Response> => {
			const { id } = req.params;
			const { name } = req.body;

@@ -49,7 +49,7 @@ export = {
		},
	],
	deleteTag: [
		authorize(['global:owner', 'global:admin']),
		globalScope('tag:delete'),
		async (req: TagRequest.Delete, res: express.Response): Promise<express.Response> => {
			const { id } = req.params;

@@ -65,7 +65,7 @@ export = {
		},
	],
	getTags: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		globalScope('tag:read'),
		validCursor,
		async (req: TagRequest.GetAll, res: express.Response): Promise<express.Response> => {
			const { offset = 0, limit = 100 } = req.query;

@@ -88,7 +88,7 @@ export = {
		},
	],
	getTag: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		globalScope('tag:read'),
		async (req: TagRequest.Get, res: express.Response): Promise<express.Response> => {
			const { id } = req.params;

@@ -5,7 +5,7 @@ import { clean, getAllUsersAndCount, getUser } from './users.service.ee';

import { encodeNextCursor } from '../../shared/services/pagination.service';
import {
	authorize,
	globalScope,
	validCursor,
	validLicenseWithUserQuota,
} from '../../shared/middlewares/global.middleware';

@@ -15,7 +15,7 @@ import { InternalHooks } from '@/InternalHooks';
export = {
	getUser: [
		validLicenseWithUserQuota,
		authorize(['global:owner', 'global:admin']),
		globalScope('user:read'),
		async (req: UserRequest.Get, res: express.Response) => {
			const { includeRole = false } = req.query;
			const { id } = req.params;

@@ -41,7 +41,7 @@ export = {
	getUsers: [
		validLicenseWithUserQuota,
		validCursor,
		authorize(['global:owner', 'global:admin']),
		globalScope(['user:list', 'user:read']),
		async (req: UserRequest.Get, res: express.Response) => {
			const { offset = 0, limit = 100, includeRole = false } = req.query;

@@ -11,11 +11,10 @@ import { WorkflowEntity } from '@db/entities/WorkflowEntity';
import { ExternalHooks } from '@/ExternalHooks';
import { addNodeIds, replaceInvalidCredentials } from '@/WorkflowHelpers';
import type { WorkflowRequest } from '../../../types';
import { authorize, validCursor } from '../../shared/middlewares/global.middleware';
import { projectScope, validCursor } from '../../shared/middlewares/global.middleware';
import { encodeNextCursor } from '../../shared/services/pagination.service';
import {
	getWorkflowById,
	getSharedWorkflow,
	setWorkflowAsActive,
	setWorkflowAsInactive,
	updateWorkflow,

@@ -30,10 +29,10 @@ import { WorkflowHistoryService } from '@/workflows/workflowHistory/workflowHist
import { SharedWorkflowRepository } from '@/databases/repositories/sharedWorkflow.repository';
import { TagRepository } from '@/databases/repositories/tag.repository';
import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import { ProjectRepository } from '@/databases/repositories/project.repository';

export = {
	createWorkflow: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		async (req: WorkflowRequest.Create, res: express.Response): Promise<express.Response> => {
			const workflow = req.body;

@@ -44,7 +43,10 @@ export = {

			addNodeIds(workflow);

			const createdWorkflow = await createWorkflow(workflow, req.user, 'workflow:owner');
			const project = await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(
				req.user.id,
			);
			const createdWorkflow = await createWorkflow(workflow, req.user, project, 'workflow:owner');

			await Container.get(WorkflowHistoryService).saveVersion(
				req.user,

@@ -53,13 +55,13 @@ export = {
			);

			await Container.get(ExternalHooks).run('workflow.afterCreate', [createdWorkflow]);
			void Container.get(InternalHooks).onWorkflowCreated(req.user, createdWorkflow, true);
			void Container.get(InternalHooks).onWorkflowCreated(req.user, createdWorkflow, project, true);

			return res.json(createdWorkflow);
		},
	],
	deleteWorkflow: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		projectScope('workflow:delete', 'workflow'),
		async (req: WorkflowRequest.Get, res: express.Response): Promise<express.Response> => {
			const { id: workflowId } = req.params;

@@ -74,15 +76,21 @@ export = {
		},
	],
	getWorkflow: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		projectScope('workflow:read', 'workflow'),
		async (req: WorkflowRequest.Get, res: express.Response): Promise<express.Response> => {
			const { id } = req.params;

			const sharedWorkflow = await getSharedWorkflow(req.user, id);
			const workflow = await Container.get(SharedWorkflowRepository).findWorkflowForUser(
				id,
				req.user,
				['workflow:read'],
				{ includeTags: !config.getEnv('workflowTagsDisabled') },
			);

			if (!sharedWorkflow) {
			if (!workflow) {
				// user trying to access a workflow they do not own
				// or workflow does not exist
				// and was not shared to them
				// Or does not exist.
				return res.status(404).json({ message: 'Not Found' });
			}

@@ -91,11 +99,10 @@ export = {
				public_api: true,
			});

			return res.json(sharedWorkflow.workflow);
			return res.json(workflow);
		},
	],
	getWorkflows: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		validCursor,
		async (req: WorkflowRequest.GetAll, res: express.Response): Promise<express.Response> => {
			const { offset = 0, limit = 100, active, tags, name } = req.query;

@@ -121,19 +128,24 @@ export = {
				);
			}

			const sharedWorkflows = await Container.get(SharedWorkflowRepository).getSharedWorkflows(
			let workflows = await Container.get(SharedWorkflowRepository).findAllWorkflowsForUser(
				req.user,
				options,
				['workflow:read'],
			);

			if (!sharedWorkflows.length) {
			if (options.workflowIds) {
				const workflowIds = options.workflowIds;
				workflows = workflows.filter((wf) => workflowIds.includes(wf.id));
			}

			if (!workflows.length) {
				return res.status(200).json({
					data: [],
					nextCursor: null,
				});
			}

			const workflowsIds = sharedWorkflows.map((shareWorkflow) => shareWorkflow.workflowId);
			const workflowsIds = workflows.map((wf) => wf.id);
			where.id = In(workflowsIds);
		}

@@ -160,7 +172,7 @@ export = {
		},
	],
	updateWorkflow: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		projectScope('workflow:update', 'workflow'),
		async (req: WorkflowRequest.Update, res: express.Response): Promise<express.Response> => {
			const { id } = req.params;
			const updateData = new WorkflowEntity();

@@ -168,9 +180,13 @@ export = {
			updateData.id = id;
			updateData.versionId = uuid();

			const sharedWorkflow = await getSharedWorkflow(req.user, id);
			const workflow = await Container.get(SharedWorkflowRepository).findWorkflowForUser(
				id,
				req.user,
				['workflow:update'],
			);

			if (!sharedWorkflow) {
			if (!workflow) {
				// user trying to access a workflow they do not own
				// or workflow does not exist
				return res.status(404).json({ message: 'Not Found' });

@@ -181,23 +197,23 @@ export = {

			const workflowManager = Container.get(ActiveWorkflowManager);

			if (sharedWorkflow.workflow.active) {
			if (workflow.active) {
				// When workflow gets saved always remove it as the triggers could have been
				// changed and so the changes would not take effect
				await workflowManager.remove(id);
			}

			try {
				await updateWorkflow(sharedWorkflow.workflowId, updateData);
				await updateWorkflow(workflow.id, updateData);
			} catch (error) {
				if (error instanceof Error) {
					return res.status(400).json({ message: error.message });
				}
			}

			if (sharedWorkflow.workflow.active) {
			if (workflow.active) {
				try {
					await workflowManager.add(sharedWorkflow.workflowId, 'update');
					await workflowManager.add(workflow.id, 'update');
				} catch (error) {
					if (error instanceof Error) {
						return res.status(400).json({ message: error.message });

@@ -205,13 +221,13 @@ export = {
				}
			}

			const updatedWorkflow = await getWorkflowById(sharedWorkflow.workflowId);
			const updatedWorkflow = await getWorkflowById(workflow.id);

			if (updatedWorkflow) {
				await Container.get(WorkflowHistoryService).saveVersion(
					req.user,
					updatedWorkflow,
					sharedWorkflow.workflowId,
					workflow.id,
				);
			}

@@ -222,21 +238,25 @@ export = {
		},
	],
	activateWorkflow: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		projectScope('workflow:update', 'workflow'),
		async (req: WorkflowRequest.Activate, res: express.Response): Promise<express.Response> => {
			const { id } = req.params;

			const sharedWorkflow = await getSharedWorkflow(req.user, id);
			const workflow = await Container.get(SharedWorkflowRepository).findWorkflowForUser(
				id,
				req.user,
				['workflow:update'],
			);

			if (!sharedWorkflow) {
			if (!workflow) {
				// user trying to access a workflow they do not own
				// or workflow does not exist
				return res.status(404).json({ message: 'Not Found' });
			}

			if (!sharedWorkflow.workflow.active) {
			if (!workflow.active) {
				try {
					await Container.get(ActiveWorkflowManager).add(sharedWorkflow.workflowId, 'activate');
					await Container.get(ActiveWorkflowManager).add(workflow.id, 'activate');
				} catch (error) {
					if (error instanceof Error) {
						return res.status(400).json({ message: error.message });

@@ -244,25 +264,29 @@ export = {
				}

				// change the status to active in the DB
				await setWorkflowAsActive(sharedWorkflow.workflow);
				await setWorkflowAsActive(workflow);

				sharedWorkflow.workflow.active = true;
				workflow.active = true;

				return res.json(sharedWorkflow.workflow);
				return res.json(workflow);
			}

			// nothing to do as the workflow is already active
			return res.json(sharedWorkflow.workflow);
			return res.json(workflow);
		},
	],
	deactivateWorkflow: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		projectScope('workflow:update', 'workflow'),
		async (req: WorkflowRequest.Activate, res: express.Response): Promise<express.Response> => {
			const { id } = req.params;

			const sharedWorkflow = await getSharedWorkflow(req.user, id);
			const workflow = await Container.get(SharedWorkflowRepository).findWorkflowForUser(
				id,
				req.user,
				['workflow:update'],
			);

			if (!sharedWorkflow) {
			if (!workflow) {
				// user trying to access a workflow they do not own
				// or workflow does not exist
				return res.status(404).json({ message: 'Not Found' });

@@ -270,22 +294,22 @@ export = {

			const activeWorkflowManager = Container.get(ActiveWorkflowManager);

			if (sharedWorkflow.workflow.active) {
				await activeWorkflowManager.remove(sharedWorkflow.workflowId);
			if (workflow.active) {
				await activeWorkflowManager.remove(workflow.id);

				await setWorkflowAsInactive(sharedWorkflow.workflow);
				await setWorkflowAsInactive(workflow);

				sharedWorkflow.workflow.active = false;
				workflow.active = false;

				return res.json(sharedWorkflow.workflow);
				return res.json(workflow);
			}

			// nothing to do as the workflow is already inactive
			return res.json(sharedWorkflow.workflow);
			return res.json(workflow);
		},
	],
	getWorkflowTags: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		projectScope('workflow:read', 'workflow'),
		async (req: WorkflowRequest.GetTags, res: express.Response): Promise<express.Response> => {
			const { id } = req.params;

@@ -293,9 +317,13 @@ export = {
				return res.status(400).json({ message: 'Workflow Tags Disabled' });
			}

			const sharedWorkflow = await getSharedWorkflow(req.user, id);
			const workflow = await Container.get(SharedWorkflowRepository).findWorkflowForUser(
				id,
				req.user,
				['workflow:read'],
			);

			if (!sharedWorkflow) {
			if (!workflow) {
				// user trying to access a workflow he does not own
				// or workflow does not exist
				return res.status(404).json({ message: 'Not Found' });

@@ -307,7 +335,7 @@ export = {
		},
	],
	updateWorkflowTags: [
		authorize(['global:owner', 'global:admin', 'global:member']),
		projectScope('workflow:update', 'workflow'),
		async (req: WorkflowRequest.UpdateTags, res: express.Response): Promise<express.Response> => {
			const { id } = req.params;
			const newTags = req.body.map((newTag) => newTag.id);

@@ -316,7 +344,11 @@ export = {
				return res.status(400).json({ message: 'Workflow Tags Disabled' });
			}

			const sharedWorkflow = await getSharedWorkflow(req.user, id);
			const sharedWorkflow = await Container.get(SharedWorkflowRepository).findWorkflowForUser(
				id,
				req.user,
				['workflow:update'],
			);

			if (!sharedWorkflow) {
				// user trying to access a workflow he does not own

@@ -4,23 +4,31 @@ import type { User } from '@db/entities/User';
import { WorkflowEntity } from '@db/entities/WorkflowEntity';
import { WorkflowTagMapping } from '@db/entities/WorkflowTagMapping';
import { SharedWorkflow, type WorkflowSharingRole } from '@db/entities/SharedWorkflow';
import config from '@/config';
import { WorkflowRepository } from '@db/repositories/workflow.repository';
import { SharedWorkflowRepository } from '@db/repositories/sharedWorkflow.repository';
import type { Project } from '@/databases/entities/Project';
import { WorkflowTagMappingRepository } from '@db/repositories/workflowTagMapping.repository';
import { TagRepository } from '@db/repositories/tag.repository';
import { License } from '@/License';
import { WorkflowSharingService } from '@/workflows/workflowSharing.service';
import type { Scope } from '@n8n/permissions';
import config from '@/config';

function insertIf(condition: boolean, elements: string[]): string[] {
	return condition ? elements : [];
}

export async function getSharedWorkflowIds(user: User): Promise<string[]> {
	const where = ['global:owner', 'global:admin'].includes(user.role) ? {} : { userId: user.id };
	const sharedWorkflows = await Container.get(SharedWorkflowRepository).find({
		where,
		select: ['workflowId'],
	});
	return sharedWorkflows.map(({ workflowId }) => workflowId);
export async function getSharedWorkflowIds(user: User, scopes: Scope[]): Promise<string[]> {
	if (Container.get(License).isSharingEnabled()) {
		return await Container.get(WorkflowSharingService).getSharedWorkflowIds(user, {
			scopes,
		});
	} else {
		return await Container.get(WorkflowSharingService).getSharedWorkflowIds(user, {
			workflowRoles: ['workflow:owner'],
			projectRoles: ['project:personalOwner'],
		});
	}
}

export async function getSharedWorkflow(

@@ -45,6 +53,7 @@ export async function getWorkflowById(id: string): Promise<WorkflowEntity | null
export async function createWorkflow(
	workflow: WorkflowEntity,
	user: User,
	personalProject: Project,
	role: WorkflowSharingRole,
): Promise<WorkflowEntity> {
	return await Db.transaction(async (transactionManager) => {

@@ -56,6 +65,7 @@ export async function createWorkflow(
		Object.assign(newSharedWorkflow, {
			role,
			user,
			project: personalProject,
			workflow: savedWorkflow,
		});
		await transactionManager.save<SharedWorkflow>(newSharedWorkflow);

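Callers of the reworked `getSharedWorkflowIds` now state which scopes they need; the helper falls back to owner-only roles when sharing is not licensed. A hedged usage sketch follows (the relative import path is assumed, since the file name is not visible in this diff).

```ts
import type { User } from '@db/entities/User';
import { getSharedWorkflowIds } from './workflows.service';

// IDs of all workflows the user may read, under either licensing mode.
async function readableWorkflowIds(user: User): Promise<string[]> {
	return await getSharedWorkflowIds(user, ['workflow:read']);
}
```
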
@@ -3,27 +3,48 @@ import type express from 'express';
import { Container } from 'typedi';

import { License } from '@/License';
import type { GlobalRole } from '@db/entities/User';
import type { AuthenticatedRequest } from '@/requests';

import type { PaginatedRequest } from '../../../types';
import { decodeCursor } from '../services/pagination.service';
import type { Scope } from '@n8n/permissions';
import { userHasScope } from '@/permissions/checkAccess';

const UNLIMITED_USERS_QUOTA = -1;

export const authorize =
	(authorizedRoles: readonly GlobalRole[]) =>
	(
		req: AuthenticatedRequest,
export type ProjectScopeResource = 'workflow' | 'credential';

const buildScopeMiddleware = (
	scopes: Scope[],
	resource?: ProjectScopeResource,
	{ globalOnly } = { globalOnly: false },
) => {
	return async (
		req: AuthenticatedRequest<{ id?: string }>,
		res: express.Response,
		next: express.NextFunction,
	): express.Response | void => {
		if (!authorizedRoles.includes(req.user.role)) {
	): Promise<express.Response | void> => {
		const params: { credentialId?: string; workflowId?: string } = {};
		if (req.params.id) {
			if (resource === 'workflow') {
				params.workflowId = req.params.id;
			} else if (resource === 'credential') {
				params.credentialId = req.params.id;
			}
		}
		if (!(await userHasScope(req.user, scopes, globalOnly, params))) {
			return res.status(403).json({ message: 'Forbidden' });
		}

		return next();
	};
};

export const globalScope = (scopes: Scope | Scope[]) =>
	buildScopeMiddleware(Array.isArray(scopes) ? scopes : [scopes], undefined, { globalOnly: true });

export const projectScope = (scopes: Scope | Scope[], resource: ProjectScopeResource) =>
	buildScopeMiddleware(Array.isArray(scopes) ? scopes : [scopes], resource, { globalOnly: false });

export const validCursor = (
	req: PaginatedRequest,

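A hedged sketch of how the two exported middlewares compose in a public API handler, in the same array style as the tag and workflow endpoints above; the route names and handler bodies here are placeholders, not endpoints from this PR.

```ts
import type express from 'express';
import { globalScope, projectScope } from '../../shared/middlewares/global.middleware';

export = {
	deleteTag: [
		// instance-wide check: the user must hold the scope globally
		globalScope('tag:delete'),
		async (_req: express.Request, res: express.Response): Promise<express.Response> =>
			res.status(204).send(),
	],
	getWorkflow: [
		// per-project check: req.params.id is mapped to workflowId before userHasScope runs
		projectScope('workflow:read', 'workflow'),
		async (_req: express.Request, res: express.Response): Promise<express.Response> =>
			res.json({ ok: true }),
	],
};
```
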
@@ -71,6 +71,8 @@ import { InvitationController } from './controllers/invitation.controller';
// import { CollaborationService } from './collaboration/collaboration.service';
import { BadRequestError } from './errors/response-errors/bad-request.error';
import { OrchestrationService } from '@/services/orchestration.service';
import { ProjectController } from './controllers/project.controller';
import { RoleController } from './controllers/role.controller';

const exec = promisify(callbackExec);

@@ -146,6 +148,8 @@ export class Server extends AbstractServer {
			ExecutionsController,
			CredentialsController,
			AIController,
			ProjectController,
			RoleController,
		];

		if (

@@ -5,64 +5,47 @@ import { CredentialAccessError, NodeOperationError, WorkflowOperationError } fro
import config from '@/config';
import { License } from '@/License';
import { OwnershipService } from '@/services/ownership.service';
import { UserRepository } from '@db/repositories/user.repository';
import { SharedCredentialsRepository } from '@db/repositories/sharedCredentials.repository';
import { SharedWorkflowRepository } from '@db/repositories/sharedWorkflow.repository';
import { ProjectService } from '@/services/project.service';

@Service()
export class PermissionChecker {
	constructor(
		private readonly userRepository: UserRepository,
		private readonly sharedCredentialsRepository: SharedCredentialsRepository,
		private readonly sharedWorkflowRepository: SharedWorkflowRepository,
		private readonly ownershipService: OwnershipService,
		private readonly license: License,
		private readonly projectService: ProjectService,
	) {}

	/**
	 * Check if a user is permitted to execute a workflow.
	 * Check if a workflow has the ability to execute based on the projects it's apart of.
	 */
	async check(workflowId: string, userId: string, nodes: INode[]) {
		// allow if no nodes in this workflow use creds

	async check(workflowId: string, nodes: INode[]) {
		const homeProject = await this.ownershipService.getWorkflowProjectCached(workflowId);
		const homeProjectOwner = await this.ownershipService.getProjectOwnerCached(homeProject.id);
		if (homeProject.type === 'personal' && homeProjectOwner?.hasGlobalScope('credential:list')) {
			// Workflow belongs to a project by a user with privileges
			// so all credentials are usable. Skip credential checks.
			return;
		}
		const projectIds = await this.projectService.findProjectsWorkflowIsIn(workflowId);
		const credIdsToNodes = this.mapCredIdsToNodes(nodes);

		const workflowCredIds = Object.keys(credIdsToNodes);

		if (workflowCredIds.length === 0) return;

		// allow if requesting user is instance owner
		const accessible = await this.sharedCredentialsRepository.getFilteredAccessibleCredentials(
			projectIds,
			workflowCredIds,
		);

		const user = await this.userRepository.findOneOrFail({
			where: { id: userId },
		});

		if (user.hasGlobalScope('workflow:execute')) return;

		const isSharingEnabled = this.license.isSharingEnabled();

		// allow if all creds used in this workflow are a subset of
		// all creds accessible to users who have access to this workflow

		let workflowUserIds = [userId];

		if (workflowId && isSharingEnabled) {
			workflowUserIds = await this.sharedWorkflowRepository.getSharedUserIds(workflowId);
		for (const credentialsId of workflowCredIds) {
			if (!accessible.includes(credentialsId)) {
				const nodeToFlag = credIdsToNodes[credentialsId][0];
				throw new CredentialAccessError(nodeToFlag, credentialsId, workflowId);
			}
		}

		const accessibleCredIds = isSharingEnabled
			? await this.sharedCredentialsRepository.getAccessibleCredentialIds(workflowUserIds)
			: await this.sharedCredentialsRepository.getOwnedCredentialIds(workflowUserIds);

		const inaccessibleCredIds = workflowCredIds.filter((id) => !accessibleCredIds.includes(id));

		if (inaccessibleCredIds.length === 0) return;

		// if disallowed, flag only first node using first inaccessible cred
		const inaccessibleCredId = inaccessibleCredIds[0];
		const nodeToFlag = credIdsToNodes[inaccessibleCredId][0];

		throw new CredentialAccessError(nodeToFlag, inaccessibleCredId, workflowId);
	}

	async checkSubworkflowExecutePolicy(

@@ -91,14 +74,14 @@ export class PermissionChecker {
		}

		const parentWorkflowOwner =
			await this.ownershipService.getWorkflowOwnerCached(parentWorkflowId);
			await this.ownershipService.getWorkflowProjectCached(parentWorkflowId);

		const subworkflowOwner = await this.ownershipService.getWorkflowOwnerCached(subworkflow.id);
		const subworkflowOwner = await this.ownershipService.getWorkflowProjectCached(subworkflow.id);

		const description =
			subworkflowOwner.id === parentWorkflowOwner.id
				? 'Change the settings of the sub-workflow so it can be called by this one.'
				: `${subworkflowOwner.firstName} (${subworkflowOwner.email}) can make this change. You may need to tell them the ID of the sub-workflow, which is ${subworkflow.id}`;
				: `An admin for the ${subworkflowOwner.name} project can make this change. You may need to tell them the ID of the sub-workflow, which is ${subworkflow.id}`;

		const errorToThrow = new WorkflowOperationError(
			`Target workflow ID ${subworkflow.id} may not be called`,

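The credential check above drops the `userId` argument: access is now derived from the projects a workflow belongs to. A hedged caller-side sketch of the new signature; the import path for `PermissionChecker` is assumed, and `INode` comes from `n8n-workflow`.

```ts
import { Container } from 'typedi';
import type { INode } from 'n8n-workflow';
import { PermissionChecker } from '@/UserManagement/PermissionChecker'; // path assumed

// Throws a CredentialAccessError if any node uses a credential the workflow's projects cannot access.
async function assertCredentialsUsable(workflowId: string, nodes: INode[]): Promise<void> {
	await Container.get(PermissionChecker).check(workflowId, nodes);
}
```
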
@@ -19,6 +19,7 @@ export class NodeMailer {
			host: config.getEnv('userManagement.emails.smtp.host'),
			port: config.getEnv('userManagement.emails.smtp.port'),
			secure: config.getEnv('userManagement.emails.smtp.secure'),
			ignoreTLS: !config.getEnv('userManagement.emails.smtp.startTLS'),
		};

		if (

@@ -173,13 +173,13 @@ export class WaitTracker {
			throw new ApplicationError('Only saved workflows can be resumed.');
		}
		const workflowId = fullExecutionData.workflowData.id;
		const user = await this.ownershipService.getWorkflowOwnerCached(workflowId);
		const project = await this.ownershipService.getWorkflowProjectCached(workflowId);

		const data: IWorkflowExecutionDataProcess = {
			executionMode: fullExecutionData.mode,
			executionData: fullExecutionData.data,
			workflowData: fullExecutionData.workflowData,
			userId: user.id,
			projectId: project.id,
		};

		// Start the execution again

@@ -88,19 +88,12 @@ export class WaitingWebhooks implements IWebhookManager {
			settings: workflowData.settings,
		});

		let workflowOwner;
		try {
			workflowOwner = await this.ownershipService.getWorkflowOwnerCached(workflowData.id);
		} catch (error) {
			throw new NotFoundError('Could not find workflow');
		}

		const workflowStartNode = workflow.getNode(lastNodeExecuted);
		if (workflowStartNode === null) {
			throw new NotFoundError('Could not find node to process webhook.');
		}

		const additionalData = await WorkflowExecuteAdditionalData.getBase(workflowOwner.id);
		const additionalData = await WorkflowExecuteAdditionalData.getBase();
		const webhookData = NodeHelpers.getNodeWebhooks(
			workflow,
			workflowStartNode,

@@ -56,8 +56,6 @@ import * as WorkflowHelpers from '@/WorkflowHelpers';
import { WorkflowRunner } from '@/WorkflowRunner';
import * as WorkflowExecuteAdditionalData from '@/WorkflowExecuteAdditionalData';
import { ActiveExecutions } from '@/ActiveExecutions';
import type { User } from '@db/entities/User';
import type { WorkflowEntity } from '@db/entities/WorkflowEntity';
import { EventsService } from '@/services/events.service';
import { OwnershipService } from './services/ownership.service';
import { parseBody } from './middlewares';

@@ -65,6 +63,7 @@ import { Logger } from './Logger';
import { NotFoundError } from './errors/response-errors/not-found.error';
import { InternalServerError } from './errors/response-errors/internal-server.error';
import { UnprocessableRequestError } from './errors/response-errors/unprocessable.error';
import type { Project } from './databases/entities/Project';

export const WEBHOOK_METHODS: IHttpRequestMethods[] = [
	'DELETE',

@@ -248,22 +247,15 @@ export async function executeWebhook(
		$executionId: executionId,
	};

	let user: User;
	if (
		(workflowData as WorkflowEntity).shared?.length &&
		(workflowData as WorkflowEntity).shared[0].user
	) {
		user = (workflowData as WorkflowEntity).shared[0].user;
	} else {
		try {
			user = await Container.get(OwnershipService).getWorkflowOwnerCached(workflowData.id);
		} catch (error) {
			throw new NotFoundError('Cannot find workflow');
		}
	let project: Project | undefined = undefined;
	try {
		project = await Container.get(OwnershipService).getWorkflowProjectCached(workflowData.id);
	} catch (error) {
		throw new NotFoundError('Cannot find workflow');
	}

	// Prepare everything that is needed to run the workflow
	const additionalData = await WorkflowExecuteAdditionalData.getBase(user.id);
	const additionalData = await WorkflowExecuteAdditionalData.getBase();

	// Get the responseMode
	const responseMode = workflow.expression.getSimpleParameterValue(

@@ -546,7 +538,7 @@ export async function executeWebhook(
		pushRef,
		workflowData,
		pinData,
		userId: user.id,
		projectId: project?.id,
	};

	let responsePromise: IDeferredPromise<IN8nHttpFullResponse> | undefined;

@@ -195,12 +195,12 @@ export function executeErrorWorkflow(
	}

	Container.get(OwnershipService)
		.getWorkflowOwnerCached(workflowId)
		.then((user) => {
		.getWorkflowProjectCached(workflowId)
		.then((project) => {
			void Container.get(WorkflowExecutionService).executeErrorWorkflow(
				errorWorkflow,
				workflowErrorData,
				user,
				project,
			);
		})
		.catch((error: Error) => {

@@ -223,12 +223,12 @@ export function executeErrorWorkflow(
	) {
		logger.verbose('Start internal error workflow', { executionId, workflowId });
		void Container.get(OwnershipService)
			.getWorkflowOwnerCached(workflowId)
			.then((user) => {
			.getWorkflowProjectCached(workflowId)
			.then((project) => {
				void Container.get(WorkflowExecutionService).executeErrorWorkflow(
					workflowId,
					workflowErrorData,
					user,
					project,
				);
			});
	}

@@ -655,7 +655,6 @@ function hookFunctionsSaveWorker(): IWorkflowExecuteHooks {

export async function getRunData(
	workflowData: IWorkflowBase,
	userId: string,
	inputData?: INodeExecutionData[],
): Promise<IWorkflowExecutionDataProcess> {
	const mode = 'integrated';

@@ -698,7 +697,6 @@ export async function getRunData(
		executionData: runExecutionData,
		// @ts-ignore
		workflowData,
		userId,
	};

	return runData;

@@ -784,9 +782,7 @@ async function executeWorkflow(
		settings: workflowData.settings,
	});

	const runData =
		options.loadedRunData ??
		(await getRunData(workflowData, additionalData.userId, options.inputData));
	const runData = options.loadedRunData ?? (await getRunData(workflowData, options.inputData));

	let executionId;

@@ -800,11 +796,7 @@ async function executeWorkflow(

	let data;
	try {
		await Container.get(PermissionChecker).check(
			workflowData.id,
			additionalData.userId,
			workflowData.nodes,
		);
		await Container.get(PermissionChecker).check(workflowData.id, workflowData.nodes);
		await Container.get(PermissionChecker).checkSubworkflowExecutePolicy(
			workflow,
			options.parentWorkflowId,

@@ -813,7 +805,7 @@ async function executeWorkflow(

	// Create new additionalData to have different workflow loaded and to call
	// different webhooks
	const additionalDataIntegrated = await getBase(additionalData.userId);
	const additionalDataIntegrated = await getBase();
	additionalDataIntegrated.hooks = getWorkflowHooksIntegrated(
		runData.executionMode,
		executionId,

@@ -966,7 +958,7 @@ export function sendDataToUI(type: string, data: IDataObject | IDataObject[]) {
 * Returns the base additional data without webhooks
 */
export async function getBase(
	userId: string,
	userId?: string,
	currentNodeParameters?: INodeParameters,
	executionTimeoutTimestamp?: number,
): Promise<IWorkflowExecuteAdditionalData> {

@@ -161,7 +161,7 @@ export class WorkflowRunner {

		const { id: workflowId, nodes } = data.workflowData;
		try {
			await this.permissionChecker.check(workflowId, data.userId, nodes);
			await this.permissionChecker.check(workflowId, nodes);
		} catch (error) {
			// Create a failed execution with the data for the node, save it and abort execution
			const runData = generateFailedExecutionFromError(data.executionMode, error, error.node);

@@ -8,7 +8,7 @@ import { AUTH_COOKIE_NAME, RESPONSE_ERROR_MESSAGES, Time } from '@/constants';
import type { User } from '@db/entities/User';
import { UserRepository } from '@db/repositories/user.repository';
import { AuthError } from '@/errors/response-errors/auth.error';
import { UnauthorizedError } from '@/errors/response-errors/unauthorized.error';
import { ForbiddenError } from '@/errors/response-errors/forbidden.error';
import { License } from '@/License';
import { Logger } from '@/Logger';
import type { AuthenticatedRequest } from '@/requests';

@@ -92,7 +92,7 @@ export class AuthService {
			!user.isOwner &&
			!isWithinUsersLimit
		) {
			throw new UnauthorizedError(RESPONSE_ERROR_MESSAGES.USERS_QUOTA_REACHED);
			throw new ForbiddenError(RESPONSE_ERROR_MESSAGES.USERS_QUOTA_REACHED);
		}

		const token = this.issueJWT(user, browserId);

@@ -6,7 +6,6 @@ import glob from 'fast-glob';
import type { EntityManager } from '@n8n/typeorm';

import * as Db from '@/Db';
import type { User } from '@db/entities/User';
import { SharedCredentials } from '@db/entities/SharedCredentials';
import { CredentialsEntity } from '@db/entities/CredentialsEntity';
import { disableAutoGeneratedIds } from '@db/utils/commandHelpers';

@@ -15,6 +14,8 @@ import type { ICredentialsEncrypted } from 'n8n-workflow';
import { ApplicationError, jsonParse } from 'n8n-workflow';
import { UM_FIX_INSTRUCTION } from '@/constants';
import { UserRepository } from '@db/repositories/user.repository';
import { ProjectRepository } from '@/databases/repositories/project.repository';
import type { Project } from '@/databases/entities/Project';

export class ImportCredentialsCommand extends BaseCommand {
	static description = 'Import credentials';

@@ -23,6 +24,7 @@ export class ImportCredentialsCommand extends BaseCommand {
		'$ n8n import:credentials --input=file.json',
		'$ n8n import:credentials --separate --input=backups/latest/',
		'$ n8n import:credentials --input=file.json --userId=1d64c3d2-85fe-4a83-a649-e446b07b3aae',
		'$ n8n import:credentials --input=file.json --projectId=Ox8O54VQrmBrb4qL',
		'$ n8n import:credentials --separate --input=backups/latest/ --userId=1d64c3d2-85fe-4a83-a649-e446b07b3aae',
	];

@@ -38,6 +40,9 @@ export class ImportCredentialsCommand extends BaseCommand {
		userId: Flags.string({
			description: 'The ID of the user to assign the imported credentials to',
		}),
		projectId: Flags.string({
			description: 'The ID of the project to assign the imported credential to',
		}),
	};

	private transactionManager: EntityManager;

@@ -64,21 +69,27 @@ export class ImportCredentialsCommand extends BaseCommand {
			}
		}

		const user = flags.userId ? await this.getAssignee(flags.userId) : await this.getOwner();
		if (flags.projectId && flags.userId) {
			throw new ApplicationError(
				'You cannot use `--userId` and `--projectId` together. Use one or the other.',
			);
		}

		const project = await this.getProject(flags.userId, flags.projectId);

		const credentials = await this.readCredentials(flags.input, flags.separate);

		await Db.getConnection().transaction(async (transactionManager) => {
			this.transactionManager = transactionManager;

			const result = await this.checkRelations(credentials, flags.userId);
			const result = await this.checkRelations(credentials, flags.projectId, flags.userId);

			if (!result.success) {
				throw new ApplicationError(result.message);
			}

			for (const credential of credentials) {
				await this.storeCredential(credential, user);
				await this.storeCredential(credential, project);
			}
		});

@@ -98,7 +109,7 @@ export class ImportCredentialsCommand extends BaseCommand {
		);
	}

	private async storeCredential(credential: Partial<CredentialsEntity>, user: User) {
	private async storeCredential(credential: Partial<CredentialsEntity>, project: Project) {
		const result = await this.transactionManager.upsert(CredentialsEntity, credential, ['id']);

		const sharingExists = await this.transactionManager.existsBy(SharedCredentials, {

@@ -111,25 +122,34 @@ export class ImportCredentialsCommand extends BaseCommand {
				SharedCredentials,
				{
					credentialsId: result.identifiers[0].id as string,
					userId: user.id,
					role: 'credential:owner',
					projectId: project.id,
				},
				['credentialsId', 'userId'],
				['credentialsId', 'projectId'],
			);
		}
	}

	private async getOwner() {
	private async getOwnerProject() {
		const owner = await Container.get(UserRepository).findOneBy({ role: 'global:owner' });
		if (!owner) {
			throw new ApplicationError(`Failed to find owner. ${UM_FIX_INSTRUCTION}`);
		}

		return owner;
		const project = await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(
			owner.id,
		);

		return project;
	}

	private async checkRelations(credentials: ICredentialsEncrypted[], userId?: string) {
		if (!userId) {
	private async checkRelations(
		credentials: ICredentialsEncrypted[],
		projectId?: string,
		userId?: string,
	) {
		// The credential is not supposed to be re-owned.
		if (!projectId && !userId) {
			return {
				success: true as const,
				message: undefined,

@@ -145,15 +165,26 @@ export class ImportCredentialsCommand extends BaseCommand {
				continue;
			}

			const ownerId = await this.getCredentialOwner(credential.id);
			if (!ownerId) {
			const { user, project: ownerProject } = await this.getCredentialOwner(credential.id);

			if (!ownerProject) {
				continue;
			}

			if (ownerId !== userId) {
			if (ownerProject.id !== projectId) {
				const currentOwner =
					ownerProject.type === 'personal'
						? `the user with the ID "${user.id}"`
						: `the project with the ID "${ownerProject.id}"`;
				const newOwner = userId
					? // The user passed in `--userId`, so let's use the user ID in the error
					  // message as opposed to the project ID.
					  `the user with the ID "${userId}"`
					: `the project with the ID "${projectId}"`;

				return {
					success: false as const,
					message: `The credential with id "${credential.id}" is already owned by the user with the id "${ownerId}". It can't be re-owned by the user with the id "${userId}"`,
					message: `The credential with ID "${credential.id}" is already owned by ${currentOwner}. It can't be re-owned by ${newOwner}.`,
				};
			}
		}

@@ -206,26 +237,39 @@ export class ImportCredentialsCommand extends BaseCommand {
		});
	}

	private async getAssignee(userId: string) {
		const user = await Container.get(UserRepository).findOneBy({ id: userId });

		if (!user) {
			throw new ApplicationError('Failed to find user', { extra: { userId } });
		}

		return user;
	}

	private async getCredentialOwner(credentialsId: string) {
		const sharedCredential = await this.transactionManager.findOneBy(SharedCredentials, {
			credentialsId,
			role: 'credential:owner',
		const sharedCredential = await this.transactionManager.findOne(SharedCredentials, {
			where: { credentialsId, role: 'credential:owner' },
			relations: { project: true },
		});

		return sharedCredential?.userId;
		if (sharedCredential && sharedCredential.project.type === 'personal') {
			const user = await Container.get(UserRepository).findOneByOrFail({
				projectRelations: {
					role: 'project:personalOwner',
					projectId: sharedCredential.projectId,
				},
			});

			return { user, project: sharedCredential.project };
		}

		return {};
	}

	private async credentialExists(credentialId: string) {
		return await this.transactionManager.existsBy(CredentialsEntity, { id: credentialId });
	}

	private async getProject(userId?: string, projectId?: string) {
		if (projectId) {
			return await Container.get(ProjectRepository).findOneByOrFail({ id: projectId });
		}

		if (userId) {
			return await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(userId);
		}

		return await this.getOwnerProject();
	}
}

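The new `getProject` helper above resolves the import target in a fixed order: an explicit `--projectId`, else the personal project of `--userId`, else the instance owner's personal project. A hedged standalone sketch of that resolution, with error handling simplified relative to the command above.

```ts
import { Container } from 'typedi';
import { UserRepository } from '@db/repositories/user.repository';
import { ProjectRepository } from '@/databases/repositories/project.repository';

async function resolveImportTargetProject(userId?: string, projectId?: string) {
	const projects = Container.get(ProjectRepository);

	if (projectId) return await projects.findOneByOrFail({ id: projectId });
	if (userId) return await projects.getPersonalProjectForUserOrFail(userId);

	// Fall back to the instance owner's personal project.
	const owner = await Container.get(UserRepository).findOneByOrFail({ role: 'global:owner' });
	return await projects.getPersonalProjectForUserOrFail(owner.id);
}
```
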
@ -14,6 +14,7 @@ import type { IWorkflowToImport } from '@/Interfaces';
|
|||
import { ImportService } from '@/services/import.service';
|
||||
import { BaseCommand } from '../BaseCommand';
|
||||
import { SharedWorkflowRepository } from '@db/repositories/sharedWorkflow.repository';
|
||||
import { ProjectRepository } from '@/databases/repositories/project.repository';
|
||||
|
||||
function assertHasWorkflowsToImport(workflows: unknown): asserts workflows is IWorkflowToImport[] {
|
||||
if (!Array.isArray(workflows)) {
|
||||
|
@ -40,6 +41,7 @@ export class ImportWorkflowsCommand extends BaseCommand {
|
|||
'$ n8n import:workflow --input=file.json',
|
||||
'$ n8n import:workflow --separate --input=backups/latest/',
|
||||
'$ n8n import:workflow --input=file.json --userId=1d64c3d2-85fe-4a83-a649-e446b07b3aae',
|
||||
'$ n8n import:workflow --input=file.json --projectId=Ox8O54VQrmBrb4qL',
|
||||
'$ n8n import:workflow --separate --input=backups/latest/ --userId=1d64c3d2-85fe-4a83-a649-e446b07b3aae',
|
||||
];
|
||||
|
||||
|
@ -55,6 +57,9 @@ export class ImportWorkflowsCommand extends BaseCommand {
|
|||
userId: Flags.string({
|
||||
description: 'The ID of the user to assign the imported workflows to',
|
||||
}),
|
||||
projectId: Flags.string({
|
||||
description: 'The ID of the project to assign the imported workflows to',
|
||||
}),
|
||||
};
|
||||
|
||||
async init() {
|
||||
|
@ -79,24 +84,32 @@ export class ImportWorkflowsCommand extends BaseCommand {
|
|||
}
|
||||
}
|
||||
|
||||
const owner = await this.getOwner();
|
||||
if (flags.projectId && flags.userId) {
|
||||
throw new ApplicationError(
|
||||
'You cannot use `--userId` and `--projectId` together. Use one or the other.',
|
||||
);
|
||||
}
|
||||
|
||||
const project = await this.getProject(flags.userId, flags.projectId);
|
||||
|
||||
const workflows = await this.readWorkflows(flags.input, flags.separate);
|
||||
|
||||
const result = await this.checkRelations(workflows, flags.userId);
|
||||
const result = await this.checkRelations(workflows, flags.projectId, flags.userId);
|
||||
|
||||
if (!result.success) {
|
||||
throw new ApplicationError(result.message);
|
||||
}
|
||||
|
||||
this.logger.info(`Importing ${workflows.length} workflows...`);
|
||||
|
||||
await Container.get(ImportService).importWorkflows(workflows, flags.userId ?? owner.id);
|
||||
await Container.get(ImportService).importWorkflows(workflows, project.id);
|
||||
|
||||
this.reportSuccess(workflows.length);
|
||||
}
|
||||
|
||||
private async checkRelations(workflows: WorkflowEntity[], userId: string | undefined) {
|
||||
if (!userId) {
|
||||
private async checkRelations(workflows: WorkflowEntity[], projectId?: string, userId?: string) {
|
||||
// The credential is not supposed to be re-owned.
|
||||
if (!userId && !projectId) {
|
||||
return {
|
||||
success: true as const,
|
||||
message: undefined,
|
||||
|
@ -108,15 +121,26 @@ export class ImportWorkflowsCommand extends BaseCommand {
|
|||
continue;
|
||||
}
|
||||
|
||||
const ownerId = await this.getWorkflowOwner(workflow);
|
||||
if (!ownerId) {
|
||||
const { user, project: ownerProject } = await this.getWorkflowOwner(workflow);
|
||||
|
||||
if (!ownerProject) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (ownerId !== userId) {
|
||||
if (ownerProject.id !== projectId) {
|
||||
const currentOwner =
|
||||
ownerProject.type === 'personal'
|
||||
? `the user with the ID "${user.id}"`
|
||||
: `the project with the ID "${ownerProject.id}"`;
|
||||
const newOwner = userId
|
||||
? // The user passed in `--userId`, so let's use the user ID in the error
|
||||
// message as opposed to the project ID.
|
||||
`the user with the ID "${userId}"`
|
||||
: `the project with the ID "${projectId}"`;
|
||||
|
||||
return {
|
||||
success: false as const,
|
||||
message: `The credential with id "${workflow.id}" is already owned by the user with the id "${ownerId}". It can't be re-owned by the user with the id "${userId}"`,
|
||||
message: `The credential with ID "${workflow.id}" is already owned by ${currentOwner}. It can't be re-owned by ${newOwner}.`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
@@ -136,22 +160,37 @@ export class ImportWorkflowsCommand extends BaseCommand {
this.logger.info(`Successfully imported ${total} ${total === 1 ? 'workflow.' : 'workflows.'}`);
}

private async getOwner() {
private async getOwnerProject() {
const owner = await Container.get(UserRepository).findOneBy({ role: 'global:owner' });
if (!owner) {
throw new ApplicationError(`Failed to find owner. ${UM_FIX_INSTRUCTION}`);
}

return owner;
const project = await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(
owner.id,
);

return project;
}

private async getWorkflowOwner(workflow: WorkflowEntity) {
const sharing = await Container.get(SharedWorkflowRepository).findOneBy({
workflowId: workflow.id,
role: 'workflow:owner',
const sharing = await Container.get(SharedWorkflowRepository).findOne({
where: { workflowId: workflow.id, role: 'workflow:owner' },
relations: { project: true },
});

return sharing?.userId;
if (sharing && sharing.project.type === 'personal') {
const user = await Container.get(UserRepository).findOneByOrFail({
projectRelations: {
role: 'project:personalOwner',
projectId: sharing.projectId,
},
});

return { user, project: sharing.project };
}

return {};
}

private async workflowExists(workflow: WorkflowEntity) {
@@ -189,4 +228,16 @@ export class ImportWorkflowsCommand extends BaseCommand {
return workflowInstances;
}
}

private async getProject(userId?: string, projectId?: string) {
if (projectId) {
return await Container.get(ProjectRepository).findOneByOrFail({ id: projectId });
}

if (userId) {
return await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(userId);
}

return await this.getOwnerProject();
}
}

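For readers skimming the import command changes above: the commit moves workflow ownership from a user to a project, resolving the target project from `--projectId`, then `--userId`, then the instance owner's personal project. A minimal, self-contained TypeScript sketch of that precedence follows; the `ProjectLookup` interface and `resolveTargetProject` helper are illustrative stand-ins, not n8n's actual repositories or methods.

// Illustrative stand-ins only; these are not n8n's real Project entity or repositories.
type Project = { id: string; type: 'personal' | 'team' };

interface ProjectLookup {
	findById(projectId: string): Promise<Project>;
	personalProjectOf(userId: string): Promise<Project>;
	personalProjectOfInstanceOwner(): Promise<Project>;
}

// Resolution order mirrored from the diff: --projectId wins, then --userId's
// personal project, then the instance owner's personal project.
async function resolveTargetProject(
	lookup: ProjectLookup,
	userId?: string,
	projectId?: string,
): Promise<Project> {
	if (userId && projectId) {
		throw new Error('You cannot use `--userId` and `--projectId` together. Use one or the other.');
	}
	if (projectId) return await lookup.findById(projectId);
	if (userId) return await lookup.personalProjectOf(userId);
	return await lookup.personalProjectOfInstanceOwner();
}
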
@@ -5,18 +5,115 @@ import { AuthProviderSyncHistoryRepository } from '@db/repositories/authProvider
import { SettingsRepository } from '@db/repositories/settings.repository';
import { UserRepository } from '@db/repositories/user.repository';
import { BaseCommand } from '../BaseCommand';
import { Flags } from '@oclif/core';
import { ApplicationError } from 'n8n-workflow';
import { ProjectRepository } from '@/databases/repositories/project.repository';
import { WorkflowService } from '@/workflows/workflow.service';
import { In } from '@n8n/typeorm';
import { SharedWorkflowRepository } from '@/databases/repositories/sharedWorkflow.repository';
import { SharedCredentialsRepository } from '@/databases/repositories/sharedCredentials.repository';
import { ProjectRelationRepository } from '@/databases/repositories/projectRelation.repository';
import { CredentialsService } from '@/credentials/credentials.service';
import { UM_FIX_INSTRUCTION } from '@/constants';

const wrongFlagsError =
'You must use exactly one of `--userId`, `--projectId` or `--deleteWorkflowsAndCredentials`.';

export class Reset extends BaseCommand {
static description = '\nResets the database to the default ldap state';
static description =
'\nResets the database to the default ldap state.\n\nTHIS DELETES ALL LDAP MANAGED USERS.';

static examples = [
'$ n8n ldap:reset --userId=1d64c3d2-85fe-4a83-a649-e446b07b3aae',
'$ n8n ldap:reset --projectId=Ox8O54VQrmBrb4qL',
'$ n8n ldap:reset --deleteWorkflowsAndCredentials',
];

static flags = {
help: Flags.help({ char: 'h' }),
userId: Flags.string({
description:
'The ID of the user to assign the workflows and credentials owned by the deleted LDAP users to',
}),
projectId: Flags.string({
description:
'The ID of the project to assign the workflows and credentials owned by the deleted LDAP users to',
}),
deleteWorkflowsAndCredentials: Flags.boolean({
description:
'Delete all workflows and credentials owned by the users that were created by the users managed via LDAP.',
}),
};

async run(): Promise<void> {
const { flags } = await this.parse(Reset);
const numberOfOptions =
Number(!!flags.userId) +
Number(!!flags.projectId) +
Number(!!flags.deleteWorkflowsAndCredentials);

if (numberOfOptions !== 1) {
throw new ApplicationError(wrongFlagsError);
}

const owner = await this.getOwner();
const ldapIdentities = await Container.get(AuthIdentityRepository).find({
where: { providerType: 'ldap' },
select: ['userId'],
});
const personalProjectIds = await Container.get(
ProjectRelationRepository,
).getPersonalProjectsForUsers(ldapIdentities.map((i) => i.userId));

// Migrate all workflows and credentials to another project.
if (flags.projectId ?? flags.userId) {
if (flags.userId && ldapIdentities.some((i) => i.userId === flags.userId)) {
throw new ApplicationError(
`Can't migrate workflows and credentials to the user with the ID ${flags.userId}. That user was created via LDAP and will be deleted as well.`,
);
}

if (flags.projectId && personalProjectIds.includes(flags.projectId)) {
throw new ApplicationError(
`Can't migrate workflows and credentials to the project with the ID ${flags.projectId}. That project is a personal project belonging to a user that was created via LDAP and will be deleted as well.`,
);
}

const project = await this.getProject(flags.userId, flags.projectId);

await Container.get(UserRepository).manager.transaction(async (trx) => {
for (const projectId of personalProjectIds) {
await Container.get(WorkflowService).transferAll(projectId, project.id, trx);
await Container.get(CredentialsService).transferAll(projectId, project.id, trx);
}
});
}

const [ownedSharedWorkflows, ownedSharedCredentials] = await Promise.all([
Container.get(SharedWorkflowRepository).find({
select: { workflowId: true },
where: { projectId: In(personalProjectIds), role: 'workflow:owner' },
}),
Container.get(SharedCredentialsRepository).find({
relations: { credentials: true },
where: { projectId: In(personalProjectIds), role: 'credential:owner' },
}),
]);

const ownedCredentials = ownedSharedCredentials.map(({ credentials }) => credentials);

for (const { workflowId } of ownedSharedWorkflows) {
await Container.get(WorkflowService).delete(owner, workflowId);
}

for (const credential of ownedCredentials) {
await Container.get(CredentialsService).delete(credential);
}

await Container.get(AuthProviderSyncHistoryRepository).delete({ providerType: 'ldap' });
await Container.get(AuthIdentityRepository).delete({ providerType: 'ldap' });
await Container.get(UserRepository).deleteMany(ldapIdentities.map((i) => i.userId));
await Container.get(ProjectRepository).delete({ id: In(personalProjectIds) });
await Container.get(SettingsRepository).delete({ key: LDAP_FEATURE_NAME });
await Container.get(SettingsRepository).insert({
key: LDAP_FEATURE_NAME,
@@ -27,8 +124,43 @@ export class Reset extends BaseCommand {
this.logger.info('Successfully reset the database to default ldap state.');
}

async getProject(userId?: string, projectId?: string) {
if (projectId) {
const project = await Container.get(ProjectRepository).findOneBy({ id: projectId });

if (project === null) {
throw new ApplicationError(`Could not find the project with the ID ${projectId}.`);
}

return project;
}

if (userId) {
const project = await Container.get(ProjectRepository).getPersonalProjectForUser(userId);

if (project === null) {
throw new ApplicationError(
`Could not find the user with the ID ${userId} or their personalProject.`,
);
}

return project;
}

throw new ApplicationError(wrongFlagsError);
}

async catch(error: Error): Promise<void> {
this.logger.error('Error resetting database. See log messages for details.');
this.logger.error(error.message);
}

private async getOwner() {
const owner = await Container.get(UserRepository).findOneBy({ role: 'global:owner' });
if (!owner) {
throw new ApplicationError(`Failed to find owner. ${UM_FIX_INSTRUCTION}`);
}

return owner;
}
}

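The `ldap:reset` command above accepts exactly one of three mutually exclusive flags. A small, self-contained sketch of that validation follows; the `ResetFlags` type and `assertExactlyOneOption` helper are illustrative, not part of n8n's code.

// Illustrative stand-in for the flag validation shown above; flag names mirror the diff.
interface ResetFlags {
	userId?: string;
	projectId?: string;
	deleteWorkflowsAndCredentials?: boolean;
}

// Exactly one of the three options must be set, otherwise the command aborts.
function assertExactlyOneOption(flags: ResetFlags): void {
	const numberOfOptions =
		Number(!!flags.userId) +
		Number(!!flags.projectId) +
		Number(!!flags.deleteWorkflowsAndCredentials);

	if (numberOfOptions !== 1) {
		throw new Error(
			'You must use exactly one of `--userId`, `--projectId` or `--deleteWorkflowsAndCredentials`.',
		);
	}
}

// Example: assertExactlyOneOption({ projectId: 'Ox8O54VQrmBrb4qL' }) passes,
// while assertExactlyOneOption({}) or passing two flags at once throws.
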
@@ -27,16 +27,27 @@ export class DisableMFACommand extends BaseCommand {
return;
}

const updateOperationResult = await Container.get(UserRepository).update(
{ email: flags.email },
{ mfaSecret: null, mfaRecoveryCodes: [], mfaEnabled: false },
);
const user = await Container.get(UserRepository).findOneBy({ email: flags.email });

if (!updateOperationResult.affected) {
if (!user) {
this.reportUserDoesNotExistError(flags.email);
return;
}

if (
user.mfaSecret === null &&
Array.isArray(user.mfaRecoveryCodes) &&
user.mfaRecoveryCodes.length === 0 &&
!user.mfaEnabled
) {
this.reportUserDoesNotExistError(flags.email);
return;
}

Object.assign(user, { mfaSecret: null, mfaRecoveryCodes: [], mfaEnabled: false });

await Container.get(UserRepository).save(user);

this.reportSuccess(flags.email);
}

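The MFA-disable change above swaps a blind update for a fetch-check-save flow, so the command can tell a missing user apart from one whose MFA is already cleared. A rough, self-contained sketch of that pattern follows; the `UserStore` interface and `disableMfa` function are stand-ins, not n8n's actual entities or repositories.

// Illustrative stand-ins only; not n8n's actual User entity or UserRepository.
type User = {
	email: string;
	mfaSecret: string | null;
	mfaRecoveryCodes: string[];
	mfaEnabled: boolean;
};

interface UserStore {
	findByEmail(email: string): Promise<User | null>;
	save(user: User): Promise<void>;
}

// Fetch first, then decide: a missing user and a user whose MFA is already cleared
// are both reported as "not found", matching the behaviour in the diff above.
async function disableMfa(store: UserStore, email: string): Promise<'disabled' | 'not-found'> {
	const user = await store.findByEmail(email);
	if (!user) return 'not-found';

	const alreadyDisabled =
		user.mfaSecret === null && user.mfaRecoveryCodes.length === 0 && !user.mfaEnabled;
	if (alreadyDisabled) return 'not-found';

	Object.assign(user, { mfaSecret: null, mfaRecoveryCodes: [], mfaEnabled: false });
	await store.save(user);
	return 'disabled';
}
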
@@ -7,6 +7,7 @@ import { SharedCredentialsRepository } from '@db/repositories/sharedCredentials.
import { SharedWorkflowRepository } from '@db/repositories/sharedWorkflow.repository';
import { UserRepository } from '@db/repositories/user.repository';
import { BaseCommand } from '../BaseCommand';
import { ProjectRepository } from '@/databases/repositories/project.repository';

const defaultUserProps = {
firstName: null,
@@ -23,9 +24,12 @@ export class Reset extends BaseCommand {

async run(): Promise<void> {
const owner = await this.getInstanceOwner();
const personalProject = await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(
owner.id,
);

await Container.get(SharedWorkflowRepository).makeOwnerOfAllWorkflows(owner);
await Container.get(SharedCredentialsRepository).makeOwnerOfAllCredentials(owner);
await Container.get(SharedWorkflowRepository).makeOwnerOfAllWorkflows(personalProject);
await Container.get(SharedCredentialsRepository).makeOwnerOfAllCredentials(personalProject);

await Container.get(UserRepository).deleteAllExcept(owner);
await Container.get(UserRepository).save(Object.assign(owner, defaultUserProps));
@@ -38,7 +42,7 @@ export class Reset extends BaseCommand {
const newSharedCredentials = danglingCredentials.map((credentials) =>
Container.get(SharedCredentialsRepository).create({
credentials,
user: owner,
projectId: personalProject.id,
role: 'credential:owner',
}),
);

@@ -17,7 +17,6 @@ import { Queue } from '@/Queue';
import { N8N_VERSION } from '@/constants';
import { ExecutionRepository } from '@db/repositories/execution.repository';
import { WorkflowRepository } from '@db/repositories/workflow.repository';
import { OwnershipService } from '@/services/ownership.service';
import type { ICredentialsOverwrite } from '@/Interfaces';
import { CredentialsOverwrites } from '@/CredentialsOverwrites';
import { rawBodyReader, bodyParser } from '@/middlewares';
@@ -118,8 +117,6 @@ export class Worker extends BaseCommand {
);
await executionRepository.updateStatus(executionId, 'running');

const workflowOwner = await Container.get(OwnershipService).getWorkflowOwnerCached(workflowId);

let { staticData } = fullExecutionData.workflowData;
if (loadStaticData) {
const workflowData = await Container.get(WorkflowRepository).findOne({
@@ -160,7 +157,7 @@ export class Worker extends BaseCommand {
});

const additionalData = await WorkflowExecuteAdditionalData.getBase(
workflowOwner.id,
undefined,
undefined,
executionTimeoutTimestamp,
);
Some files were not shown because too many files have changed in this diff.