Merge remote-tracking branch 'origin/master' into refactor-execution-hooks

कारतोफ्फेलस्क्रिप्ट™ 2025-01-13 17:37:57 +01:00
commit 66be5cd907
52 changed files with 1502 additions and 422 deletions

View file

@ -20,26 +20,28 @@ jobs:
- uses: actions/checkout@v4.1.1
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.0.0
uses: docker/setup-qemu-action@v3.3.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.0.0
uses: docker/setup-buildx-action@v3.8.0
- name: Login to GitHub Container Registry
uses: docker/login-action@v3.0.0
uses: docker/login-action@v3.3.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to DockerHub
uses: docker/login-action@v3.0.0
uses: docker/login-action@v3.3.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build
uses: docker/build-push-action@v5.1.0
uses: docker/build-push-action@v6.11.0
env:
DOCKER_BUILD_SUMMARY: false
with:
context: .
file: ./docker/images/n8n-base/Dockerfile

View file

@ -19,20 +19,22 @@ jobs:
- uses: actions/checkout@v4.1.1
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.0.0
uses: docker/setup-qemu-action@v3.3.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.0.0
uses: docker/setup-buildx-action@v3.8.0
- name: Login to GitHub Container Registry
uses: docker/login-action@v3.0.0
uses: docker/login-action@v3.3.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build
uses: docker/build-push-action@v5.1.0
uses: docker/build-push-action@v6.11.0
env:
DOCKER_BUILD_SUMMARY: false
with:
context: .
file: ./packages/@n8n/benchmark/Dockerfile

View file

@ -0,0 +1,83 @@
name: Docker Custom Image CI
run-name: Build ${{ inputs.branch }} - ${{ inputs.user }}
on:
workflow_dispatch:
inputs:
branch:
description: 'GitHub branch to create image off.'
required: true
tag:
description: 'Name of the docker tag to create.'
required: true
merge-master:
description: 'Merge with master.'
type: boolean
required: true
default: false
user:
description: ''
required: false
default: 'none'
start-url:
description: 'URL to call after workflow is kicked off.'
required: false
default: ''
success-url:
description: 'URL to call after Docker Image got built successfully.'
required: false
default: ''
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Call Start URL - optionally
if: ${{ github.event.inputs.start-url != '' }}
run: curl -v -X POST -d 'url=${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}' ${{github.event.inputs.start-url}} || echo ""
shell: bash
- name: Checkout
uses: actions/checkout@v4.1.1
with:
ref: ${{ github.event.inputs.branch }}
- name: Merge Master - optionally
if: github.event.inputs.merge-master
run: git remote add upstream https://github.com/n8n-io/n8n.git -f; git merge upstream/master --allow-unrelated-histories || echo ""
shell: bash
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.3.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.8.0
- name: Login to GHCR
uses: docker/login-action@v3.3.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push image to GHCR
uses: docker/build-push-action@v6.11.0
env:
DOCKER_BUILD_SUMMARY: false
with:
context: .
file: ./docker/images/n8n-custom/Dockerfile
build-args: |
N8N_RELEASE_TYPE=development
platforms: linux/amd64
provenance: false
push: true
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ghcr.io/${{ github.repository_owner }}/n8n:${{ inputs.tag }}
- name: Call Success URL - optionally
if: ${{ github.event.inputs.success-url != '' }}
run: curl -v ${{github.event.inputs.success-url}} || echo ""
shell: bash
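
The inputs declared above (branch, tag, merge-master, user, start-url, success-url) are supplied when the workflow is dispatched. Below is a minimal TypeScript sketch of triggering it through the GitHub REST API; the workflow file name `docker-build-custom.yml` is an assumption, and a token with `workflow` scope is required.

```typescript
// Sketch only: dispatch the "Docker Custom Image CI" workflow with its inputs.
// The workflow_id below is a hypothetical file name; adjust it to the real path.
import { Octokit } from '@octokit/rest';

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

await octokit.rest.actions.createWorkflowDispatch({
	owner: 'n8n-io',
	repo: 'n8n',
	workflow_id: 'docker-build-custom.yml', // hypothetical file name
	ref: 'master', // branch that contains the workflow definition
	inputs: {
		branch: 'my-feature-branch', // branch to build the image off
		tag: 'my-feature-tag', // docker tag to create
		'merge-master': 'true', // dispatch inputs are passed as strings
		user: 'jane.doe',
		// start-url / success-url are optional callbacks and can be omitted
	},
});
```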

View file

@ -1,74 +1,42 @@
name: Docker Nightly Image CI
run-name: Build ${{ inputs.branch }} - ${{ inputs.user }}
on:
schedule:
- cron: '0 1 * * *'
- cron: '0 0 * * *'
workflow_dispatch:
inputs:
branch:
description: 'GitHub branch to create image off.'
required: true
default: 'master'
tag:
description: 'Name of the docker tag to create.'
required: true
default: 'nightly'
merge-master:
description: 'Merge with master.'
type: boolean
required: true
default: false
user:
description: ''
required: false
default: 'schedule'
start-url:
description: 'URL to call after workflow is kicked off.'
required: false
default: ''
success-url:
description: 'URL to call after Docker Image got built successfully.'
required: false
default: ''
env:
N8N_TAG: ${{ inputs.tag || 'nightly' }}
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Call Start URL - optionally
run: |
[[ "${{github.event.inputs.start-url}}" != "" ]] && curl -v -X POST -d 'url=${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}' ${{github.event.inputs.start-url}} || echo ""
shell: bash
- name: Checkout
uses: actions/checkout@v4.1.1
with:
ref: ${{ github.event.inputs.branch || 'master' }}
ref: master
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.0.0
uses: docker/setup-qemu-action@v3.3.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.0.0
uses: docker/setup-buildx-action@v3.8.0
- name: Login to GHCR
uses: docker/login-action@v3.3.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to DockerHub
uses: docker/login-action@v3.0.0
uses: docker/login-action@v3.3.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Merge Master - optionally
run: |
[[ "${{github.event.inputs.merge-master}}" == "true" ]] && git remote add upstream https://github.com/n8n-io/n8n.git -f; git merge upstream/master --allow-unrelated-histories || echo ""
shell: bash
- name: Build and push to DockerHub
uses: docker/build-push-action@v5.1.0
- name: Build and push image to GHCR and DockerHub
uses: docker/build-push-action@v6.11.0
env:
DOCKER_BUILD_SUMMARY: false
with:
context: .
file: ./docker/images/n8n-custom/Dockerfile
@ -79,24 +47,6 @@ jobs:
push: true
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ secrets.DOCKER_USERNAME }}/n8n:${{ env.N8N_TAG }}
- name: Login to GitHub Container Registry
if: env.N8N_TAG == 'nightly'
uses: docker/login-action@v3.0.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Push image to GHCR
if: env.N8N_TAG == 'nightly'
run: |
docker buildx imagetools create \
--tag ghcr.io/${{ github.repository_owner }}/n8n:nightly \
tags: |
ghcr.io/${{ github.repository_owner }}/n8n:nightly
${{ secrets.DOCKER_USERNAME }}/n8n:nightly
- name: Call Success URL - optionally
run: |
[[ "${{github.event.inputs.success-url}}" != "" ]] && curl -v ${{github.event.inputs.success-url}} || echo ""
shell: bash

View file

@ -73,26 +73,28 @@ jobs:
fetch-depth: 0
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.0.0
uses: docker/setup-qemu-action@v3.3.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.0.0
uses: docker/setup-buildx-action@v3.8.0
- name: Login to GitHub Container Registry
uses: docker/login-action@v3.0.0
uses: docker/login-action@v3.3.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to DockerHub
uses: docker/login-action@v3.0.0
uses: docker/login-action@v3.3.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build
uses: docker/build-push-action@v5.1.0
uses: docker/build-push-action@v6.11.0
env:
DOCKER_BUILD_SUMMARY: false
with:
context: ./docker/images/n8n
build-args: |

View file

@ -34,7 +34,7 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- uses: docker/login-action@v3.0.0
- uses: docker/login-action@v3.3.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
@ -46,7 +46,7 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- uses: docker/login-action@v3.0.0
- uses: docker/login-action@v3.3.0
with:
registry: ghcr.io
username: ${{ github.actor }}

View file

@ -73,7 +73,7 @@ docker run -it --rm \
-p 5678:5678 \
-v ~/.n8n:/home/node/.n8n \
docker.n8n.io/n8nio/n8n \
n8n start --tunnel
start --tunnel
```
## Persist data

View file

@ -349,15 +349,6 @@ export const schema = {
},
},
sourceControl: {
defaultKeyPairType: {
doc: 'Default SSH key type to use when generating SSH keys',
format: ['rsa', 'ed25519'] as const,
default: 'ed25519',
env: 'N8N_SOURCECONTROL_DEFAULT_SSH_KEY_TYPE',
},
},
workflowHistory: {
enabled: {
doc: 'Whether to save workflow history versions',

View file

@ -6,7 +6,7 @@ import { Cipher } from 'n8n-core';
import { Logger } from 'n8n-core';
import nock from 'nock';
import { Time } from '@/constants';
import { CREDENTIAL_BLANKING_VALUE, Time } from '@/constants';
import { OAuth2CredentialController } from '@/controllers/oauth/oauth2-credential.controller';
import { CredentialsHelper } from '@/credentials-helper';
import type { CredentialsEntity } from '@/databases/entities/credentials-entity';
@ -257,5 +257,85 @@ describe('OAuth2CredentialController', () => {
);
expect(res.render).toHaveBeenCalledWith('oauth-callback');
});
it('merges oauthTokenData if it already exists', async () => {
credentialsRepository.findOneBy.mockResolvedValueOnce(credential);
credentialsHelper.getDecrypted.mockResolvedValueOnce({
csrfSecret,
oauthTokenData: { token: true },
});
jest.spyOn(Csrf.prototype, 'verify').mockReturnValueOnce(true);
nock('https://example.domain')
.post(
'/token',
'code=code&grant_type=authorization_code&redirect_uri=http%3A%2F%2Flocalhost%3A5678%2Frest%2Foauth2-credential%2Fcallback',
)
.reply(200, { access_token: 'access-token', refresh_token: 'refresh-token' });
cipher.encrypt.mockReturnValue('encrypted');
await controller.handleCallback(req, res);
expect(externalHooks.run).toHaveBeenCalledWith('oauth2.callback', [
expect.objectContaining({
clientId: 'test-client-id',
redirectUri: 'http://localhost:5678/rest/oauth2-credential/callback',
}),
]);
expect(cipher.encrypt).toHaveBeenCalledWith({
oauthTokenData: {
token: true,
access_token: 'access-token',
refresh_token: 'refresh-token',
},
});
expect(credentialsRepository.update).toHaveBeenCalledWith(
'1',
expect.objectContaining({
data: 'encrypted',
id: '1',
name: 'Test Credential',
type: 'oAuth2Api',
}),
);
expect(res.render).toHaveBeenCalledWith('oauth-callback');
});
it('overwrites oauthTokenData if it is a string', async () => {
credentialsRepository.findOneBy.mockResolvedValueOnce(credential);
credentialsHelper.getDecrypted.mockResolvedValueOnce({
csrfSecret,
oauthTokenData: CREDENTIAL_BLANKING_VALUE,
});
jest.spyOn(Csrf.prototype, 'verify').mockReturnValueOnce(true);
nock('https://example.domain')
.post(
'/token',
'code=code&grant_type=authorization_code&redirect_uri=http%3A%2F%2Flocalhost%3A5678%2Frest%2Foauth2-credential%2Fcallback',
)
.reply(200, { access_token: 'access-token', refresh_token: 'refresh-token' });
cipher.encrypt.mockReturnValue('encrypted');
await controller.handleCallback(req, res);
expect(externalHooks.run).toHaveBeenCalledWith('oauth2.callback', [
expect.objectContaining({
clientId: 'test-client-id',
redirectUri: 'http://localhost:5678/rest/oauth2-credential/callback',
}),
]);
expect(cipher.encrypt).toHaveBeenCalledWith({
oauthTokenData: { access_token: 'access-token', refresh_token: 'refresh-token' },
});
expect(credentialsRepository.update).toHaveBeenCalledWith(
'1',
expect.objectContaining({
data: 'encrypted',
id: '1',
name: 'Test Credential',
type: 'oAuth2Api',
}),
);
expect(res.render).toHaveBeenCalledWith('oauth-callback');
});
});
});

View file

@ -133,7 +133,7 @@ export class OAuth2CredentialController extends AbstractOAuthController {
set(oauthToken.data, 'callbackQueryString', omit(req.query, 'state', 'code'));
}
if (decryptedDataOriginal.oauthTokenData) {
if (typeof decryptedDataOriginal.oauthTokenData === 'object') {
// Only overwrite supplied data as some providers do for example just return the
// refresh_token on the very first request and not on subsequent ones.
Object.assign(decryptedDataOriginal.oauthTokenData, oauthToken.data);
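
The new `typeof` guard distinguishes a real token object (merge) from a blanked placeholder string (overwrite), matching the two test cases added above. A minimal standalone sketch of that rule, using a hypothetical helper rather than the controller code itself:

```typescript
// Sketch of the merge-vs-overwrite rule for oauthTokenData.
// mergeOAuthTokenData is a hypothetical helper, not part of the n8n codebase.
type OAuthTokenData = Record<string, unknown> | string;

function mergeOAuthTokenData(
	existing: OAuthTokenData | undefined,
	fresh: Record<string, unknown>,
): Record<string, unknown> {
	if (existing && typeof existing === 'object') {
		// Keep fields the provider did not resend (e.g. refresh_token is often
		// only returned on the very first token exchange).
		return { ...existing, ...fresh };
	}
	// A blanked/redacted string (or a missing value) is simply replaced.
	return fresh;
}

// Example: the provider omits refresh_token on a later exchange.
const merged = mergeOAuthTokenData(
	{ access_token: 'old', refresh_token: 'keep-me' },
	{ access_token: 'new' },
);
// merged -> { access_token: 'new', refresh_token: 'keep-me' }
```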

View file

@ -33,7 +33,7 @@ describe('CredentialsController', () => {
});
describe('createCredentials', () => {
it('it should create new credentials and emit "credentials-created"', async () => {
it('should create new credentials and emit "credentials-created"', async () => {
// Arrange
const newCredentialsPayload = createNewCredentialsPayload();

View file

@ -198,7 +198,7 @@ export class CredentialsController {
throw new BadRequestError('Managed credentials cannot be updated');
}
const decryptedData = this.credentialsService.decrypt(credential);
const decryptedData = this.credentialsService.decrypt(credential, true);
const preparedCredentialData = await this.credentialsService.prepareUpdateData(
req.body,
decryptedData,

View file

@ -1,86 +1,261 @@
import type { SourceControlledFile } from '@n8n/api-types';
import { Container } from '@n8n/di';
import mock from 'jest-mock-extended/lib/Mock';
import { mock, captor } from 'jest-mock-extended';
import { Cipher, type InstanceSettings } from 'n8n-core';
import { ApplicationError, deepCopy } from 'n8n-workflow';
import fsp from 'node:fs/promises';
import type { CredentialsEntity } from '@/databases/entities/credentials-entity';
import type { SharedCredentials } from '@/databases/entities/shared-credentials';
import { SharedCredentialsRepository } from '@/databases/repositories/shared-credentials.repository';
import { mockInstance } from '@test/mocking';
import type { SharedWorkflow } from '@/databases/entities/shared-workflow';
import type { SharedCredentialsRepository } from '@/databases/repositories/shared-credentials.repository';
import type { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository';
import type { TagRepository } from '@/databases/repositories/tag.repository';
import type { WorkflowTagMappingRepository } from '@/databases/repositories/workflow-tag-mapping.repository';
import type { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import type { VariablesService } from '../../variables/variables.service.ee';
import { SourceControlExportService } from '../source-control-export.service.ee';
// https://github.com/jestjs/jest/issues/4715
function deepSpyOn<O extends object, M extends keyof O>(object: O, methodName: M) {
const spy = jest.fn();
const originalMethod = object[methodName];
if (typeof originalMethod !== 'function') {
throw new ApplicationError(`${methodName.toString()} is not a function`, { level: 'warning' });
}
object[methodName] = function (...args: unknown[]) {
const clonedArgs = deepCopy(args);
spy(...clonedArgs);
return originalMethod.apply(this, args);
} as O[M];
return spy;
}
describe('SourceControlExportService', () => {
const cipher = Container.get(Cipher);
const sharedCredentialsRepository = mock<SharedCredentialsRepository>();
const sharedWorkflowRepository = mock<SharedWorkflowRepository>();
const workflowRepository = mock<WorkflowRepository>();
const tagRepository = mock<TagRepository>();
const workflowTagMappingRepository = mock<WorkflowTagMappingRepository>();
const variablesService = mock<VariablesService>();
const service = new SourceControlExportService(
mock(),
mock(),
mock(),
mock<InstanceSettings>({ n8nFolder: '' }),
variablesService,
tagRepository,
sharedCredentialsRepository,
sharedWorkflowRepository,
workflowRepository,
workflowTagMappingRepository,
mock<InstanceSettings>({ n8nFolder: '/mock/n8n' }),
);
describe('exportCredentialsToWorkFolder', () => {
it('should export credentials to work folder', async () => {
/**
* Arrange
*/
// @ts-expect-error Private method
const replaceSpy = deepSpyOn(service, 'replaceCredentialData');
const fsWriteFile = jest.spyOn(fsp, 'writeFile');
mockInstance(SharedCredentialsRepository).findByCredentialIds.mockResolvedValue([
beforeEach(() => jest.clearAllMocks());
describe('exportCredentialsToWorkFolder', () => {
const credentialData = {
authUrl: 'test',
accessTokenUrl: 'test',
clientId: 'test',
clientSecret: 'test',
oauthTokenData: {
access_token: 'test',
token_type: 'test',
expires_in: 123,
refresh_token: 'test',
},
};
const mockCredentials = mock({
id: 'cred1',
name: 'Test Credential',
type: 'oauth2',
data: cipher.encrypt(credentialData),
});
it('should export credentials to work folder', async () => {
sharedCredentialsRepository.findByCredentialIds.mockResolvedValue([
mock<SharedCredentials>({
credentials: mock<CredentialsEntity>({
data: Container.get(Cipher).encrypt(
JSON.stringify({
authUrl: 'test',
accessTokenUrl: 'test',
clientId: 'test',
clientSecret: 'test',
oauthTokenData: {
access_token: 'test',
token_type: 'test',
expires_in: 123,
refresh_token: 'test',
},
}),
),
credentials: mockCredentials,
project: mock({
type: 'personal',
projectRelations: [
{
role: 'project:personalOwner',
user: mock({ email: 'user@example.com' }),
},
],
}),
}),
]);
/**
* Act
*/
await service.exportCredentialsToWorkFolder([mock<SourceControlledFile>()]);
// Act
const result = await service.exportCredentialsToWorkFolder([mock()]);
/**
* Assert
*/
expect(replaceSpy).toHaveBeenCalledWith({
authUrl: 'test',
accessTokenUrl: 'test',
clientId: 'test',
clientSecret: 'test',
// Assert
expect(result.count).toBe(1);
expect(result.files).toHaveLength(1);
const dataCaptor = captor<string>();
expect(fsWriteFile).toHaveBeenCalledWith(
'/mock/n8n/git/credential_stubs/cred1.json',
dataCaptor,
);
expect(JSON.parse(dataCaptor.value)).toEqual({
id: 'cred1',
name: 'Test Credential',
type: 'oauth2',
data: {
authUrl: '',
accessTokenUrl: '',
clientId: '',
clientSecret: '',
},
ownedBy: {
type: 'personal',
personalEmail: 'user@example.com',
},
});
});
it('should handle team project credentials', async () => {
sharedCredentialsRepository.findByCredentialIds.mockResolvedValue([
mock<SharedCredentials>({
credentials: mockCredentials,
project: mock({
type: 'team',
id: 'team1',
name: 'Test Team',
}),
}),
]);
// Act
const result = await service.exportCredentialsToWorkFolder([
mock<SourceControlledFile>({ id: 'cred1' }),
]);
// Assert
expect(result.count).toBe(1);
const dataCaptor = captor<string>();
expect(fsWriteFile).toHaveBeenCalledWith(
'/mock/n8n/git/credential_stubs/cred1.json',
dataCaptor,
);
expect(JSON.parse(dataCaptor.value)).toEqual({
id: 'cred1',
name: 'Test Credential',
type: 'oauth2',
data: {
authUrl: '',
accessTokenUrl: '',
clientId: '',
clientSecret: '',
},
ownedBy: {
type: 'team',
teamId: 'team1',
teamName: 'Test Team',
},
});
});
it('should handle missing credentials', async () => {
// Arrange
sharedCredentialsRepository.findByCredentialIds.mockResolvedValue([]);
// Act
const result = await service.exportCredentialsToWorkFolder([
mock<SourceControlledFile>({ id: 'cred1' }),
]);
// Assert
expect(result.missingIds).toHaveLength(1);
expect(result.missingIds?.[0]).toBe('cred1');
});
});
describe('exportTagsToWorkFolder', () => {
it('should export tags to work folder', async () => {
// Arrange
tagRepository.find.mockResolvedValue([mock()]);
workflowTagMappingRepository.find.mockResolvedValue([mock()]);
// Act
const result = await service.exportTagsToWorkFolder();
// Assert
expect(result.count).toBe(1);
expect(result.files).toHaveLength(1);
});
it('should not export empty tags', async () => {
// Arrange
tagRepository.find.mockResolvedValue([]);
// Act
const result = await service.exportTagsToWorkFolder();
// Assert
expect(result.count).toBe(0);
expect(result.files).toHaveLength(0);
});
});
describe('exportVariablesToWorkFolder', () => {
it('should export variables to work folder', async () => {
// Arrange
variablesService.getAllCached.mockResolvedValue([mock()]);
// Act
const result = await service.exportVariablesToWorkFolder();
// Assert
expect(result.count).toBe(1);
expect(result.files).toHaveLength(1);
});
it('should not export empty variables', async () => {
// Arrange
variablesService.getAllCached.mockResolvedValue([]);
// Act
const result = await service.exportVariablesToWorkFolder();
// Assert
expect(result.count).toBe(0);
expect(result.files).toHaveLength(0);
});
});
describe('exportWorkflowsToWorkFolder', () => {
it('should export workflows to work folder', async () => {
// Arrange
workflowRepository.findByIds.mockResolvedValue([mock()]);
sharedWorkflowRepository.findByWorkflowIds.mockResolvedValue([
mock<SharedWorkflow>({
project: mock({
type: 'personal',
projectRelations: [{ role: 'project:personalOwner', user: mock() }],
}),
workflow: mock(),
}),
]);
// Act
const result = await service.exportWorkflowsToWorkFolder([mock()]);
// Assert
expect(result.count).toBe(1);
expect(result.files).toHaveLength(1);
});
it('should throw an error if workflow has no owner', async () => {
// Arrange
sharedWorkflowRepository.findByWorkflowIds.mockResolvedValue([
mock<SharedWorkflow>({
project: mock({
type: 'personal',
projectRelations: [],
}),
workflow: mock({
display: () => 'TestWorkflow',
}),
}),
]);
// Act & Assert
await expect(service.exportWorkflowsToWorkFolder([mock()])).rejects.toThrow(
'Workflow TestWorkflow has no owner',
);
});
});
});
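
The rewritten tests drop the ad-hoc `deepSpyOn` helper in favour of `captor` from `jest-mock-extended`. A minimal, self-contained sketch of that pattern (file path and payload are made up for illustration):

```typescript
// Capturing an argument passed to a mocked function with jest-mock-extended.
import { captor } from 'jest-mock-extended';

it('captures the serialized payload', async () => {
	const writeFile = jest.fn();

	await writeFile('/tmp/cred.json', JSON.stringify({ id: 'cred1' }));

	const dataCaptor = captor<string>();
	expect(writeFile).toHaveBeenCalledWith('/tmp/cred.json', dataCaptor);
	// The captured argument is available afterwards for deeper assertions.
	expect(JSON.parse(dataCaptor.value)).toEqual({ id: 'cred1' });
});
```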

View file

@ -1,6 +1,7 @@
import type { SourceControlledFile } from '@n8n/api-types';
import { Container } from '@n8n/di';
import { constants as fsConstants, accessSync } from 'fs';
import { mock } from 'jest-mock-extended';
import { InstanceSettings } from 'n8n-core';
import path from 'path';
@ -16,10 +17,8 @@ import {
getTrackingInformationFromPullResult,
sourceControlFoldersExistCheck,
} from '@/environments.ee/source-control/source-control-helper.ee';
import { SourceControlPreferencesService } from '@/environments.ee/source-control/source-control-preferences.service.ee';
import type { SourceControlPreferences } from '@/environments.ee/source-control/types/source-control-preferences';
import { License } from '@/license';
import { mockInstance } from '@test/mocking';
import type { SourceControlPreferencesService } from '@/environments.ee/source-control/source-control-preferences.service.ee';
import type { License } from '@/license';
const pushResult: SourceControlledFile[] = [
{
@ -151,12 +150,13 @@ const pullResult: SourceControlledFile[] = [
},
];
const license = mockInstance(License);
const license = mock<License>();
const sourceControlPreferencesService = mock<SourceControlPreferencesService>();
beforeAll(async () => {
jest.resetAllMocks();
license.isSourceControlLicensed.mockReturnValue(true);
Container.get(SourceControlPreferencesService).getPreferences = () => ({
sourceControlPreferencesService.getPreferences.mockReturnValue({
branchName: 'main',
connected: true,
repositoryUrl: 'git@example.com:n8ntest/n8n_testrepo.git',
@ -245,17 +245,4 @@ describe('Source Control', () => {
workflowUpdates: 3,
});
});
it('should class validate correct preferences', async () => {
const validPreferences: Partial<SourceControlPreferences> = {
branchName: 'main',
repositoryUrl: 'git@example.com:n8ntest/n8n_testrepo.git',
branchReadOnly: false,
branchColor: '#5296D6',
};
const validationResult = await Container.get(
SourceControlPreferencesService,
).validateSourceControlPreferences(validPreferences);
expect(validationResult).toBeTruthy();
});
});

View file

@ -0,0 +1,180 @@
import * as fastGlob from 'fast-glob';
import { mock } from 'jest-mock-extended';
import { type InstanceSettings } from 'n8n-core';
import fsp from 'node:fs/promises';
import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
import type { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import { SourceControlImportService } from '../source-control-import.service.ee';
jest.mock('fast-glob');
describe('SourceControlImportService', () => {
const workflowRepository = mock<WorkflowRepository>();
const service = new SourceControlImportService(
mock(),
mock(),
mock(),
mock(),
mock(),
mock(),
mock(),
mock(),
mock(),
mock(),
mock(),
workflowRepository,
mock(),
mock<InstanceSettings>({ n8nFolder: '/mock/n8n' }),
);
const globMock = fastGlob.default as unknown as jest.Mock<Promise<string[]>, string[]>;
const fsReadFile = jest.spyOn(fsp, 'readFile');
beforeEach(() => jest.clearAllMocks());
describe('getRemoteVersionIdsFromFiles', () => {
const mockWorkflowFile = '/mock/workflow1.json';
it('should parse workflow files correctly', async () => {
globMock.mockResolvedValue([mockWorkflowFile]);
const mockWorkflowData = {
id: 'workflow1',
versionId: 'v1',
name: 'Test Workflow',
};
fsReadFile.mockResolvedValue(JSON.stringify(mockWorkflowData));
const result = await service.getRemoteVersionIdsFromFiles();
expect(fsReadFile).toHaveBeenCalledWith(mockWorkflowFile, { encoding: 'utf8' });
expect(result).toHaveLength(1);
expect(result[0]).toEqual(
expect.objectContaining({
id: 'workflow1',
versionId: 'v1',
name: 'Test Workflow',
}),
);
});
it('should filter out files without valid workflow data', async () => {
globMock.mockResolvedValue(['/mock/invalid.json']);
fsReadFile.mockResolvedValue('{}');
const result = await service.getRemoteVersionIdsFromFiles();
expect(result).toHaveLength(0);
});
});
describe('getRemoteCredentialsFromFiles', () => {
it('should parse credential files correctly', async () => {
globMock.mockResolvedValue(['/mock/credential1.json']);
const mockCredentialData = {
id: 'cred1',
name: 'Test Credential',
type: 'oauth2',
};
fsReadFile.mockResolvedValue(JSON.stringify(mockCredentialData));
const result = await service.getRemoteCredentialsFromFiles();
expect(result).toHaveLength(1);
expect(result[0]).toEqual(
expect.objectContaining({
id: 'cred1',
name: 'Test Credential',
type: 'oauth2',
}),
);
});
it('should filter out files without valid credential data', async () => {
globMock.mockResolvedValue(['/mock/invalid.json']);
fsReadFile.mockResolvedValue('{}');
const result = await service.getRemoteCredentialsFromFiles();
expect(result).toHaveLength(0);
});
});
describe('getRemoteVariablesFromFile', () => {
it('should parse variables file correctly', async () => {
globMock.mockResolvedValue(['/mock/variables.json']);
const mockVariablesData = [
{ key: 'VAR1', value: 'value1' },
{ key: 'VAR2', value: 'value2' },
];
fsReadFile.mockResolvedValue(JSON.stringify(mockVariablesData));
const result = await service.getRemoteVariablesFromFile();
expect(result).toEqual(mockVariablesData);
});
it('should return empty array if no variables file found', async () => {
globMock.mockResolvedValue([]);
const result = await service.getRemoteVariablesFromFile();
expect(result).toHaveLength(0);
});
});
describe('getRemoteTagsAndMappingsFromFile', () => {
it('should parse tags and mappings file correctly', async () => {
globMock.mockResolvedValue(['/mock/tags.json']);
const mockTagsData = {
tags: [{ id: 'tag1', name: 'Tag 1' }],
mappings: [{ workflowId: 'workflow1', tagId: 'tag1' }],
};
fsReadFile.mockResolvedValue(JSON.stringify(mockTagsData));
const result = await service.getRemoteTagsAndMappingsFromFile();
expect(result.tags).toEqual(mockTagsData.tags);
expect(result.mappings).toEqual(mockTagsData.mappings);
});
it('should return empty tags and mappings if no file found', async () => {
globMock.mockResolvedValue([]);
const result = await service.getRemoteTagsAndMappingsFromFile();
expect(result.tags).toHaveLength(0);
expect(result.mappings).toHaveLength(0);
});
});
describe('getLocalVersionIdsFromDb', () => {
const now = new Date();
jest.useFakeTimers({ now });
it('should replace invalid updatedAt with current timestamp', async () => {
const mockWorkflows = [
{
id: 'workflow1',
name: 'Test Workflow',
updatedAt: 'invalid-date',
},
] as unknown as WorkflowEntity[];
workflowRepository.find.mockResolvedValue(mockWorkflows);
const result = await service.getLocalVersionIdsFromDb();
expect(result[0].updatedAt).toBe(now.toISOString());
});
});
});

View file

@ -0,0 +1,27 @@
import { mock } from 'jest-mock-extended';
import type { InstanceSettings } from 'n8n-core';
import { SourceControlPreferencesService } from '../source-control-preferences.service.ee';
import type { SourceControlPreferences } from '../types/source-control-preferences';
describe('SourceControlPreferencesService', () => {
const instanceSettings = mock<InstanceSettings>({ n8nFolder: '' });
const service = new SourceControlPreferencesService(
instanceSettings,
mock(),
mock(),
mock(),
mock(),
);
it('should class validate correct preferences', async () => {
const validPreferences: Partial<SourceControlPreferences> = {
branchName: 'main',
repositoryUrl: 'git@example.com:n8ntest/n8n_testrepo.git',
branchReadOnly: false,
branchColor: '#5296D6',
};
const validationResult = await service.validateSourceControlPreferences(validPreferences);
expect(validationResult).toBeTruthy();
});
});

View file

@ -10,6 +10,8 @@ describe('SourceControlService', () => {
Container.get(InstanceSettings),
mock(),
mock(),
mock(),
mock(),
);
const sourceControlService = new SourceControlService(
mock(),

View file

@ -1,5 +1,5 @@
import type { SourceControlledFile } from '@n8n/api-types';
import { Container, Service } from '@n8n/di';
import { Service } from '@n8n/di';
import { rmSync } from 'fs';
import { Credentials, InstanceSettings, Logger } from 'n8n-core';
import { ApplicationError, type ICredentialDataDecryptedObject } from 'n8n-workflow';
@ -44,6 +44,10 @@ export class SourceControlExportService {
private readonly logger: Logger,
private readonly variablesService: VariablesService,
private readonly tagRepository: TagRepository,
private readonly sharedCredentialsRepository: SharedCredentialsRepository,
private readonly sharedWorkflowRepository: SharedWorkflowRepository,
private readonly workflowRepository: WorkflowRepository,
private readonly workflowTagMappingRepository: WorkflowTagMappingRepository,
instanceSettings: InstanceSettings,
) {
this.gitFolder = path.join(instanceSettings.n8nFolder, SOURCE_CONTROL_GIT_FOLDER);
@ -106,17 +110,16 @@ export class SourceControlExportService {
try {
sourceControlFoldersExistCheck([this.workflowExportFolder]);
const workflowIds = candidates.map((e) => e.id);
const sharedWorkflows =
await Container.get(SharedWorkflowRepository).findByWorkflowIds(workflowIds);
const workflows = await Container.get(WorkflowRepository).findByIds(workflowIds);
const sharedWorkflows = await this.sharedWorkflowRepository.findByWorkflowIds(workflowIds);
const workflows = await this.workflowRepository.findByIds(workflowIds);
// determine owner of each workflow to be exported
const owners: Record<string, ResourceOwner> = {};
sharedWorkflows.forEach((e) => {
const project = e.project;
sharedWorkflows.forEach((sharedWorkflow) => {
const project = sharedWorkflow.project;
if (!project) {
throw new ApplicationError(`Workflow ${e.workflow.display()} has no owner`);
throw new ApplicationError(`Workflow ${sharedWorkflow.workflow.display()} has no owner`);
}
if (project.type === 'personal') {
@ -124,14 +127,16 @@ export class SourceControlExportService {
(pr) => pr.role === 'project:personalOwner',
);
if (!ownerRelation) {
throw new ApplicationError(`Workflow ${e.workflow.display()} has no owner`);
throw new ApplicationError(
`Workflow ${sharedWorkflow.workflow.display()} has no owner`,
);
}
owners[e.workflowId] = {
owners[sharedWorkflow.workflowId] = {
type: 'personal',
personalEmail: ownerRelation.user.email,
};
} else if (project.type === 'team') {
owners[e.workflowId] = {
owners[sharedWorkflow.workflowId] = {
type: 'team',
teamId: project.id,
teamName: project.name,
@ -156,6 +161,7 @@ export class SourceControlExportService {
})),
};
} catch (error) {
if (error instanceof ApplicationError) throw error;
throw new ApplicationError('Failed to export workflows to work folder', { cause: error });
}
}
@ -204,7 +210,7 @@ export class SourceControlExportService {
files: [],
};
}
const mappings = await Container.get(WorkflowTagMappingRepository).find();
const mappings = await this.workflowTagMappingRepository.find();
const fileName = path.join(this.gitFolder, SOURCE_CONTROL_TAGS_EXPORT_FILE);
await fsWriteFile(
fileName,
@ -260,9 +266,10 @@ export class SourceControlExportService {
try {
sourceControlFoldersExistCheck([this.credentialExportFolder]);
const credentialIds = candidates.map((e) => e.id);
const credentialsToBeExported = await Container.get(
SharedCredentialsRepository,
).findByCredentialIds(credentialIds, 'credential:owner');
const credentialsToBeExported = await this.sharedCredentialsRepository.findByCredentialIds(
credentialIds,
'credential:owner',
);
let missingIds: string[] = [];
if (credentialsToBeExported.length !== credentialIds.length) {
const foundCredentialIds = credentialsToBeExported.map((e) => e.credentialsId);

View file

@ -1,5 +1,5 @@
import type { SourceControlledFile } from '@n8n/api-types';
import { Container, Service } from '@n8n/di';
import { Service } from '@n8n/di';
// eslint-disable-next-line n8n-local-rules/misplaced-n8n-typeorm-import
import { In } from '@n8n/typeorm';
import glob from 'fast-glob';
@ -53,7 +53,15 @@ export class SourceControlImportService {
private readonly errorReporter: ErrorReporter,
private readonly variablesService: VariablesService,
private readonly activeWorkflowManager: ActiveWorkflowManager,
private readonly credentialsRepository: CredentialsRepository,
private readonly projectRepository: ProjectRepository,
private readonly tagRepository: TagRepository,
private readonly sharedWorkflowRepository: SharedWorkflowRepository,
private readonly sharedCredentialsRepository: SharedCredentialsRepository,
private readonly userRepository: UserRepository,
private readonly variablesRepository: VariablesRepository,
private readonly workflowRepository: WorkflowRepository,
private readonly workflowTagMappingRepository: WorkflowTagMappingRepository,
instanceSettings: InstanceSettings,
) {
this.gitFolder = path.join(instanceSettings.n8nFolder, SOURCE_CONTROL_GIT_FOLDER);
@ -91,7 +99,7 @@ export class SourceControlImportService {
}
async getLocalVersionIdsFromDb(): Promise<SourceControlWorkflowVersionId[]> {
const localWorkflows = await Container.get(WorkflowRepository).find({
const localWorkflows = await this.workflowRepository.find({
select: ['id', 'name', 'versionId', 'updatedAt'],
});
return localWorkflows.map((local) => {
@ -146,7 +154,7 @@ export class SourceControlImportService {
}
async getLocalCredentialsFromDb(): Promise<Array<ExportableCredential & { filename: string }>> {
const localCredentials = await Container.get(CredentialsRepository).find({
const localCredentials = await this.credentialsRepository.find({
select: ['id', 'name', 'type'],
});
return localCredentials.map((local) => ({
@ -201,24 +209,22 @@ export class SourceControlImportService {
const localTags = await this.tagRepository.find({
select: ['id', 'name'],
});
const localMappings = await Container.get(WorkflowTagMappingRepository).find({
const localMappings = await this.workflowTagMappingRepository.find({
select: ['workflowId', 'tagId'],
});
return { tags: localTags, mappings: localMappings };
}
async importWorkflowFromWorkFolder(candidates: SourceControlledFile[], userId: string) {
const personalProject =
await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(userId);
const personalProject = await this.projectRepository.getPersonalProjectForUserOrFail(userId);
const workflowManager = this.activeWorkflowManager;
const candidateIds = candidates.map((c) => c.id);
const existingWorkflows = await Container.get(WorkflowRepository).findByIds(candidateIds, {
const existingWorkflows = await this.workflowRepository.findByIds(candidateIds, {
fields: ['id', 'name', 'versionId', 'active'],
});
const allSharedWorkflows = await Container.get(SharedWorkflowRepository).findWithFields(
candidateIds,
{ select: ['workflowId', 'role', 'projectId'] },
);
const allSharedWorkflows = await this.sharedWorkflowRepository.findWithFields(candidateIds, {
select: ['workflowId', 'role', 'projectId'],
});
const importWorkflowsResult = [];
// Due to SQLite concurrency issues, we cannot save all workflows at once
@ -235,9 +241,7 @@ export class SourceControlImportService {
const existingWorkflow = existingWorkflows.find((e) => e.id === importedWorkflow.id);
importedWorkflow.active = existingWorkflow?.active ?? false;
this.logger.debug(`Updating workflow id ${importedWorkflow.id ?? 'new'}`);
const upsertResult = await Container.get(WorkflowRepository).upsert({ ...importedWorkflow }, [
'id',
]);
const upsertResult = await this.workflowRepository.upsert({ ...importedWorkflow }, ['id']);
if (upsertResult?.identifiers?.length !== 1) {
throw new ApplicationError('Failed to upsert workflow', {
extra: { workflowId: importedWorkflow.id ?? 'new' },
@ -253,7 +257,7 @@ export class SourceControlImportService {
? await this.findOrCreateOwnerProject(importedWorkflow.owner)
: null;
await Container.get(SharedWorkflowRepository).upsert(
await this.sharedWorkflowRepository.upsert(
{
workflowId: importedWorkflow.id,
projectId: remoteOwnerProject?.id ?? personalProject.id,
@ -276,7 +280,7 @@ export class SourceControlImportService {
const error = ensureError(e);
this.logger.error(`Failed to activate workflow ${existingWorkflow.id}`, { error });
} finally {
await Container.get(WorkflowRepository).update(
await this.workflowRepository.update(
{ id: existingWorkflow.id },
{ versionId: importedWorkflow.versionId },
);
@ -295,16 +299,15 @@ export class SourceControlImportService {
}
async importCredentialsFromWorkFolder(candidates: SourceControlledFile[], userId: string) {
const personalProject =
await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(userId);
const personalProject = await this.projectRepository.getPersonalProjectForUserOrFail(userId);
const candidateIds = candidates.map((c) => c.id);
const existingCredentials = await Container.get(CredentialsRepository).find({
const existingCredentials = await this.credentialsRepository.find({
where: {
id: In(candidateIds),
},
select: ['id', 'name', 'type', 'data'],
});
const existingSharedCredentials = await Container.get(SharedCredentialsRepository).find({
const existingSharedCredentials = await this.sharedCredentialsRepository.find({
select: ['credentialsId', 'role'],
where: {
credentialsId: In(candidateIds),
@ -336,7 +339,7 @@ export class SourceControlImportService {
}
this.logger.debug(`Updating credential id ${newCredentialObject.id as string}`);
await Container.get(CredentialsRepository).upsert(newCredentialObject, ['id']);
await this.credentialsRepository.upsert(newCredentialObject, ['id']);
const isOwnedLocally = existingSharedCredentials.some(
(c) => c.credentialsId === credential.id && c.role === 'credential:owner',
@ -352,7 +355,7 @@ export class SourceControlImportService {
newSharedCredential.projectId = remoteOwnerProject?.id ?? personalProject.id;
newSharedCredential.role = 'credential:owner';
await Container.get(SharedCredentialsRepository).upsert({ ...newSharedCredential }, [
await this.sharedCredentialsRepository.upsert({ ...newSharedCredential }, [
'credentialsId',
'projectId',
]);
@ -388,7 +391,7 @@ export class SourceControlImportService {
const existingWorkflowIds = new Set(
(
await Container.get(WorkflowRepository).find({
await this.workflowRepository.find({
select: ['id'],
})
).map((e) => e.id),
@ -417,7 +420,7 @@ export class SourceControlImportService {
await Promise.all(
mappedTags.mappings.map(async (mapping) => {
if (!existingWorkflowIds.has(String(mapping.workflowId))) return;
await Container.get(WorkflowTagMappingRepository).upsert(
await this.workflowTagMappingRepository.upsert(
{ tagId: String(mapping.tagId), workflowId: String(mapping.workflowId) },
{
skipUpdateIfNoValuesChanged: true,
@ -464,12 +467,12 @@ export class SourceControlImportService {
overriddenKeys.splice(overriddenKeys.indexOf(variable.key), 1);
}
try {
await Container.get(VariablesRepository).upsert({ ...variable }, ['id']);
await this.variablesRepository.upsert({ ...variable }, ['id']);
} catch (errorUpsert) {
if (isUniqueConstraintError(errorUpsert as Error)) {
this.logger.debug(`Variable ${variable.key} already exists, updating instead`);
try {
await Container.get(VariablesRepository).update({ key: variable.key }, { ...variable });
await this.variablesRepository.update({ key: variable.key }, { ...variable });
} catch (errorUpdate) {
this.logger.debug(`Failed to update variable ${variable.key}, skipping`);
this.logger.debug((errorUpdate as Error).message);
@ -484,11 +487,11 @@ export class SourceControlImportService {
if (overriddenKeys.length > 0 && valueOverrides) {
for (const key of overriddenKeys) {
result.imported.push(key);
const newVariable = Container.get(VariablesRepository).create({
const newVariable = this.variablesRepository.create({
key,
value: valueOverrides[key],
});
await Container.get(VariablesRepository).save(newVariable, { transaction: false });
await this.variablesRepository.save(newVariable, { transaction: false });
}
}
@ -498,32 +501,30 @@ export class SourceControlImportService {
}
private async findOrCreateOwnerProject(owner: ResourceOwner): Promise<Project | null> {
const projectRepository = Container.get(ProjectRepository);
const userRepository = Container.get(UserRepository);
if (typeof owner === 'string' || owner.type === 'personal') {
const email = typeof owner === 'string' ? owner : owner.personalEmail;
const user = await userRepository.findOne({
const user = await this.userRepository.findOne({
where: { email },
});
if (!user) {
return null;
}
return await projectRepository.getPersonalProjectForUserOrFail(user.id);
return await this.projectRepository.getPersonalProjectForUserOrFail(user.id);
} else if (owner.type === 'team') {
let teamProject = await projectRepository.findOne({
let teamProject = await this.projectRepository.findOne({
where: { id: owner.teamId },
});
if (!teamProject) {
try {
teamProject = await projectRepository.save(
projectRepository.create({
teamProject = await this.projectRepository.save(
this.projectRepository.create({
id: owner.teamId,
name: owner.teamName,
type: 'team',
}),
);
} catch (e) {
teamProject = await projectRepository.findOne({
teamProject = await this.projectRepository.findOne({
where: { id: owner.teamId },
});
if (!teamProject) {

View file

@ -1,4 +1,4 @@
import { Container, Service } from '@n8n/di';
import { Service } from '@n8n/di';
import type { ValidationError } from 'class-validator';
import { validate } from 'class-validator';
import { rm as fsRm } from 'fs/promises';
@ -7,7 +7,6 @@ import { ApplicationError, jsonParse } from 'n8n-workflow';
import { writeFile, chmod, readFile } from 'node:fs/promises';
import path from 'path';
import config from '@/config';
import { SettingsRepository } from '@/databases/repositories/settings.repository';
import {
@ -17,6 +16,7 @@ import {
SOURCE_CONTROL_PREFERENCES_DB_KEY,
} from './constants';
import { generateSshKeyPair, isSourceControlLicensed } from './source-control-helper.ee';
import { SourceControlConfig } from './source-control.config';
import type { KeyPairType } from './types/key-pair-type';
import { SourceControlPreferences } from './types/source-control-preferences';
@ -34,6 +34,8 @@ export class SourceControlPreferencesService {
private readonly instanceSettings: InstanceSettings,
private readonly logger: Logger,
private readonly cipher: Cipher,
private readonly settingsRepository: SettingsRepository,
private readonly sourceControlConfig: SourceControlConfig,
) {
this.sshFolder = path.join(instanceSettings.n8nFolder, SOURCE_CONTROL_SSH_FOLDER);
this.gitFolder = path.join(instanceSettings.n8nFolder, SOURCE_CONTROL_GIT_FOLDER);
@ -64,9 +66,7 @@ export class SourceControlPreferencesService {
}
private async getKeyPairFromDatabase() {
const dbSetting = await Container.get(SettingsRepository).findByKey(
'features.sourceControl.sshKeys',
);
const dbSetting = await this.settingsRepository.findByKey('features.sourceControl.sshKeys');
if (!dbSetting?.value) return null;
@ -120,7 +120,7 @@ export class SourceControlPreferencesService {
async deleteKeyPair() {
try {
await fsRm(this.sshFolder, { recursive: true });
await Container.get(SettingsRepository).delete({ key: 'features.sourceControl.sshKeys' });
await this.settingsRepository.delete({ key: 'features.sourceControl.sshKeys' });
} catch (e) {
const error = e instanceof Error ? e : new Error(`${e}`);
this.logger.error(`Failed to delete SSH key pair: ${error.message}`);
@ -133,14 +133,12 @@ export class SourceControlPreferencesService {
async generateAndSaveKeyPair(keyPairType?: KeyPairType): Promise<SourceControlPreferences> {
if (!keyPairType) {
keyPairType =
this.getPreferences().keyGeneratorType ??
(config.get('sourceControl.defaultKeyPairType') as KeyPairType) ??
'ed25519';
this.getPreferences().keyGeneratorType ?? this.sourceControlConfig.defaultKeyPairType;
}
const keyPair = await generateSshKeyPair(keyPairType);
try {
await Container.get(SettingsRepository).save({
await this.settingsRepository.save({
key: 'features.sourceControl.sshKeys',
value: JSON.stringify({
encryptedPrivateKey: this.cipher.encrypt(keyPair.privateKey),
@ -211,7 +209,7 @@ export class SourceControlPreferencesService {
if (saveToDb) {
const settingsValue = JSON.stringify(this._sourceControlPreferences);
try {
await Container.get(SettingsRepository).save(
await this.settingsRepository.save(
{
key: SOURCE_CONTROL_PREFERENCES_DB_KEY,
value: settingsValue,
@ -229,7 +227,7 @@ export class SourceControlPreferencesService {
async loadFromDbAndApplySourceControlPreferences(): Promise<
SourceControlPreferences | undefined
> {
const loadedPreferences = await Container.get(SettingsRepository).findOne({
const loadedPreferences = await this.settingsRepository.findOne({
where: { key: SOURCE_CONTROL_PREFERENCES_DB_KEY },
});
if (loadedPreferences) {

View file

@ -0,0 +1,8 @@
import { Config, Env } from '@n8n/config';
@Config
export class SourceControlConfig {
/** Default SSH key type to use when generating SSH keys. */
@Env('N8N_SOURCECONTROL_DEFAULT_SSH_KEY_TYPE')
defaultKeyPairType: 'ed25519' | 'rsa' = 'ed25519';
}
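
With the option moved out of the legacy config schema, the default key type is now read from the environment via `@n8n/config`. A minimal sketch of consuming it, under the assumption that the class is resolved through the DI container and that `N8N_SOURCECONTROL_DEFAULT_SSH_KEY_TYPE` is set before instantiation:

```typescript
// Sketch: reading the new config class instead of config.get('sourceControl.defaultKeyPairType').
import { Container } from '@n8n/di';
import { SourceControlConfig } from './source-control.config';

// Assumption: the env var must be set before the container instantiates the class.
process.env.N8N_SOURCECONTROL_DEFAULT_SSH_KEY_TYPE = 'rsa';

const sourceControlConfig = Container.get(SourceControlConfig);
console.log(sourceControlConfig.defaultKeyPairType); // 'rsa' (defaults to 'ed25519' when unset)
```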

View file

@ -1,27 +0,0 @@
import { Container } from '@n8n/di';
import { License } from '@/license';
export function isVariablesEnabled(): boolean {
const license = Container.get(License);
return license.isVariablesEnabled();
}
export function canCreateNewVariable(variableCount: number): boolean {
if (!isVariablesEnabled()) {
return false;
}
const license = Container.get(License);
// This defaults to -1 which is what we want if we've enabled
// variables via the config
const limit = license.getVariablesLimit();
if (limit === -1) {
return true;
}
return limit > variableCount;
}
export function getVariablesLimit(): number {
const license = Container.get(License);
return license.getVariablesLimit();
}

View file

@ -1,4 +1,4 @@
import { Container, Service } from '@n8n/di';
import { Service } from '@n8n/di';
import type { Variables } from '@/databases/entities/variables';
import { VariablesRepository } from '@/databases/repositories/variables.repository';
@ -6,23 +6,21 @@ import { generateNanoId } from '@/databases/utils/generators';
import { VariableCountLimitReachedError } from '@/errors/variable-count-limit-reached.error';
import { VariableValidationError } from '@/errors/variable-validation.error';
import { EventService } from '@/events/event.service';
import { License } from '@/license';
import { CacheService } from '@/services/cache/cache.service';
import { canCreateNewVariable } from './environment-helpers';
@Service()
export class VariablesService {
constructor(
protected cacheService: CacheService,
protected variablesRepository: VariablesRepository,
private readonly cacheService: CacheService,
private readonly variablesRepository: VariablesRepository,
private readonly eventService: EventService,
private readonly license: License,
) {}
async getAllCached(state?: 'empty'): Promise<Variables[]> {
let variables = await this.cacheService.get('variables', {
async refreshFn() {
return await Container.get(VariablesService).findAll();
},
refreshFn: async () => await this.findAll(),
});
if (variables === undefined) {
@ -77,7 +75,7 @@ export class VariablesService {
}
async create(variable: Omit<Variables, 'id'>): Promise<Variables> {
if (!canCreateNewVariable(await this.getCount())) {
if (!this.canCreateNewVariable(await this.getCount())) {
throw new VariableCountLimitReachedError('Variables limit reached');
}
this.validateVariable(variable);
@ -100,4 +98,17 @@ export class VariablesService {
await this.updateCache();
return (await this.getCached(id))!;
}
private canCreateNewVariable(variableCount: number): boolean {
if (!this.license.isVariablesEnabled()) {
return false;
}
// This defaults to -1 which is what we want if we've enabled
// variables via the config
const limit = this.license.getVariablesLimit();
if (limit === -1) {
return true;
}
return limit > variableCount;
}
}

View file

@ -19,7 +19,7 @@ import type { WorkflowRepository } from '@/databases/repositories/workflow.repos
import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials';
import { NodeTypes } from '@/node-types';
import type { WorkflowRunner } from '@/workflow-runner';
import { mockInstance } from '@test/mocking';
import { mockInstance, mockLogger } from '@test/mocking';
import { mockNodeTypesData } from '@test-integration/utils/node-types-data';
import { TestRunnerService } from '../test-runner.service.ee';
@ -129,6 +129,9 @@ function mockEvaluationExecutionData(metrics: Record<string, GenericValue>) {
});
}
const errorReporter = mock<ErrorReporter>();
const logger = mockLogger();
describe('TestRunnerService', () => {
const executionRepository = mock<ExecutionRepository>();
const workflowRepository = mock<WorkflowRepository>();
@ -176,6 +179,7 @@ describe('TestRunnerService', () => {
test('should create an instance of TestRunnerService', async () => {
const testRunnerService = new TestRunnerService(
logger,
workflowRepository,
workflowRunner,
executionRepository,
@ -183,7 +187,7 @@ describe('TestRunnerService', () => {
testRunRepository,
testMetricRepository,
mockNodeTypes,
mock<ErrorReporter>(),
errorReporter,
);
expect(testRunnerService).toBeInstanceOf(TestRunnerService);
@ -191,6 +195,7 @@ describe('TestRunnerService', () => {
test('should create and run test cases from past executions', async () => {
const testRunnerService = new TestRunnerService(
logger,
workflowRepository,
workflowRunner,
executionRepository,
@ -198,7 +203,7 @@ describe('TestRunnerService', () => {
testRunRepository,
testMetricRepository,
mockNodeTypes,
mock<ErrorReporter>(),
errorReporter,
);
workflowRepository.findById.calledWith('workflow-under-test-id').mockResolvedValueOnce({
@ -229,6 +234,7 @@ describe('TestRunnerService', () => {
test('should run both workflow under test and evaluation workflow', async () => {
const testRunnerService = new TestRunnerService(
logger,
workflowRepository,
workflowRunner,
executionRepository,
@ -236,7 +242,7 @@ describe('TestRunnerService', () => {
testRunRepository,
testMetricRepository,
mockNodeTypes,
mock<ErrorReporter>(),
errorReporter,
);
workflowRepository.findById.calledWith('workflow-under-test-id').mockResolvedValueOnce({
@ -330,6 +336,7 @@ describe('TestRunnerService', () => {
test('should properly count passed and failed executions', async () => {
const testRunnerService = new TestRunnerService(
logger,
workflowRepository,
workflowRunner,
executionRepository,
@ -337,7 +344,7 @@ describe('TestRunnerService', () => {
testRunRepository,
testMetricRepository,
mockNodeTypes,
mock<ErrorReporter>(),
errorReporter,
);
workflowRepository.findById.calledWith('workflow-under-test-id').mockResolvedValueOnce({
@ -388,6 +395,7 @@ describe('TestRunnerService', () => {
test('should properly count failed test executions', async () => {
const testRunnerService = new TestRunnerService(
logger,
workflowRepository,
workflowRunner,
executionRepository,
@ -395,7 +403,7 @@ describe('TestRunnerService', () => {
testRunRepository,
testMetricRepository,
mockNodeTypes,
mock<ErrorReporter>(),
errorReporter,
);
workflowRepository.findById.calledWith('workflow-under-test-id').mockResolvedValueOnce({
@ -442,6 +450,7 @@ describe('TestRunnerService', () => {
test('should properly count failed evaluations', async () => {
const testRunnerService = new TestRunnerService(
logger,
workflowRepository,
workflowRunner,
executionRepository,
@ -449,7 +458,7 @@ describe('TestRunnerService', () => {
testRunRepository,
testMetricRepository,
mockNodeTypes,
mock<ErrorReporter>(),
errorReporter,
);
workflowRepository.findById.calledWith('workflow-under-test-id').mockResolvedValueOnce({
@ -500,6 +509,7 @@ describe('TestRunnerService', () => {
test('should specify correct start nodes when running workflow under test', async () => {
const testRunnerService = new TestRunnerService(
logger,
workflowRepository,
workflowRunner,
executionRepository,
@ -507,7 +517,7 @@ describe('TestRunnerService', () => {
testRunRepository,
testMetricRepository,
mockNodeTypes,
mock<ErrorReporter>(),
errorReporter,
);
workflowRepository.findById.calledWith('workflow-under-test-id').mockResolvedValueOnce({
@ -574,6 +584,7 @@ describe('TestRunnerService', () => {
test('should properly choose trigger and start nodes', async () => {
const testRunnerService = new TestRunnerService(
logger,
workflowRepository,
workflowRunner,
executionRepository,
@ -581,7 +592,7 @@ describe('TestRunnerService', () => {
testRunRepository,
testMetricRepository,
mockNodeTypes,
mock<ErrorReporter>(),
errorReporter,
);
const startNodesData = (testRunnerService as any).getStartNodesData(
@ -599,6 +610,7 @@ describe('TestRunnerService', () => {
test('should properly choose trigger and start nodes 2', async () => {
const testRunnerService = new TestRunnerService(
logger,
workflowRepository,
workflowRunner,
executionRepository,
@ -606,7 +618,7 @@ describe('TestRunnerService', () => {
testRunRepository,
testMetricRepository,
mockNodeTypes,
mock<ErrorReporter>(),
errorReporter,
);
const startNodesData = (testRunnerService as any).getStartNodesData(

View file

@ -1,6 +1,6 @@
import { Service } from '@n8n/di';
import { parse } from 'flatted';
import { ErrorReporter } from 'n8n-core';
import { ErrorReporter, Logger } from 'n8n-core';
import { NodeConnectionType, Workflow } from 'n8n-workflow';
import type {
IDataObject,
@ -39,6 +39,7 @@ import { createPinData, getPastExecutionTriggerNode } from './utils.ee';
@Service()
export class TestRunnerService {
constructor(
private readonly logger: Logger,
private readonly workflowRepository: WorkflowRepository,
private readonly workflowRunner: WorkflowRunner,
private readonly executionRepository: ExecutionRepository,
@ -115,8 +116,9 @@ export class TestRunnerService {
executionMode: 'evaluation',
runData: {},
pinData,
workflowData: workflow,
workflowData: { ...workflow, pinData },
userId,
partialExecutionVersion: '1',
};
// Trigger the workflow under test with mocked data
@ -203,6 +205,8 @@ export class TestRunnerService {
* Creates a new test run for the given test definition.
*/
async runTest(user: User, test: TestDefinition): Promise<void> {
this.logger.debug('Starting new test run', { testId: test.id });
const workflow = await this.workflowRepository.findById(test.workflowId);
assert(workflow, 'Workflow not found');
@ -227,6 +231,8 @@ export class TestRunnerService {
.andWhere('execution.workflowId = :workflowId', { workflowId: test.workflowId })
.getMany();
this.logger.debug('Found past executions', { count: pastExecutions.length });
// Get the metrics to collect from the evaluation workflow
const testMetricNames = await this.getTestMetricNames(test.id);
@ -238,6 +244,8 @@ export class TestRunnerService {
const metrics = new EvaluationMetrics(testMetricNames);
for (const { id: pastExecutionId } of pastExecutions) {
this.logger.debug('Running test case', { pastExecutionId });
try {
// Fetch past execution with data
const pastExecution = await this.executionRepository.findOne({
@ -257,6 +265,8 @@ export class TestRunnerService {
user.id,
);
this.logger.debug('Test case execution finished', { pastExecutionId });
// In case of a permission check issue, the test case execution will be undefined.
// Skip them, increment the failed count and continue with the next test case
if (!testCaseExecution) {
@ -279,6 +289,8 @@ export class TestRunnerService {
);
assert(evalExecution);
this.logger.debug('Evaluation execution finished', { pastExecutionId });
metrics.addResults(this.extractEvaluationResult(evalExecution));
if (evalExecution.data.resultData.error) {
@ -297,5 +309,7 @@ export class TestRunnerService {
const aggregatedMetrics = metrics.getAggregatedMetrics();
await this.testRunRepository.markAsCompleted(testRun.id, aggregatedMetrics);
this.logger.debug('Test run finished', { testId: test.id });
}
}

View file

@ -71,7 +71,11 @@ export class ManualExecutionService {
},
};
const workflowExecute = new WorkflowExecute(additionalData, 'manual', executionData);
const workflowExecute = new WorkflowExecute(
additionalData,
data.executionMode,
executionData,
);
return workflowExecute.processRunExecutionData(workflow);
} else if (
data.runData === undefined ||

View file

@@ -12,7 +12,6 @@ import config from '@/config';
import { inE2ETests, LICENSE_FEATURES, N8N_VERSION } from '@/constants';
import { CredentialTypes } from '@/credential-types';
import { CredentialsOverwrites } from '@/credentials-overwrites';
import { getVariablesLimit } from '@/environments.ee/variables/environment-helpers';
import { getLdapLoginLabel } from '@/ldap.ee/helpers.ee';
import { License } from '@/license';
import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials';
@@ -326,7 +325,7 @@ export class FrontendService {
}
if (this.license.isVariablesEnabled()) {
this.settings.variables.limit = getVariablesLimit();
this.settings.variables.limit = this.license.getVariablesLimit();
}
if (this.license.isWorkflowHistoryLicensed() && config.getEnv('workflowHistory.enabled')) {

View file

@@ -454,7 +454,7 @@ export async function executeWebhook(
}
let pinData: IPinData | undefined;
const usePinData = executionMode === 'manual';
const usePinData = ['manual', 'evaluation'].includes(executionMode);
if (usePinData) {
pinData = workflowData.pinData;
runExecutionData.resultData.pinData = pinData;

View file

@@ -238,7 +238,7 @@ export class WorkflowRunner {
}
let pinData: IPinData | undefined;
if (data.executionMode === 'manual') {
if (['manual', 'evaluation'].includes(data.executionMode)) {
pinData = data.pinData ?? data.workflowData.pinData;
}
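// A minimal sketch of the shared guard (helper name assumed, not part of this diff):
// both the webhook path above and WorkflowRunner now apply pinned data to
// 'evaluation' runs in addition to 'manual' ones.
const usesPinData = (mode: string): boolean => ['manual', 'evaluation'].includes(mode);
usesPinData('evaluation'); // true  – test-runner executions reuse pinned trigger data
usesPinData('trigger'); // false – production runs still ignore pinned data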

View file

@@ -4,6 +4,7 @@ import type { Scope } from '@sentry/node';
import { Credentials } from 'n8n-core';
import { randomString } from 'n8n-workflow';
import { CREDENTIAL_BLANKING_VALUE } from '@/constants';
import { CredentialsService } from '@/credentials/credentials.service';
import type { Project } from '@/databases/entities/project';
import type { User } from '@/databases/entities/user';
@@ -1164,6 +1165,73 @@ describe('PATCH /credentials/:id', () => {
expect(shellCredential.name).toBe(patchPayload.name); // updated
});
test('should not store redacted value in the db for oauthTokenData', async () => {
// ARRANGE
const credentialService = Container.get(CredentialsService);
const redactSpy = jest.spyOn(credentialService, 'redact').mockReturnValueOnce({
accessToken: CREDENTIAL_BLANKING_VALUE,
oauthTokenData: CREDENTIAL_BLANKING_VALUE,
});
const payload = randomCredentialPayload();
payload.data.oauthTokenData = { tokenData: true };
const savedCredential = await saveCredential(payload, {
user: owner,
role: 'credential:owner',
});
// ACT
const patchPayload = { ...payload, data: { foo: 'bar' } };
await authOwnerAgent.patch(`/credentials/${savedCredential.id}`).send(patchPayload).expect(200);
// ASSERT
const response = await authOwnerAgent
.get(`/credentials/${savedCredential.id}`)
.query({ includeData: true })
.expect(200);
const { id, data } = response.body.data;
expect(id).toBe(savedCredential.id);
expect(data).toEqual({
...patchPayload.data,
// should be the original
oauthTokenData: payload.data.oauthTokenData,
});
expect(redactSpy).not.toHaveBeenCalled();
});
test('should not allow to overwrite oauthTokenData', async () => {
// ARRANGE
const payload = randomCredentialPayload();
payload.data.oauthTokenData = { tokenData: true };
const savedCredential = await saveCredential(payload, {
user: owner,
role: 'credential:owner',
});
// ACT
const patchPayload = {
...payload,
data: { accessToken: 'new', oauthTokenData: { tokenData: false } },
};
await authOwnerAgent.patch(`/credentials/${savedCredential.id}`).send(patchPayload).expect(200);
// ASSERT
const response = await authOwnerAgent
.get(`/credentials/${savedCredential.id}`)
.query({ includeData: true })
.expect(200);
const { id, data } = response.body.data;
expect(id).toBe(savedCredential.id);
// was overwritten
expect(data.accessToken).toBe(patchPayload.data.accessToken);
// was not overwritten
expect(data.oauthTokenData).toEqual(payload.data.oauthTokenData);
});
test('should fail with invalid inputs', async () => {
const savedCredential = await saveCredential(randomCredentialPayload(), {
user: owner,

View file

@@ -10,6 +10,7 @@ import fsp from 'node:fs/promises';
import { CredentialsRepository } from '@/databases/repositories/credentials.repository';
import { ProjectRepository } from '@/databases/repositories/project.repository';
import { SharedCredentialsRepository } from '@/databases/repositories/shared-credentials.repository';
import { UserRepository } from '@/databases/repositories/user.repository';
import { SourceControlImportService } from '@/environments.ee/source-control/source-control-import.service.ee';
import type { ExportableCredential } from '@/environments.ee/source-control/types/exportable-credential';
@@ -21,20 +22,36 @@ import { randomCredentialPayload } from '../shared/random';
import * as testDb from '../shared/test-db';
describe('SourceControlImportService', () => {
let credentialsRepository: CredentialsRepository;
let projectRepository: ProjectRepository;
let sharedCredentialsRepository: SharedCredentialsRepository;
let userRepository: UserRepository;
let service: SourceControlImportService;
const cipher = mockInstance(Cipher);
beforeAll(async () => {
await testDb.init();
credentialsRepository = Container.get(CredentialsRepository);
projectRepository = Container.get(ProjectRepository);
sharedCredentialsRepository = Container.get(SharedCredentialsRepository);
userRepository = Container.get(UserRepository);
service = new SourceControlImportService(
mock(),
mock(),
mock(),
mock(),
credentialsRepository,
projectRepository,
mock(),
mock(),
sharedCredentialsRepository,
userRepository,
mock(),
mock(),
mock(),
mock<InstanceSettings>({ n8nFolder: '/some-path' }),
);
await testDb.init();
});
afterEach(async () => {
@@ -75,7 +92,7 @@ describe('SourceControlImportService', () => {
const personalProject = await getPersonalProject(member);
const sharing = await Container.get(SharedCredentialsRepository).findOneBy({
const sharing = await sharedCredentialsRepository.findOneBy({
credentialsId: CREDENTIAL_ID,
projectId: personalProject.id,
role: 'credential:owner',
@@ -112,7 +129,7 @@ describe('SourceControlImportService', () => {
const personalProject = await getPersonalProject(importingUser);
const sharing = await Container.get(SharedCredentialsRepository).findOneBy({
const sharing = await sharedCredentialsRepository.findOneBy({
credentialsId: CREDENTIAL_ID,
projectId: personalProject.id,
role: 'credential:owner',
@@ -149,7 +166,7 @@ describe('SourceControlImportService', () => {
const personalProject = await getPersonalProject(importingUser);
const sharing = await Container.get(SharedCredentialsRepository).findOneBy({
const sharing = await sharedCredentialsRepository.findOneBy({
credentialsId: CREDENTIAL_ID,
projectId: personalProject.id,
role: 'credential:owner',
@@ -190,7 +207,7 @@ describe('SourceControlImportService', () => {
const personalProject = await getPersonalProject(importingUser);
const sharing = await Container.get(SharedCredentialsRepository).findOneBy({
const sharing = await sharedCredentialsRepository.findOneBy({
credentialsId: CREDENTIAL_ID,
projectId: personalProject.id,
role: 'credential:owner',
@@ -223,7 +240,7 @@ describe('SourceControlImportService', () => {
cipher.encrypt.mockReturnValue('some-encrypted-data');
{
const project = await Container.get(ProjectRepository).findOne({
const project = await projectRepository.findOne({
where: [
{
id: '1234-asdf',
@@ -241,7 +258,7 @@ describe('SourceControlImportService', () => {
importingUser.id,
);
const sharing = await Container.get(SharedCredentialsRepository).findOne({
const sharing = await sharedCredentialsRepository.findOne({
where: {
credentialsId: CREDENTIAL_ID,
role: 'credential:owner',
@@ -288,7 +305,7 @@ describe('SourceControlImportService', () => {
importingUser.id,
);
const sharing = await Container.get(SharedCredentialsRepository).findOneBy({
const sharing = await sharedCredentialsRepository.findOneBy({
credentialsId: CREDENTIAL_ID,
projectId: project.id,
role: 'credential:owner',
@@ -332,7 +349,7 @@ describe('SourceControlImportService', () => {
);
await expect(
Container.get(SharedCredentialsRepository).findBy({
sharedCredentialsRepository.findBy({
credentialsId: credential.id,
}),
).resolves.toMatchObject([
@@ -342,7 +359,7 @@ describe('SourceControlImportService', () => {
},
]);
await expect(
Container.get(CredentialsRepository).findBy({
credentialsRepository.findBy({
id: credential.id,
}),
).resolves.toMatchObject([

View file

@@ -1,7 +1,6 @@
import type { SourceControlledFile } from '@n8n/api-types';
import { Container } from '@n8n/di';
import config from '@/config';
import type { User } from '@/databases/entities/user';
import { SourceControlPreferencesService } from '@/environments.ee/source-control/source-control-preferences.service.ee';
import { SourceControlService } from '@/environments.ee/source-control/source-control.service.ee';
@@ -21,11 +20,17 @@ const testServer = utils.setupTestServer({
enabledFeatures: ['feat:sourceControl', 'feat:sharing'],
});
let sourceControlPreferencesService: SourceControlPreferencesService;
beforeAll(async () => {
owner = await createUser({ role: 'global:owner' });
authOwnerAgent = testServer.authAgentFor(owner);
Container.get(SourceControlPreferencesService).isSourceControlConnected = () => true;
sourceControlPreferencesService = Container.get(SourceControlPreferencesService);
await sourceControlPreferencesService.setPreferences({
connected: true,
keyGeneratorType: 'rsa',
});
});
describe('GET /sourceControl/preferences', () => {
@@ -65,19 +70,11 @@ describe('GET /sourceControl/preferences', () => {
});
test('refreshing key pairs should return new rsa key', async () => {
config.set('sourceControl.defaultKeyPairType', 'rsa');
await authOwnerAgent
.post('/source-control/generate-key-pair')
.send()
.expect(200)
.expect((res) => {
expect(
Container.get(SourceControlPreferencesService).getPreferences().keyGeneratorType,
).toBe('rsa');
expect(res.body.data).toHaveProperty('publicKey');
expect(res.body.data).toHaveProperty('keyGeneratorType');
expect(res.body.data.keyGeneratorType).toBe('rsa');
expect(res.body.data.publicKey).toContain('ssh-rsa');
});
const res = await authOwnerAgent.post('/source-control/generate-key-pair').send().expect(200);
expect(res.body.data).toHaveProperty('publicKey');
expect(res.body.data).toHaveProperty('keyGeneratorType');
expect(res.body.data.keyGeneratorType).toBe('rsa');
expect(res.body.data.publicKey).toContain('ssh-rsa');
});
});

View file

@@ -246,4 +246,67 @@ describe('validateValueAgainstSchema', () => {
// value should be type number
expect(typeof result).toEqual('number');
});
describe('when the mode is in Fixed mode, and the node is a resource mapper', () => {
const nodeType = {
description: {
properties: [
{
name: 'operation',
type: 'resourceMapper',
typeOptions: {
resourceMapper: {
mode: 'add',
},
},
},
],
},
} as unknown as INodeType;
const node = {
parameters: {
operation: {
schema: [
{ id: 'num', type: 'number', required: true },
{ id: 'str', type: 'string', required: true },
{ id: 'obj', type: 'object', required: true },
{ id: 'arr', type: 'array', required: true },
],
attemptToConvertTypes: true,
mappingMode: '',
value: '',
},
},
} as unknown as INode;
const parameterName = 'operation.value';
describe('should correctly validate values for', () => {
test.each([
{ num: 0 },
{ num: 23 },
{ num: -0 },
{ num: -Infinity },
{ num: Infinity },
{ str: '' },
{ str: ' ' },
{ str: 'hello' },
{ arr: [] },
{ obj: {} },
])('%s', (value) => {
expect(() =>
validateValueAgainstSchema(node, nodeType, value, parameterName, 0, 0),
).not.toThrow();
});
});
describe('should throw an error for', () => {
test.each([{ num: NaN }, { num: undefined }, { num: null }])('%s', (value) => {
expect(() =>
validateValueAgainstSchema(node, nodeType, value, parameterName, 0, 0),
).toThrow();
});
});
});
});

View file

@@ -44,7 +44,7 @@ const validateResourceMapperValue = (
!skipRequiredCheck &&
schemaEntry?.required === true &&
schemaEntry.type !== 'boolean' &&
!resolvedValue
(resolvedValue === undefined || resolvedValue === null)
) {
return {
valid: false,
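// Why the falsy check had to go – a minimal sketch (helper names assumed, not from this diff):
// the old `!resolvedValue` condition rejected legitimate values such as 0 and '', while the
// new check only treats genuinely missing values as invalid.
const isMissingOld = (value: unknown): boolean => !value;
const isMissingNew = (value: unknown): boolean => value === undefined || value === null;
isMissingOld(0); // true  – 0 wrongly failed the required-field check
isMissingNew(0); // false – 0 now passes, matching the new resource mapper tests above
isMissingOld(''); // true  – empty string wrongly failed
isMissingNew(''); // false – empty string now passes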

View file

@@ -32,6 +32,7 @@ export async function getAllCredentials(
): Promise<ICredentialsResponse[]> {
return await makeRestApiRequest(context, 'GET', '/credentials', {
...(includeScopes ? { includeScopes } : {}),
includeData: true,
...(filter ? { filter } : {}),
});
}

View file

@@ -33,7 +33,7 @@ export const pushWorkfolder = async (
export const pullWorkfolder = async (
context: IRestApiContext,
data: PullWorkFolderRequestDto,
): Promise<void> => {
): Promise<SourceControlledFile[]> => {
return await makeRestApiRequest(context, 'POST', `${sourceControlApiRoot}/pull-workfolder`, data);
};

View file

@@ -29,6 +29,7 @@ const props = withDefaults(
defineProps<{
data: ICredentialsResponse;
readOnly?: boolean;
needsSetup?: boolean;
}>(),
{
data: () => ({
@@ -146,6 +147,9 @@ function moveResource() {
<N8nBadge v-if="readOnly" class="ml-3xs" theme="tertiary" bold>
{{ locale.baseText('credentials.item.readonly') }}
</N8nBadge>
<N8nBadge v-if="needsSetup" class="ml-3xs" theme="warning">
{{ locale.baseText('credentials.item.needsSetup') }}
</N8nBadge>
</n8n-heading>
</template>
<div :class="$style.cardDescription">
@@ -195,10 +199,6 @@ function moveResource() {
.cardHeading {
font-size: var(--font-size-s);
padding: var(--spacing-s) 0 0;
span {
color: var(--color-text-light);
}
}
.cardDescription {

View file

@@ -3,7 +3,7 @@ import { waitFor } from '@testing-library/vue';
import userEvent from '@testing-library/user-event';
import { createTestingPinia } from '@pinia/testing';
import { merge } from 'lodash-es';
import { SOURCE_CONTROL_PULL_MODAL_KEY, STORES } from '@/constants';
import { SOURCE_CONTROL_PULL_MODAL_KEY, SOURCE_CONTROL_PUSH_MODAL_KEY, STORES } from '@/constants';
import { SETTINGS_STORE_DEFAULT_STATE } from '@/__tests__/utils';
import MainSidebarSourceControl from '@/components/MainSidebarSourceControl.vue';
import { useSourceControlStore } from '@/stores/sourceControl.store';
@@ -18,8 +18,9 @@ let rbacStore: ReturnType<typeof useRBACStore>;
const showMessage = vi.fn();
const showError = vi.fn();
const showToast = vi.fn();
vi.mock('@/composables/useToast', () => ({
useToast: () => ({ showMessage, showError }),
useToast: () => ({ showMessage, showError, showToast }),
}));
const renderComponent = createComponentRenderer(MainSidebarSourceControl);
@@ -131,5 +132,129 @@ describe('MainSidebarSourceControl', () => {
),
);
});
it('should open push modal when there are changes', async () => {
const status = [
{
id: '014da93897f146d2b880-baa374b9d02d',
name: 'vuelfow2',
type: 'workflow' as const,
status: 'created' as const,
location: 'local' as const,
conflict: false,
file: '/014da93897f146d2b880-baa374b9d02d.json',
updatedAt: '2025-01-09T13:12:24.580Z',
},
];
vi.spyOn(sourceControlStore, 'getAggregatedStatus').mockResolvedValueOnce(status);
const openModalSpy = vi.spyOn(uiStore, 'openModalWithData');
const { getAllByRole } = renderComponent({
pinia,
props: { isCollapsed: false },
});
await userEvent.click(getAllByRole('button')[1]);
await waitFor(() =>
expect(openModalSpy).toHaveBeenCalledWith(
expect.objectContaining({
name: SOURCE_CONTROL_PUSH_MODAL_KEY,
data: expect.objectContaining({
status,
}),
}),
),
);
});
it("should show user's feedback when pulling", async () => {
vi.spyOn(sourceControlStore, 'pullWorkfolder').mockResolvedValueOnce([
{
id: '014da93897f146d2b880-baa374b9d02d',
name: 'vuelfow2',
type: 'workflow',
status: 'created',
location: 'remote',
conflict: false,
file: '/014da93897f146d2b880-baa374b9d02d.json',
updatedAt: '2025-01-09T13:12:24.580Z',
},
{
id: 'a102c0b9-28ac-43cb-950e-195723a56d54',
name: 'Gmail account',
type: 'credential',
status: 'created',
location: 'remote',
conflict: false,
file: '/a102c0b9-28ac-43cb-950e-195723a56d54.json',
updatedAt: '2025-01-09T13:12:24.586Z',
},
{
id: 'variables',
name: 'variables',
type: 'variables',
status: 'modified',
location: 'remote',
conflict: false,
file: '/variable_stubs.json',
updatedAt: '2025-01-09T13:12:24.588Z',
},
{
id: 'mappings',
name: 'tags',
type: 'tags',
status: 'modified',
location: 'remote',
conflict: false,
file: '/tags.json',
updatedAt: '2024-12-16T12:53:12.155Z',
},
]);
const { getAllByRole } = renderComponent({
pinia,
props: { isCollapsed: false },
});
await userEvent.click(getAllByRole('button')[0]);
await waitFor(() => {
expect(showToast).toHaveBeenNthCalledWith(
1,
expect.objectContaining({
title: 'Finish setting up your new variables to use in workflows',
}),
);
expect(showToast).toHaveBeenNthCalledWith(
2,
expect.objectContaining({
title: 'Finish setting up your new credentials to use in workflows',
}),
);
expect(showToast).toHaveBeenNthCalledWith(
3,
expect.objectContaining({
message: '1 Workflow, 1 Credential, Variables, and Tags were pulled',
}),
);
});
});
it('should show feedback when there are no changes to pull', async () => {
vi.spyOn(sourceControlStore, 'pullWorkfolder').mockResolvedValueOnce([]);
const { getAllByRole } = renderComponent({
pinia,
props: { isCollapsed: false },
});
await userEvent.click(getAllByRole('button')[0]);
await waitFor(() => {
expect(showMessage).toHaveBeenCalledWith(
expect.objectContaining({
title: 'Up to date',
}),
);
});
});
});
});

View file

@@ -1,5 +1,5 @@
<script lang="ts" setup>
import { computed, nextTick, ref } from 'vue';
import { computed, h, nextTick, ref } from 'vue';
import { createEventBus } from 'n8n-design-system/utils';
import { useI18n } from '@/composables/useI18n';
import { hasPermission } from '@/utils/rbac/permissions';
@@ -9,6 +9,9 @@ import { useUIStore } from '@/stores/ui.store';
import { useSourceControlStore } from '@/stores/sourceControl.store';
import { SOURCE_CONTROL_PULL_MODAL_KEY, SOURCE_CONTROL_PUSH_MODAL_KEY } from '@/constants';
import { sourceControlEventBus } from '@/event-bus/source-control';
import { groupBy } from 'lodash-es';
import { RouterLink } from 'vue-router';
import { VIEWS } from '@/constants';
import type { SourceControlledFile } from '@n8n/api-types';
defineProps<{
@@ -64,48 +67,106 @@ async function pushWorkfolder() {
}
}
const variablesToast = {
title: i18n.baseText('settings.sourceControl.pull.upToDate.variables.title'),
message: h(RouterLink, { to: { name: VIEWS.VARIABLES } }, () =>
i18n.baseText('settings.sourceControl.pull.upToDate.variables.description'),
),
type: 'info' as const,
closeOnClick: true,
duration: 0,
};
const credentialsToast = {
title: i18n.baseText('settings.sourceControl.pull.upToDate.credentials.title'),
message: h(RouterLink, { to: { name: VIEWS.CREDENTIALS, query: { setupNeeded: 'true' } } }, () =>
i18n.baseText('settings.sourceControl.pull.upToDate.credentials.description'),
),
type: 'info' as const,
closeOnClick: true,
duration: 0,
};
const pullMessage = ({
credential,
tags,
variables,
workflow,
}: Partial<Record<SourceControlledFile['type'], SourceControlledFile[]>>) => {
const messages: string[] = [];
if (workflow?.length) {
messages.push(
i18n.baseText('generic.workflow', {
adjustToNumber: workflow.length,
interpolate: { count: workflow.length },
}),
);
}
if (credential?.length) {
messages.push(
i18n.baseText('generic.credential', {
adjustToNumber: credential.length,
interpolate: { count: credential.length },
}),
);
}
if (variables?.length) {
messages.push(i18n.baseText('generic.variable_plural'));
}
if (tags?.length) {
messages.push(i18n.baseText('generic.tag_plural'));
}
return [
new Intl.ListFormat(i18n.locale, { style: 'long', type: 'conjunction' }).format(messages),
'were pulled',
].join(' ');
};
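// Usage sketch for pullMessage (labels taken from the unit test earlier in this diff):
const exampleSummary = new Intl.ListFormat('en', { style: 'long', type: 'conjunction' }).format(
  ['1 Workflow', '1 Credential', 'Variables', 'Tags'],
);
// exampleSummary === '1 Workflow, 1 Credential, Variables, and Tags'
// so the success toast reads '1 Workflow, 1 Credential, Variables, and Tags were pulled'.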
async function pullWorkfolder() {
loadingService.startLoading();
loadingService.setLoadingText(i18n.baseText('settings.sourceControl.loading.pull'));
try {
const status: SourceControlledFile[] =
((await sourceControlStore.pullWorkfolder(false)) as unknown as SourceControlledFile[]) || [];
const status = await sourceControlStore.pullWorkfolder(false);
const statusWithoutLocallyCreatedWorkflows = status.filter((file) => {
return !(file.type === 'workflow' && file.status === 'created' && file.location === 'local');
});
if (statusWithoutLocallyCreatedWorkflows.length === 0) {
if (!status.length) {
toast.showMessage({
title: i18n.baseText('settings.sourceControl.pull.upToDate.title'),
message: i18n.baseText('settings.sourceControl.pull.upToDate.description'),
type: 'success',
});
} else {
toast.showMessage({
title: i18n.baseText('settings.sourceControl.pull.success.title'),
type: 'success',
});
const incompleteFileTypes = ['variables', 'credential'];
const hasVariablesOrCredentials = (status || []).some((file) => {
return incompleteFileTypes.includes(file.type);
});
if (hasVariablesOrCredentials) {
void nextTick(() => {
toast.showMessage({
message: i18n.baseText('settings.sourceControl.pull.oneLastStep.description'),
title: i18n.baseText('settings.sourceControl.pull.oneLastStep.title'),
type: 'info',
duration: 0,
showClose: true,
offset: 0,
});
});
}
return;
}
const { credential, tags, variables, workflow } = groupBy(status, 'type');
const toastMessages = [
...(variables?.length ? [variablesToast] : []),
...(credential?.length ? [credentialsToast] : []),
{
title: i18n.baseText('settings.sourceControl.pull.success.title'),
message: pullMessage({ credential, tags, variables, workflow }),
type: 'success' as const,
},
];
for (const message of toastMessages) {
/**
* the toasts stack in a reversed way, resulting in
* Success
* Credentials
* Variables
*/
toast.showToast(message);
await nextTick();
}
sourceControlEventBus.emit('pull');
} catch (error) {
const errorResponse = error.response;

View file

@@ -141,9 +141,10 @@ export function AIView(_nodes: SimplifiedNodeType[]): NodeView {
const chainNodes = getAiNodesBySubcategory(nodeTypesStore.allLatestNodeTypes, AI_CATEGORY_CHAINS);
const agentNodes = getAiNodesBySubcategory(nodeTypesStore.allLatestNodeTypes, AI_CATEGORY_AGENTS);
const websiteCategoryURL = templatesStore.websiteTemplateRepositoryParameters;
websiteCategoryURL.append('utm_user_role', 'AdvancedAI');
const websiteCategoryURLParams = templatesStore.websiteTemplateRepositoryParameters;
websiteCategoryURLParams.append('utm_user_role', 'AdvancedAI');
const websiteCategoryURL =
templatesStore.constructTemplateRepositoryURL(websiteCategoryURLParams);
return {
value: AI_NODE_CREATOR_VIEW,
@@ -158,7 +159,7 @@ export function AIView(_nodes: SimplifiedNodeType[]): NodeView {
icon: 'box-open',
description: i18n.baseText('nodeCreator.aiPanel.linkItem.description'),
name: 'ai_templates_root',
url: websiteCategoryURL.toString(),
url: websiteCategoryURL,
tag: {
type: 'info',
text: i18n.baseText('nodeCreator.triggerHelperPanel.manualTriggerTag'),

View file

@@ -4,7 +4,6 @@ import {
NodeConnectionType,
type IRunData,
type IRunExecutionData,
type NodeError,
type Workflow,
} from 'n8n-workflow';
import RunData from './RunData.vue';
@@ -120,14 +119,17 @@ const hasAiMetadata = computed(() => {
return false;
});
const hasError = computed(() =>
Boolean(
workflowRunData.value &&
node.value &&
workflowRunData.value[node.value.name]?.[props.runIndex]?.error,
),
);
// Default the initial output mode to logs if the node has an error and logs are available
const defaultOutputMode = computed<OutputType>(() => {
const hasError =
workflowRunData.value &&
node.value &&
(workflowRunData.value[node.value.name]?.[props.runIndex]?.error as NodeError);
return Boolean(hasError) && hasAiMetadata.value ? OUTPUT_TYPE.LOGS : OUTPUT_TYPE.REGULAR;
return hasError.value && hasAiMetadata.value ? OUTPUT_TYPE.LOGS : OUTPUT_TYPE.REGULAR;
});
const isNodeRunning = computed(() => {
@@ -216,7 +218,7 @@ const canPinData = computed(() => {
});
const allToolsWereUnusedNotice = computed(() => {
if (!node.value || runsCount.value === 0) return undefined;
if (!node.value || runsCount.value === 0 || hasError.value) return undefined;
// With pinned data there's no clear correct answer for whether
// we should use historic or current parents, so we don't show the notice,

View file

@@ -148,13 +148,7 @@ const outputError = computed(() => {
@click.stop="trackOpeningRelatedExecution(runMeta, 'ai')"
>
<N8nIcon icon="external-link-alt" size="xsmall" />
{{
i18n.baseText('runData.openSubExecutionWithId', {
interpolate: {
id: runMeta.subExecution?.executionId,
},
})
}}
{{ i18n.baseText('runData.openSubExecutionSingle') }}
</a>
</li>
<li v-if="(consumedTokensSum?.totalTokens ?? 0) > 0" :class="$style.tokensUsage">

View file

@@ -519,6 +519,13 @@ function onOpenContextMenu(event: MouseEvent) {
});
}
function onOpenSelectionContextMenu({ event }: { event: MouseEvent }) {
contextMenu.open(event, {
source: 'canvas',
nodeIds: selectedNodeIds.value,
});
}
function onOpenNodeContextMenu(
id: string,
event: MouseEvent,
@@ -692,6 +699,7 @@ provide(CanvasKey, {
@node-drag-stop="onNodeDragStop"
@node-click="onNodeClick"
@selection-drag-stop="onSelectionDragStop"
@selection-context-menu="onOpenSelectionContextMenu"
@dragover="onDragOver"
@drop="onDrop"
>

View file

@@ -44,7 +44,18 @@ const filtersLength = computed(() => {
}
const value = props.modelValue[key];
length += (Array.isArray(value) ? value.length > 0 : value !== '') ? 1 : 0;
if (value === true) {
length += 1;
}
if (Array.isArray(value) && value.length) {
length += 1;
}
if (typeof value === 'string' && value !== '') {
length += 1;
}
});
return length;

View file

@@ -168,13 +168,19 @@ const focusSearchInput = () => {
};
const hasAppliedFilters = (): boolean => {
return !!filterKeys.value.find(
(key) =>
key !== 'search' &&
(Array.isArray(props.filters[key])
? props.filters[key].length > 0
: props.filters[key] !== ''),
);
return !!filterKeys.value.find((key) => {
if (key === 'search') return false;
if (typeof props.filters[key] === 'boolean') {
return props.filters[key];
}
if (Array.isArray(props.filters[key])) {
return props.filters[key].length > 0;
}
return props.filters[key] !== '';
});
};
const setRowsPerPage = (numberOfRowsPerPage: number) => {

View file

@@ -995,6 +995,18 @@ describe('useCanvasOperations', () => {
expect(ndvStore.activeNodeName).toBe('Existing Node');
});
it('should set node as dirty when node is set active', () => {
const workflowsStore = mockedStore(useWorkflowsStore);
const node = createTestNode();
workflowsStore.getNodeById.mockImplementation(() => node);
const { setNodeActive } = useCanvasOperations({ router });
setNodeActive(node.id);
expect(workflowsStore.setNodePristine).toHaveBeenCalledWith(node.name, false);
});
});
describe('setNodeActiveByName', () => {

View file

@@ -381,6 +381,7 @@ export function useCanvasOperations({ router }: { router: ReturnType<typeof useR
return;
}
workflowsStore.setNodePristine(node.name, false);
setNodeActiveByName(node.name);
}
@@ -1923,7 +1924,7 @@ export function useCanvasOperations({ router }: { router: ReturnType<typeof useR
workflowsStore.setWorkflowExecutionData(data);
if (data.mode !== 'manual') {
if (!['manual', 'evaluation'].includes(data.mode)) {
workflowsStore.setWorkflowPinData({});
}

View file

@@ -624,8 +624,11 @@
"credentials.item.created": "Created",
"credentials.item.owner": "Owner",
"credentials.item.readonly": "Read only",
"credentials.item.needsSetup": "Needs first setup",
"credentials.search.placeholder": "Search credentials...",
"credentials.filters.type": "Type",
"credentials.filters.setup": "Needs first setup",
"credentials.filters.status": "Status",
"credentials.filters.active": "Some credentials may be hidden since filters are applied.",
"credentials.filters.active.reset": "Remove filters",
"credentials.sort.lastUpdated": "Sort by last updated",
@@ -1967,6 +1970,10 @@
"settings.sourceControl.pull.success.title": "Pulled successfully",
"settings.sourceControl.pull.upToDate.title": "Up to date",
"settings.sourceControl.pull.upToDate.description": "No workflow changes to pull from Git",
"settings.sourceControl.pull.upToDate.variables.title": "Finish setting up your new variables to use in workflows",
"settings.sourceControl.pull.upToDate.variables.description": "Review Variables",
"settings.sourceControl.pull.upToDate.credentials.title": "Finish setting up your new credentials to use in workflows",
"settings.sourceControl.pull.upToDate.credentials.description": "Review Credentials",
"settings.sourceControl.modals.pull.title": "Pull changes",
"settings.sourceControl.modals.pull.description": "These workflows will be updated, and any local changes to them will be overridden. To keep the local version, push it before pulling.",
"settings.sourceControl.modals.pull.description.learnMore": "More info",

View file

@@ -167,6 +167,10 @@ export const useTemplatesStore = defineStore(STORES.TEMPLATES, () => {
`${TEMPLATES_URLS.BASE_WEBSITE_URL}?${websiteTemplateRepositoryParameters.value.toString()}`,
);
const constructTemplateRepositoryURL = (params: URLSearchParams): string => {
return `${TEMPLATES_URLS.BASE_WEBSITE_URL}?${params.toString()}`;
};
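// Usage sketch (mirrors the AIView change earlier in this diff; exact query contents illustrative):
// const params = templatesStore.websiteTemplateRepositoryParameters;
// params.append('utm_user_role', 'AdvancedAI');
// templatesStore.constructTemplateRepositoryURL(params);
// -> `${TEMPLATES_URLS.BASE_WEBSITE_URL}?<existing params>&utm_user_role=AdvancedAI`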
const addCategories = (_categories: ITemplatesCategory[]): void => {
categories.value = _categories;
};
@@ -427,6 +431,7 @@ export const useTemplatesStore = defineStore(STORES.TEMPLATES, () => {
isSearchFinished,
hasCustomTemplatesHost,
websiteTemplateRepositoryURL,
constructTemplateRepositoryURL,
websiteTemplateRepositoryParameters,
addCategories,
addCollections,

View file

@@ -5,28 +5,27 @@ import CredentialsView from '@/views/CredentialsView.vue';
import { useUIStore } from '@/stores/ui.store';
import { mockedStore } from '@/__tests__/utils';
import { waitFor, within, fireEvent } from '@testing-library/vue';
import { CREDENTIAL_SELECT_MODAL_KEY, STORES } from '@/constants';
import { CREDENTIAL_SELECT_MODAL_KEY, STORES, VIEWS } from '@/constants';
import { useProjectsStore } from '@/stores/projects.store';
import type { Project } from '@/types/projects.types';
import { useRouter } from 'vue-router';
import { createRouter, createWebHistory } from 'vue-router';
import { flushPromises } from '@vue/test-utils';
import { CREDENTIAL_EMPTY_VALUE } from 'n8n-workflow';
vi.mock('@/composables/useGlobalEntityCreation', () => ({
useGlobalEntityCreation: () => ({
menu: [],
}),
}));
vi.mock('vue-router', async () => {
const actual = await vi.importActual('vue-router');
const push = vi.fn();
const replace = vi.fn();
return {
...actual,
// your mocked methods
useRouter: () => ({
push,
replace,
}),
};
const router = createRouter({
history: createWebHistory(),
routes: [
{
path: '/:credentialId?',
name: VIEWS.CREDENTIALS,
component: { template: '<div></div>' },
},
],
});
const initialState = {
@@ -36,14 +35,14 @@ const initialState = {
};
const renderComponent = createComponentRenderer(CredentialsView, {
global: { stubs: { ProjectHeader: true } },
global: { stubs: { ProjectHeader: true }, plugins: [router] },
});
let router: ReturnType<typeof useRouter>;
describe('CredentialsView', () => {
beforeEach(() => {
beforeEach(async () => {
createTestingPinia({ initialState });
router = useRouter();
await router.push('/');
await router.isReady();
});
afterEach(() => {
@@ -115,6 +114,7 @@ describe('CredentialsView', () => {
});
it('should update credentialId route param when opened', async () => {
const replaceSpy = vi.spyOn(router, 'replace');
const projectsStore = mockedStore(useProjectsStore);
projectsStore.isProjectHome = false;
projectsStore.currentProject = { scopes: ['credential:read'] } as Project;
@@ -137,8 +137,147 @@
*/
await fireEvent.click(getByTestId('resources-list-item'));
await waitFor(() =>
expect(router.replace).toHaveBeenCalledWith({ params: { credentialId: '1' } }),
expect(replaceSpy).toHaveBeenCalledWith(
expect.objectContaining({ params: { credentialId: '1' } }),
),
);
});
});
describe('filters', () => {
it('should filter by type', async () => {
await router.push({ name: VIEWS.CREDENTIALS, query: { type: ['test'] } });
const credentialsStore = mockedStore(useCredentialsStore);
credentialsStore.allCredentialTypes = [
{
name: 'test',
displayName: 'test',
properties: [],
},
];
credentialsStore.allCredentials = [
{
id: '1',
name: 'test',
type: 'test',
createdAt: '2021-05-05T00:00:00Z',
updatedAt: '2021-05-05T00:00:00Z',
scopes: ['credential:update'],
isManaged: false,
},
{
id: '1',
name: 'test',
type: 'another',
createdAt: '2021-05-05T00:00:00Z',
updatedAt: '2021-05-05T00:00:00Z',
scopes: ['credential:update'],
isManaged: false,
},
];
const { getAllByTestId } = renderComponent();
expect(getAllByTestId('resources-list-item').length).toBe(1);
});
it('should filter by setupNeeded', async () => {
await router.push({ name: VIEWS.CREDENTIALS, query: { setupNeeded: 'true' } });
const credentialsStore = mockedStore(useCredentialsStore);
credentialsStore.allCredentials = [
{
id: '1',
name: 'test',
type: 'test',
createdAt: '2021-05-05T00:00:00Z',
updatedAt: '2021-05-05T00:00:00Z',
scopes: ['credential:update'],
isManaged: false,
data: {} as unknown as string,
},
{
id: '1',
name: 'test',
type: 'another',
createdAt: '2021-05-05T00:00:00Z',
updatedAt: '2021-05-05T00:00:00Z',
scopes: ['credential:update'],
isManaged: false,
data: { anyKey: 'any' } as unknown as string,
},
];
const { getAllByTestId, getByTestId } = renderComponent();
await flushPromises();
expect(getAllByTestId('resources-list-item').length).toBe(1);
await fireEvent.click(getByTestId('credential-filter-setup-needed'));
await waitFor(() => expect(getAllByTestId('resources-list-item').length).toBe(2));
});
it('should filter by setupNeeded when object keys are empty', async () => {
await router.push({ name: VIEWS.CREDENTIALS, query: { setupNeeded: 'true' } });
const credentialsStore = mockedStore(useCredentialsStore);
credentialsStore.allCredentials = [
{
id: '1',
name: 'credential needs setup',
type: 'test',
createdAt: '2021-05-05T00:00:00Z',
updatedAt: '2021-05-05T00:00:00Z',
scopes: ['credential:update'],
isManaged: false,
data: { anyKey: '' } as unknown as string,
},
{
id: '2',
name: 'random',
type: 'test',
createdAt: '2021-05-05T00:00:00Z',
updatedAt: '2021-05-05T00:00:00Z',
scopes: ['credential:update'],
isManaged: false,
data: { anyKey: 'any value' } as unknown as string,
},
];
const { getAllByTestId, getByTestId } = renderComponent();
await flushPromises();
expect(getAllByTestId('resources-list-item').length).toBe(1);
expect(getByTestId('resources-list-item').textContent).toContain('credential needs setup');
await fireEvent.click(getByTestId('credential-filter-setup-needed'));
await waitFor(() => expect(getAllByTestId('resources-list-item').length).toBe(2));
});
it('should filter by setupNeeded when object keys are "CREDENTIAL_EMPTY_VALUE"', async () => {
await router.push({ name: VIEWS.CREDENTIALS, query: { setupNeeded: 'true' } });
const credentialsStore = mockedStore(useCredentialsStore);
credentialsStore.allCredentials = [
{
id: '1',
name: 'credential needs setup',
type: 'test',
createdAt: '2021-05-05T00:00:00Z',
updatedAt: '2021-05-05T00:00:00Z',
scopes: ['credential:update'],
isManaged: false,
data: { anyKey: CREDENTIAL_EMPTY_VALUE } as unknown as string,
},
{
id: '2',
name: 'random',
type: 'test',
createdAt: '2021-05-05T00:00:00Z',
updatedAt: '2021-05-05T00:00:00Z',
scopes: ['credential:update'],
isManaged: false,
data: { anyKey: 'any value' } as unknown as string,
},
];
const { getAllByTestId, getByTestId } = renderComponent();
await flushPromises();
expect(getAllByTestId('resources-list-item').length).toBe(1);
expect(getByTestId('resources-list-item').textContent).toContain('credential needs setup');
await fireEvent.click(getByTestId('credential-filter-setup-needed'));
await waitFor(() => expect(getAllByTestId('resources-list-item').length).toBe(2));
});
});
});

View file

@@ -1,13 +1,13 @@
<script setup lang="ts">
import { ref, computed, onMounted, watch } from 'vue';
import { useRoute, useRouter } from 'vue-router';
import { useRoute, useRouter, type LocationQueryRaw } from 'vue-router';
import type { ICredentialsResponse, ICredentialTypeMap } from '@/Interface';
import type { ICredentialType, ICredentialsDecrypted } from 'n8n-workflow';
import ResourcesListLayout, {
type IResource,
type IFilters,
} from '@/components/layouts/ResourcesListLayout.vue';
import CredentialCard from '@/components/CredentialCard.vue';
import type { ICredentialType } from 'n8n-workflow';
import {
CREDENTIAL_SELECT_MODAL_KEY,
CREDENTIAL_EDIT_MODAL_KEY,
@@ -27,6 +27,9 @@ import { useDocumentTitle } from '@/composables/useDocumentTitle';
import { useTelemetry } from '@/composables/useTelemetry';
import { useI18n } from '@/composables/useI18n';
import ProjectHeader from '@/components/Projects/ProjectHeader.vue';
import { N8nCheckbox } from 'n8n-design-system';
import { pickBy } from 'lodash-es';
import { CREDENTIAL_EMPTY_VALUE } from 'n8n-workflow';
const props = defineProps<{
credentialId?: string;
@@ -46,14 +49,26 @@ const router = useRouter();
const telemetry = useTelemetry();
const i18n = useI18n();
const filters = ref<IFilters>({
search: '',
homeProject: '',
type: [],
});
type Filters = IFilters & { type?: string[]; setupNeeded?: boolean };
const updateFilter = (state: Filters) => {
void router.replace({ query: pickBy(state) as LocationQueryRaw });
};
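// Illustration (filter values assumed): pickBy drops falsy entries, so only active
// filters are written to the URL query.
// updateFilter({ search: '', homeProject: '', type: ['httpHeaderAuth'], setupNeeded: true })
// -> router.replace({ query: { type: ['httpHeaderAuth'], setupNeeded: true } })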
const filters = computed<Filters>(
() =>
({ ...route.query, setupNeeded: route.query.setupNeeded?.toString() === 'true' }) as Filters,
);
const loading = ref(false);
const needsSetup = (data: string | undefined): boolean => {
const dataObject = data as unknown as ICredentialsDecrypted['data'];
if (!dataObject) return false;
if (Object.keys(dataObject).length === 0) return true;
return Object.values(dataObject).every((value) => !value || value === CREDENTIAL_EMPTY_VALUE);
};
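// Examples of the predicate above (shapes mirror the CredentialsView tests in this diff):
// needsSetup(undefined)                          -> false – no data returned, nothing to flag
// needsSetup({})                                 -> true  – credential saved without any data
// needsSetup({ apiKey: '' })                     -> true  – only blank values
// needsSetup({ apiKey: CREDENTIAL_EMPTY_VALUE }) -> true  – only redacted placeholders
// needsSetup({ apiKey: 'secret' })               -> false – at least one real value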
const allCredentials = computed<IResource[]>(() =>
credentialsStore.allCredentials.map((credential) => ({
id: credential.id,
@@ -66,6 +81,7 @@ const allCredentials = computed<IResource[]>(() =>
type: credential.type,
sharedWithProjects: credential.sharedWithProjects,
readOnly: !getResourcePermissions(credential.scopes).credential.update,
needsSetup: needsSetup(credential.data),
})),
);
@@ -84,7 +100,7 @@ const projectPermissions = computed(() =>
);
const setRouteCredentialId = (credentialId?: string) => {
void router.replace({ params: { credentialId } });
void router.replace({ params: { credentialId }, query: route.query });
};
const addCredential = () => {
@@ -98,7 +114,7 @@ listenForModalChanges({
store: uiStore,
onModalClosed(modalName) {
if ([CREDENTIAL_SELECT_MODAL_KEY, CREDENTIAL_EDIT_MODAL_KEY].includes(modalName as string)) {
void router.replace({ params: { credentialId: '' } });
void router.replace({ params: { credentialId: '' }, query: route.query });
}
},
});
@@ -121,9 +137,9 @@ watch(
);
const onFilter = (resource: IResource, newFilters: IFilters, matches: boolean): boolean => {
const iResource = resource as ICredentialsResponse;
const filtersToApply = newFilters as IFilters & { type: string[] };
if (filtersToApply.type.length > 0) {
const iResource = resource as ICredentialsResponse & { needsSetup: boolean };
const filtersToApply = newFilters as Filters;
if (filtersToApply.type && filtersToApply.type.length > 0) {
matches = matches && filtersToApply.type.includes(iResource.type);
}
@@ -136,6 +152,10 @@ const onFilter = (resource: IResource, newFilters: IFilters, matches: boolean):
credentialTypesById.value[iResource.type].displayName.toLowerCase().includes(searchString));
}
if (filtersToApply.setupNeeded) {
matches = matches && iResource.needsSetup;
}
return matches;
};
@@ -156,6 +176,14 @@ const initialize = async () => {
loading.value = false;
};
credentialsStore.$onAction(({ name, after }) => {
if (name === 'createNewCredential') {
after(() => {
void credentialsStore.fetchAllCredentials(route?.params?.projectId as string | undefined);
});
}
});
sourceControlStore.$onAction(({ name, after }) => {
if (name !== 'pullWorkfolder') return;
after(() => {
@@ -181,7 +209,7 @@ onMounted(() => {
:type-props="{ itemSize: 77 }"
:loading="loading"
:disabled="readOnlyEnv || !projectPermissions.credential.create"
@update:filters="filters = $event"
@update:filters="updateFilter"
>
<template #header>
<ProjectHeader />
@@ -192,6 +220,7 @@ onMounted(() => {
class="mb-2xs"
:data="data"
:read-only="data.readOnly"
:needs-setup="data.needsSetup"
@click="setRouteCredentialId"
/>
</template>
@@ -221,6 +250,23 @@ onMounted(() => {
/>
</N8nSelect>
</div>
<div class="mb-s">
<N8nInputLabel
:label="i18n.baseText('credentials.filters.status')"
:bold="false"
size="small"
color="text-base"
class="mb-3xs"
/>
<N8nCheckbox
:label="i18n.baseText('credentials.filters.setup')"
data-test-id="credential-filter-setup-needed"
:model-value="filters.setupNeeded"
@update:model-value="setKeyValue('setupNeeded', $event)"
>
</N8nCheckbox>
</div>
</template>
<template #empty>
<n8n-action-box

View file

@@ -31,6 +31,7 @@ import type {
IWorkflowDb,
IWorkflowTemplate,
NodeCreatorOpenSource,
NodeFilterType,
ToggleNodeCreatorOptions,
WorkflowDataWithTemplateId,
XYPosition,
@@ -996,7 +997,11 @@ async function onRevertAddNode({ node }: { node: INodeUi }) {
}
async function onSwitchActiveNode(nodeName: string) {
const node = workflowsStore.getNodeByName(nodeName);
if (!node) return;
setNodeActiveByName(nodeName);
selectNodes([node.id]);
}
async function onOpenSelectiveNodeCreator(node: string, connectionType: NodeConnectionType) {
@@ -1385,7 +1390,8 @@ async function onPostMessageReceived(messageEvent: MessageEvent) {
try {
// If this NodeView is used in preview mode (in iframe) it will not have access to the main app store
// so everything it needs has to be sent using post messages and passed down to child components
isProductionExecutionPreview.value = json.executionMode !== 'manual';
isProductionExecutionPreview.value =
json.executionMode !== 'manual' && json.executionMode !== 'evaluation';
await onOpenExecution(json.executionId);
canOpenNDV.value = json.canOpenNDV ?? true;
@@ -1554,13 +1560,15 @@ function registerCustomActions() {
registerCustomAction({
key: 'openSelectiveNodeCreator',
action: ({
creatorview: creatorView,
connectiontype: connectionType,
node,
}: {
creatorview: NodeFilterType;
connectiontype: NodeConnectionType;
node: string;
}) => {
void onOpenSelectiveNodeCreator(node, connectionType);
nodeCreatorStore.openSelectiveNodeCreator({ node, connectionType, creatorView });
},
});