Merge branch 'master' into node-1608-credential-parameters-tech-debt-project

Elias Meire 2024-08-29 16:32:01 +02:00
commit 28e36e002d
1342 changed files with 7523 additions and 5098 deletions


@ -2,7 +2,7 @@ name: Destroy Benchmark Env
on:
schedule:
- cron: '30 4 * * *'
- cron: '0 1 * * *'
workflow_dispatch:
permissions:
@ -25,16 +25,15 @@ jobs:
tenant-id: ${{ secrets.BENCHMARK_ARM_TENANT_ID }}
subscription-id: ${{ secrets.BENCHMARK_ARM_SUBSCRIPTION_ID }}
- run: Setup node
- run: corepack enable
- uses: actions/setup-node@v4.0.2
with:
node-version: 20.x
cache: pnpm
cache: 'pnpm'
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Destroy cloud env
if: github.event.inputs.debug == 'true'
run: pnpm destroy-cloud-env
working-directory: packages/@n8n/benchmark


@ -60,10 +60,10 @@ jobs:
- name: Run the benchmark with debug logging
if: github.event.inputs.debug == 'true'
run: pnpm run-in-cloud sqlite --debug
run: pnpm run-in-cloud --debug
working-directory: packages/@n8n/benchmark
- name: Run the benchmark
if: github.event.inputs.debug != 'true'
run: pnpm run-in-cloud sqlite
run: pnpm run-in-cloud
working-directory: packages/@n8n/benchmark


@ -4,7 +4,7 @@ on:
workflow_dispatch:
push:
branches:
- main
- master
paths:
- 'packages/@n8n/benchmark/**'
- 'pnpm-lock.yaml'


@ -64,7 +64,7 @@ jobs:
[[ "${{github.event.inputs.merge-master}}" == "true" ]] && git remote add upstream https://github.com/n8n-io/n8n.git -f; git merge upstream/master --allow-unrelated-histories || echo ""
shell: bash
- name: Build and push
- name: Build and push to DockerHub
uses: docker/build-push-action@v5.1.0
with:
context: .
@ -78,6 +78,21 @@ jobs:
cache-to: type=gha,mode=max
tags: ${{ secrets.DOCKER_USERNAME }}/n8n:${{ github.event.inputs.tag || 'nightly' }}
- name: Login to GitHub Container Registry
if: github.event.inputs.tag == 'nightly'
uses: docker/login-action@v3.0.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Push image to GHCR
if: github.event.inputs.tag == 'nightly'
run: |
docker buildx imagetools create \
--tag ghcr.io/${{ github.repository_owner }}/n8n:nightly \
${{ secrets.DOCKER_USERNAME }}/n8n:nightly
- name: Call Success URL - optionally
run: |
[[ "${{github.event.inputs.success-url}}" != "" ]] && curl -v ${{github.event.inputs.success-url}} || echo ""


@ -13,6 +13,8 @@ jobs:
runs-on: ubuntu-latest
if: github.event.pull_request.merged == true
timeout-minutes: 10
permissions:
id-token: write
env:
NPM_CONFIG_PROVENANCE: true
outputs:


@ -5,6 +5,7 @@
"dbaeumer.vscode-eslint",
"EditorConfig.EditorConfig",
"esbenp.prettier-vscode",
"mjmlio.vscode-mjml",
"Vue.volar"
]
}


@ -1,3 +1,34 @@
# [1.57.0](https://github.com/n8n-io/n8n/compare/n8n@1.56.0...n8n@1.57.0) (2024-08-28)
### Bug Fixes
* **AI Agent Node:** Allow AWS Bedrock Chat to be used with conversational agent ([#10489](https://github.com/n8n-io/n8n/issues/10489)) ([bdcc657](https://github.com/n8n-io/n8n/commit/bdcc657965af5f604aac1eaff7d937f69a08ce1c))
* **core:** Make boolean config value parsing backward-compatible ([#10560](https://github.com/n8n-io/n8n/issues/10560)) ([70b410f](https://github.com/n8n-io/n8n/commit/70b410f4b00dd599fcd4249aa105098aa262da66))
* **core:** Restore Redis cache key ([#10520](https://github.com/n8n-io/n8n/issues/10520)) ([873056a](https://github.com/n8n-io/n8n/commit/873056a92e52cc629d2873c960656d5f06d4728e))
* **core:** Scheduler tasks should not trigger on follower instances ([#10507](https://github.com/n8n-io/n8n/issues/10507)) ([3428f28](https://github.com/n8n-io/n8n/commit/3428f28a732f79e067b3cb515cc59d835de246a6))
* **core:** Stop explicit redis client disconnect on shutdown ([#10551](https://github.com/n8n-io/n8n/issues/10551)) ([f712812](https://github.com/n8n-io/n8n/commit/f71281221efb79d65d8d7610c292bc90cef13d7a))
* **editor:** Ensure `Datatable` component renders `All` option ([#10525](https://github.com/n8n-io/n8n/issues/10525)) ([bc27beb](https://github.com/n8n-io/n8n/commit/bc27beb6629883003a8991d7e840ffaa066d41ac))
* **editor:** Prevent Safari users from accessing the frontend over insecure contexts ([#10510](https://github.com/n8n-io/n8n/issues/10510)) ([a73b9a3](https://github.com/n8n-io/n8n/commit/a73b9a38d6c48e2f78593328e7d9933f2493dbb6))
* **editor:** Scale output item selector input width with value ([#10555](https://github.com/n8n-io/n8n/issues/10555)) ([52c574d](https://github.com/n8n-io/n8n/commit/52c574d83f344f03b0e39984bbc3ac0402e50791))
* **Google Sheets Trigger Node:** Show sheet name is too long error ([#10542](https://github.com/n8n-io/n8n/issues/10542)) ([4e15007](https://github.com/n8n-io/n8n/commit/4e1500757700ec984cdad8b9cfcd76ee00ae127e))
* **Wait Node:** Prevent waiting until invalid date ([#10523](https://github.com/n8n-io/n8n/issues/10523)) ([c0e7620](https://github.com/n8n-io/n8n/commit/c0e7620036738f8d0b382d0d0610b981dcbc29e0))
### Features
* Add new credentials for the HTTP Request node ([#9833](https://github.com/n8n-io/n8n/issues/9833)) ([26f1af3](https://github.com/n8n-io/n8n/commit/26f1af397b2b25e3394fc2dae91a5c281bf33d66))
* **AI Agent Node:** Add tutorial link to agent node ([#10493](https://github.com/n8n-io/n8n/issues/10493)) ([5c7cc36](https://github.com/n8n-io/n8n/commit/5c7cc36c23e58a47a1e71911e7303a1bd54f167e))
* **core:** Expose queue metrics for Prometheus ([#10559](https://github.com/n8n-io/n8n/issues/10559)) ([008c510](https://github.com/n8n-io/n8n/commit/008c510b7623fefb8c60730c7eac54dd9bb2e3fc))
* **editor:** Implement workflowSelector parameter type ([#10482](https://github.com/n8n-io/n8n/issues/10482)) ([84e54be](https://github.com/n8n-io/n8n/commit/84e54beac763f25399c9687f695f1e658e3ce434))
### Performance Improvements
* **core:** Make execution queries faster ([#9817](https://github.com/n8n-io/n8n/issues/9817)) ([dc7dc99](https://github.com/n8n-io/n8n/commit/dc7dc995d5e2ea8fbd0dcb54cfa8aa93ecb437c9))
# [1.56.0](https://github.com/n8n-io/n8n/compare/n8n@1.55.0...n8n@1.56.0) (2024-08-21)


@ -7,7 +7,7 @@ import {
WorkflowSharingModal,
WorkflowsPage,
} from '../pages';
import { getVisibleDropdown, getVisibleSelect } from '../utils';
import { getVisibleDropdown, getVisiblePopper, getVisibleSelect } from '../utils';
import * as projects from '../composables/projects';
/**
@ -180,7 +180,8 @@ describe('Sharing', { disableAutoLogin: true }, () => {
).should('be.visible');
credentialsModal.getters.usersSelect().click();
cy.getByTestId('project-sharing-info')
getVisiblePopper()
.find('[data-test-id="project-sharing-info"]')
.filter(':visible')
.should('have.length', 3)
.contains(INSTANCE_ADMIN.email)


@ -34,7 +34,7 @@ describe('AI Assistant::enabled', () => {
aiAssistant.getters.chatInputWrapper().should('not.exist');
aiAssistant.getters.closeChatButton().should('be.visible');
aiAssistant.getters.closeChatButton().click();
aiAssistant.getters.askAssistantChat().should('not.exist');
aiAssistant.getters.askAssistantChat().should('not.be.visible');
});
it('should resize assistant chat up', () => {
@ -162,13 +162,13 @@ describe('AI Assistant::enabled', () => {
cy.createFixtureWorkflow('aiAssistant/test_workflow.json');
wf.actions.openNode('Edit Fields');
ndv.getters.nodeExecuteButton().click();
aiAssistant.getters.nodeErrorViewAssistantButton().click();
aiAssistant.getters.nodeErrorViewAssistantButton().click({ force: true });
cy.wait('@chatRequest');
aiAssistant.getters.closeChatButton().click();
ndv.getters.backToCanvas().click();
wf.actions.openNode('Stop and Error');
ndv.getters.nodeExecuteButton().click();
aiAssistant.getters.nodeErrorViewAssistantButton().click();
aiAssistant.getters.nodeErrorViewAssistantButton().click({ force: true });
// Since we already have an active session, a warning should be shown
aiAssistant.getters.newAssistantSessionModal().should('be.visible');
aiAssistant.getters


@ -0,0 +1,35 @@
import { WorkflowsPage } from '../pages';
const workflowsPage = new WorkflowsPage();
describe('n8n.io iframe', () => {
describe('when telemetry is disabled', () => {
it('should not load the iframe when visiting /home/workflows', () => {
cy.overrideSettings({ telemetry: { enabled: false } });
cy.visit(workflowsPage.url);
cy.get('iframe').should('not.exist');
});
});
describe('when telemetry is enabled', () => {
it('should load the iframe when visiting /home/workflows', () => {
const testInstanceId = 'test-instance-id';
cy.overrideSettings({ telemetry: { enabled: true }, instanceId: testInstanceId });
const testUserId = Cypress.env('currentUserId');
const iframeUrl = `https://n8n.io/self-install?instanceId=${testInstanceId}&userId=${testUserId}`;
cy.intercept(iframeUrl, (req) => req.reply(200)).as('iframeRequest');
cy.visit(workflowsPage.url);
cy.get('iframe').should('exist').and('have.attr', 'src', iframeUrl);
cy.wait('@iframeRequest').its('response.statusCode').should('eq', 200);
});
});
});


@ -199,7 +199,7 @@ describe('NDV', () => {
.contains(key)
.should('be.visible');
});
getObjectValueItem().find('label').click();
getObjectValueItem().find('label').click({ force: true });
expandedObjectProps.forEach((key) => {
ndv.getters
.outputPanel()


@ -59,14 +59,18 @@ Cypress.Commands.add('waitForLoad', (waitForIntercepts = true) => {
Cypress.Commands.add('signin', ({ email, password }) => {
void Cypress.session.clearAllSavedSessions();
cy.session([email, password], () =>
cy.request({
method: 'POST',
url: `${BACKEND_BASE_URL}/rest/login`,
body: { email, password },
failOnStatusCode: false,
}),
);
cy.session([email, password], () => {
return cy
.request({
method: 'POST',
url: `${BACKEND_BASE_URL}/rest/login`,
body: { email, password },
failOnStatusCode: false,
})
.then((response) => {
Cypress.env('currentUserId', response.body.data.id);
});
});
});
Cypress.Commands.add('signinAsOwner', () => cy.signin(INSTANCE_OWNER));


@ -1,6 +1,6 @@
{
"name": "n8n-monorepo",
"version": "1.56.0",
"version": "1.57.0",
"private": true,
"engines": {
"node": ">=20.15",


@ -1,6 +1,6 @@
{
"name": "@n8n/n8n-benchmark",
"version": "1.0.0",
"version": "1.1.0",
"description": "Cli for running benchmark tests for n8n",
"main": "dist/index",
"scripts": {


@ -15,7 +15,7 @@
// @ts-check
import fs from 'fs';
import minimist from 'minimist';
import { $, sleep, which } from 'zx';
import { sleep, which } from 'zx';
import path from 'path';
import { SshClient } from './sshClient.mjs';
import { TerraformClient } from './terraformClient.mjs';
@ -61,7 +61,6 @@ async function ensureDependencies() {
}
/**
*
* @param {Config} config
* @param {BenchmarkEnv} benchmarkEnv
*/
@ -86,7 +85,32 @@ async function runBenchmarksOnVm(config, benchmarkEnv) {
// Give some time for the VM to be ready
await sleep(1000);
console.log('Running benchmarks...');
if (config.n8nSetupToUse === 'all') {
const availableSetups = readAvailableN8nSetups();
for (const n8nSetup of availableSetups) {
await runBenchmarkForN8nSetup({
config,
sshClient,
scriptsDir,
n8nSetup,
});
}
} else {
await runBenchmarkForN8nSetup({
config,
sshClient,
scriptsDir,
n8nSetup: config.n8nSetupToUse,
});
}
}
/**
* @param {{ config: Config; sshClient: any; scriptsDir: string; n8nSetup: string; }} opts
*/
async function runBenchmarkForN8nSetup({ config, sshClient, scriptsDir, n8nSetup }) {
console.log(`Running benchmarks for ${n8nSetup}...`);
const runScriptPath = path.join(scriptsDir, 'runOnVm.mjs');
const flags = {
@ -100,7 +124,7 @@ async function runBenchmarksOnVm(config, benchmarkEnv) {
.map(([key, value]) => `--${key}=${value}`)
.join(' ');
await sshClient.ssh(`npx zx ${runScriptPath} ${flagsString} ${config.n8nSetupToUse}`, {
await sshClient.ssh(`npx zx ${runScriptPath} ${flagsString} ${n8nSetup}`, {
// Test run should always log its output
verbose: true,
});
@ -138,10 +162,15 @@ function readAvailableN8nSetups() {
* @returns {Promise<Config>}
*/
async function parseAndValidateConfig() {
const args = minimist(process.argv.slice(2), {
boolean: ['debug'],
const args = minimist(process.argv.slice(3), {
boolean: ['debug', 'help'],
});
if (args.help) {
printUsage();
process.exit(0);
}
const n8nSetupToUse = await getAndValidateN8nSetup(args);
const isVerbose = args.debug || false;
const n8nTag = args.n8nTag || process.env.N8N_DOCKER_TAG || 'latest';
@ -163,10 +192,8 @@ async function parseAndValidateConfig() {
async function getAndValidateN8nSetup(args) {
// Last parameter is the n8n setup to use
const n8nSetupToUse = args._[args._.length - 1];
if (!n8nSetupToUse) {
printUsage();
process.exit(1);
if (!n8nSetupToUse || n8nSetupToUse === 'all') {
return 'all';
}
const availableSetups = readAvailableN8nSetups();
@ -182,19 +209,20 @@ async function getAndValidateN8nSetup(args) {
function printUsage() {
const availableSetups = readAvailableN8nSetups();
console.log('Usage: zx scripts/runInCloud.mjs <n8n setup name>');
console.log(' eg: zx scripts/runInCloud.mjs sqlite');
console.log('Usage: zx scripts/runInCloud.mjs [n8n setup name]');
console.log(' eg: zx scripts/runInCloud.mjs');
console.log('');
console.log('Options:');
console.log(
` [n8n setup name] Against which n8n setup to run the benchmarks. One of: ${['all', ...availableSetups].join(', ')}. Default is all`,
);
console.log(' --debug Enable verbose output');
console.log(' --n8nTag Docker tag for n8n image. Default is latest');
console.log(' --benchmarkTag Docker tag for benchmark cli image. Default is latest');
console.log(
' --k6ApiToken API token for k6 cloud. Default is read from K6_API_TOKEN env var. If omitted, k6 cloud will not be used.',
' --k6ApiToken API token for k6 cloud. Default is read from K6_API_TOKEN env var. If omitted, k6 cloud will not be used',
);
console.log('');
console.log('Available setups:');
console.log(` ${availableSetups.join(', ')}`);
}
main().catch(console.error);


@ -10,8 +10,8 @@ CURRENT_USER=$(whoami)
# Mount the data disk
if [ -d "/n8n" ]; then
echo "Data disk already mounted. Clearing it..."
rm -rf /n8n/*
rm -rf /n8n/.[!.]*
sudo rm -rf /n8n/*
sudo rm -rf /n8n/.[!.]*
else
sudo mkdir -p /n8n
sudo parted /dev/sdc --script mklabel gpt mkpart xfspart xfs 0% 100%


@ -0,0 +1,17 @@
services:
n8n:
image: ghcr.io/n8n-io/n8n:${N8N_VERSION:-latest}
environment:
- N8N_DIAGNOSTICS_ENABLED=false
- N8N_USER_FOLDER=/n8n
ports:
- 5678:5678
volumes:
- /n8n:/n8n
benchmark:
image: ghcr.io/n8n-io/n8n-benchmark:${N8N_BENCHMARK_VERSION:-latest}
depends_on:
- n8n
environment:
- N8N_BASE_URL=http://n8n:5678
- K6_API_TOKEN=${K6_API_TOKEN}


@ -4,6 +4,8 @@ services:
environment:
- N8N_DIAGNOSTICS_ENABLED=false
- N8N_USER_FOLDER=/n8n
- DB_SQLITE_POOL_SIZE=3
- DB_SQLITE_ENABLE_WAL=true
ports:
- 5678:5678
volumes:


@ -2,23 +2,19 @@
/**
* This script runs the benchmarks using a given docker compose setup
*/
// @ts-check
import path from 'path';
import { $, argv, fs } from 'zx';
import { $ } from 'zx';
const [n8nSetupToUse] = argv._;
if (!n8nSetupToUse) {
printUsage();
process.exit(1);
}
function printUsage() {
console.log('Usage: zx runOnVm.mjs <envName>');
console.log(' eg: zx runOnVm.mjs sqlite');
}
const paths = {
n8nSetupsDir: path.join(__dirname, 'n8nSetups'),
};
async function main() {
const composeFilePath = path.join(__dirname, 'n8nSetups', n8nSetupToUse);
const [n8nSetupToUse] = argv._;
validateN8nSetup(n8nSetupToUse);
const composeFilePath = path.join(paths.n8nSetupsDir, n8nSetupToUse);
const n8nTag = argv.n8nDockerTag || process.env.N8N_DOCKER_TAG || 'latest';
const benchmarkTag = argv.benchmarkDockerTag || process.env.BENCHMARK_DOCKER_TAG || 'latest';
const k6ApiToken = argv.k6ApiToken || process.env.K6_API_TOKEN || undefined;
@ -36,7 +32,7 @@ async function main() {
try {
await $$`docker-compose up -d n8n`;
await $$`docker-compose run benchmark run`;
await $$`docker-compose run benchmark run --scenarioNamePrefix=${n8nSetupToUse} `;
} catch (error) {
console.error('An error occurred while running the benchmarks:');
console.error(error);
@ -52,4 +48,28 @@ async function dumpN8nInstanceLogs($$) {
await $$`docker-compose logs n8n`;
}
function printUsage() {
const availableSetups = getAllN8nSetups();
console.log('Usage: zx runOnVm.mjs <n8n setup to use>');
console.log(` eg: zx runOnVm.mjs ${availableSetups[0]}`);
console.log('');
console.log('Available setups:');
console.log(availableSetups.join(', '));
}
/**
* @returns {string[]}
*/
function getAllN8nSetups() {
return fs.readdirSync(paths.n8nSetupsDir);
}
function validateN8nSetup(givenSetup) {
const availableSetups = getAllN8nSetups();
if (!availableSetups.includes(givenSetup)) {
printUsage();
process.exit(1);
}
}
main();


@ -16,10 +16,14 @@ export default class RunCommand extends Command {
description: 'Comma-separated list of test scenarios to run',
required: false,
}),
scenarioNamePrefix: Flags.string({
description: 'Prefix for the scenario name. Defaults to Unnamed',
required: false,
}),
};
async run() {
const config = loadConfig();
const config = await this.loadConfigAndMergeWithFlags();
const scenarioLoader = new ScenarioLoader();
const scenarioRunner = new ScenarioRunner(
@ -34,10 +38,22 @@ export default class RunCommand extends Command {
email: config.get('n8n.user.email'),
password: config.get('n8n.user.password'),
},
config.get('scenarioNamePrefix'),
);
const allScenarios = scenarioLoader.loadAll(config.get('testScenariosPath'));
await scenarioRunner.runManyScenarios(allScenarios);
}
private async loadConfigAndMergeWithFlags() {
const config = loadConfig();
const { flags } = await this.parse(RunCommand);
if (flags.scenarioNamePrefix) {
config.set('scenarioNamePrefix', flags.scenarioNamePrefix);
}
return config;
}
}


@ -31,6 +31,12 @@ const configSchema = {
},
},
},
scenarioNamePrefix: {
doc: 'Prefix for the scenario name',
format: String,
default: 'Unnamed',
env: 'N8N_BENCHMARK_SCENARIO_NAME_PREFIX',
},
k6: {
executablePath: {
doc: 'The path to the k6 binary',


@ -9,6 +9,11 @@ export type K6ExecutorOpts = {
n8nApiBaseUrl: string;
};
export type K6RunOpts = {
/** Name of the scenario run. Used e.g. when the run is reported to k6 cloud */
scenarioRunName: string;
};
/**
* Flag for the k6 CLI.
* @example ['--duration', '1m']
@ -36,8 +41,8 @@ export function handleSummary(data) {
constructor(private readonly opts: K6ExecutorOpts) {}
async executeTestScenario(scenario: Scenario) {
const augmentedTestScriptPath = this.augmentSummaryScript(scenario);
async executeTestScenario(scenario: Scenario, { scenarioRunName }: K6RunOpts) {
const augmentedTestScriptPath = this.augmentSummaryScript(scenario, scenarioRunName);
const runDirPath = path.dirname(augmentedTestScriptPath);
const flags: K6CliFlag[] = [['--quiet'], ['--duration', '1m'], ['--vus', '5']];
@ -62,7 +67,7 @@ export function handleSummary(data) {
console.log((chunk as Buffer).toString());
}
this.loadEndOfTestSummary(runDirPath, scenario.name);
this.loadEndOfTestSummary(runDirPath, scenarioRunName);
}
/**
@ -70,24 +75,24 @@ export function handleSummary(data) {
*
* @returns Absolute path to the augmented test script
*/
private augmentSummaryScript(scenario: Scenario) {
private augmentSummaryScript(scenario: Scenario, scenarioRunName: string) {
const fullTestScriptPath = path.join(scenario.scenarioDirPath, scenario.scriptPath);
const testScript = fs.readFileSync(fullTestScriptPath, 'utf8');
const summaryScript = this.handleSummaryScript.replace('{{scenarioName}}', scenario.name);
const summaryScript = this.handleSummaryScript.replace('{{scenarioName}}', scenarioRunName);
const augmentedTestScript = `${testScript}\n\n${summaryScript}`;
const tempFilePath = tmpfile(`${scenario.name}.ts`, augmentedTestScript);
const tempFilePath = tmpfile(`${scenarioRunName}.ts`, augmentedTestScript);
return tempFilePath;
}
private loadEndOfTestSummary(dir: string, scenarioName: string): K6EndOfTestSummary {
const summaryReportPath = path.join(dir, `${scenarioName}.summary.json`);
private loadEndOfTestSummary(dir: string, scenarioRunName: string): K6EndOfTestSummary {
const summaryReportPath = path.join(dir, `${scenarioRunName}.summary.json`);
const summaryReport = fs.readFileSync(summaryReportPath, 'utf8');
try {
return JSON.parse(summaryReport);
return JSON.parse(summaryReport) as K6EndOfTestSummary;
} catch (error) {
throw new Error(`Failed to parse the summary report at ${summaryReportPath}`);
}


@ -17,6 +17,7 @@ export class ScenarioRunner {
email: string;
password: string;
},
private readonly scenarioPrefix: string,
) {}
async runManyScenarios(scenarios: Scenario[]) {
@ -38,13 +39,25 @@ export class ScenarioRunner {
}
private async runSingleTestScenario(testDataImporter: ScenarioDataImporter, scenario: Scenario) {
console.log('Running scenario:', scenario.name);
const scenarioRunName = this.formTestScenarioRunName(scenario);
console.log('Running scenario:', scenarioRunName);
console.log('Loading and importing data');
const testData = await this.dataLoader.loadDataForScenario(scenario);
await testDataImporter.importTestScenarioData(testData.workflows);
console.log('Executing scenario script');
await this.k6Executor.executeTestScenario(scenario);
await this.k6Executor.executeTestScenario(scenario, {
scenarioRunName,
});
}
/**
* Forms a name for the scenario by combining prefix and scenario name.
* The benchmarks are run against different n8n setups, so we use the
* prefix to differentiate between them.
*/
private formTestScenarioRunName(scenario: Scenario) {
return `${this.scenarioPrefix}-${scenario.name}`;
}
}


@ -1,6 +1,6 @@
{
"name": "@n8n/config",
"version": "1.6.0",
"version": "1.7.0",
"scripts": {
"clean": "rimraf dist .turbo",
"dev": "pnpm watch",


@ -49,6 +49,14 @@ class PrometheusMetricsConfig {
/** Whether to include metrics derived from n8n's internal events */
@Env('N8N_METRICS_INCLUDE_MESSAGE_EVENT_BUS_METRICS')
includeMessageEventBusMetrics: boolean = false;
/** Whether to include metrics for jobs in scaling mode. Not supported in multi-main setup. */
@Env('N8N_METRICS_INCLUDE_QUEUE_METRICS')
includeQueueMetrics: boolean = false;
/** How often (in seconds) to update queue metrics. */
@Env('N8N_METRICS_QUEUE_METRICS_INTERVAL')
queueMetricsInterval: number = 20;
}
@Config


@ -49,19 +49,19 @@ class SmtpConfig {
export class TemplateConfig {
/** Overrides default HTML template for inviting new people (use full path) */
@Env('N8N_UM_EMAIL_TEMPLATES_INVITE')
invite: string = '';
'user-invited': string = '';
/** Overrides default HTML template for resetting password (use full path) */
@Env('N8N_UM_EMAIL_TEMPLATES_PWRESET')
passwordReset: string = '';
'password-reset-requested': string = '';
/** Overrides default HTML template for notifying that a workflow was shared (use full path) */
@Env('N8N_UM_EMAIL_TEMPLATES_WORKFLOW_SHARED')
workflowShared: string = '';
'workflow-shared': string = '';
/** Overrides default HTML template for notifying that credentials were shared (use full path) */
@Env('N8N_UM_EMAIL_TEMPLATES_CREDENTIALS_SHARED')
credentialsShared: string = '';
'credentials-shared': string = '';
}
@Config


@ -6,13 +6,24 @@ import { Container, Service } from 'typedi';
type Class = Function;
type Constructable<T = unknown> = new (rawValue: string) => T;
type PropertyKey = string | symbol;
type PropertyType = number | boolean | string | Class;
interface PropertyMetadata {
type: unknown;
type: PropertyType;
envName?: string;
}
const globalMetadata = new Map<Class, Map<PropertyKey, PropertyMetadata>>();
const readEnv = (envName: string) => {
if (envName in process.env) return process.env[envName];
// Read the value from a file, if "_FILE" environment variable is defined
const filePath = process.env[`${envName}_FILE`];
if (filePath) return readFileSync(filePath, 'utf8');
return undefined;
};
export const Config: ClassDecorator = (ConfigClass: Class) => {
const factory = function () {
const config = new (ConfigClass as new () => Record<PropertyKey, unknown>)();
@ -26,38 +37,28 @@ export const Config: ClassDecorator = (ConfigClass: Class) => {
if (typeof type === 'function' && globalMetadata.has(type)) {
config[key] = Container.get(type);
} else if (envName) {
let value: unknown = process.env[envName];
// Read the value from a file, if "_FILE" environment variable is defined
const filePath = process.env[`${envName}_FILE`];
if (filePath) {
value = readFileSync(filePath, 'utf8');
}
const value = readEnv(envName);
if (value === undefined) continue;
if (type === Number) {
value = Number(value);
if (isNaN(value as number)) {
// TODO: add a warning
value = undefined;
const parsed = Number(value);
if (isNaN(parsed)) {
console.warn(`Invalid number value for ${envName}: ${value}`);
} else {
config[key] = parsed;
}
} else if (type === Boolean) {
if (value !== 'true' && value !== 'false') {
// TODO: add a warning
value = undefined;
if (['true', '1'].includes(value.toLowerCase())) {
config[key] = true;
} else if (['false', '0'].includes(value.toLowerCase())) {
config[key] = false;
} else {
value = value === 'true';
console.warn(`Invalid boolean value for ${envName}: ${value}`);
}
} else if (type === Object) {
// eslint-disable-next-line n8n-local-rules/no-plain-errors
throw new Error(
`Invalid decorator metadata on key "${key as string}" on ${ConfigClass.name}\n Please use explicit typing on all config fields`,
);
} else if (type !== String && type !== Object) {
value = new (type as Constructable)(value as string);
}
if (value !== undefined) {
} else if (type === String) {
config[key] = value;
} else {
config[key] = new (type as Constructable)(value);
}
}
}
@ -70,7 +71,7 @@ export const Config: ClassDecorator = (ConfigClass: Class) => {
export const Nested: PropertyDecorator = (target: object, key: PropertyKey) => {
const ConfigClass = target.constructor;
const classMetadata = globalMetadata.get(ConfigClass) ?? new Map<PropertyKey, PropertyMetadata>();
const type = Reflect.getMetadata('design:type', target, key) as unknown;
const type = Reflect.getMetadata('design:type', target, key) as PropertyType;
classMetadata.set(key, { type });
globalMetadata.set(ConfigClass, classMetadata);
};
@ -81,7 +82,13 @@ export const Env =
const ConfigClass = target.constructor;
const classMetadata =
globalMetadata.get(ConfigClass) ?? new Map<PropertyKey, PropertyMetadata>();
const type = Reflect.getMetadata('design:type', target, key) as unknown;
const type = Reflect.getMetadata('design:type', target, key) as PropertyType;
if (type === Object) {
// eslint-disable-next-line n8n-local-rules/no-plain-errors
throw new Error(
`Invalid decorator metadata on key "${key as string}" on ${ConfigClass.name}\n Please use explicit typing on all config fields`,
);
}
classMetadata.set(key, { type, envName });
globalMetadata.set(ConfigClass, classMetadata);
};


@ -17,6 +17,10 @@ describe('GlobalConfig', () => {
process.env = originalEnv;
});
// deepCopy for diff to show plain objects
// eslint-disable-next-line n8n-local-rules/no-json-parse-json-stringify
const deepCopy = <T>(obj: T): T => JSON.parse(JSON.stringify(obj));
const defaultConfig: GlobalConfig = {
path: '/',
host: 'localhost',
@ -85,10 +89,10 @@ describe('GlobalConfig', () => {
},
},
template: {
credentialsShared: '',
invite: '',
passwordReset: '',
workflowShared: '',
'credentials-shared': '',
'user-invited': '',
'password-reset-requested': '',
'workflow-shared': '',
},
},
},
@ -161,6 +165,8 @@ describe('GlobalConfig', () => {
includeApiMethodLabel: false,
includeCredentialTypeLabel: false,
includeApiStatusCodeLabel: false,
includeQueueMetrics: false,
queueMetricsInterval: 20,
},
additionalNonUIRoutes: '',
disableProductionWebhooksOnMainProcess: false,
@ -218,10 +224,6 @@ describe('GlobalConfig', () => {
process.env = {};
const config = Container.get(GlobalConfig);
// deepCopy for diff to show plain objects
// eslint-disable-next-line n8n-local-rules/no-json-parse-json-stringify
const deepCopy = <T>(obj: T): T => JSON.parse(JSON.stringify(obj));
expect(deepCopy(config)).toEqual(defaultConfig);
expect(mockFs.readFileSync).not.toHaveBeenCalled();
});
@ -233,9 +235,11 @@ describe('GlobalConfig', () => {
DB_TABLE_PREFIX: 'test_',
NODES_INCLUDE: '["n8n-nodes-base.hackerNews"]',
DB_LOGGING_MAX_EXECUTION_TIME: '0',
N8N_METRICS: 'TRUE',
N8N_TEMPLATES_ENABLED: '0',
};
const config = Container.get(GlobalConfig);
expect(config).toEqual({
expect(deepCopy(config)).toEqual({
...defaultConfig,
database: {
logging: defaultConfig.database.logging,
@ -249,10 +253,21 @@ describe('GlobalConfig', () => {
tablePrefix: 'test_',
type: 'sqlite',
},
endpoints: {
...defaultConfig.endpoints,
metrics: {
...defaultConfig.endpoints.metrics,
enable: true,
},
},
nodes: {
...defaultConfig.nodes,
include: ['n8n-nodes-base.hackerNews'],
},
templates: {
...defaultConfig.templates,
enabled: false,
},
});
expect(mockFs.readFileSync).not.toHaveBeenCalled();
});
@ -265,7 +280,7 @@ describe('GlobalConfig', () => {
mockFs.readFileSync.calledWith(passwordFile, 'utf8').mockReturnValueOnce('password-from-file');
const config = Container.get(GlobalConfig);
expect(config).toEqual({
expect(deepCopy(config)).toEqual({
...defaultConfig,
database: {
...defaultConfig.database,


@ -0,0 +1,21 @@
import { Container } from 'typedi';
import { Config, Env } from '../src/decorators';
describe('decorators', () => {
beforeEach(() => {
Container.reset();
});
it('should throw when explicit typing is missing', () => {
expect(() => {
@Config
class InvalidConfig {
@Env('STRING_VALUE')
value = 'string';
}
Container.get(InvalidConfig);
}).toThrowError(
'Invalid decorator metadata on key "value" on InvalidConfig\n Please use explicit typing on all config fields',
);
});
});

View file

@ -1,6 +1,5 @@
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type {
ConnectionTypes,
INodeInputConfiguration,
INodeInputFilter,
IExecuteFunctions,
@ -28,16 +27,16 @@ import { toolsAgentExecute } from './agents/ToolsAgent/execute';
function getInputs(
agent: 'toolsAgent' | 'conversationalAgent' | 'openAiFunctionsAgent' | 'reActAgent' | 'sqlAgent',
hasOutputParser?: boolean,
): Array<ConnectionTypes | INodeInputConfiguration> {
): Array<NodeConnectionType | INodeInputConfiguration> {
interface SpecialInput {
type: ConnectionTypes;
type: NodeConnectionType;
filter?: INodeInputFilter;
required?: boolean;
}
const getInputData = (
inputs: SpecialInput[],
): Array<ConnectionTypes | INodeInputConfiguration> => {
): Array<NodeConnectionType | INodeInputConfiguration> => {
const displayNames: { [key: string]: string } = {
[NodeConnectionType.AiLanguageModel]: 'Model',
[NodeConnectionType.AiMemory]: 'Memory',


@ -19,7 +19,7 @@ export async function conversationalAgentExecute(
this: IExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing Conversational Agent');
this.logger.debug('Executing Conversational Agent');
const model = await this.getInputConnectionData(NodeConnectionType.AiLanguageModel, 0);
if (!isChatInstance(model)) {


@ -23,7 +23,7 @@ export async function openAiFunctionsAgentExecute(
this: IExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing OpenAi Functions Agent');
this.logger.debug('Executing OpenAi Functions Agent');
const model = (await this.getInputConnectionData(
NodeConnectionType.AiLanguageModel,
0,

View file

@ -22,7 +22,7 @@ export async function planAndExecuteAgentExecute(
this: IExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing PlanAndExecute Agent');
this.logger.debug('Executing PlanAndExecute Agent');
const model = (await this.getInputConnectionData(
NodeConnectionType.AiLanguageModel,
0,


@ -24,7 +24,7 @@ export async function reActAgentAgentExecute(
this: IExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing ReAct Agent');
this.logger.debug('Executing ReAct Agent');
const model = (await this.getInputConnectionData(NodeConnectionType.AiLanguageModel, 0)) as
| BaseLanguageModel


@ -29,7 +29,7 @@ const parseTablesString = (tablesString: string) =>
export async function sqlAgentAgentExecute(
this: IExecuteFunctions,
): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing SQL Agent');
this.logger.debug('Executing SQL Agent');
const model = (await this.getInputConnectionData(
NodeConnectionType.AiLanguageModel,


@ -76,7 +76,7 @@ async function extractBinaryMessages(ctx: IExecuteFunctions) {
}
export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing Tools Agent');
this.logger.debug('Executing Tools Agent');
const model = await this.getInputConnectionData(NodeConnectionType.AiLanguageModel, 0);
if (!isChatInstance(model) || !model.bindTools) {


@ -517,7 +517,7 @@ export class ChainLlm implements INodeType {
};
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing LLM Chain');
this.logger.debug('Executing LLM Chain');
const items = this.getInputData();
const returnData: INodeExecutionData[] = [];


@ -141,7 +141,7 @@ export class ChainRetrievalQa implements INodeType {
};
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing Retrieval QA Chain');
this.logger.debug('Executing Retrieval QA Chain');
const model = (await this.getInputConnectionData(
NodeConnectionType.AiLanguageModel,


@ -162,7 +162,7 @@ export class ChainSummarizationV1 implements INodeType {
}
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing Vector Store QA Chain');
this.logger.debug('Executing Vector Store QA Chain');
const type = this.getNodeParameter('type', 0) as 'map_reduce' | 'stuff' | 'refine';
const model = (await this.getInputConnectionData(


@ -311,7 +311,7 @@ export class ChainSummarizationV2 implements INodeType {
}
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing Summarization Chain V2');
this.logger.debug('Executing Summarization Chain V2');
const operationMode = this.getNodeParameter('operationMode', 0, 'nodeInputJson') as
| 'nodeInputJson'
| 'nodeInputBinary'


@ -178,7 +178,7 @@ export class DocumentBinaryInputLoader implements INodeType {
};
async supplyData(this: IExecuteFunctions): Promise<SupplyData> {
this.logger.verbose('Supply Data for Binary Input Loader');
this.logger.debug('Supply Data for Binary Input Loader');
const textSplitter = (await this.getInputConnectionData(
NodeConnectionType.AiTextSplitter,
0,


@ -80,7 +80,7 @@ export class DocumentJsonInputLoader implements INodeType {
};
async supplyData(this: IExecuteFunctions): Promise<SupplyData> {
this.logger.verbose('Supply Data for JSON Input Loader');
this.logger.debug('Supply Data for JSON Input Loader');
const textSplitter = (await this.getInputConnectionData(
NodeConnectionType.AiTextSplitter,
0,


@ -93,12 +93,12 @@ export class EmbeddingsAzureOpenAi implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.verbose('Supply data for embeddings');
const credentials = (await this.getCredentials('azureOpenAiApi')) as {
this.logger.debug('Supply data for embeddings');
const credentials = await this.getCredentials<{
apiKey: string;
resourceName: string;
apiVersion: string;
};
}>('azureOpenAiApi');
const modelName = this.getNodeParameter('model', itemIndex) as string;
const options = this.getNodeParameter('options', itemIndex, {}) as {


@ -100,9 +100,9 @@ export class EmbeddingsCohere implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.verbose('Supply data for embeddings Cohere');
this.logger.debug('Supply data for embeddings Cohere');
const modelName = this.getNodeParameter('modelName', itemIndex, 'embed-english-v2.0') as string;
const credentials = (await this.getCredentials('cohereApi')) as { apiKey: string };
const credentials = await this.getCredentials<{ apiKey: string }>('cohereApi');
const embeddings = new CohereEmbeddings({
apiKey: credentials.apiKey,
model: modelName,


@ -117,7 +117,7 @@ export class EmbeddingsGoogleGemini implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.verbose('Supply data for embeddings Google Gemini');
this.logger.debug('Supply data for embeddings Google Gemini');
const modelName = this.getNodeParameter(
'modelName',
itemIndex,


@ -116,7 +116,7 @@ export class EmbeddingsGooglePalm implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.verbose('Supply data for embeddings Google PaLM');
this.logger.debug('Supply data for embeddings Google PaLM');
const modelName = this.getNodeParameter(
'modelName',
itemIndex,


@ -82,7 +82,7 @@ export class EmbeddingsHuggingFaceInference implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.verbose('Supply data for embeddings HF Inference');
this.logger.debug('Supply data for embeddings HF Inference');
const model = this.getNodeParameter(
'modelName',
itemIndex,


@ -45,7 +45,7 @@ export class EmbeddingsOllama implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.verbose('Supply data for embeddings Ollama');
this.logger.debug('Supply data for embeddings Ollama');
const modelName = this.getNodeParameter('model', itemIndex) as string;
const credentials = await this.getCredentials('ollamaApi');


@ -171,7 +171,7 @@ export class EmbeddingsOpenAi implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.verbose('Supply data for embeddings');
this.logger.debug('Supply data for embeddings');
const credentials = await this.getCredentials('openAiApi');
const options = this.getNodeParameter('options', itemIndex, {}) as {


@ -133,11 +133,11 @@ export class LmChatAzureOpenAi implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = (await this.getCredentials('azureOpenAiApi')) as {
const credentials = await this.getCredentials<{
apiKey: string;
resourceName: string;
apiVersion: string;
};
}>('azureOpenAiApi');
const modelName = this.getNodeParameter('model', itemIndex) as string;
const options = this.getNodeParameter('options', itemIndex, {}) as {


@ -88,7 +88,7 @@ export class MemoryChatRetriever implements INodeType {
};
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing Chat Memory Retriever');
this.logger.debug('Executing Chat Memory Retriever');
const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
| BaseChatMemory


@ -74,7 +74,7 @@ export class MemoryPostgresChat implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = (await this.getCredentials('postgres')) as PostgresNodeCredentials;
const credentials = await this.getCredentials<PostgresNodeCredentials>('postgres');
const tableName = this.getNodeParameter('tableName', itemIndex, 'n8n_chat_histories') as string;
const sessionId = getSessionId(this, itemIndex);


@ -104,11 +104,11 @@ export class MemoryZep implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = (await this.getCredentials('zepApi')) as {
const credentials = await this.getCredentials<{
apiKey?: string;
apiUrl?: string;
cloud?: boolean;
};
}>('zepApi');
const nodeVersion = this.getNode().typeVersion;


@ -64,7 +64,7 @@ export class RetrieverContextualCompression implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.verbose('Supplying data for Contextual Compression Retriever');
this.logger.debug('Supplying data for Contextual Compression Retriever');
const model = (await this.getInputConnectionData(
NodeConnectionType.AiLanguageModel,


@ -83,7 +83,7 @@ export class RetrieverMultiQuery implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.verbose('Supplying data for MultiQuery Retriever');
this.logger.debug('Supplying data for MultiQuery Retriever');
const options = this.getNodeParameter('options', itemIndex, {}) as { queryCount?: number };


@ -57,7 +57,7 @@ export class RetrieverVectorStore implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.verbose('Supplying data for Vector Store Retriever');
this.logger.debug('Supplying data for Vector Store Retriever');
const topK = this.getNodeParameter('topK', itemIndex, 4) as number;
const vectorStore = (await this.getInputConnectionData(


@ -64,7 +64,7 @@ export class TextSplitterCharacterTextSplitter implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.verbose('Supply Data for Text Splitter');
this.logger.debug('Supply Data for Text Splitter');
const separator = this.getNodeParameter('separator', itemIndex) as string;
const chunkSize = this.getNodeParameter('chunkSize', itemIndex) as number;


@ -95,7 +95,7 @@ export class TextSplitterRecursiveCharacterTextSplitter implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.verbose('Supply Data for Text Splitter');
this.logger.debug('Supply Data for Text Splitter');
const chunkSize = this.getNodeParameter('chunkSize', itemIndex) as number;
const chunkOverlap = this.getNodeParameter('chunkOverlap', itemIndex) as number;


@ -57,7 +57,7 @@ export class TextSplitterTokenSplitter implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.verbose('Supply Data for Text Splitter');
this.logger.debug('Supply Data for Text Splitter');
const chunkSize = this.getNodeParameter('chunkSize', itemIndex) as number;
const chunkOverlap = this.getNodeParameter('chunkOverlap', itemIndex) as number;


@ -75,7 +75,7 @@ export class ChatTrigger extends Node {
}
];
})() }}`,
outputs: ['main'],
outputs: [NodeConnectionType.Main],
credentials: [
{
// eslint-disable-next-line n8n-nodes-base/node-class-description-credentials-name-unsuffixed


@ -14,7 +14,7 @@ export async function validateAuth(context: IWebhookFunctions) {
// Basic authorization is needed to call webhook
let expectedAuth: ICredentialDataDecryptedObject | undefined;
try {
expectedAuth = await context.getCredentials('httpBasicAuth');
expectedAuth = await context.getCredentials<ICredentialDataDecryptedObject>('httpBasicAuth');
} catch {}
if (expectedAuth === undefined || !expectedAuth.user || !expectedAuth.password) {


@ -97,7 +97,7 @@ export class VectorStorePineconeInsert implements INodeType {
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData(0);
this.logger.verbose('Executing data for Pinecone Insert Vector Store');
this.logger.debug('Executing data for Pinecone Insert Vector Store');
const namespace = this.getNodeParameter('pineconeNamespace', 0) as string;
const index = this.getNodeParameter('pineconeIndex', 0, '', { extractValue: true }) as string;


@ -85,7 +85,7 @@ export class VectorStorePineconeLoad implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.verbose('Supplying data for Pinecone Load Vector Store');
this.logger.debug('Supplying data for Pinecone Load Vector Store');
const namespace = this.getNodeParameter('pineconeNamespace', itemIndex) as string;
const index = this.getNodeParameter('pineconeIndex', itemIndex, '', {


@ -94,7 +94,7 @@ export class VectorStoreSupabaseInsert implements INodeType {
methods = { listSearch: { supabaseTableNameSearch } };
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing data for Supabase Insert Vector Store');
this.logger.debug('Executing data for Supabase Insert Vector Store');
const items = this.getInputData(0);
const tableName = this.getNodeParameter('tableName', 0, '', { extractValue: true }) as string;


@ -82,7 +82,7 @@ export class VectorStoreSupabaseLoad implements INodeType {
methods = { listSearch: { supabaseTableNameSearch } };
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.verbose('Supply Supabase Load Vector Store');
this.logger.debug('Supply Supabase Load Vector Store');
const tableName = this.getNodeParameter('tableName', itemIndex, '', {
extractValue: true,


@ -79,10 +79,10 @@ export const VectorStoreZep = createVectorStoreNode({
embeddingDimensions?: number;
}) || {};
const credentials = (await context.getCredentials('zepApi')) as {
const credentials = await context.getCredentials<{
apiKey?: string;
apiUrl: string;
};
}>('zepApi');
const zepConfig: IZepConfig = {
apiUrl: credentials.apiUrl,
@ -102,10 +102,10 @@ export const VectorStoreZep = createVectorStoreNode({
embeddingDimensions?: number;
}) || {};
const credentials = (await context.getCredentials('zepApi')) as {
const credentials = await context.getCredentials<{
apiKey?: string;
apiUrl: string;
};
}>('zepApi');
const zepConfig = {
apiUrl: credentials.apiUrl,


@ -101,7 +101,7 @@ export class VectorStoreZepInsert implements INodeType {
};
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing data for Zep Insert Vector Store');
this.logger.debug('Executing data for Zep Insert Vector Store');
const items = this.getInputData(0);
const collectionName = this.getNodeParameter('collectionName', 0) as string;
const options =
@ -110,10 +110,10 @@ export class VectorStoreZepInsert implements INodeType {
embeddingDimensions?: number;
}) || {};
const credentials = (await this.getCredentials('zepApi')) as {
const credentials = await this.getCredentials<{
apiKey?: string;
apiUrl: string;
};
}>('zepApi');
const documentInput = (await this.getInputConnectionData(NodeConnectionType.AiDocument, 0)) as
| N8nJsonLoader


@ -84,7 +84,7 @@ export class VectorStoreZepLoad implements INodeType {
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.verbose('Supplying data for Zep Load Vector Store');
this.logger.debug('Supplying data for Zep Load Vector Store');
const collectionName = this.getNodeParameter('collectionName', itemIndex) as string;
@ -93,10 +93,10 @@ export class VectorStoreZepLoad implements INodeType {
embeddingDimensions?: number;
}) || {};
const credentials = (await this.getCredentials('zepApi')) as {
const credentials = await this.getCredentials<{
apiKey?: string;
apiUrl: string;
};
}>('zepApi');
const embeddings = (await this.getInputConnectionData(
NodeConnectionType.AiEmbedding,
0,


@ -90,7 +90,7 @@ export const versionDescription: INodeTypeDescription = {
},
},
inputs: `={{(${configureNodeInputs})($parameter.resource, $parameter.operation, $parameter.hideTools)}}`,
outputs: ['main'],
outputs: [NodeConnectionType.Main],
credentials: [
{
name: 'openAiApi',


@ -1,6 +1,6 @@
{
"name": "@n8n/n8n-nodes-langchain",
"version": "1.56.0",
"version": "1.57.0",
"description": "",
"main": "index.js",
"scripts": {


@ -1,5 +1,5 @@
import { NodeOperationError, NodeConnectionType } from 'n8n-workflow';
import type { ConnectionTypes, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import type { Tool } from '@langchain/core/tools';
import type { BaseMessage } from '@langchain/core/messages';
@ -31,7 +31,7 @@ export async function callMethodAsync<T>(
this: T,
parameters: {
executeFunctions: IExecuteFunctions;
connectionType: ConnectionTypes;
connectionType: NodeConnectionType;
currentNodeRunIndex: number;
method: (...args: any[]) => Promise<unknown>;
arguments: unknown[];
@ -78,7 +78,7 @@ export function callMethodSync<T>(
this: T,
parameters: {
executeFunctions: IExecuteFunctions;
connectionType: ConnectionTypes;
connectionType: NodeConnectionType;
currentNodeRunIndex: number;
method: (...args: any[]) => T;
arguments: unknown[];
@ -123,7 +123,7 @@ export function logWrapper(
) {
return new Proxy(originalInstance, {
get: (target, prop) => {
let connectionType: ConnectionTypes | undefined;
let connectionType: NodeConnectionType | undefined;
// ========== BaseChatMemory ==========
if (isBaseChatMemory(originalInstance)) {
if (prop === 'loadMemoryVariables' && 'loadMemoryVariables' in target) {


@ -19,6 +19,8 @@ module.exports = {
],
rules: {
'unicorn/filename-case': ['error', { case: 'kebabCase' }],
'n8n-local-rules/no-dynamic-import-template': 'error',
'n8n-local-rules/misplaced-n8n-typeorm-import': 'error',
'n8n-local-rules/no-type-unsafe-event-emitter': 'error',
@ -39,6 +41,12 @@ module.exports = {
},
overrides: [
{
files: ['./src/databases/migrations/**/*.ts'],
rules: {
'unicorn/filename-case': 'off',
},
},
{
files: ['./src/databases/**/*.ts', './test/**/*.ts', './src/**/__tests__/**/*.ts'],
rules: {


@ -1,6 +1,6 @@
{
"name": "n8n",
"version": "1.56.0",
"version": "1.57.0",
"description": "n8n Workflow Automation Tool",
"main": "dist/index",
"types": "dist/index.d.ts",
@ -78,6 +78,7 @@
"chokidar": "^3.5.2",
"concurrently": "^8.2.0",
"ioredis-mock": "^8.8.1",
"mjml": "^4.15.3",
"ts-essentials": "^7.0.3"
},
"dependencies": {


@ -3,17 +3,18 @@ import { writeFileSync } from 'fs';
import { fileURLToPath } from 'url';
import shell from 'shelljs';
import { rawTimeZones } from '@vvo/tzdb';
import glob from 'fast-glob';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const ROOT_DIR = path.resolve(__dirname, '..');
const SPEC_FILENAME = 'openapi.yml';
const SPEC_THEME_FILENAME = 'swaggerTheme.css';
const SPEC_THEME_FILENAME = 'swagger-theme.css';
const publicApiEnabled = process.env.N8N_PUBLIC_API_DISABLED !== 'true';
copyUserManagementEmailTemplates();
generateUserManagementEmailTemplates();
generateTimezoneData();
if (publicApiEnabled) {
@ -21,26 +22,35 @@ if (publicApiEnabled) {
bundleOpenApiSpecs();
}
function copyUserManagementEmailTemplates() {
const templates = {
source: path.resolve(ROOT_DIR, 'src', 'user-management', 'email', 'templates'),
destination: path.resolve(ROOT_DIR, 'dist', 'user-management', 'email'),
};
function generateUserManagementEmailTemplates() {
const sourceDir = path.resolve(ROOT_DIR, 'src', 'user-management', 'email', 'templates');
const destinationDir = path.resolve(ROOT_DIR, 'dist', 'user-management', 'email', 'templates');
shell.cp('-r', templates.source, templates.destination);
shell.mkdir('-p', destinationDir);
const templates = glob.sync('*.mjml', { cwd: sourceDir });
templates.forEach((template) => {
if (template.startsWith('_')) return;
const source = path.resolve(sourceDir, template);
const destination = path.resolve(destinationDir, template.replace(/\.mjml$/, '.handlebars'));
const command = `pnpm mjml --output ${destination} ${source}`;
shell.exec(command, { silent: false });
});
shell.cp(path.resolve(sourceDir, 'n8n-logo.png'), destinationDir);
}
function copySwaggerTheme() {
const swaggerTheme = {
source: path.resolve(ROOT_DIR, 'src', 'PublicApi', SPEC_THEME_FILENAME),
destination: path.resolve(ROOT_DIR, 'dist', 'PublicApi'),
source: path.resolve(ROOT_DIR, 'src', 'public-api', SPEC_THEME_FILENAME),
destination: path.resolve(ROOT_DIR, 'dist', 'public-api'),
};
shell.cp('-r', swaggerTheme.source, swaggerTheme.destination);
}
function bundleOpenApiSpecs() {
const publicApiDir = path.resolve(ROOT_DIR, 'src', 'PublicApi');
const publicApiDir = path.resolve(ROOT_DIR, 'src', 'public-api');
shell
.find(publicApiDir)


@ -3,8 +3,8 @@ import PCancelable from 'p-cancelable';
import { v4 as uuid } from 'uuid';
import type { IExecuteResponsePromiseData, IRun } from 'n8n-workflow';
import { createDeferredPromise } from 'n8n-workflow';
import type { IWorkflowExecutionDataProcess } from '@/Interfaces';
import type { ExecutionRepository } from '@db/repositories/execution.repository';
import type { IWorkflowExecutionDataProcess } from '@/interfaces';
import type { ExecutionRepository } from '@/databases/repositories/execution.repository';
import { mock } from 'jest-mock-extended';
import { ConcurrencyControlService } from '@/concurrency/concurrency-control.service';
import { mockInstance } from '@test/mocking';


@ -7,13 +7,13 @@ import type {
INode,
INodeProperties,
} from 'n8n-workflow';
import { deepCopy } from 'n8n-workflow';
import { NodeConnectionType, deepCopy } from 'n8n-workflow';
import { Workflow } from 'n8n-workflow';
import { CredentialsHelper } from '@/credentials-helper';
import { NodeTypes } from '@/node-types';
import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials';
import { CredentialsRepository } from '@db/repositories/credentials.repository';
import { SharedCredentialsRepository } from '@db/repositories/sharedCredentials.repository';
import { CredentialsRepository } from '@/databases/repositories/credentials.repository';
import { SharedCredentialsRepository } from '@/databases/repositories/shared-credentials.repository';
import { mockInstance } from '@test/mocking';
describe('CredentialsHelper', () => {
@ -34,8 +34,8 @@ describe('CredentialsHelper', () => {
name: 'Set',
color: '#0000FF',
},
inputs: ['main'],
outputs: ['main'],
inputs: [NodeConnectionType.Main],
outputs: [NodeConnectionType.Main],
properties: [
{
displayName: 'Value1',


@ -1,9 +1,9 @@
import { WaitTracker } from '@/wait-tracker';
import { mock } from 'jest-mock-extended';
import type { ExecutionRepository } from '@/databases/repositories/execution.repository';
import type { IExecutionResponse } from '@/Interfaces';
import type { IExecutionResponse } from '@/interfaces';
import { OrchestrationService } from '@/services/orchestration.service';
import type { MultiMainSetup } from '@/services/orchestration/main/MultiMainSetup.ee';
import type { MultiMainSetup } from '@/services/orchestration/main/multi-main-setup.ee';
jest.useFakeTimers();


@ -1,6 +1,6 @@
import { type Workflow } from 'n8n-workflow';
import { getExecutionStartNode } from '@/workflow-helpers';
import type { IWorkflowExecutionDataProcess } from '@/Interfaces';
import type { IWorkflowExecutionDataProcess } from '@/interfaces';
describe('WorkflowHelpers', () => {
describe('getExecutionStartNode', () => {


@ -1,10 +1,10 @@
import Container from 'typedi';
import { WorkflowHooks, type ExecutionError, type IWorkflowExecuteHooks } from 'n8n-workflow';
import type { User } from '@db/entities/User';
import type { User } from '@/databases/entities/user';
import { WorkflowRunner } from '@/workflow-runner';
import config from '@/config';
import * as testDb from '@test-integration/testDb';
import * as testDb from '@test-integration/test-db';
import { setupTestServer } from '@test-integration/utils';
import { createUser } from '@test-integration/db/users';
import { createWorkflow } from '@test-integration/db/workflows';


@ -8,8 +8,8 @@ import isbot from 'isbot';
import config from '@/config';
import { N8N_VERSION, TEMPLATES_DIR, inDevelopment, inTest } from '@/constants';
import * as Db from '@/Db';
import { N8nInstanceType } from '@/Interfaces';
import * as Db from '@/db';
import { N8nInstanceType } from '@/interfaces';
import { ExternalHooks } from '@/external-hooks';
import { send, sendErrorResponse } from '@/response-helper';
import { rawBodyReader, bodyParser, corsMiddleware } from '@/middlewares';


@ -20,9 +20,9 @@ import type {
IExecutionDb,
IExecutionsCurrentSummary,
IWorkflowExecutionDataProcess,
} from '@/Interfaces';
} from '@/interfaces';
import { isWorkflowIdValid } from '@/utils';
import { ExecutionRepository } from '@db/repositories/execution.repository';
import { ExecutionRepository } from '@/databases/repositories/execution.repository';
import { Logger } from '@/logger';
import { ConcurrencyControlService } from './concurrency/concurrency-control.service';
import config from './config';


@ -26,11 +26,11 @@ import {
ApplicationError,
} from 'n8n-workflow';
import type { IWorkflowDb } from '@/Interfaces';
import type { IWorkflowDb } from '@/interfaces';
import * as WebhookHelpers from '@/webhooks/webhook-helpers';
import * as WorkflowExecuteAdditionalData from '@/workflow-execute-additional-data';
import type { WorkflowEntity } from '@db/entities/WorkflowEntity';
import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
import { ActiveExecutions } from '@/active-executions';
import { ExecutionService } from './executions/execution.service';
import {
@@ -42,10 +42,10 @@ import { NodeTypes } from '@/node-types';
import { ExternalHooks } from '@/external-hooks';
import { WebhookService } from '@/webhooks/webhook.service';
import { Logger } from './logger';
import { WorkflowRepository } from '@db/repositories/workflow.repository';
import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import { OrchestrationService } from '@/services/orchestration.service';
import { ActivationErrorsService } from '@/activation-errors.service';
import { ActiveWorkflowsService } from '@/services/activeWorkflows.service';
import { ActiveWorkflowsService } from '@/services/active-workflows.service';
import { WorkflowExecutionService } from '@/workflows/workflow-execution.service';
import { WorkflowStaticDataService } from '@/workflows/workflow-static-data.service';
import { OnShutdown } from '@/decorators/on-shutdown';
@@ -95,7 +95,7 @@ export class ActiveWorkflowManager {
*/
async removeAll() {
let activeWorkflowIds: string[] = [];
this.logger.verbose('Call to remove all active workflows received (removeAll)');
this.logger.debug('Call to remove all active workflows received (removeAll)');
activeWorkflowIds.push(...this.activeWorkflows.allActiveWorkflows());
@@ -437,7 +437,7 @@ export class ActiveWorkflowManager {
});
if (wasActivated) {
this.logger.verbose(`Successfully started workflow ${dbWorkflow.display()}`, {
this.logger.debug(`Successfully started workflow ${dbWorkflow.display()}`, {
workflowName: dbWorkflow.name,
workflowId: dbWorkflow.id,
});
@@ -469,7 +469,7 @@ export class ActiveWorkflowManager {
}
}
this.logger.verbose('Finished activating workflows (startup)');
this.logger.debug('Finished activating workflows (startup)');
}
async clearAllActivationErrors() {
@@ -800,7 +800,7 @@ export class ActiveWorkflowManager {
getPollFunctions,
);
this.logger.verbose(`Workflow ${dbWorkflow.display()} activated`, {
this.logger.debug(`Workflow ${dbWorkflow.display()} activated`, {
workflowId: dbWorkflow.id,
workflowName: dbWorkflow.name,
});

View file

@@ -5,9 +5,9 @@ import type { NextFunction, Response } from 'express';
import { AuthService } from '@/auth/auth.service';
import config from '@/config';
import { AUTH_COOKIE_NAME, Time } from '@/constants';
import type { User } from '@db/entities/User';
import type { InvalidAuthTokenRepository } from '@db/repositories/invalidAuthToken.repository';
import type { UserRepository } from '@db/repositories/user.repository';
import type { User } from '@/databases/entities/user';
import type { InvalidAuthTokenRepository } from '@/databases/repositories/invalid-auth-token.repository';
import type { UserRepository } from '@/databases/repositories/user.repository';
import { JwtService } from '@/services/jwt.service';
import type { UrlService } from '@/services/url.service';
import type { AuthenticatedRequest } from '@/requests';

View file

@@ -5,9 +5,9 @@ import { JsonWebTokenError, TokenExpiredError } from 'jsonwebtoken';
import config from '@/config';
import { AUTH_COOKIE_NAME, RESPONSE_ERROR_MESSAGES, Time } from '@/constants';
import type { User } from '@db/entities/User';
import { InvalidAuthTokenRepository } from '@db/repositories/invalidAuthToken.repository';
import { UserRepository } from '@db/repositories/user.repository';
import type { User } from '@/databases/entities/user';
import { InvalidAuthTokenRepository } from '@/databases/repositories/invalid-auth-token.repository';
import { UserRepository } from '@/databases/repositories/user.repository';
import { AuthError } from '@/errors/response-errors/auth.error';
import { ForbiddenError } from '@/errors/response-errors/forbidden.error';
import { License } from '@/license';

View file

@@ -1,7 +1,7 @@
import { Container } from 'typedi';
import type { Response } from 'express';
import type { User } from '@db/entities/User';
import type { User } from '@/databases/entities/user';
import { AuthService } from './auth.service';
// This method is still used by cloud hooks.

View file

@@ -1,8 +1,8 @@
import type { User } from '@db/entities/User';
import type { User } from '@/databases/entities/user';
import { PasswordUtility } from '@/services/password.utility';
import { Container } from 'typedi';
import { isLdapLoginEnabled } from '@/ldap/helpers.ee';
import { UserRepository } from '@db/repositories/user.repository';
import { UserRepository } from '@/databases/repositories/user.repository';
import { AuthError } from '@/errors/response-errors/auth.error';
import { EventService } from '@/events/event.service';

View file

@@ -10,7 +10,7 @@ import {
createLdapAuthIdentity,
updateLdapUserOnLocalDb,
} from '@/ldap/helpers.ee';
import type { User } from '@db/entities/User';
import type { User } from '@/databases/entities/user';
import { EventService } from '@/events/event.service';
export const handleLdapLogin = async (

View file

@@ -7,23 +7,23 @@ import { BinaryDataService, InstanceSettings, ObjectStoreService } from 'n8n-cor
import type { AbstractServer } from '@/abstract-server';
import { Logger } from '@/logger';
import config from '@/config';
import * as Db from '@/Db';
import * as Db from '@/db';
import * as CrashJournal from '@/crash-journal';
import { LICENSE_FEATURES, inDevelopment, inTest } from '@/constants';
import { initErrorHandling } from '@/error-reporting';
import { ExternalHooks } from '@/external-hooks';
import { NodeTypes } from '@/node-types';
import { LoadNodesAndCredentials } from '@/load-nodes-and-credentials';
import type { N8nInstanceType } from '@/Interfaces';
import type { N8nInstanceType } from '@/interfaces';
import { PostHogClient } from '@/posthog';
import { InternalHooks } from '@/internal-hooks';
import { License } from '@/license';
import { ExternalSecretsManager } from '@/external-secrets/external-secrets-manager.ee';
import { initExpressionEvaluator } from '@/expression-evaluator';
import { generateHostInstanceId } from '@db/utils/generators';
import { generateHostInstanceId } from '@/databases/utils/generators';
import { WorkflowHistoryManager } from '@/workflows/workflow-history/workflow-history-manager.ee';
import { ShutdownService } from '@/shutdown/shutdown.service';
import { TelemetryEventRelay } from '@/events/telemetry-event-relay';
import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus';
export abstract class BaseCommand extends Command {
protected logger = Container.get(Logger);
@@ -116,12 +116,14 @@ export abstract class BaseCommand extends Command {
const { communityPackages } = this.globalConfig.nodes;
if (communityPackages.enabled && this.needsCommunityPackages) {
const { CommunityPackagesService } = await import('@/services/communityPackages.service');
const { CommunityPackagesService } = await import('@/services/community-packages.service');
await Container.get(CommunityPackagesService).checkForMissingPackages();
}
// TODO: remove this after the cyclic dependencies around the event-bus are resolved
Container.get(MessageEventBus);
await Container.get(PostHogClient).init();
await Container.get(InternalHooks).init();
await Container.get(TelemetryEventRelay).init();
}
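
Two pieces of this diff fit together here: the guard in the hunk above only loads CommunityPackagesService when a command sets needsCommunityPackages, and a later hunk (ExecuteBatch) opts in by overriding that flag. A hypothetical command sketching the opt-in pattern, assuming BaseCommand keeps needsCommunityPackages as an overridable member and the usual oclif run() method:

import { BaseCommand } from './base-command';

// Hypothetical command, not part of this diff: it opts in to the
// community-package check shown in the BaseCommand hunk above.
export class SomeCommand extends BaseCommand {
	override needsCommunityPackages = true;

	async run() {
		this.logger.info('running with community packages verified');
	}
}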

View file

@@ -5,9 +5,9 @@ import type { DataSourceOptions as ConnectionOptions } from '@n8n/typeorm';
import { MigrationExecutor, DataSource as Connection } from '@n8n/typeorm';
import { Container } from 'typedi';
import { Logger } from '@/logger';
import { getConnectionOptions } from '@db/config';
import type { Migration } from '@db/types';
import { wrapMigration } from '@db/utils/migrationHelpers';
import { getConnectionOptions } from '@/databases/config';
import type { Migration } from '@/databases/types';
import { wrapMigration } from '@/databases/utils/migration-helpers';
// This function is extracted to make it easier to unit test it.
// Mocking turned into a mess due to this command using typeorm and the db

View file

@@ -11,9 +11,9 @@ import pick from 'lodash/pick';
import { ActiveExecutions } from '@/active-executions';
import { WorkflowRunner } from '@/workflow-runner';
import type { IWorkflowDb, IWorkflowExecutionDataProcess } from '@/Interfaces';
import type { User } from '@db/entities/User';
import { WorkflowRepository } from '@db/repositories/workflow.repository';
import type { IWorkflowDb, IWorkflowExecutionDataProcess } from '@/interfaces';
import type { User } from '@/databases/entities/user';
import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import { OwnershipService } from '@/services/ownership.service';
import { findCliWorkflowStart } from '@/utils';
@@ -108,6 +108,8 @@ export class ExecuteBatch extends BaseCommand {
}),
};
static aliases = ['executeBatch'];
override needsCommunityPackages = true;
/**

View file

@@ -5,11 +5,11 @@ import { ApplicationError, ExecutionBaseError } from 'n8n-workflow';
import { ActiveExecutions } from '@/active-executions';
import { WorkflowRunner } from '@/workflow-runner';
import type { IWorkflowExecutionDataProcess } from '@/Interfaces';
import type { IWorkflowExecutionDataProcess } from '@/interfaces';
import { findCliWorkflowStart, isWorkflowIdValid } from '@/utils';
import { BaseCommand } from './base-command';
import { WorkflowRepository } from '@db/repositories/workflow.repository';
import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import { OwnershipService } from '@/services/ownership.service';
export class Execute extends BaseCommand {

View file

@@ -2,9 +2,9 @@ import { Flags } from '@oclif/core';
import fs from 'fs';
import path from 'path';
import { Credentials } from 'n8n-core';
import type { ICredentialsDb, ICredentialsDecryptedDb } from '@/Interfaces';
import type { ICredentialsDb, ICredentialsDecryptedDb } from '@/interfaces';
import { BaseCommand } from '../base-command';
import { CredentialsRepository } from '@db/repositories/credentials.repository';
import { CredentialsRepository } from '@/databases/repositories/credentials.repository';
import Container from 'typedi';
import { ApplicationError } from 'n8n-workflow';

View file

@@ -2,7 +2,7 @@ import { Flags } from '@oclif/core';
import fs from 'fs';
import path from 'path';
import { BaseCommand } from '../base-command';
import { WorkflowRepository } from '@db/repositories/workflow.repository';
import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import Container from 'typedi';
import { ApplicationError } from 'n8n-workflow';

View file

@@ -6,16 +6,16 @@ import glob from 'fast-glob';
// eslint-disable-next-line n8n-local-rules/misplaced-n8n-typeorm-import
import type { EntityManager } from '@n8n/typeorm';
import * as Db from '@/Db';
import { SharedCredentials } from '@db/entities/SharedCredentials';
import { CredentialsEntity } from '@db/entities/CredentialsEntity';
import * as Db from '@/db';
import { SharedCredentials } from '@/databases/entities/shared-credentials';
import { CredentialsEntity } from '@/databases/entities/credentials-entity';
import { BaseCommand } from '../base-command';
import type { ICredentialsEncrypted } from 'n8n-workflow';
import { ApplicationError, jsonParse } from 'n8n-workflow';
import { UM_FIX_INSTRUCTION } from '@/constants';
import { ProjectRepository } from '@/databases/repositories/project.repository';
import { Project } from '@/databases/entities/Project';
import { User } from '@/databases/entities/User';
import { Project } from '@/databases/entities/project';
import { User } from '@/databases/entities/user';
export class ImportCredentialsCommand extends BaseCommand {
static description = 'Import credentials';

View file

@@ -5,14 +5,14 @@ import fs from 'fs';
import glob from 'fast-glob';
import { UM_FIX_INSTRUCTION } from '@/constants';
import type { WorkflowEntity } from '@db/entities/WorkflowEntity';
import { generateNanoId } from '@db/utils/generators';
import { UserRepository } from '@db/repositories/user.repository';
import { WorkflowRepository } from '@db/repositories/workflow.repository';
import type { IWorkflowToImport } from '@/Interfaces';
import type { WorkflowEntity } from '@/databases/entities/workflow-entity';
import { generateNanoId } from '@/databases/utils/generators';
import { UserRepository } from '@/databases/repositories/user.repository';
import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import type { IWorkflowToImport } from '@/interfaces';
import { ImportService } from '@/services/import.service';
import { BaseCommand } from '../base-command';
import { SharedWorkflowRepository } from '@db/repositories/sharedWorkflow.repository';
import { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository';
import { ProjectRepository } from '@/databases/repositories/project.repository';
function assertHasWorkflowsToImport(workflows: unknown): asserts workflows is IWorkflowToImport[] {

View file

@@ -1,9 +1,9 @@
import Container from 'typedi';
import { LDAP_DEFAULT_CONFIGURATION, LDAP_FEATURE_NAME } from '@/ldap/constants';
import { AuthIdentityRepository } from '@db/repositories/authIdentity.repository';
import { AuthProviderSyncHistoryRepository } from '@db/repositories/authProviderSyncHistory.repository';
import { SettingsRepository } from '@db/repositories/settings.repository';
import { UserRepository } from '@db/repositories/user.repository';
import { AuthIdentityRepository } from '@/databases/repositories/auth-identity.repository';
import { AuthProviderSyncHistoryRepository } from '@/databases/repositories/auth-provider-sync-history.repository';
import { SettingsRepository } from '@/databases/repositories/settings.repository';
import { UserRepository } from '@/databases/repositories/user.repository';
import { BaseCommand } from '../base-command';
import { Flags } from '@oclif/core';
import { ApplicationError } from 'n8n-workflow';
@@ -11,9 +11,9 @@ import { ProjectRepository } from '@/databases/repositories/project.repository';
import { WorkflowService } from '@/workflows/workflow.service';
// eslint-disable-next-line n8n-local-rules/misplaced-n8n-typeorm-import
import { In } from '@n8n/typeorm';
import { SharedWorkflowRepository } from '@/databases/repositories/sharedWorkflow.repository';
import { SharedCredentialsRepository } from '@/databases/repositories/sharedCredentials.repository';
import { ProjectRelationRepository } from '@/databases/repositories/projectRelation.repository';
import { SharedWorkflowRepository } from '@/databases/repositories/shared-workflow.repository';
import { SharedCredentialsRepository } from '@/databases/repositories/shared-credentials.repository';
import { ProjectRelationRepository } from '@/databases/repositories/project-relation.repository';
import { CredentialsService } from '@/credentials/credentials.service';
import { UM_FIX_INSTRUCTION } from '@/constants';

View file

@@ -1,7 +1,7 @@
import { Container } from 'typedi';
import { SETTINGS_LICENSE_CERT_KEY } from '@/constants';
import { BaseCommand } from '../base-command';
import { SettingsRepository } from '@db/repositories/settings.repository';
import { SettingsRepository } from '@/databases/repositories/settings.repository';
import { License } from '@/license';
export class ClearLicenseCommand extends BaseCommand {

View file

@@ -1,6 +1,6 @@
import Container from 'typedi';
import { Flags } from '@oclif/core';
import { WorkflowRepository } from '@db/repositories/workflow.repository';
import { WorkflowRepository } from '@/databases/repositories/workflow.repository';
import { BaseCommand } from '../base-command';
export class ListWorkflowCommand extends BaseCommand {

View file

@@ -1,6 +1,6 @@
import Container from 'typedi';
import { Flags } from '@oclif/core';
import { AuthUserRepository } from '@db/repositories/authUser.repository';
import { AuthUserRepository } from '@/databases/repositories/auth-user.repository';
import { BaseCommand } from '../base-command';
export class DisableMFACommand extends BaseCommand {
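
The same import-path migration repeats across the hunks above: the @db/* alias gives way to the plain @/databases/* path, PascalCase entity file names become kebab-case, and @/Interfaces and @/Db become @/interfaces and @/db. A minimal before/after sketch of the pattern, reusing imports that appear in the hunks; the exported type alias at the end is hypothetical, added only so the imports are used.

// Before (removed lines in this diff):
//   import type { User } from '@db/entities/User';
//   import { UserRepository } from '@db/repositories/user.repository';

// After (added lines in this diff):
import type { User } from '@/databases/entities/user';
import { UserRepository } from '@/databases/repositories/user.repository';

// Hypothetical helper type, just to exercise both imports.
export type OwnerLookup = (repo: UserRepository) => Promise<User | null>;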

Some files were not shown because too many files have changed in this diff.