Merge branch 'master' into ai-508-backend-cancel-test-run

# Conflicts:
#	packages/cli/src/databases/repositories/test-run.repository.ee.ts
Eugene Molodkin 2024-12-20 12:58:42 +01:00
commit 437546cfa3
1247 changed files with 4397 additions and 3306 deletions


@ -1,3 +1,56 @@
# [1.73.0](https://github.com/n8n-io/n8n/compare/n8n@1.72.0...n8n@1.73.0) (2024-12-19)
### Bug Fixes
* **core:** Ensure runners do not throw on unsupported console methods ([#12167](https://github.com/n8n-io/n8n/issues/12167)) ([57c6a61](https://github.com/n8n-io/n8n/commit/57c6a6167dd2b30f0082a416daefce994ecad33a))
* **core:** Fix `$getWorkflowStaticData` on task runners ([#12153](https://github.com/n8n-io/n8n/issues/12153)) ([b479f14](https://github.com/n8n-io/n8n/commit/b479f14ef5012551b823bea5d2ffbddedfd50a77))
* **core:** Fix binary data helpers (like `prepareBinaryData`) with task runner ([#12259](https://github.com/n8n-io/n8n/issues/12259)) ([0f1461f](https://github.com/n8n-io/n8n/commit/0f1461f2d5d7ec34236ed7fcec3e2f9ee7eb73c4))
* **core:** Fix race condition in AI tool invocation with multiple items from the parent ([#12169](https://github.com/n8n-io/n8n/issues/12169)) ([dce0c58](https://github.com/n8n-io/n8n/commit/dce0c58f8605c33fc50ec8aa422f0fb5eee07637))
* **core:** Fix serialization of circular json with task runner ([#12288](https://github.com/n8n-io/n8n/issues/12288)) ([a99d726](https://github.com/n8n-io/n8n/commit/a99d726f42d027b64f94eda0d385b597c5d5be2e))
* **core:** Upgrade nanoid to address CVE-2024-55565 ([#12171](https://github.com/n8n-io/n8n/issues/12171)) ([8c0bd02](https://github.com/n8n-io/n8n/commit/8c0bd0200c386b122f495c453ccc97a001e4729c))
* **editor:** Add new create first project CTA ([#12189](https://github.com/n8n-io/n8n/issues/12189)) ([878b419](https://github.com/n8n-io/n8n/commit/878b41904d76eda3ee230f850127b4d56993de24))
* **editor:** Fix canvas ready opacity transition on new canvas ([#12264](https://github.com/n8n-io/n8n/issues/12264)) ([5d33a6b](https://github.com/n8n-io/n8n/commit/5d33a6ba8a2bccea097402fd04c0e2b00e423e76))
* **editor:** Fix rendering of code-blocks in sticky notes ([#12227](https://github.com/n8n-io/n8n/issues/12227)) ([9b59035](https://github.com/n8n-io/n8n/commit/9b5903524b95bd21d5915908780942790cf88d27))
* **editor:** Fix sticky color picker getting covered by nodes on new canvas ([#12263](https://github.com/n8n-io/n8n/issues/12263)) ([27bd3c8](https://github.com/n8n-io/n8n/commit/27bd3c85b3a4ddcf763a543b232069bb108130cf))
* **editor:** Improve commit modal user facing messaging ([#12161](https://github.com/n8n-io/n8n/issues/12161)) ([ad39243](https://github.com/n8n-io/n8n/commit/ad392439826b17bd0b84f981e0958d88f09e7fe9))
* **editor:** Prevent connection line from showing when clicking the plus button of a node ([#12265](https://github.com/n8n-io/n8n/issues/12265)) ([9180b46](https://github.com/n8n-io/n8n/commit/9180b46b52302b203eecf3bb81c3f2132527a1e6))
* **editor:** Prevent stickies from being edited in preview mode in the new canvas ([#12222](https://github.com/n8n-io/n8n/issues/12222)) ([6706dcd](https://github.com/n8n-io/n8n/commit/6706dcdf72d54f33c1cf4956602c3a64a1578826))
* **editor:** Reduce cases for Auto-Add of ChatTrigger for AI Agents ([#12154](https://github.com/n8n-io/n8n/issues/12154)) ([365e82d](https://github.com/n8n-io/n8n/commit/365e82d2008dff2f9c91664ee04d7a78363a8b30))
* **editor:** Remove invalid connections after node handles change ([#12247](https://github.com/n8n-io/n8n/issues/12247)) ([6330bec](https://github.com/n8n-io/n8n/commit/6330bec4db0175b558f2747837323fdbb25b634a))
* **editor:** Set dangerouslyUseHTMLString in composable ([#12280](https://github.com/n8n-io/n8n/issues/12280)) ([6ba91b5](https://github.com/n8n-io/n8n/commit/6ba91b5e1ed197c67146347a6f6e663ecdf3de48))
* **editor:** Set RunData outputIndex based on incoming data ([#12182](https://github.com/n8n-io/n8n/issues/12182)) ([dc4261a](https://github.com/n8n-io/n8n/commit/dc4261ae7eca6cf277404cd514c90fad42f14ae0))
* **editor:** Update the universal create button interaction ([#12105](https://github.com/n8n-io/n8n/issues/12105)) ([5300e0a](https://github.com/n8n-io/n8n/commit/5300e0ac45bf832b3d2957198a49a1c687f3fe1f))
* **Elasticsearch Node:** Fix issue stopping search queries being sent ([#11464](https://github.com/n8n-io/n8n/issues/11464)) ([388a83d](https://github.com/n8n-io/n8n/commit/388a83dfbdc6ac301e4df704666df9f09fb7d0b3))
* **Extract from File Node:** Detect file encoding ([#12081](https://github.com/n8n-io/n8n/issues/12081)) ([92af245](https://github.com/n8n-io/n8n/commit/92af245d1aab5bfad8618fda69b2405f5206875d))
* **Github Node:** Fix fetch of file names with ? character ([#12206](https://github.com/n8n-io/n8n/issues/12206)) ([39462ab](https://github.com/n8n-io/n8n/commit/39462abe1fde7e82b5e5b8f3ceebfcadbfd7c925))
* **Invoice Ninja Node:** Fix actions for bank transactions ([#11511](https://github.com/n8n-io/n8n/issues/11511)) ([80eea49](https://github.com/n8n-io/n8n/commit/80eea49cf0bf9db438eb85af7cd22aeb11fbfed2))
* **Linear Node:** Fix issue with error handling ([#12191](https://github.com/n8n-io/n8n/issues/12191)) ([b8eae5f](https://github.com/n8n-io/n8n/commit/b8eae5f28a7d523195f4715cd8da77b3a884ae4c))
* **MongoDB Node:** Fix checks on projection feature call ([#10563](https://github.com/n8n-io/n8n/issues/10563)) ([58bab46](https://github.com/n8n-io/n8n/commit/58bab461c4c5026b2ca5ea143cbcf98bf3a4ced8))
* **Postgres Node:** Allow users to wrap strings with $$ ([#12034](https://github.com/n8n-io/n8n/issues/12034)) ([0c15e30](https://github.com/n8n-io/n8n/commit/0c15e30778cc5cb10ed368df144d6fbb2504ec70))
* **Redis Node:** Add support for username auth ([#12274](https://github.com/n8n-io/n8n/issues/12274)) ([64c0414](https://github.com/n8n-io/n8n/commit/64c0414ef28acf0f7ec42b4b0bb21cbf2921ebe7))
### Features
* Add solarwinds ipam credentials ([#12005](https://github.com/n8n-io/n8n/issues/12005)) ([882484e](https://github.com/n8n-io/n8n/commit/882484e8ee7d1841d5d600414ca48e9915abcfa8))
* Add SolarWinds Observability node credentials ([#11805](https://github.com/n8n-io/n8n/issues/11805)) ([e8a5db5](https://github.com/n8n-io/n8n/commit/e8a5db5beb572edbb61dd9100b70827ccc4cca58))
* **AI Agent Node:** Update descriptions and titles for Chat Trigger options in AI Agents and Memory ([#12155](https://github.com/n8n-io/n8n/issues/12155)) ([07a6ae1](https://github.com/n8n-io/n8n/commit/07a6ae11b3291c1805553d55ba089fe8dd919fd8))
* **API:** Exclude pinned data from workflows ([#12261](https://github.com/n8n-io/n8n/issues/12261)) ([e0dc385](https://github.com/n8n-io/n8n/commit/e0dc385f8bc8ee13fbc5bbf35e07654e52b193e9))
* **editor:** Params pane collection improvements ([#11607](https://github.com/n8n-io/n8n/issues/11607)) ([6e44c71](https://github.com/n8n-io/n8n/commit/6e44c71c9ca82cce20eb55bb9003930bbf66a16c))
* **editor:** Support adding nodes via drag and drop from node creator on new canvas ([#12197](https://github.com/n8n-io/n8n/issues/12197)) ([1bfd9c0](https://github.com/n8n-io/n8n/commit/1bfd9c0e913f3eefc4593f6c344db1ae1f6e4df4))
* **Facebook Graph API Node:** Update node to support API v21.0 ([#12116](https://github.com/n8n-io/n8n/issues/12116)) ([14c33f6](https://github.com/n8n-io/n8n/commit/14c33f666fe92f7173e4f471fb478e629e775c62))
* **Linear Trigger Node:** Add support for admin scope ([#12211](https://github.com/n8n-io/n8n/issues/12211)) ([410ea9a](https://github.com/n8n-io/n8n/commit/410ea9a2ef2e14b5e8e4493e5db66cfc2290d8f6))
* **MailerLite Node:** Update node to support new api ([#11933](https://github.com/n8n-io/n8n/issues/11933)) ([d6b8e65](https://github.com/n8n-io/n8n/commit/d6b8e65abeb411f86538c1630dcce832ee0846a9))
* Send and wait operation - freeText and customForm response types ([#12106](https://github.com/n8n-io/n8n/issues/12106)) ([e98c7f1](https://github.com/n8n-io/n8n/commit/e98c7f160b018243dc88490d46fb1047a4d7fcdc))
### Performance Improvements
* **editor:** SchemaView performance improvement by ≈90% 🚀 ([#12180](https://github.com/n8n-io/n8n/issues/12180)) ([6a58309](https://github.com/n8n-io/n8n/commit/6a5830959f5fb493a4119869b8298d8ed702c84a))
# [1.72.0](https://github.com/n8n-io/n8n/compare/n8n@1.71.0...n8n@1.72.0) (2024-12-11)


@ -19,6 +19,7 @@ Great that you are here and you want to contribute to n8n
- [Actual n8n setup](#actual-n8n-setup)
- [Start](#start)
- [Development cycle](#development-cycle)
- [Community PR Guidelines](#community-pr-guidelines)
- [Test suite](#test-suite)
- [Unit tests](#unit-tests)
- [E2E tests](#e2e-tests)
@ -191,6 +192,51 @@ automatically build your code, restart the backend and refresh the frontend
```
1. Commit code and [create a pull request](https://docs.github.com/en/github/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork)
---
### Community PR Guidelines
#### **1. Change Request/Comment**
Please address the requested changes or provide feedback within 14 days. If there is no response or update to the pull request within this time, it will be closed automatically. The PR can be reopened once the requested changes are applied.
#### **2. General Requirements**
- **Follow the Style Guide:**
- Ensure your code adheres to n8n's coding standards and conventions (e.g., formatting, naming, indentation). Use linting tools where applicable.
- **TypeScript Compliance:**
- Do not use `ts-ignore`.
- Ensure code adheres to TypeScript rules.
- **Avoid Repetitive Code:**
- Reuse existing components, parameters, and logic wherever possible instead of redefining or duplicating them.
- For nodes: Use the same parameter across multiple operations rather than defining a new parameter for each operation (if applicable).
- **Testing Requirements:**
- PRs **must include tests** (see the minimal sketch after this list):
- Unit tests
- Workflow tests for nodes (example [here](https://github.com/n8n-io/n8n/tree/master/packages/nodes-base/nodes/Switch/V3/test))
- UI tests (if applicable)
- **Typos:**
- Use a spell-checking tool, such as [**Code Spell Checker**](https://marketplace.visualstudio.com/items?itemName=streetsidesoftware.code-spell-checker), to avoid typos.
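A minimal Jest unit test sketch to illustrate the testing requirement above (the `add` helper and its import path are placeholders, not actual n8n code; real examples live in `test` folders such as the Switch node tests linked above):

```ts
// Illustrative only: `add` and '../add' are hypothetical, not part of n8n.
import { add } from '../add';

describe('add', () => {
	it('sums two numbers', () => {
		expect(add(1, 2)).toBe(3);
	});
});
```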
#### **3. PR Specific Requirements**
- **Small PRs Only:**
- Focus on a single feature or fix per PR.
- **Naming Convention:**
- Follow [n8n's PR Title Conventions](https://github.com/n8n-io/n8n/blob/master/.github/pull_request_title_conventions.md#L36); example titles follow this list.
- **New Nodes:**
- PRs that introduce new nodes will be **auto-closed** unless they are explicitly requested by the n8n team and aligned with an agreed project scope. However, you can still explore [building your own nodes](https://docs.n8n.io/integrations/creating-nodes/), as n8n offers the flexibility to create custom nodes.
- **Typo-Only PRs:**
- Typo fixes alone are not sufficient justification for a PR; such PRs will be rejected.
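Example PR titles following the naming convention referenced above, in the `type(scope): Summary` form that the changelog entries earlier in this commit reflect (these titles are illustrative, not real PRs):

```
feat(editor): Add quick filter to the node creator
fix(core): Handle missing binary data gracefully
```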
#### **4. Workflow Summary for Non-Compliant PRs**
- **No Tests:** If tests are not provided, the PR will be auto-closed after **14 days**.
- **Non-Small PRs:** Large or multifaceted PRs will be returned so they can be split into smaller, focused PRs.
- **New Nodes/Typo PRs:** Automatically rejected if not aligned with project scope or guidelines.
---
### Test suite
#### Unit tests


@ -44,8 +44,7 @@ describe('n8n Form Trigger', () => {
':nth-child(3) > .border-top-dashed > .parameter-input-list-wrapper > :nth-child(1) > .parameter-item',
)
.find('input[placeholder*="e.g. What is your name?"]')
.type('Test Field 3')
.blur();
.type('Test Field 3');
cy.get(
':nth-child(3) > .border-top-dashed > .parameter-input-list-wrapper > :nth-child(2) > .parameter-item',
).click();
@ -56,27 +55,24 @@ describe('n8n Form Trigger', () => {
':nth-child(4) > .border-top-dashed > .parameter-input-list-wrapper > :nth-child(1) > .parameter-item',
)
.find('input[placeholder*="e.g. What is your name?"]')
.type('Test Field 4')
.blur();
.type('Test Field 4');
cy.get(
':nth-child(4) > .border-top-dashed > .parameter-input-list-wrapper > :nth-child(2) > .parameter-item',
).click();
getVisibleSelect().contains('Dropdown').click();
cy.get(
'.border-top-dashed > :nth-child(2) > :nth-child(3) > .multi-parameter > .fixed-collection-parameter > :nth-child(2) > .button',
).click();
cy.get(
':nth-child(4) > :nth-child(1) > :nth-child(2) > :nth-child(3) > .multi-parameter > .fixed-collection-parameter > .fixed-collection-parameter-property > :nth-child(1) > :nth-child(1)',
)
.find('input')
.type('Option 1')
.blur();
cy.get(
':nth-child(4) > :nth-child(1) > :nth-child(2) > :nth-child(3) > .multi-parameter > .fixed-collection-parameter > .fixed-collection-parameter-property > :nth-child(1) > :nth-child(2)',
)
.find('input')
.type('Option 2')
.blur();
cy.contains('button', 'Add Field Option').click();
cy.contains('label', 'Field Options')
.parent()
.nextAll()
.find('[data-test-id="parameter-input-field"]')
.eq(0)
.type('Option 1');
cy.contains('label', 'Field Options')
.parent()
.nextAll()
.find('[data-test-id="parameter-input-field"]')
.eq(1)
.type('Option 2');
//add optional submitted message
cy.get('.param-options').click();
@ -94,7 +90,6 @@ describe('n8n Form Trigger', () => {
.children()
.children()
.first()
.clear()
.type('Your test form was successfully submitted');
ndv.getters.backToCanvas().click();


@ -65,26 +65,6 @@ describe('NDV', () => {
cy.shouldNotHaveConsoleErrors();
});
it('should disconect Switch outputs if rules order was changed', () => {
cy.createFixtureWorkflow('NDV-test-switch_reorder.json', 'NDV test switch reorder');
workflowPage.actions.zoomToFit();
workflowPage.actions.executeWorkflow();
workflowPage.actions.openNode('Merge');
ndv.getters.outputPanel().contains('2 items').should('exist');
cy.contains('span', 'first').should('exist');
ndv.getters.backToCanvas().click();
workflowPage.actions.openNode('Switch');
cy.get('.cm-line').realMouseMove(100, 100);
cy.get('.fa-angle-down').first().click();
ndv.getters.backToCanvas().click();
workflowPage.actions.executeWorkflow();
workflowPage.actions.openNode('Merge');
ndv.getters.outputPanel().contains('2 items').should('exist');
cy.contains('span', 'zero').should('exist');
});
it('should show correct validation state for resource locator params', () => {
workflowPage.actions.addNodeToCanvas('Typeform', true, true);
ndv.getters.container().should('be.visible');


@ -33,7 +33,7 @@ COPY docker/images/n8n/docker-entrypoint.sh /
# Setup the Task Runner Launcher
ARG TARGETPLATFORM
ARG LAUNCHER_VERSION=1.0.0
ARG LAUNCHER_VERSION=1.1.0
COPY docker/images/n8n/n8n-task-runners.json /etc/n8n-task-runners.json
# Download, verify, then extract the launcher binary
RUN \


@ -24,7 +24,7 @@ RUN set -eux; \
# Setup the Task Runner Launcher
ARG TARGETPLATFORM
ARG LAUNCHER_VERSION=1.0.0
ARG LAUNCHER_VERSION=1.1.0
COPY n8n-task-runners.json /etc/n8n-task-runners.json
# Download, verify, then extract the launcher binary
RUN \


@ -1,12 +1,12 @@
{
"name": "n8n-monorepo",
"version": "1.72.0",
"version": "1.73.0",
"private": true,
"engines": {
"node": ">=20.15",
"pnpm": ">=9.5"
"pnpm": ">=9.15"
},
"packageManager": "pnpm@9.6.0",
"packageManager": "pnpm@9.15.1",
"scripts": {
"prepare": "node scripts/prepare.mjs",
"preinstall": "node scripts/block-npm-install.js",


@ -1,6 +1,6 @@
{
"name": "@n8n/api-types",
"version": "0.10.0",
"version": "0.11.0",
"scripts": {
"clean": "rimraf dist .turbo",
"dev": "pnpm watch",


@ -2,7 +2,7 @@ import type { AxiosError, AxiosRequestConfig } from 'axios';
import axios from 'axios';
export class N8nApiClient {
constructor(public readonly apiBaseUrl: string) {}
constructor(readonly apiBaseUrl: string) {}
async waitForInstanceToBecomeOnline(): Promise<void> {
const HEALTH_ENDPOINT = 'healthz';


@ -1,6 +1,6 @@
{
"name": "@n8n/config",
"version": "1.22.0",
"version": "1.23.0",
"scripts": {
"clean": "rimraf dist .turbo",
"dev": "pnpm watch",


@ -6,7 +6,7 @@ class PruningIntervalsConfig {
@Env('EXECUTIONS_DATA_PRUNE_HARD_DELETE_INTERVAL')
hardDelete: number = 15;
/** How often (minutes) execution data should be soft-deleted */
/** How often (minutes) execution data should be soft-deleted. */
@Env('EXECUTIONS_DATA_PRUNE_SOFT_DELETE_INTERVAL')
softDelete: number = 60;
}


@ -15,7 +15,6 @@ module.exports = {
eqeqeq: 'warn',
'id-denylist': 'warn',
'import/extensions': 'warn',
'import/order': 'warn',
'prefer-spread': 'warn',
'@typescript-eslint/naming-convention': ['error', { selector: 'memberLike', format: null }],


@ -254,6 +254,7 @@ export class ChainLlm implements INodeType {
displayName: 'Basic LLM Chain',
name: 'chainLlm',
icon: 'fa:link',
iconColor: 'black',
group: ['transform'],
version: [1, 1.1, 1.2, 1.3, 1.4, 1.5],
description: 'A simple chain to prompt a large language model',


@ -31,6 +31,7 @@ export class ChainRetrievalQa implements INodeType {
displayName: 'Question and Answer Chain',
name: 'chainRetrievalQa',
icon: 'fa:link',
iconColor: 'black',
group: ['transform'],
version: [1, 1.1, 1.2, 1.3, 1.4],
description: 'Answer questions about retrieved documents',


@ -10,6 +10,7 @@ export class ChainSummarization extends VersionedNodeType {
displayName: 'Summarization Chain',
name: 'chainSummarization',
icon: 'fa:link',
iconColor: 'black',
group: ['transform'],
description: 'Transforms text into a concise summary',
codex: {


@ -10,6 +10,7 @@ import {
import { logWrapper } from '@utils/logWrapper';
import { N8nBinaryLoader } from '@utils/N8nBinaryLoader';
import { N8nJsonLoader } from '@utils/N8nJsonLoader';
import { metadataFilterField } from '@utils/sharedFields';
// Dependencies needed underneath the hood for the loaders. We add them
@ -18,7 +19,6 @@ import { metadataFilterField } from '@utils/sharedFields';
import 'mammoth'; // for docx
import 'epub2'; // for epub
import 'pdf-parse'; // for pdf
import { N8nJsonLoader } from '@utils/N8nJsonLoader';
export class DocumentDefaultDataLoader implements INodeType {
description: INodeTypeDescription = {


@ -32,14 +32,14 @@ class MemoryChatBufferSingleton {
this.memoryBuffer = new Map();
}
public static getInstance(): MemoryChatBufferSingleton {
static getInstance(): MemoryChatBufferSingleton {
if (!MemoryChatBufferSingleton.instance) {
MemoryChatBufferSingleton.instance = new MemoryChatBufferSingleton();
}
return MemoryChatBufferSingleton.instance;
}
public async getMemory(
async getMemory(
sessionKey: string,
memoryParams: BufferWindowMemoryInput,
): Promise<BufferWindowMemory> {
@ -78,6 +78,7 @@ export class MemoryBufferWindow implements INodeType {
displayName: 'Window Buffer Memory (easiest)',
name: 'memoryBufferWindow',
icon: 'fa:database',
iconColor: 'black',
group: ['transform'],
version: [1, 1.1, 1.2, 1.3],
description: 'Stores in n8n memory, so no credentials required',


@ -38,6 +38,7 @@ export class MemoryChatRetriever implements INodeType {
displayName: 'Chat Messages Retriever',
name: 'memoryChatRetriever',
icon: 'fa:database',
iconColor: 'black',
group: ['transform'],
hidden: true,
version: 1,


@ -19,6 +19,7 @@ export class MemoryMotorhead implements INodeType {
displayName: 'Motorhead',
name: 'memoryMotorhead',
icon: 'fa:file-export',
iconColor: 'black',
group: ['transform'],
version: [1, 1.1, 1.2, 1.3],
description: 'Use Motorhead Memory',


@ -21,6 +21,7 @@ export class OutputParserAutofixing implements INodeType {
displayName: 'Auto-fixing Output Parser',
name: 'outputParserAutofixing',
icon: 'fa:tools',
iconColor: 'black',
group: ['transform'],
version: 1,
description: 'Automatically fix the output if it is not in the correct format',


@ -15,6 +15,7 @@ export class OutputParserItemList implements INodeType {
displayName: 'Item List Output Parser',
name: 'outputParserItemList',
icon: 'fa:bars',
iconColor: 'black',
group: ['transform'],
version: 1,
description: 'Return the results as separate items',


@ -20,6 +20,7 @@ export class OutputParserStructured implements INodeType {
displayName: 'Structured Output Parser',
name: 'outputParserStructured',
icon: 'fa:code',
iconColor: 'black',
group: ['transform'],
version: [1, 1.1, 1.2],
defaultVersion: 1.2,


@ -19,6 +19,7 @@ export class RetrieverContextualCompression implements INodeType {
displayName: 'Contextual Compression Retriever',
name: 'retrieverContextualCompression',
icon: 'fa:box-open',
iconColor: 'black',
group: ['transform'],
version: 1,
description: 'Enhances document similarity search by contextual compression.',


@ -18,6 +18,7 @@ export class RetrieverMultiQuery implements INodeType {
displayName: 'MultiQuery Retriever',
name: 'retrieverMultiQuery',
icon: 'fa:box-open',
iconColor: 'black',
group: ['transform'],
version: 1,
description:


@ -15,6 +15,7 @@ export class RetrieverVectorStore implements INodeType {
displayName: 'Vector Store Retriever',
name: 'retrieverVectorStore',
icon: 'fa:box-open',
iconColor: 'black',
group: ['transform'],
version: 1,
description: 'Use a Vector Store as Retriever',


@ -41,6 +41,7 @@ export class RetrieverWorkflow implements INodeType {
displayName: 'Workflow Retriever',
name: 'retrieverWorkflow',
icon: 'fa:box-open',
iconColor: 'black',
group: ['transform'],
version: [1, 1.1],
description: 'Use an n8n Workflow as Retriever',


@ -17,6 +17,7 @@ export class TextSplitterCharacterTextSplitter implements INodeType {
displayName: 'Character Text Splitter',
name: 'textSplitterCharacterTextSplitter',
icon: 'fa:grip-lines-vertical',
iconColor: 'black',
group: ['transform'],
version: 1,
description: 'Split text into chunks by characters',


@ -37,6 +37,7 @@ export class TextSplitterRecursiveCharacterTextSplitter implements INodeType {
displayName: 'Recursive Character Text Splitter',
name: 'textSplitterRecursiveCharacterTextSplitter',
icon: 'fa:grip-lines-vertical',
iconColor: 'black',
group: ['transform'],
version: 1,
description: 'Split text into chunks by characters recursively, recommended for most use cases',


@ -16,6 +16,7 @@ export class TextSplitterTokenSplitter implements INodeType {
displayName: 'Token Splitter',
name: 'textSplitterTokenSplitter',
icon: 'fa:grip-lines-vertical',
iconColor: 'black',
group: ['transform'],
version: 1,
description: 'Split text into chunks by tokens',


@ -16,6 +16,7 @@ export class ToolCalculator implements INodeType {
displayName: 'Calculator',
name: 'toolCalculator',
icon: 'fa:calculator',
iconColor: 'black',
group: ['transform'],
version: 1,
description: 'Make it easier for AI agents to perform arithmetic',


@ -26,6 +26,7 @@ export class ToolCode implements INodeType {
displayName: 'Code Tool',
name: 'toolCode',
icon: 'fa:code',
iconColor: 'black',
group: ['transform'],
version: [1, 1.1],
description: 'Write a tool in JS or Python',


@ -18,6 +18,7 @@ export class ToolVectorStore implements INodeType {
displayName: 'Vector Store Tool',
name: 'toolVectorStore',
icon: 'fa:database',
iconColor: 'black',
group: ['transform'],
version: [1],
description: 'Retrieve context from vector store',


@ -32,6 +32,7 @@ export class ToolWorkflow implements INodeType {
displayName: 'Call n8n Workflow Tool',
name: 'toolWorkflow',
icon: 'fa:network-wired',
iconColor: 'black',
group: ['transform'],
version: [1, 1.1, 1.2, 1.3],
description: 'Uses another n8n workflow as a tool. Allows packaging any n8n node(s) as a tool.',


@ -26,6 +26,7 @@ export class VectorStoreInMemory extends createVectorStoreNode({
name: 'vectorStoreInMemory',
description: 'Work with your data in In-Memory Vector Store',
icon: 'fa:database',
iconColor: 'black',
docsUrl:
'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoreinmemory/',
},


@ -56,7 +56,7 @@ export class VectorStorePinecone extends createVectorStoreNode({
displayName: 'Pinecone Vector Store',
name: 'vectorStorePinecone',
description: 'Work with your data in Pinecone Vector Store',
icon: 'file:pinecone.svg',
icon: { light: 'file:pinecone.svg', dark: 'file:pinecone.dark.svg' },
docsUrl:
'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstorepinecone/',
credentials: [


@ -0,0 +1,21 @@
<svg width="32" height="35" viewBox="0 0 32 35" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M13.8555 34.2962C14.9325 34.2962 15.8055 33.4451 15.8055 32.3954C15.8055 31.3456 14.9325 30.4946 13.8555 30.4946C12.7786 30.4946 11.9055 31.3456 11.9055 32.3954C11.9055 33.4451 12.7786 34.2962 13.8555 34.2962Z" fill="white"/>
<path d="M18.4138 7.19675L19.2512 2.66005" stroke="white" stroke-width="2.11786" stroke-linecap="square"/>
<path d="M22.2656 5.5855L19.3466 2.11099L15.3748 4.37292" stroke="white" stroke-width="2.11786" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M14.9202 26.5528L15.7337 22.0169" stroke="white" stroke-width="2.11786" stroke-linecap="square"/>
<path d="M18.7729 24.9304L15.83 21.4671L11.8701 23.741" stroke="white" stroke-width="2.11786" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M16.6077 17.1996L17.4212 12.6633" stroke="white" stroke-width="2.11786" stroke-linecap="square"/>
<path d="M20.4587 15.58L17.5277 12.128L13.5679 14.3904" stroke="white" stroke-width="2.11786" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M8.32871 26.1554L4.75171 28.5815" stroke="white" stroke-width="2.01017" stroke-linecap="square"/>
<path d="M8.54383 30.0865L4.3208 28.8738L4.63185 24.5944" stroke="white" stroke-width="2.01017" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M21.3213 28.4299L23.8096 31.9282" stroke="white" stroke-width="2.01017" stroke-linecap="square"/>
<path d="M19.718 32.045L24.1085 32.3365L25.3527 28.2438" stroke="white" stroke-width="2.01017" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M25.3999 21.3291L29.7784 22.0996" stroke="white" stroke-width="2.05804" stroke-linecap="square"/>
<path d="M26.9072 25.072L30.3048 22.1919L28.1634 18.3557" stroke="white" stroke-width="2.05804" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M24.1196 12.8615L28.0197 10.763" stroke="white" stroke-width="2.05804" stroke-linecap="square"/>
<path d="M24.3357 8.83965L28.4869 10.5188L27.7093 14.8216" stroke="white" stroke-width="2.05804" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M6.91639 18.1572L2.52588 17.4101" stroke="white" stroke-width="2.05804" stroke-linecap="square"/>
<path d="M4.17731 21.1645L2 17.328L5.36167 14.436" stroke="white" stroke-width="2.05804" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M11.0799 10.6129L8.14893 7.34769" stroke="white" stroke-width="2.05804" stroke-linecap="square"/>
<path d="M12.2897 6.77496L7.80349 6.96156L7.01392 11.2649" stroke="white" stroke-width="2.05804" stroke-linecap="square" stroke-linejoin="round"/>
</svg>



@ -1 +1,21 @@
<svg xmlns="http://www.w3.org/2000/svg" class="w-full -translate-y-0.5" viewBox="1 1 30 29"><g fill="none" fill-rule="evenodd" transform="translate(0 1)"><path stroke="currentColor" stroke-linecap="square" stroke-width="1.77" d="m14.58 5.24.7-3.89"/><path stroke="#7D7D87" stroke-linecap="square" stroke-linejoin="round" stroke-width="1.77" d="M17.8 3.86 15.36.88l-3.32 1.94"/><path stroke="currentColor" stroke-linecap="square" stroke-width="1.77" d="m11.66 21.84.68-3.89"/><path stroke="currentColor" stroke-linecap="square" stroke-linejoin="round" stroke-width="1.77" d="m14.88 20.45-2.46-2.97-3.31 1.95"/><path stroke="currentColor" stroke-linecap="square" stroke-width="1.77" d="m13.07 13.82.68-3.89"/><path stroke="currentColor" stroke-linecap="square" stroke-linejoin="round" stroke-width="1.77" d="m16.29 12.43-2.45-2.96-3.31 1.94"/><circle cx="10.77" cy="26.85" r="1.63" fill="currentColor" fill-rule="nonzero"/><g stroke="currentColor" stroke-linecap="square"><path stroke-width="1.68" d="m6.15 21.5-2.99 2.08"/><path stroke-linejoin="round" stroke-width="1.68" d="M6.33 24.87 2.8 23.83l.26-3.67"/><path stroke-width="1.68" d="m17.01 23.45 2.08 3"/><path stroke-linejoin="round" stroke-width="1.68" d="m15.67 26.55 3.67.25 1.04-3.51"/><path stroke-width="1.72" d="m20.42 17.36 3.66.66"/><path stroke-linejoin="round" stroke-width="1.72" d="m21.68 20.57 2.84-2.47-1.79-3.29"/><path stroke-width="1.72" d="m19.35 10.1 3.26-1.8"/><path stroke-linejoin="round" stroke-width="1.72" d="M19.53 6.65 23 8.09l-.65 3.69"/><path stroke-width="1.72" d="M4.97 14.64 1.3 14"/><path stroke-linejoin="round" stroke-width="1.72" d="M2.68 17.22.86 13.93l2.81-2.48"/><path stroke-width="1.72" d="M8.45 8.17 6 5.37"/><path stroke-linejoin="round" stroke-width="1.72" d="m9.46 4.88-3.75.16-.66 3.69"/></g></g></svg>
<svg width="32" height="35" viewBox="0 0 32 35" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M13.8555 34.2962C14.9325 34.2962 15.8055 33.4451 15.8055 32.3954C15.8055 31.3456 14.9325 30.4946 13.8555 30.4946C12.7786 30.4946 11.9055 31.3456 11.9055 32.3954C11.9055 33.4451 12.7786 34.2962 13.8555 34.2962Z" fill="black"/>
<path d="M18.4138 7.19675L19.2512 2.66005" stroke="black" stroke-width="2.11786" stroke-linecap="square"/>
<path d="M22.2656 5.5855L19.3466 2.11099L15.3748 4.37292" stroke="black" stroke-width="2.11786" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M14.9202 26.5528L15.7337 22.0169" stroke="black" stroke-width="2.11786" stroke-linecap="square"/>
<path d="M18.7729 24.9304L15.83 21.4671L11.8701 23.741" stroke="black" stroke-width="2.11786" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M16.6077 17.1996L17.4212 12.6633" stroke="black" stroke-width="2.11786" stroke-linecap="square"/>
<path d="M20.4587 15.58L17.5277 12.128L13.5679 14.3904" stroke="black" stroke-width="2.11786" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M8.32871 26.1554L4.75171 28.5815" stroke="black" stroke-width="2.01017" stroke-linecap="square"/>
<path d="M8.54383 30.0865L4.3208 28.8738L4.63185 24.5944" stroke="black" stroke-width="2.01017" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M21.3213 28.4299L23.8096 31.9282" stroke="black" stroke-width="2.01017" stroke-linecap="square"/>
<path d="M19.718 32.045L24.1085 32.3365L25.3527 28.2438" stroke="black" stroke-width="2.01017" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M25.3999 21.3291L29.7784 22.0996" stroke="black" stroke-width="2.05804" stroke-linecap="square"/>
<path d="M26.9072 25.072L30.3048 22.1919L28.1634 18.3557" stroke="black" stroke-width="2.05804" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M24.1196 12.8615L28.0197 10.763" stroke="black" stroke-width="2.05804" stroke-linecap="square"/>
<path d="M24.3357 8.83965L28.4869 10.5188L27.7093 14.8216" stroke="black" stroke-width="2.05804" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M6.91639 18.1572L2.52588 17.4101" stroke="black" stroke-width="2.05804" stroke-linecap="square"/>
<path d="M4.17731 21.1645L2 17.328L5.36167 14.436" stroke="black" stroke-width="2.05804" stroke-linecap="square" stroke-linejoin="round"/>
<path d="M11.0799 10.6129L8.14893 7.34769" stroke="black" stroke-width="2.05804" stroke-linecap="square"/>
<path d="M12.2897 6.77496L7.80349 6.96156L7.01392 11.2649" stroke="black" stroke-width="2.05804" stroke-linecap="square" stroke-linejoin="round"/>
</svg>



@ -11,7 +11,7 @@ export class MemoryVectorStoreManager {
this.vectorStoreBuffer = new Map();
}
public static getInstance(embeddings: Embeddings): MemoryVectorStoreManager {
static getInstance(embeddings: Embeddings): MemoryVectorStoreManager {
if (!MemoryVectorStoreManager.instance) {
MemoryVectorStoreManager.instance = new MemoryVectorStoreManager(embeddings);
} else {
@ -27,7 +27,7 @@ export class MemoryVectorStoreManager {
return MemoryVectorStoreManager.instance;
}
public async getVectorStore(memoryKey: string): Promise<MemoryVectorStore> {
async getVectorStore(memoryKey: string): Promise<MemoryVectorStore> {
let vectorStoreInstance = this.vectorStoreBuffer.get(memoryKey);
if (!vectorStoreInstance) {
@ -38,7 +38,7 @@ export class MemoryVectorStoreManager {
return vectorStoreInstance;
}
public async addDocuments(
async addDocuments(
memoryKey: string,
documents: Document[],
clearStore?: boolean,


@ -17,6 +17,7 @@ import type {
INodeListSearchResult,
Icon,
INodePropertyOptions,
ThemeIconColor,
} from 'n8n-workflow';
import { getMetadataFiltersValues, logAiEvent } from '@utils/helpers';
@ -37,6 +38,7 @@ interface NodeMeta {
description: string;
docsUrl: string;
icon: Icon;
iconColor?: ThemeIconColor;
credentials?: INodeCredentialDescription[];
operationModes?: NodeOperationMode[];
}
@ -125,6 +127,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
name: args.meta.name,
description: args.meta.description,
icon: args.meta.icon,
iconColor: args.meta.iconColor,
group: ['transform'],
version: 1,
defaults: {


@ -1,6 +1,6 @@
{
"name": "@n8n/n8n-nodes-langchain",
"version": "1.72.0",
"version": "1.73.0",
"description": "",
"main": "index.js",
"scripts": {
@ -10,8 +10,8 @@
"build": "tsc -p tsconfig.build.json && tsc-alias -p tsconfig.build.json && pnpm n8n-copy-icons && pnpm n8n-generate-metadata",
"format": "biome format --write .",
"format:check": "biome ci .",
"lint": "eslint nodes credentials --quiet",
"lintfix": "eslint nodes credentials --fix",
"lint": "eslint nodes credentials utils --quiet",
"lintfix": "eslint nodes credentials utils --fix",
"watch": "tsc-watch -p tsconfig.build.json --onCompilationComplete \"tsc-alias -p tsconfig.build.json\" --onSuccess \"pnpm n8n-generate-metadata\"",
"test": "jest",
"test:dev": "jest --watch"


@ -1,5 +1,12 @@
import { pipeline } from 'stream/promises';
import { CSVLoader } from '@langchain/community/document_loaders/fs/csv';
import { DocxLoader } from '@langchain/community/document_loaders/fs/docx';
import { EPubLoader } from '@langchain/community/document_loaders/fs/epub';
import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf';
import type { Document } from '@langchain/core/documents';
import type { TextSplitter } from '@langchain/textsplitters';
import { createWriteStream } from 'fs';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import type {
IBinaryData,
IExecuteFunctions,
@ -7,15 +14,7 @@ import type {
ISupplyDataFunctions,
} from 'n8n-workflow';
import { NodeOperationError, BINARY_ENCODING } from 'n8n-workflow';
import type { TextSplitter } from '@langchain/textsplitters';
import type { Document } from '@langchain/core/documents';
import { CSVLoader } from '@langchain/community/document_loaders/fs/csv';
import { DocxLoader } from '@langchain/community/document_loaders/fs/docx';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { EPubLoader } from '@langchain/community/document_loaders/fs/epub';
import { pipeline } from 'stream/promises';
import { file as tmpFile, type DirectoryResult } from 'tmp-promise';
import { getMetadataFiltersValues } from './helpers';


@ -1,3 +1,7 @@
import type { Document } from '@langchain/core/documents';
import type { TextSplitter } from '@langchain/textsplitters';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import {
type IExecuteFunctions,
type INodeExecutionData,
@ -5,10 +9,6 @@ import {
NodeOperationError,
} from 'n8n-workflow';
import type { TextSplitter } from '@langchain/textsplitters';
import type { Document } from '@langchain/core/documents';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { getMetadataFiltersValues } from './helpers';
export class N8nJsonLoader {


@ -1,8 +1,9 @@
import { N8nTool } from './N8nTool';
import { createMockExecuteFunction } from 'n8n-nodes-base/test/nodes/Helpers';
import { z } from 'zod';
import type { INode } from 'n8n-workflow';
import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools';
import { createMockExecuteFunction } from 'n8n-nodes-base/test/nodes/Helpers';
import type { INode } from 'n8n-workflow';
import { z } from 'zod';
import { N8nTool } from './N8nTool';
const mockNode: INode = {
id: '1',


@ -1,8 +1,8 @@
import type { DynamicStructuredToolInput } from '@langchain/core/tools';
import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools';
import { StructuredOutputParser } from 'langchain/output_parsers';
import type { ISupplyDataFunctions, IDataObject } from 'n8n-workflow';
import { NodeConnectionType, jsonParse, NodeOperationError } from 'n8n-workflow';
import { StructuredOutputParser } from 'langchain/output_parsers';
import type { ZodTypeAny } from 'zod';
import { ZodBoolean, ZodNullable, ZodNumber, ZodObject, ZodOptional } from 'zod';


@ -1,6 +1,6 @@
{
"name": "@n8n/task-runner",
"version": "1.10.0",
"version": "1.11.0",
"scripts": {
"clean": "rimraf dist .turbo",
"start": "node dist/start.js",
@ -38,6 +38,7 @@
"@sentry/node": "catalog:",
"acorn": "8.14.0",
"acorn-walk": "8.3.4",
"lodash": "catalog:",
"n8n-core": "workspace:*",
"n8n-workflow": "workspace:*",
"nanoid": "catalog:",
@ -45,6 +46,7 @@
"ws": "^8.18.0"
},
"devDependencies": {
"@types/lodash": "catalog:",
"luxon": "catalog:"
}
}


@ -1,5 +1,6 @@
import { mock } from 'jest-mock-extended';
import { DateTime } from 'luxon';
import type { IBinaryData } from 'n8n-workflow';
import { setGlobalState, type CodeExecutionMode, type IDataObject } from 'n8n-workflow';
import fs from 'node:fs';
import { builtinModules } from 'node:module';
@ -8,10 +9,15 @@ import type { BaseRunnerConfig } from '@/config/base-runner-config';
import type { JsRunnerConfig } from '@/config/js-runner-config';
import { MainConfig } from '@/config/main-config';
import { ExecutionError } from '@/js-task-runner/errors/execution-error';
import { UnsupportedFunctionError } from '@/js-task-runner/errors/unsupported-function.error';
import { ValidationError } from '@/js-task-runner/errors/validation-error';
import type { JSExecSettings } from '@/js-task-runner/js-task-runner';
import { JsTaskRunner } from '@/js-task-runner/js-task-runner';
import type { DataRequestResponse, InputDataChunkDefinition } from '@/runner-types';
import {
UNSUPPORTED_HELPER_FUNCTIONS,
type DataRequestResponse,
type InputDataChunkDefinition,
} from '@/runner-types';
import type { Task } from '@/task-runner';
import {
@ -567,6 +573,120 @@ describe('JsTaskRunner', () => {
);
});
describe('helpers', () => {
const binaryDataFile: IBinaryData = {
data: 'data',
fileName: 'file.txt',
mimeType: 'text/plain',
};
const groups = [
{
method: 'helpers.assertBinaryData',
invocation: "helpers.assertBinaryData(0, 'binaryFile')",
expectedParams: [0, 'binaryFile'],
},
{
method: 'helpers.getBinaryDataBuffer',
invocation: "helpers.getBinaryDataBuffer(0, 'binaryFile')",
expectedParams: [0, 'binaryFile'],
},
{
method: 'helpers.prepareBinaryData',
invocation: "helpers.prepareBinaryData(Buffer.from('123'), 'file.txt', 'text/plain')",
expectedParams: [Buffer.from('123'), 'file.txt', 'text/plain'],
},
{
method: 'helpers.setBinaryDataBuffer',
invocation:
"helpers.setBinaryDataBuffer({ data: '123', mimeType: 'text/plain' }, Buffer.from('321'))",
expectedParams: [{ data: '123', mimeType: 'text/plain' }, Buffer.from('321')],
},
{
method: 'helpers.binaryToString',
invocation: "helpers.binaryToString(Buffer.from('123'), 'utf8')",
expectedParams: [Buffer.from('123'), 'utf8'],
},
{
method: 'helpers.httpRequest',
invocation: "helpers.httpRequest({ method: 'GET', url: 'http://localhost' })",
expectedParams: [{ method: 'GET', url: 'http://localhost' }],
},
];
for (const group of groups) {
it(`${group.method} for runOnceForAllItems`, async () => {
// Arrange
const rpcCallSpy = jest
.spyOn(defaultTaskRunner, 'makeRpcCall')
.mockResolvedValue(undefined);
// Act
await execTaskWithParams({
task: newTaskWithSettings({
code: `await ${group.invocation}; return []`,
nodeMode: 'runOnceForAllItems',
}),
taskData: newDataRequestResponse(
[{ json: {}, binary: { binaryFile: binaryDataFile } }],
{},
),
});
expect(rpcCallSpy).toHaveBeenCalledWith('1', group.method, group.expectedParams);
});
it(`${group.method} for runOnceForEachItem`, async () => {
// Arrange
const rpcCallSpy = jest
.spyOn(defaultTaskRunner, 'makeRpcCall')
.mockResolvedValue(undefined);
// Act
await execTaskWithParams({
task: newTaskWithSettings({
code: `await ${group.invocation}; return {}`,
nodeMode: 'runOnceForEachItem',
}),
taskData: newDataRequestResponse(
[{ json: {}, binary: { binaryFile: binaryDataFile } }],
{},
),
});
expect(rpcCallSpy).toHaveBeenCalledWith('1', group.method, group.expectedParams);
});
}
describe('unsupported methods', () => {
for (const unsupportedFunction of UNSUPPORTED_HELPER_FUNCTIONS) {
it(`should throw an error if ${unsupportedFunction} is used in runOnceForAllItems`, async () => {
// Act
await expect(
async () =>
await executeForAllItems({
code: `${unsupportedFunction}()`,
inputItems,
}),
).rejects.toThrow(UnsupportedFunctionError);
});
it(`should throw an error if ${unsupportedFunction} is used in runOnceForEachItem`, async () => {
// Act
await expect(
async () =>
await executeForEachItem({
code: `${unsupportedFunction}()`,
inputItems,
}),
).rejects.toThrow(UnsupportedFunctionError);
});
}
});
});
it('should allow access to Node.js Buffers', async () => {
const outcomeAll = await execTaskWithParams({
task: newTaskWithSettings({


@ -21,7 +21,7 @@ export class BuiltInsParser {
/**
* Parses which built-in variables are accessed in the given code
*/
public parseUsedBuiltIns(code: string): Result<BuiltInsParserState, Error> {
parseUsedBuiltIns(code: string): Result<BuiltInsParserState, Error> {
return toResult(() => {
const wrappedCode = `async function VmCodeWrapper() { ${code} }`;
const ast = parse(wrappedCode, { ecmaVersion: 2025, sourceType: 'module' });


@ -0,0 +1,13 @@
import { ApplicationError } from 'n8n-workflow';
/**
* Error that indicates that a specific function is not available in the
* Code Node.
*/
export class UnsupportedFunctionError extends ApplicationError {
constructor(functionName: string) {
super(`The function "${functionName}" is not supported in the Code Node`, {
level: 'info',
});
}
}


@ -1,3 +1,4 @@
import set from 'lodash/set';
import { getAdditionalKeys } from 'n8n-core';
import { WorkflowDataProxy, Workflow, ObservableObject } from 'n8n-workflow';
import type {
@ -19,11 +20,14 @@ import * as a from 'node:assert';
import { runInNewContext, type Context } from 'node:vm';
import type { MainConfig } from '@/config/main-config';
import type {
DataRequestResponse,
InputDataChunkDefinition,
PartialAdditionalData,
TaskResultData,
import { UnsupportedFunctionError } from '@/js-task-runner/errors/unsupported-function.error';
import {
EXPOSED_RPC_METHODS,
UNSUPPORTED_HELPER_FUNCTIONS,
type DataRequestResponse,
type InputDataChunkDefinition,
type PartialAdditionalData,
type TaskResultData,
} from '@/runner-types';
import { type Task, TaskRunner } from '@/task-runner';
@ -38,6 +42,10 @@ import { createRequireResolver } from './require-resolver';
import { validateRunForAllItemsOutput, validateRunForEachItemOutput } from './result-validation';
import { DataRequestResponseReconstruct } from '../data-request/data-request-response-reconstruct';
export interface RPCCallObject {
[name: string]: ((...args: unknown[]) => Promise<unknown>) | RPCCallObject;
}
export interface JSExecSettings {
code: string;
nodeMode: CodeExecutionMode;
@ -439,4 +447,24 @@ export class JsTaskRunner extends TaskRunner {
this.nodeTypes.addNodeTypeDescriptions(nodeTypes);
}
}
private buildRpcCallObject(taskId: string) {
const rpcObject: RPCCallObject = {};
for (const rpcMethod of EXPOSED_RPC_METHODS) {
set(
rpcObject,
rpcMethod.split('.'),
async (...args: unknown[]) => await this.makeRpcCall(taskId, rpcMethod, args),
);
}
for (const rpcMethod of UNSUPPORTED_HELPER_FUNCTIONS) {
set(rpcObject, rpcMethod.split('.'), () => {
throw new UnsupportedFunctionError(rpcMethod);
});
}
return rpcObject;
}
}


@ -2,7 +2,7 @@ import type { INodeTypeBaseDescription } from 'n8n-workflow';
import type {
NeededNodeType,
RPC_ALLOW_LIST,
AVAILABLE_RPC_METHODS,
TaskDataRequestParams,
TaskResultData,
} from './runner-types';
@ -105,7 +105,7 @@ export namespace BrokerMessage {
type: 'broker:rpc';
callId: string;
taskId: string;
name: (typeof RPC_ALLOW_LIST)[number];
name: (typeof AVAILABLE_RPC_METHODS)[number];
params: unknown[];
}
@ -239,7 +239,7 @@ export namespace RunnerMessage {
type: 'runner:rpc';
callId: string;
taskId: string;
name: (typeof RPC_ALLOW_LIST)[number];
name: (typeof AVAILABLE_RPC_METHODS)[number];
params: unknown[];
}


@ -100,31 +100,73 @@ export interface PartialAdditionalData {
variables: IDataObject;
}
export const RPC_ALLOW_LIST = [
/** RPC methods that are exposed directly to the Code Node */
export const EXPOSED_RPC_METHODS = [
// assertBinaryData(itemIndex: number, propertyName: string): Promise<IBinaryData>
'helpers.assertBinaryData',
// getBinaryDataBuffer(itemIndex: number, propertyName: string): Promise<Buffer>
'helpers.getBinaryDataBuffer',
// prepareBinaryData(binaryData: Buffer, fileName?: string, mimeType?: string): Promise<IBinaryData>
'helpers.prepareBinaryData',
// setBinaryDataBuffer(metadata: IBinaryData, buffer: Buffer): Promise<IBinaryData>
'helpers.setBinaryDataBuffer',
// binaryToString(body: Buffer, encoding?: string): string
'helpers.binaryToString',
// httpRequest(opts: IHttpRequestOptions): Promise<IN8nHttpFullResponse | IN8nHttpResponse>
'helpers.httpRequest',
];
/** Helpers that exist but that we are not exposing to the Code Node */
export const UNSUPPORTED_HELPER_FUNCTIONS = [
// These rely on checking the credentials from the current node type (Code Node)
// and hence they can't even work (Code Node doesn't have credentials)
'helpers.httpRequestWithAuthentication',
'helpers.requestWithAuthenticationPaginated',
// "helpers.normalizeItems"
// "helpers.constructExecutionMetaData"
// "helpers.assertBinaryData"
'helpers.getBinaryDataBuffer',
// "helpers.copyInputItems"
// "helpers.returnJsonArray"
'helpers.getSSHClient',
'helpers.createReadStream',
// "helpers.getStoragePath"
'helpers.writeContentToFile',
'helpers.prepareBinaryData',
'helpers.setBinaryDataBuffer',
// This has been removed
'helpers.copyBinaryFile',
'helpers.binaryToBuffer',
// "helpers.binaryToString"
// "helpers.getBinaryPath"
// We can't support streams over RPC without implementing it ourselves
'helpers.createReadStream',
'helpers.getBinaryStream',
// Makes no sense to support this, as it returns either a stream or a buffer
// and we can't support streams over RPC
'helpers.binaryToBuffer',
// These are pretty low-level, so we shouldn't expose them
// (require binary data id, which we don't expose)
'helpers.getBinaryMetadata',
'helpers.getStoragePath',
'helpers.getBinaryPath',
// We shouldn't allow arbitrary FS writes
'helpers.writeContentToFile',
// Not something we need to expose. Can be done in the node itself
// copyInputItems(items: INodeExecutionData[], properties: string[]): IDataObject[]
'helpers.copyInputItems',
// Code Node does these automatically already
'helpers.returnJsonArray',
'helpers.normalizeItems',
// The client is instantiated and lives on the n8n instance, so we can't
// expose it over RPC without implementing object marshalling
'helpers.getSSHClient',
// Doesn't make sense to expose
'helpers.createDeferredPromise',
'helpers.httpRequest',
'logNodeOutput',
] as const;
'helpers.constructExecutionMetaData',
];
/** List of all RPC methods that task runner supports */
export const AVAILABLE_RPC_METHODS = [...EXPOSED_RPC_METHODS, 'logNodeOutput'] as const;
/** Node types needed for the runner to execute a task. */
export type NeededNodeType = { name: string; version: number };


@ -1,3 +1,4 @@
import { isSerializedBuffer, toBuffer } from 'n8n-core';
import { ApplicationError, ensureError, randomInt } from 'n8n-workflow';
import { nanoid } from 'nanoid';
import { EventEmitter } from 'node:events';
@ -6,7 +7,7 @@ import { type MessageEvent, WebSocket } from 'ws';
import type { BaseRunnerConfig } from '@/config/base-runner-config';
import type { BrokerMessage, RunnerMessage } from '@/message-types';
import { TaskRunnerNodeTypes } from '@/node-types';
import { RPC_ALLOW_LIST, type TaskResultData } from '@/runner-types';
import type { TaskResultData } from '@/runner-types';
import { TaskCancelledError } from './js-task-runner/errors/task-cancelled-error';
@ -42,10 +43,6 @@ interface RPCCall {
reject: (error: unknown) => void;
}
export interface RPCCallObject {
[name: string]: ((...args: unknown[]) => Promise<unknown>) | RPCCallObject;
}
const OFFER_VALID_TIME_MS = 5000;
const OFFER_VALID_EXTRA_MS = 100;
@ -464,7 +461,9 @@ export abstract class TaskRunner extends EventEmitter {
});
try {
return await dataPromise;
const returnValue = await dataPromise;
return isSerializedBuffer(returnValue) ? toBuffer(returnValue) : returnValue;
} finally {
this.rpcCalls.delete(callId);
}
@ -486,24 +485,6 @@ export abstract class TaskRunner extends EventEmitter {
}
}
buildRpcCallObject(taskId: string) {
const rpcObject: RPCCallObject = {};
for (const r of RPC_ALLOW_LIST) {
const splitPath = r.split('.');
let obj = rpcObject;
splitPath.forEach((s, index) => {
if (index !== splitPath.length - 1) {
obj[s] = {};
obj = obj[s];
return;
}
obj[s] = async (...args: unknown[]) => await this.makeRpcCall(taskId, r, args);
});
}
return rpcObject;
}
/** Close the connection gracefully and wait until has been closed */
async stop() {
this.clearIdleTimer();


@ -316,6 +316,11 @@ const config = (module.exports = {
*/
'@typescript-eslint/return-await': ['error', 'always'],
/**
* https://typescript-eslint.io/rules/explicit-member-accessibility/
*/
'@typescript-eslint/explicit-member-accessibility': ['error', { accessibility: 'no-public' }],
// ----------------------------------
// eslint-plugin-import
// ----------------------------------


@ -1,6 +1,6 @@
{
"name": "n8n",
"version": "1.72.0",
"version": "1.73.0",
"description": "n8n Workflow Automation Tool",
"main": "dist/index",
"types": "dist/index.d.ts",


@ -27,7 +27,7 @@ export class CollaborationState {
* After how many minutes of inactivity a user should be removed
* as being an active user of a workflow.
*/
public readonly inactivityCleanUpTime = 15 * Time.minutes.toMilliseconds;
readonly inactivityCleanUpTime = 15 * Time.minutes.toMilliseconds;
constructor(private readonly cache: CacheService) {}


@ -29,6 +29,14 @@ export class CredentialsEntity extends WithTimestampsAndStringId implements ICre
@OneToMany('SharedCredentials', 'credentials')
shared: SharedCredentials[];
/**
* Whether the credential is managed by n8n. We currently use this flag
* to provide OpenAI free credits on cloud. Managed credentials cannot be
* edited by the user.
*/
@Column({ default: false })
isManaged: boolean;
toJSON() {
const { shared, ...rest } = this;
return rest;


@ -80,7 +80,7 @@ export class WorkflowEntity extends WithTimestampsAndStringId implements IWorkfl
nullable: true,
transformer: sqlite.jsonColumn,
})
pinData: ISimplifiedPinData;
pinData?: ISimplifiedPinData;
@Column({ length: 36 })
versionId: string;


@ -9,7 +9,7 @@ export class AddMockedNodesColumnToTestDefinition1733133775640 implements Revers
const mockedNodesColumnName = escape.columnName('mockedNodes');
await runQuery(
`ALTER TABLE ${tableName} ADD COLUMN ${mockedNodesColumnName} JSON DEFAULT '[]' NOT NULL`,
`ALTER TABLE ${tableName} ADD COLUMN ${mockedNodesColumnName} JSON DEFAULT ('[]') NOT NULL`,
);
}


@ -0,0 +1,21 @@
import type { MigrationContext, ReversibleMigration } from '@/databases/types';
export class AddManagedColumnToCredentialsTable1734479635324 implements ReversibleMigration {
async up({ escape, runQuery, isSqlite }: MigrationContext) {
const tableName = escape.tableName('credentials_entity');
const columnName = escape.columnName('isManaged');
const defaultValue = isSqlite ? 0 : 'FALSE';
await runQuery(
`ALTER TABLE ${tableName} ADD COLUMN ${columnName} BOOLEAN NOT NULL DEFAULT ${defaultValue}`,
);
}
async down({ escape, runQuery }: MigrationContext) {
const tableName = escape.tableName('credentials_entity');
const columnName = escape.columnName('isManaged');
await runQuery(`ALTER TABLE ${tableName} DROP COLUMN ${columnName}`);
}
}


@ -74,6 +74,7 @@ import { AddDescriptionToTestDefinition1731404028106 } from '../common/173140402
import { CreateTestMetricTable1732271325258 } from '../common/1732271325258-CreateTestMetricTable';
import { CreateTestRun1732549866705 } from '../common/1732549866705-CreateTestRunTable';
import { AddMockedNodesColumnToTestDefinition1733133775640 } from '../common/1733133775640-AddMockedNodesColumnToTestDefinition';
import { AddManagedColumnToCredentialsTable1734479635324 } from '../common/1734479635324-AddManagedColumnToCredentialsTable';
export const mysqlMigrations: Migration[] = [
InitialMigration1588157391238,
@ -150,4 +151,5 @@ export const mysqlMigrations: Migration[] = [
CreateTestMetricTable1732271325258,
CreateTestRun1732549866705,
AddMockedNodesColumnToTestDefinition1733133775640,
AddManagedColumnToCredentialsTable1734479635324,
];


@ -74,6 +74,7 @@ import { AddDescriptionToTestDefinition1731404028106 } from '../common/173140402
import { CreateTestMetricTable1732271325258 } from '../common/1732271325258-CreateTestMetricTable';
import { CreateTestRun1732549866705 } from '../common/1732549866705-CreateTestRunTable';
import { AddMockedNodesColumnToTestDefinition1733133775640 } from '../common/1733133775640-AddMockedNodesColumnToTestDefinition';
import { AddManagedColumnToCredentialsTable1734479635324 } from '../common/1734479635324-AddManagedColumnToCredentialsTable';
export const postgresMigrations: Migration[] = [
InitialMigration1587669153312,
@ -150,4 +151,5 @@ export const postgresMigrations: Migration[] = [
CreateTestMetricTable1732271325258,
CreateTestRun1732549866705,
AddMockedNodesColumnToTestDefinition1733133775640,
AddManagedColumnToCredentialsTable1734479635324,
];


@ -71,6 +71,7 @@ import { CreateTestDefinitionTable1730386903556 } from '../common/1730386903556-
import { CreateTestMetricTable1732271325258 } from '../common/1732271325258-CreateTestMetricTable';
import { CreateTestRun1732549866705 } from '../common/1732549866705-CreateTestRunTable';
import { AddMockedNodesColumnToTestDefinition1733133775640 } from '../common/1733133775640-AddMockedNodesColumnToTestDefinition';
import { AddManagedColumnToCredentialsTable1734479635324 } from '../common/1734479635324-AddManagedColumnToCredentialsTable';
const sqliteMigrations: Migration[] = [
InitialMigration1588102412422,
@ -144,6 +145,7 @@ const sqliteMigrations: Migration[] = [
CreateTestMetricTable1732271325258,
CreateTestRun1732549866705,
AddMockedNodesColumnToTestDefinition1733133775640,
AddManagedColumnToCredentialsTable1734479635324,
];
export { sqliteMigrations };


@ -12,7 +12,7 @@ export class TestRunRepository extends Repository<TestRun> {
super(TestRun, dataSource.manager);
}
public async createTestRun(testDefinitionId: string) {
async createTestRun(testDefinitionId: string) {
const testRun = this.create({
status: 'new',
testDefinition: { id: testDefinitionId },
@ -21,19 +21,19 @@ export class TestRunRepository extends Repository<TestRun> {
return await this.save(testRun);
}
public async markAsRunning(id: string) {
async markAsRunning(id: string) {
return await this.update(id, { status: 'running', runAt: new Date() });
}
public async markAsCompleted(id: string, metrics: AggregatedTestRunMetrics) {
async markAsCompleted(id: string, metrics: AggregatedTestRunMetrics) {
return await this.update(id, { status: 'completed', completedAt: new Date(), metrics });
}
public async markAsCancelled(id: string) {
async markAsCancelled(id: string) {
return await this.update(id, { status: 'cancelled' });
}
public async getMany(testDefinitionId: string, options: ListQuery.Options) {
async getMany(testDefinitionId: string, options: ListQuery.Options) {
const findManyOptions: FindManyOptions<TestRun> = {
where: { testDefinition: { id: testDefinitionId } },
order: { createdAt: 'DESC' },


@ -71,7 +71,7 @@ export class SourceControlExportService {
}
}
public rmFilesFromExportFolder(filesToBeDeleted: Set<string>): Set<string> {
rmFilesFromExportFolder(filesToBeDeleted: Set<string>): Set<string> {
try {
filesToBeDeleted.forEach((e) => rmSync(e));
} catch (error) {

View file

@ -65,7 +65,7 @@ export class SourceControlImportService {
);
}
public async getRemoteVersionIdsFromFiles(): Promise<SourceControlWorkflowVersionId[]> {
async getRemoteVersionIdsFromFiles(): Promise<SourceControlWorkflowVersionId[]> {
const remoteWorkflowFiles = await glob('*.json', {
cwd: this.workflowExportFolder,
absolute: true,
@ -91,7 +91,7 @@ export class SourceControlImportService {
);
}
public async getLocalVersionIdsFromDb(): Promise<SourceControlWorkflowVersionId[]> {
async getLocalVersionIdsFromDb(): Promise<SourceControlWorkflowVersionId[]> {
const localWorkflows = await Container.get(WorkflowRepository).find({
select: ['id', 'name', 'versionId', 'updatedAt'],
});
@ -119,7 +119,7 @@ export class SourceControlImportService {
}) as SourceControlWorkflowVersionId[];
}
public async getRemoteCredentialsFromFiles(): Promise<
async getRemoteCredentialsFromFiles(): Promise<
Array<ExportableCredential & { filename: string }>
> {
const remoteCredentialFiles = await glob('*.json', {
@ -146,9 +146,7 @@ export class SourceControlImportService {
>;
}
public async getLocalCredentialsFromDb(): Promise<
Array<ExportableCredential & { filename: string }>
> {
async getLocalCredentialsFromDb(): Promise<Array<ExportableCredential & { filename: string }>> {
const localCredentials = await Container.get(CredentialsRepository).find({
select: ['id', 'name', 'type'],
});
@ -160,7 +158,7 @@ export class SourceControlImportService {
})) as Array<ExportableCredential & { filename: string }>;
}
public async getRemoteVariablesFromFile(): Promise<Variables[]> {
async getRemoteVariablesFromFile(): Promise<Variables[]> {
const variablesFile = await glob(SOURCE_CONTROL_VARIABLES_EXPORT_FILE, {
cwd: this.gitFolder,
absolute: true,
@ -174,11 +172,11 @@ export class SourceControlImportService {
return [];
}
public async getLocalVariablesFromDb(): Promise<Variables[]> {
async getLocalVariablesFromDb(): Promise<Variables[]> {
return await this.variablesService.getAllCached();
}
public async getRemoteTagsAndMappingsFromFile(): Promise<{
async getRemoteTagsAndMappingsFromFile(): Promise<{
tags: TagEntity[];
mappings: WorkflowTagMapping[];
}> {
@ -197,7 +195,7 @@ export class SourceControlImportService {
return { tags: [], mappings: [] };
}
public async getLocalTagsAndMappingsFromDb(): Promise<{
async getLocalTagsAndMappingsFromDb(): Promise<{
tags: TagEntity[];
mappings: WorkflowTagMapping[];
}> {
@ -210,7 +208,7 @@ export class SourceControlImportService {
return { tags: localTags, mappings: localMappings };
}
public async importWorkflowFromWorkFolder(candidates: SourceControlledFile[], userId: string) {
async importWorkflowFromWorkFolder(candidates: SourceControlledFile[], userId: string) {
const personalProject =
await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(userId);
const workflowManager = this.activeWorkflowManager;
@ -297,7 +295,7 @@ export class SourceControlImportService {
}>;
}
public async importCredentialsFromWorkFolder(candidates: SourceControlledFile[], userId: string) {
async importCredentialsFromWorkFolder(candidates: SourceControlledFile[], userId: string) {
const personalProject =
await Container.get(ProjectRepository).getPersonalProjectForUserOrFail(userId);
const candidateIds = candidates.map((c) => c.id);
@ -371,7 +369,7 @@ export class SourceControlImportService {
return importCredentialsResult.filter((e) => e !== undefined);
}
public async importTagsFromWorkFolder(candidate: SourceControlledFile) {
async importTagsFromWorkFolder(candidate: SourceControlledFile) {
let mappedTags;
try {
this.logger.debug(`Importing tags from file ${candidate.file}`);
@ -433,7 +431,7 @@ export class SourceControlImportService {
return mappedTags;
}
public async importVariablesFromWorkFolder(
async importVariablesFromWorkFolder(
candidate: SourceControlledFile,
valueOverrides?: {
[key: string]: string;

View file

@ -41,7 +41,7 @@ export class SourceControlPreferencesService {
this.sshKeyName = path.join(this.sshFolder, SOURCE_CONTROL_SSH_KEY_NAME);
}
public get sourceControlPreferences(): SourceControlPreferences {
get sourceControlPreferences(): SourceControlPreferences {
return {
...this._sourceControlPreferences,
connected: this._sourceControlPreferences.connected ?? false,
@ -49,14 +49,14 @@ export class SourceControlPreferencesService {
}
// merge the new preferences with the existing preferences when setting
public set sourceControlPreferences(preferences: Partial<SourceControlPreferences>) {
set sourceControlPreferences(preferences: Partial<SourceControlPreferences>) {
this._sourceControlPreferences = SourceControlPreferences.merge(
preferences,
this._sourceControlPreferences,
);
}
public isSourceControlSetup() {
isSourceControlSetup() {
return (
this.isSourceControlLicensedAndEnabled() &&
this.getPreferences().repositoryUrl &&

View file

@ -81,7 +81,7 @@ export class SourceControlService {
});
}
public async sanityCheck(): Promise<void> {
async sanityCheck(): Promise<void> {
try {
const foldersExisted = sourceControlFoldersExistCheck(
[this.gitFolder, this.sshFolder],

View file

@ -211,7 +211,7 @@ export class TestRunnerService {
/**
* Creates a new test run for the given test definition.
*/
public async runTest(user: User, test: TestDefinition): Promise<void> {
async runTest(user: User, test: TestDefinition): Promise<void> {
const workflow = await this.workflowRepository.findById(test.workflowId);
assert(workflow, 'Workflow not found');

View file

@ -70,7 +70,7 @@ export class MessageEventBusLogWriter {
this.globalConfig = Container.get(GlobalConfig);
}
public get worker(): Worker | undefined {
get worker(): Worker | undefined {
return this._worker;
}

View file

@ -504,7 +504,7 @@ export class ExecutionService {
}
}
public async annotate(
async annotate(
executionId: string,
updateData: ExecutionRequest.ExecutionUpdatePayload,
sharedWorkflowIds: string[],

View file

@ -74,11 +74,12 @@ export declare namespace WorkflowRequest {
active: boolean;
name?: string;
projectId?: string;
excludePinnedData?: boolean;
}
>;
type Create = AuthenticatedRequest<{}, {}, WorkflowEntity, {}>;
type Get = AuthenticatedRequest<{ id: string }, {}, {}, {}>;
type Get = AuthenticatedRequest<{ id: string }, {}, {}, { excludePinnedData?: boolean }>;
type Delete = Get;
type Update = AuthenticatedRequest<{ id: string }, {}, WorkflowEntity, {}>;
type Activate = Get;

View file

@ -6,6 +6,13 @@ get:
summary: Retrieves a workflow
description: Retrieves a workflow.
parameters:
- name: excludePinnedData
in: query
required: false
description: Set this to avoid retrieving pinned data
schema:
type: boolean
example: true
- $ref: '../schemas/parameters/workflowId.yml'
responses:
'200':

View file

@ -60,6 +60,13 @@ get:
schema:
type: string
example: VmwOO9HeTEj20kxM
- name: excludePinnedData
in: query
required: false
description: Set this to avoid retrieving pinned data
schema:
type: boolean
example: true
- $ref: '../../../../shared/spec/parameters/limit.yml'
- $ref: '../../../../shared/spec/parameters/cursor.yml'
responses:

View file

@ -105,6 +105,7 @@ export = {
projectScope('workflow:read', 'workflow'),
async (req: WorkflowRequest.Get, res: express.Response): Promise<express.Response> => {
const { id } = req.params;
const { excludePinnedData = false } = req.query;
const workflow = await Container.get(SharedWorkflowRepository).findWorkflowForUser(
id,
@ -120,6 +121,10 @@ export = {
return res.status(404).json({ message: 'Not Found' });
}
if (excludePinnedData) {
delete workflow.pinData;
}
Container.get(EventService).emit('user-retrieved-workflow', {
userId: req.user.id,
publicApi: true,
@ -131,7 +136,15 @@ export = {
getWorkflows: [
validCursor,
async (req: WorkflowRequest.GetAll, res: express.Response): Promise<express.Response> => {
const { offset = 0, limit = 100, active, tags, name, projectId } = req.query;
const {
offset = 0,
limit = 100,
excludePinnedData = false,
active,
tags,
name,
projectId,
} = req.query;
const where: FindOptionsWhere<WorkflowEntity> = {
...(active !== undefined && { active }),
@ -199,6 +212,12 @@ export = {
...(!config.getEnv('workflowTagsDisabled') && { relations: ['tags'] }),
});
if (excludePinnedData) {
workflows.forEach((workflow) => {
delete workflow.pinData;
});
}
Container.get(EventService).emit('user-retrieved-all-workflows', {
userId: req.user.id,
publicApi: true,
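
For context, a minimal client-side sketch of the new query parameter in use. This is illustrative only: it assumes a locally running instance exposing the public API under /api/v1 with an X-N8N-API-KEY header (standard n8n public API conventions), Node 18+ for the global fetch, and an API key in N8N_API_KEY; the endpoint paths mirror the spec changes above.

// Minimal sketch: fetch a single workflow and the workflow list without pinned data.
const baseUrl = 'http://localhost:5678/api/v1';
const headers = { 'X-N8N-API-KEY': process.env.N8N_API_KEY ?? '' };

async function getWorkflowWithoutPinData(id: string) {
  const res = await fetch(`${baseUrl}/workflows/${id}?excludePinnedData=true`, { headers });
  if (!res.ok) throw new Error(`Request failed with status ${res.status}`);
  return (await res.json()) as Record<string, unknown>; // response body carries no `pinData` property
}

async function listWorkflowsWithoutPinData() {
  const res = await fetch(`${baseUrl}/workflows?excludePinnedData=true&limit=100`, { headers });
  if (!res.ok) throw new Error(`Request failed with status ${res.status}`);
  return (await res.json()) as { data: Array<Record<string, unknown>>; nextCursor: string | null };
}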

View file

@ -11,15 +11,15 @@ import { mockInstance } from '@test/mocking';
jest.useFakeTimers();
class MockWebSocket extends EventEmitter {
public isAlive = true;
isAlive = true;
public ping = jest.fn();
ping = jest.fn();
public send = jest.fn();
send = jest.fn();
public terminate = jest.fn();
terminate = jest.fn();
public close = jest.fn();
close = jest.fn();
}
const createMockWebSocket = () => new MockWebSocket() as unknown as jest.Mocked<WebSocket>;

View file

@ -36,7 +36,7 @@ const useWebSockets = config.getEnv('push.backend') === 'websocket';
*/
@Service()
export class Push extends TypedEmitter<PushEvents> {
public isBidirectional = useWebSockets;
isBidirectional = useWebSockets;
private backend = useWebSockets ? Container.get(WebSocketPush) : Container.get(SSEPush);

View file

@ -58,4 +58,28 @@ describe('TaskRunnerWsServer', () => {
expect(clearIntervalSpy).toHaveBeenCalled();
});
});
describe('sendMessage', () => {
it('should work with a message containing circular references', () => {
const server = new TaskRunnerWsServer(mock(), mock(), mock(), mock(), mock());
const ws = mock<WebSocket>();
server.runnerConnections.set('test-runner', ws);
const messageData: Record<string, unknown> = {};
messageData.circular = messageData;
expect(() =>
server.sendMessage('test-runner', {
type: 'broker:taskdataresponse',
taskId: 'taskId',
requestId: 'requestId',
data: messageData,
}),
).not.toThrow();
expect(ws.send).toHaveBeenCalledWith(
'{"type":"broker:taskdataresponse","taskId":"taskId","requestId":"requestId","data":{"circular":"[Circular Reference]"}}',
);
});
});
});

View file

@ -2,10 +2,10 @@ import type { TaskRunner } from '@n8n/task-runner';
import { ApplicationError } from 'n8n-workflow';
export class TaskRunnerDisconnectedError extends ApplicationError {
public description: string;
description: string;
constructor(
public readonly runnerId: TaskRunner['id'],
readonly runnerId: TaskRunner['id'],
isCloudDeployment: boolean,
) {
super('Node execution failed');

View file

@ -3,10 +3,10 @@ import { ApplicationError } from 'n8n-workflow';
import type { TaskRunner } from '../task-broker.service';
export class TaskRunnerOomError extends ApplicationError {
public description: string;
description: string;
constructor(
public readonly runnerId: TaskRunner['id'],
readonly runnerId: TaskRunner['id'],
isCloudDeployment: boolean,
) {
super('Node ran out of memory.', { level: 'error' });

View file

@ -2,8 +2,8 @@ import { ApplicationError } from 'n8n-workflow';
export class TaskRunnerRestartLoopError extends ApplicationError {
constructor(
public readonly howManyTimes: number,
public readonly timePeriodMs: number,
readonly howManyTimes: number,
readonly timePeriodMs: number,
) {
const message = `Task runner has restarted ${howManyTimes} times within ${timePeriodMs / 1000} seconds. This is an abnormally high restart rate that suggests a bug or other issue is preventing your runner process from starting up. If this issue persists, please file a report at: https://github.com/n8n-io/n8n/issues`

View file

@ -6,7 +6,7 @@ import type { ChildProcess } from 'node:child_process';
* memory (OOMs).
*/
export class NodeProcessOomDetector {
public get didProcessOom() {
get didProcessOom() {
return this._didProcessOom;
}

View file

@ -1,6 +1,6 @@
import { TaskRunnersConfig } from '@n8n/config';
import type { BrokerMessage, RunnerMessage } from '@n8n/task-runner';
import { ApplicationError } from 'n8n-workflow';
import { ApplicationError, jsonStringify } from 'n8n-workflow';
import { Service } from 'typedi';
import type WebSocket from 'ws';
@ -83,7 +83,7 @@ export class TaskRunnerWsServer {
}
sendMessage(id: TaskRunner['id'], message: BrokerMessage.ToRunner.All) {
this.runnerConnections.get(id)?.send(JSON.stringify(message));
this.runnerConnections.get(id)?.send(jsonStringify(message, { replaceCircularRefs: true }));
}
add(id: TaskRunner['id'], connection: WebSocket) {
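
To make the intent of this one-line change concrete, here is an illustrative TypeScript sketch of circular-reference-safe serialization. It is not the n8n-workflow jsonStringify implementation (whose internals are not part of this diff); it only demonstrates the replacer technique, and for brevity it also collapses repeated non-circular references.

// Sketch only: substitute circular references with a placeholder so stringification never throws.
function stringifyWithCircularRefs(value: unknown): string {
  const seen = new WeakSet<object>();
  return JSON.stringify(value, (_key, val) => {
    if (typeof val === 'object' && val !== null) {
      if (seen.has(val)) return '[Circular Reference]';
      seen.add(val);
    }
    return val;
  });
}

const messageData: Record<string, unknown> = {};
messageData.circular = messageData;
console.log(stringifyWithCircularRefs(messageData)); // {"circular":"[Circular Reference]"}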

View file

@ -36,7 +36,7 @@ export class SlidingWindowSignal<TEvents, TEventName extends keyof TEvents & str
* milliseconds for the event to be emitted. `null` is returned
* if no event is emitted within the window.
*/
public async getSignal(): Promise<TEvents[TEventName] | null> {
async getSignal(): Promise<TEvents[TEventName] | null> {
const timeSinceLastEvent = Date.now() - this.lastSignalTime;
if (timeSinceLastEvent <= this.windowSizeInMs) return this.lastSignal;

View file

@ -0,0 +1,136 @@
import { mock } from 'jest-mock-extended';
import { get, set } from 'lodash';
import type { NodeTypes } from '@/node-types';
import type { Task } from '@/runners/task-managers/task-manager';
import { TaskManager } from '@/runners/task-managers/task-manager';
class TestTaskManager extends TaskManager {
sentMessages: unknown[] = [];
sendMessage(message: unknown) {
this.sentMessages.push(message);
}
}
describe('TaskManager', () => {
let instance: TestTaskManager;
const mockNodeTypes = mock<NodeTypes>();
beforeEach(() => {
instance = new TestTaskManager(mockNodeTypes);
});
describe('handleRpc', () => {
test.each([
['logNodeOutput', ['hello world']],
['helpers.assertBinaryData', [0, 'propertyName']],
['helpers.getBinaryDataBuffer', [0, 'propertyName']],
['helpers.prepareBinaryData', [Buffer.from('data').toJSON(), 'filename', 'mimetype']],
['helpers.setBinaryDataBuffer', [{ data: '123' }, Buffer.from('data').toJSON()]],
['helpers.binaryToString', [Buffer.from('data').toJSON(), 'utf8']],
['helpers.httpRequest', [{ url: 'http://localhost' }]],
])('should handle %s rpc call', async (methodName, args) => {
const executeFunctions = set({}, methodName.split('.'), jest.fn());
const mockTask = mock<Task>({
taskId: 'taskId',
data: {
executeFunctions,
},
});
instance.tasks.set('taskId', mockTask);
await instance.handleRpc('taskId', 'callId', methodName, args);
expect(instance.sentMessages).toEqual([
{
callId: 'callId',
data: undefined,
status: 'success',
taskId: 'taskId',
type: 'requester:rpcresponse',
},
]);
expect(get(executeFunctions, methodName.split('.'))).toHaveBeenCalledWith(...args);
});
it('converts any serialized buffer arguments into buffers', async () => {
const mockPrepareBinaryData = jest.fn().mockResolvedValue(undefined);
const mockTask = mock<Task>({
taskId: 'taskId',
data: {
executeFunctions: {
helpers: {
prepareBinaryData: mockPrepareBinaryData,
},
},
},
});
instance.tasks.set('taskId', mockTask);
await instance.handleRpc('taskId', 'callId', 'helpers.prepareBinaryData', [
Buffer.from('data').toJSON(),
'filename',
'mimetype',
]);
expect(mockPrepareBinaryData).toHaveBeenCalledWith(
Buffer.from('data'),
'filename',
'mimetype',
);
});
describe('errors', () => {
it('sends method not allowed error if method is not in the allow list', async () => {
const mockTask = mock<Task>({
taskId: 'taskId',
data: {
executeFunctions: {},
},
});
instance.tasks.set('taskId', mockTask);
await instance.handleRpc('taskId', 'callId', 'notAllowedMethod', []);
expect(instance.sentMessages).toEqual([
{
callId: 'callId',
data: 'Method not allowed',
status: 'error',
taskId: 'taskId',
type: 'requester:rpcresponse',
},
]);
});
it('sends error if method throws', async () => {
const error = new Error('Test error');
const mockTask = mock<Task>({
taskId: 'taskId',
data: {
executeFunctions: {
helpers: {
assertBinaryData: jest.fn().mockRejectedValue(error),
},
},
},
});
instance.tasks.set('taskId', mockTask);
await instance.handleRpc('taskId', 'callId', 'helpers.assertBinaryData', []);
expect(instance.sentMessages).toEqual([
{
callId: 'callId',
data: error,
status: 'error',
taskId: 'taskId',
type: 'requester:rpcresponse',
},
]);
});
});
});
});

View file

@ -1,5 +1,6 @@
import type { TaskResultData, RequesterMessage, BrokerMessage, TaskData } from '@n8n/task-runner';
import { RPC_ALLOW_LIST } from '@n8n/task-runner';
import { AVAILABLE_RPC_METHODS } from '@n8n/task-runner';
import { isSerializedBuffer, toBuffer } from 'n8n-core';
import { createResultOk, createResultError } from 'n8n-workflow';
import type {
EnvProviderState,
@ -288,7 +289,7 @@ export abstract class TaskManager {
}
try {
if (!RPC_ALLOW_LIST.includes(name)) {
if (!AVAILABLE_RPC_METHODS.includes(name)) {
this.sendMessage({
type: 'requester:rpcresponse',
taskId,
@ -322,6 +323,15 @@ export abstract class TaskManager {
});
return;
}
// Convert any serialized buffers back to buffers
for (let i = 0; i < params.length; i++) {
const paramValue = params[i];
if (isSerializedBuffer(paramValue)) {
params[i] = toBuffer(paramValue);
}
}
const data = (await func.call(funcs, ...params)) as unknown;
this.sendMessage({

View file

@ -28,17 +28,17 @@ export type TaskRunnerProcessEventMap = {
*/
@Service()
export class TaskRunnerProcess extends TypedEmitter<TaskRunnerProcessEventMap> {
public get isRunning() {
get isRunning() {
return this.process !== null;
}
/** The process ID of the task runner process */
public get pid() {
get pid() {
return this.process?.pid;
}
/** Promise that resolves when the process has exited */
public get runPromise() {
get runPromise() {
return this._runPromise;
}

View file

@ -31,7 +31,7 @@ export class TaskRunnerServer {
readonly app: express.Application;
public get port() {
get port() {
return (this.server?.address() as AddressInfo)?.port;
}

View file

@ -13,9 +13,17 @@ import { Logger } from '@/logging/logger.service';
import { OrchestrationService } from '../orchestration.service';
/**
* Responsible for pruning executions from the database and their associated binary data
* from the filesystem, on a rolling basis. By default we soft-delete execution rows
* every cycle and hard-delete them and their binary data every 4th cycle.
* Responsible for deleting old executions from the database and deleting their
* associated binary data from the filesystem, on a rolling basis.
*
* By default:
*
* - Soft deletion (every 60m) identifies all prunable executions based on max
* age and/or max count, exempting annotated executions.
* - Hard deletion (every 15m) processes prunable executions in batches of 100,
* switching to 1s intervals until the total to prune is back down low enough,
* or in case the hard deletion fails.
* - Once mostly caught up, hard deletion goes back to the 15m schedule.
*/
@Service()
export class PruningService {
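
As a rough illustration of the schedule the updated comment describes, here is a sketch of the two-cadence loop. It is not the actual PruningService: the interval values and batch size come from the comment above, while the class and method names are made up for the example.

// Sketch of the pruning cadence described above. Not the real implementation.
class PruningSchedulerSketch {
  private readonly softDeleteEveryMs = 60 * 60 * 1000; // soft-delete pass every 60m
  private readonly hardDeleteEveryMs = 15 * 60 * 1000; // regular hard-delete pass every 15m
  private readonly catchUpDelayMs = 1_000; // short delay while a backlog remains or after a failure
  private readonly batchSize = 100;

  start(softDelete: () => Promise<void>, hardDeleteBatch: () => Promise<number>) {
    setInterval(() => void softDelete(), this.softDeleteEveryMs);

    const scheduleHardDeletion = (delayMs: number) =>
      setTimeout(async () => {
        let nextDelay = this.hardDeleteEveryMs;
        try {
          const deleted = await hardDeleteBatch();
          // A full batch suggests more prunable executions are waiting: come back in ~1s.
          if (deleted >= this.batchSize) nextDelay = this.catchUpDelayMs;
        } catch {
          // On failure, retry on the short interval instead of waiting a full cycle.
          nextDelay = this.catchUpDelayMs;
        }
        scheduleHardDeletion(nextDelay);
      }, delayMs);

    scheduleHardDeletion(this.hardDeleteEveryMs);
  }
}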

View file

@ -68,7 +68,7 @@ export class SamlService {
},
};
public get samlPreferences(): SamlPreferences {
get samlPreferences(): SamlPreferences {
return {
...this._samlPreferences,
loginEnabled: isSamlLoginEnabled(),

View file

@ -14,7 +14,7 @@ jest.unmock('node:fs');
/** Test server for testing the form data parsing */
class TestServer {
public agent: TestAgent;
agent: TestAgent;
private app: express.Application;

View file

@ -378,6 +378,47 @@ describe('GET /workflows', () => {
expect(updatedAt).toBeDefined();
}
});
test('should return all owned workflows without pinned data', async () => {
await Promise.all([
createWorkflow(
{
pinData: {
Webhook1: [{ json: { first: 'first' } }],
},
},
member,
),
createWorkflow(
{
pinData: {
Webhook2: [{ json: { second: 'second' } }],
},
},
member,
),
createWorkflow(
{
pinData: {
Webhook3: [{ json: { third: 'third' } }],
},
},
member,
),
]);
const response = await authMemberAgent.get('/workflows?excludePinnedData=true');
expect(response.statusCode).toBe(200);
expect(response.body.data.length).toBe(3);
expect(response.body.nextCursor).toBeNull();
for (const workflow of response.body.data) {
const { pinData } = workflow;
expect(pinData).not.toBeDefined();
}
});
});
describe('GET /workflows/:id', () => {
@ -444,6 +485,26 @@ describe('GET /workflows/:id', () => {
expect(createdAt).toEqual(workflow.createdAt.toISOString());
expect(updatedAt).toEqual(workflow.updatedAt.toISOString());
});
test('should retrieve workflow without pinned data', async () => {
// create and assign workflow to owner
const workflow = await createWorkflow(
{
pinData: {
Webhook1: [{ json: { first: 'first' } }],
},
},
member,
);
const response = await authMemberAgent.get(`/workflows/${workflow.id}?excludePinnedData=true`);
expect(response.statusCode).toBe(200);
const { pinData } = response.body;
expect(pinData).not.toBeDefined();
});
});
describe('DELETE /workflows/:id', () => {

View file

@ -10,6 +10,7 @@ mkdirSync(baseDir, { recursive: true });
const testDir = mkdtempSync(baseDir);
mkdirSync(join(testDir, '.n8n'));
process.env.N8N_USER_FOLDER = testDir;
process.env.N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS = 'false';
writeFileSync(
join(testDir, '.n8n/config'),

View file

@ -1,6 +1,6 @@
{
"name": "n8n-core",
"version": "1.72.0",
"version": "1.73.0",
"description": "Core functionality of n8n",
"main": "dist/index",
"types": "dist/index.d.ts",
@ -42,6 +42,7 @@
"@sentry/node": "catalog:",
"aws4": "1.11.0",
"axios": "catalog:",
"chardet": "2.0.0",
"concat-stream": "2.0.0",
"cron": "3.1.7",
"fast-glob": "catalog:",

View file

@ -388,7 +388,7 @@ class AIParametersParser {
* Creates a DynamicStructuredTool from a node.
* @returns A DynamicStructuredTool instance.
*/
public createTool(): DynamicStructuredTool {
createTool(): DynamicStructuredTool {
const { node, nodeType } = this.options;
const schema = this.getSchema();
const description = this.getDescription();

View file

@ -15,6 +15,7 @@ import type {
import { ClientOAuth2 } from '@n8n/client-oauth2';
import type { AxiosError, AxiosHeaders, AxiosRequestConfig, AxiosResponse } from 'axios';
import axios from 'axios';
import chardet from 'chardet';
import crypto, { createHmac } from 'crypto';
import FileType from 'file-type';
import FormData from 'form-data';
@ -1050,6 +1051,10 @@ export async function getBinaryDataBuffer(
return await Container.get(BinaryDataService).getAsBuffer(binaryData);
}
export function detectBinaryEncoding(buffer: Buffer): string {
return chardet.detect(buffer) as string;
}
/**
* Store an incoming IBinaryData & related buffer using the configured binary data manager.
*
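
A small usage sketch for the new helper. The chardet dependency and its detect() call are shown in this diff; the sample input is made up for illustration.

// detectBinaryEncoding wraps chardet.detect, which guesses the encoding from the raw bytes.
import chardet from 'chardet';

const bytes = Buffer.from('héllo wörld', 'utf8');
const encoding = chardet.detect(bytes); // most likely 'UTF-8' for this input
console.log(encoding);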

View file

@ -0,0 +1,24 @@
/** A nodejs Buffer gone through JSON.stringify */
export type SerializedBuffer = {
type: 'Buffer';
data: number[]; // array of byte values, like a Uint8Array; each item is a uint8 (0-255)
};
/** Converts the given SerializedBuffer to nodejs Buffer */
export function toBuffer(serializedBuffer: SerializedBuffer): Buffer {
return Buffer.from(serializedBuffer.data);
}
function isObjectLiteral(item: unknown): item is { [key: string]: unknown } {
return typeof item === 'object' && item !== null && !Array.isArray(item);
}
export function isSerializedBuffer(candidate: unknown): candidate is SerializedBuffer {
return (
isObjectLiteral(candidate) &&
'type' in candidate &&
'data' in candidate &&
candidate.type === 'Buffer' &&
Array.isArray(candidate.data)
);
}
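
A brief round-trip sketch of how these helpers fit together. The n8n-core exports are added in this diff; the scenario itself is illustrative.

// A Buffer serialized over JSON (e.g. inside an RPC payload) becomes { type: 'Buffer', data: [...] };
// the type guard recognizes that shape and toBuffer restores a real Buffer from it.
import { isSerializedBuffer, toBuffer } from 'n8n-core';

const original = Buffer.from('data');
const overTheWire: unknown = JSON.parse(JSON.stringify(original)); // { type: 'Buffer', data: [100, 97, 116, 97] }

if (isSerializedBuffer(overTheWire)) {
  const restored = toBuffer(overTheWire);
  console.log(restored.equals(original)); // true
}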

View file

@ -53,7 +53,7 @@ describe('Memoized Decorator', () => {
class InvalidClass {
// @ts-expect-error this code will fail at compile time and at runtime
@Memoized
public normalProperty = 42;
normalProperty = 42;
}
new InvalidClass();
}).toThrow(AssertionError);

View file

@ -24,3 +24,4 @@ export * from './ExecutionMetadata';
export * from './node-execution-context';
export * from './PartialExecutionUtils';
export { ErrorReporter } from './error-reporter';
export * from './SerializedBuffer';

View file

@ -37,6 +37,7 @@ import {
getSSHTunnelFunctions,
getFileSystemHelperFunctions,
getCheckProcessedHelperFunctions,
detectBinaryEncoding,
} from '@/NodeExecuteFunctions';
import { BaseExecuteContext } from './base-execute-context';
@ -96,6 +97,7 @@ export class ExecuteContext extends BaseExecuteContext implements IExecuteFuncti
assertBinaryData(inputData, node, itemIndex, propertyName, 0),
getBinaryDataBuffer: async (itemIndex, propertyName) =>
await getBinaryDataBuffer(inputData, itemIndex, propertyName, 0),
detectBinaryEncoding: (buffer: Buffer) => detectBinaryEncoding(buffer),
};
this.nodeHelpers = {

View file

@ -16,6 +16,7 @@ import { ApplicationError, createDeferredPromise, NodeConnectionType } from 'n8n
// eslint-disable-next-line import/no-cycle
import {
assertBinaryData,
detectBinaryEncoding,
getBinaryDataBuffer,
getBinaryHelperFunctions,
getRequestHelperFunctions,
@ -69,6 +70,7 @@ export class ExecuteSingleContext extends BaseExecuteContext implements IExecute
assertBinaryData(inputData, node, itemIndex, propertyName, inputIndex),
getBinaryDataBuffer: async (propertyName, inputIndex = 0) =>
await getBinaryDataBuffer(inputData, itemIndex, propertyName, inputIndex),
detectBinaryEncoding: (buffer) => detectBinaryEncoding(buffer),
};
}

Some files were not shown because too many files have changed in this diff.