diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 681de6c024..0c5abcba47 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -7,6 +7,7 @@ "EditorConfig.EditorConfig", "esbenp.prettier-vscode", "mjmlio.vscode-mjml", - "Vue.volar" + "Vue.volar", + "vitest.explorer" ] } diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4a67bf00ac..3f19be15e9 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -68,7 +68,7 @@ If you already have VS Code and Docker installed, you can click [here](https://v #### Node.js -[Node.js](https://nodejs.org/en/) version 18.10 or newer is required for development purposes. +[Node.js](https://nodejs.org/en/) version 20.15 or newer is required for development purposes. #### pnpm diff --git a/cypress/e2e/2270-ADO-opening-webhook-ndv-marks-workflow-as-unsaved.cy.ts b/cypress/e2e/2270-ADO-opening-webhook-ndv-marks-workflow-as-unsaved.cy.ts new file mode 100644 index 0000000000..eede668e1e --- /dev/null +++ b/cypress/e2e/2270-ADO-opening-webhook-ndv-marks-workflow-as-unsaved.cy.ts @@ -0,0 +1,21 @@ +import { WEBHOOK_NODE_NAME } from '../constants'; +import { NDV, WorkflowPage } from '../pages'; + +const workflowPage = new WorkflowPage(); +const ndv = new NDV(); + +describe('ADO-2270 Save button resets on webhook node open', () => { + it('should not reset the save button if webhook node is opened and closed', () => { + workflowPage.actions.visit(); + workflowPage.actions.addInitialNodeToCanvas(WEBHOOK_NODE_NAME); + workflowPage.getters.saveButton().click(); + workflowPage.actions.openNode(WEBHOOK_NODE_NAME); + + ndv.actions.close(); + + cy.ifCanvasVersion( + () => cy.getByTestId('workflow-save-button').should('not.contain', 'Saved'), + () => cy.getByTestId('workflow-save-button').should('contain', 'Saved'), + ); + }); +}); diff --git a/docker/images/n8n/n8n-task-runners.json b/docker/images/n8n/n8n-task-runners.json index 56a48b2d09..699794d504 100644 --- a/docker/images/n8n/n8n-task-runners.json +++ b/docker/images/n8n/n8n-task-runners.json @@ -10,8 +10,10 @@ "N8N_RUNNERS_GRANT_TOKEN", "N8N_RUNNERS_N8N_URI", "N8N_RUNNERS_MAX_PAYLOAD", + "N8N_RUNNERS_MAX_CONCURRENCY", "NODE_FUNCTION_ALLOW_BUILTIN", - "NODE_FUNCTION_ALLOW_EXTERNAL" + "NODE_FUNCTION_ALLOW_EXTERNAL", + "NODE_OPTIONS" ], "uid": 2000, "gid": 2000 diff --git a/packages/@n8n/api-types/package.json b/packages/@n8n/api-types/package.json index 0c4440eb6b..b3d28f18cc 100644 --- a/packages/@n8n/api-types/package.json +++ b/packages/@n8n/api-types/package.json @@ -21,6 +21,7 @@ "dist/**/*" ], "devDependencies": { + "@n8n/config": "workspace:*", "n8n-workflow": "workspace:*" }, "dependencies": { diff --git a/packages/@n8n/api-types/src/frontend-settings.ts b/packages/@n8n/api-types/src/frontend-settings.ts index 5084344aeb..6b2f3231d3 100644 --- a/packages/@n8n/api-types/src/frontend-settings.ts +++ b/packages/@n8n/api-types/src/frontend-settings.ts @@ -1,3 +1,4 @@ +import type { FrontendBetaFeatures } from '@n8n/config'; import type { ExpressionEvaluatorType, LogLevel, WorkflowSettings } from 'n8n-workflow'; export interface IVersionNotificationSettings { @@ -169,4 +170,5 @@ export interface FrontendSettings { security: { blockFileAccessToN8nFiles: boolean; }; + betaFeatures: FrontendBetaFeatures[]; } diff --git a/packages/@n8n/config/src/configs/frontend.config.ts b/packages/@n8n/config/src/configs/frontend.config.ts new file mode 100644 index 0000000000..63f812952f --- /dev/null +++ b/packages/@n8n/config/src/configs/frontend.config.ts @@ -0,0 +1,11 @@ +import { Config, Env } from 
'../decorators'; +import { StringArray } from '../utils'; + +export type FrontendBetaFeatures = 'canvas_v2'; + +@Config +export class FrontendConfig { + /** Which UI experiments to enable. Separate multiple values with a comma `,` */ + @Env('N8N_UI_BETA_FEATURES') + betaFeatures: StringArray = []; +} diff --git a/packages/@n8n/config/src/configs/license.config.ts b/packages/@n8n/config/src/configs/license.config.ts new file mode 100644 index 0000000000..58ccef450c --- /dev/null +++ b/packages/@n8n/config/src/configs/license.config.ts @@ -0,0 +1,28 @@ +import { Config, Env } from '../decorators'; + +@Config +export class LicenseConfig { + /** License server URL to retrieve license. */ + @Env('N8N_LICENSE_SERVER_URL') + serverUrl: string = 'https://license.n8n.io/v1'; + + /** Whether autorenewal for licenses is enabled. */ + @Env('N8N_LICENSE_AUTO_RENEW_ENABLED') + autoRenewalEnabled: boolean = true; + + /** How long (in seconds) before expiry a license should be autorenewed. */ + @Env('N8N_LICENSE_AUTO_RENEW_OFFSET') + autoRenewOffset: number = 60 * 60 * 72; // 72 hours + + /** Activation key to initialize license. */ + @Env('N8N_LICENSE_ACTIVATION_KEY') + activationKey: string = ''; + + /** Tenant ID used by the license manager SDK, e.g. for self-hosted, sandbox, embed, cloud. */ + @Env('N8N_LICENSE_TENANT_ID') + tenantId: number = 1; + + /** Ephemeral license certificate. See: https://github.com/n8n-io/license-management?tab=readme-ov-file#concept-ephemeral-entitlements */ + @Env('N8N_LICENSE_CERT') + cert: string = ''; +} diff --git a/packages/@n8n/config/src/configs/logging.config.ts b/packages/@n8n/config/src/configs/logging.config.ts index 0568eaf791..94e4642223 100644 --- a/packages/@n8n/config/src/configs/logging.config.ts +++ b/packages/@n8n/config/src/configs/logging.config.ts @@ -11,6 +11,7 @@ export const LOG_SCOPES = [ 'redis', 'scaling', 'waiting-executions', + 'task-runner', ] as const; export type LogScope = (typeof LOG_SCOPES)[number]; diff --git a/packages/@n8n/config/src/configs/runners.config.ts b/packages/@n8n/config/src/configs/runners.config.ts index 648959e3f4..c7be197963 100644 --- a/packages/@n8n/config/src/configs/runners.config.ts +++ b/packages/@n8n/config/src/configs/runners.config.ts @@ -42,4 +42,12 @@ export class TaskRunnersConfig { /** Which task runner to launch from the config */ @Env('N8N_RUNNERS_LAUNCHER_RUNNER') launcherRunner: string = 'javascript'; + + /** The --max-old-space-size option to use for the runner (in MB). Default means node.js will determine it based on the available memory. */ + @Env('N8N_RUNNERS_MAX_OLD_SPACE_SIZE') + maxOldSpaceSize: string = ''; + + /** How many concurrent tasks can a runner execute at a time */ + @Env('N8N_RUNNERS_MAX_CONCURRENCY') + maxConcurrency: number = 5; } diff --git a/packages/@n8n/config/src/configs/security.config.ts b/packages/@n8n/config/src/configs/security.config.ts new file mode 100644 index 0000000000..329e84cc43 --- /dev/null +++ b/packages/@n8n/config/src/configs/security.config.ts @@ -0,0 +1,27 @@ +import { Config, Env } from '../decorators'; + +@Config +export class SecurityConfig { + /** + * Which directories to limit n8n's access to. Separate multiple dirs with semicolon `;`. 
+ * + * @example N8N_RESTRICT_FILE_ACCESS_TO=/home/user/.n8n;/home/user/n8n-data + */ + @Env('N8N_RESTRICT_FILE_ACCESS_TO') + restrictFileAccessTo: string = ''; + + /** + * Whether to block access to all files at: + * - the ".n8n" directory, + * - the static cache dir at ~/.cache/n8n/public, and + * - user-defined config files. + */ + @Env('N8N_BLOCK_FILE_ACCESS_TO_N8N_FILES') + blockFileAccessToN8nFiles: boolean = true; + + /** + * In a [security audit](https://docs.n8n.io/hosting/securing/security-audit/), how many days for a workflow to be considered abandoned if not executed. + */ + @Env('N8N_SECURITY_AUDIT_DAYS_ABANDONED_WORKFLOW') + daysAbandonedWorkflow: number = 90; +} diff --git a/packages/@n8n/config/src/index.ts b/packages/@n8n/config/src/index.ts index 7b944eac85..c056a1090c 100644 --- a/packages/@n8n/config/src/index.ts +++ b/packages/@n8n/config/src/index.ts @@ -6,21 +6,25 @@ import { EventBusConfig } from './configs/event-bus.config'; import { ExternalSecretsConfig } from './configs/external-secrets.config'; import { ExternalStorageConfig } from './configs/external-storage.config'; import { GenericConfig } from './configs/generic.config'; +import { LicenseConfig } from './configs/license.config'; import { LoggingConfig } from './configs/logging.config'; import { MultiMainSetupConfig } from './configs/multi-main-setup.config'; import { NodesConfig } from './configs/nodes.config'; import { PublicApiConfig } from './configs/public-api.config'; import { TaskRunnersConfig } from './configs/runners.config'; -export { TaskRunnersConfig } from './configs/runners.config'; import { ScalingModeConfig } from './configs/scaling-mode.config'; +import { SecurityConfig } from './configs/security.config'; import { SentryConfig } from './configs/sentry.config'; import { TemplatesConfig } from './configs/templates.config'; import { UserManagementConfig } from './configs/user-management.config'; import { VersionNotificationsConfig } from './configs/version-notifications.config'; import { WorkflowsConfig } from './configs/workflows.config'; import { Config, Env, Nested } from './decorators'; -export { Config, Env, Nested } from './decorators'; +export { Config, Env, Nested } from './decorators'; +export { TaskRunnersConfig } from './configs/runners.config'; +export { SecurityConfig } from './configs/security.config'; +export { FrontendBetaFeatures, FrontendConfig } from './configs/frontend.config'; export { LOG_SCOPES } from './configs/logging.config'; export type { LogScope } from './configs/logging.config'; @@ -102,4 +106,10 @@ export class GlobalConfig { @Nested generic: GenericConfig; + + @Nested + license: LicenseConfig; + + @Nested + security: SecurityConfig; } diff --git a/packages/@n8n/config/test/config.test.ts b/packages/@n8n/config/test/config.test.ts index 718cb5b73c..07af2c0a0b 100644 --- a/packages/@n8n/config/test/config.test.ts +++ b/packages/@n8n/config/test/config.test.ts @@ -231,6 +231,8 @@ describe('GlobalConfig', () => { port: 5679, launcherPath: '', launcherRunner: 'javascript', + maxOldSpaceSize: '', + maxConcurrency: 5, }, sentry: { backendDsn: '', @@ -256,6 +258,19 @@ describe('GlobalConfig', () => { releaseChannel: 'dev', gracefulShutdownTimeout: 30, }, + license: { + serverUrl: 'https://license.n8n.io/v1', + autoRenewalEnabled: true, + autoRenewOffset: 60 * 60 * 72, + activationKey: '', + tenantId: 1, + cert: '', + }, + security: { + restrictFileAccessTo: '', + blockFileAccessToN8nFiles: true, + daysAbandonedWorkflow: 90, + }, }; it('should use all default values when 
no env variables are defined', () => { diff --git a/packages/@n8n/nodes-langchain/nodes/code/Code.node.ts b/packages/@n8n/nodes-langchain/nodes/code/Code.node.ts index abe9b01530..8708e0c7ea 100644 --- a/packages/@n8n/nodes-langchain/nodes/code/Code.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/code/Code.node.ts @@ -1,13 +1,13 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ -import { - NodeOperationError, - type IExecuteFunctions, - type INodeExecutionData, - type INodeType, - type INodeTypeDescription, - type INodeOutputConfiguration, - type SupplyData, - NodeConnectionType, +import { NodeOperationError, NodeConnectionType } from 'n8n-workflow'; +import type { + IExecuteFunctions, + INodeExecutionData, + INodeType, + INodeTypeDescription, + INodeOutputConfiguration, + SupplyData, + ISupplyDataFunctions, } from 'n8n-workflow'; // TODO: Add support for execute function. Got already started but got commented out @@ -72,7 +72,7 @@ export const vmResolver = makeResolverFromLegacyOptions({ }); function getSandbox( - this: IExecuteFunctions, + this: IExecuteFunctions | ISupplyDataFunctions, code: string, options?: { addItems?: boolean; itemIndex?: number }, ) { @@ -354,7 +354,7 @@ export class Code implements INodeType { } } - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const code = this.getNodeParameter('code', itemIndex) as { supplyData?: { code: string } }; if (!code.supplyData?.code) { diff --git a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentBinaryInputLoader/DocumentBinaryInputLoader.node.ts b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentBinaryInputLoader/DocumentBinaryInputLoader.node.ts index 783f12be9d..2e68db4e69 100644 --- a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentBinaryInputLoader/DocumentBinaryInputLoader.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentBinaryInputLoader/DocumentBinaryInputLoader.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; @@ -177,7 +177,7 @@ export class DocumentBinaryInputLoader implements INodeType { ], }; - async supplyData(this: IExecuteFunctions): Promise { + async supplyData(this: ISupplyDataFunctions): Promise { this.logger.debug('Supply Data for Binary Input Loader'); const textSplitter = (await this.getInputConnectionData( NodeConnectionType.AiTextSplitter, diff --git a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentDefaultDataLoader/DocumentDefaultDataLoader.node.ts b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentDefaultDataLoader/DocumentDefaultDataLoader.node.ts index 062008db2f..5e6457951e 100644 --- a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentDefaultDataLoader/DocumentDefaultDataLoader.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentDefaultDataLoader/DocumentDefaultDataLoader.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; @@ -283,7 +283,7 @@ export class DocumentDefaultDataLoader implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, 
itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const dataType = this.getNodeParameter('dataType', itemIndex, 'json') as 'json' | 'binary'; const textSplitter = (await this.getInputConnectionData( NodeConnectionType.AiTextSplitter, diff --git a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts index 916f0e7159..071134f25e 100644 --- a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentGithubLoader/DocumentGithubLoader.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import { GithubRepoLoader } from '@langchain/community/document_loaders/web/github'; @@ -93,7 +93,7 @@ export class DocumentGithubLoader implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { console.log('Supplying data for Github Document Loader'); const repository = this.getNodeParameter('repository', itemIndex) as string; diff --git a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentJSONInputLoader/DocumentJsonInputLoader.node.ts b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentJSONInputLoader/DocumentJsonInputLoader.node.ts index 3cb2c4bfdb..2e8cb95a11 100644 --- a/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentJSONInputLoader/DocumentJsonInputLoader.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/document_loaders/DocumentJSONInputLoader/DocumentJsonInputLoader.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; @@ -79,7 +79,7 @@ export class DocumentJsonInputLoader implements INodeType { ], }; - async supplyData(this: IExecuteFunctions): Promise { + async supplyData(this: ISupplyDataFunctions): Promise { this.logger.debug('Supply Data for JSON Input Loader'); const textSplitter = (await this.getInputConnectionData( NodeConnectionType.AiTextSplitter, diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAwsBedrock/EmbeddingsAwsBedrock.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAwsBedrock/EmbeddingsAwsBedrock.node.ts index 58caebe05b..6e0782f1c1 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAwsBedrock/EmbeddingsAwsBedrock.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAwsBedrock/EmbeddingsAwsBedrock.node.ts @@ -2,9 +2,9 @@ import { BedrockEmbeddings } from '@langchain/aws'; import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; @@ -104,7 +104,7 @@ export class EmbeddingsAwsBedrock implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('aws'); const modelName 
= this.getNodeParameter('model', itemIndex) as string; diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAzureOpenAi/EmbeddingsAzureOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAzureOpenAi/EmbeddingsAzureOpenAi.node.ts index 46195be0d3..a75a93c9f4 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAzureOpenAi/EmbeddingsAzureOpenAi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsAzureOpenAi/EmbeddingsAzureOpenAi.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; @@ -92,7 +92,7 @@ export class EmbeddingsAzureOpenAi implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { this.logger.debug('Supply data for embeddings'); const credentials = await this.getCredentials<{ apiKey: string; diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsCohere/EmbeddingsCohere.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsCohere/EmbeddingsCohere.node.ts index a6c246acb5..26e5d39b70 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsCohere/EmbeddingsCohere.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsCohere/EmbeddingsCohere.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import { CohereEmbeddings } from '@langchain/cohere'; @@ -99,7 +99,7 @@ export class EmbeddingsCohere implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { this.logger.debug('Supply data for embeddings Cohere'); const modelName = this.getNodeParameter('modelName', itemIndex, 'embed-english-v2.0') as string; const credentials = await this.getCredentials<{ apiKey: string }>('cohereApi'); diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsGoogleGemini/EmbeddingsGoogleGemini.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsGoogleGemini/EmbeddingsGoogleGemini.node.ts index 92882dfffa..2a455e4574 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsGoogleGemini/EmbeddingsGoogleGemini.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsGoogleGemini/EmbeddingsGoogleGemini.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import { GoogleGenerativeAIEmbeddings } from '@langchain/google-genai'; @@ -116,7 +116,7 @@ export class EmbeddingsGoogleGemini implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { this.logger.debug('Supply data for embeddings Google Gemini'); const modelName = this.getNodeParameter( 'modelName', diff --git 
a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsHuggingFaceInference/EmbeddingsHuggingFaceInference.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsHuggingFaceInference/EmbeddingsHuggingFaceInference.node.ts index 93d751b9c4..c8317630c3 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsHuggingFaceInference/EmbeddingsHuggingFaceInference.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsHuggingFaceInference/EmbeddingsHuggingFaceInference.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import { HuggingFaceInferenceEmbeddings } from '@langchain/community/embeddings/hf'; @@ -81,7 +81,7 @@ export class EmbeddingsHuggingFaceInference implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { this.logger.debug('Supply data for embeddings HF Inference'); const model = this.getNodeParameter( 'modelName', diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsMistralCloud/EmbeddingsMistralCloud.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsMistralCloud/EmbeddingsMistralCloud.node.ts index d8223a2ffe..dbfb93b82e 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsMistralCloud/EmbeddingsMistralCloud.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsMistralCloud/EmbeddingsMistralCloud.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import type { MistralAIEmbeddingsParams } from '@langchain/mistralai'; @@ -134,7 +134,7 @@ export class EmbeddingsMistralCloud implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('mistralCloudApi'); const modelName = this.getNodeParameter('model', itemIndex) as string; const options = this.getNodeParameter( diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOllama/EmbeddingsOllama.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOllama/EmbeddingsOllama.node.ts index ec404f2306..d84aa537ec 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOllama/EmbeddingsOllama.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOllama/EmbeddingsOllama.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import { OllamaEmbeddings } from '@langchain/ollama'; @@ -44,7 +44,7 @@ export class EmbeddingsOllama implements INodeType { properties: [getConnectionHintNoticeField([NodeConnectionType.AiVectorStore]), ollamaModel], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { this.logger.debug('Supply data for embeddings Ollama'); const modelName = 
this.getNodeParameter('model', itemIndex) as string; const credentials = await this.getCredentials('ollamaApi'); diff --git a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts index 3c40e03203..167581ed2e 100644 --- a/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/embeddings/EmbeddingsOpenAI/EmbeddingsOpenAi.node.ts @@ -1,10 +1,10 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, type SupplyData, + type ISupplyDataFunctions, type INodeProperties, } from 'n8n-workflow'; @@ -170,7 +170,7 @@ export class EmbeddingsOpenAi implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { this.logger.debug('Supply data for embeddings'); const credentials = await this.getCredentials('openAiApi'); diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.ts index ecc14e1344..77df60da79 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LMChatAnthropic/LmChatAnthropic.node.ts @@ -3,7 +3,7 @@ import { NodeConnectionType, type INodePropertyOptions, type INodeProperties, - type IExecuteFunctions, + type ISupplyDataFunctions, type INodeType, type INodeTypeDescription, type SupplyData, @@ -20,6 +20,10 @@ const modelField: INodeProperties = { type: 'options', // eslint-disable-next-line n8n-nodes-base/node-param-options-type-unsorted-items options: [ + { + name: 'Claude 3.5 Sonnet(20241022)', + value: 'claude-3-5-sonnet-20241022', + }, { name: 'Claude 3 Opus(20240229)', value: 'claude-3-opus-20240229', @@ -175,7 +179,7 @@ export class LmChatAnthropic implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('anthropicApi'); const modelName = this.getNodeParameter('model', itemIndex) as string; diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMChatOllama/LmChatOllama.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMChatOllama/LmChatOllama.node.ts index b4fc474dd2..dc2f716b2b 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LMChatOllama/LmChatOllama.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LMChatOllama/LmChatOllama.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; @@ -52,7 +52,7 @@ export class LmChatOllama implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('ollamaApi'); const modelName = this.getNodeParameter('model', itemIndex) as string; diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts 
b/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts index 3556bca0cf..2e724bf3a7 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LMChatOpenAi/LmChatOpenAi.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, type JsonObject, NodeApiError, @@ -242,7 +242,7 @@ export class LmChatOpenAi implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('openAiApi'); const modelName = this.getNodeParameter('model', itemIndex) as string; diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMCohere/LmCohere.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMCohere/LmCohere.node.ts index 191209bb33..6957cd9d9a 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LMCohere/LmCohere.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LMCohere/LmCohere.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; @@ -90,7 +90,7 @@ export class LmCohere implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('cohereApi'); const options = this.getNodeParameter('options', itemIndex, {}) as object; diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMOllama/LmOllama.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMOllama/LmOllama.node.ts index 5492a51a97..f71708cbca 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LMOllama/LmOllama.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LMOllama/LmOllama.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; @@ -51,7 +51,7 @@ export class LmOllama implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('ollamaApi'); const modelName = this.getNodeParameter('model', itemIndex) as string; diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMOpenAi/LmOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMOpenAi/LmOpenAi.node.ts index a46ad429a2..5fb9be937e 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LMOpenAi/LmOpenAi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LMOpenAi/LmOpenAi.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType } from 'n8n-workflow'; import type { - IExecuteFunctions, INodeType, INodeTypeDescription, + ISupplyDataFunctions, SupplyData, ILoadOptionsFunctions, } from 'n8n-workflow'; @@ -229,7 +229,7 @@ export class LmOpenAi implements INodeType { }, }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: 
ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('openAiApi'); const modelName = this.getNodeParameter('model', itemIndex, '', { diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LMOpenHuggingFaceInference/LmOpenHuggingFaceInference.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LMOpenHuggingFaceInference/LmOpenHuggingFaceInference.node.ts index 7b2c821f9c..ddf8065bf6 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LMOpenHuggingFaceInference/LmOpenHuggingFaceInference.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LMOpenHuggingFaceInference/LmOpenHuggingFaceInference.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; @@ -132,7 +132,7 @@ export class LmOpenHuggingFaceInference implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('huggingFaceApi'); const modelName = this.getNodeParameter('model', itemIndex) as string; diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.ts index c7b3d8ad95..b4eafde76e 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAwsBedrock/LmChatAwsBedrock.node.ts @@ -2,9 +2,9 @@ import { ChatBedrockConverse } from '@langchain/aws'; import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; @@ -132,7 +132,7 @@ export class LmChatAwsBedrock implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('aws'); const modelName = this.getNodeParameter('model', itemIndex) as string; const options = this.getNodeParameter('options', itemIndex, {}) as { diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts index 03548142db..55a5afb7ce 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; @@ -162,7 +162,7 @@ export class LmChatAzureOpenAi implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials<{ apiKey: string; resourceName: string; diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.ts index ce08a650f2..44691a47ef 100644 --- 
a/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import { ChatGoogleGenerativeAI } from '@langchain/google-genai'; @@ -113,7 +113,7 @@ export class LmChatGoogleGemini implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('googlePalmApi'); const modelName = this.getNodeParameter('modelName', itemIndex) as string; diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleVertex/LmChatGoogleVertex.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleVertex/LmChatGoogleVertex.node.ts index 55ccda90d2..a9a01ebf1b 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleVertex/LmChatGoogleVertex.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleVertex/LmChatGoogleVertex.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, type ILoadOptionsFunctions, type JsonObject, @@ -124,7 +124,7 @@ export class LmChatGoogleVertex implements INodeType { }, }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('googleApi'); const privateKey = formatPrivateKey(credentials.privateKey as string); const email = (credentials.email as string).trim(); diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatGroq/LmChatGroq.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatGroq/LmChatGroq.node.ts index d0a28715e1..3588cf0cc3 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatGroq/LmChatGroq.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatGroq/LmChatGroq.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; @@ -129,7 +129,7 @@ export class LmChatGroq implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('groqApi'); const modelName = this.getNodeParameter('model', itemIndex) as string; diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatMistralCloud/LmChatMistralCloud.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatMistralCloud/LmChatMistralCloud.node.ts index 129beeadfe..5ff28bd30d 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatMistralCloud/LmChatMistralCloud.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatMistralCloud/LmChatMistralCloud.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type 
SupplyData, } from 'n8n-workflow'; @@ -172,7 +172,7 @@ export class LmChatMistralCloud implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('mistralCloudApi'); const modelName = this.getNodeParameter('model', itemIndex) as string; diff --git a/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts b/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts index a0f47677f7..660bf3b0a9 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts @@ -7,7 +7,7 @@ import type { SerializedSecret, } from '@langchain/core/load/serializable'; import type { LLMResult } from '@langchain/core/outputs'; -import type { IDataObject, IExecuteFunctions } from 'n8n-workflow'; +import type { IDataObject, ISupplyDataFunctions } from 'n8n-workflow'; import { NodeConnectionType } from 'n8n-workflow'; import { pick } from 'lodash'; import type { BaseMessage } from '@langchain/core/messages'; @@ -30,8 +30,6 @@ const TIKTOKEN_ESTIMATE_MODEL = 'gpt-4o'; export class N8nLlmTracing extends BaseCallbackHandler { name = 'N8nLlmTracing'; - executionFunctions: IExecuteFunctions; - connectionType = NodeConnectionType.AiLanguageModel; promptTokensEstimate = 0; @@ -61,11 +59,10 @@ export class N8nLlmTracing extends BaseCallbackHandler { }; constructor( - executionFunctions: IExecuteFunctions, + private executionFunctions: ISupplyDataFunctions, options?: { tokensUsageParser: TokensUsageParser }, ) { super(); - this.executionFunctions = executionFunctions; this.options = { ...this.options, ...options }; } @@ -138,7 +135,7 @@ export class N8nLlmTracing extends BaseCallbackHandler { this.executionFunctions.addOutputData(this.connectionType, runDetails.index, [ [{ json: { ...response } }], ]); - void logAiEvent(this.executionFunctions, 'ai-llm-generated-output', { + logAiEvent(this.executionFunctions, 'ai-llm-generated-output', { messages: parsedMessages, options: runDetails.options, response, @@ -186,7 +183,7 @@ export class N8nLlmTracing extends BaseCallbackHandler { }); } - void logAiEvent(this.executionFunctions, 'ai-llm-errored', { + logAiEvent(this.executionFunctions, 'ai-llm-errored', { error: Object.keys(error).length === 0 ? 
error.toString() : error, runId, parentRunId, diff --git a/packages/@n8n/nodes-langchain/nodes/memory/MemoryBufferWindow/MemoryBufferWindow.node.ts b/packages/@n8n/nodes-langchain/nodes/memory/MemoryBufferWindow/MemoryBufferWindow.node.ts index b8eea7a5e2..fae6927c25 100644 --- a/packages/@n8n/nodes-langchain/nodes/memory/MemoryBufferWindow/MemoryBufferWindow.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/memory/MemoryBufferWindow/MemoryBufferWindow.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import type { BufferWindowMemoryInput } from 'langchain/memory'; @@ -134,7 +134,7 @@ export class MemoryBufferWindow implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const contextWindowLength = this.getNodeParameter('contextWindowLength', itemIndex) as number; const workflowId = this.getWorkflow().id; const memoryInstance = MemoryChatBufferSingleton.getInstance(); diff --git a/packages/@n8n/nodes-langchain/nodes/memory/MemoryMotorhead/MemoryMotorhead.node.ts b/packages/@n8n/nodes-langchain/nodes/memory/MemoryMotorhead/MemoryMotorhead.node.ts index 9b2f46fb30..7f8d88fbaf 100644 --- a/packages/@n8n/nodes-langchain/nodes/memory/MemoryMotorhead/MemoryMotorhead.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/memory/MemoryMotorhead/MemoryMotorhead.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; @@ -86,7 +86,7 @@ export class MemoryMotorhead implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('motorheadApi'); const nodeVersion = this.getNode().typeVersion; diff --git a/packages/@n8n/nodes-langchain/nodes/memory/MemoryPostgresChat/MemoryPostgresChat.node.ts b/packages/@n8n/nodes-langchain/nodes/memory/MemoryPostgresChat/MemoryPostgresChat.node.ts index f42cb93fe1..f51b76fb18 100644 --- a/packages/@n8n/nodes-langchain/nodes/memory/MemoryPostgresChat/MemoryPostgresChat.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/memory/MemoryPostgresChat/MemoryPostgresChat.node.ts @@ -1,5 +1,10 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ -import type { IExecuteFunctions, INodeType, INodeTypeDescription, SupplyData } from 'n8n-workflow'; +import type { + ISupplyDataFunctions, + INodeType, + INodeTypeDescription, + SupplyData, +} from 'n8n-workflow'; import { NodeConnectionType } from 'n8n-workflow'; import { BufferMemory, BufferWindowMemory } from 'langchain/memory'; import { PostgresChatMessageHistory } from '@langchain/community/stores/message/postgres'; @@ -73,7 +78,7 @@ export class MemoryPostgresChat implements INodeType { }, }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('postgres'); const tableName = this.getNodeParameter('tableName', itemIndex, 'n8n_chat_histories') as string; const sessionId = getSessionId(this, 
itemIndex); diff --git a/packages/@n8n/nodes-langchain/nodes/memory/MemoryRedisChat/MemoryRedisChat.node.ts b/packages/@n8n/nodes-langchain/nodes/memory/MemoryRedisChat/MemoryRedisChat.node.ts index da57ede1d2..01a31458de 100644 --- a/packages/@n8n/nodes-langchain/nodes/memory/MemoryRedisChat/MemoryRedisChat.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/memory/MemoryRedisChat/MemoryRedisChat.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeOperationError, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, NodeConnectionType, } from 'n8n-workflow'; @@ -102,7 +102,7 @@ export class MemoryRedisChat implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('redis'); const nodeVersion = this.getNode().typeVersion; diff --git a/packages/@n8n/nodes-langchain/nodes/memory/MemoryXata/MemoryXata.node.ts b/packages/@n8n/nodes-langchain/nodes/memory/MemoryXata/MemoryXata.node.ts index f0177d9e75..be431b9b3c 100644 --- a/packages/@n8n/nodes-langchain/nodes/memory/MemoryXata/MemoryXata.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/memory/MemoryXata/MemoryXata.node.ts @@ -1,6 +1,11 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; -import type { IExecuteFunctions, INodeType, INodeTypeDescription, SupplyData } from 'n8n-workflow'; +import type { + ISupplyDataFunctions, + INodeType, + INodeTypeDescription, + SupplyData, +} from 'n8n-workflow'; import { XataChatMessageHistory } from '@langchain/community/stores/message/xata'; import { BufferMemory, BufferWindowMemory } from 'langchain/memory'; import { BaseClient } from '@xata.io/client'; @@ -88,7 +93,7 @@ export class MemoryXata implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('xataApi'); const nodeVersion = this.getNode().typeVersion; diff --git a/packages/@n8n/nodes-langchain/nodes/memory/MemoryZep/MemoryZep.node.ts b/packages/@n8n/nodes-langchain/nodes/memory/MemoryZep/MemoryZep.node.ts index 7cbf1da574..20e70fd920 100644 --- a/packages/@n8n/nodes-langchain/nodes/memory/MemoryZep/MemoryZep.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/memory/MemoryZep/MemoryZep.node.ts @@ -1,7 +1,7 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, + type ISupplyDataFunctions, type INodeType, type INodeTypeDescription, type SupplyData, @@ -103,7 +103,7 @@ export class MemoryZep implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials<{ apiKey?: string; apiUrl?: string; diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.ts index 7d676c7607..d4743fb043 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.ts +++ 
b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserAutofixing/OutputParserAutofixing.node.ts @@ -1,6 +1,11 @@ import type { BaseLanguageModel } from '@langchain/core/language_models/base'; import { NodeConnectionType } from 'n8n-workflow'; -import type { IExecuteFunctions, INodeType, INodeTypeDescription, SupplyData } from 'n8n-workflow'; +import type { + ISupplyDataFunctions, + INodeType, + INodeTypeDescription, + SupplyData, +} from 'n8n-workflow'; import { N8nOutputFixingParser, @@ -63,7 +68,7 @@ export class OutputParserAutofixing implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const model = (await this.getInputConnectionData( NodeConnectionType.AiLanguageModel, itemIndex, diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/OutputParserItemList.node.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/OutputParserItemList.node.ts index cb67afb453..b613c14775 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/OutputParserItemList.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserItemList/OutputParserItemList.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; @@ -80,7 +80,7 @@ export class OutputParserItemList implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const options = this.getNodeParameter('options', itemIndex, {}) as { numberOfItems?: number; separator?: string; diff --git a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts index b5b6a5846c..c35cb1d145 100644 --- a/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/output_parser/OutputParserStructured/OutputParserStructured.node.ts @@ -1,9 +1,9 @@ import type { JSONSchema7 } from 'json-schema'; import { jsonParse, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, NodeOperationError, NodeConnectionType, @@ -122,7 +122,7 @@ export class OutputParserStructured implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const schemaType = this.getNodeParameter('schemaType', itemIndex, '') as 'fromJson' | 'manual'; // We initialize these even though one of them will always be empty // it makes it easer to navigate the ternary operator diff --git a/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverContextualCompression/RetrieverContextualCompression.node.ts b/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverContextualCompression/RetrieverContextualCompression.node.ts index 5b89a0bf26..8017caa1ad 100644 --- a/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverContextualCompression/RetrieverContextualCompression.node.ts +++ 
b/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverContextualCompression/RetrieverContextualCompression.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; @@ -63,7 +63,7 @@ export class RetrieverContextualCompression implements INodeType { properties: [], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { this.logger.debug('Supplying data for Contextual Compression Retriever'); const model = (await this.getInputConnectionData( diff --git a/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverMultiQuery/RetrieverMultiQuery.node.ts b/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverMultiQuery/RetrieverMultiQuery.node.ts index 3cb377d654..f814ba875e 100644 --- a/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverMultiQuery/RetrieverMultiQuery.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverMultiQuery/RetrieverMultiQuery.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; @@ -82,7 +82,7 @@ export class RetrieverMultiQuery implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { this.logger.debug('Supplying data for MultiQuery Retriever'); const options = this.getNodeParameter('options', itemIndex, {}) as { queryCount?: number }; diff --git a/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverVectorStore/RetrieverVectorStore.node.ts b/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverVectorStore/RetrieverVectorStore.node.ts index 6543d061d4..5e79a6a754 100644 --- a/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverVectorStore/RetrieverVectorStore.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverVectorStore/RetrieverVectorStore.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import type { VectorStore } from '@langchain/core/vectorstores'; @@ -56,7 +56,7 @@ export class RetrieverVectorStore implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { this.logger.debug('Supplying data for Vector Store Retriever'); const topK = this.getNodeParameter('topK', itemIndex, 4) as number; diff --git a/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverWorkflow/RetrieverWorkflow.node.ts b/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverWorkflow/RetrieverWorkflow.node.ts index 6b446149fc..0aafabf1d4 100644 --- a/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverWorkflow/RetrieverWorkflow.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/retrievers/RetrieverWorkflow/RetrieverWorkflow.node.ts @@ -5,7 +5,7 @@ import type { IExecuteWorkflowInfo, INodeExecutionData, IWorkflowBase, - IExecuteFunctions, + ISupplyDataFunctions, INodeType, INodeTypeDescription, 
SupplyData, @@ -292,15 +292,15 @@ export class RetrieverWorkflow implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { class WorkflowRetriever extends BaseRetriever { lc_namespace = ['n8n-nodes-langchain', 'retrievers', 'workflow']; - executeFunctions: IExecuteFunctions; - - constructor(executeFunctions: IExecuteFunctions, fields: BaseRetrieverInput) { + constructor( + private executeFunctions: ISupplyDataFunctions, + fields: BaseRetrieverInput, + ) { super(fields); - this.executeFunctions = executeFunctions; } async _getRelevantDocuments( diff --git a/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterCharacterTextSplitter/TextSplitterCharacterTextSplitter.node.ts b/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterCharacterTextSplitter/TextSplitterCharacterTextSplitter.node.ts index 61e62def0f..f62e8f01f1 100644 --- a/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterCharacterTextSplitter/TextSplitterCharacterTextSplitter.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterCharacterTextSplitter/TextSplitterCharacterTextSplitter.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import type { CharacterTextSplitterParams } from '@langchain/textsplitters'; @@ -63,7 +63,7 @@ export class TextSplitterCharacterTextSplitter implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { this.logger.debug('Supply Data for Text Splitter'); const separator = this.getNodeParameter('separator', itemIndex) as string; diff --git a/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterRecursiveCharacterTextSplitter/TextSplitterRecursiveCharacterTextSplitter.node.ts b/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterRecursiveCharacterTextSplitter/TextSplitterRecursiveCharacterTextSplitter.node.ts index 4d2c5a6ec8..21a0520766 100644 --- a/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterRecursiveCharacterTextSplitter/TextSplitterRecursiveCharacterTextSplitter.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterRecursiveCharacterTextSplitter/TextSplitterRecursiveCharacterTextSplitter.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import type { @@ -94,7 +94,7 @@ export class TextSplitterRecursiveCharacterTextSplitter implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { this.logger.debug('Supply Data for Text Splitter'); const chunkSize = this.getNodeParameter('chunkSize', itemIndex) as number; diff --git a/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterTokenSplitter/TextSplitterTokenSplitter.node.ts b/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterTokenSplitter/TextSplitterTokenSplitter.node.ts index c021aa1df7..247d142fa8 100644 --- 
a/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterTokenSplitter/TextSplitterTokenSplitter.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/text_splitters/TextSplitterTokenSplitter/TextSplitterTokenSplitter.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import { TokenTextSplitter } from '@langchain/textsplitters'; @@ -56,7 +56,7 @@ export class TextSplitterTokenSplitter implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { this.logger.debug('Supply Data for Text Splitter'); const chunkSize = this.getNodeParameter('chunkSize', itemIndex) as number; diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolCalculator/ToolCalculator.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolCalculator/ToolCalculator.node.ts index f37a3176a4..f50a6216c0 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolCalculator/ToolCalculator.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolCalculator/ToolCalculator.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import { Calculator } from '@langchain/community/tools/calculator'; @@ -43,7 +43,7 @@ export class ToolCalculator implements INodeType { properties: [getConnectionHintNoticeField([NodeConnectionType.AiAgent])], }; - async supplyData(this: IExecuteFunctions): Promise { + async supplyData(this: ISupplyDataFunctions): Promise { return { response: logWrapper(new Calculator(), this), }; diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts index 2a2a635c90..1491662e61 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts @@ -6,9 +6,9 @@ import { PythonSandbox } from 'n8n-nodes-base/dist/nodes/Code/PythonSandbox'; import type { Sandbox } from 'n8n-nodes-base/dist/nodes/Code/Sandbox'; import { getSandboxContext } from 'n8n-nodes-base/dist/nodes/Code/Sandbox'; import type { - IExecuteFunctions, INodeType, INodeTypeDescription, + ISupplyDataFunctions, SupplyData, ExecutionError, IDataObject, @@ -175,7 +175,7 @@ export class ToolCode implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const node = this.getNode(); const workflowMode = this.getMode(); diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts index 2fbac43474..f279c1e751 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts @@ -1,8 +1,8 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import type { - IExecuteFunctions, INodeType, INodeTypeDescription, + ISupplyDataFunctions, SupplyData, IHttpRequestMethods, IHttpRequestOptions, @@ -250,7 
+250,7 @@ export class ToolHttpRequest implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const name = this.getNode().name.replace(/ /g, '_'); try { tryToParseAlphanumericString(name); diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/test/ToolHttpRequest.node.test.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/test/ToolHttpRequest.node.test.ts index f18a2437e3..1a99896fff 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/test/ToolHttpRequest.node.test.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/test/ToolHttpRequest.node.test.ts @@ -10,19 +10,19 @@ describe('ToolHttpRequest', () => { const helpers = mock(); const executeFunctions = mock({ helpers }); - describe('Binary response', () => { - beforeEach(() => { - jest.resetAllMocks(); - executeFunctions.getNode.mockReturnValue( - mock({ - type: 'n8n-nodes-base.httpRequest', - name: 'HTTP Request', - typeVersion: 1.1, - }), - ); - executeFunctions.addInputData.mockReturnValue({ index: 0 }); - }); + beforeEach(() => { + jest.resetAllMocks(); + executeFunctions.getNode.mockReturnValue( + mock({ + type: 'n8n-nodes-base.httpRequest', + name: 'HTTP Request', + typeVersion: 1.1, + }), + ); + executeFunctions.addInputData.mockReturnValue({ index: 0 }); + }); + describe('Binary response', () => { it('should return the error when receiving a binary response', async () => { helpers.httpRequest.mockResolvedValue({ body: Buffer.from(''), @@ -237,4 +237,62 @@ describe('ToolHttpRequest', () => { ); }); }); + + describe('Optimize response', () => { + it('should extract body from the response HTML', async () => { + helpers.httpRequest.mockResolvedValue({ + body: ` + + + + +

+				<html>
+					<head>
+					</head>
+					<body>
+						<h1>Test</h1>
+						<div>
+							<p>
+								Test content
+							</p>
+						</div>
+					</body>
+				</html>
+ +`, + headers: { + 'content-type': 'text/html', + }, + }); + + executeFunctions.getNodeParameter.mockImplementation( + (paramName: string, _: any, fallback: any) => { + switch (paramName) { + case 'method': + return 'GET'; + case 'url': + return '{url}'; + case 'options': + return {}; + case 'placeholderDefinitions.values': + return []; + case 'optimizeResponse': + return true; + case 'responseType': + return 'html'; + case 'cssSelector': + return 'body'; + default: + return fallback; + } + }, + ); + + const { response } = await httpTool.supplyData.call(executeFunctions, 0); + + const res = await (response as N8nTool).invoke({ + url: 'https://httpbin.org/html', + }); + + expect(helpers.httpRequest).toHaveBeenCalled(); + expect(res).toEqual( + JSON.stringify(['

<h1>Test</h1> <div> <p> Test content </p> </div>

'], null, 2), + ); + }); + }); }); diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/utils.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/utils.ts index cd12ac00eb..f1d6dfd150 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/utils.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/utils.ts @@ -1,5 +1,5 @@ import { Readability } from '@mozilla/readability'; -import cheerio from 'cheerio'; +import * as cheerio from 'cheerio'; import { convert } from 'html-to-text'; import { JSDOM } from 'jsdom'; import get from 'lodash/get'; @@ -8,12 +8,12 @@ import unset from 'lodash/unset'; import * as mime from 'mime-types'; import { getOAuth2AdditionalParameters } from 'n8n-nodes-base/dist/nodes/HttpRequest/GenericFunctions'; import type { - IExecuteFunctions, IDataObject, IHttpRequestOptions, IRequestOptionsSimplified, ExecutionError, NodeApiError, + ISupplyDataFunctions, } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow'; import { z } from 'zod'; @@ -28,7 +28,7 @@ import type { } from './interfaces'; import type { DynamicZodObject } from '../../../types/zod.types'; -const genericCredentialRequest = async (ctx: IExecuteFunctions, itemIndex: number) => { +const genericCredentialRequest = async (ctx: ISupplyDataFunctions, itemIndex: number) => { const genericType = ctx.getNodeParameter('genericAuthType', itemIndex) as string; if (genericType === 'httpBasicAuth' || genericType === 'httpDigestAuth') { @@ -104,7 +104,7 @@ const genericCredentialRequest = async (ctx: IExecuteFunctions, itemIndex: numbe }); }; -const predefinedCredentialRequest = async (ctx: IExecuteFunctions, itemIndex: number) => { +const predefinedCredentialRequest = async (ctx: ISupplyDataFunctions, itemIndex: number) => { const predefinedType = ctx.getNodeParameter('nodeCredentialType', itemIndex) as string; const additionalOptions = getOAuth2AdditionalParameters(predefinedType); @@ -119,7 +119,7 @@ const predefinedCredentialRequest = async (ctx: IExecuteFunctions, itemIndex: nu }; export const configureHttpRequestFunction = async ( - ctx: IExecuteFunctions, + ctx: ISupplyDataFunctions, credentialsType: 'predefinedCredentialType' | 'genericCredentialType' | 'none', itemIndex: number, ) => { @@ -146,7 +146,7 @@ const defaultOptimizer = (response: T) => { return String(response); }; -const htmlOptimizer = (ctx: IExecuteFunctions, itemIndex: number, maxLength: number) => { +const htmlOptimizer = (ctx: ISupplyDataFunctions, itemIndex: number, maxLength: number) => { const cssSelector = ctx.getNodeParameter('cssSelector', itemIndex, '') as string; const onlyContent = ctx.getNodeParameter('onlyContent', itemIndex, false) as boolean; let elementsToOmit: string[] = []; @@ -214,7 +214,7 @@ const htmlOptimizer = (ctx: IExecuteFunctions, itemIndex: number, maxLength: num }; }; -const textOptimizer = (ctx: IExecuteFunctions, itemIndex: number, maxLength: number) => { +const textOptimizer = (ctx: ISupplyDataFunctions, itemIndex: number, maxLength: number) => { return (response: string | IDataObject) => { if (typeof response === 'object') { try { @@ -245,7 +245,7 @@ const textOptimizer = (ctx: IExecuteFunctions, itemIndex: number, maxLength: num }; }; -const jsonOptimizer = (ctx: IExecuteFunctions, itemIndex: number) => { +const jsonOptimizer = (ctx: ISupplyDataFunctions, itemIndex: number) => { return (response: string): string => { let responseData: IDataObject | IDataObject[] | string = response; @@ -324,7 +324,7 @@ const 
jsonOptimizer = (ctx: IExecuteFunctions, itemIndex: number) => { }; }; -export const configureResponseOptimizer = (ctx: IExecuteFunctions, itemIndex: number) => { +export const configureResponseOptimizer = (ctx: ISupplyDataFunctions, itemIndex: number) => { const optimizeResponse = ctx.getNodeParameter('optimizeResponse', itemIndex, false) as boolean; if (optimizeResponse) { @@ -469,7 +469,7 @@ const MODEL_INPUT_DESCRIPTION = { }; export const updateParametersAndOptions = (options: { - ctx: IExecuteFunctions; + ctx: ISupplyDataFunctions; itemIndex: number; toolParameters: ToolParameter[]; placeholdersDefinitions: PlaceholderDefinition[]; @@ -558,7 +558,7 @@ export const prepareToolDescription = ( }; export const configureToolFunction = ( - ctx: IExecuteFunctions, + ctx: ISupplyDataFunctions, itemIndex: number, toolParameters: ToolParameter[], requestOptions: IHttpRequestOptions, diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolSerpApi/ToolSerpApi.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolSerpApi/ToolSerpApi.node.ts index c08553b96e..709b06b7ac 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolSerpApi/ToolSerpApi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolSerpApi/ToolSerpApi.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import { SerpAPI } from '@langchain/community/tools/serpapi'; @@ -113,7 +113,7 @@ export class ToolSerpApi implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const credentials = await this.getCredentials('serpApi'); const options = this.getNodeParameter('options', itemIndex) as object; diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolVectorStore/ToolVectorStore.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolVectorStore/ToolVectorStore.node.ts index b4ea7c3321..b4016f06ca 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolVectorStore/ToolVectorStore.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolVectorStore/ToolVectorStore.node.ts @@ -1,4 +1,9 @@ -import type { IExecuteFunctions, INodeType, INodeTypeDescription, SupplyData } from 'n8n-workflow'; +import type { + INodeType, + INodeTypeDescription, + ISupplyDataFunctions, + SupplyData, +} from 'n8n-workflow'; import { NodeConnectionType } from 'n8n-workflow'; import { VectorStoreQATool } from 'langchain/tools'; @@ -82,7 +87,7 @@ export class ToolVectorStore implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const name = this.getNodeParameter('name', itemIndex) as string; const toolDescription = this.getNodeParameter('description', itemIndex) as string; const topK = this.getNodeParameter('topK', itemIndex, 4) as number; diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolWikipedia/ToolWikipedia.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolWikipedia/ToolWikipedia.node.ts index 0b3aeaff74..e462e38feb 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolWikipedia/ToolWikipedia.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolWikipedia/ToolWikipedia.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { 
NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import { WikipediaQueryRun } from '@langchain/community/tools/wikipedia_query_run'; @@ -43,7 +43,7 @@ export class ToolWikipedia implements INodeType { properties: [getConnectionHintNoticeField([NodeConnectionType.AiAgent])], }; - async supplyData(this: IExecuteFunctions): Promise { + async supplyData(this: ISupplyDataFunctions): Promise { const WikiTool = new WikipediaQueryRun(); WikiTool.description = diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolWolframAlpha/ToolWolframAlpha.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolWolframAlpha/ToolWolframAlpha.node.ts index 3a1f7ea2cc..93290e63ad 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolWolframAlpha/ToolWolframAlpha.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolWolframAlpha/ToolWolframAlpha.node.ts @@ -1,9 +1,9 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import { WolframAlphaTool } from '@langchain/community/tools/wolframalpha'; @@ -49,7 +49,7 @@ export class ToolWolframAlpha implements INodeType { properties: [getConnectionHintNoticeField([NodeConnectionType.AiAgent])], }; - async supplyData(this: IExecuteFunctions): Promise { + async supplyData(this: ISupplyDataFunctions): Promise { const credentials = await this.getCredentials('wolframAlphaApi'); return { diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts index 6cc983eae4..f912e162d9 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts @@ -6,12 +6,12 @@ import isObject from 'lodash/isObject'; import type { SetField, SetNodeOptions } from 'n8n-nodes-base/dist/nodes/Set/v2/helpers/interfaces'; import * as manual from 'n8n-nodes-base/dist/nodes/Set/v2/manual.mode'; import type { - IExecuteFunctions, IExecuteWorkflowInfo, INodeExecutionData, INodeType, INodeTypeDescription, IWorkflowBase, + ISupplyDataFunctions, SupplyData, ExecutionError, IDataObject, @@ -357,7 +357,7 @@ export class ToolWorkflow implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const name = this.getNodeParameter('name', itemIndex) as string; const description = this.getNodeParameter('description', itemIndex) as string; diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemoryLoad/VectorStoreInMemoryLoad.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemoryLoad/VectorStoreInMemoryLoad.node.ts index c85a245073..7bf48c3d8c 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemoryLoad/VectorStoreInMemoryLoad.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreInMemoryLoad/VectorStoreInMemoryLoad.node.ts @@ -1,10 +1,10 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { NodeConnectionType, - type SupplyData, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, + type SupplyData, } from 'n8n-workflow'; import type 
{ Embeddings } from '@langchain/core/embeddings'; import { MemoryVectorStoreManager } from '../shared/MemoryVectorStoreManager'; @@ -59,7 +59,7 @@ export class VectorStoreInMemoryLoad implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const embeddings = (await this.getInputConnectionData( NodeConnectionType.AiEmbedding, itemIndex, diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePineconeLoad/VectorStorePineconeLoad.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePineconeLoad/VectorStorePineconeLoad.node.ts index 7cae9c9d85..d46bccd9f7 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePineconeLoad/VectorStorePineconeLoad.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStorePineconeLoad/VectorStorePineconeLoad.node.ts @@ -1,8 +1,8 @@ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import type { PineconeStoreParams } from '@langchain/pinecone'; @@ -84,7 +84,7 @@ export class VectorStorePineconeLoad implements INodeType { }, }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { this.logger.debug('Supplying data for Pinecone Load Vector Store'); const namespace = this.getNodeParameter('pineconeNamespace', itemIndex) as string; diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabaseLoad/VectorStoreSupabaseLoad.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabaseLoad/VectorStoreSupabaseLoad.node.ts index bae6d0e1a9..f4bdc49e44 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabaseLoad/VectorStoreSupabaseLoad.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreSupabaseLoad/VectorStoreSupabaseLoad.node.ts @@ -1,7 +1,7 @@ import { - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, NodeConnectionType, } from 'n8n-workflow'; @@ -81,7 +81,7 @@ export class VectorStoreSupabaseLoad implements INodeType { methods = { listSearch: { supabaseTableNameSearch } }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { this.logger.debug('Supply Supabase Load Vector Store'); const tableName = this.getNodeParameter('tableName', itemIndex, '', { diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZepLoad/VectorStoreZepLoad.node.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZepLoad/VectorStoreZepLoad.node.ts index 244c0a9843..dd30a0808e 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZepLoad/VectorStoreZepLoad.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/VectorStoreZepLoad/VectorStoreZepLoad.node.ts @@ -1,8 +1,8 @@ import { NodeConnectionType, - type IExecuteFunctions, type INodeType, type INodeTypeDescription, + type ISupplyDataFunctions, type SupplyData, } from 'n8n-workflow'; import type { IZepConfig } from '@langchain/community/vectorstores/zep'; @@ -83,7 +83,7 @@ export class VectorStoreZepLoad implements INodeType { ], }; - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, 
itemIndex: number): Promise { this.logger.debug('Supplying data for Zep Load Vector Store'); const collectionName = this.getNodeParameter('collectionName', itemIndex) as string; diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/MemoryVectorStoreManager.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/MemoryVectorStoreManager.ts index 806a5129c5..1076fb93ba 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/MemoryVectorStoreManager.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/MemoryVectorStoreManager.ts @@ -7,11 +7,8 @@ export class MemoryVectorStoreManager { private vectorStoreBuffer: Map; - private embeddings: Embeddings; - - private constructor(embeddings: Embeddings) { + private constructor(private embeddings: Embeddings) { this.vectorStoreBuffer = new Map(); - this.embeddings = embeddings; } public static getInstance(embeddings: Embeddings): MemoryVectorStoreManager { diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts index 45d28542d7..2de9304fc5 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts @@ -5,12 +5,13 @@ import type { Embeddings } from '@langchain/core/embeddings'; import type { VectorStore } from '@langchain/core/vectorstores'; import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import type { + IExecuteFunctions, INodeCredentialDescription, INodeProperties, INodeExecutionData, - IExecuteFunctions, INodeTypeDescription, SupplyData, + ISupplyDataFunctions, INodeType, ILoadOptionsFunctions, INodeListSearchResult, @@ -57,13 +58,13 @@ interface VectorStoreNodeConstructorArgs { retrieveFields?: INodeProperties[]; updateFields?: INodeProperties[]; populateVectorStore: ( - context: IExecuteFunctions, + context: ISupplyDataFunctions, embeddings: Embeddings, documents: Array>>, itemIndex: number, ) => Promise; getVectorStoreClient: ( - context: IExecuteFunctions, + context: ISupplyDataFunctions, filter: Record | undefined, embeddings: Embeddings, itemIndex: number, @@ -281,7 +282,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) => }); resultData.push(...serializedDocs); - void logAiEvent(this, 'ai-vector-store-searched', { query: prompt }); + logAiEvent(this, 'ai-vector-store-searched', { query: prompt }); } return [resultData]; @@ -311,7 +312,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) => try { await args.populateVectorStore(this, embeddings, processedDocuments, itemIndex); - void logAiEvent(this, 'ai-vector-store-populated'); + logAiEvent(this, 'ai-vector-store-populated'); } catch (error) { throw error; } @@ -365,7 +366,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) => ids: [documentId], }); - void logAiEvent(this, 'ai-vector-store-updated'); + logAiEvent(this, 'ai-vector-store-updated'); } catch (error) { throw error; } @@ -380,7 +381,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) => ); } - async supplyData(this: IExecuteFunctions, itemIndex: number): Promise { + async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise { const mode = this.getNodeParameter('mode', 0) as 'load' | 'insert' | 'retrieve'; const filter = getMetadataFiltersValues(this, itemIndex); const embeddings = (await 
this.getInputConnectionData( diff --git a/packages/@n8n/nodes-langchain/utils/N8nBinaryLoader.ts b/packages/@n8n/nodes-langchain/utils/N8nBinaryLoader.ts index 491bb03e28..53f4f95a74 100644 --- a/packages/@n8n/nodes-langchain/utils/N8nBinaryLoader.ts +++ b/packages/@n8n/nodes-langchain/utils/N8nBinaryLoader.ts @@ -1,6 +1,11 @@ import { pipeline } from 'stream/promises'; import { createWriteStream } from 'fs'; -import type { IBinaryData, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; +import type { + IBinaryData, + IExecuteFunctions, + INodeExecutionData, + ISupplyDataFunctions, +} from 'n8n-workflow'; import { NodeOperationError, BINARY_ENCODING } from 'n8n-workflow'; import type { TextSplitter } from '@langchain/textsplitters'; @@ -26,25 +31,12 @@ const SUPPORTED_MIME_TYPES = { }; export class N8nBinaryLoader { - private context: IExecuteFunctions; - - private optionsPrefix: string; - - private binaryDataKey: string; - - private textSplitter?: TextSplitter; - constructor( - context: IExecuteFunctions, - optionsPrefix = '', - binaryDataKey = '', - textSplitter?: TextSplitter, - ) { - this.context = context; - this.textSplitter = textSplitter; - this.optionsPrefix = optionsPrefix; - this.binaryDataKey = binaryDataKey; - } + private context: IExecuteFunctions | ISupplyDataFunctions, + private optionsPrefix = '', + private binaryDataKey = '', + private textSplitter?: TextSplitter, + ) {} async processAll(items?: INodeExecutionData[]): Promise { const docs: Document[] = []; diff --git a/packages/@n8n/nodes-langchain/utils/N8nJsonLoader.ts b/packages/@n8n/nodes-langchain/utils/N8nJsonLoader.ts index 6cc4862d22..7c44d8a8f9 100644 --- a/packages/@n8n/nodes-langchain/utils/N8nJsonLoader.ts +++ b/packages/@n8n/nodes-langchain/utils/N8nJsonLoader.ts @@ -1,4 +1,9 @@ -import { type IExecuteFunctions, type INodeExecutionData, NodeOperationError } from 'n8n-workflow'; +import { + type IExecuteFunctions, + type INodeExecutionData, + type ISupplyDataFunctions, + NodeOperationError, +} from 'n8n-workflow'; import type { TextSplitter } from '@langchain/textsplitters'; import type { Document } from '@langchain/core/documents'; @@ -7,17 +12,11 @@ import { TextLoader } from 'langchain/document_loaders/fs/text'; import { getMetadataFiltersValues } from './helpers'; export class N8nJsonLoader { - private context: IExecuteFunctions; - - private optionsPrefix: string; - - private textSplitter?: TextSplitter; - - constructor(context: IExecuteFunctions, optionsPrefix = '', textSplitter?: TextSplitter) { - this.context = context; - this.textSplitter = textSplitter; - this.optionsPrefix = optionsPrefix; - } + constructor( + private context: IExecuteFunctions | ISupplyDataFunctions, + private optionsPrefix = '', + private textSplitter?: TextSplitter, + ) {} async processAll(items?: INodeExecutionData[]): Promise { const docs: Document[] = []; diff --git a/packages/@n8n/nodes-langchain/utils/N8nTool.ts b/packages/@n8n/nodes-langchain/utils/N8nTool.ts index bb8bab08bd..2cb89630f0 100644 --- a/packages/@n8n/nodes-langchain/utils/N8nTool.ts +++ b/packages/@n8n/nodes-langchain/utils/N8nTool.ts @@ -1,6 +1,6 @@ import type { DynamicStructuredToolInput } from '@langchain/core/tools'; import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools'; -import type { IExecuteFunctions, IDataObject } from 'n8n-workflow'; +import type { ISupplyDataFunctions, IDataObject } from 'n8n-workflow'; import { NodeConnectionType, jsonParse, NodeOperationError } from 'n8n-workflow'; import { StructuredOutputParser } 
from 'langchain/output_parsers'; import type { ZodTypeAny } from 'zod'; @@ -45,12 +45,11 @@ ALL parameters marked as required must be provided`; }; export class N8nTool extends DynamicStructuredTool { - private context: IExecuteFunctions; - - constructor(context: IExecuteFunctions, fields: DynamicStructuredToolInput) { + constructor( + private context: ISupplyDataFunctions, + fields: DynamicStructuredToolInput, + ) { super(fields); - - this.context = context; } asDynamicTool(): DynamicTool { diff --git a/packages/@n8n/nodes-langchain/utils/helpers.ts b/packages/@n8n/nodes-langchain/utils/helpers.ts index a760c32ba8..f1e02e9c9f 100644 --- a/packages/@n8n/nodes-langchain/utils/helpers.ts +++ b/packages/@n8n/nodes-langchain/utils/helpers.ts @@ -5,7 +5,13 @@ import type { BaseMessage } from '@langchain/core/messages'; import type { Tool } from '@langchain/core/tools'; import type { BaseChatMemory } from 'langchain/memory'; import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow'; -import type { AiEvent, IDataObject, IExecuteFunctions, IWebhookFunctions } from 'n8n-workflow'; +import type { + AiEvent, + IDataObject, + IExecuteFunctions, + ISupplyDataFunctions, + IWebhookFunctions, +} from 'n8n-workflow'; import { N8nTool } from './N8nTool'; @@ -20,7 +26,7 @@ function hasMethods(obj: unknown, ...methodNames: Array): ob } export function getMetadataFiltersValues( - ctx: IExecuteFunctions, + ctx: IExecuteFunctions | ISupplyDataFunctions, itemIndex: number, ): Record | undefined { const options = ctx.getNodeParameter('options', itemIndex, {}); @@ -93,7 +99,7 @@ export function getPromptInputByType(options: { } export function getSessionId( - ctx: IExecuteFunctions | IWebhookFunctions, + ctx: ISupplyDataFunctions | IWebhookFunctions, itemIndex: number, selectorKey = 'sessionIdType', autoSelect = 'fromInput', @@ -133,13 +139,13 @@ export function getSessionId( return sessionId; } -export async function logAiEvent( - executeFunctions: IExecuteFunctions, +export function logAiEvent( + executeFunctions: IExecuteFunctions | ISupplyDataFunctions, event: AiEvent, data?: IDataObject, ) { try { - await executeFunctions.logAiEvent(event, data ? jsonStringify(data) : undefined); + executeFunctions.logAiEvent(event, data ? 
jsonStringify(data) : undefined); } catch (error) { executeFunctions.logger.debug(`Error logging AI event: ${event}`); } diff --git a/packages/@n8n/nodes-langchain/utils/logWrapper.ts b/packages/@n8n/nodes-langchain/utils/logWrapper.ts index c1ecd1799c..eca1431a4b 100644 --- a/packages/@n8n/nodes-langchain/utils/logWrapper.ts +++ b/packages/@n8n/nodes-langchain/utils/logWrapper.ts @@ -10,7 +10,7 @@ import type { Tool } from '@langchain/core/tools'; import { VectorStore } from '@langchain/core/vectorstores'; import { TextSplitter } from '@langchain/textsplitters'; import type { BaseDocumentLoader } from 'langchain/dist/document_loaders/base'; -import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; +import type { IExecuteFunctions, INodeExecutionData, ISupplyDataFunctions } from 'n8n-workflow'; import { NodeOperationError, NodeConnectionType } from 'n8n-workflow'; import { logAiEvent, isToolsInstance, isBaseChatMemory, isBaseChatMessageHistory } from './helpers'; @@ -27,7 +27,7 @@ const errorsMap: { [key: string]: { message: string; description: string } } = { export async function callMethodAsync( this: T, parameters: { - executeFunctions: IExecuteFunctions; + executeFunctions: IExecuteFunctions | ISupplyDataFunctions; connectionType: NodeConnectionType; currentNodeRunIndex: number; method: (...args: any[]) => Promise; @@ -113,7 +113,7 @@ export function logWrapper( | VectorStore | N8nBinaryLoader | N8nJsonLoader, - executeFunctions: IExecuteFunctions, + executeFunctions: IExecuteFunctions | ISupplyDataFunctions, ) { return new Proxy(originalInstance, { get: (target, prop) => { @@ -190,7 +190,7 @@ export function logWrapper( const payload = { action: 'getMessages', response }; executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]); - void logAiEvent(executeFunctions, 'ai-messages-retrieved-from-memory', { response }); + logAiEvent(executeFunctions, 'ai-messages-retrieved-from-memory', { response }); return response; }; } else if (prop === 'addMessage' && 'addMessage' in target) { @@ -207,7 +207,7 @@ export function logWrapper( arguments: [message], }); - void logAiEvent(executeFunctions, 'ai-message-added-to-memory', { message }); + logAiEvent(executeFunctions, 'ai-message-added-to-memory', { message }); executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]); }; } @@ -233,7 +233,7 @@ export function logWrapper( arguments: [query, config], })) as Array>>; - void logAiEvent(executeFunctions, 'ai-documents-retrieved', { query }); + logAiEvent(executeFunctions, 'ai-documents-retrieved', { query }); executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); return response; }; @@ -258,7 +258,7 @@ export function logWrapper( arguments: [documents], })) as number[][]; - void logAiEvent(executeFunctions, 'ai-document-embedded'); + logAiEvent(executeFunctions, 'ai-document-embedded'); executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); return response; }; @@ -278,7 +278,7 @@ export function logWrapper( method: target[prop], arguments: [query], })) as number[]; - void logAiEvent(executeFunctions, 'ai-query-embedded'); + logAiEvent(executeFunctions, 'ai-query-embedded'); executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); return response; }; @@ -323,7 +323,7 @@ export function logWrapper( arguments: [item, itemIndex], })) as number[]; - void logAiEvent(executeFunctions, 'ai-document-processed'); + logAiEvent(executeFunctions, 'ai-document-processed'); 
executeFunctions.addOutputData(connectionType, index, [ [{ json: { response }, pairedItem: { item: itemIndex } }], ]); @@ -349,7 +349,7 @@ export function logWrapper( arguments: [text], })) as string[]; - void logAiEvent(executeFunctions, 'ai-text-split'); + logAiEvent(executeFunctions, 'ai-text-split'); executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); return response; }; @@ -373,7 +373,7 @@ export function logWrapper( arguments: [query], })) as string; - void logAiEvent(executeFunctions, 'ai-tool-called', { query, response }); + logAiEvent(executeFunctions, 'ai-tool-called', { query, response }); executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); return response; }; @@ -403,7 +403,7 @@ export function logWrapper( arguments: [query, k, filter, _callbacks], })) as Array>>; - void logAiEvent(executeFunctions, 'ai-vector-store-searched', { query }); + logAiEvent(executeFunctions, 'ai-vector-store-searched', { query }); executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); return response; diff --git a/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputFixingParser.ts b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputFixingParser.ts index bfcbf88b33..eec3b0c187 100644 --- a/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputFixingParser.ts +++ b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputFixingParser.ts @@ -2,7 +2,7 @@ import type { Callbacks } from '@langchain/core/callbacks/manager'; import type { BaseLanguageModel } from '@langchain/core/language_models/base'; import type { AIMessage } from '@langchain/core/messages'; import { BaseOutputParser } from '@langchain/core/output_parsers'; -import type { IExecuteFunctions } from 'n8n-workflow'; +import type { ISupplyDataFunctions } from 'n8n-workflow'; import { NodeConnectionType } from 'n8n-workflow'; import type { N8nStructuredOutputParser } from './N8nStructuredOutputParser'; @@ -10,23 +10,14 @@ import { NAIVE_FIX_PROMPT } from './prompt'; import { logAiEvent } from '../helpers'; export class N8nOutputFixingParser extends BaseOutputParser { - private context: IExecuteFunctions; - - private model: BaseLanguageModel; - - private outputParser: N8nStructuredOutputParser; - lc_namespace = ['langchain', 'output_parsers', 'fix']; constructor( - context: IExecuteFunctions, - model: BaseLanguageModel, - outputParser: N8nStructuredOutputParser, + private context: ISupplyDataFunctions, + private model: BaseLanguageModel, + private outputParser: N8nStructuredOutputParser, ) { super(); - this.context = context; - this.model = model; - this.outputParser = outputParser; } getRetryChain() { @@ -48,7 +39,7 @@ export class N8nOutputFixingParser extends BaseOutputParser { try { // First attempt to parse the completion const response = await this.outputParser.parse(completion, callbacks, (e) => e); - void logAiEvent(this.context, 'ai-output-parsed', { text: completion, response }); + logAiEvent(this.context, 'ai-output-parsed', { text: completion, response }); this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [ [{ json: { action: 'parse', response } }], diff --git a/packages/@n8n/nodes-langchain/utils/output_parsers/N8nStructuredOutputParser.ts b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nStructuredOutputParser.ts index 4799193be6..a24052f5e1 100644 --- a/packages/@n8n/nodes-langchain/utils/output_parsers/N8nStructuredOutputParser.ts +++ 
b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nStructuredOutputParser.ts @@ -1,7 +1,7 @@ import type { Callbacks } from '@langchain/core/callbacks/manager'; import { StructuredOutputParser } from 'langchain/output_parsers'; import get from 'lodash/get'; -import type { IExecuteFunctions } from 'n8n-workflow'; +import type { ISupplyDataFunctions } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import { z } from 'zod'; @@ -14,11 +14,11 @@ const STRUCTURED_OUTPUT_ARRAY_KEY = '__structured__output__array'; export class N8nStructuredOutputParser extends StructuredOutputParser< z.ZodType > { - context: IExecuteFunctions; - - constructor(context: IExecuteFunctions, zodSchema: z.ZodSchema) { + constructor( + private context: ISupplyDataFunctions, + zodSchema: z.ZodSchema, + ) { super(zodSchema); - this.context = context; } lc_namespace = ['langchain', 'output_parsers', 'structured']; @@ -39,7 +39,7 @@ export class N8nStructuredOutputParser extends StructuredOutputParser< get(parsed, STRUCTURED_OUTPUT_KEY) ?? parsed) as Record; - void logAiEvent(this.context, 'ai-output-parsed', { text, response: result }); + logAiEvent(this.context, 'ai-output-parsed', { text, response: result }); this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [ [{ json: { action: 'parse', response: result } }], @@ -56,7 +56,7 @@ export class N8nStructuredOutputParser extends StructuredOutputParser< }, ); - void logAiEvent(this.context, 'ai-output-parsed', { + logAiEvent(this.context, 'ai-output-parsed', { text, response: e.message ?? e, }); @@ -73,7 +73,7 @@ export class N8nStructuredOutputParser extends StructuredOutputParser< static async fromZodJsonSchema( zodSchema: z.ZodSchema, nodeVersion: number, - context: IExecuteFunctions, + context: ISupplyDataFunctions, ): Promise { let returnSchema: z.ZodType; if (nodeVersion === 1) { diff --git a/packages/@n8n/task-runner/package.json b/packages/@n8n/task-runner/package.json index d889bde6f3..fca6b9a22c 100644 --- a/packages/@n8n/task-runner/package.json +++ b/packages/@n8n/task-runner/package.json @@ -22,9 +22,11 @@ "dist/**/*" ], "dependencies": { + "@n8n/config": "workspace:*", "n8n-workflow": "workspace:*", "n8n-core": "workspace:*", "nanoid": "^3.3.6", + "typedi": "catalog:", "ws": "^8.18.0" }, "devDependencies": { diff --git a/packages/@n8n/task-runner/src/config/base-runner-config.ts b/packages/@n8n/task-runner/src/config/base-runner-config.ts new file mode 100644 index 0000000000..01e00c177a --- /dev/null +++ b/packages/@n8n/task-runner/src/config/base-runner-config.ts @@ -0,0 +1,16 @@ +import { Config, Env } from '@n8n/config'; + +@Config +export class BaseRunnerConfig { + @Env('N8N_RUNNERS_N8N_URI') + n8nUri: string = '127.0.0.1:5679'; + + @Env('N8N_RUNNERS_GRANT_TOKEN') + grantToken: string = ''; + + @Env('N8N_RUNNERS_MAX_PAYLOAD') + maxPayloadSize: number = 1024 * 1024 * 1024; + + @Env('N8N_RUNNERS_MAX_CONCURRENCY') + maxConcurrency: number = 5; +} diff --git a/packages/@n8n/task-runner/src/config/js-runner-config.ts b/packages/@n8n/task-runner/src/config/js-runner-config.ts new file mode 100644 index 0000000000..4cba6f1d98 --- /dev/null +++ b/packages/@n8n/task-runner/src/config/js-runner-config.ts @@ -0,0 +1,10 @@ +import { Config, Env } from '@n8n/config'; + +@Config +export class JsRunnerConfig { + @Env('NODE_FUNCTION_ALLOW_BUILTIN') + allowedBuiltInModules: string = ''; + + @Env('NODE_FUNCTION_ALLOW_EXTERNAL') + allowedExternalModules: string = ''; +} diff --git 
a/packages/@n8n/task-runner/src/config/main-config.ts b/packages/@n8n/task-runner/src/config/main-config.ts new file mode 100644 index 0000000000..a290c0c380 --- /dev/null +++ b/packages/@n8n/task-runner/src/config/main-config.ts @@ -0,0 +1,13 @@ +import { Config, Nested } from '@n8n/config'; + +import { BaseRunnerConfig } from './base-runner-config'; +import { JsRunnerConfig } from './js-runner-config'; + +@Config +export class MainConfig { + @Nested + baseRunnerConfig!: BaseRunnerConfig; + + @Nested + jsRunnerConfig!: JsRunnerConfig; +} diff --git a/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts b/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts index 5f83b23ae1..36f3c5afa2 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/__tests__/js-task-runner.test.ts @@ -4,7 +4,6 @@ import fs from 'node:fs'; import { builtinModules } from 'node:module'; import { ValidationError } from '@/js-task-runner/errors/validation-error'; -import type { JsTaskRunnerOpts } from '@/js-task-runner/js-task-runner'; import { JsTaskRunner, type AllCodeTaskData, @@ -13,17 +12,27 @@ import { import type { Task } from '@/task-runner'; import { newAllCodeTaskData, newTaskWithSettings, withPairedItem, wrapIntoJson } from './test-data'; +import type { JsRunnerConfig } from '../../config/js-runner-config'; +import { MainConfig } from '../../config/main-config'; import { ExecutionError } from '../errors/execution-error'; jest.mock('ws'); +const defaultConfig = new MainConfig(); + describe('JsTaskRunner', () => { - const createRunnerWithOpts = (opts: Partial = {}) => + const createRunnerWithOpts = (opts: Partial = {}) => new JsTaskRunner({ - wsUrl: 'ws://localhost', - grantToken: 'grantToken', - maxConcurrency: 1, - ...opts, + baseRunnerConfig: { + ...defaultConfig.baseRunnerConfig, + grantToken: 'grantToken', + maxConcurrency: 1, + n8nUri: 'localhost', + }, + jsRunnerConfig: { + ...defaultConfig.jsRunnerConfig, + ...opts, + }, }); const defaultTaskRunner = createRunnerWithOpts(); diff --git a/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts b/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts index d7ec7d85f4..40ee12af2c 100644 --- a/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts +++ b/packages/@n8n/task-runner/src/js-task-runner/js-task-runner.ts @@ -30,6 +30,7 @@ import { makeSerializable } from './errors/serializable-error'; import type { RequireResolver } from './require-resolver'; import { createRequireResolver } from './require-resolver'; import { validateRunForAllItemsOutput, validateRunForEachItemOutput } from './result-validation'; +import type { MainConfig } from '../config/main-config'; export interface JSExecSettings { code: string; @@ -76,23 +77,6 @@ export interface AllCodeTaskData { additionalData: PartialAdditionalData; } -export interface JsTaskRunnerOpts { - wsUrl: string; - grantToken: string; - maxConcurrency: number; - name?: string; - /** - * List of built-in nodejs modules that are allowed to be required in the - * execution sandbox. Asterisk (*) can be used to allow all. - */ - allowedBuiltInModules?: string; - /** - * List of npm modules that are allowed to be required in the execution - * sandbox. Asterisk (*) can be used to allow all. 
- */ - allowedExternalModules?: string; -} - type CustomConsole = { log: (...args: unknown[]) => void; }; @@ -100,22 +84,20 @@ type CustomConsole = { export class JsTaskRunner extends TaskRunner { private readonly requireResolver: RequireResolver; - constructor({ - grantToken, - maxConcurrency, - wsUrl, - name = 'JS Task Runner', - allowedBuiltInModules, - allowedExternalModules, - }: JsTaskRunnerOpts) { - super('javascript', wsUrl, grantToken, maxConcurrency, name); + constructor(config: MainConfig, name = 'JS Task Runner') { + super({ + taskType: 'javascript', + name, + ...config.baseRunnerConfig, + }); + const { jsRunnerConfig } = config; const parseModuleAllowList = (moduleList: string) => moduleList === '*' ? null : new Set(moduleList.split(',').map((x) => x.trim())); this.requireResolver = createRequireResolver({ - allowedBuiltInModules: parseModuleAllowList(allowedBuiltInModules ?? ''), - allowedExternalModules: parseModuleAllowList(allowedExternalModules ?? ''), + allowedBuiltInModules: parseModuleAllowList(jsRunnerConfig.allowedBuiltInModules ?? ''), + allowedExternalModules: parseModuleAllowList(jsRunnerConfig.allowedExternalModules ?? ''), }); } diff --git a/packages/@n8n/task-runner/src/start.ts b/packages/@n8n/task-runner/src/start.ts index f5487ba6c2..fcaab84d51 100644 --- a/packages/@n8n/task-runner/src/start.ts +++ b/packages/@n8n/task-runner/src/start.ts @@ -1,27 +1,12 @@ -import { ApplicationError, ensureError } from 'n8n-workflow'; +import { ensureError } from 'n8n-workflow'; +import Container from 'typedi'; +import { MainConfig } from './config/main-config'; import { JsTaskRunner } from './js-task-runner/js-task-runner'; let runner: JsTaskRunner | undefined; let isShuttingDown = false; -type Config = { - n8nUri: string; - grantToken: string; -}; - -function readAndParseConfig(): Config { - const grantToken = process.env.N8N_RUNNERS_GRANT_TOKEN; - if (!grantToken) { - throw new ApplicationError('Missing N8N_RUNNERS_GRANT_TOKEN environment variable'); - } - - return { - n8nUri: process.env.N8N_RUNNERS_N8N_URI ?? 
'127.0.0.1:5679', - grantToken, - }; -} - function createSignalHandler(signal: string) { return async function onSignal() { if (isShuttingDown) { @@ -46,16 +31,9 @@ function createSignalHandler(signal: string) { } void (async function start() { - const config = readAndParseConfig(); + const config = Container.get(MainConfig); - const wsUrl = `ws://${config.n8nUri}/runners/_ws`; - runner = new JsTaskRunner({ - wsUrl, - grantToken: config.grantToken, - maxConcurrency: 5, - allowedBuiltInModules: process.env.NODE_FUNCTION_ALLOW_BUILTIN, - allowedExternalModules: process.env.NODE_FUNCTION_ALLOW_EXTERNAL, - }); + runner = new JsTaskRunner(config); process.on('SIGINT', createSignalHandler('SIGINT')); process.on('SIGTERM', createSignalHandler('SIGTERM')); diff --git a/packages/@n8n/task-runner/src/task-runner.ts b/packages/@n8n/task-runner/src/task-runner.ts index 356afb69e5..9629cc15d5 100644 --- a/packages/@n8n/task-runner/src/task-runner.ts +++ b/packages/@n8n/task-runner/src/task-runner.ts @@ -1,8 +1,8 @@ import { ApplicationError, type INodeTypeDescription } from 'n8n-workflow'; import { nanoid } from 'nanoid'; -import { URL } from 'node:url'; import { type MessageEvent, WebSocket } from 'ws'; +import type { BaseRunnerConfig } from './config/base-runner-config'; import { TaskRunnerNodeTypes } from './node-types'; import { RPC_ALLOW_LIST, @@ -42,7 +42,10 @@ export interface RPCCallObject { const VALID_TIME_MS = 1000; const VALID_EXTRA_MS = 100; -const DEFAULT_MAX_PAYLOAD_SIZE = 1024 * 1024 * 1024; +export interface TaskRunnerOpts extends BaseRunnerConfig { + taskType: string; + name?: string; +} export abstract class TaskRunner { id: string = nanoid(); @@ -63,22 +66,23 @@ export abstract class TaskRunner { nodeTypes: TaskRunnerNodeTypes = new TaskRunnerNodeTypes([]); - constructor( - public taskType: string, - wsUrl: string, - grantToken: string, - private maxConcurrency: number, - public name?: string, - ) { - const url = new URL(wsUrl); - url.searchParams.append('id', this.id); - this.ws = new WebSocket(url.toString(), { + taskType: string; + + maxConcurrency: number; + + name: string; + + constructor(opts: TaskRunnerOpts) { + this.taskType = opts.taskType; + this.name = opts.name ?? 'Node.js Task Runner SDK'; + this.maxConcurrency = opts.maxConcurrency; + + const wsUrl = `ws://${opts.n8nUri}/runners/_ws?id=${this.id}`; + this.ws = new WebSocket(wsUrl, { headers: { - authorization: `Bearer ${grantToken}`, + authorization: `Bearer ${opts.grantToken}`, }, - maxPayload: process.env.N8N_RUNNERS_MAX_PAYLOAD - ? parseInt(process.env.N8N_RUNNERS_MAX_PAYLOAD) - : DEFAULT_MAX_PAYLOAD_SIZE, + maxPayload: opts.maxPayloadSize, }); this.ws.addEventListener('message', this.receiveMessage); this.ws.addEventListener('close', this.stopTaskOffers); @@ -145,7 +149,7 @@ export abstract class TaskRunner { case 'broker:inforequest': this.send({ type: 'runner:info', - name: this.name ?? 
'Node.js Task Runner SDK', + name: this.name, types: [this.taskType], }); break; diff --git a/packages/@n8n/task-runner/tsconfig.json b/packages/@n8n/task-runner/tsconfig.json index db6ad545e3..ddee64ec1f 100644 --- a/packages/@n8n/task-runner/tsconfig.json +++ b/packages/@n8n/task-runner/tsconfig.json @@ -2,6 +2,8 @@ "extends": ["../../../tsconfig.json", "../../../tsconfig.backend.json"], "compilerOptions": { "rootDir": ".", + "emitDecoratorMetadata": true, + "experimentalDecorators": true, "baseUrl": "src", "paths": { "@/*": ["./*"] diff --git a/packages/cli/src/__tests__/license.test.ts b/packages/cli/src/__tests__/license.test.ts index 70aa80347a..d33d7c37cf 100644 --- a/packages/cli/src/__tests__/license.test.ts +++ b/packages/cli/src/__tests__/license.test.ts @@ -17,14 +17,16 @@ const MOCK_ACTIVATION_KEY = 'activation-key'; const MOCK_FEATURE_FLAG = 'feat:sharing'; const MOCK_MAIN_PLAN_ID = '1b765dc4-d39d-4ffe-9885-c56dd67c4b26'; -describe('License', () => { - beforeAll(() => { - config.set('license.serverUrl', MOCK_SERVER_URL); - config.set('license.autoRenewEnabled', true); - config.set('license.autoRenewOffset', MOCK_RENEW_OFFSET); - config.set('license.tenantId', 1); - }); +const licenseConfig: GlobalConfig['license'] = { + serverUrl: MOCK_SERVER_URL, + autoRenewalEnabled: true, + autoRenewOffset: MOCK_RENEW_OFFSET, + activationKey: MOCK_ACTIVATION_KEY, + tenantId: 1, + cert: '', +}; +describe('License', () => { let license: License; const instanceSettings = mock({ instanceId: MOCK_INSTANCE_ID, @@ -32,7 +34,10 @@ describe('License', () => { }); beforeEach(async () => { - const globalConfig = mock({ multiMainSetup: { enabled: false } }); + const globalConfig = mock({ + license: licenseConfig, + multiMainSetup: { enabled: false }, + }); license = new License(mockLogger(), instanceSettings, mock(), mock(), mock(), globalConfig); await license.init(); }); @@ -66,7 +71,7 @@ describe('License', () => { mock(), mock(), mock(), - mock(), + mock({ license: licenseConfig }), ); await license.init(); expect(LicenseManager).toHaveBeenCalledWith( @@ -192,17 +197,23 @@ describe('License', () => { }); describe('License', () => { - beforeEach(() => { - config.load(config.default); - }); - describe('init', () => { describe('in single-main setup', () => { describe('with `license.autoRenewEnabled` enabled', () => { it('should enable renewal', async () => { - const globalConfig = mock({ multiMainSetup: { enabled: false } }); + const globalConfig = mock({ + license: licenseConfig, + multiMainSetup: { enabled: false }, + }); - await new License(mockLogger(), mock(), mock(), mock(), mock(), globalConfig).init(); + await new License( + mockLogger(), + mock({ instanceType: 'main' }), + mock(), + mock(), + mock(), + globalConfig, + ).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: true, renewOnInit: true }), @@ -212,9 +223,14 @@ describe('License', () => { describe('with `license.autoRenewEnabled` disabled', () => { it('should disable renewal', async () => { - config.set('license.autoRenewEnabled', false); - - await new License(mockLogger(), mock(), mock(), mock(), mock(), mock()).init(); + await new License( + mockLogger(), + mock({ instanceType: 'main' }), + mock(), + mock(), + mock(), + mock(), + ).init(); expect(LicenseManager).toHaveBeenCalledWith( expect.objectContaining({ autoRenewEnabled: false, renewOnInit: false }), @@ -228,9 +244,11 @@ describe('License', () => { test.each(['unset', 'leader', 'follower'])( 'if %s status, should disable removal', 
async (status) => { - const globalConfig = mock({ multiMainSetup: { enabled: true } }); + const globalConfig = mock({ + license: { ...licenseConfig, autoRenewalEnabled: false }, + multiMainSetup: { enabled: true }, + }); config.set('multiMainSetup.instanceType', status); - config.set('license.autoRenewEnabled', false); await new License(mockLogger(), mock(), mock(), mock(), mock(), globalConfig).init(); @@ -243,9 +261,11 @@ describe('License', () => { describe('with `license.autoRenewEnabled` enabled', () => { test.each(['unset', 'follower'])('if %s status, should disable removal', async (status) => { - const globalConfig = mock({ multiMainSetup: { enabled: true } }); + const globalConfig = mock({ + license: { ...licenseConfig, autoRenewalEnabled: false }, + multiMainSetup: { enabled: true }, + }); config.set('multiMainSetup.instanceType', status); - config.set('license.autoRenewEnabled', false); await new License(mockLogger(), mock(), mock(), mock(), mock(), globalConfig).init(); @@ -255,7 +275,10 @@ describe('License', () => { }); it('if leader status, should enable renewal', async () => { - const globalConfig = mock({ multiMainSetup: { enabled: true } }); + const globalConfig = mock({ + license: licenseConfig, + multiMainSetup: { enabled: true }, + }); config.set('multiMainSetup.instanceType', 'leader'); await new License(mockLogger(), mock(), mock(), mock(), mock(), globalConfig).init(); diff --git a/packages/cli/src/commands/audit.ts b/packages/cli/src/commands/audit.ts index e86c8c9ab5..e98bb8bce0 100644 --- a/packages/cli/src/commands/audit.ts +++ b/packages/cli/src/commands/audit.ts @@ -1,8 +1,8 @@ +import { SecurityConfig } from '@n8n/config'; import { Flags } from '@oclif/core'; import { ApplicationError } from 'n8n-workflow'; import { Container } from 'typedi'; -import config from '@/config'; import { RISK_CATEGORIES } from '@/security-audit/constants'; import { SecurityAuditService } from '@/security-audit/security-audit.service'; import type { Risk } from '@/security-audit/types'; @@ -26,7 +26,7 @@ export class SecurityAudit extends BaseCommand { }), 'days-abandoned-workflow': Flags.integer({ - default: config.getEnv('security.audit.daysAbandonedWorkflow'), + default: Container.get(SecurityConfig).daysAbandonedWorkflow, description: 'Days for a workflow to be considered abandoned if not executed', }), }; diff --git a/packages/cli/src/commands/base-command.ts b/packages/cli/src/commands/base-command.ts index 64ce401257..214d7f4ce7 100644 --- a/packages/cli/src/commands/base-command.ts +++ b/packages/cli/src/commands/base-command.ts @@ -274,7 +274,7 @@ export abstract class BaseCommand extends Command { this.license = Container.get(License); await this.license.init(); - const activationKey = config.getEnv('license.activationKey'); + const { activationKey } = this.globalConfig.license; if (activationKey) { const hasCert = (await this.license.loadCertStr()).length > 0; diff --git a/packages/cli/src/commands/start.ts b/packages/cli/src/commands/start.ts index 4a80c1d1c4..70f52f8cb8 100644 --- a/packages/cli/src/commands/start.ts +++ b/packages/cli/src/commands/start.ts @@ -199,7 +199,7 @@ export class Start extends BaseCommand { await this.initOrchestration(); this.logger.debug('Orchestration init complete'); - if (!config.getEnv('license.autoRenewEnabled') && this.instanceSettings.isLeader) { + if (!this.globalConfig.license.autoRenewalEnabled && this.instanceSettings.isLeader) { this.logger.warn( 'Automatic license renewal is disabled. 
The license will not renew automatically, and access to licensed features may be lost!', ); diff --git a/packages/cli/src/config/schema.ts b/packages/cli/src/config/schema.ts index e0e322210e..8bece9199a 100644 --- a/packages/cli/src/config/schema.ts +++ b/packages/cli/src/config/schema.ts @@ -187,29 +187,6 @@ export const schema = { doc: 'Public URL where the editor is accessible. Also used for emails sent from n8n.', }, - security: { - restrictFileAccessTo: { - doc: 'If set only files in that directories can be accessed. Multiple directories can be separated by semicolon (";").', - format: String, - default: '', - env: 'N8N_RESTRICT_FILE_ACCESS_TO', - }, - blockFileAccessToN8nFiles: { - doc: 'If set to true it will block access to all files in the ".n8n" directory, the static cache dir at ~/.cache/n8n/public, and user defined config files.', - format: Boolean, - default: true, - env: 'N8N_BLOCK_FILE_ACCESS_TO_N8N_FILES', - }, - audit: { - daysAbandonedWorkflow: { - doc: 'Days for a workflow to be considered abandoned if not executed', - format: Number, - default: 90, - env: 'N8N_SECURITY_AUDIT_DAYS_ABANDONED_WORKFLOW', - }, - }, - }, - workflowTagsDisabled: { format: Boolean, default: false, @@ -411,45 +388,6 @@ export const schema = { env: 'N8N_DEFAULT_LOCALE', }, - license: { - serverUrl: { - format: String, - default: 'https://license.n8n.io/v1', - env: 'N8N_LICENSE_SERVER_URL', - doc: 'License server url to retrieve license.', - }, - autoRenewEnabled: { - format: Boolean, - default: true, - env: 'N8N_LICENSE_AUTO_RENEW_ENABLED', - doc: 'Whether auto renewal for licenses is enabled.', - }, - autoRenewOffset: { - format: Number, - default: 60 * 60 * 72, // 72 hours - env: 'N8N_LICENSE_AUTO_RENEW_OFFSET', - doc: 'How many seconds before expiry a license should get automatically renewed. 
', - }, - activationKey: { - format: String, - default: '', - env: 'N8N_LICENSE_ACTIVATION_KEY', - doc: 'Activation key to initialize license', - }, - tenantId: { - format: Number, - default: 1, - env: 'N8N_LICENSE_TENANT_ID', - doc: 'Tenant id used by the license manager', - }, - cert: { - format: String, - default: '', - env: 'N8N_LICENSE_CERT', - doc: 'Ephemeral license certificate', - }, - }, - hideUsagePage: { format: Boolean, default: false, diff --git a/packages/cli/src/events/__tests__/telemetry-event-relay.test.ts b/packages/cli/src/events/__tests__/telemetry-event-relay.test.ts index df65a70ecb..7e98877dc7 100644 --- a/packages/cli/src/events/__tests__/telemetry-event-relay.test.ts +++ b/packages/cli/src/events/__tests__/telemetry-event-relay.test.ts @@ -1061,6 +1061,7 @@ describe('TelemetryEventRelay', () => { describe('Community+ registered', () => { it('should track `license-community-plus-registered` event', () => { const event: RelayEventMap['license-community-plus-registered'] = { + userId: 'user123', email: 'user@example.com', licenseKey: 'license123', }; @@ -1068,6 +1069,7 @@ describe('TelemetryEventRelay', () => { eventService.emit('license-community-plus-registered', event); expect(telemetry.track).toHaveBeenCalledWith('User registered for license community plus', { + user_id: 'user123', email: 'user@example.com', licenseKey: 'license123', }); diff --git a/packages/cli/src/events/maps/relay.event-map.ts b/packages/cli/src/events/maps/relay.event-map.ts index 21b673a2b5..0e72564571 100644 --- a/packages/cli/src/events/maps/relay.event-map.ts +++ b/packages/cli/src/events/maps/relay.event-map.ts @@ -8,7 +8,7 @@ import type { import type { AuthProviderType } from '@/databases/entities/auth-identity'; import type { ProjectRole } from '@/databases/entities/project-relation'; -import type { GlobalRole } from '@/databases/entities/user'; +import type { GlobalRole, User } from '@/databases/entities/user'; import type { IWorkflowDb } from '@/interfaces'; import type { AiEventMap } from './ai.event-map'; @@ -421,6 +421,7 @@ export type RelayEventMap = { }; 'license-community-plus-registered': { + userId: User['id']; email: string; licenseKey: string; }; diff --git a/packages/cli/src/events/relays/telemetry.event-relay.ts b/packages/cli/src/events/relays/telemetry.event-relay.ts index 9e00e2e055..fc5cf0a53d 100644 --- a/packages/cli/src/events/relays/telemetry.event-relay.ts +++ b/packages/cli/src/events/relays/telemetry.event-relay.ts @@ -236,10 +236,12 @@ export class TelemetryEventRelay extends EventRelay { } private licenseCommunityPlusRegistered({ + userId, email, licenseKey, }: RelayEventMap['license-community-plus-registered']) { this.telemetry.track('User registered for license community plus', { + user_id: userId, email, licenseKey, }); @@ -778,7 +780,7 @@ export class TelemetryEventRelay extends EventRelay { ldap_allowed: authenticationMethod === 'ldap', saml_enabled: authenticationMethod === 'saml', license_plan_name: this.license.getPlanName(), - license_tenant_id: config.getEnv('license.tenantId'), + license_tenant_id: this.globalConfig.license.tenantId, binary_data_s3: isS3Available && isS3Selected && isS3Licensed, multi_main_setup_enabled: this.globalConfig.multiMainSetup.enabled, metrics: { diff --git a/packages/cli/src/license.ts b/packages/cli/src/license.ts index 68ff15cf39..8f1bd26e64 100644 --- a/packages/cli/src/license.ts +++ b/packages/cli/src/license.ts @@ -48,8 +48,7 @@ export class License { */ private renewalEnabled() { if 
(this.instanceSettings.instanceType !== 'main') return false; - - const autoRenewEnabled = config.getEnv('license.autoRenewEnabled'); + const autoRenewEnabled = this.globalConfig.license.autoRenewalEnabled; /** * In multi-main setup, all mains start off with `unset` status and so renewal disabled. @@ -75,9 +74,9 @@ export class License { const { instanceType } = this.instanceSettings; const isMainInstance = instanceType === 'main'; - const server = config.getEnv('license.serverUrl'); + const server = this.globalConfig.license.serverUrl; const offlineMode = !isMainInstance; - const autoRenewOffset = config.getEnv('license.autoRenewOffset'); + const autoRenewOffset = this.globalConfig.license.autoRenewOffset; const saveCertStr = isMainInstance ? async (value: TLicenseBlock) => await this.saveCertStr(value) : async () => {}; @@ -96,7 +95,7 @@ export class License { try { this.manager = new LicenseManager({ server, - tenantId: config.getEnv('license.tenantId'), + tenantId: this.globalConfig.license.tenantId, productIdentifier: `n8n-${N8N_VERSION}`, autoRenewEnabled: renewalEnabled, renewOnInit: renewalEnabled, @@ -122,7 +121,7 @@ export class License { async loadCertStr(): Promise { // if we have an ephemeral license, we don't want to load it from the database - const ephemeralLicense = config.get('license.cert'); + const ephemeralLicense = this.globalConfig.license.cert; if (ephemeralLicense) { return ephemeralLicense; } @@ -179,7 +178,7 @@ export class License { async saveCertStr(value: TLicenseBlock): Promise { // if we have an ephemeral license, we don't want to save it to the database - if (config.get('license.cert')) return; + if (this.globalConfig.license.cert) return; await this.settingsRepository.upsert( { key: SETTINGS_LICENSE_CERT_KEY, diff --git a/packages/cli/src/license/__tests__/license.service.test.ts b/packages/cli/src/license/__tests__/license.service.test.ts index 9cd9c0ee0b..8ffc1dbf3d 100644 --- a/packages/cli/src/license/__tests__/license.service.test.ts +++ b/packages/cli/src/license/__tests__/license.service.test.ts @@ -94,6 +94,7 @@ describe('LicenseService', () => { .spyOn(axios, 'post') .mockResolvedValueOnce({ data: { title: 'Title', text: 'Text', licenseKey: 'abc-123' } }); const data = await licenseService.registerCommunityEdition({ + userId: '123', email: 'test@ema.il', instanceId: '123', instanceUrl: 'http://localhost', @@ -102,6 +103,7 @@ describe('LicenseService', () => { expect(data).toEqual({ title: 'Title', text: 'Text' }); expect(eventService.emit).toHaveBeenCalledWith('license-community-plus-registered', { + userId: '123', email: 'test@ema.il', licenseKey: 'abc-123', }); @@ -111,6 +113,7 @@ describe('LicenseService', () => { jest.spyOn(axios, 'post').mockRejectedValueOnce(new AxiosError('Failed')); await expect( licenseService.registerCommunityEdition({ + userId: '123', email: 'test@ema.il', instanceId: '123', instanceUrl: 'http://localhost', diff --git a/packages/cli/src/license/license.controller.ts b/packages/cli/src/license/license.controller.ts index db895ef4a0..3c284a25cb 100644 --- a/packages/cli/src/license/license.controller.ts +++ b/packages/cli/src/license/license.controller.ts @@ -4,7 +4,7 @@ import { InstanceSettings } from 'n8n-core'; import { Get, Post, RestController, GlobalScope, Body } from '@/decorators'; import { BadRequestError } from '@/errors/response-errors/bad-request.error'; -import { AuthenticatedRequest, AuthlessRequest, LicenseRequest } from '@/requests'; +import { AuthenticatedRequest, LicenseRequest } from '@/requests'; 
+import { AuthenticatedRequest, LicenseRequest } from '@/requests';
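For orientation: the `this.globalConfig.license.*` reads introduced above presumably resolve to a license config class in `@n8n/config`. The sketch below is assembled only from the usages in this diff and from the schema entries removed from `packages/cli/src/config/schema.ts`; the class name `LicenseConfig`, the `../decorators` import path, and the assumption that the env var names are unchanged are not confirmed by this patch.

```ts
import { Config, Env } from '../decorators';

@Config
export class LicenseConfig {
	/** License server url to retrieve license. */
	@Env('N8N_LICENSE_SERVER_URL')
	serverUrl: string = 'https://license.n8n.io/v1';

	/** Whether auto renewal for licenses is enabled. Note the property rename from `autoRenewEnabled` to `autoRenewalEnabled`. */
	@Env('N8N_LICENSE_AUTO_RENEW_ENABLED')
	autoRenewalEnabled: boolean = true;

	/** How many seconds before expiry a license should get automatically renewed. */
	@Env('N8N_LICENSE_AUTO_RENEW_OFFSET')
	autoRenewOffset: number = 60 * 60 * 72; // 72 hours

	/** Activation key to initialize license. */
	@Env('N8N_LICENSE_ACTIVATION_KEY')
	activationKey: string = '';

	/** Tenant id used by the license manager. */
	@Env('N8N_LICENSE_TENANT_ID')
	tenantId: number = 1;

	/** Ephemeral license certificate. */
	@Env('N8N_LICENSE_CERT')
	cert: string = '';
}
```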
import { UrlService } from '@/services/url.service'; import { LicenseService } from './license.service'; @@ -41,11 +41,12 @@ export class LicenseController { @Post('/enterprise/community-registered') async registerCommunityEdition( - _req: AuthlessRequest, + req: AuthenticatedRequest, _res: Response, @Body payload: CommunityRegisteredRequestDto, ) { return await this.licenseService.registerCommunityEdition({ + userId: req.user.id, email: payload.email, instanceId: this.instanceSettings.instanceId, instanceUrl: this.urlService.getInstanceBaseUrl(), diff --git a/packages/cli/src/license/license.service.ts b/packages/cli/src/license/license.service.ts index cdd6454036..1419d58b83 100644 --- a/packages/cli/src/license/license.service.ts +++ b/packages/cli/src/license/license.service.ts @@ -61,11 +61,13 @@ export class LicenseService { } async registerCommunityEdition({ + userId, email, instanceId, instanceUrl, licenseType, }: { + userId: User['id']; email: string; instanceId: string; instanceUrl: string; @@ -83,7 +85,7 @@ export class LicenseService { licenseType, }, ); - this.eventService.emit('license-community-plus-registered', { email, licenseKey }); + this.eventService.emit('license-community-plus-registered', { userId, email, licenseKey }); return rest; } catch (e: unknown) { if (e instanceof AxiosError) { diff --git a/packages/cli/src/runners/__tests__/forward-to-logger.test.ts b/packages/cli/src/runners/__tests__/forward-to-logger.test.ts new file mode 100644 index 0000000000..64352ab54d --- /dev/null +++ b/packages/cli/src/runners/__tests__/forward-to-logger.test.ts @@ -0,0 +1,114 @@ +import type { Logger } from 'n8n-workflow'; +import { Readable } from 'stream'; + +import { forwardToLogger } from '../forward-to-logger'; + +describe('forwardToLogger', () => { + let logger: Logger; + let stdout: Readable; + let stderr: Readable; + + beforeEach(() => { + logger = { + info: jest.fn(), + error: jest.fn(), + } as unknown as Logger; + + stdout = new Readable({ read() {} }); + stderr = new Readable({ read() {} }); + + jest.resetAllMocks(); + }); + + const pushToStdout = async (data: string) => { + stdout.push(Buffer.from(data)); + stdout.push(null); + // Wait for the next tick to allow the event loop to process the data + await new Promise((resolve) => setImmediate(resolve)); + }; + + const pushToStderr = async (data: string) => { + stderr.push(Buffer.from(data)); + stderr.push(null); + // Wait for the next tick to allow the event loop to process the data + await new Promise((resolve) => setImmediate(resolve)); + }; + + it('should forward stdout data to logger.info', async () => { + forwardToLogger(logger, { stdout, stderr: null }); + + await pushToStdout('Test stdout message'); + + await new Promise((resolve) => setImmediate(resolve)); + + expect(logger.info).toHaveBeenCalledWith('Test stdout message'); + }); + + it('should forward stderr data to logger.error', async () => { + forwardToLogger(logger, { stdout: null, stderr }); + + await pushToStderr('Test stderr message'); + + expect(logger.error).toHaveBeenCalledWith('Test stderr message'); + }); + + it('should remove trailing newline from stdout', async () => { + forwardToLogger(logger, { stdout, stderr: null }); + + await pushToStdout('Test stdout message\n'); + + expect(logger.info).toHaveBeenCalledWith('Test stdout message'); + }); + + it('should remove trailing newline from stderr', async () => { + forwardToLogger(logger, { stdout: null, stderr }); + + await pushToStderr('Test stderr message\n'); + + 
expect(logger.error).toHaveBeenCalledWith('Test stderr message'); + }); + + it('should forward stderr data to logger.error', async () => { + forwardToLogger(logger, { stdout: null, stderr }); + + await pushToStderr('Test stderr message'); + + expect(logger.error).toHaveBeenCalledWith('Test stderr message'); + }); + + it('should include prefix if provided for stdout', async () => { + const prefix = '[PREFIX]'; + forwardToLogger(logger, { stdout, stderr: null }, prefix); + + await pushToStdout('Message with prefix'); + + expect(logger.info).toHaveBeenCalledWith('[PREFIX] Message with prefix'); + }); + + it('should include prefix if provided for stderr', async () => { + const prefix = '[PREFIX]'; + forwardToLogger(logger, { stdout: null, stderr }, prefix); + + await pushToStderr('Error message with prefix'); + + expect(logger.error).toHaveBeenCalledWith('[PREFIX] Error message with prefix'); + }); + + it('should make sure there is no duplicate space after prefix for stdout', async () => { + const prefix = '[PREFIX] '; + forwardToLogger(logger, { stdout, stderr: null }, prefix); + + await pushToStdout('Message with prefix'); + + expect(logger.info).toHaveBeenCalledWith('[PREFIX] Message with prefix'); + }); + + it('should make sure there is no duplicate space after prefix for stderr', async () => { + const prefix = '[PREFIX] '; + forwardToLogger(logger, { stdout: null, stderr }, prefix); + + await pushToStderr('Error message with prefix'); + + expect(logger.error).toHaveBeenCalledWith('[PREFIX] Error message with prefix'); + }); +}); diff --git a/packages/cli/src/runners/__tests__/node-process-oom-detector.test.ts b/packages/cli/src/runners/__tests__/node-process-oom-detector.test.ts new file mode 100644 index 0000000000..5b619e0e08 --- /dev/null +++ b/packages/cli/src/runners/__tests__/node-process-oom-detector.test.ts @@ -0,0 +1,43 @@ +import { spawn } from 'node:child_process'; + +import { NodeProcessOomDetector } from '../node-process-oom-detector'; + +describe('NodeProcessOomDetector', () => { + test('should detect an out-of-memory error in a monitored process', (done) => { + const childProcess = spawn(process.execPath, [ + // set low memory limit + '--max-old-space-size=20', + '-e', + ` + const data = []; + // fill memory until it crashes + while (true) data.push(Array.from({ length: 10_000 }).map(() => Math.random().toString()).join()); + `, + ]); + + const detector = new NodeProcessOomDetector(childProcess); + + childProcess.on('exit', (code) => { + expect(detector.didProcessOom).toBe(true); + expect(code).not.toBe(0); + done(); + }); + }); + + test('should not detect an out-of-memory error in a process that exits normally', (done) => { + const childProcess = spawn(process.execPath, [ + '-e', + ` + console.log("Hello, World!"); + `, + ]); + + const detector = new NodeProcessOomDetector(childProcess); + + childProcess.on('exit', (code) => { + expect(detector.didProcessOom).toBe(false); + expect(code).toBe(0); + done(); + }); + }); +}); diff --git a/packages/cli/src/runners/__tests__/sliding-window-signal.test.ts b/packages/cli/src/runners/__tests__/sliding-window-signal.test.ts new file mode 100644 index 0000000000..56462d186a --- /dev/null +++ b/packages/cli/src/runners/__tests__/sliding-window-signal.test.ts @@ -0,0 +1,71 @@ +import { TypedEmitter } from '../../typed-emitter'; +import { SlidingWindowSignal } from '../sliding-window-signal'; + +type TestEventMap = { + testEvent: string; +}; + +describe('SlidingWindowSignal', () => { + let eventEmitter: TypedEmitter; + let 
slidingWindowSignal: SlidingWindowSignal; + + beforeEach(() => { + eventEmitter = new TypedEmitter(); + slidingWindowSignal = new SlidingWindowSignal(eventEmitter, 'testEvent', { + windowSizeInMs: 500, + }); + }); + + afterEach(() => { + jest.clearAllTimers(); + jest.clearAllMocks(); + }); + + it('should return the last signal if within window size', async () => { + const signal = 'testSignal'; + eventEmitter.emit('testEvent', signal); + + const receivedSignal = await slidingWindowSignal.getSignal(); + + expect(receivedSignal).toBe(signal); + }); + + it('should return null if there is no signal within the window', async () => { + jest.useFakeTimers(); + const receivedSignalPromise = slidingWindowSignal.getSignal(); + jest.advanceTimersByTime(600); + const receivedSignal = await receivedSignalPromise; + + expect(receivedSignal).toBeNull(); + jest.useRealTimers(); + }); + + it('should return null if "exit" event is not emitted before timeout', async () => { + const signal = 'testSignal'; + jest.useFakeTimers(); + const receivedSignalPromise = slidingWindowSignal.getSignal(); + jest.advanceTimersByTime(600); + eventEmitter.emit('testEvent', signal); + + const receivedSignal = await receivedSignalPromise; + expect(receivedSignal).toBeNull(); + jest.useRealTimers(); + }); + + it('should return the signal emitted on "exit" event before timeout', async () => { + jest.useFakeTimers(); + const receivedSignalPromise = slidingWindowSignal.getSignal(); + + // Emit 'exit' with a signal before timeout + const exitSignal = 'exitSignal'; + eventEmitter.emit('testEvent', exitSignal); + + // Advance timers enough to go outside the timeout window + jest.advanceTimersByTime(600); + + const receivedSignal = await receivedSignalPromise; + expect(receivedSignal).toBe(exitSignal); + + jest.useRealTimers(); + }); +}); diff --git a/packages/cli/src/runners/__tests__/task-broker.test.ts b/packages/cli/src/runners/__tests__/task-broker.test.ts index 4a226c5b98..a90bf7662c 100644 --- a/packages/cli/src/runners/__tests__/task-broker.test.ts +++ b/packages/cli/src/runners/__tests__/task-broker.test.ts @@ -110,7 +110,7 @@ describe('TaskBroker', () => { const messageCallback = jest.fn(); taskBroker.registerRunner(runner, messageCallback); - taskBroker.deregisterRunner(runnerId); + taskBroker.deregisterRunner(runnerId, new Error()); const knownRunners = taskBroker.getKnownRunners(); const runnerIds = Object.keys(knownRunners); @@ -138,7 +138,7 @@ describe('TaskBroker', () => { validFor: 1000, validUntil: createValidUntil(1000), }); - taskBroker.deregisterRunner(runnerId); + taskBroker.deregisterRunner(runnerId, new Error()); const offers = taskBroker.getPendingTaskOffers(); expect(offers).toHaveLength(1); @@ -161,10 +161,14 @@ describe('TaskBroker', () => { [taskId]: { id: taskId, requesterId: 'requester1', runnerId, taskType: 'mock' }, task2: { id: 'task2', requesterId: 'requester1', runnerId: 'runner2', taskType: 'mock' }, }); - taskBroker.deregisterRunner(runnerId); + const error = new Error('error'); + taskBroker.deregisterRunner(runnerId, error); - expect(failSpy).toBeCalledWith(taskId, `The Task Runner (${runnerId}) has disconnected`); - expect(rejectSpy).toBeCalledWith(taskId, `The Task Runner (${runnerId}) has disconnected`); + expect(failSpy).toBeCalledWith(taskId, error); + expect(rejectSpy).toBeCalledWith( + taskId, + `The Task Runner (${runnerId}) has disconnected: error`, + ); }); }); diff --git a/packages/cli/src/runners/__tests__/task-runner-process.test.ts 
b/packages/cli/src/runners/__tests__/task-runner-process.test.ts index 1bae991811..eb04e3ab8e 100644 --- a/packages/cli/src/runners/__tests__/task-runner-process.test.ts +++ b/packages/cli/src/runners/__tests__/task-runner-process.test.ts @@ -2,9 +2,10 @@ import { TaskRunnersConfig } from '@n8n/config'; import { mock } from 'jest-mock-extended'; import type { ChildProcess, SpawnOptions } from 'node:child_process'; -import { mockInstance } from '../../../test/shared/mocking'; -import type { TaskRunnerAuthService } from '../auth/task-runner-auth.service'; -import { TaskRunnerProcess } from '../task-runner-process'; +import { Logger } from '@/logging/logger.service'; +import type { TaskRunnerAuthService } from '@/runners/auth/task-runner-auth.service'; +import { TaskRunnerProcess } from '@/runners/task-runner-process'; +import { mockInstance } from '@test/mocking'; const spawnMock = jest.fn(() => mock({ @@ -19,11 +20,12 @@ const spawnMock = jest.fn(() => require('child_process').spawn = spawnMock; describe('TaskRunnerProcess', () => { + const logger = mockInstance(Logger); const runnerConfig = mockInstance(TaskRunnersConfig); runnerConfig.disabled = false; runnerConfig.mode = 'internal_childprocess'; const authService = mock(); - const taskRunnerProcess = new TaskRunnerProcess(runnerConfig, authService); + let taskRunnerProcess = new TaskRunnerProcess(logger, runnerConfig, authService); afterEach(async () => { spawnMock.mockClear(); @@ -33,17 +35,38 @@ describe('TaskRunnerProcess', () => { it('should throw if runner mode is external', () => { runnerConfig.mode = 'external'; - expect(() => new TaskRunnerProcess(runnerConfig, authService)).toThrow(); + expect(() => new TaskRunnerProcess(logger, runnerConfig, authService)).toThrow(); runnerConfig.mode = 'internal_childprocess'; }); }); describe('start', () => { - it('should propagate NODE_FUNCTION_ALLOW_BUILTIN and NODE_FUNCTION_ALLOW_EXTERNAL from env', async () => { + beforeEach(() => { + taskRunnerProcess = new TaskRunnerProcess(logger, runnerConfig, authService); + }); + + test.each(['PATH', 'NODE_FUNCTION_ALLOW_BUILTIN', 'NODE_FUNCTION_ALLOW_EXTERNAL'])( + 'should propagate %s from env as is', + async (envVar) => { + jest.spyOn(authService, 'createGrantToken').mockResolvedValue('grantToken'); + process.env[envVar] = 'custom value'; + + await taskRunnerProcess.start(); + + // @ts-expect-error The type is not correct + const options = spawnMock.mock.calls[0][2] as SpawnOptions; + expect(options.env).toEqual( + expect.objectContaining({ + [envVar]: 'custom value', + }), + ); + }, + ); + + it('should pass NODE_OPTIONS env if maxOldSpaceSize is configured', async () => { jest.spyOn(authService, 'createGrantToken').mockResolvedValue('grantToken'); - process.env.NODE_FUNCTION_ALLOW_BUILTIN = '*'; - process.env.NODE_FUNCTION_ALLOW_EXTERNAL = '*'; + runnerConfig.maxOldSpaceSize = '1024'; await taskRunnerProcess.start(); @@ -51,10 +74,20 @@ describe('TaskRunnerProcess', () => { const options = spawnMock.mock.calls[0][2] as SpawnOptions; expect(options.env).toEqual( expect.objectContaining({ - NODE_FUNCTION_ALLOW_BUILTIN: '*', - NODE_FUNCTION_ALLOW_EXTERNAL: '*', + NODE_OPTIONS: '--max-old-space-size=1024', }), ); }); + + it('should not pass NODE_OPTIONS env if maxOldSpaceSize is not configured', async () => { + jest.spyOn(authService, 'createGrantToken').mockResolvedValue('grantToken'); + runnerConfig.maxOldSpaceSize = ''; + + await taskRunnerProcess.start(); + + // @ts-expect-error The type is not correct + const options = spawnMock.mock.calls[0][2] 
as SpawnOptions; + expect(options.env).not.toHaveProperty('NODE_OPTIONS'); + }); }); }); diff --git a/packages/cli/src/runners/errors/task-runner-disconnected-error.ts b/packages/cli/src/runners/errors/task-runner-disconnected-error.ts new file mode 100644 index 0000000000..6c7c49450a --- /dev/null +++ b/packages/cli/src/runners/errors/task-runner-disconnected-error.ts @@ -0,0 +1,7 @@ +import { ApplicationError } from 'n8n-workflow'; + +export class TaskRunnerDisconnectedError extends ApplicationError { + constructor(runnerId: string) { + super(`Task runner (${runnerId}) disconnected`); + } +} diff --git a/packages/cli/src/runners/errors/task-runner-oom-error.ts b/packages/cli/src/runners/errors/task-runner-oom-error.ts new file mode 100644 index 0000000000..e52b8b4bea --- /dev/null +++ b/packages/cli/src/runners/errors/task-runner-oom-error.ts @@ -0,0 +1,31 @@ +import { ApplicationError } from 'n8n-workflow'; + +import type { TaskRunner } from '../task-broker.service'; + +export class TaskRunnerOomError extends ApplicationError { + public description: string; + + constructor(runnerId: TaskRunner['id'], isCloudDeployment: boolean) { + super(`Task runner (${runnerId}) ran out of memory.`, { level: 'error' }); + + const fixSuggestions = { + reduceItems: 'Reduce the number of items processed at a time by batching the input.', + increaseMemory: + "Increase the memory available to the task runner with 'N8N_RUNNERS_MAX_OLD_SPACE_SIZE' environment variable.", + upgradePlan: 'Upgrade your cloud plan to increase the available memory.', + }; + + const subtitle = + 'The runner executing the code ran out of memory. This usually happens when there are too many items to process. You can try the following:'; + const suggestions = isCloudDeployment + ? [fixSuggestions.reduceItems, fixSuggestions.upgradePlan] + : [fixSuggestions.reduceItems, fixSuggestions.increaseMemory]; + const suggestionsText = suggestions + .map((suggestion, index) => `${index + 1}. ${suggestion}`) + .join('
'); + + const description = `${subtitle}

${suggestionsText}`; + + this.description = description; + } +} diff --git a/packages/cli/src/runners/forward-to-logger.ts b/packages/cli/src/runners/forward-to-logger.ts new file mode 100644 index 0000000000..0bcc813225 --- /dev/null +++ b/packages/cli/src/runners/forward-to-logger.ts @@ -0,0 +1,42 @@ +import type { Logger } from 'n8n-workflow'; +import type { Readable } from 'stream'; + +/** + * Forwards stdout and stderr of a given producer to the given + * logger's info and error methods respectively. + */ +export function forwardToLogger( + logger: Logger, + producer: { + stdout?: Readable | null; + stderr?: Readable | null; + }, + prefix?: string, +) { + if (prefix) { + prefix = prefix.trimEnd(); + } + + const stringify = (data: Buffer) => { + let str = data.toString(); + + // Remove possible trailing newline (otherwise it's duplicated) + if (str.endsWith('\n')) { + str = str.slice(0, -1); + } + + return prefix ? `${prefix} ${str}` : str; + }; + + if (producer.stdout) { + producer.stdout.on('data', (data: Buffer) => { + logger.info(stringify(data)); + }); + } + + if (producer.stderr) { + producer.stderr.on('data', (data: Buffer) => { + logger.error(stringify(data)); + }); + } +} diff --git a/packages/cli/src/runners/node-process-oom-detector.ts b/packages/cli/src/runners/node-process-oom-detector.ts new file mode 100644 index 0000000000..e6debb8551 --- /dev/null +++ b/packages/cli/src/runners/node-process-oom-detector.ts @@ -0,0 +1,34 @@ +import * as a from 'node:assert/strict'; +import type { ChildProcess } from 'node:child_process'; + +/** + * Class to monitor a nodejs process and detect if it runs out of + * memory (OOMs). + */ +export class NodeProcessOomDetector { + public get didProcessOom() { + return this._didProcessOom; + } + + private _didProcessOom = false; + + constructor(processToMonitor: ChildProcess) { + this.monitorProcess(processToMonitor); + } + + private monitorProcess(processToMonitor: ChildProcess) { + a.ok(processToMonitor.stderr, "Can't monitor a process without stderr"); + + processToMonitor.stderr.on('data', this.onStderr); + + processToMonitor.once('exit', () => { + processToMonitor.stderr?.off('data', this.onStderr); + }); + } + + private onStderr = (data: Buffer) => { + if (data.includes('JavaScript heap out of memory')) { + this._didProcessOom = true; + } + }; +} diff --git a/packages/cli/src/runners/runner-ws-server.ts b/packages/cli/src/runners/runner-ws-server.ts index 38b70c97dc..59bb92ff76 100644 --- a/packages/cli/src/runners/runner-ws-server.ts +++ b/packages/cli/src/runners/runner-ws-server.ts @@ -10,6 +10,7 @@ import type { TaskRunnerServerInitResponse, } from './runner-types'; import { TaskBroker, type MessageCallback, type TaskRunner } from './task-broker.service'; +import { TaskRunnerDisconnectAnalyzer } from './task-runner-disconnect-analyzer'; function heartbeat(this: WebSocket) { this.isAlive = true; @@ -22,6 +23,7 @@ export class TaskRunnerService { constructor( private readonly logger: Logger, private readonly taskBroker: TaskBroker, + private readonly disconnectAnalyzer: TaskRunnerDisconnectAnalyzer, ) {} sendMessage(id: TaskRunner['id'], message: N8nMessage.ToRunner.All) { @@ -34,7 +36,7 @@ export class TaskRunnerService { let isConnected = false; - const onMessage = (data: WebSocket.RawData) => { + const onMessage = async (data: WebSocket.RawData) => { try { const buffer = Array.isArray(data) ? 
Buffer.concat(data) : Buffer.from(data); @@ -45,7 +47,7 @@ export class TaskRunnerService { if (!isConnected && message.type !== 'runner:info') { return; } else if (!isConnected && message.type === 'runner:info') { - this.removeConnection(id); + await this.removeConnection(id); isConnected = true; this.runnerConnections.set(id, connection); @@ -75,10 +77,10 @@ export class TaskRunnerService { }; // Makes sure to remove the session if the connection is closed - connection.once('close', () => { + connection.once('close', async () => { connection.off('pong', heartbeat); connection.off('message', onMessage); - this.removeConnection(id); + await this.removeConnection(id); }); connection.on('message', onMessage); @@ -87,10 +89,11 @@ export class TaskRunnerService { ); } - removeConnection(id: TaskRunner['id']) { + async removeConnection(id: TaskRunner['id']) { const connection = this.runnerConnections.get(id); if (connection) { - this.taskBroker.deregisterRunner(id); + const disconnectReason = await this.disconnectAnalyzer.determineDisconnectReason(id); + this.taskBroker.deregisterRunner(id, disconnectReason); connection.close(); this.runnerConnections.delete(id); } diff --git a/packages/cli/src/runners/sliding-window-signal.ts b/packages/cli/src/runners/sliding-window-signal.ts new file mode 100644 index 0000000000..5954f7bade --- /dev/null +++ b/packages/cli/src/runners/sliding-window-signal.ts @@ -0,0 +1,59 @@ +import type { TypedEmitter } from '../typed-emitter'; + +export type SlidingWindowSignalOpts = { + windowSizeInMs?: number; +}; + +/** + * A class that listens for a specific event on an emitter (signal) and + * provides a sliding window of the last event that was emitted. + */ +export class SlidingWindowSignal { + private lastSignal: TEvents[TEventName] | null = null; + + private lastSignalTime: number = 0; + + private windowSizeInMs: number; + + constructor( + private readonly eventEmitter: TypedEmitter, + private readonly eventName: TEventName, + opts: SlidingWindowSignalOpts = {}, + ) { + const { windowSizeInMs = 500 } = opts; + + this.windowSizeInMs = windowSizeInMs; + + eventEmitter.on(eventName, (signal: TEvents[TEventName]) => { + this.lastSignal = signal; + this.lastSignalTime = Date.now(); + }); + } + + /** + * If an event has been emitted within the last `windowSize` milliseconds, + * that event is returned. Otherwise it will wait for up to `windowSize` + * milliseconds for the event to be emitted. `null` is returned + * if no event is emitted within the window. 
+ */ + public async getSignal(): Promise { + const timeSinceLastEvent = Date.now() - this.lastSignalTime; + if (timeSinceLastEvent <= this.windowSizeInMs) return this.lastSignal; + + return await new Promise((resolve) => { + let timeoutTimerId: NodeJS.Timeout | null = null; + + const onExit = (signal: TEvents[TEventName]) => { + if (timeoutTimerId) clearTimeout(timeoutTimerId); + resolve(signal); + }; + + timeoutTimerId = setTimeout(() => { + this.eventEmitter.off(this.eventName, onExit); + resolve(null); + }); + + this.eventEmitter.once(this.eventName, onExit); + }); + } +} diff --git a/packages/cli/src/runners/task-broker.service.ts b/packages/cli/src/runners/task-broker.service.ts index 40ad6d6e90..d88d677725 100644 --- a/packages/cli/src/runners/task-broker.service.ts +++ b/packages/cli/src/runners/task-broker.service.ts @@ -104,7 +104,7 @@ export class TaskBroker { }); } - deregisterRunner(runnerId: string) { + deregisterRunner(runnerId: string, error: Error) { this.knownRunners.delete(runnerId); // Remove any pending offers @@ -117,8 +117,11 @@ export class TaskBroker { // Fail any tasks for (const task of this.tasks.values()) { if (task.runnerId === runnerId) { - void this.failTask(task.id, `The Task Runner (${runnerId}) has disconnected`); - this.handleRunnerReject(task.id, `The Task Runner (${runnerId}) has disconnected`); + void this.failTask(task.id, error); + this.handleRunnerReject( + task.id, + `The Task Runner (${runnerId}) has disconnected: ${error.message}`, + ); } } } @@ -352,7 +355,7 @@ export class TaskBroker { }); } - private async failTask(taskId: Task['id'], reason: string) { + private async failTask(taskId: Task['id'], error: Error) { const task = this.tasks.get(taskId); if (!task) { return; @@ -362,7 +365,7 @@ export class TaskBroker { await this.messageRequester(task.requesterId, { type: 'broker:taskerror', taskId, - error: reason, + error, }); } @@ -375,11 +378,14 @@ export class TaskBroker { } const runner = this.knownRunners.get(task.runnerId); if (!runner) { - const reason = `Cannot find runner, failed to find runner (${task.runnerId})`; - await this.failTask(taskId, reason); - throw new ApplicationError(reason, { - level: 'error', - }); + const error = new ApplicationError( + `Cannot find runner, failed to find runner (${task.runnerId})`, + { + level: 'error', + }, + ); + await this.failTask(taskId, error); + throw error; } return runner.runner; } diff --git a/packages/cli/src/runners/task-runner-disconnect-analyzer.ts b/packages/cli/src/runners/task-runner-disconnect-analyzer.ts new file mode 100644 index 0000000000..d75a1b9aad --- /dev/null +++ b/packages/cli/src/runners/task-runner-disconnect-analyzer.ts @@ -0,0 +1,60 @@ +import { TaskRunnersConfig } from '@n8n/config'; +import { Service } from 'typedi'; + +import config from '@/config'; + +import { TaskRunnerDisconnectedError } from './errors/task-runner-disconnected-error'; +import { TaskRunnerOomError } from './errors/task-runner-oom-error'; +import { SlidingWindowSignal } from './sliding-window-signal'; +import type { TaskRunner } from './task-broker.service'; +import type { ExitReason, TaskRunnerProcessEventMap } from './task-runner-process'; +import { TaskRunnerProcess } from './task-runner-process'; + +/** + * Analyzes the disconnect reason of a task runner process to provide a more + * meaningful error message to the user. 
+ */ +@Service() +export class TaskRunnerDisconnectAnalyzer { + private readonly exitReasonSignal: SlidingWindowSignal; + + constructor( + private readonly runnerConfig: TaskRunnersConfig, + private readonly taskRunnerProcess: TaskRunnerProcess, + ) { + // When the task runner process is running as a child process, there's + // no determinate time when it exits compared to when the runner disconnects + // (i.e. it's a race condition). Hence we use a sliding window to determine + // the exit reason. As long as we receive the exit signal from the task + // runner process within the window, we can determine the exit reason. + this.exitReasonSignal = new SlidingWindowSignal(this.taskRunnerProcess, 'exit', { + windowSizeInMs: 500, + }); + } + + private get isCloudDeployment() { + return config.get('deployment.type') === 'cloud'; + } + + async determineDisconnectReason(runnerId: TaskRunner['id']): Promise { + const exitCode = await this.awaitExitSignal(); + if (exitCode === 'oom') { + return new TaskRunnerOomError(runnerId, this.isCloudDeployment); + } + + return new TaskRunnerDisconnectedError(runnerId); + } + + private async awaitExitSignal(): Promise { + if (this.runnerConfig.mode === 'external') { + // If the task runner is running in external mode, we don't have + // control over the process and hence cannot determine the exit + // reason. We just return 'unknown' in this case. + return 'unknown'; + } + + const lastExitReason = await this.exitReasonSignal.getSignal(); + + return lastExitReason?.reason ?? 'unknown'; + } +} diff --git a/packages/cli/src/runners/task-runner-process.ts b/packages/cli/src/runners/task-runner-process.ts index 413b74d725..5b31a96ba3 100644 --- a/packages/cli/src/runners/task-runner-process.ts +++ b/packages/cli/src/runners/task-runner-process.ts @@ -4,16 +4,29 @@ import { spawn } from 'node:child_process'; import * as process from 'node:process'; import { Service } from 'typedi'; +import { OnShutdown } from '@/decorators/on-shutdown'; +import { Logger } from '@/logging/logger.service'; + import { TaskRunnerAuthService } from './auth/task-runner-auth.service'; -import { OnShutdown } from '../decorators/on-shutdown'; +import { forwardToLogger } from './forward-to-logger'; +import { NodeProcessOomDetector } from './node-process-oom-detector'; +import { TypedEmitter } from '../typed-emitter'; type ChildProcess = ReturnType; +export type ExitReason = 'unknown' | 'oom'; + +export type TaskRunnerProcessEventMap = { + exit: { + reason: ExitReason; + }; +}; + /** * Manages the JS task runner process as a child process */ @Service() -export class TaskRunnerProcess { +export class TaskRunnerProcess extends TypedEmitter { public get isRunning() { return this.process !== null; } @@ -36,16 +49,31 @@ export class TaskRunnerProcess { private _runPromise: Promise | null = null; + private oomDetector: NodeProcessOomDetector | null = null; + private isShuttingDown = false; + private logger: Logger; + + private readonly passthroughEnvVars = [ + 'PATH', + 'NODE_FUNCTION_ALLOW_BUILTIN', + 'NODE_FUNCTION_ALLOW_EXTERNAL', + ] as const; + constructor( + logger: Logger, private readonly runnerConfig: TaskRunnersConfig, private readonly authService: TaskRunnerAuthService, ) { + super(); + a.ok( this.runnerConfig.mode === 'internal_childprocess' || this.runnerConfig.mode === 'internal_launcher', ); + + this.logger = logger.scoped('task-runner'); } async start() { @@ -58,8 +86,7 @@ export class TaskRunnerProcess { ? 
this.startLauncher(grantToken, n8nUri) : this.startNode(grantToken, n8nUri); - this.process.stdout?.pipe(process.stdout); - this.process.stderr?.pipe(process.stderr); + forwardToLogger(this.logger, this.process, '[Task Runner]: '); this.monitorProcess(this.process); } @@ -68,26 +95,14 @@ export class TaskRunnerProcess { const startScript = require.resolve('@n8n/task-runner'); return spawn('node', [startScript], { - env: { - PATH: process.env.PATH, - N8N_RUNNERS_GRANT_TOKEN: grantToken, - N8N_RUNNERS_N8N_URI: n8nUri, - N8N_RUNNERS_MAX_PAYLOAD: this.runnerConfig.maxPayload.toString(), - NODE_FUNCTION_ALLOW_BUILTIN: process.env.NODE_FUNCTION_ALLOW_BUILTIN, - NODE_FUNCTION_ALLOW_EXTERNAL: process.env.NODE_FUNCTION_ALLOW_EXTERNAL, - }, + env: this.getProcessEnvVars(grantToken, n8nUri), }); } startLauncher(grantToken: string, n8nUri: string) { return spawn(this.runnerConfig.launcherPath, ['launch', this.runnerConfig.launcherRunner], { env: { - PATH: process.env.PATH, - N8N_RUNNERS_GRANT_TOKEN: grantToken, - N8N_RUNNERS_N8N_URI: n8nUri, - N8N_RUNNERS_MAX_PAYLOAD: this.runnerConfig.maxPayload.toString(), - NODE_FUNCTION_ALLOW_BUILTIN: process.env.NODE_FUNCTION_ALLOW_BUILTIN, - NODE_FUNCTION_ALLOW_EXTERNAL: process.env.NODE_FUNCTION_ALLOW_EXTERNAL, + ...this.getProcessEnvVars(grantToken, n8nUri), // For debug logging if enabled RUST_LOG: process.env.RUST_LOG, }, @@ -140,6 +155,8 @@ export class TaskRunnerProcess { private monitorProcess(taskRunnerProcess: ChildProcess) { this._runPromise = new Promise((resolve) => { + this.oomDetector = new NodeProcessOomDetector(taskRunnerProcess); + taskRunnerProcess.on('exit', (code) => { this.onProcessExit(code, resolve); }); @@ -148,6 +165,7 @@ export class TaskRunnerProcess { private onProcessExit(_code: number | null, resolveFn: () => void) { this.process = null; + this.emit('exit', { reason: this.oomDetector?.didProcessOom ? 
'oom' : 'unknown' }); resolveFn(); // If we are not shutting down, restart the process @@ -155,4 +173,30 @@ export class TaskRunnerProcess { setImmediate(async () => await this.start()); } } + + private getProcessEnvVars(grantToken: string, n8nUri: string) { + const envVars: Record = { + N8N_RUNNERS_GRANT_TOKEN: grantToken, + N8N_RUNNERS_N8N_URI: n8nUri, + N8N_RUNNERS_MAX_PAYLOAD: this.runnerConfig.maxPayload.toString(), + N8N_RUNNERS_MAX_CONCURRENCY: this.runnerConfig.maxConcurrency.toString(), + ...this.getPassthroughEnvVars(), + }; + + if (this.runnerConfig.maxOldSpaceSize) { + envVars.NODE_OPTIONS = `--max-old-space-size=${this.runnerConfig.maxOldSpaceSize}`; + } + + return envVars; + } + + private getPassthroughEnvVars() { + return this.passthroughEnvVars.reduce>((env, key) => { + if (process.env[key]) { + env[key] = process.env[key]; + } + + return env; + }, {}); + } } diff --git a/packages/cli/src/scaling/__tests__/job-processor.service.test.ts b/packages/cli/src/scaling/__tests__/job-processor.service.test.ts new file mode 100644 index 0000000000..6a3fa5caa4 --- /dev/null +++ b/packages/cli/src/scaling/__tests__/job-processor.service.test.ts @@ -0,0 +1,21 @@ +import { mock } from 'jest-mock-extended'; + +import type { ExecutionRepository } from '@/databases/repositories/execution.repository'; +import type { IExecutionResponse } from '@/interfaces'; + +import { JobProcessor } from '../job-processor'; +import type { Job } from '../scaling.types'; + +describe('JobProcessor', () => { + it('should refrain from processing a crashed execution', async () => { + const executionRepository = mock(); + executionRepository.findSingleExecution.mockResolvedValue( + mock({ status: 'crashed' }), + ); + const jobProcessor = new JobProcessor(mock(), executionRepository, mock(), mock(), mock()); + + const result = await jobProcessor.processJob(mock()); + + expect(result).toEqual({ success: false }); + }); +}); diff --git a/packages/cli/src/scaling/job-processor.ts b/packages/cli/src/scaling/job-processor.ts index 9a531d3039..6bf2524304 100644 --- a/packages/cli/src/scaling/job-processor.ts +++ b/packages/cli/src/scaling/job-processor.ts @@ -58,6 +58,13 @@ export class JobProcessor { ); } + /** + * Bull's implicit retry mechanism and n8n's execution recovery mechanism may + * cause a crashed execution to be enqueued. We refrain from processing it, + * until we have reworked both mechanisms to prevent this scenario. 
+ */ + if (execution.status === 'crashed') return { success: false }; + const workflowId = execution.workflowData.id; this.logger.info(`Worker started execution ${executionId} (job ${job.id})`, { diff --git a/packages/cli/src/security-audit/risk-reporters/credentials-risk-reporter.ts b/packages/cli/src/security-audit/risk-reporters/credentials-risk-reporter.ts index ab7873e808..0c8d84211e 100644 --- a/packages/cli/src/security-audit/risk-reporters/credentials-risk-reporter.ts +++ b/packages/cli/src/security-audit/risk-reporters/credentials-risk-reporter.ts @@ -1,7 +1,7 @@ +import { SecurityConfig } from '@n8n/config'; import type { IWorkflowBase } from 'n8n-workflow'; import { Service } from 'typedi'; -import config from '@/config'; import type { WorkflowEntity } from '@/databases/entities/workflow-entity'; import { CredentialsRepository } from '@/databases/repositories/credentials.repository'; import { ExecutionDataRepository } from '@/databases/repositories/execution-data.repository'; @@ -15,10 +15,11 @@ export class CredentialsRiskReporter implements RiskReporter { private readonly credentialsRepository: CredentialsRepository, private readonly executionRepository: ExecutionRepository, private readonly executionDataRepository: ExecutionDataRepository, + private readonly securityConfig: SecurityConfig, ) {} async report(workflows: WorkflowEntity[]) { - const days = config.getEnv('security.audit.daysAbandonedWorkflow'); + const days = this.securityConfig.daysAbandonedWorkflow; const allExistingCreds = await this.getAllExistingCreds(); const { credsInAnyUse, credsInActiveUse } = await this.getAllCredsInUse(workflows); diff --git a/packages/cli/src/security-audit/security-audit.service.ts b/packages/cli/src/security-audit/security-audit.service.ts index 19582450c4..97b5424a19 100644 --- a/packages/cli/src/security-audit/security-audit.service.ts +++ b/packages/cli/src/security-audit/security-audit.service.ts @@ -1,3 +1,4 @@ +import { SecurityConfig } from '@n8n/config'; import Container, { Service } from 'typedi'; import config from '@/config'; @@ -8,7 +9,10 @@ import { toReportTitle } from '@/security-audit/utils'; @Service() export class SecurityAuditService { - constructor(private readonly workflowRepository: WorkflowRepository) {} + constructor( + private readonly workflowRepository: WorkflowRepository, + private readonly securityConfig: SecurityConfig, + ) {} private reporters: { [name: string]: RiskReporter; @@ -19,7 +23,7 @@ export class SecurityAuditService { await this.initReporters(categories); - const daysFromEnv = config.getEnv('security.audit.daysAbandonedWorkflow'); + const daysFromEnv = this.securityConfig.daysAbandonedWorkflow; if (daysAbandonedWorkflow) { config.set('security.audit.daysAbandonedWorkflow', daysAbandonedWorkflow); diff --git a/packages/cli/src/services/frontend.service.ts b/packages/cli/src/services/frontend.service.ts index ef3ed5a5f9..6cad4a4f24 100644 --- a/packages/cli/src/services/frontend.service.ts +++ b/packages/cli/src/services/frontend.service.ts @@ -1,5 +1,5 @@ import type { FrontendSettings, ITelemetrySettings } from '@n8n/api-types'; -import { GlobalConfig } from '@n8n/config'; +import { GlobalConfig, FrontendConfig, SecurityConfig } from '@n8n/config'; import { createWriteStream } from 'fs'; import { mkdir } from 'fs/promises'; import uniq from 'lodash/uniq'; @@ -46,6 +46,8 @@ export class FrontendService { private readonly mailer: UserManagementMailer, private readonly instanceSettings: InstanceSettings, private readonly urlService: UrlService, 
+ private readonly securityConfig: SecurityConfig, + private readonly frontendConfig: FrontendConfig, ) { loadNodesAndCredentials.addPostProcessor(async () => await this.generateTypes()); void this.generateTypes(); @@ -201,7 +203,7 @@ export class FrontendService { hideUsagePage: config.getEnv('hideUsagePage'), license: { consumerId: 'unknown', - environment: config.getEnv('license.tenantId') === 1 ? 'production' : 'staging', + environment: this.globalConfig.license.tenantId === 1 ? 'production' : 'staging', }, variables: { limit: 0, @@ -225,8 +227,9 @@ export class FrontendService { maxCount: config.getEnv('executions.pruneDataMaxCount'), }, security: { - blockFileAccessToN8nFiles: config.getEnv('security.blockFileAccessToN8nFiles'), + blockFileAccessToN8nFiles: this.securityConfig.blockFileAccessToN8nFiles, }, + betaFeatures: this.frontendConfig.betaFeatures, }; } diff --git a/packages/cli/src/workflow-runner.ts b/packages/cli/src/workflow-runner.ts index 4dd5e08714..02e0b94afd 100644 --- a/packages/cli/src/workflow-runner.ts +++ b/packages/cli/src/workflow-runner.ts @@ -14,7 +14,6 @@ import type { IWorkflowExecutionDataProcess, } from 'n8n-workflow'; import { - ApplicationError, ErrorReporterProxy as ErrorReporter, ExecutionCancelledError, Workflow, @@ -381,17 +380,6 @@ export class WorkflowRunner { let job: Job; let hooks: WorkflowHooks; try { - // check to help diagnose PAY-2100 - if ( - data.executionData?.executionData?.nodeExecutionStack?.length === 0 && - config.getEnv('deployment.type') === 'internal' - ) { - await this.executionRepository.setRunning(executionId); // set `startedAt` so we display it correctly in UI - throw new ApplicationError('Execution to enqueue has empty node execution stack', { - extra: { executionData: data.executionData }, - }); - } - job = await this.scalingService.addJob(jobData, { priority: realtime ? 
50 : 100 }); hooks = WorkflowExecuteAdditionalData.getWorkflowHooksWorkerMain( diff --git a/packages/cli/test/integration/security-audit/credentials-risk-reporter.test.ts b/packages/cli/test/integration/security-audit/credentials-risk-reporter.test.ts index 4513beb6bb..b5b4c122df 100644 --- a/packages/cli/test/integration/security-audit/credentials-risk-reporter.test.ts +++ b/packages/cli/test/integration/security-audit/credentials-risk-reporter.test.ts @@ -1,7 +1,8 @@ +import type { SecurityConfig } from '@n8n/config'; +import { mock } from 'jest-mock-extended'; import Container from 'typedi'; import { v4 as uuid } from 'uuid'; -import config from '@/config'; import { CredentialsRepository } from '@/databases/repositories/credentials.repository'; import { ExecutionDataRepository } from '@/databases/repositories/execution-data.repository'; import { ExecutionRepository } from '@/databases/repositories/execution.repository'; @@ -15,10 +16,15 @@ import * as testDb from '../shared/test-db'; let securityAuditService: SecurityAuditService; +const securityConfig = mock({ daysAbandonedWorkflow: 90 }); + beforeAll(async () => { await testDb.init(); - securityAuditService = new SecurityAuditService(Container.get(WorkflowRepository)); + securityAuditService = new SecurityAuditService( + Container.get(WorkflowRepository), + securityConfig, + ); }); beforeEach(async () => { @@ -154,7 +160,7 @@ test('should report credential in not recently executed workflow', async () => { const workflow = await Container.get(WorkflowRepository).save(workflowDetails); const date = new Date(); - date.setDate(date.getDate() - config.getEnv('security.audit.daysAbandonedWorkflow') - 1); + date.setDate(date.getDate() - securityConfig.daysAbandonedWorkflow - 1); const savedExecution = await Container.get(ExecutionRepository).save({ finished: true, @@ -223,7 +229,7 @@ test('should not report credentials in recently executed workflow', async () => const workflow = await Container.get(WorkflowRepository).save(workflowDetails); const date = new Date(); - date.setDate(date.getDate() - config.getEnv('security.audit.daysAbandonedWorkflow') + 1); + date.setDate(date.getDate() - securityConfig.daysAbandonedWorkflow + 1); const savedExecution = await Container.get(ExecutionRepository).save({ finished: true, diff --git a/packages/cli/test/integration/security-audit/database-risk-reporter.test.ts b/packages/cli/test/integration/security-audit/database-risk-reporter.test.ts index d519f97a23..3aef57396b 100644 --- a/packages/cli/test/integration/security-audit/database-risk-reporter.test.ts +++ b/packages/cli/test/integration/security-audit/database-risk-reporter.test.ts @@ -1,3 +1,4 @@ +import { mock } from 'jest-mock-extended'; import Container from 'typedi'; import { v4 as uuid } from 'uuid'; @@ -18,7 +19,7 @@ let securityAuditService: SecurityAuditService; beforeAll(async () => { await testDb.init(); - securityAuditService = new SecurityAuditService(Container.get(WorkflowRepository)); + securityAuditService = new SecurityAuditService(Container.get(WorkflowRepository), mock()); }); beforeEach(async () => { diff --git a/packages/cli/test/integration/security-audit/filesystem-risk-reporter.test.ts b/packages/cli/test/integration/security-audit/filesystem-risk-reporter.test.ts index 34bcb83b49..ceb306935f 100644 --- a/packages/cli/test/integration/security-audit/filesystem-risk-reporter.test.ts +++ b/packages/cli/test/integration/security-audit/filesystem-risk-reporter.test.ts @@ -1,3 +1,4 @@ +import { mock } from 'jest-mock-extended'; 
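For reference, the `SecurityConfig` consumed above (via `Container.get(SecurityConfig)` and constructor injection) is expected to mirror the security entries removed from `packages/cli/src/config/schema.ts`. A minimal sketch, assuming the same decorator pattern as other `@n8n/config` classes; the actual definition is not part of this patch.

```ts
import { Config, Env } from '../decorators';

@Config
export class SecurityConfig {
	/** Restrict file access to these directories; multiple directories separated by ";". */
	@Env('N8N_RESTRICT_FILE_ACCESS_TO')
	restrictFileAccessTo: string = '';

	/** Block access to files in the ".n8n" directory, the static cache dir, and user-defined config files. */
	@Env('N8N_BLOCK_FILE_ACCESS_TO_N8N_FILES')
	blockFileAccessToN8nFiles: boolean = true;

	/** Days for a workflow to be considered abandoned if not executed. */
	@Env('N8N_SECURITY_AUDIT_DAYS_ABANDONED_WORKFLOW')
	daysAbandonedWorkflow: number = 90;
}
```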
import Container from 'typedi'; import { v4 as uuid } from 'uuid'; @@ -13,7 +14,7 @@ let securityAuditService: SecurityAuditService; beforeAll(async () => { await testDb.init(); - securityAuditService = new SecurityAuditService(Container.get(WorkflowRepository)); + securityAuditService = new SecurityAuditService(Container.get(WorkflowRepository), mock()); }); beforeEach(async () => { diff --git a/packages/cli/test/integration/security-audit/instance-risk-reporter.test.ts b/packages/cli/test/integration/security-audit/instance-risk-reporter.test.ts index 4f355cbcbc..928667b518 100644 --- a/packages/cli/test/integration/security-audit/instance-risk-reporter.test.ts +++ b/packages/cli/test/integration/security-audit/instance-risk-reporter.test.ts @@ -1,3 +1,4 @@ +import { mock } from 'jest-mock-extended'; import { NodeConnectionType } from 'n8n-workflow'; import Container from 'typedi'; import { v4 as uuid } from 'uuid'; @@ -23,7 +24,7 @@ let securityAuditService: SecurityAuditService; beforeAll(async () => { await testDb.init(); - securityAuditService = new SecurityAuditService(Container.get(WorkflowRepository)); + securityAuditService = new SecurityAuditService(Container.get(WorkflowRepository), mock()); simulateUpToDateInstance(); }); diff --git a/packages/cli/test/integration/security-audit/nodes-risk-reporter.test.ts b/packages/cli/test/integration/security-audit/nodes-risk-reporter.test.ts index 133a574d40..c1fb198b69 100644 --- a/packages/cli/test/integration/security-audit/nodes-risk-reporter.test.ts +++ b/packages/cli/test/integration/security-audit/nodes-risk-reporter.test.ts @@ -1,3 +1,4 @@ +import { mock } from 'jest-mock-extended'; import { Container } from 'typedi'; import { v4 as uuid } from 'uuid'; @@ -24,7 +25,7 @@ let securityAuditService: SecurityAuditService; beforeAll(async () => { await testDb.init(); - securityAuditService = new SecurityAuditService(Container.get(WorkflowRepository)); + securityAuditService = new SecurityAuditService(Container.get(WorkflowRepository), mock()); }); beforeEach(async () => { diff --git a/packages/core/src/NodeExecuteFunctions.ts b/packages/core/src/NodeExecuteFunctions.ts index 529d6fc94b..10c44efced 100644 --- a/packages/core/src/NodeExecuteFunctions.ts +++ b/packages/core/src/NodeExecuteFunctions.ts @@ -110,6 +110,7 @@ import type { DeduplicationItemTypes, ICheckProcessedContextData, AiEvent, + ISupplyDataFunctions, } from 'n8n-workflow'; import { NodeConnectionType, @@ -2803,12 +2804,14 @@ async function getInputConnectionData( runExecutionData: IRunExecutionData, runIndex: number, connectionInputData: INodeExecutionData[], + inputData: ITaskDataConnections, additionalData: IWorkflowExecuteAdditionalData, - executeData: IExecuteData | undefined, + executeData: IExecuteData, mode: WorkflowExecuteMode, closeFunctions: CloseFunction[], inputName: NodeConnectionType, itemIndex: number, + abortSignal?: AbortSignal, ): Promise { const node = this.getNode(); const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion); @@ -2856,74 +2859,20 @@ async function getInputConnectionData( connectedNode.typeVersion, ); - // TODO: create a new context object here based on the type of `connectedNode`, and avoid using `Object.assign` on context objects - // https://linear.app/n8n/issue/CAT-269 - const context = Object.assign({}, this); - - context.getNodeParameter = ( - parameterName: string, - itemIndex: number, - fallbackValue?: any, - options?: IGetNodeParameterOptions, - ) => { - return getNodeParameter( - workflow, - 
runExecutionData, - runIndex, - connectionInputData, - connectedNode, - parameterName, - itemIndex, - mode, - getAdditionalKeys(additionalData, mode, runExecutionData), - executeData, - fallbackValue, - { ...(options || {}), contextNode: node }, - ) as any; - }; - - // TODO: Check what else should be overwritten - context.getNode = () => { - return deepCopy(connectedNode); - }; - - context.getCredentials = async (key: string) => { - try { - return await getCredentials( - workflow, - connectedNode, - key, - additionalData, - mode, - executeData, - runExecutionData, - runIndex, - connectionInputData, - itemIndex, - ); - } catch (error) { - // Display the error on the node which is causing it - - let currentNodeRunIndex = 0; - if (runExecutionData.resultData.runData.hasOwnProperty(node.name)) { - currentNodeRunIndex = runExecutionData.resultData.runData[node.name].length; - } - - await addExecutionDataFunctions( - 'input', - connectedNode.name, - error, - runExecutionData, - inputName, - additionalData, - node.name, - runIndex, - currentNodeRunIndex, - ); - - throw error; - } - }; + // eslint-disable-next-line @typescript-eslint/no-use-before-define + const context = getSupplyDataFunctions( + workflow, + runExecutionData, + runIndex, + connectionInputData, + inputData, + connectedNode, + additionalData, + executeData, + mode, + closeFunctions, + abortSignal, + ); if (!nodeType.supplyData) { if (nodeType.description.outputs.includes(NodeConnectionType.AiTool)) { @@ -3767,9 +3716,7 @@ export function getExecuteFunctions( runExecutionData, runIndex, itemIndex, - // TODO: revert this back to `node.name` when we stop using `IExecuteFunctions` as the context object in AI nodes. - // https://linear.app/n8n/issue/CAT-269 - this.getNode().name, + node.name, connectionInputData, mode, getAdditionalKeys(additionalData, mode, runExecutionData), @@ -3812,12 +3759,14 @@ export function getExecuteFunctions( runExecutionData, runIndex, connectionInputData, + inputData, additionalData, executeData, mode, closeFunctions, inputName, itemIndex, + abortSignal, ); }, @@ -4031,7 +3980,7 @@ export function getExecuteFunctions( constructExecutionMetaData, }, nodeHelpers: getNodeHelperFunctions(additionalData, workflow.id), - logAiEvent: async (eventName: AiEvent, msg: string) => { + logAiEvent: (eventName: AiEvent, msg: string) => { return additionalData.logAiEvent(eventName, { executionId: additionalData.executionId ?? 
'unsaved-execution', nodeName: node.name, @@ -4059,6 +4008,270 @@ export function getExecuteFunctions( })(workflow, runExecutionData, connectionInputData, inputData, node) as IExecuteFunctions; } +export function getSupplyDataFunctions( + workflow: Workflow, + runExecutionData: IRunExecutionData, + runIndex: number, + connectionInputData: INodeExecutionData[], + inputData: ITaskDataConnections, + node: INode, + additionalData: IWorkflowExecuteAdditionalData, + executeData: IExecuteData, + mode: WorkflowExecuteMode, + closeFunctions: CloseFunction[], + abortSignal?: AbortSignal, +): ISupplyDataFunctions { + return { + ...getCommonWorkflowFunctions(workflow, node, additionalData), + ...executionCancellationFunctions(abortSignal), + getMode: () => mode, + getCredentials: async (type, itemIndex) => + await getCredentials( + workflow, + node, + type, + additionalData, + mode, + executeData, + runExecutionData, + runIndex, + connectionInputData, + itemIndex, + ), + continueOnFail: () => continueOnFail(node), + evaluateExpression: (expression: string, itemIndex: number) => + workflow.expression.resolveSimpleParameterValue( + `=${expression}`, + {}, + runExecutionData, + runIndex, + itemIndex, + node.name, + connectionInputData, + mode, + getAdditionalKeys(additionalData, mode, runExecutionData), + executeData, + ), + executeWorkflow: async ( + workflowInfo: IExecuteWorkflowInfo, + inputData?: INodeExecutionData[], + parentCallbackManager?: CallbackManager, + ) => + await additionalData + .executeWorkflow(workflowInfo, additionalData, { + parentWorkflowId: workflow.id?.toString(), + inputData, + parentWorkflowSettings: workflow.settings, + node, + parentCallbackManager, + }) + .then( + async (result) => + await Container.get(BinaryDataService).duplicateBinaryData( + workflow.id, + additionalData.executionId!, + result, + ), + ), + getNodeOutputs() { + const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion); + return NodeHelpers.getNodeOutputs(workflow, node, nodeType.description).map((output) => { + if (typeof output === 'string') { + return { + type: output, + }; + } + return output; + }); + }, + async getInputConnectionData( + inputName: NodeConnectionType, + itemIndex: number, + ): Promise { + return await getInputConnectionData.call( + this, + workflow, + runExecutionData, + runIndex, + connectionInputData, + inputData, + additionalData, + executeData, + mode, + closeFunctions, + inputName, + itemIndex, + abortSignal, + ); + }, + getInputData: (inputIndex = 0, inputName = 'main') => { + if (!inputData.hasOwnProperty(inputName)) { + // Return empty array because else it would throw error when nothing is connected to input + return []; + } + + // TODO: Check if nodeType has input with that index defined + if (inputData[inputName].length < inputIndex) { + throw new ApplicationError('Could not get input with given index', { + extra: { inputIndex, inputName }, + }); + } + + if (inputData[inputName][inputIndex] === null) { + throw new ApplicationError('Value of input was not set', { + extra: { inputIndex, inputName }, + }); + } + + return inputData[inputName][inputIndex]; + }, + getNodeParameter: (( + parameterName: string, + itemIndex: number, + fallbackValue?: any, + options?: IGetNodeParameterOptions, + ) => + getNodeParameter( + workflow, + runExecutionData, + runIndex, + connectionInputData, + node, + parameterName, + itemIndex, + mode, + getAdditionalKeys(additionalData, mode, runExecutionData), + executeData, + fallbackValue, + options, + )) as 
ISupplyDataFunctions['getNodeParameter'], + getWorkflowDataProxy: (itemIndex: number) => + new WorkflowDataProxy( + workflow, + runExecutionData, + runIndex, + itemIndex, + node.name, + connectionInputData, + {}, + mode, + getAdditionalKeys(additionalData, mode, runExecutionData), + executeData, + ).getDataProxy(), + sendMessageToUI(...args: any[]): void { + if (mode !== 'manual') { + return; + } + try { + if (additionalData.sendDataToUI) { + args = args.map((arg) => { + // prevent invalid dates from being logged as null + if (arg.isLuxonDateTime && arg.invalidReason) return { ...arg }; + + // log valid dates in human readable format, as in browser + if (arg.isLuxonDateTime) return new Date(arg.ts).toString(); + if (arg instanceof Date) return arg.toString(); + + return arg; + }); + + additionalData.sendDataToUI('sendConsoleMessage', { + source: `[Node: "${node.name}"]`, + messages: args, + }); + } + } catch (error) { + Logger.warn(`There was a problem sending message to UI: ${error.message}`); + } + }, + logAiEvent: (eventName: AiEvent, msg: string) => + additionalData.logAiEvent(eventName, { + executionId: additionalData.executionId ?? 'unsaved-execution', + nodeName: node.name, + workflowName: workflow.name ?? 'Unnamed workflow', + nodeType: node.type, + workflowId: workflow.id ?? 'unsaved-workflow', + msg, + }), + addInputData( + connectionType: NodeConnectionType, + data: INodeExecutionData[][], + ): { index: number } { + const nodeName = this.getNode().name; + let currentNodeRunIndex = 0; + if (runExecutionData.resultData.runData.hasOwnProperty(nodeName)) { + currentNodeRunIndex = runExecutionData.resultData.runData[nodeName].length; + } + + addExecutionDataFunctions( + 'input', + this.getNode().name, + data, + runExecutionData, + connectionType, + additionalData, + node.name, + runIndex, + currentNodeRunIndex, + ).catch((error) => { + Logger.warn( + `There was a problem logging input data of node "${this.getNode().name}": ${ + error.message + }`, + ); + }); + + return { index: currentNodeRunIndex }; + }, + addOutputData( + connectionType: NodeConnectionType, + currentNodeRunIndex: number, + data: INodeExecutionData[][], + ): void { + addExecutionDataFunctions( + 'output', + this.getNode().name, + data, + runExecutionData, + connectionType, + additionalData, + node.name, + runIndex, + currentNodeRunIndex, + ).catch((error) => { + Logger.warn( + `There was a problem logging output data of node "${this.getNode().name}": ${ + error.message + }`, + ); + }); + }, + helpers: { + createDeferredPromise, + copyInputItems, + ...getRequestHelperFunctions( + workflow, + node, + additionalData, + runExecutionData, + connectionInputData, + ), + ...getSSHTunnelFunctions(), + ...getFileSystemHelperFunctions(node), + ...getBinaryHelperFunctions(additionalData, workflow.id), + ...getCheckProcessedHelperFunctions(workflow, node), + assertBinaryData: (itemIndex, propertyName) => + assertBinaryData(inputData, node, itemIndex, propertyName, 0), + getBinaryDataBuffer: async (itemIndex, propertyName) => + await getBinaryDataBuffer(inputData, itemIndex, propertyName, 0), + + returnJsonArray, + normalizeItems, + constructExecutionMetaData, + }, + }; +} + /** * Returns the execute functions regular nodes have access to when single-function is defined. 
*/ @@ -4201,7 +4414,7 @@ export function getExecuteSingleFunctions( getBinaryDataBuffer: async (propertyName, inputIndex = 0) => await getBinaryDataBuffer(inputData, itemIndex, propertyName, inputIndex), }, - logAiEvent: async (eventName: AiEvent, msg: string) => { + logAiEvent: (eventName: AiEvent, msg: string) => { return additionalData.logAiEvent(eventName, { executionId: additionalData.executionId ?? 'unsaved-execution', nodeName: node.name, @@ -4431,6 +4644,7 @@ export function getExecuteWebhookFunctions( runExecutionData, runIndex, connectionInputData, + {} as ITaskDataConnections, additionalData, executeData, mode, diff --git a/packages/core/src/PartialExecutionUtils/__tests__/getSourceDataGroups.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/getSourceDataGroups.test.ts index dffbe310d1..d8c3485d65 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/getSourceDataGroups.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/getSourceDataGroups.test.ts @@ -52,15 +52,15 @@ describe('getSourceDataGroups', () => { expect(groups).toHaveLength(2); const group1 = groups[0]; - expect(group1).toHaveLength(2); - expect(group1[0]).toEqual({ + expect(group1.connections).toHaveLength(2); + expect(group1.connections[0]).toEqual({ from: source1, outputIndex: 0, type: NodeConnectionType.Main, inputIndex: 0, to: node, }); - expect(group1[1]).toEqual({ + expect(group1.connections[1]).toEqual({ from: source3, outputIndex: 0, type: NodeConnectionType.Main, @@ -69,8 +69,8 @@ describe('getSourceDataGroups', () => { }); const group2 = groups[1]; - expect(group2).toHaveLength(1); - expect(group2[0]).toEqual({ + expect(group2.connections).toHaveLength(1); + expect(group2.connections[0]).toEqual({ from: source2, outputIndex: 0, type: NodeConnectionType.Main, @@ -116,15 +116,15 @@ describe('getSourceDataGroups', () => { expect(groups).toHaveLength(2); const group1 = groups[0]; - expect(group1).toHaveLength(2); - expect(group1[0]).toEqual({ + expect(group1.connections).toHaveLength(2); + expect(group1.connections[0]).toEqual({ from: source1, outputIndex: 0, type: NodeConnectionType.Main, inputIndex: 0, to: node, }); - expect(group1[1]).toEqual({ + expect(group1.connections[1]).toEqual({ from: source3, outputIndex: 0, type: NodeConnectionType.Main, @@ -133,8 +133,8 @@ describe('getSourceDataGroups', () => { }); const group2 = groups[1]; - expect(group2).toHaveLength(1); - expect(group2[0]).toEqual({ + expect(group2.connections).toHaveLength(1); + expect(group2.connections[0]).toEqual({ from: source2, outputIndex: 0, type: NodeConnectionType.Main, @@ -152,7 +152,7 @@ describe('getSourceDataGroups', () => { //┌───────┐1 │ └────┘ //│source3├────┘ //└───────┘ - it('groups sources into possibly complete sets if all of them have data', () => { + it('groups sources into one complete set with 2 connections and one incomplete set with 1 connection', () => { // ARRANGE const source1 = createNodeData({ name: 'source1' }); const source2 = createNodeData({ name: 'source2' }); @@ -176,23 +176,341 @@ describe('getSourceDataGroups', () => { const groups = getSourceDataGroups(graph, node, runData, pinnedData); // ASSERT - expect(groups).toHaveLength(1); + const completeGroups = groups.filter((g) => g.complete); + { + expect(completeGroups).toHaveLength(1); + const group1 = completeGroups[0]; + expect(group1.connections).toHaveLength(2); + expect(group1.connections[0]).toEqual({ + from: source2, + outputIndex: 0, + type: NodeConnectionType.Main, + inputIndex: 0, + to: node, + }); + 
expect(group1.connections[1]).toEqual({ + from: source3, + outputIndex: 0, + type: NodeConnectionType.Main, + inputIndex: 1, + to: node, + }); + } - const group1 = groups[0]; - expect(group1).toHaveLength(2); - expect(group1[0]).toEqual({ - from: source2, + const incompleteGroups = groups.filter((g) => !g.complete); + { + expect(incompleteGroups).toHaveLength(1); + const group1 = incompleteGroups[0]; + expect(group1.connections).toHaveLength(1); + expect(group1.connections[0]).toEqual({ + from: source1, + outputIndex: 0, + type: NodeConnectionType.Main, + inputIndex: 0, + to: node, + }); + } + }); + + //┌───────┐0 + //│source1├───────┐ + //└───────┘ │ + // │ + //┌───────┐1 │ + //│source2├───────┤ ┌────┐ + //└───────┘ └────► │ + // │node│ + //┌───────┐1 ┌────► │ + //│source3├───────┤ └────┘ + //└───────┘ │ + // │ + //┌───────┐0 │ + //│source4├───────┘ + //└───────┘ + it('groups sources into one complete set with 2 connections and one incomplete set with 2 connection', () => { + // ARRANGE + const source1 = createNodeData({ name: 'source1' }); + const source2 = createNodeData({ name: 'source2' }); + const source3 = createNodeData({ name: 'source3' }); + const source4 = createNodeData({ name: 'source4' }); + const node = createNodeData({ name: 'node' }); + + const graph = new DirectedGraph() + .addNodes(source1, source2, source3, source4, node) + .addConnections( + { from: source1, to: node, inputIndex: 0 }, + { from: source2, to: node, inputIndex: 0 }, + { from: source3, to: node, inputIndex: 1 }, + { from: source4, to: node, inputIndex: 1 }, + ); + const runData: IRunData = { + [source2.name]: [toITaskData([{ data: { value: 1 } }])], + [source3.name]: [toITaskData([{ data: { value: 1 } }])], + }; + const pinnedData: IPinData = {}; + + // ACT + const groups = getSourceDataGroups(graph, node, runData, pinnedData); + + // ASSERT + const completeGroups = groups.filter((g) => g.complete); + { + expect(completeGroups).toHaveLength(1); + const group1 = completeGroups[0]; + expect(group1.connections).toHaveLength(2); + expect(group1.connections[0]).toEqual({ + from: source2, + outputIndex: 0, + type: NodeConnectionType.Main, + inputIndex: 0, + to: node, + }); + expect(group1.connections[1]).toEqual({ + from: source3, + outputIndex: 0, + type: NodeConnectionType.Main, + inputIndex: 1, + to: node, + }); + } + + const incompleteGroups = groups.filter((g) => !g.complete); + { + expect(incompleteGroups).toHaveLength(1); + const group1 = incompleteGroups[0]; + expect(group1.connections).toHaveLength(2); + expect(group1.connections[0]).toEqual({ + from: source1, + outputIndex: 0, + type: NodeConnectionType.Main, + inputIndex: 0, + to: node, + }); + expect(group1.connections[1]).toEqual({ + from: source4, + outputIndex: 0, + type: NodeConnectionType.Main, + inputIndex: 1, + to: node, + }); + } + }); + + // ┌───────┐1 + // │source1├───────┐ + // └───────┘ │ + // │ + // ┌───────┐0 │ + // │source2├───────┤ ┌────┐ + // └───────┘ └────► │ + // │node│ + // ┌───────┐0 ┌────► │ + // │source3├───────┘ └────┘ + // └───────┘ + it('groups sources into two incomplete sets, one with 1 connection without and one with 2 connections one with data and one without', () => { + // ARRANGE + const source1 = createNodeData({ name: 'source1' }); + const source2 = createNodeData({ name: 'source2' }); + const source3 = createNodeData({ name: 'source3' }); + const node = createNodeData({ name: 'node' }); + + const graph = new DirectedGraph() + .addNodes(source1, source2, source3, node) + .addConnections( + { from: source1, to: node, 
inputIndex: 0 }, + { from: source2, to: node, inputIndex: 0 }, + { from: source3, to: node, inputIndex: 1 }, + ); + const runData: IRunData = { + [source1.name]: [toITaskData([{ data: { node: 'source1' } }])], + }; + const pinnedData: IPinData = {}; + + // ACT + const groups = getSourceDataGroups(graph, node, runData, pinnedData); + + // ASSERT + const completeGroups = groups.filter((g) => g.complete); + expect(completeGroups).toHaveLength(0); + + const incompleteGroups = groups.filter((g) => !g.complete); + expect(incompleteGroups).toHaveLength(2); + + const group1 = incompleteGroups[0]; + expect(group1.connections).toHaveLength(2); + expect(group1.connections[0]).toEqual({ + from: source1, outputIndex: 0, type: NodeConnectionType.Main, inputIndex: 0, to: node, }); - expect(group1[1]).toEqual({ + expect(group1.connections[1]).toEqual({ from: source3, outputIndex: 0, type: NodeConnectionType.Main, inputIndex: 1, to: node, }); + + const group2 = incompleteGroups[1]; + expect(group2.connections).toHaveLength(1); + expect(group2.connections[0]).toEqual({ + from: source2, + outputIndex: 0, + type: NodeConnectionType.Main, + inputIndex: 0, + to: node, + }); + }); + + // ┌─────┐1 ►► + // ┌─►│Node1┼──┐ ┌─────┐ + // ┌───────┐1│ └─────┘ └──►│ │ + // │Trigger├─┤ │Node3│ + // └───────┘ │ ┌─────┐0 ┌──►│ │ + // └─►│Node2├──┘ └─────┘ + // └─────┘ + test('return an incomplete group when there is no data on input 2', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const node1 = createNodeData({ name: 'node1' }); + const node2 = createNodeData({ name: 'node2' }); + const node3 = createNodeData({ name: 'node3' }); + const graph = new DirectedGraph() + .addNodes(trigger, node1, node2, node3) + .addConnections( + { from: trigger, to: node1 }, + { from: trigger, to: node2 }, + { from: node1, to: node3, inputIndex: 0 }, + { from: node2, to: node3, inputIndex: 1 }, + ); + const runData: IRunData = { + [trigger.name]: [toITaskData([{ data: { nodeName: 'trigger' } }])], + [node1.name]: [toITaskData([{ data: { nodeName: 'node1' } }])], + }; + const pinData: IPinData = {}; + + // ACT + const groups = getSourceDataGroups(graph, node3, runData, pinData); + + // ASSERT + expect(groups).toHaveLength(1); + const group1 = groups[0]; + expect(group1.connections).toHaveLength(2); + expect(group1.complete).toEqual(false); + }); + + // ┌─────┐0 ►► + // ┌─►│Node1┼──┐ ┌─────┐ + // ┌───────┐1│ └─────┘ └──►│ │ + // │Trigger├─┤ │Node3│ + // └───────┘ │ ┌─────┐1 ┌──►│ │ + // └─►│Node2├──┘ └─────┘ + // └─────┘ + test('return an incomplete group when there is no data on input 1', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const node1 = createNodeData({ name: 'node1' }); + const node2 = createNodeData({ name: 'node2' }); + const node3 = createNodeData({ name: 'node3' }); + const graph = new DirectedGraph() + .addNodes(trigger, node1, node2, node3) + .addConnections( + { from: trigger, to: node1 }, + { from: trigger, to: node2 }, + { from: node1, to: node3, inputIndex: 0 }, + { from: node2, to: node3, inputIndex: 1 }, + ); + const runData: IRunData = { + [trigger.name]: [toITaskData([{ data: { nodeName: 'trigger' } }])], + [node2.name]: [toITaskData([{ data: { nodeName: 'node2' } }])], + }; + const pinData: IPinData = {}; + + // ACT + const groups = getSourceDataGroups(graph, node3, runData, pinData); + + // ASSERT + expect(groups).toHaveLength(1); + const group1 = groups[0]; + expect(group1.connections).toHaveLength(2); + expect(group1.complete).toEqual(false); + }); + + 
it('terminates with negative input indexes', () => { + // ARRANGE + const source1 = createNodeData({ name: 'source1' }); + const node = createNodeData({ name: 'node' }); + + const graph = new DirectedGraph() + .addNodes(source1, node) + .addConnections({ from: source1, to: node, inputIndex: -1 }); + const runData: IRunData = { + [source1.name]: [toITaskData([{ data: { node: source1.name } }])], + }; + const pinnedData: IPinData = {}; + + // ACT + const groups = getSourceDataGroups(graph, node, runData, pinnedData); + + // ASSERT + expect(groups).toHaveLength(1); + const group1 = groups[0]; + expect(group1.connections).toHaveLength(1); + expect(group1.connections[0]).toEqual({ + from: source1, + outputIndex: 0, + type: NodeConnectionType.Main, + inputIndex: -1, + to: node, + }); + }); + + it('terminates inputs with missing connections', () => { + // ARRANGE + const source1 = createNodeData({ name: 'source1' }); + const node = createNodeData({ name: 'node' }); + + const graph = new DirectedGraph() + .addNodes(source1, node) + .addConnections({ from: source1, to: node, inputIndex: 1 }); + const runData: IRunData = { + [source1.name]: [toITaskData([{ data: { node: source1.name } }])], + }; + const pinnedData: IPinData = {}; + + // ACT + const groups = getSourceDataGroups(graph, node, runData, pinnedData); + + // ASSERT + expect(groups).toHaveLength(1); + const group1 = groups[0]; + expect(group1.connections).toHaveLength(1); + expect(group1.connections[0]).toEqual({ + from: source1, + outputIndex: 0, + type: NodeConnectionType.Main, + inputIndex: 1, + to: node, + }); + }); + + it('terminates if the graph has no connections', () => { + // ARRANGE + const source1 = createNodeData({ name: 'source1' }); + const node = createNodeData({ name: 'node' }); + + const graph = new DirectedGraph().addNodes(source1, node); + const runData: IRunData = { + [source1.name]: [toITaskData([{ data: { node: source1.name } }])], + }; + const pinnedData: IPinData = {}; + + // ACT + const groups = getSourceDataGroups(graph, node, runData, pinnedData); + + // ASSERT + expect(groups).toHaveLength(0); }); }); diff --git a/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts b/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts index 8bae766912..b78b9df135 100644 --- a/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts +++ b/packages/core/src/PartialExecutionUtils/__tests__/recreateNodeExecutionStack.test.ts @@ -10,9 +10,19 @@ // PD denotes that the node has pinned data import { AssertionError } from 'assert'; -import { type IPinData, type IRunData } from 'n8n-workflow'; +import type { + INodeExecutionData, + ISourceData, + IWaitingForExecution, + IWaitingForExecutionSource, +} from 'n8n-workflow'; +import { NodeConnectionType, type IPinData, type IRunData } from 'n8n-workflow'; -import { recreateNodeExecutionStack } from '@/PartialExecutionUtils/recreateNodeExecutionStack'; +import { + addWaitingExecution, + addWaitingExecutionSource, + recreateNodeExecutionStack, +} from '@/PartialExecutionUtils/recreateNodeExecutionStack'; import { createNodeData, toITaskData } from './helpers'; import { DirectedGraph } from '../DirectedGraph'; @@ -41,7 +51,7 @@ describe('recreateNodeExecutionStack', () => { // ACT const { nodeExecutionStack, waitingExecution, waitingExecutionSource } = - recreateNodeExecutionStack(workflow, startNodes, node, runData, pinData); + recreateNodeExecutionStack(workflow, startNodes, runData, pinData); // 
ASSERT expect(nodeExecutionStack).toHaveLength(1); @@ -62,17 +72,8 @@ describe('recreateNodeExecutionStack', () => { }, }, ]); - - expect(waitingExecution).toEqual({ node: { '0': { main: [[{ json: { value: 1 } }]] } } }); - expect(waitingExecutionSource).toEqual({ - node: { - '0': { - main: [ - { previousNode: 'trigger', previousNodeOutput: undefined, previousNodeRun: undefined }, - ], - }, - }, - }); + expect(waitingExecution).toEqual({}); + expect(waitingExecutionSource).toEqual({}); }); // ►► @@ -93,7 +94,7 @@ describe('recreateNodeExecutionStack', () => { // ACT const { nodeExecutionStack, waitingExecution, waitingExecutionSource } = - recreateNodeExecutionStack(workflow, startNodes, node, runData, pinData); + recreateNodeExecutionStack(workflow, startNodes, runData, pinData); // ASSERT expect(nodeExecutionStack).toHaveLength(1); @@ -105,8 +106,8 @@ describe('recreateNodeExecutionStack', () => { }, ]); - expect(waitingExecution).toEqual({ node: { '0': { main: [null] } } }); - expect(waitingExecutionSource).toEqual({ node: { '0': { main: [null] } } }); + expect(waitingExecution).toEqual({}); + expect(waitingExecutionSource).toEqual({}); }); // PinData ►► @@ -129,7 +130,7 @@ describe('recreateNodeExecutionStack', () => { // ACT const { nodeExecutionStack, waitingExecution, waitingExecutionSource } = - recreateNodeExecutionStack(workflow, startNodes, node, runData, pinData); + recreateNodeExecutionStack(workflow, startNodes, runData, pinData); // ASSERT expect(nodeExecutionStack).toHaveLength(1); @@ -151,8 +152,8 @@ describe('recreateNodeExecutionStack', () => { }, ]); - expect(waitingExecution).toEqual({ node: { '0': { main: [null] } } }); - expect(waitingExecutionSource).toEqual({ node: { '0': { main: [null] } } }); + expect(waitingExecution).toEqual({}); + expect(waitingExecutionSource).toEqual({}); }); // XX ►► @@ -176,9 +177,9 @@ describe('recreateNodeExecutionStack', () => { const pinData = {}; // ACT & ASSERT - expect(() => - recreateNodeExecutionStack(graph, startNodes, node2, runData, pinData), - ).toThrowError(AssertionError); + expect(() => recreateNodeExecutionStack(graph, startNodes, runData, pinData)).toThrowError( + AssertionError, + ); }); // ►► @@ -214,10 +215,9 @@ describe('recreateNodeExecutionStack', () => { // ACT const { nodeExecutionStack, waitingExecution, waitingExecutionSource } = - recreateNodeExecutionStack(graph, startNodes, node3, runData, pinData); + recreateNodeExecutionStack(graph, startNodes, runData, pinData); // ASSERT - expect(nodeExecutionStack).toEqual([ { data: { main: [[{ json: { value: 1 } }]] }, @@ -251,19 +251,8 @@ describe('recreateNodeExecutionStack', () => { }, ]); - expect(waitingExecution).toEqual({ - node3: { '0': { main: [[{ json: { value: 1 } }], [{ json: { value: 1 } }]] } }, - }); - expect(waitingExecutionSource).toEqual({ - node3: { - '0': { - main: [ - { previousNode: 'node1', previousNodeOutput: undefined, previousNodeRun: undefined }, - { previousNode: 'node2', previousNodeOutput: undefined, previousNodeRun: undefined }, - ], - }, - }, - }); + expect(waitingExecution).toEqual({}); + expect(waitingExecutionSource).toEqual({}); }); // ┌─────┐1 ►► @@ -299,7 +288,7 @@ describe('recreateNodeExecutionStack', () => { // ACT const { nodeExecutionStack, waitingExecution, waitingExecutionSource } = - recreateNodeExecutionStack(graph, startNodes, node3, runData, pinData); + recreateNodeExecutionStack(graph, startNodes, runData, pinData); // ASSERT expect(nodeExecutionStack).toHaveLength(1); @@ -314,22 +303,515 @@ 
describe('recreateNodeExecutionStack', () => { }, }); - expect(waitingExecution).toEqual({ - node3: { - '0': { - main: [[{ json: { value: 1 } }]], + expect(waitingExecution).toEqual({}); + expect(waitingExecutionSource).toEqual({}); + }); + + // ┌─────┐ ┌─────┐ + // ┌──►node1┼────┬──────► │ + // │ └─────┘ │ │merge│ + // │ │ ┌───► │ + // ├─────────────┘ │ └─────┘ + // │ │ + //┌───────┐ │ ┌─────┐ │ + //│trigger├───┴────►node2├─────┘ + //└───────┘ └─────┘ + describe('multiple inputs', () => { + // ARRANGE + const trigger = createNodeData({ name: 'trigger' }); + const node1 = createNodeData({ name: 'node1' }); + const node2 = createNodeData({ name: 'node2' }); + const merge = createNodeData({ name: 'merge' }); + const graph = new DirectedGraph() + .addNodes(trigger, node1, node2, merge) + .addConnections( + { from: trigger, to: node1 }, + { from: trigger, to: node2 }, + { from: trigger, to: merge, inputIndex: 0 }, + { from: node1, to: merge, inputIndex: 0 }, + { from: node2, to: merge, inputIndex: 1 }, + ); + + test('only the trigger has run data', () => { + // ARRANGE + const runData: IRunData = { + [trigger.name]: [toITaskData([{ data: { node: 'trigger' } }])], + }; + const pinData: IPinData = {}; + const startNodes = new Set([node1, node2, merge]); + + // ACT + const { nodeExecutionStack, waitingExecution, waitingExecutionSource } = + recreateNodeExecutionStack(graph, startNodes, runData, pinData); + + // ASSERT + expect(nodeExecutionStack).toHaveLength(2); + expect(nodeExecutionStack[0]).toEqual({ + node: node1, + data: { main: [[{ json: { node: 'trigger' } }]] }, + source: { main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }] }, + }); + expect(nodeExecutionStack[1]).toEqual({ + node: node2, + data: { main: [[{ json: { node: 'trigger' } }]] }, + source: { main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }] }, + }); + + expect(waitingExecution).toEqual({ + [merge.name]: { + '0': { + main: [[{ json: { node: 'trigger' } }]], + }, }, - }, + }); + expect(waitingExecutionSource).toEqual({ + [merge.name]: { + '0': { + main: [ + { + previousNode: 'trigger', + previousNodeOutput: 0, + previousNodeRun: 0, + }, + ], + }, + }, + }); }); - expect(waitingExecutionSource).toEqual({ - node3: { - '0': { + + test('the trigger and node1 have run data', () => { + // ARRANGE + const runData: IRunData = { + [trigger.name]: [toITaskData([{ data: { node: 'trigger' } }])], + [node1.name]: [toITaskData([{ data: { node: 'node1' } }])], + }; + const pinData: IPinData = {}; + const startNodes = new Set([node2, merge]); + + // ACT + const { nodeExecutionStack, waitingExecution, waitingExecutionSource } = + recreateNodeExecutionStack(graph, startNodes, runData, pinData); + + // ASSERT + expect(nodeExecutionStack).toHaveLength(2); + expect(nodeExecutionStack[0]).toEqual({ + node: node2, + data: { main: [[{ json: { node: 'trigger' } }]] }, + source: { main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }] }, + }); + expect(nodeExecutionStack[1]).toEqual({ + node: merge, + data: { main: [[{ json: { node: 'trigger' } }]] }, + source: { + main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }], + }, + }); + + expect(waitingExecution).toEqual({ + [merge.name]: { + '0': { + main: [[{ json: { node: 'node1' } }]], + }, + }, + }); + expect(waitingExecutionSource).toEqual({ + [merge.name]: { + '0': { + main: [ + { + previousNode: 'node1', + previousNodeOutput: 0, + previousNodeRun: 0, + }, + ], + }, + }, + }); + }); + + test('the 
trigger and node2 have run data', () => { + // ARRANGE + const runData: IRunData = { + [trigger.name]: [toITaskData([{ data: { node: 'trigger' } }])], + [node2.name]: [toITaskData([{ data: { node: 'node2' } }])], + }; + const pinData: IPinData = {}; + const startNodes = new Set([node1, merge]); + + // ACT + const { nodeExecutionStack, waitingExecution, waitingExecutionSource } = + recreateNodeExecutionStack(graph, startNodes, runData, pinData); + + // ASSERT + expect(nodeExecutionStack).toHaveLength(2); + expect(nodeExecutionStack[0]).toEqual({ + node: node1, + data: { main: [[{ json: { node: 'trigger' } }]] }, + source: { main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }] }, + }); + expect(nodeExecutionStack[1]).toEqual({ + node: merge, + data: { main: [[{ json: { node: 'trigger' } }], [{ json: { node: 'node2' } }]] }, + source: { main: [ - { previousNode: 'node1', previousNodeOutput: undefined, previousNodeRun: undefined }, - { previousNode: 'node2', previousNodeOutput: 1, previousNodeRun: undefined }, + { previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }, + { previousNode: 'node2', previousNodeOutput: 0, previousNodeRun: 0 }, ], }, - }, + }); + + expect(waitingExecution).toEqual({}); + expect(waitingExecutionSource).toEqual({}); + }); + + test('the trigger, node1 and node2 have run data', () => { + // ARRANGE + const runData: IRunData = { + [trigger.name]: [toITaskData([{ data: { node: 'trigger' } }])], + [node1.name]: [toITaskData([{ data: { node: 'node1' } }])], + [node2.name]: [toITaskData([{ data: { node: 'node2' } }])], + }; + const pinData: IPinData = {}; + const startNodes = new Set([merge]); + + // ACT + const { nodeExecutionStack, waitingExecution, waitingExecutionSource } = + recreateNodeExecutionStack(graph, startNodes, runData, pinData); + + // ASSERT + expect(nodeExecutionStack).toHaveLength(2); + expect(nodeExecutionStack[0]).toEqual({ + node: merge, + data: { main: [[{ json: { node: 'node1' } }], [{ json: { node: 'node2' } }]] }, + source: { + main: [ + { previousNode: 'node1', previousNodeOutput: 0, previousNodeRun: 0 }, + { previousNode: 'node2', previousNodeOutput: 0, previousNodeRun: 0 }, + ], + }, + }); + expect(nodeExecutionStack[1]).toEqual({ + node: merge, + data: { main: [[{ json: { node: 'trigger' } }]] }, + source: { + main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }], + }, + }); + + expect(waitingExecution).toEqual({}); + expect(waitingExecutionSource).toEqual({}); }); }); }); + +describe('addWaitingExecution', () => { + test('allow adding data partially', () => { + const waitingExecution: IWaitingForExecution = {}; + const nodeName1 = 'node 1'; + const nodeName2 = 'node 2'; + const executionData: INodeExecutionData[] = [{ json: { item: 1 } }, { json: { item: 2 } }]; + + // adding the data for the second input index first + { + addWaitingExecution( + waitingExecution, + nodeName1, + 1, // runIndex + NodeConnectionType.Main, + 1, // inputIndex + executionData, + ); + expect(waitingExecution).toEqual({ + [nodeName1]: { + // runIndex + 1: { + [NodeConnectionType.Main]: [undefined, executionData], + }, + }, + }); + } + + // adding the data for the first input + { + addWaitingExecution( + waitingExecution, + nodeName1, + 1, // runIndex + NodeConnectionType.Main, + 0, // inputIndex + executionData, + ); + expect(waitingExecution).toEqual({ + [nodeName1]: { + // runIndex + 1: { + [NodeConnectionType.Main]: [executionData, executionData], + }, + }, + }); + } + + // adding data for another node 
connection type + { + addWaitingExecution( + waitingExecution, + nodeName1, + 1, // runIndex + NodeConnectionType.AiMemory, + 0, // inputIndex + executionData, + ); + expect(waitingExecution).toEqual({ + [nodeName1]: { + // runIndex + 1: { + [NodeConnectionType.Main]: [executionData, executionData], + [NodeConnectionType.AiMemory]: [executionData], + }, + }, + }); + } + + // adding data for another run + { + addWaitingExecution( + waitingExecution, + nodeName1, + 0, // runIndex + NodeConnectionType.AiChain, + 0, // inputIndex + executionData, + ); + expect(waitingExecution).toEqual({ + [nodeName1]: { + // runIndex + 0: { + [NodeConnectionType.AiChain]: [executionData], + }, + 1: { + [NodeConnectionType.Main]: [executionData, executionData], + [NodeConnectionType.AiMemory]: [executionData], + }, + }, + }); + } + + // adding data for another node + { + addWaitingExecution( + waitingExecution, + nodeName2, + 0, // runIndex + NodeConnectionType.Main, + 2, // inputIndex + executionData, + ); + expect(waitingExecution).toEqual({ + [nodeName1]: { + // runIndex + 0: { + [NodeConnectionType.AiChain]: [executionData], + }, + 1: { + [NodeConnectionType.Main]: [executionData, executionData], + [NodeConnectionType.AiMemory]: [executionData], + }, + }, + [nodeName2]: { + // runIndex + 0: { + [NodeConnectionType.Main]: [undefined, undefined, executionData], + }, + }, + }); + } + + // allow adding null + { + addWaitingExecution( + waitingExecution, + nodeName2, + 0, // runIndex + NodeConnectionType.Main, + 0, // inputIndex + null, + ); + expect(waitingExecution).toEqual({ + [nodeName2]: { + // runIndex + 0: { + [NodeConnectionType.Main]: [null, undefined, executionData], + }, + }, + [nodeName1]: { + // runIndex + 0: { + [NodeConnectionType.AiChain]: [executionData], + }, + 1: { + [NodeConnectionType.Main]: [executionData, executionData], + [NodeConnectionType.AiMemory]: [executionData], + }, + }, + }); + } + }); +}); + +describe('addWaitingExecutionSource', () => { + test('allow adding data partially', () => { + const waitingExecutionSource: IWaitingForExecutionSource = {}; + const nodeName1 = 'node 1'; + const nodeName2 = 'node 2'; + const sourceData: ISourceData = { + previousNode: 'node 0', + previousNodeRun: 0, + previousNodeOutput: 0, + }; + + // adding the data for the second input index first + { + addWaitingExecutionSource( + waitingExecutionSource, + nodeName1, + 1, // runIndex + NodeConnectionType.Main, + 1, // inputIndex + sourceData, + ); + expect(waitingExecutionSource).toEqual({ + [nodeName1]: { + // runIndex + 1: { + [NodeConnectionType.Main]: [undefined, sourceData], + }, + }, + }); + } + + // adding the data for the first input + { + addWaitingExecutionSource( + waitingExecutionSource, + nodeName1, + 1, // runIndex + NodeConnectionType.Main, + 0, // inputIndex + sourceData, + ); + expect(waitingExecutionSource).toEqual({ + [nodeName1]: { + // runIndex + 1: { + [NodeConnectionType.Main]: [sourceData, sourceData], + }, + }, + }); + } + + // adding data for another node connection type + { + addWaitingExecutionSource( + waitingExecutionSource, + nodeName1, + 1, // runIndex + NodeConnectionType.AiMemory, + 0, // inputIndex + sourceData, + ); + expect(waitingExecutionSource).toEqual({ + [nodeName1]: { + // runIndex + 1: { + [NodeConnectionType.Main]: [sourceData, sourceData], + [NodeConnectionType.AiMemory]: [sourceData], + }, + }, + }); + } + + // adding data for another run + { + addWaitingExecutionSource( + waitingExecutionSource, + nodeName1, + 0, // runIndex + NodeConnectionType.AiChain, 
+				0, // inputIndex
+				sourceData,
+			);
+			expect(waitingExecutionSource).toEqual({
+				[nodeName1]: {
+					// runIndex
+					0: {
+						[NodeConnectionType.AiChain]: [sourceData],
+					},
+					1: {
+						[NodeConnectionType.Main]: [sourceData, sourceData],
+						[NodeConnectionType.AiMemory]: [sourceData],
+					},
+				},
+			});
+		}
+
+		// adding data for another node
+		{
+			addWaitingExecutionSource(
+				waitingExecutionSource,
+				nodeName2,
+				0, // runIndex
+				NodeConnectionType.Main,
+				2, // inputIndex
+				sourceData,
+			);
+			expect(waitingExecutionSource).toEqual({
+				[nodeName1]: {
+					// runIndex
+					0: {
+						[NodeConnectionType.AiChain]: [sourceData],
+					},
+					1: {
+						[NodeConnectionType.Main]: [sourceData, sourceData],
+						[NodeConnectionType.AiMemory]: [sourceData],
+					},
+				},
+				[nodeName2]: {
+					// runIndex
+					0: {
+						[NodeConnectionType.Main]: [undefined, undefined, sourceData],
+					},
+				},
+			});
+		}
+
+		// allow adding null
+		{
+			addWaitingExecutionSource(
+				waitingExecutionSource,
+				nodeName2,
+				0, // runIndex
+				NodeConnectionType.Main,
+				0, // inputIndex
+				null,
+			);
+			expect(waitingExecutionSource).toEqual({
+				[nodeName1]: {
+					// runIndex
+					0: {
+						[NodeConnectionType.AiChain]: [sourceData],
+					},
+					1: {
+						[NodeConnectionType.Main]: [sourceData, sourceData],
+						[NodeConnectionType.AiMemory]: [sourceData],
+					},
+				},
+				[nodeName2]: {
+					// runIndex
+					0: {
+						[NodeConnectionType.Main]: [null, undefined, sourceData],
+					},
+				},
+			});
+		}
+	});
+});
diff --git a/packages/core/src/PartialExecutionUtils/getIncomingData.ts b/packages/core/src/PartialExecutionUtils/getIncomingData.ts
index 2f5f22cd35..acac8ad22d 100644
--- a/packages/core/src/PartialExecutionUtils/getIncomingData.ts
+++ b/packages/core/src/PartialExecutionUtils/getIncomingData.ts
@@ -20,3 +20,26 @@ export function getIncomingData(
 	return runData[nodeName][runIndex].data[connectionType][outputIndex];
 }
+
+function getRunIndexLength(runData: IRunData, nodeName: string) {
+	return runData[nodeName]?.length ?? 0;
+}
+
+export function getIncomingDataFromAnyRun(
+	runData: IRunData,
+	nodeName: string,
+	connectionType: NodeConnectionType,
+	outputIndex: number,
+): { data: INodeExecutionData[]; runIndex: number } | undefined {
+	const maxRunIndexes = getRunIndexLength(runData, nodeName);
+
+	for (let runIndex = 0; runIndex < maxRunIndexes; runIndex++) {
+		const data = getIncomingData(runData, nodeName, runIndex, connectionType, outputIndex);
+
+		if (data && data.length > 0) {
+			return { data, runIndex };
+		}
+	}
+
+	return undefined;
+}
diff --git a/packages/core/src/PartialExecutionUtils/getSourceDataGroups.ts b/packages/core/src/PartialExecutionUtils/getSourceDataGroups.ts
index 58f8f2f745..d9a9940816 100644
--- a/packages/core/src/PartialExecutionUtils/getSourceDataGroups.ts
+++ b/packages/core/src/PartialExecutionUtils/getSourceDataGroups.ts
@@ -13,6 +13,25 @@ function sortByInputIndexThenByName(
 	}
 }
+
+type SourceConnectionGroup = {
+	/**
+	 * This is true if all connections have data. If any connection does not have
+	 * data, it is false.
+	 *
+	 * This is useful for deciding whether a node should be put on the execution
+	 * stack or the waiting stack in the execution engine.
+	 */
+	complete: boolean;
+	connections: GraphConnection[];
+};
+
+function newGroup(): SourceConnectionGroup {
+	return {
+		complete: true,
+		connections: [],
+	};
+}
+
 /**
  * Groups incoming connections to the node. The groups contain one connection
  * per input, if possible, with run data or pinned data.
@@ -58,55 +77,87 @@ function sortByInputIndexThenByName(
  *
  * Since `source1` has no run data and no pinned data it's skipped in favor of
  * `source2` for the for input.
+ * It will become its own group that is marked as `complete: false`.
  *
- * So this will return 1 group:
- * 1. source2 and source3
+ * So this will return 2 groups:
+ * 1. source2 and source3, `complete: true`
+ * 2. source1, `complete: false`
  */
 export function getSourceDataGroups(
 	graph: DirectedGraph,
 	node: INode,
 	runData: IRunData,
 	pinnedData: IPinData,
-): GraphConnection[][] {
+): SourceConnectionGroup[] {
 	const connections = graph.getConnections({ to: node });
 
 	const sortedConnectionsWithData = [];
+	const sortedConnectionsWithoutData = [];
 
 	for (const connection of connections) {
 		const hasData = runData[connection.from.name] || pinnedData[connection.from.name];
 
 		if (hasData) {
			sortedConnectionsWithData.push(connection);
+		} else {
+			sortedConnectionsWithoutData.push(connection);
 		}
 	}
 
+	if (sortedConnectionsWithData.length === 0 && sortedConnectionsWithoutData.length === 0) {
+		return [];
+	}
+
 	sortedConnectionsWithData.sort(sortByInputIndexThenByName);
+	sortedConnectionsWithoutData.sort(sortByInputIndexThenByName);
 
-	const groups: GraphConnection[][] = [];
-	let currentGroup: GraphConnection[] = [];
-	let currentInputIndex = -1;
+	const groups: SourceConnectionGroup[] = [];
+	let currentGroup = newGroup();
+	let currentInputIndex =
+		Math.min(
+			...sortedConnectionsWithData.map((c) => c.inputIndex),
+			...sortedConnectionsWithoutData.map((c) => c.inputIndex),
+		) - 1;
+
+	while (sortedConnectionsWithData.length > 0 || sortedConnectionsWithoutData.length > 0) {
+		currentInputIndex++;
 
-	while (sortedConnectionsWithData.length > 0) {
 		const connectionWithDataIndex = sortedConnectionsWithData.findIndex(
 			// eslint-disable-next-line @typescript-eslint/no-loop-func
-			(c) => c.inputIndex > currentInputIndex,
+			(c) => c.inputIndex === currentInputIndex,
 		);
 
-		const connection: GraphConnection | undefined =
-			sortedConnectionsWithData[connectionWithDataIndex];
-		if (connection === undefined) {
-			groups.push(currentGroup);
-			currentGroup = [];
-			currentInputIndex = -1;
+		if (connectionWithDataIndex >= 0) {
+			const connection = sortedConnectionsWithData[connectionWithDataIndex];
+
+			currentGroup.connections.push(connection);
+
+			sortedConnectionsWithData.splice(connectionWithDataIndex, 1);
 			continue;
 		}
 
-		currentInputIndex = connection.inputIndex;
-		currentGroup.push(connection);
+		const connectionWithoutDataIndex = sortedConnectionsWithoutData.findIndex(
+			// eslint-disable-next-line @typescript-eslint/no-loop-func
+			(c) => c.inputIndex === currentInputIndex,
+		);
 
-		if (connectionWithDataIndex >= 0) {
-			sortedConnectionsWithData.splice(connectionWithDataIndex, 1);
+		if (connectionWithoutDataIndex >= 0) {
+			const connection = sortedConnectionsWithoutData[connectionWithoutDataIndex];
+
+			currentGroup.connections.push(connection);
+			currentGroup.complete = false;
+
+			sortedConnectionsWithoutData.splice(connectionWithoutDataIndex, 1);
+			continue;
 		}
+
+		groups.push(currentGroup);
+		currentGroup = newGroup();
+		currentInputIndex =
+			Math.min(
+				...sortedConnectionsWithData.map((c) => c.inputIndex),
+				...sortedConnectionsWithoutData.map((c) => c.inputIndex),
+			) - 1;
 	}
 
 	groups.push(currentGroup);
diff --git a/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts b/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts
index 534969f960..542d4b8fbd 100644
---
a/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts +++ b/packages/core/src/PartialExecutionUtils/recreateNodeExecutionStack.ts @@ -13,9 +13,47 @@ import { } from 'n8n-workflow'; import type { DirectedGraph } from './DirectedGraph'; -import { getIncomingData } from './getIncomingData'; +import { getIncomingDataFromAnyRun } from './getIncomingData'; import { getSourceDataGroups } from './getSourceDataGroups'; +export function addWaitingExecution( + waitingExecution: IWaitingForExecution, + nodeName: string, + runIndex: number, + inputType: NodeConnectionType, + inputIndex: number, + executionData: INodeExecutionData[] | null, +) { + const waitingExecutionObject = waitingExecution[nodeName] ?? {}; + const taskDataConnections = waitingExecutionObject[runIndex] ?? {}; + const executionDataList = taskDataConnections[inputType] ?? []; + + executionDataList[inputIndex] = executionData; + + taskDataConnections[inputType] = executionDataList; + waitingExecutionObject[runIndex] = taskDataConnections; + waitingExecution[nodeName] = waitingExecutionObject; +} + +export function addWaitingExecutionSource( + waitingExecutionSource: IWaitingForExecutionSource, + nodeName: string, + runIndex: number, + inputType: NodeConnectionType, + inputIndex: number, + sourceData: ISourceData | null, +) { + const waitingExecutionSourceObject = waitingExecutionSource[nodeName] ?? {}; + const taskDataConnectionsSource = waitingExecutionSourceObject[runIndex] ?? {}; + const sourceDataList = taskDataConnectionsSource[inputType] ?? []; + + sourceDataList[inputIndex] = sourceData; + + taskDataConnectionsSource[inputType] = sourceDataList; + waitingExecutionSourceObject[runIndex] = taskDataConnectionsSource; + waitingExecutionSource[nodeName] = waitingExecutionSourceObject; +} + /** * Recreates the node execution stack, waiting executions and waiting * execution sources from a directed graph, start nodes, the destination node, @@ -33,7 +71,6 @@ import { getSourceDataGroups } from './getSourceDataGroups'; export function recreateNodeExecutionStack( graph: DirectedGraph, startNodes: Set, - destinationNode: INode, runData: IRunData, pinData: IPinData, ): { @@ -59,9 +96,6 @@ export function recreateNodeExecutionStack( const waitingExecution: IWaitingForExecution = {}; const waitingExecutionSource: IWaitingForExecutionSource = {}; - // TODO: Don't hard code this! - const runIndex = 0; - for (const startNode of startNodes) { const incomingStartNodeConnections = graph .getDirectParentConnections(startNode) @@ -84,89 +118,94 @@ export function recreateNodeExecutionStack( const sourceDataSets = getSourceDataGroups(graph, startNode, runData, pinData); for (const sourceData of sourceDataSets) { - incomingData = []; + if (sourceData.complete) { + // All incoming connections have data, so let's put the node on the + // stack! + incomingData = []; - incomingSourceData = { main: [] }; + incomingSourceData = { main: [] }; - for (const incomingConnection of sourceData) { - const node = incomingConnection.from; + for (const incomingConnection of sourceData.connections) { + let runIndex = 0; + const sourceNode = incomingConnection.from; - if (pinData[node.name]) { - incomingData.push(pinData[node.name]); - } else { - a.ok( - runData[node.name], - `Start node(${incomingConnection.to.name}) has an incoming connection with no run or pinned data. This is not supported. The connection in question is "${node.name}->${startNode.name}". 
Are you sure the start nodes come from the "findStartNodes" function?`, - ); + if (pinData[sourceNode.name]) { + incomingData.push(pinData[sourceNode.name]); + } else { + a.ok( + runData[sourceNode.name], + `Start node(${incomingConnection.to.name}) has an incoming connection with no run or pinned data. This is not supported. The connection in question is "${sourceNode.name}->${startNode.name}". Are you sure the start nodes come from the "findStartNodes" function?`, + ); - const nodeIncomingData = getIncomingData( + const nodeIncomingData = getIncomingDataFromAnyRun( + runData, + sourceNode.name, + incomingConnection.type, + incomingConnection.outputIndex, + ); + + if (nodeIncomingData) { + runIndex = nodeIncomingData.runIndex; + incomingData.push(nodeIncomingData.data); + } + } + + incomingSourceData.main.push({ + previousNode: incomingConnection.from.name, + previousNodeOutput: incomingConnection.outputIndex, + previousNodeRun: runIndex, + }); + } + + const executeData: IExecuteData = { + node: startNode, + data: { main: incomingData }, + source: incomingSourceData, + }; + + nodeExecutionStack.push(executeData); + } else { + const nodeName = startNode.name; + const nextRunIndex = waitingExecution[nodeName] + ? Object.keys(waitingExecution[nodeName]).length + : 0; + + for (const incomingConnection of sourceData.connections) { + const sourceNode = incomingConnection.from; + const maybeNodeIncomingData = getIncomingDataFromAnyRun( runData, - node.name, - runIndex, + sourceNode.name, incomingConnection.type, incomingConnection.outputIndex, ); + const nodeIncomingData = maybeNodeIncomingData?.data ?? null; if (nodeIncomingData) { - incomingData.push(nodeIncomingData); + addWaitingExecution( + waitingExecution, + nodeName, + nextRunIndex, + incomingConnection.type, + incomingConnection.inputIndex, + nodeIncomingData, + ); + + addWaitingExecutionSource( + waitingExecutionSource, + nodeName, + nextRunIndex, + incomingConnection.type, + incomingConnection.inputIndex, + nodeIncomingData + ? { + previousNode: incomingConnection.from.name, + previousNodeRun: nextRunIndex, + previousNodeOutput: incomingConnection.outputIndex, + } + : null, + ); } } - - incomingSourceData.main.push({ - previousNode: incomingConnection.from.name, - previousNodeOutput: incomingConnection.outputIndex, - previousNodeRun: 0, - }); - } - - const executeData: IExecuteData = { - node: startNode, - data: { main: incomingData }, - source: incomingSourceData, - }; - - nodeExecutionStack.push(executeData); - } - } - - // TODO: Do we need this? 
- if (destinationNode) { - const destinationNodeName = destinationNode.name; - // Check if the destinationNode has to be added as waiting - // because some input data is already fully available - const incomingDestinationNodeConnections = graph - .getDirectParentConnections(destinationNode) - .filter((c) => c.type === NodeConnectionType.Main); - if (incomingDestinationNodeConnections !== undefined) { - for (const connection of incomingDestinationNodeConnections) { - if (waitingExecution[destinationNodeName] === undefined) { - waitingExecution[destinationNodeName] = {}; - waitingExecutionSource[destinationNodeName] = {}; - } - if (waitingExecution[destinationNodeName][runIndex] === undefined) { - waitingExecution[destinationNodeName][runIndex] = {}; - waitingExecutionSource[destinationNodeName][runIndex] = {}; - } - if (waitingExecution[destinationNodeName][runIndex][connection.type] === undefined) { - waitingExecution[destinationNodeName][runIndex][connection.type] = []; - waitingExecutionSource[destinationNodeName][runIndex][connection.type] = []; - } - - if (runData[connection.from.name] !== undefined) { - // Input data exists so add as waiting - // incomingDataDestination.push(runData[connection.node!][runIndex].data![connection.type][connection.index]); - waitingExecution[destinationNodeName][runIndex][connection.type].push( - runData[connection.from.name][runIndex].data![connection.type][connection.inputIndex], - ); - waitingExecutionSource[destinationNodeName][runIndex][connection.type].push({ - previousNode: connection.from.name, - previousNodeOutput: connection.inputIndex || undefined, - previousNodeRun: runIndex || undefined, - } as ISourceData); - } else { - waitingExecution[destinationNodeName][runIndex][connection.type].push(null); - waitingExecutionSource[destinationNodeName][runIndex][connection.type].push(null); - } } } } diff --git a/packages/core/src/WorkflowExecute.ts b/packages/core/src/WorkflowExecute.ts index 1d9aee76c6..23b88abd5b 100644 --- a/packages/core/src/WorkflowExecute.ts +++ b/packages/core/src/WorkflowExecute.ts @@ -363,7 +363,7 @@ export class WorkflowExecute { // 7. Recreate Execution Stack const { nodeExecutionStack, waitingExecution, waitingExecutionSource } = - recreateNodeExecutionStack(subgraph, startNodes, destination, runData, pinData ?? {}); + recreateNodeExecutionStack(subgraph, new Set(startNodes), runData, pinData ?? {}); // 8. 
Execute this.status = 'running'; diff --git a/packages/editor-ui/src/App.vue b/packages/editor-ui/src/App.vue index 77d371855c..47d2f1268b 100644 --- a/packages/editor-ui/src/App.vue +++ b/packages/editor-ui/src/App.vue @@ -8,7 +8,6 @@ import Modals from '@/components/Modals.vue'; import Telemetry from '@/components/Telemetry.vue'; import AskAssistantFloatingButton from '@/components/AskAssistant/AskAssistantFloatingButton.vue'; import { loadLanguage } from '@/plugins/i18n'; -import { useExternalHooks } from '@/composables/useExternalHooks'; import { APP_MODALS_ELEMENT_ID, HIRING_BANNER, VIEWS } from '@/constants'; import { useRootStore } from '@/stores/root.store'; import { useAssistantStore } from '@/stores/assistant.store'; @@ -46,7 +45,6 @@ watch(defaultLocale, (newLocale) => { onMounted(async () => { setAppZIndexes(); logHiringBanner(); - void useExternalHooks().run('app.mount'); loading.value = false; window.addEventListener('resize', updateGridWidth); await updateGridWidth(); diff --git a/packages/editor-ui/src/__tests__/defaults.ts b/packages/editor-ui/src/__tests__/defaults.ts index dd4fbed9d5..d16678b17c 100644 --- a/packages/editor-ui/src/__tests__/defaults.ts +++ b/packages/editor-ui/src/__tests__/defaults.ts @@ -124,4 +124,5 @@ export const defaultSettings: FrontendSettings = { aiAssistant: { enabled: false, }, + betaFeatures: [], }; diff --git a/packages/editor-ui/src/components/UpdatesPanel.vue b/packages/editor-ui/src/components/UpdatesPanel.vue index b96a20c650..fb17fd34ca 100644 --- a/packages/editor-ui/src/components/UpdatesPanel.vue +++ b/packages/editor-ui/src/components/UpdatesPanel.vue @@ -1,38 +1,26 @@ - @@ -45,24 +33,24 @@ export default defineComponent({ >