Mirror of https://github.com/n8n-io/n8n.git (synced 2025-03-05 20:50:17 -08:00)

Commit 9bc4c6b2ce: Merge remote-tracking branch 'origin/master' into CAT-271-split-node-execution-contexts
@@ -259,7 +259,7 @@ describe('Sharing', { disableAutoLogin: true }, () => {
credentialsPage.getters
	.credentialCards()
	.should('have.length', 2)
	.filter(':contains("Owned by me")')
	.filter(':contains("Personal")')
	.should('have.length', 1);
});
});

@@ -15,12 +15,8 @@ import {
	NDV,
	MainSidebar,
} from '../pages';
import {
	getVisibleDropdown,
	getVisibleModalOverlay,
	getVisibleSelect,
	getVisiblePopper,
} from '../utils';
import { clearNotifications } from '../pages/notifications';
import { getVisibleDropdown, getVisibleModalOverlay, getVisibleSelect } from '../utils';

const workflowsPage = new WorkflowsPage();
const workflowPage = new WorkflowPage();

@@ -453,38 +449,48 @@ describe('Projects', { disableAutoLogin: true }, () => {
workflowsPage.getters.workflowCards().should('not.have.length');
workflowsPage.getters.newWorkflowButtonCard().click();
projects.createWorkflow('Test_workflow_1.json', 'Workflow in Home project');
clearNotifications();

projects.getHomeButton().click();
projects.getProjectTabCredentials().should('be.visible').click();
credentialsPage.getters.emptyListCreateCredentialButton().click();
projects.createCredential('Credential in Home project');

clearNotifications();

// Create a project and add a credential and a workflow to it
projects.createProject('Project 1');
clearNotifications();
projects.getProjectTabCredentials().click();
credentialsPage.getters.emptyListCreateCredentialButton().click();
projects.createCredential('Credential in Project 1');
clearNotifications();

projects.getProjectTabWorkflows().click();
workflowsPage.getters.newWorkflowButtonCard().click();
projects.createWorkflow('Test_workflow_1.json', 'Workflow in Project 1');

clearNotifications();

// Create another project and add a credential and a workflow to it
projects.createProject('Project 2');
clearNotifications();
projects.getProjectTabCredentials().click();
credentialsPage.getters.emptyListCreateCredentialButton().click();
projects.createCredential('Credential in Project 2');
clearNotifications();

projects.getProjectTabWorkflows().click();
workflowsPage.getters.newWorkflowButtonCard().click();
projects.createWorkflow('Test_workflow_1.json', 'Workflow in Project 2');
clearNotifications();

// Move the workflow owned by me from Home to Project 1
// Move the workflow Personal from Home to Project 1
projects.getHomeButton().click();
workflowsPage.getters
	.workflowCards()
	.should('have.length', 3)
	.filter(':contains("Owned by me")')
	.filter(':contains("Personal")')
	.should('exist');
workflowsPage.getters.workflowCardActions('Workflow in Home project').click();
workflowsPage.getters.workflowMoveButton().click();

@@ -501,11 +507,12 @@ describe('Projects', { disableAutoLogin: true }, () => {
	.filter(':contains("Project 1")')
	.click();
projects.getResourceMoveModal().find('button:contains("Move workflow")').click();
clearNotifications();

workflowsPage.getters
	.workflowCards()
	.should('have.length', 3)
	.filter(':contains("Owned by me")')
	.filter(':contains("Personal")')
	.should('not.exist');

// Move the workflow from Project 1 to Project 2

@@ -532,6 +539,7 @@ describe('Projects', { disableAutoLogin: true }, () => {
workflowsPage.getters.workflowCards().should('have.length', 2);
workflowsPage.getters.workflowCardActions('Workflow in Home project').click();
workflowsPage.getters.workflowMoveButton().click();
clearNotifications();

projects
	.getResourceMoveModal()

@@ -571,10 +579,11 @@ describe('Projects', { disableAutoLogin: true }, () => {
	.click();

projects.getResourceMoveModal().find('button:contains("Move workflow")').click();
clearNotifications();
workflowsPage.getters
	.workflowCards()
	.should('have.length', 3)
	.filter(':contains("Owned by me")')
	.filter(':contains("Personal")')
	.should('have.length', 1);

// Move the credential from Project 1 to Project 2

@@ -584,9 +593,6 @@ describe('Projects', { disableAutoLogin: true }, () => {
credentialsPage.getters.credentialCardActions('Credential in Project 1').click();
credentialsPage.getters.credentialMoveButton().click();

// wait for all poppers to be gone
getVisiblePopper().should('have.length', 0);

projects
	.getResourceMoveModal()
	.should('be.visible')

@@ -599,7 +605,7 @@ describe('Projects', { disableAutoLogin: true }, () => {
	.filter(':contains("Project 2")')
	.click();
projects.getResourceMoveModal().find('button:contains("Move credential")').click();

clearNotifications();
credentialsPage.getters.credentialCards().should('not.have.length');

// Move the credential from Project 2 to admin user

@@ -610,9 +616,6 @@ describe('Projects', { disableAutoLogin: true }, () => {
credentialsPage.getters.credentialCardActions('Credential in Project 1').click();
credentialsPage.getters.credentialMoveButton().click();

// wait for all poppers to be gone
getVisiblePopper().should('have.length', 0);

projects
	.getResourceMoveModal()
	.should('be.visible')

@@ -635,9 +638,6 @@ describe('Projects', { disableAutoLogin: true }, () => {
credentialsPage.getters.credentialCardActions('Credential in Project 1').click();
credentialsPage.getters.credentialMoveButton().click();

// wait for all poppers to be gone
getVisiblePopper().should('have.length', 0);

projects
	.getResourceMoveModal()
	.should('be.visible')

@@ -651,13 +651,12 @@ describe('Projects', { disableAutoLogin: true }, () => {
	.click();
projects.getResourceMoveModal().find('button:contains("Move credential")').click();

// wait for all poppers to be gone
getVisiblePopper().should('have.length', 0);
clearNotifications();

credentialsPage.getters
	.credentialCards()
	.should('have.length', 3)
	.filter(':contains("Owned by me")')
	.filter(':contains("Personal")')
	.should('have.length', 2);

// Move the credential from admin user back to its original project (Project 1)

@@ -716,7 +715,7 @@ describe('Projects', { disableAutoLogin: true }, () => {
workflowsPage.getters
	.workflowCards()
	.should('have.length', 1)
	.filter(':contains("Owned by me")')
	.filter(':contains("Personal")')
	.should('exist');
workflowsPage.getters.workflowCardActions('My workflow').click();
workflowsPage.getters.workflowMoveButton().click();

@@ -737,7 +736,7 @@ describe('Projects', { disableAutoLogin: true }, () => {
workflowsPage.getters
	.workflowCards()
	.should('have.length', 1)
	.filter(':contains("Owned by me")')
	.filter(':contains("Personal")')
	.should('not.exist');

// Log out with instance owner and log in with the member user

packages/@n8n/config/src/configs/license.config.ts (new file, 28 lines)

@@ -0,0 +1,28 @@
import { Config, Env } from '../decorators';

@Config
export class LicenseConfig {
	/** License server URL to retrieve license. */
	@Env('N8N_LICENSE_SERVER_URL')
	serverUrl: string = 'https://license.n8n.io/v1';

	/** Whether autorenewal for licenses is enabled. */
	@Env('N8N_LICENSE_AUTO_RENEW_ENABLED')
	autoRenewalEnabled: boolean = true;

	/** How long (in seconds) before expiry a license should be autorenewed. */
	@Env('N8N_LICENSE_AUTO_RENEW_OFFSET')
	autoRenewOffset: number = 60 * 60 * 72; // 72 hours

	/** Activation key to initialize license. */
	@Env('N8N_LICENSE_ACTIVATION_KEY')
	activationKey: string = '';

	/** Tenant ID used by the license manager SDK, e.g. for self-hosted, sandbox, embed, cloud. */
	@Env('N8N_LICENSE_TENANT_ID')
	tenantId: number = 1;

	/** Ephemeral license certificate. See: https://github.com/n8n-io/license-management?tab=readme-ov-file#concept-ephemeral-entitlements */
	@Env('N8N_LICENSE_CERT')
	cert: string = '';
}
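
Each LicenseConfig field can be overridden through the environment variable named in its @Env decorator. A minimal usage sketch, assuming the dependency-injection Container that n8n uses elsewhere to resolve GlobalConfig; the wiring shown here is illustrative and not part of this diff:

import { Container } from 'typedi';
import { GlobalConfig } from '@n8n/config';

// `license` is the @Nested LicenseConfig added to GlobalConfig later in this diff.
const { license } = Container.get(GlobalConfig);

// Defaults above apply unless overridden via env vars, e.g.
//   N8N_LICENSE_AUTO_RENEW_ENABLED=false
//   N8N_LICENSE_TENANT_ID=2
console.log(license.serverUrl, license.autoRenewalEnabled, license.autoRenewOffset);
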
@@ -6,12 +6,12 @@ import { EventBusConfig } from './configs/event-bus.config';
import { ExternalSecretsConfig } from './configs/external-secrets.config';
import { ExternalStorageConfig } from './configs/external-storage.config';
import { GenericConfig } from './configs/generic.config';
import { LicenseConfig } from './configs/license.config';
import { LoggingConfig } from './configs/logging.config';
import { MultiMainSetupConfig } from './configs/multi-main-setup.config';
import { NodesConfig } from './configs/nodes.config';
import { PublicApiConfig } from './configs/public-api.config';
import { TaskRunnersConfig } from './configs/runners.config';
export { TaskRunnersConfig } from './configs/runners.config';
import { ScalingModeConfig } from './configs/scaling-mode.config';
import { SentryConfig } from './configs/sentry.config';
import { TemplatesConfig } from './configs/templates.config';

@@ -19,8 +19,9 @@ import { UserManagementConfig } from './configs/user-management.config';
import { VersionNotificationsConfig } from './configs/version-notifications.config';
import { WorkflowsConfig } from './configs/workflows.config';
import { Config, Env, Nested } from './decorators';
export { Config, Env, Nested } from './decorators';

export { Config, Env, Nested } from './decorators';
export { TaskRunnersConfig } from './configs/runners.config';
export { LOG_SCOPES } from './configs/logging.config';
export type { LogScope } from './configs/logging.config';

@@ -102,4 +103,7 @@ export class GlobalConfig {

	@Nested
	generic: GenericConfig;

	@Nested
	license: LicenseConfig;
}

@@ -256,6 +256,14 @@ describe('GlobalConfig', () => {
	releaseChannel: 'dev',
	gracefulShutdownTimeout: 30,
},
license: {
	serverUrl: 'https://license.n8n.io/v1',
	autoRenewalEnabled: true,
	autoRenewOffset: 60 * 60 * 72,
	activationKey: '',
	tenantId: 1,
	cert: '',
},
};

it('should use all default values when no env variables are defined', () => {

@@ -1,13 +1,13 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
	NodeOperationError,
	type IExecuteFunctions,
	type INodeExecutionData,
	type INodeType,
	type INodeTypeDescription,
	type INodeOutputConfiguration,
	type SupplyData,
	NodeConnectionType,
import { NodeOperationError, NodeConnectionType } from 'n8n-workflow';
import type {
	IExecuteFunctions,
	INodeExecutionData,
	INodeType,
	INodeTypeDescription,
	INodeOutputConfiguration,
	SupplyData,
	ISupplyDataFunctions,
} from 'n8n-workflow';

// TODO: Add support for execute function. Got already started but got commented out

@@ -72,7 +72,7 @@ export const vmResolver = makeResolverFromLegacyOptions({
});

function getSandbox(
	this: IExecuteFunctions,
	this: IExecuteFunctions | ISupplyDataFunctions,
	code: string,
	options?: { addItems?: boolean; itemIndex?: number },
) {

@@ -354,7 +354,7 @@ export class Code implements INodeType {
		}
	}

	async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
		const code = this.getNodeParameter('code', itemIndex) as { supplyData?: { code: string } };

		if (!code.supplyData?.code) {
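
The same signature change, from `this: IExecuteFunctions` to `this: ISupplyDataFunctions`, repeats across the sub-node files below. A minimal sketch of a supplyData implementation against the narrower context type; the class, parameter name, and return value are illustrative, while the members used on `this` (logger, getNodeParameter) are the same ones these diffs already call on ISupplyDataFunctions:

import type { INodeType, INodeTypeDescription, ISupplyDataFunctions, SupplyData } from 'n8n-workflow';

export class ExampleSupplierNode implements INodeType {
	// Description omitted for brevity; real nodes define inputs/outputs here.
	description = {} as INodeTypeDescription;

	async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
		// Only the supply-data subset of the execution context is needed here.
		this.logger.debug('Supplying data from example node');
		const value = this.getNodeParameter('someOption', itemIndex, '') as string;
		return { response: value };
	}
}
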
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -177,7 +177,7 @@ export class DocumentBinaryInputLoader implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions): Promise<SupplyData> {
|
||||
this.logger.debug('Supply Data for Binary Input Loader');
|
||||
const textSplitter = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiTextSplitter,
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -283,7 +283,7 @@ export class DocumentDefaultDataLoader implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const dataType = this.getNodeParameter('dataType', itemIndex, 'json') as 'json' | 'binary';
|
||||
const textSplitter = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiTextSplitter,
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { GithubRepoLoader } from '@langchain/community/document_loaders/web/github';
|
||||
|
@ -93,7 +93,7 @@ export class DocumentGithubLoader implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
console.log('Supplying data for Github Document Loader');
|
||||
|
||||
const repository = this.getNodeParameter('repository', itemIndex) as string;
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -79,7 +79,7 @@ export class DocumentJsonInputLoader implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions): Promise<SupplyData> {
|
||||
this.logger.debug('Supply Data for JSON Input Loader');
|
||||
const textSplitter = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiTextSplitter,
|
||||
|
|
|
@ -2,9 +2,9 @@
|
|||
import { BedrockEmbeddings } from '@langchain/aws';
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -104,7 +104,7 @@ export class EmbeddingsAwsBedrock implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('aws');
|
||||
const modelName = this.getNodeParameter('model', itemIndex) as string;
|
||||
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -92,7 +92,7 @@ export class EmbeddingsAzureOpenAi implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
this.logger.debug('Supply data for embeddings');
|
||||
const credentials = await this.getCredentials<{
|
||||
apiKey: string;
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { CohereEmbeddings } from '@langchain/cohere';
|
||||
|
@ -99,7 +99,7 @@ export class EmbeddingsCohere implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
this.logger.debug('Supply data for embeddings Cohere');
|
||||
const modelName = this.getNodeParameter('modelName', itemIndex, 'embed-english-v2.0') as string;
|
||||
const credentials = await this.getCredentials<{ apiKey: string }>('cohereApi');
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { GoogleGenerativeAIEmbeddings } from '@langchain/google-genai';
|
||||
|
@ -116,7 +116,7 @@ export class EmbeddingsGoogleGemini implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
this.logger.debug('Supply data for embeddings Google Gemini');
|
||||
const modelName = this.getNodeParameter(
|
||||
'modelName',
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { HuggingFaceInferenceEmbeddings } from '@langchain/community/embeddings/hf';
|
||||
|
@ -81,7 +81,7 @@ export class EmbeddingsHuggingFaceInference implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
this.logger.debug('Supply data for embeddings HF Inference');
|
||||
const model = this.getNodeParameter(
|
||||
'modelName',
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import type { MistralAIEmbeddingsParams } from '@langchain/mistralai';
|
||||
|
@ -134,7 +134,7 @@ export class EmbeddingsMistralCloud implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('mistralCloudApi');
|
||||
const modelName = this.getNodeParameter('model', itemIndex) as string;
|
||||
const options = this.getNodeParameter(
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { OllamaEmbeddings } from '@langchain/ollama';
|
||||
|
@ -44,7 +44,7 @@ export class EmbeddingsOllama implements INodeType {
|
|||
properties: [getConnectionHintNoticeField([NodeConnectionType.AiVectorStore]), ollamaModel],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
this.logger.debug('Supply data for embeddings Ollama');
|
||||
const modelName = this.getNodeParameter('model', itemIndex) as string;
|
||||
const credentials = await this.getCredentials('ollamaApi');
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
type ISupplyDataFunctions,
|
||||
type INodeProperties,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -170,7 +170,7 @@ export class EmbeddingsOpenAi implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
this.logger.debug('Supply data for embeddings');
|
||||
const credentials = await this.getCredentials('openAiApi');
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@ import {
|
|||
NodeConnectionType,
|
||||
type INodePropertyOptions,
|
||||
type INodeProperties,
|
||||
type IExecuteFunctions,
|
||||
type ISupplyDataFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
|
@ -175,7 +175,7 @@ export class LmChatAnthropic implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('anthropicApi');
|
||||
|
||||
const modelName = this.getNodeParameter('model', itemIndex) as string;
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -52,7 +52,7 @@ export class LmChatOllama implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('ollamaApi');
|
||||
|
||||
const modelName = this.getNodeParameter('model', itemIndex) as string;
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
type JsonObject,
|
||||
NodeApiError,
|
||||
|
@ -242,7 +242,7 @@ export class LmChatOpenAi implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('openAiApi');
|
||||
|
||||
const modelName = this.getNodeParameter('model', itemIndex) as string;
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -90,7 +90,7 @@ export class LmCohere implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('cohereApi');
|
||||
|
||||
const options = this.getNodeParameter('options', itemIndex, {}) as object;
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -51,7 +51,7 @@ export class LmOllama implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('ollamaApi');
|
||||
|
||||
const modelName = this.getNodeParameter('model', itemIndex) as string;
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import { NodeConnectionType } from 'n8n-workflow';
|
||||
import type {
|
||||
IExecuteFunctions,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
ISupplyDataFunctions,
|
||||
SupplyData,
|
||||
ILoadOptionsFunctions,
|
||||
} from 'n8n-workflow';
|
||||
|
@ -229,7 +229,7 @@ export class LmOpenAi implements INodeType {
|
|||
},
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('openAiApi');
|
||||
|
||||
const modelName = this.getNodeParameter('model', itemIndex, '', {
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -132,7 +132,7 @@ export class LmOpenHuggingFaceInference implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('huggingFaceApi');
|
||||
|
||||
const modelName = this.getNodeParameter('model', itemIndex) as string;
|
||||
|
|
|
@ -2,9 +2,9 @@
|
|||
import { ChatBedrockConverse } from '@langchain/aws';
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -132,7 +132,7 @@ export class LmChatAwsBedrock implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('aws');
|
||||
const modelName = this.getNodeParameter('model', itemIndex) as string;
|
||||
const options = this.getNodeParameter('options', itemIndex, {}) as {
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -162,7 +162,7 @@ export class LmChatAzureOpenAi implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials<{
|
||||
apiKey: string;
|
||||
resourceName: string;
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
|
||||
|
@ -113,7 +113,7 @@ export class LmChatGoogleGemini implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('googlePalmApi');
|
||||
|
||||
const modelName = this.getNodeParameter('modelName', itemIndex) as string;
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
type ILoadOptionsFunctions,
|
||||
type JsonObject,
|
||||
|
@ -124,7 +124,7 @@ export class LmChatGoogleVertex implements INodeType {
|
|||
},
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('googleApi');
|
||||
const privateKey = formatPrivateKey(credentials.privateKey as string);
|
||||
const email = (credentials.email as string).trim();
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -129,7 +129,7 @@ export class LmChatGroq implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('groqApi');
|
||||
|
||||
const modelName = this.getNodeParameter('model', itemIndex) as string;
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -172,7 +172,7 @@ export class LmChatMistralCloud implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('mistralCloudApi');
|
||||
|
||||
const modelName = this.getNodeParameter('model', itemIndex) as string;
|
||||
|
|
|
@@ -7,7 +7,7 @@ import type {
	SerializedSecret,
} from '@langchain/core/load/serializable';
import type { LLMResult } from '@langchain/core/outputs';
import type { IDataObject, IExecuteFunctions } from 'n8n-workflow';
import type { IDataObject, ISupplyDataFunctions } from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';
import { pick } from 'lodash';
import type { BaseMessage } from '@langchain/core/messages';

@@ -30,8 +30,6 @@ const TIKTOKEN_ESTIMATE_MODEL = 'gpt-4o';
export class N8nLlmTracing extends BaseCallbackHandler {
	name = 'N8nLlmTracing';

	executionFunctions: IExecuteFunctions;

	connectionType = NodeConnectionType.AiLanguageModel;

	promptTokensEstimate = 0;

@@ -61,11 +59,10 @@ export class N8nLlmTracing extends BaseCallbackHandler {
	};

	constructor(
		executionFunctions: IExecuteFunctions,
		private executionFunctions: ISupplyDataFunctions,
		options?: { tokensUsageParser: TokensUsageParser },
	) {
		super();
		this.executionFunctions = executionFunctions;
		this.options = { ...this.options, ...options };
	}
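
With executionFunctions now declared as a private constructor parameter property typed as ISupplyDataFunctions, the separate field declaration and the explicit assignment in the constructor body are dropped. A hedged sketch of the call site inside a model node's supplyData, assuming a LangChain ChatOpenAI model as used by the LM nodes in this diff; the option values are illustrative:

import { ChatOpenAI } from '@langchain/openai';
import type { ISupplyDataFunctions, SupplyData } from 'n8n-workflow';
import { N8nLlmTracing } from './N8nLlmTracing';

async function supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
	const model = new ChatOpenAI({
		apiKey: 'replace-me', // illustrative; real nodes read this via this.getCredentials()
		model: 'gpt-4o-mini',
		callbacks: [new N8nLlmTracing(this)], // `this` satisfies the narrowed constructor type
	});
	return { response: model };
}
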
|
||||
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import type { BufferWindowMemoryInput } from 'langchain/memory';
|
||||
|
@ -134,7 +134,7 @@ export class MemoryBufferWindow implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const contextWindowLength = this.getNodeParameter('contextWindowLength', itemIndex) as number;
|
||||
const workflowId = this.getWorkflow().id;
|
||||
const memoryInstance = MemoryChatBufferSingleton.getInstance();
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -86,7 +86,7 @@ export class MemoryMotorhead implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('motorheadApi');
|
||||
const nodeVersion = this.getNode().typeVersion;
|
||||
|
||||
|
|
|
@ -1,5 +1,10 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import type { IExecuteFunctions, INodeType, INodeTypeDescription, SupplyData } from 'n8n-workflow';
|
||||
import type {
|
||||
ISupplyDataFunctions,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { NodeConnectionType } from 'n8n-workflow';
|
||||
import { BufferMemory, BufferWindowMemory } from 'langchain/memory';
|
||||
import { PostgresChatMessageHistory } from '@langchain/community/stores/message/postgres';
|
||||
|
@ -73,7 +78,7 @@ export class MemoryPostgresChat implements INodeType {
|
|||
},
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials<PostgresNodeCredentials>('postgres');
|
||||
const tableName = this.getNodeParameter('tableName', itemIndex, 'n8n_chat_histories') as string;
|
||||
const sessionId = getSessionId(this, itemIndex);
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeOperationError,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
NodeConnectionType,
|
||||
} from 'n8n-workflow';
|
||||
|
@ -102,7 +102,7 @@ export class MemoryRedisChat implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('redis');
|
||||
const nodeVersion = this.getNode().typeVersion;
|
||||
|
||||
|
|
|
@ -1,6 +1,11 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
|
||||
import type { IExecuteFunctions, INodeType, INodeTypeDescription, SupplyData } from 'n8n-workflow';
|
||||
import type {
|
||||
ISupplyDataFunctions,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { XataChatMessageHistory } from '@langchain/community/stores/message/xata';
|
||||
import { BufferMemory, BufferWindowMemory } from 'langchain/memory';
|
||||
import { BaseClient } from '@xata.io/client';
|
||||
|
@ -88,7 +93,7 @@ export class MemoryXata implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials('xataApi');
|
||||
const nodeVersion = this.getNode().typeVersion;
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type ISupplyDataFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type SupplyData,
|
||||
|
@ -103,7 +103,7 @@ export class MemoryZep implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const credentials = await this.getCredentials<{
|
||||
apiKey?: string;
|
||||
apiUrl?: string;
|
||||
|
|
|
@ -1,6 +1,11 @@
|
|||
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
|
||||
import { NodeConnectionType } from 'n8n-workflow';
|
||||
import type { IExecuteFunctions, INodeType, INodeTypeDescription, SupplyData } from 'n8n-workflow';
|
||||
import type {
|
||||
ISupplyDataFunctions,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import {
|
||||
N8nOutputFixingParser,
|
||||
|
@ -63,7 +68,7 @@ export class OutputParserAutofixing implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const model = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiLanguageModel,
|
||||
itemIndex,
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -80,7 +80,7 @@ export class OutputParserItemList implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const options = this.getNodeParameter('options', itemIndex, {}) as {
|
||||
numberOfItems?: number;
|
||||
separator?: string;
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
import type { JSONSchema7 } from 'json-schema';
|
||||
import {
|
||||
jsonParse,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
NodeOperationError,
|
||||
NodeConnectionType,
|
||||
|
@ -122,7 +122,7 @@ export class OutputParserStructured implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const schemaType = this.getNodeParameter('schemaType', itemIndex, '') as 'fromJson' | 'manual';
|
||||
// We initialize these even though one of them will always be empty
|
||||
// it makes it easer to navigate the ternary operator
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -63,7 +63,7 @@ export class RetrieverContextualCompression implements INodeType {
|
|||
properties: [],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
this.logger.debug('Supplying data for Contextual Compression Retriever');
|
||||
|
||||
const model = (await this.getInputConnectionData(
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
|
@ -82,7 +82,7 @@ export class RetrieverMultiQuery implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
this.logger.debug('Supplying data for MultiQuery Retriever');
|
||||
|
||||
const options = this.getNodeParameter('options', itemIndex, {}) as { queryCount?: number };
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import type { VectorStore } from '@langchain/core/vectorstores';
|
||||
|
@ -56,7 +56,7 @@ export class RetrieverVectorStore implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
this.logger.debug('Supplying data for Vector Store Retriever');
|
||||
|
||||
const topK = this.getNodeParameter('topK', itemIndex, 4) as number;
|
||||
|
|
|
@ -5,7 +5,7 @@ import type {
|
|||
IExecuteWorkflowInfo,
|
||||
INodeExecutionData,
|
||||
IWorkflowBase,
|
||||
IExecuteFunctions,
|
||||
ISupplyDataFunctions,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
SupplyData,
|
||||
|
@ -292,15 +292,15 @@ export class RetrieverWorkflow implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
class WorkflowRetriever extends BaseRetriever {
|
||||
lc_namespace = ['n8n-nodes-langchain', 'retrievers', 'workflow'];
|
||||
|
||||
executeFunctions: IExecuteFunctions;
|
||||
|
||||
constructor(executeFunctions: IExecuteFunctions, fields: BaseRetrieverInput) {
|
||||
constructor(
|
||||
private executeFunctions: ISupplyDataFunctions,
|
||||
fields: BaseRetrieverInput,
|
||||
) {
|
||||
super(fields);
|
||||
this.executeFunctions = executeFunctions;
|
||||
}
|
||||
|
||||
async _getRelevantDocuments(
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import type { CharacterTextSplitterParams } from '@langchain/textsplitters';
|
||||
|
@ -63,7 +63,7 @@ export class TextSplitterCharacterTextSplitter implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
this.logger.debug('Supply Data for Text Splitter');
|
||||
|
||||
const separator = this.getNodeParameter('separator', itemIndex) as string;
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import type {
|
||||
|
@ -94,7 +94,7 @@ export class TextSplitterRecursiveCharacterTextSplitter implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
this.logger.debug('Supply Data for Text Splitter');
|
||||
|
||||
const chunkSize = this.getNodeParameter('chunkSize', itemIndex) as number;
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { TokenTextSplitter } from '@langchain/textsplitters';
|
||||
|
@ -56,7 +56,7 @@ export class TextSplitterTokenSplitter implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
this.logger.debug('Supply Data for Text Splitter');
|
||||
|
||||
const chunkSize = this.getNodeParameter('chunkSize', itemIndex) as number;
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
type ISupplyDataFunctions,
|
||||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
import { Calculator } from '@langchain/community/tools/calculator';
|
||||
|
@ -43,7 +43,7 @@ export class ToolCalculator implements INodeType {
|
|||
properties: [getConnectionHintNoticeField([NodeConnectionType.AiAgent])],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions): Promise<SupplyData> {
|
||||
return {
|
||||
response: logWrapper(new Calculator(), this),
|
||||
};
|
||||
|
|
|
@ -6,9 +6,9 @@ import { PythonSandbox } from 'n8n-nodes-base/dist/nodes/Code/PythonSandbox';
|
|||
import type { Sandbox } from 'n8n-nodes-base/dist/nodes/Code/Sandbox';
|
||||
import { getSandboxContext } from 'n8n-nodes-base/dist/nodes/Code/Sandbox';
|
||||
import type {
|
||||
IExecuteFunctions,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
ISupplyDataFunctions,
|
||||
SupplyData,
|
||||
ExecutionError,
|
||||
IDataObject,
|
||||
|
@ -175,7 +175,7 @@ export class ToolCode implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const node = this.getNode();
|
||||
const workflowMode = this.getMode();
|
||||
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
|
||||
import type {
|
||||
IExecuteFunctions,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
ISupplyDataFunctions,
|
||||
SupplyData,
|
||||
IHttpRequestMethods,
|
||||
IHttpRequestOptions,
|
||||
|
@ -250,7 +250,7 @@ export class ToolHttpRequest implements INodeType {
|
|||
],
|
||||
};
|
||||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const name = this.getNode().name.replace(/ /g, '_');
|
||||
try {
|
||||
tryToParseAlphanumericString(name);
|
||||
|
|
|
@ -8,12 +8,12 @@ import unset from 'lodash/unset';
|
|||
import * as mime from 'mime-types';
|
||||
import { getOAuth2AdditionalParameters } from 'n8n-nodes-base/dist/nodes/HttpRequest/GenericFunctions';
|
||||
import type {
|
||||
IExecuteFunctions,
|
||||
IDataObject,
|
||||
IHttpRequestOptions,
|
||||
IRequestOptionsSimplified,
|
||||
ExecutionError,
|
||||
NodeApiError,
|
||||
ISupplyDataFunctions,
|
||||
} from 'n8n-workflow';
|
||||
import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow';
|
||||
import { z } from 'zod';
|
||||
|
@ -28,7 +28,7 @@ import type {
|
|||
} from './interfaces';
|
||||
import type { DynamicZodObject } from '../../../types/zod.types';
|
||||
|
||||
const genericCredentialRequest = async (ctx: IExecuteFunctions, itemIndex: number) => {
|
||||
const genericCredentialRequest = async (ctx: ISupplyDataFunctions, itemIndex: number) => {
|
||||
const genericType = ctx.getNodeParameter('genericAuthType', itemIndex) as string;
|
||||
|
||||
if (genericType === 'httpBasicAuth' || genericType === 'httpDigestAuth') {
|
||||
|
@ -104,7 +104,7 @@ const genericCredentialRequest = async (ctx: IExecuteFunctions, itemIndex: numbe
|
|||
});
|
||||
};
|
||||
|
||||
const predefinedCredentialRequest = async (ctx: IExecuteFunctions, itemIndex: number) => {
|
||||
const predefinedCredentialRequest = async (ctx: ISupplyDataFunctions, itemIndex: number) => {
|
||||
const predefinedType = ctx.getNodeParameter('nodeCredentialType', itemIndex) as string;
|
||||
const additionalOptions = getOAuth2AdditionalParameters(predefinedType);
|
||||
|
||||
|
@ -119,7 +119,7 @@ const predefinedCredentialRequest = async (ctx: IExecuteFunctions, itemIndex: nu
|
|||
};
|
||||
|
||||
export const configureHttpRequestFunction = async (
|
||||
ctx: IExecuteFunctions,
|
||||
ctx: ISupplyDataFunctions,
|
||||
credentialsType: 'predefinedCredentialType' | 'genericCredentialType' | 'none',
|
||||
itemIndex: number,
|
||||
) => {
|
||||
|
@ -146,7 +146,7 @@ const defaultOptimizer = <T>(response: T) => {
|
|||
return String(response);
|
||||
};
|
||||
|
||||
const htmlOptimizer = (ctx: IExecuteFunctions, itemIndex: number, maxLength: number) => {
|
||||
const htmlOptimizer = (ctx: ISupplyDataFunctions, itemIndex: number, maxLength: number) => {
|
||||
const cssSelector = ctx.getNodeParameter('cssSelector', itemIndex, '') as string;
|
||||
const onlyContent = ctx.getNodeParameter('onlyContent', itemIndex, false) as boolean;
|
||||
let elementsToOmit: string[] = [];
|
||||
|
@ -214,7 +214,7 @@ const htmlOptimizer = (ctx: IExecuteFunctions, itemIndex: number, maxLength: num
|
|||
};
|
||||
};
|
||||
|
||||
const textOptimizer = (ctx: IExecuteFunctions, itemIndex: number, maxLength: number) => {
|
||||
const textOptimizer = (ctx: ISupplyDataFunctions, itemIndex: number, maxLength: number) => {
|
||||
return (response: string | IDataObject) => {
|
||||
if (typeof response === 'object') {
|
||||
try {
|
||||
|
@ -245,7 +245,7 @@ const textOptimizer = (ctx: IExecuteFunctions, itemIndex: number, maxLength: num
|
|||
};
|
||||
};
|
||||
|
||||
const jsonOptimizer = (ctx: IExecuteFunctions, itemIndex: number) => {
|
||||
const jsonOptimizer = (ctx: ISupplyDataFunctions, itemIndex: number) => {
|
||||
return (response: string): string => {
|
||||
let responseData: IDataObject | IDataObject[] | string = response;
|
||||
|
||||
|
@ -324,7 +324,7 @@ const jsonOptimizer = (ctx: IExecuteFunctions, itemIndex: number) => {
|
|||
};
|
||||
};
|
||||
|
||||
export const configureResponseOptimizer = (ctx: IExecuteFunctions, itemIndex: number) => {
|
||||
export const configureResponseOptimizer = (ctx: ISupplyDataFunctions, itemIndex: number) => {
|
||||
const optimizeResponse = ctx.getNodeParameter('optimizeResponse', itemIndex, false) as boolean;
|
||||
|
||||
if (optimizeResponse) {
|
||||
|
@ -469,7 +469,7 @@ const MODEL_INPUT_DESCRIPTION = {
|
|||
};
|
||||
|
||||
export const updateParametersAndOptions = (options: {
|
||||
ctx: IExecuteFunctions;
|
||||
ctx: ISupplyDataFunctions;
|
||||
itemIndex: number;
|
||||
toolParameters: ToolParameter[];
|
||||
placeholdersDefinitions: PlaceholderDefinition[];
|
||||
|
@ -558,7 +558,7 @@ export const prepareToolDescription = (
|
|||
};
|
||||
|
||||
export const configureToolFunction = (
|
||||
ctx: IExecuteFunctions,
|
||||
ctx: ISupplyDataFunctions,
|
||||
itemIndex: number,
|
||||
toolParameters: ToolParameter[],
|
||||
requestOptions: IHttpRequestOptions,
|
||||
|
|
|
@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import { SerpAPI } from '@langchain/community/tools/serpapi';
@ -113,7 +113,7 @@ export class ToolSerpApi implements INodeType {
],
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const credentials = await this.getCredentials('serpApi');
const options = this.getNodeParameter('options', itemIndex) as object;
@ -1,4 +1,9 @@
import type { IExecuteFunctions, INodeType, INodeTypeDescription, SupplyData } from 'n8n-workflow';
import type {
INodeType,
INodeTypeDescription,
ISupplyDataFunctions,
SupplyData,
} from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';
import { VectorStoreQATool } from 'langchain/tools';
@ -82,7 +87,7 @@ export class ToolVectorStore implements INodeType {
],
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const name = this.getNodeParameter('name', itemIndex) as string;
const toolDescription = this.getNodeParameter('description', itemIndex) as string;
const topK = this.getNodeParameter('topK', itemIndex, 4) as number;
@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import { WikipediaQueryRun } from '@langchain/community/tools/wikipedia_query_run';
@ -43,7 +43,7 @@ export class ToolWikipedia implements INodeType {
properties: [getConnectionHintNoticeField([NodeConnectionType.AiAgent])],
};
async supplyData(this: IExecuteFunctions): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions): Promise<SupplyData> {
const WikiTool = new WikipediaQueryRun();
WikiTool.description =
@ -1,9 +1,9 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import { WolframAlphaTool } from '@langchain/community/tools/wolframalpha';
@ -49,7 +49,7 @@ export class ToolWolframAlpha implements INodeType {
properties: [getConnectionHintNoticeField([NodeConnectionType.AiAgent])],
};
async supplyData(this: IExecuteFunctions): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions): Promise<SupplyData> {
const credentials = await this.getCredentials('wolframAlphaApi');
return {
@ -6,12 +6,12 @@ import isObject from 'lodash/isObject';
import type { SetField, SetNodeOptions } from 'n8n-nodes-base/dist/nodes/Set/v2/helpers/interfaces';
import * as manual from 'n8n-nodes-base/dist/nodes/Set/v2/manual.mode';
import type {
IExecuteFunctions,
IExecuteWorkflowInfo,
INodeExecutionData,
INodeType,
INodeTypeDescription,
IWorkflowBase,
ISupplyDataFunctions,
SupplyData,
ExecutionError,
IDataObject,
@ -357,7 +357,7 @@ export class ToolWorkflow implements INodeType {
],
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const name = this.getNodeParameter('name', itemIndex) as string;
const description = this.getNodeParameter('description', itemIndex) as string;
@ -1,10 +1,10 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import {
NodeConnectionType,
type SupplyData,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import type { Embeddings } from '@langchain/core/embeddings';
import { MemoryVectorStoreManager } from '../shared/MemoryVectorStoreManager';
@ -59,7 +59,7 @@ export class VectorStoreInMemoryLoad implements INodeType {
],
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const embeddings = (await this.getInputConnectionData(
NodeConnectionType.AiEmbedding,
itemIndex,
@ -1,8 +1,8 @@
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import type { PineconeStoreParams } from '@langchain/pinecone';
@ -84,7 +84,7 @@ export class VectorStorePineconeLoad implements INodeType {
},
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supplying data for Pinecone Load Vector Store');
const namespace = this.getNodeParameter('pineconeNamespace', itemIndex) as string;
@ -1,7 +1,7 @@
import {
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
NodeConnectionType,
} from 'n8n-workflow';
@ -81,7 +81,7 @@ export class VectorStoreSupabaseLoad implements INodeType {
methods = { listSearch: { supabaseTableNameSearch } };
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supply Supabase Load Vector Store');
const tableName = this.getNodeParameter('tableName', itemIndex, '', {
@ -1,8 +1,8 @@
import {
NodeConnectionType,
type IExecuteFunctions,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
} from 'n8n-workflow';
import type { IZepConfig } from '@langchain/community/vectorstores/zep';
@ -83,7 +83,7 @@ export class VectorStoreZepLoad implements INodeType {
],
};
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
this.logger.debug('Supplying data for Zep Load Vector Store');
const collectionName = this.getNodeParameter('collectionName', itemIndex) as string;
@ -7,11 +7,8 @@ export class MemoryVectorStoreManager {
private vectorStoreBuffer: Map<string, MemoryVectorStore>;
private embeddings: Embeddings;
private constructor(embeddings: Embeddings) {
private constructor(private embeddings: Embeddings) {
this.vectorStoreBuffer = new Map();
this.embeddings = embeddings;
}
public static getInstance(embeddings: Embeddings): MemoryVectorStoreManager {
@ -5,12 +5,13 @@ import type { Embeddings } from '@langchain/core/embeddings';
import type { VectorStore } from '@langchain/core/vectorstores';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type {
IExecuteFunctions,
INodeCredentialDescription,
INodeProperties,
INodeExecutionData,
IExecuteFunctions,
INodeTypeDescription,
SupplyData,
ISupplyDataFunctions,
INodeType,
ILoadOptionsFunctions,
INodeListSearchResult,
@ -57,13 +58,13 @@ interface VectorStoreNodeConstructorArgs {
retrieveFields?: INodeProperties[];
updateFields?: INodeProperties[];
populateVectorStore: (
context: IExecuteFunctions,
context: ISupplyDataFunctions,
embeddings: Embeddings,
documents: Array<Document<Record<string, unknown>>>,
itemIndex: number,
) => Promise<void>;
getVectorStoreClient: (
context: IExecuteFunctions,
context: ISupplyDataFunctions,
filter: Record<string, never> | undefined,
embeddings: Embeddings,
itemIndex: number,
@ -380,7 +381,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
);
}
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const mode = this.getNodeParameter('mode', 0) as 'load' | 'insert' | 'retrieve';
const filter = getMetadataFiltersValues(this, itemIndex);
const embeddings = (await this.getInputConnectionData(
@ -1,6 +1,11 @@
import { pipeline } from 'stream/promises';
import { createWriteStream } from 'fs';
import type { IBinaryData, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import type {
IBinaryData,
IExecuteFunctions,
INodeExecutionData,
ISupplyDataFunctions,
} from 'n8n-workflow';
import { NodeOperationError, BINARY_ENCODING } from 'n8n-workflow';
import type { TextSplitter } from '@langchain/textsplitters';
@ -26,25 +31,12 @@ const SUPPORTED_MIME_TYPES = {
};
export class N8nBinaryLoader {
private context: IExecuteFunctions;
private optionsPrefix: string;
private binaryDataKey: string;
private textSplitter?: TextSplitter;
constructor(
context: IExecuteFunctions,
optionsPrefix = '',
binaryDataKey = '',
textSplitter?: TextSplitter,
) {
this.context = context;
this.textSplitter = textSplitter;
this.optionsPrefix = optionsPrefix;
this.binaryDataKey = binaryDataKey;
}
private context: IExecuteFunctions | ISupplyDataFunctions,
private optionsPrefix = '',
private binaryDataKey = '',
private textSplitter?: TextSplitter,
) {}
async processAll(items?: INodeExecutionData[]): Promise<Document[]> {
const docs: Document[] = [];
@ -1,4 +1,9 @@
import { type IExecuteFunctions, type INodeExecutionData, NodeOperationError } from 'n8n-workflow';
import {
type IExecuteFunctions,
type INodeExecutionData,
type ISupplyDataFunctions,
NodeOperationError,
} from 'n8n-workflow';
import type { TextSplitter } from '@langchain/textsplitters';
import type { Document } from '@langchain/core/documents';
@ -7,17 +12,11 @@ import { TextLoader } from 'langchain/document_loaders/fs/text';
import { getMetadataFiltersValues } from './helpers';
export class N8nJsonLoader {
private context: IExecuteFunctions;
private optionsPrefix: string;
private textSplitter?: TextSplitter;
constructor(context: IExecuteFunctions, optionsPrefix = '', textSplitter?: TextSplitter) {
this.context = context;
this.textSplitter = textSplitter;
this.optionsPrefix = optionsPrefix;
}
constructor(
private context: IExecuteFunctions | ISupplyDataFunctions,
private optionsPrefix = '',
private textSplitter?: TextSplitter,
) {}
async processAll(items?: INodeExecutionData[]): Promise<Document[]> {
const docs: Document[] = [];
@ -1,6 +1,6 @@
import type { DynamicStructuredToolInput } from '@langchain/core/tools';
import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools';
import type { IExecuteFunctions, IDataObject } from 'n8n-workflow';
import type { ISupplyDataFunctions, IDataObject } from 'n8n-workflow';
import { NodeConnectionType, jsonParse, NodeOperationError } from 'n8n-workflow';
import { StructuredOutputParser } from 'langchain/output_parsers';
import type { ZodTypeAny } from 'zod';
@ -45,12 +45,11 @@ ALL parameters marked as required must be provided`;
};
export class N8nTool extends DynamicStructuredTool {
private context: IExecuteFunctions;
constructor(context: IExecuteFunctions, fields: DynamicStructuredToolInput) {
constructor(
private context: ISupplyDataFunctions,
fields: DynamicStructuredToolInput,
) {
super(fields);
this.context = context;
}
asDynamicTool(): DynamicTool {
@ -5,7 +5,13 @@ import type { BaseMessage } from '@langchain/core/messages';
import type { Tool } from '@langchain/core/tools';
import type { BaseChatMemory } from 'langchain/memory';
import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow';
import type { AiEvent, IDataObject, IExecuteFunctions, IWebhookFunctions } from 'n8n-workflow';
import type {
AiEvent,
IDataObject,
IExecuteFunctions,
ISupplyDataFunctions,
IWebhookFunctions,
} from 'n8n-workflow';
import { N8nTool } from './N8nTool';
@ -20,7 +26,7 @@ function hasMethods<T>(obj: unknown, ...methodNames: Array<string | symbol>): ob
}
export function getMetadataFiltersValues(
ctx: IExecuteFunctions,
ctx: IExecuteFunctions | ISupplyDataFunctions,
itemIndex: number,
): Record<string, never> | undefined {
const options = ctx.getNodeParameter('options', itemIndex, {});
@ -93,7 +99,7 @@ export function getPromptInputByType(options: {
}
export function getSessionId(
ctx: IExecuteFunctions | IWebhookFunctions,
ctx: ISupplyDataFunctions | IWebhookFunctions,
itemIndex: number,
selectorKey = 'sessionIdType',
autoSelect = 'fromInput',
@ -134,7 +140,7 @@ export function getSessionId(
}
export function logAiEvent(
executeFunctions: IExecuteFunctions,
executeFunctions: IExecuteFunctions | ISupplyDataFunctions,
event: AiEvent,
data?: IDataObject,
) {
@ -10,7 +10,7 @@ import type { Tool } from '@langchain/core/tools';
import { VectorStore } from '@langchain/core/vectorstores';
import { TextSplitter } from '@langchain/textsplitters';
import type { BaseDocumentLoader } from 'langchain/dist/document_loaders/base';
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import type { IExecuteFunctions, INodeExecutionData, ISupplyDataFunctions } from 'n8n-workflow';
import { NodeOperationError, NodeConnectionType } from 'n8n-workflow';
import { logAiEvent, isToolsInstance, isBaseChatMemory, isBaseChatMessageHistory } from './helpers';
@ -27,7 +27,7 @@ const errorsMap: { [key: string]: { message: string; description: string } } = {
export async function callMethodAsync<T>(
this: T,
parameters: {
executeFunctions: IExecuteFunctions;
executeFunctions: IExecuteFunctions | ISupplyDataFunctions;
connectionType: NodeConnectionType;
currentNodeRunIndex: number;
method: (...args: any[]) => Promise<unknown>;
@ -113,7 +113,7 @@ export function logWrapper(
| VectorStore
| N8nBinaryLoader
| N8nJsonLoader,
executeFunctions: IExecuteFunctions,
executeFunctions: IExecuteFunctions | ISupplyDataFunctions,
) {
return new Proxy(originalInstance, {
get: (target, prop) => {
@ -2,7 +2,7 @@ import type { Callbacks } from '@langchain/core/callbacks/manager';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { AIMessage } from '@langchain/core/messages';
import { BaseOutputParser } from '@langchain/core/output_parsers';
import type { IExecuteFunctions } from 'n8n-workflow';
import type { ISupplyDataFunctions } from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';
import type { N8nStructuredOutputParser } from './N8nStructuredOutputParser';
@ -10,23 +10,14 @@ import { NAIVE_FIX_PROMPT } from './prompt';
import { logAiEvent } from '../helpers';
export class N8nOutputFixingParser extends BaseOutputParser {
private context: IExecuteFunctions;
private model: BaseLanguageModel;
private outputParser: N8nStructuredOutputParser;
lc_namespace = ['langchain', 'output_parsers', 'fix'];
constructor(
context: IExecuteFunctions,
model: BaseLanguageModel,
outputParser: N8nStructuredOutputParser,
private context: ISupplyDataFunctions,
private model: BaseLanguageModel,
private outputParser: N8nStructuredOutputParser,
) {
super();
this.context = context;
this.model = model;
this.outputParser = outputParser;
}
getRetryChain() {
@ -1,7 +1,7 @@
import type { Callbacks } from '@langchain/core/callbacks/manager';
import { StructuredOutputParser } from 'langchain/output_parsers';
import get from 'lodash/get';
import type { IExecuteFunctions } from 'n8n-workflow';
import type { ISupplyDataFunctions } from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import { z } from 'zod';
@ -14,11 +14,11 @@ const STRUCTURED_OUTPUT_ARRAY_KEY = '__structured__output__array';
export class N8nStructuredOutputParser extends StructuredOutputParser<
z.ZodType<object, z.ZodTypeDef, object>
> {
context: IExecuteFunctions;
constructor(context: IExecuteFunctions, zodSchema: z.ZodSchema<object>) {
constructor(
private context: ISupplyDataFunctions,
zodSchema: z.ZodSchema<object>,
) {
super(zodSchema);
this.context = context;
}
lc_namespace = ['langchain', 'output_parsers', 'structured'];
@ -73,7 +73,7 @@ export class N8nStructuredOutputParser extends StructuredOutputParser<
static async fromZodJsonSchema(
zodSchema: z.ZodSchema<object>,
nodeVersion: number,
context: IExecuteFunctions,
context: ISupplyDataFunctions,
): Promise<N8nStructuredOutputParser> {
let returnSchema: z.ZodType<object, z.ZodTypeDef, object>;
if (nodeVersion === 1) {
@ -17,14 +17,16 @@ const MOCK_ACTIVATION_KEY = 'activation-key';
|
|||
const MOCK_FEATURE_FLAG = 'feat:sharing';
|
||||
const MOCK_MAIN_PLAN_ID = '1b765dc4-d39d-4ffe-9885-c56dd67c4b26';
|
||||
|
||||
describe('License', () => {
|
||||
beforeAll(() => {
|
||||
config.set('license.serverUrl', MOCK_SERVER_URL);
|
||||
config.set('license.autoRenewEnabled', true);
|
||||
config.set('license.autoRenewOffset', MOCK_RENEW_OFFSET);
|
||||
config.set('license.tenantId', 1);
|
||||
});
|
||||
const licenseConfig: GlobalConfig['license'] = {
|
||||
serverUrl: MOCK_SERVER_URL,
|
||||
autoRenewalEnabled: true,
|
||||
autoRenewOffset: MOCK_RENEW_OFFSET,
|
||||
activationKey: MOCK_ACTIVATION_KEY,
|
||||
tenantId: 1,
|
||||
cert: '',
|
||||
};
|
||||
|
||||
describe('License', () => {
|
||||
let license: License;
|
||||
const instanceSettings = mock<InstanceSettings>({
|
||||
instanceId: MOCK_INSTANCE_ID,
|
||||
|
@ -32,7 +34,10 @@ describe('License', () => {
|
|||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
const globalConfig = mock<GlobalConfig>({ multiMainSetup: { enabled: false } });
|
||||
const globalConfig = mock<GlobalConfig>({
|
||||
license: licenseConfig,
|
||||
multiMainSetup: { enabled: false },
|
||||
});
|
||||
license = new License(mockLogger(), instanceSettings, mock(), mock(), mock(), globalConfig);
|
||||
await license.init();
|
||||
});
|
||||
|
@ -66,7 +71,7 @@ describe('License', () => {
|
|||
mock(),
|
||||
mock(),
|
||||
mock(),
|
||||
mock(),
|
||||
mock<GlobalConfig>({ license: licenseConfig }),
|
||||
);
|
||||
await license.init();
|
||||
expect(LicenseManager).toHaveBeenCalledWith(
|
||||
|
@ -192,17 +197,23 @@ describe('License', () => {
|
|||
});
|
||||
|
||||
describe('License', () => {
|
||||
beforeEach(() => {
|
||||
config.load(config.default);
|
||||
});
|
||||
|
||||
describe('init', () => {
|
||||
describe('in single-main setup', () => {
|
||||
describe('with `license.autoRenewEnabled` enabled', () => {
|
||||
it('should enable renewal', async () => {
|
||||
const globalConfig = mock<GlobalConfig>({ multiMainSetup: { enabled: false } });
|
||||
const globalConfig = mock<GlobalConfig>({
|
||||
license: licenseConfig,
|
||||
multiMainSetup: { enabled: false },
|
||||
});
|
||||
|
||||
await new License(mockLogger(), mock(), mock(), mock(), mock(), globalConfig).init();
|
||||
await new License(
|
||||
mockLogger(),
|
||||
mock<InstanceSettings>({ instanceType: 'main' }),
|
||||
mock(),
|
||||
mock(),
|
||||
mock(),
|
||||
globalConfig,
|
||||
).init();
|
||||
|
||||
expect(LicenseManager).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ autoRenewEnabled: true, renewOnInit: true }),
|
||||
|
@ -212,9 +223,14 @@ describe('License', () => {
|
|||
|
||||
describe('with `license.autoRenewEnabled` disabled', () => {
|
||||
it('should disable renewal', async () => {
|
||||
config.set('license.autoRenewEnabled', false);
|
||||
|
||||
await new License(mockLogger(), mock(), mock(), mock(), mock(), mock()).init();
|
||||
await new License(
|
||||
mockLogger(),
|
||||
mock<InstanceSettings>({ instanceType: 'main' }),
|
||||
mock(),
|
||||
mock(),
|
||||
mock(),
|
||||
mock(),
|
||||
).init();
|
||||
|
||||
expect(LicenseManager).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ autoRenewEnabled: false, renewOnInit: false }),
|
||||
|
@ -228,9 +244,11 @@ describe('License', () => {
|
|||
test.each(['unset', 'leader', 'follower'])(
|
||||
'if %s status, should disable renewal',
|
||||
async (status) => {
|
||||
const globalConfig = mock<GlobalConfig>({ multiMainSetup: { enabled: true } });
|
||||
const globalConfig = mock<GlobalConfig>({
|
||||
license: { ...licenseConfig, autoRenewalEnabled: false },
|
||||
multiMainSetup: { enabled: true },
|
||||
});
|
||||
config.set('multiMainSetup.instanceType', status);
|
||||
config.set('license.autoRenewEnabled', false);
|
||||
|
||||
await new License(mockLogger(), mock(), mock(), mock(), mock(), globalConfig).init();
|
||||
|
||||
|
@ -243,9 +261,11 @@ describe('License', () => {
|
|||
|
||||
describe('with `license.autoRenewEnabled` enabled', () => {
|
||||
test.each(['unset', 'follower'])('if %s status, should disable renewal', async (status) => {
|
||||
const globalConfig = mock<GlobalConfig>({ multiMainSetup: { enabled: true } });
|
||||
const globalConfig = mock<GlobalConfig>({
|
||||
license: { ...licenseConfig, autoRenewalEnabled: false },
|
||||
multiMainSetup: { enabled: true },
|
||||
});
|
||||
config.set('multiMainSetup.instanceType', status);
|
||||
config.set('license.autoRenewEnabled', false);
|
||||
|
||||
await new License(mockLogger(), mock(), mock(), mock(), mock(), globalConfig).init();
|
||||
|
||||
|
@ -255,7 +275,10 @@ describe('License', () => {
|
|||
});
|
||||
|
||||
it('if leader status, should enable renewal', async () => {
|
||||
const globalConfig = mock<GlobalConfig>({ multiMainSetup: { enabled: true } });
|
||||
const globalConfig = mock<GlobalConfig>({
|
||||
license: licenseConfig,
|
||||
multiMainSetup: { enabled: true },
|
||||
});
|
||||
config.set('multiMainSetup.instanceType', 'leader');
|
||||
|
||||
await new License(mockLogger(), mock(), mock(), mock(), mock(), globalConfig).init();
|
||||
|
|
|
@ -274,7 +274,7 @@ export abstract class BaseCommand extends Command {
|
|||
this.license = Container.get(License);
|
||||
await this.license.init();
|
||||
|
||||
const activationKey = config.getEnv('license.activationKey');
|
||||
const { activationKey } = this.globalConfig.license;
|
||||
|
||||
if (activationKey) {
|
||||
const hasCert = (await this.license.loadCertStr()).length > 0;
|
||||
|
|
|
@ -199,7 +199,7 @@ export class Start extends BaseCommand {
|
|||
await this.initOrchestration();
|
||||
this.logger.debug('Orchestration init complete');
|
||||
|
||||
if (!config.getEnv('license.autoRenewEnabled') && this.instanceSettings.isLeader) {
|
||||
if (!this.globalConfig.license.autoRenewalEnabled && this.instanceSettings.isLeader) {
|
||||
this.logger.warn(
|
||||
'Automatic license renewal is disabled. The license will not renew automatically, and access to licensed features may be lost!',
|
||||
);
|
||||
|
|
|
@ -411,45 +411,6 @@ export const schema = {
|
|||
env: 'N8N_DEFAULT_LOCALE',
|
||||
},
|
||||
|
||||
license: {
|
||||
serverUrl: {
|
||||
format: String,
|
||||
default: 'https://license.n8n.io/v1',
|
||||
env: 'N8N_LICENSE_SERVER_URL',
|
||||
doc: 'License server url to retrieve license.',
|
||||
},
|
||||
autoRenewEnabled: {
|
||||
format: Boolean,
|
||||
default: true,
|
||||
env: 'N8N_LICENSE_AUTO_RENEW_ENABLED',
|
||||
doc: 'Whether auto renewal for licenses is enabled.',
|
||||
},
|
||||
autoRenewOffset: {
|
||||
format: Number,
|
||||
default: 60 * 60 * 72, // 72 hours
|
||||
env: 'N8N_LICENSE_AUTO_RENEW_OFFSET',
|
||||
doc: 'How many seconds before expiry a license should get automatically renewed. ',
|
||||
},
|
||||
activationKey: {
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'N8N_LICENSE_ACTIVATION_KEY',
|
||||
doc: 'Activation key to initialize license',
|
||||
},
|
||||
tenantId: {
|
||||
format: Number,
|
||||
default: 1,
|
||||
env: 'N8N_LICENSE_TENANT_ID',
|
||||
doc: 'Tenant id used by the license manager',
|
||||
},
|
||||
cert: {
|
||||
format: String,
|
||||
default: '',
|
||||
env: 'N8N_LICENSE_CERT',
|
||||
doc: 'Ephemeral license certificate',
|
||||
},
|
||||
},
|
||||
|
||||
hideUsagePage: {
|
||||
format: Boolean,
|
||||
default: false,
|
||||
|
|
|
@ -778,7 +778,7 @@ export class TelemetryEventRelay extends EventRelay {
|
|||
ldap_allowed: authenticationMethod === 'ldap',
|
||||
saml_enabled: authenticationMethod === 'saml',
|
||||
license_plan_name: this.license.getPlanName(),
|
||||
license_tenant_id: config.getEnv('license.tenantId'),
|
||||
license_tenant_id: this.globalConfig.license.tenantId,
|
||||
binary_data_s3: isS3Available && isS3Selected && isS3Licensed,
|
||||
multi_main_setup_enabled: this.globalConfig.multiMainSetup.enabled,
|
||||
metrics: {
|
||||
|
|
|
@ -48,8 +48,7 @@ export class License {
|
|||
*/
|
||||
private renewalEnabled() {
|
||||
if (this.instanceSettings.instanceType !== 'main') return false;
|
||||
|
||||
const autoRenewEnabled = config.getEnv('license.autoRenewEnabled');
|
||||
const autoRenewEnabled = this.globalConfig.license.autoRenewalEnabled;
|
||||
|
||||
/**
|
||||
* In multi-main setup, all mains start off with `unset` status and so renewal disabled.
|
||||
|
@ -75,9 +74,9 @@ export class License {
|
|||
|
||||
const { instanceType } = this.instanceSettings;
|
||||
const isMainInstance = instanceType === 'main';
|
||||
const server = config.getEnv('license.serverUrl');
|
||||
const server = this.globalConfig.license.serverUrl;
|
||||
const offlineMode = !isMainInstance;
|
||||
const autoRenewOffset = config.getEnv('license.autoRenewOffset');
|
||||
const autoRenewOffset = this.globalConfig.license.autoRenewOffset;
|
||||
const saveCertStr = isMainInstance
|
||||
? async (value: TLicenseBlock) => await this.saveCertStr(value)
|
||||
: async () => {};
|
||||
|
@ -96,7 +95,7 @@ export class License {
|
|||
try {
|
||||
this.manager = new LicenseManager({
|
||||
server,
|
||||
tenantId: config.getEnv('license.tenantId'),
|
||||
tenantId: this.globalConfig.license.tenantId,
|
||||
productIdentifier: `n8n-${N8N_VERSION}`,
|
||||
autoRenewEnabled: renewalEnabled,
|
||||
renewOnInit: renewalEnabled,
|
||||
|
@ -122,7 +121,7 @@ export class License {
|
|||
|
||||
async loadCertStr(): Promise<TLicenseBlock> {
|
||||
// if we have an ephemeral license, we don't want to load it from the database
|
||||
const ephemeralLicense = config.get('license.cert');
|
||||
const ephemeralLicense = this.globalConfig.license.cert;
|
||||
if (ephemeralLicense) {
|
||||
return ephemeralLicense;
|
||||
}
|
||||
|
@ -179,7 +178,7 @@ export class License {
|
|||
|
||||
async saveCertStr(value: TLicenseBlock): Promise<void> {
|
||||
// if we have an ephemeral license, we don't want to save it to the database
|
||||
if (config.get('license.cert')) return;
|
||||
if (this.globalConfig.license.cert) return;
|
||||
await this.settingsRepository.upsert(
|
||||
{
|
||||
key: SETTINGS_LICENSE_CERT_KEY,
|
||||
|
|
|
@ -201,7 +201,7 @@ export class FrontendService {
|
|||
hideUsagePage: config.getEnv('hideUsagePage'),
|
||||
license: {
|
||||
consumerId: 'unknown',
|
||||
environment: config.getEnv('license.tenantId') === 1 ? 'production' : 'staging',
|
||||
environment: this.globalConfig.license.tenantId === 1 ? 'production' : 'staging',
|
||||
},
|
||||
variables: {
|
||||
limit: 0,
|
||||
|
|
|
@ -106,6 +106,7 @@ import type {
|
|||
DeduplicationItemTypes,
|
||||
ICheckProcessedContextData,
|
||||
AiEvent,
|
||||
ISupplyDataFunctions,
|
||||
} from 'n8n-workflow';
|
||||
import {
|
||||
NodeConnectionType,
|
||||
|
@ -2710,12 +2711,14 @@ export async function getInputConnectionData(
|
|||
runExecutionData: IRunExecutionData,
|
||||
runIndex: number,
|
||||
connectionInputData: INodeExecutionData[],
|
||||
inputData: ITaskDataConnections,
|
||||
additionalData: IWorkflowExecuteAdditionalData,
|
||||
executeData: IExecuteData | undefined,
|
||||
executeData: IExecuteData,
|
||||
mode: WorkflowExecuteMode,
|
||||
closeFunctions: CloseFunction[],
|
||||
inputName: NodeConnectionType,
|
||||
itemIndex: number,
|
||||
abortSignal?: AbortSignal,
|
||||
): Promise<unknown> {
|
||||
const node = context.getNode();
|
||||
const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion);
|
||||
|
@ -2763,74 +2766,20 @@ export async function getInputConnectionData(
|
|||
connectedNode.typeVersion,
|
||||
);
|
||||
|
||||
// TODO: create a new context object here based on the type of `connectedNode`, and avoid using `Object.assign` on context objects
|
||||
// https://linear.app/n8n/issue/CAT-269
|
||||
const newContext = Object.assign({}, context);
|
||||
|
||||
newContext.getNodeParameter = (
|
||||
parameterName: string,
|
||||
itemIndex: number,
|
||||
fallbackValue?: any,
|
||||
options?: IGetNodeParameterOptions,
|
||||
) => {
|
||||
return getNodeParameter(
|
||||
// eslint-disable-next-line @typescript-eslint/no-use-before-define
|
||||
const newContext = getSupplyDataFunctions(
|
||||
workflow,
|
||||
runExecutionData,
|
||||
runIndex,
|
||||
connectionInputData,
|
||||
inputData,
|
||||
connectedNode,
|
||||
parameterName,
|
||||
itemIndex,
|
||||
mode,
|
||||
getAdditionalKeys(additionalData, mode, runExecutionData),
|
||||
executeData,
|
||||
fallbackValue,
|
||||
{ ...(options || {}), contextNode: node },
|
||||
) as any;
|
||||
};
|
||||
|
||||
// TODO: Check what else should be overwritten
|
||||
newContext.getNode = () => {
|
||||
return deepCopy(connectedNode);
|
||||
};
|
||||
|
||||
newContext.getCredentials = async (key: string) => {
|
||||
try {
|
||||
return await getCredentials(
|
||||
workflow,
|
||||
connectedNode,
|
||||
key,
|
||||
additionalData,
|
||||
mode,
|
||||
executeData,
|
||||
runExecutionData,
|
||||
runIndex,
|
||||
connectionInputData,
|
||||
itemIndex,
|
||||
mode,
|
||||
closeFunctions,
|
||||
abortSignal,
|
||||
);
|
||||
} catch (error) {
|
||||
// Display the error on the node which is causing it
|
||||
|
||||
let currentNodeRunIndex = 0;
|
||||
if (runExecutionData.resultData.runData.hasOwnProperty(node.name)) {
|
||||
currentNodeRunIndex = runExecutionData.resultData.runData[node.name].length;
|
||||
}
|
||||
|
||||
await addExecutionDataFunctions(
|
||||
'input',
|
||||
connectedNode.name,
|
||||
error,
|
||||
runExecutionData,
|
||||
inputName,
|
||||
additionalData,
|
||||
node.name,
|
||||
runIndex,
|
||||
currentNodeRunIndex,
|
||||
);
|
||||
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
if (!nodeType.supplyData) {
|
||||
if (nodeType.description.outputs.includes(NodeConnectionType.AiTool)) {
|
||||
|
@ -3250,9 +3199,7 @@ export function getExecuteFunctions(
|
|||
runExecutionData,
|
||||
runIndex,
|
||||
itemIndex,
|
||||
// TODO: revert this back to `node.name` when we stop using `IExecuteFunctions` as the context object in AI nodes.
|
||||
// https://linear.app/n8n/issue/CAT-269
|
||||
this.getNode().name,
|
||||
node.name,
|
||||
connectionInputData,
|
||||
mode,
|
||||
getAdditionalKeys(additionalData, mode, runExecutionData),
|
||||
|
@ -3295,12 +3242,14 @@ export function getExecuteFunctions(
|
|||
runExecutionData,
|
||||
runIndex,
|
||||
connectionInputData,
|
||||
inputData,
|
||||
additionalData,
|
||||
executeData,
|
||||
mode,
|
||||
closeFunctions,
|
||||
inputName,
|
||||
itemIndex,
|
||||
abortSignal,
|
||||
);
|
||||
},
|
||||
|
||||
|
@ -3542,7 +3491,273 @@ export function getExecuteFunctions(
|
|||
})(workflow, runExecutionData, connectionInputData, inputData, node) as IExecuteFunctions;
|
||||
}
|
||||
|
||||
/** @deprecated */
|
||||
export function getSupplyDataFunctions(
|
||||
workflow: Workflow,
|
||||
runExecutionData: IRunExecutionData,
|
||||
runIndex: number,
|
||||
connectionInputData: INodeExecutionData[],
|
||||
inputData: ITaskDataConnections,
|
||||
node: INode,
|
||||
additionalData: IWorkflowExecuteAdditionalData,
|
||||
executeData: IExecuteData,
|
||||
mode: WorkflowExecuteMode,
|
||||
closeFunctions: CloseFunction[],
|
||||
abortSignal?: AbortSignal,
|
||||
): ISupplyDataFunctions {
|
||||
return {
|
||||
...getCommonWorkflowFunctions(workflow, node, additionalData),
|
||||
...executionCancellationFunctions(abortSignal),
|
||||
getMode: () => mode,
|
||||
getCredentials: async (type, itemIndex) =>
|
||||
await getCredentials(
|
||||
workflow,
|
||||
node,
|
||||
type,
|
||||
additionalData,
|
||||
mode,
|
||||
executeData,
|
||||
runExecutionData,
|
||||
runIndex,
|
||||
connectionInputData,
|
||||
itemIndex,
|
||||
),
|
||||
continueOnFail: () => continueOnFail(node),
|
||||
evaluateExpression: (expression: string, itemIndex: number) =>
|
||||
workflow.expression.resolveSimpleParameterValue(
|
||||
`=${expression}`,
|
||||
{},
|
||||
runExecutionData,
|
||||
runIndex,
|
||||
itemIndex,
|
||||
node.name,
|
||||
connectionInputData,
|
||||
mode,
|
||||
getAdditionalKeys(additionalData, mode, runExecutionData),
|
||||
executeData,
|
||||
),
|
||||
executeWorkflow: async (
|
||||
workflowInfo: IExecuteWorkflowInfo,
|
||||
inputData?: INodeExecutionData[],
|
||||
parentCallbackManager?: CallbackManager,
|
||||
) =>
|
||||
await additionalData
|
||||
.executeWorkflow(workflowInfo, additionalData, {
|
||||
parentWorkflowId: workflow.id?.toString(),
|
||||
inputData,
|
||||
parentWorkflowSettings: workflow.settings,
|
||||
node,
|
||||
parentCallbackManager,
|
||||
})
|
||||
.then(
|
||||
async (result) =>
|
||||
await Container.get(BinaryDataService).duplicateBinaryData(
|
||||
workflow.id,
|
||||
additionalData.executionId!,
|
||||
result,
|
||||
),
|
||||
),
|
||||
getNodeOutputs() {
|
||||
const nodeType = workflow.nodeTypes.getByNameAndVersion(node.type, node.typeVersion);
|
||||
return NodeHelpers.getNodeOutputs(workflow, node, nodeType.description).map((output) => {
|
||||
if (typeof output === 'string') {
|
||||
return {
|
||||
type: output,
|
||||
};
|
||||
}
|
||||
return output;
|
||||
});
|
||||
},
|
||||
async getInputConnectionData(
|
||||
inputName: NodeConnectionType,
|
||||
itemIndex: number,
|
||||
): Promise<unknown> {
|
||||
return await getInputConnectionData(
|
||||
this,
|
||||
workflow,
|
||||
runExecutionData,
|
||||
runIndex,
|
||||
connectionInputData,
|
||||
inputData,
|
||||
additionalData,
|
||||
executeData,
|
||||
mode,
|
||||
closeFunctions,
|
||||
inputName,
|
||||
itemIndex,
|
||||
abortSignal,
|
||||
);
|
||||
},
|
||||
getInputData: (inputIndex = 0, inputName = 'main') => {
|
||||
if (!inputData.hasOwnProperty(inputName)) {
|
||||
// Return empty array because else it would throw error when nothing is connected to input
|
||||
return [];
|
||||
}
|
||||
|
||||
// TODO: Check if nodeType has input with that index defined
|
||||
if (inputData[inputName].length < inputIndex) {
|
||||
throw new ApplicationError('Could not get input with given index', {
|
||||
extra: { inputIndex, inputName },
|
||||
});
|
||||
}
|
||||
|
||||
if (inputData[inputName][inputIndex] === null) {
|
||||
throw new ApplicationError('Value of input was not set', {
|
||||
extra: { inputIndex, inputName },
|
||||
});
|
||||
}
|
||||
|
||||
return inputData[inputName][inputIndex];
|
||||
},
|
||||
getNodeParameter: ((
|
||||
parameterName: string,
|
||||
itemIndex: number,
|
||||
fallbackValue?: any,
|
||||
options?: IGetNodeParameterOptions,
|
||||
) =>
|
||||
getNodeParameter(
|
||||
workflow,
|
||||
runExecutionData,
|
||||
runIndex,
|
||||
connectionInputData,
|
||||
node,
|
||||
parameterName,
|
||||
itemIndex,
|
||||
mode,
|
||||
getAdditionalKeys(additionalData, mode, runExecutionData),
|
||||
executeData,
|
||||
fallbackValue,
|
||||
options,
|
||||
)) as ISupplyDataFunctions['getNodeParameter'],
|
||||
getWorkflowDataProxy: (itemIndex: number) =>
|
||||
new WorkflowDataProxy(
|
||||
workflow,
|
||||
runExecutionData,
|
||||
runIndex,
|
||||
itemIndex,
|
||||
node.name,
|
||||
connectionInputData,
|
||||
{},
|
||||
mode,
|
||||
getAdditionalKeys(additionalData, mode, runExecutionData),
|
||||
executeData,
|
||||
).getDataProxy(),
|
||||
sendMessageToUI(...args: any[]): void {
|
||||
if (mode !== 'manual') {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
if (additionalData.sendDataToUI) {
|
||||
args = args.map((arg) => {
|
||||
// prevent invalid dates from being logged as null
|
||||
if (arg.isLuxonDateTime && arg.invalidReason) return { ...arg };
|
||||
|
||||
// log valid dates in human readable format, as in browser
|
||||
if (arg.isLuxonDateTime) return new Date(arg.ts).toString();
|
||||
if (arg instanceof Date) return arg.toString();
|
||||
|
||||
return arg;
|
||||
});
|
||||
|
||||
additionalData.sendDataToUI('sendConsoleMessage', {
|
||||
source: `[Node: "${node.name}"]`,
|
||||
messages: args,
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
Logger.warn(`There was a problem sending message to UI: ${error.message}`);
|
||||
}
|
||||
},
|
||||
logAiEvent: (eventName: AiEvent, msg: string) =>
|
||||
additionalData.logAiEvent(eventName, {
|
||||
executionId: additionalData.executionId ?? 'unsaved-execution',
|
||||
nodeName: node.name,
|
||||
workflowName: workflow.name ?? 'Unnamed workflow',
|
||||
nodeType: node.type,
|
||||
workflowId: workflow.id ?? 'unsaved-workflow',
|
||||
msg,
|
||||
}),
|
||||
addInputData(
|
||||
connectionType: NodeConnectionType,
|
||||
data: INodeExecutionData[][],
|
||||
): { index: number } {
|
||||
const nodeName = this.getNode().name;
|
||||
let currentNodeRunIndex = 0;
|
||||
if (runExecutionData.resultData.runData.hasOwnProperty(nodeName)) {
|
||||
currentNodeRunIndex = runExecutionData.resultData.runData[nodeName].length;
|
||||
}
|
||||
|
||||
addExecutionDataFunctions(
|
||||
'input',
|
||||
this.getNode().name,
|
||||
data,
|
||||
runExecutionData,
|
||||
connectionType,
|
||||
additionalData,
|
||||
node.name,
|
||||
runIndex,
|
||||
currentNodeRunIndex,
|
||||
).catch((error) => {
|
||||
Logger.warn(
|
||||
`There was a problem logging input data of node "${this.getNode().name}": ${
|
||||
error.message
|
||||
}`,
|
||||
);
|
||||
});
|
||||
|
||||
return { index: currentNodeRunIndex };
|
||||
},
|
||||
addOutputData(
|
||||
connectionType: NodeConnectionType,
|
||||
currentNodeRunIndex: number,
|
||||
data: INodeExecutionData[][],
|
||||
): void {
|
||||
addExecutionDataFunctions(
|
||||
'output',
|
||||
this.getNode().name,
|
||||
data,
|
||||
runExecutionData,
|
||||
connectionType,
|
||||
additionalData,
|
||||
node.name,
|
||||
runIndex,
|
||||
currentNodeRunIndex,
|
||||
).catch((error) => {
|
||||
Logger.warn(
|
||||
`There was a problem logging output data of node "${this.getNode().name}": ${
|
||||
error.message
|
||||
}`,
|
||||
);
|
||||
});
|
||||
},
|
||||
helpers: {
|
||||
createDeferredPromise,
|
||||
copyInputItems,
|
||||
...getRequestHelperFunctions(
|
||||
workflow,
|
||||
node,
|
||||
additionalData,
|
||||
runExecutionData,
|
||||
connectionInputData,
|
||||
),
|
||||
...getSSHTunnelFunctions(),
|
||||
...getFileSystemHelperFunctions(node),
|
||||
...getBinaryHelperFunctions(additionalData, workflow.id),
|
||||
...getCheckProcessedHelperFunctions(workflow, node),
|
||||
assertBinaryData: (itemIndex, propertyName) =>
|
||||
assertBinaryData(inputData, node, itemIndex, propertyName, 0),
|
||||
getBinaryDataBuffer: async (itemIndex, propertyName) =>
|
||||
await getBinaryDataBuffer(inputData, itemIndex, propertyName, 0),
|
||||
|
||||
returnJsonArray,
|
||||
normalizeItems,
|
||||
constructExecutionMetaData,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the execute functions regular nodes have access to when single-function is defined.
|
||||
*/
|
||||
export function getExecuteSingleFunctions(
|
||||
workflow: Workflow,
|
||||
node: INode,
|
||||
|
|
|
@ -52,15 +52,15 @@ describe('getSourceDataGroups', () => {
|
|||
expect(groups).toHaveLength(2);
|
||||
|
||||
const group1 = groups[0];
|
||||
expect(group1).toHaveLength(2);
|
||||
expect(group1[0]).toEqual({
|
||||
expect(group1.connections).toHaveLength(2);
|
||||
expect(group1.connections[0]).toEqual({
|
||||
from: source1,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
inputIndex: 0,
|
||||
to: node,
|
||||
});
|
||||
expect(group1[1]).toEqual({
|
||||
expect(group1.connections[1]).toEqual({
|
||||
from: source3,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
|
@ -69,8 +69,8 @@ describe('getSourceDataGroups', () => {
|
|||
});
|
||||
|
||||
const group2 = groups[1];
|
||||
expect(group2).toHaveLength(1);
|
||||
expect(group2[0]).toEqual({
|
||||
expect(group2.connections).toHaveLength(1);
|
||||
expect(group2.connections[0]).toEqual({
|
||||
from: source2,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
|
@ -116,15 +116,15 @@ describe('getSourceDataGroups', () => {
|
|||
expect(groups).toHaveLength(2);
|
||||
|
||||
const group1 = groups[0];
|
||||
expect(group1).toHaveLength(2);
|
||||
expect(group1[0]).toEqual({
|
||||
expect(group1.connections).toHaveLength(2);
|
||||
expect(group1.connections[0]).toEqual({
|
||||
from: source1,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
inputIndex: 0,
|
||||
to: node,
|
||||
});
|
||||
expect(group1[1]).toEqual({
|
||||
expect(group1.connections[1]).toEqual({
|
||||
from: source3,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
|
@ -133,8 +133,8 @@ describe('getSourceDataGroups', () => {
|
|||
});
|
||||
|
||||
const group2 = groups[1];
|
||||
expect(group2).toHaveLength(1);
|
||||
expect(group2[0]).toEqual({
|
||||
expect(group2.connections).toHaveLength(1);
|
||||
expect(group2.connections[0]).toEqual({
|
||||
from: source2,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
|
@ -152,7 +152,7 @@ describe('getSourceDataGroups', () => {
|
|||
//┌───────┐1 │ └────┘
|
||||
//│source3├────┘
|
||||
//└───────┘
|
||||
it('groups sources into possibly complete sets if all of them have data', () => {
|
||||
it('groups sources into one complete set with 2 connections and one incomplete set with 1 connection', () => {
|
||||
// ARRANGE
|
||||
const source1 = createNodeData({ name: 'source1' });
|
||||
const source2 = createNodeData({ name: 'source2' });
|
||||
|
@ -176,23 +176,341 @@ describe('getSourceDataGroups', () => {
|
|||
const groups = getSourceDataGroups(graph, node, runData, pinnedData);
|
||||
|
||||
// ASSERT
|
||||
expect(groups).toHaveLength(1);
|
||||
|
||||
const group1 = groups[0];
|
||||
expect(group1).toHaveLength(2);
|
||||
expect(group1[0]).toEqual({
|
||||
const completeGroups = groups.filter((g) => g.complete);
|
||||
{
|
||||
expect(completeGroups).toHaveLength(1);
|
||||
const group1 = completeGroups[0];
|
||||
expect(group1.connections).toHaveLength(2);
|
||||
expect(group1.connections[0]).toEqual({
|
||||
from: source2,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
inputIndex: 0,
|
||||
to: node,
|
||||
});
|
||||
expect(group1[1]).toEqual({
|
||||
expect(group1.connections[1]).toEqual({
|
||||
from: source3,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
inputIndex: 1,
|
||||
to: node,
|
||||
});
|
||||
}
|
||||
|
||||
const incompleteGroups = groups.filter((g) => !g.complete);
|
||||
{
|
||||
expect(incompleteGroups).toHaveLength(1);
|
||||
const group1 = incompleteGroups[0];
|
||||
expect(group1.connections).toHaveLength(1);
|
||||
expect(group1.connections[0]).toEqual({
|
||||
from: source1,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
inputIndex: 0,
|
||||
to: node,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
//┌───────┐0
|
||||
//│source1├───────┐
|
||||
//└───────┘ │
|
||||
// │
|
||||
//┌───────┐1 │
|
||||
//│source2├───────┤ ┌────┐
|
||||
//└───────┘ └────► │
|
||||
// │node│
|
||||
//┌───────┐1 ┌────► │
|
||||
//│source3├───────┤ └────┘
|
||||
//└───────┘ │
|
||||
// │
|
||||
//┌───────┐0 │
|
||||
//│source4├───────┘
|
||||
//└───────┘
|
||||
it('groups sources into one complete set with 2 connections and one incomplete set with 2 connections', () => {
|
||||
// ARRANGE
|
||||
const source1 = createNodeData({ name: 'source1' });
|
||||
const source2 = createNodeData({ name: 'source2' });
|
||||
const source3 = createNodeData({ name: 'source3' });
|
||||
const source4 = createNodeData({ name: 'source4' });
|
||||
const node = createNodeData({ name: 'node' });
|
||||
|
||||
const graph = new DirectedGraph()
|
||||
.addNodes(source1, source2, source3, source4, node)
|
||||
.addConnections(
|
||||
{ from: source1, to: node, inputIndex: 0 },
|
||||
{ from: source2, to: node, inputIndex: 0 },
|
||||
{ from: source3, to: node, inputIndex: 1 },
|
||||
{ from: source4, to: node, inputIndex: 1 },
|
||||
);
|
||||
const runData: IRunData = {
|
||||
[source2.name]: [toITaskData([{ data: { value: 1 } }])],
|
||||
[source3.name]: [toITaskData([{ data: { value: 1 } }])],
|
||||
};
|
||||
const pinnedData: IPinData = {};
|
||||
|
||||
// ACT
|
||||
const groups = getSourceDataGroups(graph, node, runData, pinnedData);
|
||||
|
||||
// ASSERT
|
||||
const completeGroups = groups.filter((g) => g.complete);
|
||||
{
|
||||
expect(completeGroups).toHaveLength(1);
|
||||
const group1 = completeGroups[0];
|
||||
expect(group1.connections).toHaveLength(2);
|
||||
expect(group1.connections[0]).toEqual({
|
||||
from: source2,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
inputIndex: 0,
|
||||
to: node,
|
||||
});
|
||||
expect(group1.connections[1]).toEqual({
|
||||
from: source3,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
inputIndex: 1,
|
||||
to: node,
|
||||
});
|
||||
}
|
||||
|
||||
const incompleteGroups = groups.filter((g) => !g.complete);
|
||||
{
|
||||
expect(incompleteGroups).toHaveLength(1);
|
||||
const group1 = incompleteGroups[0];
|
||||
expect(group1.connections).toHaveLength(2);
|
||||
expect(group1.connections[0]).toEqual({
|
||||
from: source1,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
inputIndex: 0,
|
||||
to: node,
|
||||
});
|
||||
expect(group1.connections[1]).toEqual({
|
||||
from: source4,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
inputIndex: 1,
|
||||
to: node,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// ┌───────┐1
|
||||
// │source1├───────┐
|
||||
// └───────┘ │
|
||||
// │
|
||||
// ┌───────┐0 │
|
||||
// │source2├───────┤ ┌────┐
|
||||
// └───────┘ └────► │
|
||||
// │node│
|
||||
// ┌───────┐0 ┌────► │
|
||||
// │source3├───────┘ └────┘
|
||||
// └───────┘
|
||||
it('groups sources into two incomplete sets, one with 1 connection without data and one with 2 connections, one with data and one without', () => {
|
||||
// ARRANGE
|
||||
const source1 = createNodeData({ name: 'source1' });
|
||||
const source2 = createNodeData({ name: 'source2' });
|
||||
const source3 = createNodeData({ name: 'source3' });
|
||||
const node = createNodeData({ name: 'node' });
|
||||
|
||||
const graph = new DirectedGraph()
|
||||
.addNodes(source1, source2, source3, node)
|
||||
.addConnections(
|
||||
{ from: source1, to: node, inputIndex: 0 },
|
||||
{ from: source2, to: node, inputIndex: 0 },
|
||||
{ from: source3, to: node, inputIndex: 1 },
|
||||
);
|
||||
const runData: IRunData = {
|
||||
[source1.name]: [toITaskData([{ data: { node: 'source1' } }])],
|
||||
};
|
||||
const pinnedData: IPinData = {};
|
||||
|
||||
// ACT
|
||||
const groups = getSourceDataGroups(graph, node, runData, pinnedData);
|
||||
|
||||
// ASSERT
|
||||
const completeGroups = groups.filter((g) => g.complete);
|
||||
expect(completeGroups).toHaveLength(0);
|
||||
|
||||
const incompleteGroups = groups.filter((g) => !g.complete);
|
||||
expect(incompleteGroups).toHaveLength(2);
|
||||
|
||||
const group1 = incompleteGroups[0];
|
||||
expect(group1.connections).toHaveLength(2);
|
||||
expect(group1.connections[0]).toEqual({
|
||||
from: source1,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
inputIndex: 0,
|
||||
to: node,
|
||||
});
|
||||
expect(group1.connections[1]).toEqual({
|
||||
from: source3,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
inputIndex: 1,
|
||||
to: node,
|
||||
});
|
||||
|
||||
const group2 = incompleteGroups[1];
|
||||
expect(group2.connections).toHaveLength(1);
|
||||
expect(group2.connections[0]).toEqual({
|
||||
from: source2,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
inputIndex: 0,
|
||||
to: node,
|
||||
});
|
||||
});
|
||||
|
||||
// ┌─────┐1 ►►
|
||||
// ┌─►│Node1┼──┐ ┌─────┐
|
||||
// ┌───────┐1│ └─────┘ └──►│ │
|
||||
// │Trigger├─┤ │Node3│
|
||||
// └───────┘ │ ┌─────┐0 ┌──►│ │
|
||||
// └─►│Node2├──┘ └─────┘
|
||||
// └─────┘
|
||||
test('return an incomplete group when there is no data on input 2', () => {
|
||||
// ARRANGE
|
||||
const trigger = createNodeData({ name: 'trigger' });
|
||||
const node1 = createNodeData({ name: 'node1' });
|
||||
const node2 = createNodeData({ name: 'node2' });
|
||||
const node3 = createNodeData({ name: 'node3' });
|
||||
const graph = new DirectedGraph()
|
||||
.addNodes(trigger, node1, node2, node3)
|
||||
.addConnections(
|
||||
{ from: trigger, to: node1 },
|
||||
{ from: trigger, to: node2 },
|
||||
{ from: node1, to: node3, inputIndex: 0 },
|
||||
{ from: node2, to: node3, inputIndex: 1 },
|
||||
);
|
||||
const runData: IRunData = {
|
||||
[trigger.name]: [toITaskData([{ data: { nodeName: 'trigger' } }])],
|
||||
[node1.name]: [toITaskData([{ data: { nodeName: 'node1' } }])],
|
||||
};
|
||||
const pinData: IPinData = {};
|
||||
|
||||
// ACT
|
||||
const groups = getSourceDataGroups(graph, node3, runData, pinData);
|
||||
|
||||
// ASSERT
|
||||
expect(groups).toHaveLength(1);
|
||||
const group1 = groups[0];
|
||||
expect(group1.connections).toHaveLength(2);
|
||||
expect(group1.complete).toEqual(false);
|
||||
});
|
||||
|
||||
// ┌─────┐0 ►►
|
||||
// ┌─►│Node1┼──┐ ┌─────┐
|
||||
// ┌───────┐1│ └─────┘ └──►│ │
|
||||
// │Trigger├─┤ │Node3│
|
||||
// └───────┘ │ ┌─────┐1 ┌──►│ │
|
||||
// └─►│Node2├──┘ └─────┘
|
||||
// └─────┘
|
||||
test('return an incomplete group when there is no data on input 1', () => {
|
||||
// ARRANGE
|
||||
const trigger = createNodeData({ name: 'trigger' });
|
||||
const node1 = createNodeData({ name: 'node1' });
|
||||
const node2 = createNodeData({ name: 'node2' });
|
||||
const node3 = createNodeData({ name: 'node3' });
|
||||
const graph = new DirectedGraph()
|
||||
.addNodes(trigger, node1, node2, node3)
|
||||
.addConnections(
|
||||
{ from: trigger, to: node1 },
|
||||
{ from: trigger, to: node2 },
|
||||
{ from: node1, to: node3, inputIndex: 0 },
|
||||
{ from: node2, to: node3, inputIndex: 1 },
|
||||
);
|
||||
const runData: IRunData = {
|
||||
[trigger.name]: [toITaskData([{ data: { nodeName: 'trigger' } }])],
|
||||
[node2.name]: [toITaskData([{ data: { nodeName: 'node2' } }])],
|
||||
};
|
||||
const pinData: IPinData = {};
|
||||
|
||||
// ACT
|
||||
const groups = getSourceDataGroups(graph, node3, runData, pinData);
|
||||
|
||||
// ASSERT
|
||||
expect(groups).toHaveLength(1);
|
||||
const group1 = groups[0];
|
||||
expect(group1.connections).toHaveLength(2);
|
||||
expect(group1.complete).toEqual(false);
|
||||
});
|
||||
|
||||
it('terminates with negative input indexes', () => {
|
||||
// ARRANGE
|
||||
const source1 = createNodeData({ name: 'source1' });
|
||||
const node = createNodeData({ name: 'node' });
|
||||
|
||||
const graph = new DirectedGraph()
|
||||
.addNodes(source1, node)
|
||||
.addConnections({ from: source1, to: node, inputIndex: -1 });
|
||||
const runData: IRunData = {
|
||||
[source1.name]: [toITaskData([{ data: { node: source1.name } }])],
|
||||
};
|
||||
const pinnedData: IPinData = {};
|
||||
|
||||
// ACT
|
||||
const groups = getSourceDataGroups(graph, node, runData, pinnedData);
|
||||
|
||||
// ASSERT
|
||||
expect(groups).toHaveLength(1);
|
||||
const group1 = groups[0];
|
||||
expect(group1.connections).toHaveLength(1);
|
||||
expect(group1.connections[0]).toEqual({
|
||||
from: source1,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
inputIndex: -1,
|
||||
to: node,
|
||||
});
|
||||
});
|
||||
|
||||
it('terminates inputs with missing connections', () => {
|
||||
// ARRANGE
|
||||
const source1 = createNodeData({ name: 'source1' });
|
||||
const node = createNodeData({ name: 'node' });
|
||||
|
||||
const graph = new DirectedGraph()
|
||||
.addNodes(source1, node)
|
||||
.addConnections({ from: source1, to: node, inputIndex: 1 });
|
||||
const runData: IRunData = {
|
||||
[source1.name]: [toITaskData([{ data: { node: source1.name } }])],
|
||||
};
|
||||
const pinnedData: IPinData = {};
|
||||
|
||||
// ACT
|
||||
const groups = getSourceDataGroups(graph, node, runData, pinnedData);
|
||||
|
||||
// ASSERT
|
||||
expect(groups).toHaveLength(1);
|
||||
const group1 = groups[0];
|
||||
expect(group1.connections).toHaveLength(1);
|
||||
expect(group1.connections[0]).toEqual({
|
||||
from: source1,
|
||||
outputIndex: 0,
|
||||
type: NodeConnectionType.Main,
|
||||
inputIndex: 1,
|
||||
to: node,
|
||||
});
|
||||
});
|
||||
|
||||
it('terminates if the graph has no connections', () => {
|
||||
// ARRANGE
|
||||
const source1 = createNodeData({ name: 'source1' });
|
||||
const node = createNodeData({ name: 'node' });
|
||||
|
||||
const graph = new DirectedGraph().addNodes(source1, node);
|
||||
const runData: IRunData = {
|
||||
[source1.name]: [toITaskData([{ data: { node: source1.name } }])],
|
||||
};
|
||||
const pinnedData: IPinData = {};
|
||||
|
||||
// ACT
|
||||
const groups = getSourceDataGroups(graph, node, runData, pinnedData);
|
||||
|
||||
// ASSERT
|
||||
expect(groups).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -10,9 +10,19 @@
|
|||
// PD denotes that the node has pinned data
|
||||
|
||||
import { AssertionError } from 'assert';
|
||||
import { type IPinData, type IRunData } from 'n8n-workflow';
|
||||
import type {
|
||||
INodeExecutionData,
|
||||
ISourceData,
|
||||
IWaitingForExecution,
|
||||
IWaitingForExecutionSource,
|
||||
} from 'n8n-workflow';
|
||||
import { NodeConnectionType, type IPinData, type IRunData } from 'n8n-workflow';
|
||||
|
||||
import { recreateNodeExecutionStack } from '@/PartialExecutionUtils/recreateNodeExecutionStack';
|
||||
import {
|
||||
addWaitingExecution,
|
||||
addWaitingExecutionSource,
|
||||
recreateNodeExecutionStack,
|
||||
} from '@/PartialExecutionUtils/recreateNodeExecutionStack';
|
||||
|
||||
import { createNodeData, toITaskData } from './helpers';
|
||||
import { DirectedGraph } from '../DirectedGraph';
|
||||
|
@ -41,7 +51,7 @@ describe('recreateNodeExecutionStack', () => {
|
|||
|
||||
// ACT
|
||||
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
|
||||
recreateNodeExecutionStack(workflow, startNodes, node, runData, pinData);
|
||||
recreateNodeExecutionStack(workflow, startNodes, runData, pinData);
|
||||
|
||||
// ASSERT
|
||||
expect(nodeExecutionStack).toHaveLength(1);
|
||||
|
@ -62,17 +72,8 @@ describe('recreateNodeExecutionStack', () => {
|
|||
},
|
||||
},
|
||||
]);
|
||||
|
||||
expect(waitingExecution).toEqual({ node: { '0': { main: [[{ json: { value: 1 } }]] } } });
|
||||
expect(waitingExecutionSource).toEqual({
|
||||
node: {
|
||||
'0': {
|
||||
main: [
|
||||
{ previousNode: 'trigger', previousNodeOutput: undefined, previousNodeRun: undefined },
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(waitingExecution).toEqual({});
|
||||
expect(waitingExecutionSource).toEqual({});
|
||||
});
|
||||
|
||||
// ►►
|
||||
|
@ -93,7 +94,7 @@ describe('recreateNodeExecutionStack', () => {
|
|||
|
||||
// ACT
|
||||
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
|
||||
recreateNodeExecutionStack(workflow, startNodes, node, runData, pinData);
|
||||
recreateNodeExecutionStack(workflow, startNodes, runData, pinData);
|
||||
|
||||
// ASSERT
|
||||
expect(nodeExecutionStack).toHaveLength(1);
|
||||
|
@ -105,8 +106,8 @@ describe('recreateNodeExecutionStack', () => {
|
|||
},
|
||||
]);
|
||||
|
||||
expect(waitingExecution).toEqual({ node: { '0': { main: [null] } } });
|
||||
expect(waitingExecutionSource).toEqual({ node: { '0': { main: [null] } } });
|
||||
expect(waitingExecution).toEqual({});
|
||||
expect(waitingExecutionSource).toEqual({});
|
||||
});
|
||||
|
||||
// PinData ►►
|
||||
|
@ -129,7 +130,7 @@ describe('recreateNodeExecutionStack', () => {
|
|||
|
||||
// ACT
|
||||
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
|
||||
recreateNodeExecutionStack(workflow, startNodes, node, runData, pinData);
|
||||
recreateNodeExecutionStack(workflow, startNodes, runData, pinData);
|
||||
|
||||
// ASSERT
|
||||
expect(nodeExecutionStack).toHaveLength(1);
|
||||
|
@ -151,8 +152,8 @@ describe('recreateNodeExecutionStack', () => {
|
|||
},
|
||||
]);
|
||||
|
||||
expect(waitingExecution).toEqual({ node: { '0': { main: [null] } } });
|
||||
expect(waitingExecutionSource).toEqual({ node: { '0': { main: [null] } } });
|
||||
expect(waitingExecution).toEqual({});
|
||||
expect(waitingExecutionSource).toEqual({});
|
||||
});
|
||||
|
||||
// XX ►►
|
||||
|
@ -176,9 +177,9 @@ describe('recreateNodeExecutionStack', () => {
|
|||
const pinData = {};
|
||||
|
||||
// ACT & ASSERT
|
||||
expect(() =>
|
||||
recreateNodeExecutionStack(graph, startNodes, node2, runData, pinData),
|
||||
).toThrowError(AssertionError);
|
||||
expect(() => recreateNodeExecutionStack(graph, startNodes, runData, pinData)).toThrowError(
|
||||
AssertionError,
|
||||
);
|
||||
});
|
||||
|
||||
// ►►
|
||||
|
@ -214,10 +215,9 @@ describe('recreateNodeExecutionStack', () => {
|
|||
|
||||
// ACT
|
||||
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
|
||||
recreateNodeExecutionStack(graph, startNodes, node3, runData, pinData);
|
||||
recreateNodeExecutionStack(graph, startNodes, runData, pinData);
|
||||
|
||||
// ASSERT
|
||||
|
||||
expect(nodeExecutionStack).toEqual([
|
||||
{
|
||||
data: { main: [[{ json: { value: 1 } }]] },
|
||||
|
@ -251,19 +251,8 @@ describe('recreateNodeExecutionStack', () => {
|
|||
},
|
||||
]);
|
||||
|
||||
expect(waitingExecution).toEqual({
|
||||
node3: { '0': { main: [[{ json: { value: 1 } }], [{ json: { value: 1 } }]] } },
|
||||
});
|
||||
expect(waitingExecutionSource).toEqual({
|
||||
node3: {
|
||||
'0': {
|
||||
main: [
|
||||
{ previousNode: 'node1', previousNodeOutput: undefined, previousNodeRun: undefined },
|
||||
{ previousNode: 'node2', previousNodeOutput: undefined, previousNodeRun: undefined },
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(waitingExecution).toEqual({});
|
||||
expect(waitingExecutionSource).toEqual({});
|
||||
});
|
||||
|
||||
// ┌─────┐1 ►►
|
||||
|
@ -299,7 +288,7 @@ describe('recreateNodeExecutionStack', () => {
|
|||
|
||||
// ACT
|
||||
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
|
||||
recreateNodeExecutionStack(graph, startNodes, node3, runData, pinData);
|
||||
recreateNodeExecutionStack(graph, startNodes, runData, pinData);
|
||||
|
||||
// ASSERT
|
||||
expect(nodeExecutionStack).toHaveLength(1);
|
||||
|
@ -314,22 +303,515 @@ describe('recreateNodeExecutionStack', () => {
|
|||
},
|
||||
});
|
||||
|
||||
expect(waitingExecution).toEqual({});
|
||||
expect(waitingExecutionSource).toEqual({});
|
||||
});
|
||||
|
||||
// ┌─────┐ ┌─────┐
|
||||
// ┌──►node1┼────┬──────► │
|
||||
// │ └─────┘ │ │merge│
|
||||
// │ │ ┌───► │
|
||||
// ├─────────────┘ │ └─────┘
|
||||
// │ │
|
||||
//┌───────┐ │ ┌─────┐ │
|
||||
//│trigger├───┴────►node2├─────┘
|
||||
//└───────┘ └─────┘
|
||||
describe('multiple inputs', () => {
|
||||
// ARRANGE
|
||||
const trigger = createNodeData({ name: 'trigger' });
|
||||
const node1 = createNodeData({ name: 'node1' });
|
||||
const node2 = createNodeData({ name: 'node2' });
|
||||
const merge = createNodeData({ name: 'merge' });
|
||||
const graph = new DirectedGraph()
|
||||
.addNodes(trigger, node1, node2, merge)
|
||||
.addConnections(
|
||||
{ from: trigger, to: node1 },
|
||||
{ from: trigger, to: node2 },
|
||||
{ from: trigger, to: merge, inputIndex: 0 },
|
||||
{ from: node1, to: merge, inputIndex: 0 },
|
||||
{ from: node2, to: merge, inputIndex: 1 },
|
||||
);
|
||||
|
||||
test('only the trigger has run data', () => {
|
||||
// ARRANGE
|
||||
const runData: IRunData = {
|
||||
[trigger.name]: [toITaskData([{ data: { node: 'trigger' } }])],
|
||||
};
|
||||
const pinData: IPinData = {};
|
||||
const startNodes = new Set([node1, node2, merge]);
|
||||
|
||||
// ACT
|
||||
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
|
||||
recreateNodeExecutionStack(graph, startNodes, runData, pinData);
|
||||
|
||||
// ASSERT
|
||||
expect(nodeExecutionStack).toHaveLength(2);
|
||||
expect(nodeExecutionStack[0]).toEqual({
|
||||
node: node1,
|
||||
data: { main: [[{ json: { node: 'trigger' } }]] },
|
||||
source: { main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }] },
|
||||
});
|
||||
expect(nodeExecutionStack[1]).toEqual({
|
||||
node: node2,
|
||||
data: { main: [[{ json: { node: 'trigger' } }]] },
|
||||
source: { main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }] },
|
||||
});
|
||||
|
||||
expect(waitingExecution).toEqual({
|
||||
node3: {
|
||||
[merge.name]: {
|
||||
'0': {
|
||||
main: [[{ json: { value: 1 } }]],
|
||||
main: [[{ json: { node: 'trigger' } }]],
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(waitingExecutionSource).toEqual({
|
||||
node3: {
|
||||
[merge.name]: {
|
||||
'0': {
|
||||
main: [
|
||||
{ previousNode: 'node1', previousNodeOutput: undefined, previousNodeRun: undefined },
|
||||
{ previousNode: 'node2', previousNodeOutput: 1, previousNodeRun: undefined },
|
||||
{
|
||||
previousNode: 'trigger',
|
||||
previousNodeOutput: 0,
|
||||
previousNodeRun: 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('the trigger and node1 have run data', () => {
|
||||
// ARRANGE
|
||||
const runData: IRunData = {
|
||||
[trigger.name]: [toITaskData([{ data: { node: 'trigger' } }])],
|
||||
[node1.name]: [toITaskData([{ data: { node: 'node1' } }])],
|
||||
};
|
||||
const pinData: IPinData = {};
|
||||
const startNodes = new Set([node2, merge]);
|
||||
|
||||
// ACT
|
||||
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
|
||||
recreateNodeExecutionStack(graph, startNodes, runData, pinData);
|
||||
|
||||
// ASSERT
|
||||
expect(nodeExecutionStack).toHaveLength(2);
|
||||
expect(nodeExecutionStack[0]).toEqual({
|
||||
node: node2,
|
||||
data: { main: [[{ json: { node: 'trigger' } }]] },
|
||||
source: { main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }] },
|
||||
});
|
||||
expect(nodeExecutionStack[1]).toEqual({
|
||||
node: merge,
|
||||
data: { main: [[{ json: { node: 'trigger' } }]] },
|
||||
source: {
|
||||
main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }],
|
||||
},
|
||||
});
|
||||
|
||||
expect(waitingExecution).toEqual({
|
||||
[merge.name]: {
|
||||
'0': {
|
||||
main: [[{ json: { node: 'node1' } }]],
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(waitingExecutionSource).toEqual({
|
||||
[merge.name]: {
|
||||
'0': {
|
||||
main: [
|
||||
{
|
||||
previousNode: 'node1',
|
||||
previousNodeOutput: 0,
|
||||
previousNodeRun: 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('the trigger and node2 have run data', () => {
|
||||
// ARRANGE
|
||||
const runData: IRunData = {
|
||||
[trigger.name]: [toITaskData([{ data: { node: 'trigger' } }])],
|
||||
[node2.name]: [toITaskData([{ data: { node: 'node2' } }])],
|
||||
};
|
||||
const pinData: IPinData = {};
|
||||
const startNodes = new Set([node1, merge]);
|
||||
|
||||
// ACT
|
||||
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
|
||||
recreateNodeExecutionStack(graph, startNodes, runData, pinData);
|
||||
|
||||
// ASSERT
|
||||
expect(nodeExecutionStack).toHaveLength(2);
|
||||
expect(nodeExecutionStack[0]).toEqual({
|
||||
node: node1,
|
||||
data: { main: [[{ json: { node: 'trigger' } }]] },
|
||||
source: { main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }] },
|
||||
});
|
||||
expect(nodeExecutionStack[1]).toEqual({
|
||||
node: merge,
|
||||
data: { main: [[{ json: { node: 'trigger' } }], [{ json: { node: 'node2' } }]] },
|
||||
source: {
|
||||
main: [
|
||||
{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 },
|
||||
{ previousNode: 'node2', previousNodeOutput: 0, previousNodeRun: 0 },
|
||||
],
|
||||
},
|
||||
});
|
||||
|
||||
expect(waitingExecution).toEqual({});
|
||||
expect(waitingExecutionSource).toEqual({});
|
||||
});
|
||||
|
||||
test('the trigger, node1 and node2 have run data', () => {
|
||||
// ARRANGE
|
||||
const runData: IRunData = {
|
||||
[trigger.name]: [toITaskData([{ data: { node: 'trigger' } }])],
|
||||
[node1.name]: [toITaskData([{ data: { node: 'node1' } }])],
|
||||
[node2.name]: [toITaskData([{ data: { node: 'node2' } }])],
|
||||
};
|
||||
const pinData: IPinData = {};
|
||||
const startNodes = new Set([merge]);
|
||||
|
||||
// ACT
|
||||
const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
|
||||
recreateNodeExecutionStack(graph, startNodes, runData, pinData);
|
||||
|
||||
// ASSERT
|
||||
expect(nodeExecutionStack).toHaveLength(2);
|
||||
expect(nodeExecutionStack[0]).toEqual({
|
||||
node: merge,
|
||||
data: { main: [[{ json: { node: 'node1' } }], [{ json: { node: 'node2' } }]] },
|
||||
source: {
|
||||
main: [
|
||||
{ previousNode: 'node1', previousNodeOutput: 0, previousNodeRun: 0 },
|
||||
{ previousNode: 'node2', previousNodeOutput: 0, previousNodeRun: 0 },
|
||||
],
|
||||
},
|
||||
});
|
||||
expect(nodeExecutionStack[1]).toEqual({
|
||||
node: merge,
|
||||
data: { main: [[{ json: { node: 'trigger' } }]] },
|
||||
source: {
|
||||
main: [{ previousNode: 'trigger', previousNodeOutput: 0, previousNodeRun: 0 }],
|
||||
},
|
||||
});
|
||||
|
||||
expect(waitingExecution).toEqual({});
|
||||
expect(waitingExecutionSource).toEqual({});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('addWaitingExecution', () => {
|
||||
test('allow adding data partially', () => {
|
||||
const waitingExecution: IWaitingForExecution = {};
|
||||
const nodeName1 = 'node 1';
|
||||
const nodeName2 = 'node 2';
|
||||
const executionData: INodeExecutionData[] = [{ json: { item: 1 } }, { json: { item: 2 } }];
|
||||
|
||||
// adding the data for the second input index first
|
||||
{
|
||||
addWaitingExecution(
|
||||
waitingExecution,
|
||||
nodeName1,
|
||||
1, // runIndex
|
||||
NodeConnectionType.Main,
|
||||
1, // inputIndex
|
||||
executionData,
|
||||
);
|
||||
expect(waitingExecution).toEqual({
|
||||
[nodeName1]: {
|
||||
// runIndex
|
||||
1: {
|
||||
[NodeConnectionType.Main]: [undefined, executionData],
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// adding the data for the first input
|
||||
{
|
||||
addWaitingExecution(
|
||||
waitingExecution,
|
||||
nodeName1,
|
||||
1, // runIndex
|
||||
NodeConnectionType.Main,
|
||||
0, // inputIndex
|
||||
executionData,
|
||||
);
|
||||
expect(waitingExecution).toEqual({
|
||||
[nodeName1]: {
|
||||
// runIndex
|
||||
1: {
|
||||
[NodeConnectionType.Main]: [executionData, executionData],
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// adding data for another node connection type
|
||||
{
|
||||
addWaitingExecution(
|
||||
waitingExecution,
|
||||
nodeName1,
|
||||
1, // runIndex
|
||||
NodeConnectionType.AiMemory,
|
||||
0, // inputIndex
|
||||
executionData,
|
||||
);
|
||||
expect(waitingExecution).toEqual({
|
||||
[nodeName1]: {
|
||||
// runIndex
|
||||
1: {
|
||||
[NodeConnectionType.Main]: [executionData, executionData],
|
||||
[NodeConnectionType.AiMemory]: [executionData],
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// adding data for another run
|
||||
{
|
||||
addWaitingExecution(
|
||||
waitingExecution,
|
||||
nodeName1,
|
||||
0, // runIndex
|
||||
NodeConnectionType.AiChain,
|
||||
0, // inputIndex
|
||||
executionData,
|
||||
);
|
||||
expect(waitingExecution).toEqual({
|
||||
[nodeName1]: {
|
||||
// runIndex
|
||||
0: {
|
||||
[NodeConnectionType.AiChain]: [executionData],
|
||||
},
|
||||
1: {
|
||||
[NodeConnectionType.Main]: [executionData, executionData],
|
||||
[NodeConnectionType.AiMemory]: [executionData],
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// adding data for another node
|
||||
{
|
||||
addWaitingExecution(
|
||||
waitingExecution,
|
||||
nodeName2,
|
||||
0, // runIndex
|
||||
NodeConnectionType.Main,
|
||||
2, // inputIndex
|
||||
executionData,
|
||||
);
|
||||
expect(waitingExecution).toEqual({
|
||||
[nodeName1]: {
|
||||
// runIndex
|
||||
0: {
|
||||
[NodeConnectionType.AiChain]: [executionData],
|
||||
},
|
||||
1: {
|
||||
[NodeConnectionType.Main]: [executionData, executionData],
|
||||
[NodeConnectionType.AiMemory]: [executionData],
|
||||
},
|
||||
},
|
||||
[nodeName2]: {
|
||||
// runIndex
|
||||
0: {
|
||||
[NodeConnectionType.Main]: [undefined, undefined, executionData],
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// allow adding null
|
||||
{
|
||||
addWaitingExecution(
|
||||
waitingExecution,
|
||||
nodeName2,
|
||||
0, // runIndex
|
||||
NodeConnectionType.Main,
|
||||
0, // inputIndex
|
||||
null,
|
||||
);
|
||||
expect(waitingExecution).toEqual({
|
||||
[nodeName2]: {
|
||||
// runIndex
|
||||
0: {
|
||||
[NodeConnectionType.Main]: [null, undefined, executionData],
|
||||
},
|
||||
},
|
||||
[nodeName1]: {
|
||||
// runIndex
|
||||
0: {
|
||||
[NodeConnectionType.AiChain]: [executionData],
|
||||
},
|
||||
1: {
|
||||
[NodeConnectionType.Main]: [executionData, executionData],
|
||||
[NodeConnectionType.AiMemory]: [executionData],
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('addWaitingExecutionSource', () => {
|
||||
test('allow adding data partially', () => {
|
||||
const waitingExecutionSource: IWaitingForExecutionSource = {};
|
||||
const nodeName1 = 'node 1';
|
||||
const nodeName2 = 'node 2';
|
||||
const sourceData: ISourceData = {
|
||||
previousNode: 'node 0',
|
||||
previousNodeRun: 0,
|
||||
previousNodeOutput: 0,
|
||||
};
|
||||
|
||||
// adding the data for the second input index first
|
||||
{
|
||||
addWaitingExecutionSource(
|
||||
waitingExecutionSource,
|
||||
nodeName1,
|
||||
1, // runIndex
|
||||
NodeConnectionType.Main,
|
||||
1, // inputIndex
|
||||
sourceData,
|
||||
);
|
||||
expect(waitingExecutionSource).toEqual({
|
||||
[nodeName1]: {
|
||||
// runIndex
|
||||
1: {
|
||||
[NodeConnectionType.Main]: [undefined, sourceData],
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// adding the data for the first input
|
||||
{
|
||||
addWaitingExecutionSource(
|
||||
waitingExecutionSource,
|
||||
nodeName1,
|
||||
1, // runIndex
|
||||
NodeConnectionType.Main,
|
||||
0, // inputIndex
|
||||
sourceData,
|
||||
);
|
||||
expect(waitingExecutionSource).toEqual({
|
||||
[nodeName1]: {
|
||||
// runIndex
|
||||
1: {
|
||||
[NodeConnectionType.Main]: [sourceData, sourceData],
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// adding data for another node connection type
|
||||
{
|
||||
addWaitingExecutionSource(
|
||||
waitingExecutionSource,
|
||||
nodeName1,
|
||||
1, // runIndex
|
||||
NodeConnectionType.AiMemory,
|
||||
0, // inputIndex
|
||||
sourceData,
|
||||
);
|
||||
expect(waitingExecutionSource).toEqual({
|
||||
[nodeName1]: {
|
||||
// runIndex
|
||||
1: {
|
||||
[NodeConnectionType.Main]: [sourceData, sourceData],
|
||||
[NodeConnectionType.AiMemory]: [sourceData],
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// adding data for another run
|
||||
{
|
||||
addWaitingExecutionSource(
|
||||
waitingExecutionSource,
|
||||
nodeName1,
|
||||
0, // runIndex
|
||||
NodeConnectionType.AiChain,
|
||||
0, // inputIndex
|
||||
sourceData,
|
||||
);
|
||||
expect(waitingExecutionSource).toEqual({
|
||||
[nodeName1]: {
|
||||
// runIndex
|
||||
0: {
|
||||
[NodeConnectionType.AiChain]: [sourceData],
|
||||
},
|
||||
1: {
|
||||
[NodeConnectionType.Main]: [sourceData, sourceData],
|
||||
[NodeConnectionType.AiMemory]: [sourceData],
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// adding data for another node
|
||||
{
|
||||
addWaitingExecutionSource(
|
||||
waitingExecutionSource,
|
||||
nodeName2,
|
||||
0, // runIndex
|
||||
NodeConnectionType.Main,
|
||||
2, // inputIndex
|
||||
sourceData,
|
||||
);
|
||||
expect(waitingExecutionSource).toEqual({
|
||||
[nodeName1]: {
|
||||
// runIndex
|
||||
0: {
|
||||
[NodeConnectionType.AiChain]: [sourceData],
|
||||
},
|
||||
1: {
|
||||
[NodeConnectionType.Main]: [sourceData, sourceData],
|
||||
[NodeConnectionType.AiMemory]: [sourceData],
|
||||
},
|
||||
},
|
||||
[nodeName2]: {
|
||||
// runIndex
|
||||
0: {
|
||||
[NodeConnectionType.Main]: [undefined, undefined, sourceData],
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// allow adding null
|
||||
{
|
||||
addWaitingExecutionSource(
|
||||
waitingExecutionSource,
|
||||
nodeName2,
|
||||
0, // runIndex
|
||||
NodeConnectionType.Main,
|
||||
0, // inputIndex
|
||||
null,
|
||||
);
|
||||
expect(waitingExecutionSource).toEqual({
|
||||
[nodeName1]: {
|
||||
// runIndex
|
||||
0: {
|
||||
[NodeConnectionType.AiChain]: [sourceData],
|
||||
},
|
||||
1: {
|
||||
[NodeConnectionType.Main]: [sourceData, sourceData],
|
||||
[NodeConnectionType.AiMemory]: [sourceData],
|
||||
},
|
||||
},
|
||||
[nodeName2]: {
|
||||
// runIndex
|
||||
0: {
|
||||
[NodeConnectionType.Main]: [null, undefined, sourceData],
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
|
|
@ -20,3 +20,26 @@ export function getIncomingData(
	return runData[nodeName][runIndex].data[connectionType][outputIndex];
}

function getRunIndexLength(runData: IRunData, nodeName: string) {
	return runData[nodeName]?.length ?? 0;
}

export function getIncomingDataFromAnyRun(
	runData: IRunData,
	nodeName: string,
	connectionType: NodeConnectionType,
	outputIndex: number,
): { data: INodeExecutionData[]; runIndex: number } | undefined {
	const maxRunIndexes = getRunIndexLength(runData, nodeName);

	for (let runIndex = 0; runIndex < maxRunIndexes; runIndex++) {
		const data = getIncomingData(runData, nodeName, runIndex, connectionType, outputIndex);

		if (data && data.length > 0) {
			return { data, runIndex };
		}
	}

	return undefined;
}
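
Illustrative aside, not part of the commit: a minimal sketch of how the new getIncomingDataFromAnyRun helper could be used. Only the signature and return shape come from the hunk above; the import path, node name, and wrapper function are assumptions.

import { NodeConnectionType, type INodeExecutionData, type IRunData } from 'n8n-workflow';
import { getIncomingDataFromAnyRun } from './getIncomingData'; // assumed relative path

// Returns the first non-empty main-output data produced by `nodeName` in any
// run, together with the run index it came from, or undefined if no run
// produced data on that output.
function firstAvailableOutput(
	runData: IRunData,
	nodeName: string,
): { items: INodeExecutionData[]; runIndex: number } | undefined {
	const hit = getIncomingDataFromAnyRun(runData, nodeName, NodeConnectionType.Main, 0);
	return hit ? { items: hit.data, runIndex: hit.runIndex } : undefined;
}
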
@ -13,6 +13,25 @@ function sortByInputIndexThenByName(
	}
}

type SourceConnectionGroup = {
	/**
	 * This is true if all connections have data. If any connection does not
	 * have data, it is false.
	 *
	 * This is useful for deciding whether a node should be put on the
	 * execution stack or the waiting stack in the execution engine.
	 */
	complete: boolean;
	connections: GraphConnection[];
};

function newGroup(): SourceConnectionGroup {
	return {
		complete: true,
		connections: [],
	};
}

/**
 * Groups incoming connections to the node. The groups contain one connection
 * per input, if possible, with run data or pinned data.
@ -58,55 +77,87 @@ function sortByInputIndexThenByName(
 *
 * Since `source1` has no run data and no pinned data it's skipped in favor of
 * `source2` for the first input.
 * It will become its own group that is marked as `complete: false`.
 *
 * So this will return 1 group:
 * 1. source2 and source3
 * So this will return 2 groups:
 * 1. source2 and source3, `complete: true`
 * 2. source1, `complete: false`
 */
export function getSourceDataGroups(
|
||||
graph: DirectedGraph,
|
||||
node: INode,
|
||||
runData: IRunData,
|
||||
pinnedData: IPinData,
|
||||
): GraphConnection[][] {
|
||||
): SourceConnectionGroup[] {
|
||||
const connections = graph.getConnections({ to: node });
|
||||
|
||||
const sortedConnectionsWithData = [];
|
||||
const sortedConnectionsWithoutData = [];
|
||||
|
||||
for (const connection of connections) {
|
||||
const hasData = runData[connection.from.name] || pinnedData[connection.from.name];
|
||||
|
||||
if (hasData) {
|
||||
sortedConnectionsWithData.push(connection);
|
||||
} else {
|
||||
sortedConnectionsWithoutData.push(connection);
|
||||
}
|
||||
}
|
||||
|
||||
if (sortedConnectionsWithData.length === 0 && sortedConnectionsWithoutData.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
sortedConnectionsWithData.sort(sortByInputIndexThenByName);
|
||||
sortedConnectionsWithoutData.sort(sortByInputIndexThenByName);
|
||||
|
||||
const groups: GraphConnection[][] = [];
|
||||
let currentGroup: GraphConnection[] = [];
|
||||
let currentInputIndex = -1;
|
||||
const groups: SourceConnectionGroup[] = [];
|
||||
let currentGroup = newGroup();
|
||||
let currentInputIndex =
|
||||
Math.min(
|
||||
...sortedConnectionsWithData.map((c) => c.inputIndex),
|
||||
...sortedConnectionsWithoutData.map((c) => c.inputIndex),
|
||||
) - 1;
|
||||
|
||||
while (sortedConnectionsWithData.length > 0 || sortedConnectionsWithoutData.length > 0) {
|
||||
currentInputIndex++;
|
||||
|
||||
while (sortedConnectionsWithData.length > 0) {
|
||||
const connectionWithDataIndex = sortedConnectionsWithData.findIndex(
|
||||
// eslint-disable-next-line @typescript-eslint/no-loop-func
|
||||
(c) => c.inputIndex > currentInputIndex,
|
||||
(c) => c.inputIndex === currentInputIndex,
|
||||
);
|
||||
const connection: GraphConnection | undefined =
|
||||
sortedConnectionsWithData[connectionWithDataIndex];
|
||||
|
||||
if (connection === undefined) {
|
||||
groups.push(currentGroup);
|
||||
currentGroup = [];
|
||||
currentInputIndex = -1;
|
||||
if (connectionWithDataIndex >= 0) {
|
||||
const connection = sortedConnectionsWithData[connectionWithDataIndex];
|
||||
|
||||
currentGroup.connections.push(connection);
|
||||
|
||||
sortedConnectionsWithData.splice(connectionWithDataIndex, 1);
|
||||
continue;
|
||||
}
|
||||
|
||||
currentInputIndex = connection.inputIndex;
|
||||
currentGroup.push(connection);
|
||||
const connectionWithoutDataIndex = sortedConnectionsWithoutData.findIndex(
|
||||
// eslint-disable-next-line @typescript-eslint/no-loop-func
|
||||
(c) => c.inputIndex === currentInputIndex,
|
||||
);
|
||||
|
||||
if (connectionWithDataIndex >= 0) {
|
||||
sortedConnectionsWithData.splice(connectionWithDataIndex, 1);
|
||||
if (connectionWithoutDataIndex >= 0) {
|
||||
const connection = sortedConnectionsWithoutData[connectionWithoutDataIndex];
|
||||
|
||||
currentGroup.connections.push(connection);
|
||||
currentGroup.complete = false;
|
||||
|
||||
sortedConnectionsWithoutData.splice(connectionWithoutDataIndex, 1);
|
||||
continue;
|
||||
}
|
||||
|
||||
groups.push(currentGroup);
|
||||
currentGroup = newGroup();
|
||||
currentInputIndex =
|
||||
Math.min(
|
||||
...sortedConnectionsWithData.map((c) => c.inputIndex),
|
||||
...sortedConnectionsWithoutData.map((c) => c.inputIndex),
|
||||
) - 1;
|
||||
}
|
||||
|
||||
groups.push(currentGroup);
|
||||
|
|
|
@ -13,9 +13,47 @@ import {
} from 'n8n-workflow';

import type { DirectedGraph } from './DirectedGraph';
import { getIncomingData } from './getIncomingData';
import { getIncomingDataFromAnyRun } from './getIncomingData';
import { getSourceDataGroups } from './getSourceDataGroups';

export function addWaitingExecution(
	waitingExecution: IWaitingForExecution,
	nodeName: string,
	runIndex: number,
	inputType: NodeConnectionType,
	inputIndex: number,
	executionData: INodeExecutionData[] | null,
) {
	const waitingExecutionObject = waitingExecution[nodeName] ?? {};
	const taskDataConnections = waitingExecutionObject[runIndex] ?? {};
	const executionDataList = taskDataConnections[inputType] ?? [];

	executionDataList[inputIndex] = executionData;

	taskDataConnections[inputType] = executionDataList;
	waitingExecutionObject[runIndex] = taskDataConnections;
	waitingExecution[nodeName] = waitingExecutionObject;
}

export function addWaitingExecutionSource(
	waitingExecutionSource: IWaitingForExecutionSource,
	nodeName: string,
	runIndex: number,
	inputType: NodeConnectionType,
	inputIndex: number,
	sourceData: ISourceData | null,
) {
	const waitingExecutionSourceObject = waitingExecutionSource[nodeName] ?? {};
	const taskDataConnectionsSource = waitingExecutionSourceObject[runIndex] ?? {};
	const sourceDataList = taskDataConnectionsSource[inputType] ?? [];

	sourceDataList[inputIndex] = sourceData;

	taskDataConnectionsSource[inputType] = sourceDataList;
	waitingExecutionSourceObject[runIndex] = taskDataConnectionsSource;
	waitingExecutionSource[nodeName] = waitingExecutionSourceObject;
}

/**
 * Recreates the node execution stack, waiting executions and waiting
 * execution sources from a directed graph, start nodes, the destination node,
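
Illustrative aside, not part of the commit: a small sketch of the sparse-fill behaviour of addWaitingExecution that the tests earlier in this diff also exercise. The import path and values are assumptions; the signature comes from the function above.

import { NodeConnectionType, type IWaitingForExecution } from 'n8n-workflow';
import { addWaitingExecution } from '@/PartialExecutionUtils/recreateNodeExecutionStack'; // assumed path

const waiting: IWaitingForExecution = {};

// Writing input index 1 before input index 0 leaves a hole at index 0.
addWaitingExecution(waiting, 'merge', 0, NodeConnectionType.Main, 1, [{ json: { item: 1 } }]);
// waiting is now: { merge: { 0: { main: [undefined, [{ json: { item: 1 } }]] } } }

// Filling input index 0 afterwards completes the main inputs for that run.
addWaitingExecution(waiting, 'merge', 0, NodeConnectionType.Main, 0, [{ json: { item: 2 } }]);
// waiting is now: { merge: { 0: { main: [[{ json: { item: 2 } }], [{ json: { item: 1 } }]] } } }
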
@ -33,7 +71,6 @@ import { getSourceDataGroups } from './getSourceDataGroups';
|
|||
export function recreateNodeExecutionStack(
|
||||
graph: DirectedGraph,
|
||||
startNodes: Set<INode>,
|
||||
destinationNode: INode,
|
||||
runData: IRunData,
|
||||
pinData: IPinData,
|
||||
): {
|
||||
|
@ -59,9 +96,6 @@ export function recreateNodeExecutionStack(
|
|||
const waitingExecution: IWaitingForExecution = {};
|
||||
const waitingExecutionSource: IWaitingForExecutionSource = {};
|
||||
|
||||
// TODO: Don't hard code this!
|
||||
const runIndex = 0;
|
||||
|
||||
for (const startNode of startNodes) {
|
||||
const incomingStartNodeConnections = graph
|
||||
.getDirectParentConnections(startNode)
|
||||
|
@ -84,38 +118,42 @@ export function recreateNodeExecutionStack(
|
|||
const sourceDataSets = getSourceDataGroups(graph, startNode, runData, pinData);
|
||||
|
||||
for (const sourceData of sourceDataSets) {
|
||||
if (sourceData.complete) {
|
||||
// All incoming connections have data, so let's put the node on the
|
||||
// stack!
|
||||
incomingData = [];
|
||||
|
||||
incomingSourceData = { main: [] };
|
||||
|
||||
for (const incomingConnection of sourceData) {
|
||||
const node = incomingConnection.from;
|
||||
for (const incomingConnection of sourceData.connections) {
|
||||
let runIndex = 0;
|
||||
const sourceNode = incomingConnection.from;
|
||||
|
||||
if (pinData[node.name]) {
|
||||
incomingData.push(pinData[node.name]);
|
||||
if (pinData[sourceNode.name]) {
|
||||
incomingData.push(pinData[sourceNode.name]);
|
||||
} else {
|
||||
a.ok(
|
||||
runData[node.name],
|
||||
`Start node(${incomingConnection.to.name}) has an incoming connection with no run or pinned data. This is not supported. The connection in question is "${node.name}->${startNode.name}". Are you sure the start nodes come from the "findStartNodes" function?`,
|
||||
runData[sourceNode.name],
|
||||
`Start node(${incomingConnection.to.name}) has an incoming connection with no run or pinned data. This is not supported. The connection in question is "${sourceNode.name}->${startNode.name}". Are you sure the start nodes come from the "findStartNodes" function?`,
|
||||
);
|
||||
|
||||
const nodeIncomingData = getIncomingData(
|
||||
const nodeIncomingData = getIncomingDataFromAnyRun(
|
||||
runData,
|
||||
node.name,
|
||||
runIndex,
|
||||
sourceNode.name,
|
||||
incomingConnection.type,
|
||||
incomingConnection.outputIndex,
|
||||
);
|
||||
|
||||
if (nodeIncomingData) {
|
||||
incomingData.push(nodeIncomingData);
|
||||
runIndex = nodeIncomingData.runIndex;
|
||||
incomingData.push(nodeIncomingData.data);
|
||||
}
|
||||
}
|
||||
|
||||
incomingSourceData.main.push({
|
||||
previousNode: incomingConnection.from.name,
|
||||
previousNodeOutput: incomingConnection.outputIndex,
|
||||
previousNodeRun: 0,
|
||||
previousNodeRun: runIndex,
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -126,46 +164,47 @@ export function recreateNodeExecutionStack(
|
|||
};
|
||||
|
||||
nodeExecutionStack.push(executeData);
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Do we need this?
|
||||
if (destinationNode) {
|
||||
const destinationNodeName = destinationNode.name;
|
||||
// Check if the destinationNode has to be added as waiting
|
||||
// because some input data is already fully available
|
||||
const incomingDestinationNodeConnections = graph
|
||||
.getDirectParentConnections(destinationNode)
|
||||
.filter((c) => c.type === NodeConnectionType.Main);
|
||||
if (incomingDestinationNodeConnections !== undefined) {
|
||||
for (const connection of incomingDestinationNodeConnections) {
|
||||
if (waitingExecution[destinationNodeName] === undefined) {
|
||||
waitingExecution[destinationNodeName] = {};
|
||||
waitingExecutionSource[destinationNodeName] = {};
|
||||
}
|
||||
if (waitingExecution[destinationNodeName][runIndex] === undefined) {
|
||||
waitingExecution[destinationNodeName][runIndex] = {};
|
||||
waitingExecutionSource[destinationNodeName][runIndex] = {};
|
||||
}
|
||||
if (waitingExecution[destinationNodeName][runIndex][connection.type] === undefined) {
|
||||
waitingExecution[destinationNodeName][runIndex][connection.type] = [];
|
||||
waitingExecutionSource[destinationNodeName][runIndex][connection.type] = [];
|
||||
}
|
||||
|
||||
if (runData[connection.from.name] !== undefined) {
|
||||
// Input data exists so add as waiting
|
||||
// incomingDataDestination.push(runData[connection.node!][runIndex].data![connection.type][connection.index]);
|
||||
waitingExecution[destinationNodeName][runIndex][connection.type].push(
|
||||
runData[connection.from.name][runIndex].data![connection.type][connection.inputIndex],
|
||||
);
|
||||
waitingExecutionSource[destinationNodeName][runIndex][connection.type].push({
|
||||
previousNode: connection.from.name,
|
||||
previousNodeOutput: connection.inputIndex || undefined,
|
||||
previousNodeRun: runIndex || undefined,
|
||||
} as ISourceData);
|
||||
} else {
|
||||
waitingExecution[destinationNodeName][runIndex][connection.type].push(null);
|
||||
waitingExecutionSource[destinationNodeName][runIndex][connection.type].push(null);
|
||||
const nodeName = startNode.name;
|
||||
const nextRunIndex = waitingExecution[nodeName]
|
||||
? Object.keys(waitingExecution[nodeName]).length
|
||||
: 0;
|
||||
|
||||
for (const incomingConnection of sourceData.connections) {
|
||||
const sourceNode = incomingConnection.from;
|
||||
const maybeNodeIncomingData = getIncomingDataFromAnyRun(
|
||||
runData,
|
||||
sourceNode.name,
|
||||
incomingConnection.type,
|
||||
incomingConnection.outputIndex,
|
||||
);
|
||||
const nodeIncomingData = maybeNodeIncomingData?.data ?? null;
|
||||
|
||||
if (nodeIncomingData) {
|
||||
addWaitingExecution(
|
||||
waitingExecution,
|
||||
nodeName,
|
||||
nextRunIndex,
|
||||
incomingConnection.type,
|
||||
incomingConnection.inputIndex,
|
||||
nodeIncomingData,
|
||||
);
|
||||
|
||||
addWaitingExecutionSource(
|
||||
waitingExecutionSource,
|
||||
nodeName,
|
||||
nextRunIndex,
|
||||
incomingConnection.type,
|
||||
incomingConnection.inputIndex,
|
||||
nodeIncomingData
|
||||
? {
|
||||
previousNode: incomingConnection.from.name,
|
||||
previousNodeRun: nextRunIndex,
|
||||
previousNodeOutput: incomingConnection.outputIndex,
|
||||
}
|
||||
: null,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -363,7 +363,7 @@ export class WorkflowExecute {
		// 7. Recreate Execution Stack
		const { nodeExecutionStack, waitingExecution, waitingExecutionSource } =
			recreateNodeExecutionStack(subgraph, startNodes, destination, runData, pinData ?? {});
			recreateNodeExecutionStack(subgraph, new Set(startNodes), runData, pinData ?? {});

		// 8. Execute
		this.status = 'running';
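
Illustrative aside, not part of the commit: the call shape of recreateNodeExecutionStack after this change, wrapped in a hypothetical helper. The parameter order (graph, startNodes, runData, pinData) comes from the hunks above; the import paths and the wrapper itself are assumptions.

import type { INode, IPinData, IRunData } from 'n8n-workflow';
import type { DirectedGraph } from '@/PartialExecutionUtils/DirectedGraph'; // assumed path
import { recreateNodeExecutionStack } from '@/PartialExecutionUtils/recreateNodeExecutionStack'; // assumed path

// The destination node argument is gone and start nodes are now passed as a Set.
function rebuildExecutionStack(
	subgraph: DirectedGraph,
	startNodes: INode[],
	runData: IRunData,
	pinData?: IPinData,
) {
	return recreateNodeExecutionStack(subgraph, new Set(startNodes), runData, pinData ?? {});
}
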
@ -8,6 +8,7 @@ import type {
|
|||
INode,
|
||||
INodeExecutionData,
|
||||
IRunExecutionData,
|
||||
ITaskDataConnections,
|
||||
IWebhookData,
|
||||
IWebhookFunctions,
|
||||
IWorkflowExecuteAdditionalData,
|
||||
|
@ -178,6 +179,7 @@ export class WebhookContext extends BaseContext implements IWebhookFunctions {
|
|||
runExecutionData,
|
||||
runIndex,
|
||||
connectionInputData,
|
||||
{} as ITaskDataConnections,
|
||||
this.additionalData,
|
||||
executeData,
|
||||
this.mode,
|
||||
|
|
|
@ -5,7 +5,7 @@ import { truncate } from 'n8n-design-system';
|
|||
const renderComponent = createComponentRenderer(ProjectCardBadge);
|
||||
|
||||
describe('ProjectCardBadge', () => {
|
||||
it('should show "Owned by me" badge if there is no homeProject', () => {
|
||||
it('should show "Personal" badge if there is no homeProject', () => {
|
||||
const { getByText } = renderComponent({
|
||||
props: {
|
||||
resource: {},
|
||||
|
@ -13,15 +13,16 @@ describe('ProjectCardBadge', () => {
|
|||
},
|
||||
});
|
||||
|
||||
expect(getByText('Owned by me')).toBeVisible();
|
||||
expect(getByText('Personal')).toBeVisible();
|
||||
});
|
||||
|
||||
it('should show "Owned by me" badge if homeProject ID equals personalProject ID', () => {
|
||||
it('should show "Personal" badge if homeProject ID equals personalProject ID', () => {
|
||||
const { getByText } = renderComponent({
|
||||
props: {
|
||||
resource: {
|
||||
homeProject: {
|
||||
id: '1',
|
||||
name: 'John',
|
||||
},
|
||||
},
|
||||
resourceType: 'workflow',
|
||||
|
@ -31,7 +32,27 @@ describe('ProjectCardBadge', () => {
|
|||
},
|
||||
});
|
||||
|
||||
expect(getByText('Owned by me')).toBeVisible();
|
||||
expect(getByText('Personal')).toBeVisible();
|
||||
});
|
||||
|
||||
it('should show shared with count', () => {
|
||||
const { getByText } = renderComponent({
|
||||
props: {
|
||||
resource: {
|
||||
sharedWithProjects: [{}, {}, {}],
|
||||
homeProject: {
|
||||
id: '1',
|
||||
name: 'John',
|
||||
},
|
||||
},
|
||||
resourceType: 'workflow',
|
||||
personalProject: {
|
||||
id: '1',
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(getByText('+ 3')).toBeVisible();
|
||||
});
|
||||
|
||||
test.each([
|
||||
|
|
|
@ -52,12 +52,17 @@ const projectState = computed(() => {
|
|||
}
|
||||
return ProjectState.Unknown;
|
||||
});
|
||||
|
||||
const numberOfMembersInHomeTeamProject = computed(
|
||||
() => props.resource.sharedWithProjects?.length ?? 0,
|
||||
);
|
||||
|
||||
const badgeText = computed(() => {
|
||||
if (
|
||||
projectState.value === ProjectState.Owned ||
|
||||
projectState.value === ProjectState.SharedOwned
|
||||
) {
|
||||
return i18n.baseText('generic.ownedByMe');
|
||||
return i18n.baseText('projects.menu.personal');
|
||||
} else {
|
||||
const { name, email } = splitName(props.resource.homeProject?.name ?? '');
|
||||
return name ?? email ?? '';
|
||||
|
@ -65,12 +70,12 @@ const badgeText = computed(() => {
|
|||
});
|
||||
const badgeIcon = computed(() => {
|
||||
switch (projectState.value) {
|
||||
case ProjectState.SharedPersonal:
|
||||
case ProjectState.Owned:
|
||||
case ProjectState.SharedOwned:
|
||||
return 'user-friends';
|
||||
return 'user';
|
||||
case ProjectState.Team:
|
||||
case ProjectState.SharedTeam:
|
||||
return 'archive';
|
||||
return 'layer-group';
|
||||
default:
|
||||
return '';
|
||||
}
|
||||
|
@ -81,6 +86,7 @@ const badgeTooltip = computed(() => {
|
|||
return i18n.baseText('projects.badge.tooltip.sharedOwned', {
|
||||
interpolate: {
|
||||
resourceTypeLabel: props.resourceTypeLabel,
|
||||
count: numberOfMembersInHomeTeamProject.value,
|
||||
},
|
||||
});
|
||||
case ProjectState.SharedPersonal:
|
||||
|
@ -88,6 +94,7 @@ const badgeTooltip = computed(() => {
|
|||
interpolate: {
|
||||
resourceTypeLabel: props.resourceTypeLabel,
|
||||
name: badgeText.value,
|
||||
count: numberOfMembersInHomeTeamProject.value,
|
||||
},
|
||||
});
|
||||
case ProjectState.Personal:
|
||||
|
@ -109,6 +116,7 @@ const badgeTooltip = computed(() => {
|
|||
interpolate: {
|
||||
resourceTypeLabel: props.resourceTypeLabel,
|
||||
name: badgeText.value,
|
||||
count: numberOfMembersInHomeTeamProject.value,
|
||||
},
|
||||
});
|
||||
default:
|
||||
|
@ -118,14 +126,53 @@ const badgeTooltip = computed(() => {
|
|||
</script>
|
||||
<template>
|
||||
<N8nTooltip :disabled="!badgeTooltip" placement="top">
|
||||
<N8nBadge v-if="badgeText" class="mr-xs" theme="tertiary" bold data-test-id="card-badge">
|
||||
<div class="mr-xs">
|
||||
<N8nBadge
|
||||
v-if="badgeText"
|
||||
:class="$style.badge"
|
||||
theme="tertiary"
|
||||
bold
|
||||
data-test-id="card-badge"
|
||||
>
|
||||
<N8nIcon v-if="badgeIcon" :icon="badgeIcon" size="small" class="mr-3xs" />
|
||||
<span v-n8n-truncate:20>{{ badgeText }}</span>
|
||||
<N8nIcon v-if="badgeIcon" :icon="badgeIcon" size="small" class="ml-5xs" />
|
||||
</N8nBadge>
|
||||
<N8nBadge
|
||||
v-if="numberOfMembersInHomeTeamProject"
|
||||
:class="[$style.badge, $style.countBadge]"
|
||||
theme="tertiary"
|
||||
bold
|
||||
>
|
||||
+ {{ numberOfMembersInHomeTeamProject }}
|
||||
</N8nBadge>
|
||||
</div>
|
||||
<template #content>
|
||||
{{ badgeTooltip }}
|
||||
</template>
|
||||
</N8nTooltip>
|
||||
</template>
|
||||
|
||||
<style lang="scss" module></style>
|
||||
<style lang="scss" module>
|
||||
.badge {
|
||||
padding: var(--spacing-4xs) var(--spacing-2xs);
|
||||
background-color: var(--color-background-xlight);
|
||||
border-color: var(--color-foreground-base);
|
||||
|
||||
z-index: 1;
|
||||
position: relative;
|
||||
height: 23px;
|
||||
:global(.n8n-text) {
|
||||
color: var(--color-text-base);
|
||||
}
|
||||
}
|
||||
|
||||
.countBadge {
|
||||
margin-left: -5px;
|
||||
z-index: 0;
|
||||
position: relative;
|
||||
height: 23px;
|
||||
:global(.n8n-text) {
|
||||
color: var(--color-text-light);
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
|
|
@ -0,0 +1,40 @@
|
|||
import { createTestingPinia } from '@pinia/testing';
|
||||
import { setActivePinia } from 'pinia';
|
||||
import { mockedStore } from '@/__tests__/utils';
|
||||
import { useWorkflowsStore } from '@/stores/workflows.store';
|
||||
import { useExecutionDebugging } from './useExecutionDebugging';
|
||||
import type { INodeUi, IExecutionResponse } from '@/Interface';
|
||||
import type { Workflow } from 'n8n-workflow';
|
||||
|
||||
describe('useExecutionDebugging()', () => {
|
||||
it('should applyExecutionData', async () => {
|
||||
setActivePinia(createTestingPinia());
|
||||
const mockExecution = {
|
||||
data: {
|
||||
resultData: {
|
||||
runData: {
|
||||
testNode: [
|
||||
{
|
||||
data: {},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
} as unknown as IExecutionResponse;
|
||||
|
||||
const workflowStore = mockedStore(useWorkflowsStore);
|
||||
workflowStore.getNodes.mockReturnValue([{ name: 'testNode' }] as INodeUi[]);
|
||||
workflowStore.getExecution.mockResolvedValueOnce(mockExecution);
|
||||
workflowStore.getCurrentWorkflow.mockReturnValue({
|
||||
pinData: {},
|
||||
getParentNodes: vi.fn().mockReturnValue([]),
|
||||
} as unknown as Workflow);
|
||||
|
||||
const { applyExecutionData } = useExecutionDebugging();
|
||||
|
||||
await applyExecutionData('1');
|
||||
|
||||
expect(workflowStore.setWorkflowExecutionData).toHaveBeenCalledWith(mockExecution);
|
||||
});
|
||||
});
|
|
@ -105,7 +105,7 @@ export const useExecutionDebugging = () => {
		let pinnings = 0;

		pinnableNodes.forEach((node: INodeUi) => {
			const nodeData = runData[node.name]?.[0].data?.main[0];
			const nodeData = runData[node.name]?.[0].data?.main?.[0];
			if (nodeData) {
				pinnings++;
				workflowsStore.pinData({
@ -71,7 +71,7 @@
	"generic.seePlans": "See plans",
	"generic.loading": "Loading",
	"generic.and": "and",
	"generic.ownedByMe": "Owned by me",
	"generic.ownedByMe": "(You)",
	"generic.moreInfo": "More info",
	"generic.next": "Next",
	"about.aboutN8n": "About n8n",
@ -2543,11 +2543,11 @@
|
|||
"projects.move.resource.success.message": "{resourceName} {resourceTypeLabel} was moved to {targetProjectName}. {workflow} {link}",
|
||||
"projects.move.resource.success.message.workflow": "Please double check any credentials this workflow is using are also shared with {targetProjectName}.",
|
||||
"projects.move.resource.success.link": "View {targetProjectName}",
|
||||
"projects.badge.tooltip.sharedOwned": "This {resourceTypeLabel} is owned by you and shared with one or more projects or users",
|
||||
"projects.badge.tooltip.sharedPersonal": "This {resourceTypeLabel} is owned by {name} and shared with one or more projects or users",
|
||||
"projects.badge.tooltip.sharedOwned": "This {resourceTypeLabel} is owned by you and shared with {count} users",
|
||||
"projects.badge.tooltip.sharedPersonal": "This {resourceTypeLabel} is owned by {name} and shared with {count} users",
|
||||
"projects.badge.tooltip.personal": "This {resourceTypeLabel} is owned by {name}",
|
||||
"projects.badge.tooltip.team": "This {resourceTypeLabel} is owned and accessible by the {name} project.",
|
||||
"projects.badge.tooltip.sharedTeam": "This {resourceTypeLabel} is owned and accessible by the {name} project and shared with one or more projects or users",
|
||||
"projects.badge.tooltip.sharedTeam": "This {resourceTypeLabel} is owned by the {name} project and accessible by {count} users",
|
||||
"mfa.setup.invalidAuthenticatorCode": "{code} is not a valid number",
|
||||
"mfa.setup.invalidCode": "Two-factor code failed. Please try again.",
|
||||
"mfa.code.modal.title": "Two-factor authentication",
|
||||
|
|
|
@ -64,6 +64,7 @@ export async function asanaApiRequestAllItems(
			uri,
		);
		uri = get(responseData, 'next_page.uri');
		query = {}; // query is not needed once we have next_page.uri
		returnData.push.apply(returnData, responseData.data as IDataObject[]);
	} while (responseData.next_page !== null);
@ -1,5 +1,10 @@
|
|||
import { EventEmitter } from 'events';
|
||||
import type { IExecuteFunctions, INodeExecutionData, IWorkflowDataProxyData } from 'n8n-workflow';
|
||||
import type {
|
||||
IExecuteFunctions,
|
||||
INodeExecutionData,
|
||||
ISupplyDataFunctions,
|
||||
IWorkflowDataProxyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { isObject } from './utils';
|
||||
import { ValidationError } from './ValidationError';
|
||||
|
@ -19,7 +24,10 @@ export interface SandboxContext extends IWorkflowDataProxyData {
|
|||
|
||||
export const REQUIRED_N8N_ITEM_KEYS = new Set(['json', 'binary', 'pairedItem', 'error']);
|
||||
|
||||
export function getSandboxContext(this: IExecuteFunctions, index: number): SandboxContext {
|
||||
export function getSandboxContext(
|
||||
this: IExecuteFunctions | ISupplyDataFunctions,
|
||||
index: number,
|
||||
): SandboxContext {
|
||||
const helpers = {
|
||||
...this.helpers,
|
||||
httpRequestWithAuthentication: this.helpers.httpRequestWithAuthentication.bind(this),
|
||||
|
|
|
@ -4,6 +4,7 @@ import type {
|
|||
IExecuteFunctions,
|
||||
INode,
|
||||
INodeExecutionData,
|
||||
ISupplyDataFunctions,
|
||||
} from 'n8n-workflow';
|
||||
import {
|
||||
ApplicationError,
|
||||
|
@ -51,7 +52,7 @@ const configureFieldHelper = (dotNotation?: boolean) => {
|
|||
};
|
||||
|
||||
export function composeReturnItem(
|
||||
this: IExecuteFunctions,
|
||||
this: IExecuteFunctions | ISupplyDataFunctions,
|
||||
itemIndex: number,
|
||||
inputItem: INodeExecutionData,
|
||||
newFields: IDataObject,
|
||||
|
@ -220,7 +221,11 @@ export const validateEntry = (
|
|||
};
|
||||
};
|
||||
|
||||
export function resolveRawData(this: IExecuteFunctions, rawData: string, i: number) {
|
||||
export function resolveRawData(
|
||||
this: IExecuteFunctions | ISupplyDataFunctions,
|
||||
rawData: string,
|
||||
i: number,
|
||||
) {
|
||||
const resolvables = getResolvables(rawData);
|
||||
let returnData: string = rawData;
|
||||
|
||||
|
|
|
@ -6,6 +6,7 @@ import type {
|
|||
INode,
|
||||
INodeExecutionData,
|
||||
INodeProperties,
|
||||
ISupplyDataFunctions,
|
||||
} from 'n8n-workflow';
|
||||
import { NodeOperationError } from 'n8n-workflow';
|
||||
|
||||
|
@ -185,7 +186,7 @@ const displayOptions = {
|
|||
export const description = updateDisplayOptions(displayOptions, properties);
|
||||
|
||||
export async function execute(
|
||||
this: IExecuteFunctions,
|
||||
this: IExecuteFunctions | ISupplyDataFunctions,
|
||||
item: INodeExecutionData,
|
||||
i: number,
|
||||
options: SetNodeOptions,
|
||||
|
|
|
@ -35,6 +35,7 @@ export type IAllExecuteFunctions =
|
|||
| IExecuteFunctions
|
||||
| IExecutePaginationFunctions
|
||||
| IExecuteSingleFunctions
|
||||
| ISupplyDataFunctions
|
||||
| IHookFunctions
|
||||
| ILoadOptionsFunctions
|
||||
| IPollFunctions
|
||||
|
@ -1016,6 +1017,27 @@ export interface IExecuteSingleFunctions extends BaseExecutionFunctions {
|
|||
};
|
||||
}
|
||||
|
||||
export type ISupplyDataFunctions = ExecuteFunctions.GetNodeParameterFn &
|
||||
FunctionsBaseWithRequiredKeys<'getMode'> &
|
||||
Pick<
|
||||
IExecuteFunctions,
|
||||
| 'addInputData'
|
||||
| 'addOutputData'
|
||||
| 'getInputConnectionData'
|
||||
| 'getInputData'
|
||||
| 'getNodeOutputs'
|
||||
| 'executeWorkflow'
|
||||
| 'sendMessageToUI'
|
||||
| 'helpers'
|
||||
> & {
|
||||
continueOnFail(): boolean;
|
||||
evaluateExpression(expression: string, itemIndex: number): NodeParameterValueType;
|
||||
getWorkflowDataProxy(itemIndex: number): IWorkflowDataProxyData;
|
||||
getExecutionCancelSignal(): AbortSignal | undefined;
|
||||
onExecutionCancellation(handler: () => unknown): void;
|
||||
logAiEvent(eventName: AiEvent, msg?: string | undefined): void;
|
||||
};
|
||||
|
||||
export interface IExecutePaginationFunctions extends IExecuteSingleFunctions {
|
||||
makeRoutingRequest(
|
||||
this: IAllExecuteFunctions,
|
||||
|
@ -1561,7 +1583,7 @@ export class NodeExecutionOutput extends Array {
|
|||
|
||||
export interface INodeType {
|
||||
description: INodeTypeDescription;
|
||||
supplyData?(this: IAllExecuteFunctions, itemIndex: number): Promise<SupplyData>;
|
||||
supplyData?(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData>;
|
||||
execute?(
|
||||
this: IExecuteFunctions,
|
||||
): Promise<INodeExecutionData[][] | NodeExecutionWithMetadata[][] | null>;