diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts index 1ecd909535..d3927e505c 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts @@ -161,7 +161,7 @@ export class Agent implements INodeType { name: 'agent', icon: 'fa:robot', group: ['transform'], - version: [1, 1.1, 1.2, 1.3, 1.4], + version: [1, 1.1, 1.2, 1.3, 1.4, 1.5], description: 'Generates an action plan and executes it. Can use external tools.', subtitle: "={{ { conversationalAgent: 'Conversational Agent', openAiFunctionsAgent: 'OpenAI Functions Agent', reActAgent: 'ReAct Agent', sqlAgent: 'SQL Agent', planAndExecuteAgent: 'Plan and Execute Agent' }[$parameter.agent] }}", @@ -314,17 +314,18 @@ export class Agent implements INodeType { async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> { const agentType = this.getNodeParameter('agent', 0, '') as string; + const nodeVersion = this.getNode().typeVersion; if (agentType === 'conversationalAgent') { - return await conversationalAgentExecute.call(this); + return await conversationalAgentExecute.call(this, nodeVersion); } else if (agentType === 'openAiFunctionsAgent') { - return await openAiFunctionsAgentExecute.call(this); + return await openAiFunctionsAgentExecute.call(this, nodeVersion); } else if (agentType === 'reActAgent') { - return await reActAgentAgentExecute.call(this); + return await reActAgentAgentExecute.call(this, nodeVersion); } else if (agentType === 'sqlAgent') { - return await sqlAgentAgentExecute.call(this); + return await sqlAgentAgentExecute.call(this, nodeVersion); } else if (agentType === 'planAndExecuteAgent') { - return await planAndExecuteAgentExecute.call(this); + return await planAndExecuteAgentExecute.call(this, nodeVersion); } throw new NodeOperationError(this.getNode(), `The agent type "${agentType}" is not supported`); diff --git 
a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts index ddf0f4e6ab..62cc6edbd8 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts @@ -6,7 +6,6 @@ import { } from 'n8n-workflow'; import { initializeAgentExecutorWithOptions } from 'langchain/agents'; -import type { Tool } from 'langchain/tools'; import type { BaseChatMemory } from 'langchain/memory'; import type { BaseOutputParser } from 'langchain/schema/output_parser'; import { PromptTemplate } from 'langchain/prompts'; @@ -15,10 +14,12 @@ import { isChatInstance, getPromptInputByType, getOptionalOutputParsers, + getConnectedTools, } from '../../../../../utils/helpers'; export async function conversationalAgentExecute( this: IExecuteFunctions, + nodeVersion: number, ): Promise<INodeExecutionData[][]> { this.logger.verbose('Executing Conversational Agent'); @@ -31,7 +32,8 @@ export async function conversationalAgentExecute( const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as | BaseChatMemory | undefined; - const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]; + + const tools = await getConnectedTools(this, nodeVersion >= 1.5); const outputParsers = await getOptionalOutputParsers(this); // TODO: Make it possible in the future to use values for other items than just 0 diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts index 87e6752fb5..bcfe28a02a 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts @@ -7,16 +7,20 @@ import { 
import type { AgentExecutorInput } from 'langchain/agents'; import { AgentExecutor, OpenAIAgent } from 'langchain/agents'; -import type { Tool } from 'langchain/tools'; import type { BaseOutputParser } from 'langchain/schema/output_parser'; import { PromptTemplate } from 'langchain/prompts'; import { CombiningOutputParser } from 'langchain/output_parsers'; import { BufferMemory, type BaseChatMemory } from 'langchain/memory'; import { ChatOpenAI } from 'langchain/chat_models/openai'; -import { getOptionalOutputParsers, getPromptInputByType } from '../../../../../utils/helpers'; +import { + getConnectedTools, + getOptionalOutputParsers, + getPromptInputByType, +} from '../../../../../utils/helpers'; export async function openAiFunctionsAgentExecute( this: IExecuteFunctions, + nodeVersion: number, ): Promise<INodeExecutionData[][]> { this.logger.verbose('Executing OpenAi Functions Agent'); const model = (await this.getInputConnectionData( @@ -33,7 +37,7 @@ export async function openAiFunctionsAgentExecute( const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as | BaseChatMemory | undefined; - const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]; + const tools = await getConnectedTools(this, nodeVersion >= 1.5); const outputParsers = await getOptionalOutputParsers(this); const options = this.getNodeParameter('options', 0, {}) as { systemMessage?: string; diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts index 8326b877b9..7350415568 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts @@ -5,16 +5,20 @@ import { NodeOperationError, } from 'n8n-workflow'; -import type { Tool } from 'langchain/tools'; import type { BaseOutputParser } from 
'langchain/schema/output_parser'; import { PromptTemplate } from 'langchain/prompts'; import { CombiningOutputParser } from 'langchain/output_parsers'; import type { BaseChatModel } from 'langchain/chat_models/base'; import { PlanAndExecuteAgentExecutor } from 'langchain/experimental/plan_and_execute'; -import { getOptionalOutputParsers, getPromptInputByType } from '../../../../../utils/helpers'; +import { + getConnectedTools, + getOptionalOutputParsers, + getPromptInputByType, +} from '../../../../../utils/helpers'; export async function planAndExecuteAgentExecute( this: IExecuteFunctions, + nodeVersion: number, ): Promise<INodeExecutionData[][]> { this.logger.verbose('Executing PlanAndExecute Agent'); const model = (await this.getInputConnectionData( @@ -22,7 +26,7 @@ export async function planAndExecuteAgentExecute( NodeConnectionType.AiLanguageModel, 0, )) as BaseChatModel; - const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]; + const tools = await getConnectedTools(this, nodeVersion >= 1.5); const outputParsers = await getOptionalOutputParsers(this); diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts index 49f0b8a6ae..02b3499663 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts @@ -7,12 +7,12 @@ import { import { AgentExecutor, ChatAgent, ZeroShotAgent } from 'langchain/agents'; import type { BaseLanguageModel } from 'langchain/base_language'; -import type { Tool } from 'langchain/tools'; import type { BaseOutputParser } from 'langchain/schema/output_parser'; import { PromptTemplate } from 'langchain/prompts'; import { CombiningOutputParser } from 'langchain/output_parsers'; import type { BaseChatModel } from 'langchain/chat_models/base'; import { + getConnectedTools, getOptionalOutputParsers, getPromptInputByType, isChatInstance, @@ -20,6 
+20,7 @@ import { export async function reActAgentAgentExecute( this: IExecuteFunctions, + nodeVersion: number, ): Promise<INodeExecutionData[][]> { this.logger.verbose('Executing ReAct Agent'); @@ -27,7 +28,7 @@ export async function reActAgentAgentExecute( | BaseLanguageModel | BaseChatModel; - const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]; + const tools = await getConnectedTools(this, nodeVersion >= 1.5); const outputParsers = await getOptionalOutputParsers(this); diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts index 245d0cf89e..eae37cb7f9 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts @@ -26,6 +26,7 @@ const parseTablesString = (tablesString: string) => export async function sqlAgentAgentExecute( this: IExecuteFunctions, + nodeVersion: number, ): Promise<INodeExecutionData[][]> { this.logger.verbose('Executing SQL Agent'); diff --git a/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts b/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts index 4a74d062ba..4c7243c7d5 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts @@ -1,7 +1,6 @@ import { AgentExecutor } from 'langchain/agents'; import { OpenAI as OpenAIClient } from 'openai'; import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant'; -import { type Tool } from 'langchain/tools'; import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import type { IExecuteFunctions, @@ -10,6 +9,7 @@ import type { INodeTypeDescription, } from 'n8n-workflow'; import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema'; +import { 
getConnectedTools } from '../../../utils/helpers'; import { formatToOpenAIAssistantTool } from './utils'; export class OpenAiAssistant implements INodeType { @@ -19,7 +19,7 @@ export class OpenAiAssistant implements INodeType { hidden: true, icon: 'fa:robot', group: ['transform'], - version: 1, + version: [1, 1.1], description: 'Utilizes Assistant API from Open AI.', subtitle: 'Open AI Assistant', defaults: { @@ -311,7 +311,8 @@ export class OpenAiAssistant implements INodeType { }; async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> { - const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]; + const nodeVersion = this.getNode().typeVersion; + const tools = await getConnectedTools(this, nodeVersion > 1); const credentials = await this.getCredentials('openAiApi'); const items = this.getInputData(); diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts index a11bc7ab2b..3623c18fd4 100644 --- a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts +++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts @@ -1,15 +1,17 @@ import { AgentExecutor } from 'langchain/agents'; -import type { Tool } from 'langchain/tools'; + import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant'; import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema'; import { OpenAI as OpenAIClient } from 'openai'; -import { NodeConnectionType, NodeOperationError, updateDisplayOptions } from 'n8n-workflow'; +import { NodeOperationError, updateDisplayOptions } from 'n8n-workflow'; import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow'; import { formatToOpenAIAssistantTool } from '../../helpers/utils'; import { assistantRLC } from '../descriptions'; +import { 
getConnectedTools } from '../../../../../utils/helpers'; + const properties: INodeProperties[] = [ assistantRLC, { @@ -97,6 +99,7 @@ export const description = updateDisplayOptions(displayOptions, properties); export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> { const credentials = await this.getCredentials('openAiApi'); + const nodeVersion = this.getNode().typeVersion; const prompt = this.getNodeParameter('prompt', i) as string; @@ -131,7 +134,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> { - const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]; + const tools = await getConnectedTools(this, nodeVersion > 1); if (tools.length) { const transformedConnectedTools = tools?.map(formatToOpenAIAssistantTool) ?? []; diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/text/message.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/text/message.operation.ts index 762851a39a..6356622ad3 100644 --- a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/text/message.operation.ts +++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/text/message.operation.ts @@ -4,13 +4,12 @@ import type { INodeExecutionData, IDataObject, } from 'n8n-workflow'; -import { NodeConnectionType, updateDisplayOptions } from 'n8n-workflow'; - -import type { Tool } from 'langchain/tools'; +import { updateDisplayOptions } from 'n8n-workflow'; import { apiRequest } from '../../transport'; import type { ChatCompletion } from '../../helpers/interfaces'; import { formatToOpenAIAssistantTool } from '../../helpers/utils'; import { modelRLC } from '../descriptions'; +import { getConnectedTools } from '../../../../../utils/helpers'; const properties: INodeProperties[] = [ modelRLC, @@ -166,6 +165,7 @@ const displayOptions = { export const description = updateDisplayOptions(displayOptions, properties); export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> { + const nodeVersion = this.getNode().typeVersion; const model = this.getNodeParameter('modelId', i, '', { extractValue: true }); let 
messages = this.getNodeParameter('messages.values', i, []) as IDataObject[]; const options = this.getNodeParameter('options', i, {}); @@ -183,8 +183,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> { - const externalTools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as - Tool[]; + const externalTools = await getConnectedTools(this, nodeVersion > 1); let tools; if (externalTools.length) { diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/versionDescription.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/versionDescription.ts index f3e8177e3f..4b3a611e2b 100644 --- a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/versionDescription.ts +++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/versionDescription.ts @@ -59,7 +59,7 @@ export const versionDescription: INodeTypeDescription = { name: 'openAi', icon: 'file:openAi.svg', group: ['transform'], - version: 1, + version: [1, 1.1], subtitle: `={{(${prettifyOperation})($parameter.resource, $parameter.operation)}}`, description: 'Message an assistant or GPT, analyze images, generate audio, etc.', defaults: { diff --git a/packages/@n8n/nodes-langchain/utils/helpers.ts b/packages/@n8n/nodes-langchain/utils/helpers.ts index 3c4b8a191e..7a91f1c17c 100644 --- a/packages/@n8n/nodes-langchain/utils/helpers.ts +++ b/packages/@n8n/nodes-langchain/utils/helpers.ts @@ -4,6 +4,7 @@ import { BaseChatModel } from 'langchain/chat_models/base'; import { BaseChatModel as BaseChatModelCore } from '@langchain/core/language_models/chat_models'; import type { BaseOutputParser } from '@langchain/core/output_parsers'; import type { BaseMessage } from 'langchain/schema'; +import { DynamicTool, type Tool } from 'langchain/tools'; export function getMetadataFiltersValues( ctx: IExecuteFunctions, @@ -125,3 +126,26 @@ export function serializeChatHistory(chatHistory: BaseMessage[]): string { }) .join('\n'); } + +export const getConnectedTools = async (ctx: IExecuteFunctions, enforceUniqueNames: boolean) => { + const connectedTools = ((await ctx.getInputConnectionData(NodeConnectionType.AiTool, 0)) as 
Tool[]) || []; + + if (!enforceUniqueNames) return connectedTools; + + const seenNames = new Set<string>(); + + for (const tool of connectedTools) { + if (!(tool instanceof DynamicTool)) continue; + + const { name } = tool; + if (seenNames.has(name)) { + throw new NodeOperationError( + ctx.getNode(), + `You have multiple tools with the same name: '${name}', please rename them to avoid conflicts`, + ); + } + seenNames.add(name); + } + + return connectedTools; +};