fix: AI agents, throw error on duplicate names in dynamic tools (#8766)

Co-authored-by: oleg <me@olegivaniv.com>
This commit is contained in:
Michael Kret 2024-02-29 13:28:38 +02:00 committed by GitHub
parent db4a419c8d
commit 75e4df138f
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
11 changed files with 68 additions and 28 deletions

View file

@@ -161,7 +161,7 @@ export class Agent implements INodeType {
name: 'agent', name: 'agent',
icon: 'fa:robot', icon: 'fa:robot',
group: ['transform'], group: ['transform'],
version: [1, 1.1, 1.2, 1.3, 1.4], version: [1, 1.1, 1.2, 1.3, 1.4, 1.5],
description: 'Generates an action plan and executes it. Can use external tools.', description: 'Generates an action plan and executes it. Can use external tools.',
subtitle: subtitle:
"={{ { conversationalAgent: 'Conversational Agent', openAiFunctionsAgent: 'OpenAI Functions Agent', reActAgent: 'ReAct Agent', sqlAgent: 'SQL Agent', planAndExecuteAgent: 'Plan and Execute Agent' }[$parameter.agent] }}", "={{ { conversationalAgent: 'Conversational Agent', openAiFunctionsAgent: 'OpenAI Functions Agent', reActAgent: 'ReAct Agent', sqlAgent: 'SQL Agent', planAndExecuteAgent: 'Plan and Execute Agent' }[$parameter.agent] }}",
@@ -314,17 +314,18 @@
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> { async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const agentType = this.getNodeParameter('agent', 0, '') as string; const agentType = this.getNodeParameter('agent', 0, '') as string;
const nodeVersion = this.getNode().typeVersion;
if (agentType === 'conversationalAgent') { if (agentType === 'conversationalAgent') {
return await conversationalAgentExecute.call(this); return await conversationalAgentExecute.call(this, nodeVersion);
} else if (agentType === 'openAiFunctionsAgent') { } else if (agentType === 'openAiFunctionsAgent') {
return await openAiFunctionsAgentExecute.call(this); return await openAiFunctionsAgentExecute.call(this, nodeVersion);
} else if (agentType === 'reActAgent') { } else if (agentType === 'reActAgent') {
return await reActAgentAgentExecute.call(this); return await reActAgentAgentExecute.call(this, nodeVersion);
} else if (agentType === 'sqlAgent') { } else if (agentType === 'sqlAgent') {
return await sqlAgentAgentExecute.call(this); return await sqlAgentAgentExecute.call(this, nodeVersion);
} else if (agentType === 'planAndExecuteAgent') { } else if (agentType === 'planAndExecuteAgent') {
return await planAndExecuteAgentExecute.call(this); return await planAndExecuteAgentExecute.call(this, nodeVersion);
} }
throw new NodeOperationError(this.getNode(), `The agent type "${agentType}" is not supported`); throw new NodeOperationError(this.getNode(), `The agent type "${agentType}" is not supported`);

View file

@@ -6,7 +6,6 @@ import {
} from 'n8n-workflow'; } from 'n8n-workflow';
import { initializeAgentExecutorWithOptions } from 'langchain/agents'; import { initializeAgentExecutorWithOptions } from 'langchain/agents';
import type { Tool } from 'langchain/tools';
import type { BaseChatMemory } from 'langchain/memory'; import type { BaseChatMemory } from 'langchain/memory';
import type { BaseOutputParser } from 'langchain/schema/output_parser'; import type { BaseOutputParser } from 'langchain/schema/output_parser';
import { PromptTemplate } from 'langchain/prompts'; import { PromptTemplate } from 'langchain/prompts';
@@ -15,10 +14,12 @@ import {
isChatInstance, isChatInstance,
getPromptInputByType, getPromptInputByType,
getOptionalOutputParsers, getOptionalOutputParsers,
getConnectedTools,
} from '../../../../../utils/helpers'; } from '../../../../../utils/helpers';
export async function conversationalAgentExecute( export async function conversationalAgentExecute(
this: IExecuteFunctions, this: IExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> { ): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing Conversational Agent'); this.logger.verbose('Executing Conversational Agent');
@@ -31,7 +32,8 @@ export async function conversationalAgentExecute(
const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
| BaseChatMemory | BaseChatMemory
| undefined; | undefined;
const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[];
const tools = await getConnectedTools(this, nodeVersion >= 1.5);
const outputParsers = await getOptionalOutputParsers(this); const outputParsers = await getOptionalOutputParsers(this);
// TODO: Make it possible in the future to use values for other items than just 0 // TODO: Make it possible in the future to use values for other items than just 0

View file

@@ -7,16 +7,20 @@ import {
import type { AgentExecutorInput } from 'langchain/agents'; import type { AgentExecutorInput } from 'langchain/agents';
import { AgentExecutor, OpenAIAgent } from 'langchain/agents'; import { AgentExecutor, OpenAIAgent } from 'langchain/agents';
import type { Tool } from 'langchain/tools';
import type { BaseOutputParser } from 'langchain/schema/output_parser'; import type { BaseOutputParser } from 'langchain/schema/output_parser';
import { PromptTemplate } from 'langchain/prompts'; import { PromptTemplate } from 'langchain/prompts';
import { CombiningOutputParser } from 'langchain/output_parsers'; import { CombiningOutputParser } from 'langchain/output_parsers';
import { BufferMemory, type BaseChatMemory } from 'langchain/memory'; import { BufferMemory, type BaseChatMemory } from 'langchain/memory';
import { ChatOpenAI } from 'langchain/chat_models/openai'; import { ChatOpenAI } from 'langchain/chat_models/openai';
import { getOptionalOutputParsers, getPromptInputByType } from '../../../../../utils/helpers'; import {
getConnectedTools,
getOptionalOutputParsers,
getPromptInputByType,
} from '../../../../../utils/helpers';
export async function openAiFunctionsAgentExecute( export async function openAiFunctionsAgentExecute(
this: IExecuteFunctions, this: IExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> { ): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing OpenAi Functions Agent'); this.logger.verbose('Executing OpenAi Functions Agent');
const model = (await this.getInputConnectionData( const model = (await this.getInputConnectionData(
@@ -33,7 +37,7 @@ export async function openAiFunctionsAgentExecute(
const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
| BaseChatMemory | BaseChatMemory
| undefined; | undefined;
const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]; const tools = await getConnectedTools(this, nodeVersion >= 1.5);
const outputParsers = await getOptionalOutputParsers(this); const outputParsers = await getOptionalOutputParsers(this);
const options = this.getNodeParameter('options', 0, {}) as { const options = this.getNodeParameter('options', 0, {}) as {
systemMessage?: string; systemMessage?: string;

View file

@@ -5,16 +5,20 @@ import {
NodeOperationError, NodeOperationError,
} from 'n8n-workflow'; } from 'n8n-workflow';
import type { Tool } from 'langchain/tools';
import type { BaseOutputParser } from 'langchain/schema/output_parser'; import type { BaseOutputParser } from 'langchain/schema/output_parser';
import { PromptTemplate } from 'langchain/prompts'; import { PromptTemplate } from 'langchain/prompts';
import { CombiningOutputParser } from 'langchain/output_parsers'; import { CombiningOutputParser } from 'langchain/output_parsers';
import type { BaseChatModel } from 'langchain/chat_models/base'; import type { BaseChatModel } from 'langchain/chat_models/base';
import { PlanAndExecuteAgentExecutor } from 'langchain/experimental/plan_and_execute'; import { PlanAndExecuteAgentExecutor } from 'langchain/experimental/plan_and_execute';
import { getOptionalOutputParsers, getPromptInputByType } from '../../../../../utils/helpers'; import {
getConnectedTools,
getOptionalOutputParsers,
getPromptInputByType,
} from '../../../../../utils/helpers';
export async function planAndExecuteAgentExecute( export async function planAndExecuteAgentExecute(
this: IExecuteFunctions, this: IExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> { ): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing PlanAndExecute Agent'); this.logger.verbose('Executing PlanAndExecute Agent');
const model = (await this.getInputConnectionData( const model = (await this.getInputConnectionData(
@@ -22,7 +26,7 @@ export async function planAndExecuteAgentExecute(
0, 0,
)) as BaseChatModel; )) as BaseChatModel;
const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]; const tools = await getConnectedTools(this, nodeVersion >= 1.5);
const outputParsers = await getOptionalOutputParsers(this); const outputParsers = await getOptionalOutputParsers(this);

View file

@@ -7,12 +7,12 @@ import {
import { AgentExecutor, ChatAgent, ZeroShotAgent } from 'langchain/agents'; import { AgentExecutor, ChatAgent, ZeroShotAgent } from 'langchain/agents';
import type { BaseLanguageModel } from 'langchain/base_language'; import type { BaseLanguageModel } from 'langchain/base_language';
import type { Tool } from 'langchain/tools';
import type { BaseOutputParser } from 'langchain/schema/output_parser'; import type { BaseOutputParser } from 'langchain/schema/output_parser';
import { PromptTemplate } from 'langchain/prompts'; import { PromptTemplate } from 'langchain/prompts';
import { CombiningOutputParser } from 'langchain/output_parsers'; import { CombiningOutputParser } from 'langchain/output_parsers';
import type { BaseChatModel } from 'langchain/chat_models/base'; import type { BaseChatModel } from 'langchain/chat_models/base';
import { import {
getConnectedTools,
getOptionalOutputParsers, getOptionalOutputParsers,
getPromptInputByType, getPromptInputByType,
isChatInstance, isChatInstance,
@@ -20,6 +20,7 @@ import {
export async function reActAgentAgentExecute( export async function reActAgentAgentExecute(
this: IExecuteFunctions, this: IExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> { ): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing ReAct Agent'); this.logger.verbose('Executing ReAct Agent');
@@ -27,7 +28,7 @@ export async function reActAgentAgentExecute(
| BaseLanguageModel | BaseLanguageModel
| BaseChatModel; | BaseChatModel;
const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]; const tools = await getConnectedTools(this, nodeVersion >= 1.5);
const outputParsers = await getOptionalOutputParsers(this); const outputParsers = await getOptionalOutputParsers(this);

View file

@@ -26,6 +26,7 @@ const parseTablesString = (tablesString: string) =>
export async function sqlAgentAgentExecute( export async function sqlAgentAgentExecute(
this: IExecuteFunctions, this: IExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> { ): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing SQL Agent'); this.logger.verbose('Executing SQL Agent');

View file

@@ -1,7 +1,6 @@
import { AgentExecutor } from 'langchain/agents'; import { AgentExecutor } from 'langchain/agents';
import { OpenAI as OpenAIClient } from 'openai'; import { OpenAI as OpenAIClient } from 'openai';
import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant'; import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant';
import { type Tool } from 'langchain/tools';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type { import type {
IExecuteFunctions, IExecuteFunctions,
@@ -10,6 +9,7 @@ import type {
INodeTypeDescription, INodeTypeDescription,
} from 'n8n-workflow'; } from 'n8n-workflow';
import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema'; import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema';
import { getConnectedTools } from '../../../utils/helpers';
import { formatToOpenAIAssistantTool } from './utils'; import { formatToOpenAIAssistantTool } from './utils';
export class OpenAiAssistant implements INodeType { export class OpenAiAssistant implements INodeType {
@@ -19,7 +19,7 @@ export class OpenAiAssistant implements INodeType {
hidden: true, hidden: true,
icon: 'fa:robot', icon: 'fa:robot',
group: ['transform'], group: ['transform'],
version: 1, version: [1, 1.1],
description: 'Utilizes Assistant API from Open AI.', description: 'Utilizes Assistant API from Open AI.',
subtitle: 'Open AI Assistant', subtitle: 'Open AI Assistant',
defaults: { defaults: {
@@ -311,7 +311,8 @@ export class OpenAiAssistant implements INodeType {
}; };
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> { async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]; const nodeVersion = this.getNode().typeVersion;
const tools = await getConnectedTools(this, nodeVersion > 1);
const credentials = await this.getCredentials('openAiApi'); const credentials = await this.getCredentials('openAiApi');
const items = this.getInputData(); const items = this.getInputData();

View file

@@ -1,15 +1,17 @@
import { AgentExecutor } from 'langchain/agents'; import { AgentExecutor } from 'langchain/agents';
import type { Tool } from 'langchain/tools';
import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant'; import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant';
import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema'; import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema';
import { OpenAI as OpenAIClient } from 'openai'; import { OpenAI as OpenAIClient } from 'openai';
import { NodeConnectionType, NodeOperationError, updateDisplayOptions } from 'n8n-workflow'; import { NodeOperationError, updateDisplayOptions } from 'n8n-workflow';
import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow'; import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';
import { formatToOpenAIAssistantTool } from '../../helpers/utils'; import { formatToOpenAIAssistantTool } from '../../helpers/utils';
import { assistantRLC } from '../descriptions'; import { assistantRLC } from '../descriptions';
import { getConnectedTools } from '../../../../../utils/helpers';
const properties: INodeProperties[] = [ const properties: INodeProperties[] = [
assistantRLC, assistantRLC,
{ {
@@ -97,6 +99,7 @@ export const description = updateDisplayOptions(displayOptions, properties);
export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> { export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
const credentials = await this.getCredentials('openAiApi'); const credentials = await this.getCredentials('openAiApi');
const nodeVersion = this.getNode().typeVersion;
const prompt = this.getNodeParameter('prompt', i) as string; const prompt = this.getNodeParameter('prompt', i) as string;
@@ -131,7 +134,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
const agent = new OpenAIAssistantRunnable({ assistantId, client, asAgent: true }); const agent = new OpenAIAssistantRunnable({ assistantId, client, asAgent: true });
const tools = ((await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]) || []; const tools = await getConnectedTools(this, nodeVersion > 1);
if (tools.length) { if (tools.length) {
const transformedConnectedTools = tools?.map(formatToOpenAIAssistantTool) ?? []; const transformedConnectedTools = tools?.map(formatToOpenAIAssistantTool) ?? [];

View file

@@ -4,13 +4,12 @@ import type {
INodeExecutionData, INodeExecutionData,
IDataObject, IDataObject,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { NodeConnectionType, updateDisplayOptions } from 'n8n-workflow'; import { updateDisplayOptions } from 'n8n-workflow';
import type { Tool } from 'langchain/tools';
import { apiRequest } from '../../transport'; import { apiRequest } from '../../transport';
import type { ChatCompletion } from '../../helpers/interfaces'; import type { ChatCompletion } from '../../helpers/interfaces';
import { formatToOpenAIAssistantTool } from '../../helpers/utils'; import { formatToOpenAIAssistantTool } from '../../helpers/utils';
import { modelRLC } from '../descriptions'; import { modelRLC } from '../descriptions';
import { getConnectedTools } from '../../../../../utils/helpers';
const properties: INodeProperties[] = [ const properties: INodeProperties[] = [
modelRLC, modelRLC,
@@ -166,6 +165,7 @@ const displayOptions = {
export const description = updateDisplayOptions(displayOptions, properties); export const description = updateDisplayOptions(displayOptions, properties);
export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> { export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
const nodeVersion = this.getNode().typeVersion;
const model = this.getNodeParameter('modelId', i, '', { extractValue: true }); const model = this.getNodeParameter('modelId', i, '', { extractValue: true });
let messages = this.getNodeParameter('messages.values', i, []) as IDataObject[]; let messages = this.getNodeParameter('messages.values', i, []) as IDataObject[];
const options = this.getNodeParameter('options', i, {}); const options = this.getNodeParameter('options', i, {});
@@ -183,8 +183,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
]; ];
} }
const externalTools = const externalTools = await getConnectedTools(this, nodeVersion > 1);
((await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]) || [];
let tools; let tools;
if (externalTools.length) { if (externalTools.length) {

View file

@@ -59,7 +59,7 @@ export const versionDescription: INodeTypeDescription = {
name: 'openAi', name: 'openAi',
icon: 'file:openAi.svg', icon: 'file:openAi.svg',
group: ['transform'], group: ['transform'],
version: 1, version: [1, 1.1],
subtitle: `={{(${prettifyOperation})($parameter.resource, $parameter.operation)}}`, subtitle: `={{(${prettifyOperation})($parameter.resource, $parameter.operation)}}`,
description: 'Message an assistant or GPT, analyze images, generate audio, etc.', description: 'Message an assistant or GPT, analyze images, generate audio, etc.',
defaults: { defaults: {

View file

@@ -4,6 +4,7 @@ import { BaseChatModel } from 'langchain/chat_models/base';
import { BaseChatModel as BaseChatModelCore } from '@langchain/core/language_models/chat_models'; import { BaseChatModel as BaseChatModelCore } from '@langchain/core/language_models/chat_models';
import type { BaseOutputParser } from '@langchain/core/output_parsers'; import type { BaseOutputParser } from '@langchain/core/output_parsers';
import type { BaseMessage } from 'langchain/schema'; import type { BaseMessage } from 'langchain/schema';
import { DynamicTool, type Tool } from 'langchain/tools';
export function getMetadataFiltersValues( export function getMetadataFiltersValues(
ctx: IExecuteFunctions, ctx: IExecuteFunctions,
@@ -125,3 +126,26 @@ export function serializeChatHistory(chatHistory: BaseMessage[]): string {
}) })
.join('\n'); .join('\n');
} }
/**
 * Collects the tools connected to the node's `AiTool` input.
 *
 * @param ctx - Execution context of the running node.
 * @param enforceUniqueNames - When true, duplicate names among `DynamicTool`
 *   instances are rejected; older node versions pass false to keep the
 *   previous, lenient behavior.
 * @returns The connected tools (empty array when nothing is connected).
 * @throws NodeOperationError when two dynamic tools share the same name.
 */
export const getConnectedTools = async (ctx: IExecuteFunctions, enforceUniqueNames: boolean) => {
	const connectedTools =
		((await ctx.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]) || [];

	if (!enforceUniqueNames) return connectedTools;

	const usedNames = new Set<string>();
	for (const tool of connectedTools) {
		// Uniqueness is only enforced for DynamicTool instances; other tool
		// types are passed through unchecked.
		if (!(tool instanceof DynamicTool)) continue;

		if (usedNames.has(tool.name)) {
			throw new NodeOperationError(
				ctx.getNode(),
				`You have multiple tools with the same name: '${tool.name}', please rename them to avoid conflicts`,
			);
		}
		usedNames.add(tool.name);
	}

	return connectedTools;
};