fix: AI agents, throw error on duplicate names in dynamic tools (#8766)

Co-authored-by: oleg <me@olegivaniv.com>
This commit is contained in:
Michael Kret 2024-02-29 13:28:38 +02:00 committed by GitHub
parent db4a419c8d
commit 75e4df138f
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
11 changed files with 68 additions and 28 deletions

View file

@@ -161,7 +161,7 @@ export class Agent implements INodeType {
name: 'agent',
icon: 'fa:robot',
group: ['transform'],
version: [1, 1.1, 1.2, 1.3, 1.4],
version: [1, 1.1, 1.2, 1.3, 1.4, 1.5],
description: 'Generates an action plan and executes it. Can use external tools.',
subtitle:
"={{ { conversationalAgent: 'Conversational Agent', openAiFunctionsAgent: 'OpenAI Functions Agent', reActAgent: 'ReAct Agent', sqlAgent: 'SQL Agent', planAndExecuteAgent: 'Plan and Execute Agent' }[$parameter.agent] }}",
@@ -314,17 +314,18 @@ export class Agent implements INodeType {
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const agentType = this.getNodeParameter('agent', 0, '') as string;
const nodeVersion = this.getNode().typeVersion;
if (agentType === 'conversationalAgent') {
return await conversationalAgentExecute.call(this);
return await conversationalAgentExecute.call(this, nodeVersion);
} else if (agentType === 'openAiFunctionsAgent') {
return await openAiFunctionsAgentExecute.call(this);
return await openAiFunctionsAgentExecute.call(this, nodeVersion);
} else if (agentType === 'reActAgent') {
return await reActAgentAgentExecute.call(this);
return await reActAgentAgentExecute.call(this, nodeVersion);
} else if (agentType === 'sqlAgent') {
return await sqlAgentAgentExecute.call(this);
return await sqlAgentAgentExecute.call(this, nodeVersion);
} else if (agentType === 'planAndExecuteAgent') {
return await planAndExecuteAgentExecute.call(this);
return await planAndExecuteAgentExecute.call(this, nodeVersion);
}
throw new NodeOperationError(this.getNode(), `The agent type "${agentType}" is not supported`);

View file

@@ -6,7 +6,6 @@ import {
} from 'n8n-workflow';
import { initializeAgentExecutorWithOptions } from 'langchain/agents';
import type { Tool } from 'langchain/tools';
import type { BaseChatMemory } from 'langchain/memory';
import type { BaseOutputParser } from 'langchain/schema/output_parser';
import { PromptTemplate } from 'langchain/prompts';
@@ -15,10 +14,12 @@ import {
isChatInstance,
getPromptInputByType,
getOptionalOutputParsers,
getConnectedTools,
} from '../../../../../utils/helpers';
export async function conversationalAgentExecute(
this: IExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing Conversational Agent');
@@ -31,7 +32,8 @@ export async function conversationalAgentExecute(
const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
| BaseChatMemory
| undefined;
const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[];
const tools = await getConnectedTools(this, nodeVersion >= 1.5);
const outputParsers = await getOptionalOutputParsers(this);
// TODO: Make it possible in the future to use values for other items than just 0

View file

@@ -7,16 +7,20 @@ import {
import type { AgentExecutorInput } from 'langchain/agents';
import { AgentExecutor, OpenAIAgent } from 'langchain/agents';
import type { Tool } from 'langchain/tools';
import type { BaseOutputParser } from 'langchain/schema/output_parser';
import { PromptTemplate } from 'langchain/prompts';
import { CombiningOutputParser } from 'langchain/output_parsers';
import { BufferMemory, type BaseChatMemory } from 'langchain/memory';
import { ChatOpenAI } from 'langchain/chat_models/openai';
import { getOptionalOutputParsers, getPromptInputByType } from '../../../../../utils/helpers';
import {
getConnectedTools,
getOptionalOutputParsers,
getPromptInputByType,
} from '../../../../../utils/helpers';
export async function openAiFunctionsAgentExecute(
this: IExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing OpenAi Functions Agent');
const model = (await this.getInputConnectionData(
@@ -33,7 +37,7 @@ export async function openAiFunctionsAgentExecute(
const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
| BaseChatMemory
| undefined;
const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[];
const tools = await getConnectedTools(this, nodeVersion >= 1.5);
const outputParsers = await getOptionalOutputParsers(this);
const options = this.getNodeParameter('options', 0, {}) as {
systemMessage?: string;

View file

@@ -5,16 +5,20 @@ import {
NodeOperationError,
} from 'n8n-workflow';
import type { Tool } from 'langchain/tools';
import type { BaseOutputParser } from 'langchain/schema/output_parser';
import { PromptTemplate } from 'langchain/prompts';
import { CombiningOutputParser } from 'langchain/output_parsers';
import type { BaseChatModel } from 'langchain/chat_models/base';
import { PlanAndExecuteAgentExecutor } from 'langchain/experimental/plan_and_execute';
import { getOptionalOutputParsers, getPromptInputByType } from '../../../../../utils/helpers';
import {
getConnectedTools,
getOptionalOutputParsers,
getPromptInputByType,
} from '../../../../../utils/helpers';
export async function planAndExecuteAgentExecute(
this: IExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing PlanAndExecute Agent');
const model = (await this.getInputConnectionData(
@@ -22,7 +26,7 @@ export async function planAndExecuteAgentExecute(
0,
)) as BaseChatModel;
const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[];
const tools = await getConnectedTools(this, nodeVersion >= 1.5);
const outputParsers = await getOptionalOutputParsers(this);

View file

@@ -7,12 +7,12 @@ import {
import { AgentExecutor, ChatAgent, ZeroShotAgent } from 'langchain/agents';
import type { BaseLanguageModel } from 'langchain/base_language';
import type { Tool } from 'langchain/tools';
import type { BaseOutputParser } from 'langchain/schema/output_parser';
import { PromptTemplate } from 'langchain/prompts';
import { CombiningOutputParser } from 'langchain/output_parsers';
import type { BaseChatModel } from 'langchain/chat_models/base';
import {
getConnectedTools,
getOptionalOutputParsers,
getPromptInputByType,
isChatInstance,
@@ -20,6 +20,7 @@ import {
export async function reActAgentAgentExecute(
this: IExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing ReAct Agent');
@@ -27,7 +28,7 @@ export async function reActAgentAgentExecute(
| BaseLanguageModel
| BaseChatModel;
const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[];
const tools = await getConnectedTools(this, nodeVersion >= 1.5);
const outputParsers = await getOptionalOutputParsers(this);

View file

@@ -26,6 +26,7 @@ const parseTablesString = (tablesString: string) =>
export async function sqlAgentAgentExecute(
this: IExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing SQL Agent');

View file

@@ -1,7 +1,6 @@
import { AgentExecutor } from 'langchain/agents';
import { OpenAI as OpenAIClient } from 'openai';
import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant';
import { type Tool } from 'langchain/tools';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type {
IExecuteFunctions,
@@ -10,6 +9,7 @@ import type {
INodeTypeDescription,
} from 'n8n-workflow';
import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema';
import { getConnectedTools } from '../../../utils/helpers';
import { formatToOpenAIAssistantTool } from './utils';
export class OpenAiAssistant implements INodeType {
@@ -19,7 +19,7 @@ export class OpenAiAssistant implements INodeType {
hidden: true,
icon: 'fa:robot',
group: ['transform'],
version: 1,
version: [1, 1.1],
description: 'Utilizes Assistant API from Open AI.',
subtitle: 'Open AI Assistant',
defaults: {
@@ -311,7 +311,8 @@ export class OpenAiAssistant implements INodeType {
};
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const tools = (await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[];
const nodeVersion = this.getNode().typeVersion;
const tools = await getConnectedTools(this, nodeVersion > 1);
const credentials = await this.getCredentials('openAiApi');
const items = this.getInputData();

View file

@@ -1,15 +1,17 @@
import { AgentExecutor } from 'langchain/agents';
import type { Tool } from 'langchain/tools';
import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant';
import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema';
import { OpenAI as OpenAIClient } from 'openai';
import { NodeConnectionType, NodeOperationError, updateDisplayOptions } from 'n8n-workflow';
import { NodeOperationError, updateDisplayOptions } from 'n8n-workflow';
import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow';
import { formatToOpenAIAssistantTool } from '../../helpers/utils';
import { assistantRLC } from '../descriptions';
import { getConnectedTools } from '../../../../../utils/helpers';
const properties: INodeProperties[] = [
assistantRLC,
{
@@ -97,6 +99,7 @@ export const description = updateDisplayOptions(displayOptions, properties);
export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
const credentials = await this.getCredentials('openAiApi');
const nodeVersion = this.getNode().typeVersion;
const prompt = this.getNodeParameter('prompt', i) as string;
@@ -131,7 +134,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
const agent = new OpenAIAssistantRunnable({ assistantId, client, asAgent: true });
const tools = ((await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]) || [];
const tools = await getConnectedTools(this, nodeVersion > 1);
if (tools.length) {
const transformedConnectedTools = tools?.map(formatToOpenAIAssistantTool) ?? [];

View file

@@ -4,13 +4,12 @@ import type {
INodeExecutionData,
IDataObject,
} from 'n8n-workflow';
import { NodeConnectionType, updateDisplayOptions } from 'n8n-workflow';
import type { Tool } from 'langchain/tools';
import { updateDisplayOptions } from 'n8n-workflow';
import { apiRequest } from '../../transport';
import type { ChatCompletion } from '../../helpers/interfaces';
import { formatToOpenAIAssistantTool } from '../../helpers/utils';
import { modelRLC } from '../descriptions';
import { getConnectedTools } from '../../../../../utils/helpers';
const properties: INodeProperties[] = [
modelRLC,
@@ -166,6 +165,7 @@ const displayOptions = {
export const description = updateDisplayOptions(displayOptions, properties);
export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
const nodeVersion = this.getNode().typeVersion;
const model = this.getNodeParameter('modelId', i, '', { extractValue: true });
let messages = this.getNodeParameter('messages.values', i, []) as IDataObject[];
const options = this.getNodeParameter('options', i, {});
@@ -183,8 +183,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
];
}
const externalTools =
((await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]) || [];
const externalTools = await getConnectedTools(this, nodeVersion > 1);
let tools;
if (externalTools.length) {

View file

@@ -59,7 +59,7 @@ export const versionDescription: INodeTypeDescription = {
name: 'openAi',
icon: 'file:openAi.svg',
group: ['transform'],
version: 1,
version: [1, 1.1],
subtitle: `={{(${prettifyOperation})($parameter.resource, $parameter.operation)}}`,
description: 'Message an assistant or GPT, analyze images, generate audio, etc.',
defaults: {

View file

@@ -4,6 +4,7 @@ import { BaseChatModel } from 'langchain/chat_models/base';
import { BaseChatModel as BaseChatModelCore } from '@langchain/core/language_models/chat_models';
import type { BaseOutputParser } from '@langchain/core/output_parsers';
import type { BaseMessage } from 'langchain/schema';
import { DynamicTool, type Tool } from 'langchain/tools';
export function getMetadataFiltersValues(
ctx: IExecuteFunctions,
@@ -125,3 +126,26 @@ export function serializeChatHistory(chatHistory: BaseMessage[]): string {
})
.join('\n');
}
/**
 * Fetches all tools wired into the node's AiTool input connection.
 *
 * @param ctx - The executing node's context, used to read the tool connection.
 * @param enforceUniqueNames - When true, duplicate names among `DynamicTool`
 *   instances (user-defined tools, whose names can collide) cause a
 *   `NodeOperationError`; built-in tools are not checked.
 * @returns The connected tools (empty array when nothing is connected).
 * @throws NodeOperationError if two dynamic tools share the same name.
 */
export const getConnectedTools = async (ctx: IExecuteFunctions, enforceUniqueNames: boolean) => {
	const tools = ((await ctx.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]) || [];

	if (enforceUniqueNames) {
		const usedNames = new Set<string>();

		for (const tool of tools) {
			// Only DynamicTool names are user-controlled, so only they can clash.
			if (tool instanceof DynamicTool) {
				if (usedNames.has(tool.name)) {
					throw new NodeOperationError(
						ctx.getNode(),
						`You have multiple tools with the same name: '${tool.name}', please rename them to avoid conflicts`,
					);
				}
				usedNames.add(tool.name);
			}
		}
	}

	return tools;
};