diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts index 230a6ddc6e..ff5c8871f2 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/Agent.node.ts @@ -1,10 +1,9 @@ -import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; +import { AiRootNode, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import type { INodeInputConfiguration, INodeInputFilter, - IExecuteFunctions, + AiRootNodeExecuteFunctions, INodeExecutionData, - INodeType, INodeTypeDescription, INodeProperties, } from 'n8n-workflow'; @@ -245,7 +244,7 @@ const agentTypeProperty: INodeProperties = { default: '', }; -export class Agent implements INodeType { +export class Agent extends AiRootNode { description: INodeTypeDescription = { displayName: 'AI Agent', name: 'agent', @@ -416,24 +415,27 @@ export class Agent implements INodeType { ], }; - async execute(this: IExecuteFunctions): Promise { - const agentType = this.getNodeParameter('agent', 0, '') as string; - const nodeVersion = this.getNode().typeVersion; + async execute(context: AiRootNodeExecuteFunctions): Promise { + const agentType = context.getNodeParameter('agent', 0, '') as string; + const nodeVersion = context.getNode().typeVersion; if (agentType === 'conversationalAgent') { - return await conversationalAgentExecute.call(this, nodeVersion); + return await conversationalAgentExecute.call(context, nodeVersion); } else if (agentType === 'toolsAgent') { - return await toolsAgentExecute.call(this); + return await toolsAgentExecute.call(context); } else if (agentType === 'openAiFunctionsAgent') { - return await openAiFunctionsAgentExecute.call(this, nodeVersion); + return await openAiFunctionsAgentExecute.call(context, nodeVersion); } else if (agentType === 'reActAgent') { - return await reActAgentAgentExecute.call(this, nodeVersion); + return await 
reActAgentAgentExecute.call(context, nodeVersion); } else if (agentType === 'sqlAgent') { - return await sqlAgentAgentExecute.call(this); + return await sqlAgentAgentExecute.call(context); } else if (agentType === 'planAndExecuteAgent') { - return await planAndExecuteAgentExecute.call(this, nodeVersion); + return await planAndExecuteAgentExecute.call(context, nodeVersion); } - throw new NodeOperationError(this.getNode(), `The agent type "${agentType}" is not supported`); + throw new NodeOperationError( + context.getNode(), + `The agent type "${agentType}" is not supported`, + ); } } diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts index 04a565971c..a88407ff9d 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ConversationalAgent/execute.ts @@ -3,18 +3,15 @@ import type { BaseOutputParser } from '@langchain/core/output_parsers'; import { PromptTemplate } from '@langchain/core/prompts'; import { initializeAgentExecutorWithOptions } from 'langchain/agents'; import { CombiningOutputParser } from 'langchain/output_parsers'; -import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; +import type { AiRootNodeExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; -import { isChatInstance, getPromptInputByType, getConnectedTools } from '@utils/helpers'; +import { isChatInstance } from '@utils/helpers'; import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser'; import { throwIfToolSchema } from '@utils/schemaParsing'; -import { getTracingConfig } from '@utils/tracing'; - -import { checkForStructuredTools, extractParsedOutput } from '../utils'; export async function conversationalAgentExecute( - this: 
IExecuteFunctions, + this: AiRootNodeExecuteFunctions, nodeVersion: number, ): Promise { this.logger.debug('Executing Conversational Agent'); @@ -28,10 +25,10 @@ export async function conversationalAgentExecute( | BaseChatMemory | undefined; - const tools = await getConnectedTools(this, nodeVersion >= 1.5, true, true); + const tools = await this.getConnectedTools(nodeVersion >= 1.5, true, true); const outputParsers = await getOptionalOutputParsers(this); - await checkForStructuredTools(tools, this.getNode(), 'Conversational Agent'); + this.checkForStructuredTools(tools, this.getNode(), 'Conversational Agent'); // TODO: Make it possible in the future to use values for other items than just 0 const options = this.getNodeParameter('options', 0, {}) as { @@ -86,12 +83,7 @@ export async function conversationalAgentExecute( if (this.getNode().typeVersion <= 1.2) { input = this.getNodeParameter('text', itemIndex) as string; } else { - input = getPromptInputByType({ - ctx: this, - i: itemIndex, - inputKey: 'text', - promptTypeKey: 'promptType', - }); + input = this.getPromptInputByType(itemIndex); } if (input === undefined) { @@ -103,11 +95,11 @@ export async function conversationalAgentExecute( } const response = await agentExecutor - .withConfig(getTracingConfig(this)) + .withConfig(this.getTracingConfig()) .invoke({ input, outputParsers }); if (outputParser) { - response.output = await extractParsedOutput(this, outputParser, response.output as string); + response.output = await this.extractParsedOutput(outputParser, response.output as string); } returnData.push({ json: response }); diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts index 17a2d43590..cdff8a5a11 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts +++ 
b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/OpenAiFunctionsAgent/execute.ts @@ -6,20 +6,16 @@ import { AgentExecutor, OpenAIAgent } from 'langchain/agents'; import { BufferMemory, type BaseChatMemory } from 'langchain/memory'; import { CombiningOutputParser } from 'langchain/output_parsers'; import { - type IExecuteFunctions, + type AiRootNodeExecuteFunctions, type INodeExecutionData, NodeConnectionType, NodeOperationError, } from 'n8n-workflow'; -import { getConnectedTools, getPromptInputByType } from '@utils/helpers'; import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser'; -import { getTracingConfig } from '@utils/tracing'; - -import { extractParsedOutput } from '../utils'; export async function openAiFunctionsAgentExecute( - this: IExecuteFunctions, + this: AiRootNodeExecuteFunctions, nodeVersion: number, ): Promise { this.logger.debug('Executing OpenAi Functions Agent'); @@ -37,7 +33,7 @@ export async function openAiFunctionsAgentExecute( const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as | BaseChatMemory | undefined; - const tools = await getConnectedTools(this, nodeVersion >= 1.5, false); + const tools = await this.getConnectedTools(nodeVersion >= 1.5, false); const outputParsers = await getOptionalOutputParsers(this); const options = this.getNodeParameter('options', 0, {}) as { systemMessage?: string; @@ -89,12 +85,7 @@ export async function openAiFunctionsAgentExecute( if (this.getNode().typeVersion <= 1.2) { input = this.getNodeParameter('text', itemIndex) as string; } else { - input = getPromptInputByType({ - ctx: this, - i: itemIndex, - inputKey: 'text', - promptTypeKey: 'promptType', - }); + input = this.getPromptInputByType(itemIndex); } if (input === undefined) { @@ -106,11 +97,11 @@ export async function openAiFunctionsAgentExecute( } const response = await agentExecutor - .withConfig(getTracingConfig(this)) + .withConfig(this.getTracingConfig()) .invoke({ input, outputParsers 
}); if (outputParser) { - response.output = await extractParsedOutput(this, outputParser, response.output as string); + response.output = await this.extractParsedOutput(outputParser, response.output as string); } returnData.push({ json: response }); diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts index 379475f923..9a32bc5888 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/PlanAndExecuteAgent/execute.ts @@ -4,21 +4,17 @@ import { PromptTemplate } from '@langchain/core/prompts'; import { PlanAndExecuteAgentExecutor } from 'langchain/experimental/plan_and_execute'; import { CombiningOutputParser } from 'langchain/output_parsers'; import { - type IExecuteFunctions, + type AiRootNodeExecuteFunctions, type INodeExecutionData, NodeConnectionType, NodeOperationError, } from 'n8n-workflow'; -import { getConnectedTools, getPromptInputByType } from '@utils/helpers'; import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser'; import { throwIfToolSchema } from '@utils/schemaParsing'; -import { getTracingConfig } from '@utils/tracing'; - -import { checkForStructuredTools, extractParsedOutput } from '../utils'; export async function planAndExecuteAgentExecute( - this: IExecuteFunctions, + this: AiRootNodeExecuteFunctions, nodeVersion: number, ): Promise { this.logger.debug('Executing PlanAndExecute Agent'); @@ -27,9 +23,9 @@ export async function planAndExecuteAgentExecute( 0, )) as BaseChatModel; - const tools = await getConnectedTools(this, nodeVersion >= 1.5, true, true); + const tools = await this.getConnectedTools(nodeVersion >= 1.5, true, true); - await checkForStructuredTools(tools, this.getNode(), 'Plan & Execute Agent'); + this.checkForStructuredTools(tools, this.getNode(), 'Plan & Execute Agent'); 
const outputParsers = await getOptionalOutputParsers(this); const options = this.getNodeParameter('options', 0, {}) as { @@ -66,12 +62,7 @@ export async function planAndExecuteAgentExecute( if (this.getNode().typeVersion <= 1.2) { input = this.getNodeParameter('text', itemIndex) as string; } else { - input = getPromptInputByType({ - ctx: this, - i: itemIndex, - inputKey: 'text', - promptTypeKey: 'promptType', - }); + input = this.getPromptInputByType(itemIndex); } if (input === undefined) { @@ -83,11 +74,11 @@ export async function planAndExecuteAgentExecute( } const response = await agentExecutor - .withConfig(getTracingConfig(this)) + .withConfig(this.getTracingConfig()) .invoke({ input, outputParsers }); if (outputParser) { - response.output = await extractParsedOutput(this, outputParser, response.output as string); + response.output = await this.extractParsedOutput(outputParser, response.output as string); } returnData.push({ json: response }); diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts index 4db35634d6..703ff8006d 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ReActAgent/execute.ts @@ -5,21 +5,18 @@ import { PromptTemplate } from '@langchain/core/prompts'; import { AgentExecutor, ChatAgent, ZeroShotAgent } from 'langchain/agents'; import { CombiningOutputParser } from 'langchain/output_parsers'; import { - type IExecuteFunctions, + type AiRootNodeExecuteFunctions, type INodeExecutionData, NodeConnectionType, NodeOperationError, } from 'n8n-workflow'; -import { getConnectedTools, getPromptInputByType, isChatInstance } from '@utils/helpers'; +import { isChatInstance } from '@utils/helpers'; import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser'; import { throwIfToolSchema } from '@utils/schemaParsing'; 
-import { getTracingConfig } from '@utils/tracing'; - -import { checkForStructuredTools, extractParsedOutput } from '../utils'; export async function reActAgentAgentExecute( - this: IExecuteFunctions, + this: AiRootNodeExecuteFunctions, nodeVersion: number, ): Promise { this.logger.debug('Executing ReAct Agent'); @@ -28,9 +25,9 @@ export async function reActAgentAgentExecute( | BaseLanguageModel | BaseChatModel; - const tools = await getConnectedTools(this, nodeVersion >= 1.5, true, true); + const tools = await this.getConnectedTools(nodeVersion >= 1.5, true, true); - await checkForStructuredTools(tools, this.getNode(), 'ReAct Agent'); + this.checkForStructuredTools(tools, this.getNode(), 'ReAct Agent'); const outputParsers = await getOptionalOutputParsers(this); @@ -87,12 +84,7 @@ export async function reActAgentAgentExecute( if (this.getNode().typeVersion <= 1.2) { input = this.getNodeParameter('text', itemIndex) as string; } else { - input = getPromptInputByType({ - ctx: this, - i: itemIndex, - inputKey: 'text', - promptTypeKey: 'promptType', - }); + input = this.getPromptInputByType(itemIndex); } if (input === undefined) { @@ -104,11 +96,11 @@ export async function reActAgentAgentExecute( } const response = await agentExecutor - .withConfig(getTracingConfig(this)) + .withConfig(this.getTracingConfig()) .invoke({ input, outputParsers }); if (outputParser) { - response.output = await extractParsedOutput(this, outputParser, response.output as string); + response.output = await this.extractParsedOutput(outputParser, response.output as string); } returnData.push({ json: response }); diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts index 369ca109af..f8eb1e8311 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/SqlAgent/execute.ts @@ -5,15 +5,14 @@ 
import type { SqlCreatePromptArgs } from 'langchain/agents/toolkits/sql'; import { SqlToolkit, createSqlAgent } from 'langchain/agents/toolkits/sql'; import { SqlDatabase } from 'langchain/sql_db'; import { - type IExecuteFunctions, + type AiRootNodeExecuteFunctions, type INodeExecutionData, NodeConnectionType, NodeOperationError, type IDataObject, } from 'n8n-workflow'; -import { getPromptInputByType, serializeChatHistory } from '@utils/helpers'; -import { getTracingConfig } from '@utils/tracing'; +import { serializeChatHistory } from '@utils/helpers'; import { getMysqlDataSource } from './other/handlers/mysql'; import { getPostgresDataSource } from './other/handlers/postgres'; @@ -27,7 +26,7 @@ const parseTablesString = (tablesString: string) => .filter((table) => table.length > 0); export async function sqlAgentAgentExecute( - this: IExecuteFunctions, + this: AiRootNodeExecuteFunctions, ): Promise { this.logger.debug('Executing SQL Agent'); @@ -39,27 +38,22 @@ export async function sqlAgentAgentExecute( const returnData: INodeExecutionData[] = []; - for (let i = 0; i < items.length; i++) { + for (let itemIndex = 0; itemIndex < items.length; itemIndex++) { try { - const item = items[i]; + const item = items[itemIndex]; let input; if (this.getNode().typeVersion <= 1.2) { - input = this.getNodeParameter('input', i) as string; + input = this.getNodeParameter('input', itemIndex) as string; } else { - input = getPromptInputByType({ - ctx: this, - i, - inputKey: 'text', - promptTypeKey: 'promptType', - }); + input = this.getPromptInputByType(itemIndex, 'text', 'promptType'); } if (input === undefined) { throw new NodeOperationError(this.getNode(), 'The ‘prompt’ parameter is empty.'); } - const options = this.getNodeParameter('options', i, {}); - const selectedDataSource = this.getNodeParameter('dataSource', i, 'sqlite') as + const options = this.getNodeParameter('options', itemIndex, {}); + const selectedDataSource = this.getNodeParameter('dataSource', itemIndex, 
'sqlite') as | 'mysql' | 'postgres' | 'sqlite'; @@ -77,7 +71,7 @@ export async function sqlAgentAgentExecute( ); } - const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i, 'data'); + const binaryPropertyName = this.getNodeParameter('binaryPropertyName', itemIndex, 'data'); dataSource = await getSqliteDataSource.call(this, item.binary, binaryPropertyName); } @@ -127,7 +121,7 @@ export async function sqlAgentAgentExecute( let response: IDataObject; try { - response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({ + response = await agentExecutor.withConfig(this.getTracingConfig()).invoke({ input, signal: this.getExecutionCancelSignal(), chatHistory, @@ -136,14 +130,14 @@ export async function sqlAgentAgentExecute( if ((error.message as IDataObject)?.output) { response = error.message as IDataObject; } else { - throw new NodeOperationError(this.getNode(), error.message as string, { itemIndex: i }); + throw new NodeOperationError(this.getNode(), error.message as string, { itemIndex }); } } returnData.push({ json: response }); } catch (error) { if (this.continueOnFail()) { - returnData.push({ json: { error: error.message }, pairedItem: { item: i } }); + returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } }); continue; } diff --git a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts index b0e36d0d8b..11b9b19d63 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/Agent/agents/ToolsAgent/execute.ts @@ -10,11 +10,10 @@ import type { AgentAction, AgentFinish } from 'langchain/agents'; import { AgentExecutor, createToolCallingAgent } from 'langchain/agents'; import { omit } from 'lodash'; import { BINARY_ENCODING, jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; -import type { IExecuteFunctions, 
INodeExecutionData } from 'n8n-workflow'; -import type { ZodObject } from 'zod'; +import type { AiRootNodeExecuteFunctions, INodeExecutionData, ZodObjectAny } from 'n8n-workflow'; import { z } from 'zod'; -import { isChatInstance, getPromptInputByType, getConnectedTools } from '@utils/helpers'; +import { isChatInstance } from '@utils/helpers'; import { getOptionalOutputParsers, type N8nOutputParser, @@ -22,14 +21,13 @@ import { import { SYSTEM_MESSAGE } from './prompt'; -function getOutputParserSchema(outputParser: N8nOutputParser): ZodObject { - const schema = - (outputParser.getSchema() as ZodObject) ?? z.object({ text: z.string() }); +function getOutputParserSchema(outputParser: N8nOutputParser): ZodObjectAny { + const schema = (outputParser.getSchema() as ZodObjectAny) ?? z.object({ text: z.string() }); return schema; } -async function extractBinaryMessages(ctx: IExecuteFunctions) { +async function extractBinaryMessages(ctx: AiRootNodeExecuteFunctions) { const binaryData = ctx.getInputData()?.[0]?.binary ?? 
{}; const binaryMessages = await Promise.all( Object.values(binaryData) @@ -96,7 +94,9 @@ function fixEmptyContentMessage(steps: AgentFinish | AgentAction[]) { return steps; } -export async function toolsAgentExecute(this: IExecuteFunctions): Promise { +export async function toolsAgentExecute( + this: AiRootNodeExecuteFunctions, +): Promise { this.logger.debug('Executing Tools Agent'); const model = await this.getInputConnectionData(NodeConnectionType.AiLanguageModel, 0); @@ -111,7 +111,7 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise; + const tools = (await this.getConnectedTools(true, false)) as Array; const outputParser = (await getOptionalOutputParsers(this))?.[0]; let structuredOutputParserTool: DynamicStructuredTool | undefined; /** @@ -289,13 +289,7 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise; - -export async function extractParsedOutput( - ctx: IExecuteFunctions, - outputParser: BaseOutputParser, - output: string, -): Promise | undefined> { - const parsedOutput = (await outputParser.parse(output)) as { - output: Record; - }; - - if (ctx.getNode().typeVersion <= 1.6) { - return parsedOutput; - } - // For 1.7 and above, we try to extract the output from the parsed output - // with fallback to the original output if it's not present - return parsedOutput?.output ?? 
parsedOutput; -} - -export async function checkForStructuredTools( - tools: Array>, - node: INode, - currentAgentType: string, -) { - const dynamicStructuredTools = tools.filter( - (tool) => tool.constructor.name === 'DynamicStructuredTool', - ); - if (dynamicStructuredTools.length > 0) { - const getToolName = (tool: Tool | DynamicStructuredTool) => `"${tool.name}"`; - throw new NodeOperationError( - node, - `The selected tools are not supported by "${currentAgentType}", please use "Tools Agent" instead`, - { - itemIndex: 0, - description: `Incompatible connected tools: ${dynamicStructuredTools.map(getToolName).join(', ')}`, - }, - ); - } -} diff --git a/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts b/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts index e44ad8f9d2..05630ef858 100644 --- a/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/agents/OpenAiAssistant/OpenAiAssistant.node.ts @@ -1,21 +1,17 @@ import { AgentExecutor } from 'langchain/agents'; import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema'; import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant'; -import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; +import { AiRootNode, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import type { - IExecuteFunctions, + AiRootNodeExecuteFunctions, INodeExecutionData, - INodeType, INodeTypeDescription, } from 'n8n-workflow'; import { OpenAI as OpenAIClient } from 'openai'; -import { getConnectedTools } from '@utils/helpers'; -import { getTracingConfig } from '@utils/tracing'; - import { formatToOpenAIAssistantTool } from './utils'; -export class OpenAiAssistant implements INodeType { +export class OpenAiAssistant extends AiRootNode { description: INodeTypeDescription = { displayName: 'OpenAI Assistant', name: 'openAiAssistant', @@ 
-313,30 +309,30 @@ export class OpenAiAssistant implements INodeType { ], }; - async execute(this: IExecuteFunctions): Promise { - const nodeVersion = this.getNode().typeVersion; - const tools = await getConnectedTools(this, nodeVersion > 1, false); - const credentials = await this.getCredentials('openAiApi'); + async execute(context: AiRootNodeExecuteFunctions): Promise { + const nodeVersion = context.getNode().typeVersion; + const tools = await context.getConnectedTools(nodeVersion > 1, false); + const credentials = await context.getCredentials('openAiApi'); - const items = this.getInputData(); + const items = context.getInputData(); const returnData: INodeExecutionData[] = []; for (let itemIndex = 0; itemIndex < items.length; itemIndex++) { try { - const input = this.getNodeParameter('text', itemIndex) as string; - const assistantId = this.getNodeParameter('assistantId', itemIndex, '') as string; - const nativeTools = this.getNodeParameter('nativeTools', itemIndex, []) as Array< + const input = context.getNodeParameter('text', itemIndex) as string; + const assistantId = context.getNodeParameter('assistantId', itemIndex, '') as string; + const nativeTools = context.getNodeParameter('nativeTools', itemIndex, []) as Array< 'code_interpreter' | 'retrieval' >; - const options = this.getNodeParameter('options', itemIndex, {}) as { + const options = context.getNodeParameter('options', itemIndex, {}) as { baseURL?: string; maxRetries: number; timeout: number; }; if (input === undefined) { - throw new NodeOperationError(this.getNode(), 'The ‘text‘ parameter is empty.'); + throw new NodeOperationError(context.getNode(), 'The ‘text‘ parameter is empty.'); } const client = new OpenAIClient({ @@ -358,9 +354,13 @@ export class OpenAiAssistant implements INodeType { tools: newTools, }); } else { - const name = this.getNodeParameter('name', itemIndex, '') as string; - const instructions = this.getNodeParameter('instructions', itemIndex, '') as string; - const model = 
this.getNodeParameter('model', itemIndex, 'gpt-3.5-turbo-1106') as string; + const name = context.getNodeParameter('name', itemIndex, '') as string; + const instructions = context.getNodeParameter('instructions', itemIndex, '') as string; + const model = context.getNodeParameter( + 'model', + itemIndex, + 'gpt-3.5-turbo-1106', + ) as string; agent = await OpenAIAssistantRunnable.createAssistant({ model, @@ -377,15 +377,15 @@ export class OpenAiAssistant implements INodeType { tools, }); - const response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({ + const response = await agentExecutor.withConfig(context.getTracingConfig()).invoke({ content: input, - signal: this.getExecutionCancelSignal(), + signal: context.getExecutionCancelSignal(), timeout: options.timeout ?? 10000, }); returnData.push({ json: response }); } catch (error) { - if (this.continueOnFail()) { + if (context.continueOnFail()) { returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } }); continue; } diff --git a/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/ChainLlm.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/ChainLlm.node.ts index d4e205ec88..54360f78e9 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/ChainLlm.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/ChainLLM/ChainLlm.node.ts @@ -15,12 +15,12 @@ import { CombiningOutputParser } from 'langchain/output_parsers'; import type { IBinaryData, IDataObject, - IExecuteFunctions, + AiRootNodeExecuteFunctions, INodeExecutionData, - INodeType, INodeTypeDescription, } from 'n8n-workflow'; import { + AiRootNode, ApplicationError, NodeApiError, NodeConnectionType, @@ -28,11 +28,10 @@ import { } from 'n8n-workflow'; import { promptTypeOptions, textFromPreviousNode } from '@utils/descriptions'; -import { getPromptInputByType, isChatInstance } from '@utils/helpers'; +import { isChatInstance } from '@utils/helpers'; import type { N8nOutputParser } from 
'@utils/output_parsers/N8nOutputParser'; import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser'; import { getTemplateNoticeField } from '@utils/sharedFields'; -import { getTracingConfig } from '@utils/tracing'; import { getCustomErrorMessage as getCustomOpenAiErrorMessage, @@ -49,7 +48,7 @@ interface MessagesTemplate { } async function getImageMessage( - context: IExecuteFunctions, + context: AiRootNodeExecuteFunctions, itemIndex: number, message: MessagesTemplate, ) { @@ -106,7 +105,7 @@ async function getImageMessage( } async function getChainPromptTemplate( - context: IExecuteFunctions, + context: AiRootNodeExecuteFunctions, itemIndex: number, llm: BaseLanguageModel | BaseChatModel, messages?: MessagesTemplate[], @@ -165,7 +164,7 @@ async function getChainPromptTemplate( } async function createSimpleLLMChain( - context: IExecuteFunctions, + context: AiRootNodeExecuteFunctions, llm: BaseLanguageModel, query: string, prompt: ChatPromptTemplate | PromptTemplate, @@ -173,7 +172,7 @@ async function createSimpleLLMChain( const chain = new LLMChain({ llm, prompt, - }).withConfig(getTracingConfig(context)); + }).withConfig(context.getTracingConfig()); const response = (await chain.invoke({ query, @@ -184,7 +183,7 @@ async function createSimpleLLMChain( } async function getChain( - context: IExecuteFunctions, + context: AiRootNodeExecuteFunctions, itemIndex: number, query: string, llm: BaseLanguageModel, @@ -222,7 +221,7 @@ async function getChain( ); const chain = prompt.pipe(llm).pipe(combinedOutputParser); - const response = (await chain.withConfig(getTracingConfig(context)).invoke({ query })) as + const response = (await chain.withConfig(context.getTracingConfig()).invoke({ query })) as | string | string[]; @@ -249,7 +248,7 @@ function getInputs(parameters: IDataObject) { return inputs; } -export class ChainLlm implements INodeType { +export class ChainLlm extends AiRootNode { description: INodeTypeDescription = { displayName: 'Basic LLM 
Chain', name: 'chainLlm', @@ -510,42 +509,37 @@ export class ChainLlm implements INodeType { ], }; - async execute(this: IExecuteFunctions): Promise { - this.logger.debug('Executing LLM Chain'); - const items = this.getInputData(); + async execute(context: AiRootNodeExecuteFunctions): Promise { + context.logger.debug('Executing LLM Chain'); + const items = context.getInputData(); const returnData: INodeExecutionData[] = []; - const llm = (await this.getInputConnectionData( + const llm = (await context.getInputConnectionData( NodeConnectionType.AiLanguageModel, 0, )) as BaseLanguageModel; - const outputParsers = await getOptionalOutputParsers(this); + const outputParsers = await getOptionalOutputParsers(context); for (let itemIndex = 0; itemIndex < items.length; itemIndex++) { try { let prompt: string; - if (this.getNode().typeVersion <= 1.3) { - prompt = this.getNodeParameter('prompt', itemIndex) as string; + if (context.getNode().typeVersion <= 1.3) { + prompt = context.getNodeParameter('prompt', itemIndex) as string; } else { - prompt = getPromptInputByType({ - ctx: this, - i: itemIndex, - inputKey: 'text', - promptTypeKey: 'promptType', - }); + prompt = context.getPromptInputByType(itemIndex); } - const messages = this.getNodeParameter( + const messages = context.getNodeParameter( 'messages.messageValues', itemIndex, [], ) as MessagesTemplate[]; if (prompt === undefined) { - throw new NodeOperationError(this.getNode(), "The 'prompt' parameter is empty."); + throw new NodeOperationError(context.getNode(), "The 'prompt' parameter is empty."); } - const responses = await getChain(this, itemIndex, prompt, llm, outputParsers, messages); + const responses = await getChain(context, itemIndex, prompt, llm, outputParsers, messages); responses.forEach((response) => { let data: IDataObject; @@ -586,7 +580,7 @@ export class ChainLlm implements INodeType { } } - if (this.continueOnFail()) { + if (context.continueOnFail()) { returnData.push({ json: { error: error.message }, 
pairedItem: { item: itemIndex } }); continue; } diff --git a/packages/@n8n/nodes-langchain/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.ts index 9c7c739701..ea53eb3a12 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/ChainRetrievalQA/ChainRetrievalQa.node.ts @@ -9,24 +9,23 @@ import type { BaseRetriever } from '@langchain/core/retrievers'; import { RetrievalQAChain } from 'langchain/chains'; import { NodeConnectionType, - type IExecuteFunctions, + type AiRootNodeExecuteFunctions, type INodeExecutionData, - type INodeType, type INodeTypeDescription, NodeOperationError, + AiRootNode, } from 'n8n-workflow'; import { promptTypeOptions, textFromPreviousNode } from '@utils/descriptions'; -import { getPromptInputByType, isChatInstance } from '@utils/helpers'; +import { isChatInstance } from '@utils/helpers'; import { getTemplateNoticeField } from '@utils/sharedFields'; -import { getTracingConfig } from '@utils/tracing'; const SYSTEM_PROMPT_TEMPLATE = `Use the following pieces of context to answer the users question. If you don't know the answer, just say that you don't know, don't try to make up an answer. 
---------------- {context}`; -export class ChainRetrievalQa implements INodeType { +export class ChainRetrievalQa extends AiRootNode { description: INodeTypeDescription = { displayName: 'Question and Answer Chain', name: 'chainRetrievalQa', @@ -158,20 +157,20 @@ export class ChainRetrievalQa implements INodeType { ], }; - async execute(this: IExecuteFunctions): Promise { - this.logger.debug('Executing Retrieval QA Chain'); + async execute(context: AiRootNodeExecuteFunctions): Promise { + context.logger.debug('Executing Retrieval QA Chain'); - const model = (await this.getInputConnectionData( + const model = (await context.getInputConnectionData( NodeConnectionType.AiLanguageModel, 0, )) as BaseLanguageModel; - const retriever = (await this.getInputConnectionData( + const retriever = (await context.getInputConnectionData( NodeConnectionType.AiRetriever, 0, )) as BaseRetriever; - const items = this.getInputData(); + const items = context.getInputData(); const returnData: INodeExecutionData[] = []; @@ -180,22 +179,17 @@ export class ChainRetrievalQa implements INodeType { try { let query; - if (this.getNode().typeVersion <= 1.2) { - query = this.getNodeParameter('query', itemIndex) as string; + if (context.getNode().typeVersion <= 1.2) { + query = context.getNodeParameter('query', itemIndex) as string; } else { - query = getPromptInputByType({ - ctx: this, - i: itemIndex, - inputKey: 'text', - promptTypeKey: 'promptType', - }); + query = context.getPromptInputByType(itemIndex); } if (query === undefined) { - throw new NodeOperationError(this.getNode(), 'The ‘query‘ parameter is empty.'); + throw new NodeOperationError(context.getNode(), 'The ‘query‘ parameter is empty.'); } - const options = this.getNodeParameter('options', itemIndex, {}) as { + const options = context.getNodeParameter('options', itemIndex, {}) as { systemPromptTemplate?: string; }; @@ -224,10 +218,10 @@ export class ChainRetrievalQa implements INodeType { const chain = RetrievalQAChain.fromLLM(model, 
retriever, chainParameters); - const response = await chain.withConfig(getTracingConfig(this)).invoke({ query }); + const response = await chain.withConfig(context.getTracingConfig()).invoke({ query }); returnData.push({ json: { response } }); } catch (error) { - if (this.continueOnFail()) { + if (context.continueOnFail()) { returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } }); continue; } diff --git a/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/ChainSummarization.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/ChainSummarization.node.ts index cd47eb6a15..ba64cbb7fa 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/ChainSummarization.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/ChainSummarization.node.ts @@ -1,4 +1,4 @@ -import type { INodeTypeBaseDescription, IVersionedNodeType } from 'n8n-workflow'; +import type { INodeType, INodeTypeBaseDescription, IVersionedNodeType } from 'n8n-workflow'; import { VersionedNodeType } from 'n8n-workflow'; import { ChainSummarizationV1 } from './V1/ChainSummarizationV1.node'; @@ -30,8 +30,8 @@ export class ChainSummarization extends VersionedNodeType { }; const nodeVersions: IVersionedNodeType['nodeVersions'] = { - 1: new ChainSummarizationV1(baseDescription), - 2: new ChainSummarizationV2(baseDescription), + 1: new ChainSummarizationV1(baseDescription) as INodeType, + 2: new ChainSummarizationV2(baseDescription) as INodeType, }; super(nodeVersions, baseDescription); diff --git a/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V1/ChainSummarizationV1.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V1/ChainSummarizationV1.node.ts index fedf979082..3455365c84 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V1/ChainSummarizationV1.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V1/ChainSummarizationV1.node.ts @@ 
-8,8 +8,8 @@ import { type INodeTypeBaseDescription, type IExecuteFunctions, type INodeExecutionData, - type INodeType, type INodeTypeDescription, + AiRootNode, } from 'n8n-workflow'; import { N8nBinaryLoader } from '@utils/N8nBinaryLoader'; @@ -18,10 +18,11 @@ import { getTemplateNoticeField } from '@utils/sharedFields'; import { REFINE_PROMPT_TEMPLATE, DEFAULT_PROMPT_TEMPLATE } from '../prompt'; -export class ChainSummarizationV1 implements INodeType { +export class ChainSummarizationV1 extends AiRootNode { description: INodeTypeDescription; constructor(baseDescription: INodeTypeBaseDescription) { + super(); this.description = { ...baseDescription, version: 1, @@ -162,20 +163,21 @@ export class ChainSummarizationV1 implements INodeType { }; } - async execute(this: IExecuteFunctions): Promise { - this.logger.debug('Executing Vector Store QA Chain'); - const type = this.getNodeParameter('type', 0) as 'map_reduce' | 'stuff' | 'refine'; + async execute(context: IExecuteFunctions): Promise { + context.logger.debug('Executing Vector Store QA Chain'); + const type = context.getNodeParameter('type', 0) as 'map_reduce' | 'stuff' | 'refine'; - const model = (await this.getInputConnectionData( + const model = (await context.getInputConnectionData( NodeConnectionType.AiLanguageModel, 0, )) as BaseLanguageModel; - const documentInput = (await this.getInputConnectionData(NodeConnectionType.AiDocument, 0)) as - | N8nJsonLoader - | Array>>; + const documentInput = (await context.getInputConnectionData( + NodeConnectionType.AiDocument, + 0, + )) as N8nJsonLoader | Array>>; - const options = this.getNodeParameter('options', 0, {}) as { + const options = context.getNodeParameter('options', 0, {}) as { prompt?: string; refineQuestionPrompt?: string; refinePrompt?: string; @@ -241,7 +243,7 @@ export class ChainSummarizationV1 implements INodeType { const chain = loadSummarizationChain(model, chainArgs); - const items = this.getInputData(); + const items = context.getInputData(); 
const returnData: INodeExecutionData[] = []; for (let itemIndex = 0; itemIndex < items.length; itemIndex++) { diff --git a/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V2/ChainSummarizationV2.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V2/ChainSummarizationV2.node.ts index ff6dadde59..a320315b23 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V2/ChainSummarizationV2.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/ChainSummarization/V2/ChainSummarizationV2.node.ts @@ -5,18 +5,16 @@ import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters'; import { loadSummarizationChain } from 'langchain/chains'; import type { INodeTypeBaseDescription, - IExecuteFunctions, + AiRootNodeExecuteFunctions, INodeExecutionData, - INodeType, INodeTypeDescription, IDataObject, } from 'n8n-workflow'; -import { NodeConnectionType } from 'n8n-workflow'; +import { AiRootNode, NodeConnectionType } from 'n8n-workflow'; import { N8nBinaryLoader } from '@utils/N8nBinaryLoader'; import { N8nJsonLoader } from '@utils/N8nJsonLoader'; import { getTemplateNoticeField } from '@utils/sharedFields'; -import { getTracingConfig } from '@utils/tracing'; import { getChainPromptsArgs } from '../helpers'; import { REFINE_PROMPT_TEMPLATE, DEFAULT_PROMPT_TEMPLATE } from '../prompt'; @@ -56,10 +54,11 @@ function getInputs(parameters: IDataObject) { return inputs; } -export class ChainSummarizationV2 implements INodeType { +export class ChainSummarizationV2 extends AiRootNode { description: INodeTypeDescription; constructor(baseDescription: INodeTypeBaseDescription) { + super(); this.description = { ...baseDescription, version: [2], @@ -311,27 +310,27 @@ export class ChainSummarizationV2 implements INodeType { }; } - async execute(this: IExecuteFunctions): Promise { - this.logger.debug('Executing Summarization Chain V2'); - const operationMode = this.getNodeParameter('operationMode', 0, 'nodeInputJson') as + async 
execute(context: AiRootNodeExecuteFunctions): Promise { + context.logger.debug('Executing Summarization Chain V2'); + const operationMode = context.getNodeParameter('operationMode', 0, 'nodeInputJson') as | 'nodeInputJson' | 'nodeInputBinary' | 'documentLoader'; - const chunkingMode = this.getNodeParameter('chunkingMode', 0, 'simple') as + const chunkingMode = context.getNodeParameter('chunkingMode', 0, 'simple') as | 'simple' | 'advanced'; - const model = (await this.getInputConnectionData( + const model = (await context.getInputConnectionData( NodeConnectionType.AiLanguageModel, 0, )) as BaseLanguageModel; - const items = this.getInputData(); + const items = context.getInputData(); const returnData: INodeExecutionData[] = []; for (let itemIndex = 0; itemIndex < items.length; itemIndex++) { try { - const summarizationMethodAndPrompts = this.getNodeParameter( + const summarizationMethodAndPrompts = context.getNodeParameter( 'options.summarizationMethodAndPrompts.values', itemIndex, {}, @@ -355,7 +354,7 @@ export class ChainSummarizationV2 implements INodeType { // Use dedicated document loader input to load documents if (operationMode === 'documentLoader') { - const documentInput = (await this.getInputConnectionData( + const documentInput = (await context.getInputConnectionData( NodeConnectionType.AiDocument, 0, )) as N8nJsonLoader | Array>>; @@ -367,7 +366,7 @@ export class ChainSummarizationV2 implements INodeType { ? 
await documentInput.processItem(item, itemIndex) : documentInput; - const response = await chain.withConfig(getTracingConfig(this)).invoke({ + const response = await chain.withConfig(context.getTracingConfig()).invoke({ input_documents: processedDocuments, }); @@ -381,15 +380,19 @@ export class ChainSummarizationV2 implements INodeType { switch (chunkingMode) { // In simple mode we use recursive character splitter with default settings case 'simple': - const chunkSize = this.getNodeParameter('chunkSize', itemIndex, 1000) as number; - const chunkOverlap = this.getNodeParameter('chunkOverlap', itemIndex, 200) as number; + const chunkSize = context.getNodeParameter('chunkSize', itemIndex, 1000) as number; + const chunkOverlap = context.getNodeParameter( + 'chunkOverlap', + itemIndex, + 200, + ) as number; textSplitter = new RecursiveCharacterTextSplitter({ chunkOverlap, chunkSize }); break; // In advanced mode user can connect text splitter node so we just retrieve it case 'advanced': - textSplitter = (await this.getInputConnectionData( + textSplitter = (await context.getInputConnectionData( NodeConnectionType.AiTextSplitter, 0, )) as TextSplitter | undefined; @@ -400,14 +403,14 @@ export class ChainSummarizationV2 implements INodeType { let processor: N8nJsonLoader | N8nBinaryLoader; if (operationMode === 'nodeInputBinary') { - const binaryDataKey = this.getNodeParameter( + const binaryDataKey = context.getNodeParameter( 'options.binaryDataKey', itemIndex, 'data', ) as string; - processor = new N8nBinaryLoader(this, 'options.', binaryDataKey, textSplitter); + processor = new N8nBinaryLoader(context, 'options.', binaryDataKey, textSplitter); } else { - processor = new N8nJsonLoader(this, 'options.', textSplitter); + processor = new N8nJsonLoader(context, 'options.', textSplitter); } const processedItem = await processor.processItem(item, itemIndex); @@ -417,7 +420,7 @@ export class ChainSummarizationV2 implements INodeType { returnData.push({ json: { response } }); } 
} catch (error) { - if (this.continueOnFail()) { + if (context.continueOnFail()) { returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } }); continue; } diff --git a/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/InformationExtractor.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/InformationExtractor.node.ts index 365a35ddd3..8ef00e64e7 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/InformationExtractor.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/InformationExtractor/InformationExtractor.node.ts @@ -3,11 +3,10 @@ import { HumanMessage } from '@langchain/core/messages'; import { ChatPromptTemplate, SystemMessagePromptTemplate } from '@langchain/core/prompts'; import type { JSONSchema7 } from 'json-schema'; import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers'; -import { jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; +import { AiRootNode, jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import type { - INodeType, INodeTypeDescription, - IExecuteFunctions, + AiRootNodeExecuteFunctions, INodeExecutionData, INodePropertyOptions, } from 'n8n-workflow'; @@ -15,7 +14,6 @@ import type { z } from 'zod'; import { inputSchemaField, jsonSchemaExampleField, schemaTypeField } from '@utils/descriptions'; import { convertJsonSchemaToZod, generateSchema } from '@utils/schemaParsing'; -import { getTracingConfig } from '@utils/tracing'; import { makeZodSchemaFromAttributes } from './helpers'; import type { AttributeDefinition } from './types'; @@ -24,7 +22,7 @@ const SYSTEM_PROMPT_TEMPLATE = `You are an expert extraction algorithm. Only extract relevant information from the text. 
If you do not know the value of an attribute asked to extract, you may omit the attribute's value.`; -export class InformationExtractor implements INodeType { +export class InformationExtractor extends AiRootNode { description: INodeTypeDescription = { displayName: 'Information Extractor', name: 'informationExtractor', @@ -218,15 +216,15 @@ export class InformationExtractor implements INodeType { ], }; - async execute(this: IExecuteFunctions): Promise { - const items = this.getInputData(); + async execute(context: AiRootNodeExecuteFunctions): Promise { + const items = context.getInputData(); - const llm = (await this.getInputConnectionData( + const llm = (await context.getInputConnectionData( NodeConnectionType.AiLanguageModel, 0, )) as BaseLanguageModel; - const schemaType = this.getNodeParameter('schemaType', 0, '') as + const schemaType = context.getNodeParameter('schemaType', 0, '') as | 'fromAttributes' | 'fromJson' | 'manual'; @@ -234,14 +232,14 @@ export class InformationExtractor implements INodeType { let parser: OutputFixingParser; if (schemaType === 'fromAttributes') { - const attributes = this.getNodeParameter( + const attributes = context.getNodeParameter( 'attributes.attributes', 0, [], ) as AttributeDefinition[]; if (attributes.length === 0) { - throw new NodeOperationError(this.getNode(), 'At least one attribute must be specified'); + throw new NodeOperationError(context.getNode(), 'At least one attribute must be specified'); } parser = OutputFixingParser.fromLLM( @@ -252,10 +250,10 @@ export class InformationExtractor implements INodeType { let jsonSchema: JSONSchema7; if (schemaType === 'fromJson') { - const jsonExample = this.getNodeParameter('jsonSchemaExample', 0, '') as string; + const jsonExample = context.getNodeParameter('jsonSchemaExample', 0, '') as string; jsonSchema = generateSchema(jsonExample); } else { - const inputSchema = this.getNodeParameter('inputSchema', 0, '') as string; + const inputSchema = 
context.getNodeParameter('inputSchema', 0, '') as string; jsonSchema = jsonParse(inputSchema); } @@ -266,10 +264,10 @@ export class InformationExtractor implements INodeType { const resultData: INodeExecutionData[] = []; for (let itemIndex = 0; itemIndex < items.length; itemIndex++) { - const input = this.getNodeParameter('text', itemIndex) as string; + const input = context.getNodeParameter('text', itemIndex) as string; const inputPrompt = new HumanMessage(input); - const options = this.getNodeParameter('options', itemIndex, {}) as { + const options = context.getNodeParameter('options', itemIndex, {}) as { systemPromptTemplate?: string; }; @@ -285,13 +283,13 @@ export class InformationExtractor implements INodeType { inputPrompt, ]; const prompt = ChatPromptTemplate.fromMessages(messages); - const chain = prompt.pipe(llm).pipe(parser).withConfig(getTracingConfig(this)); + const chain = prompt.pipe(llm).pipe(parser).withConfig(context.getTracingConfig()); try { const output = await chain.invoke(messages); resultData.push({ json: { output } }); } catch (error) { - if (this.continueOnFail()) { + if (context.continueOnFail()) { resultData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } }); continue; } diff --git a/packages/@n8n/nodes-langchain/nodes/chains/SentimentAnalysis/SentimentAnalysis.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/SentimentAnalysis/SentimentAnalysis.node.ts index e810b0f98a..fdca663027 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/SentimentAnalysis/SentimentAnalysis.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/SentimentAnalysis/SentimentAnalysis.node.ts @@ -5,16 +5,13 @@ import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_par -import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; +import { AiRootNode, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import type { IDataObject, - IExecuteFunctions, + AiRootNodeExecuteFunctions, INodeExecutionData, INodeParameters, - INodeType, INodeTypeDescription, } from 'n8n-workflow'; 
import { z } from 'zod'; -import { getTracingConfig } from '@utils/tracing'; - const DEFAULT_SYSTEM_PROMPT_TEMPLATE = 'You are highly intelligent and accurate sentiment analyzer. Analyze the sentiment of the provided text. Categorize it into one of the following: {categories}. Use the provided formatting instructions. Only output the JSON.'; @@ -28,7 +25,7 @@ const configuredOutputs = (parameters: INodeParameters, defaultCategories: strin return ret; }; -export class SentimentAnalysis implements INodeType { +export class SentimentAnalysis extends AiRootNode { description: INodeTypeDescription = { displayName: 'Sentiment Analysis', name: 'sentimentAnalysis', @@ -136,10 +133,10 @@ export class SentimentAnalysis implements INodeType { ], }; - async execute(this: IExecuteFunctions): Promise { - const items = this.getInputData(); + async execute(context: AiRootNodeExecuteFunctions): Promise { + const items = context.getInputData(); - const llm = (await this.getInputConnectionData( + const llm = (await context.getInputConnectionData( NodeConnectionType.AiLanguageModel, 0, )) as BaseLanguageModel; @@ -148,7 +145,7 @@ export class SentimentAnalysis implements INodeType { for (let i = 0; i < items.length; i++) { try { - const sentimentCategories = this.getNodeParameter( + const sentimentCategories = context.getNodeParameter( 'options.categories', i, DEFAULT_CATEGORIES, @@ -160,7 +157,7 @@ export class SentimentAnalysis implements INodeType { .filter(Boolean); if (categories.length === 0) { - throw new NodeOperationError(this.getNode(), 'No sentiment categories provided', { + throw new NodeOperationError(context.getNode(), 'No sentiment categories provided', { itemIndex: i, }); } @@ -170,7 +167,7 @@ export class SentimentAnalysis implements INodeType { returnData.push(...Array.from({ length: categories.length }, () => [])); } - const options = this.getNodeParameter('options', i, {}) as { + const options = context.getNodeParameter('options', i, {}) as { systemPromptTemplate?: 
string; includeDetailedResults?: boolean; enableAutoFixing?: boolean; @@ -197,7 +194,7 @@ export class SentimentAnalysis implements INodeType { {format_instructions}`, ); - const input = this.getNodeParameter('inputText', i) as string; + const input = context.getNodeParameter('inputText', i) as string; const inputPrompt = new HumanMessage(input); const messages = [ await systemPromptTemplate.format({ @@ -208,7 +205,7 @@ export class SentimentAnalysis implements INodeType { ]; const prompt = ChatPromptTemplate.fromMessages(messages); - const chain = prompt.pipe(llm).pipe(parser).withConfig(getTracingConfig(this)); + const chain = prompt.pipe(llm).pipe(parser).withConfig(context.getTracingConfig()); try { const output = await chain.invoke(messages); @@ -233,7 +230,7 @@ export class SentimentAnalysis implements INodeType { } } catch (error) { throw new NodeOperationError( - this.getNode(), + context.getNode(), 'Error during parsing of LLM output, please check your LLM model and configuration', { itemIndex: i, @@ -241,9 +238,9 @@ export class SentimentAnalysis implements INodeType { ); } } catch (error) { - if (this.continueOnFail()) { - const executionErrorData = this.helpers.constructExecutionMetaData( - this.helpers.returnJsonArray({ error: error.message }), + if (context.continueOnFail()) { + const executionErrorData = context.helpers.constructExecutionMetaData( + context.helpers.returnJsonArray({ error: error.message }), { itemData: { item: i } }, ); returnData[0].push(...executionErrorData); diff --git a/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts b/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts index 298c41572d..1e26324664 100644 --- a/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/chains/TextClassifier/TextClassifier.node.ts @@ -2,19 +2,16 @@ import type { BaseLanguageModel } from 
'@langchain/core/language_models/base'; import { HumanMessage } from '@langchain/core/messages'; import { SystemMessagePromptTemplate, ChatPromptTemplate } from '@langchain/core/prompts'; import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers'; -import { NodeOperationError, NodeConnectionType } from 'n8n-workflow'; +import { NodeOperationError, NodeConnectionType, AiRootNode } from 'n8n-workflow'; import type { IDataObject, - IExecuteFunctions, + AiRootNodeExecuteFunctions, INodeExecutionData, INodeParameters, - INodeType, INodeTypeDescription, } from 'n8n-workflow'; import { z } from 'zod'; -import { getTracingConfig } from '@utils/tracing'; - const SYSTEM_PROMPT_TEMPLATE = "Please classify the text provided by the user into one of the following categories: {categories}, and use the provided formatting instructions below. Don't explain, and only output the json."; @@ -28,7 +25,7 @@ const configuredOutputs = (parameters: INodeParameters) => { return ret; }; -export class TextClassifier implements INodeType { +export class TextClassifier extends AiRootNode { description: INodeTypeDescription = { displayName: 'Text Classifier', name: 'textClassifier', @@ -163,24 +160,24 @@ export class TextClassifier implements INodeType { ], }; - async execute(this: IExecuteFunctions): Promise { - const items = this.getInputData(); + async execute(context: AiRootNodeExecuteFunctions): Promise { + const items = context.getInputData(); - const llm = (await this.getInputConnectionData( + const llm = (await context.getInputConnectionData( NodeConnectionType.AiLanguageModel, 0, )) as BaseLanguageModel; - const categories = this.getNodeParameter('categories.categories', 0, []) as Array<{ + const categories = context.getNodeParameter('categories.categories', 0, []) as Array<{ category: string; description: string; }>; if (categories.length === 0) { - throw new NodeOperationError(this.getNode(), 'At least one category must be defined'); + throw new 
NodeOperationError(context.getNode(), 'At least one category must be defined'); } - const options = this.getNodeParameter('options', 0, {}) as { + const options = context.getNodeParameter('options', 0, {}) as { multiClass: boolean; fallback?: string; systemPromptTemplate?: string; @@ -226,10 +223,10 @@ export class TextClassifier implements INodeType { for (let itemIdx = 0; itemIdx < items.length; itemIdx++) { const item = items[itemIdx]; item.pairedItem = { item: itemIdx }; - const input = this.getNodeParameter('inputText', itemIdx) as string; + const input = context.getNodeParameter('inputText', itemIdx) as string; const inputPrompt = new HumanMessage(input); - const systemPromptTemplateOpt = this.getNodeParameter( + const systemPromptTemplateOpt = context.getNodeParameter( 'options.systemPromptTemplate', itemIdx, SYSTEM_PROMPT_TEMPLATE, @@ -249,7 +246,7 @@ ${fallbackPrompt}`, inputPrompt, ]; const prompt = ChatPromptTemplate.fromMessages(messages); - const chain = prompt.pipe(llm).pipe(parser).withConfig(getTracingConfig(this)); + const chain = prompt.pipe(llm).pipe(parser).withConfig(context.getTracingConfig()); try { const output = await chain.invoke(messages); @@ -259,7 +256,7 @@ ${fallbackPrompt}`, }); if (fallback === 'other' && output.fallback) returnData[returnData.length - 1].push(item); } catch (error) { - if (this.continueOnFail()) { + if (context.continueOnFail()) { returnData[0].push({ json: { error: error.message }, pairedItem: { item: itemIdx }, diff --git a/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts b/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts index 3d426309b7..131d7edaed 100644 --- a/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts +++ b/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts @@ -13,8 +13,6 @@ import { pick } from 'lodash'; import type { IDataObject, ISupplyDataFunctions, JsonObject } from 'n8n-workflow'; import { NodeConnectionType, NodeError, NodeOperationError } from 'n8n-workflow'; 
-import { logAiEvent } from '@utils/helpers'; - type TokensUsageParser = (llmOutput: LLMResult['llmOutput']) => { completionTokens: number; promptTokens: number; @@ -141,7 +139,7 @@ export class N8nLlmTracing extends BaseCallbackHandler { [{ json: { ...response } }], ]); - logAiEvent(this.executionFunctions, 'ai-llm-generated-output', { + this.executionFunctions.logAiEvent('ai-llm-generated-output', { messages: parsedMessages, options: runDetails.options, response, @@ -204,7 +202,7 @@ export class N8nLlmTracing extends BaseCallbackHandler { ); } - logAiEvent(this.executionFunctions, 'ai-llm-errored', { + this.executionFunctions.logAiEvent('ai-llm-errored', { error: Object.keys(error).length === 0 ? error.toString() : error, runId, parentRunId, diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts index 214d4ed82a..226bb87870 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolCode/ToolCode.node.ts @@ -12,6 +12,7 @@ import type { SupplyData, ExecutionError, IDataObject, + ZodObjectAny, } from 'n8n-workflow'; import { jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; @@ -19,8 +20,6 @@ import { inputSchemaField, jsonSchemaExampleField, schemaTypeField } from '@util import { convertJsonSchemaToZod, generateSchema } from '@utils/schemaParsing'; import { getConnectionHintNoticeField } from '@utils/sharedFields'; -import type { DynamicZodObject } from '../../../types/zod.types'; - export class ToolCode implements INodeType { description: INodeTypeDescription = { displayName: 'Code Tool', @@ -269,7 +268,7 @@ export class ToolCode implements INodeType { ? 
generateSchema(jsonExample) : jsonParse(inputSchema); - const zodSchema = convertJsonSchemaToZod<DynamicZodObject>(jsonSchema); + const zodSchema = convertJsonSchemaToZod<ZodObjectAny>(jsonSchema); tool = new DynamicStructuredTool({ schema: zodSchema, diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts index bfdd3e7ace..5519147725 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/ToolHttpRequest.node.ts @@ -1,5 +1,6 @@ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import { DynamicTool } from '@langchain/core/tools'; +import { N8nTool } from '@utils/N8nTool'; import type { INodeType, INodeTypeDescription, @@ -10,7 +11,6 @@ import type { } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError, tryToParseAlphanumericString } from 'n8n-workflow'; -import { N8nTool } from '@utils/N8nTool'; import { getConnectionHintNoticeField } from '@utils/sharedFields'; import { @@ -407,6 +407,7 @@ export class ToolHttpRequest implements INodeType { if (this.getNode().typeVersion >= 1.1) { const schema = makeToolInputSchema(toolParameters); + // TODO: add a new this.createN8NTool method tool = new N8nTool(this, { name, description: toolDescription, diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/test/ToolHttpRequest.node.test.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/test/ToolHttpRequest.node.test.ts index 05ed1e619c..a90fc203ee 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/test/ToolHttpRequest.node.test.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/test/ToolHttpRequest.node.test.ts @@ -1,9 +1,8 @@ +import type { N8nTool } from '@utils/N8nTool'; import { mock } from 'jest-mock-extended'; import type { IExecuteFunctions, INode } from 'n8n-workflow'; import { 
jsonParse } from 'n8n-workflow'; -import type { N8nTool } from '@utils/N8nTool'; - import { ToolHttpRequest } from '../ToolHttpRequest.node'; describe('ToolHttpRequest', () => { diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/utils.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/utils.ts index f1d6dfd150..63ede4662f 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/utils.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolHttpRequest/utils.ts @@ -14,6 +14,7 @@ import type { ExecutionError, NodeApiError, ISupplyDataFunctions, + ZodObjectAny, } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow'; import { z } from 'zod'; @@ -26,7 +27,6 @@ import type { SendIn, ToolParameter, } from './interfaces'; -import type { DynamicZodObject } from '../../../types/zod.types'; const genericCredentialRequest = async (ctx: ISupplyDataFunctions, itemIndex: number) => { const genericType = ctx.getNodeParameter('genericAuthType', itemIndex) as string; @@ -814,7 +814,7 @@ function makeParameterZodSchema(parameter: ToolParameter) { return schema; } -export function makeToolInputSchema(parameters: ToolParameter[]): DynamicZodObject { +export function makeToolInputSchema(parameters: ToolParameter[]): ZodObjectAny { const schemaEntries = parameters.map((parameter) => [ parameter.name, makeParameterZodSchema(parameter), diff --git a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts index 6b09cbfc88..4bdb2eaad8 100644 --- a/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/tools/ToolWorkflow/ToolWorkflow.node.ts @@ -18,6 +18,7 @@ import type { IDataObject, INodeParameterResourceLocator, ITaskMetadata, + ZodObjectAny, } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError, jsonParse } from 
'n8n-workflow'; @@ -25,8 +26,6 @@ import { jsonSchemaExampleField, schemaTypeField, inputSchemaField } from '@util import { convertJsonSchemaToZod, generateSchema } from '@utils/schemaParsing'; import { getConnectionHintNoticeField } from '@utils/sharedFields'; -import type { DynamicZodObject } from '../../../types/zod.types'; - export class ToolWorkflow implements INodeType { description: INodeTypeDescription = { displayName: 'Call n8n Workflow Tool', @@ -543,7 +542,7 @@ export class ToolWorkflow implements INodeType { ? generateSchema(jsonExample) : jsonParse(inputSchema); - const zodSchema = convertJsonSchemaToZod<DynamicZodObject>(jsonSchema); + const zodSchema = convertJsonSchemaToZod<ZodObjectAny>(jsonSchema); tool = new DynamicStructuredTool({ schema: zodSchema, diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts index 6d4abfb0cd..fe3d2d27a9 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts @@ -19,7 +19,7 @@ import type { INodePropertyOptions, } from 'n8n-workflow'; -import { getMetadataFiltersValues, logAiEvent } from '@utils/helpers'; +import { getMetadataFiltersValues } from '@utils/helpers'; import { logWrapper } from '@utils/logWrapper'; import type { N8nBinaryLoader } from '@utils/N8nBinaryLoader'; import { N8nJsonLoader } from '@utils/N8nJsonLoader'; @@ -283,7 +283,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) => }); resultData.push(...serializedDocs); - logAiEvent(this, 'ai-vector-store-searched', { query: prompt }); + this.logAiEvent('ai-vector-store-searched', { query: prompt }); } return [resultData]; @@ -313,7 +313,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) => try { await args.populateVectorStore(this, embeddings, processedDocuments, itemIndex); - 
logAiEvent(this, 'ai-vector-store-populated'); + this.logAiEvent('ai-vector-store-populated'); } catch (error) { throw error; } @@ -367,7 +367,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) => ids: [documentId], }); - logAiEvent(this, 'ai-vector-store-updated'); + this.logAiEvent('ai-vector-store-updated'); } catch (error) { throw error; } diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/OpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/OpenAi.node.ts index 251618c01e..6ad5d933a5 100644 --- a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/OpenAi.node.ts +++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/OpenAi.node.ts @@ -1,10 +1,10 @@ -import type { IExecuteFunctions, INodeType } from 'n8n-workflow'; +import { AiRootNode } from 'n8n-workflow'; +import type { AiRootNodeExecuteFunctions } from 'n8n-workflow'; - import { router } from './actions/router'; import { versionDescription } from './actions/versionDescription'; import { listSearch, loadOptions } from './methods'; -export class OpenAi implements INodeType { +export class OpenAi extends AiRootNode { description = versionDescription; methods = { @@ -12,7 +12,7 @@ export class OpenAi implements INodeType { loadOptions, }; - async execute(this: IExecuteFunctions) { - return await router.call(this); + async execute(context: AiRootNodeExecuteFunctions) { + return await router.call(context); } } diff --git a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts index 977530cc83..69901cb1d0 100644 --- a/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts +++ b/packages/@n8n/nodes-langchain/nodes/vendors/OpenAi/actions/assistant/message.operation.ts @@ -6,7 +6,7 @@ import type { BufferWindowMemory } from 'langchain/memory'; import omit from 'lodash/omit'; import type { IDataObject, - IExecuteFunctions, + AiRootNodeExecuteFunctions, INodeExecutionData, INodeProperties, } from 
'n8n-workflow'; @@ -19,8 +19,6 @@ import { import { OpenAI as OpenAIClient } from 'openai'; import { promptTypeOptions } from '@utils/descriptions'; -import { getConnectedTools } from '@utils/helpers'; -import { getTracingConfig } from '@utils/tracing'; import { formatToOpenAIAssistantTool } from '../../helpers/utils'; import { assistantRLC } from '../descriptions'; @@ -153,7 +151,10 @@ const mapChatMessageToThreadMessage = ( content: message.content.toString(), }); -export async function execute(this: IExecuteFunctions, i: number): Promise { +export async function execute( + this: AiRootNodeExecuteFunctions, + i: number, +): Promise { const credentials = await this.getCredentials('openAiApi'); const nodeVersion = this.getNode().typeVersion; @@ -191,7 +192,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise 1, false); + const tools = await this.getConnectedTools(nodeVersion > 1, false); let assistantTools; if (tools.length) { @@ -270,7 +271,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise { +export async function execute( + this: AiRootNodeExecuteFunctions, + i: number, +): Promise { const nodeVersion = this.getNode().typeVersion; const model = this.getNodeParameter('modelId', i, '', { extractValue: true }); let messages = this.getNodeParameter('messages.values', i, []) as IDataObject[]; @@ -239,7 +240,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise 1; - externalTools = await getConnectedTools(this, enforceUniqueNames, false); + externalTools = await this.getConnectedTools(enforceUniqueNames, false); } if (externalTools.length) { diff --git a/packages/@n8n/nodes-langchain/package.json b/packages/@n8n/nodes-langchain/package.json index 05715fcf7c..fbe2598a2a 100644 --- a/packages/@n8n/nodes-langchain/package.json +++ b/packages/@n8n/nodes-langchain/package.json @@ -168,7 +168,7 @@ "generate-schema": "2.6.0", "html-to-text": "9.0.5", "jsdom": "23.0.1", - "langchain": "0.3.6", + 
"langchain": "catalog:", "lodash": "catalog:", "mammoth": "1.7.2", "mime-types": "2.1.35", diff --git a/packages/@n8n/nodes-langchain/types/zod.types.ts b/packages/@n8n/nodes-langchain/types/zod.types.ts deleted file mode 100644 index 933bd1e33d..0000000000 --- a/packages/@n8n/nodes-langchain/types/zod.types.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { z } from 'zod'; - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -export type DynamicZodObject = z.ZodObject; diff --git a/packages/@n8n/nodes-langchain/utils/helpers.ts b/packages/@n8n/nodes-langchain/utils/helpers.ts index 6b5816e7b8..d8fe929559 100644 --- a/packages/@n8n/nodes-langchain/utils/helpers.ts +++ b/packages/@n8n/nodes-langchain/utils/helpers.ts @@ -4,16 +4,8 @@ import type { BaseLLM } from '@langchain/core/language_models/llms'; import type { BaseMessage } from '@langchain/core/messages'; import type { Tool } from '@langchain/core/tools'; import type { BaseChatMemory } from 'langchain/memory'; -import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow'; -import type { - AiEvent, - IDataObject, - IExecuteFunctions, - ISupplyDataFunctions, - IWebhookFunctions, -} from 'n8n-workflow'; - -import { N8nTool } from './N8nTool'; +import { NodeOperationError } from 'n8n-workflow'; +import type { IExecuteFunctions, ISupplyDataFunctions, IWebhookFunctions } from 'n8n-workflow'; function hasMethods(obj: unknown, ...methodNames: Array): obj is T { return methodNames.every( @@ -72,32 +64,6 @@ export function isToolsInstance(model: unknown): model is Tool { return namespace.includes('tools'); } -export function getPromptInputByType(options: { - ctx: IExecuteFunctions; - i: number; - promptTypeKey: string; - inputKey: string; -}) { - const { ctx, i, promptTypeKey, inputKey } = options; - const prompt = ctx.getNodeParameter(promptTypeKey, i) as string; - - let input; - if (prompt === 'auto') { - input = ctx.evaluateExpression('{{ $json["chatInput"] }}', i) as string; - } 
else { - input = ctx.getNodeParameter(inputKey, i) as string; - } - - if (input === undefined) { - throw new NodeOperationError(ctx.getNode(), 'No prompt specified', { - description: - "Expected to find the prompt in an input field called 'chatInput' (this is what the chat trigger node outputs). To use something else, change the 'Prompt' parameter", - }); - } - - return input; -} - export function getSessionId( ctx: ISupplyDataFunctions | IWebhookFunctions, itemIndex: number, @@ -139,18 +105,6 @@ export function getSessionId( return sessionId; } -export function logAiEvent( - executeFunctions: IExecuteFunctions | ISupplyDataFunctions, - event: AiEvent, - data?: IDataObject, -) { - try { - executeFunctions.logAiEvent(event, data ? jsonStringify(data) : undefined); - } catch (error) { - executeFunctions.logger.debug(`Error logging AI event: ${event}`); - } -} - export function serializeChatHistory(chatHistory: BaseMessage[]): string { return chatHistory .map((chatMessage) => { @@ -164,60 +118,3 @@ export function serializeChatHistory(chatHistory: BaseMessage[]): string { }) .join('\n'); } - -export function escapeSingleCurlyBrackets(text?: string): string | undefined { - if (text === undefined) return undefined; - - let result = text; - - result = result - // First handle triple brackets to avoid interference with double brackets - .replace(/(? { - const connectedTools = - ((await ctx.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]) || []; - - if (!enforceUniqueNames) return connectedTools; - - const seenNames = new Set(); - - const finalTools = []; - - for (const tool of connectedTools) { - const { name } = tool; - if (seenNames.has(name)) { - throw new NodeOperationError( - ctx.getNode(), - `You have multiple tools with the same name: '${name}', please rename them to avoid conflicts`, - ); - } - seenNames.add(name); - - if (escapeCurlyBrackets) { - tool.description = escapeSingleCurlyBrackets(tool.description) ?? 
tool.description; - } - - if (convertStructuredTool && tool instanceof N8nTool) { - finalTools.push(tool.asDynamicTool()); - } else { - finalTools.push(tool); - } - } - - return finalTools; -}; diff --git a/packages/@n8n/nodes-langchain/utils/logWrapper.ts b/packages/@n8n/nodes-langchain/utils/logWrapper.ts index fa1a38b31a..aa27478ddf 100644 --- a/packages/@n8n/nodes-langchain/utils/logWrapper.ts +++ b/packages/@n8n/nodes-langchain/utils/logWrapper.ts @@ -18,7 +18,7 @@ import type { } from 'n8n-workflow'; import { NodeOperationError, NodeConnectionType } from 'n8n-workflow'; -import { logAiEvent, isToolsInstance, isBaseChatMemory, isBaseChatMessageHistory } from './helpers'; +import { isToolsInstance, isBaseChatMemory, isBaseChatMessageHistory } from './helpers'; import { N8nBinaryLoader } from './N8nBinaryLoader'; import { N8nJsonLoader } from './N8nJsonLoader'; @@ -182,7 +182,7 @@ export function logWrapper( const payload = { action: 'getMessages', response }; executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]); - logAiEvent(executeFunctions, 'ai-messages-retrieved-from-memory', { response }); + executeFunctions.logAiEvent('ai-messages-retrieved-from-memory', { response }); return response; }; } else if (prop === 'addMessage' && 'addMessage' in target) { @@ -199,7 +199,7 @@ export function logWrapper( arguments: [message], }); - logAiEvent(executeFunctions, 'ai-message-added-to-memory', { message }); + executeFunctions.logAiEvent('ai-message-added-to-memory', { message }); executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]); }; } @@ -236,7 +236,7 @@ export function logWrapper( }; } - logAiEvent(executeFunctions, 'ai-documents-retrieved', { query }); + executeFunctions.logAiEvent('ai-documents-retrieved', { query }); executeFunctions.addOutputData( connectionType, index, @@ -266,7 +266,7 @@ export function logWrapper( arguments: [documents], })) as number[][]; - logAiEvent(executeFunctions, 
'ai-document-embedded'); + executeFunctions.logAiEvent('ai-document-embedded'); executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); return response; }; @@ -286,7 +286,7 @@ export function logWrapper( method: target[prop], arguments: [query], })) as number[]; - logAiEvent(executeFunctions, 'ai-query-embedded'); + executeFunctions.logAiEvent('ai-query-embedded'); executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); return response; }; @@ -331,7 +331,7 @@ export function logWrapper( arguments: [item, itemIndex], })) as number[]; - logAiEvent(executeFunctions, 'ai-document-processed'); + executeFunctions.logAiEvent('ai-document-processed'); executeFunctions.addOutputData(connectionType, index, [ [{ json: { response }, pairedItem: { item: itemIndex } }], ]); @@ -357,7 +357,7 @@ export function logWrapper( arguments: [text], })) as string[]; - logAiEvent(executeFunctions, 'ai-text-split'); + executeFunctions.logAiEvent('ai-text-split'); executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); return response; }; @@ -381,7 +381,7 @@ export function logWrapper( arguments: [query], })) as string; - logAiEvent(executeFunctions, 'ai-tool-called', { query, response }); + executeFunctions.logAiEvent('ai-tool-called', { query, response }); executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); return response; }; @@ -411,7 +411,7 @@ export function logWrapper( arguments: [query, k, filter, _callbacks], })) as Array>>; - logAiEvent(executeFunctions, 'ai-vector-store-searched', { query }); + executeFunctions.logAiEvent('ai-vector-store-searched', { query }); executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); return response; diff --git a/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputFixingParser.ts b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputFixingParser.ts index de07bfc7cf..4ccf0a0fcf 100644 --- 
a/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputFixingParser.ts +++ b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nOutputFixingParser.ts @@ -7,7 +7,6 @@ import type { ISupplyDataFunctions } from 'n8n-workflow'; import { NodeConnectionType } from 'n8n-workflow'; import type { N8nStructuredOutputParser } from './N8nStructuredOutputParser'; -import { logAiEvent } from '../helpers'; export class N8nOutputFixingParser extends BaseOutputParser { lc_namespace = ['langchain', 'output_parsers', 'fix']; @@ -40,7 +39,7 @@ export class N8nOutputFixingParser extends BaseOutputParser { try { // First attempt to parse the completion const response = await this.outputParser.parse(completion, callbacks, (e) => e); - logAiEvent(this.context, 'ai-output-parsed', { text: completion, response }); + this.context.logAiEvent('ai-output-parsed', { text: completion, response }); this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [ [{ json: { action: 'parse', response } }], diff --git a/packages/@n8n/nodes-langchain/utils/output_parsers/N8nStructuredOutputParser.ts b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nStructuredOutputParser.ts index 3b8410df74..79a2bd86d4 100644 --- a/packages/@n8n/nodes-langchain/utils/output_parsers/N8nStructuredOutputParser.ts +++ b/packages/@n8n/nodes-langchain/utils/output_parsers/N8nStructuredOutputParser.ts @@ -5,8 +5,6 @@ import type { ISupplyDataFunctions } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import { z } from 'zod'; -import { logAiEvent } from '../helpers'; - const STRUCTURED_OUTPUT_KEY = '__structured__output'; const STRUCTURED_OUTPUT_OBJECT_KEY = '__structured__output__object'; const STRUCTURED_OUTPUT_ARRAY_KEY = '__structured__output__array'; @@ -41,7 +39,7 @@ export class N8nStructuredOutputParser extends StructuredOutputParser< get(parsed, STRUCTURED_OUTPUT_KEY) ?? 
parsed) as Record; - logAiEvent(this.context, 'ai-output-parsed', { text, response: result }); + this.context.logAiEvent('ai-output-parsed', { text, response: result }); this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [ [{ json: { action: 'parse', response: result } }], @@ -58,7 +56,7 @@ export class N8nStructuredOutputParser extends StructuredOutputParser< }, ); - logAiEvent(this.context, 'ai-output-parsed', { + this.context.logAiEvent('ai-output-parsed', { text, response: e.message ?? e, }); diff --git a/packages/@n8n/nodes-langchain/utils/tracing.ts b/packages/@n8n/nodes-langchain/utils/tracing.ts deleted file mode 100644 index b9b3699859..0000000000 --- a/packages/@n8n/nodes-langchain/utils/tracing.ts +++ /dev/null @@ -1,26 +0,0 @@ -import type { BaseCallbackConfig } from '@langchain/core/callbacks/manager'; -import type { IExecuteFunctions } from 'n8n-workflow'; - -interface TracingConfig { - additionalMetadata?: Record; -} - -export function getTracingConfig( - context: IExecuteFunctions, - config: TracingConfig = {}, -): BaseCallbackConfig { - const parentRunManager = context.getParentCallbackManager - ? context.getParentCallbackManager() - : undefined; - - return { - runName: `[${context.getWorkflow().name}] ${context.getNode().name}`, - metadata: { - execution_id: context.getExecutionId(), - workflow: context.getWorkflow(), - node: context.getNode().name, - ...(config.additionalMetadata ?? 
{}), - }, - callbacks: parentRunManager, - }; -} diff --git a/packages/core/package.json b/packages/core/package.json index 7fdd5f99af..854ff0a922 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -48,6 +48,7 @@ "file-type": "16.5.4", "form-data": "catalog:", "iconv-lite": "catalog:", + "langchain": "catalog:", "lodash": "catalog:", "luxon": "catalog:", "mime-types": "2.1.35", diff --git a/packages/core/src/WorkflowExecute.ts b/packages/core/src/WorkflowExecute.ts index 6f2cce0b3d..2b77dd22e1 100644 --- a/packages/core/src/WorkflowExecute.ts +++ b/packages/core/src/WorkflowExecute.ts @@ -51,6 +51,7 @@ import { sleep, ExecutionCancelledError, Node, + AiRootNode, } from 'n8n-workflow'; import PCancelable from 'p-cancelable'; import Container from 'typedi'; @@ -1047,6 +1048,7 @@ export class WorkflowExecute { if (nodeType.execute) { const closeFunctions: CloseFunction[] = []; + const context = new ExecuteContext( workflow, node, @@ -1061,10 +1063,15 @@ export class WorkflowExecute { abortSignal, ); - const data = - nodeType instanceof Node - ? await nodeType.execute(context) - : await nodeType.execute.call(context); + let data: INodeExecutionData[][] | null; + if (nodeType instanceof AiRootNode) { + data = await nodeType.execute(context.getAiRootNodeExecuteFunctions()); + } else { + data = + nodeType instanceof Node + ? 
await nodeType.execute(context) + : await nodeType.execute.call(context); + } const closeFunctionsResults = await Promise.allSettled( closeFunctions.map(async (fn) => await fn()), diff --git a/packages/core/src/node-execution-context/base-execute-context.ts b/packages/core/src/node-execution-context/base-execute-context.ts index 8ecc658579..b2495aa0f0 100644 --- a/packages/core/src/node-execution-context/base-execute-context.ts +++ b/packages/core/src/node-execution-context/base-execute-context.ts @@ -1,3 +1,4 @@ +import type { CallbackManager } from '@langchain/core/callbacks/manager'; import { get } from 'lodash'; import type { Workflow, @@ -9,7 +10,6 @@ import type { ITaskDataConnections, IExecuteData, ICredentialDataDecryptedObject, - CallbackManager, IExecuteWorkflowInfo, RelatedExecution, ExecuteWorkflowData, @@ -28,6 +28,7 @@ import { NodeConnectionType, WAIT_INDEFINITELY, WorkflowDataProxy, + jsonStringify, } from 'n8n-workflow'; import { Container } from 'typedi'; @@ -236,14 +237,14 @@ export class BaseExecuteContext extends NodeExecutionContext { } } - logAiEvent(eventName: AiEvent, msg: string) { + logAiEvent(eventName: AiEvent, msg: object) { return this.additionalData.logAiEvent(eventName, { executionId: this.additionalData.executionId ?? 'unsaved-execution', nodeName: this.node.name, workflowName: this.workflow.name ?? 'Unnamed workflow', nodeType: this.node.type, workflowId: this.workflow.id ?? 
'unsaved-workflow', - msg, + msg: jsonStringify(msg), }); } } diff --git a/packages/core/src/node-execution-context/execute-context.ts b/packages/core/src/node-execution-context/execute-context.ts index 954059d86d..c2d374b783 100644 --- a/packages/core/src/node-execution-context/execute-context.ts +++ b/packages/core/src/node-execution-context/execute-context.ts @@ -1,6 +1,9 @@ +import type { BaseCallbackConfig, CallbackManager } from '@langchain/core/callbacks/manager'; +import type { BaseOutputParser } from '@langchain/core/output_parsers'; +import type { DynamicStructuredTool, Tool } from '@langchain/core/tools'; import type { AINodeConnectionType, - CallbackManager, + AiRootNodeExecuteFunctions, CloseFunction, IExecuteData, IExecuteFunctions, @@ -12,14 +15,17 @@ import type { ITaskDataConnections, IWorkflowExecuteAdditionalData, Result, + TracingConfig, Workflow, WorkflowExecuteMode, + ZodObjectAny, } from 'n8n-workflow'; import { ApplicationError, createDeferredPromise, createEnvProviderState, NodeConnectionType, + NodeOperationError, } from 'n8n-workflow'; // eslint-disable-next-line import/no-cycle @@ -40,6 +46,8 @@ import { } from '@/NodeExecuteFunctions'; import { BaseExecuteContext } from './base-execute-context'; +import { N8nTool } from './n8n-tool'; +import { escapeSingleCurlyBrackets } from './utils'; export class ExecuteContext extends BaseExecuteContext implements IExecuteFunctions { readonly helpers: IExecuteFunctions['helpers']; @@ -206,4 +214,135 @@ export class ExecuteContext extends BaseExecuteContext implements IExecuteFuncti getParentCallbackManager(): CallbackManager | undefined { return this.additionalData.parentCallbackManager; } + + getAiRootNodeExecuteFunctions(): AiRootNodeExecuteFunctions { + const { + getConnectedTools, + getPromptInputByType, + getTracingConfig, + extractParsedOutput, + checkForStructuredTools, + } = this; + return Object.create(this, { + getConnectedTools: { value: getConnectedTools }, + getPromptInputByType: { 
value: getPromptInputByType }, + getTracingConfig: { value: getTracingConfig }, + extractParsedOutput: { value: extractParsedOutput }, + checkForStructuredTools: { value: checkForStructuredTools }, + }); + } + + async getConnectedTools( + enforceUniqueNames: boolean, + convertStructuredTool = true, + escapeCurlyBrackets = false, + ) { + const connectedTools = + ((await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]) || []; + + if (!enforceUniqueNames) return connectedTools; + + const seenNames = new Set(); + + const finalTools = []; + + for (const tool of connectedTools) { + const { name } = tool; + if (seenNames.has(name)) { + throw new NodeOperationError( + this.node, + `You have multiple tools with the same name: '${name}', please rename them to avoid conflicts`, + ); + } + seenNames.add(name); + + if (escapeCurlyBrackets) { + tool.description = escapeSingleCurlyBrackets(tool.description) ?? tool.description; + } + + if (convertStructuredTool && tool instanceof N8nTool) { + finalTools.push(tool.asDynamicTool()); + } else { + finalTools.push(tool); + } + } + + return finalTools; + } + + getPromptInputByType( + itemIndex: number, + promptTypeKey: string = 'text', + inputKey: string = 'promptType', + ) { + const prompt = this.getNodeParameter(promptTypeKey, itemIndex) as string; + + let input; + if (prompt === 'auto') { + input = this.evaluateExpression('{{ $json["chatInput"] }}', itemIndex) as string; + } else { + input = this.getNodeParameter(inputKey, itemIndex) as string; + } + + if (input === undefined) { + throw new NodeOperationError(this.node, 'No prompt specified', { + description: + "Expected to find the prompt in an input field called 'chatInput' (this is what the chat trigger node outputs). 
To use something else, change the 'Prompt' parameter", + }); + } + + return input; + } + + getTracingConfig(config: TracingConfig = {}): BaseCallbackConfig { + const parentRunManager = this.getParentCallbackManager?.(); + + return { + runName: `[${this.workflow.name}] ${this.node.name}`, + metadata: { + execution_id: this.getExecutionId(), + workflow: this.workflow, + node: this.node.name, + ...(config.additionalMetadata ?? {}), + }, + callbacks: parentRunManager, + }; + } + + async extractParsedOutput( + outputParser: BaseOutputParser, + output: string, + ): Promise | undefined> { + const parsedOutput = (await outputParser.parse(output)) as { + output: Record; + }; + + if (this.node.typeVersion <= 1.6) { + return parsedOutput; + } + // For 1.7 and above, we try to extract the output from the parsed output + // with fallback to the original output if it's not present + return parsedOutput?.output ?? parsedOutput; + } + + checkForStructuredTools( + tools: Array>, + node: INode, + currentAgentType: string, + ) { + const dynamicStructuredTools = tools.filter( + (tool) => tool.constructor.name === 'DynamicStructuredTool', + ); + if (dynamicStructuredTools.length > 0) { + const getToolName = (tool: Tool | DynamicStructuredTool) => `"${tool.name}"`; + throw new NodeOperationError( + node, + `The selected tools are not supported by "${currentAgentType}", please use "Tools Agent" instead`, + { + itemIndex: 0, + description: `Incompatible connected tools: ${dynamicStructuredTools.map(getToolName).join(', ')}`, + }, + ); + } + } } diff --git a/packages/@n8n/nodes-langchain/utils/N8nTool.ts b/packages/core/src/node-execution-context/n8n-tool.ts similarity index 85% rename from packages/@n8n/nodes-langchain/utils/N8nTool.ts rename to packages/core/src/node-execution-context/n8n-tool.ts index 2cb89630f0..28851f932a 100644 --- a/packages/@n8n/nodes-langchain/utils/N8nTool.ts +++ b/packages/core/src/node-execution-context/n8n-tool.ts @@ -1,8 +1,12 @@ +/* eslint-disable 
@typescript-eslint/no-explicit-any */ +/* eslint-disable @typescript-eslint/no-unsafe-return */ +/* eslint-disable @typescript-eslint/no-unsafe-assignment */ +/* eslint-disable @typescript-eslint/no-unsafe-argument */ import type { DynamicStructuredToolInput } from '@langchain/core/tools'; import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools'; -import type { ISupplyDataFunctions, IDataObject } from 'n8n-workflow'; -import { NodeConnectionType, jsonParse, NodeOperationError } from 'n8n-workflow'; import { StructuredOutputParser } from 'langchain/output_parsers'; +import type { ISupplyDataFunctions, IDataObject } from 'n8n-workflow'; +import { NodeConnectionType, jsonParse, NodeOperationError, ensureError } from 'n8n-workflow'; import type { ZodTypeAny } from 'zod'; import { ZodBoolean, ZodNullable, ZodNumber, ZodObject, ZodOptional } from 'zod'; @@ -28,7 +32,7 @@ const getParametersDescription = (parameters: Array<[string, ZodTypeAny]>) => ) .join(',\n '); -export const prepareFallbackToolDescription = (toolDescription: string, schema: ZodObject) => { +const prepareFallbackToolDescription = (toolDescription: string, schema: ZodObject) => { let description = `${toolDescription}`; const toolParameters = Object.entries(schema.shape); @@ -80,7 +84,7 @@ export class N8nTool extends DynamicStructuredTool { // Finally throw an error if we were unable to parse the query throw new NodeOperationError( context.getNode(), - `Input is not a valid JSON: ${error.message}`, + `Input is not a valid JSON: ${ensureError(error).message}`, ); } } @@ -92,14 +96,12 @@ export class N8nTool extends DynamicStructuredTool { try { // Call tool function with parsed query - const result = await func(parsedQuery); - - return result; + return await func(parsedQuery); } catch (e) { const { index } = context.addInputData(NodeConnectionType.AiTool, [[{ json: { query } }]]); void context.addOutputData(NodeConnectionType.AiTool, index, e); - return e.toString(); + return 
ensureError(e).toString(); } }; diff --git a/packages/core/src/node-execution-context/utils.ts b/packages/core/src/node-execution-context/utils.ts index a09147d543..68be4f7410 100644 --- a/packages/core/src/node-execution-context/utils.ts +++ b/packages/core/src/node-execution-context/utils.ts @@ -421,3 +421,21 @@ export function getAdditionalKeys( $resumeWebhookUrl: resumeUrl, }; } + +export function escapeSingleCurlyBrackets(text?: string): string | undefined { + if (text === undefined) return undefined; + + let result = text; + + result = result + // First handle triple brackets to avoid interference with double brackets + .replace(/(?; + export interface IAdditionalCredentialOptions { oauth2?: IOAuth2Options; credentialsDecrypted?: ICredentialsDecrypted; @@ -893,7 +897,7 @@ type BaseExecutionFunctions = FunctionsBaseWithRequiredKeys<'getMode'> & { getInputSourceData(inputIndex?: number, connectionType?: NodeConnectionType): ISourceData; getExecutionCancelSignal(): AbortSignal | undefined; onExecutionCancellation(handler: () => unknown): void; - logAiEvent(eventName: AiEvent, msg?: string | undefined): void; + logAiEvent(eventName: AiEvent, msg?: object): void; }; // TODO: Create later own type only for Config-Nodes @@ -919,8 +923,6 @@ export type IExecuteFunctions = ExecuteFunctions.GetNodeParameterFn & putExecutionToWait(waitTill: Date): Promise; sendMessageToUI(message: any): void; sendResponse(response: IExecuteResponsePromiseData): void; - - // TODO: Make this one then only available in the new config one addInputData( connectionType: NodeConnectionType, data: INodeExecutionData[][] | ExecutionError, @@ -932,6 +934,7 @@ export type IExecuteFunctions = ExecuteFunctions.GetNodeParameterFn & data: INodeExecutionData[][] | ExecutionError, metadata?: ITaskMetadata, ): void; + getAiRootNodeExecuteFunctions(): AiRootNodeExecuteFunctions; nodeHelpers: NodeHelperFunctions; helpers: RequestHelperFunctions & @@ -976,26 +979,49 @@ export interface 
IExecuteSingleFunctions extends BaseExecutionFunctions { }; } +export interface TracingConfig { + additionalMetadata?: Record; +} + +// TODO: `Pick` from IExecuteFunctions, but do not extends completely +export type AiRootNodeExecuteFunctions = IExecuteFunctions & { + getConnectedTools( + enforceUniqueNames: boolean, + convertStructuredTool?: boolean, + escapeCurlyBrackets?: boolean, + ): Promise; + getPromptInputByType(itemIndex: number, promptTypeKey?: string, inputKey?: string): string; + getTracingConfig(config?: TracingConfig): BaseCallbackConfig; + extractParsedOutput( + outputParser: BaseOutputParser, + output: string, + ): Promise | undefined>; + checkForStructuredTools( + tools: Array>, + node: INode, + currentAgentType: string, + ): void; +}; + export type ISupplyDataFunctions = ExecuteFunctions.GetNodeParameterFn & FunctionsBaseWithRequiredKeys<'getMode'> & Pick< IExecuteFunctions, | 'addInputData' | 'addOutputData' + | 'continueOnFail' + | 'evaluateExpression' + | 'executeWorkflow' + | 'getExecutionCancelSignal' | 'getInputConnectionData' | 'getInputData' | 'getNodeOutputs' - | 'executeWorkflow' + | 'getWorkflowDataProxy' + | 'logAiEvent' + | 'onExecutionCancellation' | 'sendMessageToUI' | 'helpers' - > & { - continueOnFail(): boolean; - evaluateExpression(expression: string, itemIndex: number): NodeParameterValueType; - getWorkflowDataProxy(itemIndex: number): IWorkflowDataProxyData; - getExecutionCancelSignal(): AbortSignal | undefined; - onExecutionCancellation(handler: () => unknown): void; - logAiEvent(eventName: AiEvent, msg?: string | undefined): void; - }; + >; export interface IExecutePaginationFunctions extends IExecuteSingleFunctions { makeRoutingRequest( @@ -1606,6 +1632,14 @@ export abstract class Node { poll?(context: IPollFunctions): Promise; } +/** + * This class serves as a base for all AI nodes that can invoke subnodes, + * like models, memory, and tools + */ +export abstract class AiRootNode extends Node { + execute?(context: 
AiRootNodeExecuteFunctions): Promise; +} + export interface IVersionedNodeType { nodeVersions: { [key: number]: INodeType; @@ -2776,8 +2810,6 @@ export type BannerName = export type Functionality = 'regular' | 'configuration-node' | 'pairedItem'; -export type CallbackManager = CallbackManagerLC; - export type IPersonalizationSurveyAnswersV4 = { version: 'v4'; personalization_survey_submitted_at: string; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e21c919dee..054c0338dd 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -45,6 +45,9 @@ catalogs: iconv-lite: specifier: 0.6.3 version: 0.6.3 + langchain: + specifier: 0.3.6 + version: 0.3.6 lodash: specifier: 4.17.21 version: 4.17.21 @@ -541,7 +544,7 @@ importers: specifier: 23.0.1 version: 23.0.1 langchain: - specifier: 0.3.6 + specifier: 'catalog:' version: 0.3.6(e4rnrwhosnp2xiru36mqgdy2bu) lodash: specifier: 'catalog:' @@ -1114,7 +1117,7 @@ importers: dependencies: '@langchain/core': specifier: 'catalog:' - version: 0.3.19(openai@4.73.1(zod@3.23.8)) + version: 0.3.19(openai@4.73.1) '@n8n/client-oauth2': specifier: workspace:* version: link:../@n8n/client-oauth2 @@ -1148,6 +1151,9 @@ importers: iconv-lite: specifier: 'catalog:' version: 0.6.3 + langchain: + specifier: 'catalog:' + version: 
0.3.6(@langchain/anthropic@0.3.8(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/aws@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/cohere@0.3.1(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(@langchain/google-genai@0.1.4(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8))(@langchain/google-vertexai@0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8))(@langchain/groq@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/mistralai@0.2.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/ollama@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(axios@1.7.4)(cheerio@1.0.0)(handlebars@4.7.8)(openai@4.73.1(zod@3.23.8)) lodash: specifier: 'catalog:' version: 4.17.21 @@ -13974,6 +13980,25 @@ snapshots: - '@aws-sdk/client-sso-oidc' - aws-crt + '@aws-sdk/credential-provider-ini@3.666.0(@aws-sdk/client-sts@3.666.0)': + dependencies: + '@aws-sdk/client-sts': 3.666.0 + '@aws-sdk/credential-provider-env': 3.664.0 + '@aws-sdk/credential-provider-http': 3.666.0 + '@aws-sdk/credential-provider-process': 3.664.0 + '@aws-sdk/credential-provider-sso': 3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0)) + '@aws-sdk/credential-provider-web-identity': 3.664.0(@aws-sdk/client-sts@3.666.0) + '@aws-sdk/types': 3.664.0 + '@smithy/credential-provider-imds': 3.2.4 + '@smithy/property-provider': 3.1.7 + '@smithy/shared-ini-file-loader': 3.1.8 + '@smithy/types': 3.5.0 + tslib: 2.6.2 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + optional: true + '@aws-sdk/credential-provider-node@3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@aws-sdk/client-sts@3.666.0)': dependencies: '@aws-sdk/credential-provider-env': 3.664.0 @@ -13993,6 +14018,26 @@ snapshots: - '@aws-sdk/client-sts' - aws-crt + '@aws-sdk/credential-provider-node@3.666.0(@aws-sdk/client-sts@3.666.0)': + dependencies: 
+ '@aws-sdk/credential-provider-env': 3.664.0 + '@aws-sdk/credential-provider-http': 3.666.0 + '@aws-sdk/credential-provider-ini': 3.666.0(@aws-sdk/client-sts@3.666.0) + '@aws-sdk/credential-provider-process': 3.664.0 + '@aws-sdk/credential-provider-sso': 3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0)) + '@aws-sdk/credential-provider-web-identity': 3.664.0(@aws-sdk/client-sts@3.666.0) + '@aws-sdk/types': 3.664.0 + '@smithy/credential-provider-imds': 3.2.4 + '@smithy/property-provider': 3.1.7 + '@smithy/shared-ini-file-loader': 3.1.8 + '@smithy/types': 3.5.0 + tslib: 2.6.2 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - '@aws-sdk/client-sts' + - aws-crt + optional: true + '@aws-sdk/credential-provider-process@3.664.0': dependencies: '@aws-sdk/types': 3.664.0 @@ -14022,6 +14067,29 @@ snapshots: '@smithy/types': 3.5.0 tslib: 2.6.2 + '@aws-sdk/credential-providers@3.666.0': + dependencies: + '@aws-sdk/client-cognito-identity': 3.666.0 + '@aws-sdk/client-sso': 3.666.0 + '@aws-sdk/client-sts': 3.666.0 + '@aws-sdk/credential-provider-cognito-identity': 3.666.0 + '@aws-sdk/credential-provider-env': 3.664.0 + '@aws-sdk/credential-provider-http': 3.666.0 + '@aws-sdk/credential-provider-ini': 3.666.0(@aws-sdk/client-sts@3.666.0) + '@aws-sdk/credential-provider-node': 3.666.0(@aws-sdk/client-sts@3.666.0) + '@aws-sdk/credential-provider-process': 3.664.0 + '@aws-sdk/credential-provider-sso': 3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0)) + '@aws-sdk/credential-provider-web-identity': 3.664.0(@aws-sdk/client-sts@3.666.0) + '@aws-sdk/types': 3.664.0 + '@smithy/credential-provider-imds': 3.2.4 + '@smithy/property-provider': 3.1.7 + '@smithy/types': 3.5.0 + tslib: 2.6.2 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + optional: true + '@aws-sdk/credential-providers@3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))': dependencies: '@aws-sdk/client-cognito-identity': 3.666.0 
@@ -16087,6 +16155,18 @@ snapshots: - encoding - supports-color + '@langchain/anthropic@0.3.8(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))': + dependencies: + '@anthropic-ai/sdk': 0.27.3(encoding@0.1.13) + '@langchain/core': 0.3.19(openai@4.73.1) + fast-xml-parser: 4.4.1 + zod: 3.23.8 + zod-to-json-schema: 3.23.3(zod@3.23.8) + transitivePeerDependencies: + - encoding + - supports-color + optional: true + '@langchain/aws@0.1.2(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@aws-sdk/client-sts@3.666.0)(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))': dependencies: '@aws-sdk/client-bedrock-agent-runtime': 3.666.0 @@ -16101,6 +16181,21 @@ snapshots: - '@aws-sdk/client-sts' - aws-crt + '@langchain/aws@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))': + dependencies: + '@aws-sdk/client-bedrock-agent-runtime': 3.666.0 + '@aws-sdk/client-bedrock-runtime': 3.666.0 + '@aws-sdk/client-kendra': 3.666.0 + '@aws-sdk/credential-provider-node': 3.666.0(@aws-sdk/client-sts@3.666.0) + '@langchain/core': 0.3.19(openai@4.73.1) + zod: 3.23.8 + zod-to-json-schema: 3.23.3(zod@3.23.8) + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - '@aws-sdk/client-sts' + - aws-crt + optional: true + '@langchain/cohere@0.3.1(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)': dependencies: '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) @@ -16113,6 +16208,19 @@ snapshots: - aws-crt - encoding + '@langchain/cohere@0.3.1(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))': + dependencies: + '@langchain/core': 0.3.19(openai@4.73.1) + cohere-ai: 7.14.0 + uuid: 10.0.0 + zod: 3.23.8 + zod-to-json-schema: 3.23.3(zod@3.23.8) + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + - encoding + optional: true + '@langchain/community@0.3.15(vc5hvyy27o4cmm4jplsptc2fqm)': dependencies: '@ibm-cloud/watsonx-ai': 
1.1.2 @@ -16200,22 +16308,6 @@ snapshots: transitivePeerDependencies: - openai - '@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))': - dependencies: - ansi-styles: 5.2.0 - camelcase: 6.3.0 - decamelize: 1.2.0 - js-tiktoken: 1.0.12 - langsmith: 0.2.3(openai@4.73.1(zod@3.23.8)) - mustache: 4.2.0 - p-queue: 6.6.2 - p-retry: 4.6.2 - uuid: 10.0.0 - zod: 3.23.8 - zod-to-json-schema: 3.23.3(zod@3.23.8) - transitivePeerDependencies: - - openai - '@langchain/core@0.3.19(openai@4.73.1)': dependencies: ansi-styles: 5.2.0 @@ -16240,6 +16332,15 @@ snapshots: transitivePeerDependencies: - zod + '@langchain/google-common@0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)': + dependencies: + '@langchain/core': 0.3.19(openai@4.73.1) + uuid: 10.0.0 + zod-to-json-schema: 3.23.3(zod@3.23.8) + transitivePeerDependencies: + - zod + optional: true + '@langchain/google-gauth@0.1.3(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)(zod@3.23.8)': dependencies: '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) @@ -16250,6 +16351,17 @@ snapshots: - supports-color - zod + '@langchain/google-gauth@0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)': + dependencies: + '@langchain/core': 0.3.19(openai@4.73.1) + '@langchain/google-common': 0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8) + google-auth-library: 8.9.0(encoding@0.1.13) + transitivePeerDependencies: + - encoding + - supports-color + - zod + optional: true + '@langchain/google-genai@0.1.4(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(zod@3.23.8)': dependencies: '@google/generative-ai': 0.21.0 @@ -16258,6 +16370,15 @@ snapshots: transitivePeerDependencies: - zod + '@langchain/google-genai@0.1.4(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)': + dependencies: + '@google/generative-ai': 0.21.0 + '@langchain/core': 0.3.19(openai@4.73.1) + zod-to-json-schema: 3.23.3(zod@3.23.8) + 
transitivePeerDependencies: + - zod + optional: true + '@langchain/google-vertexai@0.1.3(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)(zod@3.23.8)': dependencies: '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) @@ -16267,6 +16388,16 @@ snapshots: - supports-color - zod + '@langchain/google-vertexai@0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)': + dependencies: + '@langchain/core': 0.3.19(openai@4.73.1) + '@langchain/google-gauth': 0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8) + transitivePeerDependencies: + - encoding + - supports-color + - zod + optional: true + '@langchain/groq@0.1.2(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)': dependencies: '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) @@ -16278,6 +16409,18 @@ snapshots: - encoding - supports-color + '@langchain/groq@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))': + dependencies: + '@langchain/core': 0.3.19(openai@4.73.1) + '@langchain/openai': 0.3.14(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))) + groq-sdk: 0.5.0(encoding@0.1.13) + zod: 3.23.8 + zod-to-json-schema: 3.23.3(zod@3.23.8) + transitivePeerDependencies: + - encoding + - supports-color + optional: true + '@langchain/mistralai@0.2.0(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))': dependencies: '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) @@ -16286,12 +16429,28 @@ snapshots: zod: 3.23.8 zod-to-json-schema: 3.23.3(zod@3.23.8) + '@langchain/mistralai@0.2.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))': + dependencies: + '@langchain/core': 0.3.19(openai@4.73.1) + '@mistralai/mistralai': 1.3.4(zod@3.23.8) + uuid: 10.0.0 + zod: 3.23.8 + zod-to-json-schema: 3.23.3(zod@3.23.8) + optional: true + '@langchain/ollama@0.1.2(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))': dependencies: '@langchain/core': 
0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) ollama: 0.5.9 uuid: 10.0.0 + '@langchain/ollama@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))': + dependencies: + '@langchain/core': 0.3.19(openai@4.73.1) + ollama: 0.5.9 + uuid: 10.0.0 + optional: true + '@langchain/openai@0.3.14(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)': dependencies: '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) @@ -16303,6 +16462,17 @@ snapshots: - encoding - supports-color + '@langchain/openai@0.3.14(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))': + dependencies: + '@langchain/core': 0.3.19(openai@4.73.1) + js-tiktoken: 1.0.12 + openai: 4.73.1(zod@3.23.8) + zod: 3.23.8 + zod-to-json-schema: 3.23.3(zod@3.23.8) + transitivePeerDependencies: + - encoding + - supports-color + '@langchain/pinecone@0.1.3(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))': dependencies: '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) @@ -16328,6 +16498,11 @@ snapshots: '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) js-tiktoken: 1.0.12 + '@langchain/textsplitters@0.1.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))': + dependencies: + '@langchain/core': 0.3.19(openai@4.73.1) + js-tiktoken: 1.0.12 + '@lezer/common@1.1.0': {} '@lezer/common@1.2.1': {} @@ -19460,14 +19635,6 @@ snapshots: transitivePeerDependencies: - debug - axios@1.7.7: - dependencies: - follow-redirects: 1.15.6(debug@4.3.6) - form-data: 4.0.0 - proxy-from-env: 1.1.0 - transitivePeerDependencies: - - debug - axios@1.7.7(debug@4.3.6): dependencies: follow-redirects: 1.15.6(debug@4.3.6) @@ -20021,6 +20188,26 @@ snapshots: '@lezer/html': 1.3.0 '@lezer/lr': 1.4.0 + cohere-ai@7.14.0: + dependencies: + '@aws-sdk/client-sagemaker': 3.666.0 + '@aws-sdk/credential-providers': 3.666.0 + '@aws-sdk/protocol-http': 3.374.0 + '@aws-sdk/signature-v4': 3.374.0 + form-data: 4.0.0 + form-data-encoder: 4.0.2 + 
formdata-node: 6.0.3 + js-base64: 3.7.2 + node-fetch: 2.7.0(encoding@0.1.13) + qs: 6.11.2 + readable-stream: 4.5.2 + url-join: 4.0.1 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + - encoding + optional: true + cohere-ai@7.14.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(encoding@0.1.13): dependencies: '@aws-sdk/client-sagemaker': 3.666.0 @@ -22384,7 +22571,7 @@ snapshots: infisical-node@1.3.0: dependencies: - axios: 1.7.7 + axios: 1.7.7(debug@4.3.6) dotenv: 16.3.1 tweetnacl: 1.0.3 tweetnacl-util: 0.15.1 @@ -23318,6 +23505,38 @@ snapshots: kuler@2.0.0: {} + langchain@0.3.6(@langchain/anthropic@0.3.8(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/aws@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/cohere@0.3.1(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(@langchain/google-genai@0.1.4(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8))(@langchain/google-vertexai@0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8))(@langchain/groq@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/mistralai@0.2.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/ollama@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(axios@1.7.4)(cheerio@1.0.0)(handlebars@4.7.8)(openai@4.73.1(zod@3.23.8)): + dependencies: + '@langchain/core': 0.3.19(openai@4.73.1) + '@langchain/openai': 0.3.14(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))) + '@langchain/textsplitters': 0.1.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))) + js-tiktoken: 1.0.12 + js-yaml: 4.1.0 + jsonpointer: 5.0.1 + langsmith: 0.2.3(openai@4.73.1) + openapi-types: 12.1.3 + p-retry: 4.6.2 + uuid: 10.0.0 + yaml: 2.3.4 + zod: 3.23.8 + zod-to-json-schema: 3.23.3(zod@3.23.8) + optionalDependencies: + '@langchain/anthropic': 0.3.8(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))) + '@langchain/aws': 
0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))) + '@langchain/cohere': 0.3.1(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))) + '@langchain/google-genai': 0.1.4(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8) + '@langchain/google-vertexai': 0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8) + '@langchain/groq': 0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))) + '@langchain/mistralai': 0.2.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))) + '@langchain/ollama': 0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))) + axios: 1.7.4 + cheerio: 1.0.0 + handlebars: 4.7.8 + transitivePeerDependencies: + - encoding + - openai + - supports-color + langchain@0.3.6(e4rnrwhosnp2xiru36mqgdy2bu): dependencies: '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) @@ -23361,17 +23580,6 @@ snapshots: optionalDependencies: openai: 4.73.1(encoding@0.1.13)(zod@3.23.8) - langsmith@0.2.3(openai@4.73.1(zod@3.23.8)): - dependencies: - '@types/uuid': 10.0.0 - commander: 10.0.1 - p-queue: 6.6.2 - p-retry: 4.6.2 - semver: 7.6.0 - uuid: 10.0.0 - optionalDependencies: - openai: 4.73.1(zod@3.23.8) - langsmith@0.2.3(openai@4.73.1): dependencies: '@types/uuid': 10.0.0 @@ -24729,7 +24937,6 @@ snapshots: transitivePeerDependencies: - encoding - supports-color - optional: true openapi-sampler@1.5.1: dependencies: @@ -25113,7 +25320,7 @@ snapshots: posthog-node@3.2.1: dependencies: - axios: 1.7.7 + axios: 1.7.7(debug@4.3.6) rusha: 0.8.14 transitivePeerDependencies: - debug @@ -26131,7 +26338,7 @@ snapshots: asn1.js: 5.4.1 asn1.js-rfc2560: 5.0.1(asn1.js@5.4.1) asn1.js-rfc5280: 3.0.0 - axios: 1.7.7 + axios: 1.7.7(debug@4.3.6) big-integer: 1.6.51 bignumber.js: 9.1.2 binascii: 0.0.2 diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 6888cae7ae..f469551e9f 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -27,6 +27,7 @@ catalog: xss: 1.0.15 zod: 3.23.8 '@langchain/core': 0.3.19 + langchain: 0.3.6 catalogs: 
frontend: