create a new context type for AI Agent execution

This commit is contained in:
कारतोफ्फेलस्क्रिप्ट™ 2024-12-13 14:42:49 +01:00
parent 80eea49cf0
commit c003280ebe
No known key found for this signature in database
44 changed files with 741 additions and 576 deletions

View file

@ -1,10 +1,9 @@
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import { AiRootNode, NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type { import type {
INodeInputConfiguration, INodeInputConfiguration,
INodeInputFilter, INodeInputFilter,
IExecuteFunctions, AiRootNodeExecuteFunctions,
INodeExecutionData, INodeExecutionData,
INodeType,
INodeTypeDescription, INodeTypeDescription,
INodeProperties, INodeProperties,
} from 'n8n-workflow'; } from 'n8n-workflow';
@ -245,7 +244,7 @@ const agentTypeProperty: INodeProperties = {
default: '', default: '',
}; };
export class Agent implements INodeType { export class Agent extends AiRootNode {
description: INodeTypeDescription = { description: INodeTypeDescription = {
displayName: 'AI Agent', displayName: 'AI Agent',
name: 'agent', name: 'agent',
@ -416,24 +415,27 @@ export class Agent implements INodeType {
], ],
}; };
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> { async execute(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]> {
const agentType = this.getNodeParameter('agent', 0, '') as string; const agentType = context.getNodeParameter('agent', 0, '') as string;
const nodeVersion = this.getNode().typeVersion; const nodeVersion = context.getNode().typeVersion;
if (agentType === 'conversationalAgent') { if (agentType === 'conversationalAgent') {
return await conversationalAgentExecute.call(this, nodeVersion); return await conversationalAgentExecute.call(context, nodeVersion);
} else if (agentType === 'toolsAgent') { } else if (agentType === 'toolsAgent') {
return await toolsAgentExecute.call(this); return await toolsAgentExecute.call(context);
} else if (agentType === 'openAiFunctionsAgent') { } else if (agentType === 'openAiFunctionsAgent') {
return await openAiFunctionsAgentExecute.call(this, nodeVersion); return await openAiFunctionsAgentExecute.call(context, nodeVersion);
} else if (agentType === 'reActAgent') { } else if (agentType === 'reActAgent') {
return await reActAgentAgentExecute.call(this, nodeVersion); return await reActAgentAgentExecute.call(context, nodeVersion);
} else if (agentType === 'sqlAgent') { } else if (agentType === 'sqlAgent') {
return await sqlAgentAgentExecute.call(this); return await sqlAgentAgentExecute.call(context);
} else if (agentType === 'planAndExecuteAgent') { } else if (agentType === 'planAndExecuteAgent') {
return await planAndExecuteAgentExecute.call(this, nodeVersion); return await planAndExecuteAgentExecute.call(context, nodeVersion);
} }
throw new NodeOperationError(this.getNode(), `The agent type "${agentType}" is not supported`); throw new NodeOperationError(
context.getNode(),
`The agent type "${agentType}" is not supported`,
);
} }
} }

View file

@ -3,18 +3,15 @@ import type { BaseOutputParser } from '@langchain/core/output_parsers';
import { PromptTemplate } from '@langchain/core/prompts'; import { PromptTemplate } from '@langchain/core/prompts';
import { initializeAgentExecutorWithOptions } from 'langchain/agents'; import { initializeAgentExecutorWithOptions } from 'langchain/agents';
import { CombiningOutputParser } from 'langchain/output_parsers'; import { CombiningOutputParser } from 'langchain/output_parsers';
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; import type { AiRootNodeExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import { isChatInstance, getPromptInputByType, getConnectedTools } from '@utils/helpers'; import { isChatInstance } from '@utils/helpers';
import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser'; import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser';
import { throwIfToolSchema } from '@utils/schemaParsing'; import { throwIfToolSchema } from '@utils/schemaParsing';
import { getTracingConfig } from '@utils/tracing';
import { checkForStructuredTools, extractParsedOutput } from '../utils';
export async function conversationalAgentExecute( export async function conversationalAgentExecute(
this: IExecuteFunctions, this: AiRootNodeExecuteFunctions,
nodeVersion: number, nodeVersion: number,
): Promise<INodeExecutionData[][]> { ): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing Conversational Agent'); this.logger.debug('Executing Conversational Agent');
@ -28,10 +25,10 @@ export async function conversationalAgentExecute(
| BaseChatMemory | BaseChatMemory
| undefined; | undefined;
const tools = await getConnectedTools(this, nodeVersion >= 1.5, true, true); const tools = await this.getConnectedTools(nodeVersion >= 1.5, true, true);
const outputParsers = await getOptionalOutputParsers(this); const outputParsers = await getOptionalOutputParsers(this);
await checkForStructuredTools(tools, this.getNode(), 'Conversational Agent'); this.checkForStructuredTools(tools, this.getNode(), 'Conversational Agent');
// TODO: Make it possible in the future to use values for other items than just 0 // TODO: Make it possible in the future to use values for other items than just 0
const options = this.getNodeParameter('options', 0, {}) as { const options = this.getNodeParameter('options', 0, {}) as {
@ -86,12 +83,7 @@ export async function conversationalAgentExecute(
if (this.getNode().typeVersion <= 1.2) { if (this.getNode().typeVersion <= 1.2) {
input = this.getNodeParameter('text', itemIndex) as string; input = this.getNodeParameter('text', itemIndex) as string;
} else { } else {
input = getPromptInputByType({ input = this.getPromptInputByType(itemIndex);
ctx: this,
i: itemIndex,
inputKey: 'text',
promptTypeKey: 'promptType',
});
} }
if (input === undefined) { if (input === undefined) {
@ -103,11 +95,11 @@ export async function conversationalAgentExecute(
} }
const response = await agentExecutor const response = await agentExecutor
.withConfig(getTracingConfig(this)) .withConfig(this.getTracingConfig())
.invoke({ input, outputParsers }); .invoke({ input, outputParsers });
if (outputParser) { if (outputParser) {
response.output = await extractParsedOutput(this, outputParser, response.output as string); response.output = await this.extractParsedOutput(outputParser, response.output as string);
} }
returnData.push({ json: response }); returnData.push({ json: response });

View file

@ -6,20 +6,16 @@ import { AgentExecutor, OpenAIAgent } from 'langchain/agents';
import { BufferMemory, type BaseChatMemory } from 'langchain/memory'; import { BufferMemory, type BaseChatMemory } from 'langchain/memory';
import { CombiningOutputParser } from 'langchain/output_parsers'; import { CombiningOutputParser } from 'langchain/output_parsers';
import { import {
type IExecuteFunctions, type AiRootNodeExecuteFunctions,
type INodeExecutionData, type INodeExecutionData,
NodeConnectionType, NodeConnectionType,
NodeOperationError, NodeOperationError,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { getConnectedTools, getPromptInputByType } from '@utils/helpers';
import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser'; import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser';
import { getTracingConfig } from '@utils/tracing';
import { extractParsedOutput } from '../utils';
export async function openAiFunctionsAgentExecute( export async function openAiFunctionsAgentExecute(
this: IExecuteFunctions, this: AiRootNodeExecuteFunctions,
nodeVersion: number, nodeVersion: number,
): Promise<INodeExecutionData[][]> { ): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing OpenAi Functions Agent'); this.logger.debug('Executing OpenAi Functions Agent');
@ -37,7 +33,7 @@ export async function openAiFunctionsAgentExecute(
const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
| BaseChatMemory | BaseChatMemory
| undefined; | undefined;
const tools = await getConnectedTools(this, nodeVersion >= 1.5, false); const tools = await this.getConnectedTools(nodeVersion >= 1.5, false);
const outputParsers = await getOptionalOutputParsers(this); const outputParsers = await getOptionalOutputParsers(this);
const options = this.getNodeParameter('options', 0, {}) as { const options = this.getNodeParameter('options', 0, {}) as {
systemMessage?: string; systemMessage?: string;
@ -89,12 +85,7 @@ export async function openAiFunctionsAgentExecute(
if (this.getNode().typeVersion <= 1.2) { if (this.getNode().typeVersion <= 1.2) {
input = this.getNodeParameter('text', itemIndex) as string; input = this.getNodeParameter('text', itemIndex) as string;
} else { } else {
input = getPromptInputByType({ input = this.getPromptInputByType(itemIndex);
ctx: this,
i: itemIndex,
inputKey: 'text',
promptTypeKey: 'promptType',
});
} }
if (input === undefined) { if (input === undefined) {
@ -106,11 +97,11 @@ export async function openAiFunctionsAgentExecute(
} }
const response = await agentExecutor const response = await agentExecutor
.withConfig(getTracingConfig(this)) .withConfig(this.getTracingConfig())
.invoke({ input, outputParsers }); .invoke({ input, outputParsers });
if (outputParser) { if (outputParser) {
response.output = await extractParsedOutput(this, outputParser, response.output as string); response.output = await this.extractParsedOutput(outputParser, response.output as string);
} }
returnData.push({ json: response }); returnData.push({ json: response });

View file

@ -4,21 +4,17 @@ import { PromptTemplate } from '@langchain/core/prompts';
import { PlanAndExecuteAgentExecutor } from 'langchain/experimental/plan_and_execute'; import { PlanAndExecuteAgentExecutor } from 'langchain/experimental/plan_and_execute';
import { CombiningOutputParser } from 'langchain/output_parsers'; import { CombiningOutputParser } from 'langchain/output_parsers';
import { import {
type IExecuteFunctions, type AiRootNodeExecuteFunctions,
type INodeExecutionData, type INodeExecutionData,
NodeConnectionType, NodeConnectionType,
NodeOperationError, NodeOperationError,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { getConnectedTools, getPromptInputByType } from '@utils/helpers';
import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser'; import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser';
import { throwIfToolSchema } from '@utils/schemaParsing'; import { throwIfToolSchema } from '@utils/schemaParsing';
import { getTracingConfig } from '@utils/tracing';
import { checkForStructuredTools, extractParsedOutput } from '../utils';
export async function planAndExecuteAgentExecute( export async function planAndExecuteAgentExecute(
this: IExecuteFunctions, this: AiRootNodeExecuteFunctions,
nodeVersion: number, nodeVersion: number,
): Promise<INodeExecutionData[][]> { ): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing PlanAndExecute Agent'); this.logger.debug('Executing PlanAndExecute Agent');
@ -27,9 +23,9 @@ export async function planAndExecuteAgentExecute(
0, 0,
)) as BaseChatModel; )) as BaseChatModel;
const tools = await getConnectedTools(this, nodeVersion >= 1.5, true, true); const tools = await this.getConnectedTools(nodeVersion >= 1.5, true, true);
await checkForStructuredTools(tools, this.getNode(), 'Plan & Execute Agent'); this.checkForStructuredTools(tools, this.getNode(), 'Plan & Execute Agent');
const outputParsers = await getOptionalOutputParsers(this); const outputParsers = await getOptionalOutputParsers(this);
const options = this.getNodeParameter('options', 0, {}) as { const options = this.getNodeParameter('options', 0, {}) as {
@ -66,12 +62,7 @@ export async function planAndExecuteAgentExecute(
if (this.getNode().typeVersion <= 1.2) { if (this.getNode().typeVersion <= 1.2) {
input = this.getNodeParameter('text', itemIndex) as string; input = this.getNodeParameter('text', itemIndex) as string;
} else { } else {
input = getPromptInputByType({ input = this.getPromptInputByType(itemIndex);
ctx: this,
i: itemIndex,
inputKey: 'text',
promptTypeKey: 'promptType',
});
} }
if (input === undefined) { if (input === undefined) {
@ -83,11 +74,11 @@ export async function planAndExecuteAgentExecute(
} }
const response = await agentExecutor const response = await agentExecutor
.withConfig(getTracingConfig(this)) .withConfig(this.getTracingConfig())
.invoke({ input, outputParsers }); .invoke({ input, outputParsers });
if (outputParser) { if (outputParser) {
response.output = await extractParsedOutput(this, outputParser, response.output as string); response.output = await this.extractParsedOutput(outputParser, response.output as string);
} }
returnData.push({ json: response }); returnData.push({ json: response });

View file

@ -5,21 +5,18 @@ import { PromptTemplate } from '@langchain/core/prompts';
import { AgentExecutor, ChatAgent, ZeroShotAgent } from 'langchain/agents'; import { AgentExecutor, ChatAgent, ZeroShotAgent } from 'langchain/agents';
import { CombiningOutputParser } from 'langchain/output_parsers'; import { CombiningOutputParser } from 'langchain/output_parsers';
import { import {
type IExecuteFunctions, type AiRootNodeExecuteFunctions,
type INodeExecutionData, type INodeExecutionData,
NodeConnectionType, NodeConnectionType,
NodeOperationError, NodeOperationError,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { getConnectedTools, getPromptInputByType, isChatInstance } from '@utils/helpers'; import { isChatInstance } from '@utils/helpers';
import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser'; import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser';
import { throwIfToolSchema } from '@utils/schemaParsing'; import { throwIfToolSchema } from '@utils/schemaParsing';
import { getTracingConfig } from '@utils/tracing';
import { checkForStructuredTools, extractParsedOutput } from '../utils';
export async function reActAgentAgentExecute( export async function reActAgentAgentExecute(
this: IExecuteFunctions, this: AiRootNodeExecuteFunctions,
nodeVersion: number, nodeVersion: number,
): Promise<INodeExecutionData[][]> { ): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing ReAct Agent'); this.logger.debug('Executing ReAct Agent');
@ -28,9 +25,9 @@ export async function reActAgentAgentExecute(
| BaseLanguageModel | BaseLanguageModel
| BaseChatModel; | BaseChatModel;
const tools = await getConnectedTools(this, nodeVersion >= 1.5, true, true); const tools = await this.getConnectedTools(nodeVersion >= 1.5, true, true);
await checkForStructuredTools(tools, this.getNode(), 'ReAct Agent'); this.checkForStructuredTools(tools, this.getNode(), 'ReAct Agent');
const outputParsers = await getOptionalOutputParsers(this); const outputParsers = await getOptionalOutputParsers(this);
@ -87,12 +84,7 @@ export async function reActAgentAgentExecute(
if (this.getNode().typeVersion <= 1.2) { if (this.getNode().typeVersion <= 1.2) {
input = this.getNodeParameter('text', itemIndex) as string; input = this.getNodeParameter('text', itemIndex) as string;
} else { } else {
input = getPromptInputByType({ input = this.getPromptInputByType(itemIndex);
ctx: this,
i: itemIndex,
inputKey: 'text',
promptTypeKey: 'promptType',
});
} }
if (input === undefined) { if (input === undefined) {
@ -104,11 +96,11 @@ export async function reActAgentAgentExecute(
} }
const response = await agentExecutor const response = await agentExecutor
.withConfig(getTracingConfig(this)) .withConfig(this.getTracingConfig())
.invoke({ input, outputParsers }); .invoke({ input, outputParsers });
if (outputParser) { if (outputParser) {
response.output = await extractParsedOutput(this, outputParser, response.output as string); response.output = await this.extractParsedOutput(outputParser, response.output as string);
} }
returnData.push({ json: response }); returnData.push({ json: response });

View file

@ -5,15 +5,14 @@ import type { SqlCreatePromptArgs } from 'langchain/agents/toolkits/sql';
import { SqlToolkit, createSqlAgent } from 'langchain/agents/toolkits/sql'; import { SqlToolkit, createSqlAgent } from 'langchain/agents/toolkits/sql';
import { SqlDatabase } from 'langchain/sql_db'; import { SqlDatabase } from 'langchain/sql_db';
import { import {
type IExecuteFunctions, type AiRootNodeExecuteFunctions,
type INodeExecutionData, type INodeExecutionData,
NodeConnectionType, NodeConnectionType,
NodeOperationError, NodeOperationError,
type IDataObject, type IDataObject,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { getPromptInputByType, serializeChatHistory } from '@utils/helpers'; import { serializeChatHistory } from '@utils/helpers';
import { getTracingConfig } from '@utils/tracing';
import { getMysqlDataSource } from './other/handlers/mysql'; import { getMysqlDataSource } from './other/handlers/mysql';
import { getPostgresDataSource } from './other/handlers/postgres'; import { getPostgresDataSource } from './other/handlers/postgres';
@ -27,7 +26,7 @@ const parseTablesString = (tablesString: string) =>
.filter((table) => table.length > 0); .filter((table) => table.length > 0);
export async function sqlAgentAgentExecute( export async function sqlAgentAgentExecute(
this: IExecuteFunctions, this: AiRootNodeExecuteFunctions,
): Promise<INodeExecutionData[][]> { ): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing SQL Agent'); this.logger.debug('Executing SQL Agent');
@ -39,27 +38,22 @@ export async function sqlAgentAgentExecute(
const returnData: INodeExecutionData[] = []; const returnData: INodeExecutionData[] = [];
for (let i = 0; i < items.length; i++) { for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
try { try {
const item = items[i]; const item = items[itemIndex];
let input; let input;
if (this.getNode().typeVersion <= 1.2) { if (this.getNode().typeVersion <= 1.2) {
input = this.getNodeParameter('input', i) as string; input = this.getNodeParameter('input', itemIndex) as string;
} else { } else {
input = getPromptInputByType({ input = this.getPromptInputByType(itemIndex, 'text', 'promptType');
ctx: this,
i,
inputKey: 'text',
promptTypeKey: 'promptType',
});
} }
if (input === undefined) { if (input === undefined) {
throw new NodeOperationError(this.getNode(), 'The prompt parameter is empty.'); throw new NodeOperationError(this.getNode(), 'The prompt parameter is empty.');
} }
const options = this.getNodeParameter('options', i, {}); const options = this.getNodeParameter('options', itemIndex, {});
const selectedDataSource = this.getNodeParameter('dataSource', i, 'sqlite') as const selectedDataSource = this.getNodeParameter('dataSource', itemIndex, 'sqlite') as
| 'mysql' | 'mysql'
| 'postgres' | 'postgres'
| 'sqlite'; | 'sqlite';
@ -77,7 +71,7 @@ export async function sqlAgentAgentExecute(
); );
} }
const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i, 'data'); const binaryPropertyName = this.getNodeParameter('binaryPropertyName', itemIndex, 'data');
dataSource = await getSqliteDataSource.call(this, item.binary, binaryPropertyName); dataSource = await getSqliteDataSource.call(this, item.binary, binaryPropertyName);
} }
@ -127,7 +121,7 @@ export async function sqlAgentAgentExecute(
let response: IDataObject; let response: IDataObject;
try { try {
response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({ response = await agentExecutor.withConfig(this.getTracingConfig()).invoke({
input, input,
signal: this.getExecutionCancelSignal(), signal: this.getExecutionCancelSignal(),
chatHistory, chatHistory,
@ -136,14 +130,14 @@ export async function sqlAgentAgentExecute(
if ((error.message as IDataObject)?.output) { if ((error.message as IDataObject)?.output) {
response = error.message as IDataObject; response = error.message as IDataObject;
} else { } else {
throw new NodeOperationError(this.getNode(), error.message as string, { itemIndex: i }); throw new NodeOperationError(this.getNode(), error.message as string, { itemIndex });
} }
} }
returnData.push({ json: response }); returnData.push({ json: response });
} catch (error) { } catch (error) {
if (this.continueOnFail()) { if (this.continueOnFail()) {
returnData.push({ json: { error: error.message }, pairedItem: { item: i } }); returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
continue; continue;
} }

View file

@ -10,11 +10,10 @@ import type { AgentAction, AgentFinish } from 'langchain/agents';
import { AgentExecutor, createToolCallingAgent } from 'langchain/agents'; import { AgentExecutor, createToolCallingAgent } from 'langchain/agents';
import { omit } from 'lodash'; import { omit } from 'lodash';
import { BINARY_ENCODING, jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import { BINARY_ENCODING, jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; import type { AiRootNodeExecuteFunctions, INodeExecutionData, ZodObjectAny } from 'n8n-workflow';
import type { ZodObject } from 'zod';
import { z } from 'zod'; import { z } from 'zod';
import { isChatInstance, getPromptInputByType, getConnectedTools } from '@utils/helpers'; import { isChatInstance } from '@utils/helpers';
import { import {
getOptionalOutputParsers, getOptionalOutputParsers,
type N8nOutputParser, type N8nOutputParser,
@ -22,14 +21,13 @@ import {
import { SYSTEM_MESSAGE } from './prompt'; import { SYSTEM_MESSAGE } from './prompt';
function getOutputParserSchema(outputParser: N8nOutputParser): ZodObject<any, any, any, any> { function getOutputParserSchema(outputParser: N8nOutputParser): ZodObjectAny {
const schema = const schema = (outputParser.getSchema() as ZodObjectAny) ?? z.object({ text: z.string() });
(outputParser.getSchema() as ZodObject<any, any, any, any>) ?? z.object({ text: z.string() });
return schema; return schema;
} }
async function extractBinaryMessages(ctx: IExecuteFunctions) { async function extractBinaryMessages(ctx: AiRootNodeExecuteFunctions) {
const binaryData = ctx.getInputData()?.[0]?.binary ?? {}; const binaryData = ctx.getInputData()?.[0]?.binary ?? {};
const binaryMessages = await Promise.all( const binaryMessages = await Promise.all(
Object.values(binaryData) Object.values(binaryData)
@ -96,7 +94,9 @@ function fixEmptyContentMessage(steps: AgentFinish | AgentAction[]) {
return steps; return steps;
} }
export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> { export async function toolsAgentExecute(
this: AiRootNodeExecuteFunctions,
): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing Tools Agent'); this.logger.debug('Executing Tools Agent');
const model = await this.getInputConnectionData(NodeConnectionType.AiLanguageModel, 0); const model = await this.getInputConnectionData(NodeConnectionType.AiLanguageModel, 0);
@ -111,7 +111,7 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeE
| BaseChatMemory | BaseChatMemory
| undefined; | undefined;
const tools = (await getConnectedTools(this, true, false)) as Array<DynamicStructuredTool | Tool>; const tools = (await this.getConnectedTools(true, false)) as Array<DynamicStructuredTool | Tool>;
const outputParser = (await getOptionalOutputParsers(this))?.[0]; const outputParser = (await getOptionalOutputParsers(this))?.[0];
let structuredOutputParserTool: DynamicStructuredTool | undefined; let structuredOutputParserTool: DynamicStructuredTool | undefined;
/** /**
@ -289,13 +289,7 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeE
const items = this.getInputData(); const items = this.getInputData();
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) { for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
try { try {
const input = getPromptInputByType({ const input = this.getPromptInputByType(itemIndex);
ctx: this,
i: itemIndex,
inputKey: 'text',
promptTypeKey: 'promptType',
});
if (input === undefined) { if (input === undefined) {
throw new NodeOperationError(this.getNode(), 'The text parameter is empty.'); throw new NodeOperationError(this.getNode(), 'The text parameter is empty.');
} }

View file

@ -1,44 +0,0 @@
import type { BaseOutputParser } from '@langchain/core/output_parsers';
import type { DynamicStructuredTool, Tool } from 'langchain/tools';
import { NodeOperationError, type IExecuteFunctions, type INode } from 'n8n-workflow';
import type { z } from 'zod';
type ZodObjectAny = z.ZodObject<any, any, any, any>;
export async function extractParsedOutput(
ctx: IExecuteFunctions,
outputParser: BaseOutputParser<unknown>,
output: string,
): Promise<Record<string, unknown> | undefined> {
const parsedOutput = (await outputParser.parse(output)) as {
output: Record<string, unknown>;
};
if (ctx.getNode().typeVersion <= 1.6) {
return parsedOutput;
}
// For 1.7 and above, we try to extract the output from the parsed output
// with fallback to the original output if it's not present
return parsedOutput?.output ?? parsedOutput;
}
export async function checkForStructuredTools(
tools: Array<Tool | DynamicStructuredTool<ZodObjectAny>>,
node: INode,
currentAgentType: string,
) {
const dynamicStructuredTools = tools.filter(
(tool) => tool.constructor.name === 'DynamicStructuredTool',
);
if (dynamicStructuredTools.length > 0) {
const getToolName = (tool: Tool | DynamicStructuredTool) => `"${tool.name}"`;
throw new NodeOperationError(
node,
`The selected tools are not supported by "${currentAgentType}", please use "Tools Agent" instead`,
{
itemIndex: 0,
description: `Incompatible connected tools: ${dynamicStructuredTools.map(getToolName).join(', ')}`,
},
);
}
}

View file

@ -1,21 +1,17 @@
import { AgentExecutor } from 'langchain/agents'; import { AgentExecutor } from 'langchain/agents';
import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema'; import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema';
import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant'; import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import { AiRootNode, NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type { import type {
IExecuteFunctions, AiRootNodeExecuteFunctions,
INodeExecutionData, INodeExecutionData,
INodeType,
INodeTypeDescription, INodeTypeDescription,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { OpenAI as OpenAIClient } from 'openai'; import { OpenAI as OpenAIClient } from 'openai';
import { getConnectedTools } from '@utils/helpers';
import { getTracingConfig } from '@utils/tracing';
import { formatToOpenAIAssistantTool } from './utils'; import { formatToOpenAIAssistantTool } from './utils';
export class OpenAiAssistant implements INodeType { export class OpenAiAssistant extends AiRootNode {
description: INodeTypeDescription = { description: INodeTypeDescription = {
displayName: 'OpenAI Assistant', displayName: 'OpenAI Assistant',
name: 'openAiAssistant', name: 'openAiAssistant',
@ -313,30 +309,30 @@ export class OpenAiAssistant implements INodeType {
], ],
}; };
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> { async execute(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]> {
const nodeVersion = this.getNode().typeVersion; const nodeVersion = context.getNode().typeVersion;
const tools = await getConnectedTools(this, nodeVersion > 1, false); const tools = await context.getConnectedTools(nodeVersion > 1, false);
const credentials = await this.getCredentials('openAiApi'); const credentials = await context.getCredentials('openAiApi');
const items = this.getInputData(); const items = context.getInputData();
const returnData: INodeExecutionData[] = []; const returnData: INodeExecutionData[] = [];
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) { for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
try { try {
const input = this.getNodeParameter('text', itemIndex) as string; const input = context.getNodeParameter('text', itemIndex) as string;
const assistantId = this.getNodeParameter('assistantId', itemIndex, '') as string; const assistantId = context.getNodeParameter('assistantId', itemIndex, '') as string;
const nativeTools = this.getNodeParameter('nativeTools', itemIndex, []) as Array< const nativeTools = context.getNodeParameter('nativeTools', itemIndex, []) as Array<
'code_interpreter' | 'retrieval' 'code_interpreter' | 'retrieval'
>; >;
const options = this.getNodeParameter('options', itemIndex, {}) as { const options = context.getNodeParameter('options', itemIndex, {}) as {
baseURL?: string; baseURL?: string;
maxRetries: number; maxRetries: number;
timeout: number; timeout: number;
}; };
if (input === undefined) { if (input === undefined) {
throw new NodeOperationError(this.getNode(), 'The text parameter is empty.'); throw new NodeOperationError(context.getNode(), 'The text parameter is empty.');
} }
const client = new OpenAIClient({ const client = new OpenAIClient({
@ -358,9 +354,13 @@ export class OpenAiAssistant implements INodeType {
tools: newTools, tools: newTools,
}); });
} else { } else {
const name = this.getNodeParameter('name', itemIndex, '') as string; const name = context.getNodeParameter('name', itemIndex, '') as string;
const instructions = this.getNodeParameter('instructions', itemIndex, '') as string; const instructions = context.getNodeParameter('instructions', itemIndex, '') as string;
const model = this.getNodeParameter('model', itemIndex, 'gpt-3.5-turbo-1106') as string; const model = context.getNodeParameter(
'model',
itemIndex,
'gpt-3.5-turbo-1106',
) as string;
agent = await OpenAIAssistantRunnable.createAssistant({ agent = await OpenAIAssistantRunnable.createAssistant({
model, model,
@ -377,15 +377,15 @@ export class OpenAiAssistant implements INodeType {
tools, tools,
}); });
const response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({ const response = await agentExecutor.withConfig(context.getTracingConfig()).invoke({
content: input, content: input,
signal: this.getExecutionCancelSignal(), signal: context.getExecutionCancelSignal(),
timeout: options.timeout ?? 10000, timeout: options.timeout ?? 10000,
}); });
returnData.push({ json: response }); returnData.push({ json: response });
} catch (error) { } catch (error) {
if (this.continueOnFail()) { if (context.continueOnFail()) {
returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } }); returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
continue; continue;
} }

View file

@ -15,12 +15,12 @@ import { CombiningOutputParser } from 'langchain/output_parsers';
import type { import type {
IBinaryData, IBinaryData,
IDataObject, IDataObject,
IExecuteFunctions, AiRootNodeExecuteFunctions,
INodeExecutionData, INodeExecutionData,
INodeType,
INodeTypeDescription, INodeTypeDescription,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { import {
AiRootNode,
ApplicationError, ApplicationError,
NodeApiError, NodeApiError,
NodeConnectionType, NodeConnectionType,
@ -28,11 +28,10 @@ import {
} from 'n8n-workflow'; } from 'n8n-workflow';
import { promptTypeOptions, textFromPreviousNode } from '@utils/descriptions'; import { promptTypeOptions, textFromPreviousNode } from '@utils/descriptions';
import { getPromptInputByType, isChatInstance } from '@utils/helpers'; import { isChatInstance } from '@utils/helpers';
import type { N8nOutputParser } from '@utils/output_parsers/N8nOutputParser'; import type { N8nOutputParser } from '@utils/output_parsers/N8nOutputParser';
import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser'; import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser';
import { getTemplateNoticeField } from '@utils/sharedFields'; import { getTemplateNoticeField } from '@utils/sharedFields';
import { getTracingConfig } from '@utils/tracing';
import { import {
getCustomErrorMessage as getCustomOpenAiErrorMessage, getCustomErrorMessage as getCustomOpenAiErrorMessage,
@ -49,7 +48,7 @@ interface MessagesTemplate {
} }
async function getImageMessage( async function getImageMessage(
context: IExecuteFunctions, context: AiRootNodeExecuteFunctions,
itemIndex: number, itemIndex: number,
message: MessagesTemplate, message: MessagesTemplate,
) { ) {
@ -106,7 +105,7 @@ async function getImageMessage(
} }
async function getChainPromptTemplate( async function getChainPromptTemplate(
context: IExecuteFunctions, context: AiRootNodeExecuteFunctions,
itemIndex: number, itemIndex: number,
llm: BaseLanguageModel | BaseChatModel, llm: BaseLanguageModel | BaseChatModel,
messages?: MessagesTemplate[], messages?: MessagesTemplate[],
@ -165,7 +164,7 @@ async function getChainPromptTemplate(
} }
async function createSimpleLLMChain( async function createSimpleLLMChain(
context: IExecuteFunctions, context: AiRootNodeExecuteFunctions,
llm: BaseLanguageModel, llm: BaseLanguageModel,
query: string, query: string,
prompt: ChatPromptTemplate | PromptTemplate, prompt: ChatPromptTemplate | PromptTemplate,
@ -173,7 +172,7 @@ async function createSimpleLLMChain(
const chain = new LLMChain({ const chain = new LLMChain({
llm, llm,
prompt, prompt,
}).withConfig(getTracingConfig(context)); }).withConfig(context.getTracingConfig());
const response = (await chain.invoke({ const response = (await chain.invoke({
query, query,
@ -184,7 +183,7 @@ async function createSimpleLLMChain(
} }
async function getChain( async function getChain(
context: IExecuteFunctions, context: AiRootNodeExecuteFunctions,
itemIndex: number, itemIndex: number,
query: string, query: string,
llm: BaseLanguageModel, llm: BaseLanguageModel,
@ -222,7 +221,7 @@ async function getChain(
); );
const chain = prompt.pipe(llm).pipe(combinedOutputParser); const chain = prompt.pipe(llm).pipe(combinedOutputParser);
const response = (await chain.withConfig(getTracingConfig(context)).invoke({ query })) as const response = (await chain.withConfig(context.getTracingConfig()).invoke({ query })) as
| string | string
| string[]; | string[];
@ -249,7 +248,7 @@ function getInputs(parameters: IDataObject) {
return inputs; return inputs;
} }
export class ChainLlm implements INodeType { export class ChainLlm extends AiRootNode {
description: INodeTypeDescription = { description: INodeTypeDescription = {
displayName: 'Basic LLM Chain', displayName: 'Basic LLM Chain',
name: 'chainLlm', name: 'chainLlm',
@ -510,42 +509,37 @@ export class ChainLlm implements INodeType {
], ],
}; };
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> { async execute(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing LLM Chain'); context.logger.debug('Executing LLM Chain');
const items = this.getInputData(); const items = context.getInputData();
const returnData: INodeExecutionData[] = []; const returnData: INodeExecutionData[] = [];
const llm = (await this.getInputConnectionData( const llm = (await context.getInputConnectionData(
NodeConnectionType.AiLanguageModel, NodeConnectionType.AiLanguageModel,
0, 0,
)) as BaseLanguageModel; )) as BaseLanguageModel;
const outputParsers = await getOptionalOutputParsers(this); const outputParsers = await getOptionalOutputParsers(context);
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) { for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
try { try {
let prompt: string; let prompt: string;
if (this.getNode().typeVersion <= 1.3) { if (context.getNode().typeVersion <= 1.3) {
prompt = this.getNodeParameter('prompt', itemIndex) as string; prompt = context.getNodeParameter('prompt', itemIndex) as string;
} else { } else {
prompt = getPromptInputByType({ prompt = context.getPromptInputByType(itemIndex);
ctx: this,
i: itemIndex,
inputKey: 'text',
promptTypeKey: 'promptType',
});
} }
const messages = this.getNodeParameter( const messages = context.getNodeParameter(
'messages.messageValues', 'messages.messageValues',
itemIndex, itemIndex,
[], [],
) as MessagesTemplate[]; ) as MessagesTemplate[];
if (prompt === undefined) { if (prompt === undefined) {
throw new NodeOperationError(this.getNode(), "The 'prompt' parameter is empty."); throw new NodeOperationError(context.getNode(), "The 'prompt' parameter is empty.");
} }
const responses = await getChain(this, itemIndex, prompt, llm, outputParsers, messages); const responses = await getChain(context, itemIndex, prompt, llm, outputParsers, messages);
responses.forEach((response) => { responses.forEach((response) => {
let data: IDataObject; let data: IDataObject;
@ -586,7 +580,7 @@ export class ChainLlm implements INodeType {
} }
} }
if (this.continueOnFail()) { if (context.continueOnFail()) {
returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } }); returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
continue; continue;
} }

View file

@ -9,24 +9,23 @@ import type { BaseRetriever } from '@langchain/core/retrievers';
import { RetrievalQAChain } from 'langchain/chains'; import { RetrievalQAChain } from 'langchain/chains';
import { import {
NodeConnectionType, NodeConnectionType,
type IExecuteFunctions, type AiRootNodeExecuteFunctions,
type INodeExecutionData, type INodeExecutionData,
type INodeType,
type INodeTypeDescription, type INodeTypeDescription,
NodeOperationError, NodeOperationError,
AiRootNode,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { promptTypeOptions, textFromPreviousNode } from '@utils/descriptions'; import { promptTypeOptions, textFromPreviousNode } from '@utils/descriptions';
import { getPromptInputByType, isChatInstance } from '@utils/helpers'; import { isChatInstance } from '@utils/helpers';
import { getTemplateNoticeField } from '@utils/sharedFields'; import { getTemplateNoticeField } from '@utils/sharedFields';
import { getTracingConfig } from '@utils/tracing';
const SYSTEM_PROMPT_TEMPLATE = `Use the following pieces of context to answer the users question. const SYSTEM_PROMPT_TEMPLATE = `Use the following pieces of context to answer the users question.
If you don't know the answer, just say that you don't know, don't try to make up an answer. If you don't know the answer, just say that you don't know, don't try to make up an answer.
---------------- ----------------
{context}`; {context}`;
export class ChainRetrievalQa implements INodeType { export class ChainRetrievalQa extends AiRootNode {
description: INodeTypeDescription = { description: INodeTypeDescription = {
displayName: 'Question and Answer Chain', displayName: 'Question and Answer Chain',
name: 'chainRetrievalQa', name: 'chainRetrievalQa',
@ -158,20 +157,20 @@ export class ChainRetrievalQa implements INodeType {
], ],
}; };
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> { async execute(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing Retrieval QA Chain'); context.logger.debug('Executing Retrieval QA Chain');
const model = (await this.getInputConnectionData( const model = (await context.getInputConnectionData(
NodeConnectionType.AiLanguageModel, NodeConnectionType.AiLanguageModel,
0, 0,
)) as BaseLanguageModel; )) as BaseLanguageModel;
const retriever = (await this.getInputConnectionData( const retriever = (await context.getInputConnectionData(
NodeConnectionType.AiRetriever, NodeConnectionType.AiRetriever,
0, 0,
)) as BaseRetriever; )) as BaseRetriever;
const items = this.getInputData(); const items = context.getInputData();
const returnData: INodeExecutionData[] = []; const returnData: INodeExecutionData[] = [];
@ -180,22 +179,17 @@ export class ChainRetrievalQa implements INodeType {
try { try {
let query; let query;
if (this.getNode().typeVersion <= 1.2) { if (context.getNode().typeVersion <= 1.2) {
query = this.getNodeParameter('query', itemIndex) as string; query = context.getNodeParameter('query', itemIndex) as string;
} else { } else {
query = getPromptInputByType({ query = context.getPromptInputByType(itemIndex);
ctx: this,
i: itemIndex,
inputKey: 'text',
promptTypeKey: 'promptType',
});
} }
if (query === undefined) { if (query === undefined) {
throw new NodeOperationError(this.getNode(), 'The query parameter is empty.'); throw new NodeOperationError(context.getNode(), 'The query parameter is empty.');
} }
const options = this.getNodeParameter('options', itemIndex, {}) as { const options = context.getNodeParameter('options', itemIndex, {}) as {
systemPromptTemplate?: string; systemPromptTemplate?: string;
}; };
@ -224,10 +218,10 @@ export class ChainRetrievalQa implements INodeType {
const chain = RetrievalQAChain.fromLLM(model, retriever, chainParameters); const chain = RetrievalQAChain.fromLLM(model, retriever, chainParameters);
const response = await chain.withConfig(getTracingConfig(this)).invoke({ query }); const response = await chain.withConfig(context.getTracingConfig()).invoke({ query });
returnData.push({ json: { response } }); returnData.push({ json: { response } });
} catch (error) { } catch (error) {
if (this.continueOnFail()) { if (context.continueOnFail()) {
returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } }); returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
continue; continue;
} }

View file

@ -1,4 +1,4 @@
import type { INodeTypeBaseDescription, IVersionedNodeType } from 'n8n-workflow'; import type { INodeType, INodeTypeBaseDescription, IVersionedNodeType } from 'n8n-workflow';
import { VersionedNodeType } from 'n8n-workflow'; import { VersionedNodeType } from 'n8n-workflow';
import { ChainSummarizationV1 } from './V1/ChainSummarizationV1.node'; import { ChainSummarizationV1 } from './V1/ChainSummarizationV1.node';
@ -30,8 +30,8 @@ export class ChainSummarization extends VersionedNodeType {
}; };
const nodeVersions: IVersionedNodeType['nodeVersions'] = { const nodeVersions: IVersionedNodeType['nodeVersions'] = {
1: new ChainSummarizationV1(baseDescription), 1: new ChainSummarizationV1(baseDescription) as INodeType,
2: new ChainSummarizationV2(baseDescription), 2: new ChainSummarizationV2(baseDescription) as INodeType,
}; };
super(nodeVersions, baseDescription); super(nodeVersions, baseDescription);

View file

@ -8,8 +8,8 @@ import {
type INodeTypeBaseDescription, type INodeTypeBaseDescription,
type IExecuteFunctions, type IExecuteFunctions,
type INodeExecutionData, type INodeExecutionData,
type INodeType,
type INodeTypeDescription, type INodeTypeDescription,
AiRootNode,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { N8nBinaryLoader } from '@utils/N8nBinaryLoader'; import { N8nBinaryLoader } from '@utils/N8nBinaryLoader';
@ -18,10 +18,11 @@ import { getTemplateNoticeField } from '@utils/sharedFields';
import { REFINE_PROMPT_TEMPLATE, DEFAULT_PROMPT_TEMPLATE } from '../prompt'; import { REFINE_PROMPT_TEMPLATE, DEFAULT_PROMPT_TEMPLATE } from '../prompt';
export class ChainSummarizationV1 implements INodeType { export class ChainSummarizationV1 extends AiRootNode {
description: INodeTypeDescription; description: INodeTypeDescription;
constructor(baseDescription: INodeTypeBaseDescription) { constructor(baseDescription: INodeTypeBaseDescription) {
super();
this.description = { this.description = {
...baseDescription, ...baseDescription,
version: 1, version: 1,
@ -162,20 +163,21 @@ export class ChainSummarizationV1 implements INodeType {
}; };
} }
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> { async execute(context: IExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing Vector Store QA Chain'); context.logger.debug('Executing Vector Store QA Chain');
const type = this.getNodeParameter('type', 0) as 'map_reduce' | 'stuff' | 'refine'; const type = context.getNodeParameter('type', 0) as 'map_reduce' | 'stuff' | 'refine';
const model = (await this.getInputConnectionData( const model = (await context.getInputConnectionData(
NodeConnectionType.AiLanguageModel, NodeConnectionType.AiLanguageModel,
0, 0,
)) as BaseLanguageModel; )) as BaseLanguageModel;
const documentInput = (await this.getInputConnectionData(NodeConnectionType.AiDocument, 0)) as const documentInput = (await context.getInputConnectionData(
| N8nJsonLoader NodeConnectionType.AiDocument,
| Array<Document<Record<string, unknown>>>; 0,
)) as N8nJsonLoader | Array<Document<Record<string, unknown>>>;
const options = this.getNodeParameter('options', 0, {}) as { const options = context.getNodeParameter('options', 0, {}) as {
prompt?: string; prompt?: string;
refineQuestionPrompt?: string; refineQuestionPrompt?: string;
refinePrompt?: string; refinePrompt?: string;
@ -241,7 +243,7 @@ export class ChainSummarizationV1 implements INodeType {
const chain = loadSummarizationChain(model, chainArgs); const chain = loadSummarizationChain(model, chainArgs);
const items = this.getInputData(); const items = context.getInputData();
const returnData: INodeExecutionData[] = []; const returnData: INodeExecutionData[] = [];
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) { for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {

View file

@ -5,18 +5,16 @@ import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
import { loadSummarizationChain } from 'langchain/chains'; import { loadSummarizationChain } from 'langchain/chains';
import type { import type {
INodeTypeBaseDescription, INodeTypeBaseDescription,
IExecuteFunctions, AiRootNodeExecuteFunctions,
INodeExecutionData, INodeExecutionData,
INodeType,
INodeTypeDescription, INodeTypeDescription,
IDataObject, IDataObject,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow'; import { AiRootNode, NodeConnectionType } from 'n8n-workflow';
import { N8nBinaryLoader } from '@utils/N8nBinaryLoader'; import { N8nBinaryLoader } from '@utils/N8nBinaryLoader';
import { N8nJsonLoader } from '@utils/N8nJsonLoader'; import { N8nJsonLoader } from '@utils/N8nJsonLoader';
import { getTemplateNoticeField } from '@utils/sharedFields'; import { getTemplateNoticeField } from '@utils/sharedFields';
import { getTracingConfig } from '@utils/tracing';
import { getChainPromptsArgs } from '../helpers'; import { getChainPromptsArgs } from '../helpers';
import { REFINE_PROMPT_TEMPLATE, DEFAULT_PROMPT_TEMPLATE } from '../prompt'; import { REFINE_PROMPT_TEMPLATE, DEFAULT_PROMPT_TEMPLATE } from '../prompt';
@ -56,10 +54,11 @@ function getInputs(parameters: IDataObject) {
return inputs; return inputs;
} }
export class ChainSummarizationV2 implements INodeType { export class ChainSummarizationV2 extends AiRootNode {
description: INodeTypeDescription; description: INodeTypeDescription;
constructor(baseDescription: INodeTypeBaseDescription) { constructor(baseDescription: INodeTypeBaseDescription) {
super();
this.description = { this.description = {
...baseDescription, ...baseDescription,
version: [2], version: [2],
@ -311,27 +310,27 @@ export class ChainSummarizationV2 implements INodeType {
}; };
} }
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> { async execute(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing Summarization Chain V2'); context.logger.debug('Executing Summarization Chain V2');
const operationMode = this.getNodeParameter('operationMode', 0, 'nodeInputJson') as const operationMode = context.getNodeParameter('operationMode', 0, 'nodeInputJson') as
| 'nodeInputJson' | 'nodeInputJson'
| 'nodeInputBinary' | 'nodeInputBinary'
| 'documentLoader'; | 'documentLoader';
const chunkingMode = this.getNodeParameter('chunkingMode', 0, 'simple') as const chunkingMode = context.getNodeParameter('chunkingMode', 0, 'simple') as
| 'simple' | 'simple'
| 'advanced'; | 'advanced';
const model = (await this.getInputConnectionData( const model = (await context.getInputConnectionData(
NodeConnectionType.AiLanguageModel, NodeConnectionType.AiLanguageModel,
0, 0,
)) as BaseLanguageModel; )) as BaseLanguageModel;
const items = this.getInputData(); const items = context.getInputData();
const returnData: INodeExecutionData[] = []; const returnData: INodeExecutionData[] = [];
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) { for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
try { try {
const summarizationMethodAndPrompts = this.getNodeParameter( const summarizationMethodAndPrompts = context.getNodeParameter(
'options.summarizationMethodAndPrompts.values', 'options.summarizationMethodAndPrompts.values',
itemIndex, itemIndex,
{}, {},
@ -355,7 +354,7 @@ export class ChainSummarizationV2 implements INodeType {
// Use dedicated document loader input to load documents // Use dedicated document loader input to load documents
if (operationMode === 'documentLoader') { if (operationMode === 'documentLoader') {
const documentInput = (await this.getInputConnectionData( const documentInput = (await context.getInputConnectionData(
NodeConnectionType.AiDocument, NodeConnectionType.AiDocument,
0, 0,
)) as N8nJsonLoader | Array<Document<Record<string, unknown>>>; )) as N8nJsonLoader | Array<Document<Record<string, unknown>>>;
@ -367,7 +366,7 @@ export class ChainSummarizationV2 implements INodeType {
? await documentInput.processItem(item, itemIndex) ? await documentInput.processItem(item, itemIndex)
: documentInput; : documentInput;
const response = await chain.withConfig(getTracingConfig(this)).invoke({ const response = await chain.withConfig(context.getTracingConfig()).invoke({
input_documents: processedDocuments, input_documents: processedDocuments,
}); });
@ -381,15 +380,19 @@ export class ChainSummarizationV2 implements INodeType {
switch (chunkingMode) { switch (chunkingMode) {
// In simple mode we use recursive character splitter with default settings // In simple mode we use recursive character splitter with default settings
case 'simple': case 'simple':
const chunkSize = this.getNodeParameter('chunkSize', itemIndex, 1000) as number; const chunkSize = context.getNodeParameter('chunkSize', itemIndex, 1000) as number;
const chunkOverlap = this.getNodeParameter('chunkOverlap', itemIndex, 200) as number; const chunkOverlap = context.getNodeParameter(
'chunkOverlap',
itemIndex,
200,
) as number;
textSplitter = new RecursiveCharacterTextSplitter({ chunkOverlap, chunkSize }); textSplitter = new RecursiveCharacterTextSplitter({ chunkOverlap, chunkSize });
break; break;
// In advanced mode user can connect text splitter node so we just retrieve it // In advanced mode user can connect text splitter node so we just retrieve it
case 'advanced': case 'advanced':
textSplitter = (await this.getInputConnectionData( textSplitter = (await context.getInputConnectionData(
NodeConnectionType.AiTextSplitter, NodeConnectionType.AiTextSplitter,
0, 0,
)) as TextSplitter | undefined; )) as TextSplitter | undefined;
@ -400,14 +403,14 @@ export class ChainSummarizationV2 implements INodeType {
let processor: N8nJsonLoader | N8nBinaryLoader; let processor: N8nJsonLoader | N8nBinaryLoader;
if (operationMode === 'nodeInputBinary') { if (operationMode === 'nodeInputBinary') {
const binaryDataKey = this.getNodeParameter( const binaryDataKey = context.getNodeParameter(
'options.binaryDataKey', 'options.binaryDataKey',
itemIndex, itemIndex,
'data', 'data',
) as string; ) as string;
processor = new N8nBinaryLoader(this, 'options.', binaryDataKey, textSplitter); processor = new N8nBinaryLoader(context, 'options.', binaryDataKey, textSplitter);
} else { } else {
processor = new N8nJsonLoader(this, 'options.', textSplitter); processor = new N8nJsonLoader(context, 'options.', textSplitter);
} }
const processedItem = await processor.processItem(item, itemIndex); const processedItem = await processor.processItem(item, itemIndex);
@ -417,7 +420,7 @@ export class ChainSummarizationV2 implements INodeType {
returnData.push({ json: { response } }); returnData.push({ json: { response } });
} }
} catch (error) { } catch (error) {
if (this.continueOnFail()) { if (context.continueOnFail()) {
returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } }); returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
continue; continue;
} }

View file

@ -3,11 +3,10 @@ import { HumanMessage } from '@langchain/core/messages';
import { ChatPromptTemplate, SystemMessagePromptTemplate } from '@langchain/core/prompts'; import { ChatPromptTemplate, SystemMessagePromptTemplate } from '@langchain/core/prompts';
import type { JSONSchema7 } from 'json-schema'; import type { JSONSchema7 } from 'json-schema';
import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers'; import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers';
import { jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import { AiRootNode, jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type { import type {
INodeType,
INodeTypeDescription, INodeTypeDescription,
IExecuteFunctions, AiRootNodeExecuteFunctions,
INodeExecutionData, INodeExecutionData,
INodePropertyOptions, INodePropertyOptions,
} from 'n8n-workflow'; } from 'n8n-workflow';
@ -15,7 +14,6 @@ import type { z } from 'zod';
import { inputSchemaField, jsonSchemaExampleField, schemaTypeField } from '@utils/descriptions'; import { inputSchemaField, jsonSchemaExampleField, schemaTypeField } from '@utils/descriptions';
import { convertJsonSchemaToZod, generateSchema } from '@utils/schemaParsing'; import { convertJsonSchemaToZod, generateSchema } from '@utils/schemaParsing';
import { getTracingConfig } from '@utils/tracing';
import { makeZodSchemaFromAttributes } from './helpers'; import { makeZodSchemaFromAttributes } from './helpers';
import type { AttributeDefinition } from './types'; import type { AttributeDefinition } from './types';
@ -24,7 +22,7 @@ const SYSTEM_PROMPT_TEMPLATE = `You are an expert extraction algorithm.
Only extract relevant information from the text. Only extract relevant information from the text.
If you do not know the value of an attribute asked to extract, you may omit the attribute's value.`; If you do not know the value of an attribute asked to extract, you may omit the attribute's value.`;
export class InformationExtractor implements INodeType { export class InformationExtractor extends AiRootNode {
description: INodeTypeDescription = { description: INodeTypeDescription = {
displayName: 'Information Extractor', displayName: 'Information Extractor',
name: 'informationExtractor', name: 'informationExtractor',
@ -218,15 +216,15 @@ export class InformationExtractor implements INodeType {
], ],
}; };
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> { async execute(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData(); const items = context.getInputData();
const llm = (await this.getInputConnectionData( const llm = (await context.getInputConnectionData(
NodeConnectionType.AiLanguageModel, NodeConnectionType.AiLanguageModel,
0, 0,
)) as BaseLanguageModel; )) as BaseLanguageModel;
const schemaType = this.getNodeParameter('schemaType', 0, '') as const schemaType = context.getNodeParameter('schemaType', 0, '') as
| 'fromAttributes' | 'fromAttributes'
| 'fromJson' | 'fromJson'
| 'manual'; | 'manual';
@ -234,14 +232,14 @@ export class InformationExtractor implements INodeType {
let parser: OutputFixingParser<object>; let parser: OutputFixingParser<object>;
if (schemaType === 'fromAttributes') { if (schemaType === 'fromAttributes') {
const attributes = this.getNodeParameter( const attributes = context.getNodeParameter(
'attributes.attributes', 'attributes.attributes',
0, 0,
[], [],
) as AttributeDefinition[]; ) as AttributeDefinition[];
if (attributes.length === 0) { if (attributes.length === 0) {
throw new NodeOperationError(this.getNode(), 'At least one attribute must be specified'); throw new NodeOperationError(context.getNode(), 'At least one attribute must be specified');
} }
parser = OutputFixingParser.fromLLM( parser = OutputFixingParser.fromLLM(
@ -252,10 +250,10 @@ export class InformationExtractor implements INodeType {
let jsonSchema: JSONSchema7; let jsonSchema: JSONSchema7;
if (schemaType === 'fromJson') { if (schemaType === 'fromJson') {
const jsonExample = this.getNodeParameter('jsonSchemaExample', 0, '') as string; const jsonExample = context.getNodeParameter('jsonSchemaExample', 0, '') as string;
jsonSchema = generateSchema(jsonExample); jsonSchema = generateSchema(jsonExample);
} else { } else {
const inputSchema = this.getNodeParameter('inputSchema', 0, '') as string; const inputSchema = context.getNodeParameter('inputSchema', 0, '') as string;
jsonSchema = jsonParse<JSONSchema7>(inputSchema); jsonSchema = jsonParse<JSONSchema7>(inputSchema);
} }
@ -266,10 +264,10 @@ export class InformationExtractor implements INodeType {
const resultData: INodeExecutionData[] = []; const resultData: INodeExecutionData[] = [];
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) { for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
const input = this.getNodeParameter('text', itemIndex) as string; const input = context.getNodeParameter('text', itemIndex) as string;
const inputPrompt = new HumanMessage(input); const inputPrompt = new HumanMessage(input);
const options = this.getNodeParameter('options', itemIndex, {}) as { const options = context.getNodeParameter('options', itemIndex, {}) as {
systemPromptTemplate?: string; systemPromptTemplate?: string;
}; };
@ -285,13 +283,13 @@ export class InformationExtractor implements INodeType {
inputPrompt, inputPrompt,
]; ];
const prompt = ChatPromptTemplate.fromMessages(messages); const prompt = ChatPromptTemplate.fromMessages(messages);
const chain = prompt.pipe(llm).pipe(parser).withConfig(getTracingConfig(this)); const chain = prompt.pipe(llm).pipe(parser).withConfig(context.getTracingConfig());
try { try {
const output = await chain.invoke(messages); const output = await chain.invoke(messages);
resultData.push({ json: { output } }); resultData.push({ json: { output } });
} catch (error) { } catch (error) {
if (this.continueOnFail()) { if (context.continueOnFail()) {
resultData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } }); resultData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
continue; continue;
} }

View file

@ -5,16 +5,13 @@ import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_par
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type { import type {
IDataObject, IDataObject,
IExecuteFunctions, AiRootNodeExecuteFunctions,
INodeExecutionData, INodeExecutionData,
INodeParameters, INodeParameters,
INodeType,
INodeTypeDescription, INodeTypeDescription,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { z } from 'zod'; import { z } from 'zod';
import { getTracingConfig } from '@utils/tracing';
const DEFAULT_SYSTEM_PROMPT_TEMPLATE = const DEFAULT_SYSTEM_PROMPT_TEMPLATE =
'You are highly intelligent and accurate sentiment analyzer. Analyze the sentiment of the provided text. Categorize it into one of the following: {categories}. Use the provided formatting instructions. Only output the JSON.'; 'You are highly intelligent and accurate sentiment analyzer. Analyze the sentiment of the provided text. Categorize it into one of the following: {categories}. Use the provided formatting instructions. Only output the JSON.';
@ -28,7 +25,7 @@ const configuredOutputs = (parameters: INodeParameters, defaultCategories: strin
return ret; return ret;
}; };
export class SentimentAnalysis implements INodeType { export class SentimentAnalysis extends AiRootNode {
description: INodeTypeDescription = { description: INodeTypeDescription = {
displayName: 'Sentiment Analysis', displayName: 'Sentiment Analysis',
name: 'sentimentAnalysis', name: 'sentimentAnalysis',
@ -136,10 +133,10 @@ export class SentimentAnalysis implements INodeType {
], ],
}; };
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> { async execute(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData(); const items = context.getInputData();
const llm = (await this.getInputConnectionData( const llm = (await context.getInputConnectionData(
NodeConnectionType.AiLanguageModel, NodeConnectionType.AiLanguageModel,
0, 0,
)) as BaseLanguageModel; )) as BaseLanguageModel;
@ -148,7 +145,7 @@ export class SentimentAnalysis implements INodeType {
for (let i = 0; i < items.length; i++) { for (let i = 0; i < items.length; i++) {
try { try {
const sentimentCategories = this.getNodeParameter( const sentimentCategories = context.getNodeParameter(
'options.categories', 'options.categories',
i, i,
DEFAULT_CATEGORIES, DEFAULT_CATEGORIES,
@ -160,7 +157,7 @@ export class SentimentAnalysis implements INodeType {
.filter(Boolean); .filter(Boolean);
if (categories.length === 0) { if (categories.length === 0) {
throw new NodeOperationError(this.getNode(), 'No sentiment categories provided', { throw new NodeOperationError(context.getNode(), 'No sentiment categories provided', {
itemIndex: i, itemIndex: i,
}); });
} }
@ -170,7 +167,7 @@ export class SentimentAnalysis implements INodeType {
returnData.push(...Array.from({ length: categories.length }, () => [])); returnData.push(...Array.from({ length: categories.length }, () => []));
} }
const options = this.getNodeParameter('options', i, {}) as { const options = context.getNodeParameter('options', i, {}) as {
systemPromptTemplate?: string; systemPromptTemplate?: string;
includeDetailedResults?: boolean; includeDetailedResults?: boolean;
enableAutoFixing?: boolean; enableAutoFixing?: boolean;
@ -197,7 +194,7 @@ export class SentimentAnalysis implements INodeType {
{format_instructions}`, {format_instructions}`,
); );
const input = this.getNodeParameter('inputText', i) as string; const input = context.getNodeParameter('inputText', i) as string;
const inputPrompt = new HumanMessage(input); const inputPrompt = new HumanMessage(input);
const messages = [ const messages = [
await systemPromptTemplate.format({ await systemPromptTemplate.format({
@ -208,7 +205,7 @@ export class SentimentAnalysis implements INodeType {
]; ];
const prompt = ChatPromptTemplate.fromMessages(messages); const prompt = ChatPromptTemplate.fromMessages(messages);
const chain = prompt.pipe(llm).pipe(parser).withConfig(getTracingConfig(this)); const chain = prompt.pipe(llm).pipe(parser).withConfig(context.getTracingConfig());
try { try {
const output = await chain.invoke(messages); const output = await chain.invoke(messages);
@ -233,7 +230,7 @@ export class SentimentAnalysis implements INodeType {
} }
} catch (error) { } catch (error) {
throw new NodeOperationError( throw new NodeOperationError(
this.getNode(), context.getNode(),
'Error during parsing of LLM output, please check your LLM model and configuration', 'Error during parsing of LLM output, please check your LLM model and configuration',
{ {
itemIndex: i, itemIndex: i,
@ -241,9 +238,9 @@ export class SentimentAnalysis implements INodeType {
); );
} }
} catch (error) { } catch (error) {
if (this.continueOnFail()) { if (context.continueOnFail()) {
const executionErrorData = this.helpers.constructExecutionMetaData( const executionErrorData = context.helpers.constructExecutionMetaData(
this.helpers.returnJsonArray({ error: error.message }), context.helpers.returnJsonArray({ error: error.message }),
{ itemData: { item: i } }, { itemData: { item: i } },
); );
returnData[0].push(...executionErrorData); returnData[0].push(...executionErrorData);

View file

@ -2,19 +2,16 @@ import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import { HumanMessage } from '@langchain/core/messages'; import { HumanMessage } from '@langchain/core/messages';
import { SystemMessagePromptTemplate, ChatPromptTemplate } from '@langchain/core/prompts'; import { SystemMessagePromptTemplate, ChatPromptTemplate } from '@langchain/core/prompts';
import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers'; import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers';
import { NodeOperationError, NodeConnectionType } from 'n8n-workflow'; import { NodeOperationError, NodeConnectionType, AiRootNode } from 'n8n-workflow';
import type { import type {
IDataObject, IDataObject,
IExecuteFunctions, AiRootNodeExecuteFunctions,
INodeExecutionData, INodeExecutionData,
INodeParameters, INodeParameters,
INodeType,
INodeTypeDescription, INodeTypeDescription,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { z } from 'zod'; import { z } from 'zod';
import { getTracingConfig } from '@utils/tracing';
const SYSTEM_PROMPT_TEMPLATE = const SYSTEM_PROMPT_TEMPLATE =
"Please classify the text provided by the user into one of the following categories: {categories}, and use the provided formatting instructions below. Don't explain, and only output the json."; "Please classify the text provided by the user into one of the following categories: {categories}, and use the provided formatting instructions below. Don't explain, and only output the json.";
@ -28,7 +25,7 @@ const configuredOutputs = (parameters: INodeParameters) => {
return ret; return ret;
}; };
export class TextClassifier implements INodeType { export class TextClassifier extends AiRootNode {
description: INodeTypeDescription = { description: INodeTypeDescription = {
displayName: 'Text Classifier', displayName: 'Text Classifier',
name: 'textClassifier', name: 'textClassifier',
@ -163,24 +160,24 @@ export class TextClassifier implements INodeType {
], ],
}; };
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> { async execute(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData(); const items = context.getInputData();
const llm = (await this.getInputConnectionData( const llm = (await context.getInputConnectionData(
NodeConnectionType.AiLanguageModel, NodeConnectionType.AiLanguageModel,
0, 0,
)) as BaseLanguageModel; )) as BaseLanguageModel;
const categories = this.getNodeParameter('categories.categories', 0, []) as Array<{ const categories = context.getNodeParameter('categories.categories', 0, []) as Array<{
category: string; category: string;
description: string; description: string;
}>; }>;
if (categories.length === 0) { if (categories.length === 0) {
throw new NodeOperationError(this.getNode(), 'At least one category must be defined'); throw new NodeOperationError(context.getNode(), 'At least one category must be defined');
} }
const options = this.getNodeParameter('options', 0, {}) as { const options = context.getNodeParameter('options', 0, {}) as {
multiClass: boolean; multiClass: boolean;
fallback?: string; fallback?: string;
systemPromptTemplate?: string; systemPromptTemplate?: string;
@ -226,10 +223,10 @@ export class TextClassifier implements INodeType {
for (let itemIdx = 0; itemIdx < items.length; itemIdx++) { for (let itemIdx = 0; itemIdx < items.length; itemIdx++) {
const item = items[itemIdx]; const item = items[itemIdx];
item.pairedItem = { item: itemIdx }; item.pairedItem = { item: itemIdx };
const input = this.getNodeParameter('inputText', itemIdx) as string; const input = context.getNodeParameter('inputText', itemIdx) as string;
const inputPrompt = new HumanMessage(input); const inputPrompt = new HumanMessage(input);
const systemPromptTemplateOpt = this.getNodeParameter( const systemPromptTemplateOpt = context.getNodeParameter(
'options.systemPromptTemplate', 'options.systemPromptTemplate',
itemIdx, itemIdx,
SYSTEM_PROMPT_TEMPLATE, SYSTEM_PROMPT_TEMPLATE,
@ -249,7 +246,7 @@ ${fallbackPrompt}`,
inputPrompt, inputPrompt,
]; ];
const prompt = ChatPromptTemplate.fromMessages(messages); const prompt = ChatPromptTemplate.fromMessages(messages);
const chain = prompt.pipe(llm).pipe(parser).withConfig(getTracingConfig(this)); const chain = prompt.pipe(llm).pipe(parser).withConfig(context.getTracingConfig());
try { try {
const output = await chain.invoke(messages); const output = await chain.invoke(messages);
@ -259,7 +256,7 @@ ${fallbackPrompt}`,
}); });
if (fallback === 'other' && output.fallback) returnData[returnData.length - 1].push(item); if (fallback === 'other' && output.fallback) returnData[returnData.length - 1].push(item);
} catch (error) { } catch (error) {
if (this.continueOnFail()) { if (context.continueOnFail()) {
returnData[0].push({ returnData[0].push({
json: { error: error.message }, json: { error: error.message },
pairedItem: { item: itemIdx }, pairedItem: { item: itemIdx },

View file

@ -13,8 +13,6 @@ import { pick } from 'lodash';
import type { IDataObject, ISupplyDataFunctions, JsonObject } from 'n8n-workflow'; import type { IDataObject, ISupplyDataFunctions, JsonObject } from 'n8n-workflow';
import { NodeConnectionType, NodeError, NodeOperationError } from 'n8n-workflow'; import { NodeConnectionType, NodeError, NodeOperationError } from 'n8n-workflow';
import { logAiEvent } from '@utils/helpers';
type TokensUsageParser = (llmOutput: LLMResult['llmOutput']) => { type TokensUsageParser = (llmOutput: LLMResult['llmOutput']) => {
completionTokens: number; completionTokens: number;
promptTokens: number; promptTokens: number;
@ -141,7 +139,7 @@ export class N8nLlmTracing extends BaseCallbackHandler {
[{ json: { ...response } }], [{ json: { ...response } }],
]); ]);
logAiEvent(this.executionFunctions, 'ai-llm-generated-output', { this.executionFunctions.logAiEvent('ai-llm-generated-output', {
messages: parsedMessages, messages: parsedMessages,
options: runDetails.options, options: runDetails.options,
response, response,
@ -204,7 +202,7 @@ export class N8nLlmTracing extends BaseCallbackHandler {
); );
} }
logAiEvent(this.executionFunctions, 'ai-llm-errored', { this.executionFunctions.logAiEvent('ai-llm-errored', {
error: Object.keys(error).length === 0 ? error.toString() : error, error: Object.keys(error).length === 0 ? error.toString() : error,
runId, runId,
parentRunId, parentRunId,

View file

@ -12,6 +12,7 @@ import type {
SupplyData, SupplyData,
ExecutionError, ExecutionError,
IDataObject, IDataObject,
ZodObjectAny,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import { jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow';
@ -19,8 +20,6 @@ import { inputSchemaField, jsonSchemaExampleField, schemaTypeField } from '@util
import { convertJsonSchemaToZod, generateSchema } from '@utils/schemaParsing'; import { convertJsonSchemaToZod, generateSchema } from '@utils/schemaParsing';
import { getConnectionHintNoticeField } from '@utils/sharedFields'; import { getConnectionHintNoticeField } from '@utils/sharedFields';
import type { DynamicZodObject } from '../../../types/zod.types';
export class ToolCode implements INodeType { export class ToolCode implements INodeType {
description: INodeTypeDescription = { description: INodeTypeDescription = {
displayName: 'Code Tool', displayName: 'Code Tool',
@ -269,7 +268,7 @@ export class ToolCode implements INodeType {
? generateSchema(jsonExample) ? generateSchema(jsonExample)
: jsonParse<JSONSchema7>(inputSchema); : jsonParse<JSONSchema7>(inputSchema);
const zodSchema = convertJsonSchemaToZod<DynamicZodObject>(jsonSchema); const zodSchema = convertJsonSchemaToZod<ZodObjectAny>(jsonSchema);
tool = new DynamicStructuredTool({ tool = new DynamicStructuredTool({
schema: zodSchema, schema: zodSchema,

View file

@ -1,5 +1,6 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import { DynamicTool } from '@langchain/core/tools'; import { DynamicTool } from '@langchain/core/tools';
import { N8nTool } from '@utils/N8nTool';
import type { import type {
INodeType, INodeType,
INodeTypeDescription, INodeTypeDescription,
@ -10,7 +11,6 @@ import type {
} from 'n8n-workflow'; } from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError, tryToParseAlphanumericString } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError, tryToParseAlphanumericString } from 'n8n-workflow';
import { N8nTool } from '@utils/N8nTool';
import { getConnectionHintNoticeField } from '@utils/sharedFields'; import { getConnectionHintNoticeField } from '@utils/sharedFields';
import { import {
@ -407,6 +407,7 @@ export class ToolHttpRequest implements INodeType {
if (this.getNode().typeVersion >= 1.1) { if (this.getNode().typeVersion >= 1.1) {
const schema = makeToolInputSchema(toolParameters); const schema = makeToolInputSchema(toolParameters);
// TODO: add a new this.createN8NTool method
tool = new N8nTool(this, { tool = new N8nTool(this, {
name, name,
description: toolDescription, description: toolDescription,

View file

@ -1,9 +1,8 @@
import type { N8nTool } from '@utils/N8nTool';
import { mock } from 'jest-mock-extended'; import { mock } from 'jest-mock-extended';
import type { IExecuteFunctions, INode } from 'n8n-workflow'; import type { IExecuteFunctions, INode } from 'n8n-workflow';
import { jsonParse } from 'n8n-workflow'; import { jsonParse } from 'n8n-workflow';
import type { N8nTool } from '@utils/N8nTool';
import { ToolHttpRequest } from '../ToolHttpRequest.node'; import { ToolHttpRequest } from '../ToolHttpRequest.node';
describe('ToolHttpRequest', () => { describe('ToolHttpRequest', () => {

View file

@ -14,6 +14,7 @@ import type {
ExecutionError, ExecutionError,
NodeApiError, NodeApiError,
ISupplyDataFunctions, ISupplyDataFunctions,
ZodObjectAny,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow';
import { z } from 'zod'; import { z } from 'zod';
@ -26,7 +27,6 @@ import type {
SendIn, SendIn,
ToolParameter, ToolParameter,
} from './interfaces'; } from './interfaces';
import type { DynamicZodObject } from '../../../types/zod.types';
const genericCredentialRequest = async (ctx: ISupplyDataFunctions, itemIndex: number) => { const genericCredentialRequest = async (ctx: ISupplyDataFunctions, itemIndex: number) => {
const genericType = ctx.getNodeParameter('genericAuthType', itemIndex) as string; const genericType = ctx.getNodeParameter('genericAuthType', itemIndex) as string;
@ -814,7 +814,7 @@ function makeParameterZodSchema(parameter: ToolParameter) {
return schema; return schema;
} }
export function makeToolInputSchema(parameters: ToolParameter[]): DynamicZodObject { export function makeToolInputSchema(parameters: ToolParameter[]): ZodObjectAny {
const schemaEntries = parameters.map((parameter) => [ const schemaEntries = parameters.map((parameter) => [
parameter.name, parameter.name,
makeParameterZodSchema(parameter), makeParameterZodSchema(parameter),

View file

@ -18,6 +18,7 @@ import type {
IDataObject, IDataObject,
INodeParameterResourceLocator, INodeParameterResourceLocator,
ITaskMetadata, ITaskMetadata,
ZodObjectAny,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow';
@ -25,8 +26,6 @@ import { jsonSchemaExampleField, schemaTypeField, inputSchemaField } from '@util
import { convertJsonSchemaToZod, generateSchema } from '@utils/schemaParsing'; import { convertJsonSchemaToZod, generateSchema } from '@utils/schemaParsing';
import { getConnectionHintNoticeField } from '@utils/sharedFields'; import { getConnectionHintNoticeField } from '@utils/sharedFields';
import type { DynamicZodObject } from '../../../types/zod.types';
export class ToolWorkflow implements INodeType { export class ToolWorkflow implements INodeType {
description: INodeTypeDescription = { description: INodeTypeDescription = {
displayName: 'Call n8n Workflow Tool', displayName: 'Call n8n Workflow Tool',
@ -543,7 +542,7 @@ export class ToolWorkflow implements INodeType {
? generateSchema(jsonExample) ? generateSchema(jsonExample)
: jsonParse<JSONSchema7>(inputSchema); : jsonParse<JSONSchema7>(inputSchema);
const zodSchema = convertJsonSchemaToZod<DynamicZodObject>(jsonSchema); const zodSchema = convertJsonSchemaToZod<ZodObjectAny>(jsonSchema);
tool = new DynamicStructuredTool({ tool = new DynamicStructuredTool({
schema: zodSchema, schema: zodSchema,

View file

@ -19,7 +19,7 @@ import type {
INodePropertyOptions, INodePropertyOptions,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { getMetadataFiltersValues, logAiEvent } from '@utils/helpers'; import { getMetadataFiltersValues } from '@utils/helpers';
import { logWrapper } from '@utils/logWrapper'; import { logWrapper } from '@utils/logWrapper';
import type { N8nBinaryLoader } from '@utils/N8nBinaryLoader'; import type { N8nBinaryLoader } from '@utils/N8nBinaryLoader';
import { N8nJsonLoader } from '@utils/N8nJsonLoader'; import { N8nJsonLoader } from '@utils/N8nJsonLoader';
@ -283,7 +283,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
}); });
resultData.push(...serializedDocs); resultData.push(...serializedDocs);
logAiEvent(this, 'ai-vector-store-searched', { query: prompt }); this.logAiEvent('ai-vector-store-searched', { query: prompt });
} }
return [resultData]; return [resultData];
@ -313,7 +313,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
try { try {
await args.populateVectorStore(this, embeddings, processedDocuments, itemIndex); await args.populateVectorStore(this, embeddings, processedDocuments, itemIndex);
logAiEvent(this, 'ai-vector-store-populated'); this.logAiEvent('ai-vector-store-populated');
} catch (error) { } catch (error) {
throw error; throw error;
} }
@ -367,7 +367,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
ids: [documentId], ids: [documentId],
}); });
logAiEvent(this, 'ai-vector-store-updated'); this.logAiEvent('ai-vector-store-updated');
} catch (error) { } catch (error) {
throw error; throw error;
} }

View file

@ -1,10 +1,8 @@
import type { IExecuteFunctions, INodeType } from 'n8n-workflow';
import { router } from './actions/router'; import { router } from './actions/router';
import { versionDescription } from './actions/versionDescription'; import { versionDescription } from './actions/versionDescription';
import { listSearch, loadOptions } from './methods'; import { listSearch, loadOptions } from './methods';
export class OpenAi implements INodeType { export class OpenAi extends AiRootNode {
description = versionDescription; description = versionDescription;
methods = { methods = {
@ -12,7 +10,7 @@ export class OpenAi implements INodeType {
loadOptions, loadOptions,
}; };
async execute(this: IExecuteFunctions) { async execute(context: AiRootNodeExecuteFunctions) {
return await router.call(this); return await router.call(context);
} }
} }

View file

@ -6,7 +6,7 @@ import type { BufferWindowMemory } from 'langchain/memory';
import omit from 'lodash/omit'; import omit from 'lodash/omit';
import type { import type {
IDataObject, IDataObject,
IExecuteFunctions, AiRootNodeExecuteFunctions,
INodeExecutionData, INodeExecutionData,
INodeProperties, INodeProperties,
} from 'n8n-workflow'; } from 'n8n-workflow';
@ -19,8 +19,6 @@ import {
import { OpenAI as OpenAIClient } from 'openai'; import { OpenAI as OpenAIClient } from 'openai';
import { promptTypeOptions } from '@utils/descriptions'; import { promptTypeOptions } from '@utils/descriptions';
import { getConnectedTools } from '@utils/helpers';
import { getTracingConfig } from '@utils/tracing';
import { formatToOpenAIAssistantTool } from '../../helpers/utils'; import { formatToOpenAIAssistantTool } from '../../helpers/utils';
import { assistantRLC } from '../descriptions'; import { assistantRLC } from '../descriptions';
@ -153,7 +151,10 @@ const mapChatMessageToThreadMessage = (
content: message.content.toString(), content: message.content.toString(),
}); });
export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> { export async function execute(
this: AiRootNodeExecuteFunctions,
i: number,
): Promise<INodeExecutionData[]> {
const credentials = await this.getCredentials('openAiApi'); const credentials = await this.getCredentials('openAiApi');
const nodeVersion = this.getNode().typeVersion; const nodeVersion = this.getNode().typeVersion;
@ -191,7 +192,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
const agent = new OpenAIAssistantRunnable({ assistantId, client, asAgent: true }); const agent = new OpenAIAssistantRunnable({ assistantId, client, asAgent: true });
const tools = await getConnectedTools(this, nodeVersion > 1, false); const tools = await this.getConnectedTools(nodeVersion > 1, false);
let assistantTools; let assistantTools;
if (tools.length) { if (tools.length) {
@ -270,7 +271,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
let filteredResponse: IDataObject = {}; let filteredResponse: IDataObject = {};
try { try {
const response = await agentExecutor.withConfig(getTracingConfig(this)).invoke(chainValues); const response = await agentExecutor.withConfig(this.getTracingConfig()).invoke(chainValues);
if (memory) { if (memory) {
await memory.saveContext({ input }, { output: response.output }); await memory.saveContext({ input }, { output: response.output });

View file

@ -1,6 +1,6 @@
import { import {
NodeOperationError, NodeOperationError,
type IExecuteFunctions, type AiRootNodeExecuteFunctions,
type INodeExecutionData, type INodeExecutionData,
NodeApiError, NodeApiError,
} from 'n8n-workflow'; } from 'n8n-workflow';
@ -13,7 +13,7 @@ import type { OpenAiType } from './node.type';
import * as text from './text'; import * as text from './text';
import { getCustomErrorMessage } from '../helpers/error-handling'; import { getCustomErrorMessage } from '../helpers/error-handling';
export async function router(this: IExecuteFunctions) { export async function router(this: AiRootNodeExecuteFunctions) {
const returnData: INodeExecutionData[] = []; const returnData: INodeExecutionData[] = [];
const items = this.getInputData(); const items = this.getInputData();

View file

@ -2,14 +2,12 @@ import type { Tool } from '@langchain/core/tools';
import _omit from 'lodash/omit'; import _omit from 'lodash/omit';
import type { import type {
INodeProperties, INodeProperties,
IExecuteFunctions, AiRootNodeExecuteFunctions,
INodeExecutionData, INodeExecutionData,
IDataObject, IDataObject,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { jsonParse, updateDisplayOptions } from 'n8n-workflow'; import { jsonParse, updateDisplayOptions } from 'n8n-workflow';
import { getConnectedTools } from '@utils/helpers';
import { MODELS_NOT_SUPPORT_FUNCTION_CALLS } from '../../helpers/constants'; import { MODELS_NOT_SUPPORT_FUNCTION_CALLS } from '../../helpers/constants';
import type { ChatCompletion } from '../../helpers/interfaces'; import type { ChatCompletion } from '../../helpers/interfaces';
import { formatToOpenAIAssistantTool } from '../../helpers/utils'; import { formatToOpenAIAssistantTool } from '../../helpers/utils';
@ -199,7 +197,10 @@ const displayOptions = {
export const description = updateDisplayOptions(displayOptions, properties); export const description = updateDisplayOptions(displayOptions, properties);
export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> { export async function execute(
this: AiRootNodeExecuteFunctions,
i: number,
): Promise<INodeExecutionData[]> {
const nodeVersion = this.getNode().typeVersion; const nodeVersion = this.getNode().typeVersion;
const model = this.getNodeParameter('modelId', i, '', { extractValue: true }); const model = this.getNodeParameter('modelId', i, '', { extractValue: true });
let messages = this.getNodeParameter('messages.values', i, []) as IDataObject[]; let messages = this.getNodeParameter('messages.values', i, []) as IDataObject[];
@ -239,7 +240,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
if (hideTools !== 'hide') { if (hideTools !== 'hide') {
const enforceUniqueNames = nodeVersion > 1; const enforceUniqueNames = nodeVersion > 1;
externalTools = await getConnectedTools(this, enforceUniqueNames, false); externalTools = await this.getConnectedTools(enforceUniqueNames, false);
} }
if (externalTools.length) { if (externalTools.length) {

View file

@ -168,7 +168,7 @@
"generate-schema": "2.6.0", "generate-schema": "2.6.0",
"html-to-text": "9.0.5", "html-to-text": "9.0.5",
"jsdom": "23.0.1", "jsdom": "23.0.1",
"langchain": "0.3.6", "langchain": "catalog:",
"lodash": "catalog:", "lodash": "catalog:",
"mammoth": "1.7.2", "mammoth": "1.7.2",
"mime-types": "2.1.35", "mime-types": "2.1.35",

View file

@ -1,4 +0,0 @@
import type { z } from 'zod';
// Zod object type with all four type parameters relaxed to `any`, for schemas
// that are constructed dynamically at runtime (e.g. converted from a parsed
// JSON Schema), where the concrete shape cannot be known statically.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export type DynamicZodObject = z.ZodObject<any, any, any, any>;

View file

@ -4,16 +4,8 @@ import type { BaseLLM } from '@langchain/core/language_models/llms';
import type { BaseMessage } from '@langchain/core/messages'; import type { BaseMessage } from '@langchain/core/messages';
import type { Tool } from '@langchain/core/tools'; import type { Tool } from '@langchain/core/tools';
import type { BaseChatMemory } from 'langchain/memory'; import type { BaseChatMemory } from 'langchain/memory';
import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow'; import { NodeOperationError } from 'n8n-workflow';
import type { import type { IExecuteFunctions, ISupplyDataFunctions, IWebhookFunctions } from 'n8n-workflow';
AiEvent,
IDataObject,
IExecuteFunctions,
ISupplyDataFunctions,
IWebhookFunctions,
} from 'n8n-workflow';
import { N8nTool } from './N8nTool';
function hasMethods<T>(obj: unknown, ...methodNames: Array<string | symbol>): obj is T { function hasMethods<T>(obj: unknown, ...methodNames: Array<string | symbol>): obj is T {
return methodNames.every( return methodNames.every(
@ -72,32 +64,6 @@ export function isToolsInstance(model: unknown): model is Tool {
return namespace.includes('tools'); return namespace.includes('tools');
} }
/**
 * Resolves the prompt text for the item at index `i`.
 *
 * When the prompt-type parameter is 'auto', the text is read from the incoming
 * item's `chatInput` field (what the chat trigger node outputs); otherwise it
 * is read from the node parameter named by `inputKey`.
 *
 * @throws NodeOperationError when no prompt value could be resolved.
 */
export function getPromptInputByType(options: {
	ctx: IExecuteFunctions;
	i: number;
	promptTypeKey: string;
	inputKey: string;
}) {
	const { ctx, i, promptTypeKey, inputKey } = options;
	const promptType = ctx.getNodeParameter(promptTypeKey, i) as string;

	const input =
		promptType === 'auto'
			? (ctx.evaluateExpression('{{ $json["chatInput"] }}', i) as string)
			: (ctx.getNodeParameter(inputKey, i) as string);

	if (input === undefined) {
		throw new NodeOperationError(ctx.getNode(), 'No prompt specified', {
			description:
				"Expected to find the prompt in an input field called 'chatInput' (this is what the chat trigger node outputs). To use something else, change the 'Prompt' parameter",
		});
	}

	return input;
}
export function getSessionId( export function getSessionId(
ctx: ISupplyDataFunctions | IWebhookFunctions, ctx: ISupplyDataFunctions | IWebhookFunctions,
itemIndex: number, itemIndex: number,
@ -139,18 +105,6 @@ export function getSessionId(
return sessionId; return sessionId;
} }
/**
 * Best-effort forwarding of an AI telemetry event to the execution context.
 *
 * Serialization of `data` happens inside the try block on purpose: if either
 * `jsonStringify` or the context's `logAiEvent` throws, the failure is only
 * logged at debug level and never interrupts the node's execution.
 */
export function logAiEvent(
	executeFunctions: IExecuteFunctions | ISupplyDataFunctions,
	event: AiEvent,
	data?: IDataObject,
) {
	try {
		const serialized = data ? jsonStringify(data) : undefined;
		executeFunctions.logAiEvent(event, serialized);
	} catch (error) {
		executeFunctions.logger.debug(`Error logging AI event: ${event}`);
	}
}
export function serializeChatHistory(chatHistory: BaseMessage[]): string { export function serializeChatHistory(chatHistory: BaseMessage[]): string {
return chatHistory return chatHistory
.map((chatMessage) => { .map((chatMessage) => {
@ -164,60 +118,3 @@ export function serializeChatHistory(chatHistory: BaseMessage[]): string {
}) })
.join('\n'); .join('\n');
} }
/**
 * Doubles unpaired curly brackets so the text is safe to embed in a LangChain
 * prompt template (where `{name}` is an interpolation placeholder).
 *
 * Already-doubled brackets (`{{`, `}}`) are left untouched; triple brackets
 * become quadruple. Returns `undefined` when given `undefined`.
 */
export function escapeSingleCurlyBrackets(text?: string): string | undefined {
	if (text === undefined) return undefined;

	// Triple brackets first, so the single-bracket pass below cannot
	// misinterpret their edges as lone brackets.
	const triplesEscaped = text
		.replace(/(?<!{){{{(?!{)/g, '{{{{')
		.replace(/(?<!})}}}(?!})/g, '}}}}');

	// Lone brackets: only a `{`/`}` with no identical neighbour is doubled,
	// which leaves existing `{{`/`}}` pairs intact.
	return triplesEscaped
		.replace(/(?<!{){(?!{)/g, '{{')
		.replace(/(?<!})}(?!})/g, '}}');
}
/**
 * Collects the tools connected to this node's AiTool input.
 *
 * @param ctx execution context providing the input connection data
 * @param enforceUniqueNames when true, a duplicate tool name throws; when
 *   false, the connected tools are returned untouched
 * @param convertStructuredTool when true, each N8nTool is converted to a
 *   plain DynamicTool via `asDynamicTool()`
 * @param escapeCurlyBrackets when true, tool descriptions are escaped in
 *   place so they are safe to embed in prompt templates
 */
export const getConnectedTools = async (
	ctx: IExecuteFunctions,
	enforceUniqueNames: boolean,
	convertStructuredTool: boolean = true,
	escapeCurlyBrackets: boolean = false,
) => {
	const connected =
		((await ctx.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]) || [];

	if (!enforceUniqueNames) return connected;

	const usedNames = new Set<string>();
	const result = [];

	for (const tool of connected) {
		if (usedNames.has(tool.name)) {
			throw new NodeOperationError(
				ctx.getNode(),
				`You have multiple tools with the same name: '${tool.name}', please rename them to avoid conflicts`,
			);
		}
		usedNames.add(tool.name);

		if (escapeCurlyBrackets) {
			// Mutates the connected tool's description in place, as callers expect.
			tool.description = escapeSingleCurlyBrackets(tool.description) ?? tool.description;
		}

		result.push(convertStructuredTool && tool instanceof N8nTool ? tool.asDynamicTool() : tool);
	}

	return result;
};

View file

@ -18,7 +18,7 @@ import type {
} from 'n8n-workflow'; } from 'n8n-workflow';
import { NodeOperationError, NodeConnectionType } from 'n8n-workflow'; import { NodeOperationError, NodeConnectionType } from 'n8n-workflow';
import { logAiEvent, isToolsInstance, isBaseChatMemory, isBaseChatMessageHistory } from './helpers'; import { isToolsInstance, isBaseChatMemory, isBaseChatMessageHistory } from './helpers';
import { N8nBinaryLoader } from './N8nBinaryLoader'; import { N8nBinaryLoader } from './N8nBinaryLoader';
import { N8nJsonLoader } from './N8nJsonLoader'; import { N8nJsonLoader } from './N8nJsonLoader';
@ -182,7 +182,7 @@ export function logWrapper(
const payload = { action: 'getMessages', response }; const payload = { action: 'getMessages', response };
executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]); executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);
logAiEvent(executeFunctions, 'ai-messages-retrieved-from-memory', { response }); executeFunctions.logAiEvent('ai-messages-retrieved-from-memory', { response });
return response; return response;
}; };
} else if (prop === 'addMessage' && 'addMessage' in target) { } else if (prop === 'addMessage' && 'addMessage' in target) {
@ -199,7 +199,7 @@ export function logWrapper(
arguments: [message], arguments: [message],
}); });
logAiEvent(executeFunctions, 'ai-message-added-to-memory', { message }); executeFunctions.logAiEvent('ai-message-added-to-memory', { message });
executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]); executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);
}; };
} }
@ -236,7 +236,7 @@ export function logWrapper(
}; };
} }
logAiEvent(executeFunctions, 'ai-documents-retrieved', { query }); executeFunctions.logAiEvent('ai-documents-retrieved', { query });
executeFunctions.addOutputData( executeFunctions.addOutputData(
connectionType, connectionType,
index, index,
@ -266,7 +266,7 @@ export function logWrapper(
arguments: [documents], arguments: [documents],
})) as number[][]; })) as number[][];
logAiEvent(executeFunctions, 'ai-document-embedded'); executeFunctions.logAiEvent('ai-document-embedded');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response; return response;
}; };
@ -286,7 +286,7 @@ export function logWrapper(
method: target[prop], method: target[prop],
arguments: [query], arguments: [query],
})) as number[]; })) as number[];
logAiEvent(executeFunctions, 'ai-query-embedded'); executeFunctions.logAiEvent('ai-query-embedded');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response; return response;
}; };
@ -331,7 +331,7 @@ export function logWrapper(
arguments: [item, itemIndex], arguments: [item, itemIndex],
})) as number[]; })) as number[];
logAiEvent(executeFunctions, 'ai-document-processed'); executeFunctions.logAiEvent('ai-document-processed');
executeFunctions.addOutputData(connectionType, index, [ executeFunctions.addOutputData(connectionType, index, [
[{ json: { response }, pairedItem: { item: itemIndex } }], [{ json: { response }, pairedItem: { item: itemIndex } }],
]); ]);
@ -357,7 +357,7 @@ export function logWrapper(
arguments: [text], arguments: [text],
})) as string[]; })) as string[];
logAiEvent(executeFunctions, 'ai-text-split'); executeFunctions.logAiEvent('ai-text-split');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response; return response;
}; };
@ -381,7 +381,7 @@ export function logWrapper(
arguments: [query], arguments: [query],
})) as string; })) as string;
logAiEvent(executeFunctions, 'ai-tool-called', { query, response }); executeFunctions.logAiEvent('ai-tool-called', { query, response });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response; return response;
}; };
@ -411,7 +411,7 @@ export function logWrapper(
arguments: [query, k, filter, _callbacks], arguments: [query, k, filter, _callbacks],
})) as Array<Document<Record<string, any>>>; })) as Array<Document<Record<string, any>>>;
logAiEvent(executeFunctions, 'ai-vector-store-searched', { query }); executeFunctions.logAiEvent('ai-vector-store-searched', { query });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response; return response;

View file

@ -7,7 +7,6 @@ import type { ISupplyDataFunctions } from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow'; import { NodeConnectionType } from 'n8n-workflow';
import type { N8nStructuredOutputParser } from './N8nStructuredOutputParser'; import type { N8nStructuredOutputParser } from './N8nStructuredOutputParser';
import { logAiEvent } from '../helpers';
export class N8nOutputFixingParser extends BaseOutputParser { export class N8nOutputFixingParser extends BaseOutputParser {
lc_namespace = ['langchain', 'output_parsers', 'fix']; lc_namespace = ['langchain', 'output_parsers', 'fix'];
@ -40,7 +39,7 @@ export class N8nOutputFixingParser extends BaseOutputParser {
try { try {
// First attempt to parse the completion // First attempt to parse the completion
const response = await this.outputParser.parse(completion, callbacks, (e) => e); const response = await this.outputParser.parse(completion, callbacks, (e) => e);
logAiEvent(this.context, 'ai-output-parsed', { text: completion, response }); this.context.logAiEvent('ai-output-parsed', { text: completion, response });
this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [ this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [
[{ json: { action: 'parse', response } }], [{ json: { action: 'parse', response } }],

View file

@ -5,8 +5,6 @@ import type { ISupplyDataFunctions } from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import { z } from 'zod'; import { z } from 'zod';
import { logAiEvent } from '../helpers';
const STRUCTURED_OUTPUT_KEY = '__structured__output'; const STRUCTURED_OUTPUT_KEY = '__structured__output';
const STRUCTURED_OUTPUT_OBJECT_KEY = '__structured__output__object'; const STRUCTURED_OUTPUT_OBJECT_KEY = '__structured__output__object';
const STRUCTURED_OUTPUT_ARRAY_KEY = '__structured__output__array'; const STRUCTURED_OUTPUT_ARRAY_KEY = '__structured__output__array';
@ -41,7 +39,7 @@ export class N8nStructuredOutputParser extends StructuredOutputParser<
get(parsed, STRUCTURED_OUTPUT_KEY) ?? get(parsed, STRUCTURED_OUTPUT_KEY) ??
parsed) as Record<string, unknown>; parsed) as Record<string, unknown>;
logAiEvent(this.context, 'ai-output-parsed', { text, response: result }); this.context.logAiEvent('ai-output-parsed', { text, response: result });
this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [ this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [
[{ json: { action: 'parse', response: result } }], [{ json: { action: 'parse', response: result } }],
@ -58,7 +56,7 @@ export class N8nStructuredOutputParser extends StructuredOutputParser<
}, },
); );
logAiEvent(this.context, 'ai-output-parsed', { this.context.logAiEvent('ai-output-parsed', {
text, text,
response: e.message ?? e, response: e.message ?? e,
}); });

View file

@ -1,26 +0,0 @@
import type { BaseCallbackConfig } from '@langchain/core/callbacks/manager';
import type { IExecuteFunctions } from 'n8n-workflow';
// Options for customizing the tracing callback configuration.
interface TracingConfig {
	// Extra key/value pairs merged into the run metadata.
	additionalMetadata?: Record<string, unknown>;
}
/**
 * Builds the LangChain callback configuration used to trace a node run,
 * tagging it with workflow, node, and execution-id metadata.
 *
 * @param context execution context of the node being traced
 * @param config optional extra metadata to merge into the trace
 */
export function getTracingConfig(
	context: IExecuteFunctions,
	config: TracingConfig = {},
): BaseCallbackConfig {
	// Reuse the parent node's callback manager when one was handed down.
	const callbacks = context.getParentCallbackManager?.();

	const metadata = {
		execution_id: context.getExecutionId(),
		workflow: context.getWorkflow(),
		node: context.getNode().name,
		...(config.additionalMetadata ?? {}),
	};

	return {
		runName: `[${context.getWorkflow().name}] ${context.getNode().name}`,
		metadata,
		callbacks,
	};
}

View file

@ -48,6 +48,7 @@
"file-type": "16.5.4", "file-type": "16.5.4",
"form-data": "catalog:", "form-data": "catalog:",
"iconv-lite": "catalog:", "iconv-lite": "catalog:",
"langchain": "catalog:",
"lodash": "catalog:", "lodash": "catalog:",
"luxon": "catalog:", "luxon": "catalog:",
"mime-types": "2.1.35", "mime-types": "2.1.35",

View file

@ -51,6 +51,7 @@ import {
sleep, sleep,
ExecutionCancelledError, ExecutionCancelledError,
Node, Node,
AiRootNode,
} from 'n8n-workflow'; } from 'n8n-workflow';
import PCancelable from 'p-cancelable'; import PCancelable from 'p-cancelable';
import Container from 'typedi'; import Container from 'typedi';
@ -1047,6 +1048,7 @@ export class WorkflowExecute {
if (nodeType.execute) { if (nodeType.execute) {
const closeFunctions: CloseFunction[] = []; const closeFunctions: CloseFunction[] = [];
const context = new ExecuteContext( const context = new ExecuteContext(
workflow, workflow,
node, node,
@ -1061,10 +1063,15 @@ export class WorkflowExecute {
abortSignal, abortSignal,
); );
const data = let data: INodeExecutionData[][] | null;
nodeType instanceof Node if (nodeType instanceof AiRootNode) {
? await nodeType.execute(context) data = await nodeType.execute(context.getAiRootNodeExecuteFunctions());
: await nodeType.execute.call(context); } else {
data =
nodeType instanceof Node
? await nodeType.execute(context)
: await nodeType.execute.call(context);
}
const closeFunctionsResults = await Promise.allSettled( const closeFunctionsResults = await Promise.allSettled(
closeFunctions.map(async (fn) => await fn()), closeFunctions.map(async (fn) => await fn()),

View file

@ -1,3 +1,4 @@
import type { CallbackManager } from '@langchain/core/callbacks/manager';
import { get } from 'lodash'; import { get } from 'lodash';
import type { import type {
Workflow, Workflow,
@ -9,7 +10,6 @@ import type {
ITaskDataConnections, ITaskDataConnections,
IExecuteData, IExecuteData,
ICredentialDataDecryptedObject, ICredentialDataDecryptedObject,
CallbackManager,
IExecuteWorkflowInfo, IExecuteWorkflowInfo,
RelatedExecution, RelatedExecution,
ExecuteWorkflowData, ExecuteWorkflowData,
@ -28,6 +28,7 @@ import {
NodeConnectionType, NodeConnectionType,
WAIT_INDEFINITELY, WAIT_INDEFINITELY,
WorkflowDataProxy, WorkflowDataProxy,
jsonStringify,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { Container } from 'typedi'; import { Container } from 'typedi';
@ -236,14 +237,14 @@ export class BaseExecuteContext extends NodeExecutionContext {
} }
} }
logAiEvent(eventName: AiEvent, msg: string) { logAiEvent(eventName: AiEvent, msg: object) {
return this.additionalData.logAiEvent(eventName, { return this.additionalData.logAiEvent(eventName, {
executionId: this.additionalData.executionId ?? 'unsaved-execution', executionId: this.additionalData.executionId ?? 'unsaved-execution',
nodeName: this.node.name, nodeName: this.node.name,
workflowName: this.workflow.name ?? 'Unnamed workflow', workflowName: this.workflow.name ?? 'Unnamed workflow',
nodeType: this.node.type, nodeType: this.node.type,
workflowId: this.workflow.id ?? 'unsaved-workflow', workflowId: this.workflow.id ?? 'unsaved-workflow',
msg, msg: jsonStringify(msg),
}); });
} }
} }

View file

@ -1,6 +1,9 @@
import type { BaseCallbackConfig, CallbackManager } from '@langchain/core/callbacks/manager';
import type { BaseOutputParser } from '@langchain/core/output_parsers';
import type { DynamicStructuredTool, Tool } from '@langchain/core/tools';
import type { import type {
AINodeConnectionType, AINodeConnectionType,
CallbackManager, AiRootNodeExecuteFunctions,
CloseFunction, CloseFunction,
IExecuteData, IExecuteData,
IExecuteFunctions, IExecuteFunctions,
@ -12,14 +15,17 @@ import type {
ITaskDataConnections, ITaskDataConnections,
IWorkflowExecuteAdditionalData, IWorkflowExecuteAdditionalData,
Result, Result,
TracingConfig,
Workflow, Workflow,
WorkflowExecuteMode, WorkflowExecuteMode,
ZodObjectAny,
} from 'n8n-workflow'; } from 'n8n-workflow';
import { import {
ApplicationError, ApplicationError,
createDeferredPromise, createDeferredPromise,
createEnvProviderState, createEnvProviderState,
NodeConnectionType, NodeConnectionType,
NodeOperationError,
} from 'n8n-workflow'; } from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle // eslint-disable-next-line import/no-cycle
@ -40,6 +46,8 @@ import {
} from '@/NodeExecuteFunctions'; } from '@/NodeExecuteFunctions';
import { BaseExecuteContext } from './base-execute-context'; import { BaseExecuteContext } from './base-execute-context';
import { N8nTool } from './n8n-tool';
import { escapeSingleCurlyBrackets } from './utils';
export class ExecuteContext extends BaseExecuteContext implements IExecuteFunctions { export class ExecuteContext extends BaseExecuteContext implements IExecuteFunctions {
readonly helpers: IExecuteFunctions['helpers']; readonly helpers: IExecuteFunctions['helpers'];
@ -206,4 +214,135 @@ export class ExecuteContext extends BaseExecuteContext implements IExecuteFuncti
getParentCallbackManager(): CallbackManager | undefined { getParentCallbackManager(): CallbackManager | undefined {
return this.additionalData.parentCallbackManager; return this.additionalData.parentCallbackManager;
} }
/**
 * Builds the extended execution context handed to `AiRootNode.execute()`.
 *
 * Returns a new object whose prototype is this ExecuteContext, with the AI
 * helper methods attached as own properties via descriptors (descriptor
 * defaults make them non-writable and non-enumerable).
 *
 * NOTE(review): the returned object already inherits these methods through
 * its prototype chain, so redefining them as own properties looks
 * redundant — confirm whether the explicit shadowing is intentional.
 */
getAiRootNodeExecuteFunctions(): AiRootNodeExecuteFunctions {
	const {
		getConnectedTools,
		getPromptInputByType,
		getTracingConfig,
		extractParsedOutput,
		checkForStructuredTools,
	} = this;
	return Object.create(this, {
		getConnectedTools: { value: getConnectedTools },
		getPromptInputByType: { value: getPromptInputByType },
		getTracingConfig: { value: getTracingConfig },
		extractParsedOutput: { value: extractParsedOutput },
		checkForStructuredTools: { value: checkForStructuredTools },
	});
}
/**
 * Collects all tools wired into the AiTool input connection.
 *
 * @param enforceUniqueNames when true, throws if two tools share a name
 * @param convertStructuredTool when true, N8nTool instances are converted
 *   to plain dynamic tools
 * @param escapeCurlyBrackets when true, single `{`/`}` in tool descriptions
 *   are escaped (so they survive prompt templating)
 */
async getConnectedTools(
	enforceUniqueNames: boolean,
	convertStructuredTool = true,
	escapeCurlyBrackets = false,
) {
	const tools =
		((await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]) || [];

	// Without uniqueness enforcement the raw list is returned untouched.
	if (!enforceUniqueNames) return tools;

	const usedNames = new Set<string>();
	const result = [];

	for (const tool of tools) {
		if (usedNames.has(tool.name)) {
			throw new NodeOperationError(
				this.node,
				`You have multiple tools with the same name: '${tool.name}', please rename them to avoid conflicts`,
			);
		}
		usedNames.add(tool.name);

		if (escapeCurlyBrackets) {
			tool.description = escapeSingleCurlyBrackets(tool.description) ?? tool.description;
		}

		result.push(convertStructuredTool && tool instanceof N8nTool ? tool.asDynamicTool() : tool);
	}

	return result;
}
/**
 * Resolves the prompt text for the current item.
 *
 * Reads the prompt-type parameter (default parameter name: 'promptType');
 * when it equals 'auto' the prompt is taken from the incoming item's
 * `chatInput` field (what the chat trigger node outputs), otherwise it is
 * read from the text parameter (default parameter name: 'text').
 *
 * NOTE(review): the previous defaults were swapped (`promptTypeKey = 'text'`,
 * `inputKey = 'promptType'`), which made the 'auto' check read the prompt
 * text instead of the prompt-type selector. Fixed to match the parameter
 * names; callers passing the keys explicitly are unaffected.
 *
 * @throws NodeOperationError when no prompt can be resolved
 */
getPromptInputByType(
	itemIndex: number,
	promptTypeKey: string = 'promptType',
	inputKey: string = 'text',
) {
	const promptType = this.getNodeParameter(promptTypeKey, itemIndex) as string;

	const input =
		promptType === 'auto'
			? (this.evaluateExpression('{{ $json["chatInput"] }}', itemIndex) as string)
			: (this.getNodeParameter(inputKey, itemIndex) as string);

	if (input === undefined) {
		throw new NodeOperationError(this.node, 'No prompt specified', {
			description:
				"Expected to find the prompt in an input field called 'chatInput' (this is what the chat trigger node outputs). To use something else, change the 'Prompt' parameter",
		});
	}

	return input;
}
/**
 * Assembles the LangChain callback configuration used to trace this node's
 * execution, tagging runs with workflow/node/execution metadata.
 */
getTracingConfig(config: TracingConfig = {}): BaseCallbackConfig {
	const { workflow, node } = this;
	// Reuse the parent node's callback manager when one was handed down.
	const callbacks = this.getParentCallbackManager?.();

	return {
		runName: `[${workflow.name}] ${node.name}`,
		metadata: {
			execution_id: this.getExecutionId(),
			workflow,
			node: node.name,
			...(config.additionalMetadata ?? {}),
		},
		callbacks,
	};
}
/**
 * Runs the given output parser over `output` and returns the parsed value.
 *
 * Node versions <= 1.6 get the parser result verbatim; from 1.7 onward the
 * `output` property of the parsed result is preferred, with the full result
 * as fallback when that property is absent.
 */
async extractParsedOutput(
	outputParser: BaseOutputParser<unknown>,
	output: string,
): Promise<Record<string, unknown> | undefined> {
	const parsed = (await outputParser.parse(output)) as {
		output: Record<string, unknown>;
	};

	if (this.node.typeVersion <= 1.6) {
		return parsed;
	}

	return parsed?.output ?? parsed;
}
/**
 * Throws when any connected tool is a DynamicStructuredTool, since only the
 * "Tools Agent" supports structured tools.
 *
 * @param tools connected tools to validate
 * @param node node to attribute the error to
 * @param currentAgentType agent display name used in the error message
 * @throws NodeOperationError listing the incompatible tools
 */
checkForStructuredTools(
	tools: Array<Tool | DynamicStructuredTool<ZodObjectAny>>,
	node: INode,
	currentAgentType: string,
) {
	// NOTE(review): matching on constructor.name is brittle (breaks under
	// minification or subclassing) — confirm whether instanceof is viable here.
	const dynamicStructuredTools = tools.filter(
		(tool) => tool.constructor.name === 'DynamicStructuredTool',
	);
	if (dynamicStructuredTools.length > 0) {
		// Quote each offending tool's name for the error description.
		const getToolName = (tool: Tool | DynamicStructuredTool) => `"${tool.name}"`;
		throw new NodeOperationError(
			node,
			`The selected tools are not supported by "${currentAgentType}", please use "Tools Agent" instead`,
			{
				itemIndex: 0,
				description: `Incompatible connected tools: ${dynamicStructuredTools.map(getToolName).join(', ')}`,
			},
		);
	}
}
}
} }

View file

@ -1,8 +1,12 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/no-unsafe-return */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
/* eslint-disable @typescript-eslint/no-unsafe-argument */
import type { DynamicStructuredToolInput } from '@langchain/core/tools'; import type { DynamicStructuredToolInput } from '@langchain/core/tools';
import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools'; import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools';
import type { ISupplyDataFunctions, IDataObject } from 'n8n-workflow';
import { NodeConnectionType, jsonParse, NodeOperationError } from 'n8n-workflow';
import { StructuredOutputParser } from 'langchain/output_parsers'; import { StructuredOutputParser } from 'langchain/output_parsers';
import type { ISupplyDataFunctions, IDataObject } from 'n8n-workflow';
import { NodeConnectionType, jsonParse, NodeOperationError, ensureError } from 'n8n-workflow';
import type { ZodTypeAny } from 'zod'; import type { ZodTypeAny } from 'zod';
import { ZodBoolean, ZodNullable, ZodNumber, ZodObject, ZodOptional } from 'zod'; import { ZodBoolean, ZodNullable, ZodNumber, ZodObject, ZodOptional } from 'zod';
@ -28,7 +32,7 @@ const getParametersDescription = (parameters: Array<[string, ZodTypeAny]>) =>
) )
.join(',\n '); .join(',\n ');
export const prepareFallbackToolDescription = (toolDescription: string, schema: ZodObject<any>) => { const prepareFallbackToolDescription = (toolDescription: string, schema: ZodObject<any>) => {
let description = `${toolDescription}`; let description = `${toolDescription}`;
const toolParameters = Object.entries<ZodTypeAny>(schema.shape); const toolParameters = Object.entries<ZodTypeAny>(schema.shape);
@ -80,7 +84,7 @@ export class N8nTool extends DynamicStructuredTool {
// Finally throw an error if we were unable to parse the query // Finally throw an error if we were unable to parse the query
throw new NodeOperationError( throw new NodeOperationError(
context.getNode(), context.getNode(),
`Input is not a valid JSON: ${error.message}`, `Input is not a valid JSON: ${ensureError(error).message}`,
); );
} }
} }
@ -92,14 +96,12 @@ export class N8nTool extends DynamicStructuredTool {
try { try {
// Call tool function with parsed query // Call tool function with parsed query
const result = await func(parsedQuery); return await func(parsedQuery);
return result;
} catch (e) { } catch (e) {
const { index } = context.addInputData(NodeConnectionType.AiTool, [[{ json: { query } }]]); const { index } = context.addInputData(NodeConnectionType.AiTool, [[{ json: { query } }]]);
void context.addOutputData(NodeConnectionType.AiTool, index, e); void context.addOutputData(NodeConnectionType.AiTool, index, e);
return e.toString(); return ensureError(e).toString();
} }
}; };

View file

@ -421,3 +421,21 @@ export function getAdditionalKeys(
$resumeWebhookUrl: resumeUrl, $resumeWebhookUrl: resumeUrl,
}; };
} }
/**
 * Doubles curly brackets that are not already part of a doubled pair, so the
 * text survives LangChain prompt templating unchanged.
 *
 * `{` → `{{`, `}` → `}}`, `{{{` → `{{{{`, `}}}` → `}}}}`; existing `{{` / `}}`
 * pairs are left alone.
 *
 * @param text text to escape; `undefined` is passed through unchanged
 */
export function escapeSingleCurlyBrackets(text?: string): string | undefined {
	if (text === undefined) return undefined;

	// Doubling the triples first keeps the single-bracket pass from
	// re-processing characters that already belong to a {{{ / }}} run.
	const withTriplesDoubled = text
		.replace(/(?<!{){{{(?!{)/g, '{{{{')
		.replace(/(?<!})}}}(?!})/g, '}}}}');

	// Lone brackets (not adjacent to another bracket of the same kind)
	// become doubled; existing doubles fail both lookarounds and stay put.
	return withTriplesDoubled
		.replace(/(?<!{){(?!{)/g, '{{')
		.replace(/(?<!})}(?!})/g, '}}');
}

View file

@ -1,6 +1,7 @@
/* eslint-disable @typescript-eslint/no-explicit-any */ /* eslint-disable @typescript-eslint/no-explicit-any */
import type { CallbackManager, BaseCallbackConfig } from '@langchain/core/callbacks/manager';
import type { CallbackManager as CallbackManagerLC } from '@langchain/core/callbacks/manager'; import type { BaseOutputParser } from '@langchain/core/output_parsers';
import type { DynamicStructuredTool, Tool } from '@langchain/core/tools';
import type { AxiosProxyConfig, GenericAbortSignal } from 'axios'; import type { AxiosProxyConfig, GenericAbortSignal } from 'axios';
import type * as express from 'express'; import type * as express from 'express';
import type FormData from 'form-data'; import type FormData from 'form-data';
@ -11,6 +12,7 @@ import type { Client as SSHClient } from 'ssh2';
import type { Readable } from 'stream'; import type { Readable } from 'stream';
import type { SecureContextOptions } from 'tls'; import type { SecureContextOptions } from 'tls';
import type { URLSearchParams } from 'url'; import type { URLSearchParams } from 'url';
import type { ZodObject } from 'zod';
import type { CODE_EXECUTION_MODES, CODE_LANGUAGES, LOG_LEVELS } from './Constants'; import type { CODE_EXECUTION_MODES, CODE_LANGUAGES, LOG_LEVELS } from './Constants';
import type { IDeferredPromise } from './DeferredPromise'; import type { IDeferredPromise } from './DeferredPromise';
@ -26,6 +28,8 @@ import type { Workflow } from './Workflow';
import type { EnvProviderState } from './WorkflowDataProxyEnvProvider'; import type { EnvProviderState } from './WorkflowDataProxyEnvProvider';
import type { WorkflowHooks } from './WorkflowHooks'; import type { WorkflowHooks } from './WorkflowHooks';
export type ZodObjectAny = ZodObject<any, any, any, any>;
export interface IAdditionalCredentialOptions { export interface IAdditionalCredentialOptions {
oauth2?: IOAuth2Options; oauth2?: IOAuth2Options;
credentialsDecrypted?: ICredentialsDecrypted; credentialsDecrypted?: ICredentialsDecrypted;
@ -893,7 +897,7 @@ type BaseExecutionFunctions = FunctionsBaseWithRequiredKeys<'getMode'> & {
getInputSourceData(inputIndex?: number, connectionType?: NodeConnectionType): ISourceData; getInputSourceData(inputIndex?: number, connectionType?: NodeConnectionType): ISourceData;
getExecutionCancelSignal(): AbortSignal | undefined; getExecutionCancelSignal(): AbortSignal | undefined;
onExecutionCancellation(handler: () => unknown): void; onExecutionCancellation(handler: () => unknown): void;
logAiEvent(eventName: AiEvent, msg?: string | undefined): void; logAiEvent(eventName: AiEvent, msg?: object): void;
}; };
// TODO: Create later own type only for Config-Nodes // TODO: Create later own type only for Config-Nodes
@ -919,8 +923,6 @@ export type IExecuteFunctions = ExecuteFunctions.GetNodeParameterFn &
putExecutionToWait(waitTill: Date): Promise<void>; putExecutionToWait(waitTill: Date): Promise<void>;
sendMessageToUI(message: any): void; sendMessageToUI(message: any): void;
sendResponse(response: IExecuteResponsePromiseData): void; sendResponse(response: IExecuteResponsePromiseData): void;
// TODO: Make this one then only available in the new config one
addInputData( addInputData(
connectionType: NodeConnectionType, connectionType: NodeConnectionType,
data: INodeExecutionData[][] | ExecutionError, data: INodeExecutionData[][] | ExecutionError,
@ -932,6 +934,7 @@ export type IExecuteFunctions = ExecuteFunctions.GetNodeParameterFn &
data: INodeExecutionData[][] | ExecutionError, data: INodeExecutionData[][] | ExecutionError,
metadata?: ITaskMetadata, metadata?: ITaskMetadata,
): void; ): void;
getAiRootNodeExecuteFunctions(): AiRootNodeExecuteFunctions;
nodeHelpers: NodeHelperFunctions; nodeHelpers: NodeHelperFunctions;
helpers: RequestHelperFunctions & helpers: RequestHelperFunctions &
@ -976,26 +979,49 @@ export interface IExecuteSingleFunctions extends BaseExecutionFunctions {
}; };
} }
/** Options for customizing the LangChain tracing configuration. */
export interface TracingConfig {
	/** Extra key/value pairs merged into the run metadata. */
	additionalMetadata?: Record<string, unknown>;
}
// TODO: `Pick` from IExecuteFunctions, but do not extend completely
/**
 * Execution context passed to `AiRootNode.execute()`: the regular
 * IExecuteFunctions extended with helpers for working with connected
 * AI subnodes (models, memory, tools).
 */
export type AiRootNodeExecuteFunctions = IExecuteFunctions & {
	/**
	 * Returns the tools connected to the AiTool input; can enforce unique
	 * names, convert structured tools, and escape `{`/`}` in descriptions.
	 */
	getConnectedTools(
		enforceUniqueNames: boolean,
		convertStructuredTool?: boolean,
		escapeCurlyBrackets?: boolean,
	): Promise<Tool[]>;

	/** Resolves the prompt text for an item based on the prompt-type parameter. */
	getPromptInputByType(itemIndex: number, promptTypeKey?: string, inputKey?: string): string;

	/** Builds the LangChain callback/tracing configuration for this node. */
	getTracingConfig(config?: TracingConfig): BaseCallbackConfig;

	/** Parses raw model output with the given parser (node-version-aware unwrapping). */
	extractParsedOutput(
		outputParser: BaseOutputParser<unknown>,
		output: string,
	): Promise<Record<string, unknown> | undefined>;

	/** Throws when structured tools are connected to an agent that cannot use them. */
	checkForStructuredTools(
		tools: Array<Tool | DynamicStructuredTool<ZodObjectAny>>,
		node: INode,
		currentAgentType: string,
	): void;
};
export type ISupplyDataFunctions = ExecuteFunctions.GetNodeParameterFn & export type ISupplyDataFunctions = ExecuteFunctions.GetNodeParameterFn &
FunctionsBaseWithRequiredKeys<'getMode'> & FunctionsBaseWithRequiredKeys<'getMode'> &
Pick< Pick<
IExecuteFunctions, IExecuteFunctions,
| 'addInputData' | 'addInputData'
| 'addOutputData' | 'addOutputData'
| 'continueOnFail'
| 'evaluateExpression'
| 'executeWorkflow'
| 'getExecutionCancelSignal'
| 'getInputConnectionData' | 'getInputConnectionData'
| 'getInputData' | 'getInputData'
| 'getNodeOutputs' | 'getNodeOutputs'
| 'executeWorkflow' | 'getWorkflowDataProxy'
| 'logAiEvent'
| 'onExecutionCancellation'
| 'sendMessageToUI' | 'sendMessageToUI'
| 'helpers' | 'helpers'
> & { >;
continueOnFail(): boolean;
evaluateExpression(expression: string, itemIndex: number): NodeParameterValueType;
getWorkflowDataProxy(itemIndex: number): IWorkflowDataProxyData;
getExecutionCancelSignal(): AbortSignal | undefined;
onExecutionCancellation(handler: () => unknown): void;
logAiEvent(eventName: AiEvent, msg?: string | undefined): void;
};
export interface IExecutePaginationFunctions extends IExecuteSingleFunctions { export interface IExecutePaginationFunctions extends IExecuteSingleFunctions {
makeRoutingRequest( makeRoutingRequest(
@ -1606,6 +1632,14 @@ export abstract class Node {
poll?(context: IPollFunctions): Promise<INodeExecutionData[][] | null>; poll?(context: IPollFunctions): Promise<INodeExecutionData[][] | null>;
} }
/**
 * Base class for AI root nodes — nodes that can invoke subnodes such as
 * models, memory, and tools — and therefore execute with the extended
 * `AiRootNodeExecuteFunctions` context instead of plain `IExecuteFunctions`.
 */
export abstract class AiRootNode extends Node {
	execute?(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]>;
}
export interface IVersionedNodeType { export interface IVersionedNodeType {
nodeVersions: { nodeVersions: {
[key: number]: INodeType; [key: number]: INodeType;
@ -2776,8 +2810,6 @@ export type BannerName =
export type Functionality = 'regular' | 'configuration-node' | 'pairedItem'; export type Functionality = 'regular' | 'configuration-node' | 'pairedItem';
export type CallbackManager = CallbackManagerLC;
export type IPersonalizationSurveyAnswersV4 = { export type IPersonalizationSurveyAnswersV4 = {
version: 'v4'; version: 'v4';
personalization_survey_submitted_at: string; personalization_survey_submitted_at: string;

View file

@ -45,6 +45,9 @@ catalogs:
iconv-lite: iconv-lite:
specifier: 0.6.3 specifier: 0.6.3
version: 0.6.3 version: 0.6.3
langchain:
specifier: 0.3.6
version: 0.3.6
lodash: lodash:
specifier: 4.17.21 specifier: 4.17.21
version: 4.17.21 version: 4.17.21
@ -541,7 +544,7 @@ importers:
specifier: 23.0.1 specifier: 23.0.1
version: 23.0.1 version: 23.0.1
langchain: langchain:
specifier: 0.3.6 specifier: 'catalog:'
version: 0.3.6(e4rnrwhosnp2xiru36mqgdy2bu) version: 0.3.6(e4rnrwhosnp2xiru36mqgdy2bu)
lodash: lodash:
specifier: 'catalog:' specifier: 'catalog:'
@ -1114,7 +1117,7 @@ importers:
dependencies: dependencies:
'@langchain/core': '@langchain/core':
specifier: 'catalog:' specifier: 'catalog:'
version: 0.3.19(openai@4.73.1(zod@3.23.8)) version: 0.3.19(openai@4.73.1)
'@n8n/client-oauth2': '@n8n/client-oauth2':
specifier: workspace:* specifier: workspace:*
version: link:../@n8n/client-oauth2 version: link:../@n8n/client-oauth2
@ -1148,6 +1151,9 @@ importers:
iconv-lite: iconv-lite:
specifier: 'catalog:' specifier: 'catalog:'
version: 0.6.3 version: 0.6.3
langchain:
specifier: 'catalog:'
version: 0.3.6(@langchain/anthropic@0.3.8(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/aws@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/cohere@0.3.1(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(@langchain/google-genai@0.1.4(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8))(@langchain/google-vertexai@0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8))(@langchain/groq@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/mistralai@0.2.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/ollama@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(axios@1.7.4)(cheerio@1.0.0)(handlebars@4.7.8)(openai@4.73.1(zod@3.23.8))
lodash: lodash:
specifier: 'catalog:' specifier: 'catalog:'
version: 4.17.21 version: 4.17.21
@ -13974,6 +13980,25 @@ snapshots:
- '@aws-sdk/client-sso-oidc' - '@aws-sdk/client-sso-oidc'
- aws-crt - aws-crt
'@aws-sdk/credential-provider-ini@3.666.0(@aws-sdk/client-sts@3.666.0)':
dependencies:
'@aws-sdk/client-sts': 3.666.0
'@aws-sdk/credential-provider-env': 3.664.0
'@aws-sdk/credential-provider-http': 3.666.0
'@aws-sdk/credential-provider-process': 3.664.0
'@aws-sdk/credential-provider-sso': 3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))
'@aws-sdk/credential-provider-web-identity': 3.664.0(@aws-sdk/client-sts@3.666.0)
'@aws-sdk/types': 3.664.0
'@smithy/credential-provider-imds': 3.2.4
'@smithy/property-provider': 3.1.7
'@smithy/shared-ini-file-loader': 3.1.8
'@smithy/types': 3.5.0
tslib: 2.6.2
transitivePeerDependencies:
- '@aws-sdk/client-sso-oidc'
- aws-crt
optional: true
'@aws-sdk/credential-provider-node@3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@aws-sdk/client-sts@3.666.0)': '@aws-sdk/credential-provider-node@3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@aws-sdk/client-sts@3.666.0)':
dependencies: dependencies:
'@aws-sdk/credential-provider-env': 3.664.0 '@aws-sdk/credential-provider-env': 3.664.0
@ -13993,6 +14018,26 @@ snapshots:
- '@aws-sdk/client-sts' - '@aws-sdk/client-sts'
- aws-crt - aws-crt
'@aws-sdk/credential-provider-node@3.666.0(@aws-sdk/client-sts@3.666.0)':
dependencies:
'@aws-sdk/credential-provider-env': 3.664.0
'@aws-sdk/credential-provider-http': 3.666.0
'@aws-sdk/credential-provider-ini': 3.666.0(@aws-sdk/client-sts@3.666.0)
'@aws-sdk/credential-provider-process': 3.664.0
'@aws-sdk/credential-provider-sso': 3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))
'@aws-sdk/credential-provider-web-identity': 3.664.0(@aws-sdk/client-sts@3.666.0)
'@aws-sdk/types': 3.664.0
'@smithy/credential-provider-imds': 3.2.4
'@smithy/property-provider': 3.1.7
'@smithy/shared-ini-file-loader': 3.1.8
'@smithy/types': 3.5.0
tslib: 2.6.2
transitivePeerDependencies:
- '@aws-sdk/client-sso-oidc'
- '@aws-sdk/client-sts'
- aws-crt
optional: true
'@aws-sdk/credential-provider-process@3.664.0': '@aws-sdk/credential-provider-process@3.664.0':
dependencies: dependencies:
'@aws-sdk/types': 3.664.0 '@aws-sdk/types': 3.664.0
@ -14022,6 +14067,29 @@ snapshots:
'@smithy/types': 3.5.0 '@smithy/types': 3.5.0
tslib: 2.6.2 tslib: 2.6.2
'@aws-sdk/credential-providers@3.666.0':
dependencies:
'@aws-sdk/client-cognito-identity': 3.666.0
'@aws-sdk/client-sso': 3.666.0
'@aws-sdk/client-sts': 3.666.0
'@aws-sdk/credential-provider-cognito-identity': 3.666.0
'@aws-sdk/credential-provider-env': 3.664.0
'@aws-sdk/credential-provider-http': 3.666.0
'@aws-sdk/credential-provider-ini': 3.666.0(@aws-sdk/client-sts@3.666.0)
'@aws-sdk/credential-provider-node': 3.666.0(@aws-sdk/client-sts@3.666.0)
'@aws-sdk/credential-provider-process': 3.664.0
'@aws-sdk/credential-provider-sso': 3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))
'@aws-sdk/credential-provider-web-identity': 3.664.0(@aws-sdk/client-sts@3.666.0)
'@aws-sdk/types': 3.664.0
'@smithy/credential-provider-imds': 3.2.4
'@smithy/property-provider': 3.1.7
'@smithy/types': 3.5.0
tslib: 2.6.2
transitivePeerDependencies:
- '@aws-sdk/client-sso-oidc'
- aws-crt
optional: true
'@aws-sdk/credential-providers@3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))': '@aws-sdk/credential-providers@3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))':
dependencies: dependencies:
'@aws-sdk/client-cognito-identity': 3.666.0 '@aws-sdk/client-cognito-identity': 3.666.0
@ -16087,6 +16155,18 @@ snapshots:
- encoding - encoding
- supports-color - supports-color
'@langchain/anthropic@0.3.8(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))':
dependencies:
'@anthropic-ai/sdk': 0.27.3(encoding@0.1.13)
'@langchain/core': 0.3.19(openai@4.73.1)
fast-xml-parser: 4.4.1
zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8)
transitivePeerDependencies:
- encoding
- supports-color
optional: true
'@langchain/aws@0.1.2(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@aws-sdk/client-sts@3.666.0)(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))': '@langchain/aws@0.1.2(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@aws-sdk/client-sts@3.666.0)(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))':
dependencies: dependencies:
'@aws-sdk/client-bedrock-agent-runtime': 3.666.0 '@aws-sdk/client-bedrock-agent-runtime': 3.666.0
@ -16101,6 +16181,21 @@ snapshots:
- '@aws-sdk/client-sts' - '@aws-sdk/client-sts'
- aws-crt - aws-crt
'@langchain/aws@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))':
dependencies:
'@aws-sdk/client-bedrock-agent-runtime': 3.666.0
'@aws-sdk/client-bedrock-runtime': 3.666.0
'@aws-sdk/client-kendra': 3.666.0
'@aws-sdk/credential-provider-node': 3.666.0(@aws-sdk/client-sts@3.666.0)
'@langchain/core': 0.3.19(openai@4.73.1)
zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8)
transitivePeerDependencies:
- '@aws-sdk/client-sso-oidc'
- '@aws-sdk/client-sts'
- aws-crt
optional: true
'@langchain/cohere@0.3.1(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)': '@langchain/cohere@0.3.1(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)':
dependencies: dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
@ -16113,6 +16208,19 @@ snapshots:
- aws-crt - aws-crt
- encoding - encoding
'@langchain/cohere@0.3.1(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
cohere-ai: 7.14.0
uuid: 10.0.0
zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8)
transitivePeerDependencies:
- '@aws-sdk/client-sso-oidc'
- aws-crt
- encoding
optional: true
'@langchain/community@0.3.15(vc5hvyy27o4cmm4jplsptc2fqm)': '@langchain/community@0.3.15(vc5hvyy27o4cmm4jplsptc2fqm)':
dependencies: dependencies:
'@ibm-cloud/watsonx-ai': 1.1.2 '@ibm-cloud/watsonx-ai': 1.1.2
@ -16200,22 +16308,6 @@ snapshots:
transitivePeerDependencies: transitivePeerDependencies:
- openai - openai
'@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))':
dependencies:
ansi-styles: 5.2.0
camelcase: 6.3.0
decamelize: 1.2.0
js-tiktoken: 1.0.12
langsmith: 0.2.3(openai@4.73.1(zod@3.23.8))
mustache: 4.2.0
p-queue: 6.6.2
p-retry: 4.6.2
uuid: 10.0.0
zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8)
transitivePeerDependencies:
- openai
'@langchain/core@0.3.19(openai@4.73.1)': '@langchain/core@0.3.19(openai@4.73.1)':
dependencies: dependencies:
ansi-styles: 5.2.0 ansi-styles: 5.2.0
@ -16240,6 +16332,15 @@ snapshots:
transitivePeerDependencies: transitivePeerDependencies:
- zod - zod
'@langchain/google-common@0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
uuid: 10.0.0
zod-to-json-schema: 3.23.3(zod@3.23.8)
transitivePeerDependencies:
- zod
optional: true
'@langchain/google-gauth@0.1.3(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)(zod@3.23.8)': '@langchain/google-gauth@0.1.3(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)(zod@3.23.8)':
dependencies: dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
@ -16250,6 +16351,17 @@ snapshots:
- supports-color - supports-color
- zod - zod
'@langchain/google-gauth@0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
'@langchain/google-common': 0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)
google-auth-library: 8.9.0(encoding@0.1.13)
transitivePeerDependencies:
- encoding
- supports-color
- zod
optional: true
'@langchain/google-genai@0.1.4(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(zod@3.23.8)': '@langchain/google-genai@0.1.4(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(zod@3.23.8)':
dependencies: dependencies:
'@google/generative-ai': 0.21.0 '@google/generative-ai': 0.21.0
@ -16258,6 +16370,15 @@ snapshots:
transitivePeerDependencies: transitivePeerDependencies:
- zod - zod
'@langchain/google-genai@0.1.4(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)':
dependencies:
'@google/generative-ai': 0.21.0
'@langchain/core': 0.3.19(openai@4.73.1)
zod-to-json-schema: 3.23.3(zod@3.23.8)
transitivePeerDependencies:
- zod
optional: true
'@langchain/google-vertexai@0.1.3(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)(zod@3.23.8)': '@langchain/google-vertexai@0.1.3(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)(zod@3.23.8)':
dependencies: dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
@ -16267,6 +16388,16 @@ snapshots:
- supports-color - supports-color
- zod - zod
'@langchain/google-vertexai@0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
'@langchain/google-gauth': 0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)
transitivePeerDependencies:
- encoding
- supports-color
- zod
optional: true
'@langchain/groq@0.1.2(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)': '@langchain/groq@0.1.2(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)':
dependencies: dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
@ -16278,6 +16409,18 @@ snapshots:
- encoding - encoding
- supports-color - supports-color
'@langchain/groq@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
'@langchain/openai': 0.3.14(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
groq-sdk: 0.5.0(encoding@0.1.13)
zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8)
transitivePeerDependencies:
- encoding
- supports-color
optional: true
'@langchain/mistralai@0.2.0(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))': '@langchain/mistralai@0.2.0(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))':
dependencies: dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
@ -16286,12 +16429,28 @@ snapshots:
zod: 3.23.8 zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8) zod-to-json-schema: 3.23.3(zod@3.23.8)
'@langchain/mistralai@0.2.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
'@mistralai/mistralai': 1.3.4(zod@3.23.8)
uuid: 10.0.0
zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8)
optional: true
'@langchain/ollama@0.1.2(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))': '@langchain/ollama@0.1.2(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))':
dependencies: dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
ollama: 0.5.9 ollama: 0.5.9
uuid: 10.0.0 uuid: 10.0.0
'@langchain/ollama@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
ollama: 0.5.9
uuid: 10.0.0
optional: true
'@langchain/openai@0.3.14(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)': '@langchain/openai@0.3.14(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)':
dependencies: dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
@ -16303,6 +16462,17 @@ snapshots:
- encoding - encoding
- supports-color - supports-color
'@langchain/openai@0.3.14(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
js-tiktoken: 1.0.12
openai: 4.73.1(zod@3.23.8)
zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8)
transitivePeerDependencies:
- encoding
- supports-color
'@langchain/pinecone@0.1.3(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))': '@langchain/pinecone@0.1.3(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))':
dependencies: dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
@ -16328,6 +16498,11 @@ snapshots:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
js-tiktoken: 1.0.12 js-tiktoken: 1.0.12
'@langchain/textsplitters@0.1.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
js-tiktoken: 1.0.12
'@lezer/common@1.1.0': {} '@lezer/common@1.1.0': {}
'@lezer/common@1.2.1': {} '@lezer/common@1.2.1': {}
@ -19460,14 +19635,6 @@ snapshots:
transitivePeerDependencies: transitivePeerDependencies:
- debug - debug
axios@1.7.7:
dependencies:
follow-redirects: 1.15.6(debug@4.3.6)
form-data: 4.0.0
proxy-from-env: 1.1.0
transitivePeerDependencies:
- debug
axios@1.7.7(debug@4.3.6): axios@1.7.7(debug@4.3.6):
dependencies: dependencies:
follow-redirects: 1.15.6(debug@4.3.6) follow-redirects: 1.15.6(debug@4.3.6)
@ -20021,6 +20188,26 @@ snapshots:
'@lezer/html': 1.3.0 '@lezer/html': 1.3.0
'@lezer/lr': 1.4.0 '@lezer/lr': 1.4.0
cohere-ai@7.14.0:
dependencies:
'@aws-sdk/client-sagemaker': 3.666.0
'@aws-sdk/credential-providers': 3.666.0
'@aws-sdk/protocol-http': 3.374.0
'@aws-sdk/signature-v4': 3.374.0
form-data: 4.0.0
form-data-encoder: 4.0.2
formdata-node: 6.0.3
js-base64: 3.7.2
node-fetch: 2.7.0(encoding@0.1.13)
qs: 6.11.2
readable-stream: 4.5.2
url-join: 4.0.1
transitivePeerDependencies:
- '@aws-sdk/client-sso-oidc'
- aws-crt
- encoding
optional: true
cohere-ai@7.14.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(encoding@0.1.13): cohere-ai@7.14.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(encoding@0.1.13):
dependencies: dependencies:
'@aws-sdk/client-sagemaker': 3.666.0 '@aws-sdk/client-sagemaker': 3.666.0
@ -22384,7 +22571,7 @@ snapshots:
infisical-node@1.3.0: infisical-node@1.3.0:
dependencies: dependencies:
axios: 1.7.7 axios: 1.7.7(debug@4.3.6)
dotenv: 16.3.1 dotenv: 16.3.1
tweetnacl: 1.0.3 tweetnacl: 1.0.3
tweetnacl-util: 0.15.1 tweetnacl-util: 0.15.1
@ -23318,6 +23505,38 @@ snapshots:
kuler@2.0.0: {} kuler@2.0.0: {}
langchain@0.3.6(@langchain/anthropic@0.3.8(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/aws@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/cohere@0.3.1(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(@langchain/google-genai@0.1.4(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8))(@langchain/google-vertexai@0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8))(@langchain/groq@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/mistralai@0.2.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/ollama@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(axios@1.7.4)(cheerio@1.0.0)(handlebars@4.7.8)(openai@4.73.1(zod@3.23.8)):
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
'@langchain/openai': 0.3.14(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
'@langchain/textsplitters': 0.1.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
js-tiktoken: 1.0.12
js-yaml: 4.1.0
jsonpointer: 5.0.1
langsmith: 0.2.3(openai@4.73.1)
openapi-types: 12.1.3
p-retry: 4.6.2
uuid: 10.0.0
yaml: 2.3.4
zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8)
optionalDependencies:
'@langchain/anthropic': 0.3.8(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
'@langchain/aws': 0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
'@langchain/cohere': 0.3.1(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
'@langchain/google-genai': 0.1.4(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)
'@langchain/google-vertexai': 0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)
'@langchain/groq': 0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
'@langchain/mistralai': 0.2.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
'@langchain/ollama': 0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
axios: 1.7.4
cheerio: 1.0.0
handlebars: 4.7.8
transitivePeerDependencies:
- encoding
- openai
- supports-color
langchain@0.3.6(e4rnrwhosnp2xiru36mqgdy2bu): langchain@0.3.6(e4rnrwhosnp2xiru36mqgdy2bu):
dependencies: dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)) '@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
@ -23361,17 +23580,6 @@ snapshots:
optionalDependencies: optionalDependencies:
openai: 4.73.1(encoding@0.1.13)(zod@3.23.8) openai: 4.73.1(encoding@0.1.13)(zod@3.23.8)
langsmith@0.2.3(openai@4.73.1(zod@3.23.8)):
dependencies:
'@types/uuid': 10.0.0
commander: 10.0.1
p-queue: 6.6.2
p-retry: 4.6.2
semver: 7.6.0
uuid: 10.0.0
optionalDependencies:
openai: 4.73.1(zod@3.23.8)
langsmith@0.2.3(openai@4.73.1): langsmith@0.2.3(openai@4.73.1):
dependencies: dependencies:
'@types/uuid': 10.0.0 '@types/uuid': 10.0.0
@ -24729,7 +24937,6 @@ snapshots:
transitivePeerDependencies: transitivePeerDependencies:
- encoding - encoding
- supports-color - supports-color
optional: true
openapi-sampler@1.5.1: openapi-sampler@1.5.1:
dependencies: dependencies:
@ -25113,7 +25320,7 @@ snapshots:
posthog-node@3.2.1: posthog-node@3.2.1:
dependencies: dependencies:
axios: 1.7.7 axios: 1.7.7(debug@4.3.6)
rusha: 0.8.14 rusha: 0.8.14
transitivePeerDependencies: transitivePeerDependencies:
- debug - debug
@ -26131,7 +26338,7 @@ snapshots:
asn1.js: 5.4.1 asn1.js: 5.4.1
asn1.js-rfc2560: 5.0.1(asn1.js@5.4.1) asn1.js-rfc2560: 5.0.1(asn1.js@5.4.1)
asn1.js-rfc5280: 3.0.0 asn1.js-rfc5280: 3.0.0
axios: 1.7.7 axios: 1.7.7(debug@4.3.6)
big-integer: 1.6.51 big-integer: 1.6.51
bignumber.js: 9.1.2 bignumber.js: 9.1.2
binascii: 0.0.2 binascii: 0.0.2

View file

@ -27,6 +27,7 @@ catalog:
xss: 1.0.15 xss: 1.0.15
zod: 3.23.8 zod: 3.23.8
'@langchain/core': 0.3.19 '@langchain/core': 0.3.19
langchain: 0.3.6
catalogs: catalogs:
frontend: frontend: