create a new context type for AI Agent execution

This commit is contained in:
कारतोफ्फेलस्क्रिप्ट™ 2024-12-13 14:42:49 +01:00
parent 80eea49cf0
commit c003280ebe
No known key found for this signature in database
44 changed files with 741 additions and 576 deletions

View file

@@ -1,10 +1,9 @@
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import { AiRootNode, NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type {
INodeInputConfiguration,
INodeInputFilter,
IExecuteFunctions,
AiRootNodeExecuteFunctions,
INodeExecutionData,
INodeType,
INodeTypeDescription,
INodeProperties,
} from 'n8n-workflow';
@ -245,7 +244,7 @@ const agentTypeProperty: INodeProperties = {
default: '',
};
export class Agent implements INodeType {
export class Agent extends AiRootNode {
description: INodeTypeDescription = {
displayName: 'AI Agent',
name: 'agent',
@ -416,24 +415,27 @@ export class Agent implements INodeType {
],
};
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const agentType = this.getNodeParameter('agent', 0, '') as string;
const nodeVersion = this.getNode().typeVersion;
async execute(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]> {
const agentType = context.getNodeParameter('agent', 0, '') as string;
const nodeVersion = context.getNode().typeVersion;
if (agentType === 'conversationalAgent') {
return await conversationalAgentExecute.call(this, nodeVersion);
return await conversationalAgentExecute.call(context, nodeVersion);
} else if (agentType === 'toolsAgent') {
return await toolsAgentExecute.call(this);
return await toolsAgentExecute.call(context);
} else if (agentType === 'openAiFunctionsAgent') {
return await openAiFunctionsAgentExecute.call(this, nodeVersion);
return await openAiFunctionsAgentExecute.call(context, nodeVersion);
} else if (agentType === 'reActAgent') {
return await reActAgentAgentExecute.call(this, nodeVersion);
return await reActAgentAgentExecute.call(context, nodeVersion);
} else if (agentType === 'sqlAgent') {
return await sqlAgentAgentExecute.call(this);
return await sqlAgentAgentExecute.call(context);
} else if (agentType === 'planAndExecuteAgent') {
return await planAndExecuteAgentExecute.call(this, nodeVersion);
return await planAndExecuteAgentExecute.call(context, nodeVersion);
}
throw new NodeOperationError(this.getNode(), `The agent type "${agentType}" is not supported`);
throw new NodeOperationError(
context.getNode(),
`The agent type "${agentType}" is not supported`,
);
}
}

View file

@ -3,18 +3,15 @@ import type { BaseOutputParser } from '@langchain/core/output_parsers';
import { PromptTemplate } from '@langchain/core/prompts';
import { initializeAgentExecutorWithOptions } from 'langchain/agents';
import { CombiningOutputParser } from 'langchain/output_parsers';
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import type { AiRootNodeExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import { isChatInstance, getPromptInputByType, getConnectedTools } from '@utils/helpers';
import { isChatInstance } from '@utils/helpers';
import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser';
import { throwIfToolSchema } from '@utils/schemaParsing';
import { getTracingConfig } from '@utils/tracing';
import { checkForStructuredTools, extractParsedOutput } from '../utils';
export async function conversationalAgentExecute(
this: IExecuteFunctions,
this: AiRootNodeExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing Conversational Agent');
@ -28,10 +25,10 @@ export async function conversationalAgentExecute(
| BaseChatMemory
| undefined;
const tools = await getConnectedTools(this, nodeVersion >= 1.5, true, true);
const tools = await this.getConnectedTools(nodeVersion >= 1.5, true, true);
const outputParsers = await getOptionalOutputParsers(this);
await checkForStructuredTools(tools, this.getNode(), 'Conversational Agent');
this.checkForStructuredTools(tools, this.getNode(), 'Conversational Agent');
// TODO: Make it possible in the future to use values for other items than just 0
const options = this.getNodeParameter('options', 0, {}) as {
@ -86,12 +83,7 @@ export async function conversationalAgentExecute(
if (this.getNode().typeVersion <= 1.2) {
input = this.getNodeParameter('text', itemIndex) as string;
} else {
input = getPromptInputByType({
ctx: this,
i: itemIndex,
inputKey: 'text',
promptTypeKey: 'promptType',
});
input = this.getPromptInputByType(itemIndex);
}
if (input === undefined) {
@@ -103,11 +95,11 @@
}
const response = await agentExecutor
.withConfig(getTracingConfig(this))
.withConfig(this.getTracingConfig())
.invoke({ input, outputParsers });
if (outputParser) {
response.output = await extractParsedOutput(this, outputParser, response.output as string);
response.output = await this.extractParsedOutput(outputParser, response.output as string);
}
returnData.push({ json: response });

View file

@ -6,20 +6,16 @@ import { AgentExecutor, OpenAIAgent } from 'langchain/agents';
import { BufferMemory, type BaseChatMemory } from 'langchain/memory';
import { CombiningOutputParser } from 'langchain/output_parsers';
import {
type IExecuteFunctions,
type AiRootNodeExecuteFunctions,
type INodeExecutionData,
NodeConnectionType,
NodeOperationError,
} from 'n8n-workflow';
import { getConnectedTools, getPromptInputByType } from '@utils/helpers';
import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser';
import { getTracingConfig } from '@utils/tracing';
import { extractParsedOutput } from '../utils';
export async function openAiFunctionsAgentExecute(
this: IExecuteFunctions,
this: AiRootNodeExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing OpenAi Functions Agent');
@ -37,7 +33,7 @@ export async function openAiFunctionsAgentExecute(
const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
| BaseChatMemory
| undefined;
const tools = await getConnectedTools(this, nodeVersion >= 1.5, false);
const tools = await this.getConnectedTools(nodeVersion >= 1.5, false);
const outputParsers = await getOptionalOutputParsers(this);
const options = this.getNodeParameter('options', 0, {}) as {
systemMessage?: string;
@ -89,12 +85,7 @@ export async function openAiFunctionsAgentExecute(
if (this.getNode().typeVersion <= 1.2) {
input = this.getNodeParameter('text', itemIndex) as string;
} else {
input = getPromptInputByType({
ctx: this,
i: itemIndex,
inputKey: 'text',
promptTypeKey: 'promptType',
});
input = this.getPromptInputByType(itemIndex);
}
if (input === undefined) {
@@ -106,11 +97,11 @@
}
const response = await agentExecutor
.withConfig(getTracingConfig(this))
.withConfig(this.getTracingConfig())
.invoke({ input, outputParsers });
if (outputParser) {
response.output = await extractParsedOutput(this, outputParser, response.output as string);
response.output = await this.extractParsedOutput(outputParser, response.output as string);
}
returnData.push({ json: response });

View file

@ -4,21 +4,17 @@ import { PromptTemplate } from '@langchain/core/prompts';
import { PlanAndExecuteAgentExecutor } from 'langchain/experimental/plan_and_execute';
import { CombiningOutputParser } from 'langchain/output_parsers';
import {
type IExecuteFunctions,
type AiRootNodeExecuteFunctions,
type INodeExecutionData,
NodeConnectionType,
NodeOperationError,
} from 'n8n-workflow';
import { getConnectedTools, getPromptInputByType } from '@utils/helpers';
import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser';
import { throwIfToolSchema } from '@utils/schemaParsing';
import { getTracingConfig } from '@utils/tracing';
import { checkForStructuredTools, extractParsedOutput } from '../utils';
export async function planAndExecuteAgentExecute(
this: IExecuteFunctions,
this: AiRootNodeExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing PlanAndExecute Agent');
@ -27,9 +23,9 @@ export async function planAndExecuteAgentExecute(
0,
)) as BaseChatModel;
const tools = await getConnectedTools(this, nodeVersion >= 1.5, true, true);
const tools = await this.getConnectedTools(nodeVersion >= 1.5, true, true);
await checkForStructuredTools(tools, this.getNode(), 'Plan & Execute Agent');
this.checkForStructuredTools(tools, this.getNode(), 'Plan & Execute Agent');
const outputParsers = await getOptionalOutputParsers(this);
const options = this.getNodeParameter('options', 0, {}) as {
@ -66,12 +62,7 @@ export async function planAndExecuteAgentExecute(
if (this.getNode().typeVersion <= 1.2) {
input = this.getNodeParameter('text', itemIndex) as string;
} else {
input = getPromptInputByType({
ctx: this,
i: itemIndex,
inputKey: 'text',
promptTypeKey: 'promptType',
});
input = this.getPromptInputByType(itemIndex);
}
if (input === undefined) {
@@ -83,11 +74,11 @@
}
const response = await agentExecutor
.withConfig(getTracingConfig(this))
.withConfig(this.getTracingConfig())
.invoke({ input, outputParsers });
if (outputParser) {
response.output = await extractParsedOutput(this, outputParser, response.output as string);
response.output = await this.extractParsedOutput(outputParser, response.output as string);
}
returnData.push({ json: response });

View file

@ -5,21 +5,18 @@ import { PromptTemplate } from '@langchain/core/prompts';
import { AgentExecutor, ChatAgent, ZeroShotAgent } from 'langchain/agents';
import { CombiningOutputParser } from 'langchain/output_parsers';
import {
type IExecuteFunctions,
type AiRootNodeExecuteFunctions,
type INodeExecutionData,
NodeConnectionType,
NodeOperationError,
} from 'n8n-workflow';
import { getConnectedTools, getPromptInputByType, isChatInstance } from '@utils/helpers';
import { isChatInstance } from '@utils/helpers';
import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser';
import { throwIfToolSchema } from '@utils/schemaParsing';
import { getTracingConfig } from '@utils/tracing';
import { checkForStructuredTools, extractParsedOutput } from '../utils';
export async function reActAgentAgentExecute(
this: IExecuteFunctions,
this: AiRootNodeExecuteFunctions,
nodeVersion: number,
): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing ReAct Agent');
@ -28,9 +25,9 @@ export async function reActAgentAgentExecute(
| BaseLanguageModel
| BaseChatModel;
const tools = await getConnectedTools(this, nodeVersion >= 1.5, true, true);
const tools = await this.getConnectedTools(nodeVersion >= 1.5, true, true);
await checkForStructuredTools(tools, this.getNode(), 'ReAct Agent');
this.checkForStructuredTools(tools, this.getNode(), 'ReAct Agent');
const outputParsers = await getOptionalOutputParsers(this);
@ -87,12 +84,7 @@ export async function reActAgentAgentExecute(
if (this.getNode().typeVersion <= 1.2) {
input = this.getNodeParameter('text', itemIndex) as string;
} else {
input = getPromptInputByType({
ctx: this,
i: itemIndex,
inputKey: 'text',
promptTypeKey: 'promptType',
});
input = this.getPromptInputByType(itemIndex);
}
if (input === undefined) {
@@ -104,11 +96,11 @@
}
const response = await agentExecutor
.withConfig(getTracingConfig(this))
.withConfig(this.getTracingConfig())
.invoke({ input, outputParsers });
if (outputParser) {
response.output = await extractParsedOutput(this, outputParser, response.output as string);
response.output = await this.extractParsedOutput(outputParser, response.output as string);
}
returnData.push({ json: response });

View file

@ -5,15 +5,14 @@ import type { SqlCreatePromptArgs } from 'langchain/agents/toolkits/sql';
import { SqlToolkit, createSqlAgent } from 'langchain/agents/toolkits/sql';
import { SqlDatabase } from 'langchain/sql_db';
import {
type IExecuteFunctions,
type AiRootNodeExecuteFunctions,
type INodeExecutionData,
NodeConnectionType,
NodeOperationError,
type IDataObject,
} from 'n8n-workflow';
import { getPromptInputByType, serializeChatHistory } from '@utils/helpers';
import { getTracingConfig } from '@utils/tracing';
import { serializeChatHistory } from '@utils/helpers';
import { getMysqlDataSource } from './other/handlers/mysql';
import { getPostgresDataSource } from './other/handlers/postgres';
@ -27,7 +26,7 @@ const parseTablesString = (tablesString: string) =>
.filter((table) => table.length > 0);
export async function sqlAgentAgentExecute(
this: IExecuteFunctions,
this: AiRootNodeExecuteFunctions,
): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing SQL Agent');
@ -39,27 +38,22 @@ export async function sqlAgentAgentExecute(
const returnData: INodeExecutionData[] = [];
for (let i = 0; i < items.length; i++) {
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
try {
const item = items[i];
const item = items[itemIndex];
let input;
if (this.getNode().typeVersion <= 1.2) {
input = this.getNodeParameter('input', i) as string;
input = this.getNodeParameter('input', itemIndex) as string;
} else {
input = getPromptInputByType({
ctx: this,
i,
inputKey: 'text',
promptTypeKey: 'promptType',
});
input = this.getPromptInputByType(itemIndex, 'text', 'promptType');
}
if (input === undefined) {
throw new NodeOperationError(this.getNode(), 'The prompt parameter is empty.');
}
const options = this.getNodeParameter('options', i, {});
const selectedDataSource = this.getNodeParameter('dataSource', i, 'sqlite') as
const options = this.getNodeParameter('options', itemIndex, {});
const selectedDataSource = this.getNodeParameter('dataSource', itemIndex, 'sqlite') as
| 'mysql'
| 'postgres'
| 'sqlite';
@@ -77,7 +71,7 @@
);
}
const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i, 'data');
const binaryPropertyName = this.getNodeParameter('binaryPropertyName', itemIndex, 'data');
dataSource = await getSqliteDataSource.call(this, item.binary, binaryPropertyName);
}
@@ -127,7 +121,7 @@
let response: IDataObject;
try {
response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({
response = await agentExecutor.withConfig(this.getTracingConfig()).invoke({
input,
signal: this.getExecutionCancelSignal(),
chatHistory,
@@ -136,14 +130,14 @@
if ((error.message as IDataObject)?.output) {
response = error.message as IDataObject;
} else {
throw new NodeOperationError(this.getNode(), error.message as string, { itemIndex: i });
throw new NodeOperationError(this.getNode(), error.message as string, { itemIndex });
}
}
returnData.push({ json: response });
} catch (error) {
if (this.continueOnFail()) {
returnData.push({ json: { error: error.message }, pairedItem: { item: i } });
returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
continue;
}

View file

@ -10,11 +10,10 @@ import type { AgentAction, AgentFinish } from 'langchain/agents';
import { AgentExecutor, createToolCallingAgent } from 'langchain/agents';
import { omit } from 'lodash';
import { BINARY_ENCODING, jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import type { ZodObject } from 'zod';
import type { AiRootNodeExecuteFunctions, INodeExecutionData, ZodObjectAny } from 'n8n-workflow';
import { z } from 'zod';
import { isChatInstance, getPromptInputByType, getConnectedTools } from '@utils/helpers';
import { isChatInstance } from '@utils/helpers';
import {
getOptionalOutputParsers,
type N8nOutputParser,
@ -22,14 +21,13 @@ import {
import { SYSTEM_MESSAGE } from './prompt';
function getOutputParserSchema(outputParser: N8nOutputParser): ZodObject<any, any, any, any> {
const schema =
(outputParser.getSchema() as ZodObject<any, any, any, any>) ?? z.object({ text: z.string() });
function getOutputParserSchema(outputParser: N8nOutputParser): ZodObjectAny {
const schema = (outputParser.getSchema() as ZodObjectAny) ?? z.object({ text: z.string() });
return schema;
}
async function extractBinaryMessages(ctx: IExecuteFunctions) {
async function extractBinaryMessages(ctx: AiRootNodeExecuteFunctions) {
const binaryData = ctx.getInputData()?.[0]?.binary ?? {};
const binaryMessages = await Promise.all(
Object.values(binaryData)
@ -96,7 +94,9 @@ function fixEmptyContentMessage(steps: AgentFinish | AgentAction[]) {
return steps;
}
export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
export async function toolsAgentExecute(
this: AiRootNodeExecuteFunctions,
): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing Tools Agent');
const model = await this.getInputConnectionData(NodeConnectionType.AiLanguageModel, 0);
@ -111,7 +111,7 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeE
| BaseChatMemory
| undefined;
const tools = (await getConnectedTools(this, true, false)) as Array<DynamicStructuredTool | Tool>;
const tools = (await this.getConnectedTools(true, false)) as Array<DynamicStructuredTool | Tool>;
const outputParser = (await getOptionalOutputParsers(this))?.[0];
let structuredOutputParserTool: DynamicStructuredTool | undefined;
/**
@ -289,13 +289,7 @@ export async function toolsAgentExecute(this: IExecuteFunctions): Promise<INodeE
const items = this.getInputData();
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
try {
const input = getPromptInputByType({
ctx: this,
i: itemIndex,
inputKey: 'text',
promptTypeKey: 'promptType',
});
const input = this.getPromptInputByType(itemIndex);
if (input === undefined) {
throw new NodeOperationError(this.getNode(), 'The text parameter is empty.');
}

View file

@@ -1,44 +0,0 @@
import type { BaseOutputParser } from '@langchain/core/output_parsers';
import type { DynamicStructuredTool, Tool } from 'langchain/tools';
import { NodeOperationError, type IExecuteFunctions, type INode } from 'n8n-workflow';
import type { z } from 'zod';
type ZodObjectAny = z.ZodObject<any, any, any, any>;
export async function extractParsedOutput(
ctx: IExecuteFunctions,
outputParser: BaseOutputParser<unknown>,
output: string,
): Promise<Record<string, unknown> | undefined> {
const parsedOutput = (await outputParser.parse(output)) as {
output: Record<string, unknown>;
};
if (ctx.getNode().typeVersion <= 1.6) {
return parsedOutput;
}
// For 1.7 and above, we try to extract the output from the parsed output
// with fallback to the original output if it's not present
return parsedOutput?.output ?? parsedOutput;
}
export async function checkForStructuredTools(
tools: Array<Tool | DynamicStructuredTool<ZodObjectAny>>,
node: INode,
currentAgentType: string,
) {
const dynamicStructuredTools = tools.filter(
(tool) => tool.constructor.name === 'DynamicStructuredTool',
);
if (dynamicStructuredTools.length > 0) {
const getToolName = (tool: Tool | DynamicStructuredTool) => `"${tool.name}"`;
throw new NodeOperationError(
node,
`The selected tools are not supported by "${currentAgentType}", please use "Tools Agent" instead`,
{
itemIndex: 0,
description: `Incompatible connected tools: ${dynamicStructuredTools.map(getToolName).join(', ')}`,
},
);
}
}

View file

@@ -1,21 +1,17 @@
import { AgentExecutor } from 'langchain/agents';
import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema';
import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import { AiRootNode, NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type {
IExecuteFunctions,
AiRootNodeExecuteFunctions,
INodeExecutionData,
INodeType,
INodeTypeDescription,
} from 'n8n-workflow';
import { OpenAI as OpenAIClient } from 'openai';
import { getConnectedTools } from '@utils/helpers';
import { getTracingConfig } from '@utils/tracing';
import { formatToOpenAIAssistantTool } from './utils';
export class OpenAiAssistant implements INodeType {
export class OpenAiAssistant extends AiRootNode {
description: INodeTypeDescription = {
displayName: 'OpenAI Assistant',
name: 'openAiAssistant',
@@ -313,30 +309,30 @@
],
};
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const nodeVersion = this.getNode().typeVersion;
const tools = await getConnectedTools(this, nodeVersion > 1, false);
const credentials = await this.getCredentials('openAiApi');
async execute(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]> {
const nodeVersion = context.getNode().typeVersion;
const tools = await context.getConnectedTools(nodeVersion > 1, false);
const credentials = await context.getCredentials('openAiApi');
const items = this.getInputData();
const items = context.getInputData();
const returnData: INodeExecutionData[] = [];
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
try {
const input = this.getNodeParameter('text', itemIndex) as string;
const assistantId = this.getNodeParameter('assistantId', itemIndex, '') as string;
const nativeTools = this.getNodeParameter('nativeTools', itemIndex, []) as Array<
const input = context.getNodeParameter('text', itemIndex) as string;
const assistantId = context.getNodeParameter('assistantId', itemIndex, '') as string;
const nativeTools = context.getNodeParameter('nativeTools', itemIndex, []) as Array<
'code_interpreter' | 'retrieval'
>;
const options = this.getNodeParameter('options', itemIndex, {}) as {
const options = context.getNodeParameter('options', itemIndex, {}) as {
baseURL?: string;
maxRetries: number;
timeout: number;
};
if (input === undefined) {
throw new NodeOperationError(this.getNode(), 'The text parameter is empty.');
throw new NodeOperationError(context.getNode(), 'The text parameter is empty.');
}
const client = new OpenAIClient({
@@ -358,9 +354,13 @@
tools: newTools,
});
} else {
const name = this.getNodeParameter('name', itemIndex, '') as string;
const instructions = this.getNodeParameter('instructions', itemIndex, '') as string;
const model = this.getNodeParameter('model', itemIndex, 'gpt-3.5-turbo-1106') as string;
const name = context.getNodeParameter('name', itemIndex, '') as string;
const instructions = context.getNodeParameter('instructions', itemIndex, '') as string;
const model = context.getNodeParameter(
'model',
itemIndex,
'gpt-3.5-turbo-1106',
) as string;
agent = await OpenAIAssistantRunnable.createAssistant({
model,
@@ -377,15 +377,15 @@
tools,
});
const response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({
const response = await agentExecutor.withConfig(context.getTracingConfig()).invoke({
content: input,
signal: this.getExecutionCancelSignal(),
signal: context.getExecutionCancelSignal(),
timeout: options.timeout ?? 10000,
});
returnData.push({ json: response });
} catch (error) {
if (this.continueOnFail()) {
if (context.continueOnFail()) {
returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
continue;
}

View file

@ -15,12 +15,12 @@ import { CombiningOutputParser } from 'langchain/output_parsers';
import type {
IBinaryData,
IDataObject,
IExecuteFunctions,
AiRootNodeExecuteFunctions,
INodeExecutionData,
INodeType,
INodeTypeDescription,
} from 'n8n-workflow';
import {
AiRootNode,
ApplicationError,
NodeApiError,
NodeConnectionType,
@@ -28,11 +28,10 @@
} from 'n8n-workflow';
import { promptTypeOptions, textFromPreviousNode } from '@utils/descriptions';
import { getPromptInputByType, isChatInstance } from '@utils/helpers';
import { isChatInstance } from '@utils/helpers';
import type { N8nOutputParser } from '@utils/output_parsers/N8nOutputParser';
import { getOptionalOutputParsers } from '@utils/output_parsers/N8nOutputParser';
import { getTemplateNoticeField } from '@utils/sharedFields';
import { getTracingConfig } from '@utils/tracing';
import {
getCustomErrorMessage as getCustomOpenAiErrorMessage,
@ -49,7 +48,7 @@ interface MessagesTemplate {
}
async function getImageMessage(
context: IExecuteFunctions,
context: AiRootNodeExecuteFunctions,
itemIndex: number,
message: MessagesTemplate,
) {
@@ -106,7 +105,7 @@
}
async function getChainPromptTemplate(
context: IExecuteFunctions,
context: AiRootNodeExecuteFunctions,
itemIndex: number,
llm: BaseLanguageModel | BaseChatModel,
messages?: MessagesTemplate[],
@@ -165,7 +164,7 @@
}
async function createSimpleLLMChain(
context: IExecuteFunctions,
context: AiRootNodeExecuteFunctions,
llm: BaseLanguageModel,
query: string,
prompt: ChatPromptTemplate | PromptTemplate,
@@ -173,7 +172,7 @@
const chain = new LLMChain({
llm,
prompt,
}).withConfig(getTracingConfig(context));
}).withConfig(context.getTracingConfig());
const response = (await chain.invoke({
query,
@@ -184,7 +183,7 @@
}
async function getChain(
context: IExecuteFunctions,
context: AiRootNodeExecuteFunctions,
itemIndex: number,
query: string,
llm: BaseLanguageModel,
@@ -222,7 +221,7 @@
);
const chain = prompt.pipe(llm).pipe(combinedOutputParser);
const response = (await chain.withConfig(getTracingConfig(context)).invoke({ query })) as
const response = (await chain.withConfig(context.getTracingConfig()).invoke({ query })) as
| string
| string[];
@ -249,7 +248,7 @@ function getInputs(parameters: IDataObject) {
return inputs;
}
export class ChainLlm implements INodeType {
export class ChainLlm extends AiRootNode {
description: INodeTypeDescription = {
displayName: 'Basic LLM Chain',
name: 'chainLlm',
@@ -510,42 +509,37 @@
],
};
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing LLM Chain');
const items = this.getInputData();
async execute(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]> {
context.logger.debug('Executing LLM Chain');
const items = context.getInputData();
const returnData: INodeExecutionData[] = [];
const llm = (await this.getInputConnectionData(
const llm = (await context.getInputConnectionData(
NodeConnectionType.AiLanguageModel,
0,
)) as BaseLanguageModel;
const outputParsers = await getOptionalOutputParsers(this);
const outputParsers = await getOptionalOutputParsers(context);
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
try {
let prompt: string;
if (this.getNode().typeVersion <= 1.3) {
prompt = this.getNodeParameter('prompt', itemIndex) as string;
if (context.getNode().typeVersion <= 1.3) {
prompt = context.getNodeParameter('prompt', itemIndex) as string;
} else {
prompt = getPromptInputByType({
ctx: this,
i: itemIndex,
inputKey: 'text',
promptTypeKey: 'promptType',
});
prompt = context.getPromptInputByType(itemIndex);
}
const messages = this.getNodeParameter(
const messages = context.getNodeParameter(
'messages.messageValues',
itemIndex,
[],
) as MessagesTemplate[];
if (prompt === undefined) {
throw new NodeOperationError(this.getNode(), "The 'prompt' parameter is empty.");
throw new NodeOperationError(context.getNode(), "The 'prompt' parameter is empty.");
}
const responses = await getChain(this, itemIndex, prompt, llm, outputParsers, messages);
const responses = await getChain(context, itemIndex, prompt, llm, outputParsers, messages);
responses.forEach((response) => {
let data: IDataObject;
@@ -586,7 +580,7 @@
}
}
if (this.continueOnFail()) {
if (context.continueOnFail()) {
returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
continue;
}

View file

@ -9,24 +9,23 @@ import type { BaseRetriever } from '@langchain/core/retrievers';
import { RetrievalQAChain } from 'langchain/chains';
import {
NodeConnectionType,
type IExecuteFunctions,
type AiRootNodeExecuteFunctions,
type INodeExecutionData,
type INodeType,
type INodeTypeDescription,
NodeOperationError,
AiRootNode,
} from 'n8n-workflow';
import { promptTypeOptions, textFromPreviousNode } from '@utils/descriptions';
import { getPromptInputByType, isChatInstance } from '@utils/helpers';
import { isChatInstance } from '@utils/helpers';
import { getTemplateNoticeField } from '@utils/sharedFields';
import { getTracingConfig } from '@utils/tracing';
const SYSTEM_PROMPT_TEMPLATE = `Use the following pieces of context to answer the users question.
If you don't know the answer, just say that you don't know, don't try to make up an answer.
----------------
{context}`;
export class ChainRetrievalQa implements INodeType {
export class ChainRetrievalQa extends AiRootNode {
description: INodeTypeDescription = {
displayName: 'Question and Answer Chain',
name: 'chainRetrievalQa',
@@ -158,20 +157,20 @@
],
};
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing Retrieval QA Chain');
async execute(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]> {
context.logger.debug('Executing Retrieval QA Chain');
const model = (await this.getInputConnectionData(
const model = (await context.getInputConnectionData(
NodeConnectionType.AiLanguageModel,
0,
)) as BaseLanguageModel;
const retriever = (await this.getInputConnectionData(
const retriever = (await context.getInputConnectionData(
NodeConnectionType.AiRetriever,
0,
)) as BaseRetriever;
const items = this.getInputData();
const items = context.getInputData();
const returnData: INodeExecutionData[] = [];
@@ -180,22 +179,17 @@
try {
let query;
if (this.getNode().typeVersion <= 1.2) {
query = this.getNodeParameter('query', itemIndex) as string;
if (context.getNode().typeVersion <= 1.2) {
query = context.getNodeParameter('query', itemIndex) as string;
} else {
query = getPromptInputByType({
ctx: this,
i: itemIndex,
inputKey: 'text',
promptTypeKey: 'promptType',
});
query = context.getPromptInputByType(itemIndex);
}
if (query === undefined) {
throw new NodeOperationError(this.getNode(), 'The query parameter is empty.');
throw new NodeOperationError(context.getNode(), 'The query parameter is empty.');
}
const options = this.getNodeParameter('options', itemIndex, {}) as {
const options = context.getNodeParameter('options', itemIndex, {}) as {
systemPromptTemplate?: string;
};
@@ -224,10 +218,10 @@
const chain = RetrievalQAChain.fromLLM(model, retriever, chainParameters);
const response = await chain.withConfig(getTracingConfig(this)).invoke({ query });
const response = await chain.withConfig(context.getTracingConfig()).invoke({ query });
returnData.push({ json: { response } });
} catch (error) {
if (this.continueOnFail()) {
if (context.continueOnFail()) {
returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
continue;
}

View file

@@ -1,4 +1,4 @@
import type { INodeTypeBaseDescription, IVersionedNodeType } from 'n8n-workflow';
import type { INodeType, INodeTypeBaseDescription, IVersionedNodeType } from 'n8n-workflow';
import { VersionedNodeType } from 'n8n-workflow';
import { ChainSummarizationV1 } from './V1/ChainSummarizationV1.node';
@ -30,8 +30,8 @@ export class ChainSummarization extends VersionedNodeType {
};
const nodeVersions: IVersionedNodeType['nodeVersions'] = {
1: new ChainSummarizationV1(baseDescription),
2: new ChainSummarizationV2(baseDescription),
1: new ChainSummarizationV1(baseDescription) as INodeType,
2: new ChainSummarizationV2(baseDescription) as INodeType,
};
super(nodeVersions, baseDescription);

View file

@ -8,8 +8,8 @@ import {
type INodeTypeBaseDescription,
type IExecuteFunctions,
type INodeExecutionData,
type INodeType,
type INodeTypeDescription,
AiRootNode,
} from 'n8n-workflow';
import { N8nBinaryLoader } from '@utils/N8nBinaryLoader';
@ -18,10 +18,11 @@ import { getTemplateNoticeField } from '@utils/sharedFields';
import { REFINE_PROMPT_TEMPLATE, DEFAULT_PROMPT_TEMPLATE } from '../prompt';
export class ChainSummarizationV1 implements INodeType {
export class ChainSummarizationV1 extends AiRootNode {
description: INodeTypeDescription;
constructor(baseDescription: INodeTypeBaseDescription) {
super();
this.description = {
...baseDescription,
version: 1,
@ -162,20 +163,21 @@ export class ChainSummarizationV1 implements INodeType {
};
}
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing Vector Store QA Chain');
const type = this.getNodeParameter('type', 0) as 'map_reduce' | 'stuff' | 'refine';
async execute(context: IExecuteFunctions): Promise<INodeExecutionData[][]> {
context.logger.debug('Executing Vector Store QA Chain');
const type = context.getNodeParameter('type', 0) as 'map_reduce' | 'stuff' | 'refine';
const model = (await this.getInputConnectionData(
const model = (await context.getInputConnectionData(
NodeConnectionType.AiLanguageModel,
0,
)) as BaseLanguageModel;
const documentInput = (await this.getInputConnectionData(NodeConnectionType.AiDocument, 0)) as
| N8nJsonLoader
| Array<Document<Record<string, unknown>>>;
const documentInput = (await context.getInputConnectionData(
NodeConnectionType.AiDocument,
0,
)) as N8nJsonLoader | Array<Document<Record<string, unknown>>>;
const options = this.getNodeParameter('options', 0, {}) as {
const options = context.getNodeParameter('options', 0, {}) as {
prompt?: string;
refineQuestionPrompt?: string;
refinePrompt?: string;
@ -241,7 +243,7 @@ export class ChainSummarizationV1 implements INodeType {
const chain = loadSummarizationChain(model, chainArgs);
const items = this.getInputData();
const items = context.getInputData();
const returnData: INodeExecutionData[] = [];
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {

View file

@ -5,18 +5,16 @@ import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
import { loadSummarizationChain } from 'langchain/chains';
import type {
INodeTypeBaseDescription,
IExecuteFunctions,
AiRootNodeExecuteFunctions,
INodeExecutionData,
INodeType,
INodeTypeDescription,
IDataObject,
} from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';
import { AiRootNode, NodeConnectionType } from 'n8n-workflow';
import { N8nBinaryLoader } from '@utils/N8nBinaryLoader';
import { N8nJsonLoader } from '@utils/N8nJsonLoader';
import { getTemplateNoticeField } from '@utils/sharedFields';
import { getTracingConfig } from '@utils/tracing';
import { getChainPromptsArgs } from '../helpers';
import { REFINE_PROMPT_TEMPLATE, DEFAULT_PROMPT_TEMPLATE } from '../prompt';
@ -56,10 +54,11 @@ function getInputs(parameters: IDataObject) {
return inputs;
}
export class ChainSummarizationV2 implements INodeType {
export class ChainSummarizationV2 extends AiRootNode {
description: INodeTypeDescription;
constructor(baseDescription: INodeTypeBaseDescription) {
super();
this.description = {
...baseDescription,
version: [2],
@ -311,27 +310,27 @@ export class ChainSummarizationV2 implements INodeType {
};
}
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
this.logger.debug('Executing Summarization Chain V2');
const operationMode = this.getNodeParameter('operationMode', 0, 'nodeInputJson') as
async execute(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]> {
context.logger.debug('Executing Summarization Chain V2');
const operationMode = context.getNodeParameter('operationMode', 0, 'nodeInputJson') as
| 'nodeInputJson'
| 'nodeInputBinary'
| 'documentLoader';
const chunkingMode = this.getNodeParameter('chunkingMode', 0, 'simple') as
const chunkingMode = context.getNodeParameter('chunkingMode', 0, 'simple') as
| 'simple'
| 'advanced';
const model = (await this.getInputConnectionData(
const model = (await context.getInputConnectionData(
NodeConnectionType.AiLanguageModel,
0,
)) as BaseLanguageModel;
const items = this.getInputData();
const items = context.getInputData();
const returnData: INodeExecutionData[] = [];
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
try {
const summarizationMethodAndPrompts = this.getNodeParameter(
const summarizationMethodAndPrompts = context.getNodeParameter(
'options.summarizationMethodAndPrompts.values',
itemIndex,
{},
@ -355,7 +354,7 @@ export class ChainSummarizationV2 implements INodeType {
// Use dedicated document loader input to load documents
if (operationMode === 'documentLoader') {
const documentInput = (await this.getInputConnectionData(
const documentInput = (await context.getInputConnectionData(
NodeConnectionType.AiDocument,
0,
)) as N8nJsonLoader | Array<Document<Record<string, unknown>>>;
@ -367,7 +366,7 @@ export class ChainSummarizationV2 implements INodeType {
? await documentInput.processItem(item, itemIndex)
: documentInput;
const response = await chain.withConfig(getTracingConfig(this)).invoke({
const response = await chain.withConfig(context.getTracingConfig()).invoke({
input_documents: processedDocuments,
});
@ -381,15 +380,19 @@ export class ChainSummarizationV2 implements INodeType {
switch (chunkingMode) {
// In simple mode we use recursive character splitter with default settings
case 'simple':
const chunkSize = this.getNodeParameter('chunkSize', itemIndex, 1000) as number;
const chunkOverlap = this.getNodeParameter('chunkOverlap', itemIndex, 200) as number;
const chunkSize = context.getNodeParameter('chunkSize', itemIndex, 1000) as number;
const chunkOverlap = context.getNodeParameter(
'chunkOverlap',
itemIndex,
200,
) as number;
textSplitter = new RecursiveCharacterTextSplitter({ chunkOverlap, chunkSize });
break;
// In advanced mode user can connect text splitter node so we just retrieve it
case 'advanced':
textSplitter = (await this.getInputConnectionData(
textSplitter = (await context.getInputConnectionData(
NodeConnectionType.AiTextSplitter,
0,
)) as TextSplitter | undefined;
@ -400,14 +403,14 @@ export class ChainSummarizationV2 implements INodeType {
let processor: N8nJsonLoader | N8nBinaryLoader;
if (operationMode === 'nodeInputBinary') {
const binaryDataKey = this.getNodeParameter(
const binaryDataKey = context.getNodeParameter(
'options.binaryDataKey',
itemIndex,
'data',
) as string;
processor = new N8nBinaryLoader(this, 'options.', binaryDataKey, textSplitter);
processor = new N8nBinaryLoader(context, 'options.', binaryDataKey, textSplitter);
} else {
processor = new N8nJsonLoader(this, 'options.', textSplitter);
processor = new N8nJsonLoader(context, 'options.', textSplitter);
}
const processedItem = await processor.processItem(item, itemIndex);
@ -417,7 +420,7 @@ export class ChainSummarizationV2 implements INodeType {
returnData.push({ json: { response } });
}
} catch (error) {
if (this.continueOnFail()) {
if (context.continueOnFail()) {
returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
continue;
}

View file

@ -3,11 +3,10 @@ import { HumanMessage } from '@langchain/core/messages';
import { ChatPromptTemplate, SystemMessagePromptTemplate } from '@langchain/core/prompts';
import type { JSONSchema7 } from 'json-schema';
import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers';
import { jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import { AiRootNode, jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type {
INodeType,
INodeTypeDescription,
IExecuteFunctions,
AiRootNodeExecuteFunctions,
INodeExecutionData,
INodePropertyOptions,
} from 'n8n-workflow';
@ -15,7 +14,6 @@ import type { z } from 'zod';
import { inputSchemaField, jsonSchemaExampleField, schemaTypeField } from '@utils/descriptions';
import { convertJsonSchemaToZod, generateSchema } from '@utils/schemaParsing';
import { getTracingConfig } from '@utils/tracing';
import { makeZodSchemaFromAttributes } from './helpers';
import type { AttributeDefinition } from './types';
@ -24,7 +22,7 @@ const SYSTEM_PROMPT_TEMPLATE = `You are an expert extraction algorithm.
Only extract relevant information from the text.
If you do not know the value of an attribute asked to extract, you may omit the attribute's value.`;
export class InformationExtractor implements INodeType {
export class InformationExtractor extends AiRootNode {
description: INodeTypeDescription = {
displayName: 'Information Extractor',
name: 'informationExtractor',
@ -218,15 +216,15 @@ export class InformationExtractor implements INodeType {
],
};
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData();
async execute(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = context.getInputData();
const llm = (await this.getInputConnectionData(
const llm = (await context.getInputConnectionData(
NodeConnectionType.AiLanguageModel,
0,
)) as BaseLanguageModel;
const schemaType = this.getNodeParameter('schemaType', 0, '') as
const schemaType = context.getNodeParameter('schemaType', 0, '') as
| 'fromAttributes'
| 'fromJson'
| 'manual';
@ -234,14 +232,14 @@ export class InformationExtractor implements INodeType {
let parser: OutputFixingParser<object>;
if (schemaType === 'fromAttributes') {
const attributes = this.getNodeParameter(
const attributes = context.getNodeParameter(
'attributes.attributes',
0,
[],
) as AttributeDefinition[];
if (attributes.length === 0) {
throw new NodeOperationError(this.getNode(), 'At least one attribute must be specified');
throw new NodeOperationError(context.getNode(), 'At least one attribute must be specified');
}
parser = OutputFixingParser.fromLLM(
@ -252,10 +250,10 @@ export class InformationExtractor implements INodeType {
let jsonSchema: JSONSchema7;
if (schemaType === 'fromJson') {
const jsonExample = this.getNodeParameter('jsonSchemaExample', 0, '') as string;
const jsonExample = context.getNodeParameter('jsonSchemaExample', 0, '') as string;
jsonSchema = generateSchema(jsonExample);
} else {
const inputSchema = this.getNodeParameter('inputSchema', 0, '') as string;
const inputSchema = context.getNodeParameter('inputSchema', 0, '') as string;
jsonSchema = jsonParse<JSONSchema7>(inputSchema);
}
@ -266,10 +264,10 @@ export class InformationExtractor implements INodeType {
const resultData: INodeExecutionData[] = [];
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
const input = this.getNodeParameter('text', itemIndex) as string;
const input = context.getNodeParameter('text', itemIndex) as string;
const inputPrompt = new HumanMessage(input);
const options = this.getNodeParameter('options', itemIndex, {}) as {
const options = context.getNodeParameter('options', itemIndex, {}) as {
systemPromptTemplate?: string;
};
@ -285,13 +283,13 @@ export class InformationExtractor implements INodeType {
inputPrompt,
];
const prompt = ChatPromptTemplate.fromMessages(messages);
const chain = prompt.pipe(llm).pipe(parser).withConfig(getTracingConfig(this));
const chain = prompt.pipe(llm).pipe(parser).withConfig(context.getTracingConfig());
try {
const output = await chain.invoke(messages);
resultData.push({ json: { output } });
} catch (error) {
if (this.continueOnFail()) {
if (context.continueOnFail()) {
resultData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
continue;
}

View file

@ -5,16 +5,13 @@ import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_par
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type {
IDataObject,
IExecuteFunctions,
AiRootNodeExecuteFunctions,
INodeExecutionData,
INodeParameters,
INodeType,
INodeTypeDescription,
} from 'n8n-workflow';
import { z } from 'zod';
import { getTracingConfig } from '@utils/tracing';
const DEFAULT_SYSTEM_PROMPT_TEMPLATE =
'You are highly intelligent and accurate sentiment analyzer. Analyze the sentiment of the provided text. Categorize it into one of the following: {categories}. Use the provided formatting instructions. Only output the JSON.';
@ -28,7 +25,7 @@ const configuredOutputs = (parameters: INodeParameters, defaultCategories: strin
return ret;
};
export class SentimentAnalysis implements INodeType {
export class SentimentAnalysis extends AiRootNode {
description: INodeTypeDescription = {
displayName: 'Sentiment Analysis',
name: 'sentimentAnalysis',
@ -136,10 +133,10 @@ export class SentimentAnalysis implements INodeType {
],
};
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData();
async execute(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = context.getInputData();
const llm = (await this.getInputConnectionData(
const llm = (await context.getInputConnectionData(
NodeConnectionType.AiLanguageModel,
0,
)) as BaseLanguageModel;
@ -148,7 +145,7 @@ export class SentimentAnalysis implements INodeType {
for (let i = 0; i < items.length; i++) {
try {
const sentimentCategories = this.getNodeParameter(
const sentimentCategories = context.getNodeParameter(
'options.categories',
i,
DEFAULT_CATEGORIES,
@ -160,7 +157,7 @@ export class SentimentAnalysis implements INodeType {
.filter(Boolean);
if (categories.length === 0) {
throw new NodeOperationError(this.getNode(), 'No sentiment categories provided', {
throw new NodeOperationError(context.getNode(), 'No sentiment categories provided', {
itemIndex: i,
});
}
@ -170,7 +167,7 @@ export class SentimentAnalysis implements INodeType {
returnData.push(...Array.from({ length: categories.length }, () => []));
}
const options = this.getNodeParameter('options', i, {}) as {
const options = context.getNodeParameter('options', i, {}) as {
systemPromptTemplate?: string;
includeDetailedResults?: boolean;
enableAutoFixing?: boolean;
@ -197,7 +194,7 @@ export class SentimentAnalysis implements INodeType {
{format_instructions}`,
);
const input = this.getNodeParameter('inputText', i) as string;
const input = context.getNodeParameter('inputText', i) as string;
const inputPrompt = new HumanMessage(input);
const messages = [
await systemPromptTemplate.format({
@ -208,7 +205,7 @@ export class SentimentAnalysis implements INodeType {
];
const prompt = ChatPromptTemplate.fromMessages(messages);
const chain = prompt.pipe(llm).pipe(parser).withConfig(getTracingConfig(this));
const chain = prompt.pipe(llm).pipe(parser).withConfig(context.getTracingConfig());
try {
const output = await chain.invoke(messages);
@ -233,7 +230,7 @@ export class SentimentAnalysis implements INodeType {
}
} catch (error) {
throw new NodeOperationError(
this.getNode(),
context.getNode(),
'Error during parsing of LLM output, please check your LLM model and configuration',
{
itemIndex: i,
@ -241,9 +238,9 @@ export class SentimentAnalysis implements INodeType {
);
}
} catch (error) {
if (this.continueOnFail()) {
const executionErrorData = this.helpers.constructExecutionMetaData(
this.helpers.returnJsonArray({ error: error.message }),
if (context.continueOnFail()) {
const executionErrorData = context.helpers.constructExecutionMetaData(
context.helpers.returnJsonArray({ error: error.message }),
{ itemData: { item: i } },
);
returnData[0].push(...executionErrorData);

View file

@ -2,19 +2,16 @@ import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import { HumanMessage } from '@langchain/core/messages';
import { SystemMessagePromptTemplate, ChatPromptTemplate } from '@langchain/core/prompts';
import { OutputFixingParser, StructuredOutputParser } from 'langchain/output_parsers';
import { NodeOperationError, NodeConnectionType } from 'n8n-workflow';
import { NodeOperationError, NodeConnectionType, AiRootNode } from 'n8n-workflow';
import type {
IDataObject,
IExecuteFunctions,
AiRootNodeExecuteFunctions,
INodeExecutionData,
INodeParameters,
INodeType,
INodeTypeDescription,
} from 'n8n-workflow';
import { z } from 'zod';
import { getTracingConfig } from '@utils/tracing';
const SYSTEM_PROMPT_TEMPLATE =
"Please classify the text provided by the user into one of the following categories: {categories}, and use the provided formatting instructions below. Don't explain, and only output the json.";
@ -28,7 +25,7 @@ const configuredOutputs = (parameters: INodeParameters) => {
return ret;
};
export class TextClassifier implements INodeType {
export class TextClassifier extends AiRootNode {
description: INodeTypeDescription = {
displayName: 'Text Classifier',
name: 'textClassifier',
@ -163,24 +160,24 @@ export class TextClassifier implements INodeType {
],
};
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData();
async execute(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = context.getInputData();
const llm = (await this.getInputConnectionData(
const llm = (await context.getInputConnectionData(
NodeConnectionType.AiLanguageModel,
0,
)) as BaseLanguageModel;
const categories = this.getNodeParameter('categories.categories', 0, []) as Array<{
const categories = context.getNodeParameter('categories.categories', 0, []) as Array<{
category: string;
description: string;
}>;
if (categories.length === 0) {
throw new NodeOperationError(this.getNode(), 'At least one category must be defined');
throw new NodeOperationError(context.getNode(), 'At least one category must be defined');
}
const options = this.getNodeParameter('options', 0, {}) as {
const options = context.getNodeParameter('options', 0, {}) as {
multiClass: boolean;
fallback?: string;
systemPromptTemplate?: string;
@ -226,10 +223,10 @@ export class TextClassifier implements INodeType {
for (let itemIdx = 0; itemIdx < items.length; itemIdx++) {
const item = items[itemIdx];
item.pairedItem = { item: itemIdx };
const input = this.getNodeParameter('inputText', itemIdx) as string;
const input = context.getNodeParameter('inputText', itemIdx) as string;
const inputPrompt = new HumanMessage(input);
const systemPromptTemplateOpt = this.getNodeParameter(
const systemPromptTemplateOpt = context.getNodeParameter(
'options.systemPromptTemplate',
itemIdx,
SYSTEM_PROMPT_TEMPLATE,
@ -249,7 +246,7 @@ ${fallbackPrompt}`,
inputPrompt,
];
const prompt = ChatPromptTemplate.fromMessages(messages);
const chain = prompt.pipe(llm).pipe(parser).withConfig(getTracingConfig(this));
const chain = prompt.pipe(llm).pipe(parser).withConfig(context.getTracingConfig());
try {
const output = await chain.invoke(messages);
@ -259,7 +256,7 @@ ${fallbackPrompt}`,
});
if (fallback === 'other' && output.fallback) returnData[returnData.length - 1].push(item);
} catch (error) {
if (this.continueOnFail()) {
if (context.continueOnFail()) {
returnData[0].push({
json: { error: error.message },
pairedItem: { item: itemIdx },

View file

@ -13,8 +13,6 @@ import { pick } from 'lodash';
import type { IDataObject, ISupplyDataFunctions, JsonObject } from 'n8n-workflow';
import { NodeConnectionType, NodeError, NodeOperationError } from 'n8n-workflow';
import { logAiEvent } from '@utils/helpers';
type TokensUsageParser = (llmOutput: LLMResult['llmOutput']) => {
completionTokens: number;
promptTokens: number;
@ -141,7 +139,7 @@ export class N8nLlmTracing extends BaseCallbackHandler {
[{ json: { ...response } }],
]);
logAiEvent(this.executionFunctions, 'ai-llm-generated-output', {
this.executionFunctions.logAiEvent('ai-llm-generated-output', {
messages: parsedMessages,
options: runDetails.options,
response,
@ -204,7 +202,7 @@ export class N8nLlmTracing extends BaseCallbackHandler {
);
}
logAiEvent(this.executionFunctions, 'ai-llm-errored', {
this.executionFunctions.logAiEvent('ai-llm-errored', {
error: Object.keys(error).length === 0 ? error.toString() : error,
runId,
parentRunId,

View file

@ -12,6 +12,7 @@ import type {
SupplyData,
ExecutionError,
IDataObject,
ZodObjectAny,
} from 'n8n-workflow';
import { jsonParse, NodeConnectionType, NodeOperationError } from 'n8n-workflow';
@ -19,8 +20,6 @@ import { inputSchemaField, jsonSchemaExampleField, schemaTypeField } from '@util
import { convertJsonSchemaToZod, generateSchema } from '@utils/schemaParsing';
import { getConnectionHintNoticeField } from '@utils/sharedFields';
import type { DynamicZodObject } from '../../../types/zod.types';
export class ToolCode implements INodeType {
description: INodeTypeDescription = {
displayName: 'Code Tool',
@ -269,7 +268,7 @@ export class ToolCode implements INodeType {
? generateSchema(jsonExample)
: jsonParse<JSONSchema7>(inputSchema);
const zodSchema = convertJsonSchemaToZod<DynamicZodObject>(jsonSchema);
const zodSchema = convertJsonSchemaToZod<ZodObjectAny>(jsonSchema);
tool = new DynamicStructuredTool({
schema: zodSchema,

View file

@ -1,5 +1,6 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import { DynamicTool } from '@langchain/core/tools';
import { N8nTool } from '@utils/N8nTool';
import type {
INodeType,
INodeTypeDescription,
@ -10,7 +11,6 @@ import type {
} from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError, tryToParseAlphanumericString } from 'n8n-workflow';
import { N8nTool } from '@utils/N8nTool';
import { getConnectionHintNoticeField } from '@utils/sharedFields';
import {
@ -407,6 +407,7 @@ export class ToolHttpRequest implements INodeType {
if (this.getNode().typeVersion >= 1.1) {
const schema = makeToolInputSchema(toolParameters);
// TODO: add a new this.createN8NTool method
tool = new N8nTool(this, {
name,
description: toolDescription,

View file

@ -1,9 +1,8 @@
import type { N8nTool } from '@utils/N8nTool';
import { mock } from 'jest-mock-extended';
import type { IExecuteFunctions, INode } from 'n8n-workflow';
import { jsonParse } from 'n8n-workflow';
import type { N8nTool } from '@utils/N8nTool';
import { ToolHttpRequest } from '../ToolHttpRequest.node';
describe('ToolHttpRequest', () => {

View file

@ -14,6 +14,7 @@ import type {
ExecutionError,
NodeApiError,
ISupplyDataFunctions,
ZodObjectAny,
} from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow';
import { z } from 'zod';
@ -26,7 +27,6 @@ import type {
SendIn,
ToolParameter,
} from './interfaces';
import type { DynamicZodObject } from '../../../types/zod.types';
const genericCredentialRequest = async (ctx: ISupplyDataFunctions, itemIndex: number) => {
const genericType = ctx.getNodeParameter('genericAuthType', itemIndex) as string;
@ -814,7 +814,7 @@ function makeParameterZodSchema(parameter: ToolParameter) {
return schema;
}
export function makeToolInputSchema(parameters: ToolParameter[]): DynamicZodObject {
export function makeToolInputSchema(parameters: ToolParameter[]): ZodObjectAny {
const schemaEntries = parameters.map((parameter) => [
parameter.name,
makeParameterZodSchema(parameter),

View file

@ -18,6 +18,7 @@ import type {
IDataObject,
INodeParameterResourceLocator,
ITaskMetadata,
ZodObjectAny,
} from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError, jsonParse } from 'n8n-workflow';
@ -25,8 +26,6 @@ import { jsonSchemaExampleField, schemaTypeField, inputSchemaField } from '@util
import { convertJsonSchemaToZod, generateSchema } from '@utils/schemaParsing';
import { getConnectionHintNoticeField } from '@utils/sharedFields';
import type { DynamicZodObject } from '../../../types/zod.types';
export class ToolWorkflow implements INodeType {
description: INodeTypeDescription = {
displayName: 'Call n8n Workflow Tool',
@ -543,7 +542,7 @@ export class ToolWorkflow implements INodeType {
? generateSchema(jsonExample)
: jsonParse<JSONSchema7>(inputSchema);
const zodSchema = convertJsonSchemaToZod<DynamicZodObject>(jsonSchema);
const zodSchema = convertJsonSchemaToZod<ZodObjectAny>(jsonSchema);
tool = new DynamicStructuredTool({
schema: zodSchema,

View file

@ -19,7 +19,7 @@ import type {
INodePropertyOptions,
} from 'n8n-workflow';
import { getMetadataFiltersValues, logAiEvent } from '@utils/helpers';
import { getMetadataFiltersValues } from '@utils/helpers';
import { logWrapper } from '@utils/logWrapper';
import type { N8nBinaryLoader } from '@utils/N8nBinaryLoader';
import { N8nJsonLoader } from '@utils/N8nJsonLoader';
@ -283,7 +283,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
});
resultData.push(...serializedDocs);
logAiEvent(this, 'ai-vector-store-searched', { query: prompt });
this.logAiEvent('ai-vector-store-searched', { query: prompt });
}
return [resultData];
@ -313,7 +313,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
try {
await args.populateVectorStore(this, embeddings, processedDocuments, itemIndex);
logAiEvent(this, 'ai-vector-store-populated');
this.logAiEvent('ai-vector-store-populated');
} catch (error) {
throw error;
}
@ -367,7 +367,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
ids: [documentId],
});
logAiEvent(this, 'ai-vector-store-updated');
this.logAiEvent('ai-vector-store-updated');
} catch (error) {
throw error;
}

View file

@ -1,10 +1,8 @@
import type { IExecuteFunctions, INodeType } from 'n8n-workflow';
import { router } from './actions/router';
import { versionDescription } from './actions/versionDescription';
import { listSearch, loadOptions } from './methods';
export class OpenAi implements INodeType {
export class OpenAi extends AiRootNode {
description = versionDescription;
methods = {
@ -12,7 +10,7 @@ export class OpenAi implements INodeType {
loadOptions,
};
async execute(this: IExecuteFunctions) {
return await router.call(this);
async execute(context: AiRootNodeExecuteFunctions) {
return await router.call(context);
}
}

View file

@ -6,7 +6,7 @@ import type { BufferWindowMemory } from 'langchain/memory';
import omit from 'lodash/omit';
import type {
IDataObject,
IExecuteFunctions,
AiRootNodeExecuteFunctions,
INodeExecutionData,
INodeProperties,
} from 'n8n-workflow';
@ -19,8 +19,6 @@ import {
import { OpenAI as OpenAIClient } from 'openai';
import { promptTypeOptions } from '@utils/descriptions';
import { getConnectedTools } from '@utils/helpers';
import { getTracingConfig } from '@utils/tracing';
import { formatToOpenAIAssistantTool } from '../../helpers/utils';
import { assistantRLC } from '../descriptions';
@ -153,7 +151,10 @@ const mapChatMessageToThreadMessage = (
content: message.content.toString(),
});
export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
export async function execute(
this: AiRootNodeExecuteFunctions,
i: number,
): Promise<INodeExecutionData[]> {
const credentials = await this.getCredentials('openAiApi');
const nodeVersion = this.getNode().typeVersion;
@ -191,7 +192,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
const agent = new OpenAIAssistantRunnable({ assistantId, client, asAgent: true });
const tools = await getConnectedTools(this, nodeVersion > 1, false);
const tools = await this.getConnectedTools(nodeVersion > 1, false);
let assistantTools;
if (tools.length) {
@ -270,7 +271,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
let filteredResponse: IDataObject = {};
try {
const response = await agentExecutor.withConfig(getTracingConfig(this)).invoke(chainValues);
const response = await agentExecutor.withConfig(this.getTracingConfig()).invoke(chainValues);
if (memory) {
await memory.saveContext({ input }, { output: response.output });

View file

@ -1,6 +1,6 @@
import {
NodeOperationError,
type IExecuteFunctions,
type AiRootNodeExecuteFunctions,
type INodeExecutionData,
NodeApiError,
} from 'n8n-workflow';
@ -13,7 +13,7 @@ import type { OpenAiType } from './node.type';
import * as text from './text';
import { getCustomErrorMessage } from '../helpers/error-handling';
export async function router(this: IExecuteFunctions) {
export async function router(this: AiRootNodeExecuteFunctions) {
const returnData: INodeExecutionData[] = [];
const items = this.getInputData();

View file

@ -2,14 +2,12 @@ import type { Tool } from '@langchain/core/tools';
import _omit from 'lodash/omit';
import type {
INodeProperties,
IExecuteFunctions,
AiRootNodeExecuteFunctions,
INodeExecutionData,
IDataObject,
} from 'n8n-workflow';
import { jsonParse, updateDisplayOptions } from 'n8n-workflow';
import { getConnectedTools } from '@utils/helpers';
import { MODELS_NOT_SUPPORT_FUNCTION_CALLS } from '../../helpers/constants';
import type { ChatCompletion } from '../../helpers/interfaces';
import { formatToOpenAIAssistantTool } from '../../helpers/utils';
@ -199,7 +197,10 @@ const displayOptions = {
export const description = updateDisplayOptions(displayOptions, properties);
export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
export async function execute(
this: AiRootNodeExecuteFunctions,
i: number,
): Promise<INodeExecutionData[]> {
const nodeVersion = this.getNode().typeVersion;
const model = this.getNodeParameter('modelId', i, '', { extractValue: true });
let messages = this.getNodeParameter('messages.values', i, []) as IDataObject[];
@ -239,7 +240,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
if (hideTools !== 'hide') {
const enforceUniqueNames = nodeVersion > 1;
externalTools = await getConnectedTools(this, enforceUniqueNames, false);
externalTools = await this.getConnectedTools(enforceUniqueNames, false);
}
if (externalTools.length) {

View file

@ -168,7 +168,7 @@
"generate-schema": "2.6.0",
"html-to-text": "9.0.5",
"jsdom": "23.0.1",
"langchain": "0.3.6",
"langchain": "catalog:",
"lodash": "catalog:",
"mammoth": "1.7.2",
"mime-types": "2.1.35",

View file

@ -1,4 +0,0 @@
import type { z } from 'zod';
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export type DynamicZodObject = z.ZodObject<any, any, any, any>;

View file

@ -4,16 +4,8 @@ import type { BaseLLM } from '@langchain/core/language_models/llms';
import type { BaseMessage } from '@langchain/core/messages';
import type { Tool } from '@langchain/core/tools';
import type { BaseChatMemory } from 'langchain/memory';
import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow';
import type {
AiEvent,
IDataObject,
IExecuteFunctions,
ISupplyDataFunctions,
IWebhookFunctions,
} from 'n8n-workflow';
import { N8nTool } from './N8nTool';
import { NodeOperationError } from 'n8n-workflow';
import type { IExecuteFunctions, ISupplyDataFunctions, IWebhookFunctions } from 'n8n-workflow';
function hasMethods<T>(obj: unknown, ...methodNames: Array<string | symbol>): obj is T {
return methodNames.every(
@ -72,32 +64,6 @@ export function isToolsInstance(model: unknown): model is Tool {
return namespace.includes('tools');
}
export function getPromptInputByType(options: {
ctx: IExecuteFunctions;
i: number;
promptTypeKey: string;
inputKey: string;
}) {
const { ctx, i, promptTypeKey, inputKey } = options;
const prompt = ctx.getNodeParameter(promptTypeKey, i) as string;
let input;
if (prompt === 'auto') {
input = ctx.evaluateExpression('{{ $json["chatInput"] }}', i) as string;
} else {
input = ctx.getNodeParameter(inputKey, i) as string;
}
if (input === undefined) {
throw new NodeOperationError(ctx.getNode(), 'No prompt specified', {
description:
"Expected to find the prompt in an input field called 'chatInput' (this is what the chat trigger node outputs). To use something else, change the 'Prompt' parameter",
});
}
return input;
}
export function getSessionId(
ctx: ISupplyDataFunctions | IWebhookFunctions,
itemIndex: number,
@ -139,18 +105,6 @@ export function getSessionId(
return sessionId;
}
export function logAiEvent(
executeFunctions: IExecuteFunctions | ISupplyDataFunctions,
event: AiEvent,
data?: IDataObject,
) {
try {
executeFunctions.logAiEvent(event, data ? jsonStringify(data) : undefined);
} catch (error) {
executeFunctions.logger.debug(`Error logging AI event: ${event}`);
}
}
export function serializeChatHistory(chatHistory: BaseMessage[]): string {
return chatHistory
.map((chatMessage) => {
@ -164,60 +118,3 @@ export function serializeChatHistory(chatHistory: BaseMessage[]): string {
})
.join('\n');
}
export function escapeSingleCurlyBrackets(text?: string): string | undefined {
	// Doubles lone curly brackets so template engines downstream treat them as
	// literal characters rather than interpolation markers.
	if (text === undefined) return undefined;

	// Order matters: triple brackets are rewritten before the single-bracket
	// rules so they are not partially consumed by them. The lookarounds skip
	// brackets that are already part of a longer run.
	const rules: Array<[RegExp, string]> = [
		[/(?<!{){{{(?!{)/g, '{{{{'],
		[/(?<!})}}}(?!})/g, '}}}}'],
		[/(?<!{){(?!{)/g, '{{'],
		[/(?<!})}(?!})/g, '}}'],
	];

	return rules.reduce((acc, [pattern, replacement]) => acc.replace(pattern, replacement), text);
}
export const getConnectedTools = async (
	ctx: IExecuteFunctions,
	enforceUniqueNames: boolean,
	convertStructuredTool: boolean = true,
	escapeCurlyBrackets: boolean = false,
) => {
	// Everything wired into the node's AiTool input; empty list when nothing is connected.
	const inputTools =
		((await ctx.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]) || [];

	// Without uniqueness enforcement the raw list is returned untouched.
	if (!enforceUniqueNames) return inputTools;

	const usedNames = new Set<string>();
	const uniqueTools = [];

	for (const tool of inputTools) {
		// Duplicate tool names would make tool-calls ambiguous for the model.
		if (usedNames.has(tool.name)) {
			throw new NodeOperationError(
				ctx.getNode(),
				`You have multiple tools with the same name: '${tool.name}', please rename them to avoid conflicts`,
			);
		}
		usedNames.add(tool.name);

		if (escapeCurlyBrackets) {
			// Lone `{`/`}` in descriptions would be parsed as template variables downstream.
			tool.description = escapeSingleCurlyBrackets(tool.description) ?? tool.description;
		}

		uniqueTools.push(
			convertStructuredTool && tool instanceof N8nTool ? tool.asDynamicTool() : tool,
		);
	}

	return uniqueTools;
};

View file

@ -18,7 +18,7 @@ import type {
} from 'n8n-workflow';
import { NodeOperationError, NodeConnectionType } from 'n8n-workflow';
import { logAiEvent, isToolsInstance, isBaseChatMemory, isBaseChatMessageHistory } from './helpers';
import { isToolsInstance, isBaseChatMemory, isBaseChatMessageHistory } from './helpers';
import { N8nBinaryLoader } from './N8nBinaryLoader';
import { N8nJsonLoader } from './N8nJsonLoader';
@ -182,7 +182,7 @@ export function logWrapper(
const payload = { action: 'getMessages', response };
executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);
logAiEvent(executeFunctions, 'ai-messages-retrieved-from-memory', { response });
executeFunctions.logAiEvent('ai-messages-retrieved-from-memory', { response });
return response;
};
} else if (prop === 'addMessage' && 'addMessage' in target) {
@ -199,7 +199,7 @@ export function logWrapper(
arguments: [message],
});
logAiEvent(executeFunctions, 'ai-message-added-to-memory', { message });
executeFunctions.logAiEvent('ai-message-added-to-memory', { message });
executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);
};
}
@ -236,7 +236,7 @@ export function logWrapper(
};
}
logAiEvent(executeFunctions, 'ai-documents-retrieved', { query });
executeFunctions.logAiEvent('ai-documents-retrieved', { query });
executeFunctions.addOutputData(
connectionType,
index,
@ -266,7 +266,7 @@ export function logWrapper(
arguments: [documents],
})) as number[][];
logAiEvent(executeFunctions, 'ai-document-embedded');
executeFunctions.logAiEvent('ai-document-embedded');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@ -286,7 +286,7 @@ export function logWrapper(
method: target[prop],
arguments: [query],
})) as number[];
logAiEvent(executeFunctions, 'ai-query-embedded');
executeFunctions.logAiEvent('ai-query-embedded');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@ -331,7 +331,7 @@ export function logWrapper(
arguments: [item, itemIndex],
})) as number[];
logAiEvent(executeFunctions, 'ai-document-processed');
executeFunctions.logAiEvent('ai-document-processed');
executeFunctions.addOutputData(connectionType, index, [
[{ json: { response }, pairedItem: { item: itemIndex } }],
]);
@ -357,7 +357,7 @@ export function logWrapper(
arguments: [text],
})) as string[];
logAiEvent(executeFunctions, 'ai-text-split');
executeFunctions.logAiEvent('ai-text-split');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@ -381,7 +381,7 @@ export function logWrapper(
arguments: [query],
})) as string;
logAiEvent(executeFunctions, 'ai-tool-called', { query, response });
executeFunctions.logAiEvent('ai-tool-called', { query, response });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@ -411,7 +411,7 @@ export function logWrapper(
arguments: [query, k, filter, _callbacks],
})) as Array<Document<Record<string, any>>>;
logAiEvent(executeFunctions, 'ai-vector-store-searched', { query });
executeFunctions.logAiEvent('ai-vector-store-searched', { query });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;

View file

@ -7,7 +7,6 @@ import type { ISupplyDataFunctions } from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';
import type { N8nStructuredOutputParser } from './N8nStructuredOutputParser';
import { logAiEvent } from '../helpers';
export class N8nOutputFixingParser extends BaseOutputParser {
lc_namespace = ['langchain', 'output_parsers', 'fix'];
@ -40,7 +39,7 @@ export class N8nOutputFixingParser extends BaseOutputParser {
try {
// First attempt to parse the completion
const response = await this.outputParser.parse(completion, callbacks, (e) => e);
logAiEvent(this.context, 'ai-output-parsed', { text: completion, response });
this.context.logAiEvent('ai-output-parsed', { text: completion, response });
this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [
[{ json: { action: 'parse', response } }],

View file

@ -5,8 +5,6 @@ import type { ISupplyDataFunctions } from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import { z } from 'zod';
import { logAiEvent } from '../helpers';
const STRUCTURED_OUTPUT_KEY = '__structured__output';
const STRUCTURED_OUTPUT_OBJECT_KEY = '__structured__output__object';
const STRUCTURED_OUTPUT_ARRAY_KEY = '__structured__output__array';
@ -41,7 +39,7 @@ export class N8nStructuredOutputParser extends StructuredOutputParser<
get(parsed, STRUCTURED_OUTPUT_KEY) ??
parsed) as Record<string, unknown>;
logAiEvent(this.context, 'ai-output-parsed', { text, response: result });
this.context.logAiEvent('ai-output-parsed', { text, response: result });
this.context.addOutputData(NodeConnectionType.AiOutputParser, index, [
[{ json: { action: 'parse', response: result } }],
@ -58,7 +56,7 @@ export class N8nStructuredOutputParser extends StructuredOutputParser<
},
);
logAiEvent(this.context, 'ai-output-parsed', {
this.context.logAiEvent('ai-output-parsed', {
text,
response: e.message ?? e,
});

View file

@ -1,26 +0,0 @@
import type { BaseCallbackConfig } from '@langchain/core/callbacks/manager';
import type { IExecuteFunctions } from 'n8n-workflow';
interface TracingConfig {
additionalMetadata?: Record<string, unknown>;
}
export function getTracingConfig(
	context: IExecuteFunctions,
	config: TracingConfig = {},
): BaseCallbackConfig {
	// Forward the parent run's callback manager when the execution engine
	// provided one, so sub-node runs attach to the same trace tree.
	const callbacks = context.getParentCallbackManager?.();

	return {
		// "[workflow] node" labelling keeps tracing UIs readable.
		runName: `[${context.getWorkflow().name}] ${context.getNode().name}`,
		metadata: {
			execution_id: context.getExecutionId(),
			workflow: context.getWorkflow(),
			node: context.getNode().name,
			// Caller-supplied metadata wins on key collisions.
			...(config.additionalMetadata ?? {}),
		},
		callbacks,
	};
}

View file

@ -48,6 +48,7 @@
"file-type": "16.5.4",
"form-data": "catalog:",
"iconv-lite": "catalog:",
"langchain": "catalog:",
"lodash": "catalog:",
"luxon": "catalog:",
"mime-types": "2.1.35",

View file

@ -51,6 +51,7 @@ import {
sleep,
ExecutionCancelledError,
Node,
AiRootNode,
} from 'n8n-workflow';
import PCancelable from 'p-cancelable';
import Container from 'typedi';
@ -1047,6 +1048,7 @@ export class WorkflowExecute {
if (nodeType.execute) {
const closeFunctions: CloseFunction[] = [];
const context = new ExecuteContext(
workflow,
node,
@ -1061,10 +1063,15 @@ export class WorkflowExecute {
abortSignal,
);
const data =
nodeType instanceof Node
? await nodeType.execute(context)
: await nodeType.execute.call(context);
let data: INodeExecutionData[][] | null;
if (nodeType instanceof AiRootNode) {
data = await nodeType.execute(context.getAiRootNodeExecuteFunctions());
} else {
data =
nodeType instanceof Node
? await nodeType.execute(context)
: await nodeType.execute.call(context);
}
const closeFunctionsResults = await Promise.allSettled(
closeFunctions.map(async (fn) => await fn()),

View file

@ -1,3 +1,4 @@
import type { CallbackManager } from '@langchain/core/callbacks/manager';
import { get } from 'lodash';
import type {
Workflow,
@ -9,7 +10,6 @@ import type {
ITaskDataConnections,
IExecuteData,
ICredentialDataDecryptedObject,
CallbackManager,
IExecuteWorkflowInfo,
RelatedExecution,
ExecuteWorkflowData,
@ -28,6 +28,7 @@ import {
NodeConnectionType,
WAIT_INDEFINITELY,
WorkflowDataProxy,
jsonStringify,
} from 'n8n-workflow';
import { Container } from 'typedi';
@ -236,14 +237,14 @@ export class BaseExecuteContext extends NodeExecutionContext {
}
}
logAiEvent(eventName: AiEvent, msg: string) {
logAiEvent(eventName: AiEvent, msg: object) {
return this.additionalData.logAiEvent(eventName, {
executionId: this.additionalData.executionId ?? 'unsaved-execution',
nodeName: this.node.name,
workflowName: this.workflow.name ?? 'Unnamed workflow',
nodeType: this.node.type,
workflowId: this.workflow.id ?? 'unsaved-workflow',
msg,
msg: jsonStringify(msg),
});
}
}

View file

@ -1,6 +1,9 @@
import type { BaseCallbackConfig, CallbackManager } from '@langchain/core/callbacks/manager';
import type { BaseOutputParser } from '@langchain/core/output_parsers';
import type { DynamicStructuredTool, Tool } from '@langchain/core/tools';
import type {
AINodeConnectionType,
CallbackManager,
AiRootNodeExecuteFunctions,
CloseFunction,
IExecuteData,
IExecuteFunctions,
@ -12,14 +15,17 @@ import type {
ITaskDataConnections,
IWorkflowExecuteAdditionalData,
Result,
TracingConfig,
Workflow,
WorkflowExecuteMode,
ZodObjectAny,
} from 'n8n-workflow';
import {
ApplicationError,
createDeferredPromise,
createEnvProviderState,
NodeConnectionType,
NodeOperationError,
} from 'n8n-workflow';
// eslint-disable-next-line import/no-cycle
@ -40,6 +46,8 @@ import {
} from '@/NodeExecuteFunctions';
import { BaseExecuteContext } from './base-execute-context';
import { N8nTool } from './n8n-tool';
import { escapeSingleCurlyBrackets } from './utils';
export class ExecuteContext extends BaseExecuteContext implements IExecuteFunctions {
readonly helpers: IExecuteFunctions['helpers'];
@ -206,4 +214,135 @@ export class ExecuteContext extends BaseExecuteContext implements IExecuteFuncti
getParentCallbackManager(): CallbackManager | undefined {
return this.additionalData.parentCallbackManager;
}
getAiRootNodeExecuteFunctions(): AiRootNodeExecuteFunctions {
	// Builds a view of this context that additionally exposes the AI helper
	// methods as own properties. `Object.create(this, …)` keeps the full
	// IExecuteFunctions surface reachable through the prototype chain.
	const aiHelperNames = [
		'getConnectedTools',
		'getPromptInputByType',
		'getTracingConfig',
		'extractParsedOutput',
		'checkForStructuredTools',
	] as const;

	const descriptors: PropertyDescriptorMap = {};
	for (const helperName of aiHelperNames) {
		// Plain `{ value }` descriptors (non-writable, non-enumerable), the same
		// shape the object-literal form produces.
		descriptors[helperName] = { value: this[helperName] };
	}

	return Object.create(this, descriptors);
}
async getConnectedTools(
	enforceUniqueNames: boolean,
	convertStructuredTool = true,
	escapeCurlyBrackets = false,
) {
	// Everything wired into this node's AiTool input; empty list when nothing is connected.
	const inputTools =
		((await this.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]) || [];

	// Without uniqueness enforcement the raw list is returned untouched.
	if (!enforceUniqueNames) return inputTools;

	const usedNames = new Set<string>();
	const uniqueTools = [];

	for (const tool of inputTools) {
		// Duplicate tool names would make tool-calls ambiguous for the model.
		if (usedNames.has(tool.name)) {
			throw new NodeOperationError(
				this.node,
				`You have multiple tools with the same name: '${tool.name}', please rename them to avoid conflicts`,
			);
		}
		usedNames.add(tool.name);

		if (escapeCurlyBrackets) {
			// Lone `{`/`}` in descriptions would be parsed as template variables downstream.
			tool.description = escapeSingleCurlyBrackets(tool.description) ?? tool.description;
		}

		uniqueTools.push(
			convertStructuredTool && tool instanceof N8nTool ? tool.asDynamicTool() : tool,
		);
	}

	return uniqueTools;
}
getPromptInputByType(
	itemIndex: number,
	// BUGFIX: the defaults were swapped ('text' / 'promptType'). The key read
	// first and compared against 'auto' is the prompt-type selector, which the
	// agent nodes name 'promptType'; the free-text parameter is named 'text'
	// (this matches what all callers of the old standalone helper passed).
	promptTypeKey: string = 'promptType',
	inputKey: string = 'text',
) {
	// Resolves the user prompt for `itemIndex`. When the prompt-type parameter
	// is 'auto', the text is pulled from the incoming item's `chatInput` field
	// (what the chat trigger node emits); otherwise it is read from the node's
	// own input parameter.
	const prompt = this.getNodeParameter(promptTypeKey, itemIndex) as string;

	let input;
	if (prompt === 'auto') {
		input = this.evaluateExpression('{{ $json["chatInput"] }}', itemIndex) as string;
	} else {
		input = this.getNodeParameter(inputKey, itemIndex) as string;
	}

	// `undefined` means neither source produced a prompt — surface a clear error.
	if (input === undefined) {
		throw new NodeOperationError(this.node, 'No prompt specified', {
			description:
				"Expected to find the prompt in an input field called 'chatInput' (this is what the chat trigger node outputs). To use something else, change the 'Prompt' parameter",
		});
	}

	return input;
}
getTracingConfig(config: TracingConfig = {}): BaseCallbackConfig {
	// Attach sub-node runs to the parent execution's callback manager when the
	// engine provided one, so traces form a single run tree.
	const callbacks = this.getParentCallbackManager?.();

	return {
		// "[workflow] node" labelling keeps tracing UIs readable.
		runName: `[${this.workflow.name}] ${this.node.name}`,
		metadata: {
			execution_id: this.getExecutionId(),
			workflow: this.workflow,
			node: this.node.name,
			// Caller-supplied metadata wins on key collisions.
			...(config.additionalMetadata ?? {}),
		},
		callbacks,
	};
}
async extractParsedOutput(
	outputParser: BaseOutputParser<unknown>,
	output: string,
): Promise<Record<string, unknown> | undefined> {
	// Run the connected output parser over the raw LLM text.
	const parsed = (await outputParser.parse(output)) as {
		output: Record<string, unknown>;
	};

	// Nodes up to v1.6 returned the parser result verbatim.
	if (this.node.typeVersion <= 1.6) return parsed;

	// From v1.7 on, prefer the `output` key when the parser wraps its result,
	// falling back to the whole parse result when it is absent.
	return parsed?.output ?? parsed;
}
checkForStructuredTools(
	tools: Array<Tool | DynamicStructuredTool<ZodObjectAny>>,
	node: INode,
	currentAgentType: string,
) {
	// Schema-driven (structured) tools are only usable by the Tools Agent;
	// reject them up front for any other agent type with an actionable error.
	const structuredTools = tools.filter(
		(tool) => tool.constructor.name === 'DynamicStructuredTool',
	);
	if (structuredTools.length === 0) return;

	const incompatibleNames = structuredTools.map((tool) => `"${tool.name}"`).join(', ');
	throw new NodeOperationError(
		node,
		`The selected tools are not supported by "${currentAgentType}", please use "Tools Agent" instead`,
		{
			itemIndex: 0,
			description: `Incompatible connected tools: ${incompatibleNames}`,
		},
	);
}
}

View file

@ -1,8 +1,12 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable @typescript-eslint/no-unsafe-return */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
/* eslint-disable @typescript-eslint/no-unsafe-argument */
import type { DynamicStructuredToolInput } from '@langchain/core/tools';
import { DynamicStructuredTool, DynamicTool } from '@langchain/core/tools';
import type { ISupplyDataFunctions, IDataObject } from 'n8n-workflow';
import { NodeConnectionType, jsonParse, NodeOperationError } from 'n8n-workflow';
import { StructuredOutputParser } from 'langchain/output_parsers';
import type { ISupplyDataFunctions, IDataObject } from 'n8n-workflow';
import { NodeConnectionType, jsonParse, NodeOperationError, ensureError } from 'n8n-workflow';
import type { ZodTypeAny } from 'zod';
import { ZodBoolean, ZodNullable, ZodNumber, ZodObject, ZodOptional } from 'zod';
@ -28,7 +32,7 @@ const getParametersDescription = (parameters: Array<[string, ZodTypeAny]>) =>
)
.join(',\n ');
export const prepareFallbackToolDescription = (toolDescription: string, schema: ZodObject<any>) => {
const prepareFallbackToolDescription = (toolDescription: string, schema: ZodObject<any>) => {
let description = `${toolDescription}`;
const toolParameters = Object.entries<ZodTypeAny>(schema.shape);
@ -80,7 +84,7 @@ export class N8nTool extends DynamicStructuredTool {
// Finally throw an error if we were unable to parse the query
throw new NodeOperationError(
context.getNode(),
`Input is not a valid JSON: ${error.message}`,
`Input is not a valid JSON: ${ensureError(error).message}`,
);
}
}
@ -92,14 +96,12 @@ export class N8nTool extends DynamicStructuredTool {
try {
// Call tool function with parsed query
const result = await func(parsedQuery);
return result;
return await func(parsedQuery);
} catch (e) {
const { index } = context.addInputData(NodeConnectionType.AiTool, [[{ json: { query } }]]);
void context.addOutputData(NodeConnectionType.AiTool, index, e);
return e.toString();
return ensureError(e).toString();
}
};

View file

@ -421,3 +421,21 @@ export function getAdditionalKeys(
$resumeWebhookUrl: resumeUrl,
};
}
export function escapeSingleCurlyBrackets(text?: string): string | undefined {
	// Doubles lone curly brackets so downstream template engines treat them as
	// literal characters instead of interpolation variables.
	if (text === undefined) return undefined;

	// Triple brackets are handled first so the single-bracket pass below does
	// not partially rewrite them; the lookarounds skip brackets that are
	// already part of a doubled pair.
	const withTriplesDoubled = text
		.replace(/(?<!{){{{(?!{)/g, '{{{{')
		.replace(/(?<!})}}}(?!})/g, '}}}}');

	return withTriplesDoubled
		.replace(/(?<!{){(?!{)/g, '{{')
		.replace(/(?<!})}(?!})/g, '}}');
}

View file

@ -1,6 +1,7 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import type { CallbackManager as CallbackManagerLC } from '@langchain/core/callbacks/manager';
import type { CallbackManager, BaseCallbackConfig } from '@langchain/core/callbacks/manager';
import type { BaseOutputParser } from '@langchain/core/output_parsers';
import type { DynamicStructuredTool, Tool } from '@langchain/core/tools';
import type { AxiosProxyConfig, GenericAbortSignal } from 'axios';
import type * as express from 'express';
import type FormData from 'form-data';
@ -11,6 +12,7 @@ import type { Client as SSHClient } from 'ssh2';
import type { Readable } from 'stream';
import type { SecureContextOptions } from 'tls';
import type { URLSearchParams } from 'url';
import type { ZodObject } from 'zod';
import type { CODE_EXECUTION_MODES, CODE_LANGUAGES, LOG_LEVELS } from './Constants';
import type { IDeferredPromise } from './DeferredPromise';
@ -26,6 +28,8 @@ import type { Workflow } from './Workflow';
import type { EnvProviderState } from './WorkflowDataProxyEnvProvider';
import type { WorkflowHooks } from './WorkflowHooks';
export type ZodObjectAny = ZodObject<any, any, any, any>;
export interface IAdditionalCredentialOptions {
oauth2?: IOAuth2Options;
credentialsDecrypted?: ICredentialsDecrypted;
@ -893,7 +897,7 @@ type BaseExecutionFunctions = FunctionsBaseWithRequiredKeys<'getMode'> & {
getInputSourceData(inputIndex?: number, connectionType?: NodeConnectionType): ISourceData;
getExecutionCancelSignal(): AbortSignal | undefined;
onExecutionCancellation(handler: () => unknown): void;
logAiEvent(eventName: AiEvent, msg?: string | undefined): void;
logAiEvent(eventName: AiEvent, msg?: object): void;
};
// TODO: Create later own type only for Config-Nodes
@ -919,8 +923,6 @@ export type IExecuteFunctions = ExecuteFunctions.GetNodeParameterFn &
putExecutionToWait(waitTill: Date): Promise<void>;
sendMessageToUI(message: any): void;
sendResponse(response: IExecuteResponsePromiseData): void;
// TODO: Make this one then only available in the new config one
addInputData(
connectionType: NodeConnectionType,
data: INodeExecutionData[][] | ExecutionError,
@ -932,6 +934,7 @@ export type IExecuteFunctions = ExecuteFunctions.GetNodeParameterFn &
data: INodeExecutionData[][] | ExecutionError,
metadata?: ITaskMetadata,
): void;
getAiRootNodeExecuteFunctions(): AiRootNodeExecuteFunctions;
nodeHelpers: NodeHelperFunctions;
helpers: RequestHelperFunctions &
@ -976,26 +979,49 @@ export interface IExecuteSingleFunctions extends BaseExecutionFunctions {
};
}
/** Options for building a LangChain tracing/callback config. */
export interface TracingConfig {
	// Merged into the callback config's `metadata` alongside the execution id,
	// workflow, and node name (see `ExecuteContext.getTracingConfig`).
	additionalMetadata?: Record<string, unknown>;
}
// TODO: `Pick` from IExecuteFunctions, but do not extend it completely
/**
 * Execution context handed to AI root nodes (see `AiRootNode`): the regular
 * execute functions plus helpers for working with connected AI subnodes.
 */
export type AiRootNodeExecuteFunctions = IExecuteFunctions & {
	/**
	 * Resolves the tools connected on the node's AiTool input. When
	 * `enforceUniqueNames` is set, duplicate tool names are rejected.
	 */
	getConnectedTools(
		enforceUniqueNames: boolean,
		convertStructuredTool?: boolean,
		escapeCurlyBrackets?: boolean,
	): Promise<Tool[]>;
	/** Reads the prompt for an item — from the chat trigger's `chatInput` field or from a node parameter. */
	getPromptInputByType(itemIndex: number, promptTypeKey?: string, inputKey?: string): string;
	/** Builds a LangChain callback config that attaches sub-runs to this execution's trace. */
	getTracingConfig(config?: TracingConfig): BaseCallbackConfig;
	/** Runs `outputParser` over raw LLM `output`; may unwrap an `output` key depending on node version. */
	extractParsedOutput(
		outputParser: BaseOutputParser<unknown>,
		output: string,
	): Promise<Record<string, unknown> | undefined>;
	/** Throws when structured (schema-based) tools are connected to an agent type that cannot use them. */
	checkForStructuredTools(
		tools: Array<Tool | DynamicStructuredTool<ZodObjectAny>>,
		node: INode,
		currentAgentType: string,
	): void;
};
export type ISupplyDataFunctions = ExecuteFunctions.GetNodeParameterFn &
FunctionsBaseWithRequiredKeys<'getMode'> &
Pick<
IExecuteFunctions,
| 'addInputData'
| 'addOutputData'
| 'continueOnFail'
| 'evaluateExpression'
| 'executeWorkflow'
| 'getExecutionCancelSignal'
| 'getInputConnectionData'
| 'getInputData'
| 'getNodeOutputs'
| 'executeWorkflow'
| 'getWorkflowDataProxy'
| 'logAiEvent'
| 'onExecutionCancellation'
| 'sendMessageToUI'
| 'helpers'
> & {
continueOnFail(): boolean;
evaluateExpression(expression: string, itemIndex: number): NodeParameterValueType;
getWorkflowDataProxy(itemIndex: number): IWorkflowDataProxyData;
getExecutionCancelSignal(): AbortSignal | undefined;
onExecutionCancellation(handler: () => unknown): void;
logAiEvent(eventName: AiEvent, msg?: string | undefined): void;
};
>;
export interface IExecutePaginationFunctions extends IExecuteSingleFunctions {
makeRoutingRequest(
@ -1606,6 +1632,14 @@ export abstract class Node {
poll?(context: IPollFunctions): Promise<INodeExecutionData[][] | null>;
}
/**
 * Base class for AI root nodes — nodes that can invoke subnodes such as
 * models, memory, and tools. Their `execute` receives the AI-augmented
 * `AiRootNodeExecuteFunctions` context instead of plain `IExecuteFunctions`.
 */
export abstract class AiRootNode extends Node {
	// Implemented by concrete AI root nodes; the engine calls it with the
	// extended context (see `ExecuteContext.getAiRootNodeExecuteFunctions`).
	execute?(context: AiRootNodeExecuteFunctions): Promise<INodeExecutionData[][]>;
}
export interface IVersionedNodeType {
nodeVersions: {
[key: number]: INodeType;
@ -2776,8 +2810,6 @@ export type BannerName =
export type Functionality = 'regular' | 'configuration-node' | 'pairedItem';
export type CallbackManager = CallbackManagerLC;
export type IPersonalizationSurveyAnswersV4 = {
version: 'v4';
personalization_survey_submitted_at: string;

View file

@ -45,6 +45,9 @@ catalogs:
iconv-lite:
specifier: 0.6.3
version: 0.6.3
langchain:
specifier: 0.3.6
version: 0.3.6
lodash:
specifier: 4.17.21
version: 4.17.21
@ -541,7 +544,7 @@ importers:
specifier: 23.0.1
version: 23.0.1
langchain:
specifier: 0.3.6
specifier: 'catalog:'
version: 0.3.6(e4rnrwhosnp2xiru36mqgdy2bu)
lodash:
specifier: 'catalog:'
@ -1114,7 +1117,7 @@ importers:
dependencies:
'@langchain/core':
specifier: 'catalog:'
version: 0.3.19(openai@4.73.1(zod@3.23.8))
version: 0.3.19(openai@4.73.1)
'@n8n/client-oauth2':
specifier: workspace:*
version: link:../@n8n/client-oauth2
@ -1148,6 +1151,9 @@ importers:
iconv-lite:
specifier: 'catalog:'
version: 0.6.3
langchain:
specifier: 'catalog:'
version: 0.3.6(@langchain/anthropic@0.3.8(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/aws@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/cohere@0.3.1(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(@langchain/google-genai@0.1.4(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8))(@langchain/google-vertexai@0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8))(@langchain/groq@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/mistralai@0.2.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/ollama@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(axios@1.7.4)(cheerio@1.0.0)(handlebars@4.7.8)(openai@4.73.1(zod@3.23.8))
lodash:
specifier: 'catalog:'
version: 4.17.21
@ -13974,6 +13980,25 @@ snapshots:
- '@aws-sdk/client-sso-oidc'
- aws-crt
'@aws-sdk/credential-provider-ini@3.666.0(@aws-sdk/client-sts@3.666.0)':
dependencies:
'@aws-sdk/client-sts': 3.666.0
'@aws-sdk/credential-provider-env': 3.664.0
'@aws-sdk/credential-provider-http': 3.666.0
'@aws-sdk/credential-provider-process': 3.664.0
'@aws-sdk/credential-provider-sso': 3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))
'@aws-sdk/credential-provider-web-identity': 3.664.0(@aws-sdk/client-sts@3.666.0)
'@aws-sdk/types': 3.664.0
'@smithy/credential-provider-imds': 3.2.4
'@smithy/property-provider': 3.1.7
'@smithy/shared-ini-file-loader': 3.1.8
'@smithy/types': 3.5.0
tslib: 2.6.2
transitivePeerDependencies:
- '@aws-sdk/client-sso-oidc'
- aws-crt
optional: true
'@aws-sdk/credential-provider-node@3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@aws-sdk/client-sts@3.666.0)':
dependencies:
'@aws-sdk/credential-provider-env': 3.664.0
@ -13993,6 +14018,26 @@ snapshots:
- '@aws-sdk/client-sts'
- aws-crt
'@aws-sdk/credential-provider-node@3.666.0(@aws-sdk/client-sts@3.666.0)':
dependencies:
'@aws-sdk/credential-provider-env': 3.664.0
'@aws-sdk/credential-provider-http': 3.666.0
'@aws-sdk/credential-provider-ini': 3.666.0(@aws-sdk/client-sts@3.666.0)
'@aws-sdk/credential-provider-process': 3.664.0
'@aws-sdk/credential-provider-sso': 3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))
'@aws-sdk/credential-provider-web-identity': 3.664.0(@aws-sdk/client-sts@3.666.0)
'@aws-sdk/types': 3.664.0
'@smithy/credential-provider-imds': 3.2.4
'@smithy/property-provider': 3.1.7
'@smithy/shared-ini-file-loader': 3.1.8
'@smithy/types': 3.5.0
tslib: 2.6.2
transitivePeerDependencies:
- '@aws-sdk/client-sso-oidc'
- '@aws-sdk/client-sts'
- aws-crt
optional: true
'@aws-sdk/credential-provider-process@3.664.0':
dependencies:
'@aws-sdk/types': 3.664.0
@ -14022,6 +14067,29 @@ snapshots:
'@smithy/types': 3.5.0
tslib: 2.6.2
'@aws-sdk/credential-providers@3.666.0':
dependencies:
'@aws-sdk/client-cognito-identity': 3.666.0
'@aws-sdk/client-sso': 3.666.0
'@aws-sdk/client-sts': 3.666.0
'@aws-sdk/credential-provider-cognito-identity': 3.666.0
'@aws-sdk/credential-provider-env': 3.664.0
'@aws-sdk/credential-provider-http': 3.666.0
'@aws-sdk/credential-provider-ini': 3.666.0(@aws-sdk/client-sts@3.666.0)
'@aws-sdk/credential-provider-node': 3.666.0(@aws-sdk/client-sts@3.666.0)
'@aws-sdk/credential-provider-process': 3.664.0
'@aws-sdk/credential-provider-sso': 3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))
'@aws-sdk/credential-provider-web-identity': 3.664.0(@aws-sdk/client-sts@3.666.0)
'@aws-sdk/types': 3.664.0
'@smithy/credential-provider-imds': 3.2.4
'@smithy/property-provider': 3.1.7
'@smithy/types': 3.5.0
tslib: 2.6.2
transitivePeerDependencies:
- '@aws-sdk/client-sso-oidc'
- aws-crt
optional: true
'@aws-sdk/credential-providers@3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))':
dependencies:
'@aws-sdk/client-cognito-identity': 3.666.0
@ -16087,6 +16155,18 @@ snapshots:
- encoding
- supports-color
'@langchain/anthropic@0.3.8(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))':
dependencies:
'@anthropic-ai/sdk': 0.27.3(encoding@0.1.13)
'@langchain/core': 0.3.19(openai@4.73.1)
fast-xml-parser: 4.4.1
zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8)
transitivePeerDependencies:
- encoding
- supports-color
optional: true
'@langchain/aws@0.1.2(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@aws-sdk/client-sts@3.666.0)(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))':
dependencies:
'@aws-sdk/client-bedrock-agent-runtime': 3.666.0
@ -16101,6 +16181,21 @@ snapshots:
- '@aws-sdk/client-sts'
- aws-crt
'@langchain/aws@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))':
dependencies:
'@aws-sdk/client-bedrock-agent-runtime': 3.666.0
'@aws-sdk/client-bedrock-runtime': 3.666.0
'@aws-sdk/client-kendra': 3.666.0
'@aws-sdk/credential-provider-node': 3.666.0(@aws-sdk/client-sts@3.666.0)
'@langchain/core': 0.3.19(openai@4.73.1)
zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8)
transitivePeerDependencies:
- '@aws-sdk/client-sso-oidc'
- '@aws-sdk/client-sts'
- aws-crt
optional: true
'@langchain/cohere@0.3.1(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
@ -16113,6 +16208,19 @@ snapshots:
- aws-crt
- encoding
'@langchain/cohere@0.3.1(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
cohere-ai: 7.14.0
uuid: 10.0.0
zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8)
transitivePeerDependencies:
- '@aws-sdk/client-sso-oidc'
- aws-crt
- encoding
optional: true
'@langchain/community@0.3.15(vc5hvyy27o4cmm4jplsptc2fqm)':
dependencies:
'@ibm-cloud/watsonx-ai': 1.1.2
@ -16200,22 +16308,6 @@ snapshots:
transitivePeerDependencies:
- openai
'@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))':
dependencies:
ansi-styles: 5.2.0
camelcase: 6.3.0
decamelize: 1.2.0
js-tiktoken: 1.0.12
langsmith: 0.2.3(openai@4.73.1(zod@3.23.8))
mustache: 4.2.0
p-queue: 6.6.2
p-retry: 4.6.2
uuid: 10.0.0
zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8)
transitivePeerDependencies:
- openai
'@langchain/core@0.3.19(openai@4.73.1)':
dependencies:
ansi-styles: 5.2.0
@ -16240,6 +16332,15 @@ snapshots:
transitivePeerDependencies:
- zod
'@langchain/google-common@0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
uuid: 10.0.0
zod-to-json-schema: 3.23.3(zod@3.23.8)
transitivePeerDependencies:
- zod
optional: true
'@langchain/google-gauth@0.1.3(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)(zod@3.23.8)':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
@ -16250,6 +16351,17 @@ snapshots:
- supports-color
- zod
'@langchain/google-gauth@0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
'@langchain/google-common': 0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)
google-auth-library: 8.9.0(encoding@0.1.13)
transitivePeerDependencies:
- encoding
- supports-color
- zod
optional: true
'@langchain/google-genai@0.1.4(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(zod@3.23.8)':
dependencies:
'@google/generative-ai': 0.21.0
@ -16258,6 +16370,15 @@ snapshots:
transitivePeerDependencies:
- zod
'@langchain/google-genai@0.1.4(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)':
dependencies:
'@google/generative-ai': 0.21.0
'@langchain/core': 0.3.19(openai@4.73.1)
zod-to-json-schema: 3.23.3(zod@3.23.8)
transitivePeerDependencies:
- zod
optional: true
'@langchain/google-vertexai@0.1.3(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)(zod@3.23.8)':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
@ -16267,6 +16388,16 @@ snapshots:
- supports-color
- zod
'@langchain/google-vertexai@0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
'@langchain/google-gauth': 0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)
transitivePeerDependencies:
- encoding
- supports-color
- zod
optional: true
'@langchain/groq@0.1.2(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
@ -16278,6 +16409,18 @@ snapshots:
- encoding
- supports-color
'@langchain/groq@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
'@langchain/openai': 0.3.14(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
groq-sdk: 0.5.0(encoding@0.1.13)
zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8)
transitivePeerDependencies:
- encoding
- supports-color
optional: true
'@langchain/mistralai@0.2.0(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
@ -16286,12 +16429,28 @@ snapshots:
zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8)
'@langchain/mistralai@0.2.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
'@mistralai/mistralai': 1.3.4(zod@3.23.8)
uuid: 10.0.0
zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8)
optional: true
'@langchain/ollama@0.1.2(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
ollama: 0.5.9
uuid: 10.0.0
'@langchain/ollama@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
ollama: 0.5.9
uuid: 10.0.0
optional: true
'@langchain/openai@0.3.14(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
@ -16303,6 +16462,17 @@ snapshots:
- encoding
- supports-color
'@langchain/openai@0.3.14(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
js-tiktoken: 1.0.12
openai: 4.73.1(zod@3.23.8)
zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8)
transitivePeerDependencies:
- encoding
- supports-color
'@langchain/pinecone@0.1.3(@langchain/core@0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8)))':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
@ -16328,6 +16498,11 @@ snapshots:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
js-tiktoken: 1.0.12
'@langchain/textsplitters@0.1.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))':
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
js-tiktoken: 1.0.12
'@lezer/common@1.1.0': {}
'@lezer/common@1.2.1': {}
@ -19460,14 +19635,6 @@ snapshots:
transitivePeerDependencies:
- debug
axios@1.7.7:
dependencies:
follow-redirects: 1.15.6(debug@4.3.6)
form-data: 4.0.0
proxy-from-env: 1.1.0
transitivePeerDependencies:
- debug
axios@1.7.7(debug@4.3.6):
dependencies:
follow-redirects: 1.15.6(debug@4.3.6)
@ -20021,6 +20188,26 @@ snapshots:
'@lezer/html': 1.3.0
'@lezer/lr': 1.4.0
cohere-ai@7.14.0:
dependencies:
'@aws-sdk/client-sagemaker': 3.666.0
'@aws-sdk/credential-providers': 3.666.0
'@aws-sdk/protocol-http': 3.374.0
'@aws-sdk/signature-v4': 3.374.0
form-data: 4.0.0
form-data-encoder: 4.0.2
formdata-node: 6.0.3
js-base64: 3.7.2
node-fetch: 2.7.0(encoding@0.1.13)
qs: 6.11.2
readable-stream: 4.5.2
url-join: 4.0.1
transitivePeerDependencies:
- '@aws-sdk/client-sso-oidc'
- aws-crt
- encoding
optional: true
cohere-ai@7.14.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(encoding@0.1.13):
dependencies:
'@aws-sdk/client-sagemaker': 3.666.0
@ -22384,7 +22571,7 @@ snapshots:
infisical-node@1.3.0:
dependencies:
axios: 1.7.7
axios: 1.7.7(debug@4.3.6)
dotenv: 16.3.1
tweetnacl: 1.0.3
tweetnacl-util: 0.15.1
@ -23318,6 +23505,38 @@ snapshots:
kuler@2.0.0: {}
langchain@0.3.6(@langchain/anthropic@0.3.8(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/aws@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/cohere@0.3.1(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(@langchain/google-genai@0.1.4(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8))(@langchain/google-vertexai@0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8))(@langchain/groq@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/mistralai@0.2.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(@langchain/ollama@0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8))))(axios@1.7.4)(cheerio@1.0.0)(handlebars@4.7.8)(openai@4.73.1(zod@3.23.8)):
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1)
'@langchain/openai': 0.3.14(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
'@langchain/textsplitters': 0.1.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
js-tiktoken: 1.0.12
js-yaml: 4.1.0
jsonpointer: 5.0.1
langsmith: 0.2.3(openai@4.73.1)
openapi-types: 12.1.3
p-retry: 4.6.2
uuid: 10.0.0
yaml: 2.3.4
zod: 3.23.8
zod-to-json-schema: 3.23.3(zod@3.23.8)
optionalDependencies:
'@langchain/anthropic': 0.3.8(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
'@langchain/aws': 0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
'@langchain/cohere': 0.3.1(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
'@langchain/google-genai': 0.1.4(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)
'@langchain/google-vertexai': 0.1.3(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))(zod@3.23.8)
'@langchain/groq': 0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
'@langchain/mistralai': 0.2.0(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
'@langchain/ollama': 0.1.2(@langchain/core@0.3.19(openai@4.73.1(zod@3.23.8)))
axios: 1.7.4
cheerio: 1.0.0
handlebars: 4.7.8
transitivePeerDependencies:
- encoding
- openai
- supports-color
langchain@0.3.6(e4rnrwhosnp2xiru36mqgdy2bu):
dependencies:
'@langchain/core': 0.3.19(openai@4.73.1(encoding@0.1.13)(zod@3.23.8))
@ -23361,17 +23580,6 @@ snapshots:
optionalDependencies:
openai: 4.73.1(encoding@0.1.13)(zod@3.23.8)
langsmith@0.2.3(openai@4.73.1(zod@3.23.8)):
dependencies:
'@types/uuid': 10.0.0
commander: 10.0.1
p-queue: 6.6.2
p-retry: 4.6.2
semver: 7.6.0
uuid: 10.0.0
optionalDependencies:
openai: 4.73.1(zod@3.23.8)
langsmith@0.2.3(openai@4.73.1):
dependencies:
'@types/uuid': 10.0.0
@ -24729,7 +24937,6 @@ snapshots:
transitivePeerDependencies:
- encoding
- supports-color
optional: true
openapi-sampler@1.5.1:
dependencies:
@ -25113,7 +25320,7 @@ snapshots:
posthog-node@3.2.1:
dependencies:
axios: 1.7.7
axios: 1.7.7(debug@4.3.6)
rusha: 0.8.14
transitivePeerDependencies:
- debug
@ -26131,7 +26338,7 @@ snapshots:
asn1.js: 5.4.1
asn1.js-rfc2560: 5.0.1(asn1.js@5.4.1)
asn1.js-rfc5280: 3.0.0
axios: 1.7.7
axios: 1.7.7(debug@4.3.6)
big-integer: 1.6.51
bignumber.js: 9.1.2
binascii: 0.0.2

View file

@ -27,6 +27,7 @@ catalog:
xss: 1.0.15
zod: 3.23.8
'@langchain/core': 0.3.19
langchain: 0.3.6
catalogs:
frontend: