From 0882dc0ce9ad4c9260390f99be56df2d6f7b5e86 Mon Sep 17 00:00:00 2001 From: oleg Date: Fri, 23 Feb 2024 10:27:39 +0100 Subject: [PATCH] fix: Fix execution error when using AI chain nodes with non-chat model (#8724) Signed-off-by: Oleg Ivaniv --- .../shared/createVectorStoreNode.ts | 8 +- .../@n8n/nodes-langchain/utils/helpers.ts | 15 +++- .../@n8n/nodes-langchain/utils/logWrapper.ts | 86 +++++++------------ 3 files changed, 50 insertions(+), 59 deletions(-) diff --git a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts index b061afa348..409fae30ab 100644 --- a/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts +++ b/packages/@n8n/nodes-langchain/nodes/vector_store/shared/createVectorStoreNode.ts @@ -1,7 +1,7 @@ /* eslint-disable n8n-nodes-base/node-filename-against-convention */ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */ import type { VectorStore } from 'langchain/vectorstores/base'; -import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow'; +import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import type { INodeCredentialDescription, INodeProperties, @@ -18,7 +18,7 @@ import type { Document } from 'langchain/document'; import { logWrapper } from '../../../utils/logWrapper'; import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader'; import type { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader'; -import { getMetadataFiltersValues } from '../../../utils/helpers'; +import { getMetadataFiltersValues, logAiEvent } from '../../../utils/helpers'; import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; import { processDocument } from './processDocuments'; @@ -237,7 +237,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) => }); resultData.push(...serializedDocs); - void 
this.logAiEvent('n8n.ai.vector.store.searched', jsonStringify({ query: prompt })); + void logAiEvent(this, 'n8n.ai.vector.store.searched', { query: prompt }); } return await this.prepareOutputData(resultData); @@ -264,7 +264,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) => try { await args.populateVectorStore(this, embeddings, processedDocuments, itemIndex); - void this.logAiEvent('n8n.ai.vector.store.populated'); + void logAiEvent(this, 'n8n.ai.vector.store.populated'); } catch (error) { throw error; } diff --git a/packages/@n8n/nodes-langchain/utils/helpers.ts b/packages/@n8n/nodes-langchain/utils/helpers.ts index 94f7cdd71e..f6778fea45 100644 --- a/packages/@n8n/nodes-langchain/utils/helpers.ts +++ b/packages/@n8n/nodes-langchain/utils/helpers.ts @@ -1,4 +1,5 @@ -import { NodeConnectionType, type IExecuteFunctions, NodeOperationError } from 'n8n-workflow'; +import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow'; +import type { EventNamesAiNodesType, IDataObject, IExecuteFunctions } from 'n8n-workflow'; import { BaseChatModel } from 'langchain/chat_models/base'; import { BaseChatModel as BaseChatModelCore } from '@langchain/core/language_models/chat_models'; import type { BaseOutputParser } from '@langchain/core/output_parsers'; @@ -64,3 +65,15 @@ export function getPromptInputByType(options: { return input; } + +export async function logAiEvent( + executeFunctions: IExecuteFunctions, + event: EventNamesAiNodesType, + data?: IDataObject, +) { + try { + await executeFunctions.logAiEvent(event, data ? 
jsonStringify(data) : undefined); + } catch (error) { + executeFunctions.logger.debug(`Error logging AI event: ${event}`); + } +} diff --git a/packages/@n8n/nodes-langchain/utils/logWrapper.ts b/packages/@n8n/nodes-langchain/utils/logWrapper.ts index a5d861a1c6..2b358f7d68 100644 --- a/packages/@n8n/nodes-langchain/utils/logWrapper.ts +++ b/packages/@n8n/nodes-langchain/utils/logWrapper.ts @@ -1,16 +1,10 @@ -import { - NodeOperationError, - type ConnectionTypes, - type IExecuteFunctions, - type INodeExecutionData, - NodeConnectionType, - jsonStringify, -} from 'n8n-workflow'; +import { NodeOperationError, NodeConnectionType } from 'n8n-workflow'; +import type { ConnectionTypes, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow'; import { Tool } from 'langchain/tools'; -import type { BaseMessage, ChatResult, InputValues } from 'langchain/schema'; +import type { ChatResult, InputValues, BaseMessage } from 'langchain/schema'; import { BaseChatMessageHistory } from 'langchain/schema'; -import { BaseChatModel } from 'langchain/chat_models/base'; +import type { BaseChatModel } from 'langchain/chat_models/base'; import type { CallbackManagerForLLMRun } from 'langchain/callbacks'; import { Embeddings } from 'langchain/embeddings/base'; @@ -28,7 +22,7 @@ import { BaseOutputParser } from 'langchain/schema/output_parser'; import { isObject } from 'lodash'; import { N8nJsonLoader } from './N8nJsonLoader'; import { N8nBinaryLoader } from './N8nBinaryLoader'; -import { isChatInstance } from './helpers'; +import { isChatInstance, logAiEvent } from './helpers'; const errorsMap: { [key: string]: { message: string; description: string } } = { 'You exceeded your current quota, please check your plan and billing details.': { @@ -202,10 +196,7 @@ export function logWrapper( const payload = { action: 'getMessages', response }; executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]); - void executeFunctions.logAiEvent( - 'n8n.ai.memory.get.messages', - 
jsonStringify({ response }), - ); + void logAiEvent(executeFunctions, 'n8n.ai.memory.get.messages', { response }); return response; }; } else if (prop === 'addMessage' && 'addMessage' in target) { @@ -222,10 +213,7 @@ export function logWrapper( arguments: [message], }); - void executeFunctions.logAiEvent( - 'n8n.ai.memory.added.message', - jsonStringify({ message }), - ); + void logAiEvent(executeFunctions, 'n8n.ai.memory.added.message', { message }); executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]); }; } @@ -255,18 +243,21 @@ export function logWrapper( runManager, ], })) as ChatResult; + const parsedMessages = + typeof messages === 'string' + ? messages + : messages.map((message) => { + if (typeof message === 'string') return message; + if (typeof message?.toJSON === 'function') return message.toJSON(); - void executeFunctions.logAiEvent( - 'n8n.ai.llm.generated', - jsonStringify({ - messages: - typeof messages === 'string' - ? messages - : messages.map((message) => message.toJSON()), - options, - response, - }), - ); + return message; + }); + + void logAiEvent(executeFunctions, 'n8n.ai.llm.generated', { + messages: parsedMessages, + options, + response, + }); executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]); return response; } catch (error) { @@ -299,10 +290,9 @@ export function logWrapper( executeFunctions.addOutputData(connectionType, index, [ [{ json: { action: 'getFormatInstructions', response } }], ]); - void executeFunctions.logAiEvent( - 'n8n.ai.output.parser.get.instructions', - jsonStringify({ response }), - ); + void logAiEvent(executeFunctions, 'n8n.ai.output.parser.get.instructions', { + response, + }); return response; }; } else if (prop === 'parse' && 'parse' in target) { @@ -321,10 +311,7 @@ export function logWrapper( arguments: [stringifiedText], })) as object; - void executeFunctions.logAiEvent( - 'n8n.ai.output.parser.parsed', - jsonStringify({ text, response }), - ); + void 
logAiEvent(executeFunctions, 'n8n.ai.output.parser.parsed', { text, response });
 executeFunctions.addOutputData(connectionType, index, [
 [{ json: { action: 'parse', response } }],
 ]);
 return response;
 };
 }
 }
@@ -353,10 +340,7 @@ export function logWrapper(
 arguments: [query, config],
 })) as Array<Document<Record<string, any>>>;
- void executeFunctions.logAiEvent(
- 'n8n.ai.retriever.get.relevant.documents',
- jsonStringify({ query }),
- );
+ void logAiEvent(executeFunctions, 'n8n.ai.retriever.get.relevant.documents', { query });
 executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
 return response;
 };
@@ -381,7 +365,7 @@ export function logWrapper(
 arguments: [documents],
 })) as number[][];
- void executeFunctions.logAiEvent('n8n.ai.embeddings.embedded.document');
+ void logAiEvent(executeFunctions, 'n8n.ai.embeddings.embedded.document');
 executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
 return response;
 };
@@ -401,7 +385,7 @@ export function logWrapper(
 method: target[prop],
 arguments: [query],
 })) as number[];
- void executeFunctions.logAiEvent('n8n.ai.embeddings.embedded.query');
+ void logAiEvent(executeFunctions, 'n8n.ai.embeddings.embedded.query');
 executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
 return response;
 };
@@ -446,7 +430,7 @@ export function logWrapper(
 arguments: [item, itemIndex],
 })) as number[];
- void executeFunctions.logAiEvent('n8n.ai.document.processed');
+ void logAiEvent(executeFunctions, 'n8n.ai.document.processed');
 executeFunctions.addOutputData(connectionType, index, [
 [{ json: { response }, pairedItem: { item: itemIndex } }],
 ]);
@@ -472,7 +456,7 @@ export function logWrapper(
 arguments: [text],
 })) as string[];
- void executeFunctions.logAiEvent('n8n.ai.text.splitter.split');
+ void logAiEvent(executeFunctions, 'n8n.ai.text.splitter.split');
 executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
 return response;
 };
@@ -496,10 +480,7 @@ export function logWrapper(
arguments: [query],
 })) as string;
- void executeFunctions.logAiEvent(
- 'n8n.ai.tool.called',
- jsonStringify({ query, response }),
- );
+ void logAiEvent(executeFunctions, 'n8n.ai.tool.called', { query, response });
 executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
 return response;
 };
@@ -529,10 +510,7 @@ export function logWrapper(
 arguments: [query, k, filter, _callbacks],
 })) as Array<Document<Record<string, any>>>;
- void executeFunctions.logAiEvent(
- 'n8n.ai.vector.store.searched',
- jsonStringify({ query }),
- );
+ void logAiEvent(executeFunctions, 'n8n.ai.vector.store.searched', { query });
 executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
 return response