From 577d5a1bff0f583a953e0f459d882ed929fb8168 Mon Sep 17 00:00:00 2001
From: oleg
Date: Thu, 6 Feb 2025 17:28:13 +0100
Subject: [PATCH] refactor(Google Gemini Chat Model Node): Provide custom
 message mapper to N8nLlmTracing (no-changelog) (#13076)

---
 .../LmChatGoogleGemini.node.ts  | 22 +++++++++++++------
 .../nodes/llms/N8nLlmTracing.ts | 10 ++++++++-
 2 files changed, 24 insertions(+), 8 deletions(-)

diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.ts
index 05c2475556..f6ff0f5038 100644
--- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatGoogleGemini/LmChatGoogleGemini.node.ts
@@ -1,12 +1,13 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
 import type { SafetySetting } from '@google/generative-ai';
 import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
-import {
-	NodeConnectionType,
-	type INodeType,
-	type INodeTypeDescription,
-	type ISupplyDataFunctions,
-	type SupplyData,
+import { NodeConnectionType } from 'n8n-workflow';
+import type {
+	NodeError,
+	INodeType,
+	INodeTypeDescription,
+	ISupplyDataFunctions,
+	SupplyData,
 } from 'n8n-workflow';
 
 import { getConnectionHintNoticeField } from '@utils/sharedFields';
@@ -15,6 +16,13 @@ import { additionalOptions } from '../gemini-common/additional-options';
 import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
 import { N8nLlmTracing } from '../N8nLlmTracing';
 
+function errorDescriptionMapper(error: NodeError) {
+	if (error.description?.includes('properties: should be non-empty for OBJECT type')) {
+		return 'Google Gemini requires at least one dynamic parameter when using tools';
+	}
+
+	return error.description ?? 'Unknown error';
+}
 export class LmChatGoogleGemini implements INodeType {
 	description: INodeTypeDescription = {
 		displayName: 'Google Gemini Chat Model',
@@ -147,7 +155,7 @@ export class LmChatGoogleGemini implements INodeType {
 			temperature: options.temperature,
 			maxOutputTokens: options.maxOutputTokens,
 			safetySettings,
-			callbacks: [new N8nLlmTracing(this)],
+			callbacks: [new N8nLlmTracing(this, { errorDescriptionMapper })],
 			onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
 		});
 
diff --git a/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts b/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts
index 3d426309b7..7f7d70841e 100644
--- a/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts
+++ b/packages/@n8n/nodes-langchain/nodes/llms/N8nLlmTracing.ts
@@ -61,11 +61,15 @@
 				totalTokens: completionTokens + promptTokens,
 			};
 		},
+		errorDescriptionMapper: (error: NodeError) => error.description,
 	};
 
 	constructor(
 		private executionFunctions: ISupplyDataFunctions,
-		options?: { tokensUsageParser: TokensUsageParser },
+		options?: {
+			tokensUsageParser?: TokensUsageParser;
+			errorDescriptionMapper?: (error: NodeError) => string;
+		},
 	) {
 		super();
 		this.options = { ...this.options, ...options };
@@ -192,6 +196,10 @@
 		}
 
 		if (error instanceof NodeError) {
+			if (this.options.errorDescriptionMapper) {
+				error.description = this.options.errorDescriptionMapper(error);
+			}
+
 			this.executionFunctions.addOutputData(this.connectionType, runDetails.index, error);
 		} else {
 			// If the error is not a NodeError, we wrap it in a NodeOperationError