From 0b5299a248fdd451ceabb98ff6a2b38e818d02f8 Mon Sep 17 00:00:00 2001
From: Eugene
Date: Tue, 17 Sep 2024 16:27:21 +0200
Subject: [PATCH] fix(Azure OpenAI Chat Model Node): Add response format option (#10851)

---
 .../LmChatAzureOpenAi.node.ts | 41 +++++++++++++++++--
 1 file changed, 37 insertions(+), 4 deletions(-)

diff --git a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts
index 96f5b8e6d8..03548142db 100644
--- a/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts
+++ b/packages/@n8n/nodes-langchain/nodes/llms/LmChatAzureOpenAi/LmChatAzureOpenAi.node.ts
@@ -7,7 +7,6 @@ import {
 	type SupplyData,
 } from 'n8n-workflow';
 
-import type { ClientOptions } from '@langchain/openai';
 import { ChatOpenAI } from '@langchain/openai';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
 import { N8nLlmTracing } from '../N8nLlmTracing';
@@ -51,6 +50,18 @@ export class LmChatAzureOpenAi implements INodeType {
 		],
 		properties: [
 			getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
+			{
+				displayName:
+					'When using the JSON response format, you must include the word "json" in the prompt of your chain or agent. Also, make sure to select a model released after November 2023.',
+				name: 'notice',
+				type: 'notice',
+				default: '',
+				displayOptions: {
+					show: {
+						'/options.responseFormat': ['json_object'],
+					},
+				},
+			},
 			{
 				displayName: 'Model (Deployment) Name',
 				name: 'model',
@@ -86,6 +97,25 @@
 						maxValue: 32768,
 					},
 				},
+				{
+					displayName: 'Response Format',
+					name: 'responseFormat',
+					default: 'text',
+					type: 'options',
+					options: [
+						{
+							name: 'Text',
+							value: 'text',
+							description: 'Regular text response',
+						},
+						{
+							name: 'JSON',
+							value: 'json_object',
+							description:
+								'Enables JSON mode, which should guarantee the message the model generates is valid JSON',
+						},
+					],
+				},
 				{
 					displayName: 'Presence Penalty',
 					name: 'presencePenalty',
@@ -148,10 +178,9 @@
 			presencePenalty?: number;
 			temperature?: number;
 			topP?: number;
+			responseFormat?: 'text' | 'json_object';
 		};
 
-		const configuration: ClientOptions = {};
-
 		const model = new ChatOpenAI({
 			azureOpenAIApiDeploymentName: modelName,
 			azureOpenAIApiInstanceName: credentials.resourceName,
@@ -160,8 +189,12 @@
 			...options,
 			timeout: options.timeout ?? 60000,
 			maxRetries: options.maxRetries ?? 2,
-			configuration,
 			callbacks: [new N8nLlmTracing(this)],
+			modelKwargs: options.responseFormat
+				? {
+						response_format: { type: options.responseFormat },
+					}
+				: undefined,
 		});
 
 		return {
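
Reviewer note: the sketch below is a minimal standalone reproduction of what the patched node ends up passing to LangChain, useful for verifying the change outside n8n. It assumes `@langchain/openai` is installed and run under Node; the deployment name, instance name, and API version are hypothetical placeholders, not values taken from this patch.

	import { ChatOpenAI } from '@langchain/openai';

	async function main() {
		// Same mechanism as the last hunk above: the user's choice is
		// forwarded to Azure OpenAI through modelKwargs as `response_format`.
		// `undefined` in the union mirrors the node's optional option.
		const responseFormat: 'text' | 'json_object' | undefined = 'json_object';

		const model = new ChatOpenAI({
			azureOpenAIApiDeploymentName: 'my-deployment', // placeholder
			azureOpenAIApiInstanceName: 'my-resource', // placeholder
			azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
			azureOpenAIApiVersion: '2024-02-01', // placeholder
			modelKwargs: responseFormat
				? { response_format: { type: responseFormat } }
				: undefined,
		});

		// JSON mode requires the word "json" to appear in the prompt,
		// which is why the patch adds the notice for the JSON option.
		const result = await model.invoke(
			'Reply with a json object containing a "greeting" key.',
		);
		console.log(result.content);
	}

	void main();

With `responseFormat` left undefined, `modelKwargs` is omitted and the request behaves exactly as before the patch, so the default remains plain text output.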