fix(Azure OpenAI Chat Model Node): Add response format option (#10851)

This commit is contained in:
Eugene 2024-09-17 16:27:21 +02:00 committed by GitHub
parent d65ade4e92
commit 0b5299a248
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@ -7,7 +7,6 @@ import {
type SupplyData, type SupplyData,
} from 'n8n-workflow'; } from 'n8n-workflow';
import type { ClientOptions } from '@langchain/openai';
import { ChatOpenAI } from '@langchain/openai'; import { ChatOpenAI } from '@langchain/openai';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields'; import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing'; import { N8nLlmTracing } from '../N8nLlmTracing';
@ -51,6 +50,18 @@ export class LmChatAzureOpenAi implements INodeType {
], ],
properties: [ properties: [
getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]), getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiAgent]),
{
displayName:
'If using JSON response format, you must include the word "json" in the prompt in your chain or agent. Also, make sure to select the latest models released after November 2023.',
name: 'notice',
type: 'notice',
default: '',
displayOptions: {
show: {
'/options.responseFormat': ['json_object'],
},
},
},
{ {
displayName: 'Model (Deployment) Name', displayName: 'Model (Deployment) Name',
name: 'model', name: 'model',
@ -86,6 +97,25 @@ export class LmChatAzureOpenAi implements INodeType {
maxValue: 32768, maxValue: 32768,
}, },
}, },
{
displayName: 'Response Format',
name: 'responseFormat',
default: 'text',
type: 'options',
options: [
{
name: 'Text',
value: 'text',
description: 'Regular text response',
},
{
name: 'JSON',
value: 'json_object',
description:
'Enables JSON mode, which should guarantee the message the model generates is valid JSON',
},
],
},
{ {
displayName: 'Presence Penalty', displayName: 'Presence Penalty',
name: 'presencePenalty', name: 'presencePenalty',
@ -148,10 +178,9 @@ export class LmChatAzureOpenAi implements INodeType {
presencePenalty?: number; presencePenalty?: number;
temperature?: number; temperature?: number;
topP?: number; topP?: number;
responseFormat?: 'text' | 'json_object';
}; };
const configuration: ClientOptions = {};
const model = new ChatOpenAI({ const model = new ChatOpenAI({
azureOpenAIApiDeploymentName: modelName, azureOpenAIApiDeploymentName: modelName,
azureOpenAIApiInstanceName: credentials.resourceName, azureOpenAIApiInstanceName: credentials.resourceName,
@ -160,8 +189,12 @@ export class LmChatAzureOpenAi implements INodeType {
...options, ...options,
timeout: options.timeout ?? 60000, timeout: options.timeout ?? 60000,
maxRetries: options.maxRetries ?? 2, maxRetries: options.maxRetries ?? 2,
configuration,
callbacks: [new N8nLlmTracing(this)], callbacks: [new N8nLlmTracing(this)],
modelKwargs: options.responseFormat
? {
response_format: { type: options.responseFormat },
}
: undefined,
}); });
return { return {