feat(OpenAI Node): Update max token limit to support newer model limits (#6644)

MC Naveen 2023-07-12 17:08:54 +05:30 committed by GitHub
parent ad581566ae
commit 26046f6fe8
2 changed files with 4 additions and 4 deletions

File 1 of 2

@@ -252,7 +252,7 @@ const sharedOperations: INodeProperties[] = [
     name: 'maxTokens',
     default: 16,
     description:
-        'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 4096).',
+        'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',
     type: 'number',
     displayOptions: {
         show: {
@@ -260,7 +260,7 @@ const sharedOperations: INodeProperties[] = [
         },
     },
     typeOptions: {
-        maxValue: 4096,
+        maxValue: 32768,
     },
     routing: {
         send: {
File 2 of 2

@@ -403,7 +403,7 @@ const sharedOperations: INodeProperties[] = [
     name: 'maxTokens',
     default: 16,
     description:
-        'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 4096).',
+        'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',
     type: 'number',
     displayOptions: {
         show: {
@@ -411,7 +411,7 @@ const sharedOperations: INodeProperties[] = [
         },
     },
     typeOptions: {
-        maxValue: 4096,
+        maxValue: 32768,
    },
     routing: {
         send: {
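
For reference, the option touched by this commit reads roughly as follows after the bump. This is a minimal sketch assuming n8n's INodeProperties shape from n8n-workflow; the displayName, the show condition, and the routing target are not visible in the hunks above, so the values used for them here are illustrative placeholders rather than the file's actual values.

import type { INodeProperties } from 'n8n-workflow';

// Sketch of the "Maximum Number of Tokens" option after the limit bump.
// Fields not shown in the diff (displayName, show condition, routing target)
// are placeholders, not values taken from the real file.
const maxTokensOption: INodeProperties = {
    displayName: 'Maximum Number of Tokens', // placeholder label
    name: 'maxTokens',
    type: 'number',
    default: 16,
    description:
        'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',
    typeOptions: {
        // Raised from 4096 so the UI no longer caps the value below what
        // larger-context models accept.
        maxValue: 32768,
    },
    displayOptions: {
        show: {
            operation: ['complete'], // placeholder condition
        },
    },
    routing: {
        send: {
            type: 'body',
            property: 'max_tokens', // assumed request field the option maps to
        },
    },
};

Only the description string and typeOptions.maxValue change in this commit; the surrounding lines in both files are unchanged context.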