feat(OpenAI Node): Update max token limit to support newer model limits (#6644)

Author: MC Naveen, 2023-07-12 17:08:54 +05:30 (committed by GitHub)
parent ad581566ae
commit 26046f6fe8
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
2 changed files with 4 additions and 4 deletions


@@ -252,7 +252,7 @@ const sharedOperations: INodeProperties[] = [
 				name: 'maxTokens',
 				default: 16,
 				description:
-					'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 4096).',
+					'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',
 				type: 'number',
 				displayOptions: {
 					show: {
@@ -260,7 +260,7 @@ const sharedOperations: INodeProperties[] = [
 					},
 				},
 				typeOptions: {
-					maxValue: 4096,
+					maxValue: 32768,
 				},
 				routing: {
 					send: {


@@ -403,7 +403,7 @@ const sharedOperations: INodeProperties[] = [
 				name: 'maxTokens',
 				default: 16,
 				description:
-					'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 4096).',
+					'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',
 				type: 'number',
 				displayOptions: {
 					show: {
@@ -411,7 +411,7 @@ const sharedOperations: INodeProperties[] = [
 					},
 				},
 				typeOptions: {
-					maxValue: 4096,
+					maxValue: 32768,
 				},
 				routing: {
 					send: {
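For reference, a minimal TypeScript sketch of how the updated maxTokens option could read once both hunks are applied. Only the name, default, description, type, and typeOptions.maxValue values come from the diff; the displayName, the request-body routing, and the omitted displayOptions block are illustrative assumptions, not copied from the changed files.

import type { INodeProperties } from 'n8n-workflow';

// Sketch of the updated entry inside `sharedOperations`. Only name, default,
// description, type, and typeOptions.maxValue are taken from the diff above.
const maxTokensOption: INodeProperties = {
	displayName: 'Maximum Number of Tokens', // assumed label, not shown in the diff
	name: 'maxTokens',
	type: 'number',
	default: 16,
	description:
		'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',
	typeOptions: {
		maxValue: 32768, // raised from 4096 so the UI no longer caps newer, larger-context models
	},
	routing: {
		send: {
			type: 'body',
			property: 'max_tokens', // assumed request-body mapping; the send block is abbreviated in the diff
		},
	},
};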