Mirror of https://github.com/n8n-io/n8n.git, synced 2024-11-10 06:34:05 -08:00
feat(OpenAI Node): Update max token limit to support newer model limits (#6644)
parent ad581566ae
commit 26046f6fe8
@@ -252,7 +252,7 @@ const sharedOperations: INodeProperties[] = [
 				name: 'maxTokens',
 				default: 16,
 				description:
-					'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 4096).',
+					'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',
 				type: 'number',
 				displayOptions: {
 					show: {
@@ -260,7 +260,7 @@ const sharedOperations: INodeProperties[] = [
 					},
 				},
 				typeOptions: {
-					maxValue: 4096,
+					maxValue: 32768,
 				},
 				routing: {
 					send: {
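
For context, a hedged TypeScript sketch of what the full maxTokens option plausibly looks like after this change. Only the lines visible in the two hunks above are taken from the diff; the displayName, the displayOptions contents, and the routing body are truncated there and are assumptions here.

import type { INodeProperties } from 'n8n-workflow';

// Sketch of the updated option inside sharedOperations. The name, default,
// description, type, and typeOptions.maxValue come from the hunks above;
// everything marked "assumed" or "truncated" is not visible in the diff.
const maxTokensOption: INodeProperties = {
	displayName: 'Maximum Number of Tokens', // assumed label
	name: 'maxTokens',
	default: 16,
	description:
		'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',
	type: 'number',
	displayOptions: {
		show: {
			// resource/operation filters are truncated in the hunk
		},
	},
	typeOptions: {
		maxValue: 32768, // raised from 4096 to match newer model limits
	},
	routing: {
		send: {
			// send mapping is truncated in the hunk
		},
	},
};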
@@ -403,7 +403,7 @@ const sharedOperations: INodeProperties[] = [
 				name: 'maxTokens',
 				default: 16,
 				description:
-					'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 4096).',
+					'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 32,768).',
 				type: 'number',
 				displayOptions: {
 					show: {
@@ -411,7 +411,7 @@ const sharedOperations: INodeProperties[] = [
 					},
 				},
 				typeOptions: {
-					maxValue: 4096,
+					maxValue: 32768,
 				},
 				routing: {
 					send: {
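
The second pair of hunks (at lines 403 and 411) applies the same edit to another copy of the option further down in sharedOperations. Both hunks stop at `send: {`; as a minimal sketch only, assuming the usual declarative routing and the OpenAI `max_tokens` body parameter (neither is visible in the truncated hunks), the mapping onto the HTTP request might look like:

import type { INodeProperties } from 'n8n-workflow';

// Assumed routing body (not shown in the diff): forward the option value
// as the `max_tokens` field of the request body.
const maxTokensRouting: INodeProperties['routing'] = {
	send: {
		type: 'body',
		property: 'max_tokens',
	},
};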