diff --git a/packages/nodes-base/nodes/OpenAi/ChatDescription.ts b/packages/nodes-base/nodes/OpenAi/ChatDescription.ts index 9812d5dffb..1548d89291 100644 --- a/packages/nodes-base/nodes/OpenAi/ChatDescription.ts +++ b/packages/nodes-base/nodes/OpenAi/ChatDescription.ts @@ -68,6 +68,7 @@ const completeOperations: INodeProperties[] = [ sortable: true, multipleValues: true, }, + description: 'The prompt can be a list of messages or a single message, depending on what you want to achieve and the model you are using. More info.', displayOptions: { show: { resource: ['chat'], @@ -89,14 +90,17 @@ const completeOperations: INodeProperties[] = [ { name: 'Assistant', value: 'assistant', + description: 'Store prior responses', }, { name: 'System', value: 'system', + description: 'Set the behavior of the assistant', }, { name: 'User', value: 'user', + description: 'Instruct the assistant', }, ], default: 'user', @@ -218,7 +222,7 @@ const sharedOperations: INodeProperties[] = [ { displayName: 'Maximum Number of Tokens', name: 'maxTokens', - default: 16, + default: 128, description: 'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 4096).', type: 'number', diff --git a/packages/nodes-base/nodes/OpenAi/TextDescription.ts b/packages/nodes-base/nodes/OpenAi/TextDescription.ts index e79acfe142..ede6e4ee0e 100644 --- a/packages/nodes-base/nodes/OpenAi/TextDescription.ts +++ b/packages/nodes-base/nodes/OpenAi/TextDescription.ts @@ -59,7 +59,7 @@ const completeOperations: INodeProperties[] = [ name: 'model', type: 'options', description: - 'The model which will generate the completion. Learn more.', + 'The model which will generate the completion. More info.', displayOptions: { show: { operation: ['complete'], @@ -145,7 +145,7 @@ const editOperations: INodeProperties[] = [ name: 'model', type: 'options', description: - 'The model which will generate the edited version. Learn more.', + 'The model which will generate the edited version. More info.', displayOptions: { show: { operation: ['edit'], @@ -218,7 +218,7 @@ const moderateOperations: INodeProperties[] = [ name: 'model', type: 'options', description: - 'The model which will classify the text. Learn more.', + 'The model which will classify the text. More info.', displayOptions: { show: { resource: ['text'], @@ -397,7 +397,7 @@ const sharedOperations: INodeProperties[] = [ { displayName: 'Maximum Number of Tokens', name: 'maxTokens', - default: 16, + default: 128, description: 'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 4096).', type: 'number',