2023-11-29 03:13:55 -08:00
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
2024-11-05 02:50:10 -08:00
import { ChatAnthropic } from '@langchain/anthropic' ;
import type { LLMResult } from '@langchain/core/outputs' ;
2023-11-29 03:13:55 -08:00
import {
NodeConnectionType ,
2024-06-21 04:55:37 -07:00
type INodePropertyOptions ,
2024-03-07 02:36:36 -08:00
type INodeProperties ,
2024-10-28 03:37:23 -07:00
type ISupplyDataFunctions ,
2023-11-29 03:13:55 -08:00
type INodeType ,
type INodeTypeDescription ,
type SupplyData ,
} from 'n8n-workflow' ;
2024-12-16 04:46:19 -08:00
import { getConnectionHintNoticeField } from '@utils/sharedFields' ;
2025-02-27 06:40:58 -08:00
import { searchModels } from './methods/searchModels' ;
2024-11-08 01:17:11 -08:00
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler' ;
2024-12-16 04:46:19 -08:00
import { N8nLlmTracing } from '../N8nLlmTracing' ;
2023-11-29 03:13:55 -08:00
2024-03-07 02:36:36 -08:00
const modelField : INodeProperties = {
displayName : 'Model' ,
name : 'model' ,
type : 'options' ,
// eslint-disable-next-line n8n-nodes-base/node-param-options-type-unsorted-items
options : [
2024-10-31 02:10:49 -07:00
{
name : 'Claude 3.5 Sonnet(20241022)' ,
value : 'claude-3-5-sonnet-20241022' ,
} ,
2024-03-07 02:36:36 -08:00
{
name : 'Claude 3 Opus(20240229)' ,
value : 'claude-3-opus-20240229' ,
} ,
2024-06-21 04:55:37 -07:00
{
name : 'Claude 3.5 Sonnet(20240620)' ,
value : 'claude-3-5-sonnet-20240620' ,
} ,
2024-03-07 02:36:36 -08:00
{
name : 'Claude 3 Sonnet(20240229)' ,
value : 'claude-3-sonnet-20240229' ,
} ,
2024-11-05 02:50:10 -08:00
{
name : 'Claude 3.5 Haiku(20241022)' ,
value : 'claude-3-5-haiku-20241022' ,
} ,
2024-03-22 04:03:54 -07:00
{
name : 'Claude 3 Haiku(20240307)' ,
value : 'claude-3-haiku-20240307' ,
} ,
2024-03-07 02:36:36 -08:00
{
name : 'LEGACY: Claude 2' ,
value : 'claude-2' ,
} ,
{
name : 'LEGACY: Claude 2.1' ,
value : 'claude-2.1' ,
} ,
{
name : 'LEGACY: Claude Instant 1.2' ,
value : 'claude-instant-1.2' ,
} ,
{
name : 'LEGACY: Claude Instant 1' ,
value : 'claude-instant-1' ,
} ,
] ,
description :
'The model which will generate the completion. <a href="https://docs.anthropic.com/claude/docs/models-overview">Learn more</a>.' ,
default : 'claude-2' ,
} ;
2025-02-27 06:40:58 -08:00
// Minimum `budget_tokens` accepted by Anthropic when extended thinking is enabled;
// used as the fallback when the user does not set "Thinking Budget".
const MIN_THINKING_BUDGET = 1024 ;
// Fallback `max_tokens` sent with thinking requests: LangChain's default (-1 / no limit)
// is rejected by Anthropic, which requires max_tokens > budget_tokens.
const DEFAULT_MAX_TOKENS = 4096 ;
2023-11-29 03:13:55 -08:00
export class LmChatAnthropic implements INodeType {
2025-02-27 06:40:58 -08:00
methods = {
listSearch : {
searchModels ,
} ,
} ;
2023-11-29 03:13:55 -08:00
description : INodeTypeDescription = {
displayName : 'Anthropic Chat Model' ,
// eslint-disable-next-line n8n-nodes-base/node-class-description-name-miscased
name : 'lmChatAnthropic' ,
icon : 'file:anthropic.svg' ,
group : [ 'transform' ] ,
2025-02-27 06:40:58 -08:00
version : [ 1 , 1.1 , 1.2 , 1.3 ] ,
defaultVersion : 1.3 ,
2023-11-29 03:13:55 -08:00
description : 'Language Model Anthropic' ,
defaults : {
name : 'Anthropic Chat Model' ,
} ,
codex : {
categories : [ 'AI' ] ,
subcategories : {
2024-08-05 04:59:02 -07:00
AI : [ 'Language Models' , 'Root Nodes' ] ,
2024-07-23 07:40:28 -07:00
'Language Models' : [ 'Chat Models (Recommended)' ] ,
2023-11-29 03:13:55 -08:00
} ,
resources : {
primaryDocumentation : [
{
url : 'https://docs.n8n.io/integrations/builtin/cluster-nodes/sub-nodes/n8n-nodes-langchain.lmchatanthropic/' ,
} ,
] ,
} ,
2024-03-07 02:36:36 -08:00
alias : [ 'claude' , 'sonnet' , 'opus' ] ,
2023-11-29 03:13:55 -08:00
} ,
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs : [ ] ,
// eslint-disable-next-line n8n-nodes-base/node-class-description-outputs-wrong
outputs : [ NodeConnectionType . AiLanguageModel ] ,
outputNames : [ 'Model' ] ,
credentials : [
{
name : 'anthropicApi' ,
required : true ,
} ,
] ,
properties : [
getConnectionHintNoticeField ( [ NodeConnectionType . AiChain , NodeConnectionType . AiChain ] ) ,
{
2024-03-07 02:36:36 -08:00
. . . modelField ,
displayOptions : {
show : {
'@version' : [ 1 ] ,
2023-11-29 03:13:55 -08:00
} ,
2024-03-07 02:36:36 -08:00
} ,
} ,
{
. . . modelField ,
default : 'claude-3-sonnet-20240229' ,
displayOptions : {
2024-06-21 04:55:37 -07:00
show : {
'@version' : [ 1.1 ] ,
} ,
} ,
} ,
{
. . . modelField ,
default : 'claude-3-5-sonnet-20240620' ,
options : ( modelField . options ? ? [ ] ) . filter (
( o ) : o is INodePropertyOptions = > 'name' in o && ! o . name . toString ( ) . startsWith ( 'LEGACY' ) ,
) ,
displayOptions : {
show : {
2025-02-27 06:40:58 -08:00
'@version' : [ { _cnd : { lte : 1.2 } } ] ,
} ,
} ,
} ,
{
displayName : 'Model' ,
name : 'model' ,
type : 'resourceLocator' ,
default : {
mode : 'list' ,
value : 'claude-3-7-sonnet-20250219' ,
cachedResultName : 'Claude 3.7 Sonnet' ,
} ,
required : true ,
modes : [
{
displayName : 'From List' ,
name : 'list' ,
type : 'list' ,
placeholder : 'Select a model...' ,
typeOptions : {
searchListMethod : 'searchModels' ,
searchable : true ,
} ,
} ,
{
displayName : 'ID' ,
name : 'id' ,
type : 'string' ,
placeholder : 'Claude Sonnet' ,
} ,
] ,
description :
'The model. Choose from the list, or specify an ID. <a href="https://docs.anthropic.com/claude/docs/models-overview">Learn more</a>.' ,
displayOptions : {
show : {
'@version' : [ { _cnd : { gte : 1.3 } } ] ,
2023-11-29 03:13:55 -08:00
} ,
2024-03-07 02:36:36 -08:00
} ,
2023-11-29 03:13:55 -08:00
} ,
{
displayName : 'Options' ,
name : 'options' ,
placeholder : 'Add Option' ,
description : 'Additional options to add' ,
type : 'collection' ,
default : { } ,
options : [
{
displayName : 'Maximum Number of Tokens' ,
name : 'maxTokensToSample' ,
2025-02-27 06:40:58 -08:00
default : DEFAULT_MAX_TOKENS ,
2023-11-29 03:13:55 -08:00
description : 'The maximum number of tokens to generate in the completion' ,
type : 'number' ,
} ,
{
displayName : 'Sampling Temperature' ,
name : 'temperature' ,
default : 0.7 ,
typeOptions : { maxValue : 1 , minValue : 0 , numberPrecision : 1 } ,
description :
'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.' ,
type : 'number' ,
2025-02-27 06:40:58 -08:00
displayOptions : {
hide : {
thinking : [ true ] ,
} ,
} ,
2023-11-29 03:13:55 -08:00
} ,
{
displayName : 'Top K' ,
name : 'topK' ,
default : - 1 ,
typeOptions : { maxValue : 1 , minValue : - 1 , numberPrecision : 1 } ,
description :
'Used to remove "long tail" low probability responses. Defaults to -1, which disables it.' ,
type : 'number' ,
2025-02-27 06:40:58 -08:00
displayOptions : {
hide : {
thinking : [ true ] ,
} ,
} ,
2023-11-29 03:13:55 -08:00
} ,
{
displayName : 'Top P' ,
name : 'topP' ,
default : 1 ,
typeOptions : { maxValue : 1 , minValue : 0 , numberPrecision : 1 } ,
description :
'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.' ,
type : 'number' ,
2025-02-27 06:40:58 -08:00
displayOptions : {
hide : {
thinking : [ true ] ,
} ,
} ,
} ,
{
displayName : 'Enable Thinking' ,
name : 'thinking' ,
type : 'boolean' ,
default : false ,
description : 'Whether to enable thinking mode for the model' ,
} ,
{
displayName : 'Thinking Budget (Tokens)' ,
name : 'thinkingBudget' ,
type : 'number' ,
default : MIN_THINKING_BUDGET ,
description : 'The maximum number of tokens to use for thinking' ,
displayOptions : {
show : {
thinking : [ true ] ,
} ,
} ,
2023-11-29 03:13:55 -08:00
} ,
] ,
} ,
] ,
} ;
2024-10-28 03:37:23 -07:00
async supplyData ( this : ISupplyDataFunctions , itemIndex : number ) : Promise < SupplyData > {
2023-11-29 03:13:55 -08:00
const credentials = await this . getCredentials ( 'anthropicApi' ) ;
2025-02-27 06:40:58 -08:00
const version = this . getNode ( ) . typeVersion ;
const modelName =
version >= 1.3
? ( this . getNodeParameter ( 'model.value' , itemIndex ) as string )
: ( this . getNodeParameter ( 'model' , itemIndex ) as string ) ;
2024-03-07 02:36:36 -08:00
const options = this . getNodeParameter ( 'options' , itemIndex , { } ) as {
maxTokensToSample? : number ;
temperature : number ;
2025-02-27 06:40:58 -08:00
topK? : number ;
topP? : number ;
thinking? : boolean ;
thinkingBudget? : number ;
2024-03-07 02:36:36 -08:00
} ;
2025-02-27 06:40:58 -08:00
let invocationKwargs = { } ;
2023-11-29 03:13:55 -08:00
2024-05-12 12:12:07 -07:00
const tokensUsageParser = ( llmOutput : LLMResult [ 'llmOutput' ] ) = > {
const usage = ( llmOutput ? . usage as { input_tokens : number ; output_tokens : number } ) ? ? {
input_tokens : 0 ,
output_tokens : 0 ,
} ;
return {
completionTokens : usage.output_tokens ,
promptTokens : usage.input_tokens ,
totalTokens : usage.input_tokens + usage . output_tokens ,
} ;
} ;
2025-02-27 06:40:58 -08:00
if ( options . thinking ) {
invocationKwargs = {
thinking : {
type : 'enabled' ,
// If thinking is enabled, we need to set a budget.
// We fallback to 1024 as that is the minimum
budget_tokens : options.thinkingBudget ? ? MIN_THINKING_BUDGET ,
} ,
// The default Langchain max_tokens is -1 (no limit) but Anthropic requires a number
// higher than budget_tokens
max_tokens : options.maxTokensToSample ? ? DEFAULT_MAX_TOKENS ,
// These need to be unset when thinking is enabled.
// Because the invocationKwargs will override the model options
// we can pass options to the model and then override them here
top_k : undefined ,
top_p : undefined ,
temperature : undefined ,
} ;
}
2023-11-29 03:13:55 -08:00
const model = new ChatAnthropic ( {
anthropicApiKey : credentials.apiKey as string ,
modelName ,
2024-03-07 02:36:36 -08:00
maxTokens : options.maxTokensToSample ,
temperature : options.temperature ,
topK : options.topK ,
topP : options.topP ,
2024-05-12 12:12:07 -07:00
callbacks : [ new N8nLlmTracing ( this , { tokensUsageParser } ) ] ,
2024-11-08 01:17:11 -08:00
onFailedAttempt : makeN8nLlmFailedAttemptHandler ( this ) ,
2025-02-27 06:40:58 -08:00
invocationKwargs ,
2023-11-29 03:13:55 -08:00
} ) ;
return {
2024-05-12 12:12:07 -07:00
response : model ,
2023-11-29 03:13:55 -08:00
} ;
}
}