import {
NodeConnectionType ,
type IExecuteFunctions ,
type INodeExecutionData ,
type INodeType ,
type INodeTypeDescription ,
NodeOperationError ,
} from 'n8n-workflow' ;
import { RetrievalQAChain } from 'langchain/chains' ;
import type { BaseLanguageModel } from '@langchain/core/language_models/base' ;
import type { BaseRetriever } from '@langchain/core/retrievers' ;
import {
ChatPromptTemplate ,
SystemMessagePromptTemplate ,
HumanMessagePromptTemplate ,
PromptTemplate ,
} from '@langchain/core/prompts' ;
import { getTemplateNoticeField } from '../../../utils/sharedFields' ;
import { getPromptInputByType } from '../../../utils/helpers' ;
import { getTracingConfig } from '../../../utils/tracing' ;
const QA_PROMPT_TEMPLATE =
"Use the following pieces of context to answer the question at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer.\n\n{context}\n\nQuestion: {question}\nHelpful Answer:" ;
2024-08-12 16:13:46 -07:00
const CHAT_PROMPT_TEMPLATE = ` Use the following pieces of context to answer the users question.
If you don 't know the answer, just say that you don' t know , don ' t try to make up an answer .
-- -- -- -- -- -- -- --
{ context } ` ;
2024-08-09 17:18:25 -07:00
2023-11-29 03:13:55 -08:00
export class ChainRetrievalQa implements INodeType {
description : INodeTypeDescription = {
displayName : 'Question and Answer Chain' ,
name : 'chainRetrievalQa' ,
icon : 'fa:link' ,
group : [ 'transform' ] ,
2024-02-21 05:59:37 -08:00
version : [ 1 , 1.1 , 1.2 , 1.3 ] ,
2023-11-29 03:13:55 -08:00
description : 'Answer questions about retrieved documents' ,
defaults : {
name : 'Question and Answer Chain' ,
color : '#909298' ,
} ,
codex : {
alias : [ 'LangChain' ] ,
categories : [ 'AI' ] ,
subcategories : {
2024-05-30 07:53:33 -07:00
AI : [ 'Chains' , 'Root Nodes' ] ,
2023-11-29 03:13:55 -08:00
} ,
resources : {
primaryDocumentation : [
{
url : 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.chainretrievalqa/' ,
} ,
] ,
} ,
} ,
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs : [
NodeConnectionType . Main ,
{
displayName : 'Model' ,
maxConnections : 1 ,
type : NodeConnectionType . AiLanguageModel ,
required : true ,
} ,
{
displayName : 'Retriever' ,
maxConnections : 1 ,
type : NodeConnectionType . AiRetriever ,
required : true ,
} ,
] ,
outputs : [ NodeConnectionType . Main ] ,
credentials : [ ] ,
properties : [
getTemplateNoticeField ( 1960 ) ,
{
displayName : 'Query' ,
name : 'query' ,
type : 'string' ,
required : true ,
default : '={{ $json.input }}' ,
displayOptions : {
show : {
'@version' : [ 1 ] ,
} ,
} ,
} ,
{
displayName : 'Query' ,
name : 'query' ,
type : 'string' ,
required : true ,
default : '={{ $json.chat_input }}' ,
displayOptions : {
show : {
'@version' : [ 1.1 ] ,
} ,
} ,
} ,
2024-01-09 03:11:39 -08:00
{
displayName : 'Query' ,
name : 'query' ,
type : 'string' ,
required : true ,
default : '={{ $json.chatInput }}' ,
displayOptions : {
show : {
'@version' : [ 1.2 ] ,
} ,
} ,
} ,
2024-02-21 05:59:37 -08:00
{
displayName : 'Prompt' ,
name : 'promptType' ,
type : 'options' ,
options : [
{
// eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased
name : 'Take from previous node automatically' ,
value : 'auto' ,
description : 'Looks for an input field called chatInput' ,
} ,
{
// eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased
name : 'Define below' ,
value : 'define' ,
description :
'Use an expression to reference data in previous nodes or enter static text' ,
} ,
] ,
displayOptions : {
hide : {
'@version' : [ { _cnd : { lte : 1.2 } } ] ,
} ,
} ,
default : 'auto' ,
} ,
{
displayName : 'Text' ,
name : 'text' ,
type : 'string' ,
required : true ,
default : '' ,
typeOptions : {
rows : 2 ,
} ,
displayOptions : {
show : {
promptType : [ 'define' ] ,
} ,
} ,
} ,
2024-08-09 17:18:25 -07:00
{
2024-08-12 16:13:46 -07:00
displayName : 'Custom Question and Answer Prompt' ,
name : 'customQAPrompt' ,
type : 'boolean' ,
default : false ,
2024-08-13 11:35:06 -07:00
description : 'Whether to enable customization of the Question and Answer prompt' ,
2024-08-12 16:13:46 -07:00
} ,
{
displayName : 'Question and Answer Prompt Type' ,
name : 'qAPromptType' ,
type : 'options' ,
default : 'standardPrompt' ,
description : 'Select the type of prompt for customization' ,
2024-08-09 17:18:25 -07:00
options : [
{
2024-08-12 16:13:46 -07:00
name : 'Standard Prompt' ,
value : 'standardPrompt' ,
description : 'Uses a standard prompt template (for non-Chat Models)' ,
} ,
{
2024-08-13 11:16:42 -07:00
name : 'Chat Prompt' ,
2024-08-12 16:13:46 -07:00
value : 'chatPrompt' ,
2024-08-13 11:16:42 -07:00
description : 'Uses a system message template (for Chat Models)' ,
2024-08-09 17:18:25 -07:00
} ,
] ,
2024-08-12 16:13:46 -07:00
displayOptions : {
show : {
customQAPrompt : [ true ] ,
} ,
} ,
} ,
{
displayName : 'Standard Prompt Template' ,
name : 'standardPromptTemplate' ,
type : 'string' ,
default : QA_PROMPT_TEMPLATE ,
2024-09-16 06:44:25 -07:00
description :
"Template string for the Question and Answer prompt (for non-Chat Models). This prompt expects the variables `{context}` (for the provided context) and `{question}` (for the user's question)." ,
2024-08-12 16:13:46 -07:00
typeOptions : {
rows : 8 ,
} ,
displayOptions : {
show : {
qAPromptType : [ 'standardPrompt' ] ,
} ,
} ,
} ,
{
displayName : 'Chat Prompt Template' ,
name : 'chatPromptTemplate' ,
type : 'string' ,
default : CHAT_PROMPT_TEMPLATE ,
2024-08-13 11:16:42 -07:00
description :
2024-09-16 06:44:25 -07:00
'Template string for the Question and Answer prompt as a system message (for Chat Models). This prompt expects the variable `{context}` (for the provided context).' ,
2024-08-12 16:13:46 -07:00
typeOptions : {
rows : 8 ,
} ,
displayOptions : {
show : {
qAPromptType : [ 'chatPrompt' ] ,
} ,
} ,
2024-08-09 17:18:25 -07:00
} ,
2023-11-29 03:13:55 -08:00
] ,
} ;
async execute ( this : IExecuteFunctions ) : Promise < INodeExecutionData [ ] [ ] > {
this . logger . verbose ( 'Executing Retrieval QA Chain' ) ;
const model = ( await this . getInputConnectionData (
NodeConnectionType . AiLanguageModel ,
0 ,
) ) as BaseLanguageModel ;
const retriever = ( await this . getInputConnectionData (
NodeConnectionType . AiRetriever ,
0 ,
) ) as BaseRetriever ;
const items = this . getInputData ( ) ;
2024-08-09 17:18:25 -07:00
2023-11-29 03:13:55 -08:00
const returnData : INodeExecutionData [ ] = [ ] ;
// Run for each item
for ( let itemIndex = 0 ; itemIndex < items . length ; itemIndex ++ ) {
2024-04-09 05:06:12 -07:00
try {
let query ;
2024-02-21 05:59:37 -08:00
2024-04-09 05:06:12 -07:00
if ( this . getNode ( ) . typeVersion <= 1.2 ) {
query = this . getNodeParameter ( 'query' , itemIndex ) as string ;
} else {
query = getPromptInputByType ( {
ctx : this ,
i : itemIndex ,
inputKey : 'text' ,
promptTypeKey : 'promptType' ,
} ) ;
}
2023-11-29 03:13:55 -08:00
2024-04-09 05:06:12 -07:00
if ( query === undefined ) {
throw new NodeOperationError ( this . getNode ( ) , 'The ‘ query‘ parameter is empty.' ) ;
}
2023-11-29 03:13:55 -08:00
2024-09-13 11:16:08 -07:00
const customQAPrompt = this . getNodeParameter ( 'customQAPrompt' , itemIndex , false ) as boolean ;
const chainParameters = { } as {
prompt? : PromptTemplate | ChatPromptTemplate ;
} ;
if ( customQAPrompt ) {
const qAPromptType = this . getNodeParameter ( 'qAPromptType' , itemIndex ) as string ;
if ( qAPromptType === 'standardPrompt' ) {
const standardPromptTemplateParameter = this . getNodeParameter (
'standardPromptTemplate' ,
itemIndex ,
) as string ;
const standardPromptTemplate = new PromptTemplate ( {
template : standardPromptTemplateParameter ,
inputVariables : [ 'context' , 'question' ] ,
} ) ;
chainParameters . prompt = standardPromptTemplate ;
} else if ( qAPromptType === 'chatPrompt' ) {
const chatPromptTemplateParameter = this . getNodeParameter (
'chatPromptTemplate' ,
itemIndex ,
) as string ;
const messages = [
SystemMessagePromptTemplate . fromTemplate ( chatPromptTemplateParameter ) ,
HumanMessagePromptTemplate . fromTemplate ( '{question}' ) ,
] ;
const chatPromptTemplate = ChatPromptTemplate . fromMessages ( messages ) ;
chainParameters . prompt = chatPromptTemplate ;
}
}
const chain = RetrievalQAChain . fromLLM ( model , retriever , chainParameters ) ;
2024-04-09 05:06:12 -07:00
const response = await chain . withConfig ( getTracingConfig ( this ) ) . invoke ( { query } ) ;
returnData . push ( { json : { response } } ) ;
} catch ( error ) {
2024-06-19 22:45:00 -07:00
if ( this . continueOnFail ( error ) ) {
2024-04-09 05:06:12 -07:00
returnData . push ( { json : { error : error.message } , pairedItem : { item : itemIndex } } ) ;
continue ;
}
throw error ;
}
2023-11-29 03:13:55 -08:00
}
2024-05-22 08:40:52 -07:00
return [ returnData ] ;
2023-11-29 03:13:55 -08:00
}
}