import {
	NodeConnectionType,
	type IExecuteFunctions,
	type INodeExecutionData,
	type INodeType,
	type INodeTypeDescription,
	NodeOperationError,
} from 'n8n-workflow';
import { RetrievalQAChain } from 'langchain/chains';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';
import type { BaseRetriever } from '@langchain/core/retrievers';
import {
	ChatPromptTemplate,
	SystemMessagePromptTemplate,
	HumanMessagePromptTemplate,
	PromptTemplate,
} from '@langchain/core/prompts';
import { getTemplateNoticeField } from '../../../utils/sharedFields';
import { getPromptInputByType, isChatInstance } from '../../../utils/helpers';
import { getTracingConfig } from '../../../utils/tracing';
2024-09-27 03:09:39 -07:00
const SYSTEM_PROMPT_TEMPLATE = ` Use the following pieces of context to answer the users question.
If you don 't know the answer, just say that you don' t know , don ' t try to make up an answer .
-- -- -- -- -- -- -- --
{ context } ` ;
2023-11-29 03:13:55 -08:00
export class ChainRetrievalQa implements INodeType {
description : INodeTypeDescription = {
displayName : 'Question and Answer Chain' ,
name : 'chainRetrievalQa' ,
icon : 'fa:link' ,
group : [ 'transform' ] ,
2024-02-21 05:59:37 -08:00
version : [ 1 , 1.1 , 1.2 , 1.3 ] ,
2023-11-29 03:13:55 -08:00
description : 'Answer questions about retrieved documents' ,
defaults : {
name : 'Question and Answer Chain' ,
color : '#909298' ,
} ,
codex : {
alias : [ 'LangChain' ] ,
categories : [ 'AI' ] ,
subcategories : {
2024-05-30 07:53:33 -07:00
AI : [ 'Chains' , 'Root Nodes' ] ,
2023-11-29 03:13:55 -08:00
} ,
resources : {
primaryDocumentation : [
{
url : 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.chainretrievalqa/' ,
} ,
] ,
} ,
} ,
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs : [
NodeConnectionType . Main ,
{
displayName : 'Model' ,
maxConnections : 1 ,
type : NodeConnectionType . AiLanguageModel ,
required : true ,
} ,
{
displayName : 'Retriever' ,
maxConnections : 1 ,
type : NodeConnectionType . AiRetriever ,
required : true ,
} ,
] ,
outputs : [ NodeConnectionType . Main ] ,
credentials : [ ] ,
properties : [
getTemplateNoticeField ( 1960 ) ,
{
displayName : 'Query' ,
name : 'query' ,
type : 'string' ,
required : true ,
default : '={{ $json.input }}' ,
displayOptions : {
show : {
'@version' : [ 1 ] ,
} ,
} ,
} ,
{
displayName : 'Query' ,
name : 'query' ,
type : 'string' ,
required : true ,
default : '={{ $json.chat_input }}' ,
displayOptions : {
show : {
'@version' : [ 1.1 ] ,
} ,
} ,
} ,
2024-01-09 03:11:39 -08:00
{
displayName : 'Query' ,
name : 'query' ,
type : 'string' ,
required : true ,
default : '={{ $json.chatInput }}' ,
displayOptions : {
show : {
'@version' : [ 1.2 ] ,
} ,
} ,
} ,
2024-02-21 05:59:37 -08:00
{
displayName : 'Prompt' ,
name : 'promptType' ,
type : 'options' ,
options : [
{
// eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased
name : 'Take from previous node automatically' ,
value : 'auto' ,
description : 'Looks for an input field called chatInput' ,
} ,
{
// eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased
name : 'Define below' ,
value : 'define' ,
description :
'Use an expression to reference data in previous nodes or enter static text' ,
} ,
] ,
displayOptions : {
hide : {
'@version' : [ { _cnd : { lte : 1.2 } } ] ,
} ,
} ,
default : 'auto' ,
} ,
{
displayName : 'Text' ,
name : 'text' ,
type : 'string' ,
required : true ,
default : '' ,
typeOptions : {
rows : 2 ,
} ,
displayOptions : {
show : {
promptType : [ 'define' ] ,
} ,
} ,
} ,
2024-09-27 03:09:39 -07:00
{
displayName : 'Options' ,
name : 'options' ,
type : 'collection' ,
default : { } ,
placeholder : 'Add Option' ,
options : [
{
displayName : 'System Prompt Template' ,
name : 'systemPromptTemplate' ,
type : 'string' ,
default : SYSTEM_PROMPT_TEMPLATE ,
description :
'Template string used for the system prompt. This should include the variable `{context}` for the provided context. For text completion models, you should also include the variable `{question}` for the user’ s query.' ,
typeOptions : {
rows : 6 ,
} ,
} ,
] ,
} ,
2023-11-29 03:13:55 -08:00
] ,
} ;
async execute ( this : IExecuteFunctions ) : Promise < INodeExecutionData [ ] [ ] > {
2024-08-28 00:32:53 -07:00
this . logger . debug ( 'Executing Retrieval QA Chain' ) ;
2023-11-29 03:13:55 -08:00
const model = ( await this . getInputConnectionData (
NodeConnectionType . AiLanguageModel ,
0 ,
) ) as BaseLanguageModel ;
const retriever = ( await this . getInputConnectionData (
NodeConnectionType . AiRetriever ,
0 ,
) ) as BaseRetriever ;
const items = this . getInputData ( ) ;
const returnData : INodeExecutionData [ ] = [ ] ;
// Run for each item
for ( let itemIndex = 0 ; itemIndex < items . length ; itemIndex ++ ) {
2024-04-09 05:06:12 -07:00
try {
let query ;
2024-02-21 05:59:37 -08:00
2024-04-09 05:06:12 -07:00
if ( this . getNode ( ) . typeVersion <= 1.2 ) {
query = this . getNodeParameter ( 'query' , itemIndex ) as string ;
} else {
query = getPromptInputByType ( {
ctx : this ,
i : itemIndex ,
inputKey : 'text' ,
promptTypeKey : 'promptType' ,
} ) ;
}
2023-11-29 03:13:55 -08:00
2024-04-09 05:06:12 -07:00
if ( query === undefined ) {
throw new NodeOperationError ( this . getNode ( ) , 'The ‘ query‘ parameter is empty.' ) ;
}
2023-11-29 03:13:55 -08:00
2024-09-27 03:09:39 -07:00
const options = this . getNodeParameter ( 'options' , itemIndex , { } ) as {
systemPromptTemplate? : string ;
} ;
const chainParameters = { } as {
prompt? : PromptTemplate | ChatPromptTemplate ;
} ;
if ( options . systemPromptTemplate !== undefined ) {
if ( isChatInstance ( model ) ) {
const messages = [
SystemMessagePromptTemplate . fromTemplate ( options . systemPromptTemplate ) ,
HumanMessagePromptTemplate . fromTemplate ( '{question}' ) ,
] ;
const chatPromptTemplate = ChatPromptTemplate . fromMessages ( messages ) ;
chainParameters . prompt = chatPromptTemplate ;
} else {
const completionPromptTemplate = new PromptTemplate ( {
template : options.systemPromptTemplate ,
inputVariables : [ 'context' , 'question' ] ,
} ) ;
chainParameters . prompt = completionPromptTemplate ;
}
}
const chain = RetrievalQAChain . fromLLM ( model , retriever , chainParameters ) ;
2024-04-09 05:06:12 -07:00
const response = await chain . withConfig ( getTracingConfig ( this ) ) . invoke ( { query } ) ;
returnData . push ( { json : { response } } ) ;
} catch ( error ) {
2024-08-30 00:59:30 -07:00
if ( this . continueOnFail ( ) ) {
2024-04-09 05:06:12 -07:00
returnData . push ( { json : { error : error.message } , pairedItem : { item : itemIndex } } ) ;
continue ;
}
throw error ;
}
2023-11-29 03:13:55 -08:00
}
2024-05-22 08:40:52 -07:00
return [ returnData ] ;
2023-11-29 03:13:55 -08:00
}
}