fix: Continue on fail / error output support for chains and agents (#9078)

Michael Kret 2024-04-09 15:06:12 +03:00 committed by GitHub
parent 8c2622549b
commit f62800cd72
9 changed files with 469 additions and 388 deletions
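
Every execute loop touched below follows the same pattern: the per-item work is wrapped in try/catch, and when the node's Continue On Fail setting is enabled, a failed item is emitted as an `{ error }` output linked back to its input via `pairedItem` instead of aborting the whole run. A minimal TypeScript sketch of that wrapper, assuming an n8n `IExecuteFunctions` context (`this`); `processItem()` is a hypothetical stand-in for the node-specific agent/chain work shown in the diffs:

// Sketch of the per-item error-handling pattern applied in each file below.
// Assumes an n8n IExecuteFunctions context (`this`); processItem() is a
// hypothetical stand-in for the node-specific agent/chain call.
const items = this.getInputData();
const returnData: INodeExecutionData[] = [];

for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
	try {
		const json = await processItem(items[itemIndex], itemIndex);
		returnData.push({ json });
	} catch (error) {
		// With "Continue On Fail" enabled, report the failure as an item that
		// stays paired with its input instead of failing the execution.
		if (this.continueOnFail()) {
			returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
			continue;
		}
		throw error;
	}
}

return await this.prepareOutputData(returnData);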


@@ -79,36 +79,45 @@ export async function conversationalAgentExecute(
 	const items = this.getInputData();
 	for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
+		try {
 			let input;
 			if (this.getNode().typeVersion <= 1.2) {
 				input = this.getNodeParameter('text', itemIndex) as string;
 			} else {
 				input = getPromptInputByType({
 					ctx: this,
 					i: itemIndex,
 					inputKey: 'text',
 					promptTypeKey: 'promptType',
 				});
 			}
 			if (input === undefined) {
 				throw new NodeOperationError(this.getNode(), 'The text parameter is empty.');
 			}
 			if (prompt) {
 				input = (await prompt.invoke({ input })).value;
 			}
 			let response = await agentExecutor
 				.withConfig(getTracingConfig(this))
 				.invoke({ input, outputParsers });
 			if (outputParser) {
 				response = { output: await outputParser.parse(response.output as string) };
 			}
 			returnData.push({ json: response });
+		} catch (error) {
+			if (this.continueOnFail()) {
+				returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
+				continue;
+			}
+			throw error;
+		}
 	}
 	return await this.prepareOutputData(returnData);


@@ -85,35 +85,44 @@ export async function openAiFunctionsAgentExecute(
 	const items = this.getInputData();
 	for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
+		try {
 			let input;
 			if (this.getNode().typeVersion <= 1.2) {
 				input = this.getNodeParameter('text', itemIndex) as string;
 			} else {
 				input = getPromptInputByType({
 					ctx: this,
 					i: itemIndex,
 					inputKey: 'text',
 					promptTypeKey: 'promptType',
 				});
 			}
 			if (input === undefined) {
 				throw new NodeOperationError(this.getNode(), 'The text parameter is empty.');
 			}
 			if (prompt) {
 				input = (await prompt.invoke({ input })).value;
 			}
 			let response = await agentExecutor
 				.withConfig(getTracingConfig(this))
 				.invoke({ input, outputParsers });
 			if (outputParser) {
 				response = { output: await outputParser.parse(response.output as string) };
 			}
 			returnData.push({ json: response });
+		} catch (error) {
+			if (this.continueOnFail()) {
+				returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
+				continue;
+			}
+			throw error;
+		}
 	}
 	return await this.prepareOutputData(returnData);


@@ -60,35 +60,44 @@ export async function planAndExecuteAgentExecute(
 	const items = this.getInputData();
 	for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
+		try {
 			let input;
 			if (this.getNode().typeVersion <= 1.2) {
 				input = this.getNodeParameter('text', itemIndex) as string;
 			} else {
 				input = getPromptInputByType({
 					ctx: this,
 					i: itemIndex,
 					inputKey: 'text',
 					promptTypeKey: 'promptType',
 				});
 			}
 			if (input === undefined) {
 				throw new NodeOperationError(this.getNode(), 'The text parameter is empty.');
 			}
 			if (prompt) {
 				input = (await prompt.invoke({ input })).value;
 			}
 			let response = await agentExecutor
 				.withConfig(getTracingConfig(this))
 				.invoke({ input, outputParsers });
 			if (outputParser) {
 				response = { output: await outputParser.parse(response.output as string) };
 			}
 			returnData.push({ json: response });
+		} catch (error) {
+			if (this.continueOnFail()) {
+				returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
+				continue;
+			}
+			throw error;
+		}
 	}
 	return await this.prepareOutputData(returnData);


@@ -80,36 +80,45 @@ export async function reActAgentAgentExecute(
 	const items = this.getInputData();
 	for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
+		try {
 			let input;
 			if (this.getNode().typeVersion <= 1.2) {
 				input = this.getNodeParameter('text', itemIndex) as string;
 			} else {
 				input = getPromptInputByType({
 					ctx: this,
 					i: itemIndex,
 					inputKey: 'text',
 					promptTypeKey: 'promptType',
 				});
 			}
 			if (input === undefined) {
 				throw new NodeOperationError(this.getNode(), 'The text parameter is empty.');
 			}
 			if (prompt) {
 				input = (await prompt.invoke({ input })).value;
 			}
 			let response = await agentExecutor
 				.withConfig(getTracingConfig(this))
 				.invoke({ input, outputParsers });
 			if (outputParser) {
 				response = { output: await outputParser.parse(response.output as string) };
 			}
 			returnData.push({ json: response });
+		} catch (error) {
+			if (this.continueOnFail()) {
+				returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
+				continue;
+			}
+			throw error;
+		}
 	}
 	return await this.prepareOutputData(returnData);


@@ -41,106 +41,115 @@ export async function sqlAgentAgentExecute(
 	const returnData: INodeExecutionData[] = [];
 	for (let i = 0; i < items.length; i++) {
+		try {
 			const item = items[i];
 			let input;
 			if (this.getNode().typeVersion <= 1.2) {
 				input = this.getNodeParameter('input', i) as string;
 			} else {
 				input = getPromptInputByType({
 					ctx: this,
 					i,
 					inputKey: 'text',
 					promptTypeKey: 'promptType',
 				});
 			}
 			if (input === undefined) {
 				throw new NodeOperationError(this.getNode(), 'The prompt parameter is empty.');
 			}
 			const options = this.getNodeParameter('options', i, {});
 			const selectedDataSource = this.getNodeParameter('dataSource', i, 'sqlite') as
 				| 'mysql'
 				| 'postgres'
 				| 'sqlite';
 			const includedSampleRows = options.includedSampleRows as number;
 			const includedTablesArray = parseTablesString((options.includedTables as string) ?? '');
 			const ignoredTablesArray = parseTablesString((options.ignoredTables as string) ?? '');
 			let dataSource: DataSource | null = null;
 			if (selectedDataSource === 'sqlite') {
 				if (!item.binary) {
 					throw new NodeOperationError(
 						this.getNode(),
 						'No binary data found, please connect a binary to the input if you want to use SQLite as data source',
 					);
 				}
 				const binaryPropertyName = this.getNodeParameter('binaryPropertyName', i, 'data');
 				dataSource = await getSqliteDataSource.call(this, item.binary, binaryPropertyName);
 			}
 			if (selectedDataSource === 'postgres') {
 				dataSource = await getPostgresDataSource.call(this);
 			}
 			if (selectedDataSource === 'mysql') {
 				dataSource = await getMysqlDataSource.call(this);
 			}
 			if (!dataSource) {
 				throw new NodeOperationError(
 					this.getNode(),
 					'No data source found, please configure data source',
 				);
 			}
 			const agentOptions: SqlCreatePromptArgs = {
 				topK: (options.topK as number) ?? 10,
 				prefix: (options.prefixPrompt as string) ?? SQL_PREFIX,
 				suffix: (options.suffixPrompt as string) ?? SQL_SUFFIX,
 				inputVariables: ['chatHistory', 'input', 'agent_scratchpad'],
 			};
 			const dbInstance = await SqlDatabase.fromDataSourceParams({
 				appDataSource: dataSource,
 				includesTables: includedTablesArray.length > 0 ? includedTablesArray : undefined,
 				ignoreTables: ignoredTablesArray.length > 0 ? ignoredTablesArray : undefined,
 				sampleRowsInTableInfo: includedSampleRows ?? 3,
 			});
 			const toolkit = new SqlToolkit(dbInstance, model);
 			const agentExecutor = createSqlAgent(model, toolkit, agentOptions);
 			const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
 				| BaseChatMemory
 				| undefined;
 			agentExecutor.memory = memory;
 			let chatHistory = '';
 			if (memory) {
 				const messages = await memory.chatHistory.getMessages();
 				chatHistory = serializeChatHistory(messages);
 			}
 			let response: IDataObject;
 			try {
 				response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({
 					input,
 					signal: this.getExecutionCancelSignal(),
 					chatHistory,
 				});
 			} catch (error) {
 				if ((error.message as IDataObject)?.output) {
 					response = error.message as IDataObject;
 				} else {
 					throw new NodeOperationError(this.getNode(), error.message as string, { itemIndex: i });
 				}
 			}
 			returnData.push({ json: response });
+		} catch (error) {
+			if (this.continueOnFail()) {
+				returnData.push({ json: { error: error.message }, pairedItem: { item: i } });
+				continue;
+			}
+			throw error;
+		}
 	}
 	return await this.prepareOutputData(returnData);
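
Note that the SQL agent keeps its pre-existing inner try/catch: when the executor throws an error whose message carries an `output` payload, that payload is still returned as the item's response, and only errors that escape this inner handler (re-thrown as a `NodeOperationError`) reach the new outer catch with its continue-on-fail logic. A minimal sketch of the two layers, with `runSqlAgent()` as a hypothetical stand-in for the `agentExecutor.withConfig(...).invoke(...)` call above:

// Layered error handling in sqlAgentAgentExecute (sketch; runSqlAgent() is a
// hypothetical stand-in for the agent executor invocation shown above).
let response: IDataObject;
try {
	response = await runSqlAgent(input);
} catch (error) {
	// Inner layer (pre-existing): some agent failures still carry usable output.
	if ((error.message as IDataObject)?.output) {
		response = error.message as IDataObject;
	} else {
		// Re-thrown; the new outer catch decides between continueOnFail and abort.
		throw new NodeOperationError(this.getNode(), error.message as string, { itemIndex: i });
	}
}
returnData.push({ json: response });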


@@ -320,67 +320,76 @@ export class OpenAiAssistant implements INodeType {
 	const returnData: INodeExecutionData[] = [];
 	for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
+		try {
 			const input = this.getNodeParameter('text', itemIndex) as string;
 			const assistantId = this.getNodeParameter('assistantId', itemIndex, '') as string;
 			const nativeTools = this.getNodeParameter('nativeTools', itemIndex, []) as Array<
 				'code_interpreter' | 'retrieval'
 			>;
 			const options = this.getNodeParameter('options', itemIndex, {}) as {
 				baseURL?: string;
 				maxRetries: number;
 				timeout: number;
 			};
 			if (input === undefined) {
 				throw new NodeOperationError(this.getNode(), 'The text parameter is empty.');
 			}
 			const client = new OpenAIClient({
 				apiKey: credentials.apiKey as string,
 				maxRetries: options.maxRetries ?? 2,
 				timeout: options.timeout ?? 10000,
 				baseURL: options.baseURL,
 			});
 			let agent;
 			const nativeToolsParsed: OpenAIToolType = nativeTools.map((tool) => ({ type: tool }));
 			const transformedConnectedTools = tools?.map(formatToOpenAIAssistantTool) ?? [];
 			const newTools = [...transformedConnectedTools, ...nativeToolsParsed];
 			// Existing agent, update tools with currently assigned
 			if (assistantId) {
 				agent = new OpenAIAssistantRunnable({ assistantId, client, asAgent: true });
 				await client.beta.assistants.update(assistantId, {
 					tools: newTools,
 				});
 			} else {
 				const name = this.getNodeParameter('name', itemIndex, '') as string;
 				const instructions = this.getNodeParameter('instructions', itemIndex, '') as string;
 				const model = this.getNodeParameter('model', itemIndex, 'gpt-3.5-turbo-1106') as string;
 				agent = await OpenAIAssistantRunnable.createAssistant({
 					model,
 					client,
 					instructions,
 					name,
 					tools: newTools,
 					asAgent: true,
 				});
 			}
 			const agentExecutor = AgentExecutor.fromAgentAndTools({
 				agent,
 				tools,
 			});
 			const response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({
 				content: input,
 				signal: this.getExecutionCancelSignal(),
 				timeout: options.timeout ?? 10000,
 			});
 			returnData.push({ json: response });
+		} catch (error) {
+			if (this.continueOnFail()) {
+				returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
+				continue;
+			}
+			throw error;
+		}
 	}
 	return await this.prepareOutputData(returnData);


@@ -519,55 +519,64 @@ export class ChainLlm implements INodeType {
 	const outputParsers = await getOptionalOutputParsers(this);
 	for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
+		try {
 			let prompt: string;
 			if (this.getNode().typeVersion <= 1.3) {
 				prompt = this.getNodeParameter('prompt', itemIndex) as string;
 			} else {
 				prompt = getPromptInputByType({
 					ctx: this,
 					i: itemIndex,
 					inputKey: 'text',
 					promptTypeKey: 'promptType',
 				});
 			}
 			const messages = this.getNodeParameter(
 				'messages.messageValues',
 				itemIndex,
 				[],
 			) as MessagesTemplate[];
 			if (prompt === undefined) {
-				throw new NodeOperationError(this.getNode(), 'The prompt parameter is empty.');
+				throw new NodeOperationError(this.getNode(), "The 'prompt' parameter is empty.");
 			}
 			const responses = await getChain(this, itemIndex, prompt, llm, outputParsers, messages);
 			responses.forEach((response) => {
 				let data: IDataObject;
 				if (typeof response === 'string') {
 					data = {
 						response: {
 							text: response.trim(),
 						},
 					};
 				} else if (Array.isArray(response)) {
 					data = {
 						data: response,
 					};
 				} else if (response instanceof Object) {
 					data = response as IDataObject;
 				} else {
 					data = {
 						response: {
 							text: response,
 						},
 					};
 				}
 				returnData.push({
 					json: data,
 				});
 			});
+		} catch (error) {
+			if (this.continueOnFail()) {
+				returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
+				continue;
+			}
+			throw error;
+		}
 	}
 	return [returnData];


@@ -160,25 +160,34 @@ export class ChainRetrievalQa implements INodeType {
 	// Run for each item
 	for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
+		try {
 			let query;
 			if (this.getNode().typeVersion <= 1.2) {
 				query = this.getNodeParameter('query', itemIndex) as string;
 			} else {
 				query = getPromptInputByType({
 					ctx: this,
 					i: itemIndex,
 					inputKey: 'text',
 					promptTypeKey: 'promptType',
 				});
 			}
 			if (query === undefined) {
 				throw new NodeOperationError(this.getNode(), 'The query parameter is empty.');
 			}
 			const response = await chain.withConfig(getTracingConfig(this)).invoke({ query });
 			returnData.push({ json: { response } });
+		} catch (error) {
+			if (this.continueOnFail()) {
+				returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
+				continue;
+			}
+			throw error;
+		}
 	}
 	return await this.prepareOutputData(returnData);
 }


@@ -329,90 +329,99 @@ export class ChainSummarizationV2 implements INodeType {
 	const returnData: INodeExecutionData[] = [];
 	for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
+		try {
 			const summarizationMethodAndPrompts = this.getNodeParameter(
 				'options.summarizationMethodAndPrompts.values',
 				itemIndex,
 				{},
 			) as {
 				prompt?: string;
 				refineQuestionPrompt?: string;
 				refinePrompt?: string;
 				summarizationMethod: 'map_reduce' | 'stuff' | 'refine';
 				combineMapPrompt?: string;
 			};
 			const chainArgs = getChainPromptsArgs(
 				summarizationMethodAndPrompts.summarizationMethod ?? 'map_reduce',
 				summarizationMethodAndPrompts,
 			);
 			const chain = loadSummarizationChain(model, chainArgs);
 			const item = items[itemIndex];
 			let processedDocuments: Document[];
 			// Use dedicated document loader input to load documents
 			if (operationMode === 'documentLoader') {
 				const documentInput = (await this.getInputConnectionData(
 					NodeConnectionType.AiDocument,
 					0,
 				)) as N8nJsonLoader | Array<Document<Record<string, unknown>>>;
 				const isN8nLoader =
 					documentInput instanceof N8nJsonLoader || documentInput instanceof N8nBinaryLoader;
 				processedDocuments = isN8nLoader
 					? await documentInput.processItem(item, itemIndex)
 					: documentInput;
 				const response = await chain.withConfig(getTracingConfig(this)).invoke({
 					input_documents: processedDocuments,
 				});
 				returnData.push({ json: { response } });
 			}
 			// Take the input and use binary or json loader
 			if (['nodeInputJson', 'nodeInputBinary'].includes(operationMode)) {
 				let textSplitter: TextSplitter | undefined;
 				switch (chunkingMode) {
 					// In simple mode we use recursive character splitter with default settings
 					case 'simple':
 						const chunkSize = this.getNodeParameter('chunkSize', itemIndex, 1000) as number;
 						const chunkOverlap = this.getNodeParameter('chunkOverlap', itemIndex, 200) as number;
 						textSplitter = new RecursiveCharacterTextSplitter({ chunkOverlap, chunkSize });
 						break;
 					// In advanced mode user can connect text splitter node so we just retrieve it
 					case 'advanced':
 						textSplitter = (await this.getInputConnectionData(
 							NodeConnectionType.AiTextSplitter,
 							0,
 						)) as TextSplitter | undefined;
 						break;
 					default:
 						break;
 				}
 				let processor: N8nJsonLoader | N8nBinaryLoader;
 				if (operationMode === 'nodeInputBinary') {
 					const binaryDataKey = this.getNodeParameter(
 						'options.binaryDataKey',
 						itemIndex,
 						'data',
 					) as string;
 					processor = new N8nBinaryLoader(this, 'options.', binaryDataKey, textSplitter);
 				} else {
 					processor = new N8nJsonLoader(this, 'options.', textSplitter);
 				}
 				const processedItem = await processor.processItem(item, itemIndex);
 				const response = await chain.call({
 					input_documents: processedItem,
 				});
 				returnData.push({ json: { response } });
 			}
+		} catch (error) {
+			if (this.continueOnFail()) {
+				returnData.push({ json: { error: error.message }, pairedItem: { item: itemIndex } });
+				continue;
+			}
+			throw error;
+		}
 	}