refactor: Update Langchain to 0.1.41 & add support for Claude 3 (#8825)

Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
Co-authored-by: Michael Kret <michael.k@radency.com>
oleg authored 2024-03-07 11:36:36 +01:00, committed by GitHub
parent c6f6254c0e
commit 0f7ae3f50a
76 changed files with 908 additions and 451 deletions

@@ -92,7 +92,7 @@
 "xml2js": "^0.5.0",
 "cpy@8>globby": "^11.1.0",
 "qqjs>globby": "^11.1.0",
-"@langchain/core": "^0.1.8"
+"@langchain/core": "0.1.41"
 },
 "patchedDependencies": {
 "typedi@0.10.0": "patches/typedi@0.10.0.patch",

@@ -6,9 +6,9 @@ import {
 } from 'n8n-workflow';
 import { initializeAgentExecutorWithOptions } from 'langchain/agents';
-import type { BaseChatMemory } from 'langchain/memory';
-import type { BaseOutputParser } from 'langchain/schema/output_parser';
-import { PromptTemplate } from 'langchain/prompts';
+import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
+import type { BaseOutputParser } from '@langchain/core/output_parsers';
+import { PromptTemplate } from '@langchain/core/prompts';
 import { CombiningOutputParser } from 'langchain/output_parsers';
 import {
 isChatInstance,
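
Nearly every hunk in this commit follows the pattern above: the old monolithic `langchain` deep imports move to the scoped packages that accompany `@langchain/core` 0.1.x. A rough, non-exhaustive map of the paths as they are used in this diff:

// Old deep import                     -> new scoped package (as used in this PR)
// 'langchain/schema/output_parser'    -> '@langchain/core/output_parsers'
// 'langchain/prompts'                 -> '@langchain/core/prompts'
// 'langchain/schema'                  -> '@langchain/core/messages'
// 'langchain/base_language'           -> '@langchain/core/language_models/base'
// 'langchain/chat_models/base'        -> '@langchain/core/language_models/chat_models'
// 'langchain/embeddings/base'         -> '@langchain/core/embeddings'
// 'langchain/document'                -> '@langchain/core/documents'
// 'langchain/tools' (core classes)    -> '@langchain/core/tools'
// 'langchain/memory' (BaseChatMemory) -> '@langchain/community/memory/chat_memory'
// 'langchain/chat_models/openai'      -> '@langchain/openai'
import type { BaseOutputParser } from '@langchain/core/output_parsers';
import { PromptTemplate } from '@langchain/core/prompts';
import { ChatOpenAI } from '@langchain/openai';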

@@ -7,11 +7,11 @@ import {
 import type { AgentExecutorInput } from 'langchain/agents';
 import { AgentExecutor, OpenAIAgent } from 'langchain/agents';
-import type { BaseOutputParser } from 'langchain/schema/output_parser';
-import { PromptTemplate } from 'langchain/prompts';
+import type { BaseOutputParser } from '@langchain/core/output_parsers';
+import { PromptTemplate } from '@langchain/core/prompts';
 import { CombiningOutputParser } from 'langchain/output_parsers';
 import { BufferMemory, type BaseChatMemory } from 'langchain/memory';
-import { ChatOpenAI } from 'langchain/chat_models/openai';
+import { ChatOpenAI } from '@langchain/openai';
 import {
 getConnectedTools,
 getOptionalOutputParsers,

@@ -5,10 +5,10 @@ import {
 NodeOperationError,
 } from 'n8n-workflow';
-import type { BaseOutputParser } from 'langchain/schema/output_parser';
-import { PromptTemplate } from 'langchain/prompts';
+import type { BaseOutputParser } from '@langchain/core/output_parsers';
+import { PromptTemplate } from '@langchain/core/prompts';
 import { CombiningOutputParser } from 'langchain/output_parsers';
-import type { BaseChatModel } from 'langchain/chat_models/base';
+import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import { PlanAndExecuteAgentExecutor } from 'langchain/experimental/plan_and_execute';
 import {
 getConnectedTools,

@@ -7,10 +7,10 @@ import {
 import { AgentExecutor, ChatAgent, ZeroShotAgent } from 'langchain/agents';
 import type { BaseLanguageModel } from 'langchain/base_language';
-import type { BaseOutputParser } from 'langchain/schema/output_parser';
-import { PromptTemplate } from 'langchain/prompts';
+import type { BaseOutputParser } from '@langchain/core/output_parsers';
+import { PromptTemplate } from '@langchain/core/prompts';
 import { CombiningOutputParser } from 'langchain/output_parsers';
-import type { BaseChatModel } from 'langchain/chat_models/base';
+import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import {
 getConnectedTools,
 getOptionalOutputParsers,

@@ -3,13 +3,14 @@ import {
 type INodeExecutionData,
 NodeConnectionType,
 NodeOperationError,
+type IDataObject,
 } from 'n8n-workflow';
 import { SqlDatabase } from 'langchain/sql_db';
 import type { SqlCreatePromptArgs } from 'langchain/agents/toolkits/sql';
 import { SqlToolkit, createSqlAgent } from 'langchain/agents/toolkits/sql';
-import type { BaseLanguageModel } from 'langchain/dist/base_language';
-import type { BaseChatMemory } from 'langchain/memory';
+import type { BaseLanguageModel } from '@langchain/core/language_models/base';
+import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
 import type { DataSource } from '@n8n/typeorm';
 import { getPromptInputByType, serializeChatHistory } from '../../../../../utils/helpers';
@@ -123,7 +124,7 @@ export async function sqlAgentAgentExecute(
 chatHistory = serializeChatHistory(messages);
 }
-let response;
+let response: IDataObject;
 try {
 response = await agentExecutor.call({
 input,
@@ -131,10 +132,10 @@ export async function sqlAgentAgentExecute(
 chatHistory,
 });
 } catch (error) {
-if (error.message?.output) {
-response = error.message;
+if ((error.message as IDataObject)?.output) {
+response = error.message as IDataObject;
 } else {
-throw new NodeOperationError(this.getNode(), error.message, { itemIndex: i });
+throw new NodeOperationError(this.getNode(), error.message as string, { itemIndex: i });
 }
 }
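
The hunk above also types the SQL agent's response and its error path. Condensed into a standalone sketch (the helper name and its parameters are hypothetical; the real code lives inside the node's execute function):

import { NodeOperationError, type IDataObject, type INode } from 'n8n-workflow';
import type { AgentExecutor } from 'langchain/agents';

// Hypothetical helper mirroring the catch block above: some LangChain errors carry
// the partial agent output on `message.output`, so it is reused as the response;
// anything else is surfaced as a NodeOperationError for the current item.
async function callSqlAgent(
	agentExecutor: AgentExecutor,
	input: string,
	chatHistory: string | undefined,
	node: INode,
	itemIndex: number,
): Promise<IDataObject> {
	try {
		return (await agentExecutor.call({ input, chatHistory })) as IDataObject;
	} catch (error: any) {
		if ((error.message as IDataObject)?.output) {
			return error.message as IDataObject;
		}
		throw new NodeOperationError(node, error.message as string, { itemIndex });
	}
}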

@@ -1,6 +1,6 @@
 import { zodToJsonSchema } from 'zod-to-json-schema';
 import type { OpenAI as OpenAIClient } from 'openai';
-import type { StructuredTool } from 'langchain/tools';
+import type { StructuredTool } from '@langchain/core/tools';
 // Copied from langchain(`langchain/src/tools/convert_to_openai.ts`)
 // since these functions are not exported

@@ -8,19 +8,19 @@ import type {
 INodeTypeDescription,
 } from 'n8n-workflow';
-import type { BaseLanguageModel } from 'langchain/base_language';
+import type { BaseLanguageModel } from '@langchain/core/language_models/base';
 import {
 AIMessagePromptTemplate,
 PromptTemplate,
 SystemMessagePromptTemplate,
 HumanMessagePromptTemplate,
 ChatPromptTemplate,
-} from 'langchain/prompts';
-import type { BaseOutputParser } from 'langchain/schema/output_parser';
+} from '@langchain/core/prompts';
+import type { BaseOutputParser } from '@langchain/core/output_parsers';
 import { CombiningOutputParser } from 'langchain/output_parsers';
 import { LLMChain } from 'langchain/chains';
-import type { BaseChatModel } from 'langchain/chat_models/base';
-import { HumanMessage } from 'langchain/schema';
+import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
+import { HumanMessage } from '@langchain/core/messages';
 import { getTemplateNoticeField } from '../../../utils/sharedFields';
 import {
 getOptionalOutputParsers,
@@ -92,6 +92,7 @@ async function getChainPromptTemplate(
 llm: BaseLanguageModel | BaseChatModel,
 messages?: MessagesTemplate[],
 formatInstructions?: string,
+query?: string,
 ) {
 const queryTemplate = new PromptTemplate({
 template: `{query}${formatInstructions ? '\n{formatInstructions}' : ''}`,
@@ -129,7 +130,15 @@ async function getChainPromptTemplate(
 }),
 );
-parsedMessages.push(new HumanMessagePromptTemplate(queryTemplate));
+const lastMessage = parsedMessages[parsedMessages.length - 1];
+// If the last message is a human message and it has an array of content, we need to add the query to the last message
+if (lastMessage instanceof HumanMessage && Array.isArray(lastMessage.content)) {
+const humanMessage = new HumanMessagePromptTemplate(queryTemplate);
+const test = await humanMessage.format({ query });
+lastMessage.content.push({ text: test.content.toString(), type: 'text' });
+} else {
+parsedMessages.push(new HumanMessagePromptTemplate(queryTemplate));
+}
 return ChatPromptTemplate.fromMessages(parsedMessages);
 }
@@ -146,6 +155,7 @@ async function createSimpleLLMChain(
 llm,
 prompt,
 });
 const response = (await chain.call({
 query,
 signal: context.getExecutionCancelSignal(),
@@ -167,6 +177,8 @@ async function getChain(
 itemIndex,
 llm,
 messages,
+undefined,
+query,
 );
 // If there are no output parsers, create a simple LLM chain and execute the query
@@ -187,6 +199,7 @@ async function getChain(
 llm,
 messages,
 formatInstructions,
+query,
 );
 const chain = prompt.pipe(llm).pipe(combinedOutputParser);
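
The query threading added above exists so that, when the last human message already carries array content (the shape used for vision input such as Claude 3 image blocks), the user query is formatted and appended as an extra text block instead of being pushed as a second human message. A minimal sketch of that branch, with hypothetical names, assuming the messages were built from the node's message templates:

import { HumanMessage } from '@langchain/core/messages';
import {
	ChatPromptTemplate,
	HumanMessagePromptTemplate,
	PromptTemplate,
} from '@langchain/core/prompts';

async function addQueryToPrompt(
	parsedMessages: Array<HumanMessage | HumanMessagePromptTemplate>,
	query: string,
) {
	const queryTemplate = PromptTemplate.fromTemplate('{query}');
	const lastMessage = parsedMessages[parsedMessages.length - 1];
	if (lastMessage instanceof HumanMessage && Array.isArray(lastMessage.content)) {
		// Multimodal message: format the query and append it as a text part.
		const formatted = await new HumanMessagePromptTemplate(queryTemplate).format({ query });
		lastMessage.content.push({ type: 'text', text: formatted.content.toString() });
	} else {
		// Plain text flow: keep the template and let the chain format it later.
		parsedMessages.push(new HumanMessagePromptTemplate(queryTemplate));
	}
	return ChatPromptTemplate.fromMessages(parsedMessages);
}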

@@ -8,8 +8,8 @@ import {
 } from 'n8n-workflow';
 import { RetrievalQAChain } from 'langchain/chains';
-import type { BaseLanguageModel } from 'langchain/dist/base_language';
-import type { BaseRetriever } from 'langchain/schema/retriever';
+import type { BaseLanguageModel } from '@langchain/core/language_models/base';
+import type { BaseRetriever } from '@langchain/core/retrievers';
 import { getTemplateNoticeField } from '../../../utils/sharedFields';
 import { getPromptInputByType } from '../../../utils/helpers';

@@ -9,9 +9,9 @@ import {
 import type { SummarizationChainParams } from 'langchain/chains';
 import { loadSummarizationChain } from 'langchain/chains';
-import type { BaseLanguageModel } from 'langchain/dist/base_language';
-import type { Document } from 'langchain/document';
-import { PromptTemplate } from 'langchain/prompts';
+import type { BaseLanguageModel } from '@langchain/core/language_models/base';
+import type { Document } from '@langchain/core/documents';
+import { PromptTemplate } from '@langchain/core/prompts';
 import { N8nJsonLoader } from '../../../../utils/N8nJsonLoader';
 import { N8nBinaryLoader } from '../../../../utils/N8nBinaryLoader';
 import { getTemplateNoticeField } from '../../../../utils/sharedFields';

@@ -9,8 +9,8 @@ import type {
 } from 'n8n-workflow';
 import { loadSummarizationChain } from 'langchain/chains';
-import type { BaseLanguageModel } from 'langchain/dist/base_language';
-import type { Document } from 'langchain/document';
+import type { BaseLanguageModel } from '@langchain/core/language_models/base';
+import type { Document } from '@langchain/core/documents';
 import type { TextSplitter } from 'langchain/text_splitter';
 import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
 import { N8nJsonLoader } from '../../../../utils/N8nJsonLoader';

@@ -1,6 +1,5 @@
 import type { SummarizationChainParams } from 'langchain/chains';
-import { PromptTemplate } from 'langchain/prompts';
+import { PromptTemplate } from '@langchain/core/prompts';
 interface ChainTypeOptions {
 combineMapPrompt?: string;
 prompt?: string;

@@ -15,7 +15,7 @@ import {
 import { getSandboxContext } from 'n8n-nodes-base/dist/nodes/Code/Sandbox';
 import { JavaScriptSandbox } from 'n8n-nodes-base/dist/nodes/Code/JavaScriptSandbox';
 import { standardizeOutput } from 'n8n-nodes-base/dist/nodes/Code/utils';
-import type { Tool } from 'langchain/tools';
+import type { Tool } from '@langchain/core/tools';
 import { makeResolverFromLegacyOptions } from '@n8n/vm2';
 import { logWrapper } from '../../utils/logWrapper';
@@ -36,7 +36,7 @@ const connectorTypes = {
 [NodeConnectionType.Main]: 'Main',
 };
-const defaultCodeExecute = `const { PromptTemplate } = require('langchain/prompts');
+const defaultCodeExecute = `const { PromptTemplate } = require('@langchain/core/prompts');
 const query = 'Tell me a joke';
 const prompt = PromptTemplate.fromTemplate(query);

@@ -6,7 +6,7 @@ import {
 type INodeTypeDescription,
 type SupplyData,
 } from 'n8n-workflow';
-import { BedrockEmbeddings } from 'langchain/embeddings/bedrock';
+import { BedrockEmbeddings } from '@langchain/community/embeddings/bedrock';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -7,7 +7,7 @@ import {
 type SupplyData,
 } from 'n8n-workflow';
-import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
+import { OpenAIEmbeddings } from '@langchain/openai';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -6,7 +6,7 @@ import {
 type INodeTypeDescription,
 type SupplyData,
 } from 'n8n-workflow';
-import { CohereEmbeddings } from 'langchain/embeddings/cohere';
+import { CohereEmbeddings } from '@langchain/cohere';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
@@ -89,7 +89,7 @@ export class EmbeddingsCohere implements INodeType {
 const credentials = await this.getCredentials('cohereApi');
 const embeddings = new CohereEmbeddings({
 apiKey: credentials.apiKey as string,
-modelName,
+model: modelName,
 });
 return {
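
Aside from the new package, the `@langchain/cohere` constructor takes `model` where the old `langchain/embeddings/cohere` class took `modelName`. A minimal sketch with placeholder values (the node passes the user's credentials and model selection):

import { CohereEmbeddings } from '@langchain/cohere';

const embeddings = new CohereEmbeddings({
	apiKey: process.env.COHERE_API_KEY,
	model: 'embed-english-v3.0', // example model id
});

// const vector = await embeddings.embedQuery('hello world');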

@@ -6,7 +6,7 @@ import {
 type INodeTypeDescription,
 type SupplyData,
 } from 'n8n-workflow';
-import { GooglePaLMEmbeddings } from 'langchain/embeddings/googlepalm';
+import { GooglePaLMEmbeddings } from '@langchain/community/embeddings/googlepalm';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -6,7 +6,7 @@ import {
 type INodeTypeDescription,
 type SupplyData,
 } from 'n8n-workflow';
-import { HuggingFaceInferenceEmbeddings } from 'langchain/embeddings/hf';
+import { HuggingFaceInferenceEmbeddings } from '@langchain/community/embeddings/hf';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -6,7 +6,7 @@ import {
 type INodeTypeDescription,
 type SupplyData,
 } from 'n8n-workflow';
-import { OllamaEmbeddings } from 'langchain/embeddings/ollama';
+import { OllamaEmbeddings } from '@langchain/community/embeddings/ollama';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
 import { ollamaDescription, ollamaModel } from '../../llms/LMOllama/description';

@@ -9,7 +9,7 @@ import {
 } from 'n8n-workflow';
 import type { ClientOptions } from 'openai';
-import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
+import { OpenAIEmbeddings } from '@langchain/openai';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -1,16 +1,53 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
 import {
 NodeConnectionType,
+type INodeProperties,
 type IExecuteFunctions,
 type INodeType,
 type INodeTypeDescription,
 type SupplyData,
 } from 'n8n-workflow';
-import { ChatAnthropic } from 'langchain/chat_models/anthropic';
+import { ChatAnthropic } from '@langchain/anthropic';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
+const modelField: INodeProperties = {
+displayName: 'Model',
+name: 'model',
+type: 'options',
+// eslint-disable-next-line n8n-nodes-base/node-param-options-type-unsorted-items
+options: [
+{
+name: 'Claude 3 Opus(20240229)',
+value: 'claude-3-opus-20240229',
+},
+{
+name: 'Claude 3 Sonnet(20240229)',
+value: 'claude-3-sonnet-20240229',
+},
+{
+name: 'LEGACY: Claude 2',
+value: 'claude-2',
+},
+{
+name: 'LEGACY: Claude 2.1',
+value: 'claude-2.1',
+},
+{
+name: 'LEGACY: Claude Instant 1.2',
+value: 'claude-instant-1.2',
+},
+{
+name: 'LEGACY: Claude Instant 1',
+value: 'claude-instant-1',
+},
+],
+description:
+'The model which will generate the completion. <a href="https://docs.anthropic.com/claude/docs/models-overview">Learn more</a>.',
+default: 'claude-2',
+};
 export class LmChatAnthropic implements INodeType {
 description: INodeTypeDescription = {
 displayName: 'Anthropic Chat Model',
@@ -18,7 +55,7 @@ export class LmChatAnthropic implements INodeType {
 name: 'lmChatAnthropic',
 icon: 'file:anthropic.svg',
 group: ['transform'],
-version: 1,
+version: [1, 1.1],
 description: 'Language Model Anthropic',
 defaults: {
 name: 'Anthropic Chat Model',
@@ -35,6 +72,7 @@ export class LmChatAnthropic implements INodeType {
 },
 ],
 },
+alias: ['claude', 'sonnet', 'opus'],
 },
 // eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
 inputs: [],
@@ -50,30 +88,21 @@ export class LmChatAnthropic implements INodeType {
 properties: [
 getConnectionHintNoticeField([NodeConnectionType.AiChain, NodeConnectionType.AiChain]),
 {
-displayName: 'Model',
-name: 'model',
-type: 'options',
-options: [
-{
-name: 'Claude 2',
-value: 'claude-2',
-},
-{
-name: 'Claude 2.1',
-value: 'claude-2.1',
-},
-{
-name: 'Claude Instant 1.2',
-value: 'claude-instant-1.2',
-},
-{
-name: 'Claude Instant 1',
-value: 'claude-instant-1',
-},
-],
-description:
-'The model which will generate the completion. <a href="https://docs.anthropic.com/claude/reference/selecting-a-model">Learn more</a>.',
-default: 'claude-2',
+...modelField,
+displayOptions: {
+show: {
+'@version': [1],
+},
+},
+},
+{
+...modelField,
+default: 'claude-3-sonnet-20240229',
+displayOptions: {
+hide: {
+'@version': [1],
+},
+},
 },
 {
 displayName: 'Options',
@@ -86,7 +115,7 @@
 {
 displayName: 'Maximum Number of Tokens',
 name: 'maxTokensToSample',
-default: 32768,
+default: 4096,
 description: 'The maximum number of tokens to generate in the completion',
 type: 'number',
 },
@@ -126,12 +155,20 @@
 const credentials = await this.getCredentials('anthropicApi');
 const modelName = this.getNodeParameter('model', itemIndex) as string;
-const options = this.getNodeParameter('options', itemIndex, {}) as object;
+const options = this.getNodeParameter('options', itemIndex, {}) as {
+maxTokensToSample?: number;
+temperature: number;
+topK: number;
+topP: number;
+};
 const model = new ChatAnthropic({
 anthropicApiKey: credentials.apiKey as string,
 modelName,
-...options,
+maxTokens: options.maxTokensToSample,
+temperature: options.temperature,
+topK: options.topK,
+topP: options.topP,
 });
 return {
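
Stripped of the node plumbing, the Claude 3 setup above amounts to roughly the following (a sketch; the key and option values are placeholders, and the node's `maxTokensToSample` option is mapped onto the constructor's `maxTokens`):

import { ChatAnthropic } from '@langchain/anthropic';

// The dedicated @langchain/anthropic package replaces 'langchain/chat_models/anthropic'.
// The node's default maxTokensToSample also drops from 32768 to 4096, which matches
// the smaller output limit of the Claude 3 models.
const model = new ChatAnthropic({
	anthropicApiKey: process.env.ANTHROPIC_API_KEY,
	modelName: 'claude-3-sonnet-20240229',
	maxTokens: 4096,
	temperature: 0.7,
});

// const reply = await model.invoke('Tell me a joke');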

@@ -7,8 +7,7 @@ import {
 type SupplyData,
 } from 'n8n-workflow';
-import { ChatOllama } from 'langchain/chat_models/ollama';
-// import { ChatAnthropic } from 'langchain/chat_models/anthropic';
+import { ChatOllama } from '@langchain/community/chat_models/ollama';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
 import { ollamaModel, ollamaOptions, ollamaDescription } from '../LMOllama/description';

@@ -7,8 +7,7 @@ import {
 type SupplyData,
 } from 'n8n-workflow';
-import type { ClientOptions } from 'openai';
-import { ChatOpenAI } from 'langchain/chat_models/openai';
+import { ChatOpenAI, type ClientOptions } from '@langchain/openai';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -7,7 +7,7 @@ import {
 type SupplyData,
 } from 'n8n-workflow';
-import { Cohere } from 'langchain/llms/cohere';
+import { Cohere } from '@langchain/cohere';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -7,7 +7,7 @@ import {
 type SupplyData,
 } from 'n8n-workflow';
-import { Ollama } from 'langchain/llms/ollama';
+import { Ollama } from '@langchain/community/llms/ollama';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
 import { ollamaDescription, ollamaModel, ollamaOptions } from './description';

@@ -8,8 +8,7 @@ import type {
 ILoadOptionsFunctions,
 } from 'n8n-workflow';
-import type { ClientOptions } from 'openai';
-import { OpenAI } from 'langchain/llms/openai';
+import { OpenAI, type ClientOptions } from '@langchain/openai';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -7,7 +7,7 @@ import {
 type SupplyData,
 } from 'n8n-workflow';
-import { HuggingFaceInference } from 'langchain/llms/hf';
+import { HuggingFaceInference } from '@langchain/community/llms/hf';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -6,7 +6,7 @@ import {
 type INodeTypeDescription,
 type SupplyData,
 } from 'n8n-workflow';
-import { ChatBedrock } from 'langchain/chat_models/bedrock';
+import { BedrockChat } from '@langchain/community/chat_models/bedrock';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
 // Dependencies needed underneath the hood. We add them
@@ -149,7 +149,7 @@ export class LmChatAwsBedrock implements INodeType {
 maxTokensToSample: number;
 };
-const model = new ChatBedrock({
+const model = new BedrockChat({
 region: credentials.region as string,
 model: modelName,
 temperature: options.temperature,
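
Only the class name changes for Bedrock: `ChatBedrock` became `BedrockChat` upstream, while the options keep the same shape. A sketch with placeholder values (the node fills region, model and options from credentials and parameters; AWS credentials come from the default provider chain here):

import { BedrockChat } from '@langchain/community/chat_models/bedrock';

const model = new BedrockChat({
	region: 'us-east-1', // placeholder region
	model: 'anthropic.claude-v2', // placeholder Bedrock model id
	temperature: 0,
	maxTokens: 2048,
});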

@@ -7,8 +7,8 @@ import {
 type SupplyData,
 } from 'n8n-workflow';
-import type { ClientOptions } from 'openai';
-import { ChatOpenAI } from 'langchain/chat_models/openai';
+import type { ClientOptions } from '@langchain/openai';
+import { ChatOpenAI } from '@langchain/openai';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -6,7 +6,7 @@ import {
 type INodeTypeDescription,
 type SupplyData,
 } from 'n8n-workflow';
-import { ChatGooglePaLM } from 'langchain/chat_models/googlepalm';
+import { ChatGooglePaLM } from '@langchain/community/chat_models/googlepalm';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -6,7 +6,7 @@ import {
 type INodeTypeDescription,
 type SupplyData,
 } from 'n8n-workflow';
-import { GooglePaLM } from 'langchain/llms/googlepalm';
+import { GooglePaLM } from '@langchain/community/llms/googlepalm';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -7,8 +7,8 @@ import {
 type INodeType,
 type INodeTypeDescription,
 } from 'n8n-workflow';
-import type { BaseChatMemory } from 'langchain/memory';
-import type { BaseMessage } from 'langchain/schema';
+import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
+import type { BaseMessage } from '@langchain/core/messages';
 function simplifyMessages(messages: BaseMessage[]) {
 const chunkedMessages = [];

@@ -7,8 +7,8 @@ import type {
 INodeType,
 INodeTypeDescription,
 } from 'n8n-workflow';
-import type { BaseChatMemory } from 'langchain/memory';
-import { AIMessage, SystemMessage, HumanMessage, type BaseMessage } from 'langchain/schema';
+import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
+import { AIMessage, SystemMessage, HumanMessage, type BaseMessage } from '@langchain/core/messages';
 type MessageRole = 'ai' | 'system' | 'user';
 interface MessageRecord {

@@ -7,7 +7,7 @@ import {
 type SupplyData,
 } from 'n8n-workflow';
-import { MotorheadMemory } from 'langchain/memory';
+import { MotorheadMemory } from '@langchain/community/memory/motorhead_memory';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
 import { sessionIdOption, sessionKeyProperty } from '../descriptions';

@@ -8,8 +8,8 @@ import {
 NodeConnectionType,
 } from 'n8n-workflow';
 import { BufferMemory } from 'langchain/memory';
-import type { RedisChatMessageHistoryInput } from 'langchain/stores/message/redis';
-import { RedisChatMessageHistory } from 'langchain/stores/message/redis';
+import type { RedisChatMessageHistoryInput } from '@langchain/redis';
+import { RedisChatMessageHistory } from '@langchain/redis';
 import type { RedisClientOptions } from 'redis';
 import { createClient } from 'redis';
 import { logWrapper } from '../../../utils/logWrapper';
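
The Redis-backed memory now pairs `BufferMemory` from the main package with `RedisChatMessageHistory` from the new `@langchain/redis` package. A minimal sketch with placeholder connection and session values:

import { BufferMemory } from 'langchain/memory';
import { RedisChatMessageHistory } from '@langchain/redis';
import { createClient } from 'redis';

const client = createClient({ url: 'redis://localhost:6379' }); // placeholder URL
// await client.connect(); // required before the history is actually read or written

const memory = new BufferMemory({
	memoryKey: 'chat_history',
	chatHistory: new RedisChatMessageHistory({
		client,
		sessionId: 'example-session', // placeholder; the node derives this per chat session
		sessionTTL: 3600,
	}),
});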

@@ -1,7 +1,7 @@
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
 import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
 import type { IExecuteFunctions, INodeType, INodeTypeDescription, SupplyData } from 'n8n-workflow';
-import { XataChatMessageHistory } from 'langchain/stores/message/xata';
+import { XataChatMessageHistory } from '@langchain/community/stores/message/xata';
 import { BufferMemory } from 'langchain/memory';
 import { BaseClient } from '@xata.io/client';
 import { logWrapper } from '../../../utils/logWrapper';

@@ -6,7 +6,7 @@ import {
 type INodeTypeDescription,
 type SupplyData,
 } from 'n8n-workflow';
-import { ZepMemory } from 'langchain/memory/zep';
+import { ZepMemory } from '@langchain/community/memory/zep';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
 import { sessionIdOption, sessionKeyProperty } from '../descriptions';

@@ -7,8 +7,8 @@ import {
 type SupplyData,
 } from 'n8n-workflow';
 import { OutputFixingParser } from 'langchain/output_parsers';
-import type { BaseOutputParser } from 'langchain/schema/output_parser';
-import type { BaseLanguageModel } from 'langchain/base_language';
+import type { BaseOutputParser } from '@langchain/core/output_parsers';
+import type { BaseLanguageModel } from '@langchain/core/language_models/base';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -1,4 +1,4 @@
-import { BaseOutputParser, OutputParserException } from 'langchain/schema/output_parser';
+import { BaseOutputParser, OutputParserException } from '@langchain/core/output_parsers';
 export class ItemListOutputParser extends BaseOutputParser<string[]> {
 lc_namespace = ['n8n-nodes-langchain', 'output_parsers', 'list_items'];

@@ -13,7 +13,7 @@ import { parseSchema } from 'json-schema-to-zod';
 import { z } from 'zod';
 import type { JSONSchema7 } from 'json-schema';
 import { StructuredOutputParser } from 'langchain/output_parsers';
-import { OutputParserException } from 'langchain/schema/output_parser';
+import { OutputParserException } from '@langchain/core/output_parsers';
 import get from 'lodash/get';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -9,8 +9,8 @@ import {
 import { ContextualCompressionRetriever } from 'langchain/retrievers/contextual_compression';
 import { LLMChainExtractor } from 'langchain/retrievers/document_compressors/chain_extract';
-import type { BaseLanguageModel } from 'langchain/base_language';
-import type { BaseRetriever } from 'langchain/schema/retriever';
+import type { BaseLanguageModel } from '@langchain/core/language_models/base';
+import type { BaseRetriever } from '@langchain/core/retrievers';
 import { logWrapper } from '../../../utils/logWrapper';

@@ -8,8 +8,8 @@ import {
 } from 'n8n-workflow';
 import { MultiQueryRetriever } from 'langchain/retrievers/multi_query';
-import type { BaseLanguageModel } from 'langchain/base_language';
-import type { BaseRetriever } from 'langchain/schema/retriever';
+import type { BaseLanguageModel } from '@langchain/core/language_models/base';
+import type { BaseRetriever } from '@langchain/core/retrievers';
 import { logWrapper } from '../../../utils/logWrapper';

@@ -6,7 +6,7 @@ import {
 type INodeTypeDescription,
 type SupplyData,
 } from 'n8n-workflow';
-import type { VectorStore } from 'langchain/vectorstores/base';
+import type { VectorStore } from '@langchain/core/vectorstores';
 import { logWrapper } from '../../../utils/logWrapper';
 export class RetrieverVectorStore implements INodeType {

@@ -11,8 +11,8 @@ import type {
 SupplyData,
 } from 'n8n-workflow';
-import { BaseRetriever, type BaseRetrieverInput } from 'langchain/schema/retriever';
-import { Document } from 'langchain/document';
+import { BaseRetriever, type BaseRetrieverInput } from '@langchain/core/retrievers';
+import { Document } from '@langchain/core/documents';
 import type { SetField, SetNodeOptions } from 'n8n-nodes-base/dist/nodes/Set/v2/helpers/interfaces';
 import * as manual from 'n8n-nodes-base/dist/nodes/Set/v2/manual.mode';

@@ -6,11 +6,31 @@ import {
 type INodeTypeDescription,
 type SupplyData,
 } from 'n8n-workflow';
-import type { RecursiveCharacterTextSplitterParams } from 'langchain/text_splitter';
+import type {
+RecursiveCharacterTextSplitterParams,
+SupportedTextSplitterLanguage,
+} from 'langchain/text_splitter';
 import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
+const supportedLanguages: SupportedTextSplitterLanguage[] = [
+'cpp',
+'go',
+'java',
+'js',
+'php',
+'proto',
+'python',
+'rst',
+'ruby',
+'rust',
+'scala',
+'swift',
+'markdown',
+'latex',
+'html',
+];
 export class TextSplitterRecursiveCharacterTextSplitter implements INodeType {
 description: INodeTypeDescription = {
 displayName: 'Recursive Character Text Splitter',
@@ -54,6 +74,23 @@ export class TextSplitterRecursiveCharacterTextSplitter implements INodeType {
 type: 'number',
 default: 0,
 },
+{
+displayName: 'Options',
+name: 'options',
+placeholder: 'Add Option',
+description: 'Additional options to add',
+type: 'collection',
+default: {},
+options: [
+{
+displayName: 'Split Code',
+name: 'splitCode',
+default: 'markdown',
+type: 'options',
+options: supportedLanguages.map((lang) => ({ name: lang, value: lang })),
+},
+],
+},
 ],
 };
@@ -62,7 +99,11 @@ export class TextSplitterRecursiveCharacterTextSplitter implements INodeType {
 const chunkSize = this.getNodeParameter('chunkSize', itemIndex) as number;
 const chunkOverlap = this.getNodeParameter('chunkOverlap', itemIndex) as number;
+const splitCode = this.getNodeParameter(
+'options.splitCode',
+itemIndex,
+null,
+) as SupportedTextSplitterLanguage | null;
 const params: RecursiveCharacterTextSplitterParams = {
 // TODO: These are the default values, should we allow the user to change them?
 separators: ['\n\n', '\n', ' ', ''],
@@ -70,8 +111,13 @@ export class TextSplitterRecursiveCharacterTextSplitter implements INodeType {
 chunkOverlap,
 keepSeparator: false,
 };
-const splitter = new RecursiveCharacterTextSplitter(params);
+let splitter: RecursiveCharacterTextSplitter;
+if (splitCode && supportedLanguages.includes(splitCode)) {
+splitter = RecursiveCharacterTextSplitter.fromLanguage(splitCode, params);
+} else {
+splitter = new RecursiveCharacterTextSplitter(params);
+}
 return {
 response: logWrapper(splitter, this),
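
The new Split Code option is wired to `RecursiveCharacterTextSplitter.fromLanguage`, which swaps in language-aware separators for the selected language. A small usage sketch:

import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter';

const splitter = RecursiveCharacterTextSplitter.fromLanguage('markdown', {
	chunkSize: 1000,
	chunkOverlap: 0,
});

// const chunks = await splitter.splitText('# Title\n\nSome markdown content');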

@@ -12,7 +12,7 @@ import { getSandboxContext } from 'n8n-nodes-base/dist/nodes/Code/Sandbox';
 import { JavaScriptSandbox } from 'n8n-nodes-base/dist/nodes/Code/JavaScriptSandbox';
 import { PythonSandbox } from 'n8n-nodes-base/dist/nodes/Code/PythonSandbox';
-import { DynamicTool } from 'langchain/tools';
+import { DynamicTool } from '@langchain/core/tools';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
 export class ToolCode implements INodeType {

@@ -6,7 +6,7 @@ import {
 type INodeTypeDescription,
 type SupplyData,
 } from 'n8n-workflow';
-import { SerpAPI } from 'langchain/tools';
+import { SerpAPI } from '@langchain/community/tools/serpapi';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -6,7 +6,7 @@ import {
 type INodeTypeDescription,
 type SupplyData,
 } from 'n8n-workflow';
-import { WikipediaQueryRun } from 'langchain/tools';
+import { WikipediaQueryRun } from '@langchain/community/tools/wikipedia_query_run';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -6,7 +6,7 @@ import {
 type INodeTypeDescription,
 type SupplyData,
 } from 'n8n-workflow';
-import { WolframAlphaTool } from 'langchain/tools';
+import { WolframAlphaTool } from '@langchain/community/tools/wolframalpha';
 import { logWrapper } from '../../../utils/logWrapper';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -13,7 +13,7 @@ import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
 import type { SetField, SetNodeOptions } from 'n8n-nodes-base/dist/nodes/Set/v2/helpers/interfaces';
 import * as manual from 'n8n-nodes-base/dist/nodes/Set/v2/manual.mode';
-import { DynamicTool } from 'langchain/tools';
+import { DynamicTool } from '@langchain/core/tools';
 import get from 'lodash/get';
 import isObject from 'lodash/isObject';
 import { getConnectionHintNoticeField } from '../../../utils/sharedFields';

@@ -7,7 +7,7 @@ import {
 NodeConnectionType,
 } from 'n8n-workflow';
 import { pick } from 'lodash';
-import type { BaseChatMemory } from 'langchain/memory';
+import type { BaseChatMemory } from '@langchain/community/memory/chat_memory';
 import { createPage } from './templates';
 import { validateAuth } from './GenericFunctions';
 import type { LoadPreviousSessionChatOption } from './types';

@@ -7,7 +7,7 @@ import {
 type INodeTypeDescription,
 } from 'n8n-workflow';
 import type { Document } from 'langchain/document';
-import type { Embeddings } from 'langchain/embeddings/base';
+import type { Embeddings } from '@langchain/core/embeddings';
 import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
 import { processDocuments } from '../shared/processDocuments';
 import { MemoryVectorStoreManager } from '../shared/MemoryVectorStoreManager';

@@ -6,7 +6,7 @@ import {
 type INodeType,
 type INodeTypeDescription,
 } from 'n8n-workflow';
-import type { Embeddings } from 'langchain/embeddings/base';
+import type { Embeddings } from '@langchain/core/embeddings';
 import { MemoryVectorStoreManager } from '../shared/MemoryVectorStoreManager';
 import { logWrapper } from '../../../utils/logWrapper';

@@ -1,6 +1,6 @@
 import { NodeOperationError, type INodeProperties } from 'n8n-workflow';
-import type { PineconeLibArgs } from 'langchain/vectorstores/pinecone';
-import { PineconeStore } from 'langchain/vectorstores/pinecone';
+import type { PineconeStoreParams } from '@langchain/pinecone';
+import { PineconeStore } from '@langchain/pinecone';
 import { Pinecone } from '@pinecone-database/pinecone';
 import { createVectorStoreNode } from '../shared/createVectorStoreNode';
 import { metadataFilterField } from '../../../utils/sharedFields';
@@ -87,11 +87,10 @@ export const VectorStorePinecone = createVectorStoreNode({
 const client = new Pinecone({
 apiKey: credentials.apiKey as string,
-environment: credentials.environment as string,
 });
 const pineconeIndex = client.Index(index);
-const config: PineconeLibArgs = {
+const config: PineconeStoreParams = {
 namespace: options.pineconeNamespace ?? undefined,
 pineconeIndex,
 filter,
@@ -111,10 +110,9 @@ export const VectorStorePinecone = createVectorStoreNode({
 const client = new Pinecone({
 apiKey: credentials.apiKey as string,
-environment: credentials.environment as string,
 });
-const indexes = (await client.listIndexes()).map((i) => i.name);
+const indexes = ((await client.listIndexes()).indexes ?? []).map((i) => i.name);
 if (!indexes.includes(index)) {
 throw new NodeOperationError(context.getNode(), `Index ${index} not found`, {
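
The removed `environment` option and the reshaped `listIndexes()` result track the newer `@pinecone-database/pinecone` client API. Outside the node, the same checks look roughly like this (the index name is a placeholder):

import { Pinecone } from '@pinecone-database/pinecone';

async function getIndex(indexName: string) {
	const client = new Pinecone({ apiKey: process.env.PINECONE_API_KEY! }); // no `environment` anymore
	const names = ((await client.listIndexes()).indexes ?? []).map((i) => i.name);
	if (!names.includes(indexName)) {
		throw new Error(`Index ${indexName} not found`);
	}
	return client.Index(indexName);
}

// const pineconeIndex = await getIndex('my-index'); // placeholder name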

@@ -5,10 +5,11 @@ import {
 type INodeExecutionData,
 NodeConnectionType,
 } from 'n8n-workflow';
-import { PineconeStore } from 'langchain/vectorstores/pinecone';
+import type { Embeddings } from '@langchain/core/embeddings';
+import type { Document } from '@langchain/core/documents';
+import { PineconeStore } from '@langchain/pinecone';
 import { Pinecone } from '@pinecone-database/pinecone';
-import type { Embeddings } from 'langchain/embeddings/base';
-import type { Document } from 'langchain/document';
 import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
 import { processDocuments } from '../shared/processDocuments';
 import { pineconeIndexRLC } from '../shared/descriptions';
@@ -115,7 +116,6 @@ export class VectorStorePineconeInsert implements INodeType {
 const client = new Pinecone({
 apiKey: credentials.apiKey as string,
-environment: credentials.environment as string,
 });
 const pineconeIndex = client.Index(index);

@@ -5,10 +5,11 @@ import {
 type INodeTypeDescription,
 type SupplyData,
 } from 'n8n-workflow';
-import type { PineconeLibArgs } from 'langchain/vectorstores/pinecone';
-import { PineconeStore } from 'langchain/vectorstores/pinecone';
+import type { PineconeStoreParams } from '@langchain/pinecone';
+import { PineconeStore } from '@langchain/pinecone';
 import { Pinecone } from '@pinecone-database/pinecone';
-import type { Embeddings } from 'langchain/embeddings/base';
+import type { Embeddings } from '@langchain/core/embeddings';
 import { logWrapper } from '../../../utils/logWrapper';
 import { metadataFilterField } from '../../../utils/sharedFields';
 import { getMetadataFiltersValues } from '../../../utils/helpers';
@@ -99,11 +100,10 @@ export class VectorStorePineconeLoad implements INodeType {
 const client = new Pinecone({
 apiKey: credentials.apiKey as string,
-environment: credentials.environment as string,
 });
 const pineconeIndex = client.Index(index);
-const config: PineconeLibArgs = {
+const config: PineconeStoreParams = {
 namespace: namespace || undefined,
 pineconeIndex,
 filter: getMetadataFiltersValues(this, itemIndex),
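
With the store now coming from `@langchain/pinecone`, loading an existing index outside the node looks roughly like this (a sketch; the index name and embeddings are placeholders):

import { Pinecone } from '@pinecone-database/pinecone';
import { PineconeStore, type PineconeStoreParams } from '@langchain/pinecone';
import { OpenAIEmbeddings } from '@langchain/openai';

async function loadStore() {
	const client = new Pinecone({ apiKey: process.env.PINECONE_API_KEY! });
	const config: PineconeStoreParams = {
		pineconeIndex: client.Index('my-index'), // placeholder index name
		namespace: undefined,
	};
	return PineconeStore.fromExistingIndex(new OpenAIEmbeddings(), config);
}

// const docs = await (await loadStore()).similaritySearch('query', 4);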

View file

@ -1,6 +1,6 @@
import { type INodeProperties } from 'n8n-workflow'; import { type INodeProperties } from 'n8n-workflow';
import type { QdrantLibArgs } from 'langchain/vectorstores/qdrant'; import type { QdrantLibArgs } from '@langchain/community/vectorstores/qdrant';
import { QdrantVectorStore } from 'langchain/vectorstores/qdrant'; import { QdrantVectorStore } from '@langchain/community/vectorstores/qdrant';
import type { Schemas as QdrantSchemas } from '@qdrant/js-client-rest'; import type { Schemas as QdrantSchemas } from '@qdrant/js-client-rest';
import { createVectorStoreNode } from '../shared/createVectorStoreNode'; import { createVectorStoreNode } from '../shared/createVectorStoreNode';
import { qdrantCollectionRLC } from '../shared/descriptions'; import { qdrantCollectionRLC } from '../shared/descriptions';

View file

@ -1,6 +1,6 @@
import { NodeOperationError, type INodeProperties } from 'n8n-workflow'; import { NodeOperationError, type INodeProperties } from 'n8n-workflow';
import { createClient } from '@supabase/supabase-js'; import { createClient } from '@supabase/supabase-js';
import { SupabaseVectorStore } from 'langchain/vectorstores/supabase'; import { SupabaseVectorStore } from '@langchain/community/vectorstores/supabase';
import { createVectorStoreNode } from '../shared/createVectorStoreNode'; import { createVectorStoreNode } from '../shared/createVectorStoreNode';
import { metadataFilterField } from '../../../utils/sharedFields'; import { metadataFilterField } from '../../../utils/sharedFields';
import { supabaseTableNameRLC } from '../shared/descriptions'; import { supabaseTableNameRLC } from '../shared/descriptions';

View file

@ -5,10 +5,10 @@ import {
type INodeExecutionData, type INodeExecutionData,
NodeConnectionType, NodeConnectionType,
} from 'n8n-workflow'; } from 'n8n-workflow';
import type { Embeddings } from 'langchain/embeddings/base'; import type { Embeddings } from '@langchain/core/embeddings';
import type { Document } from 'langchain/document'; import type { Document } from '@langchain/core/documents';
import { createClient } from '@supabase/supabase-js'; import { createClient } from '@supabase/supabase-js';
import { SupabaseVectorStore } from 'langchain/vectorstores/supabase'; import { SupabaseVectorStore } from '@langchain/community/vectorstores/supabase';
import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader'; import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
import { processDocuments } from '../shared/processDocuments'; import { processDocuments } from '../shared/processDocuments';

View file

@ -5,10 +5,10 @@ import {
type SupplyData, type SupplyData,
NodeConnectionType, NodeConnectionType,
} from 'n8n-workflow'; } from 'n8n-workflow';
import type { Embeddings } from 'langchain/embeddings/base'; import type { Embeddings } from '@langchain/core/embeddings';
import { createClient } from '@supabase/supabase-js'; import { createClient } from '@supabase/supabase-js';
import type { SupabaseLibArgs } from 'langchain/vectorstores/supabase'; import type { SupabaseLibArgs } from '@langchain/community/vectorstores/supabase';
import { SupabaseVectorStore } from 'langchain/vectorstores/supabase'; import { SupabaseVectorStore } from '@langchain/community/vectorstores/supabase';
import { logWrapper } from '../../../utils/logWrapper'; import { logWrapper } from '../../../utils/logWrapper';
import { metadataFilterField } from '../../../utils/sharedFields'; import { metadataFilterField } from '../../../utils/sharedFields';
import { getMetadataFiltersValues } from '../../../utils/helpers'; import { getMetadataFiltersValues } from '../../../utils/helpers';

View file

@@ -1,7 +1,7 @@
 import type { IDataObject, INodeProperties } from 'n8n-workflow';
 import { NodeOperationError } from 'n8n-workflow';
-import type { IZepConfig } from 'langchain/vectorstores/zep';
-import { ZepVectorStore } from 'langchain/vectorstores/zep';
+import type { IZepConfig } from '@langchain/community/vectorstores/zep';
+import { ZepVectorStore } from '@langchain/community/vectorstores/zep';
 import { createVectorStoreNode } from '../shared/createVectorStoreNode';
 import { metadataFilterField } from '../../../utils/sharedFields';

View file

@@ -5,9 +5,9 @@ import {
 type INodeExecutionData,
 NodeConnectionType,
 } from 'n8n-workflow';
-import { ZepVectorStore } from 'langchain/vectorstores/zep';
-import type { Embeddings } from 'langchain/embeddings/base';
-import type { Document } from 'langchain/document';
+import { ZepVectorStore } from '@langchain/community/vectorstores/zep';
+import type { Embeddings } from '@langchain/core/embeddings';
+import type { Document } from '@langchain/core/documents';
 import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
 import { processDocuments } from '../shared/processDocuments';

View file

@@ -5,9 +5,9 @@ import {
 type INodeTypeDescription,
 type SupplyData,
 } from 'n8n-workflow';
-import type { IZepConfig } from 'langchain/vectorstores/zep';
-import { ZepVectorStore } from 'langchain/vectorstores/zep';
-import type { Embeddings } from 'langchain/embeddings/base';
+import type { IZepConfig } from '@langchain/community/vectorstores/zep';
+import { ZepVectorStore } from '@langchain/community/vectorstores/zep';
+import type { Embeddings } from '@langchain/core/embeddings';
 import { metadataFilterField } from '../../../utils/sharedFields';
 import { getMetadataFiltersValues } from '../../../utils/helpers';
 import { logWrapper } from '../../../utils/logWrapper';

View file

@@ -1,5 +1,5 @@
-import type { Document } from 'langchain/document';
-import type { Embeddings } from 'langchain/embeddings/base';
+import type { Document } from '@langchain/core/documents';
+import type { Embeddings } from '@langchain/core/embeddings';
 import { MemoryVectorStore } from 'langchain/vectorstores/memory';

 export class MemoryVectorStoreManager {

View file

@ -1,6 +1,6 @@
/* eslint-disable n8n-nodes-base/node-filename-against-convention */ /* eslint-disable n8n-nodes-base/node-filename-against-convention */
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */ /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import type { VectorStore } from 'langchain/vectorstores/base'; import type { VectorStore } from '@langchain/core/vectorstores';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type { import type {
INodeCredentialDescription, INodeCredentialDescription,
@ -13,8 +13,8 @@ import type {
ILoadOptionsFunctions, ILoadOptionsFunctions,
INodeListSearchResult, INodeListSearchResult,
} from 'n8n-workflow'; } from 'n8n-workflow';
import type { Embeddings } from 'langchain/embeddings/base'; import type { Embeddings } from '@langchain/core/embeddings';
import type { Document } from 'langchain/document'; import type { Document } from '@langchain/core/documents';
import { logWrapper } from '../../../utils/logWrapper'; import { logWrapper } from '../../../utils/logWrapper';
import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader'; import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
import type { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader'; import type { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader';

View file

@@ -7,12 +7,11 @@ export async function pineconeIndexSearch(this: ILoadOptionsFunctions) {
 const client = new Pinecone({
 apiKey: credentials.apiKey as string,
-environment: credentials.environment as string,
 });
 const indexes = await client.listIndexes();
-const results = indexes.map((index) => ({
+const results = (indexes.indexes ?? []).map((index) => ({
 name: index.name,
 value: index.name,
 }));
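The @pinecone-database/pinecone bump from 1.x to 2.x removes the `environment` option and changes `listIndexes()` to return an object with an optional `indexes` array rather than a bare array, which is why the node now unwraps `indexes.indexes ?? []`. A minimal sketch of the v2 call shape (the API key value is a placeholder):

```ts
import { Pinecone } from '@pinecone-database/pinecone';

// v2 clients are configured with an API key only; `environment` is gone.
const client = new Pinecone({ apiKey: 'PINECONE_API_KEY' });

async function listIndexNames(): Promise<string[]> {
	// v2 returns an IndexList object, e.g. { indexes?: [{ name, ... }] }
	const indexList = await client.listIndexes();
	return (indexList.indexes ?? []).map((index) => index.name);
}
```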

View file

@@ -1,4 +1,4 @@
-import type { Document } from 'langchain/document';
+import type { Document } from '@langchain/core/documents';
 import type { INodeExecutionData } from 'n8n-workflow';
 import { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
 import { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader';

View file

@@ -170,7 +170,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
 let after: string | undefined;
 do {
-const response = await apiRequest.call(this, 'GET', '/assistants', {
+const response = (await apiRequest.call(this, 'GET', '/assistants', {
 headers: {
 'OpenAI-Beta': 'assistants=v1',
 },
@@ -178,16 +178,16 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
 limit: 100,
 after,
 },
-});
+})) as { data: IDataObject[]; has_more: boolean; last_id: string };
 for (const assistant of response.data || []) {
-assistants.push(assistant.name);
+assistants.push(assistant.name as string);
 }
 has_more = response.has_more;
 if (has_more) {
-after = response.last_id as string;
+after = response.last_id;
 } else {
 break;
 }

View file

@ -1,6 +1,6 @@
import { zodToJsonSchema } from 'zod-to-json-schema'; import { zodToJsonSchema } from 'zod-to-json-schema';
import type { OpenAI as OpenAIClient } from 'openai'; import type { OpenAIClient } from '@langchain/openai';
import type { StructuredTool } from 'langchain/tools'; import type { StructuredTool } from '@langchain/core/tools';
// Copied from langchain(`langchain/src/tools/convert_to_openai.ts`) // Copied from langchain(`langchain/src/tools/convert_to_openai.ts`)
// since these functions are not exported // since these functions are not exported
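These copied helpers convert LangChain tools into the JSON shape the OpenAI function/tool-calling API expects. A minimal, hypothetical sketch of what such a converter typically looks like (not the copied implementation itself), using `zod-to-json-schema` to serialize the tool's Zod schema:

```ts
import { zodToJsonSchema } from 'zod-to-json-schema';
import type { StructuredTool } from '@langchain/core/tools';

// Map a StructuredTool onto the OpenAI function-calling schema.
function formatToOpenAIFunctionSketch(tool: StructuredTool) {
	return {
		name: tool.name,
		description: tool.description,
		parameters: zodToJsonSchema(tool.schema),
	};
}

// Assistant tools wrap the same payload under { type: 'function', function: ... }.
function formatToOpenAIAssistantToolSketch(tool: StructuredTool) {
	return { type: 'function' as const, function: formatToOpenAIFunctionSketch(tool) };
}
```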

View file

@@ -132,14 +132,20 @@
 "@getzep/zep-js": "0.9.0",
 "@google-ai/generativelanguage": "0.2.1",
 "@huggingface/inference": "2.6.4",
-"@langchain/core": "0.1.8",
-"@langchain/mistralai": "0.0.6",
+"@langchain/anthropic": "^0.1.3",
+"@langchain/cohere": "^0.0.5",
+"@langchain/community": "^0.0.34",
+"@langchain/core": "0.1.41",
+"@langchain/mistralai": "0.0.7",
+"@langchain/openai": "^0.0.16",
+"@langchain/pinecone": "^0.0.3",
+"@langchain/redis": "^0.0.2",
 "@n8n/typeorm": "0.3.20-3",
 "@n8n/vm2": "3.9.20",
-"@pinecone-database/pinecone": "1.1.2",
+"@pinecone-database/pinecone": "2.0.1",
 "@qdrant/js-client-rest": "1.7.0",
 "@supabase/supabase-js": "2.38.5",
-"@xata.io/client": "0.25.3",
+"@xata.io/client": "0.28.0",
 "basic-auth": "2.0.1",
 "cohere-ai": "6.2.2",
 "d3-dsv": "2.0.0",
@@ -147,15 +153,16 @@
 "form-data": "4.0.0",
 "html-to-text": "9.0.5",
 "json-schema-to-zod": "1.2.0",
-"langchain": "0.0.198",
+"langchain": "0.1.25",
 "lodash": "4.17.21",
 "mammoth": "1.6.0",
 "mssql": "9.1.1",
 "n8n-nodes-base": "workspace:*",
 "n8n-workflow": "workspace:*",
-"openai": "4.20.0",
+"openai": "4.26.1",
 "pdf-parse": "1.1.1",
 "pg": "8.11.3",
+"tmp-promise": "3.0.3",
 "redis": "4.6.12",
 "sqlite3": "5.1.7",
 "temp": "0.9.4",

View file

@@ -4,7 +4,7 @@ import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
 import { NodeOperationError, BINARY_ENCODING } from 'n8n-workflow';
 import type { TextSplitter } from 'langchain/text_splitter';
-import type { Document } from 'langchain/document';
+import type { Document } from '@langchain/core/documents';
 import { CSVLoader } from 'langchain/document_loaders/fs/csv';
 import { DocxLoader } from 'langchain/document_loaders/fs/docx';
 import { JSONLoader } from 'langchain/document_loaders/fs/json';

View file

@@ -1,11 +1,7 @@
-import {
-type IExecuteFunctions,
-type INodeExecutionData,
-NodeOperationError,
-} from 'n8n-workflow';
+import { type IExecuteFunctions, type INodeExecutionData, NodeOperationError } from 'n8n-workflow';
 import type { TextSplitter } from 'langchain/text_splitter';
-import type { Document } from 'langchain/document';
+import type { Document } from '@langchain/core/documents';
 import { JSONLoader } from 'langchain/document_loaders/fs/json';
 import { TextLoader } from 'langchain/document_loaders/fs/text';
 import { getMetadataFiltersValues } from './helpers';

View file

@@ -1,10 +1,9 @@
 import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow';
 import type { EventNamesAiNodesType, IDataObject, IExecuteFunctions } from 'n8n-workflow';
-import { BaseChatModel } from 'langchain/chat_models/base';
-import { BaseChatModel as BaseChatModelCore } from '@langchain/core/language_models/chat_models';
+import { BaseChatModel } from '@langchain/core/language_models/chat_models';
 import type { BaseOutputParser } from '@langchain/core/output_parsers';
-import type { BaseMessage } from 'langchain/schema';
-import { DynamicTool, type Tool } from 'langchain/tools';
+import type { BaseMessage } from '@langchain/core/messages';
+import { DynamicTool, type Tool } from '@langchain/core/tools';

 export function getMetadataFiltersValues(
 ctx: IExecuteFunctions,
@@ -23,8 +22,8 @@ export function getMetadataFiltersValues(
 // TODO: Remove this function once langchain package is updated to 0.1.x
 // eslint-disable-next-line @typescript-eslint/no-duplicate-type-constituents
-export function isChatInstance(model: any): model is BaseChatModel | BaseChatModelCore {
-return model instanceof BaseChatModel || model instanceof BaseChatModelCore;
+export function isChatInstance(model: any): model is BaseChatModel {
+return model instanceof BaseChatModel;
 }

 export async function getOptionalOutputParsers(
@@ -128,24 +127,25 @@ export function serializeChatHistory(chatHistory: BaseMessage[]): string {
 }

 export const getConnectedTools = async (ctx: IExecuteFunctions, enforceUniqueNames: boolean) => {
-const connectedTools = ((await ctx.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]) || [];
+const connectedTools =
+((await ctx.getInputConnectionData(NodeConnectionType.AiTool, 0)) as Tool[]) || [];
 if (!enforceUniqueNames) return connectedTools;
 const seenNames = new Set<string>();
 for (const tool of connectedTools) {
 if (!(tool instanceof DynamicTool)) continue;
 const { name } = tool;
 if (seenNames.has(name)) {
 throw new NodeOperationError(
 ctx.getNode(),
 `You have multiple tools with the same name: '${name}', please rename them to avoid conflicts`,
 );
 }
 seenNames.add(name);
 }
 return connectedTools;
 };
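With both `BaseChatModel` imports now resolving to the same `@langchain/core` class, the old dual `instanceof` guard collapses into a single check. A small sketch of how such a guard is typically used to branch between chat models and plain LLMs (the `runPrompt` helper is illustrative, not part of the codebase):

```ts
import { BaseChatModel } from '@langchain/core/language_models/chat_models';
import type { BaseLanguageModel } from '@langchain/core/language_models/base';

function isChatInstance(model: unknown): model is BaseChatModel {
	return model instanceof BaseChatModel;
}

// Chat models prefer message arrays; plain LLMs take a prompt string.
async function runPrompt(model: BaseLanguageModel, prompt: string) {
	if (isChatInstance(model)) {
		return await model.invoke([['user', prompt]]);
	}
	return await model.invoke(prompt);
}
```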

View file

@@ -1,25 +1,29 @@
 import { NodeOperationError, NodeConnectionType } from 'n8n-workflow';
 import type { ConnectionTypes, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
-import { Tool } from 'langchain/tools';
-import type { ChatResult, InputValues, BaseMessage } from 'langchain/schema';
-import { BaseChatMessageHistory } from 'langchain/schema';
-import type { BaseChatModel } from 'langchain/chat_models/base';
-import type { CallbackManagerForLLMRun } from 'langchain/callbacks';
-import { Embeddings } from 'langchain/embeddings/base';
-import { VectorStore } from 'langchain/vectorstores/base';
-import type { Document } from 'langchain/document';
+import { Tool } from '@langchain/core/tools';
+import type { BaseMessage } from '@langchain/core/messages';
+import type { InputValues, MemoryVariables, OutputValues } from '@langchain/core/memory';
+import type { ChatResult } from '@langchain/core/outputs';
+import { BaseChatMessageHistory } from '@langchain/core/chat_history';
+import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
+import type {
+CallbackManagerForLLMRun,
+BaseCallbackConfig,
+Callbacks,
+} from '@langchain/core/callbacks/manager';
+import { Embeddings } from '@langchain/core/embeddings';
+import { VectorStore } from '@langchain/core/vectorstores';
+import type { Document } from '@langchain/core/documents';
 import { TextSplitter } from 'langchain/text_splitter';
-import type { BaseDocumentLoader } from 'langchain/document_loaders/base';
-import type { BaseCallbackConfig, Callbacks } from 'langchain/dist/callbacks/manager';
-import { BaseLLM } from 'langchain/llms/base';
-import { BaseChatMemory } from 'langchain/memory';
-import type { MemoryVariables, OutputValues } from 'langchain/dist/memory/base';
-import { BaseRetriever } from 'langchain/schema/retriever';
-import type { FormatInstructionsOptions } from 'langchain/schema/output_parser';
-import { BaseOutputParser, OutputParserException } from 'langchain/schema/output_parser';
+import { BaseLLM } from '@langchain/core/language_models/llms';
+import { BaseChatMemory } from '@langchain/community/memory/chat_memory';
+import { BaseRetriever } from '@langchain/core/retrievers';
+import type { FormatInstructionsOptions } from '@langchain/core/output_parsers';
+import { BaseOutputParser, OutputParserException } from '@langchain/core/output_parsers';
 import { isObject } from 'lodash';
+import type { BaseDocumentLoader } from 'langchain/dist/document_loaders/base';
 import { N8nJsonLoader } from './N8nJsonLoader';
 import { N8nBinaryLoader } from './N8nBinaryLoader';
 import { isChatInstance, logAiEvent } from './helpers';
@@ -524,6 +528,7 @@ export function logWrapper(
 }
 }
+// eslint-disable-next-line @typescript-eslint/no-unsafe-return
 return (target as any)[prop];
 },
 });

View file

@@ -95,20 +95,21 @@ const outputTypeParsers: {
 ) {
 interface MessageContent {
 type: string;
+text?: string;
 image_url?: {
 url: string;
 };
 }
 let message = content.kwargs.content;
 if (Array.isArray(message)) {
-const messageContent = message[0] as {
-type?: string;
-image_url?: { url: string };
-};
-if (messageContent?.type === 'image_url') {
-message = `![Input image](${messageContent.image_url?.url})`;
-}
-message = message as MessageContent[];
+message = (message as MessageContent[])
+.map((item) => {
+if (item?.type === 'image_url') {
+return `![Input image](${item.image_url?.url})`;
+}
+return item.text;
+})
+.join('\n');
 }
 if (Object.keys(content.kwargs.additional_kwargs).length) {
 message += ` (${JSON.stringify(content.kwargs.additional_kwargs)})`;
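This branch now handles Claude 3 / GPT-4V style multimodal messages, where `kwargs.content` is an array of typed parts rather than a plain string: every part is rendered (image parts as markdown image links) instead of only the first one. A self-contained sketch of the same flattening logic:

```ts
interface MessageContent {
	type: string;
	text?: string;
	image_url?: { url: string };
}

// Render an array of multimodal content parts as a single markdown string.
function flattenMessageContent(parts: MessageContent[]): string {
	return parts
		.map((item) =>
			item?.type === 'image_url' ? `![Input image](${item.image_url?.url})` : item.text,
		)
		.join('\n');
}

// flattenMessageContent([
//   { type: 'text', text: 'What is in this picture?' },
//   { type: 'image_url', image_url: { url: 'https://example.com/cat.png' } },
// ]) → 'What is in this picture?\n![Input image](https://example.com/cat.png)'
```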

File diff suppressed because it is too large