fix: Update BaseChatModel import checks for MistralAI compatibility (#8527)

Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
This commit is contained in:
oleg 2024-02-05 16:09:23 +01:00 committed by GitHub
parent b62c1d7c41
commit c8b8379015
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 18 additions and 11 deletions

View file

@@ -6,24 +6,21 @@ import {
} from 'n8n-workflow';
import { initializeAgentExecutorWithOptions } from 'langchain/agents';
import { BaseChatModel } from 'langchain/chat_models/base';
import type { Tool } from 'langchain/tools';
import type { BaseChatMemory } from 'langchain/memory';
import type { BaseOutputParser } from 'langchain/schema/output_parser';
import { PromptTemplate } from 'langchain/prompts';
import { CombiningOutputParser } from 'langchain/output_parsers';
import { isChatInstance } from '../../../../../utils/helpers';
export async function conversationalAgentExecute(
this: IExecuteFunctions,
): Promise<INodeExecutionData[][]> {
this.logger.verbose('Executing Conversational Agent');
const model = (await this.getInputConnectionData(
NodeConnectionType.AiLanguageModel,
0,
)) as BaseChatModel;
const model = await this.getInputConnectionData(NodeConnectionType.AiLanguageModel, 0);
if (!(model instanceof BaseChatModel)) {
if (!isChatInstance(model)) {
throw new NodeOperationError(this.getNode(), 'Conversational Agent requires Chat Model');
}

View file

@@ -11,7 +11,8 @@ import type { Tool } from 'langchain/tools';
import type { BaseOutputParser } from 'langchain/schema/output_parser';
import { PromptTemplate } from 'langchain/prompts';
import { CombiningOutputParser } from 'langchain/output_parsers';
import { BaseChatModel } from 'langchain/chat_models/base';
import type { BaseChatModel } from 'langchain/chat_models/base';
import { isChatInstance } from '../../../../../utils/helpers';
export async function reActAgentAgentExecute(
this: IExecuteFunctions,
@@ -38,7 +39,7 @@ export async function reActAgentAgentExecute(
};
let agent: ChatAgent | ZeroShotAgent;
if (model instanceof BaseChatModel) {
if (isChatInstance(model)) {
agent = ChatAgent.fromLLMAndTools(model, tools, {
prefix: options.prefix,
suffix: options.suffixChat,

View file

@@ -19,9 +19,10 @@ import {
import type { BaseOutputParser } from 'langchain/schema/output_parser';
import { CombiningOutputParser } from 'langchain/output_parsers';
import { LLMChain } from 'langchain/chains';
import { BaseChatModel } from 'langchain/chat_models/base';
import type { BaseChatModel } from 'langchain/chat_models/base';
import { HumanMessage } from 'langchain/schema';
import { getTemplateNoticeField } from '../../../utils/sharedFields';
import { isChatInstance } from '../../../utils/helpers';
interface MessagesTemplate {
type: string;
@@ -94,7 +95,7 @@ async function getChainPromptTemplate(
partialVariables: formatInstructions ? { formatInstructions } : undefined,
});
if (llm instanceof BaseChatModel) {
if (isChatInstance(llm)) {
const parsedMessages = await Promise.all(
(messages ?? []).map(async (message) => {
const messageClass = [

View file

@@ -1,4 +1,6 @@
import type { IExecuteFunctions } from 'n8n-workflow';
import { BaseChatModel } from 'langchain/chat_models/base';
import { BaseChatModel as BaseChatModelCore } from '@langchain/core/language_models/chat_models';
export function getMetadataFiltersValues(
ctx: IExecuteFunctions,
@@ -14,3 +16,8 @@ export function getMetadataFiltersValues(
return undefined;
}
// TODO: Remove this function once langchain package is updated to 0.1.x
export function isChatInstance(model: any): model is BaseChatModel | BaseChatModelCore {
return model instanceof BaseChatModel || model instanceof BaseChatModelCore;
}

View file

@@ -27,6 +27,7 @@ import { BaseOutputParser } from 'langchain/schema/output_parser';
import { isObject } from 'lodash';
import { N8nJsonLoader } from './N8nJsonLoader';
import { N8nBinaryLoader } from './N8nBinaryLoader';
import { isChatInstance } from './helpers';
const errorsMap: { [key: string]: { message: string; description: string } } = {
'You exceeded your current quota, please check your plan and billing details.': {
@@ -225,7 +226,7 @@ export function logWrapper(
}
// ========== BaseChatModel ==========
if (originalInstance instanceof BaseLLM || originalInstance instanceof BaseChatModel) {
if (originalInstance instanceof BaseLLM || isChatInstance(originalInstance)) {
if (prop === '_generate' && '_generate' in target) {
return async (
messages: BaseMessage[] & string[],