fix: Fix execution error when using AI chain nodes with non-chat model (#8724)

Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
This commit is contained in:
oleg 2024-02-23 10:27:39 +01:00 committed by GitHub
parent d03d9276f9
commit 0882dc0ce9
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 50 additions and 59 deletions

View file

@ -1,7 +1,7 @@
/* eslint-disable n8n-nodes-base/node-filename-against-convention */
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */
import type { VectorStore } from 'langchain/vectorstores/base';
import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
import type {
INodeCredentialDescription,
INodeProperties,
@ -18,7 +18,7 @@ import type { Document } from 'langchain/document';
import { logWrapper } from '../../../utils/logWrapper';
import type { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
import type { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader';
import { getMetadataFiltersValues } from '../../../utils/helpers';
import { getMetadataFiltersValues, logAiEvent } from '../../../utils/helpers';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { processDocument } from './processDocuments';
@ -237,7 +237,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
});
resultData.push(...serializedDocs);
void this.logAiEvent('n8n.ai.vector.store.searched', jsonStringify({ query: prompt }));
void logAiEvent(this, 'n8n.ai.vector.store.searched', { query: prompt });
}
return await this.prepareOutputData(resultData);
@ -264,7 +264,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
try {
await args.populateVectorStore(this, embeddings, processedDocuments, itemIndex);
void this.logAiEvent('n8n.ai.vector.store.populated');
void logAiEvent(this, 'n8n.ai.vector.store.populated');
} catch (error) {
throw error;
}

View file

@ -1,4 +1,5 @@
import { NodeConnectionType, type IExecuteFunctions, NodeOperationError } from 'n8n-workflow';
import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow';
import type { EventNamesAiNodesType, IDataObject, IExecuteFunctions } from 'n8n-workflow';
import { BaseChatModel } from 'langchain/chat_models/base';
import { BaseChatModel as BaseChatModelCore } from '@langchain/core/language_models/chat_models';
import type { BaseOutputParser } from '@langchain/core/output_parsers';
@ -64,3 +65,15 @@ export function getPromptInputByType(options: {
return input;
}
/**
 * Best-effort forwarding of an AI telemetry event to the execution context.
 *
 * Serializes `data` (when provided) with `jsonStringify` before delegating to
 * `executeFunctions.logAiEvent`. Any failure while emitting the event is
 * swallowed and reported at debug level only, so telemetry problems can never
 * break the node execution that triggered them.
 *
 * @param executeFunctions - Execution context that owns the event sink and logger.
 * @param event - Name of the AI event to emit.
 * @param data - Optional payload; serialized to a JSON string when truthy.
 */
export async function logAiEvent(
	executeFunctions: IExecuteFunctions,
	event: EventNamesAiNodesType,
	data?: IDataObject,
) {
	const serializedData = data ? jsonStringify(data) : undefined;
	try {
		await executeFunctions.logAiEvent(event, serializedData);
	} catch (error) {
		// Telemetry is non-critical: record the failure quietly and move on.
		executeFunctions.logger.debug(`Error logging AI event: ${event}`);
	}
}

View file

@ -1,16 +1,10 @@
import {
NodeOperationError,
type ConnectionTypes,
type IExecuteFunctions,
type INodeExecutionData,
NodeConnectionType,
jsonStringify,
} from 'n8n-workflow';
import { NodeOperationError, NodeConnectionType } from 'n8n-workflow';
import type { ConnectionTypes, IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import { Tool } from 'langchain/tools';
import type { BaseMessage, ChatResult, InputValues } from 'langchain/schema';
import type { ChatResult, InputValues, BaseMessage } from 'langchain/schema';
import { BaseChatMessageHistory } from 'langchain/schema';
import { BaseChatModel } from 'langchain/chat_models/base';
import type { BaseChatModel } from 'langchain/chat_models/base';
import type { CallbackManagerForLLMRun } from 'langchain/callbacks';
import { Embeddings } from 'langchain/embeddings/base';
@ -28,7 +22,7 @@ import { BaseOutputParser } from 'langchain/schema/output_parser';
import { isObject } from 'lodash';
import { N8nJsonLoader } from './N8nJsonLoader';
import { N8nBinaryLoader } from './N8nBinaryLoader';
import { isChatInstance } from './helpers';
import { isChatInstance, logAiEvent } from './helpers';
const errorsMap: { [key: string]: { message: string; description: string } } = {
'You exceeded your current quota, please check your plan and billing details.': {
@ -202,10 +196,7 @@ export function logWrapper(
const payload = { action: 'getMessages', response };
executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);
void executeFunctions.logAiEvent(
'n8n.ai.memory.get.messages',
jsonStringify({ response }),
);
void logAiEvent(executeFunctions, 'n8n.ai.memory.get.messages', { response });
return response;
};
} else if (prop === 'addMessage' && 'addMessage' in target) {
@ -222,10 +213,7 @@ export function logWrapper(
arguments: [message],
});
void executeFunctions.logAiEvent(
'n8n.ai.memory.added.message',
jsonStringify({ message }),
);
void logAiEvent(executeFunctions, 'n8n.ai.memory.added.message', { message });
executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);
};
}
@ -255,18 +243,21 @@ export function logWrapper(
runManager,
],
})) as ChatResult;
const parsedMessages =
typeof messages === 'string'
? messages
: messages.map((message) => {
if (typeof message === 'string') return message;
if (typeof message?.toJSON === 'function') return message.toJSON();
void executeFunctions.logAiEvent(
'n8n.ai.llm.generated',
jsonStringify({
messages:
typeof messages === 'string'
? messages
: messages.map((message) => message.toJSON()),
options,
response,
}),
);
return message;
});
void logAiEvent(executeFunctions, 'n8n.ai.llm.generated', {
messages: parsedMessages,
options,
response,
});
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
} catch (error) {
@ -299,10 +290,9 @@ export function logWrapper(
executeFunctions.addOutputData(connectionType, index, [
[{ json: { action: 'getFormatInstructions', response } }],
]);
void executeFunctions.logAiEvent(
'n8n.ai.output.parser.get.instructions',
jsonStringify({ response }),
);
void logAiEvent(executeFunctions, 'n8n.ai.output.parser.get.instructions', {
response,
});
return response;
};
} else if (prop === 'parse' && 'parse' in target) {
@ -321,10 +311,7 @@ export function logWrapper(
arguments: [stringifiedText],
})) as object;
void executeFunctions.logAiEvent(
'n8n.ai.output.parser.parsed',
jsonStringify({ text, response }),
);
void logAiEvent(executeFunctions, 'n8n.ai.output.parser.parsed', { text, response });
executeFunctions.addOutputData(connectionType, index, [
[{ json: { action: 'parse', response } }],
]);
@ -353,10 +340,7 @@ export function logWrapper(
arguments: [query, config],
})) as Array<Document<Record<string, any>>>;
void executeFunctions.logAiEvent(
'n8n.ai.retriever.get.relevant.documents',
jsonStringify({ query }),
);
void logAiEvent(executeFunctions, 'n8n.ai.retriever.get.relevant.documents', { query });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@ -381,7 +365,7 @@ export function logWrapper(
arguments: [documents],
})) as number[][];
void executeFunctions.logAiEvent('n8n.ai.embeddings.embedded.document');
void logAiEvent(executeFunctions, 'n8n.ai.embeddings.embedded.document');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@ -401,7 +385,7 @@ export function logWrapper(
method: target[prop],
arguments: [query],
})) as number[];
void executeFunctions.logAiEvent('n8n.ai.embeddings.embedded.query');
void logAiEvent(executeFunctions, 'n8n.ai.embeddings.embedded.query');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@ -446,7 +430,7 @@ export function logWrapper(
arguments: [item, itemIndex],
})) as number[];
void executeFunctions.logAiEvent('n8n.ai.document.processed');
void logAiEvent(executeFunctions, 'n8n.ai.document.processed');
executeFunctions.addOutputData(connectionType, index, [
[{ json: { response }, pairedItem: { item: itemIndex } }],
]);
@ -472,7 +456,7 @@ export function logWrapper(
arguments: [text],
})) as string[];
void executeFunctions.logAiEvent('n8n.ai.text.splitter.split');
void logAiEvent(executeFunctions, 'n8n.ai.text.splitter.split');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@ -496,10 +480,7 @@ export function logWrapper(
arguments: [query],
})) as string;
void executeFunctions.logAiEvent(
'n8n.ai.tool.called',
jsonStringify({ query, response }),
);
void logAiEvent(executeFunctions, 'n8n.ai.tool.called', { query, response });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@ -529,10 +510,7 @@ export function logWrapper(
arguments: [query, k, filter, _callbacks],
})) as Array<Document<Record<string, any>>>;
void executeFunctions.logAiEvent(
'n8n.ai.vector.store.searched',
jsonStringify({ query }),
);
void logAiEvent(executeFunctions, 'n8n.ai.vector.store.searched', { query });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;