refactor(core): Include AI events in log streaming relay (#10768)

Iván Ovejero 2024-09-12 12:02:47 +02:00 committed by GitHub
parent 8240b2a142
commit c133a6ef89
15 changed files with 496 additions and 92 deletions
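
The gist of the refactor: node code no longer sends `n8n.ai.*` events straight to the MessageEventBus. Instead it emits typed `ai-*` events (the new `AiEvent` union / `AiEventMap`) through the `EventService`, and the `LogStreamingEventRelay` maps each one back to its legacy `n8n.ai.*` name so log streaming destinations keep receiving the same events. Below is a rough, self-contained sketch of that flow — simplified stand-ins, not the actual n8n classes, and only a few of the event names:

// sketch.ts — simplified illustration of the new event flow (not real n8n code)
import { EventEmitter } from 'node:events';

type AiEvent =
	| 'ai-llm-generated-output'
	| 'ai-llm-errored'
	| 'ai-tool-called'; // remaining ai-* names omitted for brevity

type AiEventPayload = {
	msg: string;
	workflowName: string;
	executionId: string;
	nodeName: string;
	workflowId?: string;
	nodeType?: string;
};

// Stand-in for the EventService (a TypedEmitter in the real code base).
const eventService = new EventEmitter();

// Stand-in for MessageEventBus.sendAiNodeEvent, which feeds log streaming destinations.
const eventBus = {
	sendAiNodeEvent: ({ eventName, payload }: { eventName: string; payload: AiEventPayload }) => {
		console.log(`[log streaming] ${eventName}`, payload);
	},
};

// What the LogStreamingEventRelay adds: one handler per ai-* event, mapping it to the legacy name.
const aiEventToBusName: Record<AiEvent, string> = {
	'ai-llm-generated-output': 'n8n.ai.llm.generated',
	'ai-llm-errored': 'n8n.ai.llm.error',
	'ai-tool-called': 'n8n.ai.tool.called',
};

for (const [event, eventName] of Object.entries(aiEventToBusName)) {
	eventService.on(event, (payload: AiEventPayload) => eventBus.sendAiNodeEvent({ eventName, payload }));
}

// What additionalData.logAiEvent boils down to after this change: a plain emit, no longer async.
const logAiEvent = (eventName: AiEvent, payload: AiEventPayload) => eventService.emit(eventName, payload);

// A node helper would call it roughly like this:
logAiEvent('ai-llm-generated-output', {
	msg: 'LLM generated output',
	workflowName: 'My Workflow',
	executionId: 'exec123',
	nodeName: 'OpenAI',
});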


@@ -138,7 +138,7 @@ export class N8nLlmTracing extends BaseCallbackHandler {
this.executionFunctions.addOutputData(this.connectionType, runDetails.index, [
[{ json: { ...response } }],
]);
-void logAiEvent(this.executionFunctions, 'n8n.ai.llm.generated', {
+void logAiEvent(this.executionFunctions, 'ai-llm-generated-output', {
messages: parsedMessages,
options: runDetails.options,
response,
@@ -186,7 +186,7 @@ export class N8nLlmTracing extends BaseCallbackHandler {
});
}
-void logAiEvent(this.executionFunctions, 'n8n.ai.llm.error', {
+void logAiEvent(this.executionFunctions, 'ai-llm-errored', {
error: Object.keys(error).length === 0 ? error.toString() : error,
runId,
parentRunId,


@@ -280,7 +280,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
});
resultData.push(...serializedDocs);
-void logAiEvent(this, 'n8n.ai.vector.store.searched', { query: prompt });
+void logAiEvent(this, 'ai-vector-store-searched', { query: prompt });
}
return [resultData];
@@ -307,7 +307,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
try {
await args.populateVectorStore(this, embeddings, processedDocuments, itemIndex);
-void logAiEvent(this, 'n8n.ai.vector.store.populated');
+void logAiEvent(this, 'ai-vector-store-populated');
} catch (error) {
throw error;
}
@@ -361,7 +361,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
ids: [documentId],
});
-void logAiEvent(this, 'n8n.ai.vector.store.updated');
+void logAiEvent(this, 'ai-vector-store-updated');
} catch (error) {
throw error;
}


@@ -1,10 +1,5 @@
import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow';
-import type {
-EventNamesAiNodesType,
-IDataObject,
-IExecuteFunctions,
-IWebhookFunctions,
-} from 'n8n-workflow';
+import type { AiEvent, IDataObject, IExecuteFunctions, IWebhookFunctions } from 'n8n-workflow';
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import type { BaseOutputParser } from '@langchain/core/output_parsers';
import type { BaseMessage } from '@langchain/core/messages';
@@ -155,7 +150,7 @@ export function getSessionId(
export async function logAiEvent(
executeFunctions: IExecuteFunctions,
-event: EventNamesAiNodesType,
+event: AiEvent,
data?: IDataObject,
) {
try {


@@ -196,7 +196,7 @@ export function logWrapper(
const payload = { action: 'getMessages', response };
executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);
-void logAiEvent(executeFunctions, 'n8n.ai.memory.get.messages', { response });
+void logAiEvent(executeFunctions, 'ai-messages-retrieved-from-memory', { response });
return response;
};
} else if (prop === 'addMessage' && 'addMessage' in target) {
@@ -213,7 +213,7 @@ export function logWrapper(
arguments: [message],
});
-void logAiEvent(executeFunctions, 'n8n.ai.memory.added.message', { message });
+void logAiEvent(executeFunctions, 'ai-message-added-to-memory', { message });
executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);
};
}
@@ -238,13 +238,13 @@ export function logWrapper(
arguments: [stringifiedText],
})) as object;
-void logAiEvent(executeFunctions, 'n8n.ai.output.parser.parsed', { text, response });
+void logAiEvent(executeFunctions, 'ai-output-parsed', { text, response });
executeFunctions.addOutputData(connectionType, index, [
[{ json: { action: 'parse', response } }],
]);
return response;
} catch (error) {
-void logAiEvent(executeFunctions, 'n8n.ai.output.parser.parsed', {
+void logAiEvent(executeFunctions, 'ai-output-parsed', {
text,
response: error.message ?? error,
});
@@ -277,7 +277,7 @@ export function logWrapper(
arguments: [query, config],
})) as Array<Document<Record<string, any>>>;
-void logAiEvent(executeFunctions, 'n8n.ai.retriever.get.relevant.documents', { query });
+void logAiEvent(executeFunctions, 'ai-documents-retrieved', { query });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@@ -302,7 +302,7 @@ export function logWrapper(
arguments: [documents],
})) as number[][];
-void logAiEvent(executeFunctions, 'n8n.ai.embeddings.embedded.document');
+void logAiEvent(executeFunctions, 'ai-document-embedded');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@@ -322,7 +322,7 @@ export function logWrapper(
method: target[prop],
arguments: [query],
})) as number[];
-void logAiEvent(executeFunctions, 'n8n.ai.embeddings.embedded.query');
+void logAiEvent(executeFunctions, 'ai-query-embedded');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@@ -367,7 +367,7 @@ export function logWrapper(
arguments: [item, itemIndex],
})) as number[];
-void logAiEvent(executeFunctions, 'n8n.ai.document.processed');
+void logAiEvent(executeFunctions, 'ai-document-processed');
executeFunctions.addOutputData(connectionType, index, [
[{ json: { response }, pairedItem: { item: itemIndex } }],
]);
@@ -393,7 +393,7 @@ export function logWrapper(
arguments: [text],
})) as string[];
-void logAiEvent(executeFunctions, 'n8n.ai.text.splitter.split');
+void logAiEvent(executeFunctions, 'ai-text-split');
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@@ -417,7 +417,7 @@ export function logWrapper(
arguments: [query],
})) as string;
-void logAiEvent(executeFunctions, 'n8n.ai.tool.called', { query, response });
+void logAiEvent(executeFunctions, 'ai-tool-called', { query, response });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;
};
@@ -447,7 +447,7 @@ export function logWrapper(
arguments: [query, k, filter, _callbacks],
})) as Array<Document<Record<string, any>>>;
-void logAiEvent(executeFunctions, 'n8n.ai.vector.store.searched', { query });
+void logAiEvent(executeFunctions, 'ai-vector-store-searched', { query });
executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
return response;


@@ -1,18 +1,17 @@
import { VariablesService } from '@/environments/variables/variables.service.ee';
import { mockInstance } from '@test/mocking';
-import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus';
import { getBase } from '@/workflow-execute-additional-data';
import Container from 'typedi';
import { CredentialsHelper } from '@/credentials-helper';
import { SecretsHelper } from '@/secrets-helpers';
+import { EventService } from '@/events/event.service';
describe('WorkflowExecuteAdditionalData', () => {
-const messageEventBus = mockInstance(MessageEventBus);
const variablesService = mockInstance(VariablesService);
variablesService.getAllCached.mockResolvedValue([]);
const credentialsHelper = mockInstance(CredentialsHelper);
const secretsHelper = mockInstance(SecretsHelper);
-Container.set(MessageEventBus, messageEventBus);
+const eventService = mockInstance(EventService);
Container.set(VariablesService, variablesService);
Container.set(CredentialsHelper, credentialsHelper);
Container.set(SecretsHelper, secretsHelper);
@@ -20,7 +19,7 @@ describe('WorkflowExecuteAdditionalData', () => {
test('logAiEvent should call MessageEventBus', async () => {
const additionalData = await getBase('user-id');
-const eventName = 'n8n.ai.memory.get.messages';
+const eventName = 'ai-messages-retrieved-from-memory';
const payload = {
msg: 'test message',
executionId: '123',
@@ -30,12 +29,9 @@ describe('WorkflowExecuteAdditionalData', () => {
nodeType: 'n8n-memory',
};
-await additionalData.logAiEvent(eventName, payload);
+additionalData.logAiEvent(eventName, payload);
-expect(messageEventBus.sendAiNodeEvent).toHaveBeenCalledTimes(1);
+expect(eventService.emit).toHaveBeenCalledTimes(1);
-expect(messageEventBus.sendAiNodeEvent).toHaveBeenCalledWith({
-eventName,
-payload,
-});
+expect(eventService.emit).toHaveBeenCalledWith(eventName, payload);
});
});


@@ -1,5 +1,6 @@
import { AbstractEventMessage, isEventMessageOptionsWithType } from './abstract-event-message';
-import type { EventNamesAiNodesType, JsonObject } from 'n8n-workflow';
+import type { JsonObject } from 'n8n-workflow';
+import type { EventNamesAiNodesType } from '.';
import { EventMessageTypeNames } from 'n8n-workflow';
import type { AbstractEventMessageOptions } from './abstract-event-message-options';
import type { AbstractEventPayload } from './abstract-event-payload';


@@ -4,7 +4,25 @@ import type { EventMessageExecution } from './event-message-execution';
import type { EventMessageGeneric } from './event-message-generic';
import type { EventMessageNode } from './event-message-node';
import type { EventMessageWorkflow } from './event-message-workflow';
-import { eventNamesAiNodes, type EventNamesAiNodesType } from 'n8n-workflow';
export const eventNamesAiNodes = [
'n8n.ai.memory.get.messages',
'n8n.ai.memory.added.message',
'n8n.ai.output.parser.parsed',
'n8n.ai.retriever.get.relevant.documents',
'n8n.ai.embeddings.embedded.document',
'n8n.ai.embeddings.embedded.query',
'n8n.ai.document.processed',
'n8n.ai.text.splitter.split',
'n8n.ai.tool.called',
'n8n.ai.vector.store.searched',
'n8n.ai.llm.generated',
'n8n.ai.llm.error',
'n8n.ai.vector.store.populated',
'n8n.ai.vector.store.updated',
] as const;
export type EventNamesAiNodesType = (typeof eventNamesAiNodes)[number];
export const eventNamesWorkflow = [
'n8n.workflow.started',


@@ -945,4 +945,258 @@ describe('LogStreamingEventRelay', () => {
});
});
});
describe('AI events', () => {
it('should log on `ai-messages-retrieved-from-memory` event', () => {
const payload: RelayEventMap['ai-messages-retrieved-from-memory'] = {
msg: 'Hello, world!',
executionId: 'exec789',
nodeName: 'Memory',
workflowId: 'wf123',
workflowName: 'My Workflow',
nodeType: 'n8n-nodes-base.memory',
};
eventService.emit('ai-messages-retrieved-from-memory', payload);
expect(eventBus.sendAiNodeEvent).toHaveBeenCalledWith({
eventName: 'n8n.ai.memory.get.messages',
payload,
});
});
it('should log on `ai-message-added-to-memory` event', () => {
const payload: RelayEventMap['ai-message-added-to-memory'] = {
msg: 'Test',
executionId: 'exec456',
nodeName: 'Memory',
workflowId: 'wf789',
workflowName: 'My Workflow',
nodeType: 'n8n-nodes-base.memory',
};
eventService.emit('ai-message-added-to-memory', payload);
expect(eventBus.sendAiNodeEvent).toHaveBeenCalledWith({
eventName: 'n8n.ai.memory.added.message',
payload,
});
});
it('should log on `ai-output-parsed` event', () => {
const payload: RelayEventMap['ai-output-parsed'] = {
msg: 'Test',
executionId: 'exec123',
nodeName: 'Output Parser',
workflowId: 'wf456',
workflowName: 'My Workflow',
nodeType: 'n8n-nodes-base.outputParser',
};
eventService.emit('ai-output-parsed', payload);
expect(eventBus.sendAiNodeEvent).toHaveBeenCalledWith({
eventName: 'n8n.ai.output.parser.parsed',
payload,
});
});
it('should log on `ai-documents-retrieved` event', () => {
const payload: RelayEventMap['ai-documents-retrieved'] = {
msg: 'Test',
executionId: 'exec789',
nodeName: 'Retriever',
workflowId: 'wf123',
workflowName: 'My Workflow',
nodeType: 'n8n-nodes-base.retriever',
};
eventService.emit('ai-documents-retrieved', payload);
expect(eventBus.sendAiNodeEvent).toHaveBeenCalledWith({
eventName: 'n8n.ai.retriever.get.relevant.documents',
payload,
});
});
it('should log on `ai-document-embedded` event', () => {
const payload: RelayEventMap['ai-document-embedded'] = {
msg: 'Test',
executionId: 'exec456',
nodeName: 'Embeddings',
workflowId: 'wf789',
workflowName: 'My Workflow',
nodeType: 'n8n-nodes-base.embeddings',
};
eventService.emit('ai-document-embedded', payload);
expect(eventBus.sendAiNodeEvent).toHaveBeenCalledWith({
eventName: 'n8n.ai.embeddings.embedded.document',
payload,
});
});
it('should log on `ai-query-embedded` event', () => {
const payload: RelayEventMap['ai-query-embedded'] = {
msg: 'Test',
executionId: 'exec123',
nodeName: 'Embeddings',
workflowId: 'wf456',
workflowName: 'My Workflow',
nodeType: 'n8n-nodes-base.embeddings',
};
eventService.emit('ai-query-embedded', payload);
expect(eventBus.sendAiNodeEvent).toHaveBeenCalledWith({
eventName: 'n8n.ai.embeddings.embedded.query',
payload,
});
});
it('should log on `ai-document-processed` event', () => {
const payload: RelayEventMap['ai-document-processed'] = {
msg: 'Test',
executionId: 'exec789',
nodeName: 'Embeddings',
workflowId: 'wf789',
workflowName: 'My Workflow',
nodeType: 'n8n-nodes-base.embeddings',
};
eventService.emit('ai-document-processed', payload);
expect(eventBus.sendAiNodeEvent).toHaveBeenCalledWith({
eventName: 'n8n.ai.document.processed',
payload,
});
});
it('should log on `ai-text-split` event', () => {
const payload: RelayEventMap['ai-text-split'] = {
msg: 'Test',
executionId: 'exec456',
nodeName: 'Text Splitter',
workflowId: 'wf789',
workflowName: 'My Workflow',
nodeType: 'n8n-nodes-base.textSplitter',
};
eventService.emit('ai-text-split', payload);
expect(eventBus.sendAiNodeEvent).toHaveBeenCalledWith({
eventName: 'n8n.ai.text.splitter.split',
payload,
});
});
it('should log on `ai-tool-called` event', () => {
const payload: RelayEventMap['ai-tool-called'] = {
msg: 'Test',
executionId: 'exec123',
nodeName: 'Tool',
workflowId: 'wf456',
workflowName: 'My Workflow',
nodeType: 'n8n-nodes-base.tool',
};
eventService.emit('ai-tool-called', payload);
expect(eventBus.sendAiNodeEvent).toHaveBeenCalledWith({
eventName: 'n8n.ai.tool.called',
payload,
});
});
it('should log on `ai-vector-store-searched` event', () => {
const payload: RelayEventMap['ai-vector-store-searched'] = {
msg: 'Test',
executionId: 'exec789',
nodeName: 'Vector Store',
workflowId: 'wf123',
workflowName: 'My Workflow',
nodeType: 'n8n-nodes-base.vectorStore',
};
eventService.emit('ai-vector-store-searched', payload);
expect(eventBus.sendAiNodeEvent).toHaveBeenCalledWith({
eventName: 'n8n.ai.vector.store.searched',
payload,
});
});
it('should log on `ai-llm-generated-output` event', () => {
const payload: RelayEventMap['ai-llm-generated-output'] = {
msg: 'Test',
executionId: 'exec456',
nodeName: 'OpenAI',
workflowId: 'wf789',
workflowName: 'My Workflow',
nodeType: 'n8n-nodes-base.openai',
};
eventService.emit('ai-llm-generated-output', payload);
expect(eventBus.sendAiNodeEvent).toHaveBeenCalledWith({
eventName: 'n8n.ai.llm.generated',
payload,
});
});
it('should log on `ai-llm-errored` event', () => {
const payload: RelayEventMap['ai-llm-errored'] = {
msg: 'Test',
executionId: 'exec789',
nodeName: 'OpenAI',
workflowId: 'wf123',
workflowName: 'My Workflow',
nodeType: 'n8n-nodes-base.openai',
};
eventService.emit('ai-llm-errored', payload);
expect(eventBus.sendAiNodeEvent).toHaveBeenCalledWith({
eventName: 'n8n.ai.llm.error',
payload,
});
});
it('should log on `ai-vector-store-populated` event', () => {
const payload: RelayEventMap['ai-vector-store-populated'] = {
msg: 'Test',
executionId: 'exec456',
nodeName: 'Vector Store',
workflowId: 'wf789',
workflowName: 'My Workflow',
nodeType: 'n8n-nodes-base.vectorStore',
};
eventService.emit('ai-vector-store-populated', payload);
expect(eventBus.sendAiNodeEvent).toHaveBeenCalledWith({
eventName: 'n8n.ai.vector.store.populated',
payload,
});
});
it('should log on `ai-vector-store-updated` event', () => {
const payload: RelayEventMap['ai-vector-store-updated'] = {
msg: 'Test',
executionId: 'exec789',
nodeName: 'Vector Store',
workflowId: 'wf123',
workflowName: 'My Workflow',
nodeType: 'n8n-nodes-base.vectorStore',
};
eventService.emit('ai-vector-store-updated', payload);
expect(eventBus.sendAiNodeEvent).toHaveBeenCalledWith({
eventName: 'n8n.ai.vector.store.updated',
payload,
});
});
});
});


@@ -0,0 +1,38 @@
export type AiEventPayload = {
msg: string;
workflowName: string;
executionId: string;
nodeName: string;
workflowId?: string;
nodeType?: string;
};
export type AiEventMap = {
'ai-messages-retrieved-from-memory': AiEventPayload;
'ai-message-added-to-memory': AiEventPayload;
'ai-output-parsed': AiEventPayload;
'ai-documents-retrieved': AiEventPayload;
'ai-document-embedded': AiEventPayload;
'ai-query-embedded': AiEventPayload;
'ai-document-processed': AiEventPayload;
'ai-text-split': AiEventPayload;
'ai-tool-called': AiEventPayload;
'ai-vector-store-searched': AiEventPayload;
'ai-llm-generated-output': AiEventPayload;
'ai-llm-errored': AiEventPayload;
'ai-vector-store-populated': AiEventPayload;
'ai-vector-store-updated': AiEventPayload;
};


@@ -2,8 +2,9 @@ import { Service } from 'typedi';
import { TypedEmitter } from '@/typed-emitter';
import type { RelayEventMap } from './relay-event-map';
import type { QueueMetricsEventMap } from './queue-metrics-event-map';
+import type { AiEventMap } from './ai-event-map';
-type EventMap = RelayEventMap & QueueMetricsEventMap;
+type EventMap = RelayEventMap & QueueMetricsEventMap & AiEventMap;
@Service()
export class EventService extends TypedEmitter<EventMap> {}


@@ -46,6 +46,20 @@ export class LogStreamingEventRelay extends EventRelay {
'community-package-deleted': (event) => this.communityPackageDeleted(event),
'execution-throttled': (event) => this.executionThrottled(event),
'execution-started-during-bootup': (event) => this.executionStartedDuringBootup(event),
'ai-messages-retrieved-from-memory': (event) => this.aiMessagesRetrievedFromMemory(event),
'ai-message-added-to-memory': (event) => this.aiMessageAddedToMemory(event),
'ai-output-parsed': (event) => this.aiOutputParsed(event),
'ai-documents-retrieved': (event) => this.aiDocumentsRetrieved(event),
'ai-document-embedded': (event) => this.aiDocumentEmbedded(event),
'ai-query-embedded': (event) => this.aiQueryEmbedded(event),
'ai-document-processed': (event) => this.aiDocumentProcessed(event),
'ai-text-split': (event) => this.aiTextSplitIntoChunks(event),
'ai-tool-called': (event) => this.aiToolCalled(event),
'ai-vector-store-searched': (event) => this.aiVectorStoreSearched(event),
'ai-llm-generated-output': (event) => this.aiLlmGeneratedOutput(event),
'ai-llm-errored': (event) => this.aiLlmErrored(event),
'ai-vector-store-populated': (event) => this.aiVectorStorePopulated(event),
'ai-vector-store-updated': (event) => this.aiVectorStoreUpdated(event),
});
}
@@ -387,4 +401,108 @@ export class LogStreamingEventRelay extends EventRelay {
}
// #endregion
// #region AI
private aiMessagesRetrievedFromMemory(
payload: RelayEventMap['ai-messages-retrieved-from-memory'],
) {
void this.eventBus.sendAiNodeEvent({
eventName: 'n8n.ai.memory.get.messages',
payload,
});
}
private aiMessageAddedToMemory(payload: RelayEventMap['ai-message-added-to-memory']) {
void this.eventBus.sendAiNodeEvent({
eventName: 'n8n.ai.memory.added.message',
payload,
});
}
private aiOutputParsed(payload: RelayEventMap['ai-output-parsed']) {
void this.eventBus.sendAiNodeEvent({
eventName: 'n8n.ai.output.parser.parsed',
payload,
});
}
private aiDocumentsRetrieved(payload: RelayEventMap['ai-documents-retrieved']) {
void this.eventBus.sendAiNodeEvent({
eventName: 'n8n.ai.retriever.get.relevant.documents',
payload,
});
}
private aiDocumentEmbedded(payload: RelayEventMap['ai-document-embedded']) {
void this.eventBus.sendAiNodeEvent({
eventName: 'n8n.ai.embeddings.embedded.document',
payload,
});
}
private aiQueryEmbedded(payload: RelayEventMap['ai-query-embedded']) {
void this.eventBus.sendAiNodeEvent({
eventName: 'n8n.ai.embeddings.embedded.query',
payload,
});
}
private aiDocumentProcessed(payload: RelayEventMap['ai-document-processed']) {
void this.eventBus.sendAiNodeEvent({
eventName: 'n8n.ai.document.processed',
payload,
});
}
private aiTextSplitIntoChunks(payload: RelayEventMap['ai-text-split']) {
void this.eventBus.sendAiNodeEvent({
eventName: 'n8n.ai.text.splitter.split',
payload,
});
}
private aiToolCalled(payload: RelayEventMap['ai-tool-called']) {
void this.eventBus.sendAiNodeEvent({
eventName: 'n8n.ai.tool.called',
payload,
});
}
private aiVectorStoreSearched(payload: RelayEventMap['ai-vector-store-searched']) {
void this.eventBus.sendAiNodeEvent({
eventName: 'n8n.ai.vector.store.searched',
payload,
});
}
private aiLlmGeneratedOutput(payload: RelayEventMap['ai-llm-generated-output']) {
void this.eventBus.sendAiNodeEvent({
eventName: 'n8n.ai.llm.generated',
payload,
});
}
private aiLlmErrored(payload: RelayEventMap['ai-llm-errored']) {
void this.eventBus.sendAiNodeEvent({
eventName: 'n8n.ai.llm.error',
payload,
});
}
private aiVectorStorePopulated(payload: RelayEventMap['ai-vector-store-populated']) {
void this.eventBus.sendAiNodeEvent({
eventName: 'n8n.ai.vector.store.populated',
payload,
});
}
private aiVectorStoreUpdated(payload: RelayEventMap['ai-vector-store-updated']) {
void this.eventBus.sendAiNodeEvent({
eventName: 'n8n.ai.vector.store.updated',
payload,
});
}
// #endregion
}


@@ -9,6 +9,7 @@ import type { IWorkflowDb } from '@/interfaces';
import type { ProjectRole } from '@/databases/entities/project-relation';
import type { GlobalRole } from '@/databases/entities/user';
import type { AuthProviderType } from '@/databases/entities/auth-identity';
+import type { AiEventMap } from './ai-event-map';
export type UserLike = {
id: string;
@@ -470,4 +471,4 @@ export type RelayEventMap = {
};
// #endregion
-};
+} & AiEventMap;


@@ -23,7 +23,6 @@ import type {
WorkflowExecuteMode,
ExecutionStatus,
ExecutionError,
-EventNamesAiNodesType,
ExecuteWorkflowOptions,
IWorkflowExecutionDataProcess,
} from 'n8n-workflow';
@@ -69,7 +68,7 @@ import { WorkflowStaticDataService } from './workflows/workflow-static-data.serv
import { WorkflowRepository } from './databases/repositories/workflow.repository';
import { UrlService } from './services/url.service';
import { WorkflowExecutionService } from './workflows/workflow-execution.service';
-import { MessageEventBus } from '@/eventbus/message-event-bus/message-event-bus';
+import type { AiEventMap, AiEventPayload } from './events/ai-event-map';
import { EventService } from './events/event.service';
import { GlobalConfig } from '@n8n/config';
import { SubworkflowPolicyChecker } from './subworkflows/subworkflow-policy-checker.service';
@@ -969,6 +968,8 @@ export async function getBase(
const variables = await WorkflowHelpers.getVariables();
+const eventService = Container.get(EventService);
return {
credentialsHelper: Container.get(CredentialsHelper),
executeWorkflow,
@@ -984,22 +985,8 @@ export async function getBase(
setExecutionStatus,
variables,
secretsHelpers: Container.get(SecretsHelper),
-logAiEvent: async (
-eventName: EventNamesAiNodesType,
-payload: {
-msg?: string | undefined;
-executionId: string;
-nodeName: string;
-workflowId?: string | undefined;
-workflowName: string;
-nodeType?: string | undefined;
-},
-) => {
-return await Container.get(MessageEventBus).sendAiNodeEvent({
-eventName,
-payload,
-});
-},
+logAiEvent: (eventName: keyof AiEventMap, payload: AiEventPayload) =>
+eventService.emit(eventName, payload),
};
}


@@ -39,7 +39,6 @@ import type {
BinaryHelperFunctions,
CloseFunction,
ContextType,
-EventNamesAiNodesType,
FieldType,
FileSystemHelperFunctions,
FunctionsBase,
@@ -102,6 +101,7 @@ import type {
EnsureTypeOptions,
SSHTunnelFunctions,
SchedulingFunctions,
+AiEvent,
} from 'n8n-workflow';
import {
NodeConnectionType,
@@ -3888,8 +3888,8 @@ export function getExecuteFunctions(
constructExecutionMetaData,
},
nodeHelpers: getNodeHelperFunctions(additionalData, workflow.id),
-logAiEvent: async (eventName: EventNamesAiNodesType, msg: string) => {
-return await additionalData.logAiEvent(eventName, {
+logAiEvent: async (eventName: AiEvent, msg: string) => {
+return additionalData.logAiEvent(eventName, {
executionId: additionalData.executionId ?? 'unsaved-execution',
nodeName: node.name,
workflowName: workflow.name ?? 'Unnamed workflow',
@@ -4039,8 +4039,8 @@ export function getExecuteSingleFunctions(
getBinaryDataBuffer: async (propertyName, inputIndex = 0) =>
await getBinaryDataBuffer(inputData, itemIndex, propertyName, inputIndex),
},
-logAiEvent: async (eventName: EventNamesAiNodesType, msg: string) => {
-return await additionalData.logAiEvent(eventName, {
+logAiEvent: async (eventName: AiEvent, msg: string) => {
+return additionalData.logAiEvent(eventName, {
executionId: additionalData.executionId ?? 'unsaved-execution',
nodeName: node.name,
workflowName: workflow.name ?? 'Unnamed workflow',


@@ -898,7 +898,7 @@ type BaseExecutionFunctions = FunctionsBaseWithRequiredKeys<'getMode'> & {
getInputSourceData(inputIndex?: number, inputName?: string): ISourceData;
getExecutionCancelSignal(): AbortSignal | undefined;
onExecutionCancellation(handler: () => unknown): void;
-logAiEvent(eventName: EventNamesAiNodesType, msg?: string | undefined): Promise<void>;
+logAiEvent(eventName: AiEvent, msg?: string | undefined): Promise<void>;
};
// TODO: Create later own type only for Config-Nodes
@@ -2147,26 +2147,6 @@ export interface IWorkflowExecuteHooks {
sendResponse?: Array<(response: IExecuteResponsePromiseData) => Promise<void>>;
}
-export const eventNamesAiNodes = [
-'n8n.ai.memory.get.messages',
-'n8n.ai.memory.added.message',
-'n8n.ai.output.parser.get.instructions',
-'n8n.ai.output.parser.parsed',
-'n8n.ai.retriever.get.relevant.documents',
-'n8n.ai.embeddings.embedded.document',
-'n8n.ai.embeddings.embedded.query',
-'n8n.ai.document.processed',
-'n8n.ai.text.splitter.split',
-'n8n.ai.tool.called',
-'n8n.ai.vector.store.searched',
-'n8n.ai.llm.generated',
-'n8n.ai.llm.error',
-'n8n.ai.vector.store.populated',
-'n8n.ai.vector.store.updated',
-] as const;
-export type EventNamesAiNodesType = (typeof eventNamesAiNodes)[number];
export interface IWorkflowExecutionDataProcess {
destinationNode?: string;
restartExecutionId?: string;
@@ -2192,6 +2172,31 @@ export interface ExecuteWorkflowOptions {
parentCallbackManager?: CallbackManager;
}
export type AiEvent =
| 'ai-messages-retrieved-from-memory'
| 'ai-message-added-to-memory'
| 'ai-output-parsed'
| 'ai-documents-retrieved'
| 'ai-document-embedded'
| 'ai-query-embedded'
| 'ai-document-processed'
| 'ai-text-split'
| 'ai-tool-called'
| 'ai-vector-store-searched'
| 'ai-llm-generated-output'
| 'ai-llm-errored'
| 'ai-vector-store-populated'
| 'ai-vector-store-updated';
type AiEventPayload = {
msg: string;
workflowName: string;
executionId: string;
nodeName: string;
workflowId?: string;
nodeType?: string;
};
export interface IWorkflowExecuteAdditionalData {
credentialsHelper: ICredentialsHelper;
executeWorkflow: (
@@ -2217,17 +2222,7 @@ export interface IWorkflowExecuteAdditionalData {
userId?: string;
variables: IDataObject;
secretsHelpers: SecretsHelpersBase;
-logAiEvent: (
-eventName: EventNamesAiNodesType,
-payload: {
-msg?: string;
-executionId: string;
-nodeName: string;
-workflowId?: string;
-workflowName: string;
-nodeType?: string;
-},
-) => Promise<void>;
+logAiEvent: (eventName: AiEvent, payload: AiEventPayload) => void;
parentCallbackManager?: CallbackManager;
}