Mirror of https://github.com/n8n-io/n8n.git, synced 2025-03-05 20:50:17 -08:00

feat: Add support for AI log streaming (#8526)

Co-authored-by: Oleg Ivaniv <me@olegivaniv.com>

This commit is contained in:
parent ccc0ad5009
commit 7501ad8f3c
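For orientation before the hunks: the change threads a new logAiEvent helper from the node execution context (IExecuteFunctions) through IWorkflowExecuteAdditionalData into the log-streaming MessageEventBus, where it is emitted as a new EventMessageAiNode message type. A minimal sketch of the node-facing call, using only names introduced in the hunks below; the wrapper function itself is invented for illustration:

import { jsonStringify } from 'n8n-workflow';
import type { IExecuteFunctions } from 'n8n-workflow';

// Hypothetical helper, not part of the commit: emit one of the new AI events
// from a node's execute context. The call is fire-and-forget (`void`) so that a
// failure to stream the log never fails the node itself.
function reportToolCall(ctx: IExecuteFunctions, query: string, response: string) {
  void ctx.logAiEvent('n8n.ai.tool.called', jsonStringify({ query, response }));
}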
@@ -1,7 +1,7 @@
 /* eslint-disable n8n-nodes-base/node-filename-against-convention */
 /* eslint-disable n8n-nodes-base/node-dirname-against-convention */
 import type { VectorStore } from 'langchain/vectorstores/base';
-import { NodeConnectionType, NodeOperationError } from 'n8n-workflow';
+import { NodeConnectionType, NodeOperationError, jsonStringify } from 'n8n-workflow';
 import type {
   INodeCredentialDescription,
   INodeProperties,
@@ -237,6 +237,7 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>
           });

           resultData.push(...serializedDocs);
+          void this.logAiEvent('n8n.ai.vector.store.searched', jsonStringify({ query: prompt }));
         }

         return await this.prepareOutputData(resultData);
@@ -262,6 +263,8 @@ export const createVectorStoreNode = (args: VectorStoreNodeConstructorArgs) =>

         try {
           await args.populateVectorStore(this, embeddings, processedDocuments, itemIndex);
+
+          void this.logAiEvent('n8n.ai.vector.store.populated');
         } catch (error) {
           throw error;
         }
@@ -4,6 +4,7 @@ import {
   type IExecuteFunctions,
   type INodeExecutionData,
   NodeConnectionType,
+  jsonStringify,
 } from 'n8n-workflow';

 import { Tool } from 'langchain/tools';
@@ -198,17 +199,20 @@ export function logWrapper(
             arguments: [],
           })) as BaseMessage[];

-          executeFunctions.addOutputData(connectionType, index, [
-            [{ json: { action: 'getMessages', response } }],
-          ]);
+          const payload = { action: 'getMessages', response };
+          executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);
+
+          void executeFunctions.logAiEvent(
+            'n8n.ai.memory.get.messages',
+            jsonStringify({ response }),
+          );
           return response;
         };
       } else if (prop === 'addMessage' && 'addMessage' in target) {
         return async (message: BaseMessage): Promise<void> => {
           connectionType = NodeConnectionType.AiMemory;
-          const { index } = executeFunctions.addInputData(connectionType, [
-            [{ json: { action: 'addMessage', message } }],
-          ]);
+          const payload = { action: 'addMessage', message };
+          const { index } = executeFunctions.addInputData(connectionType, [[{ json: payload }]]);

           await callMethodAsync.call(target, {
             executeFunctions,
@@ -218,9 +222,11 @@ export function logWrapper(
             arguments: [message],
           });

-          executeFunctions.addOutputData(connectionType, index, [
-            [{ json: { action: 'addMessage' } }],
-          ]);
+          void executeFunctions.logAiEvent(
+            'n8n.ai.memory.added.message',
+            jsonStringify({ message }),
+          );
+          executeFunctions.addOutputData(connectionType, index, [[{ json: payload }]]);
         };
       }
     }
@@ -237,7 +243,6 @@ export function logWrapper(
         const { index } = executeFunctions.addInputData(connectionType, [
           [{ json: { messages, options } }],
         ]);
-
         try {
           const response = (await callMethodAsync.call(target, {
             executeFunctions,
@@ -250,6 +255,18 @@ export function logWrapper(
               runManager,
             ],
           })) as ChatResult;
+
+          void executeFunctions.logAiEvent(
+            'n8n.ai.llm.generated',
+            jsonStringify({
+              messages:
+                typeof messages === 'string'
+                  ? messages
+                  : messages.map((message) => message.toJSON()),
+              options,
+              response,
+            }),
+          );
           executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
           return response;
         } catch (error) {
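Since messages may be either a prompt string or an array of LangChain BaseMessage instances (class objects, not plain JSON), the hunk above serializes the latter with toJSON() before stringifying. A rough, self-contained illustration of building the same msg string, assuming the langchain entry points of that era; the exact toJSON() output shape depends on the langchain version:

import { jsonStringify } from 'n8n-workflow';
import { HumanMessage } from 'langchain/schema'; // assumed import path

const messages = [new HumanMessage('What is n8n?')];
const msg = jsonStringify({
  messages: messages.map((message) => message.toJSON()), // plain-JSON form of each BaseMessage
  options: {},
  response: { generations: [] }, // placeholder standing in for a ChatResult
});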
@@ -282,6 +299,10 @@ export function logWrapper(
           executeFunctions.addOutputData(connectionType, index, [
             [{ json: { action: 'getFormatInstructions', response } }],
           ]);
+          void executeFunctions.logAiEvent(
+            'n8n.ai.output.parser.get.instructions',
+            jsonStringify({ response }),
+          );
           return response;
         };
       } else if (prop === 'parse' && 'parse' in target) {
@@ -300,6 +321,10 @@ export function logWrapper(
             arguments: [stringifiedText],
           })) as object;

+          void executeFunctions.logAiEvent(
+            'n8n.ai.output.parser.parsed',
+            jsonStringify({ text, response }),
+          );
           executeFunctions.addOutputData(connectionType, index, [
             [{ json: { action: 'parse', response } }],
           ]);
@@ -328,6 +353,10 @@ export function logWrapper(
             arguments: [query, config],
           })) as Array<Document<Record<string, any>>>;

+          void executeFunctions.logAiEvent(
+            'n8n.ai.retriever.get.relevant.documents',
+            jsonStringify({ query }),
+          );
           executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
           return response;
         };
@@ -352,6 +381,7 @@ export function logWrapper(
             arguments: [documents],
           })) as number[][];

+          void executeFunctions.logAiEvent('n8n.ai.embeddings.embedded.document');
           executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
           return response;
         };
@@ -371,7 +401,7 @@ export function logWrapper(
             method: target[prop],
             arguments: [query],
           })) as number[];
-
+          void executeFunctions.logAiEvent('n8n.ai.embeddings.embedded.query');
           executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
           return response;
         };
@@ -401,6 +431,7 @@ export function logWrapper(
           return response;
         };
       }
+
       // Process Each
       if (prop === 'processItem' && 'processItem' in target) {
         return async (item: INodeExecutionData, itemIndex: number): Promise<number[]> => {
@@ -415,6 +446,7 @@ export function logWrapper(
             arguments: [item, itemIndex],
           })) as number[];

+          void executeFunctions.logAiEvent('n8n.ai.document.processed');
           executeFunctions.addOutputData(connectionType, index, [
             [{ json: { response }, pairedItem: { item: itemIndex } }],
           ]);
@@ -440,6 +472,7 @@ export function logWrapper(
             arguments: [text],
           })) as string[];

+          void executeFunctions.logAiEvent('n8n.ai.text.splitter.split');
           executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
           return response;
         };
@@ -463,6 +496,10 @@ export function logWrapper(
             arguments: [query],
           })) as string;

+          void executeFunctions.logAiEvent(
+            'n8n.ai.tool.called',
+            jsonStringify({ query, response }),
+          );
           executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);
           return response;
         };
@@ -492,6 +529,10 @@ export function logWrapper(
             arguments: [query, k, filter, _callbacks],
           })) as Array<Document<Record<string, any>>>;

+          void executeFunctions.logAiEvent(
+            'n8n.ai.vector.store.searched',
+            jsonStringify({ query }),
+          );
           executeFunctions.addOutputData(connectionType, index, [[{ json: { response } }]]);

           return response;
@@ -23,6 +23,7 @@ import type {
   WorkflowExecuteMode,
   ExecutionStatus,
   ExecutionError,
+  EventNamesAiNodesType,
 } from 'n8n-workflow';
 import {
   ApplicationError,
@@ -68,6 +69,7 @@ import { WorkflowStaticDataService } from './workflows/workflowStaticData.servic
 import { WorkflowRepository } from './databases/repositories/workflow.repository';
 import { UrlService } from './services/url.service';
 import { WorkflowExecutionService } from './workflows/workflowExecution.service';
+import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus';

 const ERROR_TRIGGER_TYPE = config.getEnv('nodes.errorTriggerType');

@@ -982,6 +984,22 @@ export async function getBase(
     setExecutionStatus,
     variables,
     secretsHelpers: Container.get(SecretsHelper),
+    logAiEvent: async (
+      eventName: EventNamesAiNodesType,
+      payload: {
+        msg?: string | undefined;
+        executionId: string;
+        nodeName: string;
+        workflowId?: string | undefined;
+        workflowName: string;
+        nodeType?: string | undefined;
+      },
+    ) => {
+      return await Container.get(MessageEventBus).sendAiNodeEvent({
+        eventName,
+        payload,
+      });
+    },
   };
 }

@@ -0,0 +1,52 @@
+import { AbstractEventMessage, isEventMessageOptionsWithType } from './AbstractEventMessage';
+import type { EventNamesAiNodesType, JsonObject } from 'n8n-workflow';
+import { EventMessageTypeNames } from 'n8n-workflow';
+import type { AbstractEventMessageOptions } from './AbstractEventMessageOptions';
+import type { AbstractEventPayload } from './AbstractEventPayload';
+
+// --------------------------------------
+// EventMessage class for Node events
+// --------------------------------------
+export interface EventPayloadAiNode extends AbstractEventPayload {
+  msg?: string;
+  executionId: string;
+  nodeName: string;
+  workflowId?: string;
+  workflowName: string;
+  nodeType?: string;
+}
+
+export interface EventMessageAiNodeOptions extends AbstractEventMessageOptions {
+  eventName: EventNamesAiNodesType;
+
+  payload?: EventPayloadAiNode | undefined;
+}
+
+export class EventMessageAiNode extends AbstractEventMessage {
+  readonly __type = EventMessageTypeNames.aiNode;
+
+  eventName: EventNamesAiNodesType;
+
+  payload: EventPayloadAiNode;
+
+  constructor(options: EventMessageAiNodeOptions) {
+    super(options);
+    if (options.payload) this.setPayload(options.payload);
+    if (options.anonymize) {
+      this.anonymize();
+    }
+  }
+
+  setPayload(payload: EventPayloadAiNode): this {
+    this.payload = payload;
+    return this;
+  }
+
+  deserialize(data: JsonObject): this {
+    if (isEventMessageOptionsWithType(data, this.__type)) {
+      this.setOptionsOrDefault(data);
+      if (data.payload) this.setPayload(data.payload as EventPayloadAiNode);
+    }
+    return this;
+  }
+}
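A minimal sketch of constructing the new message class directly; in the commit this is done by MessageEventBus.sendAiNodeEvent() (added further down). It assumes the abstract base class fills in defaults such as the message id and timestamp, and all payload values are invented for the example:

import { EventMessageAiNode } from './EventMessageAiNode';

const event = new EventMessageAiNode({
  eventName: 'n8n.ai.vector.store.searched',
  payload: {
    executionId: '123',            // example values only
    nodeName: 'My Vector Store',
    workflowName: 'My workflow',
    msg: '{"query":"what is n8n?"}',
  },
});
// event.__type === EventMessageTypeNames.aiNode lets the event bus recognise the
// message class again after (de)serialization.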
@@ -1,7 +1,9 @@
+import type { EventMessageAiNode } from './EventMessageAiNode';
 import type { EventMessageAudit } from './EventMessageAudit';
 import type { EventMessageGeneric } from './EventMessageGeneric';
 import type { EventMessageNode } from './EventMessageNode';
 import type { EventMessageWorkflow } from './EventMessageWorkflow';
+import { eventNamesAiNodes, type EventNamesAiNodesType } from 'n8n-workflow';

 export const eventNamesWorkflow = [
   'n8n.workflow.started',
@@ -45,6 +47,7 @@ export type EventNamesTypes =
   | EventNamesWorkflowType
   | EventNamesNodeType
   | EventNamesGenericType
+  | EventNamesAiNodesType
   | 'n8n.destination.test';

 export const eventNamesAll = [
@@ -52,13 +55,15 @@ export const eventNamesAll = [
   ...eventNamesWorkflow,
   ...eventNamesNode,
   ...eventNamesGeneric,
+  ...eventNamesAiNodes,
 ];

 export type EventMessageTypes =
   | EventMessageGeneric
   | EventMessageWorkflow
   | EventMessageAudit
-  | EventMessageNode;
+  | EventMessageNode
+  | EventMessageAiNode;

 export interface FailedEventSummary {
   lastNodeExecuted: string;
@@ -37,6 +37,10 @@ import { METRICS_EVENT_NAME } from '../MessageEventBusDestination/Helpers.ee';
 import type { AbstractEventMessageOptions } from '../EventMessageClasses/AbstractEventMessageOptions';
 import { getEventMessageObjectByType } from '../EventMessageClasses/Helpers';
 import { ExecutionDataRecoveryService } from '../executionDataRecovery.service';
+import {
+  EventMessageAiNode,
+  type EventMessageAiNodeOptions,
+} from '../EventMessageClasses/EventMessageAiNode';

 export type EventMessageReturnMode = 'sent' | 'unsent' | 'all' | 'unfinished';

@@ -457,4 +461,8 @@ export class MessageEventBus extends EventEmitter {
   async sendNodeEvent(options: EventMessageNodeOptions) {
     await this.send(new EventMessageNode(options));
   }
+
+  async sendAiNodeEvent(options: EventMessageAiNodeOptions) {
+    await this.send(new EventMessageAiNode(options));
+  }
 }
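Callers reach this method through the DI container; a short usage sketch mirroring what getBase() wires up earlier in the diff (the event name comes from the new eventNamesAiNodes list, payload values are invented):

import { Container } from 'typedi';
import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus';

// inside some async function in the CLI package
await Container.get(MessageEventBus).sendAiNodeEvent({
  eventName: 'n8n.ai.llm.generated',
  payload: {
    executionId: '123',             // example values only
    nodeName: 'OpenAI Chat Model',
    workflowName: 'Demo workflow',
  },
});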
packages/cli/test/unit/ExecutionMetadataService.test.ts (new file, 34 lines)
@@ -0,0 +1,34 @@
+import { Container } from 'typedi';
+import { ExecutionMetadataRepository } from '@db/repositories/executionMetadata.repository';
+import { ExecutionMetadataService } from '@/services/executionMetadata.service';
+import { mockInstance } from '../shared/mocking';
+
+describe('ExecutionMetadataService', () => {
+  const repository = mockInstance(ExecutionMetadataRepository);
+
+  test('Execution metadata is saved in a batch', async () => {
+    const toSave = {
+      test1: 'value1',
+      test2: 'value2',
+    };
+    const executionId = '1234';
+
+    await Container.get(ExecutionMetadataService).save(executionId, toSave);
+
+    expect(repository.save).toHaveBeenCalledTimes(1);
+    expect(repository.save.mock.calls[0]).toEqual([
+      [
+        {
+          execution: { id: executionId },
+          key: 'test1',
+          value: 'value1',
+        },
+        {
+          execution: { id: executionId },
+          key: 'test2',
+          value: 'value2',
+        },
+      ],
+    ]);
+  });
+});
@@ -1,34 +1,41 @@
-import { Container } from 'typedi';
-import { ExecutionMetadataRepository } from '@db/repositories/executionMetadata.repository';
-import { ExecutionMetadataService } from '@/services/executionMetadata.service';
+import { VariablesService } from '@/environments/variables/variables.service.ee';
 import { mockInstance } from '../shared/mocking';
+import { MessageEventBus } from '@/eventbus/MessageEventBus/MessageEventBus';
+import { getBase } from '@/WorkflowExecuteAdditionalData';
+import Container from 'typedi';
+import { CredentialsHelper } from '@/CredentialsHelper';
+import { SecretsHelper } from '@/SecretsHelpers';

 describe('WorkflowExecuteAdditionalData', () => {
-  const repository = mockInstance(ExecutionMetadataRepository);
+  const messageEventBus = mockInstance(MessageEventBus);
+  const variablesService = mockInstance(VariablesService);
+  variablesService.getAllCached.mockResolvedValue([]);
+  const credentialsHelper = mockInstance(CredentialsHelper);
+  const secretsHelper = mockInstance(SecretsHelper);
+  Container.set(MessageEventBus, messageEventBus);
+  Container.set(VariablesService, variablesService);
+  Container.set(CredentialsHelper, credentialsHelper);
+  Container.set(SecretsHelper, secretsHelper);

-  test('Execution metadata is saved in a batch', async () => {
-    const toSave = {
-      test1: 'value1',
-      test2: 'value2',
+  test('logAiEvent should call MessageEventBus', async () => {
+    const additionalData = await getBase('user-id');
+
+    const eventName = 'n8n.ai.memory.get.messages';
+    const payload = {
+      msg: 'test message',
+      executionId: '123',
+      nodeName: 'n8n-memory',
+      workflowId: 'workflow-id',
+      workflowName: 'workflow-name',
+      nodeType: 'n8n-memory',
     };
-    const executionId = '1234';

-    await Container.get(ExecutionMetadataService).save(executionId, toSave);
+    await additionalData.logAiEvent(eventName, payload);

-    expect(repository.save).toHaveBeenCalledTimes(1);
-    expect(repository.save.mock.calls[0]).toEqual([
-      [
-        {
-          execution: { id: executionId },
-          key: 'test1',
-          value: 'value1',
-        },
-        {
-          execution: { id: executionId },
-          key: 'test2',
-          value: 'value2',
-        },
-      ],
-    ]);
+    expect(messageEventBus.sendAiNodeEvent).toHaveBeenCalledTimes(1);
+    expect(messageEventBus.sendAiNodeEvent).toHaveBeenCalledWith({
+      eventName,
+      payload,
+    });
   });
 });
@@ -40,6 +40,7 @@ import type {
   CloseFunction,
   ConnectionTypes,
   ContextType,
+  EventNamesAiNodesType,
   FieldType,
   FileSystemHelperFunctions,
   FunctionsBase,
@@ -3641,6 +3642,16 @@ export function getExecuteFunctions(
         constructExecutionMetaData,
       },
       nodeHelpers: getNodeHelperFunctions(additionalData, workflow.id),
+      logAiEvent: async (eventName: EventNamesAiNodesType, msg: string) => {
+        return await additionalData.logAiEvent(eventName, {
+          executionId: additionalData.executionId ?? 'unsaved-execution',
+          nodeName: node.name,
+          workflowName: workflow.name ?? 'Unnamed workflow',
+          nodeType: node.type,
+          workflowId: workflow.id ?? 'unsaved-workflow',
+          msg,
+        });
+      },
     };
   })(workflow, runExecutionData, connectionInputData, inputData, node) as IExecuteFunctions;
 }
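A node (or the logWrapper) therefore passes only the event name and an optional pre-serialized msg; this closure enriches the call with execution metadata before forwarding it to additionalData.logAiEvent. A sketch of that mapping, with every concrete identifier invented for illustration:

import { jsonStringify } from 'n8n-workflow';
import type { IExecuteFunctions } from 'n8n-workflow';

declare const ctx: IExecuteFunctions; // a node's execute context

void ctx.logAiEvent('n8n.ai.output.parser.parsed', jsonStringify({ text: 'raw output', response: { ok: true } }));
// ...is forwarded roughly as:
// additionalData.logAiEvent('n8n.ai.output.parser.parsed', {
//   executionId: '1042',                 // or 'unsaved-execution'
//   nodeName: 'Structured Output Parser',
//   workflowName: 'My workflow',         // or 'Unnamed workflow'
//   nodeType: 'outputParserStructured',  // node.type; example value
//   workflowId: 'abc123',                // or 'unsaved-workflow'
//   msg: '{"text":"raw output","response":{"ok":true}}',
// });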
@@ -3781,6 +3792,16 @@ export function getExecuteSingleFunctions(
       getBinaryDataBuffer: async (propertyName, inputIndex = 0) =>
         await getBinaryDataBuffer(inputData, itemIndex, propertyName, inputIndex),
     },
+    logAiEvent: async (eventName: EventNamesAiNodesType, msg: string) => {
+      return await additionalData.logAiEvent(eventName, {
+        executionId: additionalData.executionId ?? 'unsaved-execution',
+        nodeName: node.name,
+        workflowName: workflow.name ?? 'Unnamed workflow',
+        nodeType: node.type,
+        workflowId: workflow.id ?? 'unsaved-workflow',
+        msg,
+      });
+    },
   };
 })(workflow, runExecutionData, connectionInputData, inputData, node, itemIndex);
 }
@@ -1519,6 +1519,7 @@
   "settings.log-streaming.tab.events.title": "Select groups or single events to subscribe to:",
   "settings.log-streaming.tab.events.anonymize": "Anonymize sensitive data",
   "settings.log-streaming.tab.events.anonymize.info": "Fields containing personal information like name or email are anonymized",
+  "settings.log-streaming.eventGroup.n8n.ai": "AI node logs",
   "settings.log-streaming.eventGroup.n8n.audit": "Audit Events",
   "settings.log-streaming.eventGroup.n8n.audit.info": "Will send events when user details or other audit data changes",
   "settings.log-streaming.eventGroup.n8n.workflow": "Workflow Events",
@@ -787,6 +787,7 @@ type BaseExecutionFunctions = FunctionsBaseWithRequiredKeys<'getMode'> & {
   getInputSourceData(inputIndex?: number, inputName?: string): ISourceData;
   getExecutionCancelSignal(): AbortSignal | undefined;
   onExecutionCancellation(handler: () => unknown): void;
+  logAiEvent(eventName: EventNamesAiNodesType, msg?: string | undefined): Promise<void>;
 };

 // TODO: Create later own type only for Config-Nodes
@@ -1945,6 +1946,24 @@ export interface IWorkflowExecuteHooks {
   sendResponse?: Array<(response: IExecuteResponsePromiseData) => Promise<void>>;
 }

+export const eventNamesAiNodes = [
+  'n8n.ai.memory.get.messages',
+  'n8n.ai.memory.added.message',
+  'n8n.ai.output.parser.get.instructions',
+  'n8n.ai.output.parser.parsed',
+  'n8n.ai.retriever.get.relevant.documents',
+  'n8n.ai.embeddings.embedded.document',
+  'n8n.ai.embeddings.embedded.query',
+  'n8n.ai.document.processed',
+  'n8n.ai.text.splitter.split',
+  'n8n.ai.tool.called',
+  'n8n.ai.vector.store.searched',
+  'n8n.ai.llm.generated',
+  'n8n.ai.vector.store.populated',
+] as const;
+
+export type EventNamesAiNodesType = (typeof eventNamesAiNodes)[number];
+
 export interface IWorkflowExecuteAdditionalData {
   credentialsHelper: ICredentialsHelper;
   executeWorkflow: (
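The `as const` array plus the indexed-access type yields both a runtime list and a string-literal union. As a small illustration (this helper is not part of the commit), the list can double as a type guard:

import { eventNamesAiNodes, type EventNamesAiNodesType } from 'n8n-workflow';

function isAiNodeEventName(name: string): name is EventNamesAiNodesType {
  return (eventNamesAiNodes as readonly string[]).includes(name);
}

isAiNodeEventName('n8n.ai.tool.called');   // true
isAiNodeEventName('n8n.workflow.started'); // false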
@@ -1978,6 +1997,17 @@ export interface IWorkflowExecuteAdditionalData {
   userId: string;
   variables: IDataObject;
   secretsHelpers: SecretsHelpersBase;
+  logAiEvent: (
+    eventName: EventNamesAiNodesType,
+    payload: {
+      msg?: string;
+      executionId: string;
+      nodeName: string;
+      workflowId?: string;
+      workflowName: string;
+      nodeType?: string;
+    },
+  ) => Promise<void>;
 }

 export type WorkflowExecuteMode =
@@ -11,6 +11,7 @@ export const enum EventMessageTypeNames {
   confirm = '$$EventMessageConfirm',
   workflow = '$$EventMessageWorkflow',
   node = '$$EventMessageNode',
+  aiNode = '$$EventMessageAiNode',
 }

 export const enum MessageEventBusDestinationTypeNames {