feat: Add more telemetry to free AI credits feature (no-changelog) (#12493)

parent 6f00c74c1f
commit b1d17f5201
@@ -195,4 +195,3 @@ export const WsStatusCodes = {
 } as const;
 
 export const FREE_AI_CREDITS_CREDENTIAL_NAME = 'n8n free OpenAI API credits';
-export const OPEN_AI_API_CREDENTIAL_TYPE = 'openAiApi';
@@ -6,10 +6,11 @@ import {
 } from '@n8n/api-types';
 import type { AiAssistantSDK } from '@n8n_io/ai-assistant-sdk';
 import { Response } from 'express';
+import { OPEN_AI_API_CREDENTIAL_TYPE } from 'n8n-workflow';
 import { strict as assert } from 'node:assert';
 import { WritableStream } from 'node:stream/web';
 
-import { FREE_AI_CREDITS_CREDENTIAL_NAME, OPEN_AI_API_CREDENTIAL_TYPE } from '@/constants';
+import { FREE_AI_CREDITS_CREDENTIAL_NAME } from '@/constants';
 import { CredentialsService } from '@/credentials/credentials.service';
 import { Body, Post, RestController } from '@/decorators';
 import { InternalServerError } from '@/errors/response-errors/internal-server.error';
@@ -1622,5 +1622,74 @@ describe('TelemetryEventRelay', () => {
         }),
       );
     });
+
+    it('should call telemetry.track when user ran out of free AI credits', async () => {
+      sharedWorkflowRepository.findSharingRole.mockResolvedValue('workflow:editor');
+      credentialsRepository.findOneBy.mockResolvedValue(
+        mock<CredentialsEntity>({ type: 'openAiApi', isManaged: true }),
+      );
+
+      const runData = {
+        status: 'error',
+        mode: 'trigger',
+        data: {
+          startData: {
+            destinationNode: 'OpenAI',
+            runNodeFilter: ['OpenAI'],
+          },
+          executionData: {
+            nodeExecutionStack: [{ node: { credentials: { openAiApi: { id: 'nhu-l8E4hX' } } } }],
+          },
+          resultData: {
+            runData: {},
+            lastNodeExecuted: 'OpenAI',
+            error: new NodeApiError(
+              {
+                id: '1',
+                typeVersion: 1,
+                name: 'OpenAI',
+                type: 'n8n-nodes-base.openAi',
+                parameters: {},
+                position: [100, 200],
+              },
+              {
+                message: `400 - ${JSON.stringify({
+                  error: {
+                    message: 'error message',
+                    type: 'error_type',
+                    code: 200,
+                  },
+                })}`,
+                error: {
+                  message: 'error message',
+                  type: 'error_type',
+                  code: 200,
+                },
+              },
+              {
+                httpCode: '400',
+              },
+            ),
+          },
+        },
+      } as unknown as IRun;
+
+      jest
+        .spyOn(TelemetryHelpers, 'userInInstanceRanOutOfFreeAiCredits')
+        .mockImplementation(() => true);
+
+      const event: RelayEventMap['workflow-post-execute'] = {
+        workflow: mockWorkflowBase,
+        executionId: 'execution123',
+        userId: 'user123',
+        runData,
+      };
+
+      eventService.emit('workflow-post-execute', event);
+
+      await flushPromises();
+
+      expect(telemetry.track).toHaveBeenCalledWith('User ran out of free AI credits');
+    });
   });
 });
@@ -634,6 +634,10 @@ export class TelemetryEventRelay extends EventRelay {
     let nodeGraphResult: INodesGraphResult | null = null;
 
     if (!telemetryProperties.success && runData?.data.resultData.error) {
+      if (TelemetryHelpers.userInInstanceRanOutOfFreeAiCredits(runData)) {
+        this.telemetry.track('User ran out of free AI credits');
+      }
+
       telemetryProperties.error_message = runData?.data.resultData.error.message;
       let errorNodeName =
         'node' in runData?.data.resultData.error
@@ -1,8 +1,9 @@
 import { Container } from '@n8n/di';
 import { randomUUID } from 'crypto';
 import { mock } from 'jest-mock-extended';
+import { OPEN_AI_API_CREDENTIAL_TYPE } from 'n8n-workflow';
 
-import { FREE_AI_CREDITS_CREDENTIAL_NAME, OPEN_AI_API_CREDENTIAL_TYPE } from '@/constants';
+import { FREE_AI_CREDITS_CREDENTIAL_NAME } from '@/constants';
 import type { Project } from '@/databases/entities/project';
 import type { User } from '@/databases/entities/user';
 import { CredentialsRepository } from '@/databases/repositories/credentials.repository';
@@ -10,8 +10,7 @@ import { useProjectsStore } from '@/stores/projects.store';
 import { useSettingsStore } from '@/stores/settings.store';
 import { useUsersStore } from '@/stores/users.store';
 import { computed, ref } from 'vue';
-
-const OPEN_AI_API_CREDENTIAL_TYPE = 'openAiApi';
+import { OPEN_AI_API_CREDENTIAL_TYPE } from 'n8n-workflow';
 
 const LANGCHAIN_NODES_PREFIX = '@n8n/n8n-nodes-langchain.';
 
@@ -95,3 +95,7 @@ export const AI_TRANSFORM_JS_CODE = 'jsCode';
  * in `cli` package.
  */
 export const TRIMMED_TASK_DATA_CONNECTIONS_KEY = '__isTrimmedManualExecutionDataItem';
+
+export const OPEN_AI_API_CREDENTIAL_TYPE = 'openAiApi';
+export const FREE_AI_CREDITS_ERROR_TYPE = 'free_ai_credits_request_error';
+export const FREE_AI_CREDITS_USED_ALL_CREDITS_ERROR_CODE = 400;
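For context: FREE_AI_CREDITS_ERROR_TYPE and FREE_AI_CREDITS_USED_ALL_CREDITS_ERROR_CODE describe the error body that the managed OpenAI credential is expected to return once the free credits are exhausted, matching the fixtures used in the tests later in this diff. A minimal sketch of that payload, assuming the `{ error: { message, type, code } }` shape seen in those fixtures (taken from the test data, not an API guarantee):

// Sketch only: mirrors the error body used in this commit's test fixtures.
const exhaustedCreditsBody = {
  error: {
    message: 'error message',
    type: 'free_ai_credits_request_error', // FREE_AI_CREDITS_ERROR_TYPE
    code: 400, // FREE_AI_CREDITS_USED_ALL_CREDITS_ERROR_CODE
  },
};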
@@ -4,16 +4,20 @@ import {
   CHAIN_LLM_LANGCHAIN_NODE_TYPE,
   CHAIN_SUMMARIZATION_LANGCHAIN_NODE_TYPE,
   EXECUTE_WORKFLOW_NODE_TYPE,
+  FREE_AI_CREDITS_ERROR_TYPE,
+  FREE_AI_CREDITS_USED_ALL_CREDITS_ERROR_CODE,
   HTTP_REQUEST_NODE_TYPE,
   HTTP_REQUEST_TOOL_LANGCHAIN_NODE_TYPE,
   LANGCHAIN_CUSTOM_TOOLS,
   MERGE_NODE_TYPE,
+  OPEN_AI_API_CREDENTIAL_TYPE,
   OPENAI_LANGCHAIN_NODE_TYPE,
   STICKY_NODE_TYPE,
   WEBHOOK_NODE_TYPE,
   WORKFLOW_TOOL_LANGCHAIN_NODE_TYPE,
 } from './Constants';
 import { ApplicationError } from './errors/application.error';
+import type { NodeApiError } from './errors/node-api.error';
 import type {
   IConnection,
   INode,
@@ -29,6 +33,10 @@ import type {
   IRun,
 } from './Interfaces';
 import { getNodeParameters } from './NodeHelpers';
+import { jsonParse } from './utils';
+
+const isNodeApiError = (error: unknown): error is NodeApiError =>
+  typeof error === 'object' && error !== null && 'name' in error && error?.name === 'NodeApiError';
 
 export function getNodeTypeForName(workflow: IWorkflowBase, nodeName: string): INode | undefined {
   return workflow.nodes.find((node) => node.name === nodeName);
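Note that isNodeApiError is a duck-typed guard: it narrows on the name property rather than using instanceof, so any object whose name is 'NodeApiError' passes. A small standalone sketch of the narrowing behaviour, using a structural NodeApiErrorLike stand-in (the real class lives in ./errors and is not reproduced here):

// Standalone sketch; NodeApiErrorLike is an illustrative stand-in, not the real class.
interface NodeApiErrorLike {
  name: string;
  httpCode: string | null;
  messages: string[];
}

const isNodeApiError = (error: unknown): error is NodeApiErrorLike =>
  typeof error === 'object' && error !== null && 'name' in error && error.name === 'NodeApiError';

const candidate: unknown = { name: 'NodeApiError', httpCode: '400', messages: ['400 - {}'] };

if (isNodeApiError(candidate)) {
  // Narrowed: httpCode and messages are accessible without casts.
  console.log(candidate.httpCode, candidate.messages[0]);
}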
@@ -489,3 +497,31 @@ export function extractLastExecutedNodeCredentialData(
 
   return { credentialId: id, credentialType };
 }
+
+export const userInInstanceRanOutOfFreeAiCredits = (runData: IRun): boolean => {
+  const credentials = extractLastExecutedNodeCredentialData(runData);
+
+  if (!credentials) return false;
+
+  if (credentials.credentialType !== OPEN_AI_API_CREDENTIAL_TYPE) return false;
+
+  const { error } = runData.data.resultData;
+
+  if (!isNodeApiError(error) || !error.messages[0]) return false;
+
+  const rawErrorResponse = error.messages[0].replace(`${error.httpCode} -`, '');
+
+  try {
+    const errorResponse = jsonParse<{ error: { code: number; type: string } }>(rawErrorResponse);
+    if (
+      errorResponse?.error?.type === FREE_AI_CREDITS_ERROR_TYPE &&
+      errorResponse.error.code === FREE_AI_CREDITS_USED_ALL_CREDITS_ERROR_CODE
+    ) {
+      return true;
+    }
+  } catch {
+    return false;
+  }
+
+  return false;
+};
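The classification step above can be read in isolation: take the first NodeApiError message, strip the `${httpCode} -` prefix, JSON-parse the remainder, and compare error.type and error.code against the new constants. A self-contained sketch of just that step, using plain JSON.parse in place of n8n-workflow's jsonParse and the literal values from the test fixtures:

// Sketch of the parsing performed inside userInInstanceRanOutOfFreeAiCredits.
const httpCode = '400';
const firstMessage = `400 - ${JSON.stringify({
  error: { message: 'error message', type: 'free_ai_credits_request_error', code: 400 },
})}`;

// Strip the "<httpCode> -" prefix, then parse the JSON body that follows.
const rawErrorResponse = firstMessage.replace(`${httpCode} -`, '');
const parsed = JSON.parse(rawErrorResponse) as { error: { code: number; type: string } };

const ranOutOfFreeAiCredits =
  parsed.error.type === 'free_ai_credits_request_error' && // FREE_AI_CREDITS_ERROR_TYPE
  parsed.error.code === 400; // FREE_AI_CREDITS_USED_ALL_CREDITS_ERROR_CODE

console.log(ranOutOfFreeAiCredits); // true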
@@ -2,7 +2,7 @@ import { mock } from 'jest-mock-extended';
 import { v5 as uuidv5, v3 as uuidv3, v4 as uuidv4, v1 as uuidv1 } from 'uuid';
 
 import { STICKY_NODE_TYPE } from '@/Constants';
-import { ApplicationError } from '@/errors';
+import { ApplicationError, ExpressionError, NodeApiError } from '@/errors';
 import type { IRun, IRunData } from '@/Interfaces';
 import { NodeConnectionType, type IWorkflowBase } from '@/Interfaces';
 import * as nodeHelpers from '@/NodeHelpers';
@@ -12,6 +12,7 @@ import {
   generateNodesGraph,
   getDomainBase,
   getDomainPath,
+  userInInstanceRanOutOfFreeAiCredits,
 } from '@/TelemetryHelpers';
 import { randomInt } from '@/utils';
 
@@ -930,6 +931,227 @@ describe('extractLastExecutedNodeCredentialData', () => {
   });
 });
 
+describe('userInInstanceRanOutOfFreeAiCredits', () => {
+  it('should return false if could not find node credentials', () => {
+    const runData = {
+      status: 'error',
+      mode: 'manual',
+      data: {
+        startData: {
+          destinationNode: 'OpenAI',
+          runNodeFilter: ['OpenAI'],
+        },
+        executionData: {
+          nodeExecutionStack: [{ node: { credentials: {} } }],
+        },
+        resultData: {
+          runData: {},
+          lastNodeExecuted: 'OpenAI',
+          error: new NodeApiError(
+            {
+              id: '1',
+              typeVersion: 1,
+              name: 'OpenAI',
+              type: 'n8n-nodes-base.openAi',
+              parameters: {},
+              position: [100, 200],
+            },
+            {
+              message: `400 - ${JSON.stringify({
+                error: {
+                  message: 'error message',
+                  type: 'free_ai_credits_request_error',
+                  code: 200,
+                },
+              })}`,
+              error: {
+                message: 'error message',
+                type: 'free_ai_credits_request_error',
+                code: 200,
+              },
+            },
+            {
+              httpCode: '400',
+            },
+          ),
+        },
+      },
+    } as unknown as IRun;
+
+    expect(userInInstanceRanOutOfFreeAiCredits(runData)).toBe(false);
+  });
+
+  it('should return false if could not credential type it is not openAiApi', () => {
+    const runData = {
+      status: 'error',
+      mode: 'manual',
+      data: {
+        startData: {
+          destinationNode: 'OpenAI',
+          runNodeFilter: ['OpenAI'],
+        },
+        executionData: {
+          nodeExecutionStack: [{ node: { credentials: { jiraApi: { id: 'nhu-l8E4hX' } } } }],
+        },
+        resultData: {
+          runData: {},
+          lastNodeExecuted: 'OpenAI',
+          error: new NodeApiError(
+            {
+              id: '1',
+              typeVersion: 1,
+              name: 'OpenAI',
+              type: 'n8n-nodes-base.openAi',
+              parameters: {},
+              position: [100, 200],
+            },
+            {
+              message: `400 - ${JSON.stringify({
+                error: {
+                  message: 'error message',
+                  type: 'free_ai_credits_request_error',
+                  code: 200,
+                },
+              })}`,
+              error: {
+                message: 'error message',
+                type: 'free_ai_credits_request_error',
+                code: 200,
+              },
+            },
+            {
+              httpCode: '400',
+            },
+          ),
+        },
+      },
+    } as unknown as IRun;
+
+    expect(userInInstanceRanOutOfFreeAiCredits(runData)).toBe(false);
+  });
+
+  it('should return false if error is not NodeApiError', () => {
+    const runData = {
+      status: 'error',
+      mode: 'manual',
+      data: {
+        startData: {
+          destinationNode: 'OpenAI',
+          runNodeFilter: ['OpenAI'],
+        },
+        executionData: {
+          nodeExecutionStack: [{ node: { credentials: { openAiApi: { id: 'nhu-l8E4hX' } } } }],
+        },
+        resultData: {
+          runData: {},
+          lastNodeExecuted: 'OpenAI',
+          error: new ExpressionError('error'),
+        },
+      },
+    } as unknown as IRun;
+
+    expect(userInInstanceRanOutOfFreeAiCredits(runData)).toBe(false);
+  });
+
+  it('should return false if error is not a free ai credit error', () => {
+    const runData = {
+      status: 'error',
+      mode: 'manual',
+      data: {
+        startData: {
+          destinationNode: 'OpenAI',
+          runNodeFilter: ['OpenAI'],
+        },
+        executionData: {
+          nodeExecutionStack: [{ node: { credentials: { openAiApi: { id: 'nhu-l8E4hX' } } } }],
+        },
+        resultData: {
+          runData: {},
+          lastNodeExecuted: 'OpenAI',
+          error: new NodeApiError(
+            {
+              id: '1',
+              typeVersion: 1,
+              name: 'OpenAI',
+              type: 'n8n-nodes-base.openAi',
+              parameters: {},
+              position: [100, 200],
+            },
+            {
+              message: `400 - ${JSON.stringify({
+                error: {
+                  message: 'error message',
+                  type: 'error_type',
+                  code: 200,
+                },
+              })}`,
+              error: {
+                message: 'error message',
+                type: 'error_type',
+                code: 200,
+              },
+            },
+            {
+              httpCode: '400',
+            },
+          ),
+        },
+      },
+    } as unknown as IRun;
+
+    expect(userInInstanceRanOutOfFreeAiCredits(runData)).toBe(false);
+  });
+
+  it('should return true if the user has ran out of free AI credits', () => {
+    const runData = {
+      status: 'error',
+      mode: 'manual',
+      data: {
+        startData: {
+          destinationNode: 'OpenAI',
+          runNodeFilter: ['OpenAI'],
+        },
+        executionData: {
+          nodeExecutionStack: [{ node: { credentials: { openAiApi: { id: 'nhu-l8E4hX' } } } }],
+        },
+        resultData: {
+          runData: {},
+          lastNodeExecuted: 'OpenAI',
+          error: new NodeApiError(
+            {
+              id: '1',
+              typeVersion: 1,
+              name: 'OpenAI',
+              type: 'n8n-nodes-base.openAi',
+              parameters: {},
+              position: [100, 200],
+            },
+            {
+              message: `400 - ${JSON.stringify({
+                error: {
+                  message: 'error message',
+                  type: 'free_ai_credits_request_error',
+                  code: 400,
+                },
+              })}`,
+              error: {
+                message: 'error message',
+                type: 'free_ai_credits_request_error',
+                code: 400,
+              },
+            },
+            {
+              httpCode: '400',
+            },
+          ),
+        },
+      },
+    } as unknown as IRun;
+
+    expect(userInInstanceRanOutOfFreeAiCredits(runData)).toBe(true);
+  });
+});
+
 function validUrls(idMaker: typeof alphanumericId | typeof email, char = CHAR) {
   const firstId = idMaker();
   const secondId = idMaker();