Mirror of https://github.com/n8n-io/n8n.git (synced 2025-01-11 04:47:29 -08:00)

refactor: Use Ask AI feature through AI services instead of hooks (#11027)

Parent: e94cda3837
Commit: 06f51dc7d2
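Summary of the change: the Ask AI code-generation call moves from the experiment-gated `/rest/ask-ai` hook into the renamed AI controller at `/rest/ai/ask-ai`, alongside `/rest/ai/chat` and `/rest/ai/chat/apply-suggestion`, and is now gated by the `feat:askAi` license flag surfaced to the editor as `settings.askAi.enabled`. Below is a minimal sketch of the updated client-side call, based on the types and helpers introduced in the diff (`AskAiRequest.RequestPayload`, `generateCodeForPrompt`); the `askAiForCode` wrapper itself is hypothetical.

```ts
import type { IRestApiContext } from '@/Interface';
import type { AskAiRequest, ChatRequest } from '@/types/assistant.types';
import { generateCodeForPrompt } from '@/api/ai';

// Hypothetical wrapper: builds the new payload shape (no `model` or `n8nVersion`
// fields any more) and POSTs it to the relocated /ai/ask-ai endpoint.
async function askAiForCode(
	ctx: IRestApiContext,
	question: string,
	parentSchemas: ChatRequest.NodeExecutionSchema[],
	inputSchema: ChatRequest.NodeExecutionSchema,
	refs: { pushRef: string; ndvPushRef: string },
): Promise<string> {
	const payload: AskAiRequest.RequestPayload = {
		question,
		context: { schema: parentSchemas, inputSchema, ...refs },
		forNode: 'code', // or 'transform' for the AI Transform node
	};
	const { code } = await generateCodeForPrompt(ctx, payload);
	return code;
}
```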
@@ -78,7 +78,7 @@ describe('AI Assistant::enabled', () => {
 });

 it('should start chat session from node error view', () => {
-cy.intercept('POST', '/rest/ai-assistant/chat', {
+cy.intercept('POST', '/rest/ai/chat', {
 statusCode: 200,
 fixture: 'aiAssistant/simple_message_response.json',
 }).as('chatRequest');
@@ -96,7 +96,7 @@ describe('AI Assistant::enabled', () => {
 });

 it('should render chat input correctly', () => {
-cy.intercept('POST', '/rest/ai-assistant/chat', {
+cy.intercept('POST', '/rest/ai/chat', {
 statusCode: 200,
 fixture: 'aiAssistant/simple_message_response.json',
 }).as('chatRequest');
@@ -129,7 +129,7 @@ describe('AI Assistant::enabled', () => {
 });

 it('should render and handle quick replies', () => {
-cy.intercept('POST', '/rest/ai-assistant/chat', {
+cy.intercept('POST', '/rest/ai/chat', {
 statusCode: 200,
 fixture: 'aiAssistant/quick_reply_message_response.json',
 }).as('chatRequest');
@@ -146,7 +146,7 @@ describe('AI Assistant::enabled', () => {
 });

 it('should show quick replies when node is executed after new suggestion', () => {
-cy.intercept('POST', '/rest/ai-assistant/chat', (req) => {
+cy.intercept('POST', '/rest/ai/chat', (req) => {
 req.reply((res) => {
 if (['init-error-helper', 'message'].includes(req.body.payload.type)) {
 res.send({ statusCode: 200, fixture: 'aiAssistant/simple_message_response.json' });
@@ -177,7 +177,7 @@ describe('AI Assistant::enabled', () => {
 });

 it('should warn before starting a new session', () => {
-cy.intercept('POST', '/rest/ai-assistant/chat', {
+cy.intercept('POST', '/rest/ai/chat', {
 statusCode: 200,
 fixture: 'aiAssistant/simple_message_response.json',
 }).as('chatRequest');
@@ -204,11 +204,11 @@ describe('AI Assistant::enabled', () => {
 });

 it('should apply code diff to code node', () => {
-cy.intercept('POST', '/rest/ai-assistant/chat', {
+cy.intercept('POST', '/rest/ai/chat', {
 statusCode: 200,
 fixture: 'aiAssistant/code_diff_suggestion_response.json',
 }).as('chatRequest');
-cy.intercept('POST', '/rest/ai-assistant/chat/apply-suggestion', {
+cy.intercept('POST', '/rest/ai/chat/apply-suggestion', {
 statusCode: 200,
 fixture: 'aiAssistant/apply_code_diff_response.json',
 }).as('applySuggestion');
@@ -254,7 +254,7 @@ describe('AI Assistant::enabled', () => {
 });

 it('should end chat session when `end_session` event is received', () => {
-cy.intercept('POST', '/rest/ai-assistant/chat', {
+cy.intercept('POST', '/rest/ai/chat', {
 statusCode: 200,
 fixture: 'aiAssistant/end_session_response.json',
 }).as('chatRequest');
@@ -268,7 +268,7 @@ describe('AI Assistant::enabled', () => {
 });

 it('should reset session after it ended and sidebar is closed', () => {
-cy.intercept('POST', '/rest/ai-assistant/chat', (req) => {
+cy.intercept('POST', '/rest/ai/chat', (req) => {
 req.reply((res) => {
 if (['init-support-chat'].includes(req.body.payload.type)) {
 res.send({ statusCode: 200, fixture: 'aiAssistant/simple_message_response.json' });
@@ -296,7 +296,7 @@ describe('AI Assistant::enabled', () => {
 });

 it('Should not reset assistant session when workflow is saved', () => {
-cy.intercept('POST', '/rest/ai-assistant/chat', {
+cy.intercept('POST', '/rest/ai/chat', {
 statusCode: 200,
 fixture: 'aiAssistant/simple_message_response.json',
 }).as('chatRequest');
@@ -321,7 +321,7 @@ describe('AI Assistant Credential Help', () => {
 });

 it('should start credential help from node credential', () => {
-cy.intercept('POST', '/rest/ai-assistant/chat', {
+cy.intercept('POST', '/rest/ai/chat', {
 statusCode: 200,
 fixture: 'aiAssistant/simple_message_response.json',
 }).as('chatRequest');
@@ -347,7 +347,7 @@ describe('AI Assistant Credential Help', () => {
 });

 it('should start credential help from credential list', () => {
-cy.intercept('POST', '/rest/ai-assistant/chat', {
+cy.intercept('POST', '/rest/ai/chat', {
 statusCode: 200,
 fixture: 'aiAssistant/simple_message_response.json',
 }).as('chatRequest');
@@ -446,7 +446,7 @@ describe('General help', () => {
 });

 it('assistant returns code snippet', () => {
-cy.intercept('POST', '/rest/ai-assistant/chat', {
+cy.intercept('POST', '/rest/ai/chat', {
 statusCode: 200,
 fixture: 'aiAssistant/code_snippet_response.json',
 }).as('chatRequest');
@@ -91,28 +91,12 @@ return []
 });

 describe('Ask AI', () => {
-it('tab should display based on experiment', () => {
-WorkflowPage.actions.visit();
-cy.window().then((win) => {
-win.featureFlags.override('011_ask_AI', 'control');
-WorkflowPage.actions.addInitialNodeToCanvas('Manual');
-WorkflowPage.actions.addNodeToCanvas('Code');
-WorkflowPage.actions.openNode('Code');
-
-cy.getByTestId('code-node-tab-ai').should('not.exist');
-
-ndv.actions.close();
-win.featureFlags.override('011_ask_AI', undefined);
-WorkflowPage.actions.openNode('Code');
-cy.getByTestId('code-node-tab-ai').should('not.exist');
-});
-});
-
 describe('Enabled', () => {
 beforeEach(() => {
+cy.enableFeature('askAi');
 WorkflowPage.actions.visit();
-cy.window().then((win) => {
-win.featureFlags.override('011_ask_AI', 'gpt3');

+cy.window().then(() => {
 WorkflowPage.actions.addInitialNodeToCanvas('Manual');
 WorkflowPage.actions.addNodeToCanvas('Code', true, true);
+});
@@ -157,7 +141,7 @@ return []

 cy.getByTestId('ask-ai-prompt-input').type(prompt);

-cy.intercept('POST', '/rest/ask-ai', {
+cy.intercept('POST', '/rest/ai/ask-ai', {
 statusCode: 200,
 body: {
 data: {
@@ -169,9 +153,7 @@ return []
 cy.getByTestId('ask-ai-cta').click();
 const askAiReq = cy.wait('@ask-ai');

-askAiReq
-.its('request.body')
-.should('have.keys', ['question', 'model', 'context', 'n8nVersion']);
+askAiReq.its('request.body').should('have.keys', ['question', 'context', 'forNode']);

 askAiReq.its('context').should('have.keys', ['schema', 'ndvPushRef', 'pushRef']);

@@ -195,7 +177,7 @@ return []
 ];

 handledCodes.forEach(({ code, message }) => {
-cy.intercept('POST', '/rest/ask-ai', {
+cy.intercept('POST', '/rest/ai/ask-ai', {
 statusCode: code,
 status: code,
 }).as('ask-ai');
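The e2e stubs above all move to the new route prefix. For reference, a minimal Cypress sketch that stubs the three relocated endpoints with the same fixtures used in these specs; the inline `/rest/ai/ask-ai` body is an assumed shape, mirroring the `{ code }` result the editor consumes.

```ts
// Stub the relocated AI routes (previously /rest/ai-assistant/chat,
// /rest/ai-assistant/chat/apply-suggestion and /rest/ask-ai).
cy.intercept('POST', '/rest/ai/chat', {
	statusCode: 200,
	fixture: 'aiAssistant/simple_message_response.json',
}).as('chatRequest');

cy.intercept('POST', '/rest/ai/chat/apply-suggestion', {
	statusCode: 200,
	fixture: 'aiAssistant/apply_code_diff_response.json',
}).as('applySuggestion');

cy.intercept('POST', '/rest/ai/ask-ai', {
	statusCode: 200,
	body: { data: { code: 'return [];' } }, // assumed response shape
}).as('ask-ai');
```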
@@ -107,6 +107,9 @@ export interface FrontendSettings {
 aiAssistant: {
 enabled: boolean;
 };
+askAi: {
+enabled: boolean;
+};
 deployment: {
 type: string;
 };
@@ -154,9 +157,6 @@ export interface FrontendSettings {
 banners: {
 dismissed: string[];
 };
-ai: {
-enabled: boolean;
-};
 workflowHistory: {
 pruneTime: number;
 licensePruneTime: number;
@@ -93,7 +93,7 @@
 "@n8n/permissions": "workspace:*",
 "@n8n/task-runner": "workspace:*",
 "@n8n/typeorm": "0.3.20-12",
-"@n8n_io/ai-assistant-sdk": "1.9.4",
+"@n8n_io/ai-assistant-sdk": "1.10.3",
 "@n8n_io/license-sdk": "2.13.1",
 "@oclif/core": "4.0.7",
 "@rudderstack/rudder-sdk-node": "2.0.9",
@@ -91,6 +91,7 @@ export const LICENSE_FEATURES = {
 PROJECT_ROLE_EDITOR: 'feat:projectRole:editor',
 PROJECT_ROLE_VIEWER: 'feat:projectRole:viewer',
 AI_ASSISTANT: 'feat:aiAssistant',
+ASK_AI: 'feat:askAi',
 COMMUNITY_NODES_CUSTOM_REGISTRY: 'feat:communityNodes:customRegistry',
 } as const;

@@ -7,18 +7,18 @@ import { WritableStream } from 'node:stream/web';
 import { Post, RestController } from '@/decorators';
 import { InternalServerError } from '@/errors/response-errors/internal-server.error';
 import { AiAssistantRequest } from '@/requests';
-import { AiAssistantService } from '@/services/ai-assistant.service';
+import { AiService } from '@/services/ai.service';

 type FlushableResponse = Response & { flush: () => void };

-@RestController('/ai-assistant')
-export class AiAssistantController {
-constructor(private readonly aiAssistantService: AiAssistantService) {}
+@RestController('/ai')
+export class AiController {
+constructor(private readonly aiService: AiService) {}

 @Post('/chat', { rateLimit: { limit: 100 } })
 async chat(req: AiAssistantRequest.Chat, res: FlushableResponse) {
 try {
-const aiResponse = await this.aiAssistantService.chat(req.body, req.user);
+const aiResponse = await this.aiService.chat(req.body, req.user);
 if (aiResponse.body) {
 res.header('Content-type', 'application/json-lines').flush();
 await aiResponse.body.pipeTo(
@@ -40,10 +40,21 @@ export class AiAssistantController {

 @Post('/chat/apply-suggestion')
 async applySuggestion(
-req: AiAssistantRequest.ApplySuggestion,
+req: AiAssistantRequest.ApplySuggestionPayload,
 ): Promise<AiAssistantSDK.ApplySuggestionResponse> {
 try {
-return await this.aiAssistantService.applySuggestion(req.body, req.user);
+return await this.aiService.applySuggestion(req.body, req.user);
 } catch (e) {
 assert(e instanceof Error);
 ErrorReporterProxy.error(e);
+throw new InternalServerError(`Something went wrong: ${e.message}`);
+}
+}
+
+@Post('/ask-ai')
+async askAi(req: AiAssistantRequest.AskAiPayload): Promise<AiAssistantSDK.AskAiResponsePayload> {
+try {
+return await this.aiService.askAi(req.body, req.user);
+} catch (e) {
+assert(e instanceof Error);
+ErrorReporterProxy.error(e);
@@ -92,6 +92,7 @@ export class E2EController {
 [LICENSE_FEATURES.PROJECT_ROLE_VIEWER]: false,
 [LICENSE_FEATURES.AI_ASSISTANT]: false,
 [LICENSE_FEATURES.COMMUNITY_NODES_CUSTOM_REGISTRY]: false,
+[LICENSE_FEATURES.ASK_AI]: false,
 };

 private numericFeatures: Record<NumericLicenseFeature, number> = {
@@ -255,6 +255,10 @@ export class License {
 return this.isFeatureEnabled(LICENSE_FEATURES.AI_ASSISTANT);
 }
+
+isAskAiEnabled() {
+return this.isFeatureEnabled(LICENSE_FEATURES.ASK_AI);
+}

 isAdvancedExecutionFiltersEnabled() {
 return this.isFeatureEnabled(LICENSE_FEATURES.ADVANCED_EXECUTION_FILTERS);
 }
@@ -586,5 +586,6 @@ export declare namespace AiAssistantRequest {
 type Chat = AuthenticatedRequest<{}, {}, AiAssistantSDK.ChatRequestPayload>;

 type SuggestionPayload = { sessionId: string; suggestionId: string };
-type ApplySuggestion = AuthenticatedRequest<{}, {}, SuggestionPayload>;
+type ApplySuggestionPayload = AuthenticatedRequest<{}, {}, SuggestionPayload>;
+type AskAiPayload = AuthenticatedRequest<{}, {}, AiAssistantSDK.AskAiRequestPayload>;
 }
@@ -39,7 +39,7 @@ import '@/controllers/annotation-tags.controller.ee';
 import '@/controllers/auth.controller';
 import '@/controllers/binary-data.controller';
 import '@/controllers/curl.controller';
-import '@/controllers/ai-assistant.controller';
+import '@/controllers/ai.controller';
 import '@/controllers/dynamic-node-parameters.controller';
 import '@/controllers/invitation.controller';
 import '@/controllers/me.controller';
@@ -3,7 +3,6 @@ import type { AiAssistantSDK } from '@n8n_io/ai-assistant-sdk';
 import { AiAssistantClient } from '@n8n_io/ai-assistant-sdk';
 import { assert, type IUser } from 'n8n-workflow';
 import { Service } from 'typedi';
-import type { Response } from 'undici';

 import config from '@/config';
 import type { AiAssistantRequest } from '@/requests';
@@ -12,7 +11,7 @@ import { N8N_VERSION } from '../constants';
 import { License } from '../license';

 @Service()
-export class AiAssistantService {
+export class AiService {
 private client: AiAssistantClient | undefined;

 constructor(
@@ -40,7 +39,7 @@ export class AiAssistantService {
 });
 }

-async chat(payload: AiAssistantSDK.ChatRequestPayload, user: IUser): Promise<Response> {
+async chat(payload: AiAssistantSDK.ChatRequestPayload, user: IUser) {
 if (!this.client) {
 await this.init();
 }
@@ -57,4 +56,13 @@ export class AiAssistantService {

 return await this.client.applySuggestion(payload, { id: user.id });
 }
+
+async askAi(payload: AiAssistantSDK.AskAiRequestPayload, user: IUser) {
+if (!this.client) {
+await this.init();
+}
+assert(this.client, 'Assistant client not setup');
+
+return await this.client.askAi(payload, { id: user.id });
+}
 }
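Putting the controller and service hunks together, the request path for the new endpoint is: POST `/rest/ai/ask-ai` -> `AiController.askAi()` -> `AiService.askAi()` -> the SDK's `AiAssistantClient`. A condensed sketch of that wiring follows (lazy client initialization and error handling elided; see the full hunks above):

```ts
import type { AiAssistantSDK } from '@n8n_io/ai-assistant-sdk';

import { Post, RestController } from '@/decorators';
import { AiAssistantRequest } from '@/requests';
import { AiService } from '@/services/ai.service';

// Condensed wiring: the controller forwards the request body; the service owns
// the AiAssistantClient and relays the payload with the requesting user's id.
@RestController('/ai')
export class AiController {
	constructor(private readonly aiService: AiService) {}

	@Post('/ask-ai')
	async askAi(req: AiAssistantRequest.AskAiPayload): Promise<AiAssistantSDK.AskAiResponsePayload> {
		return await this.aiService.askAi(req.body, req.user);
	}
}
```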
@@ -212,8 +212,8 @@ export class FrontendService {
 banners: {
 dismissed: [],
 },
-ai: {
-enabled: config.getEnv('ai.enabled'),
+askAi: {
+enabled: false,
 },
 workflowHistory: {
 pruneTime: -1,
@@ -274,6 +274,7 @@ export class FrontendService {
 const isS3Available = config.getEnv('binaryDataManager.availableModes').includes('s3');
 const isS3Licensed = this.license.isBinaryDataS3Licensed();
 const isAiAssistantEnabled = this.license.isAiAssistantEnabled();
+const isAskAiEnabled = this.license.isAskAiEnabled();

 this.settings.license.planName = this.license.getPlanName();
 this.settings.license.consumerId = this.license.getConsumerId();
@@ -330,6 +331,10 @@ export class FrontendService {
 this.settings.aiAssistant.enabled = isAiAssistantEnabled;
 }
+
+if (isAskAiEnabled) {
+this.settings.askAi.enabled = isAskAiEnabled;
+}

 this.settings.mfa.enabled = config.get('mfa.enabled');

 this.settings.executionMode = config.getEnv('executions.mode');
@@ -111,7 +111,7 @@ export const defaultSettings: FrontendSettings = {
 mfa: {
 enabled: false,
 },
-ai: {
+askAi: {
 enabled: false,
 },
 workflowHistory: {
@@ -1,30 +1,44 @@
-import type { IRestApiContext, Schema } from '@/Interface';
-import { makeRestApiRequest } from '@/utils/apiUtils';
+import type { IRestApiContext } from '@/Interface';
+import type { AskAiRequest, ChatRequest, ReplaceCodeRequest } from '@/types/assistant.types';
+import { makeRestApiRequest, streamRequest } from '@/utils/apiUtils';
 import type { IDataObject } from 'n8n-workflow';

+export function chatWithAssistant(
+ctx: IRestApiContext,
+payload: ChatRequest.RequestPayload,
+onMessageUpdated: (data: ChatRequest.ResponsePayload) => void,
+onDone: () => void,
+onError: (e: Error) => void,
+): void {
+void streamRequest<ChatRequest.ResponsePayload>(
+ctx,
+'/ai/chat',
+payload,
+onMessageUpdated,
+onDone,
+onError,
+);
+}
+
+export async function replaceCode(
+context: IRestApiContext,
+data: ReplaceCodeRequest.RequestPayload,
+): Promise<ReplaceCodeRequest.ResponsePayload> {
+return await makeRestApiRequest<ReplaceCodeRequest.ResponsePayload>(
+context,
+'POST',
+'/ai/chat/apply-suggestion',
+data,
+);
+}
+
 export async function generateCodeForPrompt(
 ctx: IRestApiContext,
-{
-question,
-context,
-model,
-n8nVersion,
-}: {
-question: string;
-context: {
-schema: Array<{ nodeName: string; schema: Schema }>;
-inputSchema: { nodeName: string; schema: Schema };
-pushRef: string;
-ndvPushRef: string;
-};
-model: string;
-n8nVersion: string;
-},
+{ question, context, forNode }: AskAiRequest.RequestPayload,
 ): Promise<{ code: string }> {
-return await makeRestApiRequest(ctx, 'POST', '/ask-ai', {
+return await makeRestApiRequest(ctx, 'POST', '/ai/ask-ai', {
 question,
 context,
-model,
-n8nVersion,
+forNode,
 } as IDataObject);
 }
@@ -1,32 +0,0 @@
-import type { IRestApiContext } from '@/Interface';
-import type { ChatRequest, ReplaceCodeRequest } from '@/types/assistant.types';
-import { makeRestApiRequest, streamRequest } from '@/utils/apiUtils';
-
-export function chatWithAssistant(
-ctx: IRestApiContext,
-payload: ChatRequest.RequestPayload,
-onMessageUpdated: (data: ChatRequest.ResponsePayload) => void,
-onDone: () => void,
-onError: (e: Error) => void,
-): void {
-void streamRequest<ChatRequest.ResponsePayload>(
-ctx,
-'/ai-assistant/chat',
-payload,
-onMessageUpdated,
-onDone,
-onError,
-);
-}
-
-export async function replaceCode(
-context: IRestApiContext,
-data: ReplaceCodeRequest.RequestPayload,
-): Promise<ReplaceCodeRequest.ResponsePayload> {
-return await makeRestApiRequest<ReplaceCodeRequest.ResponsePayload>(
-context,
-'POST',
-'/ai-assistant/chat/apply-suggestion',
-data,
-);
-}
@@ -7,8 +7,6 @@ import { useI18n } from '@/composables/useI18n';
 import { useToast } from '@/composables/useToast';
 import { useNDVStore } from '@/stores/ndv.store';
 import { getSchemas, getParentNodes } from './utils';
-import { ASK_AI_EXPERIMENT } from '@/constants';
-import { usePostHog } from '@/stores/posthog.store';
 import { useRootStore } from '@/stores/root.store';
 import { useTelemetry } from '@/composables/useTelemetry';
 import { generateCodeForPrompt } from '@/api/ai';
@@ -16,6 +14,8 @@ import { generateCodeForPrompt } from '@/api/ai';
 import { format } from 'prettier';
 import jsParser from 'prettier/plugins/babel';
 import * as estree from 'prettier/plugins/estree';
+import { useSettingsStore } from '@/stores/settings.store';
+import type { AskAiRequest } from '@/types/assistant.types';

 const emit = defineEmits<{
 valueChanged: [value: IUpdateInformation];
@@ -27,8 +27,8 @@ const props = defineProps<{
 path: string;
 }>();

-const posthog = usePostHog();
 const rootStore = useRootStore();
+const settingsStore = useSettingsStore();

 const i18n = useI18n();

@@ -94,13 +94,8 @@ async function onSubmit() {

 try {
 const schemas = getSchemas();
-const version = rootStore.versionCli;
-const model =
-usePostHog().getVariant(ASK_AI_EXPERIMENT.name) === ASK_AI_EXPERIMENT.gpt4
-? 'gpt-4'
-: 'gpt-3.5-turbo-16k';

-const payload = {
+const payload: AskAiRequest.RequestPayload = {
 question: prompt.value,
 context: {
 schema: schemas.parentNodesSchemas,
@@ -108,13 +103,12 @@ async function onSubmit() {
 ndvPushRef: useNDVStore().pushRef,
 pushRef: rootStore.pushRef,
 },
-model,
-n8nVersion: version,
+forNode: 'transform',
 };
 switch (type) {
 case 'askAiCodeGeneration':
 let value;
-if (posthog.isAiEnabled()) {
+if (settingsStore.isAskAiEnabled) {
 const { restApiContext } = useRootStore();
 const { code } = await generateCodeForPrompt(restApiContext, payload);
 value = code;
@@ -16,16 +16,15 @@ import { useI18n } from '@/composables/useI18n';
 import { useMessage } from '@/composables/useMessage';
 import { useToast } from '@/composables/useToast';
 import { useNDVStore } from '@/stores/ndv.store';
-import { usePostHog } from '@/stores/posthog.store';
 import { useRootStore } from '@/stores/root.store';
 import { useWorkflowsStore } from '@/stores/workflows.store';
 import { executionDataToJson } from '@/utils/nodeTypesUtils';
 import {
-ASK_AI_EXPERIMENT,
 ASK_AI_MAX_PROMPT_LENGTH,
 ASK_AI_MIN_PROMPT_LENGTH,
 ASK_AI_LOADING_DURATION_MS,
 } from '@/constants';
+import type { AskAiRequest } from '@/types/assistant.types';

 const emit = defineEmits<{
 submit: [code: string];
@@ -89,7 +88,7 @@ function getParentNodes() {
 return name !== activeNode.name && nodes.findIndex((node) => node.name === name) === i;
 })
 .map((n) => getNodeByName(n.name))
-.filter((n) => n !== null) as INodeUi[];
+.filter((n) => n !== null);
 }

 function getSchemas() {
@@ -156,24 +155,19 @@ async function onSubmit() {

 const rootStore = useRootStore();

-try {
-const version = rootStore.versionCli;
-const model =
-usePostHog().getVariant(ASK_AI_EXPERIMENT.name) === ASK_AI_EXPERIMENT.gpt4
-? 'gpt-4'
-: 'gpt-3.5-turbo-16k';
+const payload: AskAiRequest.RequestPayload = {
+question: prompt.value,
+context: {
+schema: schemas.parentNodesSchemas,
+inputSchema: schemas.inputSchema!,
+ndvPushRef: useNDVStore().pushRef,
+pushRef: rootStore.pushRef,
+},
+forNode: 'code',
+};

-const { code } = await generateCodeForPrompt(restApiContext, {
-question: prompt.value,
-context: {
-schema: schemas.parentNodesSchemas,
-inputSchema: schemas.inputSchema!,
-ndvPushRef: useNDVStore().pushRef,
-pushRef: rootStore.pushRef,
-},
-model,
-n8nVersion: version,
-});
+try {
+const { code } = await generateCodeForPrompt(restApiContext, payload);

 stopLoading();
 emit('replaceCode', code);
@@ -15,7 +15,6 @@ import { type Ref, computed, nextTick, onBeforeUnmount, onMounted, ref, toRaw, w
 import { CODE_NODE_TYPE } from '@/constants';
 import { codeNodeEditorEventBus } from '@/event-bus';
 import { useRootStore } from '@/stores/root.store';
-import { usePostHog } from '@/stores/posthog.store';

 import { useMessage } from '@/composables/useMessage';
 import AskAI from './AskAI/AskAI.vue';
@@ -27,6 +26,7 @@ import { codeNodeEditorTheme } from './theme';
 import { useI18n } from '@/composables/useI18n';
 import { useTelemetry } from '@/composables/useTelemetry';
 import { dropInCodeEditor, mappingDropCursor } from '@/plugins/codemirror/dragAndDrop';
+import { useSettingsStore } from '@/stores/settings.store';

 type Props = {
 mode: CodeExecutionMode;
@@ -67,9 +67,9 @@ const { autocompletionExtension } = useCompleter(() => props.mode, editor);
 const { createLinter } = useLinter(() => props.mode, editor);

 const rootStore = useRootStore();
-const posthog = usePostHog();
 const i18n = useI18n();
 const telemetry = useTelemetry();
+const settingsStore = useSettingsStore();

 onMounted(() => {
 if (!props.isReadOnly) codeNodeEditorEventBus.on('highlightLine', highlightLine);
@@ -146,8 +146,8 @@ onBeforeUnmount(() => {
 if (!props.isReadOnly) codeNodeEditorEventBus.off('highlightLine', highlightLine);
 });

-const aiEnabled = computed(() => {
-return posthog.isAiEnabled() && props.language === 'javaScript';
+const askAiEnabled = computed(() => {
+return settingsStore.isAskAiEnabled && props.language === 'javaScript';
 });

 const placeholder = computed(() => {
@@ -218,7 +218,7 @@ watch(
 },
 );
 watch(
-aiEnabled,
+askAiEnabled,
 async (isEnabled) => {
 if (isEnabled && !props.modelValue) {
 emit('update:modelValue', placeholder.value);
@@ -391,7 +391,7 @@ async function onDrop(value: string, event: MouseEvent) {
 @mouseout="onMouseOut"
 >
 <el-tabs
-v-if="aiEnabled"
+v-if="askAiEnabled"
 ref="tabs"
 v-model="activeTab"
 type="card"
@@ -43,7 +43,7 @@ import {
 type Themed,
 } from 'n8n-workflow';
 import { useCanvasStore } from '@/stores/canvas.store';
-import { usePostHog } from '../../../../stores/posthog.store';
+import { useSettingsStore } from '@/stores/settings.store';

 interface ViewStack {
 uuid?: string;
@@ -79,6 +79,7 @@ export const useViewStacks = defineStore('nodeCreatorViewStacks', () => {
 const nodeCreatorStore = useNodeCreatorStore();
 const { getActiveItemIndex } = useKeyboardNavigation();
 const i18n = useI18n();
+const settingsStore = useSettingsStore();

 const viewStacks = ref<ViewStack[]>([]);

@@ -360,8 +361,8 @@ export const useViewStacks = defineStore('nodeCreatorViewStacks', () => {
 const subcategory = stack?.subcategory ?? DEFAULT_SUBCATEGORY;
 let itemsInSubcategory = itemsBySubcategory.value[subcategory];

-const aiEnabled = usePostHog().isAiEnabled();
-if (!aiEnabled) {
+const isAskAiEnabled = settingsStore.isAskAiEnabled;
+if (!isAskAiEnabled) {
 itemsInSubcategory = itemsInSubcategory.filter(
 (item) => item.key !== AI_TRANSFORM_NODE_TYPE,
 );
@@ -21,7 +21,7 @@ import { i18n } from '@/plugins/i18n';
 import { sortBy } from 'lodash-es';
 import * as changeCase from 'change-case';

-import { usePostHog } from '@/stores/posthog.store';
+import { useSettingsStore } from '@/stores/settings.store';

 export function transformNodeType(
 node: SimplifiedNodeType,
@@ -78,8 +78,8 @@ export function sortNodeCreateElements(nodes: INodeCreateElement[]) {
 }

 export function searchNodes(searchFilter: string, items: INodeCreateElement[]) {
-const aiEnabled = usePostHog().isAiEnabled();
-if (!aiEnabled) {
+const askAiEnabled = useSettingsStore().isAskAiEnabled;
+if (!askAiEnabled) {
 items = items.filter((item) => item.key !== AI_TRANSFORM_NODE_TYPE);
 }

@@ -668,13 +668,6 @@ export const KEEP_AUTH_IN_NDV_FOR_NODES = [
 export const MAIN_AUTH_FIELD_NAME = 'authentication';
 export const NODE_RESOURCE_FIELD_NAME = 'resource';

-export const ASK_AI_EXPERIMENT = {
-name: '011_ask_AI',
-control: 'control',
-gpt3: 'gpt3',
-gpt4: 'gpt4',
-};
-
 export const TEMPLATE_CREDENTIAL_SETUP_EXPERIMENT = '017_template_credential_setup_v2';

 export const CANVAS_AUTO_ADD_MANUAL_TRIGGER_EXPERIMENT = {
@@ -701,7 +694,6 @@ export const CREDENTIAL_DOCS_EXPERIMENT = {
 variant: 'variant',
 };
 export const EXPERIMENTS_TO_TRACK = [
-ASK_AI_EXPERIMENT.name,
 TEMPLATE_CREDENTIAL_SETUP_EXPERIMENT,
 CANVAS_AUTO_ADD_MANUAL_TRIGGER_EXPERIMENT.name,
 AI_ASSISTANT_EXPERIMENT.name,
@@ -15,7 +15,7 @@ import { merge } from 'lodash-es';
 import { DEFAULT_POSTHOG_SETTINGS } from './posthog.test';
 import { AI_ASSISTANT_EXPERIMENT, VIEWS } from '@/constants';
 import { reactive } from 'vue';
-import * as chatAPI from '@/api/assistant';
+import * as chatAPI from '@/api/ai';
 import * as telemetryModule from '@/composables/useTelemetry';
 import type { Telemetry } from '@/plugins/telemetry';

@@ -1,4 +1,4 @@
-import { chatWithAssistant, replaceCode } from '@/api/assistant';
+import { chatWithAssistant, replaceCode } from '@/api/ai';
 import {
 VIEWS,
 EDITABLE_CANVAS_VIEWS,
@@ -6,11 +6,7 @@ import { useUsersStore } from '@/stores/users.store';
 import { useRootStore } from '@/stores/root.store';
 import { useSettingsStore } from '@/stores/settings.store';
 import type { FeatureFlags, IDataObject } from 'n8n-workflow';
-import {
-ASK_AI_EXPERIMENT,
-EXPERIMENTS_TO_TRACK,
-LOCAL_STORAGE_EXPERIMENT_OVERRIDES,
-} from '@/constants';
+import { EXPERIMENTS_TO_TRACK, LOCAL_STORAGE_EXPERIMENT_OVERRIDES } from '@/constants';
 import { useDebounce } from '@/composables/useDebounce';
 import { useTelemetry } from '@/composables/useTelemetry';

@@ -42,14 +38,6 @@ export const usePostHog = defineStore('posthog', () => {
 return overrides.value[experiment] ?? featureFlags.value?.[experiment];
 };

-const isAiEnabled = () => {
-const isAiExperimentEnabled = [ASK_AI_EXPERIMENT.gpt3, ASK_AI_EXPERIMENT.gpt4].includes(
-(getVariant(ASK_AI_EXPERIMENT.name) ?? '') as string,
-);
-
-return isAiExperimentEnabled && settingsStore.settings.ai.enabled;
-};
-
 const isVariantEnabled = (experiment: string, variant: string) => {
 return getVariant(experiment) === variant;
 };
@@ -195,7 +183,6 @@ export const usePostHog = defineStore('posthog', () => {

 return {
 init,
-isAiEnabled,
 isFeatureEnabled,
 isVariantEnabled,
 getVariant,
@@ -87,6 +87,8 @@ export const useSettingsStore = defineStore(STORES.SETTINGS, () => {

 const isAiAssistantEnabled = computed(() => settings.value.aiAssistant?.enabled);

+const isAskAiEnabled = computed(() => settings.value.askAi?.enabled);
+
 const showSetupPage = computed(() => userManagement.value.showSetupOnFirstLoad);

 const deploymentType = computed(() => settings.value.deployment?.type || 'default');
@@ -410,6 +412,7 @@ export const useSettingsStore = defineStore(STORES.SETTINGS, () => {
 saveManualExecutions,
 saveDataProgressExecution,
 isCommunityPlan,
+isAskAiEnabled,
 reset,
 testLdapConnection,
 getLdapConfig,
@@ -195,3 +195,16 @@ export namespace ReplaceCodeRequest {
 parameters: INodeParameters;
 }
 }
+
+export namespace AskAiRequest {
+export interface RequestPayload {
+question: string;
+context: {
+schema: ChatRequest.NodeExecutionSchema[];
+inputSchema: ChatRequest.NodeExecutionSchema;
+pushRef: string;
+ndvPushRef: string;
+};
+forNode: 'code' | 'transform';
+}
+}
pnpm-lock.yaml (3847): file diff suppressed because it is too large.