feat(editor): Auto-add LLM chain for new LLM nodes on empty canvas (#10245)

Co-authored-by: JP van Oosten <jp@n8n.io>
oleg authored on 2024-08-05 13:59:02 +02:00, committed by GitHub
parent 42a0b594d6
commit 06419d9483
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
21 changed files with 94 additions and 25 deletions

View file

@@ -37,7 +37,7 @@ export const INSTANCE_MEMBERS = [
 export const MANUAL_TRIGGER_NODE_NAME = 'Manual Trigger';
 export const MANUAL_TRIGGER_NODE_DISPLAY_NAME = 'When clicking Test workflow';
 export const MANUAL_CHAT_TRIGGER_NODE_NAME = 'Chat Trigger';
-export const MANUAL_CHAT_TRIGGER_NODE_DISPLAY_NAME = 'When chat message received';
+export const CHAT_TRIGGER_NODE_DISPLAY_NAME = 'When chat message received';
 export const SCHEDULE_TRIGGER_NODE_NAME = 'Schedule Trigger';
 export const CODE_NODE_NAME = 'Code';
 export const SET_NODE_NAME = 'Set';

View file

@@ -13,7 +13,7 @@ import {
   AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME,
   AI_MEMORY_POSTGRES_NODE_NAME,
   AI_TOOL_CALCULATOR_NODE_NAME,
-  MANUAL_CHAT_TRIGGER_NODE_DISPLAY_NAME,
+  CHAT_TRIGGER_NODE_DISPLAY_NAME,
   MANUAL_CHAT_TRIGGER_NODE_NAME,
   MANUAL_TRIGGER_NODE_DISPLAY_NAME,
   MANUAL_TRIGGER_NODE_NAME,
@@ -148,7 +148,7 @@ function setupTestWorkflow(chatTrigger: boolean = false) {
   if (!chatTrigger) {
     // Remove chat trigger
     WorkflowPage.getters
-      .canvasNodeByName(MANUAL_CHAT_TRIGGER_NODE_DISPLAY_NAME)
+      .canvasNodeByName(CHAT_TRIGGER_NODE_DISPLAY_NAME)
       .find('[data-test-id="delete-node-button"]')
       .click({ force: true });

View file

@@ -10,7 +10,9 @@ import {
   disableNode,
   getExecuteWorkflowButton,
   navigateToNewWorkflowPage,
+  getNodes,
   openNode,
+  getConnectionBySourceAndTarget,
 } from '../composables/workflow';
 import {
   clickCreateNewCredential,
@@ -41,6 +43,7 @@ import {
   AI_TOOL_WIKIPEDIA_NODE_NAME,
   BASIC_LLM_CHAIN_NODE_NAME,
   EDIT_FIELDS_SET_NODE_NAME,
+  CHAT_TRIGGER_NODE_DISPLAY_NAME,
 } from './../constants';

 describe('Langchain Integration', () => {
@@ -331,4 +334,27 @@ describe('Langchain Integration', () => {
     closeManualChatModal();
   });
+
+  it('should auto-add chat trigger and basic LLM chain when adding LLM node', () => {
+    addNodeToCanvas(AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME, true);
+
+    getConnectionBySourceAndTarget(
+      CHAT_TRIGGER_NODE_DISPLAY_NAME,
+      BASIC_LLM_CHAIN_NODE_NAME,
+    ).should('exist');
+    getConnectionBySourceAndTarget(
+      AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME,
+      BASIC_LLM_CHAIN_NODE_NAME,
+    ).should('exist');
+    getNodes().should('have.length', 3);
+  });
+
+  it('should not auto-add nodes if AI nodes are already present', () => {
+    addNodeToCanvas(AGENT_NODE_NAME, true);
+    addNodeToCanvas(AI_LANGUAGE_MODEL_OPENAI_CHAT_MODEL_NODE_NAME, true);
+    getConnectionBySourceAndTarget(CHAT_TRIGGER_NODE_DISPLAY_NAME, AGENT_NODE_NAME).should('exist');
+    getNodes().should('have.length', 3);
+  });
 });

View file

@@ -74,7 +74,7 @@ export class LmChatAnthropic implements INodeType {
     codex: {
       categories: ['AI'],
       subcategories: {
-        AI: ['Language Models'],
+        AI: ['Language Models', 'Root Nodes'],
         'Language Models': ['Chat Models (Recommended)'],
       },
       resources: {
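
Note: the same two-line codex change is repeated for each of the language-model nodes below. For reference, this is roughly the full codex block of a chat-model node such as LmChatAnthropic after the change; it is a sketch assembled from the hunk above, with the type annotation assumed and the unchanged resources section elided.

import type { INodeTypeDescription } from 'n8n-workflow';

// Sketch: codex metadata of a chat-model node after adding the 'Root Nodes' entry.
// The 'Language Models' subcategory key is what the node creator's new
// shouldPrependLLMChain() check (further down in this commit) looks for.
const codex: INodeTypeDescription['codex'] = {
  categories: ['AI'],
  subcategories: {
    AI: ['Language Models', 'Root Nodes'],
    'Language Models': ['Chat Models (Recommended)'],
  },
  // resources: { ... } unchanged and omitted here
};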

View file

@@ -28,7 +28,7 @@ export class LmChatOllama implements INodeType {
     codex: {
       categories: ['AI'],
       subcategories: {
-        AI: ['Language Models'],
+        AI: ['Language Models', 'Root Nodes'],
         'Language Models': ['Chat Models (Recommended)'],
       },
       resources: {

View file

@@ -26,7 +26,7 @@ export class LmChatOpenAi implements INodeType {
     codex: {
       categories: ['AI'],
       subcategories: {
-        AI: ['Language Models'],
+        AI: ['Language Models', 'Root Nodes'],
         'Language Models': ['Chat Models (Recommended)'],
       },
       resources: {

View file

@@ -26,7 +26,7 @@ export class LmCohere implements INodeType {
     codex: {
       categories: ['AI'],
       subcategories: {
-        AI: ['Language Models'],
+        AI: ['Language Models', 'Root Nodes'],
         'Language Models': ['Text Completion Models'],
       },
       resources: {

View file

@@ -27,7 +27,7 @@ export class LmOllama implements INodeType {
     codex: {
       categories: ['AI'],
       subcategories: {
-        AI: ['Language Models'],
+        AI: ['Language Models', 'Root Nodes'],
         'Language Models': ['Text Completion Models'],
       },
       resources: {

View file

@@ -38,7 +38,7 @@ export class LmOpenAi implements INodeType {
     codex: {
       categories: ['AI'],
       subcategories: {
-        AI: ['Language Models'],
+        AI: ['Language Models', 'Root Nodes'],
         'Language Models': ['Text Completion Models'],
       },
       resources: {

View file

@@ -26,7 +26,7 @@ export class LmOpenHuggingFaceInference implements INodeType {
     codex: {
       categories: ['AI'],
       subcategories: {
-        AI: ['Language Models'],
+        AI: ['Language Models', 'Root Nodes'],
         'Language Models': ['Text Completion Models'],
       },
       resources: {

View file

@@ -29,7 +29,7 @@ export class LmChatAwsBedrock implements INodeType {
     codex: {
       categories: ['AI'],
       subcategories: {
-        AI: ['Language Models'],
+        AI: ['Language Models', 'Root Nodes'],
         'Language Models': ['Chat Models (Recommended)'],
       },
       resources: {

View file

@@ -27,7 +27,7 @@ export class LmChatAzureOpenAi implements INodeType {
     codex: {
       categories: ['AI'],
       subcategories: {
-        AI: ['Language Models'],
+        AI: ['Language Models', 'Root Nodes'],
         'Language Models': ['Chat Models (Recommended)'],
       },
       resources: {

View file

@@ -27,7 +27,7 @@ export class LmChatGoogleGemini implements INodeType {
     codex: {
       categories: ['AI'],
       subcategories: {
-        AI: ['Language Models'],
+        AI: ['Language Models', 'Root Nodes'],
         'Language Models': ['Chat Models (Recommended)'],
       },
       resources: {

View file

@@ -25,7 +25,7 @@ export class LmChatGooglePalm implements INodeType {
     codex: {
       categories: ['AI'],
       subcategories: {
-        AI: ['Language Models'],
+        AI: ['Language Models', 'Root Nodes'],
         'Language Models': ['Chat Models (Recommended)'],
       },
       resources: {

View file

@@ -32,7 +32,7 @@ export class LmChatGoogleVertex implements INodeType {
     codex: {
       categories: ['AI'],
       subcategories: {
-        AI: ['Language Models'],
+        AI: ['Language Models', 'Root Nodes'],
         'Language Models': ['Chat Models (Recommended)'],
       },
       resources: {

View file

@@ -26,7 +26,7 @@ export class LmChatGroq implements INodeType {
     codex: {
       categories: ['AI'],
       subcategories: {
-        AI: ['Language Models'],
+        AI: ['Language Models', 'Root Nodes'],
         'Language Models': ['Chat Models (Recommended)'],
       },
       resources: {

View file

@@ -27,7 +27,7 @@ export class LmChatMistralCloud implements INodeType {
     codex: {
       categories: ['AI'],
       subcategories: {
-        AI: ['Language Models'],
+        AI: ['Language Models', 'Root Nodes'],
         'Language Models': ['Chat Models (Recommended)'],
       },
       resources: {

View file

@@ -25,7 +25,7 @@ export class LmGooglePalm implements INodeType {
     codex: {
       categories: ['AI'],
       subcategories: {
-        AI: ['Language Models'],
+        AI: ['Language Models', 'Root Nodes'],
         'Language Models': ['Text Completion Models'],
       },
       resources: {

View file

@@ -1808,8 +1808,8 @@ export type AddedNode = {
 } & Partial<INodeUi>;

 export type AddedNodeConnection = {
-  from: { nodeIndex: number; outputIndex?: number };
-  to: { nodeIndex: number; inputIndex?: number };
+  from: { nodeIndex: number; outputIndex?: number; type?: NodeConnectionType };
+  to: { nodeIndex: number; inputIndex?: number; type?: NodeConnectionType };
 };

 export type AddedNodesAndConnections = {
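
The optional type field is what lets the node creator request a non-main connection for an auto-added node. A minimal usage sketch, based on the useActions change below; the import paths mirror the ones used in this codebase:

import { NodeConnectionType } from 'n8n-workflow';
import type { AddedNodeConnection } from '@/Interface';

// Connect the added LLM node (index 2) into the auto-added LLM Chain (index 1)
// over the AiLanguageModel port; omitting `type` keeps the default Main connection.
const llmToChain: AddedNodeConnection = {
  from: { nodeIndex: 2, type: NodeConnectionType.AiLanguageModel },
  to: { nodeIndex: 1 },
};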

View file

@ -1,5 +1,10 @@
import { computed } from 'vue'; import { computed } from 'vue';
import type { IDataObject, INodeParameters } from 'n8n-workflow'; import {
CHAIN_LLM_LANGCHAIN_NODE_TYPE,
NodeConnectionType,
type IDataObject,
type INodeParameters,
} from 'n8n-workflow';
import type { import type {
ActionTypeDescription, ActionTypeDescription,
AddedNode, AddedNode,
@@ -11,6 +16,7 @@ import type {
 } from '@/Interface';
 import {
   AGENT_NODE_TYPE,
+  AI_CATEGORY_LANGUAGE_MODELS,
   BASIC_CHAIN_NODE_TYPE,
   CHAT_TRIGGER_NODE_TYPE,
   MANUAL_CHAT_TRIGGER_NODE_TYPE,
@@ -37,11 +43,12 @@ import { useExternalHooks } from '@/composables/useExternalHooks';
 import { sortNodeCreateElements, transformNodeType } from '../utils';
 import { useI18n } from '@/composables/useI18n';
+import { useCanvasStore } from '@/stores/canvas.store';

 export const useActions = () => {
   const nodeCreatorStore = useNodeCreatorStore();
+  const nodeTypesStore = useNodeTypesStore();
   const i18n = useI18n();

   const singleNodeOpenSources = [
     NODE_CREATOR_OPEN_SOURCES.PLUS_ENDPOINT,
     NODE_CREATOR_OPEN_SOURCES.NODE_CONNECTION_ACTION,
@@ -216,6 +223,19 @@
     return isCompatibleNode && isChatTriggerMissing;
   }

+  // AI-226: Prepend LLM Chain node when adding a language model
+  function shouldPrependLLMChain(addedNodes: AddedNode[]): boolean {
+    const canvasHasAINodes = useCanvasStore().aiNodes.length > 0;
+    if (canvasHasAINodes) return false;
+
+    return addedNodes.some((node) => {
+      const nodeType = nodeTypesStore.getNodeType(node.type);
+      return Object.keys(nodeType?.codex?.subcategories ?? {}).includes(
+        AI_CATEGORY_LANGUAGE_MODELS,
+      );
+    });
+  }
+
   function getAddedNodesAndConnections(addedNodes: AddedNode[]): AddedNodesAndConnections {
     if (addedNodes.length === 0) {
       return { nodes: [], connections: [] };
@@ -230,7 +250,14 @@
       nodeToAutoOpen.openDetail = true;
     }

-    if (shouldPrependChatTrigger(addedNodes)) {
+    if (shouldPrependLLMChain(addedNodes) || shouldPrependChatTrigger(addedNodes)) {
+      if (shouldPrependLLMChain(addedNodes)) {
+        addedNodes.unshift({ type: CHAIN_LLM_LANGCHAIN_NODE_TYPE, isAutoAdd: true });
+        connections.push({
+          from: { nodeIndex: 2, type: NodeConnectionType.AiLanguageModel },
+          to: { nodeIndex: 1 },
+        });
+      }
       addedNodes.unshift({ type: CHAT_TRIGGER_NODE_TYPE, isAutoAdd: true });
       connections.push({
         from: { nodeIndex: 0 },
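
With both branches active, the unshift order for a single language-model node dropped on an empty canvas ends up as index 0 = Chat Trigger, index 1 = Basic LLM Chain, index 2 = the model node, which is why the model-to-chain connection is declared as nodeIndex 2 to nodeIndex 1. A rough sketch of the value getAddedNodesAndConnections would return in that case; the literal node-type strings and the trigger-to-chain target index (cut off in the hunk above) are assumptions:

import { NodeConnectionType } from 'n8n-workflow';
import type { AddedNodesAndConnections } from '@/Interface';

// Illustration: adding an OpenAI Chat Model to an empty canvas.
const result: AddedNodesAndConnections = {
  nodes: [
    { type: '@n8n/n8n-nodes-langchain.chatTrigger', isAutoAdd: true }, // index 0
    { type: '@n8n/n8n-nodes-langchain.chainLlm', isAutoAdd: true },    // index 1
    { type: '@n8n/n8n-nodes-langchain.lmChatOpenAi' },                 // index 2: the node the user picked
  ],
  connections: [
    // model -> chain over the AiLanguageModel port (pushed by the LLM-chain branch)
    { from: { nodeIndex: 2, type: NodeConnectionType.AiLanguageModel }, to: { nodeIndex: 1 } },
    // trigger -> chain over the default Main port (pushed by the chat-trigger branch)
    { from: { nodeIndex: 0 }, to: { nodeIndex: 1 } },
  ],
};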

View file

@@ -610,7 +610,7 @@ export default defineComponent({
       return this.workflowsStore.getWorkflowExecution;
     },
     workflowRunning(): boolean {
-      return this.uiStore.isActionActive['workflowRunning'];
+      return this.uiStore.isActionActive.workflowRunning;
     },
     currentWorkflow(): string {
       return this.$route.params.name?.toString() || this.workflowsStore.workflowId;
@@ -4428,7 +4428,7 @@ export default defineComponent({
         from.outputIndex ?? 0,
         toNode.name,
         to.inputIndex ?? 0,
-        NodeConnectionType.Main,
+        from.type ?? NodeConnectionType.Main,
       );
     }
@@ -4449,6 +4449,22 @@
         });
       }

+      const lastNodeType = this.nodeTypesStore.getNodeType(lastAddedNode.type);
+      const isSubNode = NodeHelpers.isSubNodeType(lastNodeType);
+
+      // When adding a sub-node and more than one node is added at a time, it must mean that the
+      // sub-node is connected to a root node, so we adjust the sub-node's position to make it
+      // appear in the correct place in relation to the root node
+      if (isSubNode && nodes.length > 1) {
+        this.onMoveNode({
+          nodeName: lastAddedNode.name,
+          position: [
+            lastAddedNode.position[0] - NodeViewUtils.NODE_SIZE * 2.5,
+            lastAddedNode.position[1] + NodeViewUtils.NODE_SIZE * 1.5,
+          ],
+        });
+      }
+
       this.nodeHelpers.addPinDataConnections(this.workflowsStore.pinnedWorkflowData);
     },
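
The repositioning nudges the auto-connected sub-node down and to the left of its root node. As a standalone sketch of the arithmetic, assuming NodeViewUtils.NODE_SIZE is 100:

// Sketch of the sub-node position adjustment applied above; NODE_SIZE = 100 is an
// assumption standing in for NodeViewUtils.NODE_SIZE.
const NODE_SIZE = 100;

type XYPosition = [number, number];

function adjustSubNodePosition([x, y]: XYPosition): XYPosition {
  // shift 2.5 node widths to the left and 1.5 node heights down,
  // so the sub-node lands underneath the root node it was wired into
  return [x - NODE_SIZE * 2.5, y + NODE_SIZE * 1.5];
}

// e.g. a sub-node dropped at [460, 180] is moved to [210, 330]
console.log(adjustSubNodePosition([460, 180]));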