Mirror of https://github.com/n8n-io/n8n.git (synced 2024-12-23 11:44:06 -08:00)
feat: AI nodes usability fixes + Summarization Chain V2 (#7949)
Fixes:
- Refactor connection snapping when dragging and enable it also for non-main connection types
- Fix propagation of errors from sub-nodes
- Fix chat scrolling when sending/receiving messages
- Prevent empty chat messages
- Fix sub-node selected styles
- Fix output names text overflow

Usability improvements:
- Auto-add manual chat trigger for agents & chain nodes
- Various label and description updates
- Make the output parser input optional for the Basic LLM Chain
- Summarization Chain V2 with a simplified document loader & text chunking mode

#### How to test the change:
Example workflow showcasing the different operation modes of the new summarization chain: [Summarization_V2.json](https://github.com/n8n-io/n8n/files/13599901/Summarization_V2.json)

## Issues fixed
Include links to Github issue or Community forum post or **Linear ticket**:
> Important in order to close automatically and provide context to reviewers

- https://www.notion.so/n8n/David-Langchain-Posthog-notes-7a9294938420403095f4508f1a21d31d
- https://linear.app/n8n/issue/N8N-7070/ux-fixes-batch
- https://linear.app/n8n/issue/N8N-7071/ai-sub-node-bugs

## Review / Merge checklist
- [x] PR title and summary are descriptive. **Remember, the title automatically goes into the changelog. Use `(no-changelog)` otherwise.** ([conventions](https://github.com/n8n-io/n8n/blob/master/.github/pull_request_title_conventions.md))
- [x] [Docs updated](https://github.com/n8n-io/n8n-docs) or follow-up ticket created.
- [ ] Tests included.

> A bug is not considered fixed unless a test is added to prevent it from happening again. A feature is not complete without tests.
>
> *(internal)* You can use Slack commands to trigger [e2e tests](https://www.notion.so/n8n/How-to-use-Test-Instances-d65f49dfc51f441ea44367fb6f67eb0a?pvs=4#a39f9e5ba64a48b58a71d81c837e8227) or [deploy test instance](https://www.notion.so/n8n/How-to-use-Test-Instances-d65f49dfc51f441ea44367fb6f67eb0a?pvs=4#f6a177d32bde4b57ae2da0b8e454bfce) or [deploy early access version on Cloud](https://www.notion.so/n8n/Cloudbot-3dbe779836004972b7057bc989526998?pvs=4#fef2d36ab02247e1a0f65a74f6fb534e).

---------

Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
Co-authored-by: Elias Meire <elias@meire.dev>
parent dbd62a4992 · commit dcf12867b3
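The headline change for the Basic LLM Chain and the new Summarization Chain V2 is that their input connections are now computed dynamically: a `getInputs` helper is serialized into the node's `inputs` expression and evaluated against the current parameters, which is how the output parser input becomes optional. Below is a minimal sketch of that pattern; the `getInputs` body is copied from the Basic LLM Chain diff further down, while the `inputsExpression` constant is only an illustration of how the serialized function is embedded.

```ts
import { NodeConnectionType, type IDataObject } from 'n8n-workflow';

// Copied from the Basic LLM Chain diff below: decides which inputs the node
// exposes based on its current parameters. Because the function is embedded
// via `toString()`, it must not reference anything outside its own body.
function getInputs(parameters: IDataObject) {
	const hasOutputParser = parameters?.hasOutputParser;
	const inputs = [
		{ displayName: '', type: NodeConnectionType.Main },
		{
			displayName: 'Model',
			maxConnections: 1,
			type: NodeConnectionType.AiLanguageModel,
			required: true,
		},
	];

	// Versions before 1.2 have no `hasOutputParser` flag, so they always keep the input
	if (hasOutputParser === undefined || hasOutputParser === true) {
		inputs.push({ displayName: 'Output Parser', type: NodeConnectionType.AiOutputParser });
	}
	return inputs;
}

// Illustration only: the node description assigns this expression string to its
// `inputs` property, and n8n evaluates it against the node's parameters on the canvas.
const inputsExpression = `={{ ((parameter) => { ${getInputs.toString()}; return getInputs(parameter) })($parameter) }}`;
```

Summarization Chain V2 uses the same trick to add a Document input in document-loader mode and an optional Text Splitter input in advanced chunking mode.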
@@ -64,7 +64,7 @@ export class OpenAiAssistant implements INodeType {
default: 'existing',
options: [
{
name: 'Create New Assistant',
name: 'Use New Assistant',
value: 'new',
},
{
@@ -94,7 +94,6 @@ export class OpenAiAssistant implements INodeType {
typeOptions: {
rows: 5,
},
required: true,
displayOptions: {
show: {
'/mode': ['new'],
@@ -237,11 +236,28 @@ export class OpenAiAssistant implements INodeType {
value: 'code_interpreter',
},
{
name: 'Retrieval',
name: 'Knowledge Retrieval',
value: 'retrieval',
},
],
},
{
displayName: 'Connect your own custom tools to this node on the canvas',
name: 'noticeTools',
type: 'notice',
default: '',
},
{
displayName:
'Upload files for retrieval using the <a href="https://platform.openai.com/playground" target="_blank">OpenAI website<a/>',
name: 'noticeTools',
type: 'notice',
typeOptions: {
noticeTheme: 'info',
},
displayOptions: { show: { '/nativeTools': ['retrieval'] } },
default: '',
},
{
displayName: 'Options',
name: 'options',
@@ -191,13 +191,33 @@ async function getChain(
return Array.isArray(response) ? response : [response];
}

function getInputs(parameters: IDataObject) {
const hasOutputParser = parameters?.hasOutputParser;
const inputs = [
{ displayName: '', type: NodeConnectionType.Main },
{
displayName: 'Model',
maxConnections: 1,
type: NodeConnectionType.AiLanguageModel,
required: true,
},
];

// If `hasOutputParser` is undefined it must be version 1.1 or earlier so we
// always add the output parser input
if (hasOutputParser === undefined || hasOutputParser === true) {
inputs.push({ displayName: 'Output Parser', type: NodeConnectionType.AiOutputParser });
}
return inputs;
}

export class ChainLlm implements INodeType {
description: INodeTypeDescription = {
displayName: 'Basic LLM Chain',
name: 'chainLlm',
icon: 'fa:link',
group: ['transform'],
version: [1, 1.1],
version: [1, 1.1, 1.2],
description: 'A simple chain to prompt a large language model',
defaults: {
name: 'Basic LLM Chain',
@@ -217,25 +237,11 @@ export class ChainLlm implements INodeType {
],
},
},
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
inputs: [
NodeConnectionType.Main,
{
displayName: 'Model',
maxConnections: 1,
type: NodeConnectionType.AiLanguageModel,
required: true,
},
{
displayName: 'Output Parser',
type: NodeConnectionType.AiOutputParser,
required: false,
},
],
inputs: `={{ ((parameter) => { ${getInputs.toString()}; return getInputs(parameter) })($parameter) }}`,
outputs: [NodeConnectionType.Main],
credentials: [],
properties: [
getTemplateNoticeField(1951),
getTemplateNoticeField(1978),
{
displayName: 'Prompt',
name: 'prompt',
@@ -256,7 +262,7 @@ export class ChainLlm implements INodeType {
default: '={{ $json.chat_input }}',
displayOptions: {
show: {
'@version': [1.1],
'@version': [1.1, 1.2],
},
},
},
@@ -400,6 +406,28 @@ export class ChainLlm implements INodeType {
},
],
},
{
displayName: 'Require Specific Output Format',
name: 'hasOutputParser',
type: 'boolean',
default: false,
displayOptions: {
show: {
'@version': [1.2],
},
},
},
{
displayName: `Connect an <a data-action='openSelectiveNodeCreator' data-action-parameter-connectiontype='${NodeConnectionType.AiOutputParser}'>output parser</a> on the canvas to specify the output format you require`,
name: 'notice',
type: 'notice',
default: '',
displayOptions: {
show: {
hasOutputParser: [true],
},
},
},
],
};
@@ -413,10 +441,14 @@ export class ChainLlm implements INodeType {
0,
)) as BaseLanguageModel;

const outputParsers = (await this.getInputConnectionData(
NodeConnectionType.AiOutputParser,
0,
)) as BaseOutputParser[];
let outputParsers: BaseOutputParser[] = [];

if (this.getNodeParameter('hasOutputParser', 0, true) === true) {
outputParsers = (await this.getInputConnectionData(
NodeConnectionType.AiOutputParser,
0,
)) as BaseOutputParser[];
}

for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
const prompt = this.getNodeParameter('prompt', itemIndex) as string;
@ -1,277 +1,39 @@
|
|||
import {
|
||||
NodeConnectionType,
|
||||
type IExecuteFunctions,
|
||||
type INodeExecutionData,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
} from 'n8n-workflow';
|
||||
import type { INodeTypeBaseDescription, IVersionedNodeType } from 'n8n-workflow';
|
||||
import { VersionedNodeType } from 'n8n-workflow';
|
||||
|
||||
import type { SummarizationChainParams } from 'langchain/chains';
|
||||
import { loadSummarizationChain } from 'langchain/chains';
|
||||
import type { BaseLanguageModel } from 'langchain/dist/base_language';
|
||||
import type { Document } from 'langchain/document';
|
||||
import { PromptTemplate } from 'langchain/prompts';
|
||||
import { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
|
||||
import { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader';
|
||||
import { getTemplateNoticeField } from '../../../utils/sharedFields';
|
||||
import { REFINE_PROMPT_TEMPLATE, DEFAULT_PROMPT_TEMPLATE } from './prompt';
|
||||
import { ChainSummarizationV1 } from './V1/ChainSummarizationV1.node';
|
||||
import { ChainSummarizationV2 } from './V2/ChainSummarizationV2.node';
|
||||
|
||||
export class ChainSummarization implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
displayName: 'Summarization Chain',
|
||||
name: 'chainSummarization',
|
||||
icon: 'fa:link',
|
||||
group: ['transform'],
|
||||
version: 1,
|
||||
description: 'Transforms text into a concise summary',
|
||||
|
||||
defaults: {
|
||||
name: 'Summarization Chain',
|
||||
color: '#909298',
|
||||
},
|
||||
codex: {
|
||||
alias: ['LangChain'],
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Chains'],
|
||||
export class ChainSummarization extends VersionedNodeType {
|
||||
constructor() {
|
||||
const baseDescription: INodeTypeBaseDescription = {
|
||||
displayName: 'Summarization Chain',
|
||||
name: 'chainSummarization',
|
||||
icon: 'fa:link',
|
||||
group: ['transform'],
|
||||
description: 'Transforms text into a concise summary',
|
||||
codex: {
|
||||
alias: ['LangChain'],
|
||||
categories: ['AI'],
|
||||
subcategories: {
|
||||
AI: ['Chains'],
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.chainsummarization/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
resources: {
|
||||
primaryDocumentation: [
|
||||
{
|
||||
url: 'https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.chainsummarization/',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [
|
||||
NodeConnectionType.Main,
|
||||
{
|
||||
displayName: 'Model',
|
||||
maxConnections: 1,
|
||||
type: NodeConnectionType.AiLanguageModel,
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Document',
|
||||
maxConnections: 1,
|
||||
type: NodeConnectionType.AiDocument,
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
outputs: [NodeConnectionType.Main],
|
||||
credentials: [],
|
||||
properties: [
|
||||
getTemplateNoticeField(1951),
|
||||
{
|
||||
displayName: 'Type',
|
||||
name: 'type',
|
||||
type: 'options',
|
||||
description: 'The type of summarization to run',
|
||||
default: 'map_reduce',
|
||||
options: [
|
||||
{
|
||||
name: 'Map Reduce (Recommended)',
|
||||
value: 'map_reduce',
|
||||
description:
|
||||
'Summarize each document (or chunk) individually, then summarize those summaries',
|
||||
},
|
||||
{
|
||||
name: 'Refine',
|
||||
value: 'refine',
|
||||
description:
|
||||
'Summarize the first document (or chunk). Then update that summary based on the next document (or chunk), and repeat.',
|
||||
},
|
||||
{
|
||||
name: 'Stuff',
|
||||
value: 'stuff',
|
||||
description: 'Pass all documents (or chunks) at once. Ideal for small datasets.',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
default: {},
|
||||
placeholder: 'Add Option',
|
||||
options: [
|
||||
{
|
||||
displayName: 'Final Prompt to Combine',
|
||||
name: 'combineMapPrompt',
|
||||
type: 'string',
|
||||
hint: 'The prompt to combine individual summaries',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/type': ['map_reduce'],
|
||||
},
|
||||
},
|
||||
default: DEFAULT_PROMPT_TEMPLATE,
|
||||
typeOptions: {
|
||||
rows: 6,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Individual Summary Prompt',
|
||||
name: 'prompt',
|
||||
type: 'string',
|
||||
default: DEFAULT_PROMPT_TEMPLATE,
|
||||
hint: 'The prompt to summarize an individual document (or chunk)',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/type': ['map_reduce'],
|
||||
},
|
||||
},
|
||||
typeOptions: {
|
||||
rows: 6,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Prompt',
|
||||
name: 'prompt',
|
||||
type: 'string',
|
||||
default: DEFAULT_PROMPT_TEMPLATE,
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/type': ['stuff'],
|
||||
},
|
||||
},
|
||||
typeOptions: {
|
||||
rows: 6,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Subsequent (Refine) Prompt',
|
||||
name: 'refinePrompt',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/type': ['refine'],
|
||||
},
|
||||
},
|
||||
default: REFINE_PROMPT_TEMPLATE,
|
||||
hint: 'The prompt to refine the summary based on the next document (or chunk)',
|
||||
typeOptions: {
|
||||
rows: 6,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Initial Prompt',
|
||||
name: 'refineQuestionPrompt',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/type': ['refine'],
|
||||
},
|
||||
},
|
||||
default: DEFAULT_PROMPT_TEMPLATE,
|
||||
hint: 'The prompt for the first document (or chunk)',
|
||||
typeOptions: {
|
||||
rows: 6,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
|
||||
this.logger.verbose('Executing Vector Store QA Chain');
|
||||
const type = this.getNodeParameter('type', 0) as 'map_reduce' | 'stuff' | 'refine';
|
||||
|
||||
const model = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiLanguageModel,
|
||||
0,
|
||||
)) as BaseLanguageModel;
|
||||
|
||||
const documentInput = (await this.getInputConnectionData(NodeConnectionType.AiDocument, 0)) as
|
||||
| N8nJsonLoader
|
||||
| Array<Document<Record<string, unknown>>>;
|
||||
|
||||
const options = this.getNodeParameter('options', 0, {}) as {
|
||||
prompt?: string;
|
||||
refineQuestionPrompt?: string;
|
||||
refinePrompt?: string;
|
||||
combineMapPrompt?: string;
|
||||
defaultVersion: 2,
|
||||
};
|
||||
|
||||
const chainArgs: SummarizationChainParams = {
|
||||
type,
|
||||
const nodeVersions: IVersionedNodeType['nodeVersions'] = {
|
||||
1: new ChainSummarizationV1(baseDescription),
|
||||
2: new ChainSummarizationV2(baseDescription),
|
||||
};
|
||||
|
||||
// Map reduce prompt override
|
||||
if (type === 'map_reduce') {
|
||||
const mapReduceArgs = chainArgs as SummarizationChainParams & {
|
||||
type: 'map_reduce';
|
||||
};
|
||||
if (options.combineMapPrompt) {
|
||||
mapReduceArgs.combineMapPrompt = new PromptTemplate({
|
||||
template: options.combineMapPrompt,
|
||||
inputVariables: ['text'],
|
||||
});
|
||||
}
|
||||
if (options.prompt) {
|
||||
mapReduceArgs.combinePrompt = new PromptTemplate({
|
||||
template: options.prompt,
|
||||
inputVariables: ['text'],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Stuff prompt override
|
||||
if (type === 'stuff') {
|
||||
const stuffArgs = chainArgs as SummarizationChainParams & {
|
||||
type: 'stuff';
|
||||
};
|
||||
if (options.prompt) {
|
||||
stuffArgs.prompt = new PromptTemplate({
|
||||
template: options.prompt,
|
||||
inputVariables: ['text'],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Refine prompt override
|
||||
if (type === 'refine') {
|
||||
const refineArgs = chainArgs as SummarizationChainParams & {
|
||||
type: 'refine';
|
||||
};
|
||||
|
||||
if (options.refinePrompt) {
|
||||
refineArgs.refinePrompt = new PromptTemplate({
|
||||
template: options.refinePrompt,
|
||||
inputVariables: ['existing_answer', 'text'],
|
||||
});
|
||||
}
|
||||
|
||||
if (options.refineQuestionPrompt) {
|
||||
refineArgs.questionPrompt = new PromptTemplate({
|
||||
template: options.refineQuestionPrompt,
|
||||
inputVariables: ['text'],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const chain = loadSummarizationChain(model, chainArgs);
|
||||
|
||||
const items = this.getInputData();
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
|
||||
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
|
||||
let processedDocuments: Document[];
|
||||
if (documentInput instanceof N8nJsonLoader || documentInput instanceof N8nBinaryLoader) {
|
||||
processedDocuments = await documentInput.processItem(items[itemIndex], itemIndex);
|
||||
} else {
|
||||
processedDocuments = documentInput;
|
||||
}
|
||||
|
||||
const response = await chain.call({
|
||||
input_documents: processedDocuments,
|
||||
});
|
||||
|
||||
returnData.push({ json: { response } });
|
||||
}
|
||||
|
||||
return this.prepareOutputData(returnData);
|
||||
super(nodeVersions, baseDescription);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,263 @@
|
|||
import {
|
||||
NodeConnectionType,
|
||||
type INodeTypeBaseDescription,
|
||||
type IExecuteFunctions,
|
||||
type INodeExecutionData,
|
||||
type INodeType,
|
||||
type INodeTypeDescription,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import type { SummarizationChainParams } from 'langchain/chains';
|
||||
import { loadSummarizationChain } from 'langchain/chains';
|
||||
import type { BaseLanguageModel } from 'langchain/dist/base_language';
|
||||
import type { Document } from 'langchain/document';
|
||||
import { PromptTemplate } from 'langchain/prompts';
|
||||
import { N8nJsonLoader } from '../../../../utils/N8nJsonLoader';
|
||||
import { N8nBinaryLoader } from '../../../../utils/N8nBinaryLoader';
|
||||
import { getTemplateNoticeField } from '../../../../utils/sharedFields';
|
||||
import { REFINE_PROMPT_TEMPLATE, DEFAULT_PROMPT_TEMPLATE } from '../prompt';
|
||||
|
||||
export class ChainSummarizationV1 implements INodeType {
|
||||
description: INodeTypeDescription;
|
||||
|
||||
constructor(baseDescription: INodeTypeBaseDescription) {
|
||||
this.description = {
|
||||
...baseDescription,
|
||||
version: 1,
|
||||
defaults: {
|
||||
name: 'Summarization Chain',
|
||||
color: '#909298',
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: [
|
||||
NodeConnectionType.Main,
|
||||
{
|
||||
displayName: 'Model',
|
||||
maxConnections: 1,
|
||||
type: NodeConnectionType.AiLanguageModel,
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
displayName: 'Document',
|
||||
maxConnections: 1,
|
||||
type: NodeConnectionType.AiDocument,
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
outputs: [NodeConnectionType.Main],
|
||||
credentials: [],
|
||||
properties: [
|
||||
getTemplateNoticeField(1951),
|
||||
{
|
||||
displayName: 'Type',
|
||||
name: 'type',
|
||||
type: 'options',
|
||||
description: 'The type of summarization to run',
|
||||
default: 'map_reduce',
|
||||
options: [
|
||||
{
|
||||
name: 'Map Reduce (Recommended)',
|
||||
value: 'map_reduce',
|
||||
description:
|
||||
'Summarize each document (or chunk) individually, then summarize those summaries',
|
||||
},
|
||||
{
|
||||
name: 'Refine',
|
||||
value: 'refine',
|
||||
description:
|
||||
'Summarize the first document (or chunk). Then update that summary based on the next document (or chunk), and repeat.',
|
||||
},
|
||||
{
|
||||
name: 'Stuff',
|
||||
value: 'stuff',
|
||||
description: 'Pass all documents (or chunks) at once. Ideal for small datasets.',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
default: {},
|
||||
placeholder: 'Add Option',
|
||||
options: [
|
||||
{
|
||||
displayName: 'Final Prompt to Combine',
|
||||
name: 'combineMapPrompt',
|
||||
type: 'string',
|
||||
hint: 'The prompt to combine individual summaries',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/type': ['map_reduce'],
|
||||
},
|
||||
},
|
||||
default: DEFAULT_PROMPT_TEMPLATE,
|
||||
typeOptions: {
|
||||
rows: 6,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Individual Summary Prompt',
|
||||
name: 'prompt',
|
||||
type: 'string',
|
||||
default: DEFAULT_PROMPT_TEMPLATE,
|
||||
hint: 'The prompt to summarize an individual document (or chunk)',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/type': ['map_reduce'],
|
||||
},
|
||||
},
|
||||
typeOptions: {
|
||||
rows: 6,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Prompt',
|
||||
name: 'prompt',
|
||||
type: 'string',
|
||||
default: DEFAULT_PROMPT_TEMPLATE,
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/type': ['stuff'],
|
||||
},
|
||||
},
|
||||
typeOptions: {
|
||||
rows: 6,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Subsequent (Refine) Prompt',
|
||||
name: 'refinePrompt',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/type': ['refine'],
|
||||
},
|
||||
},
|
||||
default: REFINE_PROMPT_TEMPLATE,
|
||||
hint: 'The prompt to refine the summary based on the next document (or chunk)',
|
||||
typeOptions: {
|
||||
rows: 6,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Initial Prompt',
|
||||
name: 'refineQuestionPrompt',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/type': ['refine'],
|
||||
},
|
||||
},
|
||||
default: DEFAULT_PROMPT_TEMPLATE,
|
||||
hint: 'The prompt for the first document (or chunk)',
|
||||
typeOptions: {
|
||||
rows: 6,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
|
||||
this.logger.verbose('Executing Vector Store QA Chain');
|
||||
const type = this.getNodeParameter('type', 0) as 'map_reduce' | 'stuff' | 'refine';
|
||||
|
||||
const model = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiLanguageModel,
|
||||
0,
|
||||
)) as BaseLanguageModel;
|
||||
|
||||
const documentInput = (await this.getInputConnectionData(NodeConnectionType.AiDocument, 0)) as
|
||||
| N8nJsonLoader
|
||||
| Array<Document<Record<string, unknown>>>;
|
||||
|
||||
const options = this.getNodeParameter('options', 0, {}) as {
|
||||
prompt?: string;
|
||||
refineQuestionPrompt?: string;
|
||||
refinePrompt?: string;
|
||||
combineMapPrompt?: string;
|
||||
};
|
||||
|
||||
const chainArgs: SummarizationChainParams = {
|
||||
type,
|
||||
};
|
||||
|
||||
// Map reduce prompt override
|
||||
if (type === 'map_reduce') {
|
||||
const mapReduceArgs = chainArgs as SummarizationChainParams & {
|
||||
type: 'map_reduce';
|
||||
};
|
||||
if (options.combineMapPrompt) {
|
||||
mapReduceArgs.combineMapPrompt = new PromptTemplate({
|
||||
template: options.combineMapPrompt,
|
||||
inputVariables: ['text'],
|
||||
});
|
||||
}
|
||||
if (options.prompt) {
|
||||
mapReduceArgs.combinePrompt = new PromptTemplate({
|
||||
template: options.prompt,
|
||||
inputVariables: ['text'],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Stuff prompt override
|
||||
if (type === 'stuff') {
|
||||
const stuffArgs = chainArgs as SummarizationChainParams & {
|
||||
type: 'stuff';
|
||||
};
|
||||
if (options.prompt) {
|
||||
stuffArgs.prompt = new PromptTemplate({
|
||||
template: options.prompt,
|
||||
inputVariables: ['text'],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Refine prompt override
|
||||
if (type === 'refine') {
|
||||
const refineArgs = chainArgs as SummarizationChainParams & {
|
||||
type: 'refine';
|
||||
};
|
||||
|
||||
if (options.refinePrompt) {
|
||||
refineArgs.refinePrompt = new PromptTemplate({
|
||||
template: options.refinePrompt,
|
||||
inputVariables: ['existing_answer', 'text'],
|
||||
});
|
||||
}
|
||||
|
||||
if (options.refineQuestionPrompt) {
|
||||
refineArgs.questionPrompt = new PromptTemplate({
|
||||
template: options.refineQuestionPrompt,
|
||||
inputVariables: ['text'],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const chain = loadSummarizationChain(model, chainArgs);
|
||||
|
||||
const items = this.getInputData();
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
|
||||
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
|
||||
let processedDocuments: Document[];
|
||||
if (documentInput instanceof N8nJsonLoader || documentInput instanceof N8nBinaryLoader) {
|
||||
processedDocuments = await documentInput.processItem(items[itemIndex], itemIndex);
|
||||
} else {
|
||||
processedDocuments = documentInput;
|
||||
}
|
||||
|
||||
const response = await chain.call({
|
||||
input_documents: processedDocuments,
|
||||
});
|
||||
|
||||
returnData.push({ json: { response } });
|
||||
}
|
||||
|
||||
return this.prepareOutputData(returnData);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,420 @@
|
|||
import { NodeConnectionType } from 'n8n-workflow';
|
||||
import type {
|
||||
INodeTypeBaseDescription,
|
||||
IExecuteFunctions,
|
||||
INodeExecutionData,
|
||||
INodeType,
|
||||
INodeTypeDescription,
|
||||
IDataObject,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import { loadSummarizationChain } from 'langchain/chains';
|
||||
import type { BaseLanguageModel } from 'langchain/dist/base_language';
|
||||
import type { Document } from 'langchain/document';
|
||||
import type { TextSplitter } from 'langchain/text_splitter';
|
||||
import { RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
|
||||
import { N8nJsonLoader } from '../../../../utils/N8nJsonLoader';
|
||||
import { N8nBinaryLoader } from '../../../../utils/N8nBinaryLoader';
|
||||
import { getTemplateNoticeField } from '../../../../utils/sharedFields';
|
||||
import { REFINE_PROMPT_TEMPLATE, DEFAULT_PROMPT_TEMPLATE } from '../prompt';
|
||||
import { getChainPromptsArgs } from '../helpers';
|
||||
|
||||
function getInputs(parameters: IDataObject) {
|
||||
const chunkingMode = parameters?.chunkingMode;
|
||||
const operationMode = parameters?.operationMode;
|
||||
const inputs = [
|
||||
{ displayName: '', type: NodeConnectionType.Main },
|
||||
{
|
||||
displayName: 'Model',
|
||||
maxConnections: 1,
|
||||
type: NodeConnectionType.AiLanguageModel,
|
||||
required: true,
|
||||
},
|
||||
];
|
||||
|
||||
if (operationMode === 'documentLoader') {
|
||||
inputs.push({
|
||||
displayName: 'Document',
|
||||
type: NodeConnectionType.AiDocument,
|
||||
required: true,
|
||||
maxConnections: 1,
|
||||
});
|
||||
return inputs;
|
||||
}
|
||||
|
||||
if (chunkingMode === 'advanced') {
|
||||
inputs.push({
|
||||
displayName: 'Text Splitter',
|
||||
type: NodeConnectionType.AiTextSplitter,
|
||||
required: false,
|
||||
maxConnections: 1,
|
||||
});
|
||||
return inputs;
|
||||
}
|
||||
return inputs;
|
||||
}
|
||||
|
||||
export class ChainSummarizationV2 implements INodeType {
|
||||
description: INodeTypeDescription;
|
||||
|
||||
constructor(baseDescription: INodeTypeBaseDescription) {
|
||||
this.description = {
|
||||
...baseDescription,
|
||||
version: [2],
|
||||
defaults: {
|
||||
name: 'Summarization Chain',
|
||||
color: '#909298',
|
||||
},
|
||||
// eslint-disable-next-line n8n-nodes-base/node-class-description-inputs-wrong-regular-node
|
||||
inputs: `={{ ((parameter) => { ${getInputs.toString()}; return getInputs(parameter) })($parameter) }}`,
|
||||
outputs: [NodeConnectionType.Main],
|
||||
credentials: [],
|
||||
properties: [
|
||||
getTemplateNoticeField(1951),
|
||||
{
|
||||
displayName: 'Data to Summarize',
|
||||
name: 'operationMode',
|
||||
noDataExpression: true,
|
||||
type: 'options',
|
||||
description: 'How to pass data into the summarization chain',
|
||||
default: 'nodeInputJson',
|
||||
options: [
|
||||
{
|
||||
name: 'Use Node Input (JSON)',
|
||||
value: 'nodeInputJson',
|
||||
description: 'Summarize the JSON data coming into this node from the previous one',
|
||||
},
|
||||
{
|
||||
name: 'Use Node Input (Binary)',
|
||||
value: 'nodeInputBinary',
|
||||
description: 'Summarize the binary data coming into this node from the previous one',
|
||||
},
|
||||
{
|
||||
name: 'Use Document Loader',
|
||||
value: 'documentLoader',
|
||||
description: 'Use a loader sub-node with more configuration options',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Chunking Strategy',
|
||||
name: 'chunkingMode',
|
||||
noDataExpression: true,
|
||||
type: 'options',
|
||||
description: 'Chunk splitting strategy',
|
||||
default: 'simple',
|
||||
options: [
|
||||
{
|
||||
name: 'Simple (Define Below)',
|
||||
value: 'simple',
|
||||
},
|
||||
{
|
||||
name: 'Advanced',
|
||||
value: 'advanced',
|
||||
description: 'Use a splitter sub-node with more configuration options',
|
||||
},
|
||||
],
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/operationMode': ['nodeInputJson', 'nodeInputBinary'],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Characters Per Chunk',
|
||||
name: 'chunkSize',
|
||||
description:
|
||||
'Controls the max size (in terms of number of characters) of the final document chunk',
|
||||
type: 'number',
|
||||
default: 1000,
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/chunkingMode': ['simple'],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Chunk Overlap (Characters)',
|
||||
name: 'chunkOverlap',
|
||||
type: 'number',
|
||||
description: 'Specifies how much characters overlap there should be between chunks',
|
||||
default: 200,
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/chunkingMode': ['simple'],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
default: {},
|
||||
placeholder: 'Add Option',
|
||||
options: [
|
||||
{
|
||||
displayName: 'Input Data Field Name',
|
||||
name: 'binaryDataKey',
|
||||
type: 'string',
|
||||
default: 'data',
|
||||
description:
|
||||
'The name of the field in the agent or chain’s input that contains the binary file to be processed',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/operationMode': ['nodeInputBinary'],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Summarization Method and Prompts',
|
||||
name: 'summarizationMethodAndPrompts',
|
||||
type: 'fixedCollection',
|
||||
default: {
|
||||
values: {
|
||||
summarizationMethod: 'map_reduce',
|
||||
prompt: DEFAULT_PROMPT_TEMPLATE,
|
||||
combineMapPrompt: DEFAULT_PROMPT_TEMPLATE,
|
||||
},
|
||||
},
|
||||
placeholder: 'Add Option',
|
||||
typeOptions: {},
|
||||
options: [
|
||||
{
|
||||
name: 'values',
|
||||
displayName: 'Values',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Summarization Method',
|
||||
name: 'summarizationMethod',
|
||||
type: 'options',
|
||||
description: 'The type of summarization to run',
|
||||
default: 'map_reduce',
|
||||
options: [
|
||||
{
|
||||
name: 'Map Reduce (Recommended)',
|
||||
value: 'map_reduce',
|
||||
description:
|
||||
'Summarize each document (or chunk) individually, then summarize those summaries',
|
||||
},
|
||||
{
|
||||
name: 'Refine',
|
||||
value: 'refine',
|
||||
description:
|
||||
'Summarize the first document (or chunk). Then update that summary based on the next document (or chunk), and repeat.',
|
||||
},
|
||||
{
|
||||
name: 'Stuff',
|
||||
value: 'stuff',
|
||||
description:
|
||||
'Pass all documents (or chunks) at once. Ideal for small datasets.',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
displayName: 'Final Prompt to Combine',
|
||||
name: 'combineMapPrompt',
|
||||
type: 'string',
|
||||
hint: 'The prompt to combine individual summaries',
|
||||
displayOptions: {
|
||||
hide: {
|
||||
'/options.summarizationMethodAndPrompts.values.summarizationMethod': [
|
||||
'stuff',
|
||||
'refine',
|
||||
],
|
||||
},
|
||||
},
|
||||
default: DEFAULT_PROMPT_TEMPLATE,
|
||||
typeOptions: {
|
||||
rows: 9,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Individual Summary Prompt',
|
||||
name: 'prompt',
|
||||
type: 'string',
|
||||
default: DEFAULT_PROMPT_TEMPLATE,
|
||||
hint: 'The prompt to summarize an individual document (or chunk)',
|
||||
displayOptions: {
|
||||
hide: {
|
||||
'/options.summarizationMethodAndPrompts.values.summarizationMethod': [
|
||||
'stuff',
|
||||
'refine',
|
||||
],
|
||||
},
|
||||
},
|
||||
typeOptions: {
|
||||
rows: 9,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Prompt',
|
||||
name: 'prompt',
|
||||
type: 'string',
|
||||
default: DEFAULT_PROMPT_TEMPLATE,
|
||||
displayOptions: {
|
||||
hide: {
|
||||
'/options.summarizationMethodAndPrompts.values.summarizationMethod': [
|
||||
'refine',
|
||||
'map_reduce',
|
||||
],
|
||||
},
|
||||
},
|
||||
typeOptions: {
|
||||
rows: 9,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Subsequent (Refine) Prompt',
|
||||
name: 'refinePrompt',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
hide: {
|
||||
'/options.summarizationMethodAndPrompts.values.summarizationMethod': [
|
||||
'stuff',
|
||||
'map_reduce',
|
||||
],
|
||||
},
|
||||
},
|
||||
default: REFINE_PROMPT_TEMPLATE,
|
||||
hint: 'The prompt to refine the summary based on the next document (or chunk)',
|
||||
typeOptions: {
|
||||
rows: 9,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Initial Prompt',
|
||||
name: 'refineQuestionPrompt',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
hide: {
|
||||
'/options.summarizationMethodAndPrompts.values.summarizationMethod': [
|
||||
'stuff',
|
||||
'map_reduce',
|
||||
],
|
||||
},
|
||||
},
|
||||
default: DEFAULT_PROMPT_TEMPLATE,
|
||||
hint: 'The prompt for the first document (or chunk)',
|
||||
typeOptions: {
|
||||
rows: 9,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
|
||||
this.logger.verbose('Executing Summarization Chain V2');
|
||||
const operationMode = this.getNodeParameter('operationMode', 0, 'nodeInputJson') as
|
||||
| 'nodeInputJson'
|
||||
| 'nodeInputBinary'
|
||||
| 'documentLoader';
|
||||
const chunkingMode = this.getNodeParameter('chunkingMode', 0, 'simple') as
|
||||
| 'simple'
|
||||
| 'advanced';
|
||||
|
||||
const model = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiLanguageModel,
|
||||
0,
|
||||
)) as BaseLanguageModel;
|
||||
|
||||
const items = this.getInputData();
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
|
||||
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
|
||||
const summarizationMethodAndPrompts = this.getNodeParameter(
|
||||
'options.summarizationMethodAndPrompts.values',
|
||||
itemIndex,
|
||||
{},
|
||||
) as {
|
||||
prompt?: string;
|
||||
refineQuestionPrompt?: string;
|
||||
refinePrompt?: string;
|
||||
summarizationMethod: 'map_reduce' | 'stuff' | 'refine';
|
||||
combineMapPrompt?: string;
|
||||
};
|
||||
|
||||
const chainArgs = getChainPromptsArgs(
|
||||
summarizationMethodAndPrompts.summarizationMethod ?? 'map_reduce',
|
||||
summarizationMethodAndPrompts,
|
||||
);
|
||||
|
||||
const chain = loadSummarizationChain(model, chainArgs);
|
||||
const item = items[itemIndex];
|
||||
|
||||
let processedDocuments: Document[];
|
||||
|
||||
// Use dedicated document loader input to load documents
|
||||
if (operationMode === 'documentLoader') {
|
||||
const documentInput = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiDocument,
|
||||
0,
|
||||
)) as N8nJsonLoader | Array<Document<Record<string, unknown>>>;
|
||||
|
||||
const isN8nLoader =
|
||||
documentInput instanceof N8nJsonLoader || documentInput instanceof N8nBinaryLoader;
|
||||
|
||||
processedDocuments = isN8nLoader
|
||||
? await documentInput.processItem(item, itemIndex)
|
||||
: documentInput;
|
||||
|
||||
const response = await chain.call({
|
||||
input_documents: processedDocuments,
|
||||
});
|
||||
|
||||
returnData.push({ json: { response } });
|
||||
}
|
||||
|
||||
// Take the input and use binary or json loader
|
||||
if (['nodeInputJson', 'nodeInputBinary'].includes(operationMode)) {
|
||||
let textSplitter: TextSplitter | undefined;
|
||||
|
||||
switch (chunkingMode) {
|
||||
// In simple mode we use recursive character splitter with default settings
|
||||
case 'simple':
|
||||
const chunkSize = this.getNodeParameter('chunkSize', itemIndex, 1000) as number;
|
||||
const chunkOverlap = this.getNodeParameter('chunkOverlap', itemIndex, 200) as number;
|
||||
|
||||
textSplitter = new RecursiveCharacterTextSplitter({ chunkOverlap, chunkSize });
|
||||
break;
|
||||
|
||||
// In advanced mode user can connect text splitter node so we just retrieve it
|
||||
case 'advanced':
|
||||
textSplitter = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiTextSplitter,
|
||||
0,
|
||||
)) as TextSplitter | undefined;
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
let processor: N8nJsonLoader | N8nBinaryLoader;
|
||||
if (operationMode === 'nodeInputBinary') {
|
||||
const binaryDataKey = this.getNodeParameter(
|
||||
'options.binaryDataKey',
|
||||
itemIndex,
|
||||
'data',
|
||||
) as string;
|
||||
processor = new N8nBinaryLoader(this, 'options.', binaryDataKey, textSplitter);
|
||||
} else {
|
||||
processor = new N8nJsonLoader(this, 'options.', textSplitter);
|
||||
}
|
||||
|
||||
const processedItem = await processor.processItem(item, itemIndex);
|
||||
const response = await chain.call({
|
||||
input_documents: processedItem,
|
||||
});
|
||||
returnData.push({ json: { response } });
|
||||
}
|
||||
}
|
||||
|
||||
return this.prepareOutputData(returnData);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,72 @@
|
|||
import type { SummarizationChainParams } from 'langchain/chains';
|
||||
import { PromptTemplate } from 'langchain/prompts';
|
||||
|
||||
interface ChainTypeOptions {
|
||||
combineMapPrompt?: string;
|
||||
prompt?: string;
|
||||
refinePrompt?: string;
|
||||
refineQuestionPrompt?: string;
|
||||
}
|
||||
|
||||
export function getChainPromptsArgs(
|
||||
type: 'stuff' | 'map_reduce' | 'refine',
|
||||
options: ChainTypeOptions,
|
||||
) {
|
||||
const chainArgs: SummarizationChainParams = {
|
||||
type,
|
||||
};
|
||||
// Map reduce prompt override
|
||||
if (type === 'map_reduce') {
|
||||
const mapReduceArgs = chainArgs as SummarizationChainParams & {
|
||||
type: 'map_reduce';
|
||||
};
|
||||
if (options.combineMapPrompt) {
|
||||
mapReduceArgs.combineMapPrompt = new PromptTemplate({
|
||||
template: options.combineMapPrompt,
|
||||
inputVariables: ['text'],
|
||||
});
|
||||
}
|
||||
if (options.prompt) {
|
||||
mapReduceArgs.combinePrompt = new PromptTemplate({
|
||||
template: options.prompt,
|
||||
inputVariables: ['text'],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Stuff prompt override
|
||||
if (type === 'stuff') {
|
||||
const stuffArgs = chainArgs as SummarizationChainParams & {
|
||||
type: 'stuff';
|
||||
};
|
||||
if (options.prompt) {
|
||||
stuffArgs.prompt = new PromptTemplate({
|
||||
template: options.prompt,
|
||||
inputVariables: ['text'],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Refine prompt override
|
||||
if (type === 'refine') {
|
||||
const refineArgs = chainArgs as SummarizationChainParams & {
|
||||
type: 'refine';
|
||||
};
|
||||
|
||||
if (options.refinePrompt) {
|
||||
refineArgs.refinePrompt = new PromptTemplate({
|
||||
template: options.refinePrompt,
|
||||
inputVariables: ['existing_answer', 'text'],
|
||||
});
|
||||
}
|
||||
|
||||
if (options.refineQuestionPrompt) {
|
||||
refineArgs.questionPrompt = new PromptTemplate({
|
||||
template: options.refineQuestionPrompt,
|
||||
inputVariables: ['text'],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return chainArgs;
|
||||
}
|
|
@ -17,6 +17,7 @@ import { getConnectionHintNoticeField, metadataFilterField } from '../../../util
|
|||
import 'mammoth'; // for docx
|
||||
import 'epub2'; // for epub
|
||||
import 'pdf-parse'; // for pdf
|
||||
import type { TextSplitter } from 'langchain/text_splitter';
|
||||
|
||||
export class DocumentBinaryInputLoader implements INodeType {
|
||||
description: INodeTypeDescription = {
|
||||
|
@ -177,7 +178,13 @@ export class DocumentBinaryInputLoader implements INodeType {
|
|||
|
||||
async supplyData(this: IExecuteFunctions): Promise<SupplyData> {
|
||||
this.logger.verbose('Supply Data for Binary Input Loader');
|
||||
const processor = new N8nBinaryLoader(this);
|
||||
const textSplitter = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiTextSplitter,
|
||||
0,
|
||||
)) as TextSplitter | undefined;
|
||||
|
||||
const binaryDataKey = this.getNodeParameter('binaryDataKey', 0) as string;
|
||||
const processor = new N8nBinaryLoader(this, undefined, binaryDataKey, textSplitter);
|
||||
|
||||
return {
|
||||
response: logWrapper(processor, this),
|
||||
|
|
|
@ -7,6 +7,7 @@ import {
|
|||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import type { TextSplitter } from 'langchain/text_splitter';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { N8nBinaryLoader } from '../../../utils/N8nBinaryLoader';
|
||||
import { metadataFilterField } from '../../../utils/sharedFields';
|
||||
|
@ -257,11 +258,16 @@ export class DocumentDefaultDataLoader implements INodeType {
|
|||
|
||||
async supplyData(this: IExecuteFunctions, itemIndex: number): Promise<SupplyData> {
|
||||
const dataType = this.getNodeParameter('dataType', itemIndex, 'json') as 'json' | 'binary';
|
||||
const textSplitter = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiTextSplitter,
|
||||
0,
|
||||
)) as TextSplitter | undefined;
|
||||
const binaryDataKey = this.getNodeParameter('binaryDataKey', itemIndex, '') as string;
|
||||
|
||||
const processor =
|
||||
dataType === 'binary'
|
||||
? new N8nBinaryLoader(this, 'options.')
|
||||
: new N8nJsonLoader(this, 'options.');
|
||||
? new N8nBinaryLoader(this, 'options.', binaryDataKey, textSplitter)
|
||||
: new N8nJsonLoader(this, 'options.', textSplitter);
|
||||
|
||||
return {
|
||||
response: logWrapper(processor, this),
|
||||
|
|
|
@ -7,6 +7,7 @@ import {
|
|||
type SupplyData,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import type { TextSplitter } from 'langchain/text_splitter';
|
||||
import { logWrapper } from '../../../utils/logWrapper';
|
||||
import { N8nJsonLoader } from '../../../utils/N8nJsonLoader';
|
||||
import { getConnectionHintNoticeField, metadataFilterField } from '../../../utils/sharedFields';
|
||||
|
@ -80,7 +81,12 @@ export class DocumentJsonInputLoader implements INodeType {
|
|||
|
||||
async supplyData(this: IExecuteFunctions): Promise<SupplyData> {
|
||||
this.logger.verbose('Supply Data for JSON Input Loader');
|
||||
const processor = new N8nJsonLoader(this);
|
||||
const textSplitter = (await this.getInputConnectionData(
|
||||
NodeConnectionType.AiTextSplitter,
|
||||
0,
|
||||
)) as TextSplitter | undefined;
|
||||
|
||||
const processor = new N8nJsonLoader(this, undefined, textSplitter);
|
||||
|
||||
return {
|
||||
response: logWrapper(processor, this),
|
||||
|
|
|
@@ -1,5 +1,5 @@
import type { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
import { NodeOperationError, NodeConnectionType, BINARY_ENCODING } from 'n8n-workflow';
import { NodeOperationError, BINARY_ENCODING } from 'n8n-workflow';

import type { TextSplitter } from 'langchain/text_splitter';
import type { Document } from 'langchain/document';
@@ -30,9 +30,15 @@ export class N8nBinaryLoader {
private optionsPrefix: string;

constructor(context: IExecuteFunctions, optionsPrefix = '') {
private binaryDataKey: string;

private textSplitter?: TextSplitter;

constructor(context: IExecuteFunctions, optionsPrefix = '', binaryDataKey = '', textSplitter?: TextSplitter) {
this.context = context;
this.textSplitter = textSplitter;
this.optionsPrefix = optionsPrefix;
this.binaryDataKey = binaryDataKey;
}

async processAll(items?: INodeExecutionData[]): Promise<Document[]> {
@@ -53,17 +59,15 @@ export class N8nBinaryLoader {
const selectedLoader: keyof typeof SUPPORTED_MIME_TYPES = this.context.getNodeParameter(
'loader',
itemIndex,
'auto',
) as keyof typeof SUPPORTED_MIME_TYPES;

const binaryDataKey = this.context.getNodeParameter('binaryDataKey', itemIndex) as string;
const docs: Document[] = [];
const metadata = getMetadataFiltersValues(this.context, itemIndex);

if (!item) return [];

// TODO: Should we support traversing the object to find the binary data?
const binaryData = this.context.helpers.assertBinaryData(itemIndex, binaryDataKey);

const binaryData = this.context.helpers.assertBinaryData(itemIndex, this.binaryDataKey);
const { mimeType } = binaryData;

// Check if loader matches the mime-type of the data
@@ -174,12 +178,8 @@ export class N8nBinaryLoader {
loader = new TextLoader(filePathOrBlob);
}

const textSplitter = (await this.context.getInputConnectionData(
NodeConnectionType.AiTextSplitter,
0,
)) as TextSplitter | undefined;

const loadedDoc = textSplitter ? await loader.loadAndSplit(textSplitter) : await loader.load();
const loadedDoc = this.textSplitter ? await loader.loadAndSplit(this.textSplitter) : await loader.load();

docs.push(...loadedDoc);
@ -1,11 +1,10 @@
|
|||
import {
|
||||
type IExecuteFunctions,
|
||||
type INodeExecutionData,
|
||||
NodeConnectionType,
|
||||
NodeOperationError,
|
||||
} from 'n8n-workflow';
|
||||
|
||||
import type { CharacterTextSplitter } from 'langchain/text_splitter';
|
||||
import type { TextSplitter } from 'langchain/text_splitter';
|
||||
import type { Document } from 'langchain/document';
|
||||
import { JSONLoader } from 'langchain/document_loaders/fs/json';
|
||||
import { TextLoader } from 'langchain/document_loaders/fs/text';
|
||||
|
@ -16,8 +15,11 @@ export class N8nJsonLoader {
|
|||
|
||||
private optionsPrefix: string;
|
||||
|
||||
constructor(context: IExecuteFunctions, optionsPrefix = '') {
|
||||
private textSplitter?: TextSplitter;
|
||||
|
||||
constructor(context: IExecuteFunctions, optionsPrefix = '', textSplitter?: TextSplitter) {
|
||||
this.context = context;
|
||||
this.textSplitter = textSplitter;
|
||||
this.optionsPrefix = optionsPrefix;
|
||||
}
|
||||
|
||||
|
@ -46,11 +48,6 @@ export class N8nJsonLoader {
|
|||
'',
|
||||
) as string;
|
||||
const pointersArray = pointers.split(',').map((pointer) => pointer.trim());
|
||||
|
||||
const textSplitter = (await this.context.getInputConnectionData(
|
||||
NodeConnectionType.AiTextSplitter,
|
||||
0,
|
||||
)) as CharacterTextSplitter | undefined;
|
||||
const metadata = getMetadataFiltersValues(this.context, itemIndex) ?? [];
|
||||
|
||||
if (!item) return [];
|
||||
|
@ -81,8 +78,8 @@ export class N8nJsonLoader {
|
|||
throw new NodeOperationError(this.context.getNode(), 'Document loader is not initialized');
|
||||
}
|
||||
|
||||
const docs = textSplitter
|
||||
? await documentLoader.loadAndSplit(textSplitter)
|
||||
const docs = this.textSplitter
|
||||
? await documentLoader.loadAndSplit(this.textSplitter)
|
||||
: await documentLoader.load();
|
||||
|
||||
if (metadata) {
|
||||
|
|
|
@@ -48,6 +48,8 @@ export async function callMethodAsync<T>(
try {
return await parameters.method.call(this, ...parameters.arguments);
} catch (e) {
// Propagate errors from sub-nodes
if (e.functionality === 'configuration-node') throw e;
const connectedNode = parameters.executeFunctions.getNode();

const error = new NodeOperationError(connectedNode, e, {
@@ -89,6 +91,8 @@ export function callMethodSync<T>(
try {
return parameters.method.call(this, ...parameters.arguments);
} catch (e) {
// Propagate errors from sub-nodes
if (e.functionality === 'configuration-node') throw e;
const connectedNode = parameters.executeFunctions.getNode();
const error = new NodeOperationError(connectedNode, e);
parameters.executeFunctions.addOutputData(
@@ -3282,6 +3282,8 @@ export function getExecuteFunctions(
try {
return await nodeType.supplyData.call(context, itemIndex);
} catch (error) {
// Propagate errors from sub-nodes
if (error.functionality === 'configuration-node') throw error;
if (!(error instanceof ExecutionBaseError)) {
error = new NodeOperationError(connectedNode, error, {
itemIndex,
@ -2,12 +2,14 @@
|
|||
import { useI18n } from '@/composables/useI18n';
|
||||
import { FontAwesomeIcon } from '@fortawesome/vue-fontawesome';
|
||||
import N8nTooltip from '../N8nTooltip';
|
||||
import { ElTag } from 'element-plus';
|
||||
|
||||
export interface Props {
|
||||
active?: boolean;
|
||||
isAi?: boolean;
|
||||
isTrigger?: boolean;
|
||||
description?: string;
|
||||
tag?: string;
|
||||
title: string;
|
||||
showActionArrow?: boolean;
|
||||
}
|
||||
|
@ -35,6 +37,9 @@ const i18n = useI18n();
|
|||
<div>
|
||||
<div :class="$style.details">
|
||||
<span :class="$style.name" v-text="title" data-test-id="node-creator-item-name" />
|
||||
<el-tag v-if="tag" :class="$style.tag" size="small" round type="success">
|
||||
{{ tag }}
|
||||
</el-tag>
|
||||
<font-awesome-icon
|
||||
icon="bolt"
|
||||
v-if="isTrigger"
|
||||
|
@ -82,7 +87,9 @@ const i18n = useI18n();
|
|||
.creatorNode:hover .panelIcon {
|
||||
color: var(--action-arrow-color-hover, var(--color-text-light));
|
||||
}
|
||||
|
||||
.tag {
|
||||
margin-left: var(--spacing-2xs);
|
||||
}
|
||||
.panelIcon {
|
||||
flex-grow: 1;
|
||||
display: flex;
|
||||
|
|
|
@ -67,7 +67,14 @@ export default defineComponent({
|
|||
sanitizeHtml(text: string): string {
|
||||
return sanitizeHtml(text, {
|
||||
allowedAttributes: {
|
||||
a: ['data-key', 'href', 'target', 'data-action', 'data-action-parameter-connectiontype'],
|
||||
a: [
|
||||
'data-key',
|
||||
'href',
|
||||
'target',
|
||||
'data-action',
|
||||
'data-action-parameter-connectiontype',
|
||||
'data-action-parameter-creatorview',
|
||||
],
|
||||
},
|
||||
});
|
||||
},
|
||||
|
|
|
@ -906,6 +906,7 @@ export interface ViewItemProps {
|
|||
title: string;
|
||||
description: string;
|
||||
icon: string;
|
||||
tag?: string;
|
||||
}
|
||||
export interface LabelItemProps {
|
||||
key: string;
|
||||
|
|
|
@@ -10,3 +10,5 @@ window.ResizeObserver =
observe: vi.fn(),
unobserve: vi.fn(),
}));

Element.prototype.scrollIntoView = vi.fn();
@ -906,9 +906,6 @@ export default defineComponent({
|
|||
--node-width: 75px;
|
||||
--node-height: 75px;
|
||||
|
||||
& [class*='node-wrapper--connection-type'] {
|
||||
--configurable-node-options: -10px;
|
||||
}
|
||||
.node-default {
|
||||
.node-options {
|
||||
background: color-mix(in srgb, var(--color-canvas-background) 80%, transparent);
|
||||
|
@ -976,7 +973,6 @@ export default defineComponent({
|
|||
);
|
||||
--configurable-node-icon-offset: 40px;
|
||||
--configurable-node-icon-size: 30px;
|
||||
--configurable-node-options: -10px;
|
||||
|
||||
.node-description {
|
||||
top: calc(50%);
|
||||
|
@ -1004,7 +1000,7 @@ export default defineComponent({
|
|||
}
|
||||
|
||||
.node-options {
|
||||
left: var(--configurable-node-options, 65px);
|
||||
left: 0;
|
||||
height: 25px;
|
||||
}
|
||||
|
||||
|
@ -1053,12 +1049,6 @@ export default defineComponent({
|
|||
.node-wrapper--config & {
|
||||
--node--selected--box-shadow-radius: 4px;
|
||||
border-radius: 60px;
|
||||
background-color: hsla(
|
||||
var(--color-foreground-base-h),
|
||||
60%,
|
||||
var(--color-foreground-base-l),
|
||||
80%
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1442,7 +1432,7 @@ export default defineComponent({
|
|||
|
||||
// Some nodes allow for dynamic connection labels
|
||||
// so we need to make sure the label does not overflow
|
||||
&[data-endpoint-label-length='medium'] {
|
||||
&.node-connection-type-main[data-endpoint-label-length='medium'] {
|
||||
max-width: calc(var(--stalk-size) - (var(--endpoint-size-small)));
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
<n8n-node-creator-node
|
||||
:class="$style.view"
|
||||
:title="view.title"
|
||||
:tag="view.tag"
|
||||
:isTrigger="false"
|
||||
:description="view.description"
|
||||
:showActionArrow="true"
|
||||
|
|
|
@ -10,9 +10,14 @@ import type {
|
|||
LabelCreateElement,
|
||||
} from '@/Interface';
|
||||
import {
|
||||
AGENT_NODE_TYPE,
|
||||
BASIC_CHAIN_NODE_TYPE,
|
||||
MANUAL_CHAT_TRIGGER_NODE_TYPE,
|
||||
MANUAL_TRIGGER_NODE_TYPE,
|
||||
NODE_CREATOR_OPEN_SOURCES,
|
||||
NO_OP_NODE_TYPE,
|
||||
OPEN_AI_ASSISTANT_NODE_TYPE,
|
||||
QA_CHAIN_NODE_TYPE,
|
||||
SCHEDULE_TRIGGER_NODE_TYPE,
|
||||
SPLIT_IN_BATCHES_NODE_TYPE,
|
||||
STICKY_NODE_TYPE,
|
||||
|
@ -34,6 +39,12 @@ export const useActions = () => {
|
|||
const nodeCreatorStore = useNodeCreatorStore();
|
||||
const instance = getCurrentInstance();
|
||||
|
||||
const singleNodeOpenSources = [
|
||||
NODE_CREATOR_OPEN_SOURCES.PLUS_ENDPOINT,
|
||||
NODE_CREATOR_OPEN_SOURCES.NODE_CONNECTION_ACTION,
|
||||
NODE_CREATOR_OPEN_SOURCES.NODE_CONNECTION_DROP,
|
||||
];
|
||||
|
||||
const actionsCategoryLocales = computed(() => {
|
||||
return {
|
||||
actions: instance?.proxy.$locale.baseText('nodeCreator.actionsCategory.actions') ?? '',
|
||||
|
@ -156,11 +167,6 @@ export const useActions = () => {
|
|||
const workflowContainsTrigger = workflowTriggerNodes.length > 0;
|
||||
const isTriggerPanel = selectedView === TRIGGER_NODE_CREATOR_VIEW;
|
||||
const onlyStickyNodes = addedNodes.every((node) => node.type === STICKY_NODE_TYPE);
|
||||
const singleNodeOpenSources = [
|
||||
NODE_CREATOR_OPEN_SOURCES.PLUS_ENDPOINT,
|
||||
NODE_CREATOR_OPEN_SOURCES.NODE_CONNECTION_ACTION,
|
||||
NODE_CREATOR_OPEN_SOURCES.NODE_CONNECTION_DROP,
|
||||
];
|
||||
|
||||
// If the node creator was opened from the plus endpoint, node connection action, or node connection drop
|
||||
// then we do not want to append the manual trigger
|
||||
|
@ -173,6 +179,22 @@ export const useActions = () => {
|
|||
!onlyStickyNodes
|
||||
);
|
||||
}
|
||||
function shouldPrependChatTrigger(addedNodes: AddedNode[]): boolean {
|
||||
const { allNodes } = useWorkflowsStore();
|
||||
|
||||
const COMPATIBLE_CHAT_NODES = [
|
||||
QA_CHAIN_NODE_TYPE,
|
||||
AGENT_NODE_TYPE,
|
||||
BASIC_CHAIN_NODE_TYPE,
|
||||
OPEN_AI_ASSISTANT_NODE_TYPE,
|
||||
];
|
||||
|
||||
const isChatTriggerMissing =
|
||||
allNodes.find((node) => node.type === MANUAL_CHAT_TRIGGER_NODE_TYPE) === undefined;
|
||||
const isCompatibleNode = addedNodes.some((node) => COMPATIBLE_CHAT_NODES.includes(node.type));
|
||||
|
||||
return isCompatibleNode && isChatTriggerMissing;
|
||||
}
|
||||
|
||||
function getAddedNodesAndConnections(addedNodes: AddedNode[]): AddedNodesAndConnections {
|
||||
if (addedNodes.length === 0) {
|
||||
|
@ -188,7 +210,13 @@ export const useActions = () => {
|
|||
nodeToAutoOpen.openDetail = true;
|
||||
}
|
||||
|
||||
if (shouldPrependManualTrigger(addedNodes)) {
|
||||
if (shouldPrependChatTrigger(addedNodes)) {
|
||||
addedNodes.unshift({ type: MANUAL_CHAT_TRIGGER_NODE_TYPE, isAutoAdd: true });
|
||||
connections.push({
|
||||
from: { nodeIndex: 0 },
|
||||
to: { nodeIndex: 1 },
|
||||
});
|
||||
} else if (shouldPrependManualTrigger(addedNodes)) {
|
||||
addedNodes.unshift({ type: MANUAL_TRIGGER_NODE_TYPE, isAutoAdd: true });
|
||||
connections.push({
|
||||
from: { nodeIndex: 0 },
|
||||
|
|
|
@ -76,6 +76,7 @@ export interface NodeViewItem {
|
|||
group?: string[];
|
||||
sections?: NodeViewItemSection[];
|
||||
description?: string;
|
||||
tag?: string;
|
||||
forceIncludeNodes?: string[];
|
||||
};
|
||||
category?: string | string[];
|
||||
|
@ -451,6 +452,7 @@ export function RegularView(nodes: SimplifiedNodeType[]) {
|
|||
title: i18n.baseText('nodeCreator.aiPanel.langchainAiNodes'),
|
||||
icon: 'robot',
|
||||
description: i18n.baseText('nodeCreator.aiPanel.nodesForAi'),
|
||||
tag: i18n.baseText('nodeCreator.aiPanel.newTag'),
|
||||
},
|
||||
});
|
||||
|
||||
|
|
|
@ -88,7 +88,11 @@ const outputTypeParsers: {
|
|||
if (Array.isArray(chatHistory)) {
|
||||
const responseText = chatHistory
|
||||
.map((content: MemoryMessage) => {
|
||||
if (content.type === 'constructor' && content.id?.includes('schema') && content.kwargs) {
|
||||
if (
|
||||
content.type === 'constructor' &&
|
||||
content.id?.includes('messages') &&
|
||||
content.kwargs
|
||||
) {
|
||||
interface MessageContent {
|
||||
type: string;
|
||||
image_url?: {
|
||||
|
|
|
@@ -16,11 +16,12 @@
	>
		<template #content>
			<div v-loading="isLoading" class="workflow-lm-chat" data-test-id="workflow-lm-chat-dialog">
				<div class="messages ignore-key-press" ref="messagesContainer">
				<div class="messages ignore-key-press">
					<div
						v-for="message in messages"
						:key="`${message.executionId}__${message.sender}`"
						:class="['message', message.sender]"
						ref="messageContainer"
					>
						<div :class="['content', message.sender]">
							{{ message.text }}

@@ -80,21 +81,29 @@
						v-model="currentMessage"
						class="message-input"
						type="textarea"
						:minlength="1"
						ref="inputField"
						m
						:placeholder="$locale.baseText('chat.window.chat.placeholder')"
						data-test-id="workflow-chat-input"
						@keydown.stop="updated"
					/>
					<n8n-button
						@click.stop="sendChatMessage(currentMessage)"
						class="send-button"
						:loading="isLoading"
						:label="$locale.baseText('chat.window.chat.sendButtonText')"
						size="large"
						icon="comment"
						type="primary"
						data-test-id="workflow-chat-send-button"
					/>
					<n8n-tooltip :disabled="currentMessage.length > 0">
						<n8n-button
							@click.stop="sendChatMessage(currentMessage)"
							class="send-button"
							:disabled="currentMessage === ''"
							:loading="isLoading"
							:label="$locale.baseText('chat.window.chat.sendButtonText')"
							size="large"
							icon="comment"
							type="primary"
							data-test-id="workflow-chat-send-button"
						/>
						<template #content>
							{{ $locale.baseText('chat.window.chat.provideMessage') }}
						</template>
					</n8n-tooltip>

					<n8n-info-tip class="mt-s">
						{{ $locale.baseText('chatEmbed.infoTip.description') }}

@@ -218,25 +227,22 @@ export default defineComponent({
			}
		},
		async sendChatMessage(message: string) {
			if (this.currentMessage.trim() === '') {
				this.showError(
					new Error(this.$locale.baseText('chat.window.chat.provideMessage')),
					this.$locale.baseText('chat.window.chat.emptyChatMessage'),
				);
				return;
			}
			this.messages.push({
				text: message,
				sender: 'user',
			} as ChatMessage);

			this.currentMessage = '';

			await this.$nextTick();
			this.scrollToLatestMessage();
			await this.startWorkflowWithMessage(message);

			// Scroll to bottom
			const containerRef = this.$refs.messagesContainer as HTMLElement | undefined;
			if (containerRef) {
				// Wait till message got added else it will not scroll correctly
				await this.$nextTick();
				containerRef.scrollTo({
					top: containerRef.scrollHeight,
					behavior: 'smooth',
				});
			}
		},

		setConnectedNode() {

@@ -477,10 +483,20 @@ export default defineComponent({

			void this.$nextTick(() => {
				that.setNode();
				this.scrollToLatestMessage();
			});
		}
	}, 500);
},
scrollToLatestMessage() {
	const containerRef = this.$refs.messageContainer as HTMLElement[] | undefined;
	if (containerRef) {
		containerRef[containerRef.length - 1]?.scrollIntoView({
			behavior: 'smooth',
			block: 'start',
		});
	}
},
closeDialog() {
	this.modalBus.emit('close');
	void this.externalHooks.run('workflowSettings.dialogVisibleChanged', {
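
The chat scrolling fix above stops scrolling the container by `scrollHeight` and instead collects a `ref` on every rendered message and scrolls the last one into view. A minimal framework-free sketch of that pattern (the `.message` selector here is illustrative):

// Minimal sketch of the "scroll to latest message" approach used above.
function scrollToLatestMessage(messageElements: HTMLElement[] | undefined): void {
	if (!messageElements || messageElements.length === 0) return;

	// scrollIntoView on the last element avoids measuring the container's
	// scrollHeight and works even when messages have varying heights.
	messageElements[messageElements.length - 1].scrollIntoView({
		behavior: 'smooth',
		block: 'start',
	});
}

// Call after the DOM has been updated (e.g. after Vue's nextTick),
// so the newly pushed message is already rendered.
scrollToLatestMessage(Array.from(document.querySelectorAll<HTMLElement>('.message')));
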
@@ -2,11 +2,12 @@
 * Creates event listeners for `data-action` attribute to allow for actions to be called from locale without using
 * unsafe onclick attribute
 */
import { reactive, computed, onMounted, onUnmounted, getCurrentInstance } from 'vue';
import { reactive, computed, onMounted, onUnmounted } from 'vue';
import { globalLinkActionsEventBus } from '@/event-bus';

const state = reactive({
	customActions: {} as Record<string, Function>,
	delegatedClickHandler: null as null | ((e: MouseEvent) => void),
});

export default () => {

@@ -56,15 +57,17 @@ export default () => {
	}));

	onMounted(() => {
		const instance = getCurrentInstance();
		if (state.delegatedClickHandler) return;

		state.delegatedClickHandler = delegateClick;
		window.addEventListener('click', delegateClick);

		globalLinkActionsEventBus.on('registerGlobalLinkAction', registerCustomAction);
	});

	onUnmounted(() => {
		const instance = getCurrentInstance();
		window.removeEventListener('click', delegateClick);
		state.delegatedClickHandler = null;

		globalLinkActionsEventBus.off('registerGlobalLinkAction', registerCustomAction);
	});

@@ -127,6 +127,9 @@ export const MICROSOFT_EXCEL_NODE_TYPE = 'n8n-nodes-base.microsoftExcel';
export const MANUAL_TRIGGER_NODE_TYPE = 'n8n-nodes-base.manualTrigger';
export const MANUAL_CHAT_TRIGGER_NODE_TYPE = '@n8n/n8n-nodes-langchain.manualChatTrigger';
export const AGENT_NODE_TYPE = '@n8n/n8n-nodes-langchain.agent';
export const OPEN_AI_ASSISTANT_NODE_TYPE = '@n8n/n8n-nodes-langchain.openAiAssistant';
export const BASIC_CHAIN_NODE_TYPE = '@n8n/n8n-nodes-langchain.chainLlm';
export const QA_CHAIN_NODE_TYPE = '@n8n/n8n-nodes-langchain.chainRetrievalQa';
export const MICROSOFT_TEAMS_NODE_TYPE = 'n8n-nodes-base.microsoftTeams';
export const N8N_NODE_TYPE = 'n8n-nodes-base.n8n';
export const NO_OP_NODE_TYPE = 'n8n-nodes-base.noOp';

@@ -572,7 +572,8 @@ export const workflowHelpers = defineComponent({
					workflow.nodes[nodeName].disabled !== true &&
					workflow.nodes[nodeName].type === WEBHOOK_NODE_TYPE
				) {
					checkWebhook = [nodeName, ...checkWebhook, ...workflow.getChildNodes(nodeName)];
					const childNodes = workflow.getChildNodes(nodeName);
					checkWebhook = [nodeName, ...checkWebhook, ...childNodes];
				}
			}

@@ -582,8 +583,14 @@ export const workflowHelpers = defineComponent({
				// If no webhook nodes got found try to find another trigger node
				const startNode = workflow.getStartNode();
				if (startNode !== undefined) {
					checkNodes = workflow.getChildNodes(startNode.name);
					checkNodes.push(startNode.name);
					checkNodes = [...workflow.getChildNodes(startNode.name), startNode.name];

					// For the short-listed checkNodes, we also need to check them for any
					// connected sub-nodes
					for (const nodeName of checkNodes) {
						const childNodes = workflow.getParentNodes(nodeName, 'ALL_NON_MAIN');
						checkNodes.push(...childNodes);
					}
				}
			}
		}
	}

@@ -83,6 +83,7 @@ export const workflowRun = defineComponent({

			try {
				// Check first if the workflow has any issues before execute it
				this.refreshNodeIssues();
				const issuesExist = this.workflowsStore.nodesIssuesExist;
				if (issuesExist) {
					// If issues exist get all of the issues of all nodes

@@ -112,7 +113,9 @@ export const workflowRun = defineComponent({
				};

				for (const nodeIssue of nodeIssues) {
					errorMessages.push(`<strong>${nodeName}</strong>: ${nodeIssue}`);
					errorMessages.push(
						`<a data-action='openNodeDetail' data-action-parameter-node='${nodeName}'>${nodeName}</a>: ${nodeIssue}`,
					);
					trackNodeIssue.error = trackNodeIssue.error.concat(', ', nodeIssue);
				}
				trackNodeIssues.push(trackNodeIssue);
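
The node-issue toast above now renders each node name as an `<a data-action='openNodeDetail' data-action-parameter-node='…'>` link, and the composable earlier in this diff resolves such links through a single delegated window click listener instead of inline onclick handlers. A simplified standalone sketch of that delegation pattern (illustrative only; the attribute-to-parameter mapping shown here is an assumption, not copied from the composable):

// Simplified sketch of data-action click delegation (not the n8n implementation).
type LinkAction = (parameters: Record<string, string>) => void;
const customActions: Record<string, LinkAction> = {};

function registerCustomAction(key: string, action: LinkAction): void {
	customActions[key] = action;
}

// One delegated listener instead of per-link onclick attributes.
window.addEventListener('click', (event: MouseEvent) => {
	const target = (event.target as HTMLElement | null)?.closest<HTMLElement>('[data-action]');
	if (!target) return;

	const action = customActions[target.dataset.action ?? ''];
	if (!action) return;

	// Collect data-action-parameter-* attributes, e.g. data-action-parameter-node="My Node"
	// (dataset key "actionParameterNode") becomes { node: 'My Node' }.
	const parameters: Record<string, string> = {};
	for (const [name, value] of Object.entries(target.dataset)) {
		if (name.startsWith('actionParameter') && value !== undefined) {
			parameters[name.slice('actionParameter'.length).toLowerCase()] = value;
		}
	}
	action(parameters);
});

// Example: a click on the error-toast link above would end up here.
registerCustomAction('openNodeDetail', ({ node }) => console.log('open NDV for', node));
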
@@ -140,6 +140,8 @@
	"chat.window.noExecution": "Nothing got executed yet",
	"chat.window.chat.placeholder": "Type in message",
	"chat.window.chat.sendButtonText": "Send",
	"chat.window.chat.provideMessage": "Please provide a message",
	"chat.window.chat.emptyChatMessage": "Empty chat message",
	"chat.window.chat.chatMessageOptions.reuseMessage": "Reuse Message",
	"chat.window.chat.chatMessageOptions.repostMessage": "Repost Message",
	"chat.window.chat.chatMessageOptions.executionId": "Execution ID",

@@ -925,6 +927,7 @@
	"nodeCreator.aiPanel.aiOtherNodesDescription": "Embeddings, Vector Stores, LLMs and other AI nodes",
	"nodeCreator.aiPanel.selectAiNode": "Select an AI Node to add to your workflow",
	"nodeCreator.aiPanel.nodesForAi": "Build autonomous agents, summarize or interrogate documents, etc.",
	"nodeCreator.aiPanel.newTag": "New",
	"nodeCreator.aiPanel.langchainAiNodes": "Advanced AI",
	"nodeCreator.aiPanel.title": "When should this workflow run?",
	"nodeCreator.aiPanel.infoBox": "Check out our <a href=\"/collections/8\" target=\"_blank\">templates</a> for workflow examples and inspiration.",

@@ -282,9 +282,9 @@ export const getInputNameOverlay = (
			label.innerHTML += ' <strong style="color: var(--color-primary)">*</strong>';
		}
		label.classList.add('node-input-endpoint-label');
		label.classList.add(`node-connection-type-${inputName ?? 'main'}`);
		if (inputName !== NodeConnectionType.Main) {
			label.classList.add('node-input-endpoint-label--data');
			label.classList.add(`node-connection-type-${inputName}`);
		}
		return label;
	},

@@ -317,9 +317,9 @@ export const getOutputNameOverlay = (
		if (ep?.__meta?.endpointLabelLength) {
			label.setAttribute('data-endpoint-label-length', ep?.__meta?.endpointLabelLength);
		}
		label.classList.add(`node-connection-type-${getScope(outputName) ?? 'main'}`);
		if (outputName !== NodeConnectionType.Main) {
			label.classList.add('node-output-endpoint-label--data');
			label.classList.add(`node-connection-type-${getScope(outputName)}`);
		}
		if (category) {
			label.classList.add(`node-connection-category-${category}`);

@@ -998,3 +998,61 @@ export const getFixedNodesList = <T extends { position: XYPosition }>(workflowNo

	return nodes;
};

/**
 * Calculates the intersecting distances of the mouse event coordinates with the given element's boundaries,
 * adjusted by the specified offset.
 *
 * @param {Element} element - The DOM element to check against.
 * @param {MouseEvent | TouchEvent} mouseEvent - The mouse or touch event with the coordinates.
 * @param {number} offset - Offset to adjust the element's boundaries.
 * @returns { {x: number | null, y: number | null} | null } Object containing intersecting distances along x and y axes or null if no intersection.
 */
export function calculateElementIntersection(
	element: Element,
	mouseEvent: MouseEvent | TouchEvent,
	offset: number,
): { x: number | null; y: number | null } | null {
	const { top, left, right, bottom } = element.getBoundingClientRect();
	const [x, y] = getMousePosition(mouseEvent);

	let intersectX: number | null = null;
	let intersectY: number | null = null;

	if (x >= left - offset && x <= right + offset) {
		intersectX = Math.min(x - (left - offset), right + offset - x);
	}
	if (y >= top - offset && y <= bottom + offset) {
		intersectY = Math.min(y - (top - offset), bottom + offset - y);
	}

	if (intersectX === null && intersectY === null) return null;

	return { x: intersectX, y: intersectY };
}

/**
 * Checks if the mouse event coordinates intersect with the given element's boundaries,
 * adjusted by the specified offset.
 *
 * @param {Element} element - The DOM element to check against.
 * @param {MouseEvent | TouchEvent} mouseEvent - The mouse or touch event with the coordinates.
 * @param {number} offset - Offset to adjust the element's boundaries.
 * @returns {boolean} True if the mouse coordinates intersect with the element.
 */
export function isElementIntersection(
	element: Element,
	mouseEvent: MouseEvent | TouchEvent,
	offset: number,
): boolean {
	const intersection = calculateElementIntersection(element, mouseEvent, offset);

	if (intersection === null) {
		return false;
	}

	const isWithinVerticalBounds = intersection.y !== null;
	const isWithinHorizontalBounds = intersection.x !== null;

	return isWithinVerticalBounds && isWithinHorizontalBounds;
}
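
These two helpers are what the reworked connection-snapping code further down uses to pick a drop target. A short usage sketch, assuming the helpers above are imported and using the same 50 px offset that appears later in this diff; note that a larger intersection value means the pointer sits deeper inside the padded bounds, which is why candidates are sorted descending:

// Usage sketch: pick the candidate element the pointer intersects most deeply.
function pickClosestCandidate(
	candidates: Element[],
	event: MouseEvent | TouchEvent,
	offset = 50,
): Element | null {
	const scored: Array<{ element: Element; y: number }> = [];

	for (const element of candidates) {
		if (!isElementIntersection(element, event, offset)) continue;
		const intersection = calculateElementIntersection(element, event, offset);
		if (intersection?.y == null) continue;
		// intersection.y is the distance to the nearer padded edge, so a larger
		// value means the pointer is closer to the element's vertical center.
		scored.push({ element, y: intersection.y });
	}

	scored.sort((a, b) => b.y - a.y);
	return scored[0]?.element ?? null;
}

The real canvas code additionally filters candidates by jsPlumb scope, endpoint direction, and node-type compatibility before this distance comparison.
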
@@ -113,6 +113,7 @@
					@mouseenter="showTriggerMissingToltip(true)"
					@mouseleave="showTriggerMissingToltip(false)"
					@click="onRunContainerClick"
					v-if="!isManualChatOnly"
				>
					<keyboard-shortcut-tooltip
						:label="runButtonText"

@@ -676,6 +677,9 @@ export default defineComponent({
		containsTrigger(): boolean {
			return this.triggerNodes.length > 0;
		},
		isManualChatOnly(): boolean {
			return this.containsChatNodes && this.triggerNodes.length === 1;
		},
		containsChatNodes(): boolean {
			return !!this.nodes.find(
				(node) => node.type === MANUAL_CHAT_TRIGGER_NODE_TYPE && node.disabled !== true,

@@ -2526,28 +2530,35 @@ export default defineComponent({
					.catch((e) => {});
			}
		},
		onEventConnectionAbort(connection: Connection) {
		async onEventConnectionAbort(connection: Connection) {
			try {
				if (this.dropPrevented) {
					this.dropPrevented = false;
					return;
				}

				if (this.pullConnActiveNodeName) {
					const sourceNode = this.workflowsStore.getNodeById(connection.parameters.nodeId);
					const connectionType = connection.parameters.type ?? NodeConnectionType.Main;
					const overrideTargetEndpoint = connection?.connector
						?.overrideTargetEndpoint as Endpoint | null;

					if (sourceNode) {
						const sourceNodeName = sourceNode.name;
						const isTarget = connection.parameters.connection === 'target';
						const sourceNodeName = isTarget ? this.pullConnActiveNodeName : sourceNode.name;
						const targetNodeName = isTarget ? sourceNode.name : this.pullConnActiveNodeName;
						const outputIndex = connection.parameters.index;
						NodeViewUtils.resetConnectionAfterPull(connection);
						await this.$nextTick();

						this.connectTwoNodes(
							sourceNodeName,
							outputIndex,
							this.pullConnActiveNodeName,
							0,
							NodeConnectionType.Main,
							targetNodeName,
							overrideTargetEndpoint?.parameters?.index ?? 0,
							connectionType,
						);
						this.pullConnActiveNodeName = null;
						this.dropPrevented = true;
						this.dropPrevented = false;
					}
					return;
				}

@@ -2894,6 +2905,8 @@ export default defineComponent({
			const sourceNode = this.workflowsStore.getNodeById(info.connection.parameters.nodeId);
			const sourceNodeName = sourceNode.name;
			const outputIndex = info.connection.parameters.index;
			const overrideTargetEndpoint = info.connection.connector
				.overrideTargetEndpoint as Endpoint | null;

			if (connectionInfo) {
				this.historyStore.pushCommandToUndo(new RemoveConnectionCommand(connectionInfo));

@@ -2902,7 +2915,7 @@ export default defineComponent({
					sourceNodeName,
					outputIndex,
					this.pullConnActiveNodeName,
					0,
					overrideTargetEndpoint?.parameters?.index ?? 0,
					NodeConnectionType.Main,
				);
				this.pullConnActiveNodeName = null;

@@ -2932,58 +2945,86 @@ export default defineComponent({
			this.pullConnActiveNodeName = null;
			this.pullConnActive = true;
			this.canvasStore.newNodeInsertPosition = null;
			NodeViewUtils.hideConnectionActions(connection);
			NodeViewUtils.resetConnection(connection);

			const nodes = [...document.querySelectorAll('.node-wrapper')];
			const scope = connection.scope as ConnectionTypes;
			const scopedEndpoints = Array.from(
				document.querySelectorAll(`[data-jtk-scope-${scope}=true]`),
			);
			const connectionType = connection.parameters.connection;
			const requiredType = connectionType === 'source' ? 'target' : 'source';

			const filteredEndpoints = scopedEndpoints.filter((el) => {
				const endpoint = el.jtk.endpoint as Endpoint;
				if (!endpoint) return false;

				// Prevent snapping(but not connecting) to the same node
				const isSameNode = endpoint.parameters.nodeId === connection.parameters.nodeId;
				const endpointType = endpoint.parameters.connection;

				return !isSameNode && endpointType === requiredType;
			});

			const onMouseMove = (e: MouseEvent | TouchEvent) => {
				if (!connection) {
					return;
				}

				const element = document.querySelector('.jtk-endpoint.jtk-drag-hover');
				if (element) {
					const endpoint = element.jtk.endpoint;
					NodeViewUtils.showDropConnectionState(connection, endpoint);
					return;
				}
				const intersectingEndpoints = filteredEndpoints
					.filter((element: Element) => {
						const endpoint = element.jtk.endpoint as Endpoint;

				const inputMargin = 24;
				const intersecting = nodes.find((element: Element) => {
					const { top, left, right, bottom } = element.getBoundingClientRect();
					const [x, y] = NodeViewUtils.getMousePosition(e);
					if (top <= y && bottom >= y && left - inputMargin <= x && right >= x) {
						const nodeName = (element as HTMLElement).dataset.name as string;
						const node = this.workflowsStore.getNodeByName(nodeName);
						if (node) {
							const nodeType = this.nodeTypesStore.getNodeType(node.type, node.typeVersion);
						if (element.classList.contains('jtk-floating-endpoint')) {
							return false;
						}
						const isEndpointIntersect = NodeViewUtils.isElementIntersection(element, e, 50);
						const isNodeElementIntersect = NodeViewUtils.isElementIntersection(
							endpoint.element,
							e,
							30,
						);

							const workflow = this.getCurrentWorkflow();
							const workflowNode = workflow.getNode(nodeName);
							const inputs = NodeHelpers.getNodeInputs(workflow, workflowNode!, nodeType);
						if (isEndpointIntersect || isNodeElementIntersect) {
							const node = this.workflowsStore.getNodeById(endpoint.parameters.nodeId);

							if (nodeType && inputs.length === 1) {
								this.pullConnActiveNodeName = node.name;
								const endpointUUID = this.getInputEndpointUUID(
									nodeName,
									connection.parameters.type,
									0,
								);
								if (endpointUUID) {
									const endpoint = this.instance?.getEndpoint(endpointUUID);
							if (node) {
								const nodeType = this.nodeTypesStore.getNodeType(node.type, node.typeVersion);

									NodeViewUtils.showDropConnectionState(connection, endpoint);
								if (!nodeType) return false;

									return true;
								}
								return true;
							}
						}
					}

					return false;
				});
						return false;
					})
					.sort((a, b) => {
						const aEndpointIntersect = NodeViewUtils.calculateElementIntersection(a, e, 50);
						const bEndpointIntersect = NodeViewUtils.calculateElementIntersection(b, e, 50);

				if (!intersecting) {
						// If both intersections are null, treat them as equal
						if (!aEndpointIntersect?.y && !bEndpointIntersect?.y) {
							return 0;
						}

						// If one intersection is null, sort the non-null one first
						if (!aEndpointIntersect?.y) return 1;
						if (!bEndpointIntersect?.y) return -1;

						// Otherwise, sort by ascending Y distance
						return bEndpointIntersect.y - aEndpointIntersect.y;
					});

				if (intersectingEndpoints.length > 0) {
					const intersectingEndpoint = intersectingEndpoints[0];
					const endpoint = intersectingEndpoint.jtk.endpoint as Endpoint;
					const node = this.workflowsStore.getNodeById(endpoint.parameters.nodeId);

					this.pullConnActiveNodeName = node?.name ?? null;

					NodeViewUtils.showDropConnectionState(connection, endpoint);
				} else {
					NodeViewUtils.showPullConnectionState(connection);
					this.pullConnActiveNodeName = null;
				}

@@ -4444,6 +4485,23 @@ export default defineComponent({
				NodeConnectionType.Main,
			);
		}

		const lastAddedNode = this.nodes[this.nodes.length - 1];
		const workflow = this.getCurrentWorkflow();
		const lastNodeInputs = workflow.getParentNodesByDepth(lastAddedNode.name, 1);

		// If the last added node has multiple inputs, move them down
		if (lastNodeInputs.length > 1) {
			lastNodeInputs.slice(1).forEach((node, index) => {
				const nodeUi = this.workflowsStore.getNodeByName(node.name);
				if (!nodeUi) return;

				this.onMoveNode({
					nodeName: nodeUi.name,
					position: [nodeUi.position[0], nodeUi.position[1] + 100 * (index + 1)],
				});
			});
		}
	},

	async saveCurrentWorkflowExternal(callback: () => void) {

@@ -4691,25 +4749,37 @@ export default defineComponent({

			this.registerCustomAction({
				key: 'openSelectiveNodeCreator',
				action: ({ connectiontype, node }: { connectiontype: NodeConnectionType; node: string }) => {
					this.onToggleNodeCreator({
						source: NODE_CREATOR_OPEN_SOURCES.NOTICE_ERROR_MESSAGE,
						createNodeActive: true,
						nodeCreatorView: AI_NODE_CREATOR_VIEW,
					});
				action: async ({
					connectiontype,
					node,
					creatorview,
				}: {
					connectiontype: NodeConnectionType;
					node: string;
					creatorview?: string;
				}) => {
					const nodeName = node ?? this.ndvStore.activeNodeName;
					const nodeData = nodeName ? this.workflowsStore.getNodeByName(nodeName) : null;

					this.ndvStore.activeNodeName = null;
					// Select the node so that the node creator knows which node to connect to
					const nodeData = this.workflowsStore.getNodeByName(node);
					if (connectiontype && nodeData) {
						this.insertNodeAfterSelected({
							index: 0,
							endpointUuid: `${nodeData.id}-input${connectiontype}0`,
							eventSource: NODE_CREATOR_OPEN_SOURCES.NOTICE_ERROR_MESSAGE,
							outputType: connectiontype,
							sourceId: nodeData.id,
						});
					}
					await this.redrawNode(node);
					// Wait for UI to update
					setTimeout(() => {
						if (creatorview) {
							this.onToggleNodeCreator({
								createNodeActive: true,
								nodeCreatorView: creatorview,
							});
						} else if (connectiontype && nodeData) {
							this.insertNodeAfterSelected({
								index: 0,
								endpointUuid: `${nodeData.id}-input${connectiontype}0`,
								eventSource: NODE_CREATOR_OPEN_SOURCES.NOTICE_ERROR_MESSAGE,
								outputType: connectiontype,
								sourceId: nodeData.id,
							});
						}
					}, 0);
				},
			});

@@ -4910,14 +4980,14 @@ export default defineComponent({
	&.connection-drag-scope-active-connection-target {
		// Apply style to compatible output endpoints
		.diamond-output-endpoint[data-jtk-scope-#{$node-type}='true'] {
			transform: scale(1.375) rotate(45deg);
			transform: scale(1.5) rotate(45deg);
		}

		.add-input-endpoint[data-jtk-scope-#{$node-type}='true'] {
			// Apply style to dragged compatible input endpoint
			&.jtk-dragging {
				.add-input-endpoint-default {
					transform: translate(-4px, -4px) scale(1.375);
					transform: translate(-5px, -5px) scale(1.5);
				}
			}

@@ -4939,7 +5009,7 @@ export default defineComponent({
		// Apply style to dragged compatible output endpoint
		.diamond-output-endpoint[data-jtk-scope-#{$node-type}='true'] {
			&.jtk-dragging {
				transform: scale(1.375) rotate(45deg);
				transform: scale(1.5) rotate(45deg);
			}

			// Apply style to non-dragged compatible input endpoints

@@ -4951,7 +5021,7 @@ export default defineComponent({
		// Apply style to compatible output endpoints
		.add-input-endpoint[data-jtk-scope-#{$node-type}='true'] {
			.add-input-endpoint-default {
				transform: translate(-4px, -4px) scale(1.375);
				transform: translate(-5px, -5px) scale(1.5);
			}
		}

@@ -151,7 +151,7 @@ export default defineComponent({
	async mounted() {
		this.scrollToTop();

		if (this.template && (this.template as ITemplatesWorkflowFull).full) {
		if (this.template && this.template.full) {
			this.loading = false;
			return;
		}

@@ -28,6 +28,13 @@ export class OpenAi implements INodeType {
			baseURL: 'https://api.openai.com',
		},
		properties: [
			{
				displayName:
					'For more advanced uses, consider using an <a data-action="openSelectiveNodeCreator" data-action-parameter-creatorview="AI">advanced AI</a> node',
				name: 'noticeAdvanceAi',
				type: 'notice',
				default: '',
			},
			{
				displayName: 'Resource',
				name: 'resource',
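
The notice added above links to the reworked openSelectiveNodeCreator action earlier in this diff, which now also accepts a creatorview parameter. As an illustration only (the attribute-to-argument mapping is assumed, not shown in the diff), the notice is expected to reach the handler roughly like this:

// Illustrative only: how the notice's data-action attributes are expected to map
// onto the arguments destructured in NodeView's registerCustomAction call.
// data-action="openSelectiveNodeCreator"                → action key
// data-action-parameter-creatorview="AI"                → creatorview: 'AI'
// (optionally) data-action-parameter-connectiontype=... → connectiontype
interface OpenSelectiveNodeCreatorParams {
	node?: string;
	connectiontype?: string;
	creatorview?: string;
}

function handleNoticeClick(params: OpenSelectiveNodeCreatorParams): void {
	if (params.creatorview) {
		console.log(`open the node creator with the "${params.creatorview}" view`);
	} else if (params.connectiontype && params.node) {
		console.log(`open the creator for the ${params.connectiontype} input of ${params.node}`);
	}
}

handleNoticeClick({ creatorview: 'AI' }); // what the new OpenAI node notice triggers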