Merge branch 'master' of https://github.com/n8n-io/n8n into node-1419-notion-node-database-page-param-cant-parse-pip-links
commit a46d22d510

.github/pull_request_template.md (vendored): 2 changes
@@ -11,6 +11,8 @@ Photos and videos are recommended.
Include links to **Linear ticket** or Github issue or Community forum post.
Important in order to close *automatically* and provide context to reviewers.
-->

<!-- Use "closes #<issue-number>", "fixes #<issue-number>", or "resolves #<issue-number>" to automatically close issues when the PR is merged. -->


## Review / Merge checklist

@@ -14,6 +14,7 @@ import {
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

const modelField: INodeProperties = {
displayName: 'Model',

@@ -214,6 +215,7 @@ export class LmChatAnthropic implements INodeType {
topK: options.topK,
topP: options.topP,
callbacks: [new N8nLlmTracing(this, { tokensUsageParser })],
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {

@@ -12,6 +12,7 @@ import { ChatOllama } from '@langchain/ollama';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { ollamaModel, ollamaOptions, ollamaDescription } from '../LMOllama/description';
import { N8nLlmTracing } from '../N8nLlmTracing';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmChatOllama implements INodeType {
description: INodeTypeDescription = {

@@ -64,6 +65,7 @@ export class LmChatOllama implements INodeType {
model: modelName,
format: options.format === 'default' ? undefined : options.format,
callbacks: [new N8nLlmTracing(this)],
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {

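Every model node in this PR receives the same two additions: an N8nLlmTracing callback handler and a makeN8nLlmFailedAttemptHandler(this) hook. A minimal sketch of that wiring, using the ChatOllama client from the hunk above; baseUrl and modelName are placeholders here, not the node's real parameter plumbing:

import { ChatOllama } from '@langchain/ollama';
import type { ISupplyDataFunctions, SupplyData } from 'n8n-workflow';

import { N8nLlmTracing } from '../N8nLlmTracing';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

// Sketch only: baseUrl and modelName stand in for values that the real
// supplyData() reads from node credentials and parameters.
async function supplyOllamaModel(
	this: ISupplyDataFunctions,
	baseUrl: string,
	modelName: string,
): Promise<SupplyData> {
	const model = new ChatOllama({
		baseUrl,
		model: modelName,
		// Streams token usage and generation events into the node's execution data.
		callbacks: [new N8nLlmTracing(this)],
		// Converts retry failures into a NodeApiError attributed to this sub-node.
		onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
	});

	return { response: model };
}
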
@@ -1,19 +1,18 @@
/* eslint-disable n8n-nodes-base/node-dirname-against-convention */

import { ChatOpenAI, type ClientOptions } from '@langchain/openai';
import {
NodeConnectionType,
type INodeType,
type INodeTypeDescription,
type ISupplyDataFunctions,
type SupplyData,
type JsonObject,
NodeApiError,
} from 'n8n-workflow';

import { ChatOpenAI, type ClientOptions } from '@langchain/openai';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
import { N8nLlmTracing } from '../N8nLlmTracing';
import { RateLimitError } from 'openai';
import { getCustomErrorMessage } from '../../vendors/OpenAi/helpers/error-handling';

export class LmChatOpenAi implements INodeType {
description: INodeTypeDescription = {

@@ -276,25 +275,7 @@ export class LmChatOpenAi implements INodeType {
response_format: { type: options.responseFormat },
}
: undefined,
onFailedAttempt: (error: any) => {
// If the error is a rate limit error, we want to handle it differently
// because OpenAI has multiple different rate limit errors
if (error instanceof RateLimitError) {
const errorCode = error?.code;
if (errorCode) {
const customErrorMessage = getCustomErrorMessage(errorCode);

const apiError = new NodeApiError(this.getNode(), error as unknown as JsonObject);
if (customErrorMessage) {
apiError.message = customErrorMessage;
}

throw apiError;
}
}

throw error;
},
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this, openAiFailedAttemptHandler),
});

return {

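The inline RateLimitError branch removed above is replaced by composing two handlers: openAiFailedAttemptHandler (added later in this diff) rewrites known rate-limit codes such as insufficient_quota into friendlier messages, and makeN8nLlmFailedAttemptHandler wraps whatever is thrown into a NodeApiError. A sketch of the resulting construction; apiKey and modelName are placeholders, not the node's actual parameter handling:

import { ChatOpenAI } from '@langchain/openai';
import type { ISupplyDataFunctions } from 'n8n-workflow';

import { openAiFailedAttemptHandler } from '../../vendors/OpenAi/helpers/error-handling';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';
import { N8nLlmTracing } from '../N8nLlmTracing';

// Sketch only: apiKey and modelName stand in for node credentials/parameters.
function createOpenAiChatModel(ctx: ISupplyDataFunctions, apiKey: string, modelName: string) {
	return new ChatOpenAI({
		apiKey,
		model: modelName,
		callbacks: [new N8nLlmTracing(ctx)],
		// The vendor handler runs first and may rewrite the message of known
		// RateLimitError codes; the generic wrapper then turns whatever is thrown,
		// or the final failed attempt, into a NodeApiError flagged as a
		// configuration-node error.
		onFailedAttempt: makeN8nLlmFailedAttemptHandler(ctx, openAiFailedAttemptHandler),
	});
}
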
@@ -10,6 +10,7 @@ import {
import { Cohere } from '@langchain/cohere';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmCohere implements INodeType {
description: INodeTypeDescription = {

@@ -99,6 +100,7 @@ export class LmCohere implements INodeType {
apiKey: credentials.apiKey as string,
...options,
callbacks: [new N8nLlmTracing(this)],
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {

@@ -11,6 +11,7 @@ import { Ollama } from '@langchain/community/llms/ollama';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
import { ollamaDescription, ollamaModel, ollamaOptions } from './description';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmOllama implements INodeType {
description: INodeTypeDescription = {

@@ -62,6 +63,7 @@ export class LmOllama implements INodeType {
model: modelName,
...options,
callbacks: [new N8nLlmTracing(this)],
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {

@@ -10,6 +10,7 @@ import type {

import { OpenAI, type ClientOptions } from '@langchain/openai';
import { N8nLlmTracing } from '../N8nLlmTracing';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

type LmOpenAiOptions = {
baseURL?: string;

@@ -260,6 +261,7 @@ export class LmOpenAi implements INodeType {
timeout: options.timeout ?? 60000,
maxRetries: options.maxRetries ?? 2,
callbacks: [new N8nLlmTracing(this)],
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {

@@ -10,6 +10,7 @@ import {
import { HuggingFaceInference } from '@langchain/community/llms/hf';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmOpenHuggingFaceInference implements INodeType {
description: INodeTypeDescription = {

@@ -143,6 +144,7 @@ export class LmOpenHuggingFaceInference implements INodeType {
apiKey: credentials.apiKey as string,
...options,
callbacks: [new N8nLlmTracing(this)],
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {

@@ -10,6 +10,7 @@ import {

import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmChatAwsBedrock implements INodeType {
description: INodeTypeDescription = {

@@ -151,6 +152,7 @@ export class LmChatAwsBedrock implements INodeType {
sessionToken: credentials.sessionToken as string,
},
callbacks: [new N8nLlmTracing(this)],
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {

@@ -10,6 +10,7 @@ import {
import { ChatOpenAI } from '@langchain/openai';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmChatAzureOpenAi implements INodeType {
description: INodeTypeDescription = {

@@ -195,6 +196,7 @@ export class LmChatAzureOpenAi implements INodeType {
response_format: { type: options.responseFormat },
}
: undefined,
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {

@@ -11,6 +11,7 @@ import type { SafetySetting } from '@google/generative-ai';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
import { additionalOptions } from '../gemini-common/additional-options';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmChatGoogleGemini implements INodeType {
description: INodeTypeDescription = {

@@ -144,6 +145,7 @@ export class LmChatGoogleGemini implements INodeType {
maxOutputTokens: options.maxOutputTokens,
safetySettings,
callbacks: [new N8nLlmTracing(this)],
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {

@@ -17,6 +17,7 @@ import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
import { additionalOptions } from '../gemini-common/additional-options';
import { makeErrorFromStatus } from './error-handling';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmChatGoogleVertex implements INodeType {
description: INodeTypeDescription = {

@@ -170,7 +171,8 @@ export class LmChatGoogleVertex implements INodeType {
safetySettings,
callbacks: [new N8nLlmTracing(this)],
// Handle ChatVertexAI invocation errors to provide better error messages
onFailedAttempt: (error: any) => {
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this, (error: any) => {
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
const customError = makeErrorFromStatus(Number(error?.response?.status), {
modelName,
});

@@ -180,7 +182,7 @@ export class LmChatGoogleVertex implements INodeType {
}

throw error;
},
}),
});

return {

@@ -10,6 +10,7 @@ import {
import { ChatGroq } from '@langchain/groq';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmChatGroq implements INodeType {
description: INodeTypeDescription = {

@@ -144,6 +145,7 @@ export class LmChatGroq implements INodeType {
maxTokens: options.maxTokensToSample,
temperature: options.temperature,
callbacks: [new N8nLlmTracing(this)],
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {

@@ -11,6 +11,7 @@ import type { ChatMistralAIInput } from '@langchain/mistralai';
import { ChatMistralAI } from '@langchain/mistralai';
import { getConnectionHintNoticeField } from '../../../utils/sharedFields';
import { N8nLlmTracing } from '../N8nLlmTracing';
import { makeN8nLlmFailedAttemptHandler } from '../n8nLlmFailedAttemptHandler';

export class LmChatMistralCloud implements INodeType {
description: INodeTypeDescription = {

@@ -190,6 +191,7 @@ export class LmChatMistralCloud implements INodeType {
modelName,
...options,
callbacks: [new N8nLlmTracing(this)],
onFailedAttempt: makeN8nLlmFailedAttemptHandler(this),
});

return {

@@ -1,17 +1,18 @@
import { BaseCallbackHandler } from '@langchain/core/callbacks/base';
import type { SerializedFields } from '@langchain/core/dist/load/map_keys';
import { getModelNameForTiktoken } from '@langchain/core/language_models/base';
import { encodingForModel } from '@langchain/core/utils/tiktoken';
import type {
Serialized,
SerializedNotImplemented,
SerializedSecret,
} from '@langchain/core/load/serializable';
import type { LLMResult } from '@langchain/core/outputs';
import type { IDataObject, ISupplyDataFunctions } from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';
import { pick } from 'lodash';
import type { BaseMessage } from '@langchain/core/messages';
import type { SerializedFields } from '@langchain/core/dist/load/map_keys';
import type { LLMResult } from '@langchain/core/outputs';
import { encodingForModel } from '@langchain/core/utils/tiktoken';
import type { IDataObject, ISupplyDataFunctions, JsonObject } from 'n8n-workflow';
import { pick } from 'lodash';
import { NodeConnectionType, NodeError, NodeOperationError } from 'n8n-workflow';

import { logAiEvent } from '../../utils/helpers';

type TokensUsageParser = (llmOutput: LLMResult['llmOutput']) => {

@@ -30,6 +31,10 @@ const TIKTOKEN_ESTIMATE_MODEL = 'gpt-4o';
export class N8nLlmTracing extends BaseCallbackHandler {
name = 'N8nLlmTracing';

// This flag makes sure that LangChain will wait for the handlers to finish before continuing
// This is crucial for the handleLLMError handler to work correctly (it should be called before the error is propagated to the root node)
awaitHandlers = true;

connectionType = NodeConnectionType.AiLanguageModel;

promptTokensEstimate = 0;

@@ -135,6 +140,7 @@ export class N8nLlmTracing extends BaseCallbackHandler {
this.executionFunctions.addOutputData(this.connectionType, runDetails.index, [
[{ json: { ...response } }],
]);

logAiEvent(this.executionFunctions, 'ai-llm-generated-output', {
messages: parsedMessages,
options: runDetails.options,

@@ -172,6 +178,8 @@ export class N8nLlmTracing extends BaseCallbackHandler {
runId: string,
parentRunId?: string | undefined,
) {
const runDetails = this.runsMap[runId] ?? { index: Object.keys(this.runsMap).length };

// Filter out non-x- headers to avoid leaking sensitive information in logs
if (typeof error === 'object' && error?.hasOwnProperty('headers')) {
const errorWithHeaders = error as { headers: Record<string, unknown> };

@@ -183,6 +191,19 @@ export class N8nLlmTracing extends BaseCallbackHandler {
});
}

if (error instanceof NodeError) {
this.executionFunctions.addOutputData(this.connectionType, runDetails.index, error);
} else {
// If the error is not a NodeError, we wrap it in a NodeOperationError
this.executionFunctions.addOutputData(
this.connectionType,
runDetails.index,
new NodeOperationError(this.executionFunctions.getNode(), error as JsonObject, {
functionality: 'configuration-node',
}),
);
}

logAiEvent(this.executionFunctions, 'ai-llm-errored', {
error: Object.keys(error).length === 0 ? error.toString() : error,
runId,

@@ -0,0 +1,66 @@
import { n8nDefaultFailedAttemptHandler } from './n8nDefaultFailedAttemptHandler';

class MockHttpError extends Error {
response: { status: number };

constructor(message: string, code: number) {
super(message);
this.response = { status: code };
}
}

describe('n8nDefaultFailedAttemptHandler', () => {
it('should throw error if message starts with "Cancel"', () => {
const error = new Error('Cancel operation');
expect(() => n8nDefaultFailedAttemptHandler(error)).toThrow(error);
});

it('should throw error if message starts with "AbortError"', () => {
const error = new Error('AbortError occurred');
expect(() => n8nDefaultFailedAttemptHandler(error)).toThrow(error);
});

it('should throw error if name is "AbortError"', () => {
class MockAbortError extends Error {
constructor() {
super('Some error');
this.name = 'AbortError';
}
}

const error = new MockAbortError();

expect(() => n8nDefaultFailedAttemptHandler(error)).toThrow(error);
});

it('should throw error if code is "ECONNABORTED"', () => {
class MockAbortError extends Error {
code: string;

constructor() {
super('Some error');
this.code = 'ECONNABORTED';
}
}

const error = new MockAbortError();
expect(() => n8nDefaultFailedAttemptHandler(error)).toThrow(error);
});

it('should throw error if status is in STATUS_NO_RETRY', () => {
const error = new MockHttpError('Some error', 400);
expect(() => n8nDefaultFailedAttemptHandler(error)).toThrow(error);
});

it('should not throw error if status is not in STATUS_NO_RETRY', () => {
const error = new MockHttpError('Some error', 500);
error.response = { status: 500 };

expect(() => n8nDefaultFailedAttemptHandler(error)).not.toThrow();
});

it('should not throw error if no conditions are met', () => {
const error = new Error('Some random error');
expect(() => n8nDefaultFailedAttemptHandler(error)).not.toThrow();
});
});

@@ -0,0 +1,41 @@
const STATUS_NO_RETRY = [
400, // Bad Request
401, // Unauthorized
402, // Payment Required
403, // Forbidden
404, // Not Found
405, // Method Not Allowed
406, // Not Acceptable
407, // Proxy Authentication Required
409, // Conflict
];

/**
 * This function is used as a default handler for failed attempts in all LLMs.
 * It is based on a default handler from the langchain core package.
 * It throws an error when it encounters a known error that should not be retried.
 * @param error
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const n8nDefaultFailedAttemptHandler = (error: any) => {
if (
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access,@typescript-eslint/no-unsafe-call
error?.message?.startsWith?.('Cancel') ||
error?.message?.startsWith?.('AbortError') ||
error?.name === 'AbortError'
) {
throw error;
}

// eslint-disable-next-line @typescript-eslint/no-explicit-any,@typescript-eslint/no-unsafe-member-access
if (error?.code === 'ECONNABORTED') {
throw error;
}

const status =
// eslint-disable-next-line @typescript-eslint/no-explicit-any,@typescript-eslint/no-unsafe-member-access
error?.response?.status ?? error?.status;
if (status && STATUS_NO_RETRY.includes(+status)) {
throw error;
}
};

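To make the retry semantics concrete: the handler either returns (the caller keeps retrying) or rethrows (retrying is pointless). A small illustrative check, mirroring the error shapes used in the unit tests above rather than any production call site:

import { n8nDefaultFailedAttemptHandler } from './n8nDefaultFailedAttemptHandler';

// Hypothetical errors shaped like HTTP client failures, as in the tests above.
const badRequest = Object.assign(new Error('Bad request'), { response: { status: 400 } });
const serverError = Object.assign(new Error('Server error'), { response: { status: 500 } });

try {
	n8nDefaultFailedAttemptHandler(badRequest); // 400 is in STATUS_NO_RETRY: rethrown
} catch {
	// LangChain's AsyncCaller stops retrying at this point.
}

n8nDefaultFailedAttemptHandler(serverError); // 500 is not listed: returns, retries continue
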
@@ -0,0 +1,65 @@
import { mock } from 'jest-mock-extended';
import type { ISupplyDataFunctions } from 'n8n-workflow';
import { ApplicationError, NodeApiError } from 'n8n-workflow';

import { makeN8nLlmFailedAttemptHandler } from './n8nLlmFailedAttemptHandler';

describe('makeN8nLlmFailedAttemptHandler', () => {
const ctx = mock<ISupplyDataFunctions>({
getNode: jest.fn(),
});

it('should throw a wrapped error, when NO custom handler is provided', () => {
const handler = makeN8nLlmFailedAttemptHandler(ctx);

expect(() => handler(new Error('Test error'))).toThrow(NodeApiError);
});

it('should throw wrapped error when custom handler is provided', () => {
const customHandler = jest.fn();
const handler = makeN8nLlmFailedAttemptHandler(ctx, customHandler);

expect(() => handler(new Error('Test error'))).toThrow(NodeApiError);
expect(customHandler).toHaveBeenCalled();
});

it('should throw wrapped exception from custom handler', () => {
const customHandler = jest.fn(() => {
throw new ApplicationError('Custom handler error');
});
const handler = makeN8nLlmFailedAttemptHandler(ctx, customHandler);

expect(() => handler(new Error('Test error'))).toThrow('Custom handler error');
expect(customHandler).toHaveBeenCalled();
});

it('should not throw if retries are left', () => {
const customHandler = jest.fn();
const handler = makeN8nLlmFailedAttemptHandler(ctx, customHandler);

const error = new Error('Test error');
(error as any).retriesLeft = 1;

expect(() => handler(error)).not.toThrow();
});

it('should throw NodeApiError if no retries are left', () => {
const handler = makeN8nLlmFailedAttemptHandler(ctx);

const error = new Error('Test error');
(error as any).retriesLeft = 0;

expect(() => handler(error)).toThrow(NodeApiError);
});

it('should throw NodeApiError if no retries are left with custom handler', () => {
const customHandler = jest.fn();
const handler = makeN8nLlmFailedAttemptHandler(ctx, customHandler);

const error = new Error('Test error');
(error as any).retriesLeft = 0;

expect(() => handler(error)).toThrow(NodeApiError);
expect(customHandler).toHaveBeenCalled();
});
});

@@ -0,0 +1,46 @@
import type { FailedAttemptHandler } from '@langchain/core/dist/utils/async_caller';
import type { ISupplyDataFunctions, JsonObject } from 'n8n-workflow';
import { NodeApiError } from 'n8n-workflow';

import { n8nDefaultFailedAttemptHandler } from './n8nDefaultFailedAttemptHandler';

/**
 * This function returns a custom failed attempt handler for use with LangChain models.
 * It first tries to use a custom handler passed as an argument, and if that doesn't throw an error, it uses the default handler.
 * It always wraps the error in a NodeApiError.
 * It throws an error ONLY if there are no retries left.
 */
export const makeN8nLlmFailedAttemptHandler = (
ctx: ISupplyDataFunctions,
handler?: FailedAttemptHandler,
): FailedAttemptHandler => {
return (error: any) => {
try {
// Try custom error handler first
handler?.(error);

// If it didn't throw an error, use the default handler
n8nDefaultFailedAttemptHandler(error);
} catch (e) {
// Wrap the error in a NodeApiError
const apiError = new NodeApiError(ctx.getNode(), e as unknown as JsonObject, {
functionality: 'configuration-node',
});

throw apiError;
}

// If no error was thrown, check if it is the last retry
// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
if (error?.retriesLeft > 0) {
return;
}

// If there are no retries left, throw the error wrapped in a NodeApiError
const apiError = new NodeApiError(ctx.getNode(), error as unknown as JsonObject, {
functionality: 'configuration-node',
});

throw apiError;
};
};

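When LangChain's AsyncCaller (p-retry underneath) invokes this handler, the error it passes already carries retry metadata such as retriesLeft; that is the field checked above and stubbed in the tests. An illustrative call sequence, with ctx standing in for the node's ISupplyDataFunctions:

import type { ISupplyDataFunctions } from 'n8n-workflow';

import { makeN8nLlmFailedAttemptHandler } from './n8nLlmFailedAttemptHandler';

declare const ctx: ISupplyDataFunctions; // provided by the model node's supplyData()

const onFailedAttempt = makeN8nLlmFailedAttemptHandler(ctx);

// Retryable failure with attempts remaining: the handler simply returns.
onFailedAttempt(Object.assign(new Error('503 Service Unavailable'), { retriesLeft: 2 }));

// Same failure on the last attempt: the handler throws a NodeApiError
// flagged with functionality 'configuration-node'.
onFailedAttempt(Object.assign(new Error('503 Service Unavailable'), { retriesLeft: 0 }));
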
@@ -1,4 +1,5 @@
import { OpenAIError } from 'openai/error';
import { RateLimitError } from 'openai';

const errorMap: Record<string, string> = {
insufficient_quota: 'OpenAI: Insufficient quota',

@@ -12,3 +13,20 @@ export function getCustomErrorMessage(errorCode: string): string | undefined {
export function isOpenAiError(error: any): error is OpenAIError {
return error instanceof OpenAIError;
}

export const openAiFailedAttemptHandler = (error: any) => {
if (error instanceof RateLimitError) {
// If the error is a rate limit error, we want to handle it differently
// because OpenAI has multiple different rate limit errors
const errorCode = error?.code;
if (errorCode) {
const customErrorMessage = getCustomErrorMessage(errorCode);

if (customErrorMessage) {
error.message = customErrorMessage;
}
}

throw error;
}
};

@@ -17,13 +17,6 @@ import { logAiEvent, isToolsInstance, isBaseChatMemory, isBaseChatMessageHistory
import { N8nBinaryLoader } from './N8nBinaryLoader';
import { N8nJsonLoader } from './N8nJsonLoader';

const errorsMap: { [key: string]: { message: string; description: string } } = {
'You exceeded your current quota, please check your plan and billing details.': {
message: 'OpenAI quota exceeded',
description: 'You exceeded your current quota, please check your plan and billing details.',
},
};

export async function callMethodAsync<T>(
this: T,
parameters: {

@@ -37,30 +30,25 @@ export async function callMethodAsync<T>(
try {
return await parameters.method.call(this, ...parameters.arguments);
} catch (e) {
// Propagate errors from sub-nodes
if (e.functionality === 'configuration-node') throw e;
const connectedNode = parameters.executeFunctions.getNode();

const error = new NodeOperationError(connectedNode, e, {
functionality: 'configuration-node',
});

if (errorsMap[error.message]) {
error.description = errorsMap[error.message].description;
error.message = errorsMap[error.message].message;
}

parameters.executeFunctions.addOutputData(
parameters.connectionType,
parameters.currentNodeRunIndex,
error,
);

if (error.message) {
if (!error.description) {
error.description = error.message;
}
throw error;
}

throw new NodeOperationError(
connectedNode,
`Error on node "${connectedNode.name}" which is connected via input "${parameters.connectionType}"`,

@@ -82,8 +70,6 @@ export function callMethodSync<T>(
try {
return parameters.method.call(this, ...parameters.arguments);
} catch (e) {
// Propagate errors from sub-nodes
if (e.functionality === 'configuration-node') throw e;
const connectedNode = parameters.executeFunctions.getNode();
const error = new NodeOperationError(connectedNode, e);
parameters.executeFunctions.addOutputData(

@@ -91,6 +77,7 @@ export function callMethodSync<T>(
parameters.currentNodeRunIndex,
error,
);

throw new NodeOperationError(
connectedNode,
`Error on node "${connectedNode.name}" which is connected via input "${parameters.connectionType}"`,

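The new guard at the top of both catch blocks ties into the handlers above: a NodeApiError thrown by makeN8nLlmFailedAttemptHandler already carries functionality: 'configuration-node', so callMethodAsync and callMethodSync rethrow it untouched instead of re-wrapping it against the root node. A standalone restatement of that rule (a sketch, not the exported helper):

import type { INode } from 'n8n-workflow';
import { NodeOperationError } from 'n8n-workflow';

// Errors already flagged by a sub-node handler keep their attribution; anything
// else gets wrapped against the node that owns the connection.
function toPropagatedError(connectedNode: INode, e: Error & { functionality?: string }): Error {
	if (e.functionality === 'configuration-node') return e;
	return new NodeOperationError(connectedNode, e, { functionality: 'configuration-node' });
}
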
@@ -0,0 +1,67 @@
import type { GlobalConfig } from '@n8n/config';
import { mock } from 'jest-mock-extended';
import { ServerResponse } from 'node:http';
import type WebSocket from 'ws';

import type { TaskRunnerAuthController } from '@/runners/auth/task-runner-auth.controller';
import { TaskRunnerServer } from '@/runners/task-runner-server';

import type { TaskRunnerServerInitRequest } from '../runner-types';

describe('TaskRunnerServer', () => {
describe('handleUpgradeRequest', () => {
it('should close WebSocket when response status code is > 200', () => {
const ws = mock<WebSocket>();
const request = mock<TaskRunnerServerInitRequest>({
url: '/runners/_ws',
ws,
});

const server = new TaskRunnerServer(
mock(),
mock<GlobalConfig>({ taskRunners: { path: '/runners' } }),
mock<TaskRunnerAuthController>(),
mock(),
);

// @ts-expect-error Private property
server.handleUpgradeRequest(request, mock(), Buffer.from(''));

const response = new ServerResponse(request);
response.writeHead = (statusCode) => {
if (statusCode > 200) ws.close();
return response;
};

response.writeHead(401);
expect(ws.close).toHaveBeenCalledWith(); // no args
});

it('should not close WebSocket when response status code is 200', () => {
const ws = mock<WebSocket>();
const request = mock<TaskRunnerServerInitRequest>({
url: '/runners/_ws',
ws,
});

const server = new TaskRunnerServer(
mock(),
mock<GlobalConfig>({ taskRunners: { path: '/runners' } }),
mock<TaskRunnerAuthController>(),
mock(),
);

// @ts-expect-error Private property
server.handleUpgradeRequest(request, mock(), Buffer.from(''));

const response = new ServerResponse(request);
response.writeHead = (statusCode) => {
if (statusCode > 200) ws.close();
return response;
};

response.writeHead(200);
expect(ws.close).not.toHaveBeenCalled();
});
});
});

@@ -1,6 +1,12 @@
import type { PartialAdditionalData, TaskData } from '@n8n/task-runner';
import { mock } from 'jest-mock-extended';
import type { Workflow } from 'n8n-workflow';
import type {
IExecuteContextData,
INode,
INodeExecutionData,
IRunExecutionData,
Workflow,
} from 'n8n-workflow';

import { DataRequestResponseBuilder } from '../data-request-response-builder';

@@ -19,6 +25,22 @@ const additionalData = mock<PartialAdditionalData>({
restartExecutionId: undefined,
});

const node = mock<INode>();
const outputItems: INodeExecutionData[] = [
{
json: {
uid: 'abb74fd4-bef2-4fae-9d53-ea24e9eb3032',
email: 'Dan.Schmidt31@yahoo.com',
firstname: 'Toni',
lastname: 'Schuster',
password: 'Q!D6C2',
},
pairedItem: {
item: 0,
},
},
];

const workflow: TaskData['workflow'] = mock<Workflow>({
id: '1',
name: 'Test Workflow',

@@ -30,9 +52,39 @@ const workflow: TaskData['workflow'] = mock<Workflow>({
staticData: {},
});

const contextData = mock<IExecuteContextData>();
const metadata = {
'0': [],
};

const runExecutionData = mock<IRunExecutionData>({
executionData: {
contextData,
metadata,
nodeExecutionStack: [
{
node,
data: {
main: [outputItems],
},
source: {},
},
],
waitingExecution: {
node: {
'0': {
main: [],
},
},
},
waitingExecutionSource: {},
},
});

const taskData = mock<TaskData>({
additionalData,
workflow,
runExecutionData,
});

describe('DataRequestResponseBuilder', () => {

@@ -71,4 +123,20 @@ describe('DataRequestResponseBuilder', () => {
staticData: workflow.staticData,
});
});

it('clears nodeExecutionStack, waitingExecution and waitingExecutionSource from runExecutionData', () => {
const result = builder.buildFromTaskData(taskData);

expect(result.runExecutionData).toStrictEqual({
startData: runExecutionData.startData,
resultData: runExecutionData.resultData,
executionData: {
contextData,
metadata,
nodeExecutionStack: [],
waitingExecution: {},
waitingExecutionSource: null,
},
});
});
});

@@ -115,24 +115,8 @@ const taskData: DataRequestResponse = {
contextData: {},
nodeExecutionStack: [],
metadata: {},
waitingExecution: {
[codeNode.name]: {
'0': {
main: [codeNodeInputItems],
},
},
},
waitingExecutionSource: {
[codeNode.name]: {
'0': {
main: [
{
previousNode: debugHelperNode.name,
},
],
},
},
},
waitingExecution: {},
waitingExecutionSource: {},
},
},
runIndex: 0,

@@ -1,5 +1,10 @@
import type { DataRequestResponse, PartialAdditionalData, TaskData } from '@n8n/task-runner';
import type { IWorkflowExecuteAdditionalData, Workflow, WorkflowParameters } from 'n8n-workflow';
import type {
IRunExecutionData,
IWorkflowExecuteAdditionalData,
Workflow,
WorkflowParameters,
} from 'n8n-workflow';

/**
 * Transforms TaskData to DataRequestResponse. The main purpose of the

@@ -20,7 +25,7 @@ export class DataRequestResponseBuilder {
mode: taskData.mode,
envProviderState: taskData.envProviderState,
node: taskData.node,
runExecutionData: taskData.runExecutionData,
runExecutionData: this.buildRunExecutionData(taskData.runExecutionData),
runIndex: taskData.runIndex,
selfData: taskData.selfData,
siblingParameters: taskData.siblingParameters,

@@ -59,4 +64,23 @@ export class DataRequestResponseBuilder {
staticData: workflow.staticData,
};
}

private buildRunExecutionData(runExecutionData: IRunExecutionData) {
return {
startData: runExecutionData.startData,
resultData: runExecutionData.resultData,
executionData: runExecutionData.executionData
? {
contextData: runExecutionData.executionData.contextData,
metadata: runExecutionData.executionData.metadata,

// These are related to workflow execution and are not accessible
// by nodes, so we always omit them
nodeExecutionStack: [],
waitingExecution: {},
waitingExecutionSource: null,
}
: undefined,
};
}
}

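For context, how the builder is exercised; this mirrors the unit test added earlier in this PR (the zero-argument constructor is assumed from that test's setup, which is not visible in these hunks):

import type { TaskData } from '@n8n/task-runner';
import { mock } from 'jest-mock-extended';

import { DataRequestResponseBuilder } from '../data-request-response-builder';

const taskData = mock<TaskData>(); // workflow, runExecutionData, additionalData, ...
const builder = new DataRequestResponseBuilder();

const response = builder.buildFromTaskData(taskData);
// response.runExecutionData.executionData now has nodeExecutionStack: [],
// waitingExecution: {} and waitingExecutionSource: null, whatever the original
// execution state contained.
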
@@ -52,17 +52,9 @@ export class DataRequestResponseStripper {
runData: this.stripRunData(runExecutionData.resultData.runData),
pinData: this.stripPinData(runExecutionData.resultData.pinData),
},
executionData: runExecutionData.executionData
? {
// TODO: Figure out what these two are and can they be stripped
contextData: runExecutionData.executionData?.contextData,
nodeExecutionStack: runExecutionData.executionData.nodeExecutionStack,

metadata: runExecutionData.executionData.metadata,
waitingExecution: runExecutionData.executionData.waitingExecution,
waitingExecutionSource: runExecutionData.executionData.waitingExecutionSource,
}
: undefined,
// TODO: We could send `runExecutionData.contextData` only if requested,
// since it's only needed if $input.context or $("node").context is used.
executionData: runExecutionData.executionData,
};
}

@@ -181,7 +181,7 @@ export class TaskRunnerServer {

const response = new ServerResponse(request);
response.writeHead = (statusCode) => {
if (statusCode > 200) ws.close(100);
if (statusCode > 200) ws.close();
return response;
};

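A note on the one-character change above: 100 is not a valid WebSocket close code (RFC 6455 status codes start at 1000, and the ws library only accepts 1000-1014, minus a few reserved values, or 3000-4999), so ws.close(100) would throw instead of closing the socket. Calling close() with no argument sends a normal close frame. A sketch of the accepted forms, assuming the ws client type used in the new test:

import type WebSocket from 'ws';

declare const ws: WebSocket; // the upgraded runner socket

ws.close();                              // no status code: the peer sees 1005 "no status received"
ws.close(1008, 'Authentication failed'); // explicit close with a valid code, if one is wanted
// ws.close(100);                        // would throw: invalid close code
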
@@ -1585,3 +1585,44 @@ export type ApiKey = {
createdAt: string;
updatedAt: string;
};

export type InputPanel = {
displayMode: IRunDataDisplayMode;
nodeName?: string;
run?: number;
branch?: number;
data: {
isEmpty: boolean;
};
};

export type OutputPanel = {
branch?: number;
displayMode: IRunDataDisplayMode;
data: {
isEmpty: boolean;
};
editMode: {
enabled: boolean;
value: string;
};
};

export type Draggable = {
isDragging: boolean;
type: string;
data: string;
dimensions: DOMRect | null;
activeTarget: { id: string; stickyPosition: null | XYPosition } | null;
};

export type MainPanelType = 'regular' | 'dragless' | 'inputless' | 'unknown' | 'wide';

export type MainPanelDimensions = Record<
MainPanelType,
{
relativeLeft: number;
relativeRight: number;
relativeWidth: number;
}
>;

@@ -26,7 +26,8 @@ describe('InlineExpressionTip.vue', () => {
beforeEach(() => {
mockNdvState = {
hasInputData: true,
isNDVDataEmpty: vi.fn(() => true),
isInputPanelEmpty: true,
isOutputPanelEmpty: true,
setHighlightDraggables: vi.fn(),
};
});

@@ -42,7 +43,8 @@ describe('InlineExpressionTip.vue', () => {
test('should show the drag-n-drop tip', async () => {
mockNdvState = {
hasInputData: true,
isNDVDataEmpty: vi.fn(() => false),
isInputPanelEmpty: false,
isOutputPanelEmpty: false,
focusedMappableInput: 'Some Input',
setHighlightDraggables: vi.fn(),
};

@@ -62,7 +64,8 @@ describe('InlineExpressionTip.vue', () => {
mockNdvState = {
hasInputData: false,
isInputParentOfActiveNode: true,
isNDVDataEmpty: vi.fn(() => false),
isInputPanelEmpty: false,
isOutputPanelEmpty: false,
focusedMappableInput: 'Some Input',
setHighlightDraggables: vi.fn(),
};

@@ -77,7 +80,8 @@ describe('InlineExpressionTip.vue', () => {
test('should show the correct tip for objects', async () => {
mockNdvState = {
hasInputData: true,
isNDVDataEmpty: vi.fn(() => false),
isInputPanelEmpty: false,
isOutputPanelEmpty: false,
focusedMappableInput: 'Some Input',
setHighlightDraggables: vi.fn(),
};

@@ -106,7 +110,8 @@ describe('InlineExpressionTip.vue', () => {
test('should show the correct tip for primitives', async () => {
mockNdvState = {
hasInputData: true,
isNDVDataEmpty: vi.fn(() => false),
isInputPanelEmpty: false,
isOutputPanelEmpty: false,
focusedMappableInput: 'Some Input',
setHighlightDraggables: vi.fn(),
};

@@ -30,7 +30,7 @@ const canAddDotToExpression = ref(false);
const resolvedExpressionHasFields = ref(false);

const canDragToFocusedInput = computed(
() => !ndvStore.isNDVDataEmpty('input') && ndvStore.focusedMappableInput,
() => !ndvStore.isInputPanelEmpty && ndvStore.focusedMappableInput,
);

const emptyExpression = computed(() => props.unresolvedExpression.trim().length === 0);

@@ -9,7 +9,7 @@ import { useNDVStore } from '@/stores/ndv.store';
import { ndvEventBus } from '@/event-bus';
import NDVFloatingNodes from '@/components/NDVFloatingNodes.vue';
import { useDebounce } from '@/composables/useDebounce';
import type { XYPosition } from '@/Interface';
import type { MainPanelType, XYPosition } from '@/Interface';
import { ref, onMounted, onBeforeUnmount, computed, watch } from 'vue';
import { useUIStore } from '@/stores/ui.store';

@@ -20,7 +20,7 @@ const PANEL_WIDTH = 350;
const PANEL_WIDTH_LARGE = 420;
const MIN_WINDOW_WIDTH = 2 * (SIDE_MARGIN + SIDE_PANELS_MARGIN) + MIN_PANEL_WIDTH;

const initialMainPanelWidth: { [key: string]: number } = {
const initialMainPanelWidth: Record<MainPanelType, number> = {
regular: MAIN_NODE_PANEL_WIDTH,
dragless: MAIN_NODE_PANEL_WIDTH,
unknown: MAIN_NODE_PANEL_WIDTH,

@@ -106,22 +106,16 @@ watch(containerWidth, (width) => {
setPositions(mainPanelDimensions.value.relativeLeft);
});

const currentNodePaneType = computed((): string => {
const currentNodePaneType = computed((): MainPanelType => {
if (!hasInputSlot.value) return 'inputless';
if (!props.isDraggable) return 'dragless';
if (props.nodeType === null) return 'unknown';
return props.nodeType.parameterPane ?? 'regular';
});

const mainPanelDimensions = computed(
(): {
relativeWidth: number;
relativeLeft: number;
relativeRight: number;
} => {
return ndvStore.getMainPanelDimensions(currentNodePaneType.value);
},
);
const mainPanelDimensions = computed(() => {
return ndvStore.mainPanelDimensions[currentNodePaneType.value];
});

const calculatedPositions = computed(
(): { inputPanelRelativeRight: number; outputPanelRelativeLeft: number } => {

@@ -141,6 +141,10 @@ export function AIView(_nodes: SimplifiedNodeType[]): NodeView {
const chainNodes = getAiNodesBySubcategory(nodeTypesStore.allLatestNodeTypes, AI_CATEGORY_CHAINS);
const agentNodes = getAiNodesBySubcategory(nodeTypesStore.allLatestNodeTypes, AI_CATEGORY_AGENTS);

const websiteCategoryURL = templatesStore.websiteTemplateRepositoryParameters;

websiteCategoryURL.append('utm_user_role', 'AdvancedAI');

return {
value: AI_NODE_CREATOR_VIEW,
title: i18n.baseText('nodeCreator.aiPanel.aiNodes'),

@@ -154,7 +158,7 @@ export function AIView(_nodes: SimplifiedNodeType[]): NodeView {
icon: 'box-open',
description: i18n.baseText('nodeCreator.aiPanel.linkItem.description'),
name: 'ai_templates_root',
url: templatesStore.getWebsiteCategoryURL(undefined, 'AdvancedAI'),
url: websiteCategoryURL.toString(),
tag: {
type: 'info',
text: i18n.baseText('nodeCreator.triggerHelperPanel.manualTriggerTag'),

@@ -316,7 +316,7 @@ async function onClick() {
codeGenerationInProgress.value = false;
}

if (isChatNode.value || (isChatChild.value && ndvStore.isNDVDataEmpty('input'))) {
if (isChatNode.value || (isChatChild.value && ndvStore.isInputPanelEmpty)) {
ndvStore.setActiveNodeName(null);
nodeViewEventBus.emit('openChat');
} else if (isListeningForEvents.value) {

@@ -78,7 +78,7 @@ const { isSubNodeType } = useNodeType({
});
const pinnedData = usePinnedData(activeNode, {
runIndex: props.runIndex,
displayMode: ndvStore.getPanelDisplayMode('output'),
displayMode: ndvStore.outputPanelDisplayMode,
});

// Data

@@ -54,7 +54,8 @@ describe('ParameterInput.vue', () => {
type: 'test',
typeVersion: 1,
},
isNDVDataEmpty: vi.fn(() => false),
isInputPanelEmpty: false,
isOutputPanelEmpty: false,
};
mockNodeTypesState = {
allNodeTypes: [],

@@ -523,7 +523,7 @@ const isHtmlNode = computed(() => !!node.value && node.value.type === HTML_NODE_
const isInputTypeString = computed(() => props.parameter.type === 'string');
const isInputTypeNumber = computed(() => props.parameter.type === 'number');

const isInputDataEmpty = computed(() => ndvStore.isNDVDataEmpty('input'));
const isInputDataEmpty = computed(() => ndvStore.isInputPanelEmpty);
const isDropDisabled = computed(
() =>
props.parameter.noDataExpression ||

@@ -185,12 +185,17 @@ const node = toRef(props, 'node');

const pinnedData = usePinnedData(node, {
runIndex: props.runIndex,
displayMode: ndvStore.getPanelDisplayMode(props.paneType),
displayMode:
props.paneType === 'input' ? ndvStore.inputPanelDisplayMode : ndvStore.outputPanelDisplayMode,
});
const { isSubNodeType } = useNodeType({
node,
});

const displayMode = computed(() =>
props.paneType === 'input' ? ndvStore.inputPanelDisplayMode : ndvStore.outputPanelDisplayMode,
);

const isReadOnlyRoute = computed(() => route.meta.readOnlyCanvas === true);
const isWaitNodeWaiting = computed(
() =>

@@ -200,7 +205,6 @@ const isWaitNodeWaiting = computed(
);

const { activeNode } = storeToRefs(ndvStore);
const displayMode = computed(() => ndvStore.getPanelDisplayMode(props.paneType));
const nodeType = computed(() => {
if (!node.value) return null;

@@ -386,7 +390,10 @@ const currentOutputIndex = computed(() => {
return props.overrideOutputs[0];
}

return outputIndex.value;
// In some cases nodes may switch their outputCount while the user still
// has a higher outputIndex selected. We could adjust outputIndex directly,
// but that loses data as we can keep the user selection if the branch reappears.
return Math.min(outputIndex.value, maxOutputIndex.value);
});
const branches = computed(() => {
const capitalize = (name: string) => name.charAt(0).toLocaleUpperCase() + name.slice(1);

@@ -1,6 +1,7 @@
import { parse } from 'flatted';
import { h, ref } from 'vue';
import type { useRouter } from 'vue-router';
import { TelemetryHelpers } from 'n8n-workflow';
import type {
ExpressionError,
IDataObject,

@@ -12,8 +13,8 @@ import type {
IExecuteContextData,
NodeOperationError,
INodeTypeDescription,
NodeError,
} from 'n8n-workflow';
import { TelemetryHelpers } from 'n8n-workflow';
import type { PushMessage, PushPayload } from '@n8n/api-types';

import type { IExecutionResponse, IExecutionsCurrentSummaryExtended } from '@/Interface';

@@ -322,8 +323,7 @@ export function usePushConnection({ router }: { router: ReturnType<typeof useRou

if (
runDataExecuted.data.resultData.error?.name === 'ExpressionError' &&
(runDataExecuted.data.resultData.error as ExpressionError).context.functionality ===
'pairedItem'
(runDataExecuted.data.resultData.error as ExpressionError).functionality === 'pairedItem'
) {
const error = runDataExecuted.data.resultData.error as ExpressionError;

@@ -377,8 +377,9 @@ export function usePushConnection({ router }: { router: ReturnType<typeof useRou
duration: 0,
});
} else if (
runDataExecuted.data.resultData.error?.name === 'NodeOperationError' &&
(runDataExecuted.data.resultData.error as NodeOperationError).functionality ===
(runDataExecuted.data.resultData.error?.name === 'NodeOperationError' ||
runDataExecuted.data.resultData.error?.name === 'NodeApiError') &&
(runDataExecuted.data.resultData.error as NodeError).functionality ===
'configuration-node'
) {
// If the error is a configuration error, the node itself doesn't get executed, so we can't use lastNodeExecuted for the title

@@ -1,10 +1,13 @@
import type {
INodeUi,
Draggable,
InputPanel,
IRunDataDisplayMode,
MainPanelDimensions,
MainPanelType,
NDVState,
NodePanelType,
OutputPanel,
TargetItem,
XYPosition,
} from '@/Interface';
import { useStorage } from '@/composables/useStorage';
import {

@@ -13,316 +16,411 @@ import {
LOCAL_STORAGE_TABLE_HOVER_IS_ONBOARDED,
STORES,
} from '@/constants';
import type { INodeExecutionData, INodeIssues } from 'n8n-workflow';
import type { INodeIssues } from 'n8n-workflow';
import { NodeConnectionType } from 'n8n-workflow';
import { defineStore } from 'pinia';
import { v4 as uuid } from 'uuid';
import { useWorkflowsStore } from './workflows.store';
import { computed, ref } from 'vue';

export const useNDVStore = defineStore(STORES.NDV, {
state: (): NDVState => ({
activeNodeName: null,
mainPanelDimensions: {},
pushRef: '',
input: {
displayMode: 'schema',
nodeName: undefined,
run: undefined,
branch: undefined,
data: {
isEmpty: true,
},
const DEFAULT_MAIN_PANEL_DIMENSIONS = {
relativeLeft: 1,
relativeRight: 1,
relativeWidth: 1,
};

export const useNDVStore = defineStore(STORES.NDV, () => {
const localStorageMappingIsOnboarded = useStorage(LOCAL_STORAGE_MAPPING_IS_ONBOARDED);
const localStorageTableHoverIsOnboarded = useStorage(LOCAL_STORAGE_TABLE_HOVER_IS_ONBOARDED);
const localStorageAutoCompleteIsOnboarded = useStorage(LOCAL_STORAGE_AUTOCOMPLETE_IS_ONBOARDED);

const activeNodeName = ref<string | null>(null);
const mainPanelDimensions = ref<MainPanelDimensions>({
unknown: { ...DEFAULT_MAIN_PANEL_DIMENSIONS },
regular: { ...DEFAULT_MAIN_PANEL_DIMENSIONS },
dragless: { ...DEFAULT_MAIN_PANEL_DIMENSIONS },
inputless: { ...DEFAULT_MAIN_PANEL_DIMENSIONS },
wide: { ...DEFAULT_MAIN_PANEL_DIMENSIONS },
});
const pushRef = ref('');
const input = ref<InputPanel>({
displayMode: 'schema',
nodeName: undefined,
run: undefined,
branch: undefined,
data: {
isEmpty: true,
},
output: {
displayMode: 'table',
branch: undefined,
data: {
isEmpty: true,
},
editMode: {
enabled: false,
value: '',
},
});
const output = ref<OutputPanel>({
displayMode: 'table',
branch: undefined,
data: {
isEmpty: true,
},
focusedMappableInput: '',
focusedInputPath: '',
mappingTelemetry: {},
hoveringItem: null,
expressionOutputItemIndex: 0,
draggable: {
editMode: {
enabled: false,
value: '',
},
});
const focusedMappableInput = ref('');
const focusedInputPath = ref('');
const mappingTelemetry = ref<Record<string, string | number | boolean>>({});
const hoveringItem = ref<null | TargetItem>(null);
const expressionOutputItemIndex = ref(0);
const draggable = ref<Draggable>({
isDragging: false,
type: '',
data: '',
dimensions: null,
activeTarget: null,
});
const isMappingOnboarded = ref(localStorageMappingIsOnboarded.value === 'true');
const isTableHoverOnboarded = ref(localStorageTableHoverIsOnboarded.value === 'true');

const isAutocompleteOnboarded = ref(localStorageAutoCompleteIsOnboarded.value === 'true');

const highlightDraggables = ref(false);

const workflowsStore = useWorkflowsStore();

const activeNode = computed(() => {
return workflowsStore.getNodeByName(activeNodeName.value || '');
});

const ndvInputData = computed(() => {
const executionData = workflowsStore.getWorkflowExecution;
const inputNodeName: string | undefined = input.value.nodeName;
const inputRunIndex: number = input.value.run ?? 0;
const inputBranchIndex: number = input.value.branch ?? 0;

if (
!executionData ||
!inputNodeName ||
inputRunIndex === undefined ||
inputBranchIndex === undefined
) {
return [];
}

return (
executionData.data?.resultData?.runData?.[inputNodeName]?.[inputRunIndex]?.data?.main?.[
inputBranchIndex
] ?? []
);
});

const ndvInputNodeName = computed(() => {
return input.value.nodeName;
});

const ndvInputDataWithPinnedData = computed(() => {
const data = ndvInputData.value;
return ndvInputNodeName.value
? (workflowsStore.pinDataByNodeName(ndvInputNodeName.value) ?? data)
: data;
});

const hasInputData = computed(() => {
return ndvInputDataWithPinnedData.value.length > 0;
});

const inputPanelDisplayMode = computed(() => input.value.displayMode);

const outputPanelDisplayMode = computed(() => output.value.displayMode);

const isDraggableDragging = computed(() => draggable.value.isDragging);

const draggableType = computed(() => draggable.value.type);

const draggableData = computed(() => draggable.value.data);

const canDraggableDrop = computed(() => draggable.value.activeTarget !== null);

const outputPanelEditMode = computed(() => output.value.editMode);

const draggableStickyPos = computed(() => draggable.value.activeTarget?.stickyPosition ?? null);

const ndvNodeInputNumber = computed(() => {
const returnData: { [nodeName: string]: number[] } = {};
const workflow = workflowsStore.getCurrentWorkflow();
const activeNodeConections = (
workflow.connectionsByDestinationNode[activeNode.value?.name || ''] ?? {}
).main;

if (!activeNodeConections || activeNodeConections.length < 2) return returnData;

for (const [index, connection] of activeNodeConections.entries()) {
for (const node of connection) {
if (!returnData[node.node]) {
returnData[node.node] = [];
}
returnData[node.node].push(index + 1);
}
}

return returnData;
});

const ndvInputRunIndex = computed(() => input.value.run);

const ndvInputBranchIndex = computed(() => input.value.branch);

const isInputPanelEmpty = computed(() => input.value.data.isEmpty);

const isOutputPanelEmpty = computed(() => output.value.data.isEmpty);

const isInputParentOfActiveNode = computed(() => {
const inputNodeName = ndvInputNodeName.value;
if (!activeNode.value || !inputNodeName) {
return false;
}
const workflow = workflowsStore.getCurrentWorkflow();
const parentNodes = workflow.getParentNodes(activeNode.value.name, NodeConnectionType.Main, 1);
return parentNodes.includes(inputNodeName);
});

const getHoveringItem = computed(() => {
if (isInputParentOfActiveNode.value) {
return hoveringItem.value;
}

return null;
});

const expressionTargetItem = computed(() => {
if (getHoveringItem.value) {
return getHoveringItem.value;
}

if (expressionOutputItemIndex.value && ndvInputNodeName.value) {
return {
nodeName: ndvInputNodeName.value,
runIndex: ndvInputRunIndex.value ?? 0,
outputIndex: ndvInputBranchIndex.value ?? 0,
itemIndex: expressionOutputItemIndex.value,
};
}

return null;
});

const isNDVOpen = computed(() => activeNodeName.value !== null);

const setActiveNodeName = (nodeName: string | null): void => {
activeNodeName.value = nodeName;
};

const setInputNodeName = (nodeName: string | undefined): void => {
input.value.nodeName = nodeName;
};

const setInputRunIndex = (run?: number): void => {
input.value.run = run;
};

const setMainPanelDimensions = (params: {
panelType: MainPanelType;
dimensions: { relativeLeft?: number; relativeRight?: number; relativeWidth?: number };
}): void => {
mainPanelDimensions.value[params.panelType] = {
...mainPanelDimensions.value[params.panelType],
...params.dimensions,
};
};

const setNDVPushRef = (): void => {
pushRef.value = `ndv-${uuid()}`;
};

const resetNDVPushRef = (): void => {
pushRef.value = '';
};

const setPanelDisplayMode = (params: {
pane: NodePanelType;
mode: IRunDataDisplayMode;
}): void => {
if (params.pane === 'input') {
input.value.displayMode = params.mode;
} else {
output.value.displayMode = params.mode;
}
};

const setOutputPanelEditModeEnabled = (isEnabled: boolean): void => {
output.value.editMode.enabled = isEnabled;
};

const setOutputPanelEditModeValue = (payload: string): void => {
output.value.editMode.value = payload;
};

const setMappableNDVInputFocus = (paramName: string): void => {
focusedMappableInput.value = paramName;
};

const draggableStartDragging = ({
type,
data,
dimensions,
}: { type: string; data: string; dimensions: DOMRect | null }): void => {
draggable.value = {
isDragging: true,
type,
data,
dimensions,
activeTarget: null,
};
};

const draggableStopDragging = (): void => {
draggable.value = {
isDragging: false,
type: '',
data: '',
dimensions: null,
activeTarget: null,
},
isMappingOnboarded: useStorage(LOCAL_STORAGE_MAPPING_IS_ONBOARDED).value === 'true',
isTableHoverOnboarded: useStorage(LOCAL_STORAGE_TABLE_HOVER_IS_ONBOARDED).value === 'true',
isAutocompleteOnboarded: useStorage(LOCAL_STORAGE_AUTOCOMPLETE_IS_ONBOARDED).value === 'true',
highlightDraggables: false,
}),
getters: {
activeNode(): INodeUi | null {
const workflowsStore = useWorkflowsStore();
return workflowsStore.getNodeByName(this.activeNodeName || '');
},
ndvInputData(): INodeExecutionData[] {
const workflowsStore = useWorkflowsStore();
const executionData = workflowsStore.getWorkflowExecution;
const inputNodeName: string | undefined = this.input.nodeName;
const inputRunIndex: number = this.input.run ?? 0;
const inputBranchIndex: number = this.input.branch ?? 0;
};
};

if (
!executionData ||
!inputNodeName ||
inputRunIndex === undefined ||
inputBranchIndex === undefined
) {
return [];
}
const setDraggableTarget = (target: NDVState['draggable']['activeTarget']): void => {
draggable.value.activeTarget = target;
};

return (
executionData.data?.resultData?.runData?.[inputNodeName]?.[inputRunIndex]?.data?.main?.[
inputBranchIndex
] ?? []
);
},
ndvInputDataWithPinnedData(): INodeExecutionData[] {
const data = this.ndvInputData;
return this.ndvInputNodeName
? (useWorkflowsStore().pinDataByNodeName(this.ndvInputNodeName) ?? data)
: data;
},
hasInputData(): boolean {
return this.ndvInputDataWithPinnedData.length > 0;
},
getPanelDisplayMode() {
return (panel: NodePanelType) => this[panel].displayMode;
},
inputPanelDisplayMode(): IRunDataDisplayMode {
return this.input.displayMode;
},
outputPanelDisplayMode(): IRunDataDisplayMode {
return this.output.displayMode;
},
isDraggableDragging(): boolean {
return this.draggable.isDragging;
},
draggableType(): string {
return this.draggable.type;
},
draggableData(): string {
return this.draggable.data;
},
canDraggableDrop(): boolean {
return this.draggable.activeTarget !== null;
},
outputPanelEditMode(): NDVState['output']['editMode'] {
return this.output.editMode;
},
getMainPanelDimensions() {
return (panelType: string) => {
const defaults = { relativeRight: 1, relativeLeft: 1, relativeWidth: 1 };
return { ...defaults, ...this.mainPanelDimensions[panelType] };
};
},
draggableStickyPos(): XYPosition | null {
return this.draggable.activeTarget?.stickyPosition ?? null;
},
|
||||
ndvInputNodeName(): string | undefined {
|
||||
return this.input.nodeName;
|
||||
},
|
||||
ndvInputRunIndex(): number | undefined {
|
||||
return this.input.run;
|
||||
},
|
||||
ndvInputBranchIndex(): number | undefined {
|
||||
return this.input.branch;
|
||||
},
|
||||
isNDVDataEmpty() {
|
||||
return (panel: 'input' | 'output'): boolean => this[panel].data.isEmpty;
|
||||
},
|
||||
isInputParentOfActiveNode(): boolean {
|
||||
const inputNodeName = this.ndvInputNodeName;
|
||||
if (!this.activeNode || !inputNodeName) {
|
||||
return false;
|
||||
}
|
||||
const workflow = useWorkflowsStore().getCurrentWorkflow();
|
||||
const parentNodes = workflow.getParentNodes(this.activeNode.name, NodeConnectionType.Main, 1);
|
||||
return parentNodes.includes(inputNodeName);
|
||||
},
|
||||
getHoveringItem(): TargetItem | null {
|
||||
if (this.isInputParentOfActiveNode) {
|
||||
return this.hoveringItem;
|
||||
}
|
||||
const setMappingTelemetry = (telemetry: { [key: string]: string | number | boolean }): void => {
|
||||
mappingTelemetry.value = { ...mappingTelemetry.value, ...telemetry };
|
||||
};
|
||||
|
||||
return null;
|
||||
},
|
||||
expressionTargetItem(): TargetItem | null {
|
||||
if (this.getHoveringItem) {
|
||||
return this.getHoveringItem;
|
||||
}
|
||||
const resetMappingTelemetry = (): void => {
|
||||
mappingTelemetry.value = {};
|
||||
};
|
||||
|
||||
if (this.expressionOutputItemIndex && this.ndvInputNodeName) {
|
||||
return {
|
||||
nodeName: this.ndvInputNodeName,
|
||||
runIndex: this.ndvInputRunIndex ?? 0,
|
||||
outputIndex: this.ndvInputBranchIndex ?? 0,
|
||||
itemIndex: this.expressionOutputItemIndex,
|
||||
};
|
||||
}
|
||||
const setHoveringItem = (item: TargetItem | null): void => {
|
||||
if (item) setTableHoverOnboarded();
|
||||
hoveringItem.value = item;
|
||||
};
|
||||
|
||||
return null;
|
||||
},
|
||||
isNDVOpen(): boolean {
|
||||
return this.activeNodeName !== null;
|
||||
},
|
||||
ndvNodeInputNumber() {
|
||||
const returnData: { [nodeName: string]: number[] } = {};
|
||||
const workflow = useWorkflowsStore().getCurrentWorkflow();
|
||||
const activeNodeConections = (
|
||||
workflow.connectionsByDestinationNode[this.activeNode?.name || ''] ?? {}
|
||||
).main;
|
||||
const setNDVBranchIndex = (e: { pane: NodePanelType; branchIndex: number }): void => {
|
||||
if (e.pane === 'input') {
|
||||
input.value.branch = e.branchIndex;
|
||||
} else {
|
||||
output.value.branch = e.branchIndex;
|
||||
}
|
||||
};
|
||||
|
||||
if (!activeNodeConections || activeNodeConections.length < 2) return returnData;
|
||||
const setNDVPanelDataIsEmpty = (params: {
|
||||
panel: NodePanelType;
|
||||
isEmpty: boolean;
|
||||
}): void => {
|
||||
if (params.panel === 'input') {
|
||||
input.value.data.isEmpty = params.isEmpty;
|
||||
} else {
|
||||
output.value.data.isEmpty = params.isEmpty;
|
||||
}
|
||||
};
|
||||
|
||||
for (const [index, connection] of activeNodeConections.entries()) {
|
||||
for (const node of connection) {
|
||||
if (!returnData[node.node]) {
|
||||
returnData[node.node] = [];
|
||||
}
|
||||
returnData[node.node].push(index + 1);
|
||||
}
|
||||
}
|
||||
const setMappingOnboarded = () => {
|
||||
isMappingOnboarded.value = true;
|
||||
localStorageMappingIsOnboarded.value = 'true';
|
||||
};
|
||||
|
||||
return returnData;
|
||||
},
|
||||
},
|
||||
actions: {
|
||||
setActiveNodeName(nodeName: string | null): void {
|
||||
this.activeNodeName = nodeName;
|
||||
},
|
||||
setInputNodeName(nodeName: string | undefined): void {
|
||||
this.input = {
|
||||
...this.input,
|
||||
nodeName,
|
||||
};
|
||||
},
|
||||
setInputRunIndex(run?: number): void {
|
||||
this.input = {
|
||||
...this.input,
|
||||
run,
|
||||
};
|
||||
},
|
||||
setMainPanelDimensions(params: {
|
||||
panelType: string;
|
||||
dimensions: { relativeLeft?: number; relativeRight?: number; relativeWidth?: number };
|
||||
}): void {
|
||||
this.mainPanelDimensions = {
|
||||
...this.mainPanelDimensions,
|
||||
[params.panelType]: {
|
||||
...this.mainPanelDimensions[params.panelType],
|
||||
...params.dimensions,
|
||||
const setTableHoverOnboarded = () => {
|
||||
isTableHoverOnboarded.value = true;
|
||||
localStorageTableHoverIsOnboarded.value = 'true';
|
||||
};
|
||||
|
||||
const setAutocompleteOnboarded = () => {
|
||||
isAutocompleteOnboarded.value = true;
|
||||
localStorageAutoCompleteIsOnboarded.value = 'true';
|
||||
};
|
||||
|
||||
const setHighlightDraggables = (highlight: boolean) => {
|
||||
highlightDraggables.value = highlight;
|
||||
};
|
||||
|
||||
const updateNodeParameterIssues = (issues: INodeIssues): void => {
|
||||
const activeNode = workflowsStore.getNodeByName(activeNodeName.value || '');
|
||||
|
||||
if (activeNode) {
|
||||
const nodeIndex = workflowsStore.workflow.nodes.findIndex((node) => {
|
||||
return node.name === activeNode.name;
|
||||
});
|
||||
|
||||
workflowsStore.updateNodeAtIndex(nodeIndex, {
|
||||
issues: {
|
||||
...activeNode.issues,
|
||||
...issues,
|
||||
},
|
||||
};
|
||||
},
|
||||
setNDVPushRef(): void {
|
||||
this.pushRef = `ndv-${uuid()}`;
|
||||
},
|
||||
resetNDVPushRef(): void {
|
||||
this.pushRef = '';
|
||||
},
|
||||
setPanelDisplayMode(params: { pane: NodePanelType; mode: IRunDataDisplayMode }): void {
|
||||
this[params.pane].displayMode = params.mode;
|
||||
},
|
||||
setOutputPanelEditModeEnabled(isEnabled: boolean): void {
|
||||
this.output.editMode.enabled = isEnabled;
|
||||
},
|
||||
setOutputPanelEditModeValue(payload: string): void {
|
||||
this.output.editMode.value = payload;
|
||||
},
|
||||
setMappableNDVInputFocus(paramName: string): void {
|
||||
this.focusedMappableInput = paramName;
|
||||
},
|
||||
draggableStartDragging({
|
||||
type,
|
||||
data,
|
||||
dimensions,
|
||||
}: {
|
||||
type: string;
|
||||
data: string;
|
||||
dimensions: DOMRect | null;
|
||||
}): void {
|
||||
this.draggable = {
|
||||
isDragging: true,
|
||||
type,
|
||||
data,
|
||||
dimensions,
|
||||
activeTarget: null,
|
||||
};
|
||||
},
|
||||
draggableStopDragging(): void {
|
||||
this.draggable = {
|
||||
isDragging: false,
|
||||
type: '',
|
||||
data: '',
|
||||
dimensions: null,
|
||||
activeTarget: null,
|
||||
};
|
||||
},
|
||||
setDraggableTarget(target: NDVState['draggable']['activeTarget']): void {
|
||||
this.draggable.activeTarget = target;
|
||||
},
|
||||
setMappingTelemetry(telemetry: { [key: string]: string | number | boolean }): void {
|
||||
this.mappingTelemetry = { ...this.mappingTelemetry, ...telemetry };
|
||||
},
|
||||
resetMappingTelemetry(): void {
|
||||
this.mappingTelemetry = {};
|
||||
},
|
||||
setHoveringItem(item: null | NDVState['hoveringItem']): void {
|
||||
if (item) this.setTableHoverOnboarded();
|
||||
this.hoveringItem = item;
|
||||
},
|
||||
setNDVBranchIndex(e: { pane: 'input' | 'output'; branchIndex: number }): void {
|
||||
this[e.pane].branch = e.branchIndex;
|
||||
},
|
||||
setNDVPanelDataIsEmpty(payload: { panel: 'input' | 'output'; isEmpty: boolean }): void {
|
||||
this[payload.panel].data.isEmpty = payload.isEmpty;
|
||||
},
|
||||
setMappingOnboarded() {
|
||||
this.isMappingOnboarded = true;
|
||||
useStorage(LOCAL_STORAGE_MAPPING_IS_ONBOARDED).value = 'true';
|
||||
},
|
||||
setTableHoverOnboarded() {
|
||||
this.isTableHoverOnboarded = true;
|
||||
useStorage(LOCAL_STORAGE_TABLE_HOVER_IS_ONBOARDED).value = 'true';
|
||||
},
|
||||
setAutocompleteOnboarded() {
|
||||
this.isAutocompleteOnboarded = true;
|
||||
useStorage(LOCAL_STORAGE_AUTOCOMPLETE_IS_ONBOARDED).value = 'true';
|
||||
},
|
||||
setHighlightDraggables(highlight: boolean) {
|
||||
this.highlightDraggables = highlight;
|
||||
},
|
||||
updateNodeParameterIssues(issues: INodeIssues): void {
|
||||
const workflowsStore = useWorkflowsStore();
|
||||
const activeNode = workflowsStore.getNodeByName(this.activeNodeName || '');
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
if (activeNode) {
|
||||
const nodeIndex = workflowsStore.workflow.nodes.findIndex((node) => {
|
||||
return node.name === activeNode.name;
|
||||
});
|
||||
const setFocusedInputPath = (path: string) => {
|
||||
focusedInputPath.value = path;
|
||||
};
|
||||
|
||||
workflowsStore.updateNodeAtIndex(nodeIndex, {
|
||||
issues: {
|
||||
...activeNode.issues,
|
||||
...issues,
|
||||
},
|
||||
});
|
||||
}
|
||||
},
|
||||
setFocusedInputPath(path: string) {
|
||||
this.focusedInputPath = path;
|
||||
},
|
||||
},
|
||||
return {
	activeNode,
	ndvInputData,
	ndvInputNodeName,
	ndvInputDataWithPinnedData,
	hasInputData,
	inputPanelDisplayMode,
	outputPanelDisplayMode,
	isDraggableDragging,
	draggableType,
	draggableData,
	canDraggableDrop,
	outputPanelEditMode,
	draggableStickyPos,
	ndvNodeInputNumber,
	ndvInputRunIndex,
	ndvInputBranchIndex,
	isInputParentOfActiveNode,
	getHoveringItem,
	expressionTargetItem,
	isNDVOpen,
	isInputPanelEmpty,
	isOutputPanelEmpty,
	focusedMappableInput,
	isMappingOnboarded,
	pushRef,
	activeNodeName,
	focusedInputPath,
	input,
	output,
	hoveringItem,
	highlightDraggables,
	mappingTelemetry,
	draggable,
	isAutocompleteOnboarded,
	expressionOutputItemIndex,
	isTableHoverOnboarded,
	mainPanelDimensions,
	setActiveNodeName,
	setInputNodeName,
	setInputRunIndex,
	setMainPanelDimensions,
	setNDVPushRef,
	resetNDVPushRef,
	setPanelDisplayMode,
	setOutputPanelEditModeEnabled,
	setOutputPanelEditModeValue,
	setMappableNDVInputFocus,
	draggableStartDragging,
	draggableStopDragging,
	setDraggableTarget,
	setMappingTelemetry,
	resetMappingTelemetry,
	setHoveringItem,
	setNDVBranchIndex,
	setNDVPanelDataIsEmpty,
	setMappingOnboarded,
	setTableHoverOnboarded,
	setAutocompleteOnboarded,
	setHighlightDraggables,
	updateNodeParameterIssues,
	setFocusedInputPath,
};
});
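Reviewer note: the ndv store above is migrated from a Pinia options store to a setup store, but the members returned at the end keep the former public names, so callers should not need changes. A minimal consumption sketch follows; it assumes the store is exported as useNDVStore from '@/stores/ndv.store' and uses a made-up node name and display mode, since neither appears in this diff.

// Sketch only: exercising the setup store the same way the old options store was used.
import { useNDVStore } from '@/stores/ndv.store'; // assumed export name and path

export function openNodeDetails(nodeName: string): boolean {
	const ndvStore = useNDVStore();

	// Former actions are now plain functions returned by the setup function.
	ndvStore.setActiveNodeName(nodeName);
	ndvStore.setPanelDisplayMode({ pane: 'output', mode: 'table' }); // 'table' is an assumed mode value

	// Former getters are computed refs, unwrapped on the store instance.
	return ndvStore.isNDVOpen;
}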

@@ -6,24 +6,17 @@ import type {
	ITemplatesCollection,
	ITemplatesCollectionFull,
	ITemplatesQuery,
	ITemplateState,
	ITemplatesWorkflow,
	ITemplatesWorkflowFull,
	IWorkflowTemplate,
} from '@/Interface';
import { useSettingsStore } from './settings.store';
import {
	getCategories,
	getCollectionById,
	getCollections,
	getTemplateById,
	getWorkflows,
	getWorkflowTemplate,
} from '@/api/templates';
import * as templatesApi from '@/api/templates';
import { getFixedNodesList } from '@/utils/nodeViewUtils';
import { useRootStore } from '@/stores/root.store';
import { useUsersStore } from './users.store';
import { useWorkflowsStore } from './workflows.store';
import { computed, ref } from 'vue';

const TEMPLATES_PAGE_SIZE = 20;

@@ -33,398 +26,424 @@ function getSearchKey(query: ITemplatesQuery): string {

export type TemplatesStore = ReturnType<typeof useTemplatesStore>;

export const useTemplatesStore = defineStore(STORES.TEMPLATES, {
	state: (): ITemplateState => ({
		categories: [],
		collections: {},
		workflows: {},
		collectionSearches: {},
		workflowSearches: {},
		currentSessionId: '',
		previousSessionId: '',
		currentN8nPath: `${window.location.protocol}//${window.location.host}${window.BASE_PATH}`,
	}),
	getters: {
|
||||
allCategories(): ITemplatesCategory[] {
|
||||
return Object.values(this.categories).sort((a: ITemplatesCategory, b: ITemplatesCategory) =>
|
||||
a.name > b.name ? 1 : -1,
|
||||
);
|
||||
},
|
||||
getTemplateById() {
|
||||
return (id: string): null | ITemplatesWorkflow => this.workflows[id];
|
||||
},
|
||||
getFullTemplateById() {
|
||||
return (id: string): null | ITemplatesWorkflowFull => {
|
||||
const template = this.workflows[id];
|
||||
return template && 'full' in template && template.full ? template : null;
|
||||
};
|
||||
},
|
||||
getCollectionById() {
|
||||
return (id: string): null | ITemplatesCollection => this.collections[id];
|
||||
},
|
||||
getCategoryById() {
|
||||
return (id: string): null | ITemplatesCategory => this.categories[id as unknown as number];
|
||||
},
|
||||
getSearchedCollections() {
|
||||
return (query: ITemplatesQuery) => {
|
||||
const searchKey = getSearchKey(query);
|
||||
const search = this.collectionSearches[searchKey];
|
||||
if (!search) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return search.collectionIds.map((collectionId: string) => this.collections[collectionId]);
|
||||
};
|
||||
},
|
||||
getSearchedWorkflows() {
|
||||
return (query: ITemplatesQuery) => {
|
||||
const searchKey = getSearchKey(query);
|
||||
const search = this.workflowSearches[searchKey];
|
||||
if (!search) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return search.workflowIds.map((workflowId: string) => this.workflows[workflowId]);
|
||||
};
|
||||
},
|
||||
getSearchedWorkflowsTotal() {
|
||||
return (query: ITemplatesQuery) => {
|
||||
const searchKey = getSearchKey(query);
|
||||
const search = this.workflowSearches[searchKey];
|
||||
|
||||
return search ? search.totalWorkflows : 0;
|
||||
};
|
||||
},
|
||||
isSearchLoadingMore() {
|
||||
return (query: ITemplatesQuery) => {
|
||||
const searchKey = getSearchKey(query);
|
||||
const search = this.workflowSearches[searchKey];
|
||||
|
||||
return Boolean(search && search.loadingMore);
|
||||
};
|
||||
},
|
||||
isSearchFinished() {
|
||||
return (query: ITemplatesQuery) => {
|
||||
const searchKey = getSearchKey(query);
|
||||
const search = this.workflowSearches[searchKey];
|
||||
|
||||
return Boolean(
|
||||
search && !search.loadingMore && search.totalWorkflows === search.workflowIds.length,
|
||||
);
|
||||
};
|
||||
},
|
||||
hasCustomTemplatesHost(): boolean {
|
||||
const settingsStore = useSettingsStore();
|
||||
return settingsStore.templatesHost !== TEMPLATES_URLS.DEFAULT_API_HOST;
|
||||
},
|
||||
/**
|
||||
* Constructs URLSearchParams object based on the default parameters for the template repository
|
||||
* and provided additional parameters
|
||||
*/
|
||||
websiteTemplateRepositoryParameters(_roleOverride?: string) {
|
||||
const rootStore = useRootStore();
|
||||
const userStore = useUsersStore();
|
||||
const workflowsStore = useWorkflowsStore();
|
||||
const defaultParameters: Record<string, string> = {
|
||||
...TEMPLATES_URLS.UTM_QUERY,
|
||||
utm_instance: this.currentN8nPath,
|
||||
utm_n8n_version: rootStore.versionCli,
|
||||
utm_awc: String(workflowsStore.activeWorkflows.length),
|
||||
};
|
||||
const userRole: string | null | undefined =
|
||||
userStore.currentUserCloudInfo?.role ??
|
||||
(userStore.currentUser?.personalizationAnswers &&
|
||||
'role' in userStore.currentUser.personalizationAnswers
|
||||
? userStore.currentUser.personalizationAnswers.role
|
||||
: undefined);
|
||||
|
||||
if (userRole) {
|
||||
defaultParameters.utm_user_role = userRole;
|
||||
export const useTemplatesStore = defineStore(STORES.TEMPLATES, () => {
|
||||
const categories = ref<ITemplatesCategory[]>([]);
|
||||
const collections = ref<Record<string, ITemplatesCollection>>({});
|
||||
const workflows = ref<Record<string, ITemplatesWorkflow | ITemplatesWorkflowFull>>({});
|
||||
const workflowSearches = ref<
|
||||
Record<
|
||||
string,
|
||||
{
|
||||
workflowIds: string[];
|
||||
totalWorkflows: number;
|
||||
loadingMore?: boolean;
|
||||
categories?: ITemplatesCategory[];
|
||||
}
|
||||
return (additionalParameters: Record<string, string> = {}) => {
|
||||
return new URLSearchParams({
|
||||
...defaultParameters,
|
||||
...additionalParameters,
|
||||
});
|
||||
};
|
||||
},
|
||||
/**
|
||||
* Construct the URL for the template repository on the website
|
||||
* @returns {string}
|
||||
*/
|
||||
websiteTemplateRepositoryURL(): string {
|
||||
return `${
|
||||
TEMPLATES_URLS.BASE_WEBSITE_URL
|
||||
}?${this.websiteTemplateRepositoryParameters().toString()}`;
|
||||
},
|
||||
/**
|
||||
* Construct the URL for the template category page on the website for a given category id
|
||||
*/
|
||||
getWebsiteCategoryURL() {
|
||||
return (id?: string, roleOverride?: string) => {
|
||||
const payload: Record<string, string> = {};
|
||||
if (id) {
|
||||
payload.categories = id;
|
||||
}
|
||||
if (roleOverride) {
|
||||
payload.utm_user_role = roleOverride;
|
||||
}
|
||||
return `${TEMPLATES_URLS.BASE_WEBSITE_URL}/?${this.websiteTemplateRepositoryParameters(payload).toString()}`;
|
||||
};
|
||||
},
|
||||
},
|
||||
actions: {
|
||||
addCategories(categories: ITemplatesCategory[]): void {
|
||||
categories.forEach((category: ITemplatesCategory) => {
|
||||
this.categories = {
|
||||
...this.categories,
|
||||
[category.id]: category,
|
||||
};
|
||||
});
|
||||
},
|
||||
addCollections(collections: Array<ITemplatesCollection | ITemplatesCollectionFull>): void {
|
||||
collections.forEach((collection) => {
|
||||
const workflows = (collection.workflows || []).map((workflow) => ({ id: workflow.id }));
|
||||
const cachedCollection = this.collections[collection.id] || {};
|
||||
|
||||
this.collections = {
|
||||
...this.collections,
|
||||
[collection.id]: {
|
||||
...cachedCollection,
|
||||
...collection,
|
||||
workflows,
|
||||
},
|
||||
};
|
||||
});
|
||||
},
|
||||
addWorkflows(workflows: Array<ITemplatesWorkflow | ITemplatesWorkflowFull>): void {
|
||||
workflows.forEach((workflow: ITemplatesWorkflow) => {
|
||||
const cachedWorkflow = this.workflows[workflow.id] || {};
|
||||
|
||||
this.workflows = {
|
||||
...this.workflows,
|
||||
[workflow.id]: {
|
||||
...cachedWorkflow,
|
||||
...workflow,
|
||||
},
|
||||
};
|
||||
});
|
||||
},
|
||||
addCollectionSearch(data: {
|
||||
collections: ITemplatesCollection[];
|
||||
query: ITemplatesQuery;
|
||||
}): void {
|
||||
const collectionIds = data.collections.map((collection) => String(collection.id));
|
||||
const searchKey = getSearchKey(data.query);
|
||||
|
||||
this.collectionSearches = {
|
||||
...this.collectionSearches,
|
||||
[searchKey]: {
|
||||
collectionIds,
|
||||
},
|
||||
};
|
||||
},
|
||||
addWorkflowsSearch(data: {
|
||||
totalWorkflows: number;
|
||||
workflows: ITemplatesWorkflow[];
|
||||
query: ITemplatesQuery;
|
||||
}): void {
|
||||
const workflowIds = data.workflows.map((workflow) => workflow.id);
|
||||
const searchKey = getSearchKey(data.query);
|
||||
const cachedResults = this.workflowSearches[searchKey];
|
||||
if (!cachedResults) {
|
||||
this.workflowSearches = {
|
||||
...this.workflowSearches,
|
||||
[searchKey]: {
|
||||
workflowIds: workflowIds as unknown as string[],
|
||||
totalWorkflows: data.totalWorkflows,
|
||||
categories: this.categories,
|
||||
},
|
||||
};
|
||||
|
||||
return;
|
||||
>
|
||||
>({});
|
||||
const collectionSearches = ref<
|
||||
Record<
|
||||
string,
|
||||
{
|
||||
collectionIds: string[];
|
||||
}
|
||||
>
|
||||
>({});
|
||||
const currentSessionId = ref<string>('');
|
||||
const previousSessionId = ref<string>('');
|
||||
const currentN8nPath = ref<string>(
|
||||
`${window.location.protocol}//${window.location.host}${window.BASE_PATH}`,
|
||||
);
|
||||
|
||||
this.workflowSearches = {
|
||||
...this.workflowSearches,
|
||||
[searchKey]: {
|
||||
workflowIds: [...cachedResults.workflowIds, ...workflowIds] as string[],
|
||||
totalWorkflows: data.totalWorkflows,
|
||||
categories: this.categories,
|
||||
},
|
||||
};
|
||||
},
|
||||
setWorkflowSearchLoading(query: ITemplatesQuery): void {
|
||||
const settingsStore = useSettingsStore();
|
||||
const rootStore = useRootStore();
|
||||
const userStore = useUsersStore();
|
||||
const workflowsStore = useWorkflowsStore();
|
||||
|
||||
const allCategories = computed(() => {
|
||||
return categories.value.sort((a: ITemplatesCategory, b: ITemplatesCategory) =>
|
||||
a.name > b.name ? 1 : -1,
|
||||
);
|
||||
});
|
||||
|
||||
const getTemplatesById = computed(() => {
|
||||
return (id: string): null | ITemplatesWorkflow => workflows.value[id];
|
||||
});
|
||||
|
||||
const getFullTemplateById = computed(() => {
|
||||
return (id: string): null | ITemplatesWorkflowFull => {
|
||||
const template = workflows.value[id];
|
||||
return template && 'full' in template && template.full ? template : null;
|
||||
};
|
||||
});
|
||||
|
||||
const getCollectionById = computed(() => collections.value);
|
||||
|
||||
const getCategoryById = computed(() => {
|
||||
return (id: string): null | ITemplatesCategory => categories.value[id as unknown as number];
|
||||
});
|
||||
|
||||
const getSearchedCollections = computed(() => {
|
||||
return (query: ITemplatesQuery) => {
|
||||
const searchKey = getSearchKey(query);
|
||||
const cachedResults = this.workflowSearches[searchKey];
|
||||
if (!cachedResults) {
|
||||
return;
|
||||
const search = collectionSearches.value[searchKey];
|
||||
if (!search) {
|
||||
return null;
|
||||
}
|
||||
|
||||
this.workflowSearches[searchKey] = {
|
||||
...this.workflowSearches[searchKey],
|
||||
loadingMore: true,
|
||||
};
|
||||
},
|
||||
setWorkflowSearchLoaded(query: ITemplatesQuery): void {
|
||||
return search.collectionIds.map((collectionId: string) => collections.value[collectionId]);
|
||||
};
|
||||
});
|
||||
|
||||
const getSearchedWorkflows = computed(() => {
|
||||
return (query: ITemplatesQuery) => {
|
||||
const searchKey = getSearchKey(query);
|
||||
const cachedResults = this.workflowSearches[searchKey];
|
||||
if (!cachedResults) {
|
||||
return;
|
||||
const search = workflowSearches.value[searchKey];
|
||||
if (!search) {
|
||||
return null;
|
||||
}
|
||||
|
||||
this.workflowSearches[searchKey] = {
|
||||
...this.workflowSearches[searchKey],
|
||||
loadingMore: false,
|
||||
return search.workflowIds.map((workflowId: string) => workflows.value[workflowId]);
|
||||
};
|
||||
});
|
||||
|
||||
const getSearchedWorkflowsTotal = computed(() => {
|
||||
return (query: ITemplatesQuery) => {
|
||||
const searchKey = getSearchKey(query);
|
||||
const search = workflowSearches.value[searchKey];
|
||||
|
||||
return search ? search.totalWorkflows : 0;
|
||||
};
|
||||
});
|
||||
|
||||
const isSearchLoadingMore = computed(() => {
|
||||
return (query: ITemplatesQuery) => {
|
||||
const searchKey = getSearchKey(query);
|
||||
const search = workflowSearches.value[searchKey];
|
||||
|
||||
return Boolean(search && search.loadingMore);
|
||||
};
|
||||
});
|
||||
|
||||
const isSearchFinished = computed(() => {
|
||||
return (query: ITemplatesQuery) => {
|
||||
const searchKey = getSearchKey(query);
|
||||
const search = workflowSearches.value[searchKey];
|
||||
|
||||
return Boolean(
|
||||
search && !search.loadingMore && search.totalWorkflows === search.workflowIds.length,
|
||||
);
|
||||
};
|
||||
});
|
||||
|
||||
const hasCustomTemplatesHost = computed(() => {
|
||||
return settingsStore.templatesHost !== TEMPLATES_URLS.DEFAULT_API_HOST;
|
||||
});
|
||||
|
||||
const websiteTemplateRepositoryParameters = computed(() => {
|
||||
const defaultParameters: Record<string, string> = {
|
||||
...TEMPLATES_URLS.UTM_QUERY,
|
||||
utm_instance: currentN8nPath.value,
|
||||
utm_n8n_version: rootStore.versionCli,
|
||||
utm_awc: String(workflowsStore.activeWorkflows.length),
|
||||
};
|
||||
const userRole: string | null | undefined =
|
||||
userStore.currentUserCloudInfo?.role ??
|
||||
(userStore.currentUser?.personalizationAnswers &&
|
||||
'role' in userStore.currentUser.personalizationAnswers
|
||||
? userStore.currentUser.personalizationAnswers.role
|
||||
: undefined);
|
||||
|
||||
if (userRole) {
|
||||
defaultParameters.utm_user_role = userRole;
|
||||
}
|
||||
return new URLSearchParams({
|
||||
...defaultParameters,
|
||||
});
|
||||
});
|
||||
|
||||
const websiteTemplateRepositoryURL = computed(
|
||||
() =>
|
||||
`${TEMPLATES_URLS.BASE_WEBSITE_URL}?${websiteTemplateRepositoryParameters.value.toString()}`,
|
||||
);
|
||||
|
||||
const addCategories = (_categories: ITemplatesCategory[]): void => {
|
||||
categories.value = _categories;
|
||||
};
|
||||
|
||||
const addCollections = (
|
||||
_collections: Array<ITemplatesCollection | ITemplatesCollectionFull>,
|
||||
): void => {
|
||||
_collections.forEach((collection) => {
|
||||
const workflows = (collection.workflows || []).map((workflow) => ({ id: workflow.id }));
|
||||
const cachedCollection = collections.value[collection.id] || {};
|
||||
|
||||
collections.value[collection.id] = {
|
||||
...cachedCollection,
|
||||
...collection,
|
||||
workflows,
|
||||
};
|
||||
},
|
||||
resetSessionId(): void {
|
||||
this.previousSessionId = this.currentSessionId;
|
||||
this.currentSessionId = '';
|
||||
},
|
||||
setSessionId(): void {
|
||||
if (!this.currentSessionId) {
|
||||
this.currentSessionId = `templates-${Date.now()}`;
|
||||
}
|
||||
},
|
||||
async fetchTemplateById(templateId: string): Promise<ITemplatesWorkflowFull> {
|
||||
const settingsStore = useSettingsStore();
|
||||
const rootStore = useRootStore();
|
||||
const apiEndpoint: string = settingsStore.templatesHost;
|
||||
const versionCli: string = rootStore.versionCli;
|
||||
const response = await getTemplateById(apiEndpoint, templateId, {
|
||||
'n8n-version': versionCli,
|
||||
});
|
||||
};
|
||||
|
||||
const addWorkflows = (_workflows: Array<ITemplatesWorkflow | ITemplatesWorkflowFull>): void => {
|
||||
_workflows.forEach((workflow) => {
|
||||
const cachedWorkflow = workflows.value[workflow.id] || {};
|
||||
workflows.value[workflow.id.toString()] = { ...cachedWorkflow, ...workflow };
|
||||
});
|
||||
};
|
||||
|
||||
const addCollectionsSearch = (data: {
|
||||
_collections: ITemplatesCollection[];
|
||||
query: ITemplatesQuery;
|
||||
}) => {
|
||||
const collectionIds = data._collections.map((collection) => String(collection.id));
|
||||
const searchKey = getSearchKey(data.query);
|
||||
|
||||
collectionSearches.value[searchKey] = {
|
||||
collectionIds,
|
||||
};
|
||||
};
|
||||
|
||||
const addWorkflowsSearch = (data: {
|
||||
totalWorkflows: number;
|
||||
workflows: ITemplatesWorkflow[];
|
||||
query: ITemplatesQuery;
|
||||
}) => {
|
||||
const workflowIds = data.workflows.map((workflow) => workflow.id);
|
||||
const searchKey = getSearchKey(data.query);
|
||||
const cachedResults = workflowSearches.value[searchKey];
|
||||
if (!cachedResults) {
|
||||
workflowSearches.value[searchKey] = {
|
||||
workflowIds: workflowIds as unknown as string[],
|
||||
totalWorkflows: data.totalWorkflows,
|
||||
categories: categories.value,
|
||||
};
|
||||
return;
|
||||
}
|
||||
|
||||
workflowSearches.value[searchKey] = {
|
||||
workflowIds: [...cachedResults.workflowIds, ...workflowIds] as string[],
|
||||
totalWorkflows: data.totalWorkflows,
|
||||
categories: categories.value,
|
||||
};
|
||||
};
|
||||
|
||||
const setWorkflowSearchLoading = (query: ITemplatesQuery): void => {
|
||||
const searchKey = getSearchKey(query);
|
||||
const cachedResults = workflowSearches.value[searchKey];
|
||||
if (!cachedResults) {
|
||||
return;
|
||||
}
|
||||
|
||||
workflowSearches.value[searchKey] = {
|
||||
...workflowSearches.value[searchKey],
|
||||
loadingMore: true,
|
||||
};
|
||||
};
|
||||
|
||||
const setWorkflowSearchLoaded = (query: ITemplatesQuery): void => {
|
||||
const searchKey = getSearchKey(query);
|
||||
const cachedResults = workflowSearches.value[searchKey];
|
||||
if (!cachedResults) {
|
||||
return;
|
||||
}
|
||||
|
||||
workflowSearches.value[searchKey] = {
|
||||
...workflowSearches.value[searchKey],
|
||||
loadingMore: false,
|
||||
};
|
||||
};
|
||||
|
||||
const resetSessionId = (): void => {
|
||||
previousSessionId.value = currentSessionId.value;
|
||||
currentSessionId.value = '';
|
||||
};
|
||||
|
||||
const setSessionId = (): void => {
|
||||
if (!currentSessionId.value) {
|
||||
currentSessionId.value = `templates-${Date.now()}`;
|
||||
}
|
||||
};
|
||||
|
||||
const fetchTemplateById = async (templateId: string): Promise<ITemplatesWorkflowFull> => {
|
||||
const apiEndpoint: string = settingsStore.templatesHost;
|
||||
const versionCli: string = rootStore.versionCli;
|
||||
const response = await templatesApi.getTemplateById(apiEndpoint, templateId, {
|
||||
'n8n-version': versionCli,
|
||||
});
|
||||
|
||||
const template: ITemplatesWorkflowFull = {
|
||||
...response.workflow,
|
||||
full: true,
|
||||
};
|
||||
addWorkflows([template]);
|
||||
|
||||
return template;
|
||||
};
|
||||
|
||||
const fetchCollectionById = async (
|
||||
collectionId: string,
|
||||
): Promise<ITemplatesCollection | null> => {
|
||||
const apiEndpoint: string = settingsStore.templatesHost;
|
||||
const versionCli: string = rootStore.versionCli;
|
||||
const response = await templatesApi.getCollectionById(apiEndpoint, collectionId, {
|
||||
'n8n-version': versionCli,
|
||||
});
|
||||
const collection: ITemplatesCollectionFull = {
|
||||
...response.collection,
|
||||
full: true,
|
||||
};
|
||||
|
||||
addCollections([collection]);
|
||||
addWorkflows(response.collection.workflows);
|
||||
return getCollectionById.value[collectionId];
|
||||
};
|
||||
|
||||
const getCategories = async (): Promise<ITemplatesCategory[]> => {
|
||||
const cachedCategories = allCategories.value;
|
||||
if (cachedCategories.length) {
|
||||
return cachedCategories;
|
||||
}
|
||||
const apiEndpoint: string = settingsStore.templatesHost;
|
||||
const versionCli: string = rootStore.versionCli;
|
||||
const response = await templatesApi.getCategories(apiEndpoint, {
|
||||
'n8n-version': versionCli,
|
||||
});
|
||||
const categories = response.categories;
|
||||
|
||||
addCategories(categories);
|
||||
return categories;
|
||||
};
|
||||
|
||||
const getCollections = async (query: ITemplatesQuery): Promise<ITemplatesCollection[]> => {
|
||||
const cachedResults = getSearchedCollections.value(query);
|
||||
if (cachedResults) {
|
||||
return cachedResults;
|
||||
}
|
||||
|
||||
const apiEndpoint: string = settingsStore.templatesHost;
|
||||
const versionCli: string = rootStore.versionCli;
|
||||
const response = await templatesApi.getCollections(apiEndpoint, query, {
|
||||
'n8n-version': versionCli,
|
||||
});
|
||||
const collections = response.collections;
|
||||
|
||||
addCollections(collections);
|
||||
addCollectionsSearch({ query, _collections: collections });
|
||||
collections.forEach((collection) => addWorkflows(collection.workflows as ITemplatesWorkflow[]));
|
||||
|
||||
return collections;
|
||||
};
|
||||
|
||||
const getWorkflows = async (query: ITemplatesQuery): Promise<ITemplatesWorkflow[]> => {
|
||||
const cachedResults = getSearchedWorkflows.value(query);
|
||||
if (cachedResults) {
|
||||
categories.value = workflowSearches.value[getSearchKey(query)].categories ?? [];
|
||||
return cachedResults;
|
||||
}
|
||||
|
||||
const apiEndpoint: string = settingsStore.templatesHost;
|
||||
const versionCli: string = rootStore.versionCli;
|
||||
const payload = await templatesApi.getWorkflows(
|
||||
apiEndpoint,
|
||||
{ ...query, page: 1, limit: TEMPLATES_PAGE_SIZE },
|
||||
{ 'n8n-version': versionCli },
|
||||
);
|
||||
|
||||
addWorkflows(payload.workflows);
|
||||
addWorkflowsSearch({ ...payload, query });
|
||||
return getSearchedWorkflows.value(query) || [];
|
||||
};
|
||||
|
||||
const getMoreWorkflows = async (query: ITemplatesQuery): Promise<ITemplatesWorkflow[]> => {
|
||||
if (isSearchLoadingMore.value(query) && !isSearchFinished.value(query)) {
|
||||
return [];
|
||||
}
|
||||
const cachedResults = getSearchedWorkflows.value(query) || [];
|
||||
const apiEndpoint: string = settingsStore.templatesHost;
|
||||
|
||||
setWorkflowSearchLoading(query);
|
||||
try {
|
||||
const payload = await templatesApi.getWorkflows(apiEndpoint, {
|
||||
...query,
|
||||
page: cachedResults.length / TEMPLATES_PAGE_SIZE + 1,
|
||||
limit: TEMPLATES_PAGE_SIZE,
|
||||
});
|
||||
|
||||
const template: ITemplatesWorkflowFull = {
|
||||
...response.workflow,
|
||||
full: true,
|
||||
};
|
||||
this.addWorkflows([template]);
|
||||
setWorkflowSearchLoaded(query);
|
||||
addWorkflows(payload.workflows);
|
||||
addWorkflowsSearch({ ...payload, query });
|
||||
|
||||
return template;
|
||||
},
|
||||
async fetchCollectionById(collectionId: string): Promise<ITemplatesCollection | null> {
|
||||
const settingsStore = useSettingsStore();
|
||||
const rootStore = useRootStore();
|
||||
const apiEndpoint: string = settingsStore.templatesHost;
|
||||
const versionCli: string = rootStore.versionCli;
|
||||
const response = await getCollectionById(apiEndpoint, collectionId, {
|
||||
'n8n-version': versionCli,
|
||||
return getSearchedWorkflows.value(query) || [];
|
||||
} catch (e) {
|
||||
setWorkflowSearchLoaded(query);
|
||||
throw e;
|
||||
}
|
||||
};
|
||||
|
||||
const getWorkflowTemplate = async (templateId: string): Promise<IWorkflowTemplate> => {
|
||||
const apiEndpoint: string = settingsStore.templatesHost;
|
||||
const versionCli: string = rootStore.versionCli;
|
||||
return await templatesApi.getWorkflowTemplate(apiEndpoint, templateId, {
|
||||
'n8n-version': versionCli,
|
||||
});
|
||||
};
|
||||
|
||||
const getFixedWorkflowTemplate = async (
|
||||
templateId: string,
|
||||
): Promise<IWorkflowTemplate | undefined> => {
|
||||
const template = await getWorkflowTemplate(templateId);
|
||||
if (template?.workflow?.nodes) {
|
||||
template.workflow.nodes = getFixedNodesList(template.workflow.nodes) as INodeUi[];
|
||||
template.workflow.nodes?.forEach((node) => {
|
||||
if (node.credentials) {
|
||||
delete node.credentials;
|
||||
}
|
||||
});
|
||||
const collection: ITemplatesCollectionFull = {
|
||||
...response.collection,
|
||||
full: true,
|
||||
};
|
||||
}
|
||||
|
||||
this.addCollections([collection]);
|
||||
this.addWorkflows(response.collection.workflows);
|
||||
return this.getCollectionById(collectionId);
|
||||
},
|
||||
async getCategories(): Promise<ITemplatesCategory[]> {
|
||||
const cachedCategories = this.allCategories;
|
||||
if (cachedCategories.length) {
|
||||
return cachedCategories;
|
||||
}
|
||||
const settingsStore = useSettingsStore();
|
||||
const rootStore = useRootStore();
|
||||
const apiEndpoint: string = settingsStore.templatesHost;
|
||||
const versionCli: string = rootStore.versionCli;
|
||||
const response = await getCategories(apiEndpoint, { 'n8n-version': versionCli });
|
||||
const categories = response.categories;
|
||||
return template;
|
||||
};
|
||||
|
||||
this.addCategories(categories);
|
||||
return categories;
|
||||
},
|
||||
async getCollections(query: ITemplatesQuery): Promise<ITemplatesCollection[]> {
|
||||
const cachedResults = this.getSearchedCollections(query);
|
||||
if (cachedResults) {
|
||||
return cachedResults;
|
||||
}
|
||||
|
||||
const settingsStore = useSettingsStore();
|
||||
const rootStore = useRootStore();
|
||||
const apiEndpoint: string = settingsStore.templatesHost;
|
||||
const versionCli: string = rootStore.versionCli;
|
||||
const response = await getCollections(apiEndpoint, query, { 'n8n-version': versionCli });
|
||||
const collections = response.collections;
|
||||
|
||||
this.addCollections(collections);
|
||||
this.addCollectionSearch({ query, collections });
|
||||
collections.forEach((collection) =>
|
||||
this.addWorkflows(collection.workflows as ITemplatesWorkflowFull[]),
|
||||
);
|
||||
|
||||
return collections;
|
||||
},
|
||||
async getWorkflows(query: ITemplatesQuery): Promise<ITemplatesWorkflow[]> {
|
||||
const cachedResults = this.getSearchedWorkflows(query);
|
||||
if (cachedResults) {
|
||||
this.categories = this.workflowSearches[getSearchKey(query)].categories ?? [];
|
||||
return cachedResults;
|
||||
}
|
||||
|
||||
const settingsStore = useSettingsStore();
|
||||
const rootStore = useRootStore();
|
||||
const apiEndpoint: string = settingsStore.templatesHost;
|
||||
const versionCli: string = rootStore.versionCli;
|
||||
|
||||
const payload = await getWorkflows(
|
||||
apiEndpoint,
|
||||
{ ...query, page: 1, limit: TEMPLATES_PAGE_SIZE },
|
||||
{ 'n8n-version': versionCli },
|
||||
);
|
||||
|
||||
this.addWorkflows(payload.workflows);
|
||||
this.addWorkflowsSearch({ ...payload, query });
|
||||
return this.getSearchedWorkflows(query) || [];
|
||||
},
|
||||
async getMoreWorkflows(query: ITemplatesQuery): Promise<ITemplatesWorkflow[]> {
|
||||
if (this.isSearchLoadingMore(query) && !this.isSearchFinished(query)) {
|
||||
return [];
|
||||
}
|
||||
const cachedResults = this.getSearchedWorkflows(query) || [];
|
||||
const settingsStore = useSettingsStore();
|
||||
const apiEndpoint: string = settingsStore.templatesHost;
|
||||
|
||||
this.setWorkflowSearchLoading(query);
|
||||
try {
|
||||
const payload = await getWorkflows(apiEndpoint, {
|
||||
...query,
|
||||
page: cachedResults.length / TEMPLATES_PAGE_SIZE + 1,
|
||||
limit: TEMPLATES_PAGE_SIZE,
|
||||
});
|
||||
|
||||
this.setWorkflowSearchLoaded(query);
|
||||
this.addWorkflows(payload.workflows);
|
||||
this.addWorkflowsSearch({ ...payload, query });
|
||||
|
||||
return this.getSearchedWorkflows(query) || [];
|
||||
} catch (e) {
|
||||
this.setWorkflowSearchLoaded(query);
|
||||
throw e;
|
||||
}
|
||||
},
|
||||
async getWorkflowTemplate(templateId: string): Promise<IWorkflowTemplate> {
|
||||
const settingsStore = useSettingsStore();
|
||||
const rootStore = useRootStore();
|
||||
const apiEndpoint: string = settingsStore.templatesHost;
|
||||
const versionCli: string = rootStore.versionCli;
|
||||
return await getWorkflowTemplate(apiEndpoint, templateId, { 'n8n-version': versionCli });
|
||||
},
|
||||
|
||||
async getFixedWorkflowTemplate(templateId: string): Promise<IWorkflowTemplate | undefined> {
|
||||
const template = await this.getWorkflowTemplate(templateId);
|
||||
if (template?.workflow?.nodes) {
|
||||
template.workflow.nodes = getFixedNodesList(template.workflow.nodes) as INodeUi[];
|
||||
template.workflow.nodes?.forEach((node) => {
|
||||
if (node.credentials) {
|
||||
delete node.credentials;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return template;
|
||||
},
|
||||
},
|
||||
return {
	categories,
	collections,
	workflows,
	workflowSearches,
	collectionSearches,
	currentSessionId,
	previousSessionId,
	currentN8nPath,
	allCategories,
	getTemplatesById,
	getFullTemplateById,
	getCollectionById,
	getCategoryById,
	getSearchedCollections,
	getSearchedWorkflows,
	getSearchedWorkflowsTotal,
	isSearchLoadingMore,
	isSearchFinished,
	hasCustomTemplatesHost,
	websiteTemplateRepositoryURL,
	websiteTemplateRepositoryParameters,
	addCategories,
	addCollections,
	addWorkflows,
	addCollectionsSearch,
	addWorkflowsSearch,
	setWorkflowSearchLoading,
	setWorkflowSearchLoaded,
	resetSessionId,
	setSessionId,
	fetchTemplateById,
	fetchCollectionById,
	getCategories,
	getCollections,
	getWorkflows,
	getMoreWorkflows,
	getWorkflowTemplate,
	getFixedWorkflowTemplate,
};
});
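The templates store gets the same treatment: state becomes refs, getters become computed functions, and API calls go through the templatesApi namespace import. A sketch of the paginated search flow follows; it assumes the store is exported from '@/stores/templates.store' and that ITemplatesQuery accepts a search string and a category list, since the exact query shape is not shown here. getWorkflows, getMoreWorkflows, and TEMPLATES_PAGE_SIZE come from the diff above.

// Sketch only: first page + next page of template search results.
import { useTemplatesStore } from '@/stores/templates.store'; // assumed path

async function loadTemplates(search: string) {
	const templatesStore = useTemplatesStore();
	const query = { search, categories: [] }; // assumed ITemplatesQuery shape

	// getWorkflows fetches page 1 (TEMPLATES_PAGE_SIZE items) and caches it per search key.
	const firstPage = await templatesStore.getWorkflows(query);

	// getMoreWorkflows derives the next page number from the cached result count
	// and returns an empty array if a load for this query is already in flight.
	const nextPage = await templatesStore.getMoreWorkflows(query);

	return [...firstPage, ...nextPage];
}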

@@ -60,7 +60,7 @@ export const isNoInputConnectionError = (error: unknown): error is ExpressionErr
};

export const isAnyPairedItemError = (error: unknown): error is ExpressionError => {
	return error instanceof ExpressionError && error.context.functionality === 'pairedItem';
	return error instanceof ExpressionError && error.functionality === 'pairedItem';
};

export const getResolvableState = (error: unknown, ignoreError = false): ResolvableState => {

@@ -35,14 +35,14 @@ const collectionId = computed(() => {
	return Array.isArray(id) ? id[0] : id;
});

const collection = computed(() => templatesStore.getCollectionById(collectionId.value));
const collection = computed(() => templatesStore.getCollectionById[collectionId.value]);

const collectionWorkflows = computed(() => {
	if (!collection.value || loading.value) {
		return [];
	}
	return collection.value.workflows
		.map(({ id }) => templatesStore.getTemplateById(id.toString()))
		.map(({ id }) => templatesStore.getTemplatesById(id.toString()))
		.filter((workflow): workflow is ITemplatesWorkflow => !!workflow);
});

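The two one-line swaps above track API changes in the refactored templates store: getCollectionById is now a computed map that is indexed directly, and getTemplateById was renamed to getTemplatesById. Side by side, with placeholder ids and assuming templatesStore is an instance of the store:

// Sketch only; '123' and '456' are placeholder ids.
const before = templatesStore.getCollectionById('123'); // old getter returned a lookup function
const after = templatesStore.getCollectionById['123']; // new computed exposes the collections map
const template = templatesStore.getTemplatesById('456'); // renamed from getTemplateById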

@@ -124,14 +124,16 @@ export class Supabase implements INodeType {
		const items = this.getInputData();
		const returnData: INodeExecutionData[] = [];
		const length = items.length;
		const qs: IDataObject = {};
		let qs: IDataObject = {};
		const resource = this.getNodeParameter('resource', 0);
		const operation = this.getNodeParameter('operation', 0);

		if (resource === 'row') {
			const tableId = this.getNodeParameter('tableId', 0) as string;

			if (operation === 'create') {
				const records: IDataObject[] = [];
				const tableId = this.getNodeParameter('tableId', 0) as string;

				for (let i = 0; i < length; i++) {
					const record: IDataObject = {};
					const dataToSend = this.getNodeParameter('dataToSend', 0) as
@@ -185,7 +187,6 @@ export class Supabase implements INodeType {
			}

			if (operation === 'delete') {
				const tableId = this.getNodeParameter('tableId', 0) as string;
				const filterType = this.getNodeParameter('filterType', 0) as string;
				for (let i = 0; i < length; i++) {
					let endpoint = `/${tableId}`;
@@ -241,7 +242,6 @@ export class Supabase implements INodeType {
			}

			if (operation === 'get') {
				const tableId = this.getNodeParameter('tableId', 0) as string;
				const endpoint = `/${tableId}`;

				for (let i = 0; i < length; i++) {
@@ -281,11 +281,13 @@ export class Supabase implements INodeType {
			}

			if (operation === 'getAll') {
				const tableId = this.getNodeParameter('tableId', 0) as string;
				const returnAll = this.getNodeParameter('returnAll', 0);
				const filterType = this.getNodeParameter('filterType', 0) as string;

				let endpoint = `/${tableId}`;
				for (let i = 0; i < length; i++) {
					qs = {}; // reset qs

					if (filterType === 'manual') {
						const matchType = this.getNodeParameter('matchType', 0) as string;
						const keys = this.getNodeParameter('filters.conditions', i, []) as IDataObject[];
@@ -342,7 +344,6 @@ export class Supabase implements INodeType {
			}

			if (operation === 'update') {
				const tableId = this.getNodeParameter('tableId', 0) as string;
				const filterType = this.getNodeParameter('filterType', 0) as string;
				let endpoint = `/${tableId}`;
				for (let i = 0; i < length; i++) {
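One behavioural detail worth calling out in the Supabase changes: qs is now declared with let and cleared at the top of each getAll iteration, so filters built for one input item no longer leak into the next. A standalone sketch of the failure mode being fixed (not node code; the data and field names are illustrative):

// Without the per-item reset, a filter set for item 0 would still be present for item 1.
const items = [{ name: 'Alice' }, { name: null }];
let qs: Record<string, string> = {};

for (const [i, item] of items.entries()) {
	qs = {}; // mirrors the `qs = {}; // reset qs` line added above
	if (item.name) qs.name = `eq.${item.name}`;
	console.log(i, qs); // 0 { name: 'eq.Alice' }, then 1 {} rather than a stale filter
}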

@@ -40,7 +40,6 @@ export class ExpressionError extends ExecutionBaseError {
			'causeDetailed',
			'descriptionTemplate',
			'descriptionKey',
			'functionality',
			'itemIndex',
			'messageTemplate',
			'nodeCause',
@@ -48,7 +47,12 @@ export class ExpressionError extends ExecutionBaseError {
			'runIndex',
			'type',
		];

		if (options !== undefined) {
			if (options.functionality !== undefined) {
				this.functionality = options.functionality;
			}

			Object.keys(options as IDataObject).forEach((key) => {
				if (allowedKeys.includes(key)) {
					this.context[key] = (options as IDataObject)[key];

@@ -261,7 +261,7 @@ export class NodeApiError extends NodeError {
			messageMapping,
		);

		if (functionality !== undefined) this.context.functionality = functionality;
		if (functionality !== undefined) this.functionality = functionality;
		if (runIndex !== undefined) this.context.runIndex = runIndex;
		if (itemIndex !== undefined) this.context.itemIndex = itemIndex;
	}