Mirror of https://github.com/n8n-io/n8n.git (synced 2025-01-17 15:47:50 -08:00)

refactor(editor): Stop cloning and serializing full execution data for executionFinished push message (#11703)

This commit is contained in:
parent 7bb9002cbc
commit 15ca2c4e45
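In short: the executionFinished push message no longer carries a cloned, flatted-serialized copy of the full IRun; it now identifies the execution only, and the editor fetches the execution record over REST when the event arrives. A minimal TypeScript sketch of the receiving side, assuming a simplified response shape (fetchExecutionDataById mirrors the store method touched in this diff; it is not new API):

import { parse } from 'flatted';
import type { IRunExecutionData } from 'n8n-workflow';

// New, slimmed-down push payload: just enough to identify the execution.
type ExecutionFinishedPayload = { executionId: string };

// On 'executionFinished', pull the execution over REST and parse the
// flatted-encoded `data` string back into an object graph before using it.
async function handleExecutionFinished(
	payload: ExecutionFinishedPayload,
	fetchExecutionDataById: (id: string) => Promise<{ data?: unknown } | null>,
): Promise<IRunExecutionData | undefined> {
	const execution = await fetchExecutionDataById(payload.executionId);
	if (!execution?.data) return undefined;
	return parse(execution.data as string) as IRunExecutionData;
}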
@ -26,6 +26,7 @@
 "cypress": "^13.14.2",
 "cypress-otp": "^1.0.3",
 "cypress-real-events": "^1.13.0",
+"flatted": "catalog:",
 "lodash": "catalog:",
 "nanoid": "catalog:",
 "start-server-and-test": "^2.0.8"

@ -1,7 +1,7 @@
 // Load type definitions that come with Cypress module
 /// <reference types="cypress" />

-import type { FrontendSettings } from '@n8n/api-types';
+import type { FrontendSettings, PushPayload, PushType } from '@n8n/api-types';

 Cypress.Keyboard.defaults({
 	keystrokeDelay: 0,

@ -66,7 +66,7 @@ declare global {
 			droppableSelector: string,
 			options?: Partial<DragAndDropOptions>,
 		): void;
-		push(type: string, data: unknown): void;
+		push<Type extends PushType>(type: Type, data: PushPayload<Type>): void;
 		shouldNotHaveConsoleErrors(): void;
 		window(): Chainable<
 			AUTWindow & {

@ -1,5 +1,5 @@
|
|||
import { stringify } from 'flatted';
|
||||
import type { IDataObject, IPinData, ITaskData, ITaskDataConnections } from 'n8n-workflow';
|
||||
import { nanoid } from 'nanoid';
|
||||
|
||||
import { clickExecuteWorkflowButton } from '../composables/workflow';
|
||||
|
||||
|
@ -39,25 +39,20 @@ export function createMockNodeExecutionData(
|
|||
};
|
||||
}
|
||||
|
||||
export function createMockWorkflowExecutionData({
|
||||
executionId,
|
||||
function createMockWorkflowExecutionData({
|
||||
runData,
|
||||
pinData = {},
|
||||
lastNodeExecuted,
|
||||
}: {
|
||||
executionId: string;
|
||||
runData: Record<string, ITaskData | ITaskData[]>;
|
||||
pinData?: IPinData;
|
||||
lastNodeExecuted: string;
|
||||
}) {
|
||||
return {
|
||||
executionId,
|
||||
data: {
|
||||
data: {
|
||||
data: stringify({
|
||||
startData: {},
|
||||
resultData: {
|
||||
runData,
|
||||
pinData,
|
||||
pinData: {},
|
||||
lastNodeExecuted,
|
||||
},
|
||||
executionData: {
|
||||
|
@ -67,13 +62,12 @@ export function createMockWorkflowExecutionData({
|
|||
waitingExecution: {},
|
||||
waitingExecutionSource: {},
|
||||
},
|
||||
},
|
||||
}),
|
||||
mode: 'manual',
|
||||
startedAt: new Date().toISOString(),
|
||||
stoppedAt: new Date().toISOString(),
|
||||
status: 'success',
|
||||
finished: true,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -81,14 +75,12 @@ export function runMockWorkflowExecution({
|
|||
trigger,
|
||||
lastNodeExecuted,
|
||||
runData,
|
||||
workflowExecutionData,
|
||||
}: {
|
||||
trigger?: () => void;
|
||||
lastNodeExecuted: string;
|
||||
runData: Array<ReturnType<typeof createMockNodeExecutionData>>;
|
||||
workflowExecutionData?: ReturnType<typeof createMockWorkflowExecutionData>;
|
||||
}) {
|
||||
const executionId = nanoid(8);
|
||||
const executionId = Math.floor(Math.random() * 1_000_000).toString();
|
||||
|
||||
cy.intercept('POST', '/rest/workflows/**/run?**', {
|
||||
statusCode: 201,
|
||||
|
@ -125,13 +117,17 @@ export function runMockWorkflowExecution({
|
|||
resolvedRunData[nodeName] = nodeExecution[nodeName];
|
||||
});
|
||||
|
||||
cy.push(
|
||||
'executionFinished',
|
||||
createMockWorkflowExecutionData({
|
||||
executionId,
|
||||
cy.intercept('GET', `/rest/executions/${executionId}`, {
|
||||
statusCode: 200,
|
||||
body: {
|
||||
data: createMockWorkflowExecutionData({
|
||||
lastNodeExecuted,
|
||||
runData: resolvedRunData,
|
||||
...workflowExecutionData,
|
||||
}),
|
||||
);
|
||||
},
|
||||
}).as('getExecution');
|
||||
|
||||
cy.push('executionFinished', { executionId });
|
||||
|
||||
cy.wait('@getExecution');
|
||||
}
|
||||
|
|
|
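The test helper above now mirrors production behaviour: the full (flatted) execution data is served from a stubbed REST endpoint, and only the execution id travels over the push channel. A condensed sketch of that sequence, using the names from this diff (the mock body is trimmed; the real helper also spreads workflowExecutionData overrides into the mock):

const executionId = Math.floor(Math.random() * 1_000_000).toString();

// Serve the full execution record over REST instead of pushing it.
cy.intercept('GET', `/rest/executions/${executionId}`, {
	statusCode: 200,
	body: { data: createMockWorkflowExecutionData({ lastNodeExecuted, runData: resolvedRunData }) },
}).as('getExecution');

// The push message itself now carries only the execution id.
cy.push('executionFinished', { executionId });
cy.wait('@getExecution');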
@ -1,4 +1,4 @@
-import type { IRun, ITaskData, WorkflowExecuteMode } from 'n8n-workflow';
+import type { ITaskData, WorkflowExecuteMode } from 'n8n-workflow';

 type ExecutionStarted = {
 	type: 'executionStarted';

@ -16,8 +16,6 @@ type ExecutionFinished = {
 	type: 'executionFinished';
 	data: {
 		executionId: string;
-		data: IRun;
-		retryOf?: string;
 	};
 };

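With `data: IRun` and `retryOf` removed, PushPayload<'executionFinished'> reduces to `{ executionId: string }`, which the generic Cypress `push` command above checks at compile time. A small sketch (the declare mirrors the command signature from this diff; it is not new API):

import type { PushPayload, PushType } from '@n8n/api-types';

declare function push<Type extends PushType>(type: Type, data: PushPayload<Type>): void;

push('executionFinished', { executionId: '123' }); // compiles: only the id is pushed now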
@ -122,7 +122,7 @@
 "express-rate-limit": "7.2.0",
 "fast-glob": "catalog:",
 "flat": "5.0.2",
-"flatted": "3.2.7",
+"flatted": "catalog:",
 "formidable": "3.5.1",
 "handlebars": "4.7.8",
 "helmet": "7.1.0",

@ -309,54 +309,18 @@ function hookFunctionsPush(): IWorkflowExecuteHooks {
 			},
 		],
 		workflowExecuteAfter: [
-			async function (this: WorkflowHooks, fullRunData: IRun): Promise<void> {
-				const { pushRef, executionId, retryOf } = this;
+			async function (this: WorkflowHooks): Promise<void> {
+				const { pushRef, executionId } = this;
+				if (pushRef === undefined) return;

 				const { id: workflowId } = this.workflowData;
 				logger.debug('Executing hook (hookFunctionsPush)', {
 					executionId,
 					pushRef,
 					workflowId,
 				});
-				// Push data to session which started the workflow
-				if (pushRef === undefined) {
-					return;
-				}
-
-				// Clone the object except the runData. That one is not supposed
-				// to be send. Because that data got send piece by piece after
-				// each node which finished executing
-				// Edit: we now DO send the runData to the UI if mode=manual so that it shows the point of crashes
-				let pushRunData;
-				if (fullRunData.mode === 'manual') {
-					pushRunData = fullRunData;
-				} else {
-					pushRunData = {
-						...fullRunData,
-						data: {
-							...fullRunData.data,
-							resultData: {
-								...fullRunData.data.resultData,
-								runData: {},
-							},
-						},
-					};
-				}
-
-				// Push data to editor-ui once workflow finished
-				logger.debug(`Save execution progress to database for execution ID ${executionId} `, {
-					executionId,
-					workflowId,
-				});
-				// TODO: Look at this again
-				pushInstance.send(
-					'executionFinished',
-					{
-						executionId,
-						data: pushRunData,
-						retryOf,
-					},
-					pushRef,
-				);
+				pushInstance.send('executionFinished', { executionId }, pushRef);
 			},
 		],
 	};

@ -56,7 +56,7 @@
 "esprima-next": "5.8.4",
 "fast-json-stable-stringify": "^2.1.0",
 "file-saver": "^2.0.2",
-"flatted": "^3.2.4",
+"flatted": "catalog:",
 "highlight.js": "catalog:frontend",
 "humanize-duration": "^3.27.2",
 "jsonpath": "^1.1.1",

@ -392,15 +392,10 @@ export interface IExecutionsListResponse {
|
|||
|
||||
export interface IExecutionsCurrentSummaryExtended {
|
||||
id: string;
|
||||
finished?: boolean;
|
||||
status: ExecutionStatus;
|
||||
mode: WorkflowExecuteMode;
|
||||
retryOf?: string | null;
|
||||
retrySuccessId?: string | null;
|
||||
startedAt: Date;
|
||||
stoppedAt?: Date;
|
||||
workflowId: string;
|
||||
workflowName?: string;
|
||||
}
|
||||
|
||||
export interface IExecutionsStopData {
|
||||
|
@ -839,14 +834,12 @@ export interface IUsedCredential {
|
|||
}
|
||||
|
||||
export interface WorkflowsState {
|
||||
activeExecutions: IExecutionsCurrentSummaryExtended[];
|
||||
activeWorkflows: string[];
|
||||
activeWorkflowExecution: ExecutionSummary | null;
|
||||
currentWorkflowExecutions: ExecutionSummary[];
|
||||
activeExecutionId: string | null;
|
||||
executingNode: string[];
|
||||
executionWaitingForWebhook: boolean;
|
||||
finishedExecutionsCount: number;
|
||||
nodeMetadata: NodeMetadataMap;
|
||||
subWorkflowExecutionError: Error | null;
|
||||
usedCredentials: Record<string, IUsedCredential>;
|
||||
|
@ -1083,11 +1076,6 @@ export interface IVersionsState {
|
|||
currentVersion: IVersion | undefined;
|
||||
}
|
||||
|
||||
export interface IWorkflowsState {
|
||||
currentWorkflowExecutions: ExecutionSummary[];
|
||||
activeWorkflowExecution: ExecutionSummary | null;
|
||||
finishedExecutionsCount: number;
|
||||
}
|
||||
export interface IWorkflowsMap {
|
||||
[name: string]: IWorkflowDb;
|
||||
}
|
||||
|
|
|
@ -1,6 +1,9 @@
|
|||
import { stringify } from 'flatted';
|
||||
import { useRouter } from 'vue-router';
|
||||
import { createPinia, setActivePinia } from 'pinia';
|
||||
import type { PushMessage, PushPayload } from '@n8n/api-types';
|
||||
import { mock } from 'vitest-mock-extended';
|
||||
import type { WorkflowOperationError } from 'n8n-workflow';
|
||||
|
||||
import { usePushConnection } from '@/composables/usePushConnection';
|
||||
import { usePushConnectionStore } from '@/stores/pushConnection.store';
|
||||
|
@ -8,7 +11,7 @@ import { useOrchestrationStore } from '@/stores/orchestration.store';
|
|||
import { useUIStore } from '@/stores/ui.store';
|
||||
import { useWorkflowsStore } from '@/stores/workflows.store';
|
||||
import { useToast } from '@/composables/useToast';
|
||||
import type { WorkflowOperationError } from 'n8n-workflow';
|
||||
import type { IExecutionResponse } from '@/Interface';
|
||||
|
||||
vi.mock('vue-router', () => {
|
||||
return {
|
||||
|
@ -135,34 +138,40 @@ describe('usePushConnection()', () => {
|
|||
});
|
||||
|
||||
describe('executionFinished', () => {
|
||||
it('should handle executionFinished event correctly', async () => {
|
||||
const executionId = '1';
|
||||
const event: PushMessage = {
|
||||
type: 'executionFinished',
|
||||
data: {
|
||||
executionId: '1',
|
||||
data: {
|
||||
data: {
|
||||
data: { executionId: '1' },
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
workflowsStore.activeExecutionId = executionId;
|
||||
uiStore.isActionActive.workflowRunning = true;
|
||||
});
|
||||
|
||||
it('should handle executionFinished event correctly', async () => {
|
||||
const spy = vi.spyOn(workflowsStore, 'fetchExecutionDataById').mockResolvedValue(
|
||||
mock<IExecutionResponse>({
|
||||
id: executionId,
|
||||
data: stringify({
|
||||
resultData: {
|
||||
runData: {},
|
||||
},
|
||||
},
|
||||
}) as unknown as IExecutionResponse['data'],
|
||||
finished: true,
|
||||
mode: 'manual',
|
||||
startedAt: new Date(),
|
||||
stoppedAt: new Date(),
|
||||
status: 'success',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
workflowsStore.activeExecutionId = '1';
|
||||
uiStore.isActionActive.workflowRunning = true;
|
||||
}),
|
||||
);
|
||||
|
||||
const result = await pushConnection.pushMessageReceived(event);
|
||||
|
||||
expect(result).toBeTruthy();
|
||||
expect(workflowsStore.workflowExecutionData).toBeDefined();
|
||||
expect(uiStore.isActionActive['workflowRunning']).toBeTruthy();
|
||||
expect(spy).toHaveBeenCalledWith(executionId);
|
||||
|
||||
expect(toast.showMessage).toHaveBeenCalledWith({
|
||||
title: 'Workflow executed successfully',
|
||||
|
@ -171,12 +180,10 @@ describe('usePushConnection()', () => {
|
|||
});
|
||||
|
||||
it('should handle isManualExecutionCancelled correctly', async () => {
|
||||
const event: PushMessage = {
|
||||
type: 'executionFinished',
|
||||
data: {
|
||||
executionId: '1',
|
||||
data: {
|
||||
data: {
|
||||
const spy = vi.spyOn(workflowsStore, 'fetchExecutionDataById').mockResolvedValue(
|
||||
mock<IExecutionResponse>({
|
||||
id: executionId,
|
||||
data: stringify({
|
||||
startData: {},
|
||||
resultData: {
|
||||
runData: {
|
||||
|
@ -190,16 +197,12 @@ describe('usePushConnection()', () => {
|
|||
node: 'Last Node',
|
||||
} as unknown as WorkflowOperationError,
|
||||
},
|
||||
},
|
||||
startedAt: new Date(),
|
||||
}) as unknown as IExecutionResponse['data'],
|
||||
mode: 'manual',
|
||||
startedAt: new Date(),
|
||||
status: 'running',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
workflowsStore.activeExecutionId = '1';
|
||||
uiStore.isActionActive['workflowRunning'] = true;
|
||||
}),
|
||||
);
|
||||
|
||||
const result = await pushConnection.pushMessageReceived(event);
|
||||
|
||||
|
@ -215,6 +218,7 @@ describe('usePushConnection()', () => {
|
|||
expect(result).toBeTruthy();
|
||||
expect(workflowsStore.workflowExecutionData).toBeDefined();
|
||||
expect(uiStore.isActionActive.workflowRunning).toBeTruthy();
|
||||
expect(spy).toHaveBeenCalledWith(executionId);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -6,7 +6,6 @@ import type {
|
|||
ExpressionError,
|
||||
IDataObject,
|
||||
INodeTypeNameVersion,
|
||||
IRun,
|
||||
IRunExecutionData,
|
||||
IWorkflowBase,
|
||||
SubworkflowOperationError,
|
||||
|
@ -15,9 +14,8 @@ import type {
|
|||
INodeTypeDescription,
|
||||
NodeError,
|
||||
} from 'n8n-workflow';
|
||||
import type { PushMessage, PushPayload } from '@n8n/api-types';
|
||||
import type { PushMessage } from '@n8n/api-types';
|
||||
|
||||
import type { IExecutionResponse, IExecutionsCurrentSummaryExtended } from '@/Interface';
|
||||
import { useNodeHelpers } from '@/composables/useNodeHelpers';
|
||||
import { useToast } from '@/composables/useToast';
|
||||
import { WORKFLOW_SETTINGS_MODAL_KEY } from '@/constants';
|
||||
|
@ -38,8 +36,6 @@ import type { PushMessageQueueItem } from '@/types';
|
|||
import { useAssistantStore } from '@/stores/assistant.store';
|
||||
import NodeExecutionErrorMessage from '@/components/NodeExecutionErrorMessage.vue';
|
||||
|
||||
type IPushDataExecutionFinishedPayload = PushPayload<'executionFinished'>;
|
||||
|
||||
export function usePushConnection({ router }: { router: ReturnType<typeof useRouter> }) {
|
||||
const workflowHelpers = useWorkflowHelpers({ router });
|
||||
const nodeHelpers = useNodeHelpers();
|
||||
|
@ -165,52 +161,6 @@ export function usePushConnection({ router }: { router: ReturnType<typeof useRou
|
|||
}
|
||||
}
|
||||
|
||||
// recovered execution data is handled like executionFinished data, however for security reasons
|
||||
// we need to fetch the data from the server again rather than push it to all clients
|
||||
let recoveredPushData: IPushDataExecutionFinishedPayload | undefined = undefined;
|
||||
if (receivedData.type === 'executionRecovered') {
|
||||
const recoveredExecutionId = receivedData.data?.executionId;
|
||||
const isWorkflowRunning = uiStore.isActionActive['workflowRunning'];
|
||||
if (isWorkflowRunning && workflowsStore.activeExecutionId === recoveredExecutionId) {
|
||||
// pull execution data for the recovered execution from the server
|
||||
const executionData = await workflowsStore.fetchExecutionDataById(
|
||||
workflowsStore.activeExecutionId,
|
||||
);
|
||||
if (executionData?.data) {
|
||||
// data comes in as 'flatten' object, so we need to parse it
|
||||
executionData.data = parse(executionData.data as unknown as string) as IRunExecutionData;
|
||||
const iRunExecutionData: IRunExecutionData = {
|
||||
startData: executionData.data?.startData,
|
||||
resultData: executionData.data?.resultData ?? { runData: {} },
|
||||
executionData: executionData.data?.executionData,
|
||||
};
|
||||
if (workflowsStore.workflowExecutionData?.workflowId === executionData.workflowId) {
|
||||
const activeRunData = workflowsStore.workflowExecutionData?.data?.resultData?.runData;
|
||||
if (activeRunData) {
|
||||
for (const key of Object.keys(activeRunData)) {
|
||||
iRunExecutionData.resultData.runData[key] = activeRunData[key];
|
||||
}
|
||||
}
|
||||
}
|
||||
const iRun: IRun = {
|
||||
data: iRunExecutionData,
|
||||
finished: executionData.finished,
|
||||
mode: executionData.mode,
|
||||
waitTill: executionData.data?.waitTill,
|
||||
startedAt: executionData.startedAt,
|
||||
stoppedAt: executionData.stoppedAt,
|
||||
status: 'crashed',
|
||||
};
|
||||
if (executionData.data) {
|
||||
recoveredPushData = {
|
||||
executionId: executionData.id,
|
||||
data: iRun,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
receivedData.type === 'workflowFailedToActivate' &&
|
||||
workflowsStore.workflowId === receivedData.data.workflowId
|
||||
|
@ -239,36 +189,14 @@ export function usePushConnection({ router }: { router: ReturnType<typeof useRou
|
|||
}
|
||||
|
||||
if (receivedData.type === 'executionFinished' || receivedData.type === 'executionRecovered') {
|
||||
// The workflow finished executing
|
||||
let pushData: IPushDataExecutionFinishedPayload;
|
||||
if (receivedData.type === 'executionRecovered' && recoveredPushData !== undefined) {
|
||||
pushData = recoveredPushData;
|
||||
} else {
|
||||
pushData = receivedData.data as IPushDataExecutionFinishedPayload;
|
||||
}
|
||||
|
||||
const { activeExecutionId } = workflowsStore;
|
||||
if (activeExecutionId === pushData.executionId) {
|
||||
const activeRunData = workflowsStore.workflowExecutionData?.data?.resultData?.runData;
|
||||
if (activeRunData) {
|
||||
for (const key of Object.keys(activeRunData)) {
|
||||
if (
|
||||
pushData.data?.data?.resultData?.runData?.[key]?.[0]?.data?.main?.[0]?.[0]?.json
|
||||
?.isArtificialRecoveredEventItem === true &&
|
||||
activeRunData[key].length > 0
|
||||
)
|
||||
pushData.data.data.resultData.runData[key] = activeRunData[key];
|
||||
}
|
||||
}
|
||||
workflowsStore.finishActiveExecution(pushData);
|
||||
}
|
||||
|
||||
if (!uiStore.isActionActive['workflowRunning']) {
|
||||
// No workflow is running so ignore the messages
|
||||
return false;
|
||||
}
|
||||
|
||||
if (activeExecutionId !== pushData.executionId) {
|
||||
const { executionId } = receivedData.data;
|
||||
const { activeExecutionId } = workflowsStore;
|
||||
if (executionId !== activeExecutionId) {
|
||||
// The workflow which did finish execution did either not get started
|
||||
// by this session or we do not have the execution id yet.
|
||||
if (isRetry !== true) {
|
||||
|
@ -277,13 +205,32 @@ export function usePushConnection({ router }: { router: ReturnType<typeof useRou
|
|||
return false;
|
||||
}
|
||||
|
||||
const runDataExecuted = pushData.data;
|
||||
// pull execution data for the execution from the server
|
||||
const executionData = await workflowsStore.fetchExecutionDataById(executionId);
|
||||
if (!executionData?.data) return false;
|
||||
// data comes in as 'flatten' object, so we need to parse it
|
||||
executionData.data = parse(executionData.data as unknown as string) as IRunExecutionData;
|
||||
|
||||
let runDataExecutedErrorMessage = getExecutionError(runDataExecuted.data);
|
||||
const iRunExecutionData: IRunExecutionData = {
|
||||
startData: executionData.data?.startData,
|
||||
resultData: executionData.data?.resultData ?? { runData: {} },
|
||||
executionData: executionData.data?.executionData,
|
||||
};
|
||||
|
||||
if (runDataExecuted.status === 'crashed') {
|
||||
if (workflowsStore.workflowExecutionData?.workflowId === executionData.workflowId) {
|
||||
const activeRunData = workflowsStore.workflowExecutionData?.data?.resultData?.runData;
|
||||
if (activeRunData) {
|
||||
for (const key of Object.keys(activeRunData)) {
|
||||
iRunExecutionData.resultData.runData[key] = activeRunData[key];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let runDataExecutedErrorMessage = getExecutionError(iRunExecutionData);
|
||||
|
||||
if (executionData.status === 'crashed') {
|
||||
runDataExecutedErrorMessage = i18n.baseText('pushConnection.executionFailed.message');
|
||||
} else if (runDataExecuted.status === 'canceled') {
|
||||
} else if (executionData.status === 'canceled') {
|
||||
runDataExecutedErrorMessage = i18n.baseText(
|
||||
'executionsList.showMessage.stopExecution.message',
|
||||
{
|
||||
|
@ -292,12 +239,12 @@ export function usePushConnection({ router }: { router: ReturnType<typeof useRou
|
|||
);
|
||||
}
|
||||
|
||||
const lineNumber = runDataExecuted?.data?.resultData?.error?.lineNumber;
|
||||
const lineNumber = iRunExecutionData.resultData?.error?.lineNumber;
|
||||
|
||||
codeNodeEditorEventBus.emit('highlightLine', lineNumber ?? 'final');
|
||||
|
||||
const workflow = workflowHelpers.getCurrentWorkflow();
|
||||
if (runDataExecuted.waitTill !== undefined) {
|
||||
if (executionData.data?.waitTill !== undefined) {
|
||||
const workflowSettings = workflowsStore.workflowSettings;
|
||||
const saveManualExecutions = settingsStore.saveManualExecutions;
|
||||
|
||||
|
@ -318,14 +265,14 @@ export function usePushConnection({ router }: { router: ReturnType<typeof useRou
|
|||
|
||||
// Workflow did start but had been put to wait
|
||||
workflowHelpers.setDocumentTitle(workflow.name as string, 'IDLE');
|
||||
} else if (runDataExecuted.finished !== true) {
|
||||
} else if (executionData.finished !== true) {
|
||||
workflowHelpers.setDocumentTitle(workflow.name as string, 'ERROR');
|
||||
|
||||
if (
|
||||
runDataExecuted.data.resultData.error?.name === 'ExpressionError' &&
|
||||
(runDataExecuted.data.resultData.error as ExpressionError).functionality === 'pairedItem'
|
||||
iRunExecutionData.resultData.error?.name === 'ExpressionError' &&
|
||||
(iRunExecutionData.resultData.error as ExpressionError).functionality === 'pairedItem'
|
||||
) {
|
||||
const error = runDataExecuted.data.resultData.error as ExpressionError;
|
||||
const error = iRunExecutionData.resultData.error as ExpressionError;
|
||||
|
||||
void workflowHelpers.getWorkflowDataToSave().then((workflowData) => {
|
||||
const eventData: IDataObject = {
|
||||
|
@ -365,8 +312,8 @@ export function usePushConnection({ router }: { router: ReturnType<typeof useRou
|
|||
});
|
||||
}
|
||||
|
||||
if (runDataExecuted.data.resultData.error?.name === 'SubworkflowOperationError') {
|
||||
const error = runDataExecuted.data.resultData.error as SubworkflowOperationError;
|
||||
if (iRunExecutionData.resultData.error?.name === 'SubworkflowOperationError') {
|
||||
const error = iRunExecutionData.resultData.error as SubworkflowOperationError;
|
||||
|
||||
workflowsStore.subWorkflowExecutionError = error;
|
||||
|
||||
|
@ -377,14 +324,13 @@ export function usePushConnection({ router }: { router: ReturnType<typeof useRou
|
|||
duration: 0,
|
||||
});
|
||||
} else if (
|
||||
(runDataExecuted.data.resultData.error?.name === 'NodeOperationError' ||
|
||||
runDataExecuted.data.resultData.error?.name === 'NodeApiError') &&
|
||||
(runDataExecuted.data.resultData.error as NodeError).functionality ===
|
||||
'configuration-node'
|
||||
(iRunExecutionData.resultData.error?.name === 'NodeOperationError' ||
|
||||
iRunExecutionData.resultData.error?.name === 'NodeApiError') &&
|
||||
(iRunExecutionData.resultData.error as NodeError).functionality === 'configuration-node'
|
||||
) {
|
||||
// If the error is a configuration error of the node itself doesn't get executed so we can't use lastNodeExecuted for the title
|
||||
let title: string;
|
||||
const nodeError = runDataExecuted.data.resultData.error as NodeOperationError;
|
||||
const nodeError = iRunExecutionData.resultData.error as NodeOperationError;
|
||||
if (nodeError.node.name) {
|
||||
title = `Error in sub-node ‘${nodeError.node.name}‘`;
|
||||
} else {
|
||||
|
@ -403,7 +349,7 @@ export function usePushConnection({ router }: { router: ReturnType<typeof useRou
|
|||
} else {
|
||||
let title: string;
|
||||
const isManualExecutionCancelled =
|
||||
runDataExecuted.mode === 'manual' && runDataExecuted.status === 'canceled';
|
||||
executionData.mode === 'manual' && executionData.status === 'canceled';
|
||||
|
||||
// Do not show the error message if the workflow got canceled manually
|
||||
if (isManualExecutionCancelled) {
|
||||
|
@ -412,8 +358,8 @@ export function usePushConnection({ router }: { router: ReturnType<typeof useRou
|
|||
type: 'success',
|
||||
});
|
||||
} else {
|
||||
if (runDataExecuted.data.resultData.lastNodeExecuted) {
|
||||
title = `Problem in node ‘${runDataExecuted.data.resultData.lastNodeExecuted}‘`;
|
||||
if (iRunExecutionData.resultData.lastNodeExecuted) {
|
||||
title = `Problem in node ‘${iRunExecutionData.resultData.lastNodeExecuted}‘`;
|
||||
} else {
|
||||
title = 'Problem executing workflow';
|
||||
}
|
||||
|
@ -471,50 +417,37 @@ export function usePushConnection({ router }: { router: ReturnType<typeof useRou
|
|||
// node that did finish. For that reason copy in here the data
|
||||
// which we already have.
|
||||
if (workflowsStore.getWorkflowRunData) {
|
||||
runDataExecuted.data.resultData.runData = workflowsStore.getWorkflowRunData;
|
||||
iRunExecutionData.resultData.runData = workflowsStore.getWorkflowRunData;
|
||||
}
|
||||
|
||||
workflowsStore.executingNode.length = 0;
|
||||
workflowsStore.setWorkflowExecutionData(runDataExecuted as IExecutionResponse);
|
||||
workflowsStore.setWorkflowExecutionData(executionData);
|
||||
uiStore.removeActiveAction('workflowRunning');
|
||||
|
||||
// Set the node execution issues on all the nodes which produced an error so that
|
||||
// it can be displayed in the node-view
|
||||
nodeHelpers.updateNodesExecutionIssues();
|
||||
|
||||
const lastNodeExecuted: string | undefined = runDataExecuted.data.resultData.lastNodeExecuted;
|
||||
const lastNodeExecuted: string | undefined = iRunExecutionData.resultData.lastNodeExecuted;
|
||||
let itemsCount = 0;
|
||||
if (
|
||||
lastNodeExecuted &&
|
||||
runDataExecuted.data.resultData.runData[lastNodeExecuted] &&
|
||||
iRunExecutionData.resultData.runData[lastNodeExecuted] &&
|
||||
!runDataExecutedErrorMessage
|
||||
) {
|
||||
itemsCount =
|
||||
runDataExecuted.data.resultData.runData[lastNodeExecuted][0].data!.main[0]!.length;
|
||||
iRunExecutionData.resultData.runData[lastNodeExecuted][0].data!.main[0]!.length;
|
||||
}
|
||||
|
||||
void useExternalHooks().run('pushConnection.executionFinished', {
|
||||
itemsCount,
|
||||
nodeName: runDataExecuted.data.resultData.lastNodeExecuted,
|
||||
nodeName: iRunExecutionData.resultData.lastNodeExecuted,
|
||||
errorMessage: runDataExecutedErrorMessage,
|
||||
runDataExecutedStartData: runDataExecuted.data.startData,
|
||||
resultDataError: runDataExecuted.data.resultData.error,
|
||||
runDataExecutedStartData: iRunExecutionData.startData,
|
||||
resultDataError: iRunExecutionData.resultData.error,
|
||||
});
|
||||
} else if (receivedData.type === 'executionStarted') {
|
||||
const pushData = receivedData.data;
|
||||
|
||||
const executionData: IExecutionsCurrentSummaryExtended = {
|
||||
id: pushData.executionId,
|
||||
finished: false,
|
||||
status: 'running',
|
||||
mode: pushData.mode,
|
||||
startedAt: pushData.startedAt,
|
||||
retryOf: pushData.retryOf,
|
||||
workflowId: pushData.workflowId,
|
||||
workflowName: pushData.workflowName,
|
||||
};
|
||||
|
||||
workflowsStore.addActiveExecution(executionData);
|
||||
// Nothing to do
|
||||
} else if (receivedData.type === 'nodeExecuteAfter') {
|
||||
// A node finished to execute. Add its data
|
||||
const pushData = receivedData.data;
|
||||
|
|
|
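The usePushConnection change above is the heart of the editor-side refactor: the handler no longer receives run data in the push payload, so it fetches the execution by id, parses the flatted `data` field, and keeps any run data already streamed in via nodeExecuteAfter for the same workflow. A condensed sketch of that path, with store wiring, toasts, and error handling elided (names and `parse` from 'flatted' are taken from this diff):

const executionData = await workflowsStore.fetchExecutionDataById(executionId);
if (!executionData?.data) return false;
// data comes in as a 'flatted' string, so parse it back into IRunExecutionData
executionData.data = parse(executionData.data as unknown as string) as IRunExecutionData;

const iRunExecutionData: IRunExecutionData = {
	startData: executionData.data?.startData,
	resultData: executionData.data?.resultData ?? { runData: {} },
	executionData: executionData.data?.executionData,
};

// Keep the run data the editor already received node-by-node for this workflow.
if (workflowsStore.workflowExecutionData?.workflowId === executionData.workflowId) {
	const activeRunData = workflowsStore.workflowExecutionData?.data?.resultData?.runData;
	if (activeRunData) {
		for (const key of Object.keys(activeRunData)) {
			iRunExecutionData.resultData.runData[key] = activeRunData[key];
		}
	}
}

workflowsStore.setWorkflowExecutionData(executionData);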
@ -35,7 +35,6 @@ import { isEmpty } from '@/utils/typesUtils';
|
|||
import { useI18n } from '@/composables/useI18n';
|
||||
import { get } from 'lodash-es';
|
||||
import { useExecutionsStore } from '@/stores/executions.store';
|
||||
import type { PushPayload } from '@n8n/api-types';
|
||||
import { useLocalStorage } from '@vueuse/core';
|
||||
|
||||
const FORM_RELOAD = 'n8n_redirect_to_next_form_test_page';
|
||||
|
@ -515,10 +514,6 @@ export function useRunWorkflow(useRunWorkflowOpts: { router: ReturnType<typeof u
|
|||
|
||||
if (execution === undefined) {
|
||||
// execution finished but was not saved (e.g. due to low connectivity)
|
||||
workflowsStore.finishActiveExecution({
|
||||
executionId,
|
||||
data: { finished: true, stoppedAt: new Date() } as IRun,
|
||||
});
|
||||
workflowsStore.executingNode.length = 0;
|
||||
uiStore.removeActiveAction('workflowRunning');
|
||||
|
||||
|
@ -537,12 +532,6 @@ export function useRunWorkflow(useRunWorkflowOpts: { router: ReturnType<typeof u
|
|||
startedAt: execution.startedAt,
|
||||
stoppedAt: execution.stoppedAt,
|
||||
} as IRun;
|
||||
const pushData: PushPayload<'executionFinished'> = {
|
||||
data: executedData,
|
||||
executionId,
|
||||
retryOf: execution.retryOf,
|
||||
};
|
||||
workflowsStore.finishActiveExecution(pushData);
|
||||
workflowHelpers.setDocumentTitle(execution.workflowData.name, 'IDLE');
|
||||
workflowsStore.executingNode.length = 0;
|
||||
workflowsStore.setWorkflowExecutionData(executedData as IExecutionResponse);
|
||||
|
|
|
@ -8,13 +8,7 @@ import {
|
|||
WAIT_NODE_TYPE,
|
||||
} from '@/constants';
|
||||
import { useWorkflowsStore } from '@/stores/workflows.store';
|
||||
import type {
|
||||
IExecutionResponse,
|
||||
IExecutionsCurrentSummaryExtended,
|
||||
INodeUi,
|
||||
IWorkflowDb,
|
||||
IWorkflowSettings,
|
||||
} from '@/Interface';
|
||||
import type { IExecutionResponse, INodeUi, IWorkflowDb, IWorkflowSettings } from '@/Interface';
|
||||
import { useNodeTypesStore } from '@/stores/nodeTypes.store';
|
||||
|
||||
import { SEND_AND_WAIT_OPERATION } from 'n8n-workflow';
|
||||
|
@ -619,50 +613,6 @@ describe('useWorkflowsStore', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('finishActiveExecution', () => {
|
||||
it('should update execution', async () => {
|
||||
const cursor = 1;
|
||||
const ids = ['0', '1', '2'];
|
||||
workflowsStore.setActiveExecutions(
|
||||
ids.map((id) => ({ id })) as IExecutionsCurrentSummaryExtended[],
|
||||
);
|
||||
|
||||
const stoppedAt = new Date();
|
||||
|
||||
workflowsStore.finishActiveExecution({
|
||||
executionId: ids[cursor],
|
||||
data: {
|
||||
finished: true,
|
||||
stoppedAt,
|
||||
},
|
||||
} as PushPayload<'executionFinished'>);
|
||||
|
||||
expect(workflowsStore.activeExecutions[cursor]).toStrictEqual({
|
||||
id: ids[cursor],
|
||||
finished: true,
|
||||
stoppedAt,
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle parameter casting', async () => {
|
||||
const cursor = 1;
|
||||
const ids = ['0', '1', '2'];
|
||||
workflowsStore.setActiveExecutions(
|
||||
ids.map((id) => ({ id })) as IExecutionsCurrentSummaryExtended[],
|
||||
);
|
||||
|
||||
workflowsStore.finishActiveExecution({
|
||||
executionId: ids[cursor],
|
||||
} as PushPayload<'executionFinished'>);
|
||||
|
||||
expect(workflowsStore.activeExecutions[cursor]).toStrictEqual({
|
||||
id: ids[cursor],
|
||||
finished: undefined,
|
||||
stoppedAt: undefined,
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
function getMockEditFieldsNode() {
|
||||
|
|
|
@ -11,10 +11,8 @@ import {
|
|||
WAIT_NODE_TYPE,
|
||||
} from '@/constants';
|
||||
import type {
|
||||
ExecutionsQueryFilter,
|
||||
IExecutionPushResponse,
|
||||
IExecutionResponse,
|
||||
IExecutionsCurrentSummaryExtended,
|
||||
IExecutionsListResponse,
|
||||
INewWorkflowData,
|
||||
INodeMetadata,
|
||||
|
@ -125,10 +123,8 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
|
|||
const usedCredentials = ref<Record<string, IUsedCredential>>({});
|
||||
|
||||
const activeWorkflows = ref<string[]>([]);
|
||||
const activeExecutions = ref<IExecutionsCurrentSummaryExtended[]>([]);
|
||||
const activeWorkflowExecution = ref<ExecutionSummary | null>(null);
|
||||
const currentWorkflowExecutions = ref<ExecutionSummary[]>([]);
|
||||
const finishedExecutionsCount = ref(0);
|
||||
const workflowExecutionData = ref<IExecutionResponse | null>(null);
|
||||
const workflowExecutionPairedItemMappings = ref<Record<string, Set<string>>>({});
|
||||
const activeExecutionId = ref<string | null>(null);
|
||||
|
@ -266,8 +262,6 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
|
|||
|
||||
const getWorkflowExecution = computed(() => workflowExecutionData.value);
|
||||
|
||||
const getTotalFinishedExecutionsCount = computed(() => finishedExecutionsCount.value);
|
||||
|
||||
const getPastChatMessages = computed(() => Array.from(new Set(chatMessages.value)));
|
||||
|
||||
function getWorkflowResultDataByNodeName(nodeName: string): ITaskData[] | null {
|
||||
|
@ -1338,52 +1332,6 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
|
|||
return ndvStore.activeNode;
|
||||
}
|
||||
|
||||
function addActiveExecution(newActiveExecution: IExecutionsCurrentSummaryExtended): void {
|
||||
// Check if the execution exists already
|
||||
const activeExecution = activeExecutions.value.find((execution) => {
|
||||
return execution.id === newActiveExecution.id;
|
||||
});
|
||||
|
||||
if (activeExecution !== undefined) {
|
||||
// Exists already so no need to add it again
|
||||
if (activeExecution.workflowName === undefined) {
|
||||
activeExecution.workflowName = newActiveExecution.workflowName;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
activeExecutions.value.unshift(newActiveExecution);
|
||||
activeExecutionId.value = newActiveExecution.id;
|
||||
}
|
||||
|
||||
function finishActiveExecution(finishedActiveExecution: PushPayload<'executionFinished'>): void {
|
||||
// Find the execution to set to finished
|
||||
const activeExecutionIndex = activeExecutions.value.findIndex((execution) => {
|
||||
return execution.id === finishedActiveExecution.executionId;
|
||||
});
|
||||
|
||||
if (activeExecutionIndex === -1) {
|
||||
// The execution could not be found
|
||||
return;
|
||||
}
|
||||
|
||||
Object.assign(activeExecutions.value[activeExecutionIndex], {
|
||||
...(finishedActiveExecution.executionId !== undefined
|
||||
? { id: finishedActiveExecution.executionId }
|
||||
: {}),
|
||||
finished: finishedActiveExecution.data?.finished,
|
||||
stoppedAt: finishedActiveExecution.data?.stoppedAt,
|
||||
});
|
||||
|
||||
if (finishedActiveExecution.data?.data) {
|
||||
setWorkflowExecutionRunData(finishedActiveExecution.data.data);
|
||||
}
|
||||
}
|
||||
|
||||
function setActiveExecutions(newActiveExecutions: IExecutionsCurrentSummaryExtended[]): void {
|
||||
activeExecutions.value = newActiveExecutions;
|
||||
}
|
||||
|
||||
// TODO: For sure needs some kind of default filter like last day, with max 10 results, ...
|
||||
async function getPastExecutions(
|
||||
filter: IDataObject,
|
||||
|
@ -1404,26 +1352,6 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
|
|||
return await makeRestApiRequest(rootStore.restApiContext, 'GET', '/executions', sendData);
|
||||
}
|
||||
|
||||
async function getActiveExecutions(
|
||||
filter: IDataObject,
|
||||
): Promise<IExecutionsCurrentSummaryExtended[]> {
|
||||
let sendData = {};
|
||||
if (filter) {
|
||||
sendData = {
|
||||
filter,
|
||||
};
|
||||
}
|
||||
const rootStore = useRootStore();
|
||||
const output = await makeRestApiRequest<{ results: IExecutionsCurrentSummaryExtended[] }>(
|
||||
rootStore.restApiContext,
|
||||
'GET',
|
||||
'/executions',
|
||||
sendData,
|
||||
);
|
||||
|
||||
return output.results;
|
||||
}
|
||||
|
||||
async function getExecution(id: string): Promise<IExecutionResponse | undefined> {
|
||||
const rootStore = useRootStore();
|
||||
const response = await makeRestApiRequest<IExecutionFlattedResponse | undefined>(
|
||||
|
@ -1507,36 +1435,6 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
|
|||
);
|
||||
}
|
||||
|
||||
async function loadCurrentWorkflowExecutions(
|
||||
requestFilter: ExecutionsQueryFilter,
|
||||
): Promise<ExecutionSummary[]> {
|
||||
let retrievedActiveExecutions: IExecutionsCurrentSummaryExtended[] = [];
|
||||
|
||||
if (!requestFilter.workflowId) {
|
||||
return [];
|
||||
}
|
||||
|
||||
try {
|
||||
const rootStore = useRootStore();
|
||||
if ((!requestFilter.status || !requestFilter.finished) && isEmpty(requestFilter.metadata)) {
|
||||
retrievedActiveExecutions = await workflowsApi.getActiveExecutions(
|
||||
rootStore.restApiContext,
|
||||
{
|
||||
workflowId: requestFilter.workflowId,
|
||||
},
|
||||
);
|
||||
}
|
||||
const retrievedFinishedExecutions = await workflowsApi.getExecutions(
|
||||
rootStore.restApiContext,
|
||||
requestFilter,
|
||||
);
|
||||
finishedExecutionsCount.value = retrievedFinishedExecutions.count;
|
||||
return [...retrievedActiveExecutions, ...(retrievedFinishedExecutions.results || [])];
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchExecutionDataById(executionId: string): Promise<IExecutionResponse | null> {
|
||||
const rootStore = useRootStore();
|
||||
return await workflowsApi.getExecutionData(rootStore.restApiContext, executionId);
|
||||
|
@ -1644,10 +1542,8 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
|
|||
workflow,
|
||||
usedCredentials,
|
||||
activeWorkflows,
|
||||
activeExecutions,
|
||||
activeWorkflowExecution,
|
||||
currentWorkflowExecutions,
|
||||
finishedExecutionsCount,
|
||||
workflowExecutionData,
|
||||
workflowExecutionPairedItemMappings,
|
||||
activeExecutionId,
|
||||
|
@ -1682,7 +1578,6 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
|
|||
executedNode,
|
||||
getAllLoadedFinishedExecutions,
|
||||
getWorkflowExecution,
|
||||
getTotalFinishedExecutionsCount,
|
||||
getPastChatMessages,
|
||||
isChatPanelOpen: computed(() => isChatPanelOpen.value),
|
||||
isLogsPanelOpen: computed(() => isLogsPanelOpen.value),
|
||||
|
@ -1760,17 +1655,12 @@ export const useWorkflowsStore = defineStore(STORES.WORKFLOWS, () => {
|
|||
clearNodeExecutionData,
|
||||
pinDataByNodeName,
|
||||
activeNode,
|
||||
addActiveExecution,
|
||||
finishActiveExecution,
|
||||
setActiveExecutions,
|
||||
getPastExecutions,
|
||||
getActiveExecutions,
|
||||
getExecution,
|
||||
createNewWorkflow,
|
||||
updateWorkflow,
|
||||
runWorkflow,
|
||||
removeTestWebhook,
|
||||
loadCurrentWorkflowExecutions,
|
||||
fetchExecutionDataById,
|
||||
deleteExecution,
|
||||
addToCurrentExecutions,
|
||||
|
|
|
@ -181,7 +181,6 @@ import { useNpsSurveyStore } from '@/stores/npsSurvey.store';
|
|||
import { getResourcePermissions } from '@/permissions';
|
||||
import { useBeforeUnload } from '@/composables/useBeforeUnload';
|
||||
import NodeViewUnfinishedWorkflowMessage from '@/components/NodeViewUnfinishedWorkflowMessage.vue';
|
||||
import type { PushPayload } from '@n8n/api-types';
|
||||
|
||||
interface AddNodeOptions {
|
||||
position?: XYPosition;
|
||||
|
@ -1728,10 +1727,6 @@ export default defineComponent({
|
|||
if (execution === undefined) {
|
||||
// execution finished but was not saved (e.g. due to low connectivity)
|
||||
|
||||
this.workflowsStore.finishActiveExecution({
|
||||
executionId,
|
||||
data: { finished: true, stoppedAt: new Date() } as IRun,
|
||||
});
|
||||
this.workflowsStore.executingNode.length = 0;
|
||||
this.uiStore.removeActiveAction('workflowRunning');
|
||||
|
||||
|
@ -1753,12 +1748,6 @@ export default defineComponent({
|
|||
startedAt: execution.startedAt,
|
||||
stoppedAt: execution.stoppedAt,
|
||||
} as IRun;
|
||||
const pushData: PushPayload<'executionFinished'> = {
|
||||
data: executedData,
|
||||
executionId,
|
||||
retryOf: execution.retryOf,
|
||||
};
|
||||
this.workflowsStore.finishActiveExecution(pushData);
|
||||
this.workflowHelpers.setDocumentTitle(execution.workflowData.name, 'IDLE');
|
||||
this.workflowsStore.executingNode.length = 0;
|
||||
this.workflowsStore.setWorkflowExecutionData(executedData as IExecutionResponse);
|
||||
|
|
pnpm-lock.yaml (152)
@ -33,6 +33,9 @@ catalogs:
|
|||
fast-glob:
|
||||
specifier: 3.2.12
|
||||
version: 3.2.12
|
||||
flatted:
|
||||
specifier: 3.2.7
|
||||
version: 3.2.7
|
||||
form-data:
|
||||
specifier: 4.0.0
|
||||
version: 4.0.0
|
||||
|
@ -216,6 +219,9 @@ importers:
|
|||
cypress-real-events:
|
||||
specifier: ^1.13.0
|
||||
version: 1.13.0(cypress@13.14.2)
|
||||
flatted:
|
||||
specifier: 'catalog:'
|
||||
version: 3.2.7
|
||||
lodash:
|
||||
specifier: 'catalog:'
|
||||
version: 4.17.21
|
||||
|
@ -265,7 +271,7 @@ importers:
|
|||
version: 4.0.7
|
||||
axios:
|
||||
specifier: 'catalog:'
|
||||
version: 1.7.4(debug@4.3.7)
|
||||
version: 1.7.4
|
||||
dotenv:
|
||||
specifier: 8.6.0
|
||||
version: 8.6.0
|
||||
|
@ -333,7 +339,7 @@ importers:
|
|||
dependencies:
|
||||
axios:
|
||||
specifier: 'catalog:'
|
||||
version: 1.7.4(debug@4.3.7)
|
||||
version: 1.7.4
|
||||
|
||||
packages/@n8n/codemirror-lang:
|
||||
dependencies:
|
||||
|
@ -407,7 +413,7 @@ importers:
|
|||
version: 3.666.0(@aws-sdk/client-sts@3.666.0)
|
||||
'@getzep/zep-cloud':
|
||||
specifier: 1.0.12
|
||||
version: 1.0.12(@langchain/core@0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)(langchain@0.3.5(4ubssgvn2k3t3hxnzmxuoc2aja))
|
||||
version: 1.0.12(@langchain/core@0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)(langchain@0.3.5(7umjwzmwnymi4lyinuvazmp6ki))
|
||||
'@getzep/zep-js':
|
||||
specifier: 0.9.0
|
||||
version: 0.9.0
|
||||
|
@ -434,7 +440,7 @@ importers:
|
|||
version: 0.3.1(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@langchain/core@0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)
|
||||
'@langchain/community':
|
||||
specifier: 0.3.11
|
||||
version: 0.3.11(tzffvezibmkr4px5bpuitcp7xu)
|
||||
version: 0.3.11(simkpjwqw7qnwbripe37u5qu7a)
|
||||
'@langchain/core':
|
||||
specifier: 'catalog:'
|
||||
version: 0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8))
|
||||
|
@ -521,7 +527,7 @@ importers:
|
|||
version: 23.0.1
|
||||
langchain:
|
||||
specifier: 0.3.5
|
||||
version: 0.3.5(4ubssgvn2k3t3hxnzmxuoc2aja)
|
||||
version: 0.3.5(7umjwzmwnymi4lyinuvazmp6ki)
|
||||
lodash:
|
||||
specifier: 'catalog:'
|
||||
version: 4.17.21
|
||||
|
@ -774,7 +780,7 @@ importers:
|
|||
version: 1.11.0
|
||||
axios:
|
||||
specifier: 'catalog:'
|
||||
version: 1.7.4(debug@4.3.7)
|
||||
version: 1.7.4
|
||||
bcryptjs:
|
||||
specifier: 2.4.3
|
||||
version: 2.4.3
|
||||
|
@ -839,7 +845,7 @@ importers:
|
|||
specifier: 5.0.2
|
||||
version: 5.0.2
|
||||
flatted:
|
||||
specifier: 3.2.7
|
||||
specifier: 'catalog:'
|
||||
version: 3.2.7
|
||||
formidable:
|
||||
specifier: 3.5.1
|
||||
|
@ -1093,7 +1099,7 @@ importers:
|
|||
dependencies:
|
||||
'@langchain/core':
|
||||
specifier: 'catalog:'
|
||||
version: 0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8))
|
||||
version: 0.3.15(openai@4.69.0(zod@3.23.8))
|
||||
'@n8n/client-oauth2':
|
||||
specifier: workspace:*
|
||||
version: link:../@n8n/client-oauth2
|
||||
|
@ -1105,7 +1111,7 @@ importers:
|
|||
version: 1.11.0
|
||||
axios:
|
||||
specifier: 'catalog:'
|
||||
version: 1.7.4(debug@4.3.7)
|
||||
version: 1.7.4
|
||||
concat-stream:
|
||||
specifier: 2.0.0
|
||||
version: 2.0.0
|
||||
|
@ -1395,7 +1401,7 @@ importers:
|
|||
version: 10.11.0(vue@3.5.11(typescript@5.6.2))
|
||||
axios:
|
||||
specifier: 'catalog:'
|
||||
version: 1.7.4(debug@4.3.7)
|
||||
version: 1.7.4
|
||||
bowser:
|
||||
specifier: 2.11.0
|
||||
version: 2.11.0
|
||||
|
@ -1424,7 +1430,7 @@ importers:
|
|||
specifier: ^2.0.2
|
||||
version: 2.0.5
|
||||
flatted:
|
||||
specifier: ^3.2.4
|
||||
specifier: 'catalog:'
|
||||
version: 3.2.7
|
||||
highlight.js:
|
||||
specifier: catalog:frontend
|
||||
|
@ -1875,7 +1881,7 @@ importers:
|
|||
version: 0.15.2
|
||||
axios:
|
||||
specifier: 'catalog:'
|
||||
version: 1.7.4(debug@4.3.7)
|
||||
version: 1.7.4
|
||||
callsites:
|
||||
specifier: 3.1.0
|
||||
version: 3.1.0
|
||||
|
@ -1921,7 +1927,7 @@ importers:
|
|||
devDependencies:
|
||||
'@langchain/core':
|
||||
specifier: 'catalog:'
|
||||
version: 0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8))
|
||||
version: 0.3.15(openai@4.69.0)
|
||||
'@types/deep-equal':
|
||||
specifier: ^1.0.1
|
||||
version: 1.0.1
|
||||
|
@ -14076,7 +14082,7 @@ snapshots:
|
|||
'@gar/promisify@1.1.3':
|
||||
optional: true
|
||||
|
||||
'@getzep/zep-cloud@1.0.12(@langchain/core@0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)(langchain@0.3.5(4ubssgvn2k3t3hxnzmxuoc2aja))':
|
||||
'@getzep/zep-cloud@1.0.12(@langchain/core@0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)(langchain@0.3.5(7umjwzmwnymi4lyinuvazmp6ki))':
|
||||
dependencies:
|
||||
form-data: 4.0.0
|
||||
node-fetch: 2.7.0(encoding@0.1.13)
|
||||
|
@ -14085,7 +14091,7 @@ snapshots:
|
|||
zod: 3.23.8
|
||||
optionalDependencies:
|
||||
'@langchain/core': 0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8))
|
||||
langchain: 0.3.5(4ubssgvn2k3t3hxnzmxuoc2aja)
|
||||
langchain: 0.3.5(7umjwzmwnymi4lyinuvazmp6ki)
|
||||
transitivePeerDependencies:
|
||||
- encoding
|
||||
|
||||
|
@ -14552,7 +14558,7 @@ snapshots:
|
|||
- aws-crt
|
||||
- encoding
|
||||
|
||||
'@langchain/community@0.3.11(tzffvezibmkr4px5bpuitcp7xu)':
|
||||
'@langchain/community@0.3.11(simkpjwqw7qnwbripe37u5qu7a)':
|
||||
dependencies:
|
||||
'@ibm-cloud/watsonx-ai': 1.1.2
|
||||
'@langchain/core': 0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8))
|
||||
|
@ -14562,7 +14568,7 @@ snapshots:
|
|||
flat: 5.0.2
|
||||
ibm-cloud-sdk-core: 5.1.0
|
||||
js-yaml: 4.1.0
|
||||
langchain: 0.3.5(4ubssgvn2k3t3hxnzmxuoc2aja)
|
||||
langchain: 0.3.5(7umjwzmwnymi4lyinuvazmp6ki)
|
||||
langsmith: 0.2.3(openai@4.69.0(encoding@0.1.13)(zod@3.23.8))
|
||||
uuid: 10.0.0
|
||||
zod: 3.23.8
|
||||
|
@ -14575,7 +14581,7 @@ snapshots:
|
|||
'@aws-sdk/client-s3': 3.666.0
|
||||
'@aws-sdk/credential-provider-node': 3.666.0(@aws-sdk/client-sso-oidc@3.666.0(@aws-sdk/client-sts@3.666.0))(@aws-sdk/client-sts@3.666.0)
|
||||
'@azure/storage-blob': 12.18.0(encoding@0.1.13)
|
||||
'@getzep/zep-cloud': 1.0.12(@langchain/core@0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)(langchain@0.3.5(4ubssgvn2k3t3hxnzmxuoc2aja))
|
||||
'@getzep/zep-cloud': 1.0.12(@langchain/core@0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)(langchain@0.3.5(7umjwzmwnymi4lyinuvazmp6ki))
|
||||
'@getzep/zep-js': 0.9.0
|
||||
'@google-ai/generativelanguage': 2.6.0(encoding@0.1.13)
|
||||
'@google-cloud/storage': 7.12.1(encoding@0.1.13)
|
||||
|
@ -14639,6 +14645,38 @@ snapshots:
|
|||
transitivePeerDependencies:
|
||||
- openai
|
||||
|
||||
'@langchain/core@0.3.15(openai@4.69.0(zod@3.23.8))':
|
||||
dependencies:
|
||||
ansi-styles: 5.2.0
|
||||
camelcase: 6.3.0
|
||||
decamelize: 1.2.0
|
||||
js-tiktoken: 1.0.12
|
||||
langsmith: 0.2.3(openai@4.69.0(zod@3.23.8))
|
||||
mustache: 4.2.0
|
||||
p-queue: 6.6.2
|
||||
p-retry: 4.6.2
|
||||
uuid: 10.0.0
|
||||
zod: 3.23.8
|
||||
zod-to-json-schema: 3.23.3(zod@3.23.8)
|
||||
transitivePeerDependencies:
|
||||
- openai
|
||||
|
||||
'@langchain/core@0.3.15(openai@4.69.0)':
|
||||
dependencies:
|
||||
ansi-styles: 5.2.0
|
||||
camelcase: 6.3.0
|
||||
decamelize: 1.2.0
|
||||
js-tiktoken: 1.0.12
|
||||
langsmith: 0.2.3(openai@4.69.0)
|
||||
mustache: 4.2.0
|
||||
p-queue: 6.6.2
|
||||
p-retry: 4.6.2
|
||||
uuid: 10.0.0
|
||||
zod: 3.23.8
|
||||
zod-to-json-schema: 3.23.3(zod@3.23.8)
|
||||
transitivePeerDependencies:
|
||||
- openai
|
||||
|
||||
'@langchain/google-common@0.1.1(@langchain/core@0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8)))(zod@3.23.8)':
|
||||
dependencies:
|
||||
'@langchain/core': 0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8))
|
||||
|
@ -15256,7 +15294,7 @@ snapshots:
|
|||
|
||||
'@rudderstack/rudder-sdk-node@2.0.9(tslib@2.6.2)':
|
||||
dependencies:
|
||||
axios: 1.7.4(debug@4.3.7)
|
||||
axios: 1.7.4
|
||||
axios-retry: 3.7.0
|
||||
component-type: 1.2.1
|
||||
join-component: 1.1.0
|
||||
|
@ -17512,7 +17550,23 @@ snapshots:
|
|||
'@babel/runtime': 7.24.7
|
||||
is-retry-allowed: 2.2.0
|
||||
|
||||
axios@1.7.4:
|
||||
dependencies:
|
||||
follow-redirects: 1.15.6(debug@4.3.6)
|
||||
form-data: 4.0.0
|
||||
proxy-from-env: 1.1.0
|
||||
transitivePeerDependencies:
|
||||
- debug
|
||||
|
||||
axios@1.7.4(debug@4.3.7):
|
||||
dependencies:
|
||||
follow-redirects: 1.15.6(debug@4.3.7)
|
||||
form-data: 4.0.0
|
||||
proxy-from-env: 1.1.0
|
||||
transitivePeerDependencies:
|
||||
- debug
|
||||
|
||||
axios@1.7.7:
|
||||
dependencies:
|
||||
follow-redirects: 1.15.6(debug@4.3.6)
|
||||
form-data: 4.0.0
|
||||
|
@ -19188,7 +19242,7 @@ snapshots:
|
|||
|
||||
eslint-import-resolver-node@0.3.9:
|
||||
dependencies:
|
||||
debug: 3.2.7(supports-color@8.1.1)
|
||||
debug: 3.2.7(supports-color@5.5.0)
|
||||
is-core-module: 2.13.1
|
||||
resolve: 1.22.8
|
||||
transitivePeerDependencies:
|
||||
|
@ -19213,7 +19267,7 @@ snapshots:
|
|||
|
||||
eslint-module-utils@2.8.0(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@7.2.0(eslint@8.57.0)(typescript@5.6.2))(eslint-plugin-import@2.29.1)(eslint@8.57.0))(eslint@8.57.0):
|
||||
dependencies:
|
||||
debug: 3.2.7(supports-color@8.1.1)
|
||||
debug: 3.2.7(supports-color@5.5.0)
|
||||
optionalDependencies:
|
||||
'@typescript-eslint/parser': 7.2.0(eslint@8.57.0)(typescript@5.6.2)
|
||||
eslint: 8.57.0
|
||||
|
@ -19233,7 +19287,7 @@ snapshots:
|
|||
array.prototype.findlastindex: 1.2.3
|
||||
array.prototype.flat: 1.3.2
|
||||
array.prototype.flatmap: 1.3.2
|
||||
debug: 3.2.7(supports-color@8.1.1)
|
||||
debug: 3.2.7(supports-color@5.5.0)
|
||||
doctrine: 2.1.0
|
||||
eslint: 8.57.0
|
||||
eslint-import-resolver-node: 0.3.9
|
||||
|
@ -20026,7 +20080,7 @@ snapshots:
|
|||
array-parallel: 0.1.3
|
||||
array-series: 0.1.5
|
||||
cross-spawn: 4.0.2
|
||||
debug: 3.2.7(supports-color@8.1.1)
|
||||
debug: 3.2.7(supports-color@5.5.0)
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
|
@ -20412,7 +20466,7 @@ snapshots:
|
|||
|
||||
infisical-node@1.3.0:
|
||||
dependencies:
|
||||
axios: 1.7.7(debug@4.3.6)
|
||||
axios: 1.7.7
|
||||
dotenv: 16.3.1
|
||||
tweetnacl: 1.0.3
|
||||
tweetnacl-util: 0.15.1
|
||||
|
@ -21342,7 +21396,7 @@ snapshots:
|
|||
|
||||
kuler@2.0.0: {}
|
||||
|
||||
langchain@0.3.5(4ubssgvn2k3t3hxnzmxuoc2aja):
|
||||
langchain@0.3.5(7umjwzmwnymi4lyinuvazmp6ki):
|
||||
dependencies:
|
||||
'@langchain/core': 0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8))
|
||||
'@langchain/openai': 0.3.11(@langchain/core@0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)
|
||||
|
@ -21366,7 +21420,7 @@ snapshots:
|
|||
'@langchain/groq': 0.1.2(@langchain/core@0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)
|
||||
'@langchain/mistralai': 0.1.1(@langchain/core@0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8)))(encoding@0.1.13)
|
||||
'@langchain/ollama': 0.1.1(@langchain/core@0.3.15(openai@4.69.0(encoding@0.1.13)(zod@3.23.8)))
|
||||
axios: 1.7.4(debug@4.3.7)
|
||||
axios: 1.7.4
|
||||
cheerio: 1.0.0
|
||||
handlebars: 4.7.8
|
||||
transitivePeerDependencies:
|
||||
|
@ -21385,6 +21439,28 @@ snapshots:
|
|||
optionalDependencies:
|
||||
openai: 4.69.0(encoding@0.1.13)(zod@3.23.8)
|
||||
|
||||
langsmith@0.2.3(openai@4.69.0(zod@3.23.8)):
|
||||
dependencies:
|
||||
'@types/uuid': 10.0.0
|
||||
commander: 10.0.1
|
||||
p-queue: 6.6.2
|
||||
p-retry: 4.6.2
|
||||
semver: 7.6.0
|
||||
uuid: 10.0.0
|
||||
optionalDependencies:
|
||||
openai: 4.69.0(zod@3.23.8)
|
||||
|
||||
langsmith@0.2.3(openai@4.69.0):
|
||||
dependencies:
|
||||
'@types/uuid': 10.0.0
|
||||
commander: 10.0.1
|
||||
p-queue: 6.6.2
|
||||
p-retry: 4.6.2
|
||||
semver: 7.6.0
|
||||
uuid: 10.0.0
|
||||
optionalDependencies:
|
||||
openai: 4.69.0(zod@3.23.8)
|
||||
|
||||
lazy-ass@1.6.0: {}
|
||||
|
||||
ldapts@4.2.6:
|
||||
|
@ -22719,6 +22795,22 @@ snapshots:
|
|||
- encoding
|
||||
- supports-color
|
||||
|
||||
openai@4.69.0(zod@3.23.8):
|
||||
dependencies:
|
||||
'@types/node': 18.16.16
|
||||
'@types/node-fetch': 2.6.4
|
||||
abort-controller: 3.0.0
|
||||
agentkeepalive: 4.2.1
|
||||
form-data-encoder: 1.7.2
|
||||
formdata-node: 4.4.1
|
||||
node-fetch: 2.7.0(encoding@0.1.13)
|
||||
optionalDependencies:
|
||||
zod: 3.23.8
|
||||
transitivePeerDependencies:
|
||||
- encoding
|
||||
- supports-color
|
||||
optional: true
|
||||
|
||||
openapi-sampler@1.5.1:
|
||||
dependencies:
|
||||
'@types/json-schema': 7.0.15
|
||||
|
@ -22899,7 +22991,7 @@ snapshots:
|
|||
|
||||
pdf-parse@1.1.1:
|
||||
dependencies:
|
||||
debug: 3.2.7(supports-color@8.1.1)
|
||||
debug: 3.2.7(supports-color@5.5.0)
|
||||
node-ensure: 0.0.0
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
@ -23101,7 +23193,7 @@ snapshots:
|
|||
|
||||
posthog-node@3.2.1:
|
||||
dependencies:
|
||||
axios: 1.7.7(debug@4.3.6)
|
||||
axios: 1.7.7
|
||||
rusha: 0.8.14
|
||||
transitivePeerDependencies:
|
||||
- debug
|
||||
|
@ -23730,7 +23822,7 @@ snapshots:
|
|||
|
||||
rhea@1.0.24:
|
||||
dependencies:
|
||||
debug: 3.2.7(supports-color@8.1.1)
|
||||
debug: 3.2.7(supports-color@5.5.0)
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
|
@ -24106,7 +24198,7 @@ snapshots:
|
|||
asn1.js: 5.4.1
|
||||
asn1.js-rfc2560: 5.0.1(asn1.js@5.4.1)
|
||||
asn1.js-rfc5280: 3.0.0
|
||||
axios: 1.7.7(debug@4.3.6)
|
||||
axios: 1.7.7
|
||||
big-integer: 1.6.51
|
||||
bignumber.js: 9.1.2
|
||||
binascii: 0.0.2
|
||||
|
|
|
@ -14,6 +14,7 @@ catalog:
 basic-auth: 2.0.1
 chokidar: 4.0.1
 fast-glob: 3.2.12
+flatted: 3.2.7
 form-data: 4.0.0
 lodash: 4.17.21
 luxon: 3.4.4