Mirror of https://github.com/n8n-io/n8n.git (synced 2024-11-13 16:14:07 -08:00)
WIP on partial executions for tools / sub-nodes
commit 97082125a8
parent d7ba206b30
@@ -286,6 +286,8 @@ export class WorkflowRunner {

		const startNode = WorkflowHelpers.getExecutionStartNode(data, workflow);
		console.log('startNode from workflow-runner', startNode);

		// Can execute without webhook so go on
		const workflowExecute = new WorkflowExecute(additionalData, data.executionMode);
		workflowExecution = workflowExecute.run(
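The hunk above relies on WorkflowHelpers.getExecutionStartNode, which is not part of this excerpt. A minimal, hypothetical sketch of what such a resolution step might do; all types and the function itself are stand-ins, not n8n's actual helper:

	interface MinimalNode {
		name: string;
	}

	interface MinimalWorkflow {
		getNode(name: string): MinimalNode | null;
		getStartNode(destinationNode?: string): MinimalNode | undefined;
	}

	interface ExecutionStartData {
		startNodes?: Array<{ name: string }>;
		destinationNode?: string;
	}

	// Prefer an explicitly requested start node; otherwise fall back to the
	// workflow's own start-node resolution.
	function resolveExecutionStartNode(
		data: ExecutionStartData,
		workflow: MinimalWorkflow,
	): MinimalNode | undefined {
		const requestedName = data.startNodes?.[0]?.name;
		if (requestedName) {
			return workflow.getNode(requestedName) ?? undefined;
		}
		return workflow.getStartNode(data.destinationNode);
	}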
@@ -102,8 +102,17 @@ export class WorkflowExecute {
	): PCancelable<IRun> {
		this.status = 'running';

		let isAiSubNode = false;
		// @ts-ignore
		const aiParentNodes = workflow.getChildNodes(destinationNode, NodeConnectionType.AiTool);
		// Get the nodes to start workflow execution from
		startNode = startNode || workflow.getStartNode(destinationNode);
		if (startNode?.name === destinationNode) {
			if (aiParentNodes.length !== 0) {
				isAiSubNode = true;
				//startNode = workflow.getStartNode(aiParentNodes[0]);
			}
		}

		if (startNode === undefined) {
			throw new ApplicationError('No node to start the workflow from could be found');
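A minimal sketch of the sub-node check added above, assuming only a graph object exposing getChildNodes(nodeName, connectionType); the GraphLike type is a stand-in, and the 'ai_tool' string mirrors NodeConnectionType.AiTool from the diff:

	interface GraphLike {
		getChildNodes(nodeName: string, connectionType: string): string[];
	}

	// A destination node counts as an AI tool sub-node when at least one root
	// node (e.g. an agent) is reachable from it via an ai_tool connection.
	function isAiToolSubNode(workflow: GraphLike, destinationNode: string): boolean {
		return workflow.getChildNodes(destinationNode, 'ai_tool').length > 0;
	}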
@@ -1059,6 +1068,11 @@ export class WorkflowExecute {
				continue;
			}

			const isAiTool = {};
			if (workflow.connectionsBySourceNode[executionNode.name]?.hasOwnProperty('ai_tool')) {
				isAiTool[executionNode.name] = true;
			}

			// Check if all the data which is needed to run the node is available
			if (workflow.connectionsByDestinationNode.hasOwnProperty(executionNode.name)) {
				// Check if the node has incoming connections
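Sketch of the "does this node emit on ai_tool?" lookup used above, assuming a connectionsBySourceNode map keyed by node name and then by connection type (the concrete connection payload type is left as unknown):

	type ConnectionsBySource = Record<string, Record<string, unknown>>;

	function emitsOnAiTool(
		connectionsBySourceNode: ConnectionsBySource,
		nodeName: string,
	): boolean {
		return Object.prototype.hasOwnProperty.call(
			connectionsBySourceNode[nodeName] ?? {},
			'ai_tool',
		);
	}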
@@ -1160,6 +1174,22 @@
				workflowId: workflow.id,
			});

			if (isAiTool[executionNode.name]) {
				executionData.data.inputOverride = {};
				executionData.data.inputOverride['ai_tool'] = [
					[{ json: { filter: "employeeName = 'Mario'" } }],
				];
				this.runExecutionData.resultData.runData[executionNode.name] = [
					{
						inputOverride: {
							[NodeConnectionType.AiTool]: [
								[{ json: { filter: "employeeName = 'Mario'" } }],
							],
						},
					},
				];
			}

			let runNodeData = await workflow.runNode(
				executionData,
				this.runExecutionData,
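Generalised sketch of the hard-coded override above: rather than a fixed "employeeName = 'Mario'" filter, accept arbitrary sample JSON for the ai_tool input. ToolItem and ToolConnectionData are simplified stand-ins for n8n's INodeExecutionData / ITaskDataConnections, not the real interfaces:

	interface ToolItem {
		json: Record<string, unknown>;
	}

	type ToolConnectionData = Record<string, ToolItem[][]>;

	function buildAiToolInputOverride(sample: Record<string, unknown>): ToolConnectionData {
		// One connection type ("ai_tool"), one output index, one item.
		return { ai_tool: [[{ json: sample }]] };
	}

	// Usage mirroring the value hard-coded in the WIP diff:
	const override = buildAiToolInputOverride({ filter: "employeeName = 'Mario'" });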
@@ -1522,6 +1552,10 @@ export class WorkflowExecute {
				main: nodeSuccessData,
			} as ITaskDataConnections;

			if (isAiTool[executionNode.name]) {
				taskData.data = { ai_tool: nodeSuccessData } as ITaskDataConnections;
			}

			this.runExecutionData.resultData.runData[executionNode.name].push(taskData);

			if (this.runExecutionData.waitTill!) {
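Sketch of routing a node's result under the right connection type, as the hunk above does when storing run data; the types are simplified stand-ins for INodeExecutionData / ITaskDataConnections:

	interface ResultItem {
		json: Record<string, unknown>;
	}

	type ResultConnections = Record<string, ResultItem[][]>;

	function toTaskDataConnections(
		nodeSuccessData: ResultItem[][],
		isAiToolNode: boolean,
	): ResultConnections {
		// Sub-node output is stored under "ai_tool"; everything else stays on "main".
		return isAiToolNode ? { ai_tool: nodeSuccessData } : { main: nodeSuccessData };
	}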
@@ -143,7 +143,18 @@ const isExecutable = computed(() => {
	);
	const inputNames = NodeHelpers.getConnectionTypes(inputs);

	if (!inputNames.includes(NodeConnectionType.Main) && !isTriggerNode.value) {
		const outputs = NodeHelpers.getNodeOutputs(
			currentWorkflowInstance.value,
			workflowNode!,
			props.nodeType,
		);
		const outputNames = NodeHelpers.getConnectionTypes(outputs);

		if (
			!inputNames.includes(NodeConnectionType.Main) &&
			!outputNames.includes(NodeConnectionType.AiTool) &&
			!isTriggerNode.value
		) {
			return false;
		}
	}
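The widened check above, restated as a pure predicate covering just this portion of the computed; 'main' and 'ai_tool' are assumed to match the NodeConnectionType string values used in the diff:

	function isNodeExecutable(
		inputNames: string[],
		outputNames: string[],
		isTriggerNode: boolean,
	): boolean {
		// Previously only trigger nodes and nodes with a Main input were
		// executable; a node that outputs on ai_tool (a tool sub-node) now is too.
		return (
			inputNames.includes('main') ||
			outputNames.includes('ai_tool') ||
			isTriggerNode
		);
	}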
@@ -109,6 +109,8 @@ export function useRunWorkflow(useRunWorkflowOpts: { router: ReturnType<typeof u
		toast.clearAllStickyNotifications();

		let isSubNode = false;

		try {
			// Get the direct parents of the node
			let directParentNodes: string[] = [];
@@ -118,7 +120,20 @@ export function useRunWorkflow(useRunWorkflowOpts: { router: ReturnType<typeof u
					NodeConnectionType.Main,
					-1,
				);

				// Confusingly, this is called "getChildNodes", but it gets the connected root-nodes through AiTool
				const aiParentNodes = workflow.getChildNodes(
					options.destinationNode,
					NodeConnectionType.AiTool,
				);
				if (aiParentNodes.length !== 0) isSubNode = true;
				aiParentNodes.forEach((nodeName) => {
					directParentNodes = directParentNodes.concat(
						workflow.getParentNodes(nodeName, NodeConnectionType.Main, -1),
					);
				});
			}
			console.log('directParentNodes', directParentNodes);

			const runData = workflowsStore.getWorkflowRunData;
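Sketch of collecting the extra start ancestry for a tool sub-node, assuming a graph with getChildNodes/getParentNodes(nodeName, connectionType, depth) as used above; the WorkflowGraph type and the connection-type strings are assumptions:

	interface WorkflowGraph {
		getChildNodes(nodeName: string, connectionType: string, depth?: number): string[];
		getParentNodes(nodeName: string, connectionType: string, depth?: number): string[];
	}

	function collectToolParentNodes(workflow: WorkflowGraph, destinationNode: string): string[] {
		// Root nodes (e.g. agents) that the tool is attached to via ai_tool.
		const aiRootNodes = workflow.getChildNodes(destinationNode, 'ai_tool');
		// Pull in every Main-connection ancestor of each of those root nodes so
		// their data can be reused by the partial execution.
		return aiRootNodes.flatMap((rootName) =>
			workflow.getParentNodes(rootName, 'main', -1),
		);
	}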
@@ -159,7 +174,6 @@ export function useRunWorkflow(useRunWorkflowOpts: { router: ReturnType<typeof u
			executedNode = options.triggerNode;
		}

		// If the destination node is specified, check if it is a chat node or has a chat parent
		if (
			options.destinationNode &&
			(workflowsStore.checkIfNodeHasChatParent(options.destinationNode) ||
@@ -207,6 +221,7 @@ export function useRunWorkflow(useRunWorkflowOpts: { router: ReturnType<typeof u
			}
		}

		console.log('startNodeNames', startNodeNames);
		const startNodes: StartNodeData[] = startNodeNames.map((name) => {
			// Find for each start node the source data
			let sourceData = get(runData, [name, 0, 'source', 0], null);
|
@ -226,6 +241,8 @@ export function useRunWorkflow(useRunWorkflowOpts: { router: ReturnType<typeof u
|
|||
};
|
||||
});
|
||||
|
||||
console.log('startNodes', startNodes);
|
||||
|
||||
// -1 means the backend chooses the default
|
||||
// 0 is the old flow
|
||||
// 1 is the new flow
|
||||
|
@@ -240,6 +257,7 @@ export function useRunWorkflow(useRunWorkflowOpts: { router: ReturnType<typeof u
		if ('destinationNode' in options) {
			startRunData.destinationNode = options.destinationNode;
		}
		console.log('startRunData', startRunData);

		// Init the execution data to represent the start of the execution
		// that data which gets reused is already set and data of newly executed
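Rough sketch of assembling the start-run payload for a partial tool execution; the field names follow the variables used in this composable (startNodes, destinationNode, runData), but the interface is an assumption, not n8n's confirmed IStartRunData:

	interface StartNodeRef {
		name: string;
		sourceData: unknown | null;
	}

	interface PartialStartRunData {
		startNodes: StartNodeRef[];
		destinationNode?: string;
		runData?: Record<string, unknown>;
	}

	function buildStartRunData(
		startNodes: StartNodeRef[],
		runData: Record<string, unknown> | null,
		destinationNode?: string,
	): PartialStartRunData {
		const startRunData: PartialStartRunData = { startNodes };
		if (runData) startRunData.runData = runData;
		// Only set destinationNode when a specific node (e.g. a tool sub-node)
		// was requested, mirroring the `'destinationNode' in options` check above.
		if (destinationNode !== undefined) startRunData.destinationNode = destinationNode;
		return startRunData;
	}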