fix(editor): Fix error rendering and indexing of LLM sub-node outputs (#10688)

oleg 2024-09-11 16:17:13 +02:00 committed by GitHub
parent 5c47a5f691
commit 50459bacab
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 69 additions and 31 deletions

View file

@@ -20,7 +20,7 @@ type TokensUsageParser = (llmOutput: LLMResult['llmOutput']) => {
 	totalTokens: number;
 };
 
-type LastInput = {
+type RunDetail = {
 	index: number;
 	messages: BaseMessage[] | string[] | string;
 	options: SerializedSecret | SerializedNotImplemented | SerializedFields;
@@ -38,11 +38,13 @@ export class N8nLlmTracing extends BaseCallbackHandler {
 	completionTokensEstimate = 0;
 
-	lastInput: LastInput = {
-		index: 0,
-		messages: [],
-		options: {},
-	};
+	/**
+	 * A map to associate LLM run IDs to run details.
+	 * Key: Unique identifier for each LLM run (run ID)
+	 * Value: RunDetails object
+	 *
+	 */
+	runsMap: Record<string, RunDetail> = {};
 
 	options = {
 		// Default(OpenAI format) parser
@@ -83,7 +85,11 @@ export class N8nLlmTracing extends BaseCallbackHandler {
 		return encodedListLength.reduce((acc, curr) => acc + curr, 0);
 	}
 
-	async handleLLMEnd(output: LLMResult) {
+	async handleLLMEnd(output: LLMResult, runId: string) {
+		// The fallback should never happen since handleLLMStart should always set the run details
+		// but just in case, we set the index to the length of the runsMap
+		const runDetails = this.runsMap[runId] ?? { index: Object.keys(this.runsMap).length };
+
 		output.generations = output.generations.map((gen) =>
 			gen.map((g) => pick(g, ['text', 'generationInfo'])),
 		);
@@ -120,47 +126,43 @@ export class N8nLlmTracing extends BaseCallbackHandler {
 		}
 
 		const parsedMessages =
-			typeof this.lastInput.messages === 'string'
-				? this.lastInput.messages
-				: this.lastInput.messages.map((message) => {
+			typeof runDetails.messages === 'string'
+				? runDetails.messages
+				: runDetails.messages.map((message) => {
 						if (typeof message === 'string') return message;
 						if (typeof message?.toJSON === 'function') return message.toJSON();
 
 						return message;
 					});
 
-		this.executionFunctions.addOutputData(this.connectionType, this.lastInput.index, [
+		this.executionFunctions.addOutputData(this.connectionType, runDetails.index, [
 			[{ json: { ...response } }],
 		]);
 		void logAiEvent(this.executionFunctions, 'n8n.ai.llm.generated', {
 			messages: parsedMessages,
-			options: this.lastInput.options,
+			options: runDetails.options,
 			response,
 		});
 	}
 
-	async handleLLMStart(llm: Serialized, prompts: string[]) {
+	async handleLLMStart(llm: Serialized, prompts: string[], runId: string) {
 		const estimatedTokens = await this.estimateTokensFromStringList(prompts);
 
 		const options = llm.type === 'constructor' ? llm.kwargs : llm;
-		const { index } = this.executionFunctions.addInputData(
-			this.connectionType,
-			[
-				[
-					{
-						json: {
-							messages: prompts,
-							estimatedTokens,
-							options,
-						},
-					},
-				],
-			],
-			this.lastInput.index + 1,
-		);
+		const { index } = this.executionFunctions.addInputData(this.connectionType, [
+			[
+				{
+					json: {
+						messages: prompts,
+						estimatedTokens,
+						options,
+					},
+				},
+			],
+		]);
 
-		// Save the last input for later use when processing `handleLLMEnd` event
-		this.lastInput = {
+		// Save the run details for later use when processing `handleLLMEnd` event
+		this.runsMap[runId] = {
 			index,
 			options,
 			messages: prompts,

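The core of this change is replacing the single `lastInput` slot with a `runsMap` keyed by the LangChain `runId`, so each `handleLLMEnd` resolves the input index of its own run instead of whichever run started last. A minimal sketch of the difference, using hypothetical `SingleSlotTracer`/`MapTracer` classes and simplified types standing in for the real handler:

type RunDetail = { index: number; messages: string[] };

class SingleSlotTracer {
	lastInput: RunDetail = { index: 0, messages: [] };

	start(index: number, messages: string[]) {
		// Run B overwrites run A's details here while A is still in flight
		this.lastInput = { index, messages };
	}

	end(_runId: string): RunDetail {
		return this.lastInput;
	}
}

class MapTracer {
	runsMap: Record<string, RunDetail> = {};

	start(runId: string, index: number, messages: string[]) {
		this.runsMap[runId] = { index, messages };
	}

	end(runId: string): RunDetail {
		// Same fallback as the patch: derive an index if start was never seen
		return this.runsMap[runId] ?? { index: Object.keys(this.runsMap).length, messages: [] };
	}
}

// Two runs start before either ends, as can happen with parallel LLM calls:
const single = new SingleSlotTracer();
single.start(0, ['prompt A']);
single.start(1, ['prompt B']);
console.log(single.end('a').index); // 1 -- run A's output is indexed against run B's input

const mapped = new MapTracer();
mapped.start('a', 0, ['prompt A']);
mapped.start('b', 1, ['prompt B']);
console.log(mapped.end('a').index); // 0 -- each run keeps its own input index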
View file

@@ -56,6 +56,7 @@ import { useWorkflowsStore } from '@/stores/workflows.store';
 import { useNDVStore } from '@/stores/ndv.store';
 import { useNodeTypesStore } from '@/stores/nodeTypes.store';
 import { useNodeHelpers } from '@/composables/useNodeHelpers';
+import { useNodeType } from '@/composables/useNodeType';
 import { useToast } from '@/composables/useToast';
 import { isEqual, isObject } from 'lodash-es';
 import { useExternalHooks } from '@/composables/useExternalHooks';
@@ -171,12 +172,16 @@ export default defineComponent({
 			runIndex: props.runIndex,
 			displayMode: ndvStore.getPanelDisplayMode(props.paneType),
 		});
+		const { isSubNodeType } = useNodeType({
+			node,
+		});
 
 		return {
 			...useToast(),
 			externalHooks,
 			nodeHelpers,
 			pinnedData,
+			isSubNodeType,
 		};
 	},
 	data() {
@@ -308,6 +313,12 @@ export default defineComponent({
 			if (!this.node) {
 				return null;
 			}
+			// If the node is a sub-node, we need to get the parent node error to check for input errors
+			if (this.isSubNodeType && this.paneType === 'input') {
+				const parentNode = this.workflow.getChildNodes(this.node?.name ?? '', 'ALL_NON_MAIN')[0];
+				return this.workflowRunData?.[parentNode]?.[this.runIndex]?.error as NodeError;
+			}
+
			return this.workflowRunData?.[this.node?.name]?.[this.runIndex]?.error as NodeError;
 		},
 		hasRunError(): boolean {

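For sub-nodes, the input pane's error now comes from the run data of the node the sub-node is attached to. A hedged sketch of that lookup, with simplified shapes and a hypothetical `getNonMainConnectedNodes` callback standing in for `workflow.getChildNodes(name, 'ALL_NON_MAIN')` from the diff above:

type NodeError = { message: string };
type WorkflowRunData = Record<string, Array<{ error?: NodeError }>>;

function resolvePaneError(
	runData: WorkflowRunData | undefined,
	nodeName: string,
	runIndex: number,
	isSubNodeType: boolean,
	paneType: 'input' | 'output',
	getNonMainConnectedNodes: (name: string) => string[],
): NodeError | undefined {
	// A sub-node's own run data carries no input error; the failure is
	// recorded on the node it feeds, so look there for the same run index.
	if (isSubNodeType && paneType === 'input') {
		const parentNode = getNonMainConnectedNodes(nodeName)[0];
		return runData?.[parentNode]?.[runIndex]?.error;
	}
	return runData?.[nodeName]?.[runIndex]?.error;
}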
View file

@@ -9,10 +9,12 @@ import hljs from 'highlight.js/lib/core';
 import { useClipboard } from '@/composables/useClipboard';
 import { useI18n } from '@/composables/useI18n';
 import { useToast } from '@/composables/useToast';
-import { NodeConnectionType, type IDataObject } from 'n8n-workflow';
+import { NodeConnectionType } from 'n8n-workflow';
+import type { NodeError, IDataObject } from 'n8n-workflow';
 
 const props = defineProps<{
 	runData: IAiDataContent;
+	error?: NodeError;
 }>();
 
 const i18n = useI18n();
@@ -149,7 +151,7 @@ onMounted(() => {
 			<p :class="$style.blockTitle">{{ capitalize(runData.inOut) }}</p>
 			<!-- @click.stop to prevent event from bubbling to blockHeader and toggling expanded state when clicking on rawSwitch -->
 			<el-switch
-				v-if="contentParsed"
+				v-if="contentParsed && !error"
 				v-model="isShowRaw"
 				:class="$style.rawSwitch"
 				active-text="RAW JSON"
@@ -162,8 +164,10 @@ onMounted(() => {
 				[$style.blockContentExpanded]: isExpanded,
 			}"
 		>
+			<NodeErrorView v-if="error" :error="error" :class="$style.error" />
 			<div
 				v-for="({ parsedContent, raw }, index) in parsedRun"
+				v-else
 				:key="index"
 				:class="$style.contentText"
 				:data-content-type="parsedContent?.type"
@@ -299,4 +303,7 @@ onMounted(() => {
 	padding: 0;
 	color: var(--color-text-base);
 }
+.error {
+	padding: var(--spacing-s) 0;
+}
 </style>

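The template change reduces to a small display-state rule: an error replaces the parsed content block, and the RAW JSON toggle is only offered when parsed content exists and there is no error. Stated as plain functions (hypothetical names, for illustration only):

// Illustration of the two v-if conditions introduced above.
function showErrorView(error: unknown): boolean {
	return Boolean(error); // NodeErrorView renders; the content div takes v-else
}

function showRawSwitch(contentParsed: boolean, error: unknown): boolean {
	return contentParsed && !error; // mirrors v-if="contentParsed && !error"
}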
View file

@@ -2,7 +2,12 @@
 import type { IAiData, IAiDataContent } from '@/Interface';
 import { useNodeTypesStore } from '@/stores/nodeTypes.store';
 import { useWorkflowsStore } from '@/stores/workflows.store';
-import type { INodeExecutionData, INodeTypeDescription, NodeConnectionType } from 'n8n-workflow';
+import type {
+	INodeExecutionData,
+	INodeTypeDescription,
+	NodeConnectionType,
+	NodeError,
+} from 'n8n-workflow';
 import { computed } from 'vue';
 import NodeIcon from '@/components/NodeIcon.vue';
 import AiRunContentBlock from './AiRunContentBlock.vue';
@@ -85,6 +90,16 @@ const runMeta = computed(() => {
 	}
 	return extractRunMeta(outputRun.value);
 });
+
+const executionRunData = computed(() => {
+	return workflowsStore.getWorkflowExecution?.data?.resultData?.runData;
+});
+
+const outputError = computed(() => {
+	return executionRunData.value?.[props.inputData.node]?.[props.inputData.runIndex]?.error as
+		| NodeError
+		| undefined;
+});
 </script>
 
 <template>
@@ -155,7 +170,10 @@ const runMeta = computed(() => {
 		</header>
 		<main v-for="(run, index) in props.inputData.data" :key="index" :class="$style.content">
-			<AiRunContentBlock :run-data="run" />
+			<AiRunContentBlock
+				:run-data="run"
+				:error="run.inOut === 'output' ? outputError : undefined"
+			/>
 		</main>
 	</div>
 </template>
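Putting the last two files together: the parent component reads the error for the rendered node and run index out of the execution's run data and forwards it only to the 'output' block. A condensed sketch of that wiring, assuming simplified store shapes:

// Sketch with simplified shapes; mirrors the outputError computed and the
// :error binding in the template above.
type NodeError = { message: string };
type RunData = Record<string, Array<{ error?: NodeError }>>;

function errorForBlock(
	inOut: 'input' | 'output',
	runData: RunData | undefined,
	nodeName: string,
	runIndex: number,
): NodeError | undefined {
	const outputError = runData?.[nodeName]?.[runIndex]?.error;
	// Input blocks of sub-nodes get their error via the parent lookup instead
	return inOut === 'output' ? outputError : undefined;
}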