fix(OpenAI Node): Throw node operations error in case of openAi client error (#10448)

Co-authored-by: Shireen Missi <shireen@n8n.io>
Michael Kret 2024-08-16 17:59:50 +03:00 committed by GitHub
parent 77ebd93bd3
commit 0d3ed46199

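The change wraps the assistant invocation in a try/catch so that raw OpenAI client failures surface as a NodeOperationError tied to the failing item instead of an unhandled exception. Below is a minimal TypeScript sketch of that pattern; the ApplicationError/NodeOperationError stand-in classes and the callOpenAi helper are hypothetical placeholders (not n8n-workflow or the node's actual code), and the known-error branch here simply rethrows, which may differ from the node's own handling.

// Minimal sketch of the error-wrapping pattern (stand-ins, not n8n-workflow itself).
class ApplicationError extends Error {}
class NodeOperationError extends Error {
	constructor(nodeName: string, message: string, extra?: { itemIndex?: number }) {
		super(`[${nodeName}] ${message} (item ${extra?.itemIndex ?? 0})`);
	}
}

// Hypothetical stand-in for the OpenAI client call made by the node.
async function callOpenAi(): Promise<{ output: string }> {
	throw new Error('Incorrect API key provided');
}

async function execute(itemIndex: number) {
	let filteredResponse: Record<string, unknown> = {};
	try {
		const response = await callOpenAi();
		filteredResponse = { output: response.output };
	} catch (error) {
		// Unknown client errors become a NodeOperationError carrying the item index;
		// errors the framework already understands (ApplicationError) are rethrown unchanged here.
		if (!(error instanceof ApplicationError)) {
			throw new NodeOperationError('OpenAI Assistant', (error as Error).message, { itemIndex });
		}
		throw error;
	}
	return [{ json: filteredResponse, pairedItem: { item: itemIndex } }];
}

execute(0).catch((err) => console.error(err.message));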

@@ -4,7 +4,12 @@ import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant
 import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema';
 import { OpenAI as OpenAIClient } from 'openai';
-import { NodeConnectionType, NodeOperationError, updateDisplayOptions } from 'n8n-workflow';
+import {
+	ApplicationError,
+	NodeConnectionType,
+	NodeOperationError,
+	updateDisplayOptions,
+} from 'n8n-workflow';
 import type {
 	IDataObject,
 	IExecuteFunctions,
@@ -228,25 +233,36 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
 		}
 	}
-	const response = await agentExecutor.withConfig(getTracingConfig(this)).invoke(chainValues);
-	if (memory) {
-		await memory.saveContext({ input }, { output: response.output });
-		if (response.threadId && response.runId) {
-			const threadRun = await client.beta.threads.runs.retrieve(response.threadId, response.runId);
-			response.usage = threadRun.usage;
-		}
-	}
-	if (
-		options.preserveOriginalTools !== false &&
-		nodeVersion >= 1.3 &&
-		(assistantTools ?? [])?.length
-	) {
-		await client.beta.assistants.update(assistantId, {
-			tools: assistantTools,
-		});
-	}
-	const filteredResponse = omit(response, ['signal', 'timeout']);
+	let filteredResponse: IDataObject = {};
+	try {
+		const response = await agentExecutor.withConfig(getTracingConfig(this)).invoke(chainValues);
+		if (memory) {
+			await memory.saveContext({ input }, { output: response.output });
+			if (response.threadId && response.runId) {
+				const threadRun = await client.beta.threads.runs.retrieve(
+					response.threadId,
+					response.runId,
+				);
+				response.usage = threadRun.usage;
+			}
+		}
+		if (
+			options.preserveOriginalTools !== false &&
+			nodeVersion >= 1.3 &&
+			(assistantTools ?? [])?.length
+		) {
+			await client.beta.assistants.update(assistantId, {
+				tools: assistantTools,
+			});
+		}
+		filteredResponse = omit(response, ['signal', 'timeout']) as IDataObject;
+	} catch (error) {
+		if (!(error instanceof ApplicationError)) {
+			throw new NodeOperationError(this.getNode(), error.message, { itemIndex: i });
+		}
+	}
 	return [{ json: filteredResponse, pairedItem: { item: i } }];
 }