feat(OpenAI Node): Use v2 assistants API and add support for memory (#9406)

Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
This commit is contained in:
oleg 2024-05-16 16:24:19 +02:00 committed by GitHub
parent 40bce7f443
commit ce3eb12a6b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 175 additions and 28 deletions

View file

@ -133,6 +133,24 @@ const properties: INodeProperties[] = [
type: 'collection', type: 'collection',
default: {}, default: {},
options: [ options: [
{
displayName: 'Output Randomness (Temperature)',
name: 'temperature',
default: 1,
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
description:
'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive. We generally recommend altering this or Top P but not both.',
type: 'number',
},
{
displayName: 'Output Randomness (Top P)',
name: 'topP',
default: 1,
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
description:
'An alternative to sampling with temperature, controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',
type: 'number',
},
{ {
displayName: 'Fail if Assistant Already Exists', displayName: 'Fail if Assistant Already Exists',
name: 'failIfExists', name: 'failIfExists',
@ -176,7 +194,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
do { do {
const response = (await apiRequest.call(this, 'GET', '/assistants', { const response = (await apiRequest.call(this, 'GET', '/assistants', {
headers: { headers: {
'OpenAI-Beta': 'assistants=v1', 'OpenAI-Beta': 'assistants=v2',
}, },
qs: { qs: {
limit: 100, limit: 100,
@ -219,7 +237,6 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
name, name,
description: assistantDescription, description: assistantDescription,
instructions, instructions,
file_ids,
}; };
const tools = []; const tools = [];
@ -228,12 +245,28 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
tools.push({ tools.push({
type: 'code_interpreter', type: 'code_interpreter',
}); });
body.tool_resources = {
...((body.tool_resources as object) ?? {}),
code_interpreter: {
file_ids,
},
};
} }
if (knowledgeRetrieval) { if (knowledgeRetrieval) {
tools.push({ tools.push({
type: 'retrieval', type: 'file_search',
}); });
body.tool_resources = {
...((body.tool_resources as object) ?? {}),
file_search: {
vector_stores: [
{
file_ids,
},
],
},
};
} }
if (tools.length) { if (tools.length) {
@ -243,7 +276,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
const response = await apiRequest.call(this, 'POST', '/assistants', { const response = await apiRequest.call(this, 'POST', '/assistants', {
body, body,
headers: { headers: {
'OpenAI-Beta': 'assistants=v1', 'OpenAI-Beta': 'assistants=v2',
}, },
}); });

View file

@ -19,7 +19,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
const response = await apiRequest.call(this, 'DELETE', `/assistants/${assistantId}`, { const response = await apiRequest.call(this, 'DELETE', `/assistants/${assistantId}`, {
headers: { headers: {
'OpenAI-Beta': 'assistants=v1', 'OpenAI-Beta': 'assistants=v2',
}, },
}); });

View file

@ -30,7 +30,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
do { do {
const response = await apiRequest.call(this, 'GET', '/assistants', { const response = await apiRequest.call(this, 'GET', '/assistants', {
headers: { headers: {
'OpenAI-Beta': 'assistants=v1', 'OpenAI-Beta': 'assistants=v2',
}, },
qs: { qs: {
limit: 100, limit: 100,

View file

@ -4,9 +4,17 @@ import { OpenAIAssistantRunnable } from 'langchain/experimental/openai_assistant
import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema'; import type { OpenAIToolType } from 'langchain/dist/experimental/openai_assistant/schema';
import { OpenAI as OpenAIClient } from 'openai'; import { OpenAI as OpenAIClient } from 'openai';
import { NodeOperationError, updateDisplayOptions } from 'n8n-workflow'; import { NodeConnectionType, NodeOperationError, updateDisplayOptions } from 'n8n-workflow';
import type { IExecuteFunctions, INodeExecutionData, INodeProperties } from 'n8n-workflow'; import type {
IDataObject,
IExecuteFunctions,
INodeExecutionData,
INodeProperties,
} from 'n8n-workflow';
import type { BufferWindowMemory } from 'langchain/memory';
import omit from 'lodash/omit';
import type { BaseMessage } from '@langchain/core/messages';
import { formatToOpenAIAssistantTool } from '../../helpers/utils'; import { formatToOpenAIAssistantTool } from '../../helpers/utils';
import { assistantRLC } from '../descriptions'; import { assistantRLC } from '../descriptions';
@ -110,6 +118,12 @@ const displayOptions = {
}; };
export const description = updateDisplayOptions(displayOptions, properties); export const description = updateDisplayOptions(displayOptions, properties);
const mapChatMessageToThreadMessage = (
message: BaseMessage,
): OpenAIClient.Beta.Threads.ThreadCreateParams.Message => ({
role: message._getType() === 'ai' ? 'assistant' : 'user',
content: message.content.toString(),
});
export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> { export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
const credentials = await this.getCredentials('openAiApi'); const credentials = await this.getCredentials('openAiApi');
@ -182,11 +196,47 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
tools: tools ?? [], tools: tools ?? [],
}); });
const response = await agentExecutor.withConfig(getTracingConfig(this)).invoke({ const memory = (await this.getInputConnectionData(NodeConnectionType.AiMemory, 0)) as
| BufferWindowMemory
| undefined;
const chainValues: IDataObject = {
content: input, content: input,
signal: this.getExecutionCancelSignal(), signal: this.getExecutionCancelSignal(),
timeout: options.timeout ?? 10000, timeout: options.timeout ?? 10000,
}); };
let thread: OpenAIClient.Beta.Threads.Thread;
if (memory) {
const chatMessages = await memory.chatHistory.getMessages();
// Construct a new thread from the chat history to map the memory
if (chatMessages.length) {
const first32Messages = chatMessages.slice(0, 32);
// There is an undocumented limit of 32 messages per thread when creating a thread with messages
const mappedMessages: OpenAIClient.Beta.Threads.ThreadCreateParams.Message[] =
first32Messages.map(mapChatMessageToThreadMessage);
thread = await client.beta.threads.create({ messages: mappedMessages });
const overLimitMessages = chatMessages.slice(32).map(mapChatMessageToThreadMessage);
// Send the remaining messages that exceed the limit of 32 sequentially
for (const message of overLimitMessages) {
await client.beta.threads.messages.create(thread.id, message);
}
chainValues.threadId = thread.id;
}
}
const response = await agentExecutor.withConfig(getTracingConfig(this)).invoke(chainValues);
if (memory) {
await memory.saveContext({ input }, { output: response.output });
if (response.threadId && response.runId) {
const threadRun = await client.beta.threads.runs.retrieve(response.threadId, response.runId);
response.usage = threadRun.usage;
}
}
if ( if (
options.preserveOriginalTools !== false && options.preserveOriginalTools !== false &&
@ -197,6 +247,6 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
tools: assistantTools, tools: assistantTools,
}); });
} }
const filteredResponse = omit(response, ['signal', 'timeout']);
return [{ json: response, pairedItem: { item: i } }]; return [{ json: filteredResponse, pairedItem: { item: i } }];
} }

View file

@ -84,6 +84,25 @@ const properties: INodeProperties[] = [
default: false, default: false,
description: 'Whether to remove all custom tools (functions) from the assistant', description: 'Whether to remove all custom tools (functions) from the assistant',
}, },
{
displayName: 'Output Randomness (Temperature)',
name: 'temperature',
default: 1,
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
description:
'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive. We generally recommend altering this or Top P but not both.',
type: 'number',
},
{
displayName: 'Output Randomness (Top P)',
name: 'topP',
default: 1,
typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
description:
'An alternative to sampling with temperature, controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',
type: 'number',
},
], ],
}, },
]; ];
@ -109,6 +128,8 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
knowledgeRetrieval, knowledgeRetrieval,
file_ids, file_ids,
removeCustomTools, removeCustomTools,
temperature,
topP,
} = options; } = options;
const assistantDescription = options.description as string; const assistantDescription = options.description as string;
@ -128,7 +149,19 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
); );
} }
body.file_ids = files; body.tool_resources = {
...((body.tool_resources as object) ?? {}),
code_interpreter: {
file_ids,
},
file_search: {
vector_stores: [
{
file_ids,
},
],
},
};
} }
if (modelId) { if (modelId) {
@ -147,11 +180,19 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
body.instructions = instructions; body.instructions = instructions;
} }
if (temperature) {
body.temperature = temperature;
}
if (topP) {
body.top_p = topP;
}
let tools = let tools =
(( ((
await apiRequest.call(this, 'GET', `/assistants/${assistantId}`, { await apiRequest.call(this, 'GET', `/assistants/${assistantId}`, {
headers: { headers: {
'OpenAI-Beta': 'assistants=v1', 'OpenAI-Beta': 'assistants=v2',
}, },
}) })
).tools as IDataObject[]) || []; ).tools as IDataObject[]) || [];
@ -166,14 +207,14 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
tools = tools.filter((tool) => tool.type !== 'code_interpreter'); tools = tools.filter((tool) => tool.type !== 'code_interpreter');
} }
if (knowledgeRetrieval && !tools.find((tool) => tool.type === 'retrieval')) { if (knowledgeRetrieval && !tools.find((tool) => tool.type === 'file_search')) {
tools.push({ tools.push({
type: 'retrieval', type: 'file_search',
}); });
} }
if (knowledgeRetrieval === false && tools.find((tool) => tool.type === 'retrieval')) { if (knowledgeRetrieval === false && tools.find((tool) => tool.type === 'file_search')) {
tools = tools.filter((tool) => tool.type !== 'retrieval'); tools = tools.filter((tool) => tool.type !== 'file_search');
} }
if (removeCustomTools) { if (removeCustomTools) {
@ -185,7 +226,7 @@ export async function execute(this: IExecuteFunctions, i: number): Promise<INode
const response = await apiRequest.call(this, 'POST', `/assistants/${assistantId}`, { const response = await apiRequest.call(this, 'POST', `/assistants/${assistantId}`, {
body, body,
headers: { headers: {
'OpenAI-Beta': 'assistants=v1', 'OpenAI-Beta': 'assistants=v2',
}, },
}); });

View file

@ -46,6 +46,7 @@ const configureNodeInputs = (resource: string, operation: string, hideTools: str
if (resource === 'assistant' && operation === 'message') { if (resource === 'assistant' && operation === 'message') {
return [ return [
{ type: NodeConnectionType.Main }, { type: NodeConnectionType.Main },
{ type: NodeConnectionType.AiMemory, displayName: 'Memory', maxConnections: 1 },
{ type: NodeConnectionType.AiTool, displayName: 'Tools' }, { type: NodeConnectionType.AiTool, displayName: 'Tools' },
]; ];
} }

View file

@ -78,7 +78,7 @@ export async function assistantSearch(
): Promise<INodeListSearchResult> { ): Promise<INodeListSearchResult> {
const { data, has_more, last_id } = await apiRequest.call(this, 'GET', '/assistants', { const { data, has_more, last_id } = await apiRequest.call(this, 'GET', '/assistants', {
headers: { headers: {
'OpenAI-Beta': 'assistants=v1', 'OpenAI-Beta': 'assistants=v2',
}, },
qs: { qs: {
limit: 100, limit: 100,

View file

@ -84,13 +84,24 @@ describe('OpenAi, Assistant resource', () => {
expect(transport.apiRequest).toHaveBeenCalledWith('POST', '/assistants', { expect(transport.apiRequest).toHaveBeenCalledWith('POST', '/assistants', {
body: { body: {
description: 'description', description: 'description',
file_ids: [],
instructions: 'some instructions', instructions: 'some instructions',
model: 'gpt-model', model: 'gpt-model',
name: 'name', name: 'name',
tools: [{ type: 'code_interpreter' }, { type: 'retrieval' }], tool_resources: {
code_interpreter: {
file_ids: [],
}, },
headers: { 'OpenAI-Beta': 'assistants=v1' }, file_search: {
vector_stores: [
{
file_ids: [],
},
],
},
},
tools: [{ type: 'code_interpreter' }, { type: 'file_search' }],
},
headers: { 'OpenAI-Beta': 'assistants=v2' },
}); });
}); });
@ -124,7 +135,7 @@ describe('OpenAi, Assistant resource', () => {
); );
expect(transport.apiRequest).toHaveBeenCalledWith('DELETE', '/assistants/assistant-id', { expect(transport.apiRequest).toHaveBeenCalledWith('DELETE', '/assistants/assistant-id', {
headers: { 'OpenAI-Beta': 'assistants=v1' }, headers: { 'OpenAI-Beta': 'assistants=v2' },
}); });
}); });
@ -185,17 +196,28 @@ describe('OpenAi, Assistant resource', () => {
expect(transport.apiRequest).toHaveBeenCalledTimes(2); expect(transport.apiRequest).toHaveBeenCalledTimes(2);
expect(transport.apiRequest).toHaveBeenCalledWith('GET', '/assistants/assistant-id', { expect(transport.apiRequest).toHaveBeenCalledWith('GET', '/assistants/assistant-id', {
headers: { 'OpenAI-Beta': 'assistants=v1' }, headers: { 'OpenAI-Beta': 'assistants=v2' },
}); });
expect(transport.apiRequest).toHaveBeenCalledWith('POST', '/assistants/assistant-id', { expect(transport.apiRequest).toHaveBeenCalledWith('POST', '/assistants/assistant-id', {
body: { body: {
file_ids: [],
instructions: 'some instructions', instructions: 'some instructions',
model: 'gpt-model', model: 'gpt-model',
name: 'name', name: 'name',
tools: [{ type: 'existing_tool' }, { type: 'code_interpreter' }, { type: 'retrieval' }], tool_resources: {
code_interpreter: {
file_ids: [],
}, },
headers: { 'OpenAI-Beta': 'assistants=v1' }, file_search: {
vector_stores: [
{
file_ids: [],
},
],
},
},
tools: [{ type: 'existing_tool' }, { type: 'code_interpreter' }, { type: 'file_search' }],
},
headers: { 'OpenAI-Beta': 'assistants=v2' },
}); });
}); });
}); });