feat(OpenAI Node): Allow to select Image analyze model & improve types (#9660)

Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
Author: oleg · 2024-06-07 14:37:04 +02:00 · committed by GitHub
parent eccc637b63
commit 1fdd657a0c
GPG key ID: B5690EEEBB952194
7 changed files with 74 additions and 43 deletions

View file

@@ -9,7 +9,7 @@ import { apiRequest } from '../../transport';
 import { modelRLC } from '../descriptions';
 
 const properties: INodeProperties[] = [
-	modelRLC,
+	modelRLC('modelSearch'),
 	{
 		displayName: 'Name',
 		name: 'name',

View file

@@ -67,7 +67,7 @@ const properties: INodeProperties[] = [
 		description:
 			'Whether to augments the assistant with knowledge from outside its model, such as proprietary product information or documents, find more <a href="https://platform.openai.com/docs/assistants/tools/knowledge-retrieval" target="_blank">here</a>',
 	},
-	{ ...modelRLC, required: false },
+	{ ...modelRLC('modelSearch'), required: false },
 	{
 		displayName: 'Name',
 		name: 'name',

View file

@@ -1,6 +1,6 @@
 import type { INodeProperties } from 'n8n-workflow';
 
-export const modelRLC: INodeProperties = {
+export const modelRLC = (searchListMethod: string = 'modelSearch'): INodeProperties => ({
 	displayName: 'Model',
 	name: 'modelId',
 	type: 'resourceLocator',
@@ -12,7 +12,7 @@ export const modelRLC: INodeProperties = {
 			name: 'list',
 			type: 'list',
 			typeOptions: {
-				searchListMethod: 'modelSearch',
+				searchListMethod,
 				searchable: true,
 			},
 		},
@@ -23,7 +23,7 @@ export const modelRLC: INodeProperties = {
 			placeholder: 'e.g. gpt-4',
 		},
 	],
-};
+});
 
 export const assistantRLC: INodeProperties = {
 	displayName: 'Assistant',
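
Turning modelRLC from a constant into a factory lets each operation pick the listSearch method that backs the Model selector, while the default keeps existing callers working. A minimal sketch of the two call styles, assuming the 'modelSearch' default shown above (the import path and property arrays are illustrative, not part of this commit):

import type { INodeProperties } from 'n8n-workflow';
import { modelRLC } from './descriptions'; // path is illustrative

const chatProperties: INodeProperties[] = [
	modelRLC(), // same as modelRLC('modelSearch'): lists the gpt-* models
];

const imageProperties: INodeProperties[] = [
	// Spreading the returned object lets a call site override individual fields:
	{ ...modelRLC('imageModelSearch'), required: false },
];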

View file

@@ -6,8 +6,13 @@ import type {
 } from 'n8n-workflow';
 import { updateDisplayOptions, NodeOperationError } from 'n8n-workflow';
 import { apiRequest } from '../../transport';
+import { modelRLC } from '../descriptions';
 
 const properties: INodeProperties[] = [
+	{
+		...modelRLC('imageModelSearch'),
+		displayOptions: { show: { '@version': [{ _cnd: { gte: 1.4 } }] } },
+	},
 	{
 		displayName: 'Text Input',
 		name: 'text',
@@ -123,7 +128,11 @@ const displayOptions = {
 export const description = updateDisplayOptions(displayOptions, properties);
 
 export async function execute(this: IExecuteFunctions, i: number): Promise<INodeExecutionData[]> {
-	const model = 'gpt-4-vision-preview';
+	let model = 'gpt-4-vision-preview';
+	if (this.getNode().typeVersion >= 1.4) {
+		model = this.getNodeParameter('modelId', i, 'gpt-4o', { extractValue: true }) as string;
+	}
 	const text = this.getNodeParameter('text', i, '') as string;
 	const inputType = this.getNodeParameter('inputType', i) as string;
 	const options = this.getNodeParameter('options', i, {});
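
The execute change is version-gated: workflows saved with node versions before 1.4 keep the hard-coded gpt-4-vision-preview model, while 1.4+ nodes read the new Model parameter, which defaults to gpt-4o. A simplified sketch of that decision, not the node's actual helper, assuming the parameter default applies when nothing is selected:

// Hypothetical helper illustrating the version gate above:
function resolveImageAnalyzeModel(typeVersion: number, selectedModelId?: string): string {
	if (typeVersion < 1.4) {
		// Pre-1.4 workflows behave exactly as before.
		return 'gpt-4-vision-preview';
	}
	// 1.4+ nodes use whatever the Model resource locator resolved to, defaulting to gpt-4o.
	return selectedModelId ?? 'gpt-4o';
}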

View file

@@ -14,7 +14,7 @@ import { getConnectedTools } from '../../../../../utils/helpers';
 import { MODELS_NOT_SUPPORT_FUNCTION_CALLS } from '../../helpers/constants';
 
 const properties: INodeProperties[] = [
-	modelRLC,
+	modelRLC('modelSearch'),
 	{
 		displayName: 'Messages',
 		name: 'messages',

View file

@@ -69,7 +69,7 @@ export const versionDescription: INodeTypeDescription = {
 	name: 'openAi',
 	icon: { light: 'file:openAi.svg', dark: 'file:openAi.dark.svg' },
 	group: ['transform'],
-	version: [1, 1.1, 1.2, 1.3],
+	version: [1, 1.1, 1.2, 1.3, 1.4],
 	subtitle: `={{(${prettifyOperation})($parameter.resource, $parameter.operation)}}`,
 	description: 'Message an assistant or GPT, analyze images, generate audio, etc.',
 	defaults: {

View file

@@ -5,6 +5,8 @@ import type {
 	INodeListSearchResult,
 } from 'n8n-workflow';
 
+import type { Model } from 'openai/resources/models';
+import type { Assistant } from 'openai/resources/beta/assistants';
 import { apiRequest } from '../transport';
 
 export async function fileSearch(
@@ -38,37 +40,52 @@ export async function fileSearch(
 	}
 }
 
+const getModelSearch =
+	(filterCondition: (model: Model) => boolean) =>
+	async (ctx: ILoadOptionsFunctions, filter?: string): Promise<INodeListSearchResult> => {
+		let { data } = (await apiRequest.call(ctx, 'GET', '/models')) as { data: Model[] };
+		data = data?.filter((model) => filterCondition(model));
+
+		let results: INodeListSearchItems[] = [];
+
+		if (filter) {
+			for (const model of data || []) {
+				if (model.id?.toLowerCase().includes(filter.toLowerCase())) {
+					results.push({
+						name: model.id.toUpperCase(),
+						value: model.id,
+					});
+				}
+			}
+		} else {
+			results = (data || []).map((model) => ({
+				name: model.id.toUpperCase(),
+				value: model.id,
+			}));
+		}
+
+		results = results.sort((a, b) => a.name.localeCompare(b.name));
+
+		return {
+			results,
+		};
+	};
+
 export async function modelSearch(
 	this: ILoadOptionsFunctions,
 	filter?: string,
 ): Promise<INodeListSearchResult> {
-	let { data } = await apiRequest.call(this, 'GET', '/models');
-
-	data = data?.filter((model: IDataObject) => (model.id as string).startsWith('gpt-'));
-
-	let results: INodeListSearchItems[] = [];
-
-	if (filter) {
-		for (const model of data || []) {
-			if ((model.id as string)?.toLowerCase().includes(filter.toLowerCase())) {
-				results.push({
-					name: (model.id as string).toUpperCase(),
-					value: model.id as string,
-				});
-			}
-		}
-	} else {
-		results = (data || []).map((model: IDataObject) => ({
-			name: (model.id as string).toUpperCase(),
-			value: model.id as string,
-		}));
-	}
-
-	results = results.sort((a, b) => a.name.localeCompare(b.name));
-
-	return {
-		results,
-	};
+	return await getModelSearch((model) => model.id.startsWith('gpt-'))(this, filter);
+}
+
+export async function imageModelSearch(
+	this: ILoadOptionsFunctions,
+	filter?: string,
+): Promise<INodeListSearchResult> {
+	return await getModelSearch(
+		(model) => model.id.includes('vision') || model.id.includes('gpt-4o'),
+	)(this, filter);
 }
 
 export async function assistantSearch(
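
getModelSearch is a curried helper: the first call fixes a predicate over the GET /models listing, and the returned function performs the request in the node's ILoadOptionsFunctions context with optional name filtering. A hedged sketch of how a further search method could reuse it inside the same methods file (audioModelSearch and its 'tts' predicate are purely illustrative, not part of this commit):

// Illustrative only: another predicate-based search built on the same helper.
export async function audioModelSearch(
	this: ILoadOptionsFunctions,
	filter?: string,
): Promise<INodeListSearchResult> {
	return await getModelSearch((model) => model.id.includes('tts'))(this, filter);
}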
@@ -76,7 +93,7 @@ export async function assistantSearch(
 	filter?: string,
 	paginationToken?: string,
 ): Promise<INodeListSearchResult> {
-	const { data, has_more, last_id } = await apiRequest.call(this, 'GET', '/assistants', {
+	const { data, has_more, last_id } = (await apiRequest.call(this, 'GET', '/assistants', {
 		headers: {
 			'OpenAI-Beta': 'assistants=v2',
 		},
@@ -84,9 +101,14 @@ export async function assistantSearch(
 			limit: 100,
 			after: paginationToken,
 		},
-	});
+	})) as {
+		data: Assistant[];
+		has_more: boolean;
+		last_id: string;
+		first_id: string;
+	};
 
-	if (has_more === true) {
+	if (has_more) {
 		paginationToken = last_id;
 	} else {
 		paginationToken = undefined;
@@ -96,10 +118,10 @@ export async function assistantSearch(
 		const results: INodeListSearchItems[] = [];
 
 		for (const assistant of data || []) {
-			if ((assistant.name as string)?.toLowerCase().includes(filter.toLowerCase())) {
+			if (assistant.name?.toLowerCase().includes(filter.toLowerCase())) {
 				results.push({
-					name: assistant.name as string,
-					value: assistant.id as string,
+					name: assistant.name,
+					value: assistant.id,
 				});
 			}
 		}
@@ -109,9 +131,9 @@ export async function assistantSearch(
 		};
 	} else {
 		return {
-			results: (data || []).map((assistant: IDataObject) => ({
-				name: assistant.name as string,
-				value: assistant.id as string,
+			results: (data || []).map((assistant) => ({
+				name: assistant.name ?? assistant.id,
+				value: assistant.id,
 			})),
 			paginationToken,
 		};
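
Casting the GET /assistants response to a shape built from the openai package's Assistant type is what removes the `as string` casts above: assistant.id is a string and assistant.name is nullable, hence the `?? assistant.id` fallback. The assumed page shape, trimmed to the fields this function touches:

import type { Assistant } from 'openai/resources/beta/assistants';

// Assumed list-page shape for GET /assistants (only the fields used above):
type AssistantListPage = {
	data: Assistant[]; // Assistant['name'] is nullable, hence the `?? assistant.id` fallback
	has_more: boolean; // true when another page of assistants exists
	last_id: string; // fed back as paginationToken for the next request
	first_id: string;
};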