diff --git a/packages/nodes-base/credentials/OpenAiApi.credentials.ts b/packages/nodes-base/credentials/OpenAiApi.credentials.ts
new file mode 100644
index 0000000000..afd55e5475
--- /dev/null
+++ b/packages/nodes-base/credentials/OpenAiApi.credentials.ts
@@ -0,0 +1,50 @@
+import {
+	IAuthenticateGeneric,
+	ICredentialTestRequest,
+	ICredentialType,
+	INodeProperties,
+} from 'n8n-workflow';
+
+export class OpenAiApi implements ICredentialType {
+	name = 'openAiApi';
+
+	displayName = 'OpenAi';
+
+	documentationUrl = 'openAiApi';
+
+	properties: INodeProperties[] = [
+		{
+			displayName: 'API Key',
+			name: 'apiKey',
+			type: 'string',
+			typeOptions: { password: true },
+			required: true,
+			default: '',
+		},
+		{
+			displayName: 'Organization ID',
+			name: 'organizationId',
+			type: 'string',
+			default: '',
+			description:
+				"For users who belong to multiple organizations, you can set which organization is used for an API request. Usage from these API requests will count against the specified organization's subscription quota.",
+		},
+	];
+
+	authenticate: IAuthenticateGeneric = {
+		type: 'generic',
+		properties: {
+			headers: {
+				Authorization: '=Bearer {{$credentials.apiKey}}',
+				'OpenAI-Organization': '={{$credentials.organizationId}}',
+			},
+		},
+	};
+
+	test: ICredentialTestRequest = {
+		request: {
+			baseURL: 'https://api.openai.com',
+			url: '/v1/models',
+		},
+	};
+}
diff --git a/packages/nodes-base/nodes/OpenAi/ImageDescription.ts b/packages/nodes-base/nodes/OpenAi/ImageDescription.ts
new file mode 100644
index 0000000000..d98b2f7d60
--- /dev/null
+++ b/packages/nodes-base/nodes/OpenAi/ImageDescription.ts
@@ -0,0 +1,186 @@
+import { INodeExecutionData, INodeProperties } from 'n8n-workflow';
+
+export const imageOperations: INodeProperties[] = [
+	{
+		displayName: 'Operation',
+		name: 'operation',
+		type: 'options',
+		noDataExpression: true,
+		displayOptions: {
+			show: {
+				resource: ['image'],
+			},
+		},
+		options: [
+			{
+				name: 'Create',
+				value: 'create',
+				action: 'Create an Image',
+				description: 'Create an image for a given text',
+				routing: {
+					request: {
+						method: 'POST',
+						url: '/v1/images/generations',
+					},
+				},
+			},
+		],
+		routing: {
+			output: {
+				postReceive: [
+					{
+						type: 'rootProperty',
+						properties: {
+							property: 'data',
+						},
+					},
+				],
+			},
+		},
+		default: 'create',
+	},
+];
+
+const createOperations: INodeProperties[] = [
+	{
+		displayName: 'Prompt',
+		name: 'prompt',
+		type: 'string',
+		placeholder: 'e.g. A cute cat eating a dinosaur',
+		description:
+			'A text description of the desired image(s). The maximum length is 1000 characters.',
+		displayOptions: {
+			show: {
+				resource: ['image'],
+				operation: ['create'],
+			},
+		},
+		default: '',
+		routing: {
+			send: {
+				type: 'body',
+				property: 'prompt',
+			},
+		},
+	},
+	{
+		displayName: 'Response Format',
+		name: 'responseFormat',
+		type: 'options',
+		default: 'binaryData',
+		description: 'The format in which to return the image(s)',
+		displayOptions: {
+			show: {
+				resource: ['image'],
+				operation: ['create'],
+			},
+		},
+		options: [
+			{
+				name: 'Binary Data',
+				value: 'binaryData',
+			},
+			{
+				name: 'Image Url',
+				value: 'imageUrl',
+			},
+		],
+		routing: {
+			send: {
+				type: 'body',
+				property: 'response_format',
+				value: '={{ $value === "imageUrl" ? "url" : "b64_json" }}',
"url" : "b64_json" }}', + }, + output: { + postReceive: [ + async function (items: INodeExecutionData[]): Promise { + if (this.getNode().parameters.responseFormat === 'imageUrl') { + return items; + } + + const result: INodeExecutionData[] = []; + for (let i = 0; i < items.length; i++) { + result.push({ + json: {}, + binary: { + data: await this.helpers.prepareBinaryData( + Buffer.from(items[i].json.b64_json as string, 'base64'), + 'data', + ), + }, + } as INodeExecutionData); + } + + return result; + }, + ], + }, + }, + }, + { + displayName: 'Options', + name: 'options', + placeholder: 'Add Option', + description: 'Additional options to add', + type: 'collection', + default: {}, + displayOptions: { + show: { + resource: ['image'], + operation: ['create'], + }, + }, + options: [ + { + displayName: 'Number of Images', + name: 'n', + default: 1, + description: 'Number of images to generate', + type: 'number', + typeOptions: { + minValue: 1, + maxValue: 10, + }, + routing: { + send: { + type: 'body', + property: 'n', + }, + }, + }, + { + displayName: 'Resolution', + name: 'size', + type: 'options', + options: [ + { + name: '256x256', + value: '256x256', + }, + { + name: '512x512', + value: '512x512', + }, + { + name: '1024x1024', + value: '1024x1024', + }, + ], + routing: { + send: { + type: 'body', + property: 'size', + }, + }, + default: '1024x1024', + }, + ], + }, +]; + +export const imageFields: INodeProperties[] = [ + /* -------------------------------------------------------------------------- */ + /* image:create */ + /* -------------------------------------------------------------------------- */ + ...createOperations, +]; diff --git a/packages/nodes-base/nodes/OpenAi/OpenAi.node.json b/packages/nodes-base/nodes/OpenAi/OpenAi.node.json new file mode 100644 index 0000000000..65ab4550ce --- /dev/null +++ b/packages/nodes-base/nodes/OpenAi/OpenAi.node.json @@ -0,0 +1,19 @@ +{ + "node": "n8n-nodes-base.openAi", + "nodeVersion": "1.0", + "codexVersion": "1.0", + "categories": ["Utility"], + "resources": { + "credentialDocumentation": [ + { + "url": "https://docs.n8n.io/credentials/openAiApi" + } + ], + "primaryDocumentation": [ + { + "url": "https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-base.openai/" + } + ] + }, + "alias": ["ChatGPT", "DallE"] +} diff --git a/packages/nodes-base/nodes/OpenAi/OpenAi.node.ts b/packages/nodes-base/nodes/OpenAi/OpenAi.node.ts new file mode 100644 index 0000000000..ec07f31167 --- /dev/null +++ b/packages/nodes-base/nodes/OpenAi/OpenAi.node.ts @@ -0,0 +1,54 @@ +import { INodeType, INodeTypeDescription } from 'n8n-workflow'; +import { imageFields, imageOperations } from './ImageDescription'; +import { textFields, textOperations } from './TextDescription'; + +export class OpenAi implements INodeType { + description: INodeTypeDescription = { + displayName: 'OpenAI', + name: 'openAi', + icon: 'file:openAi.svg', + group: ['transform'], + version: 1, + subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}', + description: 'Consume Open AI', + defaults: { + name: 'OpenAI', + }, + inputs: ['main'], + outputs: ['main'], + credentials: [ + { + name: 'openAiApi', + required: true, + }, + ], + requestDefaults: { + baseURL: 'https://api.openai.com', + }, + properties: [ + { + displayName: 'Resource', + name: 'resource', + type: 'options', + noDataExpression: true, + options: [ + { + name: 'Image', + value: 'image', + }, + { + name: 'Text', + value: 'text', + }, + ], + default: 'text', + }, + + ...imageOperations, + ...imageFields, + + 
+			...textOperations,
+			...textFields,
+		],
+	};
+}
diff --git a/packages/nodes-base/nodes/OpenAi/TextDescription.ts b/packages/nodes-base/nodes/OpenAi/TextDescription.ts
new file mode 100644
index 0000000000..92022dfcd2
--- /dev/null
+++ b/packages/nodes-base/nodes/OpenAi/TextDescription.ts
@@ -0,0 +1,472 @@
+import { INodeExecutionData, INodeProperties } from 'n8n-workflow';
+
+export const textOperations: INodeProperties[] = [
+	{
+		displayName: 'Operation',
+		name: 'operation',
+		type: 'options',
+		noDataExpression: true,
+		displayOptions: {
+			show: {
+				resource: ['text'],
+			},
+		},
+		options: [
+			{
+				name: 'Complete',
+				value: 'complete',
+				action: 'Create a Completion',
+				description: 'Create one or more completions for a given text',
+				routing: {
+					request: {
+						method: 'POST',
+						url: '/v1/completions',
+					},
+				},
+			},
+			{
+				name: 'Edit',
+				value: 'edit',
+				action: 'Create an Edit',
+				description: 'Create an edited version for a given text',
+				routing: {
+					request: {
+						method: 'POST',
+						url: '/v1/edits',
+					},
+				},
+			},
+			{
+				name: 'Moderate',
+				value: 'moderate',
+				action: 'Create a Moderation',
+				description: "Classify if a text violates OpenAI's content policy",
+				routing: {
+					request: {
+						method: 'POST',
+						url: '/v1/moderations',
+					},
+				},
+			},
+		],
+		default: 'complete',
+	},
+];
+
+const completeOperations: INodeProperties[] = [
+	{
+		displayName: 'Model',
+		name: 'model',
+		type: 'options',
+		description:
+			'The model which will generate the completion. Learn more.',
+		displayOptions: {
+			show: {
+				operation: ['complete'],
+				resource: ['text'],
+			},
+		},
+		typeOptions: {
+			loadOptions: {
+				routing: {
+					request: {
+						method: 'GET',
+						url: '/v1/models',
+					},
+					output: {
+						postReceive: [
+							{
+								type: 'rootProperty',
+								properties: {
+									property: 'data',
+								},
+							},
+							{
+								type: 'filter',
+								properties: {
+									pass: "={{ !$responseItem.id.startsWith('audio-') && !['cushman:2020-05-03', 'davinci-if:3.0.0', 'davinci-instruct-beta:2.0.0', 'if'].includes($responseItem.id) && !$responseItem.id.includes('-edit-') && !$responseItem.id.endsWith(':001') }}",
+								},
+							},
+							{
+								type: 'setKeyValue',
+								properties: {
+									// eslint-disable-next-line n8n-nodes-base/node-param-display-name-miscased-id
+									name: '={{$responseItem.id}}',
+									value: '={{$responseItem.id}}',
+								},
+							},
+							{
+								type: 'sort',
+								properties: {
+									key: 'name',
+								},
+							},
+						],
+					},
+				},
+			},
+		},
+		routing: {
+			send: {
+				type: 'body',
+				property: 'model',
+			},
+		},
+		default: 'text-davinci-003',
+	},
+	{
+		displayName: 'Prompt',
+		name: 'prompt',
+		type: 'string',
+		description: 'The prompt to generate completion(s) for',
+		placeholder: 'e.g. Say this is a test',
+		displayOptions: {
+			show: {
+				resource: ['text'],
+				operation: ['complete'],
+			},
+		},
+		default: '',
+		typeOptions: {
+			rows: 2,
+		},
+		routing: {
+			send: {
+				type: 'body',
+				property: 'prompt',
+			},
+		},
+	},
+];
+
+const editOperations: INodeProperties[] = [
+	{
+		displayName: 'Model',
+		name: 'model',
+		type: 'options',
+		description:
+			'The model which will generate the edited version. Learn more.',
+		displayOptions: {
+			show: {
+				operation: ['edit'],
+				resource: ['text'],
+			},
+		},
+		options: [
+			{
+				name: 'code-davinci-edit-001',
+				value: 'code-davinci-edit-001',
+			},
+			{
+				name: 'text-davinci-edit-001',
+				value: 'text-davinci-edit-001',
+			},
+		],
+		routing: {
+			send: {
+				type: 'body',
+				property: 'model',
+			},
+		},
+		default: 'text-davinci-edit-001',
+	},
+	{
+		displayName: 'Input',
+		name: 'input',
+		type: 'string',
+		placeholder: 'e.g. What day of the wek is it?',
+		description: 'The input text to be edited',
+		displayOptions: {
+			show: {
+				resource: ['text'],
+				operation: ['edit'],
+			},
+		},
+		default: '',
+		routing: {
+			send: {
+				type: 'body',
+				property: 'input',
+			},
+		},
+	},
+	{
+		displayName: 'Instruction',
+		name: 'instruction',
+		type: 'string',
+		placeholder: 'e.g. Fix the spelling mistakes',
+		description: 'The instruction that tells the model how to edit the input text',
+		displayOptions: {
+			show: {
+				resource: ['text'],
+				operation: ['edit'],
+			},
+		},
+		default: '',
+		routing: {
+			send: {
+				type: 'body',
+				property: 'instruction',
+			},
+		},
+	},
+];
+
+const moderateOperations: INodeProperties[] = [
+	{
+		displayName: 'Model',
+		name: 'model',
+		type: 'options',
+		description:
+			'The model which will classify the text. Learn more.',
+		displayOptions: {
+			show: {
+				resource: ['text'],
+				operation: ['moderate'],
+			},
+		},
+		options: [
+			{
+				name: 'text-moderation-stable',
+				value: 'text-moderation-stable',
+			},
+			{
+				name: 'text-moderation-latest',
+				value: 'text-moderation-latest',
+			},
+		],
+		routing: {
+			send: {
+				type: 'body',
+				property: 'model',
+			},
+		},
+		default: 'text-moderation-latest',
+	},
+	{
+		displayName: 'Input',
+		name: 'input',
+		type: 'string',
+		placeholder: 'e.g. I want to kill them',
+		description: 'The input text to classify',
+		displayOptions: {
+			show: {
+				resource: ['text'],
+				operation: ['moderate'],
+			},
+		},
+		default: '',
+		routing: {
+			send: {
+				type: 'body',
+				property: 'input',
+			},
+		},
+	},
+
+	{
+		displayName: 'Simplify',
+		name: 'simplifyOutput',
+		type: 'boolean',
+		default: true,
+		displayOptions: {
+			show: {
+				operation: ['moderate'],
+				resource: ['text'],
+			},
+		},
+		routing: {
+			output: {
+				postReceive: [
+					{
+						type: 'set',
+						enabled: '={{$value}}',
+						properties: {
+							value: '={{ { "data": $response.body.results } }}',
+						},
+					},
+					{
+						type: 'rootProperty',
+						enabled: '={{$value}}',
+						properties: {
+							property: 'data',
+						},
+					},
+				],
+			},
+		},
+		description: 'Whether to return a simplified version of the response instead of the raw data',
+	},
+];
+
+const sharedOperations: INodeProperties[] = [
+	{
+		displayName: 'Simplify',
+		name: 'simplifyOutput',
+		type: 'boolean',
+		default: true,
+		displayOptions: {
+			show: {
+				operation: ['complete', 'edit'],
+				resource: ['text'],
+			},
+		},
+		routing: {
+			output: {
+				postReceive: [
+					{
+						type: 'set',
+						enabled: '={{$value}}',
+						properties: {
+							value: '={{ { "data": $response.body.choices } }}',
+						},
+					},
+					{
+						type: 'rootProperty',
+						enabled: '={{$value}}',
+						properties: {
+							property: 'data',
+						},
+					},
+					async function (items: INodeExecutionData[]): Promise<INodeExecutionData[]> {
+						if (this.getNode().parameters.simplifyOutput === false) {
+							return items;
+						}
+						return items.map((item) => {
+							return {
+								json: {
+									...item.json,
+									text: (item.json.text as string).trim(),
+								},
+							};
+						});
+					},
+				],
+			},
+		},
+		description: 'Whether to return a simplified version of the response instead of the raw data',
+	},
+
+	{
+		displayName: 'Options',
+		name: 'options',
+		placeholder: 'Add Option',
+		description: 'Additional options to add',
+		type: 'collection',
+		default: {},
+		displayOptions: {
+			show: {
+				operation: ['complete', 'edit'],
+				resource: ['text'],
+			},
+		},
+		options: [
+			{
+				displayName: 'Echo Prompt',
+				name: 'echo',
+				type: 'boolean',
+				description: 'Whether the prompt should be echoed back in addition to the completion',
+				default: false,
+				displayOptions: {
+					show: {
+						'/operation': ['complete'],
+					},
+				},
+				routing: {
+					send: {
+						type: 'body',
+						property: 'echo',
+					},
+				},
+			},
+			{
+				displayName: 'Maximum Number of Tokens',
+				name: 'maxTokens',
+				default: 16,
+				description:
+					'The maximum number of tokens to generate in the completion. Most models have a context length of 2048 tokens (except for the newest models, which support 4096).',
+				type: 'number',
+				displayOptions: {
+					show: {
+						'/operation': ['complete'],
+					},
+				},
+				typeOptions: {
+					maxValue: 4096,
+				},
+				routing: {
+					send: {
+						type: 'body',
+						property: 'max_tokens',
+					},
+				},
+			},
+			{
+				displayName: 'Number of Completions',
+				name: 'n',
+				default: 1,
+				description:
+					'How many completions to generate for each prompt. Note: Because this parameter generates many completions, it can quickly consume your token quota. Use carefully and ensure that you have reasonable settings for max_tokens and stop.',
+				type: 'number',
+				routing: {
+					send: {
+						type: 'body',
+						property: 'n',
+					},
+				},
+			},
+			{
+				displayName: 'Sampling Temperature',
+				name: 'temperature',
+				default: 1,
+				typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
+				description:
+					'Controls randomness: Lowering results in less random completions. As the temperature approaches zero, the model will become deterministic and repetitive.',
+				type: 'number',
+				routing: {
+					send: {
+						type: 'body',
+						property: 'temperature',
+					},
+				},
+			},
+			{
+				displayName: 'Top P',
+				name: 'topP',
+				default: 1,
+				typeOptions: { maxValue: 1, minValue: 0, numberPrecision: 1 },
+				description:
+					'Controls diversity via nucleus sampling: 0.5 means half of all likelihood-weighted options are considered. We generally recommend altering this or temperature but not both.',
+				type: 'number',
+				routing: {
+					send: {
+						type: 'body',
+						property: 'top_p',
+					},
+				},
+			},
+		],
+	},
+];
+
+export const textFields: INodeProperties[] = [
+	/* -------------------------------------------------------------------------- */
+	/*                                text:complete                                */
+	/* -------------------------------------------------------------------------- */
+	...completeOperations,
+
+	/* -------------------------------------------------------------------------- */
+	/*                                  text:edit                                  */
+	/* -------------------------------------------------------------------------- */
+	...editOperations,
+
+	/* -------------------------------------------------------------------------- */
+	/*                                text:moderate                                */
+	/* -------------------------------------------------------------------------- */
+	...moderateOperations,
+
+	/* -------------------------------------------------------------------------- */
+	/*                                   text:ALL                                  */
+	/* -------------------------------------------------------------------------- */
+	...sharedOperations,
+];
diff --git a/packages/nodes-base/nodes/OpenAi/openAi.svg b/packages/nodes-base/nodes/OpenAi/openAi.svg
new file mode 100644
index 0000000000..d9445723ff
--- /dev/null
+++ b/packages/nodes-base/nodes/OpenAi/openAi.svg
@@ -0,0 +1,7 @@
+
+
+	OpenAI
+
+
+
+
diff --git a/packages/nodes-base/package.json b/packages/nodes-base/package.json
index 4d5c5fc09b..c48a7ee876 100644
--- a/packages/nodes-base/package.json
+++ b/packages/nodes-base/package.json
@@ -224,6 +224,7 @@
 		"dist/credentials/OdooApi.credentials.js",
 		"dist/credentials/OneSimpleApi.credentials.js",
 		"dist/credentials/OnfleetApi.credentials.js",
+		"dist/credentials/OpenAiApi.credentials.js",
 		"dist/credentials/OpenWeatherMapApi.credentials.js",
 		"dist/credentials/OrbitApi.credentials.js",
 		"dist/credentials/OuraApi.credentials.js",
@@ -589,6 +590,7 @@
 		"dist/nodes/Notion/NotionTrigger.node.js",
 		"dist/nodes/Odoo/Odoo.node.js",
"dist/nodes/OneSimpleApi/OneSimpleApi.node.js", + "dist/nodes/OpenAi/OpenAi.node.js", "dist/nodes/OpenThesaurus/OpenThesaurus.node.js", "dist/nodes/OpenWeatherMap/OpenWeatherMap.node.js", "dist/nodes/Orbit/Orbit.node.js", diff --git a/packages/workflow/src/Interfaces.ts b/packages/workflow/src/Interfaces.ts index d17ea19a31..e9ff0fb3af 100644 --- a/packages/workflow/src/Interfaces.ts +++ b/packages/workflow/src/Interfaces.ts @@ -1296,6 +1296,7 @@ export type PostReceiveAction = response: IN8nHttpFullResponse, ) => Promise) | IPostReceiveBinaryData + | IPostReceiveFilter | IPostReceiveLimit | IPostReceiveRootProperty | IPostReceiveSet @@ -1325,7 +1326,7 @@ export interface IPostReceiveBase { type: string; enabled?: boolean | string; properties: { - [key: string]: string | number | IDataObject; + [key: string]: string | number | boolean | IDataObject; }; errorMessage?: string; } @@ -1337,6 +1338,13 @@ export interface IPostReceiveBinaryData extends IPostReceiveBase { }; } +export interface IPostReceiveFilter extends IPostReceiveBase { + type: 'filter'; + properties: { + pass: boolean | string; + }; +} + export interface IPostReceiveLimit extends IPostReceiveBase { type: 'limit'; properties: { diff --git a/packages/workflow/src/RoutingNode.ts b/packages/workflow/src/RoutingNode.ts index 7505c8ea08..4a119eb2dd 100644 --- a/packages/workflow/src/RoutingNode.ts +++ b/packages/workflow/src/RoutingNode.ts @@ -265,7 +265,6 @@ export class RoutingNode { if (action.type === 'rootProperty') { try { return inputData.flatMap((item) => { - // let itemContent = item.json[action.properties.property]; let itemContent = get(item.json, action.properties.property); if (!Array.isArray(itemContent)) { @@ -285,6 +284,28 @@ export class RoutingNode { }); } } + if (action.type === 'filter') { + const passValue = action.properties.pass; + + inputData = inputData.filter((item) => { + // If the value is an expression resolve it + return this.getParameterValue( + passValue, + itemIndex, + runIndex, + executeSingleFunctions.getExecuteData(), + { + $response: responseData, + $responseItem: item.json, + $value: parameterValue, + $version: this.node.typeVersion, + }, + false, + ) as boolean; + }); + + return inputData; + } if (action.type === 'limit') { const maxResults = this.getParameterValue( action.properties.maxResults,