n8n/packages/nodes-base/nodes/NocoDB/NocoDB.node.ts
mertmit d65a9ed118
feat(NocoDB Node): Add support v0.90.0+ (#3146)
* feat(NocoDB Node): add support for new NocoDB API

* fix(NocoDB Node): fix binary row update on old NocoDB API

* fix(NocoDB Node): fix getAll endpoint

* feat(NocoDB Node): allow xc-token as credential

* fix(NocoDB Node): get all for new api

* feat(NocoDB Node): list options & change to single data apis

* Moved to new format and reduced some code duplication

* Added API Version to Node Settings

* Improvements to remove code duplication and use bulk endpoints

* Added new credentials to close off PR#2909

* Credential testing working

*  Improvements

*  Add generic authentication type to credentials

* 🔥 Remove credentials verification

* Fixed Get All not working with manual limit

* Removed json object from project / table fields

* added fix from n8n-4159

* 👕 Fix linting issue

* feat: Improvements to pairedItem

* refactor: Consolidate hoisted package versions (#3724)

* 📦 Consolidate hoisted package versions

* 📦 Update `package-lock.json`

* 📦 Update `package-lock.json`

* 📦 Update `package-lock.json`

* refactor: Upgrade to ESLint 8 (#3722)

* ⬆️ Upgrade to ESLint 8

* 📦 Update package-lock.json

* 👕 Add lint exceptions

* 👕 Add more lint exceptions

*  Remove `tslint` from some packages

* 👕 Except init file

* 📦 Update `package-lock.json`

* 📦 Update `package-lock.json`

* 👕 Add exceptions to new lines coming from `master`

Co-authored-by: Jan Oberhauser <jan.oberhauser@gmail.com>

* refactor: Format all credentials (#3720)

* Apply Prettier to all credentials

* Fix quotes for lint

* 👕 Remove `quotemark` rule

* 👕 Run Prettier to take over quotes

* ⬆️ Upgrade `eslint-plugin-n8n-nodes-base`

* 📦 Update `package-lock.json`

Co-authored-by: Omar Ajoue <krynble@gmail.com>
Co-authored-by: Jan Oberhauser <jan.oberhauser@gmail.com>

* fix: Fix node_type property in all events (#3759)

* ⬆️ Update package-lock.json file

* fix(Mautic Node): Fix authentication issue (#3761)

* Fixes mautic credential issue

* removed unused imports

Co-authored-by: Jan Oberhauser <jan.oberhauser@gmail.com>

* fix(AWS DynamoDB Node): Fix expression attribute names (#3763)

* Fix expression attribute names in getAll

* fix: EAN value should be a string, not object

* Removed extra code for working out what credentials are in use

* fix(editor): Fix linking buttons color (#3770)

* fix color of icon

* center buttons

* fix(editor): Restore pindata header colors (#3758)

* 🎨 Restore `color-secondary-tint` colors

* 🔥 Remove typing from JS file

* fix(editor): Fix sticky duplication and position bug (#3755)

* fix bug when inserting sticky

* center sticky on insert

* export as const

* refactor(editor): Move all colors to css variables (#3723)

* update white color

* update white color

* update more whites

* update color

* update curr running

* update text color #555

* update white color

* set search bar colors

* update colors

* update node executing

* update text colors

* update light color

* update theme

* update theme

* update overlays carousel

* update theme vars

* add dark theme tokens

* update text

* update table colors

* fix conflict

* update colors

* feat(Metabase Node): Add Metabase Node (#3033)

* Boilerplate with the new node version for Metabase

* Metabase MVP features

* Added new credential for Metabase, added custom auth for Metabase

* Fixed bug with one endpoint not working

* Clean up code

* Uniformised the renovate token

* Made two examples of responses for review

* Fixed lint issues

* Feature add datasources

* Changed output from databases

* Changed questions data output

* Fixed issue when testing credentials with new node format

* Add the possibility to get raw data

* Removed handle for the metabase meta results, changed export's name

* Add binary extraction for the result data

* Fixed binary download issue

*  Add preAuthentication method to credentials

* Revert "Added new credential for metabse, added custom auth for metabase"

This reverts commit 5f1b7607ad.

* Revert "Added new credential for metabse, added custom auth for metabase"

This reverts commit 5f1b7607ad.

* Added preAuth and fixed autofixable linting rules

* Fixed linting errors

* Linting fixes

* Remove / at the end of url, and add placeholder for cred url

* Make export to JSON return only JSON and no binary

* Fix lint issues

* Add action and exception for lint rule

* Remove unnecessary credential file

*  Simplify and cleanup

Co-authored-by: ricardo <ricardoespinoza105@gmail.com>
Co-authored-by: Omar Ajoue <krynble@gmail.com>
Co-authored-by: Jan Oberhauser <jan.oberhauser@gmail.com>

* fix(editor): Fix spaces bug (#3774)

* refactor(editor): Change welcome sticky content (#3769)

* Updated Welcome sticky content

* Updated welcome sticky thumbnail image

The image was swapped out using the exact same file name and sizing, so no code changes were required.

* Replaced welcome sticky thumbnail image

* fix(Fix Rocketchat Node): Fix authentication issue (#3778)

* Add suggested VSCode settings (#3783)

*  Add suggested settings

* 🔥 Remove app-level setting

* 🎨 Update indentation

* fix(core): Add windows support to import:credentials --separate (#3589)

* feat(Item List Node): Add operation for creating array from input items (#3149)

* 🔨 create array operation

* 🔨 removed semicolon

* 🔨 updated UI

*  display option fix

*  aggregate operation description update, default aggregate item

* refactor: Add Onboarding call prompts (#3682)

*  Implemented initial onboarding call prompt logic

*  Added onboarding call prompt feature environment variable

*  Implemented onboarding session signup modal

* 📈 Added initial telemetry for the onboarding call prompt

* ✔️ Fixing linter error in server.ts

* 💄 Updating onboarding call prompt and modal wording and styling

*  Implemented initial version of fake doors feature

*  Added parameters to onboarding call prompt request

*  Finished implementing fake doors in settings

* 🔨 Updating onboarding call prompt fetching logic (fetching before timeout starts)

* 👌 Updating onboarding call prompt and fake door components based on the front-end review feedback

*  Updated fake doors so they support UI location specification. Added credentials UI fake doors.

*  Added checkbox to the signup form, improved N8NCheckbox formatting to better handle overflow

* 💄 Moving signup checkbox label text to i18n file, updating checkbox component CSS to force text wrap

*  Update API calls to work with the new workflow request and response formats

* 👌 Updating fake door front-end based on the review feedback

* 👌 Updating onboarding call prompt and fake doors UI based on the product feedback

*   Updated onboarding call prompts front-end to work with new endpoints and added new telemetry events

* 🐛 Fixing onboarding call prompts not appearing in first user sessions

* Add createdAt to PublicUser

* 👌 Updating onboarding call prompts front-end to work with the latest back-end and addressing latest product review

*  Improving error handling when submitting user emails on signup

* 💄 Updating info text on Logging feature page

* 💄 Updating first onboarding call prompt timeout to 5 minutes

* 💄 Fixing `N8nCheckbox` component font overflow

Co-authored-by: Ben Hesseldieck <b.hesseldieck@gmail.com>

* feat(Kafka Trigger Node): Add additional options (#3600)

* 🔨 additional options to kafka trigger

*  option for maxInFlightRequests

*  Small change

Co-authored-by: ricardo <ricardoespinoza105@gmail.com>

* fix(editor): Fix pin data in executions when pinData is null. (#3787)

* ⬆️ Update package-lock.json file

* 🔖 Release n8n-workflow@0.110.0

* ⬆️ Set n8n-workflow@0.110.0 on n8n-core

* 🔖 Release n8n-core@0.128.0

* ⬆️ Set n8n-core@0.128.0 and n8n-workflow@0.110.0 on n8n-node-dev

* 🔖 Release n8n-node-dev@0.67.0

* ⬆️ Set n8n-core@0.128.0 and n8n-workflow@0.110.0 on n8n-nodes-base

* 🔖 Release n8n-nodes-base@0.186.0

* 🔖 Release n8n-design-system@0.28.0

* ⬆️ Set n8n-design-system@0.28.0 and n8n-workflow@0.110.0 on n8n-editor-ui

* 🔖 Release n8n-editor-ui@0.154.0

* ⬆️ Set n8n-core@0.128.0, n8n-editor-ui@0.154.0, n8n-nodes-base@0.186.0 and n8n-workflow@0.110.0 on n8n

* 🔖 Release n8n@0.188.0

* 🔖 Update main package.json to 0.188.0

* 📚 Update CHANGELOG.md with version 0.188.0

* 👕 Adjust line endings for Prettier linting (#3786)

* build: Use package-lock.json file with custom build

* 💄 Updating onboarding prompt label

* ⬆️ Set eslint@8.0.0 on n8n-workflow (#3768)

* ⬆️ Upgrade `n8n-workflow` to ESLint 8

* 📦 Update `package-lock.json`

* 📦 Re-update `package-lock.json`

*  Fix on error behaviour for Delete, Get and Update

Co-authored-by: Jonathan Bennetts <jonathan.bennetts@gmail.com>
Co-authored-by: ricardo <ricardoespinoza105@gmail.com>
Co-authored-by: Ricardo Espinoza <ricardo@n8n.io>
Co-authored-by: Jan Oberhauser <jan.oberhauser@gmail.com>
Co-authored-by: Iván Ovejero <ivov.src@gmail.com>
Co-authored-by: Omar Ajoue <krynble@gmail.com>
Co-authored-by: Ahsan Virani <ahsan.virani@gmail.com>
Co-authored-by: Nicholas Penree <nick@penree.com>
Co-authored-by: Mutasem Aldmour <4711238+mutdmour@users.noreply.github.com>
Co-authored-by: agobrech <45268029+agobrech@users.noreply.github.com>
Co-authored-by: maxtkacz <maxtkacz@gmail.com>
Co-authored-by: कारतोफ्फेलस्क्रिप्ट™ <netroy@users.noreply.github.com>
Co-authored-by: Michael Kret <88898367+michael-radency@users.noreply.github.com>
Co-authored-by: Milorad FIlipović <miloradfilipovic19@gmail.com>
Co-authored-by: Ben Hesseldieck <b.hesseldieck@gmail.com>
Co-authored-by: Alex Grozav <alex@grozav.com>
Co-authored-by: Milorad Filipovic <milorad@n8n.io>
2022-08-03 12:57:57 +02:00


/* eslint-disable n8n-nodes-base/node-filename-against-convention */
import {
IExecuteFunctions,
} from 'n8n-core';
import {
IBinaryData,
IDataObject,
ILoadOptionsFunctions,
INodeExecutionData,
INodeType,
INodeTypeDescription,
NodeApiError,
NodeOperationError,
} from 'n8n-workflow';
import {
apiRequest,
apiRequestAllItems,
downloadRecordAttachments,
} from './GenericFunctions';
import {
operationFields
} from './OperationDescription';
export class NocoDB implements INodeType {
description: INodeTypeDescription = {
displayName: 'NocoDB',
name: 'nocoDb',
icon: 'file:nocodb.svg',
group: ['input'],
version: [1, 2],
subtitle: '={{$parameter["operation"] + ": " + $parameter["resource"]}}',
description: 'Read, update, write and delete data from NocoDB',
defaults: {
name: 'NocoDB',
},
inputs: ['main'],
outputs: ['main'],
credentials: [
{
name: 'nocoDb',
required: true,
displayOptions: {
show: {
authentication: [
'nocoDb',
],
},
},
},
{
name: 'nocoDbApiToken',
required: true,
displayOptions: {
show: {
authentication: [
'nocoDbApiToken',
],
},
},
},
],
properties: [
{
displayName: 'Authentication',
name: 'authentication',
type: 'options',
options: [
{
name: 'User Token',
value: 'nocoDb',
},
{
name: 'API Token',
value: 'nocoDbApiToken',
},
],
default: 'nocoDb',
},
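// The 'API Version' node setting is declared twice, once per node @version, differing only in its default:
// nodes created as version 1 default to the legacy (pre v0.90.0) API, version 2 nodes default to the v0.90.0+ API.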
{
displayName: 'API Version',
name: 'version',
type: 'options',
displayOptions: {
show: {
'@version': [
1,
],
},
},
isNodeSetting: true,
options: [
{
name: 'Before v0.90.0',
value: 1,
},
{
name: 'v0.90.0 Onwards',
value: 2,
},
],
default: 1,
},
{
displayName: 'API Version',
name: 'version',
type: 'options',
displayOptions: {
show: {
'@version': [
2,
],
},
},
isNodeSetting: true,
options: [
{
name: 'Before v0.90.0',
value: 1,
},
{
name: 'v0.90.0 Onwards',
value: 2,
},
],
default: 2,
},
{
displayName: 'Resource',
name: 'resource',
type: 'options',
noDataExpression: true,
options: [
{
name: 'Row',
value: 'row',
},
],
default: 'row',
},
{
displayName: 'Operation',
name: 'operation',
type: 'options',
noDataExpression: true,
displayOptions: {
show: {
resource: [
'row',
],
},
},
options: [
{
name: 'Create',
value: 'create',
description: 'Create a row',
action: 'Create a row',
},
{
name: 'Delete',
value: 'delete',
description: 'Delete a row',
action: 'Delete a row',
},
{
name: 'Get',
value: 'get',
description: 'Retrieve a row',
action: 'Get a row',
},
{
name: 'Get All',
value: 'getAll',
description: 'Retrieve all rows',
action: 'Get all rows',
},
{
name: 'Update',
value: 'update',
description: 'Update a row',
action: 'Update a row',
},
],
default: 'get',
},
...operationFields,
],
};
methods = {
loadOptions: {
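// Lists the projects available to the authenticated user, to populate the 'Project' dropdown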
async getProjects(this: ILoadOptionsFunctions) {
try {
const requestMethod = 'GET';
const endpoint = '/api/v1/db/meta/projects/';
const responseData = await apiRequest.call(this, requestMethod, endpoint, {}, {});
return responseData.list.map((i: IDataObject) => ({ name: i.title, value: i.id }));
} catch (e) {
throw new NodeOperationError(this.getNode(), `Error while fetching projects! (${e})`);
}
},
// This only supports using the Project ID
async getTables(this: ILoadOptionsFunctions) {
const projectId = this.getNodeParameter('projectId', 0) as string;
if (projectId) {
try {
const requestMethod = 'GET';
const endpoint = `/api/v1/db/meta/projects/${projectId}/tables`;
const responseData = await apiRequest.call(this, requestMethod, endpoint, {}, {});
return responseData.list.map((i: IDataObject) => ({ name: i.title, value: i.id }));
} catch (e) {
throw new NodeOperationError(this.getNode(), `Error while fetching tables! (${e})`);
}
} else {
throw new NodeOperationError(this.getNode(), `No project selected!`);
}
},
},
};
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const items = this.getInputData();
const returnData: IDataObject[] = [];
let responseData;
const version = this.getNodeParameter('version', 0) as number;
const resource = this.getNodeParameter('resource', 0) as string;
const operation = this.getNodeParameter('operation', 0) as string;
let returnAll = false;
let requestMethod = '';
let qs: IDataObject = {};
let endPoint = '';
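// The project and table are read once (from the first item) and apply to every item in this execution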
const projectId = this.getNodeParameter('projectId', 0) as string;
const table = this.getNodeParameter('table', 0) as string;
if (resource === 'row') {
if (operation === 'create') {
requestMethod = 'POST';
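// Both API versions expose a bulk insert endpoint, so all incoming items are created in a single request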
if (version === 1) {
endPoint = `/nc/${projectId}/api/v1/${table}/bulk`;
} else if (version === 2) {
endPoint = `/api/v1/db/data/bulk/noco/${projectId}/${table}`;
}
const body: IDataObject[] = [];
for (let i = 0; i < items.length; i++) {
const newItem: IDataObject = {};
const dataToSend = this.getNodeParameter('dataToSend', i) as 'defineBelow' | 'autoMapInputData';
if (dataToSend === 'autoMapInputData') {
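// Auto-map mode: copy every key of the incoming JSON, except those listed in 'inputsToIgnore'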
const incomingKeys = Object.keys(items[i].json);
const rawInputsToIgnore = this.getNodeParameter('inputsToIgnore', i) as string;
const inputDataToIgnore = rawInputsToIgnore.split(',').map(c => c.trim());
for (const key of incomingKeys) {
if (inputDataToIgnore.includes(key)) continue;
newItem[key] = items[i].json[key];
}
} else {
const fields = this.getNodeParameter('fieldsUi.fieldValues', i, []) as Array<{
fieldName: string;
binaryData: boolean;
fieldValue?: string;
binaryProperty?: string;
}>;
for (const field of fields) {
if (!field.binaryData) {
newItem[field.fieldName] = field.fieldValue;
} else if (field.binaryProperty) {
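// Attachment columns: the binary data is uploaded first (v1 posts an xcAttachmentUpload form to /dashboard,
// v2 uses /api/v1/db/storage/upload) and the returned file metadata is stored in the field as a JSON string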
if (!items[i].binary) {
throw new NodeOperationError(this.getNode(), 'No binary data exists on item!', { itemIndex: i });
}
const binaryPropertyName = field.binaryProperty;
if (binaryPropertyName && !items[i].binary![binaryPropertyName]) {
throw new NodeOperationError(this.getNode(), `Binary property ${binaryPropertyName} does not exist on item!`, { itemIndex: i });
}
const binaryData = items[i].binary![binaryPropertyName] as IBinaryData;
const dataBuffer = await this.helpers.getBinaryDataBuffer(i, binaryPropertyName);
const formData = {
file: {
value: dataBuffer,
options: {
filename: binaryData.fileName,
contentType: binaryData.mimeType,
},
},
json: JSON.stringify({
api: 'xcAttachmentUpload',
project_id: projectId,
dbAlias: 'db',
args: {},
}),
};
const qs = { project_id: projectId };
let postUrl = '';
if (version === 1) {
postUrl = '/dashboard';
} else if (version === 2) {
postUrl = '/api/v1/db/storage/upload';
}
responseData = await apiRequest.call(this, 'POST', postUrl, {}, qs, undefined, { formData });
newItem[field.fieldName] = JSON.stringify([responseData]);
}
}
}
body.push(newItem);
}
try {
responseData = await apiRequest.call(this, requestMethod, endPoint, body, qs);
// Calculate ID manually and add to return data
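// The bulk create response only contains a single ID, so the remaining row IDs are inferred
// by counting backwards from it, assuming sequential auto-increment primary keys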
let id = responseData[0];
for (let i = body.length - 1; i >= 0; i--) {
body[i].id = id--;
}
returnData.push(...body);
} catch (error) {
if (this.continueOnFail()) {
returnData.push({ error: error.toString() });
} else {
throw new NodeApiError(this.getNode(), error);
}
}
}
if (operation === 'delete') {
requestMethod = 'DELETE';
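// Deletes also go through the bulk endpoint: the row IDs of all incoming items are sent in one request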
if (version === 1) {
endPoint = `/nc/${projectId}/api/v1/${table}/bulk`;
} else if (version === 2) {
endPoint = `/api/v1/db/data/bulk/noco/${projectId}/${table}`;
}
const body: IDataObject[] = [];
for (let i = 0; i < items.length; i++) {
const id = this.getNodeParameter('id', i) as string;
body.push({ id });
}
try {
responseData = await apiRequest.call(this, requestMethod, endPoint, body, qs);
if (version === 1) {
returnData.push(...items.map(item => item.json));
} else if (version === 2) {
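// The v2 bulk delete response contains one number per row; 0 means the row was not found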
returnData.push(...responseData.map((result: number, index: number) => {
if (result === 0) {
const errorMessage = `The row with the ID "${body[index].id}" could not be deleted. It probably doesn't exist.`;
if (this.continueOnFail()) {
return { error: errorMessage };
}
throw new NodeApiError(this.getNode(), { message: errorMessage }, { message: errorMessage, itemIndex: index });
}
return {
success: true,
};
}));
}
} catch (error) {
if (this.continueOnFail()) {
returnData.push({ error: error.toString() });
} else {
throw new NodeApiError(this.getNode(), error);
}
}
}
if (operation === 'getAll') {
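// 'Get All' lists rows; the selected list options (sort, fields, etc.) are passed as query-string
// parameters, and attachment columns can optionally be downloaded as binary data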
const data = [];
const downloadAttachments = this.getNodeParameter('downloadAttachments', 0) as boolean;
try {
for (let i = 0; i < items.length; i++) {
requestMethod = 'GET';
if (version === 1) {
endPoint = `/nc/${projectId}/api/v1/${table}`;
} else if (version === 2) {
endPoint = `/api/v1/db/data/noco/${projectId}/${table}`;
}
returnAll = this.getNodeParameter('returnAll', 0) as boolean;
qs = this.getNodeParameter('options', i, {}) as IDataObject;
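// Convert the sort collection into NocoDB's comma-separated syntax ('-' prefix = descending) and flatten the fields list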
if (qs.sort) {
const properties = (qs.sort as IDataObject).property as Array<{ field: string, direction: string }>;
qs.sort = properties.map(prop => `${prop.direction === 'asc' ? '' : '-'}${prop.field}`).join(',');
}
if (qs.fields) {
qs.fields = (qs.fields as IDataObject[]).join(',');
}
if (returnAll === true) {
responseData = await apiRequestAllItems.call(this, requestMethod, endPoint, {}, qs);
} else {
qs.limit = this.getNodeParameter('limit', 0) as number;
responseData = await apiRequest.call(this, requestMethod, endPoint, {}, qs);
if (version === 2) {
responseData = responseData.list;
}
}
returnData.push.apply(returnData, responseData);
if (downloadAttachments === true) {
const downloadFieldNames = (this.getNodeParameter('downloadFieldNames', 0) as string).split(',');
const response = await downloadRecordAttachments.call(this, responseData, downloadFieldNames);
data.push(...response);
}
}
if (downloadAttachments) {
return [data];
}
} catch (error) {
if (this.continueOnFail()) {
returnData.push({ error: error.toString() });
} else {
throw error;
}
}
}
if (operation === 'get') {
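// 'Get' fetches a single row per incoming item; on the v2 API an empty response body means the row does not exist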
requestMethod = 'GET';
const newItems: INodeExecutionData[] = [];
for (let i = 0; i < items.length; i++) {
try {
const id = this.getNodeParameter('id', i) as string;
if (version === 1) {
endPoint = `/nc/${projectId}/api/v1/${table}/${id}`;
} else if (version === 2) {
endPoint = `/api/v1/db/data/noco/${projectId}/${table}/${id}`;
}
responseData = await apiRequest.call(this, requestMethod, endPoint, {}, qs);
let newItem: INodeExecutionData = { json: {} };
if (version === 1) {
newItem = { json: responseData };
} else if (version === 2) {
if (Object.keys(responseData).length === 0) {
// Get did fail
const errorMessage = `The row with the ID "${id}" could not be queried. It probably doesn't exist.`;
if (this.continueOnFail()) {
newItems.push({ json: { error: errorMessage } });
continue;
}
throw new NodeApiError(this.getNode(), { message: errorMessage }, { message: errorMessage, itemIndex: i });
} else {
// Get did work
newItem = { json: responseData };
}
}
const downloadAttachments = this.getNodeParameter('downloadAttachments', i) as boolean;
if (downloadAttachments === true) {
const downloadFieldNames = (this.getNodeParameter('downloadFieldNames', i) as string).split(',');
const data = await downloadRecordAttachments.call(this, [responseData], downloadFieldNames);
newItem.binary = data[0].binary;
}
newItems.push(newItem);
} catch (error) {
if (this.continueOnFail()) {
newItems.push({ json: { error: error.toString() } });
continue;
}
throw new NodeApiError(this.getNode(), error);
}
}
return this.prepareOutputData(newItems);
}
if (operation === 'update') {
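// Updates also use the bulk endpoint; the legacy API expects PUT, the v0.90.0+ API expects PATCH.
// Field mapping and attachment upload mirror the create operation.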
let requestMethod = 'PATCH';
if (version === 1) {
endPoint = `/nc/${projectId}/api/v1/${table}/bulk`;
requestMethod = 'PUT';
} else if (version === 2) {
endPoint = `/api/v1/db/data/bulk/noco/${projectId}/${table}`;
}
const body: IDataObject[] = [];
for (let i = 0; i < items.length; i++) {
const id = this.getNodeParameter('id', i) as string;
const newItem: IDataObject = { id };
const dataToSend = this.getNodeParameter('dataToSend', i) as 'defineBelow' | 'autoMapInputData';
if (dataToSend === 'autoMapInputData') {
const incomingKeys = Object.keys(items[i].json);
const rawInputsToIgnore = this.getNodeParameter('inputsToIgnore', i) as string;
const inputDataToIgnore = rawInputsToIgnore.split(',').map(c => c.trim());
for (const key of incomingKeys) {
if (inputDataToIgnore.includes(key)) continue;
newItem[key] = items[i].json[key];
}
} else {
const fields = this.getNodeParameter('fieldsUi.fieldValues', i, []) as Array<{
fieldName: string;
binaryData: boolean;
fieldValue?: string;
binaryProperty?: string;
}>;
for (const field of fields) {
if (!field.binaryData) {
newItem[field.fieldName] = field.fieldValue;
} else if (field.binaryProperty) {
if (!items[i].binary) {
throw new NodeOperationError(this.getNode(), 'No binary data exists on item!', { itemIndex: i });
}
const binaryPropertyName = field.binaryProperty;
if (binaryPropertyName && !items[i].binary![binaryPropertyName]) {
throw new NodeOperationError(this.getNode(), `Binary property ${binaryPropertyName} does not exist on item!`, { itemIndex: i });
}
const binaryData = items[i].binary![binaryPropertyName] as IBinaryData;
const dataBuffer = await this.helpers.getBinaryDataBuffer(i, binaryPropertyName);
const formData = {
file: {
value: dataBuffer,
options: {
filename: binaryData.fileName,
contentType: binaryData.mimeType,
},
},
json: JSON.stringify({
api: 'xcAttachmentUpload',
project_id: projectId,
dbAlias: 'db',
args: {},
}),
};
const qs = { project_id: projectId };
let postUrl = '';
if (version === 1) {
postUrl = '/dashboard';
} else if (version === 2) {
postUrl = '/api/v1/db/storage/upload';
}
responseData = await apiRequest.call(this, 'POST', postUrl, {}, qs, undefined, { formData });
newItem[field.fieldName] = JSON.stringify([responseData]);
}
}
}
body.push(newItem);
}
try {
responseData = await apiRequest.call(this, requestMethod, endPoint, body, qs);
if (version === 1) {
returnData.push(...body);
} else if (version === 2) {
returnData.push(...responseData.map((result: number, index: number) => {
if (result === 0) {
const errorMessage = `The row with the ID "${body[index].id}" could not be updated. It probably doesn't exist.`;
if (this.continueOnFail()) {
return { error: errorMessage };
}
throw new NodeApiError(this.getNode(), { message: errorMessage }, { message: errorMessage, itemIndex: index });
}
return {
success: true,
};
}));
}
} catch (error) {
if (this.continueOnFail()) {
returnData.push({ error: error.toString() });
} else {
throw new NodeApiError(this.getNode(), error);
}
}
}
}
return [this.helpers.returnJsonArray(returnData)];
}
}