Mirror of https://github.com/n8n-io/n8n.git (synced 2024-11-10 06:34:05 -08:00)

Merge branch 'master' of https://github.com/leonardlin/n8n
This commit is contained in commit 1abfc89ab3.
@@ -1,6 +1,6 @@
{
"name": "n8n",
"version": "0.30.0",
"version": "0.32.0",
"description": "n8n Workflow Automation Tool",
"license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://n8n.io",
@@ -90,8 +90,8 @@
"localtunnel": "^1.9.1",
"mongodb": "^3.2.3",
"n8n-core": "~0.14.0",
"n8n-editor-ui": "~0.23.0",
"n8n-nodes-base": "~0.25.0",
"n8n-editor-ui": "~0.24.0",
"n8n-nodes-base": "~0.27.0",
"n8n-workflow": "~0.15.0",
"open": "^6.1.0",
"pg": "^7.11.0",
@@ -1,6 +1,6 @@
{
"name": "n8n-editor-ui",
"version": "0.23.0",
"version": "0.24.0",
"description": "Workflow Editor UI for n8n",
"license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://n8n.io",
@@ -40,6 +40,7 @@ import {
faEye,
faExclamationTriangle,
faExternalLinkAlt,
faExchangeAlt,
faFile,
faFileCode,
faFileDownload,
@@ -111,6 +112,7 @@ library.add(faEnvelope);
library.add(faEye);
library.add(faExclamationTriangle);
library.add(faExternalLinkAlt);
library.add(faExchangeAlt);
library.add(faFile);
library.add(faFileCode);
library.add(faFileDownload);
@@ -8,9 +8,6 @@ export class Amqp implements ICredentialType {
name = 'amqp';
displayName = 'AMQP';
properties = [
// The credentials to get from user and save encrypted.
// Properties can be defined exactly in the same way
// as node properties.
{
displayName: 'Hostname',
name: 'hostname',
@@ -1,11 +1,11 @@
import { ContainerOptions, Delivery } from 'rhea';

import { IExecuteSingleFunctions } from 'n8n-core';
import {
IDataObject,
INodeExecutionData,
INodeType,
INodeTypeDescription,
} from 'n8n-workflow';
import { Delivery } from 'rhea';

export class Amqp implements INodeType {
description: INodeTypeDescription = {
@@ -26,23 +26,6 @@ export class Amqp implements INodeType {
required: true,
}],
properties: [
{
displayName: 'Host',
name: 'hostname',
type: 'string',
default: 'localhost',
description: 'hostname of the amqp server',
},
{
displayName: 'Port',
name: 'port',
type: 'number',
typeOptions: {
minValue: 1,
},
default: 5672,
description: 'TCP Port to connect to',
},
{
displayName: 'Queue / Topic',
name: 'sink',
@@ -71,46 +54,47 @@ export class Amqp implements INodeType {
}

const sink = this.getNodeParameter('sink', '') as string;
let applicationProperties = this.getNodeParameter('headerParametersJson', {}) as string | object;
const applicationProperties = this.getNodeParameter('headerParametersJson', {}) as string | object;

let headerProperties = applicationProperties;
if(typeof applicationProperties === 'string' && applicationProperties != '') {
headerProperties = JSON.parse(applicationProperties)
if(typeof applicationProperties === 'string' && applicationProperties !== '') {
headerProperties = JSON.parse(applicationProperties);
}

if (sink == '') {
if (sink === '') {
throw new Error('Queue or Topic required!');
}

let container = require('rhea');
const container = require('rhea');

let connectOptions = {
const connectOptions: ContainerOptions = {
host: credentials.hostname,
port: credentials.port,
reconnect: true, // this id the default anyway
reconnect_limit: 50, // try for max 50 times, based on a back-off algorithm
}
};
if (credentials.username || credentials.password) {
container.options.username = credentials.username;
container.options.password = credentials.password;
}

let allSent = new Promise( function( resolve ) {
container.on('sendable', function (context: any) {
const allSent = new Promise(( resolve ) => {
container.on('sendable', (context: any) => { // tslint:disable-line:no-any

let message = {
const message = {
application_properties: headerProperties,
body: JSON.stringify(item)
}
let sendResult = context.sender.send(message);
};

const sendResult = context.sender.send(message);

resolve(sendResult);
});
});

container.connect(connectOptions).open_sender(sink);

let sendResult: Delivery = await allSent as Delivery; // sendResult has a a property that causes circular reference if returned
const sendResult: Delivery = await allSent as Delivery; // sendResult has a a property that causes circular reference if returned

return { json: { id: sendResult.id } } as INodeExecutionData;
}
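Note: the send path above follows the usual rhea pattern — connect, open a sender, wait for the 'sendable' event, then send. A minimal standalone sketch of that pattern (hostname, port and queue name are placeholders, not values from this commit):

const container = require('rhea');

// Resolves with the delivery once the sender becomes sendable and the message goes out.
const sent = new Promise((resolve) => {
	container.on('sendable', (context: any) => {
		resolve(context.sender.send({ body: JSON.stringify({ hello: 'world' }) }));
	});
});

container.connect({ host: 'localhost', port: 5672 }).open_sender('my-queue');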
@@ -1,9 +1,10 @@
import { ContainerOptions } from 'rhea';

import { ITriggerFunctions } from 'n8n-core';
import {
INodeType,
INodeTypeDescription,
ITriggerResponse,
} from 'n8n-workflow';
@@ -67,40 +68,40 @@ export class AmqpTrigger implements INodeType {
const clientname = this.getNodeParameter('clientname', '') as string;
const subscription = this.getNodeParameter('subscription', '') as string;

if (sink == '') {
if (sink === '') {
throw new Error('Queue or Topic required!');
}
let durable: boolean = false;
let durable = false;
if(subscription && clientname) {
durable = true;
}

let container = require('rhea');
let connectOptions = {
const container = require('rhea');
const connectOptions: ContainerOptions = {
host: credentials.hostname,
port: credentials.port,
reconnect: true, // this id the default anyway
reconnect_limit: 50, // try for max 50 times, based on a back-off algorithm
container_id: (durable ? clientname : null)
}
};
if (credentials.username || credentials.password) {
container.options.username = credentials.username;
container.options.password = credentials.password;
}

let lastMsgId: any = undefined;
let self = this;
let lastMsgId: number | undefined = undefined;
const self = this;

container.on('message', function (context: any) {
if (context.message.message_id && context.message.message_id == lastMsgId) {
container.on('message', (context: any) => { // tslint:disable-line:no-any
if (context.message.message_id && context.message.message_id === lastMsgId) {
// ignore duplicate message check, don't think it's necessary, but it was in the rhea-lib example code
lastMsgId = context.message.message_id;
return;
}
self.emit([self.helpers.returnJsonArray([context.message])]);
});

let connection = container.connect(connectOptions);

const connection = container.connect(connectOptions);
let clientOptions = undefined;
if (durable) {
clientOptions = {
@@ -111,14 +112,14 @@ export class AmqpTrigger implements INodeType {
expiry_policy: 'never'
},
credit_window: 1 // prefetch 1
}
};
} else {
clientOptions = {
source: {
address: sink,
},
credit_window: 1 // prefetch 1
}
};
}
connection.open_receiver(clientOptions);
@@ -135,15 +136,11 @@ export class AmqpTrigger implements INodeType {
// for AMQP it doesn't make much sense to wait here but
// for a new user who doesn't know how this works, it's better to wait and show a respective info message
async function manualTriggerFunction() {

await new Promise( function( resolve ) {
let timeoutHandler = setTimeout(function() {
self.emit([self.helpers.returnJsonArray([{
error: 'Aborted, no message received within 30secs. This 30sec timeout is only set for "manually triggered execution". Active Workflows will listen indefinitely.'
}])]);
resolve(true);
await new Promise(( resolve, reject ) => {
const timeoutHandler = setTimeout(() => {
reject(new Error('Aborted, no message received within 30secs. This 30sec timeout is only set for "manually triggered execution". Active Workflows will listen indefinitely.'));
}, 30000);
container.on('message', function (context: any) {
container.on('message', (context: any) => { // tslint:disable-line:no-any
clearTimeout(timeoutHandler);
resolve(true);
});
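Note: the change above replaces the emitted error item with a rejected promise. The underlying pattern is a plain promise raced against a timer; a reduced sketch (the 30s value mirrors the node, everything else is illustrative):

function waitForFirstMessage(container: any, timeoutMs = 30000): Promise<boolean> {
	return new Promise((resolve, reject) => {
		const timeoutHandler = setTimeout(() => {
			reject(new Error(`Aborted, no message received within ${timeoutMs / 1000}secs.`));
		}, timeoutMs);
		container.on('message', () => {
			clearTimeout(timeoutHandler);
			resolve(true);
		});
	});
}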
@@ -45,6 +45,27 @@ export class GoogleSheet {
}

/**
* Clears values from a sheet
*
* @param {string} range
* @returns {Promise<object>}
* @memberof GoogleSheet
*/
async clearData(range: string): Promise<object> {
const client = await this.getAuthenticationClient();

const response = await Sheets.spreadsheets.values.clear(
{
auth: client,
spreadsheetId: this.id,
range,
}
);

return response.data;
}

/**
* Returns the cell values
*/
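Note: clearData wraps the Sheets API spreadsheets.values.clear call. A rough standalone equivalent using the googleapis client — the auth client, spreadsheet ID and range below are placeholders, not values from this commit:

import { google } from 'googleapis';

async function clearRange(auth: any, spreadsheetId: string, range: string): Promise<object> {
	const sheets = google.sheets({ version: 'v4', auth });
	// Clears cell values only; formatting and data validation stay untouched.
	const response = await sheets.spreadsheets.values.clear({ spreadsheetId, range });
	return response.data;
}

// e.g. await clearRange(client, 'my-spreadsheet-id', 'A:F');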
@@ -316,10 +337,11 @@ export class GoogleSheet {
* @param {number} keyRowIndex Index of the row which contains the keys
* @param {number} dataStartRowIndex Index of the first row which contains data
* @param {ILookupValues[]} lookupValues The lookup values which decide what data to return
* @param {boolean} [returnAllMatches] Returns all the found matches instead of only the first one
* @returns {Promise<IDataObject[]>}
* @memberof GoogleSheet
*/
async lookupValues(inputData: string[][], keyRowIndex: number, dataStartRowIndex: number, lookupValues: ILookupValues[]): Promise<IDataObject[]> {
async lookupValues(inputData: string[][], keyRowIndex: number, dataStartRowIndex: number, lookupValues: ILookupValues[], returnAllMatches?: boolean): Promise<IDataObject[]> {
const keys: string[] = [];

if (keyRowIndex < 0 || dataStartRowIndex < keyRowIndex || keyRowIndex >= inputData.length) {
@@ -351,13 +373,18 @@
for (rowIndex = dataStartRowIndex; rowIndex < inputData.length; rowIndex++) {
if (inputData[rowIndex][returnColumnIndex].toString() === lookupValue.lookupValue.toString()) {
returnData.push(inputData[rowIndex]);
continue lookupLoop;

if (returnAllMatches !== true) {
continue lookupLoop;
}
}
}

// If value could not be found add an empty one that the order of
// the returned items stays the same
returnData.push([]);
if (returnAllMatches !== true) {
returnData.push([]);
}
}

return this.structureData(returnData, 1, keys, true);
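Note: the effect of the new returnAllMatches flag, sketched with hypothetical data (field names on the lookup object are assumed from ILookupValues in this file):

// Hypothetical sheet: header row plus three data rows.
const rows = [
	['name', 'email'],
	['jan', 'jan@example.com'],
	['jan', 'jan.2@example.com'],
	['max', 'max@example.com'],
];

// returnAllMatches omitted/false: only the first 'jan' row is returned.
// await sheet.lookupValues(rows, 0, 1, [{ lookupColumn: 'name', lookupValue: 'jan' }]);

// returnAllMatches === true: both 'jan' rows are returned.
// await sheet.lookupValues(rows, 0, 1, [{ lookupColumn: 'name', lookupValue: 'jan' }], true);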
@@ -47,6 +47,11 @@ export class GoogleSheets implements INodeType {
value: 'append',
description: 'Appends the data to a Sheet',
},
{
name: 'Clear',
value: 'clear',
description: 'Clears data from a Sheet',
},
{
name: 'Lookup',
value: 'lookup',
@@ -172,6 +177,7 @@
hide: {
operation: [
'append',
'clear',
],
rawData: [
true
@@ -193,6 +199,9 @@
},
displayOptions: {
hide: {
operation: [
'clear',
],
rawData: [
true
],
@@ -266,7 +275,31 @@
type: 'collection',
placeholder: 'Add Option',
default: {},
displayOptions: {
show: {
operation: [
'append',
'lookup',
'read',
'update',
],
},
},
options: [
{
displayName: 'Return All Matches',
name: 'returnAllMatches',
type: 'boolean',
default: false,
displayOptions: {
show: {
'/operation': [
'lookup',
],
},
},
description: 'By default only the first result gets returned. If options gets set all found matches get returned.',
},
{
displayName: 'Value Input Mode',
name: 'valueInputMode',
@@ -411,6 +444,15 @@
// TODO: Should add this data somewhere
// TODO: Should have something like add metadata which does not get passed through

return this.prepareOutputData(items);
} else if (operation === 'clear') {
// ----------------------------------
//         clear
// ----------------------------------

await sheet.clearData(range);

const items = this.getInputData();
return this.prepareOutputData(items);
} else if (operation === 'lookup') {
// ----------------------------------
@@ -436,7 +478,7 @@
});
}

const returnData = await sheet.lookupValues(sheetData, keyRow, dataStartRow, lookupValues);
const returnData = await sheet.lookupValues(sheetData, keyRow, dataStartRow, lookupValues, options.returnAllMatches as boolean | undefined);

return [this.helpers.returnJsonArray(returnData)];
} else if (operation === 'read') {
@@ -144,7 +144,7 @@ export class GraphQL implements INodeType {
displayName: 'Response Data Property Name',
name: 'dataPropertyName',
type: 'string',
default: 'response',
default: 'data',
required: true,
displayOptions: {
show: {
@@ -200,7 +200,7 @@ export class HttpRequest implements INodeType {
name: 'jsonParameters',
type: 'boolean',
default: false,
description: 'If the query and/or body parameter should be set via the value-key pair UI or JSON/RAW',
description: 'If the query and/or body parameter should be set via the value-key pair UI or JSON/RAW.',
},

{
@@ -246,11 +246,11 @@
},
{
displayName: 'MIME Type',
name: 'bodyContentCustomMIMEType',
name: 'bodyContentCustomMimeType',
type: 'string',
default: '',
placeholder: 'text/xml',
description: 'Specify the mime type for raw/custom body type',
description: 'Specify the mime type for raw/custom body type.',
required: false,
displayOptions: {
show: {
@@ -615,11 +615,11 @@
}

// Add Content Type if any are set
if (options.bodyContentCustomMIMEType) {
if (options.bodyContentCustomMimeType) {
if(requestOptions.headers === undefined) {
requestOptions.headers = {};
}
requestOptions.headers['Content-Type'] = options.bodyContentCustomMIMEType;
requestOptions.headers['Content-Type'] = options.bodyContentCustomMimeType;
}

// Add credentials if any are set
packages/nodes-base/nodes/MoveBinaryData.node.ts (new file, 356 lines)
@@ -0,0 +1,356 @@
import {
get,
set,
unset,
} from 'lodash';

import { IExecuteFunctions } from 'n8n-core';
import {
IDataObject,
INodeExecutionData,
INodeType,
INodeTypeDescription,
} from 'n8n-workflow';

export class MoveBinaryData implements INodeType {
description: INodeTypeDescription = {
displayName: 'Move Binary Data',
name: 'moveBinaryData',
icon: 'fa:exchange-alt',
group: ['transform'],
version: 1,
subtitle: '={{$parameter["mode"]==="binaryToJson" ? "Binary to JSON" : "JSON to Binary"}}',
description: 'Move data between binary and JSON properties.',
defaults: {
name: 'Move Binary Data',
color: '#7722CC',
},
inputs: ['main'],
outputs: ['main'],
properties: [
{
displayName: 'Mode',
name: 'mode',
type: 'options',
options: [
{
name: 'Binary to JSON',
value: 'binaryToJson',
description: 'Move data from Binary to JSON',
},
{
name: 'JSON to Binary',
value: 'jsonToBinary',
description: 'Move data from JSON to Binary.',
},
],
default: 'binaryToJson',
description: 'From and to where data should be moved.',
},

// ----------------------------------
//         binaryToJson
// ----------------------------------
{
displayName: 'Set all Data',
name: 'setAllData',
type: 'boolean',
displayOptions: {
show: {
mode: [
'binaryToJson',
],
},
},
default: true,
description: 'If all JSON data should be replaced with the data retrieved<br />from binary key. Else the data will be written to a single key.',
},
{
displayName: 'Source Key',
name: 'sourceKey',
type: 'string',
displayOptions: {
show: {
mode: [
'binaryToJson',
],
},
},
default: 'data',
required: true,
placeholder: 'data',
description: 'The name of the binary key to get data from.<br />It is also possible to define deep keys by using dot-notation like for example:<br />"level1.level2.currentKey"',
},
{
displayName: 'Destination Key',
name: 'destinationKey',
type: 'string',
displayOptions: {
show: {
mode: [
'binaryToJson',
],
setAllData: [
false,
],
},
},
default: 'data',
required: true,
placeholder: '',
description: 'The name the JSON key to copy data to. It is also possible<br />to define deep keys by using dot-notation like for example:<br />"level1.level2.newKey"',
},

// ----------------------------------
//         jsonToBinary
// ----------------------------------
{
displayName: 'Convert all Data',
name: 'convertAllData',
type: 'boolean',
displayOptions: {
show: {
mode: [
'jsonToBinary',
],
},
},
default: true,
description: 'If all JSON data should be converted to binary.<br />Else only the data of one key will be converted.',
},
{
displayName: 'Source Key',
name: 'sourceKey',
type: 'string',
displayOptions: {
show: {
convertAllData: [
false,
],
mode: [
'jsonToBinary',
],
},
},
default: 'data',
required: true,
placeholder: 'data',
description: 'The name of the JSON key to get data from. It is also possible<br />to define deep keys by using dot-notation like for example:<br />"level1.level2.currentKey"',
},
{
displayName: 'Destination Key',
name: 'destinationKey',
type: 'string',
displayOptions: {
show: {
mode: [
'jsonToBinary',
],
},
},
default: 'data',
required: true,
placeholder: 'data',
description: 'The name the binary key to copy data to. It is also possible<br />to define deep keys by using dot-notation like for example:<br />"level1.level2.newKey"',
},

{
displayName: 'Options',
name: 'options',
type: 'collection',
placeholder: 'Add Option',
default: {},
options: [
{
displayName: 'Encoding',
name: 'encoding',
type: 'string',
displayOptions: {
show: {
'/mode': [
'binaryToJson',
],
},
},
default: 'utf8',
description: 'Set the encoding of the data stream',
},
{
displayName: 'JSON Parse',
name: 'jsonParse',
type: 'boolean',
displayOptions: {
show: {
'/mode': [
'binaryToJson',
],
'/setAllData': [
false
],
},
},
default: false,
description: 'Run JSON parse on the data to get propery object data.',
},
{
displayName: 'Keep Source',
name: 'keepSource',
type: 'boolean',
default: false,
description: 'If the source key should be kept. By default does it get deleted.',
},
{
displayName: 'Mime Type',
name: 'mimeType',
type: 'string',
displayOptions: {
show: {
'/mode': [
'jsonToBinary',
],
},
},
default: 'application/json',
placeholder: 'application/json',
description: 'The mime-type to set. By default will the mime-type for JSON be set.',
},
{
displayName: 'Use Raw Data',
name: 'useRawData',
type: 'boolean',
displayOptions: {
show: {
'/mode': [
'jsonToBinary',
],
},
},
default: false,
description: 'Use data as is and do not JSON.stringify it.',
},
],
}
],
};

async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {

const items = this.getInputData();

const mode = this.getNodeParameter('mode', 0) as string;

const returnData: INodeExecutionData[] = [];

let item: INodeExecutionData;
let newItem: INodeExecutionData;
let options: IDataObject;
for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
item = items[itemIndex];
options = this.getNodeParameter('options', 0, {}) as IDataObject;

// Copy the whole JSON data as data on any level can be renamed
newItem = {
json: {},
};

if (mode === 'binaryToJson') {
const setAllData = this.getNodeParameter('setAllData', itemIndex) as boolean;
const sourceKey = this.getNodeParameter('sourceKey', itemIndex) as string;

const value = get(item.binary, sourceKey);

if (value === undefined) {
// No data found so skip
continue;
}

const encoding = (options.encoding as string) || 'utf8';
let convertedValue = new Buffer(value.data, 'base64').toString(encoding);

if (setAllData === true) {
// Set the full data
newItem.json = JSON.parse(convertedValue);
} else {
// Does get added to existing data so copy it first
newItem.json = JSON.parse(JSON.stringify(item.json));

if (options.jsonParse) {
convertedValue = JSON.parse(convertedValue);
}

const destinationKey = this.getNodeParameter('destinationKey', itemIndex, '') as string;
set(newItem.json, destinationKey, convertedValue);
}

if (options.keepSource === true) {
// Binary data does not get touched so simply reference it
newItem.binary = item.binary;
} else {
// Binary data will change so copy it
newItem.binary = JSON.parse(JSON.stringify(item.binary));
unset(newItem.binary, sourceKey);
}

} else if (mode === 'jsonToBinary') {
const convertAllData = this.getNodeParameter('convertAllData', itemIndex) as boolean;
const destinationKey = this.getNodeParameter('destinationKey', itemIndex) as string;

let value: IDataObject | string = item.json;
if (convertAllData === false) {
const sourceKey = this.getNodeParameter('sourceKey', itemIndex) as string;
value = get(item.json, sourceKey) as IDataObject;
}

if (value === undefined) {
// No data found so skip
continue;
}

if (item.binary !== undefined) {
// Item already has binary data so copy it
newItem.binary = JSON.parse(JSON.stringify(item.binary));
} else {
// Item does not have binary data yet so initialize empty
newItem.binary = {};
}

if (options.useRawData !== true) {
value = JSON.stringify(value);
}

const convertedValue = {
data: new Buffer(value as string).toString('base64'),
mimeType: options.mimeType || 'application/json',
};
set(newItem.binary!, destinationKey, convertedValue);

if (options.keepSource === true) {
// JSON data does not get touched so simply reference it
newItem.json = item.json;
} else {
// JSON data will change so copy it

if (convertAllData === true) {
// Data should not be kept and all data got converted. So simply set new as empty
newItem.json = {};
} else {
// Data should not be kept and only one key has to get removed. So copy all
// data and then remove the not needed one
newItem.json = JSON.parse(JSON.stringify(item.json));
const sourceKey = this.getNodeParameter('sourceKey', itemIndex) as string;

unset(newItem.json, sourceKey);
}
}
} else {
throw new Error(`The operation "${mode}" is not known!`);
}

returnData.push(newItem);
}

return [returnData];
}
}
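Note: the conversion in this node is plain base64 via Buffer, as in this reduced sketch (Buffer.from is the non-deprecated spelling of the new Buffer(...) calls used above; the sample object is illustrative):

// JSON to binary: stringify, then base64-encode.
const json = { name: 'jan', age: 42 };
const asBase64 = Buffer.from(JSON.stringify(json)).toString('base64');

// Binary to JSON: base64-decode, then (optionally) JSON.parse.
const roundTripped = JSON.parse(Buffer.from(asBase64, 'base64').toString('utf8'));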
@@ -171,6 +171,11 @@ export class Pipedrive implements INodeType {
value: 'delete',
description: 'Delete a deal',
},
{
name: 'Duplicate',
value: 'duplicate',
description: 'Duplicate a deal',
},
{
name: 'Get',
value: 'get',
@@ -947,6 +952,28 @@
description: 'ID of the deal to delete.',
},

// ----------------------------------
//         deal:duplicate
// ----------------------------------
{
displayName: 'Deal ID',
name: 'dealId',
type: 'number',
displayOptions: {
show: {
operation: [
'duplicate',
],
resource: [
'deal',
],
},
},
default: 0,
required: true,
description: 'ID of the deal to duplicate.',
},

// ----------------------------------
//         deal:get
// ----------------------------------
@@ -2157,6 +2184,16 @@
const dealId = this.getNodeParameter('dealId', i) as number;
endpoint = `/deals/${dealId}`;

} else if (operation === 'duplicate') {
// ----------------------------------
//         deal:duplicate
// ----------------------------------

requestMethod = 'POST';

const dealId = this.getNodeParameter('dealId', i) as number;
endpoint = `/deals/${dealId}/duplicate`;

} else if (operation === 'get') {
// ----------------------------------
//         deal:get
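Note: the new branch issues a POST to Pipedrive's deal-duplicate endpoint. Outside the node, the same call could be sketched roughly as follows — the base URL and api_token query parameter are assumptions about Pipedrive's REST API, not part of this commit:

async function duplicateDeal(apiToken: string, dealId: number): Promise<any> {
	// POST /deals/{id}/duplicate creates a copy of the deal and returns the new deal's data.
	const response = await fetch(`https://api.pipedrive.com/v1/deals/${dealId}/duplicate?api_token=${apiToken}`, {
		method: 'POST',
	});
	return response.json();
}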
@@ -285,6 +285,9 @@ export class Postgres implements INodeType {
throw new Error(`The operation "${operation}" is not supported!`);
}

// Close the connection
await pgp.end();

return this.prepareOutputData(returnItems);
}
}
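Note: pgp.end() shuts down the pg-promise connection pool so the process can exit cleanly. A minimal illustration outside the node — the connection string and query are placeholders:

import pgPromise = require('pg-promise');

const pgp = pgPromise();
const db = pgp('postgres://user:password@localhost:5432/db'); // placeholder connection string

async function main() {
	const rows = await db.any('SELECT 1 AS ok');
	console.log(rows);
	await pgp.end(); // release all pooled clients; without this the event loop can stay alive
}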
@@ -156,11 +156,34 @@ export class SpreadsheetFile implements INodeType {
'toFile',
],
},
},
placeholder: '',
description: 'Name of the binary property in which to save<br />the binary data of the spreadsheet file.',
},

{
displayName: 'Options',
name: 'options',
type: 'collection',
placeholder: 'Add Option',
displayOptions: {
show: {
operation: [
'toFile',
],
},
},
default: {},
options: [
{
displayName: 'File Name',
name: 'fileName',
type: 'string',
default: '',
description: 'File name to set in binary data. By default will "spreadsheet.<fileFormat>" be used.',
},
],
},
]
};
@@ -214,6 +237,7 @@
// Write the workflow data to spreadsheet file
const binaryPropertyName = this.getNodeParameter('binaryPropertyName', 0) as string;
const fileFormat = this.getNodeParameter('fileFormat', 0) as string;
const options = this.getNodeParameter('options', 0, {}) as IDataObject;

// Get the json data of the items and flatten it
let item: INodeExecutionData;
@@ -258,7 +282,12 @@
binary: {},
};

newItem.binary![binaryPropertyName] = await this.helpers.prepareBinaryData(wbout, `spreadsheet.${fileFormat}`);
let fileName = `spreadsheet.${fileFormat}`;
if (options.fileName !== undefined) {
fileName = options.fileName as string;
}

newItem.binary![binaryPropertyName] = await this.helpers.prepareBinaryData(wbout, fileName);

const newItems = [];
newItems.push(newItem);
@@ -1,6 +1,6 @@
{
"name": "n8n-nodes-base",
"version": "0.25.0",
"version": "0.27.1",
"description": "Base nodes of n8n",
"license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://n8n.io",
@@ -92,6 +92,7 @@
"dist/nodes/Mailgun/Mailgun.node.js",
"dist/nodes/Mattermost/Mattermost.node.js",
"dist/nodes/Merge.node.js",
"dist/nodes/MoveBinaryData.node.js",
"dist/nodes/MongoDb/MongoDb.node.js",
"dist/nodes/NextCloud/NextCloud.node.js",
"dist/nodes/NoOp.node.js",