Merge branch 'master' into extract-postgres-functionality

Ben Hesseldieck 2020-07-08 10:50:45 +02:00
commit 9dee7a2a98
16 changed files with 1126 additions and 30 deletions

View file

@@ -128,15 +128,23 @@ const config = convict({
credentials: {
overwrite: {
// Allows setting default values for credentials which
// get prefilled automatically and are neither shown to
// nor changeable by the user.
// Format: { CREDENTIAL_NAME: { PARAMETER: VALUE }}
doc: 'Overwrites for credentials',
format: '*',
default: '{}',
env: 'CREDENTIALS_OVERWRITE'
}
data: {
// Allows setting default values for credentials which
// get prefilled automatically and are neither shown to
// nor changeable by the user.
// Format: { CREDENTIAL_NAME: { PARAMETER: VALUE }}
doc: 'Overwrites for credentials',
format: '*',
default: '{}',
env: 'CREDENTIALS_OVERWRITE_DATA'
},
endpoint: {
doc: 'Fetch credentials from API',
format: String,
default: '',
env: 'CREDENTIALS_OVERWRITE_ENDPOINT',
},
},
},
executions: {
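For reference, a minimal sketch of what an overwrite value following the documented format could look like once parsed; the credential type and parameter names below are purely illustrative, not part of this change:

// Hypothetical only: 'asanaApi' and 'accessToken' stand in for any credential type and parameter.
// CREDENTIALS_OVERWRITE_DATA='{"asanaApi":{"accessToken":"some-token"}}'
// After JSON.parse the overwrite data has this shape:
const overwriteData: ICredentialsOverwrite = {
	asanaApi: {
		accessToken: 'some-token',
	},
};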

View file

@@ -20,7 +20,7 @@ class CredentialsOverwritesClass {
return;
}
const data = await GenericHelpers.getConfigValue('credentials.overwrite') as string;
const data = await GenericHelpers.getConfigValue('credentials.overwrite.data') as string;
try {
this.overwriteData = JSON.parse(data);
@@ -30,6 +30,7 @@ class CredentialsOverwritesClass {
}
applyOverwrite(type: string, data: ICredentialDataDecryptedObject) {
const overwrites = this.get(type);
if (overwrites === undefined) {

View file

@@ -58,6 +58,9 @@ import {
WorkflowExecuteAdditionalData,
WorkflowRunner,
GenericHelpers,
CredentialsOverwrites,
ICredentialsOverwrite,
LoadNodesAndCredentials,
} from './';
import {
@@ -105,6 +108,7 @@ class App {
testWebhooks: TestWebhooks.TestWebhooks;
endpointWebhook: string;
endpointWebhookTest: string;
endpointPresetCredentials: string;
externalHooks: IExternalHooksClass;
saveDataErrorExecution: string;
saveDataSuccessExecution: string;
@@ -119,6 +123,8 @@ class App {
sslKey: string;
sslCert: string;
presetCredentialsLoaded: boolean;
constructor() {
this.app = express();
@@ -141,6 +147,9 @@ class App {
this.sslCert = config.get('ssl_cert');
this.externalHooks = ExternalHooks();
this.presetCredentialsLoaded = false;
this.endpointPresetCredentials = config.get('credentials.overwrite.endpoint') as string;
}
@@ -1650,6 +1659,40 @@ class App {
});
if (this.endpointPresetCredentials !== '') {
// POST endpoint to set preset credentials
this.app.post(`/${this.endpointPresetCredentials}`, async (req: express.Request, res: express.Response) => {
if (this.presetCredentialsLoaded === false) {
const body = req.body as ICredentialsOverwrite;
if (req.headers['content-type'] !== 'application/json') {
ResponseHelper.sendErrorResponse(res, new Error('Body must be valid JSON; make sure the content-type is application/json'));
return;
}
const loadNodesAndCredentials = LoadNodesAndCredentials();
const credentialsOverwrites = CredentialsOverwrites();
await credentialsOverwrites.init(body);
const credentialTypes = CredentialTypes();
await credentialTypes.init(loadNodesAndCredentials.credentialTypes);
this.presetCredentialsLoaded = true;
ResponseHelper.sendSuccessResponse(res, { success: true }, true, 200);
} else {
ResponseHelper.sendErrorResponse(res, new Error('Preset credentials can only be set once'));
}
});
}
// Serve the website
const startTime = (new Date()).toUTCString();
const editorUiPath = require.resolve('n8n-editor-ui');
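As an illustration of how this endpoint could be exercised, a minimal sketch using the existing request-promise-native dependency; the host, port, endpoint path and credential payload are assumptions, not part of this change:

import * as requestPromise from 'request-promise-native';

// Sketch only: assumes n8n listens on localhost:5678 and was started with
// CREDENTIALS_OVERWRITE_ENDPOINT=preset-credentials. Payload values are made up.
async function pushPresetCredentials(): Promise<void> {
	await requestPromise({
		method: 'POST',
		uri: 'http://localhost:5678/preset-credentials',
		headers: { 'content-type': 'application/json' },
		body: { asanaApi: { accessToken: 'some-token' } },
		json: true,
	});
}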

View file

@@ -30,7 +30,7 @@
"@types/express": "^4.16.1",
"@types/jest": "^24.0.18",
"@types/lodash.get": "^4.4.6",
"@types/mmmagic": "^0.4.29",
"@types/mime-types": "^2.1.0",
"@types/node": "^10.10.1",
"@types/request-promise-native": "^1.0.15",
"jest": "^24.9.0",
@@ -43,8 +43,9 @@
"client-oauth2": "^4.2.5",
"cron": "^1.7.2",
"crypto-js": "3.1.9-1",
"file-type": "^14.6.2",
"lodash.get": "^4.4.2",
"mmmagic": "^0.5.2",
"mime-types": "^2.1.27",
"n8n-workflow": "~0.33.0",
"p-cancelable": "^2.0.0",
"request": "^2.88.2",

View file

@@ -44,14 +44,9 @@ import * as express from 'express';
import * as path from 'path';
import { OptionsWithUrl, OptionsWithUri } from 'request';
import * as requestPromise from 'request-promise-native';
import { Magic, MAGIC_MIME_TYPE } from 'mmmagic';
import { createHmac } from 'crypto';
const magic = new Magic(MAGIC_MIME_TYPE);
import { fromBuffer } from 'file-type';
import { lookup } from 'mime-types';
/**
@@ -66,18 +61,28 @@ const magic = new Magic(MAGIC_MIME_TYPE);
*/
export async function prepareBinaryData(binaryData: Buffer, filePath?: string, mimeType?: string): Promise<IBinaryData> {
if (!mimeType) {
// If not mime type is given figure it out
mimeType = await new Promise<string>(
(resolve, reject) => {
magic.detect(binaryData, (err: Error, mimeType: string) => {
if (err) {
return reject(err);
}
return resolve(mimeType);
});
}
);
// If no mime type is given figure it out
if (filePath) {
// Use file path to guess mime type
const mimeTypeLookup = lookup(filePath);
if (mimeTypeLookup) {
mimeType = mimeTypeLookup;
}
}
if (!mimeType) {
// Use buffer to guess mime type
const fileTypeData = await fromBuffer(binaryData);
if (fileTypeData) {
mimeType = fileTypeData.mime;
}
}
if (!mimeType) {
// Fall back to text
mimeType = 'text/plain';
}
}
const returnData: IBinaryData = {
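A standalone sketch of the same detection order, for clarity; guessMimeType and its arguments are illustrative names, not part of this change:

import { fromBuffer } from 'file-type';
import { lookup } from 'mime-types';

// Illustrative helper mirroring the fallback order above:
// file extension first, then buffer contents, then 'text/plain'.
async function guessMimeType(data: Buffer, filePath?: string): Promise<string> {
	if (filePath) {
		const byExtension = lookup(filePath);
		if (byExtension) {
			return byExtension;
		}
	}
	const byContent = await fromBuffer(data);
	if (byContent) {
		return byContent.mime;
	}
	return 'text/plain';
}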

View file

@@ -0,0 +1,47 @@
import { ICredentialType, NodePropertyTypes } from 'n8n-workflow';
export class MicrosoftSql implements ICredentialType {
name = 'microsoftSql';
displayName = 'Microsoft SQL';
properties = [
{
displayName: 'Server',
name: 'server',
type: 'string' as NodePropertyTypes,
default: 'localhost'
},
{
displayName: 'Database',
name: 'database',
type: 'string' as NodePropertyTypes,
default: 'master'
},
{
displayName: 'User',
name: 'user',
type: 'string' as NodePropertyTypes,
default: 'sa'
},
{
displayName: 'Password',
name: 'password',
type: 'string' as NodePropertyTypes,
typeOptions: {
password: true
},
default: ''
},
{
displayName: 'Port',
name: 'port',
type: 'number' as NodePropertyTypes,
default: 1433
},
{
displayName: 'Domain',
name: 'domain',
type: 'string' as NodePropertyTypes,
default: ''
}
];
}

View file

@@ -0,0 +1,18 @@
import {
ICredentialType,
NodePropertyTypes,
} from 'n8n-workflow';
export class PostmarkApi implements ICredentialType {
name = 'postmarkApi';
displayName = 'Postmark API';
properties = [
{
displayName: 'Server API Token',
name: 'serverToken',
type: 'string' as NodePropertyTypes,
default: '',
},
];
}

View file

@@ -0,0 +1,144 @@
import { IDataObject, INodeExecutionData } from 'n8n-workflow';
import { ITables } from './TableInterface';
/**
* Returns a copy of the item which only contains the json data and
* of that only the defined properties
*
* @param {INodeExecutionData} item The item to copy
* @param {string[]} properties The properties it should include
* @returns
*/
export function copyInputItem(
item: INodeExecutionData,
properties: string[],
): IDataObject {
// Prepare the data to insert and copy it to be returned
let newItem: IDataObject = {};
for (const property of properties) {
if (item.json[property] === undefined) {
newItem[property] = null;
} else {
newItem[property] = JSON.parse(JSON.stringify(item.json[property]));
}
}
return newItem;
}
/**
* Creates an ITables with the columns for the operations
*
* @param {INodeExecutionData[]} items The items to extract the tables/columns for
* @param {function} getNodeParam getter for the Node's Parameters
* @returns {ITables} {tableName: {colNames: [items]}};
*/
export function createTableStruct(
getNodeParam: Function,
items: INodeExecutionData[],
additionalProperties: string[] = [],
keyName?: string,
): ITables {
return items.reduce((tables, item, index) => {
const table = getNodeParam('table', index) as string;
const columnString = getNodeParam('columns', index) as string;
const columns = columnString.split(',').map(column => column.trim());
const itemCopy = copyInputItem(item, columns.concat(additionalProperties));
const keyParam = keyName
? (getNodeParam(keyName, index) as string)
: undefined;
if (tables[table] === undefined) {
tables[table] = {};
}
if (tables[table][columnString] === undefined) {
tables[table][columnString] = [];
}
if (keyName) {
itemCopy[keyName] = keyParam;
}
tables[table][columnString].push(itemCopy);
return tables;
}, {} as ITables);
}
/**
* Executes a queue of queries on given ITables.
*
* @param {ITables} tables The ITables to be processed.
* @param {function} buildQueryQueue function that builds the queue of promises
* @returns {Promise}
*/
export function executeQueryQueue(
tables: ITables,
buildQueryQueue: Function,
): Promise<any[]> {
return Promise.all(
Object.keys(tables).map(table => {
const columnsResults = Object.keys(tables[table]).map(columnString => {
return Promise.all(
buildQueryQueue({
table: table,
columnString: columnString,
items: tables[table][columnString],
}),
);
});
return Promise.all(columnsResults);
}),
);
}
/**
* Extracts the values from the item for INSERT
*
* @param {IDataObject} item The item to extract
* @returns {string} (Val1, Val2, ...)
*/
export function extractValues(item: IDataObject): string {
return `(${Object.values(item as any)
.map(val => (typeof val === 'string' ? `'${val}'` : val)) // maybe other types such as dates have to be handled as well
.join(',')})`;
}
/**
* Extracts the SET from the item for UPDATE
*
* @param {IDataObject} item The item to extract from
* @param {string[]} columns The columns to update
* @returns {string} col1 = val1, col2 = val2
*/
export function extractUpdateSet(item: IDataObject, columns: string[]): string {
return columns
.map(
column =>
`${column} = ${
typeof item[column] === 'string' ? `'${item[column]}'` : item[column]
}`,
)
.join(',');
}
/**
* Extracts the WHERE condition from the item for UPDATE
*
* @param {IDataObject} item The item to extract from
* @param {string} key The column name to build the condition with
* @returns {string} id = '123'
*/
export function extractUpdateCondition(item: IDataObject, key: string): string {
return `${key} = ${
typeof item[key] === 'string' ? `'${item[key]}'` : item[key]
}`;
}
/**
* Extracts the WHERE condition from the items for DELETE
*
* @param {IDataObject[]} items The items to extract the values from
* @param {string} key The column name to extract the value from for the delete condition
* @returns {string} (Val1, Val2, ...)
*/
export function extractDeleteValues(items: IDataObject[], key: string): string {
return `(${items
.map(item => (typeof item[key] === 'string' ? `'${item[key]}'` : item[key]))
.join(',')})`;
}
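To make the ITables shape concrete, a hypothetical grouping produced by createTableStruct for two items that target the same table; the table name, columns and values are examples only:

// Hypothetical: both items had table='product' and columns='id,name'.
const tables: ITables = {
	product: {
		'id,name': [
			{ id: 1, name: 'apple' },
			{ id: 2, name: 'pear' },
		],
	},
};
// extractValues({ id: 1, name: 'apple' }) => "(1,'apple')"
// extractUpdateSet({ name: 'pear' }, ['name']) => "name = 'pear'"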

View file

@@ -0,0 +1,410 @@
import { IExecuteFunctions } from 'n8n-core';
import {
IDataObject,
INodeExecutionData,
INodeType,
INodeTypeDescription,
} from 'n8n-workflow';
import { chunk, flatten } from '../../utils/utilities';
import * as mssql from 'mssql';
import { ITables } from './TableInterface';
import {
copyInputItem,
createTableStruct,
executeQueryQueue,
extractDeleteValues,
extractUpdateCondition,
extractUpdateSet,
extractValues,
} from './GenericFunctions';
export class MicrosoftSql implements INodeType {
description: INodeTypeDescription = {
displayName: 'Microsoft SQL',
name: 'microsoftSql',
icon: 'file:mssql.png',
group: ['input'],
version: 1,
description: 'Gets, adds and updates data in Microsoft SQL.',
defaults: {
name: 'Microsoft SQL',
color: '#1d4bab',
},
inputs: ['main'],
outputs: ['main'],
credentials: [
{
name: 'microsoftSql',
required: true,
},
],
properties: [
{
displayName: 'Operation',
name: 'operation',
type: 'options',
options: [
{
name: 'Execute Query',
value: 'executeQuery',
description: 'Executes a SQL query.',
},
{
name: 'Insert',
value: 'insert',
description: 'Inserts rows in database.',
},
{
name: 'Update',
value: 'update',
description: 'Updates rows in database.',
},
{
name: 'Delete',
value: 'delete',
description: 'Deletes rows in database.',
},
],
default: 'insert',
description: 'The operation to perform.',
},
// ----------------------------------
// executeQuery
// ----------------------------------
{
displayName: 'Query',
name: 'query',
type: 'string',
typeOptions: {
rows: 5,
},
displayOptions: {
show: {
operation: ['executeQuery'],
},
},
default: '',
placeholder: 'SELECT id, name FROM product WHERE id < 40',
required: true,
description: 'The SQL query to execute.',
},
// ----------------------------------
// insert
// ----------------------------------
{
displayName: 'Table',
name: 'table',
type: 'string',
displayOptions: {
show: {
operation: ['insert'],
},
},
default: '',
required: true,
description: 'Name of the table into which to insert data.',
},
{
displayName: 'Columns',
name: 'columns',
type: 'string',
displayOptions: {
show: {
operation: ['insert'],
},
},
default: '',
placeholder: 'id,name,description',
description:
'Comma separated list of the properties which should be used as columns for the new rows.',
},
// ----------------------------------
// update
// ----------------------------------
{
displayName: 'Table',
name: 'table',
type: 'string',
displayOptions: {
show: {
operation: ['update'],
},
},
default: '',
required: true,
description: 'Name of the table in which to update data.',
},
{
displayName: 'Update Key',
name: 'updateKey',
type: 'string',
displayOptions: {
show: {
operation: ['update'],
},
},
default: 'id',
required: true,
description:
'Name of the property which decides which rows in the database should be updated. Normally that would be "id".',
},
{
displayName: 'Columns',
name: 'columns',
type: 'string',
displayOptions: {
show: {
operation: ['update'],
},
},
default: '',
placeholder: 'name,description',
description:
'Comma separated list of the properties which should be used as columns for the rows to update.',
},
// ----------------------------------
// delete
// ----------------------------------
{
displayName: 'Table',
name: 'table',
type: 'string',
displayOptions: {
show: {
operation: ['delete'],
},
},
default: '',
required: true,
description: 'Name of the table from which to delete data.',
},
{
displayName: 'Delete Key',
name: 'deleteKey',
type: 'string',
displayOptions: {
show: {
operation: ['delete'],
},
},
default: 'id',
required: true,
description:
'Name of the property which decides which rows in the database should be deleted. Normally that would be "id".',
},
],
};
async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
const credentials = this.getCredentials('microsoftSql');
if (credentials === undefined) {
throw new Error('No credentials got returned!');
}
const config = {
server: credentials.server as string,
port: credentials.port as number,
database: credentials.database as string,
user: credentials.user as string,
password: credentials.password as string,
domain: credentials.domain ? (credentials.domain as string) : undefined,
};
const pool = new mssql.ConnectionPool(config);
await pool.connect();
let returnItems: INodeExecutionData[] = [];
const items = this.getInputData();
const operation = this.getNodeParameter('operation', 0) as string;
try {
if (operation === 'executeQuery') {
// ----------------------------------
// executeQuery
// ----------------------------------
const rawQuery = this.getNodeParameter('query', 0) as string;
const queryResult = await pool.request().query(rawQuery);
const result =
queryResult.recordsets.length > 1
? flatten(queryResult.recordsets)
: queryResult.recordsets[0];
returnItems = this.helpers.returnJsonArray(result as IDataObject[]);
} else if (operation === 'insert') {
// ----------------------------------
// insert
// ----------------------------------
const tables = createTableStruct(this.getNodeParameter, items);
const queriesResults = await executeQueryQueue(
tables,
({
table,
columnString,
items,
}: {
table: string;
columnString: string;
items: IDataObject[];
}): Array<Promise<object>> => {
return chunk(items, 1000).map(insertValues => {
const values = insertValues
.map((item: IDataObject) => extractValues(item))
.join(',');
return pool
.request()
.query(
`INSERT INTO ${table}(${columnString}) VALUES ${values};`,
);
});
},
);
const rowsAffected = flatten(queriesResults).reduce(
(acc: number, resp: mssql.IResult<object>): number =>
(acc += resp.rowsAffected.reduce((sum, val) => (sum += val))),
0,
);
returnItems = this.helpers.returnJsonArray({
rowsAffected,
} as IDataObject);
} else if (operation === 'update') {
// ----------------------------------
// update
// ----------------------------------
const updateKeys = items.map(
(item, index) => this.getNodeParameter('updateKey', index) as string,
);
const tables = createTableStruct(
this.getNodeParameter,
items,
['updateKey'],
'updateKey',
);
const queriesResults = await executeQueryQueue(
tables,
({
table,
columnString,
items,
}: {
table: string;
columnString: string;
items: IDataObject[];
}): Array<Promise<object>> => {
return items.map(item => {
const columns = columnString
.split(',')
.map(column => column.trim());
const setValues = extractUpdateSet(item, columns);
const condition = extractUpdateCondition(
item,
item.updateKey as string,
);
return pool
.request()
.query(`UPDATE ${table} SET ${setValues} WHERE ${condition};`);
});
},
);
const rowsAffected = flatten(queriesResults).reduce(
(acc: number, resp: mssql.IResult<object>): number =>
(acc += resp.rowsAffected.reduce((sum, val) => (sum += val))),
0,
);
returnItems = this.helpers.returnJsonArray({
rowsAffected,
} as IDataObject);
} else if (operation === 'delete') {
// ----------------------------------
// delete
// ----------------------------------
const tables = items.reduce((tables, item, index) => {
const table = this.getNodeParameter('table', index) as string;
const deleteKey = this.getNodeParameter('deleteKey', index) as string;
if (tables[table] === undefined) {
tables[table] = {};
}
if (tables[table][deleteKey] === undefined) {
tables[table][deleteKey] = [];
}
tables[table][deleteKey].push(item);
return tables;
}, {} as ITables);
const queriesResults = await Promise.all(
Object.keys(tables).map(table => {
const deleteKeyResults = Object.keys(tables[table]).map(
deleteKey => {
const deleteItemsList = chunk(
tables[table][deleteKey].map(item =>
copyInputItem(item as INodeExecutionData, [deleteKey]),
),
1000,
);
const queryQueue = deleteItemsList.map(deleteValues => {
return pool
.request()
.query(
`DELETE FROM ${table} WHERE ${deleteKey} IN ${extractDeleteValues(
deleteValues,
deleteKey,
)};`,
);
});
return Promise.all(queryQueue);
},
);
return Promise.all(deleteKeyResults);
}),
);
const rowsAffected = flatten(queriesResults).reduce(
(acc: number, resp: mssql.IResult<object>): number =>
(acc += resp.rowsAffected.reduce((sum, val) => (sum += val))),
0,
);
returnItems = this.helpers.returnJsonArray({
rowsAffected,
} as IDataObject);
} else {
await pool.close();
throw new Error(`The operation "${operation}" is not supported!`);
}
} catch (err) {
if (this.continueOnFail() === true) {
returnItems = items;
} else {
await pool.close();
throw err;
}
}
// Close the connection
await pool.close();
return this.prepareOutputData(returnItems);
}
}
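For reference, a worked example (values illustrative) of the statement the insert branch assembles from the grouping sketched earlier:

// With table 'product', columnString 'id,name' and the two items above,
// extractValues() yields "(1,'apple')" and "(2,'pear')", so the query sent is:
//   INSERT INTO product(id,name) VALUES (1,'apple'),(2,'pear');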

View file

@@ -0,0 +1,7 @@
import { IDataObject } from 'n8n-workflow';
export interface ITables {
[key: string]: {
[key: string]: Array<IDataObject>;
};
}

Binary file not shown (new file, 3.4 KiB)

View file

@@ -0,0 +1,93 @@
import {
OptionsWithUri,
} from 'request';
import {
IExecuteFunctions,
ILoadOptionsFunctions,
} from 'n8n-core';
import {
IDataObject,
IHookFunctions,
IWebhookFunctions
} from 'n8n-workflow';
export async function postmarkApiRequest(this: IExecuteFunctions | IWebhookFunctions | IHookFunctions | ILoadOptionsFunctions, method : string, endpoint : string, body: any = {}, option: IDataObject = {}): Promise<any> { // tslint:disable-line:no-any
const credentials = this.getCredentials('postmarkApi');
if (credentials === undefined) {
throw new Error('No credentials got returned!');
}
let options: OptionsWithUri = {
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
'X-Postmark-Server-Token' : credentials.serverToken
},
method,
body,
uri: 'https://api.postmarkapp.com' + endpoint,
json: true
};
// Remove the body property when no body data was given
if (Object.keys(body).length === 0) {
delete options.body;
}
options = Object.assign({}, options, option);
try {
return await this.helpers.request!(options);
} catch (error) {
throw new Error(`Postmark: ${error.statusCode} Message: ${error.message}`);
}
}
// tslint:disable-next-line: no-any
export function convertTriggerObjectToStringArray (webhookObject : any) : string[] {
const triggers = webhookObject.Triggers;
const webhookEvents : string[] = [];
// Translate Webhook trigger settings to string array
if (triggers.Open.Enabled) {
webhookEvents.push('open');
}
if (triggers.Open.PostFirstOpenOnly) {
webhookEvents.push('firstOpen');
}
if (triggers.Click.Enabled) {
webhookEvents.push('click');
}
if (triggers.Delivery.Enabled) {
webhookEvents.push('delivery');
}
if (triggers.Bounce.Enabled) {
webhookEvents.push('bounce');
}
if (triggers.Bounce.IncludeContent) {
webhookEvents.push('includeContent');
}
if (triggers.SpamComplaint.Enabled) {
webhookEvents.push('spamComplaint');
}
if (triggers.SpamComplaint.IncludeContent) {
// Avoid adding 'includeContent' twice
if (!webhookEvents.includes('includeContent')) {
webhookEvents.push('includeContent');
}
}
if (triggers.SubscriptionChange.Enabled) {
webhookEvents.push('subscriptionChange');
}
return webhookEvents;
}
export function eventExists (currentEvents : string[], webhookEvents: string[]) {
for (const currentEvent of currentEvents) {
if (!webhookEvents.includes(currentEvent)) {
return false;
}
}
return true;
}
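A hypothetical webhook object and the event list the helpers derive from it; the field values are illustrative only:

// Hypothetical Triggers object as returned by the Postmark webhooks API (abridged):
const webhook = {
	Triggers: {
		Open: { Enabled: true, PostFirstOpenOnly: false },
		Click: { Enabled: true },
		Delivery: { Enabled: false },
		Bounce: { Enabled: true, IncludeContent: false },
		SpamComplaint: { Enabled: false, IncludeContent: false },
		SubscriptionChange: { Enabled: false },
	},
};
// convertTriggerObjectToStringArray(webhook) => ['open', 'click', 'bounce']
// eventExists(['open', 'click'], ['open', 'click', 'bounce']) => true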

View file

@@ -0,0 +1,256 @@
import {
IHookFunctions,
IWebhookFunctions,
} from 'n8n-core';
import {
INodeTypeDescription,
INodeType,
IWebhookResponseData,
} from 'n8n-workflow';
import {
convertTriggerObjectToStringArray,
eventExists,
postmarkApiRequest
} from './GenericFunctions';
export class PostmarkTrigger implements INodeType {
description: INodeTypeDescription = {
displayName: 'Postmark Trigger',
name: 'postmarkTrigger',
icon: 'file:postmark.png',
group: ['trigger'],
version: 1,
description: 'Starts the workflow when Postmark events occur.',
defaults: {
name: 'Postmark Trigger',
color: '#fedd00',
},
inputs: [],
outputs: ['main'],
credentials: [
{
name: 'postmarkApi',
required: true,
},
],
webhooks: [
{
name: 'default',
httpMethod: 'POST',
responseMode: 'onReceived',
path: 'webhook',
},
],
properties: [
{
displayName: 'Events',
name: 'events',
type: 'multiOptions',
options: [
{
name: 'Bounce',
value: 'bounce',
description: 'Trigger on bounce.',
},
{
name: 'Click',
value: 'click',
description: 'Trigger on click.',
},
{
name: 'Delivery',
value: 'delivery',
description: 'Trigger on delivery.',
},
{
name: 'Open',
value: 'open',
description: 'Trigger on open.',
},
{
name: 'Spam Complaint',
value: 'spamComplaint',
description: 'Trigger on spam complaint.',
},
{
name: 'Subscription Change',
value: 'subscriptionChange',
description: 'Trigger on subscription change.',
},
],
default: [],
required: true,
description: 'Webhook events that will be enabled for that endpoint.',
},
{
displayName: 'First Open',
name: 'firstOpen',
description: 'Only fires on first open for event "Open".',
type: 'boolean',
default: false,
displayOptions: {
show: {
events: [
'open',
],
},
},
},
{
displayName: 'Include Content',
name: 'includeContent',
description: 'Includes message content for events "Bounce" and "Spam Complaint".',
type: 'boolean',
default: false,
displayOptions: {
show: {
events: [
'bounce',
'spamComplaint',
],
},
},
},
],
};
// @ts-ignore (because of request)
webhookMethods = {
default: {
async checkExists(this: IHookFunctions): Promise<boolean> {
const webhookData = this.getWorkflowStaticData('node');
const webhookUrl = this.getNodeWebhookUrl('default');
const events = this.getNodeParameter('events') as string[];
if (this.getNodeParameter('includeContent') as boolean) {
events.push('includeContent');
}
if (this.getNodeParameter('firstOpen') as boolean) {
events.push('firstOpen');
}
// Get all webhooks
const endpoint = `/webhooks`;
const responseData = await postmarkApiRequest.call(this, 'GET', endpoint, {});
// No webhooks exist
if (responseData.Webhooks.length === 0) {
return false;
}
// If webhooks exist, check if any match current settings
for (const webhook of responseData.Webhooks) {
if (webhook.Url === webhookUrl && eventExists(events, convertTriggerObjectToStringArray(webhook))) {
webhookData.webhookId = webhook.ID;
// webhook identical to current settings. re-assign webhook id to found webhook.
return true;
}
}
return false;
},
async create(this: IHookFunctions): Promise<boolean> {
const webhookUrl = this.getNodeWebhookUrl('default');
const endpoint = `/webhooks`;
// tslint:disable-next-line: no-any
const body : any = {
Url: webhookUrl,
Triggers: {
Open:{
Enabled: false,
PostFirstOpenOnly: false
},
Click:{
Enabled: false
},
Delivery:{
Enabled: false
},
Bounce:{
Enabled: false,
IncludeContent: false
},
SpamComplaint:{
Enabled: false,
IncludeContent: false
},
SubscriptionChange: {
Enabled: false
}
}
};
const events = this.getNodeParameter('events') as string[];
if (events.includes('open')) {
body.Triggers.Open.Enabled = true;
body.Triggers.Open.PostFirstOpenOnly = this.getNodeParameter('firstOpen') as boolean;
}
if (events.includes('click')) {
body.Triggers.Click.Enabled = true;
}
if (events.includes('delivery')) {
body.Triggers.Delivery.Enabled = true;
}
if (events.includes('bounce')) {
body.Triggers.Bounce.Enabled = true;
body.Triggers.Bounce.IncludeContent = this.getNodeParameter('includeContent') as boolean;
}
if (events.includes('spamComplaint')) {
body.Triggers.SpamComplaint.Enabled = true;
body.Triggers.SpamComplaint.IncludeContent = this.getNodeParameter('includeContent') as boolean;
}
if (events.includes('subscriptionChange')) {
body.Triggers.SubscriptionChange.Enabled = true;
}
const responseData = await postmarkApiRequest.call(this, 'POST', endpoint, body);
if (responseData.ID === undefined) {
// Required data is missing so was not successful
return false;
}
const webhookData = this.getWorkflowStaticData('node');
webhookData.webhookId = responseData.ID as string;
return true;
},
async delete(this: IHookFunctions): Promise<boolean> {
const webhookData = this.getWorkflowStaticData('node');
if (webhookData.webhookId !== undefined) {
const endpoint = `/webhooks/${webhookData.webhookId}`;
const body = {};
try {
await postmarkApiRequest.call(this, 'DELETE', endpoint, body);
} catch (e) {
return false;
}
// Remove from the static workflow data so that it is clear
// that no webhooks are registered anymore
delete webhookData.webhookId;
delete webhookData.webhookEvents;
}
return true;
},
},
};
async webhook(this: IWebhookFunctions): Promise<IWebhookResponseData> {
const req = this.getRequestObject();
return {
workflowData: [
this.helpers.returnJsonArray(req.body)
],
};
}
}
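For clarity, a worked example (parameter values illustrative) of the trigger settings the create() method derives from the node parameters:

// Illustrative: with events=['open','bounce'], firstOpen=true, includeContent=false,
// create() sends a body whose relevant triggers are:
//   Triggers.Open   = { Enabled: true, PostFirstOpenOnly: true }
//   Triggers.Bounce = { Enabled: true, IncludeContent: false }
// All other triggers stay disabled.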

Binary file not shown (new file, 1.3 KiB)

View file

@@ -0,0 +1,57 @@
/**
* Creates an array of elements split into groups the length of `size`.
* If `array` can't be split evenly, the final chunk will be the remaining
* elements.
*
* @param {Array} array The array to process.
* @param {number} [size=1] The length of each chunk
* @returns {Array} Returns the new array of chunks.
* @example
*
* chunk(['a', 'b', 'c', 'd'], 2)
* // => [['a', 'b'], ['c', 'd']]
*
* chunk(['a', 'b', 'c', 'd'], 3)
* // => [['a', 'b', 'c'], ['d']]
*/
export function chunk(array: any[], size: number = 1) {
const length = array == null ? 0 : array.length;
if (!length || size < 1) {
return [];
}
let index = 0;
let resIndex = 0;
const result = new Array(Math.ceil(length / size));
while (index < length) {
result[resIndex++] = array.slice(index, (index += size));
}
return result;
}
/**
* Takes a multidimensional array and converts it to a one-dimensional array.
*
* @param {Array} nestedArray The array to be flattened.
* @returns {Array} Returns the new flattened array.
* @example
*
* flatten([['a', 'b'], ['c', 'd']])
* // => ['a', 'b', 'c', 'd']
*
*/
export function flatten(nestedArray: any[][]) {
const result = [];
(function loop(array: any[]) {
for (let i = 0; i < array.length; i++) {
if (Array.isArray(array[i])) {
loop(array[i]);
} else {
result.push(array[i]);
}
}
})(nestedArray);
return result;
}

View file

@@ -96,6 +96,7 @@
"dist/credentials/MicrosoftExcelOAuth2Api.credentials.js",
"dist/credentials/MicrosoftOAuth2Api.credentials.js",
"dist/credentials/MicrosoftOneDriveOAuth2Api.credentials.js",
"dist/credentials/MicrosoftSql.credentials.js",
"dist/credentials/MoceanApi.credentials.js",
"dist/credentials/MondayComApi.credentials.js",
"dist/credentials/MongoDb.credentials.js",
@@ -110,6 +111,7 @@
"dist/credentials/PayPalApi.credentials.js",
"dist/credentials/PipedriveApi.credentials.js",
"dist/credentials/Postgres.credentials.js",
"dist/credentials/PostmarkApi.credentials.js",
"dist/credentials/Redis.credentials.js",
"dist/credentials/RocketchatApi.credentials.js",
"dist/credentials/RundeckApi.credentials.js",
@@ -206,7 +208,7 @@
"dist/nodes/Google/Calendar/GoogleCalendar.node.js",
"dist/nodes/Google/Drive/GoogleDrive.node.js",
"dist/nodes/Google/Sheet/GoogleSheets.node.js",
"dist/nodes/Google/Task/GoogleTasks.node.js",
"dist/nodes/Google/Task/GoogleTasks.node.js",
"dist/nodes/GraphQL/GraphQL.node.js",
"dist/nodes/Gumroad/GumroadTrigger.node.js",
"dist/nodes/Harvest/Harvest.node.js",
@@ -240,6 +242,7 @@
"dist/nodes/MessageBird/MessageBird.node.js",
"dist/nodes/Microsoft/Excel/MicrosoftExcel.node.js",
"dist/nodes/Microsoft/OneDrive/MicrosoftOneDrive.node.js",
"dist/nodes/Microsoft/Sql/MicrosoftSql.node.js",
"dist/nodes/MoveBinaryData.node.js",
"dist/nodes/Mocean/Mocean.node.js",
"dist/nodes/MondayCom/MondayCom.node.js",
@@ -256,6 +259,7 @@
"dist/nodes/Pipedrive/Pipedrive.node.js",
"dist/nodes/Pipedrive/PipedriveTrigger.node.js",
"dist/nodes/Postgres/Postgres.node.js",
"dist/nodes/Postmark/PostmarkTrigger.node.js",
"dist/nodes/ReadBinaryFile.node.js",
"dist/nodes/ReadBinaryFiles.node.js",
"dist/nodes/ReadPdf.node.js",
@@ -320,6 +324,7 @@
"@types/lodash.set": "^4.3.6",
"@types/moment-timezone": "^0.5.12",
"@types/mongodb": "^3.5.4",
"@types/mssql": "^6.0.2",
"@types/node": "^10.10.1",
"@types/nodemailer": "^6.4.0",
"@types/redis": "^2.8.11",
@@ -352,6 +357,7 @@
"moment": "2.24.0",
"moment-timezone": "^0.5.28",
"mongodb": "^3.5.5",
"mssql": "^6.2.0",
"mysql2": "^2.0.1",
"n8n-core": "~0.37.0",
"nodemailer": "^6.4.6",