n8n/packages/nodes-base/nodes/Kafka/Kafka.node.ts

import {
	CompressionTypes,
	Kafka as apacheKafka,
	KafkaConfig,
	SASLOptions,
	TopicMessages,
} from 'kafkajs';

import {
	IExecuteFunctions,
} from 'n8n-core';

import {
	IDataObject,
	INodeExecutionData,
	INodeType,
	INodeTypeDescription,
	NodeOperationError,
} from 'n8n-workflow';
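
// n8n node that publishes incoming items (or a configured static message) to a Kafka topic via the kafkajs producer.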
export class Kafka implements INodeType {
	description: INodeTypeDescription = {
		displayName: 'Kafka',
		name: 'kafka',
		icon: 'file:kafka.svg',
		group: ['transform'],
		version: 1,
		description: 'Sends messages to a Kafka topic',
		defaults: {
			name: 'Kafka',
			color: '#000000',
		},
		inputs: ['main'],
		outputs: ['main'],
		credentials: [
			{
				name: 'kafka',
				required: true,
			},
		],
		properties: [
			{
				displayName: 'Topic',
				name: 'topic',
				type: 'string',
				default: '',
				placeholder: 'topic-name',
				description: 'Name of the topic to publish to.',
			},
			{
				displayName: 'Send Input Data',
				name: 'sendInputData',
				type: 'boolean',
				default: true,
				description: 'Send the data the node receives as JSON to Kafka.',
			},
			{
				displayName: 'Message',
				name: 'message',
				type: 'string',
				displayOptions: {
					show: {
						sendInputData: [
							false,
						],
					},
				},
				default: '',
				description: 'The message to be sent.',
			},
			{
				displayName: 'JSON Parameters',
				name: 'jsonParameters',
				type: 'boolean',
				default: false,
			},
			{
				displayName: 'Headers',
				name: 'headersUi',
				placeholder: 'Add Header',
				type: 'fixedCollection',
				displayOptions: {
					show: {
						jsonParameters: [
							false,
						],
					},
				},
				typeOptions: {
					multipleValues: true,
				},
				default: {},
				options: [
					{
						name: 'headerValues',
						displayName: 'Header',
						values: [
							{
								displayName: 'Key',
								name: 'key',
								type: 'string',
								default: '',
							},
							{
								displayName: 'Value',
								name: 'value',
								type: 'string',
								default: '',
							},
						],
					},
				],
			},
			{
				displayName: 'Headers (JSON)',
				name: 'headerParametersJson',
				type: 'json',
				displayOptions: {
					show: {
						jsonParameters: [
							true,
						],
					},
				},
				default: '',
				description: 'Header parameters as JSON (flat object).',
			},
			{
				displayName: 'Options',
				name: 'options',
				type: 'collection',
				default: {},
				placeholder: 'Add Option',
				options: [
					{
						displayName: 'Acks',
						name: 'acks',
						type: 'boolean',
						default: false,
						description: 'Whether or not the producer must wait for acknowledgement from all replicas.',
					},
					{
						displayName: 'Compression',
						name: 'compression',
						type: 'boolean',
						default: false,
						description: 'Send the data in a compressed format using the GZIP codec.',
					},
					{
						displayName: 'Timeout',
						name: 'timeout',
						type: 'number',
						default: 30000,
						description: 'The time to await a response in ms.',
					},
				],
			},
		],
	};

	async execute(this: IExecuteFunctions): Promise<INodeExecutionData[][]> {
		const items = this.getInputData();
		const length = items.length as unknown as number;
		const topicMessages: TopicMessages[] = [];
		let responseData: IDataObject[];
		try {
			const options = this.getNodeParameter('options', 0) as IDataObject;
			const sendInputData = this.getNodeParameter('sendInputData', 0) as boolean;
			const timeout = options.timeout as number;
			let compression = CompressionTypes.None;
			const acks = (options.acks === true) ? 1 : 0;
			if (options.compression === true) {
				compression = CompressionTypes.GZIP;
			}
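			// Build the kafkajs client configuration from the stored Kafka credentials.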
			const credentials = this.getCredentials('kafka') as IDataObject;
			const brokers = (credentials.brokers as string || '').split(',').map(item => item.trim()) as string[];
			const clientId = credentials.clientId as string;
			const ssl = credentials.ssl as boolean;
			const config: KafkaConfig = {
				clientId,
				brokers,
				ssl,
			};
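			// Add SASL options when the credentials have authentication enabled.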
			if (credentials.authentication === true) {
				if (!(credentials.username && credentials.password)) {
					throw new NodeOperationError(this.getNode(), 'Username and password are required for authentication');
				}
				config.sasl = {
					username: credentials.username as string,
					password: credentials.password as string,
					mechanism: credentials.saslMechanism as string,
				} as SASLOptions;
			}
			const kafka = new apacheKafka(config);
			const producer = kafka.producer();
			await producer.connect();
			let message: string;
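			// Build one Kafka message per input item, using either the item's JSON or the configured static message.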
			for (let i = 0; i < length; i++) {
				if (sendInputData === true) {
					message = JSON.stringify(items[i].json);
				} else {
					message = this.getNodeParameter('message', i) as string;
				}
				const topic = this.getNodeParameter('topic', i) as string;
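				// Headers can be supplied either as a JSON string or as key/value pairs from the UI.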
				const jsonParameters = this.getNodeParameter('jsonParameters', i) as boolean;
				let headers;
				if (jsonParameters === true) {
					headers = this.getNodeParameter('headerParametersJson', i) as string;
					try {
						headers = JSON.parse(headers);
					} catch (exception) {
						throw new NodeOperationError(this.getNode(), 'Headers must be valid JSON');
					}
				} else {
					const values = (this.getNodeParameter('headersUi', i) as IDataObject).headerValues as IDataObject[];
					headers = {};
					if (values !== undefined) {
						for (const value of values) {
							//@ts-ignore
							headers[value.key] = value.value;
						}
					}
				}
				topicMessages.push(
					{
						topic,
						messages: [{
							value: message,
							headers,
						}],
					});
			}
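			// Send every collected message to Kafka in a single batch request.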
			responseData = await producer.sendBatch(
				{
					topicMessages,
					timeout,
					compression,
					acks,
				},
			);
			if (responseData.length === 0) {
				responseData.push({
					success: true,
				});
			}
			await producer.disconnect();
			return [this.helpers.returnJsonArray(responseData)];
		} catch (error) {
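			// With "Continue On Fail" enabled, return the error as item data instead of aborting the workflow.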
			if (this.continueOnFail()) {
				return [this.helpers.returnJsonArray({ error: error.message })];
			} else {
				throw error;
			}
		}
	}
}