n8n/packages/nodes-base/nodes/Kafka/KafkaTrigger.node.ts
import {
	Kafka as apacheKafka,
	KafkaConfig,
	logLevel,
	SASLOptions,
} from 'kafkajs';

import {
	ITriggerFunctions,
} from 'n8n-core';

import {
	IDataObject,
	INodeType,
	INodeTypeDescription,
	ITriggerResponse,
	NodeOperationError,
} from 'n8n-workflow';

export class KafkaTrigger implements INodeType {
	description: INodeTypeDescription = {
		displayName: 'Kafka Trigger',
		name: 'kafkaTrigger',
		icon: 'file:kafka.svg',
		group: ['trigger'],
		version: 1,
		description: 'Consume messages from a Kafka topic',
		defaults: {
			name: 'Kafka Trigger',
			color: '#000000',
		},
		inputs: [],
		outputs: ['main'],
		credentials: [
			{
				name: 'kafka',
				required: true,
			},
		],
		properties: [
			{
				displayName: 'Topic',
				name: 'topic',
				type: 'string',
				default: '',
				required: true,
				placeholder: 'topic-name',
				description: 'Name of the queue or topic to consume from.',
			},
			{
				displayName: 'Group ID',
				name: 'groupId',
				type: 'string',
				default: '',
				required: true,
				placeholder: 'n8n-kafka',
				description: 'ID of the consumer group.',
			},
			{
				displayName: 'Options',
				name: 'options',
				type: 'collection',
				default: {},
				placeholder: 'Add Option',
				options: [
					{
						displayName: 'Allow Topic Creation',
						name: 'allowAutoTopicCreation',
						type: 'boolean',
						default: false,
						description: 'Allow sending messages to a previously non-existing topic.',
					},
					{
						displayName: 'JSON Parse Message',
						name: 'jsonParseMessage',
						type: 'boolean',
						default: false,
						description: 'Try to parse the message to an object.',
					},
					{
						displayName: 'Only Message',
						name: 'onlyMessage',
						type: 'boolean',
						displayOptions: {
							show: {
								jsonParseMessage: [
									true,
								],
							},
						},
						default: false,
						description: 'Returns only the message property.',
					},
					{
						displayName: 'Session Timeout',
						name: 'sessionTimeout',
						type: 'number',
						default: 30000,
						description: 'The time to await a response in ms.',
					},
				],
			},
		],
	};
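
	// Sets up the Kafka consumer and emits one workflow execution per consumed message.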
	async trigger(this: ITriggerFunctions): Promise<ITriggerResponse> {
		const topic = this.getNodeParameter('topic') as string;
		const groupId = this.getNodeParameter('groupId') as string;

		const credentials = this.getCredentials('kafka') as IDataObject;

		const brokers = (credentials.brokers as string || '').split(',').map(item => item.trim());
		const clientId = credentials.clientId as string;
		const ssl = credentials.ssl as boolean;

		const config: KafkaConfig = {
			clientId,
			brokers,
			ssl,
			logLevel: logLevel.ERROR,
		};
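
		// Only add a SASL config when the stored credentials have authentication enabled.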
		if (credentials.authentication === true) {
			if (!(credentials.username && credentials.password)) {
				throw new NodeOperationError(this.getNode(), 'Username and password are required for authentication');
			}
			config.sasl = {
				username: credentials.username as string,
				password: credentials.password as string,
				mechanism: credentials.saslMechanism as string,
			} as SASLOptions;
		}

		const kafka = new apacheKafka(config);

		const consumer = kafka.consumer({ groupId });

		await consumer.connect();
		await consumer.subscribe({ topic, fromBeginning: true });

		const self = this;

		const options = this.getNodeParameter('options', {}) as IDataObject;
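
		// Each consumed message is returned as a single item; when "JSON Parse Message"
		// is enabled the raw value is parsed, otherwise the plain string is passed through.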
		const startConsumer = async () => {
			await consumer.run({
				eachMessage: async ({ topic, message }) => {

					let data: IDataObject = {};
					let value = message.value?.toString() as string;

					if (options.jsonParseMessage) {
						try {
							value = JSON.parse(value);
						} catch (error) {
							// If the message is not valid JSON, keep the raw string value.
						}
					}

					data.message = value;
					data.topic = topic;

					if (options.onlyMessage) {
						//@ts-ignore
						data = value;
					}

					self.emit([self.helpers.returnJsonArray([data])]);
				},
			});
		};

		startConsumer();

// The "closeFunction" function gets called by n8n whenever
// the workflow gets deactivated and can so clean up.
async function closeFunction() {
await consumer.disconnect();
}
// The "manualTriggerFunction" function gets called by n8n
// when a user is in the workflow editor and starts the
// workflow manually. So the function has to make sure that
// the emit() gets called with similar data like when it
// would trigger by itself so that the user knows what data
// to expect.
async function manualTriggerFunction() {
startConsumer();
}
return {
closeFunction,
manualTriggerFunction,
};
}
}