Mirror of https://github.com/n8n-io/n8n.git, synced 2025-01-12 05:17:28 -08:00
🔀 Merge branch 'Add-schema-registry-into-kafka' of https://github.com/rgeorgel/n8n into rgeorgel-Add-schema-registry-into-kafka
Commit fb6c243edd
Kafka.node.ts
@@ -6,6 +6,8 @@ import {
 	TopicMessages,
 } from 'kafkajs';
 
+import { SchemaRegistry } from '@kafkajs/confluent-schema-registry';
+
 import {
 	IExecuteFunctions,
 } from 'n8n-core';
@@ -73,6 +75,44 @@ export class Kafka implements INodeType {
 				type: 'boolean',
 				default: false,
 			},
+			{
+				displayName: 'Use Schema Registry',
+				name: 'useSchemaRegistry',
+				type: 'boolean',
+				default: false,
+				description: 'Use Confluent Schema Registry.',
+			},
+			{
+				displayName: 'Schema Registry URL',
+				name: 'schemaRegistryUrl',
+				type: 'string',
+				required: true,
+				displayOptions: {
+					show: {
+						useSchemaRegistry: [
+							true,
+						],
+					},
+				},
+				placeholder: 'https://schema-registry-domain:8081',
+				default: '',
+				description: 'URL of the schema registry.',
+			},
+			{
+				displayName: 'Event Name',
+				name: 'eventName',
+				type: 'string',
+				required: true,
+				displayOptions: {
+					show: {
+						useSchemaRegistry: [
+							true,
+						],
+					},
+				},
+				default: '',
+				description: 'Namespace and Name of Schema in Schema Registry (namespace.name).',
+			},
 			{
 				displayName: 'Headers',
 				name: 'headersUi',
@@ -170,6 +210,8 @@ export class Kafka implements INodeType {
 		const options = this.getNodeParameter('options', 0) as IDataObject;
 		const sendInputData = this.getNodeParameter('sendInputData', 0) as boolean;
 
+		const useSchemaRegistry = this.getNodeParameter('useSchemaRegistry', 0) as boolean;
+
 		const timeout = options.timeout as number;
 
 		let compression = CompressionTypes.None;
@@ -211,7 +253,7 @@ export class Kafka implements INodeType {
 
 		await producer.connect();
 
-		let message: string;
+		let message: string | Buffer;
 
 		for (let i = 0; i < length; i++) {
 			if (sendInputData === true) {
@@ -220,6 +262,20 @@ export class Kafka implements INodeType {
 				message = this.getNodeParameter('message', i) as string;
 			}
 
+			if (useSchemaRegistry) {
+				try {
+					const schemaRegistryUrl = this.getNodeParameter('schemaRegistryUrl', 0) as string;
+					const eventName = this.getNodeParameter('eventName', 0) as string;
+
+					const registry = new SchemaRegistry({ host: schemaRegistryUrl });
+					const id = await registry.getLatestSchemaId(eventName);
+
+					message = await registry.encode(id, JSON.parse(message));
+				} catch (exception) {
+					throw new NodeOperationError(this.getNode(), 'Verify your Schema Registry configuration');
+				}
+			}
+
 			const topic = this.getNodeParameter('topic', i) as string;
 
 			const jsonParameters = this.getNodeParameter('jsonParameters', i) as boolean;
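For reference, the producer-side change above reduces to the following standalone sketch of the @kafkajs/confluent-schema-registry flow (the broker list, registry URL, subject name 'my.namespace.MyEvent' and topic below are placeholders, not values from this commit): resolve the latest schema id for the subject, Avro-encode the JSON payload into a Buffer, and send that Buffer with kafkajs. The Buffer return value is also why message is widened from string to string | Buffer in the node.

import { Kafka } from 'kafkajs';
import { SchemaRegistry } from '@kafkajs/confluent-schema-registry';

// Placeholder connection details -- adjust to your environment.
const kafka = new Kafka({ brokers: ['localhost:9092'] });
const registry = new SchemaRegistry({ host: 'https://schema-registry-domain:8081' });

async function produceEncoded() {
	const producer = kafka.producer();
	await producer.connect();

	// Look up the latest schema id registered under the subject
	// (what the node calls "Event Name", i.e. namespace.name).
	const id = await registry.getLatestSchemaId('my.namespace.MyEvent');

	// encode() returns a Buffer in the Confluent wire format
	// (magic byte + schema id + Avro-serialized payload).
	// The payload object is illustrative and must match the registered schema.
	const value = await registry.encode(id, { example: 'payload' });

	await producer.send({
		topic: 'my-topic',
		messages: [{ value }],
	});

	await producer.disconnect();
}

produceEncoded().catch(console.error);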
KafkaTrigger.node.ts
@@ -5,6 +5,8 @@ import {
 	SASLOptions,
 } from 'kafkajs';
 
+import { SchemaRegistry } from '@kafkajs/confluent-schema-registry';
+
 import {
 	ITriggerFunctions,
 } from 'n8n-core';
@@ -55,6 +57,29 @@ export class KafkaTrigger implements INodeType {
 			placeholder: 'n8n-kafka',
 			description: 'ID of the consumer group.',
 		},
+		{
+			displayName: 'Use Schema Registry',
+			name: 'useSchemaRegistry',
+			type: 'boolean',
+			default: false,
+			description: 'Use Confluent Schema Registry.',
+		},
+		{
+			displayName: 'Schema Registry URL',
+			name: 'schemaRegistryUrl',
+			type: 'string',
+			required: true,
+			displayOptions: {
+				show: {
+					useSchemaRegistry: [
+						true,
+					],
+				},
+			},
+			placeholder: 'https://schema-registry-domain:8081',
+			default: '',
+			description: 'URL of the schema registry.',
+		},
 		{
 			displayName: 'Options',
 			name: 'options',
@@ -104,6 +129,13 @@ export class KafkaTrigger implements INodeType {
 					default: 30000,
 					description: 'The time to await a response in ms.',
 				},
+				{
+					displayName: 'Return headers',
+					name: 'returnHeaders',
+					type: 'boolean',
+					default: false,
+					description: 'Return the headers received from Kafka',
+				},
 			],
 		},
 	],
@@ -153,6 +185,10 @@ export class KafkaTrigger implements INodeType {
 
 		const self = this;
 
+		const useSchemaRegistry = this.getNodeParameter('useSchemaRegistry', 0) as boolean;
+
+		const schemaRegistryUrl = this.getNodeParameter('schemaRegistryUrl', 0) as string;
+
 		const startConsumer = async () => {
 			await consumer.run({
 				eachMessage: async ({ topic, message }) => {
@@ -166,6 +202,23 @@ export class KafkaTrigger implements INodeType {
 						} catch (error) { }
 					}
 
+					if (useSchemaRegistry) {
+						try {
+							const registry = new SchemaRegistry({ host: schemaRegistryUrl });
+							value = await registry.decode(message.value as Buffer);
+						} catch (error) { }
+					}
+
+					if (options.returnHeaders) {
+						const headers: {[key: string]: string} = {};
+						for (const key in message.headers) {
+							const header = message.headers[key];
+							headers[key] = header?.toString('utf8') || '';
+						}
+
+						data.headers = headers;
+					}
+
 					data.message = value;
 					data.topic = topic;
 
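The consumer-side decode added above can be exercised outside n8n with a sketch like this (broker, group id and topic are placeholders): registry.decode() reads the schema id embedded in the Confluent wire format, so no subject name is required, and the headers loop mirrors the new 'Return headers' option by converting each header Buffer to a UTF-8 string.

import { Kafka } from 'kafkajs';
import { SchemaRegistry } from '@kafkajs/confluent-schema-registry';

// Placeholder connection details -- adjust to your environment.
const kafka = new Kafka({ brokers: ['localhost:9092'] });
const registry = new SchemaRegistry({ host: 'https://schema-registry-domain:8081' });

async function consumeDecoded() {
	const consumer = kafka.consumer({ groupId: 'n8n-kafka' });
	await consumer.connect();
	await consumer.subscribe({ topic: 'my-topic' });

	await consumer.run({
		eachMessage: async ({ topic, message }) => {
			// decode() reads the schema id from the message's wire-format header,
			// so only the raw value Buffer is needed here.
			const value = await registry.decode(message.value as Buffer);

			// Mirror the node's optional header handling: header values arrive
			// as Buffers and toString() defaults to UTF-8.
			const headers: { [key: string]: string } = {};
			for (const key in message.headers) {
				headers[key] = message.headers[key]?.toString() ?? '';
			}

			console.log({ topic, value, headers });
		},
	});
}

consumeDecoded().catch(console.error);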
package.json
@@ -696,6 +696,7 @@
 		"typescript": "~4.3.5"
 	},
 	"dependencies": {
+		"@kafkajs/confluent-schema-registry": "1.0.6",
 		"@types/lossless-json": "^1.0.0",
 		"@types/promise-ftp": "^1.3.4",
 		"@types/snowflake-sdk": "^1.5.1",
tsconfig.json
@@ -1,6 +1,7 @@
 {
 	"compilerOptions": {
 		"lib": [
+			"dom",
 			"es2017",
 			"es2019.array"
 		],