Add Schema Registry to the Kafka producer
parent 894ed83efc
commit 4af53d1b95
@@ -6,6 +6,8 @@ import {
 	TopicMessages,
 } from 'kafkajs';
 
+import { SchemaRegistry } from '@kafkajs/confluent-schema-registry';
+
 import {
 	IExecuteFunctions,
 } from 'n8n-core';
@@ -74,6 +76,43 @@ export class Kafka implements INodeType {
 				type: 'boolean',
 				default: false,
 			},
+			{
+				displayName: 'Use Schema Registry',
+				name: 'useSchemaRegistry',
+				type: 'boolean',
+				default: false,
+				description: 'Use Apache Avro serialization format and Confluent\'s wire format.',
+			},
+			{
+				displayName: 'Schema Registry URL',
+				name: 'schemaRegistryUrl',
+				type: 'string',
+				required: true,
+				displayOptions: {
+					show: {
+						useSchemaRegistry: [
+							true,
+						],
+					},
+				},
+				default: '',
+				description: 'URL of the schema registry.',
+			},
+			{
+				displayName: 'Event Name',
+				name: 'eventName',
+				type: 'string',
+				required: true,
+				displayOptions: {
+					show: {
+						useSchemaRegistry: [
+							true,
+						],
+					},
+				},
+				default: '',
+				description: 'Namespace and Name of Schema in Schema Registry (namespace.name).',
+			},
 			{
 				displayName: 'Headers',
 				name: 'headersUi',
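The "Event Name" value is used as the schema registry subject, so a schema has to exist under that subject before the node can resolve its latest id. As a loose illustration (not part of this commit), here is a sketch of registering an Avro schema with @kafkajs/confluent-schema-registry; the registry URL, namespace, and record name are assumptions:

```typescript
// Sketch only: registering an Avro schema so that the node's eventName
// (used as the registry subject, e.g. "com.example.PageView") resolves to a schema id.
// The registry URL and subject below are illustrative assumptions.
import { SchemaRegistry, SchemaType } from '@kafkajs/confluent-schema-registry';

async function registerExampleSchema(): Promise<number> {
	const registry = new SchemaRegistry({ host: 'http://localhost:8081' });

	const schema = JSON.stringify({
		type: 'record',
		namespace: 'com.example',
		name: 'PageView',
		fields: [
			{ name: 'url', type: 'string' },
			{ name: 'userId', type: 'string' },
		],
	});

	// Registers the schema under an explicit subject; the returned id is what
	// getLatestSchemaId('com.example.PageView') would later resolve to.
	const { id } = await registry.register(
		{ type: SchemaType.AVRO, schema },
		{ subject: 'com.example.PageView' },
	);

	return id;
}
```

For Avro schemas the library can also derive the subject from the schema's namespace and name, which is the namespace.name form the parameter description asks for.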
@@ -170,6 +209,8 @@ export class Kafka implements INodeType {
 		const options = this.getNodeParameter('options', 0) as IDataObject;
 		const sendInputData = this.getNodeParameter('sendInputData', 0) as boolean;
 
+		const useSchemaRegistry = this.getNodeParameter('useSchemaRegistry', 0) as boolean;
+
 		const timeout = options.timeout as number;
 
 		let compression = CompressionTypes.None;
@@ -211,7 +252,7 @@ export class Kafka implements INodeType {
 
 		await producer.connect();
 
-		let message: string;
+		let message: string | Buffer;
 
 		for (let i = 0; i < length; i++) {
 			if (sendInputData === true) {
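Widening `message` from `string` to `string | Buffer` matters because the Avro wire-format payload produced by the registry client is binary, and kafkajs accepts either form as a message value. A tiny illustration (the payload bytes here are made up):

```typescript
// Sketch: kafkajs accepts both string and Buffer values for a message,
// which is why `message` is widened to `string | Buffer` once Avro encoding is possible.
import { Message } from 'kafkajs';

const asJsonString: Message = { value: JSON.stringify({ url: '/home', userId: '42' }) };
const asAvroBuffer: Message = { value: Buffer.from([0, 0, 0, 0, 1]) }; // illustrative bytes only
```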
@@ -220,6 +261,18 @@ export class Kafka implements INodeType {
 				message = this.getNodeParameter('message', i) as string;
 			}
 
+			if (useSchemaRegistry) {
+				try {
+					const schemaRegistryUrl = this.getNodeParameter('schemaRegistryUrl', 0) as string;
+					const eventName = this.getNodeParameter('eventName', 0) as string;
+
+					const registry = new SchemaRegistry({ host: schemaRegistryUrl });
+					const id = await registry.getLatestSchemaId(eventName);
+
+					message = await registry.encode(id, JSON.parse(message));
+				} catch (error) {}
+			}
+
 			const topic = this.getNodeParameter('topic', i) as string;
 
 			const jsonParameters = this.getNodeParameter('jsonParameters', i) as boolean;
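Stripped of the n8n plumbing, the added branch boils down to: resolve the latest schema id for the configured subject, encode the JSON message into the Confluent wire format, and hand the resulting Buffer to the kafkajs producer. Note that the commit swallows encoding failures with an empty catch and falls back to sending the raw string. A minimal end-to-end sketch under assumed broker, registry, topic, and subject values:

```typescript
// Minimal sketch of the flow the commit adds, outside of n8n.
// Broker address, registry URL, topic and subject names are assumptions for illustration.
import { Kafka, CompressionTypes } from 'kafkajs';
import { SchemaRegistry } from '@kafkajs/confluent-schema-registry';

async function sendEncoded() {
	const kafka = new Kafka({ brokers: ['localhost:9092'], clientId: 'schema-registry-example' });
	const producer = kafka.producer();
	const registry = new SchemaRegistry({ host: 'http://localhost:8081' });

	await producer.connect();

	// Resolve the newest schema registered under the subject (the node's "Event Name").
	const id = await registry.getLatestSchemaId('com.example.PageView');

	// encode() returns a Buffer in the Confluent wire format
	// (magic byte, 4-byte schema id, Avro binary payload).
	const value = await registry.encode(id, { url: '/home', userId: '42' });

	await producer.send({
		topic: 'page-views',
		compression: CompressionTypes.None,
		messages: [{ value }],
	});

	await producer.disconnect();
}
```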