diff --git a/docker/kafka-ui.yaml b/docker/kafka-ui.yaml
index b62dfd37da..d8e5fc7837 100644
--- a/docker/kafka-ui.yaml
+++ b/docker/kafka-ui.yaml
@@ -72,6 +72,8 @@ services:
 
   schemaregistry0:
     image: confluentinc/cp-schema-registry:5.1.0
+    ports:
+      - 8085:8085
     depends_on:
       - zookeeper0
       - kafka0
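
With the port published, the Schema Registry's REST API becomes reachable from the host at http://localhost:8085 (assuming, as is conventional for cp-schema-registry services in this compose file, that the registry listens on 8085 inside the container). A minimal sketch of a kafka-ui cluster entry pointing at it; only `schemaRegistry` comes from ClustersProperties below, the surrounding keys are illustrative:

```yaml
# sketch: application config consuming the newly published registry port
kafka:
  clusters:
    - name: local
      schemaRegistry: http://localhost:8085
```
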
diff --git a/kafka-ui-api/pom.xml b/kafka-ui-api/pom.xml
index 9235bcb983..ad43299fcf 100644
--- a/kafka-ui-api/pom.xml
+++ b/kafka-ui-api/pom.xml
@@ -87,6 +87,12 @@
             <artifactId>kafka-avro-serializer</artifactId>
             <version>${confluent.version}</version>
         </dependency>
+        <dependency>
+            <groupId>io.confluent</groupId>
+            <artifactId>kafka-protobuf-serializer</artifactId>
+            <version>${confluent.version}</version>
+        </dependency>
+
         <dependency>
             <groupId>org.apache.avro</groupId>
             <artifactId>avro</artifactId>
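
The new kafka-protobuf-serializer dependency is what supplies the ProtobufSchema and ProtobufSchemaUtils classes used by the deserializer added below. A standalone sketch of the core parsing step, with a made-up schema string:

```java
import com.google.protobuf.Descriptors;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;

// sketch: parse .proto source into a schema and resolve a descriptor for dynamic parsing
public class ProtobufSchemaDemo {
    public static void main(String[] args) {
        ProtobufSchema schema = new ProtobufSchema(
                "syntax = \"proto3\"; message Ping { string id = 1; }");
        Descriptors.Descriptor descriptor = schema.toDescriptor();
        System.out.println(descriptor.getName()); // prints: Ping
    }
}
```
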
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/config/ClustersProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/config/ClustersProperties.java
index 851714c4b0..b140de24d8 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/config/ClustersProperties.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/config/ClustersProperties.java
@@ -21,6 +21,8 @@ public class ClustersProperties {
         String zookeeper;
         String schemaRegistry;
         String schemaNameTemplate = "%s-value";
+        String protobufFile;
+        String protobufMessageName;
         int jmxPort;
     }
 }
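
The two new per-cluster properties are bound from the application config (presumably through the same @ConfigurationProperties binding as the existing fields). A hedged sketch; the file path and message name are placeholders:

```yaml
kafka:
  clusters:
    - name: local
      # placeholder path to a .proto file readable by kafka-ui
      protobufFile: /etc/kafka-ui/topic-schema.proto
      # placeholder message name, handed to ProtobufSchema.copy() below
      protobufMessageName: MyRecord
```
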
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/deserialization/DeserializationService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/deserialization/DeserializationService.java
index 1fdeddca24..4ebf5dca39 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/deserialization/DeserializationService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/deserialization/DeserializationService.java
@@ -29,7 +29,15 @@ public class DeserializationService {
     }
 
     private RecordDeserializer createRecordDeserializerForCluster(KafkaCluster cluster) {
-        return new SchemaRegistryRecordDeserializer(cluster, objectMapper);
+        try {
+            if (cluster.getProtobufFile() != null) {
+                return new ProtobufFileRecordDeserializer(cluster.getProtobufFile(), cluster.getProtobufMessageName(), objectMapper);
+            } else {
+                return new SchemaRegistryRecordDeserializer(cluster, objectMapper);
+            }
+        } catch (Throwable e) {
+            throw new RuntimeException("Can't init deserializer", e);
+        }
     }
 
     public RecordDeserializer getRecordDeserializerForCluster(KafkaCluster cluster) {
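
A configured protobufFile now short-circuits the Schema Registry path for the whole cluster. A hypothetical illustration of the branch, assuming KafkaCluster exposes a Lombok-style builder (not shown in this diff):

```java
// hypothetical: protobufFile set -> ProtobufFileRecordDeserializer is chosen
KafkaCluster cluster = KafkaCluster.builder()
        .protobufFile(Path.of("/etc/kafka-ui/topic-schema.proto"))
        .protobufMessageName("MyRecord")
        .build();
RecordDeserializer deserializer = deserializationService.getRecordDeserializerForCluster(cluster);
// with protobufFile left null, SchemaRegistryRecordDeserializer would be returned instead
```
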
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/deserialization/ProtobufFileRecordDeserializer.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/deserialization/ProtobufFileRecordDeserializer.java
new file mode 100644
index 0000000000..60a2f17788
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/deserialization/ProtobufFileRecordDeserializer.java
@@ -0,0 +1,46 @@
+package com.provectus.kafka.ui.cluster.deserialization;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.protobuf.DynamicMessage;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaUtils;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.common.utils.Bytes;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+public class ProtobufFileRecordDeserializer implements RecordDeserializer {
+    private final ProtobufSchema protobufSchema;
+    private final ObjectMapper objectMapper;
+
+    public ProtobufFileRecordDeserializer(Path protobufSchemaPath, String messageName, ObjectMapper objectMapper) throws IOException {
+        this.objectMapper = objectMapper;
+        // join with an explicit newline: joining() would concatenate lines, so a "//" comment in the .proto could swallow the rest of the schema
+        final String schemaString = Files.lines(protobufSchemaPath).collect(Collectors.joining("\n"));
+        this.protobufSchema = new ProtobufSchema(schemaString).copy(messageName);
+    }
+
+    @Override
+    public Object deserialize(ConsumerRecord<Bytes, Bytes> record) {
+        try {
+            final DynamicMessage message = DynamicMessage.parseFrom(
+                    protobufSchema.toDescriptor(),
+                    new ByteArrayInputStream(record.value().get())
+            );
+            byte[] bytes = ProtobufSchemaUtils.toJson(message);
+            return parseJson(bytes);
+        } catch (Throwable e) {
+            throw new RuntimeException("Failed to parse record from topic " + record.topic(), e);
+        }
+    }
+
+    private Object parseJson(byte[] bytes) throws IOException {
+        return objectMapper.readValue(bytes, new TypeReference<Map<String, Object>>() {});
+    }
+}
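
A minimal round-trip sketch of the new deserializer: write a throwaway .proto, serialize a DynamicMessage with the same descriptor, and feed it back through deserialize(). All file, topic, and message names are illustrative, and the demo is assumed to live in (or import from) the deserialization package:

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.protobuf.Descriptors;
import com.google.protobuf.DynamicMessage;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.utils.Bytes;

import java.nio.file.Files;
import java.nio.file.Path;

public class ProtobufFileRecordDeserializerDemo {
    public static void main(String[] args) throws Exception {
        // throwaway schema file
        Path proto = Files.createTempFile("demo", ".proto");
        Files.write(proto, "syntax = \"proto3\";\nmessage Greeting {\n  string name = 1;\n}\n".getBytes());

        ProtobufFileRecordDeserializer deserializer =
                new ProtobufFileRecordDeserializer(proto, "Greeting", new ObjectMapper());

        // serialize a message using the same descriptor the deserializer resolved
        ProtobufSchema schema = new ProtobufSchema(new String(Files.readAllBytes(proto))).copy("Greeting");
        Descriptors.Descriptor descriptor = schema.toDescriptor();
        DynamicMessage message = DynamicMessage.newBuilder(descriptor)
                .setField(descriptor.findFieldByName("name"), "world")
                .build();

        ConsumerRecord<Bytes, Bytes> record = new ConsumerRecord<>(
                "greetings", 0, 0L, Bytes.wrap("k".getBytes()), Bytes.wrap(message.toByteArray()));
        System.out.println(deserializer.deserialize(record)); // {name=world}
    }
}
```
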