
ISSUE-502 Send messages & avro key deserializer (#582)

* ISSUE-502 Send messages & avro key deserializer

* Fixed sonarcube issue

* Fixed sonarcube issues

* Schema endpoint & fixes

* fixed sonar issue
German Osin, 4 years ago
parent
commit
1460fef68e
47 changed files with 1672 additions and 337 deletions
  1. +1 -0    kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
  2. +19 -0   kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/MessagesController.java
  3. +0 -46   kafka-ui-api/src/main/java/com/provectus/kafka/ui/deserialization/ProtobufFileRecordDeserializer.java
  4. +0 -9    kafka-ui-api/src/main/java/com/provectus/kafka/ui/deserialization/RecordDeserializer.java
  5. +0 -232  kafka-ui-api/src/main/java/com/provectus/kafka/ui/deserialization/SchemaRegistryRecordDeserializer.java
  6. +0 -19   kafka-ui-api/src/main/java/com/provectus/kafka/ui/deserialization/SimpleRecordDeserializer.java
  7. +1 -0    kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java
  8. +7 -6    kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/DeserializationService.java
  9. +103 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/ProtobufFileRecordSerDe.java
  10. +18 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/RecordSerDe.java
  11. +41 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/SimpleRecordSerDe.java
  12. +38 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/AvroMessageFormatter.java
  13. +44 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/AvroMessageReader.java
  14. +29 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/JsonMessageFormatter.java
  15. +40 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/JsonMessageReader.java
  16. +8 -0   kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/MessageFormat.java
  17. +5 -0   kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/MessageFormatter.java
  18. +34 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/MessageReader.java
  19. +27 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/ProtobufMessageFormatter.java
  20. +41 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/ProtobufMessageReader.java
  21. +278 -0 kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/SchemaRegistryRecordSerDe.java
  22. +18 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/StringMessageFormatter.java
  23. +24 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java
  24. +6 -6   kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumingService.java
  25. +35 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java
  26. +7 -7   kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ClusterUtil.java
  27. +1 -1   kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/OffsetsSeek.java
  28. +0 -5   kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/OffsetsSeekForward.java
  29. +21 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/ArrayFieldSchema.java
  30. +137 -0 kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java
  31. +24 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/EnumJsonType.java
  32. +8 -0   kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/FieldSchema.java
  33. +66 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonSchema.java
  34. +7 -0   kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonSchemaConverter.java
  35. +41 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonType.java
  36. +22 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/MapFieldSchema.java
  37. +46 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/ObjectFieldSchema.java
  38. +27 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/OneOfFieldSchema.java
  39. +134 -0 kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/ProtobufSchemaConverter.java
  40. +18 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/RefFieldSchema.java
  41. +17 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/SimpleFieldSchema.java
  42. +21 -0  kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/SimpleJsonType.java
  43. +7 -5   kafka-ui-api/src/test/java/com/provectus/kafka/ui/serde/SchemaRegistryRecordDeserializerTest.java
  44. +1 -1   kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/OffsetsSeekTest.java
  45. +91 -0  kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverterTest.java
  46. +63 -0  kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/ProtobufSchemaConverterTest.java
  47. +96 -0  kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml

+ 1 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java

@@ -21,6 +21,7 @@ public class ClustersProperties {
     String zookeeper;
     String schemaRegistry;
     String schemaNameTemplate = "%s-value";
+    String keySchemaNameTemplate = "%s-key";
     String protobufFile;
     String protobufMessageName;
     List<ConnectCluster> kafkaConnect;
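
The new keySchemaNameTemplate mirrors the existing schemaNameTemplate, but resolves the Schema Registry subject used for message keys. A minimal sketch of how the templates are applied (the topic name "orders" is only illustrative):

// With the default templates, a topic named "orders" resolves to
// the Schema Registry subjects "orders-key" and "orders-value".
String keySubject = String.format("%s-key", "orders");     // -> "orders-key"
String valueSubject = String.format("%s-value", "orders"); // -> "orders-value"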

+ 19 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/MessagesController.java

@@ -2,9 +2,11 @@ package com.provectus.kafka.ui.controller;
 
 import com.provectus.kafka.ui.api.MessagesApi;
 import com.provectus.kafka.ui.model.ConsumerPosition;
+import com.provectus.kafka.ui.model.CreateTopicMessage;
 import com.provectus.kafka.ui.model.SeekDirection;
 import com.provectus.kafka.ui.model.SeekType;
 import com.provectus.kafka.ui.model.TopicMessage;
+import com.provectus.kafka.ui.model.TopicMessageSchema;
 import com.provectus.kafka.ui.service.ClusterService;
 import java.util.Collections;
 import java.util.List;
@@ -48,6 +50,23 @@ public class MessagesController implements MessagesApi {
             .ok(clusterService.getMessages(clusterName, topicName, consumerPosition, q, limit)));
   }
 
+  @Override
+  public Mono<ResponseEntity<TopicMessageSchema>> getTopicSchema(
+      String clusterName, String topicName, ServerWebExchange exchange) {
+    return Mono.just(clusterService.getTopicSchema(clusterName, topicName))
+        .map(ResponseEntity::ok);
+  }
+
+  @Override
+  public Mono<ResponseEntity<Void>> sendTopicMessages(
+      String clusterName, String topicName, @Valid Mono<CreateTopicMessage> createTopicMessage,
+      ServerWebExchange exchange) {
+    return createTopicMessage.flatMap(msg ->
+        clusterService.sendMessage(clusterName, topicName, msg)
+    ).map(ResponseEntity::ok);
+  }
+
+
   private Mono<ConsumerPosition> parseConsumerPosition(
       SeekType seekType, List<String> seekTo,  SeekDirection seekDirection) {
     return Mono.justOrEmpty(seekTo)
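
The controller gains two endpoints: one returning the JSON schema that messages for a topic should conform to, and one accepting a message to produce. The exact paths are defined in kafka-ui-api.yaml, which is only summarized in this change set, so the paths below are assumptions; a client-side sketch using Spring's WebClient:

WebClient http = WebClient.create("http://localhost:8080");

// Assumed path: GET /api/clusters/{cluster}/topics/{topic}/messages/schema
Mono<TopicMessageSchema> schema = http.get()
    .uri("/api/clusters/{c}/topics/{t}/messages/schema", "local", "orders")
    .retrieve()
    .bodyToMono(TopicMessageSchema.class);

// Assumed path: POST /api/clusters/{cluster}/topics/{topic}/messages
// Field names follow the CreateTopicMessage getters used in KafkaService.
Mono<Void> sent = http.post()
    .uri("/api/clusters/{c}/topics/{t}/messages", "local", "orders")
    .bodyValue(Map.of("key", "k1", "content", Map.of("id", 1)))
    .retrieve()
    .bodyToMono(Void.class);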

+ 0 - 46
kafka-ui-api/src/main/java/com/provectus/kafka/ui/deserialization/ProtobufFileRecordDeserializer.java

@@ -1,46 +0,0 @@
-package com.provectus.kafka.ui.deserialization;
-
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.protobuf.DynamicMessage;
-import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
-import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaUtils;
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Map;
-import java.util.stream.Collectors;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.common.utils.Bytes;
-
-public class ProtobufFileRecordDeserializer implements RecordDeserializer {
-  private final ProtobufSchema protobufSchema;
-  private final ObjectMapper objectMapper;
-
-  public ProtobufFileRecordDeserializer(Path protobufSchemaPath, String messageName,
-                                        ObjectMapper objectMapper) throws IOException {
-    this.objectMapper = objectMapper;
-    final String schemaString = Files.lines(protobufSchemaPath).collect(Collectors.joining());
-    this.protobufSchema = new ProtobufSchema(schemaString).copy(messageName);
-  }
-
-  @Override
-  public Object deserialize(ConsumerRecord<Bytes, Bytes> msg) {
-    try {
-      final var message = DynamicMessage.parseFrom(
-          protobufSchema.toDescriptor(),
-          new ByteArrayInputStream(msg.value().get())
-      );
-      byte[] bytes = ProtobufSchemaUtils.toJson(message);
-      return parseJson(bytes);
-    } catch (Throwable e) {
-      throw new RuntimeException("Failed to parse record from topic " + msg.topic(), e);
-    }
-  }
-
-  private Object parseJson(byte[] bytes) throws IOException {
-    return objectMapper.readValue(bytes, new TypeReference<Map<String, Object>>() {
-    });
-  }
-}

+ 0 - 9
kafka-ui-api/src/main/java/com/provectus/kafka/ui/deserialization/RecordDeserializer.java

@@ -1,9 +0,0 @@
-package com.provectus.kafka.ui.deserialization;
-
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.common.utils.Bytes;
-
-public interface RecordDeserializer {
-
-  Object deserialize(ConsumerRecord<Bytes, Bytes> msg);
-}

+ 0 - 232
kafka-ui-api/src/main/java/com/provectus/kafka/ui/deserialization/SchemaRegistryRecordDeserializer.java

@@ -1,232 +0,0 @@
-package com.provectus.kafka.ui.deserialization;
-
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.protobuf.Message;
-import com.provectus.kafka.ui.model.KafkaCluster;
-import io.confluent.kafka.schemaregistry.ParsedSchema;
-import io.confluent.kafka.schemaregistry.SchemaProvider;
-import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider;
-import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
-import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
-import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
-import io.confluent.kafka.schemaregistry.client.rest.entities.Schema;
-import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
-import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider;
-import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaUtils;
-import io.confluent.kafka.serializers.KafkaAvroDeserializer;
-import io.confluent.kafka.serializers.protobuf.KafkaProtobufDeserializer;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.concurrent.ConcurrentHashMap;
-import lombok.SneakyThrows;
-import lombok.extern.log4j.Log4j2;
-import org.apache.avro.generic.GenericRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.common.errors.SerializationException;
-import org.apache.kafka.common.serialization.StringDeserializer;
-import org.apache.kafka.common.utils.Bytes;
-
-@Log4j2
-public class SchemaRegistryRecordDeserializer implements RecordDeserializer {
-
-  private static final int CLIENT_IDENTITY_MAP_CAPACITY = 100;
-
-  private final KafkaCluster cluster;
-  private final SchemaRegistryClient schemaRegistryClient;
-  private final KafkaAvroDeserializer avroDeserializer;
-  private final KafkaProtobufDeserializer<?> protobufDeserializer;
-  private final ObjectMapper objectMapper;
-  private final StringDeserializer stringDeserializer;
-
-  private final Map<String, MessageFormat> topicFormatMap = new ConcurrentHashMap<>();
-
-  public SchemaRegistryRecordDeserializer(KafkaCluster cluster, ObjectMapper objectMapper) {
-    this.cluster = cluster;
-    this.objectMapper = objectMapper;
-
-    this.schemaRegistryClient = Optional.ofNullable(cluster.getSchemaRegistry())
-        .map(schemaRegistryUrl -> {
-              List<SchemaProvider> schemaProviders =
-                  List.of(new AvroSchemaProvider(), new ProtobufSchemaProvider());
-              return new CachedSchemaRegistryClient(
-                  Collections.singletonList(schemaRegistryUrl),
-                  CLIENT_IDENTITY_MAP_CAPACITY,
-                  schemaProviders,
-                  Collections.emptyMap()
-              );
-            }
-        ).orElse(null);
-
-    this.avroDeserializer = Optional.ofNullable(this.schemaRegistryClient)
-        .map(KafkaAvroDeserializer::new)
-        .orElse(null);
-    this.protobufDeserializer = Optional.ofNullable(this.schemaRegistryClient)
-        .map(KafkaProtobufDeserializer::new)
-        .orElse(null);
-    this.stringDeserializer = new StringDeserializer();
-  }
-
-  public Object deserialize(ConsumerRecord<Bytes, Bytes> record) {
-    MessageFormat format = getMessageFormat(record);
-
-    try {
-      Object parsedValue;
-      switch (format) {
-        case AVRO:
-          parsedValue = parseAvroRecord(record);
-          break;
-        case PROTOBUF:
-          parsedValue = parseProtobufRecord(record);
-          break;
-        case JSON:
-          parsedValue = parseJsonRecord(record);
-          break;
-        case STRING:
-          parsedValue = parseStringRecord(record);
-          break;
-        default:
-          throw new IllegalArgumentException(
-              "Unknown message format " + format + " for topic " + record.topic());
-      }
-      return parsedValue;
-    } catch (IOException e) {
-      throw new RuntimeException("Failed to parse record from topic " + record.topic(), e);
-    }
-  }
-
-  private MessageFormat getMessageFormat(ConsumerRecord<Bytes, Bytes> record) {
-    return topicFormatMap.computeIfAbsent(record.topic(), k -> detectFormat(record));
-  }
-
-  private MessageFormat detectFormat(ConsumerRecord<Bytes, Bytes> msg) {
-    if (schemaRegistryClient != null) {
-      try {
-        final Optional<String> type = getSchemaFromMessage(msg).or(() -> getSchemaBySubject(msg));
-        if (type.isPresent()) {
-          if (type.get().equals(MessageFormat.PROTOBUF.name())) {
-            try {
-              protobufDeserializer.deserialize(msg.topic(), msg.value().get());
-              return MessageFormat.PROTOBUF;
-            } catch (Throwable e) {
-              log.info("Failed to get Protobuf schema for topic {}", msg.topic(), e);
-            }
-          } else if (type.get().equals(MessageFormat.AVRO.name())) {
-            try {
-              avroDeserializer.deserialize(msg.topic(), msg.value().get());
-              return MessageFormat.AVRO;
-            } catch (Throwable e) {
-              log.info("Failed to get Avro schema for topic {}", msg.topic(), e);
-            }
-          } else if (type.get().equals(MessageFormat.JSON.name())) {
-            try {
-              parseJsonRecord(msg);
-              return MessageFormat.JSON;
-            } catch (IOException e) {
-              log.info("Failed to parse json from topic {}", msg.topic());
-            }
-          }
-        }
-      } catch (Exception e) {
-        log.warn("Failed to get Schema for topic {}", msg.topic(), e);
-      }
-    }
-
-    try {
-      parseJsonRecord(msg);
-      return MessageFormat.JSON;
-    } catch (IOException e) {
-      log.info("Failed to parse json from topic {}", msg.topic());
-    }
-
-    return MessageFormat.STRING;
-  }
-
-  @SneakyThrows
-  private Optional<String> getSchemaFromMessage(ConsumerRecord<Bytes, Bytes> msg) {
-    Optional<String> result = Optional.empty();
-    final Bytes value = msg.value();
-    if (value != null) {
-      ByteBuffer buffer = ByteBuffer.wrap(value.get());
-      if (buffer.get() == 0) {
-        int id = buffer.getInt();
-        result = Optional.ofNullable(
-            schemaRegistryClient.getSchemaById(id)
-        ).map(ParsedSchema::schemaType);
-      }
-    }
-    return result;
-  }
-
-  @SneakyThrows
-  private Optional<String> getSchemaBySubject(ConsumerRecord<Bytes, Bytes> msg) {
-    String schemaName = String.format(cluster.getSchemaNameTemplate(), msg.topic());
-    final List<Integer> versions = schemaRegistryClient.getAllVersions(schemaName);
-    if (!versions.isEmpty()) {
-      final Integer version = versions.iterator().next();
-      final String subjectName = String.format(cluster.getSchemaNameTemplate(), msg.topic());
-      final Schema schema = schemaRegistryClient.getByVersion(subjectName, version, false);
-      return Optional.ofNullable(schema).map(Schema::getSchemaType);
-    } else {
-      return Optional.empty();
-    }
-  }
-
-  private Object parseAvroRecord(ConsumerRecord<Bytes, Bytes> msg) throws IOException {
-    String topic = msg.topic();
-    if (msg.value() != null && avroDeserializer != null) {
-      byte[] valueBytes = msg.value().get();
-      GenericRecord avroRecord = (GenericRecord) avroDeserializer.deserialize(topic, valueBytes);
-      byte[] bytes = AvroSchemaUtils.toJson(avroRecord);
-      return parseJson(bytes);
-    } else {
-      return Map.of();
-    }
-  }
-
-  private Object parseProtobufRecord(ConsumerRecord<Bytes, Bytes> msg) throws IOException {
-    String topic = msg.topic();
-    if (msg.value() != null && protobufDeserializer != null) {
-      byte[] valueBytes = msg.value().get();
-      final Message message = protobufDeserializer.deserialize(topic, valueBytes);
-      byte[] bytes = ProtobufSchemaUtils.toJson(message);
-      return parseJson(bytes);
-    } else {
-      return Map.of();
-    }
-  }
-
-  private Object parseJsonRecord(ConsumerRecord<Bytes, Bytes> msg) throws IOException {
-    var value = msg.value();
-    if (value == null) {
-      return Map.of();
-    }
-    byte[] valueBytes = value.get();
-    return parseJson(valueBytes);
-  }
-
-  private Object parseJson(byte[] bytes) throws IOException {
-    return objectMapper.readValue(bytes, new TypeReference<Map<String, Object>>() {
-    });
-  }
-
-  private Object parseStringRecord(ConsumerRecord<Bytes, Bytes> msg) {
-    String topic = msg.topic();
-    if (msg.value() == null) {
-      return Map.of();
-    }
-    byte[] valueBytes = msg.value().get();
-    return stringDeserializer.deserialize(topic, valueBytes);
-  }
-
-  public enum MessageFormat {
-    AVRO,
-    JSON,
-    STRING,
-    PROTOBUF
-  }
-}

+ 0 - 19
kafka-ui-api/src/main/java/com/provectus/kafka/ui/deserialization/SimpleRecordDeserializer.java

@@ -1,19 +0,0 @@
-package com.provectus.kafka.ui.deserialization;
-
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.common.serialization.StringDeserializer;
-import org.apache.kafka.common.utils.Bytes;
-
-public class SimpleRecordDeserializer implements RecordDeserializer {
-
-  private final StringDeserializer stringDeserializer = new StringDeserializer();
-
-  @Override
-  public Object deserialize(ConsumerRecord<Bytes, Bytes> msg) {
-    if (msg.value() != null) {
-      return stringDeserializer.deserialize(msg.topic(), msg.value().get());
-    } else {
-      return "empty";
-    }
-  }
-}

+ 1 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java

@@ -17,6 +17,7 @@ public class KafkaCluster {
   private final String schemaRegistry;
   private final List<KafkaConnectCluster> kafkaConnect;
   private final String schemaNameTemplate;
+  private final String keySchemaNameTemplate;
   private final ServerStatus status;
   private final ServerStatus zookeeperStatus;
   private final InternalClusterMetrics metrics;

+ 7 - 6
kafka-ui-api/src/main/java/com/provectus/kafka/ui/deserialization/DeserializationService.java → kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/DeserializationService.java

@@ -1,7 +1,8 @@
-package com.provectus.kafka.ui.deserialization;
+package com.provectus.kafka.ui.serde;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.serde.schemaregistry.SchemaRegistryRecordSerDe;
 import com.provectus.kafka.ui.service.ClustersStorage;
 import java.util.Map;
 import java.util.stream.Collectors;
@@ -15,7 +16,7 @@ public class DeserializationService {
 
   private final ClustersStorage clustersStorage;
   private final ObjectMapper objectMapper;
-  private Map<String, RecordDeserializer> clusterDeserializers;
+  private Map<String, RecordSerDe> clusterDeserializers;
 
 
   @PostConstruct
@@ -27,20 +28,20 @@ public class DeserializationService {
         ));
   }
 
-  private RecordDeserializer createRecordDeserializerForCluster(KafkaCluster cluster) {
+  private RecordSerDe createRecordDeserializerForCluster(KafkaCluster cluster) {
     try {
       if (cluster.getProtobufFile() != null) {
-        return new ProtobufFileRecordDeserializer(cluster.getProtobufFile(),
+        return new ProtobufFileRecordSerDe(cluster.getProtobufFile(),
             cluster.getProtobufMessageName(), objectMapper);
       } else {
-        return new SchemaRegistryRecordDeserializer(cluster, objectMapper);
+        return new SchemaRegistryRecordSerDe(cluster, objectMapper);
       }
     } catch (Throwable e) {
       throw new RuntimeException("Can't init deserializer", e);
     }
   }
 
-  public RecordDeserializer getRecordDeserializerForCluster(KafkaCluster cluster) {
+  public RecordSerDe getRecordDeserializerForCluster(KafkaCluster cluster) {
     return clusterDeserializers.get(cluster.getName());
   }
 }

+ 103 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/ProtobufFileRecordSerDe.java

@@ -0,0 +1,103 @@
+package com.provectus.kafka.ui.serde;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.protobuf.DynamicMessage;
+import com.google.protobuf.util.JsonFormat;
+import com.provectus.kafka.ui.model.MessageSchema;
+import com.provectus.kafka.ui.model.TopicMessageSchema;
+import com.provectus.kafka.ui.util.jsonschema.JsonSchema;
+import com.provectus.kafka.ui.util.jsonschema.ProtobufSchemaConverter;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaUtils;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Map;
+import java.util.Optional;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.common.utils.Bytes;
+import reactor.util.function.Tuple2;
+import reactor.util.function.Tuples;
+
+public class ProtobufFileRecordSerDe implements RecordSerDe {
+  private final ProtobufSchema protobufSchema;
+  private final ObjectMapper objectMapper;
+  private final Path protobufSchemaPath;
+  private final ProtobufSchemaConverter schemaConverter = new ProtobufSchemaConverter();
+
+  public ProtobufFileRecordSerDe(Path protobufSchemaPath, String messageName,
+                                 ObjectMapper objectMapper) throws IOException {
+    this.objectMapper = objectMapper;
+    this.protobufSchemaPath = protobufSchemaPath;
+    try (final Stream<String> lines = Files.lines(protobufSchemaPath)) {
+      this.protobufSchema = new ProtobufSchema(
+          lines.collect(Collectors.joining())
+      ).copy(messageName);
+    }
+  }
+
+  @Override
+  public Tuple2<String, Object> deserialize(ConsumerRecord<Bytes, Bytes> msg) {
+    try {
+      final var message = DynamicMessage.parseFrom(
+          protobufSchema.toDescriptor(),
+          new ByteArrayInputStream(msg.value().get())
+      );
+      byte[] bytes = ProtobufSchemaUtils.toJson(message);
+      return Tuples.of(
+          msg.key() != null ? new String(msg.key().get()) : "",
+          parseJson(bytes)
+      );
+    } catch (Throwable e) {
+      throw new RuntimeException("Failed to parse record from topic " + msg.topic(), e);
+    }
+  }
+
+  @Override
+  public ProducerRecord<byte[], byte[]> serialize(String topic, byte[] key, byte[] data,
+                                                  Optional<Integer> partition) {
+    DynamicMessage.Builder builder = protobufSchema.newMessageBuilder();
+    try {
+      JsonFormat.parser().merge(new String(data), builder);
+      final DynamicMessage message = builder.build();
+      return partition
+          .map(p -> new ProducerRecord<>(topic, p, key, message.toByteArray()))
+          .orElseGet(() -> new ProducerRecord<>(topic, key, message.toByteArray()));
+
+    } catch (Throwable e) {
+      throw new RuntimeException("Failed to merge record for topic " + topic, e);
+    }
+  }
+
+  @Override
+  public TopicMessageSchema getTopicSchema(String topic) {
+
+    final JsonSchema jsonSchema = schemaConverter.convert(
+        protobufSchemaPath.toUri(),
+        protobufSchema.toDescriptor()
+    );
+    final MessageSchema keySchema = new MessageSchema()
+        .name(protobufSchema.fullName())
+        .source(MessageSchema.SourceEnum.PROTO_FILE)
+        .schema(JsonSchema.stringSchema().toJson(objectMapper));
+
+    final MessageSchema valueSchema = new MessageSchema()
+        .name(protobufSchema.fullName())
+        .source(MessageSchema.SourceEnum.PROTO_FILE)
+        .schema(jsonSchema.toJson(objectMapper));
+
+    return new TopicMessageSchema()
+        .key(keySchema)
+        .value(valueSchema);
+  }
+
+  private Object parseJson(byte[] bytes) throws IOException {
+    return objectMapper.readValue(bytes, new TypeReference<Map<String, Object>>() {
+    });
+  }
+}
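
A construction sketch for the protobuf-file path, assuming a local .proto file and message name are configured for the cluster (both values below are hypothetical, and the constructor throws IOException):

ObjectMapper mapper = new ObjectMapper();
RecordSerDe serde = new ProtobufFileRecordSerDe(
    Paths.get("/etc/kafkaui/orders.proto"),  // hypothetical protobufFile location
    "com.example.Order",                     // hypothetical protobufMessageName
    mapper);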

+ 18 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/RecordSerDe.java

@@ -0,0 +1,18 @@
+package com.provectus.kafka.ui.serde;
+
+import com.provectus.kafka.ui.model.TopicMessageSchema;
+import java.util.Optional;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.common.utils.Bytes;
+import reactor.util.function.Tuple2;
+
+public interface RecordSerDe {
+
+  Tuple2<String, Object> deserialize(ConsumerRecord<Bytes, Bytes> msg);
+
+  ProducerRecord<byte[], byte[]> serialize(String topic, byte[] key, byte[] data,
+                                           Optional<Integer> partition);
+
+  TopicMessageSchema getTopicSchema(String topic);
+}
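
A usage sketch of the new contract, assuming a RecordSerDe was obtained from DeserializationService for the target cluster (topic name and byte arrays are illustrative):

// Read side: the tuple carries the rendered key and the parsed value.
Tuple2<String, Object> kv = serde.deserialize(consumerRecord);
String key = kv.getT1();
Object value = kv.getT2();

// Write side: build a ProducerRecord, optionally pinning a partition.
ProducerRecord<byte[], byte[]> producerRecord =
    serde.serialize("orders", keyBytes, valueBytes, Optional.of(0));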

+ 41 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/SimpleRecordSerDe.java

@@ -0,0 +1,41 @@
+package com.provectus.kafka.ui.serde;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.provectus.kafka.ui.model.MessageSchema;
+import com.provectus.kafka.ui.model.TopicMessageSchema;
+import com.provectus.kafka.ui.util.jsonschema.JsonSchema;
+import java.util.Optional;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.common.utils.Bytes;
+import reactor.util.function.Tuple2;
+import reactor.util.function.Tuples;
+
+public class SimpleRecordSerDe implements RecordSerDe {
+
+  @Override
+  public Tuple2<String, Object> deserialize(ConsumerRecord<Bytes, Bytes> msg) {
+    return Tuples.of(
+        msg.key() != null ? new String(msg.key().get()) : "",
+        msg.value() != null ? new String(msg.value().get()) : ""
+    );
+  }
+
+  @Override
+  public ProducerRecord<byte[], byte[]> serialize(String topic, byte[] key, byte[] data,
+                                                  Optional<Integer> partition) {
+    return partition.map(p -> new ProducerRecord<>(topic, p, key, data))
+        .orElseGet(() -> new ProducerRecord<>(topic, key, data));
+  }
+
+  @Override
+  public TopicMessageSchema getTopicSchema(String topic) {
+    final MessageSchema schema = new MessageSchema()
+        .name("unknown")
+        .source(MessageSchema.SourceEnum.UNKNOWN)
+        .schema(JsonSchema.stringSchema().toJson(new ObjectMapper()));
+    return new TopicMessageSchema()
+        .key(schema)
+        .value(schema);
+  }
+}

+ 38 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/AvroMessageFormatter.java

@@ -0,0 +1,38 @@
+package com.provectus.kafka.ui.serde.schemaregistry;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
+import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import java.io.IOException;
+import java.util.Map;
+import lombok.SneakyThrows;
+import org.apache.avro.generic.GenericRecord;
+
+public class AvroMessageFormatter implements MessageFormatter {
+  private final KafkaAvroDeserializer avroDeserializer;
+  private final ObjectMapper objectMapper;
+
+  public AvroMessageFormatter(SchemaRegistryClient client, ObjectMapper objectMapper) {
+    this.avroDeserializer = new KafkaAvroDeserializer(client);
+    this.objectMapper = objectMapper;
+  }
+
+  @Override
+  @SneakyThrows
+  public Object format(String topic, byte[] value) {
+    if (value != null) {
+      GenericRecord avroRecord = (GenericRecord) avroDeserializer.deserialize(topic, value);
+      byte[] bytes = AvroSchemaUtils.toJson(avroRecord);
+      return parseJson(bytes);
+    } else {
+      return Map.of();
+    }
+  }
+
+  private Object parseJson(byte[] bytes) throws IOException {
+    return objectMapper.readValue(bytes, new TypeReference<Map<String, Object>>() {
+    });
+  }
+}

+ 44 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/AvroMessageReader.java

@@ -0,0 +1,44 @@
+package com.provectus.kafka.ui.serde.schemaregistry;
+
+import io.confluent.kafka.schemaregistry.ParsedSchema;
+import io.confluent.kafka.schemaregistry.avro.AvroSchema;
+import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
+import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
+import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
+import io.confluent.kafka.serializers.KafkaAvroSerializer;
+import java.io.IOException;
+import org.apache.avro.Schema;
+import org.apache.avro.util.Utf8;
+import org.apache.kafka.common.serialization.Serializer;
+
+public class AvroMessageReader extends MessageReader<Object> {
+
+  public AvroMessageReader(String topic, boolean isKey,
+                           SchemaRegistryClient client,
+                           SchemaMetadata schema)
+      throws IOException, RestClientException {
+    super(topic, isKey, client, schema);
+  }
+
+  @Override
+  protected Serializer<Object> createSerializer(SchemaRegistryClient client) {
+    return new KafkaAvroSerializer(client);
+  }
+
+  @Override
+  protected Object read(byte[] value, ParsedSchema schema) {
+    Schema rawSchema = ((AvroSchema) schema).rawSchema();
+
+    try {
+      Object object = AvroSchemaUtils.toObject(new String(value), (AvroSchema) schema);
+      if (rawSchema.getType().equals(Schema.Type.STRING)) {
+        object = ((Utf8) object).toString();
+      }
+      return object;
+    } catch (Throwable e) {
+      throw new RuntimeException("Failed to merge record for topic " + topic, e);
+    }
+
+  }
+}

+ 29 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/JsonMessageFormatter.java

@@ -0,0 +1,29 @@
+package com.provectus.kafka.ui.serde.schemaregistry;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import java.io.IOException;
+import java.util.Map;
+import lombok.SneakyThrows;
+
+public class JsonMessageFormatter implements MessageFormatter {
+  private final ObjectMapper objectMapper;
+
+  public JsonMessageFormatter(ObjectMapper objectMapper) {
+    this.objectMapper = objectMapper;
+  }
+
+  @Override
+  @SneakyThrows
+  public Object format(String topic, byte[] value) {
+    if (value == null) {
+      return Map.of();
+    }
+    return parseJson(value);
+  }
+
+  private Object parseJson(byte[] bytes) throws IOException {
+    return objectMapper.readValue(bytes, new TypeReference<Map<String, Object>>() {
+    });
+  }
+}

+ 40 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/JsonMessageReader.java

@@ -0,0 +1,40 @@
+package com.provectus.kafka.ui.serde.schemaregistry;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import io.confluent.kafka.schemaregistry.ParsedSchema;
+import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
+import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.client.rest.entities.Schema;
+import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
+import java.io.IOException;
+import lombok.SneakyThrows;
+import org.apache.kafka.common.serialization.Serializer;
+
+public class JsonMessageReader extends MessageReader<JsonNode> {
+  private static final ObjectMapper mapper = new ObjectMapper();
+
+  public JsonMessageReader(String topic, boolean isKey,
+                           SchemaRegistryClient client, SchemaMetadata schema) throws IOException,
+      RestClientException {
+    super(topic, isKey, client, schema);
+  }
+
+  @Override
+  protected Serializer<JsonNode> createSerializer(SchemaRegistryClient client) {
+    return new JsonNodeSerializer();
+  }
+
+  @Override
+  @SneakyThrows
+  protected JsonNode read(byte[] value, ParsedSchema schema) {
+    return mapper.readTree(new String(value));
+  }
+
+  private static class JsonNodeSerializer implements Serializer<JsonNode> {
+    @Override
+    public byte[] serialize(String topic, JsonNode data) {
+      return data.toString().getBytes();
+    }
+  }
+}

+ 8 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/MessageFormat.java

@@ -0,0 +1,8 @@
+package com.provectus.kafka.ui.serde.schemaregistry;
+
+public enum MessageFormat {
+  AVRO,
+  JSON,
+  STRING,
+  PROTOBUF
+}

+ 5 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/MessageFormatter.java

@@ -0,0 +1,5 @@
+package com.provectus.kafka.ui.serde.schemaregistry;
+
+public interface MessageFormatter {
+  Object format(String topic, byte[] value);
+}

+ 34 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/MessageReader.java

@@ -0,0 +1,34 @@
+package com.provectus.kafka.ui.serde.schemaregistry;
+
+import io.confluent.kafka.schemaregistry.ParsedSchema;
+import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
+import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.client.rest.entities.Schema;
+import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
+import java.io.IOException;
+import org.apache.kafka.common.serialization.Serializer;
+
+public abstract class MessageReader<T> {
+  protected final Serializer<T> serializer;
+  protected final String topic;
+  protected final boolean isKey;
+
+  private ParsedSchema schema;
+
+  protected MessageReader(String topic, boolean isKey, SchemaRegistryClient client,
+                          SchemaMetadata schema) throws IOException, RestClientException {
+    this.topic = topic;
+    this.isKey = isKey;
+    this.serializer = createSerializer(client);
+    this.schema = client.getSchemaById(schema.getId());
+  }
+
+  protected abstract Serializer<T> createSerializer(SchemaRegistryClient client);
+
+  public byte[] read(byte[] value) {
+    final T read = this.read(value, schema);
+    return this.serializer.serialize(topic, read);
+  }
+
+  protected abstract T read(byte[] value, ParsedSchema schema);
+}
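
The readers are a small template method: read(byte[]) converts user-supplied JSON into the schema's native representation via the abstract read(value, schema), then hands it to a Confluent serializer so the resulting bytes carry the registry wire framing (magic byte plus schema id). A usage sketch, assuming a SchemaRegistryClient and the subject's SchemaMetadata are already available:

MessageReader<Object> reader =
    new AvroMessageReader("orders", false, schemaRegistryClient, valueSchemaMetadata);
byte[] wireBytes = reader.read("{\"id\": 1}".getBytes()); // JSON in, Avro wire bytes out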

+ 27 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/ProtobufMessageFormatter.java

@@ -0,0 +1,27 @@
+package com.provectus.kafka.ui.serde.schemaregistry;
+
+import com.google.protobuf.Message;
+import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaUtils;
+import io.confluent.kafka.serializers.protobuf.KafkaProtobufDeserializer;
+import java.util.Map;
+import lombok.SneakyThrows;
+
+public class ProtobufMessageFormatter implements MessageFormatter {
+  private final KafkaProtobufDeserializer<?> protobufDeserializer;
+
+  public ProtobufMessageFormatter(SchemaRegistryClient client) {
+    this.protobufDeserializer = new KafkaProtobufDeserializer<>(client);
+  }
+
+  @Override
+  @SneakyThrows
+  public Object format(String topic, byte[] value) {
+    if (value != null) {
+      final Message message = protobufDeserializer.deserialize(topic, value);
+      return ProtobufSchemaUtils.toJson(message);
+    } else {
+      return Map.of();
+    }
+  }
+}

+ 41 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/ProtobufMessageReader.java

@@ -0,0 +1,41 @@
+package com.provectus.kafka.ui.serde.schemaregistry;
+
+import com.google.protobuf.DynamicMessage;
+import com.google.protobuf.Message;
+import com.google.protobuf.util.JsonFormat;
+import io.confluent.kafka.schemaregistry.ParsedSchema;
+import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
+import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.client.rest.entities.Schema;
+import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
+import io.confluent.kafka.serializers.protobuf.KafkaProtobufSerializer;
+import java.io.IOException;
+import org.apache.kafka.common.serialization.Serializer;
+
+public class ProtobufMessageReader extends MessageReader<Message> {
+
+  public ProtobufMessageReader(String topic, boolean isKey,
+                               SchemaRegistryClient client, SchemaMetadata schema)
+      throws IOException, RestClientException {
+    super(topic, isKey, client, schema);
+  }
+
+  @Override
+  protected Serializer<Message> createSerializer(SchemaRegistryClient client) {
+    return new KafkaProtobufSerializer<>(client);
+  }
+
+  @Override
+  protected Message read(byte[] value, ParsedSchema schema) {
+    ProtobufSchema protobufSchema = (ProtobufSchema) schema;
+    DynamicMessage.Builder builder = protobufSchema.newMessageBuilder();
+    try {
+      JsonFormat.parser().merge(new String(value), builder);
+      return builder.build();
+    } catch (Throwable e) {
+      throw new RuntimeException("Failed to merge record for topic " + topic, e);
+    }
+  }
+
+}

+ 278 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/SchemaRegistryRecordSerDe.java

@@ -0,0 +1,278 @@
+package com.provectus.kafka.ui.serde.schemaregistry;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.model.MessageSchema;
+import com.provectus.kafka.ui.model.TopicMessageSchema;
+import com.provectus.kafka.ui.serde.RecordSerDe;
+import com.provectus.kafka.ui.util.jsonschema.AvroJsonSchemaConverter;
+import com.provectus.kafka.ui.util.jsonschema.JsonSchema;
+import com.provectus.kafka.ui.util.jsonschema.ProtobufSchemaConverter;
+import io.confluent.kafka.schemaregistry.ParsedSchema;
+import io.confluent.kafka.schemaregistry.SchemaProvider;
+import io.confluent.kafka.schemaregistry.avro.AvroSchema;
+import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider;
+import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
+import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider;
+import java.net.URI;
+import java.nio.ByteBuffer;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+import lombok.SneakyThrows;
+import lombok.extern.log4j.Log4j2;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.common.utils.Bytes;
+import reactor.util.function.Tuple2;
+import reactor.util.function.Tuples;
+
+@Log4j2
+public class SchemaRegistryRecordSerDe implements RecordSerDe {
+
+  private static final int CLIENT_IDENTITY_MAP_CAPACITY = 100;
+
+  private final KafkaCluster cluster;
+  private final SchemaRegistryClient schemaRegistryClient;
+  private final Map<String, MessageFormatter> valueFormatMap = new ConcurrentHashMap<>();
+  private final Map<String, MessageFormatter> keyFormatMap = new ConcurrentHashMap<>();
+
+  private AvroMessageFormatter avroFormatter;
+  private ProtobufMessageFormatter protobufFormatter;
+  private final JsonMessageFormatter jsonFormatter;
+  private final StringMessageFormatter stringFormatter = new StringMessageFormatter();
+  private final ProtobufSchemaConverter protoSchemaConverter = new ProtobufSchemaConverter();
+  private final AvroJsonSchemaConverter avroSchemaConverter = new AvroJsonSchemaConverter();
+  private final ObjectMapper objectMapper = new ObjectMapper();
+
+  public SchemaRegistryRecordSerDe(KafkaCluster cluster, ObjectMapper objectMapper) {
+    this.cluster = cluster;
+
+    this.schemaRegistryClient = Optional.ofNullable(cluster.getSchemaRegistry())
+        .map(schemaRegistryUrl -> {
+              List<SchemaProvider> schemaProviders =
+                  List.of(new AvroSchemaProvider(), new ProtobufSchemaProvider());
+              return new CachedSchemaRegistryClient(
+                  Collections.singletonList(schemaRegistryUrl),
+                  CLIENT_IDENTITY_MAP_CAPACITY,
+                  schemaProviders,
+                  Collections.emptyMap()
+              );
+            }
+        ).orElse(null);
+
+    this.jsonFormatter = new JsonMessageFormatter(objectMapper);
+
+    if (schemaRegistryClient != null) {
+      this.avroFormatter = new AvroMessageFormatter(schemaRegistryClient, objectMapper);
+      this.protobufFormatter = new ProtobufMessageFormatter(schemaRegistryClient);
+    }
+  }
+
+  public Tuple2<String, Object> deserialize(ConsumerRecord<Bytes, Bytes> msg) {
+    MessageFormatter valueFormatter = getMessageFormatter(msg, false);
+    MessageFormatter keyFormatter = getMessageFormatter(msg, true);
+    try {
+      return Tuples.of(
+          msg.key() != null
+              ? keyFormatter.format(msg.topic(), msg.key().get()).toString()
+              : "",
+          valueFormatter.format(
+              msg.topic(),
+              msg.value() != null ? msg.value().get() : null
+          )
+      );
+    } catch (Throwable e) {
+      throw new RuntimeException("Failed to parse record from topic " + msg.topic(), e);
+    }
+  }
+
+  @Override
+  @SneakyThrows
+  public ProducerRecord<byte[], byte[]> serialize(String topic, byte[] key, byte[] data,
+                                                  Optional<Integer> partition) {
+    final Optional<SchemaMetadata> maybeValueSchema = getSchemaBySubject(topic, false);
+    final Optional<SchemaMetadata> maybeKeySchema = getSchemaBySubject(topic, true);
+
+    final Optional<byte[]> serializedValue = serialize(maybeValueSchema, topic, data);
+    final Optional<byte[]> serializedKey = serialize(maybeKeySchema, topic, key);
+
+    if (serializedValue.isPresent()) {
+      return partition
+          .map(p ->
+              new ProducerRecord<>(topic, p, serializedKey.orElse(key), serializedValue.get())
+          ).orElseGet(() ->
+              new ProducerRecord<>(topic, serializedKey.orElse(key), serializedValue.get())
+          );
+    } else {
+      throw new RuntimeException("Subject was not found for topic " + topic);
+    }
+  }
+
+  @SneakyThrows
+  private Optional<byte[]> serialize(
+      Optional<SchemaMetadata> maybeSchema, String topic, byte[] value) {
+    if (maybeSchema.isPresent()) {
+      final SchemaMetadata schema = maybeSchema.get();
+
+      MessageReader<?> reader;
+      if (schema.getSchemaType().equals(MessageFormat.PROTOBUF.name())) {
+        reader = new ProtobufMessageReader(topic, false, schemaRegistryClient, schema);
+      } else if (schema.getSchemaType().equals(MessageFormat.AVRO.name())) {
+        reader = new AvroMessageReader(topic, false, schemaRegistryClient, schema);
+      } else {
+        reader = new JsonMessageReader(topic, false, schemaRegistryClient, schema);
+      }
+
+      return Optional.of(reader.read(value));
+    } else {
+      return Optional.empty();
+    }
+
+  }
+
+  @Override
+  public TopicMessageSchema getTopicSchema(String topic) {
+    final Optional<SchemaMetadata> maybeValueSchema = getSchemaBySubject(topic, false);
+    final Optional<SchemaMetadata> maybeKeySchema = getSchemaBySubject(topic, true);
+
+    String sourceValueSchema = maybeValueSchema.map(this::convertSchema)
+        .orElseGet(() -> JsonSchema.stringSchema().toJson(objectMapper));
+
+    String sourceKeySchema = maybeKeySchema.map(this::convertSchema)
+        .orElseGet(() -> JsonSchema.stringSchema().toJson(objectMapper));
+
+    final MessageSchema keySchema = new MessageSchema()
+        .name(maybeKeySchema.map(
+            (s) -> schemaSubject(topic, true)
+        ).orElse("unknown"))
+        .source(MessageSchema.SourceEnum.SCHEMA_REGISTRY)
+        .schema(sourceKeySchema);
+
+    final MessageSchema valueSchema = new MessageSchema()
+        .name(maybeValueSchema.map(
+            (s) -> schemaSubject(topic, false)
+        ).orElse("unknown"))
+        .source(MessageSchema.SourceEnum.SCHEMA_REGISTRY)
+        .schema(sourceValueSchema);
+
+    return new TopicMessageSchema()
+        .key(keySchema)
+        .value(valueSchema);
+  }
+
+  @SneakyThrows
+  private String convertSchema(SchemaMetadata schema) {
+
+    String jsonSchema;
+    URI basePath = new URI(cluster.getSchemaRegistry()).resolve(Integer.toString(schema.getId()));
+    final ParsedSchema schemaById = schemaRegistryClient.getSchemaById(schema.getId());
+
+    if (schema.getSchemaType().equals(MessageFormat.PROTOBUF.name())) {
+      final ProtobufSchema protobufSchema = (ProtobufSchema) schemaById;
+      jsonSchema = protoSchemaConverter
+          .convert(basePath, protobufSchema.toDescriptor())
+          .toJson(objectMapper);
+    } else if (schema.getSchemaType().equals(MessageFormat.AVRO.name())) {
+      final AvroSchema avroSchema = (AvroSchema) schemaById;
+      jsonSchema = avroSchemaConverter
+          .convert(basePath, avroSchema.rawSchema())
+          .toJson(objectMapper);
+    } else if (schema.getSchemaType().equals(MessageFormat.JSON.name())) {
+      jsonSchema = schema.getSchema();
+    } else {
+      jsonSchema = JsonSchema.stringSchema().toJson(objectMapper);
+    }
+
+    return jsonSchema;
+  }
+
+  private MessageFormatter getMessageFormatter(ConsumerRecord<Bytes, Bytes> msg, boolean isKey) {
+    if (isKey) {
+      return keyFormatMap.computeIfAbsent(msg.topic(), k -> detectFormat(msg, true));
+    } else {
+      return valueFormatMap.computeIfAbsent(msg.topic(), k -> detectFormat(msg, false));
+    }
+  }
+
+  private MessageFormatter detectFormat(ConsumerRecord<Bytes, Bytes> msg, boolean isKey) {
+    if (schemaRegistryClient != null) {
+      try {
+        final Optional<String> type = getSchemaFromMessage(msg, isKey)
+            .or(() -> getSchemaBySubject(msg.topic(), isKey).map(SchemaMetadata::getSchemaType));
+        if (type.isPresent()) {
+          if (type.get().equals(MessageFormat.PROTOBUF.name())) {
+            if (tryFormatter(protobufFormatter, msg).isPresent()) {
+              return protobufFormatter;
+            }
+          } else if (type.get().equals(MessageFormat.AVRO.name())) {
+            if (tryFormatter(avroFormatter, msg).isPresent()) {
+              return avroFormatter;
+            }
+          } else if (type.get().equals(MessageFormat.JSON.name())) {
+            if (tryFormatter(jsonFormatter, msg).isPresent()) {
+              return jsonFormatter;
+            }
+          }
+        }
+      } catch (Exception e) {
+        log.warn("Failed to get Schema for topic {}", msg.topic(), e);
+      }
+    }
+
+    if (tryFormatter(jsonFormatter, msg).isPresent()) {
+      return jsonFormatter;
+    }
+
+    return stringFormatter;
+  }
+
+  private Optional<MessageFormatter> tryFormatter(
+      MessageFormatter formatter, ConsumerRecord<Bytes, Bytes> msg) {
+    try {
+      formatter.format(msg.topic(), msg.value().get());
+      return Optional.of(formatter);
+    } catch (Throwable e) {
+      log.info("Failed to parse by {} from topic {}", formatter.getClass(), msg.topic());
+    }
+
+    return Optional.empty();
+  }
+
+  @SneakyThrows
+  private Optional<String> getSchemaFromMessage(ConsumerRecord<Bytes, Bytes> msg, boolean isKey) {
+    Optional<String> result = Optional.empty();
+    final Bytes value = isKey ? msg.key() : msg.value();
+    if (value != null) {
+      ByteBuffer buffer = ByteBuffer.wrap(value.get());
+      if (buffer.get() == 0) {
+        int id = buffer.getInt();
+        result = Optional.ofNullable(
+            schemaRegistryClient.getSchemaById(id)
+        ).map(ParsedSchema::schemaType);
+      }
+    }
+    return result;
+  }
+
+  @SneakyThrows
+  private Optional<SchemaMetadata> getSchemaBySubject(String topic, boolean isKey) {
+    return Optional.ofNullable(
+        schemaRegistryClient.getLatestSchemaMetadata(
+            schemaSubject(topic, isKey)
+        )
+    );
+  }
+
+  private String schemaSubject(String topic, boolean isKey) {
+    return String.format(
+        isKey ? cluster.getKeySchemaNameTemplate()
+            : cluster.getSchemaNameTemplate(), topic
+    );
+  }
+}
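
Format detection is cached per topic (separately for keys and values) and works in stages: peek at the record itself, where the Confluent wire format begins with a zero magic byte followed by a 4-byte schema id, then fall back to a subject lookup, then to JSON, and finally to plain string. A standalone sketch of the wire-format peek (variable names are illustrative):

ByteBuffer buffer = ByteBuffer.wrap(recordBytes);
if (buffer.get() == 0) {             // Confluent wire-format magic byte
  int schemaId = buffer.getInt();    // 4-byte schema id follows
  String schemaType =                // e.g. "AVRO", "PROTOBUF", "JSON"
      schemaRegistryClient.getSchemaById(schemaId).schemaType();
}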

+ 18 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/StringMessageFormatter.java

@@ -0,0 +1,18 @@
+package com.provectus.kafka.ui.serde.schemaregistry;
+
+import java.nio.charset.StandardCharsets;
+import java.util.Map;
+import lombok.SneakyThrows;
+
+public class StringMessageFormatter implements MessageFormatter {
+
+  @Override
+  @SneakyThrows
+  public Object format(String topic, byte[] value) {
+    if (value != null) {
+      return new String(value, StandardCharsets.UTF_8);
+    } else {
+      return Map.of();
+    }
+  }
+}

+ 24 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java

@@ -13,6 +13,7 @@ import com.provectus.kafka.ui.model.ClusterStats;
 import com.provectus.kafka.ui.model.ConsumerGroup;
 import com.provectus.kafka.ui.model.ConsumerGroupDetails;
 import com.provectus.kafka.ui.model.ConsumerPosition;
+import com.provectus.kafka.ui.model.CreateTopicMessage;
 import com.provectus.kafka.ui.model.ExtendedAdminClient;
 import com.provectus.kafka.ui.model.InternalTopic;
 import com.provectus.kafka.ui.model.KafkaCluster;
@@ -23,8 +24,10 @@ import com.provectus.kafka.ui.model.TopicConsumerGroups;
 import com.provectus.kafka.ui.model.TopicCreation;
 import com.provectus.kafka.ui.model.TopicDetails;
 import com.provectus.kafka.ui.model.TopicMessage;
+import com.provectus.kafka.ui.model.TopicMessageSchema;
 import com.provectus.kafka.ui.model.TopicUpdate;
 import com.provectus.kafka.ui.model.TopicsResponse;
+import com.provectus.kafka.ui.serde.DeserializationService;
 import com.provectus.kafka.ui.util.ClusterUtil;
 import java.util.Collections;
 import java.util.Comparator;
@@ -58,6 +61,7 @@ public class ClusterService {
   private final ClusterMapper clusterMapper;
   private final KafkaService kafkaService;
   private final ConsumingService consumingService;
+  private final DeserializationService deserializationService;
 
   public List<Cluster> getClusters() {
     return clustersStorage.getKafkaClusters()
@@ -294,6 +298,26 @@ public class ClusterService {
         .orElse(Mono.empty());
   }
 
+  public TopicMessageSchema getTopicSchema(String clusterName, String topicName) {
+    var cluster = clustersStorage.getClusterByName(clusterName)
+        .orElseThrow(ClusterNotFoundException::new);
+    if (!cluster.getTopics().containsKey(topicName)) {
+      throw new TopicNotFoundException();
+    }
+    return deserializationService
+        .getRecordDeserializerForCluster(cluster)
+        .getTopicSchema(topicName);
+  }
+
+  public Mono<Void> sendMessage(String clusterName, String topicName, CreateTopicMessage msg) {
+    var cluster = clustersStorage.getClusterByName(clusterName)
+        .orElseThrow(ClusterNotFoundException::new);
+    if (!cluster.getTopics().containsKey(topicName)) {
+      throw new TopicNotFoundException();
+    }
+    return kafkaService.sendMessage(cluster, topicName, msg).then();
+  }
+
   @NotNull
   private Mono<Void> reThrowCustomException(Throwable e) {
     if (e instanceof GroupIdNotFoundException) {

+ 6 - 6
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumingService.java

@@ -2,12 +2,12 @@ package com.provectus.kafka.ui.service;
 
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.provectus.kafka.ui.deserialization.DeserializationService;
-import com.provectus.kafka.ui.deserialization.RecordDeserializer;
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.model.SeekDirection;
 import com.provectus.kafka.ui.model.TopicMessage;
+import com.provectus.kafka.ui.serde.DeserializationService;
+import com.provectus.kafka.ui.serde.RecordSerDe;
 import com.provectus.kafka.ui.util.ClusterUtil;
 import com.provectus.kafka.ui.util.OffsetsSeek;
 import com.provectus.kafka.ui.util.OffsetsSeekBackward;
@@ -61,7 +61,7 @@ public class ConsumingService {
             ? new OffsetsSeekForward(topic, consumerPosition)
             : new OffsetsSeekBackward(topic, consumerPosition, recordsLimit)
     );
-    RecordDeserializer recordDeserializer =
+    RecordSerDe recordDeserializer =
         deserializationService.getRecordDeserializerForCluster(cluster);
     return Flux.create(emitter)
         .subscribeOn(Schedulers.boundedElastic())
@@ -166,10 +166,10 @@ public class ConsumingService {
                 .sorted(REVERED_COMPARING).collect(Collectors.toList());
           }
 
-          for (ConsumerRecord<Bytes, Bytes> record : iterable) {
+          for (ConsumerRecord<Bytes, Bytes> msg : iterable) {
             if (!sink.isCancelled() && !waitingOffsets.endReached()) {
-              sink.next(record);
-              waitingOffsets.markPolled(record);
+              sink.next(msg);
+              waitingOffsets.markPolled(msg);
             } else {
               break;
             }

+ 35 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java

@@ -1,6 +1,7 @@
 package com.provectus.kafka.ui.service;
 
 import com.provectus.kafka.ui.model.ConsumerGroup;
+import com.provectus.kafka.ui.model.CreateTopicMessage;
 import com.provectus.kafka.ui.model.ExtendedAdminClient;
 import com.provectus.kafka.ui.model.InternalBrokerDiskUsage;
 import com.provectus.kafka.ui.model.InternalBrokerMetrics;
@@ -15,6 +16,8 @@ import com.provectus.kafka.ui.model.ServerStatus;
 import com.provectus.kafka.ui.model.TopicConsumerGroups;
 import com.provectus.kafka.ui.model.TopicCreation;
 import com.provectus.kafka.ui.model.TopicUpdate;
+import com.provectus.kafka.ui.serde.DeserializationService;
+import com.provectus.kafka.ui.serde.RecordSerDe;
 import com.provectus.kafka.ui.util.ClusterUtil;
 import com.provectus.kafka.ui.util.JmxClusterUtil;
 import com.provectus.kafka.ui.util.JmxMetricsName;
@@ -28,6 +31,7 @@ import java.util.LongSummaryStatistics;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Properties;
+import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
@@ -47,6 +51,10 @@ import org.apache.kafka.clients.admin.RecordsToDelete;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
 import org.apache.kafka.clients.consumer.OffsetAndMetadata;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.clients.producer.RecordMetadata;
 import org.apache.kafka.common.Node;
 import org.apache.kafka.common.TopicPartition;
 import org.apache.kafka.common.config.ConfigResource;
@@ -71,6 +79,7 @@ public class KafkaService {
   private final Map<String, ExtendedAdminClient> adminClientCache = new ConcurrentHashMap<>();
   private final JmxClusterUtil jmxClusterUtil;
   private final ClustersStorage clustersStorage;
+  private final DeserializationService deserializationService;
   @Value("${kafka.admin-client-timeout}")
   private int clientTimeout;
 
@@ -631,5 +640,31 @@ public class KafkaService {
         .map(ac -> ac.deleteRecords(records)).then();
   }
 
+  public Mono<RecordMetadata> sendMessage(KafkaCluster cluster, String topic,
+                                          CreateTopicMessage msg) {
+    RecordSerDe serde =
+        deserializationService.getRecordDeserializerForCluster(cluster);
+
+    Properties properties = new Properties();
+    properties.putAll(cluster.getProperties());
+    properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.getBootstrapServers());
+    try (KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(properties)) {
+      final ProducerRecord<byte[], byte[]> producerRecord = serde.serialize(topic,
+          msg.getKey() != null ? msg.getKey().getBytes() : null,
+          msg.getContent().toString().getBytes(),
+          Optional.ofNullable(msg.getPartition())
+      );
+
+      CompletableFuture<RecordMetadata> cf = new CompletableFuture<>();
+      producer.send(producerRecord, (metadata, exception) -> {
+        if (exception != null) {
+          cf.completeExceptionally(exception);
+        } else {
+          cf.complete(metadata);
+        }
+      });
+      return Mono.fromFuture(cf);
+    }
+  }
 
 }
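
Note: sendMessage above bridges the producer's callback API into Reactor by completing a CompletableFuture from the send callback and wrapping it with Mono.fromFuture. A standalone sketch of that pattern follows; the bootstrap address, topic and serializer settings are illustrative, whereas the PR's version takes them from the cluster configuration and the serde.

import java.util.Properties;
import java.util.concurrent.CompletableFuture;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import reactor.core.publisher.Mono;

public class ProducerSendSketch {

  public static Mono<RecordMetadata> send(String bootstrapServers, String topic, byte[] value) {
    Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);

    try (KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(props)) {
      CompletableFuture<RecordMetadata> cf = new CompletableFuture<>();
      // The send callback resolves the future; Reactor exposes it as a Mono
      producer.send(new ProducerRecord<>(topic, value), (metadata, exception) -> {
        if (exception != null) {
          cf.completeExceptionally(exception);
        } else {
          cf.complete(metadata);
        }
      });
      // Closing the producer flushes the batch, so the future completes before the Mono is consumed
      return Mono.fromFuture(cf);
    }
  }
}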

+ 7 - 7
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ClusterUtil.java

@@ -3,7 +3,6 @@ package com.provectus.kafka.ui.util;
 import static com.provectus.kafka.ui.util.KafkaConstants.TOPIC_DEFAULT_CONFIGS;
 import static org.apache.kafka.common.config.TopicConfig.MESSAGE_FORMAT_VERSION_CONFIG;
 
-import com.provectus.kafka.ui.deserialization.RecordDeserializer;
 import com.provectus.kafka.ui.model.ConsumerGroup;
 import com.provectus.kafka.ui.model.ConsumerGroupDetails;
 import com.provectus.kafka.ui.model.ConsumerTopicPartitionDetail;
@@ -14,6 +13,7 @@ import com.provectus.kafka.ui.model.InternalTopic;
 import com.provectus.kafka.ui.model.InternalTopicConfig;
 import com.provectus.kafka.ui.model.ServerStatus;
 import com.provectus.kafka.ui.model.TopicMessage;
+import com.provectus.kafka.ui.serde.RecordSerDe;
 import java.time.Instant;
 import java.time.OffsetDateTime;
 import java.time.ZoneId;
@@ -43,6 +43,7 @@ import org.apache.kafka.common.config.ConfigResource;
 import org.apache.kafka.common.record.TimestampType;
 import org.apache.kafka.common.utils.Bytes;
 import reactor.core.publisher.Mono;
+import reactor.util.function.Tuple2;
 
 @Slf4j
 public class ClusterUtil {
@@ -197,7 +198,7 @@ public class ClusterUtil {
   }
 
   public static TopicMessage mapToTopicMessage(ConsumerRecord<Bytes, Bytes> consumerRecord,
-                                               RecordDeserializer recordDeserializer) {
+                                               RecordSerDe recordDeserializer) {
     Map<String, String> headers = new HashMap<>();
     consumerRecord.headers().iterator()
         .forEachRemaining(header -> headers.put(header.key(), new String(header.value())));
@@ -212,12 +213,11 @@ public class ClusterUtil {
     topicMessage.setOffset(consumerRecord.offset());
     topicMessage.setTimestamp(timestamp);
     topicMessage.setTimestampType(timestampType);
-    if (consumerRecord.key() != null) {
-      topicMessage.setKey(consumerRecord.key().toString());
-    }
+
     topicMessage.setHeaders(headers);
-    Object parsedValue = recordDeserializer.deserialize(consumerRecord);
-    topicMessage.setContent(parsedValue);
+    Tuple2<String, Object> parsed = recordDeserializer.deserialize(consumerRecord);
+    topicMessage.setKey(parsed.getT1());
+    topicMessage.setContent(parsed.getT2());
 
     return topicMessage;
   }

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/OffsetsSeek.java

@@ -18,7 +18,7 @@ public abstract class OffsetsSeek {
   protected final String topic;
   protected final ConsumerPosition consumerPosition;
 
-  public OffsetsSeek(String topic, ConsumerPosition consumerPosition) {
+  protected OffsetsSeek(String topic, ConsumerPosition consumerPosition) {
     this.topic = topic;
     this.consumerPosition = consumerPosition;
   }

+ 0 - 5
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/OffsetsSeekForward.java

@@ -1,16 +1,11 @@
 package com.provectus.kafka.ui.util;
 
 import com.provectus.kafka.ui.model.ConsumerPosition;
-import com.provectus.kafka.ui.model.SeekType;
-import com.provectus.kafka.ui.service.ConsumingService;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
-import lombok.RequiredArgsConstructor;
 import lombok.extern.log4j.Log4j2;
 import org.apache.kafka.clients.consumer.Consumer;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.common.TopicPartition;
 import org.apache.kafka.common.utils.Bytes;
 

+ 21 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/ArrayFieldSchema.java

@@ -0,0 +1,21 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+public class ArrayFieldSchema implements FieldSchema {
+  private final FieldSchema itemsSchema;
+
+  public ArrayFieldSchema(FieldSchema itemsSchema) {
+    this.itemsSchema = itemsSchema;
+  }
+
+  @Override
+  public JsonNode toJsonNode(ObjectMapper mapper) {
+    final ObjectNode objectNode = mapper.createObjectNode();
+    objectNode.setAll(new SimpleJsonType(JsonType.Type.ARRAY).toJsonNode(mapper));
+    objectNode.set("items", itemsSchema.toJsonNode(mapper));
+    return objectNode;
+  }
+}

+ 137 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java

@@ -0,0 +1,137 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import java.net.URI;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.UUID;
+import java.util.stream.Collectors;
+import org.apache.avro.Schema;
+import reactor.util.function.Tuple2;
+import reactor.util.function.Tuples;
+
+public class AvroJsonSchemaConverter implements JsonSchemaConverter<Schema> {
+
+  @Override
+  public JsonSchema convert(URI basePath, Schema schema) {
+    final JsonSchema.JsonSchemaBuilder builder = JsonSchema.builder();
+
+    builder.id(basePath.resolve(schema.getName()));
+    JsonType type = convertType(schema);
+    builder.type(type);
+
+    Map<String, FieldSchema> definitions = new HashMap<>();
+    final FieldSchema root = convertSchema("root", schema, definitions, false);
+    builder.definitions(definitions);
+
+    if (type.getType().equals(JsonType.Type.OBJECT)) {
+      final ObjectFieldSchema objectRoot = (ObjectFieldSchema) root;
+      builder.properties(objectRoot.getProperties());
+      builder.required(objectRoot.getRequired());
+    }
+
+    return builder.build();
+  }
+
+
+  private FieldSchema convertField(Schema.Field field, Map<String, FieldSchema> definitions) {
+    return convertSchema(field.name(), field.schema(), definitions, true);
+  }
+
+  private FieldSchema convertSchema(String name, Schema schema,
+                                    Map<String, FieldSchema> definitions, boolean ref) {
+    if (!schema.isUnion() || (schema.getTypes().size() == 2 && schema.isNullable())) {
+      if (schema.isUnion()) {
+        final Optional<Schema> firstType =
+            schema.getTypes().stream().filter(t -> !t.getType().equals(Schema.Type.NULL))
+                .findFirst();
+        schema = firstType.orElseThrow();
+      }
+      JsonType type = convertType(schema);
+      switch (type.getType()) {
+        case BOOLEAN:
+        case NULL:
+        case STRING:
+        case ENUM:
+        case NUMBER:
+        case INTEGER:
+          return new SimpleFieldSchema(type);
+        case OBJECT:
+          if (schema.getType().equals(Schema.Type.MAP)) {
+            return new MapFieldSchema(convertSchema(name, schema.getValueType(), definitions, ref));
+          } else {
+            return createObjectSchema(name, schema, definitions, ref);
+          }
+        case ARRAY:
+          return createArraySchema(name, schema, definitions);
+        default: throw new RuntimeException("Unknown type");
+      }
+    } else {
+      return new OneOfFieldSchema(
+          schema.getTypes().stream()
+              .map(typeSchema ->
+                  convertSchema(
+                      name + UUID.randomUUID().toString(),
+                      typeSchema,
+                      definitions,
+                      true
+                  )
+              ).collect(Collectors.toList())
+      );
+    }
+  }
+
+  private FieldSchema createObjectSchema(String name, Schema schema,
+                                         Map<String, FieldSchema> definitions, boolean ref) {
+    final Map<String, FieldSchema> fields = schema.getFields().stream()
+        .map(f -> Tuples.of(f.name(), convertField(f, definitions)))
+        .collect(Collectors.toMap(
+            Tuple2::getT1,
+            Tuple2::getT2
+        ));
+
+    final List<String> required = schema.getFields().stream()
+        .filter(f -> !f.schema().isNullable())
+        .map(Schema.Field::name).collect(Collectors.toList());
+
+    if (ref) {
+      String definitionName = String.format("Record%s", schema.getName());
+      definitions.put(definitionName, new ObjectFieldSchema(fields, required));
+      return new RefFieldSchema(String.format("#/definitions/%s", definitionName));
+    } else {
+      return new ObjectFieldSchema(fields, required);
+    }
+  }
+
+  private ArrayFieldSchema createArraySchema(String name, Schema schema,
+                                             Map<String, FieldSchema> definitions) {
+    return new ArrayFieldSchema(
+        convertSchema(name, schema.getElementType(), definitions, true)
+    );
+  }
+
+  private JsonType convertType(Schema schema) {
+    switch (schema.getType()) {
+      case INT:
+      case LONG:
+        return new SimpleJsonType(JsonType.Type.INTEGER);
+      case MAP:
+      case RECORD:
+        return new SimpleJsonType(JsonType.Type.OBJECT);
+      case ENUM:
+        return new EnumJsonType(schema.getEnumSymbols());
+      case BYTES:
+      case STRING:
+        return new SimpleJsonType(JsonType.Type.STRING);
+      case NULL: return new SimpleJsonType(JsonType.Type.NULL);
+      case ARRAY: return new SimpleJsonType(JsonType.Type.ARRAY);
+      case FIXED:
+      case FLOAT:
+      case DOUBLE:
+        return new SimpleJsonType(JsonType.Type.NUMBER);
+      case BOOLEAN: return new SimpleJsonType(JsonType.Type.BOOLEAN);
+      default: return new SimpleJsonType(JsonType.Type.STRING);
+    }
+  }
+}
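
Note: a short usage sketch for the converter above; the schema string and base URI are made up for illustration, and the unit test further down shows the full expected output for a richer schema.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.provectus.kafka.ui.util.jsonschema.AvroJsonSchemaConverter;
import com.provectus.kafka.ui.util.jsonschema.JsonSchema;
import java.net.URI;
import org.apache.avro.Schema;

public class AvroConvertSketch {
  public static void main(String[] args) throws Exception {
    Schema avro = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
            + "{\"name\":\"id\",\"type\":\"long\"},"
            + "{\"name\":\"name\",\"type\":\"string\"}]}");

    JsonSchema jsonSchema = new AvroJsonSchemaConverter()
        .convert(new URI("http://example.com/"), avro);

    // Prints a draft 2020-12 schema with "id" (integer) and "name" (string) as required properties
    System.out.println(jsonSchema.toJson(new ObjectMapper()));
  }
}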

+ 24 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/EnumJsonType.java

@@ -0,0 +1,24 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import java.util.List;
+import java.util.Map;
+
+
+public class EnumJsonType extends JsonType {
+  private List<String> values;
+
+  public EnumJsonType(List<String> values) {
+    super(Type.ENUM);
+    this.values = values;
+  }
+
+  @Override
+  public Map<String, JsonNode> toJsonNode(ObjectMapper mapper) {
+    return Map.of(
+        this.type.getName(),
+        mapper.valueToTree(values)
+    );
+  }
+}

+ 8 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/FieldSchema.java

@@ -0,0 +1,8 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+public interface FieldSchema {
+  JsonNode toJsonNode(ObjectMapper mapper);
+}

+ 66 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonSchema.java

@@ -0,0 +1,66 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.fasterxml.jackson.databind.node.TextNode;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import lombok.Builder;
+import lombok.Data;
+import lombok.SneakyThrows;
+import reactor.util.function.Tuple2;
+import reactor.util.function.Tuples;
+
+@Data
+@Builder
+public class JsonSchema {
+  private final URI id;
+  private final URI schema = URI.create("https://json-schema.org/draft/2020-12/schema");
+  private final String title;
+  private final JsonType type;
+  private final Map<String, FieldSchema> properties;
+  private final Map<String, FieldSchema> definitions;
+  private final List<String> required;
+
+  public String toJson(ObjectMapper mapper) {
+    final ObjectNode objectNode = mapper.createObjectNode();
+    objectNode.set("$id", new TextNode(id.toString()));
+    objectNode.set("$schema", new TextNode(schema.toString()));
+    objectNode.setAll(type.toJsonNode(mapper));
+    if (properties != null && !properties.isEmpty()) {
+      objectNode.set("properties", mapper.valueToTree(
+          properties.entrySet().stream()
+              .map(e -> Tuples.of(e.getKey(), e.getValue().toJsonNode(mapper)))
+              .collect(Collectors.toMap(
+                  Tuple2::getT1,
+                  Tuple2::getT2
+              ))
+      ));
+      if (!required.isEmpty()) {
+        objectNode.set("required", mapper.valueToTree(required));
+      }
+    }
+    if (definitions != null && !definitions.isEmpty()) {
+      objectNode.set("definitions", mapper.valueToTree(
+          definitions.entrySet().stream()
+              .map(e -> Tuples.of(e.getKey(), e.getValue().toJsonNode(mapper)))
+              .collect(Collectors.toMap(
+                  Tuple2::getT1,
+                  Tuple2::getT2
+              ))
+      ));
+    }
+    return objectNode.toString();
+  }
+
+  @SneakyThrows
+  public static JsonSchema stringSchema() {
+    return JsonSchema.builder()
+        .id(new URI("http://unknown.unknown"))
+        .type(new SimpleJsonType(JsonType.Type.STRING))
+        .build();
+  }
+}

+ 7 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonSchemaConverter.java

@@ -0,0 +1,7 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import java.net.URI;
+
+public interface JsonSchemaConverter<T> {
+  JsonSchema convert(URI basePath, T schema);
+}

+ 41 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonType.java

@@ -0,0 +1,41 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import java.util.Map;
+
+public abstract class JsonType {
+
+  protected final Type type;
+
+  public JsonType(Type type) {
+    this.type = type;
+  }
+
+  public Type getType() {
+    return type;
+  }
+
+  public abstract Map<String, JsonNode> toJsonNode(ObjectMapper mapper);
+
+  public enum Type {
+    NULL,
+    BOOLEAN,
+    OBJECT,
+    ARRAY,
+    NUMBER,
+    INTEGER,
+    ENUM,
+    STRING;
+
+    private final String name;
+
+    Type() {
+      this.name = this.name().toLowerCase();
+    }
+
+    public String getName() {
+      return name;
+    }
+  }
+}

+ 22 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/MapFieldSchema.java

@@ -0,0 +1,22 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.fasterxml.jackson.databind.node.TextNode;
+
+public class MapFieldSchema implements FieldSchema {
+  private final FieldSchema itemSchema;
+
+  public MapFieldSchema(FieldSchema itemSchema) {
+    this.itemSchema = itemSchema;
+  }
+
+  @Override
+  public JsonNode toJsonNode(ObjectMapper mapper) {
+    final ObjectNode objectNode = mapper.createObjectNode();
+    objectNode.set("type", new TextNode(JsonType.Type.OBJECT.getName()));
+    objectNode.set("additionalProperties", itemSchema.toJsonNode(mapper));
+    return objectNode;
+  }
+}

+ 46 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/ObjectFieldSchema.java

@@ -0,0 +1,46 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import reactor.util.function.Tuple2;
+import reactor.util.function.Tuples;
+
+public class ObjectFieldSchema implements FieldSchema {
+  private final Map<String, FieldSchema> properties;
+  private final List<String> required;
+
+  public ObjectFieldSchema(Map<String, FieldSchema> properties,
+                           List<String> required) {
+    this.properties = properties;
+    this.required = required;
+  }
+
+  public Map<String, FieldSchema> getProperties() {
+    return properties;
+  }
+
+  public List<String> getRequired() {
+    return required;
+  }
+
+  @Override
+  public JsonNode toJsonNode(ObjectMapper mapper) {
+    final Map<String, JsonNode> nodes = properties.entrySet().stream()
+        .map(e -> Tuples.of(e.getKey(), e.getValue().toJsonNode(mapper)))
+        .collect(Collectors.toMap(
+            Tuple2::getT1,
+            Tuple2::getT2
+        ));
+    final ObjectNode objectNode = mapper.createObjectNode();
+    objectNode.setAll(new SimpleJsonType(JsonType.Type.OBJECT).toJsonNode(mapper));
+    objectNode.set("properties", mapper.valueToTree(nodes));
+    if (!required.isEmpty()) {
+      objectNode.set("required", mapper.valueToTree(required));
+    }
+    return objectNode;
+  }
+}

+ 27 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/OneOfFieldSchema.java

@@ -0,0 +1,27 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import java.util.List;
+import java.util.stream.Collectors;
+
+public class OneOfFieldSchema implements FieldSchema {
+  private final List<FieldSchema> schemaList;
+
+  public OneOfFieldSchema(
+      List<FieldSchema> schemaList) {
+    this.schemaList = schemaList;
+  }
+
+  @Override
+  public JsonNode toJsonNode(ObjectMapper mapper) {
+    return mapper.createObjectNode()
+        .set("oneOf",
+            mapper.createArrayNode().addAll(
+                schemaList.stream()
+                    .map(s -> s.toJsonNode(mapper))
+                    .collect(Collectors.toList())
+            )
+        );
+  }
+}

+ 134 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/ProtobufSchemaConverter.java

@@ -0,0 +1,134 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import com.google.protobuf.Descriptors;
+import java.net.URI;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import reactor.util.function.Tuple2;
+import reactor.util.function.Tuples;
+
+public class ProtobufSchemaConverter implements JsonSchemaConverter<Descriptors.Descriptor> {
+  @Override
+  public JsonSchema convert(URI basePath, Descriptors.Descriptor schema) {
+    final JsonSchema.JsonSchemaBuilder builder = JsonSchema.builder();
+
+    builder.id(basePath.resolve(schema.getFullName()));
+    builder.type(new SimpleJsonType(JsonType.Type.OBJECT));
+
+    Map<String, FieldSchema> definitions = new HashMap<>();
+    final ObjectFieldSchema root =
+        (ObjectFieldSchema) convertObjectSchema(schema, definitions, false);
+    builder.definitions(definitions);
+
+    builder.properties(root.getProperties());
+    builder.required(root.getRequired());
+
+    return builder.build();
+  }
+
+  private FieldSchema convertObjectSchema(Descriptors.Descriptor schema,
+                                          Map<String, FieldSchema> definitions, boolean ref) {
+    final Map<String, FieldSchema> fields = schema.getFields().stream()
+        .map(f -> Tuples.of(f.getName(), convertField(f, definitions)))
+        .collect(Collectors.toMap(
+            Tuple2::getT1,
+            Tuple2::getT2
+        ));
+
+    final Map<String, OneOfFieldSchema> oneOfFields = schema.getOneofs().stream().map(o ->
+        Tuples.of(
+            o.getName(),
+            new OneOfFieldSchema(
+              o.getFields().stream().map(
+                  Descriptors.FieldDescriptor::getName
+              ).map(fields::get).collect(Collectors.toList())
+            )
+        )
+    ).collect(Collectors.toMap(
+        Tuple2::getT1,
+        Tuple2::getT2
+    ));
+
+    final List<String> allOneOfFields = schema.getOneofs().stream().flatMap(o ->
+                o.getFields().stream().map(Descriptors.FieldDescriptor::getName)
+        ).collect(Collectors.toList());
+
+    final Map<String, FieldSchema> excludedOneOf = fields.entrySet().stream()
+        .filter(f -> !allOneOfFields.contains(f.getKey()))
+        .collect(Collectors.toMap(
+            Map.Entry::getKey,
+            Map.Entry::getValue
+        ));
+
+    Map<String, FieldSchema> finalFields = new HashMap<>(excludedOneOf);
+    finalFields.putAll(oneOfFields);
+
+    final List<String> required = schema.getFields().stream()
+        .filter(f -> !f.isOptional())
+        .map(Descriptors.FieldDescriptor::getName).collect(Collectors.toList());
+
+    if (ref) {
+      String definitionName = String.format("record.%s", schema.getFullName());
+      definitions.put(definitionName, new ObjectFieldSchema(finalFields, required));
+      return new RefFieldSchema(String.format("#/definitions/%s", definitionName));
+    } else {
+      return new ObjectFieldSchema(finalFields, required);
+    }
+  }
+
+  private FieldSchema convertField(Descriptors.FieldDescriptor field,
+                              Map<String, FieldSchema> definitions) {
+    final JsonType jsonType = convertType(field);
+
+    FieldSchema fieldSchema;
+    if (jsonType.getType().equals(JsonType.Type.OBJECT)) {
+      fieldSchema = convertObjectSchema(field.getMessageType(), definitions, true);
+    } else {
+      fieldSchema = new SimpleFieldSchema(jsonType);
+    }
+
+    if (field.isRepeated()) {
+      return new ArrayFieldSchema(fieldSchema);
+    } else {
+      return fieldSchema;
+    }
+  }
+
+
+  private JsonType convertType(Descriptors.FieldDescriptor field) {
+    switch (field.getType()) {
+      case INT32:
+      case INT64:
+      case SINT32:
+      case SINT64:
+      case UINT32:
+      case UINT64:
+      case FIXED32:
+      case FIXED64:
+      case SFIXED32:
+      case SFIXED64:
+        return new SimpleJsonType(JsonType.Type.INTEGER);
+      case MESSAGE:
+      case GROUP:
+        return new SimpleJsonType(JsonType.Type.OBJECT);
+      case ENUM:
+        return new EnumJsonType(
+            field.getEnumType().getValues().stream()
+                .map(Descriptors.EnumValueDescriptor::getName)
+                .collect(Collectors.toList())
+        );
+      case BYTES:
+      case STRING:
+        return new SimpleJsonType(JsonType.Type.STRING);
+      case FLOAT:
+      case DOUBLE:
+        return new SimpleJsonType(JsonType.Type.NUMBER);
+      case BOOL:
+        return new SimpleJsonType(JsonType.Type.BOOLEAN);
+      default:
+        return new SimpleJsonType(JsonType.Type.STRING);
+    }
+  }
+}

+ 18 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/RefFieldSchema.java

@@ -0,0 +1,18 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.TextNode;
+
+public class RefFieldSchema implements FieldSchema {
+  private final String ref;
+
+  public RefFieldSchema(String ref) {
+    this.ref = ref;
+  }
+
+  @Override
+  public JsonNode toJsonNode(ObjectMapper mapper) {
+    return mapper.createObjectNode().set("$ref", new TextNode(ref));
+  }
+}

+ 17 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/SimpleFieldSchema.java

@@ -0,0 +1,17 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+public class SimpleFieldSchema implements FieldSchema {
+  private final JsonType type;
+
+  public SimpleFieldSchema(JsonType type) {
+    this.type = type;
+  }
+
+  @Override
+  public JsonNode toJsonNode(ObjectMapper mapper) {
+    return mapper.createObjectNode().setAll(type.toJsonNode(mapper));
+  }
+}

+ 21 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/SimpleJsonType.java

@@ -0,0 +1,21 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.TextNode;
+import java.util.Map;
+
+public class SimpleJsonType extends JsonType {
+
+  public SimpleJsonType(Type type) {
+    super(type);
+  }
+
+  @Override
+  public Map<String, JsonNode> toJsonNode(ObjectMapper mapper) {
+    return Map.of(
+        "type",
+        new TextNode(type.getName())
+    );
+  }
+}
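
Note: the field-schema building blocks above compose directly into Jackson nodes. A small sketch, with illustrative field names only:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.provectus.kafka.ui.util.jsonschema.ArrayFieldSchema;
import com.provectus.kafka.ui.util.jsonschema.JsonType;
import com.provectus.kafka.ui.util.jsonschema.ObjectFieldSchema;
import com.provectus.kafka.ui.util.jsonschema.SimpleFieldSchema;
import com.provectus.kafka.ui.util.jsonschema.SimpleJsonType;
import java.util.List;
import java.util.Map;

public class FieldSchemaSketch {
  public static void main(String[] args) {
    ObjectFieldSchema person = new ObjectFieldSchema(
        Map.of(
            "name", new SimpleFieldSchema(new SimpleJsonType(JsonType.Type.STRING)),
            "tags", new ArrayFieldSchema(
                new SimpleFieldSchema(new SimpleJsonType(JsonType.Type.STRING)))),
        List.of("name"));

    // Renders {"type":"object","properties":{...},"required":["name"]}
    System.out.println(person.toJsonNode(new ObjectMapper()));
  }
}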

+ 7 - 5
kafka-ui-api/src/test/java/com/provectus/kafka/ui/deserialization/SchemaRegistryRecordDeserializerTest.java → kafka-ui-api/src/test/java/com/provectus/kafka/ui/serde/SchemaRegistryRecordDeserializerTest.java

@@ -1,18 +1,20 @@
-package com.provectus.kafka.ui.deserialization;
+package com.provectus.kafka.ui.serde;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.serde.schemaregistry.SchemaRegistryRecordSerDe;
 import java.util.Map;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.common.utils.Bytes;
 import org.junit.jupiter.api.Test;
+import reactor.util.function.Tuples;
 
 class SchemaRegistryRecordDeserializerTest {
 
-  private final SchemaRegistryRecordDeserializer deserializer =
-      new SchemaRegistryRecordDeserializer(
+  private final SchemaRegistryRecordSerDe deserializer =
+      new SchemaRegistryRecordSerDe(
           KafkaCluster.builder()
               .schemaNameTemplate("%s-value")
               .build(),
@@ -25,13 +27,13 @@ class SchemaRegistryRecordDeserializerTest {
     var deserializedRecord = deserializer.deserialize(
         new ConsumerRecord<>("topic", 1, 0, Bytes.wrap("key".getBytes()),
             Bytes.wrap(value.getBytes())));
-    assertEquals(value, deserializedRecord);
+    assertEquals(Tuples.of("key", value), deserializedRecord);
   }
 
   @Test
   public void shouldDeserializeNullValueRecordToEmptyMap() {
     var deserializedRecord = deserializer
         .deserialize(new ConsumerRecord<>("topic", 1, 0, Bytes.wrap("key".getBytes()), null));
-    assertEquals(Map.of(), deserializedRecord);
+    assertEquals(Tuples.of("key", Map.of()), deserializedRecord);
   }
 }

+ 1 - 1
kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/OffsetsSeekTest.java

@@ -147,7 +147,7 @@ class OffsetsSeekTest {
     assertThat(consumer.assignment()).containsExactlyInAnyOrder(tp0, tp1, tp2);
     assertThat(consumer.position(tp0)).isZero();
     assertThat(consumer.position(tp1)).isEqualTo(1L);
-    assertThat(consumer.position(tp2)).isEqualTo(0L);
+    assertThat(consumer.position(tp2)).isZero();
   }
 
 

+ 91 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverterTest.java

@@ -0,0 +1,91 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import java.net.URI;
+import java.net.URISyntaxException;
+import org.apache.avro.Schema;
+import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+
+public class AvroJsonSchemaConverterTest {
+  @Test
+  public void avroConvertTest() throws URISyntaxException {
+    final AvroJsonSchemaConverter converter = new AvroJsonSchemaConverter();
+    URI basePath = new URI("http://example.com/");
+
+    Schema recordSchema = (new Schema.Parser()).parse(
+         " {"
+            + "     \"type\": \"record\","
+            + "     \"name\": \"Message\","
+            + "     \"namespace\": \"com.provectus.kafka\","
+            + "     \"fields\": ["
+            + "         {"
+            + "             \"name\": \"record\","
+            + "             \"type\": {"
+            + "                 \"type\": \"record\","
+            + "                 \"name\": \"InnerMessage\","
+            + "                 \"fields\": ["
+            + "                     {"
+            + "                         \"name\": \"id\","
+            + "                         \"type\": \"long\""
+            + "                     },"
+            + "                     {"
+            + "                         \"name\": \"text\","
+            + "                         \"type\": \"string\""
+            + "                     },"
+            + "                     {"
+            + "                         \"name\": \"long_text\","
+            + "                         \"type\": ["
+            + "                             \"null\","
+            + "                             \"string\""
+            + "                         ],"
+            + "                         \"default\": null"
+            + "                     },"
+            + "                     {"
+            + "                         \"name\": \"order\","
+            + "                         \"type\": {"
+            + "                        \"type\": \"enum\","
+            + "                        \"name\": \"Suit\","
+            + "                        \"symbols\": [\"SPADES\",\"HEARTS\",\"DIAMONDS\",\"CLUBS\"]"
+            + "                         }"
+            + "                     },"
+            + "                     {"
+            + "                         \"name\": \"array\","
+            + "                         \"type\": {"
+            + "                             \"type\": \"array\","
+            + "                             \"items\": \"string\","
+            + "                             \"default\": []"
+            + "                         }"
+            + "                     },"
+            + "                     {"
+            + "                         \"name\": \"map\","
+            + "                         \"type\": {"
+            + "                             \"type\": \"map\","
+            + "                             \"values\": \"long\","
+            + "                             \"default\": {}"
+            + "                         }"
+            + "                     }"
+            + "                 ]"
+            + "             }"
+            + "         }"
+            + "     ]"
+            + " }"
+    );
+
+    String expected =
+            "{\"$id\":\"http://example.com/Message\","
+            + "\"$schema\":\"https://json-schema.org/draft/2020-12/schema\","
+            + "\"type\":\"object\",\"properties\":{\"record\":{\"$ref\":"
+            + "\"#/definitions/RecordInnerMessage\"}},\"required\":[\"record\"],"
+            + "\"definitions\":{\"RecordInnerMessage\":{\"type\":\"object\",\"properties\":"
+            + "{\"long_text\":{\"type\":\"string\"},\"array\":{\"type\":\"array\",\"items\":"
+            + "{\"type\":\"string\"}},\"id\":{\"type\":\"integer\"},\"text\":{\"type\":\"string\"},"
+            + "\"map\":{\"type\":\"object\",\"additionalProperties\":{\"type\":\"integer\"}},"
+            + "\"order\":{\"enum\":[\"SPADES\",\"HEARTS\",\"DIAMONDS\",\"CLUBS\"]}},"
+            + "\"required\":[\"id\",\"text\",\"order\",\"array\",\"map\"]}}}";
+
+    final JsonSchema convertRecord = converter.convert(basePath, recordSchema);
+    Assertions.assertEquals(expected, convertRecord.toJson(new ObjectMapper()));
+
+  }
+}

+ 63 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/ProtobufSchemaConverterTest.java

@@ -0,0 +1,63 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
+import java.net.URI;
+import java.net.URISyntaxException;
+import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+
+
+public class ProtobufSchemaConverterTest {
+
+  @Test
+  public void testSimpleProto() throws URISyntaxException {
+
+    String proto = "syntax = \"proto3\";\n"
+        + "package com.acme;\n"
+        + "\n"
+        + "message MyRecord {\n"
+        + "  string f1 = 1;\n"
+        + "  OtherRecord f2 = 2;\n"
+        + "  repeated OtherRecord f3 = 3;\n"
+        + "}\n"
+        + "\n"
+        + "message OtherRecord {\n"
+        + "  int32 other_id = 1;\n"
+        + "  Order order = 2;\n"
+        + "  oneof optionalField {"
+        + "    string name = 3;"
+        + "    uint64 size = 4;"
+        + "  }"
+        + "}\n"
+        + "\n"
+        + "enum Order {\n"
+        + "    FIRST = 1;\n"
+        + "    SECOND = 1;\n"
+        + "}\n";
+
+    String expected =
+        "{\"$id\":\"http://example.com/com.acme.MyRecord\","
+        + "\"$schema\":\"https://json-schema.org/draft/2020-12/schema\","
+        + "\"type\":\"object\",\"properties\":{\"f1\":{\"type\":\"string\"},"
+        + "\"f2\":{\"$ref\":\"#/definitions/record.com.acme.OtherRecord\"},"
+        + "\"f3\":{\"type\":\"array\","
+        + "\"items\":{\"$ref\":\"#/definitions/record.com.acme.OtherRecord\"}}},"
+        + "\"required\":[\"f3\"],"
+        + "\"definitions\":"
+        + "{\"record.com.acme.OtherRecord\":"
+        + "{\"type\":\"object\",\"properties\":"
+        + "{\"optionalField\":{\"oneOf\":[{\"type\":\"string\"},"
+        + "{\"type\":\"integer\"}]},\"other_id\":"
+        + "{\"type\":\"integer\"},\"order\":{\"enum\":[\"FIRST\",\"SECOND\"]}}}}}";
+
+    ProtobufSchema protobufSchema = new ProtobufSchema(proto);
+
+    final ProtobufSchemaConverter converter = new ProtobufSchemaConverter();
+    URI basePath = new URI("http://example.com/");
+
+    final JsonSchema convert =
+        converter.convert(basePath, protobufSchema.toDescriptor("MyRecord"));
+    Assertions.assertEquals(expected, convert.toJson(new ObjectMapper()));
+  }
+}

+ 96 - 0
kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml

@@ -364,6 +364,57 @@ paths:
           description: OK
         404:
           description: Not found
+    post:
+      tags:
+        - Messages
+      summary: sendTopicMessages
+      operationId: sendTopicMessages
+      parameters:
+        - name: clusterName
+          in: path
+          required: true
+          schema:
+            type: string
+        - name: topicName
+          in: path
+          required: true
+          schema:
+            type: string
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/CreateTopicMessage'
+      responses:
+        200:
+          description: OK
+        404:
+          description: Not found
+
+  /api/clusters/{clusterName}/topics/{topicName}/messages/schema:
+    get:
+      tags:
+        - Messages
+      summary: getTopicSchema
+      operationId: getTopicSchema
+      parameters:
+        - name: clusterName
+          in: path
+          required: true
+          schema:
+            type: string
+        - name: topicName
+          in: path
+          required: true
+          schema:
+            type: string
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/TopicMessageSchema'
 
   /api/clusters/{clusterName}/topics/{topicName}/consumergroups:
     get:
@@ -1435,6 +1486,51 @@ components:
         - clusterId
         - consumerGroupId
 
+    CreateTopicMessage:
+      type: object
+      properties:
+        partition:
+          type: integer
+        key:
+          type: string
+        headers:
+          type: object
+          additionalProperties:
+            type: string
+        content:
+          type: object
+      required:
+        - content
+
+    TopicMessageSchema:
+      type: object
+      properties:
+        key:
+          $ref: "#/components/schemas/MessageSchema"
+        value:
+          $ref: "#/components/schemas/MessageSchema"
+      required:
+        - key
+        - value
+
+    MessageSchema:
+      type: object
+      properties:
+        name:
+          type: string
+        source:
+          type: string
+          enum:
+            - SOURCE_SCHEMA_REGISTRY
+            - SOURCE_PROTO_FILE
+            - SOURCE_UNKNOWN
+        schema:
+          type: string
+      required:
+        - name
+        - source
+        - schema
+
     TopicMessage:
       type: object
       properties: