Pluggable serde (#2125)

* checkstyle

* node version reverted

* api comments

* unused properties removed

* serde api fix

* renamings

* PR review fixes

* checkstyle fix

* PR fixes

* UUID serde tests added

* UUID serde tests added

* fixed due to review comments

* 1. kafka-ui-serde-api renamed 2. kafka-clients dependency removed from kafka-ui-serde-api module

* dep fix

* Add serde to send message component (#2620)

* Add serde to send message component

* adds serde selects to messages views

Co-authored-by: iliax <ikuramshin@provectus.com>
Co-authored-by: Roman Zabaluev <rzabaluev@provectus.com>
Co-authored-by: Kristina Kiparoidze <kkiparoidze@provectus.com>
Co-authored-by: Oleg Shur <workshur@gmail.com>
Ilya Kuramshin 2 years ago
parent
commit
049b35fc99
100 changed files with 3931 additions and 2166 deletions
  1. etc/checkstyle/checkstyle.xml (+1 -1)
  2. kafka-ui-api/pom.xml (+7 -2)
  3. kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java (+14 -7)
  4. kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/MessagesController.java (+33 -15)
  5. kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/AbstractEmitter.java (+4 -5)
  6. kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/BackwardRecordEmitter.java (+2 -2)
  7. kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ForwardRecordEmitter.java (+2 -2)
  8. kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/TailingEmitter.java (+2 -2)
  9. kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ClusterMapper.java (+0 -11)
  10. kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java (+0 -20)
  11. kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java (+0 -9)
  12. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/DeserializationService.java (+0 -52)
  13. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/ProtobufFileRecordSerDe.java (+0 -210)
  14. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/RecordSerDe.java (+0 -39)
  15. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/SimpleRecordSerDe.java (+0 -53)
  16. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/AvroMessageFormatter.java (+0 -29)
  17. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/JsonSchemaMessageFormatter.java (+0 -25)
  18. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/MessageFormat.java (+0 -15)
  19. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/MessageFormatter.java (+0 -9)
  20. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/ProtobufMessageFormatter.java (+0 -28)
  21. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/SchemaRegistryAwareRecordSerDe.java (+0 -298)
  22. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/StringMessageFormatter.java (+0 -11)
  23. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/BuiltInSerde.java (+12 -0)
  24. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/ClassloaderUtil.java (+12 -0)
  25. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/ClusterSerdes.java (+218 -0)
  26. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/ConsumerRecordDeserializer.java (+139 -0)
  27. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/CustomSerdeLoader.java (+173 -0)
  28. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/ProducerRecordCreator.java (+41 -0)
  29. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/PropertyResolverImpl.java (+64 -0)
  30. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/RecordHeaderImpl.java (+23 -0)
  31. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/RecordHeadersImpl.java (+26 -0)
  32. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdeInstance.java (+71 -0)
  33. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/Base64Serde.java (+74 -0)
  34. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/Int32Serde.java (+70 -0)
  35. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/Int64Serde.java (+76 -0)
  36. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java (+178 -0)
  37. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/StringSerde.java (+65 -0)
  38. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/UInt32Serde.java (+70 -0)
  39. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/UInt64Serde.java (+79 -0)
  40. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/UuidBinarySerde.java (+91 -0)
  41. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/AvroSchemaRegistrySerializer.java (+6 -9)
  42. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/JsonSchemaSchemaRegistrySerializer.java (+7 -9)
  43. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java (+73 -0)
  44. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/ProtobufSchemaRegistrySerializer.java (+7 -8)
  45. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java (+265 -0)
  46. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerializer.java (+11 -9)
  47. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaType.java (+14 -0)
  48. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java (+2 -3)
  49. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/DeserializationService.java (+140 -0)
  50. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java (+27 -33)
  51. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java (+2 -1)
  52. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java (+0 -11)
  53. kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ClusterUtil.java (+0 -80)
  54. kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ConsumerRecordUtil.java (+0 -37)
  55. kafka-ui-api/src/test/java/com/provectus/kafka/ui/emitter/TailingEmitterTest.java (+3 -1)
  56. kafka-ui-api/src/test/java/com/provectus/kafka/ui/serde/ProtobufFileRecordSerDeTest.java (+0 -125)
  57. kafka-ui-api/src/test/java/com/provectus/kafka/ui/serde/SimpleRecordSerDeTest.java (+0 -40)
  58. kafka-ui-api/src/test/java/com/provectus/kafka/ui/serde/schemaregistry/SchemaRegistryAwareRecordSerDeTest.java (+0 -203)
  59. kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/PropertyResolverImplTest.java (+156 -0)
  60. kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/Base64SerdeTest.java (+66 -0)
  61. kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/Int32SerdeTest.java (+46 -0)
  62. kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/Int64SerdeTest.java (+47 -0)
  63. kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerdeTest.java (+152 -0)
  64. kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/UInt32SerdeTest.java (+59 -0)
  65. kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/UInt64SerdeTest.java (+58 -0)
  66. kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/UuidBinarySerdeTest.java (+101 -0)
  67. kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java (+157 -0)
  68. kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/MessagesServiceTest.java (+2 -1)
  69. kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java (+31 -12)
  70. kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/SendAndReadTests.java (+68 -103)
  71. kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml (+102 -54)
  72. kafka-ui-react-app/jest.config.ts (+1 -0)
  73. kafka-ui-react-app/src/components/Connect/List/List.tsx (+6 -3)
  74. kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.styled.ts (+17 -8)
  75. kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx (+152 -127)
  76. kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/__tests__/Filters.spec.tsx (+13 -5)
  77. kafka-ui-react-app/src/components/Topics/Topic/Messages/Messages.tsx (+25 -10)
  78. kafka-ui-react-app/src/components/Topics/Topic/Messages/__test__/Messages.spec.tsx (+13 -4)
  79. kafka-ui-react-app/src/components/Topics/Topic/Messages/__test__/MessagesTable.spec.tsx (+0 -1)
  80. kafka-ui-react-app/src/components/Topics/Topic/MessagesV2/FiltersBar/Form.tsx (+53 -4)
  81. kafka-ui-react-app/src/components/Topics/Topic/MessagesV2/Messages.styled.ts (+1 -0)
  82. kafka-ui-react-app/src/components/Topics/Topic/MessagesV2/Messages.tsx (+0 -25)
  83. kafka-ui-react-app/src/components/Topics/Topic/MessagesV2/MessagesContainer.tsx (+62 -0)
  84. kafka-ui-react-app/src/components/Topics/Topic/MessagesV2/utils/getDefaultSerdeName.ts (+13 -0)
  85. kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx (+119 -99)
  86. kafka-ui-react-app/src/components/Topics/Topic/SendMessage/__test__/SendMessage.spec.tsx (+24 -26)
  87. kafka-ui-react-app/src/components/Topics/Topic/SendMessage/__test__/utils.spec.ts (+85 -0)
  88. kafka-ui-react-app/src/components/Topics/Topic/SendMessage/__test__/validateMessage.spec.ts (+0 -89)
  89. kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts (+97 -0)
  90. kafka-ui-react-app/src/components/Topics/Topic/SendMessage/validateMessage.ts (+0 -58)
  91. kafka-ui-react-app/src/components/Topics/Topic/Topic.tsx (+6 -3)
  92. kafka-ui-react-app/src/components/common/table/__tests__/TableHeaderCell.spec.tsx (+1 -1)
  93. kafka-ui-react-app/src/components/contexts/TopicMessagesContext.ts (+0 -1)
  94. kafka-ui-react-app/src/lib/fixtures/topicMessages.ts (+38 -0)
  95. kafka-ui-react-app/src/lib/fixtures/topics.ts (+0 -50)
  96. kafka-ui-react-app/src/lib/hooks/api/__tests__/topicMessages.spec.ts (+36 -0)
  97. kafka-ui-react-app/src/lib/hooks/api/__tests__/topics.spec.ts (+0 -7)
  98. kafka-ui-react-app/src/lib/hooks/api/topicMessages.tsx (+19 -0)
  99. kafka-ui-react-app/src/lib/hooks/api/topics.ts (+1 -8)
  100. kafka-ui-react-app/src/lib/hooks/useSearch.ts (+0 -53)

+ 1 - 1
etc/checkstyle/checkstyle.xml

@@ -297,7 +297,7 @@
                       value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, METHOD_DEF, CTOR_DEF, VARIABLE_DEF"/>
         </module>
         <module name="JavadocMethod">
-            <property name="scope" value="public"/>
+            <property name="accessModifiers" value="public"/>
             <property name="allowMissingParamTags" value="true"/>
             <property name="allowMissingReturnTag" value="true"/>
             <property name="allowedAnnotations" value="Override, Test"/>

+ 7 - 2
kafka-ui-api/pom.xml

@@ -54,6 +54,11 @@
             <artifactId>kafka-ui-contract</artifactId>
             <version>${project.version}</version>
         </dependency>
+        <dependency>
+            <groupId>com.provectus</groupId>
+            <artifactId>kafka-ui-serde-api</artifactId>
+            <version>${project.version}</version>
+        </dependency>
         <dependency>
             <groupId>org.apache.kafka</groupId>
             <artifactId>kafka-clients</artifactId>
@@ -277,12 +282,12 @@
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-checkstyle-plugin</artifactId>
-                <version>3.1.1</version>
+                <version>3.1.2</version>
                 <dependencies>
                     <dependency>
                         <groupId>com.puppycrawl.tools</groupId>
                         <artifactId>checkstyle</artifactId>
-                        <version>8.32</version>
+                        <version>10.3.1</version>
                     </dependency>
                 </dependencies>
                 <executions>

+ 14 - 7
kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java

@@ -1,6 +1,7 @@
 package com.provectus.kafka.ui.config;
 
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -28,13 +29,6 @@ public class ClustersProperties {
     SchemaRegistryAuth schemaRegistryAuth;
     String ksqldbServer;
     KsqldbServerAuth ksqldbServerAuth;
-    String schemaNameTemplate = "%s-value";
-    String keySchemaNameTemplate = "%s-key";
-    String protobufFile;
-    String protobufMessageName;
-    Map<String, String> protobufMessageNameByTopic;
-    String protobufMessageNameForKey;
-    Map<String, String> protobufMessageNameForKeyByTopic;
     List<ConnectCluster> kafkaConnect;
     int jmxPort;
     boolean jmxSsl;
@@ -43,6 +37,9 @@ public class ClustersProperties {
     Properties properties;
     boolean readOnly = false;
     boolean disableLogDirsCollection = false;
+    List<SerdeConfig> serde = new ArrayList<>();
+    String defaultKeySerde;
+    String defaultValueSerde;
   }
 
   @Data
@@ -59,6 +56,16 @@ public class ClustersProperties {
     String password;
   }
 
+  @Data
+  public static class SerdeConfig {
+    String name;
+    String className;
+    String filePath;
+    Map<String, Object> properties = new HashMap<>();
+    String topicKeysPattern;
+    String topicValuesPattern;
+  }
+
   @Data
   @ToString(exclude = "password")
   public static class KsqldbServerAuth {
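
For context, a sketch of how one entry of the new serde list might look once Spring binds the configuration; every value below is an illustrative assumption, not something this PR prescribes:

    // Illustrative only: SerdeConfig is a Lombok @Data class, so setters are generated.
    ClustersProperties.SerdeConfig cfg = new ClustersProperties.SerdeConfig();
    cfg.setName("ProtobufFile");                          // name shown in the UI, referenced by defaultKeySerde/defaultValueSerde
    cfg.setFilePath("/schemas/example.proto");            // optional serde-specific source file (hypothetical path)
    cfg.setProperties(Map.of("messageName", "test.Msg")); // free-form per-serde properties (hypothetical key)
    cfg.setTopicValuesPattern("proto-topics-.*");         // suggest this serde for topic values matching the pattern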

+ 33 - 15
kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/MessagesController.java

@@ -1,5 +1,7 @@
 package com.provectus.kafka.ui.controller;
 
+import static com.provectus.kafka.ui.serde.api.Serde.Target.KEY;
+import static com.provectus.kafka.ui.serde.api.Serde.Target.VALUE;
 import static java.util.stream.Collectors.toMap;
 
 import com.provectus.kafka.ui.api.MessagesApi;
@@ -8,10 +10,11 @@ import com.provectus.kafka.ui.model.CreateTopicMessageDTO;
 import com.provectus.kafka.ui.model.MessageFilterTypeDTO;
 import com.provectus.kafka.ui.model.SeekDirectionDTO;
 import com.provectus.kafka.ui.model.SeekTypeDTO;
+import com.provectus.kafka.ui.model.SerdeUsageDTO;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
-import com.provectus.kafka.ui.model.TopicMessageSchemaDTO;
+import com.provectus.kafka.ui.model.TopicSerdeSuggestionDTO;
+import com.provectus.kafka.ui.service.DeserializationService;
 import com.provectus.kafka.ui.service.MessagesService;
-import com.provectus.kafka.ui.service.TopicsService;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
@@ -35,7 +38,7 @@ public class MessagesController extends AbstractController implements MessagesAp
   private static final int DEFAULT_LOAD_RECORD_LIMIT = 20;
 
   private final MessagesService messagesService;
-  private final TopicsService topicsService;
+  private final DeserializationService deserializationService;
 
   @Override
   public Mono<ResponseEntity<Void>> deleteTopicMessages(
@@ -49,10 +52,17 @@ public class MessagesController extends AbstractController implements MessagesAp
   }
 
   @Override
-  public Mono<ResponseEntity<Flux<TopicMessageEventDTO>>> getTopicMessages(
-      String clusterName, String topicName, SeekTypeDTO seekType, List<String> seekTo,
-      Integer limit, String q, MessageFilterTypeDTO filterQueryType,
-      SeekDirectionDTO seekDirection, ServerWebExchange exchange) {
+  public Mono<ResponseEntity<Flux<TopicMessageEventDTO>>> getTopicMessages(String clusterName,
+                                                                           String topicName,
+                                                                           SeekTypeDTO seekType,
+                                                                           List<String> seekTo,
+                                                                           Integer limit,
+                                                                           String q,
+                                                                           MessageFilterTypeDTO filterQueryType,
+                                                                           SeekDirectionDTO seekDirection,
+                                                                           String keySerde,
+                                                                           String valueSerde,
+                                                                           ServerWebExchange exchange) {
     var positions = new ConsumerPosition(
         seekType != null ? seekType : SeekTypeDTO.BEGINNING,
         parseSeekTo(topicName, seekTo),
@@ -64,18 +74,11 @@ public class MessagesController extends AbstractController implements MessagesAp
     return Mono.just(
         ResponseEntity.ok(
             messagesService.loadMessages(
-                getCluster(clusterName), topicName, positions, q, filterQueryType, recordsLimit)
+                getCluster(clusterName), topicName, positions, q, filterQueryType, recordsLimit, keySerde, valueSerde)
         )
     );
   }
 
-  @Override
-  public Mono<ResponseEntity<TopicMessageSchemaDTO>> getTopicSchema(
-      String clusterName, String topicName, ServerWebExchange exchange) {
-    return Mono.just(topicsService.getTopicSchema(getCluster(clusterName), topicName))
-        .map(ResponseEntity::ok);
-  }
-
   @Override
   public Mono<ResponseEntity<Void>> sendTopicMessages(
       String clusterName, String topicName, @Valid Mono<CreateTopicMessageDTO> createTopicMessage,
@@ -109,4 +112,19 @@ public class MessagesController extends AbstractController implements MessagesAp
         .collect(toMap(Pair::getKey, Pair::getValue));
   }
 
+  @Override
+  public Mono<ResponseEntity<TopicSerdeSuggestionDTO>> getSerdes(String clusterName,
+                                                                 String topicName,
+                                                                 SerdeUsageDTO use,
+                                                                 ServerWebExchange exchange) {
+    return Mono.just(
+        new TopicSerdeSuggestionDTO()
+            .key(use == SerdeUsageDTO.SERIALIZE
+                ? deserializationService.getSerdesForSerialize(getCluster(clusterName), topicName, KEY)
+                : deserializationService.getSerdesForDeserialize(getCluster(clusterName), topicName, KEY))
+            .value(use == SerdeUsageDTO.SERIALIZE
+                ? deserializationService.getSerdesForSerialize(getCluster(clusterName), topicName, VALUE)
+                : deserializationService.getSerdesForDeserialize(getCluster(clusterName), topicName, VALUE))
+    ).map(ResponseEntity::ok);
+  }
 }
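
A sketch of how a client might consume the new serde-suggestion endpoint; the path and the "use" query parameter are assumptions inferred from this controller and the swagger changes further down:

    // Hypothetical Spring WebClient call (endpoint path assumed, not confirmed here).
    WebClient.create("http://localhost:8080")
        .get()
        .uri(b -> b.path("/api/clusters/{cluster}/topic/{topic}/serdes")
            .queryParam("use", "SERIALIZE") // or DESERIALIZE
            .build("local", "my-topic"))
        .retrieve()
        .bodyToMono(TopicSerdeSuggestionDTO.class)
        .subscribe(s -> System.out.println(s.getKey())); // serdes suggested for keys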

+ 4 - 5
kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/AbstractEmitter.java

@@ -3,8 +3,7 @@ package com.provectus.kafka.ui.emitter;
 import com.provectus.kafka.ui.model.TopicMessageDTO;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.model.TopicMessagePhaseDTO;
-import com.provectus.kafka.ui.serde.RecordSerDe;
-import com.provectus.kafka.ui.util.ClusterUtil;
+import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
 import java.time.Duration;
 import java.time.Instant;
 import org.apache.kafka.clients.consumer.Consumer;
@@ -23,10 +22,10 @@ public abstract class AbstractEmitter {
   // To workaround this we can assume that after N consecutive empty polls all target messages were read.
   public static final int NO_MORE_DATA_EMPTY_POLLS_COUNT = 3;
 
-  private final RecordSerDe recordDeserializer;
+  private final ConsumerRecordDeserializer recordDeserializer;
   private final ConsumingStats consumingStats = new ConsumingStats();
 
-  protected AbstractEmitter(RecordSerDe recordDeserializer) {
+  protected AbstractEmitter(ConsumerRecordDeserializer recordDeserializer) {
     this.recordDeserializer = recordDeserializer;
   }
 
@@ -46,7 +45,7 @@ public abstract class AbstractEmitter {
 
   protected void sendMessage(FluxSink<TopicMessageEventDTO> sink,
                                                        ConsumerRecord<Bytes, Bytes> msg) {
-    final TopicMessageDTO topicMessage = ClusterUtil.mapToTopicMessage(msg, recordDeserializer);
+    final TopicMessageDTO topicMessage = recordDeserializer.deserialize(msg);
     sink.next(
         new TopicMessageEventDTO()
             .type(TopicMessageEventDTO.TypeEnum.MESSAGE)

+ 2 - 2
kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/BackwardRecordEmitter.java

@@ -1,7 +1,7 @@
 package com.provectus.kafka.ui.emitter;
 
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
-import com.provectus.kafka.ui.serde.RecordSerDe;
+import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
 import com.provectus.kafka.ui.util.OffsetsSeekBackward;
 import java.time.Duration;
 import java.util.ArrayList;
@@ -35,7 +35,7 @@ public class BackwardRecordEmitter
   public BackwardRecordEmitter(
       Function<Map<String, Object>, KafkaConsumer<Bytes, Bytes>> consumerSupplier,
       OffsetsSeekBackward offsetsSeek,
-      RecordSerDe recordDeserializer) {
+      ConsumerRecordDeserializer recordDeserializer) {
     super(recordDeserializer);
     this.offsetsSeek = offsetsSeek;
     this.consumerSupplier = consumerSupplier;

+ 2 - 2
kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ForwardRecordEmitter.java

@@ -1,7 +1,7 @@
 package com.provectus.kafka.ui.emitter;
 
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
-import com.provectus.kafka.ui.serde.RecordSerDe;
+import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
 import com.provectus.kafka.ui.util.OffsetsSeek;
 import java.util.function.Supplier;
 import lombok.extern.slf4j.Slf4j;
@@ -22,7 +22,7 @@ public class ForwardRecordEmitter
   public ForwardRecordEmitter(
       Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
       OffsetsSeek offsetsSeek,
-      RecordSerDe recordDeserializer) {
+      ConsumerRecordDeserializer recordDeserializer) {
     super(recordDeserializer);
     this.consumerSupplier = consumerSupplier;
     this.offsetsSeek = offsetsSeek;

+ 2 - 2
kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/TailingEmitter.java

@@ -1,7 +1,7 @@
 package com.provectus.kafka.ui.emitter;
 
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
-import com.provectus.kafka.ui.serde.RecordSerDe;
+import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
 import com.provectus.kafka.ui.util.OffsetsSeek;
 import java.util.function.Supplier;
 import lombok.extern.slf4j.Slf4j;
@@ -17,7 +17,7 @@ public class TailingEmitter extends AbstractEmitter
   private final Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier;
   private final OffsetsSeek offsetsSeek;
 
-  public TailingEmitter(RecordSerDe recordDeserializer,
+  public TailingEmitter(ConsumerRecordDeserializer recordDeserializer,
                         Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
                         OffsetsSeek offsetsSeek) {
     super(recordDeserializer);

+ 0 - 11
kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ClusterMapper.java

@@ -34,7 +34,6 @@ import com.provectus.kafka.ui.model.TopicDetailsDTO;
 import com.provectus.kafka.ui.model.schemaregistry.InternalCompatibilityCheck;
 import com.provectus.kafka.ui.model.schemaregistry.InternalCompatibilityLevel;
 import com.provectus.kafka.ui.util.JmxClusterUtil;
-import java.nio.file.Path;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
@@ -51,7 +50,6 @@ public interface ClusterMapper {
 
   ClusterDTO toCluster(InternalClusterState clusterState);
 
-  @Mapping(target = "protobufFile", source = "protobufFile", qualifiedByName = "resolvePath")
   @Mapping(target = "properties", source = "properties", qualifiedByName = "setProperties")
   @Mapping(target = "schemaRegistry", source = ".", qualifiedByName = "setSchemaRegistry")
   @Mapping(target = "ksqldbServer", source = ".", qualifiedByName = "setKsqldbServer")
@@ -160,15 +158,6 @@ public interface ClusterMapper {
     return brokerDiskUsage;
   }
 
-  @Named("resolvePath")
-  default Path resolvePath(String path) {
-    if (path != null) {
-      return Path.of(path);
-    } else {
-      return null;
-    }
-  }
-
   @Named("setProperties")
   default Properties setProperties(Properties properties) {
     Properties copy = new Properties();

+ 0 - 20
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java

@@ -63,26 +63,6 @@ public class InternalConsumerGroup {
     return builder.build();
   }
 
-  // removes data for all partitions that are not fit filter
-  public InternalConsumerGroup retainDataForPartitions(Predicate<TopicPartition> partitionsFilter) {
-    var offsetsMap = getOffsets().entrySet().stream()
-        .filter(e -> partitionsFilter.test(e.getKey()))
-        .collect(Collectors.toMap(
-            Map.Entry::getKey,
-            Map.Entry::getValue
-        ));
-
-    var nonEmptyMembers = getMembers().stream()
-        .map(m -> filterConsumerMemberTopic(m, partitionsFilter))
-        .filter(m -> !m.getAssignment().isEmpty())
-        .collect(Collectors.toList());
-
-    return toBuilder()
-        .offsets(offsetsMap)
-        .members(nonEmptyMembers)
-        .build();
-  }
-
   private InternalConsumerGroup.InternalMember filterConsumerMemberTopic(
       InternalConsumerGroup.InternalMember member, Predicate<TopicPartition> partitionsFilter) {
     var topicPartitions = member.getAssignment()

+ 0 - 9
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java

@@ -1,8 +1,6 @@
 package com.provectus.kafka.ui.model;
 
-import java.nio.file.Path;
 import java.util.List;
-import java.util.Map;
 import java.util.Properties;
 import lombok.AccessLevel;
 import lombok.AllArgsConstructor;
@@ -23,13 +21,6 @@ public class KafkaCluster {
   private final InternalSchemaRegistry schemaRegistry;
   private final InternalKsqlServer ksqldbServer;
   private final List<KafkaConnectCluster> kafkaConnect;
-  private final String schemaNameTemplate;
-  private final String keySchemaNameTemplate;
-  private final Path protobufFile;
-  private final String protobufMessageName;
-  private final Map<String, String> protobufMessageNameByTopic;
-  private final String protobufMessageNameForKey;
-  private final Map<String, String> protobufMessageNameForKeyByTopic;
   private final Properties properties;
   private final boolean readOnly;
   private final boolean disableLogDirsCollection;

+ 0 - 52
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/DeserializationService.java

@@ -1,52 +0,0 @@
-package com.provectus.kafka.ui.serde;
-
-import com.provectus.kafka.ui.model.KafkaCluster;
-import com.provectus.kafka.ui.serde.schemaregistry.SchemaRegistryAwareRecordSerDe;
-import com.provectus.kafka.ui.service.ClustersStorage;
-import java.util.Map;
-import java.util.stream.Collectors;
-import javax.annotation.PostConstruct;
-import lombok.RequiredArgsConstructor;
-import lombok.extern.slf4j.Slf4j;
-import org.springframework.stereotype.Component;
-
-@Slf4j
-@Component
-@RequiredArgsConstructor
-public class DeserializationService {
-
-  private final ClustersStorage clustersStorage;
-  private Map<String, RecordSerDe> clusterDeserializers;
-
-  @PostConstruct
-  public void init() {
-    this.clusterDeserializers = clustersStorage.getKafkaClusters().stream()
-        .collect(Collectors.toMap(
-            KafkaCluster::getName,
-            this::createRecordDeserializerForCluster
-        ));
-  }
-
-  private RecordSerDe createRecordDeserializerForCluster(KafkaCluster cluster) {
-    try {
-      if (cluster.getProtobufFile() != null) {
-        log.info("Using ProtobufFileRecordSerDe for cluster '{}'", cluster.getName());
-        return new ProtobufFileRecordSerDe(cluster.getProtobufFile(),
-            cluster.getProtobufMessageNameByTopic(), cluster.getProtobufMessageNameForKeyByTopic(),
-            cluster.getProtobufMessageName(), cluster.getProtobufMessageNameForKey());
-      } else if (cluster.getSchemaRegistry() != null) {
-        log.info("Using SchemaRegistryAwareRecordSerDe for cluster '{}'", cluster.getName());
-        return new SchemaRegistryAwareRecordSerDe(cluster);
-      } else {
-        log.info("Using SimpleRecordSerDe for cluster '{}'", cluster.getName());
-        return new SimpleRecordSerDe();
-      }
-    } catch (Throwable e) {
-      throw new RuntimeException("Can't init deserializer", e);
-    }
-  }
-
-  public RecordSerDe getRecordDeserializerForCluster(KafkaCluster cluster) {
-    return clusterDeserializers.get(cluster.getName());
-  }
-}

+ 0 - 210
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/ProtobufFileRecordSerDe.java

@@ -1,210 +0,0 @@
-package com.provectus.kafka.ui.serde;
-
-import com.google.protobuf.Descriptors.Descriptor;
-import com.google.protobuf.DynamicMessage;
-import com.google.protobuf.util.JsonFormat;
-import com.provectus.kafka.ui.model.MessageSchemaDTO;
-import com.provectus.kafka.ui.model.TopicMessageSchemaDTO;
-import com.provectus.kafka.ui.serde.schemaregistry.MessageFormat;
-import com.provectus.kafka.ui.serde.schemaregistry.StringMessageFormatter;
-import com.provectus.kafka.ui.util.jsonschema.JsonSchema;
-import com.provectus.kafka.ui.util.jsonschema.ProtobufSchemaConverter;
-import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
-import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaUtils;
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Optional;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-import javax.annotation.Nullable;
-import lombok.SneakyThrows;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.utils.Bytes;
-
-@Slf4j
-public class ProtobufFileRecordSerDe implements RecordSerDe {
-  private static final StringMessageFormatter FALLBACK_FORMATTER = new StringMessageFormatter();
-
-  private final ProtobufSchema protobufSchema;
-  private final Path protobufSchemaPath;
-  private final ProtobufSchemaConverter schemaConverter = new ProtobufSchemaConverter();
-  private final Map<String, Descriptor> messageDescriptorMap;
-  private final Map<String, Descriptor> keyMessageDescriptorMap;
-  private final Descriptor defaultMessageDescriptor;
-  private final Descriptor defaultKeyMessageDescriptor;
-
-  public ProtobufFileRecordSerDe(Path protobufSchemaPath, Map<String, String> messageNameMap,
-                                 Map<String, String> keyMessageNameMap, String defaultMessageName,
-                                 @Nullable String defaultKeyMessageName)
-      throws IOException {
-    this.protobufSchemaPath = protobufSchemaPath;
-    try (final Stream<String> lines = Files.lines(protobufSchemaPath)) {
-      var schema = new ProtobufSchema(
-          lines.collect(Collectors.joining("\n"))
-      );
-      if (defaultMessageName != null) {
-        this.protobufSchema = schema.copy(defaultMessageName);
-      } else {
-        this.protobufSchema = schema;
-      }
-      this.messageDescriptorMap = new HashMap<>();
-      if (messageNameMap != null) {
-        populateDescriptors(messageNameMap, messageDescriptorMap);
-      }
-      this.keyMessageDescriptorMap = new HashMap<>();
-      if (keyMessageNameMap != null) {
-        populateDescriptors(keyMessageNameMap, keyMessageDescriptorMap);
-      }
-      defaultMessageDescriptor = Objects.requireNonNull(protobufSchema.toDescriptor(),
-          "The given message type is not found in protobuf definition: "
-              + defaultMessageName);
-      if (defaultKeyMessageName != null) {
-        defaultKeyMessageDescriptor = schema.copy(defaultKeyMessageName).toDescriptor();
-      } else {
-        defaultKeyMessageDescriptor = null;
-      }
-    }
-  }
-
-  private void populateDescriptors(Map<String, String> messageNameMap, Map<String, Descriptor> messageDescriptorMap) {
-    for (Map.Entry<String, String> entry : messageNameMap.entrySet()) {
-      var descriptor = Objects.requireNonNull(protobufSchema.toDescriptor(entry.getValue()),
-          "The given message type is not found in protobuf definition: "
-              + entry.getValue());
-      messageDescriptorMap.put(entry.getKey(), descriptor);
-    }
-  }
-
-  @Override
-  public DeserializedKeyValue deserialize(ConsumerRecord<Bytes, Bytes> msg) {
-    var builder = DeserializedKeyValue.builder();
-
-    if (msg.key() != null) {
-      Descriptor descriptor = getKeyDescriptor(msg.topic());
-      if (descriptor == null) {
-        builder.key(FALLBACK_FORMATTER.format(msg.topic(), msg.key().get()));
-        builder.keyFormat(FALLBACK_FORMATTER.getFormat());
-      } else {
-        try {
-          builder.key(parse(msg.key().get(), descriptor));
-          builder.keyFormat(MessageFormat.PROTOBUF);
-        } catch (Throwable e) {
-          log.debug("Failed to deserialize key as protobuf, falling back to string formatter", e);
-          builder.key(FALLBACK_FORMATTER.format(msg.topic(), msg.key().get()));
-          builder.keyFormat(FALLBACK_FORMATTER.getFormat());
-        }
-      }
-    }
-
-    if (msg.value() != null) {
-      try {
-        builder.value(parse(msg.value().get(), getDescriptor(msg.topic())));
-        builder.valueFormat(MessageFormat.PROTOBUF);
-      } catch (Throwable e) {
-        log.debug("Failed to deserialize value as protobuf, falling back to string formatter", e);
-        builder.key(FALLBACK_FORMATTER.format(msg.topic(), msg.value().get()));
-        builder.keyFormat(FALLBACK_FORMATTER.getFormat());
-      }
-    }
-
-    return builder.build();
-  }
-
-  @Nullable
-  private Descriptor getKeyDescriptor(String topic) {
-    return keyMessageDescriptorMap.getOrDefault(topic, defaultKeyMessageDescriptor);
-  }
-
-  private Descriptor getDescriptor(String topic) {
-    return messageDescriptorMap.getOrDefault(topic, defaultMessageDescriptor);
-  }
-
-  @SneakyThrows
-  private String parse(byte[] value, Descriptor descriptor) {
-    DynamicMessage protoMsg = DynamicMessage.parseFrom(
-        descriptor,
-        new ByteArrayInputStream(value)
-    );
-    byte[] jsonFromProto = ProtobufSchemaUtils.toJson(protoMsg);
-    return new String(jsonFromProto);
-  }
-
-  @Override
-  public ProducerRecord<byte[], byte[]> serialize(String topic,
-                                                  @Nullable String key,
-                                                  @Nullable String data,
-                                                  @Nullable Integer partition) {
-    byte[] keyPayload = null;
-    byte[] valuePayload = null;
-
-    if (key != null) {
-      Descriptor keyDescriptor = getKeyDescriptor(topic);
-      if (keyDescriptor == null) {
-        keyPayload = key.getBytes();
-      } else {
-        DynamicMessage.Builder builder = DynamicMessage.newBuilder(keyDescriptor);
-        try {
-          JsonFormat.parser().merge(key, builder);
-          keyPayload = builder.build().toByteArray();
-        } catch (Throwable e) {
-          throw new RuntimeException("Failed to merge record key for topic " + topic, e);
-        }
-      }
-    }
-
-    if (data != null) {
-      DynamicMessage.Builder builder = DynamicMessage.newBuilder(getDescriptor(topic));
-      try {
-        JsonFormat.parser().merge(data, builder);
-        valuePayload = builder.build().toByteArray();
-      } catch (Throwable e) {
-        throw new RuntimeException("Failed to merge record value for topic " + topic, e);
-      }
-    }
-
-    return new ProducerRecord<>(
-        topic,
-        partition,
-        keyPayload,
-        valuePayload);
-  }
-
-  @Override
-  public TopicMessageSchemaDTO getTopicSchema(String topic) {
-    JsonSchema keyJsonSchema;
-
-    Descriptor keyDescriptor = getKeyDescriptor(topic);
-    if (keyDescriptor == null) {
-      keyJsonSchema = JsonSchema.stringSchema();
-    } else {
-      keyJsonSchema = schemaConverter.convert(
-          protobufSchemaPath.toUri(),
-          keyDescriptor);
-    }
-
-    final MessageSchemaDTO keySchema = new MessageSchemaDTO()
-        .name(protobufSchema.fullName())
-        .source(MessageSchemaDTO.SourceEnum.PROTO_FILE)
-        .schema(keyJsonSchema.toJson());
-
-    final JsonSchema valueJsonSchema = schemaConverter.convert(
-        protobufSchemaPath.toUri(),
-        getDescriptor(topic));
-
-    final MessageSchemaDTO valueSchema = new MessageSchemaDTO()
-        .name(protobufSchema.fullName())
-        .source(MessageSchemaDTO.SourceEnum.PROTO_FILE)
-        .schema(valueJsonSchema.toJson());
-
-    return new TopicMessageSchemaDTO()
-        .key(keySchema)
-        .value(valueSchema);
-  }
-}

+ 0 - 39
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/RecordSerDe.java

@@ -1,39 +0,0 @@
-package com.provectus.kafka.ui.serde;
-
-import com.provectus.kafka.ui.model.TopicMessageSchemaDTO;
-import com.provectus.kafka.ui.serde.schemaregistry.MessageFormat;
-import javax.annotation.Nullable;
-import lombok.Builder;
-import lombok.Value;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.utils.Bytes;
-
-public interface RecordSerDe {
-
-  @Value
-  @Builder
-  class DeserializedKeyValue {
-    @Nullable
-    String key;
-    @Nullable
-    String value;
-    @Nullable
-    MessageFormat keyFormat;
-    @Nullable
-    MessageFormat valueFormat;
-    @Nullable
-    String keySchemaId;
-    @Nullable
-    String valueSchemaId;
-  }
-
-  DeserializedKeyValue deserialize(ConsumerRecord<Bytes, Bytes> msg);
-
-  ProducerRecord<byte[], byte[]> serialize(String topic,
-                                           @Nullable String key,
-                                           @Nullable String data,
-                                           @Nullable Integer partition);
-
-  TopicMessageSchemaDTO getTopicSchema(String topic);
-}

+ 0 - 53
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/SimpleRecordSerDe.java

@@ -1,53 +0,0 @@
-package com.provectus.kafka.ui.serde;
-
-import com.provectus.kafka.ui.model.MessageSchemaDTO;
-import com.provectus.kafka.ui.model.TopicMessageSchemaDTO;
-import com.provectus.kafka.ui.serde.schemaregistry.StringMessageFormatter;
-import com.provectus.kafka.ui.util.jsonschema.JsonSchema;
-import javax.annotation.Nullable;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.utils.Bytes;
-
-public class SimpleRecordSerDe implements RecordSerDe {
-
-  private static final StringMessageFormatter FORMATTER = new StringMessageFormatter();
-
-  @Override
-  public DeserializedKeyValue deserialize(ConsumerRecord<Bytes, Bytes> msg) {
-    var builder = DeserializedKeyValue.builder();
-    if (msg.key() != null) {
-      builder.key(FORMATTER.format(msg.topic(), msg.key().get()));
-      builder.keyFormat(FORMATTER.getFormat());
-    }
-    if (msg.value() != null) {
-      builder.value(FORMATTER.format(msg.topic(), msg.value().get()));
-      builder.valueFormat(FORMATTER.getFormat());
-    }
-    return builder.build();
-  }
-
-  @Override
-  public ProducerRecord<byte[], byte[]> serialize(String topic,
-                                                  @Nullable String key,
-                                                  @Nullable String data,
-                                                  @Nullable Integer partition) {
-    return new ProducerRecord<>(
-        topic,
-        partition,
-        key != null ? key.getBytes() : null,
-        data != null ? data.getBytes() : null
-    );
-  }
-
-  @Override
-  public TopicMessageSchemaDTO getTopicSchema(String topic) {
-    final MessageSchemaDTO schema = new MessageSchemaDTO()
-        .name("unknown")
-        .source(MessageSchemaDTO.SourceEnum.UNKNOWN)
-        .schema(JsonSchema.stringSchema().toJson());
-    return new TopicMessageSchemaDTO()
-        .key(schema)
-        .value(schema);
-  }
-}

+ 0 - 29
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/AvroMessageFormatter.java

@@ -1,29 +0,0 @@
-package com.provectus.kafka.ui.serde.schemaregistry;
-
-import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
-import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
-import io.confluent.kafka.serializers.KafkaAvroDeserializer;
-import lombok.SneakyThrows;
-
-public class AvroMessageFormatter implements MessageFormatter {
-  private final KafkaAvroDeserializer avroDeserializer;
-
-  public AvroMessageFormatter(SchemaRegistryClient client) {
-    this.avroDeserializer = new KafkaAvroDeserializer(client);
-  }
-
-  @Override
-  @SneakyThrows
-  public String format(String topic, byte[] value) {
-    // deserialized will have type, that depends on schema type (record or primitive),
-    // AvroSchemaUtils.toJson(...) method will take it into account
-    Object deserialized = avroDeserializer.deserialize(topic, value);
-    byte[] jsonBytes = AvroSchemaUtils.toJson(deserialized);
-    return new String(jsonBytes);
-  }
-
-  @Override
-  public MessageFormat getFormat() {
-    return MessageFormat.AVRO;
-  }
-}

+ 0 - 25
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/JsonSchemaMessageFormatter.java

@@ -1,25 +0,0 @@
-package com.provectus.kafka.ui.serde.schemaregistry;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
-import io.confluent.kafka.serializers.json.KafkaJsonSchemaDeserializer;
-
-public class JsonSchemaMessageFormatter implements MessageFormatter {
-
-  private final KafkaJsonSchemaDeserializer<JsonNode> jsonSchemaDeserializer;
-
-  public JsonSchemaMessageFormatter(SchemaRegistryClient client) {
-    this.jsonSchemaDeserializer = new KafkaJsonSchemaDeserializer<>(client);
-  }
-
-  @Override
-  public String format(String topic, byte[] value) {
-    JsonNode json = jsonSchemaDeserializer.deserialize(topic, value);
-    return json.toString();
-  }
-
-  @Override
-  public MessageFormat getFormat() {
-    return MessageFormat.JSON;
-  }
-}

+ 0 - 15
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/MessageFormat.java

@@ -1,15 +0,0 @@
-package com.provectus.kafka.ui.serde.schemaregistry;
-
-import java.util.Optional;
-import org.apache.commons.lang3.EnumUtils;
-
-public enum MessageFormat {
-  AVRO,
-  JSON,
-  PROTOBUF,
-  UNKNOWN;
-
-  public static Optional<MessageFormat> fromString(String typeString) {
-    return Optional.ofNullable(EnumUtils.getEnum(MessageFormat.class, typeString));
-  }
-}

+ 0 - 9
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/MessageFormatter.java

@@ -1,9 +0,0 @@
-package com.provectus.kafka.ui.serde.schemaregistry;
-
-public interface MessageFormatter {
-  String format(String topic, byte[] value);
-
-  default MessageFormat getFormat() {
-    return MessageFormat.UNKNOWN;
-  }
-}

+ 0 - 28
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/ProtobufMessageFormatter.java

@@ -1,28 +0,0 @@
-package com.provectus.kafka.ui.serde.schemaregistry;
-
-import com.google.protobuf.Message;
-import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
-import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaUtils;
-import io.confluent.kafka.serializers.protobuf.KafkaProtobufDeserializer;
-import lombok.SneakyThrows;
-
-public class ProtobufMessageFormatter implements MessageFormatter {
-  private final KafkaProtobufDeserializer<?> protobufDeserializer;
-
-  public ProtobufMessageFormatter(SchemaRegistryClient client) {
-    this.protobufDeserializer = new KafkaProtobufDeserializer<>(client);
-  }
-
-  @Override
-  @SneakyThrows
-  public String format(String topic, byte[] value) {
-    final Message message = protobufDeserializer.deserialize(topic, value);
-    byte[] jsonBytes = ProtobufSchemaUtils.toJson(message);
-    return new String(jsonBytes);
-  }
-
-  @Override
-  public MessageFormat getFormat() {
-    return MessageFormat.PROTOBUF;
-  }
-}

+ 0 - 298
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/SchemaRegistryAwareRecordSerDe.java

@@ -1,298 +0,0 @@
-package com.provectus.kafka.ui.serde.schemaregistry;
-
-
-import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE;
-import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.USER_INFO_CONFIG;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.provectus.kafka.ui.exception.ValidationException;
-import com.provectus.kafka.ui.model.KafkaCluster;
-import com.provectus.kafka.ui.model.MessageSchemaDTO;
-import com.provectus.kafka.ui.model.TopicMessageSchemaDTO;
-import com.provectus.kafka.ui.serde.RecordSerDe;
-import com.provectus.kafka.ui.serde.RecordSerDe.DeserializedKeyValue.DeserializedKeyValueBuilder;
-import com.provectus.kafka.ui.util.jsonschema.AvroJsonSchemaConverter;
-import com.provectus.kafka.ui.util.jsonschema.JsonSchema;
-import com.provectus.kafka.ui.util.jsonschema.ProtobufSchemaConverter;
-import io.confluent.kafka.schemaregistry.ParsedSchema;
-import io.confluent.kafka.schemaregistry.SchemaProvider;
-import io.confluent.kafka.schemaregistry.avro.AvroSchema;
-import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider;
-import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
-import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
-import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
-import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
-import io.confluent.kafka.schemaregistry.json.JsonSchemaProvider;
-import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
-import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider;
-import java.net.URI;
-import java.nio.ByteBuffer;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.concurrent.Callable;
-import java.util.stream.Collectors;
-import javax.annotation.Nullable;
-import lombok.SneakyThrows;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.producer.ProducerRecord;
-import org.apache.kafka.common.utils.Bytes;
-
-@Slf4j
-public class SchemaRegistryAwareRecordSerDe implements RecordSerDe {
-
-  private static final byte SR_RECORD_MAGIC_BYTE = (byte) 0;
-  private static final int SR_RECORD_PREFIX_LENGTH = 5;
-
-  private static final StringMessageFormatter FALLBACK_FORMATTER = new StringMessageFormatter();
-
-  private static final ProtobufSchemaConverter protoSchemaConverter = new ProtobufSchemaConverter();
-  private static final AvroJsonSchemaConverter avroSchemaConverter = new AvroJsonSchemaConverter();
-
-  private final KafkaCluster cluster;
-  private final SchemaRegistryClient schemaRegistryClient;
-
-  private final Map<MessageFormat, MessageFormatter> schemaRegistryFormatters;
-
-  private static SchemaRegistryClient createSchemaRegistryClient(KafkaCluster cluster) {
-    List<SchemaProvider> schemaProviders =
-        List.of(new AvroSchemaProvider(), new ProtobufSchemaProvider(), new JsonSchemaProvider());
-
-    Map<String, String> configs = new HashMap<>();
-    String username = cluster.getSchemaRegistry().getUsername();
-    String password = cluster.getSchemaRegistry().getPassword();
-
-    if (username != null && password != null) {
-      configs.put(BASIC_AUTH_CREDENTIALS_SOURCE, "USER_INFO");
-      configs.put(USER_INFO_CONFIG, username + ":" + password);
-    } else if (username != null) {
-      throw new ValidationException(
-          "You specified username but do not specified password");
-    } else if (password != null) {
-      throw new ValidationException(
-          "You specified password but do not specified username");
-    }
-    return new CachedSchemaRegistryClient(
-        cluster.getSchemaRegistry()
-                .getUrl()
-                .stream()
-                .collect(Collectors.toUnmodifiableList()),
-        1_000,
-        schemaProviders,
-        configs
-    );
-  }
-
-  public SchemaRegistryAwareRecordSerDe(KafkaCluster cluster) {
-    this(cluster, createSchemaRegistryClient(cluster));
-  }
-
-  @VisibleForTesting
-  SchemaRegistryAwareRecordSerDe(KafkaCluster cluster, SchemaRegistryClient schemaRegistryClient) {
-    this.cluster = cluster;
-    this.schemaRegistryClient = schemaRegistryClient;
-    this.schemaRegistryFormatters = Map.of(
-        MessageFormat.AVRO, new AvroMessageFormatter(schemaRegistryClient),
-        MessageFormat.JSON, new JsonSchemaMessageFormatter(schemaRegistryClient),
-        MessageFormat.PROTOBUF, new ProtobufMessageFormatter(schemaRegistryClient)
-    );
-  }
-
-  public DeserializedKeyValue deserialize(ConsumerRecord<Bytes, Bytes> msg) {
-    try {
-      DeserializedKeyValueBuilder builder = DeserializedKeyValue.builder();
-      if (msg.key() != null) {
-        fillDeserializedKvBuilder(msg, true, builder);
-      }
-      if (msg.value() != null) {
-        fillDeserializedKvBuilder(msg, false, builder);
-      }
-      return builder.build();
-    } catch (Throwable e) {
-      throw new RuntimeException("Failed to parse record from topic " + msg.topic(), e);
-    }
-  }
-
-  private void fillDeserializedKvBuilder(ConsumerRecord<Bytes, Bytes> rec,
-                                         boolean isKey,
-                                         DeserializedKeyValueBuilder builder) {
-    Optional<Integer> schemaId = extractSchemaIdFromMsg(rec, isKey);
-    Optional<MessageFormat> format = schemaId.flatMap(this::getMessageFormatBySchemaId);
-    if (schemaId.isPresent() && format.isPresent() && schemaRegistryFormatters.containsKey(format.get())) {
-      var formatter = schemaRegistryFormatters.get(format.get());
-      try {
-        var deserialized = formatter.format(rec.topic(), isKey ? rec.key().get() : rec.value().get());
-        if (isKey) {
-          builder.key(deserialized);
-          builder.keyFormat(formatter.getFormat());
-          builder.keySchemaId(String.valueOf(schemaId.get()));
-        } else {
-          builder.value(deserialized);
-          builder.valueFormat(formatter.getFormat());
-          builder.valueSchemaId(String.valueOf(schemaId.get()));
-        }
-        return;
-      } catch (Exception e) {
-        log.trace("Can't deserialize record {} with formatter {}",
-            rec, formatter.getClass().getSimpleName(), e);
-      }
-    }
-
-    // fallback
-    if (isKey) {
-      builder.key(FALLBACK_FORMATTER.format(rec.topic(), rec.key().get()));
-      builder.keyFormat(FALLBACK_FORMATTER.getFormat());
-    } else {
-      builder.value(FALLBACK_FORMATTER.format(rec.topic(), rec.value().get()));
-      builder.valueFormat(FALLBACK_FORMATTER.getFormat());
-    }
-
-  }
-
-  @Override
-  public ProducerRecord<byte[], byte[]> serialize(String topic,
-                                                  @Nullable String key,
-                                                  @Nullable String data,
-                                                  @Nullable Integer partition) {
-    final Optional<SchemaMetadata> maybeKeySchema = getSchemaBySubject(topic, true);
-    final Optional<SchemaMetadata> maybeValueSchema = getSchemaBySubject(topic, false);
-
-    final byte[] serializedKey = maybeKeySchema.isPresent()
-        ? serialize(maybeKeySchema.get(), topic, key, true)
-        : serialize(key);
-
-    final byte[] serializedValue = maybeValueSchema.isPresent()
-        ? serialize(maybeValueSchema.get(), topic, data, false)
-        : serialize(data);
-
-    return new ProducerRecord<>(topic, partition, serializedKey, serializedValue);
-  }
-
-  @SneakyThrows
-  private byte[] serialize(SchemaMetadata schema, String topic, String value, boolean isKey) {
-    if (value == null) {
-      return null;
-    }
-    MessageReader<?> reader;
-    if (schema.getSchemaType().equals(MessageFormat.PROTOBUF.name())) {
-      reader = new ProtobufMessageReader(topic, isKey, schemaRegistryClient, schema);
-    } else if (schema.getSchemaType().equals(MessageFormat.AVRO.name())) {
-      reader = new AvroMessageReader(topic, isKey, schemaRegistryClient, schema);
-    } else if (schema.getSchemaType().equals(MessageFormat.JSON.name())) {
-      reader = new JsonSchemaMessageReader(topic, isKey, schemaRegistryClient, schema);
-    } else {
-      throw new IllegalStateException("Unsupported schema type: " + schema.getSchemaType());
-    }
-
-    return reader.read(value);
-  }
-
-  private byte[] serialize(String value) {
-    if (value == null) {
-      return null;
-    }
-    // if no schema provided serialize input as raw string
-    return value.getBytes();
-  }
-
-  @Override
-  public TopicMessageSchemaDTO getTopicSchema(String topic) {
-    final Optional<SchemaMetadata> maybeValueSchema = getSchemaBySubject(topic, false);
-    final Optional<SchemaMetadata> maybeKeySchema = getSchemaBySubject(topic, true);
-
-    String sourceValueSchema = maybeValueSchema.map(this::convertSchema)
-        .orElseGet(() -> JsonSchema.stringSchema().toJson());
-
-    String sourceKeySchema = maybeKeySchema.map(this::convertSchema)
-        .orElseGet(() -> JsonSchema.stringSchema().toJson());
-
-    final MessageSchemaDTO keySchema = new MessageSchemaDTO()
-        .name(maybeKeySchema.map(
-            s -> schemaSubject(topic, true)
-        ).orElse("unknown"))
-        .source(MessageSchemaDTO.SourceEnum.SCHEMA_REGISTRY)
-        .schema(sourceKeySchema);
-
-    final MessageSchemaDTO valueSchema = new MessageSchemaDTO()
-        .name(maybeValueSchema.map(
-            s -> schemaSubject(topic, false)
-        ).orElse("unknown"))
-        .source(MessageSchemaDTO.SourceEnum.SCHEMA_REGISTRY)
-        .schema(sourceValueSchema);
-
-    return new TopicMessageSchemaDTO()
-        .key(keySchema)
-        .value(valueSchema);
-  }
-
-  @SneakyThrows
-  private String convertSchema(SchemaMetadata schema) {
-
-    String jsonSchema;
-    URI basePath = new URI(cluster.getSchemaRegistry().getPrimaryNodeUri())
-        .resolve(Integer.toString(schema.getId()));
-    final ParsedSchema schemaById = schemaRegistryClient.getSchemaById(schema.getId());
-
-    if (schema.getSchemaType().equals(MessageFormat.PROTOBUF.name())) {
-      final ProtobufSchema protobufSchema = (ProtobufSchema) schemaById;
-      jsonSchema = protoSchemaConverter
-          .convert(basePath, protobufSchema.toDescriptor())
-          .toJson();
-    } else if (schema.getSchemaType().equals(MessageFormat.AVRO.name())) {
-      final AvroSchema avroSchema = (AvroSchema) schemaById;
-      jsonSchema = avroSchemaConverter
-          .convert(basePath, avroSchema.rawSchema())
-          .toJson();
-    } else if (schema.getSchemaType().equals(MessageFormat.JSON.name())) {
-      jsonSchema = schema.getSchema();
-    } else {
-      jsonSchema = JsonSchema.stringSchema().toJson();
-    }
-
-    return jsonSchema;
-  }
-
-  private Optional<MessageFormat> getMessageFormatBySchemaId(int schemaId) {
-    return wrapClientCall(() -> schemaRegistryClient.getSchemaById(schemaId))
-        .map(ParsedSchema::schemaType)
-        .flatMap(MessageFormat::fromString);
-  }
-
-  private Optional<Integer> extractSchemaIdFromMsg(ConsumerRecord<Bytes, Bytes> msg, boolean isKey) {
-    Bytes bytes = isKey ? msg.key() : msg.value();
-    ByteBuffer buffer = ByteBuffer.wrap(bytes.get());
-    if (buffer.remaining() > SR_RECORD_PREFIX_LENGTH && buffer.get() == SR_RECORD_MAGIC_BYTE) {
-      int id = buffer.getInt();
-      return Optional.of(id);
-    }
-    return Optional.empty();
-  }
-
-  @SneakyThrows
-  private Optional<SchemaMetadata> getSchemaBySubject(String topic, boolean isKey) {
-    return wrapClientCall(() ->
-        schemaRegistryClient.getLatestSchemaMetadata(schemaSubject(topic, isKey)));
-  }
-
-  @SneakyThrows
-  private <T> Optional<T> wrapClientCall(Callable<T> call) {
-    try {
-      return Optional.ofNullable(call.call());
-    } catch (RestClientException restClientException) {
-      if (restClientException.getStatus() == 404) {
-        return Optional.empty();
-      } else {
-        throw new RuntimeException("Error calling SchemaRegistryClient", restClientException);
-      }
-    }
-  }
-
-  private String schemaSubject(String topic, boolean isKey) {
-    return String.format(
-        isKey ? cluster.getKeySchemaNameTemplate()
-            : cluster.getSchemaNameTemplate(), topic
-    );
-  }
-}

+ 0 - 11
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/StringMessageFormatter.java

@@ -1,11 +0,0 @@
-package com.provectus.kafka.ui.serde.schemaregistry;
-
-import java.nio.charset.StandardCharsets;
-
-public class StringMessageFormatter implements MessageFormatter {
-
-  @Override
-  public String format(String topic, byte[] value) {
-    return new String(value, StandardCharsets.UTF_8);
-  }
-}

+ 12 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/BuiltInSerde.java

@@ -0,0 +1,12 @@
+package com.provectus.kafka.ui.serdes;
+
+import com.provectus.kafka.ui.serde.api.PropertyResolver;
+import com.provectus.kafka.ui.serde.api.Serde;
+
+public interface BuiltInSerde extends Serde {
+
+  default boolean initOnStartup(PropertyResolver kafkaClusterProperties,
+                                PropertyResolver globalProperties) {
+    return true;
+  }
+}
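
A minimal sketch of what a serde could look like under this interface. The "Hex" serde below is purely illustrative (it is not part of this PR; imports mirror the built-in serdes shown further down); it uses initOnStartup to register itself only when explicitly enabled for the cluster:

    // Hypothetical read-only serde that renders record bytes as hex.
    public class HexSerde implements BuiltInSerde {

      public static String name() {
        return "Hex";
      }

      @Override
      public boolean initOnStartup(PropertyResolver clusterProps, PropertyResolver globalProps) {
        // auto-register only when "hexSerde.enabled" is set (assumed property name)
        return clusterProps.getProperty("hexSerde.enabled", Boolean.class).orElse(false);
      }

      @Override
      public void configure(PropertyResolver serdeProps, PropertyResolver clusterProps,
                            PropertyResolver globalProps) {
      }

      @Override
      public Optional<String> getDescription() {
        return Optional.empty();
      }

      @Override
      public Optional<SchemaDescription> getSchema(String topic, Target type) {
        return Optional.empty();
      }

      @Override
      public boolean canDeserialize(String topic, Target type) {
        return true;
      }

      @Override
      public boolean canSerialize(String topic, Target type) {
        return false; // display-only serde
      }

      @Override
      public Serializer serializer(String topic, Target type) {
        throw new UnsupportedOperationException();
      }

      @Override
      public Deserializer deserializer(String topic, Target type) {
        return (headers, data) -> {
          StringBuilder sb = new StringBuilder(data.length * 3);
          for (byte b : data) {
            sb.append(String.format("%02X ", b));
          }
          return new DeserializeResult(sb.toString().trim(), DeserializeResult.Type.STRING, Map.of());
        };
      }
    }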

+ 12 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/ClassloaderUtil.java

@@ -0,0 +1,12 @@
+package com.provectus.kafka.ui.serdes;
+
+class ClassloaderUtil {
+
+  static ClassLoader compareAndSwapLoaders(ClassLoader loader) {
+    ClassLoader current = Thread.currentThread().getContextClassLoader();
+    if (!current.equals(loader)) {
+      Thread.currentThread().setContextClassLoader(loader);
+    }
+    return current;
+  }
+}
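
compareAndSwapLoaders returns the previously active context classloader, which enables the swap-run-restore pattern used throughout this PR. A short usage sketch (serdeClassLoader is an assumed variable):

    // swap in the serde's classloader, run the call, restore the original
    ClassLoader orig = ClassloaderUtil.compareAndSwapLoaders(serdeClassLoader);
    try {
      // ... call into custom serde code here ...
    } finally {
      ClassloaderUtil.compareAndSwapLoaders(orig);
    }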

+ 218 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/ClusterSerdes.java

@@ -0,0 +1,218 @@
+package com.provectus.kafka.ui.serdes;
+
+import com.google.common.base.Preconditions;
+import com.provectus.kafka.ui.config.ClustersProperties;
+import com.provectus.kafka.ui.exception.ValidationException;
+import com.provectus.kafka.ui.serde.api.PropertyResolver;
+import com.provectus.kafka.ui.serde.api.Serde;
+import com.provectus.kafka.ui.serdes.builtin.Base64Serde;
+import com.provectus.kafka.ui.serdes.builtin.Int32Serde;
+import com.provectus.kafka.ui.serdes.builtin.Int64Serde;
+import com.provectus.kafka.ui.serdes.builtin.ProtobufFileSerde;
+import com.provectus.kafka.ui.serdes.builtin.StringSerde;
+import com.provectus.kafka.ui.serdes.builtin.UInt32Serde;
+import com.provectus.kafka.ui.serdes.builtin.UInt64Serde;
+import com.provectus.kafka.ui.serdes.builtin.UuidBinarySerde;
+import com.provectus.kafka.ui.serdes.builtin.sr.SchemaRegistrySerde;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Predicate;
+import java.util.regex.Pattern;
+import java.util.stream.Stream;
+import javax.annotation.Nullable;
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.core.env.Environment;
+
+@Slf4j
+public class ClusterSerdes {
+
+  private static final CustomSerdeLoader CUSTOM_SERDE_LOADER = new CustomSerdeLoader();
+
+  private static final Map<String, Class<? extends BuiltInSerde>> BUILT_IN_SERDES =
+      Map.of(
+          StringSerde.name(), StringSerde.class,
+          Int32Serde.name(), Int32Serde.class,
+          Int64Serde.name(), Int64Serde.class,
+          UInt32Serde.name(), UInt32Serde.class,
+          UInt64Serde.name(), UInt64Serde.class,
+          UuidBinarySerde.name(), UuidBinarySerde.class,
+          Base64Serde.name(), Base64Serde.class,
+          SchemaRegistrySerde.name(), SchemaRegistrySerde.class,
+          ProtobufFileSerde.name(), ProtobufFileSerde.class
+      );
+
+  // using a linked map to preserve the order in which serdes were declared in config
+  private final Map<String, SerdeInstance> serdes = new LinkedHashMap<>();
+
+  @Nullable
+  private final SerdeInstance defaultKeySerde;
+
+  @Nullable
+  private final SerdeInstance defaultValueSerde;
+
+  private final SerdeInstance fallbackSerde;
+
+  public ClusterSerdes(Environment env,
+                       ClustersProperties clustersProperties,
+                       int clusterIndex) {
+    var globalPropertiesResolver = new PropertyResolverImpl(env);
+    var clusterPropertiesResolver = new PropertyResolverImpl(env, "kafka.clusters." + clusterIndex);
+
+    // initializing serdes from config
+    ClustersProperties.Cluster clusterProp = clustersProperties.getClusters().get(clusterIndex);
+    for (int i = 0; i < clusterProp.getSerde().size(); i++) {
+      var serdeConf = clusterProp.getSerde().get(i);
+      if (serdes.containsKey(serdeConf.getName())) {
+        throw new ValidationException("Multiple serdes with same name: " + serdeConf.getName());
+      }
+      var instance = initSerdeFromConfig(
+          serdeConf,
+          new PropertyResolverImpl(env, "kafka.clusters." + clusterIndex + ".serde." + i + ".properties"),
+          clusterPropertiesResolver,
+          globalPropertiesResolver
+      );
+      serdes.put(serdeConf.getName(), instance);
+    }
+
+    // initializing built-in serdes if they haven't already been initialized
+    BUILT_IN_SERDES.forEach((name, clazz) -> {
+      if (!serdes.containsKey(name)) { // a serde may already be initialized with a custom config
+        var serde = createSerdeInstance(clazz);
+        if (serde.initOnStartup(clusterPropertiesResolver, globalPropertiesResolver)) {
+          serde.configure(
+              PropertyResolverImpl.empty(),
+              clusterPropertiesResolver,
+              globalPropertiesResolver
+          );
+          serdes.put(name, new SerdeInstance(name, serde, null, null, null));
+        }
+      }
+    });
+
+    defaultKeySerde = Optional.ofNullable(clusterProp.getDefaultKeySerde())
+        .map(name -> Preconditions.checkNotNull(serdes.get(name), "Default key serde not found"))
+        .or(() -> Optional.ofNullable(serdes.get(SchemaRegistrySerde.name())))
+        .or(() -> Optional.ofNullable(serdes.get(ProtobufFileSerde.name())))
+        .orElse(null);
+
+    defaultValueSerde = Optional.ofNullable(clusterProp.getDefaultValueSerde())
+        .map(name -> Preconditions.checkNotNull(serdes.get(name), "Default value serde not found"))
+        .or(() -> Optional.ofNullable(serdes.get(SchemaRegistrySerde.name())))
+        .or(() -> Optional.ofNullable(serdes.get(ProtobufFileSerde.name())))
+        .orElse(null);
+
+    fallbackSerde = createFallbackSerde();
+  }
+
+  private SerdeInstance createFallbackSerde() {
+    StringSerde serde = new StringSerde();
+    serde.configure(PropertyResolverImpl.empty(), PropertyResolverImpl.empty(), PropertyResolverImpl.empty());
+    return new SerdeInstance("Fallback", serde, null, null, null);
+  }
+
+  @SneakyThrows
+  private SerdeInstance initSerdeFromConfig(ClustersProperties.SerdeConfig serdeConfig,
+                                            PropertyResolver serdeProps,
+                                            PropertyResolver clusterProps,
+                                            PropertyResolver globalProps) {
+    String name = serdeConfig.getName();
+    // configuring one of the built-in serdes with custom params
+    if (BUILT_IN_SERDES.containsKey(name)) {
+      if (serdeConfig.getClassName() != null) {
+        throw new ValidationException("className can't be set for built-in serde");
+      }
+      if (serdeConfig.getFilePath() != null) {
+        throw new ValidationException("filePath can't be set for built-in serde");
+      }
+      var clazz = BUILT_IN_SERDES.get(name);
+      Serde serde = createSerdeInstance(clazz);
+      serde.configure(serdeProps, clusterProps, globalProps);
+      return new SerdeInstance(
+          name,
+          serde,
+          nullablePattern(serdeConfig.getTopicKeysPattern()),
+          nullablePattern(serdeConfig.getTopicValuesPattern()),
+          null
+      );
+    }
+    log.info("Loading custom serde {}", serdeConfig.getName());
+    return loadCustom(serdeConfig, serdeProps, clusterProps, globalProps);
+  }
+
+  @SneakyThrows
+  private <T extends Serde> T createSerdeInstance(Class<T> clazz) {
+    return clazz.getDeclaredConstructor().newInstance();
+  }
+
+  public SerdeInstance getFallbackSerde() {
+    return fallbackSerde;
+  }
+
+  private SerdeInstance loadCustom(ClustersProperties.SerdeConfig serdeConfig,
+                                   PropertyResolver serdeProps,
+                                   PropertyResolver clusterProps,
+                                   PropertyResolver globalProps) {
+    var loaded = CUSTOM_SERDE_LOADER.loadAndConfigure(
+        serdeConfig.getClassName(), serdeConfig.getFilePath(), serdeProps, clusterProps, globalProps);
+    return new SerdeInstance(
+        serdeConfig.getName(),
+        loaded.getSerde(),
+        nullablePattern(serdeConfig.getTopicKeysPattern()),
+        nullablePattern(serdeConfig.getTopicValuesPattern()),
+        loaded.getClassLoader()
+    );
+  }
+
+  @Nullable
+  private Pattern nullablePattern(@Nullable String pattern) {
+    return pattern == null ? null : Pattern.compile(pattern);
+  }
+
+  private Optional<SerdeInstance> findSerdeByPatternsOrDefault(String topic,
+                                                               Serde.Target type,
+                                                               Predicate<SerdeInstance> additionalCheck) {
+    // iterating over serdes in the same order they were added in config
+    for (SerdeInstance serdeInstance : serdes.values()) {
+      var pattern = type == Serde.Target.KEY
+          ? serdeInstance.topicKeyPattern
+          : serdeInstance.topicValuePattern;
+      if (pattern != null
+          && pattern.matcher(topic).matches()
+          && additionalCheck.test(serdeInstance)) {
+        return Optional.of(serdeInstance);
+      }
+    }
+    if (type == Serde.Target.KEY
+        && defaultKeySerde != null
+        && additionalCheck.test(defaultKeySerde)) {
+      return Optional.of(defaultKeySerde);
+    }
+    if (type == Serde.Target.VALUE
+        && defaultValueSerde != null
+        && additionalCheck.test(defaultValueSerde)) {
+      return Optional.of(defaultValueSerde);
+    }
+    return Optional.empty();
+  }
+
+  public Optional<SerdeInstance> serdeForName(String name) {
+    return Optional.ofNullable(serdes.get(name));
+  }
+
+  public Stream<SerdeInstance> all() {
+    return serdes.values().stream();
+  }
+
+  public SerdeInstance suggestSerdeForSerialize(String topic, Serde.Target type) {
+    return findSerdeByPatternsOrDefault(topic, type, s -> s.canSerialize(topic, type))
+        .orElse(serdes.get(StringSerde.name()));
+  }
+
+  public SerdeInstance suggestSerdeForDeserialize(String topic, Serde.Target type) {
+    return findSerdeByPatternsOrDefault(topic, type, s -> s.canDeserialize(topic, type))
+        .orElse(serdes.get(StringSerde.name()));
+  }
+
+}
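
The resolution order implemented above is: explicitly configured topic patterns first (in declaration order), then the configured or implicit default serde, then StringSerde as the last resort. A usage sketch, assuming a Spring Environment and ClustersProperties are at hand:

    // build serdes for cluster 0 and ask for a suggestion
    ClusterSerdes serdes = new ClusterSerdes(environment, clustersProperties, 0);
    SerdeInstance valueSerde =
        serdes.suggestSerdeForDeserialize("orders", Serde.Target.VALUE);
    // falls back to StringSerde if no pattern or default serde matched
    System.out.println(valueSerde.getName());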

+ 139 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/ConsumerRecordDeserializer.java

@@ -0,0 +1,139 @@
+package com.provectus.kafka.ui.serdes;
+
+import com.provectus.kafka.ui.model.TopicMessageDTO;
+import com.provectus.kafka.ui.serde.api.Serde;
+import java.time.Instant;
+import java.time.OffsetDateTime;
+import java.time.ZoneId;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.common.header.Header;
+import org.apache.kafka.common.header.Headers;
+import org.apache.kafka.common.record.TimestampType;
+import org.apache.kafka.common.utils.Bytes;
+
+@Slf4j
+@RequiredArgsConstructor
+public class ConsumerRecordDeserializer {
+
+  private static final ZoneId UTC_ZONE_ID = ZoneId.of("UTC");
+
+  private final String keySerdeName;
+  private final Serde.Deserializer keyDeserializer;
+
+  private final String valueSerdeName;
+  private final Serde.Deserializer valueDeserializer;
+
+  private final String fallbackSerdeName;
+  private final Serde.Deserializer fallbackKeyDeserializer;
+  private final Serde.Deserializer fallbackValueDeserializer;
+
+  public TopicMessageDTO deserialize(ConsumerRecord<Bytes, Bytes> rec) {
+    var message = new TopicMessageDTO();
+    fillKey(message, rec);
+    fillValue(message, rec);
+    fillHeaders(message, rec);
+
+    message.setPartition(rec.partition());
+    message.setOffset(rec.offset());
+    message.setTimestampType(mapToTimestampType(rec.timestampType()));
+    message.setTimestamp(OffsetDateTime.ofInstant(Instant.ofEpochMilli(rec.timestamp()), UTC_ZONE_ID));
+
+    message.setKeySize(getKeySize(rec));
+    message.setValueSize(getValueSize(rec));
+    message.setHeadersSize(getHeadersSize(rec));
+
+    return message;
+  }
+
+  private static TopicMessageDTO.TimestampTypeEnum mapToTimestampType(TimestampType timestampType) {
+    switch (timestampType) {
+      case CREATE_TIME:
+        return TopicMessageDTO.TimestampTypeEnum.CREATE_TIME;
+      case LOG_APPEND_TIME:
+        return TopicMessageDTO.TimestampTypeEnum.LOG_APPEND_TIME;
+      case NO_TIMESTAMP_TYPE:
+        return TopicMessageDTO.TimestampTypeEnum.NO_TIMESTAMP_TYPE;
+      default:
+        throw new IllegalArgumentException("Unknown timestampType: " + timestampType);
+    }
+  }
+
+  private void fillHeaders(TopicMessageDTO message, ConsumerRecord<Bytes, Bytes> rec) {
+    Map<String, String> headers = new HashMap<>();
+    rec.headers().iterator()
+        .forEachRemaining(header ->
+            headers.put(
+                header.key(),
+                header.value() != null ? new String(header.value()) : null
+            ));
+    message.setHeaders(headers);
+  }
+
+  private void fillKey(TopicMessageDTO message, ConsumerRecord<Bytes, Bytes> rec) {
+    if (rec.key() == null) {
+      return;
+    }
+    try {
+      var deserResult = keyDeserializer.deserialize(new RecordHeadersImpl(), rec.key().get());
+      message.setKey(deserResult.getResult());
+      message.setKeySerde(keySerdeName);
+      message.setKeyDeserializeProperties(deserResult.getAdditionalProperties());
+    } catch (Exception e) {
+      log.trace("Error deserializing key for key topic: {}, partition {}, offset {}, with serde {}",
+          rec.topic(), rec.partition(), rec.offset(), keySerdeName, e);
+      var deserResult = fallbackKeyDeserializer.deserialize(new RecordHeadersImpl(), rec.key().get());
+      message.setKey(deserResult.getResult());
+      message.setKeySerde(fallbackSerdeName);
+    }
+  }
+
+  private void fillValue(TopicMessageDTO message, ConsumerRecord<Bytes, Bytes> rec) {
+    if (rec.value() == null) {
+      return;
+    }
+    try {
+      var deserResult = valueDeserializer.deserialize(
+          new RecordHeadersImpl(rec.headers()), rec.value().get());
+      message.setContent(deserResult.getResult());
+      message.setValueSerde(valueSerdeName);
+      message.setValueDeserializeProperties(deserResult.getAdditionalProperties());
+    } catch (Exception e) {
+      log.trace("Error deserializing key for value topic: {}, partition {}, offset {}, with serde {}",
+          rec.topic(), rec.partition(), rec.offset(), valueSerdeName, e);
+      var deserResult = fallbackValueDeserializer.deserialize(
+          new RecordHeadersImpl(rec.headers()), rec.value().get());
+      message.setContent(deserResult.getResult());
+      message.setValueSerde(fallbackSerdeName);
+    }
+  }
+
+  private static Long getHeadersSize(ConsumerRecord<Bytes, Bytes> consumerRecord) {
+    Headers headers = consumerRecord.headers();
+    if (headers != null) {
+      return Arrays.stream(headers.toArray())
+          .mapToLong(ConsumerRecordDeserializer::headerSize)
+          .sum();
+    }
+    return 0L;
+  }
+
+  private static Long getKeySize(ConsumerRecord<Bytes, Bytes> consumerRecord) {
+    return consumerRecord.key() != null ? (long) consumerRecord.key().get().length : null;
+  }
+
+  private static Long getValueSize(ConsumerRecord<Bytes, Bytes> consumerRecord) {
+    return consumerRecord.value() != null ? (long) consumerRecord.value().get().length : null;
+  }
+
+  private static int headerSize(Header header) {
+    int key = header.key() != null ? header.key().getBytes().length : 0;
+    int val = header.value() != null ? header.value().length : 0;
+    return key + val;
+  }
+
+}
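
A wiring sketch: key and value deserializers are resolved per topic, and the cluster's fallback serde (StringSerde, per ClusterSerdes above) is used when the primary deserializer throws. "serdes" and "consumerRecord" are assumed variables:

    SerdeInstance keySerde = serdes.suggestSerdeForDeserialize(topic, Serde.Target.KEY);
    SerdeInstance valueSerde = serdes.suggestSerdeForDeserialize(topic, Serde.Target.VALUE);
    SerdeInstance fallback = serdes.getFallbackSerde();

    var deserializer = new ConsumerRecordDeserializer(
        keySerde.getName(), keySerde.deserializer(topic, Serde.Target.KEY),
        valueSerde.getName(), valueSerde.deserializer(topic, Serde.Target.VALUE),
        fallback.getName(),
        fallback.deserializer(topic, Serde.Target.KEY),
        fallback.deserializer(topic, Serde.Target.VALUE));

    TopicMessageDTO message = deserializer.deserialize(consumerRecord);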

+ 173 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/CustomSerdeLoader.java

@@ -0,0 +1,173 @@
+package com.provectus.kafka.ui.serdes;
+
+import com.provectus.kafka.ui.serde.api.PropertyResolver;
+import com.provectus.kafka.ui.serde.api.Serde;
+import java.io.IOException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.security.AccessController;
+import java.security.PrivilegedAction;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.stream.Collectors;
+import lombok.SneakyThrows;
+import lombok.Value;
+
+
+class CustomSerdeLoader {
+
+  @Value
+  static class CustomSerde {
+    Serde serde;
+    ClassLoader classLoader;
+  }
+
+  // serde location -> classloader
+  private final Map<Path, ClassLoader> classloaders = new ConcurrentHashMap<>();
+
+  @SneakyThrows
+  CustomSerde loadAndConfigure(String className,
+                               String filePath,
+                               PropertyResolver serdeProps,
+                               PropertyResolver clusterProps,
+                               PropertyResolver globalProps) {
+    Path locationPath = Path.of(filePath);
+    var serdeClassloader = createClassloader(locationPath);
+    var origCL = ClassloaderUtil.compareAndSwapLoaders(serdeClassloader);
+    try {
+      var serdeClass = serdeClassloader.loadClass(className);
+      var serde = (Serde) serdeClass.getDeclaredConstructor().newInstance();
+      serde.configure(serdeProps, clusterProps, globalProps);
+      return new CustomSerde(serde, serdeClassloader);
+    } finally {
+      ClassloaderUtil.compareAndSwapLoaders(origCL);
+    }
+  }
+
+  private static boolean isArchive(Path path) {
+    String archivePath = path.toString().toLowerCase();
+    return Files.isReadable(path)
+        && Files.isRegularFile(path)
+        && (archivePath.endsWith(".jar") || archivePath.endsWith(".zip"));
+  }
+
+  @SneakyThrows
+  private static List<URL> findArchiveFiles(Path location) {
+    if (isArchive(location)) {
+      return List.of(location.toUri().toURL());
+    }
+    if (Files.isDirectory(location)) {
+      List<URL> archiveFiles = new ArrayList<>();
+      try (var files = Files.walk(location)) {
+        var paths = files.filter(CustomSerdeLoader::isArchive).collect(Collectors.toList());
+        for (Path path : paths) {
+          archiveFiles.add(path.toUri().toURL());
+        }
+      }
+      return archiveFiles;
+    }
+    return List.of();
+  }
+
+  private ClassLoader createClassloader(Path location) {
+    if (!Files.exists(location)) {
+      throw new IllegalStateException("Location does not exist");
+    }
+    var archives = findArchiveFiles(location);
+    if (archives.isEmpty()) {
+      throw new IllegalStateException("No archive files were found");
+    }
+    // we assume that the location's content does not change while serdes are created,
+    // so already-created classloaders can be reused
+    return classloaders.computeIfAbsent(location, l ->
+        AccessController.doPrivileged(
+            (PrivilegedAction<URLClassLoader>) () ->
+                new ChildFirstClassloader(
+                    archives.toArray(URL[]::new),
+                    CustomSerdeLoader.class.getClassLoader())));
+  }
+
+  //---------------------------------------------------------------------------------
+
+  // This classloader first tries to load classes by itself; if a class is not found,
+  // the search is delegated to the parent (the opposite of the usual parent-first delegation)
+  private static class ChildFirstClassloader extends URLClassLoader {
+
+    private static final String JAVA_PACKAGE_PREFIX = "java.";
+
+    ChildFirstClassloader(URL[] urls, ClassLoader parent) {
+      super(urls, parent);
+    }
+
+    @Override
+    protected synchronized Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
+      // first check whether it's a system class, delegate to the system loader
+      if (name.startsWith(JAVA_PACKAGE_PREFIX)) {
+        return findSystemClass(name);
+      }
+      Class<?> loadedClass = findLoadedClass(name);
+      if (loadedClass == null) {
+        try {
+          // start searching from current classloader
+          loadedClass = findClass(name);
+        } catch (ClassNotFoundException e) {
+          // if not found - going to parent
+          loadedClass = super.loadClass(name, resolve);
+        }
+      }
+      if (resolve) {
+        resolveClass(loadedClass);
+      }
+      return loadedClass;
+    }
+
+    @Override
+    public Enumeration<URL> getResources(String name) throws IOException {
+      List<URL> allRes = new LinkedList<>();
+      Enumeration<URL> thisRes = findResources(name);
+      if (thisRes != null) {
+        while (thisRes.hasMoreElements()) {
+          allRes.add(thisRes.nextElement());
+        }
+      }
+      // then try finding resources from parent classloaders
+      Enumeration<URL> parentRes = super.findResources(name);
+      if (parentRes != null) {
+        while (parentRes.hasMoreElements()) {
+          allRes.add(parentRes.nextElement());
+        }
+      }
+      return new Enumeration<>() {
+        final Iterator<URL> it = allRes.iterator();
+
+        @Override
+        public boolean hasMoreElements() {
+          return it.hasNext();
+        }
+
+        @Override
+        public URL nextElement() {
+          return it.next();
+        }
+      };
+    }
+
+    @Override
+    public URL getResource(String name) {
+      URL res = findResource(name);
+      if (res == null) {
+        res = super.getResource(name);
+      }
+      return res;
+    }
+  }
+
+}
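
A loading sketch with hypothetical class and jar names; loadAndConfigure instantiates the serde inside a child-first classloader so that its dependencies don't clash with the application's:

    CustomSerdeLoader loader = new CustomSerdeLoader();
    CustomSerdeLoader.CustomSerde custom = loader.loadAndConfigure(
        "com.example.MyCompanySerde",         // className from serde config
        "/opt/kafka-ui/serdes/my-serde.jar",  // filePath from serde config
        serdeProps, clusterProps, globalProps);
    Serde serde = custom.getSerde();          // backed by the jar's own classloader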

+ 41 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/ProducerRecordCreator.java

@@ -0,0 +1,41 @@
+package com.provectus.kafka.ui.serdes;
+
+import com.provectus.kafka.ui.serde.api.Serde;
+import java.util.Map;
+import javax.annotation.Nullable;
+import lombok.RequiredArgsConstructor;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.common.header.Header;
+import org.apache.kafka.common.header.internals.RecordHeader;
+import org.apache.kafka.common.header.internals.RecordHeaders;
+
+@RequiredArgsConstructor
+public class ProducerRecordCreator {
+
+  private final Serde.Serializer keySerializer;
+  private final Serde.Serializer valuesSerializer;
+
+  public ProducerRecord<byte[], byte[]> create(String topic,
+                                               @Nullable Integer partition,
+                                               @Nullable String key,
+                                               @Nullable String value,
+                                               @Nullable Map<String, String> headers) {
+    return new ProducerRecord<>(
+        topic,
+        partition,
+        key == null ? null : keySerializer.serialize(key),
+        value == null ? null : valuesSerializer.serialize(value),
+        createHeaders(headers)
+    );
+  }
+
+  private Iterable<Header> createHeaders(@Nullable Map<String, String> clientHeaders) {
+    if (clientHeaders == null) {
+      return new RecordHeaders();
+    }
+    RecordHeaders headers = new RecordHeaders();
+    clientHeaders.forEach((k, v) -> headers.add(new RecordHeader(k, v.getBytes())));
+    return headers;
+  }
+
+}
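
A usage sketch for the send-message flow; the serializers come from the SerdeInstances chosen for the topic (topic and values below are assumptions):

    ProducerRecordCreator creator = new ProducerRecordCreator(
        keySerde.serializer("orders", Serde.Target.KEY),
        valueSerde.serializer("orders", Serde.Target.VALUE));

    ProducerRecord<byte[], byte[]> producerRecord = creator.create(
        "orders",                     // topic
        null,                         // partition: let the partitioner decide
        "order-1",                    // key as entered in the UI
        "{\"amount\": 42}",           // value as entered in the UI
        Map.of("traceId", "abc123")); // headers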

+ 64 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/PropertyResolverImpl.java

@@ -0,0 +1,64 @@
+package com.provectus.kafka.ui.serdes;
+
+import com.google.common.base.Preconditions;
+import com.provectus.kafka.ui.serde.api.PropertyResolver;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import javax.annotation.Nullable;
+import org.springframework.boot.context.properties.bind.Bindable;
+import org.springframework.boot.context.properties.bind.Binder;
+import org.springframework.boot.context.properties.source.ConfigurationPropertyName;
+import org.springframework.core.env.Environment;
+import org.springframework.core.env.StandardEnvironment;
+
+
+public class PropertyResolverImpl implements PropertyResolver {
+
+  private final Binder binder;
+
+  @Nullable
+  private final String prefix;
+
+  public static PropertyResolverImpl empty() {
+    return new PropertyResolverImpl(new StandardEnvironment(), null);
+  }
+
+  public PropertyResolverImpl(Environment env) {
+    this(env, null);
+  }
+
+  public PropertyResolverImpl(Environment env, @Nullable String prefix) {
+    this.binder = Binder.get(env);
+    this.prefix = prefix;
+  }
+
+  private ConfigurationPropertyName targetPropertyName(String key) {
+    Preconditions.checkNotNull(key);
+    Preconditions.checkState(!key.isBlank());
+    String propertyName = prefix == null ? key : prefix + "." + key;
+    return ConfigurationPropertyName.adapt(propertyName, '.');
+  }
+
+  @Override
+  public <T> Optional<T> getProperty(String key, Class<T> targetType) {
+    var targetKey = targetPropertyName(key);
+    var result = binder.bind(targetKey, Bindable.of(targetType));
+    return result.isBound() ? Optional.of(result.get()) : Optional.empty();
+  }
+
+  @Override
+  public <T> Optional<List<T>> getListProperty(String key, Class<T> itemType) {
+    var targetKey = targetPropertyName(key);
+    var listResult = binder.bind(targetKey, Bindable.listOf(itemType));
+    return listResult.isBound() ? Optional.of(listResult.get()) : Optional.empty();
+  }
+
+  @Override
+  public <K, V> Optional<Map<K, V>> getMapProperty(String key, Class<K> keyType, Class<V> valueType) {
+    var targetKey = targetPropertyName(key);
+    var mapResult = binder.bind(targetKey, Bindable.mapOf(keyType, valueType));
+    return mapResult.isBound() ? Optional.of(mapResult.get()) : Optional.empty();
+  }
+
+}
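
Resolution goes through Spring's Binder, so relaxed binding typically applies: a resolver prefixed with "kafka.clusters.0" will find the property whether it is set as kafka.clusters.0.schemaRegistry in YAML or as the KAFKA_CLUSTERS_0_SCHEMAREGISTRY environment variable. A sketch (env is an assumed Spring Environment):

    PropertyResolver clusterProps = new PropertyResolverImpl(env, "kafka.clusters.0");
    Optional<String> schemaRegistryUrl =
        clusterProps.getProperty("schemaRegistry", String.class);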

+ 23 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/RecordHeaderImpl.java

@@ -0,0 +1,23 @@
+package com.provectus.kafka.ui.serdes;
+
+import com.provectus.kafka.ui.serde.api.RecordHeader;
+import org.apache.kafka.common.header.Header;
+
+public class RecordHeaderImpl implements RecordHeader {
+
+  private final Header header;
+
+  public RecordHeaderImpl(Header header) {
+    this.header = header;
+  }
+
+  @Override
+  public String key() {
+    return header.key();
+  }
+
+  @Override
+  public byte[] value() {
+    return header.value();
+  }
+}

+ 26 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/RecordHeadersImpl.java

@@ -0,0 +1,26 @@
+package com.provectus.kafka.ui.serdes;
+
+import com.google.common.collect.Iterators;
+import com.provectus.kafka.ui.serde.api.RecordHeader;
+import com.provectus.kafka.ui.serde.api.RecordHeaders;
+import java.util.Iterator;
+import org.apache.kafka.common.header.Headers;
+
+
+public class RecordHeadersImpl implements RecordHeaders {
+
+  private final Headers headers;
+
+  public RecordHeadersImpl() {
+    this(new org.apache.kafka.common.header.internals.RecordHeaders());
+  }
+
+  public RecordHeadersImpl(Headers headers) {
+    this.headers = headers;
+  }
+
+  @Override
+  public Iterator<RecordHeader> iterator() {
+    return Iterators.transform(headers.iterator(), RecordHeaderImpl::new);
+  }
+}

+ 71 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdeInstance.java

@@ -0,0 +1,71 @@
+package com.provectus.kafka.ui.serdes;
+
+import com.provectus.kafka.ui.serde.api.SchemaDescription;
+import com.provectus.kafka.ui.serde.api.Serde;
+import java.util.Optional;
+import java.util.function.Supplier;
+import java.util.regex.Pattern;
+import javax.annotation.Nullable;
+import lombok.Getter;
+import lombok.RequiredArgsConstructor;
+
+
+@RequiredArgsConstructor
+public class SerdeInstance {
+
+  @Getter
+  private final String name;
+
+  private final Serde serde;
+
+  @Nullable
+  final Pattern topicKeyPattern;
+
+  @Nullable
+  final Pattern topicValuePattern;
+
+  @Nullable // will be set for custom serdes
+  private final ClassLoader classLoader;
+
+  private <T> T wrapWithClassloader(Supplier<T> call) {
+    if (classLoader == null) {
+      return call.get();
+    }
+    var origCl = ClassloaderUtil.compareAndSwapLoaders(classLoader);
+    try {
+      return call.get();
+    } finally {
+      ClassloaderUtil.compareAndSwapLoaders(origCl);
+    }
+  }
+
+  public Optional<SchemaDescription> getSchema(String topic, Serde.Target type) {
+    return wrapWithClassloader(() -> serde.getSchema(topic, type));
+  }
+
+  public Optional<String> description() {
+    return wrapWithClassloader(serde::getDescription);
+  }
+
+  public boolean canSerialize(String topic, Serde.Target type) {
+    return wrapWithClassloader(() -> serde.canSerialize(topic, type));
+  }
+
+  public boolean canDeserialize(String topic, Serde.Target type) {
+    return wrapWithClassloader(() -> serde.canDeserialize(topic, type));
+  }
+
+  public Serde.Serializer serializer(String topic, Serde.Target type) {
+    return wrapWithClassloader(() -> {
+      var serializer = serde.serializer(topic, type);
+      return input -> wrapWithClassloader(() -> serializer.serialize(input));
+    });
+  }
+
+  public Serde.Deserializer deserializer(String topic, Serde.Target type) {
+    return wrapWithClassloader(() -> {
+      var deserializer = serde.deserializer(topic, type);
+      return (headers, data) -> wrapWithClassloader(() -> deserializer.deserialize(headers, data));
+    });
+  }
+}

+ 74 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/Base64Serde.java

@@ -0,0 +1,74 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.PropertyResolver;
+import com.provectus.kafka.ui.serde.api.RecordHeaders;
+import com.provectus.kafka.ui.serde.api.SchemaDescription;
+import com.provectus.kafka.ui.serdes.BuiltInSerde;
+import java.util.Base64;
+import java.util.Map;
+import java.util.Optional;
+import org.apache.kafka.common.header.Headers;
+
+public class Base64Serde implements BuiltInSerde {
+
+  public static String name() {
+    return "Base64";
+  }
+
+  @Override
+  public void configure(PropertyResolver serdeProperties,
+                        PropertyResolver kafkaClusterProperties,
+                        PropertyResolver globalProperties) {
+  }
+
+  @Override
+  public Optional<String> getDescription() {
+    return Optional.empty();
+  }
+
+  @Override
+  public Optional<SchemaDescription> getSchema(String topic, Target type) {
+    return Optional.empty();
+  }
+
+  @Override
+  public boolean canDeserialize(String topic, Target type) {
+    return true;
+  }
+
+  @Override
+  public boolean canSerialize(String topic, Target type) {
+    return true;
+  }
+
+  @Override
+  public Serializer serializer(String topic, Target type) {
+    return new Serializer() {
+      @Override
+      public byte[] serialize(String input) {
+        input = input.trim();
+        // a deliberate hack: allows sending an empty byte array as a key/value
+        if (input.isEmpty()) {
+          return new byte[]{};
+        }
+        return Base64.getDecoder().decode(input);
+      }
+    };
+  }
+
+  @Override
+  public Deserializer deserializer(String topic, Target type) {
+    var encoder = Base64.getEncoder();
+    return new Deserializer() {
+      @Override
+      public DeserializeResult deserialize(RecordHeaders headers, byte[] data) {
+        return new DeserializeResult(
+            encoder.encodeToString(data),
+            DeserializeResult.Type.STRING,
+            Map.of()
+        );
+      }
+    };
+  }
+}
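
Note the direction: serialize() decodes user-entered Base64 into raw record bytes, while deserialize() re-encodes record bytes into Base64 text for display. A round-trip sketch:

    Base64Serde serde = new Base64Serde();
    byte[] raw = serde.serializer("any-topic", Serde.Target.VALUE)
        .serialize("aGVsbG8=");   // raw now holds the bytes of "hello"
    DeserializeResult shown = serde.deserializer("any-topic", Serde.Target.VALUE)
        .deserialize(new RecordHeadersImpl(), raw);
    // shown.getResult() == "aGVsbG8="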

+ 70 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/Int32Serde.java

@@ -0,0 +1,70 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import com.google.common.primitives.Ints;
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.PropertyResolver;
+import com.provectus.kafka.ui.serde.api.SchemaDescription;
+import com.provectus.kafka.ui.serdes.BuiltInSerde;
+import java.util.Map;
+import java.util.Optional;
+
+public class Int32Serde implements BuiltInSerde {
+
+  public static String name() {
+    return "Int32";
+  }
+
+  @Override
+  public void configure(PropertyResolver serdeProperties,
+                        PropertyResolver kafkaClusterProperties,
+                        PropertyResolver globalProperties) {
+  }
+
+  @Override
+  public Optional<String> getDescription() {
+    return Optional.empty();
+  }
+
+  @Override
+  public Optional<SchemaDescription> getSchema(String topic, Target type) {
+    return Optional.of(
+        new SchemaDescription(
+            String.format(
+                "{ "
+                    + "  \"type\" : \"integer\", "
+                    + "  \"minimum\" : %s, "
+                    + "  \"maximum\" : %s "
+                    + "}",
+                Integer.MIN_VALUE,
+                Integer.MAX_VALUE
+            ),
+            Map.of()
+        )
+    );
+  }
+
+  @Override
+  public boolean canDeserialize(String topic, Target type) {
+    return true;
+  }
+
+  @Override
+  public boolean canSerialize(String topic, Target type) {
+    return true;
+  }
+
+  @Override
+  public Serializer serializer(String topic, Target type) {
+    return input -> Ints.toByteArray(Integer.parseInt(input));
+  }
+
+  @Override
+  public Deserializer deserializer(String topic, Target type) {
+    return (headers, data) ->
+        new DeserializeResult(
+            String.valueOf(Ints.fromByteArray(data)),
+            DeserializeResult.Type.JSON,
+            Map.of()
+        );
+  }
+}
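
Guava's Ints.toByteArray/fromByteArray use big-endian byte order, so the wire format lines up with Kafka's own IntegerSerializer/IntegerDeserializer. For example:

    Int32Serde serde = new Int32Serde();
    byte[] bytes = serde.serializer("t", Serde.Target.KEY).serialize("42");
    // bytes == {0, 0, 0, 42} -- the same layout Kafka's IntegerSerializer produces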

+ 76 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/Int64Serde.java

@@ -0,0 +1,76 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import com.google.common.primitives.Longs;
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.PropertyResolver;
+import com.provectus.kafka.ui.serde.api.RecordHeaders;
+import com.provectus.kafka.ui.serde.api.SchemaDescription;
+import com.provectus.kafka.ui.serdes.BuiltInSerde;
+import java.util.Map;
+import java.util.Optional;
+
+public class Int64Serde implements BuiltInSerde {
+
+  public static String name() {
+    return "Int64";
+  }
+
+  @Override
+  public void configure(PropertyResolver serdeProperties,
+                        PropertyResolver kafkaClusterProperties,
+                        PropertyResolver globalProperties) {
+
+  }
+
+  @Override
+  public Optional<String> getDescription() {
+    return Optional.empty();
+  }
+
+  @Override
+  public Optional<SchemaDescription> getSchema(String topic, Target type) {
+    return Optional.of(
+        new SchemaDescription(
+            String.format(
+                "{ "
+                    + "  \"type\" : \"integer\", "
+                    + "  \"minimum\" : %s, "
+                    + "  \"maximum\" : %s "
+                    + "}",
+                Long.MIN_VALUE,
+                Long.MAX_VALUE
+            ),
+            Map.of()
+        )
+    );
+  }
+
+  @Override
+  public boolean canDeserialize(String topic, Target type) {
+    return true;
+  }
+
+  @Override
+  public boolean canSerialize(String topic, Target type) {
+    return true;
+  }
+
+  @Override
+  public Serializer serializer(String topic, Target type) {
+    return input -> Longs.toByteArray(Long.parseLong(input));
+  }
+
+  @Override
+  public Deserializer deserializer(String topic, Target type) {
+    return new Deserializer() {
+      @Override
+      public DeserializeResult deserialize(RecordHeaders headers, byte[] data) {
+        return new DeserializeResult(
+            String.valueOf(Longs.fromByteArray(data)),
+            DeserializeResult.Type.JSON,
+            Map.of()
+        );
+      }
+    };
+  }
+}

+ 178 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java

@@ -0,0 +1,178 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.protobuf.Descriptors.Descriptor;
+import com.google.protobuf.DynamicMessage;
+import com.google.protobuf.util.JsonFormat;
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.PropertyResolver;
+import com.provectus.kafka.ui.serde.api.RecordHeaders;
+import com.provectus.kafka.ui.serde.api.SchemaDescription;
+import com.provectus.kafka.ui.serdes.BuiltInSerde;
+import com.provectus.kafka.ui.util.jsonschema.ProtobufSchemaConverter;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaUtils;
+import java.io.ByteArrayInputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import javax.annotation.Nullable;
+import lombok.SneakyThrows;
+
+
+public class ProtobufFileSerde implements BuiltInSerde {
+
+  public static String name() {
+    return "ProtobufFile";
+  }
+
+  private static final ProtobufSchemaConverter SCHEMA_CONVERTER = new ProtobufSchemaConverter();
+
+  private Path protobufSchemaPath;
+
+  private Map<String, Descriptor> messageDescriptorMap = new HashMap<>();
+  private Map<String, Descriptor> keyMessageDescriptorMap = new HashMap<>();
+
+  private Descriptor defaultMessageDescriptor;
+
+  @Nullable
+  private Descriptor defaultKeyMessageDescriptor;
+
+  @Override
+  public boolean initOnStartup(PropertyResolver kafkaClusterProperties,
+                               PropertyResolver globalProperties) {
+    return kafkaClusterProperties.getProperty("protobufFile", String.class)
+        .isPresent();
+  }
+
+  @SneakyThrows
+  @Override
+  public void configure(PropertyResolver serdeProperties,
+                        PropertyResolver kafkaClusterProperties,
+                        PropertyResolver globalProperties) {
+    protobufSchemaPath = Path.of(
+        kafkaClusterProperties.getProperty("protobufFile", String.class)
+            .orElseThrow());
+    ProtobufSchema protobufSchema;
+    try (Stream<String> lines = Files.lines(protobufSchemaPath)) {
+      protobufSchema = new ProtobufSchema(lines.collect(Collectors.joining("\n")));
+    }
+    configure(
+        protobufSchemaPath,
+        defaultMessageDescriptor = kafkaClusterProperties.getProperty("protobufMessageName", String.class)
+            .map(msgName -> Objects.requireNonNull(protobufSchema.toDescriptor(msgName),
+                "The given message type not found in protobuf definition: " + msgName))
+            // this is strange logic, but we need it to support serde's backward-compatibility
+            .orElseGet(protobufSchema::toDescriptor),
+        defaultKeyMessageDescriptor = kafkaClusterProperties.getProperty("protobufMessageNameForKey", String.class)
+            .map(msgName -> Objects.requireNonNull(protobufSchema.toDescriptor(msgName),
+                "The given message type not found in protobuf definition: " + msgName))
+            .orElse(null),
+        kafkaClusterProperties.getMapProperty("protobufMessageNameByTopic", String.class, String.class)
+            .map(map -> populateDescriptors(protobufSchema, map))
+            .orElse(Map.of()),
+        kafkaClusterProperties.getMapProperty("protobufMessageNameForKeyByTopic", String.class, String.class)
+            .map(map -> populateDescriptors(protobufSchema, map))
+            .orElse(Map.of())
+    );
+  }
+
+  @VisibleForTesting
+  void configure(
+      Path protobufSchemaPath,
+      Descriptor defaultMessageDescriptor,
+      @Nullable Descriptor defaultKeyMessageDescriptor,
+      Map<String, Descriptor> messageDescriptorMap,
+      Map<String, Descriptor> keyMessageDescriptorMap) {
+    this.protobufSchemaPath = protobufSchemaPath;
+    this.defaultMessageDescriptor = defaultMessageDescriptor;
+    this.defaultKeyMessageDescriptor = defaultKeyMessageDescriptor;
+    this.messageDescriptorMap = messageDescriptorMap;
+    this.keyMessageDescriptorMap = keyMessageDescriptorMap;
+  }
+
+  private Map<String, Descriptor> populateDescriptors(ProtobufSchema protobufSchema,
+                                                      Map<String, String> messageNameMap) {
+    Map<String, Descriptor> descriptors = new HashMap<>();
+    for (Map.Entry<String, String> entry : messageNameMap.entrySet()) {
+      var descriptor = Objects.requireNonNull(protobufSchema.toDescriptor(entry.getValue()),
+          "The given message type is not found in protobuf definition: "
+              + entry.getValue());
+      descriptors.put(entry.getKey(), descriptor);
+    }
+    return descriptors;
+  }
+
+  @Override
+  public Optional<String> getDescription() {
+    return Optional.empty();
+  }
+
+  private Optional<Descriptor> descriptorFor(String topic, Target type) {
+    return type == Target.KEY
+        ? Optional.ofNullable(keyMessageDescriptorMap.get(topic))
+            .or(() -> Optional.ofNullable(defaultKeyMessageDescriptor))
+        : Optional.ofNullable(messageDescriptorMap.get(topic))
+            .or(() -> Optional.ofNullable(defaultMessageDescriptor));
+  }
+
+  @Override
+  public boolean canDeserialize(String topic, Target type) {
+    return descriptorFor(topic, type).isPresent();
+  }
+
+  @Override
+  public boolean canSerialize(String topic, Target type) {
+    return descriptorFor(topic, type).isPresent();
+  }
+
+  @Override
+  public Serializer serializer(String topic, Target type) {
+    var descriptor = descriptorFor(topic, type).orElseThrow();
+    return new Serializer() {
+      @SneakyThrows
+      @Override
+      public byte[] serialize(String input) {
+        DynamicMessage.Builder builder = DynamicMessage.newBuilder(descriptor);
+        JsonFormat.parser().merge(input, builder);
+        return builder.build().toByteArray();
+      }
+    };
+  }
+
+  @Override
+  public Deserializer deserializer(String topic, Target type) {
+    var descriptor = descriptorFor(topic, type).orElseThrow();
+    return new Deserializer() {
+      @SneakyThrows
+      @Override
+      public DeserializeResult deserialize(RecordHeaders headers, byte[] data) {
+        var protoMsg = DynamicMessage.parseFrom(descriptor, new ByteArrayInputStream(data));
+        byte[] jsonFromProto = ProtobufSchemaUtils.toJson(protoMsg);
+        var result = new String(jsonFromProto);
+        return new DeserializeResult(
+            result,
+            DeserializeResult.Type.JSON,
+            Map.of()
+        );
+      }
+    };
+  }
+
+  @Override
+  public Optional<SchemaDescription> getSchema(String topic, Target type) {
+    return descriptorFor(topic, type)
+        .map(descriptor ->
+            new SchemaDescription(
+                SCHEMA_CONVERTER.convert(protobufSchemaPath.toUri(), descriptor).toJson(),
+                Map.of("messageName", descriptor.getFullName())
+            ));
+  }
+}
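
A round-trip sketch using the test-visible configure overload with an inline schema; the "Order" message, file name, and topic are illustrative only:

    ProtobufSchema schema = new ProtobufSchema(
        "syntax = \"proto3\"; message Order { int64 id = 1; int64 amount = 2; }");
    ProtobufFileSerde serde = new ProtobufFileSerde();
    serde.configure(
        Path.of("orders.proto"),       // only used when rendering schema descriptions
        schema.toDescriptor("Order"),  // default value descriptor
        null,                          // no default key descriptor
        Map.of(), Map.of());           // no per-topic overrides

    byte[] wire = serde.serializer("orders", Serde.Target.VALUE)
        .serialize("{\"id\": 1, \"amount\": 42}");
    DeserializeResult back = serde.deserializer("orders", Serde.Target.VALUE)
        .deserialize(new RecordHeadersImpl(), wire);
    // back.getResult() is the JSON rendering of the parsed Order message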

+ 65 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/StringSerde.java

@@ -0,0 +1,65 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.PropertyResolver;
+import com.provectus.kafka.ui.serde.api.SchemaDescription;
+import com.provectus.kafka.ui.serdes.BuiltInSerde;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.util.Map;
+import java.util.Optional;
+
+public class StringSerde implements BuiltInSerde {
+
+  public static String name() {
+    return "String";
+  }
+
+  private Charset encoding;
+
+  @Override
+  public void configure(PropertyResolver serdeProperties,
+                        PropertyResolver kafkaClusterProperties,
+                        PropertyResolver globalProperties) {
+    encoding = Charset.forName(
+        serdeProperties.getProperty("encoding", String.class)
+            .orElse(StandardCharsets.UTF_8.name())
+    );
+  }
+
+  @Override
+  public Optional<String> getDescription() {
+    return Optional.empty();
+  }
+
+  @Override
+  public Optional<SchemaDescription> getSchema(String topic, Target type) {
+    return Optional.empty();
+  }
+
+  @Override
+  public boolean canDeserialize(String topic, Target type) {
+    return true;
+  }
+
+  @Override
+  public boolean canSerialize(String topic, Target type) {
+    return true;
+  }
+
+  @Override
+  public Serializer serializer(String topic, Target type) {
+    return input -> input.getBytes(encoding);
+  }
+
+  @Override
+  public Deserializer deserializer(String topic, Target type) {
+    return (headers, data) ->
+        new DeserializeResult(
+            new String(data, encoding),
+            DeserializeResult.Type.STRING,
+            Map.of()
+        );
+  }
+
+}

+ 70 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/UInt32Serde.java

@@ -0,0 +1,70 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import com.google.common.primitives.Ints;
+import com.google.common.primitives.UnsignedInteger;
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.PropertyResolver;
+import com.provectus.kafka.ui.serde.api.SchemaDescription;
+import com.provectus.kafka.ui.serdes.BuiltInSerde;
+import java.util.Map;
+import java.util.Optional;
+
+public class UInt32Serde implements BuiltInSerde {
+
+  public static String name() {
+    return "UInt32";
+  }
+
+  @Override
+  public void configure(PropertyResolver serdeProperties,
+                        PropertyResolver kafkaClusterProperties,
+                        PropertyResolver globalProperties) {
+  }
+
+  @Override
+  public Optional<String> getDescription() {
+    return Optional.empty();
+  }
+
+  @Override
+  public Optional<SchemaDescription> getSchema(String topic, Target type) {
+    return Optional.of(
+        new SchemaDescription(
+            String.format(
+                "{ "
+                    + "  \"type\" : \"integer\", "
+                    + "  \"minimum\" : 0, "
+                    + "  \"maximum\" : %s"
+                    + "}",
+                UnsignedInteger.MAX_VALUE
+            ),
+            Map.of()
+        )
+    );
+  }
+
+  @Override
+  public boolean canDeserialize(String topic, Target type) {
+    return true;
+  }
+
+  @Override
+  public boolean canSerialize(String topic, Target type) {
+    return true;
+  }
+
+  @Override
+  public Serializer serializer(String topic, Target type) {
+    return input -> Ints.toByteArray(Integer.parseUnsignedInt(input));
+  }
+
+  @Override
+  public Deserializer deserializer(String topic, Target type) {
+    return (headers, data) ->
+        new DeserializeResult(
+            UnsignedInteger.fromIntBits(Ints.fromByteArray(data)).toString(),
+            DeserializeResult.Type.JSON,
+            Map.of()
+        );
+  }
+}

+ 79 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/UInt64Serde.java

@@ -0,0 +1,79 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import com.google.common.primitives.Longs;
+import com.google.common.primitives.UnsignedLong;
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.PropertyResolver;
+import com.provectus.kafka.ui.serde.api.RecordHeaders;
+import com.provectus.kafka.ui.serde.api.SchemaDescription;
+import com.provectus.kafka.ui.serdes.BuiltInSerde;
+import java.util.Map;
+import java.util.Optional;
+import org.apache.kafka.common.header.Headers;
+
+
+public class UInt64Serde implements BuiltInSerde {
+
+  public static String name() {
+    return "UInt64";
+  }
+
+  @Override
+  public void configure(PropertyResolver serdeProperties,
+                        PropertyResolver kafkaClusterProperties,
+                        PropertyResolver globalProperties) {
+
+  }
+
+  @Override
+  public Optional<String> getDescription() {
+    return Optional.empty();
+  }
+
+  @Override
+  public Optional<SchemaDescription> getSchema(String topic, Target type) {
+    return Optional.of(
+        new SchemaDescription(
+            String.format(
+                "{ "
+                    + "  \"type\" : \"integer\", "
+                    + "  \"minimum\" : 0, "
+                    + "  \"maximum\" : %s "
+                    + "}",
+                UnsignedLong.MAX_VALUE
+            ),
+            Map.of()
+        )
+    );
+  }
+
+  @Override
+  public boolean canDeserialize(String topic, Target type) {
+    return true;
+  }
+
+  @Override
+  public boolean canSerialize(String topic, Target type) {
+    return true;
+  }
+
+  @Override
+  public Serializer serializer(String topic, Target type) {
+    return input -> Longs.toByteArray(Long.parseUnsignedLong(input));
+  }
+
+  @Override
+  public Deserializer deserializer(String topic, Target type) {
+    return new Deserializer() {
+      @Override
+      public DeserializeResult deserialize(RecordHeaders headers, byte[] data) {
+        return new DeserializeResult(
+            UnsignedLong.fromLongBits(Longs.fromByteArray(data)).toString(),
+            DeserializeResult.Type.JSON,
+            Map.of()
+        );
+      }
+    };
+  }
+}

+ 91 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/UuidBinarySerde.java

@@ -0,0 +1,91 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import com.provectus.kafka.ui.exception.ValidationException;
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.PropertyResolver;
+import com.provectus.kafka.ui.serde.api.RecordHeaders;
+import com.provectus.kafka.ui.serde.api.SchemaDescription;
+import com.provectus.kafka.ui.serdes.BuiltInSerde;
+import java.nio.ByteBuffer;
+import java.util.Map;
+import java.util.Optional;
+import java.util.UUID;
+import org.apache.kafka.common.header.Headers;
+
+
+public class UuidBinarySerde implements BuiltInSerde {
+
+  public static String name() {
+    return "UUIDBinary";
+  }
+
+  private boolean mostSignificantBitsFirst;
+
+  @Override
+  public void configure(PropertyResolver serdeProperties,
+                        PropertyResolver kafkaClusterProperties,
+                        PropertyResolver globalProperties) {
+    mostSignificantBitsFirst = serdeProperties.getProperty("mostSignificantBitsFirst", Boolean.class)
+        .orElse(true);
+  }
+
+  @Override
+  public Optional<String> getDescription() {
+    return Optional.empty();
+  }
+
+  @Override
+  public Optional<SchemaDescription> getSchema(String topic, Target type) {
+    return Optional.empty();
+  }
+
+  @Override
+  public boolean canDeserialize(String topic, Target type) {
+    return true;
+  }
+
+  @Override
+  public boolean canSerialize(String topic, Target type) {
+    return true;
+  }
+
+  @Override
+  public Serializer serializer(String topic, Target type) {
+    return new Serializer() {
+      @Override
+      public byte[] serialize(String input) {
+        UUID uuid = UUID.fromString(input);
+        ByteBuffer bb = ByteBuffer.wrap(new byte[16]);
+        if (mostSignificantBitsFirst) {
+          bb.putLong(uuid.getMostSignificantBits());
+          bb.putLong(uuid.getLeastSignificantBits());
+        } else {
+          bb.putLong(uuid.getLeastSignificantBits());
+          bb.putLong(uuid.getMostSignificantBits());
+        }
+        return bb.array();
+      }
+    };
+  }
+
+  @Override
+  public Deserializer deserializer(String topic, Target type) {
+    return new Deserializer() {
+      @Override
+      public DeserializeResult deserialize(RecordHeaders headers, byte[] data) {
+        if (data.length != 16) {
+          throw new ValidationException("UUID data should be 16 bytes, but it is " + data.length);
+        }
+        ByteBuffer bb = ByteBuffer.wrap(data);
+        long msb = bb.getLong();
+        long lsb = bb.getLong();
+        UUID uuid = mostSignificantBitsFirst ? new UUID(msb, lsb) : new UUID(lsb, msb);
+        return new DeserializeResult(
+            uuid.toString(),
+            DeserializeResult.Type.STRING,
+            Map.of()
+        );
+      }
+    };
+  }
+}
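
The mostSignificantBitsFirst flag (default true) controls which half of the UUID is written first; some systems persist the two longs swapped. A sketch of the byte layout:

    UUID uuid = UUID.fromString("123e4567-e89b-12d3-a456-426614174000");
    ByteBuffer bb = ByteBuffer.wrap(new byte[16]);
    bb.putLong(uuid.getMostSignificantBits());   // bytes 0..7  when the flag is true
    bb.putLong(uuid.getLeastSignificantBits());  // bytes 8..15
    // with mostSignificantBitsFirst=false the two longs are written in reverse order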

+ 6 - 9
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/AvroMessageReader.java → kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/AvroSchemaRegistrySerializer.java

@@ -1,23 +1,20 @@
-package com.provectus.kafka.ui.serde.schemaregistry;
+package com.provectus.kafka.ui.serdes.builtin.sr;
 
 import io.confluent.kafka.schemaregistry.ParsedSchema;
 import io.confluent.kafka.schemaregistry.avro.AvroSchema;
 import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
 import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
 import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
-import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
 import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
 import io.confluent.kafka.serializers.KafkaAvroSerializer;
-import java.io.IOException;
 import java.util.Map;
 import org.apache.kafka.common.serialization.Serializer;
 
-public class AvroMessageReader extends MessageReader<Object> {
+class AvroSchemaRegistrySerializer extends SchemaRegistrySerializer<Object> {
 
-  public AvroMessageReader(String topic, boolean isKey,
-                           SchemaRegistryClient client,
-                           SchemaMetadata schema)
-      throws IOException, RestClientException {
+  AvroSchemaRegistrySerializer(String topic, boolean isKey,
+                               SchemaRegistryClient client,
+                               SchemaMetadata schema) {
     super(topic, isKey, client, schema);
   }
 
@@ -36,7 +33,7 @@ public class AvroMessageReader extends MessageReader<Object> {
   }
 
   @Override
-  protected Object read(String value, ParsedSchema schema) {
+  protected Object serialize(String value, ParsedSchema schema) {
     try {
       return AvroSchemaUtils.toObject(value, (AvroSchema) schema);
     } catch (Throwable e) {

+ 7 - 9
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/JsonSchemaMessageReader.java → kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/JsonSchemaSchemaRegistrySerializer.java

@@ -1,4 +1,4 @@
-package com.provectus.kafka.ui.serde.schemaregistry;
+package com.provectus.kafka.ui.serdes.builtin.sr;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonNode;
@@ -8,22 +8,20 @@ import com.provectus.kafka.ui.util.annotations.KafkaClientInternalsDependant;
 import io.confluent.kafka.schemaregistry.ParsedSchema;
 import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
 import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
-import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
 import io.confluent.kafka.schemaregistry.json.JsonSchema;
 import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
 import io.confluent.kafka.serializers.json.KafkaJsonSchemaSerializer;
-import java.io.IOException;
 import java.util.Map;
 import org.apache.kafka.common.serialization.Serializer;
 
-public class JsonSchemaMessageReader extends MessageReader<JsonNode> {
+class JsonSchemaSchemaRegistrySerializer extends SchemaRegistrySerializer<JsonNode> {
 
   private static final ObjectMapper MAPPER = new ObjectMapper();
 
-  public JsonSchemaMessageReader(String topic,
-                                 boolean isKey,
-                                 SchemaRegistryClient client,
-                                 SchemaMetadata schema) throws IOException, RestClientException {
+  JsonSchemaSchemaRegistrySerializer(String topic,
+                                     boolean isKey,
+                                     SchemaRegistryClient client,
+                                     SchemaMetadata schema) {
     super(topic, isKey, client, schema);
   }
 
@@ -42,7 +40,7 @@ public class JsonSchemaMessageReader extends MessageReader<JsonNode> {
   }
 
   @Override
-  protected JsonNode read(String value, ParsedSchema schema) {
+  protected JsonNode serialize(String value, ParsedSchema schema) {
     try {
       JsonNode json = MAPPER.readTree(value);
       ((JsonSchema) schema).validate(json);

+ 73 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java

@@ -0,0 +1,73 @@
+package com.provectus.kafka.ui.serdes.builtin.sr;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.google.protobuf.Message;
+import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
+import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaUtils;
+import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import io.confluent.kafka.serializers.json.KafkaJsonSchemaDeserializer;
+import io.confluent.kafka.serializers.protobuf.KafkaProtobufDeserializer;
+import java.util.Map;
+import lombok.SneakyThrows;
+
+interface MessageFormatter {
+
+  String format(String topic, byte[] value);
+
+  static Map<SchemaType, MessageFormatter> createMap(SchemaRegistryClient schemaRegistryClient) {
+    return Map.of(
+        SchemaType.AVRO, new AvroMessageFormatter(schemaRegistryClient),
+        SchemaType.JSON, new JsonSchemaMessageFormatter(schemaRegistryClient),
+        SchemaType.PROTOBUF, new ProtobufMessageFormatter(schemaRegistryClient)
+    );
+  }
+
+  class AvroMessageFormatter implements MessageFormatter {
+    private final KafkaAvroDeserializer avroDeserializer;
+
+    AvroMessageFormatter(SchemaRegistryClient client) {
+      this.avroDeserializer = new KafkaAvroDeserializer(client);
+    }
+
+    @Override
+    @SneakyThrows
+    public String format(String topic, byte[] value) {
+      // the deserialized object's type depends on the schema type (record or primitive);
+      // AvroSchemaUtils.toJson(...) takes this into account
+      Object deserialized = avroDeserializer.deserialize(topic, value);
+      byte[] jsonBytes = AvroSchemaUtils.toJson(deserialized);
+      return new String(jsonBytes);
+    }
+  }
+
+  class ProtobufMessageFormatter implements MessageFormatter {
+    private final KafkaProtobufDeserializer<?> protobufDeserializer;
+
+    ProtobufMessageFormatter(SchemaRegistryClient client) {
+      this.protobufDeserializer = new KafkaProtobufDeserializer<>(client);
+    }
+
+    @Override
+    @SneakyThrows
+    public String format(String topic, byte[] value) {
+      final Message message = protobufDeserializer.deserialize(topic, value);
+      byte[] jsonBytes = ProtobufSchemaUtils.toJson(message);
+      return new String(jsonBytes);
+    }
+  }
+
+  class JsonSchemaMessageFormatter implements MessageFormatter {
+    private final KafkaJsonSchemaDeserializer<JsonNode> jsonSchemaDeserializer;
+
+    JsonSchemaMessageFormatter(SchemaRegistryClient client) {
+      this.jsonSchemaDeserializer = new KafkaJsonSchemaDeserializer<>(client);
+    }
+
+    @Override
+    public String format(String topic, byte[] value) {
+      JsonNode json = jsonSchemaDeserializer.deserialize(topic, value);
+      return json.toString();
+    }
+  }
+}
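
A minimal usage sketch for these formatters (not part of the diff; assumes code living in the same com.provectus.kafka.ui.serdes.builtin.sr package, since both types are package-private, a Schema Registry reachable at http://localhost:8081, and a Confluent-framed Avro payload):

    import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
    import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
    import java.util.Map;

    class FormatterUsageSketch {
      static String toJson(byte[] confluentFramedAvroRecord) {
        SchemaRegistryClient client =
            new CachedSchemaRegistryClient("http://localhost:8081", 1_000);
        Map<SchemaType, MessageFormatter> formatters = MessageFormatter.createMap(client);
        // the AVRO formatter deserializes via KafkaAvroDeserializer and renders the result as JSON
        return formatters.get(SchemaType.AVRO).format("some-topic", confluentFramedAvroRecord);
      }
    }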

+ 7 - 8
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/ProtobufMessageReader.java → kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/ProtobufSchemaRegistrySerializer.java

@@ -1,4 +1,4 @@
-package com.provectus.kafka.ui.serde.schemaregistry;
+package com.provectus.kafka.ui.serdes.builtin.sr;
 
 import com.google.protobuf.DynamicMessage;
 import com.google.protobuf.Message;
@@ -6,19 +6,18 @@ import com.google.protobuf.util.JsonFormat;
 import io.confluent.kafka.schemaregistry.ParsedSchema;
 import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
 import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
-import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
 import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
 import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
 import io.confluent.kafka.serializers.protobuf.KafkaProtobufSerializer;
-import java.io.IOException;
 import java.util.Map;
+import lombok.SneakyThrows;
 import org.apache.kafka.common.serialization.Serializer;
 
-public class ProtobufMessageReader extends MessageReader<Message> {
+class ProtobufSchemaRegistrySerializer extends SchemaRegistrySerializer<Message> {
 
-  public ProtobufMessageReader(String topic, boolean isKey,
-                               SchemaRegistryClient client, SchemaMetadata schema)
-      throws IOException, RestClientException {
+  @SneakyThrows
+  public ProtobufSchemaRegistrySerializer(String topic, boolean isKey,
+                                          SchemaRegistryClient client, SchemaMetadata schema) {
     super(topic, isKey, client, schema);
   }
 
@@ -37,7 +36,7 @@ public class ProtobufMessageReader extends MessageReader<Message> {
   }
 
   @Override
-  protected Message read(String value, ParsedSchema schema) {
+  protected Message serialize(String value, ParsedSchema schema) {
     ProtobufSchema protobufSchema = (ProtobufSchema) schema;
     DynamicMessage.Builder builder = protobufSchema.newMessageBuilder();
     try {

+ 265 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java

@@ -0,0 +1,265 @@
+package com.provectus.kafka.ui.serdes.builtin.sr;
+
+import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE;
+import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.USER_INFO_CONFIG;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.provectus.kafka.ui.exception.ValidationException;
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.PropertyResolver;
+import com.provectus.kafka.ui.serde.api.RecordHeaders;
+import com.provectus.kafka.ui.serde.api.SchemaDescription;
+import com.provectus.kafka.ui.serdes.BuiltInSerde;
+import com.provectus.kafka.ui.util.jsonschema.AvroJsonSchemaConverter;
+import com.provectus.kafka.ui.util.jsonschema.ProtobufSchemaConverter;
+import io.confluent.kafka.schemaregistry.ParsedSchema;
+import io.confluent.kafka.schemaregistry.avro.AvroSchema;
+import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider;
+import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
+import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
+import io.confluent.kafka.schemaregistry.json.JsonSchemaProvider;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider;
+import java.net.URI;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.Callable;
+import javax.annotation.Nullable;
+import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
+
+
+public class SchemaRegistrySerde implements BuiltInSerde {
+
+  public static String name() {
+    return "SchemaRegistry";
+  }
+
+  private SchemaRegistryClient schemaRegistryClient;
+  private List<String> schemaRegistryUrls;
+  private String valueSchemaNameTemplate;
+  private String keySchemaNameTemplate;
+
+  private Map<SchemaType, MessageFormatter> schemaRegistryFormatters;
+
+  @Override
+  public boolean initOnStartup(PropertyResolver kafkaClusterProperties,
+                               PropertyResolver globalProperties) {
+    return kafkaClusterProperties.getListProperty("schemaRegistry", String.class)
+        .filter(lst -> !lst.isEmpty())
+        .isPresent();
+  }
+
+  @Override
+  public void configure(PropertyResolver serdeProperties,
+                        PropertyResolver kafkaClusterProperties,
+                        PropertyResolver globalProperties) {
+    var urls = serdeProperties.getListProperty("url", String.class)
+        .or(() -> kafkaClusterProperties.getListProperty("schemaRegistry", String.class))
+        .filter(lst -> !lst.isEmpty())
+        .orElseThrow(() -> new ValidationException("No urls provided for schema registry"));
+    configure(
+        urls,
+        createSchemaRegistryClient(
+            urls,
+            serdeProperties.getProperty("username", String.class)
+                .or(() -> kafkaClusterProperties.getProperty("schemaRegistryAuth.username", String.class))
+                .orElse(null),
+            serdeProperties.getProperty("password", String.class)
+                .or(() -> kafkaClusterProperties.getProperty("schemaRegistryAuth.password", String.class))
+                .orElse(null)
+        ),
+        serdeProperties.getProperty("keySchemaNameTemplate", String.class)
+            .or(() -> kafkaClusterProperties.getProperty("keySchemaNameTemplate", String.class))
+            .orElse("%s-key"),
+        serdeProperties.getProperty("schemaNameTemplate", String.class)
+            .or(() -> kafkaClusterProperties.getProperty("schemaNameTemplate", String.class))
+            .orElse("%s-value")
+    );
+  }
+
+  @VisibleForTesting
+  void configure(
+      List<String> schemaRegistryUrls,
+      SchemaRegistryClient schemaRegistryClient,
+      String keySchemaNameTemplate,
+      String valueSchemaNameTemplate) {
+    this.schemaRegistryUrls = schemaRegistryUrls;
+    this.schemaRegistryClient = schemaRegistryClient;
+    this.keySchemaNameTemplate = keySchemaNameTemplate;
+    this.valueSchemaNameTemplate = valueSchemaNameTemplate;
+    this.schemaRegistryFormatters = MessageFormatter.createMap(schemaRegistryClient);
+  }
+
+  private static SchemaRegistryClient createSchemaRegistryClient(List<String> urls,
+                                                                 @Nullable String username,
+                                                                 @Nullable String password) {
+    Map<String, String> configs = new HashMap<>();
+    if (username != null && password != null) {
+      configs.put(BASIC_AUTH_CREDENTIALS_SOURCE, "USER_INFO");
+      configs.put(USER_INFO_CONFIG, username + ":" + password);
+    } else if (username != null) {
+      throw new ValidationException(
+          "You specified a username but did not specify a password");
+    } else if (password != null) {
+      throw new ValidationException(
+          "You specified a password but did not specify a username");
+    }
+    return new CachedSchemaRegistryClient(
+        urls,
+        1_000,
+        List.of(new AvroSchemaProvider(), new ProtobufSchemaProvider(), new JsonSchemaProvider()),
+        configs
+    );
+  }
+
+  @Override
+  public Optional<String> getDescription() {
+    return Optional.empty();
+  }
+
+  @Override
+  public boolean canDeserialize(String topic, Target type) {
+    return true;
+  }
+
+  @Override
+  public boolean canSerialize(String topic, Target type) {
+    String subject = schemaSubject(topic, type);
+    return getSchemaBySubject(subject).isPresent();
+  }
+
+  @Override
+  public Optional<SchemaDescription> getSchema(String topic, Target type) {
+    String subject = schemaSubject(topic, type);
+    return getSchemaBySubject(subject)
+        .map(schemaMetadata ->
+            new SchemaDescription(
+                convertSchema(schemaMetadata),
+                Map.of(
+                    "schemaId", schemaMetadata.getId(),
+                    "latestVersion", schemaMetadata.getVersion(),
+                    "type", schemaMetadata.getSchemaType() // AVRO / PROTOBUF / JSON
+                )
+            ));
+  }
+
+  @SneakyThrows
+  private String convertSchema(SchemaMetadata schema) {
+    URI basePath = new URI(schemaRegistryUrls.get(0))
+        .resolve(Integer.toString(schema.getId()));
+    ParsedSchema schemaById = schemaRegistryClient.getSchemaById(schema.getId());
+    SchemaType schemaType = SchemaType.fromString(schema.getSchemaType())
+        .orElseThrow(() -> new IllegalStateException("Unknown schema type: " + schema.getSchemaType()));
+    switch (schemaType) {
+      case PROTOBUF:
+        return new ProtobufSchemaConverter()
+            .convert(basePath, ((ProtobufSchema) schemaById).toDescriptor())
+            .toJson();
+      case AVRO:
+        return new AvroJsonSchemaConverter()
+            .convert(basePath, ((AvroSchema) schemaById).rawSchema())
+            .toJson();
+      case JSON:
+        return schema.getSchema();
+      default:
+        throw new IllegalStateException();
+    }
+  }
+
+  private Optional<SchemaMetadata> getSchemaBySubject(String subject) {
+    return wrapWith404Handler(() -> schemaRegistryClient.getLatestSchemaMetadata(subject));
+  }
+
+  @SneakyThrows
+  private <T> Optional<T> wrapWith404Handler(Callable<T> call) {
+    try {
+      return Optional.ofNullable(call.call());
+    } catch (RestClientException restClientException) {
+      if (restClientException.getStatus() == 404) {
+        return Optional.empty();
+      } else {
+        throw new RuntimeException("Error calling SchemaRegistryClient", restClientException);
+      }
+    }
+  }
+
+  private String schemaSubject(String topic, Target type) {
+    return String.format(type == Target.KEY ? keySchemaNameTemplate : valueSchemaNameTemplate, topic);
+  }
+
+  @Override
+  public Serializer serializer(String topic, Target type) {
+    String subject = schemaSubject(topic, type);
+    var schema = getSchemaBySubject(subject)
+        .orElseThrow(() -> new ValidationException(String.format("No schema for subject '%s' found", subject)));
+    boolean isKey = type == Target.KEY;
+    SchemaType schemaType = SchemaType.fromString(schema.getSchemaType())
+        .orElseThrow(() -> new IllegalStateException("Unknown schema type: " + schema.getSchemaType()));
+    switch (schemaType) {
+      case PROTOBUF:
+        return new ProtobufSchemaRegistrySerializer(topic, isKey, schemaRegistryClient, schema);
+      case AVRO:
+        return new AvroSchemaRegistrySerializer(topic, isKey, schemaRegistryClient, schema);
+      case JSON:
+        return new JsonSchemaSchemaRegistrySerializer(topic, isKey, schemaRegistryClient, schema);
+      default:
+        throw new IllegalStateException();
+    }
+  }
+
+  @Override
+  public Deserializer deserializer(String topic, Target type) {
+    return new SrDeserializer(topic);
+  }
+
+  ///--------------------------------------------------------------
+
+  private static final byte SR_RECORD_MAGIC_BYTE = (byte) 0;
+  private static final int SR_RECORD_PREFIX_LENGTH = 5;
+
+  @RequiredArgsConstructor
+  private class SrDeserializer implements Deserializer {
+
+    private final String topic;
+
+    @Override
+    public DeserializeResult deserialize(RecordHeaders headers, byte[] data) {
+      var schemaId = extractSchemaIdFromMsg(data);
+      SchemaType format = getMessageFormatBySchemaId(schemaId);
+      MessageFormatter formatter = schemaRegistryFormatters.get(format);
+      return new DeserializeResult(
+          formatter.format(topic, data),
+          DeserializeResult.Type.JSON,
+          Map.of(
+              "schemaId", schemaId,
+              "type", format.name()
+          )
+      );
+    }
+  }
+
+  private SchemaType getMessageFormatBySchemaId(int schemaId) {
+    return wrapWith404Handler(() -> schemaRegistryClient.getSchemaById(schemaId))
+        .map(ParsedSchema::schemaType)
+        .flatMap(SchemaType::fromString)
+        .orElseThrow(() -> new ValidationException(String.format("Schema for id '%d' not found", schemaId)));
+  }
+
+  private int extractSchemaIdFromMsg(byte[] data) {
+    ByteBuffer buffer = ByteBuffer.wrap(data);
+    if (buffer.remaining() > SR_RECORD_PREFIX_LENGTH && buffer.get() == SR_RECORD_MAGIC_BYTE) {
+      return buffer.getInt();
+    }
+    throw new ValidationException(
+        String.format(
+            "Data doesn't contain magic byte and schema id prefix, so it can't be deserialized with %s serde",
+            name())
+    );
+  }
+}
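
The 5-byte prefix checked in extractSchemaIdFromMsg is the standard Confluent wire format: a zero magic byte followed by a 4-byte big-endian schema id, then the serialized body. A self-contained illustration (not part of the diff):

    import java.nio.ByteBuffer;

    class ConfluentWireFormatDemo {
      public static void main(String[] args) {
        // frame a 3-byte payload with magic byte 0 and schema id 42
        byte[] framed = ByteBuffer.allocate(5 + 3)
            .put((byte) 0)
            .putInt(42)
            .put(new byte[] {1, 2, 3})
            .array();

        ByteBuffer buffer = ByteBuffer.wrap(framed);
        if (buffer.remaining() > 5 && buffer.get() == (byte) 0) {
          System.out.println("schema id = " + buffer.getInt()); // prints: schema id = 42
        }
      }
    }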

+ 11 - 9
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/MessageReader.java → kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerializer.java

@@ -1,20 +1,21 @@
-package com.provectus.kafka.ui.serde.schemaregistry;
+package com.provectus.kafka.ui.serdes.builtin.sr;
 
+import com.provectus.kafka.ui.serde.api.Serde;
 import io.confluent.kafka.schemaregistry.ParsedSchema;
 import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
 import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
-import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
-import java.io.IOException;
+import lombok.SneakyThrows;
 import org.apache.kafka.common.serialization.Serializer;
 
-public abstract class MessageReader<T> {
+abstract class SchemaRegistrySerializer<T> implements Serde.Serializer {
   protected final Serializer<T> serializer;
   protected final String topic;
   protected final boolean isKey;
   protected final ParsedSchema schema;
 
-  protected MessageReader(String topic, boolean isKey, SchemaRegistryClient client,
-                          SchemaMetadata schema) throws IOException, RestClientException {
+  @SneakyThrows
+  protected SchemaRegistrySerializer(String topic, boolean isKey, SchemaRegistryClient client,
+                                     SchemaMetadata schema) {
     this.topic = topic;
     this.isKey = isKey;
     this.serializer = createSerializer(client);
@@ -23,10 +24,11 @@ public abstract class MessageReader<T> {
 
   protected abstract Serializer<T> createSerializer(SchemaRegistryClient client);
 
-  public byte[] read(String value) {
-    final T read = this.read(value, schema);
+  @Override
+  public byte[] serialize(String input) {
+    final T read = this.serialize(input, schema);
     return this.serializer.serialize(topic, read);
   }
 
-  protected abstract T read(String value, ParsedSchema schema);
+  protected abstract T serialize(String value, ParsedSchema schema);
 }

+ 14 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaType.java

@@ -0,0 +1,14 @@
+package com.provectus.kafka.ui.serdes.builtin.sr;
+
+import java.util.Optional;
+import org.apache.commons.lang3.EnumUtils;
+
+enum SchemaType {
+  AVRO,
+  JSON,
+  PROTOBUF;
+
+  public static Optional<SchemaType> fromString(String typeString) {
+    return Optional.ofNullable(EnumUtils.getEnum(SchemaType.class, typeString));
+  }
+}
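
EnumUtils.getEnum returns null instead of throwing for unknown names, so fromString maps directly onto Optional; note the lookup is case-sensitive:

    SchemaType.fromString("AVRO");   // Optional[AVRO]
    SchemaType.fromString("avro");   // Optional.empty()
    SchemaType.fromString("THRIFT"); // Optional.empty()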

+ 2 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java

@@ -91,13 +91,12 @@ public class ConsumerGroupService {
                   )
                   .flatMap((Map<String, Map<TopicPartition, Long>> groupOffsets) ->
                      // 4. getting description for groups with non-empty offsets
-                      ac.describeConsumerGroups(new ArrayList<>(groupOffsets.keySet()))
+                      ac.describeConsumerGroups(groupOffsets.keySet())
                           .map((Map<String, ConsumerGroupDescription> descriptions) ->
                               descriptions.values().stream().map(desc ->
-                                      // 5. gathering and filter non-target-topic data
+                                      // 5. gathering into InternalConsumerGroup
                                       InternalConsumerGroup.create(
                                               desc, groupOffsets.get(desc.groupId()), endOffsets)
-                                          .retainDataForPartitions(p -> p.topic().equals(topic))
                                   )
                                   .collect(Collectors.toList())));
             }));

+ 140 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/DeserializationService.java

@@ -0,0 +1,140 @@
+package com.provectus.kafka.ui.service;
+
+import com.provectus.kafka.ui.config.ClustersProperties;
+import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.model.SerdeDescriptionDTO;
+import com.provectus.kafka.ui.serde.api.SchemaDescription;
+import com.provectus.kafka.ui.serde.api.Serde;
+import com.provectus.kafka.ui.serdes.ClusterSerdes;
+import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
+import com.provectus.kafka.ui.serdes.ProducerRecordCreator;
+import com.provectus.kafka.ui.serdes.SerdeInstance;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import javax.annotation.Nullable;
+import javax.validation.ValidationException;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.core.env.Environment;
+import org.springframework.stereotype.Component;
+
+@Slf4j
+@Component
+public class DeserializationService {
+
+  private final Map<KafkaCluster, ClusterSerdes> clusterSerdes = new ConcurrentHashMap<>();
+
+  public DeserializationService(Environment env,
+                                ClustersStorage clustersStorage,
+                                ClustersProperties clustersProperties) {
+    for (int i = 0; i < clustersProperties.getClusters().size(); i++) {
+      var clusterProperties = clustersProperties.getClusters().get(i);
+      var cluster = clustersStorage.getClusterByName(clusterProperties.getName()).get();
+      clusterSerdes.put(cluster, new ClusterSerdes(env, clustersProperties, i));
+    }
+  }
+
+  private Serde.Serializer getSerializer(KafkaCluster cluster,
+                                         String topic,
+                                         Serde.Target type,
+                                         String serdeName) {
+    var serdes = this.clusterSerdes.get(cluster);
+    var serde = serdes.serdeForName(serdeName)
+        .orElseThrow(() -> new ValidationException(
+            String.format("Serde %s not found", serdeName)));
+    if (!serde.canSerialize(topic, type)) {
+      throw new ValidationException(
+          String.format("Serde '%s' can't be applied to serialize %s of topic '%s'", serdeName, type, topic));
+    }
+    return serde.serializer(topic, type);
+  }
+
+  private SerdeInstance getSerdeForDeserialize(KafkaCluster cluster,
+                                               String topic,
+                                               Serde.Target type,
+                                               @Nullable String serdeName) {
+    var serdes = this.clusterSerdes.get(cluster);
+    if (serdeName != null) {
+      var serde = serdes.serdeForName(serdeName)
+          .orElseThrow(() -> new ValidationException(String.format("Serde '%s' not found", serdeName)));
+      if (!serde.canDeserialize(topic, type)) {
+        throw new ValidationException(
+            String.format("Serde '%s' can't be applied to deserialize %s of topic '%s'", serdeName, type, topic));
+      }
+      return serde;
+    } else {
+      return serdes.suggestSerdeForDeserialize(topic, type);
+    }
+  }
+
+  public ProducerRecordCreator producerRecordCreator(KafkaCluster cluster,
+                                                     String topic,
+                                                     String keySerdeName,
+                                                     String valueSerdeName) {
+    return new ProducerRecordCreator(
+        getSerializer(cluster, topic, Serde.Target.KEY, keySerdeName),
+        getSerializer(cluster, topic, Serde.Target.VALUE, valueSerdeName)
+    );
+  }
+
+  public ConsumerRecordDeserializer deserializerFor(KafkaCluster cluster,
+                                                    String topic,
+                                                    @Nullable String keySerdeName,
+                                                    @Nullable String valueSerdeName) {
+    var keySerde = getSerdeForDeserialize(cluster, topic, Serde.Target.KEY, keySerdeName);
+    var valueSerde = getSerdeForDeserialize(cluster, topic, Serde.Target.VALUE, valueSerdeName);
+    var fallbackSerde = clusterSerdes.get(cluster).getFallbackSerde();
+    return new ConsumerRecordDeserializer(
+        keySerde.getName(),
+        keySerde.deserializer(topic, Serde.Target.KEY),
+        valueSerde.getName(),
+        valueSerde.deserializer(topic, Serde.Target.VALUE),
+        fallbackSerde.getName(),
+        fallbackSerde.deserializer(topic, Serde.Target.KEY),
+        fallbackSerde.deserializer(topic, Serde.Target.VALUE)
+    );
+  }
+
+  public List<SerdeDescriptionDTO> getSerdesForSerialize(KafkaCluster cluster,
+                                                         String topic,
+                                                         Serde.Target serdeType) {
+    var serdes = clusterSerdes.get(cluster);
+    var preferred = serdes.suggestSerdeForSerialize(topic, serdeType);
+    var result = new ArrayList<SerdeDescriptionDTO>();
+    result.add(toDto(preferred, topic, serdeType, true));
+    serdes.all()
+        .filter(s -> !s.getName().equals(preferred.getName()))
+        .filter(s -> s.canSerialize(topic, serdeType))
+        .forEach(s -> result.add(toDto(s, topic, serdeType, false)));
+    return result;
+  }
+
+  public List<SerdeDescriptionDTO> getSerdesForDeserialize(KafkaCluster cluster,
+                                                           String topic,
+                                                           Serde.Target serdeType) {
+    var serdes = clusterSerdes.get(cluster);
+    var preferred = serdes.suggestSerdeForDeserialize(topic, serdeType);
+    var result = new ArrayList<SerdeDescriptionDTO>();
+    result.add(toDto(preferred, topic, serdeType, true));
+    serdes.all()
+        .filter(s -> !s.getName().equals(preferred.getName()))
+        .filter(s -> s.canDeserialize(topic, serdeType))
+        .forEach(s -> result.add(toDto(s, topic, serdeType, false)));
+    return result;
+  }
+
+  private SerdeDescriptionDTO toDto(SerdeInstance serdeInstance,
+                                    String topic,
+                                    Serde.Target serdeType,
+                                    boolean preferred) {
+    var schemaOpt = serdeInstance.getSchema(topic, serdeType);
+    return new SerdeDescriptionDTO()
+        .name(serdeInstance.getName())
+        .description(serdeInstance.description().orElse(null))
+        .schema(schemaOpt.map(SchemaDescription::getSchema).orElse(null))
+        .additionalProperties(schemaOpt.map(SchemaDescription::getAdditionalProperties).orElse(null))
+        .preferred(preferred);
+  }
+
+}
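
An illustrative caller-side sketch (variable and topic names are assumptions; "SchemaRegistry" and "String" refer to built-in serde names, and passing null asks the service to suggest a serde for that topic/target):

    // key serde pinned explicitly, value serde suggested per topic;
    // fallback serde deserializers are also wired in (see ConsumerRecordDeserializer)
    ConsumerRecordDeserializer deserializer =
        deserializationService.deserializerFor(cluster, "orders", "SchemaRegistry", null);

    // serializers resolved and validated per topic/target before producing
    ProducerRecordCreator creator =
        deserializationService.producerRecordCreator(cluster, "orders", "String", "SchemaRegistry");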

+ 27 - 33
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java

@@ -12,8 +12,8 @@ import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.model.MessageFilterTypeDTO;
 import com.provectus.kafka.ui.model.SeekDirectionDTO;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
-import com.provectus.kafka.ui.serde.DeserializationService;
-import com.provectus.kafka.ui.serde.RecordSerDe;
+import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
+import com.provectus.kafka.ui.serdes.ProducerRecordCreator;
 import com.provectus.kafka.ui.util.OffsetsSeekBackward;
 import com.provectus.kafka.ui.util.OffsetsSeekForward;
 import com.provectus.kafka.ui.util.ResultSizeLimiter;
@@ -35,9 +35,6 @@ import org.apache.kafka.clients.producer.ProducerConfig;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.clients.producer.RecordMetadata;
 import org.apache.kafka.common.TopicPartition;
-import org.apache.kafka.common.header.Header;
-import org.apache.kafka.common.header.internals.RecordHeader;
-import org.apache.kafka.common.header.internals.RecordHeaders;
 import org.apache.kafka.common.serialization.ByteArraySerializer;
 import org.springframework.stereotype.Service;
 import reactor.core.publisher.Flux;
@@ -96,8 +93,13 @@ public class MessagesService {
         && msg.getPartition() > topicDescription.partitions().size() - 1) {
       return Mono.error(new ValidationException("Invalid partition"));
     }
-    RecordSerDe serde =
-        deserializationService.getRecordDeserializerForCluster(cluster);
+    ProducerRecordCreator producerRecordCreator =
+        deserializationService.producerRecordCreator(
+            cluster,
+            topicDescription.name(),
+            msg.getKeySerde().get(),
+            msg.getValueSerde().get()
+        );
 
     Properties properties = new Properties();
     properties.putAll(cluster.getProperties());
@@ -105,19 +107,13 @@ public class MessagesService {
     properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
     properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
     try (KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(properties)) {
-      ProducerRecord<byte[], byte[]> producerRecord = serde.serialize(
+      ProducerRecord<byte[], byte[]> producerRecord = producerRecordCreator.create(
           topicDescription.name(),
+          msg.getPartition(),
           msg.getKey().orElse(null),
           msg.getContent().orElse(null),
-          msg.getPartition()
+          msg.getHeaders()
       );
-      producerRecord = new ProducerRecord<>(
-          producerRecord.topic(),
-          producerRecord.partition(),
-          producerRecord.key(),
-          producerRecord.value(),
-          createHeaders(msg.getHeaders()));
-
       CompletableFuture<RecordMetadata> cf = new CompletableFuture<>();
       producer.send(producerRecord, (metadata, exception) -> {
         if (exception != null) {
@@ -132,32 +128,30 @@ public class MessagesService {
     }
   }
 
-  private Iterable<Header> createHeaders(@Nullable Map<String, String> clientHeaders) {
-    if (clientHeaders == null) {
-      return new RecordHeaders();
-    }
-    RecordHeaders headers = new RecordHeaders();
-    clientHeaders.forEach((k, v) -> headers.add(new RecordHeader(k, v.getBytes())));
-    return headers;
-  }
-
   public Flux<TopicMessageEventDTO> loadMessages(KafkaCluster cluster, String topic,
                                                  ConsumerPosition consumerPosition, String query,
                                                  MessageFilterTypeDTO filterQueryType,
-                                                 int limit) {
+                                                 int limit,
+                                                 @Nullable String keySerde,
+                                                 @Nullable String valueSerde) {
     return withExistingTopic(cluster, topic)
         .flux()
-        .flatMap(td -> loadMessagesImpl(cluster, topic, consumerPosition, query, filterQueryType, limit));
+        .flatMap(td -> loadMessagesImpl(cluster, topic, consumerPosition, query,
+            filterQueryType, limit, keySerde, valueSerde));
   }
 
-  private Flux<TopicMessageEventDTO> loadMessagesImpl(KafkaCluster cluster, String topic,
-                                                 ConsumerPosition consumerPosition, String query,
-                                                 MessageFilterTypeDTO filterQueryType,
-                                                 int limit) {
+  private Flux<TopicMessageEventDTO> loadMessagesImpl(KafkaCluster cluster,
+                                                      String topic,
+                                                      ConsumerPosition consumerPosition,
+                                                      String query,
+                                                      MessageFilterTypeDTO filterQueryType,
+                                                      int limit,
+                                                      @Nullable String keySerde,
+                                                      @Nullable String valueSerde) {
 
     java.util.function.Consumer<? super FluxSink<TopicMessageEventDTO>> emitter;
-    RecordSerDe recordDeserializer =
-        deserializationService.getRecordDeserializerForCluster(cluster);
+    ConsumerRecordDeserializer recordDeserializer =
+        deserializationService.deserializerFor(cluster, topic, keySerde, valueSerde);
     if (consumerPosition.getSeekDirection().equals(SeekDirectionDTO.FORWARD)) {
       emitter = new ForwardRecordEmitter(
           () -> consumerGroupService.createConsumer(cluster),

+ 2 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java

@@ -334,7 +334,7 @@ public class ReactiveAdminClient implements Closeable {
         .map(lst -> lst.stream().map(ConsumerGroupListing::groupId).collect(toList()));
   }
 
-  public Mono<Map<String, ConsumerGroupDescription>> describeConsumerGroups(List<String> groupIds) {
+  public Mono<Map<String, ConsumerGroupDescription>> describeConsumerGroups(Collection<String> groupIds) {
     return toMono(client.describeConsumerGroups(groupIds).all());
   }
 
@@ -372,6 +372,7 @@ public class ReactiveAdminClient implements Closeable {
 
   public Mono<Map<TopicPartition, Long>> listOffsets(Collection<TopicPartition> partitions,
                                                      OffsetSpec offsetSpec) {
+    //TODO: split this into multiple calls if the number of target partitions is large
     return toMono(
         client.listOffsets(partitions.stream().collect(toMap(tp -> tp, tp -> offsetSpec))).all())
         .map(offsets -> offsets.entrySet()
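
One possible shape for the batching the TODO mentions (a sketch only, not part of this PR; assumes Guava's Lists.partition plus java.util.ArrayList/HashMap imports, and reuses the existing listOffsets method):

    private Mono<Map<TopicPartition, Long>> listOffsetsBatched(Collection<TopicPartition> partitions,
                                                               OffsetSpec offsetSpec,
                                                               int batchSize) {
      // query offsets chunk-by-chunk and merge the partial results into one map
      return Flux.fromIterable(Lists.partition(new ArrayList<>(partitions), batchSize))
          .flatMap(chunk -> listOffsets(chunk, offsetSpec))
          .reduce(new HashMap<>(), (acc, part) -> {
            acc.putAll(part);
            return acc;
          });
    }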

+ 0 - 11
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java

@@ -20,9 +20,7 @@ import com.provectus.kafka.ui.model.PartitionsIncreaseResponseDTO;
 import com.provectus.kafka.ui.model.ReplicationFactorChangeDTO;
 import com.provectus.kafka.ui.model.ReplicationFactorChangeResponseDTO;
 import com.provectus.kafka.ui.model.TopicCreationDTO;
-import com.provectus.kafka.ui.model.TopicMessageSchemaDTO;
 import com.provectus.kafka.ui.model.TopicUpdateDTO;
-import com.provectus.kafka.ui.serde.DeserializationService;
 import com.provectus.kafka.ui.util.JmxClusterUtil;
 import java.time.Duration;
 import java.util.Collection;
@@ -423,15 +421,6 @@ public class TopicsService {
     }
   }
 
-  public TopicMessageSchemaDTO getTopicSchema(KafkaCluster cluster, String topicName) {
-    if (!metricsCache.get(cluster).getTopicDescriptions().containsKey(topicName)) {
-      throw new TopicNotFoundException();
-    }
-    return deserializationService
-        .getRecordDeserializerForCluster(cluster)
-        .getTopicSchema(topicName);
-  }
-
   public Mono<InternalTopic> cloneTopic(
       KafkaCluster cluster, String topicName, String newTopicName) {
     return loadTopic(cluster, topicName).flatMap(topic ->

+ 0 - 80
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ClusterUtil.java

@@ -1,80 +0,0 @@
-package com.provectus.kafka.ui.util;
-
-import com.provectus.kafka.ui.model.MessageFormatDTO;
-import com.provectus.kafka.ui.model.ServerStatusDTO;
-import com.provectus.kafka.ui.model.TopicMessageDTO;
-import com.provectus.kafka.ui.serde.RecordSerDe;
-import java.time.Instant;
-import java.time.OffsetDateTime;
-import java.time.ZoneId;
-import java.util.HashMap;
-import java.util.Map;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.common.record.TimestampType;
-import org.apache.kafka.common.utils.Bytes;
-
-
-@Slf4j
-public class ClusterUtil {
-
-  private ClusterUtil() {
-  }
-
-  private static final ZoneId UTC_ZONE_ID = ZoneId.of("UTC");
-
-  public static TopicMessageDTO mapToTopicMessage(ConsumerRecord<Bytes, Bytes> consumerRecord,
-                                                  RecordSerDe recordDeserializer) {
-
-    Map<String, String> headers = new HashMap<>();
-    consumerRecord.headers().iterator()
-        .forEachRemaining(header ->
-            headers.put(
-                header.key(),
-                header.value() != null ? new String(header.value()) : null
-            )
-    );
-
-    TopicMessageDTO topicMessage = new TopicMessageDTO();
-
-    OffsetDateTime timestamp =
-        OffsetDateTime.ofInstant(Instant.ofEpochMilli(consumerRecord.timestamp()), UTC_ZONE_ID);
-    TopicMessageDTO.TimestampTypeEnum timestampType =
-        mapToTimestampType(consumerRecord.timestampType());
-    topicMessage.setPartition(consumerRecord.partition());
-    topicMessage.setOffset(consumerRecord.offset());
-    topicMessage.setTimestamp(timestamp);
-    topicMessage.setTimestampType(timestampType);
-
-    topicMessage.setHeaders(headers);
-    var parsed = recordDeserializer.deserialize(consumerRecord);
-    topicMessage.setKey(parsed.getKey());
-    topicMessage.setContent(parsed.getValue());
-    topicMessage.setKeyFormat(parsed.getKeyFormat() != null
-        ? MessageFormatDTO.valueOf(parsed.getKeyFormat().name())
-        : null);
-    topicMessage.setValueFormat(parsed.getValueFormat() != null
-        ? MessageFormatDTO.valueOf(parsed.getValueFormat().name())
-        : null);
-    topicMessage.setKeySize(ConsumerRecordUtil.getKeySize(consumerRecord));
-    topicMessage.setValueSize(ConsumerRecordUtil.getValueSize(consumerRecord));
-    topicMessage.setKeySchemaId(parsed.getKeySchemaId());
-    topicMessage.setValueSchemaId(parsed.getValueSchemaId());
-    topicMessage.setHeadersSize(ConsumerRecordUtil.getHeadersSize(consumerRecord));
-
-    return topicMessage;
-  }
-
-  private static TopicMessageDTO.TimestampTypeEnum mapToTimestampType(TimestampType timestampType) {
-    switch (timestampType) {
-      case CREATE_TIME:
-        return TopicMessageDTO.TimestampTypeEnum.CREATE_TIME;
-      case LOG_APPEND_TIME:
-        return TopicMessageDTO.TimestampTypeEnum.LOG_APPEND_TIME;
-      case NO_TIMESTAMP_TYPE:
-        return TopicMessageDTO.TimestampTypeEnum.NO_TIMESTAMP_TYPE;
-      default:
-        throw new IllegalArgumentException("Unknown timestampType: " + timestampType);
-    }
-  }
-}

+ 0 - 37
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ConsumerRecordUtil.java

@@ -1,37 +0,0 @@
-package com.provectus.kafka.ui.util;
-
-import java.util.Arrays;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.common.header.Header;
-import org.apache.kafka.common.header.Headers;
-import org.apache.kafka.common.utils.Bytes;
-
-public class ConsumerRecordUtil {
-
-  private ConsumerRecordUtil() {
-  }
-
-  public static Long getHeadersSize(ConsumerRecord<Bytes, Bytes> consumerRecord) {
-    Headers headers = consumerRecord.headers();
-    if (headers != null) {
-      return Arrays.stream(consumerRecord.headers().toArray())
-          .mapToLong(ConsumerRecordUtil::headerSize)
-          .sum();
-    }
-    return 0L;
-  }
-
-  public static Long getKeySize(ConsumerRecord<Bytes, Bytes> consumerRecord) {
-    return consumerRecord.key() != null ? (long) consumerRecord.key().get().length : null;
-  }
-
-  public static Long getValueSize(ConsumerRecord<Bytes, Bytes> consumerRecord) {
-    return consumerRecord.value() != null ? (long) consumerRecord.value().get().length : null;
-  }
-
-  private static int headerSize(Header header) {
-    int key = header.key() != null ? header.key().getBytes().length : 0;
-    int val = header.value() != null ? header.value().length : 0;
-    return key + val;
-  }
-}

+ 3 - 1
kafka-ui-api/src/test/java/com/provectus/kafka/ui/emitter/TailingEmitterTest.java

@@ -114,7 +114,9 @@ class TailingEmitterTest extends AbstractIntegrationTest {
             new ConsumerPosition(SeekTypeDTO.LATEST, Map.of(), SeekDirectionDTO.TAILING),
             query,
             MessageFilterTypeDTO.STRING_CONTAINS,
-            0);
+            0,
+            "String",
+            "String");
   }
 
   private List<TopicMessageEventDTO> startTailing(String filterQuery) {

+ 0 - 125
kafka-ui-api/src/test/java/com/provectus/kafka/ui/serde/ProtobufFileRecordSerDeTest.java

@@ -1,125 +0,0 @@
-package com.provectus.kafka.ui.serde;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-
-import com.google.protobuf.DynamicMessage;
-import com.google.protobuf.util.JsonFormat;
-import com.provectus.kafka.ui.serde.schemaregistry.MessageFormat;
-import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Collections;
-import java.util.Map;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.common.utils.Bytes;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Test;
-
-class ProtobufFileRecordSerDeTest {
-
-  // Sample message of type `test.Person`
-  private static byte[] personMessage;
-  // Sample message of type `test.AddressBook`
-  private static byte[] addressBookMessage;
-  private static Path protobufSchemaPath;
-
-  @BeforeAll
-  static void setUp() throws URISyntaxException, IOException {
-    protobufSchemaPath = Paths.get(ProtobufFileRecordSerDeTest.class.getClassLoader()
-        .getResource("address-book.proto").toURI());
-    ProtobufSchema protobufSchema = new ProtobufSchema(Files.readString(protobufSchemaPath));
-
-    DynamicMessage.Builder builder = protobufSchema.newMessageBuilder("test.Person");
-    JsonFormat.parser().merge(
-        "{ \"name\": \"My Name\",\"id\": 101, \"email\": \"user1@example.com\" }", builder);
-    personMessage = builder.build().toByteArray();
-
-    builder = protobufSchema.newMessageBuilder("test.AddressBook");
-    JsonFormat.parser().merge(
-        "{\"version\": 1, \"people\": ["
-            + "{ \"name\": \"My Name\",\"id\": 102, \"email\": \"user2@example.com\" }]}", builder);
-    addressBookMessage = builder.build().toByteArray();
-  }
-
-  @Test
-  void testDeserialize() throws IOException {
-    var messageNameMap = Map.of(
-        "topic1", "test.Person",
-        "topic2", "test.AddressBook");
-    var keyMessageNameMap = Map.of(
-        "topic2", "test.Person");
-    var deserializer =
-        new ProtobufFileRecordSerDe(protobufSchemaPath, messageNameMap, keyMessageNameMap, null, null);
-    var msg1 = deserializer
-        .deserialize(new ConsumerRecord<>("topic1", 1, 0, Bytes.wrap("key".getBytes()),
-            Bytes.wrap(personMessage)));
-    assertEquals(MessageFormat.PROTOBUF, msg1.getValueFormat());
-    assertTrue(msg1.getValue().contains("user1@example.com"));
-
-    var msg2 = deserializer
-        .deserialize(new ConsumerRecord<>("topic2", 1, 1, Bytes.wrap(personMessage),
-            Bytes.wrap(addressBookMessage)));
-    assertEquals(MessageFormat.PROTOBUF, msg2.getKeyFormat());
-    assertTrue(msg2.getKey().contains("user1@example.com"));
-    assertTrue(msg2.getValue().contains("user2@example.com"));
-  }
-
-  @Test
-  void testNoDefaultMessageName() throws IOException {
-    // by default the first message type defined in proto definition is used
-    var deserializer =
-        new ProtobufFileRecordSerDe(protobufSchemaPath, Collections.emptyMap(), null, null, null);
-    var msg = deserializer
-        .deserialize(new ConsumerRecord<>("topic", 1, 0, Bytes.wrap("key".getBytes()),
-            Bytes.wrap(personMessage)));
-    assertTrue(msg.getValue().contains("user1@example.com"));
-  }
-
-  @Test
-  void testDefaultMessageName() throws IOException {
-    var messageNameMap = Map.of("topic1", "test.Person");
-    var deserializer =
-        new ProtobufFileRecordSerDe(protobufSchemaPath, messageNameMap, null, "test.AddressBook", null);
-    var msg = deserializer
-        .deserialize(new ConsumerRecord<>("a_random_topic", 1, 0, Bytes.wrap(addressBookMessage),
-            Bytes.wrap(addressBookMessage)));
-    assertTrue(msg.getValue().contains("user2@example.com"));
-  }
-
-  @Test
-  void testDefaultKeyMessageName() throws IOException {
-    var messageNameMap = Map.of("topic1", "test.Person");
-    var deserializer =
-        new ProtobufFileRecordSerDe(protobufSchemaPath, messageNameMap, messageNameMap, "test.AddressBook",
-            "test.AddressBook");
-    var msg = deserializer
-        .deserialize(new ConsumerRecord<>("a_random_topic", 1, 0, Bytes.wrap(addressBookMessage),
-            Bytes.wrap(addressBookMessage)));
-    assertTrue(msg.getKey().contains("user2@example.com"));
-  }
-
-  @Test
-  void testSerialize() throws IOException {
-    var messageNameMap = Map.of("topic1", "test.Person");
-    var serializer =
-        new ProtobufFileRecordSerDe(protobufSchemaPath, messageNameMap, null, "test.AddressBook", null);
-    var serialized = serializer.serialize("topic1", "key1", "{\"name\":\"MyName\"}", 0);
-    assertNotNull(serialized.value());
-  }
-
-  @Test
-  void testSerializeKeyAndValue() throws IOException {
-    var messageNameMap = Map.of("topic1", "test.Person");
-    var serializer =
-        new ProtobufFileRecordSerDe(protobufSchemaPath, messageNameMap, messageNameMap, "test.AddressBook",
-                "test.AddressBook");
-    var serialized = serializer.serialize("topic1", "{\"name\":\"MyName\"}", "{\"name\":\"MyName\"}", 0);
-    assertNotNull(serialized.key());
-    assertNotNull(serialized.value());
-  }
-}

+ 0 - 40
kafka-ui-api/src/test/java/com/provectus/kafka/ui/serde/SimpleRecordSerDeTest.java

@@ -1,40 +0,0 @@
-package com.provectus.kafka.ui.serde;
-
-import static com.provectus.kafka.ui.serde.RecordSerDe.DeserializedKeyValue;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-import com.provectus.kafka.ui.serde.schemaregistry.MessageFormat;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.common.utils.Bytes;
-import org.junit.jupiter.api.Test;
-
-class SimpleRecordSerDeTest {
-
-  private final SimpleRecordSerDe serde = new SimpleRecordSerDe();
-
-  @Test
-  public void shouldDeserializeStringValue() {
-    var value = "test";
-    var deserializedRecord = serde.deserialize(
-        new ConsumerRecord<>("topic", 1, 0, Bytes.wrap("key".getBytes()),
-            Bytes.wrap(value.getBytes())));
-    DeserializedKeyValue expected = DeserializedKeyValue.builder()
-        .key("key")
-        .keyFormat(MessageFormat.UNKNOWN)
-        .value(value)
-        .valueFormat(MessageFormat.UNKNOWN)
-        .build();
-    assertEquals(expected, deserializedRecord);
-  }
-
-  @Test
-  public void shouldDeserializeNullValueRecordToEmptyMap() {
-    var deserializedRecord = serde
-        .deserialize(new ConsumerRecord<>("topic", 1, 0, Bytes.wrap("key".getBytes()), null));
-    DeserializedKeyValue expected = DeserializedKeyValue.builder()
-        .key("key")
-        .keyFormat(MessageFormat.UNKNOWN)
-        .build();
-    assertEquals(expected, deserializedRecord);
-  }
-}

+ 0 - 203
kafka-ui-api/src/test/java/com/provectus/kafka/ui/serde/schemaregistry/SchemaRegistryAwareRecordSerDeTest.java

@@ -1,203 +0,0 @@
-package com.provectus.kafka.ui.serde.schemaregistry;
-
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.verifyZeroInteractions;
-import static org.mockito.Mockito.when;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.json.JsonMapper;
-import com.provectus.kafka.ui.model.KafkaCluster;
-import io.confluent.kafka.schemaregistry.avro.AvroSchema;
-import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
-import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
-import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import org.apache.avro.generic.GenericDatumWriter;
-import org.apache.avro.io.Encoder;
-import org.apache.avro.io.EncoderFactory;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.common.utils.Bytes;
-import org.junit.jupiter.api.Nested;
-import org.junit.jupiter.api.Test;
-
-class SchemaRegistryAwareRecordSerDeTest {
-
-  private final SchemaRegistryClient registryClient = mock(SchemaRegistryClient.class);
-
-  private final SchemaRegistryAwareRecordSerDe serde = new SchemaRegistryAwareRecordSerDe(
-      KafkaCluster.builder().build(),
-      registryClient
-  );
-
-  @Nested
-  class Deserialize {
-
-    @Test
-    void callsSchemaFormatterWhenValueHasMagicByteAndValidSchemaId() throws Exception {
-      AvroSchema schema = new AvroSchema(
-          "{"
-              + "  \"type\": \"record\","
-              + "  \"name\": \"TestAvroRecord1\","
-              + "  \"fields\": ["
-              + "    {"
-              + "      \"name\": \"field1\","
-              + "      \"type\": \"string\""
-              + "    },"
-              + "    {"
-              + "      \"name\": \"field2\","
-              + "      \"type\": \"int\""
-              + "    }"
-              + "  ]"
-              + "}"
-      );
-
-      String jsonValueForSchema = "{ \"field1\":\"testStr\", \"field2\": 123 }";
-
-      int schemaId = 1234;
-      when(registryClient.getSchemaById(schemaId)).thenReturn(schema);
-      when(registryClient.getSchemaBySubjectAndId(null, schemaId)).thenReturn(schema);
-
-      var result = serde.deserialize(
-          new ConsumerRecord<>(
-              "test-topic",
-              1,
-              100,
-              Bytes.wrap("key".getBytes()),
-              bytesWithMagicByteAndSchemaId(schemaId, jsonToAvro(jsonValueForSchema, schema))
-          )
-      );
-
-      // called once by serde code
-      verify(registryClient, times(1)).getSchemaById(schemaId);
-      //called once by formatter (will be cached)
-      verify(registryClient, times(1)).getSchemaBySubjectAndId(null, schemaId);
-
-      assertThat(result.getKeySchemaId()).isNull();
-      assertThat(result.getKeyFormat()).isEqualTo(MessageFormat.UNKNOWN);
-      assertThat(result.getKey()).isEqualTo("key");
-
-      assertThat(result.getValueSchemaId()).isEqualTo(schemaId + "");
-      assertThat(result.getValueFormat()).isEqualTo(MessageFormat.AVRO);
-      assertJsonsEqual(jsonValueForSchema, result.getValue());
-    }
-
-    @Test
-    void fallsBackToStringFormatterIfValueContainsMagicByteButSchemaNotFound() throws Exception {
-      int nonExistingSchemaId = 12341234;
-      when(registryClient.getSchemaById(nonExistingSchemaId))
-          .thenThrow(new RestClientException("not fount", 404, 404));
-
-      Bytes value = bytesWithMagicByteAndSchemaId(nonExistingSchemaId, "somedata".getBytes());
-      var result = serde.deserialize(
-          new ConsumerRecord<>(
-              "test-topic",
-              1,
-              100,
-              Bytes.wrap("key".getBytes()),
-              value
-          )
-      );
-
-      // called to get schema by id - will throw not found
-      verify(registryClient, times(1)).getSchemaById(nonExistingSchemaId);
-
-      assertThat(result.getKeySchemaId()).isNull();
-      assertThat(result.getKeyFormat()).isEqualTo(MessageFormat.UNKNOWN);
-      assertThat(result.getKey()).isEqualTo("key");
-
-      assertThat(result.getValueSchemaId()).isNull();
-      assertThat(result.getValueFormat()).isEqualTo(MessageFormat.UNKNOWN);
-      assertThat(result.getValue()).isEqualTo(new String(value.get()));
-    }
-
-    @Test
-    void fallsBackToStringFormatterIfMagicByteAndSchemaIdFoundButFormatterFailed() throws Exception {
-      int schemaId = 1234;
-
-      final var schema = new AvroSchema("{ \"type\": \"string\" }");
-
-      when(registryClient.getSchemaById(schemaId))
-          .thenReturn(schema);
-      when(registryClient.getSchemaBySubjectAndId(null, schemaId)).thenReturn(schema);
-
-      // will cause exception in avro deserializer
-      Bytes nonAvroValue = bytesWithMagicByteAndSchemaId(schemaId, "123".getBytes());
-      var result = serde.deserialize(
-          new ConsumerRecord<>(
-              "test-topic",
-              1,
-              100,
-              Bytes.wrap("key".getBytes()),
-              nonAvroValue
-          )
-      );
-
-      // called once by serde code
-      verify(registryClient, times(1)).getSchemaById(schemaId);
-      //called once by formatter (will be cached)
-      verify(registryClient, times(1)).getSchemaBySubjectAndId(null, schemaId);
-
-      assertThat(result.getKeySchemaId()).isNull();
-      assertThat(result.getKeyFormat()).isEqualTo(MessageFormat.UNKNOWN);
-      assertThat(result.getKey()).isEqualTo("key");
-
-      assertThat(result.getValueSchemaId()).isNull();
-      assertThat(result.getValueFormat()).isEqualTo(MessageFormat.UNKNOWN);
-      assertThat(result.getValue()).isEqualTo(new String(nonAvroValue.get()));
-    }
-
-    @Test
-    void useStringFormatterWithoutRegistryManipulationIfMagicByteNotSet() {
-      var result = serde.deserialize(
-          new ConsumerRecord<>(
-              "test-topic",
-              1,
-              100,
-              Bytes.wrap("key".getBytes()),
-              Bytes.wrap("val".getBytes())
-          )
-      );
-
-      verifyZeroInteractions(registryClient);
-
-      assertThat(result.getKeySchemaId()).isNull();
-      assertThat(result.getKeyFormat()).isEqualTo(MessageFormat.UNKNOWN);
-      assertThat(result.getKey()).isEqualTo("key");
-
-      assertThat(result.getValueSchemaId()).isNull();
-      assertThat(result.getValueFormat()).isEqualTo(MessageFormat.UNKNOWN);
-      assertThat(result.getValue()).isEqualTo("val");
-    }
-
-    private void assertJsonsEqual(String expected, String actual) throws JsonProcessingException {
-      var mapper = new JsonMapper();
-      assertThat(mapper.readTree(actual)).isEqualTo(mapper.readTree(expected));
-    }
-
-    private Bytes bytesWithMagicByteAndSchemaId(int schemaId, byte[] body) {
-      return new Bytes(
-          ByteBuffer.allocate(1 + 4 + body.length)
-              .put((byte) 0)
-              .putInt(schemaId)
-              .put(body)
-              .array()
-      );
-    }
-
-    private byte[] jsonToAvro(String json, AvroSchema schema) throws IOException {
-      GenericDatumWriter<Object> writer = new GenericDatumWriter<>(schema.rawSchema());
-      ByteArrayOutputStream output = new ByteArrayOutputStream();
-      Encoder encoder = EncoderFactory.get().binaryEncoder(output, null);
-      writer.write(AvroSchemaUtils.toObject(json, schema), encoder);
-      encoder.flush();
-      return output.toByteArray();
-    }
-  }
-
-
-}

+ 156 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/PropertyResolverImplTest.java

@@ -0,0 +1,156 @@
+package com.provectus.kafka.ui.serdes;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatCode;
+
+import java.util.List;
+import java.util.Map;
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+import org.springframework.boot.context.properties.bind.BindException;
+import org.springframework.mock.env.MockEnvironment;
+
+class PropertyResolverImplTest {
+
+  private static final String TEST_STRING_VALUE = "testStr";
+  private static final int TEST_INT_VALUE = 123;
+  private static final List<String> TEST_STRING_LIST = List.of("v1", "v2", "v3");
+  private static final List<Integer> TEST_INT_LIST = List.of(1, 2, 3);
+
+  private final MockEnvironment env = new MockEnvironment();
+
+  @Data
+  @AllArgsConstructor
+  public static class CustomPropertiesClass {
+    private String f1;
+    private Integer f2;
+  }
+
+  @Test
+  void returnsEmptyOptionalWhenPropertyNotExist() {
+    var resolver = new PropertyResolverImpl(env);
+    assertThat(resolver.getProperty("nonExistingProp", String.class)).isEmpty();
+    assertThat(resolver.getListProperty("nonExistingProp", String.class)).isEmpty();
+    assertThat(resolver.getMapProperty("nonExistingProp", String.class, String.class)).isEmpty();
+  }
+
+  @Test
+  void throwsExceptionWhenPropertyCantBeResolvedToRequestedClass() {
+    env.setProperty("prop.0.strProp", "testStr");
+    env.setProperty("prop.0.strLst", "v1,v2,v3");
+    env.setProperty("prop.0.strMap.k1", "v1");
+
+    var resolver = new PropertyResolverImpl(env);
+    assertThatCode(() -> resolver.getProperty("prop.0.strProp", Integer.class))
+        .isInstanceOf(BindException.class);
+    assertThatCode(() -> resolver.getListProperty("prop.0.strLst", Integer.class))
+        .isInstanceOf(BindException.class);
+    assertThatCode(() -> resolver.getMapProperty("prop.0.strMap", Integer.class, String.class))
+        .isInstanceOf(BindException.class);
+  }
+
+  @Test
+  void resolvedSingleValueProperties() {
+    env.setProperty("prop.0.strProp", "testStr");
+    env.setProperty("prop.0.intProp", "123");
+
+    var resolver = new PropertyResolverImpl(env);
+    assertThat(resolver.getProperty("prop.0.strProp", String.class))
+        .hasValue("testStr");
+    assertThat(resolver.getProperty("prop.0.intProp", Integer.class))
+        .hasValue(123);
+  }
+
+  @Test
+  void resolvesListProperties() {
+    env.setProperty("prop.0.strLst", "v1,v2,v3");
+    env.setProperty("prop.0.intLst", "1,2,3");
+
+    var resolver = new PropertyResolverImpl(env);
+    assertThat(resolver.getListProperty("prop.0.strLst", String.class))
+        .hasValue(List.of("v1", "v2", "v3"));
+    assertThat(resolver.getListProperty("prop.0.intLst", Integer.class))
+        .hasValue(List.of(1, 2, 3));
+  }
+
+  @Test
+  void resolvesCustomConfigClassProperties() {
+    env.setProperty("prop.0.custProps.f1", "f1val");
+    env.setProperty("prop.0.custProps.f2", "1234");
+
+    var resolver = new PropertyResolverImpl(env);
+    assertThat(resolver.getProperty("prop.0.custProps", CustomPropertiesClass.class))
+        .hasValue(new CustomPropertiesClass("f1val", 1234));
+  }
+
+  @Test
+  void resolvesMapProperties() {
+    env.setProperty("prop.0.strMap.k1", "v1");
+    env.setProperty("prop.0.strMap.k2", "v2");
+    env.setProperty("prop.0.intToLongMap.100", "111");
+    env.setProperty("prop.0.intToLongMap.200", "222");
+
+    var resolver = new PropertyResolverImpl(env);
+    assertThat(resolver.getMapProperty("prop.0.strMap", String.class, String.class))
+        .hasValue(Map.of("k1", "v1", "k2", "v2"));
+    assertThat(resolver.getMapProperty("prop.0.intToLongMap", Integer.class, Long.class))
+        .hasValue(Map.of(100, 111L, 200, 222L));
+  }
+
+  @Nested
+  class WithPrefix {
+
+    @Test
+    void resolvesSingleValueProperties() {
+      env.setProperty("prop.0.strProp", "testStr");
+      env.setProperty("prop.0.intProp", "123");
+
+      var resolver = new PropertyResolverImpl(env, "prop.0");
+      assertThat(resolver.getProperty("strProp", String.class))
+          .hasValue(TEST_STRING_VALUE);
+
+      assertThat(resolver.getProperty("intProp", Integer.class))
+          .hasValue(TEST_INT_VALUE);
+    }
+
+    @Test
+    void resolvesListProperties() {
+      env.setProperty("prop.0.strLst", "v1,v2,v3");
+      env.setProperty("prop.0.intLst", "1,2,3");
+
+      var resolver = new PropertyResolverImpl(env, "prop.0");
+      assertThat(resolver.getListProperty("strLst", String.class))
+          .hasValue(TEST_STRING_LIST);
+      assertThat(resolver.getListProperty("intLst", Integer.class))
+          .hasValue(TEST_INT_LIST);
+    }
+
+    @Test
+    void resolvesCustomConfigClassProperties() {
+      env.setProperty("prop.0.custProps.f1", "f1val");
+      env.setProperty("prop.0.custProps.f2", "1234");
+
+      var resolver = new PropertyResolverImpl(env, "prop.0");
+      assertThat(resolver.getProperty("custProps", CustomPropertiesClass.class))
+          .hasValue(new CustomPropertiesClass("f1val", 1234));
+    }
+
+    @Test
+    void resolvesMapProperties() {
+      env.setProperty("prop.0.strMap.k1", "v1");
+      env.setProperty("prop.0.strMap.k2", "v2");
+      env.setProperty("prop.0.intToLongMap.100", "111");
+      env.setProperty("prop.0.intToLongMap.200", "222");
+
+      var resolver = new PropertyResolverImpl(env, "prop.0.");
+      assertThat(resolver.getMapProperty("strMap", String.class, String.class))
+          .hasValue(Map.of("k1", "v1", "k2", "v2"));
+      assertThat(resolver.getMapProperty("intToLongMap", Integer.class, Long.class))
+          .hasValue(Map.of(100, 111L, 200, 222L));
+    }
+  }
+
+}
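
The tests above pin down how `PropertyResolverImpl` scopes lookups: with a prefix, property names are resolved relative to it, and missing keys come back as empty `Optional`s. A minimal usage sketch mirroring that behavior (the `serde.0` prefix here is illustrative, not a fixed convention):

```java
import com.provectus.kafka.ui.serdes.PropertyResolverImpl;
import org.springframework.mock.env.MockEnvironment;

class PropertyResolverSketch {
  void prefixScopedLookup() {
    var env = new MockEnvironment()
        .withProperty("serde.0.strProp", "testStr")
        .withProperty("serde.0.intLst", "1,2,3");

    // Prefix-scoped resolver: names below are resolved relative to "serde.0"
    var scoped = new PropertyResolverImpl(env, "serde.0");
    scoped.getProperty("strProp", String.class);      // Optional["testStr"]
    scoped.getListProperty("intLst", Integer.class);  // Optional[[1, 2, 3]]
    scoped.getProperty("missing", String.class);      // Optional.empty()
  }
}
```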

+ 66 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/Base64SerdeTest.java

@@ -0,0 +1,66 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.Serde;
+import com.provectus.kafka.ui.serdes.PropertyResolverImpl;
+import com.provectus.kafka.ui.serdes.RecordHeadersImpl;
+import java.util.Base64;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.EnumSource;
+
+class Base64SerdeTest {
+
+  private static final byte[] TEST_BYTES = "some bytes go here".getBytes();
+  private static final String TEST_BYTES_BASE64_ENCODED = Base64.getEncoder().encodeToString(TEST_BYTES);
+
+  private Serde base64Serde;
+
+  @BeforeEach
+  void init() {
+    base64Serde = new Base64Serde();
+    base64Serde.configure(
+        PropertyResolverImpl.empty(),
+        PropertyResolverImpl.empty(),
+        PropertyResolverImpl.empty()
+    );
+  }
+
+  @ParameterizedTest
+  @EnumSource
+  void serializesInputAsBase64String(Serde.Target type) {
+    var serializer = base64Serde.serializer("anyTopic", type);
+    byte[] bytes = serializer.serialize(TEST_BYTES_BASE64_ENCODED);
+    assertThat(bytes).isEqualTo(TEST_BYTES);
+  }
+
+  @ParameterizedTest
+  @EnumSource
+  void deserializesDataAsBase64Bytes(Serde.Target type) {
+    var deserializer = base64Serde.deserializer("anyTopic", type);
+    var result = deserializer.deserialize(new RecordHeadersImpl(), TEST_BYTES);
+    assertThat(result.getResult()).isEqualTo(TEST_BYTES_BASE64_ENCODED);
+    assertThat(result.getType()).isEqualTo(DeserializeResult.Type.STRING);
+    assertThat(result.getAdditionalProperties()).isEmpty();
+  }
+
+  @ParameterizedTest
+  @EnumSource
+  void getSchemaReturnsEmpty(Serde.Target type) {
+    assertThat(base64Serde.getSchema("anyTopic", type)).isEmpty();
+  }
+
+  @ParameterizedTest
+  @EnumSource
+  void canDeserializeReturnsTrueForAllInputs(Serde.Target type) {
+    assertThat(base64Serde.canDeserialize("anyTopic", type)).isTrue();
+  }
+
+  @ParameterizedTest
+  @EnumSource
+  void canSerializeReturnsTrueForAllInputs(Serde.Target type) {
+    assertThat(base64Serde.canSerialize("anyTopic", type)).isTrue();
+  }
+}
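
The round-trip contract these tests assert: serialization decodes a Base64 string into raw record bytes, and deserialization re-encodes raw bytes as a Base64 string typed `STRING` with no additional properties. A hypothetical core of such a serde, assuming `DeserializeResult`'s three-arg constructor (the shipped `Base64Serde` may be structured differently):

```java
import com.provectus.kafka.ui.serde.api.DeserializeResult;
import java.util.Base64;
import java.util.Map;

class Base64SerdeSketch {
  byte[] serialize(String input) {
    // UI input is Base64 text; the record payload is the decoded bytes
    return Base64.getDecoder().decode(input);
  }

  DeserializeResult deserialize(byte[] data) {
    // Record payload is rendered back as Base64 text
    return new DeserializeResult(
        Base64.getEncoder().encodeToString(data),
        DeserializeResult.Type.STRING,
        Map.of());
  }
}
```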

+ 46 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/Int32SerdeTest.java

@@ -0,0 +1,46 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.google.common.primitives.Ints;
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.Serde;
+import com.provectus.kafka.ui.serdes.PropertyResolverImpl;
+import com.provectus.kafka.ui.serdes.RecordHeadersImpl;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.EnumSource;
+
+class Int32SerdeTest {
+
+  private Int32Serde serde;
+
+  @BeforeEach
+  void init() {
+    serde = new Int32Serde();
+    serde.configure(
+        PropertyResolverImpl.empty(),
+        PropertyResolverImpl.empty(),
+        PropertyResolverImpl.empty()
+    );
+  }
+
+  @ParameterizedTest
+  @EnumSource
+  void serializeUses4BytesIntRepresentation(Serde.Target type) {
+    var serializer = serde.serializer("anyTopic", type);
+    byte[] bytes = serializer.serialize("1234");
+    assertThat(bytes).isEqualTo(Ints.toByteArray(1234));
+  }
+
+  @ParameterizedTest
+  @EnumSource
+  void deserializeUses4BytesIntRepresentation(Serde.Target type) {
+    var deserializer = serde.deserializer("anyTopic", type);
+    var result = deserializer.deserialize(new RecordHeadersImpl(), Ints.toByteArray(1234));
+    assertThat(result.getResult()).isEqualTo("1234");
+    assertThat(result.getType()).isEqualTo(DeserializeResult.Type.JSON);
+  }
+
+}

+ 47 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/Int64SerdeTest.java

@@ -0,0 +1,47 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.google.common.primitives.Longs;
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.Serde;
+import com.provectus.kafka.ui.serdes.PropertyResolverImpl;
+import com.provectus.kafka.ui.serdes.RecordHeadersImpl;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.EnumSource;
+
+class Int64SerdeTest {
+
+  private Int64Serde serde;
+
+  @BeforeEach
+  void init() {
+    serde = new Int64Serde();
+    serde.configure(
+        PropertyResolverImpl.empty(),
+        PropertyResolverImpl.empty(),
+        PropertyResolverImpl.empty()
+    );
+  }
+
+  @ParameterizedTest
+  @EnumSource
+  void serializeUses8BytesLongRepresentation(Serde.Target type) {
+    var serializer = serde.serializer("anyTopic", type);
+    byte[] bytes = serializer.serialize("1234");
+    assertThat(bytes).isEqualTo(Longs.toByteArray(1234));
+  }
+
+  @ParameterizedTest
+  @EnumSource
+  void deserializeUses8BytesLongRepresentation(Serde.Target type) {
+    var deserializer = serde.deserializer("anyTopic", type);
+    var result = deserializer.deserialize(new RecordHeadersImpl(), Longs.toByteArray(1234));
+    assertThat(result.getResult()).isEqualTo("1234");
+    assertThat(result.getType()).isEqualTo(DeserializeResult.Type.JSON);
+  }
+
+}
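
Both integer serdes rely on Guava's big-endian fixed-width conversions, so the byte layouts these tests assert are exactly what `Ints`/`Longs` produce. A short sketch of the encoding they exercise:

```java
import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs;

class FixedWidthIntSketch {
  void roundTrip() {
    byte[] i32 = Ints.toByteArray(1234);    // 4 bytes, big-endian: 00 00 04 D2
    byte[] i64 = Longs.toByteArray(1234L);  // 8 bytes, big-endian, same low bytes

    int intBack = Ints.fromByteArray(i32);    // 1234
    long longBack = Longs.fromByteArray(i64); // 1234
  }
}
```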

+ 152 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerdeTest.java

@@ -0,0 +1,152 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import com.google.protobuf.DynamicMessage;
+import com.google.protobuf.util.JsonFormat;
+import com.provectus.kafka.ui.serde.api.Serde;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Map;
+import lombok.SneakyThrows;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+class ProtobufFileSerdeTest {
+
+  private static final String samplePersonMsgJson =
+      "{ \"name\": \"My Name\",\"id\": 101, \"email\": \"user1@example.com\", \"phones\":[] }";
+
+  private static final String sampleBookMsgJson = "{\"version\": 1, \"people\": ["
+      + "{ \"name\": \"My Name\",\"id\": 102, \"email\": \"addrBook@example.com\", \"phones\":[]}]}";
+
+  // Sample message of type `test.Person`
+  private static byte[] personMessageBytes;
+  // Sample message of type `test.AddressBook`
+  private static byte[] addressBookMessageBytes;
+  private static Path protobufSchemaPath;
+  private static ProtobufSchema protobufSchema;
+
+  @BeforeAll
+  static void setUp() throws Exception {
+    protobufSchemaPath = Paths.get(ProtobufFileSerdeTest.class.getClassLoader()
+        .getResource("address-book.proto").toURI());
+    protobufSchema = new ProtobufSchema(Files.readString(protobufSchemaPath));
+
+    DynamicMessage.Builder builder = protobufSchema.newMessageBuilder("test.Person");
+    JsonFormat.parser().merge(samplePersonMsgJson, builder);
+    personMessageBytes = builder.build().toByteArray();
+
+    builder = protobufSchema.newMessageBuilder("test.AddressBook");
+    JsonFormat.parser().merge(sampleBookMsgJson, builder);
+    addressBookMessageBytes = builder.build().toByteArray();
+  }
+
+  @Test
+  void testDeserialize() {
+    var messageNameMap = Map.of(
+        "persons", protobufSchema.toDescriptor("test.Person"),
+        "books", protobufSchema.toDescriptor("test.AddressBook")
+    );
+    var keyMessageNameMap = Map.of(
+        "books", protobufSchema.toDescriptor("test.AddressBook"));
+
+    var serde = new ProtobufFileSerde();
+    serde.configure(
+        protobufSchemaPath,
+        null,
+        null,
+        messageNameMap,
+        keyMessageNameMap
+    );
+
+    var deserializedPerson = serde.deserializer("persons", Serde.Target.VALUE)
+        .deserialize(null, personMessageBytes);
+    assertJsonEquals(samplePersonMsgJson, deserializedPerson.getResult());
+
+    var deserializedBook = serde.deserializer("books", Serde.Target.KEY)
+        .deserialize(null, addressBookMessageBytes);
+    assertJsonEquals(sampleBookMsgJson, deserializedBook.getResult());
+  }
+
+  @Test
+  void testDefaultMessageName() {
+    var serde = new ProtobufFileSerde();
+    serde.configure(
+        protobufSchemaPath,
+        protobufSchema.toDescriptor("test.Person"),
+        protobufSchema.toDescriptor("test.AddressBook"),
+        Map.of(),
+        Map.of()
+    );
+
+    var deserializedPerson = serde.deserializer("persons", Serde.Target.VALUE)
+        .deserialize(null, personMessageBytes);
+    assertJsonEquals(samplePersonMsgJson, deserializedPerson.getResult());
+
+    var deserializedBook = serde.deserializer("books", Serde.Target.KEY)
+        .deserialize(null, addressBookMessageBytes);
+    assertJsonEquals(sampleBookMsgJson, deserializedBook.getResult());
+  }
+
+  @Test
+  void testSerialize() {
+    var messageNameMap = Map.of(
+        "persons", protobufSchema.toDescriptor("test.Person"),
+        "books", protobufSchema.toDescriptor("test.AddressBook")
+    );
+    var keyMessageNameMap = Map.of(
+        "books", protobufSchema.toDescriptor("test.AddressBook"));
+
+    var serde = new ProtobufFileSerde();
+    serde.configure(
+        protobufSchemaPath,
+        null,
+        null,
+        messageNameMap,
+        keyMessageNameMap
+    );
+
+    var personBytes = serde.serializer("persons", Serde.Target.VALUE)
+        .serialize("{ \"name\": \"My Name\",\"id\": 101, \"email\": \"user1@example.com\" }");
+    assertThat(personBytes).isEqualTo(personMessageBytes);
+
+    var booksBytes = serde.serializer("books", Serde.Target.KEY)
+        .serialize("{\"version\": 1, \"people\": ["
+            + "{ \"name\": \"My Name\",\"id\": 102, \"email\": \"addrBook@example.com\" }]}");
+    assertThat(booksBytes).isEqualTo(addressBookMessageBytes);
+  }
+
+  @Test
+  void testSerializeDefaults() {
+    var serde = new ProtobufFileSerde();
+    serde.configure(
+        protobufSchemaPath,
+        protobufSchema.toDescriptor("test.Person"),
+        protobufSchema.toDescriptor("test.AddressBook"),
+        Map.of(),
+        Map.of()
+    );
+
+    var personBytes = serde.serializer("persons", Serde.Target.VALUE)
+        .serialize("{ \"name\": \"My Name\",\"id\": 101, \"email\": \"user1@example.com\" }");
+    assertThat(personBytes).isEqualTo(personMessageBytes);
+
+    var booksBytes = serde.serializer("books", Serde.Target.KEY)
+        .serialize("{\"version\": 1, \"people\": ["
+            + "{ \"name\": \"My Name\",\"id\": 102, \"email\": \"addrBook@example.com\" }]}");
+    assertThat(booksBytes).isEqualTo(addressBookMessageBytes);
+  }
+
+  @SneakyThrows
+  private void assertJsonEquals(String expectedJson, String actualJson) {
+    var mapper = new JsonMapper();
+    assertThat(mapper.readTree(actualJson)).isEqualTo(mapper.readTree(expectedJson));
+  }
+
+}
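
The fixtures above are built with the same `DynamicMessage` round-trip the serde performs: JSON is merged into a builder for the target descriptor, serialized to wire bytes, and printed back to JSON on read. A sketch of that round-trip using the `test.Person` type from the fixture (checked protobuf exceptions folded into `throws Exception` for brevity):

```java
import com.google.protobuf.DynamicMessage;
import com.google.protobuf.util.JsonFormat;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;

class ProtobufRoundTripSketch {
  String roundTrip(ProtobufSchema schema, String json) throws Exception {
    // JSON -> DynamicMessage -> wire bytes
    DynamicMessage.Builder builder = schema.newMessageBuilder("test.Person");
    JsonFormat.parser().merge(json, builder);
    byte[] wire = builder.build().toByteArray();

    // wire bytes -> DynamicMessage -> JSON
    DynamicMessage parsed =
        DynamicMessage.parseFrom(schema.toDescriptor("test.Person"), wire);
    return JsonFormat.printer().print(parsed);
  }
}
```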

+ 59 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/UInt32SerdeTest.java

@@ -0,0 +1,59 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
+import com.google.common.primitives.Ints;
+import com.google.common.primitives.UnsignedInteger;
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.Serde;
+import com.provectus.kafka.ui.serdes.PropertyResolverImpl;
+import com.provectus.kafka.ui.serdes.RecordHeadersImpl;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.EnumSource;
+
+class UInt32SerdeTest {
+
+  private UInt32Serde serde;
+
+  @BeforeEach
+  void init() {
+    serde = new UInt32Serde();
+    serde.configure(
+        PropertyResolverImpl.empty(),
+        PropertyResolverImpl.empty(),
+        PropertyResolverImpl.empty()
+    );
+  }
+
+  @ParameterizedTest
+  @EnumSource
+  void serializeUses4BytesUInt32Representation(Serde.Target type) {
+    var serializer = serde.serializer("anyTopic", type);
+    String uint32String = UnsignedInteger.MAX_VALUE.toString();
+    byte[] bytes = serializer.serialize(uint32String);
+    assertThat(bytes).isEqualTo(Ints.toByteArray(UnsignedInteger.MAX_VALUE.intValue()));
+  }
+
+  @ParameterizedTest
+  @EnumSource
+  void serializeThrowsNfeIfNegativeValuePassed(Serde.Target type) {
+    var serializer = serde.serializer("anyTopic", type);
+    String negativeIntString = "-100";
+    assertThatThrownBy(() -> serializer.serialize(negativeIntString))
+        .isInstanceOf(NumberFormatException.class);
+  }
+
+  @ParameterizedTest
+  @EnumSource
+  void deserializeUses4BytesUInt32Representation(Serde.Target type) {
+    var deserializer = serde.deserializer("anyTopic", type);
+    byte[] uint32Bytes = Ints.toByteArray(UnsignedInteger.MAX_VALUE.intValue());
+    var result = deserializer.deserialize(new RecordHeadersImpl(), uint32Bytes);
+    assertThat(result.getResult()).isEqualTo(UnsignedInteger.MAX_VALUE.toString());
+    assertThat(result.getType()).isEqualTo(DeserializeResult.Type.JSON);
+  }
+
+}

+ 58 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/UInt64SerdeTest.java

@@ -0,0 +1,58 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
+import com.google.common.primitives.Longs;
+import com.google.common.primitives.UnsignedLong;
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.Serde;
+import com.provectus.kafka.ui.serdes.PropertyResolverImpl;
+import com.provectus.kafka.ui.serdes.RecordHeadersImpl;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.EnumSource;
+
+class UInt64SerdeTest {
+
+  private UInt64Serde serde;
+
+  @BeforeEach
+  void init() {
+    serde = new UInt64Serde();
+    serde.configure(
+        PropertyResolverImpl.empty(),
+        PropertyResolverImpl.empty(),
+        PropertyResolverImpl.empty()
+    );
+  }
+
+  @ParameterizedTest
+  @EnumSource
+  void serializeUses8BytesUInt64Representation(Serde.Target type) {
+    var serializer = serde.serializer("anyTopic", type);
+    String uint64String = UnsignedLong.MAX_VALUE.toString();
+    byte[] bytes = serializer.serialize(uint64String);
+    assertThat(bytes).isEqualTo(Longs.toByteArray(UnsignedLong.MAX_VALUE.longValue()));
+  }
+
+  @ParameterizedTest
+  @EnumSource
+  void serializeThrowsNfeIfNegativeValuePassed(Serde.Target type) {
+    var serializer = serde.serializer("anyTopic", type);
+    String negativeIntString = "-100";
+    assertThatThrownBy(() -> serializer.serialize(negativeIntString))
+        .isInstanceOf(NumberFormatException.class);
+  }
+
+  @ParameterizedTest
+  @EnumSource
+  void deserializeUses8BytesUInt64Representation(Serde.Target type) {
+    var deserializer = serde.deserializer("anyTopic", type);
+    byte[] uint64Bytes = Longs.toByteArray(UnsignedLong.MAX_VALUE.longValue());
+    var result = deserializer.deserialize(new RecordHeadersImpl(), uint64Bytes);
+    assertThat(result.getResult()).isEqualTo(UnsignedLong.MAX_VALUE.toString());
+    assertThat(result.getType()).isEqualTo(DeserializeResult.Type.JSON);
+  }
+
+}
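
The unsigned serdes reuse the signed two's-complement byte layout; only the string rendering changes. That is why the max values here serialize to all-0xFF bytes while negative input is rejected up front with a `NumberFormatException`. For example:

```java
import com.google.common.primitives.Longs;
import com.google.common.primitives.UnsignedLong;

class UnsignedReprSketch {
  void maxValueBytes() {
    long raw = UnsignedLong.MAX_VALUE.longValue();  // -1 when read as signed
    byte[] bytes = Longs.toByteArray(raw);          // FF FF FF FF FF FF FF FF

    // Same bits, unsigned rendering:
    String shown = UnsignedLong.fromLongBits(raw).toString(); // "18446744073709551615"
  }
}
```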

+ 101 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/UuidBinarySerdeTest.java

@@ -0,0 +1,101 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.Serde;
+import com.provectus.kafka.ui.serdes.PropertyResolverImpl;
+import com.provectus.kafka.ui.serdes.RecordHeadersImpl;
+import java.nio.ByteBuffer;
+import java.util.UUID;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.EnumSource;
+import org.springframework.mock.env.MockEnvironment;
+
+class UuidBinarySerdeTest {
+
+  @Nested
+  class MsbFirst {
+
+    private UuidBinarySerde serde;
+
+    @BeforeEach
+    void init() {
+      serde = new UuidBinarySerde();
+      serde.configure(
+          PropertyResolverImpl.empty(),
+          PropertyResolverImpl.empty(),
+          PropertyResolverImpl.empty()
+      );
+    }
+
+    @ParameterizedTest
+    @EnumSource
+    void serializerUses16BytesUuidBinaryRepresentation(Serde.Target type) {
+      var serializer = serde.serializer("anyTopic", type);
+      var uuid = UUID.randomUUID();
+      byte[] bytes = serializer.serialize(uuid.toString());
+      var bb = ByteBuffer.wrap(bytes);
+      assertThat(bb.getLong()).isEqualTo(uuid.getMostSignificantBits());
+      assertThat(bb.getLong()).isEqualTo(uuid.getLeastSignificantBits());
+    }
+
+    @ParameterizedTest
+    @EnumSource
+    void deserializerUses16BytesUuidBinaryRepresentation(Serde.Target type) {
+      var uuid = UUID.randomUUID();
+      var bb = ByteBuffer.allocate(16);
+      bb.putLong(uuid.getMostSignificantBits());
+      bb.putLong(uuid.getLeastSignificantBits());
+
+      var result = serde.deserializer("anyTopic", type).deserialize(new RecordHeadersImpl(), bb.array());
+      assertThat(result.getType()).isEqualTo(DeserializeResult.Type.STRING);
+      assertThat(result.getAdditionalProperties()).isEmpty();
+      assertThat(result.getResult()).isEqualTo(uuid.toString());
+    }
+  }
+
+  @Nested
+  class MsbLast {
+
+    private UuidBinarySerde serde;
+
+    @BeforeEach
+    void init() {
+      serde = new UuidBinarySerde();
+      serde.configure(
+          new PropertyResolverImpl(new MockEnvironment().withProperty("mostSignificantBitsFirst", "false")),
+          PropertyResolverImpl.empty(),
+          PropertyResolverImpl.empty()
+      );
+    }
+
+    @ParameterizedTest
+    @EnumSource
+    void serializerUses16BytesUuidBinaryRepresentation(Serde.Target type) {
+      var serializer = serde.serializer("anyTopic", type);
+      var uuid = UUID.randomUUID();
+      byte[] bytes = serializer.serialize(uuid.toString());
+      var bb = ByteBuffer.wrap(bytes);
+      assertThat(bb.getLong()).isEqualTo(uuid.getLeastSignificantBits());
+      assertThat(bb.getLong()).isEqualTo(uuid.getMostSignificantBits());
+    }
+
+    @ParameterizedTest
+    @EnumSource
+    void deserializerUses16BytesUuidBinaryRepresentation(Serde.Target type) {
+      var uuid = UUID.randomUUID();
+      var bb = ByteBuffer.allocate(16);
+      bb.putLong(uuid.getLeastSignificantBits());
+      bb.putLong(uuid.getMostSignificantBits());
+
+      var result = serde.deserializer("anyTopic", type).deserialize(new RecordHeadersImpl(), bb.array());
+      assertThat(result.getType()).isEqualTo(DeserializeResult.Type.STRING);
+      assertThat(result.getAdditionalProperties()).isEmpty();
+      assertThat(result.getResult()).isEqualTo(uuid.toString());
+    }
+  }
+
+}
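
The binary layout both nested test classes assert is two 8-byte longs, 16 bytes total; the `mostSignificantBitsFirst=false` configuration simply swaps their order. A sketch of the default MSB-first encoding:

```java
import java.nio.ByteBuffer;
import java.util.UUID;

class UuidLayoutSketch {
  byte[] encodeMsbFirst(UUID uuid) {
    // 16 bytes: most significant long first, then least significant
    return ByteBuffer.allocate(16)
        .putLong(uuid.getMostSignificantBits())
        .putLong(uuid.getLeastSignificantBits())
        .array();
  }
}
```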

+ 157 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java

@@ -0,0 +1,157 @@
+package com.provectus.kafka.ui.serdes.builtin.sr;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.SchemaDescription;
+import com.provectus.kafka.ui.serde.api.Serde;
+import io.confluent.kafka.schemaregistry.avro.AvroSchema;
+import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
+import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.Map;
+import lombok.SneakyThrows;
+import net.bytebuddy.utility.RandomString;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.io.EncoderFactory;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+class SchemaRegistrySerdeTest {
+
+  private final MockSchemaRegistryClient registryClient = new MockSchemaRegistryClient();
+
+  private SchemaRegistrySerde serde;
+
+  @BeforeEach
+  void init() {
+    serde = new SchemaRegistrySerde();
+    serde.configure(List.of("wontbeused"), registryClient, "%s-key", "%s-value");
+  }
+
+  @Test
+  void returnsSchemaDescriptionIfSchemaRegisteredInSR() throws RestClientException, IOException {
+    String topic = "test";
+    registryClient.register(topic + "-key", new AvroSchema("{ \"type\": \"int\" }"));
+    registryClient.register(topic + "-value", new AvroSchema("{ \"type\": \"float\" }"));
+
+    var keySchemaOptional = serde.getSchema(topic, Serde.Target.KEY);
+    assertThat(keySchemaOptional)
+        .map(SchemaDescription::getSchema)
+        .contains("{\"$id\":\"int\",\"$schema\":\"https://json-schema.org/draft/2020-12/schema\",\"type\":\"integer\"}");
+
+    var valueSchemaOptional = serde.getSchema(topic, Serde.Target.VALUE);
+    assertThat(valueSchemaOptional)
+        .map(SchemaDescription::getSchema)
+        .contains("{\"$id\":\"float\",\"$schema\":\"https://json-schema.org/draft/2020-12/schema\",\"type\":\"number\"}");
+  }
+
+  @Test
+  void returnsEmptyDescriptorIfSchemaNotRegisteredInSR() {
+    String topic = "test";
+    assertThat(serde.getSchema(topic, Serde.Target.KEY)).isEmpty();
+    assertThat(serde.getSchema(topic, Serde.Target.VALUE)).isEmpty();
+  }
+
+  @Test
+  void serializeTreatsInputAsJsonAvroSchemaPayload() throws RestClientException, IOException {
+    AvroSchema schema = new AvroSchema(
+        "{"
+            + "  \"type\": \"record\","
+            + "  \"name\": \"TestAvroRecord1\","
+            + "  \"fields\": ["
+            + "    {"
+            + "      \"name\": \"field1\","
+            + "      \"type\": \"string\""
+            + "    },"
+            + "    {"
+            + "      \"name\": \"field2\","
+            + "      \"type\": \"int\""
+            + "    }"
+            + "  ]"
+            + "}"
+    );
+    String jsonValue = "{ \"field1\":\"testStr\", \"field2\": 123 }";
+    String topic = "test";
+
+    int schemaId = registryClient.register(topic + "-value", schema);
+    byte[] serialized = serde.serializer(topic, Serde.Target.VALUE).serialize(jsonValue);
+    byte[] expected = toBytesWithMagicByteAndSchemaId(schemaId, jsonValue, schema);
+    assertThat(serialized).isEqualTo(expected);
+  }
+
+  @Test
+  void deserializeReturnsJsonAvroMsgJsonRepresentation() throws RestClientException, IOException {
+    AvroSchema schema = new AvroSchema(
+        "{"
+            + "  \"type\": \"record\","
+            + "  \"name\": \"TestAvroRecord1\","
+            + "  \"fields\": ["
+            + "    {"
+            + "      \"name\": \"field1\","
+            + "      \"type\": \"string\""
+            + "    },"
+            + "    {"
+            + "      \"name\": \"field2\","
+            + "      \"type\": \"int\""
+            + "    }"
+            + "  ]"
+            + "}"
+    );
+    String jsonValue = "{ \"field1\":\"testStr\", \"field2\": 123 }";
+
+    String topic = "test";
+    int schemaId = registryClient.register(topic + "-value", schema);
+
+    byte[] data = toBytesWithMagicByteAndSchemaId(schemaId, jsonValue, schema);
+    var result = serde.deserializer(topic, Serde.Target.VALUE).deserialize(null, data);
+
+    assertJsonsEqual(jsonValue, result.getResult());
+    assertThat(result.getType()).isEqualTo(DeserializeResult.Type.JSON);
+    assertThat(result.getAdditionalProperties())
+        .contains(Map.entry("type", "AVRO"))
+        .contains(Map.entry("schemaId", schemaId));
+  }
+
+  @Test
+  void canDeserializeReturnsTrueAlways() {
+    String topic = RandomString.make(10);
+    assertThat(serde.canDeserialize(topic, Serde.Target.KEY)).isTrue();
+    assertThat(serde.canDeserialize(topic, Serde.Target.VALUE)).isTrue();
+  }
+
+  private void assertJsonsEqual(String expected, String actual) throws JsonProcessingException {
+    var mapper = new JsonMapper();
+    assertThat(mapper.readTree(actual)).isEqualTo(mapper.readTree(expected));
+  }
+
+  private byte[] toBytesWithMagicByteAndSchemaId(int schemaId, String json, AvroSchema schema) {
+    return toBytesWithMagicByteAndSchemaId(schemaId, jsonToAvro(json, schema));
+  }
+
+  private byte[] toBytesWithMagicByteAndSchemaId(int schemaId, byte[] body) {
+    return ByteBuffer.allocate(1 + 4 + body.length)
+        .put((byte) 0)
+        .putInt(schemaId)
+        .put(body)
+        .array();
+  }
+
+  @SneakyThrows
+  private byte[] jsonToAvro(String json, AvroSchema schema) {
+    GenericDatumWriter<Object> writer = new GenericDatumWriter<>(schema.rawSchema());
+    ByteArrayOutputStream output = new ByteArrayOutputStream();
+    Encoder encoder = EncoderFactory.get().binaryEncoder(output, null);
+    writer.write(AvroSchemaUtils.toObject(json, schema), encoder);
+    encoder.flush();
+    return output.toByteArray();
+  }
+
+}
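
`toBytesWithMagicByteAndSchemaId` reproduces the Confluent Schema Registry wire format the serde expects: one zero magic byte, a 4-byte big-endian schema id, then the schema-encoded payload. Reading a record back is the mirror image, sketched here:

```java
import java.nio.ByteBuffer;

class WireFormatSketch {
  void parse(byte[] data) {
    var buf = ByteBuffer.wrap(data);
    byte magic = buf.get();      // always 0 in the Confluent framing
    int schemaId = buf.getInt(); // registry id of the writer schema
    byte[] payload = new byte[buf.remaining()];
    buf.get(payload);            // Avro/Protobuf/JSON-encoded body
  }
}
```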

+ 2 - 1
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/MessagesServiceTest.java

@@ -44,7 +44,8 @@ class MessagesServiceTest extends AbstractIntegrationTest {
 
   @Test
   void loadMessagesReturnsExceptionWhenTopicNotFound() {
-    StepVerifier.create(messagesService.loadMessages(cluster, NON_EXISTING_TOPIC, null, null, null, 1))
+    StepVerifier.create(messagesService
+            .loadMessages(cluster, NON_EXISTING_TOPIC, null, null, null, 1, "String", "String"))
         .expectError(TopicNotFoundException.class)
         .verify();
   }

+ 31 - 12
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java

@@ -13,7 +13,10 @@ import com.provectus.kafka.ui.emitter.ForwardRecordEmitter;
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.producer.KafkaTestProducer;
-import com.provectus.kafka.ui.serde.SimpleRecordSerDe;
+import com.provectus.kafka.ui.serde.api.Serde;
+import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
+import com.provectus.kafka.ui.serdes.PropertyResolverImpl;
+import com.provectus.kafka.ui.serdes.builtin.StringSerde;
 import com.provectus.kafka.ui.util.OffsetsSeekBackward;
 import com.provectus.kafka.ui.util.OffsetsSeekForward;
 import java.io.Serializable;
@@ -53,6 +56,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
   static final String TOPIC = RecordEmitterTest.class.getSimpleName() + "_" + UUID.randomUUID();
   static final String EMPTY_TOPIC = TOPIC + "_empty";
   static final List<Record> SENT_RECORDS = new ArrayList<>();
+  static final ConsumerRecordDeserializer RECORD_DESERIALIZER = createRecordsDeserializer();
 
   @BeforeAll
   static void generateMsgs() throws Exception {
@@ -90,13 +94,27 @@ class RecordEmitterTest extends AbstractIntegrationTest {
     deleteTopic(EMPTY_TOPIC);
   }
 
+  private static ConsumerRecordDeserializer createRecordsDeserializer() {
+    Serde s = new StringSerde();
+    s.configure(PropertyResolverImpl.empty(), PropertyResolverImpl.empty(), PropertyResolverImpl.empty());
+    return new ConsumerRecordDeserializer(
+        StringSerde.name(),
+        s.deserializer(null, Serde.Target.KEY),
+        StringSerde.name(),
+        s.deserializer(null, Serde.Target.VALUE),
+        StringSerde.name(),
+        s.deserializer(null, Serde.Target.KEY),
+        s.deserializer(null, Serde.Target.VALUE)
+    );
+  }
+
   @Test
   void pollNothingOnEmptyTopic() {
     var forwardEmitter = new ForwardRecordEmitter(
         this::createConsumer,
         new OffsetsSeekForward(EMPTY_TOPIC,
             new ConsumerPosition(BEGINNING, Map.of(), FORWARD)
-        ), new SimpleRecordSerDe()
+        ), RECORD_DESERIALIZER
     );
 
     var backwardEmitter = new BackwardRecordEmitter(
@@ -105,7 +123,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
             EMPTY_TOPIC,
             new ConsumerPosition(BEGINNING, Map.of(), BACKWARD),
             100
-        ), new SimpleRecordSerDe()
+        ), RECORD_DESERIALIZER
     );
 
     StepVerifier.create(
@@ -127,7 +145,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         this::createConsumer,
         new OffsetsSeekForward(TOPIC,
             new ConsumerPosition(BEGINNING, Map.of(), FORWARD)
-        ), new SimpleRecordSerDe()
+        ), RECORD_DESERIALIZER
     );
 
     var backwardEmitter = new BackwardRecordEmitter(
@@ -135,7 +153,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         new OffsetsSeekBackward(TOPIC,
             new ConsumerPosition(BEGINNING, Map.of(), BACKWARD),
             PARTITIONS * MSGS_PER_PARTITION
-        ), new SimpleRecordSerDe()
+        ), RECORD_DESERIALIZER
     );
 
     List<String> expectedValues = SENT_RECORDS.stream().map(Record::getValue).collect(Collectors.toList());
@@ -156,7 +174,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         this::createConsumer,
         new OffsetsSeekForward(TOPIC,
             new ConsumerPosition(OFFSET, targetOffsets, FORWARD)
-        ), new SimpleRecordSerDe()
+        ), RECORD_DESERIALIZER
     );
 
     var backwardEmitter = new BackwardRecordEmitter(
@@ -164,7 +182,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         new OffsetsSeekBackward(TOPIC,
             new ConsumerPosition(OFFSET, targetOffsets, BACKWARD),
             PARTITIONS * MSGS_PER_PARTITION
-        ), new SimpleRecordSerDe()
+        ), RECORD_DESERIALIZER
     );
 
     var expectedValues = SENT_RECORDS.stream()
@@ -201,7 +219,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         this::createConsumer,
         new OffsetsSeekForward(TOPIC,
             new ConsumerPosition(TIMESTAMP, targetTimestamps, FORWARD)
-        ), new SimpleRecordSerDe()
+        ), RECORD_DESERIALIZER
     );
 
     var backwardEmitter = new BackwardRecordEmitter(
@@ -209,7 +227,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         new OffsetsSeekBackward(TOPIC,
             new ConsumerPosition(TIMESTAMP, targetTimestamps, BACKWARD),
             PARTITIONS * MSGS_PER_PARTITION
-        ), new SimpleRecordSerDe()
+        ), RECORD_DESERIALIZER
     );
 
     var expectedValues = SENT_RECORDS.stream()
@@ -240,7 +258,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         new OffsetsSeekBackward(TOPIC,
             new ConsumerPosition(OFFSET, targetOffsets, BACKWARD),
             numMessages
-        ), new SimpleRecordSerDe()
+        ), RECORD_DESERIALIZER
     );
 
     var expectedValues = SENT_RECORDS.stream()
@@ -266,7 +284,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         new OffsetsSeekBackward(TOPIC,
             new ConsumerPosition(OFFSET, offsets, BACKWARD),
             100
-        ), new SimpleRecordSerDe()
+        ), RECORD_DESERIALIZER
     );
 
     expectEmitter(backwardEmitter,
@@ -283,7 +301,8 @@ class RecordEmitterTest extends AbstractIntegrationTest {
             .expectNextCount(expectedValues.size())
             .expectRecordedMatches(r -> r.containsAll(expectedValues))
             .consumeRecordedWith(r -> log.info("Collected collection: {}", r)),
-        v -> {}
+        v -> {
+        }
     );
   }
 

+ 68 - 103
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/SendAndReadTests.java

@@ -7,11 +7,14 @@ import com.provectus.kafka.ui.AbstractIntegrationTest;
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.CreateTopicMessageDTO;
 import com.provectus.kafka.ui.model.KafkaCluster;
-import com.provectus.kafka.ui.model.MessageFormatDTO;
 import com.provectus.kafka.ui.model.SeekDirectionDTO;
 import com.provectus.kafka.ui.model.SeekTypeDTO;
 import com.provectus.kafka.ui.model.TopicMessageDTO;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
+import com.provectus.kafka.ui.serdes.builtin.Int32Serde;
+import com.provectus.kafka.ui.serdes.builtin.Int64Serde;
+import com.provectus.kafka.ui.serdes.builtin.StringSerde;
+import com.provectus.kafka.ui.serdes.builtin.sr.SchemaRegistrySerde;
 import io.confluent.kafka.schemaregistry.ParsedSchema;
 import io.confluent.kafka.schemaregistry.avro.AvroSchema;
 import io.confluent.kafka.schemaregistry.json.JsonSchema;
@@ -140,7 +143,9 @@ public class SendAndReadTests extends AbstractIntegrationTest {
         .withMsgToSend(
             new CreateTopicMessageDTO()
                 .key("testKey")
+                .keySerde(StringSerde.name())
                 .content("testValue")
+                .valueSerde(StringSerde.name())
         )
         .doAssert(polled -> {
           assertThat(polled.getKey()).isEqualTo("testKey");
@@ -149,40 +154,30 @@ public class SendAndReadTests extends AbstractIntegrationTest {
   }
 
   @Test
-  void noSchemaJsonKeyJsonValue() {
-    new SendAndReadSpec()
-        .withMsgToSend(
-            new CreateTopicMessageDTO()
-                .key("{ \"f1\": 111, \"f2\": \"testStr1\" }")
-                .content("{ \"f1\": 222, \"f2\": \"testStr2\" }")
-        )
-        .doAssert(polled -> {
-          assertThat(polled.getKey()).isEqualTo("{ \"f1\": 111, \"f2\": \"testStr1\" }");
-          assertThat(polled.getContent()).isEqualTo("{ \"f1\": 222, \"f2\": \"testStr2\" }");
-        });
-  }
-
-  @Test
-  void keyIsIntValueIsDoubleShouldBeSerializedAsStrings() {
+  void keyIsIntValueIsLong() {
     new SendAndReadSpec()
         .withMsgToSend(
             new CreateTopicMessageDTO()
                 .key("123")
-                .content("234.56")
+                .keySerde(Int32Serde.name())
+                .content("21474836470")
+                .valueSerde(Int64Serde.name())
         )
         .doAssert(polled -> {
           assertThat(polled.getKey()).isEqualTo("123");
-          assertThat(polled.getContent()).isEqualTo("234.56");
+          assertThat(polled.getContent()).isEqualTo("21474836470");
         });
   }
 
   @Test
-  void noSchemaKeyIsNull() {
+  void keyIsNull() {
     new SendAndReadSpec()
         .withMsgToSend(
             new CreateTopicMessageDTO()
                 .key(null)
+                .keySerde(StringSerde.name())
                 .content("testValue")
+                .valueSerde(StringSerde.name())
         )
         .doAssert(polled -> {
           assertThat(polled.getKey()).isNull();
@@ -191,12 +186,14 @@ public class SendAndReadTests extends AbstractIntegrationTest {
   }
 
   @Test
-  void noSchemaValueIsNull() {
+  void valueIsNull() {
     new SendAndReadSpec()
         .withMsgToSend(
             new CreateTopicMessageDTO()
                 .key("testKey")
+                .keySerde(StringSerde.name())
                 .content(null)
+                .valueSerde(StringSerde.name())
         )
         .doAssert(polled -> {
           assertThat(polled.getKey()).isEqualTo("testKey");
@@ -212,7 +209,9 @@ public class SendAndReadTests extends AbstractIntegrationTest {
         .withMsgToSend(
             new CreateTopicMessageDTO()
                 .key("\"some string\"")
+                .keySerde(SchemaRegistrySerde.name())
                 .content("123")
+                .valueSerde(SchemaRegistrySerde.name())
         )
         .doAssert(polled -> {
           assertThat(polled.getKey()).isEqualTo("\"some string\"");
@@ -221,14 +220,16 @@ public class SendAndReadTests extends AbstractIntegrationTest {
   }
 
   @Test
-  void nonNullableKvWithAvroSchema() {
+  void recordAvroSchema() {
     new SendAndReadSpec()
         .withKeySchema(AVRO_SCHEMA_1)
         .withValueSchema(AVRO_SCHEMA_2)
         .withMsgToSend(
             new CreateTopicMessageDTO()
                 .key(AVRO_SCHEMA_1_JSON_RECORD)
+                .keySerde(SchemaRegistrySerde.name())
                 .content(AVRO_SCHEMA_2_JSON_RECORD)
+                .valueSerde(SchemaRegistrySerde.name())
         )
         .doAssert(polled -> {
           assertJsonEqual(polled.getKey(), AVRO_SCHEMA_1_JSON_RECORD);
@@ -236,36 +237,6 @@ public class SendAndReadTests extends AbstractIntegrationTest {
         });
   }
 
-  @Test
-  void keyWithNoSchemaValueWithAvroSchema() {
-    new SendAndReadSpec()
-        .withValueSchema(AVRO_SCHEMA_1)
-        .withMsgToSend(
-            new CreateTopicMessageDTO()
-                .key("testKey")
-                .content(AVRO_SCHEMA_1_JSON_RECORD)
-        )
-        .doAssert(polled -> {
-          assertThat(polled.getKey()).isEqualTo("testKey");
-          assertJsonEqual(polled.getContent(), AVRO_SCHEMA_1_JSON_RECORD);
-        });
-  }
-
-  @Test
-  void keyWithAvroSchemaValueWithNoSchema() {
-    new SendAndReadSpec()
-        .withKeySchema(AVRO_SCHEMA_1)
-        .withMsgToSend(
-            new CreateTopicMessageDTO()
-                .key(AVRO_SCHEMA_1_JSON_RECORD)
-                .content("testVal")
-        )
-        .doAssert(polled -> {
-          assertJsonEqual(polled.getKey(), AVRO_SCHEMA_1_JSON_RECORD);
-          assertThat(polled.getContent()).isEqualTo("testVal");
-        });
-  }
-
   @Test
   void keyWithNoSchemaValueWithProtoSchema() {
     new SendAndReadSpec()
@@ -273,7 +244,9 @@ public class SendAndReadTests extends AbstractIntegrationTest {
         .withMsgToSend(
             new CreateTopicMessageDTO()
                 .key("testKey")
+                .keySerde(StringSerde.name())
                 .content(PROTOBUF_SCHEMA_JSON_RECORD)
+                .valueSerde(SchemaRegistrySerde.name())
         )
         .doAssert(polled -> {
           assertThat(polled.getKey()).isEqualTo("testKey");
@@ -289,7 +262,10 @@ public class SendAndReadTests extends AbstractIntegrationTest {
         .withMsgToSend(
             new CreateTopicMessageDTO()
                 .key(null)
+                .keySerde(SchemaRegistrySerde.name())
                 .content(AVRO_SCHEMA_2_JSON_RECORD)
+                .valueSerde(SchemaRegistrySerde.name())
         )
         .doAssert(polled -> {
           assertThat(polled.getKey()).isNull();
@@ -298,33 +274,19 @@ public class SendAndReadTests extends AbstractIntegrationTest {
   }
 
   @Test
-  void valueWithAvroSchemaShouldThrowExceptionArgIsNotValidJsonObject() {
+  void valueWithAvroSchemaShouldThrowExceptionIfArgIsNotValidJsonObject() {
     new SendAndReadSpec()
         .withValueSchema(AVRO_SCHEMA_2)
         .withMsgToSend(
             new CreateTopicMessageDTO()
-                // f2 has type object instead of string
-                .content("{ \"f1\": 111, \"f2\": {} }")
+                .keySerde(StringSerde.name())
+                // f2 has type int instead of string
+                .content("{ \"f1\": 111, \"f2\": 123 }")
+                .valueSerde(SchemaRegistrySerde.name())
         )
         .assertSendThrowsException();
   }
 
-  @Test
-  void keyWithAvroSchemaValueWithAvroSchemaValueIsNull() {
-    new SendAndReadSpec()
-        .withKeySchema(AVRO_SCHEMA_1)
-        .withValueSchema(AVRO_SCHEMA_2)
-        .withMsgToSend(
-            new CreateTopicMessageDTO()
-                .key(AVRO_SCHEMA_1_JSON_RECORD)
-                .content(null)
-        )
-        .doAssert(polled -> {
-          assertJsonEqual(polled.getKey(), AVRO_SCHEMA_1_JSON_RECORD);
-          assertThat(polled.getContent()).isNull();
-        });
-  }
-
   @Test
   void keyWithAvroSchemaValueWithProtoSchema() {
     new SendAndReadSpec()
@@ -333,7 +295,9 @@ public class SendAndReadTests extends AbstractIntegrationTest {
         .withMsgToSend(
             new CreateTopicMessageDTO()
                 .key(AVRO_SCHEMA_1_JSON_RECORD)
+                .keySerde(SchemaRegistrySerde.name())
                 .content(PROTOBUF_SCHEMA_JSON_RECORD)
+                .valueSerde(SchemaRegistrySerde.name())
         )
         .doAssert(polled -> {
           assertJsonEqual(polled.getKey(), AVRO_SCHEMA_1_JSON_RECORD);
@@ -347,8 +311,12 @@ public class SendAndReadTests extends AbstractIntegrationTest {
         .withValueSchema(PROTOBUF_SCHEMA)
         .withMsgToSend(
             new CreateTopicMessageDTO()
+                .key(null)
+                .keySerde(StringSerde.name())
                 // f2 field has type object instead of int
-                .content("{ \"f1\" : \"test str\", \"f2\" : {} }"))
+                .content("{ \"f1\" : \"test str\", \"f2\" : {} }")
+                .valueSerde(SchemaRegistrySerde.name())
+        )
         .assertSendThrowsException();
   }
 
@@ -360,7 +328,9 @@ public class SendAndReadTests extends AbstractIntegrationTest {
         .withMsgToSend(
             new CreateTopicMessageDTO()
                 .key(PROTOBUF_SCHEMA_JSON_RECORD)
+                .keySerde(SchemaRegistrySerde.name())
                 .content(JSON_SCHEMA_RECORD)
+                .valueSerde(SchemaRegistrySerde.name())
         )
         .doAssert(polled -> {
           assertJsonEqual(polled.getKey(), PROTOBUF_SCHEMA_JSON_RECORD);
@@ -368,29 +338,17 @@ public class SendAndReadTests extends AbstractIntegrationTest {
         });
   }
 
-  @Test
-  void keyWithJsonValueWithJsonSchemaKeyValueIsNull() {
-    new SendAndReadSpec()
-        .withKeySchema(JSON_SCHEMA)
-        .withValueSchema(JSON_SCHEMA)
-        .withMsgToSend(
-            new CreateTopicMessageDTO()
-                .key(JSON_SCHEMA_RECORD)
-        )
-        .doAssert(polled -> {
-          assertJsonEqual(polled.getKey(), JSON_SCHEMA_RECORD);
-          assertThat(polled.getContent()).isNull();
-        });
-  }
-
   @Test
   void valueWithJsonSchemaThrowsExceptionIfArgIsNotValidJsonObject() {
     new SendAndReadSpec()
         .withValueSchema(JSON_SCHEMA)
         .withMsgToSend(
             new CreateTopicMessageDTO()
+                .key(null)
+                .keySerde(StringSerde.name())
                // 'f2' field has type object instead of string
                 .content("{ \"f1\": 12, \"f2\": {}, \"schema\": \"some txt\" }")
+                .valueSerde(SchemaRegistrySerde.name())
         )
         .assertSendThrowsException();
   }
@@ -403,17 +361,20 @@ public class SendAndReadTests extends AbstractIntegrationTest {
         .withMsgToSend(
             new CreateTopicMessageDTO()
                 .key(AVRO_SCHEMA_1_JSON_RECORD)
+                .keySerde(SchemaRegistrySerde.name())
                 .content(AVRO_SCHEMA_2_JSON_RECORD)
+                .valueSerde(SchemaRegistrySerde.name())
         )
         .doAssert(polled -> {
           assertJsonEqual(polled.getKey(), AVRO_SCHEMA_1_JSON_RECORD);
           assertJsonEqual(polled.getContent(), AVRO_SCHEMA_2_JSON_RECORD);
           assertThat(polled.getKeySize()).isEqualTo(15L);
           assertThat(polled.getValueSize()).isEqualTo(15L);
-          assertThat(polled.getKeyFormat()).isEqualTo(MessageFormatDTO.AVRO);
-          assertThat(polled.getValueFormat()).isEqualTo(MessageFormatDTO.AVRO);
-          assertThat(polled.getKeySchemaId()).isNotEmpty();
-          assertThat(polled.getValueSchemaId()).isNotEmpty();
+          assertThat(polled.getKeyDeserializeProperties().get("schemaId")).isNotNull();
+          assertThat(polled.getKeyDeserializeProperties().get("type")).isEqualTo("AVRO");
+          assertThat(polled.getValueDeserializeProperties().get("schemaId")).isNotNull();
+          assertThat(polled.getValueDeserializeProperties().get("type")).isEqualTo("AVRO");
         });
   }
 
@@ -425,17 +386,19 @@ public class SendAndReadTests extends AbstractIntegrationTest {
         .withMsgToSend(
             new CreateTopicMessageDTO()
                 .key(PROTOBUF_SCHEMA_JSON_RECORD)
+                .keySerde(SchemaRegistrySerde.name())
                 .content(PROTOBUF_SCHEMA_JSON_RECORD)
+                .valueSerde(SchemaRegistrySerde.name())
         )
         .doAssert(polled -> {
           assertJsonEqual(polled.getKey(), PROTOBUF_SCHEMA_JSON_RECORD);
           assertJsonEqual(polled.getContent(), PROTOBUF_SCHEMA_JSON_RECORD);
           assertThat(polled.getKeySize()).isEqualTo(18L);
           assertThat(polled.getValueSize()).isEqualTo(18L);
-          assertThat(polled.getKeyFormat()).isEqualTo(MessageFormatDTO.PROTOBUF);
-          assertThat(polled.getValueFormat()).isEqualTo(MessageFormatDTO.PROTOBUF);
-          assertThat(polled.getKeySchemaId()).isNotEmpty();
-          assertThat(polled.getValueSchemaId()).isNotEmpty();
+          assertThat(polled.getKeyDeserializeProperties().get("schemaId")).isNotNull();
+          assertThat(polled.getKeyDeserializeProperties().get("type")).isEqualTo("PROTOBUF");
+          assertThat(polled.getValueDeserializeProperties().get("schemaId")).isNotNull();
+          assertThat(polled.getValueDeserializeProperties().get("type")).isEqualTo("PROTOBUF");
         });
   }
 
@@ -447,19 +410,21 @@ public class SendAndReadTests extends AbstractIntegrationTest {
         .withMsgToSend(
             new CreateTopicMessageDTO()
                 .key(JSON_SCHEMA_RECORD)
+                .keySerde(SchemaRegistrySerde.name())
                 .content(JSON_SCHEMA_RECORD)
+                .valueSerde(SchemaRegistrySerde.name())
                 .headers(Map.of("header1", "value1"))
         )
         .doAssert(polled -> {
           assertJsonEqual(polled.getKey(), JSON_SCHEMA_RECORD);
           assertJsonEqual(polled.getContent(), JSON_SCHEMA_RECORD);
-          assertThat(polled.getKeyFormat()).isEqualTo(MessageFormatDTO.JSON);
-          assertThat(polled.getValueFormat()).isEqualTo(MessageFormatDTO.JSON);
-          assertThat(polled.getKeySchemaId()).isNotEmpty();
-          assertThat(polled.getValueSchemaId()).isNotEmpty();
           assertThat(polled.getKeySize()).isEqualTo(57L);
           assertThat(polled.getValueSize()).isEqualTo(57L);
           assertThat(polled.getHeadersSize()).isEqualTo(13L);
+          assertThat(polled.getKeyDeserializeProperties().get("schemaId")).isNotNull();
+          assertThat(polled.getKeyDeserializeProperties().get("type")).isEqualTo("JSON");
+          assertThat(polled.getValueDeserializeProperties().get("schemaId")).isNotNull();
+          assertThat(polled.getValueDeserializeProperties().get("type")).isEqualTo("JSON");
         });
   }
 
@@ -469,7 +434,9 @@ public class SendAndReadTests extends AbstractIntegrationTest {
         .withMsgToSend(
             new CreateTopicMessageDTO()
                 .key(null)
+                .keySerde(StringSerde.name()) // any serde
                 .content(null)
+                .valueSerde(StringSerde.name()) // any serde
         )
         .doAssert(polled -> {
           assertThat(polled.getKey()).isNull();
@@ -514,10 +481,6 @@ public class SendAndReadTests extends AbstractIntegrationTest {
       if (valueSchema != null) {
         schemaRegistry.schemaRegistryClient().register(topic + "-value", valueSchema);
       }
-
-      // need to update to see new topic & schemas
-      clustersMetricsScheduler.updateMetrics();
-
       return topic;
     }
 
@@ -547,7 +510,9 @@ public class SendAndReadTests extends AbstractIntegrationTest {
                 ),
                 null,
                 null,
-                1
+                1,
+                msgToSend.getKeySerde().get(),
+                msgToSend.getValueSerde().get()
             ).filter(e -> e.getType().equals(TopicMessageEventDTO.TypeEnum.MESSAGE))
             .map(TopicMessageEventDTO::getMessage)
             .blockLast(Duration.ofSeconds(5000));

+ 102 - 54
kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml

@@ -4,7 +4,7 @@ info:
   version: 0.1.0
   title: Api Documentation
   termsOfService: urn:tos
-  contact: {}
+  contact: { }
   license:
     name: Apache 2.0
     url: http://www.apache.org/licenses/LICENSE-2.0
@@ -595,6 +595,37 @@ paths:
         400:
           description: Bad Request
 
+  /api/clusters/{clusterName}/topic/{topicName}/serdes:
+    get:
+      tags:
+        - Messages
+      summary: getSerdes
+      operationId: getSerdes
+      parameters:
+        - name: clusterName
+          in: path
+          required: true
+          schema:
+            type: string
+        - name: topicName
+          in: path
+          required: true
+          schema:
+            type: string
+        - name: use
+          in: query
+          required: true
+          schema:
+            $ref: '#/components/schemas/SerdeUsage'
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/TopicSerdeSuggestion'
+
   /api/clusters/{clusterName}/topics/{topicName}/messages:
     get:
       tags:
@@ -639,6 +670,16 @@ paths:
           in: query
           schema:
             $ref: "#/components/schemas/SeekDirection"
+        - name: keySerde
+          in: query
+          description: "Serde that should be used for deserialization. Will be chosen automatically if not set."
+          schema:
+            type: string
+        - name: valueSerde
+          in: query
+          description: "Serde that should be used for deserialization. Will be chosen automatically if not set."
+          schema:
+            type: string
       responses:
         200:
           description: OK
@@ -703,31 +744,6 @@ paths:
         404:
           description: Not found
 
-  /api/clusters/{clusterName}/topics/{topicName}/messages/schema:
-    get:
-      tags:
-        - Messages
-      summary: getTopicSchema
-      operationId: getTopicSchema
-      parameters:
-        - name: clusterName
-          in: path
-          required: true
-          schema:
-            type: string
-        - name: topicName
-          in: path
-          required: true
-          schema:
-            type: string
-      responses:
-        200:
-          description: OK
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/TopicMessageSchema'
-
   /api/clusters/{clusterName}/topics/{topicName}/consumer-groups:
     get:
       tags:
@@ -1765,6 +1781,41 @@ paths:
                 $ref: '#/components/schemas/TimeStampFormat'
 components:
   schemas:
+    TopicSerdeSuggestion:
+      type: object
+      properties:
+        key:
+          type: array
+          items:
+            $ref: '#/components/schemas/SerdeDescription'
+        value:
+          type: array
+          items:
+            $ref: '#/components/schemas/SerdeDescription'
+
+    SerdeDescription:
+      type: object
+      properties:
+        name:
+          type: string
+        description:
+          type: string
+        preferred:
+          description: "This serde was automatically chosen by cluster config. This should be enabled in UI by default. Also it will be used for deserialization if no serdes passed."
+          type: boolean
+        schema:
+          type: string
+        additionalProperties:
+          type: object
+          additionalProperties:
+            type: object
+
+    SerdeUsage:
+      type: string
+      enum:
+        - SERIALIZE
+        - DESERIALIZE
+
     ErrorResponse:
       description: Error object that will be returned with 4XX and 5XX HTTP statuses
       type: object
@@ -2161,6 +2212,10 @@ components:
           type: integer
         cleanUpPolicy:
           $ref: '#/components/schemas/CleanUpPolicy'
+        keySerde:
+          type: string
+        valueSerde:
+          type: string
       required:
         - name
 
@@ -2309,37 +2364,14 @@ components:
         content:
           type: string
           nullable: true
-      required:
-        - partition
-
-    TopicMessageSchema:
-      type: object
-      properties:
-        key:
-          $ref: "#/components/schemas/MessageSchema"
-        value:
-          $ref: "#/components/schemas/MessageSchema"
-      required:
-        - key
-        - value
-
-    MessageSchema:
-      type: object
-      properties:
-        name:
-          type: string
-        source:
+        keySerde:
           type: string
-          enum:
-            - SOURCE_SCHEMA_REGISTRY
-            - SOURCE_PROTO_FILE
-            - SOURCE_UNKNOWN
-        schema:
+          nullable: true
+        valueSerde:
           type: string
+          nullable: true
       required:
-        - name
-        - source
-        - schema
+        - partition
 
     TopicMessageEvent:
       type: object
@@ -2412,8 +2444,10 @@ components:
         content:
           type: string
         keyFormat:
+          # deprecated - won't be filled - use 'keySerde' field instead
           $ref: "#/components/schemas/MessageFormat"
         valueFormat:
+          # deprecated - won't be filled - use 'valueSerde' field instead
           $ref: "#/components/schemas/MessageFormat"
         keySize:
           type: integer
@@ -2422,12 +2456,26 @@ components:
           type: integer
           format: int64
         keySchemaId:
+          deprecated: true
+          description: deprecated - won't be filled - use 'keyDeserializeProperties' field instead
           type: string
         valueSchemaId:
+          deprecated: true
+          description: deprecated - won't be filled - use 'valueDeserializeProperties' field instead
           type: string
         headersSize:
           type: integer
           format: int64
+        keySerde:
+          type: string
+        valueSerde:
+          type: string
+        keyDeserializeProperties:
+          additionalProperties:
+            type: object
+        valueDeserializeProperties:
+          additionalProperties:
+            type: object
       required:
         - partition
         - offset
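
For reviewers: a minimal TypeScript sketch of how a client could use the new parameters end to end. The messages endpoint is the one extended above; the `/serdes` suggestion path and its `use` query parameter are assumptions inferred from the SerdeUsage enum added here and the frontend's useSerdes hook.

// Sketch only; the types mirror the TopicSerdeSuggestion/SerdeDescription schemas above.
interface SerdeDescription {
  name?: string;
  preferred?: boolean;
}
interface TopicSerdeSuggestion {
  key?: SerdeDescription[];
  value?: SerdeDescription[];
}

async function pollWithExplicitSerdes(cluster: string, topic: string) {
  const base = `/api/clusters/${cluster}/topics/${topic}`;
  // Ask the backend which serdes can deserialize this topic
  // (assumed path; `use` mirrors the SerdeUsage enum).
  const suggestion: TopicSerdeSuggestion = await fetch(
    `${base}/serdes?use=DESERIALIZE`
  ).then((r) => r.json());
  // Fall back to the preferred serde when the user has not picked one;
  // omitting both params lets the backend choose automatically.
  const keySerde = suggestion.key?.find((s) => s.preferred)?.name;
  const valueSerde = suggestion.value?.find((s) => s.preferred)?.name;
  const params = new URLSearchParams();
  if (keySerde) params.set('keySerde', keySerde);
  if (valueSerde) params.set('valueSerde', valueSerde);
  return fetch(`${base}/messages?${params}`);
}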

+ 1 - 0
kafka-ui-react-app/jest.config.ts

@@ -11,6 +11,7 @@ export default {
     '<rootDir>/src/index.tsx',
     '<rootDir>/src/serviceWorker.ts',
   ],
+  coverageReporters: ['json', 'lcov', 'text', 'clover'],
   resolver: '<rootDir>/.jest/resolver.js',
   setupFilesAfterEnv: ['<rootDir>/src/setupTests.ts'],
   testMatch: [

+ 6 - 3
kafka-ui-react-app/src/components/Connect/List/List.tsx

@@ -3,15 +3,18 @@ import useAppParams from 'lib/hooks/useAppParams';
 import { ClusterNameRoute } from 'lib/paths';
 import { Table } from 'components/common/table/Table/Table.styled';
 import TableHeaderCell from 'components/common/table/TableHeaderCell/TableHeaderCell';
-import useSearch from 'lib/hooks/useSearch';
 import { useConnectors } from 'lib/hooks/api/kafkaConnect';
+import { useSearchParams } from 'react-router-dom';
 
 import ListItem from './ListItem';
 
 const List: React.FC = () => {
   const { clusterName } = useAppParams<ClusterNameRoute>();
-  const [search] = useSearch();
-  const { data: connectors } = useConnectors(clusterName, search);
+  const [searchParams] = useSearchParams();
+  const { data: connectors } = useConnectors(
+    clusterName,
+    searchParams.get('q') || ''
+  );
 
   return (
     <Table isFullwidth>

+ 17 - 8
kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.styled.ts

@@ -23,19 +23,16 @@ export const FiltersWrapper = styled.div`
   & > div:first-child {
     display: flex;
     justify-content: space-between;
-    padding-top: 16px;
+    padding-top: 2px;
+    align-items: flex-end;
   }
 `;
 
 export const FilterInputs = styled.div`
   display: flex;
   gap: 8px;
-  align-items: center;
+  align-items: flex-end;
   width: 90%;
-
-  & > div:first-child {
-    width: 25%;
-  }
 `;
 
 export const SeekTypeSelectorWrapper = styled.div`
@@ -98,10 +95,12 @@ export const MetricsIcon = styled.div`
   height: 12px;
 `;
 
-export const ClearAll = styled.span`
+export const ClearAll = styled.div`
   color: ${({ theme }) => theme.metrics.filters.color.normal};
   font-size: 12px;
   cursor: pointer;
+  line-height: 32px;
+  margin-left: 8px;
 `;
 
 export const ButtonContainer = styled.div`
@@ -218,11 +217,15 @@ export const FilterButtonWrapper = styled.div`
 `;
 
 export const ActiveSmartFilterWrapper = styled.div`
-  padding: 5px 0;
+  padding: 8px 0 5px;
   display: flex;
   gap: 10px;
   align-items: center;
   justify-content: flex-start;
+
+  & div:first-child {
+    width: 25%;
+  }
 `;
 
 export const DeleteSavedFilter = styled.div.attrs({ role: 'deleteIcon' })`
@@ -359,3 +362,9 @@ export const SeekTypeSelect = styled(Select)`
   border-bottom-right-radius: 0;
   user-select: none;
 `;
+
+export const Serdes = styled.div`
+  display: flex;
+  gap: 24px;
+  padding: 8px 0;
+`;

+ 152 - 127
kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx

@@ -5,6 +5,7 @@ import {
   Partition,
   SeekDirection,
   SeekType,
+  SerdeUsage,
   TopicMessage,
   TopicMessageConsuming,
   TopicMessageEvent,
@@ -12,7 +13,7 @@ import {
 } from 'generated-sources';
 import React, { useContext } from 'react';
 import omitBy from 'lodash/omitBy';
-import { useNavigate, useLocation } from 'react-router-dom';
+import { useNavigate, useLocation, useSearchParams } from 'react-router-dom';
 import MultiSelect from 'components/common/MultiSelect/MultiSelect.styled';
 import { Option } from 'react-multi-select-component';
 import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted';
@@ -34,6 +35,9 @@ import ClockIcon from 'components/common/Icons/ClockIcon';
 import ArrowDownIcon from 'components/common/Icons/ArrowDownIcon';
 import FileIcon from 'components/common/Icons/FileIcon';
 import { useTopicDetails } from 'lib/hooks/api/topics';
+import { InputLabel } from 'components/common/Input/InputLabel.styled';
+import { getSerdeOptions } from 'components/Topics/Topic/SendMessage/utils';
+import { useSerdes } from 'lib/hooks/api/topicMessages';
 
 import * as S from './Filters.styled';
 import {
@@ -87,12 +91,13 @@ const Filters: React.FC<FiltersProps> = ({
   const { clusterName, topicName } = useAppParams<RouteParamsClusterTopic>();
   const location = useLocation();
   const navigate = useNavigate();
+  const [searchParams] = useSearchParams();
 
   const { data: topic } = useTopicDetails({ clusterName, topicName });
 
   const partitions = topic?.partitions || [];
 
-  const { searchParams, seekDirection, isLive, changeSeekDirection } =
+  const { seekDirection, isLive, changeSeekDirection } =
     useContext(TopicMessagesContext);
 
   const { value: isOpen, toggle } = useBoolean();
@@ -103,7 +108,6 @@ const Filters: React.FC<FiltersProps> = ({
     getSelectedPartitionsFromSeekToParam(searchParams, partitions)
   );
 
-  const [attempt, setAttempt] = React.useState(0);
   const [currentSeekType, setCurrentSeekType] = React.useState<SeekType>(
     (searchParams.get('seekType') as SeekType) || SeekType.OFFSET
   );
@@ -114,6 +118,12 @@ const Filters: React.FC<FiltersProps> = ({
   const [timestamp, setTimestamp] = React.useState<Date | null>(
     getTimestampFromSeekToParam(searchParams)
   );
+  const [keySerde, setKeySerde] = React.useState<string>(
+    searchParams.get('keySerde') as string
+  );
+  const [valueSerde, setValueSerde] = React.useState<string>(
+    searchParams.get('valueSerde') as string
+  );
 
   const [savedFilters, setSavedFilters] = React.useState<MessageFilters[]>(
     JSON.parse(localStorage.getItem('savedFilters') ?? '[]')
@@ -162,19 +172,6 @@ const Filters: React.FC<FiltersProps> = ({
     [partitions]
   );
 
-  const props: Query = React.useMemo(() => {
-    return {
-      q:
-        queryType === MessageFilterType.GROOVY_SCRIPT
-          ? activeFilter.code
-          : query,
-      filterQueryType: queryType,
-      attempt,
-      limit: PER_PAGE,
-      seekDirection,
-    };
-  }, [attempt, query, queryType, seekDirection, activeFilter]);
-
   const handleClearAllFilters = () => {
     setCurrentSeekType(SeekType.OFFSET);
     setOffset('');
@@ -191,56 +188,55 @@ const Filters: React.FC<FiltersProps> = ({
     );
   };
 
-  const handleFiltersSubmit = React.useCallback(
-    (currentOffset: string) => {
-      setAttempt(attempt + 1);
+  const handleFiltersSubmit = (currentOffset: string) => {
+    const nextAttempt = Number(searchParams.get('attempt') || 0) + 1;
+    const props: Query = {
+      q:
+        queryType === MessageFilterType.GROOVY_SCRIPT
+          ? activeFilter.code
+          : query,
+      filterQueryType: queryType,
+      attempt: nextAttempt,
+      limit: PER_PAGE,
+      seekDirection,
+      keySerde: keySerde || (searchParams.get('keySerde') as string),
+      valueSerde: valueSerde || (searchParams.get('valueSerde') as string),
+    };
 
-      if (isSeekTypeControlVisible) {
-        switch (seekDirection) {
-          case SeekDirection.FORWARD:
-            props.seekType = SeekType.BEGINNING;
-            break;
-          case SeekDirection.BACKWARD:
-          case SeekDirection.TAILING:
-            props.seekType = SeekType.LATEST;
-            break;
-          default:
-            props.seekType = currentSeekType;
-        }
-        props.seekTo = selectedPartitions.map(({ value }) => {
-          const offsetProperty =
-            seekDirection === SeekDirection.FORWARD ? 'offsetMin' : 'offsetMax';
-          const offsetBasedSeekTo =
-            currentOffset || partitionMap[value][offsetProperty];
-          const seekToOffset =
-            currentSeekType === SeekType.OFFSET
-              ? offsetBasedSeekTo
-              : timestamp?.getTime();
-
-          return `${value}::${seekToOffset || '0'}`;
-        });
+    if (isSeekTypeControlVisible) {
+      switch (seekDirection) {
+        case SeekDirection.FORWARD:
+          props.seekType = SeekType.BEGINNING;
+          break;
+        case SeekDirection.BACKWARD:
+        case SeekDirection.TAILING:
+          props.seekType = SeekType.LATEST;
+          break;
+        default:
+          props.seekType = currentSeekType;
       }
-
-      const newProps = omitBy(props, (v) => v === undefined || v === '');
-      const qs = Object.keys(newProps)
-        .map((key) => `${key}=${encodeURIComponent(newProps[key] as string)}`)
-        .join('&');
-
-      navigate({
-        search: `?${qs}`,
+      props.seekTo = selectedPartitions.map(({ value }) => {
+        const offsetProperty =
+          seekDirection === SeekDirection.FORWARD ? 'offsetMin' : 'offsetMax';
+        const offsetBasedSeekTo =
+          currentOffset || partitionMap[value][offsetProperty];
+        const seekToOffset =
+          currentSeekType === SeekType.OFFSET
+            ? offsetBasedSeekTo
+            : timestamp?.getTime();
+
+        return `${value}::${seekToOffset || '0'}`;
       });
-    },
-    [
-      seekDirection,
-      queryType,
-      activeFilter,
-      currentSeekType,
-      timestamp,
-      query,
-      selectedPartitions,
-      navigate,
-    ]
-  );
+    }
+
+    const newProps = omitBy(props, (v) => v === undefined || v === '');
+    const qs = Object.keys(newProps)
+      .map((key) => `${key}=${encodeURIComponent(newProps[key] as string)}`)
+      .join('&');
+    navigate({
+      search: `?${qs}`,
+    });
+  };
 
   const handleSSECancel = () => {
     if (!source.current) return;
@@ -389,78 +385,105 @@ const Filters: React.FC<FiltersProps> = ({
     setIsTailing(isLive);
   }, [isLive]);
 
+  const { data: serdes = {} } = useSerdes({
+    clusterName,
+    topicName,
+    use: SerdeUsage.DESERIALIZE,
+  });
+
   return (
     <S.FiltersWrapper>
       <div>
         <S.FilterInputs>
-          <Search placeholder="Search" disabled={isTailing} />
-          <S.SeekTypeSelectorWrapper>
-            <S.SeekTypeSelect
-              id="selectSeekType"
-              onChange={(option) => setCurrentSeekType(option as SeekType)}
-              value={currentSeekType}
+          <div>
+            <InputLabel>Seek Type</InputLabel>
+            <S.SeekTypeSelectorWrapper>
+              <S.SeekTypeSelect
+                id="selectSeekType"
+                onChange={(option) => setCurrentSeekType(option as SeekType)}
+                value={currentSeekType}
+                selectSize="M"
+                minWidth="100px"
+                options={SeekTypeOptions}
+                disabled={isTailing}
+              />
+
+              {currentSeekType === SeekType.OFFSET ? (
+                <S.OffsetSelector
+                  id="offset"
+                  type="text"
+                  inputSize="M"
+                  value={offset}
+                  placeholder="Offset"
+                  onChange={({ target: { value } }) => setOffset(value)}
+                  disabled={isTailing}
+                />
+              ) : (
+                <S.DatePickerInput
+                  selected={timestamp}
+                  onChange={(date: Date | null) => setTimestamp(date)}
+                  showTimeInput
+                  timeInputLabel="Time:"
+                  dateFormat="MMMM d, yyyy HH:mm"
+                  placeholderText="Select timestamp"
+                  disabled={isTailing}
+                />
+              )}
+            </S.SeekTypeSelectorWrapper>
+          </div>
+          <div>
+            <InputLabel>Partitions</InputLabel>
+            <MultiSelect
+              options={partitions.map((p) => ({
+                label: `Partition #${p.partition.toString()}`,
+                value: p.partition,
+              }))}
+              filterOptions={filterOptions}
+              value={selectedPartitions}
+              onChange={setSelectedPartitions}
+              labelledBy="Select partitions"
+              disabled={isTailing}
+            />
+          </div>
+          <div>
+            <InputLabel>Key Serde</InputLabel>
+            <Select
+              id="selectKeySerdeOptions"
+              aria-labelledby="selectKeySerdeOptions"
+              onChange={(option) => setKeySerde(option as string)}
+              minWidth="170px"
+              options={getSerdeOptions(serdes.key || [])}
+              value={searchParams.get('keySerde') as string}
               selectSize="M"
-              minWidth="100px"
-              options={SeekTypeOptions}
               disabled={isTailing}
             />
-            {currentSeekType === SeekType.OFFSET ? (
-              <S.OffsetSelector
-                id="offset"
-                type="text"
-                inputSize="M"
-                value={offset}
-                placeholder="Offset"
-                onChange={({ target: { value } }) => setOffset(value)}
-                disabled={isTailing}
-              />
-            ) : (
-              <S.DatePickerInput
-                selected={timestamp}
-                onChange={(date: Date | null) => setTimestamp(date)}
-                showTimeInput
-                timeInputLabel="Time:"
-                dateFormat="MMMM d, yyyy HH:mm"
-                placeholderText="Select timestamp"
-                disabled={isTailing}
-              />
-            )}
-          </S.SeekTypeSelectorWrapper>
-          <MultiSelect
-            options={partitions.map((p) => ({
-              label: `Partition #${p.partition.toString()}`,
-              value: p.partition,
-            }))}
-            filterOptions={filterOptions}
-            value={selectedPartitions}
-            onChange={setSelectedPartitions}
-            labelledBy="Select partitions"
-            disabled={isTailing}
-          />
+          </div>
+          <div>
+            <InputLabel>Content Serde</InputLabel>
+            <Select
+              id="selectValueSerdeOptions"
+              aria-labelledby="selectValueSerdeOptions"
+              onChange={(option) => setValueSerde(option as string)}
+              options={getSerdeOptions(serdes.value || [])}
+              value={searchParams.get('valueSerde') as string}
+              minWidth="170px"
+              selectSize="M"
+              disabled={isTailing}
+            />
+          </div>
           <S.ClearAll onClick={handleClearAllFilters}>Clear all</S.ClearAll>
-          {isFetching ? (
-            <Button
-              type="button"
-              buttonType="secondary"
-              buttonSize="M"
-              disabled={isSubmitDisabled}
-              onClick={handleSSECancel}
-              style={{ fontWeight: 500 }}
-            >
-              Cancel
-            </Button>
-          ) : (
-            <Button
-              type="submit"
-              buttonType="secondary"
-              buttonSize="M"
-              disabled={isSubmitDisabled}
-              onClick={() => handleFiltersSubmit(offset)}
-              style={{ fontWeight: 500 }}
-            >
-              Submit
-            </Button>
-          )}
+          <Button
+            type="submit"
+            buttonType="secondary"
+            buttonSize="M"
+            disabled={isSubmitDisabled}
+            onClick={() =>
+              isFetching ? handleSSECancel() : handleFiltersSubmit(offset)
+            }
+            style={{ fontWeight: 500 }}
+          >
+            {isFetching ? 'Cancel' : 'Submit'}
+          </Button>
         </S.FilterInputs>
         <Select
           selectSize="M"
@@ -472,6 +495,8 @@ const Filters: React.FC<FiltersProps> = ({
         />
       </div>
       <S.ActiveSmartFilterWrapper>
+        <Search placeholder="Search" disabled={isTailing} />
+
         <Button buttonType="primary" buttonSize="M" onClick={toggle}>
           <PlusIcon />
           Add Filters
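
For context on the rewritten handler above, a self-contained sketch of the query string it produces. The values are hypothetical, but the dropping of empty values and the partition::offset seekTo encoding match the code:

// Hypothetical submission: forward seek from the beginning of partitions 0 and 1.
const props: Record<string, string | number | undefined> = {
  q: '', // empty, so dropped below like omitBy does
  filterQueryType: 'STRING_CONTAINS',
  attempt: 1, // Number(searchParams.get('attempt') || 0) + 1 on first submit
  limit: 100, // PER_PAGE
  seekDirection: 'FORWARD',
  keySerde: 'String',
  valueSerde: 'Int64',
  seekType: 'BEGINNING',
  seekTo: ['0::0', '1::0'].join(','), // partition::offset pairs
};
const qs = Object.entries(props)
  .filter(([, v]) => v !== undefined && v !== '')
  .map(([k, v]) => `${k}=${encodeURIComponent(String(v))}`)
  .join('&');
// 'filterQueryType=STRING_CONTAINS&attempt=1&limit=100&seekDirection=FORWARD
//  &keySerde=String&valueSerde=Int64&seekType=BEGINNING&seekTo=0%3A%3A0%2C1%3A%3A0'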

+ 13 - 5
kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/__tests__/Filters.spec.tsx

@@ -14,15 +14,20 @@ import { SeekDirection } from 'generated-sources';
 import { clusterTopicPath } from 'lib/paths';
 import { useTopicDetails } from 'lib/hooks/api/topics';
 import { externalTopicPayload } from 'lib/fixtures/topics';
+import { useSerdes } from 'lib/hooks/api/topicMessages';
+import { serdesPayload } from 'lib/fixtures/topicMessages';
 
 jest.mock('lib/hooks/api/topics', () => ({
   useTopicDetails: jest.fn(),
 }));
 
+jest.mock('lib/hooks/api/topicMessages', () => ({
+  useSerdes: jest.fn(),
+}));
+
 const defaultContextValue: ContextProps = {
   isLive: false,
   seekDirection: SeekDirection.FORWARD,
-  searchParams: new URLSearchParams(''),
   changeSeekDirection: jest.fn(),
 };
 
@@ -58,6 +63,9 @@ beforeEach(async () => {
   (useTopicDetails as jest.Mock).mockImplementation(() => ({
     data: externalTopicPayload,
   }));
+  (useSerdes as jest.Mock).mockImplementation(() => ({
+    data: serdesPayload,
+  }));
 });
 
 describe('Filters component', () => {
@@ -143,11 +151,11 @@ describe('Filters component', () => {
     });
 
     it('seekDirection select', () => {
-      userEvent.click(seekTypeSelects[1]);
-      userEvent.selectOptions(seekTypeSelects[1], [
+      userEvent.click(seekTypeSelects[3]);
+      userEvent.selectOptions(seekTypeSelects[3], [
         mockDirectionOptionSelectLabel,
       ]);
-      expect(options[1]).toHaveTextContent(mockDirectionOptionSelectLabel);
+      expect(options[3]).toHaveTextContent(mockDirectionOptionSelectLabel);
     });
   });
 
@@ -155,7 +163,7 @@ describe('Filters component', () => {
     renderComponent();
     userEvent.click(screen.getByText('Stop loading'));
     const option = screen.getAllByRole('option');
-    expect(option[1]).toHaveTextContent('Oldest First');
+    expect(option[3]).toHaveTextContent('Oldest First');
     expect(screen.getByText('Submit')).toBeInTheDocument();
   });
 

+ 25 - 10
kafka-ui-react-app/src/components/Topics/Topic/Messages/Messages.tsx

@@ -1,10 +1,14 @@
 import React, { useCallback, useMemo, useState } from 'react';
 import TopicMessagesContext from 'components/contexts/TopicMessagesContext';
-import { SeekDirection } from 'generated-sources';
-import { useLocation } from 'react-router-dom';
+import { SeekDirection, SerdeUsage } from 'generated-sources';
+import { useSearchParams } from 'react-router-dom';
+import { useSerdes } from 'lib/hooks/api/topicMessages';
+import useAppParams from 'lib/hooks/useAppParams';
+import { RouteParamsClusterTopic } from 'lib/paths';
+import { getDefaultSerdeName } from 'components/Topics/Topic/MessagesV2/utils/getDefaultSerdeName';
 
-import FiltersContainer from './Filters/FiltersContainer';
 import MessagesTable from './MessagesTable';
+import FiltersContainer from './Filters/FiltersContainer';
 
 export const SeekDirectionOptionsObj = {
   [SeekDirection.FORWARD]: {
@@ -27,12 +31,24 @@ export const SeekDirectionOptionsObj = {
 export const SeekDirectionOptions = Object.values(SeekDirectionOptionsObj);
 
 const Messages: React.FC = () => {
-  const location = useLocation();
+  const [searchParams, setSearchParams] = useSearchParams();
+  const { clusterName, topicName } = useAppParams<RouteParamsClusterTopic>();
 
-  const searchParams = React.useMemo(
-    () => new URLSearchParams(location.search),
-    [location.search]
-  );
+  const { data: serdes = {} } = useSerdes({
+    clusterName,
+    topicName,
+    use: SerdeUsage.DESERIALIZE,
+  });
+
+  React.useEffect(() => {
+    if (!searchParams.get('keySerde')) {
+      searchParams.set('keySerde', getDefaultSerdeName(serdes.key || []));
+    }
+    if (!searchParams.get('valueSerde')) {
+      searchParams.set('valueSerde', getDefaultSerdeName(serdes.value || []));
+    }
+    setSearchParams(searchParams);
+  }, [serdes]);
 
   const defaultSeekValue = SeekDirectionOptions[0];
 
@@ -66,11 +82,10 @@ const Messages: React.FC = () => {
   const contextValue = useMemo(
     () => ({
       seekDirection,
-      searchParams,
       changeSeekDirection,
       isLive,
     }),
-    [seekDirection, searchParams, changeSeekDirection]
+    [seekDirection, changeSeekDirection]
   );
 
   return (
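
The effect above backfills missing serde params into the URL. A tiny sketch of the resulting state, using the Int32/Int64 preferred defaults from the serdesPayload fixture added later in this diff:

const params = new URLSearchParams('seekDirection=FORWARD');
if (!params.get('keySerde')) params.set('keySerde', 'Int32'); // getDefaultSerdeName(serdes.key)
if (!params.get('valueSerde')) params.set('valueSerde', 'Int64'); // getDefaultSerdeName(serdes.value)
params.toString(); // 'seekDirection=FORWARD&keySerde=Int32&valueSerde=Int64'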

+ 13 - 4
kafka-ui-react-app/src/components/Topics/Topic/Messages/__test__/Messages.spec.tsx

@@ -8,6 +8,12 @@ import Messages, {
 import { SeekDirection, SeekType } from 'generated-sources';
 import userEvent from '@testing-library/user-event';
 import { clusterTopicMessagesPath } from 'lib/paths';
+import { useSerdes } from 'lib/hooks/api/topicMessages';
+import { serdesPayload } from 'lib/fixtures/topicMessages';
+
+jest.mock('lib/hooks/api/topicMessages', () => ({
+  useSerdes: jest.fn(),
+}));
 
 describe('Messages', () => {
   const searchParams = `?filterQueryType=STRING_CONTAINS&attempt=0&limit=100&seekDirection=${SeekDirection.FORWARD}&seekType=${SeekType.OFFSET}&seekTo=0::9`;
@@ -28,20 +34,23 @@ describe('Messages', () => {
     Object.defineProperty(window, 'EventSource', {
       value: EventSourceMock,
     });
+    (useSerdes as jest.Mock).mockImplementation(() => ({
+      data: serdesPayload,
+    }));
   });
   describe('component rendering default behavior with the search params', () => {
     beforeEach(() => {
       renderComponent();
     });
     it('should check default seekDirection if it actually take the value from the url', () => {
-      expect(screen.getAllByRole('listbox')[1]).toHaveTextContent(
+      expect(screen.getAllByRole('listbox')[3]).toHaveTextContent(
         SeekDirectionOptionsObj[SeekDirection.FORWARD].label
       );
     });
 
     it('should check the SeekDirection select changes with live option', async () => {
-      const seekDirectionSelect = screen.getAllByRole('listbox')[1];
-      const seekDirectionOption = screen.getAllByRole('option')[1];
+      const seekDirectionSelect = screen.getAllByRole('listbox')[3];
+      const seekDirectionOption = screen.getAllByRole('option')[3];
 
       expect(seekDirectionOption).toHaveTextContent(
         SeekDirectionOptionsObj[SeekDirection.FORWARD].label
@@ -79,7 +88,7 @@ describe('Messages', () => {
       renderComponent(
         searchParams.replace(SeekDirection.FORWARD, SeekDirection.BACKWARD)
       );
-      expect(screen.getAllByRole('listbox')[1]).toHaveTextContent(
+      expect(screen.getAllByRole('listbox')[3]).toHaveTextContent(
         SeekDirectionOptionsObj[SeekDirection.BACKWARD].label
       );
     });

+ 0 - 1
kafka-ui-react-app/src/components/Topics/Topic/Messages/__test__/MessagesTable.spec.tsx

@@ -26,7 +26,6 @@ describe('MessagesTable', () => {
   const contextValue: ContextProps = {
     isLive: false,
     seekDirection: SeekDirection.FORWARD,
-    searchParams,
     changeSeekDirection: jest.fn(),
   };
 

+ 53 - 4
kafka-ui-react-app/src/components/Topics/Topic/MessagesV2/FiltersBar/Form.tsx

@@ -2,13 +2,16 @@ import React from 'react';
 import { useForm } from 'react-hook-form';
 import { useSearchParams } from 'react-router-dom';
 import Input from 'components/common/Input/Input';
-import { ConsumingMode } from 'lib/hooks/api/topicMessages';
+import { ConsumingMode, useSerdes } from 'lib/hooks/api/topicMessages';
 import Select from 'components/common/Select/Select';
 import { InputLabel } from 'components/common/Input/InputLabel.styled';
 import { Option } from 'react-multi-select-component';
 import { Button } from 'components/common/Button/Button';
-import { Partition } from 'generated-sources';
+import { Partition, SerdeUsage } from 'generated-sources';
 import { getModeOptions } from 'components/Topics/Topic/MessagesV2/utils/consumingModes';
+import { getSerdeOptions } from 'components/Topics/Topic/SendMessage/utils';
+import useAppParams from 'lib/hooks/useAppParams';
+import { RouteParamsClusterTopic } from 'lib/paths';
 
 import * as S from './FiltersBar.styled';
 import { setSeekTo } from './utils';
@@ -18,6 +21,8 @@ type FormValues = {
   offset: string;
   time: Date;
   partitions: Option[];
+  keySerde: string;
+  valueSerde: string;
 };
 
 const Form: React.FC<{ isFetching: boolean; partitions: Partition[] }> = ({
@@ -25,6 +30,11 @@ const Form: React.FC<{ isFetching: boolean; partitions: Partition[] }> = ({
   partitions,
 }) => {
   const [searchParams, setSearchParams] = useSearchParams();
+  const routerProps = useAppParams<RouteParamsClusterTopic>();
+  const { data: serdes = {} } = useSerdes({
+    ...routerProps,
+    use: SerdeUsage.DESERIALIZE,
+  });
 
   const {
     handleSubmit,
@@ -39,23 +49,34 @@ const Form: React.FC<{ isFetching: boolean; partitions: Partition[] }> = ({
       time: searchParams.get('t')
         ? new Date(Number(searchParams.get('t')))
         : Date.now(),
+      keySerde: searchParams.get('keySerde') as string,
+      valueSerde: searchParams.get('valueSerde') as string,
     } as FormValues,
   });
   const mode = watch('mode');
   const offset = watch('offset');
   const time = watch('time');
+  const keySerde = watch('keySerde');
+  const valueSerde = watch('valueSerde');
 
   const onSubmit = (values: FormValues) => {
     searchParams.set('m', values.mode);
+    if (values.keySerde) {
+      searchParams.set('keySerde', values.keySerde);
+    }
+    if (values.valueSerde) {
+      searchParams.set('valueSerde', values.valueSerde);
+    }
     searchParams.delete('o');
     searchParams.delete('t');
     searchParams.delete('a');
     searchParams.delete('page');
-    if (values.mode === 'fromOffset' || values.mode === 'toOffset') {
+    if (['fromOffset', 'toOffset'].includes(mode)) {
       searchParams.set('o', values.offset);
-    } else if (values.mode === 'sinceTime' || values.mode === 'untilTime') {
+    } else if (['sinceTime', 'untilTime'].includes(mode)) {
       searchParams.set('t', `${values.time.getTime()}`);
     }
+
     setSeekTo(searchParams, partitions);
     setSearchParams(searchParams);
     reset(values);
@@ -69,6 +90,10 @@ const Form: React.FC<{ isFetching: boolean; partitions: Partition[] }> = ({
   const handleOffsetChange = (e: React.ChangeEvent<HTMLInputElement>) => {
     setValue('offset', e.target.value, { shouldDirty: true });
   };
+  const handleSerdeChange =
+    (type: 'keySerde' | 'valueSerde') => (option: string | number) => {
+      setValue(type, String(option), { shouldDirty: true });
+    };
   const handleRefresh: React.MouseEventHandler<HTMLButtonElement> = (e) => {
     e.stopPropagation();
     e.preventDefault();
@@ -116,6 +141,30 @@ const Form: React.FC<{ isFetching: boolean; partitions: Partition[] }> = ({
           />
         </S.FilterRow>
       )}
+      <S.FilterRow>
+        <InputLabel>Key Serde</InputLabel>
+        <Select
+          id="selectKeySerdeOptions"
+          aria-labelledby="selectKeySerdeOptions"
+          onChange={handleSerdeChange('keySerde')}
+          options={getSerdeOptions(serdes.key || [])}
+          value={keySerde}
+          selectSize="M"
+          minWidth="100%"
+        />
+      </S.FilterRow>
+      <S.FilterRow>
+        <InputLabel>Content Serde</InputLabel>
+        <Select
+          id="selectValueSerdeOptions"
+          aria-labelledby="selectValueSerdeOptions"
+          onChange={handleSerdeChange('valueSerde')}
+          options={getSerdeOptions(serdes.value || [])}
+          value={valueSerde}
+          selectSize="M"
+          minWidth="100%"
+        />
+      </S.FilterRow>
       <S.FilterFooter>
         <Button
           buttonType="secondary"

+ 1 - 0
kafka-ui-react-app/src/components/Topics/Topic/MessagesV2/Messages.styled.ts

@@ -52,6 +52,7 @@ export const StatusBarWrapper = styled.div(
     white-space: nowrap;
     display: flex;
     justify-content: space-between;
+    z-index: 10;
   `
 );
 

+ 0 - 25
kafka-ui-react-app/src/components/Topics/Topic/MessagesV2/Messages.tsx

@@ -15,7 +15,6 @@ import MessagesTable from './MessagesTable/MessagesTable';
 import * as S from './Messages.styled';
 import Meta from './FiltersBar/Meta';
 import Form from './FiltersBar/Form';
-import { setSeekTo } from './FiltersBar/utils';
 import handleNextPageClick from './utils/handleNextPageClick';
 import StatusBar from './StatusBar';
 import AdvancedFilter from './Advanced Filter/AdvancedFilter';
@@ -39,30 +38,6 @@ const Messages = () => {
 
   const partitions = topic.partitions || [];
 
-  /**
-   * Search params:
-   * - `q` - search query
-   * - `m` - way the consumer is going to consume the messages..
-   * - `o` - offset
-   * - `t` - timestamp
-   * - `perPage` - number of messages per page
-   * - `seekTo` - offset or timestamp to seek to.
-   *    Format: `0-101.1-987` - [partition 0, offset 101], [partition 1, offset 987]
-   * - `page` - page number
-   */
-  React.useEffect(() => {
-    if (!mode) {
-      searchParams.set('m', 'newest');
-    }
-    if (!searchParams.get('perPage')) {
-      searchParams.set('perPage', MESSAGES_PER_PAGE);
-    }
-    if (!searchParams.get('seekTo')) {
-      setSeekTo(searchParams, partitions);
-    }
-    setSearchParams(searchParams);
-  }, [topic]);
-
   // Pagination is disabled in live mode, also we don't want to show the button
   // if we are fetching the messages or if we are at the end of the topic
   const isPaginationDisabled =

+ 62 - 0
kafka-ui-react-app/src/components/Topics/Topic/MessagesV2/MessagesContainer.tsx

@@ -0,0 +1,62 @@
+import React, { Suspense } from 'react';
+import { ConsumingMode, useSerdes } from 'lib/hooks/api/topicMessages';
+import useAppParams from 'lib/hooks/useAppParams';
+import { RouteParamsClusterTopic } from 'lib/paths';
+import { useSearchParams } from 'react-router-dom';
+import { useTopicDetails } from 'lib/hooks/api/topics';
+import { MESSAGES_PER_PAGE } from 'lib/constants';
+import { SerdeUsage } from 'generated-sources';
+
+import { setSeekTo } from './FiltersBar/utils';
+import { getDefaultSerdeName } from './utils/getDefaultSerdeName';
+import Messages from './Messages';
+
+const MessagesContainer = () => {
+  const routerProps = useAppParams<RouteParamsClusterTopic>();
+  const [searchParams, setSearchParams] = useSearchParams();
+  const { data: serdes = {} } = useSerdes({
+    ...routerProps,
+    use: SerdeUsage.DESERIALIZE,
+  });
+  const mode = searchParams.get('m') as ConsumingMode;
+  const { data: topic = { partitions: [] } } = useTopicDetails(routerProps);
+  const partitions = topic.partitions || [];
+
+  /**
+   * Search params:
+   * - `q` - search query
+   * - `m` - the way the consumer is going to consume the messages
+   * - `o` - offset
+   * - `t` - timestamp
+   * - `perPage` - number of messages per page
+   * - `seekTo` - offset or timestamp to seek to.
+   *    Format: `0-101.1-987` - [partition 0, offset 101], [partition 1, offset 987]
+   * - `page` - page number
+   */
+  React.useEffect(() => {
+    if (!mode) {
+      searchParams.set('m', 'newest');
+    }
+    if (!searchParams.get('perPage')) {
+      searchParams.set('perPage', MESSAGES_PER_PAGE);
+    }
+    if (!searchParams.get('seekTo')) {
+      setSeekTo(searchParams, partitions);
+    }
+    if (!searchParams.get('keySerde')) {
+      searchParams.set('keySerde', getDefaultSerdeName(serdes.key || []));
+    }
+    if (!searchParams.get('valueSerde')) {
+      searchParams.set('valueSerde', getDefaultSerdeName(serdes.value || []));
+    }
+    setSearchParams(searchParams);
+  }, [topic, serdes]);
+
+  return (
+    <Suspense>
+      <Messages />
+    </Suspense>
+  );
+};
+
+export default MessagesContainer;
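
The seekTo format documented in the comment above can be captured in a small helper. This is an illustration of the encoding only, not the actual setSeekTo from FiltersBar/utils:

// '0-101.1-987' == [partition 0, offset 101], [partition 1, offset 987]
const encodeSeekTo = (pairs: Array<[partition: number, offset: number]>) =>
  pairs.map(([p, o]) => `${p}-${o}`).join('.');

encodeSeekTo([
  [0, 101],
  [1, 987],
]); // '0-101.1-987'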

+ 13 - 0
kafka-ui-react-app/src/components/Topics/Topic/MessagesV2/utils/getDefaultSerdeName.ts

@@ -0,0 +1,13 @@
+import { SerdeDescription } from 'generated-sources';
+import { getPrefferedDescription } from 'components/Topics/Topic/SendMessage/utils';
+
+export const getDefaultSerdeName = (serdes: SerdeDescription[]) => {
+  const preffered = getPrefferedDescription(serdes);
+  if (preffered) {
+    return preffered.name || '';
+  }
+  if (serdes.length > 0) {
+    return serdes[0].name || '';
+  }
+  return '';
+};
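
Behaviour against the serdesPayload fixture added later in this diff: the preferred entry wins, then the first named serde, then an empty string.

getDefaultSerdeName(serdesPayload.key ?? []); // 'Int32' (preferred: true)
getDefaultSerdeName(serdesPayload.value ?? []); // 'Int64' (preferred: true)
getDefaultSerdeName([]); // ''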

+ 119 - 99
kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx

@@ -1,131 +1,116 @@
-import React, { useEffect } from 'react';
+import React from 'react';
 import { useForm, Controller } from 'react-hook-form';
 import { RouteParamsClusterTopic } from 'lib/paths';
-import jsf from 'json-schema-faker';
 import { Button } from 'components/common/Button/Button';
 import Editor from 'components/common/Editor/Editor';
 import Select, { SelectOption } from 'components/common/Select/Select';
 import useAppParams from 'lib/hooks/useAppParams';
 import { showAlert } from 'lib/errorHandling';
-import {
-  useSendMessage,
-  useTopicDetails,
-  useTopicMessageSchema,
-} from 'lib/hooks/api/topics';
+import { useSendMessage, useTopicDetails } from 'lib/hooks/api/topics';
 import { InputLabel } from 'components/common/Input/InputLabel.styled';
+import { useSerdes } from 'lib/hooks/api/topicMessages';
+import { SerdeUsage } from 'generated-sources';
 
-import validateMessage from './validateMessage';
 import * as S from './SendMessage.styled';
+import {
+  getDefaultValues,
+  getPartitionOptions,
+  getSerdeOptions,
+  validateBySchema,
+} from './utils';
 
-type FieldValues = Partial<{
+interface FormType {
   key: string;
   content: string;
   headers: string;
-  partition: number | string;
-}>;
+  partition: number;
+  keySerde: string;
+  valueSerde: string;
+}
 
 const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
   const { clusterName, topicName } = useAppParams<RouteParamsClusterTopic>();
   const { data: topic } = useTopicDetails({ clusterName, topicName });
-  const { data: messageSchema } = useTopicMessageSchema({
+  const { data: serdes = {} } = useSerdes({
     clusterName,
     topicName,
+    use: SerdeUsage.SERIALIZE,
   });
   const sendMessage = useSendMessage({ clusterName, topicName });
 
-  jsf.option('fillProperties', false);
-  jsf.option('alwaysFakeOptionals', true);
-
-  const partitions = topic?.partitions || [];
-
-  const selectPartitionOptions: Array<SelectOption> = partitions.map((p) => {
-    const value = String(p.partition);
-    return { value, label: value };
-  });
-
-  const keyDefaultValue = React.useMemo(() => {
-    if (!messageSchema) {
-      return undefined;
-    }
-    return JSON.stringify(
-      jsf.generate(JSON.parse(messageSchema.key.schema)),
-      null,
-      '\t'
-    );
-  }, [messageSchema]);
-
-  const contentDefaultValue = React.useMemo(() => {
-    if (!messageSchema) {
-      return undefined;
-    }
-    return JSON.stringify(
-      jsf.generate(JSON.parse(messageSchema.value.schema)),
-      null,
-      '\t'
-    );
-  }, [messageSchema]);
-
+  const defaultValues = React.useMemo(() => getDefaultValues(serdes), [serdes]);
+  const partitionOptions: SelectOption[] = React.useMemo(
+    () => getPartitionOptions(topic?.partitions || []),
+    [topic]
+  );
   const {
     handleSubmit,
-    formState: { isSubmitting, isDirty },
+    formState: { isSubmitting },
     control,
-    reset,
-  } = useForm<FieldValues>({
+  } = useForm<FormType>({
     mode: 'onChange',
     defaultValues: {
-      key: keyDefaultValue,
-      content: contentDefaultValue,
-      headers: undefined,
-      partition: undefined,
+      ...defaultValues,
+      partition: Number(partitionOptions[0].value),
     },
   });
 
-  useEffect(() => {
-    reset({
-      key: keyDefaultValue,
-      content: contentDefaultValue,
-    });
-  }, [keyDefaultValue, contentDefaultValue, reset]);
+  const submit = async ({
+    keySerde,
+    valueSerde,
+    key,
+    content,
+    headers,
+    partition,
+  }: FormType) => {
+    let errors: string[] = [];
 
-  const submit = async (data: {
-    key: string;
-    content: string;
-    headers: string;
-    partition: number;
-  }) => {
-    if (messageSchema) {
-      const { partition, key, content } = data;
-      const errors = validateMessage(key, content, messageSchema);
-      if (data.headers) {
-        try {
-          JSON.parse(data.headers);
-        } catch (error) {
-          errors.push('Wrong header format');
-        }
-      }
-      if (errors.length > 0) {
-        showAlert('error', {
-          id: `${clusterName}-${topicName}-createTopicMessageError`,
-          title: 'Validation Error',
-          message: (
-            <ul>
-              {errors.map((e) => (
-                <li key={e}>{e}</li>
-              ))}
-            </ul>
-          ),
-        });
-        return;
+    if (keySerde) {
+      const selectedKeySerde = serdes.key?.find((k) => k.name === keySerde);
+      errors = validateBySchema(key, selectedKeySerde?.schema, 'key');
+    }
+
+    if (valueSerde) {
+      const selectedValue = serdes.value?.find((v) => v.name === valueSerde);
+      errors = [
+        ...errors,
+        ...validateBySchema(content, selectedValue?.schema, 'content'),
+      ];
+    }
+
+    let parsedHeaders;
+    if (headers) {
+      try {
+        parsedHeaders = JSON.parse(headers);
+      } catch (error) {
+        errors.push('Wrong header format');
       }
-      const headers = data.headers ? JSON.parse(data.headers) : undefined;
-      await sendMessage.mutateAsync({
-        key: !key ? null : key,
-        content: !content ? null : content,
-        headers,
-        partition: !partition ? 0 : partition,
+    }
+
+    if (errors.length > 0) {
+      showAlert('error', {
+        id: `${clusterName}-${topicName}-createTopicMessageError`,
+        title: 'Validation Error',
+        message: (
+          <ul>
+            {errors.map((e) => (
+              <li key={e}>{e}</li>
+            ))}
+          </ul>
+        ),
       });
-      onSubmit();
+      return;
     }
+
+    await sendMessage.mutateAsync({
+      key: key || null,
+      content: content || null,
+      headers: parsedHeaders,
+      partition: partition || 0,
+      keySerde,
+      valueSerde,
+    });
+    onSubmit();
   };
 
   return (
@@ -137,16 +122,51 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
             <Controller
               control={control}
               name="partition"
-              defaultValue={selectPartitionOptions[0].value}
-              render={({ field: { name, onChange } }) => (
+              render={({ field: { name, onChange, value } }) => (
                 <Select
                   id="selectPartitionOptions"
                   aria-labelledby="selectPartitionOptions"
                   name={name}
                   onChange={onChange}
-                  minWidth="100px"
-                  options={selectPartitionOptions}
-                  value={selectPartitionOptions[0].value}
+                  minWidth="100%"
+                  options={partitionOptions}
+                  value={value}
+                />
+              )}
+            />
+          </S.Column>
+          <S.Column>
+            <InputLabel>Key Serde</InputLabel>
+            <Controller
+              control={control}
+              name="keySerde"
+              render={({ field: { name, onChange, value } }) => (
+                <Select
+                  id="selectKeySerdeOptions"
+                  aria-labelledby="selectKeySerdeOptions"
+                  name={name}
+                  onChange={onChange}
+                  minWidth="100%"
+                  options={getSerdeOptions(serdes.key || [])}
+                  value={value}
+                />
+              )}
+            />
+          </S.Column>
+          <S.Column>
+            <InputLabel>Content Serde</InputLabel>
+            <Controller
+              control={control}
+              name="valueSerde"
+              render={({ field: { name, onChange, value } }) => (
+                <Select
+                  id="selectValueSerdeOptions"
+                  aria-labelledby="selectValueSerdeOptions"
+                  name={name}
+                  onChange={onChange}
+                  minWidth="100%"
+                  options={getSerdeOptions(serdes.value || [])}
+                  value={value}
                 />
               )}
             />
@@ -207,7 +227,7 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
           buttonSize="M"
           buttonType="primary"
           type="submit"
-          disabled={!isDirty || isSubmitting}
+          disabled={isSubmitting}
         >
           Produce Message
         </Button>
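
For reviewers, a hypothetical example of the arguments submit() now passes to sendMessage.mutateAsync once validation succeeds; keySerde and valueSerde are the new CreateTopicMessage fields from the spec change above:

const exampleMessage = {
  key: '42', // null when the key editor is left empty
  content: '{"f1": 1}', // null when the content editor is left empty
  headers: { traceId: 'abc' }, // parsed from the JSON headers editor
  partition: 0, // falls back to 0 when unset
  keySerde: 'Int32',
  valueSerde: 'String',
};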

+ 24 - 26
kafka-ui-react-app/src/components/Topics/Topic/SendMessage/__test__/SendMessage.spec.tsx

@@ -1,16 +1,14 @@
 import React from 'react';
 import SendMessage from 'components/Topics/Topic/SendMessage/SendMessage';
-import { act, screen } from '@testing-library/react';
+import { act, screen, waitFor } from '@testing-library/react';
 import userEvent from '@testing-library/user-event';
 import { render, WithRoute } from 'lib/testHelpers';
 import { clusterTopicPath } from 'lib/paths';
-import validateMessage from 'components/Topics/Topic/SendMessage/validateMessage';
-import { externalTopicPayload, topicMessageSchema } from 'lib/fixtures/topics';
-import {
-  useSendMessage,
-  useTopicDetails,
-  useTopicMessageSchema,
-} from 'lib/hooks/api/topics';
+import { validateBySchema } from 'components/Topics/Topic/SendMessage/utils';
+import { externalTopicPayload } from 'lib/fixtures/topics';
+import { useSendMessage, useTopicDetails } from 'lib/hooks/api/topics';
+import { useSerdes } from 'lib/hooks/api/topicMessages';
+import { serdesPayload } from 'lib/fixtures/topicMessages';
 
 import Mock = jest.Mock;
 
@@ -23,9 +21,10 @@ jest.mock('json-schema-faker', () => ({
   option: jest.fn(),
 }));
 
-jest.mock('components/Topics/Topic/SendMessage/validateMessage', () =>
-  jest.fn()
-);
+jest.mock('components/Topics/Topic/SendMessage/utils', () => ({
+  ...jest.requireActual('components/Topics/Topic/SendMessage/utils'),
+  validateBySchema: jest.fn(),
+}));
 
 jest.mock('lib/errorHandling', () => ({
   ...jest.requireActual('lib/errorHandling'),
@@ -34,10 +33,13 @@ jest.mock('lib/errorHandling', () => ({
 
 jest.mock('lib/hooks/api/topics', () => ({
   useTopicDetails: jest.fn(),
-  useTopicMessageSchema: jest.fn(),
   useSendMessage: jest.fn(),
 }));
 
+jest.mock('lib/hooks/api/topicMessages', () => ({
+  useSerdes: jest.fn(),
+}));
+
 const clusterName = 'testCluster';
 const topicName = externalTopicPayload.name;
 
@@ -58,14 +60,18 @@ const renderComponent = async () => {
 const renderAndSubmitData = async (error: string[] = []) => {
   await renderComponent();
   await act(() => {
-    userEvent.click(screen.getByRole('listbox'));
+    userEvent.click(screen.getAllByRole('listbox')[0]);
   });
   await act(() => {
     userEvent.click(screen.getAllByRole('option')[1]);
   });
+  (validateBySchema as Mock).mockImplementation(() => error);
+  const submitButton = screen.getByRole('button', {
+    name: 'Produce Message',
+  });
+  await waitFor(() => expect(submitButton).toBeEnabled());
   await act(() => {
-    (validateMessage as Mock).mockImplementation(() => error);
-    userEvent.click(screen.getByText('Produce Message'));
+    userEvent.click(submitButton);
   });
 };
 
@@ -74,15 +80,12 @@ describe('SendMessage', () => {
     (useTopicDetails as jest.Mock).mockImplementation(() => ({
       data: externalTopicPayload,
     }));
+    (useSerdes as jest.Mock).mockImplementation(() => ({
+      data: serdesPayload,
+    }));
   });
 
   describe('when schema is fetched', () => {
-    beforeEach(() => {
-      (useTopicMessageSchema as jest.Mock).mockImplementation(() => ({
-        data: topicMessageSchema,
-      }));
-    });
-
     it('calls sendTopicMessage on submit', async () => {
       const sendTopicMessageMock = jest.fn();
       (useSendMessage as jest.Mock).mockImplementation(() => ({
@@ -105,11 +108,6 @@ describe('SendMessage', () => {
   });
 
   describe('when schema is empty', () => {
-    beforeEach(() => {
-      (useTopicMessageSchema as jest.Mock).mockImplementation(() => ({
-        data: undefined,
-      }));
-    });
     it('renders if schema is not defined', async () => {
       await renderComponent();
       expect(screen.getAllByRole('textbox')[0].nodeValue).toBeNull();

+ 85 - 0
kafka-ui-react-app/src/components/Topics/Topic/SendMessage/__test__/utils.spec.ts

@@ -0,0 +1,85 @@
+import { serdesPayload } from 'lib/fixtures/topicMessages';
+import {
+  getDefaultValues,
+  getSerdeOptions,
+  validateBySchema,
+} from 'components/Topics/Topic/SendMessage/utils';
+import { SerdeDescription } from 'generated-sources';
+
+describe('SendMessage utils', () => {
+  describe('getDefaultValues', () => {
+    it('should return default values', () => {
+      const actual = getDefaultValues(serdesPayload);
+      expect(actual.keySerde).toEqual(
+        serdesPayload.key?.find((item) => item.preferred)?.name
+      );
+      expect(actual.key).not.toBeUndefined();
+      expect(actual.valueSerde).toEqual(
+        serdesPayload.value?.find((item) => item.preferred)?.name
+      );
+      expect(actual.content).not.toBeUndefined();
+    });
+    it('works even with empty serdes', () => {
+      const actual = getDefaultValues({});
+      expect(actual.keySerde).toBeUndefined();
+      expect(actual.key).toBeUndefined();
+      expect(actual.valueSerde).toBeUndefined();
+      expect(actual.content).toBeUndefined();
+    });
+  });
+  describe('getSerdeOptions', () => {
+    it('should return options', () => {
+      const options = getSerdeOptions(serdesPayload.key as SerdeDescription[]);
+      expect(options).toHaveLength(2);
+    });
+    it('should skip options without label', () => {
+      const keySerdes = serdesPayload.key as SerdeDescription[];
+      const payload = [{ ...keySerdes[0], name: undefined }, keySerdes[1]];
+      const options = getSerdeOptions(payload);
+      expect(options).toHaveLength(1);
+    });
+  });
+  describe('validateBySchema', () => {
+    const defaultSchema = '{"type": "integer", "minimum" : 1, "maximum" : 2 }';
+
+    it('should return empty error data if value is empty', () => {
+      expect(validateBySchema('', defaultSchema, 'key')).toHaveLength(0);
+    });
+
+    it('should return empty error data if schema is empty', () => {
+      expect(validateBySchema('My Value', '', 'key')).toHaveLength(0);
+    });
+
+    it('should return parsing error data if schema is not parsed with type of key', () => {
+      const schema = '{invalid';
+      expect(validateBySchema('My Value', schema, 'key')).toEqual([
+        `Error in parsing the "key" field schema`,
+      ]);
+    });
+    it('should return parsing error data if schema is not parsed with type of content', () => {
+      const schema = '{invalid';
+      expect(validateBySchema('My Value', schema, 'content')).toEqual([
+        `Error in parsing the "content" field schema`,
+      ]);
+    });
+    it('should return empty error data if schema type is string', () => {
+      const schema = `{"type": "string"}`;
+      expect(validateBySchema('My Value', schema, 'key')).toHaveLength(0);
+    });
+    it('returns errors on invalid input data', () => {
+      expect(validateBySchema('0', defaultSchema, 'key')).toEqual([
+        'Key/minimum - must be >= 1',
+      ]);
+    });
+    it('returns error on broken key value', () => {
+      expect(validateBySchema('{120', defaultSchema, 'key')).toEqual([
+        'Error in parsing the "key" field value',
+      ]);
+    });
+    it('returns error on broken content value', () => {
+      expect(validateBySchema('{120', defaultSchema, 'content')).toEqual([
+        'Error in parsing the "content" field value',
+      ]);
+    });
+  });
+});

+ 0 - 89
kafka-ui-react-app/src/components/Topics/Topic/SendMessage/__test__/validateMessage.spec.ts

@@ -1,89 +0,0 @@
-import validateMessage from 'components/Topics/Topic/SendMessage/validateMessage';
-import { topicMessageSchema } from 'lib/fixtures/topics';
-import cloneDeep from 'lodash/cloneDeep';
-
-describe('validateMessage', () => {
-  const defaultValidKey = `{"f1": 32, "f2": "multi-state", "schema": "Bedfordshire violet SAS"}`;
-  const defaultValidContent = `{"f1": 21128, "f2": "Health Berkshire", "schema": "Dynamic"}`;
-
-  it('should return empty error data if value is empty', () => {
-    const key = ``;
-    const content = ``;
-    expect(validateMessage(key, content, topicMessageSchema)).toEqual([]);
-  });
-
-  it('should return empty error data if schema is empty', () => {
-    const key = `{"f1": 32, "f2": "multi-state", "schema": "Bedfordshire violet SAS"}`;
-    const content = `{"f1": 21128, "f2": "Health Berkshire", "schema": "Dynamic"}`;
-    const schema = cloneDeep(topicMessageSchema);
-    schema.key.schema = '';
-    schema.value.schema = '';
-    expect(validateMessage(key, content, schema)).toEqual([]);
-  });
-
-  it('should return parsing error data if schema is not parsed with type of key', () => {
-    const schema = cloneDeep(topicMessageSchema);
-    schema.key.schema = '{invalid';
-    expect(
-      validateMessage(defaultValidKey, defaultValidContent, schema)
-    ).toEqual([`Error in parsing the "key" field schema`]);
-  });
-
-  it('should return parsing error data if schema is not parsed with type of value', () => {
-    const schema = cloneDeep(topicMessageSchema);
-    schema.value.schema = '{invalid';
-    expect(
-      validateMessage(defaultValidKey, defaultValidContent, schema)
-    ).toEqual([`Error in parsing the "content" field schema`]);
-  });
-
-  it('should return empty error data if schema type is string', () => {
-    const schema = cloneDeep(topicMessageSchema);
-    schema.key.schema = `{"type": "string"}`;
-    schema.value.schema = `{"type": "string"}`;
-    expect(
-      validateMessage(defaultValidKey, defaultValidContent, schema)
-    ).toEqual([]);
-  });
-
-  it('should return  error data if compile Ajv data throws an error', () => {
-    expect(
-      validateMessage(defaultValidKey, defaultValidContent, topicMessageSchema)
-    ).toEqual([]);
-  });
-
-  it('returns no errors on correct input data', () => {
-    expect(
-      validateMessage(
-        defaultValidContent,
-        defaultValidContent,
-        topicMessageSchema
-      )
-    ).toEqual([]);
-  });
-
-  it('returns errors on invalid input data', () => {
-    const key = `{"f1": "32", "f2": "multi-state", "schema": "Bedfordshire violet SAS"}`;
-    const content = `{"f1": "21128", "f2": "Health Berkshire", "schema": "Dynamic"}`;
-    expect(validateMessage(key, content, topicMessageSchema)).toEqual([
-      'Key/properties/f1/type - must be integer',
-      'Content/properties/f1/type - must be integer',
-    ]);
-  });
-
-  it('returns error on broken key value', () => {
-    const key = `{"f1": "32", "f2": "multi-state", "schema": "Bedfordshire violet SAS"`;
-    const content = `{"f1": 21128, "f2": "Health Berkshire", "schema": "Dynamic"}`;
-    expect(validateMessage(key, content, topicMessageSchema)).toEqual([
-      'Error in parsing the "key" field value',
-    ]);
-  });
-
-  it('returns error on broken content value', () => {
-    const key = `{"f1": 32, "f2": "multi-state", "schema": "Bedfordshire violet SAS"}`;
-    const content = `{"f1": 21128, "f2": "Health Berkshire", "schema": "Dynamic"`;
-    expect(validateMessage(key, content, topicMessageSchema)).toEqual([
-      'Error in parsing the "content" field value',
-    ]);
-  });
-});

+ 97 - 0
kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts

@@ -0,0 +1,97 @@
+import {
+  Partition,
+  SerdeDescription,
+  TopicSerdeSuggestion,
+} from 'generated-sources';
+import jsf from 'json-schema-faker';
+import { compact } from 'lodash';
+import Ajv, { DefinedError } from 'ajv/dist/2020';
+import upperFirst from 'lodash/upperFirst';
+
+jsf.option('fillProperties', false);
+jsf.option('alwaysFakeOptionals', true);
+
+const generateValueFromSchema = (preffered?: SerdeDescription) => {
+  if (!preffered?.schema) {
+    return undefined;
+  }
+  const parsedSchema = JSON.parse(preffered.schema);
+  const value = jsf.generate(parsedSchema);
+  return JSON.stringify(value);
+};
+
+export const getPrefferedDescription = (serdes: SerdeDescription[]) =>
+  serdes.find((s) => s.preferred);
+
+export const getDefaultValues = (serdes: TopicSerdeSuggestion) => {
+  const keySerde = getPrefferedDescription(serdes.key || []);
+  const valueSerde = getPrefferedDescription(serdes.value || []);
+
+  return {
+    key: generateValueFromSchema(keySerde),
+    content: generateValueFromSchema(valueSerde),
+    headers: undefined,
+    partition: undefined,
+    keySerde: keySerde?.name,
+    valueSerde: valueSerde?.name,
+  };
+};
+
+export const getPartitionOptions = (partitions: Partition[]) =>
+  partitions.map(({ partition }) => ({
+    label: `Partition #${partition}`,
+    value: partition,
+  }));
+
+export const getSerdeOptions = (items: SerdeDescription[]) => {
+  const options = items.map(({ name }) => {
+    if (!name) return undefined;
+    return { label: name, value: name };
+  });
+
+  return compact(options);
+};
+
+export const validateBySchema = (
+  value: string,
+  schema: string | undefined,
+  type: 'key' | 'content'
+) => {
+  let errors: string[] = [];
+
+  if (!value || !schema) {
+    return errors;
+  }
+
+  let parsedSchema;
+  let parsedValue;
+
+  try {
+    parsedSchema = JSON.parse(schema);
+  } catch (e) {
+    return [`Error in parsing the "${type}" field schema`];
+  }
+  if (parsedSchema.type === 'string') {
+    return [];
+  }
+  try {
+    parsedValue = JSON.parse(value);
+  } catch (e) {
+    return [`Error in parsing the "${type}" field value`];
+  }
+  try {
+    const validate = new Ajv().compile(parsedSchema);
+    validate(parsedValue);
+    if (validate.errors) {
+      errors = validate.errors.map(
+        ({ schemaPath, message }) =>
+          `${schemaPath.replace('#', upperFirst(type))} - ${message}`
+      );
+    }
+  } catch (e) {
+    const err = e as DefinedError;
+    return [`${upperFirst(type)} ${err.message}`];
+  }
+
+  return errors;
+};
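
Behaviour of validateBySchema, illustrated with the integer schema used in utils.spec.ts above:

const schema = '{"type": "integer", "minimum" : 1, "maximum" : 2 }';

validateBySchema('1', schema, 'key'); // [] - valid against the schema
validateBySchema('0', schema, 'key'); // ['Key/minimum - must be >= 1']
validateBySchema('{120', schema, 'key'); // ['Error in parsing the "key" field value']
validateBySchema('', schema, 'key'); // [] - an empty value skips validation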

+ 0 - 58
kafka-ui-react-app/src/components/Topics/Topic/SendMessage/validateMessage.ts

@@ -1,58 +0,0 @@
-import { TopicMessageSchema } from 'generated-sources';
-import Ajv, { DefinedError } from 'ajv/dist/2020';
-import upperFirst from 'lodash/upperFirst';
-
-const validateBySchema = (
-  value: string,
-  schema: string | undefined,
-  type: 'key' | 'content'
-) => {
-  let errors: string[] = [];
-
-  if (!value || !schema) {
-    return errors;
-  }
-
-  let parcedSchema;
-  let parsedValue;
-
-  try {
-    parcedSchema = JSON.parse(schema);
-  } catch (e) {
-    return [`Error in parsing the "${type}" field schema`];
-  }
-  if (parcedSchema.type === 'string') {
-    return [];
-  }
-  try {
-    parsedValue = JSON.parse(value);
-  } catch (e) {
-    return [`Error in parsing the "${type}" field value`];
-  }
-  try {
-    const validate = new Ajv().compile(parcedSchema);
-    validate(parsedValue);
-    if (validate.errors) {
-      errors = validate.errors.map(
-        ({ schemaPath, message }) =>
-          `${schemaPath.replace('#', upperFirst(type))} - ${message}`
-      );
-    }
-  } catch (e) {
-    const err = e as DefinedError;
-    return [`${upperFirst(type)} ${err.message}`];
-  }
-
-  return errors;
-};
-
-const validateMessage = (
-  key: string,
-  content: string,
-  messageSchema: TopicMessageSchema | undefined
-): string[] => [
-  ...validateBySchema(key, messageSchema?.key?.schema, 'key'),
-  ...validateBySchema(content, messageSchema?.value?.schema, 'content'),
-];
-
-export default validateMessage;

+ 6 - 3
kafka-ui-react-app/src/components/Topics/Topic/Topic.tsx

@@ -35,7 +35,8 @@ import SlidingSidebar from 'components/common/SlidingSidebar';
 import useBoolean from 'lib/hooks/useBoolean';
 
 import Messages from './Messages/Messages';
-import MessagesV2 from './MessagesV2/Messages';
+// Messages v2
+import MessagesContainer from './MessagesV2/MessagesContainer';
 import Overview from './Overview/Overview';
 import Settings from './Settings/Settings';
 import TopicConsumerGroups from './ConsumerGroups/TopicConsumerGroups';
@@ -185,7 +186,7 @@ const Topic: React.FC = () => {
             path={clusterTopicMessagesRelativePath}
             element={<Messages />}
           />
-          <Route path="v2" element={<MessagesV2 />} />
+          <Route path="v2" element={<MessagesContainer />} />
           <Route
             path={clusterTopicSettingsRelativePath}
             element={<Settings />}
@@ -206,7 +207,9 @@ const Topic: React.FC = () => {
         onClose={closeSidebar}
         title="Produce Message"
       >
-        <SendMessage onSubmit={closeSidebar} />
+        <Suspense fallback={<PageLoader />}>
+          <SendMessage onSubmit={closeSidebar} />
+        </Suspense>
       </SlidingSidebar>
     </>
   );

+ 1 - 1
kafka-ui-react-app/src/components/common/table/__tests__/TableHeaderCell.spec.tsx

@@ -23,7 +23,7 @@ describe('TableHeaderCell', () => {
       <table>
         <thead>
           <tr>
-            <TableHeaderCell {...props} />;
+            <TableHeaderCell {...props} />
           </tr>
         </thead>
       </table>

+ 0 - 1
kafka-ui-react-app/src/components/contexts/TopicMessagesContext.ts

@@ -3,7 +3,6 @@ import { SeekDirection } from 'generated-sources';
 
 export interface ContextProps {
   seekDirection: SeekDirection;
-  searchParams: URLSearchParams;
   changeSeekDirection(val: string): void;
   isLive: boolean;
 }

+ 38 - 0
kafka-ui-react-app/src/lib/fixtures/topicMessages.ts

@@ -0,0 +1,38 @@
+import { TopicSerdeSuggestion } from 'generated-sources';
+
+export const serdesPayload: TopicSerdeSuggestion = {
+  key: [
+    {
+      name: 'String',
+      description: undefined,
+      preferred: false,
+      schema: undefined,
+      additionalProperties: undefined,
+    },
+    {
+      name: 'Int32',
+      description: undefined,
+      preferred: true,
+      schema:
+        '{   "type" : "integer",   "minimum" : -2147483648,   "maximum" : 2147483647 }',
+      additionalProperties: {},
+    },
+  ],
+  value: [
+    {
+      name: 'String',
+      description: undefined,
+      preferred: false,
+      schema: undefined,
+      additionalProperties: undefined,
+    },
+    {
+      name: 'Int64',
+      description: undefined,
+      preferred: true,
+      schema:
+        '{   "type" : "integer",   "minimum" : -9223372036854775808,   "maximum" : 9223372036854775807 }',
+      additionalProperties: {},
+    },
+  ],
+};
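Editor's note: as a cross-check, feeding this fixture through the helpers added earlier in this diff yields (hand-evaluated):

getSerdeOptions(serdesPayload.key || []);
// => [{ label: 'String', value: 'String' }, { label: 'Int32', value: 'Int32' }]
getDefaultValues(serdesPayload).keySerde; // => 'Int32'  (the key entry marked preferred: true)
getDefaultValues(serdesPayload).valueSerde; // => 'Int64'  (the preferred value entry)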

+ 0 - 50
kafka-ui-react-app/src/lib/fixtures/topics.ts

@@ -4,7 +4,6 @@ import {
   ConsumerGroupState,
   Topic,
   TopicConfig,
-  MessageSchemaSourceEnum,
   TopicAnalysis,
 } from 'generated-sources';
 
@@ -161,55 +160,6 @@ export const topicConfigPayload: TopicConfig[] = [
   },
 ];
 
-export const topicMessageSchema = {
-  key: {
-    name: 'key',
-    source: MessageSchemaSourceEnum.SCHEMA_REGISTRY,
-    schema: `{
-  "$schema": "https://json-schema.org/draft/2020-12/schema",
-  "$id": "http://example.com/myURI.schema.json",
-  "title": "TestRecord",
-  "type": "object",
-  "additionalProperties": false,
-  "properties": {
-    "f1": {
-      "type": "integer"
-    },
-    "f2": {
-      "type": "string"
-    },
-    "schema": {
-      "type": "string"
-    }
-  }
-}
-`,
-  },
-  value: {
-    name: 'value',
-    source: MessageSchemaSourceEnum.SCHEMA_REGISTRY,
-    schema: `{
-  "$schema": "https://json-schema.org/draft/2020-12/schema",
-  "$id": "http://example.com/myURI1.schema.json",
-  "title": "TestRecord",
-  "type": "object",
-  "additionalProperties": false,
-  "properties": {
-    "f1": {
-      "type": "integer"
-    },
-    "f2": {
-      "type": "string"
-    },
-    "schema": {
-      "type": "string"
-    }
-  }
-}
-`,
-  },
-};
-
 const topicStatsSize = {
   sum: 0,
   avg: 0,

+ 36 - 0
kafka-ui-react-app/src/lib/hooks/api/__tests__/topicMessages.spec.ts

@@ -0,0 +1,36 @@
+import { waitFor } from '@testing-library/react';
+import { renderQueryHook } from 'lib/testHelpers';
+import * as hooks from 'lib/hooks/api/topicMessages';
+import fetchMock from 'fetch-mock';
+import { UseQueryResult } from '@tanstack/react-query';
+import { SerdeUsage } from 'generated-sources';
+
+const clusterName = 'test-cluster';
+const topicName = 'test-topic';
+
+const expectQueryWorks = async (
+  mock: fetchMock.FetchMockStatic,
+  result: { current: UseQueryResult<unknown, unknown> }
+) => {
+  await waitFor(() => expect(result.current.isFetched).toBeTruthy());
+  expect(mock.calls()).toHaveLength(1);
+  expect(result.current.data).toBeDefined();
+};
+
+jest.mock('lib/errorHandling', () => ({
+  ...jest.requireActual('lib/errorHandling'),
+  showServerError: jest.fn(),
+}));
+
+describe('Topic Messages hooks', () => {
+  beforeEach(() => fetchMock.restore());
+  it('handles useSerdes', async () => {
+    const path = `/api/clusters/${clusterName}/topic/${topicName}/serdes?use=SERIALIZE`;
+
+    const mock = fetchMock.getOnce(path, {});
+    const { result } = renderQueryHook(() =>
+      hooks.useSerdes({ clusterName, topicName, use: SerdeUsage.SERIALIZE })
+    );
+    await expectQueryWorks(mock, result);
+  });
+});

+ 0 - 7
kafka-ui-react-app/src/lib/hooks/api/__tests__/topics.spec.ts

@@ -55,13 +55,6 @@ describe('Topics hooks', () => {
     );
     await expectQueryWorks(mock, result);
   });
-  it('handles useTopicMessageSchema', async () => {
-    const mock = fetchMock.getOnce(`${topicPath}/messages/schema`, {});
-    const { result } = renderQueryHook(() =>
-      hooks.useTopicMessageSchema(topicParams)
-    );
-    await expectQueryWorks(mock, result);
-  });
   describe('useTopicAnalysis', () => {
     it('handles useTopicAnalysis', async () => {
       const mock = fetchMock.getOnce(`${topicPath}/analysis`, {});

+ 19 - 0
kafka-ui-react-app/src/lib/hooks/api/topicMessages.tsx

@@ -3,6 +3,7 @@ import { fetchEventSource } from '@microsoft/fetch-event-source';
 import { BASE_PARAMS, MESSAGES_PER_PAGE } from 'lib/constants';
 import { ClusterName } from 'redux/interfaces';
 import {
+  GetSerdesRequest,
   SeekDirection,
   SeekType,
   TopicMessage,
@@ -13,6 +14,8 @@ import {
 import { showServerError } from 'lib/errorHandling';
 import toast from 'react-hot-toast';
 import { StopLoading } from 'components/Topics/Topic/MessagesV2/FiltersBar/FiltersBar.styled';
+import { useQuery } from '@tanstack/react-query';
+import { messagesApiClient } from 'lib/api';
 
 interface UseTopicMessagesProps {
   clusterName: ClusterName;
@@ -53,6 +56,8 @@ export const useTopicMessages = ({
         limit,
         seekTo: seekTo.replaceAll('-', '::').replaceAll('.', ','),
         q: searchParams.get('q') || '',
+        keySerde: searchParams.get('keySerde') || '',
+        valueSerde: searchParams.get('valueSerde') || '',
       });
 
       switch (mode) {
@@ -175,3 +180,17 @@ export const useTopicMessages = ({
     isFetching,
   };
 };
+
+export function useSerdes(props: GetSerdesRequest) {
+  const { clusterName, topicName, use } = props;
+  return useQuery(
+    ['clusters', clusterName, 'topics', topicName, 'serdes', use],
+    () => messagesApiClient.getSerdes(props),
+    {
+      refetchOnMount: false,
+      refetchOnWindowFocus: false,
+      refetchOnReconnect: false,
+      refetchInterval: false,
+    }
+  );
+}
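Editor's note: a hypothetical call site for the new useSerdes hook. The cluster and topic names are placeholders; the actual wiring into SendMessage lives elsewhere in this diff.

const { data: serdes } = useSerdes({
  clusterName: 'local',
  topicName: 'orders',
  use: SerdeUsage.SERIALIZE,
});
// serdes?.key and serdes?.value hold SerdeDescription[] lists,
// ready to be passed to getSerdeOptions() for the key/value serde selects.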

+ 1 - 8
kafka-ui-react-app/src/lib/hooks/api/topics.ts

@@ -215,11 +215,6 @@ export function useRecreateTopic(props: GetTopicDetailsRequest) {
   });
 }
 
-export function useTopicMessageSchema(props: GetTopicDetailsRequest) {
-  return useQuery(topicKeys.schema(props), () =>
-    messagesApi.getTopicSchema(props)
-  );
-}
 export function useSendMessage(props: GetTopicDetailsRequest) {
   const client = useQueryClient();
   return useMutation(
@@ -233,9 +228,7 @@ export function useSendMessage(props: GetTopicDetailsRequest) {
         client.invalidateQueries(topicKeys.all(props.clusterName));
       },
       onError: (e) => {
-        showServerError(e as Response, {
-          message: `Error in sending a message to ${props.topicName}`,
-        });
+        showServerError(e as Response);
       },
     }
   );

+ 0 - 53
kafka-ui-react-app/src/lib/hooks/useSearch.ts

@@ -1,53 +0,0 @@
-import { useCallback, useEffect, useMemo } from 'react';
-import { useLocation, useNavigate } from 'react-router-dom';
-
-const SEARCH_QUERY_ARG = 'q';
-
-// meant for use with <Search> component
-// returns value of Q search param (?q='something') and callback to change it
-const useSearch = (initValue = ''): [string, (value: string) => void] => {
-  const navigate = useNavigate();
-  const { search } = useLocation();
-  const queryParams = useMemo(() => new URLSearchParams(search), [search]);
-  const q = useMemo(
-    () => queryParams.get(SEARCH_QUERY_ARG)?.trim(),
-    [queryParams]
-  );
-  const page = useMemo(() => queryParams.get('page')?.trim(), [queryParams]);
-
-  // set intial value
-  useEffect(() => {
-    if (initValue.trim() !== '' && !q) {
-      queryParams.set(SEARCH_QUERY_ARG, initValue.trim());
-      navigate({ search: queryParams.toString() });
-    }
-  }, [navigate, initValue, q, queryParams]);
-
-  const handleChange = useCallback(
-    (value: string) => {
-      const trimmedValue = value.trim();
-      if (trimmedValue !== q) {
-        if (trimmedValue) {
-          queryParams.set(SEARCH_QUERY_ARG, trimmedValue);
-        } else {
-          queryParams.delete(SEARCH_QUERY_ARG);
-        }
-        // If we were on page 3 we can't determine if new search results have 3 pages - so we always reset page
-        if (page) {
-          queryParams.delete('page');
-        }
-        navigate(
-          {
-            search: queryParams.toString(),
-          },
-          { replace: true }
-        );
-      }
-    },
-    [q, page, navigate, queryParams]
-  );
-
-  return [q || initValue.trim() || '', handleChange];
-};
-
-export default useSearch;

Some files were not shown because too many files changed in this diff