fix for string literals should not be duplicated

Shubhadeep Das 1 year ago
commit c3d18a5c0d
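
The change is the standard "extract constant" refactoring for SonarQube rule java:S1192 ("String literals should not be duplicated"): each repeated literal becomes a private static final String, and every usage references the constant. A minimal sketch of the pattern, with a hypothetical params() call site for illustration:

    // Before: Map.of("brokerId", id) repeated at every call site.
    // After: one constant; a typo now fails at compile time instead of
    // silently introducing a second, slightly different key.
    class ExtractConstantSketch {
      private static final String BROKER_ID = "brokerId";

      java.util.Map<String, Integer> params(Integer id) {
        return java.util.Map.of(BROKER_ID, id);   // hypothetical call site
      }
    }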

+ 5 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/BrokersController.java

@@ -28,6 +28,8 @@ import reactor.core.publisher.Mono;
 @RequiredArgsConstructor
 @Slf4j
 public class BrokersController extends AbstractController implements BrokersApi {
+  private static final String BROKER_ID = "brokerId";
+
   private final BrokerService brokerService;
   private final ClusterMapper clusterMapper;
 
@@ -94,7 +96,7 @@ public class BrokersController extends AbstractController implements BrokersApi
         .cluster(clusterName)
         .clusterConfigActions(ClusterConfigAction.VIEW)
         .operationName("getBrokerConfig")
-        .operationParams(Map.of("brokerId", id))
+        .operationParams(Map.of(BROKER_ID, id))
         .build();
 
     return accessControlService.validateAccess(context).thenReturn(
@@ -113,7 +115,7 @@ public class BrokersController extends AbstractController implements BrokersApi
         .cluster(clusterName)
         .clusterConfigActions(ClusterConfigAction.VIEW, ClusterConfigAction.EDIT)
         .operationName("updateBrokerTopicPartitionLogDir")
-        .operationParams(Map.of("brokerId", id))
+        .operationParams(Map.of(BROKER_ID, id))
         .build();
 
     return accessControlService.validateAccess(context).then(
@@ -133,7 +135,7 @@ public class BrokersController extends AbstractController implements BrokersApi
         .cluster(clusterName)
         .clusterConfigActions(ClusterConfigAction.VIEW, ClusterConfigAction.EDIT)
         .operationName("updateBrokerConfigByName")
-        .operationParams(Map.of("brokerId", id))
+        .operationParams(Map.of(BROKER_ID, id))
         .build();
 
     return accessControlService.validateAccess(context).then(

+ 6 - 5
kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java

@@ -38,6 +38,7 @@ import reactor.core.publisher.Mono;
 public class KafkaConnectController extends AbstractController implements KafkaConnectApi {
   private static final Set<ConnectorActionDTO> RESTART_ACTIONS
       = Set.of(RESTART, RESTART_FAILED_TASKS, RESTART_ALL_TASKS);
+  private static final String CONNECTOR_NAME = "connectorName";
 
   private final KafkaConnectService kafkaConnectService;
   private final AccessControlService accessControlService;
@@ -116,7 +117,7 @@ public class KafkaConnectController extends AbstractController implements KafkaC
         .connect(connectName)
         .connectActions(ConnectAction.VIEW, ConnectAction.EDIT)
         .operationName("deleteConnector")
-        .operationParams(Map.of("connectorName", connectName))
+        .operationParams(Map.of(CONNECTOR_NAME, connectName))
         .build();
 
     return accessControlService.validateAccess(context).then(
@@ -184,7 +185,7 @@ public class KafkaConnectController extends AbstractController implements KafkaC
         .connect(connectName)
         .connectActions(ConnectAction.VIEW, ConnectAction.EDIT)
         .operationName("setConnectorConfig")
-        .operationParams(Map.of("connectorName", connectorName))
+        .operationParams(Map.of(CONNECTOR_NAME, connectorName))
         .build();
 
     return accessControlService.validateAccess(context).then(
@@ -211,7 +212,7 @@ public class KafkaConnectController extends AbstractController implements KafkaC
         .connect(connectName)
         .connectActions(connectActions)
         .operationName("updateConnectorState")
-        .operationParams(Map.of("connectorName", connectorName))
+        .operationParams(Map.of(CONNECTOR_NAME, connectorName))
         .build();
 
     return accessControlService.validateAccess(context).then(
@@ -231,7 +232,7 @@ public class KafkaConnectController extends AbstractController implements KafkaC
         .connect(connectName)
         .connectActions(ConnectAction.VIEW)
         .operationName("getConnectorTasks")
-        .operationParams(Map.of("connectorName", connectorName))
+        .operationParams(Map.of(CONNECTOR_NAME, connectorName))
         .build();
 
     return accessControlService.validateAccess(context).thenReturn(
@@ -251,7 +252,7 @@ public class KafkaConnectController extends AbstractController implements KafkaC
         .connect(connectName)
         .connectActions(ConnectAction.VIEW, ConnectAction.RESTART)
         .operationName("restartConnectorTask")
-        .operationParams(Map.of("connectorName", connectorName))
+        .operationParams(Map.of(CONNECTOR_NAME, connectorName))
         .build();
 
     return accessControlService.validateAccess(context).then(

+ 11 - 9
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/AccessContext.java

@@ -52,6 +52,8 @@ public class AccessContext {
   }
 
   public static final class AccessContextBuilder {
+    private static final String ACTIONS_NOT_PRESENT = "actions not present";
+
     private Collection<ApplicationConfigAction> applicationConfigActions = Collections.emptySet();
     private String cluster;
     private Collection<ClusterConfigAction> clusterConfigActions = Collections.emptySet();
@@ -75,7 +77,7 @@ public class AccessContext {
     }
 
     public AccessContextBuilder applicationConfigActions(ApplicationConfigAction... actions) {
-      Assert.isTrue(actions.length > 0, "actions not present");
+      Assert.isTrue(actions.length > 0, ACTIONS_NOT_PRESENT);
       this.applicationConfigActions = List.of(actions);
       return this;
     }
@@ -86,7 +88,7 @@ public class AccessContext {
     }
 
     public AccessContextBuilder clusterConfigActions(ClusterConfigAction... actions) {
-      Assert.isTrue(actions.length > 0, "actions not present");
+      Assert.isTrue(actions.length > 0, ACTIONS_NOT_PRESENT);
       this.clusterConfigActions = List.of(actions);
       return this;
     }
@@ -97,7 +99,7 @@ public class AccessContext {
     }
 
     public AccessContextBuilder topicActions(TopicAction... actions) {
-      Assert.isTrue(actions.length > 0, "actions not present");
+      Assert.isTrue(actions.length > 0, ACTIONS_NOT_PRESENT);
       this.topicActions = List.of(actions);
       return this;
     }
@@ -108,7 +110,7 @@ public class AccessContext {
     }
 
     public AccessContextBuilder consumerGroupActions(ConsumerGroupAction... actions) {
-      Assert.isTrue(actions.length > 0, "actions not present");
+      Assert.isTrue(actions.length > 0, ACTIONS_NOT_PRESENT);
       this.consumerGroupActions = List.of(actions);
       return this;
     }
@@ -119,7 +121,7 @@ public class AccessContext {
     }
 
     public AccessContextBuilder connectActions(ConnectAction... actions) {
-      Assert.isTrue(actions.length > 0, "actions not present");
+      Assert.isTrue(actions.length > 0, ACTIONS_NOT_PRESENT);
       this.connectActions = List.of(actions);
       return this;
     }
@@ -135,25 +137,25 @@ public class AccessContext {
     }
 
     public AccessContextBuilder schemaActions(SchemaAction... actions) {
-      Assert.isTrue(actions.length > 0, "actions not present");
+      Assert.isTrue(actions.length > 0, ACTIONS_NOT_PRESENT);
       this.schemaActions = List.of(actions);
       return this;
     }
 
     public AccessContextBuilder ksqlActions(KsqlAction... actions) {
-      Assert.isTrue(actions.length > 0, "actions not present");
+      Assert.isTrue(actions.length > 0, ACTIONS_NOT_PRESENT);
       this.ksqlActions = List.of(actions);
       return this;
     }
 
     public AccessContextBuilder aclActions(AclAction... actions) {
-      Assert.isTrue(actions.length > 0, "actions not present");
+      Assert.isTrue(actions.length > 0, ACTIONS_NOT_PRESENT);
       this.aclActions = List.of(actions);
       return this;
     }
 
     public AccessContextBuilder auditActions(AuditAction... actions) {
-      Assert.isTrue(actions.length > 0, "actions not present");
+      Assert.isTrue(actions.length > 0, ACTIONS_NOT_PRESENT);
       this.auditActions = List.of(actions);
       return this;
     }

+ 95 - 78
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ConsumerOffsetsSerde.java

@@ -27,6 +27,23 @@ public class ConsumerOffsetsSerde implements BuiltInSerde {
 
   private static final JsonMapper JSON_MAPPER = createMapper();
 
+  private static final String ASSIGNMENT = "assignment";
+  private static final String CLIENT_HOST = "client_host";
+  private static final String CLIENT_ID = "client_id";
+  private static final String COMMIT_TIMESTAMP = "commit_timestamp";
+  private static final String CURRENT_STATE_TIMESTAMP = "current_state_timestamp";
+  private static final String GENERATION = "generation";
+  private static final String LEADER = "leader";
+  private static final String MEMBERS = "members";
+  private static final String MEMBER_ID = "member_id";
+  private static final String METADATA = "metadata";
+  private static final String OFFSET = "offset";
+  private static final String PROTOCOL = "protocol";
+  private static final String PROTOCOL_TYPE = "protocol_type";
+  private static final String REBALANCE_TIMEOUT = "rebalance_timeout";
+  private static final String SESSION_TIMEOUT = "session_timeout";
+  private static final String SUBSCRIPTION = "subscription";
+
   public static final String TOPIC = "__consumer_offsets";
 
   public static String name() {
@@ -115,128 +132,128 @@ public class ConsumerOffsetsSerde implements BuiltInSerde {
   private Deserializer valueDeserializer() {
     final Schema commitOffsetSchemaV0 =
         new Schema(
-            new Field("offset", Type.INT64, ""),
-            new Field("metadata", Type.STRING, ""),
-            new Field("commit_timestamp", Type.INT64, "")
+            new Field(OFFSET, Type.INT64, ""),
+            new Field(METADATA, Type.STRING, ""),
+            new Field(COMMIT_TIMESTAMP, Type.INT64, "")
         );
 
     final Schema commitOffsetSchemaV1 =
         new Schema(
-            new Field("offset", Type.INT64, ""),
-            new Field("metadata", Type.STRING, ""),
-            new Field("commit_timestamp", Type.INT64, ""),
+            new Field(OFFSET, Type.INT64, ""),
+            new Field(METADATA, Type.STRING, ""),
+            new Field(COMMIT_TIMESTAMP, Type.INT64, ""),
             new Field("expire_timestamp", Type.INT64, "")
         );
 
     final Schema commitOffsetSchemaV2 =
         new Schema(
-            new Field("offset", Type.INT64, ""),
-            new Field("metadata", Type.STRING, ""),
-            new Field("commit_timestamp", Type.INT64, "")
+            new Field(OFFSET, Type.INT64, ""),
+            new Field(METADATA, Type.STRING, ""),
+            new Field(COMMIT_TIMESTAMP, Type.INT64, "")
         );
 
     final Schema commitOffsetSchemaV3 =
         new Schema(
-            new Field("offset", Type.INT64, ""),
+            new Field(OFFSET, Type.INT64, ""),
             new Field("leader_epoch", Type.INT32, ""),
-            new Field("metadata", Type.STRING, ""),
-            new Field("commit_timestamp", Type.INT64, "")
+            new Field(METADATA, Type.STRING, ""),
+            new Field(COMMIT_TIMESTAMP, Type.INT64, "")
         );
 
     final Schema commitOffsetSchemaV4 = new Schema(
-        new Field("offset", Type.INT64, ""),
+        new Field(OFFSET, Type.INT64, ""),
         new Field("leader_epoch", Type.INT32, ""),
-        new Field("metadata", Type.COMPACT_STRING, ""),
-        new Field("commit_timestamp", Type.INT64, ""),
+        new Field(METADATA, Type.COMPACT_STRING, ""),
+        new Field(COMMIT_TIMESTAMP, Type.INT64, ""),
         Field.TaggedFieldsSection.of()
     );
 
     final Schema metadataSchema0 =
         new Schema(
-            new Field("protocol_type", Type.STRING, ""),
-            new Field("generation", Type.INT32, ""),
-            new Field("protocol", Type.NULLABLE_STRING, ""),
-            new Field("leader", Type.NULLABLE_STRING, ""),
-            new Field("members", new ArrayOf(new Schema(
-                new Field("member_id", Type.STRING, ""),
-                new Field("client_id", Type.STRING, ""),
-                new Field("client_host", Type.STRING, ""),
-                new Field("session_timeout", Type.INT32, ""),
-                new Field("subscription", Type.BYTES, ""),
-                new Field("assignment", Type.BYTES, "")
+            new Field(PROTOCOL_TYPE, Type.STRING, ""),
+            new Field(GENERATION, Type.INT32, ""),
+            new Field(PROTOCOL, Type.NULLABLE_STRING, ""),
+            new Field(LEADER, Type.NULLABLE_STRING, ""),
+            new Field(MEMBERS, new ArrayOf(new Schema(
+                new Field(MEMBER_ID, Type.STRING, ""),
+                new Field(CLIENT_ID, Type.STRING, ""),
+                new Field(CLIENT_HOST, Type.STRING, ""),
+                new Field(SESSION_TIMEOUT, Type.INT32, ""),
+                new Field(SUBSCRIPTION, Type.BYTES, ""),
+                new Field(ASSIGNMENT, Type.BYTES, "")
             )), "")
         );
 
     final Schema metadataSchema1 =
         new Schema(
-            new Field("protocol_type", Type.STRING, ""),
-            new Field("generation", Type.INT32, ""),
-            new Field("protocol", Type.NULLABLE_STRING, ""),
-            new Field("leader", Type.NULLABLE_STRING, ""),
-            new Field("members", new ArrayOf(new Schema(
-                new Field("member_id", Type.STRING, ""),
-                new Field("client_id", Type.STRING, ""),
-                new Field("client_host", Type.STRING, ""),
-                new Field("rebalance_timeout", Type.INT32, ""),
-                new Field("session_timeout", Type.INT32, ""),
-                new Field("subscription", Type.BYTES, ""),
-                new Field("assignment", Type.BYTES, "")
+            new Field(PROTOCOL_TYPE, Type.STRING, ""),
+            new Field(GENERATION, Type.INT32, ""),
+            new Field(PROTOCOL, Type.NULLABLE_STRING, ""),
+            new Field(LEADER, Type.NULLABLE_STRING, ""),
+            new Field(MEMBERS, new ArrayOf(new Schema(
+                new Field(MEMBER_ID, Type.STRING, ""),
+                new Field(CLIENT_ID, Type.STRING, ""),
+                new Field(CLIENT_HOST, Type.STRING, ""),
+                new Field(REBALANCE_TIMEOUT, Type.INT32, ""),
+                new Field(SESSION_TIMEOUT, Type.INT32, ""),
+                new Field(SUBSCRIPTION, Type.BYTES, ""),
+                new Field(ASSIGNMENT, Type.BYTES, "")
             )), "")
         );
 
     final Schema metadataSchema2 =
         new Schema(
-            new Field("protocol_type", Type.STRING, ""),
-            new Field("generation", Type.INT32, ""),
-            new Field("protocol", Type.NULLABLE_STRING, ""),
-            new Field("leader", Type.NULLABLE_STRING, ""),
-            new Field("current_state_timestamp", Type.INT64, ""),
-            new Field("members", new ArrayOf(new Schema(
-                new Field("member_id", Type.STRING, ""),
-                new Field("client_id", Type.STRING, ""),
-                new Field("client_host", Type.STRING, ""),
-                new Field("rebalance_timeout", Type.INT32, ""),
-                new Field("session_timeout", Type.INT32, ""),
-                new Field("subscription", Type.BYTES, ""),
-                new Field("assignment", Type.BYTES, "")
+            new Field(PROTOCOL_TYPE, Type.STRING, ""),
+            new Field(GENERATION, Type.INT32, ""),
+            new Field(PROTOCOL, Type.NULLABLE_STRING, ""),
+            new Field(LEADER, Type.NULLABLE_STRING, ""),
+            new Field(CURRENT_STATE_TIMESTAMP, Type.INT64, ""),
+            new Field(MEMBERS, new ArrayOf(new Schema(
+                new Field(MEMBER_ID, Type.STRING, ""),
+                new Field(CLIENT_ID, Type.STRING, ""),
+                new Field(CLIENT_HOST, Type.STRING, ""),
+                new Field(REBALANCE_TIMEOUT, Type.INT32, ""),
+                new Field(SESSION_TIMEOUT, Type.INT32, ""),
+                new Field(SUBSCRIPTION, Type.BYTES, ""),
+                new Field(ASSIGNMENT, Type.BYTES, "")
             )), "")
         );
 
     final Schema metadataSchema3 =
         new Schema(
-            new Field("protocol_type", Type.STRING, ""),
-            new Field("generation", Type.INT32, ""),
-            new Field("protocol", Type.NULLABLE_STRING, ""),
-            new Field("leader", Type.NULLABLE_STRING, ""),
-            new Field("current_state_timestamp", Type.INT64, ""),
-            new Field("members", new ArrayOf(new Schema(
-                new Field("member_id", Type.STRING, ""),
+            new Field(PROTOCOL_TYPE, Type.STRING, ""),
+            new Field(GENERATION, Type.INT32, ""),
+            new Field(PROTOCOL, Type.NULLABLE_STRING, ""),
+            new Field(LEADER, Type.NULLABLE_STRING, ""),
+            new Field(CURRENT_STATE_TIMESTAMP, Type.INT64, ""),
+            new Field(MEMBERS, new ArrayOf(new Schema(
+                new Field(MEMBER_ID, Type.STRING, ""),
                 new Field("group_instance_id", Type.NULLABLE_STRING, ""),
-                new Field("client_id", Type.STRING, ""),
-                new Field("client_host", Type.STRING, ""),
-                new Field("rebalance_timeout", Type.INT32, ""),
-                new Field("session_timeout", Type.INT32, ""),
-                new Field("subscription", Type.BYTES, ""),
-                new Field("assignment", Type.BYTES, "")
+                new Field(CLIENT_ID, Type.STRING, ""),
+                new Field(CLIENT_HOST, Type.STRING, ""),
+                new Field(REBALANCE_TIMEOUT, Type.INT32, ""),
+                new Field(SESSION_TIMEOUT, Type.INT32, ""),
+                new Field(SUBSCRIPTION, Type.BYTES, ""),
+                new Field(ASSIGNMENT, Type.BYTES, "")
             )), "")
         );
 
     final Schema metadataSchema4 =
         new Schema(
-            new Field("protocol_type", Type.COMPACT_STRING, ""),
-            new Field("generation", Type.INT32, ""),
-            new Field("protocol", Type.COMPACT_NULLABLE_STRING, ""),
-            new Field("leader", Type.COMPACT_NULLABLE_STRING, ""),
-            new Field("current_state_timestamp", Type.INT64, ""),
-            new Field("members", new CompactArrayOf(new Schema(
-                new Field("member_id", Type.COMPACT_STRING, ""),
+            new Field(PROTOCOL_TYPE, Type.COMPACT_STRING, ""),
+            new Field(GENERATION, Type.INT32, ""),
+            new Field(PROTOCOL, Type.COMPACT_NULLABLE_STRING, ""),
+            new Field(LEADER, Type.COMPACT_NULLABLE_STRING, ""),
+            new Field(CURRENT_STATE_TIMESTAMP, Type.INT64, ""),
+            new Field(MEMBERS, new CompactArrayOf(new Schema(
+                new Field(MEMBER_ID, Type.COMPACT_STRING, ""),
                 new Field("group_instance_id", Type.COMPACT_NULLABLE_STRING, ""),
-                new Field("client_id", Type.COMPACT_STRING, ""),
-                new Field("client_host", Type.COMPACT_STRING, ""),
-                new Field("rebalance_timeout", Type.INT32, ""),
-                new Field("session_timeout", Type.INT32, ""),
-                new Field("subscription", Type.COMPACT_BYTES, ""),
-                new Field("assignment", Type.COMPACT_BYTES, ""),
+                new Field(CLIENT_ID, Type.COMPACT_STRING, ""),
+                new Field(CLIENT_HOST, Type.COMPACT_STRING, ""),
+                new Field(REBALANCE_TIMEOUT, Type.INT32, ""),
+                new Field(SESSION_TIMEOUT, Type.INT32, ""),
+                new Field(SUBSCRIPTION, Type.COMPACT_BYTES, ""),
+                new Field(ASSIGNMENT, Type.COMPACT_BYTES, ""),
                 Field.TaggedFieldsSection.of()
             )), ""),
             Field.TaggedFieldsSection.of()
@@ -248,7 +265,7 @@ public class ConsumerOffsetsSerde implements BuiltInSerde {
       short version = bb.getShort();
       // ideally, we should distinguish if value is commit or metadata
       // by checking record's key, but our current serde structure doesn't allow that.
-      // so, we trying to parse into metadata first and after into commit msg
+      // so, we are trying to parse into metadata first and after into commit msg
       try {
         result = toJson(
             switch (version) {
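
The comment fixed above describes the deserializer's fallback strategy: the serde cannot check the record key at this point, so it optimistically parses the value as group metadata and, if that fails, rewinds and re-parses it as an offset-commit message. A rough sketch of that control flow, where parseMetadata and parseCommit are hypothetical stand-ins for reading the value with the schemas defined above:

    class FallbackParseSketch {
      String parse(java.nio.ByteBuffer bb) {
        short version = bb.getShort();   // version prefix selects the schema variant
        bb.mark();                       // remember the position after the version
        try {
          return parseMetadata(version, bb);   // optimistic: group metadata first
        } catch (RuntimeException e) {
          bb.reset();                          // rewind, then retry as a commit message
          return parseCommit(version, bb);
        }
      }

      // hypothetical helpers, stubbed for the sketch
      String parseMetadata(short version, java.nio.ByteBuffer bb) { throw new RuntimeException("stub"); }
      String parseCommit(short version, java.nio.ByteBuffer bb) { return "{}"; }
    }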

+ 5 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java

@@ -43,6 +43,8 @@ public class SchemaRegistrySerde implements BuiltInSerde {
     return "SchemaRegistry";
   }
 
+  private static final String SCHEMA_REGISTRY = "schemaRegistry";
+
   private SchemaRegistryClient schemaRegistryClient;
   private List<String> schemaRegistryUrls;
   private String valueSchemaNameTemplate;
@@ -54,7 +56,7 @@ public class SchemaRegistrySerde implements BuiltInSerde {
   @Override
   public boolean canBeAutoConfigured(PropertyResolver kafkaClusterProperties,
                                      PropertyResolver globalProperties) {
-    return kafkaClusterProperties.getListProperty("schemaRegistry", String.class)
+    return kafkaClusterProperties.getListProperty(SCHEMA_REGISTRY, String.class)
         .filter(lst -> !lst.isEmpty())
         .isPresent();
   }
@@ -62,7 +64,7 @@ public class SchemaRegistrySerde implements BuiltInSerde {
   @Override
   public void autoConfigure(PropertyResolver kafkaClusterProperties,
                             PropertyResolver globalProperties) {
-    var urls = kafkaClusterProperties.getListProperty("schemaRegistry", String.class)
+    var urls = kafkaClusterProperties.getListProperty(SCHEMA_REGISTRY, String.class)
         .filter(lst -> !lst.isEmpty())
         .orElseThrow(() -> new ValidationException("No urls provided for schema registry"));
     configure(
@@ -88,7 +90,7 @@ public class SchemaRegistrySerde implements BuiltInSerde {
                         PropertyResolver kafkaClusterProperties,
                         PropertyResolver globalProperties) {
     var urls = serdeProperties.getListProperty("url", String.class)
-        .or(() -> kafkaClusterProperties.getListProperty("schemaRegistry", String.class))
+        .or(() -> kafkaClusterProperties.getListProperty(SCHEMA_REGISTRY, String.class))
         .filter(lst -> !lst.isEmpty())
         .orElseThrow(() -> new ValidationException("No urls provided for schema registry"));
     configure(

+ 9 - 6
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/WellKnownMetrics.java

@@ -11,6 +11,9 @@ import org.apache.kafka.common.Node;
 
 class WellKnownMetrics {
 
+  private static final String BROKER_TOPIC_METRICS = "BrokerTopicMetrics";
+  private static final String FIFTEEN_MINUTE_RATE = "FifteenMinuteRate";
+
   // per broker
   final Map<Integer, BigDecimal> brokerBytesInFifteenMinuteRate = new HashMap<>();
   final Map<Integer, BigDecimal> brokerBytesOutFifteenMinuteRate = new HashMap<>();
@@ -36,15 +39,15 @@ class WellKnownMetrics {
     if (!brokerBytesInFifteenMinuteRate.containsKey(node.id())
         && rawMetric.labels().size() == 1
         && "BytesInPerSec".equalsIgnoreCase(rawMetric.labels().get("name"))
-        && containsIgnoreCase(name, "BrokerTopicMetrics")
-        && endsWithIgnoreCase(name, "FifteenMinuteRate")) {
+        && containsIgnoreCase(name, BROKER_TOPIC_METRICS)
+        && endsWithIgnoreCase(name, FIFTEEN_MINUTE_RATE)) {
       brokerBytesInFifteenMinuteRate.put(node.id(),  rawMetric.value());
     }
     if (!brokerBytesOutFifteenMinuteRate.containsKey(node.id())
         && rawMetric.labels().size() == 1
         && "BytesOutPerSec".equalsIgnoreCase(rawMetric.labels().get("name"))
-        && containsIgnoreCase(name, "BrokerTopicMetrics")
-        && endsWithIgnoreCase(name, "FifteenMinuteRate")) {
+        && containsIgnoreCase(name, BROKER_TOPIC_METRICS)
+        && endsWithIgnoreCase(name, FIFTEEN_MINUTE_RATE)) {
       brokerBytesOutFifteenMinuteRate.put(node.id(), rawMetric.value());
     }
   }
@@ -53,8 +56,8 @@ class WellKnownMetrics {
     String name = rawMetric.name();
     String topic = rawMetric.labels().get("topic");
     if (topic != null
-        && containsIgnoreCase(name, "BrokerTopicMetrics")
-        && endsWithIgnoreCase(name, "FifteenMinuteRate")) {
+        && containsIgnoreCase(name, BROKER_TOPIC_METRICS)
+        && endsWithIgnoreCase(name, FIFTEEN_MINUTE_RATE)) {
       String nameProperty = rawMetric.labels().get("name");
       if ("BytesInPerSec".equalsIgnoreCase(nameProperty)) {
         bytesInFifteenMinuteRate.compute(topic, (k, v) -> v == null ? rawMetric.value() : v.add(rawMetric.value()));

+ 5 - 4
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java

@@ -52,6 +52,7 @@ import reactor.core.publisher.Mono;
 public class AccessControlService {
 
   private static final String ACCESS_DENIED = "Access denied";
+  private static final String ACTIONS_ARE_EMPTY = "actions are empty";
 
   @Nullable
   private final InMemoryReactiveClientRegistrationRepository clientRegistrationRepository;
@@ -206,7 +207,7 @@ public class AccessControlService {
     if (context.getTopic() == null && context.getTopicActions().isEmpty()) {
       return true;
     }
-    Assert.isTrue(!context.getTopicActions().isEmpty(), "actions are empty");
+    Assert.isTrue(!context.getTopicActions().isEmpty(), ACTIONS_ARE_EMPTY);
 
     Set<String> requiredActions = context.getTopicActions()
         .stream()
@@ -243,7 +244,7 @@ public class AccessControlService {
     if (context.getConsumerGroup() == null && context.getConsumerGroupActions().isEmpty()) {
       return true;
     }
-    Assert.isTrue(!context.getConsumerGroupActions().isEmpty(), "actions are empty");
+    Assert.isTrue(!context.getConsumerGroupActions().isEmpty(), ACTIONS_ARE_EMPTY);
 
     Set<String> requiredActions = context.getConsumerGroupActions()
         .stream()
@@ -276,7 +277,7 @@ public class AccessControlService {
     if (context.getSchema() == null && context.getSchemaActions().isEmpty()) {
       return true;
     }
-    Assert.isTrue(!context.getSchemaActions().isEmpty(), "actions are empty");
+    Assert.isTrue(!context.getSchemaActions().isEmpty(), ACTIONS_ARE_EMPTY);
 
     Set<String> requiredActions = context.getSchemaActions()
         .stream()
@@ -309,7 +310,7 @@ public class AccessControlService {
     if (context.getConnect() == null && context.getConnectActions().isEmpty()) {
       return true;
     }
-    Assert.isTrue(!context.getConnectActions().isEmpty(), "actions are empty");
+    Assert.isTrue(!context.getConnectActions().isEmpty(), ACTIONS_ARE_EMPTY);
 
     Set<String> requiredActions = context.getConnectActions()
         .stream()

+ 10 - 8
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversion.java

@@ -43,6 +43,8 @@ public class JsonAvroConversion {
 
   private static final JsonMapper MAPPER = new JsonMapper();
   private static final Schema NULL_SCHEMA = Schema.create(Schema.Type.NULL);
+  private static final String FORMAT = "format";
+  private static final String DATE_TIME = "date-time";
 
   // converts json into Object that is expected input for KafkaAvroSerializer
   // (with AVRO_USE_LOGICAL_TYPE_CONVERTERS flat enabled!)
@@ -347,7 +349,7 @@ public class JsonAvroConversion {
         new SimpleFieldSchema(
             new SimpleJsonType(
                 JsonType.Type.STRING,
-                Map.of("format", new TextNode("uuid"))))
+                Map.of(FORMAT, new TextNode("uuid"))))
     ),
 
     DECIMAL("decimal",
@@ -385,7 +387,7 @@ public class JsonAvroConversion {
         new SimpleFieldSchema(
             new SimpleJsonType(
                 JsonType.Type.STRING,
-                Map.of("format", new TextNode("date"))))
+                Map.of(FORMAT, new TextNode("date"))))
     ),
 
     TIME_MILLIS("time-millis",
@@ -406,7 +408,7 @@ public class JsonAvroConversion {
         new SimpleFieldSchema(
             new SimpleJsonType(
                 JsonType.Type.STRING,
-                Map.of("format", new TextNode("time"))))
+                Map.of(FORMAT, new TextNode("time"))))
     ),
 
     TIME_MICROS("time-micros",
@@ -427,7 +429,7 @@ public class JsonAvroConversion {
         new SimpleFieldSchema(
             new SimpleJsonType(
                 JsonType.Type.STRING,
-                Map.of("format", new TextNode("time"))))
+                Map.of(FORMAT, new TextNode("time"))))
     ),
 
     TIMESTAMP_MILLIS("timestamp-millis",
@@ -448,7 +450,7 @@ public class JsonAvroConversion {
         new SimpleFieldSchema(
             new SimpleJsonType(
                 JsonType.Type.STRING,
-                Map.of("format", new TextNode("date-time"))))
+                Map.of(FORMAT, new TextNode(DATE_TIME))))
     ),
 
     TIMESTAMP_MICROS("timestamp-micros",
@@ -473,7 +475,7 @@ public class JsonAvroConversion {
         new SimpleFieldSchema(
             new SimpleJsonType(
                 JsonType.Type.STRING,
-                Map.of("format", new TextNode("date-time"))))
+                Map.of(FORMAT, new TextNode(DATE_TIME))))
     ),
 
     LOCAL_TIMESTAMP_MILLIS("local-timestamp-millis",
@@ -491,7 +493,7 @@ public class JsonAvroConversion {
         new SimpleFieldSchema(
             new SimpleJsonType(
                 JsonType.Type.STRING,
-                Map.of("format", new TextNode("date-time"))))
+                Map.of(FORMAT, new TextNode(DATE_TIME))))
     ),
 
     LOCAL_TIMESTAMP_MICROS("local-timestamp-micros",
@@ -508,7 +510,7 @@ public class JsonAvroConversion {
         new SimpleFieldSchema(
             new SimpleJsonType(
                 JsonType.Type.STRING,
-                Map.of("format", new TextNode("date-time"))))
+                Map.of(FORMAT, new TextNode(DATE_TIME))))
     );
 
     private final String name;
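
These hunks sit in the JSON-schema mapping of Avro logical types: date and time types are exposed as JSON strings carrying a "format" hint, which is what the FORMAT and DATE_TIME constants now name. A small illustrative sketch of the resulting schema node, assuming Jackson's JsonNodeFactory (not this class's actual API):

    import com.fasterxml.jackson.databind.node.JsonNodeFactory;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    class FormatHintSketch {
      // builds {"type":"string","format":"date-time"}, the shape a
      // timestamp-millis field takes in the generated JSON schema
      static ObjectNode timestampSchema() {
        ObjectNode node = JsonNodeFactory.instance.objectNode();
        node.put("type", "string");
        node.put("format", "date-time");
        return node;
      }
    }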

+ 11 - 8
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/ProtobufSchemaConverter.java

@@ -37,6 +37,9 @@ import reactor.util.function.Tuples;
 
 public class ProtobufSchemaConverter implements JsonSchemaConverter<Descriptors.Descriptor> {
 
+  private static final String MAXIMUM = "maximum";
+  private static final String MINIMUM = "minimum";
+
   private final Set<String> simpleTypesWrapperNames = Set.of(
       BoolValue.getDescriptor().getFullName(),
       Int32Value.getDescriptor().getFullName(),
@@ -156,15 +159,15 @@ public class ProtobufSchemaConverter implements JsonSchemaConverter<Descriptors.
       case INT32, FIXED32, SFIXED32, SINT32 -> new SimpleJsonType(
           JsonType.Type.INTEGER,
           Map.of(
-              "maximum", IntNode.valueOf(Integer.MAX_VALUE),
-              "minimum", IntNode.valueOf(Integer.MIN_VALUE)
+              MAXIMUM, IntNode.valueOf(Integer.MAX_VALUE),
+              MINIMUM, IntNode.valueOf(Integer.MIN_VALUE)
           )
       );
       case UINT32 -> new SimpleJsonType(
           JsonType.Type.INTEGER,
           Map.of(
-              "maximum", LongNode.valueOf(UnsignedInteger.MAX_VALUE.longValue()),
-              "minimum", IntNode.valueOf(0)
+              MAXIMUM, LongNode.valueOf(UnsignedInteger.MAX_VALUE.longValue()),
+              MINIMUM, IntNode.valueOf(0)
           )
       );
       //TODO: actually all *64 types will be printed with quotes (as strings),
@@ -173,15 +176,15 @@ public class ProtobufSchemaConverter implements JsonSchemaConverter<Descriptors.
       case INT64, FIXED64, SFIXED64, SINT64 -> new SimpleJsonType(
           JsonType.Type.INTEGER,
           Map.of(
-              "maximum", LongNode.valueOf(Long.MAX_VALUE),
-              "minimum", LongNode.valueOf(Long.MIN_VALUE)
+              MAXIMUM, LongNode.valueOf(Long.MAX_VALUE),
+              MINIMUM, LongNode.valueOf(Long.MIN_VALUE)
           )
       );
       case UINT64 -> new SimpleJsonType(
           JsonType.Type.INTEGER,
           Map.of(
-              "maximum", new BigIntegerNode(UnsignedLong.MAX_VALUE.bigIntegerValue()),
-              "minimum", LongNode.valueOf(0)
+              MAXIMUM, new BigIntegerNode(UnsignedLong.MAX_VALUE.bigIntegerValue()),
+              MINIMUM, LongNode.valueOf(0)
           )
       );
       case MESSAGE, GROUP -> new SimpleJsonType(JsonType.Type.OBJECT);

+ 5 - 3
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Schema.java

@@ -10,25 +10,27 @@ import lombok.experimental.Accessors;
 @Accessors(chain = true)
 public class Schema {
 
+  private static final String USER_DIR = "user.dir";
+
   private String name, valuePath;
   private SchemaType type;
 
   public static Schema createSchemaAvro() {
     return new Schema().setName("schema_avro-" + randomAlphabetic(5))
         .setType(SchemaType.AVRO)
-        .setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schemas/schema_avro_value.json");
+        .setValuePath(System.getProperty(USER_DIR) + "/src/main/resources/testData/schemas/schema_avro_value.json");
   }
 
   public static Schema createSchemaJson() {
     return new Schema().setName("schema_json-" + randomAlphabetic(5))
         .setType(SchemaType.JSON)
-        .setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schemas/schema_json_Value.json");
+        .setValuePath(System.getProperty(USER_DIR) + "/src/main/resources/testData/schemas/schema_json_Value.json");
   }
 
   public static Schema createSchemaProtobuf() {
     return new Schema().setName("schema_protobuf-" + randomAlphabetic(5))
         .setType(SchemaType.PROTOBUF)
         .setValuePath(
-            System.getProperty("user.dir") + "/src/main/resources/testData/schemas/schema_protobuf_value.txt");
+            System.getProperty(USER_DIR) + "/src/main/resources/testData/schemas/schema_protobuf_value.txt");
   }
 }

+ 5 - 3
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicCreateEditForm.java

@@ -19,6 +19,8 @@ import io.qameta.allure.Step;
 
 public class TopicCreateEditForm extends BasePage {
 
+  private static final String RETENTION_BYTES = "retentionBytes";
+
   protected SelenideElement timeToRetainField = $x("//input[@id='timeToRetain']");
   protected SelenideElement partitionsField = $x("//input[@name='partitions']");
   protected SelenideElement nameField = $(id("topicFormName"));
@@ -138,12 +140,12 @@ public class TopicCreateEditForm extends BasePage {
 
   @Step
   public TopicCreateEditForm selectRetentionBytes(String visibleValue) {
-    return selectFromDropDownByVisibleText("retentionBytes", visibleValue);
+    return selectFromDropDownByVisibleText(RETENTION_BYTES, visibleValue);
   }
 
   @Step
   public TopicCreateEditForm selectRetentionBytes(Long optionValue) {
-    return selectFromDropDownByOptionValue("retentionBytes", optionValue.toString());
+    return selectFromDropDownByOptionValue(RETENTION_BYTES, optionValue.toString());
   }
 
   @Step
@@ -202,7 +204,7 @@ public class TopicCreateEditForm extends BasePage {
 
   @Step
   public String getMaxSizeOnDisk() {
-    return new KafkaUiSelectElement("retentionBytes").getCurrentValue();
+    return new KafkaUiSelectElement(RETENTION_BYTES).getCurrentValue();
   }
 
   @Step