
Fixed bugs (#90)

* Fixed bugs

* More fixes
German Osin 4 years ago
Parent
Current commit
8057dc123f
19 files changed, 201 insertions and 129 deletions
  1. + 5 - 1   kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/deserialization/SimpleRecordDeserializer.java
  2. + 4 - 5   kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/mapper/ClusterMapper.java
  3. + 1 - 1   kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalBrokerMetrics.java
  4. + 3 - 1   kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalPartition.java
  5. + 0 - 1   kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalTopic.java
  6. + 1 - 1   kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/KafkaCluster.java
  7. + 24 - 14 kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/ClusterService.java
  8. + 9 - 21  kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/util/ClusterUtil.java
  9. + 1 - 1   kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/util/JmxClusterUtil.java
  10. + 20 - 16 kafka-ui-api/src/main/java/com/provectus/kafka/ui/kafka/KafkaService.java
  11. + 9 - 2   kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/MetricsRestController.java
  12. + 71 - 20 kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
  13. + 1 - 0   kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx
  14. + 1 - 0   kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.tsx
  15. + 2 - 2   kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx
  16. + 21 - 17 kafka-ui-react-app/src/components/Topics/Details/Overview/Overview.tsx
  17. + 11 - 14 kafka-ui-react-app/src/redux/api/brokers.ts
  18. + 7 - 5   kafka-ui-react-app/src/redux/interfaces/consumerGroup.ts
  19. + 10 - 7  kafka-ui-react-app/src/redux/interfaces/topic.ts

+ 5 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/deserialization/SimpleRecordDeserializer.java

@@ -10,6 +10,10 @@ public class SimpleRecordDeserializer implements RecordDeserializer {
 
 	@Override
 	public Object deserialize(ConsumerRecord<Bytes, Bytes> record) {
-		return stringDeserializer.deserialize(record.topic(), record.value().get());
+		if (record.value()!=null) {
+			return stringDeserializer.deserialize(record.topic(), record.value().get());
+		} else {
+			return "empty";
+		}
 	}
 }
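
Note on the change above: the deserializer now returns the literal string "empty" for tombstone records (records whose value is null) instead of failing on record.value().get(). A minimal sketch of the new behaviour; the topic name and key are made up, and it assumes SimpleRecordDeserializer can be instantiated directly:

    // Tombstone record: compacted topics deliver a null value for deleted keys.
    ConsumerRecord<Bytes, Bytes> tombstone =
            new ConsumerRecord<>("example-topic", 0, 0L, Bytes.wrap("key".getBytes()), null);
    // Before the fix this dereferenced a null value; now it falls into the else branch.
    Object result = new SimpleRecordDeserializer().deserialize(tombstone);
    assert "empty".equals(result);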

+ 4 - 5
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/mapper/ClusterMapper.java

@@ -1,10 +1,7 @@
 package com.provectus.kafka.ui.cluster.mapper;
 
 import com.provectus.kafka.ui.cluster.config.ClustersProperties;
-import com.provectus.kafka.ui.cluster.model.InternalClusterMetrics;
-import com.provectus.kafka.ui.cluster.model.InternalTopic;
-import com.provectus.kafka.ui.cluster.model.InternalTopicConfig;
-import com.provectus.kafka.ui.cluster.model.KafkaCluster;
+import com.provectus.kafka.ui.cluster.model.*;
 import com.provectus.kafka.ui.model.*;
 import org.mapstruct.Mapper;
 import org.mapstruct.Mapping;
@@ -19,8 +16,10 @@ public interface ClusterMapper {
     Cluster toCluster(KafkaCluster cluster);
 
     KafkaCluster toKafkaCluster(ClustersProperties.Cluster clusterProperties);
-    BrokersMetrics toBrokerMetrics(InternalClusterMetrics metrics);
+    ClusterMetrics toClusterMetrics(InternalClusterMetrics metrics);
+    BrokerMetrics toBrokerMetrics(InternalBrokerMetrics metrics);
     Topic toTopic(InternalTopic topic);
     TopicDetails toTopicDetails(InternalTopic topic);
     TopicConfig toTopicConfig(InternalTopicConfig topic);
+    Replica toReplica(InternalReplica replica);
 }

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalBrokerMetrics.java

@@ -10,5 +10,5 @@ import java.util.List;
 @Builder(toBuilder = true)
 public class InternalBrokerMetrics {
     private final Long segmentSize;
-    private final List<Metric> jmxMetrics;
+    private final List<Metric> metrics;
 }

+ 3 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalPartition.java

@@ -6,11 +6,13 @@ import lombok.Data;
 import java.util.List;
 
 @Data
-@Builder
+@Builder(toBuilder = true)
 public class InternalPartition {
     private final int partition;
     private final Integer leader;
     private final List<InternalReplica> replicas;
     private final int inSyncReplicasCount;
     private final int replicasCount;
+    private final long offsetMin;
+    private final long offsetMax;
 }

+ 0 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalTopic.java

@@ -1,6 +1,5 @@
 package com.provectus.kafka.ui.cluster.model;
 
-import com.provectus.kafka.ui.model.TopicPartitionDto;
 import lombok.Builder;
 import lombok.Data;
 import org.apache.kafka.common.TopicPartition;

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/KafkaCluster.java

@@ -11,7 +11,7 @@ import java.util.Map;
 public class KafkaCluster {
 
     private final String name;
-    private final int jmxPort;
+    private final Integer jmxPort;
     private final String bootstrapServers;
     private final String zookeeper;
     private final String schemaRegistry;

+ 24 - 14
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/ClusterService.java

@@ -38,17 +38,22 @@ public class ClusterService {
                 .collect(Collectors.toList());
     }
 
-    public Mono<BrokersMetrics> getBrokersMetrics(String name, Integer id) {
+    public Mono<BrokerMetrics> getBrokerMetrics(String name, Integer id) {
         return Mono.justOrEmpty(clustersStorage.getClusterByName(name)
-                .map(KafkaCluster::getMetrics)
-                .map(s -> {
-                    var brokerMetrics = clusterMapper.toBrokerMetrics(s);
-                    brokerMetrics.setMetrics(s.getInternalBrokerMetrics().get(id).getJmxMetrics());
-                    brokerMetrics.setSegmentZise(Long.valueOf(s.getSegmentSize()).intValue());
-                    return brokerMetrics;
-                }));
+                .map( c -> c.getMetrics().getInternalBrokerMetrics())
+                .map( m -> m.get(id))
+                .map(clusterMapper::toBrokerMetrics));
     }
 
+    public Mono<ClusterMetrics> getClusterMetrics(String name) {
+        return Mono.justOrEmpty(
+                clustersStorage.getClusterByName(name)
+                        .map(KafkaCluster::getMetrics)
+                        .map(clusterMapper::toClusterMetrics)
+        );
+    }
+
+
     public List<Topic> getTopics(String name) {
         return clustersStorage.getClusterByName(name)
                 .map(c ->
@@ -60,12 +65,15 @@
 
     public Optional<TopicDetails> getTopicDetails(String name, String topicName) {
         return clustersStorage.getClusterByName(name)
-                .map(c -> {
-                     var topic = c.getTopics().get(topicName);
-                     return clusterMapper
-                             .toTopicDetails(topic)
-                             .partitions(kafkaService.partitionDtoList(topic, c));
-                });
+                .flatMap( c ->
+                        Optional.ofNullable(
+                          c.getTopics().get(topicName)
+                        ).map(
+                          t -> t.toBuilder().partitions(
+                                  kafkaService.getTopicPartitions(c, t)
+                          ).build()
+                        ).map(clusterMapper::toTopicDetails)
+                );
     }
 
     public Optional<List<TopicConfig>> getTopicConfigs(String name, String topicName) {
@@ -143,6 +151,7 @@
         return clustersStorage.getClusterByName(clusterName).map(cl ->
                 topicFormData
                         .flatMap(t -> kafkaService.updateTopic(cl, topicName, t))
+                        .map(clusterMapper::toTopic)
                        .flatMap(t -> updateCluster(t, clusterName, cl))
         )
                 .orElse(Mono.empty());
@@ -161,4 +170,5 @@
                 .map(c -> consumingService.loadMessages(c, topicName, consumerPosition, query, limit))
                 .orElse(Flux.empty());
     }
+
 }

+ 9 - 21
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/util/ClusterUtil.java

@@ -68,10 +68,12 @@ public class ClusterUtil {
     ) {
         return consumer.assignment().topicPartitions().stream()
                 .map(tp -> {
-                    Long currentOffset = groupOffsets.get(tp).offset();
-                    Long endOffset = endOffsets.get(tp);
+                    Long currentOffset = Optional.ofNullable(
+                            groupOffsets.get(tp)).map(o -> o.offset()).orElse(0L);
+                    Long endOffset = Optional.ofNullable(endOffsets.get(tp)).orElse(0L);
                     ConsumerTopicPartitionDetail cd = new ConsumerTopicPartitionDetail();
                     cd.setConsumerId(consumer.consumerId());
+                    cd.setHost(consumer.host());
                     cd.setTopic(tp.topic());
                     cd.setPartition(tp.partition());
                     cd.setCurrentOffset(currentOffset);
@@ -116,7 +118,7 @@
 
         int urpCount = partitions.stream()
                 .flatMap(partition -> partition.getReplicas().stream())
-                .filter(InternalReplica::isInSync).mapToInt(e -> 1)
+                .filter(p -> !p.isInSync()).mapToInt(e -> 1)
                 .sum();
 
         int inSyncReplicasCount = partitions.stream()
@@ -199,6 +201,10 @@
                 .filter(entry -> entry.name().contains(CLUSTER_VERSION_PARAM_KEY))
                 .findFirst().orElseThrow().value();
         try {
+            final String[] parts = version.split("\\.");
+            if (parts.length>2) {
+              version = parts[0] + "." + parts[1];
+            }
             return Float.parseFloat(version.split("-")[0]) <= 2.3f
                     ? ExtendedAdminClient.SupportedFeature.ALTER_CONFIGS : ExtendedAdminClient.SupportedFeature.INCREMENTAL_ALTER_CONFIGS;
         } catch (Exception e) {
@@ -207,24 +213,6 @@
         }
     }
 
-    public static Topic convertToTopic(InternalTopic internalTopic) {
-        Topic topic = new Topic();
-        topic.setName(internalTopic.getName());
-        List<Partition> partitions = internalTopic.getPartitions().stream().flatMap(s -> {
-            Partition partition = new Partition();
-            partition.setPartition(s.getPartition());
-            partition.setLeader(s.getLeader());
-            partition.setReplicas(s.getReplicas().stream().flatMap(r -> {
-                Replica replica = new Replica();
-                replica.setBroker(r.getBroker());
-                return Stream.of(replica);
-            }).collect(Collectors.toList()));
-            return Stream.of(partition);
-        }).collect(Collectors.toList());
-        topic.setPartitions(partitions);
-        return topic;
-    }
-
     public static <T, R> Map<T, R> toSingleMap (Stream<Map<T, R>> streamOfMaps) {
         return streamOfMaps.reduce((map1, map2) -> Stream.concat(map1.entrySet().stream(), map2.entrySet().stream())
                 .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))).orElseThrow();
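
The four lines added to the version check above exist because Float.parseFloat cannot handle three-part Kafka versions such as "2.4.0"; the string is truncated to its "major.minor" prefix before the comparison against 2.3. The same logic as a standalone sketch (the method name is illustrative, not part of this change):

    // Returns true when only the legacy alterConfigs API should be used (version <= 2.3).
    static boolean useLegacyAlterConfigs(String version) {
        String[] parts = version.split("\\.");
        if (parts.length > 2) {
            version = parts[0] + "." + parts[1];   // "2.4.0-cp1" -> "2.4"
        }
        return Float.parseFloat(version.split("-")[0]) <= 2.3f;
    }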

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/util/JmxClusterUtil.java

@@ -100,7 +100,7 @@ public class JmxClusterUtil {
     public List<MetricDto> convertToMetricDto(InternalClusterMetrics internalClusterMetrics) {
         return internalClusterMetrics.getInternalBrokerMetrics().values().stream()
                 .map(c ->
-                        c.getJmxMetrics().stream()
+                        c.getMetrics().stream()
                                 .filter(j -> isSameMetric(j.getCanonicalName()))
                                 .map(j -> j.getValue().entrySet().stream()
                                         .map(e -> new MetricDto(j.getCanonicalName(), e.getKey(), e.getValue()))))

+ 20 - 16
kafka-ui-api/src/main/java/com/provectus/kafka/ui/kafka/KafkaService.java

@@ -265,7 +265,7 @@ public class KafkaService {
     }
 
     @SneakyThrows
-    public Mono<Topic> updateTopic(KafkaCluster cluster, String topicName, TopicFormData topicFormData) {
+    public Mono<InternalTopic> updateTopic(KafkaCluster cluster, String topicName, TopicFormData topicFormData) {
         ConfigResource topicCR = new ConfigResource(ConfigResource.Type.TOPIC, topicName);
         return getOrCreateAdminClient(cluster)
                 .flatMap(ac -> {
@@ -281,11 +281,10 @@
 
 
 
-    private Mono<Topic> getUpdatedTopic (ExtendedAdminClient ac, String topicName) {
+    private Mono<InternalTopic> getUpdatedTopic (ExtendedAdminClient ac, String topicName) {
         return getTopicsData(ac.getAdminClient())
                 .map(s -> s.stream()
-                        .filter(t -> t.getName().equals(topicName)).findFirst().orElseThrow())
-                .map(ClusterUtil::convertToTopic);
+                        .filter(t -> t.getName().equals(topicName)).findFirst().orElseThrow());
     }
 
     private Mono<String> incrementalAlterConfig(TopicFormData topicFormData, ConfigResource topicCR, ExtendedAdminClient ac) {
@@ -346,6 +345,8 @@
 
     public List<Metric> getJmxMetric(String clusterName, Node node) {
         return clustersStorage.getClusterByName(clusterName)
+                        .filter( c -> c.getJmxPort() != null)
+                        .filter( c -> c.getJmxPort() > 0)
                        .map(c -> jmxClusterUtil.getJmxMetrics(c.getJmxPort(), node.host())).orElse(Collections.emptyList());
     }
 
@@ -357,7 +358,7 @@
         return ClusterUtil.toMono(ac.describeCluster().nodes())
                 .flatMapIterable(nodes -> nodes)
                 .map(broker -> Map.of(broker.id(), InternalBrokerMetrics.builder().
-                            jmxMetrics(getJmxMetric(clusterName, broker)).build()))
+                        metrics(getJmxMetric(clusterName, broker)).build()))
                 .collectList()
                 .map(s -> internalClusterMetrics.toBuilder().internalBrokerMetrics(ClusterUtil.toSingleMap(s.stream())).build());
     }
@@ -377,22 +378,25 @@
                     .collect(Collectors.toList())).build();
     }
 
-    public List<TopicPartitionDto> partitionDtoList (InternalTopic topic, KafkaCluster cluster) {
-        var topicPartitions = topic.getPartitions().stream().map(t -> new TopicPartition(topic.getName(), t.getPartition())).collect(Collectors.toList());
-        return getTopicPartitionOffset(cluster, topicPartitions);
-    }
+    public List<InternalPartition> getTopicPartitions(KafkaCluster c, InternalTopic topic )  {
+        var tps = topic.getPartitions().stream()
+                .map(t -> new TopicPartition(topic.getName(), t.getPartition()))
+                .collect(Collectors.toList());
+        Map<Integer, InternalPartition> partitions =
+                topic.getPartitions().stream().collect(Collectors.toMap(
+                        InternalPartition::getPartition,
+                        tp -> tp
+                ));
 
-    private List<TopicPartitionDto> getTopicPartitionOffset(KafkaCluster c, List<TopicPartition> topicPartitions )  {
         try (var consumer = createConsumer(c)) {
-            final Map<TopicPartition, Long> earliest = consumer.beginningOffsets(topicPartitions);
-            final Map<TopicPartition, Long> latest = consumer.endOffsets(topicPartitions);
+            final Map<TopicPartition, Long> earliest = consumer.beginningOffsets(tps);
+            final Map<TopicPartition, Long> latest = consumer.endOffsets(tps);
 
-            return topicPartitions.stream()
-                    .map( tp -> new TopicPartitionDto()
-                            .topic(tp.topic())
-                            .partition(tp.partition())
+            return tps.stream()
+                    .map( tp -> partitions.get(tp.partition()).toBuilder()
                             .offsetMin(Optional.ofNullable(earliest.get(tp)).orElse(0L))
                             .offsetMax(Optional.ofNullable(latest.get(tp)).orElse(0L))
+                            .build()
                     ).collect(Collectors.toList());
         } catch (Exception e) {
             return Collections.emptyList();
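
For reference, the reworked getTopicPartitions keeps the existing InternalPartition objects and only enriches them (via toBuilder) with offset bounds read from a short-lived consumer. The two consumer calls at the heart of it, as a minimal sketch with a hypothetical consumer and topic:

    // beginningOffsets returns the earliest available offset per partition,
    // endOffsets the offset the next produced record would receive.
    List<TopicPartition> tps = List.of(new TopicPartition("example-topic", 0));
    Map<TopicPartition, Long> earliest = consumer.beginningOffsets(tps);
    Map<TopicPartition, Long> latest = consumer.endOffsets(tps);
    long offsetMin = earliest.getOrDefault(tps.get(0), 0L);
    long offsetMax = latest.getOrDefault(tps.get(0), 0L);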

+ 9 - 2
kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/MetricsRestController.java

@@ -30,12 +30,19 @@ public class MetricsRestController implements ApiClustersApi {
     }
 
     @Override
-    public Mono<ResponseEntity<BrokersMetrics>> getBrokersMetrics(String clusterName, Integer id, ServerWebExchange exchange) {
-        return clusterService.getBrokersMetrics(clusterName, id)
+    public Mono<ResponseEntity<BrokerMetrics>> getBrokersMetrics(String clusterName, Integer id, ServerWebExchange exchange) {
+        return clusterService.getBrokerMetrics(clusterName, id)
                         .map(ResponseEntity::ok)
                         .onErrorReturn(ResponseEntity.notFound().build());
     }
 
+    @Override
+    public Mono<ResponseEntity<ClusterMetrics>> getClusterMetrics(String clusterName, ServerWebExchange exchange) {
+        return clusterService.getClusterMetrics(clusterName)
+                .map(ResponseEntity::ok)
+                .onErrorReturn(ResponseEntity.notFound().build());
+    }
+
     @Override
     public Mono<ResponseEntity<Flux<Topic>>> getTopics(String clusterName, ServerWebExchange exchange) {
         return Mono.just(ResponseEntity.ok(Flux.fromIterable(clusterService.getTopics(clusterName))));

+ 71 - 20
kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml

@@ -52,6 +52,26 @@ paths:
                 items:
                   $ref: '#/components/schemas/Broker'
 
+  /api/clusters/{clusterName}/metrics:
+    get:
+      tags:
+        - /api/clusters
+      summary: getClusterMetrics
+      operationId: getClusterMetrics
+      parameters:
+        - name: clusterName
+          in: path
+          required: true
+          schema:
+            type: string
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ClusterMetrics'
+
   /api/clusters/{clusterName}/metrics/broker/{id}:
     get:
       tags:
@@ -75,7 +95,7 @@ paths:
           content:
             application/json:
              schema:
-                $ref: '#/components/schemas/BrokersMetrics'
+                $ref: '#/components/schemas/BrokerMetrics'
 
   /api/clusters/{clusterName}/topics:
     get:
@@ -327,16 +347,16 @@ components:
         - online
        - offline
 
-    BrokersMetrics:
+    ClusterMetrics:
       type: object
       properties:
+        brokerCount:
+          type: integer
        zooKeeperStatus:
          type: integer
         activeControllers:
           type: integer
-        uncleanLeaderElectionCount:
-          type: integer
-        underReplicatedPartitionCount:
+        onlinePartitionCount:
           type: integer
         offlinePartitionCount:
           type: integer
@@ -344,6 +364,25 @@
           type: integer
         outOfSyncReplicasCount:
           type: integer
+        underReplicatedPartitionCount:
+          type: integer
+        diskUsage:
+          type: array
+          items:
+            $ref: '#/components/schemas/BrokerDiskUsage'
+
+    BrokerDiskUsage:
+      type: object
+      properties:
+        brokerId:
+          type: integer
+        segmentSize:
+          type: integer
+          format: int64
+
+    BrokerMetrics:
+      type: object
+      properties:
         segmentZise:
           type: integer
         metrics:
@@ -358,22 +397,24 @@
           type: string
         internal:
           type: boolean
-        partitions:
-          type: array
-          items:
-            $ref: '#/components/schemas/Partition'
-
-    Partition:
-      type: object
-      properties:
-        partition:
+        partitionCount:
           type: integer
-        leader:
+        replicationFactor:
           type: integer
         replicas:
+          type: integer
+        inSyncReplicas:
+          type: integer
+        segmentSize:
+          type: integer
+        segmentCount:
+          type: integer
+        underReplicatedPartitions:
+          type: integer
+        partitions:
           type: array
           items:
-            $ref: '#/components/schemas/Replica'
+            $ref: "#/components/schemas/Partition"
 
     Replica:
       type: object
@@ -388,10 +429,14 @@
     TopicDetails:
       type: object
       properties:
+        name:
+          type: string
+        internal:
+          type: boolean
         partitions:
           type: array
          items:
-            $ref: "#/components/schemas/TopicPartitionDto"
+            $ref: "#/components/schemas/Partition"
         partitionCount:
           type: integer
         replicationFactor:
@@ -490,13 +535,17 @@
         - OFFSET
         - TIMESTAMP
 
-    TopicPartitionDto:
+    Partition:
       type: object
       properties:
-        topic:
-          type: string
         partition:
           type: integer
+        leader:
+          type: integer
+        replicas:
+          type: array
+          items:
+            $ref: '#/components/schemas/Replica'
         offsetMax:
           type: integer
           format: int64
@@ -516,6 +565,8 @@
           type: string
         topic:
          type: string
+        host:
+          type: string
         partition:
           type: integer
         currentOffset:
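
With the paths and schemas above, a GET on /api/clusters/{clusterName}/metrics now returns a ClusterMetrics object, while the per-broker endpoint returns the slimmer BrokerMetrics. An illustrative ClusterMetrics response (field names follow the schema; the values are made up):

    {
      "brokerCount": 1,
      "zooKeeperStatus": 1,
      "activeControllers": 1,
      "onlinePartitionCount": 50,
      "offlinePartitionCount": 0,
      "inSyncReplicasCount": 50,
      "outOfSyncReplicasCount": 0,
      "underReplicatedPartitionCount": 0,
      "diskUsage": [ { "brokerId": 1, "segmentSize": 1024 } ]
    }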

+ 1 - 0
kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx

@@ -58,6 +58,7 @@ const Details: React.FC<Props> = ({
             <thead>
               <tr>
                 <th>Consumer ID</th>
+                <th>Host</th>
                 <th>Topic</th>
                 <th>Partition</th>
                 <th>Messages behind</th>

+ 1 - 0
kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.tsx

@@ -12,6 +12,7 @@ const ListItem: React.FC<Props> = ({ clusterName, consumer }) => {
   return (
     <tr>
       <td>{consumer.consumerId}</td>
+      <td>{consumer.host}</td>
       <td>
         <NavLink
           exact

+ 2 - 2
kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx

@@ -172,8 +172,8 @@ const Messages: React.FC<Props> = ({
     });
   };
 
-  const getTimestampDate = (timestamp: number) => {
-    return format(new Date(timestamp * 1000), 'MM.dd.yyyy HH:mm:ss');
+  const getTimestampDate = (timestamp: string) => {
+    return format(Date.parse(timestamp), 'yyyy-MM-dd HH:mm:ss');
   };
 
   const getMessageContentHeaders = React.useMemo(() => {

+ 21 - 17
kafka-ui-react-app/src/components/Topics/Details/Overview/Overview.tsx

@@ -23,10 +23,9 @@ const Overview: React.FC<Props> = ({
   replicationFactor,
   fetchTopicDetails,
 }) => {
-  React.useEffect(
-    () => { fetchTopicDetails(clusterName, topicName); },
-    [fetchTopicDetails, clusterName, topicName],
-  );
+  React.useEffect(() => {
+    fetchTopicDetails(clusterName, topicName);
+  }, [fetchTopicDetails, clusterName, topicName]);
 
   if (!isFetched) {
     return null;
@@ -35,18 +34,18 @@
   return (
     <>
       <MetricsWrapper>
-        <Indicator label="Partitions">
-          {partitionCount}
-        </Indicator>
-        <Indicator label="Replication Factor">
-          {replicationFactor}
-        </Indicator>
+        <Indicator label="Partitions">{partitionCount}</Indicator>
+        <Indicator label="Replication Factor">{replicationFactor}</Indicator>
         <Indicator label="URP" title="Under replicated partitions">
           {underReplicatedPartitions}
         </Indicator>
         <Indicator label="In sync replicas">
           {inSyncReplicas}
-          <span className="subtitle has-text-weight-light"> of {replicas}</span>
+          <span className="subtitle has-text-weight-light">
+            {' '}
+            of
+            {replicas}
+          </span>
         </Indicator>
         <Indicator label="Type">
           <span className="tag is-primary">
@@ -60,15 +59,20 @@
             <tr>
               <th>Partition ID</th>
               <th>Broker leader</th>
+              <th>Min offset</th>
+              <th>Max offset</th>
             </tr>
           </thead>
           <tbody>
-            {partitions && partitions.map(({ partition, leader }) => (
-              <tr key={`partition-list-item-key-${partition}`}>
-                <td>{partition}</td>
-                <td>{leader}</td>
-              </tr>
-            ))}
+            {partitions &&
+              partitions.map(({ partition, leader, offsetMin, offsetMax }) => (
+                <tr key={`partition-list-item-key-${partition}`}>
+                  <td>{partition}</td>
+                  <td>{leader}</td>
+                  <td>{offsetMin}</td>
+                  <td>{offsetMax}</td>
+                </tr>
+              ))}
           </tbody>
         </table>
       </div>

+ 11 - 14
kafka-ui-react-app/src/redux/api/brokers.ts

@@ -1,17 +1,14 @@
-import {
-  Broker,
-  ClusterName,
-  BrokerMetrics,
-} from 'redux/interfaces';
-import {
-  BASE_URL,
-  BASE_PARAMS,
-} from 'lib/constants';
+import { Broker, ClusterName, BrokerMetrics } from 'redux/interfaces';
+import { BASE_URL, BASE_PARAMS } from 'lib/constants';
 
 export const getBrokers = (clusterName: ClusterName): Promise<Broker[]> =>
-  fetch(`${BASE_URL}/clusters/${clusterName}/brokers`, { ...BASE_PARAMS })
-    .then(res => res.json());
+  fetch(`${BASE_URL}/clusters/${clusterName}/brokers`, {
+    ...BASE_PARAMS,
+  }).then((res) => res.json());
 
-export const getBrokerMetrics = (clusterName: ClusterName): Promise<BrokerMetrics> =>
-  fetch(`${BASE_URL}/clusters/${clusterName}/metrics/broker`, { ...BASE_PARAMS })
-    .then(res => res.json());
+export const getBrokerMetrics = (
+  clusterName: ClusterName
+): Promise<BrokerMetrics> =>
+  fetch(`${BASE_URL}/clusters/${clusterName}/metrics`, {
+    ...BASE_PARAMS,
+  }).then((res) => res.json());

+ 7 - 5
kafka-ui-react-app/src/redux/interfaces/consumerGroup.ts

@@ -16,16 +16,18 @@ export interface ConsumerGroupDetails {
 export interface Consumer {
   consumerId: string;
   topic: string;
+  host: string;
   partition: number;
   messagesBehind: number;
   currentOffset: number;
   endOffset: number;
 }
 
-export interface ConsumerGroupDetailedInfo extends ConsumerGroup, ConsumerGroupDetails {
-}
+export interface ConsumerGroupDetailedInfo
+  extends ConsumerGroup,
+    ConsumerGroupDetails {}
 
 export interface ConsumerGroupsState {
-  byID: { [consumerGroupID: string]: ConsumerGroupDetailedInfo },
-  allIDs: string[]
-}
+  byID: { [consumerGroupID: string]: ConsumerGroupDetailedInfo };
+  allIDs: string[];
+}

+ 10 - 7
kafka-ui-react-app/src/redux/interfaces/topic.ts

@@ -26,6 +26,8 @@ export interface TopicReplica {
 export interface TopicPartition {
   partition: number;
   leader: number;
+  offsetMin: number;
+  offsetMax: number;
   replicas: TopicReplica[];
 }
 
@@ -35,25 +37,26 @@
 }
 
 export interface TopicDetails {
+  partitions: TopicPartition[];
+}
+
+export interface Topic {
+  name: TopicName;
+  internal: boolean;
   partitionCount?: number;
   replicationFactor?: number;
   replicas?: number;
-  segmentSize?: number;
   inSyncReplicas?: number;
+  segmentSize?: number;
   segmentCount?: number;
   underReplicatedPartitions?: number;
-}
-
-export interface Topic {
-  name: TopicName;
-  internal: boolean;
   partitions: TopicPartition[];
 }
 
 export interface TopicMessage {
   partition: number;
   offset: number;
-  timestamp: number;
+  timestamp: string;
   timestampType: string;
   key: string;
   headers: Record<string, string>;