ISSUE-803 Added DISABLELOGDIRSCOLLECTION flag to skip size calculation (#823)

German Osin · 3 years ago
commit 673e74e15a

README.md (+1 -0)

@@ -173,6 +173,7 @@ For example, if you want to use an environment variable to set the `name` parame
 |`KAFKA_CLUSTERS_0_SCHEMANAMETEMPLATE`  |How keys are saved to schemaRegistry
 |`KAFKA_CLUSTERS_0_JMXPORT`        	|Open JMX port of a broker
 |`KAFKA_CLUSTERS_0_READONLY`        	|Enable read only mode. Default: false
+|`KAFKA_CLUSTERS_0_DISABLELOGDIRSCOLLECTION`        	|Disable collecting segment size information from log dirs. Should be set to true for Confluent Cloud. Default: false
 |`KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME` |Given name for the Kafka Connect cluster
 |`KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS` |Address of the Kafka Connect service endpoint 
 |`LOGGING_LEVEL_ROOT`        	| Setting log level (all, debug, info, warn, error, fatal, off). Default: debug
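
As an illustration only (not part of this commit), a minimal docker-compose sketch of how the new variable might be passed to the container; the service name, cluster name, and bootstrap address are placeholders, while the image and port follow the project's published defaults:

services:
  kafka-ui:
    image: provectuslabs/kafka-ui
    ports:
      - "8080:8080"
    environment:
      KAFKA_CLUSTERS_0_NAME: local                        # placeholder cluster name
      KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:9092       # placeholder bootstrap address
      KAFKA_CLUSTERS_0_DISABLELOGDIRSCOLLECTION: "true"   # skip log dir / segment size collection (e.g. Confluent Cloud)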

kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java (+1 -0)

@@ -30,6 +30,7 @@ public class ClustersProperties {
     int jmxPort;
     Properties properties;
     boolean readOnly = false;
+    boolean disableLogDirsCollection = false;
   }
 
   @Data
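
For context (also not part of the diff), the `KAFKA_CLUSTERS_0_DISABLELOGDIRSCOLLECTION` variable documented above reaches this field through Spring Boot's relaxed binding; a hedged sketch of the equivalent YAML application config, assuming the kafka.clusters prefix implied by the variable names:

kafka:
  clusters:
    - name: local                      # placeholder cluster name
      bootstrapServers: kafka:9092     # placeholder bootstrap address
      disableLogDirsCollection: true   # the new flag; defaults to false when omitted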

kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java (+1 -0)

@@ -31,5 +31,6 @@ public class KafkaCluster {
   private final String protobufMessageName;
   private final Properties properties;
   private final Boolean readOnly;
+  private final Boolean disableLogDirsCollection;
   private final List<Feature> features;
 }

kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java (+38 -4)

@@ -129,11 +129,21 @@ public class KafkaService {
                     getClusterMetrics(ac.getAdminClient())
                         .flatMap(i -> fillJmxMetrics(i, cluster.getName(), ac.getAdminClient()))
                         .flatMap(clusterMetrics ->
-                            getTopicsData(ac.getAdminClient()).flatMap(it ->
-                                updateSegmentMetrics(ac.getAdminClient(), clusterMetrics, it)
+                            getTopicsData(ac.getAdminClient()).flatMap(it -> {
+                                  if (cluster.getDisableLogDirsCollection() == null
+                                      || !cluster.getDisableLogDirsCollection()) {
+                                    return updateSegmentMetrics(
+                                        ac.getAdminClient(), clusterMetrics, it
+                                    );
+                                  } else {
+                                    return emptySegmentMetrics(clusterMetrics, it);
+                                  }
+                                }
                             ).map(segmentSizeDto -> buildFromData(cluster, version, segmentSizeDto))
                         )
             )
+        ).doOnError(e ->
+            log.error("Failed to collect cluster {} info", cluster.getName(), e)
         ).onErrorResume(
             e -> Mono.just(cluster.toBuilder()
                 .status(ServerStatus.OFFLINE)
@@ -484,6 +494,28 @@ public class KafkaService {
         .build();
   }
 
+  private Mono<InternalSegmentSizeDto> emptySegmentMetrics(InternalClusterMetrics clusterMetrics,
+                                                            List<InternalTopic> internalTopics) {
+    return Mono.just(
+        InternalSegmentSizeDto.builder()
+        .clusterMetricsWithSegmentSize(
+            clusterMetrics.toBuilder()
+                .segmentSize(0)
+                .segmentCount(0)
+                .internalBrokerDiskUsage(Collections.emptyMap())
+                .build()
+        )
+        .internalTopicWithSegmentSize(
+            internalTopics.stream().collect(
+                Collectors.toMap(
+                    InternalTopic::getName,
+                    i -> i
+                )
+            )
+        ).build()
+    );
+  }
+
   private Mono<InternalSegmentSizeDto> updateSegmentMetrics(AdminClient ac,
                                                             InternalClusterMetrics clusterMetrics,
                                                             List<InternalTopic> internalTopics) {
@@ -491,9 +523,11 @@ public class KafkaService {
         internalTopics.stream().map(InternalTopic::getName).collect(Collectors.toList());
     return ClusterUtil.toMono(ac.describeTopics(names).all()).flatMap(topic ->
         ClusterUtil.toMono(ac.describeCluster().nodes()).flatMap(nodes ->
+
             ClusterUtil.toMono(
-                ac.describeLogDirs(nodes.stream().map(Node::id).collect(Collectors.toList())).all())
-                .map(log -> {
+                ac.describeLogDirs(
+                    nodes.stream().map(Node::id).collect(Collectors.toList())).all()
+                ).map(log -> {
                   final List<Tuple3<Integer, TopicPartition, Long>> topicPartitions =
                       log.entrySet().stream().flatMap(b ->
                           b.getValue().entrySet().stream().flatMap(topicMap ->