Browse Source

checkstyle fixes

iliax 2 years ago
parent
commit
9d2ecf533e
23 changed files with 78 additions and 136 deletions
  1. 0 8
      kafka-ui-api/pom.xml
  2. 0 25
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
  3. 7 9
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ClusterMapper.java
  4. 1 3
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalTopic.java
  5. 1 3
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/Metrics.java
  6. 2 3
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/MetricsScrapeProperties.java
  7. 2 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/PartitionDistributionStats.java
  8. 1 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/Statistics.java
  9. 1 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/BrokerService.java
  10. 1 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java
  11. 20 48
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
  12. 1 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java
  13. 1 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/RawMetric.java
  14. 1 3
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/scrape/IoRatesMetricsScanner.java
  15. 4 2
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/scrape/MetricsScrapping.java
  16. 3 2
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/scrape/ScrapedClusterState.java
  17. 2 2
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/scrape/jmx/JmxMetricsScraper.java
  18. 5 5
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/scrape/jmx/JmxSslSocketFactory.java
  19. 10 4
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/scrape/prometheus/PrometheusEndpointParser.java
  20. 1 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/scrape/prometheus/PrometheusMetricsRetriever.java
  21. 3 2
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/PrometheusEndpointUtil.java
  22. 2 1
      kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/TopicsExporterTest.java
  23. 9 9
      kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/metrics/scrape/IoRatesMetricsScannerTest.java

+ 0 - 8
kafka-ui-api/pom.xml

@@ -237,19 +237,11 @@
         <dependency>
             <groupId>io.prometheus</groupId>
             <artifactId>simpleclient</artifactId>
-            <version>0.16.0</version>
         </dependency>
         <dependency>
             <groupId>io.prometheus</groupId>
             <artifactId>simpleclient_common</artifactId>
-            <version>0.16.0</version>
         </dependency>
-        <dependency>
-            <groupId>io.prometheus</groupId>
-            <artifactId>simpleclient_pushgateway</artifactId>
-            <version>0.16.0</version>
-        </dependency>
-
 
         <dependency>
             <groupId>org.codehaus.groovy</groupId>

+ 0 - 25
kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java

@@ -73,31 +73,6 @@ public class ClustersProperties {
     String password;
     String keystoreLocation;
     String keystorePassword;
-
-//    JmxScraper jmxScraper;
-//    PrometheusScraper prometheusScraper;
-//
-//    @Data
-//    @ToString(exclude = "password")
-//    public static class JmxScraper {
-//      Integer port;
-//      Boolean ssl;
-//      String username;
-//      String password;
-//      String keystoreLocation;
-//      String keystorePassword;
-//    }
-//
-//    @Data
-//    @ToString(exclude = "password")
-//    public static class PrometheusScraper {
-//      Integer port;
-//      Boolean ssl;
-//      String username;
-//      String password;
-//      String keystoreLocation;
-//      String keystorePassword;
-//    }
   }
 
   @Data

+ 7 - 9
kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ClusterMapper.java

@@ -1,11 +1,10 @@
 package com.provectus.kafka.ui.mapper;
 
-import static io.prometheus.client.Collector.*;
+import static io.prometheus.client.Collector.MetricFamilySamples;
 
 import com.provectus.kafka.ui.config.ClustersProperties;
 import com.provectus.kafka.ui.model.BrokerConfigDTO;
 import com.provectus.kafka.ui.model.BrokerDTO;
-import com.provectus.kafka.ui.model.BrokerDiskUsageDTO;
 import com.provectus.kafka.ui.model.BrokerMetricsDTO;
 import com.provectus.kafka.ui.model.ClusterDTO;
 import com.provectus.kafka.ui.model.ClusterFeature;
@@ -58,11 +57,11 @@ public interface ClusterMapper {
   @Deprecated
   default ClusterMetricsDTO toClusterMetrics(Metrics metrics) {
     return new ClusterMetricsDTO()
-        .items(convert(metrics.getSummarizedMetrics()).toList());
+        .items(convert(metrics.getSummarizedMetrics().toList()));
   }
 
-  private Stream<MetricDTO> convert(Stream<MetricFamilySamples> metrics) {
-    return metrics
+  private List<MetricDTO> convert(List<MetricFamilySamples> metrics) {
+    return metrics.stream()
         .flatMap(m -> m.samples.stream())
         .map(s ->
             new MetricDTO()
@@ -71,12 +70,11 @@ public interface ClusterMapper {
                     .boxed()
                     .collect(Collectors.toMap(s.labelNames::get, s.labelValues::get)))
                 .value(BigDecimal.valueOf(s.value))
-        );
+        ).toList();
   }
 
-  @Deprecated
-  default BrokerMetricsDTO toBrokerMetrics(Stream<MetricFamilySamples> metrics) {
-    return new BrokerMetricsDTO().metrics(convert(metrics).toList());
+  default BrokerMetricsDTO toBrokerMetrics(List<MetricFamilySamples> metrics) {
+    return new BrokerMetricsDTO().metrics(convert(metrics));
   }
 
   @Mapping(target = "isSensitive", source = "sensitive")

+ 1 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalTopic.java

@@ -1,8 +1,7 @@
 package com.provectus.kafka.ui.model;
 
-import static com.provectus.kafka.ui.model.InternalLogDirStats.*;
+import static com.provectus.kafka.ui.model.InternalLogDirStats.SegmentStats;
 
-import com.provectus.kafka.ui.service.metrics.scrape.ScrapedClusterState;
 import java.math.BigDecimal;
 import java.util.List;
 import java.util.Map;
@@ -13,7 +12,6 @@ import lombok.Builder;
 import lombok.Data;
 import org.apache.kafka.clients.admin.ConfigEntry;
 import org.apache.kafka.clients.admin.TopicDescription;
-import org.apache.kafka.common.TopicPartition;
 
 @Data
 @Builder(toBuilder = true)

+ 1 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/Metrics.java

@@ -1,18 +1,16 @@
 package com.provectus.kafka.ui.model;
 
-import static io.prometheus.client.Collector.*;
+import static io.prometheus.client.Collector.MetricFamilySamples;
 import static java.util.stream.Collectors.toMap;
 
 import com.google.common.collect.Streams;
 import com.provectus.kafka.ui.service.metrics.scrape.inferred.InferredMetrics;
 import groovy.lang.Tuple;
-import jakarta.annotation.Nullable;
 import java.math.BigDecimal;
 import java.util.Collection;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Objects;
 import java.util.Optional;
 import java.util.stream.Stream;
 import lombok.Builder;

+ 2 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/MetricsScrapeProperties.java

@@ -1,11 +1,10 @@
 package com.provectus.kafka.ui.model;
 
-import static com.provectus.kafka.ui.config.ClustersProperties.*;
+import static com.provectus.kafka.ui.config.ClustersProperties.KeystoreConfig;
+import static com.provectus.kafka.ui.config.ClustersProperties.TruststoreConfig;
 
-import com.provectus.kafka.ui.config.ClustersProperties;
 import jakarta.annotation.Nullable;
 import lombok.Builder;
-import lombok.Data;
 import lombok.Value;
 
 @Value

+ 2 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/PartitionDistributionStats.java

@@ -38,7 +38,8 @@ public class PartitionDistributionStats {
     );
   }
 
-  static PartitionDistributionStats create(List<TopicDescription> topicDescriptions, int minPartitionsForSkewCalculation) {
+  static PartitionDistributionStats create(List<TopicDescription> topicDescriptions,
+                                           int minPartitionsForSkewCalculation) {
     var partitionLeaders = new HashMap<Node, Integer>();
     var partitionsReplicated = new HashMap<Node, Integer>();
     var isr = new HashMap<Node, Integer>();

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/Statistics.java

@@ -34,7 +34,7 @@ public class Statistics {
         .build();
   }
 
-  public Stream<TopicDescription> topicDescriptions(){
+  public Stream<TopicDescription> topicDescriptions() {
     return clusterState.getTopicStates().values().stream().map(ScrapedClusterState.TopicState::description);
   }
 

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/BrokerService.java

@@ -1,6 +1,6 @@
 package com.provectus.kafka.ui.service;
 
-import static io.prometheus.client.Collector.*;
+import static io.prometheus.client.Collector.MetricFamilySamples;
 
 import com.provectus.kafka.ui.exception.InvalidRequestApiException;
 import com.provectus.kafka.ui.exception.LogDirNotFoundApiException;

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java

@@ -10,8 +10,8 @@ import com.provectus.kafka.ui.model.ClusterConfigValidationDTO;
 import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.service.ksql.KsqlApiClient;
 import com.provectus.kafka.ui.service.masking.DataMasking;
-import com.provectus.kafka.ui.service.metrics.scrape.jmx.JmxMetricsRetriever;
 import com.provectus.kafka.ui.service.metrics.scrape.MetricsScrapping;
+import com.provectus.kafka.ui.service.metrics.scrape.jmx.JmxMetricsRetriever;
 import com.provectus.kafka.ui.sr.ApiClient;
 import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
 import com.provectus.kafka.ui.util.KafkaServicesValidation;

+ 20 - 48
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java

@@ -15,7 +15,6 @@ import com.provectus.kafka.ui.exception.ValidationException;
 import com.provectus.kafka.ui.util.KafkaVersion;
 import com.provectus.kafka.ui.util.annotation.KafkaClientInternalsDependant;
 import java.io.Closeable;
-import java.time.Duration;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -23,7 +22,6 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
-import java.util.Properties;
 import java.util.Set;
 import java.util.concurrent.CompletionException;
 import java.util.concurrent.ExecutionException;
@@ -58,8 +56,6 @@ import org.apache.kafka.clients.admin.NewTopic;
 import org.apache.kafka.clients.admin.OffsetSpec;
 import org.apache.kafka.clients.admin.RecordsToDelete;
 import org.apache.kafka.clients.admin.TopicDescription;
-import org.apache.kafka.clients.consumer.ConsumerConfig;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
 import org.apache.kafka.clients.consumer.OffsetAndMetadata;
 import org.apache.kafka.common.KafkaException;
 import org.apache.kafka.common.KafkaFuture;
@@ -81,8 +77,6 @@ import org.apache.kafka.common.errors.TopicAuthorizationException;
 import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
 import org.apache.kafka.common.errors.UnsupportedVersionException;
 import org.apache.kafka.common.resource.ResourcePatternFilter;
-import org.apache.kafka.common.serialization.BytesDeserializer;
-import org.apache.kafka.common.utils.Bytes;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.core.scheduler.Schedulers;
@@ -184,18 +178,18 @@ public class ReactiveAdminClient implements Closeable {
   // (see MonoSink.success(..) javadoc for details)
   public static <T> Mono<T> toMono(KafkaFuture<T> future) {
     return Mono.<T>create(sink -> future.whenComplete((res, ex) -> {
-          if (ex != null) {
-            // KafkaFuture doc is unclear about what exception wrapper will be used
-            // (from docs it should be ExecutionException, be we actually see CompletionException, so checking both
-            if (ex instanceof CompletionException || ex instanceof ExecutionException) {
-              sink.error(ex.getCause()); //unwrapping exception
-            } else {
-              sink.error(ex);
-            }
-          } else {
-            sink.success(res);
-          }
-        })).doOnCancel(() -> future.cancel(true))
+      if (ex != null) {
+        // KafkaFuture doc is unclear about what exception wrapper will be used
+        // (from docs it should be ExecutionException, be we actually see CompletionException, so checking both
+        if (ex instanceof CompletionException || ex instanceof ExecutionException) {
+          sink.error(ex.getCause()); //unwrapping exception
+        } else {
+          sink.error(ex);
+        }
+      } else {
+        sink.success(res);
+      }
+    })).doOnCancel(() -> future.cancel(true))
         // AdminClient is using single thread for kafka communication
         // and by default all downstream operations (like map(..)) on created Mono will be executed on this thread.
         // If some of downstream operation are blocking (by mistake) this can lead to
@@ -400,12 +394,12 @@
         result.controller(), result.clusterId(), result.nodes(), result.authorizedOperations());
     return toMono(allOfFuture).then(
         Mono.fromCallable(() ->
-            new ClusterDescription(
-                result.controller().get(),
-                result.clusterId().get(),
-                result.nodes().get(),
-                result.authorizedOperations().get()
-            )
+          new ClusterDescription(
+            result.controller().get(),
+            result.clusterId().get(),
+            result.nodes().get(),
+            result.authorizedOperations().get()
+          )
         )
     );
   }
@@ -559,8 +553,8 @@
 
   @VisibleForTesting
   static Set<TopicPartition> filterPartitionsWithLeaderCheck(Collection<TopicDescription> topicDescriptions,
-                                                             Predicate<TopicPartition> partitionPredicate,
-                                                             boolean failOnUnknownLeader) {
+                                                              Predicate<TopicPartition> partitionPredicate,
+                                                              boolean failOnUnknownLeader) {
     var goodPartitions = new HashSet<TopicPartition>();
     for (TopicDescription description : topicDescriptions) {
       var goodTopicPartitions = new ArrayList<TopicPartition>();
@@ -726,26 +720,4 @@
   public void close() {
     client.close();
   }
-
-
-  public static void main(String[] args) {
-    Properties props = new Properties();
-    props.put(ConsumerConfig.GROUP_ID_CONFIG, "test_group_1");
-    props.put(ConsumerConfig.CLIENT_ID_CONFIG, "kafka-ui-consumer-" + System.currentTimeMillis());
-    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
-    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class);
-    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class);
-    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
-    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
-    props.put(ConsumerConfig.ALLOW_AUTO_CREATE_TOPICS_CONFIG, "false");
-
-    try (var consumer = new KafkaConsumer<Bytes, Bytes>(props)) {
-      consumer.subscribe(List.of("test"));
-      while (true) {
-        consumer.poll(Duration.ofMillis(500));
-        //consumer.commitSync();
-      }
-    }
-  }
-
 }

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java

@@ -1,6 +1,6 @@
 package com.provectus.kafka.ui.service;
 
-import static com.provectus.kafka.ui.service.metrics.scrape.ScrapedClusterState.*;
+import static com.provectus.kafka.ui.service.metrics.scrape.ScrapedClusterState.TopicState;
 import static java.util.stream.Collectors.toList;
 import static java.util.stream.Collectors.toMap;
 

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/RawMetric.java

@@ -25,7 +25,7 @@ public interface RawMetric {
     return new SimpleMetric(name, labels, value);
   }
 
-  static Stream<MetricFamilySamples> groupIntoMFS(Collection<RawMetric> rawMetrics) {
+  static Stream<MetricFamilySamples> groupIntoMfs(Collection<RawMetric> rawMetrics) {
     Map<String, MetricFamilySamples> map = new LinkedHashMap<>();
     for (RawMetric m : rawMetrics) {
       var mfs = map.get(m.name());

+ 1 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/scrape/IoRatesMetricsScanner.java

@@ -1,12 +1,10 @@
 package com.provectus.kafka.ui.service.metrics.scrape;
 
-import static io.prometheus.client.Collector.*;
+import static io.prometheus.client.Collector.MetricFamilySamples;
 import static org.apache.commons.lang3.StringUtils.containsIgnoreCase;
 import static org.apache.commons.lang3.StringUtils.endsWithIgnoreCase;
 
 import com.provectus.kafka.ui.model.Metrics;
-import com.provectus.kafka.ui.service.metrics.RawMetric;
-import io.prometheus.client.Collector;
 import java.math.BigDecimal;
 import java.util.HashMap;
 import java.util.List;

+ 4 - 2
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/scrape/MetricsScrapping.java

@@ -1,7 +1,9 @@
 package com.provectus.kafka.ui.service.metrics.scrape;
 
-import static com.provectus.kafka.ui.config.ClustersProperties.*;
-import static com.provectus.kafka.ui.model.MetricsScrapeProperties.*;
+import static com.provectus.kafka.ui.config.ClustersProperties.Cluster;
+import static com.provectus.kafka.ui.config.ClustersProperties.KeystoreConfig;
+import static com.provectus.kafka.ui.model.MetricsScrapeProperties.JMX_METRICS_TYPE;
+import static com.provectus.kafka.ui.model.MetricsScrapeProperties.PROMETHEUS_METRICS_TYPE;
 
 import com.provectus.kafka.ui.model.Metrics;
 import com.provectus.kafka.ui.model.MetricsScrapeProperties;

+ 3 - 2
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/scrape/ScrapedClusterState.java

@@ -1,7 +1,8 @@
 package com.provectus.kafka.ui.service.metrics.scrape;
 
-import static com.provectus.kafka.ui.model.InternalLogDirStats.*;
-import static com.provectus.kafka.ui.service.ReactiveAdminClient.*;
+import static com.provectus.kafka.ui.model.InternalLogDirStats.LogDirSpaceStats;
+import static com.provectus.kafka.ui.model.InternalLogDirStats.SegmentStats;
+import static com.provectus.kafka.ui.service.ReactiveAdminClient.ClusterDescription;
 
 import com.google.common.collect.Table;
 import com.provectus.kafka.ui.model.InternalLogDirStats;

+ 2 - 2
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/scrape/jmx/JmxMetricsScraper.java

@@ -1,6 +1,6 @@
 package com.provectus.kafka.ui.service.metrics.scrape.jmx;
 
-import static io.prometheus.client.Collector.*;
+import static io.prometheus.client.Collector.MetricFamilySamples;
 
 import com.provectus.kafka.ui.model.MetricsScrapeProperties;
 import com.provectus.kafka.ui.service.metrics.RawMetric;
@@ -29,7 +29,7 @@ public class JmxMetricsScraper  {
         .flatMap(n -> jmxMetricsRetriever.retrieveFromNode(scrapeProperties, n).map(metrics -> Tuples.of(n, metrics)))
         .collectMap(
             t -> t.getT1().id(),
-            t -> RawMetric.groupIntoMFS(t.getT2()).toList()
+            t -> RawMetric.groupIntoMfs(t.getT2()).toList()
         );
     return collected.map(PerBrokerScrapedMetrics::new);
   }

+ 5 - 5
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/scrape/jmx/JmxSslSocketFactory.java

@@ -74,7 +74,7 @@ class JmxSslSocketFactory extends javax.net.ssl.SSLSocketFactory {
 
   private static final ThreadLocal<Ssl> SSL_CONTEXT_THREAD_LOCAL = new ThreadLocal<>();
 
-  private static final Map<HostAndPort, javax.net.ssl.SSLSocketFactory> CACHED_SSL_FACTORIES = new ConcurrentHashMap<>();
+  private static final Map<HostAndPort, javax.net.ssl.SSLSocketFactory> CACHED_FACTORIES = new ConcurrentHashMap<>();
 
   private record HostAndPort(String host, int port) {
   }
@@ -95,7 +95,7 @@ class JmxSslSocketFactory extends javax.net.ssl.SSLSocketFactory {
 
   // should be called when (host:port) -> factory cache should be invalidated (ex. on app config reload)
   public static void clearFactoriesCache() {
-    CACHED_SSL_FACTORIES.clear();
+    CACHED_FACTORIES.clear();
   }
 
   public static void clearThreadLocalContext() {
@@ -155,11 +155,11 @@ class JmxSslSocketFactory extends javax.net.ssl.SSLSocketFactory {
   @Override
   public Socket createSocket(String host, int port) throws IOException {
     var hostAndPort = new HostAndPort(host, port);
-    if (CACHED_SSL_FACTORIES.containsKey(hostAndPort)) {
-      return CACHED_SSL_FACTORIES.get(hostAndPort).createSocket(host, port);
+    if (CACHED_FACTORIES.containsKey(hostAndPort)) {
+      return CACHED_FACTORIES.get(hostAndPort).createSocket(host, port);
     } else if (threadLocalContextSet()) {
       var factory = createFactoryFromThreadLocalCtx();
-      CACHED_SSL_FACTORIES.put(hostAndPort, factory);
+      CACHED_FACTORIES.put(hostAndPort, factory);
       return factory.createSocket(host, port);
     }
     return defaultSocketFactory.createSocket(host, port);
     return defaultSocketFactory.createSocket(host, port);

+ 10 - 4
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/scrape/prometheus/PrometheusEndpointParser.java

@@ -1,6 +1,6 @@
 package com.provectus.kafka.ui.service.metrics.scrape.prometheus;
 
-import static io.prometheus.client.Collector.MetricFamilySamples.*;
+import static io.prometheus.client.Collector.MetricFamilySamples.Sample;
 
 import com.google.common.base.Enums;
 import io.prometheus.client.Collector.MetricFamilySamples;
@@ -31,7 +31,8 @@ public class PrometheusEndpointParser {
 
     void registerAndReset() {
       if (!samples.isEmpty()) {
-        registered.add(new MetricFamilySamples(name, type, Optional.ofNullable(help).orElse(name), List.copyOf(samples)));
+        registered.add(
+            new MetricFamilySamples(name, type, Optional.ofNullable(help).orElse(name), List.copyOf(samples)));
       }
       //resetting state:
       name = null;
@@ -40,8 +41,14 @@
       allowedNames.clear();
       samples.clear();
     }
+
+    List<MetricFamilySamples> getRegistered() {
+      registerAndReset(); // last in progress metric should be registered
+      return registered;
+    }
   }
 
+  // general logic taken from https://github.com/prometheus/client_python/blob/master/prometheus_client/parser.py
   public static List<MetricFamilySamples> parse(Stream<String> lines) {
     ParserContext context = new ParserContext();
     lines.map(String::trim)
@@ -60,8 +67,7 @@
             processSample(context, line);
           }
         });
-    context.registerAndReset();
-    return context.registered;
+    return context.getRegistered();
   }
 
   private static void processHelp(ParserContext context, String[] parts) {

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/scrape/prometheus/PrometheusMetricsRetriever.java

@@ -1,6 +1,6 @@
 package com.provectus.kafka.ui.service.metrics.scrape.prometheus;
 
-import static io.prometheus.client.Collector.*;
+import static io.prometheus.client.Collector.MetricFamilySamples;
 
 import com.provectus.kafka.ui.model.MetricsScrapeProperties;
 import com.provectus.kafka.ui.util.WebClientConfigurator;

+ 3 - 2
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/PrometheusEndpointUtil.java

@@ -1,6 +1,6 @@
 package com.provectus.kafka.ui.util;
 package com.provectus.kafka.ui.util;
 
 
-import static io.prometheus.client.Collector.*;
+import static io.prometheus.client.Collector.MetricFamilySamples;
 
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Iterators;
@@ -47,7 +47,8 @@ public final class PrometheusEndpointUtil {
             .getSummarizedMetrics()
             .getSummarizedMetrics()
             .map(mfs -> addLbl(mfs, "cluster", e.getKey())))
             .map(mfs -> addLbl(mfs, "cluster", e.getKey())))
         // merging MFS with same name, keeping order
         // merging MFS with same name, keeping order
-        .collect(Collectors.toMap(mfs -> mfs.name, mfs -> mfs, PrometheusEndpointUtil::concatSamples, LinkedHashMap::new))
+        .collect(Collectors.toMap(mfs -> mfs.name, mfs -> mfs,
+            PrometheusEndpointUtil::concatSamples, LinkedHashMap::new))
         .values()
         .values()
         .stream();
         .stream();
   }
   }

+ 2 - 1
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/TopicsExporterTest.java

@@ -1,6 +1,7 @@
 package com.provectus.kafka.ui.service.integration.odd;
 package com.provectus.kafka.ui.service.integration.odd;
 
 
-import static com.provectus.kafka.ui.service.metrics.scrape.ScrapedClusterState.*;
+import static com.provectus.kafka.ui.service.metrics.scrape.ScrapedClusterState.TopicState;
+import static com.provectus.kafka.ui.service.metrics.scrape.ScrapedClusterState.empty;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.mockito.ArgumentMatchers.anyString;
 import static org.mockito.ArgumentMatchers.anyString;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.mock;

+ 9 - 9
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/metrics/scrape/IoRatesMetricsScannerTest.java

@@ -2,7 +2,7 @@ package com.provectus.kafka.ui.service.metrics.scrape;
 
 
 import static io.prometheus.client.Collector.MetricFamilySamples;
 import static io.prometheus.client.Collector.MetricFamilySamples;
 import static java.util.Arrays.stream;
 import static java.util.Arrays.stream;
-import static java.util.stream.Collectors.*;
+import static java.util.stream.Collectors.toMap;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.assertThat;
 
 
 import com.provectus.kafka.ui.service.metrics.scrape.prometheus.PrometheusEndpointParser;
 import com.provectus.kafka.ui.service.metrics.scrape.prometheus.PrometheusEndpointParser;
@@ -21,18 +21,18 @@ class IoRatesMetricsScannerTest {
     populateWith(
     populateWith(
         nodeMetrics(
         nodeMetrics(
             new Node(0, "host", 123),
             new Node(0, "host", 123),
-            "kafka_server_BrokerTopicMetrics_FifteenMinuteRate{name=\"BytesInPerSec\",topic=\"test-topic\",} 1.0",
-            "kafka_server_BrokerTopicMetrics_FifteenMinuteRate{name=\"BytesOutPerSec\",topic=\"test-topic\",} 2.0",
-            "kafka_server_brokertopicmetrics_fifteenminuterate{name=\"bytesinpersec\",topic=\"test-topic\",} 1.0",
-            "kafka_server_brokertopicmetrics_fifteenminuterate{name=\"bytesoutpersec\",topic=\"test-topic\",} 2.0",
-            "some_unknown_prefix_brokertopicmetrics_fifteenminuterate{name=\"bytesinpersec\",topic=\"test-topic\",} 1.0",
-            "some_unknown_prefix_brokertopicmetrics_fifteenminuterate{name=\"bytesoutpersec\",topic=\"test-topic\",} 2.0"
+            "kafka_server_BrokerTopicMetrics_FifteenMinuteRate{name=\"BytesInPerSec\",topic=\"test\",} 1.0",
+            "kafka_server_BrokerTopicMetrics_FifteenMinuteRate{name=\"BytesOutPerSec\",topic=\"test\",} 2.0",
+            "kafka_server_brokertopicmetrics_fifteenminuterate{name=\"bytesinpersec\",topic=\"test\",} 1.0",
+            "kafka_server_brokertopicmetrics_fifteenminuterate{name=\"bytesoutpersec\",topic=\"test\",} 2.0",
+            "some_unknown_prefix_brokertopicmetrics_fifteenminuterate{name=\"bytesinpersec\",topic=\"test\",} 1.0",
+            "some_unknown_prefix_brokertopicmetrics_fifteenminuterate{name=\"bytesoutpersec\",topic=\"test\",} 2.0"
         )
         )
     );
     );
     assertThat(ioRatesMetricsScanner.bytesInFifteenMinuteRate)
     assertThat(ioRatesMetricsScanner.bytesInFifteenMinuteRate)
-        .containsEntry("test-topic", new BigDecimal("3.0"));
+        .containsEntry("test", new BigDecimal("3.0"));
     assertThat(ioRatesMetricsScanner.bytesOutFifteenMinuteRate)
     assertThat(ioRatesMetricsScanner.bytesOutFifteenMinuteRate)
-        .containsEntry("test-topic", new BigDecimal("6.0"));
+        .containsEntry("test", new BigDecimal("6.0"));
   }
   }
 
 
   @Test
   @Test