wip

parent daedc4d7c7
commit dbab4e367d

35 changed files with 526 additions and 322 deletions
@@ -1,2 +1,69 @@
+lowercaseOutputName: true
 rules:
-  - pattern: ".*"
+  # Special cases and very specific rules
+  - pattern: kafka.server<type=(.+), name=(.+), clientId=(.+), topic=(.+), partition=(.*)><>Value
+    name: kafka_server_$1_$2
+    type: GAUGE
+    labels:
+      clientId: '$3'
+      topic: '$4'
+      partition: '$5'
+  - pattern: kafka.server<type=(.+), name=(.+), clientId=(.+), brokerHost=(.+), brokerPort=(.+)><>Value
+    name: kafka_server_$1_$2
+    type: GAUGE
+    labels:
+      clientId: '$3'
+      broker: '$4:$5'
+
+  - pattern: kafka.server<type=KafkaRequestHandlerPool, name=RequestHandlerAvgIdlePercent><>OneMinuteRate
+    name: kafka_server_kafkarequesthandlerpool_requesthandleravgidlepercent_total
+    type: GAUGE
+
+  - pattern: kafka.server<type=socket-server-metrics, clientSoftwareName=(.+), clientSoftwareVersion=(.+), listener=(.+), networkProcessor=(.+)><>connections
+    name: kafka_server_socketservermetrics_connections
+    type: GAUGE
+    labels:
+      client_software_name: '$1'
+      client_software_version: '$2'
+      listener: '$3'
+      network_processor: '$4'
+
+  - pattern: 'kafka.server<type=socket-server-metrics, listener=(.+), networkProcessor=(.+)><>(.+):'
+    name: kafka_server_socketservermetrics_$3
+    type: GAUGE
+    labels:
+      listener: '$1'
+      network_processor: '$2'
+
+  # Count and Value
+  - pattern: kafka.(.*)<type=(.+), name=(.+), (.+)=(.+), (.+)=(.+)><>(Count|Value)
+    name: kafka_$1_$2_$3
+    labels:
+      '$4': '$5'
+      '$6': '$7'
+  - pattern: kafka.(.*)<type=(.+), name=(.+), (.+)=(.+)><>(Count|Value)
+    name: kafka_$1_$2_$3
+    labels:
+      '$4': '$5'
+  - pattern: kafka.(.*)<type=(.+), name=(.+)><>(Count|Value)
+    name: kafka_$1_$2_$3
+
+  # Percentile
+  - pattern: kafka.(.*)<type=(.+), name=(.+), (.+)=(.*), (.+)=(.+)><>(\d+)thPercentile
+    name: kafka_$1_$2_$3
+    type: GAUGE
+    labels:
+      '$4': '$5'
+      '$6': '$7'
+      quantile: '0.$8'
+  - pattern: kafka.(.*)<type=(.+), name=(.+), (.+)=(.*)><>(\d+)thPercentile
+    name: kafka_$1_$2_$3
+    type: GAUGE
+    labels:
+      '$4': '$5'
+      quantile: '0.$6'
+  - pattern: kafka.(.*)<type=(.+), name=(.+)><>(\d+)thPercentile
+    name: kafka_$1_$2_$3
+    type: GAUGE
+    labels:
+      quantile: '0.$4'
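For illustration only (not part of the commit): under these rules, the first special-case pattern rewrites a broker MBean attribute such as the hypothetical sample below, with lowercaseOutputName flattening the metric name:

  input JMX attribute (hypothetical values):
    kafka.server<type=FetcherStats, name=BytesPerSec, clientId=fetcher-0, topic=orders, partition=0><>Value
  resulting Prometheus sample:
    kafka_server_fetcherstats_bytespersec{clientId="fetcher-0", topic="orders", partition="0"}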
@@ -244,6 +244,11 @@
       <artifactId>simpleclient_common</artifactId>
       <version>0.16.0</version>
     </dependency>
+    <dependency>
+      <groupId>io.prometheus</groupId>
+      <artifactId>simpleclient_pushgateway</artifactId>
+      <version>0.16.0</version>
+    </dependency>

     <dependency>
@@ -1,6 +1,6 @@
 package com.provectus.kafka.ui.config;

-import com.provectus.kafka.ui.model.MetricsConfig;
+import com.provectus.kafka.ui.model.MetricsScrapeProperties;
 import jakarta.annotation.PostConstruct;
 import java.util.ArrayList;
 import java.util.HashMap;

@@ -63,7 +63,7 @@ public class ClustersProperties {
   }

   @Data
-  @ToString(exclude = "password")
+  @ToString(exclude = {"password", "keystorePassword"})
   public static class MetricsConfigData {
     String type;
     Integer port;

@@ -72,6 +72,31 @@ public class ClustersProperties {
     String password;
     String keystoreLocation;
     String keystorePassword;
+
+    //    JmxScraper jmxScraper;
+    //    PrometheusScraper prometheusScraper;
+    //
+    //    @Data
+    //    @ToString(exclude = "password")
+    //    public static class JmxScraper {
+    //      Integer port;
+    //      Boolean ssl;
+    //      String username;
+    //      String password;
+    //      String keystoreLocation;
+    //      String keystorePassword;
+    //    }
+    //
+    //    @Data
+    //    @ToString(exclude = "password")
+    //    public static class PrometheusScraper {
+    //      Integer port;
+    //      Boolean ssl;
+    //      String username;
+    //      String password;
+    //      String keystoreLocation;
+    //      String keystorePassword;
+    //    }
   }

   @Data

@@ -155,7 +180,7 @@ public class ClustersProperties {
   private void setMetricsDefaults() {
     for (Cluster cluster : clusters) {
       if (cluster.getMetrics() != null && !StringUtils.hasText(cluster.getMetrics().getType())) {
-        cluster.getMetrics().setType(MetricsConfig.JMX_METRICS_TYPE);
+        cluster.getMetrics().setType(MetricsScrapeProperties.JMX_METRICS_TYPE);
       }
     }
   }
@@ -52,6 +52,7 @@ public interface ClusterMapper {

   ClusterStatsDTO toClusterStats(InternalClusterState clusterState);

+  @Deprecated
   default ClusterMetricsDTO toClusterMetrics(Metrics metrics) {
     return new ClusterMetricsDTO()
         .items(metrics.getSummarizedMetrics().map(this::convert).collect(Collectors.toList()));
@@ -5,6 +5,7 @@ import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
 import com.provectus.kafka.ui.emitter.PollingSettings;
 import com.provectus.kafka.ui.service.ksql.KsqlApiClient;
 import com.provectus.kafka.ui.service.masking.DataMasking;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.MetricsScrapping;
 import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
 import com.provectus.kafka.ui.util.ReactiveFailover;
 import java.util.Map;

@@ -25,10 +26,10 @@ public class KafkaCluster {
   private final String bootstrapServers;
   private final Properties properties;
   private final boolean readOnly;
-  private final MetricsConfig metricsConfig;
   private final DataMasking masking;
   private final PollingSettings pollingSettings;
   private final ReactiveFailover<KafkaSrClientApi> schemaRegistryClient;
   private final Map<String, ReactiveFailover<KafkaConnectClientApi>> connectsClients;
   private final ReactiveFailover<KsqlApiClient> ksqlClient;
+  private final MetricsScrapping metricsScrapping;
 }
@@ -1,8 +1,11 @@
 package com.provectus.kafka.ui.model;

+import static io.prometheus.client.Collector.*;
 import static java.util.stream.Collectors.toMap;

 import com.provectus.kafka.ui.service.metrics.RawMetric;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.inferred.InferredMetrics;
+import io.prometheus.client.Collector;
 import java.math.BigDecimal;
 import java.util.Collection;
 import java.util.List;

@@ -16,25 +19,32 @@ import lombok.Value;
 @Value
 public class Metrics {

-  Map<Integer, BigDecimal> brokerBytesInPerSec;
-  Map<Integer, BigDecimal> brokerBytesOutPerSec;
-  Map<String, BigDecimal> topicBytesInPerSec;
-  Map<String, BigDecimal> topicBytesOutPerSec;
-  Map<Integer, List<RawMetric>> perBrokerMetrics;
-
   public static Metrics empty() {
     return Metrics.builder()
-        .brokerBytesInPerSec(Map.of())
-        .brokerBytesOutPerSec(Map.of())
-        .topicBytesInPerSec(Map.of())
-        .topicBytesOutPerSec(Map.of())
-        .perBrokerMetrics(Map.of())
+        .ioRates(null) //TODO: empty
+        .perBrokerScrapedMetrics(Map.of())
+        .inferredMetrics(InferredMetrics.empty())
         .build();
   }

+  @Builder
+  public record IoRates(Map<Integer, BigDecimal> brokerBytesInPerSec,
+                        Map<Integer, BigDecimal> brokerBytesOutPerSec,
+                        Map<String, BigDecimal> topicBytesInPerSec,
+                        Map<String, BigDecimal> topicBytesOutPerSec) {
+  }
+
+  IoRates ioRates;
+  InferredMetrics inferredMetrics;
+  Map<Integer, List<MetricFamilySamples>> perBrokerScrapedMetrics;
+
+  @Deprecated
   public Stream<RawMetric> getSummarizedMetrics() {
-    return perBrokerMetrics.values().stream()
+    return perBrokerScrapedMetrics
+        .values()
+        .stream()
         .flatMap(Collection::stream)
+        .flatMap(RawMetric::create)
         .collect(toMap(RawMetric::identityKey, m -> m, (m1, m2) -> m1.copyWithValue(m1.value().add(m2.value()))))
         .values()
         .stream();
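For illustration only (not part of the commit): with the reworked model, a caller would assemble a Metrics value from the three new parts roughly as follows (numbers and topic name are hypothetical; Lombok generates Metrics.builder(), as the empty() factory implies):

  Metrics metrics = Metrics.builder()
      .ioRates(Metrics.IoRates.builder()
          .brokerBytesInPerSec(Map.of(1, BigDecimal.valueOf(1024)))
          .brokerBytesOutPerSec(Map.of(1, BigDecimal.valueOf(512)))
          .topicBytesInPerSec(Map.of("orders", BigDecimal.valueOf(256)))
          .topicBytesOutPerSec(Map.of("orders", BigDecimal.valueOf(128)))
          .build())
      .inferredMetrics(InferredMetrics.empty())
      .perBrokerScrapedMetrics(Map.of())
      .build();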
@@ -1,22 +0,0 @@
-package com.provectus.kafka.ui.model;
-
-import lombok.AccessLevel;
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-
-@Data
-@Builder(toBuilder = true)
-@AllArgsConstructor(access = AccessLevel.PRIVATE)
-public class MetricsConfig {
-  public static final String JMX_METRICS_TYPE = "JMX";
-  public static final String PROMETHEUS_METRICS_TYPE = "PROMETHEUS";
-
-  private final String type;
-  private final Integer port;
-  private final boolean ssl;
-  private final String username;
-  private final String password;
-  private final String keystoreLocation;
-  private final String keystorePassword;
-}
@@ -0,0 +1,29 @@
+package com.provectus.kafka.ui.model;
+
+import static com.provectus.kafka.ui.config.ClustersProperties.*;
+
+import com.provectus.kafka.ui.config.ClustersProperties;
+import jakarta.annotation.Nullable;
+import lombok.Builder;
+import lombok.Data;
+import lombok.Value;
+
+@Value
+@Builder
+public class MetricsScrapeProperties {
+  public static final String JMX_METRICS_TYPE = "JMX";
+  public static final String PROMETHEUS_METRICS_TYPE = "PROMETHEUS";
+
+  Integer port;
+  boolean ssl;
+  String username;
+  String password;
+
+  @Nullable
+  KeystoreConfig keystoreConfig;
+
+  @Nullable
+  TruststoreConfig truststoreConfig;
+
+
+}
@@ -8,9 +8,10 @@ import com.provectus.kafka.ui.emitter.PollingSettings;
 import com.provectus.kafka.ui.model.ApplicationPropertyValidationDTO;
 import com.provectus.kafka.ui.model.ClusterConfigValidationDTO;
 import com.provectus.kafka.ui.model.KafkaCluster;
-import com.provectus.kafka.ui.model.MetricsConfig;
 import com.provectus.kafka.ui.service.ksql.KsqlApiClient;
 import com.provectus.kafka.ui.service.masking.DataMasking;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.jmx.JmxMetricsRetriever;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.MetricsScrapping;
 import com.provectus.kafka.ui.sr.ApiClient;
 import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
 import com.provectus.kafka.ui.util.KafkaServicesValidation;

@@ -22,7 +23,6 @@ import java.util.Map;
 import java.util.Optional;
 import java.util.Properties;
 import java.util.stream.Stream;
-import javax.annotation.Nullable;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.stereotype.Service;
 import org.springframework.util.unit.DataSize;

@@ -39,11 +39,13 @@ public class KafkaClusterFactory {
   private static final DataSize DEFAULT_WEBCLIENT_BUFFER = DataSize.parse("20MB");

   private final DataSize webClientMaxBuffSize;
+  private final JmxMetricsRetriever jmxMetricsRetriever;

-  public KafkaClusterFactory(WebclientProperties webclientProperties) {
+  public KafkaClusterFactory(WebclientProperties webclientProperties, JmxMetricsRetriever jmxMetricsRetriever) {
     this.webClientMaxBuffSize = Optional.ofNullable(webclientProperties.getMaxInMemoryBufferSize())
         .map(DataSize::parse)
         .orElse(DEFAULT_WEBCLIENT_BUFFER);
+    this.jmxMetricsRetriever = jmxMetricsRetriever;
   }

   public KafkaCluster create(ClustersProperties properties,

@@ -56,6 +58,7 @@ public class KafkaClusterFactory {
     builder.readOnly(clusterProperties.isReadOnly());
     builder.masking(DataMasking.create(clusterProperties.getMasking()));
     builder.pollingSettings(PollingSettings.create(clusterProperties, properties));
+    builder.metricsScrapping(MetricsScrapping.create(clusterProperties, jmxMetricsRetriever));

     if (schemaRegistryConfigured(clusterProperties)) {
       builder.schemaRegistryClient(schemaRegistryClient(clusterProperties));

@@ -66,9 +69,6 @@ public class KafkaClusterFactory {
     if (ksqlConfigured(clusterProperties)) {
       builder.ksqlClient(ksqlClient(clusterProperties));
     }
-    if (metricsConfigured(clusterProperties)) {
-      builder.metricsConfig(metricsConfigDataToMetricsConfig(clusterProperties.getMetrics()));
-    }
     builder.originalProperties(clusterProperties);
     return builder.build();
   }

@@ -202,20 +202,4 @@ public class KafkaClusterFactory {
     return clusterProperties.getMetrics() != null;
   }

-  @Nullable
-  private MetricsConfig metricsConfigDataToMetricsConfig(ClustersProperties.MetricsConfigData metricsConfigData) {
-    if (metricsConfigData == null) {
-      return null;
-    }
-    MetricsConfig.MetricsConfigBuilder builder = MetricsConfig.builder();
-    builder.type(metricsConfigData.getType());
-    builder.port(metricsConfigData.getPort());
-    builder.ssl(Optional.ofNullable(metricsConfigData.getSsl()).orElse(false));
-    builder.username(metricsConfigData.getUsername());
-    builder.password(metricsConfigData.getPassword());
-    builder.keystoreLocation(metricsConfigData.getKeystoreLocation());
-    builder.keystorePassword(metricsConfigData.getKeystorePassword());
-    return builder.build();
-  }
-
 }
@@ -8,7 +8,6 @@ import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.model.Metrics;
 import com.provectus.kafka.ui.model.ServerStatusDTO;
 import com.provectus.kafka.ui.model.Statistics;
-import com.provectus.kafka.ui.service.metrics.MetricsCollector;
 import com.provectus.kafka.ui.service.metrics.v2.scrape.ScrapedClusterState;
 import java.util.List;
 import java.util.Map;

@@ -20,13 +19,13 @@ import org.apache.kafka.clients.admin.TopicDescription;
 import org.apache.kafka.common.Node;
 import org.springframework.stereotype.Service;
 import reactor.core.publisher.Mono;
+import reactor.util.function.Tuple2;

 @Service
 @RequiredArgsConstructor
 @Slf4j
 public class StatisticsService {

-  private final MetricsCollector metricsCollector;
   private final AdminClientService adminClientService;
   private final FeatureService featureService;
   private final StatisticsCache cache;

@@ -37,30 +36,24 @@ public class StatisticsService {

   private Mono<Statistics> getStatistics(KafkaCluster cluster) {
     return adminClientService.get(cluster).flatMap(ac ->
-        ac.describeCluster().flatMap(description ->
-            ac.updateInternalStats(description.getController()).then(
-                Mono.zip(
-                    List.of(
-                        metricsCollector.getBrokerMetrics(cluster, description.getNodes()),
-                        getLogDirInfo(description, ac),
-                        featureService.getAvailableFeatures(ac, cluster, description),
-                        loadTopicConfigs(cluster),
-                        describeTopics(cluster),
-                        loadClusterState(ac)
-                    ),
-                    results ->
-                        Statistics.builder()
-                            .status(ServerStatusDTO.ONLINE)
-                            .clusterDescription(description)
-                            .version(ac.getVersion())
-                            .metrics((Metrics) results[0])
-                            .logDirInfo((InternalLogDirStats) results[1])
-                            .features((List<ClusterFeature>) results[2])
-                            .topicConfigs((Map<String, List<ConfigEntry>>) results[3])
-                            .topicDescriptions((Map<String, TopicDescription>) results[4])
-                            .clusterState((ScrapedClusterState) results[5])
-                            .build()
-                ))))
+        ac.describeCluster()
+            .flatMap(description ->
+                ac.updateInternalStats(description.getController())
+                    .then(
+                        Mono.zip(
+                            featureService.getAvailableFeatures(ac, cluster, description),
+                            loadClusterState(description, ac)
+                        ).flatMap(featuresAndState ->
+                            scrapeMetrics(cluster, featuresAndState.getT2(), description)
+                                .map(metrics ->
+                                    Statistics.builder()
+                                        .status(ServerStatusDTO.ONLINE)
+                                        .clusterDescription(description)
+                                        .version(ac.getVersion())
+                                        .metrics(metrics)
+                                        .features(featuresAndState.getT1())
+                                        .clusterState(featuresAndState.getT2())
+                                        .build())))))
         .doOnError(e ->
             log.error("Failed to collect cluster {} info", cluster.getName(), e))
         .onErrorResume(

@@ -80,8 +73,15 @@ public class StatisticsService {
     return adminClientService.get(c).flatMap(ReactiveAdminClient::getTopicsConfig);
   }

-  private Mono<ScrapedClusterState> loadClusterState(ReactiveAdminClient ac){
-    return ScrapedClusterState.scrape(ac);
+  private Mono<ScrapedClusterState> loadClusterState(ClusterDescription clusterDescription,
+                                                     ReactiveAdminClient ac) {
+    return ScrapedClusterState.scrape(clusterDescription, ac);
+  }
+
+  private Mono<Metrics> scrapeMetrics(KafkaCluster c,
+                                      ScrapedClusterState clusterState,
+                                      ClusterDescription clusterDescription) {
+    return c.getMetricsScrapping().scrape(clusterState, clusterDescription.getNodes());
   }

 }
@@ -1,69 +0,0 @@
-package com.provectus.kafka.ui.service.metrics;
-
-import com.provectus.kafka.ui.model.KafkaCluster;
-import com.provectus.kafka.ui.model.Metrics;
-import com.provectus.kafka.ui.model.MetricsConfig;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-import lombok.RequiredArgsConstructor;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.common.Node;
-import org.springframework.stereotype.Component;
-import reactor.core.publisher.Flux;
-import reactor.core.publisher.Mono;
-import reactor.util.function.Tuple2;
-import reactor.util.function.Tuples;
-
-@Component
-@Slf4j
-@RequiredArgsConstructor
-public class MetricsCollector {
-
-  private final JmxMetricsRetriever jmxMetricsRetriever;
-  private final PrometheusMetricsRetriever prometheusMetricsRetriever;
-
-  public Mono<Metrics> getBrokerMetrics(KafkaCluster cluster, Collection<Node> nodes) {
-    return Flux.fromIterable(nodes)
-        .flatMap(n -> getMetrics(cluster, n).map(lst -> Tuples.of(n, lst)))
-        .collectMap(Tuple2::getT1, Tuple2::getT2)
-        .map(nodeMetrics -> collectMetrics(cluster, nodeMetrics))
-        .defaultIfEmpty(Metrics.empty());
-  }
-
-  private Mono<List<RawMetric>> getMetrics(KafkaCluster kafkaCluster, Node node) {
-    Flux<RawMetric> metricFlux = Flux.empty();
-    if (kafkaCluster.getMetricsConfig() != null) {
-      String type = kafkaCluster.getMetricsConfig().getType();
-      if (type == null || type.equalsIgnoreCase(MetricsConfig.JMX_METRICS_TYPE)) {
-        metricFlux = jmxMetricsRetriever.retrieve(kafkaCluster, node);
-      } else if (type.equalsIgnoreCase(MetricsConfig.PROMETHEUS_METRICS_TYPE)) {
-        metricFlux = prometheusMetricsRetriever.retrieve(kafkaCluster, node);
-      }
-    }
-    return metricFlux.collectList();
-  }
-
-  public Metrics collectMetrics(KafkaCluster cluster, Map<Node, List<RawMetric>> perBrokerMetrics) {
-    Metrics.MetricsBuilder builder = Metrics.builder()
-        .perBrokerMetrics(
-            perBrokerMetrics.entrySet()
-                .stream()
-                .collect(Collectors.toMap(e -> e.getKey().id(), Map.Entry::getValue)));
-
-    populateWellknowMetrics(cluster, perBrokerMetrics)
-        .apply(builder);
-
-    return builder.build();
-  }
-
-  private WellKnownMetrics populateWellknowMetrics(KafkaCluster cluster, Map<Node, List<RawMetric>> perBrokerMetrics) {
-    WellKnownMetrics wellKnownMetrics = new WellKnownMetrics();
-    perBrokerMetrics.forEach((node, metrics) ->
-        metrics.forEach(metric ->
-            wellKnownMetrics.populate(node, metric)));
-    return wellKnownMetrics;
-  }
-
-}
@@ -1,9 +0,0 @@
-package com.provectus.kafka.ui.service.metrics;
-
-import com.provectus.kafka.ui.model.KafkaCluster;
-import org.apache.kafka.common.Node;
-import reactor.core.publisher.Flux;
-
-interface MetricsRetriever {
-  Flux<RawMetric> retrieve(KafkaCluster c, Node node);
-}
@@ -1,7 +1,14 @@
 package com.provectus.kafka.ui.service.metrics;

+import static io.prometheus.client.Collector.*;
+
+import io.prometheus.client.Collector;
 import java.math.BigDecimal;
+import java.util.Collection;
 import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import java.util.stream.Stream;
 import lombok.AllArgsConstructor;
 import lombok.EqualsAndHashCode;
 import lombok.ToString;

@@ -23,10 +30,27 @@ public interface RawMetric {

   //--------------------------------------------------

+  static Stream<MetricFamilySamples> groupIntoMFS(Collection<RawMetric> lst) {
+    //TODO: impl
+    return null;
+  }
+
   static RawMetric create(String name, Map<String, String> labels, BigDecimal value) {
     return new SimpleMetric(name, labels, value);
   }

+  static Stream<RawMetric> create(MetricFamilySamples samples) {
+    return samples.samples.stream()
+        .map(s -> create(
+            s.name,
+            IntStream.range(0, s.labelNames.size())
+                .boxed()
+                .collect(Collectors.<Integer, String, String>toMap(s.labelNames::get, s.labelValues::get)),
+            BigDecimal.valueOf(s.value)
+            )
+        );
+  }
+
   record SimpleMetric(String name,
                       Map<String, String> labels,
                       BigDecimal value) implements RawMetric {
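For illustration only (not part of the commit): the new static factory flattens one Prometheus MetricFamilySamples into the legacy RawMetric shape; with the static import of Collector.* in scope it could be exercised like this (hypothetical values):

  MetricFamilySamples mfs = new MetricFamilySamples(
      "kafka_server_brokertopicmetrics_bytesinpersec",
      Type.GAUGE,
      "help text",
      List.of(new MetricFamilySamples.Sample(
          "kafka_server_brokertopicmetrics_bytesinpersec",
          List.of("topic"), List.of("orders"), 1024.0)));

  RawMetric.create(mfs).forEach(m -> System.out.println(m.name() + " = " + m.value()));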
@@ -0,0 +1,87 @@
+package com.provectus.kafka.ui.service.metrics.v2.scrape;
+
+import static com.provectus.kafka.ui.config.ClustersProperties.*;
+import static com.provectus.kafka.ui.model.MetricsScrapeProperties.*;
+
+import com.provectus.kafka.ui.model.Metrics;
+import com.provectus.kafka.ui.model.MetricsScrapeProperties;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.inferred.InferredMetrics;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.inferred.InferredMetricsScraper;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.jmx.JmxMetricsRetriever;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.jmx.JmxMetricsScraper;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.prometheus.PrometheusScraper;
+import jakarta.annotation.Nullable;
+import java.util.Collection;
+import lombok.RequiredArgsConstructor;
+import org.apache.kafka.common.Node;
+import reactor.core.publisher.Mono;
+
+@RequiredArgsConstructor
+public class MetricsScrapping {
+
+  private final InferredMetricsScraper inferredMetricsScraper;
+
+  @Nullable
+  private final JmxMetricsScraper jmxMetricsScraper;
+
+  @Nullable
+  private final PrometheusScraper prometheusScraper;
+
+  public static MetricsScrapping create(Cluster cluster,
+                                        JmxMetricsRetriever jmxMetricsRetriever) {
+    InferredMetricsScraper inferredMetricsScraper = new InferredMetricsScraper();
+    JmxMetricsScraper jmxMetricsScraper = null;
+    PrometheusScraper prometheusScraper = null;
+
+    var metrics = cluster.getMetrics();
+    if (cluster.getMetrics() != null) {
+      var scrapeProperties = createScrapeProps(cluster);
+      if (metrics.getType() == null || metrics.getType().equalsIgnoreCase(JMX_METRICS_TYPE)) {
+        jmxMetricsScraper = new JmxMetricsScraper(scrapeProperties, jmxMetricsRetriever);
+      } else if (metrics.getType().equalsIgnoreCase(PROMETHEUS_METRICS_TYPE)) {
+        prometheusScraper = new PrometheusScraper(scrapeProperties);
+      }
+    }
+    return new MetricsScrapping(inferredMetricsScraper, jmxMetricsScraper, prometheusScraper);
+  }
+
+  private static MetricsScrapeProperties createScrapeProps(Cluster cluster) {
+    var metrics = cluster.getMetrics();
+    return MetricsScrapeProperties.builder()
+        .port(metrics.getPort())
+        .ssl(metrics.getSsl())
+        .username(metrics.getUsername())
+        .password(metrics.getPassword())
+        .truststoreConfig(cluster.getSsl())
+        .keystoreConfig(
+            metrics.getKeystoreLocation() != null
+                ? new KeystoreConfig(metrics.getKeystoreLocation(), metrics.getKeystorePassword())
+                : null
+        )
+        .build();
+  }
+
+  public Mono<Metrics> scrape(ScrapedClusterState clusterState, Collection<Node> nodes) {
+    Mono<InferredMetrics> inferred = inferredMetricsScraper.scrape(clusterState);
+    Mono<? extends PerBrokerScrapedMetrics> external = scrapeExternal(nodes);
+    return inferred.zipWith(
+        external,
+        (inf, ext) -> Metrics.builder()
+            .ioRates(ext.ioRates())
+            .perBrokerScrapedMetrics(ext.getPerBrokerMetrics())
+            .inferredMetrics(inf)
+            .build()
+    );
+  }
+
+  private Mono<? extends PerBrokerScrapedMetrics> scrapeExternal(Collection<Node> nodes) {
+    if (jmxMetricsScraper != null) {
+      return jmxMetricsScraper.scrape(nodes);
+    }
+    if (prometheusScraper != null) {
+      return prometheusScraper.scrape(nodes);
+    }
+    return Mono.just(PerBrokerScrapedMetrics.empty());
+  }
+
+}
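For illustration only (not part of the commit): the factory and the scrape entry point are expected to be wired roughly as follows, matching the calls made in KafkaClusterFactory and StatisticsService (local variable names are hypothetical):

  MetricsScrapping scrapping = MetricsScrapping.create(clusterProperties, jmxMetricsRetriever);
  Mono<Metrics> metricsMono = scrapping.scrape(clusterState, clusterDescription.getNodes());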
@@ -0,0 +1,25 @@
+package com.provectus.kafka.ui.service.metrics.v2.scrape;
+
+import com.provectus.kafka.ui.model.Metrics;
+import io.prometheus.client.Collector;
+import java.util.List;
+import java.util.Map;
+import lombok.Getter;
+import lombok.RequiredArgsConstructor;
+
+@RequiredArgsConstructor
+public class PerBrokerScrapedMetrics {
+
+  @Getter
+  private final Map<Integer, List<Collector.MetricFamilySamples>> perBrokerMetrics;
+
+  public static PerBrokerScrapedMetrics empty() {
+    return new PerBrokerScrapedMetrics(Map.of());
+  }
+
+  Metrics.IoRates ioRates() {
+    //TODO: rename WKMetrics
+    return new WellKnownMetrics(perBrokerMetrics).ioRates();
+  }
+
+}
@@ -1,16 +1,27 @@
 package com.provectus.kafka.ui.service.metrics.v2.scrape;

+import static com.provectus.kafka.ui.service.ReactiveAdminClient.*;
+
 import com.google.common.collect.Table;
+import com.provectus.kafka.ui.model.InternalLogDirStats;
 import com.provectus.kafka.ui.service.ReactiveAdminClient;
 import java.time.Instant;
+import java.util.Collection;
 import java.util.List;
 import java.util.Map;
+import lombok.Builder;
 import lombok.Value;
 import org.apache.kafka.clients.admin.ConfigEntry;
 import org.apache.kafka.clients.admin.ConsumerGroupDescription;
+import org.apache.kafka.clients.admin.ConsumerGroupListing;
 import org.apache.kafka.clients.admin.TopicDescription;
+import org.apache.kafka.common.ConsumerGroupState;
+import org.apache.kafka.common.requests.DescribeLogDirsResponse;
+import org.apache.kafka.common.requests.DescribeLogDirsResponse.LogDirInfo;
+import org.apache.kafka.common.resource.ResourcePatternFilter;
 import reactor.core.publisher.Mono;

+@Builder
 @Value
 public class ScrapedClusterState {

@@ -22,7 +33,7 @@ public class ScrapedClusterState {
       String name,
       List<ConfigEntry> configs,
       TopicDescription description,
-      Map<Integer, Long> offsets,
+      Map<Integer, Long> endOffsets,
       SegmentStats segmentStats,
       Map<Integer, SegmentStats> partitionsSegmentStats) {
   }

@@ -30,6 +41,7 @@
   record ConsumerGroupState(
       Instant scrapeTime,
       String group,
+      org.apache.kafka.common.ConsumerGroupState state,
       ConsumerGroupDescription description,
       Table<String, Integer, Long> committedOffsets,
       Map<String, Instant> lastTopicActivity) {

@@ -45,11 +57,36 @@
   Map<String, ConsumerGroupState> consumerGroupsStates;

   public static ScrapedClusterState empty() {
-    //TODO impl
-    return null;
+    return ScrapedClusterState.builder()
+        .scrapeStartTime(Instant.now())
+        .nodesStates(Map.of())
+        .topicStates(Map.of())
+        .consumerGroupsStates(Map.of())
+        .build();
   }

-  public static Mono<ScrapedClusterState> scrape(ReactiveAdminClient ac) {
+  public static Mono<ScrapedClusterState> scrape(ClusterDescription clusterDescription,
+                                                 ReactiveAdminClient ac) {
+
+    Mono<InternalLogDirStats> segmentStatsMono = ac.describeLogDirs().map(InternalLogDirStats::new);
+    Mono<List<String>> cgListingsMono = ac.listConsumerGroups().map(l -> l.stream().map(ConsumerGroupListing::groupId).toList());
+    Mono<Map<String, TopicDescription>> topicDescriptionsMono = ac.describeTopics();
+    Mono<Map<String, List<ConfigEntry>>> topicConfigsMono = ac.getTopicsConfig();
+
+    Mono.zip(
+        segmentStatsMono,
+        cgListingsMono,
+        topicDescriptionsMono,
+        topicConfigsMono
+    ).flatMap(tuple -> {
+      InternalLogDirStats segmentStats = tuple.getT1();
+      List<String> consumerGroups = tuple.getT2();
+      Map<String, TopicDescription> topicDescriptions = tuple.getT3();
+      Map<String, List<ConfigEntry>> topicConfigs = tuple.getT4();
+
+      Mono<>
+    })
+
     return null;//TODO impl
   }
@@ -1,21 +0,0 @@
-package com.provectus.kafka.ui.service.metrics.v2.scrape;
-
-import io.prometheus.client.Collector.MetricFamilySamples;
-import java.util.Collection;
-
-import java.util.List;
-import java.util.stream.Stream;
-
-public interface ScrapedMetrics {
-
-  static ScrapedMetrics create(Collection<MetricFamilySamples> lst) {
-    return lst::stream;
-  }
-
-  static ScrapedMetrics empty() {
-    return create(List.of());
-  }
-
-  Stream<MetricFamilySamples> asStream();
-
-}
@@ -1,10 +0,0 @@
-package com.provectus.kafka.ui.service.metrics.v2.scrape;
-
-
-import reactor.core.publisher.Mono;
-
-public interface Scraper<T extends ScrapedMetrics> {
-
-  Mono<T> scrape();
-
-}
@@ -1,8 +0,0 @@
-package com.provectus.kafka.ui.service.metrics.v2.scrape;
-
-public class Scrapping {
-
-
-
-
-}
@@ -1,15 +1,18 @@
-package com.provectus.kafka.ui.service.metrics;
+package com.provectus.kafka.ui.service.metrics.v2.scrape;

 import static org.apache.commons.lang3.StringUtils.containsIgnoreCase;
 import static org.apache.commons.lang3.StringUtils.endsWithIgnoreCase;

 import com.provectus.kafka.ui.model.Metrics;
+import com.provectus.kafka.ui.service.metrics.RawMetric;
+import io.prometheus.client.Collector;
 import java.math.BigDecimal;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import org.apache.kafka.common.Node;

-class WellKnownMetrics {
+public class WellKnownMetrics {

   // per broker
   final Map<Integer, BigDecimal> brokerBytesInFifteenMinuteRate = new HashMap<>();

@@ -19,33 +22,41 @@ class WellKnownMetrics {
   final Map<String, BigDecimal> bytesInFifteenMinuteRate = new HashMap<>();
   final Map<String, BigDecimal> bytesOutFifteenMinuteRate = new HashMap<>();

-  void populate(Node node, RawMetric rawMetric) {
-    updateBrokerIOrates(node, rawMetric);
-    updateTopicsIOrates(rawMetric);
+  public WellKnownMetrics(Map<Integer, List<Collector.MetricFamilySamples>> perBrokerMetrics) {
+    perBrokerMetrics.forEach((nodeId, metrics) -> {
+      metrics.forEach(m -> {
+        RawMetric.create(m).forEach(rawMetric -> {
+          updateBrokerIOrates(nodeId, rawMetric);
+          updateTopicsIOrates(rawMetric);
+        });
+      });
+    });
   }

-  void apply(Metrics.MetricsBuilder metricsBuilder) {
-    metricsBuilder.topicBytesInPerSec(bytesInFifteenMinuteRate);
-    metricsBuilder.topicBytesOutPerSec(bytesOutFifteenMinuteRate);
-    metricsBuilder.brokerBytesInPerSec(brokerBytesInFifteenMinuteRate);
-    metricsBuilder.brokerBytesOutPerSec(brokerBytesOutFifteenMinuteRate);
+  public Metrics.IoRates ioRates() {
+    return Metrics.IoRates.builder()
+        .topicBytesInPerSec(bytesInFifteenMinuteRate)
+        .topicBytesOutPerSec(bytesOutFifteenMinuteRate)
+        .brokerBytesInPerSec(brokerBytesInFifteenMinuteRate)
+        .brokerBytesOutPerSec(brokerBytesOutFifteenMinuteRate)
+        .build();
   }

-  private void updateBrokerIOrates(Node node, RawMetric rawMetric) {
+  private void updateBrokerIOrates(int nodeId, RawMetric rawMetric) {
     String name = rawMetric.name();
-    if (!brokerBytesInFifteenMinuteRate.containsKey(node.id())
+    if (!brokerBytesInFifteenMinuteRate.containsKey(nodeId)
         && rawMetric.labels().size() == 1
         && "BytesInPerSec".equalsIgnoreCase(rawMetric.labels().get("name"))
         && containsIgnoreCase(name, "BrokerTopicMetrics")
         && endsWithIgnoreCase(name, "FifteenMinuteRate")) {
-      brokerBytesInFifteenMinuteRate.put(node.id(), rawMetric.value());
+      brokerBytesInFifteenMinuteRate.put(nodeId, rawMetric.value());
     }
-    if (!brokerBytesOutFifteenMinuteRate.containsKey(node.id())
+    if (!brokerBytesOutFifteenMinuteRate.containsKey(nodeId)
         && rawMetric.labels().size() == 1
         && "BytesOutPerSec".equalsIgnoreCase(rawMetric.labels().get("name"))
         && containsIgnoreCase(name, "BrokerTopicMetrics")
         && endsWithIgnoreCase(name, "FifteenMinuteRate")) {
-      brokerBytesOutFifteenMinuteRate.put(node.id(), rawMetric.value());
+      brokerBytesOutFifteenMinuteRate.put(nodeId, rawMetric.value());
     }
   }
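For illustration only (not part of the commit): the reworked class is now fed the scraped per-broker metric families at construction time and exposes the derived rates as a value object (empty input here, just to show the call shape):

  WellKnownMetrics wellKnown = new WellKnownMetrics(Map.of());
  Metrics.IoRates rates = wellKnown.ioRates();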
@@ -1,23 +1,23 @@
 package com.provectus.kafka.ui.service.metrics.v2.scrape.inferred;

-import static io.prometheus.client.Collector.*;
+import static io.prometheus.client.Collector.MetricFamilySamples;

-import com.provectus.kafka.ui.service.metrics.v2.scrape.ScrapedClusterState;
-import com.provectus.kafka.ui.service.metrics.v2.scrape.ScrapedMetrics;
 import java.util.List;
-import java.util.stream.Stream;

-public class InferredMetrics implements ScrapedMetrics {
+public class InferredMetrics {

   private final List<MetricFamilySamples> metrics;

+  public static InferredMetrics empty() {
+    return new InferredMetrics(List.of());
+  }
+
   public InferredMetrics(List<MetricFamilySamples> metrics) {
     this.metrics = metrics;
   }

-  @Override
-  public Stream<MetricFamilySamples> asStream() {
-    return metrics.stream();
+  public List<MetricFamilySamples> asList() {
+    return metrics;
   }

 }
@@ -1,25 +1,20 @@
 package com.provectus.kafka.ui.service.metrics.v2.scrape.inferred;

 import com.provectus.kafka.ui.service.metrics.v2.scrape.ScrapedClusterState;
-import com.provectus.kafka.ui.service.metrics.v2.scrape.Scraper;
 import java.util.List;
-import java.util.function.Supplier;
 import lombok.RequiredArgsConstructor;
 import reactor.core.publisher.Mono;

 @RequiredArgsConstructor
-public class InferredMetricsScraper implements Scraper<InferredMetrics> {
+public class InferredMetricsScraper {

-  private final Supplier<ScrapedClusterState> currentStateSupplier;
   private ScrapedClusterState prevState = null;

-  @Override
-  public synchronized Mono<InferredMetrics> scrape() {
+  public synchronized Mono<InferredMetrics> scrape(ScrapedClusterState newState) {
     if (prevState == null) {
-      prevState = currentStateSupplier.get();
-      return Mono.empty();
+      prevState = newState;
+      return Mono.just(InferredMetrics.empty());
     }
-    var newState = currentStateSupplier.get();
     var inferred = infer(prevState, newState);
     prevState = newState;
     return Mono.just(inferred);

@@ -27,6 +22,7 @@ public class InferredMetricsScraper implements Scraper<InferredMetrics> {

   private static InferredMetrics infer(ScrapedClusterState prevState,
                                        ScrapedClusterState newState) {
+
     //TODO: impl
     return new InferredMetrics(List.of());
   }
@@ -1,5 +1,7 @@
-package com.provectus.kafka.ui.service.metrics;
+package com.provectus.kafka.ui.service.metrics.v2.scrape.jmx;

+import com.provectus.kafka.ui.service.metrics.RawMetric;
+import io.prometheus.client.Collector;
 import java.math.BigDecimal;
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
@@ -1,6 +1,8 @@
-package com.provectus.kafka.ui.service.metrics;
+package com.provectus.kafka.ui.service.metrics.v2.scrape.jmx;

 import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.model.MetricsScrapeProperties;
+import com.provectus.kafka.ui.service.metrics.RawMetric;
 import java.io.Closeable;
 import java.util.ArrayList;
 import java.util.HashMap;

@@ -18,14 +20,13 @@ import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kafka.common.Node;
 import org.springframework.stereotype.Service;
-import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.core.scheduler.Schedulers;


 @Service
 @Slf4j
-class JmxMetricsRetriever implements MetricsRetriever, Closeable {
+public class JmxMetricsRetriever implements Closeable {

   private static final boolean SSL_JMX_SUPPORTED;

@@ -43,35 +44,34 @@ class JmxMetricsRetriever implements MetricsRetriever, Closeable {
     JmxSslSocketFactory.clearFactoriesCache();
   }

-  @Override
-  public Flux<RawMetric> retrieve(KafkaCluster c, Node node) {
-    if (isSslJmxEndpoint(c) && !SSL_JMX_SUPPORTED) {
-      log.warn("Cluster {} has jmx ssl configured, but it is not supported", c.getName());
-      return Flux.empty();
+  public Mono<List<RawMetric>> retrieveFromNode(MetricsScrapeProperties metricsConfig, Node node) {
+    if (isSslJmxEndpoint(metricsConfig) && !SSL_JMX_SUPPORTED) {
+      log.warn("Cluster has jmx ssl configured, but it is not supported by app");
+      return Mono.just(List.of());
     }
-    return Mono.fromSupplier(() -> retrieveSync(c, node))
-        .subscribeOn(Schedulers.boundedElastic())
-        .flatMapMany(Flux::fromIterable);
+    return Mono.fromSupplier(() -> retrieveSync(metricsConfig, node))
+        .subscribeOn(Schedulers.boundedElastic());
   }

-  private boolean isSslJmxEndpoint(KafkaCluster cluster) {
-    return cluster.getMetricsConfig().getKeystoreLocation() != null;
+  private boolean isSslJmxEndpoint(MetricsScrapeProperties metricsScrapeProperties) {
+    return metricsScrapeProperties.getKeystoreConfig() != null
+        && metricsScrapeProperties.getKeystoreConfig().getKeystoreLocation() != null;
   }

   @SneakyThrows
-  private List<RawMetric> retrieveSync(KafkaCluster c, Node node) {
-    String jmxUrl = JMX_URL + node.host() + ":" + c.getMetricsConfig().getPort() + "/" + JMX_SERVICE_TYPE;
+  private List<RawMetric> retrieveSync(MetricsScrapeProperties metricsConfig, Node node) {
+    String jmxUrl = JMX_URL + node.host() + ":" + metricsConfig.getPort() + "/" + JMX_SERVICE_TYPE;
     log.debug("Collection JMX metrics for {}", jmxUrl);
     List<RawMetric> result = new ArrayList<>();
-    withJmxConnector(jmxUrl, c, jmxConnector -> getMetricsFromJmx(jmxConnector, result));
+    withJmxConnector(jmxUrl, metricsConfig, jmxConnector -> getMetricsFromJmx(jmxConnector, result));
     log.debug("{} metrics collected for {}", result.size(), jmxUrl);
     return result;
   }

   private void withJmxConnector(String jmxUrl,
-                                KafkaCluster c,
+                                MetricsScrapeProperties metricsConfig,
                                 Consumer<JMXConnector> consumer) {
-    var env = prepareJmxEnvAndSetThreadLocal(c);
+    var env = prepareJmxEnvAndSetThreadLocal(metricsConfig);
     try (JMXConnector connector = JMXConnectorFactory.newJMXConnector(new JMXServiceURL(jmxUrl), env)) {
       try {
         connector.connect(env);

@@ -87,16 +87,16 @@ class JmxMetricsRetriever implements MetricsRetriever, Closeable {
     }
   }

-  private Map<String, Object> prepareJmxEnvAndSetThreadLocal(KafkaCluster cluster) {
-    var metricsConfig = cluster.getMetricsConfig();
+  private Map<String, Object> prepareJmxEnvAndSetThreadLocal(MetricsScrapeProperties metricsConfig) {
     Map<String, Object> env = new HashMap<>();
-    if (isSslJmxEndpoint(cluster)) {
-      var clusterSsl = cluster.getOriginalProperties().getSsl();
+    if (isSslJmxEndpoint(metricsConfig)) {
+      var truststoreConfig = metricsConfig.getTruststoreConfig();
+      var keystoreConfig = metricsConfig.getKeystoreConfig();
       JmxSslSocketFactory.setSslContextThreadLocal(
-          clusterSsl != null ? clusterSsl.getTruststoreLocation() : null,
-          clusterSsl != null ? clusterSsl.getTruststorePassword() : null,
-          metricsConfig.getKeystoreLocation(),
-          metricsConfig.getKeystorePassword()
+          truststoreConfig != null ? truststoreConfig.getTruststoreLocation() : null,
+          truststoreConfig != null ? truststoreConfig.getTruststorePassword() : null,
+          keystoreConfig != null ? keystoreConfig.getKeystoreLocation() : null,
+          keystoreConfig != null ? keystoreConfig.getKeystorePassword() : null
       );
       JmxSslSocketFactory.editJmxConnectorEnv(env);
     }
@@ -1,13 +1,36 @@
 package com.provectus.kafka.ui.service.metrics.v2.scrape.jmx;

-import com.provectus.kafka.ui.service.metrics.v2.scrape.ScrapedMetrics;
-import com.provectus.kafka.ui.service.metrics.v2.scrape.Scraper;
+import static io.prometheus.client.Collector.*;
+
+import com.provectus.kafka.ui.model.MetricsScrapeProperties;
+import com.provectus.kafka.ui.service.metrics.RawMetric;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.PerBrokerScrapedMetrics;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import org.apache.kafka.common.Node;
+import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
+import reactor.util.function.Tuples;

-public class JmxMetricsScraper implements Scraper<ScrapedMetrics> {
+public class JmxMetricsScraper {

-  @Override
-  public Mono<ScrapedMetrics> scrape() {
-    return null;
+  private final JmxMetricsRetriever jmxMetricsRetriever;
+  private final MetricsScrapeProperties scrapeProperties;
+
+  public JmxMetricsScraper(MetricsScrapeProperties scrapeProperties,
+                           JmxMetricsRetriever jmxMetricsRetriever) {
+    this.scrapeProperties = scrapeProperties;
+    this.jmxMetricsRetriever = jmxMetricsRetriever;
+  }
+
+  public Mono<PerBrokerScrapedMetrics> scrape(Collection<Node> nodes) {
+    Mono<Map<Integer, List<MetricFamilySamples>>> collected = Flux.fromIterable(nodes)
+        .flatMap(n -> jmxMetricsRetriever.retrieveFromNode(scrapeProperties, n).map(metrics -> Tuples.of(n, metrics)))
+        .collectMap(
+            t -> t.getT1().id(),
+            t -> RawMetric.groupIntoMFS(t.getT2()).toList()
+        );
+    return collected.map(PerBrokerScrapedMetrics::new);
   }
 }
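Note: JmxMetricsScraper goes from an empty Scraper<ScrapedMetrics> stub to a working per-broker collector. A short usage sketch under the signatures visible in this hunk; Node is the standard org.apache.kafka.common.Node(id, host, port), and the two constructor arguments are assumed to come from surrounding wiring this commit does not show:

// Usage sketch, assumptions as stated above.
JmxMetricsScraper scraper = new JmxMetricsScraper(scrapeProperties, jmxMetricsRetriever);
Mono<PerBrokerScrapedMetrics> perBroker =
    scraper.scrape(List.of(new Node(1, "broker-1.example.com", 9092)));
// Result: broker id -> List<MetricFamilySamples>, i.e. JMX output normalized
// into the same Prometheus sample model the HTTP scraper produces.

Grouping raw JMX metrics through RawMetric.groupIntoMFS means both scrape paths hand downstream code a single shape, Collector.MetricFamilySamples.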
@@ -1,4 +1,4 @@
-package com.provectus.kafka.ui.service.metrics;
+package com.provectus.kafka.ui.service.metrics.v2.scrape.jmx;

 import com.google.common.base.Preconditions;
 import java.io.FileInputStream;
@@ -1,13 +0,0 @@
-package com.provectus.kafka.ui.service.metrics.v2.scrape.prom;
-
-import com.provectus.kafka.ui.service.metrics.v2.scrape.ScrapedMetrics;
-import com.provectus.kafka.ui.service.metrics.v2.scrape.Scraper;
-import reactor.core.publisher.Mono;
-
-public class PrometheusScraper implements Scraper<ScrapedMetrics> {
-
-  @Override
-  public Mono<ScrapedMetrics> scrape() {
-    return null;
-  }
-}
@@ -1,5 +1,6 @@
-package com.provectus.kafka.ui.service.metrics;
+package com.provectus.kafka.ui.service.metrics.v2.scrape.prometheus;

+import com.provectus.kafka.ui.service.metrics.RawMetric;
 import java.math.BigDecimal;
 import java.util.Arrays;
 import java.util.Optional;
@@ -1,12 +1,14 @@
-package com.provectus.kafka.ui.service.metrics;
+package com.provectus.kafka.ui.service.metrics.v2.scrape.prometheus;

+import static io.prometheus.client.Collector.*;
+
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Strings;
-import com.provectus.kafka.ui.config.ClustersProperties;
-import com.provectus.kafka.ui.model.KafkaCluster;
-import com.provectus.kafka.ui.model.MetricsConfig;
+import com.provectus.kafka.ui.model.MetricsScrapeProperties;
+import com.provectus.kafka.ui.service.metrics.RawMetric;
 import com.provectus.kafka.ui.util.WebClientConfigurator;
 import java.util.Arrays;
+import java.util.List;
 import java.util.Optional;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.common.Node;

@@ -19,33 +21,29 @@ import reactor.core.publisher.Mono;

 @Service
 @Slf4j
-class PrometheusMetricsRetriever implements MetricsRetriever {
+class PrometheusMetricsRetriever {

   private static final String METRICS_ENDPOINT_PATH = "/metrics";
   private static final int DEFAULT_EXPORTER_PORT = 11001;

-  @Override
-  public Flux<RawMetric> retrieve(KafkaCluster c, Node node) {
-    log.debug("Retrieving metrics from prometheus exporter: {}:{}", node.host(), c.getMetricsConfig().getPort());
-
-    MetricsConfig metricsConfig = c.getMetricsConfig();
+  public Mono<List<MetricFamilySamples>> retrieve(MetricsScrapeProperties metricsConfig, Node node) {
+    log.debug("Retrieving metrics from prometheus exporter: {}:{}", node.host(), metricsConfig.getPort());
+
     var webClient = new WebClientConfigurator()
         .configureBufferSize(DataSize.ofMegabytes(20))
         .configureBasicAuth(metricsConfig.getUsername(), metricsConfig.getPassword())
-        .configureSsl(
-            c.getOriginalProperties().getSsl(),
-            new ClustersProperties.KeystoreConfig(
-                metricsConfig.getKeystoreLocation(),
-                metricsConfig.getKeystorePassword()))
+        .configureSsl(metricsConfig.getTruststoreConfig(), metricsConfig.getKeystoreConfig())
         .build();

-    return retrieve(webClient, node.host(), c.getMetricsConfig());
+    return retrieve(webClient, node.host(), metricsConfig)
+        .collectList()
+        .map(metrics -> RawMetric.groupIntoMFS(metrics).toList());
   }

   @VisibleForTesting
-  Flux<RawMetric> retrieve(WebClient webClient, String host, MetricsConfig metricsConfig) {
+  Flux<RawMetric> retrieve(WebClient webClient, String host, MetricsScrapeProperties metricsConfig) {
     int port = Optional.ofNullable(metricsConfig.getPort()).orElse(DEFAULT_EXPORTER_PORT);
-    boolean sslEnabled = metricsConfig.isSsl() || metricsConfig.getKeystoreLocation() != null;
+    boolean sslEnabled = metricsConfig.isSsl() || metricsConfig.getKeystoreConfig() != null;
     var request = webClient.get()
         .uri(UriComponentsBuilder.newInstance()
             .scheme(sslEnabled ? "https" : "http")
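Note: the @VisibleForTesting overload keeps its endpoint-resolution logic; restated as a standalone sketch so the scheme and port defaults are explicit. Only getters visible in this diff are used, and the helper itself is hypothetical:

// Minimal restatement of the resolution above, not code from this commit.
static String exporterEndpoint(MetricsScrapeProperties props, String host) {
  int port = java.util.Optional.ofNullable(props.getPort()).orElse(11001); // DEFAULT_EXPORTER_PORT
  boolean sslEnabled = props.isSsl() || props.getKeystoreConfig() != null; // a keystore implies HTTPS
  return (sslEnabled ? "https" : "http") + "://" + host + ":" + port + "/metrics"; // METRICS_ENDPOINT_PATH
}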
@@ -0,0 +1,31 @@
+package com.provectus.kafka.ui.service.metrics.v2.scrape.prometheus;
+
+import com.provectus.kafka.ui.model.MetricsScrapeProperties;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.PerBrokerScrapedMetrics;
+import io.prometheus.client.Collector;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import org.apache.kafka.common.Node;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+import reactor.util.function.Tuples;
+
+public class PrometheusScraper {
+
+  private final static PrometheusMetricsRetriever RETRIEVER = new PrometheusMetricsRetriever();
+
+  private final MetricsScrapeProperties metricsConfig;
+
+  public PrometheusScraper(MetricsScrapeProperties metricsConfig) {
+    this.metricsConfig = metricsConfig;
+  }
+
+  public Mono<PerBrokerScrapedMetrics> scrape(Collection<Node> clusterNodes) {
+    Mono<Map<Integer, List<Collector.MetricFamilySamples>>> collected = Flux.fromIterable(clusterNodes)
+        .flatMap(n -> RETRIEVER.retrieve(metricsConfig, n).map(metrics -> Tuples.of(n, metrics)))
+        .collectMap(t -> t.getT1().id(), t -> t.getT2());
+
+    return collected.map(PerBrokerScrapedMetrics::new);
+  }
+}
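Note: usage mirrors the JMX scraper. A hedged sketch, with the properties built the way the retriever test below builds them:

// Usage sketch; MetricsScrapeProperties construction is shown in
// PrometheusMetricsRetrieverTest further down.
PrometheusScraper scraper = new PrometheusScraper(metricsScrapeProperties);
Mono<PerBrokerScrapedMetrics> perBroker =
    scraper.scrape(List.of(new Node(0, "broker-0.example.com", 9092)));

The shared static RETRIEVER is safe here because PrometheusMetricsRetriever holds no per-cluster state; everything cluster-specific (port, auth, SSL) travels in the MetricsScrapeProperties argument.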
@@ -81,9 +81,6 @@ public class ReactiveFailover<T> {
         .flatMap(f)
         .onErrorResume(failoverExceptionsPredicate, th -> {
           publisher.markFailed();
-          if (candidates.size() == 1) {
-            return Mono.error(th);
-          }
           var newCandidates = candidates.stream().skip(1).filter(PublisherHolder::isActive).toList();
           if (newCandidates.isEmpty()) {
             return Mono.error(th);

@@ -106,9 +103,6 @@
         .flatMapMany(f)
         .onErrorResume(failoverExceptionsPredicate, th -> {
           publisher.markFailed();
-          if (candidates.size() == 1) {
-            return Flux.error(th);
-          }
           var newCandidates = candidates.stream().skip(1).filter(PublisherHolder::isActive).toList();
           if (newCandidates.isEmpty()) {
             return Flux.error(th);
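Note: the deleted size() == 1 guard was redundant rather than a behavior change: skipping one element of a single-candidate list leaves newCandidates empty, so the surviving isEmpty() branch returns the same Mono.error(th)/Flux.error(th). A tiny self-contained check of that invariant:

// Plain Java, no project dependencies; demonstrates why the guard was removable.
public class SkipOneCheck {
  public static void main(String[] args) {
    var candidates = java.util.List.of("only-publisher");
    var remaining = candidates.stream().skip(1).toList();
    System.out.println(remaining.isEmpty()); // true: isEmpty() covers size() == 1
  }
}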
@@ -2,6 +2,7 @@ package com.provectus.kafka.ui.service.metrics;

 import static org.assertj.core.api.Assertions.assertThat;

+import com.provectus.kafka.ui.service.metrics.v2.scrape.jmx.JmxMetricsFormatter;
 import java.math.BigDecimal;
 import java.util.List;
 import java.util.Map;

@@ -74,4 +75,4 @@ class JmxMetricsFormatterTest {
     assertThat(actual.value()).isCloseTo(expected.value(), Offset.offset(new BigDecimal("0.001")));
   }

 }
@@ -2,6 +2,7 @@ package com.provectus.kafka.ui.service.metrics;

 import static org.assertj.core.api.Assertions.assertThat;

+import com.provectus.kafka.ui.service.metrics.v2.scrape.prometheus.PrometheusEndpointMetricsParser;
 import java.util.Map;
 import java.util.Optional;
 import org.junit.jupiter.api.Test;
@@ -1,6 +1,7 @@
 package com.provectus.kafka.ui.service.metrics;

-import com.provectus.kafka.ui.model.MetricsConfig;
+import com.provectus.kafka.ui.model.MetricsScrapeProperties;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.prometheus.PrometheusMetricsRetriever;
 import java.io.IOException;
 import java.math.BigDecimal;
 import java.util.List;

@@ -34,7 +35,7 @@ class PrometheusMetricsRetrieverTest {
     var url = mockWebServer.url("/metrics");
     mockWebServer.enqueue(prepareResponse());

-    MetricsConfig metricsConfig = prepareMetricsConfig(url.port(), null, null);
+    MetricsScrapeProperties metricsConfig = prepareMetricsConfig(url.port(), null, null);

     StepVerifier.create(retriever.retrieve(WebClient.create(), url.host(), metricsConfig))
         .expectNextSequence(expectedRawMetrics())

@@ -48,7 +49,7 @@ class PrometheusMetricsRetrieverTest {
     mockWebServer.enqueue(prepareResponse());


-    MetricsConfig metricsConfig = prepareMetricsConfig(url.port(), "username", "password");
+    MetricsScrapeProperties metricsConfig = prepareMetricsConfig(url.port(), "username", "password");

     StepVerifier.create(retriever.retrieve(WebClient.create(), url.host(), metricsConfig))
         .expectNextSequence(expectedRawMetrics())

@@ -69,11 +70,11 @@ class PrometheusMetricsRetrieverTest {
     );
   }

-  MetricsConfig prepareMetricsConfig(Integer port, String username, String password) {
-    return MetricsConfig.builder()
+  MetricsScrapeProperties prepareMetricsConfig(Integer port, String username, String password) {
+    return MetricsScrapeProperties.builder()
         .ssl(false)
         .port(port)
-        .type(MetricsConfig.PROMETHEUS_METRICS_TYPE)
+        .type(MetricsScrapeProperties.PROMETHEUS_METRICS_TYPE)
         .username(username)
         .password(password)
         .build();
@@ -3,6 +3,8 @@ package com.provectus.kafka.ui.service.metrics;
 import static org.assertj.core.api.Assertions.assertThat;

 import com.provectus.kafka.ui.model.Metrics;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.WellKnownMetrics;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.prometheus.PrometheusEndpointMetricsParser;
 import java.math.BigDecimal;
 import java.util.Arrays;
 import java.util.Map;

@@ -68,7 +70,7 @@ class WellKnownMetricsTest {
     wellKnownMetrics.brokerBytesOutFifteenMinuteRate.put(2, new BigDecimal(20));

     Metrics.MetricsBuilder builder = Metrics.builder();
-    wellKnownMetrics.apply(builder);
+    wellKnownMetrics.ioRates(builder);
     var metrics = builder.build();

     // checking per topic io rates

@@ -90,4 +92,4 @@ class WellKnownMetricsTest {
       .forEach(m -> wellKnownMetrics.populate(n, m));
   }

 }