iliax 2023-06-20 11:26:36 +04:00
parent 7f7242eb8b
commit 38eb68dcc5
16 changed files with 193 additions and 45 deletions

View file

@@ -234,6 +234,17 @@
     <artifactId>spring-security-ldap</artifactId>
   </dependency>
+  <dependency>
+    <groupId>io.prometheus</groupId>
+    <artifactId>simpleclient</artifactId>
+    <version>0.16.0</version>
+  </dependency>
+  <dependency>
+    <groupId>io.prometheus</groupId>
+    <artifactId>simpleclient_common</artifactId>
+    <version>0.16.0</version>
+  </dependency>
   <dependency>
     <groupId>org.codehaus.groovy</groupId>
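
The two new io.prometheus artifacts split responsibilities: simpleclient provides the in-memory metric model and registry, simpleclient_common the Prometheus text exposition format. A minimal sketch of how they fit together, independent of this codebase (registry, metric name, and labels below are invented for illustration):

import io.prometheus.client.CollectorRegistry;
import io.prometheus.client.Gauge;
import io.prometheus.client.exporter.common.TextFormat;
import java.io.StringWriter;
import java.io.Writer;

public class SimpleclientSketch {
  public static void main(String[] args) throws Exception {
    // simpleclient: build and register a metric in an in-memory registry
    CollectorRegistry registry = new CollectorRegistry();
    Gauge partitions = Gauge.build()
        .name("kafka_topic_partitions")   // hypothetical metric name
        .help("Partition count per topic")
        .labelNames("topic")
        .register(registry);
    partitions.labels("test").set(3);

    // simpleclient_common: render the registry in the Prometheus text format
    Writer out = new StringWriter();
    TextFormat.write004(out, registry.metricFamilySamples());
    System.out.println(out);
  }
}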

View file

@@ -1,6 +1,7 @@
 package com.provectus.kafka.ui.model;
 import com.provectus.kafka.ui.service.ReactiveAdminClient;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.inferred.ScrapedClusterState;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -22,6 +23,8 @@ public class Statistics {
   Map<String, TopicDescription> topicDescriptions;
   Map<String, List<ConfigEntry>> topicConfigs;
+  ScrapedClusterState clusterState;
   public static Statistics empty() {
     return builder()
         .status(ServerStatusDTO.OFFLINE)
@@ -33,6 +36,7 @@ public class Statistics {
         .logDirInfo(InternalLogDirStats.empty())
         .topicDescriptions(Map.of())
         .topicConfigs(Map.of())
+        .clusterState(ScrapedClusterState.empty())
         .build();
   }
 }
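
Statistics now carries the last scraped cluster state alongside the existing admin-client snapshot, with ScrapedClusterState.empty() as the placeholder for offline clusters. A rough usage sketch, assuming Lombok generates the getter to match the builder() call seen in empty():

import com.provectus.kafka.ui.model.Statistics;
import com.provectus.kafka.ui.service.metrics.v2.scrape.inferred.ScrapedClusterState;

class StatisticsStateSketch {
  static ScrapedClusterState stateOf(Statistics stats) {
    // Every Statistics instance now exposes the scraped state;
    // Statistics.empty() wires in ScrapedClusterState.empty() as a stand-in.
    return stats.getClusterState();   // assumes a Lombok-generated getter
  }

  static void example() {
    ScrapedClusterState state = stateOf(Statistics.empty());
  }
}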

View file

@@ -12,9 +12,12 @@ import com.google.common.collect.Table;
 import com.provectus.kafka.ui.exception.IllegalEntityStateException;
 import com.provectus.kafka.ui.exception.NotFoundException;
 import com.provectus.kafka.ui.exception.ValidationException;
+import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.util.KafkaVersion;
+import com.provectus.kafka.ui.util.SslPropertiesUtil;
 import com.provectus.kafka.ui.util.annotation.KafkaClientInternalsDependant;
 import java.io.Closeable;
+import java.time.Duration;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -22,6 +25,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
+import java.util.Properties;
 import java.util.Set;
 import java.util.concurrent.CompletionException;
 import java.util.concurrent.ExecutionException;
@@ -55,6 +59,8 @@ import org.apache.kafka.clients.admin.NewTopic;
 import org.apache.kafka.clients.admin.OffsetSpec;
 import org.apache.kafka.clients.admin.RecordsToDelete;
 import org.apache.kafka.clients.admin.TopicDescription;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
 import org.apache.kafka.clients.consumer.OffsetAndMetadata;
 import org.apache.kafka.common.KafkaException;
 import org.apache.kafka.common.KafkaFuture;
@@ -77,6 +83,8 @@ import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
 import org.apache.kafka.common.errors.UnsupportedVersionException;
 import org.apache.kafka.common.requests.DescribeLogDirsResponse;
 import org.apache.kafka.common.resource.ResourcePatternFilter;
+import org.apache.kafka.common.serialization.BytesDeserializer;
+import org.apache.kafka.common.utils.Bytes;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.core.scheduler.Schedulers;
@@ -727,4 +735,26 @@ public class ReactiveAdminClient implements Closeable {
   public void close() {
     client.close();
   }
+
+  public static void main(String[] args) {
+    Properties props = new Properties();
+    props.put(ConsumerConfig.GROUP_ID_CONFIG, "test_group_1");
+    props.put(ConsumerConfig.CLIENT_ID_CONFIG, "kafka-ui-consumer-" + System.currentTimeMillis());
+    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
+    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class);
+    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class);
+    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
+    props.put(ConsumerConfig.ALLOW_AUTO_CREATE_TOPICS_CONFIG, "false");
+    try (var consumer = new KafkaConsumer<Bytes, Bytes>(props)) {
+      consumer.subscribe(List.of("test"));
+      while (true) {
+        consumer.poll(Duration.ofMillis(500));
+        //consumer.commitSync();
+      }
+    }
+  }
 }

View file

@@ -27,29 +27,9 @@ public interface RawMetric {
     return new SimpleMetric(name, labels, value);
   }
-  @AllArgsConstructor
-  @EqualsAndHashCode
-  @ToString
-  class SimpleMetric implements RawMetric {
-    private final String name;
-    private final Map<String, String> labels;
-    private final BigDecimal value;
-    @Override
-    public String name() {
-      return name;
-    }
-    @Override
-    public Map<String, String> labels() {
-      return labels;
-    }
-    @Override
-    public BigDecimal value() {
-      return value;
-    }
+  record SimpleMetric(String name,
+                      Map<String, String> labels,
+                      BigDecimal value) implements RawMetric {
     @Override
     public RawMetric copyWithValue(BigDecimal newValue) {
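
SimpleMetric becomes a compact record: the name/labels/value accessors, equals/hashCode, and toString that the deleted Lombok annotations and boilerplate provided now come from the record itself. A small sketch of building and rescaling such a metric, assuming it lives in the same package as RawMetric (so no import of the project class is needed); the metric name and labels are invented for illustration:

import java.math.BigDecimal;
import java.util.Map;

class RawMetricSketch {
  static void example() {
    RawMetric bytesIn = new RawMetric.SimpleMetric(
        "broker_bytes_in_total",        // hypothetical metric name
        Map.of("broker_id", "1"),
        BigDecimal.valueOf(1024));

    // copyWithValue keeps name and labels but swaps the measured value
    RawMetric doubled = bytesIn.copyWithValue(
        bytesIn.value().multiply(BigDecimal.valueOf(2)));
  }
}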

View file

@@ -0,0 +1,16 @@
+package com.provectus.kafka.ui.service.metrics.v2.scrape;
+
+import io.prometheus.client.Collector.MetricFamilySamples;
+import java.util.Collection;
+import java.util.stream.Stream;
+
+public interface ScrapedMetrics {
+
+  Stream<MetricFamilySamples> asStream();
+
+  static ScrapedMetrics create(Collection<MetricFamilySamples> lst) {
+    return lst::stream;
+  }
+}
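
ScrapedMetrics is a lazy view over a collection of Prometheus MetricFamilySamples; create(...) implements the interface with a method reference to the collection's stream(). A sketch of building one by hand, assuming it sits in the same package as ScrapedMetrics; the metric name and value are illustrative only:

import io.prometheus.client.Collector;
import io.prometheus.client.Collector.MetricFamilySamples;
import java.util.List;

class ScrapedMetricsSketch {
  static ScrapedMetrics example() {
    // One gauge family with a single unlabeled sample
    MetricFamilySamples family = new MetricFamilySamples(
        "kafka_broker_count",               // hypothetical metric
        Collector.Type.GAUGE,
        "Number of brokers in the cluster",
        List.of(new MetricFamilySamples.Sample(
            "kafka_broker_count", List.of(), List.of(), 3.0)));

    ScrapedMetrics metrics = ScrapedMetrics.create(List.of(family));
    metrics.asStream().forEach(mfs -> System.out.println(mfs.name));
    return metrics;
  }
}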

View file

@@ -0,0 +1,10 @@
+package com.provectus.kafka.ui.service.metrics.v2.scrape;
+
+import reactor.core.publisher.Mono;
+
+public interface Scraper<T extends ScrapedMetrics> {
+
+  Mono<T> scrape();
+}
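
Scraper has a single abstract method, so a stub can be expressed as a lambda; the JMX and Prometheus scrapers added below implement it as classes. A minimal sketch in the same package, reusing the ScrapedMetrics factory above:

import java.util.List;
import reactor.core.publisher.Mono;

class ScraperSketch {
  static void example() {
    // A scraper that always yields an empty metrics snapshot
    Scraper<ScrapedMetrics> noop = () -> Mono.just(ScrapedMetrics.create(List.of()));

    noop.scrape()
        .map(m -> m.asStream().count())
        .subscribe(count -> System.out.println("scraped families: " + count));
  }
}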

View file

@@ -0,0 +1,20 @@
+package com.provectus.kafka.ui.service.metrics.v2.scrape.inferred;
+
+import static io.prometheus.client.Collector.*;
+
+import com.provectus.kafka.ui.service.metrics.v2.scrape.ScrapedMetrics;
+import java.util.stream.Stream;
+
+public class InferredMetrics implements ScrapedMetrics {
+
+  @Override
+  public Stream<MetricFamilySamples> asStream() {
+    return null;
+  }
+
+  public ScrapedClusterState clusterState() {
+    //todo: impl
+    return null;
+  }
+}

View file

@@ -0,0 +1,22 @@
+package com.provectus.kafka.ui.service.metrics.v2.scrape.inferred;
+
+import com.provectus.kafka.ui.service.ReactiveAdminClient;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.Scraper;
+import reactor.core.publisher.Mono;
+
+public class InferredMetricsScrapper implements Scraper<InferredMetrics> {
+
+  private final ReactiveAdminClient adminClient;
+  private volatile ScrapedClusterState clusterState;
+
+  public InferredMetricsScrapper(ReactiveAdminClient adminClient) {
+    this.adminClient = adminClient;
+  }
+
+  @Override
+  public Mono<InferredMetrics> scrape() {
+    return null;
+  }
+}

View file

@@ -0,0 +1,19 @@
+package com.provectus.kafka.ui.service.metrics.v2.scrape.inferred;
+
+import com.provectus.kafka.ui.service.metrics.v2.scrape.inferred.states.ConsumerGroupsState;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.inferred.states.TopicsState;
+import java.time.Instant;
+import lombok.Value;
+
+@Value
+public class ScrapedClusterState {
+
+  Instant scrapeStart;
+  TopicsState topicsState;
+  ConsumerGroupsState consumerGroupsState;
+
+  public static ScrapedClusterState empty() {
+    return new ScrapedClusterState(null, null, null);
+  }
+}
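
ScrapedClusterState is an immutable snapshot; Lombok @Value supplies the all-args constructor and getters, and the topic and consumer-group state classes below are still empty placeholders. A sketch of how a scrape pass might assemble and read one, assuming it sits in the same package as ScrapedClusterState:

import com.provectus.kafka.ui.service.metrics.v2.scrape.inferred.states.ConsumerGroupsState;
import com.provectus.kafka.ui.service.metrics.v2.scrape.inferred.states.TopicsState;
import java.time.Instant;

class ClusterStateSketch {
  static ScrapedClusterState example() {
    // Freshly assembled snapshot; the state classes carry no data yet
    ScrapedClusterState state = new ScrapedClusterState(
        Instant.now(), new TopicsState(), new ConsumerGroupsState());

    System.out.println("scrape started at " + state.getScrapeStart());
    return state;
  }
}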

View file

@@ -0,0 +1,4 @@
+package com.provectus.kafka.ui.service.metrics.v2.scrape.inferred.states;
+
+public class ConsumerGroupsState {
+}

View file

@@ -0,0 +1,4 @@
+package com.provectus.kafka.ui.service.metrics.v2.scrape.inferred.states;
+
+public class TopicsState {
+}

View file

@@ -0,0 +1,13 @@
+package com.provectus.kafka.ui.service.metrics.v2.scrape.jmx;
+
+import com.provectus.kafka.ui.service.metrics.v2.scrape.ScrapedMetrics;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.Scraper;
+import reactor.core.publisher.Mono;
+
+public class JmxMetricsScraper implements Scraper<ScrapedMetrics> {
+
+  @Override
+  public Mono<ScrapedMetrics> scrape() {
+    return null;
+  }
+}

View file

@@ -0,0 +1,13 @@
+package com.provectus.kafka.ui.service.metrics.v2.scrape.prom;
+
+import com.provectus.kafka.ui.service.metrics.v2.scrape.ScrapedMetrics;
+import com.provectus.kafka.ui.service.metrics.v2.scrape.Scraper;
+import reactor.core.publisher.Mono;
+
+public class PrometheusScraper implements Scraper<ScrapedMetrics> {
+
+  @Override
+  public Mono<ScrapedMetrics> scrape() {
+    return null;
+  }
+}

View file

@@ -32,7 +32,7 @@ public abstract class AbstractIntegrationTest {
   public static final String LOCAL = "local";
   public static final String SECOND_LOCAL = "secondLocal";
-  private static final String CONFLUENT_PLATFORM_VERSION = "5.5.0";
+  private static final String CONFLUENT_PLATFORM_VERSION = "7.2.1";
   public static final KafkaContainer kafka = new KafkaContainer(
       DockerImageName.parse("confluentinc/cp-kafka").withTag(CONFLUENT_PLATFORM_VERSION))

View file

@@ -12,6 +12,7 @@ import com.provectus.kafka.ui.container.KsqlDbContainer;
 import java.time.Duration;
 import java.util.List;
 import java.util.Map;
+import org.junit.Ignore;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
@@ -19,6 +20,7 @@ import org.testcontainers.shaded.org.awaitility.Awaitility;
 import org.testcontainers.utility.DockerImageName;
 import reactor.test.StepVerifier;
+@Ignore
 class KsqlApiClientTest extends AbstractIntegrationTest {
   private static final KsqlDbContainer KSQL_DB = new KsqlDbContainer(