Switched messages endpoint to provide SSE with phases & consuming info (#645)

* Switched messages endpoint to provide SSE with phases & consuming info

* Switched messages endpoint to provide SSE with phases & consuming info

* Fixed comments

* Fixed comparator

* Fixed tests

* Reduced image size

* Feature/sse for messages (#681)

* [#645] SSE. Cleanup Topic Messages

* New messages page

* Update outdated snapshots

* Specs

* Specs

* Fixed build

* Fixed possible NPE in cluster update at init stage

* Provided additional information with messages #677 (to messages_sse branch) (#700)

* Provided additional information with messages #677

* SSE messages frontend (#725)

* SSE. Messages page

* Fix handleNextClick

* Add the page loader to the list of messages

Co-authored-by: Alexander <mr.afigitelniychuvak@gmail.com>

* Fix merge errors

* Fix conflicts

Co-authored-by: Timur Davletov <tdavletov@provectus.com>
Co-authored-by: Oleg Shur <workshur@gmail.com>
Co-authored-by: Alexander <mr.afigitelniychuvak@gmail.com>
German Osin 2021-08-04 16:30:00 +03:00 committed by GitHub
parent 1f268579d0
commit 18f5e1a2b2
80 changed files with 1797 additions and 1399 deletions

View file

@ -1,4 +1,4 @@
FROM openjdk:13
FROM adoptopenjdk/openjdk13:x86_64-alpine-jre-13.0.2_8
VOLUME /tmp
ARG JAR_FILE
COPY "/target/${JAR_FILE}" "/kafka-ui-api.jar"

View file

@ -46,20 +46,15 @@
<artifactId>kafka-ui-contract</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.springdoc</groupId>
<artifactId>springdoc-openapi-webflux-ui</artifactId>
<version>${springdoc-openapi-webflux-ui.version}</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>${kafka-clients.version}</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.13</artifactId>
<version>${kafka.version}</version>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.9</version>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>

View file

@ -1,14 +1,10 @@
package com.provectus.kafka.ui.config;
import static org.springdoc.core.Constants.CLASSPATH_RESOURCE_LOCATION;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import org.springframework.web.reactive.config.CorsRegistry;
import org.springframework.web.reactive.config.EnableWebFlux;
import org.springframework.web.reactive.config.ResourceHandlerRegistry;
import org.springframework.web.reactive.config.WebFluxConfigurer;
import org.springframework.web.reactive.resource.WebJarsResourceResolver;
@Configuration
@EnableWebFlux
@ -23,12 +19,4 @@ public class CorsGlobalConfiguration implements WebFluxConfigurer {
.allowedHeaders("*")
.allowCredentials(true);
}
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
registry.addResourceHandler("/webjars/**")
.addResourceLocations(CLASSPATH_RESOURCE_LOCATION + "/webjars/")
.resourceChain(true)
.addResolver(new WebJarsResourceResolver());
}
}

View file

@ -5,7 +5,7 @@ import com.provectus.kafka.ui.model.ConsumerPosition;
import com.provectus.kafka.ui.model.CreateTopicMessage;
import com.provectus.kafka.ui.model.SeekDirection;
import com.provectus.kafka.ui.model.SeekType;
import com.provectus.kafka.ui.model.TopicMessage;
import com.provectus.kafka.ui.model.TopicMessageEvent;
import com.provectus.kafka.ui.model.TopicMessageSchema;
import com.provectus.kafka.ui.service.ClusterService;
import java.util.Collections;
@ -40,15 +40,17 @@ public class MessagesController implements MessagesApi {
).map(ResponseEntity::ok);
}
@Override
public Mono<ResponseEntity<Flux<TopicMessage>>> getTopicMessages(
public Mono<ResponseEntity<Flux<TopicMessageEvent>>> getTopicMessages(
String clusterName, String topicName, @Valid SeekType seekType, @Valid List<String> seekTo,
@Valid Integer limit, @Valid String q, @Valid SeekDirection seekDirection,
ServerWebExchange exchange) {
return parseConsumerPosition(topicName, seekType, seekTo, seekDirection)
.map(consumerPosition -> ResponseEntity
.ok(clusterService.getMessages(clusterName, topicName, consumerPosition, q, limit)));
.map(position ->
ResponseEntity.ok(
clusterService.getMessages(clusterName, topicName, position, q, limit)
)
);
}
@Override
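
With this change the endpoint streams Flux<TopicMessageEvent> over text/event-stream instead of returning a finite TopicMessage array, so clients see PHASE, CONSUMING and MESSAGE events as they are produced. A minimal consumption sketch using Spring's reactive WebClient; the base URL, cluster and topic names are placeholders, not part of this commit:

import com.provectus.kafka.ui.model.TopicMessageEvent;
import org.springframework.http.MediaType;
import org.springframework.web.reactive.function.client.WebClient;

public class MessagesSseClientSketch {
  public static void main(String[] args) {
    WebClient client = WebClient.create("http://localhost:8080"); // placeholder base URL
    client.get()
        .uri("/api/clusters/{cluster}/topics/{topic}/messages", "local", "my-topic")
        .accept(MediaType.TEXT_EVENT_STREAM)
        .retrieve()
        .bodyToFlux(TopicMessageEvent.class)
        // the server ends the stream when the emitter calls sink.complete()
        .doOnNext(e -> System.out.println(e.getType() + ": " + e))
        .blockLast();
  }
}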

View file

@ -0,0 +1,81 @@
package com.provectus.kafka.ui.emitter;
import com.provectus.kafka.ui.model.TopicMessage;
import com.provectus.kafka.ui.model.TopicMessageConsuming;
import com.provectus.kafka.ui.model.TopicMessageEvent;
import com.provectus.kafka.ui.model.TopicMessagePhase;
import com.provectus.kafka.ui.serde.RecordSerDe;
import com.provectus.kafka.ui.util.ClusterUtil;
import java.time.Duration;
import java.time.Instant;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.utils.Bytes;
import reactor.core.publisher.FluxSink;
public abstract class AbstractEmitter {
private static final Duration POLL_TIMEOUT_MS = Duration.ofMillis(1000L);
private final RecordSerDe recordDeserializer;
private long bytes = 0;
private int records = 0;
private long elapsed = 0;
public AbstractEmitter(RecordSerDe recordDeserializer) {
this.recordDeserializer = recordDeserializer;
}
protected ConsumerRecords<Bytes, Bytes> poll(
FluxSink<TopicMessageEvent> sink, Consumer<Bytes, Bytes> consumer) {
Instant start = Instant.now();
ConsumerRecords<Bytes, Bytes> records = consumer.poll(POLL_TIMEOUT_MS);
Instant finish = Instant.now();
sendConsuming(sink, records, Duration.between(start, finish).toMillis());
return records;
}
protected FluxSink<TopicMessageEvent> sendMessage(FluxSink<TopicMessageEvent> sink,
ConsumerRecord<Bytes, Bytes> msg) {
final TopicMessage topicMessage = ClusterUtil.mapToTopicMessage(msg, recordDeserializer);
return sink.next(
new TopicMessageEvent()
.type(TopicMessageEvent.TypeEnum.MESSAGE)
.message(topicMessage)
);
}
protected void sendPhase(FluxSink<TopicMessageEvent> sink, String name) {
sink.next(
new TopicMessageEvent()
.type(TopicMessageEvent.TypeEnum.PHASE)
.phase(new TopicMessagePhase().name(name))
);
}
protected void sendConsuming(FluxSink<TopicMessageEvent> sink,
ConsumerRecords<Bytes, Bytes> records,
long elapsed) {
for (ConsumerRecord<Bytes, Bytes> record : records) {
for (Header header : record.headers()) {
bytes +=
(header.key() != null ? header.key().getBytes().length : 0L)
+ (header.value() != null ? header.value().length : 0L);
}
bytes += record.serializedKeySize() + record.serializedValueSize();
}
this.records += records.count();
this.elapsed += elapsed;
final TopicMessageConsuming consuming = new TopicMessageConsuming()
.bytesConsumed(this.bytes)
.elapsedMs(this.elapsed)
.isCancelled(sink.isCancelled())
.messagesConsumed(this.records);
sink.next(
new TopicMessageEvent()
.type(TopicMessageEvent.TypeEnum.CONSUMING)
.consuming(consuming)
);
}
}
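
AbstractEmitter centralizes the bookkeeping both concrete emitters need: poll() times every consumer.poll and immediately reports it through sendConsuming, so the bytes, records and elapsed counters accumulate over the whole request, while sendPhase and sendMessage wrap their payloads into TopicMessageEvent envelopes. The rough interleaving a subscriber observes, inferred from the emitter code with illustrative values:

PHASE      name="Assigning partitions"
PHASE      name="Polling"
CONSUMING  bytesConsumed=1024, messagesConsumed=2, elapsedMs=12, isCancelled=false
MESSAGE    partition=0, offset=41, ...
MESSAGE    partition=0, offset=42, ...
PHASE      name="Polling"
CONSUMING  bytesConsumed=2048, messagesConsumed=4, elapsedMs=23, isCancelled=false
...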

View file

@ -1,13 +1,16 @@
package com.provectus.kafka.ui.emitter;
import com.provectus.kafka.ui.model.TopicMessageEvent;
import com.provectus.kafka.ui.serde.RecordSerDe;
import com.provectus.kafka.ui.util.OffsetsSeekBackward;
import java.time.Duration;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.function.Function;
import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
@ -17,36 +20,50 @@ import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.utils.Bytes;
import reactor.core.publisher.FluxSink;
@RequiredArgsConstructor
@Log4j2
public class BackwardRecordEmitter
implements java.util.function.Consumer<FluxSink<ConsumerRecord<Bytes, Bytes>>> {
private static final Duration POLL_TIMEOUT_MS = Duration.ofMillis(1000L);
extends AbstractEmitter
implements java.util.function.Consumer<FluxSink<TopicMessageEvent>> {
private final Function<Map<String, Object>, KafkaConsumer<Bytes, Bytes>> consumerSupplier;
private final OffsetsSeekBackward offsetsSeek;
public BackwardRecordEmitter(
Function<Map<String, Object>, KafkaConsumer<Bytes, Bytes>> consumerSupplier,
OffsetsSeekBackward offsetsSeek,
RecordSerDe recordDeserializer) {
super(recordDeserializer);
this.offsetsSeek = offsetsSeek;
this.consumerSupplier = consumerSupplier;
}
@Override
public void accept(FluxSink<ConsumerRecord<Bytes, Bytes>> sink) {
public void accept(FluxSink<TopicMessageEvent> sink) {
try (KafkaConsumer<Bytes, Bytes> configConsumer = consumerSupplier.apply(Map.of())) {
final List<TopicPartition> requestedPartitions =
offsetsSeek.getRequestedPartitions(configConsumer);
sendPhase(sink, "Request partitions");
final int msgsPerPartition = offsetsSeek.msgsPerPartition(requestedPartitions.size());
try (KafkaConsumer<Bytes, Bytes> consumer =
consumerSupplier.apply(
Map.of(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, msgsPerPartition)
)
) {
final Map<TopicPartition, Long> partitionsOffsets =
offsetsSeek.getPartitionsOffsets(consumer);
sendPhase(sink, "Created consumer");
SortedMap<TopicPartition, Long> partitionsOffsets =
new TreeMap<>(Comparator.comparingInt(TopicPartition::partition));
partitionsOffsets.putAll(offsetsSeek.getPartitionsOffsets(consumer));
sendPhase(sink, "Requested partitions offsets");
log.debug("partition offsets: {}", partitionsOffsets);
var waitingOffsets =
offsetsSeek.waitingOffsets(consumer, partitionsOffsets.keySet());
log.debug("waittin offsets {} {}",
log.debug("waiting offsets {} {}",
waitingOffsets.getBeginOffsets(),
waitingOffsets.getEndOffsets()
);
while (!sink.isCancelled() && !waitingOffsets.beginReached()) {
for (Map.Entry<TopicPartition, Long> entry : partitionsOffsets.entrySet()) {
final Long lowest = waitingOffsets.getBeginOffsets().get(entry.getKey().partition());
@ -55,7 +72,10 @@ public class BackwardRecordEmitter
final long offset = Math.max(lowest, entry.getValue() - msgsPerPartition);
log.debug("Polling {} from {}", entry.getKey(), offset);
consumer.seek(entry.getKey(), offset);
ConsumerRecords<Bytes, Bytes> records = consumer.poll(POLL_TIMEOUT_MS);
sendPhase(sink,
String.format("Consuming partition: %s from %s", entry.getKey(), offset)
);
final ConsumerRecords<Bytes, Bytes> records = poll(sink, consumer);
final List<ConsumerRecord<Bytes, Bytes>> partitionRecords =
records.records(entry.getKey()).stream()
.filter(r -> r.offset() < partitionsOffsets.get(entry.getKey()))
@ -73,7 +93,7 @@ public class BackwardRecordEmitter
for (ConsumerRecord<Bytes, Bytes> msg : partitionRecords) {
if (!sink.isCancelled() && !waitingOffsets.beginReached()) {
sink.next(msg);
sendMessage(sink, msg);
waitingOffsets.markPolled(msg);
} else {
log.info("Begin reached");

View file

@ -1,9 +1,11 @@
package com.provectus.kafka.ui.emitter;
import com.provectus.kafka.ui.model.TopicMessageEvent;
import com.provectus.kafka.ui.serde.RecordSerDe;
import com.provectus.kafka.ui.util.OffsetsSeek;
import java.time.Duration;
import java.time.Instant;
import java.util.function.Supplier;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
@ -11,33 +13,43 @@ import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.utils.Bytes;
import reactor.core.publisher.FluxSink;
@RequiredArgsConstructor
@Log4j2
public class ForwardRecordEmitter
implements java.util.function.Consumer<FluxSink<ConsumerRecord<Bytes, Bytes>>> {
extends AbstractEmitter
implements java.util.function.Consumer<FluxSink<TopicMessageEvent>> {
private static final Duration POLL_TIMEOUT_MS = Duration.ofMillis(1000L);
private final Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier;
private final OffsetsSeek offsetsSeek;
public ForwardRecordEmitter(
Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
OffsetsSeek offsetsSeek,
RecordSerDe recordDeserializer) {
super(recordDeserializer);
this.consumerSupplier = consumerSupplier;
this.offsetsSeek = offsetsSeek;
}
@Override
public void accept(FluxSink<ConsumerRecord<Bytes, Bytes>> sink) {
public void accept(FluxSink<TopicMessageEvent> sink) {
try (KafkaConsumer<Bytes, Bytes> consumer = consumerSupplier.get()) {
sendPhase(sink, "Assigning partitions");
var waitingOffsets = offsetsSeek.assignAndSeek(consumer);
while (!sink.isCancelled() && !waitingOffsets.endReached()) {
ConsumerRecords<Bytes, Bytes> records = consumer.poll(POLL_TIMEOUT_MS);
sendPhase(sink, "Polling");
ConsumerRecords<Bytes, Bytes> records = poll(sink, consumer);
log.info("{} records polled", records.count());
for (ConsumerRecord<Bytes, Bytes> msg : records) {
if (!sink.isCancelled() && !waitingOffsets.endReached()) {
sink.next(msg);
sendMessage(sink, msg);
waitingOffsets.markPolled(msg);
} else {
break;
}
}
}
sink.complete();
log.info("Polling finished");

View file

@ -5,6 +5,9 @@ import com.google.protobuf.DynamicMessage;
import com.google.protobuf.util.JsonFormat;
import com.provectus.kafka.ui.model.MessageSchema;
import com.provectus.kafka.ui.model.TopicMessageSchema;
import com.provectus.kafka.ui.serde.schemaregistry.MessageFormat;
import com.provectus.kafka.ui.serde.schemaregistry.MessageFormatter;
import com.provectus.kafka.ui.util.ConsumerRecordUtil;
import com.provectus.kafka.ui.util.jsonschema.JsonSchema;
import com.provectus.kafka.ui.util.jsonschema.ProtobufSchemaConverter;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
@ -44,10 +47,16 @@ public class ProtobufFileRecordSerDe implements RecordSerDe {
@Override
public DeserializedKeyValue deserialize(ConsumerRecord<Bytes, Bytes> msg) {
try {
return new DeserializedKeyValue(
msg.key() != null ? new String(msg.key().get()) : null,
msg.value() != null ? parse(msg.value().get()) : null
);
var builder = DeserializedKeyValue.builder();
if (msg.key() != null) {
builder.key(new String(msg.key().get()));
builder.keyFormat(MessageFormat.UNKNOWN);
}
if (msg.value() != null) {
builder.value(parse(msg.value().get()));
builder.valueFormat(MessageFormat.PROTOBUF);
}
return builder.build();
} catch (Throwable e) {
throw new RuntimeException("Failed to parse record from topic " + msg.topic(), e);
}

View file

@ -1,7 +1,9 @@
package com.provectus.kafka.ui.serde;
import com.provectus.kafka.ui.model.TopicMessageSchema;
import com.provectus.kafka.ui.serde.schemaregistry.MessageFormat;
import javax.annotation.Nullable;
import lombok.Builder;
import lombok.Value;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerRecord;
@ -10,9 +12,14 @@ import org.apache.kafka.common.utils.Bytes;
public interface RecordSerDe {
@Value
@Builder
class DeserializedKeyValue {
@Nullable String key;
@Nullable String value;
@Nullable MessageFormat keyFormat;
@Nullable MessageFormat valueFormat;
@Nullable String keySchemaId;
@Nullable String valueSchemaId;
}
DeserializedKeyValue deserialize(ConsumerRecord<Bytes, Bytes> msg);

View file

@ -3,6 +3,8 @@ package com.provectus.kafka.ui.serde;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.provectus.kafka.ui.model.MessageSchema;
import com.provectus.kafka.ui.model.TopicMessageSchema;
import com.provectus.kafka.ui.serde.schemaregistry.MessageFormat;
import com.provectus.kafka.ui.util.ConsumerRecordUtil;
import com.provectus.kafka.ui.util.jsonschema.JsonSchema;
import javax.annotation.Nullable;
import org.apache.kafka.clients.consumer.ConsumerRecord;
@ -13,10 +15,16 @@ public class SimpleRecordSerDe implements RecordSerDe {
@Override
public DeserializedKeyValue deserialize(ConsumerRecord<Bytes, Bytes> msg) {
return new DeserializedKeyValue(
msg.key() != null ? new String(msg.key().get()) : null,
msg.value() != null ? new String(msg.value().get()) : null
);
var builder = DeserializedKeyValue.builder();
if (msg.key() != null) {
builder.key(new String(msg.key().get()))
.keyFormat(MessageFormat.UNKNOWN);
}
if (msg.value() != null) {
builder.value(new String(msg.value().get()))
.valueFormat(MessageFormat.UNKNOWN);
}
return builder.build();
}
@Override

View file

@ -20,4 +20,9 @@ public class AvroMessageFormatter implements MessageFormatter {
byte[] jsonBytes = AvroSchemaUtils.toJson(avroRecord);
return new String(jsonBytes);
}
@Override
public MessageFormat getFormat() {
return MessageFormat.AVRO;
}
}

View file

@ -17,4 +17,9 @@ public class JsonSchemaMessageFormatter implements MessageFormatter {
JsonNode json = jsonSchemaDeserializer.deserialize(topic, value);
return json.toString();
}
@Override
public MessageFormat getFormat() {
return MessageFormat.JSON;
}
}

View file

@ -3,5 +3,6 @@ package com.provectus.kafka.ui.serde.schemaregistry;
public enum MessageFormat {
AVRO,
JSON,
PROTOBUF
PROTOBUF,
UNKNOWN
}

View file

@ -2,4 +2,8 @@ package com.provectus.kafka.ui.serde.schemaregistry;
public interface MessageFormatter {
String format(String topic, byte[] value);
default MessageFormat getFormat() {
return MessageFormat.UNKNOWN;
}
}

View file

@ -20,4 +20,9 @@ public class ProtobufMessageFormatter implements MessageFormatter {
byte[] jsonBytes = ProtobufSchemaUtils.toJson(message);
return new String(jsonBytes);
}
@Override
public MessageFormat getFormat() {
return MessageFormat.PROTOBUF;
}
}

View file

@ -10,6 +10,7 @@ import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.model.MessageSchema;
import com.provectus.kafka.ui.model.TopicMessageSchema;
import com.provectus.kafka.ui.serde.RecordSerDe;
import com.provectus.kafka.ui.util.ConsumerRecordUtil;
import com.provectus.kafka.ui.util.jsonschema.AvroJsonSchemaConverter;
import com.provectus.kafka.ui.util.jsonschema.JsonSchema;
import com.provectus.kafka.ui.util.jsonschema.ProtobufSchemaConverter;
@ -114,14 +115,28 @@ public class SchemaRegistryAwareRecordSerDe implements RecordSerDe {
public DeserializedKeyValue deserialize(ConsumerRecord<Bytes, Bytes> msg) {
try {
return new DeserializedKeyValue(
msg.key() != null
? getMessageFormatter(msg, true).format(msg.topic(), msg.key().get())
: null,
msg.value() != null
? getMessageFormatter(msg, false).format(msg.topic(), msg.value().get())
: null
);
var builder = DeserializedKeyValue.builder();
if (msg.key() != null) {
MessageFormatter messageFormatter = getMessageFormatter(msg, true);
builder.key(messageFormatter.format(msg.topic(), msg.key().get()));
builder.keyFormat(messageFormatter.getFormat());
builder.keySchemaId(
getSchemaId(msg.key(), messageFormatter.getFormat())
.map(String::valueOf)
.orElse(null)
);
}
if (msg.value() != null) {
MessageFormatter messageFormatter = getMessageFormatter(msg, false);
builder.value(messageFormatter.format(msg.topic(), msg.value().get()));
builder.valueFormat(messageFormatter.getFormat());
builder.valueSchemaId(
getSchemaId(msg.value(), messageFormatter.getFormat())
.map(String::valueOf)
.orElse(null)
);
}
return builder.build();
} catch (Throwable e) {
throw new RuntimeException("Failed to parse record from topic " + msg.topic(), e);
}
@ -293,6 +308,16 @@ public class SchemaRegistryAwareRecordSerDe implements RecordSerDe {
return result;
}
private Optional<Integer> getSchemaId(Bytes value, MessageFormat format) {
if (format != MessageFormat.AVRO
&& format != MessageFormat.PROTOBUF
&& format != MessageFormat.JSON) {
return Optional.empty();
}
ByteBuffer buffer = ByteBuffer.wrap(value.get());
return buffer.get() == 0 ? Optional.of(buffer.getInt()) : Optional.empty();
}
@SneakyThrows
private Optional<SchemaMetadata> getSchemaBySubject(String topic, boolean isKey) {
return Optional.ofNullable(schemaRegistryClient)
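
getSchemaId relies on the Confluent wire format: schema-registry serializers prefix every payload with a magic byte (0) followed by a four-byte big-endian schema id, and the serialized Avro/Protobuf/JSON data follows. A standalone sketch of the same parsing, with an added length guard the diff omits because the formatter has already classified the payload:

import java.nio.ByteBuffer;
import java.util.Optional;

final class ConfluentWireFormat {
  // payload layout: [magic 0x00][4-byte schema id][serialized data]
  static Optional<Integer> schemaIdOf(byte[] payload) {
    if (payload == null || payload.length < 5 || payload[0] != 0) {
      return Optional.empty();
    }
    return Optional.of(ByteBuffer.wrap(payload, 1, 4).getInt());
  }
}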

View file

@ -32,6 +32,7 @@ import com.provectus.kafka.ui.model.TopicConfig;
import com.provectus.kafka.ui.model.TopicCreation;
import com.provectus.kafka.ui.model.TopicDetails;
import com.provectus.kafka.ui.model.TopicMessage;
import com.provectus.kafka.ui.model.TopicMessageEvent;
import com.provectus.kafka.ui.model.TopicMessageSchema;
import com.provectus.kafka.ui.model.TopicUpdate;
import com.provectus.kafka.ui.model.TopicsResponse;
@ -160,9 +161,7 @@ public class ClusterService {
public Optional<TopicDetails> getTopicDetails(String name, String topicName) {
return clustersStorage.getClusterByName(name)
.flatMap(c ->
Optional.ofNullable(
c.getTopics().get(topicName)
).map(
Optional.ofNullable(c.getTopics()).map(l -> l.get(topicName)).map(
t -> t.toBuilder().partitions(
kafkaService.getTopicPartitions(c, t)
).build()
@ -275,7 +274,7 @@ public class ClusterService {
.orElse(Mono.error(new ClusterNotFoundException()));
}
public Flux<TopicMessage> getMessages(String clusterName, String topicName,
public Flux<TopicMessageEvent> getMessages(String clusterName, String topicName,
ConsumerPosition consumerPosition, String query,
Integer limit) {
return clustersStorage.getClusterByName(clusterName)

View file

@ -7,9 +7,10 @@ import com.provectus.kafka.ui.model.ConsumerPosition;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.model.SeekDirection;
import com.provectus.kafka.ui.model.TopicMessage;
import com.provectus.kafka.ui.model.TopicMessageEvent;
import com.provectus.kafka.ui.serde.DeserializationService;
import com.provectus.kafka.ui.serde.RecordSerDe;
import com.provectus.kafka.ui.util.ClusterUtil;
import com.provectus.kafka.ui.util.FilterTopicMessageEvents;
import com.provectus.kafka.ui.util.OffsetsSeekBackward;
import com.provectus.kafka.ui.util.OffsetsSeekForward;
import java.util.Collection;
@ -19,13 +20,12 @@ import java.util.Optional;
import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.utils.Bytes;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import reactor.core.publisher.Flux;
import reactor.core.publisher.FluxSink;
import reactor.core.publisher.Mono;
@ -43,32 +43,34 @@ public class ConsumingService {
private final DeserializationService deserializationService;
private final ObjectMapper objectMapper = new ObjectMapper();
public Flux<TopicMessage> loadMessages(KafkaCluster cluster, String topic,
ConsumerPosition consumerPosition, String query,
Integer limit) {
public Flux<TopicMessageEvent> loadMessages(KafkaCluster cluster, String topic,
ConsumerPosition consumerPosition, String query,
Integer limit) {
int recordsLimit = Optional.ofNullable(limit)
.map(s -> Math.min(s, MAX_RECORD_LIMIT))
.orElse(DEFAULT_RECORD_LIMIT);
java.util.function.Consumer<? super FluxSink<ConsumerRecord<Bytes, Bytes>>> emitter;
java.util.function.Consumer<? super FluxSink<TopicMessageEvent>> emitter;
RecordSerDe recordDeserializer =
deserializationService.getRecordDeserializerForCluster(cluster);
if (consumerPosition.getSeekDirection().equals(SeekDirection.FORWARD)) {
emitter = new ForwardRecordEmitter(
() -> kafkaService.createConsumer(cluster),
new OffsetsSeekForward(topic, consumerPosition)
new OffsetsSeekForward(topic, consumerPosition),
recordDeserializer
);
} else {
emitter = new BackwardRecordEmitter(
(Map<String, Object> props) -> kafkaService.createConsumer(cluster, props),
new OffsetsSeekBackward(topic, consumerPosition, recordsLimit)
new OffsetsSeekBackward(topic, consumerPosition, recordsLimit),
recordDeserializer
);
}
RecordSerDe recordDeserializer =
deserializationService.getRecordDeserializerForCluster(cluster);
return Flux.create(emitter)
.subscribeOn(Schedulers.boundedElastic())
.map(r -> ClusterUtil.mapToTopicMessage(r, recordDeserializer))
.filter(m -> filterTopicMessage(m, query))
.limitRequest(recordsLimit);
.takeWhile(new FilterTopicMessageEvents(recordsLimit))
.subscribeOn(Schedulers.elastic())
.share();
}
public Mono<Map<TopicPartition, Long>> offsetsForDeletion(KafkaCluster cluster, String topicName,
@ -102,12 +104,16 @@ public class ConsumingService {
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
}
private boolean filterTopicMessage(TopicMessage message, String query) {
if (StringUtils.isEmpty(query)) {
private boolean filterTopicMessage(TopicMessageEvent message, String query) {
log.info("filter");
if (StringUtils.isEmpty(query)
|| !message.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE)) {
return true;
}
return (StringUtils.isNotEmpty(message.getKey()) && message.getKey().contains(query))
|| (StringUtils.isNotEmpty(message.getContent()) && message.getContent().contains(query));
final TopicMessage msg = message.getMessage();
return (!StringUtils.isEmpty(msg.getKey()) && msg.getKey().contains(query))
|| (!StringUtils.isEmpty(msg.getContent()) && msg.getContent().contains(query));
}
}
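
The share() at the end of loadMessages turns the cold flux produced by Flux.create into a multicast one, so an additional subscriber attaches to the already-running poll loop instead of triggering a second Kafka consumer. A generic Reactor illustration of that difference, not project code:

import java.time.Duration;
import reactor.core.publisher.Flux;

public class ShareSketch {
  public static void main(String[] args) throws InterruptedException {
    Flux<Long> cold = Flux.interval(Duration.ofMillis(100)).take(3);
    Flux<Long> hot = cold.share();

    // both subscribers observe the same underlying interval;
    // subscribing to `cold` twice would start two independent ones
    hot.subscribe(v -> System.out.println("a: " + v));
    hot.subscribe(v -> System.out.println("b: " + v));

    Thread.sleep(500); // let the shared sequence finish before the JVM exits
  }
}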

View file

@ -30,10 +30,11 @@ import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.log4j.Log4j2;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.util.function.Tuple2;
import reactor.util.function.Tuples;
@Service
@Log4j2
@ -59,7 +60,7 @@ public class KafkaConnectService {
return getConnects(clusterName)
.flatMapMany(Function.identity())
.flatMap(connect -> getConnectorNames(clusterName, connect))
.flatMap(pair -> getConnector(clusterName, pair.getLeft(), pair.getRight()))
.flatMap(pair -> getConnector(clusterName, pair.getT1(), pair.getT2()))
.flatMap(connector ->
getConnectorConfig(clusterName, connector.getConnect(), connector.getName())
.map(config -> InternalConnectInfo.builder()
@ -96,19 +97,19 @@ public class KafkaConnectService {
private Predicate<FullConnectorInfo> matchesSearchTerm(final String search) {
return (connector) -> getSearchValues(connector)
.anyMatch(value -> value.contains(
StringUtils.defaultString(
search,
StringUtils.EMPTY)
.toUpperCase()));
.anyMatch(value -> value.contains(
StringUtils.defaultString(
search,
StringUtils.EMPTY)
.toUpperCase()));
}
private Stream<String> getSearchValues(FullConnectorInfo fullConnectorInfo) {
return Stream.of(
fullConnectorInfo.getName(),
fullConnectorInfo.getStatus().getState().getValue(),
fullConnectorInfo.getType().getValue())
.map(String::toUpperCase);
fullConnectorInfo.getName(),
fullConnectorInfo.getStatus().getState().getValue(),
fullConnectorInfo.getType().getValue())
.map(String::toUpperCase);
}
private Mono<ConnectorTopics> getConnectorTopics(String clusterName, String connectClusterName,
@ -121,13 +122,13 @@ public class KafkaConnectService {
);
}
private Flux<Pair<String, String>> getConnectorNames(String clusterName, Connect connect) {
private Flux<Tuple2<String, String>> getConnectorNames(String clusterName, Connect connect) {
return getConnectors(clusterName, connect.getName())
.collectList().map(e -> e.get(0))
// for some reason `getConnectors` method returns the response as a single string
.map(this::parseToList)
.flatMapMany(Flux::fromIterable)
.map(connector -> Pair.of(connect.getName(), connector));
.map(connector -> Tuples.of(connect.getName(), connector));
}
@SneakyThrows

View file

@ -81,6 +81,9 @@ import org.apache.kafka.common.errors.InvalidRequestException;
import org.apache.kafka.common.errors.LogDirNotFoundException;
import org.apache.kafka.common.errors.TimeoutException;
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.header.internals.RecordHeaders;
import org.apache.kafka.common.requests.DescribeLogDirsResponse;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.BytesDeserializer;
@ -110,7 +113,10 @@ public class KafkaService {
private int clientTimeout;
public KafkaCluster getUpdatedCluster(KafkaCluster cluster, InternalTopic updatedTopic) {
final Map<String, InternalTopic> topics = new HashMap<>(cluster.getTopics());
final Map<String, InternalTopic> topics =
Optional.ofNullable(cluster.getTopics()).map(
t -> new HashMap<>(cluster.getTopics())
).orElse(new HashMap<>());
topics.put(updatedTopic.getName(), updatedTopic);
return cluster.toBuilder().topics(topics).build();
}
@ -160,8 +166,8 @@ public class KafkaService {
Throwable zookeeperException = null;
try {
zookeeperStatus = zookeeperService.isZookeeperOnline(currentCluster)
? ServerStatus.ONLINE
: ServerStatus.OFFLINE;
? ServerStatus.ONLINE
: ServerStatus.OFFLINE;
} catch (Throwable e) {
zookeeperException = e;
}
@ -338,7 +344,7 @@ public class KafkaService {
.collect(Collectors.toList());
return ClusterUtil.toMono(adminClient.describeConfigs(resources,
new DescribeConfigsOptions().includeSynonyms(true)).all())
new DescribeConfigsOptions().includeSynonyms(true)).all())
.map(configs ->
configs.entrySet().stream().collect(Collectors.toMap(
c -> c.getKey().name(),
@ -391,8 +397,8 @@ public class KafkaService {
getConsumerGroupsInternal(
cluster,
s.stream().map(ConsumerGroupListing::groupId).collect(Collectors.toList()))
)
);
)
);
}
public Mono<List<InternalConsumerGroup>> getConsumerGroupsInternal(
@ -425,17 +431,17 @@ public class KafkaService {
}
return consumerGroups.map(c ->
c.stream()
.map(d -> ClusterUtil.filterConsumerGroupTopic(d, topic))
.filter(Optional::isPresent)
.map(Optional::get)
.map(g ->
g.toBuilder().endOffsets(
topicPartitionsEndOffsets(cluster, g.getOffsets().keySet())
).build()
)
.collect(Collectors.toList())
);
c.stream()
.map(d -> ClusterUtil.filterConsumerGroupTopic(d, topic))
.filter(Optional::isPresent)
.map(Optional::get)
.map(g ->
g.toBuilder().endOffsets(
topicPartitionsEndOffsets(cluster, g.getOffsets().keySet())
).build()
)
.collect(Collectors.toList())
);
}
public Mono<Map<TopicPartition, OffsetAndMetadata>> groupMetadata(KafkaCluster cluster,
@ -736,12 +742,18 @@ public class KafkaService {
properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
try (KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(properties)) {
final ProducerRecord<byte[], byte[]> producerRecord = serde.serialize(
ProducerRecord<byte[], byte[]> producerRecord = serde.serialize(
topic,
msg.getKey(),
msg.getContent(),
msg.getPartition()
);
producerRecord = new ProducerRecord<>(
producerRecord.topic(),
producerRecord.partition(),
producerRecord.key(),
producerRecord.value(),
createHeaders(msg.getHeaders()));
CompletableFuture<RecordMetadata> cf = new CompletableFuture<>();
producer.send(producerRecord, (metadata, exception) -> {
@ -755,6 +767,15 @@ public class KafkaService {
}
}
private Iterable<Header> createHeaders(Map<String, String> clientHeaders) {
if (clientHeaders == null) {
return null;
}
RecordHeaders headers = new RecordHeaders();
clientHeaders.forEach((k, v) -> headers.add(new RecordHeader(k, v.getBytes())));
return headers;
}
private Mono<InternalTopic> increaseTopicPartitions(AdminClient adminClient,
String topicName,
Map<String, NewPartitions> newPartitionsMap
@ -949,7 +970,7 @@ public class KafkaService {
}
public Mono<Void> updateBrokerLogDir(KafkaCluster cluster, Integer broker,
BrokerLogdirUpdate brokerLogDir) {
BrokerLogdirUpdate brokerLogDir) {
return getOrCreateAdminClient(cluster)
.flatMap(ac -> updateBrokerLogDir(ac, brokerLogDir, broker));
}

View file

@ -15,6 +15,7 @@ import com.provectus.kafka.ui.model.InternalPartition;
import com.provectus.kafka.ui.model.InternalReplica;
import com.provectus.kafka.ui.model.InternalTopic;
import com.provectus.kafka.ui.model.InternalTopicConfig;
import com.provectus.kafka.ui.model.MessageFormat;
import com.provectus.kafka.ui.model.ServerStatus;
import com.provectus.kafka.ui.model.TopicMessage;
import com.provectus.kafka.ui.serde.RecordSerDe;
@ -299,6 +300,17 @@ public class ClusterUtil {
var parsed = recordDeserializer.deserialize(consumerRecord);
topicMessage.setKey(parsed.getKey());
topicMessage.setContent(parsed.getValue());
topicMessage.setKeyFormat(parsed.getKeyFormat() != null
? MessageFormat.valueOf(parsed.getKeyFormat().name())
: null);
topicMessage.setValueFormat(parsed.getValueFormat() != null
? MessageFormat.valueOf(parsed.getValueFormat().name())
: null);
topicMessage.setKeySize(ConsumerRecordUtil.getKeySize(consumerRecord));
topicMessage.setValueSize(ConsumerRecordUtil.getValueSize(consumerRecord));
topicMessage.setKeySchemaId(parsed.getKeySchemaId());
topicMessage.setValueSchemaId(parsed.getValueSchemaId());
topicMessage.setHeadersSize(ConsumerRecordUtil.getHeadersSize(consumerRecord));
return topicMessage;
}

View file

@ -0,0 +1,37 @@
package com.provectus.kafka.ui.util;
import java.util.Arrays;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.Headers;
import org.apache.kafka.common.utils.Bytes;
public class ConsumerRecordUtil {
private ConsumerRecordUtil() {
}
public static Long getHeadersSize(ConsumerRecord<Bytes, Bytes> consumerRecord) {
Headers headers = consumerRecord.headers();
if (headers != null) {
return Arrays.stream(consumerRecord.headers().toArray())
.mapToLong(ConsumerRecordUtil::headerSize)
.sum();
}
return 0L;
}
public static Long getKeySize(ConsumerRecord<Bytes, Bytes> consumerRecord) {
return consumerRecord.key() != null ? (long) consumerRecord.key().get().length : null;
}
public static Long getValueSize(ConsumerRecord<Bytes, Bytes> consumerRecord) {
return consumerRecord.value() != null ? (long) consumerRecord.value().get().length : null;
}
private static int headerSize(Header header) {
int key = header.key() != null ? header.key().getBytes().length : 0;
int val = header.value() != null ? header.value().length : 0;
return key + val;
}
}

View file

@ -0,0 +1,25 @@
package com.provectus.kafka.ui.util;
import com.provectus.kafka.ui.model.TopicMessageEvent;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Predicate;
public class FilterTopicMessageEvents implements Predicate<TopicMessageEvent> {
private final AtomicInteger processed = new AtomicInteger();
private final int limit;
public FilterTopicMessageEvents(int limit) {
this.limit = limit;
}
@Override
public boolean test(TopicMessageEvent event) {
if (event.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE)) {
final int i = processed.incrementAndGet();
if (i > limit) {
return false;
}
}
return true;
}
}
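
Because PHASE and CONSUMING events are interleaved with messages, the previous limitRequest(n) would have counted bookkeeping events against the page size. This predicate counts only MESSAGE events, so the takeWhile in ConsumingService stops the stream after the limit-th message. A small demo of that behavior, constructing events through the generated fluent model:

import com.provectus.kafka.ui.model.TopicMessageEvent;
import com.provectus.kafka.ui.util.FilterTopicMessageEvents;
import reactor.core.publisher.Flux;

public class FilterTopicMessageEventsDemo {
  public static void main(String[] args) {
    Flux.just(
            new TopicMessageEvent().type(TopicMessageEvent.TypeEnum.PHASE),
            new TopicMessageEvent().type(TopicMessageEvent.TypeEnum.MESSAGE),
            new TopicMessageEvent().type(TopicMessageEvent.TypeEnum.CONSUMING),
            new TopicMessageEvent().type(TopicMessageEvent.TypeEnum.MESSAGE),
            new TopicMessageEvent().type(TopicMessageEvent.TypeEnum.MESSAGE))
        .takeWhile(new FilterTopicMessageEvents(2))
        .subscribe(e -> System.out.println(e.getType()));
    // prints PHASE, MESSAGE, CONSUMING, MESSAGE; the third message
    // exceeds the limit and ends the stream
  }
}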

View file

@ -1,12 +1,15 @@
package com.provectus.kafka.ui;
import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.http.MediaType.TEXT_EVENT_STREAM;
import com.provectus.kafka.ui.api.model.TopicConfig;
import com.provectus.kafka.ui.model.BrokerConfig;
import com.provectus.kafka.ui.model.PartitionsIncrease;
import com.provectus.kafka.ui.model.PartitionsIncreaseResponse;
import com.provectus.kafka.ui.model.TopicCreation;
import com.provectus.kafka.ui.model.TopicDetails;
import com.provectus.kafka.ui.model.TopicMessage;
import com.provectus.kafka.ui.model.TopicMessageEvent;
import com.provectus.kafka.ui.producer.KafkaTestProducer;
import java.util.List;
import java.util.Map;
@ -49,13 +52,20 @@ public class KafkaConsumerTests extends AbstractBaseTest {
.forEach(value -> producer.send(topicName, value));
}
webTestClient.get()
long count = webTestClient.get()
.uri("/api/clusters/{clusterName}/topics/{topicName}/messages", LOCAL, topicName)
.accept(TEXT_EVENT_STREAM)
.exchange()
.expectStatus()
.isOk()
.expectBodyList(TopicMessage.class)
.hasSize(4);
.expectBodyList(TopicMessageEvent.class)
.returnResult()
.getResponseBody()
.stream()
.filter(e -> e.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE))
.count();
assertThat(count).isEqualTo(4);
webTestClient.delete()
.uri("/api/clusters/{clusterName}/topics/{topicName}/messages", LOCAL, topicName)
@ -63,13 +73,19 @@ public class KafkaConsumerTests extends AbstractBaseTest {
.expectStatus()
.isOk();
webTestClient.get()
count = webTestClient.get()
.uri("/api/clusters/{clusterName}/topics/{topicName}/messages", LOCAL, topicName)
.exchange()
.expectStatus()
.isOk()
.expectBodyList(TopicMessage.class)
.hasSize(0);
.expectBodyList(TopicMessageEvent.class)
.returnResult()
.getResponseBody()
.stream()
.filter(e -> e.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE))
.count();
assertThat(count).isZero();
}
@Test

View file

@ -5,6 +5,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.serde.schemaregistry.MessageFormat;
import com.provectus.kafka.ui.serde.schemaregistry.SchemaRegistryAwareRecordSerDe;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.utils.Bytes;
@ -25,13 +26,23 @@ class SchemaRegistryRecordDeserializerTest {
var deserializedRecord = deserializer.deserialize(
new ConsumerRecord<>("topic", 1, 0, Bytes.wrap("key".getBytes()),
Bytes.wrap(value.getBytes())));
assertEquals(new DeserializedKeyValue("key", value), deserializedRecord);
DeserializedKeyValue expected = DeserializedKeyValue.builder()
.key("key")
.keyFormat(MessageFormat.UNKNOWN)
.value(value)
.valueFormat(MessageFormat.UNKNOWN)
.build();
assertEquals(expected, deserializedRecord);
}
@Test
public void shouldDeserializeNullValueRecordToEmptyMap() {
var deserializedRecord = deserializer
.deserialize(new ConsumerRecord<>("topic", 1, 0, Bytes.wrap("key".getBytes()), null));
assertEquals(new DeserializedKeyValue("key", null), deserializedRecord);
DeserializedKeyValue expected = DeserializedKeyValue.builder()
.key("key")
.keyFormat(MessageFormat.UNKNOWN)
.build();
assertEquals(expected, deserializedRecord);
}
}

View file

@ -29,7 +29,9 @@ import org.apache.kafka.common.utils.Bytes;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.test.context.ContextConfiguration;
@ContextConfiguration(initializers = {AbstractBaseTest.Initializer.class})
public class OffsetsResetServiceTest extends AbstractBaseTest {
private static final int PARTITIONS = 5;

View file

@ -8,7 +8,9 @@ import com.provectus.kafka.ui.emitter.ForwardRecordEmitter;
import com.provectus.kafka.ui.model.ConsumerPosition;
import com.provectus.kafka.ui.model.SeekDirection;
import com.provectus.kafka.ui.model.SeekType;
import com.provectus.kafka.ui.model.TopicMessageEvent;
import com.provectus.kafka.ui.producer.KafkaTestProducer;
import com.provectus.kafka.ui.serde.SimpleRecordSerDe;
import com.provectus.kafka.ui.util.OffsetsSeekBackward;
import com.provectus.kafka.ui.util.OffsetsSeekForward;
import java.io.Serializable;
@ -24,19 +26,19 @@ import lombok.Value;
import lombok.extern.log4j.Log4j2;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.BytesDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.utils.Bytes;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.springframework.test.context.ContextConfiguration;
import reactor.core.publisher.Flux;
@Log4j2
@ContextConfiguration(initializers = {AbstractBaseTest.Initializer.class})
class RecordEmitterTest extends AbstractBaseTest {
static final int PARTITIONS = 5;
@ -80,7 +82,7 @@ class RecordEmitterTest extends AbstractBaseTest {
this::createConsumer,
new OffsetsSeekForward(EMPTY_TOPIC,
new ConsumerPosition(SeekType.BEGINNING, Map.of(), SeekDirection.FORWARD)
)
), new SimpleRecordSerDe()
);
var backwardEmitter = new BackwardRecordEmitter(
@ -89,10 +91,11 @@ class RecordEmitterTest extends AbstractBaseTest {
EMPTY_TOPIC,
new ConsumerPosition(SeekType.BEGINNING, Map.of(), SeekDirection.BACKWARD),
100
)
), new SimpleRecordSerDe()
);
Long polledValues = Flux.create(forwardEmitter)
.filter(m -> m.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE))
.limitRequest(100)
.count()
.block();
@ -100,6 +103,7 @@ class RecordEmitterTest extends AbstractBaseTest {
assertThat(polledValues).isZero();
polledValues = Flux.create(backwardEmitter)
.filter(m -> m.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE))
.limitRequest(100)
.count()
.block();
@ -114,7 +118,7 @@ class RecordEmitterTest extends AbstractBaseTest {
this::createConsumer,
new OffsetsSeekForward(TOPIC,
new ConsumerPosition(SeekType.BEGINNING, Map.of(), SeekDirection.FORWARD)
)
), new SimpleRecordSerDe()
);
var backwardEmitter = new BackwardRecordEmitter(
@ -122,12 +126,15 @@ class RecordEmitterTest extends AbstractBaseTest {
new OffsetsSeekBackward(TOPIC,
new ConsumerPosition(SeekType.BEGINNING, Map.of(), SeekDirection.FORWARD),
PARTITIONS * MSGS_PER_PARTITION
)
), new SimpleRecordSerDe()
);
var polledValues = Flux.create(forwardEmitter)
.map(this::deserialize)
.filter(m -> m.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE))
.limitRequest(Long.MAX_VALUE)
.filter(e -> e.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE))
.map(TopicMessageEvent::getMessage)
.map(m -> m.getContent().toString())
.collect(Collectors.toList())
.block();
@ -135,8 +142,11 @@ class RecordEmitterTest extends AbstractBaseTest {
SENT_RECORDS.stream().map(Record::getValue).collect(Collectors.toList()));
polledValues = Flux.create(backwardEmitter)
.map(this::deserialize)
.filter(m -> m.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE))
.limitRequest(Long.MAX_VALUE)
.filter(e -> e.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE))
.map(TopicMessageEvent::getMessage)
.map(m -> m.getContent().toString())
.collect(Collectors.toList())
.block();
@ -157,7 +167,7 @@ class RecordEmitterTest extends AbstractBaseTest {
this::createConsumer,
new OffsetsSeekForward(TOPIC,
new ConsumerPosition(SeekType.OFFSET, targetOffsets, SeekDirection.FORWARD)
)
), new SimpleRecordSerDe()
);
var backwardEmitter = new BackwardRecordEmitter(
@ -165,12 +175,15 @@ class RecordEmitterTest extends AbstractBaseTest {
new OffsetsSeekBackward(TOPIC,
new ConsumerPosition(SeekType.OFFSET, targetOffsets, SeekDirection.BACKWARD),
PARTITIONS * MSGS_PER_PARTITION
)
), new SimpleRecordSerDe()
);
var polledValues = Flux.create(forwardEmitter)
.map(this::deserialize)
.filter(m -> m.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE))
.limitRequest(Long.MAX_VALUE)
.filter(e -> e.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE))
.map(TopicMessageEvent::getMessage)
.map(m -> m.getContent().toString())
.collect(Collectors.toList())
.block();
@ -187,8 +200,11 @@ class RecordEmitterTest extends AbstractBaseTest {
.collect(Collectors.toList());
polledValues = Flux.create(backwardEmitter)
.map(this::deserialize)
.filter(m -> m.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE))
.limitRequest(Long.MAX_VALUE)
.filter(e -> e.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE))
.map(TopicMessageEvent::getMessage)
.map(m -> m.getContent().toString())
.collect(Collectors.toList())
.block();
@ -214,7 +230,7 @@ class RecordEmitterTest extends AbstractBaseTest {
this::createConsumer,
new OffsetsSeekForward(TOPIC,
new ConsumerPosition(SeekType.TIMESTAMP, targetTimestamps, SeekDirection.FORWARD)
)
), new SimpleRecordSerDe()
);
var backwardEmitter = new BackwardRecordEmitter(
@ -222,11 +238,13 @@ class RecordEmitterTest extends AbstractBaseTest {
new OffsetsSeekBackward(TOPIC,
new ConsumerPosition(SeekType.TIMESTAMP, targetTimestamps, SeekDirection.BACKWARD),
PARTITIONS * MSGS_PER_PARTITION
)
), new SimpleRecordSerDe()
);
var polledValues = Flux.create(forwardEmitter)
.map(this::deserialize)
.filter(e -> e.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE))
.map(TopicMessageEvent::getMessage)
.map(m -> m.getContent().toString())
.limitRequest(Long.MAX_VALUE)
.collect(Collectors.toList())
.block();
@ -239,7 +257,9 @@ class RecordEmitterTest extends AbstractBaseTest {
assertThat(polledValues).containsExactlyInAnyOrderElementsOf(expectedValues);
polledValues = Flux.create(backwardEmitter)
.map(this::deserialize)
.filter(e -> e.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE))
.map(TopicMessageEvent::getMessage)
.map(m -> m.getContent().toString())
.limitRequest(Long.MAX_VALUE)
.collect(Collectors.toList())
.block();
@ -266,11 +286,13 @@ class RecordEmitterTest extends AbstractBaseTest {
new OffsetsSeekBackward(TOPIC,
new ConsumerPosition(SeekType.OFFSET, targetOffsets, SeekDirection.BACKWARD),
numMessages
)
), new SimpleRecordSerDe()
);
var polledValues = Flux.create(backwardEmitter)
.map(this::deserialize)
.filter(e -> e.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE))
.map(TopicMessageEvent::getMessage)
.map(m -> m.getContent().toString())
.limitRequest(numMessages)
.collect(Collectors.toList())
.block();
@ -297,11 +319,13 @@ class RecordEmitterTest extends AbstractBaseTest {
new OffsetsSeekBackward(TOPIC,
new ConsumerPosition(SeekType.OFFSET, offsets, SeekDirection.BACKWARD),
100
)
), new SimpleRecordSerDe()
);
var polledValues = Flux.create(backwardEmitter)
.map(this::deserialize)
.filter(e -> e.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE))
.map(TopicMessageEvent::getMessage)
.map(m -> m.getContent().toString())
.limitRequest(Long.MAX_VALUE)
.collect(Collectors.toList())
.block();
@ -327,10 +351,6 @@ class RecordEmitterTest extends AbstractBaseTest {
return new KafkaConsumer<>(props);
}
private String deserialize(ConsumerRecord<Bytes, Bytes> rec) {
return new StringDeserializer().deserialize(TOPIC, rec.value().get());
}
@Value
static class Record {
String value;

View file

@ -7,9 +7,11 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.provectus.kafka.ui.AbstractBaseTest;
import com.provectus.kafka.ui.model.ConsumerPosition;
import com.provectus.kafka.ui.model.CreateTopicMessage;
import com.provectus.kafka.ui.model.MessageFormat;
import com.provectus.kafka.ui.model.SeekDirection;
import com.provectus.kafka.ui.model.SeekType;
import com.provectus.kafka.ui.model.TopicMessage;
import com.provectus.kafka.ui.model.TopicMessageEvent;
import io.confluent.kafka.schemaregistry.ParsedSchema;
import io.confluent.kafka.schemaregistry.avro.AvroSchema;
import io.confluent.kafka.schemaregistry.json.JsonSchema;
@ -24,7 +26,9 @@ import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.common.TopicPartition;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
@ContextConfiguration(initializers = {AbstractBaseTest.Initializer.class})
public class SendAndReadTests extends AbstractBaseTest {
private static final AvroSchema AVRO_SCHEMA_1 = new AvroSchema(
@ -358,6 +362,73 @@ public class SendAndReadTests extends AbstractBaseTest {
.assertSendThrowsException();
}
@Test
void topicMessageMetadataAvro() {
new SendAndReadSpec()
.withKeySchema(AVRO_SCHEMA_1)
.withValueSchema(AVRO_SCHEMA_2)
.withMsgToSend(
new CreateTopicMessage()
.key(AVRO_SCHEMA_1_JSON_RECORD)
.content(AVRO_SCHEMA_2_JSON_RECORD)
)
.doAssert(polled -> {
assertJsonEqual(polled.getKey(), AVRO_SCHEMA_1_JSON_RECORD);
assertJsonEqual(polled.getContent(), AVRO_SCHEMA_2_JSON_RECORD);
assertThat(polled.getKeySize()).isEqualTo(15L);
assertThat(polled.getValueSize()).isEqualTo(15L);
assertThat(polled.getKeyFormat()).isEqualTo(MessageFormat.AVRO);
assertThat(polled.getValueFormat()).isEqualTo(MessageFormat.AVRO);
assertThat(polled.getKeySchemaId()).isNotEmpty();
assertThat(polled.getValueSchemaId()).isNotEmpty();
});
}
@Test
void topicMessageMetadataProtobuf() {
new SendAndReadSpec()
.withKeySchema(PROTOBUF_SCHEMA)
.withValueSchema(PROTOBUF_SCHEMA)
.withMsgToSend(
new CreateTopicMessage()
.key(PROTOBUF_SCHEMA_JSON_RECORD)
.content(PROTOBUF_SCHEMA_JSON_RECORD)
)
.doAssert(polled -> {
assertJsonEqual(polled.getKey(), PROTOBUF_SCHEMA_JSON_RECORD);
assertJsonEqual(polled.getContent(), PROTOBUF_SCHEMA_JSON_RECORD);
assertThat(polled.getKeySize()).isEqualTo(18L);
assertThat(polled.getValueSize()).isEqualTo(18L);
assertThat(polled.getKeyFormat()).isEqualTo(MessageFormat.PROTOBUF);
assertThat(polled.getValueFormat()).isEqualTo(MessageFormat.PROTOBUF);
assertThat(polled.getKeySchemaId()).isNotEmpty();
assertThat(polled.getValueSchemaId()).isNotEmpty();
});
}
@Test
void topicMessageMetadataJson() {
new SendAndReadSpec()
.withKeySchema(JSON_SCHEMA)
.withValueSchema(JSON_SCHEMA)
.withMsgToSend(
new CreateTopicMessage()
.key(JSON_SCHEMA_RECORD)
.content(JSON_SCHEMA_RECORD)
.headers(Map.of("header1", "value1"))
)
.doAssert(polled -> {
assertJsonEqual(polled.getKey(), JSON_SCHEMA_RECORD);
assertJsonEqual(polled.getContent(), JSON_SCHEMA_RECORD);
assertThat(polled.getKeyFormat()).isEqualTo(MessageFormat.JSON);
assertThat(polled.getValueFormat()).isEqualTo(MessageFormat.JSON);
assertThat(polled.getKeySchemaId()).isNotEmpty();
assertThat(polled.getValueSchemaId()).isNotEmpty();
assertThat(polled.getKeySize()).isEqualTo(57L);
assertThat(polled.getValueSize()).isEqualTo(57L);
assertThat(polled.getHeadersSize()).isEqualTo(13L);
});
}
@SneakyThrows
private void assertJsonEqual(String actual, String expected) {
@ -396,8 +467,10 @@ public class SendAndReadTests extends AbstractBaseTest {
if (valueSchema != null) {
schemaRegistry.schemaRegistryClient().register(topic + "-value", valueSchema);
}
// need to update to see new topic & schemas
clustersMetricsScheduler.updateMetrics();
return topic;
}
@ -425,7 +498,9 @@ public class SendAndReadTests extends AbstractBaseTest {
),
null,
1
).blockLast(Duration.ofSeconds(5));
).filter(e -> e.getType().equals(TopicMessageEvent.TypeEnum.MESSAGE))
.map(TopicMessageEvent::getMessage)
.blockLast(Duration.ofSeconds(5000));
assertThat(polled).isNotNull();
assertThat(polled.getPartition()).isEqualTo(0);

View file

@ -503,11 +503,11 @@ paths:
200:
description: OK
content:
application/json:
text/event-stream:
schema:
type: array
items:
$ref: '#/components/schemas/TopicMessage'
$ref: '#/components/schemas/TopicMessageEvent'
delete:
tags:
- Messages
@ -1793,6 +1793,14 @@ components:
- DEAD
- EMPTY
MessageFormat:
type: string
enum:
- AVRO
- JSON
- PROTOBUF
- UNKNOWN
ConsumerGroup:
type: object
properties:
@ -1859,6 +1867,44 @@ components:
- source
- schema
TopicMessageEvent:
type: object
properties:
type:
type: string
enum:
- PHASE
- MESSAGE
- CONSUMING
- DONE
message:
$ref: "#/components/schemas/TopicMessage"
phase:
$ref: "#/components/schemas/TopicMessagePhase"
consuming:
$ref: "#/components/schemas/TopicMessageConsuming"
TopicMessagePhase:
type: object
properties:
name:
type: string
TopicMessageConsuming:
type: object
properties:
bytesConsumed:
type: integer
format: int64
elapsedMs:
type: integer
format: int64
isCancelled:
type: boolean
messagesConsumed:
type: integer
TopicMessage:
type: object
properties:
@ -1884,6 +1930,23 @@ components:
type: string
content:
type: string
keyFormat:
$ref: "#/components/schemas/MessageFormat"
valueFormat:
$ref: "#/components/schemas/MessageFormat"
keySize:
type: integer
format: int64
valueSize:
type: integer
format: int64
keySchemaId:
type: string
valueSchemaId:
type: string
headersSize:
type: integer
format: int64
required:
- partition
- offset
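
On the wire, each TopicMessageEvent is one SSE data frame. An illustrative exchange against the updated endpoint, with made-up field values drawn from the schemas above:

GET /api/clusters/local/topics/my-topic/messages HTTP/1.1
Accept: text/event-stream

data: {"type":"PHASE","phase":{"name":"Assigning partitions"}}

data: {"type":"CONSUMING","consuming":{"bytesConsumed":0,"elapsedMs":14,"isCancelled":false,"messagesConsumed":0}}

data: {"type":"MESSAGE","message":{"partition":0,"offset":42,"key":"key","content":"{\"f\":1}","keyFormat":"UNKNOWN","valueFormat":"JSON","keySize":3,"valueSize":7,"headersSize":0}}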

View file

@ -2732,6 +2732,11 @@
"integrity": "sha512-LfZwXoGUDo0C3me81HXgkBg5CTQYb6xzEl+fNmbO4JdRiSKQ8A0GD1OBBvKAIsbCUgoyAty7m99GqqMQe784ew==",
"dev": true
},
"@types/eventsource": {
"version": "1.1.6",
"resolved": "https://registry.npmjs.org/@types/eventsource/-/eventsource-1.1.6.tgz",
"integrity": "sha512-y4xcLJ+lcoZ6mN9ndSdKOWg24Nj5uQc4Z/NRdy3HbiGGt5hfH3RLwAXr6V+RzGzOljAk48a09n6iY4BMNumEng=="
},
"@types/express-serve-static-core": {
"version": "4.17.22",
"resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.22.tgz",
@ -6538,6 +6543,11 @@
"resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.22.1.tgz",
"integrity": "sha512-yUFPQjrxEmIsMqlHhAhmxkuH769baF21Kk+nZwZGyrMoyLA+LugaQtC0+Tqf9CBUUULWwUJt6Q5ySI3LJDDCGg=="
},
"dayjs": {
"version": "1.10.6",
"resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.10.6.tgz",
"integrity": "sha512-AztC/IOW4L1Q41A86phW5Thhcrco3xuAA+YX/BLpLWWjRcTj5TOt/QImBLmCKlrF7u7k47arTnOyL6GnbG8Hvw=="
},
"debug": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",

View file

@ -8,13 +8,14 @@
"@hookform/error-message": "^2.0.0",
"@hookform/resolvers": "^2.5.1",
"@rooks/use-outside-click-ref": "^4.10.1",
"@types/eventsource": "^1.1.6",
"@types/yup": "^0.29.13",
"@testing-library/react": "^12.0.0",
"ace-builds": "^1.4.12",
"bulma": "^0.9.3",
"bulma-switch": "^2.0.0",
"classnames": "^2.2.6",
"date-fns": "^2.19.0",
"dayjs": "^1.10.6",
"eslint-import-resolver-node": "^0.3.4",
"eslint-import-resolver-typescript": "^2.4.0",
"json-schema-yup-transformer": "^1.6.0",

View file

@ -2,6 +2,8 @@ $header-height: 52px;
$navbar-width: 250px;
.Layout {
min-width: 1200px;
&__header {
box-shadow: 0 0.46875rem 2.1875rem rgba(4,9,20,0.03),
0 0.9375rem 1.40625rem rgba(4,9,20,0.03),
@ -63,6 +65,8 @@ $navbar-width: 250px;
@media screen and (max-width: 1023px) {
.Layout {
min-width: initial;
&__container {
margin-left: initial;
margin-top: 1.5rem;

View file

@ -176,7 +176,7 @@ exports[`Connectors ListItem matches snapshot 1`] = `
<button
aria-controls="dropdown-menu"
aria-haspopup="true"
className="button is-small"
className="button is-small is-link"
onClick={[Function]}
type="button"
>

View file

@ -25,15 +25,13 @@ const ClusterWidget: React.FC<ClusterWidgetProps> = ({
<div className="box">
<div className="title is-6 has-text-overflow-ellipsis">
<div
className={`tag has-margin-right ${
className={`tag mr-2 ${
status === ServerStatus.ONLINE ? 'is-success' : 'is-danger'
}`}
>
{status}
</div>
{readOnly && (
<div className="tag has-margin-right is-info is-light">readonly</div>
)}
{readOnly && <div className="tag mr-2 is-info is-light">readonly</div>}
{name}
</div>

View file

@ -11,7 +11,7 @@ exports[`ClusterWidget when cluster is offline matches snapshot 1`] = `
className="title is-6 has-text-overflow-ellipsis"
>
<div
className="tag has-margin-right is-danger"
className="tag mr-2 is-danger"
>
offline
</div>
@ -96,7 +96,7 @@ exports[`ClusterWidget when cluster is online matches snapshot 1`] = `
className="title is-6 has-text-overflow-ellipsis"
>
<div
className="tag has-margin-right is-success"
className="tag mr-2 is-success"
>
online
</div>

View file

@ -90,7 +90,7 @@ const Details: React.FC<DetailsProps> = ({
type="button"
to={clusterSchemaSchemaEditPath(clusterName, subject)}
>
Update Schema
Edit Schema
</Link>
<button
className="button is-danger"

View file

@ -56,7 +56,7 @@ exports[`Details View Initial state matches snapshot 1`] = `
to="/ui/clusters/testCluster/schemas/test/edit"
type="button"
>
Update Schema
Edit Schema
</Link>
<button
className="button is-danger"
@ -204,7 +204,7 @@ exports[`Details View when page with schema versions loaded when schema has vers
to="/ui/clusters/testCluster/schemas/test/edit"
type="button"
>
Update Schema
Edit Schema
</Link>
<button
className="button is-danger"
@ -347,7 +347,7 @@ exports[`Details View when page with schema versions loaded when versions are em
to="/ui/clusters/testCluster/schemas/test/edit"
type="button"
>
Update Schema
Edit Schema
</Link>
<button
className="button is-danger"

View file

@ -16,8 +16,8 @@ import ConfirmationModal from 'components/common/ConfirmationModal/ConfirmationM
import OverviewContainer from './Overview/OverviewContainer';
import TopicConsumerGroupsContainer from './ConsumerGroups/TopicConsumerGroupsContainer';
import MessagesContainer from './Messages/MessagesContainer';
import SettingsContainer from './Settings/SettingsContainer';
import Messages from './Messages/Messages';
interface Props extends Topic, TopicDetails {
clusterName: ClusterName;
@ -134,7 +134,7 @@ const Details: React.FC<Props> = ({
<Route
exact
path="/ui/clusters/:clusterName/topics/:topicName/messages"
component={MessagesContainer}
component={Messages}
/>
<Route
exact

View file

@ -0,0 +1,362 @@
import 'react-datepicker/dist/react-datepicker.css';
import {
Partition,
SeekDirection,
SeekType,
TopicMessage,
TopicMessageConsuming,
TopicMessageEvent,
TopicMessageEventTypeEnum,
} from 'generated-sources';
import * as React from 'react';
import { omitBy } from 'lodash';
import { useHistory, useLocation } from 'react-router';
import DatePicker from 'react-datepicker';
import MultiSelect from 'react-multi-select-component';
import { Option } from 'react-multi-select-component/dist/lib/interfaces';
import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted';
import { TopicName, ClusterName } from 'redux/interfaces';
import {
filterOptions,
getOffsetFromSeekToParam,
getSelectedPartitionsFromSeekToParam,
getTimestampFromSeekToParam,
} from './utils';
type Query = Record<string, string | string[] | number>;
export interface FiltersProps {
clusterName: ClusterName;
topicName: TopicName;
phaseMessage?: string;
partitions: Partition[];
meta: TopicMessageConsuming;
isFetching: boolean;
addMessage(message: TopicMessage): void;
resetMessages(): void;
updatePhase(phase: string): void;
updateMeta(meta: TopicMessageConsuming): void;
setIsFetching(status: boolean): void;
}
const PER_PAGE = 100;
const Filters: React.FC<FiltersProps> = ({
clusterName,
topicName,
partitions,
phaseMessage,
meta: { elapsedMs, bytesConsumed, messagesConsumed },
isFetching,
addMessage,
resetMessages,
updatePhase,
updateMeta,
setIsFetching,
}) => {
const location = useLocation();
const history = useHistory();
const source = React.useRef<EventSource | null>(null);
const searchParams = React.useMemo(
() => new URLSearchParams(location.search),
[location]
);
const [selectedPartitions, setSelectedPartitions] = React.useState<Option[]>(
getSelectedPartitionsFromSeekToParam(searchParams, partitions)
);
const [attempt, setAttempt] = React.useState(0);
const [seekType, setSeekType] = React.useState<SeekType>(
(searchParams.get('seekType') as SeekType) || SeekType.OFFSET
);
const [offset, setOffset] = React.useState<string>(
getOffsetFromSeekToParam(searchParams)
);
const [timestamp, setTimestamp] = React.useState<Date | null>(
getTimestampFromSeekToParam(searchParams)
);
const [query, setQuery] = React.useState<string>(searchParams.get('q') || '');
const [seekDirection, setSeekDirection] = React.useState<SeekDirection>(
(searchParams.get('seekDirection') as SeekDirection) ||
SeekDirection.FORWARD
);
const isSeekTypeControlVisible = React.useMemo(
() => selectedPartitions.length > 0,
[selectedPartitions]
);
const isSubmitDisabled = React.useMemo(() => {
if (isSeekTypeControlVisible) {
return seekType === SeekType.TIMESTAMP && !timestamp;
}
return false;
}, [isSeekTypeControlVisible, seekType, timestamp]);
const partitionMap = React.useMemo(
() =>
partitions.reduce<Record<string, Partition>>(
(acc, partition) => ({
...acc,
[partition.partition]: partition,
}),
{}
),
[partitions]
);
const handleFiltersSubmit = () => {
setAttempt(attempt + 1);
const props: Query = {
q: query,
attempt,
limit: PER_PAGE,
seekDirection,
};
if (isSeekTypeControlVisible) {
props.seekType = seekType;
props.seekTo = selectedPartitions.map(({ value }) => {
let seekToOffset;
if (seekType === SeekType.OFFSET) {
if (offset) {
seekToOffset = offset;
} else {
seekToOffset =
seekDirection === SeekDirection.FORWARD
? partitionMap[value].offsetMin
: partitionMap[value].offsetMax;
}
} else if (timestamp) {
seekToOffset = timestamp.getTime();
}
return `${value}::${seekToOffset || '0'}`;
});
}
const newProps = omitBy(props, (v) => v === undefined || v === '');
const qs = Object.keys(newProps)
.map((key) => `${key}=${newProps[key]}`)
.join('&');
history.push({
search: `?${qs}`,
});
};
const toggleSeekDirection = () => {
const nextSeekDirectionValue =
seekDirection === SeekDirection.FORWARD
? SeekDirection.BACKWARD
: SeekDirection.FORWARD;
setSeekDirection(nextSeekDirectionValue);
};
const handleSSECancel = () => {
if (!source.current) return;
setIsFetching(false);
source.current.close();
};
// eslint-disable-next-line consistent-return
React.useEffect(() => {
if (location.search.length !== 0) {
const url = `/api/clusters/${clusterName}/topics/${topicName}/messages${location.search}`;
const sse = new EventSource(url);
source.current = sse;
setIsFetching(true);
sse.onopen = () => {
resetMessages();
setIsFetching(true);
};
sse.onmessage = ({ data }) => {
const { type, message, phase, consuming }: TopicMessageEvent =
JSON.parse(data);
switch (type) {
case TopicMessageEventTypeEnum.MESSAGE:
if (message) addMessage(message);
break;
case TopicMessageEventTypeEnum.PHASE:
if (phase?.name) updatePhase(phase.name);
break;
case TopicMessageEventTypeEnum.CONSUMING:
if (consuming) updateMeta(consuming);
break;
default:
}
};
sse.onerror = () => {
setIsFetching(false);
sse.close();
};
return () => {
setIsFetching(false);
sse.close();
};
}
}, [clusterName, topicName, location]);
React.useEffect(() => {
if (location.search.length === 0) {
handleFiltersSubmit();
}
}, [location]);
React.useEffect(() => {
handleFiltersSubmit();
}, [seekDirection]);
return (
<>
<div className="columns is-align-items-flex-end">
<div className="column is-3">
<label className="label">Partitions</label>
<MultiSelect
options={partitions.map((p) => ({
label: `Partition #${p.partition.toString()}`,
value: p.partition,
}))}
filterOptions={filterOptions}
value={selectedPartitions}
onChange={setSelectedPartitions}
labelledBy="Select partitions"
/>
</div>
{isSeekTypeControlVisible && (
<>
<div className="column is-2">
<label className="label">Seek Type</label>
<div className="select is-block">
<select
id="selectSeekType"
name="selectSeekType"
onChange={({ target: { value } }) =>
setSeekType(value as SeekType)
}
value={seekType}
>
<option value={SeekType.OFFSET}>Offset</option>
<option value={SeekType.TIMESTAMP}>Timestamp</option>
</select>
</div>
</div>
<div className="column is-2">
{seekType === SeekType.OFFSET ? (
<>
<label className="label">Offset</label>
<input
id="offset"
name="offset"
type="text"
className="input"
value={offset}
onChange={({ target: { value } }) => setOffset(value)}
/>
</>
) : (
<>
<label className="label">Timestamp</label>
<DatePicker
selected={timestamp}
onChange={(date: Date | null) => setTimestamp(date)}
showTimeInput
timeInputLabel="Time:"
dateFormat="MMMM d, yyyy HH:mm"
className="input"
/>
</>
)}
</div>
</>
)}
<div className="column is-3">
<label className="label">Search</label>
<input
id="searchText"
type="text"
name="searchText"
className="input"
placeholder="Search"
value={query}
onChange={({ target: { value } }) => setQuery(value)}
/>
</div>
<div className="column is-2">
{isFetching ? (
<button
type="button"
className="button is-primary is-fullwidth"
disabled={isSubmitDisabled}
onClick={handleSSECancel}
>
Cancel
</button>
) : (
<input
type="submit"
className="button is-primary is-fullwidth"
disabled={isSubmitDisabled}
onClick={handleFiltersSubmit}
/>
)}
</div>
</div>
<div className="columns">
<div className="column is-half">
<div className="field">
<input
id="switchRoundedDefault"
type="checkbox"
name="switchRoundedDefault"
className="switch is-rounded"
checked={seekDirection === SeekDirection.BACKWARD}
onChange={toggleSeekDirection}
/>
<label htmlFor="switchRoundedDefault">Newest first</label>
</div>
</div>
<div className="column is-half">
<div className="tags is-justify-content-flex-end">
<div className="tag is-white">{isFetching && phaseMessage}</div>
<div className="tag is-info" title="Elapsed Time">
<span className="icon">
<i className="fas fa-clock" />
</span>
<span>{Math.max(elapsedMs || 0, 0)}ms</span>
</div>
<div className="tag is-info" title="Bytes Consumed">
<span className="icon">
<i className="fas fa-download" />
</span>
<BytesFormatted value={bytesConsumed} />
</div>
<div className="tag is-info" title="Messages Consumed">
<span className="icon">
<i className="fas fa-envelope" />
</span>
<span>{messagesConsumed}</span>
</div>
</div>
</div>
</div>
</>
);
};
export default Filters;
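
A note on the SSE wiring above: it reduces to the standalone sketch below (a hypothetical extraction for reference, not part of the commit). It assumes the same /api/clusters/.../messages endpoint and the TopicMessageEvent shape from generated-sources; the handler callbacks stand in for the Redux actions that FiltersContainer injects.

import {
  TopicMessage,
  TopicMessageConsuming,
  TopicMessageEvent,
  TopicMessageEventTypeEnum,
} from 'generated-sources';

// Hypothetical callback bundle; in the app these are Redux action dispatchers.
interface MessageEventHandlers {
  onMessage(message: TopicMessage): void;
  onPhase(phase: string): void;
  onConsuming(info: TopicMessageConsuming): void;
}

const consumeTopicMessages = (
  clusterName: string,
  topicName: string,
  search: string, // serialized filter query, e.g. '?limit=100&seekDirection=FORWARD'
  handlers: MessageEventHandlers
): EventSource => {
  const url = `/api/clusters/${clusterName}/topics/${topicName}/messages${search}`;
  const sse = new EventSource(url);
  // Every SSE frame carries one TopicMessageEvent; dispatch on its type.
  sse.onmessage = ({ data }) => {
    const event: TopicMessageEvent = JSON.parse(data);
    switch (event.type) {
      case TopicMessageEventTypeEnum.MESSAGE:
        if (event.message) handlers.onMessage(event.message);
        break;
      case TopicMessageEventTypeEnum.PHASE:
        if (event.phase?.name) handlers.onPhase(event.phase.name);
        break;
      case TopicMessageEventTypeEnum.CONSUMING:
        if (event.consuming) handlers.onConsuming(event.consuming);
        break;
      default:
    }
  };
  // EventSource reconnects by default; closing here mirrors the component,
  // which treats an error as end-of-stream.
  sse.onerror = () => sse.close();
  return sse;
};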

View file

@@ -0,0 +1,67 @@
import { connect } from 'react-redux';
import { Action, ClusterName, RootState, TopicName } from 'redux/interfaces';
import { withRouter, RouteComponentProps } from 'react-router-dom';
import { ThunkDispatch } from 'redux-thunk';
import {
addTopicMessage,
resetTopicMessages,
updateTopicMessagesMeta,
updateTopicMessagesPhase,
setTopicMessagesFetchingStatus,
} from 'redux/actions';
import { TopicMessage, TopicMessageConsuming } from 'generated-sources';
import {
getTopicMessgesMeta,
getTopicMessgesPhase,
getIsTopicMessagesFetching,
} from 'redux/reducers/topicMessages/selectors';
import { getPartitionsByTopicName } from 'redux/reducers/topics/selectors';
import Filters from './Filters';
interface RouteProps {
clusterName: ClusterName;
topicName: TopicName;
}
type OwnProps = RouteComponentProps<RouteProps>;
const mapStateToProps = (
state: RootState,
{
match: {
params: { topicName, clusterName },
},
}: OwnProps
) => ({
clusterName,
topicName,
phaseMessage: getTopicMessgesPhase(state),
partitions: getPartitionsByTopicName(state, topicName),
meta: getTopicMessgesMeta(state),
isFetching: getIsTopicMessagesFetching(state),
});
const mapDispatchToProps = (
dispatch: ThunkDispatch<RootState, undefined, Action>
) => ({
addMessage: (message: TopicMessage) => {
dispatch(addTopicMessage(message));
},
resetMessages: () => {
dispatch(resetTopicMessages());
},
updatePhase: (phase: string) => {
dispatch(updateTopicMessagesPhase(phase));
},
updateMeta: (meta: TopicMessageConsuming) => {
dispatch(updateTopicMessagesMeta(meta));
},
setIsFetching: (status: boolean) => {
dispatch(setTopicMessagesFetchingStatus(status));
},
});
export default withRouter(
connect(mapStateToProps, mapDispatchToProps)(Filters)
);

View file

@@ -0,0 +1,70 @@
import { Partition, SeekType } from 'generated-sources';
import { compact } from 'lodash';
import { Option } from 'react-multi-select-component/dist/lib/interfaces';
export const filterOptions = (options: Option[], filter: string) => {
if (!filter) {
return options;
}
return options.filter(
({ value }) => value.toString() && value.toString() === filter
);
};
export const getOffsetFromSeekToParam = (params: URLSearchParams) => {
if (params.get('seekType') === SeekType.OFFSET) {
// seekTo format = ?seekTo=0::123,1::123,2::0
const offsets = params
.get('seekTo')
?.split(',')
.map((item) => Number(item.split('::')[1]));
return String(Math.max(...(offsets || []), 0));
}
return '';
};
export const getTimestampFromSeekToParam = (params: URLSearchParams) => {
if (params.get('seekType') === SeekType.TIMESTAMP) {
// seekTo format = ?seekTo=0::1627333200000,1::1627333200000
const offsets = params
.get('seekTo')
?.split(',')
.map((item) => Number(item.split('::')[1]));
return new Date(Math.max(...(offsets || []), 0));
}
return null;
};
export const getSelectedPartitionsFromSeekToParam = (
params: URLSearchParams,
partitions: Partition[]
) => {
const seekTo = params.get('seekTo');
if (seekTo) {
const selectedPartitionIds = seekTo
.split(',')
.map((item) => Number(item.split('::')[0]));
return compact(
partitions.map(({ partition }) => {
if (selectedPartitionIds?.includes(partition)) {
return {
value: partition,
label: partition.toString(),
};
}
return undefined;
})
);
}
return partitions.map(({ partition }) => ({
value: partition,
label: partition.toString(),
}));
};
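
The comments above document the seekTo wire format; a short worked example of these helpers (values are illustrative):

import {
  getOffsetFromSeekToParam,
  getTimestampFromSeekToParam,
} from 'components/Topics/Topic/Details/Messages/Filters/utils';

// seekTo is a comma-separated list of "<partition>::<offset-or-timestamp>".
const offsetParams = new URLSearchParams('seekType=OFFSET&seekTo=0::123,1::42');
getOffsetFromSeekToParam(offsetParams); // "123" (the highest offset wins)

const tsParams = new URLSearchParams(
  'seekType=TIMESTAMP&seekTo=0::1627333200000,1::1627333200000'
);
getTimestampFromSeekToParam(tsParams); // Date for epoch-ms 1627333200000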

View file

@@ -0,0 +1,147 @@
import * as React from 'react';
import dayjs from 'dayjs';
import { TopicMessage } from 'generated-sources';
import JSONViewer from 'components/common/JSONViewer/JSONViewer';
import Dropdown from 'components/common/Dropdown/Dropdown';
import DropdownItem from 'components/common/Dropdown/DropdownItem';
import useDataSaver from 'lib/hooks/useDataSaver';
type Tab = 'key' | 'content' | 'headers';
const Message: React.FC<{ message: TopicMessage }> = ({
message: {
timestamp,
timestampType,
offset,
key,
partition,
content,
headers,
},
}) => {
const [isOpen, setIsOpen] = React.useState(false);
const [activeTab, setActiveTab] = React.useState<Tab>('content');
const { copyToClipboard, saveFile } = useDataSaver(
'topic-message',
content || ''
);
const toggleIsOpen = () => setIsOpen(!isOpen);
const handleKeyTabClick = (e: React.MouseEvent) => {
e.preventDefault();
setActiveTab('key');
};
const handleContentTabClick = (e: React.MouseEvent) => {
e.preventDefault();
setActiveTab('content');
};
const handleHeadersTabClick = (e: React.MouseEvent) => {
e.preventDefault();
setActiveTab('headers');
};
const activeTabContent = () => {
switch (activeTab) {
case 'content':
return content;
case 'key':
return key;
default:
return JSON.stringify(headers);
}
};
return (
<>
<tr>
<td>
<span
className="icon has-text-link is-size-7 is-small is-clickable"
onClick={toggleIsOpen}
aria-hidden
>
<i className={`fas fa-${isOpen ? 'minus' : 'plus'}`} />
</span>
</td>
<td>{offset}</td>
<td>{partition}</td>
<td
className="has-text-overflow-ellipsis is-family-code"
style={{ width: 80, maxWidth: 250 }}
title={key}
>
{key}
</td>
<td>
<div className="tag">
{dayjs(timestamp).format('MM.DD.YYYY HH:mm:ss')}
</div>
</td>
<td
className="has-text-overflow-ellipsis is-family-code"
style={{ width: '100%', maxWidth: 0 }}
>
{content}
</td>
<td className="has-text-right">
<Dropdown
label={
<span className="icon">
<i className="fas fa-cog" />
</span>
}
right
>
<DropdownItem onClick={copyToClipboard}>
Copy to clipboard
</DropdownItem>
<DropdownItem onClick={saveFile}>Save as a file</DropdownItem>
</Dropdown>
</td>
</tr>
{isOpen && (
<tr className="has-background-light">
<td />
<td colSpan={3}>
<div className="title is-7">Timestamp Type</div>
<div className="subtitle is-7 is-spaced">{timestampType}</div>
<div className="title is-7">Timestamp</div>
<div className="subtitle is-7">{timestamp}</div>
</td>
<td colSpan={3} style={{ wordBreak: 'break-word' }}>
<nav className="panel has-background-white">
<p className="panel-tabs is-justify-content-start pl-5">
<a
href="key"
onClick={handleKeyTabClick}
className={activeTab === 'key' ? 'is-active' : ''}
>
Key
</a>
<a
href="content"
className={activeTab === 'content' ? 'is-active' : ''}
onClick={handleContentTabClick}
>
Content
</a>
<a
href="headers"
className={activeTab === 'headers' ? 'is-active' : ''}
onClick={handleHeadersTabClick}
>
Headers
</a>
</p>
<div className="panel-block is-family-code">
<JSONViewer data={activeTabContent() || ''} />
</div>
</nav>
</td>
</tr>
)}
</>
);
};
export default Message;

View file

@@ -1,37 +0,0 @@
import React from 'react';
import FullMessage from './FullMessage';
interface MessageContentProps {
message: string;
}
const MessageContent: React.FC<MessageContentProps> = ({ message }) => {
const [isFolded, setIsFolded] = React.useState(message.length > 250);
return (
<div className="is-flex is-flex-direction-column">
{isFolded ? (
<p className="has-content-overflow-ellipsis">
{`${message.slice(0, 250)}...`}
</p>
) : (
<FullMessage message={message} />
)}
{isFolded && (
<button
type="button"
className="button is-small mt-2"
onClick={() => setIsFolded((state) => !state)}
title="Expand to JSON"
>
<span className="icon is-small">
<i className="fas fa-chevron-down" />
</span>
</button>
)}
</div>
);
};
export default MessageContent;

View file

@@ -1,57 +0,0 @@
import React from 'react';
import { format } from 'date-fns';
import { TopicMessage } from 'generated-sources';
import Dropdown from 'components/common/Dropdown/Dropdown';
import DropdownItem from 'components/common/Dropdown/DropdownItem';
import useDataSaver from 'lib/hooks/useDataSaver';
import MessageContent from './MessageContent';
export interface MessageItemProp {
partition: TopicMessage['partition'];
offset: TopicMessage['offset'];
timestamp: TopicMessage['timestamp'];
content?: TopicMessage['content'];
messageKey?: TopicMessage['key'];
}
const MessageItem: React.FC<MessageItemProp> = ({
partition,
offset,
timestamp,
content,
messageKey,
}) => {
const { copyToClipboard, saveFile } = useDataSaver(
'topic-message',
content || ''
);
return (
<tr>
<td style={{ width: 200 }}>{format(timestamp, 'yyyy-MM-dd HH:mm:ss')}</td>
<td>{messageKey}</td>
<td style={{ width: 150 }}>{offset}</td>
<td style={{ width: 100 }}>{partition}</td>
<td style={{ wordBreak: 'break-word' }}>
{content && <MessageContent message={content} />}
</td>
<td className="has-text-right">
<Dropdown
label={
<span className="icon">
<i className="fas fa-cog" />
</span>
}
right
>
<DropdownItem onClick={copyToClipboard}>
Copy to clipboard
</DropdownItem>
<DropdownItem onClick={saveFile}>Save as a file</DropdownItem>
</Dropdown>
</td>
</tr>
);
};
export default MessageItem;

View file

@@ -1,327 +1,13 @@
import 'react-datepicker/dist/react-datepicker.css';
import React, { useCallback, useEffect, useRef } from 'react';
import { groupBy, map, concat, maxBy, minBy } from 'lodash';
import DatePicker from 'react-datepicker';
import MultiSelect from 'react-multi-select-component';
import { Option } from 'react-multi-select-component/dist/lib/interfaces';
import { useDebouncedCallback } from 'use-debounce';
import {
ClusterName,
TopicMessageQueryParams,
TopicName,
} from 'redux/interfaces';
import {
TopicMessage,
Partition,
SeekType,
SeekDirection,
} from 'generated-sources';
import PageLoader from 'components/common/PageLoader/PageLoader';
import React from 'react';
import FiltersContainer from './Filters/FiltersContainer';
import MessagesTable from './MessagesTable';
export interface Props {
clusterName: ClusterName;
topicName: TopicName;
isFetched: boolean;
fetchTopicMessages: (
clusterName: ClusterName,
topicName: TopicName,
queryParams: Partial<TopicMessageQueryParams>
) => void;
messages: TopicMessage[];
partitions: Partition[];
}
interface FilterProps {
offset: TopicMessage['offset'];
partition: TopicMessage['partition'];
}
function usePrevious(value: Date | null) {
const ref = useRef<Date | null>();
useEffect(() => {
ref.current = value;
});
return ref.current;
}
const Messages: React.FC<Props> = ({
isFetched,
clusterName,
topicName,
messages,
partitions,
fetchTopicMessages,
}) => {
const [searchQuery, setSearchQuery] = React.useState<string>('');
const [searchTimestamp, setSearchTimestamp] = React.useState<Date | null>(
null
);
const [filterProps, setFilterProps] = React.useState<FilterProps[]>([]);
const [selectedSeekType, setSelectedSeekType] = React.useState<SeekType>(
SeekType.OFFSET
);
const [searchOffset, setSearchOffset] = React.useState<string>();
const [selectedPartitions, setSelectedPartitions] = React.useState<Option[]>(
partitions.map((p) => ({
value: p.partition,
label: p.partition.toString(),
}))
);
const [queryParams, setQueryParams] = React.useState<
Partial<TopicMessageQueryParams>
>({ limit: 100 });
const debouncedCallback = useDebouncedCallback(
(query: Partial<TopicMessageQueryParams>) =>
setQueryParams({ ...queryParams, ...query }),
1000
);
const [selectedSeekDirection, setSelectedSeekDirection] =
React.useState<SeekDirection>(SeekDirection.FORWARD);
const prevSearchTimestamp = usePrevious(searchTimestamp);
const getUniqueDataForEachPartition: FilterProps[] = React.useMemo(() => {
const partitionUniqs: FilterProps[] = partitions.map((p) => ({
offset: 0,
partition: p.partition,
}));
const messageUniqs: FilterProps[] = map(
groupBy(messages, 'partition'),
(v) =>
selectedSeekDirection === SeekDirection.FORWARD
? maxBy(v, 'offset')
: minBy(v, 'offset')
).map((v) => ({
offset: v ? v.offset : 0,
partition: v ? v.partition : 0,
}));
return map(
groupBy(concat(partitionUniqs, messageUniqs), 'partition'),
(v) => maxBy(v, 'offset') as FilterProps
);
}, [messages, partitions]);
const getSeekToValuesForPartitions = (partition: Option) => {
const foundedValues = filterProps.find(
(prop) => prop.partition === partition.value
);
if (selectedSeekType === SeekType.OFFSET) {
return foundedValues ? foundedValues.offset : 0;
}
return searchTimestamp ? searchTimestamp.getTime() : null;
};
React.useEffect(() => {
fetchTopicMessages(clusterName, topicName, queryParams);
}, []);
React.useEffect(() => {
setFilterProps(getUniqueDataForEachPartition);
}, [messages, partitions]);
const handleQueryChange = (event: React.ChangeEvent<HTMLInputElement>) => {
const query = event.target.value;
setSearchQuery(query);
debouncedCallback({ q: query });
};
const handleDateTimeChange = () => {
if (searchTimestamp !== prevSearchTimestamp) {
if (searchTimestamp) {
const timestamp: number = searchTimestamp.getTime();
setSearchTimestamp(searchTimestamp);
setQueryParams({
...queryParams,
seekType: SeekType.TIMESTAMP,
seekTo: selectedPartitions.map((p) => `${p.value}::${timestamp}`),
});
} else {
setSearchTimestamp(null);
const { seekTo, seekType, ...queryParamsWithoutSeek } = queryParams;
setQueryParams(queryParamsWithoutSeek);
}
}
};
const handleSeekTypeChange = (
event: React.ChangeEvent<HTMLSelectElement>
) => {
setSelectedSeekType(event.target.value as SeekType);
};
const handleOffsetChange = (event: React.ChangeEvent<HTMLInputElement>) => {
const offset = event.target.value || '0';
setSearchOffset(offset);
debouncedCallback({
seekType: SeekType.OFFSET,
seekTo: selectedPartitions.map((p) => `${p.value}::${offset}`),
});
};
const handlePartitionsChange = (options: Option[]) => {
setSelectedPartitions(options);
debouncedCallback({
seekType: options.length > 0 ? selectedSeekType : undefined,
seekTo:
options.length > 0
? options.map((p) => `${p.value}::${getSeekToValuesForPartitions(p)}`)
: undefined,
});
};
const handleFiltersSubmit = useCallback(() => {
fetchTopicMessages(clusterName, topicName, queryParams);
}, [clusterName, topicName, queryParams]);
const onNext = (event: React.MouseEvent<HTMLButtonElement>) => {
event.preventDefault();
const seekTo: string[] = filterProps
.filter(
(value) =>
selectedPartitions.findIndex((p) => p.value === value.partition) > -1
)
.map((p) => `${p.partition}::${p.offset}`);
fetchTopicMessages(clusterName, topicName, {
...queryParams,
seekType: SeekType.OFFSET,
seekTo,
});
};
const filterOptions = (options: Option[], filter: string) => {
if (!filter) {
return options;
}
return options.filter(
({ value }) => value.toString() && value.toString() === filter
);
};
const toggleSeekDirection = () => {
const nextSeekDirectionValue =
selectedSeekDirection === SeekDirection.FORWARD
? SeekDirection.BACKWARD
: SeekDirection.FORWARD;
setSelectedSeekDirection(nextSeekDirectionValue);
debouncedCallback({
seekDirection: nextSeekDirectionValue,
});
fetchTopicMessages(clusterName, topicName, {
...queryParams,
seekDirection: nextSeekDirectionValue,
});
};
if (!isFetched) {
return <PageLoader />;
}
return (
<div>
<div className="columns is-align-items-flex-end">
<div className="column is-3">
<label className="label">Partitions</label>
<MultiSelect
options={partitions.map((p) => ({
label: `Partition #${p.partition.toString()}`,
value: p.partition,
}))}
filterOptions={filterOptions}
value={selectedPartitions}
onChange={handlePartitionsChange}
labelledBy="Select partitions"
/>
</div>
<div className="column is-2">
<label className="label">Seek Type</label>
<div className="select is-block">
<select
id="selectSeekType"
name="selectSeekType"
onChange={handleSeekTypeChange}
value={selectedSeekType}
>
<option value={SeekType.OFFSET}>Offset</option>
<option value={SeekType.TIMESTAMP}>Timestamp</option>
</select>
</div>
</div>
<div className="column is-2">
{selectedSeekType === SeekType.OFFSET ? (
<>
<label className="label">Offset</label>
<input
id="searchOffset"
name="searchOffset"
type="text"
className="input"
value={searchOffset}
onChange={handleOffsetChange}
/>
</>
) : (
<>
<label className="label">Timestamp</label>
<DatePicker
selected={searchTimestamp}
onChange={(date: Date | null) => setSearchTimestamp(date)}
onCalendarClose={handleDateTimeChange}
showTimeInput
timeInputLabel="Time:"
dateFormat="MMMM d, yyyy h:mm aa"
className="input"
/>
</>
)}
</div>
<div className="column is-3">
<label className="label">Search</label>
<input
id="searchText"
type="text"
name="searchText"
className="input"
placeholder="Search"
value={searchQuery}
onChange={handleQueryChange}
/>
</div>
<div className="column is-2">
<input
type="submit"
className="button is-primary is-fullwidth"
onClick={handleFiltersSubmit}
/>
</div>
</div>
<div className="columns">
<div className="column is-half">
<div className="field">
<input
id="switchRoundedDefault"
type="checkbox"
name="switchRoundedDefault"
className="switch is-rounded"
checked={selectedSeekDirection === SeekDirection.BACKWARD}
onChange={toggleSeekDirection}
/>
<label htmlFor="switchRoundedDefault">Newest first</label>
</div>
</div>
</div>
<MessagesTable messages={messages} onNext={onNext} />
</div>
);
};
const Messages: React.FC = () => (
<div className="box">
<FiltersContainer />
<MessagesTable />
</div>
);
export default Messages;

View file

@@ -1,41 +0,0 @@
import { connect } from 'react-redux';
import { ClusterName, RootState, TopicName } from 'redux/interfaces';
import { RouteComponentProps, withRouter } from 'react-router-dom';
import { fetchTopicMessages } from 'redux/actions';
import {
getIsTopicMessagesFetched,
getPartitionsByTopicName,
getTopicMessages,
} from 'redux/reducers/topics/selectors';
import Messages from './Messages';
interface RouteProps {
clusterName: ClusterName;
topicName: TopicName;
}
type OwnProps = RouteComponentProps<RouteProps>;
const mapStateToProps = (
state: RootState,
{
match: {
params: { topicName, clusterName },
},
}: OwnProps
) => ({
clusterName,
topicName,
isFetched: getIsTopicMessagesFetched(state),
messages: getTopicMessages(state),
partitions: getPartitionsByTopicName(state, topicName),
});
const mapDispatchToProps = {
fetchTopicMessages,
};
export default withRouter(
connect(mapStateToProps, mapDispatchToProps)(Messages)
);

View file

@@ -1,58 +1,135 @@
import React from 'react';
import { TopicMessage } from 'generated-sources';
import PageLoader from 'components/common/PageLoader/PageLoader';
import CustomParamButton from 'components/Topics/shared/Form/CustomParams/CustomParamButton';
import {
Partition,
SeekDirection,
TopicMessage,
TopicMessageConsuming,
} from 'generated-sources';
import { compact, concat, groupBy, map, maxBy, minBy } from 'lodash';
import React from 'react';
import { useSelector } from 'react-redux';
import { useHistory, useLocation } from 'react-router';
import { ClusterName, TopicName } from 'redux/interfaces';
import {
getTopicMessges,
getIsTopicMessagesFetching,
} from 'redux/reducers/topicMessages/selectors';
import MessageItem from './MessageItem';
import Message from './Message';
export interface MessagesTableProp {
export interface MessagesProps {
clusterName: ClusterName;
topicName: TopicName;
messages: TopicMessage[];
onNext(event: React.MouseEvent<HTMLButtonElement>): void;
phaseMessage?: string;
partitions: Partition[];
meta: TopicMessageConsuming;
addMessage(message: TopicMessage): void;
resetMessages(): void;
updatePhase(phase: string): void;
updateMeta(meta: TopicMessageConsuming): void;
}
const MessagesTable: React.FC<MessagesTableProp> = ({ messages, onNext }) => (
<>
<table className="table is-fullwidth is-narrow">
<thead>
<tr>
<th>Timestamp</th>
<th>Key</th>
<th>Offset</th>
<th>Partition</th>
<th>Content</th>
<th> </th>
</tr>
</thead>
<tbody>
{messages.map(
({ partition, offset, timestamp, content, key }: TopicMessage) => (
<MessageItem
key={`message-${timestamp.getTime()}-${offset}`}
partition={partition}
offset={offset}
timestamp={timestamp}
content={content}
messageKey={key}
/>
)
)}
{messages.length === 0 && (
const MessagesTable: React.FC = () => {
const location = useLocation();
const history = useHistory();
const searchParams = React.useMemo(
() => new URLSearchParams(location.search),
[location, history]
);
const messages = useSelector(getTopicMessges);
const isFetching = useSelector(getIsTopicMessagesFetching);
const handleNextClick = React.useCallback(() => {
const seekTo = searchParams.get('seekTo');
if (seekTo) {
const selectedPartitions = seekTo.split(',').map((item) => {
const [partition] = item.split('::');
return { offset: 0, partition: parseInt(partition, 10) };
});
const messageUniqs = map(groupBy(messages, 'partition'), (v) =>
searchParams.get('seekDirection') === SeekDirection.BACKWARD
? minBy(v, 'offset')
: maxBy(v, 'offset')
).map((message) => ({
offset: message?.offset || 0,
partition: message?.partition || 0,
}));
const nextSeekTo = compact(
map(
groupBy(concat(selectedPartitions, messageUniqs), 'partition'),
(v) => maxBy(v, 'offset')
)
)
.map(({ offset, partition }) => `${partition}::${offset}`)
.join(',');
searchParams.set('seekTo', nextSeekTo);
history.push({
search: `?${searchParams.toString()}`,
});
}
}, [searchParams, history, messages]);
return (
<>
<table className="table is-fullwidth">
<thead>
<tr>
<td colSpan={10}>No messages at selected topic</td>
<th style={{ width: 40 }}> </th>
<th style={{ width: 70 }}>Offset</th>
<th style={{ width: 90 }}>Partition</th>
<th>Key</th>
<th style={{ width: 170 }}>Timestamp</th>
<th>Content</th>
<th> </th>
</tr>
)}
</tbody>
</table>
<div className="columns">
<div className="column is-full">
<CustomParamButton
className="is-link is-pulled-right"
type="fa-chevron-right"
onClick={onNext}
btnText="Next"
/>
</thead>
<tbody>
{messages.map((message: TopicMessage) => (
<Message
key={[
message.offset,
message.timestamp,
message.key,
message.partition,
].join('-')}
message={message}
/>
))}
{isFetching && (
<tr>
<td colSpan={10}>
<PageLoader />
</td>
</tr>
)}
{messages.length === 0 && !isFetching && (
<tr>
<td colSpan={10}>No messages found</td>
</tr>
)}
</tbody>
</table>
<div className="columns">
<div className="column is-full">
<CustomParamButton
className="is-link is-pulled-right"
type="fa-chevron-right"
onClick={handleNextClick}
btnText="Next"
/>
</div>
</div>
</div>
</>
);
</>
);
};
export default MessagesTable;
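
The "Next" handler above is easier to follow as a pure function: for every partition present in the current seekTo, keep the highest offset observed in the rendered messages, defaulting to 0 when a partition produced nothing. A sketch under that reading (the name nextSeekTo is illustrative, and the real handler also honors seekDirection by taking minBy when paging backward):

import { compact, concat, groupBy, map, maxBy } from 'lodash';
import { TopicMessage } from 'generated-sources';

const nextSeekTo = (seekTo: string, messages: TopicMessage[]): string => {
  // Partitions currently selected in the query, all seeded with offset 0.
  const selected = seekTo.split(',').map((item) => ({
    partition: parseInt(item.split('::')[0], 10),
    offset: 0,
  }));
  // Highest offset actually received, per partition.
  const seen = map(groupBy(messages, 'partition'), (v) => maxBy(v, 'offset')).map(
    (m) => ({ partition: m?.partition || 0, offset: m?.offset || 0 })
  );
  // Merge both lists, keeping the max offset for each partition.
  return compact(
    map(groupBy(concat(selected, seen), 'partition'), (v) => maxBy(v, 'offset'))
  )
    .map(({ partition, offset }) => `${partition}::${offset}`)
    .join(',');
};

// e.g. with messages whose max offset on partition 1 is 7:
// nextSeekTo('0::0,1::0', messages) => '0::0,1::7'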

View file

@@ -0,0 +1,27 @@
import React from 'react';
import { mount } from 'enzyme';
import { Provider } from 'react-redux';
import { StaticRouter } from 'react-router-dom';
import configureStore from 'redux/store/configureStore';
import FiltersContainer from 'components/Topics/Topic/Details/Messages/Filters/FiltersContainer';
const store = configureStore();
jest.mock(
'components/Topics/Topic/Details/Messages/Filters/Filters',
() => 'mock-Filters'
);
describe('FiltersContainer', () => {
it('renders view with initial state of storage', () => {
const wrapper = mount(
<Provider store={store}>
<StaticRouter>
<FiltersContainer />
</StaticRouter>
</Provider>
);
expect(wrapper.exists('mock-Filters')).toBeTruthy();
});
});

View file

@@ -1,21 +0,0 @@
import { shallow } from 'enzyme';
import React from 'react';
import MessageContent from 'components/Topics/Topic/Details/Messages/MessageContent';
import { messageContent } from './fixtures';
describe('MessageContent', () => {
const component = shallow(<MessageContent message={messageContent} />);
describe('when it is folded', () => {
it('matches the snapshot', () => {
expect(component).toMatchSnapshot();
});
});
describe('when it is unfolded', () => {
it('matches the snapshot', () => {
component.find('button').simulate('click');
expect(component).toMatchSnapshot();
});
});
});

View file

@@ -1,31 +0,0 @@
import React from 'react';
import { shallow } from 'enzyme';
import MessageItem from 'components/Topics/Topic/Details/Messages/MessageItem';
import { messages } from './fixtures';
jest.mock('date-fns', () => ({
format: () => `mocked date`,
}));
describe('MessageItem', () => {
describe('when content is defined', () => {
it('renders table row with MessageContent', () => {
const wrapper = shallow(<MessageItem {...messages[0]} />);
expect(wrapper.find('tr').length).toEqual(1);
expect(wrapper.find('td').length).toEqual(6);
expect(wrapper.find('MessageContent').length).toEqual(1);
});
it('matches snapshot', () => {
expect(shallow(<MessageItem {...messages[0]} />)).toMatchSnapshot();
});
});
describe('when content is undefined', () => {
it('matches snapshot', () => {
expect(shallow(<MessageItem {...messages[1]} />)).toMatchSnapshot();
});
});
});

View file

@@ -1,168 +0,0 @@
import React from 'react';
import { Provider } from 'react-redux';
import { mount, shallow } from 'enzyme';
import DatePicker from 'react-datepicker';
import Messages, {
Props,
} from 'components/Topics/Topic/Details/Messages/Messages';
import MessagesContainer from 'components/Topics/Topic/Details/Messages/MessagesContainer';
import PageLoader from 'components/common/PageLoader/PageLoader';
import configureStore from 'redux/store/configureStore';
describe('Messages', () => {
describe('Container', () => {
const store = configureStore();
it('renders view', () => {
const component = shallow(
<Provider store={store}>
<MessagesContainer />
</Provider>
);
expect(component.exists()).toBeTruthy();
});
});
describe('View', () => {
beforeEach(() => {
jest.restoreAllMocks();
});
const setupWrapper = (props: Partial<Props> = {}) => (
<Messages
clusterName="Test cluster"
topicName="Cluster topic"
isFetched
fetchTopicMessages={jest.fn()}
messages={[]}
partitions={[]}
{...props}
/>
);
describe('Initial state', () => {
it('renders PageLoader', () => {
expect(
shallow(setupWrapper({ isFetched: false })).exists(PageLoader)
).toBeTruthy();
});
});
describe('Table', () => {
describe('With messages', () => {
const messagesWrapper = mount(
setupWrapper({
messages: [
{
partition: 1,
offset: 2,
timestamp: new Date('05-05-1994'),
content: '[1, 2, 3]',
},
],
})
);
it('renders table', () => {
expect(messagesWrapper.exists('.table.is-fullwidth')).toBeTruthy();
});
it('renders MessageContent', () => {
expect(messagesWrapper.find('MessageContent').length).toEqual(1);
});
it('parses message content correctly', () => {
const messages = [
{
partition: 1,
offset: 2,
timestamp: new Date('05-05-1994'),
content: '[1, 2, 3]',
},
];
const content = JSON.stringify(messages[0].content);
expect(JSON.parse(content)).toEqual(messages[0].content);
});
});
describe('Without messages', () => {
it('renders string', () => {
const wrapper = mount(setupWrapper());
expect(wrapper.text()).toContain('No messages at selected topic');
});
});
});
describe('Offset field', () => {
describe('Seek Type dependency', () => {
const wrapper = mount(setupWrapper());
it('renders DatePicker', () => {
wrapper
.find('[id="selectSeekType"]')
.simulate('change', { target: { value: 'TIMESTAMP' } });
expect(
wrapper.find('[id="selectSeekType"]').first().props().value
).toEqual('TIMESTAMP');
expect(wrapper.exists(DatePicker)).toBeTruthy();
});
});
describe('With defined offset value', () => {
const wrapper = shallow(setupWrapper());
it('shows offset value in input', () => {
const offset = '10';
wrapper
.find('#searchOffset')
.simulate('change', { target: { value: offset } });
expect(wrapper.find('#searchOffset').first().props().value).toEqual(
offset
);
});
});
describe('With invalid offset value', () => {
const wrapper = shallow(setupWrapper());
it('shows 0 in input', () => {
wrapper
.find('#searchOffset')
.simulate('change', { target: { value: null } });
expect(wrapper.find('#searchOffset').first().props().value).toBe('0');
});
});
});
describe('Search field', () => {
it('renders input correctly', () => {
const query = 20;
const wrapper = shallow(setupWrapper());
expect(wrapper.exists('#searchText')).toBeTruthy();
wrapper
.find('#searchText')
.simulate('change', { target: { value: query } });
expect(wrapper.find('#searchText').at(0).props().value).toEqual(query);
});
});
describe('Submit button', () => {
it('fetches topic messages', () => {
const mockedfetchTopicMessages = jest.fn();
const wrapper = mount(
setupWrapper({ fetchTopicMessages: mockedfetchTopicMessages })
);
wrapper.find('[type="submit"]').simulate('click');
expect(mockedfetchTopicMessages).toHaveBeenCalled();
});
});
describe('Seek Direction', () => {
it('fetches topic messages', () => {
const mockedfetchTopicMessages = jest.fn();
const wrapper = mount(
setupWrapper({ fetchTopicMessages: mockedfetchTopicMessages })
);
wrapper.find('input[type="checkbox"]').simulate('change');
expect(mockedfetchTopicMessages).toHaveBeenCalled();
wrapper.find('input[type="checkbox"]').simulate('change');
expect(mockedfetchTopicMessages).toHaveBeenCalled();
});
});
});
});

View file

@@ -1,50 +0,0 @@
import React from 'react';
import { shallow } from 'enzyme';
import MessagesTable, {
MessagesTableProp,
} from 'components/Topics/Topic/Details/Messages/MessagesTable';
import { messages } from './fixtures';
jest.mock('date-fns', () => ({
format: () => `mocked date`,
}));
describe('MessagesTable', () => {
const setupWrapper = (props: Partial<MessagesTableProp> = {}) => (
<MessagesTable messages={[]} onNext={jest.fn()} {...props} />
);
describe('when topic is empty', () => {
it('renders table row with JSONEditor', () => {
const wrapper = shallow(setupWrapper());
expect(wrapper.find('td').text()).toEqual(
'No messages at selected topic'
);
});
it('matches snapshot', () => {
expect(shallow(setupWrapper())).toMatchSnapshot();
});
});
describe('when topic contains messages', () => {
const onNext = jest.fn();
const wrapper = shallow(setupWrapper({ messages, onNext }));
it('renders table row without JSONEditor', () => {
expect(wrapper.exists('table')).toBeTruthy();
expect(wrapper.exists('CustomParamButton')).toBeTruthy();
expect(wrapper.find('MessageItem').length).toEqual(2);
});
it('handles CustomParamButton click', () => {
wrapper.find('CustomParamButton').simulate('click');
expect(onNext).toHaveBeenCalled();
});
it('matches snapshot', () => {
expect(shallow(setupWrapper({ messages, onNext }))).toMatchSnapshot();
});
});
});

View file

@@ -1,54 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`MessageContent when it is folded matches the snapshot 1`] = `
<div
className="is-flex is-flex-direction-column"
>
<p
className="has-content-overflow-ellipsis"
>
{
"_id": "609fab8aed527f514f4e648d",
"name": "in nostrud",
"desc": "Dolore nostrud commodo magna velit ut magna voluptate sint aute. Excepteur aute culpa culpa dolor ipsum. Tempor est ut officia tempor laborum consectetur.
Amet officia eu veni...
</p>
<button
className="button is-small mt-2"
onClick={[Function]}
title="Expand to JSON"
type="button"
>
<span
className="icon is-small"
>
<i
className="fas fa-chevron-down"
/>
</span>
</button>
</div>
`;
exports[`MessageContent when it is unfolded matches the snapshot 1`] = `
<div
className="is-flex is-flex-direction-column"
>
<FullMessage
message="{
\\"_id\\": \\"609fab8aed527f514f4e648d\\",
\\"name\\": \\"in nostrud\\",
\\"desc\\": \\"Dolore nostrud commodo magna velit ut magna voluptate sint aute. Excepteur aute culpa culpa dolor ipsum. Tempor est ut officia tempor laborum consectetur.
Amet officia eu veniam Lorem enim aliqua aute voluptate elit do sunt in magna occaecat. Nisi sit non est adipisicing adipisicing consequat duis duis tempor consequat deserunt ea quis ad. Veniam sunt culpa nostrud adipisicing cillum voluptate non est cupidatat. Eiusmod tempor officia irure et deserunt est ex laboris occaecat adipisicing occaecat in aliquip aliqua. Do laboris culpa cupidatat cillum non. Ullamco excepteur mollit voluptate anim in nisi anim elit culpa aute. Ad officia sunt proident ut ullamco officia ea fugiat culpa cillum et fugiat aliquip.
Amet non labore anim in ipsum. Et Lorem velit dolor ipsum. Irure id proident excepteur aliquip deserunt id officia dolor deserunt amet in sint. Aute in nostrud nulla ut laboris Lorem commodo nulla ipsum. Aliqua nulla commodo Lorem labore magna esse proident id ea in pariatur consectetur sint Lorem.
Cupidatat deserunt mollit tempor aliqua. Fugiat ullamco magna pariatur quis nulla magna. Esse duis labore ipsum nisi ullamco qui aute duis duis amet est laborum adipisicing magna. Est aliquip quis qui do aliquip nisi elit tempor ex aliquip. Excepteur aliquip ea deserunt amet adipisicing voluptate eiusmod sit sint exercitation exercitation. Id labore amet mollit ex commodo. Proident ex adipisicing deserunt esse Lorem tempor laborum nostrud commodo incididunt ea id.
\\",
\\"semster\\": \\"spring19\\",
\\"profile\\": \\"cs\\",
\\"degree\\": \\"bachelor\\",
\\"degreee\\": \\"master\\",
\\"degreeeee\\": \\"bachelor\\"
}"
/>
</div>
`;

View file

@@ -1,139 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`MessageItem when content is defined matches snapshot 1`] = `
<tr>
<td
style={
Object {
"width": 200,
}
}
>
mocked date
</td>
<td />
<td
style={
Object {
"width": 150,
}
}
>
2
</td>
<td
style={
Object {
"width": 100,
}
}
>
1
</td>
<td
style={
Object {
"wordBreak": "break-word",
}
}
>
<MessageContent
message="{\\"foo\\":\\"bar\\",\\"key\\":\\"val\\"}"
/>
</td>
<td
className="has-text-right"
>
<Dropdown
label={
<span
className="icon"
>
<i
className="fas fa-cog"
/>
</span>
}
right={true}
>
<DropdownItem
onClick={[Function]}
>
Copy to clipboard
</DropdownItem>
<DropdownItem
onClick={[Function]}
>
Save as a file
</DropdownItem>
</Dropdown>
</td>
</tr>
`;
exports[`MessageItem when content is undefined matches snapshot 1`] = `
<tr>
<td
style={
Object {
"width": 200,
}
}
>
mocked date
</td>
<td />
<td
style={
Object {
"width": 150,
}
}
>
20
</td>
<td
style={
Object {
"width": 100,
}
}
>
2
</td>
<td
style={
Object {
"wordBreak": "break-word",
}
}
/>
<td
className="has-text-right"
>
<Dropdown
label={
<span
className="icon"
>
<i
className="fas fa-cog"
/>
</span>
}
right={true}
>
<DropdownItem
onClick={[Function]}
>
Copy to clipboard
</DropdownItem>
<DropdownItem
onClick={[Function]}
>
Save as a file
</DropdownItem>
</Dropdown>
</td>
</tr>
`;

View file

@@ -1,117 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`MessagesTable when topic contains messages matches snapshot 1`] = `
<Fragment>
<table
className="table is-fullwidth is-narrow"
>
<thead>
<tr>
<th>
Timestamp
</th>
<th>
Key
</th>
<th>
Offset
</th>
<th>
Partition
</th>
<th>
Content
</th>
<th>
</th>
</tr>
</thead>
<tbody>
<MessageItem
content="{\\"foo\\":\\"bar\\",\\"key\\":\\"val\\"}"
key="message-802310400000-2"
messageKey="1"
offset={2}
partition={1}
timestamp={1995-06-05T00:00:00.000Z}
/>
<MessageItem
key="message-1596585600000-20"
messageKey="1"
offset={20}
partition={2}
timestamp={2020-08-05T00:00:00.000Z}
/>
</tbody>
</table>
<div
className="columns"
>
<div
className="column is-full"
>
<CustomParamButton
btnText="Next"
className="is-link is-pulled-right"
onClick={[MockFunction]}
type="fa-chevron-right"
/>
</div>
</div>
</Fragment>
`;
exports[`MessagesTable when topic is empty matches snapshot 1`] = `
<Fragment>
<table
className="table is-fullwidth is-narrow"
>
<thead>
<tr>
<th>
Timestamp
</th>
<th>
Key
</th>
<th>
Offset
</th>
<th>
Partition
</th>
<th>
Content
</th>
<th>
</th>
</tr>
</thead>
<tbody>
<tr>
<td
colSpan={10}
>
No messages at selected topic
</td>
</tr>
</tbody>
</table>
<div
className="columns"
>
<div
className="column is-full"
>
<CustomParamButton
btnText="Next"
className="is-link is-pulled-right"
onClick={[MockFunction]}
type="fa-chevron-right"
/>
</div>
</div>
</Fragment>
`;

View file

@@ -1,32 +0,0 @@
import { TopicMessage } from 'generated-sources';
export const messages: TopicMessage[] = [
{
partition: 1,
offset: 2,
timestamp: new Date(Date.UTC(1995, 5, 5)),
content: JSON.stringify({
foo: 'bar',
key: 'val',
}),
key: '1',
},
{
partition: 2,
offset: 20,
timestamp: new Date(Date.UTC(2020, 7, 5)),
content: undefined,
key: '1',
},
];
export const messageContent = `{
"_id": "609fab8aed527f514f4e648d",
"name": "in nostrud",
"desc": "Dolore nostrud commodo magna velit ut magna voluptate sint aute. Excepteur aute culpa culpa dolor ipsum. Tempor est ut officia tempor laborum consectetur.\r\nAmet officia eu veniam Lorem enim aliqua aute voluptate elit do sunt in magna occaecat. Nisi sit non est adipisicing adipisicing consequat duis duis tempor consequat deserunt ea quis ad. Veniam sunt culpa nostrud adipisicing cillum voluptate non est cupidatat. Eiusmod tempor officia irure et deserunt est ex laboris occaecat adipisicing occaecat in aliquip aliqua. Do laboris culpa cupidatat cillum non. Ullamco excepteur mollit voluptate anim in nisi anim elit culpa aute. Ad officia sunt proident ut ullamco officia ea fugiat culpa cillum et fugiat aliquip.\r\nAmet non labore anim in ipsum. Et Lorem velit dolor ipsum. Irure id proident excepteur aliquip deserunt id officia dolor deserunt amet in sint. Aute in nostrud nulla ut laboris Lorem commodo nulla ipsum. Aliqua nulla commodo Lorem labore magna esse proident id ea in pariatur consectetur sint Lorem.\r\nCupidatat deserunt mollit tempor aliqua. Fugiat ullamco magna pariatur quis nulla magna. Esse duis labore ipsum nisi ullamco qui aute duis duis amet est laborum adipisicing magna. Est aliquip quis qui do aliquip nisi elit tempor ex aliquip. Excepteur aliquip ea deserunt amet adipisicing voluptate eiusmod sit sint exercitation exercitation. Id labore amet mollit ex commodo. Proident ex adipisicing deserunt esse Lorem tempor laborum nostrud commodo incididunt ea id.\r\n",
"semster": "spring19",
"profile": "cs",
"degree": "bachelor",
"degreee": "master",
"degreeeee": "bachelor"
}`;

View file

@@ -0,0 +1,33 @@
import { Option } from 'react-multi-select-component/dist/lib/interfaces';
import { filterOptions } from 'components/Topics/Topic/Details/Messages/Filters/utils';
const options: Option[] = [
{
value: 0,
label: 'Partition #0',
},
{
value: 1,
label: 'Partition #1',
},
{
value: 11,
label: 'Partition #11',
},
{
value: 21,
label: 'Partition #21',
},
];
describe('utils', () => {
describe('filterOptions', () => {
it('returns options if no filter is defined', () => {
expect(filterOptions(options, '')).toEqual(options);
});
it('returns filtered options', () => {
expect(filterOptions(options, '11')).toEqual([options[2]]);
});
});
});

View file

@@ -5,6 +5,7 @@ interface Props {
className: string;
type: 'fa-plus' | 'fa-minus' | 'fa-chevron-right';
btnText?: string;
disabled?: boolean;
}
const CustomParamButton: React.FC<Props> = ({

View file

@@ -27,7 +27,7 @@ const Dropdown: React.FC<DropdownProps> = ({ label, right, up, children }) => {
<div className="dropdown-trigger">
<button
type="button"
className="button is-small"
className="button is-small is-link"
aria-haspopup="true"
aria-controls="dropdown-menu"
onClick={onClick}

View file

@@ -15,7 +15,7 @@ exports[`Dropdown matches snapshot 1`] = `
<button
aria-controls="dropdown-menu"
aria-haspopup="true"
className="button is-small"
className="button is-small is-link"
onClick={[Function]}
type="button"
>

View file

@@ -4,22 +4,22 @@ import JSONTree from 'react-json-tree';
import theme from './theme';
interface FullMessageProps {
message: string;
data: string;
}
const FullMessage: React.FC<FullMessageProps> = ({ message }) => {
const JSONViewer: React.FC<FullMessageProps> = ({ data }) => {
try {
return (
<JSONTree
data={JSON.parse(message)}
data={JSON.parse(data)}
theme={theme}
shouldExpandNode={() => true}
hideRoot
/>
);
} catch (e) {
return <p>{JSON.stringify(message)}</p>;
return <p>{JSON.stringify(data)}</p>;
}
};
export default FullMessage;
export default JSONViewer;

View file

@@ -0,0 +1,18 @@
import { shallow } from 'enzyme';
import React from 'react';
import JSONViewer from 'components/common/JSONViewer/JSONViewer';
const data = { a: 1 };
describe('JSONViewer component', () => {
it('renders JSONTree', () => {
const component = shallow(<JSONViewer data={JSON.stringify(data)} />);
expect(component.exists('JSONTree')).toBeTruthy();
});
it('falls back to plain text when data is not valid JSON', () => {
const component = shallow(<JSONViewer data={data as unknown as string} />);
expect(component.exists('JSONTree')).toBeFalsy();
expect(component.exists('p')).toBeTruthy();
});
});

View file

@@ -9,6 +9,10 @@ import {
TopicMessageSchema,
} from 'generated-sources';
import { FailurePayload } from 'redux/interfaces';
import {
topicMessagePayload,
topicMessagesMetaPayload,
} from 'redux/reducers/topicMessages/__test__/fixtures';
import { mockTopicsState } from './fixtures';
@@ -126,9 +130,8 @@ describe('Actions', () => {
});
it('creates a SUCCESS action', () => {
expect(actions.clearMessagesTopicAction.success('topic')).toEqual({
expect(actions.clearMessagesTopicAction.success()).toEqual({
type: 'CLEAR_TOPIC_MESSAGES__SUCCESS',
payload: 'topic',
});
});
@@ -207,6 +210,37 @@ describe('Actions', () => {
});
});
describe('topic messages', () => {
it('creates ADD_TOPIC_MESSAGE', () => {
expect(actions.addTopicMessage(topicMessagePayload)).toEqual({
type: 'ADD_TOPIC_MESSAGE',
payload: topicMessagePayload,
});
});
it('creates RESET_TOPIC_MESSAGES', () => {
expect(actions.resetTopicMessages()).toEqual({
type: 'RESET_TOPIC_MESSAGES',
});
});
it('creates UPDATE_TOPIC_MESSAGES_PHASE', () => {
expect(actions.updateTopicMessagesPhase('Polling')).toEqual({
type: 'UPDATE_TOPIC_MESSAGES_PHASE',
payload: 'Polling',
});
});
it('creates UPDATE_TOPIC_MESSAGES_META', () => {
expect(actions.updateTopicMessagesMeta(topicMessagesMetaPayload)).toEqual(
{
type: 'UPDATE_TOPIC_MESSAGES_META',
payload: topicMessagesMetaPayload,
}
);
});
});
describe('sending messages', () => {
describe('fetchTopicMessageSchemaAction', () => {
it('creates GET_TOPIC_SCHEMA__REQUEST', () => {

View file

@@ -57,7 +57,7 @@ describe('Thunks', () => {
await store.dispatch(thunks.clearTopicMessages(clusterName, topicName));
expect(store.getActions()).toEqual([
actions.clearMessagesTopicAction.request(),
actions.clearMessagesTopicAction.success(topicName),
actions.clearMessagesTopicAction.success(),
]);
});
@@ -78,26 +78,6 @@ });
});
});
describe('fetchTopicMessages', () => {
it('creates GET_TOPIC_MESSAGES__FAILURE when deleting existing messages', async () => {
fetchMock.getOnce(
`/api/clusters/${clusterName}/topics/${topicName}/messages`,
404
);
try {
await store.dispatch(
thunks.fetchTopicMessages(clusterName, topicName, {})
);
} catch (error) {
expect(error.status).toEqual(404);
expect(store.getActions()).toEqual([
actions.fetchTopicMessagesAction.request(),
actions.fetchTopicMessagesAction.failure(),
]);
}
});
});
describe('fetchTopicConsumerGroups', () => {
it('GET_TOPIC_CONSUMER_GROUPS__FAILURE', async () => {
fetchMock.getOnce(

View file

@@ -13,7 +13,6 @@ import {
ClusterMetrics,
Broker,
BrokerMetrics,
TopicMessage,
ConsumerGroup,
ConsumerGroupDetails,
SchemaSubject,
@@ -23,6 +22,8 @@ import {
FullConnectorInfo,
Connect,
Task,
TopicMessage,
TopicMessageConsuming,
TopicMessageSchema,
} from 'generated-sources';
@@ -62,17 +63,11 @@ export const fetchTopicsListAction = createAsyncAction(
'GET_TOPICS__FAILURE'
)<undefined, TopicsState, undefined>();
export const fetchTopicMessagesAction = createAsyncAction(
'GET_TOPIC_MESSAGES__REQUEST',
'GET_TOPIC_MESSAGES__SUCCESS',
'GET_TOPIC_MESSAGES__FAILURE'
)<undefined, TopicMessage[], undefined>();
export const clearMessagesTopicAction = createAsyncAction(
'CLEAR_TOPIC_MESSAGES__REQUEST',
'CLEAR_TOPIC_MESSAGES__SUCCESS',
'CLEAR_TOPIC_MESSAGES__FAILURE'
)<undefined, TopicName, { alert?: FailurePayload }>();
)<undefined, undefined, { alert?: FailurePayload }>();
export const fetchTopicDetailsAction = createAsyncAction(
'GET_TOPIC_DETAILS__REQUEST',
@@ -255,6 +250,23 @@ export const fetchTopicConsumerGroupsAction = createAsyncAction(
'GET_TOPIC_CONSUMER_GROUPS__FAILURE'
)<undefined, TopicsState, undefined>();
export const addTopicMessage =
createAction('ADD_TOPIC_MESSAGE')<TopicMessage>();
export const resetTopicMessages = createAction('RESET_TOPIC_MESSAGES')();
export const setTopicMessagesFetchingStatus = createAction(
'SET_TOPIC_MESSAGES_FETCHING_STATUS'
)<boolean>();
export const updateTopicMessagesPhase = createAction(
'UPDATE_TOPIC_MESSAGES_PHASE'
)<string>();
export const updateTopicMessagesMeta = createAction(
'UPDATE_TOPIC_MESSAGES_META'
)<TopicMessageConsuming>();
export const fetchTopicMessageSchemaAction = createAsyncAction(
'GET_TOPIC_SCHEMA__REQUEST',
'GET_TOPIC_SCHEMA__SUCCESS',

View file

@@ -15,7 +15,6 @@ import {
PromiseThunkResult,
ClusterName,
TopicName,
TopicMessageQueryParams,
TopicFormFormattedParams,
TopicFormDataRaw,
TopicsState,
@@ -62,27 +61,6 @@ export const fetchTopicsList =
dispatch(actions.fetchTopicsListAction.failure());
}
};
export const fetchTopicMessages =
(
clusterName: ClusterName,
topicName: TopicName,
queryParams: Partial<TopicMessageQueryParams>
): PromiseThunkResult =>
async (dispatch) => {
dispatch(actions.fetchTopicMessagesAction.request());
try {
const messages = await messagesApiClient.getTopicMessages({
clusterName,
topicName,
...queryParams,
});
dispatch(actions.fetchTopicMessagesAction.success(messages));
} catch (e) {
dispatch(actions.fetchTopicMessagesAction.failure());
}
};
export const clearTopicMessages =
(
clusterName: ClusterName,
@@ -97,7 +75,7 @@ export const clearTopicMessages =
topicName,
partitions,
});
dispatch(actions.clearMessagesTopicAction.success(topicName));
dispatch(actions.clearMessagesTopicAction.success());
} catch (e) {
const response = await getResponse(e);
const alert: FailurePayload = {

View file

@@ -2,7 +2,7 @@ import { ActionType } from 'typesafe-actions';
import { ThunkAction } from 'redux-thunk';
import * as actions from 'redux/actions/actions';
import { TopicsState } from './topic';
import { TopicMessagesState, TopicsState } from './topic';
import { ClusterState } from './cluster';
import { BrokersState } from './broker';
import { LoaderState } from './loader';
@@ -22,6 +22,7 @@ export * from './connect';
export interface RootState {
topics: TopicsState;
topicMessages: TopicMessagesState;
clusters: ClusterState;
brokers: BrokersState;
consumerGroups: ConsumerGroupsState;

View file

@@ -1,12 +1,13 @@
import {
Topic,
TopicDetails,
TopicMessage,
TopicConfig,
TopicCreation,
GetTopicMessagesRequest,
ConsumerGroup,
TopicColumnsToSort,
TopicMessage,
TopicMessageConsuming,
TopicMessageSchema,
} from 'generated-sources';
@@ -50,7 +51,6 @@ export interface TopicsState {
byName: { [topicName: string]: TopicWithDetailedInfo };
allNames: TopicName[];
totalPages: number;
messages: TopicMessage[];
search: string;
orderBy: TopicColumnsToSort | null;
consumerGroups: ConsumerGroup[];
@@ -69,3 +69,10 @@ export interface TopicFormDataRaw {
maxMessageBytes: number;
customParams: TopicFormCustomParams;
}
export interface TopicMessagesState {
messages: TopicMessage[];
phase?: string;
meta: TopicMessageConsuming;
isFetching: boolean;
}

View file

@@ -2,6 +2,7 @@ import { combineReducers } from 'redux';
import { RootState } from 'redux/interfaces';
import topics from './topics/reducer';
import topicMessages from './topicMessages/reducer';
import clusters from './clusters/reducer';
import brokers from './brokers/reducer';
import consumerGroups from './consumerGroups/reducer';
@@ -12,6 +13,7 @@ import alerts from './alerts/reducer';
export default combineReducers<RootState>({
topics,
topicMessages,
clusters,
brokers,
consumerGroups,

View file

@@ -0,0 +1,24 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`TopicMessages reducer Adds new message 1`] = `
Object {
"isFetching": false,
"messages": Array [
Object {
"content": "{\\"host\\":\\"schemaregistry1\\",\\"port\\":8085,\\"master_eligibility\\":true,\\"scheme\\":\\"http\\",\\"version\\":1}",
"headers": Object {},
"key": "schema-registry",
"offset": 14,
"partition": 29,
"timestamp": 2021-07-21T23:25:14.865Z,
"timestampType": "CREATE_TIME",
},
],
"meta": Object {
"bytesConsumed": 0,
"elapsedMs": 0,
"isCancelled": false,
"messagesConsumed": 0,
},
}
`;

View file

@@ -0,0 +1,23 @@
import {
TopicMessage,
TopicMessageConsuming,
TopicMessageTimestampTypeEnum,
} from 'generated-sources';
export const topicMessagePayload: TopicMessage = {
partition: 29,
offset: 14,
timestamp: new Date('2021-07-21T23:25:14.865Z'),
timestampType: TopicMessageTimestampTypeEnum.CREATE_TIME,
key: 'schema-registry',
headers: {},
content:
'{"host":"schemaregistry1","port":8085,"master_eligibility":true,"scheme":"http","version":1}',
};
export const topicMessagesMetaPayload: TopicMessageConsuming = {
bytesConsumed: 1830,
elapsedMs: 440,
messagesConsumed: 2301,
isCancelled: false,
};

View file

@@ -0,0 +1,43 @@
import {
addTopicMessage,
fetchSchemaVersionsAction,
resetTopicMessages,
updateTopicMessagesMeta,
updateTopicMessagesPhase,
} from 'redux/actions';
import reducer, { initialState } from 'redux/reducers/topicMessages/reducer';
import { topicMessagePayload, topicMessagesMetaPayload } from './fixtures';
describe('TopicMessages reducer', () => {
it('returns the initial state', () => {
expect(reducer(undefined, fetchSchemaVersionsAction.request())).toEqual(
initialState
);
});
it('Adds new message', () => {
const state = reducer(undefined, addTopicMessage(topicMessagePayload));
expect(state.messages.length).toEqual(1);
expect(state).toMatchSnapshot();
});
it('Clears messages', () => {
const state = reducer(undefined, addTopicMessage(topicMessagePayload));
expect(state.messages.length).toEqual(1);
const newState = reducer(state, resetTopicMessages());
expect(newState.messages.length).toEqual(0);
});
it('Updates Topic Messages Phase', () => {
const phase = 'Polling';
const state = reducer(undefined, updateTopicMessagesPhase(phase));
expect(state.phase).toEqual(phase);
});
it('Updates Topic Messages Meta', () => {
const state = reducer(
undefined,
updateTopicMessagesMeta(topicMessagesMetaPayload)
);
expect(state.meta).toEqual(topicMessagesMetaPayload);
});
});

View file

@@ -0,0 +1,56 @@
import configureStore from 'redux/store/configureStore';
import * as selectors from 'redux/reducers/topicMessages/selectors';
import { initialState } from 'redux/reducers/topicMessages/reducer';
import {
addTopicMessage,
updateTopicMessagesMeta,
updateTopicMessagesPhase,
} from 'redux/actions';
import { topicMessagePayload, topicMessagesMetaPayload } from './fixtures';
const store = configureStore();
describe('TopicMessages selectors', () => {
describe('Initial state', () => {
it('returns empty message array', () => {
expect(selectors.getTopicMessges(store.getState())).toEqual([]);
});
it('returns undefined phase', () => {
expect(selectors.getTopicMessgesPhase(store.getState())).toBeUndefined();
});
it('returns initial version of meta', () => {
expect(selectors.getTopicMessgesMeta(store.getState())).toEqual(
initialState.meta
);
});
});
describe('state', () => {
beforeAll(() => {
store.dispatch(addTopicMessage(topicMessagePayload));
store.dispatch(updateTopicMessagesPhase('consuming'));
store.dispatch(updateTopicMessagesMeta(topicMessagesMetaPayload));
});
it('returns messages', () => {
expect(selectors.getTopicMessges(store.getState())).toEqual([
topicMessagePayload,
]);
});
it('returns phase', () => {
expect(selectors.getTopicMessgesPhase(store.getState())).toEqual(
'consuming'
);
});
it('returns meta', () => {
expect(selectors.getTopicMessgesMeta(store.getState())).toEqual(
topicMessagesMetaPayload
);
});
});
});


@ -0,0 +1,46 @@
import { Action, TopicMessagesState } from 'redux/interfaces';
import { getType } from 'typesafe-actions';
import * as actions from 'redux/actions';
export const initialState: TopicMessagesState = {
messages: [],
meta: {
bytesConsumed: 0,
elapsedMs: 0,
messagesConsumed: 0,
isCancelled: false,
},
isFetching: false,
};
const reducer = (state = initialState, action: Action): TopicMessagesState => {
switch (action.type) {
case getType(actions.addTopicMessage): {
return {
...state,
messages: [...state.messages, action.payload],
};
}
case getType(actions.resetTopicMessages):
return initialState;
case getType(actions.updateTopicMessagesPhase):
return {
...state,
phase: action.payload,
};
case getType(actions.updateTopicMessagesMeta):
return {
...state,
meta: action.payload,
};
case getType(actions.setTopicMessagesFetchingStatus):
return {
...state,
isFetching: action.payload,
};
default:
return state;
}
};
export default reducer;
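Since the reducer is pure, a polling session can be replayed action by action; a quick sketch, assuming `partition`, `offset`, and `timestamp` are the only required `TopicMessage` fields:

```typescript
import reducer, { initialState } from 'redux/reducers/topicMessages/reducer';
import {
  addTopicMessage,
  setTopicMessagesFetchingStatus,
  updateTopicMessagesPhase,
} from 'redux/actions';

// Replay a short SSE session through the pure reducer.
let state = reducer(initialState, setTopicMessagesFetchingStatus(true));
state = reducer(state, updateTopicMessagesPhase('Polling'));
state = reducer(
  state,
  addTopicMessage({ partition: 0, offset: 1, timestamp: new Date() })
);
state = reducer(state, setTopicMessagesFetchingStatus(false));
// state.messages.length === 1; state.phase === 'Polling'; state.isFetching === false
```

Note that `addTopicMessage` appends rather than replaces, so messages survive phase and meta updates until `resetTopicMessages` restores `initialState`.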


@ -0,0 +1,25 @@
import { createSelector } from 'reselect';
import { RootState, TopicMessagesState } from 'redux/interfaces';
const topicMessagesState = ({ topicMessages }: RootState): TopicMessagesState =>
topicMessages;
export const getTopicMessges = createSelector(
topicMessagesState,
({ messages }) => messages
);
export const getTopicMessgesPhase = createSelector(
topicMessagesState,
({ phase }) => phase
);
export const getTopicMessgesMeta = createSelector(
topicMessagesState,
({ meta }) => meta
);
export const getIsTopicMessagesFetching = createSelector(
topicMessagesState,
({ isFetching }) => isFetching
);
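Typical consumption from a component — the component below is hypothetical and assumes the app wires views through react-redux hooks; the selector names (including their spelling) are the ones defined above:

```typescript
import React from 'react';
import { useSelector } from 'react-redux';
import {
  getIsTopicMessagesFetching,
  getTopicMessges,
  getTopicMessgesMeta,
} from 'redux/reducers/topicMessages/selectors';

// Hypothetical read-only status line over the topicMessages slice.
const MessagesStats: React.FC = () => {
  const messages = useSelector(getTopicMessges);
  const meta = useSelector(getTopicMessgesMeta);
  const isFetching = useSelector(getIsTopicMessagesFetching);

  return (
    <p>
      {messages.length} shown ({meta.messagesConsumed} consumed,{' '}
      {meta.bytesConsumed} bytes){isFetching && ', polling…'}
    </p>
  );
};

export default MessagesStats;
```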


@ -87,9 +87,7 @@ describe('topics reducer', () => {
});
it('delete topic messages on CLEAR_TOPIC_MESSAGES__SUCCESS', () => {
expect(
reducer(state, clearMessagesTopicAction.success(topic.name))
).toEqual(state);
expect(reducer(state, clearMessagesTopicAction.success())).toEqual(state);
});
});


@ -1,4 +1,3 @@
import { TopicMessage } from 'generated-sources';
import { Action, TopicsState } from 'redux/interfaces';
import { getType } from 'typesafe-actions';
import * as actions from 'redux/actions';
@ -8,37 +7,11 @@ export const initialState: TopicsState = {
byName: {},
allNames: [],
totalPages: 1,
messages: [],
search: '',
orderBy: null,
consumerGroups: [],
};
const transformTopicMessages = (
state: TopicsState,
messages: TopicMessage[]
): TopicsState => ({
...state,
messages: messages.map((mes) => {
const { content } = mes;
let parsedContent = content;
if (content) {
try {
parsedContent =
typeof content !== 'object' ? JSON.parse(content) : content;
} catch (err) {
// do nothing
}
}
return {
...mes,
content: parsedContent,
};
}),
});
const reducer = (state = initialState, action: Action): TopicsState => {
switch (action.type) {
case getType(actions.fetchTopicsListAction.success):
@ -48,8 +21,6 @@ const reducer = (state = initialState, action: Action): TopicsState => {
case getType(actions.fetchTopicConsumerGroupsAction.success):
case getType(actions.updateTopicAction.success):
return action.payload;
case getType(actions.fetchTopicMessagesAction.success):
return transformTopicMessages(state, action.payload);
case getType(actions.deleteTopicAction.success): {
const newState: TopicsState = { ...state };
delete newState.byName[action.payload];
@ -58,12 +29,6 @@ const reducer = (state = initialState, action: Action): TopicsState => {
);
return newState;
}
case getType(actions.clearMessagesTopicAction.success): {
return {
...state,
messages: [],
};
}
case getType(actions.setTopicsSearchAction): {
return {
...state,


@ -6,22 +6,19 @@ import {
TopicConfigByName,
} from 'redux/interfaces';
import { createFetchingSelector } from 'redux/reducers/loader/selectors';
import { Partition } from 'generated-sources';
const topicsState = ({ topics }: RootState): TopicsState => topics;
const getAllNames = (state: RootState) => topicsState(state).allNames;
const getTopicMap = (state: RootState) => topicsState(state).byName;
export const getTopicMessages = (state: RootState) =>
topicsState(state).messages;
export const getTopicListTotalPages = (state: RootState) =>
topicsState(state).totalPages;
const getTopicListFetchingStatus = createFetchingSelector('GET_TOPICS');
const getTopicDetailsFetchingStatus =
createFetchingSelector('GET_TOPIC_DETAILS');
const getTopicMessagesFetchingStatus =
createFetchingSelector('GET_TOPIC_MESSAGES');
const getTopicConfigFetchingStatus = createFetchingSelector('GET_TOPIC_CONFIG');
const getTopicCreationStatus = createFetchingSelector('POST_TOPIC');
const getTopicUpdateStatus = createFetchingSelector('PATCH_TOPIC');
@ -55,11 +52,6 @@ export const getIsTopicDetailsFetched = createSelector(
(status) => status === 'fetched'
);
export const getIsTopicMessagesFetched = createSelector(
getTopicMessagesFetchingStatus,
(status) => status === 'fetched'
);
export const getTopicConfigFetched = createSelector(
getTopicConfigFetchingStatus,
(status) => status === 'fetched'
@ -123,7 +115,7 @@ export const getTopicByName = createSelector(
export const getPartitionsByTopicName = createSelector(
getTopicMap,
getTopicName,
(topics, topicName) => topics[topicName].partitions as Partition[]
(topics, topicName) => topics[topicName]?.partitions || []
);
export const getFullTopic = createSelector(getTopicByName, (topic) =>
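The `getPartitionsByTopicName` change swaps an unsafe cast for graceful degradation; a sketch of the failure mode it closes, assuming `TopicWithDetailedInfo` is exported from `redux/interfaces`:

```typescript
import { TopicWithDetailedInfo } from 'redux/interfaces';

// byName is empty until the topic details request resolves.
const topics: Record<string, TopicWithDetailedInfo> = {};

// Old body: (topics['not-loaded'].partitions as Partition[]) throws a
// TypeError, since topics['not-loaded'] is undefined before the fetch lands.
// New body: falls back to an empty array instead of throwing.
const partitions = topics['not-loaded']?.partitions || [];
console.log(partitions.length); // 0
```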


@ -6,10 +6,8 @@
white-space: nowrap;
}
&-margin {
&-right {
margin-right: 10px;
}
&-text-nowrap {
white-space: nowrap;
}
&-content-overflow-ellipsis {
@ -56,6 +54,11 @@
}
}
.is-family-code {
font-size: .9rem;
line-height: 1.5em;
}
@keyframes fadein {
from { opacity: 0; }
to { opacity: 1; }