@@ -12,9 +12,8 @@ import com.provectus.kafka.ui.exception.ValidationException;
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.CreateTopicMessageDTO;
 import com.provectus.kafka.ui.model.KafkaCluster;
-import com.provectus.kafka.ui.model.MessageFilterTypeDTO;
 import com.provectus.kafka.ui.model.PollingModeDTO;
-import com.provectus.kafka.ui.model.SeekDirectionDTO;
+import com.provectus.kafka.ui.model.TopicMessageDTO;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.serde.api.Serde;
 import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
@@ -23,14 +22,16 @@ import com.provectus.kafka.ui.util.SslPropertiesUtil;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.util.Random;
 import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.function.Predicate;
 import java.util.function.UnaryOperator;
 import java.util.stream.Collectors;
 import javax.annotation.Nullable;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.kafka.clients.admin.OffsetSpec;
 import org.apache.kafka.clients.admin.TopicDescription;
 import org.apache.kafka.clients.producer.KafkaProducer;
@@ -41,7 +42,6 @@ import org.apache.kafka.common.TopicPartition;
 import org.apache.kafka.common.serialization.ByteArraySerializer;
 import org.springframework.stereotype.Service;
 import reactor.core.publisher.Flux;
-import reactor.core.publisher.FluxSink;
 import reactor.core.publisher.Mono;
 import reactor.core.scheduler.Schedulers;

@@ -57,6 +57,8 @@ public class MessagesService {
   private final DeserializationService deserializationService;
   private final ConsumerGroupService consumerGroupService;

+  private final Map<String, Predicate<TopicMessageDTO>> registeredFilters = new ConcurrentHashMap<>();
+
   private Mono<TopicDescription> withExistingTopic(KafkaCluster cluster, String topicName) {
     return adminClientService.get(cluster)
         .flatMap(client -> client.describeTopic(topicName))
@@ -137,69 +139,9 @@ public class MessagesService {
     }
   }

-  public Flux<TopicMessageEventDTO> loadMessages(KafkaCluster cluster, String topic,
-                                                 ConsumerPosition consumerPosition,
-                                                 @Nullable String query,
-                                                 MessageFilterTypeDTO filterQueryType,
-                                                 int limit,
-                                                 SeekDirectionDTO seekDirection,
-                                                 @Nullable String keySerde,
-                                                 @Nullable String valueSerde) {
-    return withExistingTopic(cluster, topic)
-        .flux()
-        .publishOn(Schedulers.boundedElastic())
-        .flatMap(td -> loadMessagesImpl(cluster, topic, consumerPosition, query,
-            filterQueryType, limit, seekDirection, keySerde, valueSerde));
-  }
-
-  private Flux<TopicMessageEventDTO> loadMessagesImpl(KafkaCluster cluster,
-                                                      String topic,
-                                                      ConsumerPosition consumerPosition,
-                                                      @Nullable String query,
-                                                      MessageFilterTypeDTO filterQueryType,
-                                                      int limit,
-                                                      SeekDirectionDTO seekDirection,
-                                                      @Nullable String keySerde,
-                                                      @Nullable String valueSerde) {
-
-    java.util.function.Consumer<? super FluxSink<TopicMessageEventDTO>> emitter;
-    ConsumerRecordDeserializer recordDeserializer =
-        deserializationService.deserializerFor(cluster, topic, keySerde, valueSerde);
-    if (seekDirection.equals(SeekDirectionDTO.FORWARD)) {
-      emitter = new ForwardRecordEmitter(
-          () -> consumerGroupService.createConsumer(cluster),
-          consumerPosition,
-          recordDeserializer,
-          cluster.getPollingSettings()
-      );
-    } else if (seekDirection.equals(SeekDirectionDTO.BACKWARD)) {
-      emitter = new BackwardRecordEmitter(
-          () -> consumerGroupService.createConsumer(cluster),
-          consumerPosition,
-          limit,
-          recordDeserializer,
-          cluster.getPollingSettings()
-      );
-    } else {
-      emitter = new TailingEmitter(
-          () -> consumerGroupService.createConsumer(cluster),
-          consumerPosition,
-          recordDeserializer,
-          cluster.getPollingSettings()
-      );
-    }
-    MessageFilterStats filterStats = new MessageFilterStats();
-    return Flux.create(emitter)
-        .contextWrite(ctx -> ctx.put(MessageFilterStats.class, filterStats))
-        .filter(getMsgFilter(query, filterQueryType, filterStats))
-        .map(getDataMasker(cluster, topic))
-        .takeWhile(createTakeWhilePredicate(seekDirection, limit))
-        .map(throttleUiPublish(seekDirection));
-  }
-
   public Flux<TopicMessageEventDTO> loadMessagesV2(KafkaCluster cluster,
                                                    String topic,
-                                                   PollingModeDTO pollingMode,
+                                                   ConsumerPosition position,
                                                    @Nullable String query,
                                                    @Nullable String filterId,
                                                    int limit,
@@ -208,58 +150,55 @@ public class MessagesService {
     return withExistingTopic(cluster, topic)
         .flux()
         .publishOn(Schedulers.boundedElastic())
-        .flatMap(td -> loadMessagesImplV2(cluster, topic, consumerPosition, query,
-            filterQueryType, limit, seekDirection, keySerde, valueSerde));
+        .flatMap(td -> loadMessagesImplV2(cluster, topic, position, query, filterId, limit, keySerde, valueSerde));
   }

   private Flux<TopicMessageEventDTO> loadMessagesImplV2(KafkaCluster cluster,
                                                         String topic,
                                                         ConsumerPosition consumerPosition,
                                                         @Nullable String query,
-                                                        MessageFilterTypeDTO filterQueryType,
+                                                        @Nullable String filterId,
                                                         int limit,
-                                                        SeekDirectionDTO seekDirection,
                                                         @Nullable String keySerde,
                                                         @Nullable String valueSerde) {

-    java.util.function.Consumer<? super FluxSink<TopicMessageEventDTO>> emitter;
     ConsumerRecordDeserializer recordDeserializer =
         deserializationService.deserializerFor(cluster, topic, keySerde, valueSerde);
-    if (seekDirection.equals(SeekDirectionDTO.FORWARD)) {
-      emitter = new ForwardRecordEmitter(
+
+    var emitter = switch (consumerPosition.pollingMode()) {
+      case TO_OFFSET, TO_TIMESTAMP, LATEST -> new BackwardRecordEmitter(
           () -> consumerGroupService.createConsumer(cluster),
           consumerPosition,
+          limit,
           recordDeserializer,
-          cluster.getThrottler().get()
+          cluster.getPollingSettings()
       );
-    } else if (seekDirection.equals(SeekDirectionDTO.BACKWARD)) {
-      emitter = new BackwardRecordEmitter(
+      case FROM_OFFSET, FROM_TIMESTAMP, EARLIEST -> new ForwardRecordEmitter(
           () -> consumerGroupService.createConsumer(cluster),
           consumerPosition,
-          limit,
           recordDeserializer,
-          cluster.getThrottler().get()
+          cluster.getPollingSettings()
       );
-    } else {
-      emitter = new TailingEmitter(
+      case TAILING -> new TailingEmitter(
           () -> consumerGroupService.createConsumer(cluster),
           consumerPosition,
           recordDeserializer,
-          cluster.getThrottler().get()
+          cluster.getPollingSettings()
       );
-    }
+    };
+
     MessageFilterStats filterStats = new MessageFilterStats();
     return Flux.create(emitter)
         .contextWrite(ctx -> ctx.put(MessageFilterStats.class, filterStats))
-        .filter(getMsgFilter(query, filterQueryType, filterStats))
+        .filter(getMsgFilter(query, filterId, filterStats))
         .map(getDataMasker(cluster, topic))
-        .takeWhile(createTakeWhilePredicate(seekDirection, limit))
-        .map(throttleUiPublish(seekDirection));
+        .takeWhile(createTakeWhilePredicate(consumerPosition.pollingMode(), limit))
+        .map(throttleUiPublish(consumerPosition.pollingMode()));
   }

   private Predicate<TopicMessageEventDTO> createTakeWhilePredicate(
-      SeekDirectionDTO seekDirection, int limit) {
-    return seekDirection == SeekDirectionDTO.TAILING
+      PollingModeDTO pollingMode, int limit) {
+    return pollingMode == PollingModeDTO.TAILING
         ? evt -> true // no limit for tailing
         : new ResultSizeLimiter(limit);
   }
@@ -278,21 +217,35 @@ public class MessagesService {
     };
   }

-  private Predicate<TopicMessageEventDTO> getMsgFilter(String query,
-                                                       MessageFilterTypeDTO filterQueryType,
+  public String registerMessageFilter(String groovyCode) {
+    var filter = MessageFilters.groovyScriptFilter(groovyCode);
+    var id = RandomStringUtils.random(10, true, true);
+    registeredFilters.put(id, filter);
+    return id;
+  }
+
+  private Predicate<TopicMessageEventDTO> getMsgFilter(@Nullable String containsStrFilter,
+                                                       @Nullable String filterId,
                                                        MessageFilterStats filterStats) {
-    if (StringUtils.isEmpty(query)) {
-      return evt -> true;
+    Predicate<TopicMessageDTO> messageFilter = e -> true;
+    if (containsStrFilter != null) {
+      messageFilter = MessageFilters.containsStringFilter(containsStrFilter);
+    }
+    if (filterId != null) {
+      messageFilter = registeredFilters.get(filterId);
+      if (messageFilter == null) {
+        throw new ValidationException("No filter was registered with id " + filterId);
+      }
     }
-    var messageFilter = MessageFilters.createMsgFilter(query, filterQueryType);
+    Predicate<TopicMessageDTO> finalMessageFilter = messageFilter;
     return evt -> {
       // we only apply filter for message events
       if (evt.getType() == TopicMessageEventDTO.TypeEnum.MESSAGE) {
         try {
-          return messageFilter.test(evt.getMessage());
+          return finalMessageFilter.test(evt.getMessage());
         } catch (Exception e) {
           filterStats.incrementApplyErrors();
-          log.trace("Error applying filter '{}' for message {}", query, evt.getMessage());
+          log.trace("Error applying filter for message {}", evt.getMessage());
           return false;
         }
       }
@@ -300,8 +253,8 @@ public class MessagesService {
     };
   }

-  private <T> UnaryOperator<T> throttleUiPublish(SeekDirectionDTO seekDirection) {
-    if (seekDirection == SeekDirectionDTO.TAILING) {
+  private <T> UnaryOperator<T> throttleUiPublish(PollingModeDTO pollingMode) {
+    if (pollingMode == PollingModeDTO.TAILING) {
       RateLimiter rateLimiter = RateLimiter.create(TAILING_UI_MESSAGE_THROTTLE_RATE);
       return m -> {
         rateLimiter.acquire(1);
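
Note (not part of the diff): the change above replaces the per-request query/filter-type parameters with an id-keyed registry of precompiled message filters. The stand-alone Java sketch below illustrates only that registry pattern in isolation; the class and method names in it are invented for illustration, and only the ConcurrentHashMap-backed map and RandomStringUtils.random(10, true, true) mirror the actual diff.

// Illustrative sketch of the id-keyed filter registry; names here are hypothetical.
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Predicate;
import org.apache.commons.lang3.RandomStringUtils;

public class FilterRegistrySketch {

  // Filters are compiled once and stored under a random alphanumeric id,
  // mirroring MessagesService.registeredFilters (String predicate stands in
  // for Predicate<TopicMessageDTO>).
  private final Map<String, Predicate<String>> registeredFilters = new ConcurrentHashMap<>();

  public String registerFilter(Predicate<String> filter) {
    // 10-character id of letters and digits, as in RandomStringUtils.random(10, true, true)
    String id = RandomStringUtils.random(10, true, true);
    registeredFilters.put(id, filter);
    return id;
  }

  public Predicate<String> lookup(String filterId) {
    Predicate<String> filter = registeredFilters.get(filterId);
    if (filter == null) {
      // The real service throws ValidationException here
      throw new IllegalArgumentException("No filter was registered with id " + filterId);
    }
    return filter;
  }

  public static void main(String[] args) {
    FilterRegistrySketch registry = new FilterRegistrySketch();
    String id = registry.registerFilter(v -> v.contains("error"));
    System.out.println(registry.lookup(id).test("error in payload")); // prints: true
  }
}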