Merge branch 'master' into ISSUE_754_acl

commit 931b3d1fa8
Ilya Kuramshin, 2023-03-27 13:16:04 +04:00, committed by GitHub
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
114 changed files with 1528 additions and 2835 deletions

@@ -31,7 +31,7 @@ jobs:
echo "Packer will be triggered in this dir $WORK_DIR"
- name: Configure AWS credentials for Kafka-UI account
uses: aws-actions/configure-aws-credentials@v1-node16
uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_AMI_PUBLISH_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_AMI_PUBLISH_KEY_SECRET }}

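The configure-aws-credentials bump from v1-node16 to v2 recurs across the workflow files below; for clarity, here is the updated step reassembled from the lines shown in this hunk (the hunk is truncated, so fields outside it, such as aws-region, are omitted rather than guessed):

- name: Configure AWS credentials for Kafka-UI account
  uses: aws-actions/configure-aws-credentials@v2
  with:
    aws-access-key-id: ${{ secrets.AWS_AMI_PUBLISH_KEY_ID }}
    aws-secret-access-key: ${{ secrets.AWS_AMI_PUBLISH_KEY_SECRET }}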
@@ -9,9 +9,9 @@ jobs:
if: ${{ github.event.label.name == 'status/feature_testing' || github.event.label.name == 'status/feature_testing_public' }}
runs-on: ubuntu-latest
steps:
- uses: ./.github/workflows/build-template.yaml
- uses: actions/checkout@v3
with:
APP_VERSION: $GITHUB_SHA
ref: ${{ github.event.pull_request.head.sha }}
- name: get branch name
id: extract_branch
run: |
@@ -19,6 +19,19 @@ jobs:
echo "tag=${tag}" >> $GITHUB_OUTPUT
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Set up JDK
uses: actions/setup-java@v3
with:
java-version: '17'
distribution: 'zulu'
cache: 'maven'
- name: Build
id: build
run: |
./mvnw -B -ntp versions:set -DnewVersion=$GITHUB_SHA
./mvnw -B -V -ntp clean package -Pprod -DskipTests
export VERSION=$(./mvnw -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
echo "version=${VERSION}" >> $GITHUB_OUTPUT
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
@@ -32,7 +45,7 @@ jobs:
restore-keys: |
${{ runner.os }}-buildx-
- name: Configure AWS credentials for Kafka-UI account
uses: aws-actions/configure-aws-credentials@v1-node16
uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

@@ -42,7 +42,7 @@ jobs:
restore-keys: |
${{ runner.os }}-buildx-
- name: Configure AWS credentials for Kafka-UI account
uses: aws-actions/configure-aws-credentials@v1-node16
uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

@@ -1,32 +0,0 @@
name: Maven build template
on:
workflow_call:
inputs:
APP_VERSION:
required: true
type: string
jobs:
build:
runs-on: ubuntu-latest
outputs:
version: ${{steps.build.outputs.version}}
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- run: |
git config user.name github-actions
git config user.email github-actions@github.com
- name: Set up JDK
uses: actions/setup-java@v3
with:
java-version: '17'
distribution: 'zulu'
cache: 'maven'
- name: Build
id: build
run: |
./mvnw -B -ntp versions:set -DnewVersion=${{ inputs.APP_VERSION }}
./mvnw -B -V -ntp clean package -Pprod -DskipTests
export VERSION=$(./mvnw -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
echo "version=${VERSION}" >> $GITHUB_OUTPUT

@@ -15,7 +15,7 @@ jobs:
tag='${{ github.event.pull_request.number }}'
echo "tag=${tag}" >> $GITHUB_OUTPUT
- name: Configure AWS credentials for Kafka-UI account
uses: aws-actions/configure-aws-credentials@v1-node16
uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

@@ -23,6 +23,12 @@ jobs:
- uses: actions/checkout@v3
with:
ref: ${{ github.sha }}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: eu-central-1
- name: Set up environment
id: set_env_values
run: |
@@ -65,8 +71,6 @@ jobs:
if: always()
env:
AWS_S3_BUCKET: 'kafkaui-allure-reports'
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_REGION: 'eu-central-1'
SOURCE_DIR: 'allure-history/allure-results'
- name: Deploy report to Amazon S3

@@ -16,7 +16,7 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Configure AWS credentials for Kafka-UI account
uses: aws-actions/configure-aws-credentials@v1-node16
uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

@@ -10,6 +10,12 @@ jobs:
- uses: actions/checkout@v3
with:
ref: ${{ github.sha }}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: eu-central-1
- name: Set up environment
id: set_env_values
run: |
@@ -52,8 +58,6 @@ jobs:
if: always()
env:
AWS_S3_BUCKET: 'kafkaui-allure-reports'
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_REGION: 'eu-central-1'
SOURCE_DIR: 'allure-history/allure-results'
- name: Deploy report to Amazon S3

@@ -47,7 +47,7 @@ jobs:
restore-keys: |
${{ runner.os }}-buildx-
- name: Configure AWS credentials for Kafka-UI account
uses: aws-actions/configure-aws-credentials@v1-node16
uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

@@ -26,7 +26,7 @@ jobs:
echo "Terraform will be triggered in this dir $TF_DIR"
- name: Configure AWS credentials for Kafka-UI account
uses: aws-actions/configure-aws-credentials@v1-node16
uses: aws-actions/configure-aws-credentials@v2
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

@@ -27,6 +27,8 @@ public class ClustersProperties {
String internalTopicPrefix;
PollingProperties polling = new PollingProperties();
@Data
public static class Cluster {
String name;
@@ -49,6 +51,13 @@ public class ClustersProperties {
TruststoreConfig ssl;
}
@Data
public static class PollingProperties {
Integer pollTimeoutMs;
Integer partitionPollTimeout;
Integer noDataEmptyPolls;
}
@Data
@ToString(exclude = "password")
public static class MetricsConfigData {

@@ -1,25 +1,12 @@
package com.provectus.kafka.ui.config;
import lombok.AllArgsConstructor;
import org.springframework.boot.autoconfigure.web.ServerProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import org.springframework.core.io.ClassPathResource;
import org.springframework.util.StringUtils;
import org.springframework.web.reactive.config.CorsRegistry;
import org.springframework.web.reactive.config.WebFluxConfigurer;
import org.springframework.web.reactive.function.server.RouterFunction;
import org.springframework.web.reactive.function.server.RouterFunctions;
import org.springframework.web.reactive.function.server.ServerResponse;
@Configuration
@Profile("local")
@AllArgsConstructor
public class CorsGlobalConfiguration implements WebFluxConfigurer {
private final ServerProperties serverProperties;
@Override
public void addCorsMappings(CorsRegistry registry) {
registry.addMapping("/**")
@@ -28,31 +15,4 @@ public class CorsGlobalConfiguration implements WebFluxConfigurer {
.allowedHeaders("*")
.allowCredentials(false);
}
private String withContext(String pattern) {
final String basePath = serverProperties.getServlet().getContextPath();
if (StringUtils.hasText(basePath)) {
return basePath + pattern;
} else {
return pattern;
}
}
@Bean
public RouterFunction<ServerResponse> cssFilesRouter() {
return RouterFunctions
.resources(withContext("/static/css/**"), new ClassPathResource("static/static/css/"));
}
@Bean
public RouterFunction<ServerResponse> jsFilesRouter() {
return RouterFunctions
.resources(withContext("/static/js/**"), new ClassPathResource("static/static/js/"));
}
@Bean
public RouterFunction<ServerResponse> mediaFilesRouter() {
return RouterFunctions
.resources(withContext("/static/media/**"), new ClassPathResource("static/static/media/"));
}
}

@@ -4,7 +4,6 @@ import com.provectus.kafka.ui.model.TopicMessageDTO;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import com.provectus.kafka.ui.model.TopicMessagePhaseDTO;
import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
import com.provectus.kafka.ui.util.PollingThrottler;
import java.time.Duration;
import java.time.Instant;
import org.apache.kafka.clients.consumer.Consumer;
@@ -14,27 +13,21 @@ import org.apache.kafka.common.utils.Bytes;
import reactor.core.publisher.FluxSink;
public abstract class AbstractEmitter {
private static final Duration DEFAULT_POLL_TIMEOUT_MS = Duration.ofMillis(1000L);
// In some situations it is hard to say whether a range of records (between two offsets) was fully polled.
// This happens when there are holes in the record sequence, which is a usual case for compacted topics or
// topics with transactional writes. In such cases, if you want to poll all records between offsets X and Y,
// there is no guarantee that you will ever see a record with offset Y.
// To work around this, we can assume that after N consecutive empty polls all target messages were read.
public static final int NO_MORE_DATA_EMPTY_POLLS_COUNT = 3;
private final ConsumerRecordDeserializer recordDeserializer;
private final ConsumingStats consumingStats = new ConsumingStats();
private final PollingThrottler throttler;
protected final PollingSettings pollingSettings;
protected AbstractEmitter(ConsumerRecordDeserializer recordDeserializer, PollingThrottler throttler) {
protected AbstractEmitter(ConsumerRecordDeserializer recordDeserializer, PollingSettings pollingSettings) {
this.recordDeserializer = recordDeserializer;
this.throttler = throttler;
this.pollingSettings = pollingSettings;
this.throttler = pollingSettings.getPollingThrottler();
}
protected ConsumerRecords<Bytes, Bytes> poll(
FluxSink<TopicMessageEventDTO> sink, Consumer<Bytes, Bytes> consumer) {
return poll(sink, consumer, DEFAULT_POLL_TIMEOUT_MS);
return poll(sink, consumer, pollingSettings.getPollTimeout());
}
protected ConsumerRecords<Bytes, Bytes> poll(

@@ -3,15 +3,12 @@ package com.provectus.kafka.ui.emitter;
import com.provectus.kafka.ui.model.ConsumerPosition;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
import com.provectus.kafka.ui.util.PollingThrottler;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.TreeMap;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
@@ -26,8 +23,6 @@ public class BackwardRecordEmitter
extends AbstractEmitter
implements java.util.function.Consumer<FluxSink<TopicMessageEventDTO>> {
private static final Duration POLL_TIMEOUT = Duration.ofMillis(200);
private final Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier;
private final ConsumerPosition consumerPosition;
private final int messagesPerPage;
@@ -37,8 +32,8 @@ public class BackwardRecordEmitter
ConsumerPosition consumerPosition,
int messagesPerPage,
ConsumerRecordDeserializer recordDeserializer,
PollingThrottler throttler) {
super(recordDeserializer, throttler);
PollingSettings pollingSettings) {
super(recordDeserializer, pollingSettings);
this.consumerPosition = consumerPosition;
this.messagesPerPage = messagesPerPage;
this.consumerSupplier = consumerSupplier;
@@ -109,17 +104,18 @@ public class BackwardRecordEmitter
var recordsToSend = new ArrayList<ConsumerRecord<Bytes, Bytes>>();
// we use empty-polls counting to verify that the partition was fully read
for (int emptyPolls = 0; recordsToSend.size() < desiredMsgsToPoll && emptyPolls < NO_MORE_DATA_EMPTY_POLLS_COUNT;) {
var polledRecords = poll(sink, consumer, POLL_TIMEOUT);
log.debug("{} records polled from {}", polledRecords.count(), tp);
EmptyPollsCounter emptyPolls = pollingSettings.createEmptyPollsCounter();
while (!sink.isCancelled()
&& recordsToSend.size() < desiredMsgsToPoll
&& !emptyPolls.noDataEmptyPollsReached()) {
var polledRecords = poll(sink, consumer, pollingSettings.getPartitionPollTimeout());
emptyPolls.count(polledRecords);
// counting sequential empty polls
emptyPolls = polledRecords.isEmpty() ? emptyPolls + 1 : 0;
log.debug("{} records polled from {}", polledRecords.count(), tp);
var filteredRecords = polledRecords.records(tp).stream()
.filter(r -> r.offset() < toOffset)
.collect(Collectors.toList());
.toList();
if (!polledRecords.isEmpty() && filteredRecords.isEmpty()) {
// we already read all messages in target offsets interval

@@ -0,0 +1,28 @@
package com.provectus.kafka.ui.emitter;
import org.apache.kafka.clients.consumer.ConsumerRecords;
// In some situations it is hard to say whether a range of records (between two offsets) was fully polled.
// This happens when there are holes in the record sequence, which is a usual case for compacted topics or
// topics with transactional writes. In such cases, if you want to poll all records between offsets X and Y,
// there is no guarantee that you will ever see a record with offset Y.
// To work around this, we can assume that after N consecutive empty polls all target messages were read.
public class EmptyPollsCounter {
private final int maxEmptyPolls;
private int emptyPolls = 0;
EmptyPollsCounter(int maxEmptyPolls) {
this.maxEmptyPolls = maxEmptyPolls;
}
public void count(ConsumerRecords<?, ?> polled) {
emptyPolls = polled.isEmpty() ? emptyPolls + 1 : 0;
}
public boolean noDataEmptyPollsReached() {
return emptyPolls >= maxEmptyPolls;
}
}

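A minimal usage sketch of the counter's contract, assuming a threshold of three and a plain Kafka consumer (the helper, loop body, and timeout are illustrative, not part of this diff):

import java.time.Duration;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.common.utils.Bytes;

// Hypothetical helper: keep polling until 3 consecutive polls return no data.
static void drainUntilExhausted(Consumer<Bytes, Bytes> consumer) {
  EmptyPollsCounter emptyPolls = new EmptyPollsCounter(3); // assumed threshold
  while (!emptyPolls.noDataEmptyPollsReached()) {
    ConsumerRecords<Bytes, Bytes> polled = consumer.poll(Duration.ofMillis(200));
    emptyPolls.count(polled); // resets to 0 on a non-empty poll, increments otherwise
    // process polled records here
  }
}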
@@ -3,7 +3,6 @@ package com.provectus.kafka.ui.emitter;
import com.provectus.kafka.ui.model.ConsumerPosition;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
import com.provectus.kafka.ui.util.PollingThrottler;
import java.util.function.Supplier;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
@@ -25,8 +24,8 @@ public class ForwardRecordEmitter
Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
ConsumerPosition position,
ConsumerRecordDeserializer recordDeserializer,
PollingThrottler throttler) {
super(recordDeserializer, throttler);
PollingSettings pollingSettings) {
super(recordDeserializer, pollingSettings);
this.position = position;
this.consumerSupplier = consumerSupplier;
}
@@ -39,16 +38,16 @@ public class ForwardRecordEmitter
var seekOperations = SeekOperations.create(consumer, position);
seekOperations.assignAndSeekNonEmptyPartitions();
// we use empty-polls counting to verify that the topic was fully read
int emptyPolls = 0;
EmptyPollsCounter emptyPolls = pollingSettings.createEmptyPollsCounter();
while (!sink.isCancelled()
&& !seekOperations.assignedPartitionsFullyPolled()
&& emptyPolls < NO_MORE_DATA_EMPTY_POLLS_COUNT) {
&& !emptyPolls.noDataEmptyPollsReached()) {
sendPhase(sink, "Polling");
ConsumerRecords<Bytes, Bytes> records = poll(sink, consumer);
emptyPolls.count(records);
log.debug("{} records polled", records.count());
emptyPolls = records.isEmpty() ? emptyPolls + 1 : 0;
for (ConsumerRecord<Bytes, Bytes> msg : records) {
if (!sink.isCancelled()) {

@@ -0,0 +1,79 @@
package com.provectus.kafka.ui.emitter;
import com.provectus.kafka.ui.config.ClustersProperties;
import java.time.Duration;
import java.util.Optional;
import java.util.function.Supplier;
public class PollingSettings {
private static final Duration DEFAULT_POLL_TIMEOUT = Duration.ofMillis(1_000);
private static final Duration DEFAULT_PARTITION_POLL_TIMEOUT = Duration.ofMillis(200);
private static final int DEFAULT_NO_DATA_EMPTY_POLLS = 3;
private final Duration pollTimeout;
private final Duration partitionPollTimeout;
private final int noDataEmptyPolls; // see EmptyPollsCounter docs
private final Supplier<PollingThrottler> throttlerSupplier;
public static PollingSettings create(ClustersProperties.Cluster cluster,
ClustersProperties clustersProperties) {
var pollingProps = Optional.ofNullable(clustersProperties.getPolling())
.orElseGet(ClustersProperties.PollingProperties::new);
var pollTimeout = pollingProps.getPollTimeoutMs() != null
? Duration.ofMillis(pollingProps.getPollTimeoutMs())
: DEFAULT_POLL_TIMEOUT;
var partitionPollTimeout = pollingProps.getPartitionPollTimeout() != null
? Duration.ofMillis(pollingProps.getPartitionPollTimeout())
: Duration.ofMillis(pollTimeout.toMillis() / 5);
int noDataEmptyPolls = pollingProps.getNoDataEmptyPolls() != null
? pollingProps.getNoDataEmptyPolls()
: DEFAULT_NO_DATA_EMPTY_POLLS;
return new PollingSettings(
pollTimeout,
partitionPollTimeout,
noDataEmptyPolls,
PollingThrottler.throttlerSupplier(cluster)
);
}
public static PollingSettings createDefault() {
return new PollingSettings(
DEFAULT_POLL_TIMEOUT,
DEFAULT_PARTITION_POLL_TIMEOUT,
DEFAULT_NO_DATA_EMPTY_POLLS,
PollingThrottler::noop
);
}
private PollingSettings(Duration pollTimeout,
Duration partitionPollTimeout,
int noDataEmptyPolls,
Supplier<PollingThrottler> throttlerSupplier) {
this.pollTimeout = pollTimeout;
this.partitionPollTimeout = partitionPollTimeout;
this.noDataEmptyPolls = noDataEmptyPolls;
this.throttlerSupplier = throttlerSupplier;
}
public EmptyPollsCounter createEmptyPollsCounter() {
return new EmptyPollsCounter(noDataEmptyPolls);
}
public Duration getPollTimeout() {
return pollTimeout;
}
public Duration getPartitionPollTimeout() {
return partitionPollTimeout;
}
public PollingThrottler getPollingThrottler() {
return throttlerSupplier.get();
}
}

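Note that when partitionPollTimeout is not configured, create() derives it as a fifth of the effective poll timeout rather than falling back to DEFAULT_PARTITION_POLL_TIMEOUT. A small sketch of the resolved values under assumed inputs (cluster and clustersProperties are placeholders):

// Assuming pollTimeoutMs: 2000 is configured and the other two settings are absent:
PollingSettings settings = PollingSettings.create(cluster, clustersProperties);
settings.getPollTimeout();          // 2000 ms, taken from config
settings.getPartitionPollTimeout(); // 400 ms, derived as pollTimeout / 5
settings.createEmptyPollsCounter(); // trips after 3 empty polls (DEFAULT_NO_DATA_EMPTY_POLLS)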
@@ -1,8 +1,9 @@
package com.provectus.kafka.ui.util;
package com.provectus.kafka.ui.emitter;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.util.concurrent.RateLimiter;
import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.util.ConsumerRecordsUtil;
import java.util.function.Supplier;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecords;

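Only the package move and imports of PollingThrottler are shown; its body is unchanged by this commit. As a hedged sketch of the contract implied by the imports and by the call sites elsewhere in this diff (the rate value and the size-helper name are assumptions, not the actual implementation):

// Sketch: acquire RateLimiter permits proportional to the polled batch size,
// so a per-cluster bytes-per-second cap smooths consumption.
private final RateLimiter rateLimiter = RateLimiter.create(1_000_000); // assumed 1 MB/s

public void throttleAfterPoll(ConsumerRecords<?, ?> polled) {
  int polledBytes = ConsumerRecordsUtil.calculatePolledSize(polled); // hypothetical helper name
  if (polledBytes > 0) {
    rateLimiter.acquire(polledBytes); // blocks until the byte budget is available
  }
}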
@@ -1,4 +1,4 @@
package com.provectus.kafka.ui.util;
package com.provectus.kafka.ui.emitter;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import java.util.concurrent.atomic.AtomicInteger;

@@ -3,7 +3,6 @@ package com.provectus.kafka.ui.emitter;
import com.provectus.kafka.ui.model.ConsumerPosition;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
import com.provectus.kafka.ui.util.PollingThrottler;
import java.util.HashMap;
import java.util.function.Supplier;
import lombok.extern.slf4j.Slf4j;
@@ -22,8 +21,8 @@ public class TailingEmitter extends AbstractEmitter
public TailingEmitter(Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
ConsumerPosition consumerPosition,
ConsumerRecordDeserializer recordDeserializer,
PollingThrottler throttler) {
super(recordDeserializer, throttler);
PollingSettings pollingSettings) {
super(recordDeserializer, pollingSettings);
this.consumerSupplier = consumerSupplier;
this.consumerPosition = consumerPosition;
}

@@ -2,14 +2,13 @@ package com.provectus.kafka.ui.model;
import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
import com.provectus.kafka.ui.emitter.PollingSettings;
import com.provectus.kafka.ui.service.ksql.KsqlApiClient;
import com.provectus.kafka.ui.service.masking.DataMasking;
import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
import com.provectus.kafka.ui.util.PollingThrottler;
import com.provectus.kafka.ui.util.ReactiveFailover;
import java.util.Map;
import java.util.Properties;
import java.util.function.Supplier;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
@@ -28,7 +27,7 @@ public class KafkaCluster {
private final boolean readOnly;
private final MetricsConfig metricsConfig;
private final DataMasking masking;
private final Supplier<PollingThrottler> throttler;
private final PollingSettings pollingSettings;
private final ReactiveFailover<KafkaSrClientApi> schemaRegistryClient;
private final Map<String, ReactiveFailover<KafkaConnectClientApi>> connectsClients;
private final ReactiveFailover<KsqlApiClient> ksqlClient;

@@ -14,7 +14,7 @@ public class ClustersStorage {
public ClustersStorage(ClustersProperties properties, KafkaClusterFactory factory) {
var builder = ImmutableMap.<String, KafkaCluster>builder();
properties.getClusters().forEach(c -> builder.put(c.getName(), factory.create(c)));
properties.getClusters().forEach(c -> builder.put(c.getName(), factory.create(properties, c)));
this.kafkaClusters = builder.build();
}

@@ -3,6 +3,7 @@ package com.provectus.kafka.ui.service;
import com.provectus.kafka.ui.client.RetryingKafkaConnectClient;
import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
import com.provectus.kafka.ui.emitter.PollingSettings;
import com.provectus.kafka.ui.model.ApplicationPropertyValidationDTO;
import com.provectus.kafka.ui.model.ClusterConfigValidationDTO;
import com.provectus.kafka.ui.model.KafkaCluster;
@@ -12,7 +13,6 @@ import com.provectus.kafka.ui.service.masking.DataMasking;
import com.provectus.kafka.ui.sr.ApiClient;
import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
import com.provectus.kafka.ui.util.KafkaServicesValidation;
import com.provectus.kafka.ui.util.PollingThrottler;
import com.provectus.kafka.ui.util.ReactiveFailover;
import com.provectus.kafka.ui.util.WebClientConfigurator;
import java.util.HashMap;
@@ -41,7 +41,8 @@ public class KafkaClusterFactory {
@Value("${webclient.max-in-memory-buffer-size:20MB}")
private DataSize maxBuffSize;
public KafkaCluster create(ClustersProperties.Cluster clusterProperties) {
public KafkaCluster create(ClustersProperties properties,
ClustersProperties.Cluster clusterProperties) {
KafkaCluster.KafkaClusterBuilder builder = KafkaCluster.builder();
builder.name(clusterProperties.getName());
@@ -49,7 +50,7 @@ public class KafkaClusterFactory {
builder.properties(convertProperties(clusterProperties.getProperties()));
builder.readOnly(clusterProperties.isReadOnly());
builder.masking(DataMasking.create(clusterProperties.getMasking()));
builder.throttler(PollingThrottler.throttlerSupplier(clusterProperties));
builder.pollingSettings(PollingSettings.create(clusterProperties, properties));
if (schemaRegistryConfigured(clusterProperties)) {
builder.schemaRegistryClient(schemaRegistryClient(clusterProperties));

@@ -5,6 +5,7 @@ import com.provectus.kafka.ui.emitter.BackwardRecordEmitter;
import com.provectus.kafka.ui.emitter.ForwardRecordEmitter;
import com.provectus.kafka.ui.emitter.MessageFilterStats;
import com.provectus.kafka.ui.emitter.MessageFilters;
import com.provectus.kafka.ui.emitter.ResultSizeLimiter;
import com.provectus.kafka.ui.emitter.TailingEmitter;
import com.provectus.kafka.ui.exception.TopicNotFoundException;
import com.provectus.kafka.ui.exception.ValidationException;
@@ -17,7 +18,6 @@ import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import com.provectus.kafka.ui.serde.api.Serde;
import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
import com.provectus.kafka.ui.serdes.ProducerRecordCreator;
import com.provectus.kafka.ui.util.ResultSizeLimiter;
import com.provectus.kafka.ui.util.SslPropertiesUtil;
import java.util.List;
import java.util.Map;
@@ -169,7 +169,7 @@ public class MessagesService {
() -> consumerGroupService.createConsumer(cluster),
consumerPosition,
recordDeserializer,
cluster.getThrottler().get()
cluster.getPollingSettings()
);
} else if (seekDirection.equals(SeekDirectionDTO.BACKWARD)) {
emitter = new BackwardRecordEmitter(
@@ -177,14 +177,14 @@
consumerPosition,
limit,
recordDeserializer,
cluster.getThrottler().get()
cluster.getPollingSettings()
);
} else {
emitter = new TailingEmitter(
() -> consumerGroupService.createConsumer(cluster),
consumerPosition,
recordDeserializer,
cluster.getThrottler().get()
cluster.getPollingSettings()
);
}
MessageFilterStats filterStats = new MessageFilterStats();

@@ -228,17 +228,24 @@ public class ReactiveAdminClient implements Closeable {
.map(brokerId -> new ConfigResource(ConfigResource.Type.BROKER, Integer.toString(brokerId)))
.collect(toList());
return toMono(client.describeConfigs(resources).all())
// some kafka backends (like MSK Serverless) do not support broker config retrieval,
// in which case an InvalidRequestException will be thrown
.onErrorResume(InvalidRequestException.class, th -> {
log.trace("Error while getting broker {} configs", brokerIds, th);
return Mono.just(Map.of());
})
// some kafka backends don't support broker config retrieval
// and throw various exceptions on the describeConfigs() call
.onErrorResume(th -> th instanceof InvalidRequestException // MSK Serverless
|| th instanceof UnknownTopicOrPartitionException, // Azure event hub
th -> {
log.trace("Error while getting configs for brokers {}", brokerIds, th);
return Mono.just(Map.of());
})
// there are situations when the kafka-ui user has no DESCRIBE_CONFIGS permission on the cluster
.onErrorResume(ClusterAuthorizationException.class, th -> {
log.trace("AuthorizationException while getting configs for brokers {}", brokerIds, th);
return Mono.just(Map.of());
})
// catching all remaining exceptions and logging them at WARN level
.onErrorResume(th -> true, th -> {
log.warn("Unexpected error while getting configs for brokers {}", brokerIds, th);
return Mono.just(Map.of());
})
.map(config -> config.entrySet().stream()
.collect(toMap(
c -> Integer.valueOf(c.getKey().name()),

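The chain above layers fallbacks from most to least specific: each onErrorResume only sees errors that earlier handlers in the chain did not already swallow, so the catch-all must come last. A self-contained Reactor sketch of the same pattern (the source call, exception type, and logger are illustrative):

import java.util.Map;
import reactor.core.publisher.Mono;

Mono<Map<String, String>> configs =
    fetchBrokerConfigs() // hypothetical Mono-returning call
        .onErrorResume(UnsupportedOperationException.class, th -> Mono.just(Map.of())) // known benign case
        .onErrorResume(th -> true, th -> {
          log.warn("Unexpected error while getting configs", th); // degrade to empty instead of failing
          return Mono.just(Map.of());
        });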
@@ -1,14 +1,14 @@
package com.provectus.kafka.ui.service.analyze;
import static com.provectus.kafka.ui.emitter.AbstractEmitter.NO_MORE_DATA_EMPTY_POLLS_COUNT;
import com.provectus.kafka.ui.emitter.EmptyPollsCounter;
import com.provectus.kafka.ui.emitter.OffsetsInfo;
import com.provectus.kafka.ui.emitter.PollingSettings;
import com.provectus.kafka.ui.emitter.PollingThrottler;
import com.provectus.kafka.ui.exception.TopicAnalysisException;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.model.TopicAnalysisDTO;
import com.provectus.kafka.ui.service.ConsumerGroupService;
import com.provectus.kafka.ui.service.TopicsService;
import com.provectus.kafka.ui.util.PollingThrottler;
import java.io.Closeable;
import java.time.Duration;
import java.time.Instant;
@@ -63,7 +63,7 @@
if (analysisTasksStore.isAnalysisInProgress(topicId)) {
throw new TopicAnalysisException("Topic analysis is already in progress");
}
var task = new AnalysisTask(cluster, topicId, partitionsCnt, approxNumberOfMsgs, cluster.getThrottler().get());
var task = new AnalysisTask(cluster, topicId, partitionsCnt, approxNumberOfMsgs, cluster.getPollingSettings());
analysisTasksStore.registerNewTask(topicId, task);
Schedulers.boundedElastic().schedule(task);
}
@@ -83,6 +83,7 @@
private final TopicIdentity topicId;
private final int partitionsCnt;
private final long approxNumberOfMsgs;
private final EmptyPollsCounter emptyPollsCounter;
private final PollingThrottler throttler;
private final TopicAnalysisStats totalStats = new TopicAnalysisStats();
@@ -91,7 +92,7 @@
private final KafkaConsumer<Bytes, Bytes> consumer;
AnalysisTask(KafkaCluster cluster, TopicIdentity topicId, int partitionsCnt,
long approxNumberOfMsgs, PollingThrottler throttler) {
long approxNumberOfMsgs, PollingSettings pollingSettings) {
this.topicId = topicId;
this.approxNumberOfMsgs = approxNumberOfMsgs;
this.partitionsCnt = partitionsCnt;
@@ -103,7 +104,8 @@
ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "100000"
)
);
this.throttler = throttler;
this.throttler = pollingSettings.getPollingThrottler();
this.emptyPollsCounter = pollingSettings.createEmptyPollsCounter();
}
@Override
@@ -124,11 +126,10 @@
consumer.seekToBeginning(topicPartitions);
var offsetsInfo = new OffsetsInfo(consumer, topicId.topicName);
for (int emptyPolls = 0; !offsetsInfo.assignedPartitionsFullyPolled()
&& emptyPolls < NO_MORE_DATA_EMPTY_POLLS_COUNT;) {
while (!offsetsInfo.assignedPartitionsFullyPolled() && !emptyPollsCounter.noDataEmptyPollsReached()) {
var polled = consumer.poll(Duration.ofSeconds(3));
throttler.throttleAfterPoll(polled);
emptyPolls = polled.isEmpty() ? emptyPolls + 1 : 0;
emptyPollsCounter.count(polled);
polled.forEach(r -> {
totalStats.apply(r);
partitionStats.get(r.partition()).apply(r);

@@ -9,6 +9,7 @@ import static org.assertj.core.api.Assertions.assertThat;
import com.provectus.kafka.ui.AbstractIntegrationTest;
import com.provectus.kafka.ui.emitter.BackwardRecordEmitter;
import com.provectus.kafka.ui.emitter.ForwardRecordEmitter;
import com.provectus.kafka.ui.emitter.PollingSettings;
import com.provectus.kafka.ui.model.ConsumerPosition;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import com.provectus.kafka.ui.producer.KafkaTestProducer;
@@ -16,7 +17,6 @@ import com.provectus.kafka.ui.serde.api.Serde;
import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
import com.provectus.kafka.ui.serdes.PropertyResolverImpl;
import com.provectus.kafka.ui.serdes.builtin.StringSerde;
import com.provectus.kafka.ui.util.PollingThrottler;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
@@ -112,7 +112,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
this::createConsumer,
new ConsumerPosition(BEGINNING, EMPTY_TOPIC, null),
RECORD_DESERIALIZER,
PollingThrottler.noop()
PollingSettings.createDefault()
);
var backwardEmitter = new BackwardRecordEmitter(
@@ -120,7 +120,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
new ConsumerPosition(BEGINNING, EMPTY_TOPIC, null),
100,
RECORD_DESERIALIZER,
PollingThrottler.noop()
PollingSettings.createDefault()
);
StepVerifier.create(Flux.create(forwardEmitter))
@@ -142,7 +142,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
this::createConsumer,
new ConsumerPosition(BEGINNING, TOPIC, null),
RECORD_DESERIALIZER,
PollingThrottler.noop()
PollingSettings.createDefault()
);
var backwardEmitter = new BackwardRecordEmitter(
@@ -150,7 +150,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
new ConsumerPosition(LATEST, TOPIC, null),
PARTITIONS * MSGS_PER_PARTITION,
RECORD_DESERIALIZER,
PollingThrottler.noop()
PollingSettings.createDefault()
);
List<String> expectedValues = SENT_RECORDS.stream().map(Record::getValue).collect(Collectors.toList());
@@ -171,7 +171,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
this::createConsumer,
new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
RECORD_DESERIALIZER,
PollingThrottler.noop()
PollingSettings.createDefault()
);
var backwardEmitter = new BackwardRecordEmitter(
@@ -179,7 +179,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
PARTITIONS * MSGS_PER_PARTITION,
RECORD_DESERIALIZER,
PollingThrottler.noop()
PollingSettings.createDefault()
);
var expectedValues = SENT_RECORDS.stream()
@@ -216,7 +216,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
this::createConsumer,
new ConsumerPosition(TIMESTAMP, TOPIC, targetTimestamps),
RECORD_DESERIALIZER,
PollingThrottler.noop()
PollingSettings.createDefault()
);
var backwardEmitter = new BackwardRecordEmitter(
@@ -224,7 +224,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
new ConsumerPosition(TIMESTAMP, TOPIC, targetTimestamps),
PARTITIONS * MSGS_PER_PARTITION,
RECORD_DESERIALIZER,
PollingThrottler.noop()
PollingSettings.createDefault()
);
var expectedValues = SENT_RECORDS.stream()
@@ -255,7 +255,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
numMessages,
RECORD_DESERIALIZER,
PollingThrottler.noop()
PollingSettings.createDefault()
);
var expectedValues = SENT_RECORDS.stream()
@@ -281,7 +281,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
new ConsumerPosition(OFFSET, TOPIC, offsets),
100,
RECORD_DESERIALIZER,
PollingThrottler.noop()
PollingSettings.createDefault()
);
expectEmitter(backwardEmitter,

@@ -5,6 +5,7 @@ import static org.assertj.core.data.Percentage.withPercentage;
import com.google.common.base.Stopwatch;
import com.google.common.util.concurrent.RateLimiter;
import com.provectus.kafka.ui.emitter.PollingThrottler;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import org.junit.jupiter.api.Test;

@@ -3600,6 +3600,15 @@ components:
kafka:
type: object
properties:
polling:
type: object
properties:
pollTimeoutMs:
type: integer
partitionPollTimeout:
type: integer
noDataEmptyPolls:
type: integer
clusters:
type: array
items:

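These schema fields describe the new kafka.polling block introduced in ClustersProperties earlier in this diff; a minimal application configuration sketch (values are illustrative, not recommendations):

kafka:
  polling:
    pollTimeoutMs: 2000        # overrides the 1000 ms default
    partitionPollTimeout: 400  # otherwise derived as pollTimeoutMs / 5
    noDataEmptyPolls: 5        # otherwise defaults to 3
  clusters:
    - name: local
      bootstrapServers: localhost:9092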
@@ -4,6 +4,7 @@ import com.codeborne.selenide.Condition;
import com.codeborne.selenide.ElementsCollection;
import com.codeborne.selenide.SelenideElement;
import com.codeborne.selenide.WebDriverRunner;
import com.provectus.kafka.ui.pages.panels.enums.MenuItem;
import com.provectus.kafka.ui.utilities.WebUtils;
import lombok.extern.slf4j.Slf4j;
import org.openqa.selenium.Keys;
@@ -33,6 +34,8 @@ public abstract class BasePage extends WebUtils {
protected String summaryCellLocator = "//div[contains(text(),'%s')]";
protected String tableElementNameLocator = "//tbody//a[contains(text(),'%s')]";
protected String columnHeaderLocator = "//table//tr/th//div[text()='%s']";
protected String pageTitleFromHeader = "//h1[text()='%s']";
protected String pagePathFromHeader = "//a[text()='%s']/../h1";
protected void waitUntilSpinnerDisappear() {
log.debug("\nwaitUntilSpinnerDisappear");
@@ -41,6 +44,14 @@
}
}
protected SelenideElement getPageTitleFromHeader(MenuItem menuItem) {
return $x(String.format(pageTitleFromHeader, menuItem.getPageTitle()));
}
protected SelenideElement getPagePathFromHeader(MenuItem menuItem) {
return $x(String.format(pagePathFromHeader, menuItem.getPageTitle()));
}
protected void clickSubmitBtn() {
clickByJavaScript(submitBtn);
}

@@ -12,15 +12,14 @@ import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.BROKERS;
public class BrokersList extends BasePage {
protected SelenideElement brokersListHeader = $x("//h1[text()='Brokers']");
@Step
public BrokersList waitUntilScreenReady() {
waitUntilSpinnerDisappear();
brokersListHeader.shouldBe(Condition.visible);
getPageTitleFromHeader(BROKERS).shouldBe(Condition.visible);
return this;
}

@@ -6,6 +6,7 @@ import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KAFKA_CONNECT;
public class KafkaConnectList extends BasePage {
@@ -19,7 +20,7 @@
@Step
public KafkaConnectList waitUntilScreenReady() {
waitUntilSpinnerDisappear();
createConnectorBtn.shouldBe(Condition.visible);
getPageTitleFromHeader(KAFKA_CONNECT).shouldBe(Condition.visible);
return this;
}

@@ -1,20 +1,17 @@
package com.provectus.kafka.ui.pages.consumers;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.CONSUMERS;
public class ConsumersList extends BasePage {
protected SelenideElement consumerListHeader = $x("//h1[text()='Consumers']");
@Step
public ConsumersList waitUntilScreenReady() {
waitUntilSpinnerDisappear();
consumerListHeader.shouldHave(Condition.visible);
getPageTitleFromHeader(CONSUMERS).shouldBe(Condition.visible);
return this;
}
}

@@ -1,137 +1,139 @@
package com.provectus.kafka.ui.pages.ksqlDb;
import static com.codeborne.selenide.Selenide.$;
import static com.codeborne.selenide.Selenide.$x;
import com.codeborne.selenide.CollectionCondition;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import com.provectus.kafka.ui.pages.ksqlDb.enums.KsqlMenuTabs;
import io.qameta.allure.Step;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.openqa.selenium.By;
import java.util.ArrayList;
import java.util.List;
import static com.codeborne.selenide.Selenide.$;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
public class KsqlDbList extends BasePage {
protected SelenideElement executeKsqlBtn = $x("//button[text()='Execute KSQL Request']");
protected SelenideElement tablesTab = $x("//nav[@role='navigation']/a[text()='Tables']");
protected SelenideElement streamsTab = $x("//nav[@role='navigation']/a[text()='Streams']");
@Step
public KsqlDbList waitUntilScreenReady() {
waitUntilSpinnerDisappear();
Arrays.asList(tablesTab, streamsTab).forEach(tab -> tab.shouldBe(Condition.visible));
return this;
}
protected SelenideElement executeKsqlBtn = $x("//button[text()='Execute KSQL Request']");
protected SelenideElement tablesTab = $x("//nav[@role='navigation']/a[text()='Tables']");
protected SelenideElement streamsTab = $x("//nav[@role='navigation']/a[text()='Streams']");
@Step
public KsqlDbList clickExecuteKsqlRequestBtn() {
clickByJavaScript(executeKsqlBtn);
return this;
}
@Step
public KsqlDbList openDetailsTab(KsqlMenuTabs menu) {
$(By.linkText(menu.toString())).shouldBe(Condition.visible).click();
waitUntilSpinnerDisappear();
return this;
}
private List<KsqlDbList.KsqlTablesGridItem> initTablesItems() {
List<KsqlDbList.KsqlTablesGridItem> gridItemList = new ArrayList<>();
gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
.forEach(item -> gridItemList.add(new KsqlDbList.KsqlTablesGridItem(item)));
return gridItemList;
}
@Step
public KsqlDbList.KsqlTablesGridItem getTableByName(String tableName) {
return initTablesItems().stream()
.filter(e -> e.getTableName().equals(tableName))
.findFirst().orElseThrow();
}
public static class KsqlTablesGridItem extends BasePage {
private final SelenideElement element;
public KsqlTablesGridItem(SelenideElement element) {
this.element = element;
@Step
public KsqlDbList waitUntilScreenReady() {
waitUntilSpinnerDisappear();
getPageTitleFromHeader(KSQL_DB).shouldBe(Condition.visible);
return this;
}
@Step
public String getTableName() {
return element.$x("./td[1]").getText().trim();
public KsqlDbList clickExecuteKsqlRequestBtn() {
clickByJavaScript(executeKsqlBtn);
return this;
}
@Step
public String getTopicName() {
return element.$x("./td[2]").getText().trim();
public KsqlDbList openDetailsTab(KsqlMenuTabs menu) {
$(By.linkText(menu.toString())).shouldBe(Condition.visible).click();
waitUntilSpinnerDisappear();
return this;
}
private List<KsqlDbList.KsqlTablesGridItem> initTablesItems() {
List<KsqlDbList.KsqlTablesGridItem> gridItemList = new ArrayList<>();
gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
.forEach(item -> gridItemList.add(new KsqlDbList.KsqlTablesGridItem(item)));
return gridItemList;
}
@Step
public String getKeyFormat() {
return element.$x("./td[3]").getText().trim();
public KsqlDbList.KsqlTablesGridItem getTableByName(String tableName) {
return initTablesItems().stream()
.filter(e -> e.getTableName().equals(tableName))
.findFirst().orElseThrow();
}
private List<KsqlDbList.KsqlStreamsGridItem> initStreamsItems() {
List<KsqlDbList.KsqlStreamsGridItem> gridItemList = new ArrayList<>();
gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
.forEach(item -> gridItemList.add(new KsqlDbList.KsqlStreamsGridItem(item)));
return gridItemList;
}
@Step
public String getValueFormat() {
return element.$x("./td[4]").getText().trim();
public KsqlDbList.KsqlStreamsGridItem getStreamByName(String streamName) {
return initStreamsItems().stream()
.filter(e -> e.getStreamName().equals(streamName))
.findFirst().orElseThrow();
}
@Step
public String getIsWindowed() {
return element.$x("./td[5]").getText().trim();
}
}
public static class KsqlTablesGridItem extends BasePage {
private List<KsqlDbList.KsqlStreamsGridItem> initStreamsItems() {
List<KsqlDbList.KsqlStreamsGridItem> gridItemList = new ArrayList<>();
gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
.forEach(item -> gridItemList.add(new KsqlDbList.KsqlStreamsGridItem(item)));
return gridItemList;
}
private final SelenideElement element;
@Step
public KsqlDbList.KsqlStreamsGridItem getStreamByName(String streamName) {
return initStreamsItems().stream()
.filter(e -> e.getStreamName().equals(streamName))
.findFirst().orElseThrow();
}
public KsqlTablesGridItem(SelenideElement element) {
this.element = element;
}
public static class KsqlStreamsGridItem extends BasePage {
@Step
public String getTableName() {
return element.$x("./td[1]").getText().trim();
}
private final SelenideElement element;
@Step
public String getTopicName() {
return element.$x("./td[2]").getText().trim();
}
public KsqlStreamsGridItem(SelenideElement element) {
this.element = element;
@Step
public String getKeyFormat() {
return element.$x("./td[3]").getText().trim();
}
@Step
public String getValueFormat() {
return element.$x("./td[4]").getText().trim();
}
@Step
public String getIsWindowed() {
return element.$x("./td[5]").getText().trim();
}
}
@Step
public String getStreamName() {
return element.$x("./td[1]").getText().trim();
}
public static class KsqlStreamsGridItem extends BasePage {
@Step
public String getTopicName() {
return element.$x("./td[2]").getText().trim();
}
private final SelenideElement element;
@Step
public String getKeyFormat() {
return element.$x("./td[3]").getText().trim();
}
public KsqlStreamsGridItem(SelenideElement element) {
this.element = element;
}
@Step
public String getValueFormat() {
return element.$x("./td[4]").getText().trim();
}
@Step
public String getStreamName() {
return element.$x("./td[1]").getText().trim();
}
@Step
public String getIsWindowed() {
return element.$x("./td[5]").getText().trim();
@Step
public String getTopicName() {
return element.$x("./td[2]").getText().trim();
}
@Step
public String getKeyFormat() {
return element.$x("./td[3]").getText().trim();
}
@Step
public String getValueFormat() {
return element.$x("./td[4]").getText().trim();
}
@Step
public String getIsWindowed() {
return element.$x("./td[5]").getText().trim();
}
}
}
}

@@ -16,7 +16,6 @@ import static com.codeborne.selenide.Selenide.$$x;
import static com.codeborne.selenide.Selenide.$x;
public class KsqlQueryForm extends BasePage {
protected SelenideElement pageTitle = $x("//h1[text()='Query']");
protected SelenideElement clearBtn = $x("//div/button[text()='Clear']");
protected SelenideElement executeBtn = $x("//div/button[text()='Execute']");
protected SelenideElement stopQueryBtn = $x("//div/button[text()='Stop query']");
@@ -31,7 +30,7 @@ public class KsqlQueryForm extends BasePage {
@Step
public KsqlQueryForm waitUntilScreenReady() {
waitUntilSpinnerDisappear();
pageTitle.shouldBe(Condition.visible);
executeBtn.shouldBe(Condition.visible);
return this;
}

@@ -1,7 +1,9 @@
package com.provectus.kafka.ui.pages;
package com.provectus.kafka.ui.pages.panels;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.panels.enums.MenuItem;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import java.time.Duration;
@@ -34,38 +36,29 @@ public class NaviSideBar extends BasePage {
}
@Step
public NaviSideBar openSideMenu(String clusterName, SideMenuOption option) {
public String getPagePath(MenuItem menuItem) {
return getPagePathFromHeader(menuItem)
.shouldBe(Condition.visible)
.getText().trim();
}
@Step
public NaviSideBar openSideMenu(String clusterName, MenuItem menuItem) {
clickByActions(expandCluster(clusterName).parent()
.$x(String.format(sideMenuOptionElementLocator, option.value)));
.$x(String.format(sideMenuOptionElementLocator, menuItem.getNaviTitle())));
return this;
}
@Step
public NaviSideBar openSideMenu(SideMenuOption option) {
openSideMenu(CLUSTER_NAME, option);
public NaviSideBar openSideMenu(MenuItem menuItem) {
openSideMenu(CLUSTER_NAME, menuItem);
return this;
}
public List<SelenideElement> getAllMenuButtons() {
expandCluster(CLUSTER_NAME);
return Stream.of(SideMenuOption.values())
.map(option -> $x(String.format(sideMenuOptionElementLocator, option.value)))
return Stream.of(MenuItem.values())
.map(menuItem -> $x(String.format(sideMenuOptionElementLocator, menuItem.getNaviTitle())))
.collect(Collectors.toList());
}
public enum SideMenuOption {
DASHBOARD("Dashboard"),
BROKERS("Brokers"),
TOPICS("Topics"),
CONSUMERS("Consumers"),
SCHEMA_REGISTRY("Schema Registry"),
KAFKA_CONNECT("Kafka Connect"),
KSQL_DB("KSQL DB");
final String value;
SideMenuOption(String value) {
this.value = value;
}
}
}

@@ -1,6 +1,7 @@
package com.provectus.kafka.ui.pages;
package com.provectus.kafka.ui.pages.panels;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import java.util.Arrays;
import java.util.List;

@@ -0,0 +1,28 @@
package com.provectus.kafka.ui.pages.panels.enums;
public enum MenuItem {
DASHBOARD("Dashboard", "Dashboard"),
BROKERS("Brokers", "Brokers"),
TOPICS("Topics", "Topics"),
CONSUMERS("Consumers", "Consumers"),
SCHEMA_REGISTRY("Schema Registry", "Schema Registry"),
KAFKA_CONNECT("Kafka Connect", "Connectors"),
KSQL_DB("KSQL DB", "KSQL DB");
private final String naviTitle;
private final String pageTitle;
MenuItem(String naviTitle, String pageTitle) {
this.naviTitle = naviTitle;
this.pageTitle = pageTitle;
}
public String getNaviTitle() {
return naviTitle;
}
public String getPageTitle() {
return pageTitle;
}
}

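The two titles diverge only for KAFKA_CONNECT, whose side-menu entry reads "Kafka Connect" while its page header reads "Connectors". A short usage sketch against the page objects in this diff:

// Navigate via the side-menu title, then read the page-header text.
naviSideBar.openSideMenu(MenuItem.KAFKA_CONNECT);              // locator built from getNaviTitle()
String path = naviSideBar.getPagePath(MenuItem.KAFKA_CONNECT); // resolves via getPageTitle(), i.e. "Connectors"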
@@ -6,6 +6,7 @@ import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.SCHEMA_REGISTRY;
public class SchemaRegistryList extends BasePage {
@@ -14,7 +15,7 @@ public class SchemaRegistryList extends BasePage {
@Step
public SchemaRegistryList waitUntilScreenReady() {
waitUntilSpinnerDisappear();
createSchemaBtn.shouldBe(Condition.visible);
getPageTitleFromHeader(SCHEMA_REGISTRY).shouldBe(Condition.visible);
return this;
}

@@ -14,10 +14,10 @@ import java.util.stream.Stream;
import static com.codeborne.selenide.Condition.visible;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.TOPICS;
public class TopicsList extends BasePage {
protected SelenideElement topicListHeader = $x("//h1[text()='Topics']");
protected SelenideElement addTopicBtn = $x("//button[normalize-space(text()) ='Add a Topic']");
protected SelenideElement searchField = $x("//input[@placeholder='Search by Topic Name']");
protected SelenideElement showInternalRadioBtn = $x("//input[@name='ShowInternalTopics']");
@@ -31,7 +31,7 @@ public class TopicsList extends BasePage {
@Step
public TopicsList waitUntilScreenReady() {
waitUntilSpinnerDisappear();
topicListHeader.shouldBe(visible);
getPageTitleFromHeader(TOPICS).shouldBe(visible);
return this;
}

@@ -67,8 +67,8 @@ public class ApiService extends BaseSource {
}
@Step
public ApiService createTopic(String topicName) {
createTopic(CLUSTER_NAME, topicName);
public ApiService createTopic(Topic topic) {
createTopic(CLUSTER_NAME, topic.getName());
return this;
}
@@ -133,6 +133,12 @@
return this;
}
@Step
public ApiService deleteConnector(String connectorName) {
deleteConnector(CLUSTER_NAME, CONNECT_NAME, connectorName);
return this;
}
@SneakyThrows
private void createConnector(String clusterName, String connectName, Connector connector) {
NewConnector connectorProperties = new NewConnector();
@@ -152,9 +158,15 @@
return this;
}
@Step
public ApiService createConnector(Connector connector) {
createConnector(CLUSTER_NAME, CONNECT_NAME, connector);
return this;
}
@Step
public String getFirstConnectName(String clusterName) {
return connectorApi().getConnects(clusterName).blockFirst().getName();
return Objects.requireNonNull(connectorApi().getConnects(clusterName).blockFirst()).getName();
}
@SneakyThrows

@@ -8,6 +8,7 @@ public abstract class BaseSource {
public static final String BASE_CONTAINER_URL = "http://host.testcontainers.internal:8080";
public static final String BASE_LOCAL_URL = "http://localhost:8080";
public static final String CLUSTER_NAME = "local";
public static final String CONNECT_NAME = "first";
private static Config config;
public static final String BROWSER = config().browser();
public static final String SUITE_NAME = config().suite();

@@ -22,7 +22,7 @@ import org.testng.asserts.SoftAssert;
import java.time.Duration;
import java.util.List;
import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.*;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.*;
import static com.provectus.kafka.ui.settings.BaseSource.*;
import static com.provectus.kafka.ui.settings.drivers.LocalWebDriver.*;
import static com.provectus.kafka.ui.utilities.qaseUtils.QaseSetup.qaseIntegrationSetup;
@@ -117,6 +117,17 @@ public abstract class BaseTest extends Facade {
.waitUntilScreenReady();
}
@Step
protected void navigateToBrokersAndOpenDetails(int brokerId) {
naviSideBar
.openSideMenu(BROKERS);
brokersList
.waitUntilScreenReady()
.openBroker(brokerId);
brokersDetails
.waitUntilScreenReady();
}
@Step
protected void navigateToTopics() {
naviSideBar

@@ -1,7 +1,7 @@
package com.provectus.kafka.ui;
import com.provectus.kafka.ui.pages.NaviSideBar;
import com.provectus.kafka.ui.pages.TopPanel;
import com.provectus.kafka.ui.pages.panels.NaviSideBar;
import com.provectus.kafka.ui.pages.panels.TopPanel;
import com.provectus.kafka.ui.pages.brokers.BrokersConfigTab;
import com.provectus.kafka.ui.pages.brokers.BrokersDetails;
import com.provectus.kafka.ui.pages.brokers.BrokersList;

@@ -11,6 +11,7 @@ import java.lang.reflect.Method;
import static com.provectus.kafka.ui.utilities.qaseUtils.QaseSetup.qaseIntegrationSetup;
import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.NOT_AUTOMATED;
import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.TO_BE_AUTOMATED;
@Listeners(QaseResultListener.class)
public abstract class BaseManualTest {
@@ -22,7 +23,8 @@
@BeforeMethod
public void beforeMethod(Method method) {
if (method.getAnnotation(Automation.class).state().equals(NOT_AUTOMATED))
if (method.getAnnotation(Automation.class).state().equals(NOT_AUTOMATED)
|| method.getAnnotation(Automation.class).state().equals(TO_BE_AUTOMATED))
throw new SkipException("Skip test exception");
}
}

@@ -0,0 +1,19 @@
package com.provectus.kafka.ui.manualSuite.backlog;
import com.provectus.kafka.ui.manualSuite.BaseManualTest;
import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Automation;
import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
import io.qase.api.annotation.QaseId;
import org.testng.annotations.Test;
import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.TO_BE_AUTOMATED;
public class SanityBacklog extends BaseManualTest {
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = 19)
@QaseId(285)
@Test
public void testCaseA() {
}
}

@@ -1,35 +1,61 @@
package com.provectus.kafka.ui.manualSuite.suite;
package com.provectus.kafka.ui.manualSuite.backlog;
import com.provectus.kafka.ui.manualSuite.BaseManualTest;
import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Automation;
import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
import io.qase.api.annotation.QaseId;
import org.testng.annotations.Test;
import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.TO_BE_AUTOMATED;
public class KsqlDbTest extends BaseManualTest {
public class SmokeBacklog extends BaseManualTest {
@Automation(state = TO_BE_AUTOMATED)
@QaseId(276)
@Suite(id = 1)
@QaseId(330)
@Test
public void testCaseA() {
}
@Automation(state = TO_BE_AUTOMATED)
@QaseId(277)
@Suite(id = 8)
@QaseId(276)
@Test
public void testCaseB() {
}
@Automation(state = TO_BE_AUTOMATED)
@QaseId(278)
@Suite(id = 8)
@QaseId(277)
@Test
public void testCaseC() {
}
@Automation(state = TO_BE_AUTOMATED)
@QaseId(284)
@Suite(id = 8)
@QaseId(278)
@Test
public void testCaseD() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = 8)
@QaseId(284)
@Test
public void testCaseE() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = 1)
@QaseId(331)
@Test
public void testCaseF() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = 1)
@QaseId(332)
@Test
public void testCaseG() {
}
}

@@ -34,68 +34,62 @@ public class TopicsTest extends BaseManualTest {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(46)
@QaseId(47)
@Test
public void testCaseE() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(47)
@QaseId(48)
@Test
public void testCaseF() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(48)
@QaseId(49)
@Test
public void testCaseG() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(49)
@QaseId(50)
@Test
public void testCaseH() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(50)
@QaseId(57)
@Test
public void testCaseI() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(57)
@QaseId(58)
@Test
public void testCaseJ() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(58)
@QaseId(269)
@Test
public void testCaseK() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(269)
@QaseId(270)
@Test
public void testCaseL() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(270)
@QaseId(271)
@Test
public void testCaseM() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(271)
@QaseId(272)
@Test
public void testCaseN() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(272)
@Test
public void testCaseO() {
}
}

@@ -5,12 +5,12 @@ import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Automation;
import io.qase.api.annotation.QaseId;
import org.testng.annotations.Test;
import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.TO_BE_AUTOMATED;
import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.NOT_AUTOMATED;
public class BrokersTest extends BaseManualTest {
public class WizardTest extends BaseManualTest {
@Automation(state = TO_BE_AUTOMATED)
@QaseId(330)
@Automation(state = NOT_AUTOMATED)
@QaseId(333)
@Test
public void testCaseA() {
}

@@ -9,6 +9,13 @@ import static com.provectus.kafka.ui.utilities.qaseUtils.QaseSetup.qaseIntegrati
@Listeners(QaseCreateListener.class)
public abstract class BaseQaseTest {
protected static final long BROKERS_SUITE_ID = 1;
protected static final long CONNECTORS_SUITE_ID = 10;
protected static final long KSQL_DB_SUITE_ID = 8;
protected static final long SANITY_SUITE_ID = 19;
protected static final long SCHEMAS_SUITE_ID = 11;
protected static final long TOPICS_SUITE_ID = 2;
@BeforeSuite
public void beforeSuite() {
qaseIntegrationSetup();

@@ -13,7 +13,7 @@ public class Template extends BaseQaseTest {
/**
* this class is a kind of placeholder or example; use it as a template to create new ones
* copy class into kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/suite
* copy Template into kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/
* place it into the corresponding folder and rename it according to the test case summary from Qase.io
* uncomment @Test and set all annotations according to kafka-ui-e2e-checks/QASE.md
*/

@@ -3,20 +3,46 @@ package com.provectus.kafka.ui.smokeSuite;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.WebDriverRunner;
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.pages.panels.enums.MenuItem;
import com.provectus.kafka.ui.models.Connector;
import com.provectus.kafka.ui.models.Schema;
import com.provectus.kafka.ui.models.Topic;
import io.qameta.allure.Step;
import io.qase.api.annotation.QaseId;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.*;
import static com.provectus.kafka.ui.settings.BaseSource.BROWSER;
import static com.provectus.kafka.ui.utilities.FileUtils.getResourceAsString;
import static com.provectus.kafka.ui.variables.Browser.LOCAL;
import static com.provectus.kafka.ui.variables.Url.*;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
public class SmokeTest extends BaseTest {
private static final int BROKER_ID = 1;
private static final Schema TEST_SCHEMA = Schema.createSchemaAvro();
private static final Topic TEST_TOPIC = new Topic()
.setName("new-topic-" + randomAlphabetic(5))
.setNumberOfPartitions(1);
private static final Connector TEST_CONNECTOR = new Connector()
.setName("new-connector-" + randomAlphabetic(5))
.setConfig(getResourceAsString("testData/connectors/config_for_create_connector_via_api.json"));
@BeforeClass(alwaysRun = true)
public void beforeClass() {
apiService
.createTopic(TEST_TOPIC)
.createSchema(TEST_SCHEMA)
.createConnector(TEST_CONNECTOR);
}
@QaseId(198)
@Test
public void checkBasePageElements() {
@ -45,10 +71,37 @@ public class SmokeTest extends BaseTest {
verifyCurrentUrl(KSQL_DB_LIST_URL);
}
@QaseId(46)
@Test
public void checkComponentsPathWhileNavigating() {
navigateToBrokersAndOpenDetails(BROKER_ID);
verifyComponentsPath(BROKERS, String.format("Broker %d", BROKER_ID));
navigateToTopicsAndOpenDetails(TEST_TOPIC.getName());
verifyComponentsPath(TOPICS, TEST_TOPIC.getName());
navigateToSchemaRegistryAndOpenDetails(TEST_SCHEMA.getName());
verifyComponentsPath(SCHEMA_REGISTRY, TEST_SCHEMA.getName());
navigateToConnectorsAndOpenDetails(TEST_CONNECTOR.getName());
verifyComponentsPath(KAFKA_CONNECT, TEST_CONNECTOR.getName());
}
@Step
private void verifyCurrentUrl(String expectedUrl) {
String host = BROWSER.equals(LOCAL) ? "localhost" : "host.testcontainers.internal";
Assert.assertEquals(WebDriverRunner.getWebDriver().getCurrentUrl(),
String.format(expectedUrl, host), "getCurrentUrl()");
}
@Step
private void verifyComponentsPath(MenuItem menuItem, String expectedPath) {
Assert.assertEquals(naviSideBar.getPagePath(menuItem), expectedPath,
String.format("getPagePath() for %s", menuItem.getPageTitle().toUpperCase()));
}
@AfterClass(alwaysRun = true)
public void afterClass() {
apiService
.deleteTopic(TEST_TOPIC.getName())
.deleteSchema(TEST_SCHEMA.getName())
.deleteConnector(TEST_CONNECTOR.getName());
}
}

View file

@ -18,44 +18,42 @@ import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
public class ConnectorsTest extends BaseTest {
private static final String CONNECT_NAME = "first";
private static final List<Topic> TOPIC_LIST = new ArrayList<>();
private static final List<Connector> CONNECTOR_LIST = new ArrayList<>();
private static final String MESSAGE_CONTENT = "testData/topics/message_content_create_topic.json";
private static final String MESSAGE_KEY = " ";
private static final Topic TOPIC_FOR_CREATE = new Topic()
.setName("topic_for_create_connector-" + randomAlphabetic(5))
.setName("topic-for-create-connector-" + randomAlphabetic(5))
.setMessageContent(MESSAGE_CONTENT).setMessageKey(MESSAGE_KEY);
private static final Topic TOPIC_FOR_DELETE = new Topic()
.setName("topic_for_delete_connector-" + randomAlphabetic(5))
.setName("topic-for-delete-connector-" + randomAlphabetic(5))
.setMessageContent(MESSAGE_CONTENT).setMessageKey(MESSAGE_KEY);
private static final Topic TOPIC_FOR_UPDATE = new Topic()
.setName("topic_for_update_connector-" + randomAlphabetic(5))
.setName("topic-for-update-connector-" + randomAlphabetic(5))
.setMessageContent(MESSAGE_CONTENT).setMessageKey(MESSAGE_KEY);
private static final Connector CONNECTOR_FOR_DELETE = new Connector()
.setName("sink_postgres_activities_e2e_checks_for_delete-" + randomAlphabetic(5))
.setName("connector-for-delete-" + randomAlphabetic(5))
.setConfig(getResourceAsString("testData/connectors/delete_connector_config.json"));
private static final Connector CONNECTOR_FOR_UPDATE = new Connector()
.setName("sink_postgres_activities_e2e_checks_for_update-" + randomAlphabetic(5))
.setName("connector-for-update-and-delete-" + randomAlphabetic(5))
.setConfig(getResourceAsString("testData/connectors/config_for_create_connector_via_api.json"));
@BeforeClass(alwaysRun = true)
public void beforeClass() {
TOPIC_LIST.addAll(List.of(TOPIC_FOR_CREATE, TOPIC_FOR_DELETE, TOPIC_FOR_UPDATE));
TOPIC_LIST.forEach(topic -> apiService
.createTopic(topic.getName())
.createTopic(topic)
.sendMessage(topic)
);
CONNECTOR_LIST.addAll(List.of(CONNECTOR_FOR_DELETE, CONNECTOR_FOR_UPDATE));
CONNECTOR_LIST.forEach(connector -> apiService
.createConnector(CONNECT_NAME, connector));
CONNECTOR_LIST.forEach(connector -> apiService.createConnector(connector));
}
@QaseId(42)
@Test
public void createConnector() {
Connector connectorForCreate = new Connector()
.setName("sink_postgres_activities_e2e_checks-" + randomAlphabetic(5))
.setName("connector-for-create-" + randomAlphabetic(5))
.setConfig(getResourceAsString("testData/connectors/config_for_create_connector.json"));
navigateToConnectors();
kafkaConnectList
@ -102,7 +100,7 @@ public class ConnectorsTest extends BaseTest {
@AfterClass(alwaysRun = true)
public void afterClass() {
CONNECTOR_LIST.forEach(connector ->
apiService.deleteConnector(CONNECT_NAME, connector.getName()));
apiService.deleteConnector(connector.getName()));
TOPIC_LIST.forEach(topic -> apiService.deleteTopic(topic.getName()));
}
}

View file

@ -53,7 +53,7 @@ public class MessagesTest extends BaseTest {
public void beforeClass() {
TOPIC_LIST.addAll(List.of(TOPIC_FOR_MESSAGES, TOPIC_FOR_CHECK_FILTERS, TOPIC_TO_CLEAR_AND_PURGE_MESSAGES,
TOPIC_TO_RECREATE, TOPIC_FOR_CHECK_MESSAGES_COUNT));
TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName()));
TOPIC_LIST.forEach(topic -> apiService.createTopic(topic));
IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECK_FILTERS));
waitUntilNewMinuteStarted();
IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECK_FILTERS));
@ -75,8 +75,6 @@ public class MessagesTest extends BaseTest {
softly.assertAll();
}
@Ignore
@Issue("https://github.com/provectus/kafka-ui/issues/2778")
@QaseId(19)
@Test(priority = 2)
public void clearMessage() {
@ -85,12 +83,13 @@ public class MessagesTest extends BaseTest {
.openDetailsTab(OVERVIEW);
int messageAmount = topicDetails.getMessageCountAmount();
produceMessage(TOPIC_FOR_MESSAGES);
Assert.assertEquals(messageAmount + 1, topicDetails.getMessageCountAmount(), "getMessageCountAmount()");
Assert.assertEquals(topicDetails.getMessageCountAmount(), messageAmount + 1, "getMessageCountAmount()");
topicDetails
.openDotMenu()
.clickClearMessagesMenu()
.clickConfirmBtnMdl()
.waitUntilScreenReady();
Assert.assertEquals(0, topicDetails.getMessageCountAmount(), "getMessageCountAmount()");
Assert.assertEquals(topicDetails.getMessageCountAmount(), 0, "getMessageCountAmount()");
}
@QaseId(239)

View file

@ -59,7 +59,7 @@ public class TopicsTest extends BaseTest {
@BeforeClass(alwaysRun = true)
public void beforeClass() {
TOPIC_LIST.addAll(List.of(TOPIC_TO_UPDATE_AND_DELETE, TOPIC_FOR_DELETE, TOPIC_FOR_CHECK_FILTERS));
TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName()));
TOPIC_LIST.forEach(topic -> apiService.createTopic(topic));
}
@QaseId(199)

View file

@ -30,6 +30,9 @@ const queryClient = new QueryClient({
defaultOptions: {
queries: {
suspense: true,
onError(error) {
showServerError(error as Response);
},
},
mutations: {
onError(error) {
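This hunk moves server-error reporting into react-query's global defaults, so individual queries no longer need their own catch logic. A minimal sketch of the resulting client wiring, assuming @tanstack/react-query v4 and a showServerError helper equivalent to the one imported in this file (both names here are stand-ins):

import { QueryClient } from '@tanstack/react-query';

// stand-in for the app's real error reporter (assumption)
const showServerError = (response: Response) => {
  // eslint-disable-next-line no-console
  console.error(`Request to ${response.url} failed with status ${response.status}`);
};

const queryClient = new QueryClient({
  defaultOptions: {
    queries: {
      suspense: true,
      // every failed query funnels through a single handler
      onError(error) {
        showServerError(error as Response);
      },
    },
    mutations: {
      onError(error) {
        showServerError(error as Response);
      },
    },
  },
});

export default queryClient;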

View file

@ -1,17 +1,18 @@
import React from 'react';
import { Route, Routes } from 'react-router-dom';
import Details from 'components/ConsumerGroups/Details/Details';
import ListContainer from 'components/ConsumerGroups/List/ListContainer';
import ResetOffsets from 'components/ConsumerGroups/Details/ResetOffsets/ResetOffsets';
import {
clusterConsumerGroupResetOffsetsRelativePath,
RouteParams,
} from 'lib/paths';
import List from './List';
const ConsumerGroups: React.FC = () => {
return (
<Routes>
<Route index element={<ListContainer />} />
<Route index element={<List />} />
<Route path={RouteParams.consumerGroupID} element={<Details />} />
<Route
path={clusterConsumerGroupResetOffsetsRelativePath}

View file

@ -7,26 +7,22 @@ import {
ClusterGroupParam,
} from 'lib/paths';
import Search from 'components/common/Search/Search';
import PageLoader from 'components/common/PageLoader/PageLoader';
import ClusterContext from 'components/contexts/ClusterContext';
import PageHeading from 'components/common/PageHeading/PageHeading';
import * as Metrics from 'components/common/Metrics';
import { Tag } from 'components/common/Tag/Tag.styled';
import groupBy from 'lodash/groupBy';
import { Table } from 'components/common/table/Table/Table.styled';
import { useAppDispatch, useAppSelector } from 'lib/hooks/redux';
import {
deleteConsumerGroup,
selectById,
fetchConsumerGroupDetails,
getAreConsumerGroupDetailsFulfilled,
} from 'redux/reducers/consumerGroups/consumerGroupsSlice';
import getTagColor from 'components/common/Tag/getTagColor';
import { Dropdown } from 'components/common/Dropdown';
import { ControlPanelWrapper } from 'components/common/ControlPanel/ControlPanel.styled';
import { Action, ResourceType } from 'generated-sources';
import { ActionDropdownItem } from 'components/common/ActionComponent';
import TableHeaderCell from 'components/common/table/TableHeaderCell/TableHeaderCell';
import {
useConsumerGroupDetails,
useDeleteConsumerGroupMutation,
} from 'lib/hooks/api/consumers';
import ListItem from './ListItem';
@ -35,38 +31,25 @@ const Details: React.FC = () => {
const [searchParams] = useSearchParams();
const searchValue = searchParams.get('q') || '';
const { isReadOnly } = React.useContext(ClusterContext);
const { consumerGroupID, clusterName } = useAppParams<ClusterGroupParam>();
const dispatch = useAppDispatch();
const consumerGroup = useAppSelector((state) =>
selectById(state, consumerGroupID)
);
const isFetched = useAppSelector(getAreConsumerGroupDetailsFulfilled);
const routeParams = useAppParams<ClusterGroupParam>();
const { clusterName, consumerGroupID } = routeParams;
React.useEffect(() => {
dispatch(fetchConsumerGroupDetails({ clusterName, consumerGroupID }));
}, [clusterName, consumerGroupID, dispatch]);
const consumerGroup = useConsumerGroupDetails(routeParams);
const deleteConsumerGroup = useDeleteConsumerGroupMutation(routeParams);
const onDelete = async () => {
const res = await dispatch(
deleteConsumerGroup({ clusterName, consumerGroupID })
).unwrap();
if (res) navigate('../');
await deleteConsumerGroup.mutateAsync();
navigate('../');
};
const onResetOffsets = () => {
navigate(clusterConsumerGroupResetRelativePath);
};
if (!isFetched || !consumerGroup) {
return <PageLoader />;
}
const partitionsByTopic = groupBy(consumerGroup.partitions, 'topic');
const partitionsByTopic = groupBy(consumerGroup.data?.partitions, 'topic');
const filteredPartitionsByTopic = Object.keys(partitionsByTopic).filter(
(el) => el.includes(searchValue)
);
const currentPartitionsByTopic = searchValue.length
? filteredPartitionsByTopic
: Object.keys(partitionsByTopic);
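One thing to note about the new onDelete above: unlike the old dispatch(...).unwrap() flow with its `if (res)` guard, mutateAsync returns a promise that rejects on failure, so navigate('../') runs only on success. If local handling were ever wanted, it would be an explicit try/catch; a sketch:

const onDelete = async () => {
  try {
    await deleteConsumerGroup.mutateAsync();
    navigate('../'); // success path only
  } catch {
    // failure is already reported via the QueryClient's global mutation onError
  }
};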
@ -110,24 +93,24 @@ const Details: React.FC = () => {
<Metrics.Wrapper>
<Metrics.Section>
<Metrics.Indicator label="State">
<Tag color={getTagColor(consumerGroup.state)}>
{consumerGroup.state}
<Tag color={getTagColor(consumerGroup.data?.state)}>
{consumerGroup.data?.state}
</Tag>
</Metrics.Indicator>
<Metrics.Indicator label="Members">
{consumerGroup.members}
{consumerGroup.data?.members}
</Metrics.Indicator>
<Metrics.Indicator label="Assigned Topics">
{consumerGroup.topics}
{consumerGroup.data?.topics}
</Metrics.Indicator>
<Metrics.Indicator label="Assigned Partitions">
{consumerGroup.partitions?.length}
{consumerGroup.data?.partitions?.length}
</Metrics.Indicator>
<Metrics.Indicator label="Coordinator ID">
{consumerGroup.coordinator?.id}
{consumerGroup.data?.coordinator?.id}
</Metrics.Indicator>
<Metrics.Indicator label="Total lag">
{consumerGroup.messagesBehind}
{consumerGroup.data?.messagesBehind}
</Metrics.Indicator>
</Metrics.Section>
</Metrics.Wrapper>
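Every read in this component now goes through consumerGroup.data?.…, because useConsumerGroupDetails returns a react-query result object rather than a plain entity. A minimal sketch of what such a hook could look like; the hook name and route params come from the imports above, while the fetch call and query key are assumptions (the real hook presumably wraps the generated API client):

import { useQuery } from '@tanstack/react-query';

interface ClusterGroupParam {
  clusterName: string;
  consumerGroupID: string;
}

export function useConsumerGroupDetails({
  clusterName,
  consumerGroupID,
}: ClusterGroupParam) {
  return useQuery(
    ['clusters', clusterName, 'consumer-groups', consumerGroupID],
    async () => {
      const res = await fetch(
        `/api/clusters/${clusterName}/consumer-groups/${consumerGroupID}`
      );
      if (!res.ok) throw res; // rejected Response feeds the global onError
      return res.json();
    }
  );
}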

View file

@ -0,0 +1,197 @@
import React from 'react';
import { useNavigate } from 'react-router-dom';
import {
ConsumerGroupDetails,
ConsumerGroupOffsetsReset,
ConsumerGroupOffsetsResetType,
} from 'generated-sources';
import { ClusterGroupParam } from 'lib/paths';
import {
Controller,
FormProvider,
useFieldArray,
useForm,
} from 'react-hook-form';
import { MultiSelect, Option } from 'react-multi-select-component';
import 'react-datepicker/dist/react-datepicker.css';
import { ErrorMessage } from '@hookform/error-message';
import { InputLabel } from 'components/common/Input/InputLabel.styled';
import { Button } from 'components/common/Button/Button';
import Input from 'components/common/Input/Input';
import { FormError } from 'components/common/Input/Input.styled';
import useAppParams from 'lib/hooks/useAppParams';
import { useResetConsumerGroupOffsetsMutation } from 'lib/hooks/api/consumers';
import { FlexFieldset, StyledForm } from 'components/common/Form/Form.styled';
import ControlledSelect from 'components/common/Select/ControlledSelect';
import * as S from './ResetOffsets.styled';
interface FormProps {
defaultValues: ConsumerGroupOffsetsReset;
topics: string[];
partitions: ConsumerGroupDetails['partitions'];
}
const resetTypeOptions = Object.values(ConsumerGroupOffsetsResetType).map(
(value) => ({ value, label: value })
);
const Form: React.FC<FormProps> = ({ defaultValues, partitions, topics }) => {
const navigate = useNavigate();
const routerParams = useAppParams<ClusterGroupParam>();
const reset = useResetConsumerGroupOffsetsMutation(routerParams);
const topicOptions = React.useMemo(
() => topics.map((value) => ({ value, label: value })),
[topics]
);
const methods = useForm<ConsumerGroupOffsetsReset>({
mode: 'onChange',
defaultValues,
});
const {
handleSubmit,
setValue,
watch,
control,
formState: { errors },
} = methods;
const { fields } = useFieldArray({
control,
name: 'partitionsOffsets',
});
const resetTypeValue = watch('resetType');
const topicValue = watch('topic');
const offsetsValue = watch('partitionsOffsets');
const partitionsValue = watch('partitions') || [];
const partitionOptions =
partitions
?.filter((p) => p.topic === topicValue)
.map((p) => ({
label: `Partition #${p.partition.toString()}`,
value: p.partition,
})) || [];
const onSelectedPartitionsChange = (selected: Option[]) => {
setValue(
'partitions',
selected.map(({ value }) => value)
);
setValue(
'partitionsOffsets',
selected.map(({ value }) => {
const currentOffset = offsetsValue?.find(
({ partition }) => partition === value
);
return { offset: currentOffset?.offset, partition: value };
})
);
};
React.useEffect(() => {
onSelectedPartitionsChange([]);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [topicValue]);
const onSubmit = async (data: ConsumerGroupOffsetsReset) => {
await reset.mutateAsync(data);
navigate('../');
};
return (
<FormProvider {...methods}>
<StyledForm onSubmit={handleSubmit(onSubmit)}>
<FlexFieldset>
<ControlledSelect
name="topic"
label="Topic"
placeholder="Select Topic"
options={topicOptions}
/>
<ControlledSelect
name="resetType"
label="Reset Type"
placeholder="Select Reset Type"
options={resetTypeOptions}
/>
<div>
<InputLabel>Partitions</InputLabel>
<MultiSelect
options={partitionOptions}
value={partitionsValue.map((p) => ({
value: p,
label: String(p),
}))}
onChange={onSelectedPartitionsChange}
labelledBy="Select partitions"
/>
</div>
{resetTypeValue === ConsumerGroupOffsetsResetType.TIMESTAMP &&
partitionsValue.length > 0 && (
<div>
<InputLabel>Timestamp</InputLabel>
<Controller
control={control}
name="resetToTimestamp"
rules={{
required: 'Timestamp is required',
}}
render={({ field: { onChange, onBlur, value, ref } }) => (
<S.DatePickerInput
ref={ref}
selected={new Date(value as number)}
onChange={(e: Date | null) => onChange(e?.getTime())}
onBlur={onBlur}
/>
)}
/>
<ErrorMessage
errors={errors}
name="resetToTimestamp"
render={({ message }) => <FormError>{message}</FormError>}
/>
</div>
)}
{resetTypeValue === ConsumerGroupOffsetsResetType.OFFSET &&
partitionsValue.length > 0 && (
<S.OffsetsWrapper>
{fields.map((field, index) => (
<Input
key={field.id}
label={`Partition #${field.partition} Offset`}
type="number"
name={`partitionsOffsets.${index}.offset` as const}
hookFormOptions={{
shouldUnregister: true,
required: 'Offset is required',
min: {
value: 0,
message: 'must be greater than or equal to 0',
},
}}
withError
/>
))}
</S.OffsetsWrapper>
)}
</FlexFieldset>
<div>
<Button
buttonSize="M"
buttonType="primary"
type="submit"
disabled={partitionsValue.length === 0}
>
Submit
</Button>
</div>
</StyledForm>
</FormProvider>
);
};
export default Form;
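The Form above submits through useResetConsumerGroupOffsetsMutation and navigates once mutateAsync resolves. A sketch of how that hook could be built; the offsets endpoint matches the one exercised by the deleted tests later in this diff, the type is a local stand-in for the generated ConsumerGroupOffsetsReset, and the invalidation key is an assumption:

import { useMutation, useQueryClient } from '@tanstack/react-query';

// stand-in for the generated-sources type (assumption)
interface ConsumerGroupOffsetsReset {
  topic: string;
  resetType: string;
  partitions?: number[];
  partitionsOffsets?: { partition: number; offset?: number }[];
  resetToTimestamp?: number;
}

export function useResetConsumerGroupOffsetsMutation({
  clusterName,
  consumerGroupID,
}: {
  clusterName: string;
  consumerGroupID: string;
}) {
  const client = useQueryClient();
  return useMutation(
    async (body: ConsumerGroupOffsetsReset) => {
      const res = await fetch(
        `/api/clusters/${clusterName}/consumer-groups/${consumerGroupID}/offsets`,
        {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify(body),
        }
      );
      if (!res.ok) throw res;
    },
    {
      // refetch the details the consumer group pages render from
      onSuccess: () =>
        client.invalidateQueries([
          'clusters',
          clusterName,
          'consumer-groups',
          consumerGroupID,
        ]),
    }
  );
}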

View file

@ -1,37 +1,5 @@
import styled from 'styled-components';
export const Wrapper = styled.div`
padding: 16px;
padding-top: 0;
& > form {
display: flex;
flex-direction: column;
gap: 16px;
& > button:last-child {
align-self: flex-start;
}
}
& .multi-select {
height: 32px;
& > .dropdown-container {
height: 32px;
& > .dropdown-heading {
height: 32px;
}
}
}
`;
export const MainSelectors = styled.div`
display: flex;
gap: 16px;
& > * {
flex-grow: 1;
}
`;
import DatePicker from 'react-datepicker';
export const OffsetsWrapper = styled.div`
display: flex;
@ -40,7 +8,26 @@ export const OffsetsWrapper = styled.div`
gap: 16px;
`;
export const OffsetsTitle = styled.h1`
font-size: 18px;
font-weight: 500;
export const DatePickerInput = styled(DatePicker).attrs({
showTimeInput: true,
timeInputLabel: 'Time:',
dateFormat: 'MMMM d, yyyy h:mm aa',
})`
height: 40px;
border: 1px ${({ theme }) => theme.select.borderColor.normal} solid;
border-radius: 4px;
font-size: 14px;
width: 270px;
padding-left: 12px;
background-color: ${({ theme }) => theme.input.backgroundColor.normal};
color: ${({ theme }) => theme.input.color.normal};
&::placeholder {
color: ${({ theme }) => theme.input.color.normal};
}
&:hover {
cursor: pointer;
}
&:focus {
outline: none;
}
`;
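Pinning showTimeInput, timeInputLabel and dateFormat via .attrs means call sites pass only the value and handlers. A usage sketch, assuming the component is imported from this styled file:

import React from 'react';
import * as S from './ResetOffsets.styled';

const TimestampField: React.FC = () => {
  const [date, setDate] = React.useState<Date | null>(new Date());
  // time input and date format come from the .attrs defaults above
  return (
    <S.DatePickerInput selected={date} onChange={(d: Date | null) => setDate(d)} />
  );
};

export default TimestampField;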

View file

@ -1,315 +1,52 @@
import React from 'react';
import { useNavigate } from 'react-router-dom';
import { ConsumerGroupOffsetsResetType } from 'generated-sources';
import { clusterConsumerGroupsPath, ClusterGroupParam } from 'lib/paths';
import {
Controller,
FormProvider,
useFieldArray,
useForm,
} from 'react-hook-form';
import { MultiSelect, Option } from 'react-multi-select-component';
import DatePicker from 'react-datepicker';
import 'react-datepicker/dist/react-datepicker.css';
import groupBy from 'lodash/groupBy';
import PageLoader from 'components/common/PageLoader/PageLoader';
import { ErrorMessage } from '@hookform/error-message';
import Select from 'components/common/Select/Select';
import { InputLabel } from 'components/common/Input/InputLabel.styled';
import { Button } from 'components/common/Button/Button';
import Input from 'components/common/Input/Input';
import { FormError } from 'components/common/Input/Input.styled';
import PageHeading from 'components/common/PageHeading/PageHeading';
import {
fetchConsumerGroupDetails,
selectById,
getAreConsumerGroupDetailsFulfilled,
getIsOffsetReseted,
resetConsumerGroupOffsets,
} from 'redux/reducers/consumerGroups/consumerGroupsSlice';
import { useAppDispatch, useAppSelector } from 'lib/hooks/redux';
import useAppParams from 'lib/hooks/useAppParams';
import { resetLoaderById } from 'redux/reducers/loader/loaderSlice';
import { useConsumerGroupDetails } from 'lib/hooks/api/consumers';
import PageLoader from 'components/common/PageLoader/PageLoader';
import {
ConsumerGroupOffsetsReset,
ConsumerGroupOffsetsResetType,
} from 'generated-sources';
import * as S from './ResetOffsets.styled';
interface FormType {
topic: string;
resetType: ConsumerGroupOffsetsResetType;
partitionsOffsets: { offset: string | undefined; partition: number }[];
resetToTimestamp: Date;
}
import Form from './Form';
const ResetOffsets: React.FC = () => {
const dispatch = useAppDispatch();
const { consumerGroupID, clusterName } = useAppParams<ClusterGroupParam>();
const consumerGroup = useAppSelector((state) =>
selectById(state, consumerGroupID)
);
const routerParams = useAppParams<ClusterGroupParam>();
const isFetched = useAppSelector(getAreConsumerGroupDetailsFulfilled);
const isOffsetReseted = useAppSelector(getIsOffsetReseted);
const consumerGroup = useConsumerGroupDetails(routerParams);
React.useEffect(() => {
dispatch(fetchConsumerGroupDetails({ clusterName, consumerGroupID }));
}, [clusterName, consumerGroupID, dispatch]);
const [uniqueTopics, setUniqueTopics] = React.useState<string[]>([]);
const [selectedPartitions, setSelectedPartitions] = React.useState<Option[]>(
[]
);
const methods = useForm<FormType>({
mode: 'onChange',
defaultValues: {
resetType: ConsumerGroupOffsetsResetType.EARLIEST,
topic: '',
partitionsOffsets: [],
},
});
const {
handleSubmit,
setValue,
watch,
control,
setError,
clearErrors,
formState: { errors, isValid },
} = methods;
const { fields } = useFieldArray({
control,
name: 'partitionsOffsets',
});
const resetTypeValue = watch('resetType');
const topicValue = watch('topic');
const offsetsValue = watch('partitionsOffsets');
React.useEffect(() => {
if (isFetched && consumerGroup?.partitions) {
setValue('topic', consumerGroup.partitions[0].topic);
setUniqueTopics(Object.keys(groupBy(consumerGroup.partitions, 'topic')));
}
}, [consumerGroup?.partitions, isFetched, setValue]);
const onSelectedPartitionsChange = (value: Option[]) => {
clearErrors();
setValue(
'partitionsOffsets',
value.map((partition) => {
const currentOffset = offsetsValue.find(
(offset) => offset.partition === partition.value
);
return {
offset: currentOffset ? currentOffset?.offset : undefined,
partition: partition.value,
};
})
);
setSelectedPartitions(value);
};
React.useEffect(() => {
onSelectedPartitionsChange([]);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [topicValue]);
const onSubmit = (data: FormType) => {
const augmentedData = {
...data,
partitions: selectedPartitions.map((partition) => partition.value),
partitionsOffsets: data.partitionsOffsets as {
offset: string;
partition: number;
}[],
};
let isValidAugmentedData = true;
if (augmentedData.resetType === ConsumerGroupOffsetsResetType.OFFSET) {
augmentedData.partitionsOffsets.forEach((offset, index) => {
if (!offset.offset) {
setError(`partitionsOffsets.${index}.offset`, {
type: 'manual',
message: "This field shouldn't be empty!",
});
isValidAugmentedData = false;
}
});
} else if (
augmentedData.resetType === ConsumerGroupOffsetsResetType.TIMESTAMP
) {
if (!augmentedData.resetToTimestamp) {
setError(`resetToTimestamp`, {
type: 'manual',
message: "This field shouldn't be empty!",
});
isValidAugmentedData = false;
}
}
if (isValidAugmentedData) {
dispatch(
resetConsumerGroupOffsets({
clusterName,
consumerGroupID,
requestBody: augmentedData,
})
);
}
};
const navigate = useNavigate();
React.useEffect(() => {
if (isOffsetReseted) {
dispatch(resetLoaderById('consumerGroups/resetConsumerGroupOffsets'));
navigate('../');
}
}, [clusterName, consumerGroupID, dispatch, navigate, isOffsetReseted]);
if (!isFetched || !consumerGroup) {
if (consumerGroup.isLoading || !consumerGroup.isSuccess)
return <PageLoader />;
}
const partitions = consumerGroup.data.partitions || [];
const { topic } = partitions[0];
const uniqTopics = Array.from(
new Set(partitions.map((partition) => partition.topic))
);
const defaultValues: ConsumerGroupOffsetsReset = {
resetType: ConsumerGroupOffsetsResetType.EARLIEST,
topic,
partitionsOffsets: [],
resetToTimestamp: new Date().getTime(),
};
return (
<FormProvider {...methods}>
<>
<PageHeading
text="Reset offsets"
backTo={clusterConsumerGroupsPath(clusterName)}
backTo={clusterConsumerGroupsPath(routerParams.clusterName)}
backText="Consumers"
/>
<S.Wrapper>
<form onSubmit={handleSubmit(onSubmit)}>
<S.MainSelectors>
<div>
<InputLabel id="topicLabel">Topic</InputLabel>
<Controller
control={control}
name="topic"
render={({ field: { name, onChange, value } }) => (
<Select
id="topic"
selectSize="M"
aria-labelledby="topicLabel"
minWidth="100%"
name={name}
onChange={onChange}
defaultValue={value}
value={value}
options={uniqueTopics.map((topic) => ({
value: topic,
label: topic,
}))}
/>
)}
/>
</div>
<div>
<InputLabel id="resetTypeLabel">Reset Type</InputLabel>
<Controller
control={control}
name="resetType"
render={({ field: { name, onChange, value } }) => (
<Select
id="resetType"
selectSize="M"
aria-labelledby="resetTypeLabel"
minWidth="100%"
name={name}
onChange={onChange}
value={value}
options={Object.values(ConsumerGroupOffsetsResetType).map(
(type) => ({ value: type, label: type })
)}
/>
)}
/>
</div>
<div>
<InputLabel>Partitions</InputLabel>
<MultiSelect
options={
consumerGroup.partitions
?.filter((p) => p.topic === topicValue)
.map((p) => ({
label: `Partition #${p.partition.toString()}`,
value: p.partition,
})) || []
}
value={selectedPartitions}
onChange={onSelectedPartitionsChange}
labelledBy="Select partitions"
/>
</div>
</S.MainSelectors>
{resetTypeValue === ConsumerGroupOffsetsResetType.TIMESTAMP &&
selectedPartitions.length > 0 && (
<div>
<InputLabel>Timestamp</InputLabel>
<Controller
control={control}
name="resetToTimestamp"
render={({ field: { onChange, onBlur, value, ref } }) => (
<DatePicker
ref={ref}
selected={value}
onChange={onChange}
onBlur={onBlur}
showTimeInput
timeInputLabel="Time:"
dateFormat="MMMM d, yyyy h:mm aa"
/>
)}
/>
<ErrorMessage
errors={errors}
name="resetToTimestamp"
render={({ message }) => <FormError>{message}</FormError>}
/>
</div>
)}
{resetTypeValue === ConsumerGroupOffsetsResetType.OFFSET &&
selectedPartitions.length > 0 && (
<div>
<S.OffsetsTitle>Offsets</S.OffsetsTitle>
<S.OffsetsWrapper>
{fields.map((field, index) => (
<div key={field.id}>
<InputLabel htmlFor={`partitionsOffsets.${index}.offset`}>
Partition #{field.partition}
</InputLabel>
<Input
id={`partitionsOffsets.${index}.offset`}
type="number"
name={`partitionsOffsets.${index}.offset` as const}
hookFormOptions={{
shouldUnregister: true,
min: {
value: 0,
message: 'must be greater than or equal to 0',
},
}}
defaultValue={field.offset}
/>
<ErrorMessage
errors={errors}
name={`partitionsOffsets.${index}.offset`}
render={({ message }) => (
<FormError>{message}</FormError>
)}
/>
</div>
))}
</S.OffsetsWrapper>
</div>
)}
<Button
buttonSize="M"
buttonType="primary"
type="submit"
disabled={!isValid || selectedPartitions.length === 0}
>
Submit
</Button>
</form>
</S.Wrapper>
</FormProvider>
<Form
defaultValues={defaultValues}
topics={uniqTopics}
partitions={partitions}
/>
</>
);
};
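The container now derives everything from the fetched details instead of keeping component state: the topic list falls out of a Set over the partitions, replacing the old lodash groupBy plus useState pair. The derivation in isolation, with sample data for illustration:

const partitions = [
  { topic: 'orders', partition: 0 },
  { topic: 'orders', partition: 1 },
  { topic: 'payments', partition: 0 },
];

// same expression as in the component above
const uniqTopics = Array.from(new Set(partitions.map((p) => p.topic)));
// -> ['orders', 'payments']; partitions[0].topic seeds the form's default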

View file

@ -1,158 +0,0 @@
import React from 'react';
import fetchMock from 'fetch-mock';
import { act, screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { render, WithRoute } from 'lib/testHelpers';
import { clusterConsumerGroupResetOffsetsPath } from 'lib/paths';
import { consumerGroupPayload } from 'redux/reducers/consumerGroups/__test__/fixtures';
import ResetOffsets from 'components/ConsumerGroups/Details/ResetOffsets/ResetOffsets';
const clusterName = 'cluster1';
const { groupId } = consumerGroupPayload;
const renderComponent = () =>
render(
<WithRoute path={clusterConsumerGroupResetOffsetsPath()}>
<ResetOffsets />
</WithRoute>,
{
initialEntries: [
clusterConsumerGroupResetOffsetsPath(
clusterName,
consumerGroupPayload.groupId
),
],
}
);
const resetConsumerGroupOffsetsMockCalled = () =>
expect(
fetchMock.called(
`/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`
)
).toBeTruthy();
const selectResetTypeAndPartitions = async (resetType: string) => {
await userEvent.click(screen.getByLabelText('Reset Type'));
await userEvent.click(screen.getByText(resetType));
await userEvent.click(screen.getByText('Select...'));
await userEvent.click(screen.getByText('Partition #0'));
};
const resetConsumerGroupOffsetsWith = async (
resetType: string,
offset: null | number = null
) => {
await userEvent.click(screen.getByLabelText('Reset Type'));
const options = screen.getAllByText(resetType);
await userEvent.click(options.length > 1 ? options[1] : options[0]);
await userEvent.click(screen.getByText('Select...'));
await userEvent.click(screen.getByText('Partition #0'));
fetchMock.postOnce(
`/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`,
200,
{
body: {
topic: '__amazon_msk_canary',
resetType,
partitions: [0],
partitionsOffsets: [{ partition: 0, offset }],
},
}
);
await userEvent.click(screen.getByText('Submit'));
await waitFor(() => resetConsumerGroupOffsetsMockCalled());
};
describe('ResetOffsets', () => {
afterEach(() => {
fetchMock.reset();
});
xit('renders progress bar for initial state', async () => {
fetchMock.getOnce(
`/api/clusters/${clusterName}/consumer-groups/${groupId}`,
404
);
await act(() => {
renderComponent();
});
expect(screen.getByRole('progressbar')).toBeInTheDocument();
});
describe('with consumer group', () => {
describe('submit handles resetConsumerGroupOffsets', () => {
beforeEach(async () => {
const fetchConsumerGroupMock = fetchMock.getOnce(
`/api/clusters/${clusterName}/consumer-groups/${groupId}`,
consumerGroupPayload
);
await act(() => {
renderComponent();
});
expect(fetchConsumerGroupMock.called()).toBeTruthy();
});
it('calls resetConsumerGroupOffsets with EARLIEST', async () => {
await resetConsumerGroupOffsetsWith('EARLIEST');
});
it('calls resetConsumerGroupOffsets with LATEST', async () => {
await resetConsumerGroupOffsetsWith('LATEST');
});
it('calls resetConsumerGroupOffsets with OFFSET', async () => {
await selectResetTypeAndPartitions('OFFSET');
fetchMock.postOnce(
`/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`,
200,
{
body: {
topic: '__amazon_msk_canary',
resetType: 'OFFSET',
partitions: [0],
partitionsOffsets: [{ partition: 0, offset: 10 }],
},
}
);
await userEvent.click(screen.getAllByLabelText('Partition #0')[1]);
await userEvent.keyboard('10');
await userEvent.click(screen.getByText('Submit'));
await resetConsumerGroupOffsetsMockCalled();
});
// focus doesn't work for datepicker
it.skip('calls resetConsumerGroupOffsets with TIMESTAMP', async () => {
await selectResetTypeAndPartitions('TIMESTAMP');
const resetConsumerGroupOffsetsMock = fetchMock.postOnce(
`/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`,
200,
{
body: {
topic: '__amazon_msk_canary',
resetType: 'OFFSET',
partitions: [0],
partitionsOffsets: [{ partition: 0, offset: 10 }],
},
}
);
await userEvent.click(screen.getByText('Submit'));
await waitFor(() =>
expect(
screen.getByText("This field shouldn't be empty!")
).toBeInTheDocument()
);
await waitFor(() =>
expect(
resetConsumerGroupOffsetsMock.called(
`/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`
)
).toBeFalsy()
);
});
});
});
});

View file

@ -2,9 +2,9 @@ import React from 'react';
import { clusterConsumerGroupDetailsPath } from 'lib/paths';
import { screen } from '@testing-library/react';
import TopicContents from 'components/ConsumerGroups/Details/TopicContents/TopicContents';
import { consumerGroupPayload } from 'redux/reducers/consumerGroups/__test__/fixtures';
import { render, WithRoute } from 'lib/testHelpers';
import { ConsumerGroupTopicPartition } from 'generated-sources';
import { consumerGroupPayload } from 'lib/fixtures/consumerGroups';
const clusterName = 'cluster1';

View file

@ -1,114 +0,0 @@
import Details from 'components/ConsumerGroups/Details/Details';
import React from 'react';
import fetchMock from 'fetch-mock';
import { render, WithRoute } from 'lib/testHelpers';
import {
clusterConsumerGroupDetailsPath,
clusterConsumerGroupResetRelativePath,
} from 'lib/paths';
import { consumerGroupPayload } from 'redux/reducers/consumerGroups/__test__/fixtures';
import {
screen,
waitFor,
waitForElementToBeRemoved,
} from '@testing-library/dom';
import userEvent from '@testing-library/user-event';
const clusterName = 'cluster1';
const { groupId } = consumerGroupPayload;
const mockNavigate = jest.fn();
jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useNavigate: () => mockNavigate,
}));
const renderComponent = () => {
render(
<WithRoute path={clusterConsumerGroupDetailsPath()}>
<Details />
</WithRoute>,
{ initialEntries: [clusterConsumerGroupDetailsPath(clusterName, groupId)] }
);
};
describe('Details component', () => {
afterEach(() => {
fetchMock.reset();
mockNavigate.mockClear();
});
describe('when consumer groups are NOT fetched', () => {
it('renders progress bar for initial state', () => {
fetchMock.getOnce(
`/api/clusters/${clusterName}/consumer-groups/${groupId}`,
404
);
renderComponent();
expect(screen.getByRole('progressbar')).toBeInTheDocument();
});
});
describe('when consumer groups are fetched', () => {
beforeEach(async () => {
const fetchConsumerGroupMock = fetchMock.getOnce(
`/api/clusters/${clusterName}/consumer-groups/${groupId}`,
consumerGroupPayload
);
renderComponent();
await waitForElementToBeRemoved(() => screen.getByRole('progressbar'));
await waitFor(() => expect(fetchConsumerGroupMock.called()).toBeTruthy());
});
it('renders component', () => {
expect(screen.getByRole('heading')).toBeInTheDocument();
expect(screen.getByText(groupId)).toBeInTheDocument();
expect(screen.getByRole('table')).toBeInTheDocument();
expect(screen.getAllByRole('columnheader').length).toEqual(2);
expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
});
it('handles [Reset offset] click', async () => {
await userEvent.click(screen.getByText('Reset offset'));
expect(mockNavigate).toHaveBeenLastCalledWith(
clusterConsumerGroupResetRelativePath
);
});
it('renders search input', async () => {
expect(
screen.getByPlaceholderText('Search by Topic Name')
).toBeInTheDocument();
});
it('shows confirmation modal on consumer group delete', async () => {
expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
await userEvent.click(screen.getByText('Delete consumer group'));
await waitFor(() =>
expect(screen.queryByRole('dialog')).toBeInTheDocument()
);
await userEvent.click(screen.getByText('Cancel'));
expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
});
it('handles [Delete consumer group] click', async () => {
expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
await userEvent.click(screen.getByText('Delete consumer group'));
expect(screen.queryByRole('dialog')).toBeInTheDocument();
const deleteConsumerGroupMock = fetchMock.deleteOnce(
`/api/clusters/${clusterName}/consumer-groups/${groupId}`,
200
);
await waitFor(() => {
userEvent.click(screen.getByRole('button', { name: 'Confirm' }));
});
expect(deleteConsumerGroupMock.called()).toBeTruthy();
await waitForElementToBeRemoved(() => screen.queryByRole('dialog'));
await waitFor(() => expect(mockNavigate).toHaveBeenLastCalledWith('../'));
});
});
});
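These Redux-era tests are deleted rather than ported. Equivalent coverage against the react-query version would mock the HTTP layer and render inside a QueryClientProvider; a sketch of such a helper (the repo's lib/testHelpers render presumably already does this, so treat the details as assumptions):

import React from 'react';
import { render } from '@testing-library/react';
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';

export const renderWithClient = (ui: React.ReactElement) => {
  const client = new QueryClient({
    // retries off so a mocked 404 fails immediately instead of retrying
    defaultOptions: { queries: { retry: false, suspense: false } },
  });
  return render(<QueryClientProvider client={client}>{ui}</QueryClientProvider>);
};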

View file

@ -1,48 +0,0 @@
import React from 'react';
import { clusterConsumerGroupDetailsPath } from 'lib/paths';
import { screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import ListItem from 'components/ConsumerGroups/Details/ListItem';
import { consumerGroupPayload } from 'redux/reducers/consumerGroups/__test__/fixtures';
import { render, WithRoute } from 'lib/testHelpers';
import { ConsumerGroupTopicPartition } from 'generated-sources';
const clusterName = 'cluster1';
const renderComponent = (consumers: ConsumerGroupTopicPartition[] = []) =>
render(
<WithRoute path={clusterConsumerGroupDetailsPath()}>
<table>
<tbody>
<ListItem
clusterName={clusterName}
name={clusterName}
consumers={consumers}
/>
</tbody>
</table>
</WithRoute>,
{
initialEntries: [
clusterConsumerGroupDetailsPath(
clusterName,
consumerGroupPayload.groupId
),
],
}
);
describe('ListItem', () => {
beforeEach(() => renderComponent(consumerGroupPayload.partitions));
it('should render list item with topic content closed and check if element exists', () => {
expect(screen.getByRole('row')).toBeInTheDocument();
});
it('should render list item with topic content open', async () => {
await userEvent.click(
screen.getByRole('cell', { name: 'cluster1' }).children[0].children[0]
);
expect(screen.getByText('Consumer ID')).toBeInTheDocument();
});
});

View file

@ -7,41 +7,29 @@ import {
ConsumerGroupOrdering,
SortOrder,
} from 'generated-sources';
import { useAppDispatch } from 'lib/hooks/redux';
import useAppParams from 'lib/hooks/useAppParams';
import { clusterConsumerGroupDetailsPath, ClusterNameRoute } from 'lib/paths';
import { fetchConsumerGroupsPaged } from 'redux/reducers/consumerGroups/consumerGroupsSlice';
import { ColumnDef } from '@tanstack/react-table';
import Table, { TagCell, LinkCell } from 'components/common/NewTable';
import { useNavigate, useSearchParams } from 'react-router-dom';
import { PER_PAGE } from 'lib/constants';
import { useConsumerGroups } from 'lib/hooks/api/consumers';
export interface Props {
consumerGroups: ConsumerGroupDetails[];
totalPages: number;
}
const List: React.FC<Props> = ({ consumerGroups, totalPages }) => {
const dispatch = useAppDispatch();
const List = () => {
const { clusterName } = useAppParams<ClusterNameRoute>();
const [searchParams] = useSearchParams();
const navigate = useNavigate();
React.useEffect(() => {
dispatch(
fetchConsumerGroupsPaged({
clusterName,
orderBy:
(searchParams.get('sortBy') as ConsumerGroupOrdering) || undefined,
sortOrder:
(searchParams.get('sortDirection')?.toUpperCase() as SortOrder) ||
undefined,
page: Number(searchParams.get('page') || 1),
perPage: Number(searchParams.get('perPage') || PER_PAGE),
search: searchParams.get('q') || '',
})
);
}, [clusterName, dispatch, searchParams]);
const consumerGroups = useConsumerGroups({
clusterName,
orderBy: (searchParams.get('sortBy') as ConsumerGroupOrdering) || undefined,
sortOrder:
(searchParams.get('sortDirection')?.toUpperCase() as SortOrder) ||
undefined,
page: Number(searchParams.get('page') || 1),
perPage: Number(searchParams.get('perPage') || PER_PAGE),
search: searchParams.get('q') || '',
});
const columns = React.useMemo<ColumnDef<ConsumerGroupDetails>[]>(
() => [
@ -95,9 +83,13 @@ const List: React.FC<Props> = ({ consumerGroups, totalPages }) => {
</ControlPanelWrapper>
<Table
columns={columns}
pageCount={totalPages}
data={consumerGroups}
emptyMessage="No active consumer groups found"
pageCount={consumerGroups.data?.pageCount || 0}
data={consumerGroups.data?.consumerGroups || []}
emptyMessage={
consumerGroups.isSuccess
? 'No active consumer groups found'
: 'Loading...'
}
serverSideProcessing
enableSorting
onRowClick={({ original }) =>
@ -105,6 +97,7 @@ const List: React.FC<Props> = ({ consumerGroups, totalPages }) => {
clusterConsumerGroupDetailsPath(clusterName, original.groupId)
)
}
disabled={consumerGroups.isFetching}
/>
</>
);
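List now feeds the URL's sorting and paging state straight into useConsumerGroups and renders from its result, which is why the table is disabled while isFetching is true. A sketch of such a hook; the paged endpoint and the keepPreviousData choice are assumptions that would explain the disabled-while-fetching behavior:

import { useQuery } from '@tanstack/react-query';

interface ConsumerGroupsParams {
  clusterName: string;
  orderBy?: string;
  sortOrder?: string;
  page?: number;
  perPage?: number;
  search: string;
}

export function useConsumerGroups({ clusterName, ...query }: ConsumerGroupsParams) {
  return useQuery(
    ['clusters', clusterName, 'consumer-groups', query],
    async () => {
      const qs = new URLSearchParams(
        Object.entries(query)
          .filter(([, v]) => v !== undefined)
          .map(([k, v]) => [k, String(v)])
      );
      const res = await fetch(
        `/api/clusters/${clusterName}/consumer-groups/paged?${qs}`
      );
      if (!res.ok) throw res;
      return res.json(); // { consumerGroups, pageCount }, per the usage above
    },
    // keep the previous page rendered while the next one loads
    { keepPreviousData: true, suspense: false }
  );
}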

View file

@ -1,16 +0,0 @@
import { connect } from 'react-redux';
import { RootState } from 'redux/interfaces';
import {
getConsumerGroupsOrderBy,
getConsumerGroupsTotalPages,
selectAll,
} from 'redux/reducers/consumerGroups/consumerGroupsSlice';
import List from 'components/ConsumerGroups/List/List';
const mapStateToProps = (state: RootState) => ({
consumerGroups: selectAll(state),
orderBy: getConsumerGroupsOrderBy(state),
totalPages: getConsumerGroupsTotalPages(state),
});
export default connect(mapStateToProps)(List);

View file

@ -1,60 +0,0 @@
import React from 'react';
import List, { Props } from 'components/ConsumerGroups/List/List';
import { screen } from '@testing-library/react';
import { render } from 'lib/testHelpers';
import { consumerGroups as consumerGroupMock } from 'redux/reducers/consumerGroups/__test__/fixtures';
import { clusterConsumerGroupDetailsPath } from 'lib/paths';
import userEvent from '@testing-library/user-event';
import ListContainer from 'components/ConsumerGroups/List/ListContainer';
const mockedUsedNavigate = jest.fn();
jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useNavigate: () => mockedUsedNavigate,
}));
describe('ListContainer', () => {
it('renders correctly', () => {
render(<ListContainer />);
expect(screen.getByRole('table')).toBeInTheDocument();
});
});
describe('List', () => {
const renderComponent = (props: Partial<Props> = {}) => {
const { consumerGroups, totalPages } = props;
return render(
<List
consumerGroups={consumerGroups || []}
totalPages={totalPages || 1}
/>
);
};
it('renders empty table', () => {
renderComponent();
expect(screen.getByRole('table')).toBeInTheDocument();
expect(
screen.getByText('No active consumer groups found')
).toBeInTheDocument();
});
describe('consumerGroups are fetched', () => {
beforeEach(() => renderComponent({ consumerGroups: consumerGroupMock }));
it('renders all rows with consumers', () => {
expect(screen.getByText('groupId1')).toBeInTheDocument();
expect(screen.getByText('groupId2')).toBeInTheDocument();
});
it('handles onRowClick', async () => {
const row = screen.getByRole('row', { name: 'groupId1 0 1 1' });
expect(row).toBeInTheDocument();
await userEvent.click(row);
expect(mockedUsedNavigate).toHaveBeenCalledWith(
clusterConsumerGroupDetailsPath(':clusterName', 'groupId1')
);
});
});
});

View file

@ -11,9 +11,7 @@ import { render, WithRoute } from 'lib/testHelpers';
const clusterName = 'cluster1';
jest.mock('components/ConsumerGroups/List/ListContainer', () => () => (
<div>ListContainerMock</div>
));
jest.mock('components/ConsumerGroups/List', () => () => <div>ListPage</div>);
jest.mock('components/ConsumerGroups/Details/Details', () => () => (
<div>DetailsMock</div>
));
@ -35,7 +33,7 @@ const renderComponent = (path?: string) =>
describe('ConsumerGroups', () => {
it('renders ListContainer', async () => {
renderComponent();
expect(screen.getByText('ListContainerMock')).toBeInTheDocument();
expect(screen.getByText('ListPage')).toBeInTheDocument();
});
it('renders ResetOffsets', async () => {
renderComponent(

View file

@ -1,15 +1,109 @@
import React from 'react';
import { Route, Routes } from 'react-router-dom';
import { clusterKsqlDbQueryRelativePath } from 'lib/paths';
import List from 'components/KsqlDb/List/List';
import Query from 'components/KsqlDb/Query/Query';
import useAppParams from 'lib/hooks/useAppParams';
import * as Metrics from 'components/common/Metrics';
import {
clusterKsqlDbQueryRelativePath,
clusterKsqlDbStreamsPath,
clusterKsqlDbStreamsRelativePath,
clusterKsqlDbTablesPath,
clusterKsqlDbTablesRelativePath,
ClusterNameRoute,
} from 'lib/paths';
import PageHeading from 'components/common/PageHeading/PageHeading';
import { ActionButton } from 'components/common/ActionComponent';
import Navbar from 'components/common/Navigation/Navbar.styled';
import { Navigate, NavLink, Route, Routes } from 'react-router-dom';
import { Action, ResourceType } from 'generated-sources';
import { useKsqlkDb } from 'lib/hooks/api/ksqlDb';
import 'ace-builds/src-noconflict/ace';
import TableView from './TableView';
const KsqlDb: React.FC = () => {
const { clusterName } = useAppParams<ClusterNameRoute>();
const [tables, streams] = useKsqlkDb(clusterName);
const isFetching = tables.isFetching || streams.isFetching;
return (
<Routes>
<Route path="/*" element={<List />} />
<Route path={clusterKsqlDbQueryRelativePath} element={<Query />} />
</Routes>
<>
<PageHeading text="KSQL DB">
<ActionButton
to={clusterKsqlDbQueryRelativePath}
buttonType="primary"
buttonSize="M"
permission={{
resource: ResourceType.KSQL,
action: Action.EXECUTE,
}}
>
Execute KSQL Request
</ActionButton>
</PageHeading>
<Metrics.Wrapper>
<Metrics.Section>
<Metrics.Indicator
label="Tables"
title="Tables"
fetching={isFetching}
>
{tables.isSuccess ? tables.data.length : '-'}
</Metrics.Indicator>
<Metrics.Indicator
label="Streams"
title="Streams"
fetching={isFetching}
>
{streams.isSuccess ? streams.data.length : '-'}
</Metrics.Indicator>
</Metrics.Section>
</Metrics.Wrapper>
<div>
<Navbar role="navigation">
<NavLink
to={clusterKsqlDbTablesPath(clusterName)}
className={({ isActive }) => (isActive ? 'is-active' : '')}
end
>
Tables
</NavLink>
<NavLink
to={clusterKsqlDbStreamsPath(clusterName)}
className={({ isActive }) => (isActive ? 'is-active' : '')}
end
>
Streams
</NavLink>
</Navbar>
<Routes>
<Route
index
element={<Navigate to={clusterKsqlDbTablesRelativePath} />}
/>
<Route
path={clusterKsqlDbTablesRelativePath}
element={
<TableView
fetching={tables.isFetching}
rows={tables.data || []}
/>
}
/>
<Route
path={clusterKsqlDbStreamsRelativePath}
element={
<TableView
fetching={streams.isFetching}
rows={streams.data || []}
/>
}
/>
<Route path={clusterKsqlDbQueryRelativePath} element={<Query />} />
</Routes>
</div>
</>
);
};
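The standalone List component is gone; KsqlDb itself now owns the header, metrics and tab routes, and pulls tables and streams as a pair of parallel queries via useKsqlkDb. A sketch of that hook using the v4 useQueries API; the endpoints are assumptions, but the tuple shape matches the [tables, streams] destructuring above:

import { useQueries } from '@tanstack/react-query';

const fetchKsqlObjects = async (clusterName: string, kind: 'tables' | 'streams') => {
  const res = await fetch(`/api/clusters/${clusterName}/ksql/${kind}`);
  if (!res.ok) throw res;
  return res.json();
};

export function useKsqlkDb(clusterName: string) {
  // two independent queries; each carries its own isFetching/isSuccess
  return useQueries({
    queries: [
      {
        queryKey: ['clusters', clusterName, 'ksqlDb', 'tables'],
        queryFn: () => fetchKsqlObjects(clusterName, 'tables'),
        suspense: false,
      },
      {
        queryKey: ['clusters', clusterName, 'ksqlDb', 'streams'],
        queryFn: () => fetchKsqlObjects(clusterName, 'streams'),
        suspense: false,
      },
    ],
  });
}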

View file

@ -1,58 +0,0 @@
import React from 'react';
import PageLoader from 'components/common/PageLoader/PageLoader';
import { KsqlStreamDescription, KsqlTableDescription } from 'generated-sources';
import { ksqlRowData } from 'components/KsqlDb/List/KsqlDbItem/utils/ksqlRowData';
import Table from 'components/common/NewTable';
import { ColumnDef } from '@tanstack/react-table';
export enum KsqlDbItemType {
Tables = 'tables',
Streams = 'streams',
}
interface RowsType {
tables: KsqlTableDescription[];
streams: KsqlStreamDescription[];
}
export interface KsqlDbItemProps {
type: KsqlDbItemType;
fetching: boolean;
rows: RowsType;
}
export interface KsqlTableState {
name: string;
topic: string;
keyFormat: string;
valueFormat: string;
isWindowed: string;
}
const KsqlDbItem: React.FC<KsqlDbItemProps> = ({ type, fetching, rows }) => {
const preparedRows = rows[type]?.map(ksqlRowData) || [];
const columns = React.useMemo<ColumnDef<KsqlTableState>[]>(
() => [
{ header: 'Name', accessorKey: 'name' },
{ header: 'Topic', accessorKey: 'topic' },
{ header: 'Key Format', accessorKey: 'keyFormat' },
{ header: 'Value Format', accessorKey: 'valueFormat' },
{ header: 'Is Windowed', accessorKey: 'isWindowed' },
],
[]
);
if (fetching) {
return <PageLoader />;
}
return (
<Table
data={preparedRows}
columns={columns}
emptyMessage="No tables or streams found"
enableSorting={false}
/>
);
};
export default KsqlDbItem;

View file

@ -1,59 +0,0 @@
import React from 'react';
import { render, WithRoute } from 'lib/testHelpers';
import { clusterKsqlDbTablesPath } from 'lib/paths';
import KsqlDbItem, {
KsqlDbItemProps,
KsqlDbItemType,
} from 'components/KsqlDb/List/KsqlDbItem/KsqlDbItem';
import { screen } from '@testing-library/dom';
import { fetchKsqlDbTablesPayload } from 'redux/reducers/ksqlDb/__test__/fixtures';
describe('KsqlDbItem', () => {
const tablesPathname = clusterKsqlDbTablesPath();
const renderComponent = (props: Partial<KsqlDbItemProps> = {}) => {
render(
<WithRoute path={tablesPathname}>
<KsqlDbItem
type={KsqlDbItemType.Tables}
fetching={false}
rows={{ tables: [], streams: [] }}
{...props}
/>
</WithRoute>,
{
initialEntries: [clusterKsqlDbTablesPath()],
}
);
};
it('renders progressbar when fetching tables and streams', () => {
renderComponent({ fetching: true });
expect(screen.getByRole('progressbar')).toBeInTheDocument();
});
it('shows an empty message if no data is found', () => {
renderComponent({});
expect(screen.getByText('No tables or streams found')).toBeInTheDocument();
});
it('renders with tables', () => {
renderComponent({
rows: {
tables: fetchKsqlDbTablesPayload.tables,
streams: [],
},
});
expect(screen.getByRole('table').querySelectorAll('td')).toHaveLength(10);
});
it('renders with streams', () => {
renderComponent({
type: KsqlDbItemType.Streams,
rows: {
tables: [],
streams: fetchKsqlDbTablesPayload.streams,
},
});
expect(screen.getByRole('table').querySelectorAll('td')).toHaveLength(10);
});
});

View file

@ -1,12 +0,0 @@
import { KsqlDescription } from 'redux/interfaces/ksqlDb';
import { KsqlTableState } from 'components/KsqlDb/List/KsqlDbItem/KsqlDbItem';
export const ksqlRowData = (data: KsqlDescription): KsqlTableState => {
return {
name: data.name || '',
topic: data.topic || '',
keyFormat: data.keyFormat || '',
valueFormat: data.valueFormat || '',
isWindowed: 'isWindowed' in data ? String(data.isWindowed) : '-',
};
};

View file

@ -1,111 +0,0 @@
import React, { FC } from 'react';
import useAppParams from 'lib/hooks/useAppParams';
import * as Metrics from 'components/common/Metrics';
import { getKsqlDbTables } from 'redux/reducers/ksqlDb/selectors';
import {
clusterKsqlDbQueryRelativePath,
clusterKsqlDbStreamsPath,
clusterKsqlDbStreamsRelativePath,
clusterKsqlDbTablesPath,
clusterKsqlDbTablesRelativePath,
ClusterNameRoute,
} from 'lib/paths';
import PageHeading from 'components/common/PageHeading/PageHeading';
import { ActionButton } from 'components/common/ActionComponent';
import Navbar from 'components/common/Navigation/Navbar.styled';
import { Navigate, NavLink, Route, Routes } from 'react-router-dom';
import { fetchKsqlDbTables } from 'redux/reducers/ksqlDb/ksqlDbSlice';
import { useAppDispatch, useAppSelector } from 'lib/hooks/redux';
import { Action, ResourceType } from 'generated-sources';
import KsqlDbItem, { KsqlDbItemType } from './KsqlDbItem/KsqlDbItem';
const List: FC = () => {
const { clusterName } = useAppParams<ClusterNameRoute>();
const dispatch = useAppDispatch();
const { rows, fetching, tablesCount, streamsCount } =
useAppSelector(getKsqlDbTables);
React.useEffect(() => {
dispatch(fetchKsqlDbTables(clusterName));
}, [clusterName, dispatch]);
return (
<>
<PageHeading text="KSQL DB">
<ActionButton
to={clusterKsqlDbQueryRelativePath}
buttonType="primary"
buttonSize="M"
permission={{
resource: ResourceType.KSQL,
action: Action.EXECUTE,
}}
>
Execute KSQL Request
</ActionButton>
</PageHeading>
<Metrics.Wrapper>
<Metrics.Section>
<Metrics.Indicator label="Tables" title="Tables" fetching={fetching}>
{tablesCount}
</Metrics.Indicator>
<Metrics.Indicator
label="Streams"
title="Streams"
fetching={fetching}
>
{streamsCount}
</Metrics.Indicator>
</Metrics.Section>
</Metrics.Wrapper>
<div>
<Navbar role="navigation">
<NavLink
to={clusterKsqlDbTablesPath(clusterName)}
className={({ isActive }) => (isActive ? 'is-active' : '')}
end
>
Tables
</NavLink>
<NavLink
to={clusterKsqlDbStreamsPath(clusterName)}
className={({ isActive }) => (isActive ? 'is-active' : '')}
end
>
Streams
</NavLink>
</Navbar>
<Routes>
<Route
index
element={<Navigate to={clusterKsqlDbTablesRelativePath} />}
/>
<Route
path={clusterKsqlDbTablesRelativePath}
element={
<KsqlDbItem
type={KsqlDbItemType.Tables}
fetching={fetching}
rows={rows}
/>
}
/>
<Route
path={clusterKsqlDbStreamsRelativePath}
element={
<KsqlDbItem
type={KsqlDbItemType.Streams}
fetching={fetching}
rows={rows}
/>
}
/>
</Routes>
</div>
</>
);
};
export default List;

View file

@ -1,22 +0,0 @@
import React from 'react';
import List from 'components/KsqlDb/List/List';
import { render } from 'lib/testHelpers';
import fetchMock from 'fetch-mock';
import { screen } from '@testing-library/dom';
import { act } from '@testing-library/react';
describe('KsqlDb List', () => {
const renderComponent = async () => {
await act(() => {
render(<List />);
});
};
afterEach(() => fetchMock.reset());
it('renders List component with Tables and Streams tabs', async () => {
await renderComponent();
const Tables = screen.getByTitle('Tables');
const Streams = screen.getByTitle('Streams');
expect(Tables).toBeInTheDocument();
expect(Streams).toBeInTheDocument();
});
});

View file

@ -1,9 +0,0 @@
import PageLoader from 'components/common/PageLoader/PageLoader';
import styled from 'styled-components';
export const ContinuousLoader = styled(PageLoader)`
& > div {
transform: scale(0.5);
padding-top: 0;
}
`;

View file

@ -1,223 +1,54 @@
import React, { useCallback, useEffect, FC, useState } from 'react';
import React from 'react';
import useAppParams from 'lib/hooks/useAppParams';
import TableRenderer from 'components/KsqlDb/Query/renderer/TableRenderer/TableRenderer';
import { ClusterNameRoute } from 'lib/paths';
import {
executeKsql,
resetExecutionResult,
} from 'redux/reducers/ksqlDb/ksqlDbSlice';
import { getKsqlExecution } from 'redux/reducers/ksqlDb/selectors';
import { BASE_PARAMS } from 'lib/constants';
import { KsqlResponse, KsqlTableResponse } from 'generated-sources';
import { clusterKsqlDbPath, ClusterNameRoute } from 'lib/paths';
import { useAppDispatch, useAppSelector } from 'lib/hooks/redux';
import { showAlert, showSuccessAlert } from 'lib/errorHandling';
import PageHeading from 'components/common/PageHeading/PageHeading';
useExecuteKsqlkDbQueryMutation,
useKsqlkDbSSE,
} from 'lib/hooks/api/ksqlDb';
import type { FormValues } from './QueryForm/QueryForm';
import * as S from './Query.styled';
import QueryForm from './QueryForm/QueryForm';
export const getFormattedErrorFromTableData = (
responseValues: KsqlTableResponse['values']
): { title: string; message: string } => {
// We expect something like this
// [[
// "@type",
// "error_code",
// "message",
// "statementText"?,
// "entities"?
// ]],
// or
// [["message"]]
if (!responseValues || !responseValues.length) {
return {
title: 'Unknown error',
message: 'Received empty response',
};
}
let title = '';
let message = '';
if (responseValues[0].length < 2) {
const [messageText] = responseValues[0];
title = messageText;
} else {
const [type, errorCode, messageText, statementText, entities] =
responseValues[0];
title = `[Error #${errorCode}] ${type}`;
message =
(entities?.length ? `[${entities.join(', ')}] ` : '') +
(statementText ? `"${statementText}" ` : '') +
messageText;
}
return {
title,
message,
};
};
const Query: FC = () => {
const Query = () => {
const { clusterName } = useAppParams<ClusterNameRoute>();
const executeQuery = useExecuteKsqlkDbQueryMutation();
const [pipeId, setPipeId] = React.useState<string | false>(false);
const sseRef = React.useRef<{ sse: EventSource | null; isOpen: boolean }>({
sse: null,
isOpen: false,
});
const [fetching, setFetching] = useState(false);
const dispatch = useAppDispatch();
const sse = useKsqlkDbSSE({ clusterName, pipeId });
const { executionResult } = useAppSelector(getKsqlExecution);
const [KSQLTable, setKSQLTable] = useState<KsqlTableResponse | null>(null);
const isFetching = executeQuery.isLoading || sse.isFetching;
const reset = useCallback(() => {
dispatch(resetExecutionResult());
}, [dispatch]);
useEffect(() => {
return reset;
}, [reset]);
const destroySSE = () => {
if (sseRef.current?.sse) {
sseRef.current.sse.close();
setFetching(false);
sseRef.current.sse = null;
sseRef.current.isOpen = false;
}
const submitHandler = async (values: FormValues) => {
const filtered = values.streamsProperties.filter(({ key }) => key != null);
const streamsProperties = filtered.reduce<Record<string, string>>(
(acc, current) => ({ ...acc, [current.key]: current.value }),
{}
);
await executeQuery.mutateAsync(
{
clusterName,
ksqlCommandV2: {
...values,
streamsProperties:
values.streamsProperties[0].key !== ''
? JSON.parse(JSON.stringify(streamsProperties))
: undefined,
},
},
{ onSuccess: (data) => setPipeId(data.pipeId) }
);
};
const handleSSECancel = useCallback(() => {
reset();
destroySSE();
}, [reset]);
const createSSE = useCallback(
(pipeId: string) => {
const url = `${BASE_PARAMS.basePath}/api/clusters/${clusterName}/ksql/response?pipeId=${pipeId}`;
const sse = new EventSource(url);
sseRef.current.sse = sse;
setFetching(true);
sse.onopen = () => {
sseRef.current.isOpen = true;
};
sse.onmessage = ({ data }) => {
const { table }: KsqlResponse = JSON.parse(data);
if (table) {
switch (table?.header) {
case 'Execution error': {
const { title, message } = getFormattedErrorFromTableData(
table.values
);
const id = `${url}-executionError`;
showAlert('error', { id, title, message });
break;
}
case 'Schema': {
setKSQLTable(table);
break;
}
case 'Row': {
setKSQLTable((PrevKSQLTable) => {
return {
header: PrevKSQLTable?.header,
columnNames: PrevKSQLTable?.columnNames,
values: [
...(PrevKSQLTable?.values || []),
...(table?.values || []),
],
};
});
break;
}
case 'Query Result': {
const id = `${url}-querySuccess`;
showSuccessAlert({ id, title: 'Query succeeded', message: '' });
break;
}
case 'Source Description':
case 'properties':
default: {
setKSQLTable(table);
break;
}
}
}
return sse;
};
sse.onerror = () => {
// if onerror fires before onopen, the server responded without establishing the SSE stream
if (!sseRef.current.isOpen) {
showAlert('error', {
id: `${url}-connectionClosedError`,
title: '',
message: 'SSE connection closed',
});
}
destroySSE();
};
},
[clusterName, dispatch]
);
const submitHandler = useCallback(
(values: FormValues) => {
const filteredProperties = values.streamsProperties.filter(
(property) => property.key != null
);
const streamsProperties = filteredProperties.reduce(
(acc, current) => ({
...acc,
[current.key as keyof string]: current.value,
}),
{} as { [key: string]: string }
);
setFetching(true);
dispatch(
executeKsql({
clusterName,
ksqlCommandV2: {
...values,
streamsProperties:
values.streamsProperties[0].key !== ''
? JSON.parse(JSON.stringify(streamsProperties))
: undefined,
},
})
);
},
[dispatch, clusterName]
);
useEffect(() => {
if (executionResult?.pipeId) {
createSSE(executionResult.pipeId);
}
return () => {
destroySSE();
};
}, [createSSE, executionResult]);
return (
<>
<PageHeading
text="Query"
backText="KSQL DB"
backTo={clusterKsqlDbPath(clusterName)}
/>
<QueryForm
fetching={fetching}
hasResults={!!KSQLTable}
handleClearResults={() => setKSQLTable(null)}
handleSSECancel={handleSSECancel}
fetching={isFetching}
hasResults={!!sse.data && !!pipeId}
resetResults={() => setPipeId(false)}
submitHandler={submitHandler}
/>
{KSQLTable && <TableRenderer table={KSQLTable} />}
{fetching && <S.ContinuousLoader />}
{pipeId && !!sse.data && <TableRenderer table={sse.data} />}
</>
);
};
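
Taken together, the reworked Query.tsx above replaces the Redux dispatch and hand-rolled EventSource management with a mutation that returns a pipeId, which in turn drives the SSE hook. A minimal sketch of the resulting flow, assuming the hook and prop names used above (both hooks are defined in the ksqlDb hooks file later in this diff):

// Sketch only: the pipeId returned by the execute mutation is the sole
// trigger for the SSE hook; resetting it to `false` tears the stream down.
const QueryFlow: React.FC<{ clusterName: string }> = ({ clusterName }) => {
  const [pipeId, setPipeId] = React.useState<string | false>(false);
  const executeQuery = useExecuteKsqlkDbQueryMutation();
  const sse = useKsqlkDbSSE({ clusterName, pipeId });

  const submitHandler = async (values: FormValues) => {
    await executeQuery.mutateAsync(
      { clusterName, ksqlCommandV2: { ...values } },
      { onSuccess: (data) => setPipeId(data.pipeId) } // SSE starts consuming here
    );
  };

  return (
    <QueryForm
      fetching={executeQuery.isLoading || sse.isFetching}
      hasResults={!!sse.data && !!pipeId}
      resetResults={() => setPipeId(false)}
      submitHandler={submitHandler}
    />
  );
};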

View file

@@ -6,13 +6,12 @@ export const QueryWrapper = styled.div`
`;
export const KSQLInputsWrapper = styled.div`
width: 100%;
display: flex;
gap: 24px;
padding-bottom: 16px;
& > div {
flex-grow: 1;
@media screen and (max-width: 769px) {
flex-direction: column;
}
`;
@@ -22,61 +21,23 @@ export const KSQLInputHeader = styled.div`
color: ${({ theme }) => theme.default.color.normal};
`;
export const KSQLButtons = styled.div`
display: flex;
gap: 16px;
`;
export const StreamPropertiesContainer = styled.label`
display: flex;
flex-direction: column;
gap: 10px;
width: 50%;
color: ${({ theme }) => theme.default.color.normal};
`;
export const InputsContainer = styled.div`
overflow: hidden;
width: 100%;
display: flex;
justify-content: center;
gap: 10px;
`;
export const StreamPropertiesInputWrapper = styled.div`
& {
width: 100%;
}
& > input {
width: 100%;
height: 40px;
border: 1px solid grey;
&:focus {
outline: none;
border-color: ${({ theme }) => theme.input.borderColor.focus};
&::placeholder {
color: transparent;
}
}
border-radius: 4px;
font-size: 16px;
padding-left: 15px;
background-color: ${({ theme }) => theme.input.backgroundColor.normal};
color: ${({ theme }) => theme.input.color.normal};
}
`;
export const DeleteButtonWrapper = styled.div`
min-height: 32px;
display: flex;
flex-direction: column;
display: grid;
grid-template-columns: 1fr 1fr 30px;
align-items: center;
justify-self: flex-start;
margin-top: 10px;
gap: 10px;
`;
export const Fieldset = styled.fieldset`
width: 50%;
display: flex;
flex: 1;
flex-direction: column;
gap: 8px;
`;
export const ButtonsContainer = styled.div`
display: flex;
gap: 8px;
`;
export const SQLEditor = styled(BaseSQLEditor)(

View file

@@ -1,22 +1,27 @@
import React, { useCallback, useRef } from 'react';
import React from 'react';
import { FormError } from 'components/common/Input/Input.styled';
import { ErrorMessage } from '@hookform/error-message';
import { useForm, Controller, useFieldArray } from 'react-hook-form';
import {
useForm,
Controller,
useFieldArray,
FormProvider,
} from 'react-hook-form';
import { Button } from 'components/common/Button/Button';
import IconButtonWrapper from 'components/common/Icons/IconButtonWrapper';
import CloseIcon from 'components/common/Icons/CloseIcon';
import { yupResolver } from '@hookform/resolvers/yup';
import yup from 'lib/yupExtended';
import PlusIcon from 'components/common/Icons/PlusIcon';
import ReactAce from 'react-ace/lib/ace';
import ReactAce from 'react-ace';
import Input from 'components/common/Input/Input';
import * as S from './QueryForm.styled';
export interface Props {
interface QueryFormProps {
fetching: boolean;
hasResults: boolean;
handleClearResults: () => void;
handleSSECancel: () => void;
resetResults: () => void;
submitHandler: (values: FormValues) => void;
}
type StreamsPropertiesType = {
@@ -37,20 +42,13 @@ const validationSchema = yup.object({
streamsProperties: yup.array().of(streamsPropertiesSchema),
});
const QueryForm: React.FC<Props> = ({
const QueryForm: React.FC<QueryFormProps> = ({
fetching,
hasResults,
handleClearResults,
handleSSECancel,
submitHandler,
resetResults,
}) => {
const {
handleSubmit,
setValue,
getValues,
control,
formState: { errors },
} = useForm<FormValues>({
const methods = useForm<FormValues>({
mode: 'onTouched',
resolver: yupResolver(validationSchema),
defaultValues: {
@@ -58,7 +56,16 @@ const QueryForm: React.FC<Props> = ({
streamsProperties: [{ key: '', value: '' }],
},
});
const { fields, append, remove } = useFieldArray<
const {
handleSubmit,
setValue,
control,
watch,
formState: { errors, isDirty },
} = methods;
const { fields, append, remove, update } = useFieldArray<
FormValues,
'streamsProperties'
>({
@@ -66,17 +73,24 @@
name: 'streamsProperties',
});
const handleAddNewProperty = useCallback(() => {
if (
getValues().streamsProperties.every((prop) => {
return prop.key;
})
) {
append({ key: '', value: '' });
}
}, []);
const watchStreamProps = watch('streamsProperties');
const inputRef = useRef<ReactAce>(null);
const appendProperty = () => {
append({ key: '', value: '' });
};
const removeProperty = (index: number) => () => {
if (fields.length === 1) {
update(index, { key: '', value: '' });
return;
}
remove(index);
};
const isAppendDisabled =
fetching || !!watchStreamProps.find((field) => !field.key);
const inputRef = React.useRef<ReactAce>(null);
const handleFocus = () => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -87,145 +101,117 @@ const QueryForm: React.FC<Props> = ({
}
};
const handleClear = () => {
handleFocus();
resetResults();
};
return (
<S.QueryWrapper>
<form onSubmit={handleSubmit(submitHandler)}>
<S.KSQLInputsWrapper>
<S.Fieldset aria-labelledby="ksqlLabel">
<S.KSQLInputHeader>
<label id="ksqlLabel">KSQL</label>
<Button
onClick={() => setValue('ksql', '')}
buttonType="primary"
buttonSize="S"
isInverted
>
Clear
</Button>
</S.KSQLInputHeader>
<Controller
control={control}
name="ksql"
render={({ field }) => (
<S.SQLEditor
{...field}
commands={[
{
// commands is array of key bindings.
// name for the key binding.
name: 'commandName',
// key combination used for the command.
bindKey: { win: 'Ctrl-Enter', mac: 'Command-Enter' },
// function to execute when keys are pressed.
exec: () => {
handleSubmit(submitHandler)();
<FormProvider {...methods}>
<S.QueryWrapper>
<form onSubmit={handleSubmit(submitHandler)}>
<S.KSQLInputsWrapper>
<S.Fieldset>
<S.KSQLInputHeader>
<label id="ksqlLabel">KSQL</label>
<Button
onClick={() => setValue('ksql', '')}
buttonType="primary"
buttonSize="S"
isInverted
>
Clear
</Button>
</S.KSQLInputHeader>
<Controller
control={control}
name="ksql"
render={({ field }) => (
<S.SQLEditor
{...field}
commands={[
{
// commands is array of key bindings.
// name for the key binding.
name: 'commandName',
// key combination used for the command.
bindKey: { win: 'Ctrl-Enter', mac: 'Command-Enter' },
// function to execute when keys are pressed.
exec: () => {
handleSubmit(submitHandler)();
},
},
},
]}
readOnly={fetching}
ref={inputRef}
/>
)}
/>
<FormError>
<ErrorMessage errors={errors} name="ksql" />
</FormError>
</S.Fieldset>
]}
readOnly={fetching}
ref={inputRef}
/>
)}
/>
<FormError>
<ErrorMessage errors={errors} name="ksql" />
</FormError>
</S.Fieldset>
<S.StreamPropertiesContainer>
Stream properties:
{fields.map((item, index) => (
<S.InputsContainer key={item.id}>
<S.StreamPropertiesInputWrapper>
<Controller
control={control}
<S.Fieldset>
Stream properties:
{fields.map((field, index) => (
<S.InputsContainer key={field.id}>
<Input
name={`streamsProperties.${index}.key`}
render={({ field }) => (
<input
{...field}
placeholder="Key"
aria-label="key"
type="text"
autoComplete="off"
/>
)}
placeholder="Key"
type="text"
autoComplete="off"
withError
/>
<FormError>
<ErrorMessage
errors={errors}
name={`streamsProperties.${index}.key`}
/>
</FormError>
</S.StreamPropertiesInputWrapper>
<S.StreamPropertiesInputWrapper>
<Controller
control={control}
<Input
name={`streamsProperties.${index}.value`}
render={({ field }) => (
<input
{...field}
placeholder="Value"
aria-label="value"
type="text"
autoComplete="off"
/>
)}
placeholder="Value"
type="text"
autoComplete="off"
withError
/>
<FormError>
<ErrorMessage
errors={errors}
name={`streamsProperties.${index}.value`}
/>
</FormError>
</S.StreamPropertiesInputWrapper>
<S.DeleteButtonWrapper onClick={() => remove(index)}>
<IconButtonWrapper aria-label="deleteProperty">
<IconButtonWrapper
aria-label="deleteProperty"
onClick={removeProperty(index)}
>
<CloseIcon aria-hidden />
</IconButtonWrapper>
</S.DeleteButtonWrapper>
</S.InputsContainer>
))}
</S.InputsContainer>
))}
<Button
type="button"
buttonSize="M"
buttonType="secondary"
disabled={isAppendDisabled}
onClick={appendProperty}
>
<PlusIcon />
Add Stream Property
</Button>
</S.Fieldset>
</S.KSQLInputsWrapper>
<S.ButtonsContainer>
<Button
type="button"
buttonSize="M"
buttonType="secondary"
onClick={handleAddNewProperty}
buttonSize="M"
disabled={fetching || !isDirty || !hasResults}
onClick={handleClear}
>
<PlusIcon />
Add Stream Property
Clear results
</Button>
</S.StreamPropertiesContainer>
</S.KSQLInputsWrapper>
<S.KSQLButtons>
<Button
buttonType="primary"
buttonSize="M"
type="submit"
disabled={fetching}
onClick={handleFocus}
>
Execute
</Button>
<Button
buttonType="secondary"
buttonSize="M"
disabled={!fetching}
onClick={handleSSECancel}
>
Stop query
</Button>
<Button
buttonType="secondary"
buttonSize="M"
disabled={fetching || !hasResults}
onClick={handleClearResults}
>
Clear results
</Button>
</S.KSQLButtons>
</form>
</S.QueryWrapper>
<Button
buttonType="primary"
buttonSize="M"
type="submit"
disabled={fetching}
onClick={handleFocus}
>
Execute
</Button>
</S.ButtonsContainer>
</form>
</S.QueryWrapper>
</FormProvider>
);
};
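
One behavioural detail in the reworked form is worth calling out: deleting the last remaining stream property resets it in place rather than removing it, so the field array never renders empty. A minimal sketch of that pattern, using react-hook-form's useFieldArray exactly as the component above does:

// Sketch: `update` keeps a single blank row when the user deletes the
// final property; `remove` is only used while more than one row exists.
const { fields, append, remove, update } = useFieldArray<
  FormValues,
  'streamsProperties'
>({ control, name: 'streamsProperties' });

const removeProperty = (index: number) => () => {
  if (fields.length === 1) {
    update(index, { key: '', value: '' }); // reset instead of remove
    return;
  }
  remove(index);
};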

View file

@@ -1,189 +0,0 @@
import { render } from 'lib/testHelpers';
import React from 'react';
import QueryForm, { Props } from 'components/KsqlDb/Query/QueryForm/QueryForm';
import { screen, waitFor, within } from '@testing-library/dom';
import userEvent from '@testing-library/user-event';
const renderComponent = (props: Props) => render(<QueryForm {...props} />);
describe('QueryForm', () => {
it('renders', () => {
renderComponent({
fetching: false,
hasResults: false,
handleClearResults: jest.fn(),
handleSSECancel: jest.fn(),
submitHandler: jest.fn(),
});
const KSQLBlock = screen.getByLabelText('KSQL');
expect(KSQLBlock).toBeInTheDocument();
expect(within(KSQLBlock).getByText('KSQL')).toBeInTheDocument();
expect(
within(KSQLBlock).getByRole('button', { name: 'Clear' })
).toBeInTheDocument();
// Represents SQL editor
expect(within(KSQLBlock).getByRole('textbox')).toBeInTheDocument();
const streamPropertiesBlock = screen.getByRole('textbox', { name: 'key' });
expect(streamPropertiesBlock).toBeInTheDocument();
expect(screen.getByText('Stream properties:')).toBeInTheDocument();
expect(screen.getByRole('button', { name: 'Clear' })).toBeInTheDocument();
expect(screen.queryAllByRole('textbox')[0]).toBeInTheDocument();
// Form controls
expect(screen.getByRole('button', { name: 'Execute' })).toBeInTheDocument();
expect(screen.getByRole('button', { name: 'Execute' })).toBeEnabled();
expect(
screen.getByRole('button', { name: 'Stop query' })
).toBeInTheDocument();
expect(screen.getByRole('button', { name: 'Stop query' })).toBeDisabled();
expect(
screen.getByRole('button', { name: 'Clear results' })
).toBeInTheDocument();
expect(
screen.getByRole('button', { name: 'Clear results' })
).toBeDisabled();
});
it('renders error with empty input', async () => {
const submitFn = jest.fn();
renderComponent({
fetching: false,
hasResults: false,
handleClearResults: jest.fn(),
handleSSECancel: jest.fn(),
submitHandler: submitFn,
});
await userEvent.click(screen.getByRole('button', { name: 'Execute' }));
await waitFor(() => {
expect(screen.getByText('ksql is a required field')).toBeInTheDocument();
expect(submitFn).not.toBeCalled();
});
});
it('submits with correct inputs', async () => {
const submitFn = jest.fn();
renderComponent({
fetching: false,
hasResults: false,
handleClearResults: jest.fn(),
handleSSECancel: jest.fn(),
submitHandler: submitFn,
});
const textbox = screen.getAllByRole('textbox');
textbox[0].focus();
await userEvent.paste('show tables;');
const key = screen.getByRole('textbox', { name: 'key' });
key.focus();
await userEvent.paste('test');
const value = screen.getByRole('textbox', { name: 'value' });
value.focus();
await userEvent.paste('test');
await userEvent.click(screen.getByRole('button', { name: 'Execute' }));
expect(
screen.queryByText('ksql is a required field')
).not.toBeInTheDocument();
expect(
screen.queryByText('streamsProperties is not JSON object')
).not.toBeInTheDocument();
expect(submitFn).toBeCalled();
});
it('clear results is enabled when has results', async () => {
const clearFn = jest.fn();
renderComponent({
fetching: false,
hasResults: true,
handleClearResults: clearFn,
handleSSECancel: jest.fn(),
submitHandler: jest.fn(),
});
expect(screen.getByRole('button', { name: 'Clear results' })).toBeEnabled();
await userEvent.click(
screen.getByRole('button', { name: 'Clear results' })
);
expect(clearFn).toBeCalled();
});
it('stop query is enabled when fetching', async () => {
const cancelFn = jest.fn();
renderComponent({
fetching: true,
hasResults: false,
handleClearResults: jest.fn(),
handleSSECancel: cancelFn,
submitHandler: jest.fn(),
});
expect(screen.getByRole('button', { name: 'Stop query' })).toBeEnabled();
await userEvent.click(screen.getByRole('button', { name: 'Stop query' }));
expect(cancelFn).toBeCalled();
});
it('add new property', async () => {
renderComponent({
fetching: false,
hasResults: false,
handleClearResults: jest.fn(),
handleSSECancel: jest.fn(),
submitHandler: jest.fn(),
});
const textbox = screen.getByLabelText('key');
await userEvent.type(textbox, 'prop_name');
await userEvent.click(
screen.getByRole('button', { name: 'Add Stream Property' })
);
expect(screen.getAllByRole('textbox', { name: 'key' }).length).toEqual(2);
});
it("doesn't add new property", async () => {
renderComponent({
fetching: false,
hasResults: false,
handleClearResults: jest.fn(),
handleSSECancel: jest.fn(),
submitHandler: jest.fn(),
});
await userEvent.click(
screen.getByRole('button', { name: 'Add Stream Property' })
);
expect(screen.getAllByRole('textbox', { name: 'key' }).length).toEqual(1);
});
it('delete stream property', async () => {
await renderComponent({
fetching: false,
hasResults: false,
handleClearResults: jest.fn(),
handleSSECancel: jest.fn(),
submitHandler: jest.fn(),
});
const textBoxes = screen.getAllByRole('textbox', { name: 'key' });
textBoxes[0].focus();
await userEvent.paste('test');
await userEvent.click(
screen.getByRole('button', { name: 'Add Stream Property' })
);
await userEvent.click(screen.getAllByLabelText('deleteProperty')[0]);
await screen.getByRole('button', { name: 'Add Stream Property' });
await userEvent.click(screen.getAllByLabelText('deleteProperty')[0]);
expect(textBoxes.length).toEqual(1);
});
});

View file

@@ -1,116 +0,0 @@
import { render, EventSourceMock, WithRoute } from 'lib/testHelpers';
import React from 'react';
import Query, {
getFormattedErrorFromTableData,
} from 'components/KsqlDb/Query/Query';
import { screen } from '@testing-library/dom';
import fetchMock from 'fetch-mock';
import { clusterKsqlDbQueryPath } from 'lib/paths';
import userEvent from '@testing-library/user-event';
const clusterName = 'testLocal';
const renderComponent = () =>
render(
<WithRoute path={clusterKsqlDbQueryPath()}>
<Query />
</WithRoute>,
{
initialEntries: [clusterKsqlDbQueryPath(clusterName)],
}
);
describe('Query', () => {
it('renders', () => {
renderComponent();
expect(screen.getByLabelText('KSQL')).toBeInTheDocument();
expect(screen.getByLabelText('Stream properties:')).toBeInTheDocument();
});
afterEach(() => fetchMock.reset());
it('fetch on execute', async () => {
renderComponent();
const mock = fetchMock.postOnce(`/api/clusters/${clusterName}/ksql/v2`, {
pipeId: 'testPipeID',
});
Object.defineProperty(window, 'EventSource', {
value: EventSourceMock,
});
const inputs = screen.getAllByRole('textbox');
const textAreaElement = inputs[0] as HTMLTextAreaElement;
textAreaElement.focus();
await userEvent.paste('show tables;');
await userEvent.click(screen.getByRole('button', { name: 'Execute' }));
expect(mock.calls().length).toBe(1);
});
it('fetch on execute with streamParams', async () => {
renderComponent();
const mock = fetchMock.postOnce(`/api/clusters/${clusterName}/ksql/v2`, {
pipeId: 'testPipeID',
});
Object.defineProperty(window, 'EventSource', {
value: EventSourceMock,
});
const inputs = screen.getAllByRole('textbox');
const textAreaElement = inputs[0] as HTMLTextAreaElement;
textAreaElement.focus();
await userEvent.paste('show tables;');
const key = screen.getByLabelText('key');
key.focus();
await userEvent.paste('key');
const value = screen.getByLabelText('value');
value.focus();
await userEvent.paste('value');
await userEvent.click(screen.getByRole('button', { name: 'Execute' }));
expect(mock.calls().length).toBe(1);
});
});
describe('getFormattedErrorFromTableData', () => {
it('works', () => {
expect(getFormattedErrorFromTableData([['Test Error']])).toStrictEqual({
title: 'Test Error',
message: '',
});
expect(
getFormattedErrorFromTableData([
['some_type', 'errorCode', 'messageText'],
])
).toStrictEqual({
title: '[Error #errorCode] some_type',
message: 'messageText',
});
expect(
getFormattedErrorFromTableData([
[
'some_type',
'errorCode',
'messageText',
'statementText',
['test1', 'test2'],
],
])
).toStrictEqual({
title: '[Error #errorCode] some_type',
message: '[test1, test2] "statementText" messageText',
});
expect(getFormattedErrorFromTableData([])).toStrictEqual({
title: 'Unknown error',
message: 'Received empty response',
});
});
});

View file

@@ -6,13 +6,11 @@ import { TableTitle } from 'components/common/table/TableTitle/TableTitle.styled
import * as S from './TableRenderer.styled';
export interface Props {
interface TableRendererProps {
table: KsqlTableResponse;
}
export function hasJsonStructure(
str: string | Record<string, unknown>
): boolean {
function hasJsonStructure(str: string | Record<string, unknown>): boolean {
if (typeof str === 'object') {
return true;
}
@@ -30,13 +28,7 @@ export function hasJsonStructure(
return false;
}
const TableRenderer: React.FC<Props> = ({ table }) => {
const heading = React.useMemo(() => {
return table.header || '';
}, [table.header]);
const ths = React.useMemo(() => {
return table.columnNames || [];
}, [table.columnNames]);
const TableRenderer: React.FC<TableRendererProps> = ({ table }) => {
const rows = React.useMemo(() => {
return (table.values || []).map((row) => {
return {
@@ -53,9 +45,11 @@ const TableRenderer: React.FC<Props> = ({ table }) => {
});
}, [table.values]);
const ths = table.columnNames || [];
return (
<S.Wrapper>
<TableTitle>{heading}</TableTitle>
<TableTitle>{table.header}</TableTitle>
<S.ScrollableTable>
<thead>
<tr>

View file

@@ -1,71 +0,0 @@
import { render } from 'lib/testHelpers';
import React from 'react';
import TableRenderer, {
Props,
hasJsonStructure,
} from 'components/KsqlDb/Query/renderer/TableRenderer/TableRenderer';
import { screen } from '@testing-library/dom';
const renderComponent = (props: Props) => render(<TableRenderer {...props} />);
describe('TableRenderer', () => {
it('renders', () => {
renderComponent({
table: {
header: 'Test header',
columnNames: ['Test column name'],
values: [['Table row #1'], ['Table row #2'], ['{"jsonrow": "#3"}']],
},
});
expect(
screen.getByRole('heading', { name: 'Test header' })
).toBeInTheDocument();
expect(
screen.getByRole('columnheader', { name: 'Test column name' })
).toBeInTheDocument();
expect(
screen.getByRole('cell', { name: 'Table row #1' })
).toBeInTheDocument();
expect(
screen.getByRole('cell', { name: 'Table row #2' })
).toBeInTheDocument();
});
it('renders with empty arrays', () => {
renderComponent({
table: {},
});
expect(screen.getByText('No tables or streams found')).toBeInTheDocument();
});
});
describe('hasJsonStructure', () => {
it('works', () => {
expect(hasJsonStructure('simplestring')).toBeFalsy();
expect(
hasJsonStructure("{'looksLikeJson': 'but has wrong quotes'}")
).toBeFalsy();
expect(
hasJsonStructure('{"json": "but doesnt have closing brackets"')
).toBeFalsy();
expect(hasJsonStructure('"string":"that looks like json"')).toBeFalsy();
expect(hasJsonStructure('1')).toBeFalsy();
expect(hasJsonStructure('{1:}')).toBeFalsy();
expect(hasJsonStructure('{1:"1"}')).toBeFalsy();
// @ts-expect-error We suppress error because this function works with unknown data from server
expect(hasJsonStructure(1)).toBeFalsy();
expect(hasJsonStructure('{}')).toBeTruthy();
expect(hasJsonStructure('{"correct": "json"}')).toBeTruthy();
expect(hasJsonStructure('[]')).toBeTruthy();
expect(hasJsonStructure('[{}]')).toBeTruthy();
expect(hasJsonStructure({})).toBeTruthy();
expect(hasJsonStructure({ correct: 'json' })).toBeTruthy();
});
});

View file

@@ -0,0 +1,39 @@
import React from 'react';
import { KsqlStreamDescription, KsqlTableDescription } from 'generated-sources';
import Table from 'components/common/NewTable';
import { ColumnDef } from '@tanstack/react-table';
interface TableViewProps {
fetching: boolean;
rows: KsqlTableDescription[] | KsqlStreamDescription[];
}
const TableView: React.FC<TableViewProps> = ({ fetching, rows }) => {
const columns = React.useMemo<
ColumnDef<KsqlTableDescription | KsqlStreamDescription>[]
>(
() => [
{ header: 'Name', accessorKey: 'name' },
{ header: 'Topic', accessorKey: 'topic' },
{ header: 'Key Format', accessorKey: 'keyFormat' },
{ header: 'Value Format', accessorKey: 'valueFormat' },
{
header: 'Is Windowed',
accessorKey: 'isWindowed',
cell: ({ row }) =>
'isWindowed' in row.original ? String(row.original.isWindowed) : '-',
},
],
[]
);
return (
<Table
data={rows || []}
columns={columns}
emptyMessage={fetching ? 'Loading...' : 'No rows found'}
enableSorting={false}
/>
);
};
export default TableView;
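
A possible consumer of the new TableView, wired to the useKsqlkDb queries added later in this diff; the import paths and list-page shape below are assumptions for illustration only:

import React from 'react';
import TableView from 'components/KsqlDb/TableView'; // path assumed
import { useKsqlkDb } from 'lib/hooks/api/ksqlDb'; // path assumed

const TablesList: React.FC<{ clusterName: string }> = ({ clusterName }) => {
  // useKsqlkDb returns the [tables, streams] query pair; see the hooks file below.
  const [tablesQuery] = useKsqlkDb(clusterName);
  return (
    <TableView
      fetching={tablesQuery.isFetching}
      rows={tablesQuery.data || []} // assuming listTables resolves to an array
    />
  );
};

export default TablesList;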

View file

@@ -1,42 +0,0 @@
import React from 'react';
import KsqlDb from 'components/KsqlDb/KsqlDb';
import { render, WithRoute } from 'lib/testHelpers';
import { screen } from '@testing-library/dom';
import {
clusterKsqlDbPath,
clusterKsqlDbQueryPath,
getNonExactPath,
} from 'lib/paths';
const KSqLComponentText = {
list: 'list',
query: 'query',
};
jest.mock('components/KsqlDb/List/List', () => () => (
<div>{KSqLComponentText.list}</div>
));
jest.mock('components/KsqlDb/Query/Query', () => () => (
<div>{KSqLComponentText.query}</div>
));
describe('KsqlDb Component', () => {
const clusterName = 'clusterName';
const renderComponent = (path: string) =>
render(
<WithRoute path={getNonExactPath(clusterKsqlDbPath())}>
<KsqlDb />
</WithRoute>,
{ initialEntries: [path] }
);
it('Renders the List', () => {
renderComponent(clusterKsqlDbPath(clusterName));
expect(screen.getByText(KSqLComponentText.list)).toBeInTheDocument();
});
it('Renders the Query', () => {
renderComponent(clusterKsqlDbQueryPath(clusterName));
expect(screen.getByText(KSqLComponentText.query)).toBeInTheDocument();
});
});

View file

@@ -12,12 +12,6 @@ export const versionPayload = [
];
export const versionEmptyPayload = [];
export const versions = [
schemaVersion1,
schemaVersion2,
schemaVersionWithNonAsciiChars,
];
export const jsonSchema: SchemaSubject = {
subject: 'test',
version: '15',

View file

@@ -14,7 +14,7 @@ export const invalidPermission = {
action: Action.DELETE,
};
export const roles = [
const roles = [
{
...validPermission,
actions: [validPermission.action],

View file

@@ -1,4 +1,5 @@
import { diff as DiffEditor } from 'react-ace';
import 'ace-builds/src-noconflict/ace';
import 'ace-builds/src-noconflict/mode-json5';
import 'ace-builds/src-noconflict/mode-protobuf';
import 'ace-builds/src-noconflict/theme-textmate';

View file

@@ -1,11 +1,9 @@
/* eslint-disable react/jsx-props-no-spreading */
import AceEditor, { IAceEditorProps } from 'react-ace';
import 'ace-builds/src-noconflict/mode-json5';
import 'ace-builds/src-noconflict/mode-protobuf';
import 'ace-builds/src-noconflict/theme-tomorrow';
import { SchemaType } from 'generated-sources';
import React from 'react';
import ReactAce from 'react-ace/lib/ace';
import styled from 'styled-components';
interface EditorProps extends IAceEditorProps {
@@ -13,7 +11,7 @@ interface EditorProps extends IAceEditorProps {
schemaType?: string;
}
const Editor = React.forwardRef<ReactAce | null, EditorProps>((props, ref) => {
const Editor = React.forwardRef<AceEditor | null, EditorProps>((props, ref) => {
const { isFixedHeight, schemaType, ...rest } = props;
return (
<AceEditor

View file

@@ -225,6 +225,13 @@ export const Ellipsis = styled.div`
display: block;
`;
export const TableWrapper = styled.div`
overflow-x: auto;
`;
export const TableWrapper = styled.div<{ $disabled: boolean }>(
({ $disabled }) => css`
overflow-x: auto;
${$disabled &&
css`
pointer-events: none;
opacity: 0.5;
`}
`
);
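
The `$` prefix on `$disabled` follows styled-components' transient-prop convention: the prop is consumed for styling and never forwarded to the underlying DOM node, so React does not warn about an unknown attribute on the div. A standalone sketch of the same pattern (names here are illustrative):

import styled, { css } from 'styled-components';

// Transient ($-prefixed) props are stripped by styled-components v5.1+
// before the element is rendered to the DOM.
const DimWrapper = styled.div<{ $inactive: boolean }>(
  ({ $inactive }) => css`
    overflow-x: auto;
    ${$inactive &&
    css`
      pointer-events: none;
      opacity: 0.5;
    `}
  `
);

// Usage: <DimWrapper $inactive={!!disabled}>...</DimWrapper>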

View file

@@ -48,6 +48,8 @@ export interface TableProps<TData> {
// Placeholder for empty table
emptyMessage?: React.ReactNode;
disabled?: boolean;
// Handles row click. Cannot be combined with `enableRowSelection` and expandable rows.
onRowClick?: (row: Row<TData>) => void;
}
@@ -123,6 +125,7 @@ const Table: React.FC<TableProps<any>> = ({
enableRowSelection = false,
batchActionsBar: BatchActionsBar,
emptyMessage,
disabled,
onRowClick,
}) => {
const [searchParams, setSearchParams] = useSearchParams();
@@ -200,7 +203,7 @@ const Table: React.FC<TableProps<any>> = ({
/>
</S.TableActionsBar>
)}
<S.TableWrapper>
<S.TableWrapper $disabled={!!disabled}>
<S.Table>
<thead>
{table.getHeaderGroups().map((headerGroup) => (

View file

@@ -1,11 +0,0 @@
import { CellContext } from '@tanstack/react-table';
import React from 'react';
import * as S from './Table.styled';
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const TruncatedTextCell: React.FC<CellContext<any, unknown>> = ({
getValue,
}) => <S.Ellipsis>{getValue<string>()}</S.Ellipsis>;
export default TruncatedTextCell;

View file

@@ -1,15 +1,15 @@
/* eslint-disable react/jsx-props-no-spreading */
import AceEditor, { IAceEditorProps } from 'react-ace';
import 'ace-builds/src-noconflict/ace';
import 'ace-builds/src-noconflict/mode-sql';
import 'ace-builds/src-noconflict/theme-textmate';
import React from 'react';
import ReactAce from 'react-ace/lib/ace';
interface SQLEditorProps extends IAceEditorProps {
isFixedHeight?: boolean;
}
const SQLEditor = React.forwardRef<ReactAce | null, SQLEditorProps>(
const SQLEditor = React.forwardRef<AceEditor | null, SQLEditorProps>(
(props, ref) => {
const { isFixedHeight, ...rest } = props;
return (

View file

@@ -8,13 +8,13 @@ import {
import * as S from './Tooltip.styled';
export interface PropsTypes {
interface TooltipProps {
value: React.ReactNode;
content: string;
placement?: Placement;
}
const Tooltip: React.FC<PropsTypes> = ({ value, content, placement }) => {
const Tooltip: React.FC<TooltipProps> = ({ value, content, placement }) => {
const [open, setOpen] = useState(false);
const { x, y, refs, strategy, context } = useFloating({
open,

View file

@@ -3,5 +3,5 @@ import Heading from 'components/common/heading/Heading.styled';
import styled from 'styled-components';
export const TableTitle = styled((props) => <Heading level={3} {...props} />)`
padding: 16px;
padding: 16px 16px 0;
`;

View file

@@ -1,30 +1,5 @@
import { ConsumerGroupState } from 'generated-sources';
export const consumerGroups = [
{
groupId: 'groupId1',
members: 0,
topics: 1,
simple: false,
partitionAssignor: '',
coordinator: {
id: 1,
host: 'host',
},
},
{
groupId: 'groupId2',
members: 0,
topics: 1,
simple: false,
partitionAssignor: '',
coordinator: {
id: 1,
host: 'host',
},
},
];
export const consumerGroupPayload = {
groupId: 'amazon.msk.canary.group.broker-1',
members: 0,

View file

@@ -4,7 +4,7 @@ import { modifyRolesData } from 'lib/permissions';
export const clusterName1 = 'local';
export const clusterName2 = 'dev';
export const userPermissionsMock = [
const userPermissionsMock = [
{
clusters: [clusterName1],
resource: ResourceType.TOPIC,

View file

@@ -0,0 +1,92 @@
import { consumerGroupsApiClient as api } from 'lib/api';
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
import { ClusterName } from 'redux/interfaces';
import {
ConsumerGroup,
ConsumerGroupOffsetsReset,
ConsumerGroupOrdering,
SortOrder,
} from 'generated-sources';
import { showSuccessAlert } from 'lib/errorHandling';
export type ConsumerGroupID = ConsumerGroup['groupId'];
type UseConsumerGroupsProps = {
clusterName: ClusterName;
orderBy?: ConsumerGroupOrdering;
sortOrder?: SortOrder;
page?: number;
perPage?: number;
search: string;
};
type UseConsumerGroupDetailsProps = {
clusterName: ClusterName;
consumerGroupID: ConsumerGroupID;
};
export function useConsumerGroups(props: UseConsumerGroupsProps) {
const { clusterName, ...rest } = props;
return useQuery(
['clusters', clusterName, 'consumerGroups', rest],
() => api.getConsumerGroupsPage(props),
{ suspense: false, keepPreviousData: true }
);
}
export function useConsumerGroupDetails(props: UseConsumerGroupDetailsProps) {
const { clusterName, consumerGroupID } = props;
return useQuery(
['clusters', clusterName, 'consumerGroups', consumerGroupID],
() => api.getConsumerGroup({ clusterName, id: consumerGroupID })
);
}
export const useDeleteConsumerGroupMutation = ({
clusterName,
consumerGroupID,
}: UseConsumerGroupDetailsProps) => {
const queryClient = useQueryClient();
return useMutation(
() => api.deleteConsumerGroup({ clusterName, id: consumerGroupID }),
{
onSuccess: () => {
showSuccessAlert({
message: `Consumer group ${consumerGroupID} deleted`,
});
queryClient.invalidateQueries([
'clusters',
clusterName,
'consumerGroups',
]);
},
}
);
};
export const useResetConsumerGroupOffsetsMutation = ({
clusterName,
consumerGroupID,
}: UseConsumerGroupDetailsProps) => {
const queryClient = useQueryClient();
return useMutation(
(props: ConsumerGroupOffsetsReset) =>
api.resetConsumerGroupOffsets({
clusterName,
id: consumerGroupID,
consumerGroupOffsetsReset: props,
}),
{
onSuccess: () => {
showSuccessAlert({
message: `Consumer group ${consumerGroupID} offsets reset`,
});
queryClient.invalidateQueries([
'clusters',
clusterName,
'consumerGroups',
]);
},
}
);
};
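
A sketch of how a details page might consume the delete mutation; the component name and import path are illustrative, not part of this diff:

import React from 'react';
import { useDeleteConsumerGroupMutation } from 'lib/hooks/api/consumers'; // path assumed

const DeleteGroupButton: React.FC<{
  clusterName: string;
  consumerGroupID: string;
}> = ({ clusterName, consumerGroupID }) => {
  const deleteGroup = useDeleteConsumerGroupMutation({
    clusterName,
    consumerGroupID,
  });
  return (
    <button
      type="button"
      disabled={deleteGroup.isLoading}
      // onSuccess in the hook shows the alert and invalidates the groups query
      onClick={() => deleteGroup.mutateAsync()}
    >
      Delete consumer group
    </button>
  );
};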

View file

@@ -109,7 +109,7 @@ export function useUpdateConnectorConfig(props: UseConnectorProps) {
}
);
}
export function useCreateConnectorMutation(clusterName: ClusterName) {
function useCreateConnectorMutation(clusterName: ClusterName) {
const client = useQueryClient();
return useMutation(
(props: CreateConnectorProps) =>

View file

@@ -0,0 +1,184 @@
import { ksqlDbApiClient as api } from 'lib/api';
import { useMutation, useQueries } from '@tanstack/react-query';
import { ClusterName } from 'redux/interfaces';
import { BASE_PARAMS } from 'lib/constants';
import React from 'react';
import { fetchEventSource } from '@microsoft/fetch-event-source';
import {
showAlert,
showServerError,
showSuccessAlert,
} from 'lib/errorHandling';
import {
ExecuteKsqlRequest,
KsqlResponse,
KsqlTableResponse,
} from 'generated-sources';
import { StopLoading } from 'components/Topics/Topic/Messages/Messages.styled';
import toast from 'react-hot-toast';
export function useKsqlkDb(clusterName: ClusterName) {
return useQueries({
queries: [
{
queryKey: ['clusters', clusterName, 'ksqlDb', 'tables'],
queryFn: () => api.listTables({ clusterName }),
suspense: false,
},
{
queryKey: ['clusters', clusterName, 'ksqlDb', 'streams'],
queryFn: () => api.listStreams({ clusterName }),
suspense: false,
},
],
});
}
export function useExecuteKsqlkDbQueryMutation() {
return useMutation((props: ExecuteKsqlRequest) => api.executeKsql(props));
}
const getFormattedErrorFromTableData = (
responseValues: KsqlTableResponse['values']
): { title: string; message: string } => {
// We expect something like this
// [[
// "@type",
// "error_code",
// "message",
// "statementText"?,
// "entities"?
// ]],
// or
// [["message"]]
if (!responseValues || !responseValues.length) {
return {
title: 'Unknown error',
message: 'Received empty response',
};
}
let title = '';
let message = '';
if (responseValues[0].length < 2) {
const [messageText] = responseValues[0];
title = messageText;
} else {
const [type, errorCode, messageText, statementText, entities] =
responseValues[0];
title = `[Error #${errorCode}] ${type}`;
message =
(entities?.length ? `[${entities.join(', ')}] ` : '') +
(statementText ? `"${statementText}" ` : '') +
messageText;
}
return { title, message };
};
type UseKsqlkDbSSEProps = {
pipeId: string | false;
clusterName: ClusterName;
};
export const useKsqlkDbSSE = ({ clusterName, pipeId }: UseKsqlkDbSSEProps) => {
const [data, setData] = React.useState<KsqlTableResponse>();
const [isFetching, setIsFetching] = React.useState<boolean>(false);
const abortController = new AbortController();
React.useEffect(() => {
const fetchData = async () => {
const url = `${BASE_PARAMS.basePath}/api/clusters/${clusterName}/ksql/response`;
await fetchEventSource(
`${url}?${new URLSearchParams({ pipeId: pipeId || '' }).toString()}`,
{
method: 'GET',
signal: abortController.signal,
openWhenHidden: true,
async onopen(response) {
const { ok, status } = response;
if (ok) setData(undefined); // Reset
if (status >= 400 && status < 500 && status !== 429) {
showServerError(response);
}
},
onmessage(event) {
const { table }: KsqlResponse = JSON.parse(event.data);
if (!table) {
return;
}
switch (table?.header) {
case 'Execution error': {
showAlert('error', {
...getFormattedErrorFromTableData(table.values),
id: `${url}-executionError`,
});
break;
}
case 'Schema':
setData(table);
break;
case 'Row':
setData((state) => ({
header: state?.header,
columnNames: state?.columnNames,
values: [...(state?.values || []), ...(table?.values || [])],
}));
break;
case 'Query Result':
showSuccessAlert({
id: `${url}-querySuccess`,
title: 'Query succeeded',
message: '',
});
break;
case 'Source Description':
case 'properties':
default:
setData(table);
break;
}
},
onclose() {
setIsFetching(false);
},
onerror(err) {
setIsFetching(false);
showServerError(err);
},
}
);
};
const abortFetchData = () => {
setIsFetching(false);
if (pipeId) abortController.abort();
};
if (pipeId) {
toast.promise(
fetchData(),
{
loading: (
<>
<div>Consuming query execution result...</div>
&nbsp;
<StopLoading onClick={abortFetchData}>Abort</StopLoading>
</>
),
success: 'Cancelled',
error: 'Something went wrong. Please try again.',
},
{
id: 'messages',
success: { duration: 20 },
}
);
}
return abortFetchData;
}, [pipeId]);
return { data, isFetching };
};
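
Finally, a short usage sketch for the SSE hook, mirroring how Query.tsx consumes it earlier in this diff (the early-return rendering is an assumption about a results panel):

const QueryResults: React.FC<{
  clusterName: string;
  pipeId: string | false;
}> = ({ clusterName, pipeId }) => {
  // The hook opens the event stream as soon as pipeId is truthy and
  // accumulates 'Row' messages into `data`.
  const sse = useKsqlkDbSSE({ clusterName, pipeId });
  if (!pipeId || !sse.data) return null;
  return <TableRenderer table={sse.data} />;
};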

Some files were not shown because too many files have changed in this diff.