Merge branch 'master' into ISSUE_754_acl

Commit 931b3d1fa8: 114 changed files with 1528 additions and 2835 deletions.
.github/workflows/aws_publisher.yaml (2 changes, vendored)

@@ -31,7 +31,7 @@ jobs:
           echo "Packer will be triggered in this dir $WORK_DIR"

       - name: Configure AWS credentials for Kafka-UI account
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v2
         with:
           aws-access-key-id: ${{ secrets.AWS_AMI_PUBLISH_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_AMI_PUBLISH_KEY_SECRET }}
.github/workflows/branch-deploy.yml (19 changes, vendored)

@@ -9,9 +9,9 @@ jobs:
     if: ${{ github.event.label.name == 'status/feature_testing' || github.event.label.name == 'status/feature_testing_public' }}
     runs-on: ubuntu-latest
     steps:
-      - uses: ./.github/workflows/build-template.yaml
+      - uses: actions/checkout@v3
         with:
-          APP_VERSION: $GITHUB_SHA
+          ref: ${{ github.event.pull_request.head.sha }}
      - name: get branch name
        id: extract_branch
        run: |
@@ -19,6 +19,19 @@ jobs:
           echo "tag=${tag}" >> $GITHUB_OUTPUT
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - name: Set up JDK
+        uses: actions/setup-java@v3
+        with:
+          java-version: '17'
+          distribution: 'zulu'
+          cache: 'maven'
+      - name: Build
+        id: build
+        run: |
+          ./mvnw -B -ntp versions:set -DnewVersion=$GITHUB_SHA
+          ./mvnw -B -V -ntp clean package -Pprod -DskipTests
+          export VERSION=$(./mvnw -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
+          echo "version=${VERSION}" >> $GITHUB_OUTPUT
       - name: Set up QEMU
         uses: docker/setup-qemu-action@v2
       - name: Set up Docker Buildx
@@ -32,7 +45,7 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-buildx-
       - name: Configure AWS credentials for Kafka-UI account
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v2
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
.github/workflows/build-public-image.yml (2 changes, vendored)

@@ -42,7 +42,7 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-buildx-
       - name: Configure AWS credentials for Kafka-UI account
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v2
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
.github/workflows/build-template.yml (32 changes, vendored; file deleted)

@@ -1,32 +0,0 @@
-name: Maven build template
-on:
-  workflow_call:
-    inputs:
-      APP_VERSION:
-        required: true
-        type: string
-jobs:
-  build:
-    runs-on: ubuntu-latest
-    outputs:
-      version: ${{steps.build.outputs.version}}
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          ref: ${{ github.event.pull_request.head.sha }}
-      - run: |
-          git config user.name github-actions
-          git config user.email github-actions@github.com
-      - name: Set up JDK
-        uses: actions/setup-java@v3
-        with:
-          java-version: '17'
-          distribution: 'zulu'
-          cache: 'maven'
-      - name: Build
-        id: build
-        run: |
-          ./mvnw -B -ntp versions:set -DnewVersion=${{ inputs.APP_VERSION }}
-          ./mvnw -B -V -ntp clean package -Pprod -DskipTests
-          export VERSION=$(./mvnw -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
-          echo "version=${VERSION}" >> $GITHUB_OUTPUT
.github/workflows/delete-public-image.yml (2 changes, vendored)

@@ -15,7 +15,7 @@ jobs:
           tag='${{ github.event.pull_request.number }}'
           echo "tag=${tag}" >> $GITHUB_OUTPUT
       - name: Configure AWS credentials for Kafka-UI account
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v2
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
.github/workflows/e2e-automation.yml (8 changes, vendored)

@@ -23,6 +23,12 @@ jobs:
       - uses: actions/checkout@v3
         with:
           ref: ${{ github.sha }}
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v2
+        with:
+          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          aws-region: eu-central-1
       - name: Set up environment
         id: set_env_values
         run: |
@@ -65,8 +71,6 @@ jobs:
         if: always()
         env:
           AWS_S3_BUCKET: 'kafkaui-allure-reports'
-          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
           AWS_REGION: 'eu-central-1'
           SOURCE_DIR: 'allure-history/allure-results'
       - name: Deploy report to Amazon S3
.github/workflows/e2e-checks.yaml (2 changes, vendored)

@@ -16,7 +16,7 @@ jobs:
         with:
           ref: ${{ github.event.pull_request.head.sha }}
       - name: Configure AWS credentials for Kafka-UI account
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v2
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
.github/workflows/e2e-weekly.yml (8 changes, vendored)

@@ -10,6 +10,12 @@ jobs:
       - uses: actions/checkout@v3
         with:
           ref: ${{ github.sha }}
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v2
+        with:
+          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          aws-region: eu-central-1
       - name: Set up environment
         id: set_env_values
         run: |
@@ -52,8 +58,6 @@ jobs:
         if: always()
         env:
           AWS_S3_BUCKET: 'kafkaui-allure-reports'
-          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
           AWS_REGION: 'eu-central-1'
           SOURCE_DIR: 'allure-history/allure-results'
       - name: Deploy report to Amazon S3
(another workflow file; its name was not captured in this view)

@@ -47,7 +47,7 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-buildx-
       - name: Configure AWS credentials for Kafka-UI account
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v2
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
.github/workflows/terraform-deploy.yml (2 changes, vendored)

@@ -26,7 +26,7 @@ jobs:
           echo "Terraform will be triggered in this dir $TF_DIR"

       - name: Configure AWS credentials for Kafka-UI account
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v2
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
ClustersProperties.java

@@ -27,6 +27,8 @@ public class ClustersProperties {

   String internalTopicPrefix;

+  PollingProperties polling = new PollingProperties();
+
   @Data
   public static class Cluster {
     String name;
@@ -49,6 +51,13 @@ public class ClustersProperties {
     TruststoreConfig ssl;
   }

+  @Data
+  public static class PollingProperties {
+    Integer pollTimeoutMs;
+    Integer partitionPollTimeout;
+    Integer noDataEmptyPolls;
+  }
+
   @Data
   @ToString(exclude = "password")
   public static class MetricsConfigData {
CorsGlobalConfiguration.java

@@ -1,25 +1,12 @@
 package com.provectus.kafka.ui.config;

-import lombok.AllArgsConstructor;
-import org.springframework.boot.autoconfigure.web.ServerProperties;
-import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
-import org.springframework.context.annotation.Profile;
-import org.springframework.core.io.ClassPathResource;
-import org.springframework.util.StringUtils;
 import org.springframework.web.reactive.config.CorsRegistry;
 import org.springframework.web.reactive.config.WebFluxConfigurer;
-import org.springframework.web.reactive.function.server.RouterFunction;
-import org.springframework.web.reactive.function.server.RouterFunctions;
-import org.springframework.web.reactive.function.server.ServerResponse;

 @Configuration
-@Profile("local")
-@AllArgsConstructor
 public class CorsGlobalConfiguration implements WebFluxConfigurer {

-  private final ServerProperties serverProperties;
-
   @Override
   public void addCorsMappings(CorsRegistry registry) {
     registry.addMapping("/**")
@@ -28,31 +15,4 @@ public class CorsGlobalConfiguration implements WebFluxConfigurer {
         .allowedHeaders("*")
         .allowCredentials(false);
   }
-
-  private String withContext(String pattern) {
-    final String basePath = serverProperties.getServlet().getContextPath();
-    if (StringUtils.hasText(basePath)) {
-      return basePath + pattern;
-    } else {
-      return pattern;
-    }
-  }
-
-  @Bean
-  public RouterFunction<ServerResponse> cssFilesRouter() {
-    return RouterFunctions
-        .resources(withContext("/static/css/**"), new ClassPathResource("static/static/css/"));
-  }
-
-  @Bean
-  public RouterFunction<ServerResponse> jsFilesRouter() {
-    return RouterFunctions
-        .resources(withContext("/static/js/**"), new ClassPathResource("static/static/js/"));
-  }
-
-  @Bean
-  public RouterFunction<ServerResponse> mediaFilesRouter() {
-    return RouterFunctions
-        .resources(withContext("/static/media/**"), new ClassPathResource("static/static/media/"));
-  }
 }
AbstractEmitter.java

@@ -4,7 +4,6 @@ import com.provectus.kafka.ui.model.TopicMessageDTO;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.model.TopicMessagePhaseDTO;
 import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
-import com.provectus.kafka.ui.util.PollingThrottler;
 import java.time.Duration;
 import java.time.Instant;
 import org.apache.kafka.clients.consumer.Consumer;
@@ -14,27 +13,21 @@ import org.apache.kafka.common.utils.Bytes;
 import reactor.core.publisher.FluxSink;

 public abstract class AbstractEmitter {
-  private static final Duration DEFAULT_POLL_TIMEOUT_MS = Duration.ofMillis(1000L);
-
-  // In some situations it is hard to say whether records range (between two offsets) was fully polled.
-  // This happens when we have holes in records sequences that is usual case for compact topics or
-  // topics with transactional writes. In such cases if you want to poll all records between offsets X and Y
-  // there is no guarantee that you will ever see record with offset Y.
-  // To workaround this we can assume that after N consecutive empty polls all target messages were read.
-  public static final int NO_MORE_DATA_EMPTY_POLLS_COUNT = 3;

   private final ConsumerRecordDeserializer recordDeserializer;
   private final ConsumingStats consumingStats = new ConsumingStats();
   private final PollingThrottler throttler;
+  protected final PollingSettings pollingSettings;

-  protected AbstractEmitter(ConsumerRecordDeserializer recordDeserializer, PollingThrottler throttler) {
+  protected AbstractEmitter(ConsumerRecordDeserializer recordDeserializer, PollingSettings pollingSettings) {
     this.recordDeserializer = recordDeserializer;
-    this.throttler = throttler;
+    this.pollingSettings = pollingSettings;
+    this.throttler = pollingSettings.getPollingThrottler();
   }

   protected ConsumerRecords<Bytes, Bytes> poll(
       FluxSink<TopicMessageEventDTO> sink, Consumer<Bytes, Bytes> consumer) {
-    return poll(sink, consumer, DEFAULT_POLL_TIMEOUT_MS);
+    return poll(sink, consumer, pollingSettings.getPollTimeout());
   }

   protected ConsumerRecords<Bytes, Bytes> poll(
BackwardRecordEmitter.java

@@ -3,15 +3,12 @@ package com.provectus.kafka.ui.emitter;
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
-import com.provectus.kafka.ui.util.PollingThrottler;
-import java.time.Duration;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 import java.util.TreeMap;
 import java.util.function.Supplier;
-import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.consumer.Consumer;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
@@ -26,8 +23,6 @@ public class BackwardRecordEmitter
     extends AbstractEmitter
     implements java.util.function.Consumer<FluxSink<TopicMessageEventDTO>> {

-  private static final Duration POLL_TIMEOUT = Duration.ofMillis(200);
-
   private final Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier;
   private final ConsumerPosition consumerPosition;
   private final int messagesPerPage;
@@ -37,8 +32,8 @@ public class BackwardRecordEmitter
       ConsumerPosition consumerPosition,
       int messagesPerPage,
       ConsumerRecordDeserializer recordDeserializer,
-      PollingThrottler throttler) {
-    super(recordDeserializer, throttler);
+      PollingSettings pollingSettings) {
+    super(recordDeserializer, pollingSettings);
     this.consumerPosition = consumerPosition;
     this.messagesPerPage = messagesPerPage;
     this.consumerSupplier = consumerSupplier;
@@ -109,17 +104,18 @@ public class BackwardRecordEmitter
       var recordsToSend = new ArrayList<ConsumerRecord<Bytes, Bytes>>();

-      // we use empty polls counting to verify that partition was fully read
-      for (int emptyPolls = 0; recordsToSend.size() < desiredMsgsToPoll && emptyPolls < NO_MORE_DATA_EMPTY_POLLS_COUNT;) {
-        var polledRecords = poll(sink, consumer, POLL_TIMEOUT);
-        log.debug("{} records polled from {}", polledRecords.count(), tp);
+      EmptyPollsCounter emptyPolls = pollingSettings.createEmptyPollsCounter();
+      while (!sink.isCancelled()
+          && recordsToSend.size() < desiredMsgsToPoll
+          && !emptyPolls.noDataEmptyPollsReached()) {
+        var polledRecords = poll(sink, consumer, pollingSettings.getPartitionPollTimeout());
+        emptyPolls.count(polledRecords);

-        // counting sequential empty polls
-        emptyPolls = polledRecords.isEmpty() ? emptyPolls + 1 : 0;
+        log.debug("{} records polled from {}", polledRecords.count(), tp);

         var filteredRecords = polledRecords.records(tp).stream()
             .filter(r -> r.offset() < toOffset)
-            .collect(Collectors.toList());
+            .toList();

         if (!polledRecords.isEmpty() && filteredRecords.isEmpty()) {
           // we already read all messages in target offsets interval
EmptyPollsCounter.java (new file)

@@ -0,0 +1,28 @@
+package com.provectus.kafka.ui.emitter;
+
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+
+// In some situations it is hard to say whether records range (between two offsets) was fully polled.
+// This happens when we have holes in records sequences that is usual case for compact topics or
+// topics with transactional writes. In such cases if you want to poll all records between offsets X and Y
+// there is no guarantee that you will ever see record with offset Y.
+// To workaround this we can assume that after N consecutive empty polls all target messages were read.
+public class EmptyPollsCounter {
+
+  private final int maxEmptyPolls;
+
+  private int emptyPolls = 0;
+
+  EmptyPollsCounter(int maxEmptyPolls) {
+    this.maxEmptyPolls = maxEmptyPolls;
+  }
+
+  public void count(ConsumerRecords<?, ?> polled) {
+    emptyPolls = polled.isEmpty() ? emptyPolls + 1 : 0;
+  }
+
+  public boolean noDataEmptyPollsReached() {
+    return emptyPolls >= maxEmptyPolls;
+  }
+
+}
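The counter is consumed by the rewritten poll loops in ForwardRecordEmitter and BackwardRecordEmitter elsewhere in this commit; a minimal usage sketch, assuming a consumer and a pollingSettings instance like those above (names are illustrative; the real loops also check sink cancellation and read-completion):

    EmptyPollsCounter emptyPolls = pollingSettings.createEmptyPollsCounter();
    while (!emptyPolls.noDataEmptyPollsReached()) {   // stop after N consecutive empty polls
      var polled = consumer.poll(pollingSettings.getPollTimeout());
      emptyPolls.count(polled);                       // any non-empty poll resets the streak to 0
      // process polled records here
    }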
ForwardRecordEmitter.java

@@ -3,7 +3,6 @@ package com.provectus.kafka.ui.emitter;
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
-import com.provectus.kafka.ui.util.PollingThrottler;
 import java.util.function.Supplier;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
@@ -25,8 +24,8 @@ public class ForwardRecordEmitter
       Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
       ConsumerPosition position,
       ConsumerRecordDeserializer recordDeserializer,
-      PollingThrottler throttler) {
-    super(recordDeserializer, throttler);
+      PollingSettings pollingSettings) {
+    super(recordDeserializer, pollingSettings);
     this.position = position;
     this.consumerSupplier = consumerSupplier;
   }
@@ -39,16 +38,16 @@ public class ForwardRecordEmitter
       var seekOperations = SeekOperations.create(consumer, position);
       seekOperations.assignAndSeekNonEmptyPartitions();

-      // we use empty polls counting to verify that topic was fully read
-      int emptyPolls = 0;
+      EmptyPollsCounter emptyPolls = pollingSettings.createEmptyPollsCounter();
       while (!sink.isCancelled()
           && !seekOperations.assignedPartitionsFullyPolled()
-          && emptyPolls < NO_MORE_DATA_EMPTY_POLLS_COUNT) {
+          && !emptyPolls.noDataEmptyPollsReached()) {

         sendPhase(sink, "Polling");
         ConsumerRecords<Bytes, Bytes> records = poll(sink, consumer);
+        emptyPolls.count(records);

         log.debug("{} records polled", records.count());
-        emptyPolls = records.isEmpty() ? emptyPolls + 1 : 0;

         for (ConsumerRecord<Bytes, Bytes> msg : records) {
           if (!sink.isCancelled()) {
PollingSettings.java (new file)

@@ -0,0 +1,79 @@
+package com.provectus.kafka.ui.emitter;
+
+import com.provectus.kafka.ui.config.ClustersProperties;
+import java.time.Duration;
+import java.util.Optional;
+import java.util.function.Supplier;
+
+public class PollingSettings {
+
+  private static final Duration DEFAULT_POLL_TIMEOUT = Duration.ofMillis(1_000);
+  private static final Duration DEFAULT_PARTITION_POLL_TIMEOUT = Duration.ofMillis(200);
+  private static final int DEFAULT_NO_DATA_EMPTY_POLLS = 3;
+
+  private final Duration pollTimeout;
+  private final Duration partitionPollTimeout;
+  private final int notDataEmptyPolls; //see EmptyPollsCounter docs
+
+  private final Supplier<PollingThrottler> throttlerSupplier;
+
+  public static PollingSettings create(ClustersProperties.Cluster cluster,
+                                       ClustersProperties clustersProperties) {
+    var pollingProps = Optional.ofNullable(clustersProperties.getPolling())
+        .orElseGet(ClustersProperties.PollingProperties::new);
+
+    var pollTimeout = pollingProps.getPollTimeoutMs() != null
+        ? Duration.ofMillis(pollingProps.getPollTimeoutMs())
+        : DEFAULT_POLL_TIMEOUT;
+
+    var partitionPollTimeout = pollingProps.getPartitionPollTimeout() != null
+        ? Duration.ofMillis(pollingProps.getPartitionPollTimeout())
+        : Duration.ofMillis(pollTimeout.toMillis() / 5);
+
+    int noDataEmptyPolls = pollingProps.getNoDataEmptyPolls() != null
+        ? pollingProps.getNoDataEmptyPolls()
+        : DEFAULT_NO_DATA_EMPTY_POLLS;
+
+    return new PollingSettings(
+        pollTimeout,
+        partitionPollTimeout,
+        noDataEmptyPolls,
+        PollingThrottler.throttlerSupplier(cluster)
+    );
+  }
+
+  public static PollingSettings createDefault() {
+    return new PollingSettings(
+        DEFAULT_POLL_TIMEOUT,
+        DEFAULT_PARTITION_POLL_TIMEOUT,
+        DEFAULT_NO_DATA_EMPTY_POLLS,
+        PollingThrottler::noop
+    );
+  }
+
+  private PollingSettings(Duration pollTimeout,
+                          Duration partitionPollTimeout,
+                          int notDataEmptyPolls,
+                          Supplier<PollingThrottler> throttlerSupplier) {
+    this.pollTimeout = pollTimeout;
+    this.partitionPollTimeout = partitionPollTimeout;
+    this.notDataEmptyPolls = notDataEmptyPolls;
+    this.throttlerSupplier = throttlerSupplier;
+  }
+
+  public EmptyPollsCounter createEmptyPollsCounter() {
+    return new EmptyPollsCounter(notDataEmptyPolls);
+  }
+
+  public Duration getPollTimeout() {
+    return pollTimeout;
+  }
+
+  public Duration getPartitionPollTimeout() {
+    return partitionPollTimeout;
+  }
+
+  public PollingThrottler getPollingThrottler() {
+    return throttlerSupplier.get();
+  }
+}
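One subtlety in create() above: an unset partitionPollTimeout is derived from the effective poll timeout rather than from the 200 ms constant, which only backs createDefault(). A worked example, assuming a config that sets only pollTimeoutMs:

    // polling config: pollTimeoutMs = 2000, others unset
    // pollTimeout          -> 2000 ms (explicit value)
    // partitionPollTimeout -> 2000 / 5 = 400 ms (derived, not DEFAULT_PARTITION_POLL_TIMEOUT)
    // noDataEmptyPolls     -> 3 (DEFAULT_NO_DATA_EMPTY_POLLS)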
PollingThrottler.java

@@ -1,8 +1,9 @@
-package com.provectus.kafka.ui.util;
+package com.provectus.kafka.ui.emitter;

 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.util.concurrent.RateLimiter;
 import com.provectus.kafka.ui.config.ClustersProperties;
+import com.provectus.kafka.ui.util.ConsumerRecordsUtil;
 import java.util.function.Supplier;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.consumer.ConsumerRecords;
ResultSizeLimiter.java

@@ -1,4 +1,4 @@
-package com.provectus.kafka.ui.util;
+package com.provectus.kafka.ui.emitter;

 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import java.util.concurrent.atomic.AtomicInteger;
TailingEmitter.java

@@ -3,7 +3,6 @@ package com.provectus.kafka.ui.emitter;
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
-import com.provectus.kafka.ui.util.PollingThrottler;
 import java.util.HashMap;
 import java.util.function.Supplier;
 import lombok.extern.slf4j.Slf4j;
@@ -22,8 +21,8 @@ public class TailingEmitter extends AbstractEmitter
   public TailingEmitter(Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
                         ConsumerPosition consumerPosition,
                         ConsumerRecordDeserializer recordDeserializer,
-                        PollingThrottler throttler) {
-    super(recordDeserializer, throttler);
+                        PollingSettings pollingSettings) {
+    super(recordDeserializer, pollingSettings);
     this.consumerSupplier = consumerSupplier;
     this.consumerPosition = consumerPosition;
   }
KafkaCluster.java

@@ -2,14 +2,13 @@ package com.provectus.kafka.ui.model;

 import com.provectus.kafka.ui.config.ClustersProperties;
 import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
+import com.provectus.kafka.ui.emitter.PollingSettings;
 import com.provectus.kafka.ui.service.ksql.KsqlApiClient;
 import com.provectus.kafka.ui.service.masking.DataMasking;
 import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
-import com.provectus.kafka.ui.util.PollingThrottler;
 import com.provectus.kafka.ui.util.ReactiveFailover;
 import java.util.Map;
 import java.util.Properties;
-import java.util.function.Supplier;
 import lombok.AccessLevel;
 import lombok.AllArgsConstructor;
 import lombok.Builder;
@@ -28,7 +27,7 @@ public class KafkaCluster {
   private final boolean readOnly;
   private final MetricsConfig metricsConfig;
   private final DataMasking masking;
-  private final Supplier<PollingThrottler> throttler;
+  private final PollingSettings pollingSettings;
   private final ReactiveFailover<KafkaSrClientApi> schemaRegistryClient;
   private final Map<String, ReactiveFailover<KafkaConnectClientApi>> connectsClients;
   private final ReactiveFailover<KsqlApiClient> ksqlClient;
ClustersStorage.java

@@ -14,7 +14,7 @@ public class ClustersStorage {

   public ClustersStorage(ClustersProperties properties, KafkaClusterFactory factory) {
     var builder = ImmutableMap.<String, KafkaCluster>builder();
-    properties.getClusters().forEach(c -> builder.put(c.getName(), factory.create(c)));
+    properties.getClusters().forEach(c -> builder.put(c.getName(), factory.create(properties, c)));
     this.kafkaClusters = builder.build();
   }
KafkaClusterFactory.java

@@ -3,6 +3,7 @@ package com.provectus.kafka.ui.service;
 import com.provectus.kafka.ui.client.RetryingKafkaConnectClient;
 import com.provectus.kafka.ui.config.ClustersProperties;
 import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
+import com.provectus.kafka.ui.emitter.PollingSettings;
 import com.provectus.kafka.ui.model.ApplicationPropertyValidationDTO;
 import com.provectus.kafka.ui.model.ClusterConfigValidationDTO;
 import com.provectus.kafka.ui.model.KafkaCluster;
@@ -12,7 +13,6 @@ import com.provectus.kafka.ui.service.masking.DataMasking;
 import com.provectus.kafka.ui.sr.ApiClient;
 import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
 import com.provectus.kafka.ui.util.KafkaServicesValidation;
-import com.provectus.kafka.ui.util.PollingThrottler;
 import com.provectus.kafka.ui.util.ReactiveFailover;
 import com.provectus.kafka.ui.util.WebClientConfigurator;
 import java.util.HashMap;
@@ -41,7 +41,8 @@ public class KafkaClusterFactory {
   @Value("${webclient.max-in-memory-buffer-size:20MB}")
   private DataSize maxBuffSize;

-  public KafkaCluster create(ClustersProperties.Cluster clusterProperties) {
+  public KafkaCluster create(ClustersProperties properties,
+                             ClustersProperties.Cluster clusterProperties) {
     KafkaCluster.KafkaClusterBuilder builder = KafkaCluster.builder();

     builder.name(clusterProperties.getName());
@@ -49,7 +50,7 @@ public class KafkaClusterFactory {
     builder.properties(convertProperties(clusterProperties.getProperties()));
     builder.readOnly(clusterProperties.isReadOnly());
     builder.masking(DataMasking.create(clusterProperties.getMasking()));
-    builder.throttler(PollingThrottler.throttlerSupplier(clusterProperties));
+    builder.pollingSettings(PollingSettings.create(clusterProperties, properties));

     if (schemaRegistryConfigured(clusterProperties)) {
       builder.schemaRegistryClient(schemaRegistryClient(clusterProperties));
MessagesService.java

@@ -5,6 +5,7 @@ import com.provectus.kafka.ui.emitter.BackwardRecordEmitter;
 import com.provectus.kafka.ui.emitter.ForwardRecordEmitter;
 import com.provectus.kafka.ui.emitter.MessageFilterStats;
 import com.provectus.kafka.ui.emitter.MessageFilters;
+import com.provectus.kafka.ui.emitter.ResultSizeLimiter;
 import com.provectus.kafka.ui.emitter.TailingEmitter;
 import com.provectus.kafka.ui.exception.TopicNotFoundException;
 import com.provectus.kafka.ui.exception.ValidationException;
@@ -17,7 +18,6 @@ import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.serde.api.Serde;
 import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
 import com.provectus.kafka.ui.serdes.ProducerRecordCreator;
-import com.provectus.kafka.ui.util.ResultSizeLimiter;
 import com.provectus.kafka.ui.util.SslPropertiesUtil;
 import java.util.List;
 import java.util.Map;
@@ -169,7 +169,7 @@ public class MessagesService {
           () -> consumerGroupService.createConsumer(cluster),
           consumerPosition,
           recordDeserializer,
-          cluster.getThrottler().get()
+          cluster.getPollingSettings()
       );
     } else if (seekDirection.equals(SeekDirectionDTO.BACKWARD)) {
       emitter = new BackwardRecordEmitter(
@@ -177,14 +177,14 @@ public class MessagesService {
           consumerPosition,
           limit,
           recordDeserializer,
-          cluster.getThrottler().get()
+          cluster.getPollingSettings()
       );
     } else {
       emitter = new TailingEmitter(
           () -> consumerGroupService.createConsumer(cluster),
           consumerPosition,
           recordDeserializer,
-          cluster.getThrottler().get()
+          cluster.getPollingSettings()
       );
     }
     MessageFilterStats filterStats = new MessageFilterStats();
ReactiveAdminClient.java

@@ -228,17 +228,24 @@ public class ReactiveAdminClient implements Closeable {
         .map(brokerId -> new ConfigResource(ConfigResource.Type.BROKER, Integer.toString(brokerId)))
         .collect(toList());
     return toMono(client.describeConfigs(resources).all())
-        // some kafka backends (like MSK serverless) do not support broker's configs retrieval,
-        // in that case InvalidRequestException will be thrown
-        .onErrorResume(InvalidRequestException.class, th -> {
-          log.trace("Error while getting broker {} configs", brokerIds, th);
-          return Mono.just(Map.of());
-        })
+        // some kafka backends don't support broker's configs retrieval,
+        // and throw various exceptions on describeConfigs() call
+        .onErrorResume(th -> th instanceof InvalidRequestException // MSK Serverless
+                || th instanceof UnknownTopicOrPartitionException, // Azure event hub
+            th -> {
+              log.trace("Error while getting configs for brokers {}", brokerIds, th);
+              return Mono.just(Map.of());
+            })
         // there are situations when kafka-ui user has no DESCRIBE_CONFIGS permission on cluster
         .onErrorResume(ClusterAuthorizationException.class, th -> {
           log.trace("AuthorizationException while getting configs for brokers {}", brokerIds, th);
           return Mono.just(Map.of());
         })
+        // catching all remaining exceptions, but logging on WARN level
+        .onErrorResume(th -> true, th -> {
+          log.warn("Unexpected error while getting configs for brokers {}", brokerIds, th);
+          return Mono.just(Map.of());
+        })
         .map(config -> config.entrySet().stream()
             .collect(toMap(
                 c -> Integer.valueOf(c.getKey().name()),
TopicAnalysisService.java

@@ -1,14 +1,14 @@
 package com.provectus.kafka.ui.service.analyze;

-import static com.provectus.kafka.ui.emitter.AbstractEmitter.NO_MORE_DATA_EMPTY_POLLS_COUNT;
-
+import com.provectus.kafka.ui.emitter.EmptyPollsCounter;
 import com.provectus.kafka.ui.emitter.OffsetsInfo;
+import com.provectus.kafka.ui.emitter.PollingSettings;
+import com.provectus.kafka.ui.emitter.PollingThrottler;
 import com.provectus.kafka.ui.exception.TopicAnalysisException;
 import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.model.TopicAnalysisDTO;
 import com.provectus.kafka.ui.service.ConsumerGroupService;
 import com.provectus.kafka.ui.service.TopicsService;
-import com.provectus.kafka.ui.util.PollingThrottler;
 import java.io.Closeable;
 import java.time.Duration;
 import java.time.Instant;
@@ -63,7 +63,7 @@ public class TopicAnalysisService {
     if (analysisTasksStore.isAnalysisInProgress(topicId)) {
       throw new TopicAnalysisException("Topic is already analyzing");
     }
-    var task = new AnalysisTask(cluster, topicId, partitionsCnt, approxNumberOfMsgs, cluster.getThrottler().get());
+    var task = new AnalysisTask(cluster, topicId, partitionsCnt, approxNumberOfMsgs, cluster.getPollingSettings());
     analysisTasksStore.registerNewTask(topicId, task);
     Schedulers.boundedElastic().schedule(task);
   }
@@ -83,6 +83,7 @@ public class TopicAnalysisService {
     private final TopicIdentity topicId;
     private final int partitionsCnt;
     private final long approxNumberOfMsgs;
+    private final EmptyPollsCounter emptyPollsCounter;
     private final PollingThrottler throttler;

     private final TopicAnalysisStats totalStats = new TopicAnalysisStats();
@@ -91,7 +92,7 @@ public class TopicAnalysisService {
     private final KafkaConsumer<Bytes, Bytes> consumer;

     AnalysisTask(KafkaCluster cluster, TopicIdentity topicId, int partitionsCnt,
-                 long approxNumberOfMsgs, PollingThrottler throttler) {
+                 long approxNumberOfMsgs, PollingSettings pollingSettings) {
       this.topicId = topicId;
       this.approxNumberOfMsgs = approxNumberOfMsgs;
       this.partitionsCnt = partitionsCnt;
@@ -103,7 +104,8 @@ public class TopicAnalysisService {
               ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "100000"
           )
       );
-      this.throttler = throttler;
+      this.throttler = pollingSettings.getPollingThrottler();
+      this.emptyPollsCounter = pollingSettings.createEmptyPollsCounter();
     }

     @Override
@@ -124,11 +126,10 @@ public class TopicAnalysisService {
       consumer.seekToBeginning(topicPartitions);

       var offsetsInfo = new OffsetsInfo(consumer, topicId.topicName);
-      for (int emptyPolls = 0; !offsetsInfo.assignedPartitionsFullyPolled()
-          && emptyPolls < NO_MORE_DATA_EMPTY_POLLS_COUNT;) {
+      while (!offsetsInfo.assignedPartitionsFullyPolled() && !emptyPollsCounter.noDataEmptyPollsReached()) {
         var polled = consumer.poll(Duration.ofSeconds(3));
         throttler.throttleAfterPoll(polled);
-        emptyPolls = polled.isEmpty() ? emptyPolls + 1 : 0;
+        emptyPollsCounter.count(polled);
         polled.forEach(r -> {
           totalStats.apply(r);
           partitionStats.get(r.partition()).apply(r);
RecordEmitterTest.java

@@ -9,6 +9,7 @@ import static org.assertj.core.api.Assertions.assertThat;
 import com.provectus.kafka.ui.AbstractIntegrationTest;
 import com.provectus.kafka.ui.emitter.BackwardRecordEmitter;
 import com.provectus.kafka.ui.emitter.ForwardRecordEmitter;
+import com.provectus.kafka.ui.emitter.PollingSettings;
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.producer.KafkaTestProducer;
@@ -16,7 +17,6 @@ import com.provectus.kafka.ui.serde.api.Serde;
 import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
 import com.provectus.kafka.ui.serdes.PropertyResolverImpl;
 import com.provectus.kafka.ui.serdes.builtin.StringSerde;
-import com.provectus.kafka.ui.util.PollingThrottler;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -112,7 +112,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         this::createConsumer,
         new ConsumerPosition(BEGINNING, EMPTY_TOPIC, null),
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );

     var backwardEmitter = new BackwardRecordEmitter(
@@ -120,7 +120,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         new ConsumerPosition(BEGINNING, EMPTY_TOPIC, null),
         100,
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );

     StepVerifier.create(Flux.create(forwardEmitter))
@@ -142,7 +142,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         this::createConsumer,
         new ConsumerPosition(BEGINNING, TOPIC, null),
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );

     var backwardEmitter = new BackwardRecordEmitter(
@@ -150,7 +150,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         new ConsumerPosition(LATEST, TOPIC, null),
         PARTITIONS * MSGS_PER_PARTITION,
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );

     List<String> expectedValues = SENT_RECORDS.stream().map(Record::getValue).collect(Collectors.toList());
@@ -171,7 +171,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         this::createConsumer,
         new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );

     var backwardEmitter = new BackwardRecordEmitter(
@@ -179,7 +179,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
         PARTITIONS * MSGS_PER_PARTITION,
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );

     var expectedValues = SENT_RECORDS.stream()
@@ -216,7 +216,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         this::createConsumer,
         new ConsumerPosition(TIMESTAMP, TOPIC, targetTimestamps),
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );

     var backwardEmitter = new BackwardRecordEmitter(
@@ -224,7 +224,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         new ConsumerPosition(TIMESTAMP, TOPIC, targetTimestamps),
         PARTITIONS * MSGS_PER_PARTITION,
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );

     var expectedValues = SENT_RECORDS.stream()
@@ -255,7 +255,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
         numMessages,
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
    );

     var expectedValues = SENT_RECORDS.stream()
@@ -281,7 +281,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         new ConsumerPosition(OFFSET, TOPIC, offsets),
         100,
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );

     expectEmitter(backwardEmitter,
PollingThrottlerTest.java

@@ -5,6 +5,7 @@ import static org.assertj.core.data.Percentage.withPercentage;

 import com.google.common.base.Stopwatch;
 import com.google.common.util.concurrent.RateLimiter;
+import com.provectus.kafka.ui.emitter.PollingThrottler;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.TimeUnit;
 import org.junit.jupiter.api.Test;
@@ -3600,6 +3600,15 @@ components:
     kafka:
       type: object
       properties:
+        polling:
+          type: object
+          properties:
+            pollTimeoutMs:
+              type: integer
+            partitionPollTimeout:
+              type: integer
+            noDataEmptyPolls:
+              type: integer
         clusters:
           type: array
           items:
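Taken together, the two hunks above are one change seen from two sides: the emitter tests now pass a `PollingSettings` object where they previously passed a bare `PollingThrottler`, and the config schema gains the three knobs such an object would carry. A rough sketch of what `PollingSettings.createDefault()` plausibly wraps — the field names mirror the spec properties above, while the defaults and `Duration` types are assumptions, not taken from the source:

```java
import java.time.Duration;

// Illustrative sketch only: field names follow pollTimeoutMs / partitionPollTimeout /
// noDataEmptyPolls from the spec hunk above; the default values are assumptions.
public class PollingSettings {

  private final Duration pollTimeout;          // upper bound for one consumer.poll()
  private final Duration partitionPollTimeout; // upper bound when polling a single partition
  private final int noDataEmptyPolls;          // consecutive empty polls before stopping

  private PollingSettings(Duration pollTimeout, Duration partitionPollTimeout, int noDataEmptyPolls) {
    this.pollTimeout = pollTimeout;
    this.partitionPollTimeout = partitionPollTimeout;
    this.noDataEmptyPolls = noDataEmptyPolls;
  }

  public static PollingSettings createDefault() {
    return new PollingSettings(Duration.ofMillis(1000), Duration.ofMillis(200), 3);
  }

  public Duration getPollTimeout() {
    return pollTimeout;
  }

  public Duration getPartitionPollTimeout() {
    return partitionPollTimeout;
  }

  public int getNoDataEmptyPolls() {
    return noDataEmptyPolls;
  }
}
```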
@@ -4,6 +4,7 @@ import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.ElementsCollection;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.WebDriverRunner;
+import com.provectus.kafka.ui.pages.panels.enums.MenuItem;
 import com.provectus.kafka.ui.utilities.WebUtils;
 import lombok.extern.slf4j.Slf4j;
 import org.openqa.selenium.Keys;
@@ -33,6 +34,8 @@ public abstract class BasePage extends WebUtils {
   protected String summaryCellLocator = "//div[contains(text(),'%s')]";
   protected String tableElementNameLocator = "//tbody//a[contains(text(),'%s')]";
   protected String columnHeaderLocator = "//table//tr/th//div[text()='%s']";
+  protected String pageTitleFromHeader = "//h1[text()='%s']";
+  protected String pagePathFromHeader = "//a[text()='%s']/../h1";

   protected void waitUntilSpinnerDisappear() {
     log.debug("\nwaitUntilSpinnerDisappear");
@@ -41,6 +44,14 @@ public abstract class BasePage extends WebUtils {
     }
   }

+  protected SelenideElement getPageTitleFromHeader(MenuItem menuItem) {
+    return $x(String.format(pageTitleFromHeader, menuItem.getPageTitle()));
+  }
+
+  protected SelenideElement getPagePathFromHeader(MenuItem menuItem) {
+    return $x(String.format(pagePathFromHeader, menuItem.getPageTitle()));
+  }
+
   protected void clickSubmitBtn() {
     clickByJavaScript(submitBtn);
   }
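The two helpers added to `BasePage` replace the per-page `//h1[...]` locator fields that the following hunks delete: every page object now resolves its header from a shared template plus the page title carried by the `MenuItem` enum introduced later in this diff. The mechanics are plain `String.format`, as this standalone sketch shows:

```java
// Standalone illustration of the locator substitution performed by
// getPageTitleFromHeader(); no Selenide needed to see the mechanics.
public class LocatorDemo {

  public static void main(String[] args) {
    String pageTitleFromHeader = "//h1[text()='%s']";
    // MenuItem.BROKERS carries the page title "Brokers" (see the enum below),
    // so getPageTitleFromHeader(BROKERS) evaluates $x on:
    System.out.println(String.format(pageTitleFromHeader, "Brokers"));
    // -> //h1[text()='Brokers']
  }
}
```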
@@ -12,15 +12,14 @@ import java.util.stream.Collectors;
 import java.util.stream.Stream;

 import static com.codeborne.selenide.Selenide.$x;
+import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.BROKERS;

 public class BrokersList extends BasePage {

-  protected SelenideElement brokersListHeader = $x("//h1[text()='Brokers']");
-
   @Step
   public BrokersList waitUntilScreenReady() {
     waitUntilSpinnerDisappear();
-    brokersListHeader.shouldBe(Condition.visible);
+    getPageTitleFromHeader(BROKERS).shouldBe(Condition.visible);
     return this;
   }

@@ -6,6 +6,7 @@ import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;

 import static com.codeborne.selenide.Selenide.$x;
+import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KAFKA_CONNECT;


 public class KafkaConnectList extends BasePage {
@@ -19,7 +20,7 @@ public class KafkaConnectList extends BasePage {
   @Step
   public KafkaConnectList waitUntilScreenReady() {
     waitUntilSpinnerDisappear();
-    createConnectorBtn.shouldBe(Condition.visible);
+    getPageTitleFromHeader(KAFKA_CONNECT).shouldBe(Condition.visible);
     return this;
   }

@@ -1,20 +1,17 @@
 package com.provectus.kafka.ui.pages.consumers;

 import com.codeborne.selenide.Condition;
-import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;

-import static com.codeborne.selenide.Selenide.$x;
+import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.CONSUMERS;

 public class ConsumersList extends BasePage {

-  protected SelenideElement consumerListHeader = $x("//h1[text()='Consumers']");
-
   @Step
   public ConsumersList waitUntilScreenReady() {
     waitUntilSpinnerDisappear();
-    consumerListHeader.shouldHave(Condition.visible);
+    getPageTitleFromHeader(CONSUMERS).shouldBe(Condition.visible);
     return this;
   }
 }
@@ -1,137 +1,139 @@
 package com.provectus.kafka.ui.pages.ksqlDb;

-import static com.codeborne.selenide.Selenide.$;
-import static com.codeborne.selenide.Selenide.$x;
-
 import com.codeborne.selenide.CollectionCondition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.ksqlDb.enums.KsqlMenuTabs;
 import io.qameta.allure.Step;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
 import org.openqa.selenium.By;

+import java.util.ArrayList;
+import java.util.List;
+
+import static com.codeborne.selenide.Selenide.$;
+import static com.codeborne.selenide.Selenide.$x;
+import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
+
 public class KsqlDbList extends BasePage {
+
   protected SelenideElement executeKsqlBtn = $x("//button[text()='Execute KSQL Request']");
   protected SelenideElement tablesTab = $x("//nav[@role='navigation']/a[text()='Tables']");
   protected SelenideElement streamsTab = $x("//nav[@role='navigation']/a[text()='Streams']");

   @Step
   public KsqlDbList waitUntilScreenReady() {
     waitUntilSpinnerDisappear();
-    Arrays.asList(tablesTab, streamsTab).forEach(tab -> tab.shouldBe(Condition.visible));
+    getPageTitleFromHeader(KSQL_DB).shouldBe(Condition.visible);
     return this;
   }

   @Step
   public KsqlDbList clickExecuteKsqlRequestBtn() {
     clickByJavaScript(executeKsqlBtn);
     return this;
   }

   @Step
   public KsqlDbList openDetailsTab(KsqlMenuTabs menu) {
     $(By.linkText(menu.toString())).shouldBe(Condition.visible).click();
     waitUntilSpinnerDisappear();
     return this;
   }

   private List<KsqlDbList.KsqlTablesGridItem> initTablesItems() {
     List<KsqlDbList.KsqlTablesGridItem> gridItemList = new ArrayList<>();
     gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
         .forEach(item -> gridItemList.add(new KsqlDbList.KsqlTablesGridItem(item)));
     return gridItemList;
   }

   @Step
   public KsqlDbList.KsqlTablesGridItem getTableByName(String tableName) {
     return initTablesItems().stream()
         .filter(e -> e.getTableName().equals(tableName))
         .findFirst().orElseThrow();
   }

+  private List<KsqlDbList.KsqlStreamsGridItem> initStreamsItems() {
+    List<KsqlDbList.KsqlStreamsGridItem> gridItemList = new ArrayList<>();
+    gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+        .forEach(item -> gridItemList.add(new KsqlDbList.KsqlStreamsGridItem(item)));
+    return gridItemList;
+  }
+
+  @Step
+  public KsqlDbList.KsqlStreamsGridItem getStreamByName(String streamName) {
+    return initStreamsItems().stream()
+        .filter(e -> e.getStreamName().equals(streamName))
+        .findFirst().orElseThrow();
+  }
+
   public static class KsqlTablesGridItem extends BasePage {

     private final SelenideElement element;

     public KsqlTablesGridItem(SelenideElement element) {
       this.element = element;
     }

     @Step
     public String getTableName() {
       return element.$x("./td[1]").getText().trim();
     }

     @Step
     public String getTopicName() {
       return element.$x("./td[2]").getText().trim();
     }

     @Step
     public String getKeyFormat() {
       return element.$x("./td[3]").getText().trim();
     }

     @Step
     public String getValueFormat() {
       return element.$x("./td[4]").getText().trim();
     }

     @Step
     public String getIsWindowed() {
       return element.$x("./td[5]").getText().trim();
     }
   }

-  private List<KsqlDbList.KsqlStreamsGridItem> initStreamsItems() {
-    List<KsqlDbList.KsqlStreamsGridItem> gridItemList = new ArrayList<>();
-    gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
-        .forEach(item -> gridItemList.add(new KsqlDbList.KsqlStreamsGridItem(item)));
-    return gridItemList;
-  }
-
-  @Step
-  public KsqlDbList.KsqlStreamsGridItem getStreamByName(String streamName) {
-    return initStreamsItems().stream()
-        .filter(e -> e.getStreamName().equals(streamName))
-        .findFirst().orElseThrow();
-  }
-
   public static class KsqlStreamsGridItem extends BasePage {

     private final SelenideElement element;

     public KsqlStreamsGridItem(SelenideElement element) {
       this.element = element;
     }

     @Step
     public String getStreamName() {
       return element.$x("./td[1]").getText().trim();
     }

     @Step
     public String getTopicName() {
       return element.$x("./td[2]").getText().trim();
     }

     @Step
     public String getKeyFormat() {
       return element.$x("./td[3]").getText().trim();
     }

     @Step
     public String getValueFormat() {
       return element.$x("./td[4]").getText().trim();
     }

     @Step
     public String getIsWindowed() {
       return element.$x("./td[5]").getText().trim();
     }
   }
 }
@@ -16,7 +16,6 @@ import static com.codeborne.selenide.Selenide.$$x;
 import static com.codeborne.selenide.Selenide.$x;

 public class KsqlQueryForm extends BasePage {
-  protected SelenideElement pageTitle = $x("//h1[text()='Query']");
   protected SelenideElement clearBtn = $x("//div/button[text()='Clear']");
   protected SelenideElement executeBtn = $x("//div/button[text()='Execute']");
   protected SelenideElement stopQueryBtn = $x("//div/button[text()='Stop query']");
@@ -31,7 +30,7 @@ public class KsqlQueryForm extends BasePage {
   @Step
   public KsqlQueryForm waitUntilScreenReady() {
     waitUntilSpinnerDisappear();
-    pageTitle.shouldBe(Condition.visible);
+    executeBtn.shouldBe(Condition.visible);
     return this;
   }

@@ -1,7 +1,9 @@
-package com.provectus.kafka.ui.pages;
+package com.provectus.kafka.ui.pages.panels;

 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
+import com.provectus.kafka.ui.pages.panels.enums.MenuItem;
+import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;

 import java.time.Duration;
@@ -34,38 +36,29 @@ public class NaviSideBar extends BasePage {
   }

   @Step
-  public NaviSideBar openSideMenu(String clusterName, SideMenuOption option) {
+  public String getPagePath(MenuItem menuItem) {
+    return getPagePathFromHeader(menuItem)
+        .shouldBe(Condition.visible)
+        .getText().trim();
+  }
+
+  @Step
+  public NaviSideBar openSideMenu(String clusterName, MenuItem menuItem) {
     clickByActions(expandCluster(clusterName).parent()
-        .$x(String.format(sideMenuOptionElementLocator, option.value)));
+        .$x(String.format(sideMenuOptionElementLocator, menuItem.getNaviTitle())));
     return this;
   }

   @Step
-  public NaviSideBar openSideMenu(SideMenuOption option) {
-    openSideMenu(CLUSTER_NAME, option);
+  public NaviSideBar openSideMenu(MenuItem menuItem) {
+    openSideMenu(CLUSTER_NAME, menuItem);
     return this;
   }

   public List<SelenideElement> getAllMenuButtons() {
     expandCluster(CLUSTER_NAME);
-    return Stream.of(SideMenuOption.values())
-        .map(option -> $x(String.format(sideMenuOptionElementLocator, option.value)))
+    return Stream.of(MenuItem.values())
+        .map(menuItem -> $x(String.format(sideMenuOptionElementLocator, menuItem.getNaviTitle())))
         .collect(Collectors.toList());
   }

-  public enum SideMenuOption {
-    DASHBOARD("Dashboard"),
-    BROKERS("Brokers"),
-    TOPICS("Topics"),
-    CONSUMERS("Consumers"),
-    SCHEMA_REGISTRY("Schema Registry"),
-    KAFKA_CONNECT("Kafka Connect"),
-    KSQL_DB("KSQL DB");
-
-    final String value;
-
-    SideMenuOption(String value) {
-      this.value = value;
-    }
-  }
 }
@@ -1,6 +1,7 @@
-package com.provectus.kafka.ui.pages;
+package com.provectus.kafka.ui.pages.panels;

 import com.codeborne.selenide.SelenideElement;
+import com.provectus.kafka.ui.pages.BasePage;

 import java.util.Arrays;
 import java.util.List;
@@ -0,0 +1,28 @@
+package com.provectus.kafka.ui.pages.panels.enums;
+
+public enum MenuItem {
+
+  DASHBOARD("Dashboard", "Dashboard"),
+  BROKERS("Brokers", "Brokers"),
+  TOPICS("Topics", "Topics"),
+  CONSUMERS("Consumers", "Consumers"),
+  SCHEMA_REGISTRY("Schema Registry", "Schema Registry"),
+  KAFKA_CONNECT("Kafka Connect", "Connectors"),
+  KSQL_DB("KSQL DB", "KSQL DB");
+
+  private final String naviTitle;
+  private final String pageTitle;
+
+  MenuItem(String naviTitle, String pageTitle) {
+    this.naviTitle = naviTitle;
+    this.pageTitle = pageTitle;
+  }
+
+  public String getNaviTitle() {
+    return naviTitle;
+  }
+
+  public String getPageTitle() {
+    return pageTitle;
+  }
+}
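The enum carries two labels because the sidebar entry and the page header do not always agree: the menu item labelled "Kafka Connect" opens a page headed "Connectors". A quick usage sketch:

```java
import com.provectus.kafka.ui.pages.panels.enums.MenuItem;

// Shows why MenuItem separates the sidebar label from the page-header label.
public class MenuItemDemo {

  public static void main(String[] args) {
    // openSideMenu() clicks by the navigation title...
    System.out.println(MenuItem.KAFKA_CONNECT.getNaviTitle()); // Kafka Connect
    // ...while waitUntilScreenReady() asserts on the page title.
    System.out.println(MenuItem.KAFKA_CONNECT.getPageTitle()); // Connectors
  }
}
```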
@@ -6,6 +6,7 @@ import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;

 import static com.codeborne.selenide.Selenide.$x;
+import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.SCHEMA_REGISTRY;

 public class SchemaRegistryList extends BasePage {

@@ -14,7 +15,7 @@ public class SchemaRegistryList extends BasePage {
   @Step
   public SchemaRegistryList waitUntilScreenReady() {
     waitUntilSpinnerDisappear();
-    createSchemaBtn.shouldBe(Condition.visible);
+    getPageTitleFromHeader(SCHEMA_REGISTRY).shouldBe(Condition.visible);
     return this;
   }

@@ -14,10 +14,10 @@ import java.util.stream.Stream;

 import static com.codeborne.selenide.Condition.visible;
 import static com.codeborne.selenide.Selenide.$x;
+import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.TOPICS;

 public class TopicsList extends BasePage {

-  protected SelenideElement topicListHeader = $x("//h1[text()='Topics']");
   protected SelenideElement addTopicBtn = $x("//button[normalize-space(text()) ='Add a Topic']");
   protected SelenideElement searchField = $x("//input[@placeholder='Search by Topic Name']");
   protected SelenideElement showInternalRadioBtn = $x("//input[@name='ShowInternalTopics']");
@@ -31,7 +31,7 @@ public class TopicsList extends BasePage {
   @Step
   public TopicsList waitUntilScreenReady() {
     waitUntilSpinnerDisappear();
-    topicListHeader.shouldBe(visible);
+    getPageTitleFromHeader(TOPICS).shouldBe(visible);
     return this;
   }

@@ -67,8 +67,8 @@ public class ApiService extends BaseSource {
   }

   @Step
-  public ApiService createTopic(String topicName) {
-    createTopic(CLUSTER_NAME, topicName);
+  public ApiService createTopic(Topic topic) {
+    createTopic(CLUSTER_NAME, topic.getName());
     return this;
   }

@@ -133,6 +133,12 @@ public class ApiService extends BaseSource {
     return this;
   }

+  @Step
+  public ApiService deleteConnector(String connectorName) {
+    deleteConnector(CLUSTER_NAME, CONNECT_NAME, connectorName);
+    return this;
+  }
+
   @SneakyThrows
   private void createConnector(String clusterName, String connectName, Connector connector) {
     NewConnector connectorProperties = new NewConnector();
@@ -152,9 +158,15 @@ public class ApiService extends BaseSource {
     return this;
   }

+  @Step
+  public ApiService createConnector(Connector connector) {
+    createConnector(CLUSTER_NAME, CONNECT_NAME, connector);
+    return this;
+  }
+
   @Step
   public String getFirstConnectName(String clusterName) {
-    return connectorApi().getConnects(clusterName).blockFirst().getName();
+    return Objects.requireNonNull(connectorApi().getConnects(clusterName).blockFirst()).getName();
   }

   @SneakyThrows
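With these overloads, test classes no longer thread cluster and connect names through every call; together with `CONNECT_NAME` moving into `BaseSource` in the next hunk, fixture code collapses to a fluent chain. A sketch of the resulting call site — `Topic`, `Connector`, `apiService` and `BaseTest` are the e2e-harness types visible elsewhere in this diff, so this compiles only inside that module:

```java
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;

// Hypothetical fixture illustrating the new one-argument overloads.
public class ExampleFixture extends BaseTest {

  private static final Topic TOPIC = new Topic().setName("orders");
  private static final Connector CONNECTOR = new Connector()
      .setName("orders-sink")
      .setConfig("{}"); // real tests load this JSON from resources

  @BeforeClass(alwaysRun = true)
  public void beforeClass() {
    apiService
        .createTopic(TOPIC)          // was createTopic(TOPIC.getName())
        .createConnector(CONNECTOR); // was createConnector(CONNECT_NAME, CONNECTOR)
  }

  @AfterClass(alwaysRun = true)
  public void afterClass() {
    apiService
        .deleteConnector(CONNECTOR.getName()) // was deleteConnector(CONNECT_NAME, ...)
        .deleteTopic(TOPIC.getName());
  }
}
```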
@@ -8,6 +8,7 @@ public abstract class BaseSource {
   public static final String BASE_CONTAINER_URL = "http://host.testcontainers.internal:8080";
   public static final String BASE_LOCAL_URL = "http://localhost:8080";
   public static final String CLUSTER_NAME = "local";
+  public static final String CONNECT_NAME = "first";
   private static Config config;
   public static final String BROWSER = config().browser();
   public static final String SUITE_NAME = config().suite();
@@ -22,7 +22,7 @@ import org.testng.asserts.SoftAssert;
 import java.time.Duration;
 import java.util.List;

-import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.*;
+import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.*;
 import static com.provectus.kafka.ui.settings.BaseSource.*;
 import static com.provectus.kafka.ui.settings.drivers.LocalWebDriver.*;
 import static com.provectus.kafka.ui.utilities.qaseUtils.QaseSetup.qaseIntegrationSetup;
@@ -108,7 +108,7 @@ public abstract class BaseTest extends Facade {
   public void afterMethod() {
     browserClear();
   }

   @Step
   protected void navigateToBrokers() {
     naviSideBar
@@ -117,6 +117,17 @@ public abstract class BaseTest extends Facade {
         .waitUntilScreenReady();
   }

+  @Step
+  protected void navigateToBrokersAndOpenDetails(int brokerId) {
+    naviSideBar
+        .openSideMenu(BROKERS);
+    brokersList
+        .waitUntilScreenReady()
+        .openBroker(brokerId);
+    brokersDetails
+        .waitUntilScreenReady();
+  }
+
   @Step
   protected void navigateToTopics() {
     naviSideBar
@@ -135,7 +146,7 @@ public abstract class BaseTest extends Facade {
     topicDetails
         .waitUntilScreenReady();
   }

   @Step
   protected void navigateToConsumers() {
     naviSideBar
@@ -143,7 +154,7 @@ public abstract class BaseTest extends Facade {
     consumersList
         .waitUntilScreenReady();
   }

   @Step
   protected void navigateToSchemaRegistry() {
     naviSideBar
@@ -151,7 +162,7 @@ public abstract class BaseTest extends Facade {
     schemaRegistryList
         .waitUntilScreenReady();
   }

   @Step
   protected void navigateToSchemaRegistryAndOpenDetails(String schemaName) {
     navigateToSchemaRegistry();
@@ -160,7 +171,7 @@ public abstract class BaseTest extends Facade {
     schemaDetails
         .waitUntilScreenReady();
   }

   @Step
   protected void navigateToConnectors() {
     naviSideBar
@@ -168,7 +179,7 @@ public abstract class BaseTest extends Facade {
     kafkaConnectList
         .waitUntilScreenReady();
   }

   @Step
   protected void navigateToConnectorsAndOpenDetails(String connectorName) {
     navigateToConnectors();
@@ -177,7 +188,7 @@ public abstract class BaseTest extends Facade {
     connectorDetails
         .waitUntilScreenReady();
   }

   @Step
   protected void navigateToKsqlDb() {
     naviSideBar
@@ -185,7 +196,7 @@ public abstract class BaseTest extends Facade {
     ksqlDbList
         .waitUntilScreenReady();
   }

   @Step
   protected void verifyElementsCondition(List<SelenideElement> elementList, Condition expectedCondition) {
     SoftAssert softly = new SoftAssert();
@@ -1,7 +1,7 @@
 package com.provectus.kafka.ui;

-import com.provectus.kafka.ui.pages.NaviSideBar;
-import com.provectus.kafka.ui.pages.TopPanel;
+import com.provectus.kafka.ui.pages.panels.NaviSideBar;
+import com.provectus.kafka.ui.pages.panels.TopPanel;
 import com.provectus.kafka.ui.pages.brokers.BrokersConfigTab;
 import com.provectus.kafka.ui.pages.brokers.BrokersDetails;
 import com.provectus.kafka.ui.pages.brokers.BrokersList;
@@ -11,6 +11,7 @@ import java.lang.reflect.Method;

 import static com.provectus.kafka.ui.utilities.qaseUtils.QaseSetup.qaseIntegrationSetup;
 import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.NOT_AUTOMATED;
+import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.TO_BE_AUTOMATED;

 @Listeners(QaseResultListener.class)
 public abstract class BaseManualTest {
@@ -22,7 +23,8 @@ public abstract class BaseManualTest {

   @BeforeMethod
   public void beforeMethod(Method method) {
-    if (method.getAnnotation(Automation.class).state().equals(NOT_AUTOMATED))
+    if (method.getAnnotation(Automation.class).state().equals(NOT_AUTOMATED)
+        || method.getAnnotation(Automation.class).state().equals(TO_BE_AUTOMATED))
       throw new SkipException("Skip test exception");
   }
 }
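The skip logic above reads the `@Automation` annotation reflectively; the annotation itself is not part of this diff. A plausible minimal definition — an assumption about its shape, not the project's actual source — looks like this; `RUNTIME` retention is the one property the `method.getAnnotation(...)` call genuinely requires:

```java
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

// Stand-in for the project's qaseUtils.enums.State; only the two values
// referenced in this diff are listed.
enum State { NOT_AUTOMATED, TO_BE_AUTOMATED }

// Assumed shape of the annotation read in beforeMethod() above.
@Retention(RetentionPolicy.RUNTIME) // required for the reflective lookup at runtime
@Target(ElementType.METHOD)
@interface Automation {
  State state();
}
```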
@@ -0,0 +1,19 @@
+package com.provectus.kafka.ui.manualSuite.backlog;
+
+import com.provectus.kafka.ui.manualSuite.BaseManualTest;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Automation;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
+import io.qase.api.annotation.QaseId;
+import org.testng.annotations.Test;
+
+import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.TO_BE_AUTOMATED;
+
+public class SanityBacklog extends BaseManualTest {
+
+  @Automation(state = TO_BE_AUTOMATED)
+  @Suite(id = 19)
+  @QaseId(285)
+  @Test
+  public void testCaseA() {
+  }
+}
@@ -1,35 +1,61 @@
-package com.provectus.kafka.ui.manualSuite.suite;
+package com.provectus.kafka.ui.manualSuite.backlog;

 import com.provectus.kafka.ui.manualSuite.BaseManualTest;
 import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Automation;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
 import io.qase.api.annotation.QaseId;
 import org.testng.annotations.Test;

 import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.TO_BE_AUTOMATED;

-public class KsqlDbTest extends BaseManualTest {
+public class SmokeBacklog extends BaseManualTest {

   @Automation(state = TO_BE_AUTOMATED)
-  @QaseId(276)
+  @Suite(id = 1)
+  @QaseId(330)
   @Test
   public void testCaseA() {
   }

   @Automation(state = TO_BE_AUTOMATED)
-  @QaseId(277)
+  @Suite(id = 8)
+  @QaseId(276)
   @Test
   public void testCaseB() {
   }

   @Automation(state = TO_BE_AUTOMATED)
-  @QaseId(278)
+  @Suite(id = 8)
+  @QaseId(277)
   @Test
   public void testCaseC() {
   }

   @Automation(state = TO_BE_AUTOMATED)
-  @QaseId(284)
+  @Suite(id = 8)
+  @QaseId(278)
   @Test
   public void testCaseD() {
   }

+  @Automation(state = TO_BE_AUTOMATED)
+  @Suite(id = 8)
+  @QaseId(284)
+  @Test
+  public void testCaseE() {
+  }
+
+  @Automation(state = TO_BE_AUTOMATED)
+  @Suite(id = 1)
+  @QaseId(331)
+  @Test
+  public void testCaseF() {
+  }
+
+  @Automation(state = TO_BE_AUTOMATED)
+  @Suite(id = 1)
+  @QaseId(332)
+  @Test
+  public void testCaseG() {
+  }
 }
@@ -34,68 +34,62 @@ public class TopicsTest extends BaseManualTest {
   }

   @Automation(state = NOT_AUTOMATED)
-  @QaseId(46)
+  @QaseId(47)
   @Test
   public void testCaseE() {
   }

   @Automation(state = NOT_AUTOMATED)
-  @QaseId(47)
+  @QaseId(48)
   @Test
   public void testCaseF() {
   }

   @Automation(state = NOT_AUTOMATED)
-  @QaseId(48)
+  @QaseId(49)
   @Test
   public void testCaseG() {
   }

   @Automation(state = NOT_AUTOMATED)
-  @QaseId(49)
+  @QaseId(50)
   @Test
   public void testCaseH() {
   }

   @Automation(state = NOT_AUTOMATED)
-  @QaseId(50)
+  @QaseId(57)
   @Test
   public void testCaseI() {
   }

   @Automation(state = NOT_AUTOMATED)
-  @QaseId(57)
+  @QaseId(58)
   @Test
   public void testCaseJ() {
   }

   @Automation(state = NOT_AUTOMATED)
-  @QaseId(58)
+  @QaseId(269)
   @Test
   public void testCaseK() {
   }

   @Automation(state = NOT_AUTOMATED)
-  @QaseId(269)
+  @QaseId(270)
   @Test
   public void testCaseL() {
   }

   @Automation(state = NOT_AUTOMATED)
-  @QaseId(270)
+  @QaseId(271)
   @Test
   public void testCaseM() {
   }

   @Automation(state = NOT_AUTOMATED)
-  @QaseId(271)
+  @QaseId(272)
   @Test
   public void testCaseN() {
   }

-  @Automation(state = NOT_AUTOMATED)
-  @QaseId(272)
-  @Test
-  public void testCaseO() {
-  }
 }
@@ -5,12 +5,12 @@ import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Automation;
 import io.qase.api.annotation.QaseId;
 import org.testng.annotations.Test;

-import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.TO_BE_AUTOMATED;
+import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.NOT_AUTOMATED;

-public class BrokersTest extends BaseManualTest {
+public class WizardTest extends BaseManualTest {

-  @Automation(state = TO_BE_AUTOMATED)
-  @QaseId(330)
+  @Automation(state = NOT_AUTOMATED)
+  @QaseId(333)
   @Test
   public void testCaseA() {
   }
@@ -9,6 +9,13 @@ import static com.provectus.kafka.ui.utilities.qaseUtils.QaseSetup.qaseIntegrati
 @Listeners(QaseCreateListener.class)
 public abstract class BaseQaseTest {

+  protected static final long BROKERS_SUITE_ID = 1;
+  protected static final long CONNECTORS_SUITE_ID = 10;
+  protected static final long KSQL_DB_SUITE_ID = 8;
+  protected static final long SANITY_SUITE_ID = 19;
+  protected static final long SCHEMAS_SUITE_ID = 11;
+  protected static final long TOPICS_SUITE_ID = 2;
+
   @BeforeSuite
   public void beforeSuite() {
     qaseIntegrationSetup();
@@ -13,7 +13,7 @@ public class Template extends BaseQaseTest {

   /**
    * this class is a kind of placeholder or example, use is as template to create new one
-   * copy class into kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/suite
+   * copy Template into kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/
    * place it into regarding folder and rename according to test case summary from Qase.io
    * uncomment @Test and set all annotations according to kafka-ui-e2e-checks/QASE.md
    */
@@ -3,20 +3,46 @@ package com.provectus.kafka.ui.smokeSuite;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.WebDriverRunner;
 import com.provectus.kafka.ui.BaseTest;
+import com.provectus.kafka.ui.pages.panels.enums.MenuItem;
+import com.provectus.kafka.ui.models.Connector;
+import com.provectus.kafka.ui.models.Schema;
+import com.provectus.kafka.ui.models.Topic;
 import io.qameta.allure.Step;
 import io.qase.api.annotation.QaseId;
 import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;

 import java.util.stream.Collectors;
 import java.util.stream.Stream;

+import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.*;
 import static com.provectus.kafka.ui.settings.BaseSource.BROWSER;
+import static com.provectus.kafka.ui.utilities.FileUtils.getResourceAsString;
 import static com.provectus.kafka.ui.variables.Browser.LOCAL;
 import static com.provectus.kafka.ui.variables.Url.*;
+import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;

 public class SmokeTest extends BaseTest {
+
+  private static final int BROKER_ID = 1;
+  private static final Schema TEST_SCHEMA = Schema.createSchemaAvro();
+  private static final Topic TEST_TOPIC = new Topic()
+      .setName("new-topic-" + randomAlphabetic(5))
+      .setNumberOfPartitions(1);
+  private static final Connector TEST_CONNECTOR = new Connector()
+      .setName("new-connector-" + randomAlphabetic(5))
+      .setConfig(getResourceAsString("testData/connectors/config_for_create_connector_via_api.json"));
+
+  @BeforeClass(alwaysRun = true)
+  public void beforeClass() {
+    apiService
+        .createTopic(TEST_TOPIC)
+        .createSchema(TEST_SCHEMA)
+        .createConnector(TEST_CONNECTOR);
+  }

   @QaseId(198)
   @Test
   public void checkBasePageElements() {
@@ -45,10 +71,37 @@ public class SmokeTest extends BaseTest {
     verifyCurrentUrl(KSQL_DB_LIST_URL);
   }

+  @QaseId(46)
+  @Test
+  public void checkComponentsPathWhileNavigating() {
+    navigateToBrokersAndOpenDetails(BROKER_ID);
+    verifyComponentsPath(BROKERS, String.format("Broker %d", BROKER_ID));
+    navigateToTopicsAndOpenDetails(TEST_TOPIC.getName());
+    verifyComponentsPath(TOPICS, TEST_TOPIC.getName());
+    navigateToSchemaRegistryAndOpenDetails(TEST_SCHEMA.getName());
+    verifyComponentsPath(SCHEMA_REGISTRY, TEST_SCHEMA.getName());
+    navigateToConnectorsAndOpenDetails(TEST_CONNECTOR.getName());
+    verifyComponentsPath(KAFKA_CONNECT, TEST_CONNECTOR.getName());
+  }
+
   @Step
   private void verifyCurrentUrl(String expectedUrl) {
     String host = BROWSER.equals(LOCAL) ? "localhost" : "host.testcontainers.internal";
     Assert.assertEquals(WebDriverRunner.getWebDriver().getCurrentUrl(),
         String.format(expectedUrl, host), "getCurrentUrl()");
   }
+
+  @Step
+  private void verifyComponentsPath(MenuItem menuItem, String expectedPath) {
+    Assert.assertEquals(naviSideBar.getPagePath(menuItem), expectedPath,
+        String.format("getPagePath() for %s", menuItem.getPageTitle().toUpperCase()));
+  }
+
+  @AfterClass(alwaysRun = true)
+  public void afterClass() {
+    apiService
+        .deleteTopic(TEST_TOPIC.getName())
+        .deleteSchema(TEST_SCHEMA.getName())
+        .deleteConnector(TEST_CONNECTOR.getName());
+  }
 }
@@ -18,44 +18,42 @@ import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;

 public class ConnectorsTest extends BaseTest {

-  private static final String CONNECT_NAME = "first";
   private static final List<Topic> TOPIC_LIST = new ArrayList<>();
   private static final List<Connector> CONNECTOR_LIST = new ArrayList<>();
   private static final String MESSAGE_CONTENT = "testData/topics/message_content_create_topic.json";
   private static final String MESSAGE_KEY = " ";
   private static final Topic TOPIC_FOR_CREATE = new Topic()
-      .setName("topic_for_create_connector-" + randomAlphabetic(5))
+      .setName("topic-for-create-connector-" + randomAlphabetic(5))
       .setMessageContent(MESSAGE_CONTENT).setMessageKey(MESSAGE_KEY);
   private static final Topic TOPIC_FOR_DELETE = new Topic()
-      .setName("topic_for_delete_connector-" + randomAlphabetic(5))
+      .setName("topic-for-delete-connector-" + randomAlphabetic(5))
       .setMessageContent(MESSAGE_CONTENT).setMessageKey(MESSAGE_KEY);
   private static final Topic TOPIC_FOR_UPDATE = new Topic()
-      .setName("topic_for_update_connector-" + randomAlphabetic(5))
+      .setName("topic-for-update-connector-" + randomAlphabetic(5))
       .setMessageContent(MESSAGE_CONTENT).setMessageKey(MESSAGE_KEY);
   private static final Connector CONNECTOR_FOR_DELETE = new Connector()
-      .setName("sink_postgres_activities_e2e_checks_for_delete-" + randomAlphabetic(5))
+      .setName("connector-for-delete-" + randomAlphabetic(5))
       .setConfig(getResourceAsString("testData/connectors/delete_connector_config.json"));
   private static final Connector CONNECTOR_FOR_UPDATE = new Connector()
-      .setName("sink_postgres_activities_e2e_checks_for_update-" + randomAlphabetic(5))
+      .setName("connector-for-update-and-delete-" + randomAlphabetic(5))
       .setConfig(getResourceAsString("testData/connectors/config_for_create_connector_via_api.json"));

   @BeforeClass(alwaysRun = true)
   public void beforeClass() {
     TOPIC_LIST.addAll(List.of(TOPIC_FOR_CREATE, TOPIC_FOR_DELETE, TOPIC_FOR_UPDATE));
     TOPIC_LIST.forEach(topic -> apiService
-        .createTopic(topic.getName())
+        .createTopic(topic)
         .sendMessage(topic)
     );
     CONNECTOR_LIST.addAll(List.of(CONNECTOR_FOR_DELETE, CONNECTOR_FOR_UPDATE));
-    CONNECTOR_LIST.forEach(connector -> apiService
-        .createConnector(CONNECT_NAME, connector));
+    CONNECTOR_LIST.forEach(connector -> apiService.createConnector(connector));
   }

   @QaseId(42)
   @Test
   public void createConnector() {
     Connector connectorForCreate = new Connector()
-        .setName("sink_postgres_activities_e2e_checks-" + randomAlphabetic(5))
+        .setName("connector-for-create-" + randomAlphabetic(5))
         .setConfig(getResourceAsString("testData/connectors/config_for_create_connector.json"));
     navigateToConnectors();
     kafkaConnectList
@@ -102,7 +100,7 @@ public class ConnectorsTest extends BaseTest {
   @AfterClass(alwaysRun = true)
   public void afterClass() {
     CONNECTOR_LIST.forEach(connector ->
-        apiService.deleteConnector(CONNECT_NAME, connector.getName()));
+        apiService.deleteConnector(connector.getName()));
     TOPIC_LIST.forEach(topic -> apiService.deleteTopic(topic.getName()));
   }
 }
@@ -53,7 +53,7 @@ public class MessagesTest extends BaseTest {
   public void beforeClass() {
     TOPIC_LIST.addAll(List.of(TOPIC_FOR_MESSAGES, TOPIC_FOR_CHECK_FILTERS, TOPIC_TO_CLEAR_AND_PURGE_MESSAGES,
         TOPIC_TO_RECREATE, TOPIC_FOR_CHECK_MESSAGES_COUNT));
-    TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName()));
+    TOPIC_LIST.forEach(topic -> apiService.createTopic(topic));
     IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECK_FILTERS));
     waitUntilNewMinuteStarted();
     IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECK_FILTERS));
@@ -75,8 +75,6 @@ public class MessagesTest extends BaseTest {
     softly.assertAll();
   }

-  @Ignore
-  @Issue("https://github.com/provectus/kafka-ui/issues/2778")
   @QaseId(19)
   @Test(priority = 2)
   public void clearMessage() {
@@ -85,12 +83,13 @@ public class MessagesTest extends BaseTest {
         .openDetailsTab(OVERVIEW);
     int messageAmount = topicDetails.getMessageCountAmount();
     produceMessage(TOPIC_FOR_MESSAGES);
-    Assert.assertEquals(messageAmount + 1, topicDetails.getMessageCountAmount(), "getMessageCountAmount()");
+    Assert.assertEquals(topicDetails.getMessageCountAmount(), messageAmount + 1, "getMessageCountAmount()");
     topicDetails
         .openDotMenu()
         .clickClearMessagesMenu()
+        .clickConfirmBtnMdl()
         .waitUntilScreenReady();
-    Assert.assertEquals(0, topicDetails.getMessageCountAmount(), "getMessageCountAmount()");
+    Assert.assertEquals(topicDetails.getMessageCountAmount(), 0, "getMessageCountAmount()");
   }

   @QaseId(239)
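The swapped assertion arguments above are a real fix, not churn: TestNG's `Assert.assertEquals(actual, expected, message)` takes the actual value first — the reverse of JUnit 4 — so the old calls produced inverted failure messages. A self-contained illustration:

```java
import org.testng.Assert;
import org.testng.annotations.Test;

public class AssertOrderDemo {

  @Test
  public void argumentOrderMatters() {
    int actualCount = 0;
    int expectedCount = 1;
    // Fails with "expected [1] but found [0]" — correct only because the
    // actual value is passed first, per TestNG's (actual, expected) order.
    Assert.assertEquals(actualCount, expectedCount, "getMessageCountAmount()");
  }
}
```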
@@ -59,7 +59,7 @@ public class TopicsTest extends BaseTest {
   @BeforeClass(alwaysRun = true)
   public void beforeClass() {
     TOPIC_LIST.addAll(List.of(TOPIC_TO_UPDATE_AND_DELETE, TOPIC_FOR_DELETE, TOPIC_FOR_CHECK_FILTERS));
-    TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName()));
+    TOPIC_LIST.forEach(topic -> apiService.createTopic(topic));
   }

   @QaseId(199)
@@ -30,6 +30,9 @@ const queryClient = new QueryClient({
   defaultOptions: {
     queries: {
       suspense: true,
+      onError(error) {
+        showServerError(error as Response);
+      },
     },
     mutations: {
       onError(error) {
@@ -1,17 +1,18 @@
 import React from 'react';
 import { Route, Routes } from 'react-router-dom';
 import Details from 'components/ConsumerGroups/Details/Details';
-import ListContainer from 'components/ConsumerGroups/List/ListContainer';
 import ResetOffsets from 'components/ConsumerGroups/Details/ResetOffsets/ResetOffsets';
 import {
   clusterConsumerGroupResetOffsetsRelativePath,
   RouteParams,
 } from 'lib/paths';

+import List from './List';
+
 const ConsumerGroups: React.FC = () => {
   return (
     <Routes>
-      <Route index element={<ListContainer />} />
+      <Route index element={<List />} />
       <Route path={RouteParams.consumerGroupID} element={<Details />} />
       <Route
         path={clusterConsumerGroupResetOffsetsRelativePath}
@@ -7,26 +7,22 @@ import {
   ClusterGroupParam,
 } from 'lib/paths';
 import Search from 'components/common/Search/Search';
-import PageLoader from 'components/common/PageLoader/PageLoader';
 import ClusterContext from 'components/contexts/ClusterContext';
 import PageHeading from 'components/common/PageHeading/PageHeading';
 import * as Metrics from 'components/common/Metrics';
 import { Tag } from 'components/common/Tag/Tag.styled';
 import groupBy from 'lodash/groupBy';
 import { Table } from 'components/common/table/Table/Table.styled';
-import { useAppDispatch, useAppSelector } from 'lib/hooks/redux';
-import {
-  deleteConsumerGroup,
-  selectById,
-  fetchConsumerGroupDetails,
-  getAreConsumerGroupDetailsFulfilled,
-} from 'redux/reducers/consumerGroups/consumerGroupsSlice';
 import getTagColor from 'components/common/Tag/getTagColor';
 import { Dropdown } from 'components/common/Dropdown';
 import { ControlPanelWrapper } from 'components/common/ControlPanel/ControlPanel.styled';
 import { Action, ResourceType } from 'generated-sources';
 import { ActionDropdownItem } from 'components/common/ActionComponent';
 import TableHeaderCell from 'components/common/table/TableHeaderCell/TableHeaderCell';
+import {
+  useConsumerGroupDetails,
+  useDeleteConsumerGroupMutation,
+} from 'lib/hooks/api/consumers';

 import ListItem from './ListItem';

@ -35,38 +31,25 @@ const Details: React.FC = () => {
|
||||||
const [searchParams] = useSearchParams();
|
const [searchParams] = useSearchParams();
|
||||||
const searchValue = searchParams.get('q') || '';
|
const searchValue = searchParams.get('q') || '';
|
||||||
const { isReadOnly } = React.useContext(ClusterContext);
|
const { isReadOnly } = React.useContext(ClusterContext);
|
||||||
const { consumerGroupID, clusterName } = useAppParams<ClusterGroupParam>();
|
const routeParams = useAppParams<ClusterGroupParam>();
|
||||||
const dispatch = useAppDispatch();
|
const { clusterName, consumerGroupID } = routeParams;
|
||||||
const consumerGroup = useAppSelector((state) =>
|
|
||||||
selectById(state, consumerGroupID)
|
|
||||||
);
|
|
||||||
const isFetched = useAppSelector(getAreConsumerGroupDetailsFulfilled);
|
|
||||||
|
|
||||||
React.useEffect(() => {
|
const consumerGroup = useConsumerGroupDetails(routeParams);
|
||||||
dispatch(fetchConsumerGroupDetails({ clusterName, consumerGroupID }));
|
const deleteConsumerGroup = useDeleteConsumerGroupMutation(routeParams);
|
||||||
}, [clusterName, consumerGroupID, dispatch]);
|
|
||||||
|
|
||||||
const onDelete = async () => {
|
const onDelete = async () => {
|
||||||
const res = await dispatch(
|
await deleteConsumerGroup.mutateAsync();
|
||||||
deleteConsumerGroup({ clusterName, consumerGroupID })
|
navigate('../');
|
||||||
).unwrap();
|
|
||||||
if (res) navigate('../');
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const onResetOffsets = () => {
|
const onResetOffsets = () => {
|
||||||
navigate(clusterConsumerGroupResetRelativePath);
|
navigate(clusterConsumerGroupResetRelativePath);
|
||||||
};
|
};
|
||||||
|
|
||||||
if (!isFetched || !consumerGroup) {
|
const partitionsByTopic = groupBy(consumerGroup.data?.partitions, 'topic');
|
||||||
return <PageLoader />;
|
|
||||||
}
|
|
||||||
|
|
||||||
const partitionsByTopic = groupBy(consumerGroup.partitions, 'topic');
|
|
||||||
|
|
||||||
const filteredPartitionsByTopic = Object.keys(partitionsByTopic).filter(
|
const filteredPartitionsByTopic = Object.keys(partitionsByTopic).filter(
|
||||||
(el) => el.includes(searchValue)
|
(el) => el.includes(searchValue)
|
||||||
);
|
);
|
||||||
|
|
||||||
const currentPartitionsByTopic = searchValue.length
|
const currentPartitionsByTopic = searchValue.length
|
||||||
? filteredPartitionsByTopic
|
? filteredPartitionsByTopic
|
||||||
: Object.keys(partitionsByTopic);
|
: Object.keys(partitionsByTopic);
|
||||||
|
@ -110,24 +93,24 @@ const Details: React.FC = () => {
|
||||||
<Metrics.Wrapper>
|
<Metrics.Wrapper>
|
||||||
<Metrics.Section>
|
<Metrics.Section>
|
||||||
<Metrics.Indicator label="State">
|
<Metrics.Indicator label="State">
|
||||||
<Tag color={getTagColor(consumerGroup.state)}>
|
<Tag color={getTagColor(consumerGroup.data?.state)}>
|
||||||
{consumerGroup.state}
|
{consumerGroup.data?.state}
|
||||||
</Tag>
|
</Tag>
|
||||||
</Metrics.Indicator>
|
</Metrics.Indicator>
|
||||||
<Metrics.Indicator label="Members">
|
<Metrics.Indicator label="Members">
|
||||||
{consumerGroup.members}
|
{consumerGroup.data?.members}
|
||||||
</Metrics.Indicator>
|
</Metrics.Indicator>
|
||||||
<Metrics.Indicator label="Assigned Topics">
|
<Metrics.Indicator label="Assigned Topics">
|
||||||
{consumerGroup.topics}
|
{consumerGroup.data?.topics}
|
||||||
</Metrics.Indicator>
|
</Metrics.Indicator>
|
||||||
<Metrics.Indicator label="Assigned Partitions">
|
<Metrics.Indicator label="Assigned Partitions">
|
||||||
{consumerGroup.partitions?.length}
|
{consumerGroup.data?.partitions?.length}
|
||||||
</Metrics.Indicator>
|
</Metrics.Indicator>
|
||||||
<Metrics.Indicator label="Coordinator ID">
|
<Metrics.Indicator label="Coordinator ID">
|
||||||
{consumerGroup.coordinator?.id}
|
{consumerGroup.data?.coordinator?.id}
|
||||||
</Metrics.Indicator>
|
</Metrics.Indicator>
|
||||||
<Metrics.Indicator label="Total lag">
|
<Metrics.Indicator label="Total lag">
|
||||||
{consumerGroup.messagesBehind}
|
{consumerGroup.data?.messagesBehind}
|
||||||
</Metrics.Indicator>
|
</Metrics.Indicator>
|
||||||
</Metrics.Section>
|
</Metrics.Section>
|
||||||
</Metrics.Wrapper>
|
</Metrics.Wrapper>
|
||||||
|
|
|
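Note: the useConsumerGroupDetails and useDeleteConsumerGroupMutation hooks consumed above live in lib/hooks/api/consumers, which is outside this diff. A minimal sketch of what such hooks can look like on @tanstack/react-query v4; the `api` object and its method names are assumptions for illustration, not the project's actual client:

  import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
  import { ClusterGroupParam } from 'lib/paths';

  // Hypothetical generated API client; the real client is not shown in this diff.
  declare const api: {
    getConsumerGroup(p: { clusterName: string; id: string }): Promise<unknown>;
    deleteConsumerGroup(p: { clusterName: string; id: string }): Promise<void>;
  };

  export function useConsumerGroupDetails({ clusterName, consumerGroupID }: ClusterGroupParam) {
    // Key mirrors the REST path so related queries can be invalidated by prefix.
    return useQuery(['clusters', clusterName, 'consumerGroups', consumerGroupID], () =>
      api.getConsumerGroup({ clusterName, id: consumerGroupID })
    );
  }

  export function useDeleteConsumerGroupMutation({ clusterName, consumerGroupID }: ClusterGroupParam) {
    const client = useQueryClient();
    return useMutation(() => api.deleteConsumerGroup({ clusterName, id: consumerGroupID }), {
      // Drop cached consumer-group data so lists refetch after deletion.
      onSuccess: () => client.invalidateQueries(['clusters', clusterName, 'consumerGroups']),
    });
  }

With this shape, consumerGroup.data, isSuccess, and mutateAsync() in Details.tsx line up with react-query's standard query and mutation result objects.
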
@ -0,0 +1,197 @@
import React from 'react';
import { useNavigate } from 'react-router-dom';
import {
ConsumerGroupDetails,
ConsumerGroupOffsetsReset,
ConsumerGroupOffsetsResetType,
} from 'generated-sources';
import { ClusterGroupParam } from 'lib/paths';
import {
Controller,
FormProvider,
useFieldArray,
useForm,
} from 'react-hook-form';
import { MultiSelect, Option } from 'react-multi-select-component';
import 'react-datepicker/dist/react-datepicker.css';
import { ErrorMessage } from '@hookform/error-message';
import { InputLabel } from 'components/common/Input/InputLabel.styled';
import { Button } from 'components/common/Button/Button';
import Input from 'components/common/Input/Input';
import { FormError } from 'components/common/Input/Input.styled';
import useAppParams from 'lib/hooks/useAppParams';
import { useResetConsumerGroupOffsetsMutation } from 'lib/hooks/api/consumers';
import { FlexFieldset, StyledForm } from 'components/common/Form/Form.styled';
import ControlledSelect from 'components/common/Select/ControlledSelect';

import * as S from './ResetOffsets.styled';

interface FormProps {
defaultValues: ConsumerGroupOffsetsReset;
topics: string[];
partitions: ConsumerGroupDetails['partitions'];
}

const resetTypeOptions = Object.values(ConsumerGroupOffsetsResetType).map(
(value) => ({ value, label: value })
);

const Form: React.FC<FormProps> = ({ defaultValues, partitions, topics }) => {
const navigate = useNavigate();
const routerParams = useAppParams<ClusterGroupParam>();
const reset = useResetConsumerGroupOffsetsMutation(routerParams);
const topicOptions = React.useMemo(
() => topics.map((value) => ({ value, label: value })),
[topics]
);
const methods = useForm<ConsumerGroupOffsetsReset>({
mode: 'onChange',
defaultValues,
});

const {
handleSubmit,
setValue,
watch,
control,
formState: { errors },
} = methods;
const { fields } = useFieldArray({
control,
name: 'partitionsOffsets',
});

const resetTypeValue = watch('resetType');
const topicValue = watch('topic');
const offsetsValue = watch('partitionsOffsets');
const partitionsValue = watch('partitions') || [];

const partitionOptions =
partitions
?.filter((p) => p.topic === topicValue)
.map((p) => ({
label: `Partition #${p.partition.toString()}`,
value: p.partition,
})) || [];

const onSelectedPartitionsChange = (selected: Option[]) => {
setValue(
'partitions',
selected.map(({ value }) => value)
);

setValue(
'partitionsOffsets',
selected.map(({ value }) => {
const currentOffset = offsetsValue?.find(
({ partition }) => partition === value
);
return { offset: currentOffset?.offset, partition: value };
})
);
};

React.useEffect(() => {
onSelectedPartitionsChange([]);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [topicValue]);

const onSubmit = async (data: ConsumerGroupOffsetsReset) => {
await reset.mutateAsync(data);
navigate('../');
};

return (
<FormProvider {...methods}>
<StyledForm onSubmit={handleSubmit(onSubmit)}>
<FlexFieldset>
<ControlledSelect
name="topic"
label="Topic"
placeholder="Select Topic"
options={topicOptions}
/>
<ControlledSelect
name="resetType"
label="Reset Type"
placeholder="Select Reset Type"
options={resetTypeOptions}
/>
<div>
<InputLabel>Partitions</InputLabel>
<MultiSelect
options={partitionOptions}
value={partitionsValue.map((p) => ({
value: p,
label: String(p),
}))}
onChange={onSelectedPartitionsChange}
labelledBy="Select partitions"
/>
</div>
{resetTypeValue === ConsumerGroupOffsetsResetType.TIMESTAMP &&
partitionsValue.length > 0 && (
<div>
<InputLabel>Timestamp</InputLabel>
<Controller
control={control}
name="resetToTimestamp"
rules={{
required: 'Timestamp is required',
}}
render={({ field: { onChange, onBlur, value, ref } }) => (
<S.DatePickerInput
ref={ref}
selected={new Date(value as number)}
onChange={(e: Date | null) => onChange(e?.getTime())}
onBlur={onBlur}
/>
)}
/>
<ErrorMessage
errors={errors}
name="resetToTimestamp"
render={({ message }) => <FormError>{message}</FormError>}
/>
</div>
)}

{resetTypeValue === ConsumerGroupOffsetsResetType.OFFSET &&
partitionsValue.length > 0 && (
<S.OffsetsWrapper>
{fields.map((field, index) => (
<Input
key={field.id}
label={`Partition #${field.partition} Offset`}
type="number"
name={`partitionsOffsets.${index}.offset` as const}
hookFormOptions={{
shouldUnregister: true,
required: 'Offset is required',
min: {
value: 0,
message: 'must be greater than or equal to 0',
},
}}
withError
/>
))}
</S.OffsetsWrapper>
)}
</FlexFieldset>
<div>
<Button
buttonSize="M"
buttonType="primary"
type="submit"
disabled={partitionsValue.length === 0}
>
Submit
</Button>
</div>
</StyledForm>
</FormProvider>
);
};

export default Form;

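The new Form.tsx submits through useResetConsumerGroupOffsetsMutation, also from lib/hooks/api/consumers and not shown in this excerpt. A sketch under the same assumptions (hypothetical `api` client):

  import { useMutation, useQueryClient } from '@tanstack/react-query';
  import { ConsumerGroupOffsetsReset } from 'generated-sources';
  import { ClusterGroupParam } from 'lib/paths';

  // Hypothetical client; the real method name is an assumption.
  declare const api: {
    resetConsumerGroupOffsets(p: {
      clusterName: string;
      id: string;
      consumerGroupOffsetsReset: ConsumerGroupOffsetsReset;
    }): Promise<void>;
  };

  export function useResetConsumerGroupOffsetsMutation({ clusterName, consumerGroupID }: ClusterGroupParam) {
    const client = useQueryClient();
    return useMutation(
      (body: ConsumerGroupOffsetsReset) =>
        api.resetConsumerGroupOffsets({ clusterName, id: consumerGroupID, consumerGroupOffsetsReset: body }),
      {
        // Offsets changed on the server, so cached group details are stale.
        onSuccess: () => client.invalidateQueries(['clusters', clusterName, 'consumerGroups', consumerGroupID]),
      }
    );
  }

Because the QueryClient gained global onError handlers earlier in this diff, reset.mutateAsync(data) can be awaited without a local try/catch; failures surface through showServerError.
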
@ -1,37 +1,5 @@
import styled from 'styled-components';
import DatePicker from 'react-datepicker';
export const Wrapper = styled.div`
padding: 16px;
padding-top: 0;

& > form {
display: flex;
flex-direction: column;
gap: 16px;

& > button:last-child {
align-self: flex-start;
}
}

& .multi-select {
height: 32px;
& > .dropdown-container {
height: 32px;
& > .dropdown-heading {
height: 32px;
}
}
}
`;

export const MainSelectors = styled.div`
display: flex;
gap: 16px;
& > * {
flex-grow: 1;
}
`;

export const OffsetsWrapper = styled.div`
display: flex;

@ -40,7 +8,26 @@ export const OffsetsWrapper = styled.div`
gap: 16px;
`;

export const OffsetsTitle = styled.h1`
export const DatePickerInput = styled(DatePicker).attrs({
font-size: 18px;
showTimeInput: true,
font-weight: 500;
timeInputLabel: 'Time:',
dateFormat: 'MMMM d, yyyy h:mm aa',
})`
height: 40px;
border: 1px ${({ theme }) => theme.select.borderColor.normal} solid;
border-radius: 4px;
font-size: 14px;
width: 270px;
padding-left: 12px;
background-color: ${({ theme }) => theme.input.backgroundColor.normal};
color: ${({ theme }) => theme.input.color.normal};
&::placeholder {
color: ${({ theme }) => theme.input.color.normal};
}
&:hover {
cursor: pointer;
}
&:focus {
outline: none;
}
`;

@ -1,315 +1,52 @@
import React from 'react';
import { useNavigate } from 'react-router-dom';
import { ConsumerGroupOffsetsResetType } from 'generated-sources';
import { clusterConsumerGroupsPath, ClusterGroupParam } from 'lib/paths';
import {
Controller,
FormProvider,
useFieldArray,
useForm,
} from 'react-hook-form';
import { MultiSelect, Option } from 'react-multi-select-component';
import DatePicker from 'react-datepicker';
import 'react-datepicker/dist/react-datepicker.css';
import groupBy from 'lodash/groupBy';
import PageLoader from 'components/common/PageLoader/PageLoader';
import { ErrorMessage } from '@hookform/error-message';
import Select from 'components/common/Select/Select';
import { InputLabel } from 'components/common/Input/InputLabel.styled';
import { Button } from 'components/common/Button/Button';
import Input from 'components/common/Input/Input';
import { FormError } from 'components/common/Input/Input.styled';
import PageHeading from 'components/common/PageHeading/PageHeading';
import {
fetchConsumerGroupDetails,
selectById,
getAreConsumerGroupDetailsFulfilled,
getIsOffsetReseted,
resetConsumerGroupOffsets,
} from 'redux/reducers/consumerGroups/consumerGroupsSlice';
import { useAppDispatch, useAppSelector } from 'lib/hooks/redux';
import useAppParams from 'lib/hooks/useAppParams';
import { resetLoaderById } from 'redux/reducers/loader/loaderSlice';
import { useConsumerGroupDetails } from 'lib/hooks/api/consumers';
import PageLoader from 'components/common/PageLoader/PageLoader';
import {
ConsumerGroupOffsetsReset,
ConsumerGroupOffsetsResetType,
} from 'generated-sources';

import * as S from './ResetOffsets.styled';
import Form from './Form';

interface FormType {
topic: string;
resetType: ConsumerGroupOffsetsResetType;
partitionsOffsets: { offset: string | undefined; partition: number }[];
resetToTimestamp: Date;
}

const ResetOffsets: React.FC = () => {
const dispatch = useAppDispatch();
const routerParams = useAppParams<ClusterGroupParam>();
const { consumerGroupID, clusterName } = useAppParams<ClusterGroupParam>();
const consumerGroup = useAppSelector((state) =>
selectById(state, consumerGroupID)
);

const isFetched = useAppSelector(getAreConsumerGroupDetailsFulfilled);
const consumerGroup = useConsumerGroupDetails(routerParams);
const isOffsetReseted = useAppSelector(getIsOffsetReseted);

React.useEffect(() => {
if (consumerGroup.isLoading || !consumerGroup.isSuccess)
dispatch(fetchConsumerGroupDetails({ clusterName, consumerGroupID }));
}, [clusterName, consumerGroupID, dispatch]);

const [uniqueTopics, setUniqueTopics] = React.useState<string[]>([]);
const [selectedPartitions, setSelectedPartitions] = React.useState<Option[]>(
[]
);

const methods = useForm<FormType>({
mode: 'onChange',
defaultValues: {
resetType: ConsumerGroupOffsetsResetType.EARLIEST,
topic: '',
partitionsOffsets: [],
},
});
const {
handleSubmit,
setValue,
watch,
control,
setError,
clearErrors,
formState: { errors, isValid },
} = methods;
const { fields } = useFieldArray({
control,
name: 'partitionsOffsets',
});
const resetTypeValue = watch('resetType');
const topicValue = watch('topic');
const offsetsValue = watch('partitionsOffsets');

React.useEffect(() => {
if (isFetched && consumerGroup?.partitions) {
setValue('topic', consumerGroup.partitions[0].topic);
setUniqueTopics(Object.keys(groupBy(consumerGroup.partitions, 'topic')));
}
}, [consumerGroup?.partitions, isFetched, setValue]);

const onSelectedPartitionsChange = (value: Option[]) => {
clearErrors();
setValue(
'partitionsOffsets',
value.map((partition) => {
const currentOffset = offsetsValue.find(
(offset) => offset.partition === partition.value
);
return {
offset: currentOffset ? currentOffset?.offset : undefined,
partition: partition.value,
};
})
);
setSelectedPartitions(value);
};

React.useEffect(() => {
onSelectedPartitionsChange([]);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [topicValue]);

const onSubmit = (data: FormType) => {
const augmentedData = {
...data,
partitions: selectedPartitions.map((partition) => partition.value),
partitionsOffsets: data.partitionsOffsets as {
offset: string;
partition: number;
}[],
};
let isValidAugmentedData = true;
if (augmentedData.resetType === ConsumerGroupOffsetsResetType.OFFSET) {
augmentedData.partitionsOffsets.forEach((offset, index) => {
if (!offset.offset) {
setError(`partitionsOffsets.${index}.offset`, {
type: 'manual',
message: "This field shouldn't be empty!",
});
isValidAugmentedData = false;
}
});
} else if (
augmentedData.resetType === ConsumerGroupOffsetsResetType.TIMESTAMP
) {
if (!augmentedData.resetToTimestamp) {
setError(`resetToTimestamp`, {
type: 'manual',
message: "This field shouldn't be empty!",
});
isValidAugmentedData = false;
}
}
if (isValidAugmentedData) {
dispatch(
resetConsumerGroupOffsets({
clusterName,
consumerGroupID,
requestBody: augmentedData,
})
);
}
};

const navigate = useNavigate();
React.useEffect(() => {
if (isOffsetReseted) {
dispatch(resetLoaderById('consumerGroups/resetConsumerGroupOffsets'));
navigate('../');
}
}, [clusterName, consumerGroupID, dispatch, navigate, isOffsetReseted]);

if (!isFetched || !consumerGroup) {
return <PageLoader />;
}
const partitions = consumerGroup.data.partitions || [];
const { topic } = partitions[0];

const uniqTopics = Array.from(
new Set(partitions.map((partition) => partition.topic))
);

const defaultValues: ConsumerGroupOffsetsReset = {
resetType: ConsumerGroupOffsetsResetType.EARLIEST,
topic,
partitionsOffsets: [],
resetToTimestamp: new Date().getTime(),
};

return (
<FormProvider {...methods}>
<>
<PageHeading
text="Reset offsets"
backTo={clusterConsumerGroupsPath(clusterName)}
backTo={clusterConsumerGroupsPath(routerParams.clusterName)}
backText="Consumers"
/>
<S.Wrapper>
<Form
<form onSubmit={handleSubmit(onSubmit)}>
defaultValues={defaultValues}
<S.MainSelectors>
topics={uniqTopics}
<div>
partitions={partitions}
<InputLabel id="topicLabel">Topic</InputLabel>
/>
<Controller
</>
control={control}
name="topic"
render={({ field: { name, onChange, value } }) => (
<Select
id="topic"
selectSize="M"
aria-labelledby="topicLabel"
minWidth="100%"
name={name}
onChange={onChange}
defaultValue={value}
value={value}
options={uniqueTopics.map((topic) => ({
value: topic,
label: topic,
}))}
/>
)}
/>
</div>
<div>
<InputLabel id="resetTypeLabel">Reset Type</InputLabel>
<Controller
control={control}
name="resetType"
render={({ field: { name, onChange, value } }) => (
<Select
id="resetType"
selectSize="M"
aria-labelledby="resetTypeLabel"
minWidth="100%"
name={name}
onChange={onChange}
value={value}
options={Object.values(ConsumerGroupOffsetsResetType).map(
(type) => ({ value: type, label: type })
)}
/>
)}
/>
</div>
<div>
<InputLabel>Partitions</InputLabel>
<MultiSelect
options={
consumerGroup.partitions
?.filter((p) => p.topic === topicValue)
.map((p) => ({
label: `Partition #${p.partition.toString()}`,
value: p.partition,
})) || []
}
value={selectedPartitions}
onChange={onSelectedPartitionsChange}
labelledBy="Select partitions"
/>
</div>
</S.MainSelectors>
{resetTypeValue === ConsumerGroupOffsetsResetType.TIMESTAMP &&
selectedPartitions.length > 0 && (
<div>
<InputLabel>Timestamp</InputLabel>
<Controller
control={control}
name="resetToTimestamp"
render={({ field: { onChange, onBlur, value, ref } }) => (
<DatePicker
ref={ref}
selected={value}
onChange={onChange}
onBlur={onBlur}
showTimeInput
timeInputLabel="Time:"
dateFormat="MMMM d, yyyy h:mm aa"
/>
)}
/>
<ErrorMessage
errors={errors}
name="resetToTimestamp"
render={({ message }) => <FormError>{message}</FormError>}
/>
</div>
)}
{resetTypeValue === ConsumerGroupOffsetsResetType.OFFSET &&
selectedPartitions.length > 0 && (
<div>
<S.OffsetsTitle>Offsets</S.OffsetsTitle>
<S.OffsetsWrapper>
{fields.map((field, index) => (
<div key={field.id}>
<InputLabel htmlFor={`partitionsOffsets.${index}.offset`}>
Partition #{field.partition}
</InputLabel>
<Input
id={`partitionsOffsets.${index}.offset`}
type="number"
name={`partitionsOffsets.${index}.offset` as const}
hookFormOptions={{
shouldUnregister: true,
min: {
value: 0,
message: 'must be greater than or equal to 0',
},
}}
defaultValue={field.offset}
/>
<ErrorMessage
errors={errors}
name={`partitionsOffsets.${index}.offset`}
render={({ message }) => (
<FormError>{message}</FormError>
)}
/>
</div>
))}
</S.OffsetsWrapper>
</div>
)}
<Button
buttonSize="M"
buttonType="primary"
type="submit"
disabled={!isValid || selectedPartitions.length === 0}
>
Submit
</Button>
</form>
</S.Wrapper>
</FormProvider>
);
};

@ -1,158 +0,0 @@
import React from 'react';
import fetchMock from 'fetch-mock';
import { act, screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { render, WithRoute } from 'lib/testHelpers';
import { clusterConsumerGroupResetOffsetsPath } from 'lib/paths';
import { consumerGroupPayload } from 'redux/reducers/consumerGroups/__test__/fixtures';
import ResetOffsets from 'components/ConsumerGroups/Details/ResetOffsets/ResetOffsets';

const clusterName = 'cluster1';
const { groupId } = consumerGroupPayload;

const renderComponent = () =>
render(
<WithRoute path={clusterConsumerGroupResetOffsetsPath()}>
<ResetOffsets />
</WithRoute>,
{
initialEntries: [
clusterConsumerGroupResetOffsetsPath(
clusterName,
consumerGroupPayload.groupId
),
],
}
);

const resetConsumerGroupOffsetsMockCalled = () =>
expect(
fetchMock.called(
`/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`
)
).toBeTruthy();

const selectresetTypeAndPartitions = async (resetType: string) => {
await userEvent.click(screen.getByLabelText('Reset Type'));
await userEvent.click(screen.getByText(resetType));
await userEvent.click(screen.getByText('Select...'));
await userEvent.click(screen.getByText('Partition #0'));
};

const resetConsumerGroupOffsetsWith = async (
resetType: string,
offset: null | number = null
) => {
await userEvent.click(screen.getByLabelText('Reset Type'));
const options = screen.getAllByText(resetType);
await userEvent.click(options.length > 1 ? options[1] : options[0]);
await userEvent.click(screen.getByText('Select...'));

await userEvent.click(screen.getByText('Partition #0'));

fetchMock.postOnce(
`/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`,
200,
{
body: {
topic: '__amazon_msk_canary',
resetType,
partitions: [0],
partitionsOffsets: [{ partition: 0, offset }],
},
}
);
await userEvent.click(screen.getByText('Submit'));
await waitFor(() => resetConsumerGroupOffsetsMockCalled());
};

describe('ResetOffsets', () => {
afterEach(() => {
fetchMock.reset();
});

xit('renders progress bar for initial state', async () => {
fetchMock.getOnce(
`/api/clusters/${clusterName}/consumer-groups/${groupId}`,
404
);
await act(() => {
renderComponent();
});
expect(screen.getByRole('progressbar')).toBeInTheDocument();
});

describe('with consumer group', () => {
describe('submit handles resetConsumerGroupOffsets', () => {
beforeEach(async () => {
const fetchConsumerGroupMock = fetchMock.getOnce(
`/api/clusters/${clusterName}/consumer-groups/${groupId}`,
consumerGroupPayload
);
await act(() => {
renderComponent();
});
expect(fetchConsumerGroupMock.called()).toBeTruthy();
});

it('calls resetConsumerGroupOffsets with EARLIEST', async () => {
await resetConsumerGroupOffsetsWith('EARLIEST');
});

it('calls resetConsumerGroupOffsets with LATEST', async () => {
await resetConsumerGroupOffsetsWith('LATEST');
});
it('calls resetConsumerGroupOffsets with OFFSET', async () => {
await selectresetTypeAndPartitions('OFFSET');
fetchMock.postOnce(
`/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`,
200,
{
body: {
topic: '__amazon_msk_canary',
resetType: 'OFFSET',
partitions: [0],
partitionsOffsets: [{ partition: 0, offset: 10 }],
},
}
);

await userEvent.click(screen.getAllByLabelText('Partition #0')[1]);
await userEvent.keyboard('10');
await userEvent.click(screen.getByText('Submit'));
await resetConsumerGroupOffsetsMockCalled();
});

// focus doesn't work for datepicker
it.skip('calls resetConsumerGroupOffsets with TIMESTAMP', async () => {
await selectresetTypeAndPartitions('TIMESTAMP');
const resetConsumerGroupOffsetsMock = fetchMock.postOnce(
`/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`,
200,
{
body: {
topic: '__amazon_msk_canary',
resetType: 'OFFSET',
partitions: [0],
partitionsOffsets: [{ partition: 0, offset: 10 }],
},
}
);
await userEvent.click(screen.getByText('Submit'));
await waitFor(() =>
expect(
screen.getByText("This field shouldn't be empty!")
).toBeInTheDocument()
);

await waitFor(() =>
expect(
resetConsumerGroupOffsetsMock.called(
`/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`
)
).toBeFalsy()
);
});
});
});
});

@ -2,9 +2,9 @@ import React from 'react';
import { clusterConsumerGroupDetailsPath } from 'lib/paths';
import { screen } from '@testing-library/react';
import TopicContents from 'components/ConsumerGroups/Details/TopicContents/TopicContents';
import { consumerGroupPayload } from 'redux/reducers/consumerGroups/__test__/fixtures';
import { render, WithRoute } from 'lib/testHelpers';
import { ConsumerGroupTopicPartition } from 'generated-sources';
import { consumerGroupPayload } from 'lib/fixtures/consumerGroups';

const clusterName = 'cluster1';

@ -1,114 +0,0 @@
import Details from 'components/ConsumerGroups/Details/Details';
import React from 'react';
import fetchMock from 'fetch-mock';
import { render, WithRoute } from 'lib/testHelpers';
import {
clusterConsumerGroupDetailsPath,
clusterConsumerGroupResetRelativePath,
} from 'lib/paths';
import { consumerGroupPayload } from 'redux/reducers/consumerGroups/__test__/fixtures';
import {
screen,
waitFor,
waitForElementToBeRemoved,
} from '@testing-library/dom';
import userEvent from '@testing-library/user-event';

const clusterName = 'cluster1';
const { groupId } = consumerGroupPayload;

const mockNavigate = jest.fn();
jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useNavigate: () => mockNavigate,
}));

const renderComponent = () => {
render(
<WithRoute path={clusterConsumerGroupDetailsPath()}>
<Details />
</WithRoute>,
{ initialEntries: [clusterConsumerGroupDetailsPath(clusterName, groupId)] }
);
};
describe('Details component', () => {
afterEach(() => {
fetchMock.reset();
mockNavigate.mockClear();
});

describe('when consumer groups are NOT fetched', () => {
it('renders progress bar for initial state', () => {
fetchMock.getOnce(
`/api/clusters/${clusterName}/consumer-groups/${groupId}`,
404
);
renderComponent();
expect(screen.getByRole('progressbar')).toBeInTheDocument();
});
});

describe('when consumer gruops are fetched', () => {
beforeEach(async () => {
const fetchConsumerGroupMock = fetchMock.getOnce(
`/api/clusters/${clusterName}/consumer-groups/${groupId}`,
consumerGroupPayload
);
renderComponent();
await waitForElementToBeRemoved(() => screen.getByRole('progressbar'));
await waitFor(() => expect(fetchConsumerGroupMock.called()).toBeTruthy());
});

it('renders component', () => {
expect(screen.getByRole('heading')).toBeInTheDocument();
expect(screen.getByText(groupId)).toBeInTheDocument();

expect(screen.getByRole('table')).toBeInTheDocument();
expect(screen.getAllByRole('columnheader').length).toEqual(2);

expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
});

it('handles [Reset offset] click', async () => {
await userEvent.click(screen.getByText('Reset offset'));
expect(mockNavigate).toHaveBeenLastCalledWith(
clusterConsumerGroupResetRelativePath
);
});

it('renders search input', async () => {
expect(
screen.getByPlaceholderText('Search by Topic Name')
).toBeInTheDocument();
});

it('shows confirmation modal on consumer group delete', async () => {
expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
await userEvent.click(screen.getByText('Delete consumer group'));
await waitFor(() =>
expect(screen.queryByRole('dialog')).toBeInTheDocument()
);
await userEvent.click(screen.getByText('Cancel'));
expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
});

it('handles [Delete consumer group] click', async () => {
expect(screen.queryByRole('dialog')).not.toBeInTheDocument();

await userEvent.click(screen.getByText('Delete consumer group'));

expect(screen.queryByRole('dialog')).toBeInTheDocument();
const deleteConsumerGroupMock = fetchMock.deleteOnce(
`/api/clusters/${clusterName}/consumer-groups/${groupId}`,
200
);
await waitFor(() => {
userEvent.click(screen.getByRole('button', { name: 'Confirm' }));
});
expect(deleteConsumerGroupMock.called()).toBeTruthy();

await waitForElementToBeRemoved(() => screen.queryByRole('dialog'));
await waitFor(() => expect(mockNavigate).toHaveBeenLastCalledWith('../'));
});
});
});

@ -1,48 +0,0 @@
import React from 'react';
import { clusterConsumerGroupDetailsPath } from 'lib/paths';
import { screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import ListItem from 'components/ConsumerGroups/Details/ListItem';
import { consumerGroupPayload } from 'redux/reducers/consumerGroups/__test__/fixtures';
import { render, WithRoute } from 'lib/testHelpers';
import { ConsumerGroupTopicPartition } from 'generated-sources';

const clusterName = 'cluster1';

const renderComponent = (consumers: ConsumerGroupTopicPartition[] = []) =>
render(
<WithRoute path={clusterConsumerGroupDetailsPath()}>
<table>
<tbody>
<ListItem
clusterName={clusterName}
name={clusterName}
consumers={consumers}
/>
</tbody>
</table>
</WithRoute>,
{
initialEntries: [
clusterConsumerGroupDetailsPath(
clusterName,
consumerGroupPayload.groupId
),
],
}
);

describe('ListItem', () => {
beforeEach(() => renderComponent(consumerGroupPayload.partitions));

it('should renders list item with topic content closed and check if element exists', () => {
expect(screen.getByRole('row')).toBeInTheDocument();
});

it('should renders list item with topic content open', async () => {
await userEvent.click(
screen.getByRole('cell', { name: 'cluster1' }).children[0].children[0]
);
expect(screen.getByText('Consumer ID')).toBeInTheDocument();
});
});

@ -7,41 +7,29 @@ import {
ConsumerGroupOrdering,
SortOrder,
} from 'generated-sources';
import { useAppDispatch } from 'lib/hooks/redux';
import useAppParams from 'lib/hooks/useAppParams';
import { clusterConsumerGroupDetailsPath, ClusterNameRoute } from 'lib/paths';
import { fetchConsumerGroupsPaged } from 'redux/reducers/consumerGroups/consumerGroupsSlice';
import { ColumnDef } from '@tanstack/react-table';
import Table, { TagCell, LinkCell } from 'components/common/NewTable';
import { useNavigate, useSearchParams } from 'react-router-dom';
import { PER_PAGE } from 'lib/constants';
import { useConsumerGroups } from 'lib/hooks/api/consumers';

export interface Props {
const List = () => {
consumerGroups: ConsumerGroupDetails[];
totalPages: number;
}

const List: React.FC<Props> = ({ consumerGroups, totalPages }) => {
const dispatch = useAppDispatch();
const { clusterName } = useAppParams<ClusterNameRoute>();
const [searchParams] = useSearchParams();
const navigate = useNavigate();

React.useEffect(() => {
const consumerGroups = useConsumerGroups({
dispatch(
clusterName,
fetchConsumerGroupsPaged({
orderBy: (searchParams.get('sortBy') as ConsumerGroupOrdering) || undefined,
clusterName,
sortOrder:
orderBy:
(searchParams.get('sortDirection')?.toUpperCase() as SortOrder) ||
(searchParams.get('sortBy') as ConsumerGroupOrdering) || undefined,
undefined,
sortOrder:
page: Number(searchParams.get('page') || 1),
(searchParams.get('sortDirection')?.toUpperCase() as SortOrder) ||
perPage: Number(searchParams.get('perPage') || PER_PAGE),
undefined,
search: searchParams.get('q') || '',
page: Number(searchParams.get('page') || 1),
});
perPage: Number(searchParams.get('perPage') || PER_PAGE),
search: searchParams.get('q') || '',
})
);
}, [clusterName, dispatch, searchParams]);

const columns = React.useMemo<ColumnDef<ConsumerGroupDetails>[]>(
() => [

@ -95,9 +83,13 @@ const List: React.FC<Props> = ({ consumerGroups, totalPages }) => {
</ControlPanelWrapper>
<Table
columns={columns}
pageCount={totalPages}
pageCount={consumerGroups.data?.pageCount || 0}
data={consumerGroups}
data={consumerGroups.data?.consumerGroups || []}
emptyMessage="No active consumer groups found"
emptyMessage={
consumerGroups.isSuccess
? 'No active consumer groups found'
: 'Loading...'
}
serverSideProcessing
enableSorting
onRowClick={({ original }) =>

@ -105,6 +97,7 @@ const List: React.FC<Props> = ({ consumerGroups, totalPages }) => {
clusterConsumerGroupDetailsPath(clusterName, original.groupId)
)
}
disabled={consumerGroups.isFetching}
/>
</>
);

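The list now derives paging, sorting, and search entirely from URL search params and hands them to useConsumerGroups (not shown in this excerpt). A sketch of such a hook; keepPreviousData keeps the previous page rendered while the next one loads, which is what makes the disabled={consumerGroups.isFetching} guard above useful. The hook internals, the `api` client, and the response type name are assumptions:

  import { useQuery } from '@tanstack/react-query';
  import { ConsumerGroupOrdering, SortOrder } from 'generated-sources';

  interface UseConsumerGroupsProps {
    clusterName: string;
    orderBy?: ConsumerGroupOrdering;
    sortOrder?: SortOrder;
    page?: number;
    perPage?: number;
    search: string;
  }

  // Hypothetical paged response shape, inferred from the usage above
  // (consumerGroups.data?.pageCount / .consumerGroups).
  interface ConsumerGroupsPage {
    pageCount?: number;
    consumerGroups?: unknown[];
  }

  declare const api: {
    getConsumerGroupsPage(p: UseConsumerGroupsProps): Promise<ConsumerGroupsPage>;
  };

  export function useConsumerGroups(props: UseConsumerGroupsProps) {
    // The whole filter object participates in the key, so any change in
    // page, sorting, or search refetches into a distinct cache entry.
    return useQuery(
      ['clusters', props.clusterName, 'consumerGroups', props],
      () => api.getConsumerGroupsPage(props),
      { keepPreviousData: true }
    );
  }
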
@ -1,16 +0,0 @@
import { connect } from 'react-redux';
import { RootState } from 'redux/interfaces';
import {
getConsumerGroupsOrderBy,
getConsumerGroupsTotalPages,
selectAll,
} from 'redux/reducers/consumerGroups/consumerGroupsSlice';
import List from 'components/ConsumerGroups/List/List';

const mapStateToProps = (state: RootState) => ({
consumerGroups: selectAll(state),
orderBy: getConsumerGroupsOrderBy(state),
totalPages: getConsumerGroupsTotalPages(state),
});

export default connect(mapStateToProps)(List);

@ -1,60 +0,0 @@
import React from 'react';
import List, { Props } from 'components/ConsumerGroups/List/List';
import { screen } from '@testing-library/react';
import { render } from 'lib/testHelpers';
import { consumerGroups as consumerGroupMock } from 'redux/reducers/consumerGroups/__test__/fixtures';
import { clusterConsumerGroupDetailsPath } from 'lib/paths';
import userEvent from '@testing-library/user-event';
import ListContainer from 'components/ConsumerGroups/List/ListContainer';

const mockedUsedNavigate = jest.fn();

jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useNavigate: () => mockedUsedNavigate,
}));

describe('ListContainer', () => {
it('renders correctly', () => {
render(<ListContainer />);
expect(screen.getByRole('table')).toBeInTheDocument();
});
});

describe('List', () => {
const renderComponent = (props: Partial<Props> = {}) => {
const { consumerGroups, totalPages } = props;
return render(
<List
consumerGroups={consumerGroups || []}
totalPages={totalPages || 1}
/>
);
};

it('renders empty table', () => {
renderComponent();
expect(screen.getByRole('table')).toBeInTheDocument();
expect(
screen.getByText('No active consumer groups found')
).toBeInTheDocument();
});

describe('consumerGroups are fetched', () => {
beforeEach(() => renderComponent({ consumerGroups: consumerGroupMock }));

it('renders all rows with consumers', () => {
expect(screen.getByText('groupId1')).toBeInTheDocument();
expect(screen.getByText('groupId2')).toBeInTheDocument();
});

it('handles onRowClick', async () => {
const row = screen.getByRole('row', { name: 'groupId1 0 1 1' });
expect(row).toBeInTheDocument();
await userEvent.click(row);
expect(mockedUsedNavigate).toHaveBeenCalledWith(
clusterConsumerGroupDetailsPath(':clusterName', 'groupId1')
);
});
});
});

@ -11,9 +11,7 @@ import { render, WithRoute } from 'lib/testHelpers';

const clusterName = 'cluster1';

jest.mock('components/ConsumerGroups/List/ListContainer', () => () => (
jest.mock('components/ConsumerGroups/List', () => () => <div>ListPage</div>);
<div>ListContainerMock</div>
));
jest.mock('components/ConsumerGroups/Details/Details', () => () => (
<div>DetailsMock</div>
));

@ -35,7 +33,7 @@ const renderComponent = (path?: string) =>
describe('ConsumerGroups', () => {
it('renders ListContainer', async () => {
renderComponent();
expect(screen.getByText('ListContainerMock')).toBeInTheDocument();
expect(screen.getByText('ListPage')).toBeInTheDocument();
});
it('renders ResetOffsets', async () => {
renderComponent(

@ -1,15 +1,109 @@
import React from 'react';
import { Route, Routes } from 'react-router-dom';
import { clusterKsqlDbQueryRelativePath } from 'lib/paths';
import List from 'components/KsqlDb/List/List';
import Query from 'components/KsqlDb/Query/Query';
import useAppParams from 'lib/hooks/useAppParams';
import * as Metrics from 'components/common/Metrics';
import {
clusterKsqlDbQueryRelativePath,
clusterKsqlDbStreamsPath,
clusterKsqlDbStreamsRelativePath,
clusterKsqlDbTablesPath,
clusterKsqlDbTablesRelativePath,
ClusterNameRoute,
} from 'lib/paths';
import PageHeading from 'components/common/PageHeading/PageHeading';
import { ActionButton } from 'components/common/ActionComponent';
import Navbar from 'components/common/Navigation/Navbar.styled';
import { Navigate, NavLink, Route, Routes } from 'react-router-dom';
import { Action, ResourceType } from 'generated-sources';
import { useKsqlkDb } from 'lib/hooks/api/ksqlDb';
import 'ace-builds/src-noconflict/ace';

import TableView from './TableView';

const KsqlDb: React.FC = () => {
const { clusterName } = useAppParams<ClusterNameRoute>();

const [tables, streams] = useKsqlkDb(clusterName);

const isFetching = tables.isFetching || streams.isFetching;

return (
<Routes>
<>
<Route path="/*" element={<List />} />
<PageHeading text="KSQL DB">
<Route path={clusterKsqlDbQueryRelativePath} element={<Query />} />
<ActionButton
</Routes>
to={clusterKsqlDbQueryRelativePath}
buttonType="primary"
buttonSize="M"
permission={{
resource: ResourceType.KSQL,
action: Action.EXECUTE,
}}
>
Execute KSQL Request
</ActionButton>
</PageHeading>
<Metrics.Wrapper>
<Metrics.Section>
<Metrics.Indicator
label="Tables"
title="Tables"
fetching={isFetching}
>
{tables.isSuccess ? tables.data.length : '-'}
</Metrics.Indicator>
<Metrics.Indicator
label="Streams"
title="Streams"
fetching={isFetching}
>
{streams.isSuccess ? streams.data.length : '-'}
</Metrics.Indicator>
</Metrics.Section>
</Metrics.Wrapper>
<div>
<Navbar role="navigation">
<NavLink
to={clusterKsqlDbTablesPath(clusterName)}
className={({ isActive }) => (isActive ? 'is-active' : '')}
end
>
Tables
</NavLink>
<NavLink
to={clusterKsqlDbStreamsPath(clusterName)}
className={({ isActive }) => (isActive ? 'is-active' : '')}
end
>
Streams
</NavLink>
</Navbar>
<Routes>
<Route
index
element={<Navigate to={clusterKsqlDbTablesRelativePath} />}
/>
<Route
path={clusterKsqlDbTablesRelativePath}
element={
<TableView
fetching={tables.isFetching}
rows={tables.data || []}
/>
}
/>
<Route
path={clusterKsqlDbStreamsRelativePath}
element={
<TableView
fetching={streams.isFetching}
rows={streams.data || []}
/>
}
/>
<Route path={clusterKsqlDbQueryRelativePath} element={<Query />} />
</Routes>
</div>
</>
);
};

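useKsqlkDb (spelling as in the source) evidently returns one query result per object kind, which is why the component destructures [tables, streams] and reads isFetching/isSuccess/data on each. A sketch of such a hook with react-query's useQueries; the `api` calls are assumed for illustration:

  import { useQueries } from '@tanstack/react-query';
  import { KsqlStreamDescription, KsqlTableDescription } from 'generated-sources';

  // Hypothetical client; the real ksqlDb API methods are not shown in this diff.
  declare const api: {
    listTables(p: { clusterName: string }): Promise<KsqlTableDescription[]>;
    listStreams(p: { clusterName: string }): Promise<KsqlStreamDescription[]>;
  };

  export function useKsqlkDb(clusterName: string) {
    // Two parallel queries; each result exposes isFetching/isSuccess/data,
    // matching the [tables, streams] destructuring in the component.
    return useQueries({
      queries: [
        {
          queryKey: ['clusters', clusterName, 'ksqlDb', 'tables'],
          queryFn: () => api.listTables({ clusterName }),
        },
        {
          queryKey: ['clusters', clusterName, 'ksqlDb', 'streams'],
          queryFn: () => api.listStreams({ clusterName }),
        },
      ],
    });
  }
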
@ -1,58 +0,0 @@
import React from 'react';
import PageLoader from 'components/common/PageLoader/PageLoader';
import { KsqlStreamDescription, KsqlTableDescription } from 'generated-sources';
import { ksqlRowData } from 'components/KsqlDb/List/KsqlDbItem/utils/ksqlRowData';
import Table from 'components/common/NewTable';
import { ColumnDef } from '@tanstack/react-table';

export enum KsqlDbItemType {
Tables = 'tables',
Streams = 'streams',
}

interface RowsType {
tables: KsqlTableDescription[];
streams: KsqlStreamDescription[];
}
export interface KsqlDbItemProps {
type: KsqlDbItemType;
fetching: boolean;
rows: RowsType;
}

export interface KsqlTableState {
name: string;
topic: string;
keyFormat: string;
valueFormat: string;
isWindowed: string;
}

const KsqlDbItem: React.FC<KsqlDbItemProps> = ({ type, fetching, rows }) => {
const preparedRows = rows[type]?.map(ksqlRowData) || [];

const columns = React.useMemo<ColumnDef<KsqlTableState>[]>(
() => [
{ header: 'Name', accessorKey: 'name' },
{ header: 'Topic', accessorKey: 'topic' },
{ header: 'Key Format', accessorKey: 'keyFormat' },
{ header: 'Value Format', accessorKey: 'valueFormat' },
{ header: 'Is Windowed', accessorKey: 'isWindowed' },
],
[]
);

if (fetching) {
return <PageLoader />;
}
return (
<Table
data={preparedRows}
columns={columns}
emptyMessage="No tables or streams found"
enableSorting={false}
/>
);
};

export default KsqlDbItem;

|
@ -1,59 +0,0 @@
|
||||||
import React from 'react';
|
|
||||||
import { render, WithRoute } from 'lib/testHelpers';
|
|
||||||
import { clusterKsqlDbTablesPath } from 'lib/paths';
|
|
||||||
import KsqlDbItem, {
|
|
||||||
KsqlDbItemProps,
|
|
||||||
KsqlDbItemType,
|
|
||||||
} from 'components/KsqlDb/List/KsqlDbItem/KsqlDbItem';
|
|
||||||
import { screen } from '@testing-library/dom';
|
|
||||||
import { fetchKsqlDbTablesPayload } from 'redux/reducers/ksqlDb/__test__/fixtures';
|
|
||||||
|
|
||||||
describe('KsqlDbItem', () => {
|
|
||||||
const tablesPathname = clusterKsqlDbTablesPath();
|
|
||||||
const renderComponent = (props: Partial<KsqlDbItemProps> = {}) => {
|
|
||||||
render(
|
|
||||||
<WithRoute path={tablesPathname}>
|
|
||||||
<KsqlDbItem
|
|
||||||
type={KsqlDbItemType.Tables}
|
|
||||||
fetching={false}
|
|
||||||
rows={{ tables: [], streams: [] }}
|
|
||||||
{...props}
|
|
||||||
/>
|
|
||||||
</WithRoute>,
|
|
||||||
{
|
|
||||||
initialEntries: [clusterKsqlDbTablesPath()],
|
|
||||||
}
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
it('renders progressbar when fetching tables and streams', () => {
|
|
||||||
renderComponent({ fetching: true });
|
|
||||||
expect(screen.getByRole('progressbar')).toBeInTheDocument();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('show no text if no data found', () => {
|
|
||||||
renderComponent({});
|
|
||||||
expect(screen.getByText('No tables or streams found')).toBeInTheDocument();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('renders with tables', () => {
|
|
||||||
renderComponent({
|
|
||||||
rows: {
|
|
||||||
tables: fetchKsqlDbTablesPayload.tables,
|
|
||||||
streams: [],
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(screen.getByRole('table').querySelectorAll('td')).toHaveLength(10);
|
|
||||||
});
|
|
||||||
it('renders with streams', () => {
|
|
||||||
renderComponent({
|
|
||||||
type: KsqlDbItemType.Streams,
|
|
||||||
rows: {
|
|
||||||
tables: [],
|
|
||||||
streams: fetchKsqlDbTablesPayload.streams,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
expect(screen.getByRole('table').querySelectorAll('td')).toHaveLength(10);
|
|
||||||
});
|
|
||||||
});
|
|
|
@@ -1,12 +0,0 @@
-import { KsqlDescription } from 'redux/interfaces/ksqlDb';
-import { KsqlTableState } from 'components/KsqlDb/List/KsqlDbItem/KsqlDbItem';
-
-export const ksqlRowData = (data: KsqlDescription): KsqlTableState => {
-  return {
-    name: data.name || '',
-    topic: data.topic || '',
-    keyFormat: data.keyFormat || '',
-    valueFormat: data.valueFormat || '',
-    isWindowed: 'isWindowed' in data ? String(data.isWindowed) : '-',
-  };
-};
@@ -1,111 +0,0 @@
-import React, { FC } from 'react';
-import useAppParams from 'lib/hooks/useAppParams';
-import * as Metrics from 'components/common/Metrics';
-import { getKsqlDbTables } from 'redux/reducers/ksqlDb/selectors';
-import {
-  clusterKsqlDbQueryRelativePath,
-  clusterKsqlDbStreamsPath,
-  clusterKsqlDbStreamsRelativePath,
-  clusterKsqlDbTablesPath,
-  clusterKsqlDbTablesRelativePath,
-  ClusterNameRoute,
-} from 'lib/paths';
-import PageHeading from 'components/common/PageHeading/PageHeading';
-import { ActionButton } from 'components/common/ActionComponent';
-import Navbar from 'components/common/Navigation/Navbar.styled';
-import { Navigate, NavLink, Route, Routes } from 'react-router-dom';
-import { fetchKsqlDbTables } from 'redux/reducers/ksqlDb/ksqlDbSlice';
-import { useAppDispatch, useAppSelector } from 'lib/hooks/redux';
-import { Action, ResourceType } from 'generated-sources';
-
-import KsqlDbItem, { KsqlDbItemType } from './KsqlDbItem/KsqlDbItem';
-
-const List: FC = () => {
-  const { clusterName } = useAppParams<ClusterNameRoute>();
-  const dispatch = useAppDispatch();
-
-  const { rows, fetching, tablesCount, streamsCount } =
-    useAppSelector(getKsqlDbTables);
-
-  React.useEffect(() => {
-    dispatch(fetchKsqlDbTables(clusterName));
-  }, [clusterName, dispatch]);
-
-  return (
-    <>
-      <PageHeading text="KSQL DB">
-        <ActionButton
-          to={clusterKsqlDbQueryRelativePath}
-          buttonType="primary"
-          buttonSize="M"
-          permission={{
-            resource: ResourceType.KSQL,
-            action: Action.EXECUTE,
-          }}
-        >
-          Execute KSQL Request
-        </ActionButton>
-      </PageHeading>
-      <Metrics.Wrapper>
-        <Metrics.Section>
-          <Metrics.Indicator label="Tables" title="Tables" fetching={fetching}>
-            {tablesCount}
-          </Metrics.Indicator>
-          <Metrics.Indicator
-            label="Streams"
-            title="Streams"
-            fetching={fetching}
-          >
-            {streamsCount}
-          </Metrics.Indicator>
-        </Metrics.Section>
-      </Metrics.Wrapper>
-      <div>
-        <Navbar role="navigation">
-          <NavLink
-            to={clusterKsqlDbTablesPath(clusterName)}
-            className={({ isActive }) => (isActive ? 'is-active' : '')}
-            end
-          >
-            Tables
-          </NavLink>
-          <NavLink
-            to={clusterKsqlDbStreamsPath(clusterName)}
-            className={({ isActive }) => (isActive ? 'is-active' : '')}
-            end
-          >
-            Streams
-          </NavLink>
-        </Navbar>
-        <Routes>
-          <Route
-            index
-            element={<Navigate to={clusterKsqlDbTablesRelativePath} />}
-          />
-          <Route
-            path={clusterKsqlDbTablesRelativePath}
-            element={
-              <KsqlDbItem
-                type={KsqlDbItemType.Tables}
-                fetching={fetching}
-                rows={rows}
-              />
-            }
-          />
-          <Route
-            path={clusterKsqlDbStreamsRelativePath}
-            element={
-              <KsqlDbItem
-                type={KsqlDbItemType.Streams}
-                fetching={fetching}
-                rows={rows}
-              />
-            }
-          />
-        </Routes>
-      </div>
-    </>
-  );
-};
-
-export default List;
@@ -1,22 +0,0 @@
-import React from 'react';
-import List from 'components/KsqlDb/List/List';
-import { render } from 'lib/testHelpers';
-import fetchMock from 'fetch-mock';
-import { screen } from '@testing-library/dom';
-import { act } from '@testing-library/react';
-
-describe('KsqlDb List', () => {
-  const renderComponent = async () => {
-    await act(() => {
-      render(<List />);
-    });
-  };
-  afterEach(() => fetchMock.reset());
-  it('renders List component with Tables and Streams tabs', async () => {
-    await renderComponent();
-    const Tables = screen.getByTitle('Tables');
-    const Streams = screen.getByTitle('Streams');
-    expect(Tables).toBeInTheDocument();
-    expect(Streams).toBeInTheDocument();
-  });
-});
@@ -1,9 +0,0 @@
-import PageLoader from 'components/common/PageLoader/PageLoader';
-import styled from 'styled-components';
-
-export const ContinuousLoader = styled(PageLoader)`
-  & > div {
-    transform: scale(0.5);
-    padding-top: 0;
-  }
-`;
@@ -1,223 +1,54 @@
-import React, { useCallback, useEffect, FC, useState } from 'react';
+import React from 'react';
 import useAppParams from 'lib/hooks/useAppParams';
 import TableRenderer from 'components/KsqlDb/Query/renderer/TableRenderer/TableRenderer';
+import { ClusterNameRoute } from 'lib/paths';
 import {
-  executeKsql,
-  resetExecutionResult,
-} from 'redux/reducers/ksqlDb/ksqlDbSlice';
-import { getKsqlExecution } from 'redux/reducers/ksqlDb/selectors';
-import { BASE_PARAMS } from 'lib/constants';
-import { KsqlResponse, KsqlTableResponse } from 'generated-sources';
-import { clusterKsqlDbPath, ClusterNameRoute } from 'lib/paths';
-import { useAppDispatch, useAppSelector } from 'lib/hooks/redux';
-import { showAlert, showSuccessAlert } from 'lib/errorHandling';
-import PageHeading from 'components/common/PageHeading/PageHeading';
+  useExecuteKsqlkDbQueryMutation,
+  useKsqlkDbSSE,
+} from 'lib/hooks/api/ksqlDb';
 
 import type { FormValues } from './QueryForm/QueryForm';
-import * as S from './Query.styled';
 import QueryForm from './QueryForm/QueryForm';
 
-export const getFormattedErrorFromTableData = (
-  responseValues: KsqlTableResponse['values']
-): { title: string; message: string } => {
-  // We expect someting like that
-  // [[
-  //   "@type",
-  //   "error_code",
-  //   "message",
-  //   "statementText"?,
-  //   "entities"?
-  // ]],
-  // or
-  // [["message"]]
-
-  if (!responseValues || !responseValues.length) {
-    return {
-      title: 'Unknown error',
-      message: 'Recieved empty response',
-    };
-  }
-
-  let title = '';
-  let message = '';
-  if (responseValues[0].length < 2) {
-    const [messageText] = responseValues[0];
-    title = messageText;
-  } else {
-    const [type, errorCode, messageText, statementText, entities] =
-      responseValues[0];
-    title = `[Error #${errorCode}] ${type}`;
-    message =
-      (entities?.length ? `[${entities.join(', ')}] ` : '') +
-      (statementText ? `"${statementText}" ` : '') +
-      messageText;
-  }
-
-  return {
-    title,
-    message,
-  };
-};
-
-const Query: FC = () => {
+const Query = () => {
   const { clusterName } = useAppParams<ClusterNameRoute>();
+  const executeQuery = useExecuteKsqlkDbQueryMutation();
+  const [pipeId, setPipeId] = React.useState<string | false>(false);
 
-  const sseRef = React.useRef<{ sse: EventSource | null; isOpen: boolean }>({
-    sse: null,
-    isOpen: false,
-  });
-  const [fetching, setFetching] = useState(false);
-  const dispatch = useAppDispatch();
+  const sse = useKsqlkDbSSE({ clusterName, pipeId });
 
-  const { executionResult } = useAppSelector(getKsqlExecution);
-  const [KSQLTable, setKSQLTable] = useState<KsqlTableResponse | null>(null);
+  const isFetching = executeQuery.isLoading || sse.isFetching;
 
-  const reset = useCallback(() => {
-    dispatch(resetExecutionResult());
-  }, [dispatch]);
-
-  useEffect(() => {
-    return reset;
-  }, [reset]);
-
-  const destroySSE = () => {
-    if (sseRef.current?.sse) {
-      sseRef.current.sse.close();
-      setFetching(false);
-      sseRef.current.sse = null;
-      sseRef.current.isOpen = false;
-    }
+  const submitHandler = async (values: FormValues) => {
+    const filtered = values.streamsProperties.filter(({ key }) => key != null);
+    const streamsProperties = filtered.reduce<Record<string, string>>(
+      (acc, current) => ({ ...acc, [current.key]: current.value }),
+      {}
+    );
+    await executeQuery.mutateAsync(
+      {
+        clusterName,
+        ksqlCommandV2: {
+          ...values,
+          streamsProperties:
+            values.streamsProperties[0].key !== ''
+              ? JSON.parse(JSON.stringify(streamsProperties))
+              : undefined,
+        },
+      },
+      { onSuccess: (data) => setPipeId(data.pipeId) }
+    );
   };
 
-  const handleSSECancel = useCallback(() => {
-    reset();
-    destroySSE();
-  }, [reset]);
-
-  const createSSE = useCallback(
-    (pipeId: string) => {
-      const url = `${BASE_PARAMS.basePath}/api/clusters/${clusterName}/ksql/response?pipeId=${pipeId}`;
-      const sse = new EventSource(url);
-      sseRef.current.sse = sse;
-      setFetching(true);
-
-      sse.onopen = () => {
-        sseRef.current.isOpen = true;
-      };
-
-      sse.onmessage = ({ data }) => {
-        const { table }: KsqlResponse = JSON.parse(data);
-        if (table) {
-          switch (table?.header) {
-            case 'Execution error': {
-              const { title, message } = getFormattedErrorFromTableData(
-                table.values
-              );
-              const id = `${url}-executionError`;
-              showAlert('error', { id, title, message });
-              break;
-            }
-            case 'Schema': {
-              setKSQLTable(table);
-              break;
-            }
-            case 'Row': {
-              setKSQLTable((PrevKSQLTable) => {
-                return {
-                  header: PrevKSQLTable?.header,
-                  columnNames: PrevKSQLTable?.columnNames,
-                  values: [
-                    ...(PrevKSQLTable?.values || []),
-                    ...(table?.values || []),
-                  ],
-                };
-              });
-              break;
-            }
-            case 'Query Result': {
-              const id = `${url}-querySuccess`;
-              showSuccessAlert({ id, title: 'Query succeed', message: '' });
-              break;
-            }
-            case 'Source Description':
-            case 'properties':
-            default: {
-              setKSQLTable(table);
-              break;
-            }
-          }
-        }
-        return sse;
-      };
-
-      sse.onerror = () => {
-        // if it's open - we know that server responded without opening SSE
-        if (!sseRef.current.isOpen) {
-          showAlert('error', {
-            id: `${url}-connectionClosedError`,
-            title: '',
-            message: 'SSE connection closed',
-          });
-        }
-        destroySSE();
-      };
-    },
-    [clusterName, dispatch]
-  );
-
-  const submitHandler = useCallback(
-    (values: FormValues) => {
-      const filteredProperties = values.streamsProperties.filter(
-        (property) => property.key != null
-      );
-      const streamsProperties = filteredProperties.reduce(
-        (acc, current) => ({
-          ...acc,
-          [current.key as keyof string]: current.value,
-        }),
-        {} as { [key: string]: string }
-      );
-      setFetching(true);
-      dispatch(
-        executeKsql({
-          clusterName,
-          ksqlCommandV2: {
-            ...values,
-            streamsProperties:
-              values.streamsProperties[0].key !== ''
-                ? JSON.parse(JSON.stringify(streamsProperties))
-                : undefined,
-          },
-        })
-      );
-    },
-    [dispatch, clusterName]
-  );
-  useEffect(() => {
-    if (executionResult?.pipeId) {
-      createSSE(executionResult.pipeId);
-    }
-    return () => {
-      destroySSE();
-    };
-  }, [createSSE, executionResult]);
-
   return (
     <>
-      <PageHeading
-        text="Query"
-        backText="KSQL DB"
-        backTo={clusterKsqlDbPath(clusterName)}
-      />
       <QueryForm
-        fetching={fetching}
-        hasResults={!!KSQLTable}
-        handleClearResults={() => setKSQLTable(null)}
-        handleSSECancel={handleSSECancel}
+        fetching={isFetching}
+        hasResults={!!sse.data && !!pipeId}
+        resetResults={() => setPipeId(false)}
         submitHandler={submitHandler}
       />
-      {KSQLTable && <TableRenderer table={KSQLTable} />}
-      {fetching && <S.ContinuousLoader />}
+      {pipeId && !!sse.data && <TableRenderer table={sse.data} />}
    </>
  );
};
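The rewritten `Query` above delegates the POST-then-stream flow to `useExecuteKsqlkDbQueryMutation` and `useKsqlkDbSSE` from `lib/hooks/api/ksqlDb`, whose bodies are not part of this diff. A minimal sketch of what such a pair could look like — the two endpoint paths are taken from the removed code above; everything else is an assumption, not the project's actual implementation:

```typescript
// Sketch only: the real hooks live in lib/hooks/api/ksqlDb and may differ.
import { useEffect, useState } from 'react';
import { useMutation } from '@tanstack/react-query';

// Assumed minimal shape of a streamed table chunk.
type KsqlTableResponse = {
  header?: string;
  columnNames?: string[];
  values?: string[][];
};

export function useExecuteKsqlkDbQueryMutation() {
  // POST /ksql/v2 returns { pipeId } (endpoint taken from the removed code).
  return useMutation(
    async (params: { clusterName: string; ksqlCommandV2: unknown }) => {
      const res = await fetch(`/api/clusters/${params.clusterName}/ksql/v2`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(params.ksqlCommandV2),
      });
      return res.json() as Promise<{ pipeId: string }>;
    }
  );
}

export function useKsqlkDbSSE(args: {
  clusterName: string;
  pipeId: string | false;
}) {
  const { clusterName, pipeId } = args;
  const [data, setData] = useState<KsqlTableResponse>();
  const [isFetching, setIsFetching] = useState(false);

  useEffect(() => {
    if (!pipeId) return undefined;
    // Same SSE endpoint the removed createSSE() opened.
    const sse = new EventSource(
      `/api/clusters/${clusterName}/ksql/response?pipeId=${pipeId}`
    );
    setIsFetching(true);
    sse.onmessage = ({ data: raw }) => {
      const { table } = JSON.parse(raw);
      if (table) setData(table);
    };
    sse.onerror = () => {
      setIsFetching(false);
      sse.close();
    };
    return () => {
      sse.close();
      setIsFetching(false);
    };
  }, [clusterName, pipeId]);

  return { data, isFetching };
}
```

The net effect of the rewrite is that the component only tracks `pipeId`; fetching state, the stream lifecycle, and accumulated rows all live behind the hooks.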
@@ -6,13 +6,12 @@ export const QueryWrapper = styled.div`
 `;
 
 export const KSQLInputsWrapper = styled.div`
-  width: 100%;
   display: flex;
   gap: 24px;
 
   padding-bottom: 16px;
-  & > div {
-    flex-grow: 1;
+  @media screen and (max-width: 769px) {
+    flex-direction: column;
   }
 `;
@@ -22,61 +21,23 @@ export const KSQLInputHeader = styled.div`
   color: ${({ theme }) => theme.default.color.normal};
 `;
 
-export const KSQLButtons = styled.div`
-  display: flex;
-  gap: 16px;
-`;
-
-export const StreamPropertiesContainer = styled.label`
-  display: flex;
-  flex-direction: column;
-  gap: 10px;
-  width: 50%;
-  color: ${({ theme }) => theme.default.color.normal};
-`;
-
 export const InputsContainer = styled.div`
-  overflow: hidden;
-  width: 100%;
-  display: flex;
-  justify-content: center;
-  gap: 10px;
-`;
-
-export const StreamPropertiesInputWrapper = styled.div`
-  & {
-    width: 100%;
-  }
-  & > input {
-    width: 100%;
-    height: 40px;
-    border: 1px solid grey;
-    &:focus {
-      outline: none;
-      border-color: ${({ theme }) => theme.input.borderColor.focus};
-      &::placeholder {
-        color: transparent;
-      }
-    }
-    border-radius: 4px;
-    font-size: 16px;
-    padding-left: 15px;
-    background-color: ${({ theme }) => theme.input.backgroundColor.normal};
-    color: ${({ theme }) => theme.input.color.normal};
-  }
-`;
-
-export const DeleteButtonWrapper = styled.div`
-  min-height: 32px;
-  display: flex;
-  flex-direction: column;
+  display: grid;
+  grid-template-columns: 1fr 1fr 30px;
   align-items: center;
-  justify-self: flex-start;
-  margin-top: 10px;
+  gap: 10px;
 `;
 
 export const Fieldset = styled.fieldset`
-  width: 50%;
+  display: flex;
+  flex: 1;
+  flex-direction: column;
+  gap: 8px;
+`;
+
+export const ButtonsContainer = styled.div`
+  display: flex;
+  gap: 8px;
 `;
 
 export const SQLEditor = styled(BaseSQLEditor)(
@@ -1,22 +1,27 @@
-import React, { useCallback, useRef } from 'react';
+import React from 'react';
 import { FormError } from 'components/common/Input/Input.styled';
 import { ErrorMessage } from '@hookform/error-message';
-import { useForm, Controller, useFieldArray } from 'react-hook-form';
+import {
+  useForm,
+  Controller,
+  useFieldArray,
+  FormProvider,
+} from 'react-hook-form';
 import { Button } from 'components/common/Button/Button';
 import IconButtonWrapper from 'components/common/Icons/IconButtonWrapper';
 import CloseIcon from 'components/common/Icons/CloseIcon';
 import { yupResolver } from '@hookform/resolvers/yup';
 import yup from 'lib/yupExtended';
 import PlusIcon from 'components/common/Icons/PlusIcon';
-import ReactAce from 'react-ace/lib/ace';
+import ReactAce from 'react-ace';
+import Input from 'components/common/Input/Input';
 
 import * as S from './QueryForm.styled';
 
-export interface Props {
+interface QueryFormProps {
   fetching: boolean;
   hasResults: boolean;
-  handleClearResults: () => void;
-  handleSSECancel: () => void;
+  resetResults: () => void;
   submitHandler: (values: FormValues) => void;
 }
 type StreamsPropertiesType = {
@@ -37,20 +42,13 @@ const validationSchema = yup.object({
   streamsProperties: yup.array().of(streamsPropertiesSchema),
 });
 
-const QueryForm: React.FC<Props> = ({
+const QueryForm: React.FC<QueryFormProps> = ({
   fetching,
   hasResults,
-  handleClearResults,
-  handleSSECancel,
   submitHandler,
+  resetResults,
 }) => {
-  const {
-    handleSubmit,
-    setValue,
-    getValues,
-    control,
-    formState: { errors },
-  } = useForm<FormValues>({
+  const methods = useForm<FormValues>({
     mode: 'onTouched',
     resolver: yupResolver(validationSchema),
     defaultValues: {
@@ -58,7 +56,16 @@ const QueryForm: React.FC<Props> = ({
       streamsProperties: [{ key: '', value: '' }],
     },
   });
-  const { fields, append, remove } = useFieldArray<
+
+  const {
+    handleSubmit,
+    setValue,
+    control,
+    watch,
+    formState: { errors, isDirty },
+  } = methods;
+
+  const { fields, append, remove, update } = useFieldArray<
     FormValues,
     'streamsProperties'
   >({
@@ -66,17 +73,24 @@ const QueryForm: React.FC<Props> = ({
     name: 'streamsProperties',
   });
 
-  const handleAddNewProperty = useCallback(() => {
-    if (
-      getValues().streamsProperties.every((prop) => {
-        return prop.key;
-      })
-    ) {
-      append({ key: '', value: '' });
-    }
-  }, []);
+  const watchStreamProps = watch('streamsProperties');
 
-  const inputRef = useRef<ReactAce>(null);
+  const appendProperty = () => {
+    append({ key: '', value: '' });
+  };
+  const removeProperty = (index: number) => () => {
+    if (fields.length === 1) {
+      update(index, { key: '', value: '' });
+      return;
+    }
+
+    remove(index);
+  };
+
+  const isAppendDisabled =
+    fetching || !!watchStreamProps.find((field) => !field.key);
+
+  const inputRef = React.useRef<ReactAce>(null);
 
   const handleFocus = () => {
     // eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -87,145 +101,117 @@ const QueryForm: React.FC<Props> = ({
     }
   };
 
+  const handleClear = () => {
+    handleFocus();
+    resetResults();
+  };
+
   return (
-    <S.QueryWrapper>
-      <form onSubmit={handleSubmit(submitHandler)}>
-        <S.KSQLInputsWrapper>
-          <S.Fieldset aria-labelledby="ksqlLabel">
-            <S.KSQLInputHeader>
-              <label id="ksqlLabel">KSQL</label>
-              <Button
-                onClick={() => setValue('ksql', '')}
-                buttonType="primary"
-                buttonSize="S"
-                isInverted
-              >
-                Clear
-              </Button>
-            </S.KSQLInputHeader>
-            <Controller
-              control={control}
-              name="ksql"
-              render={({ field }) => (
-                <S.SQLEditor
-                  {...field}
-                  commands={[
-                    {
-                      // commands is array of key bindings.
-                      // name for the key binding.
-                      name: 'commandName',
-                      // key combination used for the command.
-                      bindKey: { win: 'Ctrl-Enter', mac: 'Command-Enter' },
-                      // function to execute when keys are pressed.
-                      exec: () => {
-                        handleSubmit(submitHandler)();
-                      },
-                    },
-                  ]}
-                  readOnly={fetching}
-                  ref={inputRef}
-                />
-              )}
-            />
-            <FormError>
-              <ErrorMessage errors={errors} name="ksql" />
-            </FormError>
-          </S.Fieldset>
-          <S.StreamPropertiesContainer>
-            Stream properties:
-            {fields.map((item, index) => (
-              <S.InputsContainer key={item.id}>
-                <S.StreamPropertiesInputWrapper>
-                  <Controller
-                    control={control}
-                    name={`streamsProperties.${index}.key`}
-                    render={({ field }) => (
-                      <input
-                        {...field}
-                        placeholder="Key"
-                        aria-label="key"
-                        type="text"
-                        autoComplete="off"
-                      />
-                    )}
-                  />
-                  <FormError>
-                    <ErrorMessage
-                      errors={errors}
-                      name={`streamsProperties.${index}.key`}
-                    />
-                  </FormError>
-                </S.StreamPropertiesInputWrapper>
-                <S.StreamPropertiesInputWrapper>
-                  <Controller
-                    control={control}
-                    name={`streamsProperties.${index}.value`}
-                    render={({ field }) => (
-                      <input
-                        {...field}
-                        placeholder="Value"
-                        aria-label="value"
-                        type="text"
-                        autoComplete="off"
-                      />
-                    )}
-                  />
-                  <FormError>
-                    <ErrorMessage
-                      errors={errors}
-                      name={`streamsProperties.${index}.value`}
-                    />
-                  </FormError>
-                </S.StreamPropertiesInputWrapper>
-
-                <S.DeleteButtonWrapper onClick={() => remove(index)}>
-                  <IconButtonWrapper aria-label="deleteProperty">
-                    <CloseIcon aria-hidden />
-                  </IconButtonWrapper>
-                </S.DeleteButtonWrapper>
-              </S.InputsContainer>
-            ))}
-            <Button
-              type="button"
-              buttonSize="M"
-              buttonType="secondary"
-              onClick={handleAddNewProperty}
-            >
-              <PlusIcon />
-              Add Stream Property
-            </Button>
-          </S.StreamPropertiesContainer>
-        </S.KSQLInputsWrapper>
-        <S.KSQLButtons>
-          <Button
-            buttonType="primary"
-            buttonSize="M"
-            type="submit"
-            disabled={fetching}
-            onClick={handleFocus}
-          >
-            Execute
-          </Button>
-          <Button
-            buttonType="secondary"
-            buttonSize="M"
-            disabled={!fetching}
-            onClick={handleSSECancel}
-          >
-            Stop query
-          </Button>
-          <Button
-            buttonType="secondary"
-            buttonSize="M"
-            disabled={fetching || !hasResults}
-            onClick={handleClearResults}
-          >
-            Clear results
-          </Button>
-        </S.KSQLButtons>
-      </form>
-    </S.QueryWrapper>
+    <FormProvider {...methods}>
+      <S.QueryWrapper>
+        <form onSubmit={handleSubmit(submitHandler)}>
+          <S.KSQLInputsWrapper>
+            <S.Fieldset>
+              <S.KSQLInputHeader>
+                <label id="ksqlLabel">KSQL</label>
+                <Button
+                  onClick={() => setValue('ksql', '')}
+                  buttonType="primary"
+                  buttonSize="S"
+                  isInverted
+                >
+                  Clear
+                </Button>
+              </S.KSQLInputHeader>
+              <Controller
+                control={control}
+                name="ksql"
+                render={({ field }) => (
+                  <S.SQLEditor
+                    {...field}
+                    commands={[
+                      {
+                        // commands is array of key bindings.
+                        // name for the key binding.
+                        name: 'commandName',
+                        // key combination used for the command.
+                        bindKey: { win: 'Ctrl-Enter', mac: 'Command-Enter' },
+                        // function to execute when keys are pressed.
+                        exec: () => {
+                          handleSubmit(submitHandler)();
+                        },
+                      },
+                    ]}
+                    readOnly={fetching}
+                    ref={inputRef}
+                  />
+                )}
+              />
+              <FormError>
+                <ErrorMessage errors={errors} name="ksql" />
+              </FormError>
+            </S.Fieldset>
+
+            <S.Fieldset>
+              Stream properties:
+              {fields.map((field, index) => (
+                <S.InputsContainer key={field.id}>
+                  <Input
+                    name={`streamsProperties.${index}.key`}
+                    placeholder="Key"
+                    type="text"
+                    autoComplete="off"
+                    withError
+                  />
+                  <Input
+                    name={`streamsProperties.${index}.value`}
+                    placeholder="Value"
+                    type="text"
+                    autoComplete="off"
+                    withError
+                  />
+                  <IconButtonWrapper
+                    aria-label="deleteProperty"
+                    onClick={removeProperty(index)}
+                  >
+                    <CloseIcon aria-hidden />
+                  </IconButtonWrapper>
+                </S.InputsContainer>
+              ))}
+              <Button
+                type="button"
+                buttonSize="M"
+                buttonType="secondary"
+                disabled={isAppendDisabled}
+                onClick={appendProperty}
+              >
+                <PlusIcon />
+                Add Stream Property
+              </Button>
+            </S.Fieldset>
+          </S.KSQLInputsWrapper>
+          <S.ButtonsContainer>
+            <Button
+              buttonType="secondary"
+              buttonSize="M"
+              disabled={fetching || !isDirty || !hasResults}
+              onClick={handleClear}
+            >
+              Clear results
+            </Button>
+            <Button
+              buttonType="primary"
+              buttonSize="M"
+              type="submit"
+              disabled={fetching}
+              onClick={handleFocus}
+            >
+              Execute
+            </Button>
+          </S.ButtonsContainer>
+        </form>
+      </S.QueryWrapper>
+    </FormProvider>
   );
 };
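One detail worth calling out in the rewritten form: `removeProperty` never lets the field array drop to zero rows — deleting the last remaining row resets it instead. A standalone sketch of that pattern (component and field names here are illustrative, not from the PR):

```tsx
// Keep-at-least-one-row pattern with react-hook-form's useFieldArray.
import React from 'react';
import { useForm, useFieldArray, FormProvider } from 'react-hook-form';

type FormValues = { props: { key: string; value: string }[] };

const PropsForm: React.FC = () => {
  const methods = useForm<FormValues>({
    defaultValues: { props: [{ key: '', value: '' }] },
  });
  const { fields, append, remove, update } = useFieldArray({
    control: methods.control,
    name: 'props',
  });

  // Deleting the last row resets it in place rather than removing it,
  // so the form always shows at least one key/value pair.
  const removeRow = (index: number) => () => {
    if (fields.length === 1) {
      update(index, { key: '', value: '' });
      return;
    }
    remove(index);
  };

  return (
    <FormProvider {...methods}>
      {fields.map((field, index) => (
        <div key={field.id}>
          <input {...methods.register(`props.${index}.key`)} placeholder="Key" />
          <input {...methods.register(`props.${index}.value`)} placeholder="Value" />
          <button type="button" onClick={removeRow(index)}>x</button>
        </div>
      ))}
      <button type="button" onClick={() => append({ key: '', value: '' })}>
        Add
      </button>
    </FormProvider>
  );
};

export default PropsForm;
```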
@@ -1,189 +0,0 @@
-import { render } from 'lib/testHelpers';
-import React from 'react';
-import QueryForm, { Props } from 'components/KsqlDb/Query/QueryForm/QueryForm';
-import { screen, waitFor, within } from '@testing-library/dom';
-import userEvent from '@testing-library/user-event';
-
-const renderComponent = (props: Props) => render(<QueryForm {...props} />);
-
-describe('QueryForm', () => {
-  it('renders', () => {
-    renderComponent({
-      fetching: false,
-      hasResults: false,
-      handleClearResults: jest.fn(),
-      handleSSECancel: jest.fn(),
-      submitHandler: jest.fn(),
-    });
-
-    const KSQLBlock = screen.getByLabelText('KSQL');
-    expect(KSQLBlock).toBeInTheDocument();
-    expect(within(KSQLBlock).getByText('KSQL')).toBeInTheDocument();
-    expect(
-      within(KSQLBlock).getByRole('button', { name: 'Clear' })
-    ).toBeInTheDocument();
-    // Represents SQL editor
-    expect(within(KSQLBlock).getByRole('textbox')).toBeInTheDocument();
-
-    const streamPropertiesBlock = screen.getByRole('textbox', { name: 'key' });
-    expect(streamPropertiesBlock).toBeInTheDocument();
-    expect(screen.getByText('Stream properties:')).toBeInTheDocument();
-    expect(screen.getByRole('button', { name: 'Clear' })).toBeInTheDocument();
-    expect(screen.queryAllByRole('textbox')[0]).toBeInTheDocument();
-
-    // Form controls
-    expect(screen.getByRole('button', { name: 'Execute' })).toBeInTheDocument();
-    expect(screen.getByRole('button', { name: 'Execute' })).toBeEnabled();
-    expect(
-      screen.getByRole('button', { name: 'Stop query' })
-    ).toBeInTheDocument();
-    expect(screen.getByRole('button', { name: 'Stop query' })).toBeDisabled();
-    expect(
-      screen.getByRole('button', { name: 'Clear results' })
-    ).toBeInTheDocument();
-    expect(
-      screen.getByRole('button', { name: 'Clear results' })
-    ).toBeDisabled();
-  });
-
-  it('renders error with empty input', async () => {
-    const submitFn = jest.fn();
-    renderComponent({
-      fetching: false,
-      hasResults: false,
-      handleClearResults: jest.fn(),
-      handleSSECancel: jest.fn(),
-      submitHandler: submitFn,
-    });
-
-    await userEvent.click(screen.getByRole('button', { name: 'Execute' }));
-
-    await waitFor(() => {
-      expect(screen.getByText('ksql is a required field')).toBeInTheDocument();
-      expect(submitFn).not.toBeCalled();
-    });
-  });
-
-  it('submits with correct inputs', async () => {
-    const submitFn = jest.fn();
-    renderComponent({
-      fetching: false,
-      hasResults: false,
-      handleClearResults: jest.fn(),
-      handleSSECancel: jest.fn(),
-      submitHandler: submitFn,
-    });
-
-    const textbox = screen.getAllByRole('textbox');
-    textbox[0].focus();
-    await userEvent.paste('show tables;');
-    const key = screen.getByRole('textbox', { name: 'key' });
-    key.focus();
-    await userEvent.paste('test');
-    const value = screen.getByRole('textbox', { name: 'value' });
-    value.focus();
-    await userEvent.paste('test');
-    await userEvent.click(screen.getByRole('button', { name: 'Execute' }));
-
-    expect(
-      screen.queryByText('ksql is a required field')
-    ).not.toBeInTheDocument();
-
-    expect(
-      screen.queryByText('streamsProperties is not JSON object')
-    ).not.toBeInTheDocument();
-
-    expect(submitFn).toBeCalled();
-  });
-
-  it('clear results is enabled when has results', async () => {
-    const clearFn = jest.fn();
-    renderComponent({
-      fetching: false,
-      hasResults: true,
-      handleClearResults: clearFn,
-      handleSSECancel: jest.fn(),
-      submitHandler: jest.fn(),
-    });
-
-    expect(screen.getByRole('button', { name: 'Clear results' })).toBeEnabled();
-
-    await userEvent.click(
-      screen.getByRole('button', { name: 'Clear results' })
-    );
-
-    expect(clearFn).toBeCalled();
-  });
-
-  it('stop query query is enabled when is fetching', async () => {
-    const cancelFn = jest.fn();
-    renderComponent({
-      fetching: true,
-      hasResults: false,
-      handleClearResults: jest.fn(),
-      handleSSECancel: cancelFn,
-      submitHandler: jest.fn(),
-    });
-
-    expect(screen.getByRole('button', { name: 'Stop query' })).toBeEnabled();
-
-    await userEvent.click(screen.getByRole('button', { name: 'Stop query' }));
-
-    expect(cancelFn).toBeCalled();
-  });
-
-  it('add new property', async () => {
-    renderComponent({
-      fetching: false,
-      hasResults: false,
-      handleClearResults: jest.fn(),
-      handleSSECancel: jest.fn(),
-      submitHandler: jest.fn(),
-    });
-
-    const textbox = screen.getByLabelText('key');
-    await userEvent.type(textbox, 'prop_name');
-    await userEvent.click(
-      screen.getByRole('button', { name: 'Add Stream Property' })
-    );
-    expect(screen.getAllByRole('textbox', { name: 'key' }).length).toEqual(2);
-  });
-
-  it("doesn't add new property", async () => {
-    renderComponent({
-      fetching: false,
-      hasResults: false,
-      handleClearResults: jest.fn(),
-      handleSSECancel: jest.fn(),
-      submitHandler: jest.fn(),
-    });
-
-    await userEvent.click(
-      screen.getByRole('button', { name: 'Add Stream Property' })
-    );
-    expect(screen.getAllByRole('textbox', { name: 'key' }).length).toEqual(1);
-  });
-
-  it('delete stream property', async () => {
-    await renderComponent({
-      fetching: false,
-      hasResults: false,
-      handleClearResults: jest.fn(),
-      handleSSECancel: jest.fn(),
-      submitHandler: jest.fn(),
-    });
-    const textBoxes = screen.getAllByRole('textbox', { name: 'key' });
-    textBoxes[0].focus();
-    await userEvent.paste('test');
-    await userEvent.click(
-      screen.getByRole('button', { name: 'Add Stream Property' })
-    );
-    await userEvent.click(screen.getAllByLabelText('deleteProperty')[0]);
-
-    await screen.getByRole('button', { name: 'Add Stream Property' });
-
-    await userEvent.click(screen.getAllByLabelText('deleteProperty')[0]);
-
-    expect(textBoxes.length).toEqual(1);
-  });
-});
@@ -1,116 +0,0 @@
-import { render, EventSourceMock, WithRoute } from 'lib/testHelpers';
-import React from 'react';
-import Query, {
-  getFormattedErrorFromTableData,
-} from 'components/KsqlDb/Query/Query';
-import { screen } from '@testing-library/dom';
-import fetchMock from 'fetch-mock';
-import { clusterKsqlDbQueryPath } from 'lib/paths';
-import userEvent from '@testing-library/user-event';
-
-const clusterName = 'testLocal';
-const renderComponent = () =>
-  render(
-    <WithRoute path={clusterKsqlDbQueryPath()}>
-      <Query />
-    </WithRoute>,
-    {
-      initialEntries: [clusterKsqlDbQueryPath(clusterName)],
-    }
-  );
-
-describe('Query', () => {
-  it('renders', () => {
-    renderComponent();
-
-    expect(screen.getByLabelText('KSQL')).toBeInTheDocument();
-    expect(screen.getByLabelText('Stream properties:')).toBeInTheDocument();
-  });
-
-  afterEach(() => fetchMock.reset());
-  it('fetch on execute', async () => {
-    renderComponent();
-
-    const mock = fetchMock.postOnce(`/api/clusters/${clusterName}/ksql/v2`, {
-      pipeId: 'testPipeID',
-    });
-
-    Object.defineProperty(window, 'EventSource', {
-      value: EventSourceMock,
-    });
-    const inputs = screen.getAllByRole('textbox');
-    const textAreaElement = inputs[0] as HTMLTextAreaElement;
-
-    textAreaElement.focus();
-    await userEvent.paste('show tables;');
-    await userEvent.click(screen.getByRole('button', { name: 'Execute' }));
-
-    expect(mock.calls().length).toBe(1);
-  });
-
-  it('fetch on execute with streamParams', async () => {
-    renderComponent();
-
-    const mock = fetchMock.postOnce(`/api/clusters/${clusterName}/ksql/v2`, {
-      pipeId: 'testPipeID',
-    });
-
-    Object.defineProperty(window, 'EventSource', {
-      value: EventSourceMock,
-    });
-
-    const inputs = screen.getAllByRole('textbox');
-    const textAreaElement = inputs[0] as HTMLTextAreaElement;
-    textAreaElement.focus();
-    await userEvent.paste('show tables;');
-
-    const key = screen.getByLabelText('key');
-    key.focus();
-    await userEvent.paste('key');
-    const value = screen.getByLabelText('value');
-    value.focus();
-    await userEvent.paste('value');
-
-    await userEvent.click(screen.getByRole('button', { name: 'Execute' }));
-
-    expect(mock.calls().length).toBe(1);
-  });
-});
-
-describe('getFormattedErrorFromTableData', () => {
-  it('works', () => {
-    expect(getFormattedErrorFromTableData([['Test Error']])).toStrictEqual({
-      title: 'Test Error',
-      message: '',
-    });
-
-    expect(
-      getFormattedErrorFromTableData([
-        ['some_type', 'errorCode', 'messageText'],
-      ])
-    ).toStrictEqual({
-      title: '[Error #errorCode] some_type',
-      message: 'messageText',
-    });
-
-    expect(
-      getFormattedErrorFromTableData([
-        [
-          'some_type',
-          'errorCode',
-          'messageText',
-          'statementText',
-          ['test1', 'test2'],
-        ],
-      ])
-    ).toStrictEqual({
-      title: '[Error #errorCode] some_type',
-      message: '[test1, test2] "statementText" messageText',
-    });
-
-    expect(getFormattedErrorFromTableData([])).toStrictEqual({
-      title: 'Unknown error',
-      message: 'Recieved empty response',
-    });
-  });
-});
@@ -6,13 +6,11 @@ import { TableTitle } from 'components/common/table/TableTitle/TableTitle.styled';
 
 import * as S from './TableRenderer.styled';
 
-export interface Props {
+interface TableRendererProps {
   table: KsqlTableResponse;
 }
 
-export function hasJsonStructure(
-  str: string | Record<string, unknown>
-): boolean {
+function hasJsonStructure(str: string | Record<string, unknown>): boolean {
   if (typeof str === 'object') {
     return true;
   }
@@ -30,13 +28,7 @@ export function hasJsonStructure(
   return false;
 }
 
-const TableRenderer: React.FC<Props> = ({ table }) => {
-  const heading = React.useMemo(() => {
-    return table.header || '';
-  }, [table.header]);
-  const ths = React.useMemo(() => {
-    return table.columnNames || [];
-  }, [table.columnNames]);
+const TableRenderer: React.FC<TableRendererProps> = ({ table }) => {
   const rows = React.useMemo(() => {
     return (table.values || []).map((row) => {
       return {
@@ -53,9 +45,11 @@ const TableRenderer: React.FC<Props> = ({ table }) => {
     });
   }, [table.values]);
 
+  const ths = table.columnNames || [];
+
   return (
     <S.Wrapper>
-      <TableTitle>{heading}</TableTitle>
+      <TableTitle>{table.header}</TableTitle>
       <S.ScrollableTable>
         <thead>
           <tr>
@@ -1,71 +0,0 @@
-import { render } from 'lib/testHelpers';
-import React from 'react';
-import TableRenderer, {
-  Props,
-  hasJsonStructure,
-} from 'components/KsqlDb/Query/renderer/TableRenderer/TableRenderer';
-import { screen } from '@testing-library/dom';
-
-const renderComponent = (props: Props) => render(<TableRenderer {...props} />);
-
-describe('TableRenderer', () => {
-  it('renders', () => {
-    renderComponent({
-      table: {
-        header: 'Test header',
-        columnNames: ['Test column name'],
-        values: [['Table row #1'], ['Table row #2'], ['{"jsonrow": "#3"}']],
-      },
-    });
-
-    expect(
-      screen.getByRole('heading', { name: 'Test header' })
-    ).toBeInTheDocument();
-    expect(
-      screen.getByRole('columnheader', { name: 'Test column name' })
-    ).toBeInTheDocument();
-    expect(
-      screen.getByRole('cell', { name: 'Table row #1' })
-    ).toBeInTheDocument();
-    expect(
-      screen.getByRole('cell', { name: 'Table row #2' })
-    ).toBeInTheDocument();
-  });
-
-  it('renders with empty arrays', () => {
-    renderComponent({
-      table: {},
-    });
-
-    expect(screen.getByText('No tables or streams found')).toBeInTheDocument();
-  });
-});
-
-describe('hasJsonStructure', () => {
-  it('works', () => {
-    expect(hasJsonStructure('simplestring')).toBeFalsy();
-    expect(
-      hasJsonStructure("{'looksLikeJson': 'but has wrong quotes'}")
-    ).toBeFalsy();
-    expect(
-      hasJsonStructure('{"json": "but doesnt have closing brackets"')
-    ).toBeFalsy();
-    expect(hasJsonStructure('"string":"that looks like json"')).toBeFalsy();
-
-    expect(hasJsonStructure('1')).toBeFalsy();
-    expect(hasJsonStructure('{1:}')).toBeFalsy();
-    expect(hasJsonStructure('{1:"1"}')).toBeFalsy();
-
-    // @ts-expect-error We suppress error because this function works with unknown data from server
-    expect(hasJsonStructure(1)).toBeFalsy();
-
-    expect(hasJsonStructure('{}')).toBeTruthy();
-    expect(hasJsonStructure('{"correct": "json"}')).toBeTruthy();
-
-    expect(hasJsonStructure('[]')).toBeTruthy();
-    expect(hasJsonStructure('[{}]')).toBeTruthy();
-
-    expect(hasJsonStructure({})).toBeTruthy();
-    expect(hasJsonStructure({ correct: 'json' })).toBeTruthy();
-  });
-});
kafka-ui-react-app/src/components/KsqlDb/TableView.tsx (new file, 39 lines)
@@ -0,0 +1,39 @@
+import React from 'react';
+import { KsqlStreamDescription, KsqlTableDescription } from 'generated-sources';
+import Table from 'components/common/NewTable';
+import { ColumnDef } from '@tanstack/react-table';
+
+interface TableViewProps {
+  fetching: boolean;
+  rows: KsqlTableDescription[] | KsqlStreamDescription[];
+}
+
+const TableView: React.FC<TableViewProps> = ({ fetching, rows }) => {
+  const columns = React.useMemo<
+    ColumnDef<KsqlTableDescription | KsqlStreamDescription>[]
+  >(
+    () => [
+      { header: 'Name', accessorKey: 'name' },
+      { header: 'Topic', accessorKey: 'topic' },
+      { header: 'Key Format', accessorKey: 'keyFormat' },
+      { header: 'Value Format', accessorKey: 'valueFormat' },
+      {
+        header: 'Is Windowed',
+        accessorKey: 'isWindowed',
+        cell: ({ row }) =>
+          'isWindowed' in row.original ? String(row.original.isWindowed) : '-',
+      },
+    ],
+    []
+  );
+  return (
+    <Table
+      data={rows || []}
+      columns={columns}
+      emptyMessage={fetching ? 'Loading...' : 'No rows found'}
+      enableSorting={false}
+    />
+  );
+};
+
+export default TableView;
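`TableView` replaces the deleted `KsqlDbItem`/`ksqlRowData` pair by rendering the generated `KsqlTableDescription`/`KsqlStreamDescription` rows directly, moving the windowed/non-windowed formatting into a cell renderer. A hypothetical usage, with fixture data assumed purely for illustration:

```tsx
// Illustrative only; the wrapping component and fixture values are not from the PR.
import React from 'react';
import TableView from 'components/KsqlDb/TableView';
import { KsqlTableDescription } from 'generated-sources';

const tables: KsqlTableDescription[] = [
  // Field names match the columns TableView declares above.
  { name: 'users', topic: 'users', keyFormat: 'KAFKA', valueFormat: 'JSON', isWindowed: false },
];

const Example: React.FC = () => <TableView fetching={false} rows={tables} />;

export default Example;
```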
@@ -1,42 +0,0 @@
-import React from 'react';
-import KsqlDb from 'components/KsqlDb/KsqlDb';
-import { render, WithRoute } from 'lib/testHelpers';
-import { screen } from '@testing-library/dom';
-import {
-  clusterKsqlDbPath,
-  clusterKsqlDbQueryPath,
-  getNonExactPath,
-} from 'lib/paths';
-
-const KSqLComponentText = {
-  list: 'list',
-  query: 'query',
-};
-
-jest.mock('components/KsqlDb/List/List', () => () => (
-  <div>{KSqLComponentText.list}</div>
-));
-jest.mock('components/KsqlDb/Query/Query', () => () => (
-  <div>{KSqLComponentText.query}</div>
-));
-
-describe('KsqlDb Component', () => {
-  const clusterName = 'clusterName';
-  const renderComponent = (path: string) =>
-    render(
-      <WithRoute path={getNonExactPath(clusterKsqlDbPath())}>
-        <KsqlDb />
-      </WithRoute>,
-      { initialEntries: [path] }
-    );
-
-  it('Renders the List', () => {
-    renderComponent(clusterKsqlDbPath(clusterName));
-    expect(screen.getByText(KSqLComponentText.list)).toBeInTheDocument();
-  });
-
-  it('Renders the List', () => {
-    renderComponent(clusterKsqlDbQueryPath(clusterName));
-    expect(screen.getByText(KSqLComponentText.query)).toBeInTheDocument();
-  });
-});
@@ -12,12 +12,6 @@ export const versionPayload = [
 ];
 export const versionEmptyPayload = [];
 
-export const versions = [
-  schemaVersion1,
-  schemaVersion2,
-  schemaVersionWithNonAsciiChars,
-];
-
 export const jsonSchema: SchemaSubject = {
   subject: 'test',
   version: '15',
@@ -14,7 +14,7 @@ export const invalidPermission = {
   action: Action.DELETE,
 };
 
-export const roles = [
+const roles = [
   {
     ...validPermission,
     actions: [validPermission.action],
@@ -1,4 +1,5 @@
 import { diff as DiffEditor } from 'react-ace';
+import 'ace-builds/src-noconflict/ace';
 import 'ace-builds/src-noconflict/mode-json5';
 import 'ace-builds/src-noconflict/mode-protobuf';
 import 'ace-builds/src-noconflict/theme-textmate';
@@ -1,11 +1,9 @@
-/* eslint-disable react/jsx-props-no-spreading */
 import AceEditor, { IAceEditorProps } from 'react-ace';
 import 'ace-builds/src-noconflict/mode-json5';
 import 'ace-builds/src-noconflict/mode-protobuf';
 import 'ace-builds/src-noconflict/theme-tomorrow';
 import { SchemaType } from 'generated-sources';
 import React from 'react';
-import ReactAce from 'react-ace/lib/ace';
 import styled from 'styled-components';
 
 interface EditorProps extends IAceEditorProps {
@@ -13,7 +11,7 @@ interface EditorProps extends IAceEditorProps {
   schemaType?: string;
 }
 
-const Editor = React.forwardRef<ReactAce | null, EditorProps>((props, ref) => {
+const Editor = React.forwardRef<AceEditor | null, EditorProps>((props, ref) => {
   const { isFixedHeight, schemaType, ...rest } = props;
   return (
     <AceEditor
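With the deep `react-ace/lib/ace` import dropped, the forwarded ref is now typed against the package's main `AceEditor` export. A sketch of consuming such a ref; it assumes react-ace's component instance exposes the underlying Ace editor via its `editor` property:

```tsx
// Sketch under stated assumptions; `Example` is not part of the PR.
import React from 'react';
import AceEditor from 'react-ace';
import Editor from 'components/common/Editor/Editor';

const Example: React.FC = () => {
  const ref = React.useRef<AceEditor | null>(null);

  const focusEditor = () => {
    // `editor` is the raw Ace editor instance attached by react-ace.
    ref.current?.editor.focus();
  };

  return (
    <>
      <Editor ref={ref} value="{}" />
      <button type="button" onClick={focusEditor}>Focus</button>
    </>
  );
};

export default Example;
```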
@@ -225,6 +225,13 @@ export const Ellipsis = styled.div`
   display: block;
 `;
 
-export const TableWrapper = styled.div`
-  overflow-x: auto;
-`;
+export const TableWrapper = styled.div<{ $disabled: boolean }>(
+  ({ $disabled }) => css`
+    overflow-x: auto;
+    ${$disabled &&
+    css`
+      pointer-events: none;
+      opacity: 0.5;
+    `}
+  `
+);
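`$disabled` here follows styled-components' transient-prop convention: a `$`-prefixed prop is consumed for styling and never forwarded to the underlying DOM node. A minimal illustration of the same idea in isolation:

```tsx
// Transient props: `$`-prefixed props style the component but never
// appear as attributes on the rendered DOM element.
import styled, { css } from 'styled-components';

const Panel = styled.div<{ $disabled: boolean }>(
  ({ $disabled }) => css`
    padding: 8px;
    ${$disabled &&
    css`
      pointer-events: none;
      opacity: 0.5;
    `}
  `
);

// Usage: <Panel $disabled={isSaving}>…</Panel> renders a plain <div>
// with no stray `$disabled` attribute in the HTML.
export default Panel;
```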
@@ -48,6 +48,8 @@ export interface TableProps<TData> {
   // Placeholder for empty table
   emptyMessage?: React.ReactNode;
 
+  disabled?: boolean;
+
   // Handles row click. Can not be combined with `enableRowSelection` && expandable rows.
   onRowClick?: (row: Row<TData>) => void;
 }
@@ -123,6 +125,7 @@ const Table: React.FC<TableProps<any>> = ({
   enableRowSelection = false,
   batchActionsBar: BatchActionsBar,
   emptyMessage,
+  disabled,
   onRowClick,
 }) => {
   const [searchParams, setSearchParams] = useSearchParams();
@@ -200,7 +203,7 @@ const Table: React.FC<TableProps<any>> = ({
         />
       </S.TableActionsBar>
     )}
-    <S.TableWrapper>
+    <S.TableWrapper $disabled={!!disabled}>
       <S.Table>
         <thead>
           {table.getHeaderGroups().map((headerGroup) => (
@@ -1,11 +0,0 @@
-import { CellContext } from '@tanstack/react-table';
-import React from 'react';
-
-import * as S from './Table.styled';
-
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-const TruncatedTextCell: React.FC<CellContext<any, unknown>> = ({
-  getValue,
-}) => <S.Ellipsis>{getValue<string>()}</S.Ellipsis>;
-
-export default TruncatedTextCell;
@@ -1,15 +1,15 @@
 /* eslint-disable react/jsx-props-no-spreading */
 import AceEditor, { IAceEditorProps } from 'react-ace';
+import 'ace-builds/src-noconflict/ace';
 import 'ace-builds/src-noconflict/mode-sql';
 import 'ace-builds/src-noconflict/theme-textmate';
 import React from 'react';
-import ReactAce from 'react-ace/lib/ace';
 
 interface SQLEditorProps extends IAceEditorProps {
   isFixedHeight?: boolean;
 }
 
-const SQLEditor = React.forwardRef<ReactAce | null, SQLEditorProps>(
+const SQLEditor = React.forwardRef<AceEditor | null, SQLEditorProps>(
   (props, ref) => {
     const { isFixedHeight, ...rest } = props;
     return (
@@ -8,13 +8,13 @@ import {
 
 import * as S from './Tooltip.styled';
 
-export interface PropsTypes {
+interface TooltipProps {
   value: React.ReactNode;
   content: string;
   placement?: Placement;
 }
 
-const Tooltip: React.FC<PropsTypes> = ({ value, content, placement }) => {
+const Tooltip: React.FC<TooltipProps> = ({ value, content, placement }) => {
   const [open, setOpen] = useState(false);
   const { x, y, refs, strategy, context } = useFloating({
     open,
@@ -3,5 +3,5 @@ import Heading from 'components/common/heading/Heading.styled';
 import styled from 'styled-components';
 
 export const TableTitle = styled((props) => <Heading level={3} {...props} />)`
-  padding: 16px;
+  padding: 16px 16px 0;
 `;
@@ -1,30 +1,5 @@
 import { ConsumerGroupState } from 'generated-sources';
 
-export const consumerGroups = [
-  {
-    groupId: 'groupId1',
-    members: 0,
-    topics: 1,
-    simple: false,
-    partitionAssignor: '',
-    coordinator: {
-      id: 1,
-      host: 'host',
-    },
-  },
-  {
-    groupId: 'groupId2',
-    members: 0,
-    topics: 1,
-    simple: false,
-    partitionAssignor: '',
-    coordinator: {
-      id: 1,
-      host: 'host',
-    },
-  },
-];
-
 export const consumerGroupPayload = {
   groupId: 'amazon.msk.canary.group.broker-1',
   members: 0,
@@ -4,7 +4,7 @@ import { modifyRolesData } from 'lib/permissions';
 export const clusterName1 = 'local';
 export const clusterName2 = 'dev';
 
-export const userPermissionsMock = [
+const userPermissionsMock = [
   {
     clusters: [clusterName1],
     resource: ResourceType.TOPIC,

kafka-ui-react-app/src/lib/hooks/api/consumers.ts (new file, +92)
@@ -0,0 +1,92 @@
+import { consumerGroupsApiClient as api } from 'lib/api';
+import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
+import { ClusterName } from 'redux/interfaces';
+import {
+  ConsumerGroup,
+  ConsumerGroupOffsetsReset,
+  ConsumerGroupOrdering,
+  SortOrder,
+} from 'generated-sources';
+import { showSuccessAlert } from 'lib/errorHandling';
+
+export type ConsumerGroupID = ConsumerGroup['groupId'];
+
+type UseConsumerGroupsProps = {
+  clusterName: ClusterName;
+  orderBy?: ConsumerGroupOrdering;
+  sortOrder?: SortOrder;
+  page?: number;
+  perPage?: number;
+  search: string;
+};
+
+type UseConsumerGroupDetailsProps = {
+  clusterName: ClusterName;
+  consumerGroupID: ConsumerGroupID;
+};
+
+export function useConsumerGroups(props: UseConsumerGroupsProps) {
+  const { clusterName, ...rest } = props;
+  return useQuery(
+    ['clusters', clusterName, 'consumerGroups', rest],
+    () => api.getConsumerGroupsPage(props),
+    { suspense: false, keepPreviousData: true }
+  );
+}
+
+export function useConsumerGroupDetails(props: UseConsumerGroupDetailsProps) {
+  const { clusterName, consumerGroupID } = props;
+  return useQuery(
+    ['clusters', clusterName, 'consumerGroups', consumerGroupID],
+    () => api.getConsumerGroup({ clusterName, id: consumerGroupID })
+  );
+}
+
+export const useDeleteConsumerGroupMutation = ({
+  clusterName,
+  consumerGroupID,
+}: UseConsumerGroupDetailsProps) => {
+  const queryClient = useQueryClient();
+  return useMutation(
+    () => api.deleteConsumerGroup({ clusterName, id: consumerGroupID }),
+    {
+      onSuccess: () => {
+        showSuccessAlert({
+          message: `Consumer ${consumerGroupID} group deleted`,
+        });
+        queryClient.invalidateQueries([
+          'clusters',
+          clusterName,
+          'consumerGroups',
+        ]);
+      },
+    }
+  );
+};
+
+export const useResetConsumerGroupOffsetsMutation = ({
+  clusterName,
+  consumerGroupID,
+}: UseConsumerGroupDetailsProps) => {
+  const queryClient = useQueryClient();
+  return useMutation(
+    (props: ConsumerGroupOffsetsReset) =>
+      api.resetConsumerGroupOffsets({
+        clusterName,
+        id: consumerGroupID,
+        consumerGroupOffsetsReset: props,
+      }),
+    {
+      onSuccess: () => {
+        showSuccessAlert({
+          message: `Consumer ${consumerGroupID} group offsets reset`,
+        });
+        queryClient.invalidateQueries([
+          'clusters',
+          clusterName,
+          'consumerGroups',
+        ]);
+      },
+    }
+  );
+};
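A sketch of how the new hooks compose in a component (the component, the group id, and the loading handling are assumptions, not part of this diff):

    import React from 'react';
    import {
      useConsumerGroups,
      useDeleteConsumerGroupMutation,
    } from 'lib/hooks/api/consumers';

    const GroupsList: React.FC<{ clusterName: string }> = ({ clusterName }) => {
      // keepPreviousData keeps the old page rendered while the next one loads.
      const { data, isFetching } = useConsumerGroups({ clusterName, search: '' });
      // Deleting invalidates ['clusters', clusterName, 'consumerGroups'],
      // so the query above refetches automatically on success.
      const deleteGroup = useDeleteConsumerGroupMutation({
        clusterName,
        consumerGroupID: 'my-group', // assumed id
      });

      if (isFetching && !data) return <p>Loading...</p>;
      return (
        <button type="button" onClick={() => deleteGroup.mutateAsync()}>
          Delete my-group
        </button>
      );
    };

    export default GroupsList;
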
@@ -109,7 +109,7 @@ export function useUpdateConnectorConfig(props: UseConnectorProps) {
     }
   );
 }
-export function useCreateConnectorMutation(clusterName: ClusterName) {
+function useCreateConnectorMutation(clusterName: ClusterName) {
   const client = useQueryClient();
   return useMutation(
     (props: CreateConnectorProps) =>

kafka-ui-react-app/src/lib/hooks/api/ksqlDb.tsx (new file, +184)
@@ -0,0 +1,184 @@
+import { ksqlDbApiClient as api } from 'lib/api';
+import { useMutation, useQueries } from '@tanstack/react-query';
+import { ClusterName } from 'redux/interfaces';
+import { BASE_PARAMS } from 'lib/constants';
+import React from 'react';
+import { fetchEventSource } from '@microsoft/fetch-event-source';
+import {
+  showAlert,
+  showServerError,
+  showSuccessAlert,
+} from 'lib/errorHandling';
+import {
+  ExecuteKsqlRequest,
+  KsqlResponse,
+  KsqlTableResponse,
+} from 'generated-sources';
+import { StopLoading } from 'components/Topics/Topic/Messages/Messages.styled';
+import toast from 'react-hot-toast';
+
+export function useKsqlkDb(clusterName: ClusterName) {
+  return useQueries({
+    queries: [
+      {
+        queryKey: ['clusters', clusterName, 'ksqlDb', 'tables'],
+        queryFn: () => api.listTables({ clusterName }),
+        suspense: false,
+      },
+      {
+        queryKey: ['clusters', clusterName, 'ksqlDb', 'streams'],
+        queryFn: () => api.listStreams({ clusterName }),
+        suspense: false,
+      },
+    ],
+  });
+}
+
+export function useExecuteKsqlkDbQueryMutation() {
+  return useMutation((props: ExecuteKsqlRequest) => api.executeKsql(props));
+}
+
+const getFormattedErrorFromTableData = (
+  responseValues: KsqlTableResponse['values']
+): { title: string; message: string } => {
+  // We expect someting like that
+  // [[
+  //   "@type",
+  //   "error_code",
+  //   "message",
+  //   "statementText"?,
+  //   "entities"?
+  // ]],
+  // or
+  // [["message"]]
+
+  if (!responseValues || !responseValues.length) {
+    return {
+      title: 'Unknown error',
+      message: 'Recieved empty response',
+    };
+  }
+
+  let title = '';
+  let message = '';
+  if (responseValues[0].length < 2) {
+    const [messageText] = responseValues[0];
+    title = messageText;
+  } else {
+    const [type, errorCode, messageText, statementText, entities] =
+      responseValues[0];
+    title = `[Error #${errorCode}] ${type}`;
+    message =
+      (entities?.length ? `[${entities.join(', ')}] ` : '') +
+      (statementText ? `"${statementText}" ` : '') +
+      messageText;
+  }
+
+  return { title, message };
+};
+
+type UseKsqlkDbSSEProps = {
+  pipeId: string | false;
+  clusterName: ClusterName;
+};
+
+export const useKsqlkDbSSE = ({ clusterName, pipeId }: UseKsqlkDbSSEProps) => {
+  const [data, setData] = React.useState<KsqlTableResponse>();
+  const [isFetching, setIsFetching] = React.useState<boolean>(false);
+
+  const abortController = new AbortController();
+
+  React.useEffect(() => {
+    const fetchData = async () => {
+      const url = `${BASE_PARAMS.basePath}/api/clusters/${clusterName}/ksql/response`;
+      await fetchEventSource(
+        `${url}?${new URLSearchParams({ pipeId: pipeId || '' }).toString()}`,
+        {
+          method: 'GET',
+          signal: abortController.signal,
+          openWhenHidden: true,
+          async onopen(response) {
+            const { ok, status } = response;
+            if (ok) setData(undefined); // Reset
+            if (status >= 400 && status < 500 && status !== 429) {
+              showServerError(response);
+            }
+          },
+          onmessage(event) {
+            const { table }: KsqlResponse = JSON.parse(event.data);
+            if (!table) {
+              return;
+            }
+            switch (table?.header) {
+              case 'Execution error': {
+                showAlert('error', {
+                  ...getFormattedErrorFromTableData(table.values),
+                  id: `${url}-executionError`,
+                });
+                break;
+              }
+              case 'Schema':
+                setData(table);
+                break;
+              case 'Row':
+                setData((state) => ({
+                  header: state?.header,
+                  columnNames: state?.columnNames,
+                  values: [...(state?.values || []), ...(table?.values || [])],
+                }));
+                break;
+              case 'Query Result':
+                showSuccessAlert({
+                  id: `${url}-querySuccess`,
+                  title: 'Query succeed',
+                  message: '',
+                });
+                break;
+              case 'Source Description':
+              case 'properties':
+              default:
+                setData(table);
+                break;
+            }
+          },
+          onclose() {
+            setIsFetching(false);
+          },
+          onerror(err) {
+            setIsFetching(false);
+            showServerError(err);
+          },
+        }
+      );
+    };
+
+    const abortFetchData = () => {
+      setIsFetching(false);
+      if (pipeId) abortController.abort();
+    };
+    if (pipeId) {
+      toast.promise(
+        fetchData(),
+        {
+          loading: (
+            <>
+              <div>Consuming query execution result...</div>
+
+              <StopLoading onClick={abortFetchData}>Abort</StopLoading>
+            </>
+          ),
+          success: 'Cancelled',
+          error: 'Something went wrong. Please try again.',
+        },
+        {
+          id: 'messages',
+          success: { duration: 20 },
+        }
+      );
+    }
+
+    return abortFetchData;
+  }, [pipeId]);
+
+  return { data, isFetching };
+};
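A sketch of how the execute mutation and the SSE hook are meant to chain: the execute call returns an identifier for the server-side pipe, and useKsqlkDbSSE streams that pipe's rows until the caller aborts. The request/response field names below (ksqlCommandV2, pipeId) come from the generated API client and are assumptions here, as is the component itself:

    import React from 'react';
    import {
      useExecuteKsqlkDbQueryMutation,
      useKsqlkDbSSE,
    } from 'lib/hooks/api/ksqlDb';

    const QueryRunner: React.FC<{ clusterName: string }> = ({ clusterName }) => {
      const executeQuery = useExecuteKsqlkDbQueryMutation();
      // `false` keeps the SSE effect idle until a pipe id exists.
      const [pipeId, setPipeId] = React.useState<string | false>(false);
      const { data, isFetching } = useKsqlkDbSSE({ clusterName, pipeId });

      const run = async () => {
        // assumption: executeKsql resolves with the pipeId the SSE endpoint expects
        const response = await executeQuery.mutateAsync({
          clusterName,
          ksqlCommandV2: { ksql: 'SHOW TABLES;' },
        });
        setPipeId(response.pipeId ?? false);
      };

      return (
        <div>
          <button type="button" onClick={run}>Run</button>
          {isFetching && <span>streaming...</span>}
          <pre>{JSON.stringify(data?.values, null, 2)}</pre>
        </div>
      );
    };

    export default QueryRunner;
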
Some files were not shown because too many files have changed in this diff.