浏览代码

Merge branch 'master' into vlad/develop

VladSenyuta 2 年之前
父节点
当前提交
90c4d68d25
共有 94 个文件被更改,包括 1278 次插入和 2692 次删除
  1. 1 1
      .github/workflows/aws_publisher.yaml
  2. 1 1
      .github/workflows/branch-deploy.yml
  3. 1 1
      .github/workflows/build-public-image.yml
  4. 1 1
      .github/workflows/delete-public-image.yml
  5. 1 1
      .github/workflows/e2e-automation.yml
  6. 1 1
      .github/workflows/e2e-checks.yaml
  7. 1 1
      .github/workflows/e2e-weekly.yml
  8. 1 1
      .github/workflows/separate_env_public_create.yml
  9. 1 1
      .github/workflows/terraform-deploy.yml
  10. 9 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
  11. 1 41
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java
  12. 5 12
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/AbstractEmitter.java
  13. 10 14
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/BackwardRecordEmitter.java
  14. 28 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/EmptyPollsCounter.java
  15. 6 7
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ForwardRecordEmitter.java
  16. 79 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PollingSettings.java
  17. 2 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PollingThrottler.java
  18. 1 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ResultSizeLimiter.java
  19. 2 3
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/TailingEmitter.java
  20. 2 3
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java
  21. 1 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClustersStorage.java
  22. 4 3
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java
  23. 4 4
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java
  24. 13 6
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
  25. 10 9
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/analyze/TopicAnalysisService.java
  26. 11 11
      kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java
  27. 1 0
      kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/PollingThrottlerTest.java
  28. 9 0
      kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
  29. 1 2
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java
  30. 6 6
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java
  31. 14 0
      kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/backlog/SmokeBacklog.java
  32. 17 4
      kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
  33. 3 0
      kafka-ui-react-app/src/components/App.tsx
  34. 3 2
      kafka-ui-react-app/src/components/ConsumerGroups/ConsumerGroups.tsx
  35. 18 35
      kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx
  36. 197 0
      kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/Form.tsx
  37. 23 36
      kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.styled.ts
  38. 29 292
      kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx
  39. 0 158
      kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/__test__/ResetOffsets.spec.tsx
  40. 1 1
      kafka-ui-react-app/src/components/ConsumerGroups/Details/TopicContents/__test__/TopicContents.spec.tsx
  41. 0 114
      kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx
  42. 0 48
      kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/ListItem.spec.tsx
  43. 20 27
      kafka-ui-react-app/src/components/ConsumerGroups/List.tsx
  44. 0 16
      kafka-ui-react-app/src/components/ConsumerGroups/List/ListContainer.tsx
  45. 0 60
      kafka-ui-react-app/src/components/ConsumerGroups/List/__test__/List.spec.tsx
  46. 2 4
      kafka-ui-react-app/src/components/ConsumerGroups/__test__/ConsumerGroups.spec.tsx
  47. 101 7
      kafka-ui-react-app/src/components/KsqlDb/KsqlDb.tsx
  48. 0 58
      kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/KsqlDbItem.tsx
  49. 0 59
      kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/__test__/KsqlDbItem.spec.tsx
  50. 0 12
      kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/utils/ksqlRowData.ts
  51. 0 111
      kafka-ui-react-app/src/components/KsqlDb/List/List.tsx
  52. 0 22
      kafka-ui-react-app/src/components/KsqlDb/List/__test__/List.spec.tsx
  53. 0 9
      kafka-ui-react-app/src/components/KsqlDb/Query/Query.styled.ts
  54. 36 205
      kafka-ui-react-app/src/components/KsqlDb/Query/Query.tsx
  55. 12 51
      kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.styled.ts
  56. 139 153
      kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.tsx
  57. 0 189
      kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/__test__/QueryForm.spec.tsx
  58. 0 116
      kafka-ui-react-app/src/components/KsqlDb/Query/__test__/Query.spec.tsx
  59. 6 12
      kafka-ui-react-app/src/components/KsqlDb/Query/renderer/TableRenderer/TableRenderer.tsx
  60. 0 71
      kafka-ui-react-app/src/components/KsqlDb/Query/renderer/TableRenderer/__test__/TableRenderer.spec.tsx
  61. 39 0
      kafka-ui-react-app/src/components/KsqlDb/TableView.tsx
  62. 0 42
      kafka-ui-react-app/src/components/KsqlDb/__test__/KsqlDb.spec.tsx
  63. 0 6
      kafka-ui-react-app/src/components/Schemas/Details/__test__/fixtures.ts
  64. 1 1
      kafka-ui-react-app/src/components/common/ActionComponent/__tests__/fixtures.ts
  65. 1 0
      kafka-ui-react-app/src/components/common/DiffViewer/DiffViewer.tsx
  66. 1 3
      kafka-ui-react-app/src/components/common/Editor/Editor.tsx
  67. 10 3
      kafka-ui-react-app/src/components/common/NewTable/Table.styled.ts
  68. 4 1
      kafka-ui-react-app/src/components/common/NewTable/Table.tsx
  69. 0 11
      kafka-ui-react-app/src/components/common/NewTable/TimestampCell copy.tsx
  70. 2 2
      kafka-ui-react-app/src/components/common/SQLEditor/SQLEditor.tsx
  71. 1 0
      kafka-ui-react-app/src/components/common/Select/ControlledSelect.tsx
  72. 86 77
      kafka-ui-react-app/src/components/common/Select/Select.tsx
  73. 2 2
      kafka-ui-react-app/src/components/common/Tooltip/Tooltip.tsx
  74. 1 1
      kafka-ui-react-app/src/components/common/table/TableTitle/TableTitle.styled.tsx
  75. 0 25
      kafka-ui-react-app/src/lib/fixtures/consumerGroups.ts
  76. 1 1
      kafka-ui-react-app/src/lib/hooks/__tests__/fixtures.ts
  77. 92 0
      kafka-ui-react-app/src/lib/hooks/api/consumers.ts
  78. 1 1
      kafka-ui-react-app/src/lib/hooks/api/kafkaConnect.ts
  79. 184 0
      kafka-ui-react-app/src/lib/hooks/api/ksqlDb.tsx
  80. 1 1
      kafka-ui-react-app/src/lib/hooks/useMessageFiltersStore.ts
  81. 4 8
      kafka-ui-react-app/src/lib/paths.ts
  82. 2 1
      kafka-ui-react-app/src/lib/yupExtended.ts
  83. 1 2
      kafka-ui-react-app/src/redux/interfaces/consumerGroup.ts
  84. 0 19
      kafka-ui-react-app/src/redux/interfaces/ksqlDb.ts
  85. 0 49
      kafka-ui-react-app/src/redux/reducers/consumerGroups/__test__/consumerGroupSlice.spec.ts
  86. 0 223
      kafka-ui-react-app/src/redux/reducers/consumerGroups/consumerGroupsSlice.ts
  87. 0 4
      kafka-ui-react-app/src/redux/reducers/index.ts
  88. 0 43
      kafka-ui-react-app/src/redux/reducers/ksqlDb/__test__/fixtures.ts
  89. 0 51
      kafka-ui-react-app/src/redux/reducers/ksqlDb/__test__/selectors.spec.ts
  90. 0 75
      kafka-ui-react-app/src/redux/reducers/ksqlDb/ksqlDbSlice.ts
  91. 0 33
      kafka-ui-react-app/src/redux/reducers/ksqlDb/selectors.ts
  92. 1 1
      kafka-ui-react-app/src/widgets/ClusterConfigForm/index.tsx
  93. 7 0
      kafka-ui-react-app/vite.config.ts
  94. 1 1
      kafka-ui-serde-api/pom.xml

+ 1 - 1
.github/workflows/aws_publisher.yaml

@@ -31,7 +31,7 @@ jobs:
           echo "Packer will be triggered in this dir $WORK_DIR"
 
       - name: Configure AWS credentials for Kafka-UI account
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v2
         with:
           aws-access-key-id: ${{ secrets.AWS_AMI_PUBLISH_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_AMI_PUBLISH_KEY_SECRET }}

+ 1 - 1
.github/workflows/branch-deploy.yml

@@ -45,7 +45,7 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-buildx-
       - name: Configure AWS credentials for Kafka-UI account
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v2
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

+ 1 - 1
.github/workflows/build-public-image.yml

@@ -42,7 +42,7 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-buildx-
       - name: Configure AWS credentials for Kafka-UI account
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v2
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

+ 1 - 1
.github/workflows/delete-public-image.yml

@@ -15,7 +15,7 @@ jobs:
           tag='${{ github.event.pull_request.number }}'
           echo "tag=${tag}" >> $GITHUB_OUTPUT
       - name: Configure AWS credentials for Kafka-UI account
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v2
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

+ 1 - 1
.github/workflows/e2e-automation.yml

@@ -24,7 +24,7 @@ jobs:
         with:
           ref: ${{ github.sha }}
       - name: Configure AWS credentials
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v2
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

+ 1 - 1
.github/workflows/e2e-checks.yaml

@@ -16,7 +16,7 @@ jobs:
         with:
           ref: ${{ github.event.pull_request.head.sha }}
       - name: Configure AWS credentials for Kafka-UI account
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v2
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

+ 1 - 1
.github/workflows/e2e-weekly.yml

@@ -11,7 +11,7 @@ jobs:
         with:
           ref: ${{ github.sha }}
       - name: Configure AWS credentials
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v2
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

+ 1 - 1
.github/workflows/separate_env_public_create.yml

@@ -47,7 +47,7 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-buildx-
       - name: Configure AWS credentials for Kafka-UI account
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v2
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

+ 1 - 1
.github/workflows/terraform-deploy.yml

@@ -26,7 +26,7 @@ jobs:
           echo "Terraform will be triggered in this dir $TF_DIR"
 
       - name: Configure AWS credentials for Kafka-UI account
-        uses: aws-actions/configure-aws-credentials@v1-node16
+        uses: aws-actions/configure-aws-credentials@v2
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

+ 9 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java

@@ -27,6 +27,8 @@ public class ClustersProperties {
 
   String internalTopicPrefix;
 
+  PollingProperties polling = new PollingProperties();
+
   @Data
   public static class Cluster {
     String name;
@@ -49,6 +51,13 @@ public class ClustersProperties {
     TruststoreConfig ssl;
   }
 
+  @Data
+  public static class PollingProperties {
+    Integer pollTimeoutMs;
+    Integer partitionPollTimeout;
+    Integer noDataEmptyPolls;
+  }
+
   @Data
   @ToString(exclude = "password")
   public static class MetricsConfigData {

+ 1 - 41
kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java

@@ -1,25 +1,12 @@
 package com.provectus.kafka.ui.config;
 
-import lombok.AllArgsConstructor;
-import org.springframework.boot.autoconfigure.web.ServerProperties;
-import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
-import org.springframework.context.annotation.Profile;
-import org.springframework.core.io.ClassPathResource;
-import org.springframework.util.StringUtils;
 import org.springframework.web.reactive.config.CorsRegistry;
 import org.springframework.web.reactive.config.WebFluxConfigurer;
-import org.springframework.web.reactive.function.server.RouterFunction;
-import org.springframework.web.reactive.function.server.RouterFunctions;
-import org.springframework.web.reactive.function.server.ServerResponse;
 
 @Configuration
-@Profile("local")
-@AllArgsConstructor
 public class CorsGlobalConfiguration implements WebFluxConfigurer {
 
-  private final ServerProperties serverProperties;
-
   @Override
   public void addCorsMappings(CorsRegistry registry) {
     registry.addMapping("/**")
@@ -28,31 +15,4 @@ public class CorsGlobalConfiguration implements WebFluxConfigurer {
         .allowedHeaders("*")
         .allowCredentials(false);
   }
-
-  private String withContext(String pattern) {
-    final String basePath = serverProperties.getServlet().getContextPath();
-    if (StringUtils.hasText(basePath)) {
-      return basePath + pattern;
-    } else {
-      return pattern;
-    }
-  }
-
-  @Bean
-  public RouterFunction<ServerResponse> cssFilesRouter() {
-    return RouterFunctions
-        .resources(withContext("/static/css/**"), new ClassPathResource("static/static/css/"));
-  }
-
-  @Bean
-  public RouterFunction<ServerResponse> jsFilesRouter() {
-    return RouterFunctions
-        .resources(withContext("/static/js/**"), new ClassPathResource("static/static/js/"));
-  }
-
-  @Bean
-  public RouterFunction<ServerResponse> mediaFilesRouter() {
-    return RouterFunctions
-        .resources(withContext("/static/media/**"), new ClassPathResource("static/static/media/"));
-  }
-}
+}

+ 5 - 12
kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/AbstractEmitter.java

@@ -4,7 +4,6 @@ import com.provectus.kafka.ui.model.TopicMessageDTO;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.model.TopicMessagePhaseDTO;
 import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
-import com.provectus.kafka.ui.util.PollingThrottler;
 import java.time.Duration;
 import java.time.Instant;
 import org.apache.kafka.clients.consumer.Consumer;
@@ -14,27 +13,21 @@ import org.apache.kafka.common.utils.Bytes;
 import reactor.core.publisher.FluxSink;
 
 public abstract class AbstractEmitter {
-  private static final Duration DEFAULT_POLL_TIMEOUT_MS = Duration.ofMillis(1000L);
-
-  // In some situations it is hard to say whether records range (between two offsets) was fully polled.
-  // This happens when we have holes in records sequences that is usual case for compact topics or
-  // topics with transactional writes. In such cases if you want to poll all records between offsets X and Y
-  // there is no guarantee that you will ever see record with offset Y.
-  // To workaround this we can assume that after N consecutive empty polls all target messages were read.
-  public static final int NO_MORE_DATA_EMPTY_POLLS_COUNT = 3;
 
   private final ConsumerRecordDeserializer recordDeserializer;
   private final ConsumingStats consumingStats = new ConsumingStats();
   private final PollingThrottler throttler;
+  protected final PollingSettings pollingSettings;
 
-  protected AbstractEmitter(ConsumerRecordDeserializer recordDeserializer, PollingThrottler throttler) {
+  protected AbstractEmitter(ConsumerRecordDeserializer recordDeserializer, PollingSettings pollingSettings) {
     this.recordDeserializer = recordDeserializer;
-    this.throttler = throttler;
+    this.pollingSettings = pollingSettings;
+    this.throttler = pollingSettings.getPollingThrottler();
   }
 
   protected ConsumerRecords<Bytes, Bytes> poll(
       FluxSink<TopicMessageEventDTO> sink, Consumer<Bytes, Bytes> consumer) {
-    return poll(sink, consumer, DEFAULT_POLL_TIMEOUT_MS);
+    return poll(sink, consumer, pollingSettings.getPollTimeout());
   }
 
   protected ConsumerRecords<Bytes, Bytes> poll(

+ 10 - 14
kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/BackwardRecordEmitter.java

@@ -3,15 +3,12 @@ package com.provectus.kafka.ui.emitter;
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
-import com.provectus.kafka.ui.util.PollingThrottler;
-import java.time.Duration;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 import java.util.TreeMap;
 import java.util.function.Supplier;
-import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.consumer.Consumer;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
@@ -26,8 +23,6 @@ public class BackwardRecordEmitter
     extends AbstractEmitter
     implements java.util.function.Consumer<FluxSink<TopicMessageEventDTO>> {
 
-  private static final Duration POLL_TIMEOUT = Duration.ofMillis(200);
-
   private final Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier;
   private final ConsumerPosition consumerPosition;
   private final int messagesPerPage;
@@ -37,8 +32,8 @@ public class BackwardRecordEmitter
       ConsumerPosition consumerPosition,
       int messagesPerPage,
       ConsumerRecordDeserializer recordDeserializer,
-      PollingThrottler throttler) {
-    super(recordDeserializer, throttler);
+      PollingSettings pollingSettings) {
+    super(recordDeserializer, pollingSettings);
     this.consumerPosition = consumerPosition;
     this.messagesPerPage = messagesPerPage;
     this.consumerSupplier = consumerSupplier;
@@ -109,17 +104,18 @@ public class BackwardRecordEmitter
 
     var recordsToSend = new ArrayList<ConsumerRecord<Bytes, Bytes>>();
 
-    // we use empty polls counting to verify that partition was fully read
-    for (int emptyPolls = 0; recordsToSend.size() < desiredMsgsToPoll && emptyPolls < NO_MORE_DATA_EMPTY_POLLS_COUNT;) {
-      var polledRecords = poll(sink, consumer, POLL_TIMEOUT);
-      log.debug("{} records polled from {}", polledRecords.count(), tp);
+    EmptyPollsCounter emptyPolls  = pollingSettings.createEmptyPollsCounter();
+    while (!sink.isCancelled()
+        && recordsToSend.size() < desiredMsgsToPoll
+        && !emptyPolls.noDataEmptyPollsReached()) {
+      var polledRecords = poll(sink, consumer, pollingSettings.getPartitionPollTimeout());
+      emptyPolls.count(polledRecords);
 
-      // counting sequential empty polls
-      emptyPolls = polledRecords.isEmpty() ? emptyPolls + 1 : 0;
+      log.debug("{} records polled from {}", polledRecords.count(), tp);
 
       var filteredRecords = polledRecords.records(tp).stream()
           .filter(r -> r.offset() < toOffset)
-          .collect(Collectors.toList());
+          .toList();
 
       if (!polledRecords.isEmpty() && filteredRecords.isEmpty()) {
         // we already read all messages in target offsets interval

+ 28 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/EmptyPollsCounter.java

@@ -0,0 +1,28 @@
+package com.provectus.kafka.ui.emitter;
+
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+
+// In some situations it is hard to say whether records range (between two offsets) was fully polled.
+// This happens when we have holes in records sequences that is usual case for compact topics or
+// topics with transactional writes. In such cases if you want to poll all records between offsets X and Y
+// there is no guarantee that you will ever see record with offset Y.
+// To workaround this we can assume that after N consecutive empty polls all target messages were read.
+public class EmptyPollsCounter {
+
+  private final int maxEmptyPolls;
+
+  private int emptyPolls = 0;
+
+  EmptyPollsCounter(int maxEmptyPolls) {
+    this.maxEmptyPolls = maxEmptyPolls;
+  }
+
+  public void count(ConsumerRecords<?, ?> polled) {
+    emptyPolls = polled.isEmpty() ? emptyPolls + 1 : 0;
+  }
+
+  public boolean noDataEmptyPollsReached() {
+    return emptyPolls >= maxEmptyPolls;
+  }
+
+}

+ 6 - 7
kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ForwardRecordEmitter.java

@@ -3,7 +3,6 @@ package com.provectus.kafka.ui.emitter;
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
-import com.provectus.kafka.ui.util.PollingThrottler;
 import java.util.function.Supplier;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
@@ -25,8 +24,8 @@ public class ForwardRecordEmitter
       Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
       ConsumerPosition position,
       ConsumerRecordDeserializer recordDeserializer,
-      PollingThrottler throttler) {
-    super(recordDeserializer, throttler);
+      PollingSettings pollingSettings) {
+    super(recordDeserializer, pollingSettings);
     this.position = position;
     this.consumerSupplier = consumerSupplier;
   }
@@ -39,16 +38,16 @@ public class ForwardRecordEmitter
       var seekOperations = SeekOperations.create(consumer, position);
       seekOperations.assignAndSeekNonEmptyPartitions();
 
-      // we use empty polls counting to verify that topic was fully read
-      int emptyPolls = 0;
+      EmptyPollsCounter emptyPolls = pollingSettings.createEmptyPollsCounter();
       while (!sink.isCancelled()
           && !seekOperations.assignedPartitionsFullyPolled()
-          && emptyPolls < NO_MORE_DATA_EMPTY_POLLS_COUNT) {
+          && !emptyPolls.noDataEmptyPollsReached()) {
 
         sendPhase(sink, "Polling");
         ConsumerRecords<Bytes, Bytes> records = poll(sink, consumer);
+        emptyPolls.count(records);
+
         log.debug("{} records polled", records.count());
-        emptyPolls = records.isEmpty() ? emptyPolls + 1 : 0;
 
         for (ConsumerRecord<Bytes, Bytes> msg : records) {
           if (!sink.isCancelled()) {

+ 79 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PollingSettings.java

@@ -0,0 +1,79 @@
+package com.provectus.kafka.ui.emitter;
+
+import com.provectus.kafka.ui.config.ClustersProperties;
+import java.time.Duration;
+import java.util.Optional;
+import java.util.function.Supplier;
+
+public class PollingSettings {
+
+  private static final Duration DEFAULT_POLL_TIMEOUT = Duration.ofMillis(1_000);
+  private static final Duration DEFAULT_PARTITION_POLL_TIMEOUT = Duration.ofMillis(200);
+  private static final int DEFAULT_NO_DATA_EMPTY_POLLS = 3;
+
+  private final Duration pollTimeout;
+  private final Duration partitionPollTimeout;
+  private final int notDataEmptyPolls; //see EmptyPollsCounter docs
+
+  private final Supplier<PollingThrottler> throttlerSupplier;
+
+  public static PollingSettings create(ClustersProperties.Cluster cluster,
+                                       ClustersProperties clustersProperties) {
+    var pollingProps = Optional.ofNullable(clustersProperties.getPolling())
+        .orElseGet(ClustersProperties.PollingProperties::new);
+
+    var pollTimeout = pollingProps.getPollTimeoutMs() != null
+        ? Duration.ofMillis(pollingProps.getPollTimeoutMs())
+        : DEFAULT_POLL_TIMEOUT;
+
+    var partitionPollTimeout = pollingProps.getPartitionPollTimeout() != null
+        ? Duration.ofMillis(pollingProps.getPartitionPollTimeout())
+        : Duration.ofMillis(pollTimeout.toMillis() / 5);
+
+    int noDataEmptyPolls = pollingProps.getNoDataEmptyPolls() != null
+        ? pollingProps.getNoDataEmptyPolls()
+        : DEFAULT_NO_DATA_EMPTY_POLLS;
+
+    return new PollingSettings(
+        pollTimeout,
+        partitionPollTimeout,
+        noDataEmptyPolls,
+        PollingThrottler.throttlerSupplier(cluster)
+    );
+  }
+
+  public static PollingSettings createDefault() {
+    return new PollingSettings(
+        DEFAULT_POLL_TIMEOUT,
+        DEFAULT_PARTITION_POLL_TIMEOUT,
+        DEFAULT_NO_DATA_EMPTY_POLLS,
+        PollingThrottler::noop
+    );
+  }
+
+  private PollingSettings(Duration pollTimeout,
+                          Duration partitionPollTimeout,
+                          int notDataEmptyPolls,
+                          Supplier<PollingThrottler> throttlerSupplier) {
+    this.pollTimeout = pollTimeout;
+    this.partitionPollTimeout = partitionPollTimeout;
+    this.notDataEmptyPolls = notDataEmptyPolls;
+    this.throttlerSupplier = throttlerSupplier;
+  }
+
+  public EmptyPollsCounter createEmptyPollsCounter() {
+    return new EmptyPollsCounter(notDataEmptyPolls);
+  }
+
+  public Duration getPollTimeout() {
+    return pollTimeout;
+  }
+
+  public Duration getPartitionPollTimeout() {
+    return partitionPollTimeout;
+  }
+
+  public PollingThrottler getPollingThrottler() {
+    return throttlerSupplier.get();
+  }
+}

+ 2 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/PollingThrottler.java → kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PollingThrottler.java

@@ -1,8 +1,9 @@
-package com.provectus.kafka.ui.util;
+package com.provectus.kafka.ui.emitter;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.util.concurrent.RateLimiter;
 import com.provectus.kafka.ui.config.ClustersProperties;
+import com.provectus.kafka.ui.util.ConsumerRecordsUtil;
 import java.util.function.Supplier;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.consumer.ConsumerRecords;

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ResultSizeLimiter.java → kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ResultSizeLimiter.java

@@ -1,4 +1,4 @@
-package com.provectus.kafka.ui.util;
+package com.provectus.kafka.ui.emitter;
 
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import java.util.concurrent.atomic.AtomicInteger;

+ 2 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/TailingEmitter.java

@@ -3,7 +3,6 @@ package com.provectus.kafka.ui.emitter;
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
-import com.provectus.kafka.ui.util.PollingThrottler;
 import java.util.HashMap;
 import java.util.function.Supplier;
 import lombok.extern.slf4j.Slf4j;
@@ -22,8 +21,8 @@ public class TailingEmitter extends AbstractEmitter
   public TailingEmitter(Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
                         ConsumerPosition consumerPosition,
                         ConsumerRecordDeserializer recordDeserializer,
-                        PollingThrottler throttler) {
-    super(recordDeserializer, throttler);
+                        PollingSettings pollingSettings) {
+    super(recordDeserializer, pollingSettings);
     this.consumerSupplier = consumerSupplier;
     this.consumerPosition = consumerPosition;
   }

+ 2 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java

@@ -2,14 +2,13 @@ package com.provectus.kafka.ui.model;
 
 import com.provectus.kafka.ui.config.ClustersProperties;
 import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
+import com.provectus.kafka.ui.emitter.PollingSettings;
 import com.provectus.kafka.ui.service.ksql.KsqlApiClient;
 import com.provectus.kafka.ui.service.masking.DataMasking;
 import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
-import com.provectus.kafka.ui.util.PollingThrottler;
 import com.provectus.kafka.ui.util.ReactiveFailover;
 import java.util.Map;
 import java.util.Properties;
-import java.util.function.Supplier;
 import lombok.AccessLevel;
 import lombok.AllArgsConstructor;
 import lombok.Builder;
@@ -28,7 +27,7 @@ public class KafkaCluster {
   private final boolean readOnly;
   private final MetricsConfig metricsConfig;
   private final DataMasking masking;
-  private final Supplier<PollingThrottler> throttler;
+  private final PollingSettings pollingSettings;
   private final ReactiveFailover<KafkaSrClientApi> schemaRegistryClient;
   private final Map<String, ReactiveFailover<KafkaConnectClientApi>> connectsClients;
   private final ReactiveFailover<KsqlApiClient> ksqlClient;

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClustersStorage.java

@@ -14,7 +14,7 @@ public class ClustersStorage {
 
   public ClustersStorage(ClustersProperties properties, KafkaClusterFactory factory) {
     var builder = ImmutableMap.<String, KafkaCluster>builder();
-    properties.getClusters().forEach(c -> builder.put(c.getName(), factory.create(c)));
+    properties.getClusters().forEach(c -> builder.put(c.getName(), factory.create(properties, c)));
     this.kafkaClusters = builder.build();
   }
 

+ 4 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java

@@ -3,6 +3,7 @@ package com.provectus.kafka.ui.service;
 import com.provectus.kafka.ui.client.RetryingKafkaConnectClient;
 import com.provectus.kafka.ui.config.ClustersProperties;
 import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
+import com.provectus.kafka.ui.emitter.PollingSettings;
 import com.provectus.kafka.ui.model.ApplicationPropertyValidationDTO;
 import com.provectus.kafka.ui.model.ClusterConfigValidationDTO;
 import com.provectus.kafka.ui.model.KafkaCluster;
@@ -12,7 +13,6 @@ import com.provectus.kafka.ui.service.masking.DataMasking;
 import com.provectus.kafka.ui.sr.ApiClient;
 import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
 import com.provectus.kafka.ui.util.KafkaServicesValidation;
-import com.provectus.kafka.ui.util.PollingThrottler;
 import com.provectus.kafka.ui.util.ReactiveFailover;
 import com.provectus.kafka.ui.util.WebClientConfigurator;
 import java.util.HashMap;
@@ -41,7 +41,8 @@ public class KafkaClusterFactory {
   @Value("${webclient.max-in-memory-buffer-size:20MB}")
   private DataSize maxBuffSize;
 
-  public KafkaCluster create(ClustersProperties.Cluster clusterProperties) {
+  public KafkaCluster create(ClustersProperties properties,
+                             ClustersProperties.Cluster clusterProperties) {
     KafkaCluster.KafkaClusterBuilder builder = KafkaCluster.builder();
 
     builder.name(clusterProperties.getName());
@@ -49,7 +50,7 @@ public class KafkaClusterFactory {
     builder.properties(convertProperties(clusterProperties.getProperties()));
     builder.readOnly(clusterProperties.isReadOnly());
     builder.masking(DataMasking.create(clusterProperties.getMasking()));
-    builder.throttler(PollingThrottler.throttlerSupplier(clusterProperties));
+    builder.pollingSettings(PollingSettings.create(clusterProperties, properties));
 
     if (schemaRegistryConfigured(clusterProperties)) {
       builder.schemaRegistryClient(schemaRegistryClient(clusterProperties));

+ 4 - 4
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java

@@ -5,6 +5,7 @@ import com.provectus.kafka.ui.emitter.BackwardRecordEmitter;
 import com.provectus.kafka.ui.emitter.ForwardRecordEmitter;
 import com.provectus.kafka.ui.emitter.MessageFilterStats;
 import com.provectus.kafka.ui.emitter.MessageFilters;
+import com.provectus.kafka.ui.emitter.ResultSizeLimiter;
 import com.provectus.kafka.ui.emitter.TailingEmitter;
 import com.provectus.kafka.ui.exception.TopicNotFoundException;
 import com.provectus.kafka.ui.exception.ValidationException;
@@ -17,7 +18,6 @@ import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.serde.api.Serde;
 import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
 import com.provectus.kafka.ui.serdes.ProducerRecordCreator;
-import com.provectus.kafka.ui.util.ResultSizeLimiter;
 import com.provectus.kafka.ui.util.SslPropertiesUtil;
 import java.util.List;
 import java.util.Map;
@@ -169,7 +169,7 @@ public class MessagesService {
           () -> consumerGroupService.createConsumer(cluster),
           consumerPosition,
           recordDeserializer,
-          cluster.getThrottler().get()
+          cluster.getPollingSettings()
       );
     } else if (seekDirection.equals(SeekDirectionDTO.BACKWARD)) {
       emitter = new BackwardRecordEmitter(
@@ -177,14 +177,14 @@ public class MessagesService {
           consumerPosition,
           limit,
           recordDeserializer,
-          cluster.getThrottler().get()
+          cluster.getPollingSettings()
       );
     } else {
       emitter = new TailingEmitter(
           () -> consumerGroupService.createConsumer(cluster),
           consumerPosition,
           recordDeserializer,
-          cluster.getThrottler().get()
+          cluster.getPollingSettings()
       );
     }
     MessageFilterStats filterStats = new MessageFilterStats();

+ 13 - 6
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java

@@ -212,17 +212,24 @@ public class ReactiveAdminClient implements Closeable {
         .map(brokerId -> new ConfigResource(ConfigResource.Type.BROKER, Integer.toString(brokerId)))
         .collect(toList());
     return toMono(client.describeConfigs(resources).all())
-        // some kafka backends (like MSK serverless) do not support broker's configs retrieval,
-        // in that case InvalidRequestException will be thrown
-        .onErrorResume(InvalidRequestException.class, th -> {
-          log.trace("Error while getting broker {} configs", brokerIds, th);
-          return Mono.just(Map.of());
-        })
+        // some kafka backends don't support broker's configs retrieval,
+        // and throw various exceptions on describeConfigs() call
+        .onErrorResume(th -> th instanceof InvalidRequestException // MSK Serverless
+                || th instanceof UnknownTopicOrPartitionException, // Azure event hub
+            th -> {
+              log.trace("Error while getting configs for brokers {}", brokerIds, th);
+              return Mono.just(Map.of());
+            })
         // there are situations when kafka-ui user has no DESCRIBE_CONFIGS permission on cluster
         .onErrorResume(ClusterAuthorizationException.class, th -> {
           log.trace("AuthorizationException while getting configs for brokers {}", brokerIds, th);
           return Mono.just(Map.of());
         })
+        // catching all remaining exceptions, but logging on WARN level
+        .onErrorResume(th -> true, th -> {
+          log.warn("Unexpected error while getting configs for brokers {}", brokerIds, th);
+          return Mono.just(Map.of());
+        })
         .map(config -> config.entrySet().stream()
             .collect(toMap(
                 c -> Integer.valueOf(c.getKey().name()),

+ 10 - 9
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/analyze/TopicAnalysisService.java

@@ -1,14 +1,14 @@
 package com.provectus.kafka.ui.service.analyze;
 
-import static com.provectus.kafka.ui.emitter.AbstractEmitter.NO_MORE_DATA_EMPTY_POLLS_COUNT;
-
+import com.provectus.kafka.ui.emitter.EmptyPollsCounter;
 import com.provectus.kafka.ui.emitter.OffsetsInfo;
+import com.provectus.kafka.ui.emitter.PollingSettings;
+import com.provectus.kafka.ui.emitter.PollingThrottler;
 import com.provectus.kafka.ui.exception.TopicAnalysisException;
 import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.model.TopicAnalysisDTO;
 import com.provectus.kafka.ui.service.ConsumerGroupService;
 import com.provectus.kafka.ui.service.TopicsService;
-import com.provectus.kafka.ui.util.PollingThrottler;
 import java.io.Closeable;
 import java.time.Duration;
 import java.time.Instant;
@@ -63,7 +63,7 @@ public class TopicAnalysisService {
     if (analysisTasksStore.isAnalysisInProgress(topicId)) {
       throw new TopicAnalysisException("Topic is already analyzing");
     }
-    var task = new AnalysisTask(cluster, topicId, partitionsCnt, approxNumberOfMsgs, cluster.getThrottler().get());
+    var task = new AnalysisTask(cluster, topicId, partitionsCnt, approxNumberOfMsgs, cluster.getPollingSettings());
     analysisTasksStore.registerNewTask(topicId, task);
     Schedulers.boundedElastic().schedule(task);
   }
@@ -83,6 +83,7 @@ public class TopicAnalysisService {
     private final TopicIdentity topicId;
     private final int partitionsCnt;
     private final long approxNumberOfMsgs;
+    private final EmptyPollsCounter emptyPollsCounter;
     private final PollingThrottler throttler;
 
     private final TopicAnalysisStats totalStats = new TopicAnalysisStats();
@@ -91,7 +92,7 @@ public class TopicAnalysisService {
     private final KafkaConsumer<Bytes, Bytes> consumer;
 
     AnalysisTask(KafkaCluster cluster, TopicIdentity topicId, int partitionsCnt,
-                 long approxNumberOfMsgs, PollingThrottler throttler) {
+                 long approxNumberOfMsgs, PollingSettings pollingSettings) {
       this.topicId = topicId;
       this.approxNumberOfMsgs = approxNumberOfMsgs;
       this.partitionsCnt = partitionsCnt;
@@ -103,7 +104,8 @@ public class TopicAnalysisService {
               ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "100000"
           )
       );
-      this.throttler = throttler;
+      this.throttler = pollingSettings.getPollingThrottler();
+      this.emptyPollsCounter = pollingSettings.createEmptyPollsCounter();
     }
 
     @Override
@@ -124,11 +126,10 @@ public class TopicAnalysisService {
         consumer.seekToBeginning(topicPartitions);
 
         var offsetsInfo = new OffsetsInfo(consumer, topicId.topicName);
-        for (int emptyPolls = 0; !offsetsInfo.assignedPartitionsFullyPolled()
-            && emptyPolls < NO_MORE_DATA_EMPTY_POLLS_COUNT;) {
+        while (!offsetsInfo.assignedPartitionsFullyPolled() && !emptyPollsCounter.noDataEmptyPollsReached()) {
           var polled = consumer.poll(Duration.ofSeconds(3));
           throttler.throttleAfterPoll(polled);
-          emptyPolls = polled.isEmpty() ? emptyPolls + 1 : 0;
+          emptyPollsCounter.count(polled);
           polled.forEach(r -> {
             totalStats.apply(r);
             partitionStats.get(r.partition()).apply(r);

+ 11 - 11
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java

@@ -9,6 +9,7 @@ import static org.assertj.core.api.Assertions.assertThat;
 import com.provectus.kafka.ui.AbstractIntegrationTest;
 import com.provectus.kafka.ui.emitter.BackwardRecordEmitter;
 import com.provectus.kafka.ui.emitter.ForwardRecordEmitter;
+import com.provectus.kafka.ui.emitter.PollingSettings;
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.producer.KafkaTestProducer;
@@ -16,7 +17,6 @@ import com.provectus.kafka.ui.serde.api.Serde;
 import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
 import com.provectus.kafka.ui.serdes.PropertyResolverImpl;
 import com.provectus.kafka.ui.serdes.builtin.StringSerde;
-import com.provectus.kafka.ui.util.PollingThrottler;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -112,7 +112,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         this::createConsumer,
         new ConsumerPosition(BEGINNING, EMPTY_TOPIC, null),
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );
 
     var backwardEmitter = new BackwardRecordEmitter(
@@ -120,7 +120,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         new ConsumerPosition(BEGINNING, EMPTY_TOPIC, null),
         100,
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );
 
     StepVerifier.create(Flux.create(forwardEmitter))
@@ -142,7 +142,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         this::createConsumer,
         new ConsumerPosition(BEGINNING, TOPIC, null),
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );
 
     var backwardEmitter = new BackwardRecordEmitter(
@@ -150,7 +150,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         new ConsumerPosition(LATEST, TOPIC, null),
         PARTITIONS * MSGS_PER_PARTITION,
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );
 
     List<String> expectedValues = SENT_RECORDS.stream().map(Record::getValue).collect(Collectors.toList());
@@ -171,7 +171,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         this::createConsumer,
         new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );
 
     var backwardEmitter = new BackwardRecordEmitter(
@@ -179,7 +179,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
         PARTITIONS * MSGS_PER_PARTITION,
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );
 
     var expectedValues = SENT_RECORDS.stream()
@@ -216,7 +216,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         this::createConsumer,
         new ConsumerPosition(TIMESTAMP, TOPIC, targetTimestamps),
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );
 
     var backwardEmitter = new BackwardRecordEmitter(
@@ -224,7 +224,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         new ConsumerPosition(TIMESTAMP, TOPIC, targetTimestamps),
         PARTITIONS * MSGS_PER_PARTITION,
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );
 
     var expectedValues = SENT_RECORDS.stream()
@@ -255,7 +255,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
         numMessages,
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );
 
     var expectedValues = SENT_RECORDS.stream()
@@ -281,7 +281,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         new ConsumerPosition(OFFSET, TOPIC, offsets),
         100,
         RECORD_DESERIALIZER,
-        PollingThrottler.noop()
+        PollingSettings.createDefault()
     );
 
     expectEmitter(backwardEmitter,

+ 1 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/PollingThrottlerTest.java

@@ -5,6 +5,7 @@ import static org.assertj.core.data.Percentage.withPercentage;
 
 import com.google.common.base.Stopwatch;
 import com.google.common.util.concurrent.RateLimiter;
+import com.provectus.kafka.ui.emitter.PollingThrottler;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.TimeUnit;
 import org.junit.jupiter.api.Test;

+ 9 - 0
kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml

@@ -3445,6 +3445,15 @@ components:
             kafka:
               type: object
               properties:
+                polling:
+                  type: object
+                  properties:
+                    pollTimeoutMs:
+                      type: integer
+                    partitionPollTimeout:
+                      type: integer
+                    noDataEmptyPolls:
+                      type: integer
                 clusters:
                   type: array
                   items:

+ 1 - 2
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java

@@ -16,7 +16,6 @@ import static com.codeborne.selenide.Selenide.$$x;
 import static com.codeborne.selenide.Selenide.$x;
 
 public class KsqlQueryForm extends BasePage {
-    protected SelenideElement pageTitle = $x("//h1[text()='Query']");
     protected SelenideElement clearBtn = $x("//div/button[text()='Clear']");
     protected SelenideElement executeBtn = $x("//div/button[text()='Execute']");
     protected SelenideElement stopQueryBtn = $x("//div/button[text()='Stop query']");
@@ -31,7 +30,7 @@ public class KsqlQueryForm extends BasePage {
     @Step
     public KsqlQueryForm waitUntilScreenReady() {
         waitUntilSpinnerDisappear();
-        pageTitle.shouldBe(Condition.visible);
+        executeBtn.shouldBe(Condition.visible);
         return this;
     }
 

+ 6 - 6
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java

@@ -208,23 +208,23 @@ public class TopicsList extends BasePage {
             return new TopicsList();
         }
 
+        private SelenideElement getNameElm() {
+            return element.$x("./td[2]");
+        }
+
         @Step
         public boolean isInternal() {
             boolean internal = false;
             try {
-                internal = element.$x("./td[2]/a/span").isDisplayed();
+                internal = getNameElm().$x("./a/span").isDisplayed();
             } catch (Throwable ignored) {
             }
             return internal;
         }
 
-        private SelenideElement getNameElm() {
-            return element.$x("./td[2]");
-        }
-
         @Step
         public String getName() {
-            return getNameElm().getText().trim();
+            return getNameElm().$x("./a").getAttribute("title");
         }
 
         @Step

+ 14 - 0
kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/backlog/SmokeBacklog.java

@@ -58,4 +58,18 @@ public class SmokeBacklog extends BaseManualTest {
     @Test
     public void testCaseG() {
     }
+
+    @Automation(state = TO_BE_AUTOMATED)
+    @Suite(id = 5)
+    @QaseId(335)
+    @Test
+    public void testCaseH() {
+    }
+
+    @Automation(state = TO_BE_AUTOMATED)
+    @Suite(id = 5)
+    @QaseId(336)
+    @Test
+    public void testCaseI() {
+    }
 }

+ 17 - 4
kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java

@@ -359,7 +359,7 @@ public class TopicsTest extends BaseTest {
 
     @QaseId(11)
     @Test(priority = 15)
-    public void checkShowInternalTopicsButtonFunctionality() {
+    public void checkShowInternalTopicsButton() {
         navigateToTopics();
         SoftAssert softly = new SoftAssert();
         softly.assertTrue(topicsList.isShowInternalRadioBtnSelected(), "isInternalRadioBtnSelected()");
@@ -373,8 +373,21 @@ public class TopicsTest extends BaseTest {
         softly.assertAll();
     }
 
-    @QaseId(56)
+    @QaseId(334)
     @Test(priority = 16)
+    public void checkInternalTopicsNaming() {
+        navigateToTopics();
+        SoftAssert softly = new SoftAssert();
+        topicsList
+                .setShowInternalRadioButton(true)
+                .getInternalTopics()
+                .forEach(topic -> softly.assertTrue(topic.getName().startsWith("_"),
+                        String.format("'%s' starts with '_'", topic.getName())));
+        softly.assertAll();
+    }
+
+    @QaseId(56)
+    @Test(priority = 17)
     public void checkRetentionBytesAccordingToMaxSizeOnDisk() {
         navigateToTopics();
         topicsList
@@ -422,7 +435,7 @@ public class TopicsTest extends BaseTest {
     }
 
     @QaseId(247)
-    @Test(priority = 17)
+    @Test(priority = 18)
     public void recreateTopicFromTopicProfile() {
         Topic topicToRecreate = new Topic()
                 .setName("topic-to-recreate-" + randomAlphabetic(5))
@@ -450,7 +463,7 @@ public class TopicsTest extends BaseTest {
     }
 
     @QaseId(8)
-    @Test(priority = 18)
+    @Test(priority = 19)
     public void checkCopyTopicPossibility() {
         Topic topicToCopy = new Topic()
                 .setName("topic-to-copy-" + randomAlphabetic(5))

+ 3 - 0
kafka-ui-react-app/src/components/App.tsx

@@ -30,6 +30,9 @@ const queryClient = new QueryClient({
   defaultOptions: {
     queries: {
       suspense: true,
+      onError(error) {
+        showServerError(error as Response);
+      },
     },
     mutations: {
       onError(error) {

+ 3 - 2
kafka-ui-react-app/src/components/ConsumerGroups/ConsumerGroups.tsx

@@ -1,17 +1,18 @@
 import React from 'react';
 import { Route, Routes } from 'react-router-dom';
 import Details from 'components/ConsumerGroups/Details/Details';
-import ListContainer from 'components/ConsumerGroups/List/ListContainer';
 import ResetOffsets from 'components/ConsumerGroups/Details/ResetOffsets/ResetOffsets';
 import {
   clusterConsumerGroupResetOffsetsRelativePath,
   RouteParams,
 } from 'lib/paths';
 
+import List from './List';
+
 const ConsumerGroups: React.FC = () => {
   return (
     <Routes>
-      <Route index element={<ListContainer />} />
+      <Route index element={<List />} />
       <Route path={RouteParams.consumerGroupID} element={<Details />} />
       <Route
         path={clusterConsumerGroupResetOffsetsRelativePath}

+ 18 - 35
kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx

@@ -7,26 +7,22 @@ import {
   ClusterGroupParam,
 } from 'lib/paths';
 import Search from 'components/common/Search/Search';
-import PageLoader from 'components/common/PageLoader/PageLoader';
 import ClusterContext from 'components/contexts/ClusterContext';
 import PageHeading from 'components/common/PageHeading/PageHeading';
 import * as Metrics from 'components/common/Metrics';
 import { Tag } from 'components/common/Tag/Tag.styled';
 import groupBy from 'lodash/groupBy';
 import { Table } from 'components/common/table/Table/Table.styled';
-import { useAppDispatch, useAppSelector } from 'lib/hooks/redux';
-import {
-  deleteConsumerGroup,
-  selectById,
-  fetchConsumerGroupDetails,
-  getAreConsumerGroupDetailsFulfilled,
-} from 'redux/reducers/consumerGroups/consumerGroupsSlice';
 import getTagColor from 'components/common/Tag/getTagColor';
 import { Dropdown } from 'components/common/Dropdown';
 import { ControlPanelWrapper } from 'components/common/ControlPanel/ControlPanel.styled';
 import { Action, ResourceType } from 'generated-sources';
 import { ActionDropdownItem } from 'components/common/ActionComponent';
 import TableHeaderCell from 'components/common/table/TableHeaderCell/TableHeaderCell';
+import {
+  useConsumerGroupDetails,
+  useDeleteConsumerGroupMutation,
+} from 'lib/hooks/api/consumers';
 
 import ListItem from './ListItem';
 
@@ -35,38 +31,25 @@ const Details: React.FC = () => {
   const [searchParams] = useSearchParams();
   const searchValue = searchParams.get('q') || '';
   const { isReadOnly } = React.useContext(ClusterContext);
-  const { consumerGroupID, clusterName } = useAppParams<ClusterGroupParam>();
-  const dispatch = useAppDispatch();
-  const consumerGroup = useAppSelector((state) =>
-    selectById(state, consumerGroupID)
-  );
-  const isFetched = useAppSelector(getAreConsumerGroupDetailsFulfilled);
+  const routeParams = useAppParams<ClusterGroupParam>();
+  const { clusterName, consumerGroupID } = routeParams;
 
-  React.useEffect(() => {
-    dispatch(fetchConsumerGroupDetails({ clusterName, consumerGroupID }));
-  }, [clusterName, consumerGroupID, dispatch]);
+  const consumerGroup = useConsumerGroupDetails(routeParams);
+  const deleteConsumerGroup = useDeleteConsumerGroupMutation(routeParams);
 
   const onDelete = async () => {
-    const res = await dispatch(
-      deleteConsumerGroup({ clusterName, consumerGroupID })
-    ).unwrap();
-    if (res) navigate('../');
+    await deleteConsumerGroup.mutateAsync();
+    navigate('../');
   };
 
   const onResetOffsets = () => {
     navigate(clusterConsumerGroupResetRelativePath);
   };
 
-  if (!isFetched || !consumerGroup) {
-    return <PageLoader />;
-  }
-
-  const partitionsByTopic = groupBy(consumerGroup.partitions, 'topic');
-
+  const partitionsByTopic = groupBy(consumerGroup.data?.partitions, 'topic');
   const filteredPartitionsByTopic = Object.keys(partitionsByTopic).filter(
     (el) => el.includes(searchValue)
   );
-
   const currentPartitionsByTopic = searchValue.length
     ? filteredPartitionsByTopic
     : Object.keys(partitionsByTopic);
@@ -110,24 +93,24 @@ const Details: React.FC = () => {
       <Metrics.Wrapper>
         <Metrics.Section>
           <Metrics.Indicator label="State">
-            <Tag color={getTagColor(consumerGroup.state)}>
-              {consumerGroup.state}
+            <Tag color={getTagColor(consumerGroup.data?.state)}>
+              {consumerGroup.data?.state}
             </Tag>
           </Metrics.Indicator>
           <Metrics.Indicator label="Members">
-            {consumerGroup.members}
+            {consumerGroup.data?.members}
           </Metrics.Indicator>
           <Metrics.Indicator label="Assigned Topics">
-            {consumerGroup.topics}
+            {consumerGroup.data?.topics}
           </Metrics.Indicator>
           <Metrics.Indicator label="Assigned Partitions">
-            {consumerGroup.partitions?.length}
+            {consumerGroup.data?.partitions?.length}
           </Metrics.Indicator>
           <Metrics.Indicator label="Coordinator ID">
-            {consumerGroup.coordinator?.id}
+            {consumerGroup.data?.coordinator?.id}
           </Metrics.Indicator>
           <Metrics.Indicator label="Total lag">
-            {consumerGroup.messagesBehind}
+            {consumerGroup.data?.messagesBehind}
           </Metrics.Indicator>
         </Metrics.Section>
       </Metrics.Wrapper>

+ 197 - 0
kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/Form.tsx

@@ -0,0 +1,197 @@
+import React from 'react';
+import { useNavigate } from 'react-router-dom';
+import {
+  ConsumerGroupDetails,
+  ConsumerGroupOffsetsReset,
+  ConsumerGroupOffsetsResetType,
+} from 'generated-sources';
+import { ClusterGroupParam } from 'lib/paths';
+import {
+  Controller,
+  FormProvider,
+  useFieldArray,
+  useForm,
+} from 'react-hook-form';
+import { MultiSelect, Option } from 'react-multi-select-component';
+import 'react-datepicker/dist/react-datepicker.css';
+import { ErrorMessage } from '@hookform/error-message';
+import { InputLabel } from 'components/common/Input/InputLabel.styled';
+import { Button } from 'components/common/Button/Button';
+import Input from 'components/common/Input/Input';
+import { FormError } from 'components/common/Input/Input.styled';
+import useAppParams from 'lib/hooks/useAppParams';
+import { useResetConsumerGroupOffsetsMutation } from 'lib/hooks/api/consumers';
+import { FlexFieldset, StyledForm } from 'components/common/Form/Form.styled';
+import ControlledSelect from 'components/common/Select/ControlledSelect';
+
+import * as S from './ResetOffsets.styled';
+
+interface FormProps {
+  defaultValues: ConsumerGroupOffsetsReset;
+  topics: string[];
+  partitions: ConsumerGroupDetails['partitions'];
+}
+
+const resetTypeOptions = Object.values(ConsumerGroupOffsetsResetType).map(
+  (value) => ({ value, label: value })
+);
+
+const Form: React.FC<FormProps> = ({ defaultValues, partitions, topics }) => {
+  const navigate = useNavigate();
+  const routerParams = useAppParams<ClusterGroupParam>();
+  const reset = useResetConsumerGroupOffsetsMutation(routerParams);
+  const topicOptions = React.useMemo(
+    () => topics.map((value) => ({ value, label: value })),
+    [topics]
+  );
+  const methods = useForm<ConsumerGroupOffsetsReset>({
+    mode: 'onChange',
+    defaultValues,
+  });
+
+  const {
+    handleSubmit,
+    setValue,
+    watch,
+    control,
+    formState: { errors },
+  } = methods;
+  const { fields } = useFieldArray({
+    control,
+    name: 'partitionsOffsets',
+  });
+
+  const resetTypeValue = watch('resetType');
+  const topicValue = watch('topic');
+  const offsetsValue = watch('partitionsOffsets');
+  const partitionsValue = watch('partitions') || [];
+
+  const partitionOptions =
+    partitions
+      ?.filter((p) => p.topic === topicValue)
+      .map((p) => ({
+        label: `Partition #${p.partition.toString()}`,
+        value: p.partition,
+      })) || [];
+
+  const onSelectedPartitionsChange = (selected: Option[]) => {
+    setValue(
+      'partitions',
+      selected.map(({ value }) => value)
+    );
+
+    setValue(
+      'partitionsOffsets',
+      selected.map(({ value }) => {
+        const currentOffset = offsetsValue?.find(
+          ({ partition }) => partition === value
+        );
+        return { offset: currentOffset?.offset, partition: value };
+      })
+    );
+  };
+
+  React.useEffect(() => {
+    onSelectedPartitionsChange([]);
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [topicValue]);
+
+  const onSubmit = async (data: ConsumerGroupOffsetsReset) => {
+    await reset.mutateAsync(data);
+    navigate('../');
+  };
+
+  return (
+    <FormProvider {...methods}>
+      <StyledForm onSubmit={handleSubmit(onSubmit)}>
+        <FlexFieldset>
+          <ControlledSelect
+            name="topic"
+            label="Topic"
+            placeholder="Select Topic"
+            options={topicOptions}
+          />
+          <ControlledSelect
+            name="resetType"
+            label="Reset Type"
+            placeholder="Select Reset Type"
+            options={resetTypeOptions}
+          />
+          <div>
+            <InputLabel>Partitions</InputLabel>
+            <MultiSelect
+              options={partitionOptions}
+              value={partitionsValue.map((p) => ({
+                value: p,
+                label: String(p),
+              }))}
+              onChange={onSelectedPartitionsChange}
+              labelledBy="Select partitions"
+            />
+          </div>
+          {resetTypeValue === ConsumerGroupOffsetsResetType.TIMESTAMP &&
+            partitionsValue.length > 0 && (
+              <div>
+                <InputLabel>Timestamp</InputLabel>
+                <Controller
+                  control={control}
+                  name="resetToTimestamp"
+                  rules={{
+                    required: 'Timestamp is required',
+                  }}
+                  render={({ field: { onChange, onBlur, value, ref } }) => (
+                    <S.DatePickerInput
+                      ref={ref}
+                      selected={new Date(value as number)}
+                      onChange={(e: Date | null) => onChange(e?.getTime())}
+                      onBlur={onBlur}
+                    />
+                  )}
+                />
+                <ErrorMessage
+                  errors={errors}
+                  name="resetToTimestamp"
+                  render={({ message }) => <FormError>{message}</FormError>}
+                />
+              </div>
+            )}
+
+          {resetTypeValue === ConsumerGroupOffsetsResetType.OFFSET &&
+            partitionsValue.length > 0 && (
+              <S.OffsetsWrapper>
+                {fields.map((field, index) => (
+                  <Input
+                    key={field.id}
+                    label={`Partition #${field.partition} Offset`}
+                    type="number"
+                    name={`partitionsOffsets.${index}.offset` as const}
+                    hookFormOptions={{
+                      shouldUnregister: true,
+                      required: 'Offset is required',
+                      min: {
+                        value: 0,
+                        message: 'must be greater than or equal to 0',
+                      },
+                    }}
+                    withError
+                  />
+                ))}
+              </S.OffsetsWrapper>
+            )}
+        </FlexFieldset>
+        <div>
+          <Button
+            buttonSize="M"
+            buttonType="primary"
+            type="submit"
+            disabled={partitionsValue.length === 0}
+          >
+            Submit
+          </Button>
+        </div>
+      </StyledForm>
+    </FormProvider>
+  );
+};
+
+export default Form;

+ 23 - 36
kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.styled.ts

@@ -1,37 +1,5 @@
 import styled from 'styled-components';
-
-export const Wrapper = styled.div`
-  padding: 16px;
-  padding-top: 0;
-
-  & > form {
-    display: flex;
-    flex-direction: column;
-    gap: 16px;
-
-    & > button:last-child {
-      align-self: flex-start;
-    }
-  }
-
-  & .multi-select {
-    height: 32px;
-    & > .dropdown-container {
-      height: 32px;
-      & > .dropdown-heading {
-        height: 32px;
-      }
-    }
-  }
-`;
-
-export const MainSelectors = styled.div`
-  display: flex;
-  gap: 16px;
-  & > * {
-    flex-grow: 1;
-  }
-`;
+import DatePicker from 'react-datepicker';
 
 export const OffsetsWrapper = styled.div`
   display: flex;
@@ -40,7 +8,26 @@ export const OffsetsWrapper = styled.div`
   gap: 16px;
 `;
 
-export const OffsetsTitle = styled.h1`
-  font-size: 18px;
-  font-weight: 500;
+export const DatePickerInput = styled(DatePicker).attrs({
+  showTimeInput: true,
+  timeInputLabel: 'Time:',
+  dateFormat: 'MMMM d, yyyy h:mm aa',
+})`
+  height: 40px;
+  border: 1px ${({ theme }) => theme.select.borderColor.normal} solid;
+  border-radius: 4px;
+  font-size: 14px;
+  width: 270px;
+  padding-left: 12px;
+  background-color: ${({ theme }) => theme.input.backgroundColor.normal};
+  color: ${({ theme }) => theme.input.color.normal};
+  &::placeholder {
+    color: ${({ theme }) => theme.input.color.normal};
+  }
+  &:hover {
+    cursor: pointer;
+  }
+  &:focus {
+    outline: none;
+  }
 `;

+ 29 - 292
kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx

@@ -1,315 +1,52 @@
 import React from 'react';
-import { useNavigate } from 'react-router-dom';
-import { ConsumerGroupOffsetsResetType } from 'generated-sources';
 import { clusterConsumerGroupsPath, ClusterGroupParam } from 'lib/paths';
-import {
-  Controller,
-  FormProvider,
-  useFieldArray,
-  useForm,
-} from 'react-hook-form';
-import { MultiSelect, Option } from 'react-multi-select-component';
-import DatePicker from 'react-datepicker';
 import 'react-datepicker/dist/react-datepicker.css';
-import groupBy from 'lodash/groupBy';
-import PageLoader from 'components/common/PageLoader/PageLoader';
-import { ErrorMessage } from '@hookform/error-message';
-import Select from 'components/common/Select/Select';
-import { InputLabel } from 'components/common/Input/InputLabel.styled';
-import { Button } from 'components/common/Button/Button';
-import Input from 'components/common/Input/Input';
-import { FormError } from 'components/common/Input/Input.styled';
 import PageHeading from 'components/common/PageHeading/PageHeading';
-import {
-  fetchConsumerGroupDetails,
-  selectById,
-  getAreConsumerGroupDetailsFulfilled,
-  getIsOffsetReseted,
-  resetConsumerGroupOffsets,
-} from 'redux/reducers/consumerGroups/consumerGroupsSlice';
-import { useAppDispatch, useAppSelector } from 'lib/hooks/redux';
 import useAppParams from 'lib/hooks/useAppParams';
-import { resetLoaderById } from 'redux/reducers/loader/loaderSlice';
-
-import * as S from './ResetOffsets.styled';
+import { useConsumerGroupDetails } from 'lib/hooks/api/consumers';
+import PageLoader from 'components/common/PageLoader/PageLoader';
+import {
+  ConsumerGroupOffsetsReset,
+  ConsumerGroupOffsetsResetType,
+} from 'generated-sources';
 
-interface FormType {
-  topic: string;
-  resetType: ConsumerGroupOffsetsResetType;
-  partitionsOffsets: { offset: string | undefined; partition: number }[];
-  resetToTimestamp: Date;
-}
+import Form from './Form';
 
 const ResetOffsets: React.FC = () => {
-  const dispatch = useAppDispatch();
-  const { consumerGroupID, clusterName } = useAppParams<ClusterGroupParam>();
-  const consumerGroup = useAppSelector((state) =>
-    selectById(state, consumerGroupID)
-  );
-
-  const isFetched = useAppSelector(getAreConsumerGroupDetailsFulfilled);
-  const isOffsetReseted = useAppSelector(getIsOffsetReseted);
-
-  React.useEffect(() => {
-    dispatch(fetchConsumerGroupDetails({ clusterName, consumerGroupID }));
-  }, [clusterName, consumerGroupID, dispatch]);
+  const routerParams = useAppParams<ClusterGroupParam>();
 
-  const [uniqueTopics, setUniqueTopics] = React.useState<string[]>([]);
-  const [selectedPartitions, setSelectedPartitions] = React.useState<Option[]>(
-    []
-  );
+  const consumerGroup = useConsumerGroupDetails(routerParams);
 
-  const methods = useForm<FormType>({
-    mode: 'onChange',
-    defaultValues: {
-      resetType: ConsumerGroupOffsetsResetType.EARLIEST,
-      topic: '',
-      partitionsOffsets: [],
-    },
-  });
-  const {
-    handleSubmit,
-    setValue,
-    watch,
-    control,
-    setError,
-    clearErrors,
-    formState: { errors, isValid },
-  } = methods;
-  const { fields } = useFieldArray({
-    control,
-    name: 'partitionsOffsets',
-  });
-  const resetTypeValue = watch('resetType');
-  const topicValue = watch('topic');
-  const offsetsValue = watch('partitionsOffsets');
+  if (consumerGroup.isLoading || !consumerGroup.isSuccess)
+    return <PageLoader />;
 
-  React.useEffect(() => {
-    if (isFetched && consumerGroup?.partitions) {
-      setValue('topic', consumerGroup.partitions[0].topic);
-      setUniqueTopics(Object.keys(groupBy(consumerGroup.partitions, 'topic')));
-    }
-  }, [consumerGroup?.partitions, isFetched, setValue]);
+  const partitions = consumerGroup.data.partitions || [];
+  const { topic } = partitions[0];
 
-  const onSelectedPartitionsChange = (value: Option[]) => {
-    clearErrors();
-    setValue(
-      'partitionsOffsets',
-      value.map((partition) => {
-        const currentOffset = offsetsValue.find(
-          (offset) => offset.partition === partition.value
-        );
-        return {
-          offset: currentOffset ? currentOffset?.offset : undefined,
-          partition: partition.value,
-        };
-      })
-    );
-    setSelectedPartitions(value);
-  };
-
-  React.useEffect(() => {
-    onSelectedPartitionsChange([]);
-    // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [topicValue]);
+  const uniqTopics = Array.from(
+    new Set(partitions.map((partition) => partition.topic))
+  );
 
-  const onSubmit = (data: FormType) => {
-    const augmentedData = {
-      ...data,
-      partitions: selectedPartitions.map((partition) => partition.value),
-      partitionsOffsets: data.partitionsOffsets as {
-        offset: string;
-        partition: number;
-      }[],
-    };
-    let isValidAugmentedData = true;
-    if (augmentedData.resetType === ConsumerGroupOffsetsResetType.OFFSET) {
-      augmentedData.partitionsOffsets.forEach((offset, index) => {
-        if (!offset.offset) {
-          setError(`partitionsOffsets.${index}.offset`, {
-            type: 'manual',
-            message: "This field shouldn't be empty!",
-          });
-          isValidAugmentedData = false;
-        }
-      });
-    } else if (
-      augmentedData.resetType === ConsumerGroupOffsetsResetType.TIMESTAMP
-    ) {
-      if (!augmentedData.resetToTimestamp) {
-        setError(`resetToTimestamp`, {
-          type: 'manual',
-          message: "This field shouldn't be empty!",
-        });
-        isValidAugmentedData = false;
-      }
-    }
-    if (isValidAugmentedData) {
-      dispatch(
-        resetConsumerGroupOffsets({
-          clusterName,
-          consumerGroupID,
-          requestBody: augmentedData,
-        })
-      );
-    }
+  const defaultValues: ConsumerGroupOffsetsReset = {
+    resetType: ConsumerGroupOffsetsResetType.EARLIEST,
+    topic,
+    partitionsOffsets: [],
+    resetToTimestamp: new Date().getTime(),
   };
 
-  const navigate = useNavigate();
-  React.useEffect(() => {
-    if (isOffsetReseted) {
-      dispatch(resetLoaderById('consumerGroups/resetConsumerGroupOffsets'));
-      navigate('../');
-    }
-  }, [clusterName, consumerGroupID, dispatch, navigate, isOffsetReseted]);
-
-  if (!isFetched || !consumerGroup) {
-    return <PageLoader />;
-  }
-
   return (
-    <FormProvider {...methods}>
+    <>
       <PageHeading
         text="Reset offsets"
-        backTo={clusterConsumerGroupsPath(clusterName)}
+        backTo={clusterConsumerGroupsPath(routerParams.clusterName)}
         backText="Consumers"
       />
-      <S.Wrapper>
-        <form onSubmit={handleSubmit(onSubmit)}>
-          <S.MainSelectors>
-            <div>
-              <InputLabel id="topicLabel">Topic</InputLabel>
-              <Controller
-                control={control}
-                name="topic"
-                render={({ field: { name, onChange, value } }) => (
-                  <Select
-                    id="topic"
-                    selectSize="M"
-                    aria-labelledby="topicLabel"
-                    minWidth="100%"
-                    name={name}
-                    onChange={onChange}
-                    defaultValue={value}
-                    value={value}
-                    options={uniqueTopics.map((topic) => ({
-                      value: topic,
-                      label: topic,
-                    }))}
-                  />
-                )}
-              />
-            </div>
-            <div>
-              <InputLabel id="resetTypeLabel">Reset Type</InputLabel>
-              <Controller
-                control={control}
-                name="resetType"
-                render={({ field: { name, onChange, value } }) => (
-                  <Select
-                    id="resetType"
-                    selectSize="M"
-                    aria-labelledby="resetTypeLabel"
-                    minWidth="100%"
-                    name={name}
-                    onChange={onChange}
-                    value={value}
-                    options={Object.values(ConsumerGroupOffsetsResetType).map(
-                      (type) => ({ value: type, label: type })
-                    )}
-                  />
-                )}
-              />
-            </div>
-            <div>
-              <InputLabel>Partitions</InputLabel>
-              <MultiSelect
-                options={
-                  consumerGroup.partitions
-                    ?.filter((p) => p.topic === topicValue)
-                    .map((p) => ({
-                      label: `Partition #${p.partition.toString()}`,
-                      value: p.partition,
-                    })) || []
-                }
-                value={selectedPartitions}
-                onChange={onSelectedPartitionsChange}
-                labelledBy="Select partitions"
-              />
-            </div>
-          </S.MainSelectors>
-          {resetTypeValue === ConsumerGroupOffsetsResetType.TIMESTAMP &&
-            selectedPartitions.length > 0 && (
-              <div>
-                <InputLabel>Timestamp</InputLabel>
-                <Controller
-                  control={control}
-                  name="resetToTimestamp"
-                  render={({ field: { onChange, onBlur, value, ref } }) => (
-                    <DatePicker
-                      ref={ref}
-                      selected={value}
-                      onChange={onChange}
-                      onBlur={onBlur}
-                      showTimeInput
-                      timeInputLabel="Time:"
-                      dateFormat="MMMM d, yyyy h:mm aa"
-                    />
-                  )}
-                />
-                <ErrorMessage
-                  errors={errors}
-                  name="resetToTimestamp"
-                  render={({ message }) => <FormError>{message}</FormError>}
-                />
-              </div>
-            )}
-          {resetTypeValue === ConsumerGroupOffsetsResetType.OFFSET &&
-            selectedPartitions.length > 0 && (
-              <div>
-                <S.OffsetsTitle>Offsets</S.OffsetsTitle>
-                <S.OffsetsWrapper>
-                  {fields.map((field, index) => (
-                    <div key={field.id}>
-                      <InputLabel htmlFor={`partitionsOffsets.${index}.offset`}>
-                        Partition #{field.partition}
-                      </InputLabel>
-                      <Input
-                        id={`partitionsOffsets.${index}.offset`}
-                        type="number"
-                        name={`partitionsOffsets.${index}.offset` as const}
-                        hookFormOptions={{
-                          shouldUnregister: true,
-                          min: {
-                            value: 0,
-                            message: 'must be greater than or equal to 0',
-                          },
-                        }}
-                        defaultValue={field.offset}
-                      />
-                      <ErrorMessage
-                        errors={errors}
-                        name={`partitionsOffsets.${index}.offset`}
-                        render={({ message }) => (
-                          <FormError>{message}</FormError>
-                        )}
-                      />
-                    </div>
-                  ))}
-                </S.OffsetsWrapper>
-              </div>
-            )}
-          <Button
-            buttonSize="M"
-            buttonType="primary"
-            type="submit"
-            disabled={!isValid || selectedPartitions.length === 0}
-          >
-            Submit
-          </Button>
-        </form>
-      </S.Wrapper>
-    </FormProvider>
+      <Form
+        defaultValues={defaultValues}
+        topics={uniqTopics}
+        partitions={partitions}
+      />
+    </>
   );
 };
 

+ 0 - 158
kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/__test__/ResetOffsets.spec.tsx

@@ -1,158 +0,0 @@
-import React from 'react';
-import fetchMock from 'fetch-mock';
-import { act, screen, waitFor } from '@testing-library/react';
-import userEvent from '@testing-library/user-event';
-import { render, WithRoute } from 'lib/testHelpers';
-import { clusterConsumerGroupResetOffsetsPath } from 'lib/paths';
-import { consumerGroupPayload } from 'redux/reducers/consumerGroups/__test__/fixtures';
-import ResetOffsets from 'components/ConsumerGroups/Details/ResetOffsets/ResetOffsets';
-
-const clusterName = 'cluster1';
-const { groupId } = consumerGroupPayload;
-
-const renderComponent = () =>
-  render(
-    <WithRoute path={clusterConsumerGroupResetOffsetsPath()}>
-      <ResetOffsets />
-    </WithRoute>,
-    {
-      initialEntries: [
-        clusterConsumerGroupResetOffsetsPath(
-          clusterName,
-          consumerGroupPayload.groupId
-        ),
-      ],
-    }
-  );
-
-const resetConsumerGroupOffsetsMockCalled = () =>
-  expect(
-    fetchMock.called(
-      `/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`
-    )
-  ).toBeTruthy();
-
-const selectresetTypeAndPartitions = async (resetType: string) => {
-  await userEvent.click(screen.getByLabelText('Reset Type'));
-  await userEvent.click(screen.getByText(resetType));
-  await userEvent.click(screen.getByText('Select...'));
-  await userEvent.click(screen.getByText('Partition #0'));
-};
-
-const resetConsumerGroupOffsetsWith = async (
-  resetType: string,
-  offset: null | number = null
-) => {
-  await userEvent.click(screen.getByLabelText('Reset Type'));
-  const options = screen.getAllByText(resetType);
-  await userEvent.click(options.length > 1 ? options[1] : options[0]);
-  await userEvent.click(screen.getByText('Select...'));
-
-  await userEvent.click(screen.getByText('Partition #0'));
-
-  fetchMock.postOnce(
-    `/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`,
-    200,
-    {
-      body: {
-        topic: '__amazon_msk_canary',
-        resetType,
-        partitions: [0],
-        partitionsOffsets: [{ partition: 0, offset }],
-      },
-    }
-  );
-  await userEvent.click(screen.getByText('Submit'));
-  await waitFor(() => resetConsumerGroupOffsetsMockCalled());
-};
-
-describe('ResetOffsets', () => {
-  afterEach(() => {
-    fetchMock.reset();
-  });
-
-  xit('renders progress bar for initial state', async () => {
-    fetchMock.getOnce(
-      `/api/clusters/${clusterName}/consumer-groups/${groupId}`,
-      404
-    );
-    await act(() => {
-      renderComponent();
-    });
-    expect(screen.getByRole('progressbar')).toBeInTheDocument();
-  });
-
-  describe('with consumer group', () => {
-    describe('submit handles resetConsumerGroupOffsets', () => {
-      beforeEach(async () => {
-        const fetchConsumerGroupMock = fetchMock.getOnce(
-          `/api/clusters/${clusterName}/consumer-groups/${groupId}`,
-          consumerGroupPayload
-        );
-        await act(() => {
-          renderComponent();
-        });
-        expect(fetchConsumerGroupMock.called()).toBeTruthy();
-      });
-
-      it('calls resetConsumerGroupOffsets with EARLIEST', async () => {
-        await resetConsumerGroupOffsetsWith('EARLIEST');
-      });
-
-      it('calls resetConsumerGroupOffsets with LATEST', async () => {
-        await resetConsumerGroupOffsetsWith('LATEST');
-      });
-      it('calls resetConsumerGroupOffsets with OFFSET', async () => {
-        await selectresetTypeAndPartitions('OFFSET');
-        fetchMock.postOnce(
-          `/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`,
-          200,
-          {
-            body: {
-              topic: '__amazon_msk_canary',
-              resetType: 'OFFSET',
-              partitions: [0],
-              partitionsOffsets: [{ partition: 0, offset: 10 }],
-            },
-          }
-        );
-
-        await userEvent.click(screen.getAllByLabelText('Partition #0')[1]);
-        await userEvent.keyboard('10');
-        await userEvent.click(screen.getByText('Submit'));
-        await resetConsumerGroupOffsetsMockCalled();
-      });
-
-      // focus doesn't work for datepicker
-      it.skip('calls resetConsumerGroupOffsets with TIMESTAMP', async () => {
-        await selectresetTypeAndPartitions('TIMESTAMP');
-        const resetConsumerGroupOffsetsMock = fetchMock.postOnce(
-          `/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`,
-          200,
-          {
-            body: {
-              topic: '__amazon_msk_canary',
-              resetType: 'OFFSET',
-              partitions: [0],
-              partitionsOffsets: [{ partition: 0, offset: 10 }],
-            },
-          }
-        );
-        await userEvent.click(screen.getByText('Submit'));
-        await waitFor(() =>
-          expect(
-            screen.getByText("This field shouldn't be empty!")
-          ).toBeInTheDocument()
-        );
-
-        await waitFor(() =>
-          expect(
-            resetConsumerGroupOffsetsMock.called(
-              `/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`
-            )
-          ).toBeFalsy()
-        );
-      });
-    });
-  });
-});

+ 1 - 1
kafka-ui-react-app/src/components/ConsumerGroups/Details/TopicContents/__test__/TopicContents.spec.tsx

@@ -2,9 +2,9 @@ import React from 'react';
 import { clusterConsumerGroupDetailsPath } from 'lib/paths';
 import { screen } from '@testing-library/react';
 import TopicContents from 'components/ConsumerGroups/Details/TopicContents/TopicContents';
-import { consumerGroupPayload } from 'redux/reducers/consumerGroups/__test__/fixtures';
 import { render, WithRoute } from 'lib/testHelpers';
 import { ConsumerGroupTopicPartition } from 'generated-sources';
+import { consumerGroupPayload } from 'lib/fixtures/consumerGroups';
 
 const clusterName = 'cluster1';
 

+ 0 - 114
kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx

@@ -1,114 +0,0 @@
-import Details from 'components/ConsumerGroups/Details/Details';
-import React from 'react';
-import fetchMock from 'fetch-mock';
-import { render, WithRoute } from 'lib/testHelpers';
-import {
-  clusterConsumerGroupDetailsPath,
-  clusterConsumerGroupResetRelativePath,
-} from 'lib/paths';
-import { consumerGroupPayload } from 'redux/reducers/consumerGroups/__test__/fixtures';
-import {
-  screen,
-  waitFor,
-  waitForElementToBeRemoved,
-} from '@testing-library/dom';
-import userEvent from '@testing-library/user-event';
-
-const clusterName = 'cluster1';
-const { groupId } = consumerGroupPayload;
-
-const mockNavigate = jest.fn();
-jest.mock('react-router-dom', () => ({
-  ...jest.requireActual('react-router-dom'),
-  useNavigate: () => mockNavigate,
-}));
-
-const renderComponent = () => {
-  render(
-    <WithRoute path={clusterConsumerGroupDetailsPath()}>
-      <Details />
-    </WithRoute>,
-    { initialEntries: [clusterConsumerGroupDetailsPath(clusterName, groupId)] }
-  );
-};
-describe('Details component', () => {
-  afterEach(() => {
-    fetchMock.reset();
-    mockNavigate.mockClear();
-  });
-
-  describe('when consumer groups are NOT fetched', () => {
-    it('renders progress bar for initial state', () => {
-      fetchMock.getOnce(
-        `/api/clusters/${clusterName}/consumer-groups/${groupId}`,
-        404
-      );
-      renderComponent();
-      expect(screen.getByRole('progressbar')).toBeInTheDocument();
-    });
-  });
-
-  describe('when consumer gruops are fetched', () => {
-    beforeEach(async () => {
-      const fetchConsumerGroupMock = fetchMock.getOnce(
-        `/api/clusters/${clusterName}/consumer-groups/${groupId}`,
-        consumerGroupPayload
-      );
-      renderComponent();
-      await waitForElementToBeRemoved(() => screen.getByRole('progressbar'));
-      await waitFor(() => expect(fetchConsumerGroupMock.called()).toBeTruthy());
-    });
-
-    it('renders component', () => {
-      expect(screen.getByRole('heading')).toBeInTheDocument();
-      expect(screen.getByText(groupId)).toBeInTheDocument();
-
-      expect(screen.getByRole('table')).toBeInTheDocument();
-      expect(screen.getAllByRole('columnheader').length).toEqual(2);
-
-      expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
-    });
-
-    it('handles [Reset offset] click', async () => {
-      await userEvent.click(screen.getByText('Reset offset'));
-      expect(mockNavigate).toHaveBeenLastCalledWith(
-        clusterConsumerGroupResetRelativePath
-      );
-    });
-
-    it('renders search input', async () => {
-      expect(
-        screen.getByPlaceholderText('Search by Topic Name')
-      ).toBeInTheDocument();
-    });
-
-    it('shows confirmation modal on consumer group delete', async () => {
-      expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
-      await userEvent.click(screen.getByText('Delete consumer group'));
-      await waitFor(() =>
-        expect(screen.queryByRole('dialog')).toBeInTheDocument()
-      );
-      await userEvent.click(screen.getByText('Cancel'));
-      expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
-    });
-
-    it('handles [Delete consumer group] click', async () => {
-      expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
-
-      await userEvent.click(screen.getByText('Delete consumer group'));
-
-      expect(screen.queryByRole('dialog')).toBeInTheDocument();
-      const deleteConsumerGroupMock = fetchMock.deleteOnce(
-        `/api/clusters/${clusterName}/consumer-groups/${groupId}`,
-        200
-      );
-      await waitFor(() => {
-        userEvent.click(screen.getByRole('button', { name: 'Confirm' }));
-      });
-      expect(deleteConsumerGroupMock.called()).toBeTruthy();
-
-      await waitForElementToBeRemoved(() => screen.queryByRole('dialog'));
-      await waitFor(() => expect(mockNavigate).toHaveBeenLastCalledWith('../'));
-    });
-  });
-});

+ 0 - 48
kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/ListItem.spec.tsx

@@ -1,48 +0,0 @@
-import React from 'react';
-import { clusterConsumerGroupDetailsPath } from 'lib/paths';
-import { screen } from '@testing-library/react';
-import userEvent from '@testing-library/user-event';
-import ListItem from 'components/ConsumerGroups/Details/ListItem';
-import { consumerGroupPayload } from 'redux/reducers/consumerGroups/__test__/fixtures';
-import { render, WithRoute } from 'lib/testHelpers';
-import { ConsumerGroupTopicPartition } from 'generated-sources';
-
-const clusterName = 'cluster1';
-
-const renderComponent = (consumers: ConsumerGroupTopicPartition[] = []) =>
-  render(
-    <WithRoute path={clusterConsumerGroupDetailsPath()}>
-      <table>
-        <tbody>
-          <ListItem
-            clusterName={clusterName}
-            name={clusterName}
-            consumers={consumers}
-          />
-        </tbody>
-      </table>
-    </WithRoute>,
-    {
-      initialEntries: [
-        clusterConsumerGroupDetailsPath(
-          clusterName,
-          consumerGroupPayload.groupId
-        ),
-      ],
-    }
-  );
-
-describe('ListItem', () => {
-  beforeEach(() => renderComponent(consumerGroupPayload.partitions));
-
-  it('should renders list item with topic content closed and check if element exists', () => {
-    expect(screen.getByRole('row')).toBeInTheDocument();
-  });
-
-  it('should renders list item with topic content open', async () => {
-    await userEvent.click(
-      screen.getByRole('cell', { name: 'cluster1' }).children[0].children[0]
-    );
-    expect(screen.getByText('Consumer ID')).toBeInTheDocument();
-  });
-});

+ 20 - 27
kafka-ui-react-app/src/components/ConsumerGroups/List/List.tsx → kafka-ui-react-app/src/components/ConsumerGroups/List.tsx

@@ -7,41 +7,29 @@ import {
   ConsumerGroupOrdering,
   SortOrder,
 } from 'generated-sources';
-import { useAppDispatch } from 'lib/hooks/redux';
 import useAppParams from 'lib/hooks/useAppParams';
 import { clusterConsumerGroupDetailsPath, ClusterNameRoute } from 'lib/paths';
-import { fetchConsumerGroupsPaged } from 'redux/reducers/consumerGroups/consumerGroupsSlice';
 import { ColumnDef } from '@tanstack/react-table';
 import Table, { TagCell, LinkCell } from 'components/common/NewTable';
 import { useNavigate, useSearchParams } from 'react-router-dom';
 import { PER_PAGE } from 'lib/constants';
+import { useConsumerGroups } from 'lib/hooks/api/consumers';
 
-export interface Props {
-  consumerGroups: ConsumerGroupDetails[];
-  totalPages: number;
-}
-
-const List: React.FC<Props> = ({ consumerGroups, totalPages }) => {
-  const dispatch = useAppDispatch();
+const List = () => {
   const { clusterName } = useAppParams<ClusterNameRoute>();
   const [searchParams] = useSearchParams();
   const navigate = useNavigate();
 
-  React.useEffect(() => {
-    dispatch(
-      fetchConsumerGroupsPaged({
-        clusterName,
-        orderBy:
-          (searchParams.get('sortBy') as ConsumerGroupOrdering) || undefined,
-        sortOrder:
-          (searchParams.get('sortDirection')?.toUpperCase() as SortOrder) ||
-          undefined,
-        page: Number(searchParams.get('page') || 1),
-        perPage: Number(searchParams.get('perPage') || PER_PAGE),
-        search: searchParams.get('q') || '',
-      })
-    );
-  }, [clusterName, dispatch, searchParams]);
+  const consumerGroups = useConsumerGroups({
+    clusterName,
+    orderBy: (searchParams.get('sortBy') as ConsumerGroupOrdering) || undefined,
+    sortOrder:
+      (searchParams.get('sortDirection')?.toUpperCase() as SortOrder) ||
+      undefined,
+    page: Number(searchParams.get('page') || 1),
+    perPage: Number(searchParams.get('perPage') || PER_PAGE),
+    search: searchParams.get('q') || '',
+  });
 
   const columns = React.useMemo<ColumnDef<ConsumerGroupDetails>[]>(
     () => [
@@ -95,9 +83,13 @@ const List: React.FC<Props> = ({ consumerGroups, totalPages }) => {
       </ControlPanelWrapper>
       <Table
         columns={columns}
-        pageCount={totalPages}
-        data={consumerGroups}
-        emptyMessage="No active consumer groups found"
+        pageCount={consumerGroups.data?.pageCount || 0}
+        data={consumerGroups.data?.consumerGroups || []}
+        emptyMessage={
+          consumerGroups.isSuccess
+            ? 'No active consumer groups found'
+            : 'Loading...'
+        }
         serverSideProcessing
         enableSorting
         onRowClick={({ original }) =>
@@ -105,6 +97,7 @@ const List: React.FC<Props> = ({ consumerGroups, totalPages }) => {
             clusterConsumerGroupDetailsPath(clusterName, original.groupId)
           )
         }
+        disabled={consumerGroups.isFetching}
       />
     </>
   );

+ 0 - 16
kafka-ui-react-app/src/components/ConsumerGroups/List/ListContainer.tsx

@@ -1,16 +0,0 @@
-import { connect } from 'react-redux';
-import { RootState } from 'redux/interfaces';
-import {
-  getConsumerGroupsOrderBy,
-  getConsumerGroupsTotalPages,
-  selectAll,
-} from 'redux/reducers/consumerGroups/consumerGroupsSlice';
-import List from 'components/ConsumerGroups/List/List';
-
-const mapStateToProps = (state: RootState) => ({
-  consumerGroups: selectAll(state),
-  orderBy: getConsumerGroupsOrderBy(state),
-  totalPages: getConsumerGroupsTotalPages(state),
-});
-
-export default connect(mapStateToProps)(List);

+ 0 - 60
kafka-ui-react-app/src/components/ConsumerGroups/List/__test__/List.spec.tsx

@@ -1,60 +0,0 @@
-import React from 'react';
-import List, { Props } from 'components/ConsumerGroups/List/List';
-import { screen } from '@testing-library/react';
-import { render } from 'lib/testHelpers';
-import { consumerGroups as consumerGroupMock } from 'redux/reducers/consumerGroups/__test__/fixtures';
-import { clusterConsumerGroupDetailsPath } from 'lib/paths';
-import userEvent from '@testing-library/user-event';
-import ListContainer from 'components/ConsumerGroups/List/ListContainer';
-
-const mockedUsedNavigate = jest.fn();
-
-jest.mock('react-router-dom', () => ({
-  ...jest.requireActual('react-router-dom'),
-  useNavigate: () => mockedUsedNavigate,
-}));
-
-describe('ListContainer', () => {
-  it('renders correctly', () => {
-    render(<ListContainer />);
-    expect(screen.getByRole('table')).toBeInTheDocument();
-  });
-});
-
-describe('List', () => {
-  const renderComponent = (props: Partial<Props> = {}) => {
-    const { consumerGroups, totalPages } = props;
-    return render(
-      <List
-        consumerGroups={consumerGroups || []}
-        totalPages={totalPages || 1}
-      />
-    );
-  };
-
-  it('renders empty table', () => {
-    renderComponent();
-    expect(screen.getByRole('table')).toBeInTheDocument();
-    expect(
-      screen.getByText('No active consumer groups found')
-    ).toBeInTheDocument();
-  });
-
-  describe('consumerGroups are fetched', () => {
-    beforeEach(() => renderComponent({ consumerGroups: consumerGroupMock }));
-
-    it('renders all rows with consumers', () => {
-      expect(screen.getByText('groupId1')).toBeInTheDocument();
-      expect(screen.getByText('groupId2')).toBeInTheDocument();
-    });
-
-    it('handles onRowClick', async () => {
-      const row = screen.getByRole('row', { name: 'groupId1 0 1 1' });
-      expect(row).toBeInTheDocument();
-      await userEvent.click(row);
-      expect(mockedUsedNavigate).toHaveBeenCalledWith(
-        clusterConsumerGroupDetailsPath(':clusterName', 'groupId1')
-      );
-    });
-  });
-});

+ 2 - 4
kafka-ui-react-app/src/components/ConsumerGroups/__test__/ConsumerGroups.spec.tsx

@@ -11,9 +11,7 @@ import { render, WithRoute } from 'lib/testHelpers';
 
 const clusterName = 'cluster1';
 
-jest.mock('components/ConsumerGroups/List/ListContainer', () => () => (
-  <div>ListContainerMock</div>
-));
+jest.mock('components/ConsumerGroups/List', () => () => <div>ListPage</div>);
 jest.mock('components/ConsumerGroups/Details/Details', () => () => (
   <div>DetailsMock</div>
 ));
@@ -35,7 +33,7 @@ const renderComponent = (path?: string) =>
 describe('ConsumerGroups', () => {
   it('renders ListContainer', async () => {
     renderComponent();
-    expect(screen.getByText('ListContainerMock')).toBeInTheDocument();
+    expect(screen.getByText('ListPage')).toBeInTheDocument();
   });
   it('renders ResetOffsets', async () => {
     renderComponent(

+ 101 - 7
kafka-ui-react-app/src/components/KsqlDb/KsqlDb.tsx

@@ -1,15 +1,109 @@
 import React from 'react';
-import { Route, Routes } from 'react-router-dom';
-import { clusterKsqlDbQueryRelativePath } from 'lib/paths';
-import List from 'components/KsqlDb/List/List';
 import Query from 'components/KsqlDb/Query/Query';
+import useAppParams from 'lib/hooks/useAppParams';
+import * as Metrics from 'components/common/Metrics';
+import {
+  clusterKsqlDbQueryRelativePath,
+  clusterKsqlDbStreamsPath,
+  clusterKsqlDbStreamsRelativePath,
+  clusterKsqlDbTablesPath,
+  clusterKsqlDbTablesRelativePath,
+  ClusterNameRoute,
+} from 'lib/paths';
+import PageHeading from 'components/common/PageHeading/PageHeading';
+import { ActionButton } from 'components/common/ActionComponent';
+import Navbar from 'components/common/Navigation/Navbar.styled';
+import { Navigate, NavLink, Route, Routes } from 'react-router-dom';
+import { Action, ResourceType } from 'generated-sources';
+import { useKsqlkDb } from 'lib/hooks/api/ksqlDb';
+import 'ace-builds/src-noconflict/ace';
+
+import TableView from './TableView';
 
 const KsqlDb: React.FC = () => {
+  const { clusterName } = useAppParams<ClusterNameRoute>();
+
+  const [tables, streams] = useKsqlkDb(clusterName);
+
+  const isFetching = tables.isFetching || streams.isFetching;
+
   return (
-    <Routes>
-      <Route path="/*" element={<List />} />
-      <Route path={clusterKsqlDbQueryRelativePath} element={<Query />} />
-    </Routes>
+    <>
+      <PageHeading text="KSQL DB">
+        <ActionButton
+          to={clusterKsqlDbQueryRelativePath}
+          buttonType="primary"
+          buttonSize="M"
+          permission={{
+            resource: ResourceType.KSQL,
+            action: Action.EXECUTE,
+          }}
+        >
+          Execute KSQL Request
+        </ActionButton>
+      </PageHeading>
+      <Metrics.Wrapper>
+        <Metrics.Section>
+          <Metrics.Indicator
+            label="Tables"
+            title="Tables"
+            fetching={isFetching}
+          >
+            {tables.isSuccess ? tables.data.length : '-'}
+          </Metrics.Indicator>
+          <Metrics.Indicator
+            label="Streams"
+            title="Streams"
+            fetching={isFetching}
+          >
+            {streams.isSuccess ? streams.data.length : '-'}
+          </Metrics.Indicator>
+        </Metrics.Section>
+      </Metrics.Wrapper>
+      <div>
+        <Navbar role="navigation">
+          <NavLink
+            to={clusterKsqlDbTablesPath(clusterName)}
+            className={({ isActive }) => (isActive ? 'is-active' : '')}
+            end
+          >
+            Tables
+          </NavLink>
+          <NavLink
+            to={clusterKsqlDbStreamsPath(clusterName)}
+            className={({ isActive }) => (isActive ? 'is-active' : '')}
+            end
+          >
+            Streams
+          </NavLink>
+        </Navbar>
+        <Routes>
+          <Route
+            index
+            element={<Navigate to={clusterKsqlDbTablesRelativePath} />}
+          />
+          <Route
+            path={clusterKsqlDbTablesRelativePath}
+            element={
+              <TableView
+                fetching={tables.isFetching}
+                rows={tables.data || []}
+              />
+            }
+          />
+          <Route
+            path={clusterKsqlDbStreamsRelativePath}
+            element={
+              <TableView
+                fetching={streams.isFetching}
+                rows={streams.data || []}
+              />
+            }
+          />
+          <Route path={clusterKsqlDbQueryRelativePath} element={<Query />} />
+        </Routes>
+      </div>
+    </>
   );
 };
 

+ 0 - 58
kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/KsqlDbItem.tsx

@@ -1,58 +0,0 @@
-import React from 'react';
-import PageLoader from 'components/common/PageLoader/PageLoader';
-import { KsqlStreamDescription, KsqlTableDescription } from 'generated-sources';
-import { ksqlRowData } from 'components/KsqlDb/List/KsqlDbItem/utils/ksqlRowData';
-import Table from 'components/common/NewTable';
-import { ColumnDef } from '@tanstack/react-table';
-
-export enum KsqlDbItemType {
-  Tables = 'tables',
-  Streams = 'streams',
-}
-
-interface RowsType {
-  tables: KsqlTableDescription[];
-  streams: KsqlStreamDescription[];
-}
-export interface KsqlDbItemProps {
-  type: KsqlDbItemType;
-  fetching: boolean;
-  rows: RowsType;
-}
-
-export interface KsqlTableState {
-  name: string;
-  topic: string;
-  keyFormat: string;
-  valueFormat: string;
-  isWindowed: string;
-}
-
-const KsqlDbItem: React.FC<KsqlDbItemProps> = ({ type, fetching, rows }) => {
-  const preparedRows = rows[type]?.map(ksqlRowData) || [];
-
-  const columns = React.useMemo<ColumnDef<KsqlTableState>[]>(
-    () => [
-      { header: 'Name', accessorKey: 'name' },
-      { header: 'Topic', accessorKey: 'topic' },
-      { header: 'Key Format', accessorKey: 'keyFormat' },
-      { header: 'Value Format', accessorKey: 'valueFormat' },
-      { header: 'Is Windowed', accessorKey: 'isWindowed' },
-    ],
-    []
-  );
-
-  if (fetching) {
-    return <PageLoader />;
-  }
-  return (
-    <Table
-      data={preparedRows}
-      columns={columns}
-      emptyMessage="No tables or streams found"
-      enableSorting={false}
-    />
-  );
-};
-
-export default KsqlDbItem;

+ 0 - 59
kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/__test__/KsqlDbItem.spec.tsx

@@ -1,59 +0,0 @@
-import React from 'react';
-import { render, WithRoute } from 'lib/testHelpers';
-import { clusterKsqlDbTablesPath } from 'lib/paths';
-import KsqlDbItem, {
-  KsqlDbItemProps,
-  KsqlDbItemType,
-} from 'components/KsqlDb/List/KsqlDbItem/KsqlDbItem';
-import { screen } from '@testing-library/dom';
-import { fetchKsqlDbTablesPayload } from 'redux/reducers/ksqlDb/__test__/fixtures';
-
-describe('KsqlDbItem', () => {
-  const tablesPathname = clusterKsqlDbTablesPath();
-  const renderComponent = (props: Partial<KsqlDbItemProps> = {}) => {
-    render(
-      <WithRoute path={tablesPathname}>
-        <KsqlDbItem
-          type={KsqlDbItemType.Tables}
-          fetching={false}
-          rows={{ tables: [], streams: [] }}
-          {...props}
-        />
-      </WithRoute>,
-      {
-        initialEntries: [clusterKsqlDbTablesPath()],
-      }
-    );
-  };
-
-  it('renders progressbar when fetching tables and streams', () => {
-    renderComponent({ fetching: true });
-    expect(screen.getByRole('progressbar')).toBeInTheDocument();
-  });
-
-  it('show no text if no data found', () => {
-    renderComponent({});
-    expect(screen.getByText('No tables or streams found')).toBeInTheDocument();
-  });
-
-  it('renders with tables', () => {
-    renderComponent({
-      rows: {
-        tables: fetchKsqlDbTablesPayload.tables,
-        streams: [],
-      },
-    });
-
-    expect(screen.getByRole('table').querySelectorAll('td')).toHaveLength(10);
-  });
-  it('renders with streams', () => {
-    renderComponent({
-      type: KsqlDbItemType.Streams,
-      rows: {
-        tables: [],
-        streams: fetchKsqlDbTablesPayload.streams,
-      },
-    });
-    expect(screen.getByRole('table').querySelectorAll('td')).toHaveLength(10);
-  });
-});

+ 0 - 12
kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/utils/ksqlRowData.ts

@@ -1,12 +0,0 @@
-import { KsqlDescription } from 'redux/interfaces/ksqlDb';
-import { KsqlTableState } from 'components/KsqlDb/List/KsqlDbItem/KsqlDbItem';
-
-export const ksqlRowData = (data: KsqlDescription): KsqlTableState => {
-  return {
-    name: data.name || '',
-    topic: data.topic || '',
-    keyFormat: data.keyFormat || '',
-    valueFormat: data.valueFormat || '',
-    isWindowed: 'isWindowed' in data ? String(data.isWindowed) : '-',
-  };
-};

+ 0 - 111
kafka-ui-react-app/src/components/KsqlDb/List/List.tsx

@@ -1,111 +0,0 @@
-import React, { FC } from 'react';
-import useAppParams from 'lib/hooks/useAppParams';
-import * as Metrics from 'components/common/Metrics';
-import { getKsqlDbTables } from 'redux/reducers/ksqlDb/selectors';
-import {
-  clusterKsqlDbQueryRelativePath,
-  clusterKsqlDbStreamsPath,
-  clusterKsqlDbStreamsRelativePath,
-  clusterKsqlDbTablesPath,
-  clusterKsqlDbTablesRelativePath,
-  ClusterNameRoute,
-} from 'lib/paths';
-import PageHeading from 'components/common/PageHeading/PageHeading';
-import { ActionButton } from 'components/common/ActionComponent';
-import Navbar from 'components/common/Navigation/Navbar.styled';
-import { Navigate, NavLink, Route, Routes } from 'react-router-dom';
-import { fetchKsqlDbTables } from 'redux/reducers/ksqlDb/ksqlDbSlice';
-import { useAppDispatch, useAppSelector } from 'lib/hooks/redux';
-import { Action, ResourceType } from 'generated-sources';
-
-import KsqlDbItem, { KsqlDbItemType } from './KsqlDbItem/KsqlDbItem';
-
-const List: FC = () => {
-  const { clusterName } = useAppParams<ClusterNameRoute>();
-  const dispatch = useAppDispatch();
-
-  const { rows, fetching, tablesCount, streamsCount } =
-    useAppSelector(getKsqlDbTables);
-
-  React.useEffect(() => {
-    dispatch(fetchKsqlDbTables(clusterName));
-  }, [clusterName, dispatch]);
-
-  return (
-    <>
-      <PageHeading text="KSQL DB">
-        <ActionButton
-          to={clusterKsqlDbQueryRelativePath}
-          buttonType="primary"
-          buttonSize="M"
-          permission={{
-            resource: ResourceType.KSQL,
-            action: Action.EXECUTE,
-          }}
-        >
-          Execute KSQL Request
-        </ActionButton>
-      </PageHeading>
-      <Metrics.Wrapper>
-        <Metrics.Section>
-          <Metrics.Indicator label="Tables" title="Tables" fetching={fetching}>
-            {tablesCount}
-          </Metrics.Indicator>
-          <Metrics.Indicator
-            label="Streams"
-            title="Streams"
-            fetching={fetching}
-          >
-            {streamsCount}
-          </Metrics.Indicator>
-        </Metrics.Section>
-      </Metrics.Wrapper>
-      <div>
-        <Navbar role="navigation">
-          <NavLink
-            to={clusterKsqlDbTablesPath(clusterName)}
-            className={({ isActive }) => (isActive ? 'is-active' : '')}
-            end
-          >
-            Tables
-          </NavLink>
-          <NavLink
-            to={clusterKsqlDbStreamsPath(clusterName)}
-            className={({ isActive }) => (isActive ? 'is-active' : '')}
-            end
-          >
-            Streams
-          </NavLink>
-        </Navbar>
-        <Routes>
-          <Route
-            index
-            element={<Navigate to={clusterKsqlDbTablesRelativePath} />}
-          />
-          <Route
-            path={clusterKsqlDbTablesRelativePath}
-            element={
-              <KsqlDbItem
-                type={KsqlDbItemType.Tables}
-                fetching={fetching}
-                rows={rows}
-              />
-            }
-          />
-          <Route
-            path={clusterKsqlDbStreamsRelativePath}
-            element={
-              <KsqlDbItem
-                type={KsqlDbItemType.Streams}
-                fetching={fetching}
-                rows={rows}
-              />
-            }
-          />
-        </Routes>
-      </div>
-    </>
-  );
-};
-
-export default List;

+ 0 - 22
kafka-ui-react-app/src/components/KsqlDb/List/__test__/List.spec.tsx

@@ -1,22 +0,0 @@
-import React from 'react';
-import List from 'components/KsqlDb/List/List';
-import { render } from 'lib/testHelpers';
-import fetchMock from 'fetch-mock';
-import { screen } from '@testing-library/dom';
-import { act } from '@testing-library/react';
-
-describe('KsqlDb List', () => {
-  const renderComponent = async () => {
-    await act(() => {
-      render(<List />);
-    });
-  };
-  afterEach(() => fetchMock.reset());
-  it('renders List component with Tables and Streams tabs', async () => {
-    await renderComponent();
-    const Tables = screen.getByTitle('Tables');
-    const Streams = screen.getByTitle('Streams');
-    expect(Tables).toBeInTheDocument();
-    expect(Streams).toBeInTheDocument();
-  });
-});

+ 0 - 9
kafka-ui-react-app/src/components/KsqlDb/Query/Query.styled.ts

@@ -1,9 +0,0 @@
-import PageLoader from 'components/common/PageLoader/PageLoader';
-import styled from 'styled-components';
-
-export const ContinuousLoader = styled(PageLoader)`
-  & > div {
-    transform: scale(0.5);
-    padding-top: 0;
-  }
-`;

+ 36 - 205
kafka-ui-react-app/src/components/KsqlDb/Query/Query.tsx

@@ -1,223 +1,54 @@
-import React, { useCallback, useEffect, FC, useState } from 'react';
+import React from 'react';
 import useAppParams from 'lib/hooks/useAppParams';
 import TableRenderer from 'components/KsqlDb/Query/renderer/TableRenderer/TableRenderer';
+import { ClusterNameRoute } from 'lib/paths';
 import {
-  executeKsql,
-  resetExecutionResult,
-} from 'redux/reducers/ksqlDb/ksqlDbSlice';
-import { getKsqlExecution } from 'redux/reducers/ksqlDb/selectors';
-import { BASE_PARAMS } from 'lib/constants';
-import { KsqlResponse, KsqlTableResponse } from 'generated-sources';
-import { clusterKsqlDbPath, ClusterNameRoute } from 'lib/paths';
-import { useAppDispatch, useAppSelector } from 'lib/hooks/redux';
-import { showAlert, showSuccessAlert } from 'lib/errorHandling';
-import PageHeading from 'components/common/PageHeading/PageHeading';
+  useExecuteKsqlkDbQueryMutation,
+  useKsqlkDbSSE,
+} from 'lib/hooks/api/ksqlDb';
 
 import type { FormValues } from './QueryForm/QueryForm';
-import * as S from './Query.styled';
 import QueryForm from './QueryForm/QueryForm';
 
-export const getFormattedErrorFromTableData = (
-  responseValues: KsqlTableResponse['values']
-): { title: string; message: string } => {
-  // We expect someting like that
-  // [[
-  //   "@type",
-  //   "error_code",
-  //   "message",
-  //   "statementText"?,
-  //   "entities"?
-  // ]],
-  // or
-  // [["message"]]
-
-  if (!responseValues || !responseValues.length) {
-    return {
-      title: 'Unknown error',
-      message: 'Recieved empty response',
-    };
-  }
-
-  let title = '';
-  let message = '';
-  if (responseValues[0].length < 2) {
-    const [messageText] = responseValues[0];
-    title = messageText;
-  } else {
-    const [type, errorCode, messageText, statementText, entities] =
-      responseValues[0];
-    title = `[Error #${errorCode}] ${type}`;
-    message =
-      (entities?.length ? `[${entities.join(', ')}] ` : '') +
-      (statementText ? `"${statementText}" ` : '') +
-      messageText;
-  }
-
-  return {
-    title,
-    message,
-  };
-};
-
-const Query: FC = () => {
+const Query = () => {
   const { clusterName } = useAppParams<ClusterNameRoute>();
-
-  const sseRef = React.useRef<{ sse: EventSource | null; isOpen: boolean }>({
-    sse: null,
-    isOpen: false,
-  });
-  const [fetching, setFetching] = useState(false);
-  const dispatch = useAppDispatch();
-
-  const { executionResult } = useAppSelector(getKsqlExecution);
-  const [KSQLTable, setKSQLTable] = useState<KsqlTableResponse | null>(null);
-
-  const reset = useCallback(() => {
-    dispatch(resetExecutionResult());
-  }, [dispatch]);
-
-  useEffect(() => {
-    return reset;
-  }, [reset]);
-
-  const destroySSE = () => {
-    if (sseRef.current?.sse) {
-      sseRef.current.sse.close();
-      setFetching(false);
-      sseRef.current.sse = null;
-      sseRef.current.isOpen = false;
-    }
+  const executeQuery = useExecuteKsqlkDbQueryMutation();
+  const [pipeId, setPipeId] = React.useState<string | false>(false);
+
+  const sse = useKsqlkDbSSE({ clusterName, pipeId });
+
+  const isFetching = executeQuery.isLoading || sse.isFetching;
+
+  const submitHandler = async (values: FormValues) => {
+    const filtered = values.streamsProperties.filter(({ key }) => key != null);
+    const streamsProperties = filtered.reduce<Record<string, string>>(
+      (acc, current) => ({ ...acc, [current.key]: current.value }),
+      {}
+    );
+    await executeQuery.mutateAsync(
+      {
+        clusterName,
+        ksqlCommandV2: {
+          ...values,
+          streamsProperties:
+            values.streamsProperties[0].key !== ''
+              ? JSON.parse(JSON.stringify(streamsProperties))
+              : undefined,
+        },
+      },
+      { onSuccess: (data) => setPipeId(data.pipeId) }
+    );
   };
 
-  const handleSSECancel = useCallback(() => {
-    reset();
-    destroySSE();
-  }, [reset]);
-
-  const createSSE = useCallback(
-    (pipeId: string) => {
-      const url = `${BASE_PARAMS.basePath}/api/clusters/${clusterName}/ksql/response?pipeId=${pipeId}`;
-      const sse = new EventSource(url);
-      sseRef.current.sse = sse;
-      setFetching(true);
-
-      sse.onopen = () => {
-        sseRef.current.isOpen = true;
-      };
-
-      sse.onmessage = ({ data }) => {
-        const { table }: KsqlResponse = JSON.parse(data);
-        if (table) {
-          switch (table?.header) {
-            case 'Execution error': {
-              const { title, message } = getFormattedErrorFromTableData(
-                table.values
-              );
-              const id = `${url}-executionError`;
-              showAlert('error', { id, title, message });
-              break;
-            }
-            case 'Schema': {
-              setKSQLTable(table);
-              break;
-            }
-            case 'Row': {
-              setKSQLTable((PrevKSQLTable) => {
-                return {
-                  header: PrevKSQLTable?.header,
-                  columnNames: PrevKSQLTable?.columnNames,
-                  values: [
-                    ...(PrevKSQLTable?.values || []),
-                    ...(table?.values || []),
-                  ],
-                };
-              });
-              break;
-            }
-            case 'Query Result': {
-              const id = `${url}-querySuccess`;
-              showSuccessAlert({ id, title: 'Query succeed', message: '' });
-              break;
-            }
-            case 'Source Description':
-            case 'properties':
-            default: {
-              setKSQLTable(table);
-              break;
-            }
-          }
-        }
-        return sse;
-      };
-
-      sse.onerror = () => {
-        // if it's open - we know that server responded without opening SSE
-        if (!sseRef.current.isOpen) {
-          showAlert('error', {
-            id: `${url}-connectionClosedError`,
-            title: '',
-            message: 'SSE connection closed',
-          });
-        }
-        destroySSE();
-      };
-    },
-    [clusterName, dispatch]
-  );
-
-  const submitHandler = useCallback(
-    (values: FormValues) => {
-      const filteredProperties = values.streamsProperties.filter(
-        (property) => property.key != null
-      );
-      const streamsProperties = filteredProperties.reduce(
-        (acc, current) => ({
-          ...acc,
-          [current.key as keyof string]: current.value,
-        }),
-        {} as { [key: string]: string }
-      );
-      setFetching(true);
-      dispatch(
-        executeKsql({
-          clusterName,
-          ksqlCommandV2: {
-            ...values,
-            streamsProperties:
-              values.streamsProperties[0].key !== ''
-                ? JSON.parse(JSON.stringify(streamsProperties))
-                : undefined,
-          },
-        })
-      );
-    },
-    [dispatch, clusterName]
-  );
-  useEffect(() => {
-    if (executionResult?.pipeId) {
-      createSSE(executionResult.pipeId);
-    }
-    return () => {
-      destroySSE();
-    };
-  }, [createSSE, executionResult]);
-
   return (
     <>
-      <PageHeading
-        text="Query"
-        backText="KSQL DB"
-        backTo={clusterKsqlDbPath(clusterName)}
-      />
       <QueryForm
-        fetching={fetching}
-        hasResults={!!KSQLTable}
-        handleClearResults={() => setKSQLTable(null)}
-        handleSSECancel={handleSSECancel}
+        fetching={isFetching}
+        hasResults={!!sse.data && !!pipeId}
+        resetResults={() => setPipeId(false)}
         submitHandler={submitHandler}
       />
-      {KSQLTable && <TableRenderer table={KSQLTable} />}
-      {fetching && <S.ContinuousLoader />}
+      {pipeId && !!sse.data && <TableRenderer table={sse.data} />}
     </>
   );
 };

+ 12 - 51
kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.styled.ts

@@ -6,13 +6,12 @@ export const QueryWrapper = styled.div`
 `;
 
 export const KSQLInputsWrapper = styled.div`
-  width: 100%;
   display: flex;
   gap: 24px;
-
   padding-bottom: 16px;
-  & > div {
-    flex-grow: 1;
+
+  @media screen and (max-width: 769px) {
+    flex-direction: column;
   }
 `;
 
@@ -22,61 +21,23 @@ export const KSQLInputHeader = styled.div`
   color: ${({ theme }) => theme.default.color.normal};
 `;
 
-export const KSQLButtons = styled.div`
-  display: flex;
-  gap: 16px;
-`;
-
-export const StreamPropertiesContainer = styled.label`
-  display: flex;
-  flex-direction: column;
-  gap: 10px;
-  width: 50%;
-  color: ${({ theme }) => theme.default.color.normal};
-`;
-
 export const InputsContainer = styled.div`
-  overflow: hidden;
-  width: 100%;
-  display: flex;
-  justify-content: center;
+  display: grid;
+  grid-template-columns: 1fr 1fr 30px;
+  align-items: center;
   gap: 10px;
 `;
 
-export const StreamPropertiesInputWrapper = styled.div`
-  & {
-    width: 100%;
-  }
-  & > input {
-    width: 100%;
-    height: 40px;
-    border: 1px solid grey;
-    &:focus {
-      outline: none;
-      border-color: ${({ theme }) => theme.input.borderColor.focus};
-      &::placeholder {
-        color: transparent;
-      }
-    }
-    border-radius: 4px;
-    font-size: 16px;
-    padding-left: 15px;
-    background-color: ${({ theme }) => theme.input.backgroundColor.normal};
-    color: ${({ theme }) => theme.input.color.normal};
-  }
-`;
-
-export const DeleteButtonWrapper = styled.div`
-  min-height: 32px;
+export const Fieldset = styled.fieldset`
   display: flex;
+  flex: 1;
   flex-direction: column;
-  align-items: center;
-  justify-self: flex-start;
-  margin-top: 10px;
+  gap: 8px;
 `;
 
-export const Fieldset = styled.fieldset`
-  width: 50%;
+export const ButtonsContainer = styled.div`
+  display: flex;
+  gap: 8px;
 `;
 
 export const SQLEditor = styled(BaseSQLEditor)(

+ 139 - 153
kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.tsx

@@ -1,22 +1,27 @@
-import React, { useCallback, useRef } from 'react';
+import React from 'react';
 import { FormError } from 'components/common/Input/Input.styled';
 import { ErrorMessage } from '@hookform/error-message';
-import { useForm, Controller, useFieldArray } from 'react-hook-form';
+import {
+  useForm,
+  Controller,
+  useFieldArray,
+  FormProvider,
+} from 'react-hook-form';
 import { Button } from 'components/common/Button/Button';
 import IconButtonWrapper from 'components/common/Icons/IconButtonWrapper';
 import CloseIcon from 'components/common/Icons/CloseIcon';
 import { yupResolver } from '@hookform/resolvers/yup';
 import yup from 'lib/yupExtended';
 import PlusIcon from 'components/common/Icons/PlusIcon';
-import ReactAce from 'react-ace/lib/ace';
+import ReactAce from 'react-ace';
+import Input from 'components/common/Input/Input';
 
 import * as S from './QueryForm.styled';
 
-export interface Props {
+interface QueryFormProps {
   fetching: boolean;
   hasResults: boolean;
-  handleClearResults: () => void;
-  handleSSECancel: () => void;
+  resetResults: () => void;
   submitHandler: (values: FormValues) => void;
 }
 type StreamsPropertiesType = {
@@ -37,20 +42,13 @@ const validationSchema = yup.object({
   streamsProperties: yup.array().of(streamsPropertiesSchema),
 });
 
-const QueryForm: React.FC<Props> = ({
+const QueryForm: React.FC<QueryFormProps> = ({
   fetching,
   hasResults,
-  handleClearResults,
-  handleSSECancel,
   submitHandler,
+  resetResults,
 }) => {
-  const {
-    handleSubmit,
-    setValue,
-    getValues,
-    control,
-    formState: { errors },
-  } = useForm<FormValues>({
+  const methods = useForm<FormValues>({
     mode: 'onTouched',
     resolver: yupResolver(validationSchema),
     defaultValues: {
@@ -58,7 +56,16 @@ const QueryForm: React.FC<Props> = ({
       streamsProperties: [{ key: '', value: '' }],
     },
   });
-  const { fields, append, remove } = useFieldArray<
+
+  const {
+    handleSubmit,
+    setValue,
+    control,
+    watch,
+    formState: { errors, isDirty },
+  } = methods;
+
+  const { fields, append, remove, update } = useFieldArray<
     FormValues,
     'streamsProperties'
   >({
@@ -66,17 +73,24 @@ const QueryForm: React.FC<Props> = ({
     name: 'streamsProperties',
   });
 
-  const handleAddNewProperty = useCallback(() => {
-    if (
-      getValues().streamsProperties.every((prop) => {
-        return prop.key;
-      })
-    ) {
-      append({ key: '', value: '' });
+  const watchStreamProps = watch('streamsProperties');
+
+  const appendProperty = () => {
+    append({ key: '', value: '' });
+  };
+  const removeProperty = (index: number) => () => {
+    if (fields.length === 1) {
+      update(index, { key: '', value: '' });
+      return;
     }
-  }, []);
 
-  const inputRef = useRef<ReactAce>(null);
+    remove(index);
+  };
+
+  const isAppendDisabled =
+    fetching || !!watchStreamProps.find((field) => !field.key);
+
+  const inputRef = React.useRef<ReactAce>(null);
 
   const handleFocus = () => {
     // eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -87,145 +101,117 @@ const QueryForm: React.FC<Props> = ({
     }
   };
 
+  const handleClear = () => {
+    handleFocus();
+    resetResults();
+  };
+
   return (
-    <S.QueryWrapper>
-      <form onSubmit={handleSubmit(submitHandler)}>
-        <S.KSQLInputsWrapper>
-          <S.Fieldset aria-labelledby="ksqlLabel">
-            <S.KSQLInputHeader>
-              <label id="ksqlLabel">KSQL</label>
-              <Button
-                onClick={() => setValue('ksql', '')}
-                buttonType="primary"
-                buttonSize="S"
-                isInverted
-              >
-                Clear
-              </Button>
-            </S.KSQLInputHeader>
-            <Controller
-              control={control}
-              name="ksql"
-              render={({ field }) => (
-                <S.SQLEditor
-                  {...field}
-                  commands={[
-                    {
-                      // commands is array of key bindings.
-                      // name for the key binding.
-                      name: 'commandName',
-                      // key combination used for the command.
-                      bindKey: { win: 'Ctrl-Enter', mac: 'Command-Enter' },
-                      // function to execute when keys are pressed.
-                      exec: () => {
-                        handleSubmit(submitHandler)();
+    <FormProvider {...methods}>
+      <S.QueryWrapper>
+        <form onSubmit={handleSubmit(submitHandler)}>
+          <S.KSQLInputsWrapper>
+            <S.Fieldset>
+              <S.KSQLInputHeader>
+                <label id="ksqlLabel">KSQL</label>
+                <Button
+                  onClick={() => setValue('ksql', '')}
+                  buttonType="primary"
+                  buttonSize="S"
+                  isInverted
+                >
+                  Clear
+                </Button>
+              </S.KSQLInputHeader>
+              <Controller
+                control={control}
+                name="ksql"
+                render={({ field }) => (
+                  <S.SQLEditor
+                    {...field}
+                    commands={[
+                      {
+                        // commands is array of key bindings.
+                        // name for the key binding.
+                        name: 'commandName',
+                        // key combination used for the command.
+                        bindKey: { win: 'Ctrl-Enter', mac: 'Command-Enter' },
+                        // function to execute when keys are pressed.
+                        exec: () => {
+                          handleSubmit(submitHandler)();
+                        },
                       },
-                    },
-                  ]}
-                  readOnly={fetching}
-                  ref={inputRef}
-                />
-              )}
-            />
-            <FormError>
-              <ErrorMessage errors={errors} name="ksql" />
-            </FormError>
-          </S.Fieldset>
-
-          <S.StreamPropertiesContainer>
-            Stream properties:
-            {fields.map((item, index) => (
-              <S.InputsContainer key={item.id}>
-                <S.StreamPropertiesInputWrapper>
-                  <Controller
-                    control={control}
+                    ]}
+                    readOnly={fetching}
+                    ref={inputRef}
+                  />
+                )}
+              />
+              <FormError>
+                <ErrorMessage errors={errors} name="ksql" />
+              </FormError>
+            </S.Fieldset>
+
+            <S.Fieldset>
+              Stream properties:
+              {fields.map((field, index) => (
+                <S.InputsContainer key={field.id}>
+                  <Input
                     name={`streamsProperties.${index}.key`}
-                    render={({ field }) => (
-                      <input
-                        {...field}
-                        placeholder="Key"
-                        aria-label="key"
-                        type="text"
-                        autoComplete="off"
-                      />
-                    )}
+                    placeholder="Key"
+                    type="text"
+                    autoComplete="off"
+                    withError
                   />
-                  <FormError>
-                    <ErrorMessage
-                      errors={errors}
-                      name={`streamsProperties.${index}.key`}
-                    />
-                  </FormError>
-                </S.StreamPropertiesInputWrapper>
-                <S.StreamPropertiesInputWrapper>
-                  <Controller
-                    control={control}
+                  <Input
                     name={`streamsProperties.${index}.value`}
-                    render={({ field }) => (
-                      <input
-                        {...field}
-                        placeholder="Value"
-                        aria-label="value"
-                        type="text"
-                        autoComplete="off"
-                      />
-                    )}
+                    placeholder="Value"
+                    type="text"
+                    autoComplete="off"
+                    withError
                   />
-                  <FormError>
-                    <ErrorMessage
-                      errors={errors}
-                      name={`streamsProperties.${index}.value`}
-                    />
-                  </FormError>
-                </S.StreamPropertiesInputWrapper>
-
-                <S.DeleteButtonWrapper onClick={() => remove(index)}>
-                  <IconButtonWrapper aria-label="deleteProperty">
+                  <IconButtonWrapper
+                    aria-label="deleteProperty"
+                    onClick={removeProperty(index)}
+                  >
                     <CloseIcon aria-hidden />
                   </IconButtonWrapper>
-                </S.DeleteButtonWrapper>
-              </S.InputsContainer>
-            ))}
+                </S.InputsContainer>
+              ))}
+              <Button
+                type="button"
+                buttonSize="M"
+                buttonType="secondary"
+                disabled={isAppendDisabled}
+                onClick={appendProperty}
+              >
+                <PlusIcon />
+                Add Stream Property
+              </Button>
+            </S.Fieldset>
+          </S.KSQLInputsWrapper>
+          <S.ButtonsContainer>
             <Button
-              type="button"
-              buttonSize="M"
               buttonType="secondary"
-              onClick={handleAddNewProperty}
+              buttonSize="M"
+              disabled={fetching || !isDirty || !hasResults}
+              onClick={handleClear}
+            >
+              Clear results
+            </Button>
+            <Button
+              buttonType="primary"
+              buttonSize="M"
+              type="submit"
+              disabled={fetching}
+              onClick={handleFocus}
             >
-              <PlusIcon />
-              Add Stream Property
+              Execute
             </Button>
-          </S.StreamPropertiesContainer>
-        </S.KSQLInputsWrapper>
-        <S.KSQLButtons>
-          <Button
-            buttonType="primary"
-            buttonSize="M"
-            type="submit"
-            disabled={fetching}
-            onClick={handleFocus}
-          >
-            Execute
-          </Button>
-          <Button
-            buttonType="secondary"
-            buttonSize="M"
-            disabled={!fetching}
-            onClick={handleSSECancel}
-          >
-            Stop query
-          </Button>
-          <Button
-            buttonType="secondary"
-            buttonSize="M"
-            disabled={fetching || !hasResults}
-            onClick={handleClearResults}
-          >
-            Clear results
-          </Button>
-        </S.KSQLButtons>
-      </form>
-    </S.QueryWrapper>
+          </S.ButtonsContainer>
+        </form>
+      </S.QueryWrapper>
+    </FormProvider>
   );
 };
 

+ 0 - 189
kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/__test__/QueryForm.spec.tsx

@@ -1,189 +0,0 @@
-import { render } from 'lib/testHelpers';
-import React from 'react';
-import QueryForm, { Props } from 'components/KsqlDb/Query/QueryForm/QueryForm';
-import { screen, waitFor, within } from '@testing-library/dom';
-import userEvent from '@testing-library/user-event';
-
-const renderComponent = (props: Props) => render(<QueryForm {...props} />);
-
-describe('QueryForm', () => {
-  it('renders', () => {
-    renderComponent({
-      fetching: false,
-      hasResults: false,
-      handleClearResults: jest.fn(),
-      handleSSECancel: jest.fn(),
-      submitHandler: jest.fn(),
-    });
-
-    const KSQLBlock = screen.getByLabelText('KSQL');
-    expect(KSQLBlock).toBeInTheDocument();
-    expect(within(KSQLBlock).getByText('KSQL')).toBeInTheDocument();
-    expect(
-      within(KSQLBlock).getByRole('button', { name: 'Clear' })
-    ).toBeInTheDocument();
-    // Represents SQL editor
-    expect(within(KSQLBlock).getByRole('textbox')).toBeInTheDocument();
-
-    const streamPropertiesBlock = screen.getByRole('textbox', { name: 'key' });
-    expect(streamPropertiesBlock).toBeInTheDocument();
-    expect(screen.getByText('Stream properties:')).toBeInTheDocument();
-    expect(screen.getByRole('button', { name: 'Clear' })).toBeInTheDocument();
-    expect(screen.queryAllByRole('textbox')[0]).toBeInTheDocument();
-
-    // Form controls
-    expect(screen.getByRole('button', { name: 'Execute' })).toBeInTheDocument();
-    expect(screen.getByRole('button', { name: 'Execute' })).toBeEnabled();
-    expect(
-      screen.getByRole('button', { name: 'Stop query' })
-    ).toBeInTheDocument();
-    expect(screen.getByRole('button', { name: 'Stop query' })).toBeDisabled();
-    expect(
-      screen.getByRole('button', { name: 'Clear results' })
-    ).toBeInTheDocument();
-    expect(
-      screen.getByRole('button', { name: 'Clear results' })
-    ).toBeDisabled();
-  });
-
-  it('renders error with empty input', async () => {
-    const submitFn = jest.fn();
-    renderComponent({
-      fetching: false,
-      hasResults: false,
-      handleClearResults: jest.fn(),
-      handleSSECancel: jest.fn(),
-      submitHandler: submitFn,
-    });
-
-    await userEvent.click(screen.getByRole('button', { name: 'Execute' }));
-
-    await waitFor(() => {
-      expect(screen.getByText('ksql is a required field')).toBeInTheDocument();
-      expect(submitFn).not.toBeCalled();
-    });
-  });
-
-  it('submits with correct inputs', async () => {
-    const submitFn = jest.fn();
-    renderComponent({
-      fetching: false,
-      hasResults: false,
-      handleClearResults: jest.fn(),
-      handleSSECancel: jest.fn(),
-      submitHandler: submitFn,
-    });
-
-    const textbox = screen.getAllByRole('textbox');
-    textbox[0].focus();
-    await userEvent.paste('show tables;');
-    const key = screen.getByRole('textbox', { name: 'key' });
-    key.focus();
-    await userEvent.paste('test');
-    const value = screen.getByRole('textbox', { name: 'value' });
-    value.focus();
-    await userEvent.paste('test');
-    await userEvent.click(screen.getByRole('button', { name: 'Execute' }));
-
-    expect(
-      screen.queryByText('ksql is a required field')
-    ).not.toBeInTheDocument();
-
-    expect(
-      screen.queryByText('streamsProperties is not JSON object')
-    ).not.toBeInTheDocument();
-
-    expect(submitFn).toBeCalled();
-  });
-
-  it('clear results is enabled when has results', async () => {
-    const clearFn = jest.fn();
-    renderComponent({
-      fetching: false,
-      hasResults: true,
-      handleClearResults: clearFn,
-      handleSSECancel: jest.fn(),
-      submitHandler: jest.fn(),
-    });
-
-    expect(screen.getByRole('button', { name: 'Clear results' })).toBeEnabled();
-
-    await userEvent.click(
-      screen.getByRole('button', { name: 'Clear results' })
-    );
-
-    expect(clearFn).toBeCalled();
-  });
-
-  it('stop query query is enabled when is fetching', async () => {
-    const cancelFn = jest.fn();
-    renderComponent({
-      fetching: true,
-      hasResults: false,
-      handleClearResults: jest.fn(),
-      handleSSECancel: cancelFn,
-      submitHandler: jest.fn(),
-    });
-
-    expect(screen.getByRole('button', { name: 'Stop query' })).toBeEnabled();
-
-    await userEvent.click(screen.getByRole('button', { name: 'Stop query' }));
-
-    expect(cancelFn).toBeCalled();
-  });
-
-  it('add new property', async () => {
-    renderComponent({
-      fetching: false,
-      hasResults: false,
-      handleClearResults: jest.fn(),
-      handleSSECancel: jest.fn(),
-      submitHandler: jest.fn(),
-    });
-
-    const textbox = screen.getByLabelText('key');
-    await userEvent.type(textbox, 'prop_name');
-    await userEvent.click(
-      screen.getByRole('button', { name: 'Add Stream Property' })
-    );
-    expect(screen.getAllByRole('textbox', { name: 'key' }).length).toEqual(2);
-  });
-
-  it("doesn't add new property", async () => {
-    renderComponent({
-      fetching: false,
-      hasResults: false,
-      handleClearResults: jest.fn(),
-      handleSSECancel: jest.fn(),
-      submitHandler: jest.fn(),
-    });
-
-    await userEvent.click(
-      screen.getByRole('button', { name: 'Add Stream Property' })
-    );
-    expect(screen.getAllByRole('textbox', { name: 'key' }).length).toEqual(1);
-  });
-
-  it('delete stream property', async () => {
-    await renderComponent({
-      fetching: false,
-      hasResults: false,
-      handleClearResults: jest.fn(),
-      handleSSECancel: jest.fn(),
-      submitHandler: jest.fn(),
-    });
-    const textBoxes = screen.getAllByRole('textbox', { name: 'key' });
-    textBoxes[0].focus();
-    await userEvent.paste('test');
-    await userEvent.click(
-      screen.getByRole('button', { name: 'Add Stream Property' })
-    );
-    await userEvent.click(screen.getAllByLabelText('deleteProperty')[0]);
-
-    await screen.getByRole('button', { name: 'Add Stream Property' });
-
-    await userEvent.click(screen.getAllByLabelText('deleteProperty')[0]);
-
-    expect(textBoxes.length).toEqual(1);
-  });
-});

+ 0 - 116
kafka-ui-react-app/src/components/KsqlDb/Query/__test__/Query.spec.tsx

@@ -1,116 +0,0 @@
-import { render, EventSourceMock, WithRoute } from 'lib/testHelpers';
-import React from 'react';
-import Query, {
-  getFormattedErrorFromTableData,
-} from 'components/KsqlDb/Query/Query';
-import { screen } from '@testing-library/dom';
-import fetchMock from 'fetch-mock';
-import { clusterKsqlDbQueryPath } from 'lib/paths';
-import userEvent from '@testing-library/user-event';
-
-const clusterName = 'testLocal';
-const renderComponent = () =>
-  render(
-    <WithRoute path={clusterKsqlDbQueryPath()}>
-      <Query />
-    </WithRoute>,
-    {
-      initialEntries: [clusterKsqlDbQueryPath(clusterName)],
-    }
-  );
-
-describe('Query', () => {
-  it('renders', () => {
-    renderComponent();
-
-    expect(screen.getByLabelText('KSQL')).toBeInTheDocument();
-    expect(screen.getByLabelText('Stream properties:')).toBeInTheDocument();
-  });
-
-  afterEach(() => fetchMock.reset());
-  it('fetch on execute', async () => {
-    renderComponent();
-
-    const mock = fetchMock.postOnce(`/api/clusters/${clusterName}/ksql/v2`, {
-      pipeId: 'testPipeID',
-    });
-
-    Object.defineProperty(window, 'EventSource', {
-      value: EventSourceMock,
-    });
-    const inputs = screen.getAllByRole('textbox');
-    const textAreaElement = inputs[0] as HTMLTextAreaElement;
-
-    textAreaElement.focus();
-    await userEvent.paste('show tables;');
-    await userEvent.click(screen.getByRole('button', { name: 'Execute' }));
-
-    expect(mock.calls().length).toBe(1);
-  });
-
-  it('fetch on execute with streamParams', async () => {
-    renderComponent();
-
-    const mock = fetchMock.postOnce(`/api/clusters/${clusterName}/ksql/v2`, {
-      pipeId: 'testPipeID',
-    });
-
-    Object.defineProperty(window, 'EventSource', {
-      value: EventSourceMock,
-    });
-
-    const inputs = screen.getAllByRole('textbox');
-    const textAreaElement = inputs[0] as HTMLTextAreaElement;
-    textAreaElement.focus();
-    await userEvent.paste('show tables;');
-
-    const key = screen.getByLabelText('key');
-    key.focus();
-    await userEvent.paste('key');
-    const value = screen.getByLabelText('value');
-    value.focus();
-    await userEvent.paste('value');
-
-    await userEvent.click(screen.getByRole('button', { name: 'Execute' }));
-
-    expect(mock.calls().length).toBe(1);
-  });
-});
-
-describe('getFormattedErrorFromTableData', () => {
-  it('works', () => {
-    expect(getFormattedErrorFromTableData([['Test Error']])).toStrictEqual({
-      title: 'Test Error',
-      message: '',
-    });
-
-    expect(
-      getFormattedErrorFromTableData([
-        ['some_type', 'errorCode', 'messageText'],
-      ])
-    ).toStrictEqual({
-      title: '[Error #errorCode] some_type',
-      message: 'messageText',
-    });
-
-    expect(
-      getFormattedErrorFromTableData([
-        [
-          'some_type',
-          'errorCode',
-          'messageText',
-          'statementText',
-          ['test1', 'test2'],
-        ],
-      ])
-    ).toStrictEqual({
-      title: '[Error #errorCode] some_type',
-      message: '[test1, test2] "statementText" messageText',
-    });
-
-    expect(getFormattedErrorFromTableData([])).toStrictEqual({
-      title: 'Unknown error',
-      message: 'Recieved empty response',
-    });
-  });
-});

+ 6 - 12
kafka-ui-react-app/src/components/KsqlDb/Query/renderer/TableRenderer/TableRenderer.tsx

@@ -6,13 +6,11 @@ import { TableTitle } from 'components/common/table/TableTitle/TableTitle.styled
 
 import * as S from './TableRenderer.styled';
 
-export interface Props {
+interface TableRendererProps {
   table: KsqlTableResponse;
 }
 
-export function hasJsonStructure(
-  str: string | Record<string, unknown>
-): boolean {
+function hasJsonStructure(str: string | Record<string, unknown>): boolean {
   if (typeof str === 'object') {
     return true;
   }
@@ -30,13 +28,7 @@ export function hasJsonStructure(
   return false;
 }
 
-const TableRenderer: React.FC<Props> = ({ table }) => {
-  const heading = React.useMemo(() => {
-    return table.header || '';
-  }, [table.header]);
-  const ths = React.useMemo(() => {
-    return table.columnNames || [];
-  }, [table.columnNames]);
+const TableRenderer: React.FC<TableRendererProps> = ({ table }) => {
   const rows = React.useMemo(() => {
     return (table.values || []).map((row) => {
       return {
@@ -53,9 +45,11 @@ const TableRenderer: React.FC<Props> = ({ table }) => {
     });
   }, [table.values]);
 
+  const ths = table.columnNames || [];
+
   return (
     <S.Wrapper>
-      <TableTitle>{heading}</TableTitle>
+      <TableTitle>{table.header}</TableTitle>
       <S.ScrollableTable>
         <thead>
           <tr>

+ 0 - 71
kafka-ui-react-app/src/components/KsqlDb/Query/renderer/TableRenderer/__test__/TableRenderer.spec.tsx

@@ -1,71 +0,0 @@
-import { render } from 'lib/testHelpers';
-import React from 'react';
-import TableRenderer, {
-  Props,
-  hasJsonStructure,
-} from 'components/KsqlDb/Query/renderer/TableRenderer/TableRenderer';
-import { screen } from '@testing-library/dom';
-
-const renderComponent = (props: Props) => render(<TableRenderer {...props} />);
-
-describe('TableRenderer', () => {
-  it('renders', () => {
-    renderComponent({
-      table: {
-        header: 'Test header',
-        columnNames: ['Test column name'],
-        values: [['Table row #1'], ['Table row #2'], ['{"jsonrow": "#3"}']],
-      },
-    });
-
-    expect(
-      screen.getByRole('heading', { name: 'Test header' })
-    ).toBeInTheDocument();
-    expect(
-      screen.getByRole('columnheader', { name: 'Test column name' })
-    ).toBeInTheDocument();
-    expect(
-      screen.getByRole('cell', { name: 'Table row #1' })
-    ).toBeInTheDocument();
-    expect(
-      screen.getByRole('cell', { name: 'Table row #2' })
-    ).toBeInTheDocument();
-  });
-
-  it('renders with empty arrays', () => {
-    renderComponent({
-      table: {},
-    });
-
-    expect(screen.getByText('No tables or streams found')).toBeInTheDocument();
-  });
-});
-
-describe('hasJsonStructure', () => {
-  it('works', () => {
-    expect(hasJsonStructure('simplestring')).toBeFalsy();
-    expect(
-      hasJsonStructure("{'looksLikeJson': 'but has wrong quotes'}")
-    ).toBeFalsy();
-    expect(
-      hasJsonStructure('{"json": "but doesnt have closing brackets"')
-    ).toBeFalsy();
-    expect(hasJsonStructure('"string":"that looks like json"')).toBeFalsy();
-
-    expect(hasJsonStructure('1')).toBeFalsy();
-    expect(hasJsonStructure('{1:}')).toBeFalsy();
-    expect(hasJsonStructure('{1:"1"}')).toBeFalsy();
-
-    // @ts-expect-error We suppress error because this function works with unknown data from server
-    expect(hasJsonStructure(1)).toBeFalsy();
-
-    expect(hasJsonStructure('{}')).toBeTruthy();
-    expect(hasJsonStructure('{"correct": "json"}')).toBeTruthy();
-
-    expect(hasJsonStructure('[]')).toBeTruthy();
-    expect(hasJsonStructure('[{}]')).toBeTruthy();
-
-    expect(hasJsonStructure({})).toBeTruthy();
-    expect(hasJsonStructure({ correct: 'json' })).toBeTruthy();
-  });
-});

+ 39 - 0
kafka-ui-react-app/src/components/KsqlDb/TableView.tsx

@@ -0,0 +1,39 @@
+import React from 'react';
+import { KsqlStreamDescription, KsqlTableDescription } from 'generated-sources';
+import Table from 'components/common/NewTable';
+import { ColumnDef } from '@tanstack/react-table';
+
+interface TableViewProps {
+  fetching: boolean;
+  rows: KsqlTableDescription[] | KsqlStreamDescription[];
+}
+
+const TableView: React.FC<TableViewProps> = ({ fetching, rows }) => {
+  const columns = React.useMemo<
+    ColumnDef<KsqlTableDescription | KsqlStreamDescription>[]
+  >(
+    () => [
+      { header: 'Name', accessorKey: 'name' },
+      { header: 'Topic', accessorKey: 'topic' },
+      { header: 'Key Format', accessorKey: 'keyFormat' },
+      { header: 'Value Format', accessorKey: 'valueFormat' },
+      {
+        header: 'Is Windowed',
+        accessorKey: 'isWindowed',
+        cell: ({ row }) =>
+          'isWindowed' in row.original ? String(row.original.isWindowed) : '-',
+      },
+    ],
+    []
+  );
+  return (
+    <Table
+      data={rows || []}
+      columns={columns}
+      emptyMessage={fetching ? 'Loading...' : 'No rows found'}
+      enableSorting={false}
+    />
+  );
+};
+
+export default TableView;

+ 0 - 42
kafka-ui-react-app/src/components/KsqlDb/__test__/KsqlDb.spec.tsx

@@ -1,42 +0,0 @@
-import React from 'react';
-import KsqlDb from 'components/KsqlDb/KsqlDb';
-import { render, WithRoute } from 'lib/testHelpers';
-import { screen } from '@testing-library/dom';
-import {
-  clusterKsqlDbPath,
-  clusterKsqlDbQueryPath,
-  getNonExactPath,
-} from 'lib/paths';
-
-const KSqLComponentText = {
-  list: 'list',
-  query: 'query',
-};
-
-jest.mock('components/KsqlDb/List/List', () => () => (
-  <div>{KSqLComponentText.list}</div>
-));
-jest.mock('components/KsqlDb/Query/Query', () => () => (
-  <div>{KSqLComponentText.query}</div>
-));
-
-describe('KsqlDb Component', () => {
-  const clusterName = 'clusterName';
-  const renderComponent = (path: string) =>
-    render(
-      <WithRoute path={getNonExactPath(clusterKsqlDbPath())}>
-        <KsqlDb />
-      </WithRoute>,
-      { initialEntries: [path] }
-    );
-
-  it('Renders the List', () => {
-    renderComponent(clusterKsqlDbPath(clusterName));
-    expect(screen.getByText(KSqLComponentText.list)).toBeInTheDocument();
-  });
-
-  it('Renders the List', () => {
-    renderComponent(clusterKsqlDbQueryPath(clusterName));
-    expect(screen.getByText(KSqLComponentText.query)).toBeInTheDocument();
-  });
-});

+ 0 - 6
kafka-ui-react-app/src/components/Schemas/Details/__test__/fixtures.ts

@@ -12,12 +12,6 @@ export const versionPayload = [
 ];
 export const versionEmptyPayload = [];
 
-export const versions = [
-  schemaVersion1,
-  schemaVersion2,
-  schemaVersionWithNonAsciiChars,
-];
-
 export const jsonSchema: SchemaSubject = {
   subject: 'test',
   version: '15',

+ 1 - 1
kafka-ui-react-app/src/components/common/ActionComponent/__tests__/fixtures.ts

@@ -14,7 +14,7 @@ export const invalidPermission = {
   action: Action.DELETE,
 };
 
-export const roles = [
+const roles = [
   {
     ...validPermission,
     actions: [validPermission.action],

+ 1 - 0
kafka-ui-react-app/src/components/common/DiffViewer/DiffViewer.tsx

@@ -1,4 +1,5 @@
 import { diff as DiffEditor } from 'react-ace';
+import 'ace-builds/src-noconflict/ace';
 import 'ace-builds/src-noconflict/mode-json5';
 import 'ace-builds/src-noconflict/mode-protobuf';
 import 'ace-builds/src-noconflict/theme-textmate';

+ 1 - 3
kafka-ui-react-app/src/components/common/Editor/Editor.tsx

@@ -1,11 +1,9 @@
-/* eslint-disable react/jsx-props-no-spreading */
 import AceEditor, { IAceEditorProps } from 'react-ace';
 import 'ace-builds/src-noconflict/mode-json5';
 import 'ace-builds/src-noconflict/mode-protobuf';
 import 'ace-builds/src-noconflict/theme-tomorrow';
 import { SchemaType } from 'generated-sources';
 import React from 'react';
-import ReactAce from 'react-ace/lib/ace';
 import styled from 'styled-components';
 
 interface EditorProps extends IAceEditorProps {
@@ -13,7 +11,7 @@ interface EditorProps extends IAceEditorProps {
   schemaType?: string;
 }
 
-const Editor = React.forwardRef<ReactAce | null, EditorProps>((props, ref) => {
+const Editor = React.forwardRef<AceEditor | null, EditorProps>((props, ref) => {
   const { isFixedHeight, schemaType, ...rest } = props;
   return (
     <AceEditor

+ 10 - 3
kafka-ui-react-app/src/components/common/NewTable/Table.styled.ts

@@ -225,6 +225,13 @@ export const Ellipsis = styled.div`
   display: block;
 `;
 
-export const TableWrapper = styled.div`
-  overflow-x: auto;
-`;
+export const TableWrapper = styled.div<{ $disabled: boolean }>(
+  ({ $disabled }) => css`
+    overflow-x: auto;
+    ${$disabled &&
+    css`
+      pointer-events: none;
+      opacity: 0.5;
+    `}
+  `
+);

+ 4 - 1
kafka-ui-react-app/src/components/common/NewTable/Table.tsx

@@ -48,6 +48,8 @@ export interface TableProps<TData> {
   // Placeholder for empty table
   emptyMessage?: React.ReactNode;
 
+  disabled?: boolean;
+
   // Handles row click. Can not be combined with `enableRowSelection` && expandable rows.
   onRowClick?: (row: Row<TData>) => void;
 }
@@ -123,6 +125,7 @@ const Table: React.FC<TableProps<any>> = ({
   enableRowSelection = false,
   batchActionsBar: BatchActionsBar,
   emptyMessage,
+  disabled,
   onRowClick,
 }) => {
   const [searchParams, setSearchParams] = useSearchParams();
@@ -200,7 +203,7 @@ const Table: React.FC<TableProps<any>> = ({
           />
         </S.TableActionsBar>
       )}
-      <S.TableWrapper>
+      <S.TableWrapper $disabled={!!disabled}>
         <S.Table>
           <thead>
             {table.getHeaderGroups().map((headerGroup) => (

+ 0 - 11
kafka-ui-react-app/src/components/common/NewTable/TimestampCell copy.tsx

@@ -1,11 +0,0 @@
-import { CellContext } from '@tanstack/react-table';
-import React from 'react';
-
-import * as S from './Table.styled';
-
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-const TruncatedTextCell: React.FC<CellContext<any, unknown>> = ({
-  getValue,
-}) => <S.Ellipsis>{getValue<string>()}</S.Ellipsis>;
-
-export default TruncatedTextCell;

+ 2 - 2
kafka-ui-react-app/src/components/common/SQLEditor/SQLEditor.tsx

@@ -1,15 +1,15 @@
 /* eslint-disable react/jsx-props-no-spreading */
 import AceEditor, { IAceEditorProps } from 'react-ace';
+import 'ace-builds/src-noconflict/ace';
 import 'ace-builds/src-noconflict/mode-sql';
 import 'ace-builds/src-noconflict/theme-textmate';
 import React from 'react';
-import ReactAce from 'react-ace/lib/ace';
 
 interface SQLEditorProps extends IAceEditorProps {
   isFixedHeight?: boolean;
 }
 
-const SQLEditor = React.forwardRef<ReactAce | null, SQLEditorProps>(
+const SQLEditor = React.forwardRef<AceEditor | null, SQLEditorProps>(
   (props, ref) => {
     const { isFixedHeight, ...rest } = props;
     return (

+ 1 - 0
kafka-ui-react-app/src/components/common/Select/ControlledSelect.tsx

@@ -45,6 +45,7 @@ const ControlledSelect: React.FC<ControlledSelectProps> = ({
               options={options}
               placeholder={placeholder}
               disabled={disabled}
+              ref={field.ref}
             />
           );
         }}

+ 86 - 77
kafka-ui-react-app/src/components/common/Select/Select.tsx

@@ -27,90 +27,99 @@ export interface SelectOption {
   isLive?: boolean;
 }
 
-const Select: React.FC<SelectProps> = ({
-  options = [],
-  value,
-  defaultValue,
-  selectSize = 'L',
-  placeholder = '',
-  isLive,
-  disabled = false,
-  onChange,
-  isThemeMode,
-  ...props
-}) => {
-  const [selectedOption, setSelectedOption] = useState(value);
-  const [showOptions, setShowOptions] = useState(false);
+const Select = React.forwardRef<HTMLUListElement, SelectProps>(
+  (
+    {
+      options = [],
+      value,
+      defaultValue,
+      selectSize = 'L',
+      placeholder = '',
+      isLive,
+      disabled = false,
+      onChange,
+      isThemeMode,
+      ...props
+    },
+    ref
+  ) => {
+    const [selectedOption, setSelectedOption] = useState(value);
+    const [showOptions, setShowOptions] = useState(false);
 
-  const showOptionsHandler = () => {
-    if (!disabled) setShowOptions(!showOptions);
-  };
+    const showOptionsHandler = () => {
+      if (!disabled) setShowOptions(!showOptions);
+    };
 
-  const selectContainerRef = useRef(null);
-  const clickOutsideHandler = () => setShowOptions(false);
-  useClickOutside(selectContainerRef, clickOutsideHandler);
+    const selectContainerRef = useRef(null);
+    const clickOutsideHandler = () => setShowOptions(false);
+    useClickOutside(selectContainerRef, clickOutsideHandler);
 
-  const updateSelectedOption = (option: SelectOption) => {
-    if (!option.disabled) {
-      setSelectedOption(option.value);
+    const updateSelectedOption = (option: SelectOption) => {
+      if (!option.disabled) {
+        setSelectedOption(option.value);
 
-      if (onChange) {
-        onChange(option.value);
+        if (onChange) {
+          onChange(option.value);
+        }
+
+        setShowOptions(false);
       }
+    };
 
-      setShowOptions(false);
-    }
-  };
+    React.useEffect(() => {
+      setSelectedOption(value);
+    }, [isLive, value]);
 
-  React.useEffect(() => {
-    setSelectedOption(value);
-  }, [isLive, value]);
+    return (
+      <div ref={selectContainerRef}>
+        <S.Select
+          role="listbox"
+          selectSize={selectSize}
+          isLive={isLive}
+          disabled={disabled}
+          onClick={showOptionsHandler}
+          onKeyDown={showOptionsHandler}
+          isThemeMode={isThemeMode}
+          ref={ref}
+          tabIndex={0}
+          {...props}
+        >
+          <S.SelectedOptionWrapper>
+            {isLive && <LiveIcon />}
+            <S.SelectedOption
+              role="option"
+              tabIndex={0}
+              isThemeMode={isThemeMode}
+            >
+              {options.find(
+                (option) => option.value === (defaultValue || selectedOption)
+              )?.label || placeholder}
+            </S.SelectedOption>
+          </S.SelectedOptionWrapper>
+          {showOptions && (
+            <S.OptionList>
+              {options?.map((option) => (
+                <S.Option
+                  value={option.value}
+                  key={option.value}
+                  disabled={option.disabled}
+                  onClick={() => updateSelectedOption(option)}
+                  tabIndex={0}
+                  role="option"
+                >
+                  {option.isLive && <LiveIcon />}
+                  {option.label}
+                </S.Option>
+              ))}
+            </S.OptionList>
+          )}
+          <DropdownArrowIcon isOpen={showOptions} />
+        </S.Select>
+      </div>
+    );
+  }
+);
 
-  return (
-    <div ref={selectContainerRef}>
-      <S.Select
-        role="listbox"
-        selectSize={selectSize}
-        isLive={isLive}
-        disabled={disabled}
-        onClick={showOptionsHandler}
-        onKeyDown={showOptionsHandler}
-        isThemeMode={isThemeMode}
-        {...props}
-      >
-        <S.SelectedOptionWrapper>
-          {isLive && <LiveIcon />}
-          <S.SelectedOption
-            role="option"
-            tabIndex={0}
-            isThemeMode={isThemeMode}
-          >
-            {options.find(
-              (option) => option.value === (defaultValue || selectedOption)
-            )?.label || placeholder}
-          </S.SelectedOption>
-        </S.SelectedOptionWrapper>
-        {showOptions && (
-          <S.OptionList>
-            {options?.map((option) => (
-              <S.Option
-                value={option.value}
-                key={option.value}
-                disabled={option.disabled}
-                onClick={() => updateSelectedOption(option)}
-                tabIndex={0}
-                role="option"
-              >
-                {option.isLive && <LiveIcon />}
-                {option.label}
-              </S.Option>
-            ))}
-          </S.OptionList>
-        )}
-        <DropdownArrowIcon isOpen={showOptions} />
-      </S.Select>
-    </div>
-  );
-};
+Select.displayName = 'Select';
 
 export default Select;

+ 2 - 2
kafka-ui-react-app/src/components/common/Tooltip/Tooltip.tsx

@@ -8,13 +8,13 @@ import {
 
 import * as S from './Tooltip.styled';
 
-export interface PropsTypes {
+interface TooltipProps {
   value: React.ReactNode;
   content: string;
   placement?: Placement;
 }
 
-const Tooltip: React.FC<PropsTypes> = ({ value, content, placement }) => {
+const Tooltip: React.FC<TooltipProps> = ({ value, content, placement }) => {
   const [open, setOpen] = useState(false);
   const { x, y, refs, strategy, context } = useFloating({
     open,

+ 1 - 1
kafka-ui-react-app/src/components/common/table/TableTitle/TableTitle.styled.tsx

@@ -3,5 +3,5 @@ import Heading from 'components/common/heading/Heading.styled';
 import styled from 'styled-components';
 
 export const TableTitle = styled((props) => <Heading level={3} {...props} />)`
-  padding: 16px;
+  padding: 16px 16px 0;
 `;

+ 0 - 25
kafka-ui-react-app/src/redux/reducers/consumerGroups/__test__/fixtures.ts → kafka-ui-react-app/src/lib/fixtures/consumerGroups.ts

@@ -1,30 +1,5 @@
 import { ConsumerGroupState } from 'generated-sources';
 
-export const consumerGroups = [
-  {
-    groupId: 'groupId1',
-    members: 0,
-    topics: 1,
-    simple: false,
-    partitionAssignor: '',
-    coordinator: {
-      id: 1,
-      host: 'host',
-    },
-  },
-  {
-    groupId: 'groupId2',
-    members: 0,
-    topics: 1,
-    simple: false,
-    partitionAssignor: '',
-    coordinator: {
-      id: 1,
-      host: 'host',
-    },
-  },
-];
-
 export const consumerGroupPayload = {
   groupId: 'amazon.msk.canary.group.broker-1',
   members: 0,

+ 1 - 1
kafka-ui-react-app/src/lib/hooks/__tests__/fixtures.ts

@@ -4,7 +4,7 @@ import { modifyRolesData } from 'lib/permissions';
 export const clusterName1 = 'local';
 export const clusterName2 = 'dev';
 
-export const userPermissionsMock = [
+const userPermissionsMock = [
   {
     clusters: [clusterName1],
     resource: ResourceType.TOPIC,

+ 92 - 0
kafka-ui-react-app/src/lib/hooks/api/consumers.ts

@@ -0,0 +1,92 @@
+import { consumerGroupsApiClient as api } from 'lib/api';
+import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
+import { ClusterName } from 'redux/interfaces';
+import {
+  ConsumerGroup,
+  ConsumerGroupOffsetsReset,
+  ConsumerGroupOrdering,
+  SortOrder,
+} from 'generated-sources';
+import { showSuccessAlert } from 'lib/errorHandling';
+
+export type ConsumerGroupID = ConsumerGroup['groupId'];
+
+type UseConsumerGroupsProps = {
+  clusterName: ClusterName;
+  orderBy?: ConsumerGroupOrdering;
+  sortOrder?: SortOrder;
+  page?: number;
+  perPage?: number;
+  search: string;
+};
+
+type UseConsumerGroupDetailsProps = {
+  clusterName: ClusterName;
+  consumerGroupID: ConsumerGroupID;
+};
+
+export function useConsumerGroups(props: UseConsumerGroupsProps) {
+  const { clusterName, ...rest } = props;
+  return useQuery(
+    ['clusters', clusterName, 'consumerGroups', rest],
+    () => api.getConsumerGroupsPage(props),
+    { suspense: false, keepPreviousData: true }
+  );
+}
+
+export function useConsumerGroupDetails(props: UseConsumerGroupDetailsProps) {
+  const { clusterName, consumerGroupID } = props;
+  return useQuery(
+    ['clusters', clusterName, 'consumerGroups', consumerGroupID],
+    () => api.getConsumerGroup({ clusterName, id: consumerGroupID })
+  );
+}
+
+export const useDeleteConsumerGroupMutation = ({
+  clusterName,
+  consumerGroupID,
+}: UseConsumerGroupDetailsProps) => {
+  const queryClient = useQueryClient();
+  return useMutation(
+    () => api.deleteConsumerGroup({ clusterName, id: consumerGroupID }),
+    {
+      onSuccess: () => {
+        showSuccessAlert({
+          message: `Consumer ${consumerGroupID} group deleted`,
+        });
+        queryClient.invalidateQueries([
+          'clusters',
+          clusterName,
+          'consumerGroups',
+        ]);
+      },
+    }
+  );
+};
+
+export const useResetConsumerGroupOffsetsMutation = ({
+  clusterName,
+  consumerGroupID,
+}: UseConsumerGroupDetailsProps) => {
+  const queryClient = useQueryClient();
+  return useMutation(
+    (props: ConsumerGroupOffsetsReset) =>
+      api.resetConsumerGroupOffsets({
+        clusterName,
+        id: consumerGroupID,
+        consumerGroupOffsetsReset: props,
+      }),
+    {
+      onSuccess: () => {
+        showSuccessAlert({
+          message: `Consumer ${consumerGroupID} group offsets reset`,
+        });
+        queryClient.invalidateQueries([
+          'clusters',
+          clusterName,
+          'consumerGroups',
+        ]);
+      },
+    }
+  );
+};

+ 1 - 1
kafka-ui-react-app/src/lib/hooks/api/kafkaConnect.ts

@@ -109,7 +109,7 @@ export function useUpdateConnectorConfig(props: UseConnectorProps) {
     }
   );
 }
-export function useCreateConnectorMutation(clusterName: ClusterName) {
+function useCreateConnectorMutation(clusterName: ClusterName) {
   const client = useQueryClient();
   return useMutation(
     (props: CreateConnectorProps) =>

+ 184 - 0
kafka-ui-react-app/src/lib/hooks/api/ksqlDb.tsx

@@ -0,0 +1,184 @@
+import { ksqlDbApiClient as api } from 'lib/api';
+import { useMutation, useQueries } from '@tanstack/react-query';
+import { ClusterName } from 'redux/interfaces';
+import { BASE_PARAMS } from 'lib/constants';
+import React from 'react';
+import { fetchEventSource } from '@microsoft/fetch-event-source';
+import {
+  showAlert,
+  showServerError,
+  showSuccessAlert,
+} from 'lib/errorHandling';
+import {
+  ExecuteKsqlRequest,
+  KsqlResponse,
+  KsqlTableResponse,
+} from 'generated-sources';
+import { StopLoading } from 'components/Topics/Topic/Messages/Messages.styled';
+import toast from 'react-hot-toast';
+
+export function useKsqlkDb(clusterName: ClusterName) {
+  return useQueries({
+    queries: [
+      {
+        queryKey: ['clusters', clusterName, 'ksqlDb', 'tables'],
+        queryFn: () => api.listTables({ clusterName }),
+        suspense: false,
+      },
+      {
+        queryKey: ['clusters', clusterName, 'ksqlDb', 'streams'],
+        queryFn: () => api.listStreams({ clusterName }),
+        suspense: false,
+      },
+    ],
+  });
+}
+
+export function useExecuteKsqlkDbQueryMutation() {
+  return useMutation((props: ExecuteKsqlRequest) => api.executeKsql(props));
+}
+
+const getFormattedErrorFromTableData = (
+  responseValues: KsqlTableResponse['values']
+): { title: string; message: string } => {
+  // We expect something like that
+  // [[
+  //   "@type",
+  //   "error_code",
+  //   "message",
+  //   "statementText"?,
+  //   "entities"?
+  // ]],
+  // or
+  // [["message"]]
+
+  if (!responseValues || !responseValues.length) {
+    return {
+      title: 'Unknown error',
+      message: 'Received empty response',
+    };
+  }
+
+  let title = '';
+  let message = '';
+  if (responseValues[0].length < 2) {
+    const [messageText] = responseValues[0];
+    title = messageText;
+  } else {
+    const [type, errorCode, messageText, statementText, entities] =
+      responseValues[0];
+    title = `[Error #${errorCode}] ${type}`;
+    message =
+      (entities?.length ? `[${entities.join(', ')}] ` : '') +
+      (statementText ? `"${statementText}" ` : '') +
+      messageText;
+  }
+
+  return { title, message };
+};
+
+type UseKsqlkDbSSEProps = {
+  pipeId: string | false;
+  clusterName: ClusterName;
+};
+
+export const useKsqlkDbSSE = ({ clusterName, pipeId }: UseKsqlkDbSSEProps) => {
+  const [data, setData] = React.useState<KsqlTableResponse>();
+  const [isFetching, setIsFetching] = React.useState<boolean>(false);
+
+  const abortController = new AbortController();
+
+  React.useEffect(() => {
+    const fetchData = async () => {
+      const url = `${BASE_PARAMS.basePath}/api/clusters/${clusterName}/ksql/response`;
+      await fetchEventSource(
+        `${url}?${new URLSearchParams({ pipeId: pipeId || '' }).toString()}`,
+        {
+          method: 'GET',
+          signal: abortController.signal,
+          openWhenHidden: true,
+          async onopen(response) {
+            const { ok, status } = response;
+            if (ok) setData(undefined); // Reset
+            if (status >= 400 && status < 500 && status !== 429) {
+              showServerError(response);
+            }
+          },
+          onmessage(event) {
+            const { table }: KsqlResponse = JSON.parse(event.data);
+            if (!table) {
+              return;
+            }
+            switch (table?.header) {
+              case 'Execution error': {
+                showAlert('error', {
+                  ...getFormattedErrorFromTableData(table.values),
+                  id: `${url}-executionError`,
+                });
+                break;
+              }
+              case 'Schema':
+                setData(table);
+                break;
+              case 'Row':
+                setData((state) => ({
+                  header: state?.header,
+                  columnNames: state?.columnNames,
+                  values: [...(state?.values || []), ...(table?.values || [])],
+                }));
+                break;
+              case 'Query Result':
+                showSuccessAlert({
+                  id: `${url}-querySuccess`,
+                  title: 'Query succeed',
+                  message: '',
+                });
+                break;
+              case 'Source Description':
+              case 'properties':
+              default:
+                setData(table);
+                break;
+            }
+          },
+          onclose() {
+            setIsFetching(false);
+          },
+          onerror(err) {
+            setIsFetching(false);
+            showServerError(err);
+          },
+        }
+      );
+    };
+
+    const abortFetchData = () => {
+      setIsFetching(false);
+      if (pipeId) abortController.abort();
+    };
+    if (pipeId) {
+      toast.promise(
+        fetchData(),
+        {
+          loading: (
+            <>
+              <div>Consuming query execution result...</div>
+              &nbsp;
+              <StopLoading onClick={abortFetchData}>Abort</StopLoading>
+            </>
+          ),
+          success: 'Cancelled',
+          error: 'Something went wrong. Please try again.',
+        },
+        {
+          id: 'messages',
+          success: { duration: 20 },
+        }
+      );
+    }
+
+    return abortFetchData;
+  }, [pipeId]);
+
+  return { data, isFetching };
+};

+ 1 - 1
kafka-ui-react-app/src/lib/hooks/useMessageFiltersStore.ts

@@ -2,7 +2,7 @@ import { LOCAL_STORAGE_KEY_PREFIX } from 'lib/constants';
 import create from 'zustand';
 import { persist } from 'zustand/middleware';
 
-export interface AdvancedFilter {
+interface AdvancedFilter {
   name: string;
   value: string;
 }

+ 4 - 8
kafka-ui-react-app/src/lib/paths.ts

@@ -1,12 +1,8 @@
 import { Broker, Connect, Connector } from 'generated-sources';
-import {
-  ClusterName,
-  ConsumerGroupID,
-  SchemaName,
-  TopicName,
-} from 'redux/interfaces';
+import { ClusterName, SchemaName, TopicName } from 'redux/interfaces';
 
 import { GIT_REPO_LINK } from './constants';
+import { ConsumerGroupID } from './hooks/api/consumers';
 
 export const gitCommitPath = (commit: string) =>
   `${GIT_REPO_LINK}/commit/${commit}`;
@@ -204,7 +200,7 @@ export const clusterConnectorsRelativePath = 'connectors';
 export const clusterConnectorNewRelativePath = 'create-new';
 export const clusterConnectConnectorsRelativePath = `${RouteParams.connectName}/connectors`;
 export const clusterConnectConnectorRelativePath = `${clusterConnectConnectorsRelativePath}/${RouteParams.connectorName}`;
-export const clusterConnectConnectorTasksRelativePath = 'tasks';
+const clusterConnectConnectorTasksRelativePath = 'tasks';
 export const clusterConnectConnectorConfigRelativePath = 'config';
 
 export const clusterConnectsPath = (
@@ -287,5 +283,5 @@ export const clusterConfigPath = (
   clusterName: ClusterName = RouteParams.clusterName
 ) => `${clusterPath(clusterName)}/${clusterConfigRelativePath}`;
 
-export const clusterNewConfigRelativePath = 'create-new-cluster';
+const clusterNewConfigRelativePath = 'create-new-cluster';
 export const clusterNewConfigPath = `/ui/clusters/${clusterNewConfigRelativePath}`;

+ 2 - 1
kafka-ui-react-app/src/lib/yupExtended.ts

@@ -41,7 +41,8 @@ const isJsonObject = () => {
 };
 
 /**
- * due to yup rerunning all the object validiation during any render, it makes sense to cache the async results
+ * due to yup rerunning all the object validation during any render,
+ * it makes sense to cache the async results
  * */
 export function cacheTest(
   asyncValidate: (val?: string, ctx?: yup.AnyObject) => Promise<boolean>

+ 1 - 2
kafka-ui-react-app/src/redux/interfaces/consumerGroup.ts

@@ -5,10 +5,9 @@ import {
 
 import { ClusterName } from './cluster';
 
-export type ConsumerGroupID = ConsumerGroup['groupId'];
 export interface ConsumerGroupResetOffsetRequestParams {
   clusterName: ClusterName;
-  consumerGroupID: ConsumerGroupID;
+  consumerGroupID: ConsumerGroup['groupId'];
   requestBody: {
     topic: string;
     resetType: ConsumerGroupOffsetsResetType;

+ 0 - 19
kafka-ui-react-app/src/redux/interfaces/ksqlDb.ts

@@ -1,19 +0,0 @@
-import {
-  KsqlCommandV2Response,
-  KsqlStreamDescription,
-  KsqlTableDescription,
-} from 'generated-sources';
-
-export interface KsqlState {
-  tables: KsqlTableDescription[];
-  streams: KsqlStreamDescription[];
-  executionResult: KsqlCommandV2Response | null;
-}
-
-export interface KsqlDescription {
-  name?: string;
-  topic?: string;
-  keyFormat?: string;
-  valueFormat?: string;
-  isWindowed?: boolean;
-}

+ 0 - 49
kafka-ui-react-app/src/redux/reducers/consumerGroups/__test__/consumerGroupSlice.spec.ts

@@ -1,49 +0,0 @@
-import { store } from 'redux/store';
-import {
-  sortBy,
-  getConsumerGroupsOrderBy,
-  getConsumerGroupsSortOrder,
-  getAreConsumerGroupsPagedFulfilled,
-  fetchConsumerGroupsPaged,
-  selectAll,
-} from 'redux/reducers/consumerGroups/consumerGroupsSlice';
-import { ConsumerGroupOrdering, SortOrder } from 'generated-sources';
-import { consumerGroups } from 'redux/reducers/consumerGroups/__test__/fixtures';
-
-describe('Consumer Groups Slice', () => {
-  describe('Actions', () => {
-    it('should test the sortBy actions', () => {
-      expect(store.getState().consumerGroups.sortOrder).toBe(SortOrder.ASC);
-
-      store.dispatch(sortBy(ConsumerGroupOrdering.STATE));
-      expect(getConsumerGroupsOrderBy(store.getState())).toBe(
-        ConsumerGroupOrdering.STATE
-      );
-      expect(getConsumerGroupsSortOrder(store.getState())).toBe(SortOrder.DESC);
-      store.dispatch(sortBy(ConsumerGroupOrdering.STATE));
-      expect(getConsumerGroupsSortOrder(store.getState())).toBe(SortOrder.ASC);
-    });
-  });
-
-  describe('Thunk Actions', () => {
-    it('should check the fetchConsumerPaged ', () => {
-      store.dispatch({
-        type: fetchConsumerGroupsPaged.fulfilled.type,
-        payload: {
-          consumerGroups,
-        },
-      });
-
-      expect(getAreConsumerGroupsPagedFulfilled(store.getState())).toBeTruthy();
-      expect(selectAll(store.getState())).toEqual(consumerGroups);
-
-      store.dispatch({
-        type: fetchConsumerGroupsPaged.fulfilled.type,
-        payload: {
-          consumerGroups: null,
-        },
-      });
-      expect(selectAll(store.getState())).toEqual([]);
-    });
-  });
-});

+ 0 - 223
kafka-ui-react-app/src/redux/reducers/consumerGroups/consumerGroupsSlice.ts

@@ -1,223 +0,0 @@
-import {
-  createAsyncThunk,
-  createEntityAdapter,
-  createSlice,
-  createSelector,
-  PayloadAction,
-} from '@reduxjs/toolkit';
-import {
-  ConsumerGroupDetails,
-  ConsumerGroupOrdering,
-  ConsumerGroupsPageResponse,
-  SortOrder,
-} from 'generated-sources';
-import { AsyncRequestStatus } from 'lib/constants';
-import {
-  getResponse,
-  showServerError,
-  showSuccessAlert,
-} from 'lib/errorHandling';
-import {
-  ClusterName,
-  ConsumerGroupID,
-  ConsumerGroupResetOffsetRequestParams,
-  RootState,
-} from 'redux/interfaces';
-import { createFetchingSelector } from 'redux/reducers/loader/selectors';
-import { EntityState } from '@reduxjs/toolkit/src/entities/models';
-import { consumerGroupsApiClient } from 'lib/api';
-
-export const fetchConsumerGroupsPaged = createAsyncThunk<
-  ConsumerGroupsPageResponse,
-  {
-    clusterName: ClusterName;
-    orderBy?: ConsumerGroupOrdering;
-    sortOrder?: SortOrder;
-    page?: number;
-    perPage?: number;
-    search: string;
-  }
->(
-  'consumerGroups/fetchConsumerGroupsPaged',
-  async (
-    { clusterName, orderBy, sortOrder, page, perPage, search },
-    { rejectWithValue }
-  ) => {
-    try {
-      return await consumerGroupsApiClient.getConsumerGroupsPage({
-        clusterName,
-        orderBy,
-        sortOrder,
-        page,
-        perPage,
-        search,
-      });
-    } catch (error) {
-      showServerError(error as Response);
-      return rejectWithValue(await getResponse(error as Response));
-    }
-  }
-);
-
-export const fetchConsumerGroupDetails = createAsyncThunk<
-  ConsumerGroupDetails,
-  { clusterName: ClusterName; consumerGroupID: ConsumerGroupID }
->(
-  'consumerGroups/fetchConsumerGroupDetails',
-  async ({ clusterName, consumerGroupID }, { rejectWithValue }) => {
-    try {
-      return await consumerGroupsApiClient.getConsumerGroup({
-        clusterName,
-        id: consumerGroupID,
-      });
-    } catch (error) {
-      showServerError(error as Response);
-      return rejectWithValue(await getResponse(error as Response));
-    }
-  }
-);
-
-export const deleteConsumerGroup = createAsyncThunk<
-  ConsumerGroupID,
-  { clusterName: ClusterName; consumerGroupID: ConsumerGroupID }
->(
-  'consumerGroups/deleteConsumerGroup',
-  async ({ clusterName, consumerGroupID }, { rejectWithValue }) => {
-    try {
-      await consumerGroupsApiClient.deleteConsumerGroup({
-        clusterName,
-        id: consumerGroupID,
-      });
-      showSuccessAlert({
-        message: `Consumer ${consumerGroupID} group deleted`,
-      });
-      return consumerGroupID;
-    } catch (error) {
-      showServerError(error as Response);
-      return rejectWithValue(await getResponse(error as Response));
-    }
-  }
-);
-
-export const resetConsumerGroupOffsets = createAsyncThunk<
-  ConsumerGroupID,
-  ConsumerGroupResetOffsetRequestParams
->(
-  'consumerGroups/resetConsumerGroupOffsets',
-  async (
-    { clusterName, consumerGroupID, requestBody },
-    { rejectWithValue }
-  ) => {
-    try {
-      await consumerGroupsApiClient.resetConsumerGroupOffsets({
-        clusterName,
-        id: consumerGroupID,
-        consumerGroupOffsetsReset: {
-          topic: requestBody.topic,
-          resetType: requestBody.resetType,
-          partitions: requestBody.partitions,
-          partitionsOffsets: requestBody.partitionsOffsets?.map((offset) => ({
-            ...offset,
-            offset: +offset.offset,
-          })),
-          resetToTimestamp: requestBody.resetToTimestamp?.getTime(),
-        },
-      });
-      showSuccessAlert({
-        message: `Consumer ${consumerGroupID} group offsets reset`,
-      });
-      return consumerGroupID;
-    } catch (error) {
-      showServerError(error as Response);
-      return rejectWithValue(await getResponse(error as Response));
-    }
-  }
-);
-const SCHEMAS_PAGE_COUNT = 1;
-
-const consumerGroupsAdapter = createEntityAdapter<ConsumerGroupDetails>({
-  selectId: (consumerGroup) => consumerGroup.groupId,
-});
-
-interface ConsumerGroupState extends EntityState<ConsumerGroupDetails> {
-  orderBy: ConsumerGroupOrdering | null;
-  sortOrder: SortOrder;
-  totalPages: number;
-}
-
-const initialState: ConsumerGroupState = {
-  orderBy: ConsumerGroupOrdering.NAME,
-  sortOrder: SortOrder.ASC,
-  totalPages: SCHEMAS_PAGE_COUNT,
-  ...consumerGroupsAdapter.getInitialState(),
-};
-
-const consumerGroupsSlice = createSlice({
-  name: 'consumerGroups',
-  initialState,
-  reducers: {
-    sortBy: (state, action: PayloadAction<ConsumerGroupOrdering>) => {
-      state.orderBy = action.payload;
-      state.sortOrder =
-        state.orderBy === action.payload && state.sortOrder === SortOrder.ASC
-          ? SortOrder.DESC
-          : SortOrder.ASC;
-    },
-  },
-  extraReducers: (builder) => {
-    builder.addCase(
-      fetchConsumerGroupsPaged.fulfilled,
-      (state, { payload }) => {
-        state.totalPages = payload.pageCount || SCHEMAS_PAGE_COUNT;
-        consumerGroupsAdapter.setAll(state, payload.consumerGroups || []);
-      }
-    );
-    builder.addCase(fetchConsumerGroupDetails.fulfilled, (state, { payload }) =>
-      consumerGroupsAdapter.upsertOne(state, payload)
-    );
-    builder.addCase(deleteConsumerGroup.fulfilled, (state, { payload }) =>
-      consumerGroupsAdapter.removeOne(state, payload)
-    );
-  },
-});
-
-export const { sortBy } = consumerGroupsSlice.actions;
-
-const consumerGroupsState = ({
-  consumerGroups,
-}: RootState): ConsumerGroupState => consumerGroups;
-
-export const { selectAll, selectById } =
-  consumerGroupsAdapter.getSelectors<RootState>(consumerGroupsState);
-
-export const getAreConsumerGroupsPagedFulfilled = createSelector(
-  createFetchingSelector('consumerGroups/fetchConsumerGroupsPaged'),
-  (status) => status === AsyncRequestStatus.fulfilled
-);
-
-export const getAreConsumerGroupDetailsFulfilled = createSelector(
-  createFetchingSelector('consumerGroups/fetchConsumerGroupDetails'),
-  (status) => status === AsyncRequestStatus.fulfilled
-);
-
-export const getIsOffsetReseted = createSelector(
-  createFetchingSelector('consumerGroups/resetConsumerGroupOffsets'),
-  (status) => status === AsyncRequestStatus.fulfilled
-);
-
-export const getConsumerGroupsOrderBy = createSelector(
-  consumerGroupsState,
-  (state) => state.orderBy
-);
-
-export const getConsumerGroupsSortOrder = createSelector(
-  consumerGroupsState,
-  (state) => state.sortOrder
-);
-
-export const getConsumerGroupsTotalPages = createSelector(
-  consumerGroupsState,
-  (state) => state.totalPages
-);
-
-export default consumerGroupsSlice.reducer;

+ 0 - 4
kafka-ui-react-app/src/redux/reducers/index.ts

@@ -2,13 +2,9 @@ import { combineReducers } from '@reduxjs/toolkit';
 import loader from 'redux/reducers/loader/loaderSlice';
 import schemas from 'redux/reducers/schemas/schemasSlice';
 import topicMessages from 'redux/reducers/topicMessages/topicMessagesSlice';
-import consumerGroups from 'redux/reducers/consumerGroups/consumerGroupsSlice';
-import ksqlDb from 'redux/reducers/ksqlDb/ksqlDbSlice';
 
 export default combineReducers({
   loader,
   topicMessages,
-  consumerGroups,
   schemas,
-  ksqlDb,
 });

+ 0 - 43
kafka-ui-react-app/src/redux/reducers/ksqlDb/__test__/fixtures.ts

@@ -1,43 +0,0 @@
-type Dictionary<T> = Record<string, T>;
-
-export const fetchKsqlDbTablesPayload: {
-  tables: Dictionary<string>[];
-  streams: Dictionary<string>[];
-} = {
-  tables: [
-    {
-      type: 'TABLE',
-      name: 'USERS',
-      topic: 'users',
-      keyFormat: 'KAFKA',
-      valueFormat: 'AVRO',
-      isWindowed: 'false',
-    },
-    {
-      type: 'TABLE',
-      name: 'USERS2',
-      topic: 'users',
-      keyFormat: 'KAFKA',
-      valueFormat: 'AVRO',
-      isWindowed: 'false',
-    },
-  ],
-  streams: [
-    {
-      type: 'STREAM',
-      name: 'KSQL_PROCESSING_LOG',
-      topic: 'default_ksql_processing_log',
-      keyFormat: 'KAFKA',
-      valueFormat: 'JSON',
-      isWindowed: 'false',
-    },
-    {
-      type: 'STREAM',
-      name: 'PAGEVIEWS',
-      topic: 'pageviews',
-      keyFormat: 'KAFKA',
-      valueFormat: 'AVRO',
-      isWindowed: 'false',
-    },
-  ],
-};

+ 0 - 51
kafka-ui-react-app/src/redux/reducers/ksqlDb/__test__/selectors.spec.ts

@@ -1,51 +0,0 @@
-import { store } from 'redux/store';
-import * as selectors from 'redux/reducers/ksqlDb/selectors';
-import { fetchKsqlDbTables } from 'redux/reducers/ksqlDb/ksqlDbSlice';
-
-import { fetchKsqlDbTablesPayload } from './fixtures';
-
-describe('TopicMessages selectors', () => {
-  describe('Initial state', () => {
-    beforeAll(() => {
-      store.dispatch({
-        type: fetchKsqlDbTables.pending.type,
-        payload: fetchKsqlDbTablesPayload,
-      });
-    });
-
-    it('Returns empty state', () => {
-      expect(selectors.getKsqlDbTables(store.getState())).toEqual({
-        rows: {
-          streams: [],
-          tables: [],
-        },
-        fetched: false,
-        fetching: true,
-        tablesCount: 0,
-        streamsCount: 0,
-      });
-    });
-  });
-
-  describe('State', () => {
-    beforeAll(() => {
-      store.dispatch({
-        type: fetchKsqlDbTables.fulfilled.type,
-        payload: fetchKsqlDbTablesPayload,
-      });
-    });
-
-    it('Returns tables and streams', () => {
-      expect(selectors.getKsqlDbTables(store.getState())).toEqual({
-        rows: {
-          streams: [...fetchKsqlDbTablesPayload.streams],
-          tables: [...fetchKsqlDbTablesPayload.tables],
-        },
-        fetched: true,
-        fetching: false,
-        tablesCount: 2,
-        streamsCount: 2,
-      });
-    });
-  });
-});

+ 0 - 75
kafka-ui-react-app/src/redux/reducers/ksqlDb/ksqlDbSlice.ts

@@ -1,75 +0,0 @@
-import { KsqlState } from 'redux/interfaces/ksqlDb';
-import { createAsyncThunk, createSlice } from '@reduxjs/toolkit';
-import { ExecuteKsqlRequest } from 'generated-sources';
-import { ClusterName } from 'redux/interfaces';
-import { ksqlDbApiClient } from 'lib/api';
-
-const getTables = (clusterName: ClusterName) =>
-  ksqlDbApiClient.listTables({
-    clusterName,
-  });
-
-const getStreams = (clusterName: ClusterName) =>
-  ksqlDbApiClient.listStreams({
-    clusterName,
-  });
-
-export const fetchKsqlDbTables = createAsyncThunk(
-  'ksqlDb/fetchKsqlDbTables',
-  async (clusterName: ClusterName) => {
-    const [tables, streams] = await Promise.all([
-      getTables(clusterName),
-      getStreams(clusterName),
-    ]);
-
-    const processedTables = tables.map((table) => ({
-      type: 'TABLE',
-      ...table,
-    }));
-    const processedStreams = streams.map((stream) => ({
-      type: 'STREAM',
-      ...stream,
-    }));
-
-    return {
-      tables: processedTables,
-      streams: processedStreams,
-    };
-  }
-);
-
-export const executeKsql = createAsyncThunk(
-  'ksqlDb/executeKsql',
-  (params: ExecuteKsqlRequest) => ksqlDbApiClient.executeKsql(params)
-);
-
-const initialState: KsqlState = {
-  streams: [],
-  tables: [],
-  executionResult: null,
-};
-
-const ksqlDbSlice = createSlice({
-  name: 'ksqlDb',
-  initialState,
-  reducers: {
-    resetExecutionResult: (state) => ({
-      ...state,
-      executionResult: null,
-    }),
-  },
-  extraReducers: (builder) => {
-    builder.addCase(fetchKsqlDbTables.fulfilled, (state, action) => ({
-      ...state,
-      ...action.payload,
-    }));
-    builder.addCase(executeKsql.fulfilled, (state, action) => ({
-      ...state,
-      executionResult: action.payload,
-    }));
-  },
-});
-
-export const { resetExecutionResult } = ksqlDbSlice.actions;
-
-export default ksqlDbSlice.reducer;

+ 0 - 33
kafka-ui-react-app/src/redux/reducers/ksqlDb/selectors.ts

@@ -1,33 +0,0 @@
-import { createSelector } from '@reduxjs/toolkit';
-import { RootState } from 'redux/interfaces';
-import { createFetchingSelector } from 'redux/reducers/loader/selectors';
-import { KsqlState } from 'redux/interfaces/ksqlDb';
-import { AsyncRequestStatus } from 'lib/constants';
-
-const ksqlDbState = ({ ksqlDb }: RootState): KsqlState => ksqlDb;
-
-const getKsqlDbFetchTablesAndStreamsFetchingStatus = createFetchingSelector(
-  'ksqlDb/fetchKsqlDbTables'
-);
-
-const getKsqlExecutionStatus = createFetchingSelector('ksqlDb/executeKsql');
-
-export const getKsqlDbTables = createSelector(
-  [ksqlDbState, getKsqlDbFetchTablesAndStreamsFetchingStatus],
-  (state, status) => ({
-    rows: { streams: [...state.streams], tables: [...state.tables] },
-    fetched: status === AsyncRequestStatus.fulfilled,
-    fetching: status === AsyncRequestStatus.pending,
-    tablesCount: state.tables.length,
-    streamsCount: state.streams.length,
-  })
-);
-
-export const getKsqlExecution = createSelector(
-  [ksqlDbState, getKsqlExecutionStatus],
-  (state, status) => ({
-    executionResult: state.executionResult,
-    fetched: status === AsyncRequestStatus.fulfilled,
-    fetching: status === AsyncRequestStatus.pending,
-  })
-);

+ 1 - 1
kafka-ui-react-app/src/widgets/ClusterConfigForm/index.tsx

@@ -75,7 +75,7 @@ const ClusterConfigForm: React.FC<ClusterConfigFormProps> = ({
   const onReset = () => methods.reset();
 
   const onValidate = async () => {
-    await trigger();
+    await trigger(undefined, { shouldFocus: true });
     if (!methods.formState.isValid) return;
     disableForm();
     const data = methods.getValues();

+ 7 - 0
kafka-ui-react-app/vite.config.ts

@@ -25,6 +25,13 @@ export default defineConfig(({ mode }) => {
     },
     build: {
       outDir: 'build',
+      rollupOptions: {
+        output: {
+          manualChunks: {
+            ace: ['ace-builds', 'react-ace'],
+          },
+        },
+      },
     },
     experimental: {
       renderBuiltUrl(

+ 1 - 1
kafka-ui-serde-api/pom.xml

@@ -87,7 +87,7 @@
 					<configuration>
 						<source>8</source>
 					</configuration>
-					<version>3.4.1</version>
+					<version>3.5.0</version>
 					<executions>
 						<execution>
 							<id>attach-javadocs</id>