Merge branch 'master' into Topic_custom_params_are_disabled_upon_editing

David, 2 years ago
parent
commit f632077bf4
100 changed files with 3303 additions and 1935 deletions
  1. .github/workflows/backend.yml (+1 -1)
  2. .github/workflows/branch-deploy.yml (+1 -1)
  3. .github/workflows/branch-remove.yml (+1 -1)
  4. .github/workflows/create-branch-for-helm.yaml (+1 -1)
  5. .github/workflows/documentation.yaml (+1 -1)
  6. .github/workflows/e2e-checks.yaml (+3 -3)
  7. .github/workflows/frontend.yaml (+1 -1)
  8. .github/workflows/helm.yaml (+1 -1)
  9. .github/workflows/master.yaml (+1 -1)
  10. .github/workflows/release-serde-api.yaml (+1 -1)
  11. .github/workflows/terraform-deploy.yml (+1 -1)
  12. README.md (+0 -1)
  13. charts/kafka-ui/Chart.yaml (+1 -1)
  14. charts/kafka-ui/templates/ingress.yaml (+3 -3)
  15. charts/kafka-ui/templates/secret.yaml (+2 -0)
  16. charts/kafka-ui/values.yaml (+3 -0)
  17. kafka-ui-api/Dockerfile (+1 -1)
  18. kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/KafkaConnectClientsFactory.java (+0 -22)
  19. kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/RetryingKafkaConnectClient.java (+20 -71)
  20. kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java (+6 -1)
  21. kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ConsumerGroupsController.java (+2 -2)
  22. kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java (+3 -3)
  23. kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/SchemasController.java (+64 -61)
  24. kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/TopicsController.java (+1 -1)
  25. kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/SchemaCompatibilityException.java (+2 -2)
  26. kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/SchemaTypeNotSupportedException.java (+0 -12)
  27. kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ClusterMapper.java (+1 -99)
  28. kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java (+11 -7)
  29. kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/KafkaSrMapper.java (+37 -0)
  30. kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/FailoverUrlList.java (+0 -59)
  31. kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalKsqlServer.java (+0 -19)
  32. kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalSchemaRegistry.java (+0 -33)
  33. kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java (+11 -5)
  34. kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaConnectCluster.java (+0 -21)
  35. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java (+2 -0)
  36. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/AvroEmbeddedSerde.java (+72 -0)
  37. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java (+6 -3)
  38. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java (+13 -6)
  39. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClustersStorage.java (+2 -3)
  40. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java (+102 -89)
  41. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java (+5 -4)
  42. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java (+133 -0)
  43. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java (+40 -38)
  44. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/OffsetsResetService.java (+1 -1)
  45. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java (+90 -37)
  46. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java (+105 -391)
  47. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java (+10 -8)
  48. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlApiClient.java (+52 -63)
  49. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2.java (+6 -14)
  50. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/PrometheusMetricsRetriever.java (+15 -8)
  51. kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/MapUtil.java (+0 -21)
  52. kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ReactiveFailover.java (+154 -0)
  53. kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/SecuredWebClient.java (+0 -66)
  54. kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/WebClientConfigurator.java (+136 -0)
  55. kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AnyFieldSchema.java (+27 -0)
  56. kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java (+1 -1)
  57. kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonSchema.java (+4 -0)
  58. kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/ObjectFieldSchema.java (+3 -0)
  59. kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/ProtobufSchemaConverter.java (+146 -65)
  60. kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/RefFieldSchema.java (+4 -0)
  61. kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/SimpleJsonType.java (+12 -4)
  62. kafka-ui-api/src/test/java/com/provectus/kafka/ui/model/FailoverUrlListTest.java (+0 -69)
  63. kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/AvroEmbeddedSerdeTest.java (+92 -0)
  64. kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java (+46 -8)
  65. kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ConfigTest.java (+15 -0)
  66. kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/MessagesServiceTest.java (+3 -3)
  67. kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ReactiveAdminClientTest.java (+91 -0)
  68. kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/SchemaRegistryPaginationTest.java (+43 -43)
  69. kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/TopicsServicePaginationTest.java (+0 -2)
  70. kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlApiClientTest.java (+5 -7)
  71. kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2Test.java (+20 -13)
  72. kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/metrics/PrometheusMetricsRetrieverTest.java (+44 -11)
  73. kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/ReactiveFailoverTest.java (+233 -0)
  74. kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/ProtobufSchemaConverterTest.java (+124 -54)
  75. kafka-ui-contract/pom.xml (+26 -0)
  76. kafka-ui-contract/src/main/resources/swagger/kafka-sr-api.yaml (+404 -0)
  77. kafka-ui-e2e-checks/README.md (+48 -13)
  78. kafka-ui-e2e-checks/pom.xml (+50 -93)
  79. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Connector.java (+0 -1)
  80. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Schema.java (+5 -5)
  81. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java (+6 -3)
  82. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java (+94 -74)
  83. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/NaviSideBar.java (+11 -10)
  84. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/TopPanel.java (+5 -3)
  85. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java (+28 -27)
  86. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersDetails.java (+74 -73)
  87. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java (+90 -89)
  88. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/ConnectorCreateForm.java (+6 -6)
  89. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/ConnectorDetails.java (+21 -21)
  90. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/KafkaConnectList.java (+8 -8)
  91. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersDetails.java (+3 -3)
  92. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java (+4 -3)
  93. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java (+137 -0)
  94. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java (+155 -0)
  95. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/enums/KsqlMenuTabs.java (+17 -0)
  96. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/enums/KsqlQueryConfig.java (+19 -0)
  97. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/models/Stream.java (+11 -0)
  98. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/models/Table.java (+11 -0)
  99. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaCreateForm.java (+22 -24)
  100. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaDetails.java (+10 -10)

+ 1 - 1
.github/workflows/backend.yml

@@ -1,4 +1,4 @@
-name: backend
+name: Backend build and test
 on:
   push:
     branches:

+ 1 - 1
.github/workflows/branch-deploy.yml

@@ -1,4 +1,4 @@
-name: DeployFromBranch
+name: Feature testing init
 on:
   workflow_dispatch:
 

+ 1 - 1
.github/workflows/branch-remove.yml

@@ -1,4 +1,4 @@
-name: RemoveCustomDeployment
+name: Feature testing destroy
 on:
   workflow_dispatch:
   pull_request:

+ 1 - 1
.github/workflows/create-branch-for-helm.yaml

@@ -1,4 +1,4 @@
-name: prepare-helm-release
+name: Prepare helm release
 on:
   repository_dispatch:
     types: [prepare-helm-release]

+ 1 - 1
.github/workflows/documentation.yaml

@@ -1,4 +1,4 @@
-name: Documentation
+name: Documentation URLs linter
 on:
   pull_request:
     types:

+ 3 - 3
.github/workflows/e2e-checks.yaml

@@ -1,4 +1,4 @@
-name: e2e-checks
+name: E2E tests
 on:
   pull_request_target:
     types: ["opened", "edited", "reopened", "synchronize"]
@@ -33,7 +33,7 @@ jobs:
         id: build_app
         run: |
           ./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
-          ./mvnw -B -V -ntp clean package -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
+          ./mvnw -B -V -ntp clean install -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
       - name: compose app
         id: compose_app
         # use the following command until #819 will be fixed
@@ -42,7 +42,7 @@ jobs:
       - name: e2e run
         run: |
           ./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
-          ./mvnw -B -V -ntp -DQASEIO_API_TOKEN=${{ secrets.QASEIO_API_TOKEN }} -pl '!kafka-ui-api' test -Pprod
+          ./mvnw -B -V -ntp -DQASEIO_API_TOKEN=${{ secrets.QASEIO_API_TOKEN }} -Dsurefire.suiteXmlFiles='src/test/resources/smoke.xml' -Dsuite=smoke -f 'kafka-ui-e2e-checks' test -Pprod
       - name: Generate allure report
         uses: simple-elf/allure-report-action@master
         if: always()

+ 1 - 1
.github/workflows/frontend.yaml

@@ -1,4 +1,4 @@
-name: frontend
+name: Frontend build and test
 on:
   push:
     branches:

+ 1 - 1
.github/workflows/helm.yaml

@@ -1,4 +1,4 @@
-name: Helm
+name: Helm linter
 on:
  pull_request:
   types: ["opened", "edited", "reopened", "synchronize"]

+ 1 - 1
.github/workflows/master.yaml

@@ -1,4 +1,4 @@
-name: Master
+name: Master branch build & deploy
 on:
   workflow_dispatch:
   push:

+ 1 - 1
.github/workflows/release-serde-api.yaml

@@ -1,4 +1,4 @@
-name: Release-serde-api
+name: Release serde api
 on: workflow_dispatch
 
 jobs:

+ 1 - 1
.github/workflows/terraform-deploy.yml

@@ -1,4 +1,4 @@
-name: terraform_deploy
+name: Terraform deploy
 on:
   workflow_dispatch:
     inputs:

+ 0 - 1
README.md

@@ -199,7 +199,6 @@ For example, if you want to use an environment variable to set the `name` parame
 |`KAFKA_CLUSTERS_0_METRICS_PORT`        	 |Open metrics port of a broker
 |`KAFKA_CLUSTERS_0_METRICS_TYPE`        	 |Type of metrics retriever to use. Valid values are JMX (default) or PROMETHEUS. If Prometheus, then metrics are read from prometheus-jmx-exporter instead of jmx
 |`KAFKA_CLUSTERS_0_READONLY`        	|Enable read-only mode. Default: false
-|`KAFKA_CLUSTERS_0_DISABLELOGDIRSCOLLECTION`        	|Disable collecting segments information. It should be true for confluent cloud. Default: false
 |`KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME` |Given name for the Kafka Connect cluster
 |`KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS` |Address of the Kafka Connect service endpoint
 |`KAFKA_CLUSTERS_0_KAFKACONNECT_0_USERNAME`| Kafka Connect cluster's basic authentication username

+ 1 - 1
charts/kafka-ui/Chart.yaml

@@ -2,6 +2,6 @@ apiVersion: v2
 name: kafka-ui
 description: A Helm chart for kafka-UI
 type: application
-version: 0.5.1
+version: 0.5.3
 appVersion: v0.5.0
 icon: https://github.com/provectus/kafka-ui/raw/master/documentation/images/kafka-ui-logo.png

+ 3 - 3
charts/kafka-ui/templates/ingress.yaml

@@ -35,7 +35,7 @@ spec:
 {{- if and ($.Capabilities.APIVersions.Has "networking.k8s.io/v1") $isHigher1p19 -}}
           {{- range .Values.ingress.precedingPaths }}
           - path: {{ .path }}
-            pathType: Prefix
+            pathType: {{ .Values.ingress.pathType }}
             backend:
               service:
                 name: {{ .serviceName }}
@@ -47,13 +47,13 @@ spec:
                 name: {{ $fullName }}
                 port:
                   number: {{ $svcPort }}
-            pathType: Prefix
+            pathType: {{ .Values.ingress.pathType }}
 {{- if .Values.ingress.path }}
             path: {{ .Values.ingress.path }}
 {{- end }}
           {{- range .Values.ingress.succeedingPaths }}
           - path: {{ .path }}
-            pathType: Prefix
+            pathType: {{ .Values.ingress.pathType }}
             backend:
               service:
                 name: {{ .serviceName }}

+ 2 - 0
charts/kafka-ui/templates/secret.yaml

@@ -1,3 +1,4 @@
+{{- if .Values.envs.secret -}}
 apiVersion: v1
 kind: Secret
 metadata:
@@ -9,3 +10,4 @@ data:
   {{- range $key, $val := .Values.envs.secret }}
   {{ $key }}: {{ $val | b64enc | quote }}
   {{- end -}}
+{{- end}}

+ 3 - 0
charts/kafka-ui/values.yaml

@@ -111,6 +111,9 @@ ingress:
   # The path for the Ingress
   path: "/"
 
+  # The path type for the Ingress
+  pathType: "Prefix"  
+
   # The hostname for the Ingress
   host: ""
 

+ 1 - 1
kafka-ui-api/Dockerfile

@@ -1,4 +1,4 @@
-FROM azul/zulu-openjdk-alpine:17
+FROM azul/zulu-openjdk-alpine:17-jre
 
 RUN apk add --no-cache gcompat # need to make snappy codec work
 RUN addgroup -S kafkaui && adduser -S kafkaui -G kafkaui

+ 0 - 22
kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/KafkaConnectClientsFactory.java

@@ -1,22 +0,0 @@
-package com.provectus.kafka.ui.client;
-
-import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
-import com.provectus.kafka.ui.model.KafkaConnectCluster;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.stereotype.Service;
-import org.springframework.util.unit.DataSize;
-
-@Service
-public class KafkaConnectClientsFactory {
-
-  @Value("${webclient.max-in-memory-buffer-size:20MB}")
-  private DataSize maxBuffSize;
-
-  private final Map<String, KafkaConnectClientApi> cache = new ConcurrentHashMap<>();
-
-  public KafkaConnectClientApi withKafkaConnectConfig(KafkaConnectCluster config) {
-    return cache.computeIfAbsent(config.getAddress(), s -> new RetryingKafkaConnectClient(config, maxBuffSize));
-  }
-}

+ 20 - 71
kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/RetryingKafkaConnectClient.java

@@ -1,49 +1,29 @@
 package com.provectus.kafka.ui.client;
 
-import com.fasterxml.jackson.databind.DeserializationFeature;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
+import static com.provectus.kafka.ui.config.ClustersProperties.ConnectCluster;
+
 import com.provectus.kafka.ui.connect.ApiClient;
-import com.provectus.kafka.ui.connect.RFC3339DateFormat;
 import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
 import com.provectus.kafka.ui.connect.model.Connector;
 import com.provectus.kafka.ui.connect.model.NewConnector;
 import com.provectus.kafka.ui.exception.KafkaConnectConflictReponseException;
 import com.provectus.kafka.ui.exception.ValidationException;
-import com.provectus.kafka.ui.model.InternalSchemaRegistry;
-import com.provectus.kafka.ui.model.KafkaCluster;
-import com.provectus.kafka.ui.model.KafkaConnectCluster;
-import com.provectus.kafka.ui.util.SecuredWebClient;
-import io.netty.handler.ssl.SslContext;
-import io.netty.handler.ssl.SslContextBuilder;
-import java.io.FileInputStream;
-import java.security.KeyStore;
-import java.text.DateFormat;
+import com.provectus.kafka.ui.util.WebClientConfigurator;
 import java.time.Duration;
 import java.util.List;
 import java.util.Map;
-import java.util.TimeZone;
-import javax.net.ssl.KeyManagerFactory;
-import javax.net.ssl.TrustManagerFactory;
 import lombok.extern.slf4j.Slf4j;
-import org.openapitools.jackson.nullable.JsonNullableModule;
 import org.springframework.core.ParameterizedTypeReference;
 import org.springframework.http.HttpHeaders;
 import org.springframework.http.HttpMethod;
 import org.springframework.http.MediaType;
-import org.springframework.http.client.reactive.ReactorClientHttpConnector;
-import org.springframework.http.codec.json.Jackson2JsonDecoder;
-import org.springframework.http.codec.json.Jackson2JsonEncoder;
 import org.springframework.util.MultiValueMap;
-import org.springframework.util.ResourceUtils;
 import org.springframework.util.unit.DataSize;
 import org.springframework.web.client.RestClientException;
-import org.springframework.web.reactive.function.client.ExchangeStrategies;
 import org.springframework.web.reactive.function.client.WebClient;
 import org.springframework.web.reactive.function.client.WebClientResponseException;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
-import reactor.netty.http.client.HttpClient;
 import reactor.util.retry.Retry;
 
 @Slf4j
@@ -51,7 +31,7 @@ public class RetryingKafkaConnectClient extends KafkaConnectClientApi {
   private static final int MAX_RETRIES = 5;
   private static final Duration RETRIES_DELAY = Duration.ofMillis(200);
 
-  public RetryingKafkaConnectClient(KafkaConnectCluster config, DataSize maxBuffSize) {
+  public RetryingKafkaConnectClient(ConnectCluster config, DataSize maxBuffSize) {
     super(new RetryingApiClient(config, maxBuffSize));
   }
 
@@ -97,58 +77,27 @@ public class RetryingKafkaConnectClient extends KafkaConnectClientApi {
 
 
   private static class RetryingApiClient extends ApiClient {
 
-    private static final DateFormat dateFormat = getDefaultDateFormat();
-    private static final ObjectMapper mapper = buildObjectMapper(dateFormat);
-
-    public RetryingApiClient(KafkaConnectCluster config, DataSize maxBuffSize) {
-      super(buildWebClient(mapper, maxBuffSize, config), mapper, dateFormat);
+    public RetryingApiClient(ConnectCluster config, DataSize maxBuffSize) {
+      super(buildWebClient(maxBuffSize, config), null, null);
       setBasePath(config.getAddress());
       setUsername(config.getUserName());
       setPassword(config.getPassword());
     }
 
-    public static DateFormat getDefaultDateFormat() {
-      DateFormat dateFormat = new RFC3339DateFormat();
-      dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
-      return dateFormat;
-    }
-
-    public static WebClient buildWebClient(ObjectMapper mapper, DataSize maxBuffSize, KafkaConnectCluster config) {
-      ExchangeStrategies strategies = ExchangeStrategies
-              .builder()
-              .codecs(clientDefaultCodecsConfigurer -> {
-                clientDefaultCodecsConfigurer.defaultCodecs()
-                        .jackson2JsonEncoder(new Jackson2JsonEncoder(mapper, MediaType.APPLICATION_JSON));
-                clientDefaultCodecsConfigurer.defaultCodecs()
-                        .jackson2JsonDecoder(new Jackson2JsonDecoder(mapper, MediaType.APPLICATION_JSON));
-                clientDefaultCodecsConfigurer.defaultCodecs()
-                        .maxInMemorySize((int) maxBuffSize.toBytes());
-              })
-              .build();
-
-      try {
-        WebClient.Builder webClient = SecuredWebClient.configure(
-            config.getKeystoreLocation(),
-            config.getKeystorePassword(),
-            config.getTruststoreLocation(),
-            config.getTruststorePassword()
-        );
-
-        return webClient.exchangeStrategies(strategies).build();
-      } catch (Exception e) {
-        throw new IllegalStateException(
-            "cannot create TLS configuration for kafka-connect cluster " + config.getName(), e);
-      }
-    }
-
-    public static ObjectMapper buildObjectMapper(DateFormat dateFormat) {
-      ObjectMapper mapper = new ObjectMapper();
-      mapper.setDateFormat(dateFormat);
-      mapper.registerModule(new JavaTimeModule());
-      mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-      JsonNullableModule jnm = new JsonNullableModule();
-      mapper.registerModule(jnm);
-      return mapper;
+    public static WebClient buildWebClient(DataSize maxBuffSize, ConnectCluster config) {
+      return new WebClientConfigurator()
+          .configureSsl(
+              config.getKeystoreLocation(),
+              config.getKeystorePassword(),
+              config.getTruststoreLocation(),
+              config.getTruststorePassword()
+          )
+          .configureBasicAuth(
+              config.getUserName(),
+              config.getPassword()
+          )
+          .configureBufferSize(maxBuffSize)
+          .build();
     }
 
     @Override

+ 6 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java

@@ -8,7 +8,10 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 import javax.annotation.PostConstruct;
+import lombok.AllArgsConstructor;
+import lombok.Builder;
 import lombok.Data;
+import lombok.NoArgsConstructor;
 import lombok.ToString;
 import org.springframework.boot.context.properties.ConfigurationProperties;
 import org.springframework.context.annotation.Configuration;
@@ -35,7 +38,6 @@ public class ClustersProperties {
     MetricsConfigData metrics;
     Properties properties;
     boolean readOnly = false;
-    boolean disableLogDirsCollection = false;
     List<SerdeConfig> serde = new ArrayList<>();
     String defaultKeySerde;
     String defaultValueSerde;
@@ -53,6 +55,9 @@ public class ClustersProperties {
   }
 
   @Data
+  @NoArgsConstructor
+  @AllArgsConstructor
+  @Builder(toBuilder = true)
   public static class ConnectCluster {
     String name;
     String address;
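Note: with Lombok's @NoArgsConstructor, @AllArgsConstructor and @Builder(toBuilder = true) now on ConnectCluster, connect-cluster configs can be constructed fluently, presumably for the new KafkaClusterFactory and for tests. A minimal illustrative sketch; the field values are made up:

    // Hypothetical usage of the Lombok-generated builder; "connect-1" and the address are example values.
    ClustersProperties.ConnectCluster connect = ClustersProperties.ConnectCluster.builder()
        .name("connect-1")
        .address("http://kafka-connect:8083")
        .build();
    // toBuilder = true lets you derive a modified copy without mutating the original.
    ClustersProperties.ConnectCluster copy = connect.toBuilder().name("connect-1-copy").build();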

+ 2 - 2
kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ConsumerGroupsController.java

@@ -189,8 +189,8 @@ public class ConsumerGroupsController extends AbstractController implements Cons
   private ConsumerGroupsPageResponseDTO convertPage(ConsumerGroupService.ConsumerGroupsPage
                                                         consumerGroupConsumerGroupsPage) {
     return new ConsumerGroupsPageResponseDTO()
-        .pageCount(consumerGroupConsumerGroupsPage.getTotalPages())
-        .consumerGroups(consumerGroupConsumerGroupsPage.getConsumerGroups()
+        .pageCount(consumerGroupConsumerGroupsPage.totalPages())
+        .consumerGroups(consumerGroupConsumerGroupsPage.consumerGroups()
             .stream()
             .map(ConsumerGroupMapper::toDto)
             .collect(Collectors.toList()));
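The switch from getTotalPages()/getConsumerGroups() to totalPages()/consumerGroups() is consistent with ConsumerGroupsPage now exposing record-style accessors. A hedged sketch of what such a nested declaration could look like; the element type is an assumption, not shown in this diff:

    // Illustrative only: a Java record generates consumerGroups() and totalPages() accessors automatically.
    public record ConsumerGroupsPage(List<InternalConsumerGroup> consumerGroups, int totalPages) {
    }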

+ 3 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java

@@ -234,9 +234,9 @@ public class KafkaConnectController extends AbstractController implements KafkaC
         .build());
 
     return validateAccess.then(
-        kafkaConnectService
-            .getConnectorPlugins(getCluster(clusterName), connectName)
-            .map(ResponseEntity::ok)
+        Mono.just(
+            ResponseEntity.ok(
+                kafkaConnectService.getConnectorPlugins(getCluster(clusterName), connectName)))
     );
   }
 

+ 64 - 61
kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/SchemasController.java

@@ -2,7 +2,8 @@ package com.provectus.kafka.ui.controller;
 
 
 import com.provectus.kafka.ui.api.SchemasApi;
 import com.provectus.kafka.ui.exception.ValidationException;
-import com.provectus.kafka.ui.mapper.ClusterMapper;
+import com.provectus.kafka.ui.mapper.KafkaSrMapper;
+import com.provectus.kafka.ui.mapper.KafkaSrMapperImpl;
 import com.provectus.kafka.ui.model.CompatibilityCheckResponseDTO;
 import com.provectus.kafka.ui.model.CompatibilityLevelDTO;
 import com.provectus.kafka.ui.model.KafkaCluster;
@@ -32,7 +33,7 @@ public class SchemasController extends AbstractController implements SchemasApi
 
 
   private static final Integer DEFAULT_PAGE_SIZE = 25;
 
-  private final ClusterMapper mapper;
+  private final KafkaSrMapper kafkaSrMapper = new KafkaSrMapperImpl();
 
   private final SchemaRegistryService schemaRegistryService;
   private final AccessControlService accessControlService;
@@ -40,7 +41,7 @@ public class SchemasController extends AbstractController implements SchemasApi
   @Override
   protected KafkaCluster getCluster(String clusterName) {
     var c = super.getCluster(clusterName);
-    if (c.getSchemaRegistry() == null) {
+    if (c.getSchemaRegistryClient() == null) {
       throw new ValidationException("Schema Registry is not set for cluster " + clusterName);
     }
     return c;
@@ -48,9 +49,8 @@ public class SchemasController extends AbstractController implements SchemasApi
 
 
   @Override
   public Mono<ResponseEntity<CompatibilityCheckResponseDTO>> checkSchemaCompatibility(
-      String clusterName, String subject, @Valid Mono<NewSchemaSubjectDTO> newSchemaSubject,
+      String clusterName, String subject, @Valid Mono<NewSchemaSubjectDTO> newSchemaSubjectMono,
       ServerWebExchange exchange) {
-
     Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder()
         .cluster(clusterName)
         .schema(subject)
@@ -58,37 +58,41 @@ public class SchemasController extends AbstractController implements SchemasApi
         .build());
 
     return validateAccess.then(
-        schemaRegistryService.checksSchemaCompatibility(
-                getCluster(clusterName), subject, newSchemaSubject)
-            .map(mapper::toCompatibilityCheckResponse)
+        newSchemaSubjectMono.flatMap(subjectDTO ->
+                schemaRegistryService.checksSchemaCompatibility(
+                    getCluster(clusterName),
+                    subject,
+                    kafkaSrMapper.fromDto(subjectDTO)
+                ))
+            .map(kafkaSrMapper::toDto)
             .map(ResponseEntity::ok)
     );
   }
 
   @Override
   public Mono<ResponseEntity<SchemaSubjectDTO>> createNewSchema(
-      String clusterName, @Valid Mono<NewSchemaSubjectDTO> newSchemaSubject,
+      String clusterName, @Valid Mono<NewSchemaSubjectDTO> newSchemaSubjectMono,
       ServerWebExchange exchange) {
+    Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder()
+        .cluster(clusterName)
+        .schemaActions(SchemaAction.CREATE)
+        .build());
 
-    return newSchemaSubject.flatMap(dto -> {
-      Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder()
-          .cluster(clusterName)
-          .schemaActions(SchemaAction.CREATE)
-          .build());
-
-      return validateAccess.then(
-          schemaRegistryService
-              .registerNewSchema(getCluster(clusterName), dto)
-              .map(ResponseEntity::ok)
-      );
-    });
+    return validateAccess.then(
+        newSchemaSubjectMono.flatMap(newSubject ->
+                schemaRegistryService.registerNewSchema(
+                    getCluster(clusterName),
+                    newSubject.getSubject(),
+                    kafkaSrMapper.fromDto(newSubject)
+                )
+            ).map(kafkaSrMapper::toDto)
+            .map(ResponseEntity::ok)
+    );
   }
 
   @Override
-  public Mono<ResponseEntity<Void>> deleteLatestSchema(String clusterName,
-                                                       String subject,
-                                                       ServerWebExchange exchange) {
-
+  public Mono<ResponseEntity<Void>> deleteLatestSchema(
+      String clusterName, String subject, ServerWebExchange exchange) {
     Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder()
         .cluster(clusterName)
         .schema(subject)
@@ -102,9 +106,8 @@ public class SchemasController extends AbstractController implements SchemasApi
   }
 
   @Override
-  public Mono<ResponseEntity<Void>> deleteSchema(String clusterName,
-                                                 String subject,
-                                                 ServerWebExchange exchange) {
+  public Mono<ResponseEntity<Void>> deleteSchema(
+      String clusterName, String subject, ServerWebExchange exchange) {
     Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder()
         .cluster(clusterName)
         .schema(subject)
@@ -118,36 +121,32 @@ public class SchemasController extends AbstractController implements SchemasApi
   }
 
   @Override
-  public Mono<ResponseEntity<Void>> deleteSchemaByVersion(String clusterName,
-                                                          String subject,
-                                                          Integer version,
-                                                          ServerWebExchange exchange) {
-
+  public Mono<ResponseEntity<Void>> deleteSchemaByVersion(
+      String clusterName, String subjectName, Integer version, ServerWebExchange exchange) {
     Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder()
         .cluster(clusterName)
-        .schema(subject)
+        .schema(subjectName)
         .schemaActions(SchemaAction.DELETE)
         .build());
 
     return validateAccess.then(
-        schemaRegistryService.deleteSchemaSubjectByVersion(getCluster(clusterName), subject, version)
+        schemaRegistryService.deleteSchemaSubjectByVersion(getCluster(clusterName), subjectName, version)
             .thenReturn(ResponseEntity.ok().build())
     );
   }
 
   @Override
   public Mono<ResponseEntity<Flux<SchemaSubjectDTO>>> getAllVersionsBySubject(
-      String clusterName, String subject, ServerWebExchange exchange) {
-
+      String clusterName, String subjectName, ServerWebExchange exchange) {
     Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder()
         .cluster(clusterName)
-        .schema(subject)
+        .schema(subjectName)
         .schemaActions(SchemaAction.VIEW)
         .build());
 
     Flux<SchemaSubjectDTO> schemas =
-        schemaRegistryService.getAllVersionsBySubject(getCluster(clusterName), subject);
-
+        schemaRegistryService.getAllVersionsBySubject(getCluster(clusterName), subjectName)
+            .map(kafkaSrMapper::toDto);
     return validateAccess.thenReturn(ResponseEntity.ok(schemas));
   }
 
@@ -155,7 +154,7 @@ public class SchemasController extends AbstractController implements SchemasApi
   public Mono<ResponseEntity<CompatibilityLevelDTO>> getGlobalSchemaCompatibilityLevel(
       String clusterName, ServerWebExchange exchange) {
     return schemaRegistryService.getGlobalSchemaCompatibilityLevel(getCluster(clusterName))
-        .map(mapper::toCompatibilityLevelDto)
+        .map(c -> new CompatibilityLevelDTO().compatibility(kafkaSrMapper.toDto(c)))
         .map(ResponseEntity::ok)
         .defaultIfEmpty(ResponseEntity.notFound().build());
   }
@@ -172,6 +171,7 @@ public class SchemasController extends AbstractController implements SchemasApi
 
 
     return validateAccess.then(
         schemaRegistryService.getLatestSchemaVersionBySubject(getCluster(clusterName), subject)
+            .map(kafkaSrMapper::toDto)
             .map(ResponseEntity::ok)
     );
   }
@@ -179,7 +179,6 @@ public class SchemasController extends AbstractController implements SchemasApi
   @Override
   public Mono<ResponseEntity<SchemaSubjectDTO>> getSchemaByVersion(
       String clusterName, String subject, Integer version, ServerWebExchange exchange) {
-
     Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder()
         .cluster(clusterName)
         .schema(subject)
@@ -189,6 +188,7 @@ public class SchemasController extends AbstractController implements SchemasApi
     return validateAccess.then(
         schemaRegistryService.getSchemaSubjectByVersion(
                 getCluster(clusterName), subject, version)
+            .map(kafkaSrMapper::toDto)
             .map(ResponseEntity::ok)
     );
   }
@@ -198,10 +198,10 @@ public class SchemasController extends AbstractController implements SchemasApi
                                                                     @Valid Integer pageNum,
                                                                     @Valid Integer perPage,
                                                                     @Valid String search,
-                                                                    ServerWebExchange exchange) {
+                                                                    ServerWebExchange serverWebExchange) {
     return schemaRegistryService
         .getAllSubjectNames(getCluster(clusterName))
-        .flatMapMany(Flux::fromArray)
+        .flatMapIterable(l -> l)
         .filterWhen(schema -> accessControlService.isSchemaAccessible(schema, clusterName))
         .collectList()
         .flatMap(subjects -> {
@@ -218,46 +218,49 @@ public class SchemasController extends AbstractController implements SchemasApi
               .limit(pageSize)
               .collect(Collectors.toList());
           return schemaRegistryService.getAllLatestVersionSchemas(getCluster(clusterName), subjectsToRender)
-              .map(a -> new SchemaSubjectsResponseDTO().pageCount(totalPages).schemas(a));
-        })
-        .map(ResponseEntity::ok);
+              .map(subjs -> subjs.stream().map(kafkaSrMapper::toDto).toList())
+              .map(subjs -> new SchemaSubjectsResponseDTO().pageCount(totalPages).schemas(subjs));
+        }).map(ResponseEntity::ok);
   }
 
   @Override
   public Mono<ResponseEntity<Void>> updateGlobalSchemaCompatibilityLevel(
-      String clusterName, @Valid Mono<CompatibilityLevelDTO> compatibilityLevel,
+      String clusterName, @Valid Mono<CompatibilityLevelDTO> compatibilityLevelMono,
       ServerWebExchange exchange) {
-
     Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder()
         .cluster(clusterName)
         .schemaActions(SchemaAction.MODIFY_GLOBAL_COMPATIBILITY)
         .build());
 
-    log.info("Updating schema compatibility globally");
-
     return validateAccess.then(
-        schemaRegistryService.updateSchemaCompatibility(
-                getCluster(clusterName), compatibilityLevel)
-            .map(ResponseEntity::ok)
+        compatibilityLevelMono
+            .flatMap(compatibilityLevelDTO ->
+                schemaRegistryService.updateGlobalSchemaCompatibility(
+                    getCluster(clusterName),
+                    kafkaSrMapper.fromDto(compatibilityLevelDTO.getCompatibility())
+                ))
+            .thenReturn(ResponseEntity.ok().build())
     );
   }
 
   @Override
   public Mono<ResponseEntity<Void>> updateSchemaCompatibilityLevel(
-      String clusterName, String subject, @Valid Mono<CompatibilityLevelDTO> compatibilityLevel,
+      String clusterName, String subject, @Valid Mono<CompatibilityLevelDTO> compatibilityLevelMono,
       ServerWebExchange exchange) {
-
     Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder()
         .cluster(clusterName)
         .schemaActions(SchemaAction.EDIT)
         .build());
 
-    log.info("Updating schema compatibility for subject: {}", subject);
-
     return validateAccess.then(
-        schemaRegistryService.updateSchemaCompatibility(
-                getCluster(clusterName), subject, compatibilityLevel)
-            .map(ResponseEntity::ok)
+        compatibilityLevelMono
+            .flatMap(compatibilityLevelDTO ->
+                schemaRegistryService.updateSchemaCompatibility(
+                    getCluster(clusterName),
+                    subject,
+                    kafkaSrMapper.fromDto(compatibilityLevelDTO.getCompatibility())
+                ))
+            .thenReturn(ResponseEntity.ok().build())
     );
   }
 }

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/TopicsController.java

@@ -175,7 +175,7 @@ public class TopicsController extends AbstractController implements TopicsApi {
           List<InternalTopic> filtered = existingTopics.stream()
               .filter(topic -> !topic.isInternal()
                   || showInternal != null && showInternal)
-              .filter(topic -> search == null || StringUtils.contains(topic.getName(), search))
+              .filter(topic -> search == null || StringUtils.containsIgnoreCase(topic.getName(), search))
               .sorted(comparator)
               .toList();
           var totalPages = (filtered.size() / pageSize)
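This switches topic search from Apache Commons Lang's case-sensitive StringUtils.contains to containsIgnoreCase, so the filter matches regardless of case. A small illustration with made-up topic names:

    // Old behaviour: case-sensitive match
    StringUtils.contains("PaymentEvents", "payment");           // false
    // New behaviour: case-insensitive match
    StringUtils.containsIgnoreCase("PaymentEvents", "payment"); // true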

+ 2 - 2
kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/SchemaCompatibilityException.java

@@ -1,8 +1,8 @@
 package com.provectus.kafka.ui.exception;
 
 public class SchemaCompatibilityException extends CustomBaseException {
-  public SchemaCompatibilityException(String message) {
-    super(message);
+  public SchemaCompatibilityException() {
+    super("Schema being registered is incompatible with an earlier schema");
   }
 
   @Override

+ 0 - 12
kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/SchemaTypeNotSupportedException.java

@@ -1,12 +0,0 @@
-package com.provectus.kafka.ui.exception;
-
-public class SchemaTypeNotSupportedException extends UnprocessableEntityException {
-
-  private static final String REQUIRED_SCHEMA_REGISTRY_VERSION = "5.5.0";
-
-  public SchemaTypeNotSupportedException() {
-    super(String.format("Current version of Schema Registry does "
-        + "not support provided schema type,"
-        + " version %s or later is required here.", REQUIRED_SCHEMA_REGISTRY_VERSION));
-  }
-}

+ 1 - 99
kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ClusterMapper.java

@@ -8,25 +8,18 @@ import com.provectus.kafka.ui.model.BrokerMetricsDTO;
 import com.provectus.kafka.ui.model.ClusterDTO;
 import com.provectus.kafka.ui.model.ClusterMetricsDTO;
 import com.provectus.kafka.ui.model.ClusterStatsDTO;
-import com.provectus.kafka.ui.model.CompatibilityCheckResponseDTO;
-import com.provectus.kafka.ui.model.CompatibilityLevelDTO;
 import com.provectus.kafka.ui.model.ConfigSourceDTO;
 import com.provectus.kafka.ui.model.ConfigSynonymDTO;
 import com.provectus.kafka.ui.model.ConnectDTO;
-import com.provectus.kafka.ui.model.FailoverUrlList;
 import com.provectus.kafka.ui.model.Feature;
 import com.provectus.kafka.ui.model.InternalBroker;
 import com.provectus.kafka.ui.model.InternalBrokerConfig;
 import com.provectus.kafka.ui.model.InternalBrokerDiskUsage;
 import com.provectus.kafka.ui.model.InternalClusterState;
-import com.provectus.kafka.ui.model.InternalKsqlServer;
 import com.provectus.kafka.ui.model.InternalPartition;
 import com.provectus.kafka.ui.model.InternalReplica;
-import com.provectus.kafka.ui.model.InternalSchemaRegistry;
 import com.provectus.kafka.ui.model.InternalTopic;
 import com.provectus.kafka.ui.model.InternalTopicConfig;
-import com.provectus.kafka.ui.model.KafkaCluster;
-import com.provectus.kafka.ui.model.KafkaConnectCluster;
 import com.provectus.kafka.ui.model.MetricDTO;
 import com.provectus.kafka.ui.model.Metrics;
 import com.provectus.kafka.ui.model.PartitionDTO;
@@ -34,35 +27,20 @@ import com.provectus.kafka.ui.model.ReplicaDTO;
 import com.provectus.kafka.ui.model.TopicConfigDTO;
 import com.provectus.kafka.ui.model.TopicDTO;
 import com.provectus.kafka.ui.model.TopicDetailsDTO;
-import com.provectus.kafka.ui.model.schemaregistry.InternalCompatibilityCheck;
-import com.provectus.kafka.ui.model.schemaregistry.InternalCompatibilityLevel;
 import com.provectus.kafka.ui.service.masking.DataMasking;
 import com.provectus.kafka.ui.service.metrics.RawMetric;
-import com.provectus.kafka.ui.util.PollingThrottler;
-import java.util.Arrays;
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
-import java.util.Properties;
-import java.util.function.Supplier;
 import java.util.stream.Collectors;
 import org.apache.kafka.clients.admin.ConfigEntry;
 import org.mapstruct.Mapper;
 import org.mapstruct.Mapping;
-import org.mapstruct.Named;
 
 
 @Mapper(componentModel = "spring")
 public interface ClusterMapper {
 
   ClusterDTO toCluster(InternalClusterState clusterState);
 
-  @Mapping(target = "properties", source = "properties", qualifiedByName = "setProperties")
-  @Mapping(target = "schemaRegistry", source = ".", qualifiedByName = "setSchemaRegistry")
-  @Mapping(target = "ksqldbServer", source = ".", qualifiedByName = "setKsqldbServer")
-  @Mapping(target = "metricsConfig", source = "metrics")
-  @Mapping(target = "throttler", source = ".", qualifiedByName = "createClusterThrottler")
-  KafkaCluster toKafkaCluster(ClustersProperties.Cluster clusterProperties);
-
   ClusterStatsDTO toClusterStats(InternalClusterState clusterState);
 
   default ClusterMetricsDTO toClusterMetrics(Metrics metrics) {
@@ -107,67 +85,6 @@ public interface ClusterMapper {
 
 
   BrokerDTO toBrokerDto(InternalBroker broker);
 
-  @Named("setSchemaRegistry")
-  default InternalSchemaRegistry setSchemaRegistry(ClustersProperties.Cluster clusterProperties) {
-    if (clusterProperties == null
-        || clusterProperties.getSchemaRegistry() == null) {
-      return null;
-    }
-
-    InternalSchemaRegistry.InternalSchemaRegistryBuilder internalSchemaRegistry =
-        InternalSchemaRegistry.builder();
-
-    internalSchemaRegistry.url(
-        clusterProperties.getSchemaRegistry() != null
-            ? new FailoverUrlList(Arrays.asList(clusterProperties.getSchemaRegistry().split(",")))
-            : new FailoverUrlList(Collections.emptyList())
-    );
-
-    if (clusterProperties.getSchemaRegistryAuth() != null) {
-      internalSchemaRegistry.username(clusterProperties.getSchemaRegistryAuth().getUsername());
-      internalSchemaRegistry.password(clusterProperties.getSchemaRegistryAuth().getPassword());
-    }
-
-    if (clusterProperties.getSchemaRegistrySsl() != null) {
-      internalSchemaRegistry.keystoreLocation(clusterProperties.getSchemaRegistrySsl().getKeystoreLocation());
-      internalSchemaRegistry.keystorePassword(clusterProperties.getSchemaRegistrySsl().getKeystorePassword());
-      internalSchemaRegistry.truststoreLocation(clusterProperties.getSchemaRegistrySsl().getTruststoreLocation());
-      internalSchemaRegistry.truststorePassword(clusterProperties.getSchemaRegistrySsl().getTruststorePassword());
-    }
-
-    return internalSchemaRegistry.build();
-  }
-
-  @Named("setKsqldbServer")
-  default InternalKsqlServer setKsqldbServer(ClustersProperties.Cluster clusterProperties) {
-    if (clusterProperties == null
-            || clusterProperties.getKsqldbServer() == null) {
-      return null;
-    }
-
-    InternalKsqlServer.InternalKsqlServerBuilder internalKsqlServerBuilder =
-            InternalKsqlServer.builder().url(clusterProperties.getKsqldbServer());
-
-    if (clusterProperties.getKsqldbServerAuth() != null) {
-      internalKsqlServerBuilder.username(clusterProperties.getKsqldbServerAuth().getUsername());
-      internalKsqlServerBuilder.password(clusterProperties.getKsqldbServerAuth().getPassword());
-    }
-
-    if (clusterProperties.getKsqldbServerSsl() != null) {
-      internalKsqlServerBuilder.keystoreLocation(clusterProperties.getKsqldbServerSsl().getKeystoreLocation());
-      internalKsqlServerBuilder.keystorePassword(clusterProperties.getKsqldbServerSsl().getKeystorePassword());
-      internalKsqlServerBuilder.truststoreLocation(clusterProperties.getKsqldbServerSsl().getTruststoreLocation());
-      internalKsqlServerBuilder.truststorePassword(clusterProperties.getKsqldbServerSsl().getTruststorePassword());
-    }
-
-    return internalKsqlServerBuilder.build();
-  }
-
-  @Named("createClusterThrottler")
-  default Supplier<PollingThrottler> createClusterThrottler(ClustersProperties.Cluster cluster) {
-    return PollingThrottler.throttlerSupplier(cluster);
-  }
-
   TopicDetailsDTO toTopicDetails(InternalTopic topic);
 
   @Mapping(target = "isReadOnly", source = "readOnly")
@@ -176,16 +93,10 @@ public interface ClusterMapper {
 
 
   ReplicaDTO toReplica(InternalReplica replica);
 
 
-  ConnectDTO toKafkaConnect(KafkaConnectCluster connect);
+  ConnectDTO toKafkaConnect(ClustersProperties.ConnectCluster connect);
 
   List<ClusterDTO.FeaturesEnum> toFeaturesEnum(List<Feature> features);
 
 
-  @Mapping(target = "isCompatible", source = "compatible")
-  CompatibilityCheckResponseDTO toCompatibilityCheckResponse(InternalCompatibilityCheck dto);
-
-  @Mapping(target = "compatibility", source = "compatibilityLevel")
-  CompatibilityLevelDTO toCompatibilityLevelDto(InternalCompatibilityLevel dto);
-
   default List<PartitionDTO> map(Map<Integer, InternalPartition> map) {
     return map.values().stream().map(this::toPartition).collect(Collectors.toList());
   }
@@ -202,13 +113,4 @@ public interface ClusterMapper {
     return DataMasking.create(maskingProperties);
   }
 
 
-  @Named("setProperties")
-  default Properties setProperties(Properties properties) {
-    Properties copy = new Properties();
-    if (properties != null) {
-      copy.putAll(properties);
-    }
-    return copy;
-  }
-
 }

+ 11 - 7
kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java

@@ -89,13 +89,17 @@ public class ConsumerGroupMapper {
             .flatMap(m -> m.getAssignment().stream().map(TopicPartition::topic))
     ).collect(Collectors.toSet()).size();
 
 
-    long messagesBehind = c.getOffsets().entrySet().stream()
-        .mapToLong(e ->
-            Optional.ofNullable(c.getEndOffsets())
-                .map(o -> o.get(e.getKey()))
-                .map(o -> o - e.getValue())
-                .orElse(0L)
-        ).sum();
+    Long messagesBehind = null;
+    // messagesBehind should be undefined if no committed offsets found for topic
+    if (!c.getOffsets().isEmpty()) {
+      messagesBehind = c.getOffsets().entrySet().stream()
+          .mapToLong(e ->
+              Optional.ofNullable(c.getEndOffsets())
+                  .map(o -> o.get(e.getKey()))
+                  .map(o -> o - e.getValue())
+                  .orElse(0L)
+          ).sum();
+    }
 
 
     consumerGroup.setMessagesBehind(messagesBehind);
     consumerGroup.setTopics(numTopics);
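The hunk above leaves messagesBehind null (instead of defaulting it to 0) when a group has no committed offsets. A standalone sketch of that rule, with illustrative names that are not taken from the codebase:

import java.util.Map;
import java.util.Optional;
import org.apache.kafka.common.TopicPartition;

class LagSketch {
  // null (not 0) when there are no committed offsets; otherwise the sum of (endOffset - committedOffset)
  static Long messagesBehind(Map<TopicPartition, Long> committed, Map<TopicPartition, Long> endOffsets) {
    if (committed.isEmpty()) {
      return null;
    }
    return committed.entrySet().stream()
        .mapToLong(e -> Optional.ofNullable(endOffsets.get(e.getKey()))
            .map(end -> end - e.getValue())
            .orElse(0L))
        .sum();
  }
}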

+ 37 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/KafkaSrMapper.java

@@ -0,0 +1,37 @@
+package com.provectus.kafka.ui.mapper;
+
+import com.provectus.kafka.ui.model.CompatibilityCheckResponseDTO;
+import com.provectus.kafka.ui.model.CompatibilityLevelDTO;
+import com.provectus.kafka.ui.model.NewSchemaSubjectDTO;
+import com.provectus.kafka.ui.model.SchemaSubjectDTO;
+import com.provectus.kafka.ui.model.SchemaTypeDTO;
+import com.provectus.kafka.ui.service.SchemaRegistryService;
+import com.provectus.kafka.ui.sr.model.Compatibility;
+import com.provectus.kafka.ui.sr.model.CompatibilityCheckResponse;
+import com.provectus.kafka.ui.sr.model.NewSubject;
+import com.provectus.kafka.ui.sr.model.SchemaType;
+import java.util.Optional;
+import org.mapstruct.Mapper;
+
+
+@Mapper(componentModel = "spring")
+public interface KafkaSrMapper {
+
+  default SchemaSubjectDTO toDto(SchemaRegistryService.SubjectWithCompatibilityLevel s) {
+    return new SchemaSubjectDTO()
+        .id(s.getId())
+        .version(s.getVersion())
+        .subject(s.getSubject())
+        .schema(s.getSchema())
+        .schemaType(SchemaTypeDTO.fromValue(Optional.ofNullable(s.getSchemaType()).orElse(SchemaType.AVRO).getValue()))
+        .compatibilityLevel(s.getCompatibility().toString());
+  }
+
+  CompatibilityCheckResponseDTO toDto(CompatibilityCheckResponse ccr);
+
+  CompatibilityLevelDTO.CompatibilityEnum toDto(Compatibility compatibility);
+
+  NewSubject fromDto(NewSchemaSubjectDTO subjectDto);
+
+  Compatibility fromDto(CompatibilityLevelDTO.CompatibilityEnum dtoEnum);
+}
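KafkaSrMapper above declares only the mappings; MapStruct generates the implementation at build time and, with componentModel = "spring", registers it as a bean. A toy sketch of that pattern with made-up Source/TargetDto types (not from this repository):

import org.mapstruct.Mapper;

record Source(String subject, int version) {}

record TargetDto(String subject, int version) {}

@Mapper(componentModel = "spring")
interface SketchMapper {
  // MapStruct emits SketchMapperImpl with a field-by-field copy; Spring injects it wherever SketchMapper is required
  TargetDto toDto(Source source);
}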

+ 0 - 59
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/FailoverUrlList.java

@@ -1,59 +0,0 @@
-package com.provectus.kafka.ui.model;
-
-import java.time.Instant;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.atomic.AtomicInteger;
-import lombok.experimental.Delegate;
-
-public class FailoverUrlList {
-
-  public static final int DEFAULT_RETRY_GRACE_PERIOD_IN_MS = 5000;
-
-  private final Map<Integer, Instant> failures = new ConcurrentHashMap<>();
-  private final AtomicInteger index = new AtomicInteger(0);
-  @Delegate
-  private final List<String> urls;
-  private final int retryGracePeriodInMs;
-
-  public FailoverUrlList(List<String> urls) {
-    this(urls, DEFAULT_RETRY_GRACE_PERIOD_IN_MS);
-  }
-
-  public FailoverUrlList(List<String> urls, int retryGracePeriodInMs) {
-    if (urls != null && !urls.isEmpty()) {
-      this.urls = new ArrayList<>(urls);
-    } else {
-      throw new IllegalArgumentException("Expected at least one URL to be passed in constructor");
-    }
-    this.retryGracePeriodInMs = retryGracePeriodInMs;
-  }
-
-  public String current() {
-    return this.urls.get(this.index.get());
-  }
-
-  public void fail(String url) {
-    int currentIndex = this.index.get();
-    if ((this.urls.get(currentIndex)).equals(url)) {
-      this.failures.put(currentIndex, Instant.now());
-      this.index.compareAndSet(currentIndex, (currentIndex + 1) % this.urls.size());
-    }
-  }
-
-  public boolean isFailoverAvailable() {
-    var now = Instant.now();
-    return this.urls.size() > this.failures.size()
-            || this.failures
-                    .values()
-                    .stream()
-                    .anyMatch(e -> now.isAfter(e.plusMillis(retryGracePeriodInMs)));
-  }
-
-  @Override
-  public String toString() {
-    return this.urls.toString();
-  }
-}

+ 0 - 19
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalKsqlServer.java

@@ -1,19 +0,0 @@
-package com.provectus.kafka.ui.model;
-
-import lombok.Builder;
-import lombok.Data;
-import lombok.ToString;
-
-@Data
-@ToString(exclude = {"password", "keystorePassword", "truststorePassword"})
-@Builder(toBuilder = true)
-public class InternalKsqlServer {
-  private final String url;
-  private final String username;
-  private final String password;
-
-  private final String keystoreLocation;
-  private final String truststoreLocation;
-  private final String keystorePassword;
-  private final String truststorePassword;
-}

+ 0 - 33
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalSchemaRegistry.java

@@ -1,33 +0,0 @@
-package com.provectus.kafka.ui.model;
-
-import lombok.Builder;
-import lombok.Data;
-
-@Data
-@Builder(toBuilder = true)
-public class InternalSchemaRegistry {
-  private final String username;
-  private final String password;
-  private final FailoverUrlList url;
-
-  private final String keystoreLocation;
-  private final String truststoreLocation;
-  private final String keystorePassword;
-  private final String truststorePassword;
-
-  public String getPrimaryNodeUri() {
-    return url.get(0);
-  }
-
-  public String getUri() {
-    return url.current();
-  }
-
-  public void markAsUnavailable(String url) {
-    this.url.fail(url);
-  }
-
-  public boolean isFailoverAvailable() {
-    return this.url.isFailoverAvailable();
-  }
-}

+ 11 - 5
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java

@@ -1,8 +1,13 @@
 package com.provectus.kafka.ui.model;
 
+import com.provectus.kafka.ui.config.ClustersProperties;
+import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
+import com.provectus.kafka.ui.service.ksql.KsqlApiClient;
 import com.provectus.kafka.ui.service.masking.DataMasking;
+import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
 import com.provectus.kafka.ui.util.PollingThrottler;
-import java.util.List;
+import com.provectus.kafka.ui.util.ReactiveFailover;
+import java.util.Map;
 import java.util.Properties;
 import java.util.function.Supplier;
 import lombok.AccessLevel;
@@ -14,16 +19,17 @@ import lombok.Data;
 @Builder(toBuilder = true)
 @AllArgsConstructor(access = AccessLevel.PRIVATE)
 public class KafkaCluster {
+  private final ClustersProperties.Cluster originalProperties;
+
   private final String name;
   private final String version;
   private final String bootstrapServers;
-  private final InternalSchemaRegistry schemaRegistry;
-  private final InternalKsqlServer ksqldbServer;
-  private final List<KafkaConnectCluster> kafkaConnect;
   private final Properties properties;
   private final boolean readOnly;
-  private final boolean disableLogDirsCollection;
   private final MetricsConfig metricsConfig;
   private final DataMasking masking;
   private final Supplier<PollingThrottler> throttler;
+  private final ReactiveFailover<KafkaSrClientApi> schemaRegistryClient;
+  private final Map<String, ReactiveFailover<KafkaConnectClientApi>> connectsClients;
+  private final ReactiveFailover<KsqlApiClient> ksqlClient;
 }

+ 0 - 21
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaConnectCluster.java

@@ -1,21 +0,0 @@
-package com.provectus.kafka.ui.model;
-
-import lombok.AccessLevel;
-import lombok.AllArgsConstructor;
-import lombok.Builder;
-import lombok.Data;
-
-@Data
-@Builder(toBuilder = true)
-@AllArgsConstructor(access = AccessLevel.PRIVATE)
-public class KafkaConnectCluster {
-  private final String name;
-  private final String address;
-  private final String userName;
-  private final String password;
-
-  private final String keystoreLocation;
-  private final String truststoreLocation;
-  private final String keystorePassword;
-  private final String truststorePassword;
-}

+ 2 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java

@@ -9,6 +9,7 @@ import com.provectus.kafka.ui.config.ClustersProperties.SerdeConfig;
 import com.provectus.kafka.ui.exception.ValidationException;
 import com.provectus.kafka.ui.serde.api.PropertyResolver;
 import com.provectus.kafka.ui.serde.api.Serde;
+import com.provectus.kafka.ui.serdes.builtin.AvroEmbeddedSerde;
 import com.provectus.kafka.ui.serdes.builtin.Base64Serde;
 import com.provectus.kafka.ui.serdes.builtin.Int32Serde;
 import com.provectus.kafka.ui.serdes.builtin.Int64Serde;
@@ -43,6 +44,7 @@ public class SerdesInitializer {
             .put(Int64Serde.name(), Int64Serde.class)
             .put(UInt32Serde.name(), UInt32Serde.class)
             .put(UInt64Serde.name(), UInt64Serde.class)
+            .put(AvroEmbeddedSerde.name(), AvroEmbeddedSerde.class)
             .put(Base64Serde.name(), Base64Serde.class)
             .put(UuidBinarySerde.name(), UuidBinarySerde.class)
             .build(),

+ 72 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/AvroEmbeddedSerde.java

@@ -0,0 +1,72 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.PropertyResolver;
+import com.provectus.kafka.ui.serde.api.RecordHeaders;
+import com.provectus.kafka.ui.serde.api.SchemaDescription;
+import com.provectus.kafka.ui.serdes.BuiltInSerde;
+import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
+import java.util.Map;
+import java.util.Optional;
+import lombok.SneakyThrows;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.SeekableByteArrayInput;
+import org.apache.avro.generic.GenericDatumReader;
+
+public class AvroEmbeddedSerde implements BuiltInSerde {
+
+  public static String name() {
+    return "Avro (Embedded)";
+  }
+
+  @Override
+  public void configure(PropertyResolver serdeProperties,
+                        PropertyResolver kafkaClusterProperties,
+                        PropertyResolver globalProperties) {
+  }
+
+  @Override
+  public Optional<String> getDescription() {
+    return Optional.empty();
+  }
+
+  @Override
+  public Optional<SchemaDescription> getSchema(String topic, Target type) {
+    return Optional.empty();
+  }
+
+  @Override
+  public boolean canDeserialize(String topic, Target type) {
+    return true;
+  }
+
+  @Override
+  public boolean canSerialize(String topic, Target type) {
+    return false;
+  }
+
+  @Override
+  public Serializer serializer(String topic, Target type) {
+    throw new IllegalStateException();
+  }
+
+  @Override
+  public Deserializer deserializer(String topic, Target type) {
+    return new Deserializer() {
+      @SneakyThrows
+      @Override
+      public DeserializeResult deserialize(RecordHeaders headers, byte[] data) {
+        try (var reader = new DataFileReader<>(new SeekableByteArrayInput(data), new GenericDatumReader<>())) {
+          if (!reader.hasNext()) {
+            // this is very strange situation, when only header present in payload
+            // returning null in this case
+            return new DeserializeResult(null, DeserializeResult.Type.JSON, Map.of());
+          }
+          Object avroObj = reader.next();
+          String jsonValue = new String(AvroSchemaUtils.toJson(avroObj));
+          return new DeserializeResult(jsonValue, DeserializeResult.Type.JSON, Map.of());
+        }
+      }
+    };
+  }
+}
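Context for the serde above (not part of the change itself): it expects the record value to be a complete Avro object-container file, so DataFileReader can recover the schema from the payload without any registry lookup. A self-contained sketch that produces such a payload and renders it as JSON the same way; the User schema here is made up for illustration:

import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
import java.io.ByteArrayOutputStream;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.file.SeekableByteArrayInput;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;

public class AvroEmbeddedSketch {
  public static void main(String[] args) throws Exception {
    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"User\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"}]}");
    GenericRecord record = new GenericData.Record(schema);
    record.put("name", "alice");

    // write an Avro object-container payload (file header + schema + records in one byte[])
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try (DataFileWriter<GenericRecord> writer = new DataFileWriter<>(new GenericDatumWriter<>(schema))) {
      writer.create(schema, out);
      writer.append(record);
    }

    // read it back the same way the deserializer does and print the record as JSON
    try (var reader = new DataFileReader<>(new SeekableByteArrayInput(out.toByteArray()), new GenericDatumReader<>())) {
      Object avroObj = reader.next();
      System.out.println(new String(AvroSchemaUtils.toJson(avroObj)));
    }
  }
}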

+ 6 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java

@@ -2,9 +2,9 @@ package com.provectus.kafka.ui.serdes.builtin.sr;
 
 
 import com.fasterxml.jackson.databind.JsonNode;
 import com.google.protobuf.Message;
+import com.google.protobuf.util.JsonFormat;
 import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
 import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
-import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaUtils;
 import io.confluent.kafka.serializers.KafkaAvroDeserializer;
 import io.confluent.kafka.serializers.json.KafkaJsonSchemaDeserializer;
 import io.confluent.kafka.serializers.protobuf.KafkaProtobufDeserializer;
@@ -52,8 +52,11 @@ interface MessageFormatter {
     @SneakyThrows
     public String format(String topic, byte[] value) {
       final Message message = protobufDeserializer.deserialize(topic, value);
-      byte[] jsonBytes = ProtobufSchemaUtils.toJson(message);
-      return new String(jsonBytes);
+      return JsonFormat.printer()
+          .includingDefaultValueFields()
+          .omittingInsignificantWhitespace()
+          .preservingProtoFieldNames()
+          .print(message);
     }
   }
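For reference (not part of the diff): the replacement formats protobuf messages with protobuf-java-util's JsonFormat instead of ProtobufSchemaUtils. A standalone sketch of the same printer chain, using the Struct well-known type so it runs without any generated classes:

import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import com.google.protobuf.util.JsonFormat;

public class JsonFormatSketch {
  public static void main(String[] args) throws Exception {
    Struct message = Struct.newBuilder()
        .putFields("enabled", Value.newBuilder().setBoolValue(false).build())
        .build();
    String json = JsonFormat.printer()
        .includingDefaultValueFields()       // do not drop fields holding default values
        .omittingInsignificantWhitespace()   // compact single-line output
        .preservingProtoFieldNames()         // keep names exactly as declared in the .proto
        .print(message);
    System.out.println(json);
  }
}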
 
 

+ 13 - 6
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java

@@ -46,6 +46,7 @@ public class SchemaRegistrySerde implements BuiltInSerde {
   private List<String> schemaRegistryUrls;
   private String valueSchemaNameTemplate;
   private String keySchemaNameTemplate;
+  private boolean checkSchemaExistenceForDeserialize;
 
   private Map<SchemaType, MessageFormatter> schemaRegistryFormatters;
 
 
@@ -75,7 +76,9 @@ public class SchemaRegistrySerde implements BuiltInSerde {
             kafkaClusterProperties.getProperty("schemaRegistrySSL.truststorePassword", String.class).orElse(null)
         ),
         kafkaClusterProperties.getProperty("schemaRegistryKeySchemaNameTemplate", String.class).orElse("%s-key"),
-        kafkaClusterProperties.getProperty("schemaRegistrySchemaNameTemplate", String.class).orElse("%s-value")
+        kafkaClusterProperties.getProperty("schemaRegistrySchemaNameTemplate", String.class).orElse("%s-value"),
+        kafkaClusterProperties.getProperty("schemaRegistryCheckSchemaExistenceForDeserialize", Boolean.class)
+            .orElse(false)
     );
   }
 
 
@@ -99,7 +102,9 @@ public class SchemaRegistrySerde implements BuiltInSerde {
             serdeProperties.getProperty("truststorePassword", String.class).orElse(null)
         ),
         serdeProperties.getProperty("keySchemaNameTemplate", String.class).orElse("%s-key"),
-        serdeProperties.getProperty("schemaNameTemplate", String.class).orElse("%s-value")
+        serdeProperties.getProperty("schemaNameTemplate", String.class).orElse("%s-value"),
+        kafkaClusterProperties.getProperty("checkSchemaExistenceForDeserialize", Boolean.class)
+            .orElse(false)
     );
   }
 
 
@@ -108,12 +113,14 @@ public class SchemaRegistrySerde implements BuiltInSerde {
       List<String> schemaRegistryUrls,
       SchemaRegistryClient schemaRegistryClient,
       String keySchemaNameTemplate,
-      String valueSchemaNameTemplate) {
+      String valueSchemaNameTemplate,
+      boolean checkTopicSchemaExistenceForDeserialize) {
     this.schemaRegistryUrls = schemaRegistryUrls;
     this.schemaRegistryClient = schemaRegistryClient;
     this.keySchemaNameTemplate = keySchemaNameTemplate;
     this.valueSchemaNameTemplate = valueSchemaNameTemplate;
     this.schemaRegistryFormatters = MessageFormatter.createMap(schemaRegistryClient);
+    this.checkSchemaExistenceForDeserialize = checkTopicSchemaExistenceForDeserialize;
   }
 
   private static SchemaRegistryClient createSchemaRegistryClient(List<String> urls,
@@ -122,8 +129,7 @@ public class SchemaRegistrySerde implements BuiltInSerde {
                                                                  @Nullable String keyStoreLocation,
                                                                  @Nullable String keyStorePassword,
                                                                  @Nullable String trustStoreLocation,
-                                                                 @Nullable String trustStorePassword
-                                                                 ) {
+                                                                 @Nullable String trustStorePassword) {
     Map<String, String> configs = new HashMap<>();
     if (username != null && password != null) {
       configs.put(BASIC_AUTH_CREDENTIALS_SOURCE, "USER_INFO");
@@ -169,7 +175,8 @@ public class SchemaRegistrySerde implements BuiltInSerde {
   @Override
   public boolean canDeserialize(String topic, Target type) {
     String subject = schemaSubject(topic, type);
-    return getSchemaBySubject(subject).isPresent();
+    return !checkSchemaExistenceForDeserialize
+        || getSchemaBySubject(subject).isPresent();
   }
 
   @Override

+ 2 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClustersStorage.java

@@ -2,7 +2,6 @@ package com.provectus.kafka.ui.service;
 
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableMap;
 import com.provectus.kafka.ui.config.ClustersProperties;
-import com.provectus.kafka.ui.mapper.ClusterMapper;
 import com.provectus.kafka.ui.model.KafkaCluster;
 import java.util.Collection;
 import java.util.Optional;
 
 
   private final ImmutableMap<String, KafkaCluster> kafkaClusters;
   private final ImmutableMap<String, KafkaCluster> kafkaClusters;
 
-  public ClustersStorage(ClustersProperties properties, ClusterMapper mapper) {
+  public ClustersStorage(ClustersProperties properties, KafkaClusterFactory factory) {
     var builder = ImmutableMap.<String, KafkaCluster>builder();
-    properties.getClusters().forEach(c -> builder.put(c.getName(), mapper.toKafkaCluster(c)));
+    properties.getClusters().forEach(c -> builder.put(c.getName(), factory.create(c)));
     this.kafkaClusters = builder.build();
   }
 

+ 102 - 89
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java

@@ -1,5 +1,6 @@
 package com.provectus.kafka.ui.service;
 
+import com.google.common.collect.Table;
 import com.provectus.kafka.ui.model.ConsumerGroupOrderingDTO;
 import com.provectus.kafka.ui.model.InternalConsumerGroup;
 import com.provectus.kafka.ui.model.InternalTopicConsumerGroup;
@@ -7,6 +8,7 @@ import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.model.SortOrderDTO;
 import com.provectus.kafka.ui.service.rbac.AccessControlService;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
@@ -14,22 +16,21 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.function.ToIntFunction;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import javax.annotation.Nullable;
 import lombok.RequiredArgsConstructor;
-import lombok.Value;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kafka.clients.admin.ConsumerGroupDescription;
+import org.apache.kafka.clients.admin.ConsumerGroupListing;
 import org.apache.kafka.clients.admin.OffsetSpec;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.ConsumerGroupState;
 import org.apache.kafka.common.TopicPartition;
 import org.apache.kafka.common.serialization.BytesDeserializer;
 import org.apache.kafka.common.utils.Bytes;
 import org.springframework.stereotype.Service;
-import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
-import reactor.util.function.Tuple2;
-import reactor.util.function.Tuples;
 
 @Service
 @RequiredArgsConstructor
@@ -41,21 +42,16 @@ public class ConsumerGroupService {
   private Mono<List<InternalConsumerGroup>> getConsumerGroups(
       ReactiveAdminClient ac,
       List<ConsumerGroupDescription> descriptions) {
-    return Flux.fromIterable(descriptions)
-        // 1. getting committed offsets for all groups
-        .flatMap(desc -> ac.listConsumerGroupOffsets(desc.groupId())
-            .map(offsets -> Tuples.of(desc, offsets)))
-        .collectMap(Tuple2::getT1, Tuple2::getT2)
-        .flatMap((Map<ConsumerGroupDescription, Map<TopicPartition, Long>> groupOffsetsMap) -> {
-          var tpsFromGroupOffsets = groupOffsetsMap.values().stream()
-              .flatMap(v -> v.keySet().stream())
-              .collect(Collectors.toSet());
+    var groupNames = descriptions.stream().map(ConsumerGroupDescription::groupId).toList();
+    // 1. getting committed offsets for all groups
+    return ac.listConsumerGroupOffsets(groupNames, null)
+        .flatMap((Table<String, TopicPartition, Long> committedOffsets) -> {
           // 2. getting end offsets for partitions with committed offsets
-          return ac.listOffsets(tpsFromGroupOffsets, OffsetSpec.latest(), false)
+          return ac.listOffsets(committedOffsets.columnKeySet(), OffsetSpec.latest(), false)
               .map(endOffsets ->
                   descriptions.stream()
                       .map(desc -> {
-                        var groupOffsets = groupOffsetsMap.get(desc);
+                        var groupOffsets = committedOffsets.row(desc.groupId());
                         var endOffsetsForGroup = new HashMap<>(endOffsets);
                         endOffsetsForGroup.keySet().retainAll(groupOffsets.keySet());
                         // 3. gathering description & offsets
@@ -73,105 +69,122 @@ public class ConsumerGroupService {
             .flatMap(endOffsets -> {
               var tps = new ArrayList<>(endOffsets.keySet());
               // 2. getting all consumer groups
-              return describeConsumerGroups(ac, null)
-                  .flatMap((List<ConsumerGroupDescription> groups) ->
-                      Flux.fromIterable(groups)
-                          // 3. for each group trying to find committed offsets for topic
-                          .flatMap(g ->
-                              ac.listConsumerGroupOffsets(g.groupId(), tps)
-                                  // 4. keeping only groups that relates to topic
-                                  .filter(offsets -> isConsumerGroupRelatesToTopic(topic, g, offsets))
-                                  // 5. constructing results
-                                  .map(offsets -> InternalTopicConsumerGroup.create(topic, g, offsets, endOffsets))
-                          ).collectList());
+              return describeConsumerGroups(ac)
+                  .flatMap((List<ConsumerGroupDescription> groups) -> {
+                        // 3. trying to find committed offsets for topic
+                        var groupNames = groups.stream().map(ConsumerGroupDescription::groupId).toList();
+                        return ac.listConsumerGroupOffsets(groupNames, tps).map(offsets ->
+                            groups.stream()
+                                // 4. keeping only groups that relates to topic
+                                .filter(g -> isConsumerGroupRelatesToTopic(topic, g, offsets.containsRow(g.groupId())))
+                                .map(g ->
+                                    // 5. constructing results
+                                    InternalTopicConsumerGroup.create(topic, g, offsets.row(g.groupId()), endOffsets))
+                                .toList()
+                        );
+                      }
+                  );
             }));
   }
 
   private boolean isConsumerGroupRelatesToTopic(String topic,
                                                 ConsumerGroupDescription description,
-                                                Map<TopicPartition, Long> committedGroupOffsetsForTopic) {
+                                                boolean hasCommittedOffsets) {
     boolean hasActiveMembersForTopic = description.members()
         .stream()
         .anyMatch(m -> m.assignment().topicPartitions().stream().anyMatch(tp -> tp.topic().equals(topic)));
-    boolean hasCommittedOffsets = !committedGroupOffsetsForTopic.isEmpty();
     return hasActiveMembersForTopic || hasCommittedOffsets;
   }
 
 
-  @Value
-  public static class ConsumerGroupsPage {
-    List<InternalConsumerGroup> consumerGroups;
-    int totalPages;
+  public record ConsumerGroupsPage(List<InternalConsumerGroup> consumerGroups, int totalPages) {
   }
 
   public Mono<ConsumerGroupsPage> getConsumerGroupsPage(
       KafkaCluster cluster,
-      int page,
+      int pageNum,
       int perPage,
       @Nullable String search,
       ConsumerGroupOrderingDTO orderBy,
       SortOrderDTO sortOrderDto) {
-    var comparator = sortOrderDto.equals(SortOrderDTO.ASC)
-        ? getPaginationComparator(orderBy)
-        : getPaginationComparator(orderBy).reversed();
     return adminClientService.get(cluster).flatMap(ac ->
-        describeConsumerGroups(ac, search).flatMap(descriptions ->
-            getConsumerGroups(
-                ac,
-                descriptions.stream()
-                    .sorted(comparator)
-                    .skip((long) (page - 1) * perPage)
-                    .limit(perPage)
-                    .collect(Collectors.toList())
+        ac.listConsumerGroups()
+            .map(listing -> search == null
+                ? listing
+                : listing.stream()
+                .filter(g -> StringUtils.containsIgnoreCase(g.groupId(), search))
+                .toList()
             )
-                .flatMapMany(Flux::fromIterable)
-                .filterWhen(
-                    cg -> accessControlService.isConsumerGroupAccessible(cg.getGroupId(), cluster.getName()))
-                .collect(Collectors.toList())
-                .map(cgs -> new ConsumerGroupsPage(
-                    cgs,
-                    (descriptions.size() / perPage) + (descriptions.size() % perPage == 0 ? 0 : 1))))
-    );
+            .flatMapIterable(lst -> lst)
+            .filterWhen(cg -> accessControlService.isConsumerGroupAccessible(cg.groupId(), cluster.getName()))
+            .collectList()
+            .flatMap(allGroups ->
+                loadSortedDescriptions(ac, allGroups, pageNum, perPage, orderBy, sortOrderDto)
+                    .flatMap(descriptions -> getConsumerGroups(ac, descriptions)
+                        .map(page -> new ConsumerGroupsPage(
+                            page,
+                            (allGroups.size() / perPage) + (allGroups.size() % perPage == 0 ? 0 : 1))))));
   }
 
 
-  private Comparator<ConsumerGroupDescription> getPaginationComparator(ConsumerGroupOrderingDTO
-                                                                           orderBy) {
-    switch (orderBy) {
-      case NAME:
-        return Comparator.comparing(ConsumerGroupDescription::groupId);
-      case STATE:
-        ToIntFunction<ConsumerGroupDescription> statesPriorities = cg -> {
-          switch (cg.state()) {
-            case STABLE:
-              return 0;
-            case COMPLETING_REBALANCE:
-              return 1;
-            case PREPARING_REBALANCE:
-              return 2;
-            case EMPTY:
-              return 3;
-            case DEAD:
-              return 4;
-            case UNKNOWN:
-              return 5;
-            default:
-              return 100;
-          }
-        };
-        return Comparator.comparingInt(statesPriorities);
-      case MEMBERS:
-        return Comparator.comparingInt(cg -> cg.members().size());
-      default:
-        throw new IllegalStateException("Unsupported order by: " + orderBy);
-    }
+  private Mono<List<ConsumerGroupDescription>> loadSortedDescriptions(ReactiveAdminClient ac,
+                                                                      List<ConsumerGroupListing> groups,
+                                                                      int pageNum,
+                                                                      int perPage,
+                                                                      ConsumerGroupOrderingDTO orderBy,
+                                                                      SortOrderDTO sortOrderDto) {
+    return switch (orderBy) {
+      case NAME -> {
+        Comparator<ConsumerGroupListing> comparator = Comparator.comparing(ConsumerGroupListing::groupId);
+        yield loadDescriptionsByListings(ac, groups, comparator, pageNum, perPage, sortOrderDto);
+      }
+      case STATE -> {
+        ToIntFunction<ConsumerGroupListing> statesPriorities =
+            cg -> switch (cg.state().orElse(ConsumerGroupState.UNKNOWN)) {
+              case STABLE -> 0;
+              case COMPLETING_REBALANCE -> 1;
+              case PREPARING_REBALANCE -> 2;
+              case EMPTY -> 3;
+              case DEAD -> 4;
+              case UNKNOWN -> 5;
+            };
+        var comparator = Comparator.comparingInt(statesPriorities);
+        yield loadDescriptionsByListings(ac, groups, comparator, pageNum, perPage, sortOrderDto);
+      }
+      case MEMBERS -> {
+        var comparator = Comparator.<ConsumerGroupDescription>comparingInt(cg -> cg.members().size());
+        var groupNames = groups.stream().map(ConsumerGroupListing::groupId).toList();
+        yield ac.describeConsumerGroups(groupNames)
+            .map(descriptions ->
+                sortAndPaginate(descriptions.values(), comparator, pageNum, perPage, sortOrderDto).toList());
+      }
+    };
   }
   }
 
-  private Mono<List<ConsumerGroupDescription>> describeConsumerGroups(ReactiveAdminClient ac,
-                                                                      @Nullable String search) {
-    return ac.listConsumerGroups()
-        .map(groupIds -> groupIds
-            .stream()
-            .filter(groupId -> search == null || StringUtils.containsIgnoreCase(groupId, search))
-            .collect(Collectors.toList()))
+  private Mono<List<ConsumerGroupDescription>> loadDescriptionsByListings(ReactiveAdminClient ac,
+                                                                          List<ConsumerGroupListing> listings,
+                                                                          Comparator<ConsumerGroupListing> comparator,
+                                                                          int pageNum,
+                                                                          int perPage,
+                                                                          SortOrderDTO sortOrderDto) {
+    List<String> sortedGroups = sortAndPaginate(listings, comparator, pageNum, perPage, sortOrderDto)
+        .map(ConsumerGroupListing::groupId)
+        .toList();
+    return ac.describeConsumerGroups(sortedGroups)
+        .map(descrMap -> sortedGroups.stream().map(descrMap::get).toList());
+  }
+
+  private <T> Stream<T> sortAndPaginate(Collection<T> collection,
+                                        Comparator<T> comparator,
+                                        int pageNum,
+                                        int perPage,
+                                        SortOrderDTO sortOrderDto) {
+    return collection.stream()
+        .sorted(sortOrderDto == SortOrderDTO.ASC ? comparator : comparator.reversed())
+        .skip((long) (pageNum - 1) * perPage)
+        .limit(perPage);
+  }
+
+  private Mono<List<ConsumerGroupDescription>> describeConsumerGroups(ReactiveAdminClient ac) {
+    return ac.listConsumerGroupNames()
         .flatMap(ac::describeConsumerGroups)
         .flatMap(ac::describeConsumerGroups)
         .map(cgs -> new ArrayList<>(cgs.values()));
   }
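A note on the Table<String, TopicPartition, Long> shape used in the rewritten methods above: the Guava Table keys committed offsets by group id (row) and partition (column), which is what makes committedOffsets.row(groupId), columnKeySet() and containsRow(...) possible. A standalone sketch with illustrative values:

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import org.apache.kafka.common.TopicPartition;

public class OffsetsTableSketch {
  public static void main(String[] args) {
    // group id -> partition -> committed offset
    Table<String, TopicPartition, Long> committed = HashBasedTable.create();
    committed.put("group-a", new TopicPartition("orders", 0), 42L);
    committed.put("group-b", new TopicPartition("payments", 1), 7L);

    System.out.println(committed.columnKeySet());         // every partition that has a committed offset
    System.out.println(committed.row("group-a"));         // Map<TopicPartition, Long> for a single group
    System.out.println(committed.containsRow("group-c")); // false -> this group committed nothing
  }
}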

+ 5 - 4
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java

@@ -5,6 +5,7 @@ import com.provectus.kafka.ui.model.KafkaCluster;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
+import java.util.Map;
 import java.util.Optional;
 import java.util.function.Predicate;
 import javax.annotation.Nullable;
@@ -27,17 +28,17 @@ public class FeatureService {
   public Mono<List<Feature>> getAvailableFeatures(KafkaCluster cluster, @Nullable Node controller) {
     List<Mono<Feature>> features = new ArrayList<>();
 
-    if (Optional.ofNullable(cluster.getKafkaConnect())
-        .filter(Predicate.not(List::isEmpty))
+    if (Optional.ofNullable(cluster.getConnectsClients())
+        .filter(Predicate.not(Map::isEmpty))
         .isPresent()) {
       features.add(Mono.just(Feature.KAFKA_CONNECT));
     }
 
-    if (cluster.getKsqldbServer() != null) {
+    if (cluster.getKsqlClient() != null) {
       features.add(Mono.just(Feature.KSQL_DB));
     }
 
-    if (cluster.getSchemaRegistry() != null) {
+    if (cluster.getSchemaRegistryClient() != null) {
       features.add(Mono.just(Feature.SCHEMA_REGISTRY));
     }
 
 

+ 133 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java

@@ -0,0 +1,133 @@
+package com.provectus.kafka.ui.service;
+
+import com.provectus.kafka.ui.client.RetryingKafkaConnectClient;
+import com.provectus.kafka.ui.config.ClustersProperties;
+import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
+import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.model.MetricsConfig;
+import com.provectus.kafka.ui.service.ksql.KsqlApiClient;
+import com.provectus.kafka.ui.service.masking.DataMasking;
+import com.provectus.kafka.ui.sr.ApiClient;
+import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
+import com.provectus.kafka.ui.util.PollingThrottler;
+import com.provectus.kafka.ui.util.ReactiveFailover;
+import com.provectus.kafka.ui.util.WebClientConfigurator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Properties;
+import java.util.stream.Stream;
+import javax.annotation.Nullable;
+import lombok.RequiredArgsConstructor;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Service;
+import org.springframework.util.unit.DataSize;
+import org.springframework.web.reactive.function.client.WebClient;
+
+@Service
+@RequiredArgsConstructor
+public class KafkaClusterFactory {
+
+  @Value("${webclient.max-in-memory-buffer-size:20MB}")
+  private DataSize maxBuffSize;
+
+  public KafkaCluster create(ClustersProperties.Cluster clusterProperties) {
+    KafkaCluster.KafkaClusterBuilder builder = KafkaCluster.builder();
+
+    builder.name(clusterProperties.getName());
+    builder.bootstrapServers(clusterProperties.getBootstrapServers());
+    builder.properties(Optional.ofNullable(clusterProperties.getProperties()).orElse(new Properties()));
+    builder.readOnly(clusterProperties.isReadOnly());
+    builder.masking(DataMasking.create(clusterProperties.getMasking()));
+    builder.metricsConfig(metricsConfigDataToMetricsConfig(clusterProperties.getMetrics()));
+    builder.throttler(PollingThrottler.throttlerSupplier(clusterProperties));
+
+    builder.schemaRegistryClient(schemaRegistryClient(clusterProperties));
+    builder.connectsClients(connectClients(clusterProperties));
+    builder.ksqlClient(ksqlClient(clusterProperties));
+
+    builder.originalProperties(clusterProperties);
+
+    return builder.build();
+  }
+
+  @Nullable
+  private Map<String, ReactiveFailover<KafkaConnectClientApi>> connectClients(
+      ClustersProperties.Cluster clusterProperties) {
+    if (clusterProperties.getKafkaConnect() == null) {
+      return null;
+    }
+    Map<String, ReactiveFailover<KafkaConnectClientApi>> connects = new HashMap<>();
+    clusterProperties.getKafkaConnect().forEach(c -> {
+      ReactiveFailover<KafkaConnectClientApi> failover = ReactiveFailover.create(
+          parseUrlList(c.getAddress()),
+          url -> new RetryingKafkaConnectClient(c.toBuilder().address(url).build(), maxBuffSize),
+          ReactiveFailover.CONNECTION_REFUSED_EXCEPTION_FILTER,
+          "No alive connect instances available",
+          ReactiveFailover.DEFAULT_RETRY_GRACE_PERIOD_MS
+      );
+      connects.put(c.getName(), failover);
+    });
+    return connects;
+  }
+
+  @Nullable
+  private ReactiveFailover<KafkaSrClientApi> schemaRegistryClient(ClustersProperties.Cluster clusterProperties) {
+    if (clusterProperties.getSchemaRegistry() == null) {
+      return null;
+    }
+    var auth = Optional.ofNullable(clusterProperties.getSchemaRegistryAuth())
+        .orElse(new ClustersProperties.SchemaRegistryAuth());
+    WebClient webClient = new WebClientConfigurator()
+        .configureSsl(clusterProperties.getSchemaRegistrySsl())
+        .configureBasicAuth(auth.getUsername(), auth.getPassword())
+        .configureBufferSize(maxBuffSize)
+        .build();
+    return ReactiveFailover.create(
+        parseUrlList(clusterProperties.getSchemaRegistry()),
+        url -> new KafkaSrClientApi(new ApiClient(webClient, null, null).setBasePath(url)),
+        ReactiveFailover.CONNECTION_REFUSED_EXCEPTION_FILTER,
+        "No live schemaRegistry instances available",
+        ReactiveFailover.DEFAULT_RETRY_GRACE_PERIOD_MS
+    );
+  }
+
+  @Nullable
+  private ReactiveFailover<KsqlApiClient> ksqlClient(ClustersProperties.Cluster clusterProperties) {
+    if (clusterProperties.getKsqldbServer() == null) {
+      return null;
+    }
+    return ReactiveFailover.create(
+        parseUrlList(clusterProperties.getKsqldbServer()),
+        url -> new KsqlApiClient(
+            url,
+            clusterProperties.getKsqldbServerAuth(),
+            clusterProperties.getKsqldbServerSsl(),
+            maxBuffSize
+        ),
+        ReactiveFailover.CONNECTION_REFUSED_EXCEPTION_FILTER,
+        "No live ksqldb instances available",
+        ReactiveFailover.DEFAULT_RETRY_GRACE_PERIOD_MS
+    );
+  }
+
+  private List<String> parseUrlList(String url) {
+    return Stream.of(url.split(",")).map(String::trim).filter(s -> !s.isBlank()).toList();
+  }
+
+  @Nullable
+  private MetricsConfig metricsConfigDataToMetricsConfig(ClustersProperties.MetricsConfigData metricsConfigData) {
+    if (metricsConfigData == null) {
+      return null;
+    }
+    MetricsConfig.MetricsConfigBuilder builder = MetricsConfig.builder();
+    builder.type(metricsConfigData.getType());
+    builder.port(metricsConfigData.getPort());
+    builder.ssl(metricsConfigData.isSsl());
+    builder.username(metricsConfigData.getUsername());
+    builder.password(metricsConfigData.getPassword());
+    return builder.build();
+  }
+
+}
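In the factory above, every comma-separated address becomes one failover candidate that is handed to ReactiveFailover.create. A standalone sketch of just that parsing rule (class name and sample URLs are illustrative):

import java.util.List;
import java.util.stream.Stream;

class UrlListSketch {
  // mirrors parseUrlList: split on commas, trim, drop blank entries
  static List<String> parse(String url) {
    return Stream.of(url.split(",")).map(String::trim).filter(s -> !s.isBlank()).toList();
  }

  public static void main(String[] args) {
    // prints [http://sr-1:8085, http://sr-2:8085]; each entry is a candidate tried in turn on connection failures
    System.out.println(parse("http://sr-1:8085, http://sr-2:8085,"));
  }
}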

+ 40 - 38
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java

@@ -2,13 +2,12 @@ package com.provectus.kafka.ui.service;
 
 
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.provectus.kafka.ui.client.KafkaConnectClientsFactory;
 import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
 import com.provectus.kafka.ui.connect.model.ConnectorStatus;
 import com.provectus.kafka.ui.connect.model.ConnectorStatusConnector;
 import com.provectus.kafka.ui.connect.model.ConnectorTopics;
 import com.provectus.kafka.ui.connect.model.TaskStatus;
-import com.provectus.kafka.ui.exception.ConnectNotFoundException;
+import com.provectus.kafka.ui.exception.NotFoundException;
 import com.provectus.kafka.ui.exception.ValidationException;
 import com.provectus.kafka.ui.mapper.ClusterMapper;
 import com.provectus.kafka.ui.mapper.KafkaConnectMapper;
@@ -24,9 +23,11 @@ import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.model.NewConnectorDTO;
 import com.provectus.kafka.ui.model.TaskDTO;
 import com.provectus.kafka.ui.model.connect.InternalConnectInfo;
+import com.provectus.kafka.ui.util.ReactiveFailover;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 import java.util.function.Function;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
@@ -50,12 +51,11 @@ public class KafkaConnectService {
   private final KafkaConnectMapper kafkaConnectMapper;
   private final ObjectMapper objectMapper;
   private final KafkaConfigSanitizer kafkaConfigSanitizer;
-  private final KafkaConnectClientsFactory kafkaConnectClientsFactory;
 
   public List<ConnectDTO> getConnects(KafkaCluster cluster) {
-    return cluster.getKafkaConnect().stream()
-        .map(clusterMapper::toKafkaConnect)
-        .collect(Collectors.toList());
+    return Optional.ofNullable(cluster.getOriginalProperties().getKafkaConnect())
+        .map(lst -> lst.stream().map(clusterMapper::toKafkaConnect).toList())
+        .orElse(List.of());
   }
 
   public Flux<FullConnectorInfoDTO> getAllConnectors(final KafkaCluster cluster,
@@ -118,8 +118,9 @@ public class KafkaConnectService {
 
 
   private Mono<ConnectorTopics> getConnectorTopics(KafkaCluster cluster, String connectClusterName,
                                                    String connectorName) {
-    return withConnectClient(cluster, connectClusterName)
-        .flatMap(c -> c.getConnectorTopics(connectorName).map(result -> result.get(connectorName)))
+    return api(cluster, connectClusterName)
+        .mono(c -> c.getConnectorTopics(connectorName))
+        .map(result -> result.get(connectorName))
         // old connectors don't have this api, setting empty list for
         // backward-compatibility
         .onErrorResume(Exception.class, e -> Mono.just(new ConnectorTopics().topics(List.of())));
@@ -141,8 +142,8 @@ public class KafkaConnectService {
   }
 
   public Flux<String> getConnectors(KafkaCluster cluster, String connectName) {
-    return withConnectClient(cluster, connectName)
-        .flatMapMany(client ->
+    return api(cluster, connectName)
+        .flux(client ->
             client.getConnectors(null)
                 .doOnError(e -> log.error("Unexpected error upon getting connectors", e))
         );
@@ -150,8 +151,8 @@ public class KafkaConnectService {
 
 
   public Mono<ConnectorDTO> createConnector(KafkaCluster cluster, String connectName,
                                             Mono<NewConnectorDTO> connector) {
-    return withConnectClient(cluster, connectName)
-        .flatMap(client ->
+    return api(cluster, connectName)
+        .mono(client ->
             connector
                 .flatMap(c -> connectorExists(cluster, connectName, c.getName())
                     .map(exists -> {
@@ -177,8 +178,8 @@ public class KafkaConnectService {
 
 
   public Mono<ConnectorDTO> getConnector(KafkaCluster cluster, String connectName,
                                          String connectorName) {
-    return withConnectClient(cluster, connectName)
-        .flatMap(client -> client.getConnector(connectorName)
+    return api(cluster, connectName)
+        .mono(client -> client.getConnector(connectorName)
             .map(kafkaConnectMapper::fromClient)
             .flatMap(connector ->
                 client.getConnectorStatus(connector.getName())
@@ -226,8 +227,8 @@ public class KafkaConnectService {
 
 
   public Mono<Map<String, Object>> getConnectorConfig(KafkaCluster cluster, String connectName,
                                                       String connectorName) {
-    return withConnectClient(cluster, connectName)
-        .flatMap(c -> c.getConnectorConfig(connectorName))
+    return api(cluster, connectName)
+        .mono(c -> c.getConnectorConfig(connectorName))
         .map(connectorConfig -> {
           final Map<String, Object> obfuscatedMap = new HashMap<>();
           connectorConfig.forEach((key, value) ->
@@ -238,8 +239,8 @@ public class KafkaConnectService {
 
 
   public Mono<ConnectorDTO> setConnectorConfig(KafkaCluster cluster, String connectName,
   public Mono<ConnectorDTO> setConnectorConfig(KafkaCluster cluster, String connectName,
                                                String connectorName, Mono<Object> requestBody) {
                                                String connectorName, Mono<Object> requestBody) {
-    return withConnectClient(cluster, connectName)
-        .flatMap(c ->
+    return api(cluster, connectName)
+        .mono(c ->
             requestBody
                .flatMap(body -> c.setConnectorConfig(connectorName, (Map<String, Object>) body))
                .map(kafkaConnectMapper::fromClient));
@@ -247,14 +248,14 @@

  public Mono<Void> deleteConnector(
      KafkaCluster cluster, String connectName, String connectorName) {
-    return withConnectClient(cluster, connectName)
-        .flatMap(c -> c.deleteConnector(connectorName));
+    return api(cluster, connectName)
+        .mono(c -> c.deleteConnector(connectorName));
   }

  public Mono<Void> updateConnectorState(KafkaCluster cluster, String connectName,
                                         String connectorName, ConnectorActionDTO action) {
-    return withConnectClient(cluster, connectName)
-        .flatMap(client -> {
+    return api(cluster, connectName)
+        .mono(client -> {
           switch (action) {
            case RESTART:
              return client.restartConnector(connectorName, false, false);
@@ -283,8 +284,8 @@ public class KafkaConnectService {
   }

  public Flux<TaskDTO> getConnectorTasks(KafkaCluster cluster, String connectName, String connectorName) {
-    return withConnectClient(cluster, connectName)
-        .flatMapMany(client ->
+    return api(cluster, connectName)
+        .flux(client ->
             client.getConnectorTasks(connectorName)
                .onErrorResume(WebClientResponseException.NotFound.class, e -> Flux.empty())
                .map(kafkaConnectMapper::fromClient)
@@ -299,20 +300,20 @@

  public Mono<Void> restartConnectorTask(KafkaCluster cluster, String connectName,
                                         String connectorName, Integer taskId) {
-    return withConnectClient(cluster, connectName)
-        .flatMap(client -> client.restartConnectorTask(connectorName, taskId));
+    return api(cluster, connectName)
+        .mono(client -> client.restartConnectorTask(connectorName, taskId));
   }
-  public Mono<Flux<ConnectorPluginDTO>> getConnectorPlugins(KafkaCluster cluster,
-                                                            String connectName) {
-    return withConnectClient(cluster, connectName)
-        .map(client -> client.getConnectorPlugins().map(kafkaConnectMapper::fromClient));
+  public Flux<ConnectorPluginDTO> getConnectorPlugins(KafkaCluster cluster,
+                                                      String connectName) {
+    return api(cluster, connectName)
+        .flux(client -> client.getConnectorPlugins().map(kafkaConnectMapper::fromClient));
   }

  public Mono<ConnectorPluginConfigValidationResponseDTO> validateConnectorPluginConfig(
      KafkaCluster cluster, String connectName, String pluginName, Mono<Object> requestBody) {
-    return withConnectClient(cluster, connectName)
-        .flatMap(client ->
+    return api(cluster, connectName)
+        .mono(client ->
             requestBody
                .flatMap(body ->
                    client.validateConnectorPluginConfig(pluginName, (Map<String, Object>) body))
@@ -320,11 +321,12 @@ public class KafkaConnectService {
         );
  }

-  private Mono<KafkaConnectClientApi> withConnectClient(KafkaCluster cluster, String connectName) {
-    return Mono.justOrEmpty(cluster.getKafkaConnect().stream()
-            .filter(connect -> connect.getName().equals(connectName))
-            .findFirst())
-        .switchIfEmpty(Mono.error(ConnectNotFoundException::new))
-        .map(kafkaConnectClientsFactory::withKafkaConnectConfig);
+  private ReactiveFailover<KafkaConnectClientApi> api(KafkaCluster cluster, String connectName) {
+    var client = cluster.getConnectsClients().get(connectName);
+    if (client == null) {
+      throw new NotFoundException(
+          "Connect %s not found for cluster %s".formatted(connectName, cluster.getName()));
+    }
+    return client;
   }
}

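A minimal usage sketch of the per-connect lookup introduced above. It assumes the surrounding KafkaConnectService class and that ReactiveFailover.flux(...) runs the call against a currently healthy endpoint and retries the remaining ones on connectivity errors; the method itself is illustrative, not part of the change.

  // Hypothetical caller: resolve the failover-wrapped client for a connect and list its connector names.
  public Flux<String> connectorNames(KafkaCluster cluster, String connectName) {
    ReactiveFailover<KafkaConnectClientApi> client = cluster.getConnectsClients().get(connectName);
    if (client == null) {
      throw new NotFoundException("Connect %s not found for cluster %s".formatted(connectName, cluster.getName()));
    }
    return client.flux(api -> api.getConnectors(null));
  }
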
+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/OffsetsResetService.java

@@ -98,7 +98,7 @@ public class OffsetsResetService {
         .flatMap(ac ->
            // we need to call listConsumerGroups() to check group existence, because
            // describeConsumerGroups() will return consumer group even if it doesn't exist
-            ac.listConsumerGroups()
+            ac.listConsumerGroupNames()
                 .filter(cgs -> cgs.stream().anyMatch(g -> g.equals(groupId)))
                .flatMap(cgs -> ac.describeConsumerGroups(List.of(groupId)))
                .filter(cgs -> cgs.containsKey(groupId))

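The comment kept in the hunk above is the key detail: describeConsumerGroups() returns a description even for groups that do not exist, so existence has to be confirmed via the listing call first. A hedged sketch of that guard, assuming the ReactiveAdminClient methods from this diff (the exception type and method name are illustrative):

  // Sketch only: confirm the group is actually listed before describing it.
  private Mono<ConsumerGroupDescription> describeExistingGroup(ReactiveAdminClient ac, String groupId) {
    return ac.listConsumerGroupNames()
        .filter(names -> names.contains(groupId))
        .switchIfEmpty(Mono.error(new NotFoundException("Consumer group not found: " + groupId)))
        .flatMap(names -> ac.describeConsumerGroups(List.of(groupId)))
        .map(descriptions -> descriptions.get(groupId));
  }
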
+ 90 - 37
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java

@@ -4,12 +4,12 @@ import static java.util.stream.Collectors.toList;
 import static java.util.stream.Collectors.toMap;
 import static org.apache.kafka.clients.admin.ListOffsetsResult.ListOffsetsResultInfo;

-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterators;
+import com.google.common.collect.ImmutableTable;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Table;
 import com.provectus.kafka.ui.exception.IllegalEntityStateException;
 import com.provectus.kafka.ui.exception.NotFoundException;
 import com.provectus.kafka.ui.exception.ValidationException;
-import com.provectus.kafka.ui.util.MapUtil;
 import com.provectus.kafka.ui.util.NumberUtil;
 import com.provectus.kafka.ui.util.annotation.KafkaClientInternalsDependant;
 import java.io.Closeable;
@@ -18,7 +18,6 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
@@ -45,7 +44,7 @@ import org.apache.kafka.clients.admin.ConsumerGroupListing;
 import org.apache.kafka.clients.admin.DescribeClusterOptions;
 import org.apache.kafka.clients.admin.DescribeClusterResult;
 import org.apache.kafka.clients.admin.DescribeConfigsOptions;
-import org.apache.kafka.clients.admin.ListConsumerGroupOffsetsOptions;
+import org.apache.kafka.clients.admin.ListConsumerGroupOffsetsSpec;
 import org.apache.kafka.clients.admin.ListOffsetsResult;
 import org.apache.kafka.clients.admin.ListTopicsOptions;
 import org.apache.kafka.clients.admin.NewPartitionReassignment;
@@ -54,7 +53,6 @@ import org.apache.kafka.clients.admin.NewTopic;
 import org.apache.kafka.clients.admin.OffsetSpec;
 import org.apache.kafka.clients.admin.RecordsToDelete;
 import org.apache.kafka.clients.admin.TopicDescription;
-import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.clients.consumer.OffsetAndMetadata;
 import org.apache.kafka.common.KafkaException;
 import org.apache.kafka.common.KafkaFuture;
@@ -68,7 +66,9 @@ import org.apache.kafka.common.errors.GroupIdNotFoundException;
 import org.apache.kafka.common.errors.GroupNotEmptyException;
 import org.apache.kafka.common.errors.InvalidRequestException;
 import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
+import org.apache.kafka.common.errors.UnsupportedVersionException;
 import org.apache.kafka.common.requests.DescribeLogDirsResponse;
+import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.core.scheduler.Schedulers;
 import reactor.util.function.Tuple2;
@@ -183,7 +183,7 @@ public class ReactiveAdminClient implements Closeable {
         topicNames,
        200,
        part -> getTopicsConfigImpl(part, includeDocFixed),
-        (m1, m2) -> ImmutableMap.<String, List<ConfigEntry>>builder().putAll(m1).putAll(m2).build()
+        mapMerger()
     );
  }

@@ -236,7 +236,7 @@ public class ReactiveAdminClient implements Closeable {
         topics,
        200,
        this::describeTopicsImpl,
-        (m1, m2) -> ImmutableMap.<String, TopicDescription>builder().putAll(m1).putAll(m2).build()
+        mapMerger()
     );
  }

@@ -298,7 +298,12 @@

  public Mono<Map<Integer, Map<String, DescribeLogDirsResponse.LogDirInfo>>> describeLogDirs(
      Collection<Integer> brokerIds) {
-    return toMono(client.describeLogDirs(brokerIds).all());
+    return toMono(client.describeLogDirs(brokerIds).all())
+        .onErrorResume(UnsupportedVersionException.class, th -> Mono.just(Map.of()))
+        .onErrorResume(th -> true, th -> {
+          log.warn("Error while calling describeLogDirs", th);
+          return Mono.just(Map.of());
+        });
   }

  public Mono<ClusterDescription> describeCluster() {
@@ -383,32 +388,57 @@ public class ReactiveAdminClient implements Closeable {
     }
  }

-  public Mono<List<String>> listConsumerGroups() {
-    return toMono(client.listConsumerGroups().all())
-        .map(lst -> lst.stream().map(ConsumerGroupListing::groupId).collect(toList()));
+  public Mono<List<String>> listConsumerGroupNames() {
+    return listConsumerGroups().map(lst -> lst.stream().map(ConsumerGroupListing::groupId).toList());
   }
-  public Mono<Map<String, ConsumerGroupDescription>> describeConsumerGroups(Collection<String> groupIds) {
-    return toMono(client.describeConsumerGroups(groupIds).all());
+  public Mono<Collection<ConsumerGroupListing>> listConsumerGroups() {
+    return toMono(client.listConsumerGroups().all());
   }
-  public Mono<Map<TopicPartition, Long>> listConsumerGroupOffsets(String groupId) {
-    return listConsumerGroupOffsets(groupId, new ListConsumerGroupOffsetsOptions());
+  public Mono<Map<String, ConsumerGroupDescription>> describeConsumerGroups(Collection<String> groupIds) {
+    return partitionCalls(
+        groupIds,
+        25,
+        4,
+        ids -> toMono(client.describeConsumerGroups(ids).all()),
+        mapMerger()
+    );
   }
-  public Mono<Map<TopicPartition, Long>> listConsumerGroupOffsets(
-      String groupId, List<TopicPartition> partitions) {
-    return listConsumerGroupOffsets(groupId,
-        new ListConsumerGroupOffsetsOptions().topicPartitions(partitions));
-  }
+  // group -> partition -> offset
+  // NOTE: partitions with no committed offsets will be skipped
+  public Mono<Table<String, TopicPartition, Long>> listConsumerGroupOffsets(List<String> consumerGroups,
+                                                                            // all partitions if null passed
+                                                                            @Nullable List<TopicPartition> partitions) {
+    Function<Collection<String>, Mono<Map<String, Map<TopicPartition, OffsetAndMetadata>>>> call =
+        groups -> toMono(
+            client.listConsumerGroupOffsets(
+                groups.stream()
+                    .collect(Collectors.toMap(
+                        g -> g,
+                        g -> new ListConsumerGroupOffsetsSpec().topicPartitions(partitions)
+                    ))).all()
+        );
+
+    Mono<Map<String, Map<TopicPartition, OffsetAndMetadata>>> merged = partitionCalls(
+        consumerGroups,
+        25,
+        4,
+        call,
+        mapMerger()
+    );

-  private Mono<Map<TopicPartition, Long>> listConsumerGroupOffsets(
-      String groupId, ListConsumerGroupOffsetsOptions options) {
-    return toMono(client.listConsumerGroupOffsets(groupId, options).partitionsToOffsetAndMetadata())
-        .map(MapUtil::removeNullValues)
-        .map(m -> m.entrySet().stream()
-            .map(e -> Tuples.of(e.getKey(), e.getValue().offset()))
-            .collect(Collectors.toMap(Tuple2::getT1, Tuple2::getT2)));
+    return merged.map(map -> {
+      var table = ImmutableTable.<String, TopicPartition, Long>builder();
+      map.forEach((g, tpOffsets) -> tpOffsets.forEach((tp, offset) -> {
+        if (offset != null) {
+          // offset will be null for partitions that don't have committed offset for this group
+          table.put(g, tp, offset.offset());
+        }
+      }));
+      return table.build();
+    });
   }

  public Mono<Void> alterConsumerGroupOffsets(String groupId, Map<TopicPartition, Long> offsets) {
@@ -501,7 +531,7 @@ public class ReactiveAdminClient implements Closeable {
         partitions,
        200,
        call,
-        (m1, m2) -> ImmutableMap.<TopicPartition, Long>builder().putAll(m1).putAll(m2).build()
+        mapMerger()
     );
  }

@@ -551,7 +581,7 @@ public class ReactiveAdminClient implements Closeable {
   }

  /**
-   * Splits input collection into batches, applies each batch sequentially to function
+   * Splits input collection into batches, converts each batch into Mono, sequentially subscribes to them
    * and merges output Monos into one Mono.
   */
  private static <R, I> Mono<R> partitionCalls(Collection<I> items,
@@ -561,14 +591,37 @@ public class ReactiveAdminClient implements Closeable {
     if (items.isEmpty()) {
      return call.apply(items);
    }
-    Iterator<List<I>> parts = Iterators.partition(items.iterator(), partitionSize);
-    Mono<R> mono = call.apply(parts.next());
-    while (parts.hasNext()) {
-      var nextPart = parts.next();
-      // calls will be executed sequentially
-      mono = mono.flatMap(res1 -> call.apply(nextPart).map(res2 -> merger.apply(res1, res2)));
+    Iterable<List<I>> parts = Iterables.partition(items, partitionSize);
+    return Flux.fromIterable(parts)
+        .concatMap(call)
+        .reduce(merger);
+  }
+
+  /**
+   * Splits input collection into batches, converts each batch into Mono, subscribes to them (concurrently,
+   * with specified concurrency level) and merges output Monos into one Mono.
+   */
+  private static <R, I> Mono<R> partitionCalls(Collection<I> items,
+                                               int partitionSize,
+                                               int concurrency,
+                                               Function<Collection<I>, Mono<R>> call,
+                                               BiFunction<R, R, R> merger) {
+    if (items.isEmpty()) {
+      return call.apply(items);
     }
-    return mono;
+    Iterable<List<I>> parts = Iterables.partition(items, partitionSize);
+    return Flux.fromIterable(parts)
+        .flatMap(call, concurrency)
+        .reduce(merger);
+  }
+
+  private static <K, V> BiFunction<Map<K, V>, Map<K, V>, Map<K, V>> mapMerger() {
+    return (m1, m2) -> {
+      var merged = new HashMap<K, V>();
+      merged.putAll(m1);
+      merged.putAll(m2);
+      return merged;
+    };
   }

  @Override

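Both partitionCalls overloads above follow one batching pattern: partition the input, turn each batch into a Mono with the supplied call, and fold the partial maps together with mapMerger(). A self-contained sketch of the same idea in plain Reactor; every name below is illustrative and not part of the diff:

  import com.google.common.collect.Iterables;
  import java.util.Collection;
  import java.util.HashMap;
  import java.util.List;
  import java.util.Map;
  import java.util.function.Function;
  import reactor.core.publisher.Flux;
  import reactor.core.publisher.Mono;

  class BatchedCalls {
    // Splits keys into batches, runs `call` per batch (up to `concurrency` in flight) and merges the maps.
    static <K, V> Mono<Map<K, V>> batched(Collection<K> keys,
                                          int batchSize,
                                          int concurrency,
                                          Function<List<K>, Mono<Map<K, V>>> call) {
      return Flux.fromIterable(Iterables.partition(List.copyOf(keys), batchSize))
          .flatMap(call, concurrency)         // use concatMap(call) for strictly sequential batches
          .reduce((m1, m2) -> {
            Map<K, V> merged = new HashMap<>(m1);
            merged.putAll(m2);                // same merge as mapMerger() in the diff
            return merged;
          })
          .defaultIfEmpty(Map.of());          // empty input -> empty result map
    }
  }
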
+ 105 - 391
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java

@@ -1,54 +1,31 @@
 package com.provectus.kafka.ui.service;
-import static org.springframework.http.HttpStatus.CONFLICT;
-import static org.springframework.http.HttpStatus.NOT_FOUND;
-import static org.springframework.http.HttpStatus.UNPROCESSABLE_ENTITY;
-
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.json.JsonMapper;
 import com.provectus.kafka.ui.exception.SchemaCompatibilityException;
-import com.provectus.kafka.ui.exception.SchemaFailedToDeleteException;
 import com.provectus.kafka.ui.exception.SchemaNotFoundException;
-import com.provectus.kafka.ui.exception.SchemaTypeNotSupportedException;
-import com.provectus.kafka.ui.exception.UnprocessableEntityException;
 import com.provectus.kafka.ui.exception.ValidationException;
-import com.provectus.kafka.ui.model.CompatibilityLevelDTO;
-import com.provectus.kafka.ui.model.InternalSchemaRegistry;
 import com.provectus.kafka.ui.model.KafkaCluster;
-import com.provectus.kafka.ui.model.NewSchemaSubjectDTO;
-import com.provectus.kafka.ui.model.SchemaSubjectDTO;
-import com.provectus.kafka.ui.model.SchemaTypeDTO;
-import com.provectus.kafka.ui.model.schemaregistry.ErrorResponse;
-import com.provectus.kafka.ui.model.schemaregistry.InternalCompatibilityCheck;
-import com.provectus.kafka.ui.model.schemaregistry.InternalCompatibilityLevel;
-import com.provectus.kafka.ui.model.schemaregistry.InternalNewSchema;
-import com.provectus.kafka.ui.model.schemaregistry.SubjectIdResponse;
-import com.provectus.kafka.ui.util.SecuredWebClient;
+import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
+import com.provectus.kafka.ui.sr.model.Compatibility;
+import com.provectus.kafka.ui.sr.model.CompatibilityCheckResponse;
+import com.provectus.kafka.ui.sr.model.CompatibilityConfig;
+import com.provectus.kafka.ui.sr.model.CompatibilityLevelChange;
+import com.provectus.kafka.ui.sr.model.NewSubject;
+import com.provectus.kafka.ui.sr.model.SchemaSubject;
+import com.provectus.kafka.ui.util.ReactiveFailover;
+import com.provectus.kafka.ui.util.WebClientConfigurator;
 import java.io.IOException;
 import java.io.IOException;
-import java.net.URI;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Formatter;
 import java.util.List;
 import java.util.List;
-import java.util.Objects;
-import java.util.Optional;
-import java.util.function.Function;
-import java.util.function.Supplier;
 import java.util.stream.Collectors;
 import java.util.stream.Collectors;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
 import lombok.RequiredArgsConstructor;
 import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
+import lombok.experimental.Delegate;
 import lombok.extern.slf4j.Slf4j;
 import lombok.extern.slf4j.Slf4j;
-import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;
-import org.springframework.http.HttpHeaders;
-import org.springframework.http.HttpMethod;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.MediaType;
 import org.springframework.stereotype.Service;
 import org.springframework.stereotype.Service;
-import org.springframework.util.LinkedMultiValueMap;
-import org.springframework.util.MultiValueMap;
-import org.springframework.web.reactive.function.BodyInserters;
-import org.springframework.web.reactive.function.client.ClientResponse;
-import org.springframework.web.reactive.function.client.WebClient;
-import org.springframework.web.reactive.function.client.WebClientRequestException;
-import org.springframework.web.util.UriComponentsBuilder;
+import org.springframework.web.reactive.function.client.WebClientResponseException;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.core.publisher.Mono;
 
 
@@ -57,404 +34,141 @@ import reactor.core.publisher.Mono;
 @RequiredArgsConstructor
 @RequiredArgsConstructor
 public class SchemaRegistryService {
 public class SchemaRegistryService {
 
 
-  public static final String NO_SUCH_SCHEMA_VERSION = "No such schema %s with version %s";
-  public static final String NO_SUCH_SCHEMA = "No such schema %s";
-
-  private static final String URL_SUBJECTS = "/subjects";
-  private static final String URL_SUBJECT = "/subjects/{schemaName}";
-  private static final String URL_SUBJECT_VERSIONS = "/subjects/{schemaName}/versions";
-  private static final String URL_SUBJECT_BY_VERSION = "/subjects/{schemaName}/versions/{version}";
   private static final String LATEST = "latest";
   private static final String LATEST = "latest";
 
 
-  private static final String UNRECOGNIZED_FIELD_SCHEMA_TYPE = "Unrecognized field: schemaType";
-  private static final String INCOMPATIBLE_WITH_AN_EARLIER_SCHEMA = "incompatible with an earlier schema";
-  private static final String INVALID_SCHEMA = "Invalid Schema";
+  @AllArgsConstructor
+  public static class SubjectWithCompatibilityLevel {
+    @Delegate
+    SchemaSubject subject;
+    @Getter
+    Compatibility compatibility;
+  }
+
+  private ReactiveFailover<KafkaSrClientApi> api(KafkaCluster cluster) {
+    return cluster.getSchemaRegistryClient();
+  }
 
 
-  public Mono<List<SchemaSubjectDTO>> getAllLatestVersionSchemas(KafkaCluster cluster,
-                                                                 List<String> subjects) {
+  public Mono<List<SubjectWithCompatibilityLevel>> getAllLatestVersionSchemas(KafkaCluster cluster,
+                                                                              List<String> subjects) {
     return Flux.fromIterable(subjects)
     return Flux.fromIterable(subjects)
         .concatMap(subject -> getLatestSchemaVersionBySubject(cluster, subject))
         .concatMap(subject -> getLatestSchemaVersionBySubject(cluster, subject))
         .collect(Collectors.toList());
         .collect(Collectors.toList());
   }
   }
 
 
-  public Mono<String[]> getAllSubjectNames(KafkaCluster cluster) {
-    return configuredWebClient(
-        cluster,
-        HttpMethod.GET,
-        URL_SUBJECTS)
-        .retrieve()
-        .bodyToMono(String[].class)
-        .doOnError(e -> log.error("Unexpected error", e))
-        .as(m -> failoverAble(m,
-            new FailoverMono<>(cluster.getSchemaRegistry(), () -> this.getAllSubjectNames(cluster))));
+  public Mono<List<String>> getAllSubjectNames(KafkaCluster cluster) {
+    return api(cluster)
+        .mono(c -> c.getAllSubjectNames(null, false))
+        .flatMapIterable(this::parseSubjectListString)
+        .collectList();
+  }
+
+  @SneakyThrows
+  private List<String> parseSubjectListString(String subjectNamesStr) {
+    //workaround for https://github.com/spring-projects/spring-framework/issues/24734
+    return new JsonMapper().readValue(subjectNamesStr, new TypeReference<List<String>>() {
+    });
   }
-  public Flux<SchemaSubjectDTO> getAllVersionsBySubject(KafkaCluster cluster, String subject) {
+  public Flux<SubjectWithCompatibilityLevel> getAllVersionsBySubject(KafkaCluster cluster, String subject) {
     Flux<Integer> versions = getSubjectVersions(cluster, subject);
    return versions.flatMap(version -> getSchemaSubjectByVersion(cluster, subject, version));
  }

  private Flux<Integer> getSubjectVersions(KafkaCluster cluster, String schemaName) {
-    return configuredWebClient(
-        cluster,
-        HttpMethod.GET,
-        URL_SUBJECT_VERSIONS,
-        schemaName)
-        .retrieve()
-        .onStatus(NOT_FOUND::equals,
-            throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA, schemaName)))
-        .bodyToFlux(Integer.class)
-        .as(f -> failoverAble(f, new FailoverFlux<>(cluster.getSchemaRegistry(),
-            () -> this.getSubjectVersions(cluster, schemaName))));
+    return api(cluster).flux(c -> c.getSubjectVersions(schemaName));
   }
   }
 
 
-  public Mono<SchemaSubjectDTO> getSchemaSubjectByVersion(KafkaCluster cluster, String schemaName,
-                                                          Integer version) {
-    return this.getSchemaSubject(cluster, schemaName, String.valueOf(version));
+  public Mono<SubjectWithCompatibilityLevel> getSchemaSubjectByVersion(KafkaCluster cluster,
+                                                                       String schemaName,
+                                                                       Integer version) {
+    return getSchemaSubject(cluster, schemaName, String.valueOf(version));
   }
   }
 
 
-  public Mono<SchemaSubjectDTO> getLatestSchemaVersionBySubject(KafkaCluster cluster,
-                                                                String schemaName) {
-    return this.getSchemaSubject(cluster, schemaName, LATEST);
+  public Mono<SubjectWithCompatibilityLevel> getLatestSchemaVersionBySubject(KafkaCluster cluster,
+                                                                             String schemaName) {
+    return getSchemaSubject(cluster, schemaName, LATEST);
   }
-  private Mono<SchemaSubjectDTO> getSchemaSubject(KafkaCluster cluster, String schemaName,
-                                                  String version) {
-    return configuredWebClient(
-        cluster,
-        HttpMethod.GET,
-        SchemaRegistryService.URL_SUBJECT_BY_VERSION,
-        List.of(schemaName, version))
-        .retrieve()
-        .onStatus(NOT_FOUND::equals,
-            throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA_VERSION, schemaName, version))
-        )
-        .bodyToMono(SchemaSubjectDTO.class)
-        .map(this::withSchemaType)
+  private Mono<SubjectWithCompatibilityLevel> getSchemaSubject(KafkaCluster cluster, String schemaName,
+                                                               String version) {
+    return api(cluster)
+        .mono(c -> c.getSubjectVersion(schemaName, version))
         .zipWith(getSchemaCompatibilityInfoOrGlobal(cluster, schemaName))
-        .map(tuple -> {
-          SchemaSubjectDTO schema = tuple.getT1();
-          String compatibilityLevel = tuple.getT2().getCompatibilityLevel();
-          schema.setCompatibilityLevel(compatibilityLevel);
-          return schema;
-        })
-        .as(m -> failoverAble(m, new FailoverMono<>(cluster.getSchemaRegistry(),
-            () -> this.getSchemaSubject(cluster, schemaName, version))));
-  }
-
-  /**
-   * If {@link SchemaSubjectDTO#getSchemaType()} is null, then AVRO, otherwise,
-   * adds the schema type as is.
-   */
-  @NotNull
-  private SchemaSubjectDTO withSchemaType(SchemaSubjectDTO s) {
-    return s.schemaType(Optional.ofNullable(s.getSchemaType()).orElse(SchemaTypeDTO.AVRO));
+        .map(t -> new SubjectWithCompatibilityLevel(t.getT1(), t.getT2()))
+        .onErrorResume(WebClientResponseException.NotFound.class, th -> Mono.error(new SchemaNotFoundException()));
   }
-  public Mono<Void> deleteSchemaSubjectByVersion(KafkaCluster cluster,
-                                                 String schemaName,
-                                                 Integer version) {
-    return this.deleteSchemaSubject(cluster, schemaName, String.valueOf(version));
+  public Mono<Void> deleteSchemaSubjectByVersion(KafkaCluster cluster, String schemaName, Integer version) {
+    return deleteSchemaSubject(cluster, schemaName, String.valueOf(version));
   }
-  public Mono<Void> deleteLatestSchemaSubject(KafkaCluster cluster,
-                                              String schemaName) {
-    return this.deleteSchemaSubject(cluster, schemaName, LATEST);
+  public Mono<Void> deleteLatestSchemaSubject(KafkaCluster cluster, String schemaName) {
+    return deleteSchemaSubject(cluster, schemaName, LATEST);
   }
-  private Mono<Void> deleteSchemaSubject(KafkaCluster cluster, String schemaName,
-                                         String version) {
-    return configuredWebClient(
-        cluster,
-        HttpMethod.DELETE,
-        SchemaRegistryService.URL_SUBJECT_BY_VERSION,
-        List.of(schemaName, version))
-        .retrieve()
-        .onStatus(NOT_FOUND::equals,
-            throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA_VERSION, schemaName, version))
-        )
-        .toBodilessEntity()
-        .then()
-        .as(m -> failoverAble(m, new FailoverMono<>(cluster.getSchemaRegistry(),
-            () -> this.deleteSchemaSubject(cluster, schemaName, version))));
+  private Mono<Void> deleteSchemaSubject(KafkaCluster cluster, String schemaName, String version) {
+    return api(cluster).mono(c -> c.deleteSubjectVersion(schemaName, version, false));
   }
-  public Mono<Void> deleteSchemaSubjectEntirely(KafkaCluster cluster,
-                                                String schemaName) {
-    return configuredWebClient(
-        cluster,
-        HttpMethod.DELETE,
-        URL_SUBJECT,
-        schemaName)
-        .retrieve()
-        .onStatus(HttpStatus::isError, errorOnSchemaDeleteFailure(schemaName))
-        .toBodilessEntity()
-        .then()
-        .as(m -> failoverAble(m, new FailoverMono<>(cluster.getSchemaRegistry(),
-            () -> this.deleteSchemaSubjectEntirely(cluster, schemaName))));
+  public Mono<Void> deleteSchemaSubjectEntirely(KafkaCluster cluster, String schemaName) {
+    return api(cluster).mono(c -> c.deleteAllSubjectVersions(schemaName, false));
   }

  /**
   * Checks whether the provided schema duplicates the previous or not, creates a new schema
   * and then returns the whole content by requesting its latest version.
   */
-  public Mono<SchemaSubjectDTO> registerNewSchema(KafkaCluster cluster,
-                                                  NewSchemaSubjectDTO dto) {
-    SchemaTypeDTO schemaType = SchemaTypeDTO.AVRO == dto.getSchemaType() ? null : dto.getSchemaType();
-    Mono<InternalNewSchema> newSchema = Mono.just(new InternalNewSchema(dto.getSchema(), schemaType));
-    String subject = dto.getSubject();
-    return submitNewSchema(subject, newSchema, cluster)
-        .flatMap(resp -> getLatestSchemaVersionBySubject(cluster, subject));
-  }
-
-  @NotNull
-  private Mono<SubjectIdResponse> submitNewSchema(String subject,
-                                                  Mono<InternalNewSchema> newSchemaSubject,
-                                                  KafkaCluster cluster) {
-    return configuredWebClient(
-        cluster,
-        HttpMethod.POST,
-        URL_SUBJECT_VERSIONS, subject)
-        .contentType(MediaType.APPLICATION_JSON)
-        .body(BodyInserters.fromPublisher(newSchemaSubject, InternalNewSchema.class))
-        .retrieve()
-        .onStatus(status -> UNPROCESSABLE_ENTITY.equals(status) || CONFLICT.equals(status),
-            r -> r.bodyToMono(ErrorResponse.class)
-                .flatMap(this::getMonoError))
-        .bodyToMono(SubjectIdResponse.class)
-        .as(m -> failoverAble(m, new FailoverMono<>(cluster.getSchemaRegistry(),
-            () -> submitNewSchema(subject, newSchemaSubject, cluster))));
-  }
-
-  @NotNull
-  private Mono<Throwable> getMonoError(ErrorResponse x) {
-    if (isUnrecognizedFieldSchemaTypeMessage(x.getMessage())) {
-      return Mono.error(new SchemaTypeNotSupportedException());
-    } else if (isIncompatibleSchemaMessage(x.getMessage())) {
-      return Mono.error(new SchemaCompatibilityException(x.getMessage()));
-    } else {
-      log.error(x.getMessage());
-      return Mono.error(new UnprocessableEntityException(INVALID_SCHEMA));
-    }
-  }
-
-  @NotNull
-  private Function<ClientResponse, Mono<? extends Throwable>> throwIfNotFoundStatus(
-      String formatted) {
-    return resp -> Mono.error(new SchemaNotFoundException(formatted));
-  }
-
-  /**
-   * Updates a compatibility level for a <code>schemaName</code>.
-   *
-   * @param schemaName is a schema subject name
-   * @see com.provectus.kafka.ui.model.CompatibilityLevelDTO.CompatibilityEnum
-   */
-  public Mono<Void> updateSchemaCompatibility(KafkaCluster cluster, @Nullable String schemaName,
-                                              Mono<CompatibilityLevelDTO> compatibilityLevel) {
-    String configEndpoint = Objects.isNull(schemaName) ? "/config" : "/config/{schemaName}";
-    return configuredWebClient(
-        cluster,
-        HttpMethod.PUT,
-        configEndpoint,
-        schemaName)
-        .contentType(MediaType.APPLICATION_JSON)
-        .body(BodyInserters.fromPublisher(compatibilityLevel, CompatibilityLevelDTO.class))
-        .retrieve()
-        .onStatus(NOT_FOUND::equals,
-            throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA, schemaName)))
-        .bodyToMono(Void.class)
-        .as(m -> failoverAble(m, new FailoverMono<>(cluster.getSchemaRegistry(),
-            () -> this.updateSchemaCompatibility(cluster, schemaName, compatibilityLevel))));
+  public Mono<SubjectWithCompatibilityLevel> registerNewSchema(KafkaCluster cluster,
+                                                               String subject,
+                                                               NewSubject newSchemaSubject) {
+    return api(cluster)
+        .mono(c -> c.registerNewSchema(subject, newSchemaSubject))
+        .onErrorMap(WebClientResponseException.Conflict.class,
+            th -> new SchemaCompatibilityException())
+        .onErrorMap(WebClientResponseException.UnprocessableEntity.class,
+            th -> new ValidationException("Invalid schema"))
+        .then(getLatestSchemaVersionBySubject(cluster, subject));
   }

  public Mono<Void> updateSchemaCompatibility(KafkaCluster cluster,
-                                              Mono<CompatibilityLevelDTO> compatibilityLevel) {
-    return updateSchemaCompatibility(cluster, null, compatibilityLevel);
-  }
-
-  public Mono<InternalCompatibilityLevel> getSchemaCompatibilityLevel(KafkaCluster cluster,
-                                                                      String schemaName) {
-    String globalConfig = Objects.isNull(schemaName) ? "/config" : "/config/{schemaName}";
-    final var values = new LinkedMultiValueMap<String, String>();
-    values.add("defaultToGlobal", "true");
-    return configuredWebClient(
-        cluster,
-        HttpMethod.GET,
-        globalConfig,
-        (schemaName == null ? Collections.emptyList() : List.of(schemaName)),
-        values)
-        .retrieve()
-        .bodyToMono(InternalCompatibilityLevel.class)
+                                              String schemaName,
+                                              Compatibility compatibility) {
+    return api(cluster)
+        .mono(c -> c.updateSubjectCompatibilityLevel(
+            schemaName, new CompatibilityLevelChange().compatibility(compatibility)))
+        .then();
+  }
+
+  public Mono<Void> updateGlobalSchemaCompatibility(KafkaCluster cluster,
+                                                    Compatibility compatibility) {
+    return api(cluster)
+        .mono(c -> c.updateGlobalCompatibilityLevel(new CompatibilityLevelChange().compatibility(compatibility)))
+        .then();
+  }
+
+  public Mono<Compatibility> getSchemaCompatibilityLevel(KafkaCluster cluster,
+                                                         String schemaName) {
+    return api(cluster)
+        .mono(c -> c.getSubjectCompatibilityLevel(schemaName, true))
+        .map(CompatibilityConfig::getCompatibilityLevel)
         .onErrorResume(error -> Mono.empty());
  }

-  public Mono<InternalCompatibilityLevel> getGlobalSchemaCompatibilityLevel(KafkaCluster cluster) {
-    return this.getSchemaCompatibilityLevel(cluster, null);
+  public Mono<Compatibility> getGlobalSchemaCompatibilityLevel(KafkaCluster cluster) {
+    return api(cluster)
+        .mono(KafkaSrClientApi::getGlobalCompatibilityLevel)
+        .map(CompatibilityConfig::getCompatibilityLevel);
   }
-  private Mono<InternalCompatibilityLevel> getSchemaCompatibilityInfoOrGlobal(KafkaCluster cluster,
-                                                                              String schemaName) {
-    return this.getSchemaCompatibilityLevel(cluster, schemaName)
+  private Mono<Compatibility> getSchemaCompatibilityInfoOrGlobal(KafkaCluster cluster,
+                                                                 String schemaName) {
+    return getSchemaCompatibilityLevel(cluster, schemaName)
         .switchIfEmpty(this.getGlobalSchemaCompatibilityLevel(cluster));
  }

-  public Mono<InternalCompatibilityCheck> checksSchemaCompatibility(
-      KafkaCluster cluster, String schemaName, Mono<NewSchemaSubjectDTO> newSchemaSubject) {
-    return configuredWebClient(
-        cluster,
-        HttpMethod.POST,
-        "/compatibility/subjects/{schemaName}/versions/latest",
-        schemaName)
-        .contentType(MediaType.APPLICATION_JSON)
-        .body(BodyInserters.fromPublisher(newSchemaSubject, NewSchemaSubjectDTO.class))
-        .retrieve()
-        .onStatus(NOT_FOUND::equals,
-            throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA, schemaName)))
-        .bodyToMono(InternalCompatibilityCheck.class)
-        .as(m -> failoverAble(m, new FailoverMono<>(cluster.getSchemaRegistry(),
-            () -> this.checksSchemaCompatibility(cluster, schemaName, newSchemaSubject))));
-  }
-
-  public String formatted(String str, Object... args) {
-    try (Formatter formatter = new Formatter()) {
-      return formatter.format(str, args).toString();
-    }
-  }
-
-  private void setBasicAuthIfEnabled(InternalSchemaRegistry schemaRegistry, HttpHeaders headers) {
-    if (schemaRegistry.getUsername() != null && schemaRegistry.getPassword() != null) {
-      headers.setBasicAuth(
-          schemaRegistry.getUsername(),
-          schemaRegistry.getPassword()
-      );
-    } else if (schemaRegistry.getUsername() != null) {
-      throw new ValidationException(
-          "You specified username but did not specify password");
-    } else if (schemaRegistry.getPassword() != null) {
-      throw new ValidationException(
-          "You specified password but did not specify username");
-    }
-  }
-
-  private boolean isUnrecognizedFieldSchemaTypeMessage(String errorMessage) {
-    return errorMessage.contains(UNRECOGNIZED_FIELD_SCHEMA_TYPE);
-  }
-
-  private boolean isIncompatibleSchemaMessage(String message) {
-    return message.contains(INCOMPATIBLE_WITH_AN_EARLIER_SCHEMA);
-  }
-
-  private WebClient.RequestBodySpec configuredWebClient(KafkaCluster cluster, HttpMethod method,
-                                                        String uri) {
-    return configuredWebClient(cluster, method, uri, Collections.emptyList(),
-        new LinkedMultiValueMap<>());
-  }
-
-  private WebClient.RequestBodySpec configuredWebClient(KafkaCluster cluster, HttpMethod method,
-                                                        String uri, List<String> uriVariables) {
-    return configuredWebClient(cluster, method, uri, uriVariables, new LinkedMultiValueMap<>());
-  }
-
-  private WebClient.RequestBodySpec configuredWebClient(KafkaCluster cluster, HttpMethod method,
-                                                        String uri, @Nullable String uriVariable) {
-    List<String> uriVariables = uriVariable == null ? Collections.emptyList() : List.of(uriVariable);
-    return configuredWebClient(cluster, method, uri, uriVariables, new LinkedMultiValueMap<>());
-  }
-
-  private WebClient.RequestBodySpec configuredWebClient(KafkaCluster cluster,
-                                                        HttpMethod method, String path,
-                                                        List<String> uriVariables,
-                                                        MultiValueMap<String, String> queryParams) {
-    final var schemaRegistry = cluster.getSchemaRegistry();
-
-    try {
-      WebClient.Builder schemaRegistryWebClient = SecuredWebClient.configure(
-          schemaRegistry.getKeystoreLocation(),
-          schemaRegistry.getKeystorePassword(),
-          schemaRegistry.getTruststoreLocation(),
-          schemaRegistry.getTruststorePassword()
-      );
-
-      return schemaRegistryWebClient.build()
-          .method(method)
-          .uri(buildUri(schemaRegistry, path, uriVariables, queryParams))
-          .headers(headers -> setBasicAuthIfEnabled(schemaRegistry, headers));
-    } catch (Exception e) {
-      throw new IllegalStateException(
-          "cannot create TLS configuration for schema-registry in cluster " + cluster.getName(), e);
-    }
-  }
-
-  private URI buildUri(InternalSchemaRegistry schemaRegistry, String path, List<String> uriVariables,
-                       MultiValueMap<String, String> queryParams) {
-    final var builder = UriComponentsBuilder
-        .fromHttpUrl(schemaRegistry.getUri() + path);
-    builder.queryParams(queryParams);
-    return builder.build(uriVariables.toArray());
-  }
-
-  private Function<ClientResponse, Mono<? extends Throwable>> errorOnSchemaDeleteFailure(String schemaName) {
-    return resp -> {
-      if (NOT_FOUND.equals(resp.statusCode())) {
-        return Mono.error(new SchemaNotFoundException(schemaName));
-      }
-      return Mono.error(new SchemaFailedToDeleteException(schemaName));
-    };
-  }
-
-  private <T> Mono<T> failoverAble(Mono<T> request, FailoverMono<T> failoverMethod) {
-    return request.onErrorResume(failoverMethod::failover);
-  }
-
-  private <T> Flux<T> failoverAble(Flux<T> request, FailoverFlux<T> failoverMethod) {
-    return request.onErrorResume(failoverMethod::failover);
-  }
-
-  private abstract static class Failover<E> {
-    private final InternalSchemaRegistry schemaRegistry;
-    private final Supplier<E> failover;
-
-    private Failover(InternalSchemaRegistry schemaRegistry, Supplier<E> failover) {
-      this.schemaRegistry = Objects.requireNonNull(schemaRegistry);
-      this.failover = Objects.requireNonNull(failover);
-    }
-
-    abstract E error(Throwable error);
-
-    public E failover(Throwable error) {
-      if (error instanceof WebClientRequestException
-          && error.getCause() instanceof IOException
-          && schemaRegistry.isFailoverAvailable()) {
-        var uri = ((WebClientRequestException) error).getUri();
-        schemaRegistry.markAsUnavailable(String.format("%s://%s", uri.getScheme(), uri.getAuthority()));
-        return failover.get();
-      }
-      return error(error);
-    }
-  }
-
-  private static class FailoverMono<T> extends Failover<Mono<T>> {
-
-    private FailoverMono(InternalSchemaRegistry schemaRegistry, Supplier<Mono<T>> failover) {
-      super(schemaRegistry, failover);
-    }
-
-    @Override
-    Mono<T> error(Throwable error) {
-      return Mono.error(error);
-    }
-  }
-
-  private static class FailoverFlux<T> extends Failover<Flux<T>> {
-
-    private FailoverFlux(InternalSchemaRegistry schemaRegistry, Supplier<Flux<T>> failover) {
-      super(schemaRegistry, failover);
-    }
-
-    @Override
-    Flux<T> error(Throwable error) {
-      return Flux.error(error);
-    }
+  public Mono<CompatibilityCheckResponse> checksSchemaCompatibility(KafkaCluster cluster,
+                                                                    String schemaName,
+                                                                    NewSubject newSchemaSubject) {
+    return api(cluster).mono(c -> c.checkSchemaCompatibility(schemaName, LATEST, true, newSchemaSubject));
   }
 }

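A condensed sketch of how the rewritten service attaches the effective compatibility level to a subject version, mirroring getSchemaSubject and getSchemaCompatibilityInfoOrGlobal above (types come from the generated schema-registry client used in this diff; the method name is illustrative):

  // Sketch: latest subject version plus its effective (per-subject or global) compatibility level.
  Mono<SubjectWithCompatibilityLevel> latestWithCompatibility(KafkaCluster cluster, String subject) {
    Mono<Compatibility> effective =
        getSchemaCompatibilityLevel(cluster, subject)                   // empty if unset or lookup failed
            .switchIfEmpty(getGlobalSchemaCompatibilityLevel(cluster)); // fall back to the global config
    return api(cluster)
        .mono(c -> c.getSubjectVersion(subject, "latest"))
        .zipWith(effective, SubjectWithCompatibilityLevel::new);
  }
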
+ 10 - 8
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java

@@ -1,5 +1,7 @@
 package com.provectus.kafka.ui.service;
+import static com.provectus.kafka.ui.service.ReactiveAdminClient.ClusterDescription;
+
 import com.provectus.kafka.ui.model.Feature;
 import com.provectus.kafka.ui.model.InternalLogDirStats;
 import com.provectus.kafka.ui.model.KafkaCluster;
@@ -9,10 +11,12 @@ import com.provectus.kafka.ui.model.Statistics;
 import com.provectus.kafka.ui.service.metrics.MetricsCollector;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.admin.ConfigEntry;
 import org.apache.kafka.clients.admin.TopicDescription;
+import org.apache.kafka.common.Node;
 import org.springframework.stereotype.Service;
 import reactor.core.publisher.Mono;

@@ -21,7 +25,7 @@ import reactor.core.publisher.Mono;
 @Slf4j
 public class StatisticsService {

-  private final MetricsCollector metricsClusterUtil;
+  private final MetricsCollector metricsCollector;
   private final AdminClientService adminClientService;
  private final FeatureService featureService;
  private final StatisticsCache cache;
@@ -35,8 +39,8 @@ public class StatisticsService {
             ac.describeCluster().flatMap(description ->
                Mono.zip(
                    List.of(
-                        metricsClusterUtil.getBrokerMetrics(cluster, description.getNodes()),
-                        getLogDirInfo(cluster, ac),
+                        metricsCollector.getBrokerMetrics(cluster, description.getNodes()),
+                        getLogDirInfo(description, ac),
                         featureService.getAvailableFeatures(cluster, description.getController()),
                        loadTopicConfigs(cluster),
                        describeTopics(cluster)),
@@ -58,11 +62,9 @@ public class StatisticsService {
             e -> Mono.just(Statistics.empty().toBuilder().lastKafkaException(e).build()));
  }

-  private Mono<InternalLogDirStats> getLogDirInfo(KafkaCluster cluster, ReactiveAdminClient c) {
-    if (!cluster.isDisableLogDirsCollection()) {
-      return c.describeLogDirs().map(InternalLogDirStats::new);
-    }
-    return Mono.just(InternalLogDirStats.empty());
+  private Mono<InternalLogDirStats> getLogDirInfo(ClusterDescription desc, ReactiveAdminClient ac) {
+    var brokerIds = desc.getNodes().stream().map(Node::id).collect(Collectors.toSet());
+    return ac.describeLogDirs(brokerIds).map(InternalLogDirStats::new);
   }

  private Mono<Map<String, TopicDescription>> describeTopics(KafkaCluster c) {

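Log-dir statistics are now requested for exactly the brokers reported by describeCluster(), relying on the describeLogDirs() change above to fall back to an empty map when a broker cannot answer. A minimal sketch of that step with the same types as in the diff (method name illustrative):

  // Sketch: derive broker ids from the cluster description, then fetch log-dir stats.
  private Mono<InternalLogDirStats> logDirStats(ReactiveAdminClient.ClusterDescription description,
                                                ReactiveAdminClient ac) {
    Set<Integer> brokerIds = description.getNodes().stream()
        .map(Node::id)                       // org.apache.kafka.common.Node
        .collect(Collectors.toSet());
    return ac.describeLogDirs(brokerIds)     // Map.of() when unsupported or failing
        .map(InternalLogDirStats::new);
  }
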
+ 52 - 63
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlApiClient.java

@@ -4,28 +4,29 @@ import static ksql.KsqlGrammarParser.DefineVariableContext;
 import static ksql.KsqlGrammarParser.PrintTopicContext;
 import static ksql.KsqlGrammarParser.SingleStatementContext;
 import static ksql.KsqlGrammarParser.UndefineVariableContext;
+import static org.springframework.http.MediaType.APPLICATION_JSON;

 import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.json.JsonMapper;
 import com.fasterxml.jackson.databind.node.TextNode;
-import com.provectus.kafka.ui.exception.ValidationException;
-import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.config.ClustersProperties;
 import com.provectus.kafka.ui.service.ksql.response.ResponseParser;
-import com.provectus.kafka.ui.util.SecuredWebClient;
+import com.provectus.kafka.ui.util.WebClientConfigurator;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
+import javax.annotation.Nullable;
 import lombok.Builder;
 import lombok.Value;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.core.codec.DecodingException;
-import org.springframework.http.HttpHeaders;
 import org.springframework.http.MediaType;
 import org.springframework.http.codec.json.Jackson2JsonDecoder;
+import org.springframework.http.codec.json.Jackson2JsonEncoder;
+import org.springframework.util.MimeType;
 import org.springframework.util.MimeTypeUtils;
 import org.springframework.util.unit.DataSize;
-import org.springframework.web.reactive.function.client.ExchangeStrategies;
 import org.springframework.web.reactive.function.client.WebClient;
 import org.springframework.web.reactive.function.client.WebClientResponseException;
 import reactor.core.publisher.Flux;
@@ -34,6 +35,8 @@ import reactor.core.publisher.Mono;
 @Slf4j
 public class KsqlApiClient {

+  private static final MimeType KQL_API_MIME_TYPE = MimeTypeUtils.parseMimeType("application/vnd.ksql.v1+json");
+
   private static final Set<Class<?>> UNSUPPORTED_STMT_TYPES = Set.of(
       PrintTopicContext.class,
       DefineVariableContext.class,
@@ -60,60 +63,46 @@

  //--------------------------------------------------------------------------------------------

-  private final KafkaCluster cluster;
-  private final DataSize maxBuffSize;
+  private final String baseUrl;
+  private final WebClient webClient;

-  public KsqlApiClient(KafkaCluster cluster, DataSize maxBuffSize) {
-    this.cluster = cluster;
-    this.maxBuffSize = maxBuffSize;
+  public KsqlApiClient(String baseUrl,
+                       @Nullable ClustersProperties.KsqldbServerAuth ksqldbServerAuth,
+                       @Nullable ClustersProperties.WebClientSsl ksqldbServerSsl,
+                       @Nullable DataSize maxBuffSize) {
+    this.baseUrl = baseUrl;
+    this.webClient = webClient(ksqldbServerAuth, ksqldbServerSsl, maxBuffSize);
   }
   }
 
 
-  private WebClient webClient() {
-    var exchangeStrategies = ExchangeStrategies.builder()
-        .codecs(configurer -> {
-          configurer.customCodecs()
-              .register(
-                  new Jackson2JsonDecoder(
-                      new ObjectMapper(),
-                      // some ksqldb versions do not set content-type header in response,
-                      // but we still need to use JsonDecoder for it
-                      MimeTypeUtils.APPLICATION_OCTET_STREAM));
+  private static WebClient webClient(@Nullable ClustersProperties.KsqldbServerAuth ksqldbServerAuth,
+                                     @Nullable ClustersProperties.WebClientSsl ksqldbServerSsl,
+                                     @Nullable DataSize maxBuffSize) {
+    ksqldbServerAuth = Optional.ofNullable(ksqldbServerAuth).orElse(new ClustersProperties.KsqldbServerAuth());
+    ksqldbServerSsl = Optional.ofNullable(ksqldbServerSsl).orElse(new ClustersProperties.WebClientSsl());
+    maxBuffSize = Optional.ofNullable(maxBuffSize).orElse(DataSize.ofMegabytes(20));
+
+    return new WebClientConfigurator()
+        .configureSsl(
+            ksqldbServerSsl.getKeystoreLocation(),
+            ksqldbServerSsl.getKeystorePassword(),
+            ksqldbServerSsl.getTruststoreLocation(),
+            ksqldbServerSsl.getTruststorePassword()
+        )
+        .configureBasicAuth(
+            ksqldbServerAuth.getUsername(),
+            ksqldbServerAuth.getPassword()
+        )
+        .configureBufferSize(maxBuffSize)
+        .configureCodecs(codecs -> {
+          var mapper = new JsonMapper();
+          codecs.defaultCodecs()
+              .jackson2JsonEncoder(new Jackson2JsonEncoder(mapper, KQL_API_MIME_TYPE, APPLICATION_JSON));
+          // some ksqldb versions do not set content-type header in response,
+          // but we still need to use JsonDecoder for it
+          codecs.defaultCodecs()
+              .jackson2JsonDecoder(new Jackson2JsonDecoder(mapper, MimeTypeUtils.ALL));
         })
         .build();
-
-    try {
-      WebClient.Builder securedWebClient = SecuredWebClient.configure(
-          cluster.getKsqldbServer().getKeystoreLocation(),
-          cluster.getKsqldbServer().getKeystorePassword(),
-          cluster.getKsqldbServer().getTruststoreLocation(),
-          cluster.getKsqldbServer().getTruststorePassword()
-      );
-
-      return securedWebClient
-          .codecs(c -> c.defaultCodecs().maxInMemorySize((int) maxBuffSize.toBytes()))
-          .defaultHeaders(httpHeaders -> setBasicAuthIfEnabled(httpHeaders, cluster))
-          .exchangeStrategies(exchangeStrategies)
-          .build();
-    } catch (Exception e) {
-      throw new IllegalStateException(
-          "cannot create TLS configuration for ksqlDB in cluster " + cluster.getName(), e);
-    }
-  }
-
-  public static void setBasicAuthIfEnabled(HttpHeaders headers, KafkaCluster cluster) {
-    String username = cluster.getKsqldbServer().getUsername();
-    String password = cluster.getKsqldbServer().getPassword();
-    if (username != null && password != null) {
-      headers.setBasicAuth(username, password);
-    } else if (username != null) {
-      throw new ValidationException("You specified username but did not specify password");
-    } else if (password != null) {
-      throw new ValidationException("You specified password but did not specify username");
-    }
-  }
-
-  private String baseKsqlDbUri() {
-    return cluster.getKsqldbServer().getUrl();
   }
 
   private KsqlRequest ksqlRequest(String ksql, Map<String, String> streamProperties) {
@@ -121,11 +110,11 @@ public class KsqlApiClient {
   }
 
   private Flux<KsqlResponseTable> executeSelect(String ksql, Map<String, String> streamProperties) {
-    return webClient()
+    return webClient
         .post()
-        .uri(baseKsqlDbUri() + "/query")
-        .accept(MediaType.parseMediaType("application/vnd.ksql.v1+json"))
-        .contentType(MediaType.parseMediaType("application/vnd.ksql.v1+json"))
+        .uri(baseUrl + "/query")
+        .accept(new MediaType(KQL_API_MIME_TYPE))
+        .contentType(new MediaType(KQL_API_MIME_TYPE))
         .bodyValue(ksqlRequest(ksql, streamProperties))
         .retrieve()
         .bodyToFlux(JsonNode.class)
@@ -151,11 +140,11 @@ public class KsqlApiClient {
 
   private Flux<KsqlResponseTable> executeStatement(String ksql,
                                                    Map<String, String> streamProperties) {
-    return webClient()
+    return webClient
         .post()
-        .uri(baseKsqlDbUri() + "/ksql")
-        .accept(MediaType.parseMediaType("application/vnd.ksql.v1+json"))
-        .contentType(MediaType.parseMediaType("application/json"))
+        .uri(baseUrl + "/ksql")
+        .accept(new MediaType(KQL_API_MIME_TYPE))
+        .contentType(APPLICATION_JSON)
         .bodyValue(ksqlRequest(ksql, streamProperties))
         .exchangeToFlux(
             resp -> {

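Usage sketch (not part of the diff): with this change the WebClient is built once in the constructor instead of per request, and the base URL is passed in directly. A minimal, hypothetical wiring example, assuming a ksqlDB server at http://ksqldb:8088 and the standard Reactor/Spring imports; nulls fall back to the defaults shown above (empty auth, no TLS, 20MB buffer):

    KsqlApiClient client = new KsqlApiClient(
        "http://ksqldb:8088",   // baseUrl (placeholder address)
        null,                   // KsqldbServerAuth  -> defaults to empty auth
        null,                   // WebClientSsl      -> no TLS customization
        null);                  // maxBuffSize       -> defaults to 20MB
    client.execute("LIST STREAMS;", Map.of())
        .subscribe(table -> System.out.println(table.getHeader()));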
+ 6 - 14
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2.java

@@ -14,21 +14,13 @@ import java.util.UUID;
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
-import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Service;
-import org.springframework.util.unit.DataSize;
 import reactor.core.publisher.Flux;
 
 @Slf4j
 @Service
 public class KsqlServiceV2 {
 
-  private final DataSize maxBuffSize;
-
-  public KsqlServiceV2(@Value("${webclient.max-in-memory-buffer-size:20MB}") DataSize maxBuffSize) {
-    this.maxBuffSize = maxBuffSize;
-  }
-
   @lombok.Value
   private static class KsqlExecuteCommand {
     KafkaCluster cluster;
@@ -55,13 +47,13 @@ public class KsqlServiceV2 {
       throw new ValidationException("No command registered with id " + commandId);
     }
     registeredCommands.invalidate(commandId);
-    return new KsqlApiClient(cmd.cluster, maxBuffSize)
-        .execute(cmd.ksql, cmd.streamProperties);
+    return cmd.cluster.getKsqlClient()
+        .flux(client -> client.execute(cmd.ksql, cmd.streamProperties));
   }
 
   public Flux<KsqlTableDescriptionDTO> listTables(KafkaCluster cluster) {
-    return new KsqlApiClient(cluster, maxBuffSize)
-        .execute("LIST TABLES;", Map.of())
+    return cluster.getKsqlClient()
+        .flux(client -> client.execute("LIST TABLES;", Map.of()))
         .flatMap(resp -> {
           if (!resp.getHeader().equals("Tables")) {
             log.error("Unexpected result header: {}", resp.getHeader());
@@ -82,8 +74,8 @@ public class KsqlServiceV2 {
   }
 
   public Flux<KsqlStreamDescriptionDTO> listStreams(KafkaCluster cluster) {
-    return new KsqlApiClient(cluster, maxBuffSize)
-        .execute("LIST STREAMS;", Map.of())
+    return cluster.getKsqlClient()
+        .flux(client -> client.execute("LIST STREAMS;", Map.of()))
         .flatMap(resp -> {
           if (!resp.getHeader().equals("Streams")) {
             log.error("Unexpected result header: {}", resp.getHeader());

+ 15 - 8
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/PrometheusMetricsRetriever.java

@@ -3,6 +3,7 @@ package com.provectus.kafka.ui.service.metrics;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Strings;
 import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.model.MetricsConfig;
 import java.util.Arrays;
 import java.util.Optional;
 import lombok.RequiredArgsConstructor;
@@ -27,20 +28,26 @@ class PrometheusMetricsRetriever implements MetricsRetriever {
   @Override
   public Flux<RawMetric> retrieve(KafkaCluster c, Node node) {
     log.debug("Retrieving metrics from prometheus exporter: {}:{}", node.host(), c.getMetricsConfig().getPort());
-    var metricsConfig = c.getMetricsConfig();
-    Integer port = Optional.ofNullable(metricsConfig.getPort()).orElse(DEFAULT_EXPORTER_PORT);
-    return retrieve(node.host(), port, metricsConfig.isSsl());
+    return retrieve(node.host(), c.getMetricsConfig());
   }
 
   @VisibleForTesting
-  Flux<RawMetric> retrieve(String host, int port, boolean ssl) {
-    WebClient.ResponseSpec responseSpec = webClient.get()
+  Flux<RawMetric> retrieve(String host, MetricsConfig metricsConfig) {
+    int port = Optional.ofNullable(metricsConfig.getPort()).orElse(DEFAULT_EXPORTER_PORT);
+
+    var request = webClient.get()
         .uri(UriComponentsBuilder.newInstance()
-            .scheme(ssl ? "https" : "http")
+            .scheme(metricsConfig.isSsl() ? "https" : "http")
             .host(host)
             .port(port)
-            .path(METRICS_ENDPOINT_PATH).build().toUri())
-        .retrieve();
+            .path(METRICS_ENDPOINT_PATH).build().toUri());
+
+    if (metricsConfig.getUsername() != null && metricsConfig.getPassword() != null) {
+      request.headers(
+          httpHeaders -> httpHeaders.setBasicAuth(metricsConfig.getUsername(), metricsConfig.getPassword()));
+    }
+
+    WebClient.ResponseSpec responseSpec = request.retrieve();
 
     return responseSpec.bodyToMono(String.class)
         .doOnError(e -> log.error("Error while getting metrics from {}", host, e))

+ 0 - 21
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/MapUtil.java

@@ -1,21 +0,0 @@
-package com.provectus.kafka.ui.util;
-
-import java.util.Map;
-import java.util.stream.Collectors;
-
-public class MapUtil {
-
-  private MapUtil() {
-  }
-
-  public static <K, V> Map<K, V> removeNullValues(Map<K, V> map) {
-    return map.entrySet().stream()
-        .filter(e -> e.getValue() != null)
-        .collect(
-            Collectors.toMap(
-                Map.Entry::getKey,
-                Map.Entry::getValue
-            )
-        );
-  }
-}

+ 154 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ReactiveFailover.java

@@ -0,0 +1,154 @@
+package com.provectus.kafka.ui.util;
+
+import com.google.common.base.Preconditions;
+import java.io.IOException;
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.function.Function;
+import java.util.function.Predicate;
+import java.util.function.Supplier;
+import org.springframework.web.reactive.function.client.WebClientRequestException;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+public class ReactiveFailover<T> {
+
+  public static final Duration DEFAULT_RETRY_GRACE_PERIOD_MS = Duration.ofSeconds(5);
+  public static final Predicate<Throwable> CONNECTION_REFUSED_EXCEPTION_FILTER =
+      error -> error.getCause() instanceof IOException && error.getCause().getMessage().contains("Connection refused");
+
+  private final List<PublisherHolder<T>> publishers;
+  private int currentIndex = 0;
+
+  private final Predicate<Throwable> failoverExceptionsPredicate;
+  private final String noAvailablePublishersMsg;
+
+  public static <T> ReactiveFailover<T> create(List<T> publishers,
+                                               Predicate<Throwable> failoverExeptionsPredicate,
+                                               String noAvailablePublishersMsg,
+                                               Duration retryGracePeriodMs) {
+    return new ReactiveFailover<>(
+        publishers.stream().map(p -> new PublisherHolder<>(() -> p, retryGracePeriodMs.toMillis())).toList(),
+        failoverExeptionsPredicate,
+        noAvailablePublishersMsg
+    );
+  }
+
+  public static <T, A> ReactiveFailover<T> create(List<A> args,
+                                                  Function<A, T> factory,
+                                                  Predicate<Throwable> failoverExeptionsPredicate,
+                                                  String noAvailablePublishersMsg,
+                                                  Duration retryGracePeriodMs) {
+    return new ReactiveFailover<>(
+        args.stream().map(arg ->
+            new PublisherHolder<>(() -> factory.apply(arg), retryGracePeriodMs.toMillis())).toList(),
+        failoverExeptionsPredicate,
+        noAvailablePublishersMsg
+    );
+  }
+
+  private ReactiveFailover(List<PublisherHolder<T>> publishers,
+                   Predicate<Throwable> failoverExceptionsPredicate,
+                   String noAvailablePublishersMsg) {
+    Preconditions.checkArgument(!publishers.isEmpty());
+    this.publishers = publishers;
+    this.failoverExceptionsPredicate = failoverExceptionsPredicate;
+    this.noAvailablePublishersMsg = noAvailablePublishersMsg;
+  }
+
+  public <V> Mono<V> mono(Function<T, Mono<V>> f) {
+    List<PublisherHolder<T>> candidates = getActivePublishers();
+    if (candidates.isEmpty()) {
+      return Mono.error(() -> new IllegalStateException(noAvailablePublishersMsg));
+    }
+    return mono(f, candidates);
+  }
+
+  private <V> Mono<V> mono(Function<T, Mono<V>> f, List<PublisherHolder<T>> candidates) {
+    var publisher = candidates.get(0);
+    return f.apply(publisher.get())
+        .onErrorResume(failoverExceptionsPredicate, th -> {
+          publisher.markFailed();
+          if (candidates.size() == 1) {
+            return Mono.error(th);
+          }
+          var newCandidates = candidates.stream().skip(1).filter(PublisherHolder::isActive).toList();
+          if (newCandidates.isEmpty()) {
+            return Mono.error(th);
+          }
+          return mono(f, newCandidates);
+        });
+  }
+
+  public <V> Flux<V> flux(Function<T, Flux<V>> f) {
+    List<PublisherHolder<T>> candidates = getActivePublishers();
+    if (candidates.isEmpty()) {
+      return Flux.error(() -> new IllegalStateException(noAvailablePublishersMsg));
+    }
+    return flux(f, candidates);
+  }
+
+  private <V> Flux<V> flux(Function<T, Flux<V>> f, List<PublisherHolder<T>> candidates) {
+    var publisher = candidates.get(0);
+    return f.apply(publisher.get())
+        .onErrorResume(failoverExceptionsPredicate, th -> {
+          publisher.markFailed();
+          if (candidates.size() == 1) {
+            return Flux.error(th);
+          }
+          var newCandidates = candidates.stream().skip(1).filter(PublisherHolder::isActive).toList();
+          if (newCandidates.isEmpty()) {
+            return Flux.error(th);
+          }
+          return flux(f, newCandidates);
+        });
+  }
+
+  /**
+   * Returns list of active publishers, starting with latest active.
+   */
+  private synchronized List<PublisherHolder<T>> getActivePublishers() {
+    var result = new ArrayList<PublisherHolder<T>>();
+    for (int i = 0, j = currentIndex; i < publishers.size(); i++) {
+      var publisher = publishers.get(j);
+      if (publisher.isActive()) {
+        result.add(publisher);
+      } else if (currentIndex == j) {
+        currentIndex = ++currentIndex == publishers.size() ? 0 : currentIndex;
+      }
+      j = ++j == publishers.size() ? 0 : j;
+    }
+    return result;
+  }
+
+  static class PublisherHolder<T> {
+
+    private final long retryGracePeriodMs;
+    private final Supplier<T> supplier;
+    private final AtomicLong lastErrorTs = new AtomicLong();
+    private T publisherInstance;
+
+    PublisherHolder(Supplier<T> supplier, long retryGracePeriodMs) {
+      this.supplier = supplier;
+      this.retryGracePeriodMs = retryGracePeriodMs;
+    }
+
+    synchronized T get() {
+      if (publisherInstance == null) {
+        publisherInstance = supplier.get();
+      }
+      return publisherInstance;
+    }
+
+    void markFailed() {
+      lastErrorTs.set(System.currentTimeMillis());
+    }
+
+    boolean isActive() {
+      return System.currentTimeMillis() - lastErrorTs.get() > retryGracePeriodMs;
+    }
+  }
+
+}

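Usage sketch for the new helper (illustrative only; the URLs and error message are made up, and imports follow the class above). Each call starts from the most recently healthy publisher and skips instances that failed within the retry grace period:

    ReactiveFailover<KsqlApiClient> ksqlClient = ReactiveFailover.create(
        List.of("http://ksqldb-1:8088", "http://ksqldb-2:8088"),    // hypothetical addresses
        url -> new KsqlApiClient(url, null, null, null),
        ReactiveFailover.CONNECTION_REFUSED_EXCEPTION_FILTER,
        "No live ksqlDB instances available",
        ReactiveFailover.DEFAULT_RETRY_GRACE_PERIOD_MS);

    // Flux-returning calls are routed the same way as Mono-returning ones.
    Flux<KsqlResponseTable> tables = ksqlClient.flux(c -> c.execute("LIST TABLES;", Map.of()));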
+ 0 - 66
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/SecuredWebClient.java

@@ -1,66 +0,0 @@
-package com.provectus.kafka.ui.util;
-
-import io.netty.handler.ssl.SslContext;
-import io.netty.handler.ssl.SslContextBuilder;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.security.KeyStore;
-import java.security.KeyStoreException;
-import java.security.NoSuchAlgorithmException;
-import java.security.UnrecoverableKeyException;
-import java.security.cert.CertificateException;
-import javax.net.ssl.KeyManagerFactory;
-import javax.net.ssl.TrustManagerFactory;
-import org.springframework.http.client.reactive.ReactorClientHttpConnector;
-import org.springframework.util.ResourceUtils;
-import org.springframework.web.reactive.function.client.WebClient;
-import reactor.netty.http.client.HttpClient;
-
-public class SecuredWebClient {
-  public static WebClient.Builder configure(
-      String keystoreLocation,
-      String keystorePassword,
-      String truststoreLocation,
-      String truststorePassword)
-      throws NoSuchAlgorithmException, IOException, KeyStoreException, CertificateException, UnrecoverableKeyException {
-    // If we want to customize our TLS configuration, we need at least a truststore
-    if (truststoreLocation == null || truststorePassword == null) {
-      return WebClient.builder();
-    }
-
-    SslContextBuilder contextBuilder = SslContextBuilder.forClient();
-
-    // Prepare truststore
-    KeyStore trustStore = KeyStore.getInstance("JKS");
-    trustStore.load(
-        new FileInputStream((ResourceUtils.getFile(truststoreLocation))),
-        truststorePassword.toCharArray()
-    );
-
-    TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(
-        TrustManagerFactory.getDefaultAlgorithm()
-    );
-    trustManagerFactory.init(trustStore);
-    contextBuilder.trustManager(trustManagerFactory);
-
-    // Prepare keystore only if we got a keystore
-    if (keystoreLocation != null && keystorePassword != null) {
-      KeyStore keyStore = KeyStore.getInstance("JKS");
-      keyStore.load(
-          new FileInputStream(ResourceUtils.getFile(keystoreLocation)),
-          keystorePassword.toCharArray()
-      );
-
-      KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
-      keyManagerFactory.init(keyStore, keystorePassword.toCharArray());
-      contextBuilder.keyManager(keyManagerFactory);
-    }
-
-    // Create webclient
-    SslContext context = contextBuilder.build();
-
-    return WebClient.builder()
-        .clientConnector(new ReactorClientHttpConnector(HttpClient.create().secure(t -> t.sslContext(context))));
-  }
-}

+ 136 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/WebClientConfigurator.java

@@ -0,0 +1,136 @@
+package com.provectus.kafka.ui.util;
+
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
+import com.provectus.kafka.ui.config.ClustersProperties;
+import com.provectus.kafka.ui.exception.ValidationException;
+import io.netty.handler.ssl.SslContext;
+import io.netty.handler.ssl.SslContextBuilder;
+import java.io.FileInputStream;
+import java.security.KeyStore;
+import java.util.function.Consumer;
+import javax.annotation.Nullable;
+import javax.net.ssl.KeyManagerFactory;
+import javax.net.ssl.TrustManagerFactory;
+import lombok.SneakyThrows;
+import org.openapitools.jackson.nullable.JsonNullableModule;
+import org.springframework.http.MediaType;
+import org.springframework.http.client.reactive.ReactorClientHttpConnector;
+import org.springframework.http.codec.ClientCodecConfigurer;
+import org.springframework.http.codec.json.Jackson2JsonDecoder;
+import org.springframework.http.codec.json.Jackson2JsonEncoder;
+import org.springframework.util.ResourceUtils;
+import org.springframework.util.unit.DataSize;
+import org.springframework.web.reactive.function.client.WebClient;
+import reactor.netty.http.client.HttpClient;
+
+public class WebClientConfigurator {
+
+  private final WebClient.Builder builder = WebClient.builder();
+
+  public WebClientConfigurator() {
+    configureObjectMapper(defaultOM());
+  }
+
+  private static ObjectMapper defaultOM() {
+    return new ObjectMapper()
+        .registerModule(new JavaTimeModule())
+        .registerModule(new JsonNullableModule())
+        .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+  }
+
+
+  public WebClientConfigurator configureSsl(@Nullable ClustersProperties.WebClientSsl ssl) {
+    if (ssl != null) {
+      return configureSsl(
+          ssl.getKeystoreLocation(),
+          ssl.getKeystorePassword(),
+          ssl.getTruststoreLocation(),
+          ssl.getTruststorePassword()
+      );
+    }
+    return this;
+  }
+
+  @SneakyThrows
+  public WebClientConfigurator configureSsl(
+      @Nullable String keystoreLocation,
+      @Nullable String keystorePassword,
+      @Nullable String truststoreLocation,
+      @Nullable String truststorePassword) {
+    // If we want to customize our TLS configuration, we need at least a truststore
+    if (truststoreLocation == null || truststorePassword == null) {
+      return this;
+    }
+
+    SslContextBuilder contextBuilder = SslContextBuilder.forClient();
+
+    // Prepare truststore
+    KeyStore trustStore = KeyStore.getInstance("JKS");
+    trustStore.load(
+        new FileInputStream((ResourceUtils.getFile(truststoreLocation))),
+        truststorePassword.toCharArray()
+    );
+
+    TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(
+        TrustManagerFactory.getDefaultAlgorithm()
+    );
+    trustManagerFactory.init(trustStore);
+    contextBuilder.trustManager(trustManagerFactory);
+
+    // Prepare keystore only if we got a keystore
+    if (keystoreLocation != null && keystorePassword != null) {
+      KeyStore keyStore = KeyStore.getInstance("JKS");
+      keyStore.load(
+          new FileInputStream(ResourceUtils.getFile(keystoreLocation)),
+          keystorePassword.toCharArray()
+      );
+
+      KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
+      keyManagerFactory.init(keyStore, keystorePassword.toCharArray());
+      contextBuilder.keyManager(keyManagerFactory);
+    }
+
+    // Create webclient
+    SslContext context = contextBuilder.build();
+
+    builder.clientConnector(new ReactorClientHttpConnector(HttpClient.create().secure(t -> t.sslContext(context))));
+    return this;
+  }
+
+  public WebClientConfigurator configureBasicAuth(@Nullable String username, @Nullable String password) {
+    if (username != null && password != null) {
+      builder.defaultHeaders(httpHeaders -> httpHeaders.setBasicAuth(username, password));
+    } else if (username != null) {
+      throw new ValidationException("You specified username but did not specify password");
+    } else if (password != null) {
+      throw new ValidationException("You specified password but did not specify username");
+    }
+    return this;
+  }
+
+  public WebClientConfigurator configureBufferSize(DataSize maxBuffSize) {
+    builder.codecs(c -> c.defaultCodecs().maxInMemorySize((int) maxBuffSize.toBytes()));
+    return this;
+  }
+
+  public WebClientConfigurator configureObjectMapper(ObjectMapper mapper) {
+    builder.codecs(codecs -> {
+      codecs.defaultCodecs()
+          .jackson2JsonEncoder(new Jackson2JsonEncoder(mapper, MediaType.APPLICATION_JSON));
+      codecs.defaultCodecs()
+          .jackson2JsonDecoder(new Jackson2JsonDecoder(mapper, MediaType.APPLICATION_JSON));
+    });
+    return this;
+  }
+
+  public WebClientConfigurator configureCodecs(Consumer<ClientCodecConfigurer> configurer) {
+    builder.codecs(configurer);
+    return this;
+  }
+
+  public WebClient build() {
+    return builder.build();
+  }
+}

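Usage sketch for the fluent replacement of the removed SecuredWebClient (file paths and credentials are assumptions, not from this change); every configure* step is optional and the builder keeps the Jackson defaults set in the constructor:

    WebClient webClient = new WebClientConfigurator()
        .configureSsl("/etc/kafkaui/keystore.jks", "keystorePass",
                      "/etc/kafkaui/truststore.jks", "truststorePass")
        .configureBasicAuth("admin", "admin-secret")   // both values or neither must be set
        .configureBufferSize(DataSize.ofMegabytes(20))
        .build();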
+ 27 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AnyFieldSchema.java

@@ -0,0 +1,27 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+// Specifies field that can contain any kind of value - primitive, complex and nulls
+public class AnyFieldSchema implements FieldSchema {
+
+  public static AnyFieldSchema get() {
+    return new AnyFieldSchema();
+  }
+
+  private AnyFieldSchema() {
+  }
+
+  @Override
+  public JsonNode toJsonNode(ObjectMapper mapper) {
+    var arr = mapper.createArrayNode();
+    arr.add("number");
+    arr.add("string");
+    arr.add("object");
+    arr.add("array");
+    arr.add("boolean");
+    arr.add("null");
+    return mapper.createObjectNode().set("type", arr);
+  }
+}

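For reference, a tiny sketch of what the new schema element renders to (grounded in toJsonNode above; standard Jackson imports assumed):

    ObjectMapper mapper = new ObjectMapper();
    JsonNode node = AnyFieldSchema.get().toJsonNode(mapper);
    // node -> {"type":["number","string","object","array","boolean","null"]}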
+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java

@@ -110,7 +110,7 @@ public class AvroJsonSchemaConverter implements JsonSchemaConverter<Schema> {
       return createRefField(definitionName);
     }
     // adding stub record, need to avoid infinite recursion
-    definitions.put(definitionName, new ObjectFieldSchema(Map.of(), List.of()));
+    definitions.put(definitionName, ObjectFieldSchema.EMPTY);
 
     final Map<String, FieldSchema> fields = schema.getFields().stream()
         .map(f -> Tuples.of(f.name(), convertField(f, definitions)))

+ 4 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonSchema.java

@@ -23,6 +23,7 @@ public class JsonSchema {
   private final Map<String, FieldSchema> properties;
   private final Map<String, FieldSchema> definitions;
   private final List<String> required;
+  private final String rootRef;
 
   public String toJson() {
     final ObjectMapper mapper = new ObjectMapper();
@@ -53,6 +54,9 @@ public class JsonSchema {
               ))
       ));
     }
+    if (rootRef != null) {
+      objectNode.set("$ref", new TextNode(rootRef));
+    }
     return objectNode.toString();
   }
 

+ 3 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/ObjectFieldSchema.java

@@ -10,6 +10,9 @@ import reactor.util.function.Tuple2;
 import reactor.util.function.Tuples;
 
 public class ObjectFieldSchema implements FieldSchema {
+
+  public static final ObjectFieldSchema EMPTY = new ObjectFieldSchema(Map.of(), List.of());
+
   private final Map<String, FieldSchema> properties;
   private final List<String> required;
 

+ 146 - 65
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/ProtobufSchemaConverter.java

@@ -1,90 +1,103 @@
 package com.provectus.kafka.ui.util.jsonschema;
 
+import static java.util.Objects.requireNonNull;
+
+import com.fasterxml.jackson.databind.node.BigIntegerNode;
+import com.fasterxml.jackson.databind.node.IntNode;
+import com.fasterxml.jackson.databind.node.LongNode;
+import com.fasterxml.jackson.databind.node.TextNode;
+import com.google.common.primitives.UnsignedInteger;
+import com.google.common.primitives.UnsignedLong;
+import com.google.protobuf.Any;
+import com.google.protobuf.BoolValue;
+import com.google.protobuf.BytesValue;
 import com.google.protobuf.Descriptors;
+import com.google.protobuf.DoubleValue;
+import com.google.protobuf.Duration;
+import com.google.protobuf.FieldMask;
+import com.google.protobuf.FloatValue;
+import com.google.protobuf.Int32Value;
+import com.google.protobuf.Int64Value;
+import com.google.protobuf.ListValue;
+import com.google.protobuf.StringValue;
+import com.google.protobuf.Struct;
+import com.google.protobuf.Timestamp;
+import com.google.protobuf.UInt32Value;
+import com.google.protobuf.UInt64Value;
+import com.google.protobuf.Value;
 import java.net.URI;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
 import java.util.stream.Collectors;
 import reactor.util.function.Tuple2;
 import reactor.util.function.Tuples;
 
 public class ProtobufSchemaConverter implements JsonSchemaConverter<Descriptors.Descriptor> {
-  @Override
-  public JsonSchema convert(URI basePath, Descriptors.Descriptor schema) {
-    final JsonSchema.JsonSchemaBuilder builder = JsonSchema.builder();
 
-    builder.id(basePath.resolve(schema.getFullName()));
-    builder.type(new SimpleJsonType(JsonType.Type.OBJECT));
+  private final Set<String> simpleTypesWrapperNames = Set.of(
+      BoolValue.getDescriptor().getFullName(),
+      Int32Value.getDescriptor().getFullName(),
+      UInt32Value.getDescriptor().getFullName(),
+      Int64Value.getDescriptor().getFullName(),
+      UInt64Value.getDescriptor().getFullName(),
+      StringValue.getDescriptor().getFullName(),
+      BytesValue.getDescriptor().getFullName(),
+      FloatValue.getDescriptor().getFullName(),
+      DoubleValue.getDescriptor().getFullName()
+  );
 
+  @Override
+  public JsonSchema convert(URI basePath, Descriptors.Descriptor schema) {
     Map<String, FieldSchema> definitions = new HashMap<>();
-    final ObjectFieldSchema root =
-        (ObjectFieldSchema) convertObjectSchema(schema, definitions, false);
-    builder.definitions(definitions);
-
-    builder.properties(root.getProperties());
-    builder.required(root.getRequired());
-
-    return builder.build();
+    RefFieldSchema rootRef = registerObjectAndReturnRef(schema, definitions);
+    return JsonSchema.builder()
+        .id(basePath.resolve(schema.getFullName()))
+        .type(new SimpleJsonType(JsonType.Type.OBJECT))
+        .rootRef(rootRef.getRef())
+        .definitions(definitions)
+        .build();
   }
 
-  private FieldSchema convertObjectSchema(Descriptors.Descriptor schema,
-                                          Map<String, FieldSchema> definitions, boolean ref) {
-    final Map<String, FieldSchema> fields = schema.getFields().stream()
-        .map(f -> Tuples.of(f.getName(), convertField(f, definitions)))
-        .collect(Collectors.toMap(
-            Tuple2::getT1,
-            Tuple2::getT2
-        ));
-
-    final Map<String, OneOfFieldSchema> oneOfFields = schema.getOneofs().stream().map(o ->
-        Tuples.of(
-            o.getName(),
-            new OneOfFieldSchema(
-                o.getFields().stream().map(
-                    Descriptors.FieldDescriptor::getName
-                ).map(fields::get).collect(Collectors.toList())
-            )
-        )
-    ).collect(Collectors.toMap(
-        Tuple2::getT1,
-        Tuple2::getT2
-    ));
-
-    final List<String> allOneOfFields = schema.getOneofs().stream().flatMap(o ->
-        o.getFields().stream().map(Descriptors.FieldDescriptor::getName)
-    ).collect(Collectors.toList());
+  private RefFieldSchema registerObjectAndReturnRef(Descriptors.Descriptor schema,
+                                                    Map<String, FieldSchema> definitions) {
+    var definition = schema.getFullName();
+    if (definitions.containsKey(definition)) {
+      return createRefField(definition);
+    }
+    // adding stub record, need to avoid infinite recursion
+    definitions.put(definition, ObjectFieldSchema.EMPTY);
 
-    final Map<String, FieldSchema> excludedOneOf = fields.entrySet().stream()
-        .filter(f -> !allOneOfFields.contains(f.getKey()))
-        .collect(Collectors.toMap(
-            Map.Entry::getKey,
-            Map.Entry::getValue
-        ));
+    Map<String, FieldSchema> fields = schema.getFields().stream()
+        .map(f -> Tuples.of(f.getName(), convertField(f, definitions)))
+        .collect(Collectors.toMap(Tuple2::getT1, Tuple2::getT2));
 
-    Map<String, FieldSchema> finalFields = new HashMap<>(excludedOneOf);
-    finalFields.putAll(oneOfFields);
+    List<String> required = schema.getFields().stream()
+        .filter(Descriptors.FieldDescriptor::isRequired)
+        .map(Descriptors.FieldDescriptor::getName)
+        .collect(Collectors.toList());
 
-    final List<String> required = schema.getFields().stream()
-        .filter(f -> !f.isOptional())
-        .map(Descriptors.FieldDescriptor::getName).collect(Collectors.toList());
+    // replacing stub record with actual object structure
+    definitions.put(definition, new ObjectFieldSchema(fields, required));
+    return createRefField(definition);
+  }
 
-    if (ref) {
-      String definitionName = String.format("record.%s", schema.getFullName());
-      definitions.put(definitionName, new ObjectFieldSchema(finalFields, required));
-      return new RefFieldSchema(String.format("#/definitions/%s", definitionName));
-    } else {
-      return new ObjectFieldSchema(fields, required);
-    }
+  private RefFieldSchema createRefField(String definition) {
+    return new RefFieldSchema("#/definitions/%s".formatted(definition));
   }
 
   private FieldSchema convertField(Descriptors.FieldDescriptor field,
                                    Map<String, FieldSchema> definitions) {
+    Optional<FieldSchema> wellKnownTypeSchema = convertProtoWellKnownTypes(field);
+    if (wellKnownTypeSchema.isPresent()) {
+      return wellKnownTypeSchema.get();
+    }
     final JsonType jsonType = convertType(field);
-
     FieldSchema fieldSchema;
     if (jsonType.getType().equals(JsonType.Type.OBJECT)) {
-      fieldSchema = convertObjectSchema(field.getMessageType(), definitions, true);
+      fieldSchema = registerObjectAndReturnRef(field.getMessageType(), definitions);
     } else {
       fieldSchema = new SimpleFieldSchema(jsonType);
     }
@@ -96,20 +109,88 @@ public class ProtobufSchemaConverter implements JsonSchemaConverter<Descriptors.
     }
   }
 
+  // converts Protobuf Well-known type (from google.protobuf.* packages) to Json-schema types
+  // see JsonFormat::buildWellKnownTypePrinters for impl details
+  private Optional<FieldSchema> convertProtoWellKnownTypes(Descriptors.FieldDescriptor field) {
+    // all well-known types are messages
+    if (field.getType() != Descriptors.FieldDescriptor.Type.MESSAGE) {
+      return Optional.empty();
+    }
+    String typeName = field.getMessageType().getFullName();
+    if (typeName.equals(Timestamp.getDescriptor().getFullName())) {
+      return Optional.of(
+          new SimpleFieldSchema(
+              new SimpleJsonType(JsonType.Type.STRING, Map.of("format", new TextNode("date-time")))));
+    }
+    if (typeName.equals(Duration.getDescriptor().getFullName())) {
+      return Optional.of(
+          new SimpleFieldSchema(
+              //TODO: current UI is failing when format=duration is set - need to fix this first
+              new SimpleJsonType(JsonType.Type.STRING // , Map.of("format", new TextNode("duration"))
+              )));
+    }
+    if (typeName.equals(FieldMask.getDescriptor().getFullName())) {
+      return Optional.of(new SimpleFieldSchema(new SimpleJsonType(JsonType.Type.STRING)));
+    }
+    if (typeName.equals(Any.getDescriptor().getFullName()) || typeName.equals(Struct.getDescriptor().getFullName())) {
+      return Optional.of(ObjectFieldSchema.EMPTY);
+    }
+    if (typeName.equals(Value.getDescriptor().getFullName())) {
+      return Optional.of(AnyFieldSchema.get());
+    }
+    if (typeName.equals(ListValue.getDescriptor().getFullName())) {
+      return Optional.of(new ArrayFieldSchema(AnyFieldSchema.get()));
+    }
+    if (simpleTypesWrapperNames.contains(typeName)) {
+      return Optional.of(new SimpleFieldSchema(
+          convertType(requireNonNull(field.getMessageType().findFieldByName("value")))));
+    }
+    return Optional.empty();
+  }
 
 
   private JsonType convertType(Descriptors.FieldDescriptor field) {
   private JsonType convertType(Descriptors.FieldDescriptor field) {
     switch (field.getType()) {
     switch (field.getType()) {
       case INT32:
       case INT32:
-      case INT64:
+      case FIXED32:
+      case SFIXED32:
       case SINT32:
       case SINT32:
-      case SINT64:
+        return new SimpleJsonType(
+            JsonType.Type.INTEGER,
+            Map.of(
+                "maximum", IntNode.valueOf(Integer.MAX_VALUE),
+                "minimum", IntNode.valueOf(Integer.MIN_VALUE)
+            )
+        );
       case UINT32:
-      case UINT64:
-      case FIXED32:
+        return new SimpleJsonType(
+            JsonType.Type.INTEGER,
+            Map.of(
+                "maximum", LongNode.valueOf(UnsignedInteger.MAX_VALUE.longValue()),
+                "minimum", IntNode.valueOf(0)
+            )
+        );
+      //TODO: actually all *64 types will be printed with quotes (as strings),
+      // see JsonFormat::printSingleFieldValue for impl. This can cause problems when you copy-paste from messages
+      // table to `Produce` area - need to think if it is critical or not.
+      case INT64:
       case FIXED64:
-      case SFIXED32:
       case SFIXED64:
-        return new SimpleJsonType(JsonType.Type.INTEGER);
+      case SINT64:
+        return new SimpleJsonType(
+            JsonType.Type.INTEGER,
+            Map.of(
+                "maximum", LongNode.valueOf(Long.MAX_VALUE),
+                "minimum", LongNode.valueOf(Long.MIN_VALUE)
+            )
+        );
+      case UINT64:
+        return new SimpleJsonType(
+            JsonType.Type.INTEGER,
+            Map.of(
+                "maximum", new BigIntegerNode(UnsignedLong.MAX_VALUE.bigIntegerValue()),
+                "minimum", LongNode.valueOf(0)
+            )
+        );
       case MESSAGE:
       case GROUP:
         return new SimpleJsonType(JsonType.Type.OBJECT);

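A short, hypothetical invocation sketch for the reworked converter (MyMessage stands in for any protoc-generated class; the base URI is a placeholder). The root of the produced schema is now a $ref into definitions keyed by the message's full name, and google.protobuf well-known types get dedicated mappings (e.g. Timestamp -> string with format=date-time):

    Descriptors.Descriptor descriptor = MyMessage.getDescriptor();   // hypothetical generated class
    JsonSchema jsonSchema = new ProtobufSchemaConverter()
        .convert(URI.create("http://example/"), descriptor);
    System.out.println(jsonSchema.toJson());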
+ 4 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/RefFieldSchema.java

@@ -15,4 +15,8 @@ public class RefFieldSchema implements FieldSchema {
   public JsonNode toJsonNode(ObjectMapper mapper) {
     return mapper.createObjectNode().set("$ref", new TextNode(ref));
   }
+
+  public String getRef() {
+    return ref;
+  }
 }

+ 12 - 4
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/SimpleJsonType.java

@@ -3,19 +3,27 @@ package com.provectus.kafka.ui.util.jsonschema;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.TextNode;
+import com.google.common.collect.ImmutableMap;
 import java.util.Map;
 
 public class SimpleJsonType extends JsonType {
 
+  private final Map<String, JsonNode> additionalTypeProperties;
+
   public SimpleJsonType(Type type) {
+    this(type, Map.of());
+  }
+
+  public SimpleJsonType(Type type, Map<String, JsonNode> additionalTypeProperties) {
     super(type);
+    this.additionalTypeProperties = additionalTypeProperties;
   }
 
   @Override
   public Map<String, JsonNode> toJsonNode(ObjectMapper mapper) {
-    return Map.of(
-        "type",
-        new TextNode(type.getName())
-    );
+    return ImmutableMap.<String, JsonNode>builder()
+        .put("type", new TextNode(type.getName()))
+        .putAll(additionalTypeProperties)
+        .build();
   }
 }

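Sketch of how the extra constructor is used by the protobuf converter above to attach bounds next to the "type" keyword (the values shown are the uint32 range; Jackson node imports assumed):

    SimpleJsonType uint32Type = new SimpleJsonType(
        JsonType.Type.INTEGER,
        Map.of(
            "minimum", IntNode.valueOf(0),
            "maximum", LongNode.valueOf(4294967295L)));
    // toJsonNode(mapper) -> {"type":"integer","minimum":0,"maximum":4294967295}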
+ 0 - 69
kafka-ui-api/src/test/java/com/provectus/kafka/ui/model/FailoverUrlListTest.java

@@ -1,69 +0,0 @@
-package com.provectus.kafka.ui.model;
-
-import static org.assertj.core.api.Assertions.assertThat;
-
-import java.util.List;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Nested;
-import org.junit.jupiter.api.Test;
-
-
-class FailoverUrlListTest {
-
-  public static final int RETRY_GRACE_PERIOD_IN_MS = 10;
-
-  @Nested
-  @SuppressWarnings("all")
-  class ShouldHaveFailoverAvailableWhen {
-
-    private FailoverUrlList failoverUrlList;
-
-    @BeforeEach
-    void before() {
-      failoverUrlList = new FailoverUrlList(List.of("localhost:123", "farawayhost:5678"), RETRY_GRACE_PERIOD_IN_MS);
-    }
-
-    @Test
-    void thereAreNoFailures() {
-      assertThat(failoverUrlList.isFailoverAvailable()).isTrue();
-    }
-
-    @Test
-    void withLessFailuresThenAvailableUrls() {
-      failoverUrlList.fail(failoverUrlList.current());
-
-      assertThat(failoverUrlList.isFailoverAvailable()).isTrue();
-    }
-
-    @Test
-    void withAllFailuresAndAtLeastOneAfterTheGraceTimeoutPeriod() throws InterruptedException {
-      failoverUrlList.fail(failoverUrlList.current());
-      failoverUrlList.fail(failoverUrlList.current());
-
-      Thread.sleep(RETRY_GRACE_PERIOD_IN_MS + 1);
-
-      assertThat(failoverUrlList.isFailoverAvailable()).isTrue();
-    }
-
-    @Nested
-    @SuppressWarnings("all")
-    class ShouldNotHaveFailoverAvailableWhen {
-
-      private FailoverUrlList failoverUrlList;
-
-      @BeforeEach
-      void before() {
-        failoverUrlList = new FailoverUrlList(List.of("localhost:123", "farawayhost:5678"), 1000);
-      }
-
-      @Test
-      void allFailuresWithinGracePeriod() {
-        failoverUrlList.fail(failoverUrlList.current());
-        failoverUrlList.fail(failoverUrlList.current());
-
-        assertThat(failoverUrlList.isFailoverAvailable()).isFalse();
-      }
-    }
-  }
-}
-

+ 92 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/AvroEmbeddedSerdeTest.java

@@ -0,0 +1,92 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.Serde;
+import com.provectus.kafka.ui.serdes.PropertyResolverImpl;
+import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.GenericRecord;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.EnumSource;
+
+class AvroEmbeddedSerdeTest {
+
+  private AvroEmbeddedSerde avroEmbeddedSerde;
+
+  @BeforeEach
+  void init() {
+    avroEmbeddedSerde = new AvroEmbeddedSerde();
+    avroEmbeddedSerde.configure(
+        PropertyResolverImpl.empty(),
+        PropertyResolverImpl.empty(),
+        PropertyResolverImpl.empty()
+    );
+  }
+
+  @ParameterizedTest
+  @EnumSource
+  void canDeserializeReturnsTrueForAllTargets(Serde.Target target) {
+    assertThat(avroEmbeddedSerde.canDeserialize("anyTopic", target))
+        .isTrue();
+  }
+
+  @ParameterizedTest
+  @EnumSource
+  void canSerializeReturnsFalseForAllTargets(Serde.Target target) {
+    assertThat(avroEmbeddedSerde.canSerialize("anyTopic", target))
+        .isFalse();
+  }
+
+  @Test
+  void deserializerParsesAvroDataWithEmbeddedSchema() throws Exception {
+    Schema schema = new Schema.Parser().parse("""
+        {
+          "type": "record",
+          "name": "TestAvroRecord",
+          "fields": [
+            { "name": "field1", "type": "string" },
+            { "name": "field2", "type": "int" }
+          ]
+        }
+        """
+    );
+    GenericRecord record = new GenericData.Record(schema);
+    record.put("field1", "this is test msg");
+    record.put("field2", 100500);
+
+    String jsonRecord = new String(AvroSchemaUtils.toJson(record));
+    byte[] serializedRecordBytes = serializeAvroWithEmbeddedSchema(record);
+
+    var deserializer = avroEmbeddedSerde.deserializer("anyTopic", Serde.Target.KEY);
+    DeserializeResult result = deserializer.deserialize(null, serializedRecordBytes);
+    assertThat(result.getType()).isEqualTo(DeserializeResult.Type.JSON);
+    assertThat(result.getAdditionalProperties()).isEmpty();
+    assertJsonEquals(jsonRecord, result.getResult());
+  }
+
+  private void assertJsonEquals(String expected, String actual) throws IOException {
+    var mapper = new JsonMapper();
+    assertThat(mapper.readTree(actual)).isEqualTo(mapper.readTree(expected));
+  }
+
+  private byte[] serializeAvroWithEmbeddedSchema(GenericRecord record) throws IOException {
+    try (DataFileWriter<GenericRecord> writer = new DataFileWriter<>(new GenericDatumWriter<>());
+         ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
+      writer.create(record.getSchema(), baos);
+      writer.append(record);
+      writer.flush();
+      return baos.toByteArray();
+    }
+  }
+
+}

+ 46 - 8
kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java

@@ -22,6 +22,7 @@ import org.apache.avro.generic.GenericDatumWriter;
 import org.apache.avro.io.Encoder;
 import org.apache.avro.io.EncoderFactory;
 import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Nested;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.CsvSource;
@@ -35,7 +36,7 @@ class SchemaRegistrySerdeTest {
   @BeforeEach
   void init() {
     serde = new SchemaRegistrySerde();
-    serde.configure(List.of("wontbeused"), registryClient, "%s-key", "%s-value");
+    serde.configure(List.of("wontbeused"), registryClient, "%s-key", "%s-value", true);
   }
 
   @ParameterizedTest
@@ -129,24 +130,61 @@ class SchemaRegistrySerdeTest {
         .contains(Map.entry("schemaId", schemaId));
   }
 
+  @Nested
+  class SerdeWithDisabledSubjectExistenceCheck {
+
+    @BeforeEach
+    void init() {
+      serde.configure(List.of("wontbeused"), registryClient, "%s-key", "%s-value", false);
+    }
+
+    @Test
+    void canDeserializeAlwaysReturnsTrue() {
+      String topic = RandomString.make(10);
+      assertThat(serde.canDeserialize(topic, Serde.Target.KEY)).isTrue();
+      assertThat(serde.canDeserialize(topic, Serde.Target.VALUE)).isTrue();
+    }
+  }
+
+  @Nested
+  class SerdeWithEnabledSubjectExistenceCheck {
+
+    @BeforeEach
+    void init() {
+      serde.configure(List.of("wontbeused"), registryClient, "%s-key", "%s-value", true);
+    }
+
+    @Test
+    void canDeserializeReturnsTrueIfSubjectExists() throws Exception {
+      String topic = RandomString.make(10);
+      registryClient.register(topic + "-key", new AvroSchema("\"int\""));
+      registryClient.register(topic + "-value", new AvroSchema("\"int\""));
+
+      assertThat(serde.canDeserialize(topic, Serde.Target.KEY)).isTrue();
+      assertThat(serde.canDeserialize(topic, Serde.Target.VALUE)).isTrue();
+    }
+
+    @Test
+    void canDeserializeReturnsFalseIfSubjectDoesNotExist() {
+      String topic = RandomString.make(10);
+      assertThat(serde.canDeserialize(topic, Serde.Target.KEY)).isFalse();
+      assertThat(serde.canDeserialize(topic, Serde.Target.VALUE)).isFalse();
+    }
+  }
+
   @Test
   @Test
   void canDeserializeAndCanSerializeReturnsTrueIfSubjectExists() throws Exception {
     String topic = RandomString.make(10);
     registryClient.register(topic + "-key", new AvroSchema("\"int\""));
     registryClient.register(topic + "-value", new AvroSchema("\"int\""));
 
-    assertThat(serde.canDeserialize(topic, Serde.Target.KEY)).isTrue();
-    assertThat(serde.canDeserialize(topic, Serde.Target.VALUE)).isTrue();
-
     assertThat(serde.canSerialize(topic, Serde.Target.KEY)).isTrue();
     assertThat(serde.canSerialize(topic, Serde.Target.VALUE)).isTrue();
   }
 
   @Test
+  void canSerializeReturnsFalseIfSubjectDoesNotExist() {
     String topic = RandomString.make(10);
     String topic = RandomString.make(10);
-    assertThat(serde.canDeserialize(topic, Serde.Target.KEY)).isFalse();
-    assertThat(serde.canDeserialize(topic, Serde.Target.VALUE)).isFalse();
     assertThat(serde.canSerialize(topic, Serde.Target.KEY)).isFalse();
     assertThat(serde.canSerialize(topic, Serde.Target.VALUE)).isFalse();
   }
     return output.toByteArray();
     return output.toByteArray();
   }
 
+}

+ 15 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ConfigTest.java

@@ -4,10 +4,13 @@ import static org.assertj.core.api.Assertions.assertThat;
 
 
 import com.provectus.kafka.ui.AbstractIntegrationTest;
 import com.provectus.kafka.ui.AbstractIntegrationTest;
 import com.provectus.kafka.ui.model.BrokerConfigDTO;
 import com.provectus.kafka.ui.model.BrokerConfigDTO;
+import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.model.ServerStatusDTO;
 import java.time.Duration;
 import java.time.Duration;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
+import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.core.ParameterizedTypeReference;
   @Autowired
   @Autowired
   private WebTestClient webTestClient;
 
+  void waitUntilStatsInitialized() {
+    Awaitility.await()
+        .atMost(Duration.ofSeconds(10))
+        .pollInSameThread()
+        .until(() -> {
+          var stats = applicationContext.getBean(StatisticsCache.class)
+              .get(KafkaCluster.builder().name(LOCAL).build());
+          return stats.getStatus() == ServerStatusDTO.ONLINE;
+        });
+  }
+
   @Test
   @Test
   public void testAlterConfig() {
     String name = "background.threads";
+ 3 - 3
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/MessagesServiceTest.java

@@ -61,12 +61,12 @@ class MessagesServiceTest extends AbstractIntegrationTest {
   }
   }
 
   @Test
-  void maskingAppliedOnConfiguredClusters() {
+  void maskingAppliedOnConfiguredClusters() throws Exception {
     String testTopic = MASKED_TOPICS_PREFIX + UUID.randomUUID();
     try (var producer = KafkaTestProducer.forKafka(kafka)) {
       createTopic(new NewTopic(testTopic, 1, (short) 1));
       producer.send(testTopic, "message1");
+      producer.send(testTopic, "message2").get();
 
 
       Flux<TopicMessageDTO> msgsFlux = messagesService.loadMessages(
       Flux<TopicMessageDTO> msgsFlux = messagesService.loadMessages(
           cluster,
           cluster,
@@ -91,4 +91,4 @@ class MessagesServiceTest extends AbstractIntegrationTest {
     }
     }
   }
 
+}

+ 91 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ReactiveAdminClientTest.java

@@ -2,14 +2,18 @@ package com.provectus.kafka.ui.service;
 
 
 import static com.provectus.kafka.ui.service.ReactiveAdminClient.toMonoWithExceptionFilter;
 import static com.provectus.kafka.ui.service.ReactiveAdminClient.toMonoWithExceptionFilter;
 import static java.util.Objects.requireNonNull;
 import static java.util.Objects.requireNonNull;
+import static org.apache.kafka.clients.admin.ListOffsetsResult.ListOffsetsResultInfo;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.assertThat;
 
 
 import com.provectus.kafka.ui.AbstractIntegrationTest;
 import com.provectus.kafka.ui.AbstractIntegrationTest;
 import com.provectus.kafka.ui.producer.KafkaTestProducer;
 import com.provectus.kafka.ui.producer.KafkaTestProducer;
+import java.time.Duration;
 import java.util.ArrayList;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.List;
 import java.util.Map;
 import java.util.Map;
+import java.util.Properties;
 import java.util.UUID;
 import java.util.UUID;
+import java.util.function.Function;
 import java.util.stream.Stream;
 import java.util.stream.Stream;
 import lombok.SneakyThrows;
 import lombok.SneakyThrows;
 import org.apache.kafka.clients.admin.AdminClient;
 import org.apache.kafka.clients.admin.AdminClient;
@@ -18,12 +22,16 @@ import org.apache.kafka.clients.admin.Config;
 import org.apache.kafka.clients.admin.ConfigEntry;
 import org.apache.kafka.clients.admin.ConfigEntry;
 import org.apache.kafka.clients.admin.NewTopic;
 import org.apache.kafka.clients.admin.OffsetSpec;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.consumer.OffsetAndMetadata;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.common.KafkaFuture;
 import org.apache.kafka.common.TopicPartition;
 import org.apache.kafka.common.config.ConfigResource;
 import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
 import org.apache.kafka.common.internals.KafkaFutureImpl;
 import org.junit.function.ThrowingRunnable;
 import org.junit.function.ThrowingRunnable;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.BeforeEach;
@@ -96,6 +104,14 @@ class ReactiveAdminClientTest extends AbstractIntegrationTest {
     clearings.add(() -> adminClient.deleteTopics(Stream.of(topics).map(NewTopic::name).toList()).all().get());
   }

+  void fillTopic(String topic, int msgsCnt) {
+    try (var producer = KafkaTestProducer.forKafka(kafka)) {
+      for (int i = 0; i < msgsCnt; i++) {
+        producer.send(topic, UUID.randomUUID().toString());
+      }
+    }
+  }
+
   @Test
   void testToMonoWithExceptionFilter() {
     var failedFuture = new KafkaFutureImpl<String>();
@@ -152,4 +168,79 @@ class ReactiveAdminClientTest extends AbstractIntegrationTest {
         .verifyComplete();
   }

+
+  @Test
+  void testListConsumerGroupOffsets() throws Exception {
+    String topic = UUID.randomUUID().toString();
+    String anotherTopic = UUID.randomUUID().toString();
+    createTopics(new NewTopic(topic, 2, (short) 1), new NewTopic(anotherTopic, 1, (short) 1));
+    fillTopic(topic, 10);
+
+    Function<String, KafkaConsumer<String, String>> consumerSupplier = groupName -> {
+      Properties p = new Properties();
+      p.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers());
+      p.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupName);
+      p.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+      p.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+      p.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+      p.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
+      return new KafkaConsumer<String, String>(p);
+    };
+
+    String fullyPolledConsumer = UUID.randomUUID().toString();
+    try (KafkaConsumer<String, String> c = consumerSupplier.apply(fullyPolledConsumer)) {
+      c.subscribe(List.of(topic));
+      int polled = 0;
+      while (polled < 10) {
+        polled += c.poll(Duration.ofMillis(50)).count();
+      }
+      c.commitSync();
+    }
+
+    String polled1MsgConsumer = UUID.randomUUID().toString();
+    try (KafkaConsumer<String, String> c = consumerSupplier.apply(polled1MsgConsumer)) {
+      c.subscribe(List.of(topic));
+      c.poll(Duration.ofMillis(100));
+      c.commitSync(Map.of(tp(topic, 0), new OffsetAndMetadata(1)));
+    }
+
+    String noCommitConsumer = UUID.randomUUID().toString();
+    try (KafkaConsumer<String, String> c = consumerSupplier.apply(noCommitConsumer)) {
+      c.subscribe(List.of(topic));
+      c.poll(Duration.ofMillis(100));
+    }
+
+    Map<TopicPartition, ListOffsetsResultInfo> endOffsets = adminClient.listOffsets(Map.of(
+        tp(topic, 0), OffsetSpec.latest(),
+        tp(topic, 1), OffsetSpec.latest())).all().get();
+
+    StepVerifier.create(
+            reactiveAdminClient.listConsumerGroupOffsets(
+                List.of(fullyPolledConsumer, polled1MsgConsumer, noCommitConsumer),
+                List.of(
+                    tp(topic, 0),
+                    tp(topic, 1),
+                    tp(anotherTopic, 0))
+            )
+        ).assertNext(table -> {
+
+          assertThat(table.row(polled1MsgConsumer))
+              .containsEntry(tp(topic, 0), 1L)
+              .hasSize(1);
+
+          assertThat(table.row(noCommitConsumer))
+              .isEmpty();
+
+          assertThat(table.row(fullyPolledConsumer))
+              .containsEntry(tp(topic, 0), endOffsets.get(tp(topic, 0)).offset())
+              .containsEntry(tp(topic, 1), endOffsets.get(tp(topic, 1)).offset())
+              .hasSize(2);
+        })
+        .verifyComplete();
+  }
+
+  private static TopicPartition tp(String topic, int partition) {
+    return new TopicPartition(topic, partition);
+  }
+
 }

+ 43 - 43
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/SchemaRegistryPaginationTest.java

@@ -7,78 +7,78 @@ import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;

 import com.provectus.kafka.ui.controller.SchemasController;
-import com.provectus.kafka.ui.mapper.ClusterMapper;
-import com.provectus.kafka.ui.model.InternalSchemaRegistry;
 import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.model.SchemaSubjectDTO;
-import com.provectus.kafka.ui.service.rbac.AccessControlService;
+import com.provectus.kafka.ui.sr.model.Compatibility;
+import com.provectus.kafka.ui.sr.model.SchemaSubject;
 import com.provectus.kafka.ui.util.AccessControlServiceMock;
+import com.provectus.kafka.ui.util.ReactiveFailover;
 import java.util.Comparator;
+import java.util.List;
 import java.util.Optional;
 import java.util.stream.IntStream;
 import org.junit.jupiter.api.Test;
-import org.springframework.test.util.ReflectionTestUtils;
 import reactor.core.publisher.Mono;

 public class SchemaRegistryPaginationTest {

   private static final String LOCAL_KAFKA_CLUSTER_NAME = "local";

-  private final SchemaRegistryService schemaRegistryService = mock(SchemaRegistryService.class);
-  private final ClustersStorage clustersStorage = mock(ClustersStorage.class);
-  private final ClusterMapper clusterMapper = mock(ClusterMapper.class);
-  private final AccessControlService accessControlService = new AccessControlServiceMock().getMock();
+  private SchemasController controller;

-  private final SchemasController controller
-      = new SchemasController(clusterMapper, schemaRegistryService, accessControlService);
+  private void init(List<String> subjects) {
+    ClustersStorage clustersStorage = mock(ClustersStorage.class);
+    when(clustersStorage.getClusterByName(isA(String.class)))
+        .thenReturn(Optional.of(buildKafkaCluster(LOCAL_KAFKA_CLUSTER_NAME)));

-  private void init(String[] subjects) {
+    SchemaRegistryService schemaRegistryService = mock(SchemaRegistryService.class);
     when(schemaRegistryService.getAllSubjectNames(isA(KafkaCluster.class)))
-        .thenReturn(Mono.just(subjects));
+                .thenReturn(Mono.just(subjects));
     when(schemaRegistryService
-        .getAllLatestVersionSchemas(isA(KafkaCluster.class), anyList())).thenCallRealMethod();
-    when(clustersStorage.getClusterByName(isA(String.class)))
-        .thenReturn(Optional.of(buildKafkaCluster(LOCAL_KAFKA_CLUSTER_NAME)));
+            .getAllLatestVersionSchemas(isA(KafkaCluster.class), anyList())).thenCallRealMethod();
     when(schemaRegistryService.getLatestSchemaVersionBySubject(isA(KafkaCluster.class), isA(String.class)))
-        .thenAnswer(a -> Mono.just(new SchemaSubjectDTO().subject(a.getArgument(1))));
+            .thenAnswer(a -> Mono.just(
+                new SchemaRegistryService.SubjectWithCompatibilityLevel(
+                    new SchemaSubject().subject(a.getArgument(1)), Compatibility.FULL)));

-    ReflectionTestUtils.setField(controller, "clustersStorage", clustersStorage);
+    this.controller = new SchemasController(schemaRegistryService, new AccessControlServiceMock().getMock());
+    this.controller.setClustersStorage(clustersStorage);
   }

   @Test
   void shouldListFirst25andThen10Schemas() {
     init(
-        IntStream.rangeClosed(1, 100)
-            .boxed()
-            .map(num -> "subject" + num)
-            .toArray(String[]::new)
+            IntStream.rangeClosed(1, 100)
+                    .boxed()
+                    .map(num -> "subject" + num)
+                    .toList()
     );
     var schemasFirst25 = controller.getSchemas(LOCAL_KAFKA_CLUSTER_NAME,
-        null, null, null, null).block();
+            null, null, null, null).block();
     assertThat(schemasFirst25.getBody().getPageCount()).isEqualTo(4);
     assertThat(schemasFirst25.getBody().getSchemas()).hasSize(25);
     assertThat(schemasFirst25.getBody().getSchemas())
-        .isSortedAccordingTo(Comparator.comparing(SchemaSubjectDTO::getSubject));
+            .isSortedAccordingTo(Comparator.comparing(SchemaSubjectDTO::getSubject));

     var schemasFirst10 = controller.getSchemas(LOCAL_KAFKA_CLUSTER_NAME,
-        null, 10, null, null).block();
+            null, 10, null, null).block();

     assertThat(schemasFirst10.getBody().getPageCount()).isEqualTo(10);
     assertThat(schemasFirst10.getBody().getSchemas()).hasSize(10);
     assertThat(schemasFirst10.getBody().getSchemas())
-        .isSortedAccordingTo(Comparator.comparing(SchemaSubjectDTO::getSubject));
+            .isSortedAccordingTo(Comparator.comparing(SchemaSubjectDTO::getSubject));
   }

   @Test
   void shouldListSchemasContaining_1() {
     init(
-        IntStream.rangeClosed(1, 100)
-            .boxed()
-            .map(num -> "subject" + num)
-            .toArray(String[]::new)
+              IntStream.rangeClosed(1, 100)
+                      .boxed()
+                      .map(num -> "subject" + num)
+                      .toList()
     );
     var schemasSearch7 = controller.getSchemas(LOCAL_KAFKA_CLUSTER_NAME,
-        null, null, "1", null).block();
+            null, null, "1", null).block();
     assertThat(schemasSearch7.getBody().getPageCount()).isEqualTo(1);
     assertThat(schemasSearch7.getBody().getSchemas()).hasSize(20);
   }
@@ -86,13 +86,13 @@ public class SchemaRegistryPaginationTest {
   @Test
   void shouldCorrectlyHandleNonPositivePageNumberAndPageSize() {
     init(
-        IntStream.rangeClosed(1, 100)
-            .boxed()
-            .map(num -> "subject" + num)
-            .toArray(String[]::new)
+                IntStream.rangeClosed(1, 100)
+                        .boxed()
+                        .map(num -> "subject" + num)
+                        .toList()
     );
     var schemas = controller.getSchemas(LOCAL_KAFKA_CLUSTER_NAME,
-        0, -1, null, null).block();
+            0, -1, null, null).block();

     assertThat(schemas.getBody().getPageCount()).isEqualTo(4);
     assertThat(schemas.getBody().getSchemas()).hasSize(25);
@@ -102,14 +102,14 @@ public class SchemaRegistryPaginationTest {
   @Test
   void shouldCalculateCorrectPageCountForNonDivisiblePageSize() {
     init(
-        IntStream.rangeClosed(1, 100)
-            .boxed()
-            .map(num -> "subject" + num)
-            .toArray(String[]::new)
+                IntStream.rangeClosed(1, 100)
+                        .boxed()
+                        .map(num -> "subject" + num)
+                        .toList()
     );

     var schemas = controller.getSchemas(LOCAL_KAFKA_CLUSTER_NAME,
-        4, 33, null, null).block();
+            4, 33, null, null).block();

     assertThat(schemas.getBody().getPageCount()).isEqualTo(4);
     assertThat(schemas.getBody().getSchemas()).hasSize(1);
@@ -118,8 +118,8 @@ public class SchemaRegistryPaginationTest {
 
 
   private KafkaCluster buildKafkaCluster(String clusterName) {
     return KafkaCluster.builder()
-        .name(clusterName)
-        .schemaRegistry(InternalSchemaRegistry.builder().build())
-        .build();
+            .name(clusterName)
+            .schemaRegistryClient(mock(ReactiveFailover.class))
+            .build();
   }
 }

+ 0 - 2
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/TopicsServicePaginationTest.java

@@ -11,7 +11,6 @@ import com.provectus.kafka.ui.mapper.ClusterMapper;
 import com.provectus.kafka.ui.mapper.ClusterMapperImpl;
 import com.provectus.kafka.ui.model.InternalLogDirStats;
 import com.provectus.kafka.ui.model.InternalPartitionsOffsets;
-import com.provectus.kafka.ui.model.InternalSchemaRegistry;
 import com.provectus.kafka.ui.model.InternalTopic;
 import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.model.Metrics;
@@ -87,7 +86,6 @@ class TopicsServicePaginationTest {
   private KafkaCluster buildKafkaCluster(String clusterName) {
     return KafkaCluster.builder()
         .name(clusterName)
-        .schemaRegistry(InternalSchemaRegistry.builder().build())
         .build();
   }


+ 5 - 7
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlApiClientTest.java

@@ -9,15 +9,12 @@ import com.fasterxml.jackson.databind.node.JsonNodeFactory;
 import com.fasterxml.jackson.databind.node.TextNode;
 import com.provectus.kafka.ui.AbstractIntegrationTest;
 import com.provectus.kafka.ui.container.KsqlDbContainer;
-import com.provectus.kafka.ui.model.InternalKsqlServer;
-import com.provectus.kafka.ui.model.KafkaCluster;
 import java.time.Duration;
 import java.util.List;
 import java.util.Map;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
-import org.springframework.util.unit.DataSize;
 import org.testcontainers.shaded.org.awaitility.Awaitility;
 import org.testcontainers.utility.DockerImageName;
 import reactor.test.StepVerifier;
@@ -28,8 +25,6 @@ class KsqlApiClientTest extends AbstractIntegrationTest {
       DockerImageName.parse("confluentinc/ksqldb-server").withTag("0.24.0"))
       .withKafka(kafka);

-  private static final DataSize maxBuffSize = DataSize.ofMegabytes(20);
-
   @BeforeAll
   static void startContainer() {
     KSQL_DB.start();
@@ -43,8 +38,7 @@ class KsqlApiClientTest extends AbstractIntegrationTest {
   // Tutorial is here: https://ksqldb.io/quickstart.html
   @Test
   void ksqTutorialQueriesWork() {
-    var client = new KsqlApiClient(KafkaCluster.builder().ksqldbServer(
-            InternalKsqlServer.builder().url(KSQL_DB.url()).build()).build(), maxBuffSize);
+    var client = ksqlClient();
     execCommandSync(client,
         "CREATE STREAM riderLocations (profileId VARCHAR, latitude DOUBLE, longitude DOUBLE) "
             + "WITH (kafka_topic='locations', value_format='json', partitions=1);",
@@ -130,5 +124,9 @@ class KsqlApiClientTest extends AbstractIntegrationTest {
     }
   }

+  private KsqlApiClient ksqlClient() {
+    return new KsqlApiClient(KSQL_DB.url(), null, null, null);
+  }
+

 }

+ 20 - 13
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2Test.java

@@ -4,10 +4,11 @@ import static org.assertj.core.api.Assertions.assertThat;
 
 
 import com.provectus.kafka.ui.AbstractIntegrationTest;
 import com.provectus.kafka.ui.container.KsqlDbContainer;
-import com.provectus.kafka.ui.model.InternalKsqlServer;
 import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.model.KsqlStreamDescriptionDTO;
 import com.provectus.kafka.ui.model.KsqlTableDescriptionDTO;
+import com.provectus.kafka.ui.util.ReactiveFailover;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.CopyOnWriteArraySet;
@@ -35,29 +36,25 @@ class KsqlServiceV2Test extends AbstractIntegrationTest {
 
 
   @AfterAll
   static void cleanup() {
-    var client = new KsqlApiClient(KafkaCluster.builder().ksqldbServer(
-        InternalKsqlServer.builder().url(KSQL_DB.url()).build()).build(), maxBuffSize);
-
     TABLES_TO_DELETE.forEach(t ->
-        client.execute(String.format("DROP TABLE IF EXISTS %s DELETE TOPIC;", t), Map.of())
+        ksqlClient().execute(String.format("DROP TABLE IF EXISTS %s DELETE TOPIC;", t), Map.of())
             .blockLast());

     STREAMS_TO_DELETE.forEach(s ->
-        client.execute(String.format("DROP STREAM IF EXISTS %s DELETE TOPIC;", s), Map.of())
+        ksqlClient().execute(String.format("DROP STREAM IF EXISTS %s DELETE TOPIC;", s), Map.of())
             .blockLast());

     KSQL_DB.stop();
   }

-  private final KsqlServiceV2 ksqlService = new KsqlServiceV2(maxBuffSize);
+  private final KsqlServiceV2 ksqlService = new KsqlServiceV2();

   @Test
   void listStreamsReturnsAllKsqlStreams() {
-    var cluster = KafkaCluster.builder().ksqldbServer(InternalKsqlServer.builder().url(KSQL_DB.url()).build()).build();
     var streamName = "stream_" + System.currentTimeMillis();
     STREAMS_TO_DELETE.add(streamName);

-    new KsqlApiClient(cluster, maxBuffSize)
+    ksqlClient()
         .execute(
             String.format("CREATE STREAM %s ( "
                 + "  c1 BIGINT KEY, "
@@ -70,7 +67,7 @@ class KsqlServiceV2Test extends AbstractIntegrationTest {
             Map.of())
         .blockLast();

-    var streams = ksqlService.listStreams(cluster).collectList().block();
+    var streams = ksqlService.listStreams(cluster()).collectList().block();
     assertThat(streams).contains(
         new KsqlStreamDescriptionDTO()
             .name(streamName.toUpperCase())
@@ -82,11 +79,10 @@ class KsqlServiceV2Test extends AbstractIntegrationTest {
 
 
   @Test
   void listTablesReturnsAllKsqlTables() {
-    var cluster = KafkaCluster.builder().ksqldbServer(InternalKsqlServer.builder().url(KSQL_DB.url()).build()).build();
     var tableName = "table_" + System.currentTimeMillis();
     TABLES_TO_DELETE.add(tableName);

-    new KsqlApiClient(cluster, maxBuffSize)
+    ksqlClient()
         .execute(
             String.format("CREATE TABLE %s ( "
                 + "   c1 BIGINT PRIMARY KEY, "
@@ -99,7 +95,7 @@ class KsqlServiceV2Test extends AbstractIntegrationTest {
             Map.of())
         .blockLast();

-    var tables = ksqlService.listTables(cluster).collectList().block();
+    var tables = ksqlService.listTables(cluster()).collectList().block();
     assertThat(tables).contains(
         new KsqlTableDescriptionDTO()
             .name(tableName.toUpperCase())
@@ -110,4 +106,15 @@ class KsqlServiceV2Test extends AbstractIntegrationTest {
     );
   }

+  private static KafkaCluster cluster() {
+    return KafkaCluster.builder()
+        .ksqlClient(ReactiveFailover.create(
+            List.of(ksqlClient()), th -> true, "", ReactiveFailover.DEFAULT_RETRY_GRACE_PERIOD_MS))
+        .build();
+  }
+
+  private static KsqlApiClient ksqlClient() {
+    return new KsqlApiClient(KSQL_DB.url(), null, null, null);
+  }
+
 }

+ 44 - 11
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/metrics/PrometheusMetricsRetrieverTest.java

@@ -1,7 +1,9 @@
 package com.provectus.kafka.ui.service.metrics;

+import com.provectus.kafka.ui.model.MetricsConfig;
 import java.io.IOException;
 import java.math.BigDecimal;
+import java.util.List;
 import java.util.Map;
 import okhttp3.mockwebserver.MockResponse;
 import okhttp3.mockwebserver.MockWebServer;
@@ -30,8 +32,33 @@ class PrometheusMetricsRetrieverTest {
   @Test
   void callsMetricsEndpointAndConvertsResponceToRawMetric() {
     var url = mockWebServer.url("/metrics");
+    mockWebServer.enqueue(prepareResponse());
+
+    MetricsConfig metricsConfig = prepareMetricsConfig(url.port(), null, null);
+
+    StepVerifier.create(retriever.retrieve(url.host(), metricsConfig))
+        .expectNextSequence(expectedRawMetrics())
+        // third metric should not be present, since it has "NaN" value
+        .verifyComplete();
+  }
+
+  @Test
+  void callsSecureMetricsEndpointAndConvertsResponceToRawMetric() {
+    var url = mockWebServer.url("/metrics");
+    mockWebServer.enqueue(prepareResponse());
+
+
+    MetricsConfig metricsConfig = prepareMetricsConfig(url.port(), "username", "password");
+
+    StepVerifier.create(retriever.retrieve(url.host(), metricsConfig))
+        .expectNextSequence(expectedRawMetrics())
+        // third metric should not be present, since it has "NaN" value
+        .verifyComplete();
+  }
+
+  MockResponse prepareResponse() {
     // body copied from real jmx exporter
-    MockResponse response = new MockResponse().setBody(
+    return new MockResponse().setBody(
         "# HELP kafka_server_KafkaRequestHandlerPool_FifteenMinuteRate Attribute exposed for management \n"
             + "# TYPE kafka_server_KafkaRequestHandlerPool_FifteenMinuteRate untyped\n"
             + "kafka_server_KafkaRequestHandlerPool_FifteenMinuteRate{name=\"RequestHandlerAvgIdlePercent\",} 0.898\n"
@@ -40,7 +67,19 @@ class PrometheusMetricsRetrieverTest {
             + "kafka_server_socket_server_metrics_request_size_avg{listener=\"PLAIN\",networkProcessor=\"1\",} 101.1\n"
             + "kafka_server_socket_server_metrics_request_size_avg{listener=\"PLAIN2\",networkProcessor=\"5\",} NaN"
     );
-    mockWebServer.enqueue(response);
+  }
+
+  MetricsConfig prepareMetricsConfig(Integer port, String username, String password) {
+    return MetricsConfig.builder()
+        .ssl(false)
+        .port(port)
+        .type(MetricsConfig.PROMETHEUS_METRICS_TYPE)
+        .username(username)
+        .password(password)
+        .build();
+  }
+
+  List<RawMetric> expectedRawMetrics() {
 
 
     var firstMetric = RawMetric.create(
         "kafka_server_KafkaRequestHandlerPool_FifteenMinuteRate",
@@ -48,17 +87,11 @@ class PrometheusMetricsRetrieverTest {
         new BigDecimal("0.898")
     );

-    var second = RawMetric.create(
+    var secondMetric = RawMetric.create(
         "kafka_server_socket_server_metrics_request_size_avg",
         Map.of("listener", "PLAIN", "networkProcessor", "1"),
         new BigDecimal("101.1")
     );
-
-    StepVerifier.create(retriever.retrieve(url.host(), url.port(), false))
-        .expectNext(firstMetric)
-        .expectNext(second)
-        // third metric should not be present, since it has "NaN" value
-        .verifyComplete();
+    return List.of(firstMetric, secondMetric);
   }
-
-}
+}

+ 233 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/ReactiveFailoverTest.java

@@ -0,0 +1,233 @@
+package com.provectus.kafka.ui.util;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.google.common.base.Preconditions;
+import java.time.Duration;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Consumer;
+import java.util.function.Predicate;
+import java.util.function.Supplier;
+import java.util.stream.Stream;
+import org.junit.jupiter.api.Test;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+import reactor.test.StepVerifier;
+
+class ReactiveFailoverTest {
+
+  private static final String NO_AVAILABLE_PUBLISHERS_MSG = "no active publishers!";
+  private static final Predicate<Throwable> FAILING_EXCEPTION_FILTER = th -> th.getMessage().contains("fail!");
+  private static final Supplier<Throwable> FAILING_EXCEPTION_SUPPLIER = () -> new IllegalStateException("fail!");
+  private static final Duration RETRY_PERIOD = Duration.ofMillis(300);
+
+  private final List<Publisher> publishers = Stream.generate(Publisher::new).limit(3).toList();
+
+  private final ReactiveFailover<Publisher> failover = ReactiveFailover.create(
+      publishers,
+      FAILING_EXCEPTION_FILTER,
+      NO_AVAILABLE_PUBLISHERS_MSG,
+      RETRY_PERIOD
+  );
+
+  @Test
+  void testMonoFailoverCycle() throws InterruptedException {
+    // starting with first publisher:
+    // 0 -> ok : ok
+    monoCheck(
+        Map.of(
+            0, okMono()
+        ),
+        List.of(0),
+        step -> step.expectNextCount(1).verifyComplete()
+    );
+
+    // 0 -> fail, 1 -> ok : ok
+    monoCheck(
+        Map.of(
+            0, failingMono(),
+            1, okMono()
+        ),
+        List.of(0, 1),
+        step -> step.expectNextCount(1).verifyComplete()
+    );
+
+    // 0.failed, 1.failed, 2 -> ok : ok
+    monoCheck(
+        Map.of(
+            1, failingMono(),
+            2, okMono()
+        ),
+        List.of(1, 2),
+        step -> step.expectNextCount(1).verifyComplete()
+    );
+
+    // 0.failed, 1.failed, 2 -> fail : failing exception
+    monoCheck(
+        Map.of(
+            2, failingMono()
+        ),
+        List.of(2),
+        step -> step.verifyErrorMessage(FAILING_EXCEPTION_SUPPLIER.get().getMessage())
+    );
+
+    // 0.failed, 1.failed, 2.failed : No alive publisher exception
+    monoCheck(
+        Map.of(),
+        List.of(),
+        step -> step.verifyErrorMessage(NO_AVAILABLE_PUBLISHERS_MSG)
+    );
+
+    // resetting retry: all publishers became alive: 0.ok, 1.ok, 2.ok
+    Thread.sleep(RETRY_PERIOD.toMillis() + 1);
+
+    // starting with last errored publisher:
+    // 2 -> fail, 0 -> fail, 1 -> ok : ok
+    monoCheck(
+        Map.of(
+            2, failingMono(),
+            0, failingMono(),
+            1, okMono()
+        ),
+        List.of(2, 0, 1),
+        step -> step.expectNextCount(1).verifyComplete()
+    );
+
+    // 1 -> ok : ok
+    monoCheck(
+        Map.of(
+            1, okMono()
+        ),
+        List.of(1),
+        step -> step.expectNextCount(1).verifyComplete()
+    );
+  }
+
+  @Test
+  void testFluxFailoverCycle() throws InterruptedException {
+    // starting with first publisher:
+    // 0 -> ok : ok
+    fluxCheck(
+        Map.of(
+            0, okFlux()
+        ),
+        List.of(0),
+        step -> step.expectNextCount(1).verifyComplete()
+    );
+
+    // 0 -> fail, 1 -> ok : ok
+    fluxCheck(
+        Map.of(
+            0, failingFlux(),
+            1, okFlux()
+        ),
+        List.of(0, 1),
+        step -> step.expectNextCount(1).verifyComplete()
+    );
+
+    // 0.failed, 1.failed, 2 -> ok : ok
+    fluxCheck(
+        Map.of(
+            1, failingFlux(),
+            2, okFlux()
+        ),
+        List.of(1, 2),
+        step -> step.expectNextCount(1).verifyComplete()
+    );
+
+    // 0.failed, 1.failed, 2 -> fail : failing exception
+    fluxCheck(
+        Map.of(
+            2, failingFlux()
+        ),
+        List.of(2),
+        step -> step.verifyErrorMessage(FAILING_EXCEPTION_SUPPLIER.get().getMessage())
+    );
+
+    // 0.failed, 1.failed, 2.failed : No alive publisher exception
+    fluxCheck(
+        Map.of(),
+        List.of(),
+        step -> step.verifyErrorMessage(NO_AVAILABLE_PUBLISHERS_MSG)
+    );
+
+    // resetting retry: all publishers became alive: 0.ok, 1.ok, 2.ok
+    Thread.sleep(RETRY_PERIOD.toMillis() + 1);
+
+    // starting with last errored publisher:
+    // 2 -> fail, 0 -> fail, 1 -> ok : ok
+    fluxCheck(
+        Map.of(
+            2, failingFlux(),
+            0, failingFlux(),
+            1, okFlux()
+        ),
+        List.of(2, 0, 1),
+        step -> step.expectNextCount(1).verifyComplete()
+    );
+
+    // 1 -> ok : ok
+    fluxCheck(
+        Map.of(
+            1, okFlux()
+        ),
+        List.of(1),
+        step -> step.expectNextCount(1).verifyComplete()
+    );
+  }
+
+  private void monoCheck(Map<Integer, Mono<String>> mock,
+                         List<Integer> publishersToBeCalled, // for checking calls order
+                         Consumer<StepVerifier.Step<?>> stepVerifier) {
+    AtomicInteger calledCount = new AtomicInteger();
+    var mono = failover.mono(publisher -> {
+      int calledPublisherIdx = publishers.indexOf(publisher);
+      assertThat(calledPublisherIdx).isEqualTo(publishersToBeCalled.get(calledCount.getAndIncrement()));
+      return Preconditions.checkNotNull(
+          mock.get(calledPublisherIdx),
+          "Mono result not set for publisher %d", calledPublisherIdx
+      );
+    });
+    stepVerifier.accept(StepVerifier.create(mono));
+    assertThat(calledCount.get()).isEqualTo(publishersToBeCalled.size());
+  }
+
+
+  private void fluxCheck(Map<Integer, Flux<String>> mock,
+                         List<Integer> publishersToBeCalled, // for checking calls order
+                         Consumer<StepVerifier.Step<?>> stepVerifier) {
+    AtomicInteger calledCount = new AtomicInteger();
+    var flux = failover.flux(publisher -> {
+      int calledPublisherIdx = publishers.indexOf(publisher);
+      assertThat(calledPublisherIdx).isEqualTo(publishersToBeCalled.get(calledCount.getAndIncrement()));
+      return Preconditions.checkNotNull(
+          mock.get(calledPublisherIdx),
+          "Mono result not set for publisher %d", calledPublisherIdx
+      );
+    });
+    stepVerifier.accept(StepVerifier.create(flux));
+    assertThat(calledCount.get()).isEqualTo(publishersToBeCalled.size());
+  }
+
+  private Flux<String> okFlux() {
+    return Flux.just("ok");
+  }
+
+  private Flux<String> failingFlux() {
+    return Flux.error(FAILING_EXCEPTION_SUPPLIER);
+  }
+
+  private Mono<String> okMono() {
+    return Mono.just("ok");
+  }
+
+  private Mono<String> failingMono() {
+    return Mono.error(FAILING_EXCEPTION_SUPPLIER);
+  }
+
+  public static class Publisher {
+  }
+
+}
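
Taken together with the service tests above, ReactiveFailover wraps several equivalent clients (for example, the failover URLs of a Schema Registry or ksqlDB endpoint) behind a single reactive call site. A minimal sketch of that wiring, using the create/mono signatures exercised by this test; the ApiClient type and the two base URLs are illustrative assumptions, not part of the change:

```java
import com.provectus.kafka.ui.util.ReactiveFailover;
import java.time.Duration;
import java.util.List;
import reactor.core.publisher.Mono;

class ReactiveFailoverUsageSketch {

  // Hypothetical client wrapper standing in for a generated web client bound to one base URL.
  record ApiClient(String baseUrl) {
    Mono<String> listSubjects() {
      return Mono.just("subjects via " + baseUrl); // placeholder for a real HTTP call
    }
  }

  Mono<String> listSubjectsWithFailover() {
    // Two equivalent endpoints; a matching error switches traffic to the next publisher
    // until the retry grace period elapses.
    ReactiveFailover<ApiClient> failover = ReactiveFailover.create(
        List.of(new ApiClient("http://sr-1:8085"), new ApiClient("http://sr-2:8085")),
        th -> true,                          // error filter: treat every failure as retryable (assumption)
        "No live schema-registry instances", // message raised when all publishers are marked failed
        Duration.ofSeconds(5));              // retry grace period, as in ReactiveFailoverTest above
    return failover.mono(ApiClient::listSubjects);
  }
}
```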

+ 124 - 54
kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/ProtobufSchemaConverterTest.java

@@ -1,70 +1,140 @@
 package com.provectus.kafka.ui.util.jsonschema;

-import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
 import java.net.URI;
-import java.net.URISyntaxException;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;


-public class ProtobufSchemaConverterTest {
+class ProtobufSchemaConverterTest {

   @Test
-  public void testSimpleProto() throws URISyntaxException, JsonProcessingException {
-
-    String proto = "syntax = \"proto3\";\n"
-        + "package com.acme;\n"
-        + "\n"
-        + "message MyRecord {\n"
-        + "  string f1 = 1;\n"
-        + "  OtherRecord f2 = 2;\n"
-        + "  repeated OtherRecord f3 = 3;\n"
-        + "}\n"
-        + "\n"
-        + "message OtherRecord {\n"
-        + "  int32 other_id = 1;\n"
-        + "  Order order = 2;\n"
-        + "  oneof optionalField {"
-        + "    string name = 3;"
-        + "    uint64 size = 4;"
-        + "  }"
-        + "}\n"
-        + "\n"
-        + "enum Order {\n"
-        + "    FIRST = 1;\n"
-        + "    SECOND = 1;\n"
-        + "}\n";
-
-    String expected =
-        "{\"$id\":\"http://example.com/com.acme.MyRecord\","
-        + "\"$schema\":\"https://json-schema.org/draft/2020-12/schema\","
-        + "\"type\":\"object\",\"properties\":{\"f1\":{\"type\":\"string\"},"
-        + "\"f2\":{\"$ref\":\"#/definitions/record.com.acme.OtherRecord\"},"
-        + "\"f3\":{\"type\":\"array\","
-        + "\"items\":{\"$ref\":\"#/definitions/record.com.acme.OtherRecord\"}}},"
-        + "\"required\":[\"f3\"],"
-        + "\"definitions\":"
-        + "{\"record.com.acme.OtherRecord\":"
-        + "{\"type\":\"object\",\"properties\":"
-        + "{\"optionalField\":{\"oneOf\":[{\"type\":\"string\"},"
-        + "{\"type\":\"integer\"}]},\"other_id\":"
-        + "{\"type\":\"integer\"},\"order\":{\"enum\":[\"FIRST\",\"SECOND\"],"
-        + "\"type\":\"string\"}}}}}";
-
-    ProtobufSchema protobufSchema = new ProtobufSchema(proto);
-
-    final ProtobufSchemaConverter converter = new ProtobufSchemaConverter();
+  void testSchemaConvert() throws Exception {
+    String protoSchema = """
+        syntax = "proto3";
+        package test;
+
+        import "google/protobuf/timestamp.proto";
+        import "google/protobuf/duration.proto";
+        import "google/protobuf/struct.proto";
+        import "google/protobuf/wrappers.proto";
+
+        message TestMsg {
+            string string_field = 1;
+            int32 int32_field = 2;
+            bool bool_field = 3;
+            SampleEnum enum_field = 4;
+
+            enum SampleEnum {
+                ENUM_V1 = 0;
+                ENUM_V2 = 1;
+            }
+
+            google.protobuf.Timestamp ts_field = 5;
+            google.protobuf.Struct struct_field = 6;
+            google.protobuf.ListValue lst_v_field = 7;
+            google.protobuf.Duration duration_field = 8;
+
+            oneof some_oneof1 {
+                google.protobuf.Value v1 = 9;
+                google.protobuf.Value v2 = 10;
+            }
+            // wrapper fields:
+            google.protobuf.Int64Value int64_w_field = 11;
+            google.protobuf.Int32Value int32_w_field = 12;
+            google.protobuf.UInt64Value uint64_w_field = 13;
+            google.protobuf.UInt32Value uint32_w_field = 14;
+            google.protobuf.StringValue string_w_field = 15;
+            google.protobuf.BoolValue bool_w_field = 16;
+            google.protobuf.DoubleValue double_w_field = 17;
+            google.protobuf.FloatValue float_w_field = 18;
+
+            //embedded msg
+            EmbeddedMsg emb = 19;
+            repeated EmbeddedMsg emb_list = 20;
+
+            message EmbeddedMsg {
+                int32 emb_f1 = 1;
+                TestMsg outer_ref = 2;
+                EmbeddedMsg self_ref = 3;
+            }
+        }""";
+
+
+    String expectedJsonSchema = """
+        {
+            "$id": "http://example.com/test.TestMsg",
+            "$schema": "https://json-schema.org/draft/2020-12/schema",
+            "type": "object",
+            "definitions":
+            {
+                "test.TestMsg":
+                {
+                    "type": "object",
+                    "properties":
+                    {
+                        "enum_field": {
+                            "enum":
+                            [
+                                "ENUM_V1",
+                                "ENUM_V2"
+                            ],
+                            "type": "string"
+                        },
+                        "string_w_field": { "type": "string" },
+                        "ts_field": { "type": "string", "format": "date-time" },
+                        "emb_list": {
+                            "type": "array",
+                            "items": { "$ref": "#/definitions/test.TestMsg.EmbeddedMsg" }
+                        },
+                        "float_w_field": { "type": "number" },
+                        "lst_v_field": {
+                            "type": "array",
+                            "items": { "type":[ "number", "string", "object", "array", "boolean", "null" ] }
+                        },
+                        "struct_field": { "type": "object", "properties": {} },
+                        "string_field": { "type": "string" },
+                        "double_w_field": { "type": "number" },
+                        "bool_field": { "type": "boolean" },
+                        "int32_w_field": { "type": "integer", "maximum": 2147483647, "minimum": -2147483648 },
+                        "duration_field": { "type": "string" },
+                        "int32_field": { "type": "integer", "maximum": 2147483647, "minimum": -2147483648 },
+                        "int64_w_field": {
+                            "type": "integer",
+                            "maximum": 9223372036854775807, "minimum": -9223372036854775808
+                        },
+                        "v1": { "type": [ "number", "string", "object", "array", "boolean", "null" ] },
+                        "emb": { "$ref": "#/definitions/test.TestMsg.EmbeddedMsg" },
+                        "v2": { "type": [ "number", "string", "object", "array", "boolean", "null" ] },
+                        "uint32_w_field": { "type": "integer", "maximum": 4294967295, "minimum": 0 },
+                        "bool_w_field": { "type": "boolean" },
+                        "uint64_w_field": { "type": "integer", "maximum": 18446744073709551615, "minimum": 0 }
+                    }
+                },
+                "test.TestMsg.EmbeddedMsg": {
+                    "type": "object",
+                    "properties":
+                    {
+                        "emb_f1": { "type": "integer", "maximum": 2147483647, "minimum": -2147483648 },
+                        "outer_ref": { "$ref": "#/definitions/test.TestMsg" },
+                        "self_ref": { "$ref": "#/definitions/test.TestMsg.EmbeddedMsg" }
+                    }
+                }
+            },
+            "$ref": "#/definitions/test.TestMsg"
+        }""";
+
+    ProtobufSchemaConverter converter = new ProtobufSchemaConverter();
+    ProtobufSchema protobufSchema = new ProtobufSchema(protoSchema);
     URI basePath = new URI("http://example.com/");

-    final JsonSchema convert =
-        converter.convert(basePath, protobufSchema.toDescriptor("MyRecord"));
+    JsonSchema converted = converter.convert(basePath, protobufSchema.toDescriptor());
+    assertJsonEqual(expectedJsonSchema, converted.toJson());
+  }
 
 
+  private void assertJsonEqual(String expected, String actual) throws Exception {
     ObjectMapper om = new ObjectMapper();
-    Assertions.assertEquals(
-        om.readTree(expected),
-        om.readTree(convert.toJson())
-    );
+    Assertions.assertEquals(om.readTree(expected), om.readTree(actual));
   }
-}
+}

+ 26 - 0
kafka-ui-contract/pom.xml

@@ -122,6 +122,32 @@
                                        <asyncNative>true</asyncNative>
                                        <library>webclient</library>

+                                        <useBeanValidation>true</useBeanValidation>
+                                        <dateLibrary>java8</dateLibrary>
+                                    </configOptions>
+                                </configuration>
+                            </execution>
+                            <execution>
+                                <id>generate-sr-client</id>
+                                <goals>
+                                    <goal>generate</goal>
+                                </goals>
+                                <configuration>
+                                    <inputSpec>${project.basedir}/src/main/resources/swagger/kafka-sr-api.yaml
+                                    </inputSpec>
+                                    <output>${project.build.directory}/generated-sources/kafka-sr-client</output>
+                                    <generatorName>java</generatorName>
+                                    <generateApiTests>false</generateApiTests>
+                                    <generateModelTests>false</generateModelTests>
+
+                                    <configOptions>
+                                        <modelPackage>com.provectus.kafka.ui.sr.model</modelPackage>
+                                        <apiPackage>com.provectus.kafka.ui.sr.api</apiPackage>
+                                        <sourceFolder>kafka-sr-client</sourceFolder>
+
+                                        <asyncNative>true</asyncNative>
+                                        <library>webclient</library>
+
                                        <useBeanValidation>true</useBeanValidation>
                                        <dateLibrary>java8</dateLibrary>
                                    </configOptions>

+ 404 - 0
kafka-ui-contract/src/main/resources/swagger/kafka-sr-api.yaml

@@ -0,0 +1,404 @@
+openapi: 3.0.0
+info:
+    description: Api Documentation
+    version: 0.1.0
+    title: Api Documentation
+    termsOfService: urn:tos
+    contact: {}
+    license:
+        name: Apache 2.0
+        url: http://www.apache.org/licenses/LICENSE-2.0
+tags:
+    - name: /schemaregistry
+servers:
+    - url: /localhost
+
+paths:
+    /subjects:
+        get:
+            tags:
+              - KafkaSrClient
+            summary: get all subject names from the schema registry
+            operationId: getAllSubjectNames
+            parameters:
+              - name: subjectPrefix
+                in: query
+                required: false
+                schema:
+                  type: string
+              - name: deleted
+                in: query
+                schema:
+                  type: boolean
+            responses:
+                200:
+                  description: OK
+                  content:
+                      application/json:
+                          schema:
+                            #workaround for https://github.com/spring-projects/spring-framework/issues/24734
+                            type: string
+
+    /subjects/{subject}:
+        delete:
+            tags:
+                - KafkaSrClient
+            operationId: deleteAllSubjectVersions
+            parameters:
+                - name: subject
+                  in: path
+                  required: true
+                  schema:
+                    type: string
+                - name: permanent
+                  in: query
+                  schema:
+                    type: boolean
+                  required: false
+            responses:
+                200:
+                    description: OK
+                404:
+                    description: Not found
+
+    /subjects/{subject}/versions/{version}:
+        get:
+            tags:
+              - KafkaSrClient
+            operationId: getSubjectVersion
+            parameters:
+              - name: subject
+                in: path
+                required: true
+                schema:
+                  type: string
+              - name: version
+                in: path
+                required: true
+                schema:
+                  type: string
+            responses:
+                200:
+                    description: OK
+                    content:
+                        application/json:
+                            schema:
+                                $ref: '#/components/schemas/SchemaSubject'
+                404:
+                    description: Not found
+                422:
+                    description: Invalid version
+        delete:
+            tags:
+                - KafkaSrClient
+            operationId: deleteSubjectVersion
+            parameters:
+                - name: subject
+                  in: path
+                  required: true
+                  schema:
+                    type: string
+                - name: permanent
+                  in: query
+                  required: false
+                  schema:
+                    type: boolean
+                    default: false
+                - name: version
+                  in: path
+                  required: true
+                  schema:
+                    type: string
+            responses:
+                200:
+                    description: OK
+                404:
+                    description: Not found
+
+    /subjects/{subject}/versions:
+        get:
+            tags:
+                - KafkaSrClient
+            operationId: getSubjectVersions
+            parameters:
+                - name: subject
+                  in: path
+                  required: true
+                  schema:
+                      type: string
+            responses:
+                200:
+                    description: OK
+                    content:
+                        application/json:
+                            schema:
+                                type: array
+                                items:
+                                    type: integer
+                                    format: int32
+                404:
+                    description: Not found
+        post:
+            tags:
+                - KafkaSrClient
+            operationId: registerNewSchema
+            parameters:
+                - name: subject
+                  in: path
+                  required: true
+                  schema:
+                      type: string
+            requestBody:
+                content:
+                    application/json:
+                        schema:
+                            $ref: '#/components/schemas/NewSubject'
+            responses:
+                200:
+                    description: OK
+                    content:
+                        application/json:
+                            schema:
+                                $ref: '#/components/schemas/SubjectId'
+
+    /config/:
+        get:
+            tags:
+                - KafkaSrClient
+            operationId: getGlobalCompatibilityLevel
+            responses:
+                200:
+                    description: OK
+                    content:
+                        application/json:
+                            schema:
+                                $ref: '#/components/schemas/CompatibilityConfig'
+                404:
+                    description: Not found
+        put:
+            tags:
+                - KafkaSrClient
+            operationId: updateGlobalCompatibilityLevel
+            requestBody:
+                content:
+                    application/json:
+                        schema:
+                            $ref: '#/components/schemas/CompatibilityLevelChange'
+            responses:
+                200:
+                    description: OK
+                    content:
+                        application/json:
+                            schema:
+                                $ref: '#/components/schemas/CompatibilityLevelChange'
+                404:
+                    description: Not found
+
+    /config/{subject}:
+        get:
+            tags:
+                - KafkaSrClient
+            operationId: getSubjectCompatibilityLevel
+            parameters:
+                - name: subject
+                  in: path
+                  required: true
+                  schema:
+                      type: string
+                - name: defaultToGlobal
+                  in: query
+                  required: true
+                  schema:
+                      type: boolean
+            responses:
+                200:
+                    description: OK
+                    content:
+                        application/json:
+                            schema:
+                                $ref: '#/components/schemas/CompatibilityConfig'
+                404:
+                    description: Not found
+        put:
+            tags:
+                - KafkaSrClient
+            operationId: updateSubjectCompatibilityLevel
+            parameters:
+                - name: subject
+                  in: path
+                  required: true
+                  schema:
+                      type: string
+            requestBody:
+                content:
+                    application/json:
+                        schema:
+                            $ref: '#/components/schemas/CompatibilityLevelChange'
+            responses:
+                200:
+                    description: OK
+                    content:
+                        application/json:
+                            schema:
+                                $ref: '#/components/schemas/CompatibilityLevelChange'
+                404:
+                    description: Not found
+        delete:
+            tags:
+                - KafkaSrClient
+            operationId: deleteSubjectCompatibilityLevel
+            parameters:
+                - name: subject
+                  in: path
+                  required: true
+                  schema:
+                      type: string
+            responses:
+                200:
+                    description: OK
+                404:
+                    description: Not found
+
+    /compatibility/subjects/{subject}/versions/{version}:
+        post:
+            tags:
+                - KafkaSrClient
+            operationId: checkSchemaCompatibility
+            parameters:
+                - name: subject
+                  in: path
+                  required: true
+                  schema:
+                      type: string
+                - name: version
+                  in: path
+                  required: true
+                  schema:
+                      type: string
+                - name: verbose
+                  in: query
+                  description: Show reason a schema fails the compatibility test
+                  schema:
+                      type: boolean
+            requestBody:
+                content:
+                    application/json:
+                        schema:
+                            $ref: '#/components/schemas/NewSubject'
+            responses:
+                200:
+                    description: OK
+                    content:
+                        application/json:
+                            schema:
+                                $ref: '#/components/schemas/CompatibilityCheckResponse'
+                404:
+                    description: Not found
+
+security:
+    - basicAuth: []
+
+components:
+    securitySchemes:
+        basicAuth:
+            type: http
+            scheme: basic
+    schemas:
+        SchemaSubject:
+            type: object
+            properties:
+              subject:
+                type: string
+              version:
+                type: string
+              id:
+                type: integer
+              schema:
+                type: string
+              schemaType:
+                  $ref: '#/components/schemas/SchemaType'
+            required:
+              - id
+              - subject
+              - version
+              - schema
+              - schemaType
+
+        SchemaType:
+          type: string
+          description: upon updating a schema, the type of an existing schema can't be changed
+          enum:
+            - AVRO
+            - JSON
+            - PROTOBUF
+
+        SchemaReference:
+            type: object
+            properties:
+                name:
+                    type: string
+                subject:
+                    type: string
+                version:
+                    type: integer
+            required:
+                - name
+                - subject
+                - version
+
+        SubjectId:
+            type: object
+            properties:
+                id:
+                    type: integer
+
+        NewSubject:
+            type: object
+            description: Should be set when creating or updating a schema subject
+            properties:
+                schema:
+                    type: string
+                schemaType:
+                    $ref: '#/components/schemas/SchemaType'
+                references:
+                    type: array
+                    items:
+                        $ref: '#/components/schemas/SchemaReference'
+            required:
+                - schema
+                - schemaType
+
+        CompatibilityConfig:
+            type: object
+            properties:
+                compatibilityLevel:
+                    $ref: '#/components/schemas/Compatibility'
+            required:
+                - compatibilityLevel
+
+        CompatibilityLevelChange:
+            type: object
+            properties:
+                compatibility:
+                    $ref: '#/components/schemas/Compatibility'
+            required:
+                - compatibility
+
+
+        Compatibility:
+            type: string
+            enum:
+                - BACKWARD
+                - BACKWARD_TRANSITIVE
+                - FORWARD
+                - FORWARD_TRANSITIVE
+                - FULL
+                - FULL_TRANSITIVE
+                - NONE
+
+
+        CompatibilityCheckResponse:
+            type: object
+            properties:
+                is_compatible:
+                    type: boolean
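
The contract above only defines the request and response shapes. For orientation, here is a minimal, hedged sketch of how the two new endpoints could be exercised with a plain Spring WebClient; the registry URL, subject name, and candidate schema are placeholder values, not taken from the repository, and the generated KafkaSrClient classes are not shown.

```
import java.util.Map;
import org.springframework.http.MediaType;
import org.springframework.web.reactive.function.client.WebClient;

public class CompatibilityCheckExample {

    public static void main(String[] args) {
        // Placeholder registry URL; adjust to your environment.
        WebClient registry = WebClient.create("http://localhost:8085");

        // Body matching the NewSubject schema above (schema + schemaType).
        Map<String, Object> candidate = Map.of(
            "schema", "{\"type\":\"record\",\"name\":\"User\",\"fields\":[{\"name\":\"id\",\"type\":\"long\"}]}",
            "schemaType", "AVRO");

        // POST /compatibility/subjects/{subject}/versions/{version}?verbose=true
        Map<?, ?> check = registry.post()
            .uri(uriBuilder -> uriBuilder
                .path("/compatibility/subjects/{subject}/versions/{version}")
                .queryParam("verbose", true)
                .build("user-value", "latest"))
            .contentType(MediaType.APPLICATION_JSON)
            .bodyValue(candidate)
            .retrieve()
            .bodyToMono(Map.class)
            .block();
        // CompatibilityCheckResponse carries a single is_compatible flag.
        System.out.println("is_compatible: " + check.get("is_compatible"));

        // GET /config/{subject}?defaultToGlobal=true -> CompatibilityConfig
        Map<?, ?> config = registry.get()
            .uri(uriBuilder -> uriBuilder
                .path("/config/{subject}")
                .queryParam("defaultToGlobal", true)
                .build("user-value"))
            .retrieve()
            .bodyToMono(Map.class)
            .block();
        System.out.println("compatibilityLevel: " + config.get("compatibilityLevel"));
    }
}
```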

+ 48 - 13
kafka-ui-e2e-checks/README.md

@@ -1,6 +1,6 @@
 ### E2E UI automation for Kafka-ui

-This repository is for E2E UI automation.
+This repository is for E2E UI automation.

 ### Table of Contents

@@ -16,28 +16,48 @@ This repository is for E2E UI automation.
 - [How to develop](#how-to-develop)

 ### Prerequisites
+
 - Docker & Docker-compose
 - Java (install aarch64 jdk if you have M1/arm chip)
 - Maven
-
+
 ### How to install
+
 ```
 git clone https://github.com/provectus/kafka-ui.git
 cd  kafka-ui-e2e-checks
 docker pull selenoid/vnc:chrome_86.0
 ```
+
 ### How to run checks

-1. Run `kafka-ui`:
+1. Run `kafka-ui`:
+
 ```
 cd kafka-ui
 docker-compose -f documentation/compose/e2e-tests.yaml up -d
 ```
-2. Run tests using your QaseIO API token as environment variable (put instead %s into command below)
+
+2. Run the Smoke test suite using your QaseIO API token as an environment variable (replace %s in the command below with the token)
+
+```
+./mvnw -DQASEIO_API_TOKEN='%s' -Dsurefire.suiteXmlFiles='src/test/resources/smoke.xml' -Dsuite=smoke -f 'kafka-ui-e2e-checks' test -Pprod
 ```
-./mvnw -DQASEIO_API_TOKEN='%s' -pl '!kafka-ui-api' test -Pprod
+
+3. Run the Sanity test suite using your QaseIO API token as an environment variable (replace %s in the command below with the token)
+
+```
+./mvnw -DQASEIO_API_TOKEN='%s' -Dsurefire.suiteXmlFiles='src/test/resources/sanity.xml' -Dsuite=sanity -f 'kafka-ui-e2e-checks' test -Pprod
+```
+
+4. Run the Regression test suite using your QaseIO API token as an environment variable (replace %s in the command below with the token)
+
 ```
+./mvnw -DQASEIO_API_TOKEN='%s' -Dsurefire.suiteXmlFiles='src/test/resources/regression.xml' -Dsuite=regression -f 'kafka-ui-e2e-checks' test -Pprod
+```
+
+5. To run tests in your local Chrome browser, add the following VM option to the Run Configuration
+
 ```
 -Dbrowser=local
 ```
@@ -47,25 +67,40 @@ docker-compose -f documentation/compose/e2e-tests.yaml up -d
 Reports are in `allure-results` folder.
 If you have installed allure commandline [here](https://www.npmjs.com/package/allure-commandline))
 You can see allure report with command:
+
 ```
 allure serve
 ```
+
 ### Screenshots

 Reference screenshots are in `SCREENSHOTS_FOLDER`  (default,`kafka-ui-e2e-checks/screenshots`)

 ### How to develop
-> ⚠️ todo 
+
+> ⚠️ todo
+
 ### Setting for different environments
-> ⚠️ todo
+
+> ⚠️ todo
+
 ### Test Data
-> ⚠️ todo
+
+> ⚠️ todo
+
 ### Actions
-> ⚠️ todo
+
+> ⚠️ todo
+
 ### Checks
-> ⚠️ todo
+
+> ⚠️ todo
+
 ### Parallelization
-> ⚠️ todo
+
+> ⚠️ todo
+
 ### Tips
- - install `Selenium UI Testing plugin` in IDEA
+
+- install `Selenium UI Testing plugin` in IDEA


+ 50 - 93
kafka-ui-e2e-checks/pom.xml

@@ -1,42 +1,34 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xmlns="http://maven.apache.org/POM/4.0.0"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <parent>
     <parent>
         <artifactId>kafka-ui</artifactId>
         <artifactId>kafka-ui</artifactId>
         <groupId>com.provectus</groupId>
         <groupId>com.provectus</groupId>
         <version>0.0.1-SNAPSHOT</version>
         <version>0.0.1-SNAPSHOT</version>
     </parent>
     </parent>
-    <modelVersion>4.0.0</modelVersion>
 
 
+    <modelVersion>4.0.0</modelVersion>
     <artifactId>kafka-ui-e2e-checks</artifactId>
     <artifactId>kafka-ui-e2e-checks</artifactId>
+
     <properties>
     <properties>
+        <maven.surefire-plugin.version>3.0.0-M8</maven.surefire-plugin.version>
         <kafka-ui-contract>${project.version}</kafka-ui-contract>
         <kafka-ui-contract>${project.version}</kafka-ui-contract>
-        <aspectj.version>1.9.9.1</aspectj.version>
-        <allure.version>2.18.1</allure.version>
-        <json-smart.version>2.4.8</json-smart.version>
+        <testcontainers.version>1.17.6</testcontainers.version>
+        <junit.platform.version>1.9.2</junit.platform.version>
         <selenide.version>6.6.3</selenide.version>
         <selenide.version>6.6.3</selenide.version>
+        <testng.version>7.6.1</testng.version>
+        <allure.version>2.20.1</allure.version>
+        <aspectj.version>1.9.9.1</aspectj.version>
         <assertj.version>3.23.1</assertj.version>
         <assertj.version>3.23.1</assertj.version>
-        <google.auto-service.version>1.0.1</google.auto-service.version>
         <hamcrest.version>2.2</hamcrest.version>
         <hamcrest.version>2.2</hamcrest.version>
         <slf4j.version>1.7.36</slf4j.version>
         <slf4j.version>1.7.36</slf4j.version>
-        <allure.java-commons.version>2.20.1</allure.java-commons.version>
         <dotenv.version>2.3.1</dotenv.version>
         <dotenv.version>2.3.1</dotenv.version>
-        <allure.maven-plugin.version>2.6</allure.maven-plugin.version>
-        <ashot.version>1.5.4</ashot.version>
-        <allure.screendiff-plugin.version>2.18.1</allure.screendiff-plugin.version>
-        <maven.surefire-plugin.version>2.22.2</maven.surefire-plugin.version>
-        <allure-maven.version>2.10.0</allure-maven.version>
         <kafka.version>3.3.1</kafka.version>
         <kafka.version>3.3.1</kafka.version>
         <qase.io.version>2.1.3</qase.io.version>
         <qase.io.version>2.1.3</qase.io.version>
     </properties>
     </properties>
 
 
     <dependencies>
     <dependencies>
-        <dependency>
-            <groupId>net.minidev</groupId>
-            <artifactId>json-smart</artifactId>
-            <version>${json-smart.version}</version>
-        </dependency>
         <dependency>
         <dependency>
             <groupId>org.apache.kafka</groupId>
             <groupId>org.apache.kafka</groupId>
             <artifactId>kafka_2.13</artifactId>
             <artifactId>kafka_2.13</artifactId>
@@ -122,23 +114,43 @@
         <dependency>
         <dependency>
             <groupId>org.testcontainers</groupId>
             <groupId>org.testcontainers</groupId>
             <artifactId>testcontainers</artifactId>
             <artifactId>testcontainers</artifactId>
+            <version>${testcontainers.version}</version>
         </dependency>
         </dependency>
-
         <dependency>
         <dependency>
-            <groupId>io.qameta.allure</groupId>
-            <artifactId>allure-junit5</artifactId>
-            <version>${allure.version}</version>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>selenium</artifactId>
+            <version>${testcontainers.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.junit.platform</groupId>
+            <artifactId>junit-platform-launcher</artifactId>
+            <version>${junit.platform.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.junit.platform</groupId>
+            <artifactId>junit-platform-engine</artifactId>
+            <version>${junit.platform.version}</version>
         </dependency>
         </dependency>
         <dependency>
         <dependency>
             <groupId>com.codeborne</groupId>
             <groupId>com.codeborne</groupId>
             <artifactId>selenide</artifactId>
             <artifactId>selenide</artifactId>
             <version>${selenide.version}</version>
             <version>${selenide.version}</version>
         </dependency>
         </dependency>
+        <dependency>
+            <groupId>org.testng</groupId>
+            <artifactId>testng</artifactId>
+            <version>${testng.version}</version>
+        </dependency>
         <dependency>
         <dependency>
             <groupId>io.qameta.allure</groupId>
             <groupId>io.qameta.allure</groupId>
             <artifactId>allure-selenide</artifactId>
             <artifactId>allure-selenide</artifactId>
             <version>${allure.version}</version>
             <version>${allure.version}</version>
         </dependency>
         </dependency>
+        <dependency>
+            <groupId>io.qameta.allure</groupId>
+            <artifactId>allure-testng</artifactId>
+            <version>${allure.version}</version>
+        </dependency>
         <dependency>
         <dependency>
             <groupId>org.hamcrest</groupId>
             <groupId>org.hamcrest</groupId>
             <artifactId>hamcrest</artifactId>
             <artifactId>hamcrest</artifactId>
@@ -150,20 +162,9 @@
             <version>${assertj.version}</version>
             <version>${assertj.version}</version>
         </dependency>
         </dependency>
         <dependency>
         <dependency>
-            <groupId>com.google.auto.service</groupId>
-            <artifactId>auto-service</artifactId>
-            <version>${google.auto-service.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.junit.jupiter</groupId>
-            <artifactId>junit-jupiter-api</artifactId>
-            <version>${junit.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.junit.jupiter</groupId>
-            <artifactId>junit-jupiter-engine</artifactId>
-            <version>${junit.version}</version>
-            <scope>test</scope>
+            <groupId>org.aspectj</groupId>
+            <artifactId>aspectjrt</artifactId>
+            <version>${aspectj.version}</version>
         </dependency>
         </dependency>
         <dependency>
         <dependency>
             <groupId>org.slf4j</groupId>
             <groupId>org.slf4j</groupId>
@@ -175,61 +176,16 @@
             <artifactId>lombok</artifactId>
             <artifactId>lombok</artifactId>
             <version>${org.projectlombok.version}</version>
             <version>${org.projectlombok.version}</version>
         </dependency>
         </dependency>
-        <dependency>
-            <groupId>org.aspectj</groupId>
-            <artifactId>aspectjrt</artifactId>
-            <version>${aspectj.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.testcontainers</groupId>
-            <artifactId>junit-jupiter</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>io.qameta.allure</groupId>
-            <artifactId>allure-java-commons</artifactId>
-            <version>${allure.java-commons.version}</version>
-        </dependency>
         <dependency>
         <dependency>
             <groupId>io.github.cdimascio</groupId>
             <groupId>io.github.cdimascio</groupId>
             <artifactId>dotenv-java</artifactId>
             <artifactId>dotenv-java</artifactId>
             <version>${dotenv.version}</version>
             <version>${dotenv.version}</version>
         </dependency>
         </dependency>
-        <dependency>
-            <groupId>org.junit.platform</groupId>
-            <artifactId>junit-platform-launcher</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>ru.yandex.qatools.allure</groupId>
-            <artifactId>allure-maven-plugin</artifactId>
-            <version>${allure.maven-plugin.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>ru.yandex.qatools.ashot</groupId>
-            <artifactId>ashot</artifactId>
-            <version>${ashot.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.seleniumhq.selenium</groupId>
-                    <artifactId>selenium-remote-driver</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>io.qameta.allure.plugins</groupId>
-            <artifactId>screen-diff-plugin</artifactId>
-            <version>${allure.screendiff-plugin.version}</version>
-        </dependency>
         <dependency>
         <dependency>
             <groupId>com.provectus</groupId>
             <groupId>com.provectus</groupId>
             <artifactId>kafka-ui-contract</artifactId>
             <artifactId>kafka-ui-contract</artifactId>
             <version>${kafka-ui-contract}</version>
             <version>${kafka-ui-contract}</version>
         </dependency>
         </dependency>
-        <dependency>
-            <groupId>org.testcontainers</groupId>
-            <artifactId>selenium</artifactId>
-            <scope>test</scope>
-        </dependency>
         <dependency>
         <dependency>
             <groupId>io.qase</groupId>
             <groupId>io.qase</groupId>
             <artifactId>qase-api</artifactId>
             <artifactId>qase-api</artifactId>
@@ -252,6 +208,13 @@
                         <configuration>
                         <configuration>
                             <skipTests>true</skipTests>
                             <skipTests>true</skipTests>
                         </configuration>
                         </configuration>
+                        <dependencies>
+                            <dependency>
+                                <groupId>org.apache.maven.surefire</groupId>
+                                <artifactId>surefire-testng</artifactId>
+                                <version>${maven.surefire-plugin.version}</version>
+                            </dependency>
+                        </dependencies>
                     </plugin>
                     </plugin>
                     <plugin>
                     <plugin>
                         <groupId>org.apache.maven.plugins</groupId>
                         <groupId>org.apache.maven.plugins</groupId>
@@ -269,18 +232,16 @@
                         <artifactId>maven-surefire-plugin</artifactId>
                         <artifactId>maven-surefire-plugin</artifactId>
                         <version>${maven.surefire-plugin.version}</version>
                         <version>${maven.surefire-plugin.version}</version>
                         <configuration>
                         <configuration>
-                            <testFailureIgnore>false</testFailureIgnore>
-                            <systemProperties>
-                                <property>
-                                    <name>junit.jupiter.extensions.autodetection.enabled</name>
-                                    <value>true</value>
-                                </property>
-                            </systemProperties>
                             <argLine>
                             <argLine>
                                 -javaagent:"${settings.localRepository}/org/aspectj/aspectjweaver/${aspectj.version}/aspectjweaver-${aspectj.version}.jar"
                                 -javaagent:"${settings.localRepository}/org/aspectj/aspectjweaver/${aspectj.version}/aspectjweaver-${aspectj.version}.jar"
                             </argLine>
                             </argLine>
                         </configuration>
                         </configuration>
                         <dependencies>
                         <dependencies>
+                            <dependency>
+                                <groupId>org.apache.maven.surefire</groupId>
+                                <artifactId>surefire-testng</artifactId>
+                                <version>${maven.surefire-plugin.version}</version>
+                            </dependency>
                             <dependency>
                             <dependency>
                                 <groupId>org.aspectj</groupId>
                                 <groupId>org.aspectj</groupId>
                                 <artifactId>aspectjweaver</artifactId>
                                 <artifactId>aspectjweaver</artifactId>
@@ -291,11 +252,7 @@
                     <plugin>
                     <plugin>
                         <groupId>io.qameta.allure</groupId>
                         <groupId>io.qameta.allure</groupId>
                         <artifactId>allure-maven</artifactId>
                         <artifactId>allure-maven</artifactId>
-                        <version>${allure-maven.version}</version>
-                    </plugin>
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-compiler-plugin</artifactId>
+                        <version>2.10.0</version>
                     </plugin>
                     </plugin>
                 </plugins>
                 </plugins>
             </build>
             </build>
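
The switch to the surefire-testng provider above means the suites referenced in the README (smoke.xml, sanity.xml, regression.xml) now run ordinary TestNG classes. As a rough, hypothetical illustration of what such a class looks like with this stack (Selenide + TestNG, with Allure's @Step recorded via the AspectJ agent from the surefire argLine); the class name, URL, and locator below are invented, not taken from the repository.

```
import static com.codeborne.selenide.Condition.visible;
import static com.codeborne.selenide.Selenide.$x;
import static com.codeborne.selenide.Selenide.open;

import io.qameta.allure.Step;
import org.testng.Assert;
import org.testng.annotations.Test;

public class DashboardSmokeTest {

    @Test
    public void dashboardHeaderIsShown() {
        // Placeholder URL; the real tests take the base URL from the suite's settings/env.
        open("http://localhost:8080");
        Assert.assertTrue(isDashboardVisible(), "Dashboard header should be visible");
    }

    @Step("Check Dashboard header")
    private boolean isDashboardVisible() {
        // The aspectjweaver -javaagent configured above lets Allure record this @Step.
        return $x("//h1[text()='Dashboard']").shouldBe(visible).isDisplayed();
    }
}
```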

+ 0 - 1
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Connector.java

@@ -8,5 +8,4 @@ import lombok.experimental.Accessors;
 public class Connector {
 public class Connector {
 
 
     private String name, config;
     private String name, config;
-
 }
 }

+ 5 - 5
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Schema.java

@@ -4,28 +4,28 @@ import com.provectus.kafka.ui.api.model.SchemaType;
 import lombok.Data;
 import lombok.Data;
 import lombok.experimental.Accessors;
 import lombok.experimental.Accessors;
 
 
-import static org.apache.commons.lang.RandomStringUtils.randomAlphabetic;
+import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
 
 
 @Data
 @Data
 @Accessors(chain = true)
 @Accessors(chain = true)
 public class Schema {
 public class Schema {
 
 
-    private String name,valuePath;
+    private String name, valuePath;
     private SchemaType type;
     private SchemaType type;
 
 
-    public static Schema createSchemaAvro(){
+    public static Schema createSchemaAvro() {
         return new Schema().setName("schema_avro-" + randomAlphabetic(5))
         return new Schema().setName("schema_avro-" + randomAlphabetic(5))
                 .setType(SchemaType.AVRO)
                 .setType(SchemaType.AVRO)
                 .setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schema_avro_value.json");
                 .setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schema_avro_value.json");
     }
     }
 
 
-    public static Schema createSchemaJson(){
+    public static Schema createSchemaJson() {
         return new Schema().setName("schema_json-" + randomAlphabetic(5))
         return new Schema().setName("schema_json-" + randomAlphabetic(5))
                 .setType(SchemaType.JSON)
                 .setType(SchemaType.JSON)
                 .setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schema_Json_Value.json");
                 .setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schema_Json_Value.json");
     }
     }
 
 
-    public static Schema createSchemaProtobuf(){
+    public static Schema createSchemaProtobuf() {
         return new Schema().setName("schema_protobuf-" + randomAlphabetic(5))
         return new Schema().setName("schema_protobuf-" + randomAlphabetic(5))
                 .setType(SchemaType.PROTOBUF)
                 .setType(SchemaType.PROTOBUF)
                 .setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schema_protobuf_value.txt");
                 .setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schema_protobuf_value.txt");

+ 6 - 3
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java

@@ -1,17 +1,20 @@
 package com.provectus.kafka.ui.models;
 package com.provectus.kafka.ui.models;
 
 
-import com.provectus.kafka.ui.pages.topic.enums.CleanupPolicyValue;
-import com.provectus.kafka.ui.pages.topic.enums.CustomParameterType;
-import com.provectus.kafka.ui.pages.topic.enums.MaxSizeOnDisk;
+import com.provectus.kafka.ui.pages.topics.enums.CleanupPolicyValue;
+import com.provectus.kafka.ui.pages.topics.enums.CustomParameterType;
+import com.provectus.kafka.ui.pages.topics.enums.MaxSizeOnDisk;
+import com.provectus.kafka.ui.pages.topics.enums.TimeToRetain;
 import lombok.Data;
 import lombok.Data;
 import lombok.experimental.Accessors;
 import lombok.experimental.Accessors;
 
 
 @Data
 @Data
 @Accessors(chain = true)
 @Accessors(chain = true)
 public class Topic {
 public class Topic {
+
     private String name, timeToRetainData, maxMessageBytes, messageKey, messageContent, customParameterValue;
     private String name, timeToRetainData, maxMessageBytes, messageKey, messageContent, customParameterValue;
     private int numberOfPartitions;
     private int numberOfPartitions;
     private CustomParameterType customParameterType;
     private CustomParameterType customParameterType;
     private CleanupPolicyValue cleanupPolicyValue;
     private CleanupPolicyValue cleanupPolicyValue;
     private MaxSizeOnDisk maxSizeOnDisk;
     private MaxSizeOnDisk maxSizeOnDisk;
+    private TimeToRetain timeToRetain;
 }
 }

+ 94 - 74
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java

@@ -1,89 +1,109 @@
 package com.provectus.kafka.ui.pages;
 package com.provectus.kafka.ui.pages;
 
 
-import static com.codeborne.selenide.Selenide.$$x;
-import static com.codeborne.selenide.Selenide.$x;
-
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.ElementsCollection;
 import com.codeborne.selenide.ElementsCollection;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.utilities.WebUtils;
 import com.provectus.kafka.ui.utilities.WebUtils;
 import lombok.extern.slf4j.Slf4j;
 import lombok.extern.slf4j.Slf4j;
 
 
+import java.time.Duration;
+
+import static com.codeborne.selenide.Selenide.$$x;
+import static com.codeborne.selenide.Selenide.$x;
+
 @Slf4j
 @Slf4j
 public abstract class BasePage extends WebUtils {
 public abstract class BasePage extends WebUtils {
 
 
-  protected SelenideElement loadingSpinner = $x("//*[contains(text(),'Loading')]");
-  protected SelenideElement submitBtn = $x("//button[@type='submit']");
-  protected SelenideElement tableGrid = $x("//table");
-  protected SelenideElement dotMenuBtn = $x("//button[@aria-label='Dropdown Toggle']");
-  protected SelenideElement alertHeader = $x("//div[@role='alert']//div[@role='heading']");
-  protected SelenideElement alertMessage = $x("//div[@role='alert']//div[@role='contentinfo']");
-  protected SelenideElement confirmBtn = $x("//button[contains(text(),'Confirm')]");
-  protected ElementsCollection allGridItems = $$x("//tr[@class]");
-  protected String summaryCellLocator = "//div[contains(text(),'%s')]";
-  protected String tableElementNameLocator = "//tbody//a[contains(text(),'%s')]";
-  protected String columnHeaderLocator = "//table//tr/th//div[text()='%s']";
-
-  protected void waitUntilSpinnerDisappear() {
-    log.debug("\nwaitUntilSpinnerDisappear");
-    loadingSpinner.shouldBe(Condition.disappear);
-  }
-
-  protected void clickSubmitBtn() {
-    clickByJavaScript(submitBtn);
-  }
-
-  protected SelenideElement getTableElement(String elementName) {
-    log.debug("\ngetTableElement: {}", elementName);
-    return $x(String.format(tableElementNameLocator, elementName));
-  }
-
-  protected String getAlertHeader() {
-    log.debug("\ngetAlertHeader");
-    String result = alertHeader.shouldBe(Condition.visible).getText();
-    log.debug("-> {}", result);
-    return result;
-  }
-
-  protected String getAlertMessage() {
-    log.debug("\ngetAlertMessage");
-    String result = alertMessage.shouldBe(Condition.visible).getText();
-    log.debug("-> {}", result);
-    return result;
-  }
-
-  protected boolean isAlertVisible(AlertHeader header) {
-    log.debug("\nisAlertVisible: {}", header.toString());
-    boolean result = getAlertHeader().equals(header.toString());
-    log.debug("-> {}", result);
-    return result;
-  }
-
-  protected boolean isAlertVisible(AlertHeader header, String message) {
-    log.debug("\nisAlertVisible: {} {}", header, message);
-    boolean result = isAlertVisible(header) && getAlertMessage().equals(message);
-    log.debug("-> {}", result);
-    return result;
-  }
-
-  protected void clickConfirmButton() {
-    confirmBtn.shouldBe(Condition.enabled).click();
-    confirmBtn.shouldBe(Condition.disappear);
-  }
-
-  public enum AlertHeader {
-    SUCCESS("Success"),
-    VALIDATION_ERROR("Validation Error"),
-    BAD_REQUEST("400 Bad Request");
-
-    private final String value;
-
-    AlertHeader(String value) {
-      this.value = value;
+    protected SelenideElement loadingSpinner = $x("//div[@role='progressbar']");
+    protected SelenideElement submitBtn = $x("//button[@type='submit']");
+    protected SelenideElement tableGrid = $x("//table");
+    protected SelenideElement dotMenuBtn = $x("//button[@aria-label='Dropdown Toggle']");
+    protected SelenideElement alertHeader = $x("//div[@role='alert']//div[@role='heading']");
+    protected SelenideElement alertMessage = $x("//div[@role='alert']//div[@role='contentinfo']");
+    protected SelenideElement confirmationMdl = $x("//div[text()= 'Confirm the action']/..");
+    protected SelenideElement confirmBtn = $x("//button[contains(text(),'Confirm')]");
+    protected SelenideElement cancelBtn = $x("//button[contains(text(),'Cancel')]");
+    protected ElementsCollection ddlOptions = $$x("//li[@value]");
+    protected ElementsCollection gridItems = $$x("//tr[@class]");
+    protected String summaryCellLocator = "//div[contains(text(),'%s')]";
+    protected String tableElementNameLocator = "//tbody//a[contains(text(),'%s')]";
+    protected String columnHeaderLocator = "//table//tr/th//div[text()='%s']";
+
+    protected void waitUntilSpinnerDisappear() {
+        log.debug("\nwaitUntilSpinnerDisappear");
+        if (isVisible(loadingSpinner)) {
+            loadingSpinner.shouldBe(Condition.disappear, Duration.ofSeconds(30));
+        }
+    }
+
+    protected void clickSubmitBtn() {
+        clickByJavaScript(submitBtn);
+    }
+
+    protected SelenideElement getTableElement(String elementName) {
+        log.debug("\ngetTableElement: {}", elementName);
+        return $x(String.format(tableElementNameLocator, elementName));
+    }
+
+    protected ElementsCollection getDdlOptions() {
+        return ddlOptions;
+    }
+
+    protected String getAlertHeader() {
+        log.debug("\ngetAlertHeader");
+        String result = alertHeader.shouldBe(Condition.visible).getText();
+        log.debug("-> {}", result);
+        return result;
     }
     }
 
 
-    public String toString() {
-      return value;
+    protected String getAlertMessage() {
+        log.debug("\ngetAlertMessage");
+        String result = alertMessage.shouldBe(Condition.visible).getText();
+        log.debug("-> {}", result);
+        return result;
+    }
+
+    protected boolean isAlertVisible(AlertHeader header) {
+        log.debug("\nisAlertVisible: {}", header.toString());
+        boolean result = getAlertHeader().equals(header.toString());
+        log.debug("-> {}", result);
+        return result;
+    }
+
+    protected boolean isAlertVisible(AlertHeader header, String message) {
+        log.debug("\nisAlertVisible: {} {}", header, message);
+        boolean result = isAlertVisible(header) && getAlertMessage().equals(message);
+        log.debug("-> {}", result);
+        return result;
+    }
+
+    protected void clickConfirmButton() {
+        confirmBtn.shouldBe(Condition.enabled).click();
+        confirmBtn.shouldBe(Condition.disappear);
+    }
+
+    protected void clickCancelButton() {
+        cancelBtn.shouldBe(Condition.enabled).click();
+        cancelBtn.shouldBe(Condition.disappear);
+    }
+
+    protected boolean isConfirmationModalVisible() {
+        return isVisible(confirmationMdl);
+    }
+
+    public enum AlertHeader {
+        SUCCESS("Success"),
+        VALIDATION_ERROR("Validation Error"),
+        BAD_REQUEST("400 Bad Request");
+
+        private final String value;
+
+        AlertHeader(String value) {
+            this.value = value;
+        }
+
+        public String toString() {
+            return value;
+        }
     }
     }
-  }
 }
 }

+ 11 - 10
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/NaviSideBar.java

@@ -1,16 +1,17 @@
 package com.provectus.kafka.ui.pages;
 package com.provectus.kafka.ui.pages;
 
 
-import static com.codeborne.selenide.Selenide.$x;
-import static com.provectus.kafka.ui.settings.BaseSource.CLUSTER_NAME;
-
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
+
 import java.time.Duration;
 import java.time.Duration;
 import java.util.List;
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import java.util.stream.Stream;
 
 
+import static com.codeborne.selenide.Selenide.$x;
+import static com.provectus.kafka.ui.settings.BaseSource.CLUSTER_NAME;
+
 public class NaviSideBar extends BasePage {
 public class NaviSideBar extends BasePage {
 
 
     protected SelenideElement dashboardMenuItem = $x("//a[@title='Dashboard']");
     protected SelenideElement dashboardMenuItem = $x("//a[@title='Dashboard']");
@@ -45,6 +46,13 @@ public class NaviSideBar extends BasePage {
         return this;
         return this;
     }
     }
 
 
+    public List<SelenideElement> getAllMenuButtons() {
+        expandCluster(CLUSTER_NAME);
+        return Stream.of(SideMenuOption.values())
+                .map(option -> $x(String.format(sideMenuOptionElementLocator, option.value)))
+                .collect(Collectors.toList());
+    }
+
     public enum SideMenuOption {
     public enum SideMenuOption {
         DASHBOARD("Dashboard"),
         DASHBOARD("Dashboard"),
         BROKERS("Brokers"),
         BROKERS("Brokers"),
@@ -60,11 +68,4 @@ public class NaviSideBar extends BasePage {
             this.value = value;
             this.value = value;
         }
         }
     }
     }
-
-    public List<SelenideElement> getAllMenuButtons() {
-        expandCluster(CLUSTER_NAME);
-        return Stream.of(SideMenuOption.values())
-                .map(option -> $x(String.format(sideMenuOptionElementLocator, option.value)))
-                .collect(Collectors.toList());
-    }
 }
 }

+ 5 - 3
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/TopPanel.java

@@ -1,12 +1,14 @@
 package com.provectus.kafka.ui.pages;
 package com.provectus.kafka.ui.pages;
 
 
-import static com.codeborne.selenide.Selenide.$x;
-
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
+
 import java.util.Arrays;
 import java.util.Arrays;
 import java.util.List;
 import java.util.List;
 
 
-public class TopPanel extends BasePage{
+import static com.codeborne.selenide.Selenide.$x;
+
+public class TopPanel extends BasePage {
+
     protected SelenideElement kafkaLogo = $x("//a[contains(text(),'UI for Apache Kafka')]");
     protected SelenideElement kafkaLogo = $x("//a[contains(text(),'UI for Apache Kafka')]");
     protected SelenideElement kafkaVersion = $x("//a[@title='Current commit']");
     protected SelenideElement kafkaVersion = $x("//a[@title='Current commit']");
     protected SelenideElement logOutBtn = $x("//button[contains(text(),'Log out')]");
     protected SelenideElement logOutBtn = $x("//button[contains(text(),'Log out')]");

+ 28 - 27
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java

@@ -1,40 +1,41 @@
 package com.provectus.kafka.ui.pages.brokers;
 package com.provectus.kafka.ui.pages.brokers;
 
 
-import static com.codeborne.selenide.Selenide.$$x;
-import static com.codeborne.selenide.Selenide.$x;
-
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
+
 import java.util.List;
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import java.util.stream.Stream;
 
 
+import static com.codeborne.selenide.Selenide.$$x;
+import static com.codeborne.selenide.Selenide.$x;
+
 public class BrokersConfigTab extends BasePage {
 public class BrokersConfigTab extends BasePage {
 
 
-  protected List<SelenideElement> editBtn = $$x("//button[@aria-label='editAction']");
-  protected SelenideElement searchByKeyField = $x("//input[@placeholder='Search by Key']");
-
-  @Step
-  public BrokersConfigTab waitUntilScreenReady(){
-    waitUntilSpinnerDisappear();
-    searchByKeyField.shouldBe(Condition.visible);
-    return this;
-  }
-
-  @Step
-  public boolean isSearchByKeyVisible() {
-   return isVisible(searchByKeyField);
-  }
-
-  public List<SelenideElement> getColumnHeaders() {
-    return Stream.of("Key", "Value", "Source")
-        .map(name -> $x(String.format(columnHeaderLocator, name)))
-        .collect(Collectors.toList());
-  }
-
-  public List<SelenideElement> getEditButtons() {
-    return editBtn;
-  }
+    protected List<SelenideElement> editBtn = $$x("//button[@aria-label='editAction']");
+    protected SelenideElement searchByKeyField = $x("//input[@placeholder='Search by Key']");
+
+    @Step
+    public BrokersConfigTab waitUntilScreenReady() {
+        waitUntilSpinnerDisappear();
+        searchByKeyField.shouldBe(Condition.visible);
+        return this;
+    }
+
+    @Step
+    public boolean isSearchByKeyVisible() {
+        return isVisible(searchByKeyField);
+    }
+
+    public List<SelenideElement> getColumnHeaders() {
+        return Stream.of("Key", "Value", "Source")
+                .map(name -> $x(String.format(columnHeaderLocator, name)))
+                .collect(Collectors.toList());
+    }
+
+    public List<SelenideElement> getEditButtons() {
+        return editBtn;
+    }
 }
 }

+ 74 - 73
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersDetails.java

@@ -1,91 +1,92 @@
 package com.provectus.kafka.ui.pages.brokers;
 package com.provectus.kafka.ui.pages.brokers;
 
 
-import static com.codeborne.selenide.Selenide.$;
-import static com.codeborne.selenide.Selenide.$x;
-
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
+import org.openqa.selenium.By;
+
 import java.util.ArrayList;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Arrays;
 import java.util.List;
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import java.util.stream.Stream;
-import org.openqa.selenium.By;
+
+import static com.codeborne.selenide.Selenide.$;
+import static com.codeborne.selenide.Selenide.$x;
 
 
 public class BrokersDetails extends BasePage {
 public class BrokersDetails extends BasePage {
 
 
-  protected SelenideElement logDirectoriesTab = $x("//a[text()='Log directories']");
-  protected SelenideElement metricsTab = $x("//a[text()='Metrics']");
-  protected String brokersTabLocator = "//a[text()='%s']";
-
-  @Step
-  public BrokersDetails waitUntilScreenReady() {
-    waitUntilSpinnerDisappear();
-    Arrays.asList(logDirectoriesTab, metricsTab).forEach(element -> element.shouldBe(Condition.visible));
-    return this;
-  }
-
-  @Step
-  public BrokersDetails openDetailsTab(DetailsTab menu) {
-    $(By.linkText(menu.toString())).shouldBe(Condition.enabled).click();
-    waitUntilSpinnerDisappear();
-    return this;
-  }
-
-  private List<SelenideElement> getVisibleColumnHeaders() {
-    return Stream.of("Name", "Topics", "Error", "Partitions")
-        .map(name -> $x(String.format(columnHeaderLocator, name)))
-        .collect(Collectors.toList());
-  }
-
-  private List<SelenideElement> getEnabledColumnHeaders() {
-    return Stream.of("Name", "Error")
-        .map(name -> $x(String.format(columnHeaderLocator, name)))
-        .collect(Collectors.toList());
-  }
-
-  private List<SelenideElement> getVisibleSummaryCells() {
-    return Stream.of("Segment Size", "Segment Count", "Port", "Host")
-        .map(name -> $x(String.format(summaryCellLocator, name)))
-        .collect(Collectors.toList());
-  }
-
-  private List<SelenideElement> getDetailsTabs() {
-    return Stream.of(DetailsTab.values())
-        .map(name -> $x(String.format(brokersTabLocator, name)))
-        .collect(Collectors.toList());
-  }
-
-  @Step
-  public List<SelenideElement> getAllEnabledElements() {
-    List<SelenideElement> enabledElements = new ArrayList<>(getEnabledColumnHeaders());
-    enabledElements.addAll(getDetailsTabs());
-    return enabledElements;
-  }
-
-  @Step
-  public List<SelenideElement> getAllVisibleElements() {
-    List<SelenideElement> visibleElements = new ArrayList<>(getVisibleSummaryCells());
-    visibleElements.addAll(getVisibleColumnHeaders());
-    visibleElements.addAll(getDetailsTabs());
-    return visibleElements;
-  }
-
-  public enum DetailsTab {
-    LOG_DIRECTORIES("Log directories"),
-    CONFIGS("Configs"),
-    METRICS("Metrics");
-
-    private final String value;
-
-    DetailsTab(String value) {
-      this.value = value;
+    protected SelenideElement logDirectoriesTab = $x("//a[text()='Log directories']");
+    protected SelenideElement metricsTab = $x("//a[text()='Metrics']");
+    protected String brokersTabLocator = "//a[text()='%s']";
+
+    @Step
+    public BrokersDetails waitUntilScreenReady() {
+        waitUntilSpinnerDisappear();
+        Arrays.asList(logDirectoriesTab, metricsTab).forEach(element -> element.shouldBe(Condition.visible));
+        return this;
+    }
+
+    @Step
+    public BrokersDetails openDetailsTab(DetailsTab menu) {
+        $(By.linkText(menu.toString())).shouldBe(Condition.enabled).click();
+        waitUntilSpinnerDisappear();
+        return this;
+    }
+
+    private List<SelenideElement> getVisibleColumnHeaders() {
+        return Stream.of("Name", "Topics", "Error", "Partitions")
+                .map(name -> $x(String.format(columnHeaderLocator, name)))
+                .collect(Collectors.toList());
     }
     }
 
 
-    public String toString() {
-      return value;
+    private List<SelenideElement> getEnabledColumnHeaders() {
+        return Stream.of("Name", "Error")
+                .map(name -> $x(String.format(columnHeaderLocator, name)))
+                .collect(Collectors.toList());
+    }
+
+    private List<SelenideElement> getVisibleSummaryCells() {
+        return Stream.of("Segment Size", "Segment Count", "Port", "Host")
+                .map(name -> $x(String.format(summaryCellLocator, name)))
+                .collect(Collectors.toList());
+    }
+
+    private List<SelenideElement> getDetailsTabs() {
+        return Stream.of(DetailsTab.values())
+                .map(name -> $x(String.format(brokersTabLocator, name)))
+                .collect(Collectors.toList());
+    }
+
+    @Step
+    public List<SelenideElement> getAllEnabledElements() {
+        List<SelenideElement> enabledElements = new ArrayList<>(getEnabledColumnHeaders());
+        enabledElements.addAll(getDetailsTabs());
+        return enabledElements;
+    }
+
+    @Step
+    public List<SelenideElement> getAllVisibleElements() {
+        List<SelenideElement> visibleElements = new ArrayList<>(getVisibleSummaryCells());
+        visibleElements.addAll(getVisibleColumnHeaders());
+        visibleElements.addAll(getDetailsTabs());
+        return visibleElements;
+    }
+
+    public enum DetailsTab {
+        LOG_DIRECTORIES("Log directories"),
+        CONFIGS("Configs"),
+        METRICS("Metrics");
+
+        private final String value;
+
+        DetailsTab(String value) {
+            this.value = value;
+        }
+
+        public String toString() {
+            return value;
+        }
     }
     }
-  }
 }
 }

+ 90 - 89
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java

@@ -1,123 +1,124 @@
 package com.provectus.kafka.ui.pages.brokers;
 package com.provectus.kafka.ui.pages.brokers;
 
 
-import static com.codeborne.selenide.Selenide.$x;
-
 import com.codeborne.selenide.CollectionCondition;
 import com.codeborne.selenide.CollectionCondition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
+
 import java.util.ArrayList;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import java.util.stream.Stream;
 
 
+import static com.codeborne.selenide.Selenide.$x;
+
 public class BrokersList extends BasePage {
 public class BrokersList extends BasePage {
 
 
-  protected SelenideElement brokersListHeader = $x("//h1[text()='Brokers']");
-
-  @Step
-  public BrokersList waitUntilScreenReady() {
-    waitUntilSpinnerDisappear();
-    brokersListHeader.shouldBe(Condition.visible);
-    return this;
-  }
-
-  @Step
-  public BrokersList openBroker(int brokerId) {
-    getBrokerItem(brokerId).openItem();
-    return this;
-  }
-
-  private List<SelenideElement> getUptimeSummaryCells() {
-    return Stream.of("Broker Count", "Active Controller", "Version")
-        .map(name -> $x(String.format(summaryCellLocator, name)))
-        .collect(Collectors.toList());
-  }
-
-  private List<SelenideElement> getPartitionsSummaryCells() {
-    return Stream.of("Online", "URP", "In Sync Replicas", "Out Of Sync Replicas")
-        .map(name -> $x(String.format(summaryCellLocator, name)))
-        .collect(Collectors.toList());
-  }
-
-  @Step
-  public List<SelenideElement> getAllVisibleElements() {
-    List<SelenideElement> visibleElements = new ArrayList<>(getUptimeSummaryCells());
-    visibleElements.addAll(getPartitionsSummaryCells());
-    return visibleElements;
-  }
-
-  private List<SelenideElement> getEnabledColumnHeaders() {
-    return Stream.of("Broker ID", "Segment Size", "Segment Count", "Port", "Host")
-        .map(name -> $x(String.format(columnHeaderLocator, name)))
-        .collect(Collectors.toList());
-  }
-
-  @Step
-  public List<SelenideElement> getAllEnabledElements() {
-    return getEnabledColumnHeaders();
-  }
-
-  private List<BrokersList.BrokerGridItem> initGridItems() {
-    List<BrokersList.BrokerGridItem> gridItemList = new ArrayList<>();
-    allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
-        .forEach(item -> gridItemList.add(new BrokersList.BrokerGridItem(item)));
-    return gridItemList;
-  }
-
-  @Step
-  public BrokerGridItem getBrokerItem(int id){
-    return initGridItems().stream()
-        .filter(e ->e.getId() == id)
-        .findFirst().orElse(null);
-  }
-
-  @Step
-  public List<BrokerGridItem> getAllBrokers(){
-    return initGridItems();
-  }
-
-  public static class BrokerGridItem extends BasePage {
-
-    private final SelenideElement element;
-
-    public BrokerGridItem(SelenideElement element) {
-      this.element = element;
-    }
+    protected SelenideElement brokersListHeader = $x("//h1[text()='Brokers']");
 
 
-    private SelenideElement getIdElm() {
-      return element.$x("./td[1]/div/a");
+    @Step
+    public BrokersList waitUntilScreenReady() {
+        waitUntilSpinnerDisappear();
+        brokersListHeader.shouldBe(Condition.visible);
+        return this;
     }
     }
 
 
     @Step
     @Step
-    public int getId() {
-      return Integer.parseInt(getIdElm().getText().trim());
+    public BrokersList openBroker(int brokerId) {
+        getBrokerItem(brokerId).openItem();
+        return this;
     }
     }
 
 
-    @Step
-    public void openItem() {
-      getIdElm().click();
+    private List<SelenideElement> getUptimeSummaryCells() {
+        return Stream.of("Broker Count", "Active Controller", "Version")
+                .map(name -> $x(String.format(summaryCellLocator, name)))
+                .collect(Collectors.toList());
+    }
+
+    private List<SelenideElement> getPartitionsSummaryCells() {
+        return Stream.of("Online", "URP", "In Sync Replicas", "Out Of Sync Replicas")
+                .map(name -> $x(String.format(summaryCellLocator, name)))
+                .collect(Collectors.toList());
     }
     }
 
 
     @Step
     @Step
-    public int getSegmentSize(){
-      return Integer.parseInt(element.$x("./td[2]").getText().trim());
+    public List<SelenideElement> getAllVisibleElements() {
+        List<SelenideElement> visibleElements = new ArrayList<>(getUptimeSummaryCells());
+        visibleElements.addAll(getPartitionsSummaryCells());
+        return visibleElements;
+    }
+
+    private List<SelenideElement> getEnabledColumnHeaders() {
+        return Stream.of("Broker ID", "Segment Size", "Segment Count", "Port", "Host")
+                .map(name -> $x(String.format(columnHeaderLocator, name)))
+                .collect(Collectors.toList());
     }
     }
 
 
     @Step
     @Step
-    public int getSegmentCount(){
-      return Integer.parseInt(element.$x("./td[3]").getText().trim());
+    public List<SelenideElement> getAllEnabledElements() {
+        return getEnabledColumnHeaders();
+    }
+
+    private List<BrokersList.BrokerGridItem> initGridItems() {
+        List<BrokersList.BrokerGridItem> gridItemList = new ArrayList<>();
+        gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+                .forEach(item -> gridItemList.add(new BrokersList.BrokerGridItem(item)));
+        return gridItemList;
     }
     }
 
 
     @Step
     @Step
-    public int getPort(){
-      return Integer.parseInt(element.$x("./td[4]").getText().trim());
+    public BrokerGridItem getBrokerItem(int id) {
+        return initGridItems().stream()
+                .filter(e -> e.getId() == id)
+                .findFirst().orElseThrow();
     }
     }
 
 
     @Step
     @Step
-    public String getHost(){
-      return element.$x("./td[5]").getText().trim();
+    public List<BrokerGridItem> getAllBrokers() {
+        return initGridItems();
+    }
+
+    public static class BrokerGridItem extends BasePage {
+
+        private final SelenideElement element;
+
+        public BrokerGridItem(SelenideElement element) {
+            this.element = element;
+        }
+
+        private SelenideElement getIdElm() {
+            return element.$x("./td[1]/div/a");
+        }
+
+        @Step
+        public int getId() {
+            return Integer.parseInt(getIdElm().getText().trim());
+        }
+
+        @Step
+        public void openItem() {
+            getIdElm().click();
+        }
+
+        @Step
+        public int getSegmentSize() {
+            return Integer.parseInt(element.$x("./td[2]").getText().trim());
+        }
+
+        @Step
+        public int getSegmentCount() {
+            return Integer.parseInt(element.$x("./td[3]").getText().trim());
+        }
+
+        @Step
+        public int getPort() {
+            return Integer.parseInt(element.$x("./td[4]").getText().trim());
+        }
+
+        @Step
+        public String getHost() {
+            return element.$x("./td[5]").getText().trim();
+        }
     }
     }
-  }
 }
 }

+ 6 - 6
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connector/ConnectorCreateForm.java → kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/ConnectorCreateForm.java

@@ -1,12 +1,12 @@
-package com.provectus.kafka.ui.pages.connector;
-
-import static com.codeborne.selenide.Selenide.$x;
+package com.provectus.kafka.ui.pages.connectors;
 
 
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
 
 
+import static com.codeborne.selenide.Selenide.$x;
+
 public class ConnectorCreateForm extends BasePage {
 public class ConnectorCreateForm extends BasePage {
 
 
     protected SelenideElement nameField = $x("//input[@name='name']");
     protected SelenideElement nameField = $x("//input[@name='name']");
@@ -31,8 +31,8 @@ public class ConnectorCreateForm extends BasePage {
 
 
     @Step
     @Step
     public ConnectorCreateForm clickSubmitButton() {
     public ConnectorCreateForm clickSubmitButton() {
-      clickSubmitBtn();
-      waitUntilSpinnerDisappear();
-      return this;
+        clickSubmitBtn();
+        waitUntilSpinnerDisappear();
+        return this;
     }
     }
 }
 }

+ 21 - 21
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connector/ConnectorDetails.java → kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/ConnectorDetails.java

@@ -1,28 +1,28 @@
-package com.provectus.kafka.ui.pages.connector;
-
-import static com.codeborne.selenide.Selenide.$x;
+package com.provectus.kafka.ui.pages.connectors;
 
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 
+import static com.codeborne.selenide.Selenide.$x;
+
 public class ConnectorDetails extends BasePage {
 
-  protected SelenideElement deleteBtn = $x("//li/div[contains(text(),'Delete')]");
-  protected SelenideElement confirmBtnMdl = $x("//div[@role='dialog']//button[contains(text(),'Confirm')]");
-  protected SelenideElement contentTextArea = $x("//textarea[@class='ace_text-input']");
-  protected SelenideElement taskTab = $x("//a[contains(text(),'Tasks')]");
-  protected SelenideElement configTab = $x("//a[contains(text(),'Config')]");
-  protected SelenideElement configField = $x("//div[@id='config']");
-  protected String connectorHeaderLocator = "//h1[contains(text(),'%s')]";
+    protected SelenideElement deleteBtn = $x("//li/div[contains(text(),'Delete')]");
+    protected SelenideElement confirmBtnMdl = $x("//div[@role='dialog']//button[contains(text(),'Confirm')]");
+    protected SelenideElement contentTextArea = $x("//textarea[@class='ace_text-input']");
+    protected SelenideElement taskTab = $x("//a[contains(text(),'Tasks')]");
+    protected SelenideElement configTab = $x("//a[contains(text(),'Config')]");
+    protected SelenideElement configField = $x("//div[@id='config']");
+    protected String connectorHeaderLocator = "//h1[contains(text(),'%s')]";
 
-  @Step
-  public ConnectorDetails waitUntilScreenReady() {
-    waitUntilSpinnerDisappear();
-    dotMenuBtn.shouldBe(Condition.visible);
-    return this;
-  }
+    @Step
+    public ConnectorDetails waitUntilScreenReady() {
+        waitUntilSpinnerDisappear();
+        dotMenuBtn.shouldBe(Condition.visible);
+        return this;
+    }
 
     @Step
     public ConnectorDetails openConfigTab() {
@@ -41,8 +41,8 @@ public class ConnectorDetails extends BasePage {
 
     @Step
     public ConnectorDetails clickSubmitButton() {
-      clickSubmitBtn();
-      return this;
+        clickSubmitBtn();
+        return this;
     }
 
     @Step
@@ -74,11 +74,11 @@ public class ConnectorDetails extends BasePage {
 
     @Step
     public boolean isConnectorHeaderVisible(String connectorName) {
-        return isVisible($x(String.format(connectorHeaderLocator,connectorName)));
+        return isVisible($x(String.format(connectorHeaderLocator, connectorName)));
     }
 
     @Step
-    public boolean isAlertWithMessageVisible(AlertHeader header, String message){
-      return isAlertVisible(header, message);
+    public boolean isAlertWithMessageVisible(AlertHeader header, String message) {
+        return isAlertVisible(header, message);
     }
 }

+ 8 - 8
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connector/KafkaConnectList.java → kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/KafkaConnectList.java

@@ -1,26 +1,26 @@
-package com.provectus.kafka.ui.pages.connector;
-
-import static com.codeborne.selenide.Selenide.$x;
+package com.provectus.kafka.ui.pages.connectors;
 
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 
+import static com.codeborne.selenide.Selenide.$x;
+
 
 public class KafkaConnectList extends BasePage {
 
     protected SelenideElement createConnectorBtn = $x("//button[contains(text(),'Create Connector')]");
 
-    public KafkaConnectList(){
+    public KafkaConnectList() {
         tableElementNameLocator = "//tbody//td[contains(text(),'%s')]";
     }
 
     @Step
     public KafkaConnectList waitUntilScreenReady() {
-      waitUntilSpinnerDisappear();
-      createConnectorBtn.shouldBe(Condition.visible);
-      return this;
+        waitUntilSpinnerDisappear();
+        createConnectorBtn.shouldBe(Condition.visible);
+        return this;
     }
 
     @Step
@@ -31,7 +31,7 @@ public class KafkaConnectList extends BasePage {
 
     @Step
     public KafkaConnectList openConnector(String connectorName) {
-      getTableElement(connectorName).shouldBe(Condition.enabled).click();
+        getTableElement(connectorName).shouldBe(Condition.enabled).click();
         return this;
     }
 

+ 3 - 3
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumer/ConsumersDetails.java → kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersDetails.java

@@ -1,11 +1,11 @@
-package com.provectus.kafka.ui.pages.consumer;
-
-import static com.codeborne.selenide.Selenide.$x;
+package com.provectus.kafka.ui.pages.consumers;
 
 import com.codeborne.selenide.Condition;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 
+import static com.codeborne.selenide.Selenide.$x;
+
 public class ConsumersDetails extends BasePage {
 
     protected String consumerIdHeaderLocator = "//h1[contains(text(),'%s')]";

+ 4 - 3
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumer/ConsumersList.java → kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java

@@ -1,9 +1,10 @@
-package com.provectus.kafka.ui.pages.consumer;
-
-import static com.codeborne.selenide.Selenide.$x;
+package com.provectus.kafka.ui.pages.consumers;
 
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
+
+import static com.codeborne.selenide.Selenide.$x;
+
 public class ConsumersList extends BasePage {
 
     protected SelenideElement consumerListHeader = $x("//h1[text()='Consumers']");

+ 137 - 0
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java

@@ -0,0 +1,137 @@
+package com.provectus.kafka.ui.pages.ksqlDb;
+
+import static com.codeborne.selenide.Selenide.$;
+import static com.codeborne.selenide.Selenide.$x;
+
+import com.codeborne.selenide.CollectionCondition;
+import com.codeborne.selenide.Condition;
+import com.codeborne.selenide.SelenideElement;
+import com.provectus.kafka.ui.pages.BasePage;
+import com.provectus.kafka.ui.pages.ksqlDb.enums.KsqlMenuTabs;
+import io.qameta.allure.Step;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import org.openqa.selenium.By;
+
+public class KsqlDbList extends BasePage {
+  protected SelenideElement executeKsqlBtn = $x("//button[text()='Execute KSQL Request']");
+  protected SelenideElement tablesTab = $x("//nav[@role='navigation']/a[text()='Tables']");
+  protected SelenideElement streamsTab = $x("//nav[@role='navigation']/a[text()='Streams']");
+
+  @Step
+  public KsqlDbList waitUntilScreenReady() {
+    waitUntilSpinnerDisappear();
+    Arrays.asList(tablesTab, streamsTab).forEach(tab -> tab.shouldBe(Condition.visible));
+    return this;
+  }
+
+  @Step
+  public KsqlDbList clickExecuteKsqlRequestBtn() {
+    clickByJavaScript(executeKsqlBtn);
+    return this;
+  }
+
+  @Step
+  public KsqlDbList openDetailsTab(KsqlMenuTabs menu) {
+    $(By.linkText(menu.toString())).shouldBe(Condition.visible).click();
+    waitUntilSpinnerDisappear();
+    return this;
+  }
+
+  private List<KsqlDbList.KsqlTablesGridItem> initTablesItems() {
+    List<KsqlDbList.KsqlTablesGridItem> gridItemList = new ArrayList<>();
+    gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+        .forEach(item -> gridItemList.add(new KsqlDbList.KsqlTablesGridItem(item)));
+    return gridItemList;
+  }
+
+  @Step
+  public KsqlDbList.KsqlTablesGridItem getTableByName(String tableName) {
+    return initTablesItems().stream()
+        .filter(e -> e.getTableName().equals(tableName))
+        .findFirst().orElseThrow();
+  }
+
+  public static class KsqlTablesGridItem extends BasePage {
+
+    private final SelenideElement element;
+
+    public KsqlTablesGridItem(SelenideElement element) {
+      this.element = element;
+    }
+
+    @Step
+    public String getTableName() {
+      return element.$x("./td[1]").getText().trim();
+    }
+
+    @Step
+    public String getTopicName() {
+      return element.$x("./td[2]").getText().trim();
+    }
+
+    @Step
+    public String getKeyFormat() {
+      return element.$x("./td[3]").getText().trim();
+    }
+
+    @Step
+    public String getValueFormat() {
+      return element.$x("./td[4]").getText().trim();
+    }
+
+    @Step
+    public String getIsWindowed() {
+      return element.$x("./td[5]").getText().trim();
+    }
+  }
+
+  private List<KsqlDbList.KsqlStreamsGridItem> initStreamsItems() {
+    List<KsqlDbList.KsqlStreamsGridItem> gridItemList = new ArrayList<>();
+    gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+        .forEach(item -> gridItemList.add(new KsqlDbList.KsqlStreamsGridItem(item)));
+    return gridItemList;
+  }
+
+  @Step
+  public KsqlDbList.KsqlStreamsGridItem getStreamByName(String streamName) {
+    return initStreamsItems().stream()
+        .filter(e -> e.getStreamName().equals(streamName))
+        .findFirst().orElseThrow();
+  }
+
+  public static class KsqlStreamsGridItem extends BasePage {
+
+    private final SelenideElement element;
+
+    public KsqlStreamsGridItem(SelenideElement element) {
+      this.element = element;
+    }
+
+    @Step
+    public String getStreamName() {
+      return element.$x("./td[1]").getText().trim();
+    }
+
+    @Step
+    public String getTopicName() {
+      return element.$x("./td[2]").getText().trim();
+    }
+
+    @Step
+    public String getKeyFormat() {
+      return element.$x("./td[3]").getText().trim();
+    }
+
+    @Step
+    public String getValueFormat() {
+      return element.$x("./td[4]").getText().trim();
+    }
+
+    @Step
+    public String getIsWindowed() {
+      return element.$x("./td[5]").getText().trim();
+    }
+  }
+}
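For orientation, here is a minimal usage sketch of the new KsqlDbList page object as a test might drive it; the Selenide/browser setup, the prior navigation to the ksqlDB section, and the table name "USERS_TABLE" are assumptions for illustration, not part of this change.

import com.provectus.kafka.ui.pages.ksqlDb.KsqlDbList;
import com.provectus.kafka.ui.pages.ksqlDb.enums.KsqlMenuTabs;

public class KsqlDbListUsageSketch {

    public static void main(String[] args) {
        // Assumes Selenide is configured and the browser is already on the ksqlDB page.
        KsqlDbList ksqlDbList = new KsqlDbList();
        ksqlDbList.waitUntilScreenReady()
            .openDetailsTab(KsqlMenuTabs.TABLES);
        // Read one row of the tables grid by its name (hypothetical table name).
        KsqlDbList.KsqlTablesGridItem row = ksqlDbList.getTableByName("USERS_TABLE");
        System.out.println(row.getTopicName() + " / " + row.getValueFormat());
    }
}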

+ 155 - 0
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java

@@ -0,0 +1,155 @@
+package com.provectus.kafka.ui.pages.ksqlDb;
+
+import com.codeborne.selenide.CollectionCondition;
+import com.codeborne.selenide.Condition;
+import com.codeborne.selenide.ElementsCollection;
+import com.codeborne.selenide.SelenideElement;
+import com.provectus.kafka.ui.pages.BasePage;
+import io.qameta.allure.Step;
+
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.List;
+
+import static com.codeborne.selenide.Condition.visible;
+import static com.codeborne.selenide.Selenide.$$x;
+import static com.codeborne.selenide.Selenide.$x;
+
+public class KsqlQueryForm extends BasePage {
+    protected SelenideElement pageTitle = $x("//h1[text()='Query']");
+    protected SelenideElement clearBtn = $x("//div/button[text()='Clear']");
+    protected SelenideElement executeBtn = $x("//div/button[text()='Execute']");
+    protected SelenideElement stopQueryBtn = $x("//div/button[text()='Stop query']");
+    protected SelenideElement clearResultsBtn = $x("//div/button[text()='Clear results']");
+    protected SelenideElement addStreamPropertyBtn = $x("//button[text()='Add Stream Property']");
+    protected SelenideElement queryAreaValue = $x("//div[@class='ace_content']");
+    protected SelenideElement queryArea = $x("//div[@id='ksql']/textarea[@class='ace_text-input']");
+    protected ElementsCollection ksqlGridItems = $$x("//tbody//tr");
+    protected ElementsCollection keyField = $$x("//input[@aria-label='key']");
+    protected ElementsCollection valueField = $$x("//input[@aria-label='value']");
+
+    @Step
+    public KsqlQueryForm waitUntilScreenReady() {
+        waitUntilSpinnerDisappear();
+        pageTitle.shouldBe(Condition.visible);
+        return this;
+    }
+
+    @Step
+    public KsqlQueryForm clickClearBtn() {
+        clickByJavaScript(clearBtn);
+        return this;
+    }
+
+    @Step
+    public KsqlQueryForm clickExecuteBtn() {
+        clickByActions(executeBtn);
+        if (queryAreaValue.getText().contains("EMIT CHANGES;")) {
+            loadingSpinner.shouldBe(Condition.visible);
+        } else {
+            waitUntilSpinnerDisappear();
+        }
+        return this;
+    }
+
+    @Step
+    public KsqlQueryForm clickStopQueryBtn() {
+        clickByActions(stopQueryBtn);
+        waitUntilSpinnerDisappear();
+        return this;
+    }
+
+    @Step
+    public KsqlQueryForm clickClearResultsBtn() {
+        clickByActions(clearResultsBtn);
+        waitUntilSpinnerDisappear();
+        return this;
+    }
+
+    @Step
+    public KsqlQueryForm clickAddStreamProperty() {
+        clickByJavaScript(addStreamPropertyBtn);
+        return this;
+    }
+
+    @Step
+    public KsqlQueryForm setQuery(String query) {
+        queryAreaValue.shouldBe(Condition.visible).click();
+        queryArea.setValue(query);
+        return this;
+    }
+
+    @Step
+    public KsqlQueryForm.KsqlResponseGridItem getTableByName(String name) {
+        return initItems().stream()
+                .filter(e -> e.getName().equalsIgnoreCase(name))
+                .findFirst().orElseThrow();
+    }
+
+    @Step
+    public boolean areResultsVisible() {
+        boolean visible = false;
+        try {
+            visible = initItems().size() > 0;
+        } catch (Throwable ignored) {
+        }
+        return visible;
+    }
+
+    private List<KsqlQueryForm.KsqlResponseGridItem> initItems() {
+        List<KsqlQueryForm.KsqlResponseGridItem> gridItemList = new ArrayList<>();
+        ksqlGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+                .forEach(item -> gridItemList.add(new KsqlQueryForm.KsqlResponseGridItem(item)));
+        return gridItemList;
+    }
+
+    public static class KsqlResponseGridItem extends BasePage {
+
+        private final SelenideElement element;
+
+        private KsqlResponseGridItem(SelenideElement element) {
+            this.element = element;
+        }
+
+        @Step
+        public String getType() {
+            return element.$x("./td[1]").getText().trim();
+        }
+
+        @Step
+        public String getName() {
+            return element.$x("./td[2]").scrollTo().getText().trim();
+        }
+
+        @Step
+        public boolean isVisible() {
+            boolean isVisible = false;
+            try {
+                element.$x("./td[2]").shouldBe(visible, Duration.ofMillis(500));
+                isVisible = true;
+            } catch (Throwable ignored) {
+            }
+            return isVisible;
+        }
+
+        @Step
+        public String getTopic() {
+            return element.$x("./td[3]").getText().trim();
+        }
+
+        @Step
+        public String getKeyFormat() {
+            return element.$x("./td[4]").getText().trim();
+        }
+
+        @Step
+        public String getValueFormat() {
+            return element.$x("./td[5]").getText().trim();
+        }
+
+        @Step
+        public String getIsWindowed() {
+            return element.$x("./td[6]").getText().trim();
+        }
+    }
+}
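As a rough illustration (not part of the diff itself), a test could combine KsqlQueryForm with the KsqlQueryConfig enum introduced later in this change to submit a statement and check that the response grid rendered; the navigation to the Query page is assumed.

import com.provectus.kafka.ui.pages.ksqlDb.KsqlQueryForm;
import com.provectus.kafka.ui.pages.ksqlDb.enums.KsqlQueryConfig;

public class KsqlQueryFormUsageSketch {

    public static void main(String[] args) {
        // Assumes the ksqlDB Query page is already open in a configured Selenide session.
        KsqlQueryForm queryForm = new KsqlQueryForm();
        queryForm.waitUntilScreenReady()
            .setQuery(KsqlQueryConfig.SHOW_TABLES.getQuery()) // "show tables;"
            .clickExecuteBtn();
        // areResultsVisible() swallows the lookup timeout and reports whether any rows rendered.
        System.out.println("Rows returned: " + queryForm.areResultsVisible());
    }
}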

+ 17 - 0
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/enums/KsqlMenuTabs.java

@@ -0,0 +1,17 @@
+package com.provectus.kafka.ui.pages.ksqlDb.enums;
+
+public enum KsqlMenuTabs {
+
+    TABLES("Table"),
+    STREAMS("Streams");
+
+    private final String value;
+
+    KsqlMenuTabs(String value) {
+        this.value = value;
+    }
+
+    public String toString() {
+        return value;
+    }
+}

+ 19 - 0
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/enums/KsqlQueryConfig.java

@@ -0,0 +1,19 @@
+package com.provectus.kafka.ui.pages.ksqlDb.enums;
+
+public enum KsqlQueryConfig {
+
+    SHOW_TABLES("show tables;"),
+    SHOW_STREAMS("show streams;"),
+    SELECT_ALL_FROM("SELECT * FROM %s\n" +
+            "EMIT CHANGES;");
+
+    private final String query;
+
+    KsqlQueryConfig(String query) {
+        this.query = query;
+    }
+
+    public String getQuery() {
+        return query;
+    }
+}
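Note that SELECT_ALL_FROM carries a %s placeholder, so callers are expected to substitute the stream or table name themselves; a small sketch (the stream name is hypothetical):

import com.provectus.kafka.ui.pages.ksqlDb.enums.KsqlQueryConfig;

public class KsqlQueryConfigSketch {

    public static void main(String[] args) {
        // Fill the %s placeholder with a concrete stream/table name.
        String query = String.format(KsqlQueryConfig.SELECT_ALL_FROM.getQuery(), "FIRST_STREAM");
        // Prints "SELECT * FROM FIRST_STREAM" and "EMIT CHANGES;" on two lines.
        System.out.println(query);
    }
}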

+ 11 - 0
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/models/Stream.java

@@ -0,0 +1,11 @@
+package com.provectus.kafka.ui.pages.ksqlDb.models;
+
+import lombok.Data;
+import lombok.experimental.Accessors;
+
+@Data
+@Accessors(chain = true)
+public class Stream {
+
+    private String name, topicName, valueFormat, partitions;
+}

+ 11 - 0
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/models/Table.java

@@ -0,0 +1,11 @@
+package com.provectus.kafka.ui.pages.ksqlDb.models;
+
+import lombok.Data;
+import lombok.experimental.Accessors;
+
+@Data
+@Accessors(chain = true)
+public class Table {
+
+    private String name, streamName;
+}
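Both of these models combine Lombok's @Data with @Accessors(chain = true), so the generated setters return the instance and can be chained when building test data; a brief sketch with made-up values:

import com.provectus.kafka.ui.pages.ksqlDb.models.Stream;
import com.provectus.kafka.ui.pages.ksqlDb.models.Table;

public class KsqlModelsSketch {

    public static void main(String[] args) {
        // Chained setters are generated by Lombok because of @Accessors(chain = true).
        Stream stream = new Stream()
            .setName("MY_STREAM")
            .setTopicName("my_topic")
            .setValueFormat("JSON")
            .setPartitions("1");
        Table table = new Table()
            .setName("MY_TABLE")
            .setStreamName(stream.getName());
        System.out.println(table.getName() + " is built from " + table.getStreamName());
    }
}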

+ 22 - 24
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schema/SchemaCreateForm.java → kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaCreateForm.java

@@ -1,8 +1,4 @@
-package com.provectus.kafka.ui.pages.schema;
-
-import static com.codeborne.selenide.Selenide.$;
-import static com.codeborne.selenide.Selenide.$$x;
-import static com.codeborne.selenide.Selenide.$x;
+package com.provectus.kafka.ui.pages.schemas;
 
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
@@ -10,10 +6,13 @@ import com.provectus.kafka.ui.api.model.CompatibilityLevel;
 import com.provectus.kafka.ui.api.model.SchemaType;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
+
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
+import static com.codeborne.selenide.Selenide.*;
+
 public class SchemaCreateForm extends BasePage {
 
     protected SelenideElement schemaNameField = $x("//input[@name='subject']");
@@ -30,7 +29,7 @@ public class SchemaCreateForm extends BasePage {
     protected String ddlElementLocator = "//li[@value='%s']";
 
     @Step
-    public SchemaCreateForm waitUntilScreenReady(){
+    public SchemaCreateForm waitUntilScreenReady() {
         waitUntilSpinnerDisappear();
         pageTitle.shouldBe(Condition.visible);
         return this;
@@ -69,25 +68,25 @@
     }
 
     @Step
-    public SchemaCreateForm openSchemaVersionDdl(){
-      schemaVersionDdl.shouldBe(Condition.enabled).click();
-      return this;
+    public SchemaCreateForm openSchemaVersionDdl() {
+        schemaVersionDdl.shouldBe(Condition.enabled).click();
+        return this;
     }
 
     @Step
-    public int getVersionsNumberFromList(){
-      return elementsCompareVersionDdl.size();
+    public int getVersionsNumberFromList() {
+        return elementsCompareVersionDdl.size();
     }
 
     @Step
-    public SchemaCreateForm selectVersionFromDropDown(int versionNumberDd){
-      $x(String.format(ddlElementLocator,versionNumberDd)).shouldBe(Condition.visible).click();
-      return this;
+    public SchemaCreateForm selectVersionFromDropDown(int versionNumberDd) {
+        $x(String.format(ddlElementLocator, versionNumberDd)).shouldBe(Condition.visible).click();
+        return this;
     }
 
     @Step
-    public int getMarkedLinesNumber(){
-      return visibleMarkers.size();
+    public int getMarkedLinesNumber() {
+        return visibleMarkers.size();
     }
 
     @Step
@@ -100,23 +99,22 @@ public class SchemaCreateForm extends BasePage {
 
     @Step
     public List<SelenideElement> getAllDetailsPageElements() {
-      return Stream.of(compatibilityLevelList, newSchemaTextArea, latestSchemaTextArea, submitBtn, schemaTypeDdl)
-          .collect(Collectors.toList());
+        return Stream.of(compatibilityLevelList, newSchemaTextArea, latestSchemaTextArea, submitBtn, schemaTypeDdl)
+                .collect(Collectors.toList());
     }
 
     @Step
-    public boolean isSubmitBtnEnabled(){
-      return isEnabled(submitBtn);
+    public boolean isSubmitBtnEnabled() {
+        return isEnabled(submitBtn);
     }
 
     @Step
-    public boolean isSchemaDropDownEnabled(){
+    public boolean isSchemaDropDownEnabled() {
         boolean enabled = true;
-        try{
+        try {
             String attribute = schemaTypeDdl.getAttribute("disabled");
             enabled = false;
-        }
-        catch (Throwable ignored){
+        } catch (Throwable ignored) {
         }
         return enabled;
     }

+ 10 - 10
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schema/SchemaDetails.java → kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaDetails.java

@@ -1,12 +1,12 @@
-package com.provectus.kafka.ui.pages.schema;
-
-import static com.codeborne.selenide.Selenide.$x;
+package com.provectus.kafka.ui.pages.schemas;
 
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 
+import static com.codeborne.selenide.Selenide.$x;
+
 public class SchemaDetails extends BasePage {
 
     protected SelenideElement actualVersionTextArea = $x("//div[@id='schema']");
@@ -33,12 +33,12 @@ public class SchemaDetails extends BasePage {
 
     @Step
     public boolean isSchemaHeaderVisible(String schemaName) {
-        return isVisible($x(String.format(schemaHeaderLocator,schemaName)));
+        return isVisible($x(String.format(schemaHeaderLocator, schemaName)));
     }
 
     @Step
-    public int getLatestVersion(){
-      return Integer.parseInt(latestVersionField.getText());
+    public int getLatestVersion() {
+        return Integer.parseInt(latestVersionField.getText());
    }
 
     @Step
@@ -47,15 +47,15 @@
     }
 
     @Step
-    public SchemaDetails openEditSchema(){
+    public SchemaDetails openEditSchema() {
         editSchemaBtn.shouldBe(Condition.visible).click();
         return this;
     }
 
     @Step
-    public SchemaDetails openCompareVersionMenu(){
-      compareVersionBtn.shouldBe(Condition.enabled).click();
-      return this;
+    public SchemaDetails openCompareVersionMenu() {
+        compareVersionBtn.shouldBe(Condition.enabled).click();
+        return this;
     }
 
     @Step

Some files were not shown because too many files changed in this diff