Browse Source

Merge branch 'master' into vlad/develop

VladSenyuta 2 years ago
parent
commit
ed35154edf
100 changed files with 2491 additions and 2498 deletions
  1. 36 0
      .devcontainer/devcontainer.json
  2. 3 2
      .github/workflows/e2e-automation.yml
  3. 8 7
      .github/workflows/e2e-checks.yaml
  4. 3 2
      .github/workflows/e2e-weekly.yml
  5. 4 2
      CONTRIBUTING.md
  6. 54 131
      README.md
  7. 2 2
      charts/kafka-ui/Chart.yaml
  8. 1 34
      charts/kafka-ui/README.md
  9. 0 43
      docker-compose.md
  10. 27 27
      documentation/compose/e2e-tests.yaml
  11. 0 41
      documentation/guides/AWS_IAM.md
  12. 0 123
      documentation/guides/DataMasking.md
  13. 0 55
      documentation/guides/Protobuf.md
  14. 0 58
      documentation/guides/SASL_SCRAM.md
  15. 0 7
      documentation/guides/SECURE_BROKER.md
  16. 0 71
      documentation/guides/SSO.md
  17. 0 167
      documentation/guides/Serialization.md
  18. 0 22
      documentation/project/ROADMAP.md
  19. 0 8
      documentation/project/contributing/README.md
  20. 0 24
      documentation/project/contributing/building-and-running-without-docker.md
  21. 0 63
      documentation/project/contributing/building.md
  22. 0 42
      documentation/project/contributing/prerequisites.md
  23. 0 8
      documentation/project/contributing/set-up-git.md
  24. 0 28
      documentation/project/contributing/testing.md
  25. 333 0
      etc/checkstyle/checkstyle-e2e.xml
  26. 2 2
      etc/checkstyle/checkstyle.xml
  27. 0 65
      helm_chart.md
  28. 2 1
      kafka-ui-api/Dockerfile
  29. 177 35
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/RetryingKafkaConnectClient.java
  30. 1 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
  31. 1 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java
  32. 3 10
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java
  33. 4 6
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java
  34. 14 30
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/AbstractEmitter.java
  35. 4 6
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/BackwardRecordEmitter.java
  36. 4 4
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ConsumingStats.java
  37. 4 8
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ForwardRecordEmitter.java
  38. 0 16
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/MessageFilterStats.java
  39. 82 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/MessagesProcessing.java
  40. 2 3
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/TailingEmitter.java
  41. 1 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/GlobalErrorWebExceptionHandler.java
  42. 1 13
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java
  43. 19 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java
  44. 76 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ApplicationInfoService.java
  45. 19 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
  46. 11 9
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java
  47. 49 16
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConfigSanitizer.java
  48. 4 4
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java
  49. 18 39
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java
  50. 1 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java
  51. 3 3
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/analyze/TopicAnalysisStats.java
  52. 4 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlApiClient.java
  53. 8 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2.java
  54. 1 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java
  55. 3 4
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/CognitoAuthorityExtractor.java
  56. 18 3
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java
  57. 53 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/GithubReleaseInfo.java
  58. 38 22
      kafka-ui-api/src/main/resources/application-local.yml
  59. 2 2
      kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java
  60. 4 5
      kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/KafkaConfigSanitizerTest.java
  61. 15 10
      kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java
  62. 0 3
      kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2Test.java
  63. 54 0
      kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/GithubReleaseInfoTest.java
  64. 17 19
      kafka-ui-contract/pom.xml
  65. 25 0
      kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
  66. 10 1
      kafka-ui-e2e-checks/README.md
  67. 7 5
      kafka-ui-e2e-checks/docker/selenoid-git.yaml
  68. 33 0
      kafka-ui-e2e-checks/docker/selenoid-local.yaml
  69. 44 14
      kafka-ui-e2e-checks/pom.xml
  70. 15 0
      kafka-ui-e2e-checks/selenoid/config/browsersGit.json
  71. 3 3
      kafka-ui-e2e-checks/selenoid/config/browsersLocal.json
  72. 1 1
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Connector.java
  73. 20 19
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Schema.java
  74. 6 6
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java
  75. 122 115
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
  76. 27 28
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java
  77. 73 74
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersDetails.java
  78. 88 89
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java
  79. 39 39
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/ConnectorCreateForm.java
  80. 64 64
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/ConnectorDetails.java
  81. 33 33
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/KafkaConnectList.java
  82. 19 19
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersDetails.java
  83. 8 8
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java
  84. 0 139
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java
  85. 0 154
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java
  86. 0 17
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/enums/KsqlMenuTabs.java
  87. 0 19
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/enums/KsqlQueryConfig.java
  88. 138 0
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java
  89. 153 0
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java
  90. 17 0
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlMenuTabs.java
  91. 18 0
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlQueryConfig.java
  92. 2 2
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Stream.java
  93. 2 2
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Table.java
  94. 47 48
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/panels/NaviSideBar.java
  95. 13 14
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/panels/TopPanel.java
  96. 24 24
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/panels/enums/MenuItem.java
  97. 124 122
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaCreateForm.java
  98. 51 51
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaDetails.java
  99. 30 30
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaRegistryList.java
  100. 45 46
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/ProduceMessagePanel.java

+ 36 - 0
.devcontainer/devcontainer.json

@@ -0,0 +1,36 @@
+{
+	"name": "Java",
+
+	"image": "mcr.microsoft.com/devcontainers/java:0-17",
+
+	"features": {
+		"ghcr.io/devcontainers/features/java:1": {
+			"version": "none",
+			"installMaven": "true",
+			"installGradle": "false"
+		},
+		"ghcr.io/devcontainers/features/docker-in-docker:2": {}
+	},
+
+	// Use 'forwardPorts' to make a list of ports inside the container available locally.
+	// "forwardPorts": [],
+
+	// Use 'postCreateCommand' to run commands after the container is created.
+	// "postCreateCommand": "java -version",
+
+	"customizations": {
+		"vscode": {
+			"extensions" : [
+				"vscjava.vscode-java-pack",
+				"vscjava.vscode-maven",
+				"vscjava.vscode-java-debug",
+				"EditorConfig.EditorConfig",
+				"ms-azuretools.vscode-docker",
+				"antfu.vite",
+				"ms-kubernetes-tools.vscode-kubernetes-tools",
+                "github.vscode-pull-request-github"
+			]
+		}
+	}
+
+}

+ 3 - 2
.github/workflows/e2e-automation.yml

@@ -36,7 +36,7 @@ jobs:
       - name: Pull with Docker
       - name: Pull with Docker
         id: pull_chrome
         id: pull_chrome
         run: |
         run: |
-          docker pull selenium/standalone-chrome:103.0
+          docker pull selenoid/vnc_chrome:103.0
       - name: Set up JDK
       - name: Set up JDK
         uses: actions/setup-java@v3
         uses: actions/setup-java@v3
         with:
         with:
@@ -52,6 +52,7 @@ jobs:
         id: compose_app
         id: compose_app
         # use the following command until #819 will be fixed
         # use the following command until #819 will be fixed
         run: |
         run: |
+          docker-compose -f kafka-ui-e2e-checks/docker/selenoid-git.yaml up -d
           docker-compose -f ./documentation/compose/e2e-tests.yaml up -d
           docker-compose -f ./documentation/compose/e2e-tests.yaml up -d
       - name: Run test suite
       - name: Run test suite
         run: |
         run: |
@@ -78,7 +79,7 @@ jobs:
         uses: Sibz/github-status-action@v1.1.6
         uses: Sibz/github-status-action@v1.1.6
         with:
         with:
           authToken: ${{secrets.GITHUB_TOKEN}}
           authToken: ${{secrets.GITHUB_TOKEN}}
-          context: "Test report"
+          context: "Click Details button to open Allure report"
           state: "success"
           state: "success"
           sha: ${{ github.sha }}
           sha: ${{ github.sha }}
           target_url: http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com/${{ github.run_number }}
           target_url: http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com/${{ github.run_number }}

+ 8 - 7
.github/workflows/e2e-checks.yaml

@@ -15,20 +15,20 @@ jobs:
       - uses: actions/checkout@v3
       - uses: actions/checkout@v3
         with:
         with:
           ref: ${{ github.event.pull_request.head.sha }}
           ref: ${{ github.event.pull_request.head.sha }}
-      - name: Configure AWS credentials for Kafka-UI account
+      - name: Configure AWS credentials
         uses: aws-actions/configure-aws-credentials@v2
         uses: aws-actions/configure-aws-credentials@v2
         with:
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
           aws-region: eu-central-1
           aws-region: eu-central-1
-      - name: Set the values
+      - name: Set up environment
         id: set_env_values
         id: set_env_values
         run: |
         run: |
           cat "./kafka-ui-e2e-checks/.env.ci" >> "./kafka-ui-e2e-checks/.env"
           cat "./kafka-ui-e2e-checks/.env.ci" >> "./kafka-ui-e2e-checks/.env"
-      - name: pull docker
+      - name: Pull with Docker
         id: pull_chrome
         id: pull_chrome
         run: |
         run: |
-          docker pull selenium/standalone-chrome:103.0
+          docker pull selenoid/vnc_chrome:103.0
       - name: Set up JDK
       - name: Set up JDK
         uses: actions/setup-java@v3
         uses: actions/setup-java@v3
         with:
         with:
@@ -40,12 +40,13 @@ jobs:
         run: |
         run: |
           ./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
           ./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
           ./mvnw -B -V -ntp clean install -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
           ./mvnw -B -V -ntp clean install -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
-      - name: compose app
+      - name: Compose with Docker
         id: compose_app
         id: compose_app
         # use the following command until #819 will be fixed
         # use the following command until #819 will be fixed
         run: |
         run: |
+          docker-compose -f kafka-ui-e2e-checks/docker/selenoid-git.yaml up -d
           docker-compose -f ./documentation/compose/e2e-tests.yaml up -d
           docker-compose -f ./documentation/compose/e2e-tests.yaml up -d
-      - name: e2e run
+      - name: Run test suite
         run: |
         run: |
           ./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
           ./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
           ./mvnw -B -V -ntp -Dsurefire.suiteXmlFiles='src/test/resources/smoke.xml' -f 'kafka-ui-e2e-checks' test -Pprod
           ./mvnw -B -V -ntp -Dsurefire.suiteXmlFiles='src/test/resources/smoke.xml' -f 'kafka-ui-e2e-checks' test -Pprod
@@ -65,7 +66,7 @@ jobs:
           AWS_S3_BUCKET: 'kafkaui-allure-reports'
           AWS_S3_BUCKET: 'kafkaui-allure-reports'
           AWS_REGION: 'eu-central-1'
           AWS_REGION: 'eu-central-1'
           SOURCE_DIR: 'allure-history/allure-results'
           SOURCE_DIR: 'allure-history/allure-results'
-      - name: Post the link to allure report
+      - name: Deploy report to Amazon S3
         if: always()
         if: always()
         uses: Sibz/github-status-action@v1.1.6
         uses: Sibz/github-status-action@v1.1.6
         with:
         with:

+ 3 - 2
.github/workflows/e2e-weekly.yml

@@ -23,7 +23,7 @@ jobs:
       - name: Pull with Docker
       - name: Pull with Docker
         id: pull_chrome
         id: pull_chrome
         run: |
         run: |
-          docker pull selenium/standalone-chrome:103.0
+          docker pull selenoid/vnc_chrome:103.0
       - name: Set up JDK
       - name: Set up JDK
         uses: actions/setup-java@v3
         uses: actions/setup-java@v3
         with:
         with:
@@ -39,6 +39,7 @@ jobs:
         id: compose_app
         id: compose_app
         # use the following command until #819 will be fixed
         # use the following command until #819 will be fixed
         run: |
         run: |
+          docker-compose -f kafka-ui-e2e-checks/docker/selenoid-git.yaml up -d
           docker-compose -f ./documentation/compose/e2e-tests.yaml up -d
           docker-compose -f ./documentation/compose/e2e-tests.yaml up -d
       - name: Run test suite
       - name: Run test suite
         run: |
         run: |
@@ -65,7 +66,7 @@ jobs:
         uses: Sibz/github-status-action@v1.1.6
         uses: Sibz/github-status-action@v1.1.6
         with:
         with:
           authToken: ${{secrets.GITHUB_TOKEN}}
           authToken: ${{secrets.GITHUB_TOKEN}}
-          context: "Test report"
+          context: "Click Details button to open Allure report"
           state: "success"
           state: "success"
           sha: ${{ github.sha }}
           sha: ${{ github.sha }}
           target_url: http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com/${{ github.run_number }}
           target_url: http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com/${{ github.run_number }}

+ 4 - 2
CONTRIBUTING.md

@@ -1,3 +1,5 @@
+This guide is an exact copy of the same documented located [in our official docs](https://docs.kafka-ui.provectus.io/development/contributing). If there are any differences between the documents, the one located in our official docs should prevail.
+
 This guide aims to walk you through the process of working on issues and Pull Requests (PRs).
 This guide aims to walk you through the process of working on issues and Pull Requests (PRs).
 
 
 Bear in mind that you will not be able to complete some steps on your own if you do not have a “write” permission. Feel free to reach out to the maintainers to help you unlock these activities.
 Bear in mind that you will not be able to complete some steps on your own if you do not have a “write” permission. Feel free to reach out to the maintainers to help you unlock these activities.
@@ -20,7 +22,7 @@ You also need to consider labels. You can sort the issues by scope labels, such
 ## Grabbing the issue
 ## Grabbing the issue
 
 
 There is a bunch of criteria that make an issue feasible for development. <br/>
 There is a bunch of criteria that make an issue feasible for development. <br/>
-The implementation of any features and/or their enhancements should be reasonable, must be backed by justified requirements (demanded by the community, [roadmap](documentation/project/ROADMAP.md) plans, etc.). The final decision is left for the maintainers' discretion.
+The implementation of any features and/or their enhancements should be reasonable, must be backed by justified requirements (demanded by the community, [roadmap](https://docs.kafka-ui.provectus.io/project/roadmap) plans, etc.). The final decision is left for the maintainers' discretion.
 
 
 All bugs should be confirmed as such (i.e. the behavior is unintended).
 All bugs should be confirmed as such (i.e. the behavior is unintended).
 
 
@@ -39,7 +41,7 @@ To keep the status of the issue clear to everyone, please keep the card's status
 
 
 ## Setting up a local development environment
 ## Setting up a local development environment
 
 
-Please refer to [this guide](documentation/project/contributing/README.md).
+Please refer to [this guide](https://docs.kafka-ui.provectus.io/development/contributing).
 
 
 # Pull Requests
 # Pull Requests
 
 

+ 54 - 131
README.md

@@ -1,21 +1,31 @@
 ![UI for Apache Kafka logo](documentation/images/kafka-ui-logo.png) UI for Apache Kafka&nbsp;
 ![UI for Apache Kafka logo](documentation/images/kafka-ui-logo.png) UI for Apache Kafka&nbsp;
 ------------------
 ------------------
 #### Versatile, fast and lightweight web UI for managing Apache Kafka® clusters. Built by developers, for developers.
 #### Versatile, fast and lightweight web UI for managing Apache Kafka® clusters. Built by developers, for developers.
+<br/>
 
 
 [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://github.com/provectus/kafka-ui/blob/master/LICENSE)
 [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://github.com/provectus/kafka-ui/blob/master/LICENSE)
 ![UI for Apache Kafka Price Free](documentation/images/free-open-source.svg)
 ![UI for Apache Kafka Price Free](documentation/images/free-open-source.svg)
 [![Release version](https://img.shields.io/github/v/release/provectus/kafka-ui)](https://github.com/provectus/kafka-ui/releases)
 [![Release version](https://img.shields.io/github/v/release/provectus/kafka-ui)](https://github.com/provectus/kafka-ui/releases)
 [![Chat with us](https://img.shields.io/discord/897805035122077716)](https://discord.gg/4DWzD7pGE5)
 [![Chat with us](https://img.shields.io/discord/897805035122077716)](https://discord.gg/4DWzD7pGE5)
+[![Docker pulls](https://img.shields.io/docker/pulls/provectuslabs/kafka-ui)](https://hub.docker.com/r/provectuslabs/kafka-ui)
 
 
-### DISCLAIMER
-<em>UI for Apache Kafka is a free tool built and supported by the open-source community. Curated by Provectus, it will remain free and open-source, without any paid features or subscription plans to be added in the future.
-Looking for the help of Kafka experts? Provectus can help you design, build, deploy, and manage Apache Kafka clusters and streaming applications. Discover [Professional Services for Apache Kafka](https://provectus.com/professional-services-apache-kafka/), to unlock the full potential of Kafka in your enterprise! </em>
-
+<p align="center">
+    <a href="https://docs.kafka-ui.provectus.io/">DOCS</a> • 
+    <a href="https://docs.kafka-ui.provectus.io/configuration/quick-start">QUICK START</a> • 
+    <a href="https://discord.gg/4DWzD7pGE5">COMMUNITY DISCORD</a>
+    <br/>
+    <a href="https://aws.amazon.com/marketplace/pp/prodview-ogtt5hfhzkq6a">AWS Marketplace</a>  •
+    <a href="https://www.producthunt.com/products/ui-for-apache-kafka/reviews/new">ProductHunt</a>
+</p>
 
 
 #### UI for Apache Kafka is a free, open-source web UI to monitor and manage Apache Kafka clusters.
 #### UI for Apache Kafka is a free, open-source web UI to monitor and manage Apache Kafka clusters.
 
 
 UI for Apache Kafka is a simple tool that makes your data flows observable, helps find and troubleshoot issues faster and deliver optimal performance. Its lightweight dashboard makes it easy to track key metrics of your Kafka clusters - Brokers, Topics, Partitions, Production, and Consumption.
 UI for Apache Kafka is a simple tool that makes your data flows observable, helps find and troubleshoot issues faster and deliver optimal performance. Its lightweight dashboard makes it easy to track key metrics of your Kafka clusters - Brokers, Topics, Partitions, Production, and Consumption.
 
 
+### DISCLAIMER
+<em>UI for Apache Kafka is a free tool built and supported by the open-source community. Curated by Provectus, it will remain free and open-source, without any paid features or subscription plans to be added in the future.
+Looking for the help of Kafka experts? Provectus can help you design, build, deploy, and manage Apache Kafka clusters and streaming applications. Discover [Professional Services for Apache Kafka](https://provectus.com/professional-services-apache-kafka/), to unlock the full potential of Kafka in your enterprise! </em>
+
 Set up UI for Apache Kafka with just a couple of easy commands to visualize your Kafka data in a comprehensible way. You can run the tool locally or in
 Set up UI for Apache Kafka with just a couple of easy commands to visualize your Kafka data in a comprehensible way. You can run the tool locally or in
 the cloud.
 the cloud.
 
 
@@ -29,10 +39,10 @@ the cloud.
 * **View Consumer Groups** — view per-partition parked offsets, combined and per-partition lag
 * **View Consumer Groups** — view per-partition parked offsets, combined and per-partition lag
 * **Browse Messages** — browse messages with JSON, plain text, and Avro encoding
 * **Browse Messages** — browse messages with JSON, plain text, and Avro encoding
 * **Dynamic Topic Configuration** — create and configure new topics with dynamic configuration
 * **Dynamic Topic Configuration** — create and configure new topics with dynamic configuration
-* **Configurable Authentification** — secure your installation with optional Github/Gitlab/Google OAuth 2.0
-* **Custom serialization/deserialization plugins** - use a ready-to-go serde for your data like AWS Glue or Smile, or code your own!
-* **Role based access control** - [manage permissions](https://github.com/provectus/kafka-ui/wiki/RBAC-(role-based-access-control)) to access the UI with granular precision
-* **Data masking** - [obfuscate](https://github.com/provectus/kafka-ui/blob/master/documentation/guides/DataMasking.md) sensitive data in topic messages
+* **Configurable Authentification** — [secure](https://docs.kafka-ui.provectus.io/configuration/authentication) your installation with optional Github/Gitlab/Google OAuth 2.0
+* **Custom serialization/deserialization plugins** - [use](https://docs.kafka-ui.provectus.io/configuration/serialization-serde) a ready-to-go serde for your data like AWS Glue or Smile, or code your own!
+* **Role based access control** - [manage permissions](https://docs.kafka-ui.provectus.io/configuration/rbac-role-based-access-control) to access the UI with granular precision
+* **Data masking** - [obfuscate](https://docs.kafka-ui.provectus.io/configuration/data-masking) sensitive data in topic messages
 
 
 # The Interface
 # The Interface
 UI for Apache Kafka wraps major functions of Apache Kafka with an intuitive user interface.
 UI for Apache Kafka wraps major functions of Apache Kafka with an intuitive user interface.
@@ -60,155 +70,68 @@ There are 3 supported types of schemas: Avro®, JSON Schema, and Protobuf schema
 
 
 ![Create Schema Registry](documentation/images/Create_schema.gif)
 ![Create Schema Registry](documentation/images/Create_schema.gif)
 
 
-Before producing avro-encoded messages, you have to add an avro schema for the topic in Schema Registry. Now all these steps are easy to do
+Before producing avro/protobuf encoded messages, you have to add a schema for the topic in Schema Registry. Now all these steps are easy to do
 with a few clicks in a user-friendly interface.
 with a few clicks in a user-friendly interface.
 
 
 ![Avro Schema Topic](documentation/images/Schema_Topic.gif)
 ![Avro Schema Topic](documentation/images/Schema_Topic.gif)
 
 
 # Getting Started
 # Getting Started
 
 
-To run UI for Apache Kafka, you can use a pre-built Docker image or build it locally.
-
-## Configuration
-
-We have plenty of [docker-compose files](documentation/compose/DOCKER_COMPOSE.md) as examples. They're built for various configuration stacks.
-
-# Guides
-
-- [SSO configuration](documentation/guides/SSO.md)
-- [AWS IAM configuration](documentation/guides/AWS_IAM.md)
-- [Docker-compose files](documentation/compose/DOCKER_COMPOSE.md)
-- [Connection to a secure broker](documentation/guides/SECURE_BROKER.md)
-- [Configure seriliazation/deserialization plugins or code your own](documentation/guides/Serialization.md)
+To run UI for Apache Kafka, you can use either a pre-built Docker image or build it (or a jar file) yourself.
 
 
-### Configuration File
-Example of how to configure clusters in the [application-local.yml](https://github.com/provectus/kafka-ui/blob/master/kafka-ui-api/src/main/resources/application-local.yml) configuration file:
+## Quick start (Demo run)
 
 
-
-```sh
-kafka:
-  clusters:
-    -
-      name: local
-      bootstrapServers: localhost:29091
-      schemaRegistry: http://localhost:8085
-      schemaRegistryAuth:
-        username: username
-        password: password
-#     schemaNameTemplate: "%s-value"
-      metrics:
-        port: 9997
-        type: JMX
-    -
+```
+docker run -it -p 8080:8080 -e DYNAMIC_CONFIG_ENABLED=true provectuslabs/kafka-ui
 ```
 ```
 
 
-* `name`: cluster name
-* `bootstrapServers`: where to connect
-* `schemaRegistry`: schemaRegistry's address
-* `schemaRegistryAuth.username`: schemaRegistry's basic authentication username
-* `schemaRegistryAuth.password`: schemaRegistry's basic authentication password
-* `schemaNameTemplate`: how keys are saved to schemaRegistry
-* `metrics.port`: open JMX port of a broker
-* `metrics.type`: Type of metrics, either JMX or PROMETHEUS. Defaulted to JMX.
-* `readOnly`: enable read only mode
-
-Configure as many clusters as you need by adding their configs below separated with `-`.
-
-## Running a Docker Image
-The official Docker image for UI for Apache Kafka is hosted here: [hub.docker.com/r/provectuslabs/kafka-ui](https://hub.docker.com/r/provectuslabs/kafka-ui).
+Then access the web UI at [http://localhost:8080](http://localhost:8080)
 
 
-Launch Docker container in the background:
-```sh
+The command is sufficient to try things out. When you're done trying things out, you can proceed with a [persistent installation](https://docs.kafka-ui.provectus.io/configuration/quick-start#persistent-start)
 
 
-docker run -p 8080:8080 \
-	-e KAFKA_CLUSTERS_0_NAME=local \
-	-e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092 \
-	-d provectuslabs/kafka-ui:latest
+## Persistent installation
 
 
 ```
 ```
-Then access the web UI at [http://localhost:8080](http://localhost:8080).
-Further configuration with environment variables - [see environment variables](#env_variables)
-
-### Docker Compose
-
-If you prefer to use `docker-compose` please refer to the [documentation](docker-compose.md).
-
-### Helm chart
-Helm chart could be found under [charts/kafka-ui](https://github.com/provectus/kafka-ui/tree/master/charts/kafka-ui) directory
+services:
+  kafka-ui:
+    container_name: kafka-ui
+    image: provectuslabs/kafka-ui:latest
+    ports:
+      - 8080:8080
+    environment:
+      DYNAMIC_CONFIG_ENABLED: true
+    volumes:
+      - ~/kui/config.yml:/etc/kafkaui/dynamic_config.yaml
+```
 
 
-Quick-start instruction [here](helm_chart.md)
+Please refer to our [configuration](https://docs.kafka-ui.provectus.io/configuration/quick-start) page to proceed with further app configuration.
 
 
-## Building With Docker
+## Some useful configuration related links
 
 
-### Prerequisites
+[Web UI Cluster Configuration Wizard](https://docs.kafka-ui.provectus.io/configuration/configuration-wizard)
 
 
-Check [prerequisites.md](documentation/project/contributing/prerequisites.md)
+[Configuration file explanation](https://docs.kafka-ui.provectus.io/configuration/configuration-file)
 
 
-### Building and Running
+[Docker Compose examples](https://docs.kafka-ui.provectus.io/configuration/compose-examples)
 
 
-Check [building.md](documentation/project/contributing/building.md)
+[Misc configuration properties](https://docs.kafka-ui.provectus.io/configuration/misc-configuration-properties)
 
 
-## Building Without Docker
+## Helm charts
 
 
-### Prerequisites
+[Quick start](https://docs.kafka-ui.provectus.io/configuration/helm-charts/quick-start)
 
 
-[Prerequisites](documentation/project/contributing/prerequisites.md) will mostly remain the same with the exception of docker.
+## Building from sources
 
 
-### Running without Building
+[Quick start](https://docs.kafka-ui.provectus.io/development/building/prerequisites) with building
 
 
-[How to run quickly without building](documentation/project/contributing/building-and-running-without-docker.md#run_without_docker_quickly)
+## Liveliness and readiness probes
+Liveliness and readiness endpoint is at `/actuator/health`.<br/>
+Info endpoint (build info) is located at `/actuator/info`.
 
 
-### Building and Running
+# Configuration options
 
 
-[How to build and run](documentation/project/contributing/building-and-running-without-docker.md#build_and_run_without_docker)
+All of the environment variables/config properties could be found [here](https://docs.kafka-ui.provectus.io/configuration/misc-configuration-properties).
 
 
-## Liveliness and readiness probes
-Liveliness and readiness endpoint is at `/actuator/health`.
-Info endpoint (build info) is located at `/actuator/info`.
+# Contributing
 
 
-## <a name="env_variables"></a> Environment Variables
-
-Alternatively, each variable of the .yml file can be set with an environment variable.
-For example, if you want to use an environment variable to set the `name` parameter, you can write it like this: `KAFKA_CLUSTERS_2_NAME`
-
-|Name               	|Description
-|-----------------------|-------------------------------
-|`SERVER_SERVLET_CONTEXT_PATH` | URI basePath
-|`LOGGING_LEVEL_ROOT`        	| Setting log level (trace, debug, info, warn, error). Default: info
-|`LOGGING_LEVEL_COM_PROVECTUS` |Setting log level (trace, debug, info, warn, error). Default: debug
-|`SERVER_PORT` |Port for the embedded server. Default: `8080`
-|`KAFKA_ADMIN-CLIENT-TIMEOUT` | Kafka API timeout in ms. Default: `30000`
-|`KAFKA_CLUSTERS_0_NAME` | Cluster name
-|`KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS` 	|Address where to connect
-|`KAFKA_CLUSTERS_0_KSQLDBSERVER` 	| KSQL DB server address
-|`KAFKA_CLUSTERS_0_KSQLDBSERVERAUTH_USERNAME` 	| KSQL DB server's basic authentication username
-|`KAFKA_CLUSTERS_0_KSQLDBSERVERAUTH_PASSWORD` 	| KSQL DB server's basic authentication password
-|`KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_KEYSTORELOCATION`   	|Path to the JKS keystore to communicate to KSQL DB
-|`KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_KEYSTOREPASSWORD`   	|Password of the JKS keystore for KSQL DB
-|`KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL` 	|Security protocol to connect to the brokers. For SSL connection use "SSL", for plaintext connection don't set this environment variable
-|`KAFKA_CLUSTERS_0_SCHEMAREGISTRY`   	|SchemaRegistry's address
-|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_USERNAME`   	|SchemaRegistry's basic authentication username
-|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_PASSWORD`   	|SchemaRegistry's basic authentication password
-|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTORELOCATION`   	|Path to the JKS keystore to communicate to SchemaRegistry
-|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTOREPASSWORD`   	|Password of the JKS keystore for SchemaRegistry
-|`KAFKA_CLUSTERS_0_METRICS_SSL`          |Enable SSL for Metrics (for PROMETHEUS metrics type). Default: false.
-|`KAFKA_CLUSTERS_0_METRICS_USERNAME` |Username for Metrics authentication
-|`KAFKA_CLUSTERS_0_METRICS_PASSWORD` |Password for Metrics authentication
-|`KAFKA_CLUSTERS_0_METRICS_KEYSTORELOCATION` |Path to the JKS keystore to communicate to metrics source (JMX/PROMETHEUS). For advanced setup, see `kafka-ui-jmx-secured.yml`
-|`KAFKA_CLUSTERS_0_METRICS_KEYSTOREPASSWORD` |Password of the JKS metrics keystore
-|`KAFKA_CLUSTERS_0_SCHEMANAMETEMPLATE` |How keys are saved to schemaRegistry
-|`KAFKA_CLUSTERS_0_METRICS_PORT`        	 |Open metrics port of a broker
-|`KAFKA_CLUSTERS_0_METRICS_TYPE`        	 |Type of metrics retriever to use. Valid values are JMX (default) or PROMETHEUS. If Prometheus, then metrics are read from prometheus-jmx-exporter instead of jmx
-|`KAFKA_CLUSTERS_0_READONLY`        	|Enable read-only mode. Default: false
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME` |Given name for the Kafka Connect cluster
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS` |Address of the Kafka Connect service endpoint
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_USERNAME`| Kafka Connect cluster's basic authentication username
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_PASSWORD`| Kafka Connect cluster's basic authentication password
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_KEYSTORELOCATION`| Path to the JKS keystore to communicate to Kafka Connect
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_KEYSTOREPASSWORD`| Password of the JKS keystore for Kafka Connect
-|`KAFKA_CLUSTERS_0_POLLING_THROTTLE_RATE` |Max traffic rate (bytes/sec) that kafka-ui allowed to reach when polling messages from the cluster. Default: 0 (not limited)
-|`KAFKA_CLUSTERS_0_SSL_TRUSTSTORELOCATION`| Path to the JKS truststore to communicate to Kafka Connect, SchemaRegistry, KSQL, Metrics
-|`KAFKA_CLUSTERS_0_SSL_TRUSTSTOREPASSWORD`| Password of the JKS truststore for Kafka Connect, SchemaRegistry, KSQL, Metrics
-|`TOPIC_RECREATE_DELAY_SECONDS` |Time delay between topic deletion and topic creation attempts for topic recreate functionality. Default: 1
-|`TOPIC_RECREATE_MAXRETRIES`  |Number of attempts of topic creation after topic deletion for topic recreate functionality. Default: 15
-|`DYNAMIC_CONFIG_ENABLED`|Allow to change application config in runtime. Default: false.
+Please refer to [contributing guide](https://docs.kafka-ui.provectus.io/development/contributing), we'll guide you from there.

+ 2 - 2
charts/kafka-ui/Chart.yaml

@@ -2,6 +2,6 @@ apiVersion: v2
 name: kafka-ui
 name: kafka-ui
 description: A Helm chart for kafka-UI
 description: A Helm chart for kafka-UI
 type: application
 type: application
-version: 0.6.0
-appVersion: v0.6.0
+version: 0.6.1
+appVersion: v0.6.1
 icon: https://github.com/provectus/kafka-ui/raw/master/documentation/images/kafka-ui-logo.png
 icon: https://github.com/provectus/kafka-ui/raw/master/documentation/images/kafka-ui-logo.png

+ 1 - 34
charts/kafka-ui/README.md

@@ -1,34 +1 @@
-# Kafka-UI Helm Chart
-
-## Configuration
-
-Most of the Helm charts parameters are common, follow table describe unique parameters related to application configuration.
-
-### Kafka-UI parameters
-
-| Parameter                                | Description                                                                                                                                    | Default |
-| ---------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | ------- |
-| `existingConfigMap`                      | Name of the existing ConfigMap with Kafka-UI environment variables                                                                             | `nil`   |
-| `existingSecret`                         | Name of the existing Secret with Kafka-UI environment variables                                                                                | `nil`   |
-| `envs.secret`                            | Set of the sensitive environment variables to pass to Kafka-UI                                                                                 | `{}`    |
-| `envs.config`                            | Set of the environment variables to pass to Kafka-UI                                                                                           | `{}`    |
-| `yamlApplicationConfigConfigMap`         | Map with name and keyName keys, name refers to the existing ConfigMap, keyName refers to the ConfigMap key with Kafka-UI config in Yaml format | `{}`    |
-| `yamlApplicationConfig`                  | Kafka-UI config in Yaml format                                                                                                                 | `{}`    |
-| `networkPolicy.enabled`                  | Enable network policies                                                                                                                        | `false` |
-| `networkPolicy.egressRules.customRules`  | Custom network egress policy rules                                                                                                             | `[]`    |
-| `networkPolicy.ingressRules.customRules` | Custom network ingress policy rules                                                                                                            | `[]`    |
-| `podLabels`                              | Extra labels for Kafka-UI pod                                                                                                                  | `{}`    |
-
-
-## Example
-
-To install Kafka-UI need to execute follow:
-``` bash
-helm repo add kafka-ui https://provectus.github.io/kafka-ui
-helm install kafka-ui kafka-ui/kafka-ui --set envs.config.KAFKA_CLUSTERS_0_NAME=local --set envs.config.KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092
-```
-To connect to Kafka-UI web application need to execute:
-``` bash
-kubectl port-forward svc/kafka-ui 8080:80
-```
-Open the `http://127.0.0.1:8080` on the browser to access Kafka-UI.
+Please refer to our [documentation](https://docs.kafka-ui.provectus.io/configuration/helm-charts) to get some info on our helm charts.

+ 0 - 43
docker-compose.md

@@ -1,43 +0,0 @@
-# Quick Start with docker-compose
-
-Environment variables documentation - [see usage](README.md#env_variables).<br/>
-We have plenty of example files with more complex configurations. Please check them out in ``docker`` directory.
-
-* Add a new service in docker-compose.yml
-
-```yaml
-version: '2'
-services:
-  kafka-ui:
-    image: provectuslabs/kafka-ui
-    container_name: kafka-ui
-    ports:
-      - "8080:8080"
-    restart: always
-    environment:
-      - KAFKA_CLUSTERS_0_NAME=local
-      - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092
-```
-
-* If you prefer UI for Apache Kafka in read only mode
-   
-```yaml
-version: '2'
-services:
-  kafka-ui:
-    image: provectuslabs/kafka-ui
-    container_name: kafka-ui
-    ports:
-      - "8080:8080"
-    restart: always
-    environment:
-      - KAFKA_CLUSTERS_0_NAME=local
-      - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092
-      - KAFKA_CLUSTERS_0_READONLY=true
-```
-  
-* Start UI for Apache Kafka process
-
-```bash
-docker-compose up -d kafka-ui
-```

+ 27 - 27
documentation/compose/e2e-tests.yaml

@@ -11,14 +11,14 @@ services:
       test: wget --no-verbose --tries=1 --spider  http://localhost:8080/actuator/health
       test: wget --no-verbose --tries=1 --spider  http://localhost:8080/actuator/health
       interval: 30s
       interval: 30s
       timeout: 10s
       timeout: 10s
-      retries: 10  
+      retries: 10
     depends_on:
     depends_on:
-        kafka0:
-          condition: service_healthy
-        schemaregistry0:
-          condition: service_healthy
-        kafka-connect0:
-          condition: service_healthy
+      kafka0:
+        condition: service_healthy
+      schemaregistry0:
+        condition: service_healthy
+      kafka-connect0:
+        condition: service_healthy
     environment:
     environment:
       KAFKA_CLUSTERS_0_NAME: local
       KAFKA_CLUSTERS_0_NAME: local
       KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
       KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
@@ -33,10 +33,10 @@ services:
     hostname: kafka0
     hostname: kafka0
     container_name: kafka0
     container_name: kafka0
     healthcheck:
     healthcheck:
-     test: unset JMX_PORT && KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka0 -Dcom.sun.management.jmxremote.rmi.port=9999" && kafka-broker-api-versions --bootstrap-server=localhost:9092
-     interval: 30s
-     timeout: 10s
-     retries: 10
+      test: unset JMX_PORT && KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka0 -Dcom.sun.management.jmxremote.rmi.port=9999" && kafka-broker-api-versions --bootstrap-server=localhost:9092
+      interval: 30s
+      timeout: 10s
+      retries: 10
     ports:
     ports:
       - "9092:9092"
       - "9092:9092"
       - "9997:9997"
       - "9997:9997"
@@ -68,12 +68,12 @@ services:
       - 8085:8085
       - 8085:8085
     depends_on:
     depends_on:
       kafka0:
       kafka0:
-          condition: service_healthy
+        condition: service_healthy
     healthcheck:
     healthcheck:
-     test: ["CMD", "timeout", "1", "curl", "--silent", "--fail", "http://schemaregistry0:8085/subjects"]
-     interval: 30s
-     timeout: 10s
-     retries: 10
+      test: [ "CMD", "timeout", "1", "curl", "--silent", "--fail", "http://schemaregistry0:8085/subjects" ]
+      interval: 30s
+      timeout: 10s
+      retries: 10
     environment:
     environment:
       SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
       SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
       SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
       SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
@@ -93,11 +93,11 @@ services:
       - 8083:8083
       - 8083:8083
     depends_on:
     depends_on:
       kafka0:
       kafka0:
-          condition: service_healthy
+        condition: service_healthy
       schemaregistry0:
       schemaregistry0:
-          condition: service_healthy
+        condition: service_healthy
     healthcheck:
     healthcheck:
-      test: ["CMD", "nc", "127.0.0.1", "8083"]
+      test: [ "CMD", "nc", "127.0.0.1", "8083" ]
       interval: 30s
       interval: 30s
       timeout: 10s
       timeout: 10s
       retries: 10
       retries: 10
@@ -118,8 +118,8 @@ services:
       CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
       CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
       CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect0
       CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect0
       CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
       CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
-#      AWS_ACCESS_KEY_ID: ""
-#      AWS_SECRET_ACCESS_KEY: ""
+  #      AWS_ACCESS_KEY_ID: ""
+  #      AWS_SECRET_ACCESS_KEY: ""
 
 
   kafka-init-topics:
   kafka-init-topics:
     image: confluentinc/cp-kafka:7.2.1
     image: confluentinc/cp-kafka:7.2.1
@@ -127,7 +127,7 @@ services:
       - ./message.json:/data/message.json
       - ./message.json:/data/message.json
     depends_on:
     depends_on:
       kafka0:
       kafka0:
-          condition: service_healthy
+        condition: service_healthy
     command: "bash -c 'echo Waiting for Kafka to be ready... && \
     command: "bash -c 'echo Waiting for Kafka to be ready... && \
                cub kafka-ready -b kafka0:29092 1 30 && \
                cub kafka-ready -b kafka0:29092 1 30 && \
                kafka-topics --create --topic users --partitions 3 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
                kafka-topics --create --topic users --partitions 3 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
@@ -142,10 +142,10 @@ services:
     ports:
     ports:
       - 5432:5432
       - 5432:5432
     healthcheck:
     healthcheck:
-      test: ["CMD-SHELL", "pg_isready -U dev_user"]
+      test: [ "CMD-SHELL", "pg_isready -U dev_user" ]
       interval: 10s
       interval: 10s
       timeout: 5s
       timeout: 5s
-      retries: 5  
+      retries: 5
     environment:
     environment:
       POSTGRES_USER: 'dev_user'
       POSTGRES_USER: 'dev_user'
       POSTGRES_PASSWORD: '12345'
       POSTGRES_PASSWORD: '12345'
@@ -154,7 +154,7 @@ services:
     image: ellerbrock/alpine-bash-curl-ssl
     image: ellerbrock/alpine-bash-curl-ssl
     depends_on:
     depends_on:
       postgres-db:
       postgres-db:
-          condition: service_healthy
+        condition: service_healthy
       kafka-connect0:
       kafka-connect0:
         condition: service_healthy
         condition: service_healthy
     volumes:
     volumes:
@@ -164,7 +164,7 @@ services:
   ksqldb:
   ksqldb:
     image: confluentinc/ksqldb-server:0.18.0
     image: confluentinc/ksqldb-server:0.18.0
     healthcheck:
     healthcheck:
-      test: ["CMD", "timeout", "1", "curl", "--silent", "--fail", "http://localhost:8088/info"]
+      test: [ "CMD", "timeout", "1", "curl", "--silent", "--fail", "http://localhost:8088/info" ]
       interval: 30s
       interval: 30s
       timeout: 10s
       timeout: 10s
       retries: 10
       retries: 10
@@ -174,7 +174,7 @@ services:
       kafka-connect0:
       kafka-connect0:
         condition: service_healthy
         condition: service_healthy
       schemaregistry0:
       schemaregistry0:
-         condition: service_healthy
+        condition: service_healthy
     ports:
     ports:
       - 8088:8088
       - 8088:8088
     environment:
     environment:

+ 0 - 41
documentation/guides/AWS_IAM.md

@@ -1,41 +0,0 @@
-# How to configure AWS IAM Authentication
-
-UI for Apache Kafka comes with built-in [aws-msk-iam-auth](https://github.com/aws/aws-msk-iam-auth) library.
-
-You could pass sasl configs in properties section for each cluster.
-
-More details could be found here: [aws-msk-iam-auth](https://github.com/aws/aws-msk-iam-auth)
- 
-## Examples: 
-
-Please replace 
-* <KAFKA_URL> with broker list
-* <PROFILE_NAME> with your aws profile
-
-
-### Running From Docker Image
-
-```sh
-docker run -p 8080:8080 \
-    -e KAFKA_CLUSTERS_0_NAME=local \
-    -e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=<KAFKA_URL> \
-    -e KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL=SASL_SSL \
-    -e KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM=AWS_MSK_IAM \
-    -e KAFKA_CLUSTERS_0_PROPERTIES_SASL_CLIENT_CALLBACK_HANDLER_CLASS=software.amazon.msk.auth.iam.IAMClientCallbackHandler \
-    -e KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG=software.amazon.msk.auth.iam.IAMLoginModule required awsProfileName="<PROFILE_NAME>"; \
-    -d provectuslabs/kafka-ui:latest 
-```
-
-### Configuring by application.yaml
-
-```yaml
-kafka:
-  clusters:
-    - name: local
-      bootstrapServers: <KAFKA_URL>
-      properties:
-        security.protocol: SASL_SSL
-        sasl.mechanism: AWS_MSK_IAM
-        sasl.client.callback.handler.class: software.amazon.msk.auth.iam.IAMClientCallbackHandler
-        sasl.jaas.config: software.amazon.msk.auth.iam.IAMLoginModule required awsProfileName="<PROFILE_NAME>";
-```

+ 0 - 123
documentation/guides/DataMasking.md

@@ -1,123 +0,0 @@
-# Topics data masking
-
-You can configure kafka-ui to mask sensitive data shown in Messages page.
-
-Several masking policies supported:
-
-### REMOVE
-For json objects - remove target fields, otherwise - return "null" string.
-```yaml
-- type: REMOVE
-  fields: [ "id", "name" ]
-  ...
-```
-
-Apply examples:
-```
-{ "id": 1234, "name": { "first": "James" }, "age": 30 } 
- ->
-{ "age": 30 } 
-```
-```
-non-json string -> null
-```
-
-### REPLACE
-For json objects - replace target field's values with specified replacement string (by default with `***DATA_MASKED***`). Note: if target field's value is object, then replacement applied to all its fields recursively (see example). 
-
-```yaml
-- type: REPLACE
-  fields: [ "id", "name" ]
-  replacement: "***"  #optional, "***DATA_MASKED***" by default
-  ...
-```
-
-Apply examples:
-```
-{ "id": 1234, "name": { "first": "James", "last": "Bond" }, "age": 30 } 
- ->
-{ "id": "***", "name": { "first": "***", "last": "***" }, "age": 30 } 
-```
-```
-non-json string -> ***
-```
-
-### MASK
-Mask target field's values with specified masking characters, recursively (spaces and line separators will be kept as-is).
-`pattern` array specifies what symbols will be used to replace upper-case chars, lower-case chars, digits and other symbols correspondingly.
-
-```yaml
-- type: MASK
-  fields: [ "id", "name" ]
-  pattern: ["A", "a", "N", "_"]   # optional, default is ["X", "x", "n", "-"]
-  ...
-```
-
-Apply examples:
-```
-{ "id": 1234, "name": { "first": "James", "last": "Bond!" }, "age": 30 } 
- ->
-{ "id": "NNNN", "name": { "first": "Aaaaa", "last": "Aaaa_" }, "age": 30 } 
-```
-```
-Some string! -> Aaaa aaaaaa_
-```
-
-----
-
-For each policy, if `fields` not specified, then policy will be applied to all object's fields or whole string if it is not a json-object.
-
-You can specify which masks will be applied to topic's keys/values. Multiple policies will be applied if topic matches both policy's patterns.
-
-Yaml configuration example:
-```yaml
-kafka:
-  clusters:
-    - name: ClusterName
-      # Other Cluster configuration omitted ... 
-      masking:
-        - type: REMOVE
-          fields: [ "id" ]
-          topicKeysPattern: "events-with-ids-.*"
-          topicValuesPattern: "events-with-ids-.*"
-          
-        - type: REPLACE
-          fields: [ "companyName", "organizationName" ]
-          replacement: "***MASKED_ORG_NAME***"   #optional
-          topicValuesPattern: "org-events-.*"
-        
-        - type: MASK
-          fields: [ "name", "surname" ]
-          pattern: ["A", "a", "N", "_"]  #optional
-          topicValuesPattern: "user-states"
-
-        - type: MASK
-          topicValuesPattern: "very-secured-topic"
-```
-
-Same configuration in env-vars fashion:
-```
-...
-KAFKA_CLUSTERS_0_MASKING_0_TYPE: REMOVE
-KAFKA_CLUSTERS_0_MASKING_0_FIELDS_0: "id"
-KAFKA_CLUSTERS_0_MASKING_0_TOPICKEYSPATTERN: "events-with-ids-.*"
-KAFKA_CLUSTERS_0_MASKING_0_TOPICVALUESPATTERN: "events-with-ids-.*"
-
-KAFKA_CLUSTERS_0_MASKING_1_TYPE: REPLACE
-KAFKA_CLUSTERS_0_MASKING_1_FIELDS_0: "companyName"
-KAFKA_CLUSTERS_0_MASKING_1_FIELDS_1: "organizationName"
-KAFKA_CLUSTERS_0_MASKING_1_REPLACEMENT: "***MASKED_ORG_NAME***"
-KAFKA_CLUSTERS_0_MASKING_1_TOPICVALUESPATTERN: "org-events-.*"
-
-KAFKA_CLUSTERS_0_MASKING_2_TYPE: MASK
-KAFKA_CLUSTERS_0_MASKING_2_FIELDS_0: "name"
-KAFKA_CLUSTERS_0_MASKING_2_FIELDS_1: "surname"
-KAFKA_CLUSTERS_0_MASKING_2_PATTERN_0: 'A'
-KAFKA_CLUSTERS_0_MASKING_2_PATTERN_1: 'a'
-KAFKA_CLUSTERS_0_MASKING_2_PATTERN_2: 'N'
-KAFKA_CLUSTERS_0_MASKING_2_PATTERN_3: '_'
-KAFKA_CLUSTERS_0_MASKING_2_TOPICVALUESPATTERN: "user-states"
-
-KAFKA_CLUSTERS_0_MASKING_3_TYPE: MASK
-KAFKA_CLUSTERS_0_MASKING_3_TOPICVALUESPATTERN: "very-secured-topic"
-```

+ 0 - 55
documentation/guides/Protobuf.md

@@ -1,55 +0,0 @@
-# Kafkaui Protobuf Support
-
-### This document is deprecated, please see examples in [Serialization document](Serialization.md).
-
-Kafkaui supports deserializing protobuf messages in two ways:
-1. Using Confluent Schema Registry's [protobuf support](https://docs.confluent.io/platform/current/schema-registry/serdes-develop/serdes-protobuf.html).
-2. Supplying a protobuf file as well as a configuration that maps topic names to protobuf types.
-
-## Configuring Kafkaui with a Protobuf File
-
-To configure Kafkaui to deserialize protobuf messages using a supplied protobuf schema add the following to the config:
-```yaml
-kafka:
-  clusters:
-    - # Cluster configuration omitted...
-      # protobufFilesDir specifies root location for proto files (will be scanned recursively)
-      # NOTE: if 'protobufFilesDir' specified, then 'protobufFile' and 'protobufFiles' settings will be ignored
-      protobufFilesDir: "/path/to/my-protobufs"
-      # (DEPRECATED) protobufFile is the path to the protobuf schema. (deprecated: please use "protobufFiles")
-      protobufFile: path/to/my.proto
-      # (DEPRECATED) protobufFiles is the location of one or more protobuf schemas
-      protobufFiles:
-        - /path/to/my-protobufs/my.proto
-        - /path/to/my-protobufs/another.proto
-        - /path/to/my-protobufs:test/test.proto
-      # protobufMessageName is the default protobuf type that is used to deserialize
-      # the message's value if the topic is not found in protobufMessageNameByTopic.    
-      protobufMessageName: my.DefaultValType
-      # protobufMessageNameByTopic is a mapping of topic names to protobuf types.
-      # This mapping is required and is used to deserialize the Kafka message's value.
-      protobufMessageNameByTopic:
-        topic1: my.Type1
-        topic2: my.Type2
-      # protobufMessageNameForKey is the default protobuf type that is used to deserialize
-      # the message's key if the topic is not found in protobufMessageNameForKeyByTopic.
-      protobufMessageNameForKey: my.DefaultKeyType
-      # protobufMessageNameForKeyByTopic is a mapping of topic names to protobuf types.
-      # This mapping is optional and is used to deserialize the Kafka message's key.
-      # If a protobuf type is not found for a topic's key, the key is deserialized as a string,
-      # unless protobufMessageNameForKey is specified.
-      protobufMessageNameForKeyByTopic:
-        topic1: my.KeyType1
-```
-
-Same config with flattened config (for docker-compose):
-
-```text
-kafka.clusters.0.protobufFiles.0: /path/to/my.proto
-kafka.clusters.0.protobufFiles.1: /path/to/another.proto
-kafka.clusters.0.protobufMessageName: my.DefaultValType
-kafka.clusters.0.protobufMessageNameByTopic.topic1: my.Type1
-kafka.clusters.0.protobufMessageNameByTopic.topic2: my.Type2
-kafka.clusters.0.protobufMessageNameForKey: my.DefaultKeyType
-kafka.clusters.0.protobufMessageNameForKeyByTopic.topic1: my.KeyType1
-```

+ 0 - 58
documentation/guides/SASL_SCRAM.md

@@ -1,58 +0,0 @@
-# How to configure SASL SCRAM Authentication
-
-You could pass sasl configs in properties section for each cluster.
- 
-## Examples: 
-
-Please replace 
-- <KAFKA_NAME> with cluster name
-- <KAFKA_URL> with broker list
-- <KAFKA_USERNAME> with username
-- <KAFKA_PASSWORD> with password
-
-### Running From Docker Image
-
-```sh
-docker run -p 8080:8080 \
-    -e KAFKA_CLUSTERS_0_NAME=<KAFKA_NAME> \
-    -e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=<KAFKA_URL> \
-    -e KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL=SASL_SSL \
-    -e KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM=SCRAM-SHA-512 \     
-    -e KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG=org.apache.kafka.common.security.scram.ScramLoginModule required username="<KAFKA_USERNAME>" password="<KAFKA_PASSWORD>"; \
-    -d provectuslabs/kafka-ui:latest 
-```
-
-### Running From Docker-compose file
-
-```yaml
-
-version: '3.4'
-services:
-  
-  kafka-ui:
-    image: provectuslabs/kafka-ui
-    container_name: kafka-ui
-    ports:
-      - "888:8080"
-    restart: always
-    environment:
-      - KAFKA_CLUSTERS_0_NAME=<KAFKA_NAME>
-      - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=<KAFKA_URL>
-      - KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL=SASL_SSL
-      - KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM=SCRAM-SHA-512
-      - KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG=org.apache.kafka.common.security.scram.ScramLoginModule required username="<KAFKA_USERNAME>" password="<KAFKA_PASSWORD>";
-      - KAFKA_CLUSTERS_0_PROPERTIES_PROTOCOL=SASL
-```
-
-### Configuring by application.yaml
-
-```yaml
-kafka:
-  clusters:
-    - name: local
-      bootstrapServers: <KAFKA_URL>
-      properties:
-        security.protocol: SASL_SSL
-        sasl.mechanism: SCRAM-SHA-512        
-        sasl.jaas.config: org.apache.kafka.common.security.scram.ScramLoginModule required username="<KAFKA_USERNAME>" password="<KAFKA_PASSWORD>";
-```

+ 0 - 7
documentation/guides/SECURE_BROKER.md

@@ -1,7 +0,0 @@
-## Connecting to a Secure Broker
-
-The app supports TLS (SSL) and SASL connections for [encryption and authentication](http://kafka.apache.org/090/documentation.html#security). <br/>
-
-### Running From Docker-compose file
-
-See [this](/documentation/compose/kafka-ssl.yml) docker-compose file reference for ssl-enabled kafka

+ 0 - 71
documentation/guides/SSO.md

@@ -1,71 +0,0 @@
-# How to configure SSO
-SSO require additionaly to configure TLS for application, in that example we will use self-signed certificate, in case of use legal certificates please skip step 1.
-## Step 1
-At this step we will generate self-signed PKCS12 keypair.
-``` bash
-mkdir cert
-keytool -genkeypair -alias ui-for-apache-kafka -keyalg RSA -keysize 2048 \
-  -storetype PKCS12 -keystore cert/ui-for-apache-kafka.p12 -validity 3650
-```
-## Step 2
-Create new application in any SSO provider, we will continue with [Auth0](https://auth0.com).
-
-<img src="https://github.com/provectus/kafka-ui/raw/images/images/sso-new-app.png" width="70%"/>
-
-After that need to provide callback URLs, in our case we will use `https://127.0.0.1:8080/login/oauth2/code/auth0`
-
-<img src="https://github.com/provectus/kafka-ui/raw/images/images/sso-configuration.png" width="70%"/>
-
-This is a main parameters required for enabling SSO
-
-<img src="https://github.com/provectus/kafka-ui/raw/images/images/sso-parameters.png" width="70%"/>
-
-## Step 3
-To launch UI for Apache Kafka with enabled TLS and SSO run following:
-``` bash
-docker run -p 8080:8080 -v `pwd`/cert:/opt/cert -e AUTH_TYPE=LOGIN_FORM \
-  -e SECURITY_BASIC_ENABLED=true \
-  -e SERVER_SSL_KEY_STORE_TYPE=PKCS12 \
-  -e SERVER_SSL_KEY_STORE=/opt/cert/ui-for-apache-kafka.p12 \
-  -e SERVER_SSL_KEY_STORE_PASSWORD=123456 \
-  -e SERVER_SSL_KEY_ALIAS=ui-for-apache-kafka \
-  -e SERVER_SSL_ENABLED=true \
-  -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTID=uhvaPKIHU4ZF8Ne4B6PGvF0hWW6OcUSB \
-  -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTSECRET=YXfRjmodifiedTujnkVr7zuW9ECCAK4TcnCio-i \
-  -e SPRING_SECURITY_OAUTH2_CLIENT_PROVIDER_AUTH0_ISSUER_URI=https://dev-a63ggcut.auth0.com/ \
-  -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_SCOPE=openid \
-  -e TRUST_STORE=/opt/cert/ui-for-apache-kafka.p12 \
-  -e TRUST_STORE_PASSWORD=123456 \
-provectuslabs/kafka-ui:latest
-```
-In the case with trusted CA-signed SSL certificate and SSL termination somewhere outside of application we can pass only SSO related environment variables:
-``` bash
-docker run -p 8080:8080 -v `pwd`/cert:/opt/cert -e AUTH_TYPE=OAUTH2 \
-  -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTID=uhvaPKIHU4ZF8Ne4B6PGvF0hWW6OcUSB \
-  -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTSECRET=YXfRjmodifiedTujnkVr7zuW9ECCAK4TcnCio-i \
-  -e SPRING_SECURITY_OAUTH2_CLIENT_PROVIDER_AUTH0_ISSUER_URI=https://dev-a63ggcut.auth0.com/ \
-  -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_SCOPE=openid \
-provectuslabs/kafka-ui:latest
-```
-
-## Step 4 (Load Balancer HTTP) (optional)
-If you're using load balancer/proxy and use HTTP between the proxy and the app, you might want to set `server_forward-headers-strategy` to `native` as well (`SERVER_FORWARDHEADERSSTRATEGY=native`), for more info refer to [this issue](https://github.com/provectus/kafka-ui/issues/1017).
-
-## Step 5 (Azure) (optional)
-For Azure AD (Office365) OAUTH2 you'll want to add additional environment variables:
-
-```bash
-docker run -p 8080:8080 \
-        -e KAFKA_CLUSTERS_0_NAME="${cluster_name}"\
-        -e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS="${kafka_listeners}" \
-        -e KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS="${kafka_connect_servers}"
-        -e AUTH_TYPE=OAUTH2 \
-        -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTID=uhvaPKIHU4ZF8Ne4B6PGvF0hWW6OcUSB \
-        -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTSECRET=YXfRjmodifiedTujnkVr7zuW9ECCAK4TcnCio-i \
-        -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_SCOPE="https://graph.microsoft.com/User.Read" \
-        -e SPRING_SECURITY_OAUTH2_CLIENT_PROVIDER_AUTH0_ISSUER_URI="https://login.microsoftonline.com/{tenant-id}/v2.0" \
-        -d provectuslabs/kafka-ui:latest"
-```
-
-Note that scope is created by default when Application registration is done in Azure portal.
-You'll need to update application registration manifest to include `"accessTokenAcceptedVersion": 2`

+ 0 - 167
documentation/guides/Serialization.md

@@ -1,167 +0,0 @@
-## Serialization and deserialization and custom plugins
-
-Kafka-ui supports multiple ways to serialize/deserialize data.
-
-
-### Int32, Int64, UInt32, UInt64
-Big-endian 4/8 bytes representation of signed/unsigned integers.
-
-### Base64
-Base64 (RFC4648) binary data representation. Can be useful in case the actual data is not important, but exactly the same (byte-wise) key/value should be sent.
-
-### String 
-Treats binary data as a string in specified encoding. Default encoding is UTF-8.
-
-Class name: `com.provectus.kafka.ui.serdes.builtin.StringSerde`
-
-Sample configuration (if you want to overwrite default configuration):
-```yaml
-kafka:
-  clusters:
-    - name: Cluster1
-      # Other Cluster configuration omitted ... 
-      serde:
-          # registering String serde with custom config
-        - name: AsciiString
-          className: com.provectus.kafka.ui.serdes.builtin.StringSerde
-          properties:
-            encoding: "ASCII"
-        
-          # overriding build-it String serde config   
-        - name: String 
-          properties:
-            encoding: "UTF-16"
-```
-
-### Protobuf
-
-Class name: `com.provectus.kafka.ui.serdes.builtin.ProtobufFileSerde`
-
-Sample configuration:
-```yaml
-kafka:
-  clusters:
-    - name: Cluster1
-      # Other Cluster configuration omitted ... 
-      serde:
-        - name: ProtobufFile
-          properties:
-            # path to the protobuf schema files directory
-            protobufFilesDir: "path/to/protofiles"
-            # default protobuf type that is used for KEY serialization/deserialization
-            # optional
-            protobufMessageNameForKey: my.Type1
-            # mapping of topic names to protobuf types, that will be used for KEYS  serialization/deserialization
-            # optional
-            protobufMessageNameForKeyByTopic:
-              topic1: my.KeyType1
-              topic2: my.KeyType2
-            # default protobuf type that is used for VALUE serialization/deserialization
-            # optional, if not set - first type in file will be used as default
-            protobufMessageName: my.Type1
-            # mapping of topic names to protobuf types, that will be used for VALUES  serialization/deserialization
-            # optional
-            protobufMessageNameByTopic:
-              topic1: my.Type1
-              "topic.2": my.Type2
-```
-Docker-compose sample for Protobuf serialization is [here](../compose/kafka-ui-serdes.yaml).
-
-Legacy configuration for protobuf is [here](Protobuf.md).
-
-### SchemaRegistry
-SchemaRegistry serde is automatically configured if schema registry properties set on cluster level.
-But you can add new SchemaRegistry-typed serdes that will connect to another schema-registry instance. 
-
-Class name: `com.provectus.kafka.ui.serdes.builtin.sr.SchemaRegistrySerde`
-
-Sample configuration:
-```yaml
-kafka:
-  clusters:
-    - name: Cluster1
-      # this url will be used by "SchemaRegistry" by default
-      schemaRegistry: http://main-schema-registry:8081
-      serde:
-        - name: AnotherSchemaRegistry
-          className: com.provectus.kafka.ui.serdes.builtin.sr.SchemaRegistrySerde
-          properties:
-            url:  http://another-schema-registry:8081
-            # auth properties, optional
-            username: nameForAuth
-            password: P@ssW0RdForAuth
-        
-          # and also add another SchemaRegistry serde
-        - name: ThirdSchemaRegistry
-          className: com.provectus.kafka.ui.serdes.builtin.sr.SchemaRegistrySerde
-          properties:
-            url:  http://another-yet-schema-registry:8081
-```
-
-## Setting serdes for specific topics
-You can specify preferable serde for topics key/value. This serde will be chosen by default in UI on topic's view/produce pages. 
-To do so, set `topicKeysPattern/topicValuesPattern` properties for the selected serde. Kafka-ui will choose the first serde that matches the specified pattern.
-
-Sample configuration:
-```yaml
-kafka:
-  clusters:
-    - name: Cluster1
-      serde:
-        - name: String
-          topicKeysPattern: click-events|imp-events
-        
-        - name: Int64
-          topicKeysPattern: ".*-events"
-        
-        - name: SchemaRegistry
-          topicValuesPattern: click-events|imp-events
-```
-
-
-## Default serdes
-You can specify which serde will be chosen in UI by default if no other serdes selected via `topicKeysPattern/topicValuesPattern` settings.
-
-Sample configuration:
-```yaml
-kafka:
-  clusters:
-    - name: Cluster1
-      defaultKeySerde: Int32
-      defaultValueSerde: String
-      serde:
-        - name: Int32
-          topicKeysPattern: click-events|imp-events
-```
-
-## Fallback
-If selected serde couldn't be applied (exception was thrown), then fallback (String serde with UTF-8 encoding) serde will be applied. Such messages will be specially highlighted in UI.
-
-## Custom pluggable serde registration
-You can implement your own serde and register it in kafka-ui application.
-To do so:
-1. Add `kafka-ui-serde-api` dependency (should be downloadable via maven central)
-2. Implement `com.provectus.kafka.ui.serde.api.Serde` interface. See javadoc for implementation requirements.
-3. Pack your serde into an uber jar, or provide a directory with a no-dependency jar and its dependency jars
-
-
-Example pluggable serdes :
-https://github.com/provectus/kafkaui-smile-serde
-https://github.com/provectus/kafkaui-glue-sr-serde
-
-Sample configuration:
-```yaml
-kafka:
-  clusters:
-    - name: Cluster1
-      serde:
-        - name: MyCustomSerde
-          className: my.lovely.org.KafkaUiSerde
-          filePath: /var/lib/kui-serde/my-kui-serde.jar
-          
-        - name: MyCustomSerde2
-          className: my.lovely.org.KafkaUiSerde2
-          filePath: /var/lib/kui-serde2
-          properties:
-            prop1: v1
-```

+ 0 - 22
documentation/project/ROADMAP.md

@@ -1,22 +0,0 @@
-Kafka-UI Project Roadmap
-====================
-
-Roadmap exists in a form of a github project board and is located [here](https://github.com/provectus/kafka-ui/projects/8).
-
-### How to use this document
-
-The roadmap provides a list of features we decided to prioritize in project development. It should serve as a reference point to understand projects' goals.
-
-We do prioritize them based on the feedback from the community, our own vision and other conditions and circumstances. 
-
-The roadmap sets the general way of development. The roadmap is mostly about long-term features. All the features could be re-prioritized, rescheduled or canceled.
-
-If there's no feature `X`, that **doesn't** mean we're **not** going to implement it. Feel free to raise the issue for the consideration. <br/>
-If a feature you want to see live is not present on roadmap, but there's an issue for the feature, feel free to vote for it using reactions in the issue.
-
-
-### How to contribute
-
-Since the roadmap consists mostly of big long-term features, implementing them might be not easy for a beginner outside collaborator.
-
-A good starting point is checking the [CONTRIBUTING.md](https://github.com/provectus/kafka-ui/blob/master/CONTRIBUTING.md) document.

+ 0 - 8
documentation/project/contributing/README.md

@@ -1,8 +0,0 @@
-# Contributing guidelines
-
-### Set up the local environment for development
-
-* [Prerequisites](prerequisites.md)
-<!--* [Setting up git](set-up-git.md)-->
-* [Building the app](building.md)
-* [Writing tests](testing.md)

+ 0 - 24
documentation/project/contributing/building-and-running-without-docker.md

@@ -1,24 +0,0 @@
-# Build & Run Without Docker
-
-Once you installed the prerequisites and cloned the repository, run the following steps in your project directory:
-
-## <a name="run_without_docker_quickly"></a> Running Without Docker Quickly
-
-- [Download the latest kafka-ui jar file](https://github.com/provectus/kafka-ui/releases)
-#### <a name="run_kafkaui_jar_file"></a> Execute the jar
-```sh
-java -Dspring.config.additional-location=<path-to-application-local.yml> -jar <path-to-kafka-ui-jar>
-```
-- Example of how to configure clusters in the [application-local.yml](https://github.com/provectus/kafka-ui/blob/master/kafka-ui-api/src/main/resources/application-local.yml) configuration file.
-
-## <a name="build_and_run_without_docker"></a> Building And Running Without Docker
-
-> **_NOTE:_**  If you want to get kafka-ui up and running locally quickly without building the jar file manually, then just follow [Running Without Docker Quickly](#run_without_docker_quickly)
-
-> Comment out `docker-maven-plugin` plugin in `kafka-ui-api` pom.xml
-
-- [Command to build the jar](./building.md#cmd_to_build_kafkaui_without_docker)
-
-> Once your build is successful and the jar file named kafka-ui-api-0.0.1-SNAPSHOT.jar is generated inside `kafka-ui-api/target`.
-
-- [Execute the jar](#run_kafkaui_jar_file)

+ 0 - 63
documentation/project/contributing/building.md

@@ -1,63 +0,0 @@
-# Build & Run
-
-Once you installed the prerequisites and cloned the repository, run the following steps in your project directory:
-
-## Step 1 : Build
-> **_NOTE:_**  If you are a macOS M1 user then please keep the below things in mind
-
-> Make sure you have ARM supported java installed
-
-> Skip the maven tests as they might not be successful
-
-- Build a docker image with the app
-```sh
-./mvnw clean install -Pprod
-```
-- if you need to build the frontend `kafka-ui-react-app`, go here
-     - [kafka-ui-react-app-build-documentation](../../../kafka-ui-react-app/README.md)
-
-<a name="cmd_to_build_kafkaui_without_docker"></a>
-- In case you want to build `kafka-ui-api` by skipping the tests
-```sh
-./mvnw clean install -Dmaven.test.skip=true -Pprod
-```
-
-- To build only the `kafka-ui-api` you can use this command:
-```sh
-./mvnw -f kafka-ui-api/pom.xml clean install -Pprod -DskipUIBuild=true
-```
-
-If this step is successful, it should create a docker image named `provectuslabs/kafka-ui` with `latest` tag on your local machine except macOS M1.
-
-## Step 2 : Run
-#### Using Docker Compose
-> **_NOTE:_**  If you are a macOS M1 user then you can use the arm64-supported docker compose script `./documentation/compose/kafka-ui-arm64.yaml`
- - Start the `kafka-ui` app using docker image built in step 1 along with Kafka clusters:
-```sh
-docker-compose -f ./documentation/compose/kafka-ui.yaml up -d
-```
-
-#### Using Spring Boot Run
- - If you want to start only kafka clusters (to run the `kafka-ui` app via `spring-boot:run`):
-```sh
-docker-compose -f ./documentation/compose/kafka-clusters-only.yaml up -d
-```
-- Then start the app.
-```sh
-./mvnw spring-boot:run -Pprod
-
-# or
-
-./mvnw spring-boot:run -Pprod -Dspring.config.location=file:///path/to/conf.yaml
-```
-
-#### Running in kubernetes
-- Using Helm Charts
-```sh bash
-helm repo add kafka-ui https://provectus.github.io/kafka-ui
-helm install kafka-ui kafka-ui/kafka-ui
-```
-To read more please follow to [chart documentation](../../../charts/kafka-ui/README.md).
-
-## Step 3 : Access Kafka-UI
- - To see the `kafka-ui` app running, navigate to http://localhost:8080.

+ 0 - 42
documentation/project/contributing/prerequisites.md

@@ -1,42 +0,0 @@
-### Prerequisites
-
-This page explains how to get the software you need to use a Linux or macOS
-machine for local development.
-
-Before you begin contributing you must have:
-
-* A GitHub account
-* `Java` 17 or newer
-* `Git`
-* `Docker`
-
-### Installing prerequisites on macOS
-
-1. Install [brew](https://brew.sh/).
-2. Install brew cask:
-```sh
-brew cask
-```
-3. Install Eclipse Temurin 17 via Homebrew cask:
-```sh
-brew tap homebrew/cask-versions
-brew install temurin17
-```
-4. Verify Installation
-```sh
-java -version
-```
-Note: In case OpenJDK 17 is not set as your default Java, you can consider including it in your `$PATH` after installation
-```sh
-export PATH="$(/usr/libexec/java_home -v 17)/bin:$PATH"
-export JAVA_HOME="$(/usr/libexec/java_home -v 17)"
-```
-
-## Tips
-
-Consider allocating not less than 4GB of memory for your docker.
-Otherwise, some apps within a stack (e.g. `kafka-ui.yaml`) might crash.
-
-## Where to go next
-
-In the next section, you'll [learn how to Build and Run kafka-ui](building.md).

+ 0 - 8
documentation/project/contributing/set-up-git.md

@@ -1,8 +0,0 @@
-### Nothing special here yet.
-<!--
-TODO:
-
-1. Cloning
-2. Credentials set up (git user.name & email)
-3. Signing off (DCO)
--->

+ 0 - 28
documentation/project/contributing/testing.md

@@ -1,28 +0,0 @@
-# Testing
-
-
-
-## Test suites
-
-
-## Writing new tests
-
-
-### Writing tests for new features
-
-
-### Writing tests for bug fixes
-
-
-### Writing new integration tests
-
-
-
-## Running tests
-
-### Unit Tests
-
-
-### Integration Tests
-
-

+ 333 - 0
etc/checkstyle/checkstyle-e2e.xml

@@ -0,0 +1,333 @@
+<?xml version="1.0"?>
+<!DOCTYPE module PUBLIC
+        "-//Checkstyle//DTD Checkstyle Configuration 1.3//EN"
+        "https://checkstyle.org/dtds/configuration_1_3.dtd">
+
+<!--
+    Checkstyle configuration that checks the Google coding conventions from Google Java Style
+    that can be found at https://google.github.io/styleguide/javaguide.html
+
+    Checkstyle is very configurable. Be sure to read the documentation at
+    http://checkstyle.org (or in your downloaded distribution).
+
+    To completely disable a check, just comment it out or delete it from the file.
+    To suppress certain violations please review suppression filters.
+
+    Authors: Max Vetrenko, Ruslan Diachenko, Roman Ivanov.
+ -->
+
+<module name = "Checker">
+    <property name="charset" value="UTF-8"/>
+
+    <property name="severity" value="warning"/>
+
+    <property name="fileExtensions" value="java, properties, xml"/>
+    <!-- Excludes all 'module-info.java' files              -->
+    <!-- See https://checkstyle.org/config_filefilters.html -->
+    <module name="BeforeExecutionExclusionFileFilter">
+        <property name="fileNamePattern" value="module\-info\.java$"/>
+    </module>
+    <!-- https://checkstyle.org/config_filters.html#SuppressionFilter -->
+    <module name="SuppressionFilter">
+        <property name="file" value="${org.checkstyle.google.suppressionfilter.config}"
+                  default="checkstyle-suppressions.xml" />
+        <property name="optional" value="true"/>
+    </module>
+
+    <!-- Checks for whitespace                               -->
+    <!-- See http://checkstyle.org/config_whitespace.html -->
+    <module name="FileTabCharacter">
+        <property name="eachLine" value="true"/>
+    </module>
+
+    <module name="LineLength">
+        <property name="fileExtensions" value="java"/>
+        <property name="max" value="120"/>
+        <property name="ignorePattern" value="^package.*|^import.*|a href|href|http://|https://|ftp://"/>
+    </module>
+
+    <module name="TreeWalker">
+        <module name="OuterTypeFilename"/>
+        <module name="IllegalTokenText">
+            <property name="tokens" value="STRING_LITERAL, CHAR_LITERAL"/>
+            <property name="format"
+                      value="\\u00(09|0(a|A)|0(c|C)|0(d|D)|22|27|5(C|c))|\\(0(10|11|12|14|15|42|47)|134)"/>
+            <property name="message"
+                      value="Consider using special escape sequence instead of octal value or Unicode escaped value."/>
+        </module>
+        <module name="AvoidEscapedUnicodeCharacters">
+            <property name="allowEscapesForControlCharacters" value="true"/>
+            <property name="allowByTailComment" value="true"/>
+            <property name="allowNonPrintableEscapes" value="true"/>
+        </module>
+        <module name="AvoidStarImport"/>
+        <module name="OneTopLevelClass"/>
+        <module name="NoLineWrap">
+            <property name="tokens" value="PACKAGE_DEF, IMPORT, STATIC_IMPORT"/>
+        </module>
+        <module name="EmptyBlock">
+            <property name="option" value="TEXT"/>
+            <property name="tokens"
+                      value="LITERAL_TRY, LITERAL_FINALLY, LITERAL_IF, LITERAL_ELSE, LITERAL_SWITCH"/>
+        </module>
+        <module name="NeedBraces">
+            <property name="tokens"
+                      value="LITERAL_DO, LITERAL_ELSE, LITERAL_FOR, LITERAL_IF, LITERAL_WHILE"/>
+        </module>
+        <module name="LeftCurly">
+            <property name="tokens"
+                      value="ANNOTATION_DEF, CLASS_DEF, CTOR_DEF, ENUM_CONSTANT_DEF, ENUM_DEF,
+                    INTERFACE_DEF, LAMBDA, LITERAL_CASE, LITERAL_CATCH, LITERAL_DEFAULT,
+                    LITERAL_DO, LITERAL_ELSE, LITERAL_FINALLY, LITERAL_FOR, LITERAL_IF,
+                    LITERAL_SWITCH, LITERAL_SYNCHRONIZED, LITERAL_TRY, LITERAL_WHILE, METHOD_DEF,
+                    OBJBLOCK, STATIC_INIT"/>
+        </module>
+        <module name="RightCurly">
+            <property name="id" value="RightCurlySame"/>
+            <property name="tokens"
+                      value="LITERAL_TRY, LITERAL_CATCH, LITERAL_FINALLY, LITERAL_IF, LITERAL_ELSE,
+                    LITERAL_DO"/>
+        </module>
+        <module name="RightCurly">
+            <property name="id" value="RightCurlyAlone"/>
+            <property name="option" value="alone"/>
+            <property name="tokens"
+                      value="CLASS_DEF, METHOD_DEF, CTOR_DEF, LITERAL_FOR, LITERAL_WHILE, STATIC_INIT,
+                    INSTANCE_INIT, ANNOTATION_DEF, ENUM_DEF"/>
+        </module>
+        <module name="SuppressionXpathSingleFilter">
+            <!-- suppression is required till https://github.com/checkstyle/checkstyle/issues/7541 -->
+            <property name="id" value="RightCurlyAlone"/>
+            <property name="query" value="//RCURLY[parent::SLIST[count(./*)=1]
+                                                 or preceding-sibling::*[last()][self::LCURLY]]"/>
+        </module>
+        <module name="WhitespaceAfter">
+            <property name="tokens"
+                      value="COMMA, SEMI, TYPECAST, LITERAL_IF, LITERAL_ELSE,
+                    LITERAL_WHILE, LITERAL_DO, LITERAL_FOR, DO_WHILE"/>
+        </module>
+        <module name="WhitespaceAround">
+            <property name="allowEmptyConstructors" value="true"/>
+            <property name="allowEmptyLambdas" value="true"/>
+            <property name="allowEmptyMethods" value="true"/>
+            <property name="allowEmptyTypes" value="true"/>
+            <property name="allowEmptyLoops" value="true"/>
+            <property name="tokens"
+                      value="ASSIGN, BAND, BAND_ASSIGN, BOR, BOR_ASSIGN, BSR, BSR_ASSIGN, BXOR,
+                    BXOR_ASSIGN, COLON, DIV, DIV_ASSIGN, DO_WHILE, EQUAL, GE, GT, LAMBDA, LAND,
+                    LCURLY, LE, LITERAL_CATCH, LITERAL_DO, LITERAL_ELSE, LITERAL_FINALLY,
+                    LITERAL_FOR, LITERAL_IF, LITERAL_RETURN, LITERAL_SWITCH, LITERAL_SYNCHRONIZED,
+                     LITERAL_TRY, LITERAL_WHILE, LOR, LT, MINUS, MINUS_ASSIGN, MOD, MOD_ASSIGN,
+                     NOT_EQUAL, PLUS, PLUS_ASSIGN, QUESTION, RCURLY, SL, SLIST, SL_ASSIGN, SR,
+                     SR_ASSIGN, STAR, STAR_ASSIGN, LITERAL_ASSERT, TYPE_EXTENSION_AND"/>
+            <message key="ws.notFollowed"
+                     value="WhitespaceAround: ''{0}'' is not followed by whitespace. Empty blocks may only be represented as '{}' when not part of a multi-block statement (4.1.3)"/>
+            <message key="ws.notPreceded"
+                     value="WhitespaceAround: ''{0}'' is not preceded with whitespace."/>
+        </module>
+        <module name="OneStatementPerLine"/>
+<!--        <module name="MultipleVariableDeclarations"/>-->
+        <module name="ArrayTypeStyle"/>
+        <module name="MissingSwitchDefault"/>
+        <module name="FallThrough"/>
+        <module name="UpperEll"/>
+        <module name="ModifierOrder"/>
+        <module name="EmptyLineSeparator">
+            <property name="tokens"
+                      value="PACKAGE_DEF, IMPORT, STATIC_IMPORT, CLASS_DEF, INTERFACE_DEF, ENUM_DEF,
+                    STATIC_INIT, INSTANCE_INIT, METHOD_DEF, CTOR_DEF, VARIABLE_DEF"/>
+            <property name="allowNoEmptyLineBetweenFields" value="true"/>
+        </module>
+        <module name="SeparatorWrap">
+            <property name="id" value="SeparatorWrapDot"/>
+            <property name="tokens" value="DOT"/>
+            <property name="option" value="nl"/>
+        </module>
+        <module name="SeparatorWrap">
+            <property name="id" value="SeparatorWrapComma"/>
+            <property name="tokens" value="COMMA"/>
+            <property name="option" value="EOL"/>
+        </module>
+        <module name="SeparatorWrap">
+            <!-- ELLIPSIS is EOL until https://github.com/google/styleguide/issues/258 -->
+            <property name="id" value="SeparatorWrapEllipsis"/>
+            <property name="tokens" value="ELLIPSIS"/>
+            <property name="option" value="EOL"/>
+        </module>
+        <module name="SeparatorWrap">
+            <!-- ARRAY_DECLARATOR is EOL until https://github.com/google/styleguide/issues/259 -->
+            <property name="id" value="SeparatorWrapArrayDeclarator"/>
+            <property name="tokens" value="ARRAY_DECLARATOR"/>
+            <property name="option" value="EOL"/>
+        </module>
+        <module name="SeparatorWrap">
+            <property name="id" value="SeparatorWrapMethodRef"/>
+            <property name="tokens" value="METHOD_REF"/>
+            <property name="option" value="nl"/>
+        </module>
+        <module name="PackageName">
+            <property name="format" value="^[a-z]+(\.[a-z][a-z0-9]*)*$"/>
+            <message key="name.invalidPattern"
+                     value="Package name ''{0}'' must match pattern ''{1}''."/>
+        </module>
+        <module name="TypeName">
+            <property name="tokens" value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, ANNOTATION_DEF"/>
+            <message key="name.invalidPattern"
+                     value="Type name ''{0}'' must match pattern ''{1}''."/>
+        </module>
+        <module name="MemberName">
+            <property name="format" value="^[a-z][a-z0-9][a-zA-Z0-9]*$"/>
+            <message key="name.invalidPattern"
+                     value="Member name ''{0}'' must match pattern ''{1}''."/>
+        </module>
+        <module name="ParameterName">
+            <property name="format" value="^[a-z]([a-z0-9][a-zA-Z0-9]*)?$"/>
+            <message key="name.invalidPattern"
+                     value="Parameter name ''{0}'' must match pattern ''{1}''."/>
+        </module>
+        <module name="LambdaParameterName">
+            <property name="format" value="^[a-z]([a-z0-9][a-zA-Z0-9]*)?$"/>
+            <message key="name.invalidPattern"
+                     value="Lambda parameter name ''{0}'' must match pattern ''{1}''."/>
+        </module>
+        <module name="CatchParameterName">
+            <property name="format" value="^[a-z]([a-z0-9][a-zA-Z0-9]*)?$"/>
+            <message key="name.invalidPattern"
+                     value="Catch parameter name ''{0}'' must match pattern ''{1}''."/>
+        </module>
+        <module name="LocalVariableName">
+            <property name="format" value="^[a-z]([a-z0-9][a-zA-Z0-9]*)?$"/>
+            <message key="name.invalidPattern"
+                     value="Local variable name ''{0}'' must match pattern ''{1}''."/>
+        </module>
+        <module name="ClassTypeParameterName">
+            <property name="format" value="(^[A-Z][0-9]?)$|([A-Z][a-zA-Z0-9]*[T]$)"/>
+            <message key="name.invalidPattern"
+                     value="Class type name ''{0}'' must match pattern ''{1}''."/>
+        </module>
+        <module name="MethodTypeParameterName">
+            <property name="format" value="(^[A-Z][0-9]?)$|([A-Z][a-zA-Z0-9]*[T]$)"/>
+            <message key="name.invalidPattern"
+                     value="Method type name ''{0}'' must match pattern ''{1}''."/>
+        </module>
+        <module name="InterfaceTypeParameterName">
+            <property name="format" value="(^[A-Z][0-9]?)$|([A-Z][a-zA-Z0-9]*[T]$)"/>
+            <message key="name.invalidPattern"
+                     value="Interface type name ''{0}'' must match pattern ''{1}''."/>
+        </module>
+        <module name="NoFinalizer"/>
+        <module name="GenericWhitespace">
+            <message key="ws.followed"
+                     value="GenericWhitespace ''{0}'' is followed by whitespace."/>
+            <message key="ws.preceded"
+                     value="GenericWhitespace ''{0}'' is preceded with whitespace."/>
+            <message key="ws.illegalFollow"
+                     value="GenericWhitespace ''{0}'' should be followed by whitespace."/>
+            <message key="ws.notPreceded"
+                     value="GenericWhitespace ''{0}'' is not preceded with whitespace."/>
+        </module>
+        <module name="Indentation">
+            <property name="basicOffset" value="2"/>
+            <property name="braceAdjustment" value="0"/>
+            <property name="caseIndent" value="2"/>
+            <property name="throwsIndent" value="4"/>
+            <property name="lineWrappingIndentation" value="4"/>
+            <property name="arrayInitIndent" value="2"/>
+        </module>
+        <module name="AbbreviationAsWordInName">
+            <property name="ignoreFinal" value="false"/>
+            <property name="allowedAbbreviationLength" value="1"/>
+            <property name="tokens"
+                      value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, ANNOTATION_DEF, ANNOTATION_FIELD_DEF,
+                    PARAMETER_DEF, VARIABLE_DEF, METHOD_DEF"/>
+        </module>
+        <module name="OverloadMethodsDeclarationOrder"/>
+<!--        <module name="VariableDeclarationUsageDistance"/>-->
+        <module name="CustomImportOrder">
+            <property name="sortImportsInGroupAlphabetically" value="true"/>
+            <property name="separateLineBetweenGroups" value="true"/>
+            <property name="customImportOrderRules" value="STATIC###THIRD_PARTY_PACKAGE"/>
+            <property name="tokens" value="IMPORT, STATIC_IMPORT, PACKAGE_DEF"/>
+        </module>
+        <module name="MethodParamPad">
+            <property name="tokens"
+                      value="CTOR_DEF, LITERAL_NEW, METHOD_CALL, METHOD_DEF,
+                    SUPER_CTOR_CALL, ENUM_CONSTANT_DEF"/>
+        </module>
+        <module name="NoWhitespaceBefore">
+            <property name="tokens"
+                      value="COMMA, SEMI, POST_INC, POST_DEC, DOT, ELLIPSIS,
+                    LABELED_STAT, METHOD_REF"/>
+            <property name="allowLineBreaks" value="true"/>
+        </module>
+        <module name="ParenPad">
+            <property name="tokens"
+                      value="ANNOTATION, ANNOTATION_FIELD_DEF, CTOR_CALL, CTOR_DEF, DOT, ENUM_CONSTANT_DEF,
+                    EXPR, LITERAL_CATCH, LITERAL_DO, LITERAL_FOR, LITERAL_IF, LITERAL_NEW,
+                    LITERAL_SWITCH, LITERAL_SYNCHRONIZED, LITERAL_WHILE, METHOD_CALL,
+                    METHOD_DEF, QUESTION, RESOURCE_SPECIFICATION, SUPER_CTOR_CALL, LAMBDA"/>
+        </module>
+        <module name="OperatorWrap">
+            <property name="option" value="NL"/>
+            <property name="tokens"
+                      value="BAND, BOR, BSR, BXOR, DIV, EQUAL, GE, GT, LAND, LE, LITERAL_INSTANCEOF, LOR,
+                    LT, MINUS, MOD, NOT_EQUAL, PLUS, QUESTION, SL, SR, STAR, METHOD_REF "/>
+        </module>
+        <module name="AnnotationLocation">
+            <property name="id" value="AnnotationLocationMostCases"/>
+            <property name="tokens"
+                      value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, METHOD_DEF, CTOR_DEF"/>
+        </module>
+        <module name="AnnotationLocation">
+            <property name="id" value="AnnotationLocationVariables"/>
+            <property name="tokens" value="VARIABLE_DEF"/>
+            <property name="allowSamelineMultipleAnnotations" value="true"/>
+        </module>
+        <module name="NonEmptyAtclauseDescription"/>
+        <module name="InvalidJavadocPosition"/>
+        <module name="JavadocTagContinuationIndentation"/>
+        <module name="SummaryJavadoc">
+            <property name="forbiddenSummaryFragments"
+                      value="^@return the *|^This method returns |^A [{]@code [a-zA-Z0-9]+[}]( is a )"/>
+        </module>
+        <module name="JavadocParagraph"/>
+        <module name="AtclauseOrder">
+            <property name="tagOrder" value="@param, @return, @throws, @deprecated"/>
+            <property name="target"
+                      value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, METHOD_DEF, CTOR_DEF, VARIABLE_DEF"/>
+        </module>
+        <module name="JavadocMethod">
+            <property name="accessModifiers" value="public"/>
+            <property name="allowMissingParamTags" value="true"/>
+            <property name="allowMissingReturnTag" value="true"/>
+            <property name="allowedAnnotations" value="Override, Test"/>
+            <property name="tokens" value="METHOD_DEF, CTOR_DEF, ANNOTATION_FIELD_DEF"/>
+        </module>
+<!--        <module name="MissingJavadocMethod">-->
+<!--            <property name="scope" value="public"/>-->
+<!--            <property name="minLineCount" value="2"/>-->
+<!--            <property name="allowedAnnotations" value="Override, Test"/>-->
+<!--            <property name="tokens" value="METHOD_DEF, CTOR_DEF, ANNOTATION_FIELD_DEF"/>-->
+<!--        </module>-->
+        <module name="MethodName">
+            <property name="format" value="^[a-z][a-z0-9][a-zA-Z0-9_]*$"/>
+            <message key="name.invalidPattern"
+                     value="Method name ''{0}'' must match pattern ''{1}''."/>
+        </module>
+        <module name="SingleLineJavadoc">
+            <property name="ignoreInlineTags" value="false"/>
+        </module>
+        <module name="EmptyCatchBlock">
+            <property name="exceptionVariableName" value="ignored"/>
+        </module>
+        <module name="CommentsIndentation">
+            <property name="tokens" value="SINGLE_LINE_COMMENT, BLOCK_COMMENT_BEGIN"/>
+        </module>
+        <!-- https://checkstyle.org/config_filters.html#SuppressionXpathFilter -->
+        <module name="SuppressionXpathFilter">
+            <property name="file" value="${org.checkstyle.google.suppressionxpathfilter.config}"
+                      default="checkstyle-xpath-suppressions.xml" />
+            <property name="optional" value="true"/>
+        </module>
+    </module>
+</module>

+ 2 - 2
etc/checkstyle/checkstyle.xml

@@ -318,7 +318,7 @@
             <property name="ignoreInlineTags" value="false"/>
             <property name="ignoreInlineTags" value="false"/>
         </module>
         </module>
         <module name="EmptyCatchBlock">
         <module name="EmptyCatchBlock">
-            <property name="exceptionVariableName" value="expected"/>
+            <property name="exceptionVariableName" value="ignored"/>
         </module>
         </module>
         <module name="CommentsIndentation">
         <module name="CommentsIndentation">
             <property name="tokens" value="SINGLE_LINE_COMMENT, BLOCK_COMMENT_BEGIN"/>
             <property name="tokens" value="SINGLE_LINE_COMMENT, BLOCK_COMMENT_BEGIN"/>
@@ -330,4 +330,4 @@
             <property name="optional" value="true"/>
             <property name="optional" value="true"/>
         </module>
         </module>
     </module>
     </module>
-</module>
+</module>

+ 0 - 65
helm_chart.md

@@ -1,65 +0,0 @@
-# Quick Start with Helm Chart
-
-### General
-1. Clone/Copy Chart to your working directory
-2. Execute command ```helm install helm-release-name charts/kafka-ui```
-
-### Passing Kafka-UI configuration as Dict
-Create values.yml file
-```
-yamlApplicationConfig:
-  kafka:
-    clusters:
-      - name: yaml
-        bootstrapServers:  kafka-cluster-broker-endpoints:9092
-  auth:
-    type: disabled
-  management:
-    health:
-      ldap:
-        enabled: false
-```
-Install by executing command
-> helm install helm-release-name charts/kafka-ui -f values.yml
-
-
-### Passing configuration file as ConfigMap 
-Create config map
-```
-apiVersion: v1
-kind: ConfigMap
-metadata:
-  name: kafka-ui-existing-configmap-as-a-configfile
-data:
-  config.yml: |-
-    kafka:
-      clusters:
-        - name: yaml
-          bootstrapServers: kafka-cluster-broker-endpoints:9092
-    auth:
-      type: disabled
-    management:
-      health:
-        ldap:
-          enabled: false
-```
-This ConfigMap will be mounted to the Pod
-
-Install by executing command
-> helm install helm-release-name charts/kafka-ui --set yamlApplicationConfigConfigMap.name="kafka-ui-config",yamlApplicationConfigConfigMap.keyName="config.yml"
-
-### Passing environment variables as ConfigMap
-Create config map
-```
-apiVersion: v1
-kind: ConfigMap
-metadata:
-  name: kafka-ui-helm-values
-data:
-  KAFKA_CLUSTERS_0_NAME: "kafka-cluster-name"
-  KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: "kafka-cluster-broker-endpoints:9092"
-  AUTH_TYPE: "DISABLED"
-  MANAGEMENT_HEALTH_LDAP_ENABLED: "FALSE" 
-```
-Install by executing command
-> helm install helm-release-name charts/kafka-ui --set existingConfigMap="kafka-ui-helm-values"  

+ 2 - 1
kafka-ui-api/Dockerfile

@@ -1,4 +1,5 @@
-FROM azul/zulu-openjdk-alpine:17-jre
+#FROM azul/zulu-openjdk-alpine:17-jre-headless
+FROM azul/zulu-openjdk-alpine@sha256:a36679ac0d28cb835e2a8c00e1e0d95509c6c51c5081c7782b85edb1f37a771a
 
 
 RUN apk add --no-cache gcompat # need to make snappy codec work
 RUN apk add --no-cache gcompat # need to make snappy codec work
 RUN addgroup -S kafkaui && adduser -S kafkaui -G kafkaui
 RUN addgroup -S kafkaui && adduser -S kafkaui -G kafkaui

+ 177 - 35
kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/RetryingKafkaConnectClient.java

@@ -6,7 +6,13 @@ import com.provectus.kafka.ui.config.ClustersProperties;
 import com.provectus.kafka.ui.connect.ApiClient;
 import com.provectus.kafka.ui.connect.ApiClient;
 import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
 import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
 import com.provectus.kafka.ui.connect.model.Connector;
 import com.provectus.kafka.ui.connect.model.Connector;
+import com.provectus.kafka.ui.connect.model.ConnectorPlugin;
+import com.provectus.kafka.ui.connect.model.ConnectorPluginConfigValidationResponse;
+import com.provectus.kafka.ui.connect.model.ConnectorStatus;
+import com.provectus.kafka.ui.connect.model.ConnectorTask;
+import com.provectus.kafka.ui.connect.model.ConnectorTopics;
 import com.provectus.kafka.ui.connect.model.NewConnector;
 import com.provectus.kafka.ui.connect.model.NewConnector;
+import com.provectus.kafka.ui.connect.model.TaskStatus;
 import com.provectus.kafka.ui.exception.KafkaConnectConflictReponseException;
 import com.provectus.kafka.ui.exception.KafkaConnectConflictReponseException;
 import com.provectus.kafka.ui.exception.ValidationException;
 import com.provectus.kafka.ui.exception.ValidationException;
 import com.provectus.kafka.ui.util.WebClientConfigurator;
 import com.provectus.kafka.ui.util.WebClientConfigurator;
@@ -15,11 +21,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map;
 import javax.annotation.Nullable;
 import javax.annotation.Nullable;
 import lombok.extern.slf4j.Slf4j;
 import lombok.extern.slf4j.Slf4j;
-import org.springframework.core.ParameterizedTypeReference;
-import org.springframework.http.HttpHeaders;
-import org.springframework.http.HttpMethod;
-import org.springframework.http.MediaType;
-import org.springframework.util.MultiValueMap;
+import org.springframework.http.ResponseEntity;
 import org.springframework.util.unit.DataSize;
 import org.springframework.util.unit.DataSize;
 import org.springframework.web.client.RestClientException;
 import org.springframework.web.client.RestClientException;
 import org.springframework.web.reactive.function.client.WebClient;
 import org.springframework.web.reactive.function.client.WebClient;
@@ -79,6 +81,176 @@ public class RetryingKafkaConnectClient extends KafkaConnectClientApi {
     );
     );
   }
   }
 
 
+  @Override
+  public Mono<ResponseEntity<Connector>> createConnectorWithHttpInfo(NewConnector newConnector)
+      throws WebClientResponseException {
+    return withRetryOnConflict(super.createConnectorWithHttpInfo(newConnector));
+  }
+
+  @Override
+  public Mono<Void> deleteConnector(String connectorName) throws WebClientResponseException {
+    return withRetryOnConflict(super.deleteConnector(connectorName));
+  }
+
+  @Override
+  public Mono<ResponseEntity<Void>> deleteConnectorWithHttpInfo(String connectorName)
+      throws WebClientResponseException {
+    return withRetryOnConflict(super.deleteConnectorWithHttpInfo(connectorName));
+  }
+
+
+  @Override
+  public Mono<Connector> getConnector(String connectorName) throws WebClientResponseException {
+    return withRetryOnConflict(super.getConnector(connectorName));
+  }
+
+  @Override
+  public Mono<ResponseEntity<Connector>> getConnectorWithHttpInfo(String connectorName)
+      throws WebClientResponseException {
+    return withRetryOnConflict(super.getConnectorWithHttpInfo(connectorName));
+  }
+
+  @Override
+  public Mono<Map<String, Object>> getConnectorConfig(String connectorName) throws WebClientResponseException {
+    return withRetryOnConflict(super.getConnectorConfig(connectorName));
+  }
+
+  @Override
+  public Mono<ResponseEntity<Map<String, Object>>> getConnectorConfigWithHttpInfo(String connectorName)
+      throws WebClientResponseException {
+    return withRetryOnConflict(super.getConnectorConfigWithHttpInfo(connectorName));
+  }
+
+  @Override
+  public Flux<ConnectorPlugin> getConnectorPlugins() throws WebClientResponseException {
+    return withRetryOnConflict(super.getConnectorPlugins());
+  }
+
+  @Override
+  public Mono<ResponseEntity<List<ConnectorPlugin>>> getConnectorPluginsWithHttpInfo()
+      throws WebClientResponseException {
+    return withRetryOnConflict(super.getConnectorPluginsWithHttpInfo());
+  }
+
+  @Override
+  public Mono<ConnectorStatus> getConnectorStatus(String connectorName) throws WebClientResponseException {
+    return withRetryOnConflict(super.getConnectorStatus(connectorName));
+  }
+
+  @Override
+  public Mono<ResponseEntity<ConnectorStatus>> getConnectorStatusWithHttpInfo(String connectorName)
+      throws WebClientResponseException {
+    return withRetryOnConflict(super.getConnectorStatusWithHttpInfo(connectorName));
+  }
+
+  @Override
+  public Mono<TaskStatus> getConnectorTaskStatus(String connectorName, Integer taskId)
+      throws WebClientResponseException {
+    return withRetryOnConflict(super.getConnectorTaskStatus(connectorName, taskId));
+  }
+
+  @Override
+  public Mono<ResponseEntity<TaskStatus>> getConnectorTaskStatusWithHttpInfo(String connectorName, Integer taskId)
+      throws WebClientResponseException {
+    return withRetryOnConflict(super.getConnectorTaskStatusWithHttpInfo(connectorName, taskId));
+  }
+
+  @Override
+  public Flux<ConnectorTask> getConnectorTasks(String connectorName) throws WebClientResponseException {
+    return withRetryOnConflict(super.getConnectorTasks(connectorName));
+  }
+
+  @Override
+  public Mono<ResponseEntity<List<ConnectorTask>>> getConnectorTasksWithHttpInfo(String connectorName)
+      throws WebClientResponseException {
+    return withRetryOnConflict(super.getConnectorTasksWithHttpInfo(connectorName));
+  }
+
+  @Override
+  public Mono<Map<String, ConnectorTopics>> getConnectorTopics(String connectorName) throws WebClientResponseException {
+    return withRetryOnConflict(super.getConnectorTopics(connectorName));
+  }
+
+  @Override
+  public Mono<ResponseEntity<Map<String, ConnectorTopics>>> getConnectorTopicsWithHttpInfo(String connectorName)
+      throws WebClientResponseException {
+    return withRetryOnConflict(super.getConnectorTopicsWithHttpInfo(connectorName));
+  }
+
+  @Override
+  public Flux<String> getConnectors(String search) throws WebClientResponseException {
+    return withRetryOnConflict(super.getConnectors(search));
+  }
+
+  @Override
+  public Mono<ResponseEntity<List<String>>> getConnectorsWithHttpInfo(String search) throws WebClientResponseException {
+    return withRetryOnConflict(super.getConnectorsWithHttpInfo(search));
+  }
+
+  @Override
+  public Mono<Void> pauseConnector(String connectorName) throws WebClientResponseException {
+    return withRetryOnConflict(super.pauseConnector(connectorName));
+  }
+
+  @Override
+  public Mono<ResponseEntity<Void>> pauseConnectorWithHttpInfo(String connectorName) throws WebClientResponseException {
+    return withRetryOnConflict(super.pauseConnectorWithHttpInfo(connectorName));
+  }
+
+  @Override
+  public Mono<Void> restartConnector(String connectorName, Boolean includeTasks, Boolean onlyFailed)
+      throws WebClientResponseException {
+    return withRetryOnConflict(super.restartConnector(connectorName, includeTasks, onlyFailed));
+  }
+
+  @Override
+  public Mono<ResponseEntity<Void>> restartConnectorWithHttpInfo(String connectorName, Boolean includeTasks,
+                                                                 Boolean onlyFailed) throws WebClientResponseException {
+    return withRetryOnConflict(super.restartConnectorWithHttpInfo(connectorName, includeTasks, onlyFailed));
+  }
+
+  @Override
+  public Mono<Void> restartConnectorTask(String connectorName, Integer taskId) throws WebClientResponseException {
+    return withRetryOnConflict(super.restartConnectorTask(connectorName, taskId));
+  }
+
+  @Override
+  public Mono<ResponseEntity<Void>> restartConnectorTaskWithHttpInfo(String connectorName, Integer taskId)
+      throws WebClientResponseException {
+    return withRetryOnConflict(super.restartConnectorTaskWithHttpInfo(connectorName, taskId));
+  }
+
+  @Override
+  public Mono<Void> resumeConnector(String connectorName) throws WebClientResponseException {
+    return super.resumeConnector(connectorName);
+  }
+
+  @Override
+  public Mono<ResponseEntity<Void>> resumeConnectorWithHttpInfo(String connectorName)
+      throws WebClientResponseException {
+    return withRetryOnConflict(super.resumeConnectorWithHttpInfo(connectorName));
+  }
+
+  @Override
+  public Mono<ResponseEntity<Connector>> setConnectorConfigWithHttpInfo(String connectorName,
+                                                                        Map<String, Object> requestBody)
+      throws WebClientResponseException {
+    return withRetryOnConflict(super.setConnectorConfigWithHttpInfo(connectorName, requestBody));
+  }
+
+  @Override
+  public Mono<ConnectorPluginConfigValidationResponse> validateConnectorPluginConfig(String pluginName,
+                                                                                     Map<String, Object> requestBody)
+      throws WebClientResponseException {
+    return withRetryOnConflict(super.validateConnectorPluginConfig(pluginName, requestBody));
+  }
+
+  @Override
+  public Mono<ResponseEntity<ConnectorPluginConfigValidationResponse>> validateConnectorPluginConfigWithHttpInfo(
+      String pluginName, Map<String, Object> requestBody) throws WebClientResponseException {
+    return withRetryOnConflict(super.validateConnectorPluginConfigWithHttpInfo(pluginName, requestBody));
+  }
+
   private static class RetryingApiClient extends ApiClient {
   private static class RetryingApiClient extends ApiClient {
 
 
     public RetryingApiClient(ConnectCluster config,
     public RetryingApiClient(ConnectCluster config,
@@ -108,35 +280,5 @@ public class RetryingKafkaConnectClient extends KafkaConnectClientApi {
           .configureBufferSize(maxBuffSize)
           .configureBufferSize(maxBuffSize)
           .build();
           .build();
     }
     }
-
-    @Override
-    public <T> Mono<T> invokeAPI(String path, HttpMethod method, Map<String, Object> pathParams,
-                                 MultiValueMap<String, String> queryParams, Object body,
-                                 HttpHeaders headerParams,
-                                 MultiValueMap<String, String> cookieParams,
-                                 MultiValueMap<String, Object> formParams, List<MediaType> accept,
-                                 MediaType contentType, String[] authNames,
-                                 ParameterizedTypeReference<T> returnType)
-        throws RestClientException {
-      return withRetryOnConflict(
-          super.invokeAPI(path, method, pathParams, queryParams, body, headerParams, cookieParams,
-              formParams, accept, contentType, authNames, returnType)
-      );
-    }
-
-    @Override
-    public <T> Flux<T> invokeFluxAPI(String path, HttpMethod method, Map<String, Object> pathParams,
-                                     MultiValueMap<String, String> queryParams, Object body,
-                                     HttpHeaders headerParams,
-                                     MultiValueMap<String, String> cookieParams,
-                                     MultiValueMap<String, Object> formParams,
-                                     List<MediaType> accept, MediaType contentType,
-                                     String[] authNames, ParameterizedTypeReference<T> returnType)
-        throws RestClientException {
-      return withRetryOnConflict(
-          super.invokeFluxAPI(path, method, pathParams, queryParams, body, headerParams,
-              cookieParams, formParams, accept, contentType, authNames, returnType)
-      );
-    }
   }
   }
 }
 }

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java

@@ -1,6 +1,7 @@
 package com.provectus.kafka.ui.config;
 package com.provectus.kafka.ui.config;
 
 
 import com.provectus.kafka.ui.model.MetricsConfig;
 import com.provectus.kafka.ui.model.MetricsConfig;
+import jakarta.annotation.PostConstruct;
 import java.util.ArrayList;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.HashSet;
@@ -8,7 +9,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map;
 import java.util.Set;
 import java.util.Set;
 import javax.annotation.Nullable;
 import javax.annotation.Nullable;
-import javax.annotation.PostConstruct;
 import lombok.AllArgsConstructor;
 import lombok.AllArgsConstructor;
 import lombok.Builder;
 import lombok.Builder;
 import lombok.Data;
 import lombok.Data;

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java

@@ -1,9 +1,9 @@
 package com.provectus.kafka.ui.config.auth;
 package com.provectus.kafka.ui.config.auth;
 
 
+import jakarta.annotation.PostConstruct;
 import java.util.HashMap;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Map;
 import java.util.Set;
 import java.util.Set;
-import javax.annotation.PostConstruct;
 import lombok.Data;
 import lombok.Data;
 import org.springframework.boot.context.properties.ConfigurationProperties;
 import org.springframework.boot.context.properties.ConfigurationProperties;
 import org.springframework.util.Assert;
 import org.springframework.util.Assert;

+ 3 - 10
kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java

@@ -13,12 +13,12 @@ import com.provectus.kafka.ui.model.ClusterConfigValidationDTO;
 import com.provectus.kafka.ui.model.RestartRequestDTO;
 import com.provectus.kafka.ui.model.RestartRequestDTO;
 import com.provectus.kafka.ui.model.UploadedFileInfoDTO;
 import com.provectus.kafka.ui.model.UploadedFileInfoDTO;
 import com.provectus.kafka.ui.model.rbac.AccessContext;
 import com.provectus.kafka.ui.model.rbac.AccessContext;
+import com.provectus.kafka.ui.service.ApplicationInfoService;
 import com.provectus.kafka.ui.service.KafkaClusterFactory;
 import com.provectus.kafka.ui.service.KafkaClusterFactory;
 import com.provectus.kafka.ui.service.rbac.AccessControlService;
 import com.provectus.kafka.ui.service.rbac.AccessControlService;
 import com.provectus.kafka.ui.util.ApplicationRestarter;
 import com.provectus.kafka.ui.util.ApplicationRestarter;
 import com.provectus.kafka.ui.util.DynamicConfigOperations;
 import com.provectus.kafka.ui.util.DynamicConfigOperations;
 import com.provectus.kafka.ui.util.DynamicConfigOperations.PropertiesStructure;
 import com.provectus.kafka.ui.util.DynamicConfigOperations.PropertiesStructure;
-import java.util.List;
 import java.util.Map;
 import java.util.Map;
 import javax.annotation.Nullable;
 import javax.annotation.Nullable;
 import lombok.RequiredArgsConstructor;
 import lombok.RequiredArgsConstructor;
@@ -53,18 +53,11 @@ public class ApplicationConfigController implements ApplicationConfigApi {
   private final DynamicConfigOperations dynamicConfigOperations;
   private final DynamicConfigOperations dynamicConfigOperations;
   private final ApplicationRestarter restarter;
   private final ApplicationRestarter restarter;
   private final KafkaClusterFactory kafkaClusterFactory;
   private final KafkaClusterFactory kafkaClusterFactory;
-
+  private final ApplicationInfoService applicationInfoService;
 
 
   @Override
   @Override
   public Mono<ResponseEntity<ApplicationInfoDTO>> getApplicationInfo(ServerWebExchange exchange) {
   public Mono<ResponseEntity<ApplicationInfoDTO>> getApplicationInfo(ServerWebExchange exchange) {
-    return Mono.just(
-        new ApplicationInfoDTO()
-            .enabledFeatures(
-                dynamicConfigOperations.dynamicConfigEnabled()
-                    ? List.of(ApplicationInfoDTO.EnabledFeaturesEnum.DYNAMIC_CONFIG)
-                    : List.of()
-            )
-    ).map(ResponseEntity::ok);
+    return Mono.just(applicationInfoService.getApplicationInfo()).map(ResponseEntity::ok);
   }
   }
 
 
   @Override
   @Override

+ 4 - 6
kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java

@@ -149,10 +149,9 @@ public class KafkaConnectController extends AbstractController implements KafkaC
   }
   }
 
 
   @Override
   @Override
-  public Mono<ResponseEntity<ConnectorDTO>> setConnectorConfig(String clusterName,
-                                                               String connectName,
+  public Mono<ResponseEntity<ConnectorDTO>> setConnectorConfig(String clusterName, String connectName,
                                                                String connectorName,
                                                                String connectorName,
-                                                               @Valid Mono<Object> requestBody,
+                                                               Mono<Map<String, Object>> requestBody,
                                                                ServerWebExchange exchange) {
                                                                ServerWebExchange exchange) {
 
 
     Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder()
     Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder()
@@ -164,8 +163,7 @@ public class KafkaConnectController extends AbstractController implements KafkaC
     return validateAccess.then(
     return validateAccess.then(
         kafkaConnectService
         kafkaConnectService
             .setConnectorConfig(getCluster(clusterName), connectName, connectorName, requestBody)
             .setConnectorConfig(getCluster(clusterName), connectName, connectorName, requestBody)
-            .map(ResponseEntity::ok)
-    );
+            .map(ResponseEntity::ok));
   }
   }
 
 
   @Override
   @Override
@@ -242,7 +240,7 @@ public class KafkaConnectController extends AbstractController implements KafkaC
 
 
   @Override
   @Override
   public Mono<ResponseEntity<ConnectorPluginConfigValidationResponseDTO>> validateConnectorPluginConfig(
   public Mono<ResponseEntity<ConnectorPluginConfigValidationResponseDTO>> validateConnectorPluginConfig(
-      String clusterName, String connectName, String pluginName, @Valid Mono<Object> requestBody,
+      String clusterName, String connectName, String pluginName, @Valid Mono<Map<String, Object>> requestBody,
       ServerWebExchange exchange) {
       ServerWebExchange exchange) {
     return kafkaConnectService
     return kafkaConnectService
         .validateConnectorPluginConfig(
         .validateConnectorPluginConfig(

+ 14 - 30
kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/AbstractEmitter.java

@@ -1,9 +1,6 @@
 package com.provectus.kafka.ui.emitter;
 package com.provectus.kafka.ui.emitter;
 
 
-import com.provectus.kafka.ui.model.TopicMessageDTO;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
-import com.provectus.kafka.ui.model.TopicMessagePhaseDTO;
-import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
 import java.time.Duration;
 import java.time.Duration;
 import java.time.Instant;
 import java.time.Instant;
 import org.apache.kafka.clients.consumer.Consumer;
 import org.apache.kafka.clients.consumer.Consumer;
@@ -14,13 +11,12 @@ import reactor.core.publisher.FluxSink;
 
 
 public abstract class AbstractEmitter {
 public abstract class AbstractEmitter {
 
 
-  private final ConsumerRecordDeserializer recordDeserializer;
-  private final ConsumingStats consumingStats = new ConsumingStats();
+  private final MessagesProcessing messagesProcessing;
   private final PollingThrottler throttler;
   private final PollingThrottler throttler;
   protected final PollingSettings pollingSettings;
   protected final PollingSettings pollingSettings;
 
 
-  protected AbstractEmitter(ConsumerRecordDeserializer recordDeserializer, PollingSettings pollingSettings) {
-    this.recordDeserializer = recordDeserializer;
+  protected AbstractEmitter(MessagesProcessing messagesProcessing, PollingSettings pollingSettings) {
+    this.messagesProcessing = messagesProcessing;
     this.pollingSettings = pollingSettings;
     this.pollingSettings = pollingSettings;
     this.throttler = pollingSettings.getPollingThrottler();
     this.throttler = pollingSettings.getPollingThrottler();
   }
   }
@@ -40,39 +36,27 @@ public abstract class AbstractEmitter {
     return records;
     return records;
   }
   }
 
 
+  protected boolean sendLimitReached() {
+    return messagesProcessing.limitReached();
+  }
+
   protected void sendMessage(FluxSink<TopicMessageEventDTO> sink,
   protected void sendMessage(FluxSink<TopicMessageEventDTO> sink,
-                                                       ConsumerRecord<Bytes, Bytes> msg) {
-    final TopicMessageDTO topicMessage = recordDeserializer.deserialize(msg);
-    sink.next(
-        new TopicMessageEventDTO()
-            .type(TopicMessageEventDTO.TypeEnum.MESSAGE)
-            .message(topicMessage)
-    );
+                             ConsumerRecord<Bytes, Bytes> msg) {
+    messagesProcessing.sendMsg(sink, msg);
   }
   }
 
 
   protected void sendPhase(FluxSink<TopicMessageEventDTO> sink, String name) {
   protected void sendPhase(FluxSink<TopicMessageEventDTO> sink, String name) {
-    sink.next(
-        new TopicMessageEventDTO()
-            .type(TopicMessageEventDTO.TypeEnum.PHASE)
-            .phase(new TopicMessagePhaseDTO().name(name))
-    );
+    messagesProcessing.sendPhase(sink, name);
   }
   }
 
 
   protected int sendConsuming(FluxSink<TopicMessageEventDTO> sink,
   protected int sendConsuming(FluxSink<TopicMessageEventDTO> sink,
-                               ConsumerRecords<Bytes, Bytes> records,
-                               long elapsed) {
-    return consumingStats.sendConsumingEvt(sink, records, elapsed, getFilterApplyErrors(sink));
+                              ConsumerRecords<Bytes, Bytes> records,
+                              long elapsed) {
+    return messagesProcessing.sentConsumingInfo(sink, records, elapsed);
   }
   }
 
 
   protected void sendFinishStatsAndCompleteSink(FluxSink<TopicMessageEventDTO> sink) {
   protected void sendFinishStatsAndCompleteSink(FluxSink<TopicMessageEventDTO> sink) {
-    consumingStats.sendFinishEvent(sink, getFilterApplyErrors(sink));
+    messagesProcessing.sendFinishEvent(sink);
     sink.complete();
     sink.complete();
   }
   }
-
-  protected Number getFilterApplyErrors(FluxSink<?> sink) {
-    return sink.contextView()
-        .<MessageFilterStats>getOrEmpty(MessageFilterStats.class)
-        .<Number>map(MessageFilterStats::getFilterApplyErrors)
-        .orElse(0);
-  }
 }
 }

+ 4 - 6
kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/BackwardRecordEmitter.java

@@ -2,7 +2,6 @@ package com.provectus.kafka.ui.emitter;
 
 
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
-import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
 import java.util.ArrayList;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.Comparator;
@@ -31,9 +30,9 @@ public class BackwardRecordEmitter
       Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
       Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
       ConsumerPosition consumerPosition,
       ConsumerPosition consumerPosition,
       int messagesPerPage,
       int messagesPerPage,
-      ConsumerRecordDeserializer recordDeserializer,
+      MessagesProcessing messagesProcessing,
       PollingSettings pollingSettings) {
       PollingSettings pollingSettings) {
-    super(recordDeserializer, pollingSettings);
+    super(messagesProcessing, pollingSettings);
     this.consumerPosition = consumerPosition;
     this.consumerPosition = consumerPosition;
     this.messagesPerPage = messagesPerPage;
     this.messagesPerPage = messagesPerPage;
     this.consumerSupplier = consumerSupplier;
     this.consumerSupplier = consumerSupplier;
@@ -52,7 +51,7 @@ public class BackwardRecordEmitter
       int msgsToPollPerPartition = (int) Math.ceil((double) messagesPerPage / readUntilOffsets.size());
       int msgsToPollPerPartition = (int) Math.ceil((double) messagesPerPage / readUntilOffsets.size());
       log.debug("'Until' offsets for polling: {}", readUntilOffsets);
       log.debug("'Until' offsets for polling: {}", readUntilOffsets);
 
 
-      while (!sink.isCancelled() && !readUntilOffsets.isEmpty()) {
+      while (!sink.isCancelled() && !readUntilOffsets.isEmpty() && !sendLimitReached()) {
         new TreeMap<>(readUntilOffsets).forEach((tp, readToOffset) -> {
         new TreeMap<>(readUntilOffsets).forEach((tp, readToOffset) -> {
           if (sink.isCancelled()) {
           if (sink.isCancelled()) {
             return; //fast return in case of sink cancellation
             return; //fast return in case of sink cancellation
@@ -61,8 +60,6 @@ public class BackwardRecordEmitter
           long readFromOffset = Math.max(beginOffset, readToOffset - msgsToPollPerPartition);
           long readFromOffset = Math.max(beginOffset, readToOffset - msgsToPollPerPartition);
 
 
           partitionPollIteration(tp, readFromOffset, readToOffset, consumer, sink)
           partitionPollIteration(tp, readFromOffset, readToOffset, consumer, sink)
-              .stream()
-              .filter(r -> !sink.isCancelled())
               .forEach(r -> sendMessage(sink, r));
               .forEach(r -> sendMessage(sink, r));
 
 
           if (beginOffset == readFromOffset) {
           if (beginOffset == readFromOffset) {
@@ -106,6 +103,7 @@ public class BackwardRecordEmitter
 
 
     EmptyPollsCounter emptyPolls  = pollingSettings.createEmptyPollsCounter();
     EmptyPollsCounter emptyPolls  = pollingSettings.createEmptyPollsCounter();
     while (!sink.isCancelled()
     while (!sink.isCancelled()
+        && !sendLimitReached()
         && recordsToSend.size() < desiredMsgsToPoll
         && recordsToSend.size() < desiredMsgsToPoll
         && !emptyPolls.noDataEmptyPollsReached()) {
         && !emptyPolls.noDataEmptyPollsReached()) {
       var polledRecords = poll(sink, consumer, pollingSettings.getPartitionPollTimeout());
       var polledRecords = poll(sink, consumer, pollingSettings.getPartitionPollTimeout());

+ 4 - 4
kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ConsumingStats.java

@@ -19,7 +19,7 @@ class ConsumingStats {
   int sendConsumingEvt(FluxSink<TopicMessageEventDTO> sink,
   int sendConsumingEvt(FluxSink<TopicMessageEventDTO> sink,
                         ConsumerRecords<Bytes, Bytes> polledRecords,
                         ConsumerRecords<Bytes, Bytes> polledRecords,
                         long elapsed,
                         long elapsed,
-                        Number filterApplyErrors) {
+                        int filterApplyErrors) {
     int polledBytes = ConsumerRecordsUtil.calculatePolledSize(polledRecords);
     int polledBytes = ConsumerRecordsUtil.calculatePolledSize(polledRecords);
     bytes += polledBytes;
     bytes += polledBytes;
     this.records += polledRecords.count();
     this.records += polledRecords.count();
@@ -32,7 +32,7 @@ class ConsumingStats {
     return polledBytes;
     return polledBytes;
   }
   }
 
 
-  void sendFinishEvent(FluxSink<TopicMessageEventDTO> sink, Number filterApplyErrors) {
+  void sendFinishEvent(FluxSink<TopicMessageEventDTO> sink, int filterApplyErrors) {
     sink.next(
     sink.next(
         new TopicMessageEventDTO()
         new TopicMessageEventDTO()
             .type(TopicMessageEventDTO.TypeEnum.DONE)
             .type(TopicMessageEventDTO.TypeEnum.DONE)
@@ -41,12 +41,12 @@ class ConsumingStats {
   }
   }
 
 
   private TopicMessageConsumingDTO createConsumingStats(FluxSink<TopicMessageEventDTO> sink,
   private TopicMessageConsumingDTO createConsumingStats(FluxSink<TopicMessageEventDTO> sink,
-                                                        Number filterApplyErrors) {
+                                                        int filterApplyErrors) {
     return new TopicMessageConsumingDTO()
     return new TopicMessageConsumingDTO()
         .bytesConsumed(this.bytes)
         .bytesConsumed(this.bytes)
         .elapsedMs(this.elapsed)
         .elapsedMs(this.elapsed)
         .isCancelled(sink.isCancelled())
         .isCancelled(sink.isCancelled())
-        .filterApplyErrors(filterApplyErrors.intValue())
+        .filterApplyErrors(filterApplyErrors)
         .messagesConsumed(this.records);
         .messagesConsumed(this.records);
   }
   }
 }
 }

+ 4 - 8
kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ForwardRecordEmitter.java

@@ -2,7 +2,6 @@ package com.provectus.kafka.ui.emitter;
 
 
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
-import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
 import java.util.function.Supplier;
 import java.util.function.Supplier;
 import lombok.extern.slf4j.Slf4j;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
@@ -23,9 +22,9 @@ public class ForwardRecordEmitter
   public ForwardRecordEmitter(
   public ForwardRecordEmitter(
       Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
       Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
       ConsumerPosition position,
       ConsumerPosition position,
-      ConsumerRecordDeserializer recordDeserializer,
+      MessagesProcessing messagesProcessing,
       PollingSettings pollingSettings) {
       PollingSettings pollingSettings) {
-    super(recordDeserializer, pollingSettings);
+    super(messagesProcessing, pollingSettings);
     this.position = position;
     this.position = position;
     this.consumerSupplier = consumerSupplier;
     this.consumerSupplier = consumerSupplier;
   }
   }
@@ -40,6 +39,7 @@ public class ForwardRecordEmitter
 
 
       EmptyPollsCounter emptyPolls = pollingSettings.createEmptyPollsCounter();
       EmptyPollsCounter emptyPolls = pollingSettings.createEmptyPollsCounter();
       while (!sink.isCancelled()
       while (!sink.isCancelled()
+          && !sendLimitReached()
           && !seekOperations.assignedPartitionsFullyPolled()
           && !seekOperations.assignedPartitionsFullyPolled()
           && !emptyPolls.noDataEmptyPollsReached()) {
           && !emptyPolls.noDataEmptyPollsReached()) {
 
 
@@ -50,11 +50,7 @@ public class ForwardRecordEmitter
         log.debug("{} records polled", records.count());
         log.debug("{} records polled", records.count());
 
 
         for (ConsumerRecord<Bytes, Bytes> msg : records) {
         for (ConsumerRecord<Bytes, Bytes> msg : records) {
-          if (!sink.isCancelled()) {
-            sendMessage(sink, msg);
-          } else {
-            break;
-          }
+          sendMessage(sink, msg);
         }
         }
       }
       }
       sendFinishStatsAndCompleteSink(sink);
       sendFinishStatsAndCompleteSink(sink);

+ 0 - 16
kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/MessageFilterStats.java

@@ -1,16 +0,0 @@
-package com.provectus.kafka.ui.emitter;
-
-import java.util.concurrent.atomic.AtomicLong;
-import lombok.AccessLevel;
-import lombok.Getter;
-
-public class MessageFilterStats {
-
-  @Getter(AccessLevel.PACKAGE)
-  private final AtomicLong filterApplyErrors = new AtomicLong();
-
-  public final void incrementApplyErrors() {
-    filterApplyErrors.incrementAndGet();
-  }
-
-}

+ 82 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/MessagesProcessing.java

@@ -0,0 +1,82 @@
+package com.provectus.kafka.ui.emitter;
+
+import com.provectus.kafka.ui.model.TopicMessageDTO;
+import com.provectus.kafka.ui.model.TopicMessageEventDTO;
+import com.provectus.kafka.ui.model.TopicMessagePhaseDTO;
+import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
+import java.util.function.Predicate;
+import javax.annotation.Nullable;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.common.utils.Bytes;
+import reactor.core.publisher.FluxSink;
+
+@Slf4j
+public class MessagesProcessing {
+
+  private final ConsumingStats consumingStats = new ConsumingStats();
+  private long sentMessages = 0;
+  private int filterApplyErrors = 0;
+
+  private final ConsumerRecordDeserializer deserializer;
+  private final Predicate<TopicMessageDTO> filter;
+  private final @Nullable Integer limit;
+
+  public MessagesProcessing(ConsumerRecordDeserializer deserializer,
+                            Predicate<TopicMessageDTO> filter,
+                            @Nullable Integer limit) {
+    this.deserializer = deserializer;
+    this.filter = filter;
+    this.limit = limit;
+  }
+
+  boolean limitReached() {
+    return limit != null && sentMessages >= limit;
+  }
+
+  void sendMsg(FluxSink<TopicMessageEventDTO> sink, ConsumerRecord<Bytes, Bytes> rec) {
+    if (!sink.isCancelled() && !limitReached()) {
+      TopicMessageDTO topicMessage = deserializer.deserialize(rec);
+      try {
+        if (filter.test(topicMessage)) {
+          sink.next(
+              new TopicMessageEventDTO()
+                  .type(TopicMessageEventDTO.TypeEnum.MESSAGE)
+                  .message(topicMessage)
+          );
+          sentMessages++;
+        }
+      } catch (Exception e) {
+        filterApplyErrors++;
+        log.trace("Error applying filter for message {}", topicMessage);
+      }
+    }
+  }
+
+  int sentConsumingInfo(FluxSink<TopicMessageEventDTO> sink,
+                        ConsumerRecords<Bytes, Bytes> polledRecords,
+                        long elapsed) {
+    if (!sink.isCancelled()) {
+      return consumingStats.sendConsumingEvt(sink, polledRecords, elapsed, filterApplyErrors);
+    }
+    return 0;
+  }
+
+  void sendFinishEvent(FluxSink<TopicMessageEventDTO> sink) {
+    if (!sink.isCancelled()) {
+      consumingStats.sendFinishEvent(sink, filterApplyErrors);
+    }
+  }
+
+  void sendPhase(FluxSink<TopicMessageEventDTO> sink, String name) {
+    if (!sink.isCancelled()) {
+      sink.next(
+          new TopicMessageEventDTO()
+              .type(TopicMessageEventDTO.TypeEnum.PHASE)
+              .phase(new TopicMessagePhaseDTO().name(name))
+      );
+    }
+  }
+
+}

+ 2 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/TailingEmitter.java

@@ -2,7 +2,6 @@ package com.provectus.kafka.ui.emitter;
 
 
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
-import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
 import java.util.HashMap;
 import java.util.HashMap;
 import java.util.function.Supplier;
 import java.util.function.Supplier;
 import lombok.extern.slf4j.Slf4j;
 import lombok.extern.slf4j.Slf4j;
@@ -20,9 +19,9 @@ public class TailingEmitter extends AbstractEmitter
 
 
   public TailingEmitter(Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
   public TailingEmitter(Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier,
                         ConsumerPosition consumerPosition,
                         ConsumerPosition consumerPosition,
-                        ConsumerRecordDeserializer recordDeserializer,
+                        MessagesProcessing messagesProcessing,
                         PollingSettings pollingSettings) {
                         PollingSettings pollingSettings) {
-    super(recordDeserializer, pollingSettings);
+    super(messagesProcessing, pollingSettings);
     this.consumerSupplier = consumerSupplier;
     this.consumerSupplier = consumerSupplier;
     this.consumerPosition = consumerPosition;
     this.consumerPosition = consumerPosition;
   }
   }

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/GlobalErrorWebExceptionHandler.java

@@ -134,7 +134,7 @@ public class GlobalErrorWebExceptionHandler extends AbstractErrorWebExceptionHan
         .timestamp(currentTimestamp())
         .timestamp(currentTimestamp())
         .stackTrace(Throwables.getStackTraceAsString(exception));
         .stackTrace(Throwables.getStackTraceAsString(exception));
     return ServerResponse
     return ServerResponse
-        .status(exception.getStatus())
+        .status(exception.getStatusCode())
         .contentType(MediaType.APPLICATION_JSON)
         .contentType(MediaType.APPLICATION_JSON)
         .bodyValue(response);
         .bodyValue(response);
   }
   }

+ 1 - 13
kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java

@@ -89,19 +89,7 @@ public class ConsumerGroupMapper {
             .flatMap(m -> m.getAssignment().stream().map(TopicPartition::topic))
             .flatMap(m -> m.getAssignment().stream().map(TopicPartition::topic))
     ).collect(Collectors.toSet()).size();
     ).collect(Collectors.toSet()).size();
 
 
-    Long messagesBehind = null;
-    // messagesBehind should be undefined if no committed offsets found for topic
-    if (!c.getOffsets().isEmpty()) {
-      messagesBehind = c.getOffsets().entrySet().stream()
-          .mapToLong(e ->
-              Optional.ofNullable(c.getEndOffsets())
-                  .map(o -> o.get(e.getKey()))
-                  .map(o -> o - e.getValue())
-                  .orElse(0L)
-          ).sum();
-    }
-
-    consumerGroup.setMessagesBehind(messagesBehind);
+    consumerGroup.setMessagesBehind(c.getMessagesBehind());
     consumerGroup.setTopics(numTopics);
     consumerGroup.setTopics(numTopics);
     consumerGroup.setSimple(c.isSimple());
     consumerGroup.setSimple(c.isSimple());
 
 

+ 19 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java

@@ -20,6 +20,7 @@ public class InternalConsumerGroup {
   private final Collection<InternalMember> members;
   private final Collection<InternalMember> members;
   private final Map<TopicPartition, Long> offsets;
   private final Map<TopicPartition, Long> offsets;
   private final Map<TopicPartition, Long> endOffsets;
   private final Map<TopicPartition, Long> endOffsets;
+  private final Long messagesBehind;
   private final String partitionAssignor;
   private final String partitionAssignor;
   private final ConsumerGroupState state;
   private final ConsumerGroupState state;
   private final Node coordinator;
   private final Node coordinator;
@@ -58,7 +59,25 @@ public class InternalConsumerGroup {
     );
     );
     builder.offsets(groupOffsets);
     builder.offsets(groupOffsets);
     builder.endOffsets(topicEndOffsets);
     builder.endOffsets(topicEndOffsets);
+    builder.messagesBehind(calculateMessagesBehind(groupOffsets, topicEndOffsets));
     Optional.ofNullable(description.coordinator()).ifPresent(builder::coordinator);
     Optional.ofNullable(description.coordinator()).ifPresent(builder::coordinator);
     return builder.build();
     return builder.build();
   }
   }
+
+  private static Long calculateMessagesBehind(Map<TopicPartition, Long> offsets, Map<TopicPartition, Long> endOffsets) {
+    Long messagesBehind = null;
+    // messagesBehind should be undefined if no committed offsets found for topic
+    if (!offsets.isEmpty()) {
+      messagesBehind = offsets.entrySet().stream()
+          .mapToLong(e ->
+              Optional.ofNullable(endOffsets)
+                  .map(o -> o.get(e.getKey()))
+                  .map(o -> o - e.getValue())
+                  .orElse(0L)
+          ).sum();
+    }
+
+    return messagesBehind;
+  }
+
 }
 }

+ 76 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ApplicationInfoService.java

@@ -0,0 +1,76 @@
+package com.provectus.kafka.ui.service;
+
+import static com.provectus.kafka.ui.model.ApplicationInfoDTO.EnabledFeaturesEnum;
+
+import com.provectus.kafka.ui.model.ApplicationInfoBuildDTO;
+import com.provectus.kafka.ui.model.ApplicationInfoDTO;
+import com.provectus.kafka.ui.model.ApplicationInfoLatestReleaseDTO;
+import com.provectus.kafka.ui.util.DynamicConfigOperations;
+import com.provectus.kafka.ui.util.GithubReleaseInfo;
+import java.time.format.DateTimeFormatter;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import java.util.Properties;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.info.BuildProperties;
+import org.springframework.boot.info.GitProperties;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Service;
+
+@Service
+public class ApplicationInfoService {
+
+  private final GithubReleaseInfo githubReleaseInfo = new GithubReleaseInfo();
+
+  private final DynamicConfigOperations dynamicConfigOperations;
+  private final BuildProperties buildProperties;
+  private final GitProperties gitProperties;
+
+  public ApplicationInfoService(DynamicConfigOperations dynamicConfigOperations,
+                                @Autowired(required = false) BuildProperties buildProperties,
+                                @Autowired(required = false) GitProperties gitProperties) {
+    this.dynamicConfigOperations = dynamicConfigOperations;
+    this.buildProperties = Optional.ofNullable(buildProperties).orElse(new BuildProperties(new Properties()));
+    this.gitProperties = Optional.ofNullable(gitProperties).orElse(new GitProperties(new Properties()));
+  }
+
+  public ApplicationInfoDTO getApplicationInfo() {
+    var releaseInfo = githubReleaseInfo.get();
+    return new ApplicationInfoDTO()
+        .build(getBuildInfo(releaseInfo))
+        .enabledFeatures(getEnabledFeatures())
+        .latestRelease(convert(releaseInfo));
+  }
+
+  private ApplicationInfoLatestReleaseDTO convert(GithubReleaseInfo.GithubReleaseDto releaseInfo) {
+    return new ApplicationInfoLatestReleaseDTO()
+        .htmlUrl(releaseInfo.html_url())
+        .publishedAt(releaseInfo.published_at())
+        .versionTag(releaseInfo.tag_name());
+  }
+
+  private ApplicationInfoBuildDTO getBuildInfo(GithubReleaseInfo.GithubReleaseDto release) {
+    return new ApplicationInfoBuildDTO()
+        .isLatestRelease(release.tag_name() != null && release.tag_name().equals(buildProperties.getVersion()))
+        .commitId(gitProperties.getShortCommitId())
+        .version(buildProperties.getVersion())
+        .buildTime(buildProperties.getTime() != null
+            ? DateTimeFormatter.ISO_INSTANT.format(buildProperties.getTime()) : null);
+  }
+
+  private List<EnabledFeaturesEnum> getEnabledFeatures() {
+    var enabledFeatures = new ArrayList<EnabledFeaturesEnum>();
+    if (dynamicConfigOperations.dynamicConfigEnabled()) {
+      enabledFeatures.add(EnabledFeaturesEnum.DYNAMIC_CONFIG);
+    }
+    return enabledFeatures;
+  }
+
+  // updating on startup and every hour
+  @Scheduled(fixedRateString = "${github-release-info-update-rate:3600000}")
+  public void updateGithubReleaseInfo() {
+    githubReleaseInfo.refresh().block();
+  }
+
+}

+ 19 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java

@@ -1,5 +1,6 @@
 package com.provectus.kafka.ui.service;
 package com.provectus.kafka.ui.service;
 
 
+import com.google.common.collect.Streams;
 import com.google.common.collect.Table;
 import com.google.common.collect.Table;
 import com.provectus.kafka.ui.model.ConsumerGroupOrderingDTO;
 import com.provectus.kafka.ui.model.ConsumerGroupOrderingDTO;
 import com.provectus.kafka.ui.model.InternalConsumerGroup;
 import com.provectus.kafka.ui.model.InternalConsumerGroup;
@@ -157,6 +158,24 @@ public class ConsumerGroupService {
             .map(descriptions ->
             .map(descriptions ->
                 sortAndPaginate(descriptions.values(), comparator, pageNum, perPage, sortOrderDto).toList());
                 sortAndPaginate(descriptions.values(), comparator, pageNum, perPage, sortOrderDto).toList());
       }
       }
+      case MESSAGES_BEHIND -> {
+        record GroupWithDescr(InternalConsumerGroup icg, ConsumerGroupDescription cgd) { }
+
+        Comparator<GroupWithDescr> comparator = Comparator.comparingLong(gwd ->
+            gwd.icg.getMessagesBehind() == null ? 0L : gwd.icg.getMessagesBehind());
+
+        var groupNames = groups.stream().map(ConsumerGroupListing::groupId).toList();
+
+        yield ac.describeConsumerGroups(groupNames)
+            .flatMap(descriptionsMap -> {
+                  List<ConsumerGroupDescription> descriptions = descriptionsMap.values().stream().toList();
+                  return getConsumerGroups(ac, descriptions)
+                      .map(icg -> Streams.zip(icg.stream(), descriptions.stream(), GroupWithDescr::new).toList())
+                      .map(gwd -> sortAndPaginate(gwd, comparator, pageNum, perPage, sortOrderDto)
+                            .map(GroupWithDescr::cgd).toList());
+                }
+            );
+      }
     };
     };
   }
   }
 
 

+ 11 - 9
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java

@@ -25,7 +25,8 @@ public class FeatureService {
 
 
   private final AdminClientService adminClientService;
   private final AdminClientService adminClientService;
 
 
-  public Mono<List<ClusterFeature>> getAvailableFeatures(KafkaCluster cluster, @Nullable Node controller) {
+  public Mono<List<ClusterFeature>> getAvailableFeatures(KafkaCluster cluster,
+                                                         ReactiveAdminClient.ClusterDescription clusterDescription) {
     List<Mono<ClusterFeature>> features = new ArrayList<>();
     List<Mono<ClusterFeature>> features = new ArrayList<>();
 
 
     if (Optional.ofNullable(cluster.getConnectsClients())
     if (Optional.ofNullable(cluster.getConnectsClients())
@@ -42,17 +43,15 @@ public class FeatureService {
       features.add(Mono.just(ClusterFeature.SCHEMA_REGISTRY));
       features.add(Mono.just(ClusterFeature.SCHEMA_REGISTRY));
     }
     }
 
 
-    if (controller != null) {
-      features.add(
-          isTopicDeletionEnabled(cluster, controller)
-              .flatMap(r -> Boolean.TRUE.equals(r) ? Mono.just(ClusterFeature.TOPIC_DELETION) : Mono.empty())
-      );
-    }
+    features.add(topicDeletionEnabled(cluster, clusterDescription.getController()));
 
 
     return Flux.fromIterable(features).flatMap(m -> m).collectList();
     return Flux.fromIterable(features).flatMap(m -> m).collectList();
   }
   }
 
 
-  private Mono<Boolean> isTopicDeletionEnabled(KafkaCluster cluster, Node controller) {
+  private Mono<ClusterFeature> topicDeletionEnabled(KafkaCluster cluster, @Nullable Node controller) {
+    if (controller == null) {
+      return Mono.just(ClusterFeature.TOPIC_DELETION); // assuming it is enabled by default
+    }
     return adminClientService.get(cluster)
     return adminClientService.get(cluster)
         .flatMap(ac -> ac.loadBrokersConfig(List.of(controller.id())))
         .flatMap(ac -> ac.loadBrokersConfig(List.of(controller.id())))
         .map(config ->
         .map(config ->
@@ -61,6 +60,9 @@ public class FeatureService {
                 .filter(e -> e.name().equals(DELETE_TOPIC_ENABLED_SERVER_PROPERTY))
                 .filter(e -> e.name().equals(DELETE_TOPIC_ENABLED_SERVER_PROPERTY))
                 .map(e -> Boolean.parseBoolean(e.value()))
                 .map(e -> Boolean.parseBoolean(e.value()))
                 .findFirst()
                 .findFirst()
-                .orElse(true));
+                .orElse(true))
+        .flatMap(enabled -> enabled
+            ? Mono.just(ClusterFeature.TOPIC_DELETION)
+            : Mono.empty());
   }
   }
 }
 }

+ 49 - 16
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConfigSanitizer.java

@@ -1,38 +1,58 @@
 package com.provectus.kafka.ui.service;
 package com.provectus.kafka.ui.service;
 
 
+import static java.util.regex.Pattern.CASE_INSENSITIVE;
+
+import com.google.common.collect.ImmutableList;
 import java.util.Arrays;
 import java.util.Arrays;
-import java.util.HashSet;
+import java.util.Collection;
 import java.util.List;
 import java.util.List;
 import java.util.Map;
 import java.util.Map;
 import java.util.Set;
 import java.util.Set;
+import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 import java.util.stream.Collectors;
 import org.apache.kafka.common.config.ConfigDef;
 import org.apache.kafka.common.config.ConfigDef;
 import org.apache.kafka.common.config.SaslConfigs;
 import org.apache.kafka.common.config.SaslConfigs;
 import org.apache.kafka.common.config.SslConfigs;
 import org.apache.kafka.common.config.SslConfigs;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.beans.factory.annotation.Value;
-import org.springframework.boot.actuate.endpoint.Sanitizer;
 import org.springframework.stereotype.Component;
 import org.springframework.stereotype.Component;
 
 
 @Component
 @Component
-class KafkaConfigSanitizer extends Sanitizer {
-  private static final List<String> DEFAULT_PATTERNS_TO_SANITIZE = Arrays.asList(
-      "basic.auth.user.info",  /* For Schema Registry credentials */
-      "password", "secret", "token", "key", ".*credentials.*",   /* General credential patterns */
-      "aws.access.*", "aws.secret.*", "aws.session.*"   /* AWS-related credential patterns */
-  );
+class KafkaConfigSanitizer  {
+
+  private static final String SANITIZED_VALUE = "******";
+
+  private static final String[] REGEX_PARTS = {"*", "$", "^", "+"};
+
+  private static final List<String> DEFAULT_PATTERNS_TO_SANITIZE = ImmutableList.<String>builder()
+      .addAll(kafkaConfigKeysToSanitize())
+      .add(
+          "basic.auth.user.info",  /* For Schema Registry credentials */
+          "password", "secret", "token", "key", ".*credentials.*",   /* General credential patterns */
+          "aws.access.*", "aws.secret.*", "aws.session.*"   /* AWS-related credential patterns */
+      )
+      .build();
+
+  private final List<Pattern> sanitizeKeysPatterns;
 
 
   KafkaConfigSanitizer(
   KafkaConfigSanitizer(
       @Value("${kafka.config.sanitizer.enabled:true}") boolean enabled,
       @Value("${kafka.config.sanitizer.enabled:true}") boolean enabled,
       @Value("${kafka.config.sanitizer.patterns:}") List<String> patternsToSanitize
       @Value("${kafka.config.sanitizer.patterns:}") List<String> patternsToSanitize
   ) {
   ) {
-    if (!enabled) {
-      setKeysToSanitize();
-    } else {
-      var keysToSanitize = new HashSet<>(
-          patternsToSanitize.isEmpty() ? DEFAULT_PATTERNS_TO_SANITIZE : patternsToSanitize);
-      keysToSanitize.addAll(kafkaConfigKeysToSanitize());
-      setKeysToSanitize(keysToSanitize.toArray(new String[] {}));
-    }
+    this.sanitizeKeysPatterns = enabled
+        ? compile(patternsToSanitize.isEmpty() ? DEFAULT_PATTERNS_TO_SANITIZE : patternsToSanitize)
+        : List.of();
+  }
+
+  private static List<Pattern> compile(Collection<String> patternStrings) {
+    return patternStrings.stream()
+        .map(p -> isRegex(p)
+            ? Pattern.compile(p, CASE_INSENSITIVE)
+            : Pattern.compile(".*" + p + "$", CASE_INSENSITIVE))
+        .toList();
+  }
+
+  private static boolean isRegex(String str) {
+    return Arrays.stream(REGEX_PARTS).anyMatch(str::contains);
   }
   }
 
 
   private static Set<String> kafkaConfigKeysToSanitize() {
   private static Set<String> kafkaConfigKeysToSanitize() {
@@ -45,4 +65,17 @@ class KafkaConfigSanitizer extends Sanitizer {
         .collect(Collectors.toSet());
         .collect(Collectors.toSet());
   }
   }
 
 
+  public Object sanitize(String key, Object value) {
+    if (value == null) {
+      return null;
+    }
+    for (Pattern pattern : sanitizeKeysPatterns) {
+      if (pattern.matcher(key).matches()) {
+        return SANITIZED_VALUE;
+      }
+    }
+    return value;
+  }
+
+
 }
 }

+ 4 - 4
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java

@@ -225,11 +225,11 @@ public class KafkaConnectService {
   }
   }
 
 
   public Mono<ConnectorDTO> setConnectorConfig(KafkaCluster cluster, String connectName,
   public Mono<ConnectorDTO> setConnectorConfig(KafkaCluster cluster, String connectName,
-                                               String connectorName, Mono<Object> requestBody) {
+                                               String connectorName, Mono<Map<String, Object>> requestBody) {
     return api(cluster, connectName)
     return api(cluster, connectName)
         .mono(c ->
         .mono(c ->
             requestBody
             requestBody
-                .flatMap(body -> c.setConnectorConfig(connectorName, (Map<String, Object>) body))
+                .flatMap(body -> c.setConnectorConfig(connectorName, body))
                 .map(kafkaConnectMapper::fromClient));
                 .map(kafkaConnectMapper::fromClient));
   }
   }
 
 
@@ -298,12 +298,12 @@ public class KafkaConnectService {
   }
   }
 
 
   public Mono<ConnectorPluginConfigValidationResponseDTO> validateConnectorPluginConfig(
   public Mono<ConnectorPluginConfigValidationResponseDTO> validateConnectorPluginConfig(
-      KafkaCluster cluster, String connectName, String pluginName, Mono<Object> requestBody) {
+      KafkaCluster cluster, String connectName, String pluginName, Mono<Map<String, Object>> requestBody) {
     return api(cluster, connectName)
     return api(cluster, connectName)
         .mono(client ->
         .mono(client ->
             requestBody
             requestBody
                 .flatMap(body ->
                 .flatMap(body ->
-                    client.validateConnectorPluginConfig(pluginName, (Map<String, Object>) body))
+                    client.validateConnectorPluginConfig(pluginName, body))
                 .map(kafkaConnectMapper::fromClient)
                 .map(kafkaConnectMapper::fromClient)
         );
         );
   }
   }

+ 18 - 39
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java

@@ -3,9 +3,8 @@ package com.provectus.kafka.ui.service;
 import com.google.common.util.concurrent.RateLimiter;
 import com.google.common.util.concurrent.RateLimiter;
 import com.provectus.kafka.ui.emitter.BackwardRecordEmitter;
 import com.provectus.kafka.ui.emitter.BackwardRecordEmitter;
 import com.provectus.kafka.ui.emitter.ForwardRecordEmitter;
 import com.provectus.kafka.ui.emitter.ForwardRecordEmitter;
-import com.provectus.kafka.ui.emitter.MessageFilterStats;
 import com.provectus.kafka.ui.emitter.MessageFilters;
 import com.provectus.kafka.ui.emitter.MessageFilters;
-import com.provectus.kafka.ui.emitter.ResultSizeLimiter;
+import com.provectus.kafka.ui.emitter.MessagesProcessing;
 import com.provectus.kafka.ui.emitter.TailingEmitter;
 import com.provectus.kafka.ui.emitter.TailingEmitter;
 import com.provectus.kafka.ui.exception.TopicNotFoundException;
 import com.provectus.kafka.ui.exception.TopicNotFoundException;
 import com.provectus.kafka.ui.exception.ValidationException;
 import com.provectus.kafka.ui.exception.ValidationException;
@@ -14,9 +13,9 @@ import com.provectus.kafka.ui.model.CreateTopicMessageDTO;
 import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.model.MessageFilterTypeDTO;
 import com.provectus.kafka.ui.model.MessageFilterTypeDTO;
 import com.provectus.kafka.ui.model.SeekDirectionDTO;
 import com.provectus.kafka.ui.model.SeekDirectionDTO;
+import com.provectus.kafka.ui.model.TopicMessageDTO;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.serde.api.Serde;
 import com.provectus.kafka.ui.serde.api.Serde;
-import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
 import com.provectus.kafka.ui.serdes.ProducerRecordCreator;
 import com.provectus.kafka.ui.serdes.ProducerRecordCreator;
 import com.provectus.kafka.ui.util.SslPropertiesUtil;
 import com.provectus.kafka.ui.util.SslPropertiesUtil;
 import java.util.List;
 import java.util.List;
@@ -162,13 +161,18 @@ public class MessagesService {
                                                       @Nullable String valueSerde) {
                                                       @Nullable String valueSerde) {
 
 
     java.util.function.Consumer<? super FluxSink<TopicMessageEventDTO>> emitter;
     java.util.function.Consumer<? super FluxSink<TopicMessageEventDTO>> emitter;
-    ConsumerRecordDeserializer recordDeserializer =
-        deserializationService.deserializerFor(cluster, topic, keySerde, valueSerde);
+
+    var processing = new MessagesProcessing(
+        deserializationService.deserializerFor(cluster, topic, keySerde, valueSerde),
+        getMsgFilter(query, filterQueryType),
+        seekDirection == SeekDirectionDTO.TAILING ? null : limit
+    );
+
     if (seekDirection.equals(SeekDirectionDTO.FORWARD)) {
     if (seekDirection.equals(SeekDirectionDTO.FORWARD)) {
       emitter = new ForwardRecordEmitter(
       emitter = new ForwardRecordEmitter(
           () -> consumerGroupService.createConsumer(cluster),
           () -> consumerGroupService.createConsumer(cluster),
           consumerPosition,
           consumerPosition,
-          recordDeserializer,
+          processing,
           cluster.getPollingSettings()
           cluster.getPollingSettings()
       );
       );
     } else if (seekDirection.equals(SeekDirectionDTO.BACKWARD)) {
     } else if (seekDirection.equals(SeekDirectionDTO.BACKWARD)) {
@@ -176,33 +180,22 @@ public class MessagesService {
           () -> consumerGroupService.createConsumer(cluster),
           () -> consumerGroupService.createConsumer(cluster),
           consumerPosition,
           consumerPosition,
           limit,
           limit,
-          recordDeserializer,
+          processing,
           cluster.getPollingSettings()
           cluster.getPollingSettings()
       );
       );
     } else {
     } else {
       emitter = new TailingEmitter(
       emitter = new TailingEmitter(
           () -> consumerGroupService.createConsumer(cluster),
           () -> consumerGroupService.createConsumer(cluster),
           consumerPosition,
           consumerPosition,
-          recordDeserializer,
+          processing,
           cluster.getPollingSettings()
           cluster.getPollingSettings()
       );
       );
     }
     }
-    MessageFilterStats filterStats = new MessageFilterStats();
     return Flux.create(emitter)
     return Flux.create(emitter)
-        .contextWrite(ctx -> ctx.put(MessageFilterStats.class, filterStats))
-        .filter(getMsgFilter(query, filterQueryType, filterStats))
         .map(getDataMasker(cluster, topic))
         .map(getDataMasker(cluster, topic))
-        .takeWhile(createTakeWhilePredicate(seekDirection, limit))
         .map(throttleUiPublish(seekDirection));
         .map(throttleUiPublish(seekDirection));
   }
   }
 
 
-  private Predicate<TopicMessageEventDTO> createTakeWhilePredicate(
-      SeekDirectionDTO seekDirection, int limit) {
-    return seekDirection == SeekDirectionDTO.TAILING
-        ? evt -> true // no limit for tailing
-        : new ResultSizeLimiter(limit);
-  }
-
   private UnaryOperator<TopicMessageEventDTO> getDataMasker(KafkaCluster cluster, String topicName) {
   private UnaryOperator<TopicMessageEventDTO> getDataMasker(KafkaCluster cluster, String topicName) {
     var keyMasker = cluster.getMasking().getMaskingFunction(topicName, Serde.Target.KEY);
     var keyMasker = cluster.getMasking().getMaskingFunction(topicName, Serde.Target.KEY);
     var valMasker = cluster.getMasking().getMaskingFunction(topicName, Serde.Target.VALUE);
     var valMasker = cluster.getMasking().getMaskingFunction(topicName, Serde.Target.VALUE);
@@ -211,32 +204,18 @@ public class MessagesService {
         return evt;
         return evt;
       }
       }
       return evt.message(
       return evt.message(
-        evt.getMessage()
-            .key(keyMasker.apply(evt.getMessage().getKey()))
-            .content(valMasker.apply(evt.getMessage().getContent())));
+          evt.getMessage()
+              .key(keyMasker.apply(evt.getMessage().getKey()))
+              .content(valMasker.apply(evt.getMessage().getContent())));
     };
     };
   }
   }
 
 
-  private Predicate<TopicMessageEventDTO> getMsgFilter(String query,
-                                                       MessageFilterTypeDTO filterQueryType,
-                                                       MessageFilterStats filterStats) {
+  private Predicate<TopicMessageDTO> getMsgFilter(String query,
+                                                  MessageFilterTypeDTO filterQueryType) {
     if (StringUtils.isEmpty(query)) {
     if (StringUtils.isEmpty(query)) {
       return evt -> true;
       return evt -> true;
     }
     }
-    var messageFilter = MessageFilters.createMsgFilter(query, filterQueryType);
-    return evt -> {
-      // we only apply filter for message events
-      if (evt.getType() == TopicMessageEventDTO.TypeEnum.MESSAGE) {
-        try {
-          return messageFilter.test(evt.getMessage());
-        } catch (Exception e) {
-          filterStats.incrementApplyErrors();
-          log.trace("Error applying filter '{}' for message {}", query, evt.getMessage());
-          return false;
-        }
-      }
-      return true;
-    };
+    return MessageFilters.createMsgFilter(query, filterQueryType);
   }
   }
 
 
   private <T> UnaryOperator<T> throttleUiPublish(SeekDirectionDTO seekDirection) {
   private <T> UnaryOperator<T> throttleUiPublish(SeekDirectionDTO seekDirection) {

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java

@@ -41,7 +41,7 @@ public class StatisticsService {
                     List.of(
                     List.of(
                         metricsCollector.getBrokerMetrics(cluster, description.getNodes()),
                         metricsCollector.getBrokerMetrics(cluster, description.getNodes()),
                         getLogDirInfo(description, ac),
                         getLogDirInfo(description, ac),
-                        featureService.getAvailableFeatures(cluster, description.getController()),
+                        featureService.getAvailableFeatures(cluster, description),
                         loadTopicConfigs(cluster),
                         loadTopicConfigs(cluster),
                         describeTopics(cluster)),
                         describeTopics(cluster)),
                     results ->
                     results ->

+ 3 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/analyze/TopicAnalysisStats.java

@@ -2,7 +2,7 @@ package com.provectus.kafka.ui.service.analyze;
 
 
 import com.provectus.kafka.ui.model.TopicAnalysisSizeStatsDTO;
 import com.provectus.kafka.ui.model.TopicAnalysisSizeStatsDTO;
 import com.provectus.kafka.ui.model.TopicAnalysisStatsDTO;
 import com.provectus.kafka.ui.model.TopicAnalysisStatsDTO;
-import com.provectus.kafka.ui.model.TopicAnalysisStatsHourlyMsgCountsDTO;
+import com.provectus.kafka.ui.model.TopicAnalysisStatsHourlyMsgCountsInnerDTO;
 import java.time.Duration;
 import java.time.Duration;
 import java.time.Instant;
 import java.time.Instant;
 import java.util.Comparator;
 import java.util.Comparator;
@@ -78,10 +78,10 @@ class TopicAnalysisStats {
       }
       }
     }
     }
 
 
-    List<TopicAnalysisStatsHourlyMsgCountsDTO> toDto() {
+    List<TopicAnalysisStatsHourlyMsgCountsInnerDTO> toDto() {
       return hourlyStats.entrySet().stream()
       return hourlyStats.entrySet().stream()
           .sorted(Comparator.comparingLong(Map.Entry::getKey))
           .sorted(Comparator.comparingLong(Map.Entry::getKey))
-          .map(e -> new TopicAnalysisStatsHourlyMsgCountsDTO()
+          .map(e -> new TopicAnalysisStatsHourlyMsgCountsInnerDTO()
               .hourStart(e.getKey())
               .hourStart(e.getKey())
               .count(e.getValue()))
               .count(e.getValue()))
           .collect(Collectors.toList());
           .collect(Collectors.toList());

+ 4 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlApiClient.java

@@ -52,7 +52,10 @@ public class KsqlApiClient {
     boolean error;
     boolean error;
 
 
     public Optional<JsonNode> getColumnValue(List<JsonNode> row, String column) {
     public Optional<JsonNode> getColumnValue(List<JsonNode> row, String column) {
-      return Optional.ofNullable(row.get(columnNames.indexOf(column)));
+      int colIdx = columnNames.indexOf(column);
+      return colIdx >= 0
+          ? Optional.ofNullable(row.get(colIdx))
+          : Optional.empty();
     }
     }
   }
   }
 
 

+ 8 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2.java

@@ -89,7 +89,14 @@ public class KsqlServiceV2 {
                       .name(resp.getColumnValue(row, "name").map(JsonNode::asText).orElse(null))
                       .name(resp.getColumnValue(row, "name").map(JsonNode::asText).orElse(null))
                       .topic(resp.getColumnValue(row, "topic").map(JsonNode::asText).orElse(null))
                       .topic(resp.getColumnValue(row, "topic").map(JsonNode::asText).orElse(null))
                       .keyFormat(resp.getColumnValue(row, "keyFormat").map(JsonNode::asText).orElse(null))
                       .keyFormat(resp.getColumnValue(row, "keyFormat").map(JsonNode::asText).orElse(null))
-                      .valueFormat(resp.getColumnValue(row, "valueFormat").map(JsonNode::asText).orElse(null)))
+                      .valueFormat(
+                          // for old versions (<0.13) "format" column is filled,
+                          // for new version "keyFormat" & "valueFormat" columns should be filled
+                          resp.getColumnValue(row, "valueFormat")
+                              .or(() -> resp.getColumnValue(row, "format"))
+                              .map(JsonNode::asText)
+                              .orElse(null))
+              )
               .collect(Collectors.toList()));
               .collect(Collectors.toList()));
         });
         });
   }
   }

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java

@@ -21,6 +21,7 @@ import com.provectus.kafka.ui.service.rbac.extractor.GithubAuthorityExtractor;
 import com.provectus.kafka.ui.service.rbac.extractor.GoogleAuthorityExtractor;
 import com.provectus.kafka.ui.service.rbac.extractor.GoogleAuthorityExtractor;
 import com.provectus.kafka.ui.service.rbac.extractor.LdapAuthorityExtractor;
 import com.provectus.kafka.ui.service.rbac.extractor.LdapAuthorityExtractor;
 import com.provectus.kafka.ui.service.rbac.extractor.ProviderAuthorityExtractor;
 import com.provectus.kafka.ui.service.rbac.extractor.ProviderAuthorityExtractor;
+import jakarta.annotation.PostConstruct;
 import java.util.Collections;
 import java.util.Collections;
 import java.util.List;
 import java.util.List;
 import java.util.Set;
 import java.util.Set;
@@ -28,7 +29,6 @@ import java.util.function.Predicate;
 import java.util.regex.Pattern;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 import java.util.stream.Collectors;
 import javax.annotation.Nullable;
 import javax.annotation.Nullable;
-import javax.annotation.PostConstruct;
 import lombok.RequiredArgsConstructor;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.collections.CollectionUtils;

+ 3 - 4
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/CognitoAuthorityExtractor.java

@@ -1,9 +1,9 @@
 package com.provectus.kafka.ui.service.rbac.extractor;
 package com.provectus.kafka.ui.service.rbac.extractor;
 
 
-import com.nimbusds.jose.shaded.json.JSONArray;
 import com.provectus.kafka.ui.model.rbac.Role;
 import com.provectus.kafka.ui.model.rbac.Role;
 import com.provectus.kafka.ui.model.rbac.provider.Provider;
 import com.provectus.kafka.ui.model.rbac.provider.Provider;
 import com.provectus.kafka.ui.service.rbac.AccessControlService;
 import com.provectus.kafka.ui.service.rbac.AccessControlService;
+import java.util.List;
 import java.util.Map;
 import java.util.Map;
 import java.util.Set;
 import java.util.Set;
 import java.util.stream.Collectors;
 import java.util.stream.Collectors;
@@ -44,7 +44,7 @@ public class CognitoAuthorityExtractor implements ProviderAuthorityExtractor {
         .map(Role::getName)
         .map(Role::getName)
         .collect(Collectors.toSet());
         .collect(Collectors.toSet());
 
 
-    JSONArray groups = principal.getAttribute(COGNITO_GROUPS_ATTRIBUTE_NAME);
+    List<String> groups = principal.getAttribute(COGNITO_GROUPS_ATTRIBUTE_NAME);
     if (groups == null) {
     if (groups == null) {
       log.debug("Cognito groups param is not present");
       log.debug("Cognito groups param is not present");
       return Mono.just(groupsByUsername);
       return Mono.just(groupsByUsername);
@@ -56,9 +56,8 @@ public class CognitoAuthorityExtractor implements ProviderAuthorityExtractor {
             .stream()
             .stream()
             .filter(s -> s.getProvider().equals(Provider.OAUTH_COGNITO))
             .filter(s -> s.getProvider().equals(Provider.OAUTH_COGNITO))
             .filter(s -> s.getType().equals("group"))
             .filter(s -> s.getType().equals("group"))
-            .anyMatch(subject -> Stream.of(groups.toArray())
+            .anyMatch(subject -> Stream.of(groups)
                 .map(Object::toString)
                 .map(Object::toString)
-                .distinct()
                 .anyMatch(cognitoGroup -> cognitoGroup.equals(subject.getValue()))
                 .anyMatch(cognitoGroup -> cognitoGroup.equals(subject.getValue()))
             ))
             ))
         .map(Role::getName)
         .map(Role::getName)

+ 18 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java

@@ -12,6 +12,7 @@ import java.util.stream.Stream;
 import lombok.extern.slf4j.Slf4j;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.core.ParameterizedTypeReference;
 import org.springframework.core.ParameterizedTypeReference;
 import org.springframework.http.HttpHeaders;
 import org.springframework.http.HttpHeaders;
+import org.springframework.security.config.oauth2.client.CommonOAuth2Provider;
 import org.springframework.security.oauth2.client.userinfo.OAuth2UserRequest;
 import org.springframework.security.oauth2.client.userinfo.OAuth2UserRequest;
 import org.springframework.security.oauth2.core.user.DefaultOAuth2User;
 import org.springframework.security.oauth2.core.user.DefaultOAuth2User;
 import org.springframework.web.reactive.function.client.WebClient;
 import org.springframework.web.reactive.function.client.WebClient;
@@ -24,8 +25,7 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor {
   private static final String USERNAME_ATTRIBUTE_NAME = "login";
   private static final String USERNAME_ATTRIBUTE_NAME = "login";
   private static final String ORGANIZATION_NAME = "login";
   private static final String ORGANIZATION_NAME = "login";
   private static final String GITHUB_ACCEPT_HEADER = "application/vnd.github+json";
   private static final String GITHUB_ACCEPT_HEADER = "application/vnd.github+json";
-
-  private final WebClient webClient = WebClient.create("https://api.github.com");
+  private static final String DUMMY = "dummy";
 
 
   @Override
   @Override
   public boolean isApplicable(String provider) {
   public boolean isApplicable(String provider) {
@@ -64,9 +64,24 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor {
       return Mono.just(groupsByUsername);
       return Mono.just(groupsByUsername);
     }
     }
 
 
+    OAuth2UserRequest req = (OAuth2UserRequest) additionalParams.get("request");
+    String infoEndpoint = req.getClientRegistration().getProviderDetails().getUserInfoEndpoint().getUri();
+
+    if (infoEndpoint == null) {
+      infoEndpoint = CommonOAuth2Provider.GITHUB
+          .getBuilder(DUMMY)
+          .clientId(DUMMY)
+          .build()
+          .getProviderDetails()
+          .getUserInfoEndpoint()
+          .getUri();
+    }
+
+    WebClient webClient = WebClient.create(infoEndpoint);
+
     final Mono<List<Map<String, Object>>> userOrganizations = webClient
     final Mono<List<Map<String, Object>>> userOrganizations = webClient
         .get()
         .get()
-        .uri("/user/orgs")
+        .uri("/orgs")
         .headers(headers -> {
         .headers(headers -> {
           headers.set(HttpHeaders.ACCEPT, GITHUB_ACCEPT_HEADER);
           headers.set(HttpHeaders.ACCEPT, GITHUB_ACCEPT_HEADER);
           OAuth2UserRequest request = (OAuth2UserRequest) additionalParams.get("request");
           OAuth2UserRequest request = (OAuth2UserRequest) additionalParams.get("request");

+ 53 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/GithubReleaseInfo.java

@@ -0,0 +1,53 @@
+package com.provectus.kafka.ui.util;
+
+import com.google.common.annotations.VisibleForTesting;
+import java.time.Duration;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.web.reactive.function.client.WebClient;
+import reactor.core.publisher.Mono;
+
+@Slf4j
+public class GithubReleaseInfo {
+
+  private static final String GITHUB_LATEST_RELEASE_RETRIEVAL_URL =
+      "https://api.github.com/repos/provectus/kafka-ui/releases/latest";
+
+  private static final Duration GITHUB_API_MAX_WAIT_TIME = Duration.ofSeconds(2);
+
+  public record GithubReleaseDto(String html_url, String tag_name, String published_at) {
+
+    static GithubReleaseDto empty() {
+      return new GithubReleaseDto(null, null, null);
+    }
+  }
+
+  private volatile GithubReleaseDto release = GithubReleaseDto.empty();
+
+  private final Mono<Void> refreshMono;
+
+  public GithubReleaseInfo() {
+    this(GITHUB_LATEST_RELEASE_RETRIEVAL_URL);
+  }
+
+  @VisibleForTesting
+  GithubReleaseInfo(String url) {
+    this.refreshMono = WebClient.create()
+        .get()
+        .uri(url)
+        .exchangeToMono(resp -> resp.bodyToMono(GithubReleaseDto.class))
+        .timeout(GITHUB_API_MAX_WAIT_TIME)
+        .doOnError(th -> log.trace("Error getting latest github release info", th))
+        .onErrorResume(th -> true, th -> Mono.just(GithubReleaseDto.empty()))
+        .doOnNext(release -> this.release = release)
+        .then();
+  }
+
+  public GithubReleaseDto get() {
+    return release;
+  }
+
+  public Mono<Void> refresh() {
+    return refreshMono;
+  }
+
+}

+ 38 - 22
kafka-ui-api/src/main/resources/application-local.yml

@@ -6,6 +6,9 @@ logging:
     #org.springframework.http.codec.json.Jackson2JsonDecoder: DEBUG
     #org.springframework.http.codec.json.Jackson2JsonDecoder: DEBUG
     reactor.netty.http.server.AccessLog: INFO
     reactor.netty.http.server.AccessLog: INFO
 
 
+#server:
+#  port: 8080 #- Port in which kafka-ui will run.
+
 kafka:
 kafka:
   clusters:
   clusters:
     - name: local
     - name: local
@@ -42,27 +45,40 @@ kafka:
 spring:
 spring:
   jmx:
   jmx:
     enabled: true
     enabled: true
-  security:
-    oauth2:
-      client:
-        registration:
-          cognito:
-            clientId: xx
-            clientSecret: yy
-            scope: openid
-            client-name: cognito
-            provider: cognito
-            redirect-uri: http://localhost:8080/login/oauth2/code/cognito
-            authorization-grant-type: authorization_code
-        provider:
-          cognito:
-            issuer-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj
-            jwk-set-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj/.well-known/jwks.json
-            user-name-attribute: username
+
 auth:
 auth:
   type: DISABLED
   type: DISABLED
-
-roles.file: /tmp/roles.yml
-
-#server:
-#  port: 8080 #- Port in which kafka-ui will run.
+#  type: OAUTH2
+#  oauth2:
+#    client:
+#      cognito:
+#        clientId:
+#        clientSecret:
+#        scope: openid
+#        client-name: cognito
+#        provider: cognito
+#        redirect-uri: http://localhost:8080/login/oauth2/code/cognito
+#        authorization-grant-type: authorization_code
+#        issuer-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj
+#        jwk-set-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj/.well-known/jwks.json
+#        user-name-attribute: username
+#        custom-params:
+#          type: cognito
+#          logoutUrl: https://kafka-ui.auth.eu-central-1.amazoncognito.com/logout
+#      google:
+#        provider: google
+#        clientId:
+#        clientSecret:
+#        user-name-attribute: email
+#        custom-params:
+#          type: google
+#          allowedDomain: provectus.com
+#      github:
+#        provider: github
+#        clientId:
+#        clientSecret:
+#        scope:
+#          - read:org
+#        user-name-attribute: login
+#        custom-params:
+#          type: github

+ 2 - 2
kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java

@@ -16,7 +16,7 @@ import org.springframework.context.ApplicationContextInitializer;
 import org.springframework.context.ConfigurableApplicationContext;
 import org.springframework.context.ConfigurableApplicationContext;
 import org.springframework.test.context.ActiveProfiles;
 import org.springframework.test.context.ActiveProfiles;
 import org.springframework.test.context.ContextConfiguration;
 import org.springframework.test.context.ContextConfiguration;
-import org.springframework.util.SocketUtils;
+import org.springframework.test.util.TestSocketUtils;
 import org.testcontainers.containers.KafkaContainer;
 import org.testcontainers.containers.KafkaContainer;
 import org.testcontainers.containers.Network;
 import org.testcontainers.containers.Network;
 import org.testcontainers.utility.DockerImageName;
 import org.testcontainers.utility.DockerImageName;
@@ -61,7 +61,7 @@ public abstract class AbstractIntegrationTest {
       System.setProperty("kafka.clusters.0.bootstrapServers", kafka.getBootstrapServers());
       System.setProperty("kafka.clusters.0.bootstrapServers", kafka.getBootstrapServers());
       // List unavailable hosts to verify failover
       // List unavailable hosts to verify failover
       System.setProperty("kafka.clusters.0.schemaRegistry", String.format("http://localhost:%1$s,http://localhost:%1$s,%2$s",
       System.setProperty("kafka.clusters.0.schemaRegistry", String.format("http://localhost:%1$s,http://localhost:%1$s,%2$s",
-              SocketUtils.findAvailableTcpPort(), schemaRegistry.getUrl()));
+              TestSocketUtils.findAvailableTcpPort(), schemaRegistry.getUrl()));
       System.setProperty("kafka.clusters.0.kafkaConnect.0.name", "kafka-connect");
       System.setProperty("kafka.clusters.0.kafkaConnect.0.name", "kafka-connect");
       System.setProperty("kafka.clusters.0.kafkaConnect.0.userName", "kafka-connect");
       System.setProperty("kafka.clusters.0.kafkaConnect.0.userName", "kafka-connect");
       System.setProperty("kafka.clusters.0.kafkaConnect.0.password", "kafka-connect");
       System.setProperty("kafka.clusters.0.kafkaConnect.0.password", "kafka-connect");

+ 4 - 5
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/KafkaConfigSanitizerTest.java

@@ -5,13 +5,12 @@ import static org.assertj.core.api.Assertions.assertThat;
 import java.util.Arrays;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Collections;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.Test;
-import org.springframework.boot.actuate.endpoint.Sanitizer;
 
 
 class KafkaConfigSanitizerTest {
 class KafkaConfigSanitizerTest {
 
 
   @Test
   @Test
   void doNothingIfEnabledPropertySetToFalse() {
   void doNothingIfEnabledPropertySetToFalse() {
-    final Sanitizer sanitizer = new KafkaConfigSanitizer(false, Collections.emptyList());
+    final var sanitizer = new KafkaConfigSanitizer(false, Collections.emptyList());
     assertThat(sanitizer.sanitize("password", "secret")).isEqualTo("secret");
     assertThat(sanitizer.sanitize("password", "secret")).isEqualTo("secret");
     assertThat(sanitizer.sanitize("sasl.jaas.config", "secret")).isEqualTo("secret");
     assertThat(sanitizer.sanitize("sasl.jaas.config", "secret")).isEqualTo("secret");
     assertThat(sanitizer.sanitize("database.password", "secret")).isEqualTo("secret");
     assertThat(sanitizer.sanitize("database.password", "secret")).isEqualTo("secret");
@@ -19,7 +18,7 @@ class KafkaConfigSanitizerTest {
 
 
   @Test
   @Test
   void obfuscateCredentials() {
   void obfuscateCredentials() {
-    final Sanitizer sanitizer = new KafkaConfigSanitizer(true, Collections.emptyList());
+    final var sanitizer = new KafkaConfigSanitizer(true, Collections.emptyList());
     assertThat(sanitizer.sanitize("sasl.jaas.config", "secret")).isEqualTo("******");
     assertThat(sanitizer.sanitize("sasl.jaas.config", "secret")).isEqualTo("******");
     assertThat(sanitizer.sanitize("consumer.sasl.jaas.config", "secret")).isEqualTo("******");
     assertThat(sanitizer.sanitize("consumer.sasl.jaas.config", "secret")).isEqualTo("******");
     assertThat(sanitizer.sanitize("producer.sasl.jaas.config", "secret")).isEqualTo("******");
     assertThat(sanitizer.sanitize("producer.sasl.jaas.config", "secret")).isEqualTo("******");
@@ -37,7 +36,7 @@ class KafkaConfigSanitizerTest {
 
 
   @Test
   @Test
   void notObfuscateNormalConfigs() {
   void notObfuscateNormalConfigs() {
-    final Sanitizer sanitizer = new KafkaConfigSanitizer(true, Collections.emptyList());
+    final var sanitizer = new KafkaConfigSanitizer(true, Collections.emptyList());
     assertThat(sanitizer.sanitize("security.protocol", "SASL_SSL")).isEqualTo("SASL_SSL");
     assertThat(sanitizer.sanitize("security.protocol", "SASL_SSL")).isEqualTo("SASL_SSL");
     final String[] bootstrapServer = new String[] {"test1:9092", "test2:9092"};
     final String[] bootstrapServer = new String[] {"test1:9092", "test2:9092"};
     assertThat(sanitizer.sanitize("bootstrap.servers", bootstrapServer)).isEqualTo(bootstrapServer);
     assertThat(sanitizer.sanitize("bootstrap.servers", bootstrapServer)).isEqualTo(bootstrapServer);
@@ -45,7 +44,7 @@ class KafkaConfigSanitizerTest {
 
 
   @Test
   @Test
   void obfuscateCredentialsWithDefinedPatterns() {
   void obfuscateCredentialsWithDefinedPatterns() {
-    final Sanitizer sanitizer = new KafkaConfigSanitizer(true, Arrays.asList("kafka.ui", ".*test.*"));
+    final var sanitizer = new KafkaConfigSanitizer(true, Arrays.asList("kafka.ui", ".*test.*"));
     assertThat(sanitizer.sanitize("consumer.kafka.ui", "secret")).isEqualTo("******");
     assertThat(sanitizer.sanitize("consumer.kafka.ui", "secret")).isEqualTo("******");
     assertThat(sanitizer.sanitize("this.is.test.credentials", "secret")).isEqualTo("******");
     assertThat(sanitizer.sanitize("this.is.test.credentials", "secret")).isEqualTo("******");
     assertThat(sanitizer.sanitize("this.is.not.credential", "not.credential"))
     assertThat(sanitizer.sanitize("this.is.not.credential", "not.credential"))

+ 15 - 10
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java

@@ -9,6 +9,7 @@ import static org.assertj.core.api.Assertions.assertThat;
 import com.provectus.kafka.ui.AbstractIntegrationTest;
 import com.provectus.kafka.ui.AbstractIntegrationTest;
 import com.provectus.kafka.ui.emitter.BackwardRecordEmitter;
 import com.provectus.kafka.ui.emitter.BackwardRecordEmitter;
 import com.provectus.kafka.ui.emitter.ForwardRecordEmitter;
 import com.provectus.kafka.ui.emitter.ForwardRecordEmitter;
+import com.provectus.kafka.ui.emitter.MessagesProcessing;
 import com.provectus.kafka.ui.emitter.PollingSettings;
 import com.provectus.kafka.ui.emitter.PollingSettings;
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.ConsumerPosition;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
 import com.provectus.kafka.ui.model.TopicMessageEventDTO;
@@ -106,12 +107,16 @@ class RecordEmitterTest extends AbstractIntegrationTest {
     );
     );
   }
   }
 
 
+  private MessagesProcessing createMessagesProcessing() {
+    return new MessagesProcessing(RECORD_DESERIALIZER, msg -> true, null);
+  }
+
   @Test
   @Test
   void pollNothingOnEmptyTopic() {
   void pollNothingOnEmptyTopic() {
     var forwardEmitter = new ForwardRecordEmitter(
     var forwardEmitter = new ForwardRecordEmitter(
         this::createConsumer,
         this::createConsumer,
         new ConsumerPosition(BEGINNING, EMPTY_TOPIC, null),
         new ConsumerPosition(BEGINNING, EMPTY_TOPIC, null),
-        RECORD_DESERIALIZER,
+        createMessagesProcessing(),
         PollingSettings.createDefault()
         PollingSettings.createDefault()
     );
     );
 
 
@@ -119,7 +124,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         this::createConsumer,
         this::createConsumer,
         new ConsumerPosition(BEGINNING, EMPTY_TOPIC, null),
         new ConsumerPosition(BEGINNING, EMPTY_TOPIC, null),
         100,
         100,
-        RECORD_DESERIALIZER,
+        createMessagesProcessing(),
         PollingSettings.createDefault()
         PollingSettings.createDefault()
     );
     );
 
 
@@ -141,7 +146,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
     var forwardEmitter = new ForwardRecordEmitter(
     var forwardEmitter = new ForwardRecordEmitter(
         this::createConsumer,
         this::createConsumer,
         new ConsumerPosition(BEGINNING, TOPIC, null),
         new ConsumerPosition(BEGINNING, TOPIC, null),
-        RECORD_DESERIALIZER,
+        createMessagesProcessing(),
         PollingSettings.createDefault()
         PollingSettings.createDefault()
     );
     );
 
 
@@ -149,7 +154,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         this::createConsumer,
         this::createConsumer,
         new ConsumerPosition(LATEST, TOPIC, null),
         new ConsumerPosition(LATEST, TOPIC, null),
         PARTITIONS * MSGS_PER_PARTITION,
         PARTITIONS * MSGS_PER_PARTITION,
-        RECORD_DESERIALIZER,
+        createMessagesProcessing(),
         PollingSettings.createDefault()
         PollingSettings.createDefault()
     );
     );
 
 
@@ -170,7 +175,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
     var forwardEmitter = new ForwardRecordEmitter(
     var forwardEmitter = new ForwardRecordEmitter(
         this::createConsumer,
         this::createConsumer,
         new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
         new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
-        RECORD_DESERIALIZER,
+        createMessagesProcessing(),
         PollingSettings.createDefault()
         PollingSettings.createDefault()
     );
     );
 
 
@@ -178,7 +183,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         this::createConsumer,
         this::createConsumer,
         new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
         new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
         PARTITIONS * MSGS_PER_PARTITION,
         PARTITIONS * MSGS_PER_PARTITION,
-        RECORD_DESERIALIZER,
+        createMessagesProcessing(),
         PollingSettings.createDefault()
         PollingSettings.createDefault()
     );
     );
 
 
@@ -215,7 +220,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
     var forwardEmitter = new ForwardRecordEmitter(
     var forwardEmitter = new ForwardRecordEmitter(
         this::createConsumer,
         this::createConsumer,
         new ConsumerPosition(TIMESTAMP, TOPIC, targetTimestamps),
         new ConsumerPosition(TIMESTAMP, TOPIC, targetTimestamps),
-        RECORD_DESERIALIZER,
+        createMessagesProcessing(),
         PollingSettings.createDefault()
         PollingSettings.createDefault()
     );
     );
 
 
@@ -223,7 +228,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         this::createConsumer,
         this::createConsumer,
         new ConsumerPosition(TIMESTAMP, TOPIC, targetTimestamps),
         new ConsumerPosition(TIMESTAMP, TOPIC, targetTimestamps),
         PARTITIONS * MSGS_PER_PARTITION,
         PARTITIONS * MSGS_PER_PARTITION,
-        RECORD_DESERIALIZER,
+        createMessagesProcessing(),
         PollingSettings.createDefault()
         PollingSettings.createDefault()
     );
     );
 
 
@@ -254,7 +259,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         this::createConsumer,
         this::createConsumer,
         new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
         new ConsumerPosition(OFFSET, TOPIC, targetOffsets),
         numMessages,
         numMessages,
-        RECORD_DESERIALIZER,
+        createMessagesProcessing(),
         PollingSettings.createDefault()
         PollingSettings.createDefault()
     );
     );
 
 
@@ -280,7 +285,7 @@ class RecordEmitterTest extends AbstractIntegrationTest {
         this::createConsumer,
         this::createConsumer,
         new ConsumerPosition(OFFSET, TOPIC, offsets),
         new ConsumerPosition(OFFSET, TOPIC, offsets),
         100,
         100,
-        RECORD_DESERIALIZER,
+        createMessagesProcessing(),
         PollingSettings.createDefault()
         PollingSettings.createDefault()
     );
     );
 
 

+ 0 - 3
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2Test.java

@@ -15,7 +15,6 @@ import java.util.concurrent.CopyOnWriteArraySet;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.Test;
-import org.springframework.util.unit.DataSize;
 import org.testcontainers.utility.DockerImageName;
 import org.testcontainers.utility.DockerImageName;
 
 
 class KsqlServiceV2Test extends AbstractIntegrationTest {
 class KsqlServiceV2Test extends AbstractIntegrationTest {
@@ -27,8 +26,6 @@ class KsqlServiceV2Test extends AbstractIntegrationTest {
   private static final Set<String> STREAMS_TO_DELETE = new CopyOnWriteArraySet<>();
   private static final Set<String> STREAMS_TO_DELETE = new CopyOnWriteArraySet<>();
   private static final Set<String> TABLES_TO_DELETE = new CopyOnWriteArraySet<>();
   private static final Set<String> TABLES_TO_DELETE = new CopyOnWriteArraySet<>();
 
 
-  private static final DataSize maxBuffSize = DataSize.ofMegabytes(20);
-
   @BeforeAll
   @BeforeAll
   static void init() {
   static void init() {
     KSQL_DB.start();
     KSQL_DB.start();

+ 54 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/GithubReleaseInfoTest.java

@@ -0,0 +1,54 @@
+package com.provectus.kafka.ui.util;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import java.io.IOException;
+import java.time.Duration;
+import okhttp3.mockwebserver.MockResponse;
+import okhttp3.mockwebserver.MockWebServer;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import reactor.test.StepVerifier;
+
+class GithubReleaseInfoTest {
+
+  private final MockWebServer mockWebServer = new MockWebServer();
+
+  @BeforeEach
+  void startMockServer() throws IOException {
+    mockWebServer.start();
+  }
+
+  @AfterEach
+  void stopMockServer() throws IOException {
+    mockWebServer.close();
+  }
+
+  @Test
+  void test() {
+    mockWebServer.enqueue(new MockResponse()
+        .addHeader("content-type: application/json")
+        .setBody("""
+            {
+              "published_at": "2023-03-09T16:11:31Z",
+              "tag_name": "v0.6.0",
+              "html_url": "https://github.com/provectus/kafka-ui/releases/tag/v0.6.0",
+              "some_unused_prop": "ololo"
+            }
+            """));
+    var url = mockWebServer.url("repos/provectus/kafka-ui/releases/latest").toString();
+
+    var infoHolder = new GithubReleaseInfo(url);
+    infoHolder.refresh().block();
+
+    var i = infoHolder.get();
+    assertThat(i.html_url())
+        .isEqualTo("https://github.com/provectus/kafka-ui/releases/tag/v0.6.0");
+    assertThat(i.published_at())
+        .isEqualTo("2023-03-09T16:11:31Z");
+    assertThat(i.tag_name())
+        .isEqualTo("v0.6.0");
+  }
+
+}

+ 17 - 19
kafka-ui-contract/pom.xml

@@ -27,20 +27,24 @@
                     <artifactId>spring-boot-starter-validation</artifactId>
                     <artifactId>spring-boot-starter-validation</artifactId>
                 </dependency>
                 </dependency>
                 <dependency>
                 <dependency>
-                    <groupId>io.swagger</groupId>
-                    <artifactId>swagger-annotations</artifactId>
-                    <version>${swagger-annotations.version}</version>
+                    <groupId>io.swagger.core.v3</groupId>
+                    <artifactId>swagger-integration-jakarta</artifactId>
+                    <version>2.2.8</version>
                 </dependency>
                 </dependency>
                 <dependency>
                 <dependency>
                     <groupId>org.openapitools</groupId>
                     <groupId>org.openapitools</groupId>
                     <artifactId>jackson-databind-nullable</artifactId>
                     <artifactId>jackson-databind-nullable</artifactId>
-                    <version>${jackson-databind-nullable.version}</version>
+                    <version>0.2.4</version>
                 </dependency>
                 </dependency>
                 <dependency>
                 <dependency>
-                    <groupId>com.google.code.findbugs</groupId>
-                    <artifactId>jsr305</artifactId>
-                    <version>3.0.2</version>
-                    <scope>provided</scope>
+                    <groupId>jakarta.annotation</groupId>
+                    <artifactId>jakarta.annotation-api</artifactId>
+                    <version>2.1.1</version>
+                </dependency>
+                <dependency>
+                    <groupId>javax.annotation</groupId>
+                    <artifactId>javax.annotation-api</artifactId>
+                    <version>1.3.2</version>
                 </dependency>
                 </dependency>
             </dependencies>
             </dependencies>
 
 
@@ -71,6 +75,7 @@
                                         <library>webclient</library>
                                         <library>webclient</library>
                                         <useBeanValidation>true</useBeanValidation>
                                         <useBeanValidation>true</useBeanValidation>
                                         <dateLibrary>java8</dateLibrary>
                                         <dateLibrary>java8</dateLibrary>
+                                        <useJakartaEe>true</useJakartaEe>
                                     </configOptions>
                                     </configOptions>
                                 </configuration>
                                 </configuration>
                             </execution>
                             </execution>
@@ -80,8 +85,7 @@
                                     <goal>generate</goal>
                                     <goal>generate</goal>
                                 </goals>
                                 </goals>
                                 <configuration>
                                 <configuration>
-                                    <inputSpec>${project.basedir}/src/main/resources/swagger/kafka-ui-api.yaml
-                                    </inputSpec>
+                                    <inputSpec>${project.basedir}/src/main/resources/swagger/kafka-ui-api.yaml</inputSpec>
                                     <output>${project.build.directory}/generated-sources/api</output>
                                     <output>${project.build.directory}/generated-sources/api</output>
                                     <generatorName>spring</generatorName>
                                     <generatorName>spring</generatorName>
                                     <modelNameSuffix>DTO</modelNameSuffix>
                                     <modelNameSuffix>DTO</modelNameSuffix>
@@ -89,14 +93,12 @@
                                         <modelPackage>com.provectus.kafka.ui.model</modelPackage>
                                         <modelPackage>com.provectus.kafka.ui.model</modelPackage>
                                         <apiPackage>com.provectus.kafka.ui.api</apiPackage>
                                         <apiPackage>com.provectus.kafka.ui.api</apiPackage>
                                         <sourceFolder>kafka-ui-contract</sourceFolder>
                                         <sourceFolder>kafka-ui-contract</sourceFolder>
-
                                         <reactive>true</reactive>
                                         <reactive>true</reactive>
-
                                         <interfaceOnly>true</interfaceOnly>
                                         <interfaceOnly>true</interfaceOnly>
                                         <skipDefaultInterface>true</skipDefaultInterface>
                                         <skipDefaultInterface>true</skipDefaultInterface>
                                         <useBeanValidation>true</useBeanValidation>
                                         <useBeanValidation>true</useBeanValidation>
                                         <useTags>true</useTags>
                                         <useTags>true</useTags>
-
+                                        <useSpringBoot3>true</useSpringBoot3>
                                         <dateLibrary>java8</dateLibrary>
                                         <dateLibrary>java8</dateLibrary>
                                     </configOptions>
                                     </configOptions>
                                     <typeMappings>
                                     <typeMappings>
@@ -116,15 +118,13 @@
                                     <generatorName>java</generatorName>
                                     <generatorName>java</generatorName>
                                     <generateApiTests>false</generateApiTests>
                                     <generateApiTests>false</generateApiTests>
                                     <generateModelTests>false</generateModelTests>
                                     <generateModelTests>false</generateModelTests>
-
                                     <configOptions>
                                     <configOptions>
                                         <modelPackage>com.provectus.kafka.ui.connect.model</modelPackage>
                                         <modelPackage>com.provectus.kafka.ui.connect.model</modelPackage>
                                         <apiPackage>com.provectus.kafka.ui.connect.api</apiPackage>
                                         <apiPackage>com.provectus.kafka.ui.connect.api</apiPackage>
                                         <sourceFolder>kafka-connect-client</sourceFolder>
                                         <sourceFolder>kafka-connect-client</sourceFolder>
-
                                         <asyncNative>true</asyncNative>
                                         <asyncNative>true</asyncNative>
                                         <library>webclient</library>
                                         <library>webclient</library>
-
+                                        <useJakartaEe>true</useJakartaEe>
                                         <useBeanValidation>true</useBeanValidation>
                                         <useBeanValidation>true</useBeanValidation>
                                         <dateLibrary>java8</dateLibrary>
                                         <dateLibrary>java8</dateLibrary>
                                     </configOptions>
                                     </configOptions>
@@ -142,15 +142,13 @@
                                     <generatorName>java</generatorName>
                                     <generatorName>java</generatorName>
                                     <generateApiTests>false</generateApiTests>
                                     <generateApiTests>false</generateApiTests>
                                     <generateModelTests>false</generateModelTests>
                                     <generateModelTests>false</generateModelTests>
-
                                     <configOptions>
                                     <configOptions>
                                         <modelPackage>com.provectus.kafka.ui.sr.model</modelPackage>
                                         <modelPackage>com.provectus.kafka.ui.sr.model</modelPackage>
                                         <apiPackage>com.provectus.kafka.ui.sr.api</apiPackage>
                                         <apiPackage>com.provectus.kafka.ui.sr.api</apiPackage>
                                         <sourceFolder>kafka-sr-client</sourceFolder>
                                         <sourceFolder>kafka-sr-client</sourceFolder>
-
                                         <asyncNative>true</asyncNative>
                                         <asyncNative>true</asyncNative>
                                         <library>webclient</library>
                                         <library>webclient</library>
-
+                                        <useJakartaEe>true</useJakartaEe>
                                         <useBeanValidation>true</useBeanValidation>
                                         <useBeanValidation>true</useBeanValidation>
                                         <dateLibrary>java8</dateLibrary>
                                         <dateLibrary>java8</dateLibrary>
                                     </configOptions>
                                     </configOptions>

+ 25 - 0
kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml

@@ -1917,6 +1917,26 @@ components:
             type: string
             type: string
             enum:
             enum:
               - DYNAMIC_CONFIG
               - DYNAMIC_CONFIG
+        build:
+          type: object
+          properties:
+            commitId:
+              type: string
+            version:
+              type: string
+            buildTime:
+              type: string
+            isLatestRelease:
+              type: boolean
+        latestRelease:
+          type: object
+          properties:
+            versionTag:
+              type: string
+            publishedAt:
+              type: string
+            htmlUrl:
+              type: string
 
 
     Cluster:
     Cluster:
       type: object
       type: object
@@ -2387,6 +2407,10 @@ components:
         - UNKNOWN
         - UNKNOWN
 
 
     ConsumerGroup:
     ConsumerGroup:
+      discriminator:
+        propertyName: inherit
+        mapping:
+          details: "#/components/schemas/ConsumerGroupDetails"
       type: object
       type: object
       properties:
       properties:
         groupId:
         groupId:
@@ -2416,6 +2440,7 @@ components:
         - NAME
         - NAME
         - MEMBERS
         - MEMBERS
         - STATE
         - STATE
+        - MESSAGES_BEHIND
 
 
     ConsumerGroupsPageResponse:
     ConsumerGroupsPageResponse:
       type: object
       type: object

+ 10 - 1
kafka-ui-e2e-checks/README.md

@@ -27,7 +27,7 @@ This repository is for E2E UI automation.
 ```
 ```
 git clone https://github.com/provectus/kafka-ui.git
 git clone https://github.com/provectus/kafka-ui.git
 cd  kafka-ui-e2e-checks
 cd  kafka-ui-e2e-checks
-docker pull selenoid/vnc:chrome_86.0  
+docker pull selenoid/vnc_chrome:103.0 
 ```
 ```
 
 
 ### How to run checks
 ### How to run checks
@@ -36,6 +36,7 @@ docker pull selenoid/vnc:chrome_86.0
 
 
 ```
 ```
 cd kafka-ui
 cd kafka-ui
+docker-compose -f kafka-ui-e2e-checks/docker/selenoid-local.yaml up -d
 docker-compose -f documentation/compose/e2e-tests.yaml up -d
 docker-compose -f documentation/compose/e2e-tests.yaml up -d
 ```
 ```
 
 
@@ -51,6 +52,14 @@ docker-compose -f documentation/compose/e2e-tests.yaml up -d
 -Dbrowser=local
 -Dbrowser=local
 ```
 ```
 
 
+Expected Location of Chrome
+```
+Linux:	                    /usr/bin/google-chrome1
+Mac:	                    /Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome
+Windows XP:                 %HOMEPATH%\Local Settings\Application Data\Google\Chrome\Application\chrome.exe
+Windows Vista and newer:    C:\Users%USERNAME%\AppData\Local\Google\Chrome\Application\chrome.exe
+```
+
 ### Qase integration
 ### Qase integration
 
 
 Found instruction for Qase.io integration (for internal use only) at `kafka-ui-e2e-checks/QASE.md`
 Found instruction for Qase.io integration (for internal use only) at `kafka-ui-e2e-checks/QASE.md`

+ 7 - 5
kafka-ui-e2e-checks/docker/selenoid.yaml → kafka-ui-e2e-checks/docker/selenoid-git.yaml

@@ -1,17 +1,19 @@
+---
 version: '3'
 version: '3'
 
 
 services:
 services:
+
   selenoid:
   selenoid:
     network_mode: bridge
     network_mode: bridge
     image: aerokube/selenoid:1.10.7
     image: aerokube/selenoid:1.10.7
     volumes:
     volumes:
       - "../selenoid/config:/etc/selenoid"
       - "../selenoid/config:/etc/selenoid"
       - "/var/run/docker.sock:/var/run/docker.sock"
       - "/var/run/docker.sock:/var/run/docker.sock"
-      - "../selenoid/video:/video"
+      - "../selenoid/video:/opt/selenoid/video"
       - "../selenoid/logs:/opt/selenoid/logs"
       - "../selenoid/logs:/opt/selenoid/logs"
     environment:
     environment:
-      - OVERRIDE_VIDEO_OUTPUT_DIR=video
-    command: [ "-conf", "/etc/selenoid/browsers.json", "-video-output-dir", "/opt/selenoid/video", "-log-output-dir", "/opt/selenoid/logs" ]
+      - OVERRIDE_VIDEO_OUTPUT_DIR=../selenoid/video
+    command: [ "-conf", "/etc/selenoid/browsersGit.json", "-video-output-dir", "/opt/selenoid/video", "-log-output-dir", "/opt/selenoid/logs" ]
     ports:
     ports:
       - "4444:4444"
       - "4444:4444"
 
 
@@ -22,10 +24,10 @@ services:
       - selenoid
       - selenoid
     ports:
     ports:
       - "8081:8080"
       - "8081:8080"
-    command: [ "--selenoid-uri", "http://localhost:4444" ]
+    command: [ "--selenoid-uri", "http://selenoid:4444" ]
 
 
   selenoid-chrome:
   selenoid-chrome:
     network_mode: bridge
     network_mode: bridge
-    image: selenoid/vnc:chrome_96.0
+    image: selenoid/vnc_chrome:103.0
     extra_hosts:
     extra_hosts:
       - "host.docker.internal:host-gateway"
       - "host.docker.internal:host-gateway"

+ 33 - 0
kafka-ui-e2e-checks/docker/selenoid-local.yaml

@@ -0,0 +1,33 @@
+---
+version: '3'
+
+services:
+
+  selenoid:
+    network_mode: bridge
+    image: aerokube/selenoid:1.10.7
+    volumes:
+      - "../selenoid/config:/etc/selenoid"
+      - "/var/run/docker.sock:/var/run/docker.sock"
+      - "../selenoid/video:/opt/selenoid/video"
+      - "../selenoid/logs:/opt/selenoid/logs"
+    environment:
+      - OVERRIDE_VIDEO_OUTPUT_DIR=../selenoid/video
+    command: [ "-conf", "/etc/selenoid/browsersLocal.json", "-video-output-dir", "/opt/selenoid/video", "-log-output-dir", "/opt/selenoid/logs" ]
+    ports:
+      - "4444:4444"
+
+  selenoid-ui:
+    network_mode: bridge
+    image: aerokube/selenoid-ui:latest-release
+    links:
+      - selenoid
+    ports:
+      - "8081:8080"
+    command: [ "--selenoid-uri", "http://selenoid:4444" ]
+
+  selenoid-chrome:
+    network_mode: bridge
+    image: selenoid/vnc_chrome:103.0
+    extra_hosts:
+      - "host.docker.internal:host-gateway"

+ 44 - 14
kafka-ui-e2e-checks/pom.xml

@@ -17,15 +17,14 @@
         <testcontainers.version>1.17.6</testcontainers.version>
         <testcontainers.version>1.17.6</testcontainers.version>
         <httpcomponents.version>5.2.1</httpcomponents.version>
         <httpcomponents.version>5.2.1</httpcomponents.version>
         <selenium.version>4.8.1</selenium.version>
         <selenium.version>4.8.1</selenium.version>
-        <selenide.version>6.11.2</selenide.version>
+        <selenide.version>6.12.3</selenide.version>
         <testng.version>7.7.0</testng.version>
         <testng.version>7.7.0</testng.version>
         <allure.version>2.21.0</allure.version>
         <allure.version>2.21.0</allure.version>
-        <qase.io.version>3.0.3</qase.io.version>
+        <qase.io.version>3.0.4</qase.io.version>
         <aspectj.version>1.9.9.1</aspectj.version>
         <aspectj.version>1.9.9.1</aspectj.version>
         <assertj.version>3.24.2</assertj.version>
         <assertj.version>3.24.2</assertj.version>
         <hamcrest.version>2.2</hamcrest.version>
         <hamcrest.version>2.2</hamcrest.version>
-        <slf4j.version>1.7.36</slf4j.version>
-        <dotenv.version>2.3.1</dotenv.version>
+        <slf4j.version>2.0.5</slf4j.version>
         <kafka.version>3.3.1</kafka.version>
         <kafka.version>3.3.1</kafka.version>
     </properties>
     </properties>
 
 
@@ -122,6 +121,11 @@
             <artifactId>selenium</artifactId>
             <artifactId>selenium</artifactId>
             <version>${testcontainers.version}</version>
             <version>${testcontainers.version}</version>
         </dependency>
         </dependency>
+        <dependency>
+            <groupId>org.projectlombok</groupId>
+            <artifactId>lombok</artifactId>
+            <version>${org.projectlombok.version}</version>
+        </dependency>
         <dependency>
         <dependency>
             <groupId>org.apache.httpcomponents.core5</groupId>
             <groupId>org.apache.httpcomponents.core5</groupId>
             <artifactId>httpcore5</artifactId>
             <artifactId>httpcore5</artifactId>
@@ -132,6 +136,11 @@
             <artifactId>httpclient5</artifactId>
             <artifactId>httpclient5</artifactId>
             <version>${httpcomponents.version}</version>
             <version>${httpcomponents.version}</version>
         </dependency>
         </dependency>
+        <dependency>
+            <groupId>org.seleniumhq.selenium</groupId>
+            <artifactId>selenium-http-jdk-client</artifactId>
+            <version>${selenium.version}</version>
+        </dependency>
         <dependency>
         <dependency>
             <groupId>org.seleniumhq.selenium</groupId>
             <groupId>org.seleniumhq.selenium</groupId>
             <artifactId>selenium-http</artifactId>
             <artifactId>selenium-http</artifactId>
@@ -187,16 +196,6 @@
             <artifactId>slf4j-simple</artifactId>
             <artifactId>slf4j-simple</artifactId>
             <version>${slf4j.version}</version>
             <version>${slf4j.version}</version>
         </dependency>
         </dependency>
-        <dependency>
-            <groupId>org.projectlombok</groupId>
-            <artifactId>lombok</artifactId>
-            <version>${org.projectlombok.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>io.github.cdimascio</groupId>
-            <artifactId>dotenv-java</artifactId>
-            <version>${dotenv.version}</version>
-        </dependency>
         <dependency>
         <dependency>
             <groupId>com.provectus</groupId>
             <groupId>com.provectus</groupId>
             <artifactId>kafka-ui-contract</artifactId>
             <artifactId>kafka-ui-contract</artifactId>
@@ -265,6 +264,37 @@
                         <artifactId>allure-maven</artifactId>
                         <artifactId>allure-maven</artifactId>
                         <version>2.10.0</version>
                         <version>2.10.0</version>
                     </plugin>
                     </plugin>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-checkstyle-plugin</artifactId>
+                        <version>3.1.2</version>
+                        <dependencies>
+                            <dependency>
+                                <groupId>com.puppycrawl.tools</groupId>
+                                <artifactId>checkstyle</artifactId>
+                                <version>10.3.1</version>
+                            </dependency>
+                        </dependencies>
+                        <executions>
+                            <execution>
+                                <id>checkstyle</id>
+                                <phase>validate</phase>
+                                <goals>
+                                    <goal>check</goal>
+                                </goals>
+                                <configuration>
+                                    <violationSeverity>warning</violationSeverity>
+                                    <failOnViolation>true</failOnViolation>
+                                    <failsOnError>true</failsOnError>
+                                    <includeTestSourceDirectory>true</includeTestSourceDirectory>
+                                    <configLocation>file:${basedir}/../etc/checkstyle/checkstyle-e2e.xml</configLocation>
+                                    <headerLocation>file:${basedir}/../etc/checkstyle/apache-header.txt</headerLocation>
+                                </configuration>
+                            </execution>
+                        </executions>
+
+                    </plugin>
+
                 </plugins>
                 </plugins>
             </build>
             </build>
         </profile>
         </profile>

+ 15 - 0
kafka-ui-e2e-checks/selenoid/config/browsersGit.json

@@ -0,0 +1,15 @@
+{
+  "chrome": {
+    "default": "103.0",
+    "versions": {
+      "103.0": {
+        "image": "selenoid/vnc_chrome:103.0",
+        "hosts": [
+          "host.docker.internal:172.17.0.1"
+        ],
+        "port": "4444",
+        "path": "/"
+      }
+    }
+  }
+}

+ 3 - 3
kafka-ui-e2e-checks/selenoid/config/browsers.json → kafka-ui-e2e-checks/selenoid/config/browsersLocal.json

@@ -1,9 +1,9 @@
 {
 {
   "chrome": {
   "chrome": {
-    "default": "96.0",
+    "default": "103.0",
     "versions": {
     "versions": {
-      "96.0": {
-        "image": "selenoid/vnc_chrome:96.0",
+      "103.0": {
+        "image": "selenoid/vnc_chrome:103.0",
         "port": "4444",
         "port": "4444",
         "path": "/"
         "path": "/"
       }
       }

+ 1 - 1
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Connector.java

@@ -7,5 +7,5 @@ import lombok.experimental.Accessors;
 @Accessors(chain = true)
 @Accessors(chain = true)
 public class Connector {
 public class Connector {
 
 
-    private String name, config;
+  private String name, config;
 }
 }

+ 20 - 19
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Schema.java

@@ -1,33 +1,34 @@
 package com.provectus.kafka.ui.models;
 package com.provectus.kafka.ui.models;
 
 
+import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
+
 import com.provectus.kafka.ui.api.model.SchemaType;
 import com.provectus.kafka.ui.api.model.SchemaType;
 import lombok.Data;
 import lombok.Data;
 import lombok.experimental.Accessors;
 import lombok.experimental.Accessors;
 
 
-import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
-
 @Data
 @Data
 @Accessors(chain = true)
 @Accessors(chain = true)
 public class Schema {
 public class Schema {
 
 
-    private String name, valuePath;
-    private SchemaType type;
+  private String name, valuePath;
+  private SchemaType type;
 
 
-    public static Schema createSchemaAvro() {
-        return new Schema().setName("schema_avro-" + randomAlphabetic(5))
-                .setType(SchemaType.AVRO)
-                .setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schemas/schema_avro_value.json");
-    }
+  public static Schema createSchemaAvro() {
+    return new Schema().setName("schema_avro-" + randomAlphabetic(5))
+        .setType(SchemaType.AVRO)
+        .setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schemas/schema_avro_value.json");
+  }
 
 
-    public static Schema createSchemaJson() {
-        return new Schema().setName("schema_json-" + randomAlphabetic(5))
-                .setType(SchemaType.JSON)
-                .setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schemas/schema_json_Value.json");
-    }
+  public static Schema createSchemaJson() {
+    return new Schema().setName("schema_json-" + randomAlphabetic(5))
+        .setType(SchemaType.JSON)
+        .setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schemas/schema_json_Value.json");
+  }
 
 
-    public static Schema createSchemaProtobuf() {
-        return new Schema().setName("schema_protobuf-" + randomAlphabetic(5))
-                .setType(SchemaType.PROTOBUF)
-                .setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schemas/schema_protobuf_value.txt");
-    }
+  public static Schema createSchemaProtobuf() {
+    return new Schema().setName("schema_protobuf-" + randomAlphabetic(5))
+        .setType(SchemaType.PROTOBUF)
+        .setValuePath(
+            System.getProperty("user.dir") + "/src/main/resources/testData/schemas/schema_protobuf_value.txt");
+  }
 }
 }

+ 6 - 6
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java

@@ -11,10 +11,10 @@ import lombok.experimental.Accessors;
 @Accessors(chain = true)
 @Accessors(chain = true)
 public class Topic {
 public class Topic {
 
 
-    private String name, timeToRetainData, maxMessageBytes, messageKey, messageContent, customParameterValue;
-    private int numberOfPartitions;
-    private CustomParameterType customParameterType;
-    private CleanupPolicyValue cleanupPolicyValue;
-    private MaxSizeOnDisk maxSizeOnDisk;
-    private TimeToRetain timeToRetain;
+  private String name, timeToRetainData, maxMessageBytes, messageKey, messageContent, customParameterValue;
+  private int numberOfPartitions;
+  private CustomParameterType customParameterType;
+  private CleanupPolicyValue cleanupPolicyValue;
+  private MaxSizeOnDisk maxSizeOnDisk;
+  private TimeToRetain timeToRetain;
 }
 }

+ 122 - 115
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java

@@ -1,135 +1,142 @@
 package com.provectus.kafka.ui.pages;
 package com.provectus.kafka.ui.pages;
 
 
+import static com.codeborne.selenide.Selenide.$$x;
+import static com.codeborne.selenide.Selenide.$x;
+
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.ElementsCollection;
 import com.codeborne.selenide.ElementsCollection;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.WebDriverRunner;
 import com.codeborne.selenide.WebDriverRunner;
 import com.provectus.kafka.ui.pages.panels.enums.MenuItem;
 import com.provectus.kafka.ui.pages.panels.enums.MenuItem;
 import com.provectus.kafka.ui.utilities.WebUtils;
 import com.provectus.kafka.ui.utilities.WebUtils;
+import java.time.Duration;
 import lombok.extern.slf4j.Slf4j;
 import lombok.extern.slf4j.Slf4j;
 import org.openqa.selenium.Keys;
 import org.openqa.selenium.Keys;
 import org.openqa.selenium.interactions.Actions;
 import org.openqa.selenium.interactions.Actions;
 
 
-import java.time.Duration;
-
-import static com.codeborne.selenide.Selenide.$$x;
-import static com.codeborne.selenide.Selenide.$x;
-
 @Slf4j
 @Slf4j
 public abstract class BasePage extends WebUtils {
 public abstract class BasePage extends WebUtils {
 
 
-    protected SelenideElement loadingSpinner = $x("//div[@role='progressbar']");
-    protected SelenideElement submitBtn = $x("//button[@type='submit']");
-    protected SelenideElement tableGrid = $x("//table");
-    protected SelenideElement dotMenuBtn = $x("//button[@aria-label='Dropdown Toggle']");
-    protected SelenideElement alertHeader = $x("//div[@role='alert']//div[@role='heading']");
-    protected SelenideElement alertMessage = $x("//div[@role='alert']//div[@role='contentinfo']");
-    protected SelenideElement confirmationMdl = $x("//div[text()= 'Confirm the action']/..");
-    protected SelenideElement confirmBtn = $x("//button[contains(text(),'Confirm')]");
-    protected SelenideElement cancelBtn = $x("//button[contains(text(),'Cancel')]");
-    protected SelenideElement backBtn = $x("//button[contains(text(),'Back')]");
-    protected SelenideElement nextBtn = $x("//button[contains(text(),'Next')]");
-    protected ElementsCollection ddlOptions = $$x("//li[@value]");
-    protected ElementsCollection gridItems = $$x("//tr[@class]");
-    protected String summaryCellLocator = "//div[contains(text(),'%s')]";
-    protected String tableElementNameLocator = "//tbody//a[contains(text(),'%s')]";
-    protected String columnHeaderLocator = "//table//tr/th//div[text()='%s']";
-    protected String pageTitleFromHeader = "//h1[text()='%s']";
-    protected String pagePathFromHeader = "//a[text()='%s']/../h1";
-
-    protected void waitUntilSpinnerDisappear() {
-        log.debug("\nwaitUntilSpinnerDisappear");
-        if (isVisible(loadingSpinner)) {
-            loadingSpinner.shouldBe(Condition.disappear, Duration.ofSeconds(60));
-        }
+  protected SelenideElement loadingSpinner = $x("//div[@role='progressbar']");
+  protected SelenideElement submitBtn = $x("//button[@type='submit']");
+  protected SelenideElement tableGrid = $x("//table");
+  protected SelenideElement searchFld = $x("//input[@type='text'][contains(@id, ':r')]");
+  protected SelenideElement dotMenuBtn = $x("//button[@aria-label='Dropdown Toggle']");
+  protected SelenideElement alertHeader = $x("//div[@role='alert']//div[@role='heading']");
+  protected SelenideElement alertMessage = $x("//div[@role='alert']//div[@role='contentinfo']");
+  protected SelenideElement confirmationMdl = $x("//div[text()= 'Confirm the action']/..");
+  protected SelenideElement confirmBtn = $x("//button[contains(text(),'Confirm')]");
+  protected SelenideElement cancelBtn = $x("//button[contains(text(),'Cancel')]");
+  protected SelenideElement backBtn = $x("//button[contains(text(),'Back')]");
+  protected SelenideElement nextBtn = $x("//button[contains(text(),'Next')]");
+  protected ElementsCollection ddlOptions = $$x("//li[@value]");
+  protected ElementsCollection gridItems = $$x("//tr[@class]");
+  protected String summaryCellLocator = "//div[contains(text(),'%s')]";
+  protected String tableElementNameLocator = "//tbody//a[contains(text(),'%s')]";
+  protected String columnHeaderLocator = "//table//tr/th//div[text()='%s']";
+  protected String pageTitleFromHeader = "//h1[text()='%s']";
+  protected String pagePathFromHeader = "//a[text()='%s']/../h1";
+
+  protected void waitUntilSpinnerDisappear(int... timeoutInSeconds) {
+    log.debug("\nwaitUntilSpinnerDisappear");
+    if (isVisible(loadingSpinner, timeoutInSeconds)) {
+      loadingSpinner.shouldBe(Condition.disappear, Duration.ofSeconds(60));
     }
     }
-
-    protected SelenideElement getPageTitleFromHeader(MenuItem menuItem) {
-        return $x(String.format(pageTitleFromHeader, menuItem.getPageTitle()));
+  }
+
+  protected void searchItem(String tag) {
+    log.debug("\nsearchItem: {}", tag);
+    sendKeysAfterClear(searchFld, tag);
+    searchFld.pressEnter().shouldHave(Condition.value(tag));
+    waitUntilSpinnerDisappear(1);
+  }
+
+  protected SelenideElement getPageTitleFromHeader(MenuItem menuItem) {
+    return $x(String.format(pageTitleFromHeader, menuItem.getPageTitle()));
+  }
+
+  protected SelenideElement getPagePathFromHeader(MenuItem menuItem) {
+    return $x(String.format(pagePathFromHeader, menuItem.getPageTitle()));
+  }
+
+  protected void clickSubmitBtn() {
+    clickByJavaScript(submitBtn);
+  }
+
+  protected void setJsonInputValue(SelenideElement jsonInput, String jsonConfig) {
+    sendKeysByActions(jsonInput, jsonConfig.replace("  ", ""));
+    new Actions(WebDriverRunner.getWebDriver())
+        .keyDown(Keys.SHIFT)
+        .sendKeys(Keys.PAGE_DOWN)
+        .keyUp(Keys.SHIFT)
+        .sendKeys(Keys.DELETE)
+        .perform();
+  }
+
+  protected SelenideElement getTableElement(String elementName) {
+    log.debug("\ngetTableElement: {}", elementName);
+    return $x(String.format(tableElementNameLocator, elementName));
+  }
+
+  protected ElementsCollection getDdlOptions() {
+    return ddlOptions;
+  }
+
+  protected String getAlertHeader() {
+    log.debug("\ngetAlertHeader");
+    String result = alertHeader.shouldBe(Condition.visible).getText();
+    log.debug("-> {}", result);
+    return result;
+  }
+
+  protected String getAlertMessage() {
+    log.debug("\ngetAlertMessage");
+    String result = alertMessage.shouldBe(Condition.visible).getText();
+    log.debug("-> {}", result);
+    return result;
+  }
+
+  protected boolean isAlertVisible(AlertHeader header) {
+    log.debug("\nisAlertVisible: {}", header.toString());
+    boolean result = getAlertHeader().equals(header.toString());
+    log.debug("-> {}", result);
+    return result;
+  }
+
+  protected boolean isAlertVisible(AlertHeader header, String message) {
+    log.debug("\nisAlertVisible: {} {}", header, message);
+    boolean result = isAlertVisible(header) && getAlertMessage().equals(message);
+    log.debug("-> {}", result);
+    return result;
+  }
+
+  protected void clickConfirmButton() {
+    confirmBtn.shouldBe(Condition.enabled).click();
+    confirmBtn.shouldBe(Condition.disappear);
+  }
+
+  protected void clickCancelButton() {
+    cancelBtn.shouldBe(Condition.enabled).click();
+    cancelBtn.shouldBe(Condition.disappear);
+  }
+
+  protected boolean isConfirmationModalVisible() {
+    return isVisible(confirmationMdl);
+  }
+
+  public enum AlertHeader {
+    SUCCESS("Success"),
+    VALIDATION_ERROR("Validation Error"),
+    BAD_REQUEST("400 Bad Request");
+
+    private final String value;
+
+    AlertHeader(String value) {
+      this.value = value;
     }
     }
 
 
-    protected SelenideElement getPagePathFromHeader(MenuItem menuItem) {
-        return $x(String.format(pagePathFromHeader, menuItem.getPageTitle()));
-    }
-
-    protected void clickSubmitBtn() {
-        clickByJavaScript(submitBtn);
-    }
-
-    protected void setJsonInputValue(SelenideElement jsonInput, String jsonConfig) {
-        sendKeysByActions(jsonInput, jsonConfig.replace("  ", ""));
-        new Actions(WebDriverRunner.getWebDriver())
-                .keyDown(Keys.SHIFT)
-                .sendKeys(Keys.PAGE_DOWN)
-                .keyUp(Keys.SHIFT)
-                .sendKeys(Keys.DELETE)
-                .perform();
-    }
-
-    protected SelenideElement getTableElement(String elementName) {
-        log.debug("\ngetTableElement: {}", elementName);
-        return $x(String.format(tableElementNameLocator, elementName));
-    }
-
-    protected ElementsCollection getDdlOptions() {
-        return ddlOptions;
-    }
-
-    protected String getAlertHeader() {
-        log.debug("\ngetAlertHeader");
-        String result = alertHeader.shouldBe(Condition.visible).getText();
-        log.debug("-> {}", result);
-        return result;
-    }
-
-    protected String getAlertMessage() {
-        log.debug("\ngetAlertMessage");
-        String result = alertMessage.shouldBe(Condition.visible).getText();
-        log.debug("-> {}", result);
-        return result;
-    }
-
-    protected boolean isAlertVisible(AlertHeader header) {
-        log.debug("\nisAlertVisible: {}", header.toString());
-        boolean result = getAlertHeader().equals(header.toString());
-        log.debug("-> {}", result);
-        return result;
-    }
-
-    protected boolean isAlertVisible(AlertHeader header, String message) {
-        log.debug("\nisAlertVisible: {} {}", header, message);
-        boolean result = isAlertVisible(header) && getAlertMessage().equals(message);
-        log.debug("-> {}", result);
-        return result;
-    }
-
-    protected void clickConfirmButton() {
-        confirmBtn.shouldBe(Condition.enabled).click();
-        confirmBtn.shouldBe(Condition.disappear);
-    }
-
-    protected void clickCancelButton() {
-        cancelBtn.shouldBe(Condition.enabled).click();
-        cancelBtn.shouldBe(Condition.disappear);
-    }
-
-    protected boolean isConfirmationModalVisible() {
-        return isVisible(confirmationMdl);
-    }
-
-    public enum AlertHeader {
-        SUCCESS("Success"),
-        VALIDATION_ERROR("Validation Error"),
-        BAD_REQUEST("400 Bad Request");
-
-        private final String value;
-
-        AlertHeader(String value) {
-            this.value = value;
-        }
-
-        public String toString() {
-            return value;
-        }
+    public String toString() {
+      return value;
     }
     }
+  }
 }
 }

+ 27 - 28
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java

@@ -1,41 +1,40 @@
 package com.provectus.kafka.ui.pages.brokers;
 package com.provectus.kafka.ui.pages.brokers;
 
 
+import static com.codeborne.selenide.Selenide.$$x;
+import static com.codeborne.selenide.Selenide.$x;
+
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
-
 import java.util.List;
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import java.util.stream.Stream;
 
 
-import static com.codeborne.selenide.Selenide.$$x;
-import static com.codeborne.selenide.Selenide.$x;
-
 public class BrokersConfigTab extends BasePage {
 public class BrokersConfigTab extends BasePage {
 
 
-    protected List<SelenideElement> editBtn = $$x("//button[@aria-label='editAction']");
-    protected SelenideElement searchByKeyField = $x("//input[@placeholder='Search by Key']");
-
-    @Step
-    public BrokersConfigTab waitUntilScreenReady() {
-        waitUntilSpinnerDisappear();
-        searchByKeyField.shouldBe(Condition.visible);
-        return this;
-    }
-
-    @Step
-    public boolean isSearchByKeyVisible() {
-        return isVisible(searchByKeyField);
-    }
-
-    public List<SelenideElement> getColumnHeaders() {
-        return Stream.of("Key", "Value", "Source")
-                .map(name -> $x(String.format(columnHeaderLocator, name)))
-                .collect(Collectors.toList());
-    }
-
-    public List<SelenideElement> getEditButtons() {
-        return editBtn;
-    }
+  protected List<SelenideElement> editBtn = $$x("//button[@aria-label='editAction']");
+  protected SelenideElement searchByKeyField = $x("//input[@placeholder='Search by Key']");
+
+  @Step
+  public BrokersConfigTab waitUntilScreenReady() {
+    waitUntilSpinnerDisappear();
+    searchByKeyField.shouldBe(Condition.visible);
+    return this;
+  }
+
+  @Step
+  public boolean isSearchByKeyVisible() {
+    return isVisible(searchByKeyField);
+  }
+
+  public List<SelenideElement> getColumnHeaders() {
+    return Stream.of("Key", "Value", "Source")
+        .map(name -> $x(String.format(columnHeaderLocator, name)))
+        .collect(Collectors.toList());
+  }
+
+  public List<SelenideElement> getEditButtons() {
+    return editBtn;
+  }
 }
 }

+ 73 - 74
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersDetails.java

@@ -1,92 +1,91 @@
 package com.provectus.kafka.ui.pages.brokers;
 package com.provectus.kafka.ui.pages.brokers;
 
 
+import static com.codeborne.selenide.Selenide.$;
+import static com.codeborne.selenide.Selenide.$x;
+
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
-import org.openqa.selenium.By;
-
 import java.util.ArrayList;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Arrays;
 import java.util.List;
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import java.util.stream.Stream;
-
-import static com.codeborne.selenide.Selenide.$;
-import static com.codeborne.selenide.Selenide.$x;
+import org.openqa.selenium.By;
 
 
 public class BrokersDetails extends BasePage {
 public class BrokersDetails extends BasePage {
 
 
-    protected SelenideElement logDirectoriesTab = $x("//a[text()='Log directories']");
-    protected SelenideElement metricsTab = $x("//a[text()='Metrics']");
-    protected String brokersTabLocator = "//a[text()='%s']";
-
-    @Step
-    public BrokersDetails waitUntilScreenReady() {
-        waitUntilSpinnerDisappear();
-        Arrays.asList(logDirectoriesTab, metricsTab).forEach(element -> element.shouldBe(Condition.visible));
-        return this;
-    }
-
-    @Step
-    public BrokersDetails openDetailsTab(DetailsTab menu) {
-        $(By.linkText(menu.toString())).shouldBe(Condition.enabled).click();
-        waitUntilSpinnerDisappear();
-        return this;
-    }
-
-    private List<SelenideElement> getVisibleColumnHeaders() {
-        return Stream.of("Name", "Topics", "Error", "Partitions")
-                .map(name -> $x(String.format(columnHeaderLocator, name)))
-                .collect(Collectors.toList());
+  protected SelenideElement logDirectoriesTab = $x("//a[text()='Log directories']");
+  protected SelenideElement metricsTab = $x("//a[text()='Metrics']");
+  protected String brokersTabLocator = "//a[text()='%s']";
+
+  @Step
+  public BrokersDetails waitUntilScreenReady() {
+    waitUntilSpinnerDisappear();
+    Arrays.asList(logDirectoriesTab, metricsTab).forEach(element -> element.shouldBe(Condition.visible));
+    return this;
+  }
+
+  @Step
+  public BrokersDetails openDetailsTab(DetailsTab menu) {
+    $(By.linkText(menu.toString())).shouldBe(Condition.enabled).click();
+    waitUntilSpinnerDisappear();
+    return this;
+  }
+
+  private List<SelenideElement> getVisibleColumnHeaders() {
+    return Stream.of("Name", "Topics", "Error", "Partitions")
+        .map(name -> $x(String.format(columnHeaderLocator, name)))
+        .collect(Collectors.toList());
+  }
+
+  private List<SelenideElement> getEnabledColumnHeaders() {
+    return Stream.of("Name", "Error")
+        .map(name -> $x(String.format(columnHeaderLocator, name)))
+        .collect(Collectors.toList());
+  }
+
+  private List<SelenideElement> getVisibleSummaryCells() {
+    return Stream.of("Segment Size", "Segment Count", "Port", "Host")
+        .map(name -> $x(String.format(summaryCellLocator, name)))
+        .collect(Collectors.toList());
+  }
+
+  private List<SelenideElement> getDetailsTabs() {
+    return Stream.of(DetailsTab.values())
+        .map(name -> $x(String.format(brokersTabLocator, name)))
+        .collect(Collectors.toList());
+  }
+
+  @Step
+  public List<SelenideElement> getAllEnabledElements() {
+    List<SelenideElement> enabledElements = new ArrayList<>(getEnabledColumnHeaders());
+    enabledElements.addAll(getDetailsTabs());
+    return enabledElements;
+  }
+
+  @Step
+  public List<SelenideElement> getAllVisibleElements() {
+    List<SelenideElement> visibleElements = new ArrayList<>(getVisibleSummaryCells());
+    visibleElements.addAll(getVisibleColumnHeaders());
+    visibleElements.addAll(getDetailsTabs());
+    return visibleElements;
+  }
+
+  public enum DetailsTab {
+    LOG_DIRECTORIES("Log directories"),
+    CONFIGS("Configs"),
+    METRICS("Metrics");
+
+    private final String value;
+
+    DetailsTab(String value) {
+      this.value = value;
     }
     }
 
 
-    private List<SelenideElement> getEnabledColumnHeaders() {
-        return Stream.of("Name", "Error")
-                .map(name -> $x(String.format(columnHeaderLocator, name)))
-                .collect(Collectors.toList());
-    }
-
-    private List<SelenideElement> getVisibleSummaryCells() {
-        return Stream.of("Segment Size", "Segment Count", "Port", "Host")
-                .map(name -> $x(String.format(summaryCellLocator, name)))
-                .collect(Collectors.toList());
-    }
-
-    private List<SelenideElement> getDetailsTabs() {
-        return Stream.of(DetailsTab.values())
-                .map(name -> $x(String.format(brokersTabLocator, name)))
-                .collect(Collectors.toList());
-    }
-
-    @Step
-    public List<SelenideElement> getAllEnabledElements() {
-        List<SelenideElement> enabledElements = new ArrayList<>(getEnabledColumnHeaders());
-        enabledElements.addAll(getDetailsTabs());
-        return enabledElements;
-    }
-
-    @Step
-    public List<SelenideElement> getAllVisibleElements() {
-        List<SelenideElement> visibleElements = new ArrayList<>(getVisibleSummaryCells());
-        visibleElements.addAll(getVisibleColumnHeaders());
-        visibleElements.addAll(getDetailsTabs());
-        return visibleElements;
-    }
-
-    public enum DetailsTab {
-        LOG_DIRECTORIES("Log directories"),
-        CONFIGS("Configs"),
-        METRICS("Metrics");
-
-        private final String value;
-
-        DetailsTab(String value) {
-            this.value = value;
-        }
-
-        public String toString() {
-            return value;
-        }
+    public String toString() {
+      return value;
     }
     }
+  }
 }
 }

+ 88 - 89
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java

@@ -1,123 +1,122 @@
 package com.provectus.kafka.ui.pages.brokers;
 package com.provectus.kafka.ui.pages.brokers;
 
 
+import static com.codeborne.selenide.Selenide.$x;
+import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.BROKERS;
+
 import com.codeborne.selenide.CollectionCondition;
 import com.codeborne.selenide.CollectionCondition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
-
 import java.util.ArrayList;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import java.util.stream.Stream;
 
 
-import static com.codeborne.selenide.Selenide.$x;
-import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.BROKERS;
-
 public class BrokersList extends BasePage {
 public class BrokersList extends BasePage {
 
 
-    @Step
-    public BrokersList waitUntilScreenReady() {
-        waitUntilSpinnerDisappear();
-        getPageTitleFromHeader(BROKERS).shouldBe(Condition.visible);
-        return this;
-    }
-
-    @Step
-    public BrokersList openBroker(int brokerId) {
-        getBrokerItem(brokerId).openItem();
-        return this;
-    }
-
-    private List<SelenideElement> getUptimeSummaryCells() {
-        return Stream.of("Broker Count", "Active Controller", "Version")
-                .map(name -> $x(String.format(summaryCellLocator, name)))
-                .collect(Collectors.toList());
+  @Step
+  public BrokersList waitUntilScreenReady() {
+    waitUntilSpinnerDisappear();
+    getPageTitleFromHeader(BROKERS).shouldBe(Condition.visible);
+    return this;
+  }
+
+  @Step
+  public BrokersList openBroker(int brokerId) {
+    getBrokerItem(brokerId).openItem();
+    return this;
+  }
+
+  private List<SelenideElement> getUptimeSummaryCells() {
+    return Stream.of("Broker Count", "Active Controller", "Version")
+        .map(name -> $x(String.format(summaryCellLocator, name)))
+        .collect(Collectors.toList());
+  }
+
+  private List<SelenideElement> getPartitionsSummaryCells() {
+    return Stream.of("Online", "URP", "In Sync Replicas", "Out Of Sync Replicas")
+        .map(name -> $x(String.format(summaryCellLocator, name)))
+        .collect(Collectors.toList());
+  }
+
+  @Step
+  public List<SelenideElement> getAllVisibleElements() {
+    List<SelenideElement> visibleElements = new ArrayList<>(getUptimeSummaryCells());
+    visibleElements.addAll(getPartitionsSummaryCells());
+    return visibleElements;
+  }
+
+  private List<SelenideElement> getEnabledColumnHeaders() {
+    return Stream.of("Broker ID", "Segment Size", "Segment Count", "Port", "Host")
+        .map(name -> $x(String.format(columnHeaderLocator, name)))
+        .collect(Collectors.toList());
+  }
+
+  @Step
+  public List<SelenideElement> getAllEnabledElements() {
+    return getEnabledColumnHeaders();
+  }
+
+  private List<BrokersList.BrokerGridItem> initGridItems() {
+    List<BrokersList.BrokerGridItem> gridItemList = new ArrayList<>();
+    gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+        .forEach(item -> gridItemList.add(new BrokersList.BrokerGridItem(item)));
+    return gridItemList;
+  }
+
+  @Step
+  public BrokerGridItem getBrokerItem(int id) {
+    return initGridItems().stream()
+        .filter(e -> e.getId() == id)
+        .findFirst().orElseThrow();
+  }
+
+  @Step
+  public List<BrokerGridItem> getAllBrokers() {
+    return initGridItems();
+  }
+
+  public static class BrokerGridItem extends BasePage {
+
+    private final SelenideElement element;
+
+    public BrokerGridItem(SelenideElement element) {
+      this.element = element;
     }
     }
 
 
-    private List<SelenideElement> getPartitionsSummaryCells() {
-        return Stream.of("Online", "URP", "In Sync Replicas", "Out Of Sync Replicas")
-                .map(name -> $x(String.format(summaryCellLocator, name)))
-                .collect(Collectors.toList());
+    private SelenideElement getIdElm() {
+      return element.$x("./td[1]/div/a");
     }
     }
 
 
     @Step
     @Step
-    public List<SelenideElement> getAllVisibleElements() {
-        List<SelenideElement> visibleElements = new ArrayList<>(getUptimeSummaryCells());
-        visibleElements.addAll(getPartitionsSummaryCells());
-        return visibleElements;
-    }
-
-    private List<SelenideElement> getEnabledColumnHeaders() {
-        return Stream.of("Broker ID", "Segment Size", "Segment Count", "Port", "Host")
-                .map(name -> $x(String.format(columnHeaderLocator, name)))
-                .collect(Collectors.toList());
+    public int getId() {
+      return Integer.parseInt(getIdElm().getText().trim());
     }
     }
 
 
     @Step
     @Step
-    public List<SelenideElement> getAllEnabledElements() {
-        return getEnabledColumnHeaders();
+    public void openItem() {
+      getIdElm().click();
     }
     }
 
 
-    private List<BrokersList.BrokerGridItem> initGridItems() {
-        List<BrokersList.BrokerGridItem> gridItemList = new ArrayList<>();
-        gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
-                .forEach(item -> gridItemList.add(new BrokersList.BrokerGridItem(item)));
-        return gridItemList;
+    @Step
+    public int getSegmentSize() {
+      return Integer.parseInt(element.$x("./td[2]").getText().trim());
     }
     }
 
 
     @Step
     @Step
-    public BrokerGridItem getBrokerItem(int id) {
-        return initGridItems().stream()
-                .filter(e -> e.getId() == id)
-                .findFirst().orElseThrow();
+    public int getSegmentCount() {
+      return Integer.parseInt(element.$x("./td[3]").getText().trim());
     }
     }
 
 
     @Step
     @Step
-    public List<BrokerGridItem> getAllBrokers() {
-        return initGridItems();
+    public int getPort() {
+      return Integer.parseInt(element.$x("./td[4]").getText().trim());
     }
     }
 
 
-    public static class BrokerGridItem extends BasePage {
-
-        private final SelenideElement element;
-
-        public BrokerGridItem(SelenideElement element) {
-            this.element = element;
-        }
-
-        private SelenideElement getIdElm() {
-            return element.$x("./td[1]/div/a");
-        }
-
-        @Step
-        public int getId() {
-            return Integer.parseInt(getIdElm().getText().trim());
-        }
-
-        @Step
-        public void openItem() {
-            getIdElm().click();
-        }
-
-        @Step
-        public int getSegmentSize() {
-            return Integer.parseInt(element.$x("./td[2]").getText().trim());
-        }
-
-        @Step
-        public int getSegmentCount() {
-            return Integer.parseInt(element.$x("./td[3]").getText().trim());
-        }
-
-        @Step
-        public int getPort() {
-            return Integer.parseInt(element.$x("./td[4]").getText().trim());
-        }
-
-        @Step
-        public String getHost() {
-            return element.$x("./td[5]").getText().trim();
-        }
+    @Step
+    public String getHost() {
+      return element.$x("./td[5]").getText().trim();
     }
     }
+  }
 }
 }

+ 39 - 39
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/ConnectorCreateForm.java

@@ -1,49 +1,49 @@
 package com.provectus.kafka.ui.pages.connectors;
 package com.provectus.kafka.ui.pages.connectors;
 
 
+import static com.codeborne.selenide.Selenide.$x;
+
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
 
 
-import static com.codeborne.selenide.Selenide.$x;
-
 public class ConnectorCreateForm extends BasePage {
 public class ConnectorCreateForm extends BasePage {
 
 
-    protected SelenideElement nameField = $x("//input[@name='name']");
-    protected SelenideElement contentTextArea = $x("//textarea[@class='ace_text-input']");
-    protected SelenideElement configField = $x("//div[@id='config']");
-
-    @Step
-    public ConnectorCreateForm waitUntilScreenReady() {
-        waitUntilSpinnerDisappear();
-        nameField.shouldBe(Condition.visible);
-        return this;
-    }
-
-    @Step
-    public ConnectorCreateForm setName(String connectName) {
-        nameField.shouldBe(Condition.enabled).setValue(connectName);
-        return this;
-    }
-
-    @Step
-    public ConnectorCreateForm setConfig(String configJson) {
-        configField.shouldBe(Condition.enabled).click();
-        setJsonInputValue(contentTextArea, configJson);
-        return this;
-    }
-
-    @Step
-    public ConnectorCreateForm setConnectorDetails(String connectName, String configJson) {
-        setName(connectName);
-        setConfig(configJson);
-        return this;
-    }
-
-    @Step
-    public ConnectorCreateForm clickSubmitButton() {
-        clickSubmitBtn();
-        waitUntilSpinnerDisappear();
-        return this;
-    }
+  protected SelenideElement nameField = $x("//input[@name='name']");
+  protected SelenideElement contentTextArea = $x("//textarea[@class='ace_text-input']");
+  protected SelenideElement configField = $x("//div[@id='config']");
+
+  @Step
+  public ConnectorCreateForm waitUntilScreenReady() {
+    waitUntilSpinnerDisappear();
+    nameField.shouldBe(Condition.visible);
+    return this;
+  }
+
+  @Step
+  public ConnectorCreateForm setName(String connectName) {
+    nameField.shouldBe(Condition.enabled).setValue(connectName);
+    return this;
+  }
+
+  @Step
+  public ConnectorCreateForm setConfig(String configJson) {
+    configField.shouldBe(Condition.enabled).click();
+    setJsonInputValue(contentTextArea, configJson);
+    return this;
+  }
+
+  @Step
+  public ConnectorCreateForm setConnectorDetails(String connectName, String configJson) {
+    setName(connectName);
+    setConfig(configJson);
+    return this;
+  }
+
+  @Step
+  public ConnectorCreateForm clickSubmitButton() {
+    clickSubmitBtn();
+    waitUntilSpinnerDisappear();
+    return this;
+  }
 }
 }

+ 64 - 64
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/ConnectorDetails.java

@@ -1,84 +1,84 @@
 package com.provectus.kafka.ui.pages.connectors;
 package com.provectus.kafka.ui.pages.connectors;
 
 
+import static com.codeborne.selenide.Selenide.$x;
+
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
 
 
-import static com.codeborne.selenide.Selenide.$x;
-
 public class ConnectorDetails extends BasePage {
 public class ConnectorDetails extends BasePage {
 
 
-    protected SelenideElement deleteBtn = $x("//li/div[contains(text(),'Delete')]");
-    protected SelenideElement confirmBtnMdl = $x("//div[@role='dialog']//button[contains(text(),'Confirm')]");
-    protected SelenideElement contentTextArea = $x("//textarea[@class='ace_text-input']");
-    protected SelenideElement taskTab = $x("//a[contains(text(),'Tasks')]");
-    protected SelenideElement configTab = $x("//a[contains(text(),'Config')]");
-    protected SelenideElement configField = $x("//div[@id='config']");
-    protected String connectorHeaderLocator = "//h1[contains(text(),'%s')]";
+  protected SelenideElement deleteBtn = $x("//li/div[contains(text(),'Delete')]");
+  protected SelenideElement confirmBtnMdl = $x("//div[@role='dialog']//button[contains(text(),'Confirm')]");
+  protected SelenideElement contentTextArea = $x("//textarea[@class='ace_text-input']");
+  protected SelenideElement taskTab = $x("//a[contains(text(),'Tasks')]");
+  protected SelenideElement configTab = $x("//a[contains(text(),'Config')]");
+  protected SelenideElement configField = $x("//div[@id='config']");
+  protected String connectorHeaderLocator = "//h1[contains(text(),'%s')]";
 
 
-    @Step
-    public ConnectorDetails waitUntilScreenReady() {
-        waitUntilSpinnerDisappear();
-        dotMenuBtn.shouldBe(Condition.visible);
-        return this;
-    }
+  @Step
+  public ConnectorDetails waitUntilScreenReady() {
+    waitUntilSpinnerDisappear();
+    dotMenuBtn.shouldBe(Condition.visible);
+    return this;
+  }
 
 
-    @Step
-    public ConnectorDetails openConfigTab() {
-        clickByJavaScript(configTab);
-        return this;
-    }
+  @Step
+  public ConnectorDetails openConfigTab() {
+    clickByJavaScript(configTab);
+    return this;
+  }
 
 
-    @Step
-    public ConnectorDetails setConfig(String configJson) {
-        configField.shouldBe(Condition.enabled).click();
-        clearByKeyboard(contentTextArea);
-        contentTextArea.setValue(configJson);
-        configField.shouldBe(Condition.enabled).click();
-        return this;
-    }
+  @Step
+  public ConnectorDetails setConfig(String configJson) {
+    configField.shouldBe(Condition.enabled).click();
+    clearByKeyboard(contentTextArea);
+    contentTextArea.setValue(configJson);
+    configField.shouldBe(Condition.enabled).click();
+    return this;
+  }
 
 
-    @Step
-    public ConnectorDetails clickSubmitButton() {
-        clickSubmitBtn();
-        return this;
-    }
+  @Step
+  public ConnectorDetails clickSubmitButton() {
+    clickSubmitBtn();
+    return this;
+  }
 
 
-    @Step
-    public ConnectorDetails openDotMenu() {
-        clickByJavaScript(dotMenuBtn);
-        return this;
-    }
+  @Step
+  public ConnectorDetails openDotMenu() {
+    clickByJavaScript(dotMenuBtn);
+    return this;
+  }
 
 
-    @Step
-    public ConnectorDetails clickDeleteBtn() {
-        clickByJavaScript(deleteBtn);
-        return this;
-    }
+  @Step
+  public ConnectorDetails clickDeleteBtn() {
+    clickByJavaScript(deleteBtn);
+    return this;
+  }
 
 
-    @Step
-    public ConnectorDetails clickConfirmBtn() {
-        confirmBtnMdl.shouldBe(Condition.enabled).click();
-        confirmBtnMdl.shouldBe(Condition.disappear);
-        return this;
-    }
+  @Step
+  public ConnectorDetails clickConfirmBtn() {
+    confirmBtnMdl.shouldBe(Condition.enabled).click();
+    confirmBtnMdl.shouldBe(Condition.disappear);
+    return this;
+  }
 
 
-    @Step
-    public ConnectorDetails deleteConnector() {
-        openDotMenu();
-        clickDeleteBtn();
-        clickConfirmBtn();
-        return this;
-    }
+  @Step
+  public ConnectorDetails deleteConnector() {
+    openDotMenu();
+    clickDeleteBtn();
+    clickConfirmBtn();
+    return this;
+  }
 
 
-    @Step
-    public boolean isConnectorHeaderVisible(String connectorName) {
-        return isVisible($x(String.format(connectorHeaderLocator, connectorName)));
-    }
+  @Step
+  public boolean isConnectorHeaderVisible(String connectorName) {
+    return isVisible($x(String.format(connectorHeaderLocator, connectorName)));
+  }
 
 
-    @Step
-    public boolean isAlertWithMessageVisible(AlertHeader header, String message) {
-        return isAlertVisible(header, message);
-    }
+  @Step
+  public boolean isAlertWithMessageVisible(AlertHeader header, String message) {
+    return isAlertVisible(header, message);
+  }
 }
 }

+ 33 - 33
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/KafkaConnectList.java

@@ -1,44 +1,44 @@
 package com.provectus.kafka.ui.pages.connectors;
 package com.provectus.kafka.ui.pages.connectors;
 
 
+import static com.codeborne.selenide.Selenide.$x;
+import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KAFKA_CONNECT;
+
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
 
 
-import static com.codeborne.selenide.Selenide.$x;
-import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KAFKA_CONNECT;
-
 
 
 public class KafkaConnectList extends BasePage {
 public class KafkaConnectList extends BasePage {
 
 
-    protected SelenideElement createConnectorBtn = $x("//button[contains(text(),'Create Connector')]");
-
-    public KafkaConnectList() {
-        tableElementNameLocator = "//tbody//td[contains(text(),'%s')]";
-    }
-
-    @Step
-    public KafkaConnectList waitUntilScreenReady() {
-        waitUntilSpinnerDisappear();
-        getPageTitleFromHeader(KAFKA_CONNECT).shouldBe(Condition.visible);
-        return this;
-    }
-
-    @Step
-    public KafkaConnectList clickCreateConnectorBtn() {
-        clickByJavaScript(createConnectorBtn);
-        return this;
-    }
-
-    @Step
-    public KafkaConnectList openConnector(String connectorName) {
-        getTableElement(connectorName).shouldBe(Condition.enabled).click();
-        return this;
-    }
-
-    @Step
-    public boolean isConnectorVisible(String connectorName) {
-        tableGrid.shouldBe(Condition.visible);
-        return isVisible(getTableElement(connectorName));
-    }
+  protected SelenideElement createConnectorBtn = $x("//button[contains(text(),'Create Connector')]");
+
+  public KafkaConnectList() {
+    tableElementNameLocator = "//tbody//td[contains(text(),'%s')]";
+  }
+
+  @Step
+  public KafkaConnectList waitUntilScreenReady() {
+    waitUntilSpinnerDisappear();
+    getPageTitleFromHeader(KAFKA_CONNECT).shouldBe(Condition.visible);
+    return this;
+  }
+
+  @Step
+  public KafkaConnectList clickCreateConnectorBtn() {
+    clickByJavaScript(createConnectorBtn);
+    return this;
+  }
+
+  @Step
+  public KafkaConnectList openConnector(String connectorName) {
+    getTableElement(connectorName).shouldBe(Condition.enabled).click();
+    return this;
+  }
+
+  @Step
+  public boolean isConnectorVisible(String connectorName) {
+    tableGrid.shouldBe(Condition.visible);
+    return isVisible(getTableElement(connectorName));
+  }
 }
 }

+ 19 - 19
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersDetails.java

@@ -1,31 +1,31 @@
 package com.provectus.kafka.ui.pages.consumers;
 package com.provectus.kafka.ui.pages.consumers;
 
 
+import static com.codeborne.selenide.Selenide.$x;
+
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
 
 
-import static com.codeborne.selenide.Selenide.$x;
-
 public class ConsumersDetails extends BasePage {
 public class ConsumersDetails extends BasePage {
 
 
-    protected String consumerIdHeaderLocator = "//h1[contains(text(),'%s')]";
-    protected String topicElementLocator = "//tbody//td//a[text()='%s']";
+  protected String consumerIdHeaderLocator = "//h1[contains(text(),'%s')]";
+  protected String topicElementLocator = "//tbody//td//a[text()='%s']";
 
 
-    @Step
-    public ConsumersDetails waitUntilScreenReady() {
-        waitUntilSpinnerDisappear();
-        tableGrid.shouldBe(Condition.visible);
-        return this;
-    }
+  @Step
+  public ConsumersDetails waitUntilScreenReady() {
+    waitUntilSpinnerDisappear();
+    tableGrid.shouldBe(Condition.visible);
+    return this;
+  }
 
 
-    @Step
-    public boolean isRedirectedConsumerTitleVisible(String consumerGroupId) {
-        return isVisible($x(String.format(consumerIdHeaderLocator, consumerGroupId)));
-    }
+  @Step
+  public boolean isRedirectedConsumerTitleVisible(String consumerGroupId) {
+    return isVisible($x(String.format(consumerIdHeaderLocator, consumerGroupId)));
+  }
 
 
-    @Step
-    public boolean isTopicInConsumersDetailsVisible(String topicName) {
-        tableGrid.shouldBe(Condition.visible);
-        return isVisible($x(String.format(topicElementLocator, topicName)));
-    }
+  @Step
+  public boolean isTopicInConsumersDetailsVisible(String topicName) {
+    tableGrid.shouldBe(Condition.visible);
+    return isVisible($x(String.format(topicElementLocator, topicName)));
+  }
 }
 }

+ 8 - 8
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java

@@ -1,17 +1,17 @@
 package com.provectus.kafka.ui.pages.consumers;
 package com.provectus.kafka.ui.pages.consumers;
 
 
+import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.CONSUMERS;
+
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
 
 
-import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.CONSUMERS;
-
 public class ConsumersList extends BasePage {
 public class ConsumersList extends BasePage {
 
 
-    @Step
-    public ConsumersList waitUntilScreenReady() {
-        waitUntilSpinnerDisappear();
-        getPageTitleFromHeader(CONSUMERS).shouldBe(Condition.visible);
-        return this;
-    }
+  @Step
+  public ConsumersList waitUntilScreenReady() {
+    waitUntilSpinnerDisappear();
+    getPageTitleFromHeader(CONSUMERS).shouldBe(Condition.visible);
+    return this;
+  }
 }
 }

+ 0 - 139
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java

@@ -1,139 +0,0 @@
-package com.provectus.kafka.ui.pages.ksqlDb;
-
-import com.codeborne.selenide.CollectionCondition;
-import com.codeborne.selenide.Condition;
-import com.codeborne.selenide.SelenideElement;
-import com.provectus.kafka.ui.pages.BasePage;
-import com.provectus.kafka.ui.pages.ksqlDb.enums.KsqlMenuTabs;
-import io.qameta.allure.Step;
-import org.openqa.selenium.By;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import static com.codeborne.selenide.Selenide.$;
-import static com.codeborne.selenide.Selenide.$x;
-import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
-
-public class KsqlDbList extends BasePage {
-
-    protected SelenideElement executeKsqlBtn = $x("//button[text()='Execute KSQL Request']");
-    protected SelenideElement tablesTab = $x("//nav[@role='navigation']/a[text()='Tables']");
-    protected SelenideElement streamsTab = $x("//nav[@role='navigation']/a[text()='Streams']");
-
-    @Step
-    public KsqlDbList waitUntilScreenReady() {
-        waitUntilSpinnerDisappear();
-        getPageTitleFromHeader(KSQL_DB).shouldBe(Condition.visible);
-        return this;
-    }
-
-    @Step
-    public KsqlDbList clickExecuteKsqlRequestBtn() {
-        clickByJavaScript(executeKsqlBtn);
-        return this;
-    }
-
-    @Step
-    public KsqlDbList openDetailsTab(KsqlMenuTabs menu) {
-        $(By.linkText(menu.toString())).shouldBe(Condition.visible).click();
-        waitUntilSpinnerDisappear();
-        return this;
-    }
-
-    private List<KsqlDbList.KsqlTablesGridItem> initTablesItems() {
-        List<KsqlDbList.KsqlTablesGridItem> gridItemList = new ArrayList<>();
-        gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
-                .forEach(item -> gridItemList.add(new KsqlDbList.KsqlTablesGridItem(item)));
-        return gridItemList;
-    }
-
-    @Step
-    public KsqlDbList.KsqlTablesGridItem getTableByName(String tableName) {
-        return initTablesItems().stream()
-                .filter(e -> e.getTableName().equals(tableName))
-                .findFirst().orElseThrow();
-    }
-
-    private List<KsqlDbList.KsqlStreamsGridItem> initStreamsItems() {
-        List<KsqlDbList.KsqlStreamsGridItem> gridItemList = new ArrayList<>();
-        gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
-                .forEach(item -> gridItemList.add(new KsqlDbList.KsqlStreamsGridItem(item)));
-        return gridItemList;
-    }
-
-    @Step
-    public KsqlDbList.KsqlStreamsGridItem getStreamByName(String streamName) {
-        return initStreamsItems().stream()
-                .filter(e -> e.getStreamName().equals(streamName))
-                .findFirst().orElseThrow();
-    }
-
-    public static class KsqlTablesGridItem extends BasePage {
-
-        private final SelenideElement element;
-
-        public KsqlTablesGridItem(SelenideElement element) {
-            this.element = element;
-        }
-
-        @Step
-        public String getTableName() {
-            return element.$x("./td[1]").getText().trim();
-        }
-
-        @Step
-        public String getTopicName() {
-            return element.$x("./td[2]").getText().trim();
-        }
-
-        @Step
-        public String getKeyFormat() {
-            return element.$x("./td[3]").getText().trim();
-        }
-
-        @Step
-        public String getValueFormat() {
-            return element.$x("./td[4]").getText().trim();
-        }
-
-        @Step
-        public String getIsWindowed() {
-            return element.$x("./td[5]").getText().trim();
-        }
-    }
-
-    public static class KsqlStreamsGridItem extends BasePage {
-
-        private final SelenideElement element;
-
-        public KsqlStreamsGridItem(SelenideElement element) {
-            this.element = element;
-        }
-
-        @Step
-        public String getStreamName() {
-            return element.$x("./td[1]").getText().trim();
-        }
-
-        @Step
-        public String getTopicName() {
-            return element.$x("./td[2]").getText().trim();
-        }
-
-        @Step
-        public String getKeyFormat() {
-            return element.$x("./td[3]").getText().trim();
-        }
-
-        @Step
-        public String getValueFormat() {
-            return element.$x("./td[4]").getText().trim();
-        }
-
-        @Step
-        public String getIsWindowed() {
-            return element.$x("./td[5]").getText().trim();
-        }
-    }
-}

+ 0 - 154
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java

@@ -1,154 +0,0 @@
-package com.provectus.kafka.ui.pages.ksqlDb;
-
-import com.codeborne.selenide.CollectionCondition;
-import com.codeborne.selenide.Condition;
-import com.codeborne.selenide.ElementsCollection;
-import com.codeborne.selenide.SelenideElement;
-import com.provectus.kafka.ui.pages.BasePage;
-import io.qameta.allure.Step;
-
-import java.time.Duration;
-import java.util.ArrayList;
-import java.util.List;
-
-import static com.codeborne.selenide.Condition.visible;
-import static com.codeborne.selenide.Selenide.$$x;
-import static com.codeborne.selenide.Selenide.$x;
-
-public class KsqlQueryForm extends BasePage {
-    protected SelenideElement clearBtn = $x("//div/button[text()='Clear']");
-    protected SelenideElement executeBtn = $x("//div/button[text()='Execute']");
-    protected SelenideElement stopQueryBtn = $x("//div/button[text()='Stop query']");
-    protected SelenideElement clearResultsBtn = $x("//div/button[text()='Clear results']");
-    protected SelenideElement addStreamPropertyBtn = $x("//button[text()='Add Stream Property']");
-    protected SelenideElement queryAreaValue = $x("//div[@class='ace_content']");
-    protected SelenideElement queryArea = $x("//div[@id='ksql']/textarea[@class='ace_text-input']");
-    protected ElementsCollection ksqlGridItems = $$x("//tbody//tr");
-    protected ElementsCollection keyField = $$x("//input[@aria-label='key']");
-    protected ElementsCollection valueField = $$x("//input[@aria-label='value']");
-
-    @Step
-    public KsqlQueryForm waitUntilScreenReady() {
-        waitUntilSpinnerDisappear();
-        executeBtn.shouldBe(Condition.visible);
-        return this;
-    }
-
-    @Step
-    public KsqlQueryForm clickClearBtn() {
-        clickByJavaScript(clearBtn);
-        return this;
-    }
-
-    @Step
-    public KsqlQueryForm clickExecuteBtn() {
-        clickByActions(executeBtn);
-        if (queryAreaValue.getText().contains("EMIT CHANGES;")) {
-            loadingSpinner.shouldBe(Condition.visible);
-        } else {
-            waitUntilSpinnerDisappear();
-        }
-        return this;
-    }
-
-    @Step
-    public KsqlQueryForm clickStopQueryBtn() {
-        clickByActions(stopQueryBtn);
-        waitUntilSpinnerDisappear();
-        return this;
-    }
-
-    @Step
-    public KsqlQueryForm clickClearResultsBtn() {
-        clickByActions(clearResultsBtn);
-        waitUntilSpinnerDisappear();
-        return this;
-    }
-
-    @Step
-    public KsqlQueryForm clickAddStreamProperty() {
-        clickByJavaScript(addStreamPropertyBtn);
-        return this;
-    }
-
-    @Step
-    public KsqlQueryForm setQuery(String query) {
-        queryAreaValue.shouldBe(Condition.visible).click();
-        queryArea.setValue(query);
-        return this;
-    }
-
-    @Step
-    public KsqlQueryForm.KsqlResponseGridItem getTableByName(String name) {
-        return initItems().stream()
-                .filter(e -> e.getName().equalsIgnoreCase(name))
-                .findFirst().orElseThrow();
-    }
-
-    @Step
-    public boolean areResultsVisible() {
-        boolean visible = false;
-        try {
-            visible = initItems().size() > 0;
-        } catch (Throwable ignored) {
-        }
-        return visible;
-    }
-
-    private List<KsqlQueryForm.KsqlResponseGridItem> initItems() {
-        List<KsqlQueryForm.KsqlResponseGridItem> gridItemList = new ArrayList<>();
-        ksqlGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
-                .forEach(item -> gridItemList.add(new KsqlQueryForm.KsqlResponseGridItem(item)));
-        return gridItemList;
-    }
-
-    public static class KsqlResponseGridItem extends BasePage {
-
-        private final SelenideElement element;
-
-        private KsqlResponseGridItem(SelenideElement element) {
-            this.element = element;
-        }
-
-        @Step
-        public String getType() {
-            return element.$x("./td[1]").getText().trim();
-        }
-
-        @Step
-        public String getName() {
-            return element.$x("./td[2]").scrollTo().getText().trim();
-        }
-
-        @Step
-        public boolean isVisible() {
-            boolean isVisible = false;
-            try {
-                element.$x("./td[2]").shouldBe(visible, Duration.ofMillis(500));
-                isVisible = true;
-            } catch (Throwable ignored) {
-            }
-            return isVisible;
-        }
-
-        @Step
-        public String getTopic() {
-            return element.$x("./td[3]").getText().trim();
-        }
-
-        @Step
-        public String getKeyFormat() {
-            return element.$x("./td[4]").getText().trim();
-        }
-
-        @Step
-        public String getValueFormat() {
-            return element.$x("./td[5]").getText().trim();
-        }
-
-        @Step
-        public String getIsWindowed() {
-            return element.$x("./td[6]").getText().trim();
-        }
-    }
-}

+ 0 - 17
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/enums/KsqlMenuTabs.java

@@ -1,17 +0,0 @@
-package com.provectus.kafka.ui.pages.ksqlDb.enums;
-
-public enum KsqlMenuTabs {
-
-    TABLES("Table"),
-    STREAMS("Streams");
-
-    private final String value;
-
-    KsqlMenuTabs(String value) {
-        this.value = value;
-    }
-
-    public String toString() {
-        return value;
-    }
-}

+ 0 - 19
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/enums/KsqlQueryConfig.java

@@ -1,19 +0,0 @@
-package com.provectus.kafka.ui.pages.ksqlDb.enums;
-
-public enum KsqlQueryConfig {
-
-    SHOW_TABLES("show tables;"),
-    SHOW_STREAMS("show streams;"),
-    SELECT_ALL_FROM("SELECT * FROM %s\n" +
-            "EMIT CHANGES;");
-
-    private final String query;
-
-    KsqlQueryConfig(String query) {
-        this.query = query;
-    }
-
-    public String getQuery() {
-        return query;
-    }
-}

+ 138 - 0
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java

@@ -0,0 +1,138 @@
+package com.provectus.kafka.ui.pages.ksqldb;
+
+import static com.codeborne.selenide.Selenide.$;
+import static com.codeborne.selenide.Selenide.$x;
+import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
+
+import com.codeborne.selenide.CollectionCondition;
+import com.codeborne.selenide.Condition;
+import com.codeborne.selenide.SelenideElement;
+import com.provectus.kafka.ui.pages.BasePage;
+import com.provectus.kafka.ui.pages.ksqldb.enums.KsqlMenuTabs;
+import io.qameta.allure.Step;
+import java.util.ArrayList;
+import java.util.List;
+import org.openqa.selenium.By;
+
+public class KsqlDbList extends BasePage {
+
+  protected SelenideElement executeKsqlBtn = $x("//button[text()='Execute KSQL Request']");
+  protected SelenideElement tablesTab = $x("//nav[@role='navigation']/a[text()='Tables']");
+  protected SelenideElement streamsTab = $x("//nav[@role='navigation']/a[text()='Streams']");
+
+  @Step
+  public KsqlDbList waitUntilScreenReady() {
+    waitUntilSpinnerDisappear();
+    getPageTitleFromHeader(KSQL_DB).shouldBe(Condition.visible);
+    return this;
+  }
+
+  @Step
+  public KsqlDbList clickExecuteKsqlRequestBtn() {
+    clickByJavaScript(executeKsqlBtn);
+    return this;
+  }
+
+  @Step
+  public KsqlDbList openDetailsTab(KsqlMenuTabs menu) {
+    $(By.linkText(menu.toString())).shouldBe(Condition.visible).click();
+    waitUntilSpinnerDisappear();
+    return this;
+  }
+
+  private List<KsqlDbList.KsqlTablesGridItem> initTablesItems() {
+    List<KsqlDbList.KsqlTablesGridItem> gridItemList = new ArrayList<>();
+    gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+        .forEach(item -> gridItemList.add(new KsqlDbList.KsqlTablesGridItem(item)));
+    return gridItemList;
+  }
+
+  @Step
+  public KsqlDbList.KsqlTablesGridItem getTableByName(String tableName) {
+    return initTablesItems().stream()
+        .filter(e -> e.getTableName().equals(tableName))
+        .findFirst().orElseThrow();
+  }
+
+  private List<KsqlDbList.KsqlStreamsGridItem> initStreamsItems() {
+    List<KsqlDbList.KsqlStreamsGridItem> gridItemList = new ArrayList<>();
+    gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+        .forEach(item -> gridItemList.add(new KsqlDbList.KsqlStreamsGridItem(item)));
+    return gridItemList;
+  }
+
+  @Step
+  public KsqlDbList.KsqlStreamsGridItem getStreamByName(String streamName) {
+    return initStreamsItems().stream()
+        .filter(e -> e.getStreamName().equals(streamName))
+        .findFirst().orElseThrow();
+  }
+
+  public static class KsqlTablesGridItem extends BasePage {
+
+    private final SelenideElement element;
+
+    public KsqlTablesGridItem(SelenideElement element) {
+      this.element = element;
+    }
+
+    @Step
+    public String getTableName() {
+      return element.$x("./td[1]").getText().trim();
+    }
+
+    @Step
+    public String getTopicName() {
+      return element.$x("./td[2]").getText().trim();
+    }
+
+    @Step
+    public String getKeyFormat() {
+      return element.$x("./td[3]").getText().trim();
+    }
+
+    @Step
+    public String getValueFormat() {
+      return element.$x("./td[4]").getText().trim();
+    }
+
+    @Step
+    public String getIsWindowed() {
+      return element.$x("./td[5]").getText().trim();
+    }
+  }
+
+  public static class KsqlStreamsGridItem extends BasePage {
+
+    private final SelenideElement element;
+
+    public KsqlStreamsGridItem(SelenideElement element) {
+      this.element = element;
+    }
+
+    @Step
+    public String getStreamName() {
+      return element.$x("./td[1]").getText().trim();
+    }
+
+    @Step
+    public String getTopicName() {
+      return element.$x("./td[2]").getText().trim();
+    }
+
+    @Step
+    public String getKeyFormat() {
+      return element.$x("./td[3]").getText().trim();
+    }
+
+    @Step
+    public String getValueFormat() {
+      return element.$x("./td[4]").getText().trim();
+    }
+
+    @Step
+    public String getIsWindowed() {
+      return element.$x("./td[5]").getText().trim();
+    }
+  }
+}

+ 153 - 0
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java

@@ -0,0 +1,153 @@
+package com.provectus.kafka.ui.pages.ksqldb;
+
+import static com.codeborne.selenide.Condition.visible;
+import static com.codeborne.selenide.Selenide.$$x;
+import static com.codeborne.selenide.Selenide.$x;
+
+import com.codeborne.selenide.CollectionCondition;
+import com.codeborne.selenide.Condition;
+import com.codeborne.selenide.ElementsCollection;
+import com.codeborne.selenide.SelenideElement;
+import com.provectus.kafka.ui.pages.BasePage;
+import io.qameta.allure.Step;
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.List;
+
+public class KsqlQueryForm extends BasePage {
+  protected SelenideElement clearBtn = $x("//div/button[text()='Clear']");
+  protected SelenideElement executeBtn = $x("//div/button[text()='Execute']");
+  protected SelenideElement stopQueryBtn = $x("//div/button[text()='Stop query']");
+  protected SelenideElement clearResultsBtn = $x("//div/button[text()='Clear results']");
+  protected SelenideElement addStreamPropertyBtn = $x("//button[text()='Add Stream Property']");
+  protected SelenideElement queryAreaValue = $x("//div[@class='ace_content']");
+  protected SelenideElement queryArea = $x("//div[@id='ksql']/textarea[@class='ace_text-input']");
+  protected ElementsCollection ksqlGridItems = $$x("//tbody//tr");
+  protected ElementsCollection keyField = $$x("//input[@aria-label='key']");
+  protected ElementsCollection valueField = $$x("//input[@aria-label='value']");
+
+  @Step
+  public KsqlQueryForm waitUntilScreenReady() {
+    waitUntilSpinnerDisappear();
+    executeBtn.shouldBe(Condition.visible);
+    return this;
+  }
+
+  @Step
+  public KsqlQueryForm clickClearBtn() {
+    clickByJavaScript(clearBtn);
+    return this;
+  }
+
+  @Step
+  public KsqlQueryForm clickExecuteBtn() {
+    clickByActions(executeBtn);
+    if (queryAreaValue.getText().contains("EMIT CHANGES;")) {
+      loadingSpinner.shouldBe(Condition.visible);
+    } else {
+      waitUntilSpinnerDisappear();
+    }
+    return this;
+  }
+
+  @Step
+  public KsqlQueryForm clickStopQueryBtn() {
+    clickByActions(stopQueryBtn);
+    waitUntilSpinnerDisappear();
+    return this;
+  }
+
+  @Step
+  public KsqlQueryForm clickClearResultsBtn() {
+    clickByActions(clearResultsBtn);
+    waitUntilSpinnerDisappear();
+    return this;
+  }
+
+  @Step
+  public KsqlQueryForm clickAddStreamProperty() {
+    clickByJavaScript(addStreamPropertyBtn);
+    return this;
+  }
+
+  @Step
+  public KsqlQueryForm setQuery(String query) {
+    queryAreaValue.shouldBe(Condition.visible).click();
+    queryArea.setValue(query);
+    return this;
+  }
+
+  @Step
+  public KsqlQueryForm.KsqlResponseGridItem getTableByName(String name) {
+    return initItems().stream()
+        .filter(e -> e.getName().equalsIgnoreCase(name))
+        .findFirst().orElseThrow();
+  }
+
+  @Step
+  public boolean areResultsVisible() {
+    boolean visible = false;
+    try {
+      visible = initItems().size() > 0;
+    } catch (Throwable ignored) {
+    }
+    return visible;
+  }
+
+  private List<KsqlQueryForm.KsqlResponseGridItem> initItems() {
+    List<KsqlQueryForm.KsqlResponseGridItem> gridItemList = new ArrayList<>();
+    ksqlGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+        .forEach(item -> gridItemList.add(new KsqlQueryForm.KsqlResponseGridItem(item)));
+    return gridItemList;
+  }
+
+  public static class KsqlResponseGridItem extends BasePage {
+
+    private final SelenideElement element;
+
+    private KsqlResponseGridItem(SelenideElement element) {
+      this.element = element;
+    }
+
+    @Step
+    public String getType() {
+      return element.$x("./td[1]").getText().trim();
+    }
+
+    @Step
+    public String getName() {
+      return element.$x("./td[2]").scrollTo().getText().trim();
+    }
+
+    @Step
+    public boolean isVisible() {
+      boolean isVisible = false;
+      try {
+        element.$x("./td[2]").shouldBe(visible, Duration.ofMillis(500));
+        isVisible = true;
+      } catch (Throwable ignored) {
+      }
+      return isVisible;
+    }
+
+    @Step
+    public String getTopic() {
+      return element.$x("./td[3]").getText().trim();
+    }
+
+    @Step
+    public String getKeyFormat() {
+      return element.$x("./td[4]").getText().trim();
+    }
+
+    @Step
+    public String getValueFormat() {
+      return element.$x("./td[5]").getText().trim();
+    }
+
+    @Step
+    public String getIsWindowed() {
+      return element.$x("./td[6]").getText().trim();
+    }
+  }
+}

+ 17 - 0
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlMenuTabs.java

@@ -0,0 +1,17 @@
+package com.provectus.kafka.ui.pages.ksqldb.enums;
+
+public enum KsqlMenuTabs {
+
+  TABLES("Table"),
+  STREAMS("Streams");
+
+  private final String value;
+
+  KsqlMenuTabs(String value) {
+    this.value = value;
+  }
+
+  public String toString() {
+    return value;
+  }
+}

+ 18 - 0
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlQueryConfig.java

@@ -0,0 +1,18 @@
+package com.provectus.kafka.ui.pages.ksqldb.enums;
+
+public enum KsqlQueryConfig {
+
+  SHOW_TABLES("show tables;"),
+  SHOW_STREAMS("show streams;"),
+  SELECT_ALL_FROM("SELECT * FROM %s\n" + "EMIT CHANGES;");
+
+  private final String query;
+
+  KsqlQueryConfig(String query) {
+    this.query = query;
+  }
+
+  public String getQuery() {
+    return query;
+  }
+}

+ 2 - 2
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/models/Stream.java → kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Stream.java

@@ -1,4 +1,4 @@
-package com.provectus.kafka.ui.pages.ksqlDb.models;
+package com.provectus.kafka.ui.pages.ksqldb.models;
 
 
 import lombok.Data;
 import lombok.Data;
 import lombok.experimental.Accessors;
 import lombok.experimental.Accessors;
@@ -7,5 +7,5 @@ import lombok.experimental.Accessors;
 @Accessors(chain = true)
 @Accessors(chain = true)
 public class Stream {
 public class Stream {
 
 
-    private String name, topicName, valueFormat, partitions;
+  private String name, topicName, valueFormat, partitions;
 }
 }

+ 2 - 2
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/models/Table.java → kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Table.java

@@ -1,4 +1,4 @@
-package com.provectus.kafka.ui.pages.ksqlDb.models;
+package com.provectus.kafka.ui.pages.ksqldb.models;
 
 
 import lombok.Data;
 import lombok.Data;
 import lombok.experimental.Accessors;
 import lombok.experimental.Accessors;
@@ -7,5 +7,5 @@ import lombok.experimental.Accessors;
 @Accessors(chain = true)
 @Accessors(chain = true)
 public class Table {
 public class Table {
 
 
-    private String name, streamName;
+  private String name, streamName;
 }
 }

+ 47 - 48
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/panels/NaviSideBar.java

@@ -1,64 +1,63 @@
 package com.provectus.kafka.ui.pages.panels;
 package com.provectus.kafka.ui.pages.panels;
 
 
+import static com.codeborne.selenide.Selenide.$x;
+import static com.provectus.kafka.ui.settings.BaseSource.CLUSTER_NAME;
+
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
-import com.provectus.kafka.ui.pages.panels.enums.MenuItem;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
+import com.provectus.kafka.ui.pages.panels.enums.MenuItem;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
-
 import java.time.Duration;
 import java.time.Duration;
 import java.util.List;
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import java.util.stream.Stream;
 
 
-import static com.codeborne.selenide.Selenide.$x;
-import static com.provectus.kafka.ui.settings.BaseSource.CLUSTER_NAME;
-
 public class NaviSideBar extends BasePage {
 public class NaviSideBar extends BasePage {
 
 
-    protected SelenideElement dashboardMenuItem = $x("//a[@title='Dashboard']");
-    protected String sideMenuOptionElementLocator = ".//ul/li[contains(.,'%s')]";
-    protected String clusterElementLocator = "//aside/ul/li[contains(.,'%s')]";
-
-    private SelenideElement expandCluster(String clusterName) {
-        SelenideElement clusterElement = $x(String.format(clusterElementLocator, clusterName)).shouldBe(Condition.visible);
-        if (clusterElement.parent().$$x(".//ul").size() == 0) {
-            clickByActions(clusterElement);
-        }
-        return clusterElement;
-    }
-
-    @Step
-    public NaviSideBar waitUntilScreenReady() {
-        waitUntilSpinnerDisappear();
-        dashboardMenuItem.shouldBe(Condition.visible, Duration.ofSeconds(30));
-        return this;
-    }
-
-    @Step
-    public String getPagePath(MenuItem menuItem) {
-        return getPagePathFromHeader(menuItem)
-                .shouldBe(Condition.visible)
-                .getText().trim();
-    }
-
-    @Step
-    public NaviSideBar openSideMenu(String clusterName, MenuItem menuItem) {
-        clickByActions(expandCluster(clusterName).parent()
-                .$x(String.format(sideMenuOptionElementLocator, menuItem.getNaviTitle())));
-        return this;
-    }
-
-    @Step
-    public NaviSideBar openSideMenu(MenuItem menuItem) {
-        openSideMenu(CLUSTER_NAME, menuItem);
-        return this;
-    }
+  protected SelenideElement dashboardMenuItem = $x("//a[@title='Dashboard']");
+  protected String sideMenuOptionElementLocator = ".//ul/li[contains(.,'%s')]";
+  protected String clusterElementLocator = "//aside/ul/li[contains(.,'%s')]";
 
 
-    public List<SelenideElement> getAllMenuButtons() {
-        expandCluster(CLUSTER_NAME);
-        return Stream.of(MenuItem.values())
-                .map(menuItem -> $x(String.format(sideMenuOptionElementLocator, menuItem.getNaviTitle())))
-                .collect(Collectors.toList());
+  private SelenideElement expandCluster(String clusterName) {
+    SelenideElement clusterElement = $x(String.format(clusterElementLocator, clusterName)).shouldBe(Condition.visible);
+    if (clusterElement.parent().$$x(".//ul").size() == 0) {
+      clickByActions(clusterElement);
     }
     }
+    return clusterElement;
+  }
+
+  @Step
+  public NaviSideBar waitUntilScreenReady() {
+    waitUntilSpinnerDisappear();
+    dashboardMenuItem.shouldBe(Condition.visible, Duration.ofSeconds(30));
+    return this;
+  }
+
+  @Step
+  public String getPagePath(MenuItem menuItem) {
+    return getPagePathFromHeader(menuItem)
+        .shouldBe(Condition.visible)
+        .getText().trim();
+  }
+
+  @Step
+  public NaviSideBar openSideMenu(String clusterName, MenuItem menuItem) {
+    clickByActions(expandCluster(clusterName).parent()
+        .$x(String.format(sideMenuOptionElementLocator, menuItem.getNaviTitle())));
+    return this;
+  }
+
+  @Step
+  public NaviSideBar openSideMenu(MenuItem menuItem) {
+    openSideMenu(CLUSTER_NAME, menuItem);
+    return this;
+  }
+
+  public List<SelenideElement> getAllMenuButtons() {
+    expandCluster(CLUSTER_NAME);
+    return Stream.of(MenuItem.values())
+        .map(menuItem -> $x(String.format(sideMenuOptionElementLocator, menuItem.getNaviTitle())))
+        .collect(Collectors.toList());
+  }
 }
 }

+ 13 - 14
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/panels/TopPanel.java

@@ -1,26 +1,25 @@
 package com.provectus.kafka.ui.pages.panels;
 package com.provectus.kafka.ui.pages.panels;
 
 
+import static com.codeborne.selenide.Selenide.$x;
+
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
-
 import java.util.Arrays;
 import java.util.Arrays;
 import java.util.List;
 import java.util.List;
 
 
-import static com.codeborne.selenide.Selenide.$x;
-
 public class TopPanel extends BasePage {
 public class TopPanel extends BasePage {
 
 
-    protected SelenideElement kafkaLogo = $x("//a[contains(text(),'UI for Apache Kafka')]");
-    protected SelenideElement kafkaVersion = $x("//a[@title='Current commit']");
-    protected SelenideElement logOutBtn = $x("//button[contains(text(),'Log out')]");
-    protected SelenideElement gitBtn = $x("//a[@href='https://github.com/provectus/kafka-ui']");
-    protected SelenideElement discordBtn = $x("//a[contains(@href,'https://discord.com/invite')]");
+  protected SelenideElement kafkaLogo = $x("//a[contains(text(),'UI for Apache Kafka')]");
+  protected SelenideElement kafkaVersion = $x("//a[@title='Current commit']");
+  protected SelenideElement logOutBtn = $x("//button[contains(text(),'Log out')]");
+  protected SelenideElement gitBtn = $x("//a[@href='https://github.com/provectus/kafka-ui']");
+  protected SelenideElement discordBtn = $x("//a[contains(@href,'https://discord.com/invite')]");
 
 
-    public List<SelenideElement> getAllVisibleElements() {
-        return Arrays.asList(kafkaLogo, kafkaVersion, gitBtn, discordBtn);
-    }
+  public List<SelenideElement> getAllVisibleElements() {
+    return Arrays.asList(kafkaLogo, kafkaVersion, gitBtn, discordBtn);
+  }
 
 
-    public List<SelenideElement> getAllEnabledElements() {
-        return Arrays.asList(gitBtn, discordBtn, kafkaLogo);
-    }
+  public List<SelenideElement> getAllEnabledElements() {
+    return Arrays.asList(gitBtn, discordBtn, kafkaLogo);
+  }
 }
 }

+ 24 - 24
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/panels/enums/MenuItem.java

@@ -1,28 +1,28 @@
 package com.provectus.kafka.ui.pages.panels.enums;
 package com.provectus.kafka.ui.pages.panels.enums;
 
 
 public enum MenuItem {
 public enum MenuItem {
-    
-    DASHBOARD("Dashboard", "Dashboard"),
-    BROKERS("Brokers", "Brokers"),
-    TOPICS("Topics", "Topics"),
-    CONSUMERS("Consumers", "Consumers"),
-    SCHEMA_REGISTRY("Schema Registry", "Schema Registry"),
-    KAFKA_CONNECT("Kafka Connect", "Connectors"),
-    KSQL_DB("KSQL DB", "KSQL DB");
-    
-    private final String naviTitle;
-    private final String pageTitle;
-    
-    MenuItem(String naviTitle, String pageTitle) {
-        this.naviTitle = naviTitle;
-        this.pageTitle = pageTitle;
-    }
-    
-    public String getNaviTitle() {
-        return naviTitle;
-    }
-    
-    public String getPageTitle() {
-        return pageTitle;
-    }
+
+  DASHBOARD("Dashboard", "Dashboard"),
+  BROKERS("Brokers", "Brokers"),
+  TOPICS("Topics", "Topics"),
+  CONSUMERS("Consumers", "Consumers"),
+  SCHEMA_REGISTRY("Schema Registry", "Schema Registry"),
+  KAFKA_CONNECT("Kafka Connect", "Connectors"),
+  KSQL_DB("KSQL DB", "KSQL DB");
+
+  private final String naviTitle;
+  private final String pageTitle;
+
+  MenuItem(String naviTitle, String pageTitle) {
+    this.naviTitle = naviTitle;
+    this.pageTitle = pageTitle;
+  }
+
+  public String getNaviTitle() {
+    return naviTitle;
+  }
+
+  public String getPageTitle() {
+    return pageTitle;
+  }
 }
 }

+ 124 - 122
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaCreateForm.java

@@ -1,5 +1,10 @@
 package com.provectus.kafka.ui.pages.schemas;
 package com.provectus.kafka.ui.pages.schemas;
 
 
+import static com.codeborne.selenide.Selenide.$;
+import static com.codeborne.selenide.Selenide.$$x;
+import static com.codeborne.selenide.Selenide.$x;
+import static org.openqa.selenium.By.id;
+
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.WebDriverRunner;
 import com.codeborne.selenide.WebDriverRunner;
@@ -7,133 +12,130 @@ import com.provectus.kafka.ui.api.model.CompatibilityLevel;
 import com.provectus.kafka.ui.api.model.SchemaType;
 import com.provectus.kafka.ui.api.model.SchemaType;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
-import org.openqa.selenium.Keys;
-import org.openqa.selenium.interactions.Actions;
-
 import java.util.List;
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import java.util.stream.Stream;
-
-import static com.codeborne.selenide.Selenide.*;
-import static org.openqa.selenium.By.id;
+import org.openqa.selenium.Keys;
+import org.openqa.selenium.interactions.Actions;
 
 
 public class SchemaCreateForm extends BasePage {
 public class SchemaCreateForm extends BasePage {
 
 
-    protected SelenideElement schemaNameField = $x("//input[@name='subject']");
-    protected SelenideElement pageTitle = $x("//h1['Edit']");
-    protected SelenideElement schemaTextArea = $x("//textarea[@name='schema']");
-    protected SelenideElement newSchemaInput = $("#newSchema [wrap]");
-    protected SelenideElement schemaTypeDdl = $x("//ul[@name='schemaType']");
-    protected SelenideElement compatibilityLevelList = $x("//ul[@name='compatibilityLevel']");
-    protected SelenideElement newSchemaTextArea = $x("//div[@id='newSchema']");
-    protected SelenideElement latestSchemaTextArea = $x("//div[@id='latestSchema']");
-    protected SelenideElement leftVersionDdl = $(id("left-select"));
-    protected SelenideElement rightVersionDdl = $(id("right-select"));
-    protected List<SelenideElement> visibleMarkers = $$x("//div[@class='ace_scroller']//div[contains(@class,'codeMarker')]");
-    protected List<SelenideElement> elementsCompareVersionDdl = $$x("//ul[@role='listbox']/ul/li");
-    protected String ddlElementLocator = "//li[@value='%s']";
-
-    @Step
-    public SchemaCreateForm waitUntilScreenReady() {
-        waitUntilSpinnerDisappear();
-        pageTitle.shouldBe(Condition.visible);
-        return this;
-    }
-
-    @Step
-    public SchemaCreateForm setSubjectName(String name) {
-        schemaNameField.setValue(name);
-        return this;
-    }
-
-    @Step
-    public SchemaCreateForm setSchemaField(String text) {
-        schemaTextArea.setValue(text);
-        return this;
-    }
-
-    @Step
-    public SchemaCreateForm selectSchemaTypeFromDropdown(SchemaType schemaType) {
-        schemaTypeDdl.shouldBe(Condition.enabled).click();
-        $x(String.format(ddlElementLocator, schemaType.getValue())).shouldBe(Condition.visible).click();
-        return this;
-    }
-
-    @Step
-    public SchemaCreateForm clickSubmitButton() {
-        clickSubmitBtn();
-        return this;
-    }
-
-    @Step
-    public SchemaCreateForm selectCompatibilityLevelFromDropdown(CompatibilityLevel.CompatibilityEnum level) {
-        compatibilityLevelList.shouldBe(Condition.enabled).click();
-        $x(String.format(ddlElementLocator, level.getValue())).shouldBe(Condition.visible).click();
-        return this;
-    }
-
-    @Step
-    public SchemaCreateForm openLeftVersionDdl() {
-        leftVersionDdl.shouldBe(Condition.enabled).click();
-        return this;
-    }
-
-    @Step
-    public SchemaCreateForm openRightVersionDdl() {
-        rightVersionDdl.shouldBe(Condition.enabled).click();
-        return this;
-    }
-
-    @Step
-    public int getVersionsNumberFromList() {
-        return elementsCompareVersionDdl.size();
-    }
-
-    @Step
-    public SchemaCreateForm selectVersionFromDropDown(int versionNumberDd) {
-        $x(String.format(ddlElementLocator, versionNumberDd)).shouldBe(Condition.visible).click();
-        return this;
-    }
-
-    @Step
-    public int getMarkedLinesNumber() {
-        return visibleMarkers.size();
-    }
-
-    @Step
-    public SchemaCreateForm setNewSchemaValue(String configJson) {
-        newSchemaTextArea.shouldBe(Condition.visible).click();
-        newSchemaInput.shouldBe(Condition.enabled);
-        new Actions(WebDriverRunner.getWebDriver())
-                .sendKeys(Keys.PAGE_UP)
-                .keyDown(Keys.SHIFT)
-                .sendKeys(Keys.PAGE_DOWN)
-                .keyUp(Keys.SHIFT)
-                .sendKeys(Keys.DELETE)
-                .perform();
-        setJsonInputValue(newSchemaInput, configJson);
-        return this;
-    }
-
-    @Step
-    public List<SelenideElement> getAllDetailsPageElements() {
-        return Stream.of(compatibilityLevelList, newSchemaTextArea, latestSchemaTextArea, submitBtn, schemaTypeDdl)
-                .collect(Collectors.toList());
-    }
-
-    @Step
-    public boolean isSubmitBtnEnabled() {
-        return isEnabled(submitBtn);
-    }
-
-    @Step
-    public boolean isSchemaDropDownEnabled() {
-        boolean enabled = true;
-        try {
-            String attribute = schemaTypeDdl.getAttribute("disabled");
-            enabled = false;
-        } catch (Throwable ignored) {
-        }
-        return enabled;
+  protected SelenideElement schemaNameField = $x("//input[@name='subject']");
+  protected SelenideElement pageTitle = $x("//h1['Edit']");
+  protected SelenideElement schemaTextArea = $x("//textarea[@name='schema']");
+  protected SelenideElement newSchemaInput = $("#newSchema [wrap]");
+  protected SelenideElement schemaTypeDdl = $x("//ul[@name='schemaType']");
+  protected SelenideElement compatibilityLevelList = $x("//ul[@name='compatibilityLevel']");
+  protected SelenideElement newSchemaTextArea = $x("//div[@id='newSchema']");
+  protected SelenideElement latestSchemaTextArea = $x("//div[@id='latestSchema']");
+  protected SelenideElement leftVersionDdl = $(id("left-select"));
+  protected SelenideElement rightVersionDdl = $(id("right-select"));
+  protected List<SelenideElement> visibleMarkers =
+      $$x("//div[@class='ace_scroller']//div[contains(@class,'codeMarker')]");
+  protected List<SelenideElement> elementsCompareVersionDdl = $$x("//ul[@role='listbox']/ul/li");
+  protected String ddlElementLocator = "//li[@value='%s']";
+
+  @Step
+  public SchemaCreateForm waitUntilScreenReady() {
+    waitUntilSpinnerDisappear();
+    pageTitle.shouldBe(Condition.visible);
+    return this;
+  }
+
+  @Step
+  public SchemaCreateForm setSubjectName(String name) {
+    schemaNameField.setValue(name);
+    return this;
+  }
+
+  @Step
+  public SchemaCreateForm setSchemaField(String text) {
+    schemaTextArea.setValue(text);
+    return this;
+  }
+
+  @Step
+  public SchemaCreateForm selectSchemaTypeFromDropdown(SchemaType schemaType) {
+    schemaTypeDdl.shouldBe(Condition.enabled).click();
+    $x(String.format(ddlElementLocator, schemaType.getValue())).shouldBe(Condition.visible).click();
+    return this;
+  }
+
+  @Step
+  public SchemaCreateForm clickSubmitButton() {
+    clickSubmitBtn();
+    return this;
+  }
+
+  @Step
+  public SchemaCreateForm selectCompatibilityLevelFromDropdown(CompatibilityLevel.CompatibilityEnum level) {
+    compatibilityLevelList.shouldBe(Condition.enabled).click();
+    $x(String.format(ddlElementLocator, level.getValue())).shouldBe(Condition.visible).click();
+    return this;
+  }
+
+  @Step
+  public SchemaCreateForm openLeftVersionDdl() {
+    leftVersionDdl.shouldBe(Condition.enabled).click();
+    return this;
+  }
+
+  @Step
+  public SchemaCreateForm openRightVersionDdl() {
+    rightVersionDdl.shouldBe(Condition.enabled).click();
+    return this;
+  }
+
+  @Step
+  public int getVersionsNumberFromList() {
+    return elementsCompareVersionDdl.size();
+  }
+
+  @Step
+  public SchemaCreateForm selectVersionFromDropDown(int versionNumberDd) {
+    $x(String.format(ddlElementLocator, versionNumberDd)).shouldBe(Condition.visible).click();
+    return this;
+  }
+
+  @Step
+  public int getMarkedLinesNumber() {
+    return visibleMarkers.size();
+  }
+
+  @Step
+  public SchemaCreateForm setNewSchemaValue(String configJson) {
+    newSchemaTextArea.shouldBe(Condition.visible).click();
+    newSchemaInput.shouldBe(Condition.enabled);
+    new Actions(WebDriverRunner.getWebDriver())
+        .sendKeys(Keys.PAGE_UP)
+        .keyDown(Keys.SHIFT)
+        .sendKeys(Keys.PAGE_DOWN)
+        .keyUp(Keys.SHIFT)
+        .sendKeys(Keys.DELETE)
+        .perform();
+    setJsonInputValue(newSchemaInput, configJson);
+    return this;
+  }
+
+  @Step
+  public List<SelenideElement> getAllDetailsPageElements() {
+    return Stream.of(compatibilityLevelList, newSchemaTextArea, latestSchemaTextArea, submitBtn, schemaTypeDdl)
+        .collect(Collectors.toList());
+  }
+
+  @Step
+  public boolean isSubmitBtnEnabled() {
+    return isEnabled(submitBtn);
+  }
+
+  @Step
+  public boolean isSchemaDropDownEnabled() {
+    boolean enabled = true;
+    try {
+      String attribute = schemaTypeDdl.getAttribute("disabled");
+      enabled = false;
+    } catch (Throwable ignored) {
     }
     }
+    return enabled;
+  }
 }
 }

+ 51 - 51
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaDetails.java

@@ -1,69 +1,69 @@
 package com.provectus.kafka.ui.pages.schemas;
 package com.provectus.kafka.ui.pages.schemas;
 
 
+import static com.codeborne.selenide.Selenide.$x;
+
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
 
 
-import static com.codeborne.selenide.Selenide.$x;
-
 public class SchemaDetails extends BasePage {
 public class SchemaDetails extends BasePage {
 
 
-    protected SelenideElement actualVersionTextArea = $x("//div[@id='schema']");
-    protected SelenideElement compatibilityField = $x("//h4[contains(text(),'Compatibility')]/../p");
-    protected SelenideElement editSchemaBtn = $x("//button[contains(text(),'Edit Schema')]");
-    protected SelenideElement removeBtn = $x("//*[contains(text(),'Remove')]");
-    protected SelenideElement confirmBtn = $x("//div[@role='dialog']//button[contains(text(),'Confirm')]");
-    protected SelenideElement schemaTypeField = $x("//h4[contains(text(),'Type')]/../p");
-    protected SelenideElement latestVersionField = $x("//h4[contains(text(),'Latest version')]/../p");
-    protected SelenideElement compareVersionBtn = $x("//button[text()='Compare Versions']");
-    protected String schemaHeaderLocator = "//h1[contains(text(),'%s')]";
+  protected SelenideElement actualVersionTextArea = $x("//div[@id='schema']");
+  protected SelenideElement compatibilityField = $x("//h4[contains(text(),'Compatibility')]/../p");
+  protected SelenideElement editSchemaBtn = $x("//button[contains(text(),'Edit Schema')]");
+  protected SelenideElement removeBtn = $x("//*[contains(text(),'Remove')]");
+  protected SelenideElement confirmBtn = $x("//div[@role='dialog']//button[contains(text(),'Confirm')]");
+  protected SelenideElement schemaTypeField = $x("//h4[contains(text(),'Type')]/../p");
+  protected SelenideElement latestVersionField = $x("//h4[contains(text(),'Latest version')]/../p");
+  protected SelenideElement compareVersionBtn = $x("//button[text()='Compare Versions']");
+  protected String schemaHeaderLocator = "//h1[contains(text(),'%s')]";
 
 
-    @Step
-    public SchemaDetails waitUntilScreenReady() {
-        waitUntilSpinnerDisappear();
-        actualVersionTextArea.shouldBe(Condition.visible);
-        return this;
-    }
+  @Step
+  public SchemaDetails waitUntilScreenReady() {
+    waitUntilSpinnerDisappear();
+    actualVersionTextArea.shouldBe(Condition.visible);
+    return this;
+  }
 
 
-    @Step
-    public String getCompatibility() {
-        return compatibilityField.getText();
-    }
+  @Step
+  public String getCompatibility() {
+    return compatibilityField.getText();
+  }
 
 
-    @Step
-    public boolean isSchemaHeaderVisible(String schemaName) {
-        return isVisible($x(String.format(schemaHeaderLocator, schemaName)));
-    }
+  @Step
+  public boolean isSchemaHeaderVisible(String schemaName) {
+    return isVisible($x(String.format(schemaHeaderLocator, schemaName)));
+  }
 
 
-    @Step
-    public int getLatestVersion() {
-        return Integer.parseInt(latestVersionField.getText());
-    }
+  @Step
+  public int getLatestVersion() {
+    return Integer.parseInt(latestVersionField.getText());
+  }
 
 
-    @Step
-    public String getSchemaType() {
-        return schemaTypeField.getText();
-    }
+  @Step
+  public String getSchemaType() {
+    return schemaTypeField.getText();
+  }
 
 
-    @Step
-    public SchemaDetails openEditSchema() {
-        editSchemaBtn.shouldBe(Condition.visible).click();
-        return this;
-    }
+  @Step
+  public SchemaDetails openEditSchema() {
+    editSchemaBtn.shouldBe(Condition.visible).click();
+    return this;
+  }
 
 
-    @Step
-    public SchemaDetails openCompareVersionMenu() {
-        compareVersionBtn.shouldBe(Condition.enabled).click();
-        return this;
-    }
+  @Step
+  public SchemaDetails openCompareVersionMenu() {
+    compareVersionBtn.shouldBe(Condition.enabled).click();
+    return this;
+  }
 
 
-    @Step
-    public SchemaDetails removeSchema() {
-        clickByJavaScript(dotMenuBtn);
-        removeBtn.shouldBe(Condition.enabled).click();
-        confirmBtn.shouldBe(Condition.visible).click();
-        confirmBtn.shouldBe(Condition.disappear);
-        return this;
-    }
+  @Step
+  public SchemaDetails removeSchema() {
+    clickByJavaScript(dotMenuBtn);
+    removeBtn.shouldBe(Condition.enabled).click();
+    confirmBtn.shouldBe(Condition.visible).click();
+    confirmBtn.shouldBe(Condition.disappear);
+    return this;
+  }
 }
 }

+ 30 - 30
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaRegistryList.java

@@ -1,42 +1,42 @@
 package com.provectus.kafka.ui.pages.schemas;
 package com.provectus.kafka.ui.pages.schemas;
 
 
+import static com.codeborne.selenide.Selenide.$x;
+import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.SCHEMA_REGISTRY;
+
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
 
 
-import static com.codeborne.selenide.Selenide.$x;
-import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.SCHEMA_REGISTRY;
-
 public class SchemaRegistryList extends BasePage {
 public class SchemaRegistryList extends BasePage {
 
 
-    protected SelenideElement createSchemaBtn = $x("//button[contains(text(),'Create Schema')]");
-
-    @Step
-    public SchemaRegistryList waitUntilScreenReady() {
-        waitUntilSpinnerDisappear();
-        getPageTitleFromHeader(SCHEMA_REGISTRY).shouldBe(Condition.visible);
-        return this;
-    }
-
-    @Step
-    public SchemaRegistryList clickCreateSchema() {
-        clickByJavaScript(createSchemaBtn);
-        return this;
-    }
-
-    @Step
-    public SchemaRegistryList openSchema(String schemaName) {
-        getTableElement(schemaName)
-                .shouldBe(Condition.enabled).click();
-        return this;
-    }
-
-    @Step
-    public boolean isSchemaVisible(String schemaName) {
-        tableGrid.shouldBe(Condition.visible);
-        return isVisible(getTableElement(schemaName));
-    }
+  protected SelenideElement createSchemaBtn = $x("//button[contains(text(),'Create Schema')]");
+
+  @Step
+  public SchemaRegistryList waitUntilScreenReady() {
+    waitUntilSpinnerDisappear();
+    getPageTitleFromHeader(SCHEMA_REGISTRY).shouldBe(Condition.visible);
+    return this;
+  }
+
+  @Step
+  public SchemaRegistryList clickCreateSchema() {
+    clickByJavaScript(createSchemaBtn);
+    return this;
+  }
+
+  @Step
+  public SchemaRegistryList openSchema(String schemaName) {
+    getTableElement(schemaName)
+        .shouldBe(Condition.enabled).click();
+    return this;
+  }
+
+  @Step
+  public boolean isSchemaVisible(String schemaName) {
+    tableGrid.shouldBe(Condition.visible);
+    return isVisible(getTableElement(schemaName));
+  }
 }
 }
 
 
 
 

+ 45 - 46
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/ProduceMessagePanel.java

@@ -1,57 +1,56 @@
 package com.provectus.kafka.ui.pages.topics;
 package com.provectus.kafka.ui.pages.topics;
 
 
+import static com.codeborne.selenide.Selenide.$x;
+import static com.codeborne.selenide.Selenide.refresh;
+
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.Condition;
 import com.codeborne.selenide.SelenideElement;
 import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.BasePage;
 import io.qameta.allure.Step;
 import io.qameta.allure.Step;
-
 import java.util.Arrays;
 import java.util.Arrays;
 
 
-import static com.codeborne.selenide.Selenide.$x;
-import static com.codeborne.selenide.Selenide.refresh;
-
 public class ProduceMessagePanel extends BasePage {
 public class ProduceMessagePanel extends BasePage {
 
 
-    protected SelenideElement keyTextArea = $x("//div[@id='key']/textarea");
-    protected SelenideElement contentTextArea = $x("//div[@id='content']/textarea");
-    protected SelenideElement headersTextArea = $x("//div[@id='headers']/textarea");
-    protected SelenideElement submitBtn = headersTextArea.$x("../../../..//button[@type='submit']");
-    protected SelenideElement partitionDdl = $x("//ul[@name='partition']");
-    protected SelenideElement keySerdeDdl = $x("//ul[@name='keySerde']");
-    protected SelenideElement contentSerdeDdl = $x("//ul[@name='valueSerde']");
-
-    @Step
-    public ProduceMessagePanel waitUntilScreenReady() {
-        waitUntilSpinnerDisappear();
-        Arrays.asList(partitionDdl, keySerdeDdl, contentSerdeDdl).forEach(element -> element.shouldBe(Condition.visible));
-        return this;
-    }
-
-    @Step
-    public ProduceMessagePanel setKeyField(String value) {
-        clearByKeyboard(keyTextArea);
-        keyTextArea.setValue(value);
-        return this;
-    }
-
-    @Step
-    public ProduceMessagePanel setContentFiled(String value) {
-        clearByKeyboard(contentTextArea);
-        contentTextArea.setValue(value);
-        return this;
-    }
-
-    @Step
-    public ProduceMessagePanel setHeaderFiled(String value) {
-        headersTextArea.setValue(value);
-        return this;
-    }
-
-    @Step
-    public ProduceMessagePanel submitProduceMessage() {
-        clickByActions(submitBtn);
-        submitBtn.shouldBe(Condition.disappear);
-        refresh();
-        return this;
-    }
+  protected SelenideElement keyTextArea = $x("//div[@id='key']/textarea");
+  protected SelenideElement contentTextArea = $x("//div[@id='content']/textarea");
+  protected SelenideElement headersTextArea = $x("//div[@id='headers']/textarea");
+  protected SelenideElement submitBtn = headersTextArea.$x("../../../..//button[@type='submit']");
+  protected SelenideElement partitionDdl = $x("//ul[@name='partition']");
+  protected SelenideElement keySerdeDdl = $x("//ul[@name='keySerde']");
+  protected SelenideElement contentSerdeDdl = $x("//ul[@name='valueSerde']");
+
+  @Step
+  public ProduceMessagePanel waitUntilScreenReady() {
+    waitUntilSpinnerDisappear();
+    Arrays.asList(partitionDdl, keySerdeDdl, contentSerdeDdl).forEach(element -> element.shouldBe(Condition.visible));
+    return this;
+  }
+
+  @Step
+  public ProduceMessagePanel setKeyField(String value) {
+    clearByKeyboard(keyTextArea);
+    keyTextArea.setValue(value);
+    return this;
+  }
+
+  @Step
+  public ProduceMessagePanel setContentFiled(String value) {
+    clearByKeyboard(contentTextArea);
+    contentTextArea.setValue(value);
+    return this;
+  }
+
+  @Step
+  public ProduceMessagePanel setHeaderFiled(String value) {
+    headersTextArea.setValue(value);
+    return this;
+  }
+
+  @Step
+  public ProduceMessagePanel submitProduceMessage() {
+    clickByActions(submitBtn);
+    submitBtn.shouldBe(Condition.disappear);
+    refresh();
+    return this;
+  }
 }
 }

Some files were not shown because too many files changed in this diff