
Merge branch 'master' of github.com:provectus/kafka-ui into ISSUE_754_acl

 Conflicts:
	kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java
iliax, 2 years ago
Parent
Commit
be80920afe
54 changed files with 443 additions and 1190 deletions
  1. CONTRIBUTING.md (+4 -2)
  2. README.md (+54 -131)
  3. charts/kafka-ui/Chart.yaml (+2 -2)
  4. charts/kafka-ui/README.md (+1 -34)
  5. docker-compose.md (+0 -43)
  6. documentation/guides/AWS_IAM.md (+0 -41)
  7. documentation/guides/DataMasking.md (+0 -123)
  8. documentation/guides/Protobuf.md (+0 -55)
  9. documentation/guides/SASL_SCRAM.md (+0 -58)
  10. documentation/guides/SECURE_BROKER.md (+0 -7)
  11. documentation/guides/SSO.md (+0 -71)
  12. documentation/guides/Serialization.md (+0 -167)
  13. documentation/project/ROADMAP.md (+0 -22)
  14. documentation/project/contributing/README.md (+0 -8)
  15. documentation/project/contributing/building-and-running-without-docker.md (+0 -24)
  16. documentation/project/contributing/building.md (+0 -63)
  17. documentation/project/contributing/prerequisites.md (+0 -42)
  18. documentation/project/contributing/set-up-git.md (+0 -8)
  19. documentation/project/contributing/testing.md (+0 -28)
  20. helm_chart.md (+0 -65)
  21. kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java (+1 -13)
  22. kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java (+19 -0)
  23. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java (+19 -0)
  24. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java (+2 -1)
  25. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java (+18 -3)
  26. kafka-ui-api/src/main/resources/application-local.yml (+38 -22)
  27. kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml (+1 -0)
  28. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java (+6 -6)
  29. kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/backlog/SmokeBacklog.java (+14 -0)
  30. kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java (+17 -4)
  31. kafka-ui-react-app/src/components/ConsumerGroups/List.tsx (+1 -1)
  32. kafka-ui-react-app/src/components/ErrorPage/ErrorPage.styled.ts (+2 -1)
  33. kafka-ui-react-app/src/components/ErrorPage/ErrorPage.tsx (+1 -1)
  34. kafka-ui-react-app/src/components/Schemas/Edit/Form.tsx (+1 -1)
  35. kafka-ui-react-app/src/components/Topics/List/BatchActionsBar.tsx (+16 -3)
  36. kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx (+7 -2)
  37. kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx (+1 -5)
  38. kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx (+10 -11)
  39. kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/__tests__/MessageContent.spec.tsx (+30 -13)
  40. kafka-ui-react-app/src/components/common/Button/Button.tsx (+14 -1)
  41. kafka-ui-react-app/src/components/common/Button/__tests__/Button.spec.tsx (+6 -0)
  42. kafka-ui-react-app/src/components/common/Editor/Editor.tsx (+3 -1)
  43. kafka-ui-react-app/src/components/common/EditorViewer/EditorViewer.styled.ts (+1 -1)
  44. kafka-ui-react-app/src/components/common/NewTable/Table.tsx (+4 -4)
  45. kafka-ui-react-app/src/components/common/PageLoader/PageLoader.styled.ts (+1 -21)
  46. kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx (+2 -1)
  47. kafka-ui-react-app/src/components/common/Select/ControlledSelect.tsx (+1 -0)
  48. kafka-ui-react-app/src/components/common/Select/Select.tsx (+86 -77)
  49. kafka-ui-react-app/src/components/common/Spinner/Spinner.styled.ts (+26 -0)
  50. kafka-ui-react-app/src/components/common/Spinner/Spinner.tsx (+20 -0)
  51. kafka-ui-react-app/src/components/common/Spinner/types.ts (+6 -0)
  52. kafka-ui-react-app/src/lib/hooks/api/ksqlDb.tsx (+3 -1)
  53. kafka-ui-react-app/src/lib/hooks/api/topicMessages.tsx (+3 -1)
  54. kafka-ui-react-app/src/widgets/ClusterConfigForm/index.tsx (+2 -1)

+ 4 - 2
CONTRIBUTING.md

@@ -1,3 +1,5 @@
+This guide is an exact copy of the same documented located [in our official docs](https://docs.kafka-ui.provectus.io/development/contributing). If there are any differences between the documents, the one located in our official docs should prevail.
+
 This guide aims to walk you through the process of working on issues and Pull Requests (PRs).
 
 Bear in mind that you will not be able to complete some steps on your own if you do not have a “write” permission. Feel free to reach out to the maintainers to help you unlock these activities.
@@ -20,7 +22,7 @@ You also need to consider labels. You can sort the issues by scope labels, such
 ## Grabbing the issue
 
 There is a bunch of criteria that make an issue feasible for development. <br/>
-The implementation of any features and/or their enhancements should be reasonable, must be backed by justified requirements (demanded by the community, [roadmap](documentation/project/ROADMAP.md) plans, etc.). The final decision is left for the maintainers' discretion.
+The implementation of any features and/or their enhancements should be reasonable, must be backed by justified requirements (demanded by the community, [roadmap](https://docs.kafka-ui.provectus.io/project/roadmap) plans, etc.). The final decision is left for the maintainers' discretion.
 
 All bugs should be confirmed as such (i.e. the behavior is unintended).
 
@@ -39,7 +41,7 @@ To keep the status of the issue clear to everyone, please keep the card's status
 
 ## Setting up a local development environment
 
-Please refer to [this guide](documentation/project/contributing/README.md).
+Please refer to [this guide](https://docs.kafka-ui.provectus.io/development/contributing).
 
 # Pull Requests

+ 54 - 131
README.md

@@ -1,21 +1,31 @@
 ![UI for Apache Kafka logo](documentation/images/kafka-ui-logo.png) UI for Apache Kafka&nbsp;
 ------------------
 #### Versatile, fast and lightweight web UI for managing Apache Kafka® clusters. Built by developers, for developers.
+<br/>
 
 [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://github.com/provectus/kafka-ui/blob/master/LICENSE)
 ![UI for Apache Kafka Price Free](documentation/images/free-open-source.svg)
 [![Release version](https://img.shields.io/github/v/release/provectus/kafka-ui)](https://github.com/provectus/kafka-ui/releases)
 [![Chat with us](https://img.shields.io/discord/897805035122077716)](https://discord.gg/4DWzD7pGE5)
+[![Docker pulls](https://img.shields.io/docker/pulls/provectuslabs/kafka-ui)](https://hub.docker.com/r/provectuslabs/kafka-ui)
 
-### DISCLAIMER
-<em>UI for Apache Kafka is a free tool built and supported by the open-source community. Curated by Provectus, it will remain free and open-source, without any paid features or subscription plans to be added in the future.
-Looking for the help of Kafka experts? Provectus can help you design, build, deploy, and manage Apache Kafka clusters and streaming applications. Discover [Professional Services for Apache Kafka](https://provectus.com/professional-services-apache-kafka/), to unlock the full potential of Kafka in your enterprise! </em>
-
+<p align="center">
+    <a href="https://docs.kafka-ui.provectus.io/">DOCS</a> • 
+    <a href="https://docs.kafka-ui.provectus.io/configuration/quick-start">QUICK START</a> • 
+    <a href="https://discord.gg/4DWzD7pGE5">COMMUNITY DISCORD</a>
+    <br/>
+    <a href="https://aws.amazon.com/marketplace/pp/prodview-ogtt5hfhzkq6a">AWS Marketplace</a>  •
+    <a href="https://www.producthunt.com/products/ui-for-apache-kafka/reviews/new">ProductHunt</a>
+</p>
 
 #### UI for Apache Kafka is a free, open-source web UI to monitor and manage Apache Kafka clusters.
 
 UI for Apache Kafka is a simple tool that makes your data flows observable, helps find and troubleshoot issues faster and deliver optimal performance. Its lightweight dashboard makes it easy to track key metrics of your Kafka clusters - Brokers, Topics, Partitions, Production, and Consumption.
 
+### DISCLAIMER
+<em>UI for Apache Kafka is a free tool built and supported by the open-source community. Curated by Provectus, it will remain free and open-source, without any paid features or subscription plans to be added in the future.
+Looking for the help of Kafka experts? Provectus can help you design, build, deploy, and manage Apache Kafka clusters and streaming applications. Discover [Professional Services for Apache Kafka](https://provectus.com/professional-services-apache-kafka/), to unlock the full potential of Kafka in your enterprise! </em>
+
 Set up UI for Apache Kafka with just a couple of easy commands to visualize your Kafka data in a comprehensible way. You can run the tool locally or in
 the cloud.
 
@@ -29,10 +39,10 @@ the cloud.
 * **View Consumer Groups** — view per-partition parked offsets, combined and per-partition lag
 * **Browse Messages** — browse messages with JSON, plain text, and Avro encoding
 * **Dynamic Topic Configuration** — create and configure new topics with dynamic configuration
-* **Configurable Authentification** — secure your installation with optional Github/Gitlab/Google OAuth 2.0
-* **Custom serialization/deserialization plugins** - use a ready-to-go serde for your data like AWS Glue or Smile, or code your own!
-* **Role based access control** - [manage permissions](https://github.com/provectus/kafka-ui/wiki/RBAC-(role-based-access-control)) to access the UI with granular precision
-* **Data masking** - [obfuscate](https://github.com/provectus/kafka-ui/blob/master/documentation/guides/DataMasking.md) sensitive data in topic messages
+* **Configurable Authentification** — [secure](https://docs.kafka-ui.provectus.io/configuration/authentication) your installation with optional Github/Gitlab/Google OAuth 2.0
+* **Custom serialization/deserialization plugins** - [use](https://docs.kafka-ui.provectus.io/configuration/serialization-serde) a ready-to-go serde for your data like AWS Glue or Smile, or code your own!
+* **Role based access control** - [manage permissions](https://docs.kafka-ui.provectus.io/configuration/rbac-role-based-access-control) to access the UI with granular precision
+* **Data masking** - [obfuscate](https://docs.kafka-ui.provectus.io/configuration/data-masking) sensitive data in topic messages
 
 # The Interface
 UI for Apache Kafka wraps major functions of Apache Kafka with an intuitive user interface.
@@ -60,155 +70,68 @@ There are 3 supported types of schemas: Avro®, JSON Schema, and Protobuf schema
 
 ![Create Schema Registry](documentation/images/Create_schema.gif)
 
-Before producing avro-encoded messages, you have to add an avro schema for the topic in Schema Registry. Now all these steps are easy to do
+Before producing avro/protobuf encoded messages, you have to add a schema for the topic in Schema Registry. Now all these steps are easy to do
 with a few clicks in a user-friendly interface.
 
 ![Avro Schema Topic](documentation/images/Schema_Topic.gif)
 
 # Getting Started
 
-To run UI for Apache Kafka, you can use a pre-built Docker image or build it locally.
-
-## Configuration
-
-We have plenty of [docker-compose files](documentation/compose/DOCKER_COMPOSE.md) as examples. They're built for various configuration stacks.
-
-# Guides
-
-- [SSO configuration](documentation/guides/SSO.md)
-- [AWS IAM configuration](documentation/guides/AWS_IAM.md)
-- [Docker-compose files](documentation/compose/DOCKER_COMPOSE.md)
-- [Connection to a secure broker](documentation/guides/SECURE_BROKER.md)
-- [Configure seriliazation/deserialization plugins or code your own](documentation/guides/Serialization.md)
+To run UI for Apache Kafka, you can use either a pre-built Docker image or build it (or a jar file) yourself.
 
-### Configuration File
-Example of how to configure clusters in the [application-local.yml](https://github.com/provectus/kafka-ui/blob/master/kafka-ui-api/src/main/resources/application-local.yml) configuration file:
+## Quick start (Demo run)
 
-
-```sh
-kafka:
-  clusters:
-    -
-      name: local
-      bootstrapServers: localhost:29091
-      schemaRegistry: http://localhost:8085
-      schemaRegistryAuth:
-        username: username
-        password: password
-#     schemaNameTemplate: "%s-value"
-      metrics:
-        port: 9997
-        type: JMX
-    -
+```
+docker run -it -p 8080:8080 -e DYNAMIC_CONFIG_ENABLED=true provectuslabs/kafka-ui
 ```
 
-* `name`: cluster name
-* `bootstrapServers`: where to connect
-* `schemaRegistry`: schemaRegistry's address
-* `schemaRegistryAuth.username`: schemaRegistry's basic authentication username
-* `schemaRegistryAuth.password`: schemaRegistry's basic authentication password
-* `schemaNameTemplate`: how keys are saved to schemaRegistry
-* `metrics.port`: open JMX port of a broker
-* `metrics.type`: Type of metrics, either JMX or PROMETHEUS. Defaulted to JMX.
-* `readOnly`: enable read only mode
-
-Configure as many clusters as you need by adding their configs below separated with `-`.
-
-## Running a Docker Image
-The official Docker image for UI for Apache Kafka is hosted here: [hub.docker.com/r/provectuslabs/kafka-ui](https://hub.docker.com/r/provectuslabs/kafka-ui).
+Then access the web UI at [http://localhost:8080](http://localhost:8080)
 
-Launch Docker container in the background:
-```sh
+The command is sufficient to try things out. When you're done trying things out, you can proceed with a [persistent installation](https://docs.kafka-ui.provectus.io/configuration/quick-start#persistent-start)
 
-docker run -p 8080:8080 \
-	-e KAFKA_CLUSTERS_0_NAME=local \
-	-e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092 \
-	-d provectuslabs/kafka-ui:latest
+## Persistent installation
 
 ```
-Then access the web UI at [http://localhost:8080](http://localhost:8080).
-Further configuration with environment variables - [see environment variables](#env_variables)
-
-### Docker Compose
-
-If you prefer to use `docker-compose` please refer to the [documentation](docker-compose.md).
-
-### Helm chart
-Helm chart could be found under [charts/kafka-ui](https://github.com/provectus/kafka-ui/tree/master/charts/kafka-ui) directory
+services:
+  kafka-ui:
+    container_name: kafka-ui
+    image: provectuslabs/kafka-ui:latest
+    ports:
+      - 8080:8080
+    environment:
+      DYNAMIC_CONFIG_ENABLED: true
+    volumes:
+      - ~/kui/config.yml:/etc/kafkaui/dynamic_config.yaml
+```
 
-Quick-start instruction [here](helm_chart.md)
+Please refer to our [configuration](https://docs.kafka-ui.provectus.io/configuration/quick-start) page to proceed with further app configuration.
 
-## Building With Docker
+## Some useful configuration related links
 
-### Prerequisites
+[Web UI Cluster Configuration Wizard](https://docs.kafka-ui.provectus.io/configuration/configuration-wizard)
 
-Check [prerequisites.md](documentation/project/contributing/prerequisites.md)
+[Configuration file explanation](https://docs.kafka-ui.provectus.io/configuration/configuration-file)
 
-### Building and Running
+[Docker Compose examples](https://docs.kafka-ui.provectus.io/configuration/compose-examples)
 
-Check [building.md](documentation/project/contributing/building.md)
+[Misc configuration properties](https://docs.kafka-ui.provectus.io/configuration/misc-configuration-properties)
 
-## Building Without Docker
+## Helm charts
 
-### Prerequisites
+[Quick start](https://docs.kafka-ui.provectus.io/configuration/helm-charts/quick-start)
 
-[Prerequisites](documentation/project/contributing/prerequisites.md) will mostly remain the same with the exception of docker.
+## Building from sources
 
-### Running without Building
+[Quick start](https://docs.kafka-ui.provectus.io/development/building/prerequisites) with building
 
-[How to run quickly without building](documentation/project/contributing/building-and-running-without-docker.md#run_without_docker_quickly)
+## Liveliness and readiness probes
+Liveliness and readiness endpoint is at `/actuator/health`.<br/>
+Info endpoint (build info) is located at `/actuator/info`.
 
-### Building and Running
+# Configuration options
 
-[How to build and run](documentation/project/contributing/building-and-running-without-docker.md#build_and_run_without_docker)
+All of the environment variables/config properties could be found [here](https://docs.kafka-ui.provectus.io/configuration/misc-configuration-properties).
 
-## Liveliness and readiness probes
-Liveliness and readiness endpoint is at `/actuator/health`.
-Info endpoint (build info) is located at `/actuator/info`.
+# Contributing
 
-## <a name="env_variables"></a> Environment Variables
-
-Alternatively, each variable of the .yml file can be set with an environment variable.
-For example, if you want to use an environment variable to set the `name` parameter, you can write it like this: `KAFKA_CLUSTERS_2_NAME`
-
-|Name               	|Description
-|-----------------------|-------------------------------
-|`SERVER_SERVLET_CONTEXT_PATH` | URI basePath
-|`LOGGING_LEVEL_ROOT`        	| Setting log level (trace, debug, info, warn, error). Default: info
-|`LOGGING_LEVEL_COM_PROVECTUS` |Setting log level (trace, debug, info, warn, error). Default: debug
-|`SERVER_PORT` |Port for the embedded server. Default: `8080`
-|`KAFKA_ADMIN-CLIENT-TIMEOUT` | Kafka API timeout in ms. Default: `30000`
-|`KAFKA_CLUSTERS_0_NAME` | Cluster name
-|`KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS` 	|Address where to connect
-|`KAFKA_CLUSTERS_0_KSQLDBSERVER` 	| KSQL DB server address
-|`KAFKA_CLUSTERS_0_KSQLDBSERVERAUTH_USERNAME` 	| KSQL DB server's basic authentication username
-|`KAFKA_CLUSTERS_0_KSQLDBSERVERAUTH_PASSWORD` 	| KSQL DB server's basic authentication password
-|`KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_KEYSTORELOCATION`   	|Path to the JKS keystore to communicate to KSQL DB
-|`KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_KEYSTOREPASSWORD`   	|Password of the JKS keystore for KSQL DB
-|`KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL` 	|Security protocol to connect to the brokers. For SSL connection use "SSL", for plaintext connection don't set this environment variable
-|`KAFKA_CLUSTERS_0_SCHEMAREGISTRY`   	|SchemaRegistry's address
-|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_USERNAME`   	|SchemaRegistry's basic authentication username
-|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_PASSWORD`   	|SchemaRegistry's basic authentication password
-|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTORELOCATION`   	|Path to the JKS keystore to communicate to SchemaRegistry
-|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTOREPASSWORD`   	|Password of the JKS keystore for SchemaRegistry
-|`KAFKA_CLUSTERS_0_METRICS_SSL`          |Enable SSL for Metrics (for PROMETHEUS metrics type). Default: false.
-|`KAFKA_CLUSTERS_0_METRICS_USERNAME` |Username for Metrics authentication
-|`KAFKA_CLUSTERS_0_METRICS_PASSWORD` |Password for Metrics authentication
-|`KAFKA_CLUSTERS_0_METRICS_KEYSTORELOCATION` |Path to the JKS keystore to communicate to metrics source (JMX/PROMETHEUS). For advanced setup, see `kafka-ui-jmx-secured.yml`
-|`KAFKA_CLUSTERS_0_METRICS_KEYSTOREPASSWORD` |Password of the JKS metrics keystore
-|`KAFKA_CLUSTERS_0_SCHEMANAMETEMPLATE` |How keys are saved to schemaRegistry
-|`KAFKA_CLUSTERS_0_METRICS_PORT`        	 |Open metrics port of a broker
-|`KAFKA_CLUSTERS_0_METRICS_TYPE`        	 |Type of metrics retriever to use. Valid values are JMX (default) or PROMETHEUS. If Prometheus, then metrics are read from prometheus-jmx-exporter instead of jmx
-|`KAFKA_CLUSTERS_0_READONLY`        	|Enable read-only mode. Default: false
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME` |Given name for the Kafka Connect cluster
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS` |Address of the Kafka Connect service endpoint
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_USERNAME`| Kafka Connect cluster's basic authentication username
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_PASSWORD`| Kafka Connect cluster's basic authentication password
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_KEYSTORELOCATION`| Path to the JKS keystore to communicate to Kafka Connect
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_KEYSTOREPASSWORD`| Password of the JKS keystore for Kafka Connect
-|`KAFKA_CLUSTERS_0_POLLING_THROTTLE_RATE` |Max traffic rate (bytes/sec) that kafka-ui allowed to reach when polling messages from the cluster. Default: 0 (not limited)
-|`KAFKA_CLUSTERS_0_SSL_TRUSTSTORELOCATION`| Path to the JKS truststore to communicate to Kafka Connect, SchemaRegistry, KSQL, Metrics
-|`KAFKA_CLUSTERS_0_SSL_TRUSTSTOREPASSWORD`| Password of the JKS truststore for Kafka Connect, SchemaRegistry, KSQL, Metrics
-|`TOPIC_RECREATE_DELAY_SECONDS` |Time delay between topic deletion and topic creation attempts for topic recreate functionality. Default: 1
-|`TOPIC_RECREATE_MAXRETRIES`  |Number of attempts of topic creation after topic deletion for topic recreate functionality. Default: 15
-|`DYNAMIC_CONFIG_ENABLED`|Allow to change application config in runtime. Default: false.
+Please refer to [contributing guide](https://docs.kafka-ui.provectus.io/development/contributing), we'll guide you from there.

+ 2 - 2
charts/kafka-ui/Chart.yaml

@@ -2,6 +2,6 @@ apiVersion: v2
 name: kafka-ui
 description: A Helm chart for kafka-UI
 type: application
-version: 0.6.0
-appVersion: v0.6.0
+version: 0.6.1
+appVersion: v0.6.1
 icon: https://github.com/provectus/kafka-ui/raw/master/documentation/images/kafka-ui-logo.png

+ 1 - 34
charts/kafka-ui/README.md

@@ -1,34 +1 @@
-# Kafka-UI Helm Chart
-
-## Configuration
-
-Most of the Helm charts parameters are common, follow table describe unique parameters related to application configuration.
-
-### Kafka-UI parameters
-
-| Parameter                                | Description                                                                                                                                    | Default |
-| ---------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------- | ------- |
-| `existingConfigMap`                      | Name of the existing ConfigMap with Kafka-UI environment variables                                                                             | `nil`   |
-| `existingSecret`                         | Name of the existing Secret with Kafka-UI environment variables                                                                                | `nil`   |
-| `envs.secret`                            | Set of the sensitive environment variables to pass to Kafka-UI                                                                                 | `{}`    |
-| `envs.config`                            | Set of the environment variables to pass to Kafka-UI                                                                                           | `{}`    |
-| `yamlApplicationConfigConfigMap`         | Map with name and keyName keys, name refers to the existing ConfigMap, keyName refers to the ConfigMap key with Kafka-UI config in Yaml format | `{}`    |
-| `yamlApplicationConfig`                  | Kafka-UI config in Yaml format                                                                                                                 | `{}`    |
-| `networkPolicy.enabled`                  | Enable network policies                                                                                                                        | `false` |
-| `networkPolicy.egressRules.customRules`  | Custom network egress policy rules                                                                                                             | `[]`    |
-| `networkPolicy.ingressRules.customRules` | Custom network ingress policy rules                                                                                                            | `[]`    |
-| `podLabels`                              | Extra labels for Kafka-UI pod                                                                                                                  | `{}`    |
-
-
-## Example
-
-To install Kafka-UI need to execute follow:
-``` bash
-helm repo add kafka-ui https://provectus.github.io/kafka-ui
-helm install kafka-ui kafka-ui/kafka-ui --set envs.config.KAFKA_CLUSTERS_0_NAME=local --set envs.config.KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092
-```
-To connect to Kafka-UI web application need to execute:
-``` bash
-kubectl port-forward svc/kafka-ui 8080:80
-```
-Open the `http://127.0.0.1:8080` on the browser to access Kafka-UI.
+Please refer to our [documentation](https://docs.kafka-ui.provectus.io/configuration/helm-charts) to get some info on our helm charts.

+ 0 - 43
docker-compose.md

@@ -1,43 +0,0 @@
-# Quick Start with docker-compose
-
-Environment variables documentation - [see usage](README.md#env_variables).<br/>
-We have plenty of example files with more complex configurations. Please check them out in ``docker`` directory.
-
-* Add a new service in docker-compose.yml
-
-```yaml
-version: '2'
-services:
-  kafka-ui:
-    image: provectuslabs/kafka-ui
-    container_name: kafka-ui
-    ports:
-      - "8080:8080"
-    restart: always
-    environment:
-      - KAFKA_CLUSTERS_0_NAME=local
-      - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092
-```
-
-* If you prefer UI for Apache Kafka in read only mode
-   
-```yaml
-version: '2'
-services:
-  kafka-ui:
-    image: provectuslabs/kafka-ui
-    container_name: kafka-ui
-    ports:
-      - "8080:8080"
-    restart: always
-    environment:
-      - KAFKA_CLUSTERS_0_NAME=local
-      - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092
-      - KAFKA_CLUSTERS_0_READONLY=true
-```
-  
-* Start UI for Apache Kafka process
-
-```bash
-docker-compose up -d kafka-ui
-```

+ 0 - 41
documentation/guides/AWS_IAM.md

@@ -1,41 +0,0 @@
-# How to configure AWS IAM Authentication
-
-UI for Apache Kafka comes with built-in [aws-msk-iam-auth](https://github.com/aws/aws-msk-iam-auth) library.
-
-You could pass sasl configs in properties section for each cluster.
-
-More details could be found here: [aws-msk-iam-auth](https://github.com/aws/aws-msk-iam-auth)
- 
-## Examples: 
-
-Please replace 
-* <KAFKA_URL> with broker list
-* <PROFILE_NAME> with your aws profile
-
-
-### Running From Docker Image
-
-```sh
-docker run -p 8080:8080 \
-    -e KAFKA_CLUSTERS_0_NAME=local \
-    -e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=<KAFKA_URL> \
-    -e KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL=SASL_SSL \
-    -e KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM=AWS_MSK_IAM \
-    -e KAFKA_CLUSTERS_0_PROPERTIES_SASL_CLIENT_CALLBACK_HANDLER_CLASS=software.amazon.msk.auth.iam.IAMClientCallbackHandler \
-    -e KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG=software.amazon.msk.auth.iam.IAMLoginModule required awsProfileName="<PROFILE_NAME>"; \
-    -d provectuslabs/kafka-ui:latest 
-```
-
-### Configuring by application.yaml
-
-```yaml
-kafka:
-  clusters:
-    - name: local
-      bootstrapServers: <KAFKA_URL>
-      properties:
-        security.protocol: SASL_SSL
-        sasl.mechanism: AWS_MSK_IAM
-        sasl.client.callback.handler.class: software.amazon.msk.auth.iam.IAMClientCallbackHandler
-        sasl.jaas.config: software.amazon.msk.auth.iam.IAMLoginModule required awsProfileName="<PROFILE_NAME>";
-```

+ 0 - 123
documentation/guides/DataMasking.md

@@ -1,123 +0,0 @@
-# Topics data masking
-
-You can configure kafka-ui to mask sensitive data shown in Messages page.
-
-Several masking policies supported:
-
-### REMOVE
-For json objects - remove target fields, otherwise - return "null" string.
-```yaml
-- type: REMOVE
-  fields: [ "id", "name" ]
-  ...
-```
-
-Apply examples:
-```
-{ "id": 1234, "name": { "first": "James" }, "age": 30 } 
- ->
-{ "age": 30 } 
-```
-```
-non-json string -> null
-```
-
-### REPLACE
-For json objects - replace target field's values with specified replacement string (by default with `***DATA_MASKED***`). Note: if target field's value is object, then replacement applied to all its fields recursively (see example). 
-
-```yaml
-- type: REPLACE
-  fields: [ "id", "name" ]
-  replacement: "***"  #optional, "***DATA_MASKED***" by default
-  ...
-```
-
-Apply examples:
-```
-{ "id": 1234, "name": { "first": "James", "last": "Bond" }, "age": 30 } 
- ->
-{ "id": "***", "name": { "first": "***", "last": "***" }, "age": 30 } 
-```
-```
-non-json string -> ***
-```
-
-### MASK
-Mask target field's values with specified masking characters, recursively (spaces and line separators will be kept as-is).
-`pattern` array specifies what symbols will be used to replace upper-case chars, lower-case chars, digits and other symbols correspondingly.
-
-```yaml
-- type: MASK
-  fields: [ "id", "name" ]
-  pattern: ["A", "a", "N", "_"]   # optional, default is ["X", "x", "n", "-"]
-  ...
-```
-
-Apply examples:
-```
-{ "id": 1234, "name": { "first": "James", "last": "Bond!" }, "age": 30 } 
- ->
-{ "id": "NNNN", "name": { "first": "Aaaaa", "last": "Aaaa_" }, "age": 30 } 
-```
-```
-Some string! -> Aaaa aaaaaa_
-```
-
-----
-
-For each policy, if `fields` not specified, then policy will be applied to all object's fields or whole string if it is not a json-object.
-
-You can specify which masks will be applied to topic's keys/values. Multiple policies will be applied if topic matches both policy's patterns.
-
-Yaml configuration example:
-```yaml
-kafka:
-  clusters:
-    - name: ClusterName
-      # Other Cluster configuration omitted ... 
-      masking:
-        - type: REMOVE
-          fields: [ "id" ]
-          topicKeysPattern: "events-with-ids-.*"
-          topicValuesPattern: "events-with-ids-.*"
-          
-        - type: REPLACE
-          fields: [ "companyName", "organizationName" ]
-          replacement: "***MASKED_ORG_NAME***"   #optional
-          topicValuesPattern: "org-events-.*"
-        
-        - type: MASK
-          fields: [ "name", "surname" ]
-          pattern: ["A", "a", "N", "_"]  #optional
-          topicValuesPattern: "user-states"
-
-        - type: MASK
-          topicValuesPattern: "very-secured-topic"
-```
-
-Same configuration in env-vars fashion:
-```
-...
-KAFKA_CLUSTERS_0_MASKING_0_TYPE: REMOVE
-KAFKA_CLUSTERS_0_MASKING_0_FIELDS_0: "id"
-KAFKA_CLUSTERS_0_MASKING_0_TOPICKEYSPATTERN: "events-with-ids-.*"
-KAFKA_CLUSTERS_0_MASKING_0_TOPICVALUESPATTERN: "events-with-ids-.*"
-
-KAFKA_CLUSTERS_0_MASKING_1_TYPE: REPLACE
-KAFKA_CLUSTERS_0_MASKING_1_FIELDS_0: "companyName"
-KAFKA_CLUSTERS_0_MASKING_1_FIELDS_1: "organizationName"
-KAFKA_CLUSTERS_0_MASKING_1_REPLACEMENT: "***MASKED_ORG_NAME***"
-KAFKA_CLUSTERS_0_MASKING_1_TOPICVALUESPATTERN: "org-events-.*"
-
-KAFKA_CLUSTERS_0_MASKING_2_TYPE: MASK
-KAFKA_CLUSTERS_0_MASKING_2_FIELDS_0: "name"
-KAFKA_CLUSTERS_0_MASKING_2_FIELDS_1: "surname"
-KAFKA_CLUSTERS_0_MASKING_2_PATTERN_0: 'A'
-KAFKA_CLUSTERS_0_MASKING_2_PATTERN_1: 'a'
-KAFKA_CLUSTERS_0_MASKING_2_PATTERN_2: 'N'
-KAFKA_CLUSTERS_0_MASKING_2_PATTERN_3: '_'
-KAFKA_CLUSTERS_0_MASKING_2_TOPICVALUESPATTERN: "user-states"
-
-KAFKA_CLUSTERS_0_MASKING_3_TYPE: MASK
-KAFKA_CLUSTERS_0_MASKING_3_TOPICVALUESPATTERN: "very-secured-topic"
-```

+ 0 - 55
documentation/guides/Protobuf.md

@@ -1,55 +0,0 @@
-# Kafkaui Protobuf Support
-
-### This document is deprecated, please see examples in [Serialization document](Serialization.md).
-
-Kafkaui supports deserializing protobuf messages in two ways:
-1. Using Confluent Schema Registry's [protobuf support](https://docs.confluent.io/platform/current/schema-registry/serdes-develop/serdes-protobuf.html).
-2. Supplying a protobuf file as well as a configuration that maps topic names to protobuf types.
-
-## Configuring Kafkaui with a Protobuf File
-
-To configure Kafkaui to deserialize protobuf messages using a supplied protobuf schema add the following to the config:
-```yaml
-kafka:
-  clusters:
-    - # Cluster configuration omitted...
-      # protobufFilesDir specifies root location for proto files (will be scanned recursively)
-      # NOTE: if 'protobufFilesDir' specified, then 'protobufFile' and 'protobufFiles' settings will be ignored
-      protobufFilesDir: "/path/to/my-protobufs"
-      # (DEPRECATED) protobufFile is the path to the protobuf schema. (deprecated: please use "protobufFiles")
-      protobufFile: path/to/my.proto
-      # (DEPRECATED) protobufFiles is the location of one or more protobuf schemas
-      protobufFiles:
-        - /path/to/my-protobufs/my.proto
-        - /path/to/my-protobufs/another.proto
-        - /path/to/my-protobufs:test/test.proto
-      # protobufMessageName is the default protobuf type that is used to deserialize
-      # the message's value if the topic is not found in protobufMessageNameByTopic.    
-      protobufMessageName: my.DefaultValType
-      # protobufMessageNameByTopic is a mapping of topic names to protobuf types.
-      # This mapping is required and is used to deserialize the Kafka message's value.
-      protobufMessageNameByTopic:
-        topic1: my.Type1
-        topic2: my.Type2
-      # protobufMessageNameForKey is the default protobuf type that is used to deserialize
-      # the message's key if the topic is not found in protobufMessageNameForKeyByTopic.
-      protobufMessageNameForKey: my.DefaultKeyType
-      # protobufMessageNameForKeyByTopic is a mapping of topic names to protobuf types.
-      # This mapping is optional and is used to deserialize the Kafka message's key.
-      # If a protobuf type is not found for a topic's key, the key is deserialized as a string,
-      # unless protobufMessageNameForKey is specified.
-      protobufMessageNameForKeyByTopic:
-        topic1: my.KeyType1
-```
-
-Same config with flattened config (for docker-compose):
-
-```text
-kafka.clusters.0.protobufFiles.0: /path/to/my.proto
-kafka.clusters.0.protobufFiles.1: /path/to/another.proto
-kafka.clusters.0.protobufMessageName: my.DefaultValType
-kafka.clusters.0.protobufMessageNameByTopic.topic1: my.Type1
-kafka.clusters.0.protobufMessageNameByTopic.topic2: my.Type2
-kafka.clusters.0.protobufMessageNameForKey: my.DefaultKeyType
-kafka.clusters.0.protobufMessageNameForKeyByTopic.topic1: my.KeyType1
-```

+ 0 - 58
documentation/guides/SASL_SCRAM.md

@@ -1,58 +0,0 @@
-# How to configure SASL SCRAM Authentication
-
-You could pass sasl configs in properties section for each cluster.
- 
-## Examples: 
-
-Please replace 
-- <KAFKA_NAME> with cluster name
-- <KAFKA_URL> with broker list
-- <KAFKA_USERNAME> with username
-- <KAFKA_PASSWORD> with password
-
-### Running From Docker Image
-
-```sh
-docker run -p 8080:8080 \
-    -e KAFKA_CLUSTERS_0_NAME=<KAFKA_NAME> \
-    -e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=<KAFKA_URL> \
-    -e KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL=SASL_SSL \
-    -e KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM=SCRAM-SHA-512 \     
-    -e KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG=org.apache.kafka.common.security.scram.ScramLoginModule required username="<KAFKA_USERNAME>" password="<KAFKA_PASSWORD>"; \
-    -d provectuslabs/kafka-ui:latest 
-```
-
-### Running From Docker-compose file
-
-```yaml
-
-version: '3.4'
-services:
-  
-  kafka-ui:
-    image: provectuslabs/kafka-ui
-    container_name: kafka-ui
-    ports:
-      - "888:8080"
-    restart: always
-    environment:
-      - KAFKA_CLUSTERS_0_NAME=<KAFKA_NAME>
-      - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=<KAFKA_URL>
-      - KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL=SASL_SSL
-      - KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM=SCRAM-SHA-512
-      - KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG=org.apache.kafka.common.security.scram.ScramLoginModule required username="<KAFKA_USERNAME>" password="<KAFKA_PASSWORD>";
-      - KAFKA_CLUSTERS_0_PROPERTIES_PROTOCOL=SASL
-```
-
-### Configuring by application.yaml
-
-```yaml
-kafka:
-  clusters:
-    - name: local
-      bootstrapServers: <KAFKA_URL>
-      properties:
-        security.protocol: SASL_SSL
-        sasl.mechanism: SCRAM-SHA-512        
-        sasl.jaas.config: org.apache.kafka.common.security.scram.ScramLoginModule required username="<KAFKA_USERNAME>" password="<KAFKA_PASSWORD>";
-```

+ 0 - 7
documentation/guides/SECURE_BROKER.md

@@ -1,7 +0,0 @@
-## Connecting to a Secure Broker
-
-The app supports TLS (SSL) and SASL connections for [encryption and authentication](http://kafka.apache.org/090/documentation.html#security). <br/>
-
-### Running From Docker-compose file
-
-See [this](/documentation/compose/kafka-ssl.yml) docker-compose file reference for ssl-enabled kafka

+ 0 - 71
documentation/guides/SSO.md

@@ -1,71 +0,0 @@
-# How to configure SSO
-SSO require additionaly to configure TLS for application, in that example we will use self-signed certificate, in case of use legal certificates please skip step 1.
-## Step 1
-At this step we will generate self-signed PKCS12 keypair.
-``` bash
-mkdir cert
-keytool -genkeypair -alias ui-for-apache-kafka -keyalg RSA -keysize 2048 \
-  -storetype PKCS12 -keystore cert/ui-for-apache-kafka.p12 -validity 3650
-```
-## Step 2
-Create new application in any SSO provider, we will continue with [Auth0](https://auth0.com).
-
-<img src="https://github.com/provectus/kafka-ui/raw/images/images/sso-new-app.png" width="70%"/>
-
-After that need to provide callback URLs, in our case we will use `https://127.0.0.1:8080/login/oauth2/code/auth0`
-
-<img src="https://github.com/provectus/kafka-ui/raw/images/images/sso-configuration.png" width="70%"/>
-
-This is a main parameters required for enabling SSO
-
-<img src="https://github.com/provectus/kafka-ui/raw/images/images/sso-parameters.png" width="70%"/>
-
-## Step 3
-To launch UI for Apache Kafka with enabled TLS and SSO run following:
-``` bash
-docker run -p 8080:8080 -v `pwd`/cert:/opt/cert -e AUTH_TYPE=LOGIN_FORM \
-  -e SECURITY_BASIC_ENABLED=true \
-  -e SERVER_SSL_KEY_STORE_TYPE=PKCS12 \
-  -e SERVER_SSL_KEY_STORE=/opt/cert/ui-for-apache-kafka.p12 \
-  -e SERVER_SSL_KEY_STORE_PASSWORD=123456 \
-  -e SERVER_SSL_KEY_ALIAS=ui-for-apache-kafka \
-  -e SERVER_SSL_ENABLED=true \
-  -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTID=uhvaPKIHU4ZF8Ne4B6PGvF0hWW6OcUSB \
-  -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTSECRET=YXfRjmodifiedTujnkVr7zuW9ECCAK4TcnCio-i \
-  -e SPRING_SECURITY_OAUTH2_CLIENT_PROVIDER_AUTH0_ISSUER_URI=https://dev-a63ggcut.auth0.com/ \
-  -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_SCOPE=openid \
-  -e TRUST_STORE=/opt/cert/ui-for-apache-kafka.p12 \
-  -e TRUST_STORE_PASSWORD=123456 \
-provectuslabs/kafka-ui:latest
-```
-In the case with trusted CA-signed SSL certificate and SSL termination somewhere outside of application we can pass only SSO related environment variables:
-``` bash
-docker run -p 8080:8080 -v `pwd`/cert:/opt/cert -e AUTH_TYPE=OAUTH2 \
-  -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTID=uhvaPKIHU4ZF8Ne4B6PGvF0hWW6OcUSB \
-  -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTSECRET=YXfRjmodifiedTujnkVr7zuW9ECCAK4TcnCio-i \
-  -e SPRING_SECURITY_OAUTH2_CLIENT_PROVIDER_AUTH0_ISSUER_URI=https://dev-a63ggcut.auth0.com/ \
-  -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_SCOPE=openid \
-provectuslabs/kafka-ui:latest
-```
-
-## Step 4 (Load Balancer HTTP) (optional)
-If you're using load balancer/proxy and use HTTP between the proxy and the app, you might want to set `server_forward-headers-strategy` to `native` as well (`SERVER_FORWARDHEADERSSTRATEGY=native`), for more info refer to [this issue](https://github.com/provectus/kafka-ui/issues/1017).
-
-## Step 5 (Azure) (optional)
-For Azure AD (Office365) OAUTH2 you'll want to add additional environment variables:
-
-```bash
-docker run -p 8080:8080 \
-        -e KAFKA_CLUSTERS_0_NAME="${cluster_name}"\
-        -e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS="${kafka_listeners}" \
-        -e KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS="${kafka_connect_servers}"
-        -e AUTH_TYPE=OAUTH2 \
-        -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTID=uhvaPKIHU4ZF8Ne4B6PGvF0hWW6OcUSB \
-        -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTSECRET=YXfRjmodifiedTujnkVr7zuW9ECCAK4TcnCio-i \
-        -e SPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_SCOPE="https://graph.microsoft.com/User.Read" \
-        -e SPRING_SECURITY_OAUTH2_CLIENT_PROVIDER_AUTH0_ISSUER_URI="https://login.microsoftonline.com/{tenant-id}/v2.0" \
-        -d provectuslabs/kafka-ui:latest"
-```
-
-Note that scope is created by default when Application registration is done in Azure portal.
-You'll need to update application registration manifest to include `"accessTokenAcceptedVersion": 2`

+ 0 - 167
documentation/guides/Serialization.md

@@ -1,167 +0,0 @@
-## Serialization and deserialization and custom plugins
-
-Kafka-ui supports multiple ways to serialize/deserialize data.
-
-
-### Int32, Int64, UInt32, UInt64
-Big-endian 4/8 bytes representation of signed/unsigned integers.
-
-### Base64
-Base64 (RFC4648) binary data representation. Can be useful in case if the actual data is not important, but exactly the same (byte-wise) key/value should be send.
-
-### String 
-Treats binary data as a string in specified encoding. Default encoding is UTF-8.
-
-Class name: `com.provectus.kafka.ui.serdes.builtin.StringSerde`
-
-Sample configuration (if you want to overwrite default configuration):
-```yaml
-kafka:
-  clusters:
-    - name: Cluster1
-      # Other Cluster configuration omitted ... 
-      serde:
-          # registering String serde with custom config
-        - name: AsciiString
-          className: com.provectus.kafka.ui.serdes.builtin.StringSerde
-          properties:
-            encoding: "ASCII"
-        
-          # overriding build-it String serde config   
-        - name: String 
-          properties:
-            encoding: "UTF-16"
-```
-
-### Protobuf
-
-Class name: `com.provectus.kafka.ui.serdes.builtin.ProtobufFileSerde`
-
-Sample configuration:
-```yaml
-kafka:
-  clusters:
-    - name: Cluster1
-      # Other Cluster configuration omitted ... 
-      serde:
-        - name: ProtobufFile
-          properties:
-            # path to the protobuf schema files directory
-            protobufFilesDir: "path/to/protofiles"
-            # default protobuf type that is used for KEY serialization/deserialization
-            # optional
-            protobufMessageNameForKey: my.Type1
-            # mapping of topic names to protobuf types, that will be used for KEYS  serialization/deserialization
-            # optional
-            protobufMessageNameForKeyByTopic:
-              topic1: my.KeyType1
-              topic2: my.KeyType2
-            # default protobuf type that is used for VALUE serialization/deserialization
-            # optional, if not set - first type in file will be used as default
-            protobufMessageName: my.Type1
-            # mapping of topic names to protobuf types, that will be used for VALUES  serialization/deserialization
-            # optional
-            protobufMessageNameByTopic:
-              topic1: my.Type1
-              "topic.2": my.Type2
-```
-Docker-compose sample for Protobuf serialization is [here](../compose/kafka-ui-serdes.yaml).
-
-Legacy configuration for protobuf is [here](Protobuf.md).
-
-### SchemaRegistry
-SchemaRegistry serde is automatically configured if schema registry properties set on cluster level.
-But you can add new SchemaRegistry-typed serdes that will connect to another schema-registry instance. 
-
-Class name: `com.provectus.kafka.ui.serdes.builtin.sr.SchemaRegistrySerde`
-
-Sample configuration:
-```yaml
-kafka:
-  clusters:
-    - name: Cluster1
-      # this url will be used by "SchemaRegistry" by default
-      schemaRegistry: http://main-schema-registry:8081
-      serde:
-        - name: AnotherSchemaRegistry
-          className: com.provectus.kafka.ui.serdes.builtin.sr.SchemaRegistrySerde
-          properties:
-            url:  http://another-schema-registry:8081
-            # auth properties, optional
-            username: nameForAuth
-            password: P@ssW0RdForAuth
-        
-          # and also add another SchemaRegistry serde
-        - name: ThirdSchemaRegistry
-          className: com.provectus.kafka.ui.serdes.builtin.sr.SchemaRegistrySerde
-          properties:
-            url:  http://another-yet-schema-registry:8081
-```
-
-## Setting serdes for specific topics
-You can specify preferable serde for topics key/value. This serde will be chosen by default in UI on topic's view/produce pages. 
-To do so, set `topicValuesPattern/topicValuesPattern` properties for the selected serde. Kafka-ui will choose a first serde that matches specified pattern.
-
-Sample configuration:
-```yaml
-kafka:
-  clusters:
-    - name: Cluster1
-      serde:
-        - name: String
-          topicKeysPattern: click-events|imp-events
-        
-        - name: Int64
-          topicKeysPattern: ".*-events"
-        
-        - name: SchemaRegistry
-          topicValuesPattern: click-events|imp-events
-```
-
-
-## Default serdes
-You can specify which serde will be chosen in UI by default if no other serdes selected via `topicKeysPattern/topicValuesPattern` settings.
-
-Sample configuration:
-```yaml
-kafka:
-  clusters:
-    - name: Cluster1
-      defaultKeySerde: Int32
-      defaultValueSerde: String
-      serde:
-        - name: Int32
-          topicKeysPattern: click-events|imp-events
-```
-
-## Fallback
-If selected serde couldn't be applied (exception was thrown), then fallback (String serde with UTF-8 encoding) serde will be applied. Such messages will be specially highlighted in UI.
-
-## Custom pluggable serde registration
-You can implement your own serde and register it in kafka-ui application.
-To do so:
-1. Add `kafka-ui-serde-api` dependency (should be downloadable via maven central)
-2. Implement `com.provectus.kafka.ui.serde.api.Serde` interface. See javadoc for implementation requirements.
-3. Pack your serde into uber jar, or provide directory with no-dependency jar and it's dependencies jars
-
-
-Example pluggable serdes :
-https://github.com/provectus/kafkaui-smile-serde
-https://github.com/provectus/kafkaui-glue-sr-serde
-
-Sample configuration:
-```yaml
-kafka:
-  clusters:
-    - name: Cluster1
-      serde:
-        - name: MyCustomSerde
-          className: my.lovely.org.KafkaUiSerde
-          filePath: /var/lib/kui-serde/my-kui-serde.jar
-          
-        - name: MyCustomSerde2
-          className: my.lovely.org.KafkaUiSerde2
-          filePath: /var/lib/kui-serde2
-          properties:
-            prop1: v1
-```

+ 0 - 22
documentation/project/ROADMAP.md

@@ -1,22 +0,0 @@
-Kafka-UI Project Roadmap
-====================
-
-Roadmap exists in a form of a github project board and is located [here](https://github.com/provectus/kafka-ui/projects/8).
-
-### How to use this document
-
-The roadmap provides a list of features we decided to prioritize in project development. It should serve as a reference point to understand projects' goals.
-
-We do prioritize them based on the feedback from the community, our own vision and other conditions and circumstances. 
-
-The roadmap sets the general way of development. The roadmap is mostly about long-term features. All the features could be re-prioritized, rescheduled or canceled.
-
-If there's no feature `X`, that **doesn't** mean we're **not** going to implement it. Feel free to raise the issue for the consideration. <br/>
-If a feature you want to see live is not present on roadmap, but there's an issue for the feature, feel free to vote for it using reactions in the issue.
-
-
-### How to contribute
-
-Since the roadmap consists mostly of big long-term features, implementing them might be not easy for a beginner outside collaborator.
-
-A good starting point is checking the [CONTRIBUTING.md](https://github.com/provectus/kafka-ui/blob/master/CONTRIBUTING.md) document.

+ 0 - 8
documentation/project/contributing/README.md

@@ -1,8 +0,0 @@
-# Contributing guidelines
-
-### Set up the local environment for development
-
-* [Prerequisites](prerequisites.md)
-<!--* [Setting up git](set-up-git.md)-->
-* [Building the app](building.md)
-* [Writing tests](testing.md)

+ 0 - 24
documentation/project/contributing/building-and-running-without-docker.md

@@ -1,24 +0,0 @@
-# Build & Run Without Docker
-
-Once you installed the prerequisites and cloned the repository, run the following steps in your project directory:
-
-## <a name="run_without_docker_quickly"></a> Running Without Docker Quickly
-
-- [Download the latest kafka-ui jar file](https://github.com/provectus/kafka-ui/releases)
-#### <a name="run_kafkaui_jar_file"></a> Execute the jar
-```sh
-java -Dspring.config.additional-location=<path-to-application-local.yml> -jar <path-to-kafka-ui-jar>
-```
-- Example of how to configure clusters in the [application-local.yml](https://github.com/provectus/kafka-ui/blob/master/kafka-ui-api/src/main/resources/application-local.yml) configuration file.
-
-## <a name="build_and_run_without_docker"></a> Building And Running Without Docker
-
-> **_NOTE:_**  If you want to get kafka-ui up and running locally quickly without building the jar file manually, then just follow [Running Without Docker Quickly](#run_without_docker_quickly)
-
-> Comment out `docker-maven-plugin` plugin in `kafka-ui-api` pom.xml
-
-- [Command to build the jar](./building.md#cmd_to_build_kafkaui_without_docker)
-
-> Once your build is successful and the jar file named kafka-ui-api-0.0.1-SNAPSHOT.jar is generated inside `kafka-ui-api/target`.
-
-- [Execute the jar](#run_kafkaui_jar_file)

+ 0 - 63
documentation/project/contributing/building.md

@@ -1,63 +0,0 @@
-# Build & Run
-
-Once you installed the prerequisites and cloned the repository, run the following steps in your project directory:
-
-## Step 1 : Build
-> **_NOTE:_**  If you are an macOS M1 User then please keep in mind below things
-
-> Make sure you have ARM supported java installed
-
-> Skip the maven tests as they might not be successful
-
-- Build a docker image with the app
-```sh
-./mvnw clean install -Pprod
-```
-- if you need to build the frontend `kafka-ui-react-app`, go here
-     - [kafka-ui-react-app-build-documentation](../../../kafka-ui-react-app/README.md)
-
-<a name="cmd_to_build_kafkaui_without_docker"></a>
-- In case you want to build `kafka-ui-api` by skipping the tests
-```sh
-./mvnw clean install -Dmaven.test.skip=true -Pprod
-```
-
-- To build only the `kafka-ui-api` you can use this command:
-```sh
-./mvnw -f kafka-ui-api/pom.xml clean install -Pprod -DskipUIBuild=true
-```
-
-If this step is successful, it should create a docker image named `provectuslabs/kafka-ui` with `latest` tag on your local machine except macOS M1.
-
-## Step 2 : Run
-#### Using Docker Compose
-> **_NOTE:_**  If you are an macOS M1 User then you can use arm64 supported docker compose script `./documentation/compose/kafka-ui-arm64.yaml`
- - Start the `kafka-ui` app using docker image built in step 1 along with Kafka clusters:
-```sh
-docker-compose -f ./documentation/compose/kafka-ui.yaml up -d
-```
-
-#### Using Spring Boot Run
- - If you want to start only kafka clusters (to run the `kafka-ui` app via `spring-boot:run`):
-```sh
-docker-compose -f ./documentation/compose/kafka-clusters-only.yaml up -d
-```
-- Then start the app.
-```sh
-./mvnw spring-boot:run -Pprod
-
-# or
-
-./mvnw spring-boot:run -Pprod -Dspring.config.location=file:///path/to/conf.yaml
-```
-
-#### Running in kubernetes
-- Using Helm Charts
-```sh bash
-helm repo add kafka-ui https://provectus.github.io/kafka-ui
-helm install kafka-ui kafka-ui/kafka-ui
-```
-To read more please follow to [chart documentation](../../../charts/kafka-ui/README.md).
-
-## Step 3 : Access Kafka-UI
- - To see the `kafka-ui` app running, navigate to http://localhost:8080.

+ 0 - 42
documentation/project/contributing/prerequisites.md

@@ -1,42 +0,0 @@
-### Prerequisites
-
-This page explains how to get the software you need to use a Linux or macOS
-machine for local development.
-
-Before you begin contributing you must have:
-
-* A GitHub account
-* `Java` 17 or newer
-* `Git`
-* `Docker`
-
-### Installing prerequisites on macOS
-
-1. Install [brew](https://brew.sh/).
-2. Install brew cask:
-```sh
-brew cask
-```
-3. Install Eclipse Temurin 17 via Homebrew cask:
-```sh
-brew tap homebrew/cask-versions
-brew install temurin17
-```
-4. Verify Installation
-```sh
-java -version
-```
-Note : In case OpenJDK 17 is not set as your default Java, you can consider to include it in your `$PATH` after installation
-```sh
-export PATH="$(/usr/libexec/java_home -v 17)/bin:$PATH"
-export JAVA_HOME="$(/usr/libexec/java_home -v 17)"
-```
-
-## Tips
-
-Consider allocating not less than 4GB of memory for your docker.
-Otherwise, some apps within a stack (e.g. `kafka-ui.yaml`) might crash.
-
-## Where to go next
-
-In the next section, you'll [learn how to Build and Run kafka-ui](building.md).

+ 0 - 8
documentation/project/contributing/set-up-git.md

@@ -1,8 +0,0 @@
-### Nothing special here yet.
-<!--
-TODO:
-
-1. Cloning
-2. Credentials set up (git user.name & email)
-3. Signing off (DCO)
--->

+ 0 - 28
documentation/project/contributing/testing.md

@@ -1,28 +0,0 @@
-# Testing
-
-
-
-## Test suites
-
-
-## Writing new tests
-
-
-### Writing tests for new features
-
-
-### Writing tests for bug fixes
-
-
-### Writing new integration tests
-
-
-
-## Running tests
-
-### Unit Tests
-
-
-### Integration Tests
-
-

+ 0 - 65
helm_chart.md

@@ -1,65 +0,0 @@
-# Quick Start with Helm Chart
-
-### General
-1. Clone/Copy Chart to your working directory
-2. Execute command ```helm install helm-release-name charts/kafka-ui```
-
-### Passing Kafka-UI configuration as Dict
-Create values.yml file
-```
-yamlApplicationConfig:
-  kafka:
-    clusters:
-      - name: yaml
-        bootstrapServers:  kafka-cluster-broker-endpoints:9092
-  auth:
-    type: disabled
-  management:
-    health:
-      ldap:
-        enabled: false
-```
-Install by executing command
-> helm install helm-release-name charts/kafka-ui -f values.yml
-
-
-### Passing a configuration file as a ConfigMap
-Create a ConfigMap:
-```yaml
-apiVersion: v1
-kind: ConfigMap
-metadata:
-  name: kafka-ui-existing-configmap-as-a-configfile
-data:
-  config.yml: |-
-    kafka:
-      clusters:
-        - name: yaml
-          bootstrapServers: kafka-cluster-broker-endpoints:9092
-    auth:
-      type: disabled
-    management:
-      health:
-        ldap:
-          enabled: false
-```
-This ConfigMap will be mounted into the Pod.
-
-Install by executing:
-> helm install helm-release-name charts/kafka-ui --set yamlApplicationConfigConfigMap.name="kafka-ui-config",yamlApplicationConfigConfigMap.keyName="config.yml"
-
-### Passing environment variables as ConfigMap
-Create a ConfigMap:
-```yaml
-apiVersion: v1
-kind: ConfigMap
-metadata:
-  name: kafka-ui-helm-values
-data:
-  KAFKA_CLUSTERS_0_NAME: "kafka-cluster-name"
-  KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: "kafka-cluster-broker-endpoints:9092"
-  AUTH_TYPE: "DISABLED"
-  MANAGEMENT_HEALTH_LDAP_ENABLED: "FALSE" 
-```
-Install by executing:
-> helm install helm-release-name charts/kafka-ui --set existingConfigMap="kafka-ui-helm-values"  

+ 1 - 13
kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java

@@ -89,19 +89,7 @@ public class ConsumerGroupMapper {
             .flatMap(m -> m.getAssignment().stream().map(TopicPartition::topic))
     ).collect(Collectors.toSet()).size();
 
-    Long messagesBehind = null;
-    // messagesBehind should be undefined if no committed offsets found for topic
-    if (!c.getOffsets().isEmpty()) {
-      messagesBehind = c.getOffsets().entrySet().stream()
-          .mapToLong(e ->
-              Optional.ofNullable(c.getEndOffsets())
-                  .map(o -> o.get(e.getKey()))
-                  .map(o -> o - e.getValue())
-                  .orElse(0L)
-          ).sum();
-    }
-
-    consumerGroup.setMessagesBehind(messagesBehind);
+    consumerGroup.setMessagesBehind(c.getMessagesBehind());
     consumerGroup.setTopics(numTopics);
     consumerGroup.setSimple(c.isSimple());
 

+ 19 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java

@@ -20,6 +20,7 @@ public class InternalConsumerGroup {
   private final Collection<InternalMember> members;
   private final Map<TopicPartition, Long> offsets;
   private final Map<TopicPartition, Long> endOffsets;
+  private final Long messagesBehind;
   private final String partitionAssignor;
   private final ConsumerGroupState state;
   private final Node coordinator;
@@ -58,7 +59,25 @@ public class InternalConsumerGroup {
     );
     builder.offsets(groupOffsets);
     builder.endOffsets(topicEndOffsets);
+    builder.messagesBehind(calculateMessagesBehind(groupOffsets, topicEndOffsets));
     Optional.ofNullable(description.coordinator()).ifPresent(builder::coordinator);
     return builder.build();
   }
+
+  private static Long calculateMessagesBehind(Map<TopicPartition, Long> offsets, Map<TopicPartition, Long> endOffsets) {
+    Long messagesBehind = null;
+    // messagesBehind should be undefined if no committed offsets found for topic
+    if (!offsets.isEmpty()) {
+      messagesBehind = offsets.entrySet().stream()
+          .mapToLong(e ->
+              Optional.ofNullable(endOffsets)
+                  .map(o -> o.get(e.getKey()))
+                  .map(o -> o - e.getValue())
+                  .orElse(0L)
+          ).sum();
+    }
+
+    return messagesBehind;
+  }
+
 }
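
For reference, the lag figure produced by `calculateMessagesBehind` above is undefined when the group has no committed offsets, and otherwise sums `endOffset - committedOffset` over the committed partitions (a missing end offset counts as zero). A minimal TypeScript sketch of the same rule — the shipped implementation is the Java above; the string-keyed maps here are a hypothetical simplification:

```ts
// Lag per the rule above: null when no committed offsets exist, otherwise
// the summed distance from each committed offset to its topic end offset.
function messagesBehind(
  committed: Map<string, number>, // committed offsets, keyed "topic-partition"
  endOffsets: Map<string, number>
): number | null {
  if (committed.size === 0) return null;
  let behind = 0;
  committed.forEach((offset, tp) => {
    const end = endOffsets.get(tp);
    behind += end !== undefined ? end - offset : 0; // missing end offset -> 0
  });
  return behind;
}
```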

+ 19 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java

@@ -1,5 +1,6 @@
 package com.provectus.kafka.ui.service;
 
+import com.google.common.collect.Streams;
 import com.google.common.collect.Table;
 import com.provectus.kafka.ui.model.ConsumerGroupOrderingDTO;
 import com.provectus.kafka.ui.model.InternalConsumerGroup;
@@ -157,6 +158,24 @@ public class ConsumerGroupService {
             .map(descriptions ->
                 sortAndPaginate(descriptions.values(), comparator, pageNum, perPage, sortOrderDto).toList());
       }
+      case MESSAGES_BEHIND -> {
+        record GroupWithDescr(InternalConsumerGroup icg, ConsumerGroupDescription cgd) { }
+
+        Comparator<GroupWithDescr> comparator = Comparator.comparingLong(gwd ->
+            gwd.icg.getMessagesBehind() == null ? 0L : gwd.icg.getMessagesBehind());
+
+        var groupNames = groups.stream().map(ConsumerGroupListing::groupId).toList();
+
+        yield ac.describeConsumerGroups(groupNames)
+            .flatMap(descriptionsMap -> {
+                  List<ConsumerGroupDescription> descriptions = descriptionsMap.values().stream().toList();
+                  return getConsumerGroups(ac, descriptions)
+                      .map(icg -> Streams.zip(icg.stream(), descriptions.stream(), GroupWithDescr::new).toList())
+                      .map(gwd -> sortAndPaginate(gwd, comparator, pageNum, perPage, sortOrderDto)
+                            .map(GroupWithDescr::cgd).toList());
+                }
+            );
+      }
     };
   }
 

+ 2 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java

@@ -28,7 +28,8 @@ public class FeatureService {
 
   private final AdminClientService adminClientService;
 
-  public Mono<List<ClusterFeature>> getAvailableFeatures(KafkaCluster cluster, ClusterDescription clusterDescription) {
+  public Mono<List<ClusterFeature>> getAvailableFeatures(KafkaCluster cluster,
+                                                         ClusterDescription clusterDescription) {
     List<Mono<ClusterFeature>> features = new ArrayList<>();
 
     if (Optional.ofNullable(cluster.getConnectsClients())

+ 18 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java

@@ -12,6 +12,7 @@ import java.util.stream.Stream;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.core.ParameterizedTypeReference;
 import org.springframework.http.HttpHeaders;
+import org.springframework.security.config.oauth2.client.CommonOAuth2Provider;
 import org.springframework.security.oauth2.client.userinfo.OAuth2UserRequest;
 import org.springframework.security.oauth2.core.user.DefaultOAuth2User;
 import org.springframework.web.reactive.function.client.WebClient;
@@ -24,8 +25,7 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor {
   private static final String USERNAME_ATTRIBUTE_NAME = "login";
   private static final String ORGANIZATION_NAME = "login";
   private static final String GITHUB_ACCEPT_HEADER = "application/vnd.github+json";
-
-  private final WebClient webClient = WebClient.create("https://api.github.com");
+  private static final String DUMMY = "dummy";
 
   @Override
   public boolean isApplicable(String provider) {
@@ -64,9 +64,24 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor {
       return Mono.just(groupsByUsername);
     }
 
+    OAuth2UserRequest req = (OAuth2UserRequest) additionalParams.get("request");
+    String infoEndpoint = req.getClientRegistration().getProviderDetails().getUserInfoEndpoint().getUri();
+
+    if (infoEndpoint == null) {
+      infoEndpoint = CommonOAuth2Provider.GITHUB
+          .getBuilder(DUMMY)
+          .clientId(DUMMY)
+          .build()
+          .getProviderDetails()
+          .getUserInfoEndpoint()
+          .getUri();
+    }
+
+    WebClient webClient = WebClient.create(infoEndpoint);
+
     final Mono<List<Map<String, Object>>> userOrganizations = webClient
         .get()
-        .uri("/user/orgs")
+        .uri("/orgs")
         .headers(headers -> {
           headers.set(HttpHeaders.ACCEPT, GITHUB_ACCEPT_HEADER);
           OAuth2UserRequest request = (OAuth2UserRequest) additionalParams.get("request");

+ 38 - 22
kafka-ui-api/src/main/resources/application-local.yml

@@ -6,6 +6,9 @@ logging:
     #org.springframework.http.codec.json.Jackson2JsonDecoder: DEBUG
     reactor.netty.http.server.AccessLog: INFO
 
+#server:
+#  port: 8080 #- Port in which kafka-ui will run.
+
 kafka:
   clusters:
     - name: local
@@ -42,27 +45,40 @@ kafka:
 spring:
   jmx:
     enabled: true
-  security:
-    oauth2:
-      client:
-        registration:
-          cognito:
-            clientId: xx
-            clientSecret: yy
-            scope: openid
-            client-name: cognito
-            provider: cognito
-            redirect-uri: http://localhost:8080/login/oauth2/code/cognito
-            authorization-grant-type: authorization_code
-        provider:
-          cognito:
-            issuer-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj
-            jwk-set-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj/.well-known/jwks.json
-            user-name-attribute: username
+
 auth:
   type: DISABLED
-
-roles.file: /tmp/roles.yml
-
-#server:
-#  port: 8080 #- Port in which kafka-ui will run.
+#  type: OAUTH2
+#  oauth2:
+#    client:
+#      cognito:
+#        clientId:
+#        clientSecret:
+#        scope: openid
+#        client-name: cognito
+#        provider: cognito
+#        redirect-uri: http://localhost:8080/login/oauth2/code/cognito
+#        authorization-grant-type: authorization_code
+#        issuer-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj
+#        jwk-set-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj/.well-known/jwks.json
+#        user-name-attribute: username
+#        custom-params:
+#          type: cognito
+#          logoutUrl: https://kafka-ui.auth.eu-central-1.amazoncognito.com/logout
+#      google:
+#        provider: google
+#        clientId:
+#        clientSecret:
+#        user-name-attribute: email
+#        custom-params:
+#          type: google
+#          allowedDomain: provectus.com
+#      github:
+#        provider: github
+#        clientId:
+#        clientSecret:
+#        scope:
+#          - read:org
+#        user-name-attribute: login
+#        custom-params:
+#          type: github

+ 1 - 0
kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml

@@ -2522,6 +2522,7 @@ components:
         - NAME
         - MEMBERS
         - STATE
+        - MESSAGES_BEHIND
 
     ConsumerGroupsPageResponse:
       type: object

+ 6 - 6
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java

@@ -208,23 +208,23 @@ public class TopicsList extends BasePage {
             return new TopicsList();
         }
 
+        private SelenideElement getNameElm() {
+            return element.$x("./td[2]");
+        }
+
         @Step
         public boolean isInternal() {
             boolean internal = false;
             try {
-                internal = element.$x("./td[2]/a/span").isDisplayed();
+                internal = getNameElm().$x("./a/span").isDisplayed();
             } catch (Throwable ignored) {
             }
             return internal;
         }
 
-        private SelenideElement getNameElm() {
-            return element.$x("./td[2]");
-        }
-
         @Step
         public String getName() {
-            return getNameElm().getText().trim();
+            return getNameElm().$x("./a").getAttribute("title");
         }
 
         @Step

+ 14 - 0
kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/backlog/SmokeBacklog.java

@@ -58,4 +58,18 @@ public class SmokeBacklog extends BaseManualTest {
     @Test
     public void testCaseG() {
     }
+
+    @Automation(state = TO_BE_AUTOMATED)
+    @Suite(id = 5)
+    @QaseId(335)
+    @Test
+    public void testCaseH() {
+    }
+
+    @Automation(state = TO_BE_AUTOMATED)
+    @Suite(id = 5)
+    @QaseId(336)
+    @Test
+    public void testCaseI() {
+    }
 }

+ 17 - 4
kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java

@@ -359,7 +359,7 @@ public class TopicsTest extends BaseTest {
 
     @QaseId(11)
     @Test(priority = 15)
-    public void checkShowInternalTopicsButtonFunctionality() {
+    public void checkShowInternalTopicsButton() {
         navigateToTopics();
         SoftAssert softly = new SoftAssert();
         softly.assertTrue(topicsList.isShowInternalRadioBtnSelected(), "isInternalRadioBtnSelected()");
@@ -373,8 +373,21 @@ public class TopicsTest extends BaseTest {
         softly.assertAll();
     }
 
-    @QaseId(56)
+    @QaseId(334)
     @Test(priority = 16)
+    public void checkInternalTopicsNaming() {
+        navigateToTopics();
+        SoftAssert softly = new SoftAssert();
+        topicsList
+                .setShowInternalRadioButton(true)
+                .getInternalTopics()
+                .forEach(topic -> softly.assertTrue(topic.getName().startsWith("_"),
+                        String.format("'%s' starts with '_'", topic.getName())));
+        softly.assertAll();
+    }
+
+    @QaseId(56)
+    @Test(priority = 17)
     public void checkRetentionBytesAccordingToMaxSizeOnDisk() {
         navigateToTopics();
         topicsList
@@ -422,7 +435,7 @@ public class TopicsTest extends BaseTest {
     }
 
     @QaseId(247)
-    @Test(priority = 17)
+    @Test(priority = 18)
     public void recreateTopicFromTopicProfile() {
         Topic topicToRecreate = new Topic()
                 .setName("topic-to-recreate-" + randomAlphabetic(5))
@@ -450,7 +463,7 @@ public class TopicsTest extends BaseTest {
     }
 
     @QaseId(8)
-    @Test(priority = 18)
+    @Test(priority = 19)
     public void checkCopyTopicPossibility() {
         Topic topicToCopy = new Topic()
                 .setName("topic-to-copy-" + randomAlphabetic(5))

+ 1 - 1
kafka-ui-react-app/src/components/ConsumerGroups/List.tsx

@@ -56,9 +56,9 @@ const List = () => {
         enableSorting: false,
       },
       {
+        id: ConsumerGroupOrdering.MESSAGES_BEHIND,
         header: 'Messages Behind',
         accessorKey: 'messagesBehind',
-        enableSorting: false,
       },
       {
         header: 'Coordinator',

+ 2 - 1
kafka-ui-react-app/src/components/ErrorPage/ErrorPage.styled.ts

@@ -9,7 +9,7 @@ export const Wrapper = styled.div`
   margin-top: 100px;
 `;
 
-export const Number = styled.div`
+export const Status = styled.div`
   font-size: 100px;
   color: ${({ theme }) => theme.default.color.normal};
   line-height: initial;
@@ -17,4 +17,5 @@ export const Number = styled.div`
 
 export const Text = styled.div`
   font-size: 20px;
+  color: ${({ theme }) => theme.default.color.normal};
 `;

+ 1 - 1
kafka-ui-react-app/src/components/ErrorPage/ErrorPage.tsx

@@ -16,7 +16,7 @@ const ErrorPage: React.FC<Props> = ({
 }) => {
   return (
     <S.Wrapper>
-      <S.Number>{status}</S.Number>
+      <S.Status>{status}</S.Status>
       <S.Text>{text}</S.Text>
       <Button buttonType="primary" buttonSize="M" to="/">
         {btnText}

+ 1 - 1
kafka-ui-react-app/src/components/Schemas/Edit/Form.tsx

@@ -110,7 +110,7 @@ const Form: React.FC = () => {
   return (
     <FormProvider {...methods}>
       <PageHeading
-        text="Edit"
+        text={`${subject} Edit`}
         backText="Schema Registry"
         backTo={clusterSchemasPath(clusterName)}
       />

+ 16 - 3
kafka-ui-react-app/src/components/Topics/List/BatchActionsBar.tsx

@@ -9,7 +9,6 @@ import {
   useDeleteTopic,
 } from 'lib/hooks/api/topics';
 import { useConfirm } from 'lib/hooks/useConfirm';
-import { Button } from 'components/common/Button/Button';
 import { clusterTopicCopyRelativePath } from 'lib/paths';
 import { useQueryClient } from '@tanstack/react-query';
 import { ActionCanButton } from 'components/common/ActionComponent';
@@ -108,6 +107,19 @@ const BatchActionsbar: React.FC<BatchActionsbarProps> = ({
     );
   }, [selectedTopics, clusterName, roles]);
 
+  const canCopySelectedTopic = useMemo(() => {
+    return selectedTopics.every((value) =>
+      isPermitted({
+        roles,
+        resource: ResourceType.TOPIC,
+        action: Action.CREATE,
+        value,
+        clusterName,
+        rbacFlag,
+      })
+    );
+  }, [selectedTopics, clusterName, roles]);
+
   const canPurgeSelectedTopics = useMemo(() => {
     return selectedTopics.every((value) =>
       isPermitted({
@@ -132,14 +144,15 @@ const BatchActionsbar: React.FC<BatchActionsbarProps> = ({
       >
         Delete selected topics
       </ActionCanButton>
-      <Button
+      <ActionCanButton
         buttonSize="M"
         buttonType="secondary"
         disabled={selectedTopics.length !== 1}
+        canDoAction={canCopySelectedTopic}
         to={getCopyTopicPath()}
       >
         Copy selected topic
-      </Button>
+      </ActionCanButton>
       <ActionCanButton
         buttonSize="M"
         buttonType="secondary"

+ 7 - 2
kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx

@@ -231,7 +231,10 @@ const Filters: React.FC<FiltersProps> = ({
         props.seekType = SeekType.TIMESTAMP;
       }
 
-      if (selectedPartitions.length !== partitions.length) {
+      if (
+        selectedPartitions.length !== partitions.length ||
+        currentSeekType === SeekType.TIMESTAMP
+      ) {
         // not everything in the partition is selected
         props.seekTo = selectedPartitions.map(({ value }) => {
           const offsetProperty =
@@ -320,7 +323,9 @@ const Filters: React.FC<FiltersProps> = ({
   // eslint-disable-next-line consistent-return
   React.useEffect(() => {
     if (location.search?.length !== 0) {
-      const url = `${BASE_PARAMS.basePath}/api/clusters/${clusterName}/topics/${topicName}/messages${location.search}`;
+      const url = `${BASE_PARAMS.basePath}/api/clusters/${encodeURIComponent(
+        clusterName
+      )}/topics/${topicName}/messages${location.search}`;
       const sse = new EventSource(url);
 
       source.current = sse;
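
Why the cluster name is now passed through `encodeURIComponent` before being spliced into the EventSource URL: a name containing reserved URL characters would otherwise produce a malformed path. A tiny illustration with a hypothetical cluster name:

```ts
// Reserved characters in the cluster name are percent-encoded.
const clusterName = 'my cluster/eu-1';
console.log(encodeURIComponent(clusterName)); // "my%20cluster%2Feu-1"
```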

+ 1 - 5
kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx

@@ -42,8 +42,6 @@ const Message: React.FC<Props> = ({
     key,
     partition,
     content,
-    valueFormat,
-    keyFormat,
     headers,
   },
   keyFilters,
@@ -51,7 +49,7 @@ const Message: React.FC<Props> = ({
 }) => {
   const [isOpen, setIsOpen] = React.useState(false);
   const savedMessageJson = {
-    Content: content,
+    Value: content,
     Offset: offset,
     Key: key,
     Partition: partition,
@@ -140,9 +138,7 @@ const Message: React.FC<Props> = ({
       {isOpen && (
         <MessageContent
           messageKey={key}
-          messageKeyFormat={keyFormat}
           messageContent={content}
-          messageContentFormat={valueFormat}
           headers={headers}
           timestamp={timestamp}
           timestampType={timestampType}

+ 10 - 11
kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx

@@ -3,6 +3,7 @@ import EditorViewer from 'components/common/EditorViewer/EditorViewer';
 import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted';
 import { SchemaType, TopicMessageTimestampTypeEnum } from 'generated-sources';
 import { formatTimestamp } from 'lib/dateTimeHelpers';
+import { useSearchParams } from 'react-router-dom';
 
 import * as S from './MessageContent.styled';
 
@@ -10,9 +11,7 @@ type Tab = 'key' | 'content' | 'headers';
 
 export interface MessageContentProps {
   messageKey?: string;
-  messageKeyFormat?: string;
   messageContent?: string;
-  messageContentFormat?: string;
   headers?: { [key: string]: string | undefined };
   timestamp?: Date;
   timestampType?: TopicMessageTimestampTypeEnum;
@@ -20,14 +19,15 @@
 
 const MessageContent: React.FC<MessageContentProps> = ({
   messageKey,
-  messageKeyFormat,
   messageContent,
-  messageContentFormat,
   headers,
   timestamp,
   timestampType,
 }) => {
   const [activeTab, setActiveTab] = React.useState<Tab>('content');
+  const [searchParams] = useSearchParams();
+  const keyFormat = searchParams.get('keySerde') || '';
+  const valueFormat = searchParams.get('valueSerde') || '';
 
   const activeTabContent = () => {
     switch (activeTab) {
@@ -54,7 +54,6 @@ const MessageContent: React.FC<MessageContentProps> = ({
     e.preventDefault();
     setActiveTab('headers');
   };
-
   const keySize = new TextEncoder().encode(messageKey).length;
   const contentSize = new TextEncoder().encode(messageContent).length;
   const contentType =
@@ -106,21 +105,21 @@ const MessageContent: React.FC<MessageContentProps> = ({
             </S.Metadata>
 
             <S.Metadata>
-              <S.MetadataLabel>Value</S.MetadataLabel>
+              <S.MetadataLabel>Key Serde</S.MetadataLabel>
               <span>
-                <S.MetadataValue>{messageContentFormat}</S.MetadataValue>
+                <S.MetadataValue>{keyFormat}</S.MetadataValue>
                 <S.MetadataMeta>
-                  Size: <BytesFormatted value={contentSize} />
+                  Size: <BytesFormatted value={keySize} />
                 </S.MetadataMeta>
               </span>
             </S.Metadata>
 
             <S.Metadata>
-              <S.MetadataLabel>Key</S.MetadataLabel>
+              <S.MetadataLabel>Value Serde</S.MetadataLabel>
               <span>
-                <S.MetadataValue>{messageKeyFormat}</S.MetadataValue>
+                <S.MetadataValue>{valueFormat}</S.MetadataValue>
                 <S.MetadataMeta>
-                  Size: <BytesFormatted value={keySize} />
+                  Size: <BytesFormatted value={contentSize} />
                 </S.MetadataMeta>
               </span>
             </S.Metadata>

+ 30 - 13
kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/__tests__/MessageContent.spec.tsx

@@ -16,9 +16,7 @@ const setupWrapper = (props?: Partial<MessageContentProps>) => {
       <tbody>
         <MessageContent
           messageKey='"test-key"'
-          messageKeyFormat="JSON"
           messageContent='{"data": "test"}'
-          messageContentFormat="AVRO"
           headers={{ header: 'test' }}
           timestamp={new Date(0)}
           timestampType={TopicMessageTimestampTypeEnum.CREATE_TIME}
@@ -34,14 +32,33 @@ const proto =
 
 global.TextEncoder = TextEncoder;
 
+const searchParamsContentAVRO = new URLSearchParams({
+  keySerde: 'SchemaRegistry',
+  valueSerde: 'AVRO',
+  limit: '100',
+});
+
+const searchParamsContentJSON = new URLSearchParams({
+  keySerde: 'SchemaRegistry',
+  valueSerde: 'JSON',
+  limit: '100',
+});
+
+const searchParamsContentPROTOBUF = new URLSearchParams({
+  keySerde: 'SchemaRegistry',
+  valueSerde: 'PROTOBUF',
+  limit: '100',
+});
 describe('MessageContent screen', () => {
   beforeEach(() => {
-    render(setupWrapper());
+    render(setupWrapper(), {
+      initialEntries: [`/messages?${searchParamsContentAVRO}`],
+    });
   });
 
   describe('renders', () => {
     it('key format in document', () => {
-      expect(screen.getByText('JSON')).toBeInTheDocument();
+      expect(screen.getByText('SchemaRegistry')).toBeInTheDocument();
     });
 
     it('content format in document', () => {
@@ -86,36 +103,36 @@ describe('checking content type depend on message type', () => {
   it('renders component with message having JSON type', () => {
     render(
       setupWrapper({
-        messageContentFormat: 'JSON',
         messageContent: '{"data": "test"}',
-      })
+      }),
+      { initialEntries: [`/messages?${searchParamsContentJSON}`] }
     );
-    expect(screen.getAllByText('JSON')[1]).toBeInTheDocument();
+    expect(screen.getByText('JSON')).toBeInTheDocument();
   });
   it('renders component with message having AVRO type', () => {
     render(
       setupWrapper({
-        messageContentFormat: 'AVRO',
         messageContent: '{"data": "test"}',
-      })
+      }),
+      { initialEntries: [`/messages?${searchParamsContentAVRO}`] }
     );
     expect(screen.getByText('AVRO')).toBeInTheDocument();
   });
   it('renders component with message having PROTOBUF type', () => {
     render(
       setupWrapper({
-        messageContentFormat: 'PROTOBUF',
         messageContent: proto,
-      })
+      }),
+      { initialEntries: [`/messages?${searchParamsContentPROTOBUF}`] }
     );
     expect(screen.getByText('PROTOBUF')).toBeInTheDocument();
   });
   it('renders component with message having no type which is equal to having PROTOBUF type', () => {
     render(
       setupWrapper({
-        messageContentFormat: 'PROTOBUF',
         messageContent: '',
-      })
+      }),
+      { initialEntries: [`/messages?${searchParamsContentPROTOBUF}`] }
     );
     expect(screen.getByText('PROTOBUF')).toBeInTheDocument();
   });

+ 14 - 1
kafka-ui-react-app/src/components/common/Button/Button.tsx

@@ -3,11 +3,13 @@ import StyledButton, {
 } from 'components/common/Button/Button.styled';
 import React from 'react';
 import { Link } from 'react-router-dom';
+import Spinner from 'components/common/Spinner/Spinner';
 
 export interface Props
   extends React.ButtonHTMLAttributes<HTMLButtonElement>,
     ButtonProps {
   to?: string | object;
+  inProgress?: boolean;
 }
 
 export const Button: React.FC<Props> = ({ to, ...props }) => {
@@ -20,5 +22,16 @@ export const Button: React.FC<Props> = ({ to, ...props }) => {
       </Link>
     );
   }
-  return <StyledButton type="button" {...props} />;
+  return (
+    <StyledButton
+      type="button"
+      disabled={props.disabled || props.inProgress}
+      {...props}
+    >
+      {props.children}{' '}
+      {props.inProgress ? (
+        <Spinner size={16} borderWidth={2} marginLeft={2} emptyBorderColor />
+      ) : null}
+    </StyledButton>
+  );
 };
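
A minimal usage sketch for the new `inProgress` prop (hypothetical call site; the consumer actually wired up in this change is the ClusterConfigForm submit button below): while `inProgress` is true the button is disabled and a small inline Spinner is rendered after its children.

```tsx
import React from 'react';
import { Button } from 'components/common/Button/Button';

// Shows a 16px spinner next to the label while `saving` is true.
const SaveButton: React.FC<{ saving: boolean }> = ({ saving }) => (
  <Button buttonType="primary" buttonSize="M" inProgress={saving}>
    Submit
  </Button>
);

export default SaveButton;
```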

+ 6 - 0
kafka-ui-react-app/src/components/common/Button/__tests__/Button.spec.tsx

@@ -58,4 +58,10 @@ describe('Button', () => {
       theme.button.primary.invertedColors.normal
     );
   });
+  it('renders disabled button and spinner when inProgress truthy', () => {
+    render(<Button buttonType="primary" buttonSize="M" inProgress />);
+    expect(screen.getByRole('button')).toBeInTheDocument();
+    expect(screen.getByRole('progressbar')).toBeInTheDocument();
+    expect(screen.getByRole('button')).toBeDisabled();
+  });
 });

+ 3 - 1
kafka-ui-react-app/src/components/common/Editor/Editor.tsx

@@ -50,8 +50,10 @@ export default styled(Editor)`
         theme.ksqlDb.query.editor.cell.backgroundColor};
       color: ${({ theme }) => theme.default.color.normal};
     }
-    .ace_line {
+    .ace_scroller {
       background-color: ${({ theme }) => theme.default.backgroundColor};
+    }
+    .ace_line {
       color: ${({ theme }) => theme.default.color.normal};
     }
     .ace_cursor {

+ 1 - 1
kafka-ui-react-app/src/components/common/EditorViewer/EditorViewer.styled.ts

@@ -5,7 +5,7 @@ export const Wrapper = styled.div`
   padding: 8px 16px;
   .ace_active-line {
     background-color: ${({ theme }) =>
-      theme.viewer.wrapper.backgroundColor} !important;
+      theme.default.backgroundColor} !important;
   }
   .ace_line {
     color: ${({ theme }) => theme.viewer.wrapper.color} !important;

+ 4 - 4
kafka-ui-react-app/src/components/common/NewTable/Table.tsx

@@ -142,15 +142,12 @@ const Table: React.FC<TableProps<any>> = ({
     (updater: UpdaterFn<PaginationState>) => {
       const newState = updatePaginationState(updater, searchParams);
       setSearchParams(searchParams);
+      setRowSelection({});
       return newState;
     },
     [searchParams]
   );
 
-  React.useEffect(() => {
-    setRowSelection({});
-  }, [searchParams]);
-
   const table = useReactTable({
     data,
     pageCount,
@@ -160,6 +157,9 @@ const Table: React.FC<TableProps<any>> = ({
       pagination: getPaginationFromSearchParams(searchParams),
       rowSelection,
     },
+    getRowId: (originalRow, index) => {
+      return originalRow.name ? originalRow.name : `${index}`;
+    },
     onSortingChange: onSortingChange as OnChangeFn<SortingState>,
     onPaginationChange: onPaginationChange as OnChangeFn<PaginationState>,
     onRowSelectionChange: setRowSelection,
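
The effect of the new `getRowId`, sketched in isolation (assumes rows expose a unique `name`, as topics do): selection is keyed by a stable id instead of the row index, so it stays attached to the same row across refetches, and the pagination handler above now clears it explicitly rather than through a separate effect.

```ts
// Row ids fall back to the index only when a name is missing.
type Row = { name?: string };

const getRowId = (originalRow: Row, index: number): string =>
  originalRow.name ? originalRow.name : `${index}`;

console.log(getRowId({ name: 'my-topic' }, 0)); // "my-topic"
console.log(getRowId({}, 3)); // "3"
```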

+ 1 - 21
kafka-ui-react-app/src/components/common/PageLoader/PageLoader.styled.ts

@@ -1,4 +1,4 @@
-import styled, { css } from 'styled-components';
+import styled from 'styled-components';
 
 export const Wrapper = styled.div`
   display: flex;
@@ -8,23 +8,3 @@ export const Wrapper = styled.div`
   height: 100%;
   width: 100%;
 `;
-
-export const Spinner = styled.div(
-  ({ theme }) => css`
-    border: 10px solid ${theme.pageLoader.borderColor};
-    border-bottom: 10px solid ${theme.pageLoader.borderBottomColor};
-    border-radius: 50%;
-    width: 80px;
-    height: 80px;
-    animation: spin 1.3s linear infinite;
-
-    @keyframes spin {
-      0% {
-        transform: rotate(0deg);
-      }
-      100% {
-        transform: rotate(360deg);
-      }
-    }
-  `
-);

+ 2 - 1
kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx

@@ -1,10 +1,11 @@
 import React from 'react';
+import Spinner from 'components/common/Spinner/Spinner';
 
 import * as S from './PageLoader.styled';
 
 const PageLoader: React.FC = () => (
   <S.Wrapper>
-    <S.Spinner role="progressbar" />
+    <Spinner />
   </S.Wrapper>
 );
 

+ 1 - 0
kafka-ui-react-app/src/components/common/Select/ControlledSelect.tsx

@@ -45,6 +45,7 @@ const ControlledSelect: React.FC<ControlledSelectProps> = ({
               options={options}
               placeholder={placeholder}
               disabled={disabled}
+              ref={field.ref}
             />
           );
         }}
         }}

+ 86 - 77
kafka-ui-react-app/src/components/common/Select/Select.tsx

@@ -27,90 +27,99 @@ export interface SelectOption {
   isLive?: boolean;
 }
 
-const Select: React.FC<SelectProps> = ({
-  options = [],
-  value,
-  defaultValue,
-  selectSize = 'L',
-  placeholder = '',
-  isLive,
-  disabled = false,
-  onChange,
-  isThemeMode,
-  ...props
-}) => {
-  const [selectedOption, setSelectedOption] = useState(value);
-  const [showOptions, setShowOptions] = useState(false);
+const Select = React.forwardRef<HTMLUListElement, SelectProps>(
+  (
+    {
+      options = [],
+      value,
+      defaultValue,
+      selectSize = 'L',
+      placeholder = '',
+      isLive,
+      disabled = false,
+      onChange,
+      isThemeMode,
+      ...props
+    },
+    ref
+  ) => {
+    const [selectedOption, setSelectedOption] = useState(value);
+    const [showOptions, setShowOptions] = useState(false);
 
-  const showOptionsHandler = () => {
-    if (!disabled) setShowOptions(!showOptions);
-  };
+    const showOptionsHandler = () => {
+      if (!disabled) setShowOptions(!showOptions);
+    };
 
-  const selectContainerRef = useRef(null);
-  const clickOutsideHandler = () => setShowOptions(false);
-  useClickOutside(selectContainerRef, clickOutsideHandler);
+    const selectContainerRef = useRef(null);
+    const clickOutsideHandler = () => setShowOptions(false);
+    useClickOutside(selectContainerRef, clickOutsideHandler);
 
-  const updateSelectedOption = (option: SelectOption) => {
-    if (!option.disabled) {
-      setSelectedOption(option.value);
+    const updateSelectedOption = (option: SelectOption) => {
+      if (!option.disabled) {
+        setSelectedOption(option.value);
 
-      if (onChange) {
-        onChange(option.value);
+        if (onChange) {
+          onChange(option.value);
+        }
+
+        setShowOptions(false);
       }
+    };
 
-      setShowOptions(false);
-    }
-  };
+    React.useEffect(() => {
+      setSelectedOption(value);
+    }, [isLive, value]);
 
-  React.useEffect(() => {
-    setSelectedOption(value);
-  }, [isLive, value]);
+    return (
+      <div ref={selectContainerRef}>
+        <S.Select
+          role="listbox"
+          selectSize={selectSize}
+          isLive={isLive}
+          disabled={disabled}
+          onClick={showOptionsHandler}
+          onKeyDown={showOptionsHandler}
+          isThemeMode={isThemeMode}
+          ref={ref}
+          tabIndex={0}
+          {...props}
+        >
+          <S.SelectedOptionWrapper>
+            {isLive && <LiveIcon />}
+            <S.SelectedOption
+              role="option"
+              tabIndex={0}
+              isThemeMode={isThemeMode}
+            >
+              {options.find(
+                (option) => option.value === (defaultValue || selectedOption)
+              )?.label || placeholder}
+            </S.SelectedOption>
+          </S.SelectedOptionWrapper>
+          {showOptions && (
+            <S.OptionList>
+              {options?.map((option) => (
+                <S.Option
+                  value={option.value}
+                  key={option.value}
+                  disabled={option.disabled}
+                  onClick={() => updateSelectedOption(option)}
+                  tabIndex={0}
+                  role="option"
+                >
+                  {option.isLive && <LiveIcon />}
+                  {option.label}
+                </S.Option>
+              ))}
+            </S.OptionList>
+          )}
+          <DropdownArrowIcon isOpen={showOptions} />
+        </S.Select>
+      </div>
+    );
+  }
+);
 
 
-  return (
-    <div ref={selectContainerRef}>
-      <S.Select
-        role="listbox"
-        selectSize={selectSize}
-        isLive={isLive}
-        disabled={disabled}
-        onClick={showOptionsHandler}
-        onKeyDown={showOptionsHandler}
-        isThemeMode={isThemeMode}
-        {...props}
-      >
-        <S.SelectedOptionWrapper>
-          {isLive && <LiveIcon />}
-          <S.SelectedOption
-            role="option"
-            tabIndex={0}
-            isThemeMode={isThemeMode}
-          >
-            {options.find(
-              (option) => option.value === (defaultValue || selectedOption)
-            )?.label || placeholder}
-          </S.SelectedOption>
-        </S.SelectedOptionWrapper>
-        {showOptions && (
-          <S.OptionList>
-            {options?.map((option) => (
-              <S.Option
-                value={option.value}
-                key={option.value}
-                disabled={option.disabled}
-                onClick={() => updateSelectedOption(option)}
-                tabIndex={0}
-                role="option"
-              >
-                {option.isLive && <LiveIcon />}
-                {option.label}
-              </S.Option>
-            ))}
-          </S.OptionList>
-        )}
-        <DropdownArrowIcon isOpen={showOptions} />
-      </S.Select>
-    </div>
-  );
-};
+Select.displayName = 'Select';
 
 export default Select;
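
A sketch of why Select is now wrapped in `React.forwardRef` (hypothetical form; ControlledSelect below passes `field.ref` the same way): react-hook-form can hold a ref to the rendered listbox and focus it when validation fails, e.g. via `trigger(undefined, { shouldFocus: true })`.

```tsx
import React from 'react';
import { Controller, useForm } from 'react-hook-form';
import Select from 'components/common/Select/Select';

const SerdeForm: React.FC = () => {
  const { control } = useForm<{ serde: string }>();
  return (
    <Controller
      name="serde"
      control={control}
      rules={{ required: true }}
      render={({ field }) => (
        <Select
          options={[{ value: 'JSON', label: 'JSON' }]}
          value={field.value}
          onChange={field.onChange}
          ref={field.ref} // focus target when validation fails
        />
      )}
    />
  );
};

export default SerdeForm;
```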

+ 26 - 0
kafka-ui-react-app/src/components/common/Spinner/Spinner.styled.ts

@@ -0,0 +1,26 @@
+import styled from 'styled-components';
+import { SpinnerProps } from 'components/common/Spinner/types';
+
+export const Spinner = styled.div<SpinnerProps>`
+  border-width: ${(props) => props.borderWidth}px;
+  border-style: solid;
+  border-color: ${({ theme }) => theme.pageLoader.borderColor};
+  border-bottom-color: ${(props) =>
+    props.emptyBorderColor
+      ? 'transparent'
+      : props.theme.pageLoader.borderBottomColor};
+  border-radius: 50%;
+  width: ${(props) => props.size}px;
+  height: ${(props) => props.size}px;
+  margin-left: ${(props) => props.marginLeft}px;
+  animation: spin 1.3s linear infinite;
+
+  @keyframes spin {
+    0% {
+      transform: rotate(0deg);
+    }
+    100% {
+      transform: rotate(360deg);
+    }
+  }
+`;

+ 20 - 0
kafka-ui-react-app/src/components/common/Spinner/Spinner.tsx

@@ -0,0 +1,20 @@
+/* eslint-disable react/default-props-match-prop-types */
+import React from 'react';
+import { SpinnerProps } from 'components/common/Spinner/types';
+
+import * as S from './Spinner.styled';
+
+const defaultProps: SpinnerProps = {
+  size: 80,
+  borderWidth: 10,
+  emptyBorderColor: false,
+  marginLeft: 0,
+};
+
+const Spinner: React.FC<SpinnerProps> = (props) => (
+  <S.Spinner role="progressbar" {...props} />
+);
+
+Spinner.defaultProps = defaultProps;
+
+export default Spinner;

+ 6 - 0
kafka-ui-react-app/src/components/common/Spinner/types.ts

@@ -0,0 +1,6 @@
+export interface SpinnerProps {
+  size?: number;
+  borderWidth?: number;
+  emptyBorderColor?: boolean;
+  marginLeft?: number;
+}
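
The extracted Spinner in isolation (sketch only; the real call sites in this change are PageLoader and Button above): the defaults reproduce the old 80px page loader, and the props cover the small inline variant.

```tsx
import React from 'react';
import Spinner from 'components/common/Spinner/Spinner';

// Full-size page spinner: all defaults (80px, 10px border).
export const PageSpinner: React.FC = () => <Spinner />;

// Small inline variant, as rendered inside Button while inProgress.
export const InlineSpinner: React.FC = () => (
  <Spinner size={16} borderWidth={2} marginLeft={2} emptyBorderColor />
);
```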

+ 3 - 1
kafka-ui-react-app/src/lib/hooks/api/ksqlDb.tsx

@@ -90,7 +90,9 @@ export const useKsqlkDbSSE = ({ clusterName, pipeId }: UseKsqlkDbSSEProps) => {
 
   React.useEffect(() => {
     const fetchData = async () => {
-      const url = `${BASE_PARAMS.basePath}/api/clusters/${clusterName}/ksql/response`;
+      const url = `${BASE_PARAMS.basePath}/api/clusters/${encodeURIComponent(
+        clusterName
+      )}/ksql/response`;
       await fetchEventSource(
         `${url}?${new URLSearchParams({ pipeId: pipeId || '' }).toString()}`,
         {

+ 3 - 1
kafka-ui-react-app/src/lib/hooks/api/topicMessages.tsx

@@ -51,7 +51,9 @@ export const useTopicMessages = ({
   React.useEffect(() => {
     const fetchData = async () => {
       setIsFetching(true);
-      const url = `${BASE_PARAMS.basePath}/api/clusters/${clusterName}/topics/${topicName}/messages`;
+      const url = `${BASE_PARAMS.basePath}/api/clusters/${encodeURIComponent(
+        clusterName
+      )}/topics/${topicName}/messages`;
       const requestParams = new URLSearchParams({
         limit,
         seekTo: seekTo.replaceAll('-', '::').replaceAll('.', ','),

+ 2 - 1
kafka-ui-react-app/src/widgets/ClusterConfigForm/index.tsx

@@ -75,7 +75,7 @@ const ClusterConfigForm: React.FC<ClusterConfigFormProps> = ({
   const onReset = () => methods.reset();
 
   const onValidate = async () => {
-    await trigger();
+    await trigger(undefined, { shouldFocus: true });
     if (!methods.formState.isValid) return;
     disableForm();
     const data = methods.getValues();
@@ -142,6 +142,7 @@ const ClusterConfigForm: React.FC<ClusterConfigFormProps> = ({
               buttonSize="L"
               buttonType="primary"
               disabled={isSubmitDisabled}
+              inProgress={isSubmitting}
             >
               Submit
             </Button>
             </Button>