Browse Source

Feature/backend init (#3)

* move react-app to its own folder inside project

* move backend to kafka-ui repo

* setup react inside netty

* make application ready in 2 commands

* update readme

* update readme

* update readme

* update readme

* update profiles for application start inside (sdp profile) and outside docker (local profile)

* broker metrics endpoint

* topics endpoint start commit

* topics details endpoint start commit //
dependencies and versions fix

* small pom updates //
continue review fixes

* fix review issues //
save errors //
save connections and update connection logic //
save jmx, zookeeper, kafka statuses //
error with getting one topic doesn't fail others //
async metrics processing //
cluster data storage refactoring

* properties version extracting

* properties versions

* topic details

* remove jmx, topic details, topic configs

* create topic

* final fixes, topic creation

* topic creation ui fixes

* add check for cases when cluster is offline
Yevgen Taran 5 năm trước cách đây
mục cha
commit
498eb96bee
100 tập tin đã thay đổi với 2476 bổ sung68 xóa
  1. 0 2
      .env
  2. 28 19
      .gitignore
  3. 117 0
      .mvn/wrapper/MavenWrapperDownloader.java
  4. BIN
      .mvn/wrapper/maven-wrapper.jar
  5. 2 0
      .mvn/wrapper/maven-wrapper.properties
  6. 14 19
      README.md
  7. 81 0
      docker/kafka-clusters-only.yaml
  8. 55 0
      docker/kafka-ui.yaml
  9. 117 0
      kafka-ui-api/.mvn/wrapper/MavenWrapperDownloader.java
  10. BIN
      kafka-ui-api/.mvn/wrapper/maven-wrapper.jar
  11. 2 0
      kafka-ui-api/.mvn/wrapper/maven-wrapper.properties
  12. 7 0
      kafka-ui-api/Dockerfile
  13. 310 0
      kafka-ui-api/mvnw
  14. 182 0
      kafka-ui-api/mvnw.cmd
  15. 208 0
      kafka-ui-api/pom.xml
  16. 17 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/KafkaMetricsApplication.java
  17. 26 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/ClustersMetricsScheduler.java
  18. 26 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/config/ClustersProperties.java
  19. 23 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/mapper/ClusterMapper.java
  20. 40 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/ClustersStorage.java
  21. 49 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/KafkaCluster.java
  22. 16 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/KafkaMetrics.java
  23. 61 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/ClusterService.java
  24. 25 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/MetricsUpdateService.java
  25. 45 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/kafka/KafkaConstants.java
  26. 262 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/kafka/KafkaService.java
  27. 56 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/MetricsRestController.java
  28. 22 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/config/CorsGlobalConfiguration.java
  29. 19 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/config/CustomWebFilter.java
  30. 10 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/zookeeper/ZooKeeperConstants.java
  31. 60 0
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/zookeeper/ZookeeperService.java
  32. 14 0
      kafka-ui-api/src/main/resources/application-local.yml
  33. 16 0
      kafka-ui-api/src/main/resources/application-sdp.yml
  34. 16 0
      kafka-ui-api/src/main/resources/application.yml
  35. 26 0
      kafka-ui-api/src/main/resources/log4j2.xml
  36. 12 0
      kafka-ui-api/src/test/java/com/provectus/kafka/ui/KafkaMetricsApplicationTests.java
  37. 79 0
      kafka-ui-contract/pom.xml
  38. 330 0
      kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
  39. 2 0
      kafka-ui-react-app/.env
  40. 26 0
      kafka-ui-react-app/.gitignore
  41. 0 0
      kafka-ui-react-app/LICENSE
  42. 39 0
      kafka-ui-react-app/README.md
  43. 0 0
      kafka-ui-react-app/docker-compose.yaml
  44. 0 0
      kafka-ui-react-app/mock/index.js
  45. 0 0
      kafka-ui-react-app/mock/payload/brokerMetrics.json
  46. 0 0
      kafka-ui-react-app/mock/payload/brokers.json
  47. 0 0
      kafka-ui-react-app/mock/payload/clusters.json
  48. 0 0
      kafka-ui-react-app/mock/payload/topicConfigs.json
  49. 0 0
      kafka-ui-react-app/mock/payload/topicDetails.json
  50. 0 0
      kafka-ui-react-app/mock/payload/topics.json
  51. 0 0
      kafka-ui-react-app/package-lock.json
  52. 0 0
      kafka-ui-react-app/package.json
  53. 0 0
      kafka-ui-react-app/public/favicon.ico
  54. 0 0
      kafka-ui-react-app/public/index.html
  55. 0 0
      kafka-ui-react-app/public/manifest.json
  56. 0 0
      kafka-ui-react-app/public/robots.txt
  57. 0 0
      kafka-ui-react-app/src/components/App.scss
  58. 0 0
      kafka-ui-react-app/src/components/App.tsx
  59. 0 0
      kafka-ui-react-app/src/components/AppContainer.tsx
  60. 0 0
      kafka-ui-react-app/src/components/Brokers/Brokers.tsx
  61. 0 0
      kafka-ui-react-app/src/components/Brokers/BrokersContainer.ts
  62. 0 0
      kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx
  63. 0 0
      kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClustersWidget.tsx
  64. 0 0
      kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClustersWidgetContainer.ts
  65. 0 0
      kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx
  66. 0 0
      kafka-ui-react-app/src/components/Nav/ClusterMenu.tsx
  67. 0 0
      kafka-ui-react-app/src/components/Nav/Nav.tsx
  68. 0 0
      kafka-ui-react-app/src/components/Nav/NavConatiner.ts
  69. 0 0
      kafka-ui-react-app/src/components/Topics/Details/Details.tsx
  70. 0 0
      kafka-ui-react-app/src/components/Topics/Details/DetailsContainer.ts
  71. 0 0
      kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx
  72. 0 0
      kafka-ui-react-app/src/components/Topics/Details/Messages/MessagesContainer.ts
  73. 1 1
      kafka-ui-react-app/src/components/Topics/Details/Overview/Overview.tsx
  74. 0 0
      kafka-ui-react-app/src/components/Topics/Details/Overview/OverviewContainer.ts
  75. 0 0
      kafka-ui-react-app/src/components/Topics/Details/Settings/Settings.tsx
  76. 0 0
      kafka-ui-react-app/src/components/Topics/Details/Settings/SettingsContainer.ts
  77. 0 0
      kafka-ui-react-app/src/components/Topics/List/List.tsx
  78. 0 0
      kafka-ui-react-app/src/components/Topics/List/ListContainer.ts
  79. 0 0
      kafka-ui-react-app/src/components/Topics/List/ListItem.tsx
  80. 27 21
      kafka-ui-react-app/src/components/Topics/New/New.tsx
  81. 4 2
      kafka-ui-react-app/src/components/Topics/New/NewContainer.ts
  82. 0 0
      kafka-ui-react-app/src/components/Topics/Topics.tsx
  83. 0 0
      kafka-ui-react-app/src/components/Topics/TopicsContainer.ts
  84. 0 0
      kafka-ui-react-app/src/components/common/Breadcrumb/Breadcrumb.tsx
  85. 0 0
      kafka-ui-react-app/src/components/common/Dashboard/Indicator.tsx
  86. 0 0
      kafka-ui-react-app/src/components/common/Dashboard/MetricsWrapper.tsx
  87. 0 0
      kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx
  88. 0 0
      kafka-ui-react-app/src/index.tsx
  89. 0 0
      kafka-ui-react-app/src/lib/constants.ts
  90. 0 0
      kafka-ui-react-app/src/lib/hooks/useInterval.ts
  91. 0 0
      kafka-ui-react-app/src/lib/paths.ts
  92. 0 0
      kafka-ui-react-app/src/lib/utils/formatBytes.ts
  93. 0 0
      kafka-ui-react-app/src/react-app-env.d.ts
  94. 0 0
      kafka-ui-react-app/src/redux/actionType.ts
  95. 1 1
      kafka-ui-react-app/src/redux/actions/actions.ts
  96. 0 0
      kafka-ui-react-app/src/redux/actions/index.ts
  97. 3 3
      kafka-ui-react-app/src/redux/actions/thunks.ts
  98. 0 0
      kafka-ui-react-app/src/redux/api/brokers.ts
  99. 0 0
      kafka-ui-react-app/src/redux/api/clusters.ts
  100. 0 0
      kafka-ui-react-app/src/redux/api/index.ts

+ 0 - 2
.env

@@ -1,2 +0,0 @@
-# Kafka REST API
-REACT_APP_API_URL=http://localhost:3004

+ 28 - 19
.gitignore

@@ -1,23 +1,32 @@
-# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
+HELP.md
+target/
+!.mvn/wrapper/maven-wrapper.jar
+!**/src/main/**
+!**/src/test/**
 
-# dependencies
-node_modules
-.pnp
-.pnp.js
+### STS ###
+.apt_generated
+.classpath
+.factorypath
+.project
+.settings
+.springBeans
+.sts4-cache
 
-# testing
-coverage
+### IntelliJ IDEA ###
+.idea
+*.iws
+*.iml
+*.ipr
 
-# production
-build
+### NetBeans ###
+/nbproject/private/
+/nbbuild/
+/dist/
+/nbdist/
+/.nb-gradle/
+build/
 
-# misc
-.DS_Store
-.env.local
-.env.development.local
-.env.test.local
-.env.production.local
-
-npm-debug.log*
-yarn-debug.log*
-yarn-error.log*
+### VS Code ###
+.vscode/
+/kafka-ui-api/app/node

+ 117 - 0
.mvn/wrapper/MavenWrapperDownloader.java

@@ -0,0 +1,117 @@
+/*
+ * Copyright 2007-present the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import java.net.*;
+import java.io.*;
+import java.nio.channels.*;
+import java.util.Properties;
+
+public class MavenWrapperDownloader {
+
+    private static final String WRAPPER_VERSION = "0.5.6";
+    /**
+     * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
+     */
+    private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
+        + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";
+
+    /**
+     * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
+     * use instead of the default one.
+     */
+    private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
+            ".mvn/wrapper/maven-wrapper.properties";
+
+    /**
+     * Path where the maven-wrapper.jar will be saved to.
+     */
+    private static final String MAVEN_WRAPPER_JAR_PATH =
+            ".mvn/wrapper/maven-wrapper.jar";
+
+    /**
+     * Name of the property which should be used to override the default download url for the wrapper.
+     */
+    private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
+
+    public static void main(String args[]) {
+        System.out.println("- Downloader started");
+        File baseDirectory = new File(args[0]);
+        System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());
+
+        // If the maven-wrapper.properties exists, read it and check if it contains a custom
+        // wrapperUrl parameter.
+        File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
+        String url = DEFAULT_DOWNLOAD_URL;
+        if(mavenWrapperPropertyFile.exists()) {
+            FileInputStream mavenWrapperPropertyFileInputStream = null;
+            try {
+                mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
+                Properties mavenWrapperProperties = new Properties();
+                mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
+                url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
+            } catch (IOException e) {
+                System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
+            } finally {
+                try {
+                    if(mavenWrapperPropertyFileInputStream != null) {
+                        mavenWrapperPropertyFileInputStream.close();
+                    }
+                } catch (IOException e) {
+                    // Ignore ...
+                }
+            }
+        }
+        System.out.println("- Downloading from: " + url);
+
+        File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
+        if(!outputFile.getParentFile().exists()) {
+            if(!outputFile.getParentFile().mkdirs()) {
+                System.out.println(
+                        "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
+            }
+        }
+        System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
+        try {
+            downloadFileFromURL(url, outputFile);
+            System.out.println("Done");
+            System.exit(0);
+        } catch (Throwable e) {
+            System.out.println("- Error downloading");
+            e.printStackTrace();
+            System.exit(1);
+        }
+    }
+
+    private static void downloadFileFromURL(String urlString, File destination) throws Exception {
+        if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
+            String username = System.getenv("MVNW_USERNAME");
+            char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
+            Authenticator.setDefault(new Authenticator() {
+                @Override
+                protected PasswordAuthentication getPasswordAuthentication() {
+                    return new PasswordAuthentication(username, password);
+                }
+            });
+        }
+        URL website = new URL(urlString);
+        ReadableByteChannel rbc;
+        rbc = Channels.newChannel(website.openStream());
+        FileOutputStream fos = new FileOutputStream(destination);
+        fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
+        fos.close();
+        rbc.close();
+    }
+
+}

BIN
.mvn/wrapper/maven-wrapper.jar


+ 2 - 0
.mvn/wrapper/maven-wrapper.properties

@@ -0,0 +1,2 @@
+distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip
+wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar

+ 14 - 19
README.md

@@ -1,39 +1,34 @@
 # Kafka-UI
-UI for Apache Kafka management
 
-## Table of contents
-- [Getting started](#getting-started)
-- [Links](#links)
+UI for Apache Kafka management
 
 ## Getting started
 
-Install packages
+Build application and docker container
 
 ```
-npm install
+./mvnw clean install -Pprod
 ```
 
-Set correct URL to your API server in `.env`.
+Start application with kafka clusters
 
 ```
-REACT_APP_API_URL=http://api.your-kafka-rest-api.com:3004
+docker-compose -f ./docker/kafka-ui.yaml
 ```
 
-Start JSON Server if you prefer to use default full fake REST API.
-
-```
-npm run mock
-```
+Application should be available at http://localhost:8080 .
 
-Start application
+To start only kafka-clusters:
 
 ```
-npm start
+docker-compose -f ./docker/kafka-clusters-only.yaml
 ```
 
+Kafka-ui then should be started with **local** profile
 
-## Links
+### Run application without docker:
 
-* [JSON Server](https://github.com/typicode/json-server) - Fake REST API.
-* [Bulma](https://bulma.io/documentation/) - free, open source CSS framework based on Flexbox
-* [Create React App](https://github.com/facebook/create-react-app)
+```
+cd kafka-ui-api
+./mvnw spring-boot:run -Pprod
+```

+ 81 - 0
docker/kafka-clusters-only.yaml

@@ -0,0 +1,81 @@
+---
+version: '2'
+services:
+
+  zookeeper0:
+    image: confluentinc/cp-zookeeper:5.1.0
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+      ZOOKEEPER_TICK_TIME: 2000
+    ports:
+     - 2181:2181
+
+  kafka0:
+    image: confluentinc/cp-kafka:5.1.0
+    depends_on:
+      - zookeeper0
+    ports:
+      - 29091:29091
+      - 9997:9997
+    environment:
+      KAFKA_BROKER_ID: 1
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper0:2181
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:29091,PLAINTEXT_HOST://localhost:9091,PLAIN://kafka0:29090
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,PLAIN:PLAINTEXT
+      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+      JMX_PORT: 9997
+      KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=localhost -Dcom.sun.management.jmxremote.rmi.port=9997
+
+  kafka-init-topics0:
+    image: confluentinc/cp-kafka:5.1.0
+    depends_on:
+      - kafka0
+    command: "bash -c 'echo Waiting for Kafka to be ready... && \
+                cub kafka-ready -b kafka0:29090 1 20 && \
+                kafka-topics --create --topic users --partitions 2 --replication-factor 1 --if-not-exists --zookeeper zookeeper0:2181 && \
+                kafka-topics --create --topic messages --partitions 3 --replication-factor 1 --if-not-exists --zookeeper zookeeper0:2181'"
+    environment:
+      KAFKA_BROKER_ID: ignored
+      KAFKA_ZOOKEEPER_CONNECT: ignored
+    networks:
+      - default
+
+  zookeeper1:
+    image: confluentinc/cp-zookeeper:5.1.0
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+      ZOOKEEPER_TICK_TIME: 2000
+    ports:
+      - 2182:2181
+
+  kafka1:
+    image: confluentinc/cp-kafka:5.1.0
+    depends_on:
+      - zookeeper1
+    ports:
+      - 29092:29092
+      - 9998:9998
+    environment:
+      KAFKA_BROKER_ID: 1
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper1:2181
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:29092,PLAINTEXT_HOST://localhost:9092,PLAIN://kafka1:29090
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,PLAIN:PLAINTEXT
+      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+      JMX_PORT: 9998
+      KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=localhost -Dcom.sun.management.jmxremote.rmi.port=9998
+
+  kafka-init-topics1:
+    image: confluentinc/cp-kafka:5.1.0
+    depends_on:
+      - kafka1
+    command: "bash -c 'echo Waiting for Kafka to be ready... && \
+                cub kafka-ready -b kafka1:29090 1 20 && \
+                kafka-topics --create --topic users --partitions 3 --replication-factor 1 --if-not-exists --zookeeper zookeeper1:2181 && \
+                kafka-topics --create --topic messages --partitions 2 --replication-factor 1 --if-not-exists --zookeeper zookeeper1:2181'"
+    environment:
+      KAFKA_BROKER_ID: ignored
+      KAFKA_ZOOKEEPER_CONNECT: ignored
+    networks:
+      - default

+ 55 - 0
docker/kafka-ui.yaml

@@ -0,0 +1,55 @@
+---
+version: '2'
+services:
+
+  kafka-ui:
+    container_name: kafka-ui-api
+    image: kafka-ui-api:latest
+    ports:
+      - 8080:8080
+    depends_on:
+      - zookeeper0
+      - zookeeper1
+      - kafka0
+      - kafka1
+    command: [ "java", "-jar", "kafka-ui-api.jar", "--spring.profiles.active=sdp"]
+
+  zookeeper0:
+    image: confluentinc/cp-zookeeper:5.1.0
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+      ZOOKEEPER_TICK_TIME: 2000
+
+  kafka0:
+    image: confluentinc/cp-kafka:5.1.0
+    depends_on:
+      - zookeeper0
+    environment:
+      KAFKA_BROKER_ID: 1
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper0:2181
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+      JMX_PORT: 9997
+      KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka0 -Dcom.sun.management.jmxremote.rmi.port=9997
+
+  zookeeper1:
+    image: confluentinc/cp-zookeeper:5.1.0
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+      ZOOKEEPER_TICK_TIME: 2000
+
+  kafka1:
+    image: confluentinc/cp-kafka:5.1.0
+    depends_on:
+      - zookeeper1
+    environment:
+      KAFKA_BROKER_ID: 1
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper1:2181
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka1:29092,PLAINTEXT_HOST://localhost:9092
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
+      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+      JMX_PORT: 9997
+      KAFKA_JMX_OPTS: -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Djava.rmi.server.hostname=kafka1 -Dcom.sun.management.jmxremote.rmi.port=9997

+ 117 - 0
kafka-ui-api/.mvn/wrapper/MavenWrapperDownloader.java

@@ -0,0 +1,117 @@
+/*
+ * Copyright 2007-present the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import java.net.*;
+import java.io.*;
+import java.nio.channels.*;
+import java.util.Properties;
+
+public class MavenWrapperDownloader {
+
+    private static final String WRAPPER_VERSION = "0.5.6";
+    /**
+     * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
+     */
+    private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
+        + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";
+
+    /**
+     * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
+     * use instead of the default one.
+     */
+    private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
+            ".mvn/wrapper/maven-wrapper.properties";
+
+    /**
+     * Path where the maven-wrapper.jar will be saved to.
+     */
+    private static final String MAVEN_WRAPPER_JAR_PATH =
+            ".mvn/wrapper/maven-wrapper.jar";
+
+    /**
+     * Name of the property which should be used to override the default download url for the wrapper.
+     */
+    private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
+
+    public static void main(String args[]) {
+        System.out.println("- Downloader started");
+        File baseDirectory = new File(args[0]);
+        System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());
+
+        // If the maven-wrapper.properties exists, read it and check if it contains a custom
+        // wrapperUrl parameter.
+        File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
+        String url = DEFAULT_DOWNLOAD_URL;
+        if(mavenWrapperPropertyFile.exists()) {
+            FileInputStream mavenWrapperPropertyFileInputStream = null;
+            try {
+                mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
+                Properties mavenWrapperProperties = new Properties();
+                mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
+                url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
+            } catch (IOException e) {
+                System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
+            } finally {
+                try {
+                    if(mavenWrapperPropertyFileInputStream != null) {
+                        mavenWrapperPropertyFileInputStream.close();
+                    }
+                } catch (IOException e) {
+                    // Ignore ...
+                }
+            }
+        }
+        System.out.println("- Downloading from: " + url);
+
+        File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
+        if(!outputFile.getParentFile().exists()) {
+            if(!outputFile.getParentFile().mkdirs()) {
+                System.out.println(
+                        "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
+            }
+        }
+        System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
+        try {
+            downloadFileFromURL(url, outputFile);
+            System.out.println("Done");
+            System.exit(0);
+        } catch (Throwable e) {
+            System.out.println("- Error downloading");
+            e.printStackTrace();
+            System.exit(1);
+        }
+    }
+
+    private static void downloadFileFromURL(String urlString, File destination) throws Exception {
+        if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
+            String username = System.getenv("MVNW_USERNAME");
+            char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
+            Authenticator.setDefault(new Authenticator() {
+                @Override
+                protected PasswordAuthentication getPasswordAuthentication() {
+                    return new PasswordAuthentication(username, password);
+                }
+            });
+        }
+        URL website = new URL(urlString);
+        ReadableByteChannel rbc;
+        rbc = Channels.newChannel(website.openStream());
+        FileOutputStream fos = new FileOutputStream(destination);
+        fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
+        fos.close();
+        rbc.close();
+    }
+
+}

BIN
kafka-ui-api/.mvn/wrapper/maven-wrapper.jar


+ 2 - 0
kafka-ui-api/.mvn/wrapper/maven-wrapper.properties

@@ -0,0 +1,2 @@
+distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip
+wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar

+ 7 - 0
kafka-ui-api/Dockerfile

@@ -0,0 +1,7 @@
+FROM openjdk:13
+VOLUME /tmp
+ARG JAR_FILE
+COPY "/target/${JAR_FILE}" "/kafka-ui-api.jar"
+
+EXPOSE 8080
+CMD java -jar kafka-ui-api.jar

+ 310 - 0
kafka-ui-api/mvnw

@@ -0,0 +1,310 @@
+#!/bin/sh
+# ----------------------------------------------------------------------------
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#    https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ----------------------------------------------------------------------------
+
+# ----------------------------------------------------------------------------
+# Maven Start Up Batch script
+#
+# Required ENV vars:
+# ------------------
+#   JAVA_HOME - location of a JDK home dir
+#
+# Optional ENV vars
+# -----------------
+#   M2_HOME - location of maven2's installed home dir
+#   MAVEN_OPTS - parameters passed to the Java VM when running Maven
+#     e.g. to debug Maven itself, use
+#       set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
+#   MAVEN_SKIP_RC - flag to disable loading of mavenrc files
+# ----------------------------------------------------------------------------
+
+if [ -z "$MAVEN_SKIP_RC" ] ; then
+
+  if [ -f /etc/mavenrc ] ; then
+    . /etc/mavenrc
+  fi
+
+  if [ -f "$HOME/.mavenrc" ] ; then
+    . "$HOME/.mavenrc"
+  fi
+
+fi
+
+# OS specific support.  $var _must_ be set to either true or false.
+cygwin=false;
+darwin=false;
+mingw=false
+case "`uname`" in
+  CYGWIN*) cygwin=true ;;
+  MINGW*) mingw=true;;
+  Darwin*) darwin=true
+    # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home
+    # See https://developer.apple.com/library/mac/qa/qa1170/_index.html
+    if [ -z "$JAVA_HOME" ]; then
+      if [ -x "/usr/libexec/java_home" ]; then
+        export JAVA_HOME="`/usr/libexec/java_home`"
+      else
+        export JAVA_HOME="/Library/Java/Home"
+      fi
+    fi
+    ;;
+esac
+
+if [ -z "$JAVA_HOME" ] ; then
+  if [ -r /etc/gentoo-release ] ; then
+    JAVA_HOME=`java-config --jre-home`
+  fi
+fi
+
+if [ -z "$M2_HOME" ] ; then
+  ## resolve links - $0 may be a link to maven's home
+  PRG="$0"
+
+  # need this for relative symlinks
+  while [ -h "$PRG" ] ; do
+    ls=`ls -ld "$PRG"`
+    link=`expr "$ls" : '.*-> \(.*\)$'`
+    if expr "$link" : '/.*' > /dev/null; then
+      PRG="$link"
+    else
+      PRG="`dirname "$PRG"`/$link"
+    fi
+  done
+
+  saveddir=`pwd`
+
+  M2_HOME=`dirname "$PRG"`/..
+
+  # make it fully qualified
+  M2_HOME=`cd "$M2_HOME" && pwd`
+
+  cd "$saveddir"
+  # echo Using m2 at $M2_HOME
+fi
+
+# For Cygwin, ensure paths are in UNIX format before anything is touched
+if $cygwin ; then
+  [ -n "$M2_HOME" ] &&
+    M2_HOME=`cygpath --unix "$M2_HOME"`
+  [ -n "$JAVA_HOME" ] &&
+    JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+  [ -n "$CLASSPATH" ] &&
+    CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
+fi
+
+# For Mingw, ensure paths are in UNIX format before anything is touched
+if $mingw ; then
+  [ -n "$M2_HOME" ] &&
+    M2_HOME="`(cd "$M2_HOME"; pwd)`"
+  [ -n "$JAVA_HOME" ] &&
+    JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`"
+fi
+
+if [ -z "$JAVA_HOME" ]; then
+  javaExecutable="`which javac`"
+  if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then
+    # readlink(1) is not available as standard on Solaris 10.
+    readLink=`which readlink`
+    if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then
+      if $darwin ; then
+        javaHome="`dirname \"$javaExecutable\"`"
+        javaExecutable="`cd \"$javaHome\" && pwd -P`/javac"
+      else
+        javaExecutable="`readlink -f \"$javaExecutable\"`"
+      fi
+      javaHome="`dirname \"$javaExecutable\"`"
+      javaHome=`expr "$javaHome" : '\(.*\)/bin'`
+      JAVA_HOME="$javaHome"
+      export JAVA_HOME
+    fi
+  fi
+fi
+
+if [ -z "$JAVACMD" ] ; then
+  if [ -n "$JAVA_HOME"  ] ; then
+    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+      # IBM's JDK on AIX uses strange locations for the executables
+      JAVACMD="$JAVA_HOME/jre/sh/java"
+    else
+      JAVACMD="$JAVA_HOME/bin/java"
+    fi
+  else
+    JAVACMD="`which java`"
+  fi
+fi
+
+if [ ! -x "$JAVACMD" ] ; then
+  echo "Error: JAVA_HOME is not defined correctly." >&2
+  echo "  We cannot execute $JAVACMD" >&2
+  exit 1
+fi
+
+if [ -z "$JAVA_HOME" ] ; then
+  echo "Warning: JAVA_HOME environment variable is not set."
+fi
+
+CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher
+
+# traverses directory structure from process work directory to filesystem root
+# first directory with .mvn subdirectory is considered project base directory
+# (note: if no .mvn directory is found anywhere up the tree, the starting
+# directory passed as $1 is echoed unchanged — basedir keeps its initial value)
+find_maven_basedir() {
+
+  if [ -z "$1" ]
+  then
+    echo "Path not specified to find_maven_basedir"
+    return 1
+  fi
+
+  basedir="$1"
+  wdir="$1"
+  while [ "$wdir" != '/' ] ; do
+    if [ -d "$wdir"/.mvn ] ; then
+      basedir=$wdir
+      break
+    fi
+    # workaround for JBEAP-8937 (on Solaris 10/Sparc)
+    if [ -d "${wdir}" ]; then
+      wdir=`cd "$wdir/.."; pwd`
+    fi
+    # end of workaround
+  done
+  echo "${basedir}"
+}
+
+# concatenates all lines of a file into one space-separated line
+# (used later to fold .mvn/jvm.config into MAVEN_OPTS);
+# prints nothing when the file does not exist
+concat_lines() {
+  if [ -f "$1" ]; then
+    echo "$(tr -s '\n' ' ' < "$1")"
+  fi
+}
+
+BASE_DIR=`find_maven_basedir "$(pwd)"`
+if [ -z "$BASE_DIR" ]; then
+  exit 1;
+fi
+
+##########################################################################################
+# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
+# This allows using the maven wrapper in projects that prohibit checking in binary data.
+##########################################################################################
+if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then
+    if [ "$MVNW_VERBOSE" = true ]; then
+      echo "Found .mvn/wrapper/maven-wrapper.jar"
+    fi
+else
+    if [ "$MVNW_VERBOSE" = true ]; then
+      echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..."
+    fi
+    if [ -n "$MVNW_REPOURL" ]; then
+      jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
+    else
+      jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
+    fi
+    while IFS="=" read key value; do
+      case "$key" in (wrapperUrl) jarUrl="$value"; break ;;
+      esac
+    done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties"
+    if [ "$MVNW_VERBOSE" = true ]; then
+      echo "Downloading from: $jarUrl"
+    fi
+    wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar"
+    if $cygwin; then
+      wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"`
+    fi
+
+    if command -v wget > /dev/null; then
+        if [ "$MVNW_VERBOSE" = true ]; then
+          echo "Found wget ... using wget"
+        fi
+        if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
+            wget "$jarUrl" -O "$wrapperJarPath"
+        else
+            wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath"
+        fi
+    elif command -v curl > /dev/null; then
+        if [ "$MVNW_VERBOSE" = true ]; then
+          echo "Found curl ... using curl"
+        fi
+        if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then
+            curl -o "$wrapperJarPath" "$jarUrl" -f
+        else
+            curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f
+        fi
+
+    else
+        if [ "$MVNW_VERBOSE" = true ]; then
+          echo "Falling back to using Java to download"
+        fi
+        javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java"
+        # For Cygwin, switch paths to Windows format before running javac
+        if $cygwin; then
+          javaClass=`cygpath --path --windows "$javaClass"`
+        fi
+        if [ -e "$javaClass" ]; then
+            if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
+                if [ "$MVNW_VERBOSE" = true ]; then
+                  echo " - Compiling MavenWrapperDownloader.java ..."
+                fi
+                # Compiling the Java class
+                ("$JAVA_HOME/bin/javac" "$javaClass")
+            fi
+            if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then
+                # Running the downloader
+                if [ "$MVNW_VERBOSE" = true ]; then
+                  echo " - Running MavenWrapperDownloader.java ..."
+                fi
+                ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR")
+            fi
+        fi
+    fi
+fi
+##########################################################################################
+# End of extension
+##########################################################################################
+
+export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"}
+if [ "$MVNW_VERBOSE" = true ]; then
+  echo $MAVEN_PROJECTBASEDIR
+fi
+MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS"
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin; then
+  [ -n "$M2_HOME" ] &&
+    M2_HOME=`cygpath --path --windows "$M2_HOME"`
+  [ -n "$JAVA_HOME" ] &&
+    JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
+  [ -n "$CLASSPATH" ] &&
+    CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
+  [ -n "$MAVEN_PROJECTBASEDIR" ] &&
+    MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"`
+fi
+
+# Provide a "standardized" way to retrieve the CLI args that will
+# work with both Windows and non-Windows executions.
+MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@"
+export MAVEN_CMD_LINE_ARGS
+
+WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
+
+exec "$JAVACMD" \
+  $MAVEN_OPTS \
+  -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \
+  "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \
+  ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@"

+ 182 - 0
kafka-ui-api/mvnw.cmd

@@ -0,0 +1,182 @@
+@REM ----------------------------------------------------------------------------
+@REM Licensed to the Apache Software Foundation (ASF) under one
+@REM or more contributor license agreements.  See the NOTICE file
+@REM distributed with this work for additional information
+@REM regarding copyright ownership.  The ASF licenses this file
+@REM to you under the Apache License, Version 2.0 (the
+@REM "License"); you may not use this file except in compliance
+@REM with the License.  You may obtain a copy of the License at
+@REM
+@REM    https://www.apache.org/licenses/LICENSE-2.0
+@REM
+@REM Unless required by applicable law or agreed to in writing,
+@REM software distributed under the License is distributed on an
+@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+@REM KIND, either express or implied.  See the License for the
+@REM specific language governing permissions and limitations
+@REM under the License.
+@REM ----------------------------------------------------------------------------
+
+@REM ----------------------------------------------------------------------------
+@REM Maven Start Up Batch script
+@REM
+@REM Required ENV vars:
+@REM JAVA_HOME - location of a JDK home dir
+@REM
+@REM Optional ENV vars
+@REM M2_HOME - location of maven2's installed home dir
+@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands
+@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending
+@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven
+@REM     e.g. to debug Maven itself, use
+@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000
+@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files
+@REM ----------------------------------------------------------------------------
+
+@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on'
+@echo off
+@REM set title of command window
+title %0
+@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on'
+@if "%MAVEN_BATCH_ECHO%" == "on"  echo %MAVEN_BATCH_ECHO%
+
+@REM set %HOME% to equivalent of $HOME
+if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%")
+
+@REM Execute a user defined script before this one
+if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre
+@REM check for pre script, once with legacy .bat ending and once with .cmd ending
+if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat"
+if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd"
+:skipRcPre
+
+@setlocal
+
+set ERROR_CODE=0
+
+@REM To isolate internal variables from possible post scripts, we use another setlocal
+@setlocal
+
+@REM ==== START VALIDATION ====
+if not "%JAVA_HOME%" == "" goto OkJHome
+
+echo.
+echo Error: JAVA_HOME not found in your environment. >&2
+echo Please set the JAVA_HOME variable in your environment to match the >&2
+echo location of your Java installation. >&2
+echo.
+goto error
+
+:OkJHome
+if exist "%JAVA_HOME%\bin\java.exe" goto init
+
+echo.
+echo Error: JAVA_HOME is set to an invalid directory. >&2
+echo JAVA_HOME = "%JAVA_HOME%" >&2
+echo Please set the JAVA_HOME variable in your environment to match the >&2
+echo location of your Java installation. >&2
+echo.
+goto error
+
+@REM ==== END VALIDATION ====
+
+:init
+
+@REM Find the project base dir, i.e. the directory that contains the folder ".mvn".
+@REM Fallback to current working directory if not found.
+
+set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR%
+IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir
+
+set EXEC_DIR=%CD%
+set WDIR=%EXEC_DIR%
+:findBaseDir
+IF EXIST "%WDIR%"\.mvn goto baseDirFound
+cd ..
+IF "%WDIR%"=="%CD%" goto baseDirNotFound
+set WDIR=%CD%
+goto findBaseDir
+
+:baseDirFound
+set MAVEN_PROJECTBASEDIR=%WDIR%
+cd "%EXEC_DIR%"
+goto endDetectBaseDir
+
+:baseDirNotFound
+set MAVEN_PROJECTBASEDIR=%EXEC_DIR%
+cd "%EXEC_DIR%"
+
+:endDetectBaseDir
+
+IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig
+
+@setlocal EnableExtensions EnableDelayedExpansion
+for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a
+@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS%
+
+:endReadAdditionalConfig
+
+SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe"
+set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar"
+set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain
+
+set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
+
+FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO (
+    IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B
+)
+
+@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central
+@REM This allows using the maven wrapper in projects that prohibit checking in binary data.
+if exist %WRAPPER_JAR% (
+    if "%MVNW_VERBOSE%" == "true" (
+        echo Found %WRAPPER_JAR%
+    )
+) else (
+    if not "%MVNW_REPOURL%" == "" (
+        SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar"
+    )
+    if "%MVNW_VERBOSE%" == "true" (
+        echo Couldn't find %WRAPPER_JAR%, downloading it ...
+        echo Downloading from: %DOWNLOAD_URL%
+    )
+
+    powershell -Command "&{"^
+		"$webclient = new-object System.Net.WebClient;"^
+		"if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^
+		"$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^
+		"}"^
+		"[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^
+		"}"
+    if "%MVNW_VERBOSE%" == "true" (
+        echo Finished downloading %WRAPPER_JAR%
+    )
+)
+@REM End of extension
+
+@REM Provide a "standardized" way to retrieve the CLI args that will
+@REM work with both Windows and non-Windows executions.
+set MAVEN_CMD_LINE_ARGS=%*
+
+%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %*
+if ERRORLEVEL 1 goto error
+goto end
+
+:error
+set ERROR_CODE=1
+
+:end
+@endlocal & set ERROR_CODE=%ERROR_CODE%
+
+if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost
+@REM check for post script, once with legacy .bat ending and once with .cmd ending
+if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat"
+if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd"
+:skipRcPost
+
+@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on'
+if "%MAVEN_BATCH_PAUSE%" == "on" pause
+
+if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE%
+
+exit /B %ERROR_CODE%

+ 208 - 0
kafka-ui-api/pom.xml

@@ -0,0 +1,208 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>kafka-ui</artifactId>
+        <groupId>com.provectus</groupId>
+        <version>0.0.1-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>kafka-ui-api</artifactId>
+
+    <dependencyManagement>
+        <dependencies>
+            <dependency>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-dependencies</artifactId>
+                <version>${spring-boot.version}</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
+        </dependencies>
+    </dependencyManagement>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-webflux</artifactId>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.springframework.boot</groupId>
+                    <artifactId>spring-boot-starter-logging</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>com.provectus</groupId>
+            <artifactId>kafka-ui-contract</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka-clients</artifactId>
+            <version>${kafka-clients.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.101tec</groupId>
+            <artifactId>zkclient</artifactId>
+            <version>${zkclient.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.projectlombok</groupId>
+            <artifactId>lombok</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.mapstruct</groupId>
+            <artifactId>mapstruct</artifactId>
+            <version>${org.mapstruct.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-log4j2</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-test</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>io.projectreactor</groupId>
+            <artifactId>reactor-test</artifactId>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-maven-plugin</artifactId>
+                <version>${spring-boot.version}</version>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>repackage</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>${maven-compiler-plugin.version}</version>
+                <configuration>
+                    <source>13</source>
+                    <target>13</target>
+                    <annotationProcessorPaths>
+                        <path>
+                            <groupId>org.mapstruct</groupId>
+                            <artifactId>mapstruct-processor</artifactId>
+                            <version>${org.mapstruct.version}</version>
+                        </path>
+                        <path>
+                            <groupId>org.projectlombok</groupId>
+                            <artifactId>lombok</artifactId>
+                            <version>${org.projectlombok.version}</version>
+                        </path>
+                        <path>
+                            <groupId>org.springframework.boot</groupId>
+                            <artifactId>spring-boot-configuration-processor</artifactId>
+                            <version>${spring-boot.version}</version>
+                        </path>
+                    </annotationProcessorPaths>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+
+    <profiles>
+        <profile>
+            <id>prod</id>
+            <build>
+                <plugins>
+                    <plugin>
+                        <artifactId>maven-resources-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <id>copy-resources</id>
+                                <phase>process-classes</phase>
+                                <goals>
+                                    <goal>copy-resources</goal>
+                                </goals>
+                                <configuration>
+                                    <outputDirectory>${basedir}/target/classes/static</outputDirectory>
+                                    <resources>
+                                        <resource>
+                                            <directory>../kafka-ui-react-app/build</directory>
+                                        </resource>
+                                    </resources>
+                                </configuration>
+                            </execution>
+                        </executions>
+                    </plugin>
+                    <plugin>
+                        <groupId>com.github.eirslett</groupId>
+                        <artifactId>frontend-maven-plugin</artifactId>
+                        <version>${frontend-maven-plugin.version}</version>
+                        <configuration>
+                            <workingDirectory>../kafka-ui-react-app</workingDirectory>
+                        </configuration>
+                        <executions>
+                            <execution>
+                                <id>install node and npm</id>
+                                <goals>
+                                    <goal>install-node-and-npm</goal>
+                                </goals>
+                                <configuration>
+                                    <nodeVersion>${node.version}</nodeVersion>
+                                </configuration>
+                            </execution>
+                            <execution>
+                                <id>npm install</id>
+                                <goals>
+                                    <goal>npm</goal>
+                                </goals>
+                                <configuration>
+                                    <arguments>install</arguments>
+                                    <arguments>run build</arguments>
+                                </configuration>
+                            </execution>
+                        </executions>
+                    </plugin>
+                    <plugin>
+                        <groupId>com.spotify</groupId>
+                        <artifactId>dockerfile-maven-plugin</artifactId>
+                        <version>${dockerfile-maven-plugin.version}</version>
+                        <configuration>
+                            <skipPush>true</skipPush>
+                        </configuration>
+                        <executions>
+                            <execution>
+                                <id>default</id>
+                                <phase>package</phase>
+                                <goals>
+                                    <goal>build</goal>
+                                </goals>
+                                <configuration>
+                                    <tag>${git.revision}</tag>
+                                    <repository>${project.artifactId}</repository>
+                                    <buildArgs>
+                                        <JAR_FILE>${project.build.finalName}.jar</JAR_FILE>
+                                        <JAR_NAME>${project.artifactId}.jar</JAR_NAME>
+                                    </buildArgs>
+                                </configuration>
+                            </execution>
+                        </executions>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+    </profiles>
+
+
+</project>

+ 17 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/KafkaMetricsApplication.java

@@ -0,0 +1,17 @@
+package com.provectus.kafka.ui;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.scheduling.annotation.EnableAsync;
+import org.springframework.scheduling.annotation.EnableScheduling;
+
+@SpringBootApplication
+@EnableScheduling
+@EnableAsync
+public class KafkaMetricsApplication {
+
+	public static void main(String[] args) {
+		SpringApplication.run(KafkaMetricsApplication.class, args);
+	}
+
+}

+ 26 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/ClustersMetricsScheduler.java

@@ -0,0 +1,26 @@
+package com.provectus.kafka.ui.cluster;
+
+import com.provectus.kafka.ui.cluster.model.ClustersStorage;
+import com.provectus.kafka.ui.cluster.model.KafkaCluster;
+import com.provectus.kafka.ui.cluster.service.MetricsUpdateService;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.log4j.Log4j2;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+
+@Component
+@RequiredArgsConstructor
+@Log4j2
+public class ClustersMetricsScheduler {
+
+    private final ClustersStorage clustersStorage;
+
+    private final MetricsUpdateService metricsUpdateService;
+
+    @Scheduled(fixedRate = 30000)
+    public void updateMetrics() {
+        for (KafkaCluster kafkaCluster : clustersStorage.getKafkaClusters()) {
+            metricsUpdateService.updateMetrics(kafkaCluster);
+        }
+    }
+}

+ 26 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/config/ClustersProperties.java

@@ -0,0 +1,26 @@
+package com.provectus.kafka.ui.cluster.config;
+
+import lombok.Data;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Configuration;
+
+import java.util.ArrayList;
+import java.util.List;
+
+// Binds the "kafka.clusters" section of the application properties to typed objects.
+@Configuration
+@ConfigurationProperties("kafka")
+@Data
+public class ClustersProperties {
+
+    // One entry per configured cluster; stays empty when none are declared.
+    List<Cluster> clusters = new ArrayList<>();
+
+    // Raw connection settings for a single cluster, exactly as written in the
+    // properties file; converted to the runtime model by ClusterMapper.
+    @Data
+    public static class Cluster {
+        String id;
+        String name;
+        String bootstrapServers;
+        String jmxHost;
+        String jmxPort;
+        String zookeeper;
+    }
+}

+ 23 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/mapper/ClusterMapper.java

@@ -0,0 +1,23 @@
+package com.provectus.kafka.ui.cluster.mapper;
+
+import com.provectus.kafka.ui.cluster.config.ClustersProperties;
+import com.provectus.kafka.ui.cluster.model.KafkaCluster;
+import org.mapstruct.Mapper;
+import org.mapstruct.Mapping;
+
+// MapStruct mapper that turns a raw configuration entry into the runtime model.
+@Mapper
+public abstract class ClusterMapper {
+
+    // Flat connection fields (id, bootstrapServers, ...) are copied by name;
+    // everything stateful on KafkaCluster (clients, caches, statuses, last
+    // exceptions) is ignored so each field keeps its initializer value.
+    // NOTE(review): "cluster.name" is mapped from "name" while the "cluster"
+    // target itself is ignored two lines below — these directives look
+    // contradictory; verify which one the generated MapStruct code honors.
+    @Mapping(source = "name", target = "cluster.name")
+    @Mapping(target = "brokersMetrics", ignore = true)
+    @Mapping(target = "cluster", ignore = true)
+    @Mapping(target = "lastKafkaException", ignore = true)
+    @Mapping(target = "lastZookeeperException", ignore = true)
+    @Mapping(target = "topicConfigsMap", ignore = true)
+    @Mapping(target = "topicDetailsMap", ignore = true)
+    @Mapping(target = "topics", ignore = true)
+    @Mapping(target = "zkClient", ignore = true)
+    @Mapping(target = "zookeeperStatus", ignore = true)
+    @Mapping(target = "adminClient", ignore = true)
+    public abstract KafkaCluster toKafkaCluster(ClustersProperties.Cluster clusterProperties);
+}

+ 40 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/ClustersStorage.java

@@ -0,0 +1,40 @@
+package com.provectus.kafka.ui.cluster.model;
+
+import com.provectus.kafka.ui.cluster.config.ClustersProperties;
+import com.provectus.kafka.ui.cluster.mapper.ClusterMapper;
+import lombok.RequiredArgsConstructor;
+import org.mapstruct.factory.Mappers;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.PostConstruct;
+import java.util.ArrayList;
+import java.util.List;
+
+@Component
+@RequiredArgsConstructor
+public class ClustersStorage {
+
+    private final List<KafkaCluster> kafkaClusters = new ArrayList<>();
+
+    private final ClustersProperties clusterProperties;
+
+    private final ClusterMapper clusterMapper = Mappers.getMapper(ClusterMapper.class);
+
+    @PostConstruct
+    public void init() {
+        for (ClustersProperties.Cluster clusterProperties : clusterProperties.getClusters()) {
+            kafkaClusters.add(clusterMapper.toKafkaCluster(clusterProperties));
+        }
+    }
+
+    public List<KafkaCluster> getKafkaClusters() {
+        return kafkaClusters;
+    }
+
+    public KafkaCluster getClusterById(String clusterId) {
+        return kafkaClusters.stream()
+                .filter(cluster -> cluster.getId() != null && cluster.getId().equals(clusterId))
+                .findFirst()
+                .orElse(null);
+    }
+}

+ 49 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/KafkaCluster.java

@@ -0,0 +1,49 @@
+package com.provectus.kafka.ui.cluster.model;
+
+import com.provectus.kafka.ui.model.*;
+import lombok.AccessLevel;
+import lombok.Data;
+import lombok.experimental.FieldDefaults;
+import org.I0Itec.zkclient.ZkClient;
+import org.apache.kafka.clients.admin.AdminClient;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+@Data
+@FieldDefaults(level = AccessLevel.PRIVATE)
+public class KafkaCluster {
+
+    String id = "";
+    String name;
+    String jmxHost;
+    String jmxPort;
+    String bootstrapServers;
+    String zookeeper;
+
+    Cluster cluster = new Cluster();
+    BrokersMetrics brokersMetrics = new BrokersMetrics();
+
+    List<Topic> topics = new ArrayList<>();
+    private Map<String, TopicDetails> topicDetailsMap = new ConcurrentHashMap<>();
+    private Map<String, List<TopicConfig>> topicConfigsMap = new ConcurrentHashMap<>();
+
+
+    ZkClient zkClient;
+    AdminClient adminClient;
+    ServerStatus zookeeperStatus = ServerStatus.OFFLINE;
+
+    Exception lastKafkaException;
+    Exception lastZookeeperException;
+
+    public TopicDetails getTopicDetails(String key) {
+        var topicDetails = topicDetailsMap.get(key);
+        if(topicDetails == null) {
+            topicDetailsMap.putIfAbsent(key, new TopicDetails());
+            topicDetails = topicDetailsMap.get(key);
+        }
+        return topicDetails;
+    }
+}

+ 16 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/KafkaMetrics.java

@@ -0,0 +1,16 @@
+package com.provectus.kafka.ui.cluster.model;
+
+import lombok.Data;
+
+// Plain holder for cluster-level metric values.
+// NOTE(review): boxed types presumably mean "null = not yet collected" —
+// confirm against the code that populates this class (not visible here).
+@Data
+public class KafkaMetrics {
+
+    Double bytesInPerSec;
+    Double bytesOutPerSec;
+    Integer brokersCount;
+    Integer topicCount;
+    Integer activeControllerCount;
+    Integer onlinePartitionCount;
+    Integer offlinePartitionCount;
+    Integer underReplicatedPartitions;
+}

+ 61 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/ClusterService.java

@@ -0,0 +1,61 @@
+package com.provectus.kafka.ui.cluster.service;
+
+import com.provectus.kafka.ui.cluster.model.ClustersStorage;
+import com.provectus.kafka.ui.cluster.model.KafkaCluster;
+import com.provectus.kafka.ui.kafka.KafkaService;
+import com.provectus.kafka.ui.model.*;
+import lombok.RequiredArgsConstructor;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Service;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+@Service
+@RequiredArgsConstructor
+public class ClusterService {
+
+    private final ClustersStorage clustersStorage;
+    private final KafkaService kafkaService;
+
+    public Mono<ResponseEntity<Flux<Cluster>>> getClusters() {
+        List<Cluster> clusters = clustersStorage.getKafkaClusters()
+                .stream()
+                .map(KafkaCluster::getCluster)
+                .collect(Collectors.toList());
+
+        return Mono.just(ResponseEntity.ok(Flux.fromIterable(clusters)));
+    }
+
+    public Mono<ResponseEntity<BrokersMetrics>> getBrokersMetrics(String clusterId) {
+        KafkaCluster cluster = clustersStorage.getClusterById(clusterId);
+        if (cluster == null) return null;
+        return Mono.just(ResponseEntity.ok(cluster.getBrokersMetrics()));
+    }
+
+    public Mono<ResponseEntity<Flux<Topic>>> getTopics(String clusterId) {
+        KafkaCluster cluster = clustersStorage.getClusterById(clusterId);
+        if (cluster == null) return null;
+        return Mono.just(ResponseEntity.ok(Flux.fromIterable(cluster.getTopics())));
+    }
+
+    public Mono<ResponseEntity<TopicDetails>> getTopicDetails(String clusterId, String topicName) {
+        KafkaCluster cluster = clustersStorage.getClusterById(clusterId);
+        if (cluster == null) return null;
+        return Mono.just(ResponseEntity.ok(cluster.getTopicDetails(topicName)));
+    }
+
+    public Mono<ResponseEntity<Flux<TopicConfig>>> getTopicConfigs(String clusterId, String topicName) {
+        KafkaCluster cluster = clustersStorage.getClusterById(clusterId);
+        if (cluster == null) return null;
+        return Mono.just(ResponseEntity.ok(Flux.fromIterable(cluster.getTopicConfigsMap().get(topicName))));
+    }
+
+    public Mono<ResponseEntity<Topic>> createTopic(String clusterId, Mono<TopicFormData> topicFormData) {
+        KafkaCluster cluster = clustersStorage.getClusterById(clusterId);
+        if (cluster == null) return null;
+        return kafkaService.createTopic(cluster, topicFormData);
+    }
+}

+ 25 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/MetricsUpdateService.java

@@ -0,0 +1,25 @@
+package com.provectus.kafka.ui.cluster.service;
+
+import com.provectus.kafka.ui.cluster.model.KafkaCluster;
+import com.provectus.kafka.ui.kafka.KafkaService;
+import com.provectus.kafka.ui.zookeeper.ZookeeperService;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.log4j.Log4j2;
+import org.springframework.scheduling.annotation.Async;
+import org.springframework.stereotype.Service;
+
+@Service
+@RequiredArgsConstructor
+@Log4j2
+public class MetricsUpdateService {
+
+    private final KafkaService kafkaService;
+    private final ZookeeperService zookeeperService;
+
+    @Async
+    public void updateMetrics(KafkaCluster kafkaCluster) {
+        log.debug("Start getting metrics for kafkaCluster: " + kafkaCluster.getName());
+        kafkaService.loadClusterMetrics(kafkaCluster);
+        zookeeperService.checkZookeeperStatus(kafkaCluster);
+    }
+}

+ 45 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/kafka/KafkaConstants.java

@@ -0,0 +1,45 @@
+package com.provectus.kafka.ui.kafka;
+
+import java.util.AbstractMap;
+import java.util.Map;
+
+import static org.apache.kafka.common.config.TopicConfig.*;
+
+public final class KafkaConstants {
+
+    private KafkaConstants() {
+    }
+
+    public static String IN_BYTE_PER_SEC_METRIC = "incoming-byte-rate";
+    public static String IN_BYTE_PER_SEC_METRIC_DESCRIPTION = "The number of bytes read off all sockets per second";
+    public static String OUT_BYTE_PER_SEC_METRIC = "outgoing-byte-rate";
+    public static String OUT_BYTE_PER_SEC_METRIC_DESCRIPTION = "The number of outgoing bytes sent to all servers per second";
+
+    public static Map<String, String> TOPIC_DEFAULT_CONFIGS = Map.ofEntries(
+            new AbstractMap.SimpleEntry<>(CLEANUP_POLICY_CONFIG, CLEANUP_POLICY_DELETE),
+            new AbstractMap.SimpleEntry<>(COMPRESSION_TYPE_CONFIG, "producer"),
+            new AbstractMap.SimpleEntry<>(DELETE_RETENTION_MS_CONFIG, "86400000"),
+            new AbstractMap.SimpleEntry<>(FILE_DELETE_DELAY_MS_CONFIG, "60000"),
+            new AbstractMap.SimpleEntry<>(FLUSH_MESSAGES_INTERVAL_CONFIG, "9223372036854775807"),
+            new AbstractMap.SimpleEntry<>(FLUSH_MS_CONFIG, "9223372036854775807"),
+            new AbstractMap.SimpleEntry<>("follower.replication.throttled.replicas", ""),
+            new AbstractMap.SimpleEntry<>(INDEX_INTERVAL_BYTES_CONFIG, "4096"),
+            new AbstractMap.SimpleEntry<>("leader.replication.throttled.replicas", ""),
+            new AbstractMap.SimpleEntry<>(MAX_COMPACTION_LAG_MS_CONFIG, "9223372036854775807"),
+            new AbstractMap.SimpleEntry<>(MAX_MESSAGE_BYTES_CONFIG, "1000012"),
+            new AbstractMap.SimpleEntry<>(MESSAGE_TIMESTAMP_DIFFERENCE_MAX_MS_CONFIG, "9223372036854775807"),
+            new AbstractMap.SimpleEntry<>(MESSAGE_TIMESTAMP_TYPE_CONFIG, "CreateTime"),
+            new AbstractMap.SimpleEntry<>(MIN_CLEANABLE_DIRTY_RATIO_CONFIG, "0.5"),
+            new AbstractMap.SimpleEntry<>(MIN_COMPACTION_LAG_MS_CONFIG, "0"),
+            new AbstractMap.SimpleEntry<>(MIN_IN_SYNC_REPLICAS_CONFIG, "1"),
+            new AbstractMap.SimpleEntry<>(PREALLOCATE_CONFIG, "false"),
+            new AbstractMap.SimpleEntry<>(RETENTION_BYTES_CONFIG, "-1"),
+            new AbstractMap.SimpleEntry<>(RETENTION_MS_CONFIG, "604800000"),
+            new AbstractMap.SimpleEntry<>(SEGMENT_BYTES_CONFIG, "1073741824"),
+            new AbstractMap.SimpleEntry<>(SEGMENT_INDEX_BYTES_CONFIG, "10485760"),
+            new AbstractMap.SimpleEntry<>(SEGMENT_JITTER_MS_CONFIG, "0"),
+            new AbstractMap.SimpleEntry<>(SEGMENT_MS_CONFIG, "604800000"),
+            new AbstractMap.SimpleEntry<>(UNCLEAN_LEADER_ELECTION_ENABLE_CONFIG, "false"),
+            new AbstractMap.SimpleEntry<>(MESSAGE_DOWNCONVERSION_ENABLE_CONFIG, "true")
+    );
+}

+ 262 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/kafka/KafkaService.java

@@ -0,0 +1,262 @@
+package com.provectus.kafka.ui.kafka;
+
+import com.provectus.kafka.ui.cluster.model.KafkaCluster;
+import com.provectus.kafka.ui.model.*;
+import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
+import lombok.extern.log4j.Log4j2;
+import org.apache.kafka.clients.admin.*;
+import org.apache.kafka.common.*;
+import org.apache.kafka.common.config.ConfigResource;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+import org.springframework.scheduling.annotation.Async;
+import org.springframework.stereotype.Service;
+import reactor.core.publisher.Mono;
+
+import java.util.*;
+
+import static com.provectus.kafka.ui.kafka.KafkaConstants.*;
+import static org.apache.kafka.common.config.TopicConfig.MESSAGE_FORMAT_VERSION_CONFIG;
+
+@Service
+@RequiredArgsConstructor
+@Log4j2
+public class KafkaService {
+
+    @SneakyThrows
+    @Async
+    public void loadClusterMetrics(KafkaCluster kafkaCluster) {
+        log.debug("Start getting Kafka metrics for cluster: " + kafkaCluster.getName());
+        boolean isConnected = false;
+        if (kafkaCluster.getAdminClient() != null) {
+            isConnected = isAdminClientConnected(kafkaCluster);
+        }
+        if (kafkaCluster.getAdminClient() == null || !isConnected) {
+            isConnected = createAdminClient(kafkaCluster);
+        }
+
+        if (!isConnected) {
+            kafkaCluster.getCluster().setStatus(ServerStatus.OFFLINE);
+
+            return;
+        }
+
+        kafkaCluster.getCluster().setId(kafkaCluster.getId());
+        kafkaCluster.getCluster().setStatus(ServerStatus.ONLINE);
+        loadMetrics(kafkaCluster);
+        loadTopicsData(kafkaCluster);
+    }
+
+
+    @SneakyThrows
+    public Mono<ResponseEntity<Topic>> createTopic(KafkaCluster cluster, Mono<TopicFormData> topicFormData) {
+        return topicFormData.flatMap(
+                topicData -> {
+                    AdminClient adminClient = cluster.getAdminClient();
+                    NewTopic newTopic = new NewTopic(topicData.getName(), topicData.getPartitions(), topicData.getReplicationFactor().shortValue());
+                    newTopic.configs(topicData.getConfigs());
+
+                    createTopic(adminClient, newTopic);
+
+                    DescribeTopicsResult topicDescriptionsWrapper = adminClient.describeTopics(Collections.singletonList(topicData.getName()));
+                    Map<String, KafkaFuture<TopicDescription>> topicDescriptionFuturesMap = topicDescriptionsWrapper.values();
+                    var entry = topicDescriptionFuturesMap.entrySet().iterator().next();
+                    var topicDescription = getTopicDescription(entry);
+                    if (topicDescription == null) return Mono.error(new RuntimeException("Can't find created topic"));
+
+                    Topic topic = collectTopicData(cluster, topicDescription);
+                    cluster.getTopics().add(topic);
+                    return Mono.just(new ResponseEntity<>(topic, HttpStatus.CREATED));
+                }
+        );
+    }
+
+    @SneakyThrows
+    private String getClusterId(KafkaCluster kafkaCluster) {
+        return kafkaCluster.getAdminClient().describeCluster().clusterId().get();
+    }
+
+    private boolean createAdminClient(KafkaCluster kafkaCluster) {
+        try {
+            Properties properties = new Properties();
+            properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaCluster.getBootstrapServers());
+            properties.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, 5000);
+            kafkaCluster.setAdminClient(AdminClient.create(properties));
+            kafkaCluster.setId(getClusterId(kafkaCluster));
+
+            return true;
+        } catch (Exception e) {
+            log.error(e);
+            kafkaCluster.setLastKafkaException(e);
+
+            return false;
+        }
+    }
+
+    private boolean isAdminClientConnected(KafkaCluster kafkaCluster) {
+        try {
+            getClusterId(kafkaCluster);
+
+            return true;
+        } catch (Exception e) {
+            log.error(e);
+            kafkaCluster.setLastKafkaException(e);
+
+            return false;
+        }
+    }
+
+    @SneakyThrows
+    private void loadTopicsData(KafkaCluster kafkaCluster) {
+        AdminClient adminClient = kafkaCluster.getAdminClient();
+        ListTopicsOptions listTopicsOptions = new ListTopicsOptions();
+        listTopicsOptions.listInternal(true);
+        var topicListings = adminClient.listTopics(listTopicsOptions).names().get();
+        kafkaCluster.getCluster().setTopicCount(topicListings.size());
+
+        DescribeTopicsResult topicDescriptionsWrapper = adminClient.describeTopics(topicListings);
+        Map<String, KafkaFuture<TopicDescription>> topicDescriptionFuturesMap = topicDescriptionsWrapper.values();
+        List<Topic> foundTopics = new ArrayList<>();
+        resetMetrics(kafkaCluster);
+
+        for (var entry : topicDescriptionFuturesMap.entrySet()) {
+            var topicDescription = getTopicDescription(entry);
+            if (topicDescription == null) continue;
+            Topic topic = collectTopicData(kafkaCluster, topicDescription);
+            foundTopics.add(topic);
+        }
+        kafkaCluster.setTopics(foundTopics);
+    }
+
+    private void resetMetrics(KafkaCluster kafkaCluster) {
+        kafkaCluster.getBrokersMetrics().setOnlinePartitionCount(0);
+        kafkaCluster.getBrokersMetrics().setOfflinePartitionCount(0);
+        kafkaCluster.getBrokersMetrics().setUnderReplicatedPartitionCount(0);
+    }
+
+    private Topic collectTopicData(KafkaCluster kafkaCluster, TopicDescription topicDescription) {
+        var topic = new Topic().clusterId(kafkaCluster.getId());
+        topic.setInternal(topicDescription.isInternal());
+        topic.setName(topicDescription.name());
+
+        int inSyncReplicasCount = 0, replicasCount = 0;
+        List<Partition> partitions = new ArrayList<>();
+
+        int urpCount = 0;
+        for (TopicPartitionInfo partition : topicDescription.partitions()) {
+            var partitionDto = new Partition();
+            partitionDto.setLeader(partition.leader().id());
+            partitionDto.setPartition(partition.partition());
+            List<Replica> replicas = new ArrayList<>();
+
+            boolean isUrp = false;
+            for (Node replicaNode : partition.replicas()) {
+                var replica = new Replica();
+                replica.setBroker(replicaNode.id());
+                replica.setLeader(partition.leader() != null && partition.leader().id() == replicaNode.id());
+                replica.setInSync(partition.isr().contains(replicaNode));
+                if (!replica.getInSync()) {
+                    isUrp = true;
+                }
+                replicas.add(replica);
+
+                inSyncReplicasCount += partition.isr().size();
+                replicasCount += partition.replicas().size();
+            }
+            if (isUrp) {
+                urpCount++;
+            }
+            partitionDto.setReplicas(replicas);
+            partitions.add(partitionDto);
+
+            if (partition.leader() != null) {
+                kafkaCluster.getBrokersMetrics().setOnlinePartitionCount(kafkaCluster.getBrokersMetrics().getOnlinePartitionCount() + 1);
+            } else {
+                kafkaCluster.getBrokersMetrics().setOfflinePartitionCount(kafkaCluster.getBrokersMetrics().getOfflinePartitionCount() + 1);
+            }
+        }
+        kafkaCluster.getCluster().setOnlinePartitionCount(kafkaCluster.getBrokersMetrics().getOnlinePartitionCount());
+        kafkaCluster.getBrokersMetrics().setUnderReplicatedPartitionCount(
+                kafkaCluster.getBrokersMetrics().getUnderReplicatedPartitionCount() + urpCount);
+        topic.setPartitions(partitions);
+
+        TopicDetails topicDetails = kafkaCluster.getTopicDetails(topicDescription.name());
+        topicDetails.setReplicas(replicasCount);
+        topicDetails.setPartitionCount(topicDescription.partitions().size());
+        topicDetails.setInSyncReplicas(inSyncReplicasCount);
+        topicDetails.setReplicationFactor(topicDescription.partitions().size() > 0
+                ? topicDescription.partitions().get(0).replicas().size()
+                : null);
+        topicDetails.setUnderReplicatedPartitions(urpCount);
+
+        loadTopicConfig(kafkaCluster, topicDescription.name());
+
+        return topic;
+    }
+
+    private TopicDescription getTopicDescription(Map.Entry<String, KafkaFuture<TopicDescription>> entry) {
+        try {
+            return entry.getValue().get();
+        } catch (Exception e) {
+            log.error("Can't get topic with name: " + entry.getKey(), e);
+
+            return null;
+        }
+    }
+
+    private void loadMetrics(KafkaCluster kafkaCluster) throws InterruptedException, java.util.concurrent.ExecutionException {
+        AdminClient adminClient = kafkaCluster.getAdminClient();
+        int brokerCount = adminClient.describeCluster().nodes().get().size();
+        kafkaCluster.getCluster().setBrokerCount(brokerCount);
+        kafkaCluster.getBrokersMetrics().setBrokerCount(brokerCount);
+        kafkaCluster.getBrokersMetrics().setActiveControllers(adminClient.describeCluster().controller().get() != null ? 1 : 0);
+
+        for (Map.Entry<MetricName, ? extends Metric> metricNameEntry : adminClient.metrics().entrySet()) {
+            if (metricNameEntry.getKey().name().equals(IN_BYTE_PER_SEC_METRIC)
+                    && metricNameEntry.getKey().description().equals(IN_BYTE_PER_SEC_METRIC_DESCRIPTION)) {
+                kafkaCluster.getCluster().setBytesInPerSec((int) Math.round((double) metricNameEntry.getValue().metricValue()));
+            }
+            if (metricNameEntry.getKey().name().equals(OUT_BYTE_PER_SEC_METRIC)
+                    && metricNameEntry.getKey().description().equals(OUT_BYTE_PER_SEC_METRIC_DESCRIPTION)) {
+                kafkaCluster.getCluster().setBytesOutPerSec((int) Math.round((double) metricNameEntry.getValue().metricValue()));
+            }
+        }
+    }
+
+    @SneakyThrows
+    private void loadTopicConfig(KafkaCluster kafkaCluster, String topicName) {
+        AdminClient adminClient = kafkaCluster.getAdminClient();
+
+        Set<ConfigResource> resources = Collections.singleton(new ConfigResource(ConfigResource.Type.TOPIC, topicName));
+        final Map<ConfigResource, Config> configs = adminClient.describeConfigs(resources).all().get();
+
+        if (configs.isEmpty()) return;
+
+        Collection<ConfigEntry> entries = configs.values().iterator().next().entries();
+        List<TopicConfig> topicConfigs = new ArrayList<>();
+        for (ConfigEntry entry : entries) {
+            TopicConfig topicConfig = new TopicConfig();
+            topicConfig.setName(entry.name());
+            topicConfig.setValue(entry.value());
+            if (topicConfig.getName().equals(MESSAGE_FORMAT_VERSION_CONFIG)) {
+                topicConfig.setDefaultValue(topicConfig.getValue());
+            } else {
+                topicConfig.setDefaultValue(TOPIC_DEFAULT_CONFIGS.get(entry.name()));
+            }
+            topicConfigs.add(topicConfig);
+        }
+
+        kafkaCluster.getTopicConfigsMap().put(topicName, topicConfigs);
+    }
+
+    @SneakyThrows
+    private void createTopic(AdminClient adminClient, NewTopic newTopic) {
+        adminClient.createTopics(Collections.singletonList(newTopic))
+                .values()
+                .values()
+                .iterator()
+                .next()
+                .get();
+    }
+}

+ 56 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/MetricsRestController.java

@@ -0,0 +1,56 @@
+package com.provectus.kafka.ui.rest;
+
+import com.provectus.kafka.ui.api.ApiClustersApi;
+import com.provectus.kafka.ui.cluster.service.ClusterService;
+import com.provectus.kafka.ui.model.*;
+import lombok.RequiredArgsConstructor;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.RestController;
+import org.springframework.web.server.ServerWebExchange;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+import javax.validation.Valid;
+import java.util.ArrayList;
+
+@RestController
+@RequiredArgsConstructor
+public class MetricsRestController implements ApiClustersApi {
+
+    private final ClusterService clusterService;
+
+    @Override
+    public Mono<ResponseEntity<Flux<Cluster>>> getClusters(ServerWebExchange exchange) {
+        return clusterService.getClusters();
+    }
+
+    @Override
+    public Mono<ResponseEntity<BrokersMetrics>> getBrokersMetrics(String clusterId, ServerWebExchange exchange) {
+        return clusterService.getBrokersMetrics(clusterId);
+    }
+
+    @Override
+    public Mono<ResponseEntity<Flux<Topic>>> getTopics(String clusterId, ServerWebExchange exchange) {
+        return clusterService.getTopics(clusterId);
+    }
+
+    @Override
+    public Mono<ResponseEntity<TopicDetails>> getTopicDetails(String clusterId, String topicName, ServerWebExchange exchange) {
+        return clusterService.getTopicDetails(clusterId, topicName);
+    }
+
+    @Override
+    public Mono<ResponseEntity<Flux<TopicConfig>>> getTopicConfigs(String clusterId, String topicName, ServerWebExchange exchange) {
+        return clusterService.getTopicConfigs(clusterId, topicName);
+    }
+
+    @Override
+    public Mono<ResponseEntity<Topic>> createTopic(String clusterId, @Valid Mono<TopicFormData> topicFormData, ServerWebExchange exchange) {
+        return clusterService.createTopic(clusterId, topicFormData);
+    }
+
+    @Override
+    public Mono<ResponseEntity<Flux<Broker>>> getBrokers(String clusterId, ServerWebExchange exchange) {
+        return Mono.just(ResponseEntity.ok(Flux.fromIterable(new ArrayList<>())));
+    }
+}

+ 22 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/config/CorsGlobalConfiguration.java

@@ -0,0 +1,22 @@
+package com.provectus.kafka.ui.rest.config;
+
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Profile;
+import org.springframework.web.reactive.config.CorsRegistry;
+import org.springframework.web.reactive.config.EnableWebFlux;
+import org.springframework.web.reactive.config.WebFluxConfigurer;
+
+@Configuration
+@EnableWebFlux
+@Profile("local")
+public class CorsGlobalConfiguration implements WebFluxConfigurer {
+
+    @Override
+    public void addCorsMappings(CorsRegistry registry) {
+        registry.addMapping("/**")
+                .allowedOrigins("*")
+                .allowedMethods("*")
+                .allowedHeaders("*")
+                .allowCredentials(true);
+    }
+}

+ 19 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/config/CustomWebFilter.java

@@ -0,0 +1,19 @@
+package com.provectus.kafka.ui.rest.config;
+
+import org.springframework.stereotype.Component;
+import org.springframework.web.server.ServerWebExchange;
+import org.springframework.web.server.WebFilter;
+import org.springframework.web.server.WebFilterChain;
+import reactor.core.publisher.Mono;
+
+@Component
+public class CustomWebFilter implements WebFilter {
+    @Override
+    public Mono<Void> filter(ServerWebExchange exchange, WebFilterChain chain) {
+        if (exchange.getRequest().getURI().getPath().equals("/")) {
+            return chain.filter(exchange.mutate().request(exchange.getRequest().mutate().path("/index.html").build()).build());
+        }
+
+        return chain.filter(exchange);
+    }
+}

+ 10 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/zookeeper/ZooKeeperConstants.java

@@ -0,0 +1,10 @@
+package com.provectus.kafka.ui.zookeeper;
+
+public final class ZooKeeperConstants {
+
+    private ZooKeeperConstants() {}
+
+    public static int ONLINE = 1;
+    public static int OFFLINE = 0;
+
+}

+ 60 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/zookeeper/ZookeeperService.java

@@ -0,0 +1,60 @@
+package com.provectus.kafka.ui.zookeeper;
+
+import com.provectus.kafka.ui.cluster.model.KafkaCluster;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.log4j.Log4j2;
+import org.I0Itec.zkclient.ZkClient;
+import org.springframework.scheduling.annotation.Async;
+import org.springframework.stereotype.Service;
+
+@Service
+@RequiredArgsConstructor
+@Log4j2
+public class ZookeeperService {
+
+    @Async
+    public void checkZookeeperStatus(KafkaCluster kafkaCluster) {
+        log.debug("Start getting Zookeeper metrics for kafkaCluster: " + kafkaCluster.getName());
+        boolean isConnected = false;
+        if (kafkaCluster.getZkClient() != null) {
+            isConnected = isZkClientConnected(kafkaCluster);
+        }
+        if (kafkaCluster.getZkClient() == null || !isConnected) {
+            isConnected = createZookeeperConnection(kafkaCluster);
+        }
+
+        if (!isConnected) {
+            kafkaCluster.getBrokersMetrics().setZooKeeperStatus(ZooKeeperConstants.OFFLINE);
+
+            return;
+        }
+
+        kafkaCluster.getBrokersMetrics().setZooKeeperStatus(ZooKeeperConstants.ONLINE);
+    }
+
+    private boolean createZookeeperConnection(KafkaCluster kafkaCluster) {
+        try {
+            kafkaCluster.setZkClient(new ZkClient(kafkaCluster.getZookeeper(), 1000));
+
+            return true;
+        } catch (Exception e) {
+            log.error(e);
+            kafkaCluster.setLastZookeeperException(e);
+
+            return false;
+        }
+    }
+
+    private boolean isZkClientConnected(KafkaCluster kafkaCluster) {
+        try {
+            kafkaCluster.getZkClient().getChildren("/brokers/ids");
+
+            return true;
+        } catch (Exception e) {
+            log.error(e);
+            kafkaCluster.setLastZookeeperException(e);
+
+            return false;
+        }
+    }
+}

+ 14 - 0
kafka-ui-api/src/main/resources/application-local.yml

@@ -0,0 +1,14 @@
+kafka:
+  clusters:
+    -
+      name: local
+      bootstrapServers: localhost:29091
+      jmxHost: localhost
+      jmxPort: 9997
+      zookeeper: localhost:2181
+    -
+      name: secondLocal
+      bootstrapServers: localhost:29092
+      jmxHost: localhost
+      jmxPort: 9998
+      zookeeper: localhost:2182

+ 16 - 0
kafka-ui-api/src/main/resources/application-sdp.yml

@@ -0,0 +1,16 @@
+kafka:
+  clusters:
+    -
+      id: wrYGf-csNgiGdK7B_ADF7Z
+      name: local
+      bootstrapServers: kafka0:29092
+      jmxHost: kafka0
+      jmxPort: 9997
+      zookeeper: zookeeper0:2181
+    -
+      id: dMMQx-WRh77BKYas_g2ZTz
+      name: secondLocal
+      bootstrapServers: kafka1:29092
+      jmxHost: kafka1
+      jmxPort: 9997
+      zookeeper: zookeeper1:2181

+ 16 - 0
kafka-ui-api/src/main/resources/application.yml

@@ -0,0 +1,16 @@
+kafka:
+  clusters:
+    -
+      id: wrYGf-csNgiGdK7B_ADF7Z
+      name: local
+      bootstrapServers: kafka0:29092
+      jmxHost: kafka0
+      jmxPort: 9997
+      zookeeper: zookeeper0:2181
+    -
+      id: dMMQx-WRh77BKYas_g2ZTz
+      name: secondLocal
+      bootstrapServers: kafka1:29092
+      jmxHost: kafka1
+      jmxPort: 9997
+      zookeeper: zookeeper1:2181

+ 26 - 0
kafka-ui-api/src/main/resources/log4j2.xml

@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Configuration status="ERROR">
+    <Appenders>
+        <Console name="Console" target="SYSTEM_OUT">
+            <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
+        </Console>
+    </Appenders>
+    <Loggers>
+        <Logger name="com.provectus" level="debug" additivity="false">
+            <AppenderRef ref="Console"/>
+        </Logger>
+        <Logger name="org.springframework.http.codec.json.Jackson2JsonDecoder" level="debug" additivity="false">
+            <AppenderRef ref="Console"/>
+        </Logger>
+        <Logger name="org.springframework.http.codec.json.Jackson2JsonEncoder" level="debug" additivity="false">
+            <AppenderRef ref="Console"/>
+        </Logger>
+
+        <logger name="reactor.netty.http.server.AccessLog" level="info" additivity="false">
+            <appender-ref ref="Console"/>
+        </logger>
+        <Root level="info">
+            <AppenderRef ref="Console"/>
+        </Root>
+    </Loggers>
+</Configuration>

+ 12 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/KafkaMetricsApplicationTests.java

@@ -0,0 +1,12 @@
+package com.provectus.kafka.ui;
+
+import org.junit.Test;
+import org.springframework.boot.test.context.SpringBootTest;
+
+@SpringBootTest
+class KafkaMetricsApplicationTests {
+
+	@Test
+	public void contextLoads() {
+	}
+}

+ 79 - 0
kafka-ui-contract/pom.xml

@@ -0,0 +1,79 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>kafka-ui</artifactId>
+        <groupId>com.provectus</groupId>
+        <version>0.0.1-SNAPSHOT</version>
+    </parent>
+
+    <modelVersion>4.0.0</modelVersion>
+    <artifactId>kafka-ui-contract</artifactId>
+
+    <profiles>
+        <profile>
+            <id>generate-spring-webflux-api</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+            </activation>
+
+            <dependencies>
+                <dependency>
+                    <groupId>org.springframework.boot</groupId>
+                    <artifactId>spring-boot-starter-webflux</artifactId>
+                    <version>${spring-boot.version}</version>
+                </dependency>
+                <dependency>
+                    <groupId>io.swagger</groupId>
+                    <artifactId>swagger-annotations</artifactId>
+                    <version>${swagger-annotations.version}</version>
+                </dependency>
+                <dependency>
+                    <groupId>org.openapitools</groupId>
+                    <artifactId>jackson-databind-nullable</artifactId>
+                    <version>${jackson-databind-nullable.version}</version>
+                </dependency>
+            </dependencies>
+
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.openapitools</groupId>
+                        <artifactId>openapi-generator-maven-plugin</artifactId>
+                        <version>${openapi-generator-maven-plugin.version}</version>
+                        <executions>
+                            <execution>
+                                <id>generate-backend-api</id>
+                                <goals>
+                                    <goal>generate</goal>
+                                </goals>
+                                <configuration>
+                                    <inputSpec>${project.basedir}/src/main/resources/swagger/kafka-ui-api.yaml
+                                    </inputSpec>
+                                    <output>${project.build.directory}/generated-sources/api</output>
+                                    <generatorName>spring</generatorName>
+
+                                    <configOptions>
+                                        <modelPackage>com.provectus.kafka.ui.model</modelPackage>
+                                        <apiPackage>com.provectus.kafka.ui.api</apiPackage>
+                                        <sourceFolder>kafka-ui-contract</sourceFolder>
+
+                                        <reactive>true</reactive>
+
+                                        <interfaceOnly>true</interfaceOnly>
+                                        <skipDefaultInterface>true</skipDefaultInterface>
+                                        <useBeanValidation>true</useBeanValidation>
+                                        <useTags>true</useTags>
+
+                                        <dateLibrary>java8</dateLibrary>
+                                    </configOptions>
+                                </configuration>
+                            </execution>
+                        </executions>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+    </profiles>
+</project>

+ 330 - 0
kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml

@@ -0,0 +1,330 @@
+openapi: 3.0.0
+info:
+  description: Api Documentation
+  version: 0.1.0
+  title: Api Documentation
+  termsOfService: urn:tos
+  contact: {}
+  license:
+    name: Apache 2.0
+    url: http://www.apache.org/licenses/LICENSE-2.0
+tags:
+  - name: /api/clusters
+servers:
+  - url: /localhost
+
+paths:
+  /api/clusters:
+    get:
+      tags:
+        - /api/clusters
+      summary: getClusters
+      operationId: getClusters
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: '#/components/schemas/Cluster'
+
+  /api/clusters/{clusterId}/brokers:
+    get:
+      tags:
+        - /api/clusters
+      summary: getBrokers
+      operationId: getBrokers
+      parameters:
+        - name: clusterId
+          in: path
+          required: true
+          schema:
+            type: string
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: '#/components/schemas/Broker'
+
+  /api/clusters/{clusterId}/metrics/broker:
+    get:
+      tags:
+        - /api/clusters
+      summary: getBrokersMetrics
+      operationId: getBrokersMetrics
+      parameters:
+        - name: clusterId
+          in: path
+          required: true
+          schema:
+            type: string
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/BrokersMetrics'
+
+  /api/clusters/{clusterId}/topics:
+    get:
+      tags:
+        - /api/clusters
+      summary: getTopics
+      operationId: getTopics
+      parameters:
+        - name: clusterId
+          in: path
+          required: true
+          schema:
+            type: string
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: '#/components/schemas/Topic'
+    post:
+      tags:
+        - /api/clusters
+      summary: createTopic
+      operationId: createTopic
+      parameters:
+        - name: clusterId
+          in: path
+          required: true
+          schema:
+            type: string
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/TopicFormData'
+      responses:
+        201:
+          description: Created
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Topic'
+
+  /api/clusters/{clusterId}/topics/{topicName}:
+    get:
+      tags:
+        - /api/clusters
+      summary: getTopicDetails
+      operationId: getTopicDetails
+      parameters:
+        - name: clusterId
+          in: path
+          required: true
+          schema:
+            type: string
+        - name: topicName
+          in: path
+          required: true
+          schema:
+            type: string
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/TopicDetails'
+
+  /api/clusters/{clusterId}/topics/{topicName}/config:
+    get:
+      tags:
+        - /api/clusters
+      summary: getTopicConfigs
+      operationId: getTopicConfigs
+      parameters:
+        - name: clusterId
+          in: path
+          required: true
+          schema:
+            type: string
+        - name: topicName
+          in: path
+          required: true
+          schema:
+            type: string
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: '#/components/schemas/TopicConfig'
+
+components:
+  schemas:
+    Cluster:
+      type: object
+      properties:
+        id:
+          type: string
+        name:
+          type: string
+        defaultCluster:
+          type: boolean
+        status:
+          $ref: '#/components/schemas/ServerStatus'
+        brokerCount:
+          type: integer
+        onlinePartitionCount:
+          type: integer
+        topicCount:
+          type: integer
+        bytesInPerSec:
+          type: integer
+        bytesOutPerSec:
+          type: integer
+      required:
+        - id
+        - name
+        - status
+
+    ServerStatus:
+      type: string
+      enum:
+        - online
+        - offline
+
+    BrokersMetrics:
+      type: object
+      properties:
+        clusterId:
+          type: string
+        bytesInPerSec:
+          type: integer
+        brokerCount:
+          type: integer
+        zooKeeperStatus:
+          type: integer
+        activeControllers:
+          type: integer
+        uncleanLeaderElectionCount:
+          type: integer
+        networkPoolUsage:
+          type: number
+        requestPoolUsage:
+          type: number
+        onlinePartitionCount:
+          type: integer
+        underReplicatedPartitionCount:
+          type: integer
+        offlinePartitionCount:
+          type: integer
+        diskUsage:
+          $ref: '#/components/schemas/DiskUsage'
+        diskUsageDistribution:
+          type: string
+      required:
+        - id
+
+    DiskUsage:
+      type: object
+      properties:
+        brokerId:
+          type: integer
+        segmentSize:
+          type: integer
+
+    Topic:
+      type: object
+      properties:
+        clusterId:
+          type: string
+        name:
+          type: string
+        internal:
+          type: boolean
+        partitions:
+          type: array
+          items:
+            $ref: '#/components/schemas/Partition'
+
+    Partition:
+      type: object
+      properties:
+        partition:
+          type: integer
+        leader:
+          type: integer
+        replicas:
+          type: array
+          items:
+            $ref: '#/components/schemas/Replica'
+
+    Replica:
+      type: object
+      properties:
+        broker:
+          type: integer
+        leader:
+          type: boolean
+        inSync:
+          type: boolean
+
+    TopicDetails:
+      type: object
+      properties:
+        partitionCount:
+          type: integer
+        replicationFactor:
+          type: integer
+        replicas:
+          type: integer
+        inSyncReplicas:
+          type: integer
+        bytesInPerSec:
+          type: integer
+        segmentSize:
+          type: integer
+        segmentCount:
+          type: integer
+        underReplicatedPartitions:
+          type: integer
+
+    TopicConfig:
+      type: object
+      properties:
+        name:
+          type: string
+        value:
+          type: string
+        defaultValue:
+          type: string
+
+    TopicFormData:
+      type: object
+      properties:
+        name:
+          type: string
+        partitions:
+          type: integer
+        replicationFactor:
+          type: integer
+        configs:
+          type: object
+          additionalProperties:
+            type: string
+
+    Broker:
+      type: object
+      properties:
+        id:
+          type: string

+ 2 - 0
kafka-ui-react-app/.env

@@ -0,0 +1,2 @@
+# Kafka REST API
+REACT_APP_API_URL=http://localhost:8080/api

+ 26 - 0
kafka-ui-react-app/.gitignore

@@ -0,0 +1,26 @@
+# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
+
+# dependencies
+node_modules
+.pnp
+.pnp.js
+node
+
+# testing
+coverage
+
+# production
+build
+
+# misc
+.DS_Store
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+.idea

+ 0 - 0
LICENSE → kafka-ui-react-app/LICENSE


+ 39 - 0
kafka-ui-react-app/README.md

@@ -0,0 +1,39 @@
+# Kafka-UI
+UI for Apache Kafka management
+
+## Table of contents
+- [Getting started](#getting-started)
+- [Links](#links)
+
+## Getting started
+
+Install packages
+
+```
+npm install
+```
+
+Set the correct URL for your API server in `.env`.
+
+```
+REACT_APP_API_URL=http://api.your-kafka-rest-api.com:3004
+```
+
+Start the JSON Server if you prefer to use the default fake REST API.
+
+```
+npm run mock
+```
+
+Start application
+
+```
+npm start
+```
+
+
+## Links
+
+* [JSON Server](https://github.com/typicode/json-server) - Fake REST API.
+* [Bulma](https://bulma.io/documentation/) - a free, open-source CSS framework based on Flexbox
+* [Create React App](https://github.com/facebook/create-react-app)

+ 0 - 0
docker-compose.yaml → kafka-ui-react-app/docker-compose.yaml


+ 0 - 0
mock/index.js → kafka-ui-react-app/mock/index.js


+ 0 - 0
mock/payload/brokerMetrics.json → kafka-ui-react-app/mock/payload/brokerMetrics.json


+ 0 - 0
mock/payload/brokers.json → kafka-ui-react-app/mock/payload/brokers.json


+ 0 - 0
mock/payload/clusters.json → kafka-ui-react-app/mock/payload/clusters.json


+ 0 - 0
mock/payload/topicConfigs.json → kafka-ui-react-app/mock/payload/topicConfigs.json


+ 0 - 0
mock/payload/topicDetails.json → kafka-ui-react-app/mock/payload/topicDetails.json


+ 0 - 0
mock/payload/topics.json → kafka-ui-react-app/mock/payload/topics.json


+ 0 - 0
package-lock.json → kafka-ui-react-app/package-lock.json


+ 0 - 0
package.json → kafka-ui-react-app/package.json


+ 0 - 0
public/favicon.ico → kafka-ui-react-app/public/favicon.ico


+ 0 - 0
public/index.html → kafka-ui-react-app/public/index.html


+ 0 - 0
public/manifest.json → kafka-ui-react-app/public/manifest.json


+ 0 - 0
public/robots.txt → kafka-ui-react-app/public/robots.txt


+ 0 - 0
src/components/App.scss → kafka-ui-react-app/src/components/App.scss


+ 0 - 0
src/components/App.tsx → kafka-ui-react-app/src/components/App.tsx


+ 0 - 0
src/components/AppContainer.tsx → kafka-ui-react-app/src/components/AppContainer.tsx


+ 0 - 0
src/components/Brokers/Brokers.tsx → kafka-ui-react-app/src/components/Brokers/Brokers.tsx


+ 0 - 0
src/components/Brokers/BrokersContainer.ts → kafka-ui-react-app/src/components/Brokers/BrokersContainer.ts


+ 0 - 0
src/components/Dashboard/ClustersWidget/ClusterWidget.tsx → kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx


+ 0 - 0
src/components/Dashboard/ClustersWidget/ClustersWidget.tsx → kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClustersWidget.tsx


+ 0 - 0
src/components/Dashboard/ClustersWidget/ClustersWidgetContainer.ts → kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClustersWidgetContainer.ts


+ 0 - 0
src/components/Dashboard/Dashboard.tsx → kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx


+ 0 - 0
src/components/Nav/ClusterMenu.tsx → kafka-ui-react-app/src/components/Nav/ClusterMenu.tsx


+ 0 - 0
src/components/Nav/Nav.tsx → kafka-ui-react-app/src/components/Nav/Nav.tsx


+ 0 - 0
src/components/Nav/NavConatiner.ts → kafka-ui-react-app/src/components/Nav/NavConatiner.ts


+ 0 - 0
src/components/Topics/Details/Details.tsx → kafka-ui-react-app/src/components/Topics/Details/Details.tsx


+ 0 - 0
src/components/Topics/Details/DetailsContainer.ts → kafka-ui-react-app/src/components/Topics/Details/DetailsContainer.ts


+ 0 - 0
src/components/Topics/Details/Messages/Messages.tsx → kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx


+ 0 - 0
src/components/Topics/Details/Messages/MessagesContainer.ts → kafka-ui-react-app/src/components/Topics/Details/Messages/MessagesContainer.ts


+ 1 - 1
src/components/Topics/Details/Overview/Overview.tsx → kafka-ui-react-app/src/components/Topics/Details/Overview/Overview.tsx

@@ -63,7 +63,7 @@ const Overview: React.FC<Props> = ({
             </tr>
           </thead>
           <tbody>
-            {partitions.map(({ partition, leader }) => (
+            {partitions && partitions.map(({ partition, leader }) => (
               <tr key={`partition-list-item-key-${partition}`}>
                 <td>{partition}</td>
                 <td>{leader}</td>

+ 0 - 0
src/components/Topics/Details/Overview/OverviewContainer.ts → kafka-ui-react-app/src/components/Topics/Details/Overview/OverviewContainer.ts


+ 0 - 0
src/components/Topics/Details/Settings/Settings.tsx → kafka-ui-react-app/src/components/Topics/Details/Settings/Settings.tsx


+ 0 - 0
src/components/Topics/Details/Settings/SettingsContainer.ts → kafka-ui-react-app/src/components/Topics/Details/Settings/SettingsContainer.ts


+ 0 - 0
src/components/Topics/List/List.tsx → kafka-ui-react-app/src/components/Topics/List/List.tsx


+ 0 - 0
src/components/Topics/List/ListContainer.ts → kafka-ui-react-app/src/components/Topics/List/ListContainer.ts


+ 0 - 0
src/components/Topics/List/ListItem.tsx → kafka-ui-react-app/src/components/Topics/List/ListItem.tsx


+ 27 - 21
src/components/Topics/New/New.tsx → kafka-ui-react-app/src/components/Topics/New/New.tsx

@@ -14,21 +14,23 @@ interface Props {
   isTopicCreated: boolean;
   createTopic: (clusterId: ClusterId, form: TopicFormData) => void;
   redirectToTopicPath: (clusterId: ClusterId, topicName: TopicName) => void;
+  resetUploadedState: () => void;
 }
 
 const New: React.FC<Props> = ({
-  clusterId,
-  isTopicCreated,
-  createTopic,
-  redirectToTopicPath,
-}) => {
-  const { register, handleSubmit, errors, getValues } = useForm<TopicFormData>();
+                                clusterId,
+                                isTopicCreated,
+                                createTopic,
+                                redirectToTopicPath,
+                                resetUploadedState
+                              }) => {
+  const {register, handleSubmit, errors, getValues} = useForm<TopicFormData>();
   const [isSubmitting, setIsSubmitting] = React.useState<boolean>(false);
 
   React.useEffect(
     () => {
       if (isSubmitting && isTopicCreated) {
-        const { name } = getValues();
+        const {name} = getValues();
         redirectToTopicPath(clusterId, name);
       }
     },
@@ -36,16 +38,20 @@ const New: React.FC<Props> = ({
   );
 
   const onSubmit = async (data: TopicFormData) => {
+    // TODO: fix the loader. After the first successful creation the "loaded" state persists,
+    // so on subsequent submissions we no longer wait for creation to finish: we request the
+    // entity immediately after pressing the button, receive null, and navigate to the object
+    // page prematurely. Resetting the loaded state is a workaround; the loader logic needs tweaking.
+    resetUploadedState();
     setIsSubmitting(true);
     createTopic(clusterId, data);
-  }
+  };
 
   return (
     <div className="section">
       <div className="level">
         <div className="level-item level-left">
           <Breadcrumb links={[
-            { href: clusterTopicsPath(clusterId), label: 'All Topics' },
+            {href: clusterTopicsPath(clusterId), label: 'All Topics'},
           ]}>
             New Topic
           </Breadcrumb>
@@ -74,7 +80,7 @@ const New: React.FC<Props> = ({
                 disabled={isSubmitting}
               />
               <p className="help is-danger">
-                <ErrorMessage errors={errors} name="name" />
+                <ErrorMessage errors={errors} name="name"/>
               </p>
             </div>
 
@@ -87,12 +93,12 @@ const New: React.FC<Props> = ({
                 type="number"
                 placeholder="Number of partitions"
                 defaultValue="1"
-                ref={register({ required: 'Number of partitions is required.' })}
+                ref={register({required: 'Number of partitions is required.'})}
                 name="partitions"
                 disabled={isSubmitting}
               />
               <p className="help is-danger">
-                <ErrorMessage errors={errors} name="partitions" />
+                <ErrorMessage errors={errors} name="partitions"/>
               </p>
             </div>
           </div>
@@ -107,12 +113,12 @@ const New: React.FC<Props> = ({
                 type="number"
                 placeholder="Replication Factor"
                 defaultValue="1"
-                ref={register({ required: 'Replication Factor is required.' })}
+                ref={register({required: 'Replication Factor is required.'})}
                 name="replicationFactor"
                 disabled={isSubmitting}
               />
               <p className="help is-danger">
-                <ErrorMessage errors={errors} name="replicationFactor" />
+                <ErrorMessage errors={errors} name="replicationFactor"/>
               </p>
             </div>
 
@@ -125,12 +131,12 @@ const New: React.FC<Props> = ({
                 type="number"
                 placeholder="Replication Factor"
                 defaultValue="1"
-                ref={register({ required: 'Min In Sync Replicas is required.' })}
+                ref={register({required: 'Min In Sync Replicas is required.'})}
                 name="minInSyncReplicas"
                 disabled={isSubmitting}
               />
               <p className="help is-danger">
-                <ErrorMessage errors={errors} name="minInSyncReplicas" />
+                <ErrorMessage errors={errors} name="minInSyncReplicas"/>
               </p>
             </div>
           </div>
@@ -168,7 +174,7 @@ const New: React.FC<Props> = ({
                   ref={register}
                   disabled={isSubmitting}
                 >
-                  <option value={MILLISECONDS_IN_DAY / 2 }>
+                  <option value={MILLISECONDS_IN_DAY / 2}>
                     12 hours
                   </option>
                   <option value={MILLISECONDS_IN_DAY}>
@@ -227,21 +233,21 @@ const New: React.FC<Props> = ({
                 className="input"
                 type="number"
                 defaultValue="1000012"
-                ref={register({ required: 'Maximum message size in bytes is required' })}
+                ref={register({required: 'Maximum message size in bytes is required'})}
                 name="maxMessageBytes"
                 disabled={isSubmitting}
               />
               <p className="help is-danger">
-                <ErrorMessage errors={errors} name="maxMessageBytes" />
+                <ErrorMessage errors={errors} name="maxMessageBytes"/>
               </p>
             </div>
           </div>
 
-          <input type="submit" className="button is-primary" disabled={isSubmitting} />
+          <input type="submit" className="button is-primary" disabled={isSubmitting}/>
         </form>
       </div>
     </div>
   );
-}
+};
 
 export default New;

+ 4 - 2
src/components/Topics/New/NewContainer.ts → kafka-ui-react-app/src/components/Topics/New/NewContainer.ts

@@ -6,6 +6,7 @@ import { createTopic } from 'redux/actions';
 import { getTopicCreated } from 'redux/reducers/topics/selectors';
 import { clusterTopicPath } from 'lib/paths';
 import { ThunkDispatch } from 'redux-thunk';
+import * as actions from "../../../redux/actions/actions";
 
 interface RouteProps {
   clusterId: string;
@@ -20,11 +21,12 @@ const mapStateToProps = (state: RootState, { match: { params: { clusterId } } }:
 
 const mapDispatchToProps = (dispatch: ThunkDispatch<RootState, undefined, Action>, { history }: OwnProps) => ({
   createTopic: (clusterId: ClusterId, form: TopicFormData) => {
-    dispatch(createTopic(clusterId, form))
+    dispatch(createTopic(clusterId, form));
   },
   redirectToTopicPath: (clusterId: ClusterId, topicName: TopicName) => {
     history.push(clusterTopicPath(clusterId, topicName));
-  }
+  },
+  resetUploadedState: (() => dispatch(actions.createTopicAction.failure()))
 });
 
 

+ 0 - 0
src/components/Topics/Topics.tsx → kafka-ui-react-app/src/components/Topics/Topics.tsx


+ 0 - 0
src/components/Topics/TopicsContainer.ts → kafka-ui-react-app/src/components/Topics/TopicsContainer.ts


+ 0 - 0
src/components/common/Breadcrumb/Breadcrumb.tsx → kafka-ui-react-app/src/components/common/Breadcrumb/Breadcrumb.tsx


+ 0 - 0
src/components/common/Dashboard/Indicator.tsx → kafka-ui-react-app/src/components/common/Dashboard/Indicator.tsx


+ 0 - 0
src/components/common/Dashboard/MetricsWrapper.tsx → kafka-ui-react-app/src/components/common/Dashboard/MetricsWrapper.tsx


+ 0 - 0
src/components/common/PageLoader/PageLoader.tsx → kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx


+ 0 - 0
src/index.tsx → kafka-ui-react-app/src/index.tsx


+ 0 - 0
src/lib/constants.ts → kafka-ui-react-app/src/lib/constants.ts


+ 0 - 0
src/lib/hooks/useInterval.ts → kafka-ui-react-app/src/lib/hooks/useInterval.ts


+ 0 - 0
src/lib/paths.ts → kafka-ui-react-app/src/lib/paths.ts


+ 0 - 0
src/lib/utils/formatBytes.ts → kafka-ui-react-app/src/lib/utils/formatBytes.ts


+ 0 - 0
src/react-app-env.d.ts → kafka-ui-react-app/src/react-app-env.d.ts


+ 0 - 0
src/redux/actionType.ts → kafka-ui-react-app/src/redux/actionType.ts


+ 1 - 1
src/redux/actions/actions.ts → kafka-ui-react-app/src/redux/actions/actions.ts

@@ -50,4 +50,4 @@ export const createTopicAction = createAsyncAction(
   ActionType.POST_TOPIC__REQUEST,
   ActionType.POST_TOPIC__SUCCESS,
   ActionType.POST_TOPIC__FAILURE,
-)<undefined, undefined, undefined>();
+)<undefined, Topic, undefined>();

+ 0 - 0
src/redux/actions/index.ts → kafka-ui-react-app/src/redux/actions/index.ts


+ 3 - 3
src/redux/actions/thunks.ts → kafka-ui-react-app/src/redux/actions/thunks.ts

@@ -5,7 +5,7 @@ import {
   Cluster,
   ClusterId,
   TopicFormData,
-  TopicName,
+  TopicName, Topic,
 } from 'redux/interfaces';
 
 export const fetchBrokers = (clusterId: ClusterId): PromiseThunk<void> => async (dispatch) => {
@@ -71,8 +71,8 @@ export const fetchTopicConfig = (clusterId: ClusterId, topicName: TopicName): Pr
 export const createTopic = (clusterId: ClusterId, form: TopicFormData): PromiseThunk<void> => async (dispatch) => {
   dispatch(actions.createTopicAction.request());
   try {
-    await api.postTopic(clusterId, form);
-    dispatch(actions.createTopicAction.success());
+    const topic: Topic = await api.postTopic(clusterId, form);
+    dispatch(actions.createTopicAction.success(topic));
   } catch (e) {
     dispatch(actions.createTopicAction.failure());
   }

+ 0 - 0
src/redux/api/brokers.ts → kafka-ui-react-app/src/redux/api/brokers.ts


+ 0 - 0
src/redux/api/clusters.ts → kafka-ui-react-app/src/redux/api/clusters.ts


+ 0 - 0
src/redux/api/index.ts → kafka-ui-react-app/src/redux/api/index.ts


Một số tệp đã không được hiển thị bởi vì quá nhiều tập tin thay đổi trong này khác