
Merge master

Marat Adiyatullin 4 years ago
parent
commit
a0b78f68df
40 changed files with 1811 additions and 676 deletions
  1. 2 2
      kafka-ui-api/pom.xml
  2. 3 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ClusterUtil.java
  3. 0 5
      kafka-ui-api/src/main/resources/application.yml
  4. 3 0
      kafka-ui-e2e-checks/.env.example
  5. 6 0
      kafka-ui-e2e-checks/.gitignore
  6. 117 0
      kafka-ui-e2e-checks/README.md
  7. 25 0
      kafka-ui-e2e-checks/docker/selenoid.yaml
  8. 180 0
      kafka-ui-e2e-checks/pom.xml
  9. BIN
      kafka-ui-e2e-checks/screenshots/main.png
  10. 11 0
      kafka-ui-e2e-checks/selenoid/config/browsers.json
  11. 2 0
      kafka-ui-e2e-checks/src/main/resources/allure.properties
  12. 21 0
      kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/SmokeTests.java
  13. 107 0
      kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java
  14. 34 0
      kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/CamelCaseToSpacedDisplayNameGenerator.java
  15. 27 0
      kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/TestConfiguration.java
  16. 16 0
      kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/MainPage.java
  17. 16 0
      kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/Pages.java
  18. 7 0
      kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/screenshots/NoReferenceScreenshotFoundException.java
  19. 150 0
      kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/screenshots/Screenshooter.java
  20. 446 157
      kafka-ui-react-app/package-lock.json
  21. 2 2
      kafka-ui-react-app/package.json
  22. 2 3
      kafka-ui-react-app/src/components/Schemas/List/ListContainer.tsx
  23. 32 30
      kafka-ui-react-app/src/components/Topics/List/ListItem.tsx
  24. 8 5
      kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx
  25. 8 8
      kafka-ui-react-app/src/components/Topics/Topic/Details/Details.tsx
  26. 2 0
      kafka-ui-react-app/src/components/Topics/Topic/Details/DetailsContainer.ts
  27. 2 3
      kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Messages.tsx
  28. 16 14
      kafka-ui-react-app/src/components/Topics/Topic/Details/Overview/Overview.tsx
  29. 42 0
      kafka-ui-react-app/src/components/Topics/Topic/Details/Overview/__test__/Overview.spec.tsx
  30. 71 0
      kafka-ui-react-app/src/components/Topics/Topic/Details/__test__/Details.spec.tsx
  31. 5 6
      kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamSelect.tsx
  32. 5 7
      kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParams.tsx
  33. 25 26
      kafka-ui-react-app/src/redux/actions/thunks/brokers.ts
  34. 24 22
      kafka-ui-react-app/src/redux/actions/thunks/clusters.ts
  35. 36 35
      kafka-ui-react-app/src/redux/actions/thunks/consumerGroups.ts
  36. 150 146
      kafka-ui-react-app/src/redux/actions/thunks/schemas.ts
  37. 196 194
      kafka-ui-react-app/src/redux/actions/thunks/topics.ts
  38. 9 6
      kafka-ui-react-app/src/redux/reducers/topics/selectors.ts
  39. 2 4
      kafka-ui-react-app/src/redux/store/configureStore/mockStoreCreator.ts
  40. 1 0
      pom.xml

+ 2 - 2
kafka-ui-api/pom.xml

@@ -38,8 +38,8 @@
             <artifactId>spring-boot-starter-security</artifactId>
         </dependency>
         <dependency>
-            <groupId>org.springframework.security</groupId>
-            <artifactId>spring-security-oauth2-client</artifactId>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-oauth2-client</artifactId>	
         </dependency>
         <dependency>
             <groupId>com.provectus</groupId>

+ 3 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ClusterUtil.java

@@ -139,7 +139,9 @@ public class ClusterUtil {
 
   public static InternalTopic mapToInternalTopic(TopicDescription topicDescription) {
     var topic = InternalTopic.builder();
-    topic.internal(topicDescription.isInternal());
+    topic.internal(
+        topicDescription.isInternal() || topicDescription.name().startsWith("_")
+    );
     topic.name(topicDescription.name());
 
     List<InternalPartition> partitions = topicDescription.partitions().stream().map(
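
With this change the backend marks a topic as internal not only when Kafka flags it as such, but also when its name starts with an underscore (e.g. `__consumer_offsets`). A minimal hedged sketch of that rule as a standalone predicate, with hypothetical topic names:

```java
// Hedged sketch (not part of the commit): the new internal-topic rule as a
// standalone predicate. The topic names below are hypothetical examples.
import java.util.function.BiPredicate;

class InternalTopicRuleSketch {

  // A topic is treated as internal if Kafka flags it as internal
  // OR its name starts with an underscore.
  static final BiPredicate<String, Boolean> IS_INTERNAL =
      (name, kafkaInternalFlag) -> kafkaInternalFlag || name.startsWith("_");

  public static void main(String[] args) {
    System.out.println(IS_INTERNAL.test("__consumer_offsets", false)); // true
    System.out.println(IS_INTERNAL.test("_schemas", false));           // true
    System.out.println(IS_INTERNAL.test("orders", false));             // false
  }
}
```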

+ 0 - 5
kafka-ui-api/src/main/resources/application.yml

@@ -1,9 +1,4 @@
 kafka:
-  clusters:
-    - name: local
-      bootstrapServers: localhost:9092
-      zookeeper: localhost:2181
-      schemaRegistry: http://localhost:8081
   admin-client-timeout: 5000
 zookeeper:
   connection-timeout: 1000

+ 3 - 0
kafka-ui-e2e-checks/.env.example

@@ -0,0 +1,3 @@
+USE_LOCAL_BROWSER=true
+SHOULD_START_SELENOID=false
+TURN_OFF_SCREENSHOTS=true

+ 6 - 0
kafka-ui-e2e-checks/.gitignore

@@ -0,0 +1,6 @@
+.env
+build/
+allure-results/
+selenoid/video/
+target/
+selenoid/logs/

+ 117 - 0
kafka-ui-e2e-checks/README.md

@@ -0,0 +1,117 @@
+### E2E UI automation for Kafka-ui
+
+This module contains E2E UI checks for kafka-ui.
+
+### Table of Contents
+
+- [Prerequisites](#prerequisites)
+- [How to install](#how-to-install)
+- [Environment variables](#environment-variables)
+- [How to run checks](#how-to-run-checks)
+- [Reporting](#reporting)
+- [Screenshots](#screenshots)
+- [How to develop](#how-to-develop)
+- [Environments setup](#environments-setup)
+- [Test Data](#test-data)
+- [Actions](#actions)
+- [Checks](#checks)
+- [Parallelization](#parallelization)
+- [Tips](#tips)
+
+### Prerequisites
+- Docker & Docker-compose
+- Java
+- Maven
+  
+### How to install
+```
+git clone https://github.com/provectus/kafka-ui.git
+cd kafka-ui-e2e-checks
+docker pull selenoid/vnc:chrome_86.0  
+```
+### Environment variables
+
+|Name                                   |   Default   | Description
+|---------------------------------------|-------------|---------------------
+|`USE_LOCAL_BROWSER`                    |  `true`     | run checks in a local browser instead of the selenoid container
+|`CLEAR_REPORTS_DIR`                    |  `true`     | clear reports dir on startup
+|`SHOULD_START_SELENOID`                |  `false`    | start a selenoid container on startup
+|`SELENOID_URL`                         |  `http://localhost:4444/wd/hub`    | URL of a remote selenoid instance
+|`BASE_URL`                             |  `http://localhost:8080/`    | base url for selenide configuration
+|`PIXELS_THRESHOLD`                     |  `200`    | number of differing pixels allowed before a screenshot check fails
+|`SCREENSHOTS_FOLDER`                   |  `screenshots/`    | folder for reference screenshots
+|`DIFF_SCREENSHOTS_FOLDER`              |  `build/__diff__/`    | folder for screenshot diffs
+|`ACTUAL_SCREENSHOTS_FOLDER`            |  `build/__actual__/`   | folder for actual screenshots taken during checks
+|`SHOULD_SAVE_SCREENSHOTS_IF_NOT_EXIST` |  `true`    | save the current screenshot as the reference when no reference exists yet
+|`TURN_OFF_SCREENSHOTS`                 |  `false`    | if `true`, `compareScreenshots` will not fail on differing screenshots. Useful for functional debugging on a local machine, while preserving golden screenshots made in selenoid
+
+### How to run checks
+
+1. Run `kafka-ui` 
+```
+cd docker
+docker-compose -f kafka-ui.yaml up -d
+```
+2. Run `selenoid-ui` 
+```
+cd kafka-ui-e2e-checks/docker
+docker-compose -f selenoid.yaml up -d
+```
+3. Run checks 
+```
+cd kafka-ui-e2e-checks
+mvn test
+```
+
+There are several ways to run the checks:
+
+1. If you don't have selenoid running on your machine
+```
+ mvn test -DSHOULD_START_SELENOID=true
+```
+⚠️ If you want to run checks from an IDE with this approach, you need to set the
+environment variable `SHOULD_START_SELENOID=true` in `Run/Edit Configurations...`
+
+2. For development purposes it is better to start a separate selenoid via docker-compose.
+Do it in a separate window:
+```
+cd docker
+docker-compose -f selenoid.yaml up
+```
+Then you can just `mvn test`. By default, `SELENOID_URL` will resolve to `http://localhost:4444/wd/hub`
+
+This is the preferred way to run.
+
+3. If you have a remote selenoid instance, set the `SELENOID_URL` environment variable.
+
+Example:
+`mvn test -DSELENOID_URL=http://localhost:4444/wd/hub`
+
+That's the way to run tests in CI, with selenoid set up somewhere in the cloud.
+
+### Reporting
+
+Reports are in `allure-results` folder.
+If you have the allure commandline installed (e.g. as described [here](https://docs.qameta.io/allure/#_installing_a_commandline) or [here](https://www.npmjs.com/package/allure-commandline)),
+you can see the allure report with:
+```
+allure serve
+```
+### Screenshots
+
+Reference screenshots are kept in `SCREENSHOTS_FOLDER` (default: `kafka-ui-e2e-checks/screenshots`).
+
+### How to develop
+> ⚠️ todo 
+### Environments setup
+> ⚠️ todo 
+### Test Data
+> ⚠️ todo 
+### Actions
+> ⚠️ todo 
+### Checks
+> ⚠️ todo 
+### Parallelization
+> ⚠️ todo 
+### Tips
+ - install `Selenium UI Testing plugin` in IDEA
+

+ 25 - 0
kafka-ui-e2e-checks/docker/selenoid.yaml

@@ -0,0 +1,25 @@
+version: '3'
+
+services:
+  selenoid:
+    network_mode: bridge
+    image: aerokube/selenoid:1.10.3
+    volumes:
+      - "../selenoid/config:/etc/selenoid"
+      - "/var/run/docker.sock:/var/run/docker.sock"
+      - "../selenoid/video:/video"
+      - "../selenoid/logs:/opt/selenoid/logs"
+    environment:
+      - OVERRIDE_VIDEO_OUTPUT_DIR=video
+    command: [ "-conf", "/etc/selenoid/browsers.json", "-video-output-dir", "/opt/selenoid/video", "-log-output-dir", "/opt/selenoid/logs" ]
+    ports:
+      - "4444:4444"
+
+  selenoid-ui:
+    network_mode: bridge
+    image: aerokube/selenoid-ui:latest-release
+    links:
+      - selenoid
+    ports:
+      - "8081:8080"
+    command: [ "--selenoid-uri", "http://selenoid:4444" ]

+ 180 - 0
kafka-ui-e2e-checks/pom.xml

@@ -0,0 +1,180 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>kafka-ui</artifactId>
+        <groupId>com.provectus</groupId>
+        <version>0.0.11-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>kafka-ui-e2e-checks</artifactId>
+    <properties>
+        <junit.version>5.7.0</junit.version>
+        <aspectj.version>1.9.6</aspectj.version>
+        <allure.version>2.13.7</allure.version>
+        <testcontainers.version>1.15.2</testcontainers.version>
+        <selenide.version>5.16.2</selenide.version>
+        <assertj.version>3.17.1</assertj.version>
+        <google.auto-service.version>1.0-rc7</google.auto-service.version>
+        <hamcrest.version>2.2</hamcrest.version>
+        <slf4j.version>1.7.29</slf4j.version>
+        <testcontainers.junit-jupiter.version>1.15.1</testcontainers.junit-jupiter.version>
+        <allure.java-commons.version>2.13.6</allure.java-commons.version>
+        <dotenv.version>2.2.0</dotenv.version>
+        <junit.platform-launcher.version>1.6.2</junit.platform-launcher.version>
+        <allure.maven-plugin.version>2.6</allure.maven-plugin.version>
+        <ashot.version>1.5.4</ashot.version>
+        <allure.screendiff-plugin.version>2.13.9</allure.screendiff-plugin.version>
+        <maven.surefire-plugin.version>2.22.2</maven.surefire-plugin.version>
+        <allure-maven.version>2.10.0</allure-maven.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka_2.13</artifactId>
+            <version>${kafka.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>testcontainers</artifactId>
+            <version>${testcontainers.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>io.qameta.allure</groupId>
+            <artifactId>allure-junit5</artifactId>
+            <version>${allure.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.codeborne</groupId>
+            <artifactId>selenide</artifactId>
+            <version>${selenide.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>io.qameta.allure</groupId>
+            <artifactId>allure-selenide</artifactId>
+            <version>${allure.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest</artifactId>
+            <version>${hamcrest.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.assertj</groupId>
+            <artifactId>assertj-core</artifactId>
+            <version>${assertj.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.google.auto.service</groupId>
+            <artifactId>auto-service</artifactId>
+            <version>${google.auto-service.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.junit.jupiter</groupId>
+            <artifactId>junit-jupiter-api</artifactId>
+            <version>${junit.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.junit.jupiter</groupId>
+            <artifactId>junit-jupiter-engine</artifactId>
+            <version>${junit.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-simple</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.projectlombok</groupId>
+            <artifactId>lombok</artifactId>
+            <version>${org.projectlombok.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.aspectj</groupId>
+            <artifactId>aspectjrt</artifactId>
+            <version>${aspectj.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>junit-jupiter</artifactId>
+            <version>${testcontainers.junit-jupiter.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>io.qameta.allure</groupId>
+            <artifactId>allure-java-commons</artifactId>
+            <version>${allure.java-commons.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>io.github.cdimascio</groupId>
+            <artifactId>dotenv-java</artifactId>
+            <version>${dotenv.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.junit.platform</groupId>
+            <artifactId>junit-platform-launcher</artifactId>
+            <version>${junit.platform-launcher.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ru.yandex.qatools.allure</groupId>
+            <artifactId>allure-maven-plugin</artifactId>
+            <version>${allure.maven-plugin.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ru.yandex.qatools.ashot</groupId>
+            <artifactId>ashot</artifactId>
+            <version>${ashot.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.seleniumhq.selenium</groupId>
+                    <artifactId>selenium-remote-driver</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>io.qameta.allure.plugins</groupId>
+            <artifactId>screen-diff-plugin</artifactId>
+            <version>${allure.screendiff-plugin.version}</version>
+        </dependency>
+    </dependencies>
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <version>${maven.surefire-plugin.version}</version>
+                <configuration>
+                    <argLine>
+                        -javaagent:"${settings.localRepository}/org/aspectj/aspectjweaver/${aspectj.version}/aspectjweaver-${aspectj.version}.jar"
+                    </argLine>
+                </configuration>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.aspectj</groupId>
+                        <artifactId>aspectjweaver</artifactId>
+                        <version>${aspectj.version}</version>
+                    </dependency>
+                </dependencies>
+
+            </plugin>
+            <plugin>
+                <groupId>io.qameta.allure</groupId>
+                <artifactId>allure-maven</artifactId>
+                <version>${allure-maven.version}</version>
+            </plugin>
+
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <configuration>
+                    <source>${maven.compiler.source}</source>
+                    <target>${maven.compiler.target}</target>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+</project>

BIN
kafka-ui-e2e-checks/screenshots/main.png


+ 11 - 0
kafka-ui-e2e-checks/selenoid/config/browsers.json

@@ -0,0 +1,11 @@
+{
+  "chrome": {
+    "default": "86.0",
+    "versions": {
+      "86.0": {
+        "image": "selenoid/vnc:chrome_86.0",
+        "port": "4444"
+      }
+    }
+  }
+}

+ 2 - 0
kafka-ui-e2e-checks/src/main/resources/allure.properties

@@ -0,0 +1,2 @@
+allure.results.directory=allure-results
+allure.link.issue.pattern=https://github.com/provectus/kafka-ui/issues/{}
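
With this pattern, an `@Issue` value on a test is rendered in the Allure report as a link to the corresponding GitHub issue. A hedged illustration (the test class below is hypothetical, not part of the commit):

```java
// Hedged illustration: @Issue("380") combined with the pattern above is rendered
// in the Allure report as https://github.com/provectus/kafka-ui/issues/380.
import io.qameta.allure.Issue;
import org.junit.jupiter.api.Test;

public class IssueLinkExample {

  @Issue("380")
  @Test
  void someCheck() {
    // body omitted; the annotation wiring is what this sketch shows
  }
}
```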

+ 21 - 0
kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/SmokeTests.java

@@ -0,0 +1,21 @@
+package com.provectus.kafka.ui;
+
+import com.provectus.kafka.ui.base.BaseTest;
+import io.qameta.allure.Issue;
+import lombok.SneakyThrows;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+public class SmokeTests extends BaseTest {
+
+    @Disabled("till we get tests in ci run")
+    @SneakyThrows
+    @DisplayName("main page should load")
+    @Issue("380")
+    @Test
+    void mainPageLoads() {
+        pages.goTo("")
+            .mainPage.shouldBeOnPage();
+        compareScreenshots("main");
+    }
+}

+ 107 - 0
kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java

@@ -0,0 +1,107 @@
+package com.provectus.kafka.ui.base;
+
+import com.codeborne.selenide.Configuration;
+import com.codeborne.selenide.logevents.SelenideLogger;
+import com.provectus.kafka.ui.pages.Pages;
+import com.provectus.kafka.ui.screenshots.Screenshooter;
+import io.github.cdimascio.dotenv.Dotenv;
+import io.qameta.allure.selenide.AllureSelenide;
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.DisplayNameGeneration;
+import org.openqa.selenium.remote.DesiredCapabilities;
+import org.testcontainers.containers.BindMode;
+import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.utility.DockerImageName;
+
+import java.io.File;
+import java.util.Arrays;
+
+import static com.codeborne.selenide.Selenide.closeWebDriver;
+
+@Slf4j
+@DisplayNameGeneration(CamelCaseToSpacedDisplayNameGenerator.class)
+public class BaseTest {
+
+  protected Pages pages = Pages.INSTANCE;
+
+  private Screenshooter screenshooter = new Screenshooter();
+
+  public void compareScreenshots(String name) {
+    screenshooter.compareScreenshots(name);
+  }
+
+  public void compareScreenshots(String name, Boolean shouldUpdateScreenshots) {
+    screenshooter.compareScreenshots(name, shouldUpdateScreenshots);
+  }
+
+  public static GenericContainer selenoid =
+      new GenericContainer(DockerImageName.parse("aerokube/selenoid:latest-release"))
+          .withExposedPorts(4444)
+          .withFileSystemBind("selenoid/config/", "/etc/selenoid", BindMode.READ_WRITE)
+          .withFileSystemBind("/var/run/docker.sock", "/var/run/docker.sock", BindMode.READ_WRITE)
+          .withFileSystemBind("selenoid/video", "/opt/selenoid/video", BindMode.READ_WRITE)
+          .withFileSystemBind("selenoid/logs", "/opt/selenoid/logs", BindMode.READ_WRITE)
+          .withEnv("OVERRIDE_VIDEO_OUTPUT_DIR", "/opt/selenoid/video")
+          .withCommand(
+              "-conf", "/etc/selenoid/browsers.json", "-log-output-dir", "/opt/selenoid/logs");
+
+  static {
+    if (new File("./.env").exists()) {
+      Dotenv.load().entries().forEach(env -> System.setProperty(env.getKey(), env.getValue()));
+    }
+    if (TestConfiguration.CLEAR_REPORTS_DIR) {
+      clearReports();
+    }
+    setupSelenoid();
+  }
+
+  @AfterAll
+  public static void afterAll() {
+    closeWebDriver();
+    selenoid.close();
+  }
+
+  @SneakyThrows
+  private static void setupSelenoid() {
+    String remote = TestConfiguration.SELENOID_URL;
+    if (TestConfiguration.SHOULD_START_SELENOID) {
+      selenoid.start();
+      remote =
+          "http://%s:%s/wd/hub"
+              .formatted(selenoid.getContainerIpAddress(), selenoid.getMappedPort(4444));
+    }
+
+    Configuration.reportsFolder = TestConfiguration.REPORTS_FOLDER;
+    if (!TestConfiguration.USE_LOCAL_BROWSER) {
+      Configuration.remote = remote;
+      TestConfiguration.BASE_URL =
+          TestConfiguration.BASE_URL.replace("localhost", "host.docker.internal");
+    }
+    Configuration.screenshots = TestConfiguration.SCREENSHOTS;
+    Configuration.savePageSource = TestConfiguration.SAVE_PAGE_SOURCE;
+    Configuration.reopenBrowserOnFail = TestConfiguration.REOPEN_BROWSER_ON_FAIL;
+    Configuration.browser = TestConfiguration.BROWSER;
+    Configuration.baseUrl = TestConfiguration.BASE_URL;
+    Configuration.browserSize = TestConfiguration.BROWSER_SIZE;
+    var capabilities = new DesiredCapabilities();
+    capabilities.setCapability("enableVNC", TestConfiguration.ENABLE_VNC);
+    Configuration.browserCapabilities = capabilities;
+
+    SelenideLogger.addListener("allure", new AllureSelenide().savePageSource(false));
+  }
+
+  public static void clearReports() {
+    log.info("Clearing reports dir [%s]...".formatted(TestConfiguration.REPORTS_FOLDER));
+    File allureResults = new File(TestConfiguration.REPORTS_FOLDER);
+    if (allureResults.isDirectory()) {
+      File[] list = allureResults.listFiles();
+      if (list != null)
+        Arrays.stream(list)
+            .sequential()
+            .filter(e -> !e.getName().equals("categories.json"))
+            .forEach(File::delete);
+    }
+  }
+}

+ 34 - 0
kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/CamelCaseToSpacedDisplayNameGenerator.java

@@ -0,0 +1,34 @@
+package com.provectus.kafka.ui.base;
+
+import org.junit.jupiter.api.DisplayNameGenerator;
+import org.junit.platform.commons.util.ClassUtils;
+import org.junit.platform.commons.util.Preconditions;
+
+import java.lang.reflect.Method;
+
+public class CamelCaseToSpacedDisplayNameGenerator implements DisplayNameGenerator {
+  @Override
+  public String generateDisplayNameForClass(Class<?> testClass) {
+    String name = testClass.getName();
+    int lastDot = name.lastIndexOf('.');
+    return name.substring(lastDot + 1).replaceAll("([A-Z])", " $1").toLowerCase();
+  }
+
+  @Override
+  public String generateDisplayNameForNestedClass(Class<?> nestedClass) {
+    return nestedClass.getSimpleName();
+  }
+
+  @Override
+  public String generateDisplayNameForMethod(Class<?> testClass, Method testMethod) {
+    return testMethod.getName().replaceAll("([A-Z])", " $1").toLowerCase()
+        + parameterTypesAsString(testMethod);
+  }
+
+  static String parameterTypesAsString(Method method) {
+    Preconditions.notNull(method, "Method must not be null");
+    return method.getParameterTypes().length == 0
+        ? ""
+        : '(' + ClassUtils.nullSafeToString(Class::getSimpleName, method.getParameterTypes()) + ')';
+  }
+}
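
For reference, a hedged sketch (not part of the commit) of what this generator produces for a camelCase test method name; the sample class and method are hypothetical:

```java
import com.provectus.kafka.ui.base.CamelCaseToSpacedDisplayNameGenerator;

import java.lang.reflect.Method;

class DisplayNameExample {

  void mainPageShouldLoad() {} // hypothetical test method

  public static void main(String[] args) throws NoSuchMethodException {
    Method m = DisplayNameExample.class.getDeclaredMethod("mainPageShouldLoad");
    String name = new CamelCaseToSpacedDisplayNameGenerator()
        .generateDisplayNameForMethod(DisplayNameExample.class, m);
    System.out.println(name); // prints: main page should load
  }
}
```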

+ 27 - 0
kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/TestConfiguration.java

@@ -0,0 +1,27 @@
+package com.provectus.kafka.ui.base;
+
+public class TestConfiguration {
+  public static boolean CLEAR_REPORTS_DIR =
+      Boolean.parseBoolean(System.getProperty("CLEAR_REPORTS_DIR", "true"));
+
+  public static boolean SHOULD_START_SELENOID =
+      Boolean.parseBoolean(System.getProperty("SHOULD_START_SELENOID", "false"));
+
+  public static String BASE_URL = System.getProperty("BASE_URL", "http://localhost:8080/");
+
+  public static boolean USE_LOCAL_BROWSER =
+      Boolean.parseBoolean(System.getProperty("USE_LOCAL_BROWSER", "true"));
+
+  public static String SELENOID_URL =
+      System.getProperty("SELENOID_URL", "http://localhost:4444/wd/hub");
+  public static String REPORTS_FOLDER = System.getProperty("REPORTS_FOLDER", "allure-results");
+  public static Boolean SCREENSHOTS =
+      Boolean.parseBoolean(System.getProperty("SCREENSHOTS", "false"));
+  public static Boolean SAVE_PAGE_SOURCE =
+      Boolean.parseBoolean(System.getProperty("SAVE_PAGE_SOURCE", "false"));
+  public static Boolean REOPEN_BROWSER_ON_FAIL =
+      Boolean.parseBoolean(System.getProperty("REOPEN_BROWSER_ON_FAIL", "true"));
+  public static String BROWSER = System.getProperty("BROWSER", "chrome");
+  public static String BROWSER_SIZE = System.getProperty("BROWSER_SIZE", "1920x1080");
+  public static Boolean ENABLE_VNC = Boolean.parseBoolean(System.getProperty("ENABLE_VNC", "true"));
+}
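
All of these settings are plain JVM system properties (`.env` values are copied into system properties by `BaseTest`), so they can also be overridden programmatically before `TestConfiguration` is first loaded. A hedged sketch with hypothetical values, equivalent to passing `-D` flags to `mvn test`:

```java
// Hedged sketch (not part of the commit), equivalent to
// `mvn test -DUSE_LOCAL_BROWSER=false -DSELENOID_URL=...`.
public class ConfigOverrideExample {

  public static void main(String[] args) {
    // Must run before TestConfiguration is first referenced, because its
    // static fields are read from system properties at class-load time.
    System.setProperty("USE_LOCAL_BROWSER", "false");
    System.setProperty("SELENOID_URL", "http://selenoid.example:4444/wd/hub"); // hypothetical host

    System.out.println(com.provectus.kafka.ui.base.TestConfiguration.SELENOID_URL);
    // prints: http://selenoid.example:4444/wd/hub
  }
}
```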

+ 16 - 0
kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/MainPage.java

@@ -0,0 +1,16 @@
+package com.provectus.kafka.ui.pages;
+
+import com.codeborne.selenide.Condition;
+import io.qameta.allure.Step;
+import org.openqa.selenium.By;
+
+import static com.codeborne.selenide.Selenide.$;
+
+public class MainPage {
+
+    @Step
+    public void shouldBeOnPage(){
+        $(By.xpath("//*[contains(text(),'Loading')]")).shouldBe(Condition.disappear);
+        $(By.xpath("//h5[text()='Clusters']")).shouldBe(Condition.visible);
+    }
+}

+ 16 - 0
kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/Pages.java

@@ -0,0 +1,16 @@
+package com.provectus.kafka.ui.pages;
+
+import com.codeborne.selenide.Selenide;
+import com.provectus.kafka.ui.base.TestConfiguration;
+
+public class Pages {
+
+    public static Pages INSTANCE = new Pages();
+
+    public MainPage mainPage = new MainPage();
+
+    public Pages goTo(String path) {
+        Selenide.open(TestConfiguration.BASE_URL+path);
+        return this;
+    }
+}
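
`Pages` acts as a small page-object registry: `goTo` opens a path relative to `BASE_URL`, and the page fields expose assertions such as `shouldBeOnPage`. A hedged sketch of how another page object could follow the same pattern; `TopicsPage` and its locator are hypothetical, not part of the commit:

```java
package com.provectus.kafka.ui.pages;

import com.codeborne.selenide.Condition;
import io.qameta.allure.Step;
import org.openqa.selenium.By;

import static com.codeborne.selenide.Selenide.$;

// Hypothetical page object following the MainPage pattern above.
public class TopicsPage {

    @Step
    public void shouldBeOnPage() {
        // The heading text is an assumption about the UI, for illustration only.
        $(By.xpath("//*[text()='All Topics']")).shouldBe(Condition.visible);
    }
}
```

Under this pattern it would then be exposed as a field on `Pages` (e.g. `public TopicsPage topicsPage = new TopicsPage();`) and reached through `pages.goTo(...)`.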

+ 7 - 0
kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/screenshots/NoReferenceScreenshotFoundException.java

@@ -0,0 +1,7 @@
+package com.provectus.kafka.ui.screenshots;
+
+public class NoReferenceScreenshotFoundException extends Throwable {
+    public NoReferenceScreenshotFoundException(String name) {
+        super(("no reference screenshot found for %s".formatted(name)));
+    }
+}

+ 150 - 0
kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/screenshots/Screenshooter.java

@@ -0,0 +1,150 @@
+package com.provectus.kafka.ui.screenshots;
+
+import io.qameta.allure.Allure;
+import io.qameta.allure.Attachment;
+import lombok.SneakyThrows;
+import org.junit.jupiter.api.Assertions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import ru.yandex.qatools.ashot.AShot;
+import ru.yandex.qatools.ashot.Screenshot;
+import ru.yandex.qatools.ashot.comparison.ImageDiff;
+import ru.yandex.qatools.ashot.comparison.ImageDiffer;
+import ru.yandex.qatools.ashot.coordinates.WebDriverCoordsProvider;
+
+import javax.imageio.ImageIO;
+import java.awt.image.BufferedImage;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.nio.file.FileSystems;
+import java.util.List;
+
+import static com.codeborne.selenide.WebDriverRunner.getWebDriver;
+import static org.junit.jupiter.api.Assertions.fail;
+
+public class Screenshooter {
+
+  public static Logger log = LoggerFactory.getLogger(Screenshooter.class);
+
+  private static int PIXELS_THRESHOLD =
+      Integer.parseInt(System.getProperty("PIXELS_THRESHOLD", "200"));
+  private static String SCREENSHOTS_FOLDER =
+      System.getProperty("SCREENSHOTS_FOLDER", "screenshots/");
+  private static String DIFF_SCREENSHOTS_FOLDER =
+      System.getProperty("DIFF_SCREENSHOTS_FOLDER", "build/__diff__/");
+  private static String ACTUAL_SCREENSHOTS_FOLDER =
+      System.getProperty("ACTUAL_SCREENSHOTS_FOLDER", "build/__actual__/");
+  private static boolean SHOULD_SAVE_SCREENSHOTS_IF_NOT_EXIST =
+      Boolean.parseBoolean(System.getProperty("SHOULD_SAVE_SCREENSHOTS_IF_NOT_EXIST", "true"));
+  private static boolean TURN_OFF_SCREENSHOTS =
+      Boolean.parseBoolean(System.getProperty("TURN_OFF_SCREENSHOTS", "false"));
+
+  private File newFile(String name) {
+    var file = new File(name);
+    if (!file.exists()) {
+      file.mkdirs();
+    }
+    return file;
+  }
+
+  public Screenshooter() {
+    List.of(SCREENSHOTS_FOLDER, DIFF_SCREENSHOTS_FOLDER, ACTUAL_SCREENSHOTS_FOLDER)
+        .forEach(this::newFile);
+  }
+
+  public void compareScreenshots(String name) {
+    compareScreenshots(name, false);
+  }
+
+  @SneakyThrows
+  public void compareScreenshots(String name, boolean shouldUpdateScreenshotIfDiffer) {
+    if (TURN_OFF_SCREENSHOTS) {
+      return;
+    }
+    if (!doesScreenshotExist(name)) {
+      if (SHOULD_SAVE_SCREENSHOTS_IF_NOT_EXIST) {
+        updateActualScreenshot(name);
+      } else {
+        throw new NoReferenceScreenshotFoundException(name);
+      }
+    } else {
+      makeImageDiff(name, shouldUpdateScreenshotIfDiffer);
+    }
+  }
+
+  @SneakyThrows
+  private void updateActualScreenshot(String name) {
+    Screenshot actual =
+        new AShot().coordsProvider(new WebDriverCoordsProvider()).takeScreenshot(getWebDriver());
+    File file = newFile(SCREENSHOTS_FOLDER + name + ".png");
+    ImageIO.write(actual.getImage(), "png", file);
+    log.debug("created screenshot: %s \n at %s".formatted(name, file.getAbsolutePath()));
+  }
+
+  private static boolean doesScreenshotExist(String name) {
+    return new File(SCREENSHOTS_FOLDER + name + ".png").exists();
+  }
+
+  @SneakyThrows
+  private void makeImageDiff(String expectedName, boolean shouldUpdateScreenshotIfDiffer) {
+    String fullPathNameExpected = SCREENSHOTS_FOLDER + expectedName + ".png";
+    String fullPathNameActual = ACTUAL_SCREENSHOTS_FOLDER + expectedName + ".png";
+    String fullPathNameDiff = DIFF_SCREENSHOTS_FOLDER + expectedName + ".png";
+
+    //  activating allure plugin for showing diffs in report
+    Allure.label("testType", "screenshotDiff");
+
+    Screenshot actual =
+        new AShot().coordsProvider(new WebDriverCoordsProvider()).takeScreenshot(getWebDriver());
+    ImageIO.write(actual.getImage(), "png", newFile(fullPathNameActual));
+
+    Screenshot expected = new Screenshot(ImageIO.read(newFile(fullPathNameExpected)));
+    ImageDiff diff = new ImageDiffer().makeDiff(actual, expected);
+    BufferedImage diffImage = diff.getMarkedImage();
+    ImageIO.write(diffImage, "png", newFile(fullPathNameDiff));
+    // adding to report
+    diff(fullPathNameDiff);
+    // adding to report
+    actual(fullPathNameActual);
+    // adding to report
+    expected(fullPathNameExpected);
+
+    if (shouldUpdateScreenshotIfDiffer) {
+      if (diff.getDiffSize() > PIXELS_THRESHOLD) {
+        updateActualScreenshot(expectedName);
+      }
+    } else {
+      Assertions.assertTrue(
+          PIXELS_THRESHOLD >= diff.getDiffSize(),
+              ("Amount of differing pixels should be less than or equal to %s, actual %s\n"+
+                  "diff file: %s")
+              .formatted(PIXELS_THRESHOLD, diff.getDiffSize(), FileSystems.getDefault().getPath(fullPathNameDiff).normalize().toAbsolutePath().toString()));
+    }
+  }
+
+  @SneakyThrows
+  private byte[] imgToBytes(String filename) {
+    BufferedImage bImage2 = ImageIO.read(new File(filename));
+    var bos2 = new ByteArrayOutputStream();
+    ImageIO.write(bImage2, "png", bos2);
+    return bos2.toByteArray();
+  }
+
+  @SneakyThrows
+  @Attachment
+  private byte[] actual(String actualFileName) {
+    return imgToBytes(actualFileName);
+  }
+
+  @SneakyThrows
+  @Attachment
+  private byte[] expected(String expectedFileName) {
+    return imgToBytes(expectedFileName);
+  }
+
+  @SneakyThrows
+  @Attachment
+  private byte[] diff(String diffFileName) {
+    return imgToBytes(diffFileName);
+  }
+}
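
A hedged usage sketch (not part of the commit) of the two-argument overload exposed through `BaseTest`: passing `true` overwrites the reference image in `SCREENSHOTS_FOLDER` when the pixel diff exceeds `PIXELS_THRESHOLD`, instead of failing the assertion. The test class and screenshot name are hypothetical:

```java
package com.provectus.kafka.ui;

import com.provectus.kafka.ui.base.BaseTest;
import org.junit.jupiter.api.Test;

public class ScreenshotUpdateExample extends BaseTest {

    @Test
    void refreshMainPageReference() {
        pages.goTo("").mainPage.shouldBeOnPage();
        // With the update flag set, a screenshot that differs by more than
        // PIXELS_THRESHOLD overwrites screenshots/main.png instead of failing.
        compareScreenshots("main", true);
    }
}
```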

File diff suppressed because it is too large
+ 446 - 157
kafka-ui-react-app/package-lock.json


+ 2 - 2
kafka-ui-react-app/package.json

@@ -89,7 +89,7 @@
     "@typescript-eslint/eslint-plugin": "^4.20.0",
     "@typescript-eslint/parser": "^4.20.0",
     "@wojtekmaj/enzyme-adapter-react-17": "^0.6.0",
-    "dotenv": "^8.2.0",
+    "dotenv": "^9.0.1",
     "enzyme": "^3.11.0",
     "enzyme-to-json": "^3.6.1",
     "eslint": "^7.22.0",
@@ -105,7 +105,7 @@
     "fetch-mock-jest": "^1.5.1",
     "husky": "^6.0.0",
     "jest-sonar-reporter": "^2.0.0",
-    "lint-staged": "^10.5.4",
+    "lint-staged": "^11.0.0",
     "node-sass": "^5.0.0",
     "prettier": "^2.2.1",
     "react-scripts": "4.0.3",

+ 2 - 3
kafka-ui-react-app/src/components/Schemas/List/ListContainer.tsx

@@ -18,9 +18,8 @@ const mapStateToProps = (state: RootState) => ({
   isFetching: getIsSchemaListFetching(state),
   schemas: getSchemaList(state),
   globalSchemaCompatibilityLevel: getGlobalSchemaCompatibilityLevel(state),
-  isGlobalSchemaCompatibilityLevelFetched: getGlobalSchemaCompatibilityLevelFetched(
-    state
-  ),
+  isGlobalSchemaCompatibilityLevelFetched:
+    getGlobalSchemaCompatibilityLevelFetched(state),
 });
 
 const mapDispatchToProps = {

+ 32 - 30
kafka-ui-react-app/src/components/Topics/List/ListItem.tsx

@@ -23,10 +23,8 @@ const ListItem: React.FC<ListItemProps> = ({
   clusterName,
   clearTopicMessages,
 }) => {
-  const [
-    isDeleteTopicConfirmationVisible,
-    setDeleteTopicConfirmationVisible,
-  ] = React.useState(false);
+  const [isDeleteTopicConfirmationVisible, setDeleteTopicConfirmationVisible] =
+    React.useState(false);
 
   const outOfSyncReplicas = React.useMemo(() => {
     if (partitions === undefined || partitions.length === 0) {
@@ -66,33 +64,37 @@ const ListItem: React.FC<ListItemProps> = ({
           {internal ? 'Internal' : 'External'}
         </div>
       </td>
-      <td>
-        <div className="has-text-right">
-          <Dropdown
-            label={
-              <span className="icon">
-                <i className="fas fa-cog" />
-              </span>
-            }
-            right
-          >
-            <DropdownItem onClick={clearTopicMessagesHandler}>
-              <span className="has-text-danger">Clear Messages</span>
-            </DropdownItem>
-            <DropdownItem
-              onClick={() => setDeleteTopicConfirmationVisible(true)}
+      <td className="topic-action-block">
+        {!internal ? (
+          <>
+            <div className="has-text-right">
+              <Dropdown
+                label={
+                  <span className="icon">
+                    <i className="fas fa-cog" />
+                  </span>
+                }
+                right
+              >
+                <DropdownItem onClick={clearTopicMessagesHandler}>
+                  <span className="has-text-danger">Clear Messages</span>
+                </DropdownItem>
+                <DropdownItem
+                  onClick={() => setDeleteTopicConfirmationVisible(true)}
+                >
+                  <span className="has-text-danger">Remove Topic</span>
+                </DropdownItem>
+              </Dropdown>
+            </div>
+            <ConfirmationModal
+              isOpen={isDeleteTopicConfirmationVisible}
+              onCancel={() => setDeleteTopicConfirmationVisible(false)}
+              onConfirm={deleteTopicHandler}
             >
-              <span className="has-text-danger">Remove Topic</span>
-            </DropdownItem>
-          </Dropdown>
-        </div>
-        <ConfirmationModal
-          isOpen={isDeleteTopicConfirmationVisible}
-          onCancel={() => setDeleteTopicConfirmationVisible(false)}
-          onConfirm={deleteTopicHandler}
-        >
-          Are you sure want to remove <b>{name}</b> topic?
-        </ConfirmationModal>
+              Are you sure want to remove <b>{name}</b> topic?
+            </ConfirmationModal>
+          </>
+        ) : null}
       </td>
     </tr>
   );

+ 8 - 5
kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx

@@ -28,28 +28,31 @@ describe('ListItem', () => {
   );
 
   it('triggers the deleting messages when clicked on the delete messages button', () => {
-    const component = shallow(setupComponent());
+    const component = shallow(setupComponent({ topic: externalTopicPayload }));
+    expect(component.exists('.topic-action-block')).toBeTruthy();
     component.find('DropdownItem').at(0).simulate('click');
     expect(mockDeleteMessages).toBeCalledTimes(1);
     expect(mockDeleteMessages).toBeCalledWith(
       clusterName,
-      internalTopicPayload.name
+      externalTopicPayload.name
     );
   });
 
   it('triggers the deleteTopic when clicked on the delete button', () => {
-    const wrapper = shallow(setupComponent());
+    const wrapper = shallow(setupComponent({ topic: externalTopicPayload }));
+    expect(wrapper.exists('.topic-action-block')).toBeTruthy();
     expect(wrapper.find('mock-ConfirmationModal').prop('isOpen')).toBeFalsy();
     wrapper.find('DropdownItem').at(1).simulate('click');
     const modal = wrapper.find('mock-ConfirmationModal');
     expect(modal.prop('isOpen')).toBeTruthy();
     modal.simulate('confirm');
     expect(mockDelete).toBeCalledTimes(1);
-    expect(mockDelete).toBeCalledWith(clusterName, internalTopicPayload.name);
+    expect(mockDelete).toBeCalledWith(clusterName, externalTopicPayload.name);
   });
 
   it('closes ConfirmationModal when clicked on the cancel button', () => {
-    const wrapper = shallow(setupComponent());
+    const wrapper = shallow(setupComponent({ topic: externalTopicPayload }));
+    expect(wrapper.exists('.topic-action-block')).toBeTruthy();
     expect(wrapper.find('mock-ConfirmationModal').prop('isOpen')).toBeFalsy();
     wrapper.find('DropdownItem').last().simulate('click');
     expect(wrapper.find('mock-ConfirmationModal').prop('isOpen')).toBeTruthy();

+ 8 - 8
kafka-ui-react-app/src/components/Topics/Topic/Details/Details.tsx

@@ -19,6 +19,7 @@ import SettingsContainer from './Settings/SettingsContainer';
 interface Props extends Topic, TopicDetails {
   clusterName: ClusterName;
   topicName: TopicName;
+  isInternal: boolean;
   deleteTopic: (clusterName: ClusterName, topicName: TopicName) => void;
   clearTopicMessages(clusterName: ClusterName, topicName: TopicName): void;
 }
@@ -26,15 +27,14 @@ interface Props extends Topic, TopicDetails {
 const Details: React.FC<Props> = ({
   clusterName,
   topicName,
+  isInternal,
   deleteTopic,
   clearTopicMessages,
 }) => {
   const history = useHistory();
   const { isReadOnly } = React.useContext(ClusterContext);
-  const [
-    isDeleteTopicConfirmationVisible,
-    setDeleteTopicConfirmationVisible,
-  ] = React.useState(false);
+  const [isDeleteTopicConfirmationVisible, setDeleteTopicConfirmationVisible] =
+    React.useState(false);
   const deleteTopicHandler = React.useCallback(() => {
     deleteTopic(clusterName, topicName);
     history.push(clusterTopicsPath(clusterName));
@@ -74,8 +74,8 @@ const Details: React.FC<Props> = ({
           </NavLink>
         </div>
         <div className="navbar-end">
-          <div className="buttons">
-            {!isReadOnly && (
+          {!isReadOnly && !isInternal ? (
+            <div className="buttons">
               <>
                 <button
                   type="button"
@@ -107,8 +107,8 @@ const Details: React.FC<Props> = ({
                   Are you sure want to remove <b>{topicName}</b> topic?
                 </ConfirmationModal>
               </>
-            )}
-          </div>
+            </div>
+          ) : null}
         </div>
       </nav>
       <br />

+ 2 - 0
kafka-ui-react-app/src/components/Topics/Topic/Details/DetailsContainer.ts

@@ -2,6 +2,7 @@ import { connect } from 'react-redux';
 import { ClusterName, RootState, TopicName } from 'redux/interfaces';
 import { withRouter, RouteComponentProps } from 'react-router-dom';
 import { deleteTopic, clearTopicMessages } from 'redux/actions';
+import { getIsTopicInternal } from 'redux/reducers/topics/selectors';
 
 import Details from './Details';
 
@@ -22,6 +23,7 @@ const mapStateToProps = (
 ) => ({
   clusterName,
   topicName,
+  isInternal: getIsTopicInternal(state, topicName),
 });
 
 const mapDispatchToProps = {

+ 2 - 3
kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Messages.tsx

@@ -50,9 +50,8 @@ const Messages: React.FC<Props> = ({
   fetchTopicMessages,
 }) => {
   const [searchQuery, setSearchQuery] = React.useState<string>('');
-  const [searchTimestamp, setSearchTimestamp] = React.useState<Date | null>(
-    null
-  );
+  const [searchTimestamp, setSearchTimestamp] =
+    React.useState<Date | null>(null);
   const [filterProps, setFilterProps] = React.useState<FilterProps[]>([]);
   const [selectedSeekType, setSelectedSeekType] = React.useState<SeekType>(
     SeekType.OFFSET

+ 16 - 14
kafka-ui-react-app/src/components/Topics/Topic/Details/Overview/Overview.tsx

@@ -75,22 +75,24 @@ const Overview: React.FC<Props> = ({
               <td>{offsetMin}</td>
               <td>{offsetMax}</td>
               <td className="has-text-right">
-                <Dropdown
-                  label={
-                    <span className="icon">
-                      <i className="fas fa-cog" />
-                    </span>
-                  }
-                  right
-                >
-                  <DropdownItem
-                    onClick={() =>
-                      clearTopicMessages(clusterName, topicName, [partition])
+                {!internal ? (
+                  <Dropdown
+                    label={
+                      <span className="icon">
+                        <i className="fas fa-cog" />
+                      </span>
                     }
+                    right
                   >
-                    <span className="has-text-danger">Clear Messages</span>
-                  </DropdownItem>
-                </Dropdown>
+                    <DropdownItem
+                      onClick={() =>
+                        clearTopicMessages(clusterName, topicName, [partition])
+                      }
+                    >
+                      <span className="has-text-danger">Clear Messages</span>
+                    </DropdownItem>
+                  </Dropdown>
+                ) : null}
               </td>
             </tr>
           ))}

+ 42 - 0
kafka-ui-react-app/src/components/Topics/Topic/Details/Overview/__test__/Overview.spec.tsx

@@ -0,0 +1,42 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import Overview from 'components/Topics/Topic/Details/Overview/Overview';
+
+describe('Overview', () => {
+  const mockInternal = false;
+  const mockClusterName = 'local';
+  const mockTopicName = 'topic';
+  const mockClearTopicMessages = jest.fn();
+  const mockPartitions = [
+    {
+      partition: 1,
+      leader: 1,
+      replicas: [
+        {
+          broker: 1,
+          leader: false,
+          inSync: true,
+        },
+      ],
+      offsetMax: 0,
+      offsetMin: 0,
+    },
+  ];
+
+  describe('when it has internal flag', () => {
+    it('does not render the Action button a Topic', () => {
+      const component = shallow(
+        <Overview
+          name={mockTopicName}
+          partitions={mockPartitions}
+          internal={mockInternal}
+          clusterName={mockClusterName}
+          topicName={mockTopicName}
+          clearTopicMessages={mockClearTopicMessages}
+        />
+      );
+
+      expect(component.exists('Dropdown')).toBeTruthy();
+    });
+  });
+});

+ 71 - 0
kafka-ui-react-app/src/components/Topics/Topic/Details/__test__/Details.spec.tsx

@@ -0,0 +1,71 @@
+import React from 'react';
+import { mount } from 'enzyme';
+import { StaticRouter } from 'react-router-dom';
+import ClusterContext from 'components/contexts/ClusterContext';
+import Details from 'components/Topics/Topic/Details/Details';
+import {
+  internalTopicPayload,
+  externalTopicPayload,
+} from 'redux/reducers/topics/__test__/fixtures';
+
+describe('Details', () => {
+  const mockDelete = jest.fn();
+  const mockClusterName = 'local';
+  const mockClearTopicMessages = jest.fn();
+  const mockInternalTopicPayload = internalTopicPayload.internal;
+  const mockExternalTopicPayload = externalTopicPayload.internal;
+
+  describe('when it has readonly flag', () => {
+    it('does not render the Action button a Topic', () => {
+      const component = mount(
+        <StaticRouter>
+          <ClusterContext.Provider
+            value={{
+              isReadOnly: true,
+              hasKafkaConnectConfigured: true,
+              hasSchemaRegistryConfigured: true,
+            }}
+          >
+            <Details
+              clusterName={mockClusterName}
+              topicName={internalTopicPayload.name}
+              name={internalTopicPayload.name}
+              isInternal={mockInternalTopicPayload}
+              deleteTopic={mockDelete}
+              clearTopicMessages={mockClearTopicMessages}
+            />
+          </ClusterContext.Provider>
+        </StaticRouter>
+      );
+
+      expect(component.exists('button')).toBeFalsy();
+    });
+  });
+
+  describe('when it does not have readonly flag', () => {
+    it('renders the Action button a Topic', () => {
+      const component = mount(
+        <StaticRouter>
+          <ClusterContext.Provider
+            value={{
+              isReadOnly: false,
+              hasKafkaConnectConfigured: true,
+              hasSchemaRegistryConfigured: true,
+            }}
+          >
+            <Details
+              clusterName={mockClusterName}
+              topicName={internalTopicPayload.name}
+              name={internalTopicPayload.name}
+              isInternal={mockExternalTopicPayload}
+              deleteTopic={mockDelete}
+              clearTopicMessages={mockClearTopicMessages}
+            />
+          </ClusterContext.Provider>
+        </StaticRouter>
+      );
+
+      expect(component.exists('button')).toBeTruthy();
+    });
+  });
+});

+ 5 - 6
kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamSelect.tsx

@@ -39,12 +39,11 @@ const CustomParamSelect: React.FC<CustomParamSelectProps> = ({
     return valid || 'Custom Parameter must be unique';
   };
 
-  const onChange = (inputName: string) => (
-    event: React.ChangeEvent<HTMLSelectElement>
-  ) => {
-    trigger(inputName);
-    onNameChange(index, event.target.value);
-  };
+  const onChange =
+    (inputName: string) => (event: React.ChangeEvent<HTMLSelectElement>) => {
+      trigger(inputName);
+      onNameChange(index, event.target.value);
+    };
 
   return (
     <>

+ 5 - 7
kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParams.tsx

@@ -35,13 +35,11 @@ const CustomParams: React.FC<Props> = ({ isSubmitting, config }) => {
       )
     : {};
 
-  const [
-    formCustomParams,
-    setFormCustomParams,
-  ] = React.useState<TopicFormCustomParams>({
-    byIndex,
-    allIndexes: Object.keys(byIndex),
-  });
+  const [formCustomParams, setFormCustomParams] =
+    React.useState<TopicFormCustomParams>({
+      byIndex,
+      allIndexes: Object.keys(byIndex),
+    });
 
   const onAdd = (event: React.MouseEvent<HTMLButtonElement>) => {
     event.preventDefault();

+ 25 - 26
kafka-ui-react-app/src/redux/actions/thunks/brokers.ts

@@ -6,30 +6,29 @@ import * as actions from 'redux/actions/actions';
 const apiClientConf = new Configuration(BASE_PARAMS);
 export const brokersApiClient = new BrokersApi(apiClientConf);
 
-export const fetchBrokers = (
-  clusterName: ClusterName
-): PromiseThunkResult => async (dispatch) => {
-  dispatch(actions.fetchBrokersAction.request());
-  try {
-    const payload = await brokersApiClient.getBrokers({ clusterName });
-    dispatch(actions.fetchBrokersAction.success(payload));
-  } catch (e) {
-    dispatch(actions.fetchBrokersAction.failure());
-  }
-};
+export const fetchBrokers =
+  (clusterName: ClusterName): PromiseThunkResult =>
+  async (dispatch) => {
+    dispatch(actions.fetchBrokersAction.request());
+    try {
+      const payload = await brokersApiClient.getBrokers({ clusterName });
+      dispatch(actions.fetchBrokersAction.success(payload));
+    } catch (e) {
+      dispatch(actions.fetchBrokersAction.failure());
+    }
+  };
 
-export const fetchBrokerMetrics = (
-  clusterName: ClusterName,
-  brokerId: BrokerId
-): PromiseThunkResult => async (dispatch) => {
-  dispatch(actions.fetchBrokerMetricsAction.request());
-  try {
-    const payload = await brokersApiClient.getBrokersMetrics({
-      clusterName,
-      id: brokerId,
-    });
-    dispatch(actions.fetchBrokerMetricsAction.success(payload));
-  } catch (e) {
-    dispatch(actions.fetchBrokerMetricsAction.failure());
-  }
-};
+export const fetchBrokerMetrics =
+  (clusterName: ClusterName, brokerId: BrokerId): PromiseThunkResult =>
+  async (dispatch) => {
+    dispatch(actions.fetchBrokerMetricsAction.request());
+    try {
+      const payload = await brokersApiClient.getBrokersMetrics({
+        clusterName,
+        id: brokerId,
+      });
+      dispatch(actions.fetchBrokerMetricsAction.success(payload));
+    } catch (e) {
+      dispatch(actions.fetchBrokerMetricsAction.failure());
+    }
+  };

+ 24 - 22
kafka-ui-react-app/src/redux/actions/thunks/clusters.ts

@@ -16,26 +16,28 @@ export const fetchClustersList = (): PromiseThunkResult => async (dispatch) => {
   }
 };
 
-export const fetchClusterStats = (
-  clusterName: ClusterName
-): PromiseThunkResult => async (dispatch) => {
-  dispatch(actions.fetchClusterStatsAction.request());
-  try {
-    const payload = await clustersApiClient.getClusterStats({ clusterName });
-    dispatch(actions.fetchClusterStatsAction.success(payload));
-  } catch (e) {
-    dispatch(actions.fetchClusterStatsAction.failure());
-  }
-};
+export const fetchClusterStats =
+  (clusterName: ClusterName): PromiseThunkResult =>
+  async (dispatch) => {
+    dispatch(actions.fetchClusterStatsAction.request());
+    try {
+      const payload = await clustersApiClient.getClusterStats({ clusterName });
+      dispatch(actions.fetchClusterStatsAction.success(payload));
+    } catch (e) {
+      dispatch(actions.fetchClusterStatsAction.failure());
+    }
+  };
 
-export const fetchClusterMetrics = (
-  clusterName: ClusterName
-): PromiseThunkResult => async (dispatch) => {
-  dispatch(actions.fetchClusterMetricsAction.request());
-  try {
-    const payload = await clustersApiClient.getClusterMetrics({ clusterName });
-    dispatch(actions.fetchClusterMetricsAction.success(payload));
-  } catch (e) {
-    dispatch(actions.fetchClusterMetricsAction.failure());
-  }
-};
+export const fetchClusterMetrics =
+  (clusterName: ClusterName): PromiseThunkResult =>
+  async (dispatch) => {
+    dispatch(actions.fetchClusterMetricsAction.request());
+    try {
+      const payload = await clustersApiClient.getClusterMetrics({
+        clusterName,
+      });
+      dispatch(actions.fetchClusterMetricsAction.success(payload));
+    } catch (e) {
+      dispatch(actions.fetchClusterMetricsAction.failure());
+    }
+  };

+ 36 - 35
kafka-ui-react-app/src/redux/actions/thunks/consumerGroups.ts

@@ -10,39 +10,40 @@ import * as actions from 'redux/actions/actions';
 const apiClientConf = new Configuration(BASE_PARAMS);
 export const consumerGroupsApiClient = new ConsumerGroupsApi(apiClientConf);
 
-export const fetchConsumerGroupsList = (
-  clusterName: ClusterName
-): PromiseThunkResult => async (dispatch) => {
-  dispatch(actions.fetchConsumerGroupsAction.request());
-  try {
-    const consumerGroups = await consumerGroupsApiClient.getConsumerGroups({
-      clusterName,
-    });
-    dispatch(actions.fetchConsumerGroupsAction.success(consumerGroups));
-  } catch (e) {
-    dispatch(actions.fetchConsumerGroupsAction.failure());
-  }
-};
-
-export const fetchConsumerGroupDetails = (
-  clusterName: ClusterName,
-  consumerGroupID: ConsumerGroupID
-): PromiseThunkResult => async (dispatch) => {
-  dispatch(actions.fetchConsumerGroupDetailsAction.request());
-  try {
-    const consumerGroupDetails = await consumerGroupsApiClient.getConsumerGroup(
-      {
+export const fetchConsumerGroupsList =
+  (clusterName: ClusterName): PromiseThunkResult =>
+  async (dispatch) => {
+    dispatch(actions.fetchConsumerGroupsAction.request());
+    try {
+      const consumerGroups = await consumerGroupsApiClient.getConsumerGroups({
         clusterName,
-        id: consumerGroupID,
-      }
-    );
-    dispatch(
-      actions.fetchConsumerGroupDetailsAction.success({
-        consumerGroupID,
-        details: consumerGroupDetails,
-      })
-    );
-  } catch (e) {
-    dispatch(actions.fetchConsumerGroupDetailsAction.failure());
-  }
-};
+      });
+      dispatch(actions.fetchConsumerGroupsAction.success(consumerGroups));
+    } catch (e) {
+      dispatch(actions.fetchConsumerGroupsAction.failure());
+    }
+  };
+
+export const fetchConsumerGroupDetails =
+  (
+    clusterName: ClusterName,
+    consumerGroupID: ConsumerGroupID
+  ): PromiseThunkResult =>
+  async (dispatch) => {
+    dispatch(actions.fetchConsumerGroupDetailsAction.request());
+    try {
+      const consumerGroupDetails =
+        await consumerGroupsApiClient.getConsumerGroup({
+          clusterName,
+          id: consumerGroupID,
+        });
+      dispatch(
+        actions.fetchConsumerGroupDetailsAction.success({
+          consumerGroupID,
+          details: consumerGroupDetails,
+        })
+      );
+    } catch (e) {
+      dispatch(actions.fetchConsumerGroupDetailsAction.failure());
+    }
+  };

+ 150 - 146
kafka-ui-react-app/src/redux/actions/thunks/schemas.ts

@@ -20,160 +20,164 @@ import { isEqual } from 'lodash';
 const apiClientConf = new Configuration(BASE_PARAMS);
 export const schemasApiClient = new SchemasApi(apiClientConf);
 
-export const fetchSchemasByClusterName = (
-  clusterName: ClusterName
-): PromiseThunkResult<void> => async (dispatch) => {
-  dispatch(actions.fetchSchemasByClusterNameAction.request());
-  try {
-    const schemas = await schemasApiClient.getSchemas({ clusterName });
-    dispatch(actions.fetchSchemasByClusterNameAction.success(schemas));
-  } catch (e) {
-    dispatch(actions.fetchSchemasByClusterNameAction.failure());
-  }
-};
-
-export const fetchSchemaVersions = (
-  clusterName: ClusterName,
-  subject: SchemaName
-): PromiseThunkResult<void> => async (dispatch) => {
-  if (!subject) return;
-  dispatch(actions.fetchSchemaVersionsAction.request());
-  try {
-    const versions = await schemasApiClient.getAllVersionsBySubject({
-      clusterName,
-      subject,
-    });
-    dispatch(actions.fetchSchemaVersionsAction.success(versions));
-  } catch (e) {
-    dispatch(actions.fetchSchemaVersionsAction.failure());
-  }
-};
+export const fetchSchemasByClusterName =
+  (clusterName: ClusterName): PromiseThunkResult<void> =>
+  async (dispatch) => {
+    dispatch(actions.fetchSchemasByClusterNameAction.request());
+    try {
+      const schemas = await schemasApiClient.getSchemas({ clusterName });
+      dispatch(actions.fetchSchemasByClusterNameAction.success(schemas));
+    } catch (e) {
+      dispatch(actions.fetchSchemasByClusterNameAction.failure());
+    }
+  };
 
-export const fetchGlobalSchemaCompatibilityLevel = (
-  clusterName: ClusterName
-): PromiseThunkResult<void> => async (dispatch) => {
-  dispatch(actions.fetchGlobalSchemaCompatibilityLevelAction.request());
-  try {
-    const result = await schemasApiClient.getGlobalSchemaCompatibilityLevel({
-      clusterName,
-    });
-    dispatch(
-      actions.fetchGlobalSchemaCompatibilityLevelAction.success(
-        result.compatibility
-      )
-    );
-  } catch (e) {
-    dispatch(actions.fetchGlobalSchemaCompatibilityLevelAction.failure());
-  }
-};
+export const fetchSchemaVersions =
+  (clusterName: ClusterName, subject: SchemaName): PromiseThunkResult<void> =>
+  async (dispatch) => {
+    if (!subject) return;
+    dispatch(actions.fetchSchemaVersionsAction.request());
+    try {
+      const versions = await schemasApiClient.getAllVersionsBySubject({
+        clusterName,
+        subject,
+      });
+      dispatch(actions.fetchSchemaVersionsAction.success(versions));
+    } catch (e) {
+      dispatch(actions.fetchSchemaVersionsAction.failure());
+    }
+  };
 
-export const updateGlobalSchemaCompatibilityLevel = (
-  clusterName: ClusterName,
-  compatibilityLevel: CompatibilityLevelCompatibilityEnum
-): PromiseThunkResult<void> => async (dispatch) => {
-  dispatch(actions.updateGlobalSchemaCompatibilityLevelAction.request());
-  try {
-    await schemasApiClient.updateGlobalSchemaCompatibilityLevel({
-      clusterName,
-      compatibilityLevel: { compatibility: compatibilityLevel },
-    });
-    dispatch(
-      actions.updateGlobalSchemaCompatibilityLevelAction.success(
-        compatibilityLevel
-      )
-    );
-  } catch (e) {
-    dispatch(actions.updateGlobalSchemaCompatibilityLevelAction.failure());
-  }
-};
+export const fetchGlobalSchemaCompatibilityLevel =
+  (clusterName: ClusterName): PromiseThunkResult<void> =>
+  async (dispatch) => {
+    dispatch(actions.fetchGlobalSchemaCompatibilityLevelAction.request());
+    try {
+      const result = await schemasApiClient.getGlobalSchemaCompatibilityLevel({
+        clusterName,
+      });
+      dispatch(
+        actions.fetchGlobalSchemaCompatibilityLevelAction.success(
+          result.compatibility
+        )
+      );
+    } catch (e) {
+      dispatch(actions.fetchGlobalSchemaCompatibilityLevelAction.failure());
+    }
+  };
 
-export const createSchema = (
-  clusterName: ClusterName,
-  newSchemaSubject: NewSchemaSubject
-): PromiseThunkResult => async (dispatch) => {
-  dispatch(actions.createSchemaAction.request());
-  try {
-    const schema: SchemaSubject = await schemasApiClient.createNewSchema({
-      clusterName,
-      newSchemaSubject,
-    });
-    dispatch(actions.createSchemaAction.success(schema));
-  } catch (error) {
-    const response = await getResponse(error);
-    const alert: FailurePayload = {
-      subject: ['schema', newSchemaSubject.subject].join('-'),
-      title: `Schema ${newSchemaSubject.subject}`,
-      response,
-    };
-    dispatch(actions.createSchemaAction.failure({ alert }));
-    throw error;
-  }
-};
+export const updateGlobalSchemaCompatibilityLevel =
+  (
+    clusterName: ClusterName,
+    compatibilityLevel: CompatibilityLevelCompatibilityEnum
+  ): PromiseThunkResult<void> =>
+  async (dispatch) => {
+    dispatch(actions.updateGlobalSchemaCompatibilityLevelAction.request());
+    try {
+      await schemasApiClient.updateGlobalSchemaCompatibilityLevel({
+        clusterName,
+        compatibilityLevel: { compatibility: compatibilityLevel },
+      });
+      dispatch(
+        actions.updateGlobalSchemaCompatibilityLevelAction.success(
+          compatibilityLevel
+        )
+      );
+    } catch (e) {
+      dispatch(actions.updateGlobalSchemaCompatibilityLevelAction.failure());
+    }
+  };
 
-export const updateSchema = (
-  latestSchema: SchemaSubject,
-  newSchema: string,
-  newSchemaType: SchemaType,
-  newCompatibilityLevel: CompatibilityLevelCompatibilityEnum,
-  clusterName: string,
-  subject: string
-): PromiseThunkResult => async (dispatch) => {
-  dispatch(actions.updateSchemaAction.request());
-  try {
-    let schema: SchemaSubject = latestSchema;
-    if (
-      (newSchema &&
-        !isEqual(JSON.parse(latestSchema.schema), JSON.parse(newSchema))) ||
-      newSchemaType !== latestSchema.schemaType
-    ) {
-      schema = await schemasApiClient.createNewSchema({
+export const createSchema =
+  (
+    clusterName: ClusterName,
+    newSchemaSubject: NewSchemaSubject
+  ): PromiseThunkResult =>
+  async (dispatch) => {
+    dispatch(actions.createSchemaAction.request());
+    try {
+      const schema: SchemaSubject = await schemasApiClient.createNewSchema({
         clusterName,
-        newSchemaSubject: {
-          ...latestSchema,
-          schema: newSchema || latestSchema.schema,
-          schemaType: newSchemaType || latestSchema.schemaType,
-        },
+        newSchemaSubject,
       });
+      dispatch(actions.createSchemaAction.success(schema));
+    } catch (error) {
+      const response = await getResponse(error);
+      const alert: FailurePayload = {
+        subject: ['schema', newSchemaSubject.subject].join('-'),
+        title: `Schema ${newSchemaSubject.subject}`,
+        response,
+      };
+      dispatch(actions.createSchemaAction.failure({ alert }));
+      throw error;
+    }
+  };
+
+export const updateSchema =
+  (
+    latestSchema: SchemaSubject,
+    newSchema: string,
+    newSchemaType: SchemaType,
+    newCompatibilityLevel: CompatibilityLevelCompatibilityEnum,
+    clusterName: string,
+    subject: string
+  ): PromiseThunkResult =>
+  async (dispatch) => {
+    dispatch(actions.updateSchemaAction.request());
+    try {
+      let schema: SchemaSubject = latestSchema;
+      if (
+        (newSchema &&
+          !isEqual(JSON.parse(latestSchema.schema), JSON.parse(newSchema))) ||
+        newSchemaType !== latestSchema.schemaType
+      ) {
+        schema = await schemasApiClient.createNewSchema({
+          clusterName,
+          newSchemaSubject: {
+            ...latestSchema,
+            schema: newSchema || latestSchema.schema,
+            schemaType: newSchemaType || latestSchema.schemaType,
+          },
+        });
+      }
+      if (newCompatibilityLevel !== latestSchema.compatibilityLevel) {
+        await schemasApiClient.updateSchemaCompatibilityLevel({
+          clusterName,
+          subject,
+          compatibilityLevel: {
+            compatibility: newCompatibilityLevel,
+          },
+        });
+      }
+      dispatch(actions.updateSchemaAction.success(schema));
+    } catch (e) {
+      const response = await getResponse(e);
+      const alert: FailurePayload = {
+        subject: ['schema', subject].join('-'),
+        title: `Schema ${subject}`,
+        response,
+      };
+      dispatch(actions.updateSchemaAction.failure({ alert }));
+      throw e;
     }
-    if (newCompatibilityLevel !== latestSchema.compatibilityLevel) {
-      await schemasApiClient.updateSchemaCompatibilityLevel({
+  };
+export const deleteSchema =
+  (clusterName: ClusterName, subject: string): PromiseThunkResult =>
+  async (dispatch) => {
+    dispatch(actions.deleteSchemaAction.request());
+    try {
+      await schemasApiClient.deleteSchema({
         clusterName,
         subject,
-        compatibilityLevel: {
-          compatibility: newCompatibilityLevel,
-        },
       });
+      dispatch(actions.deleteSchemaAction.success(subject));
+    } catch (error) {
+      const response = await getResponse(error);
+      const alert: FailurePayload = {
+        subject: ['schema', subject].join('-'),
+        title: `Schema ${subject}`,
+        response,
+      };
+      dispatch(actions.deleteSchemaAction.failure({ alert }));
     }
-    actions.updateSchemaAction.success(schema);
-  } catch (e) {
-    const response = await getResponse(e);
-    const alert: FailurePayload = {
-      subject: ['schema', subject].join('-'),
-      title: `Schema ${subject}`,
-      response,
-    };
-    dispatch(actions.updateSchemaAction.failure({ alert }));
-    throw e;
-  }
-};
-export const deleteSchema = (
-  clusterName: ClusterName,
-  subject: string
-): PromiseThunkResult => async (dispatch) => {
-  dispatch(actions.deleteSchemaAction.request());
-  try {
-    await schemasApiClient.deleteSchema({
-      clusterName,
-      subject,
-    });
-    dispatch(actions.deleteSchemaAction.success(subject));
-  } catch (error) {
-    const response = await getResponse(error);
-    const alert: FailurePayload = {
-      subject: ['schema', subject].join('-'),
-      title: `Schema ${subject}`,
-      response,
-    };
-    dispatch(actions.deleteSchemaAction.failure({ alert }));
-  }
-};
+  };
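
The core of this file is the branching in updateSchema: a new schema version is created only when the body or type actually changed, and the compatibility level is updated only when it differs from the latest one. A minimal jest sketch of that contract, assuming the project's absolute import paths and using loose typing for the fixture:

// Sketch only (jest): checks that updateSchema skips both API calls when nothing changed.
// Import paths and the loosely typed fixture are assumptions, not part of this diff.
import { schemasApiClient, updateSchema } from 'redux/actions/thunks/schemas';
import mockStoreCreator from 'redux/store/configureStore/mockStoreCreator';

it('creates no new version and keeps compatibility when the schema is unchanged', async () => {
  // Only the fields the thunk reads are filled in, hence the `any`.
  const latest: any = {
    subject: 'test',
    schema: '{"type":"string"}',
    schemaType: 'AVRO',
    compatibilityLevel: 'BACKWARD',
  };
  const createSpy = jest.spyOn(schemasApiClient, 'createNewSchema');
  const compatSpy = jest.spyOn(schemasApiClient, 'updateSchemaCompatibilityLevel');

  await mockStoreCreator.dispatch(
    updateSchema(
      latest,
      latest.schema,            // same body ...
      latest.schemaType,        // ... same type ...
      latest.compatibilityLevel, // ... same compatibility level
      'local',
      'test'
    )
  );

  expect(createSpy).not.toHaveBeenCalled();
  expect(compatSpy).not.toHaveBeenCalled();
});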

+ 196 - 194
kafka-ui-react-app/src/redux/actions/thunks/topics.ts

@@ -32,136 +32,138 @@ export interface FetchTopicsListParams {
   perPage?: number;
 }
 
-export const fetchTopicsList = (
-  params: FetchTopicsListParams
-): PromiseThunkResult => async (dispatch, getState) => {
-  dispatch(actions.fetchTopicsListAction.request());
-  try {
-    const { topics, pageCount } = await topicsApiClient.getTopics(params);
-    const newState = (topics || []).reduce(
-      (memo: TopicsState, topic) => ({
-        ...memo,
-        byName: {
-          ...memo.byName,
-          [topic.name]: {
-            ...memo.byName[topic.name],
-            ...topic,
-            id: v4(),
+export const fetchTopicsList =
+  (params: FetchTopicsListParams): PromiseThunkResult =>
+  async (dispatch, getState) => {
+    dispatch(actions.fetchTopicsListAction.request());
+    try {
+      const { topics, pageCount } = await topicsApiClient.getTopics(params);
+      const newState = (topics || []).reduce(
+        (memo: TopicsState, topic) => ({
+          ...memo,
+          byName: {
+            ...memo.byName,
+            [topic.name]: {
+              ...memo.byName[topic.name],
+              ...topic,
+              id: v4(),
+            },
           },
-        },
-        allNames: [...memo.allNames, topic.name],
-      }),
-      {
-        ...getState().topics,
-        allNames: [],
-        totalPages: pageCount || 1,
-      }
-    );
-    dispatch(actions.fetchTopicsListAction.success(newState));
-  } catch (e) {
-    dispatch(actions.fetchTopicsListAction.failure());
-  }
-};
+          allNames: [...memo.allNames, topic.name],
+        }),
+        {
+          ...getState().topics,
+          allNames: [],
+          totalPages: pageCount || 1,
+        }
+      );
+      dispatch(actions.fetchTopicsListAction.success(newState));
+    } catch (e) {
+      dispatch(actions.fetchTopicsListAction.failure());
+    }
+  };
 
-export const fetchTopicMessages = (
-  clusterName: ClusterName,
-  topicName: TopicName,
-  queryParams: Partial<TopicMessageQueryParams>
-): PromiseThunkResult => async (dispatch) => {
-  dispatch(actions.fetchTopicMessagesAction.request());
-  try {
-    const messages = await messagesApiClient.getTopicMessages({
-      clusterName,
-      topicName,
-      ...queryParams,
-    });
-    dispatch(actions.fetchTopicMessagesAction.success(messages));
-  } catch (e) {
-    dispatch(actions.fetchTopicMessagesAction.failure());
-  }
-};
+export const fetchTopicMessages =
+  (
+    clusterName: ClusterName,
+    topicName: TopicName,
+    queryParams: Partial<TopicMessageQueryParams>
+  ): PromiseThunkResult =>
+  async (dispatch) => {
+    dispatch(actions.fetchTopicMessagesAction.request());
+    try {
+      const messages = await messagesApiClient.getTopicMessages({
+        clusterName,
+        topicName,
+        ...queryParams,
+      });
+      dispatch(actions.fetchTopicMessagesAction.success(messages));
+    } catch (e) {
+      dispatch(actions.fetchTopicMessagesAction.failure());
+    }
+  };
 
-export const clearTopicMessages = (
-  clusterName: ClusterName,
-  topicName: TopicName,
-  partitions?: number[]
-): PromiseThunkResult => async (dispatch) => {
-  dispatch(actions.clearMessagesTopicAction.request());
-  try {
-    await messagesApiClient.deleteTopicMessages({
-      clusterName,
-      topicName,
-      partitions,
-    });
-    dispatch(actions.clearMessagesTopicAction.success(topicName));
-  } catch (e) {
-    const response = await getResponse(e);
-    const alert: FailurePayload = {
-      subject: [clusterName, topicName, partitions].join('-'),
-      title: `Clear Topic Messages`,
-      response,
-    };
-    dispatch(actions.clearMessagesTopicAction.failure({ alert }));
-  }
-};
+export const clearTopicMessages =
+  (
+    clusterName: ClusterName,
+    topicName: TopicName,
+    partitions?: number[]
+  ): PromiseThunkResult =>
+  async (dispatch) => {
+    dispatch(actions.clearMessagesTopicAction.request());
+    try {
+      await messagesApiClient.deleteTopicMessages({
+        clusterName,
+        topicName,
+        partitions,
+      });
+      dispatch(actions.clearMessagesTopicAction.success(topicName));
+    } catch (e) {
+      const response = await getResponse(e);
+      const alert: FailurePayload = {
+        subject: [clusterName, topicName, partitions].join('-'),
+        title: `Clear Topic Messages`,
+        response,
+      };
+      dispatch(actions.clearMessagesTopicAction.failure({ alert }));
+    }
+  };
 
-export const fetchTopicDetails = (
-  clusterName: ClusterName,
-  topicName: TopicName
-): PromiseThunkResult => async (dispatch, getState) => {
-  dispatch(actions.fetchTopicDetailsAction.request());
-  try {
-    const topicDetails = await topicsApiClient.getTopicDetails({
-      clusterName,
-      topicName,
-    });
-    const state = getState().topics;
-    const newState = {
-      ...state,
-      byName: {
-        ...state.byName,
-        [topicName]: {
-          ...state.byName[topicName],
-          ...topicDetails,
+export const fetchTopicDetails =
+  (clusterName: ClusterName, topicName: TopicName): PromiseThunkResult =>
+  async (dispatch, getState) => {
+    dispatch(actions.fetchTopicDetailsAction.request());
+    try {
+      const topicDetails = await topicsApiClient.getTopicDetails({
+        clusterName,
+        topicName,
+      });
+      const state = getState().topics;
+      const newState = {
+        ...state,
+        byName: {
+          ...state.byName,
+          [topicName]: {
+            ...state.byName[topicName],
+            ...topicDetails,
+          },
         },
-      },
-    };
-    dispatch(actions.fetchTopicDetailsAction.success(newState));
-  } catch (e) {
-    dispatch(actions.fetchTopicDetailsAction.failure());
-  }
-};
+      };
+      dispatch(actions.fetchTopicDetailsAction.success(newState));
+    } catch (e) {
+      dispatch(actions.fetchTopicDetailsAction.failure());
+    }
+  };
 
-export const fetchTopicConfig = (
-  clusterName: ClusterName,
-  topicName: TopicName
-): PromiseThunkResult => async (dispatch, getState) => {
-  dispatch(actions.fetchTopicConfigAction.request());
-  try {
-    const config = await topicsApiClient.getTopicConfigs({
-      clusterName,
-      topicName,
-    });
+export const fetchTopicConfig =
+  (clusterName: ClusterName, topicName: TopicName): PromiseThunkResult =>
+  async (dispatch, getState) => {
+    dispatch(actions.fetchTopicConfigAction.request());
+    try {
+      const config = await topicsApiClient.getTopicConfigs({
+        clusterName,
+        topicName,
+      });
 
-    const state = getState().topics;
-    const newState = {
-      ...state,
-      byName: {
-        ...state.byName,
-        [topicName]: {
-          ...state.byName[topicName],
-          config: config.map((inputConfig) => ({
-            ...inputConfig,
-          })),
+      const state = getState().topics;
+      const newState = {
+        ...state,
+        byName: {
+          ...state.byName,
+          [topicName]: {
+            ...state.byName[topicName],
+            config: config.map((inputConfig) => ({
+              ...inputConfig,
+            })),
+          },
         },
-      },
-    };
+      };
 
-    dispatch(actions.fetchTopicConfigAction.success(newState));
-  } catch (e) {
-    dispatch(actions.fetchTopicConfigAction.failure());
-  }
-};
+      dispatch(actions.fetchTopicConfigAction.success(newState));
+    } catch (e) {
+      dispatch(actions.fetchTopicConfigAction.failure());
+    }
+  };
 
 const formatTopicCreation = (form: TopicFormDataRaw): TopicCreation => {
   const {
@@ -229,84 +231,84 @@ const formatTopicUpdate = (form: TopicFormDataRaw): TopicUpdate => {
   };
 };
 
-export const createTopic = (
-  clusterName: ClusterName,
-  form: TopicFormDataRaw
-): PromiseThunkResult => async (dispatch, getState) => {
-  dispatch(actions.createTopicAction.request());
-  try {
-    const topic: Topic = await topicsApiClient.createTopic({
-      clusterName,
-      topicCreation: formatTopicCreation(form),
-    });
+export const createTopic =
+  (clusterName: ClusterName, form: TopicFormDataRaw): PromiseThunkResult =>
+  async (dispatch, getState) => {
+    dispatch(actions.createTopicAction.request());
+    try {
+      const topic: Topic = await topicsApiClient.createTopic({
+        clusterName,
+        topicCreation: formatTopicCreation(form),
+      });
 
-    const state = getState().topics;
-    const newState = {
-      ...state,
-      byName: {
-        ...state.byName,
-        [topic.name]: {
-          ...topic,
+      const state = getState().topics;
+      const newState = {
+        ...state,
+        byName: {
+          ...state.byName,
+          [topic.name]: {
+            ...topic,
+          },
         },
-      },
-      allNames: [...state.allNames, topic.name],
-    };
+        allNames: [...state.allNames, topic.name],
+      };
 
-    dispatch(actions.createTopicAction.success(newState));
-  } catch (error) {
-    const response = await getResponse(error);
-    const alert: FailurePayload = {
-      subject: ['schema', form.name].join('-'),
-      title: `Schema ${form.name}`,
-      response,
-    };
-    dispatch(actions.createTopicAction.failure({ alert }));
-  }
-};
+      dispatch(actions.createTopicAction.success(newState));
+    } catch (error) {
+      const response = await getResponse(error);
+      const alert: FailurePayload = {
+        subject: ['topic', form.name].join('-'),
+        title: `Topic ${form.name}`,
+        response,
+      };
+      dispatch(actions.createTopicAction.failure({ alert }));
+    }
+  };
 
-export const updateTopic = (
-  clusterName: ClusterName,
-  topicName: TopicName,
-  form: TopicFormDataRaw
-): PromiseThunkResult => async (dispatch, getState) => {
-  dispatch(actions.updateTopicAction.request());
-  try {
-    const topic: Topic = await topicsApiClient.updateTopic({
-      clusterName,
-      topicName,
-      topicUpdate: formatTopicUpdate(form),
-    });
+export const updateTopic =
+  (
+    clusterName: ClusterName,
+    topicName: TopicName,
+    form: TopicFormDataRaw
+  ): PromiseThunkResult =>
+  async (dispatch, getState) => {
+    dispatch(actions.updateTopicAction.request());
+    try {
+      const topic: Topic = await topicsApiClient.updateTopic({
+        clusterName,
+        topicName,
+        topicUpdate: formatTopicUpdate(form),
+      });
 
-    const state = getState().topics;
-    const newState = {
-      ...state,
-      byName: {
-        ...state.byName,
-        [topic.name]: {
-          ...state.byName[topic.name],
-          ...topic,
+      const state = getState().topics;
+      const newState = {
+        ...state,
+        byName: {
+          ...state.byName,
+          [topic.name]: {
+            ...state.byName[topic.name],
+            ...topic,
+          },
         },
-      },
-    };
+      };
 
-    dispatch(actions.updateTopicAction.success(newState));
-  } catch (e) {
-    dispatch(actions.updateTopicAction.failure());
-  }
-};
+      dispatch(actions.updateTopicAction.success(newState));
+    } catch (e) {
+      dispatch(actions.updateTopicAction.failure());
+    }
+  };
 
-export const deleteTopic = (
-  clusterName: ClusterName,
-  topicName: TopicName
-): PromiseThunkResult => async (dispatch) => {
-  dispatch(actions.deleteTopicAction.request());
-  try {
-    await topicsApiClient.deleteTopic({
-      clusterName,
-      topicName,
-    });
-    dispatch(actions.deleteTopicAction.success(topicName));
-  } catch (e) {
-    dispatch(actions.deleteTopicAction.failure());
-  }
-};
+export const deleteTopic =
+  (clusterName: ClusterName, topicName: TopicName): PromiseThunkResult =>
+  async (dispatch) => {
+    dispatch(actions.deleteTopicAction.request());
+    try {
+      await topicsApiClient.deleteTopic({
+        clusterName,
+        topicName,
+      });
+      dispatch(actions.deleteTopicAction.success(topicName));
+    } catch (e) {
+      dispatch(actions.deleteTopicAction.failure());
+    }
+  };
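
The topics thunks are likewise reformatting-only. Since fetchTopicsList carries the most logic, here is a small standalone sketch of the byName/allNames normalisation it performs, with made-up topic names and the same uuid helper the thunk uses:

// Standalone sketch of the normalisation in fetchTopicsList; topic names are made up.
import { v4 } from 'uuid';

type TopicStub = { name: string };

const topics: TopicStub[] = [{ name: 'orders' }, { name: 'payments' }];

const normalized = topics.reduce(
  (memo, topic) => ({
    ...memo,
    // index the topic by name and tag it with a fresh client-side id
    byName: { ...memo.byName, [topic.name]: { ...topic, id: v4() } },
    // keep the page order in a flat list of names
    allNames: [...memo.allNames, topic.name],
  }),
  {
    byName: {} as Record<string, TopicStub & { id: string }>,
    allNames: [] as string[],
    totalPages: 1,
  }
);
// normalized.byName   -> { orders: {...}, payments: {...} }
// normalized.allNames -> ['orders', 'payments']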

+ 9 - 6
kafka-ui-react-app/src/redux/reducers/topics/selectors.ts

@@ -18,12 +18,10 @@ export const getTopicListTotalPages = (state: RootState) =>
   topicsState(state).totalPages;
 
 const getTopicListFetchingStatus = createFetchingSelector('GET_TOPICS');
-const getTopicDetailsFetchingStatus = createFetchingSelector(
-  'GET_TOPIC_DETAILS'
-);
-const getTopicMessagesFetchingStatus = createFetchingSelector(
-  'GET_TOPIC_MESSAGES'
-);
+const getTopicDetailsFetchingStatus =
+  createFetchingSelector('GET_TOPIC_DETAILS');
+const getTopicMessagesFetchingStatus =
+  createFetchingSelector('GET_TOPIC_MESSAGES');
 const getTopicConfigFetchingStatus = createFetchingSelector('GET_TOPIC_CONFIG');
 const getTopicCreationStatus = createFetchingSelector('POST_TOPIC');
 const getTopicUpdateStatus = createFetchingSelector('PATCH_TOPIC');
@@ -123,3 +121,8 @@ export const getTopicConfigByParamName = createSelector(
     return byParamName;
   }
 );
+
+export const getIsTopicInternal = createSelector(
+  getTopicByName,
+  ({ internal }) => !!internal
+);
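
getIsTopicInternal is the one genuinely new piece in this file; it backs the internal-topic handling in ListItem and Details earlier in the diff. A sketch of consuming it from a container, assuming getTopicByName keeps its (state, topicName) signature as the other topic selectors do:

// Sketch only: the component import and ownProps shape are assumptions, not part of this diff.
import { connect } from 'react-redux';
import { RootState } from 'redux/interfaces';
import { getIsTopicInternal } from 'redux/reducers/topics/selectors';
import ListItem from './ListItem';

interface OwnProps {
  topicName: string;
}

const mapStateToProps = (state: RootState, { topicName }: OwnProps) => ({
  // true only when the topic exists in state and is flagged internal
  internal: getIsTopicInternal(state, topicName),
});

export default connect(mapStateToProps)(ListItem);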

+ 2 - 4
kafka-ui-react-app/src/redux/store/configureStore/mockStoreCreator.ts

@@ -6,9 +6,7 @@ import { RootState, Action } from 'redux/interfaces';
 const middlewares: Array<Middleware> = [thunk];
 type DispatchExts = ThunkDispatch<RootState, undefined, Action>;
 
-const mockStoreCreator: MockStoreCreator<
-  RootState,
-  DispatchExts
-> = configureMockStore<RootState, DispatchExts>(middlewares);
+const mockStoreCreator: MockStoreCreator<RootState, DispatchExts> =
+  configureMockStore<RootState, DispatchExts>(middlewares);
 
 export default mockStoreCreator();
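
The mock store itself only changes formatting, so thunk tests keep the same shape: dispatch the thunk, then assert on getActions(). A minimal sketch of such an action-level test; fetch-mock-jest, the endpoint path, and the 'redux/actions' barrel import are assumptions rather than part of this change:

// Sketch only (jest): request/success pair for deleteTopic, asserted through the typed mock store.
import fetchMock from 'fetch-mock-jest';
import mockStoreCreator from 'redux/store/configureStore/mockStoreCreator';
import * as actions from 'redux/actions';
import { deleteTopic } from 'redux/actions/thunks/topics';

afterEach(() => {
  fetchMock.restore();
  mockStoreCreator.clearActions();
});

it('dispatches request and success when a topic is deleted', async () => {
  // The concrete path is an assumption about the generated API client's base URL.
  fetchMock.deleteOnce('path:/api/clusters/local/topics/orders', 200);

  await mockStoreCreator.dispatch(deleteTopic('local', 'orders'));

  expect(mockStoreCreator.getActions()).toEqual([
    actions.deleteTopicAction.request(),
    actions.deleteTopicAction.success('orders'),
  ]);
});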

+ 1 - 0
pom.xml

@@ -6,6 +6,7 @@
     <modules>
         <module>kafka-ui-contract</module>
 		<module>kafka-ui-api</module>
+		<module>kafka-ui-e2e-checks</module>
 	</modules>
 
 	<properties>
