split backend and frontend workflows

RustamGimadiev · 4 years ago
commit 403087c96f
66 changed files with 1509 additions and 877 deletions
  1. .github/workflows/backend.yml (+24 -0)
  2. .github/workflows/charts.yaml (+31 -0)
  3. .github/workflows/frontend.yaml (+1 -19)
  4. .github/workflows/release.yaml (+77 -0)
  5. charts/kafka-ui/.helmignore (+0 -0)
  6. charts/kafka-ui/Chart.yaml (+2 -1)
  7. charts/kafka-ui/README.md (+0 -0)
  8. charts/kafka-ui/templates/NOTES.txt (+0 -0)
  9. charts/kafka-ui/templates/_helpers.tpl (+0 -0)
  10. charts/kafka-ui/templates/configmap.yaml (+0 -0)
  11. charts/kafka-ui/templates/deployment.yaml (+0 -0)
  12. charts/kafka-ui/templates/hpa.yaml (+0 -0)
  13. charts/kafka-ui/templates/ingress.yaml (+0 -0)
  14. charts/kafka-ui/templates/secret.yaml (+0 -0)
  15. charts/kafka-ui/templates/service.yaml (+0 -0)
  16. charts/kafka-ui/templates/serviceaccount.yaml (+0 -0)
  17. charts/kafka-ui/values.yaml (+0 -0)
  18. kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/deserialization/SchemaRegistryRecordDeserializer.java (+13 -8)
  19. kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/exception/GlobalErrorWebExceptionHandler.java (+1 -1)
  20. kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/SchemaRegistryService.java (+58 -17)
  21. kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/MetricsRestController.java (+18 -17)
  22. kafka-ui-api/src/main/java/com/provectus/kafka/ui/zookeeper/ZookeeperService.java (+6 -2)
  23. kafka-ui-api/src/test/java/com/provectus/kafka/ui/SchemaRegistryServiceTests.java (+45 -24)
  24. kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml (+20 -20)
  25. kafka-ui-react-app/.eslintrc.json (+6 -2)
  26. kafka-ui-react-app/jest.config.js (+0 -8)
  27. kafka-ui-react-app/package-lock.json (+339 -351)
  28. kafka-ui-react-app/package.json (+15 -6)
  29. kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx (+18 -10)
  30. kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/ClusterWidget.spec.tsx (+73 -0)
  31. kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/__snapshots__/ClusterWidget.spec.tsx.snap (+159 -0)
  32. kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/fixtures.ts (+25 -0)
  33. kafka-ui-react-app/src/components/Topics/Details/Messages/MessageItem.tsx (+5 -7)
  34. kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx (+11 -13)
  35. kafka-ui-react-app/src/components/Topics/Details/Messages/MessagesContainer.ts (+2 -11)
  36. kafka-ui-react-app/src/components/Topics/Details/Messages/MessagesTable.tsx (+6 -8)
  37. kafka-ui-react-app/src/components/Topics/Details/Messages/__test__/MessageItem.spec.tsx (+38 -0)
  38. kafka-ui-react-app/src/components/Topics/Details/Messages/__test__/Messages.spec.tsx (+178 -0)
  39. kafka-ui-react-app/src/components/Topics/Details/Messages/__test__/MessagesTable.spec.tsx (+49 -0)
  40. kafka-ui-react-app/src/components/Topics/Details/Messages/__test__/__snapshots__/MessageItem.spec.tsx.snap (+110 -0)
  41. kafka-ui-react-app/src/components/Topics/Details/Messages/__test__/__snapshots__/MessagesTable.spec.tsx.snap (+66 -0)
  42. kafka-ui-react-app/src/components/Topics/Details/Messages/__test__/fixtures.ts (+19 -0)
  43. kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamAction.tsx (+2 -2)
  44. kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamButton.tsx (+1 -7)
  45. kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParams.tsx (+3 -3)
  46. kafka-ui-react-app/src/components/common/BytesFormatted/BytesFormatted.tsx (+13 -8)
  47. kafka-ui-react-app/src/lib/utils/formatBytes.ts (+0 -13)
  48. kafka-ui-react-app/src/redux/actionType.ts (+0 -55)
  49. kafka-ui-react-app/src/redux/actions/actions.ts (+39 -40)
  50. kafka-ui-react-app/src/redux/interfaces/index.ts (+0 -7)
  51. kafka-ui-react-app/src/redux/interfaces/loader.ts (+1 -3)
  52. kafka-ui-react-app/src/redux/reducers/brokers/reducer.ts (+3 -4)
  53. kafka-ui-react-app/src/redux/reducers/brokers/selectors.ts (+2 -2)
  54. kafka-ui-react-app/src/redux/reducers/clusters/reducer.ts (+1 -2)
  55. kafka-ui-react-app/src/redux/reducers/clusters/selectors.ts (+2 -2)
  56. kafka-ui-react-app/src/redux/reducers/consumerGroups/reducer.ts (+2 -3)
  57. kafka-ui-react-app/src/redux/reducers/consumerGroups/selectors.ts (+3 -3)
  58. kafka-ui-react-app/src/redux/reducers/loader/reducer.ts (+4 -4)
  59. kafka-ui-react-app/src/redux/reducers/loader/selectors.ts (+2 -2)
  60. kafka-ui-react-app/src/redux/reducers/topics/reducer.ts (+5 -6)
  61. kafka-ui-react-app/src/redux/reducers/topics/selectors.ts (+6 -7)
  62. kafka-ui-react-app/src/setupTests.ts (+2 -3)
  63. kafka-ui-react-app/src/tests/Topics/Details/Messages/Messages.spec.tsx (+0 -156)
  64. kafka-ui-react-app/src/theme/bulma_overrides.scss (+0 -19)
  65. kafka-ui-react-app/src/theme/index.scss (+2 -0)
  66. kafka-ui-react-app/tsconfig.json (+1 -1)

+ 24 - 0
.github/workflows/backend.yml

@@ -0,0 +1,24 @@
+name: backend
+on:
+  push:
+    branches: [ '*' ]
+  pull_request:
+    branches: [ master ]
+jobs:
+  mvn-all-build:
+    runs-on: ubuntu-latest
+    steps:
+    - name: Cache local Maven repository
+      uses: actions/cache@v1
+      with:
+        path: ~/.m2/repository
+        key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
+        restore-keys: |
+          ${{ runner.os }}-maven-
+    - uses: actions/checkout@v2
+    - name: Set up JDK 1.13
+      uses: actions/setup-java@v1
+      with:
+        java-version: 1.13
+    - name: Build with Maven
+      run: mvn clean package -Pprod

+ 31 - 0
.github/workflows/charts.yaml

@@ -0,0 +1,31 @@
+name: charts
+on:
+  create:
+    tags:
+      - "v*.*.*"
+jobs:
+  release:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+      - run: |
+          git config user.name github-actions
+          git config user.email github-actions@github.com
+      - uses: azure/setup-helm@v1
+      - name: update appVersion
+        run: |
+          export version=${GITHUB_REF##*/}
+          sed -i "s/appVersion:.*/appVersion: ${version}/" charts/kafka-ui/Chart.yaml
+      - name:
+        run: |
+          export VERSION=${GITHUB_REF##*/}
+          MSG=$(helm package --app-version ${VERSION} charts/kafka-ui)
+          git fetch origin
+          git stash
+          git checkout -b gh-pages origin/gh-pages
+          helm repo index .
+          git add -f ${MSG##*/} index.yaml
+          git commit -m "release ${VERSION}"
+          git push

+ 1 - 19
.github/workflows/maven.yml → .github/workflows/frontend.yaml

@@ -1,4 +1,4 @@
-name: kafka-ui
+name: frontend
 on:
   push:
     branches: [ '*' ]
@@ -64,21 +64,3 @@ jobs:
       with:
         name: generated-sources
         path: kafka-ui-contract/target/generated-sources/frontend
-
-  mvn-all-build:
-    runs-on: ubuntu-latest
-    steps:
-    - name: Cache local Maven repository
-      uses: actions/cache@v1
-      with:
-        path: ~/.m2/repository
-        key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
-        restore-keys: |
-          ${{ runner.os }}-maven-
-    - uses: actions/checkout@v2
-    - name: Set up JDK 1.13
-      uses: actions/setup-java@v1
-      with:
-        java-version: 1.13
-    - name: Build with Maven
-      run: mvn clean package -Pprod

+ 77 - 0
.github/workflows/release.yaml

@@ -0,0 +1,77 @@
+name: release
+on: 
+  workflow_dispatch:
+
+jobs:
+  release:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - run: |
+          git config user.name github-actions
+          git config user.email github-actions@github.com
+      - name: Cache local Maven repository
+        uses: actions/cache@v2
+        with:
+          path: ~/.m2/repository
+          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
+          restore-keys: |
+            ${{ runner.os }}-maven-
+      - uses: actions/checkout@v2
+      - name: Set up JDK 1.13
+        uses: actions/setup-java@v1
+        with:
+          java-version: 1.13
+      - name: Update development version
+        run: |
+          mvn -q versions:set -DnextSnapshot
+          git add pom.xml **/pom.xml
+          git commit -m "Increased version in pom.xml"
+          git push -f
+          git reset --hard HEAD~1
+      - name: Prepare release
+        id: prep
+        run: |
+          mvn -q versions:set -DremoveSnapshot
+          export VERSION=$(mvn -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
+          git add .
+          git commit -m "release ${VERSION}"
+          git tag -f v${VERSION}
+          git push --tags
+          echo ::set-output name=version::${VERSION}
+      - name: Build with Maven
+        run: mvn clean package -Pprod
+#################
+#               #
+# Docker images #
+#               #
+#################
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
+      - name: Cache Docker layers
+        uses: actions/cache@v2
+        with:
+          path: /tmp/.buildx-cache
+          key: ${{ runner.os }}-buildx-${{ github.sha }}
+          restore-keys: |
+            ${{ runner.os }}-buildx-
+      - name: Login to DockerHub
+        if: github.ref == 'refs/heads/master'
+        uses: docker/login-action@v1 
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Build and push
+        id: docker_build
+        uses: docker/build-push-action@v2
+        with:
+          builder: ${{ steps.buildx.outputs.name }}
+          context: kafka-ui-api
+          push: github.ref == 'refs/heads/master'
+          tags: provectuslabs/kafka-ui:${{ steps.prep.outputs.version }}
+          build-args: |
+            JAR_FILE=kafka-ui-api-${{ steps.prep.outputs.version }}.jar
+          cache-from: type=local,src=/tmp/.buildx-cache
+          cache-to: type=local,dest=/tmp/.buildx-cache

+ 0 - 0
chart/kafka-ui/.helmignore → charts/kafka-ui/.helmignore


+ 2 - 1
chart/kafka-ui/Chart.yaml → charts/kafka-ui/Chart.yaml

@@ -3,4 +3,5 @@ name: kafka-ui
 description: A Helm chart for kafka-UI
 type: application
 version: 0.0.1
-appVersion: 0.0.9
+appVersion: latest
+icon: https://github.com/provectus/kafka-ui/raw/master/images/kafka-ui-logo.png

+ 0 - 0
chart/kafka-ui/README.md → charts/kafka-ui/README.md


+ 0 - 0
chart/kafka-ui/templates/NOTES.txt → charts/kafka-ui/templates/NOTES.txt


+ 0 - 0
chart/kafka-ui/templates/_helpers.tpl → charts/kafka-ui/templates/_helpers.tpl


+ 0 - 0
chart/kafka-ui/templates/configmap.yaml → charts/kafka-ui/templates/configmap.yaml


+ 0 - 0
chart/kafka-ui/templates/deployment.yaml → charts/kafka-ui/templates/deployment.yaml


+ 0 - 0
chart/kafka-ui/templates/hpa.yaml → charts/kafka-ui/templates/hpa.yaml


+ 0 - 0
chart/kafka-ui/templates/ingress.yaml → charts/kafka-ui/templates/ingress.yaml


+ 0 - 0
chart/kafka-ui/templates/secret.yaml → charts/kafka-ui/templates/secret.yaml


+ 0 - 0
chart/kafka-ui/templates/service.yaml → charts/kafka-ui/templates/service.yaml


+ 0 - 0
chart/kafka-ui/templates/serviceaccount.yaml → charts/kafka-ui/templates/serviceaccount.yaml


+ 0 - 0
chart/kafka-ui/values.yaml → charts/kafka-ui/values.yaml


+ 13 - 8
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/deserialization/SchemaRegistryRecordDeserializer.java

@@ -4,12 +4,14 @@ import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.protobuf.Message;
 import com.provectus.kafka.ui.cluster.model.KafkaCluster;
+import io.confluent.kafka.schemaregistry.SchemaProvider;
 import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider;
 import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
 import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
 import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
 import io.confluent.kafka.schemaregistry.client.rest.entities.Schema;
 import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider;
 import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaUtils;
 import io.confluent.kafka.serializers.KafkaAvroDeserializer;
 import io.confluent.kafka.serializers.protobuf.KafkaProtobufDeserializer;
@@ -41,14 +43,17 @@ public class SchemaRegistryRecordDeserializer implements RecordDeserializer {
 		this.cluster = cluster;
 		this.objectMapper = objectMapper;
 
-		this.schemaRegistryClient = Optional.ofNullable(cluster.getSchemaRegistry()).map(e ->
-						new CachedSchemaRegistryClient(
-								Collections.singletonList(e),
-								CLIENT_IDENTITY_MAP_CAPACITY,
-								Collections.singletonList(new AvroSchemaProvider()),
-								Collections.emptyMap()
-						)
-		).orElse(null);
+		this.schemaRegistryClient = Optional.ofNullable(cluster.getSchemaRegistry())
+                .map(schemaRegistryUrl -> {
+                            List<SchemaProvider> schemaProviders = List.of(new AvroSchemaProvider(), new ProtobufSchemaProvider());
+                            return new CachedSchemaRegistryClient(
+                                    Collections.singletonList(schemaRegistryUrl),
+                                    CLIENT_IDENTITY_MAP_CAPACITY,
+                                    schemaProviders,
+                                    Collections.emptyMap()
+                            );
+                        }
+                ).orElse(null);
 
 		this.avroDeserializer = Optional.ofNullable(this.schemaRegistryClient)
 				.map(KafkaAvroDeserializer::new)
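
A note on the deserializer change above: the CachedSchemaRegistryClient is now constructed with both an AvroSchemaProvider and a ProtobufSchemaProvider, so subjects of either schema type fetched from the registry can be parsed. Below is a minimal stand-alone sketch of that construction, assuming a reachable registry URL; the class name, factory method, and the identity-map capacity of 100 are illustrative and not part of the commit.

import java.util.Collections;
import java.util.List;

import io.confluent.kafka.schemaregistry.SchemaProvider;
import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider;
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider;

class SchemaRegistryClientFactory {
    // Hypothetical helper mirroring the provider list registered in the diff above.
    static SchemaRegistryClient create(String schemaRegistryUrl) {
        // One provider per schema type lets the client parse both AVRO and PROTOBUF subjects.
        List<SchemaProvider> providers =
                List.of(new AvroSchemaProvider(), new ProtobufSchemaProvider());
        return new CachedSchemaRegistryClient(
                Collections.singletonList(schemaRegistryUrl),
                100,                      // identity map capacity (illustrative value)
                providers,
                Collections.emptyMap()); // no extra client configuration
    }
}

Without the ProtobufSchemaProvider, a Protobuf-typed schema fetched from the registry could not be parsed, which is presumably what this change addresses.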

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/exception/GlobalErrorWebExceptionHandler.java

@@ -38,7 +38,7 @@ public class GlobalErrorWebExceptionHandler extends AbstractErrorWebExceptionHan
     private Mono<ServerResponse> renderErrorResponse(ServerRequest request) {
         Map<String, Object> errorAttributes = getErrorAttributes(request, false);
         HttpStatus statusCode = Optional.ofNullable(errorAttributes.get(GlobalErrorAttributes.STATUS))
-                .map(code -> (HttpStatus) code)
+                .map(code -> code instanceof Integer ? HttpStatus.valueOf((Integer) code) : (HttpStatus) code)
                 .orElse(HttpStatus.BAD_REQUEST);
         return ServerResponse
                 .status(statusCode)
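
The one-line change above guards against the status attribute being stored as a raw Integer: the old code cast it straight to HttpStatus and would throw a ClassCastException in that case, while the new mapping converts Integer codes via HttpStatus.valueOf. A small isolated sketch of the same idea, assuming nothing beyond spring-web; the class name and the "status" attribute key are illustrative (the real code reads GlobalErrorAttributes.STATUS).

import java.util.Map;
import java.util.Optional;

import org.springframework.http.HttpStatus;

class StatusAttributeExample {
    // Resolve a status stored either as an HttpStatus or as a raw Integer code.
    static HttpStatus resolve(Map<String, Object> errorAttributes, String key) {
        return Optional.ofNullable(errorAttributes.get(key))
                .map(code -> code instanceof Integer
                        ? HttpStatus.valueOf((Integer) code)
                        : (HttpStatus) code)
                .orElse(HttpStatus.BAD_REQUEST);
    }

    public static void main(String[] args) {
        Map<String, Object> fromInteger = Map.of("status", 404);
        Map<String, Object> fromEnum = Map.of("status", HttpStatus.CONFLICT);
        System.out.println(resolve(fromInteger, "status")); // 404 NOT_FOUND
        System.out.println(resolve(fromEnum, "status"));    // 409 CONFLICT
    }
}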

+ 58 - 17
kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/SchemaRegistryService.java

@@ -9,8 +9,11 @@ import com.provectus.kafka.ui.model.CompatibilityCheckResponse;
 import com.provectus.kafka.ui.model.CompatibilityLevel;
 import com.provectus.kafka.ui.model.NewSchemaSubject;
 import com.provectus.kafka.ui.model.SchemaSubject;
+import java.util.Formatter;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.log4j.Log4j2;
+import org.springframework.core.ParameterizedTypeReference;
+import org.springframework.http.HttpEntity;
 import org.springframework.http.HttpStatus;
 import org.springframework.http.MediaType;
 import org.springframework.http.ResponseEntity;
@@ -20,6 +23,8 @@ import org.springframework.web.reactive.function.client.WebClient;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 
+import java.util.Arrays;
+import java.util.List;
 import java.util.Objects;
 
 @Service
@@ -36,24 +41,40 @@ public class SchemaRegistryService {
     private final ClusterMapper mapper;
     private final WebClient webClient;
 
-    public Flux<String> getAllSchemaSubjects(String clusterName) {
+    public Flux<SchemaSubject> getAllLatestVersionSchemas(String clusterName) {
+        var allSubjectNames = getAllSubjectNames(clusterName);
+        return allSubjectNames
+                .flatMapMany(Flux::fromArray)
+                .flatMap(subject -> getLatestSchemaSubject(clusterName, subject));
+    }
+
+    public Mono<String[]> getAllSubjectNames(String clusterName) {
         return clustersStorage.getClusterByName(clusterName)
                 .map(cluster -> webClient.get()
                         .uri(cluster.getSchemaRegistry() + URL_SUBJECTS)
                         .retrieve()
-                        .bodyToFlux(String.class)
-                        .doOnError(log::error))
-                .orElse(Flux.error(new NotFoundException("No such cluster")));
+                        .bodyToMono(String[].class)
+                        .doOnError(log::error)
+                )
+                .orElse(Mono.error(new NotFoundException("No such cluster")));
     }
 
-    public Flux<Integer> getSchemaSubjectVersions(String clusterName, String schemaName) {
+    public Flux<SchemaSubject> getAllVersionsBySubject(String clusterName, String subject) {
+        Flux<Integer> versions = getSubjectVersions(clusterName, subject);
+        return versions.flatMap(version -> getSchemaSubjectByVersion(clusterName, subject, version));
+    }
+
+    private Flux<Integer> getSubjectVersions(String clusterName, String schemaName) {
         return clustersStorage.getClusterByName(clusterName)
                 .map(cluster -> webClient.get()
                         .uri(cluster.getSchemaRegistry() + URL_SUBJECT_VERSIONS, schemaName)
                         .retrieve()
-                        .onStatus(HttpStatus.NOT_FOUND::equals, resp -> Mono.error(new NotFoundException("No such schema %s".formatted(schemaName))))
-                        .bodyToFlux(Integer.class))
-                .orElse(Flux.error(new NotFoundException("No such cluster")));
+                        .onStatus(HttpStatus.NOT_FOUND::equals,
+                            resp -> Mono.error(
+                                new NotFoundException(formatted("No such schema %s", schemaName))
+                            )
+                        ).bodyToFlux(Integer.class)
+                ).orElse(Flux.error(new NotFoundException("No such cluster")));
     }
 
     public Mono<SchemaSubject> getSchemaSubjectByVersion(String clusterName, String schemaName, Integer version) {
@@ -70,8 +91,12 @@ public class SchemaRegistryService {
                         .uri(cluster.getSchemaRegistry() + URL_SUBJECT_BY_VERSION, schemaName, version)
                         .retrieve()
                         .onStatus(HttpStatus.NOT_FOUND::equals,
-                                resp -> Mono.error(new NotFoundException("No such schema %s with version %s".formatted(schemaName, version))))
-                        .bodyToMono(SchemaSubject.class)
+                                resp -> Mono.error(
+                                    new NotFoundException(
+                                        formatted("No such schema %s with version %s", schemaName, version)
+                                    )
+                                )
+                        ).bodyToMono(SchemaSubject.class)
                         .zipWith(getSchemaCompatibilityInfoOrGlobal(clusterName, schemaName))
                         .map(tuple -> {
                             SchemaSubject schema = tuple.getT1();
@@ -97,9 +122,13 @@ public class SchemaRegistryService {
                         .uri(cluster.getSchemaRegistry() + URL_SUBJECT_BY_VERSION, schemaName, version)
                         .retrieve()
                         .onStatus(HttpStatus.NOT_FOUND::equals,
-                                resp -> Mono.error(new NotFoundException("No such schema %s with version %s".formatted(schemaName, version))))
-                        .toBodilessEntity())
-                .orElse(Mono.error(new NotFoundException("No such cluster")));
+                                resp -> Mono.error(
+                                    new NotFoundException(
+                                        formatted("No such schema %s with version %s", schemaName, version)
+                                    )
+                                )
+                        ).toBodilessEntity()
+                ).orElse(Mono.error(new NotFoundException("No such cluster")));
     }
 
     public Mono<ResponseEntity<Void>> deleteSchemaSubject(String clusterName, String schemaName) {
@@ -107,7 +136,13 @@ public class SchemaRegistryService {
                 .map(cluster -> webClient.delete()
                         .uri(cluster.getSchemaRegistry() + URL_SUBJECT, schemaName)
                         .retrieve()
-                        .onStatus(HttpStatus.NOT_FOUND::equals, resp -> Mono.error(new NotFoundException("No such schema %s".formatted(schemaName))))
+                        .onStatus(HttpStatus.NOT_FOUND::equals,
+                            resp -> Mono.error(
+                                new NotFoundException(
+                                    formatted("No such schema %s", schemaName)
+                                )
+                            )
+                        )
                         .toBodilessEntity())
                 .orElse(Mono.error(new NotFoundException("No such cluster")));
     }
@@ -120,7 +155,9 @@ public class SchemaRegistryService {
                         .body(BodyInserters.fromPublisher(newSchemaSubject, NewSchemaSubject.class))
                         .retrieve()
                         .onStatus(HttpStatus.NOT_FOUND::equals,
-                                resp -> Mono.error(new NotFoundException("No such schema %s".formatted(schemaName))))
+                                resp -> Mono.error(
+                                    new NotFoundException(formatted("No such schema %s", schemaName)))
+                        )
                         .toEntity(SchemaSubject.class)
                         .log())
                 .orElse(Mono.error(new NotFoundException("No such cluster")));
@@ -142,7 +179,7 @@ public class SchemaRegistryService {
                             .body(BodyInserters.fromPublisher(compatibilityLevel, CompatibilityLevel.class))
                             .retrieve()
                             .onStatus(HttpStatus.NOT_FOUND::equals,
-                                    resp -> Mono.error(new NotFoundException("No such schema %s".formatted(schemaName))))
+                                    resp -> Mono.error(new NotFoundException(formatted("No such schema %s", schemaName))))
                             .bodyToMono(Void.class);
                 }).orElse(Mono.error(new NotFoundException("No such cluster")));
     }
@@ -181,10 +218,14 @@ public class SchemaRegistryService {
                         .body(BodyInserters.fromPublisher(newSchemaSubject, NewSchemaSubject.class))
                         .retrieve()
                         .onStatus(HttpStatus.NOT_FOUND::equals,
-                                resp -> Mono.error(new NotFoundException("No such schema %s".formatted(schemaName))))
+                                resp -> Mono.error(new NotFoundException(formatted("No such schema %s", schemaName))))
                         .bodyToMono(InternalCompatibilityCheck.class)
                         .map(mapper::toCompatibilityCheckResponse)
                         .log()
                 ).orElse(Mono.error(new NotFoundException("No such cluster")));
     }
+
+    public String formatted(String str, Object... args) {
+        return new Formatter().format(str, args).toString();
+    }
 }
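
The service also swaps the String#formatted instance-method calls for a local formatted(...) helper built on java.util.Formatter, presumably so the module keeps compiling on the JDK 13 toolchain used by the workflows in this commit, where String#formatted is not yet a stable API. The helper behaves like String.format; a minimal sketch follows, with the class name and sample arguments being illustrative only.

import java.util.Formatter;

class FormattedExample {
    // Same behaviour as String.format(str, args); mirrors the helper added above.
    static String formatted(String str, Object... args) {
        return new Formatter().format(str, args).toString();
    }

    public static void main(String[] args) {
        // Prints: No such schema my-subject with version 3
        System.out.println(formatted("No such schema %s with version %s", "my-subject", 3));
    }
}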

+ 18 - 17
kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/MetricsRestController.java

@@ -105,29 +105,30 @@ public class MetricsRestController implements ApiClustersApi {
     }
 
     @Override
-    public Mono<ResponseEntity<SchemaSubject>> getLatestSchema(String clusterName, String schemaName, ServerWebExchange exchange) {
-        return schemaRegistryService.getLatestSchemaSubject(clusterName, schemaName).map(ResponseEntity::ok);
+    public Mono<ResponseEntity<SchemaSubject>> getLatestSchema(String clusterName, String subject, ServerWebExchange exchange) {
+        return schemaRegistryService.getLatestSchemaSubject(clusterName, subject).map(ResponseEntity::ok);
     }
 
     @Override
-    public Mono<ResponseEntity<SchemaSubject>> getSchemaByVersion(String clusterName, String schemaName, Integer version, ServerWebExchange exchange) {
-        return schemaRegistryService.getSchemaSubjectByVersion(clusterName, schemaName, version).map(ResponseEntity::ok);
+    public Mono<ResponseEntity<SchemaSubject>> getSchemaByVersion(String clusterName, String subject, Integer version, ServerWebExchange exchange) {
+        return schemaRegistryService.getSchemaSubjectByVersion(clusterName, subject, version).map(ResponseEntity::ok);
     }
 
     @Override
-    public Mono<ResponseEntity<Flux<String>>> getSchemas(String clusterName, ServerWebExchange exchange) {
-        Flux<String> subjects = schemaRegistryService.getAllSchemaSubjects(clusterName);
+    public Mono<ResponseEntity<Flux<SchemaSubject>>> getSchemas(String clusterName, ServerWebExchange exchange) {
+        Flux<SchemaSubject> subjects = schemaRegistryService.getAllLatestVersionSchemas(clusterName);
         return Mono.just(ResponseEntity.ok(subjects));
     }
 
     @Override
-    public Mono<ResponseEntity<Flux<Integer>>> getSchemaVersions(String clusterName, String subjectName, ServerWebExchange exchange) {
-        return Mono.just(ResponseEntity.ok(schemaRegistryService.getSchemaSubjectVersions(clusterName, subjectName)));
+    public Mono<ResponseEntity<Flux<SchemaSubject>>> getAllVersionsBySubject(String clusterName, String subjectName, ServerWebExchange exchange) {
+        Flux<SchemaSubject> schemas = schemaRegistryService.getAllVersionsBySubject(clusterName, subjectName);
+        return Mono.just(ResponseEntity.ok(schemas));
     }
 
     @Override
-    public Mono<ResponseEntity<Void>> deleteLatestSchema(String clusterName, String schemaName, ServerWebExchange exchange) {
-        return schemaRegistryService.deleteLatestSchemaSubject(clusterName, schemaName);
+    public Mono<ResponseEntity<Void>> deleteLatestSchema(String clusterName, String subject, ServerWebExchange exchange) {
+        return schemaRegistryService.deleteLatestSchemaSubject(clusterName, subject);
     }
 
     @Override
@@ -141,10 +142,10 @@ public class MetricsRestController implements ApiClustersApi {
     }
 
     @Override
-    public Mono<ResponseEntity<SchemaSubject>> createNewSchema(String clusterName, String schemaName,
+    public Mono<ResponseEntity<SchemaSubject>> createNewSchema(String clusterName, String subject,
                                                                @Valid Mono<NewSchemaSubject> newSchemaSubject,
                                                                ServerWebExchange exchange) {
-        return schemaRegistryService.createNewSubject(clusterName, schemaName, newSchemaSubject);
+        return schemaRegistryService.createNewSubject(clusterName, subject, newSchemaSubject);
     }
 
     @Override
@@ -172,17 +173,17 @@ public class MetricsRestController implements ApiClustersApi {
     }
 
     @Override
-    public Mono<ResponseEntity<CompatibilityCheckResponse>> checkSchemaCompatibility(String clusterName, String schemaName,
+    public Mono<ResponseEntity<CompatibilityCheckResponse>> checkSchemaCompatibility(String clusterName, String subject,
                                                                                      @Valid Mono<NewSchemaSubject> newSchemaSubject,
                                                                                      ServerWebExchange exchange) {
-        return schemaRegistryService.checksSchemaCompatibility(clusterName, schemaName, newSchemaSubject)
+        return schemaRegistryService.checksSchemaCompatibility(clusterName, subject, newSchemaSubject)
                 .map(ResponseEntity::ok);
     }
 
     @Override
-    public Mono<ResponseEntity<Void>> updateSchemaCompatibilityLevel(String clusterName, String schemaName, @Valid Mono<CompatibilityLevel> compatibilityLevel, ServerWebExchange exchange) {
-        log.info("Updating schema compatibility for schema: {}", schemaName);
-        return schemaRegistryService.updateSchemaCompatibility(clusterName, schemaName, compatibilityLevel)
+    public Mono<ResponseEntity<Void>> updateSchemaCompatibilityLevel(String clusterName, String subject, @Valid Mono<CompatibilityLevel> compatibilityLevel, ServerWebExchange exchange) {
+        log.info("Updating schema compatibility for subject: {}", subject);
+        return schemaRegistryService.updateSchemaCompatibility(clusterName, subject, compatibilityLevel)
                 .map(ResponseEntity::ok);
     }
 

+ 6 - 2
kafka-ui-api/src/main/java/com/provectus/kafka/ui/zookeeper/ZookeeperService.java

@@ -1,6 +1,7 @@
 package com.provectus.kafka.ui.zookeeper;
 
 import com.provectus.kafka.ui.cluster.model.KafkaCluster;
+import java.util.concurrent.ConcurrentHashMap;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.log4j.Log4j2;
 import org.I0Itec.zkclient.ZkClient;
@@ -14,7 +15,7 @@ import java.util.Map;
 @Log4j2
 public class ZookeeperService {
 
-    private final Map<String, ZkClient> cachedZkClient = new HashMap<>();
+    private final Map<String, ZkClient> cachedZkClient = new ConcurrentHashMap<>();
 
     public boolean isZookeeperOnline(KafkaCluster kafkaCluster) {
         var isConnected = false;
@@ -33,7 +34,10 @@ public class ZookeeperService {
 
     private ZkClient getOrCreateZkClient (KafkaCluster cluster) {
         try {
-            return cachedZkClient.getOrDefault(cluster.getName(), new ZkClient(cluster.getZookeeper(), 1000));
+            return cachedZkClient.computeIfAbsent(
+                cluster.getName(),
+                (n) -> new ZkClient(cluster.getZookeeper(), 1000)
+            );
         } catch (Exception e) {
             log.error("Error while creating zookeeper client for cluster {}", cluster.getName());
             return null;
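
Two related fixes in this hunk: the client cache becomes a ConcurrentHashMap, and getOrDefault is replaced with computeIfAbsent. With getOrDefault, the fallback ZkClient is constructed on every call (method arguments are evaluated eagerly) and is never stored back into the map, so each lookup opened a fresh ZooKeeper connection; computeIfAbsent builds the client at most once per cluster and caches it atomically. A contrast sketch under those assumptions; the class name, method names, and parameters are illustrative, while the ZkClient constructor call mirrors the diff.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.I0Itec.zkclient.ZkClient;

class ZkClientCacheExample {
    private final Map<String, ZkClient> cachedZkClient = new ConcurrentHashMap<>();

    // Old approach: the default ZkClient is built on every call and discarded on a hit,
    // and on a miss it is returned without ever being put into the map.
    ZkClient leaky(String clusterName, String zkConnect) {
        return cachedZkClient.getOrDefault(clusterName, new ZkClient(zkConnect, 1000));
    }

    // New approach: the factory lambda runs at most once per key and the result is cached.
    ZkClient cached(String clusterName, String zkConnect) {
        return cachedZkClient.computeIfAbsent(clusterName, n -> new ZkClient(zkConnect, 1000));
    }
}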

+ 45 - 24
kafka-ui-api/src/test/java/com/provectus/kafka/ui/SchemaRegistryServiceTests.java

@@ -5,6 +5,7 @@ import com.provectus.kafka.ui.model.SchemaSubject;
 import lombok.extern.log4j.Log4j2;
 import lombok.val;
 import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient;
@@ -23,6 +24,12 @@ import java.util.UUID;
 class SchemaRegistryServiceTests extends AbstractBaseTest {
     @Autowired
     WebTestClient webTestClient;
+    String subject;
+
+    @BeforeEach
+    void setUpBefore() {
+        this.subject = UUID.randomUUID().toString();
+    }
 
     @Test
     public void should404WhenGetAllSchemasForUnknownCluster() {
@@ -34,11 +41,11 @@ class SchemaRegistryServiceTests extends AbstractBaseTest {
     }
 
     @Test
-    void shouldReturn404WhenGetLatestSchemaByNonExistingSchemaName() {
+    void shouldReturn404WhenGetLatestSchemaByNonExistingSubject() {
         String unknownSchema = "unknown-schema";
         webTestClient
                 .get()
-                .uri("http://localhost:8080/api/clusters/local/schemas/{schemaName}/latest", unknownSchema)
+                .uri("http://localhost:8080/api/clusters/local/schemas/{subject}/latest", unknownSchema)
                 .exchange()
                 .expectStatus().isNotFound();
     }
@@ -59,49 +66,51 @@ class SchemaRegistryServiceTests extends AbstractBaseTest {
     }
 
     @Test
-    public void shouldReturnNotNullResponseWhenGetAllSchemas() {
+    public void shouldReturnNotEmptyResponseWhenGetAllSchemas() {
+        createNewSubjectAndAssert(subject);
+
         webTestClient
                 .get()
                 .uri("http://localhost:8080/api/clusters/local/schemas")
                 .exchange()
                 .expectStatus().isOk()
-                .expectBodyList(String.class)
+                .expectBodyList(SchemaSubject.class)
                 .consumeWith(result -> {
-                    List<String> responseBody = result.getResponseBody();
-                    Assertions.assertNotNull(responseBody);
+                    List<SchemaSubject> responseBody = result.getResponseBody();
                     log.info("Response of test schemas: {}", responseBody);
+                    Assertions.assertNotNull(responseBody);
+                    Assertions.assertFalse(responseBody.isEmpty());
+
+                    SchemaSubject actualSchemaSubject = responseBody.stream()
+                            .filter(schemaSubject -> subject.equals(schemaSubject.getSubject()))
+                            .findFirst()
+                            .orElseThrow();
+                    Assertions.assertNotNull(actualSchemaSubject.getId());
+                    Assertions.assertNotNull(actualSchemaSubject.getVersion());
+                    Assertions.assertNotNull(actualSchemaSubject.getCompatibilityLevel());
+                    Assertions.assertEquals("\"string\"", actualSchemaSubject.getSchema());
                 });
     }
 
     @Test
     public void shouldOkWhenCreateNewSchemaThenGetAndUpdateItsCompatibilityLevel() {
-        String schemaName = UUID.randomUUID().toString();
-        // Create a new schema
-        webTestClient
-                .post()
-                .uri("http://localhost:8080/api/clusters/local/schemas/{schemaName}", schemaName)
-                .contentType(MediaType.APPLICATION_JSON)
-                .body(BodyInserters.fromValue("{\"schema\":\"{\\\"type\\\": \\\"string\\\"}\"}"))
-                .exchange()
-                .expectStatus().isOk()
-                .expectBody(SchemaSubject.class)
-                .consumeWith(this::assertResponseBodyWhenCreateNewSchema);
+        createNewSubjectAndAssert(subject);
 
         //Get the created schema and check its items
         webTestClient
                 .get()
-                .uri("http://localhost:8080/api/clusters/local/schemas/{schemaName}/latest", schemaName)
+                .uri("http://localhost:8080/api/clusters/local/schemas/{subject}/latest", subject)
                 .exchange()
                 .expectStatus().isOk()
                 .expectBodyList(SchemaSubject.class)
                 .consumeWith(listEntityExchangeResult -> {
                     val expectedCompatibility = CompatibilityLevel.CompatibilityEnum.BACKWARD;
-                    assertSchemaWhenGetLatest(schemaName, listEntityExchangeResult, expectedCompatibility);
+                    assertSchemaWhenGetLatest(subject, listEntityExchangeResult, expectedCompatibility);
                 });
 
         //Now let's change compatibility level of this schema to FULL whereas the global level should be BACKWARD
         webTestClient.put()
-                .uri("http://localhost:8080/api/clusters/local/schemas/{schemaName}/compatibility", schemaName)
+                .uri("http://localhost:8080/api/clusters/local/schemas/{subject}/compatibility", subject)
                 .contentType(MediaType.APPLICATION_JSON)
                 .body(BodyInserters.fromValue("{\"compatibility\":\"FULL\"}"))
                 .exchange()
@@ -110,23 +119,35 @@ class SchemaRegistryServiceTests extends AbstractBaseTest {
         //Get one more time to check the schema compatibility level is changed to FULL
         webTestClient
                 .get()
-                .uri("http://localhost:8080/api/clusters/local/schemas/{schemaName}/latest", schemaName)
+                .uri("http://localhost:8080/api/clusters/local/schemas/{subject}/latest", subject)
                 .exchange()
                 .expectStatus().isOk()
                 .expectBodyList(SchemaSubject.class)
                 .consumeWith(listEntityExchangeResult -> {
                     val expectedCompatibility = CompatibilityLevel.CompatibilityEnum.FULL;
-                    assertSchemaWhenGetLatest(schemaName, listEntityExchangeResult, expectedCompatibility);
+                    assertSchemaWhenGetLatest(subject, listEntityExchangeResult, expectedCompatibility);
                 });
     }
 
-    private void assertSchemaWhenGetLatest(String schemaName, EntityExchangeResult<List<SchemaSubject>> listEntityExchangeResult, CompatibilityLevel.CompatibilityEnum expectedCompatibility) {
+    private void createNewSubjectAndAssert(String subject) {
+        webTestClient
+                .post()
+                .uri("http://localhost:8080/api/clusters/local/schemas/{subject}", subject)
+                .contentType(MediaType.APPLICATION_JSON)
+                .body(BodyInserters.fromValue("{\"schema\":\"{\\\"type\\\": \\\"string\\\"}\"}"))
+                .exchange()
+                .expectStatus().isOk()
+                .expectBody(SchemaSubject.class)
+                .consumeWith(this::assertResponseBodyWhenCreateNewSchema);
+    }
+
+    private void assertSchemaWhenGetLatest(String subject, EntityExchangeResult<List<SchemaSubject>> listEntityExchangeResult, CompatibilityLevel.CompatibilityEnum expectedCompatibility) {
         List<SchemaSubject> responseBody = listEntityExchangeResult.getResponseBody();
         Assertions.assertNotNull(responseBody);
         Assertions.assertEquals(1, responseBody.size());
         SchemaSubject actualSchema = responseBody.get(0);
         Assertions.assertNotNull(actualSchema);
-        Assertions.assertEquals(schemaName, actualSchema.getSubject());
+        Assertions.assertEquals(subject, actualSchema.getSubject());
         Assertions.assertEquals("\"string\"", actualSchema.getSchema());
 
         Assertions.assertNotNull(actualSchema.getCompatibilityLevel());

+ 20 - 20
kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml

@@ -339,7 +339,7 @@ paths:
     get:
       tags:
         - /api/clusters
-      summary: get all schemas from Schema Registry service
+      summary: get all schemas of latest version from Schema Registry service
       operationId: getSchemas
       parameters:
         - name: clusterName
@@ -355,9 +355,9 @@ paths:
               schema:
                 type: array
                 items:
-                  type: string
+                  $ref: '#/components/schemas/SchemaSubject'
 
-  /api/clusters/{clusterName}/schemas/{schemaName}:
+  /api/clusters/{clusterName}/schemas/{subject}:
     post:
       tags:
         - /api/clusters
@@ -369,7 +369,7 @@ paths:
           required: true
           schema:
             type: string
-        - name: schemaName
+        - name: subject
           in: path
           required: true
           schema:
@@ -399,7 +399,7 @@ paths:
           required: true
           schema:
             type: string
-        - name: schemaName
+        - name: subject
           in: path
           required: true
           schema:
@@ -410,19 +410,19 @@ paths:
         404:
           description: Not found
 
-  /api/clusters/{clusterName}/schemas/{schemaName}/versions:
+  /api/clusters/{clusterName}/schemas/{subject}/versions:
     get:
       tags:
         - /api/clusters
-      summary: get all version of schema from Schema Registry service
-      operationId: getSchemaVersions
+      summary: get all version of subject from Schema Registry service
+      operationId: getAllVersionsBySubject
       parameters:
         - name: clusterName
           in: path
           required: true
           schema:
             type: string
-        - name: schemaName
+        - name: subject
           in: path
           required: true
           schema:
@@ -435,9 +435,9 @@ paths:
               schema:
                 type: array
                 items:
-                  type: integer
+                  $ref: '#/components/schemas/SchemaSubject'
 
-  /api/clusters/{clusterName}/schemas/{schemaName}/latest:
+  /api/clusters/{clusterName}/schemas/{subject}/latest:
     get:
       tags:
         - /api/clusters
@@ -449,7 +449,7 @@ paths:
           required: true
           schema:
             type: string
-        - name: schemaName
+        - name: subject
           in: path
           required: true
           schema:
@@ -472,7 +472,7 @@ paths:
           required: true
           schema:
             type: string
-        - name: schemaName
+        - name: subject
           in: path
           required: true
           schema:
@@ -484,7 +484,7 @@ paths:
           description: Not found
 
 
-  /api/clusters/{clusterName}/schemas/{schemaName}/versions/{version}:
+  /api/clusters/{clusterName}/schemas/{subject}/versions/{version}:
     get:
       tags:
         - /api/clusters
@@ -496,7 +496,7 @@ paths:
           required: true
           schema:
             type: string
-        - name: schemaName
+        - name: subject
           in: path
           required: true
           schema:
@@ -524,7 +524,7 @@ paths:
           required: true
           schema:
             type: string
-        - name: schemaName
+        - name: subject
           in: path
           required: true
           schema:
@@ -581,7 +581,7 @@ paths:
         404:
           description: Not Found
 
-  /api/clusters/{clusterName}/schemas/{schemaName}/compatibility:
+  /api/clusters/{clusterName}/schemas/{subject}/compatibility:
     put:
       tags:
         - /api/clusters
@@ -593,7 +593,7 @@ paths:
           required: true
           schema:
             type: string
-        - name: schemaName
+        - name: subject
           in: path
           required: true
           schema:
@@ -609,7 +609,7 @@ paths:
         404:
           description: Not Found
 
-  /api/clusters/{clusterName}/schemas/{schemaName}/check:
+  /api/clusters/{clusterName}/schemas/{subject}/check:
     post:
       tags:
         - /api/clusters
@@ -621,7 +621,7 @@ paths:
           required: true
           schema:
             type: string
-        - name: schemaName
+        - name: subject
           in: path
           required: true
           schema:

+ 6 - 2
kafka-ui-react-app/.eslintrc.json

@@ -15,7 +15,10 @@
     },
     "ecmaVersion": 2018,
     "sourceType": "module",
-    "project": ["./tsconfig.json", "./src/setupTests.ts"]
+    "project": [
+        "./tsconfig.json",
+         "./src/setupTests.ts"
+      ]
   },
   "plugins": ["@typescript-eslint", "prettier"],
   "extends": [
@@ -30,7 +33,8 @@
     "@typescript-eslint/explicit-module-boundary-types": "off",
     "jsx-a11y/label-has-associated-control": "off",
     "import/prefer-default-export": "off",
-    "@typescript-eslint/no-explicit-any": "error"
+    "@typescript-eslint/no-explicit-any": "error",
+    "import/no-extraneous-dependencies": ["error", { "devDependencies": true }]
   },
   "overrides": [
     {

+ 0 - 8
kafka-ui-react-app/jest.config.js

@@ -1,8 +0,0 @@
-module.exports = {
-  roots: ['<rootDir>/src'],
-  transform: {
-    '^.+\\.tsx?$': 'ts-jest',
-  },
-  testRegex: '(/__tests__/.*|(\\.|/)(test|spec))\\.tsx?$',
-  moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'],
-};

File diff suppressed because it is too large
+ 339 - 351
kafka-ui-react-app/package-lock.json


+ 15 - 6
kafka-ui-react-app/package.json

@@ -3,9 +3,7 @@
   "version": "0.1.0",
   "private": true,
   "dependencies": {
-    "@types/react-datepicker": "^3.1.1",
-    "@types/uuid": "^8.3.0",
-    "bulma": "^0.8.2",
+    "bulma": "^0.9.2",
     "bulma-switch": "^2.0.0",
     "classnames": "^2.2.6",
     "date-fns": "^2.16.1",
@@ -33,7 +31,7 @@
     "*.{js,ts,jsx,tsx}": [
       "eslint -c .eslintrc.json --fix",
       "git add",
-      "jest --bail --findRelatedTests"
+      "npm test -- --bail --findRelatedTests --watchAll=false"
     ]
   },
   "scripts": {
@@ -42,6 +40,7 @@
     "lint": "eslint --ext .tsx,.ts src/",
     "lint:fix": "eslint --ext .tsx,.ts src/ --fix",
     "test": "react-scripts test",
+    "test:CI": "CI=true npm test --watchAll=false",
     "eject": "react-scripts eject",
     "tsc": "tsc"
   },
@@ -66,6 +65,7 @@
     ]
   },
   "devDependencies": {
+    "@jest/types": "^26.6.2",
     "@testing-library/jest-dom": "^5.11.9",
     "@testing-library/react": "^9.5.0",
     "@testing-library/user-event": "^7.1.2",
@@ -75,16 +75,19 @@
     "@types/lodash": "^4.14.165",
     "@types/node": "^12.19.8",
     "@types/react": "^17.0.0",
+    "@types/react-datepicker": "^3.1.1",
     "@types/react-dom": "^17.0.0",
     "@types/react-redux": "^7.1.11",
     "@types/react-router-dom": "^5.1.6",
     "@types/redux": "^3.6.0",
     "@types/redux-thunk": "^2.1.0",
+    "@types/uuid": "^8.3.0",
     "@typescript-eslint/eslint-plugin": "^4.9.0",
     "@typescript-eslint/parser": "^4.9.0",
     "@wojtekmaj/enzyme-adapter-react-17": "^0.4.1",
     "dotenv": "^8.2.0",
     "enzyme": "^3.11.0",
+    "enzyme-to-json": "^3.6.1",
     "eslint": "^7.14.0",
     "eslint-config-airbnb": "^18.2.1",
     "eslint-config-airbnb-typescript": "^12.0.0",
@@ -99,12 +102,18 @@
     "lint-staged": "^10.5.2",
     "node-sass": "^4.14.1",
     "prettier": "^2.2.1",
-    "react-scripts": "4.0.1",
+    "react-scripts": "4.0.2",
     "ts-jest": "^26.4.4",
+    "ts-node": "^9.1.1",
     "typescript": "~4.1.2"
   },
   "engines": {
     "node": ">=14.15.4"
   },
-  "proxy": "http://localhost:8080"
+  "proxy": "http://localhost:8080",
+  "jest": {
+    "snapshotSerializers": [
+      "enzyme-to-json/serializer"
+    ]
+  }
 }

+ 18 - 10
kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx

@@ -1,8 +1,8 @@
 import React from 'react';
-import formatBytes from 'lib/utils/formatBytes';
 import { NavLink } from 'react-router-dom';
-import { clusterBrokersPath } from 'lib/paths';
+import { clusterBrokersPath, clusterTopicsPath } from 'lib/paths';
 import { Cluster, ServerStatus } from 'generated-sources';
+import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted';
 
 interface ClusterWidgetProps {
   cluster: Cluster;
@@ -19,9 +19,9 @@ const ClusterWidget: React.FC<ClusterWidgetProps> = ({
     onlinePartitionCount,
   },
 }) => (
-  <NavLink to={clusterBrokersPath(name)} className="column is-full-modile is-6">
-    <div className="box is-hoverable">
-      <div className="title is-6 has-text-overflow-ellipsis" title={name}>
+  <div className="column is-full-modile is-6">
+    <div className="box">
+      <div className="title is-6 has-text-overflow-ellipsis">
         <div
           className={`tag has-margin-right ${
             status === ServerStatus.Online ? 'is-primary' : 'is-danger'
@@ -36,7 +36,9 @@ const ClusterWidget: React.FC<ClusterWidgetProps> = ({
         <tbody>
           <tr>
             <th>Brokers</th>
-            <td>{brokerCount}</td>
+            <td>
+              <NavLink to={clusterBrokersPath(name)}>{brokerCount}</NavLink>
+            </td>
           </tr>
           <tr>
             <th>Partitions</th>
@@ -44,20 +46,26 @@ const ClusterWidget: React.FC<ClusterWidgetProps> = ({
           </tr>
           <tr>
             <th>Topics</th>
-            <td>{topicCount}</td>
+            <td>
+              <NavLink to={clusterTopicsPath(name)}>{topicCount}</NavLink>
+            </td>
           </tr>
           <tr>
             <th>Production</th>
-            <td>{formatBytes(bytesInPerSec || 0)}</td>
+            <td>
+              <BytesFormatted value={bytesInPerSec} />
+            </td>
           </tr>
           <tr>
             <th>Consumption</th>
-            <td>{formatBytes(bytesOutPerSec || 0)}</td>
+            <td>
+              <BytesFormatted value={bytesOutPerSec} />
+            </td>
           </tr>
         </tbody>
       </table>
     </div>
-  </NavLink>
+  </div>
 );
 
 export default ClusterWidget;

+ 73 - 0
kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/ClusterWidget.spec.tsx

@@ -0,0 +1,73 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import { ServerStatus } from 'generated-sources';
+import { clusterBrokersPath, clusterTopicsPath } from 'lib/paths';
+import ClusterWidget from '../ClusterWidget';
+import { offlineCluster, onlineCluster } from './fixtures';
+
+describe('ClusterWidget', () => {
+  describe('when cluster is online', () => {
+    it('renders with correct tag', () => {
+      const tag = shallow(<ClusterWidget cluster={onlineCluster} />).find(
+        '.tag'
+      );
+      expect(tag.hasClass('is-primary')).toBeTruthy();
+      expect(tag.text()).toEqual(ServerStatus.Online);
+    });
+
+    it('renders table', () => {
+      const table = shallow(<ClusterWidget cluster={onlineCluster} />).find(
+        'table'
+      );
+      expect(table.hasClass('is-fullwidth')).toBeTruthy();
+
+      expect(
+        table.find(`NavLink[to="${clusterBrokersPath(onlineCluster.name)}"]`)
+          .exists
+      ).toBeTruthy();
+      expect(
+        table.find(`NavLink[to="${clusterTopicsPath(onlineCluster.name)}"]`)
+          .exists
+      ).toBeTruthy();
+    });
+
+    it('matches snapshot', () => {
+      expect(
+        shallow(<ClusterWidget cluster={onlineCluster} />)
+      ).toMatchSnapshot();
+    });
+  });
+
+  describe('when cluster is offline', () => {
+    it('renders with correct tag', () => {
+      const tag = shallow(<ClusterWidget cluster={offlineCluster} />).find(
+        '.tag'
+      );
+
+      expect(tag.hasClass('is-danger')).toBeTruthy();
+      expect(tag.text()).toEqual(ServerStatus.Offline);
+    });
+
+    it('renders table', () => {
+      const table = shallow(<ClusterWidget cluster={offlineCluster} />).find(
+        'table'
+      );
+      expect(table.hasClass('is-fullwidth')).toBeTruthy();
+
+      expect(
+        table.find(`NavLink[to="${clusterBrokersPath(onlineCluster.name)}"]`)
+          .exists
+      ).toBeTruthy();
+      expect(
+        table.find(`NavLink[to="${clusterTopicsPath(onlineCluster.name)}"]`)
+          .exists
+      ).toBeTruthy();
+    });
+
+    it('matches snapshot', () => {
+      expect(
+        shallow(<ClusterWidget cluster={offlineCluster} />)
+      ).toMatchSnapshot();
+    });
+  });
+});

+ 159 - 0
kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/__snapshots__/ClusterWidget.spec.tsx.snap

@@ -0,0 +1,159 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`ClusterWidget when cluster is offline matches snapshot 1`] = `
+<div
+  className="column is-full-modile is-6"
+>
+  <div
+    className="box"
+  >
+    <div
+      className="title is-6 has-text-overflow-ellipsis"
+    >
+      <div
+        className="tag has-margin-right is-danger"
+      >
+        offline
+      </div>
+      local
+    </div>
+    <table
+      className="table is-fullwidth"
+    >
+      <tbody>
+        <tr>
+          <th>
+            Brokers
+          </th>
+          <td>
+            <NavLink
+              to="/ui/clusters/local/brokers"
+            >
+              1
+            </NavLink>
+          </td>
+        </tr>
+        <tr>
+          <th>
+            Partitions
+          </th>
+          <td>
+            2
+          </td>
+        </tr>
+        <tr>
+          <th>
+            Topics
+          </th>
+          <td>
+            <NavLink
+              to="/ui/clusters/local/topics"
+            >
+              2
+            </NavLink>
+          </td>
+        </tr>
+        <tr>
+          <th>
+            Production
+          </th>
+          <td>
+            <BytesFormatted
+              value={8000.00000673768}
+            />
+          </td>
+        </tr>
+        <tr>
+          <th>
+            Consumption
+          </th>
+          <td>
+            <BytesFormatted
+              value={0.815306356729712}
+            />
+          </td>
+        </tr>
+      </tbody>
+    </table>
+  </div>
+</div>
+`;
+
+exports[`ClusterWidget when cluster is online matches snapshot 1`] = `
+<div
+  className="column is-full-modile is-6"
+>
+  <div
+    className="box"
+  >
+    <div
+      className="title is-6 has-text-overflow-ellipsis"
+    >
+      <div
+        className="tag has-margin-right is-primary"
+      >
+        online
+      </div>
+      secondLocal
+    </div>
+    <table
+      className="table is-fullwidth"
+    >
+      <tbody>
+        <tr>
+          <th>
+            Brokers
+          </th>
+          <td>
+            <NavLink
+              to="/ui/clusters/secondLocal/brokers"
+            >
+              1
+            </NavLink>
+          </td>
+        </tr>
+        <tr>
+          <th>
+            Partitions
+          </th>
+          <td>
+            6
+          </td>
+        </tr>
+        <tr>
+          <th>
+            Topics
+          </th>
+          <td>
+            <NavLink
+              to="/ui/clusters/secondLocal/topics"
+            >
+              3
+            </NavLink>
+          </td>
+        </tr>
+        <tr>
+          <th>
+            Production
+          </th>
+          <td>
+            <BytesFormatted
+              value={0.00003061819685376472}
+            />
+          </td>
+        </tr>
+        <tr>
+          <th>
+            Consumption
+          </th>
+          <td>
+            <BytesFormatted
+              value={5.737800890036267}
+            />
+          </td>
+        </tr>
+      </tbody>
+    </table>
+  </div>
+</div>
+`;

+ 25 - 0
kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/fixtures.ts

@@ -0,0 +1,25 @@
+import { Cluster, ServerStatus } from 'generated-sources';
+
+export const onlineCluster: Cluster = {
+  name: 'secondLocal',
+  defaultCluster: false,
+  status: ServerStatus.Online,
+  brokerCount: 1,
+  onlinePartitionCount: 6,
+  topicCount: 3,
+  bytesInPerSec: 0.000030618196853764715,
+  bytesOutPerSec: 5.737800890036267075817,
+};
+
+export const offlineCluster: Cluster = {
+  name: 'local',
+  defaultCluster: true,
+  status: ServerStatus.Offline,
+  brokerCount: 1,
+  onlinePartitionCount: 2,
+  topicCount: 2,
+  bytesInPerSec: 8000.0000067376808542600021,
+  bytesOutPerSec: 0.8153063567297119490871,
+};
+
+export const clusters: Cluster[] = [onlineCluster, offlineCluster];

+ 5 - 7
kafka-ui-react-app/src/components/Topics/Details/Messages/MessageItem.tsx

@@ -3,11 +3,11 @@ import { format } from 'date-fns';
 import JSONTree from 'react-json-tree';
 import { TopicMessage } from 'generated-sources';
 
-interface MessageItemProp {
+export interface MessageItemProp {
   partition: TopicMessage['partition'];
   offset: TopicMessage['offset'];
   timestamp: TopicMessage['timestamp'];
-  content: TopicMessage['content'];
+  content?: TopicMessage['content'];
 }
 
 const MessageItem: React.FC<MessageItemProp> = ({
@@ -16,13 +16,11 @@ const MessageItem: React.FC<MessageItemProp> = ({
   timestamp,
   content,
 }) => (
-  <tr key="{timestamp}">
-    <td style={{ width: 200 }}>
-      {timestamp ? format(timestamp, 'yyyy-MM-dd HH:mm:ss') : null}
-    </td>
+  <tr>
+    <td style={{ width: 200 }}>{format(timestamp, 'yyyy-MM-dd HH:mm:ss')}</td>
     <td style={{ width: 150 }}>{offset}</td>
     <td style={{ width: 100 }}>{partition}</td>
-    <td key="{content}" style={{ wordBreak: 'break-word' }}>
+    <td style={{ wordBreak: 'break-word' }}>
       {content && (
         <JSONTree
           data={content}

+ 11 - 13
kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx

@@ -1,4 +1,9 @@
+import 'react-datepicker/dist/react-datepicker.css';
 import React, { useCallback, useEffect, useRef } from 'react';
+import { groupBy, map, concat, maxBy } from 'lodash';
+import MultiSelect from 'react-multi-select-component';
+import { Option } from 'react-multi-select-component/dist/lib/interfaces';
+import { useDebouncedCallback } from 'use-debounce';
 import {
   ClusterName,
   TopicMessageQueryParams,
@@ -7,13 +12,6 @@ import {
 import { TopicMessage, Partition, SeekType } from 'generated-sources';
 import PageLoader from 'components/common/PageLoader/PageLoader';
 import DatePicker from 'react-datepicker';
-import 'react-datepicker/dist/react-datepicker.css';
-
-import MultiSelect from 'react-multi-select-component';
-
-import * as _ from 'lodash';
-import { useDebouncedCallback } from 'use-debounce';
-import { Option } from 'react-multi-select-component/dist/lib/interfaces';
 import MessagesTable from './MessagesTable';
 
 export interface Props {
@@ -81,17 +79,17 @@ const Messages: React.FC<Props> = ({
       offset: 0,
       partition: p.partition,
     }));
-    const messageUniqs: FilterProps[] = _.map(
-      _.groupBy(messages, 'partition'),
-      (v) => _.maxBy(v, 'offset')
+    const messageUniqs: FilterProps[] = map(
+      groupBy(messages, 'partition'),
+      (v) => maxBy(v, 'offset')
     ).map((v) => ({
       offset: v ? v.offset : 0,
       partition: v ? v.partition : 0,
     }));
 
-    return _.map(
-      _.groupBy(_.concat(partitionUniqs, messageUniqs), 'partition'),
-      (v) => _.maxBy(v, 'offset') as FilterProps
+    return map(
+      groupBy(concat(partitionUniqs, messageUniqs), 'partition'),
+      (v) => maxBy(v, 'offset') as FilterProps
     );
   }, [messages, partitions]);
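Note: the groupBy/maxBy chain above deduplicates filter entries by partition, keeping the highest offset seen for each partition. A small standalone illustration with hypothetical data (the FilterProps shape is taken from the surrounding component):

import { concat, groupBy, map, maxBy } from 'lodash';

interface FilterProps {
  offset: number;
  partition: number;
}

// Every known partition starts at offset 0; already-loaded messages may carry a higher offset.
const partitionUniqs: FilterProps[] = [
  { partition: 0, offset: 0 },
  { partition: 1, offset: 0 },
];
const messageUniqs: FilterProps[] = [{ partition: 1, offset: 42 }];

// Group the merged list by partition and keep the entry with the largest offset per group.
const merged = map(
  groupBy(concat(partitionUniqs, messageUniqs), 'partition'),
  (group) => maxBy(group, 'offset') as FilterProps
);
// merged -> [{ partition: 0, offset: 0 }, { partition: 1, offset: 42 }]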
 

+ 2 - 11
kafka-ui-react-app/src/components/Topics/Details/Messages/MessagesContainer.ts

@@ -1,10 +1,5 @@
 import { connect } from 'react-redux';
-import {
-  ClusterName,
-  RootState,
-  TopicMessageQueryParams,
-  TopicName,
-} from 'redux/interfaces';
+import { ClusterName, RootState, TopicName } from 'redux/interfaces';
 import { RouteComponentProps, withRouter } from 'react-router-dom';
 import { fetchTopicMessages } from 'redux/actions';
 import {
@@ -38,11 +33,7 @@ const mapStateToProps = (
 });
 
 const mapDispatchToProps = {
-  fetchTopicMessages: (
-    clusterName: ClusterName,
-    topicName: TopicName,
-    queryParams: Partial<TopicMessageQueryParams>
-  ) => fetchTopicMessages(clusterName, topicName, queryParams),
+  fetchTopicMessages,
 };
 
 export default withRouter(

+ 6 - 8
kafka-ui-react-app/src/components/Topics/Details/Messages/MessagesTable.tsx

@@ -1,11 +1,9 @@
 import React from 'react';
 import { TopicMessage } from 'generated-sources';
-import CustomParamButton, {
-  CustomParamButtonType,
-} from '../../shared/Form/CustomParams/CustomParamButton';
+import CustomParamButton from 'components/Topics/shared/Form/CustomParams/CustomParamButton';
 import MessageItem from './MessageItem';
 
-interface MessagesTableProp {
+export interface MessagesTableProp {
   messages: TopicMessage[];
   onNext(event: React.MouseEvent<HTMLButtonElement>): void;
 }
@@ -16,7 +14,7 @@ const MessagesTable: React.FC<MessagesTableProp> = ({ messages, onNext }) => {
   }
 
   return (
-    <div>
+    <>
       <table className="table is-striped is-fullwidth">
         <thead>
           <tr>
@@ -30,7 +28,7 @@ const MessagesTable: React.FC<MessagesTableProp> = ({ messages, onNext }) => {
           {messages.map(
             ({ partition, offset, timestamp, content }: TopicMessage) => (
               <MessageItem
-                key={timestamp.toString()}
+                key={`message-${timestamp.getTime()}`}
                 partition={partition}
                 offset={offset}
                 timestamp={timestamp}
@@ -44,13 +42,13 @@ const MessagesTable: React.FC<MessagesTableProp> = ({ messages, onNext }) => {
         <div className="column is-full">
           <CustomParamButton
             className="is-link is-pulled-right"
-            type={CustomParamButtonType.chevronRight}
+            type="fa-chevron-right"
             onClick={onNext}
             btnText="Next"
           />
         </div>
       </div>
-    </div>
+    </>
   );
 };
 

+ 38 - 0
kafka-ui-react-app/src/components/Topics/Details/Messages/__test__/MessageItem.spec.tsx

@@ -0,0 +1,38 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import MessageItem from 'components/Topics/Details/Messages/MessageItem';
+import { messages } from './fixtures';
+
+jest.mock('date-fns', () => ({
+  format: () => `mocked date`,
+}));
+
+describe('MessageItem', () => {
+  describe('when content is defined', () => {
+    it('renders table row with JSONTree', () => {
+      const wrapper = shallow(<MessageItem {...messages[0]} />);
+
+      expect(wrapper.find('tr').length).toEqual(1);
+      expect(wrapper.find('td').length).toEqual(4);
+      expect(wrapper.find('JSONTree').length).toEqual(1);
+    });
+
+    it('matches snapshot', () => {
+      expect(shallow(<MessageItem {...messages[0]} />)).toMatchSnapshot();
+    });
+  });
+
+  describe('when content is undefined', () => {
+    it('renders table row without JSONTree', () => {
+      const wrapper = shallow(<MessageItem {...messages[1]} />);
+
+      expect(wrapper.find('tr').length).toEqual(1);
+      expect(wrapper.find('td').length).toEqual(4);
+      expect(wrapper.find('JSONTree').length).toEqual(0);
+    });
+
+    it('matches snapshot', () => {
+      expect(shallow(<MessageItem {...messages[1]} />)).toMatchSnapshot();
+    });
+  });
+});

+ 178 - 0
kafka-ui-react-app/src/components/Topics/Details/Messages/__test__/Messages.spec.tsx

@@ -0,0 +1,178 @@
+import React from 'react';
+import { Provider } from 'react-redux';
+import { mount, shallow } from 'enzyme';
+import * as useDebounce from 'use-debounce';
+import DatePicker from 'react-datepicker';
+import Messages, { Props } from 'components/Topics/Details/Messages/Messages';
+import MessagesContainer from 'components/Topics/Details/Messages/MessagesContainer';
+import PageLoader from 'components/common/PageLoader/PageLoader';
+import configureStore from 'redux/store/configureStore';
+
+describe('Messages', () => {
+  describe('Container', () => {
+    const store = configureStore();
+
+    it('renders view', () => {
+      const component = shallow(
+        <Provider store={store}>
+          <MessagesContainer />
+        </Provider>
+      );
+
+      expect(component.exists()).toBeTruthy();
+    });
+  });
+
+  describe('View', () => {
+    beforeEach(() => {
+      jest.restoreAllMocks();
+    });
+
+    const setupWrapper = (props: Partial<Props> = {}) => (
+      <Messages
+        clusterName="Test cluster"
+        topicName="Cluster topic"
+        isFetched
+        fetchTopicMessages={jest.fn()}
+        messages={[]}
+        partitions={[]}
+        {...props}
+      />
+    );
+
+    describe('Initial state', () => {
+      it('renders PageLoader', () => {
+        expect(
+          shallow(setupWrapper({ isFetched: false })).exists(PageLoader)
+        ).toBeTruthy();
+      });
+    });
+
+    describe('Table', () => {
+      describe('With messages', () => {
+        const messagesWrapper = mount(
+          setupWrapper({
+            messages: [
+              {
+                partition: 1,
+                offset: 2,
+                timestamp: new Date('05-05-1994'),
+                content: [1, 2, 3],
+              },
+            ],
+          })
+        );
+        it('renders table', () => {
+          expect(
+            messagesWrapper.exists(
+              '[className="table is-striped is-fullwidth"]'
+            )
+          ).toBeTruthy();
+        });
+        it('renders JSONTree', () => {
+          expect(messagesWrapper.find('JSONTree').length).toEqual(1);
+        });
+        it('parses message content correctly', () => {
+          const messages = [
+            {
+              partition: 1,
+              offset: 2,
+              timestamp: new Date('05-05-1994'),
+              content: [1, 2, 3],
+            },
+          ];
+          const content = JSON.stringify(messages[0].content);
+          expect(JSON.parse(content)).toEqual(messages[0].content);
+        });
+      });
+      describe('Without messages', () => {
+        it('renders string', () => {
+          const wrapper = mount(setupWrapper());
+          expect(wrapper.text()).toContain('No messages at selected topic');
+        });
+      });
+    });
+
+    describe('Offset field', () => {
+      describe('Seek Type dependency', () => {
+        const wrapper = mount(setupWrapper());
+
+        it('renders DatePicker', () => {
+          wrapper
+            .find('[id="selectSeekType"]')
+            .simulate('change', { target: { value: 'TIMESTAMP' } });
+
+          expect(
+            wrapper.find('[id="selectSeekType"]').first().props().value
+          ).toEqual('TIMESTAMP');
+
+          expect(wrapper.exists(DatePicker)).toBeTruthy();
+        });
+      });
+
+      describe('With defined offset value', () => {
+        const wrapper = shallow(setupWrapper());
+
+        it('shows offset value in input', () => {
+          const offset = '10';
+
+          wrapper
+            .find('#searchOffset')
+            .simulate('change', { target: { value: offset } });
+
+          expect(wrapper.find('#searchOffset').first().props().value).toEqual(
+            offset
+          );
+        });
+      });
+      describe('With invalid offset value', () => {
+        const wrapper = shallow(setupWrapper());
+
+        it('shows 0 in input', () => {
+          wrapper
+            .find('#searchOffset')
+            .simulate('change', { target: { value: null } });
+
+          expect(wrapper.find('#searchOffset').first().props().value).toBe('0');
+        });
+      });
+    });
+
+    describe('Search field', () => {
+      it('renders input correctly', () => {
+        const query = 20;
+        const mockedUseDebouncedCallback = jest.fn();
+        jest
+          .spyOn(useDebounce, 'useDebouncedCallback')
+          .mockImplementationOnce(() => [
+            mockedUseDebouncedCallback,
+            jest.fn(),
+            jest.fn(),
+          ]);
+
+        const wrapper = shallow(setupWrapper());
+
+        wrapper
+          .find('#searchText')
+          .simulate('change', { target: { value: query } });
+
+        expect(wrapper.find('#searchText').first().props().value).toEqual(
+          query
+        );
+        expect(mockedUseDebouncedCallback).toHaveBeenCalledWith({ q: query });
+      });
+    });
+
+    describe('Submit button', () => {
+      it('fetches topic messages', () => {
+        const mockedfetchTopicMessages = jest.fn();
+        const wrapper = mount(
+          setupWrapper({ fetchTopicMessages: mockedfetchTopicMessages })
+        );
+
+        wrapper.find('[type="submit"]').simulate('click');
+        expect(mockedfetchTopicMessages).toHaveBeenCalled();
+      });
+    });
+  });
+});

+ 49 - 0
kafka-ui-react-app/src/components/Topics/Details/Messages/__test__/MessagesTable.spec.tsx

@@ -0,0 +1,49 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import MessagesTable, {
+  MessagesTableProp,
+} from 'components/Topics/Details/Messages/MessagesTable';
+import { messages } from './fixtures';
+
+jest.mock('date-fns', () => ({
+  format: () => `mocked date`,
+}));
+
+describe('MessagesTable', () => {
+  const setupWrapper = (props: Partial<MessagesTableProp> = {}) => (
+    <MessagesTable messages={[]} onNext={jest.fn()} {...props} />
+  );
+
+  describe('when topic is empty', () => {
+    it('renders the "No messages" placeholder instead of a table', () => {
+      const wrapper = shallow(setupWrapper());
+      expect(wrapper.exists('table')).toBeFalsy();
+      expect(wrapper.exists('CustomParamButton')).toBeFalsy();
+      expect(wrapper.text()).toEqual('No messages at selected topic');
+    });
+
+    it('matches snapshot', () => {
+      expect(shallow(setupWrapper())).toMatchSnapshot();
+    });
+  });
+
+  describe('when topic contains messages', () => {
+    const onNext = jest.fn();
+    const wrapper = shallow(setupWrapper({ messages, onNext }));
+
+    it('renders a MessageItem row for each message', () => {
+      expect(wrapper.exists('table')).toBeTruthy();
+      expect(wrapper.exists('CustomParamButton')).toBeTruthy();
+      expect(wrapper.find('MessageItem').length).toEqual(2);
+    });
+
+    it('handles CustomParamButton click', () => {
+      wrapper.find('CustomParamButton').simulate('click');
+      expect(onNext).toHaveBeenCalled();
+    });
+
+    it('matches snapshot', () => {
+      expect(shallow(setupWrapper({ messages, onNext }))).toMatchSnapshot();
+    });
+  });
+});

+ 110 - 0
kafka-ui-react-app/src/components/Topics/Details/Messages/__test__/__snapshots__/MessageItem.spec.tsx.snap

@@ -0,0 +1,110 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`MessageItem when content is defined matches snapshot 1`] = `
+<tr>
+  <td
+    style={
+      Object {
+        "width": 200,
+      }
+    }
+  >
+    mocked date
+  </td>
+  <td
+    style={
+      Object {
+        "width": 150,
+      }
+    }
+  >
+    2
+  </td>
+  <td
+    style={
+      Object {
+        "width": 100,
+      }
+    }
+  >
+    1
+  </td>
+  <td
+    style={
+      Object {
+        "wordBreak": "break-word",
+      }
+    }
+  >
+    <JSONTree
+      collectionLimit={50}
+      data={
+        Object {
+          "foo": "bar",
+          "key": "val",
+        }
+      }
+      getItemString={[Function]}
+      hideRoot={true}
+      invertTheme={false}
+      isCustomNode={[Function]}
+      keyPath={
+        Array [
+          "root",
+        ]
+      }
+      labelRenderer={[Function]}
+      postprocessValue={[Function]}
+      shouldExpandNode={[Function]}
+      theme={
+        Object {
+          "base0B": "#363636",
+          "base0D": "#3273dc",
+          "tree": [Function],
+          "value": [Function],
+        }
+      }
+      valueRenderer={[Function]}
+    />
+  </td>
+</tr>
+`;
+
+exports[`MessageItem when content is undefined matches snapshot 1`] = `
+<tr>
+  <td
+    style={
+      Object {
+        "width": 200,
+      }
+    }
+  >
+    mocked date
+  </td>
+  <td
+    style={
+      Object {
+        "width": 150,
+      }
+    }
+  >
+    20
+  </td>
+  <td
+    style={
+      Object {
+        "width": 100,
+      }
+    }
+  >
+    2
+  </td>
+  <td
+    style={
+      Object {
+        "wordBreak": "break-word",
+      }
+    }
+  />
+</tr>
+`;

+ 66 - 0
kafka-ui-react-app/src/components/Topics/Details/Messages/__test__/__snapshots__/MessagesTable.spec.tsx.snap

@@ -0,0 +1,66 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`MessagesTable when topic contains messages matches snapshot 1`] = `
+<Fragment>
+  <table
+    className="table is-striped is-fullwidth"
+  >
+    <thead>
+      <tr>
+        <th>
+          Timestamp
+        </th>
+        <th>
+          Offset
+        </th>
+        <th>
+          Partition
+        </th>
+        <th>
+          Content
+        </th>
+      </tr>
+    </thead>
+    <tbody>
+      <MessageItem
+        content={
+          Object {
+            "foo": "bar",
+            "key": "val",
+          }
+        }
+        key="message-802310400000"
+        offset={2}
+        partition={1}
+        timestamp={1995-06-05T00:00:00.000Z}
+      />
+      <MessageItem
+        key="message-1596585600000"
+        offset={20}
+        partition={2}
+        timestamp={2020-08-05T00:00:00.000Z}
+      />
+    </tbody>
+  </table>
+  <div
+    className="columns"
+  >
+    <div
+      className="column is-full"
+    >
+      <CustomParamButton
+        btnText="Next"
+        className="is-link is-pulled-right"
+        onClick={[MockFunction]}
+        type="fa-chevron-right"
+      />
+    </div>
+  </div>
+</Fragment>
+`;
+
+exports[`MessagesTable when topic is empty matches snapshot 1`] = `
+<div>
+  No messages at selected topic
+</div>
+`;

+ 19 - 0
kafka-ui-react-app/src/components/Topics/Details/Messages/__test__/fixtures.ts

@@ -0,0 +1,19 @@
+import { TopicMessage } from 'generated-sources';
+
+export const messages: TopicMessage[] = [
+  {
+    partition: 1,
+    offset: 2,
+    timestamp: new Date(Date.UTC(1995, 5, 5)),
+    content: {
+      foo: 'bar',
+      key: 'val',
+    },
+  },
+  {
+    partition: 2,
+    offset: 20,
+    timestamp: new Date(Date.UTC(2020, 7, 5)),
+    content: undefined,
+  },
+];

+ 2 - 2
kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamAction.tsx

@@ -1,5 +1,5 @@
 import React from 'react';
-import CustomParamButton, { CustomParamButtonType } from './CustomParamButton';
+import CustomParamButton from './CustomParamButton';
 
 interface Props {
   index: string;
@@ -11,7 +11,7 @@ const CustomParamAction: React.FC<Props> = ({ index, onRemove }) => (
     <label className="label">&nbsp;</label>
     <CustomParamButton
       className="is-danger"
-      type={CustomParamButtonType.minus}
+      type="fa-minus"
       onClick={() => onRemove(index)}
     />
   </>

+ 1 - 7
kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamButton.tsx

@@ -1,15 +1,9 @@
 import React from 'react';
 
-export enum CustomParamButtonType {
-  plus = 'fa-plus',
-  minus = 'fa-minus',
-  chevronRight = 'fa-chevron-right',
-}
-
 interface Props {
   onClick: (event: React.MouseEvent<HTMLButtonElement>) => void;
   className: string;
-  type: CustomParamButtonType;
+  type: 'fa-plus' | 'fa-minus' | 'fa-chevron-right';
   btnText?: string;
 }
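Note: replacing the CustomParamButtonType enum with a string-literal union keeps the same compile-time checking while letting callers pass the Font Awesome class directly. A minimal sketch of the idea (IconClass and ButtonProps are stand-in names, not the component's actual declarations):

type IconClass = 'fa-plus' | 'fa-minus' | 'fa-chevron-right';

interface ButtonProps {
  type: IconClass;
}

const ok: ButtonProps = { type: 'fa-plus' };      // accepted: member of the union
// const bad: ButtonProps = { type: 'fa-trash' }; // rejected at compile time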
 

+ 3 - 3
kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParams.tsx

@@ -7,7 +7,7 @@ import {
   TopicConfigByName,
   TopicConfigParams,
 } from 'redux/interfaces';
-import CustomParamButton, { CustomParamButtonType } from './CustomParamButton';
+import CustomParamButton from './CustomParamButton';
 import CustomParamField from './CustomParamField';
 
 export const INDEX_PREFIX = 'customParams';
@@ -79,7 +79,7 @@ const CustomParams: React.FC<Props> = ({ isSubmitting, config }) => {
         <div className="column">
           <CustomParamButton
             className="is-success"
-            type={CustomParamButtonType.plus}
+            type="fa-plus"
             onClick={onAdd}
             btnText="Add Custom Parameter"
           />
@@ -88,7 +88,7 @@ const CustomParams: React.FC<Props> = ({ isSubmitting, config }) => {
 
       {formCustomParams.allIndexes.map((index) => (
         <CustomParamField
-          key={index}
+          key={formCustomParams.byIndex[index].name}
           index={index}
           isDisabled={isSubmitting}
           name={formCustomParams.byIndex[index].name}

+ 13 - 8
kafka-ui-react-app/src/components/common/BytesFormatted/BytesFormatted.tsx

@@ -5,18 +5,23 @@ interface Props {
   precision?: number;
 }
 
-const BytesFormatted: React.FC<Props> = ({ value, precision }) => {
-  const formatBytes = React.useCallback(() => {
-    const numVal = typeof value === 'string' ? parseInt(value, 10) : value;
-    if (!numVal) return 0;
-    const pow = Math.floor(Math.log2(numVal) / 10);
+const BytesFormatted: React.FC<Props> = ({ value, precision = 0 }) => {
+  const formatedValue = React.useMemo(() => {
+    const bytes = typeof value === 'string' ? parseInt(value, 10) : value;
+
+    const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];
+    if (!bytes || bytes === 0) return [0, sizes[0]];
+
+    if (bytes < 1024) return [Math.ceil(bytes), sizes[0]];
+
+    const pow = Math.floor(Math.log2(bytes) / 10);
     const multiplier = 10 ** (precision || 2);
     return (
-      Math.round((numVal * multiplier) / 1024 ** pow) / multiplier +
-      ['Bytes', 'KB', 'MB', 'GB', 'TB'][pow]
+      Math.round((bytes * multiplier) / 1024 ** pow) / multiplier + sizes[pow]
     );
   }, [value]);
-  return <span>{formatBytes()}</span>;
+
+  return <span>{formatedValue}</span>;
 };
 
 export default BytesFormatted;
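Note: BytesFormatted now folds the old formatBytes helper (deleted below) into the component. The unit index is floor(log2(bytes) / 10), i.e. one step per factor of 1024, and the value is then rounded to a couple of decimal places (the component's `precision || 2` fallback means a passed-in 0 still yields two decimals). A minimal standalone sketch of the same arithmetic, where formatBytesSketch is a hypothetical name used only for illustration:

const SIZES = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];

function formatBytesSketch(value: number, precision = 2): string {
  if (!value || value <= 0) return `0${SIZES[0]}`;
  if (value < 1024) return `${Math.ceil(value)}${SIZES[0]}`;

  // 1024 = 2^10, so dividing log2(value) by 10 gives the index into SIZES.
  const pow = Math.floor(Math.log2(value) / 10);
  const multiplier = 10 ** precision;
  return `${Math.round((value * multiplier) / 1024 ** pow) / multiplier}${SIZES[pow]}`;
}

// formatBytesSketch(5.7378) -> "6Bytes"
// formatBytesSketch(8000)   -> "7.81KB"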

+ 0 - 13
kafka-ui-react-app/src/lib/utils/formatBytes.ts

@@ -1,13 +0,0 @@
-function formatBytes(bytes: number, decimals = 0) {
-  const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];
-  if (bytes === 0) return [0, sizes[0]];
-
-  const k = 1024;
-  const dm = decimals < 0 ? 0 : decimals;
-
-  const i = Math.floor(Math.log(bytes) / Math.log(k));
-
-  return [parseFloat((bytes / k ** i).toFixed(dm)), sizes[i]];
-}
-
-export default formatBytes;

+ 0 - 55
kafka-ui-react-app/src/redux/actionType.ts

@@ -1,55 +0,0 @@
-enum ActionType {
-  GET_CLUSTERS__REQUEST = 'GET_CLUSTERS__REQUEST',
-  GET_CLUSTERS__SUCCESS = 'GET_CLUSTERS__SUCCESS',
-  GET_CLUSTERS__FAILURE = 'GET_CLUSTERS__FAILURE',
-
-  GET_CLUSTER_STATS__REQUEST = 'GET_CLUSTER_STATUS__REQUEST',
-  GET_CLUSTER_STATS__SUCCESS = 'GET_CLUSTER_STATUS__SUCCESS',
-  GET_CLUSTER_STATS__FAILURE = 'GET_CLUSTER_STATUS__FAILURE',
-
-  GET_CLUSTER_METRICS__REQUEST = 'GET_CLUSTER_METRICS__REQUEST',
-  GET_CLUSTER_METRICS__SUCCESS = 'GET_CLUSTER_METRICS__SUCCESS',
-  GET_CLUSTER_METRICS__FAILURE = 'GET_CLUSTER_METRICS__FAILURE',
-
-  GET_BROKERS__REQUEST = 'GET_BROKERS__REQUEST',
-  GET_BROKERS__SUCCESS = 'GET_BROKERS__SUCCESS',
-  GET_BROKERS__FAILURE = 'GET_BROKERS__FAILURE',
-
-  GET_BROKER_METRICS__REQUEST = 'GET_BROKER_METRICS__REQUEST',
-  GET_BROKER_METRICS__SUCCESS = 'GET_BROKER_METRICS__SUCCESS',
-  GET_BROKER_METRICS__FAILURE = 'GET_BROKER_METRICS__FAILURE',
-
-  GET_TOPICS__REQUEST = 'GET_TOPICS__REQUEST',
-  GET_TOPICS__SUCCESS = 'GET_TOPICS__SUCCESS',
-  GET_TOPICS__FAILURE = 'GET_TOPICS__FAILURE',
-
-  GET_TOPIC_MESSAGES__REQUEST = 'GET_TOPIC_MESSAGES__REQUEST',
-  GET_TOPIC_MESSAGES__SUCCESS = 'GET_TOPIC_MESSAGES__SUCCESS',
-  GET_TOPIC_MESSAGES__FAILURE = 'GET_TOPIC_MESSAGES__FAILURE',
-
-  GET_TOPIC_DETAILS__REQUEST = 'GET_TOPIC_DETAILS__REQUEST',
-  GET_TOPIC_DETAILS__SUCCESS = 'GET_TOPIC_DETAILS__SUCCESS',
-  GET_TOPIC_DETAILS__FAILURE = 'GET_TOPIC_DETAILS__FAILURE',
-
-  GET_TOPIC_CONFIG__REQUEST = 'GET_TOPIC_CONFIG__REQUEST',
-  GET_TOPIC_CONFIG__SUCCESS = 'GET_TOPIC_CONFIG__SUCCESS',
-  GET_TOPIC_CONFIG__FAILURE = 'GET_TOPIC_CONFIG__FAILURE',
-
-  POST_TOPIC__REQUEST = 'POST_TOPIC__REQUEST',
-  POST_TOPIC__SUCCESS = 'POST_TOPIC__SUCCESS',
-  POST_TOPIC__FAILURE = 'POST_TOPIC__FAILURE',
-
-  PATCH_TOPIC__REQUEST = 'PATCH_TOPIC__REQUEST',
-  PATCH_TOPIC__SUCCESS = 'PATCH_TOPIC__SUCCESS',
-  PATCH_TOPIC__FAILURE = 'PATCH_TOPIC__FAILURE',
-
-  GET_CONSUMER_GROUPS__REQUEST = 'GET_CONSUMER_GROUPS__REQUEST',
-  GET_CONSUMER_GROUPS__SUCCESS = 'GET_CONSUMER_GROUPS__SUCCESS',
-  GET_CONSUMER_GROUPS__FAILURE = 'GET_CONSUMER_GROUPS__FAILURE',
-
-  GET_CONSUMER_GROUP_DETAILS__REQUEST = 'GET_CONSUMER_GROUP_DETAILS__REQUEST',
-  GET_CONSUMER_GROUP_DETAILS__SUCCESS = 'GET_CONSUMER_GROUP_DETAILS__SUCCESS',
-  GET_CONSUMER_GROUP_DETAILS__FAILURE = 'GET_CONSUMER_GROUP_DETAILS__FAILURE',
-}
-
-export default ActionType;

+ 39 - 40
kafka-ui-react-app/src/redux/actions/actions.ts

@@ -1,5 +1,4 @@
 import { createAsyncAction } from 'typesafe-actions';
-import ActionType from 'redux/actionType';
 import { TopicName, ConsumerGroupID } from 'redux/interfaces';
 
 import {
@@ -17,81 +16,81 @@ import {
 } from 'generated-sources';
 
 export const fetchClusterStatsAction = createAsyncAction(
-  ActionType.GET_CLUSTER_STATS__REQUEST,
-  ActionType.GET_CLUSTER_STATS__SUCCESS,
-  ActionType.GET_CLUSTER_STATS__FAILURE
+  'GET_CLUSTER_STATUS__REQUEST',
+  'GET_CLUSTER_STATUS__SUCCESS',
+  'GET_CLUSTER_STATUS__FAILURE'
 )<undefined, ClusterStats, undefined>();
 
 export const fetchClusterMetricsAction = createAsyncAction(
-  ActionType.GET_CLUSTER_METRICS__REQUEST,
-  ActionType.GET_CLUSTER_METRICS__SUCCESS,
-  ActionType.GET_CLUSTER_METRICS__FAILURE
+  'GET_CLUSTER_METRICS__REQUEST',
+  'GET_CLUSTER_METRICS__SUCCESS',
+  'GET_CLUSTER_METRICS__FAILURE'
 )<undefined, ClusterMetrics, undefined>();
 
 export const fetchBrokersAction = createAsyncAction(
-  ActionType.GET_BROKERS__REQUEST,
-  ActionType.GET_BROKERS__SUCCESS,
-  ActionType.GET_BROKERS__FAILURE
+  'GET_BROKERS__REQUEST',
+  'GET_BROKERS__SUCCESS',
+  'GET_BROKERS__FAILURE'
 )<undefined, Broker[], undefined>();
 
 export const fetchBrokerMetricsAction = createAsyncAction(
-  ActionType.GET_BROKER_METRICS__REQUEST,
-  ActionType.GET_BROKER_METRICS__SUCCESS,
-  ActionType.GET_BROKER_METRICS__FAILURE
+  'GET_BROKER_METRICS__REQUEST',
+  'GET_BROKER_METRICS__SUCCESS',
+  'GET_BROKER_METRICS__FAILURE'
 )<undefined, BrokerMetrics, undefined>();
 
 export const fetchClusterListAction = createAsyncAction(
-  ActionType.GET_CLUSTERS__REQUEST,
-  ActionType.GET_CLUSTERS__SUCCESS,
-  ActionType.GET_CLUSTERS__FAILURE
+  'GET_CLUSTERS__REQUEST',
+  'GET_CLUSTERS__SUCCESS',
+  'GET_CLUSTERS__FAILURE'
 )<undefined, Cluster[], undefined>();
 
 export const fetchTopicsListAction = createAsyncAction(
-  ActionType.GET_TOPICS__REQUEST,
-  ActionType.GET_TOPICS__SUCCESS,
-  ActionType.GET_TOPICS__FAILURE
+  'GET_TOPICS__REQUEST',
+  'GET_TOPICS__SUCCESS',
+  'GET_TOPICS__FAILURE'
 )<undefined, Topic[], undefined>();
 
 export const fetchTopicMessagesAction = createAsyncAction(
-  ActionType.GET_TOPIC_MESSAGES__REQUEST,
-  ActionType.GET_TOPIC_MESSAGES__SUCCESS,
-  ActionType.GET_TOPIC_MESSAGES__FAILURE
+  'GET_TOPIC_MESSAGES__REQUEST',
+  'GET_TOPIC_MESSAGES__SUCCESS',
+  'GET_TOPIC_MESSAGES__FAILURE'
 )<undefined, TopicMessage[], undefined>();
 
 export const fetchTopicDetailsAction = createAsyncAction(
-  ActionType.GET_TOPIC_DETAILS__REQUEST,
-  ActionType.GET_TOPIC_DETAILS__SUCCESS,
-  ActionType.GET_TOPIC_DETAILS__FAILURE
+  'GET_TOPIC_DETAILS__REQUEST',
+  'GET_TOPIC_DETAILS__SUCCESS',
+  'GET_TOPIC_DETAILS__FAILURE'
 )<undefined, { topicName: TopicName; details: TopicDetails }, undefined>();
 
 export const fetchTopicConfigAction = createAsyncAction(
-  ActionType.GET_TOPIC_CONFIG__REQUEST,
-  ActionType.GET_TOPIC_CONFIG__SUCCESS,
-  ActionType.GET_TOPIC_CONFIG__FAILURE
+  'GET_TOPIC_CONFIG__REQUEST',
+  'GET_TOPIC_CONFIG__SUCCESS',
+  'GET_TOPIC_CONFIG__FAILURE'
 )<undefined, { topicName: TopicName; config: TopicConfig[] }, undefined>();
 
 export const createTopicAction = createAsyncAction(
-  ActionType.POST_TOPIC__REQUEST,
-  ActionType.POST_TOPIC__SUCCESS,
-  ActionType.POST_TOPIC__FAILURE
+  'POST_TOPIC__REQUEST',
+  'POST_TOPIC__SUCCESS',
+  'POST_TOPIC__FAILURE'
 )<undefined, Topic, undefined>();
 
 export const updateTopicAction = createAsyncAction(
-  ActionType.PATCH_TOPIC__REQUEST,
-  ActionType.PATCH_TOPIC__SUCCESS,
-  ActionType.PATCH_TOPIC__FAILURE
+  'PATCH_TOPIC__REQUEST',
+  'PATCH_TOPIC__SUCCESS',
+  'PATCH_TOPIC__FAILURE'
 )<undefined, Topic, undefined>();
 
 export const fetchConsumerGroupsAction = createAsyncAction(
-  ActionType.GET_CONSUMER_GROUPS__REQUEST,
-  ActionType.GET_CONSUMER_GROUPS__SUCCESS,
-  ActionType.GET_CONSUMER_GROUPS__FAILURE
+  'GET_CONSUMER_GROUPS__REQUEST',
+  'GET_CONSUMER_GROUPS__SUCCESS',
+  'GET_CONSUMER_GROUPS__FAILURE'
 )<undefined, ConsumerGroup[], undefined>();
 
 export const fetchConsumerGroupDetailsAction = createAsyncAction(
-  ActionType.GET_CONSUMER_GROUP_DETAILS__REQUEST,
-  ActionType.GET_CONSUMER_GROUP_DETAILS__SUCCESS,
-  ActionType.GET_CONSUMER_GROUP_DETAILS__FAILURE
+  'GET_CONSUMER_GROUP_DETAILS__REQUEST',
+  'GET_CONSUMER_GROUP_DETAILS__SUCCESS',
+  'GET_CONSUMER_GROUP_DETAILS__FAILURE'
 )<
   undefined,
   { consumerGroupID: ConsumerGroupID; details: ConsumerGroupDetails },
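Note: dropping the ActionType enum in favour of inline string literals loses nothing in type safety here, because typesafe-actions captures the strings as literal types and reducers can still switch on them exhaustively. A minimal sketch of that behaviour (fetchThingAction is a made-up example, not one of the creators above):

import { createAsyncAction, getType } from 'typesafe-actions';

const fetchThingAction = createAsyncAction(
  'GET_THING__REQUEST',
  'GET_THING__SUCCESS',
  'GET_THING__FAILURE'
)<undefined, string, undefined>();

// The literal types survive, so both of these are typed as 'GET_THING__SUCCESS',
// and a reducer `case 'GET_THING__SUCCESS':` still narrows the payload to string.
const successType = getType(fetchThingAction.success);
const successAction = fetchThingAction.success('payload');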

+ 0 - 7
kafka-ui-react-app/src/redux/interfaces/index.ts

@@ -16,13 +16,6 @@ export * from './broker';
 export * from './consumerGroup';
 export * from './loader';
 
-export enum FetchStatus {
-  notFetched = 'notFetched',
-  fetching = 'fetching',
-  fetched = 'fetched',
-  errorFetching = 'errorFetching',
-}
-
 export interface RootState {
   topics: TopicsState;
   clusters: ClusterState;

+ 1 - 3
kafka-ui-react-app/src/redux/interfaces/loader.ts

@@ -1,5 +1,3 @@
-import { FetchStatus } from 'redux/interfaces/index';
-
 export interface LoaderState {
-  [key: string]: FetchStatus;
+  [key: string]: 'notFetched' | 'fetching' | 'fetched' | 'errorFetching';
 }

+ 3 - 4
kafka-ui-react-app/src/redux/reducers/brokers/reducer.ts

@@ -1,6 +1,5 @@
 import { Action, BrokersState, ZooKeeperStatus } from 'redux/interfaces';
 import { ClusterStats } from 'generated-sources';
-import ActionType from 'redux/actionType';
 
 export const initialState: BrokersState = {
   items: [],
@@ -36,14 +35,14 @@ const updateBrokerSegmentSize = (
 
 const reducer = (state = initialState, action: Action): BrokersState => {
   switch (action.type) {
-    case ActionType.GET_BROKERS__REQUEST:
+    case 'GET_BROKERS__REQUEST':
       return initialState;
-    case ActionType.GET_BROKERS__SUCCESS:
+    case 'GET_BROKERS__SUCCESS':
       return {
         ...state,
         items: action.payload,
       };
-    case ActionType.GET_CLUSTER_STATS__SUCCESS:
+    case 'GET_CLUSTER_STATUS__SUCCESS':
       return updateBrokerSegmentSize(state, action.payload);
     default:
       return state;

+ 2 - 2
kafka-ui-react-app/src/redux/reducers/brokers/selectors.ts

@@ -1,5 +1,5 @@
 import { createSelector } from 'reselect';
-import { RootState, FetchStatus, BrokersState } from 'redux/interfaces';
+import { RootState, BrokersState } from 'redux/interfaces';
 import { createFetchingSelector } from 'redux/reducers/loader/selectors';
 
 const brokersState = ({ brokers }: RootState): BrokersState => brokers;
@@ -8,7 +8,7 @@ const getBrokerListFetchingStatus = createFetchingSelector('GET_BROKERS');
 
 export const getIsBrokerListFetched = createSelector(
   getBrokerListFetchingStatus,
-  (status) => status === FetchStatus.fetched
+  (status) => status === 'fetched'
 );
 
 export const getBrokerCount = createSelector(

+ 1 - 2
kafka-ui-react-app/src/redux/reducers/clusters/reducer.ts

@@ -1,12 +1,11 @@
 import { Action } from 'redux/interfaces';
 import { Cluster } from 'generated-sources';
-import ActionType from 'redux/actionType';
 
 export const initialState: Cluster[] = [];
 
 const reducer = (state = initialState, action: Action): Cluster[] => {
   switch (action.type) {
-    case ActionType.GET_CLUSTERS__SUCCESS:
+    case 'GET_CLUSTERS__SUCCESS':
       return action.payload;
     default:
       return state;

+ 2 - 2
kafka-ui-react-app/src/redux/reducers/clusters/selectors.ts

@@ -1,5 +1,5 @@
 import { createSelector } from 'reselect';
-import { RootState, FetchStatus } from 'redux/interfaces';
+import { RootState } from 'redux/interfaces';
 import { createFetchingSelector } from 'redux/reducers/loader/selectors';
 import { Cluster, ServerStatus } from 'generated-sources';
 
@@ -9,7 +9,7 @@ const getClusterListFetchingStatus = createFetchingSelector('GET_CLUSTERS');
 
 export const getIsClusterListFetched = createSelector(
   getClusterListFetchingStatus,
-  (status) => status === FetchStatus.fetched
+  (status) => status === 'fetched'
 );
 
 export const getClusterList = createSelector(

+ 2 - 3
kafka-ui-react-app/src/redux/reducers/consumerGroups/reducer.ts

@@ -1,6 +1,5 @@
 import { Action, ConsumerGroupsState } from 'redux/interfaces';
 import { ConsumerGroup } from 'generated-sources';
-import ActionType from 'redux/actionType';
 
 export const initialState: ConsumerGroupsState = {
   byID: {},
@@ -34,9 +33,9 @@ const updateConsumerGroupsList = (
 
 const reducer = (state = initialState, action: Action): ConsumerGroupsState => {
   switch (action.type) {
-    case ActionType.GET_CONSUMER_GROUPS__SUCCESS:
+    case 'GET_CONSUMER_GROUPS__SUCCESS':
       return updateConsumerGroupsList(state, action.payload);
-    case ActionType.GET_CONSUMER_GROUP_DETAILS__SUCCESS:
+    case 'GET_CONSUMER_GROUP_DETAILS__SUCCESS':
       return {
         ...state,
         byID: {

+ 3 - 3
kafka-ui-react-app/src/redux/reducers/consumerGroups/selectors.ts

@@ -1,5 +1,5 @@
 import { createSelector } from 'reselect';
-import { RootState, FetchStatus } from 'redux/interfaces';
+import { RootState } from 'redux/interfaces';
 import { createFetchingSelector } from 'redux/reducers/loader/selectors';
 import {
   ConsumerGroupID,
@@ -24,12 +24,12 @@ const getConsumerGroupDetailsFetchingStatus = createFetchingSelector(
 
 export const getIsConsumerGroupsListFetched = createSelector(
   getConsumerGroupsListFetchingStatus,
-  (status) => status === FetchStatus.fetched
+  (status) => status === 'fetched'
 );
 
 export const getIsConsumerGroupDetailsFetched = createSelector(
   getConsumerGroupDetailsFetchingStatus,
-  (status) => status === FetchStatus.fetched
+  (status) => status === 'fetched'
 );
 
 export const getConsumerGroupsList = createSelector(

+ 4 - 4
kafka-ui-react-app/src/redux/reducers/loader/reducer.ts

@@ -1,4 +1,4 @@
-import { FetchStatus, Action, LoaderState } from 'redux/interfaces';
+import { Action, LoaderState } from 'redux/interfaces';
 
 export const initialState: LoaderState = {};
 
@@ -15,17 +15,17 @@ const reducer = (state = initialState, action: Action): LoaderState => {
     case 'REQUEST':
       return {
         ...state,
-        [requestName]: FetchStatus.fetching,
+        [requestName]: 'fetching',
       };
     case 'SUCCESS':
       return {
         ...state,
-        [requestName]: FetchStatus.fetched,
+        [requestName]: 'fetched',
       };
     case 'FAILURE':
       return {
         ...state,
-        [requestName]: FetchStatus.errorFetching,
+        [requestName]: 'errorFetching',
       };
     default:
       return state;

+ 2 - 2
kafka-ui-react-app/src/redux/reducers/loader/selectors.ts

@@ -1,4 +1,4 @@
-import { RootState, FetchStatus } from 'redux/interfaces';
+import { RootState } from 'redux/interfaces';
 
 export const createFetchingSelector = (action: string) => (state: RootState) =>
-  state.loader[action] || FetchStatus.notFetched;
+  state.loader[action] || 'notFetched';
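Note: the loader pieces fit together by convention: action types end in __REQUEST / __SUCCESS / __FAILURE, the loader reducer stores one of the four literal statuses under the base name, and createFetchingSelector reads it back. A minimal sketch of the name-splitting step, assuming that suffix convention (the splitting itself is not part of the hunks shown above):

const splitActionType = (type: string) => {
  const matches = /(.*)__(REQUEST|SUCCESS|FAILURE)$/.exec(type);
  return matches
    ? { requestName: matches[1], requestState: matches[2] }
    : null;
};

// splitActionType('GET_BROKERS__SUCCESS')
//   -> { requestName: 'GET_BROKERS', requestState: 'SUCCESS' }
// so the reducer writes state.loader['GET_BROKERS'] = 'fetched'
// and createFetchingSelector('GET_BROKERS') reads it back.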

+ 5 - 6
kafka-ui-react-app/src/redux/reducers/topics/reducer.ts

@@ -1,7 +1,6 @@
 import { v4 } from 'uuid';
 import { Topic, TopicMessage } from 'generated-sources';
 import { Action, TopicsState } from 'redux/interfaces';
-import ActionType from 'redux/actionType';
 
 export const initialState: TopicsState = {
   byName: {},
@@ -68,9 +67,9 @@ const transformTopicMessages = (
 
 const reducer = (state = initialState, action: Action): TopicsState => {
   switch (action.type) {
-    case ActionType.GET_TOPICS__SUCCESS:
+    case 'GET_TOPICS__SUCCESS':
       return updateTopicList(state, action.payload);
-    case ActionType.GET_TOPIC_DETAILS__SUCCESS:
+    case 'GET_TOPIC_DETAILS__SUCCESS':
       return {
         ...state,
         byName: {
@@ -81,9 +80,9 @@ const reducer = (state = initialState, action: Action): TopicsState => {
           },
         },
       };
-    case ActionType.GET_TOPIC_MESSAGES__SUCCESS:
+    case 'GET_TOPIC_MESSAGES__SUCCESS':
       return transformTopicMessages(state, action.payload);
-    case ActionType.GET_TOPIC_CONFIG__SUCCESS:
+    case 'GET_TOPIC_CONFIG__SUCCESS':
       return {
         ...state,
         byName: {
@@ -97,7 +96,7 @@ const reducer = (state = initialState, action: Action): TopicsState => {
           },
         },
       };
-    case ActionType.POST_TOPIC__SUCCESS:
+    case 'POST_TOPIC__SUCCESS':
       return addToTopicList(state, action.payload);
     default:
       return state;

+ 6 - 7
kafka-ui-react-app/src/redux/reducers/topics/selectors.ts

@@ -2,7 +2,6 @@ import { createSelector } from 'reselect';
 import {
   RootState,
   TopicName,
-  FetchStatus,
   TopicsState,
   TopicConfigByName,
 } from 'redux/interfaces';
@@ -29,32 +28,32 @@ const getTopicUpdateStatus = createFetchingSelector('PATCH_TOPIC');
 
 export const getIsTopicListFetched = createSelector(
   getTopicListFetchingStatus,
-  (status) => status === FetchStatus.fetched
+  (status) => status === 'fetched'
 );
 
 export const getIsTopicDetailsFetched = createSelector(
   getTopicDetailsFetchingStatus,
-  (status) => status === FetchStatus.fetched
+  (status) => status === 'fetched'
 );
 
 export const getIsTopicMessagesFetched = createSelector(
   getTopicMessagesFetchingStatus,
-  (status) => status === FetchStatus.fetched
+  (status) => status === 'fetched'
 );
 
 export const getTopicConfigFetched = createSelector(
   getTopicConfigFetchingStatus,
-  (status) => status === FetchStatus.fetched
+  (status) => status === 'fetched'
 );
 
 export const getTopicCreated = createSelector(
   getTopicCreationStatus,
-  (status) => status === FetchStatus.fetched
+  (status) => status === 'fetched'
 );
 
 export const getTopicUpdated = createSelector(
   getTopicUpdateStatus,
-  (status) => status === FetchStatus.fetched
+  (status) => status === 'fetched'
 );
 
 export const getTopicList = createSelector(

+ 2 - 3
kafka-ui-react-app/src/setupTests.ts

@@ -1,6 +1,5 @@
-/* eslint-disable */
-import * as Enzyme from 'enzyme';
+import { configure } from 'enzyme';
 import Adapter from '@wojtekmaj/enzyme-adapter-react-17';
 import '@testing-library/jest-dom/extend-expect';
 
-Enzyme.configure({ adapter: new Adapter() });
+configure({ adapter: new Adapter() });

+ 0 - 156
kafka-ui-react-app/src/tests/Topics/Details/Messages/Messages.spec.tsx

@@ -1,156 +0,0 @@
-import React from 'react';
-import { mount, shallow } from 'enzyme';
-import JSONTree from 'react-json-tree';
-import * as useDebounce from 'use-debounce';
-import DatePicker from 'react-datepicker';
-import Messages, { Props } from 'components/Topics/Details/Messages/Messages';
-import PageLoader from 'components/common/PageLoader/PageLoader';
-
-describe('Messages component', () => {
-  beforeEach(() => {
-    jest.restoreAllMocks();
-  });
-
-  const setupWrapper = (props: Partial<Props> = {}) => (
-    <Messages
-      clusterName="Test cluster"
-      topicName="Cluster topic"
-      isFetched
-      fetchTopicMessages={jest.fn()}
-      messages={[]}
-      partitions={[]}
-      {...props}
-    />
-  );
-
-  describe('Initial state', () => {
-    it('renders PageLoader', () => {
-      expect(
-        shallow(setupWrapper({ isFetched: false })).exists(PageLoader)
-      ).toBeTruthy();
-    });
-  });
-
-  describe('Messages table', () => {
-    describe('With messages', () => {
-      const messagesWrapper = mount(
-        setupWrapper({
-          messages: [
-            {
-              partition: 1,
-              offset: 2,
-              timestamp: new Date('05-05-1994'),
-              content: [1, 2, 3],
-            },
-          ],
-        })
-      );
-      it('renders table', () => {
-        expect(
-          messagesWrapper.exists('[className="table is-striped is-fullwidth"]')
-        ).toBeTruthy();
-      });
-      it('renders JSONTree', () => {
-        expect(messagesWrapper.find(JSONTree).length).toEqual(1);
-      });
-      it('parses message content correctly', () => {
-        const messages = [
-          {
-            partition: 1,
-            offset: 2,
-            timestamp: new Date('05-05-1994'),
-            content: [1, 2, 3],
-          },
-        ];
-        const content = JSON.stringify(messages[0].content);
-        expect(JSON.parse(content)).toEqual(messages[0].content);
-      });
-    });
-    describe('Without messages', () => {
-      it('renders string', () => {
-        const wrapper = mount(setupWrapper());
-        expect(wrapper.text()).toContain('No messages at selected topic');
-      });
-    });
-  });
-
-  describe('Offset field', () => {
-    describe('Seek Type dependency', () => {
-      const wrapper = mount(setupWrapper());
-
-      it('renders DatePicker', () => {
-        wrapper
-          .find('[id="selectSeekType"]')
-          .simulate('change', { target: { value: 'TIMESTAMP' } });
-
-        expect(
-          wrapper.find('[id="selectSeekType"]').first().props().value
-        ).toEqual('TIMESTAMP');
-
-        expect(wrapper.exists(DatePicker)).toBeTruthy();
-      });
-    });
-
-    describe('With defined offset value', () => {
-      const wrapper = shallow(setupWrapper());
-
-      it('shows offset value in input', () => {
-        const offset = '10';
-
-        wrapper
-          .find('#searchOffset')
-          .simulate('change', { target: { value: offset } });
-
-        expect(wrapper.find('#searchOffset').first().props().value).toEqual(
-          offset
-        );
-      });
-    });
-    describe('With invalid offset value', () => {
-      const wrapper = shallow(setupWrapper());
-
-      it('shows 0 in input', () => {
-        wrapper
-          .find('#searchOffset')
-          .simulate('change', { target: { value: null } });
-
-        expect(wrapper.find('#searchOffset').first().props().value).toBe('0');
-      });
-    });
-  });
-
-  describe('Search field', () => {
-    it('renders input correctly', () => {
-      const query = 20;
-      const mockedUseDebouncedCallback = jest.fn();
-      jest
-        .spyOn(useDebounce, 'useDebouncedCallback')
-        .mockImplementationOnce(() => [
-          mockedUseDebouncedCallback,
-          jest.fn(),
-          jest.fn(),
-        ]);
-
-      const wrapper = shallow(setupWrapper());
-
-      wrapper
-        .find('#searchText')
-        .simulate('change', { target: { value: query } });
-
-      expect(wrapper.find('#searchText').first().props().value).toEqual(query);
-      expect(mockedUseDebouncedCallback).toHaveBeenCalledWith({ q: query });
-    });
-  });
-
-  describe('Submit button', () => {
-    it('fetches topic messages', () => {
-      const mockedfetchTopicMessages = jest.fn();
-      const wrapper = mount(
-        setupWrapper({ fetchTopicMessages: mockedfetchTopicMessages })
-      );
-
-      wrapper.find('[type="submit"]').simulate('click');
-      expect(mockedfetchTopicMessages).toHaveBeenCalled();
-    });
-  });
-});

+ 0 - 19
kafka-ui-react-app/src/theme/bulma_overrides.scss

@@ -1,12 +1,3 @@
-@import "../../node_modules/bulma/sass/utilities/_all.sass";
-@import "../../node_modules/bulma/sass/base/_all.sass";
-@import "../../node_modules/bulma/sass/elements/_all.sass";
-@import "../../node_modules/bulma/sass/form/_all.sass";
-@import "../../node_modules/bulma/sass/components/_all.sass";
-@import "../../node_modules/bulma/sass/grid/_all.sass";
-@import "../../node_modules/bulma/sass/layout/_all.sass";
-@import "../../node_modules/bulma-switch/src/sass/index.sass";
-
 .has {
   &-text-overflow-ellipsis {
     flex: 1;
@@ -54,16 +45,6 @@
   }
 }
 
-.box {
-  &.is-hoverable {
-    cursor: pointer;
-
-    &:hover {
-      box-shadow: 0 0.5em 1em -0.125em rgba(10, 10, 10, 0.2), 0 0px 0 1px rgba(10, 10, 10, 0.02);
-    }
-  }
-}
-
 @keyframes fadein {
   from { opacity: 0; }
   to   { opacity: 1; }

+ 2 - 0
kafka-ui-react-app/src/theme/index.scss

@@ -1,3 +1,5 @@
+@import 'bulma';
+@import '~bulma-switch';
 @import 'src/theme/bulma_overrides';
 
 #root, body, html {

+ 1 - 1
kafka-ui-react-app/tsconfig.json

@@ -22,6 +22,6 @@
     "noFallthroughCasesInSwitch": true
   },
   "include": [
-    "src"
+    "src",
   ]
 }

Some files were not shown because too many files changed in this diff