Merge branch 'master' into consumer_group_id_in_reset_offset_heading

Sungyun Hur 2 years ago
commit 6c73e8c082
73 changed files with 2060 additions and 517 deletions
  1. .github/workflows/branch-deploy.yml (+14 -10)
  2. .github/workflows/branch-remove.yml (+0 -6)
  3. .github/workflows/build-public-image.yml (+7 -5)
  4. .github/workflows/release.yaml (+3 -4)
  5. README.md (+1 -1)
  6. documentation/compose/DOCKER_COMPOSE.md (+2 -2)
  7. documentation/compose/data/message.json (+0 -0)
  8. documentation/compose/data/proxy.conf (+0 -0)
  9. documentation/compose/e2e-tests.yaml (+2 -2)
  10. documentation/compose/kafka-cluster-sr-auth.yaml (+2 -2)
  11. documentation/compose/kafka-clusters-only.yaml (+0 -84)
  12. documentation/compose/kafka-ui-arm64.yaml (+1 -1)
  13. documentation/compose/kafka-ui-connectors-auth.yaml (+2 -2)
  14. documentation/compose/kafka-ui.yaml (+2 -2)
  15. documentation/compose/kafka-with-zookeeper.yaml (+1 -1)
  16. documentation/compose/ldap.yaml (+12 -15)
  17. documentation/compose/nginx-proxy.yaml (+1 -1)
  18. documentation/compose/oauth-cognito.yaml (+0 -22)
  19. documentation/compose/traefik-proxy.yaml (+0 -0)
  20. kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java (+21 -12)
  21. kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonToAvroConversionException.java (+7 -0)
  22. kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java (+3 -3)
  23. kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java (+7 -7)
  24. kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalTopicConsumerGroup.java (+4 -4)
  25. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdeInstance.java (+24 -4)
  26. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/AvroSchemaRegistrySerializer.java (+4 -2)
  27. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java (+14 -5)
  28. kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java (+33 -37)
  29. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java (+1 -1)
  30. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConfigSanitizer.java (+12 -5)
  31. kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java (+4 -15)
  32. kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/GithubReleaseInfo.java (+1 -2)
  33. kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/WebClientConfigurator.java (+6 -4)
  34. kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java (+5 -1)
  35. kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversion.java (+503 -0)
  36. kafka-ui-api/src/main/resources/application-gauth.yml (+0 -10)
  37. kafka-ui-api/src/main/resources/application-local.yml (+120 -58)
  38. kafka-ui-api/src/main/resources/application-sdp.yml (+0 -13)
  39. kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java (+171 -5)
  40. kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/KafkaConfigSanitizerTest.java (+24 -4)
  41. kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversionTest.java (+621 -0)
  42. kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml (+2 -2)
  43. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java (+5 -0)
  44. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java (+67 -2)
  45. kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Expected.java (+15 -0)
  46. kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java (+9 -30)
  47. kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java (+87 -7)
  48. kafka-ui-react-app/package.json (+3 -1)
  49. kafka-ui-react-app/pnpm-lock.yaml (+26 -12)
  50. kafka-ui-react-app/src/components/App.tsx (+5 -3)
  51. kafka-ui-react-app/src/components/Connect/Details/Actions/Actions.tsx (+3 -3)
  52. kafka-ui-react-app/src/components/Connect/Details/Tasks/ActionsCellTasks.tsx (+10 -4)
  53. kafka-ui-react-app/src/components/Connect/List/ActionsCell.tsx (+3 -3)
  54. kafka-ui-react-app/src/components/Connect/New/New.tsx (+4 -8)
  55. kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx (+5 -2)
  56. kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.tsx (+3 -3)
  57. kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx (+1 -1)
  58. kafka-ui-react-app/src/components/ConsumerGroups/Details/TopicContents/TopicContents.tsx (+3 -3)
  59. kafka-ui-react-app/src/components/ConsumerGroups/List.tsx (+2 -2)
  60. kafka-ui-react-app/src/components/NavBar/NavBar.tsx (+6 -42)
  61. kafka-ui-react-app/src/components/PageContainer/PageContainer.tsx (+27 -6)
  62. kafka-ui-react-app/src/components/PageContainer/__tests__/PageContainer.spec.tsx (+44 -13)
  63. kafka-ui-react-app/src/components/Topics/Topic/ConsumerGroups/TopicConsumerGroups.tsx (+2 -2)
  64. kafka-ui-react-app/src/components/common/EditorViewer/EditorViewer.tsx (+2 -2)
  65. kafka-ui-react-app/src/components/common/SQLEditor/SQLEditor.tsx (+6 -2)
  66. kafka-ui-react-app/src/components/contexts/ThemeModeContext.tsx (+58 -0)
  67. kafka-ui-react-app/src/index.tsx (+4 -1)
  68. kafka-ui-react-app/src/lib/fixtures/consumerGroups.ts (+5 -5)
  69. kafka-ui-react-app/src/lib/fixtures/topics.ts (+2 -2)
  70. kafka-ui-react-app/src/lib/testHelpers.tsx (+7 -1)
  71. kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/getJaasConfig.ts (+7 -1)
  72. kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/transformFormDataToPayload.ts (+1 -1)
  73. pom.xml (+1 -1)

+ 14 - 10
.github/workflows/branch-deploy.yml

@@ -84,18 +84,22 @@ jobs:
          git add ../kafka-ui-from-branch/
          git commit -m "added env:${{ needs.build.outputs.deploy }}" && git push || true

-      - name: make comment with private deployment link
+      - name: update status check for private deployment
        if: ${{ github.event.label.name == 'status/feature_testing' }}
-        uses: peter-evans/create-or-update-comment@v3
+        uses: Sibz/github-status-action@v1.1.6
        with:
-          issue-number: ${{ github.event.pull_request.number }}
-          body: |
-            Custom deployment will be available at http://${{ needs.build.outputs.tag }}.internal.kafka-ui.provectus.io
+          authToken: ${{secrets.GITHUB_TOKEN}}
+          context: "Click Details button to open custom deployment page"
+          state: "success"
+          sha: ${{ github.event.pull_request.head.sha  || github.sha }}
+          target_url: "http://${{ needs.build.outputs.tag }}.internal.kafka-ui.provectus.io"

-      - name: make comment with public deployment link
+      - name: update status check for public deployment
        if: ${{ github.event.label.name == 'status/feature_testing_public' }}
-        uses: peter-evans/create-or-update-comment@v3
+        uses: Sibz/github-status-action@v1.1.6
        with:
-          issue-number: ${{ github.event.pull_request.number }}
-          body: |
-            Custom deployment will be available at http://${{ needs.build.outputs.tag }}.kafka-ui.provectus.io in 5 minutes
+          authToken: ${{secrets.GITHUB_TOKEN}}
+          context: "Click Details button to open custom deployment page"
+          state: "success"
+          sha: ${{ github.event.pull_request.head.sha  || github.sha }}
+          target_url: "http://${{ needs.build.outputs.tag }}.internal.kafka-ui.provectus.io"

+ 0 - 6
.github/workflows/branch-remove.yml

@@ -20,9 +20,3 @@ jobs:
          git config --global user.name "infra-tech"
          git add ../kafka-ui-from-branch/
          git commit -m "removed env:${{ needs.build.outputs.deploy }}" && git push || true
-      - name: make comment with deployment link
-        uses: peter-evans/create-or-update-comment@v3
-        with:
-          issue-number: ${{ github.event.pull_request.number }}
-          body: |
-            Custom deployment removed

+ 7 - 5
.github/workflows/build-public-image.yml

@@ -64,12 +64,14 @@ jobs:
            JAR_FILE=kafka-ui-api-${{ steps.build.outputs.version }}.jar
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache
-      - name: make comment with private deployment link
-        uses: peter-evans/create-or-update-comment@v3
+      - name: update status check
+        uses: Sibz/github-status-action@v1.1.6
        with:
-          issue-number: ${{ github.event.pull_request.number }}
-          body: |
-            Image published at public.ecr.aws/provectus/kafka-ui-custom-build:${{ steps.extract_branch.outputs.tag }}
+          authToken: ${{secrets.GITHUB_TOKEN}}
+          context: "Image published at"
+          state: "success"
+          sha: ${{ github.event.pull_request.head.sha  || github.sha }}
+          target_url: "public.ecr.aws/provectus/kafka-ui-custom-build:${{ steps.extract_branch.outputs.tag }}"

    outputs:
      tag: ${{ steps.extract_branch.outputs.tag }}

+ 3 - 4
.github/workflows/release.yaml

@@ -77,6 +77,7 @@ jobs:
          builder: ${{ steps.buildx.outputs.name }}
          context: kafka-ui-api
          platforms: linux/amd64,linux/arm64
+          provenance: false
          push: true
          tags: |
            provectuslabs/kafka-ui:${{ steps.build.outputs.version }}
@@ -88,14 +89,12 @@ jobs:

  charts:
    runs-on: ubuntu-latest
-    permissions:
-      contents: write
    needs: release
    steps:
      - name: Repository Dispatch
        uses: peter-evans/repository-dispatch@v2
        with:
-          token: ${{ secrets.GITHUB_TOKEN }}
-          repository: provectus/kafka-ui
+          token: ${{ secrets.CHARTS_ACTIONS_TOKEN }}
+          repository: provectus/kafka-ui-charts
          event-type: prepare-helm-release
          client-payload: '{"appversion": "${{ needs.release.outputs.version }}"}'

+ 1 - 1
README.md

@@ -99,7 +99,7 @@ services:
    ports:
      - 8080:8080
    environment:
-      DYNAMIC_CONFIG_ENABLED: true
+      DYNAMIC_CONFIG_ENABLED: 'true'
    volumes:
      - ~/kui/config.yml:/etc/kafkaui/dynamic_config.yaml
```

+ 2 - 2
documentation/compose/DOCKER_COMPOSE.md

@@ -8,9 +8,9 @@
 6. [kafka-ui-auth-context.yaml](./kafka-ui-auth-context.yaml) - Basic (username/password) authentication with custom path (URL) (issue 861).
 7. [e2e-tests.yaml](./e2e-tests.yaml) - Configuration with different connectors (github-source, s3, sink-activities, source-activities) and Ksql functionality.
 8. [kafka-ui-jmx-secured.yml](./kafka-ui-jmx-secured.yml) - Kafka’s JMX with SSL and authentication.
-9. [kafka-ui-reverse-proxy.yaml](./kafka-ui-reverse-proxy.yaml) - An example for using the app behind a proxy (like nginx).
+9. [kafka-ui-reverse-proxy.yaml](./nginx-proxy.yaml) - An example for using the app behind a proxy (like nginx).
 10. [kafka-ui-sasl.yaml](./kafka-ui-sasl.yaml) - SASL auth for Kafka.
-11. [kafka-ui-traefik-proxy.yaml](./kafka-ui-traefik-proxy.yaml) - Traefik specific proxy configuration.
+11. [kafka-ui-traefik-proxy.yaml](./traefik-proxy.yaml) - Traefik specific proxy configuration.
 12. [oauth-cognito.yaml](./oauth-cognito.yaml) - OAuth2 with Cognito
 13. [kafka-ui-with-jmx-exporter.yaml](./kafka-ui-with-jmx-exporter.yaml) - A configuration with 2 kafka clusters with enabled prometheus jmx exporters instead of jmx.
 14. [kafka-with-zookeeper.yaml](./kafka-with-zookeeper.yaml) - An example for using kafka with zookeeper

+ 0 - 0
documentation/compose/message.json → documentation/compose/data/message.json


+ 0 - 0
documentation/compose/proxy.conf → documentation/compose/data/proxy.conf


+ 2 - 2
documentation/compose/e2e-tests.yaml

@@ -124,7 +124,7 @@ services:
  kafka-init-topics:
    image: confluentinc/cp-kafka:7.2.1
    volumes:
-      - ./message.json:/data/message.json
+      - ./data/message.json:/data/message.json
    depends_on:
      kafka0:
        condition: service_healthy
@@ -187,4 +187,4 @@ services:
      KSQL_KSQL_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
      KSQL_KSQL_SERVICE_ID: my_ksql_1
      KSQL_KSQL_HIDDEN_TOPICS: '^_.*'
-      KSQL_CACHE_MAX_BYTES_BUFFERING: 0
+      KSQL_CACHE_MAX_BYTES_BUFFERING: 0

+ 2 - 2
documentation/compose/kafka-cluster-sr-auth.yaml

@@ -57,7 +57,7 @@ services:
  kafka-init-topics:
    image: confluentinc/cp-kafka:7.2.1
    volumes:
-       - ./message.json:/data/message.json
+       - ./data/message.json:/data/message.json
    depends_on:
      - kafka1
    command: "bash -c 'echo Waiting for Kafka to be ready... && \
@@ -80,4 +80,4 @@ services:
      KAFKA_CLUSTERS_0_METRICS_PORT: 9997
      KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://schemaregistry1:8085
      KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_USERNAME: admin
-      KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_PASSWORD: letmein
+      KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_PASSWORD: letmein

+ 0 - 84
documentation/compose/kafka-clusters-only.yaml

@@ -1,84 +0,0 @@
----
-version: "2"
-services:
-  kafka0:
-    image: confluentinc/cp-kafka:7.2.1
-    hostname: kafka0
-    container_name: kafka0
-    ports:
-      - "9092:9092"
-      - "9997:9997"
-    environment:
-      KAFKA_BROKER_ID: 1
-      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: "CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT"
-      KAFKA_ADVERTISED_LISTENERS: "PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092"
-      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
-      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
-      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
-      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
-      KAFKA_JMX_PORT: 9997
-      KAFKA_JMX_HOSTNAME: localhost
-      KAFKA_PROCESS_ROLES: "broker,controller"
-      KAFKA_NODE_ID: 1
-      KAFKA_CONTROLLER_QUORUM_VOTERS: "1@kafka0:29093"
-      KAFKA_LISTENERS: "PLAINTEXT://kafka0:29092,CONTROLLER://kafka0:29093,PLAINTEXT_HOST://0.0.0.0:9092"
-      KAFKA_INTER_BROKER_LISTENER_NAME: "PLAINTEXT"
-      KAFKA_CONTROLLER_LISTENER_NAMES: "CONTROLLER"
-      KAFKA_LOG_DIRS: "/tmp/kraft-combined-logs"
-    volumes:
-      - ./scripts/update_run_cluster.sh:/tmp/update_run.sh
-      - ./scripts/clusterID:/tmp/clusterID
-    command: 'bash -c ''if [ ! -f /tmp/update_run.sh ]; then echo "ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'''
-
-  schemaregistry0:
-    image: confluentinc/cp-schema-registry:7.2.1
-    depends_on:
-      - kafka0
-    environment:
-      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
-      SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
-      SCHEMA_REGISTRY_HOST_NAME: schemaregistry0
-      SCHEMA_REGISTRY_LISTENERS: http://schemaregistry0:8085
-
-      SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
-      SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
-      SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
-    ports:
-      - 8085:8085
-
-  kafka-connect0:
-    image: confluentinc/cp-kafka-connect:7.2.1
-    ports:
-      - 8083:8083
-    depends_on:
-      - kafka0
-      - schemaregistry0
-    environment:
-      CONNECT_BOOTSTRAP_SERVERS: kafka0:29092
-      CONNECT_GROUP_ID: compose-connect-group
-      CONNECT_CONFIG_STORAGE_TOPIC: _connect_configs
-      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
-      CONNECT_OFFSET_STORAGE_TOPIC: _connect_offset
-      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
-      CONNECT_STATUS_STORAGE_TOPIC: _connect_status
-      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
-      CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
-      CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
-      CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.storage.StringConverter
-      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
-      CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
-      CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
-      CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect0
-      CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
-
-  kafka-init-topics:
-    image: confluentinc/cp-kafka:7.2.1
-    volumes:
-      - ./message.json:/data/message.json
-    depends_on:
-      - kafka0
-    command: "bash -c 'echo Waiting for Kafka to be ready... && \
-      cub kafka-ready -b kafka0:29092 1 30 && \
-      kafka-topics --create --topic users --partitions 3 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
-      kafka-topics --create --topic messages --partitions 2 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
-      kafka-console-producer --bootstrap-server kafka0:29092 --topic users < /data/message.json'"

+ 1 - 1
documentation/compose/kafka-ui-arm64.yaml

@@ -93,7 +93,7 @@ services:
  kafka-init-topics:
    image: confluentinc/cp-kafka:7.2.1.arm64
    volumes:
-       - ./message.json:/data/message.json
+       - ./data/message.json:/data/message.json
    depends_on:
      - kafka0
    command: "bash -c 'echo Waiting for Kafka to be ready... && \

+ 2 - 2
documentation/compose/kafka-ui-connectors-auth.yaml

@@ -69,7 +69,7 @@ services:
    build:
      context: ./kafka-connect
      args:
-        image: confluentinc/cp-kafka-connect:6.0.1
+        image: confluentinc/cp-kafka-connect:7.2.1
    ports:
      - 8083:8083
    depends_on:
@@ -104,7 +104,7 @@ services:
  kafka-init-topics:
    image: confluentinc/cp-kafka:7.2.1
    volumes:
-      - ./message.json:/data/message.json
+      - ./data/message.json:/data/message.json
    depends_on:
      - kafka0
    command: "bash -c 'echo Waiting for Kafka to be ready... && \

+ 2 - 2
documentation/compose/kafka-ui.yaml

@@ -115,7 +115,7 @@ services:
      SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas

  kafka-connect0:
-    image: confluentinc/cp-kafka-connect:6.0.1
+    image: confluentinc/cp-kafka-connect:7.2.1
    ports:
      - 8083:8083
    depends_on:
@@ -142,7 +142,7 @@ services:
  kafka-init-topics:
    image: confluentinc/cp-kafka:7.2.1
    volumes:
-       - ./message.json:/data/message.json
+       - ./data/message.json:/data/message.json
    depends_on:
      - kafka1
    command: "bash -c 'echo Waiting for Kafka to be ready... && \

+ 1 - 1
documentation/compose/kafka-with-zookeeper.yaml

@@ -38,7 +38,7 @@ services:
  kafka-init-topics:
    image: confluentinc/cp-kafka:7.2.1
    volumes:
-       - ./message.json:/data/message.json
+       - ./data/message.json:/data/message.json
    depends_on:
      - kafka
    command: "bash -c 'echo Waiting for Kafka to be ready... && \

+ 12 - 15
documentation/compose/auth-ldap.yaml → documentation/compose/ldap.yaml

@@ -15,26 +15,23 @@ services:
      KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
      KAFKA_CLUSTERS_0_METRICS_PORT: 9997
      KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://schemaregistry0:8085
+
      AUTH_TYPE: "LDAP"
      SPRING_LDAP_URLS: "ldap://ldap:10389"
-      SPRING_LDAP_DN_PATTERN: "cn={0},ou=people,dc=planetexpress,dc=com"
-
-#     ===== USER SEARCH FILTER INSTEAD OF DN =====
-
-#     SPRING_LDAP_USERFILTER_SEARCHBASE: "dc=planetexpress,dc=com"
-#     SPRING_LDAP_USERFILTER_SEARCHFILTER: "(&(uid={0})(objectClass=inetOrgPerson))"
-#     LDAP ADMIN USER
-#     SPRING_LDAP_ADMINUSER: "cn=admin,dc=planetexpress,dc=com"
-#     SPRING_LDAP_ADMINPASSWORD: "GoodNewsEveryone"
-
-#     ===== ACTIVE DIRECTORY =====
-
-#      OAUTH2.LDAP.ACTIVEDIRECTORY: true
-#      OAUTH2.LDAP.AСTIVEDIRECTORY.DOMAIN: "memelord.lol"
+      SPRING_LDAP_BASE: "cn={0},ou=people,dc=planetexpress,dc=com"
+      SPRING_LDAP_ADMIN_USER: "cn=admin,dc=planetexpress,dc=com"
+      SPRING_LDAP_ADMIN_PASSWORD: "GoodNewsEveryone"
+      SPRING_LDAP_USER_FILTER_SEARCH_BASE: "dc=planetexpress,dc=com"
+      SPRING_LDAP_USER_FILTER_SEARCH_FILTER: "(&(uid={0})(objectClass=inetOrgPerson))"
+      SPRING_LDAP_GROUP_FILTER_SEARCH_BASE: "ou=people,dc=planetexpress,dc=com"
+#     OAUTH2.LDAP.ACTIVEDIRECTORY: true
+#     OAUTH2.LDAP.AСTIVEDIRECTORY.DOMAIN: "memelord.lol"

  ldap:
    image: rroemhild/test-openldap:latest
    hostname: "ldap"
+    ports:
+      - 10389:10389

  kafka0:
    image: confluentinc/cp-kafka:7.2.1
@@ -79,4 +76,4 @@ services:

      SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
      SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
-      SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
+      SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas

+ 1 - 1
documentation/compose/kafka-ui-reverse-proxy.yaml → documentation/compose/nginx-proxy.yaml

@@ -4,7 +4,7 @@ services:
  nginx:
    image: nginx:latest
    volumes:
-      - ./proxy.conf:/etc/nginx/conf.d/default.conf
+      - ./data/proxy.conf:/etc/nginx/conf.d/default.conf
    ports:
      - 8080:80


+ 0 - 22
documentation/compose/oauth-cognito.yaml

@@ -1,22 +0,0 @@
----
-version: '3.4'
-services:
-
-  kafka-ui:
-    container_name: kafka-ui
-    image: provectuslabs/kafka-ui:local
-    ports:
-      - 8080:8080
-    depends_on:
-      - kafka0 # OMITTED, TAKE UP AN EXAMPLE FROM OTHER COMPOSE FILES
-    environment:
-      KAFKA_CLUSTERS_0_NAME: local
-      KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SSL
-      KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
-      AUTH_TYPE: OAUTH2_COGNITO
-      AUTH_COGNITO_ISSUER_URI: "https://cognito-idp.eu-central-1.amazonaws.com/eu-central-xxxxxx"
-      AUTH_COGNITO_CLIENT_ID: ""
-      AUTH_COGNITO_CLIENT_SECRET: ""
-      AUTH_COGNITO_SCOPE: "openid"
-      AUTH_COGNITO_USER_NAME_ATTRIBUTE: "username"
-      AUTH_COGNITO_LOGOUT_URI: "https://<domain>.auth.eu-central-1.amazoncognito.com/logout"

+ 0 - 0
documentation/compose/kafka-ui-traefik-proxy.yaml → documentation/compose/traefik-proxy.yaml


+ 21 - 12
kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java

@@ -1,5 +1,9 @@
 package com.provectus.kafka.ui.controller;

+import static com.provectus.kafka.ui.model.ConnectorActionDTO.RESTART;
+import static com.provectus.kafka.ui.model.ConnectorActionDTO.RESTART_ALL_TASKS;
+import static com.provectus.kafka.ui.model.ConnectorActionDTO.RESTART_FAILED_TASKS;
+
 import com.provectus.kafka.ui.api.KafkaConnectApi;
 import com.provectus.kafka.ui.model.ConnectDTO;
 import com.provectus.kafka.ui.model.ConnectorActionDTO;
@@ -17,6 +21,7 @@ import com.provectus.kafka.ui.service.KafkaConnectService;
 import com.provectus.kafka.ui.service.rbac.AccessControlService;
 import java.util.Comparator;
 import java.util.Map;
+import java.util.Set;
 import javax.validation.Valid;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
@@ -30,6 +35,8 @@ import reactor.core.publisher.Mono;
 @RequiredArgsConstructor
 @Slf4j
 public class KafkaConnectController extends AbstractController implements KafkaConnectApi {
+  private static final Set<ConnectorActionDTO> RESTART_ACTIONS
+      = Set.of(RESTART, RESTART_FAILED_TASKS, RESTART_ALL_TASKS);
   private final KafkaConnectService kafkaConnectService;
   private final AccessControlService accessControlService;

@@ -172,10 +179,17 @@ public class KafkaConnectController extends AbstractController implements KafkaC
                                                          ConnectorActionDTO action,
                                                          ServerWebExchange exchange) {

+    ConnectAction[] connectActions;
+    if (RESTART_ACTIONS.contains(action)) {
+      connectActions = new ConnectAction[] {ConnectAction.VIEW, ConnectAction.RESTART};
+    } else {
+      connectActions = new ConnectAction[] {ConnectAction.VIEW, ConnectAction.EDIT};
+    }
+
     Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder()
         .cluster(clusterName)
         .connect(connectName)
-        .connectActions(ConnectAction.VIEW, ConnectAction.EDIT)
+        .connectActions(connectActions)
         .build());

     return validateAccess.then(
@@ -253,16 +267,11 @@ public class KafkaConnectController extends AbstractController implements KafkaC
     if (orderBy == null) {
       return defaultComparator;
     }
-    switch (orderBy) {
-      case CONNECT:
-        return Comparator.comparing(FullConnectorInfoDTO::getConnect);
-      case TYPE:
-        return Comparator.comparing(FullConnectorInfoDTO::getType);
-      case STATUS:
-        return Comparator.comparing(fullConnectorInfoDTO -> fullConnectorInfoDTO.getStatus().getState());
-      case NAME:
-      default:
-        return defaultComparator;
-    }
+    return switch (orderBy) {
+      case CONNECT -> Comparator.comparing(FullConnectorInfoDTO::getConnect);
+      case TYPE -> Comparator.comparing(FullConnectorInfoDTO::getType);
+      case STATUS -> Comparator.comparing(fullConnectorInfoDTO -> fullConnectorInfoDTO.getStatus().getState());
+      default -> defaultComparator;
+    };
   }
 }

+ 7 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonToAvroConversionException.java

@@ -0,0 +1,7 @@
+package com.provectus.kafka.ui.exception;
+
+public class JsonToAvroConversionException extends ValidationException {
+  public JsonToAvroConversionException(String message) {
+    super(message);
+  }
+}
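
For readers unfamiliar with the codebase: the new exception extends ValidationException, so a malformed JSON payload surfaces as a validation error rather than an opaque 500. A minimal usage sketch (hypothetical helper, not part of the commit):

```java
import com.provectus.kafka.ui.exception.JsonToAvroConversionException;

class ConversionSketch {
  // Hypothetical helper: reject a non-numeric value destined for an Avro long field.
  static long parseLongField(String field, String raw) {
    try {
      return Long.parseLong(raw);
    } catch (NumberFormatException e) {
      throw new JsonToAvroConversionException(
          "value '" + raw + "' of field '" + field + "' is not a valid long");
    }
  }
}
```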

+ 3 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java

@@ -28,7 +28,7 @@ public class ConsumerGroupMapper {
     consumerGroup.setTopics(1); //for ui backward-compatibility, need to rm usage from ui
     consumerGroup.setGroupId(c.getGroupId());
     consumerGroup.setMembers(c.getMembers());
-    consumerGroup.setMessagesBehind(c.getMessagesBehind());
+    consumerGroup.setConsumerLag(c.getConsumerLag());
     consumerGroup.setSimple(c.isSimple());
     consumerGroup.setPartitionAssignor(c.getPartitionAssignor());
     consumerGroup.setState(mapConsumerGroupState(c.getState()));
@@ -54,7 +54,7 @@ public class ConsumerGroupMapper {
           .orElse(0L);

       partition.setEndOffset(endOffset.orElse(0L));
-      partition.setMessagesBehind(behind);
+      partition.setConsumerLag(behind);

       partitionMap.put(entry.getKey(), partition);
     }
@@ -80,7 +80,7 @@ public class ConsumerGroupMapper {
       InternalConsumerGroup c, T consumerGroup) {
     consumerGroup.setGroupId(c.getGroupId());
     consumerGroup.setMembers(c.getMembers().size());
-    consumerGroup.setMessagesBehind(c.getMessagesBehind());
+    consumerGroup.setConsumerLag(c.getConsumerLag());
     consumerGroup.setTopics(c.getTopicNum());
     consumerGroup.setSimple(c.isSimple());


+ 7 - 7
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java

@@ -21,7 +21,7 @@ public class InternalConsumerGroup {
   private final Collection<InternalMember> members;
   private final Map<TopicPartition, Long> offsets;
   private final Map<TopicPartition, Long> endOffsets;
-  private final Long messagesBehind;
+  private final Long consumerLag;
   private final Integer topicNum;
   private final String partitionAssignor;
   private final ConsumerGroupState state;
@@ -50,17 +50,17 @@ public class InternalConsumerGroup {
     builder.members(internalMembers);
     builder.offsets(groupOffsets);
     builder.endOffsets(topicEndOffsets);
-    builder.messagesBehind(calculateMessagesBehind(groupOffsets, topicEndOffsets));
+    builder.consumerLag(calculateConsumerLag(groupOffsets, topicEndOffsets));
     builder.topicNum(calculateTopicNum(groupOffsets, internalMembers));
     Optional.ofNullable(description.coordinator()).ifPresent(builder::coordinator);
     return builder.build();
   }

-  private static Long calculateMessagesBehind(Map<TopicPartition, Long> offsets, Map<TopicPartition, Long> endOffsets) {
-    Long messagesBehind = null;
-    // messagesBehind should be undefined if no committed offsets found for topic
+  private static Long calculateConsumerLag(Map<TopicPartition, Long> offsets, Map<TopicPartition, Long> endOffsets) {
+    Long consumerLag = null;
+    // consumerLag should be undefined if no committed offsets found for topic
     if (!offsets.isEmpty()) {
-      messagesBehind = offsets.entrySet().stream()
+      consumerLag = offsets.entrySet().stream()
           .mapToLong(e ->
               Optional.ofNullable(endOffsets)
                   .map(o -> o.get(e.getKey()))
@@ -69,7 +69,7 @@ public class InternalConsumerGroup {
           ).sum();
     }

-    return messagesBehind;
+    return consumerLag;
   }

   private static Integer calculateTopicNum(Map<TopicPartition, Long> offsets, Collection<InternalMember> members) {

+ 4 - 4
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalTopicConsumerGroup.java

@@ -17,7 +17,7 @@ public class InternalTopicConsumerGroup {
   String groupId;
   int members;
   @Nullable
-  Long messagesBehind; //null means no committed offsets found for this group
+  Long consumerLag; //null means no committed offsets found for this group
   boolean isSimple;
   String partitionAssignor;
   ConsumerGroupState state;
@@ -37,7 +37,7 @@ public class InternalTopicConsumerGroup {
                 .filter(m -> m.assignment().topicPartitions().stream().anyMatch(p -> p.topic().equals(topic)))
                 .count()
         )
-        .messagesBehind(calculateMessagesBehind(committedOffsets, endOffsets))
+        .consumerLag(calculateConsumerLag(committedOffsets, endOffsets))
        .isSimple(g.isSimpleConsumerGroup())
        .partitionAssignor(g.partitionAssignor())
        .state(g.state())
@@ -46,8 +46,8 @@ public class InternalTopicConsumerGroup {
   }

   @Nullable
-  private static Long calculateMessagesBehind(Map<TopicPartition, Long> committedOffsets,
-                                              Map<TopicPartition, Long> endOffsets) {
+  private static Long calculateConsumerLag(Map<TopicPartition, Long> committedOffsets,
+                                           Map<TopicPartition, Long> endOffsets) {
     if (committedOffsets.isEmpty()) {
       return null;
     }

+ 24 - 4
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdeInstance.java

@@ -42,19 +42,39 @@ public class SerdeInstance implements Closeable {
   }

   public Optional<SchemaDescription> getSchema(String topic, Serde.Target type) {
-    return wrapWithClassloader(() -> serde.getSchema(topic, type));
+    try {
+      return wrapWithClassloader(() -> serde.getSchema(topic, type));
+    } catch (Exception e) {
+      log.warn("Error getting schema for '{}'({}) with serde '{}'", topic, type, name, e);
+      return Optional.empty();
+    }
   }

   public Optional<String> description() {
-    return wrapWithClassloader(serde::getDescription);
+    try {
+      return wrapWithClassloader(serde::getDescription);
+    } catch (Exception e) {
+      log.warn("Error getting description serde '{}'", name, e);
+      return Optional.empty();
+    }
   }

   public boolean canSerialize(String topic, Serde.Target type) {
-    return wrapWithClassloader(() -> serde.canSerialize(topic, type));
+    try {
+      return wrapWithClassloader(() -> serde.canSerialize(topic, type));
+    } catch (Exception e) {
+      log.warn("Error calling canSerialize for '{}'({}) with serde '{}'", topic, type, name, e);
+      return false;
+    }
   }

   public boolean canDeserialize(String topic, Serde.Target type) {
-    return wrapWithClassloader(() -> serde.canDeserialize(topic, type));
+    try {
+      return wrapWithClassloader(() -> serde.canDeserialize(topic, type));
+    } catch (Exception e) {
+      log.warn("Error calling canDeserialize for '{}'({}) with serde '{}'", topic, type, name, e);
+      return false;
+    }
   }

   public Serde.Serializer serializer(String topic, Serde.Target type) {
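
The change above makes SerdeInstance fail soft. A sketch (hypothetical caller, not from this commit) of why swallowing the exception and returning a fallback matters when iterating over pluggable serdes:

```java
import java.util.List;
import java.util.Optional;

class SerdeResolutionSketch {
  // With the patch, one broken custom serde no longer aborts resolution:
  // canDeserialize() now returns false on internal errors instead of throwing,
  // so the stream simply moves on to the next candidate.
  static Optional<SerdeInstance> pickSerde(List<SerdeInstance> serdes,
                                           String topic, Serde.Target type) {
    return serdes.stream()
        .filter(s -> s.canDeserialize(topic, type))
        .findFirst();
  }
}
```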

+ 4 - 2
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/AvroSchemaRegistrySerializer.java

@@ -1,12 +1,13 @@
 package com.provectus.kafka.ui.serdes.builtin.sr;
 package com.provectus.kafka.ui.serdes.builtin.sr;

+import com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion;
 import io.confluent.kafka.schemaregistry.ParsedSchema;
 import io.confluent.kafka.schemaregistry.avro.AvroSchema;
-import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
 import io.confluent.kafka.schemaregistry.client.SchemaMetadata;
 import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
 import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
 import io.confluent.kafka.serializers.KafkaAvroSerializer;
+import io.confluent.kafka.serializers.KafkaAvroSerializerConfig;
 import java.util.Map;
 import org.apache.kafka.common.serialization.Serializer;

@@ -25,6 +26,7 @@ class AvroSchemaRegistrySerializer extends SchemaRegistrySerializer<Object> {
         Map.of(
             "schema.registry.url", "wontbeused",
             AbstractKafkaSchemaSerDeConfig.AUTO_REGISTER_SCHEMAS, false,
+            KafkaAvroSerializerConfig.AVRO_USE_LOGICAL_TYPE_CONVERTERS_CONFIG, true,
             AbstractKafkaSchemaSerDeConfig.USE_LATEST_VERSION, true
         ),
         isKey
@@ -35,7 +37,7 @@ class AvroSchemaRegistrySerializer extends SchemaRegistrySerializer<Object> {
   @Override
   protected Object serialize(String value, ParsedSchema schema) {
     try {
-      return AvroSchemaUtils.toObject(value, (AvroSchema) schema);
+      return JsonAvroConversion.convertJsonToAvro(value, ((AvroSchema) schema).rawSchema());
     } catch (Throwable e) {
       throw new RuntimeException("Failed to serialize record for topic " + topic, e);
     }
     }
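
A minimal round-trip sketch of the JsonAvroConversion helper introduced in this commit, assuming only the signatures visible in the diffs: convertJsonToAvro(String, org.apache.avro.Schema) and convertAvroToJson(Object, Schema), the latter returning a Jackson node:

```java
import com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion;
import org.apache.avro.Schema;

class JsonAvroRoundTripSketch {
  public static void main(String[] args) {
    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
            + "{\"name\":\"id\",\"type\":\"long\"},"
            + "{\"name\":\"name\",\"type\":\"string\"}]}");
    // JSON -> Avro object, as the serializer above now does
    Object avro = JsonAvroConversion.convertJsonToAvro("{\"id\":1,\"name\":\"alice\"}", schema);
    // Avro object -> JSON, as the AvroMessageFormatter below now does
    System.out.println(JsonAvroConversion.convertAvroToJson(avro, schema));
  }
}
```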

+ 14 - 5
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java

@@ -3,9 +3,12 @@ package com.provectus.kafka.ui.serdes.builtin.sr;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.google.protobuf.Message;
 import com.google.protobuf.util.JsonFormat;
+import com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion;
 import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
 import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
+import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
 import io.confluent.kafka.serializers.KafkaAvroDeserializer;
+import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig;
 import io.confluent.kafka.serializers.json.KafkaJsonSchemaDeserializer;
 import io.confluent.kafka.serializers.protobuf.KafkaProtobufDeserializer;
 import java.util.Map;
@@ -28,16 +31,22 @@ interface MessageFormatter {

     AvroMessageFormatter(SchemaRegistryClient client) {
       this.avroDeserializer = new KafkaAvroDeserializer(client);
+      this.avroDeserializer.configure(
+          Map.of(
+              AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "wontbeused",
+              KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, false,
+              KafkaAvroDeserializerConfig.SCHEMA_REFLECTION_CONFIG, false,
+              KafkaAvroDeserializerConfig.AVRO_USE_LOGICAL_TYPE_CONVERTERS_CONFIG, true
+          ),
+          false
+      );
     }

     @Override
-    @SneakyThrows
     public String format(String topic, byte[] value) {
-      // deserialized will have type, that depends on schema type (record or primitive),
-      // AvroSchemaUtils.toJson(...) method will take it into account
       Object deserialized = avroDeserializer.deserialize(topic, value);
-      byte[] jsonBytes = AvroSchemaUtils.toJson(deserialized);
-      return new String(jsonBytes);
+      var schema = AvroSchemaUtils.getSchema(deserialized);
+      return JsonAvroConversion.convertAvroToJson(deserialized, schema).toString();
     }
   }


+ 33 - 37
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java

@@ -189,39 +189,40 @@ public class SchemaRegistrySerde implements BuiltInSerde {
   public Optional<SchemaDescription> getSchema(String topic, Target type) {
     String subject = schemaSubject(topic, type);
     return getSchemaBySubject(subject)
-        .map(schemaMetadata ->
-            new SchemaDescription(
-                convertSchema(schemaMetadata),
-                Map.of(
-                    "subject", subject,
-                    "schemaId", schemaMetadata.getId(),
-                    "latestVersion", schemaMetadata.getVersion(),
-                    "type", schemaMetadata.getSchemaType() // AVRO / PROTOBUF / JSON
-                )
-            ));
+        .flatMap(schemaMetadata ->
+            //schema can be not-found, when schema contexts configured improperly
+            getSchemaById(schemaMetadata.getId())
+                .map(parsedSchema ->
+                    new SchemaDescription(
+                        convertSchema(schemaMetadata, parsedSchema),
+                        Map.of(
+                            "subject", subject,
+                            "schemaId", schemaMetadata.getId(),
+                            "latestVersion", schemaMetadata.getVersion(),
+                            "type", schemaMetadata.getSchemaType() // AVRO / PROTOBUF / JSON
+                        )
+                    )));
   }

   @SneakyThrows
-  private String convertSchema(SchemaMetadata schema) {
+  private String convertSchema(SchemaMetadata schema, ParsedSchema parsedSchema) {
     URI basePath = new URI(schemaRegistryUrls.get(0))
         .resolve(Integer.toString(schema.getId()));
-    ParsedSchema schemaById = schemaRegistryClient.getSchemaById(schema.getId());
     SchemaType schemaType = SchemaType.fromString(schema.getSchemaType())
         .orElseThrow(() -> new IllegalStateException("Unknown schema type: " + schema.getSchemaType()));
-    switch (schemaType) {
-      case PROTOBUF:
-        return new ProtobufSchemaConverter()
-            .convert(basePath, ((ProtobufSchema) schemaById).toDescriptor())
-            .toJson();
-      case AVRO:
-        return new AvroJsonSchemaConverter()
-            .convert(basePath, ((AvroSchema) schemaById).rawSchema())
-            .toJson();
-      case JSON:
-        return schema.getSchema();
-      default:
-        throw new IllegalStateException();
-    }
+    return switch (schemaType) {
+      case PROTOBUF -> new ProtobufSchemaConverter()
+          .convert(basePath, ((ProtobufSchema) parsedSchema).toDescriptor())
+          .toJson();
+      case AVRO -> new AvroJsonSchemaConverter()
+          .convert(basePath, ((AvroSchema) parsedSchema).rawSchema())
+          .toJson();
+      case JSON -> schema.getSchema();
+    };
+  }
+
+  private Optional<ParsedSchema> getSchemaById(int id) {
+    return wrapWith404Handler(() -> schemaRegistryClient.getSchemaById(id));
   }

   private Optional<SchemaMetadata> getSchemaBySubject(String subject) {
@@ -253,16 +254,11 @@ public class SchemaRegistrySerde implements BuiltInSerde {
     boolean isKey = type == Target.KEY;
     SchemaType schemaType = SchemaType.fromString(schema.getSchemaType())
         .orElseThrow(() -> new IllegalStateException("Unknown schema type: " + schema.getSchemaType()));
-    switch (schemaType) {
-      case PROTOBUF:
-        return new ProtobufSchemaRegistrySerializer(topic, isKey, schemaRegistryClient, schema);
-      case AVRO:
-        return new AvroSchemaRegistrySerializer(topic, isKey, schemaRegistryClient, schema);
-      case JSON:
-        return new JsonSchemaSchemaRegistrySerializer(topic, isKey, schemaRegistryClient, schema);
-      default:
-        throw new IllegalStateException();
-    }
+    return switch (schemaType) {
+      case PROTOBUF -> new ProtobufSchemaRegistrySerializer(topic, isKey, schemaRegistryClient, schema);
+      case AVRO -> new AvroSchemaRegistrySerializer(topic, isKey, schemaRegistryClient, schema);
+      case JSON -> new JsonSchemaSchemaRegistrySerializer(topic, isKey, schemaRegistryClient, schema);
+    };
   }

   @Override
@@ -297,7 +293,7 @@ public class SchemaRegistrySerde implements BuiltInSerde {
   }

   private SchemaType getMessageFormatBySchemaId(int schemaId) {
-    return wrapWith404Handler(() -> schemaRegistryClient.getSchemaById(schemaId))
+    return getSchemaById(schemaId)
         .map(ParsedSchema::schemaType)
         .flatMap(SchemaType::fromString)
         .orElseThrow(() -> new ValidationException(String.format("Schema for id '%d' not found ", schemaId)));

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java

@@ -164,7 +164,7 @@ public class ConsumerGroupService {
       case MESSAGES_BEHIND -> {

         Comparator<GroupWithDescr> comparator = Comparator.comparingLong(gwd ->
-            gwd.icg.getMessagesBehind() == null ? 0L : gwd.icg.getMessagesBehind());
+            gwd.icg.getConsumerLag() == null ? 0L : gwd.icg.getConsumerLag());

         yield loadDescriptionsByInternalConsumerGroups(ac, groups, comparator, pageNum, perPage, sortOrderDto);
       }

+ 12 - 5
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConfigSanitizer.java

@@ -5,11 +5,13 @@ import static java.util.regex.Pattern.CASE_INSENSITIVE;
 import com.google.common.collect.ImmutableList;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
+import javax.annotation.Nullable;
 import org.apache.kafka.common.config.ConfigDef;
 import org.apache.kafka.common.config.SaslConfigs;
 import org.apache.kafka.common.config.SslConfigs;
@@ -17,7 +19,7 @@ import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Component;

 @Component
-class KafkaConfigSanitizer  {
+class KafkaConfigSanitizer {

   private static final String SANITIZED_VALUE = "******";

@@ -65,10 +67,8 @@ class KafkaConfigSanitizer  {
         .collect(Collectors.toSet());
   }

-  public Object sanitize(String key, Object value) {
-    if (value == null) {
-      return null;
-    }
+  @Nullable
+  public Object sanitize(String key, @Nullable Object value) {
     for (Pattern pattern : sanitizeKeysPatterns) {
       if (pattern.matcher(key).matches()) {
         return SANITIZED_VALUE;
@@ -77,5 +77,12 @@ class KafkaConfigSanitizer  {
     return value;
   }

+  public Map<String, Object> sanitizeConnectorConfig(@Nullable Map<String, Object> original) {
+    var result = new HashMap<String, Object>(); //null-values supporting map!
+    if (original != null) {
+      original.forEach((k, v) -> result.put(k, sanitize(k, v)));
+    }
+    return result;
+  }

 }
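
A small standalone sketch (hypothetical names, simplified key matching in place of the real regex patterns) of why sanitizeConnectorConfig collects into a HashMap rather than Collectors.toMap: connector configs can carry null values, which Collectors.toMap rejects with a NullPointerException, while HashMap.put accepts them:

```java
import java.util.HashMap;
import java.util.Map;

class SanitizeSketch {
  static final String SANITIZED_VALUE = "******";

  // Stand-in for the real regex-based sanitize(key, value)
  static Object sanitize(String key, Object value) {
    return key.toLowerCase().contains("password") ? SANITIZED_VALUE : value;
  }

  static Map<String, Object> sanitizeConnectorConfig(Map<String, Object> original) {
    var result = new HashMap<String, Object>(); // tolerates null values
    if (original != null) {
      original.forEach((k, v) -> result.put(k, sanitize(k, v)));
    }
    return result;
  }

  public static void main(String[] args) {
    var config = new HashMap<String, Object>();
    config.put("connection.password", "secret");
    config.put("errors.deadletterqueue.topic.name", null); // kept as-is, no NPE
    System.out.println(sanitizeConnectorConfig(config));
  }
}
```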

+ 4 - 15
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java

@@ -24,7 +24,6 @@ import com.provectus.kafka.ui.model.NewConnectorDTO;
 import com.provectus.kafka.ui.model.TaskDTO;
 import com.provectus.kafka.ui.model.connect.InternalConnectInfo;
 import com.provectus.kafka.ui.util.ReactiveFailover;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
@@ -176,19 +175,14 @@ public class KafkaConnectService {
                         e -> emptyStatus(connectorName))
                     .map(connectorStatus -> {
                       var status = connectorStatus.getConnector();
-                      final Map<String, Object> obfuscatedConfig = connector.getConfig().entrySet()
-                          .stream()
-                          .collect(Collectors.toMap(
-                              Map.Entry::getKey,
-                              e -> kafkaConfigSanitizer.sanitize(e.getKey(), e.getValue())
-                          ));
-                      ConnectorDTO result = (ConnectorDTO) new ConnectorDTO()
+                      var sanitizedConfig = kafkaConfigSanitizer.sanitizeConnectorConfig(connector.getConfig());
+                      ConnectorDTO result = new ConnectorDTO()
                           .connect(connectName)
                           .status(kafkaConnectMapper.fromClient(status))
                           .type(connector.getType())
                           .tasks(connector.getTasks())
                           .name(connector.getName())
-                          .config(obfuscatedConfig);
+                          .config(sanitizedConfig);

                       if (connectorStatus.getTasks() != null) {
                         boolean isAnyTaskFailed = connectorStatus.getTasks().stream()
@@ -217,12 +211,7 @@ public class KafkaConnectService {
                                                       String connectorName) {
     return api(cluster, connectName)
         .mono(c -> c.getConnectorConfig(connectorName))
-        .map(connectorConfig -> {
-          final Map<String, Object> obfuscatedMap = new HashMap<>();
-          connectorConfig.forEach((key, value) ->
-              obfuscatedMap.put(key, kafkaConfigSanitizer.sanitize(key, value)));
-          return obfuscatedMap;
-        });
+        .map(kafkaConfigSanitizer::sanitizeConnectorConfig);
   }

   public Mono<ConnectorDTO> setConnectorConfig(KafkaCluster cluster, String connectName,

+ 1 - 2
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/GithubReleaseInfo.java

@@ -3,7 +3,6 @@ package com.provectus.kafka.ui.util;
 import com.google.common.annotations.VisibleForTesting;
 import java.time.Duration;
 import lombok.extern.slf4j.Slf4j;
-import org.springframework.web.reactive.function.client.WebClient;
 import reactor.core.publisher.Mono;
 
 @Slf4j
@@ -31,7 +30,7 @@ public class GithubReleaseInfo {
 
   @VisibleForTesting
   GithubReleaseInfo(String url) {
-    this.refreshMono = WebClient.create()
+    this.refreshMono = new WebClientConfigurator().build()
         .get()
         .uri(url)
         .exchangeToMono(resp -> resp.bodyToMono(GithubReleaseDto.class))

+ 6 - 4
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/WebClientConfigurator.java

@@ -5,11 +5,8 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
 import com.provectus.kafka.ui.config.ClustersProperties;
 import com.provectus.kafka.ui.exception.ValidationException;
-import io.netty.buffer.ByteBufAllocator;
-import io.netty.handler.ssl.JdkSslContext;
 import io.netty.handler.ssl.SslContext;
 import io.netty.handler.ssl.SslContextBuilder;
-import io.netty.handler.ssl.SslProvider;
 import java.io.FileInputStream;
 import java.security.KeyStore;
 import java.util.function.Consumer;
@@ -93,7 +90,12 @@ public class WebClientConfigurator {
     // Create webclient
     SslContext context = contextBuilder.build();
 
-    builder.clientConnector(new ReactorClientHttpConnector(HttpClient.create().secure(t -> t.sslContext(context))));
+    var httpClient = HttpClient
+        .create()
+        .secure(t -> t.sslContext(context))
+        .proxyWithSystemProperties();
+
+    builder.clientConnector(new ReactorClientHttpConnector(httpClient));
     return this;
   }
 

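The proxyWithSystemProperties() call added above makes every client built through WebClientConfigurator (including the GithubReleaseInfo client in the previous hunk) honor the JVM's standard proxy settings. A sketch of the properties Reactor Netty consults; the host and port values are placeholders:

    // equivalent to passing -Dhttps.proxyHost=... -Dhttps.proxyPort=... to the JVM
    System.setProperty("https.proxyHost", "proxy.example.com");
    System.setProperty("https.proxyPort", "3128");
    System.setProperty("http.nonProxyHosts", "localhost|127.0.0.1");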
+ 5 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java

@@ -5,6 +5,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 import java.util.stream.Collectors;
 import org.apache.avro.Schema;
 import reactor.util.function.Tuple2;
@@ -40,6 +41,10 @@ public class AvroJsonSchemaConverter implements JsonSchemaConverter<Schema> {
 
   private FieldSchema convertSchema(Schema schema,
                                     Map<String, FieldSchema> definitions, boolean isRoot) {
+    Optional<FieldSchema> logicalTypeSchema = JsonAvroConversion.LogicalTypeConversion.getJsonSchema(schema);
+    if (logicalTypeSchema.isPresent()) {
+      return logicalTypeSchema.get();
+    }
     if (!schema.isUnion()) {
       JsonType type = convertType(schema);
       switch (type.getType()) {
@@ -66,7 +71,6 @@ public class AvroJsonSchemaConverter implements JsonSchemaConverter<Schema> {
     }
   }
 
-
   // this method formats json-schema field in a way
   // to fit avro-> json encoding rules (https://avro.apache.org/docs/1.11.1/specification/_print/#json-encoding)
   private FieldSchema createUnionSchema(Schema schema, Map<String, FieldSchema> definitions) {
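With the logical-type short circuit above, an Avro field declared as, say, { "type": "int", "logicalType": "date" } is no longer exposed as a bare integer in the derived JSON schema; per the DATE entry of the LogicalTypeConversion enum added below, the resulting schema fragment should be:

    { "type": "string", "format": "date" }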

+ 503 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversion.java

@@ -0,0 +1,503 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.BooleanNode;
+import com.fasterxml.jackson.databind.node.DecimalNode;
+import com.fasterxml.jackson.databind.node.DoubleNode;
+import com.fasterxml.jackson.databind.node.FloatNode;
+import com.fasterxml.jackson.databind.node.IntNode;
+import com.fasterxml.jackson.databind.node.JsonNodeType;
+import com.fasterxml.jackson.databind.node.LongNode;
+import com.fasterxml.jackson.databind.node.NullNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.fasterxml.jackson.databind.node.TextNode;
+import com.google.common.collect.Lists;
+import com.provectus.kafka.ui.exception.JsonToAvroConversionException;
+import io.confluent.kafka.serializers.AvroData;
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
+import java.time.Instant;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.ZoneOffset;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.TimeUnit;
+import java.util.function.BiFunction;
+import java.util.stream.Stream;
+import lombok.SneakyThrows;
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericData;
+
+// json <-> avro
+public class JsonAvroConversion {
+
+  private static final JsonMapper MAPPER = new JsonMapper();
+
+  // converts json into an Object that is the expected input for KafkaAvroSerializer
+  // (with the AVRO_USE_LOGICAL_TYPE_CONVERTERS flag enabled!)
+  @SneakyThrows
+  public static Object convertJsonToAvro(String jsonString, Schema avroSchema) {
+    JsonNode rootNode = MAPPER.readTree(jsonString);
+    return convert(rootNode, avroSchema);
+  }
+
+  private static Object convert(JsonNode node, Schema avroSchema) {
+    return switch (avroSchema.getType()) {
+      case RECORD -> {
+        assertJsonType(node, JsonNodeType.OBJECT);
+        var rec = new GenericData.Record(avroSchema);
+        for (Schema.Field field : avroSchema.getFields()) {
+          if (node.has(field.name()) && !node.get(field.name()).isNull()) {
+            rec.put(field.name(), convert(node.get(field.name()), field.schema()));
+          }
+        }
+        yield rec;
+      }
+      case MAP -> {
+        assertJsonType(node, JsonNodeType.OBJECT);
+        var map = new LinkedHashMap<String, Object>();
+        var valueSchema = avroSchema.getValueType();
+        node.fields().forEachRemaining(f -> map.put(f.getKey(), convert(f.getValue(), valueSchema)));
+        yield map;
+      }
+      case ARRAY -> {
+        assertJsonType(node, JsonNodeType.ARRAY);
+        var lst = new ArrayList<>();
+        node.elements().forEachRemaining(e -> lst.add(convert(e, avroSchema.getElementType())));
+        yield lst;
+      }
+      case ENUM -> {
+        assertJsonType(node, JsonNodeType.STRING);
+        String symbol = node.textValue();
+        if (!avroSchema.getEnumSymbols().contains(symbol)) {
+          throw new JsonToAvroConversionException("%s is not a part of enum symbols [%s]"
+              .formatted(symbol, avroSchema.getEnumSymbols()));
+        }
+        yield new GenericData.EnumSymbol(avroSchema, symbol);
+      }
+      case UNION -> {
+        // for union branches other than null, the payload should be an object with a single key == name of the branch type
+        // ex: schema = [ "null", "int", "string" ], possible payloads = null, { "string": "str" },  { "int": 123 }
+        if (node.isNull() && avroSchema.getTypes().contains(Schema.create(Schema.Type.NULL))) {
+          yield null;
+        }
+
+        assertJsonType(node, JsonNodeType.OBJECT);
+        var elements = Lists.newArrayList(node.fields());
+        if (elements.size() != 1) {
+          throw new JsonToAvroConversionException(
+              "UNION field value should be an object with single field == type name");
+        }
+        var typeNameToValue = elements.get(0);
+        for (Schema unionType : avroSchema.getTypes()) {
+          if (typeNameToValue.getKey().equals(unionType.getFullName())) {
+            yield convert(typeNameToValue.getValue(), unionType);
+          }
+        }
+        throw new JsonToAvroConversionException(
+            "json value '%s' is cannot be converted to any of union types [%s]"
+                .formatted(node, avroSchema.getTypes()));
+      }
+      case STRING -> {
+        if (isLogicalType(avroSchema)) {
+          yield processLogicalType(node, avroSchema);
+        }
+        assertJsonType(node, JsonNodeType.STRING);
+        yield node.textValue();
+      }
+      case LONG -> {
+        if (isLogicalType(avroSchema)) {
+          yield processLogicalType(node, avroSchema);
+        }
+        assertJsonType(node, JsonNodeType.NUMBER);
+        assertJsonNumberType(node, JsonParser.NumberType.LONG, JsonParser.NumberType.INT);
+        yield node.longValue();
+      }
+      case INT -> {
+        if (isLogicalType(avroSchema)) {
+          yield processLogicalType(node, avroSchema);
+        }
+        assertJsonType(node, JsonNodeType.NUMBER);
+        assertJsonNumberType(node, JsonParser.NumberType.INT);
+        yield node.intValue();
+      }
+      case FLOAT -> {
+        assertJsonType(node, JsonNodeType.NUMBER);
+        assertJsonNumberType(node, JsonParser.NumberType.DOUBLE, JsonParser.NumberType.FLOAT);
+        yield node.floatValue();
+      }
+      case DOUBLE -> {
+        assertJsonType(node, JsonNodeType.NUMBER);
+        assertJsonNumberType(node, JsonParser.NumberType.DOUBLE, JsonParser.NumberType.FLOAT);
+        yield node.doubleValue();
+      }
+      case BOOLEAN -> {
+        assertJsonType(node, JsonNodeType.BOOLEAN);
+        yield node.booleanValue();
+      }
+      case NULL -> {
+        assertJsonType(node, JsonNodeType.NULL);
+        yield null;
+      }
+      case BYTES -> {
+        if (isLogicalType(avroSchema)) {
+          yield processLogicalType(node, avroSchema);
+        }
+        assertJsonType(node, JsonNodeType.STRING);
+        // logic copied from JsonDecoder::readBytes
+        yield ByteBuffer.wrap(node.textValue().getBytes(StandardCharsets.ISO_8859_1));
+      }
+      case FIXED -> {
+        if (isLogicalType(avroSchema)) {
+          yield processLogicalType(node, avroSchema);
+        }
+        assertJsonType(node, JsonNodeType.STRING);
+        byte[] bytes = node.textValue().getBytes(StandardCharsets.ISO_8859_1);
+        if (bytes.length != avroSchema.getFixedSize()) {
+          throw new JsonToAvroConversionException(
+              "Fixed field has unexpected size %d (should be %d)"
+                  .formatted(bytes.length, avroSchema.getFixedSize()));
+        }
+        yield new GenericData.Fixed(avroSchema, bytes);
+      }
+    };
+  }
+
+  // converts output of KafkaAvroDeserializer (with AVRO_USE_LOGICAL_TYPE_CONVERTERS flag enabled!) into json.
+  // Note: conversion should be compatible with AvroJsonSchemaConverter logic!
+  public static JsonNode convertAvroToJson(Object obj, Schema avroSchema) {
+    if (obj == null) {
+      return NullNode.getInstance();
+    }
+    return switch (avroSchema.getType()) {
+      case RECORD -> {
+        var rec = (GenericData.Record) obj;
+        ObjectNode node = MAPPER.createObjectNode();
+        for (Schema.Field field : avroSchema.getFields()) {
+          var fieldVal = rec.get(field.name());
+          if (fieldVal != null) {
+            node.set(field.name(), convertAvroToJson(fieldVal, field.schema()));
+          }
+        }
+        yield node;
+      }
+      case MAP -> {
+        ObjectNode node = MAPPER.createObjectNode();
+        ((Map) obj).forEach((k, v) -> node.set(k.toString(), convertAvroToJson(v, avroSchema.getValueType())));
+        yield node;
+      }
+      case ARRAY -> {
+        var list = (List<Object>) obj;
+        ArrayNode node = MAPPER.createArrayNode();
+        list.forEach(e -> node.add(convertAvroToJson(e, avroSchema.getElementType())));
+        yield node;
+      }
+      case ENUM -> {
+        yield new TextNode(obj.toString());
+      }
+      case UNION -> {
+        ObjectNode node = MAPPER.createObjectNode();
+        int unionIdx = AvroData.getGenericData().resolveUnion(avroSchema, obj);
+        Schema unionType = avroSchema.getTypes().get(unionIdx);
+        node.set(unionType.getFullName(), convertAvroToJson(obj, unionType));
+        yield node;
+      }
+      case STRING -> {
+        if (isLogicalType(avroSchema)) {
+          yield processLogicalType(obj, avroSchema);
+        }
+        yield new TextNode(obj.toString());
+      }
+      case LONG -> {
+        if (isLogicalType(avroSchema)) {
+          yield processLogicalType(obj, avroSchema);
+        }
+        yield new LongNode((Long) obj);
+      }
+      case INT -> {
+        if (isLogicalType(avroSchema)) {
+          yield processLogicalType(obj, avroSchema);
+        }
+        yield new IntNode((Integer) obj);
+      }
+      case FLOAT -> new FloatNode((Float) obj);
+      case DOUBLE -> new DoubleNode((Double) obj);
+      case BOOLEAN -> BooleanNode.valueOf((Boolean) obj);
+      case NULL -> NullNode.getInstance();
+      case BYTES -> {
+        if (isLogicalType(avroSchema)) {
+          yield processLogicalType(obj, avroSchema);
+        }
+        ByteBuffer bytes = (ByteBuffer) obj;
+        //see JsonEncoder::writeByteArray
+        yield new TextNode(new String(bytes.array(), StandardCharsets.ISO_8859_1));
+      }
+      case FIXED -> {
+        if (isLogicalType(avroSchema)) {
+          yield processLogicalType(obj, avroSchema);
+        }
+        var fixed = (GenericData.Fixed) obj;
+        yield new TextNode(new String(fixed.bytes(), StandardCharsets.ISO_8859_1));
+      }
+    };
+  }
+
+  private static Object processLogicalType(JsonNode node, Schema schema) {
+    return findConversion(schema)
+        .map(c -> c.jsonToAvroConversion.apply(node, schema))
+        .orElseThrow(() ->
+            new JsonToAvroConversionException("'%s' logical type is not supported"
+                .formatted(schema.getLogicalType().getName())));
+  }
+
+  private static JsonNode processLogicalType(Object obj, Schema schema) {
+    return findConversion(schema)
+        .map(c -> c.avroToJsonConversion.apply(obj, schema))
+        .orElseThrow(() ->
+            new JsonToAvroConversionException("'%s' logical type is not supported"
+                .formatted(schema.getLogicalType().getName())));
+  }
+
+  private static Optional<LogicalTypeConversion> findConversion(Schema schema) {
+    String logicalTypeName = schema.getLogicalType().getName();
+    return Stream.of(LogicalTypeConversion.values())
+        .filter(t -> t.name.equalsIgnoreCase(logicalTypeName))
+        .findFirst();
+  }
+
+  private static boolean isLogicalType(Schema schema) {
+    return schema.getLogicalType() != null;
+  }
+
+  private static void assertJsonType(JsonNode node, JsonNodeType... allowedTypes) {
+    if (Stream.of(allowedTypes).noneMatch(t -> node.getNodeType() == t)) {
+      throw new JsonToAvroConversionException(
+          "%s node has unexpected type, allowed types %s, actual type %s"
+              .formatted(node, Arrays.toString(allowedTypes), node.getNodeType()));
+    }
+  }
+
+  private static void assertJsonNumberType(JsonNode node, JsonParser.NumberType... allowedTypes) {
+    if (Stream.of(allowedTypes).noneMatch(t -> node.numberType() == t)) {
+      throw new JsonToAvroConversionException(
+          "%s node has unexpected numeric type, allowed types %s, actual type %s"
+              .formatted(node, Arrays.toString(allowedTypes), node.numberType()));
+    }
+  }
+
+  enum LogicalTypeConversion {
+
+    UUID("uuid",
+        (node, schema) -> {
+          assertJsonType(node, JsonNodeType.STRING);
+          return java.util.UUID.fromString(node.asText());
+        },
+        (obj, schema) -> {
+          return new TextNode(obj.toString());
+        },
+        new SimpleFieldSchema(
+            new SimpleJsonType(
+                JsonType.Type.STRING,
+                Map.of("format", new TextNode("uuid"))))
+    ),
+
+    DECIMAL("decimal",
+        (node, schema) -> {
+          if (node.isTextual()) {
+            return new BigDecimal(node.asText());
+          } else if (node.isNumber()) {
+            return new BigDecimal(node.numberValue().toString());
+          }
+          throw new JsonToAvroConversionException(
+              "node '%s' can't be converted to decimal logical type"
+                  .formatted(node));
+        },
+        (obj, schema) -> {
+          return new DecimalNode((BigDecimal) obj);
+        },
+        new SimpleFieldSchema(new SimpleJsonType(JsonType.Type.NUMBER))
+    ),
+
+    DATE("date",
+        (node, schema) -> {
+          if (node.isInt()) {
+            return LocalDate.ofEpochDay(node.intValue());
+          } else if (node.isTextual()) {
+            return LocalDate.parse(node.asText());
+          } else {
+            throw new JsonToAvroConversionException(
+                "node '%s' can't be converted to date logical type"
+                    .formatted(node));
+          }
+        },
+        (obj, schema) -> {
+          return new TextNode(obj.toString());
+        },
+        new SimpleFieldSchema(
+            new SimpleJsonType(
+                JsonType.Type.STRING,
+                Map.of("format", new TextNode("date"))))
+    ),
+
+    TIME_MILLIS("time-millis",
+        (node, schema) -> {
+          if (node.isIntegralNumber()) {
+            return LocalTime.ofNanoOfDay(TimeUnit.MILLISECONDS.toNanos(node.longValue()));
+          } else if (node.isTextual()) {
+            return LocalTime.parse(node.asText());
+          } else {
+            throw new JsonToAvroConversionException(
+                "node '%s' can't be converted to time-millis logical type"
+                    .formatted(node));
+          }
+        },
+        (obj, schema) -> {
+          return new TextNode(obj.toString());
+        },
+        new SimpleFieldSchema(
+            new SimpleJsonType(
+                JsonType.Type.STRING,
+                Map.of("format", new TextNode("time"))))
+    ),
+
+    TIME_MICROS("time-micros",
+        (node, schema) -> {
+          if (node.isIntegralNumber()) {
+            return LocalTime.ofNanoOfDay(TimeUnit.MICROSECONDS.toNanos(node.longValue()));
+          } else if (node.isTextual()) {
+            return LocalTime.parse(node.asText());
+          } else {
+            throw new JsonToAvroConversionException(
+                "node '%s' can't be converted to time-micros logical type"
+                    .formatted(node));
+          }
+        },
+        (obj, schema) -> {
+          return new TextNode(obj.toString());
+        },
+        new SimpleFieldSchema(
+            new SimpleJsonType(
+                JsonType.Type.STRING,
+                Map.of("format", new TextNode("time"))))
+    ),
+
+    TIMESTAMP_MILLIS("timestamp-millis",
+        (node, schema) -> {
+          if (node.isIntegralNumber()) {
+            return Instant.ofEpochMilli(node.longValue());
+          } else if (node.isTextual()) {
+            return Instant.parse(node.asText());
+          } else {
+            throw new JsonToAvroConversionException(
+                "node '%s' can't be converted to timestamp-millis logical type"
+                    .formatted(node));
+          }
+        },
+        (obj, schema) -> {
+          return new TextNode(obj.toString());
+        },
+        new SimpleFieldSchema(
+            new SimpleJsonType(
+                JsonType.Type.STRING,
+                Map.of("format", new TextNode("date-time"))))
+    ),
+
+    TIMESTAMP_MICROS("timestamp-micros",
+        (node, schema) -> {
+          if (node.isIntegralNumber()) {
+            // see TimeConversions.TimestampMicrosConversion for the reference implementation
+            long microsFromEpoch = node.longValue();
+            long epochSeconds = microsFromEpoch / (1_000_000L);
+            long nanoAdjustment = (microsFromEpoch % (1_000_000L)) * 1_000L;
+            return Instant.ofEpochSecond(epochSeconds, nanoAdjustment);
+          } else if (node.isTextual()) {
+            return Instant.parse(node.asText());
+          } else {
+            throw new JsonToAvroConversionException(
+                "node '%s' can't be converted to timestamp-millis logical type"
+                    .formatted(node));
+          }
+        },
+        (obj, schema) -> {
+          return new TextNode(obj.toString());
+        },
+        new SimpleFieldSchema(
+            new SimpleJsonType(
+                JsonType.Type.STRING,
+                Map.of("format", new TextNode("date-time"))))
+    ),
+
+    LOCAL_TIMESTAMP_MILLIS("local-timestamp-millis",
+        (node, schema) -> {
+          if (node.isTextual()) {
+            return LocalDateTime.parse(node.asText());
+          }
+          // delegates to the timestamp-millis conversion, then interprets the resulting Instant as UTC
+          Instant instant = (Instant) TIMESTAMP_MILLIS.jsonToAvroConversion.apply(node, schema);
+          return LocalDateTime.ofInstant(instant, ZoneOffset.UTC);
+        },
+        (obj, schema) -> {
+          return new TextNode(obj.toString());
+        },
+        new SimpleFieldSchema(
+            new SimpleJsonType(
+                JsonType.Type.STRING,
+                Map.of("format", new TextNode("date-time"))))
+    ),
+
+    LOCAL_TIMESTAMP_MICROS("local-timestamp-micros",
+        (node, schema) -> {
+          if (node.isTextual()) {
+            return LocalDateTime.parse(node.asText());
+          }
+          Instant instant = (Instant) TIMESTAMP_MICROS.jsonToAvroConversion.apply(node, schema);
+          return LocalDateTime.ofInstant(instant, ZoneOffset.UTC);
+        },
+        (obj, schema) -> {
+          return new TextNode(obj.toString());
+        },
+        new SimpleFieldSchema(
+            new SimpleJsonType(
+                JsonType.Type.STRING,
+                Map.of("format", new TextNode("date-time"))))
+    );
+
+    private final String name;
+    private final BiFunction<JsonNode, Schema, Object> jsonToAvroConversion;
+    private final BiFunction<Object, Schema, JsonNode> avroToJsonConversion;
+    private final FieldSchema jsonSchema;
+
+    LogicalTypeConversion(String name,
+                          BiFunction<JsonNode, Schema, Object> jsonToAvroConversion,
+                          BiFunction<Object, Schema, JsonNode> avroToJsonConversion,
+                          FieldSchema jsonSchema) {
+      this.name = name;
+      this.jsonToAvroConversion = jsonToAvroConversion;
+      this.avroToJsonConversion = avroToJsonConversion;
+      this.jsonSchema = jsonSchema;
+    }
+
+    static Optional<FieldSchema> getJsonSchema(Schema schema) {
+      if (schema.getLogicalType() == null) {
+        return Optional.empty();
+      }
+      String logicalTypeName = schema.getLogicalType().getName();
+      return Stream.of(JsonAvroConversion.LogicalTypeConversion.values())
+          .filter(t -> t.name.equalsIgnoreCase(logicalTypeName))
+          .map(c -> c.jsonSchema)
+          .findFirst();
+    }
+  }
+
+
+}
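A note on the union convention convert() implements: a non-null union value is wrapped in a single-key object whose key is the full name of the chosen branch, so the converter can disambiguate branches. For the schema [ "null", "string", "int" ], all of the following payloads are valid (mirroring the comment in the UNION case above):

    null
    { "string": "str" }
    { "int": 123 }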

+ 0 - 10
kafka-ui-api/src/main/resources/application-gauth.yml

@@ -1,10 +0,0 @@
-auth:
-  type: OAUTH2
-spring:
-  security:
-    oauth2:
-      client:
-        registration:
-          google:
-            client-id: [put your client id here]
-            client-secret: [put your client secret here]

+ 120 - 58
kafka-ui-api/src/main/resources/application-local.yml

@@ -5,15 +5,27 @@ logging:
     #org.springframework.http.codec.json.Jackson2JsonEncoder: DEBUG
     #org.springframework.http.codec.json.Jackson2JsonDecoder: DEBUG
     reactor.netty.http.server.AccessLog: INFO
+    org.springframework.security: DEBUG
 
 #server:
 #  port: 8080 #- Port in which kafka-ui will run.
 
+spring:
+  jmx:
+    enabled: true
+  ldap:
+    urls: ldap://localhost:10389
+    base: "cn={0},ou=people,dc=planetexpress,dc=com"
+    admin-user: "cn=admin,dc=planetexpress,dc=com"
+    admin-password: "GoodNewsEveryone"
+    user-filter-search-base: "dc=planetexpress,dc=com"
+    user-filter-search-filter: "(&(uid={0})(objectClass=inetOrgPerson))"
+    group-filter-search-base: "ou=people,dc=planetexpress,dc=com"
+
 kafka:
   clusters:
     - name: local
       bootstrapServers: localhost:9092
-      zookeeper: localhost:2181
       schemaRegistry: http://localhost:8085
       ksqldbServer: http://localhost:8088
       kafkaConnect:
@@ -22,63 +34,113 @@ kafka:
       metrics:
         port: 9997
         type: JMX
-  #    -
-  #      name: secondLocal
-  #      bootstrapServers: localhost:9093
-  #      zookeeper: localhost:2182
-  #      schemaRegistry: http://localhost:18085
-  #      kafkaConnect:
-  #        - name: first
-  #          address: http://localhost:8083
-  #      metrics:
-  #        port: 9998
-  #        type: JMX
-  #      read-only: true
-  #    -
-  #      name: localUsingProtobufFile
-  #      bootstrapServers: localhost:9092
-  #      protobufFile: messages.proto
-  #      protobufMessageName: GenericMessage
-  #      protobufMessageNameByTopic:
-  #        input-topic: InputMessage
-  #        output-topic: OutputMessage
-spring:
-  jmx:
-    enabled: true
+
+dynamic.config.enabled: true
+
+oauth2:
+  ldap:
+    activeDirectory: false
+    activeDirectory.domain: domain.com
 
 auth:
   type: DISABLED
-#  type: OAUTH2
-#  oauth2:
-#    client:
-#      cognito:
-#        clientId:
-#        clientSecret:
-#        scope: openid
-#        client-name: cognito
-#        provider: cognito
-#        redirect-uri: http://localhost:8080/login/oauth2/code/cognito
-#        authorization-grant-type: authorization_code
-#        issuer-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj
-#        jwk-set-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj/.well-known/jwks.json
-#        user-name-attribute: username
-#        custom-params:
-#          type: cognito
-#          logoutUrl: https://kafka-ui.auth.eu-central-1.amazoncognito.com/logout
-#      google:
-#        provider: google
-#        clientId:
-#        clientSecret:
-#        user-name-attribute: email
-#        custom-params:
-#          type: google
-#          allowedDomain: provectus.com
-#      github:
-#        provider: github
-#        clientId:
-#        clientSecret:
-#        scope:
-#          - read:org
-#        user-name-attribute: login
-#        custom-params:
-#          type: github
+  #  type: OAUTH2
+  #  type: LDAP
+  oauth2:
+    client:
+      cognito:
+        clientId: # CLIENT ID
+        clientSecret: # CLIENT SECRET
+        scope: openid
+        client-name: cognito
+        provider: cognito
+        redirect-uri: http://localhost:8080/login/oauth2/code/cognito
+        authorization-grant-type: authorization_code
+        issuer-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj
+        jwk-set-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj/.well-known/jwks.json
+        user-name-attribute: cognito:username
+        custom-params:
+          type: cognito
+          logoutUrl: https://kafka-ui.auth.eu-central-1.amazoncognito.com/logout
+      google:
+        provider: google
+        clientId: # CLIENT ID
+        clientSecret: # CLIENT SECRET
+        user-name-attribute: email
+        custom-params:
+          type: google
+          allowedDomain: provectus.com
+      github:
+        provider: github
+        clientId: # CLIENT ID
+        clientSecret: # CLIENT SECRET
+        scope:
+          - read:org
+        user-name-attribute: login
+        custom-params:
+          type: github
+
+rbac:
+  roles:
+    - name: "memelords"
+      clusters:
+        - local
+      subjects:
+        - provider: oauth_google
+          type: domain
+          value: "provectus.com"
+        - provider: oauth_google
+          type: user
+          value: "name@provectus.com"
+
+        - provider: oauth_github
+          type: organization
+          value: "provectus"
+        - provider: oauth_github
+          type: user
+          value: "memelord"
+
+        - provider: oauth_cognito
+          type: user
+          value: "username"
+        - provider: oauth_cognito
+          type: group
+          value: "memelords"
+
+        - provider: ldap
+          type: group
+          value: "admin_staff"
+
+        # NOT IMPLEMENTED YET
+      #        - provider: ldap_ad
+      #          type: group
+      #          value: "admin_staff"
+
+      permissions:
+        - resource: applicationconfig
+          actions: all
+
+        - resource: clusterconfig
+          actions: all
+
+        - resource: topic
+          value: ".*"
+          actions: all
+
+        - resource: consumer
+          value: ".*"
+          actions: all
+
+        - resource: schema
+          value: ".*"
+          actions: all
+
+        - resource: connect
+          value: "*"
+          actions: all
+
+        - resource: ksql
+          actions: all
+
+        - resource: acl
+          actions: all

+ 0 - 13
kafka-ui-api/src/main/resources/application-sdp.yml

@@ -1,13 +0,0 @@
-kafka:
-  clusters:
-    - name: local
-      bootstrapServers: b-1.kad-msk.57w67o.c6.kafka.eu-central-1.amazonaws.com:9094
-      properties:
-        security.protocol: SSL
-#      zookeeper: localhost:2181
-#      schemaRegistry: http://kad-ecs-application-lb-857515197.eu-west-1.elb.amazonaws.com:9000/api/schema-registry
-  #    -
-  #      name: secondLocal
-  #      zookeeper: zookeeper1:2181
-  #      bootstrapServers: kafka1:29092
-  #      schemaRegistry: http://schemaregistry1:8085

+ 171 - 5
kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java

@@ -2,13 +2,12 @@ package com.provectus.kafka.ui.serdes.builtin.sr;
 
 import static org.assertj.core.api.Assertions.assertThat;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.json.JsonMapper;
 import com.provectus.kafka.ui.serde.api.DeserializeResult;
 import com.provectus.kafka.ui.serde.api.SchemaDescription;
 import com.provectus.kafka.ui.serde.api.Serde;
+import com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion;
 import io.confluent.kafka.schemaregistry.avro.AvroSchema;
-import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
 import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient;
 import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
 import java.io.ByteArrayOutputStream;
@@ -54,7 +53,8 @@ class SchemaRegistrySerdeTest {
 
     SchemaDescription schemaDescription = schemaOptional.get();
     assertThat(schemaDescription.getSchema())
-        .contains("{\"$id\":\"int\",\"$schema\":\"https://json-schema.org/draft/2020-12/schema\",\"type\":\"integer\"}");
+        .contains(
+            "{\"$id\":\"int\",\"$schema\":\"https://json-schema.org/draft/2020-12/schema\",\"type\":\"integer\"}");
     assertThat(schemaDescription.getAdditionalProperties())
         .containsOnlyKeys("subject", "schemaId", "latestVersion", "type")
         .containsEntry("subject", subject)
@@ -189,7 +189,8 @@ class SchemaRegistrySerdeTest {
     assertThat(serde.canSerialize(topic, Serde.Target.VALUE)).isFalse();
   }
 
-  private void assertJsonsEqual(String expected, String actual) throws JsonProcessingException {
+  @SneakyThrows
+  private void assertJsonsEqual(String expected, String actual) {
     var mapper = new JsonMapper();
     assertThat(mapper.readTree(actual)).isEqualTo(mapper.readTree(expected));
   }
@@ -211,9 +212,174 @@ class SchemaRegistrySerdeTest {
     GenericDatumWriter<Object> writer = new GenericDatumWriter<>(schema.rawSchema());
     ByteArrayOutputStream output = new ByteArrayOutputStream();
     Encoder encoder = EncoderFactory.get().binaryEncoder(output, null);
-    writer.write(AvroSchemaUtils.toObject(json, schema), encoder);
+    writer.write(JsonAvroConversion.convertJsonToAvro(json, schema.rawSchema()), encoder);
     encoder.flush();
     return output.toByteArray();
   }
 
+  @Test
+  void avroFieldsRepresentationIsConsistentForSerializationAndDeserialization() throws Exception {
+    AvroSchema schema = new AvroSchema(
+        """
+             {
+               "type": "record",
+               "name": "TestAvroRecord",
+               "fields": [
+                 {
+                   "name": "f_int",
+                   "type": "int"
+                 },
+                 {
+                   "name": "f_long",
+                   "type": "long"
+                 },
+                 {
+                   "name": "f_string",
+                   "type": "string"
+                 },
+                 {
+                   "name": "f_boolean",
+                   "type": "boolean"
+                 },
+                 {
+                   "name": "f_float",
+                   "type": "float"
+                 },
+                 {
+                   "name": "f_double",
+                   "type": "double"
+                 },
+                 {
+                   "name": "f_enum",
+                   "type" : {
+                    "type": "enum",
+                    "name": "Suit",
+                    "symbols" : ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"]
+                   }
+                 },
+                 {
+                  "name": "f_map",
+                  "type": {
+                     "type": "map",
+                     "values" : "string",
+                     "default": {}
+                   }
+                 },
+                 {
+                  "name": "f_union",
+                  "type": ["null", "string", "int" ]
+                 },
+                 {
+                  "name": "f_optional_to_test_not_filled_case",
+                  "type": [ "null", "string"]
+                 },
+                 {
+                     "name" : "f_fixed",
+                     "type" : { "type" : "fixed" ,"size" : 8, "name": "long_encoded" }
+                   },
+                   {
+                     "name" : "f_bytes",
+                     "type": "bytes"
+                   }
+               ]
+            }"""
+    );
+
+    String jsonPayload = """
+        {
+          "f_int": 123,
+          "f_long": 4294967294,
+          "f_string": "string here",
+          "f_boolean": true,
+          "f_float": 123.1,
+          "f_double": 123456.123456,
+          "f_enum": "SPADES",
+          "f_map": { "k1": "string value" },
+          "f_union": { "int": 123 },
+          "f_fixed": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0004Ò",
+          "f_bytes": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\t)"
+        }
+        """;
+
+    registryClient.register("test-value", schema);
+    assertSerdeCycle("test", jsonPayload);
+  }
+
+  @Test
+  void avroLogicalTypesRepresentationIsConsistentForSerializationAndDeserialization() throws Exception {
+    AvroSchema schema = new AvroSchema(
+        """
+             {
+               "type": "record",
+               "name": "TestAvroRecord",
+               "fields": [
+                 {
+                   "name": "lt_date",
+                   "type": { "type": "int", "logicalType": "date" }
+                 },
+                 {
+                   "name": "lt_uuid",
+                   "type": { "type": "string", "logicalType": "uuid" }
+                 },
+                 {
+                   "name": "lt_decimal",
+                   "type": { "type": "bytes", "logicalType": "decimal", "precision": 22, "scale":10 }
+                 },
+                 {
+                   "name": "lt_time_millis",
+                   "type": { "type": "int", "logicalType": "time-millis"}
+                 },
+                 {
+                   "name": "lt_time_micros",
+                   "type": { "type": "long", "logicalType": "time-micros"}
+                 },
+                 {
+                   "name": "lt_timestamp_millis",
+                   "type": { "type": "long", "logicalType": "timestamp-millis" }
+                 },
+                 {
+                   "name": "lt_timestamp_micros",
+                   "type": { "type": "long", "logicalType": "timestamp-micros" }
+                 },
+                 {
+                   "name": "lt_local_timestamp_millis",
+                   "type": { "type": "long", "logicalType": "local-timestamp-millis" }
+                 },
+                 {
+                   "name": "lt_local_timestamp_micros",
+                   "type": { "type": "long", "logicalType": "local-timestamp-micros" }
+                 }
+               ]
+            }"""
+    );
+
+    String jsonPayload = """
+        {
+          "lt_date":"1991-08-14",
+          "lt_decimal": 2.1617413862327545E11,
+          "lt_time_millis": "10:15:30.001",
+          "lt_time_micros": "10:15:30.123456",
+          "lt_uuid": "a37b75ca-097c-5d46-6119-f0637922e908",
+          "lt_timestamp_millis": "2007-12-03T10:15:30.123Z",
+          "lt_timestamp_micros": "2007-12-03T10:15:30.123456Z",
+          "lt_local_timestamp_millis": "2017-12-03T10:15:30.123",
+          "lt_local_timestamp_micros": "2017-12-03T10:15:30.123456"
+        }
+        """;
+
+    registryClient.register("test-value", schema);
+    assertSerdeCycle("test", jsonPayload);
+  }
+
+  // 1. serialize input json to binary
+  // 2. deserialize from binary
+  // 3. check that deserialized version equal to input
+  void assertSerdeCycle(String topic, String jsonInput) {
+    byte[] serializedBytes = serde.serializer(topic, Serde.Target.VALUE).serialize(jsonInput);
+    var deserializedJson = serde.deserializer(topic, Serde.Target.VALUE)
+        .deserialize(null, serializedBytes)
+        .getResult();
+    assertJsonsEqual(jsonInput, deserializedJson);
+  }
+
 }
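The round-trip test above also exercises the timestamp-micros arithmetic from JsonAvroConversion. A worked example of the split performed there; the literal is 2007-12-03T10:15:30.123456Z expressed as microseconds since the epoch:

    long microsFromEpoch = 1_196_676_930_123_456L;
    long epochSeconds = microsFromEpoch / 1_000_000L;               // 1_196_676_930
    long nanoAdjustment = (microsFromEpoch % 1_000_000L) * 1_000L;  // 123_456_000 ns
    Instant ts = Instant.ofEpochSecond(epochSeconds, nanoAdjustment); // 2007-12-03T10:15:30.123456Z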

+ 24 - 4
kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/KafkaConfigSanitizerTest.java

@@ -3,14 +3,16 @@ package com.provectus.kafka.ui.service;
 import static org.assertj.core.api.Assertions.assertThat;
 
 import java.util.Arrays;
-import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import org.junit.jupiter.api.Test;
 
 class KafkaConfigSanitizerTest {
 
   @Test
   void doNothingIfEnabledPropertySetToFalse() {
-    final var sanitizer = new KafkaConfigSanitizer(false, Collections.emptyList());
+    final var sanitizer = new KafkaConfigSanitizer(false, List.of());
     assertThat(sanitizer.sanitize("password", "secret")).isEqualTo("secret");
     assertThat(sanitizer.sanitize("sasl.jaas.config", "secret")).isEqualTo("secret");
     assertThat(sanitizer.sanitize("database.password", "secret")).isEqualTo("secret");
@@ -18,7 +20,7 @@ class KafkaConfigSanitizerTest {
 
   @Test
   void obfuscateCredentials() {
-    final var sanitizer = new KafkaConfigSanitizer(true, Collections.emptyList());
+    final var sanitizer = new KafkaConfigSanitizer(true, List.of());
     assertThat(sanitizer.sanitize("sasl.jaas.config", "secret")).isEqualTo("******");
     assertThat(sanitizer.sanitize("consumer.sasl.jaas.config", "secret")).isEqualTo("******");
     assertThat(sanitizer.sanitize("producer.sasl.jaas.config", "secret")).isEqualTo("******");
@@ -36,7 +38,7 @@ class KafkaConfigSanitizerTest {
 
   @Test
   void notObfuscateNormalConfigs() {
-    final var sanitizer = new KafkaConfigSanitizer(true, Collections.emptyList());
+    final var sanitizer = new KafkaConfigSanitizer(true, List.of());
     assertThat(sanitizer.sanitize("security.protocol", "SASL_SSL")).isEqualTo("SASL_SSL");
     final String[] bootstrapServer = new String[] {"test1:9092", "test2:9092"};
     assertThat(sanitizer.sanitize("bootstrap.servers", bootstrapServer)).isEqualTo(bootstrapServer);
@@ -52,4 +54,22 @@ class KafkaConfigSanitizerTest {
     assertThat(sanitizer.sanitize("database.password", "no longer credential"))
             .isEqualTo("no longer credential");
   }
+
+  @Test
+  void sanitizeConnectorConfigDoNotFailOnNullableValues() {
+    Map<String, Object> originalConfig = new HashMap<>();
+    originalConfig.put("password", "secret");
+    originalConfig.put("asIs", "normal");
+    originalConfig.put("nullVal", null);
+
+    var sanitizedConfig = new KafkaConfigSanitizer(true, List.of())
+        .sanitizeConnectorConfig(originalConfig);
+
+    assertThat(sanitizedConfig)
+        .hasSize(3)
+        .containsEntry("password", "******")
+        .containsEntry("asIs", "normal")
+        .containsEntry("nullVal", null);
+  }
+
 }

+ 621 - 0
kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversionTest.java

@@ -0,0 +1,621 @@
+package com.provectus.kafka.ui.util.jsonschema;
+
+import static com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion.convertAvroToJson;
+import static com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion.convertJsonToAvro;
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import com.fasterxml.jackson.databind.node.BooleanNode;
+import com.fasterxml.jackson.databind.node.DoubleNode;
+import com.fasterxml.jackson.databind.node.FloatNode;
+import com.fasterxml.jackson.databind.node.IntNode;
+import com.fasterxml.jackson.databind.node.LongNode;
+import com.fasterxml.jackson.databind.node.TextNode;
+import com.google.common.primitives.Longs;
+import io.confluent.kafka.schemaregistry.avro.AvroSchema;
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
+import java.time.Instant;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import lombok.SneakyThrows;
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericData;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+
+class JsonAvroConversionTest {
+
+  // checking conversion from json to KafkaAvroSerializer-compatible avro objects
+  @Nested
+  class FromJsonToAvro {
+
+    @Test
+    void primitiveRoot() {
+      assertThat(convertJsonToAvro("\"str\"", createSchema("\"string\"")))
+          .isEqualTo("str");
+
+      assertThat(convertJsonToAvro("123", createSchema("\"int\"")))
+          .isEqualTo(123);
+
+      assertThat(convertJsonToAvro("123", createSchema("\"long\"")))
+          .isEqualTo(123L);
+
+      assertThat(convertJsonToAvro("123.123", createSchema("\"float\"")))
+          .isEqualTo(123.123F);
+
+      assertThat(convertJsonToAvro("12345.12345", createSchema("\"double\"")))
+          .isEqualTo(12345.12345);
+    }
+
+    @Test
+    void primitiveTypedFields() {
+      var schema = createSchema(
+          """
+               {
+                 "type": "record",
+                 "name": "TestAvroRecord",
+                 "fields": [
+                   {
+                     "name": "f_int",
+                     "type": "int"
+                   },
+                   {
+                     "name": "f_long",
+                     "type": "long"
+                   },
+                   {
+                     "name": "f_string",
+                     "type": "string"
+                   },
+                   {
+                     "name": "f_boolean",
+                     "type": "boolean"
+                   },
+                   {
+                     "name": "f_float",
+                     "type": "float"
+                   },
+                   {
+                     "name": "f_double",
+                     "type": "double"
+                   },
+                   {
+                     "name": "f_enum",
+                     "type" : {
+                      "type": "enum",
+                      "name": "Suit",
+                      "symbols" : ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"]
+                     }
+                   },
+                   {
+                     "name" : "f_fixed",
+                     "type" : { "type" : "fixed" ,"size" : 8, "name": "long_encoded" }
+                   },
+                   {
+                     "name" : "f_bytes",
+                     "type": "bytes"
+                   }
+                 ]
+              }"""
+      );
+
+      String jsonPayload = """
+          {
+            "f_int": 123,
+            "f_long": 4294967294,
+            "f_string": "string here",
+            "f_boolean": true,
+            "f_float": 123.1,
+            "f_double": 123456.123456,
+            "f_enum": "SPADES",
+            "f_fixed": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0004Ò",
+            "f_bytes": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\t)"
+          }
+          """;
+
+      var converted = convertJsonToAvro(jsonPayload, schema);
+      assertThat(converted).isInstanceOf(GenericData.Record.class);
+
+      var record = (GenericData.Record) converted;
+      assertThat(record.get("f_int")).isEqualTo(123);
+      assertThat(record.get("f_long")).isEqualTo(4294967294L);
+      assertThat(record.get("f_string")).isEqualTo("string here");
+      assertThat(record.get("f_boolean")).isEqualTo(true);
+      assertThat(record.get("f_float")).isEqualTo(123.1f);
+      assertThat(record.get("f_double")).isEqualTo(123456.123456);
+      assertThat(record.get("f_enum"))
+          .isEqualTo(
+              new GenericData.EnumSymbol(
+                  schema.getField("f_enum").schema(),
+                  "SPADES"
+              )
+          );
+      assertThat(((GenericData.Fixed) record.get("f_fixed")).bytes()).isEqualTo(Longs.toByteArray(1234L));
+      assertThat(((ByteBuffer) record.get("f_bytes")).array()).isEqualTo(Longs.toByteArray(2345L));
+    }
+
+    @Test
+    void unionRoot() {
+      var schema = createSchema("[ \"null\", \"string\", \"int\" ]");
+
+      var converted = convertJsonToAvro("{\"string\":\"string here\"}", schema);
+      assertThat(converted).isEqualTo("string here");
+
+      converted = convertJsonToAvro("{\"int\": 123}", schema);
+      assertThat(converted).isEqualTo(123);
+
+      converted = convertJsonToAvro("null", schema);
+      assertThat(converted).isEqualTo(null);
+    }
+
+    @Test
+    void unionField() {
+      var schema = createSchema(
+          """
+               {
+                 "type": "record",
+                 "namespace": "com.test",
+                 "name": "TestAvroRecord",
+                 "fields": [
+                   {
+                     "name": "f_union",
+                     "type": [ "null", "int", "TestAvroRecord"]
+                   }
+                 ]
+              }"""
+      );
+
+      String jsonPayload = "{ \"f_union\": null }";
+
+      var record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema);
+      assertThat(record.get("f_union")).isNull();
+
+      jsonPayload = "{ \"f_union\": { \"int\": 123 } }";
+      record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema);
+      assertThat(record.get("f_union")).isEqualTo(123);
+
+      //inner-record's name should be fully-qualified!
+      jsonPayload = "{ \"f_union\": { \"com.test.TestAvroRecord\": { \"f_union\": { \"int\": 123  } } } }";
+      record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema);
+      assertThat(record.get("f_union")).isInstanceOf(GenericData.Record.class);
+      var innerRec = (GenericData.Record) record.get("f_union");
+      assertThat(innerRec.get("f_union")).isEqualTo(123);
+    }
+
+    @Test
+    void mapField() {
+      var schema = createSchema(
+          """
+               {
+                 "type": "record",
+                 "name": "TestAvroRecord",
+                 "fields": [
+                   {
+                     "name": "long_map",
+                     "type": {
+                       "type": "map",
+                       "values" : "long",
+                       "default": {}
+                     }
+                   },
+                   {
+                     "name": "string_map",
+                     "type": {
+                       "type": "map",
+                       "values" : "string",
+                       "default": {}
+                     }
+                   },
+                   {
+                     "name": "self_ref_map",
+                     "type": {
+                       "type": "map",
+                       "values" : "TestAvroRecord",
+                       "default": {}
+                     }
+                   }
+                 ]
+              }"""
+      );
+
+      String jsonPayload = """
+          {
+            "long_map": {
+              "k1": 123,
+              "k2": 456
+            },
+            "string_map": {
+              "k3": "s1",
+              "k4": "s2"
+            },
+            "self_ref_map": {
+              "k5" : {
+                "long_map": { "_k1": 222 },
+                "string_map": { "_k2": "_s1" }
+              }
+            }
+          }
+          """;
+
+      var record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema);
+      assertThat(record.get("long_map"))
+          .isEqualTo(Map.of("k1", 123L, "k2", 456L));
+      assertThat(record.get("string_map"))
+          .isEqualTo(Map.of("k3", "s1", "k4", "s2"));
+      assertThat(record.get("self_ref_map"))
+          .isNotNull();
+
+      Map<String, Object> selfRefMapField = (Map<String, Object>) record.get("self_ref_map");
+      assertThat(selfRefMapField)
+          .hasSize(1)
+          .hasEntrySatisfying("k5", v -> {
+            assertThat(v).isInstanceOf(GenericData.Record.class);
+            var innerRec = (GenericData.Record) v;
+            assertThat(innerRec.get("long_map"))
+                .isEqualTo(Map.of("_k1", 222L));
+            assertThat(innerRec.get("string_map"))
+                .isEqualTo(Map.of("_k2", "_s1"));
+          });
+    }
+
+    @Test
+    void arrayField() {
+      var schema = createSchema(
+          """
+               {
+                 "type": "record",
+                 "name": "TestAvroRecord",
+                 "fields": [
+                   {
+                     "name": "f_array",
+                     "type": {
+                        "type": "array",
+                        "items" : "string",
+                        "default": []
+                      }
+                   }
+                 ]
+              }"""
+      );
+
+      String jsonPayload = """
+          {
+            "f_array": [ "e1", "e2" ]
+          }
+          """;
+
+      var record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema);
+      assertThat(record.get("f_array")).isEqualTo(List.of("e1", "e2"));
+    }
+
+    @Test
+    void logicalTypesField() {
+      var schema = createSchema(
+          """
+               {
+                 "type": "record",
+                 "name": "TestAvroRecord",
+                 "fields": [
+                   {
+                     "name": "lt_date",
+                     "type": { "type": "int", "logicalType": "date" }
+                   },
+                   {
+                     "name": "lt_uuid",
+                     "type": { "type": "string", "logicalType": "uuid" }
+                   },
+                   {
+                     "name": "lt_decimal",
+                     "type": { "type": "bytes", "logicalType": "decimal", "precision": 22, "scale":10 }
+                   },
+                   {
+                     "name": "lt_time_millis",
+                     "type": { "type": "int", "logicalType": "time-millis"}
+                   },
+                   {
+                     "name": "lt_time_micros",
+                     "type": { "type": "long", "logicalType": "time-micros"}
+                   },
+                   {
+                     "name": "lt_timestamp_millis",
+                     "type": { "type": "long", "logicalType": "timestamp-millis" }
+                   },
+                   {
+                     "name": "lt_timestamp_micros",
+                     "type": { "type": "long", "logicalType": "timestamp-micros" }
+                   },
+                   {
+                     "name": "lt_local_timestamp_millis",
+                     "type": { "type": "long", "logicalType": "local-timestamp-millis" }
+                   },
+                   {
+                     "name": "lt_local_timestamp_micros",
+                     "type": { "type": "long", "logicalType": "local-timestamp-micros" }
+                   }
+                 ]
+              }"""
+      );
+
+      String jsonPayload = """
+          {
+            "lt_date":"1991-08-14",
+            "lt_decimal": 2.1617413862327545E11,
+            "lt_time_millis": "10:15:30.001",
+            "lt_time_micros": "10:15:30.123456",
+            "lt_uuid": "a37b75ca-097c-5d46-6119-f0637922e908",
+            "lt_timestamp_millis": "2007-12-03T10:15:30.123Z",
+            "lt_timestamp_micros": "2007-12-13T10:15:30.123456Z",
+            "lt_local_timestamp_millis": "2017-12-03T10:15:30.123",
+            "lt_local_timestamp_micros": "2017-12-13T10:15:30.123456"
+          }
+          """;
+
+      var converted = convertJsonToAvro(jsonPayload, schema);
+      assertThat(converted).isInstanceOf(GenericData.Record.class);
+
+      var record = (GenericData.Record) converted;
+
+      assertThat(record.get("lt_date"))
+          .isEqualTo(LocalDate.of(1991, 8, 14));
+      assertThat(record.get("lt_decimal"))
+          .isEqualTo(new BigDecimal("2.1617413862327545E11"));
+      assertThat(record.get("lt_time_millis"))
+          .isEqualTo(LocalTime.parse("10:15:30.001"));
+      assertThat(record.get("lt_time_micros"))
+          .isEqualTo(LocalTime.parse("10:15:30.123456"));
+      assertThat(record.get("lt_timestamp_millis"))
+          .isEqualTo(Instant.parse("2007-12-03T10:15:30.123Z"));
+      assertThat(record.get("lt_timestamp_micros"))
+          .isEqualTo(Instant.parse("2007-12-13T10:15:30.123456Z"));
+      assertThat(record.get("lt_local_timestamp_millis"))
+          .isEqualTo(LocalDateTime.parse("2017-12-03T10:15:30.123"));
+      assertThat(record.get("lt_local_timestamp_micros"))
+          .isEqualTo(LocalDateTime.parse("2017-12-13T10:15:30.123456"));
+    }
+  }
+
+  // checking conversion of KafkaAvroDeserializer output to JsonNode
+  @Nested
+  class FromAvroToJson {
+
+    @Test
+    void primitiveRoot() {
+      assertThat(convertAvroToJson("str", createSchema("\"string\"")))
+          .isEqualTo(new TextNode("str"));
+
+      assertThat(convertAvroToJson(123, createSchema("\"int\"")))
+          .isEqualTo(new IntNode(123));
+
+      assertThat(convertAvroToJson(123L, createSchema("\"long\"")))
+          .isEqualTo(new LongNode(123));
+
+      assertThat(convertAvroToJson(123.1F, createSchema("\"float\"")))
+          .isEqualTo(new FloatNode(123.1F));
+
+      assertThat(convertAvroToJson(123.1, createSchema("\"double\"")))
+          .isEqualTo(new DoubleNode(123.1));
+
+      assertThat(convertAvroToJson(true, createSchema("\"boolean\"")))
+          .isEqualTo(BooleanNode.valueOf(true));
+
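+      // note: Avro "bytes" content is rendered as a JSON string with one
+      // character per byte (ISO-8859-1), as the assertion below demonstrates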
+      assertThat(convertAvroToJson(ByteBuffer.wrap(Longs.toByteArray(123L)), createSchema("\"bytes\"")))
+          .isEqualTo(new TextNode(new String(Longs.toByteArray(123L), StandardCharsets.ISO_8859_1)));
+    }
+
+    @SneakyThrows
+    @Test
+    void primitiveTypedFields() {
+      var schema = createSchema(
+          """
+               {
+                 "type": "record",
+                 "name": "TestAvroRecord",
+                 "fields": [
+                   {
+                     "name": "f_int",
+                     "type": "int"
+                   },
+                   {
+                     "name": "f_long",
+                     "type": "long"
+                   },
+                   {
+                     "name": "f_string",
+                     "type": "string"
+                   },
+                   {
+                     "name": "f_boolean",
+                     "type": "boolean"
+                   },
+                   {
+                     "name": "f_float",
+                     "type": "float"
+                   },
+                   {
+                     "name": "f_double",
+                     "type": "double"
+                   },
+                   {
+                     "name": "f_enum",
+                     "type" : {
+                      "type": "enum",
+                      "name": "Suit",
+                      "symbols" : ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"]
+                     }
+                   },
+                   {
+                     "name" : "f_fixed",
+                     "type" : { "type" : "fixed" ,"size" : 8, "name": "long_encoded" }
+                   },
+                   {
+                     "name" : "f_bytes",
+                     "type": "bytes"
+                   }
+                 ]
+              }"""
+      );
+
+      byte[] fixedFieldValue = Longs.toByteArray(1234L);
+      byte[] bytesFieldValue = Longs.toByteArray(2345L);
+
+      GenericData.Record inputRecord = new GenericData.Record(schema);
+      inputRecord.put("f_int", 123);
+      inputRecord.put("f_long", 4294967294L);
+      inputRecord.put("f_string", "string here");
+      inputRecord.put("f_boolean", true);
+      inputRecord.put("f_float", 123.1f);
+      inputRecord.put("f_double", 123456.123456);
+      inputRecord.put("f_enum", new GenericData.EnumSymbol(schema.getField("f_enum").schema(), "SPADES"));
+      inputRecord.put("f_fixed", new GenericData.Fixed(schema.getField("f_fixed").schema(), fixedFieldValue));
+      inputRecord.put("f_bytes", ByteBuffer.wrap(bytesFieldValue));
+
+      String expectedJson = """
+          {
+            "f_int": 123,
+            "f_long": 4294967294,
+            "f_string": "string here",
+            "f_boolean": true,
+            "f_float": 123.1,
+            "f_double": 123456.123456,
+            "f_enum": "SPADES",
+            "f_fixed": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0004Ò",
+            "f_bytes": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\t)"
+          }
+          """;
+
+      assertJsonsEqual(expectedJson, convertAvroToJson(inputRecord, schema));
+    }
+
+    @Test
+    void logicalTypesField() {
+      var schema = createSchema(
+          """
+               {
+                 "type": "record",
+                 "name": "TestAvroRecord",
+                 "fields": [
+                   {
+                     "name": "lt_date",
+                     "type": { "type": "int", "logicalType": "date" }
+                   },
+                   {
+                     "name": "lt_uuid",
+                     "type": { "type": "string", "logicalType": "uuid" }
+                   },
+                   {
+                     "name": "lt_decimal",
+                     "type": { "type": "bytes", "logicalType": "decimal", "precision": 22, "scale":10 }
+                   },
+                   {
+                     "name": "lt_time_millis",
+                     "type": { "type": "int", "logicalType": "time-millis"}
+                   },
+                   {
+                     "name": "lt_time_micros",
+                     "type": { "type": "long", "logicalType": "time-micros"}
+                   },
+                   {
+                     "name": "lt_timestamp_millis",
+                     "type": { "type": "long", "logicalType": "timestamp-millis" }
+                   },
+                   {
+                     "name": "lt_timestamp_micros",
+                     "type": { "type": "long", "logicalType": "timestamp-micros" }
+                   },
+                   {
+                     "name": "lt_local_timestamp_millis",
+                     "type": { "type": "long", "logicalType": "local-timestamp-millis" }
+                   },
+                   {
+                     "name": "lt_local_timestamp_micros",
+                     "type": { "type": "long", "logicalType": "local-timestamp-micros" }
+                   }
+                 ]
+              }"""
+      );
+
+      GenericData.Record inputRecord = new GenericData.Record(schema);
+      inputRecord.put("lt_date", LocalDate.of(1991, 8, 14));
+      inputRecord.put("lt_uuid", UUID.fromString("a37b75ca-097c-5d46-6119-f0637922e908"));
+      inputRecord.put("lt_decimal", new BigDecimal("2.16"));
+      inputRecord.put("lt_time_millis", LocalTime.parse("10:15:30.001"));
+      inputRecord.put("lt_time_micros", LocalTime.parse("10:15:30.123456"));
+      inputRecord.put("lt_timestamp_millis", Instant.parse("2007-12-03T10:15:30.123Z"));
+      inputRecord.put("lt_timestamp_micros", Instant.parse("2007-12-13T10:15:30.123456Z"));
+      inputRecord.put("lt_local_timestamp_millis", LocalDateTime.parse("2017-12-03T10:15:30.123"));
+      inputRecord.put("lt_local_timestamp_micros", LocalDateTime.parse("2017-12-13T10:15:30.123456"));
+
+      String expectedJson = """
+          {
+            "lt_date":"1991-08-14",
+            "lt_uuid": "a37b75ca-097c-5d46-6119-f0637922e908",
+            "lt_decimal": 2.16,
+            "lt_time_millis": "10:15:30.001",
+            "lt_time_micros": "10:15:30.123456",
+            "lt_timestamp_millis": "2007-12-03T10:15:30.123Z",
+            "lt_timestamp_micros": "2007-12-13T10:15:30.123456Z",
+            "lt_local_timestamp_millis": "2017-12-03T10:15:30.123",
+            "lt_local_timestamp_micros": "2017-12-13T10:15:30.123456"
+          }
+          """;
+
+      assertJsonsEqual(expectedJson, convertAvroToJson(inputRecord, schema));
+    }
+
+    @Test
+    void unionField() {
+      var schema = createSchema(
+          """
+               {
+                 "type": "record",
+                 "namespace": "com.test",
+                 "name": "TestAvroRecord",
+                 "fields": [
+                   {
+                     "name": "f_union",
+                     "type": [ "null", "int", "TestAvroRecord"]
+                   }
+                 ]
+              }"""
+      );
+
+      var r = new GenericData.Record(schema);
+      r.put("f_union", null);
+      assertJsonsEqual(" {}", convertAvroToJson(r, schema));
+
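+      // a non-null union value is wrapped in an object keyed by the branch's
+      // type name, so a reader can tell which union branch was written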
+      r = new GenericData.Record(schema);
+      r.put("f_union", 123);
+      assertJsonsEqual(" { \"f_union\" : { \"int\" : 123 } }", convertAvroToJson(r, schema));
+
+
+      r = new GenericData.Record(schema);
+      var innerRec = new GenericData.Record(schema);
+      innerRec.put("f_union", 123);
+      r.put("f_union", innerRec);
+      assertJsonsEqual(
+          " { \"f_union\" : { \"com.test.TestAvroRecord\" : { \"f_union\" : { \"int\" : 123 } } } }",
+          convertAvroToJson(r, schema)
+      );
+    }
+
+  }
+
+  private Schema createSchema(String schema) {
+    return new AvroSchema(schema).rawSchema();
+  }
+
+  @SneakyThrows
+  private void assertJsonsEqual(String expectedJson, JsonNode actual) {
+    var mapper = new JsonMapper();
+    assertThat(actual.toPrettyString())
+        .isEqualTo(mapper.readTree(expectedJson).toPrettyString());
+  }
+
+}
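
A minimal round-trip sketch tying the two nested suites together, written as it would sit inside this test class so it can reuse the same static helpers (convertJsonToAvro/convertAvroToJson) and createSchema; the converter class behind those helpers is assumed from context, not shown in this excerpt:

  // Hypothetical sketch reusing this test class' helpers; not part of the commit.
  @Test
  void roundTripSketch() {
    var schema = createSchema(
        """
        { "type": "record", "name": "R", "fields": [ { "name": "f_int", "type": "int" } ] }"""
    );
    // JSON -> Avro: record schemas come back as GenericData.Record
    var record = (GenericData.Record) convertJsonToAvro("{ \"f_int\": 123 }", schema);
    // Avro -> JSON: the record is rendered back into a Jackson JsonNode
    assertJsonsEqual("{ \"f_int\": 123 }", convertAvroToJson(record, schema));
  }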

+ 2 - 2
kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml

@@ -2558,7 +2558,7 @@ components:
           $ref: "#/components/schemas/ConsumerGroupState"
           $ref: "#/components/schemas/ConsumerGroupState"
         coordinator:
         coordinator:
           $ref: "#/components/schemas/Broker"
           $ref: "#/components/schemas/Broker"
-        messagesBehind:
+        consumerLag:
           type: integer
           type: integer
           format: int64
           format: int64
           description: null if consumer group has no offsets committed
           description: null if consumer group has no offsets committed
@@ -2776,7 +2776,7 @@ components:
         endOffset:
         endOffset:
           type: integer
           type: integer
           format: int64
           format: int64
-        messagesBehind:
+        consumerLag:
           type: integer
           type: integer
           format: int64
           format: int64
           description: null if consumer group has no offsets committed
           description: null if consumer group has no offsets committed

+ 5 - 0
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java

@@ -28,6 +28,7 @@ public abstract class BasePage extends WebUtils {
   protected SelenideElement confirmBtn = $x("//button[contains(text(),'Confirm')]");
   protected SelenideElement cancelBtn = $x("//button[contains(text(),'Cancel')]");
   protected SelenideElement backBtn = $x("//button[contains(text(),'Back')]");
+  protected SelenideElement previousBtn = $x("//button[contains(text(),'Previous')]");
   protected SelenideElement nextBtn = $x("//button[contains(text(),'Next')]");
   protected ElementsCollection ddlOptions = $$x("//li[@value]");
   protected ElementsCollection gridItems = $$x("//tr[@class]");
@@ -75,6 +76,10 @@ public abstract class BasePage extends WebUtils {
     clickByJavaScript(backBtn);
   }

+  protected void clickPreviousBtn() {
+    clickByJavaScript(previousBtn);
+  }
+
   protected void setJsonInputValue(SelenideElement jsonInput, String jsonConfig) {
     sendKeysByActions(jsonInput, jsonConfig.replace("  ", ""));
     new Actions(WebDriverRunner.getWebDriver())

+ 67 - 2
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java

@@ -16,6 +16,8 @@ import java.util.stream.Stream;

 public class BrokersConfigTab extends BasePage {

+  protected SelenideElement sourceInfoIcon = $x("//div[text()='Source']/..//div/div[@class]");
+  protected SelenideElement sourceInfoTooltip = $x("//div[text()='Source']/..//div/div[@style]");
   protected ElementsCollection editBtns = $$x("//button[@aria-label='editAction']");

   @Step
@@ -25,6 +27,17 @@ public class BrokersConfigTab extends BasePage {
     return this;
   }

+  @Step
+  public BrokersConfigTab hoverOnSourceInfoIcon() {
+    sourceInfoIcon.shouldBe(Condition.visible).hover();
+    return this;
+  }
+
+  @Step
+  public String getSourceInfoTooltipText() {
+    return sourceInfoTooltip.shouldBe(Condition.visible).getText().trim();
+  }
+
   @Step
   public boolean isSearchByKeyVisible() {
     return isVisible(searchFld);
@@ -53,6 +66,13 @@ public class BrokersConfigTab extends BasePage {
     return this;
   }

+  @Step
+  public BrokersConfigTab clickPreviousButton() {
+    clickPreviousBtn();
+    waitUntilSpinnerDisappear(1);
+    return this;
+  }
+
   private List<BrokersConfigTab.BrokersConfigItem> initGridItems() {
     List<BrokersConfigTab.BrokersConfigItem> gridItemList = new ArrayList<>();
     gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
@@ -91,13 +111,58 @@ public class BrokersConfigTab extends BasePage {
     }

     @Step
-    public void edit() {
-      element.$x("./td[2]//button").shouldBe(Condition.enabled).click();
+    public BrokersConfigItem setValue(String value) {
+      sendKeysAfterClear(getValueFld(), value);
+      return this;
+    }
+
+    @Step
+    public SelenideElement getValueFld() {
+      return element.$x("./td[2]//input");
+    }
+
+    @Step
+    public SelenideElement getSaveBtn() {
+      return element.$x("./td[2]//button[@aria-label='confirmAction']");
+    }
+
+    @Step
+    public SelenideElement getCancelBtn() {
+      return element.$x("./td[2]//button[@aria-label='cancelAction']");
+    }
+
+    @Step
+    public SelenideElement getEditBtn() {
+      return element.$x("./td[2]//button[@aria-label='editAction']");
+    }
+
+    @Step
+    public BrokersConfigItem clickSaveBtn() {
+      getSaveBtn().shouldBe(Condition.enabled).click();
+      return this;
+    }
+
+    @Step
+    public BrokersConfigItem clickCancelBtn() {
+      getCancelBtn().shouldBe(Condition.enabled).click();
+      return this;
+    }
+
+    @Step
+    public BrokersConfigItem clickEditBtn() {
+      getEditBtn().shouldBe(Condition.enabled).click();
+      return this;
     }

     @Step
     public String getSource() {
       return element.$x("./td[3]").getText().trim();
     }
+
+    @Step
+    public BrokersConfigItem clickConfirm() {
+      clickConfirmButton();
+      return this;
+    }
   }
 }

+ 15 - 0
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Expected.java

@@ -0,0 +1,15 @@
+package com.provectus.kafka.ui.variables;
+
+public interface Expected {
+
+  String BROKER_SOURCE_INFO_TOOLTIP =
+      "DYNAMIC_TOPIC_CONFIG = dynamic topic config that is configured for a specific topic\n"
+          + "DYNAMIC_BROKER_LOGGER_CONFIG = dynamic broker logger config that is configured for a specific broker\n"
+          + "DYNAMIC_BROKER_CONFIG = dynamic broker config that is configured for a specific broker\n"
+          + "DYNAMIC_DEFAULT_BROKER_CONFIG = dynamic broker config that is configured as default "
+          + "for all brokers in the cluster\n"
+          + "STATIC_BROKER_CONFIG = static broker config provided as broker properties at start up "
+          + "(e.g. server.properties file)\n"
+          + "DEFAULT_CONFIG = built-in default configuration for configs that have a default value\n"
+          + "UNKNOWN = source unknown e.g. in the ConfigEntry used for alter requests where source is not set";
+}
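
These tooltip entries track the ConfigSource enum of the Kafka admin client (org.apache.kafka.clients.admin.ConfigEntry.ConfigSource). An illustrative way to keep the constant honest if upstream ever adds or renames a source (the descriptions are hand-written, so only the names can be checked):

// Illustrative sketch, not part of the commit: verify each tooltip line
// starts with a real ConfigSource name from the Kafka admin client.
import java.util.Arrays;
import java.util.Set;
import java.util.stream.Collectors;
import com.provectus.kafka.ui.variables.Expected;
import org.apache.kafka.clients.admin.ConfigEntry;

class TooltipSourceNamesCheck {
  public static void main(String[] args) {
    Set<String> known = Arrays.stream(ConfigEntry.ConfigSource.values())
        .map(Enum::name)
        .collect(Collectors.toSet());
    Expected.BROKER_SOURCE_INFO_TOOLTIP.lines()
        .map(line -> line.split(" = ", 2)[0])   // "NAME = description" -> "NAME"
        .forEach(name -> {
          if (!known.contains(name)) {
            throw new IllegalStateException("Unknown config source: " + name);
          }
        });
  }
}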

+ 9 - 30
kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java

@@ -15,87 +15,66 @@ import org.testng.annotations.Test;

 public class SmokeBacklog extends BaseManualTest {

-  @Automation(state = TO_BE_AUTOMATED)
-  @Suite(id = BROKERS_SUITE_ID)
-  @QaseId(331)
-  @Test
-  public void testCaseA() {
-  }
-
-  @Automation(state = TO_BE_AUTOMATED)
-  @Suite(id = BROKERS_SUITE_ID)
-  @QaseId(332)
-  @Test
-  public void testCaseB() {
-  }
-
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = TOPICS_PROFILE_SUITE_ID)
   @QaseId(335)
   @Test
-  public void testCaseC() {
+  public void testCaseA() {
   }

   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = TOPICS_PROFILE_SUITE_ID)
   @QaseId(336)
   @Test
-  public void testCaseD() {
+  public void testCaseB() {
   }

   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = TOPICS_PROFILE_SUITE_ID)
   @QaseId(343)
   @Test
-  public void testCaseE() {
+  public void testCaseC() {
   }

   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = SCHEMAS_SUITE_ID)
   @QaseId(345)
   @Test
-  public void testCaseF() {
+  public void testCaseD() {
   }

   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = SCHEMAS_SUITE_ID)
   @QaseId(346)
   @Test
-  public void testCaseG() {
+  public void testCaseE() {
   }

   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = TOPICS_PROFILE_SUITE_ID)
   @QaseId(347)
   @Test
-  public void testCaseH() {
+  public void testCaseF() {
   }

   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = BROKERS_SUITE_ID)
   @QaseId(348)
   @Test
-  public void testCaseI() {
-  }
-
-  @Automation(state = TO_BE_AUTOMATED)
-  @Suite(id = BROKERS_SUITE_ID)
-  @QaseId(350)
-  @Test
-  public void testCaseJ() {
+  public void testCaseG() {
   }

   @Automation(state = NOT_AUTOMATED)
   @Suite(id = TOPICS_SUITE_ID)
   @QaseId(50)
   @Test
-  public void testCaseK() {
+  public void testCaseH() {
   }

   @Automation(state = NOT_AUTOMATED)
   @Suite(id = SCHEMAS_SUITE_ID)
   @QaseId(351)
   @Test
-  public void testCaseL() {
+  public void testCaseI() {
   }
 }

+ 87 - 7
kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java

@@ -1,6 +1,7 @@
 package com.provectus.kafka.ui.smokesuite.brokers;

 import static com.provectus.kafka.ui.pages.brokers.BrokersDetails.DetailsTab.CONFIGS;
+import static com.provectus.kafka.ui.variables.Expected.BROKER_SOURCE_INFO_TOOLTIP;

 import com.codeborne.selenide.Condition;
 import com.provectus.kafka.ui.BaseTest;
@@ -10,6 +11,7 @@ import io.qase.api.annotation.QaseId;
 import org.testng.Assert;
 import org.testng.annotations.Ignore;
 import org.testng.annotations.Test;
+import org.testng.asserts.SoftAssert;

 public class BrokersTest extends BaseTest {

@@ -48,11 +50,11 @@ public class BrokersTest extends BaseTest {
   @Issue("https://github.com/provectus/kafka-ui/issues/3347")
   @QaseId(330)
   @Test
-  public void brokersConfigSearchCheck() {
+  public void brokersConfigFirstPageSearchCheck() {
     navigateToBrokersAndOpenDetails(DEFAULT_BROKER_ID);
     brokersDetails
         .openDetailsTab(CONFIGS);
-    String anyConfigKey = brokersConfigTab
+    String anyConfigKeyFirstPage = brokersConfigTab
         .getAllConfigs().stream()
         .findAny().orElseThrow()
         .getKey();
@@ -60,13 +62,91 @@ public class BrokersTest extends BaseTest {
         .clickNextButton();
     Assert.assertFalse(brokersConfigTab.getAllConfigs().stream()
             .map(BrokersConfigTab.BrokersConfigItem::getKey)
-            .toList().contains(anyConfigKey),
-        String.format("getAllConfigs().contains(%s)", anyConfigKey));
+            .toList().contains(anyConfigKeyFirstPage),
+        String.format("getAllConfigs().contains(%s)", anyConfigKeyFirstPage));
     brokersConfigTab
-        .searchConfig(anyConfigKey);
+        .searchConfig(anyConfigKeyFirstPage);
     Assert.assertTrue(brokersConfigTab.getAllConfigs().stream()
             .map(BrokersConfigTab.BrokersConfigItem::getKey)
-            .toList().contains(anyConfigKey),
-        String.format("getAllConfigs().contains(%s)", anyConfigKey));
+            .toList().contains(anyConfigKeyFirstPage),
+        String.format("getAllConfigs().contains(%s)", anyConfigKeyFirstPage));
+  }
+
+  @Ignore
+  @Issue("https://github.com/provectus/kafka-ui/issues/3347")
+  @QaseId(350)
+  @Test
+  public void brokersConfigSecondPageSearchCheck() {
+    navigateToBrokersAndOpenDetails(DEFAULT_BROKER_ID);
+    brokersDetails
+        .openDetailsTab(CONFIGS);
+    brokersConfigTab
+        .clickNextButton();
+    String anyConfigKeySecondPage = brokersConfigTab
+        .getAllConfigs().stream()
+        .findAny().orElseThrow()
+        .getKey();
+    brokersConfigTab
+        .clickPreviousButton();
+    Assert.assertFalse(brokersConfigTab.getAllConfigs().stream()
+            .map(BrokersConfigTab.BrokersConfigItem::getKey)
+            .toList().contains(anyConfigKeySecondPage),
+        String.format("getAllConfigs().contains(%s)", anyConfigKeySecondPage));
+    brokersConfigTab
+        .searchConfig(anyConfigKeySecondPage);
+    Assert.assertTrue(brokersConfigTab.getAllConfigs().stream()
+            .map(BrokersConfigTab.BrokersConfigItem::getKey)
+            .toList().contains(anyConfigKeySecondPage),
+        String.format("getAllConfigs().contains(%s)", anyConfigKeySecondPage));
+  }
+
+  @QaseId(331)
+  @Test
+  public void brokersSourceInfoCheck() {
+    navigateToBrokersAndOpenDetails(DEFAULT_BROKER_ID);
+    brokersDetails
+        .openDetailsTab(CONFIGS);
+    String sourceInfoTooltip = brokersConfigTab
+        .hoverOnSourceInfoIcon()
+        .getSourceInfoTooltipText();
+    Assert.assertEquals(sourceInfoTooltip, BROKER_SOURCE_INFO_TOOLTIP, "brokerSourceInfoTooltip");
+  }
+
+  @QaseId(332)
+  @Test
+  public void brokersConfigEditCheck() {
+    navigateToBrokersAndOpenDetails(DEFAULT_BROKER_ID);
+    brokersDetails
+        .openDetailsTab(CONFIGS);
+    String configKey = "log.cleaner.min.compaction.lag.ms";
+    BrokersConfigTab.BrokersConfigItem configItem = brokersConfigTab
+        .searchConfig(configKey)
+        .getConfig(configKey);
+    int defaultValue = Integer.parseInt(configItem.getValue());
+    configItem
+        .clickEditBtn();
+    SoftAssert softly = new SoftAssert();
+    softly.assertTrue(configItem.getSaveBtn().isDisplayed(), "getSaveBtn().isDisplayed()");
+    softly.assertTrue(configItem.getCancelBtn().isDisplayed(), "getCancelBtn().isDisplayed()");
+    softly.assertTrue(configItem.getValueFld().isEnabled(), "getValueFld().isEnabled()");
+    softly.assertAll();
+    int newValue = defaultValue + 1;
+    configItem
+        .setValue(String.valueOf(newValue))
+        .clickCancelBtn();
+    Assert.assertEquals(Integer.parseInt(configItem.getValue()), defaultValue, "getValue()");
+    configItem
+        .clickEditBtn()
+        .setValue(String.valueOf(newValue))
+        .clickSaveBtn()
+        .clickConfirm();
+    configItem = brokersConfigTab
+        .searchConfig(configKey)
+        .getConfig(configKey);
+    softly.assertFalse(configItem.getSaveBtn().isDisplayed(), "getSaveBtn().isDisplayed()");
+    softly.assertFalse(configItem.getCancelBtn().isDisplayed(), "getCancelBtn().isDisplayed()");
+    softly.assertTrue(configItem.getEditBtn().isDisplayed(), "getEditBtn().isDisplayed()");
+    softly.assertEquals(Integer.parseInt(configItem.getValue()), newValue, "getValue()");
+    softly.assertAll();
   }
 }

+ 3 - 1
kafka-ui-react-app/package.json

@@ -9,7 +9,7 @@
     "@hookform/resolvers": "^2.7.1",
     "@microsoft/fetch-event-source": "^2.0.1",
     "@reduxjs/toolkit": "^1.8.3",
-    "@szhsin/react-menu": "^3.1.1",
+    "@szhsin/react-menu": "^3.5.3",
     "@tanstack/react-query": "^4.0.5",
     "@tanstack/react-table": "^8.5.10",
     "@testing-library/react": "^14.0.0",
@@ -24,6 +24,7 @@
     "json-schema-faker": "^0.5.0-rcv.44",
     "jsonpath-plus": "^7.2.0",
     "lodash": "^4.17.21",
+    "lossless-json": "^2.0.8",
     "pretty-ms": "7.0.1",
     "react": "^18.1.0",
     "react-ace": "^10.1.0",
@@ -71,6 +72,7 @@
     "@testing-library/user-event": "^14.4.3",
     "@types/eventsource": "^1.1.8",
     "@types/lodash": "^4.14.172",
+    "@types/lossless-json": "^1.0.1",
     "@types/node": "^16.4.13",
     "@types/react": "^18.0.9",
     "@types/react-datepicker": "^4.8.0",

+ 26 - 12
kafka-ui-react-app/pnpm-lock.yaml

@@ -10,7 +10,7 @@ specifiers:
   '@reduxjs/toolkit': ^1.8.3
   '@swc/core': ^1.3.36
   '@swc/jest': ^0.2.24
-  '@szhsin/react-menu': ^3.1.1
+  '@szhsin/react-menu': ^3.5.3
   '@tanstack/react-query': ^4.0.5
   '@tanstack/react-table': ^8.5.10
   '@testing-library/dom': ^9.0.0
@@ -19,6 +19,7 @@ specifiers:
   '@testing-library/user-event': ^14.4.3
   '@types/eventsource': ^1.1.8
   '@types/lodash': ^4.14.172
+  '@types/lossless-json': ^1.0.1
   '@types/node': ^16.4.13
   '@types/react': ^18.0.9
   '@types/react-datepicker': ^4.8.0
@@ -55,6 +56,7 @@ specifiers:
   json-schema-faker: ^0.5.0-rcv.44
   jsonpath-plus: ^7.2.0
   lodash: ^4.17.21
+  lossless-json: ^2.0.8
   prettier: ^2.8.4
   pretty-ms: 7.0.1
   react: ^18.1.0
@@ -89,14 +91,14 @@ dependencies:
   '@hookform/resolvers': 2.8.9_react-hook-form@7.43.1
   '@microsoft/fetch-event-source': 2.0.1
   '@reduxjs/toolkit': 1.8.3_ctm756ikdwcjcvyfxxwskzbr6q
-  '@szhsin/react-menu': 3.1.1_ef5jwxihqo6n7gxfmzogljlgcm
+  '@szhsin/react-menu': 3.5.3_ef5jwxihqo6n7gxfmzogljlgcm
   '@tanstack/react-query': 4.0.5_ef5jwxihqo6n7gxfmzogljlgcm
   '@tanstack/react-table': 8.5.10_ef5jwxihqo6n7gxfmzogljlgcm
   '@testing-library/react': 14.0.0_ef5jwxihqo6n7gxfmzogljlgcm
   '@types/testing-library__jest-dom': 5.14.5
   ace-builds: 1.7.1
   ajv: 8.8.2
-  ajv-formats: 2.1.1
+  ajv-formats: 2.1.1_ajv@8.8.2
   classnames: 2.3.1
   fetch-mock: 9.11.0
   jest: 29.5.0_6m7kcbkkzjz4ln6z66tlzx44we
@@ -104,6 +106,7 @@ dependencies:
   json-schema-faker: 0.5.0-rcv.44
   jsonpath-plus: 7.2.0
   lodash: 4.17.21
+  lossless-json: 2.0.8
   pretty-ms: 7.0.1
   react: 18.1.0
   react-ace: 10.1.0_ef5jwxihqo6n7gxfmzogljlgcm
@@ -136,6 +139,7 @@ devDependencies:
   '@testing-library/user-event': 14.4.3_@testing-library+dom@9.0.0
   '@types/eventsource': 1.1.8
   '@types/lodash': 4.14.177
+  '@types/lossless-json': 1.0.1
   '@types/node': 16.11.7
   '@types/react': 18.0.9
   '@types/react-datepicker': 4.10.0_react@18.1.0
@@ -1532,8 +1536,8 @@ packages:
       jsonc-parser: 3.2.0
     dev: true

-  /@szhsin/react-menu/3.1.1_ef5jwxihqo6n7gxfmzogljlgcm:
-    resolution: {integrity: sha512-IdHLyH61M+KqjTrvqglKo7JnbC0GIkg4OCtlXBxQPEjx/ecR5g0Iycqm+SG3rObEoniLZEz32iJkefve/LAHMA==}
+  /@szhsin/react-menu/3.5.3_ef5jwxihqo6n7gxfmzogljlgcm:
+    resolution: {integrity: sha512-jxo8oaRwxmVjUzkyOi/ZJiXaZiuFPMIxFzyJdUKfnhBLYiEOVTU9M2CiPuEkirILoareR2GJj2K3y8a81CBPlw==}
     peerDependencies:
       react: '>=16.14.0'
       react-dom: '>=16.14.0'
@@ -1541,7 +1545,7 @@ packages:
       prop-types: 15.8.1
       react: 18.1.0
       react-dom: 18.1.0_react@18.1.0
-      react-transition-state: 1.1.4_ef5jwxihqo6n7gxfmzogljlgcm
+      react-transition-state: 1.1.5_ef5jwxihqo6n7gxfmzogljlgcm
     dev: false

   /@tanstack/query-core/4.0.5:
@@ -1770,6 +1774,10 @@ packages:
     resolution: {integrity: sha512-0fDwydE2clKe9MNfvXHBHF9WEahRuj+msTuQqOmAApNORFvhMYZKNGGJdCzuhheVjMps/ti0Ak/iJPACMaevvw==}
     dev: true

+  /@types/lossless-json/1.0.1:
+    resolution: {integrity: sha512-zPE8kmpeL5/6L5gtTQHSOkAW/OSYYNTDRt6/2oEgLO1Zd3Rj5WVDoMloTtLJxQJhZGLGbL4pktKSh3NbzdaWdw==}
+    dev: true
+
   /@types/node/16.11.7:
     resolution: {integrity: sha512-QB5D2sqfSjCmTuWcBWyJ+/44bcjO7VbjSbOE0ucoVbAsSNQc4Lt6QkgkVXkTDwkL4z/beecZNDvVX15D4P8Jbw==}

@@ -2050,8 +2058,10 @@ packages:
       - supports-color
     dev: true

-  /ajv-formats/2.1.1:
+  /ajv-formats/2.1.1_ajv@8.8.2:
     resolution: {integrity: sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==}
+    peerDependencies:
+      ajv: ^8.0.0
     peerDependenciesMeta:
       ajv:
         optional: true
@@ -2734,8 +2744,8 @@ packages:
       ms: 2.1.2
       supports-color: 5.5.0

-  /decimal.js/10.3.1:
-    resolution: {integrity: sha512-V0pfhfr8suzyPGOx3nmq4aHqabehUZn6Ch9kyFpV79TGDTWFmHqUqXdabR7QHqxzrYolF4+tVmJhUG4OURg5dQ==}
+  /decimal.js/10.4.3:
+    resolution: {integrity: sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==}
     dev: true

   /dedent/0.7.0:
@@ -4649,7 +4659,7 @@ packages:
       cssom: 0.5.0
       cssstyle: 2.3.0
       data-urls: 3.0.2
-      decimal.js: 10.3.1
+      decimal.js: 10.4.3
       domexception: 4.0.0
       escodegen: 2.0.0
       form-data: 4.0.0
@@ -4841,6 +4851,10 @@ packages:
     dependencies:
       js-tokens: 4.0.0

+  /lossless-json/2.0.8:
+    resolution: {integrity: sha512-7/GaZldUc7H5oNZlSk6bF06cRbtA7oF8zWXwbfMZm8yrYC2debx0KvWTBbQIbj6fh08LsXTWg+YtHJshXgYKow==}
+    dev: false
+
   /lru-cache/6.0.0:
     resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==}
     engines: {node: '>=10'}
@@ -5562,8 +5576,8 @@ packages:
       react: 18.1.0
     dev: false

-  /react-transition-state/1.1.4_ef5jwxihqo6n7gxfmzogljlgcm:
-    resolution: {integrity: sha512-6nQLWWx95gYazCm6OdtD1zGbRiirvVXPrDtHAGsYb4xs9spMM7bA8Vx77KCpjL8PJ8qz1lXFGz2PTboCSvt7iw==}
+  /react-transition-state/1.1.5_ef5jwxihqo6n7gxfmzogljlgcm:
+    resolution: {integrity: sha512-ITY2mZqc2dWG2eitJkYNdcSFW8aKeOlkL2A/vowRrLL8GH3J6Re/SpD/BLvQzrVOTqjsP0b5S9N10vgNNzwMUQ==}
     peerDependencies:
       react: '>=16.8.0'
       react-dom: '>=16.8.0'

+ 5 - 3
kafka-ui-react-app/src/components/App.tsx

@@ -1,4 +1,4 @@
-import React, { Suspense } from 'react';
+import React, { Suspense, useContext } from 'react';
 import { Routes, Route, Navigate } from 'react-router-dom';
 import {
   accessErrorPage,
@@ -18,6 +18,7 @@ import { Toaster } from 'react-hot-toast';
 import GlobalCSS from 'components/globalCss';
 import * as S from 'components/App.styled';
 import ClusterConfigForm from 'widgets/ClusterConfigForm';
+import { ThemeModeContext } from 'components/contexts/ThemeModeContext';

 import ConfirmationModal from './common/ConfirmationModal/ConfirmationModal';
 import { ConfirmContextProvider } from './contexts/ConfirmContext';
@@ -30,6 +31,7 @@ const queryClient = new QueryClient({
   defaultOptions: {
     queries: {
       suspense: true,
+      networkMode: 'offlineFirst',
       onError(error) {
         showServerError(error as Response);
       },
@@ -42,7 +44,7 @@ const queryClient = new QueryClient({
   },
 });
 const App: React.FC = () => {
-  const [isDarkMode, setDarkMode] = React.useState<boolean>(false);
+  const { isDarkMode } = useContext(ThemeModeContext);

   return (
     <QueryClientProvider client={queryClient}>
@@ -53,7 +55,7 @@ const App: React.FC = () => {
               <ConfirmContextProvider>
                 <GlobalCSS />
                 <S.Layout>
-                  <PageContainer setDarkMode={setDarkMode}>
+                  <PageContainer>
                     <Routes>
                       {['/', '/ui', '/ui/clusters'].map((path) => (
                         <Route

+ 3 - 3
kafka-ui-react-app/src/components/Connect/Details/Actions/Actions.tsx

@@ -102,7 +102,7 @@ const Actions: React.FC = () => {
           disabled={isMutating}
           permission={{
             resource: ResourceType.CONNECT,
-            action: Action.EDIT,
+            action: Action.RESTART,
             value: routerProps.connectorName,
           }}
         >
@@ -113,7 +113,7 @@ const Actions: React.FC = () => {
           disabled={isMutating}
           permission={{
             resource: ResourceType.CONNECT,
-            action: Action.EDIT,
+            action: Action.RESTART,
             value: routerProps.connectorName,
           }}
         >
@@ -124,7 +124,7 @@ const Actions: React.FC = () => {
           disabled={isMutating}
           permission={{
             resource: ResourceType.CONNECT,
-            action: Action.EDIT,
+            action: Action.RESTART,
             value: routerProps.connectorName,
           }}
         >

+ 10 - 4
kafka-ui-react-app/src/components/Connect/Details/Tasks/ActionsCellTasks.tsx

@@ -1,9 +1,10 @@
 import React from 'react';
-import { Task } from 'generated-sources';
+import { Action, ResourceType, Task } from 'generated-sources';
 import { CellContext } from '@tanstack/react-table';
 import useAppParams from 'lib/hooks/useAppParams';
 import { useRestartConnectorTask } from 'lib/hooks/api/kafkaConnect';
-import { Dropdown, DropdownItem } from 'components/common/Dropdown';
+import { Dropdown } from 'components/common/Dropdown';
+import { ActionDropdownItem } from 'components/common/ActionComponent';
 import { RouterParamsClusterConnectConnector } from 'lib/paths';

 const ActionsCellTasks: React.FC<CellContext<Task, unknown>> = ({ row }) => {
@@ -18,13 +19,18 @@ const ActionsCellTasks: React.FC<CellContext<Task, unknown>> = ({ row }) => {

   return (
     <Dropdown>
-      <DropdownItem
+      <ActionDropdownItem
         onClick={() => restartTaskHandler(id?.task)}
         danger
         confirm="Are you sure you want to restart the task?"
+        permission={{
+          resource: ResourceType.CONNECT,
+          action: Action.RESTART,
+          value: routerProps.connectorName,
+        }}
       >
         <span>Restart task</span>
-      </DropdownItem>
+      </ActionDropdownItem>
     </Dropdown>
   );
 };

+ 3 - 3
kafka-ui-react-app/src/components/Connect/List/ActionsCell.tsx

@@ -78,7 +78,7 @@ const ActionsCell: React.FC<CellContext<FullConnectorInfo, unknown>> = ({
         disabled={isMutating}
         permission={{
           resource: ResourceType.CONNECT,
-          action: Action.EDIT,
+          action: Action.RESTART,
           value: name,
         }}
       >
@@ -89,7 +89,7 @@ const ActionsCell: React.FC<CellContext<FullConnectorInfo, unknown>> = ({
         disabled={isMutating}
         permission={{
           resource: ResourceType.CONNECT,
-          action: Action.EDIT,
+          action: Action.RESTART,
           value: name,
         }}
       >
@@ -100,7 +100,7 @@ const ActionsCell: React.FC<CellContext<FullConnectorInfo, unknown>> = ({
         disabled={isMutating}
         permission={{
           resource: ResourceType.CONNECT,
-          action: Action.EDIT,
+          action: Action.RESTART,
           value: name,
         }}
       >

+ 4 - 8
kafka-ui-react-app/src/components/Connect/New/New.tsx

@@ -38,7 +38,7 @@ const New: React.FC = () => {
   const { clusterName } = useAppParams<ClusterNameRoute>();
   const navigate = useNavigate();

-  const { data: connects } = useConnects(clusterName);
+  const { data: connects = [] } = useConnects(clusterName);
   const mutation = useCreateConnector(clusterName);

   const methods = useForm<FormValues>({
@@ -88,10 +88,6 @@ const New: React.FC = () => {
     }
   };

-  if (!connects || connects.length === 0) {
-    return null;
-  }
-
   const connectOptions = connects.map(({ name: connectName }) => ({
     value: connectName,
     label: connectName,
@@ -108,10 +104,10 @@ const New: React.FC = () => {
         onSubmit={handleSubmit(onSubmit)}
         aria-label="Create connect form"
       >
-        <S.Filed $hidden={connects.length <= 1}>
+        <S.Filed $hidden={connects?.length <= 1}>
           <Heading level={3}>Connect *</Heading>
           <Controller
-            defaultValue={connectOptions[0].value}
+            defaultValue={connectOptions[0]?.value}
             control={control}
             name="connectName"
             render={({ field: { name, onChange } }) => (
@@ -120,7 +116,7 @@ const New: React.FC = () => {
                 name={name}
                 disabled={isSubmitting}
                 onChange={onChange}
-                value={connectOptions[0].value}
+                value={connectOptions[0]?.value}
                 minWidth="100%"
                 options={connectOptions}
               />

+ 5 - 2
kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx

@@ -54,6 +54,8 @@ const Details: React.FC = () => {
     ? filteredPartitionsByTopic
     : Object.keys(partitionsByTopic);

+  const hasAssignedTopics = consumerGroup?.data?.topics !== 0;
+
   return (
     <div>
       <div>
@@ -71,6 +73,7 @@ const Details: React.FC = () => {
                   action: Action.RESET_OFFSETS,
                   value: consumerGroupID,
                 }}
+                disabled={!hasAssignedTopics}
               >
                 Reset offset
               </ActionDropdownItem>
@@ -110,7 +113,7 @@ const Details: React.FC = () => {
             {consumerGroup.data?.coordinator?.id}
           </Metrics.Indicator>
           <Metrics.Indicator label="Total lag">
-            {consumerGroup.data?.messagesBehind}
+            {consumerGroup.data?.consumerLag}
           </Metrics.Indicator>
         </Metrics.Section>
       </Metrics.Wrapper>
@@ -121,7 +124,7 @@ const Details: React.FC = () => {
         <thead>
           <tr>
             <TableHeaderCell title="Topic" />
-            <TableHeaderCell title="Messages behind" />
+            <TableHeaderCell title="Consumer Lag" />
           </tr>
         </thead>
         <tbody>

+ 3 - 3
kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.tsx

@@ -19,10 +19,10 @@ interface Props {
 const ListItem: React.FC<Props> = ({ clusterName, name, consumers }) => {
   const [isOpen, setIsOpen] = React.useState(false);

-  const getTotalMessagesBehind = () => {
+  const getTotalConsumerLag = () => {
     let count = 0;
     consumers.forEach((consumer) => {
-      count += consumer?.messagesBehind || 0;
+      count += consumer?.consumerLag || 0;
     });
     return count;
   };
@@ -40,7 +40,7 @@ const ListItem: React.FC<Props> = ({ clusterName, name, consumers }) => {
             </TableKeyLink>
           </FlexWrapper>
         </td>
-        <td>{getTotalMessagesBehind()}</td>
+        <td>{getTotalConsumerLag()}</td>
       </tr>
       {isOpen && <TopicContents consumers={consumers} />}
     </>

+ 1 - 1
kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx

@@ -22,7 +22,7 @@ const ResetOffsets: React.FC = () => {
     return <PageLoader />;

   const partitions = consumerGroup.data.partitions || [];
-  const { topic } = partitions[0];
+  const { topic } = partitions[0] || '';

   const uniqTopics = Array.from(
     new Set(partitions.map((partition) => partition.topic))

+ 3 - 3
kafka-ui-react-app/src/components/ConsumerGroups/Details/TopicContents/TopicContents.tsx

@@ -19,7 +19,7 @@ const TABLE_HEADERS_MAP: Headers[] = [
   { title: 'Partition', orderBy: 'partition' },
   { title: 'Consumer ID', orderBy: 'consumerId' },
   { title: 'Host', orderBy: 'host' },
-  { title: 'Messages Behind', orderBy: 'messagesBehind' },
+  { title: 'Consumer Lag', orderBy: 'consumerLag' },
   { title: 'Current Offset', orderBy: 'currentOffset' },
   { title: 'End offset', orderBy: 'endOffset' },
 ];
@@ -108,7 +108,7 @@ const TopicContents: React.FC<Props> = ({ consumers }) => {
         orderBy === 'partition' ||
         orderBy === 'currentOffset' ||
         orderBy === 'endOffset' ||
-        orderBy === 'messagesBehind';
+        orderBy === 'consumerLag';

       let comparator: ComparatorFunction<ConsumerGroupTopicPartition>;
       if (isNumberProperty) {
@@ -153,7 +153,7 @@ const TopicContents: React.FC<Props> = ({ consumers }) => {
                   <td>{consumer.partition}</td>
                   <td>{consumer.consumerId}</td>
                   <td>{consumer.host}</td>
-                  <td>{consumer.messagesBehind}</td>
+                  <td>{consumer.consumerLag}</td>
                   <td>{consumer.currentOffset}</td>
                   <td>{consumer.endOffset}</td>
                 </tr>

+ 2 - 2
kafka-ui-react-app/src/components/ConsumerGroups/List.tsx

@@ -57,8 +57,8 @@ const List = () => {
       },
       {
         id: ConsumerGroupOrdering.MESSAGES_BEHIND,
-        header: 'Messages Behind',
-        accessorKey: 'messagesBehind',
+        header: 'Consumer Lag',
+        accessorKey: 'consumerLag',
       },
       {
         header: 'Coordinator',

+ 6 - 42
kafka-ui-react-app/src/components/NavBar/NavBar.tsx

@@ -1,4 +1,4 @@
-import React from 'react';
+import React, { useContext } from 'react';
 import Select from 'components/common/Select/Select';
 import Select from 'components/common/Select/Select';
 import Logo from 'components/common/Logo/Logo';
 import Logo from 'components/common/Logo/Logo';
 import Version from 'components/Version/Version';
 import Version from 'components/Version/Version';
@@ -7,16 +7,16 @@ import DiscordIcon from 'components/common/Icons/DiscordIcon';
 import AutoIcon from 'components/common/Icons/AutoIcon';
 import AutoIcon from 'components/common/Icons/AutoIcon';
 import SunIcon from 'components/common/Icons/SunIcon';
 import SunIcon from 'components/common/Icons/SunIcon';
 import MoonIcon from 'components/common/Icons/MoonIcon';
 import MoonIcon from 'components/common/Icons/MoonIcon';
+import { ThemeModeContext } from 'components/contexts/ThemeModeContext';
 
 
 import UserInfo from './UserInfo/UserInfo';
 import UserInfo from './UserInfo/UserInfo';
 import * as S from './NavBar.styled';
 import * as S from './NavBar.styled';
 
 
 interface Props {
 interface Props {
   onBurgerClick: () => void;
   onBurgerClick: () => void;
-  setDarkMode: (value: boolean) => void;
 }
 }
 
 
-type ThemeDropDownValue = 'auto_theme' | 'light_theme' | 'dark_theme';
+export type ThemeDropDownValue = 'auto_theme' | 'light_theme' | 'dark_theme';
 
 
 const options = [
 const options = [
   {
   {
@@ -48,44 +48,8 @@ const options = [
   },
   },
 ];
 ];
 
 
-const NavBar: React.FC<Props> = ({ onBurgerClick, setDarkMode }) => {
-  const matchDark = window.matchMedia('(prefers-color-scheme: dark)');
-  const [themeMode, setThemeMode] = React.useState<ThemeDropDownValue>();
-
-  React.useLayoutEffect(() => {
-    const mode = localStorage.getItem('mode');
-    if (mode) {
-      setThemeMode(mode as ThemeDropDownValue);
-      if (mode === 'auto_theme') {
-        setDarkMode(matchDark.matches);
-      } else if (mode === 'light_theme') {
-        setDarkMode(false);
-      } else if (mode === 'dark_theme') {
-        setDarkMode(true);
-      }
-    } else {
-      setThemeMode('auto_theme');
-    }
-  }, []);
-
-  React.useEffect(() => {
-    if (themeMode === 'auto_theme') {
-      setDarkMode(matchDark.matches);
-      matchDark.addListener((e) => {
-        setDarkMode(e.matches);
-      });
-    }
-  }, [matchDark, themeMode]);
-
-  const onChangeThemeMode = (value: string | number) => {
-    setThemeMode(value as ThemeDropDownValue);
-    localStorage.setItem('mode', value as string);
-    if (value === 'light_theme') {
-      setDarkMode(false);
-    } else if (value === 'dark_theme') {
-      setDarkMode(true);
-    }
-  };
+const NavBar: React.FC<Props> = ({ onBurgerClick }) => {
+  const { themeMode, setThemeMode } = useContext(ThemeModeContext);
 
   return (
     <S.Navbar role="navigation" aria-label="Page Header">
@@ -117,7 +81,7 @@ const NavBar: React.FC<Props> = ({ onBurgerClick, setDarkMode }) => {
         <Select
           options={options}
           value={themeMode}
-          onChange={onChangeThemeMode}
+          onChange={setThemeMode}
           isThemeMode
         />
         <S.SocialLink

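NavBar itself now only consumes ThemeModeContext, so any component can read or set the theme the same way. A minimal sketch of a hypothetical consumer (the component name is not from this commit):

import React, { useContext } from 'react';
import { ThemeModeContext } from 'components/contexts/ThemeModeContext';

// Hypothetical component: toggles between light and dark via the context.
const ThemeToggle: React.FC = () => {
  const { isDarkMode, setThemeMode } = useContext(ThemeModeContext);
  return (
    <button
      type="button"
      onClick={() => setThemeMode(isDarkMode ? 'light_theme' : 'dark_theme')}
    >
      {isDarkMode ? 'Switch to light' : 'Switch to dark'}
    </button>
  );
};

export default ThemeToggle;
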
+ 27 - 6
kafka-ui-react-app/src/components/PageContainer/PageContainer.tsx

@@ -1,27 +1,48 @@
-import React, { PropsWithChildren } from 'react';
-import { useLocation } from 'react-router-dom';
+import React, { PropsWithChildren, useEffect, useMemo } from 'react';
+import { useLocation, useNavigate } from 'react-router-dom';
 import NavBar from 'components/NavBar/NavBar';
 import * as S from 'components/PageContainer/PageContainer.styled';
 import Nav from 'components/Nav/Nav';
 import useBoolean from 'lib/hooks/useBoolean';
+import { clusterNewConfigPath } from 'lib/paths';
+import { GlobalSettingsContext } from 'components/contexts/GlobalSettingsContext';
+import { useClusters } from 'lib/hooks/api/clusters';
+import { ResourceType } from 'generated-sources';
+import { useGetUserInfo } from 'lib/hooks/api/roles';
 
-const PageContainer: React.FC<
-  PropsWithChildren<{ setDarkMode: (value: boolean) => void }>
-> = ({ children, setDarkMode }) => {
+const PageContainer: React.FC<PropsWithChildren<unknown>> = ({ children }) => {
   const {
     value: isSidebarVisible,
     toggle,
     setFalse: closeSidebar,
   } = useBoolean(false);
+  const clusters = useClusters();
+  const appInfo = React.useContext(GlobalSettingsContext);
   const location = useLocation();
+  const navigate = useNavigate();
+  const { data: authInfo } = useGetUserInfo();
 
   React.useEffect(() => {
     closeSidebar();
   }, [location, closeSidebar]);
 
+  const hasApplicationPermissions = useMemo(() => {
+    if (!authInfo?.rbacEnabled) return true;
+    return !!authInfo?.userInfo?.permissions.some(
+      (permission) => permission.resource === ResourceType.APPLICATIONCONFIG
+    );
+  }, [authInfo]);
+
+  useEffect(() => {
+    if (!appInfo.hasDynamicConfig) return;
+    if (clusters?.data?.length !== 0) return;
+    if (!hasApplicationPermissions) return;
+    navigate(clusterNewConfigPath);
+  }, [clusters?.data, appInfo.hasDynamicConfig]);
+
   return (
     <>
-      <NavBar onBurgerClick={toggle} setDarkMode={setDarkMode} />
+      <NavBar onBurgerClick={toggle} />
       <S.Container>
         <S.Sidebar aria-label="Sidebar" $visible={isSidebarVisible}>
           <Nav />

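The redirect effect above fires only when all three guards pass: dynamic config is enabled, the clusters query has returned an empty list, and the user either has RBAC disabled or holds an APPLICATIONCONFIG permission. Restated as a self-contained predicate — a sketch, not code from the diff:

// Sketch: the redirect conditions collapsed into one predicate.
// clustersData must be a loaded, empty array for the redirect to fire;
// while it is still undefined, nothing happens.
const shouldRedirectToWizard = (
  hasDynamicConfig: boolean,
  clustersData: unknown[] | undefined,
  hasApplicationPermissions: boolean
): boolean =>
  hasDynamicConfig && clustersData?.length === 0 && hasApplicationPermissions;
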
+ 44 - 13
kafka-ui-react-app/src/components/PageContainer/__tests__/PageContainer.spec.tsx

@@ -4,21 +4,24 @@ import userEvent from '@testing-library/user-event';
 import { render } from 'lib/testHelpers';
 import PageContainer from 'components/PageContainer/PageContainer';
 import { useClusters } from 'lib/hooks/api/clusters';
+import { Cluster, ServerStatus } from 'generated-sources';
 
 const burgerButtonOptions = { name: 'burger' };
 
-jest.mock('lib/hooks/api/clusters', () => ({
-  ...jest.requireActual('lib/hooks/api/roles'),
-  useClusters: jest.fn(),
-}));
-
 jest.mock('components/Version/Version', () => () => <div>Version</div>);
-
+interface DataType {
+  data: Cluster[] | undefined;
+}
+jest.mock('lib/hooks/api/clusters');
+const mockedNavigate = jest.fn();
+jest.mock('react-router-dom', () => ({
+  ...jest.requireActual('react-router-dom'),
+  useNavigate: () => mockedNavigate,
+}));
 describe('Page Container', () => {
-  beforeEach(() => {
-    (useClusters as jest.Mock).mockImplementation(() => ({
-      isSuccess: false,
-    }));
+  const renderComponent = (hasDynamicConfig: boolean, data: DataType) => {
+    const useClustersMock = useClusters as jest.Mock;
+    useClustersMock.mockReturnValue(data);
     Object.defineProperty(window, 'matchMedia', {
       writable: true,
       value: jest.fn().mockImplementation(() => ({
@@ -26,15 +29,18 @@ describe('Page Container', () => {
         addListener: jest.fn(),
       })),
     });
-
     render(
       <PageContainer setDarkMode={jest.fn()}>
         <div>child</div>
-      </PageContainer>
+      </PageContainer>,
+      {
+        globalSettings: { hasDynamicConfig },
+      }
     );
-  });
+  };
 
   it('handle burger click correctly', async () => {
+    renderComponent(false, { data: undefined });
     const burger = within(screen.getByLabelText('Page Header')).getByRole(
       'button',
       burgerButtonOptions
@@ -49,6 +55,31 @@ describe('Page Container', () => {
   });
 
   it('render the inner container', async () => {
+    renderComponent(false, { data: undefined });
     expect(screen.getByText('child')).toBeInTheDocument();
   });
+
+  describe('Redirect to the Wizard page', () => {
+    it('redirects to new cluster configuration page if there are no clusters and dynamic config is enabled', async () => {
+      await renderComponent(true, { data: [] });
+
+      expect(mockedNavigate).toHaveBeenCalled();
+    });
+
+    it('should not navigate to new cluster config page when there are clusters', async () => {
+      await renderComponent(true, {
+        data: [{ name: 'Cluster 1', status: ServerStatus.ONLINE }],
+      });
+
+      expect(mockedNavigate).not.toHaveBeenCalled();
+    });
+
+    it('should not navigate to new cluster config page when there are no clusters and hasDynamicConfig is false', async () => {
+      await renderComponent(false, {
+        data: [],
+      });
+
+      expect(mockedNavigate).not.toHaveBeenCalled();
+    });
+  });
 });

+ 2 - 2
kafka-ui-react-app/src/components/Topics/Topic/ConsumerGroups/TopicConsumerGroups.tsx

@@ -48,8 +48,8 @@ const TopicConsumerGroups: React.FC = () => {
         enableSorting: false,
       },
       {
-        header: 'Messages Behind',
-        accessorKey: 'messagesBehind',
+        header: 'Consumer Lag',
+        accessorKey: 'consumerLag',
         enableSorting: false,
       },
       {

+ 2 - 2
kafka-ui-react-app/src/components/common/EditorViewer/EditorViewer.tsx

@@ -1,6 +1,7 @@
 import React from 'react';
 import Editor from 'components/common/Editor/Editor';
 import { SchemaType } from 'generated-sources';
+import { parse, stringify } from 'lossless-json';
 
 import * as S from './EditorViewer.styled';
 
@@ -9,10 +10,9 @@ export interface EditorViewerProps {
   schemaType?: string;
   maxLines?: number;
 }
-
 const getSchemaValue = (data: string, schemaType?: string) => {
   if (schemaType === SchemaType.JSON || schemaType === SchemaType.AVRO) {
-    return JSON.stringify(JSON.parse(data), null, '\t');
+    return stringify(parse(data), undefined, '\t');
   }
   return data;
 };

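The move from JSON.parse/JSON.stringify to lossless-json matters for payloads carrying 64-bit integers (Kafka offsets, Avro longs): native JSON silently rounds anything above Number.MAX_SAFE_INTEGER, while lossless-json round-trips the exact digits. A quick illustration (values chosen for the example):

import { parse, stringify } from 'lossless-json';

const raw = '{"offset": 9007199254740993}';

// Native JSON loses the last digit (2^53 + 1 is not representable):
JSON.stringify(JSON.parse(raw)); // '{"offset":9007199254740992}'

// lossless-json preserves the original digits:
stringify(parse(raw)); // '{"offset":9007199254740993}'
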
+ 6 - 2
kafka-ui-react-app/src/components/common/SQLEditor/SQLEditor.tsx

@@ -3,7 +3,9 @@ import AceEditor, { IAceEditorProps } from 'react-ace';
 import 'ace-builds/src-noconflict/ace';
 import 'ace-builds/src-noconflict/mode-sql';
 import 'ace-builds/src-noconflict/theme-textmate';
-import React from 'react';
+import 'ace-builds/src-noconflict/theme-dracula';
+import React, { useContext } from 'react';
+import { ThemeModeContext } from 'components/contexts/ThemeModeContext';
 
 interface SQLEditorProps extends IAceEditorProps {
   isFixedHeight?: boolean;
@@ -12,11 +14,13 @@ interface SQLEditorProps extends IAceEditorProps {
 const SQLEditor = React.forwardRef<AceEditor | null, SQLEditorProps>(
   (props, ref) => {
     const { isFixedHeight, ...rest } = props;
+    const { isDarkMode } = useContext(ThemeModeContext);
+
     return (
       <AceEditor
         ref={ref}
         mode="sql"
-        theme="textmate"
+        theme={isDarkMode ? 'dracula' : 'textmate'}
         tabSize={2}
         width="100%"
         height={

+ 58 - 0
kafka-ui-react-app/src/components/contexts/ThemeModeContext.tsx

@@ -0,0 +1,58 @@
+import React, { useMemo } from 'react';
+import type { FC, PropsWithChildren } from 'react';
+import type { ThemeDropDownValue } from 'components/NavBar/NavBar';
+
+interface ThemeModeContextProps {
+  isDarkMode: boolean;
+  themeMode: ThemeDropDownValue;
+  setThemeMode: (value: string | number) => void;
+}
+
+export const ThemeModeContext = React.createContext<ThemeModeContextProps>({
+  isDarkMode: false,
+  themeMode: 'auto_theme',
+  setThemeMode: () => {},
+});
+
+export const ThemeModeProvider: FC<PropsWithChildren<unknown>> = ({
+  children,
+}) => {
+  const matchDark = window.matchMedia('(prefers-color-scheme: dark)');
+  const [themeMode, setThemeModeState] =
+    React.useState<ThemeDropDownValue>('auto_theme');
+
+  React.useLayoutEffect(() => {
+    const mode = localStorage.getItem('mode');
+    setThemeModeState((mode as ThemeDropDownValue) ?? 'auto_theme');
+  }, [setThemeModeState]);
+
+  const isDarkMode = React.useMemo(() => {
+    if (themeMode === 'auto_theme') {
+      return matchDark.matches;
+    }
+    return themeMode === 'dark_theme';
+  }, [themeMode]);
+
+  const setThemeMode = React.useCallback(
+    (value: string | number) => {
+      setThemeModeState(value as ThemeDropDownValue);
+      localStorage.setItem('mode', value as string);
+    },
+    [setThemeModeState]
+  );
+
+  const contextValue = useMemo(
+    () => ({
+      isDarkMode,
+      themeMode,
+      setThemeMode,
+    }),
+    [isDarkMode, themeMode, setThemeMode]
+  );
+
+  return (
+    <ThemeModeContext.Provider value={contextValue}>
+      {children}
+    </ThemeModeContext.Provider>
+  );
+};

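One caveat in the provider as written: matchDark.matches is only re-read when themeMode changes, so an OS-level theme switch while 'auto_theme' is active will not trigger a re-render (the old NavBar code subscribed with addListener). A hedged sketch of how a subscription could be layered in, assuming a browser where MediaQueryList supports addEventListener:

import React from 'react';

// Sketch only: track the OS dark-mode preference reactively.
const useSystemDarkMode = (): boolean => {
  const [prefersDark, setPrefersDark] = React.useState(
    () => window.matchMedia('(prefers-color-scheme: dark)').matches
  );

  React.useEffect(() => {
    const mql = window.matchMedia('(prefers-color-scheme: dark)');
    const onChange = (e: MediaQueryListEvent) => setPrefersDark(e.matches);
    mql.addEventListener('change', onChange);
    return () => mql.removeEventListener('change', onChange);
  }, []);

  return prefersDark;
};
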
+ 4 - 1
kafka-ui-react-app/src/index.tsx

@@ -2,6 +2,7 @@ import React from 'react';
 import { createRoot } from 'react-dom/client';
 import { BrowserRouter } from 'react-router-dom';
 import { Provider } from 'react-redux';
+import { ThemeModeProvider } from 'components/contexts/ThemeModeContext';
 import App from 'components/App';
 import { store } from 'redux/store';
 import 'lib/constants';
@@ -14,7 +15,9 @@ const root = createRoot(container);
 root.render(
   <Provider store={store}>
     <BrowserRouter basename={window.basePath || '/'}>
-      <App />
+      <ThemeModeProvider>
+        <App />
+      </ThemeModeProvider>
     </BrowserRouter>
   </Provider>
 );

+ 5 - 5
kafka-ui-react-app/src/lib/fixtures/consumerGroups.ts

@@ -11,14 +11,14 @@ export const consumerGroupPayload = {
     id: 2,
     host: 'b-2.kad-msk.st2jzq.c6.kafka.eu-west-1.amazonaws.com',
   },
-  messagesBehind: 0,
+  consumerLag: 0,
   partitions: [
     {
       topic: '__amazon_msk_canary',
       partition: 1,
       currentOffset: 0,
       endOffset: 0,
-      messagesBehind: 0,
+      consumerLag: 0,
       consumerId: undefined,
       host: undefined,
     },
@@ -27,7 +27,7 @@ export const consumerGroupPayload = {
       partition: 0,
       currentOffset: 56932,
       endOffset: 56932,
-      messagesBehind: 0,
+      consumerLag: 0,
       consumerId: undefined,
       host: undefined,
     },
@@ -36,7 +36,7 @@ export const consumerGroupPayload = {
       partition: 3,
       currentOffset: 56932,
       endOffset: 56932,
-      messagesBehind: 0,
+      consumerLag: 0,
       consumerId: undefined,
       host: undefined,
     },
@@ -45,7 +45,7 @@ export const consumerGroupPayload = {
       partition: 4,
       currentOffset: 56932,
       endOffset: 56932,
-      messagesBehind: 0,
+      consumerLag: 0,
       consumerId: undefined,
       host: undefined,
     },

+ 2 - 2
kafka-ui-react-app/src/lib/fixtures/topics.ts

@@ -63,7 +63,7 @@ export const topicConsumerGroups: ConsumerGroup[] = [
     partitionAssignor: '',
     state: ConsumerGroupState.UNKNOWN,
     coordinator: { id: 1 },
-    messagesBehind: 9,
+    consumerLag: 9,
   },
   {
     groupId: 'amazon.msk.canary.group.broker-4',
@@ -73,7 +73,7 @@ export const topicConsumerGroups: ConsumerGroup[] = [
     partitionAssignor: '',
     state: ConsumerGroupState.COMPLETING_REBALANCE,
     coordinator: { id: 1 },
-    messagesBehind: 9,
+    consumerLag: 9,
   },
 ];
 

+ 7 - 1
kafka-ui-react-app/src/lib/testHelpers.tsx

@@ -39,6 +39,9 @@ interface CustomRenderOptions extends Omit<RenderOptions, 'wrapper'> {
     roles?: RolesType;
     rbacFlag: boolean;
   };
+  globalSettings?: {
+    hasDynamicConfig: boolean;
+  };
 }
 
 interface WithRouteProps {
@@ -111,6 +114,7 @@ const customRender = (
     }),
     initialEntries,
     userInfo,
+    globalSettings,
     ...renderOptions
   }: CustomRenderOptions = {}
 ) => {
@@ -119,7 +123,9 @@ const customRender = (
     children,
   }) => (
     <TestQueryClientProvider>
-      <GlobalSettingsContext.Provider value={{ hasDynamicConfig: false }}>
+      <GlobalSettingsContext.Provider
+        value={globalSettings || { hasDynamicConfig: false }}
+      >
         <ThemeProvider theme={theme}>
           <TestUserInfoProvider data={userInfo}>
             <ConfirmContextProvider>

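With globalSettings wired through customRender, a test can opt into dynamic config per call; a minimal sketch (the component under test is hypothetical):

import React from 'react';
import { render } from 'lib/testHelpers';

// Hypothetical component under test.
const Probe: React.FC = () => <div>probe</div>;

// The option flows into GlobalSettingsContext.Provider, defaulting to
// { hasDynamicConfig: false } when omitted.
render(<Probe />, {
  globalSettings: { hasDynamicConfig: true },
});
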
+ 7 - 1
kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/getJaasConfig.ts

@@ -20,7 +20,13 @@ export const getJaasConfig = (
   options: Record<string, string>
 ) => {
   const optionsString = Object.entries(options)
-    .map(([key, value]) => (isUndefined(value) ? null : ` ${key}="${value}"`))
+    .map(([key, value]) => {
+      if (isUndefined(value)) return null;
+      if (value === 'true' || value === 'false') {
+        return ` ${key}=${value}`;
+      }
+      return ` ${key}="${value}"`;
+    })
     .join('');
 
   return `${JAAS_CONFIGS[method]} required${optionsString};`;

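The net effect: values that look like booleans are emitted unquoted, which the JAAS parser expects for flags such as useKeyTab. Assuming JAAS_CONFIGS maps 'SASL/GSSAPI' to the Krb5 login module (the mapping is not shown in this diff), the result would look roughly like:

// Sketch of the expected output; module name assumed, not from the diff.
getJaasConfig('SASL/GSSAPI', {
  useKeyTab: 'true',
  keyTab: '/etc/security/kafka.keytab',
  principal: 'kafka-client@EXAMPLE.COM',
});
// => 'com.sun.security.auth.module.Krb5LoginModule required' +
//    ' useKeyTab=true keyTab="/etc/security/kafka.keytab"' +
//    ' principal="kafka-client@EXAMPLE.COM";'
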
+ 1 - 1
kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/transformFormDataToPayload.ts

@@ -122,7 +122,7 @@ export const transformFormDataToPayload = (data: ClusterConfigFormValues) => {
           'sasl.mechanism': 'GSSAPI',
           'sasl.kerberos.service.name': props.saslKerberosServiceName,
           'sasl.jaas.config': getJaasConfig('SASL/GSSAPI', {
-            useKeytab: props.keyTabFile ? 'true' : 'false',
+            useKeyTab: props.keyTabFile ? 'true' : 'false',
             keyTab: props.keyTabFile,
             storeKey: String(!!props.storeKey),
             principal: props.principal,

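Note on the one-character fix above: Krb5LoginModule option names are case-sensitive, so the earlier useKeytab spelling was effectively ignored by the module; useKeyTab is the documented option name.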
+ 1 - 1
pom.xml

@@ -59,7 +59,7 @@
         <maven-compiler-plugin.version>3.10.1</maven-compiler-plugin.version>
         <maven-resources-plugin.version>3.2.0</maven-resources-plugin.version>
         <maven-surefire-plugin.version>2.22.2</maven-surefire-plugin.version>
-        <openapi-generator-maven-plugin.version>6.5.0</openapi-generator-maven-plugin.version>
+        <openapi-generator-maven-plugin.version>6.6.0</openapi-generator-maven-plugin.version>
         <springdoc-openapi-webflux-ui.version>1.2.32</springdoc-openapi-webflux-ui.version>
     </properties>
     </properties>