Bladeren bron

Merge branch 'master' into audit_be

Ilya Kuramshin 2 jaren geleden
bovenliggende
commit
a1ae46a5ac
38 gewijzigde bestanden met toevoegingen van 302 en 317 verwijderingen
  1. 2 2
      documentation/compose/DOCKER_COMPOSE.md
  2. 0 0
      documentation/compose/data/message.json
  3. 0 0
      documentation/compose/data/proxy.conf
  4. 2 2
      documentation/compose/e2e-tests.yaml
  5. 2 2
      documentation/compose/kafka-cluster-sr-auth.yaml
  6. 0 84
      documentation/compose/kafka-clusters-only.yaml
  7. 1 1
      documentation/compose/kafka-ui-arm64.yaml
  8. 2 2
      documentation/compose/kafka-ui-connectors-auth.yaml
  9. 2 2
      documentation/compose/kafka-ui.yaml
  10. 1 1
      documentation/compose/kafka-with-zookeeper.yaml
  11. 12 15
      documentation/compose/ldap.yaml
  12. 1 1
      documentation/compose/nginx-proxy.yaml
  13. 0 22
      documentation/compose/oauth-cognito.yaml
  14. 0 0
      documentation/compose/traefik-proxy.yaml
  15. 3 3
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java
  16. 7 7
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java
  17. 4 4
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalTopicConsumerGroup.java
  18. 24 4
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdeInstance.java
  19. 33 37
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java
  20. 1 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
  21. 5 1
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java
  22. 1 2
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/GithubReleaseInfo.java
  23. 6 4
      kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/WebClientConfigurator.java
  24. 0 10
      kafka-ui-api/src/main/resources/application-gauth.yml
  25. 120 58
      kafka-ui-api/src/main/resources/application-local.yml
  26. 0 13
      kafka-ui-api/src/main/resources/application-sdp.yml
  27. 2 2
      kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
  28. 13 0
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java
  29. 15 0
      kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Expected.java
  30. 11 18
      kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java
  31. 13 0
      kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java
  32. 2 2
      kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx
  33. 3 3
      kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.tsx
  34. 3 3
      kafka-ui-react-app/src/components/ConsumerGroups/Details/TopicContents/TopicContents.tsx
  35. 2 2
      kafka-ui-react-app/src/components/ConsumerGroups/List.tsx
  36. 2 2
      kafka-ui-react-app/src/components/Topics/Topic/ConsumerGroups/TopicConsumerGroups.tsx
  37. 5 5
      kafka-ui-react-app/src/lib/fixtures/consumerGroups.ts
  38. 2 2
      kafka-ui-react-app/src/lib/fixtures/topics.ts

+ 2 - 2
documentation/compose/DOCKER_COMPOSE.md

@@ -8,9 +8,9 @@
 6. [kafka-ui-auth-context.yaml](./kafka-ui-auth-context.yaml) - Basic (username/password) authentication with custom path (URL) (issue 861).
 7. [e2e-tests.yaml](./e2e-tests.yaml) - Configuration with different connectors (github-source, s3, sink-activities, source-activities) and Ksql functionality.
 8. [kafka-ui-jmx-secured.yml](./kafka-ui-jmx-secured.yml) - Kafka’s JMX with SSL and authentication.
-9. [kafka-ui-reverse-proxy.yaml](./kafka-ui-reverse-proxy.yaml) - An example for using the app behind a proxy (like nginx).
+9. [kafka-ui-reverse-proxy.yaml](./nginx-proxy.yaml) - An example for using the app behind a proxy (like nginx).
 10. [kafka-ui-sasl.yaml](./kafka-ui-sasl.yaml) - SASL auth for Kafka.
-11. [kafka-ui-traefik-proxy.yaml](./kafka-ui-traefik-proxy.yaml) - Traefik specific proxy configuration.
+11. [kafka-ui-traefik-proxy.yaml](./traefik-proxy.yaml) - Traefik specific proxy configuration.
 12. [oauth-cognito.yaml](./oauth-cognito.yaml) - OAuth2 with Cognito
 13. [kafka-ui-with-jmx-exporter.yaml](./kafka-ui-with-jmx-exporter.yaml) - A configuration with 2 kafka clusters with enabled prometheus jmx exporters instead of jmx.
 14. [kafka-with-zookeeper.yaml](./kafka-with-zookeeper.yaml) - An example for using kafka with zookeeper

+ 0 - 0
documentation/compose/message.json → documentation/compose/data/message.json


+ 0 - 0
documentation/compose/proxy.conf → documentation/compose/data/proxy.conf


+ 2 - 2
documentation/compose/e2e-tests.yaml

@@ -124,7 +124,7 @@ services:
   kafka-init-topics:
     image: confluentinc/cp-kafka:7.2.1
     volumes:
-      - ./message.json:/data/message.json
+      - ./data/message.json:/data/message.json
     depends_on:
       kafka0:
         condition: service_healthy
@@ -187,4 +187,4 @@ services:
       KSQL_KSQL_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
       KSQL_KSQL_SERVICE_ID: my_ksql_1
       KSQL_KSQL_HIDDEN_TOPICS: '^_.*'
-      KSQL_CACHE_MAX_BYTES_BUFFERING: 0
+      KSQL_CACHE_MAX_BYTES_BUFFERING: 0

+ 2 - 2
documentation/compose/kafka-cluster-sr-auth.yaml

@@ -57,7 +57,7 @@ services:
   kafka-init-topics:
     image: confluentinc/cp-kafka:7.2.1
     volumes:
-       - ./message.json:/data/message.json
+       - ./data/message.json:/data/message.json
     depends_on:
       - kafka1
     command: "bash -c 'echo Waiting for Kafka to be ready... && \
@@ -80,4 +80,4 @@ services:
       KAFKA_CLUSTERS_0_METRICS_PORT: 9997
       KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://schemaregistry1:8085
       KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_USERNAME: admin
-      KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_PASSWORD: letmein
+      KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_PASSWORD: letmein

+ 0 - 84
documentation/compose/kafka-clusters-only.yaml

@@ -1,84 +0,0 @@
----
-version: "2"
-services:
-  kafka0:
-    image: confluentinc/cp-kafka:7.2.1
-    hostname: kafka0
-    container_name: kafka0
-    ports:
-      - "9092:9092"
-      - "9997:9997"
-    environment:
-      KAFKA_BROKER_ID: 1
-      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: "CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT"
-      KAFKA_ADVERTISED_LISTENERS: "PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092"
-      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
-      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
-      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
-      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
-      KAFKA_JMX_PORT: 9997
-      KAFKA_JMX_HOSTNAME: localhost
-      KAFKA_PROCESS_ROLES: "broker,controller"
-      KAFKA_NODE_ID: 1
-      KAFKA_CONTROLLER_QUORUM_VOTERS: "1@kafka0:29093"
-      KAFKA_LISTENERS: "PLAINTEXT://kafka0:29092,CONTROLLER://kafka0:29093,PLAINTEXT_HOST://0.0.0.0:9092"
-      KAFKA_INTER_BROKER_LISTENER_NAME: "PLAINTEXT"
-      KAFKA_CONTROLLER_LISTENER_NAMES: "CONTROLLER"
-      KAFKA_LOG_DIRS: "/tmp/kraft-combined-logs"
-    volumes:
-      - ./scripts/update_run_cluster.sh:/tmp/update_run.sh
-      - ./scripts/clusterID:/tmp/clusterID
-    command: 'bash -c ''if [ ! -f /tmp/update_run.sh ]; then echo "ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'''
-
-  schemaregistry0:
-    image: confluentinc/cp-schema-registry:7.2.1
-    depends_on:
-      - kafka0
-    environment:
-      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
-      SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
-      SCHEMA_REGISTRY_HOST_NAME: schemaregistry0
-      SCHEMA_REGISTRY_LISTENERS: http://schemaregistry0:8085
-
-      SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
-      SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
-      SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
-    ports:
-      - 8085:8085
-
-  kafka-connect0:
-    image: confluentinc/cp-kafka-connect:7.2.1
-    ports:
-      - 8083:8083
-    depends_on:
-      - kafka0
-      - schemaregistry0
-    environment:
-      CONNECT_BOOTSTRAP_SERVERS: kafka0:29092
-      CONNECT_GROUP_ID: compose-connect-group
-      CONNECT_CONFIG_STORAGE_TOPIC: _connect_configs
-      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
-      CONNECT_OFFSET_STORAGE_TOPIC: _connect_offset
-      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
-      CONNECT_STATUS_STORAGE_TOPIC: _connect_status
-      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
-      CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
-      CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
-      CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.storage.StringConverter
-      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
-      CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
-      CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
-      CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect0
-      CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
-
-  kafka-init-topics:
-    image: confluentinc/cp-kafka:7.2.1
-    volumes:
-      - ./message.json:/data/message.json
-    depends_on:
-      - kafka0
-    command: "bash -c 'echo Waiting for Kafka to be ready... && \
-      cub kafka-ready -b kafka0:29092 1 30 && \
-      kafka-topics --create --topic users --partitions 3 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
-      kafka-topics --create --topic messages --partitions 2 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
-      kafka-console-producer --bootstrap-server kafka0:29092 --topic users < /data/message.json'"

+ 1 - 1
documentation/compose/kafka-ui-arm64.yaml

@@ -93,7 +93,7 @@ services:
   kafka-init-topics:
     image: confluentinc/cp-kafka:7.2.1.arm64
     volumes:
-       - ./message.json:/data/message.json
+       - ./data/message.json:/data/message.json
     depends_on:
       - kafka0
     command: "bash -c 'echo Waiting for Kafka to be ready... && \

+ 2 - 2
documentation/compose/kafka-ui-connectors-auth.yaml

@@ -69,7 +69,7 @@ services:
     build:
       context: ./kafka-connect
       args:
-        image: confluentinc/cp-kafka-connect:6.0.1
+        image: confluentinc/cp-kafka-connect:7.2.1
     ports:
       - 8083:8083
     depends_on:
@@ -104,7 +104,7 @@ services:
   kafka-init-topics:
     image: confluentinc/cp-kafka:7.2.1
     volumes:
-      - ./message.json:/data/message.json
+      - ./data/message.json:/data/message.json
     depends_on:
       - kafka0
     command: "bash -c 'echo Waiting for Kafka to be ready... && \

+ 2 - 2
documentation/compose/kafka-ui.yaml

@@ -115,7 +115,7 @@ services:
       SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
 
   kafka-connect0:
-    image: confluentinc/cp-kafka-connect:6.0.1
+    image: confluentinc/cp-kafka-connect:7.2.1
     ports:
       - 8083:8083
     depends_on:
@@ -142,7 +142,7 @@ services:
   kafka-init-topics:
     image: confluentinc/cp-kafka:7.2.1
     volumes:
-       - ./message.json:/data/message.json
+       - ./data/message.json:/data/message.json
     depends_on:
       - kafka1
     command: "bash -c 'echo Waiting for Kafka to be ready... && \

+ 1 - 1
documentation/compose/kafka-with-zookeeper.yaml

@@ -38,7 +38,7 @@ services:
   kafka-init-topics:
     image: confluentinc/cp-kafka:7.2.1
     volumes:
-       - ./message.json:/data/message.json
+       - ./data/message.json:/data/message.json
     depends_on:
       - kafka
     command: "bash -c 'echo Waiting for Kafka to be ready... && \

+ 12 - 15
documentation/compose/auth-ldap.yaml → documentation/compose/ldap.yaml

@@ -15,26 +15,23 @@ services:
       KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
       KAFKA_CLUSTERS_0_METRICS_PORT: 9997
       KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://schemaregistry0:8085
+
       AUTH_TYPE: "LDAP"
       SPRING_LDAP_URLS: "ldap://ldap:10389"
-      SPRING_LDAP_DN_PATTERN: "cn={0},ou=people,dc=planetexpress,dc=com"
-
-#     ===== USER SEARCH FILTER INSTEAD OF DN =====
-
-#     SPRING_LDAP_USERFILTER_SEARCHBASE: "dc=planetexpress,dc=com"
-#     SPRING_LDAP_USERFILTER_SEARCHFILTER: "(&(uid={0})(objectClass=inetOrgPerson))"
-#     LDAP ADMIN USER
-#     SPRING_LDAP_ADMINUSER: "cn=admin,dc=planetexpress,dc=com"
-#     SPRING_LDAP_ADMINPASSWORD: "GoodNewsEveryone"
-
-#     ===== ACTIVE DIRECTORY =====
-
-#      OAUTH2.LDAP.ACTIVEDIRECTORY: true
-#      OAUTH2.LDAP.AСTIVEDIRECTORY.DOMAIN: "memelord.lol"
+      SPRING_LDAP_BASE: "cn={0},ou=people,dc=planetexpress,dc=com"
+      SPRING_LDAP_ADMIN_USER: "cn=admin,dc=planetexpress,dc=com"
+      SPRING_LDAP_ADMIN_PASSWORD: "GoodNewsEveryone"
+      SPRING_LDAP_USER_FILTER_SEARCH_BASE: "dc=planetexpress,dc=com"
+      SPRING_LDAP_USER_FILTER_SEARCH_FILTER: "(&(uid={0})(objectClass=inetOrgPerson))"
+      SPRING_LDAP_GROUP_FILTER_SEARCH_BASE: "ou=people,dc=planetexpress,dc=com"
+#     OAUTH2.LDAP.ACTIVEDIRECTORY: true
+#     OAUTH2.LDAP.AСTIVEDIRECTORY.DOMAIN: "memelord.lol"
 
   ldap:
     image: rroemhild/test-openldap:latest
     hostname: "ldap"
+    ports:
+      - 10389:10389
 
   kafka0:
     image: confluentinc/cp-kafka:7.2.1
@@ -79,4 +76,4 @@ services:
 
       SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
       SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
-      SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
+      SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas

+ 1 - 1
documentation/compose/kafka-ui-reverse-proxy.yaml → documentation/compose/nginx-proxy.yaml

@@ -4,7 +4,7 @@ services:
   nginx:
     image: nginx:latest
     volumes:
-      - ./proxy.conf:/etc/nginx/conf.d/default.conf
+      - ./data/proxy.conf:/etc/nginx/conf.d/default.conf
     ports:
       - 8080:80
 

+ 0 - 22
documentation/compose/oauth-cognito.yaml

@@ -1,22 +0,0 @@
----
-version: '3.4'
-services:
-
-  kafka-ui:
-    container_name: kafka-ui
-    image: provectuslabs/kafka-ui:local
-    ports:
-      - 8080:8080
-    depends_on:
-      - kafka0 # OMITTED, TAKE UP AN EXAMPLE FROM OTHER COMPOSE FILES
-    environment:
-      KAFKA_CLUSTERS_0_NAME: local
-      KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SSL
-      KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
-      AUTH_TYPE: OAUTH2_COGNITO
-      AUTH_COGNITO_ISSUER_URI: "https://cognito-idp.eu-central-1.amazonaws.com/eu-central-xxxxxx"
-      AUTH_COGNITO_CLIENT_ID: ""
-      AUTH_COGNITO_CLIENT_SECRET: ""
-      AUTH_COGNITO_SCOPE: "openid"
-      AUTH_COGNITO_USER_NAME_ATTRIBUTE: "username"
-      AUTH_COGNITO_LOGOUT_URI: "https://<domain>.auth.eu-central-1.amazoncognito.com/logout"

+ 0 - 0
documentation/compose/kafka-ui-traefik-proxy.yaml → documentation/compose/traefik-proxy.yaml


+ 3 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java

@@ -28,7 +28,7 @@ public class ConsumerGroupMapper {
     consumerGroup.setTopics(1); //for ui backward-compatibility, need to rm usage from ui
     consumerGroup.setGroupId(c.getGroupId());
     consumerGroup.setMembers(c.getMembers());
-    consumerGroup.setMessagesBehind(c.getMessagesBehind());
+    consumerGroup.setConsumerLag(c.getConsumerLag());
     consumerGroup.setSimple(c.isSimple());
     consumerGroup.setPartitionAssignor(c.getPartitionAssignor());
     consumerGroup.setState(mapConsumerGroupState(c.getState()));
@@ -54,7 +54,7 @@ public class ConsumerGroupMapper {
           .orElse(0L);
 
       partition.setEndOffset(endOffset.orElse(0L));
-      partition.setMessagesBehind(behind);
+      partition.setConsumerLag(behind);
 
       partitionMap.put(entry.getKey(), partition);
     }
@@ -80,7 +80,7 @@ public class ConsumerGroupMapper {
       InternalConsumerGroup c, T consumerGroup) {
     consumerGroup.setGroupId(c.getGroupId());
     consumerGroup.setMembers(c.getMembers().size());
-    consumerGroup.setMessagesBehind(c.getMessagesBehind());
+    consumerGroup.setConsumerLag(c.getConsumerLag());
     consumerGroup.setTopics(c.getTopicNum());
     consumerGroup.setSimple(c.isSimple());
 

+ 7 - 7
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java

@@ -21,7 +21,7 @@ public class InternalConsumerGroup {
   private final Collection<InternalMember> members;
   private final Map<TopicPartition, Long> offsets;
   private final Map<TopicPartition, Long> endOffsets;
-  private final Long messagesBehind;
+  private final Long consumerLag;
   private final Integer topicNum;
   private final String partitionAssignor;
   private final ConsumerGroupState state;
@@ -50,17 +50,17 @@ public class InternalConsumerGroup {
     builder.members(internalMembers);
     builder.offsets(groupOffsets);
     builder.endOffsets(topicEndOffsets);
-    builder.messagesBehind(calculateMessagesBehind(groupOffsets, topicEndOffsets));
+    builder.consumerLag(calculateConsumerLag(groupOffsets, topicEndOffsets));
     builder.topicNum(calculateTopicNum(groupOffsets, internalMembers));
     Optional.ofNullable(description.coordinator()).ifPresent(builder::coordinator);
     return builder.build();
   }
 
-  private static Long calculateMessagesBehind(Map<TopicPartition, Long> offsets, Map<TopicPartition, Long> endOffsets) {
-    Long messagesBehind = null;
-    // messagesBehind should be undefined if no committed offsets found for topic
+  private static Long calculateConsumerLag(Map<TopicPartition, Long> offsets, Map<TopicPartition, Long> endOffsets) {
+    Long consumerLag = null;
+    // consumerLag should be undefined if no committed offsets found for topic
     if (!offsets.isEmpty()) {
-      messagesBehind = offsets.entrySet().stream()
+      consumerLag = offsets.entrySet().stream()
           .mapToLong(e ->
               Optional.ofNullable(endOffsets)
                   .map(o -> o.get(e.getKey()))
@@ -69,7 +69,7 @@ public class InternalConsumerGroup {
           ).sum();
     }
 
-    return messagesBehind;
+    return consumerLag;
   }
 
   private static Integer calculateTopicNum(Map<TopicPartition, Long> offsets, Collection<InternalMember> members) {

+ 4 - 4
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalTopicConsumerGroup.java

@@ -17,7 +17,7 @@ public class InternalTopicConsumerGroup {
   String groupId;
   int members;
   @Nullable
-  Long messagesBehind; //null means no committed offsets found for this group
+  Long consumerLag; //null means no committed offsets found for this group
   boolean isSimple;
   String partitionAssignor;
   ConsumerGroupState state;
@@ -37,7 +37,7 @@ public class InternalTopicConsumerGroup {
                 .filter(m -> m.assignment().topicPartitions().stream().anyMatch(p -> p.topic().equals(topic)))
                 .count()
         )
-        .messagesBehind(calculateMessagesBehind(committedOffsets, endOffsets))
+        .consumerLag(calculateConsumerLag(committedOffsets, endOffsets))
         .isSimple(g.isSimpleConsumerGroup())
         .partitionAssignor(g.partitionAssignor())
         .state(g.state())
@@ -46,8 +46,8 @@ public class InternalTopicConsumerGroup {
   }
 
   @Nullable
-  private static Long calculateMessagesBehind(Map<TopicPartition, Long> committedOffsets,
-                                              Map<TopicPartition, Long> endOffsets) {
+  private static Long calculateConsumerLag(Map<TopicPartition, Long> committedOffsets,
+                                           Map<TopicPartition, Long> endOffsets) {
     if (committedOffsets.isEmpty()) {
       return null;
     }

+ 24 - 4
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdeInstance.java

@@ -42,19 +42,39 @@ public class SerdeInstance implements Closeable {
   }
 
   public Optional<SchemaDescription> getSchema(String topic, Serde.Target type) {
-    return wrapWithClassloader(() -> serde.getSchema(topic, type));
+    try {
+      return wrapWithClassloader(() -> serde.getSchema(topic, type));
+    } catch (Exception e) {
+      log.warn("Error getting schema for '{}'({}) with serde '{}'", topic, type, name, e);
+      return Optional.empty();
+    }
   }
 
   public Optional<String> description() {
-    return wrapWithClassloader(serde::getDescription);
+    try {
+      return wrapWithClassloader(serde::getDescription);
+    } catch (Exception e) {
+      log.warn("Error getting description serde '{}'", name, e);
+      return Optional.empty();
+    }
   }
 
   public boolean canSerialize(String topic, Serde.Target type) {
-    return wrapWithClassloader(() -> serde.canSerialize(topic, type));
+    try {
+      return wrapWithClassloader(() -> serde.canSerialize(topic, type));
+    } catch (Exception e) {
+      log.warn("Error calling canSerialize for '{}'({}) with serde '{}'", topic, type, name, e);
+      return false;
+    }
   }
 
   public boolean canDeserialize(String topic, Serde.Target type) {
-    return wrapWithClassloader(() -> serde.canDeserialize(topic, type));
+    try {
+      return wrapWithClassloader(() -> serde.canDeserialize(topic, type));
+    } catch (Exception e) {
+      log.warn("Error calling canDeserialize for '{}'({}) with serde '{}'", topic, type, name, e);
+      return false;
+    }
   }
 
   public Serde.Serializer serializer(String topic, Serde.Target type) {

+ 33 - 37
kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java

@@ -189,39 +189,40 @@ public class SchemaRegistrySerde implements BuiltInSerde {
   public Optional<SchemaDescription> getSchema(String topic, Target type) {
     String subject = schemaSubject(topic, type);
     return getSchemaBySubject(subject)
-        .map(schemaMetadata ->
-            new SchemaDescription(
-                convertSchema(schemaMetadata),
-                Map.of(
-                    "subject", subject,
-                    "schemaId", schemaMetadata.getId(),
-                    "latestVersion", schemaMetadata.getVersion(),
-                    "type", schemaMetadata.getSchemaType() // AVRO / PROTOBUF / JSON
-                )
-            ));
+        .flatMap(schemaMetadata ->
+            //schema can be not-found, when schema contexts configured improperly
+            getSchemaById(schemaMetadata.getId())
+                .map(parsedSchema ->
+                    new SchemaDescription(
+                        convertSchema(schemaMetadata, parsedSchema),
+                        Map.of(
+                            "subject", subject,
+                            "schemaId", schemaMetadata.getId(),
+                            "latestVersion", schemaMetadata.getVersion(),
+                            "type", schemaMetadata.getSchemaType() // AVRO / PROTOBUF / JSON
+                        )
+                    )));
   }
 
   @SneakyThrows
-  private String convertSchema(SchemaMetadata schema) {
+  private String convertSchema(SchemaMetadata schema, ParsedSchema parsedSchema) {
     URI basePath = new URI(schemaRegistryUrls.get(0))
         .resolve(Integer.toString(schema.getId()));
-    ParsedSchema schemaById = schemaRegistryClient.getSchemaById(schema.getId());
     SchemaType schemaType = SchemaType.fromString(schema.getSchemaType())
         .orElseThrow(() -> new IllegalStateException("Unknown schema type: " + schema.getSchemaType()));
-    switch (schemaType) {
-      case PROTOBUF:
-        return new ProtobufSchemaConverter()
-            .convert(basePath, ((ProtobufSchema) schemaById).toDescriptor())
-            .toJson();
-      case AVRO:
-        return new AvroJsonSchemaConverter()
-            .convert(basePath, ((AvroSchema) schemaById).rawSchema())
-            .toJson();
-      case JSON:
-        return schema.getSchema();
-      default:
-        throw new IllegalStateException();
-    }
+    return switch (schemaType) {
+      case PROTOBUF -> new ProtobufSchemaConverter()
+          .convert(basePath, ((ProtobufSchema) parsedSchema).toDescriptor())
+          .toJson();
+      case AVRO -> new AvroJsonSchemaConverter()
+          .convert(basePath, ((AvroSchema) parsedSchema).rawSchema())
+          .toJson();
+      case JSON -> schema.getSchema();
+    };
+  }
+
+  private Optional<ParsedSchema> getSchemaById(int id) {
+    return wrapWith404Handler(() -> schemaRegistryClient.getSchemaById(id));
   }
 
   private Optional<SchemaMetadata> getSchemaBySubject(String subject) {
@@ -253,16 +254,11 @@ public class SchemaRegistrySerde implements BuiltInSerde {
     boolean isKey = type == Target.KEY;
     SchemaType schemaType = SchemaType.fromString(schema.getSchemaType())
         .orElseThrow(() -> new IllegalStateException("Unknown schema type: " + schema.getSchemaType()));
-    switch (schemaType) {
-      case PROTOBUF:
-        return new ProtobufSchemaRegistrySerializer(topic, isKey, schemaRegistryClient, schema);
-      case AVRO:
-        return new AvroSchemaRegistrySerializer(topic, isKey, schemaRegistryClient, schema);
-      case JSON:
-        return new JsonSchemaSchemaRegistrySerializer(topic, isKey, schemaRegistryClient, schema);
-      default:
-        throw new IllegalStateException();
-    }
+    return switch (schemaType) {
+      case PROTOBUF -> new ProtobufSchemaRegistrySerializer(topic, isKey, schemaRegistryClient, schema);
+      case AVRO -> new AvroSchemaRegistrySerializer(topic, isKey, schemaRegistryClient, schema);
+      case JSON -> new JsonSchemaSchemaRegistrySerializer(topic, isKey, schemaRegistryClient, schema);
+    };
   }
 
   @Override
@@ -297,7 +293,7 @@ public class SchemaRegistrySerde implements BuiltInSerde {
   }
 
   private SchemaType getMessageFormatBySchemaId(int schemaId) {
-    return wrapWith404Handler(() -> schemaRegistryClient.getSchemaById(schemaId))
+    return getSchemaById(schemaId)
         .map(ParsedSchema::schemaType)
         .flatMap(SchemaType::fromString)
         .orElseThrow(() -> new ValidationException(String.format("Schema for id '%d' not found ", schemaId)));

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java

@@ -164,7 +164,7 @@ public class ConsumerGroupService {
       case MESSAGES_BEHIND -> {
 
         Comparator<GroupWithDescr> comparator = Comparator.comparingLong(gwd ->
-            gwd.icg.getMessagesBehind() == null ? 0L : gwd.icg.getMessagesBehind());
+            gwd.icg.getConsumerLag() == null ? 0L : gwd.icg.getConsumerLag());
 
         yield loadDescriptionsByInternalConsumerGroups(ac, groups, comparator, pageNum, perPage, sortOrderDto);
       }

+ 5 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java

@@ -28,6 +28,8 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor {
   private static final String ORGANIZATION_NAME = "login";
   private static final String GITHUB_ACCEPT_HEADER = "application/vnd.github+json";
   private static final String DUMMY = "dummy";
+  // The number of results (max 100) per page of list organizations for authenticated user.
+  private static final Integer ORGANIZATIONS_PER_PAGE = 100;
 
   @Override
   public boolean isApplicable(String provider, Map<String, String> customParams) {
@@ -83,7 +85,9 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor {
 
     final Mono<List<Map<String, Object>>> userOrganizations = webClient
         .get()
-        .uri("/orgs")
+        .uri(uriBuilder -> uriBuilder.path("/orgs")
+            .queryParam("per_page", ORGANIZATIONS_PER_PAGE)
+            .build())
         .headers(headers -> {
           headers.set(HttpHeaders.ACCEPT, GITHUB_ACCEPT_HEADER);
           OAuth2UserRequest request = (OAuth2UserRequest) additionalParams.get("request");

+ 1 - 2
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/GithubReleaseInfo.java

@@ -3,7 +3,6 @@ package com.provectus.kafka.ui.util;
 import com.google.common.annotations.VisibleForTesting;
 import java.time.Duration;
 import lombok.extern.slf4j.Slf4j;
-import org.springframework.web.reactive.function.client.WebClient;
 import reactor.core.publisher.Mono;
 
 @Slf4j
@@ -31,7 +30,7 @@ public class GithubReleaseInfo {
 
   @VisibleForTesting
   GithubReleaseInfo(String url) {
-    this.refreshMono = WebClient.create()
+    this.refreshMono = new WebClientConfigurator().build()
         .get()
         .uri(url)
         .exchangeToMono(resp -> resp.bodyToMono(GithubReleaseDto.class))

+ 6 - 4
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/WebClientConfigurator.java

@@ -5,11 +5,8 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
 import com.provectus.kafka.ui.config.ClustersProperties;
 import com.provectus.kafka.ui.exception.ValidationException;
-import io.netty.buffer.ByteBufAllocator;
-import io.netty.handler.ssl.JdkSslContext;
 import io.netty.handler.ssl.SslContext;
 import io.netty.handler.ssl.SslContextBuilder;
-import io.netty.handler.ssl.SslProvider;
 import java.io.FileInputStream;
 import java.security.KeyStore;
 import java.util.function.Consumer;
@@ -93,7 +90,12 @@ public class WebClientConfigurator {
     // Create webclient
     SslContext context = contextBuilder.build();
 
-    builder.clientConnector(new ReactorClientHttpConnector(HttpClient.create().secure(t -> t.sslContext(context))));
+    var httpClient = HttpClient
+        .create()
+        .secure(t -> t.sslContext(context))
+        .proxyWithSystemProperties();
+
+    builder.clientConnector(new ReactorClientHttpConnector(httpClient));
     return this;
   }
 

+ 0 - 10
kafka-ui-api/src/main/resources/application-gauth.yml

@@ -1,10 +0,0 @@
-auth:
-  type: OAUTH2
-spring:
-  security:
-    oauth2:
-      client:
-        registration:
-          google:
-            client-id: [put your client id here]
-            client-secret: [put your client secret here]

+ 120 - 58
kafka-ui-api/src/main/resources/application-local.yml

@@ -5,15 +5,27 @@ logging:
     #org.springframework.http.codec.json.Jackson2JsonEncoder: DEBUG
     #org.springframework.http.codec.json.Jackson2JsonDecoder: DEBUG
     reactor.netty.http.server.AccessLog: INFO
+    org.springframework.security: DEBUG
 
 #server:
 #  port: 8080 #- Port in which kafka-ui will run.
 
+spring:
+  jmx:
+    enabled: true
+  ldap:
+    urls: ldap://localhost:10389
+    base: "cn={0},ou=people,dc=planetexpress,dc=com"
+    admin-user: "cn=admin,dc=planetexpress,dc=com"
+    admin-password: "GoodNewsEveryone"
+    user-filter-search-base: "dc=planetexpress,dc=com"
+    user-filter-search-filter: "(&(uid={0})(objectClass=inetOrgPerson))"
+    group-filter-search-base: "ou=people,dc=planetexpress,dc=com"
+
 kafka:
   clusters:
     - name: local
       bootstrapServers: localhost:9092
-      zookeeper: localhost:2181
       schemaRegistry: http://localhost:8085
       ksqldbServer: http://localhost:8088
       kafkaConnect:
@@ -22,63 +34,113 @@ kafka:
       metrics:
         port: 9997
         type: JMX
-  #    -
-  #      name: secondLocal
-  #      bootstrapServers: localhost:9093
-  #      zookeeper: localhost:2182
-  #      schemaRegistry: http://localhost:18085
-  #      kafkaConnect:
-  #        - name: first
-  #          address: http://localhost:8083
-  #      metrics:
-  #        port: 9998
-  #        type: JMX
-  #      read-only: true
-  #    -
-  #      name: localUsingProtobufFile
-  #      bootstrapServers: localhost:9092
-  #      protobufFile: messages.proto
-  #      protobufMessageName: GenericMessage
-  #      protobufMessageNameByTopic:
-  #        input-topic: InputMessage
-  #        output-topic: OutputMessage
-spring:
-  jmx:
-    enabled: true
+
+dynamic.config.enabled: true
+
+oauth2:
+  ldap:
+    activeDirectory: false
+    activeDirectory.domain: domain.com
 
 auth:
   type: DISABLED
-#  type: OAUTH2
-#  oauth2:
-#    client:
-#      cognito:
-#        clientId:
-#        clientSecret:
-#        scope: openid
-#        client-name: cognito
-#        provider: cognito
-#        redirect-uri: http://localhost:8080/login/oauth2/code/cognito
-#        authorization-grant-type: authorization_code
-#        issuer-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj
-#        jwk-set-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj/.well-known/jwks.json
-#        user-name-attribute: username
-#        custom-params:
-#          type: cognito
-#          logoutUrl: https://kafka-ui.auth.eu-central-1.amazoncognito.com/logout
-#      google:
-#        provider: google
-#        clientId:
-#        clientSecret:
-#        user-name-attribute: email
-#        custom-params:
-#          type: google
-#          allowedDomain: provectus.com
-#      github:
-#        provider: github
-#        clientId:
-#        clientSecret:
-#        scope:
-#          - read:org
-#        user-name-attribute: login
-#        custom-params:
-#          type: github
+  #  type: OAUTH2
+  #  type: LDAP
+  oauth2:
+    client:
+      cognito:
+        clientId: # CLIENT ID
+        clientSecret: # CLIENT SECRET
+        scope: openid
+        client-name: cognito
+        provider: cognito
+        redirect-uri: http://localhost:8080/login/oauth2/code/cognito
+        authorization-grant-type: authorization_code
+        issuer-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj
+        jwk-set-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj/.well-known/jwks.json
+        user-name-attribute: cognito:username
+        custom-params:
+          type: cognito
+          logoutUrl: https://kafka-ui.auth.eu-central-1.amazoncognito.com/logout
+      google:
+        provider: google
+        clientId: # CLIENT ID
+        clientSecret: # CLIENT SECRET
+        user-name-attribute: email
+        custom-params:
+          type: google
+          allowedDomain: provectus.com
+      github:
+        provider: github
+        clientId: # CLIENT ID
+        clientSecret: # CLIENT SECRET
+        scope:
+          - read:org
+        user-name-attribute: login
+        custom-params:
+          type: github
+
+rbac:
+  roles:
+    - name: "memelords"
+      clusters:
+        - local
+      subjects:
+        - provider: oauth_google
+          type: domain
+          value: "provectus.com"
+        - provider: oauth_google
+          type: user
+          value: "name@provectus.com"
+
+        - provider: oauth_github
+          type: organization
+          value: "provectus"
+        - provider: oauth_github
+          type: user
+          value: "memelord"
+
+        - provider: oauth_cognito
+          type: user
+          value: "username"
+        - provider: oauth_cognito
+          type: group
+          value: "memelords"
+
+        - provider: ldap
+          type: group
+          value: "admin_staff"
+
+        # NOT IMPLEMENTED YET
+      #        - provider: ldap_ad
+      #          type: group
+      #          value: "admin_staff"
+
+      permissions:
+        - resource: applicationconfig
+          actions: all
+
+        - resource: clusterconfig
+          actions: all
+
+        - resource: topic
+          value: ".*"
+          actions: all
+
+        - resource: consumer
+          value: ".*"
+          actions: all
+
+        - resource: schema
+          value: ".*"
+          actions: all
+
+        - resource: connect
+          value: ".*"
+          actions: all
+
+        - resource: ksql
+          actions: all
+
+        - resource: acl
+          actions: all

+ 0 - 13
kafka-ui-api/src/main/resources/application-sdp.yml

@@ -1,13 +0,0 @@
-kafka:
-  clusters:
-    - name: local
-      bootstrapServers: b-1.kad-msk.57w67o.c6.kafka.eu-central-1.amazonaws.com:9094
-      properties:
-        security.protocol: SSL
-#      zookeeper: localhost:2181
-#      schemaRegistry: http://kad-ecs-application-lb-857515197.eu-west-1.elb.amazonaws.com:9000/api/schema-registry
-  #    -
-  #      name: secondLocal
-  #      zookeeper: zookeeper1:2181
-  #      bootstrapServers: kafka1:29092
-  #      schemaRegistry: http://schemaregistry1:8085

+ 2 - 2
kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml

@@ -2558,7 +2558,7 @@ components:
           $ref: "#/components/schemas/ConsumerGroupState"
         coordinator:
           $ref: "#/components/schemas/Broker"
-        messagesBehind:
+        consumerLag:
           type: integer
           format: int64
           description: null if consumer group has no offsets committed
@@ -2776,7 +2776,7 @@ components:
         endOffset:
           type: integer
           format: int64
-        messagesBehind:
+        consumerLag:
           type: integer
           format: int64
           description: null if consumer group has no offsets committed

+ 13 - 0
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java

@@ -16,6 +16,8 @@ import java.util.stream.Stream;
 
 public class BrokersConfigTab extends BasePage {
 
+  protected SelenideElement sourceInfoIcon = $x("//div[text()='Source']/..//div/div[@class]");
+  protected SelenideElement sourceInfoTooltip = $x("//div[text()='Source']/..//div/div[@style]");
   protected ElementsCollection editBtns = $$x("//button[@aria-label='editAction']");
 
   @Step
@@ -25,6 +27,17 @@ public class BrokersConfigTab extends BasePage {
     return this;
   }
 
+  @Step
+  public BrokersConfigTab hoverOnSourceInfoIcon() {
+    sourceInfoIcon.shouldBe(Condition.visible).hover();
+    return this;
+  }
+
+  @Step
+  public String getSourceInfoTooltipText() {
+    return sourceInfoTooltip.shouldBe(Condition.visible).getText().trim();
+  }
+
   @Step
   public boolean isSearchByKeyVisible() {
     return isVisible(searchFld);

+ 15 - 0
kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Expected.java

@@ -0,0 +1,15 @@
+package com.provectus.kafka.ui.variables;
+
+public interface Expected {
+
+  String BROKER_SOURCE_INFO_TOOLTIP =
+      "DYNAMIC_TOPIC_CONFIG = dynamic topic config that is configured for a specific topic\n"
+          + "DYNAMIC_BROKER_LOGGER_CONFIG = dynamic broker logger config that is configured for a specific broker\n"
+          + "DYNAMIC_BROKER_CONFIG = dynamic broker config that is configured for a specific broker\n"
+          + "DYNAMIC_DEFAULT_BROKER_CONFIG = dynamic broker config that is configured as default "
+          + "for all brokers in the cluster\n"
+          + "STATIC_BROKER_CONFIG = static broker config provided as broker properties at start up "
+          + "(e.g. server.properties file)\n"
+          + "DEFAULT_CONFIG = built-in default configuration for configs that have a default value\n"
+          + "UNKNOWN = source unknown e.g. in the ConfigEntry used for alter requests where source is not set";
+}

+ 11 - 18
kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java

@@ -15,87 +15,80 @@ import org.testng.annotations.Test;
 
 public class SmokeBacklog extends BaseManualTest {
 
-  @Automation(state = TO_BE_AUTOMATED)
-  @Suite(id = BROKERS_SUITE_ID)
-  @QaseId(331)
-  @Test
-  public void testCaseA() {
-  }
-
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = BROKERS_SUITE_ID)
   @QaseId(332)
   @Test
-  public void testCaseB() {
+  public void testCaseA() {
   }
 
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = TOPICS_PROFILE_SUITE_ID)
   @QaseId(335)
   @Test
-  public void testCaseC() {
+  public void testCaseB() {
   }
 
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = TOPICS_PROFILE_SUITE_ID)
   @QaseId(336)
   @Test
-  public void testCaseD() {
+  public void testCaseC() {
   }
 
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = TOPICS_PROFILE_SUITE_ID)
   @QaseId(343)
   @Test
-  public void testCaseE() {
+  public void testCaseD() {
   }
 
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = SCHEMAS_SUITE_ID)
   @QaseId(345)
   @Test
-  public void testCaseF() {
+  public void testCaseE() {
   }
 
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = SCHEMAS_SUITE_ID)
   @QaseId(346)
   @Test
-  public void testCaseG() {
+  public void testCaseF() {
   }
 
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = TOPICS_PROFILE_SUITE_ID)
   @QaseId(347)
   @Test
-  public void testCaseH() {
+  public void testCaseG() {
   }
 
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = BROKERS_SUITE_ID)
   @QaseId(348)
   @Test
-  public void testCaseI() {
+  public void testCaseH() {
   }
 
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = BROKERS_SUITE_ID)
   @QaseId(350)
   @Test
-  public void testCaseJ() {
+  public void testCaseI() {
   }
 
   @Automation(state = NOT_AUTOMATED)
   @Suite(id = TOPICS_SUITE_ID)
   @QaseId(50)
   @Test
-  public void testCaseK() {
+  public void testCaseJ() {
   }
 
   @Automation(state = NOT_AUTOMATED)
   @Suite(id = SCHEMAS_SUITE_ID)
   @QaseId(351)
   @Test
-  public void testCaseL() {
+  public void testCaseK() {
   }
 }

+ 13 - 0
kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java

@@ -1,6 +1,7 @@
 package com.provectus.kafka.ui.smokesuite.brokers;
 
 import static com.provectus.kafka.ui.pages.brokers.BrokersDetails.DetailsTab.CONFIGS;
+import static com.provectus.kafka.ui.variables.Expected.BROKER_SOURCE_INFO_TOOLTIP;
 
 import com.codeborne.selenide.Condition;
 import com.provectus.kafka.ui.BaseTest;
@@ -69,4 +70,16 @@ public class BrokersTest extends BaseTest {
             .toList().contains(anyConfigKey),
         String.format("getAllConfigs().contains(%s)", anyConfigKey));
   }
+
+  @QaseId(331)
+  @Test
+  public void brokersSourceInfoCheck() {
+    navigateToBrokersAndOpenDetails(DEFAULT_BROKER_ID);
+    brokersDetails
+        .openDetailsTab(CONFIGS);
+    String sourceInfoTooltip = brokersConfigTab
+        .hoverOnSourceInfoIcon()
+        .getSourceInfoTooltipText();
+    Assert.assertEquals(sourceInfoTooltip, BROKER_SOURCE_INFO_TOOLTIP, "brokerSourceInfoTooltip");
+  }
 }

+ 2 - 2
kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx

@@ -110,7 +110,7 @@ const Details: React.FC = () => {
             {consumerGroup.data?.coordinator?.id}
           </Metrics.Indicator>
           <Metrics.Indicator label="Total lag">
-            {consumerGroup.data?.messagesBehind}
+            {consumerGroup.data?.consumerLag}
           </Metrics.Indicator>
         </Metrics.Section>
       </Metrics.Wrapper>
@@ -121,7 +121,7 @@ const Details: React.FC = () => {
         <thead>
           <tr>
             <TableHeaderCell title="Topic" />
-            <TableHeaderCell title="Messages behind" />
+            <TableHeaderCell title="Consumer Lag" />
           </tr>
         </thead>
         <tbody>

+ 3 - 3
kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.tsx

@@ -19,10 +19,10 @@ interface Props {
 const ListItem: React.FC<Props> = ({ clusterName, name, consumers }) => {
   const [isOpen, setIsOpen] = React.useState(false);
 
-  const getTotalMessagesBehind = () => {
+  const getTotalConsumerLag = () => {
    let count = 0;
    consumers.forEach((consumer) => {
-      count += consumer?.messagesBehind || 0;
+      count += consumer?.consumerLag || 0;
    });
    return count;
  };
@@ -40,7 +40,7 @@ const ListItem: React.FC<Props> = ({ clusterName, name, consumers }) => {
            </TableKeyLink>
          </FlexWrapper>
        </td>
-        <td>{getTotalMessagesBehind()}</td>
+        <td>{getTotalConsumerLag()}</td>
       </tr>
       {isOpen && <TopicContents consumers={consumers} />}
     </>

+ 3 - 3
kafka-ui-react-app/src/components/ConsumerGroups/Details/TopicContents/TopicContents.tsx

@@ -19,7 +19,7 @@ const TABLE_HEADERS_MAP: Headers[] = [
   { title: 'Partition', orderBy: 'partition' },
   { title: 'Consumer ID', orderBy: 'consumerId' },
   { title: 'Host', orderBy: 'host' },
-  { title: 'Messages Behind', orderBy: 'messagesBehind' },
+  { title: 'Consumer Lag', orderBy: 'consumerLag' },
   { title: 'Current Offset', orderBy: 'currentOffset' },
   { title: 'End offset', orderBy: 'endOffset' },
 ];
@@ -108,7 +108,7 @@ const TopicContents: React.FC<Props> = ({ consumers }) => {
         orderBy === 'partition' ||
         orderBy === 'currentOffset' ||
         orderBy === 'endOffset' ||
-        orderBy === 'messagesBehind';
+        orderBy === 'consumerLag';
 
       let comparator: ComparatorFunction<ConsumerGroupTopicPartition>;
       if (isNumberProperty) {
@@ -153,7 +153,7 @@ const TopicContents: React.FC<Props> = ({ consumers }) => {
                   <td>{consumer.partition}</td>
                   <td>{consumer.consumerId}</td>
                   <td>{consumer.host}</td>
-                  <td>{consumer.messagesBehind}</td>
+                  <td>{consumer.consumerLag}</td>
                   <td>{consumer.currentOffset}</td>
                   <td>{consumer.endOffset}</td>
                 </tr>

+ 2 - 2
kafka-ui-react-app/src/components/ConsumerGroups/List.tsx

@@ -57,8 +57,8 @@ const List = () => {
       },
       {
         id: ConsumerGroupOrdering.MESSAGES_BEHIND,
-        header: 'Messages Behind',
-        accessorKey: 'messagesBehind',
+        header: 'Consumer Lag',
+        accessorKey: 'consumerLag',
       },
       {
         header: 'Coordinator',

+ 2 - 2
kafka-ui-react-app/src/components/Topics/Topic/ConsumerGroups/TopicConsumerGroups.tsx

@@ -48,8 +48,8 @@ const TopicConsumerGroups: React.FC = () => {
         enableSorting: false,
       },
       {
-        header: 'Messages Behind',
-        accessorKey: 'messagesBehind',
+        header: 'Consumer Lag',
+        accessorKey: 'consumerLag',
         enableSorting: false,
       },
       {

+ 5 - 5
kafka-ui-react-app/src/lib/fixtures/consumerGroups.ts

@@ -11,14 +11,14 @@ export const consumerGroupPayload = {
     id: 2,
     host: 'b-2.kad-msk.st2jzq.c6.kafka.eu-west-1.amazonaws.com',
   },
-  messagesBehind: 0,
+  consumerLag: 0,
   partitions: [
     {
       topic: '__amazon_msk_canary',
       partition: 1,
       currentOffset: 0,
       endOffset: 0,
-      messagesBehind: 0,
+      consumerLag: 0,
       consumerId: undefined,
       host: undefined,
     },
@@ -27,7 +27,7 @@ export const consumerGroupPayload = {
       partition: 0,
       currentOffset: 56932,
       endOffset: 56932,
-      messagesBehind: 0,
+      consumerLag: 0,
       consumerId: undefined,
       host: undefined,
     },
@@ -36,7 +36,7 @@ export const consumerGroupPayload = {
       partition: 3,
       currentOffset: 56932,
       endOffset: 56932,
-      messagesBehind: 0,
+      consumerLag: 0,
       consumerId: undefined,
       host: undefined,
     },
@@ -45,7 +45,7 @@ export const consumerGroupPayload = {
       partition: 4,
       currentOffset: 56932,
       endOffset: 56932,
-      messagesBehind: 0,
+      consumerLag: 0,
       consumerId: undefined,
       host: undefined,
     },

+ 2 - 2
kafka-ui-react-app/src/lib/fixtures/topics.ts

@@ -63,7 +63,7 @@ export const topicConsumerGroups: ConsumerGroup[] = [
     partitionAssignor: '',
     state: ConsumerGroupState.UNKNOWN,
     coordinator: { id: 1 },
-    messagesBehind: 9,
+    consumerLag: 9,
   },
   {
     groupId: 'amazon.msk.canary.group.broker-4',
@@ -73,7 +73,7 @@ export const topicConsumerGroups: ConsumerGroup[] = [
     partitionAssignor: '',
     state: ConsumerGroupState.COMPLETING_REBALANCE,
     coordinator: { id: 1 },
-    messagesBehind: 9,
+    consumerLag: 9,
   },
 ];