
Merge branch 'master' into issues/1911-sonar-fixes

Ankit Verma 2 years ago
parent
commit
3168f7fc9d
15 changed files with 228 additions and 118 deletions
  1. + 11 - 1   kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java
  2. + 1 - 2    kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthPropertiesConverter.java
  3. + 11 - 11  kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java
  4. + 2 - 4    kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/logout/CognitoLogoutSuccessHandler.java
  5. + 4 - 0    kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/provider/Provider.java
  6. + 7 - 23   kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java
  7. + 63 - 40  kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
  8. + 20 - 19  kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java
  9. + 2 - 0    kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java
  10. + 6 - 3   kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/CognitoAuthorityExtractor.java
  11. + 4 - 2   kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java
  12. + 8 - 8   kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GoogleAuthorityExtractor.java
  13. + 85 - 3  kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/OauthAuthorityExtractor.java
  14. + 3 - 1   kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/ProviderAuthorityExtractor.java
  15. + 1 - 1   kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/DynamicConfigOperations.java

+ 11 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java

@@ -1,6 +1,7 @@
 package com.provectus.kafka.ui.config.auth;
 
 import jakarta.annotation.PostConstruct;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
@@ -14,7 +15,16 @@ public class OAuthProperties {
   private Map<String, OAuth2Provider> client = new HashMap<>();
 
   @PostConstruct
-  public void validate() {
+  public void init() {
+    getClient().values().forEach((provider) -> {
+      if (provider.getCustomParams() == null) {
+        provider.setCustomParams(Collections.emptyMap());
+      }
+      if (provider.getScope() == null) {
+        provider.setScope(Collections.emptySet());
+      }
+    });
+
     getClient().values().forEach(this::validateProvider);
   }
 

+ 1 - 2
kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthPropertiesConverter.java

@@ -73,8 +73,7 @@ public final class OAuthPropertiesConverter {
   }
 
   private static boolean isGoogle(OAuth2Provider provider) {
-    return provider.getCustomParams() != null
-        && GOOGLE.equalsIgnoreCase(provider.getCustomParams().get(TYPE));
+    return GOOGLE.equalsIgnoreCase(provider.getCustomParams().get(TYPE));
   }
 }
 

+ 11 - 11
kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java

@@ -72,13 +72,13 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
     final OidcReactiveOAuth2UserService delegate = new OidcReactiveOAuth2UserService();
     return request -> delegate.loadUser(request)
         .flatMap(user -> {
-          String providerId = request.getClientRegistration().getRegistrationId();
-          final var extractor = getExtractor(providerId, acs);
+          var provider = getProviderByProviderId(request.getClientRegistration().getRegistrationId());
+          final var extractor = getExtractor(provider, acs);
           if (extractor == null) {
             return Mono.just(user);
           }
 
-          return extractor.extract(acs, user, Map.of("request", request))
+          return extractor.extract(acs, user, Map.of("request", request, "provider", provider))
               .map(groups -> new RbacOidcUser(user, groups));
         });
   }
@@ -88,13 +88,13 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
     final DefaultReactiveOAuth2UserService delegate = new DefaultReactiveOAuth2UserService();
     return request -> delegate.loadUser(request)
         .flatMap(user -> {
-          String providerId = request.getClientRegistration().getRegistrationId();
-          final var extractor = getExtractor(providerId, acs);
+          var provider = getProviderByProviderId(request.getClientRegistration().getRegistrationId());
+          final var extractor = getExtractor(provider, acs);
           if (extractor == null) {
             return Mono.just(user);
           }
 
-          return extractor.extract(acs, user, Map.of("request", request))
+          return extractor.extract(acs, user, Map.of("request", request, "provider", provider))
               .map(groups -> new RbacOAuth2User(user, groups));
         });
   }
@@ -113,18 +113,18 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
   }
 
   @Nullable
-  private ProviderAuthorityExtractor getExtractor(final String providerId, AccessControlService acs) {
-    final String provider = getProviderByProviderId(providerId);
+  private ProviderAuthorityExtractor getExtractor(final OAuthProperties.OAuth2Provider provider,
+                                                  AccessControlService acs) {
     Optional<ProviderAuthorityExtractor> extractor = acs.getOauthExtractors()
         .stream()
-        .filter(e -> e.isApplicable(provider))
+        .filter(e -> e.isApplicable(provider.getProvider(), provider.getCustomParams()))
         .findFirst();
 
     return extractor.orElse(null);
   }
 
-  private String getProviderByProviderId(final String providerId) {
-    return properties.getClient().get(providerId).getProvider();
+  private OAuthProperties.OAuth2Provider getProviderByProviderId(final String providerId) {
+    return properties.getClient().get(providerId);
   }
 
 }

+ 2 - 4
kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/logout/CognitoLogoutSuccessHandler.java

@@ -46,10 +46,8 @@ public class CognitoLogoutSuccessHandler implements LogoutSuccessHandler {
         .fragment(null)
         .build();
 
-    Assert.isTrue(
-        provider.getCustomParams() != null && provider.getCustomParams().containsKey("logoutUrl"),
-        "Custom params should contain 'logoutUrl'"
-    );
+    Assert.isTrue(provider.getCustomParams().containsKey("logoutUrl"),
+        "Custom params should contain 'logoutUrl'");
     final var uri = UriComponentsBuilder
         .fromUri(URI.create(provider.getCustomParams().get("logoutUrl")))
         .queryParam("client_id", provider.getClientId())

+ 4 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/provider/Provider.java

@@ -10,6 +10,8 @@ public enum Provider {
 
   OAUTH_COGNITO,
 
+  OAUTH,
+
   LDAP,
   LDAP_AD;
 
@@ -22,6 +24,8 @@ public enum Provider {
     public static String GOOGLE = "google";
     public static String GITHUB = "github";
     public static String COGNITO = "cognito";
+
+    public static String OAUTH = "oauth";
   }
 
 }

+ 7 - 23
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java

@@ -4,16 +4,13 @@ import com.provectus.kafka.ui.model.ClusterFeature;
 import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.service.ReactiveAdminClient.ClusterDescription;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
 import java.util.function.Predicate;
-import javax.annotation.Nullable;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.common.Node;
 import org.apache.kafka.common.acl.AclOperation;
 import org.springframework.stereotype.Service;
 import reactor.core.publisher.Flux;
@@ -24,11 +21,10 @@ import reactor.core.publisher.Mono;
 @Slf4j
 public class FeatureService {
 
-  private static final String DELETE_TOPIC_ENABLED_SERVER_PROPERTY = "delete.topic.enable";
-
   private final AdminClientService adminClientService;
 
-  public Mono<List<ClusterFeature>> getAvailableFeatures(KafkaCluster cluster,
+  public Mono<List<ClusterFeature>> getAvailableFeatures(ReactiveAdminClient adminClient,
+                                                         KafkaCluster cluster,
                                                          ClusterDescription clusterDescription) {
     List<Mono<ClusterFeature>> features = new ArrayList<>();
 
@@ -46,29 +42,17 @@ public class FeatureService {
       features.add(Mono.just(ClusterFeature.SCHEMA_REGISTRY));
     }
 
-    features.add(topicDeletionEnabled(cluster, clusterDescription.getController()));
+    features.add(topicDeletionEnabled(adminClient));
     features.add(aclView(cluster));
     features.add(aclEdit(clusterDescription));
 
     return Flux.fromIterable(features).flatMap(m -> m).collectList();
   }
 
-  private Mono<ClusterFeature> topicDeletionEnabled(KafkaCluster cluster, @Nullable Node controller) {
-    if (controller == null) {
-      return Mono.just(ClusterFeature.TOPIC_DELETION); // assuming it is enabled by default
-    }
-    return adminClientService.get(cluster)
-        .flatMap(ac -> ac.loadBrokersConfig(List.of(controller.id())))
-        .map(config ->
-            config.values().stream()
-                .flatMap(Collection::stream)
-                .filter(e -> e.name().equals(DELETE_TOPIC_ENABLED_SERVER_PROPERTY))
-                .map(e -> Boolean.parseBoolean(e.value()))
-                .findFirst()
-                .orElse(true))
-        .flatMap(enabled -> enabled
-            ? Mono.just(ClusterFeature.TOPIC_DELETION)
-            : Mono.empty());
+  private Mono<ClusterFeature> topicDeletionEnabled(ReactiveAdminClient adminClient) {
+    return adminClient.isTopicDeletionEnabled()
+        ? Mono.just(ClusterFeature.TOPIC_DELETION)
+        : Mono.empty();
   }
 
   private Mono<ClusterFeature> aclEdit(ClusterDescription clusterDescription) {

+ 63 - 40
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java

@@ -32,8 +32,9 @@ import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import javax.annotation.Nullable;
 import lombok.AccessLevel;
+import lombok.AllArgsConstructor;
+import lombok.Builder;
 import lombok.Getter;
-import lombok.RequiredArgsConstructor;
 import lombok.Value;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.admin.AdminClient;
@@ -75,7 +76,6 @@ import org.apache.kafka.common.errors.TopicAuthorizationException;
 import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
 import org.apache.kafka.common.errors.UnsupportedVersionException;
 import org.apache.kafka.common.requests.DescribeLogDirsResponse;
-import org.apache.kafka.common.resource.ResourcePattern;
 import org.apache.kafka.common.resource.ResourcePatternFilter;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
@@ -85,7 +85,7 @@ import reactor.util.function.Tuples;
 
 
 @Slf4j
-@RequiredArgsConstructor
+@AllArgsConstructor
 public class ReactiveAdminClient implements Closeable {
 
   public enum SupportedFeature {
@@ -104,7 +104,8 @@ public class ReactiveAdminClient implements Closeable {
       this.predicate = (admin, ver) -> Mono.just(ver != null && ver >= fromVersion);
     }
 
-    static Mono<Set<SupportedFeature>> forVersion(AdminClient ac, @Nullable Float kafkaVersion) {
+    static Mono<Set<SupportedFeature>> forVersion(AdminClient ac, String kafkaVersionStr) {
+      @Nullable Float kafkaVersion = KafkaVersion.parse(kafkaVersionStr).orElse(null);
       return Flux.fromArray(SupportedFeature.values())
           .flatMap(f -> f.predicate.apply(ac, kafkaVersion).map(enabled -> Tuples.of(f, enabled)))
           .filter(Tuple2::getT2)
@@ -123,19 +124,46 @@ public class ReactiveAdminClient implements Closeable {
     Set<AclOperation> authorizedOperations;
   }
 
-  public static Mono<ReactiveAdminClient> create(AdminClient adminClient) {
-    return getClusterVersion(adminClient)
-        .flatMap(ver ->
-            getSupportedUpdateFeaturesForVersion(adminClient, ver)
-                .map(features ->
-                    new ReactiveAdminClient(adminClient, ver, features)));
+  @Builder
+  private record ConfigRelatedInfo(String version,
+                                   Set<SupportedFeature> features,
+                                   boolean topicDeletionIsAllowed) {
+
+    private static Mono<ConfigRelatedInfo> extract(AdminClient ac, int controllerId) {
+      return loadBrokersConfig(ac, List.of(controllerId))
+          .map(map -> map.isEmpty() ? List.<ConfigEntry>of() : map.get(controllerId))
+          .flatMap(configs -> {
+            String version = "1.0-UNKNOWN";
+            boolean topicDeletionEnabled = true;
+            for (ConfigEntry entry : configs) {
+              if (entry.name().contains("inter.broker.protocol.version")) {
+                version = entry.value();
+              }
+              if (entry.name().equals("delete.topic.enable")) {
+                topicDeletionEnabled = Boolean.parseBoolean(entry.value());
+              }
+            }
+            var builder = ConfigRelatedInfo.builder()
+                .version(version)
+                .topicDeletionIsAllowed(topicDeletionEnabled);
+            return SupportedFeature.forVersion(ac, version)
+                .map(features -> builder.features(features).build());
+          });
+    }
   }
 
-  private static Mono<Set<SupportedFeature>> getSupportedUpdateFeaturesForVersion(AdminClient ac, String versionStr) {
-    @Nullable Float kafkaVersion = KafkaVersion.parse(versionStr).orElse(null);
-    return SupportedFeature.forVersion(ac, kafkaVersion);
+  public static Mono<ReactiveAdminClient> create(AdminClient adminClient) {
+    return describeClusterImpl(adminClient, Set.of())
+        // choosing node from which we will get configs (starting with controller)
+        .flatMap(descr -> descr.controller != null
+            ? Mono.just(descr.controller)
+            : Mono.justOrEmpty(descr.nodes.stream().findFirst())
+        )
+        .flatMap(node -> ConfigRelatedInfo.extract(adminClient, node.id()))
+        .map(info -> new ReactiveAdminClient(adminClient, info));
   }
 
+
   private static Mono<Boolean> isAuthorizedSecurityEnabled(AdminClient ac, @Nullable Float kafkaVersion) {
     return toMono(ac.describeAcls(AclBindingFilter.ANY).values())
         .thenReturn(true)
@@ -174,11 +202,10 @@ public class ReactiveAdminClient implements Closeable {
 
   @Getter(AccessLevel.PACKAGE) // visible for testing
   private final AdminClient client;
-  private final String version;
-  private final Set<SupportedFeature> features;
+  private volatile ConfigRelatedInfo configRelatedInfo;
 
   public Set<SupportedFeature> getClusterFeatures() {
-    return features;
+    return configRelatedInfo.features();
   }
 
   public Mono<Set<String>> listTopics(boolean listInternal) {
@@ -190,7 +217,20 @@ public class ReactiveAdminClient implements Closeable {
   }
 
   public String getVersion() {
-    return version;
+    return configRelatedInfo.version();
+  }
+
+  public boolean isTopicDeletionEnabled() {
+    return configRelatedInfo.topicDeletionIsAllowed();
+  }
+
+  public Mono<Void> updateInternalStats(@Nullable Node controller) {
+    if (controller == null) {
+      return Mono.empty();
+    }
+    return ConfigRelatedInfo.extract(client, controller.id())
+        .doOnNext(info -> this.configRelatedInfo = info)
+        .then();
   }
 
   public Mono<Map<String, List<ConfigEntry>>> getTopicsConfig() {
@@ -200,7 +240,7 @@ public class ReactiveAdminClient implements Closeable {
   //NOTE: skips not-found topics (for which UnknownTopicOrPartitionException was thrown by AdminClient)
   //and topics for which DESCRIBE_CONFIGS permission is not set (TopicAuthorizationException was thrown)
   public Mono<Map<String, List<ConfigEntry>>> getTopicsConfig(Collection<String> topicNames, boolean includeDoc) {
-    var includeDocFixed = features.contains(SupportedFeature.CONFIG_DOCUMENTATION_RETRIEVAL) && includeDoc;
+    var includeDocFixed = includeDoc && getClusterFeatures().contains(SupportedFeature.CONFIG_DOCUMENTATION_RETRIEVAL);
     // we need to partition calls, because it can lead to AdminClient timeouts in case of large topics count
     return partitionCalls(
         topicNames,
@@ -349,7 +389,7 @@ public class ReactiveAdminClient implements Closeable {
   }
 
   public Mono<ClusterDescription> describeCluster() {
-    return describeClusterImpl(client, features);
+    return describeClusterImpl(client, getClusterFeatures());
   }
 
   private static Mono<ClusterDescription> describeClusterImpl(AdminClient client, Set<SupportedFeature> features) {
@@ -371,23 +411,6 @@ public class ReactiveAdminClient implements Closeable {
     );
   }
 
-  private static Mono<String> getClusterVersion(AdminClient client) {
-    return describeClusterImpl(client, Set.of())
-        // choosing node from which we will get configs (starting with controller)
-        .flatMap(descr -> descr.controller != null
-            ? Mono.just(descr.controller)
-            : Mono.justOrEmpty(descr.nodes.stream().findFirst())
-        )
-        .flatMap(node -> loadBrokersConfig(client, List.of(node.id())))
-        .flatMap(configs -> configs.values().stream()
-            .flatMap(Collection::stream)
-            .filter(entry -> entry.name().contains("inter.broker.protocol.version"))
-            .findFirst()
-            .map(configEntry -> Mono.just(configEntry.value()))
-            .orElse(Mono.empty()))
-        .switchIfEmpty(Mono.just("1.0-UNKNOWN"));
-  }
-
   public Mono<Void> deleteConsumerGroups(Collection<String> groupIds) {
     return toMono(client.deleteConsumerGroups(groupIds).all())
         .onErrorResume(GroupIdNotFoundException.class,
@@ -421,7 +444,7 @@ public class ReactiveAdminClient implements Closeable {
   // NOTE: places whole current topic config with new one. Entries that were present in old config,
   // but missed in new will be set to default
   public Mono<Void> updateTopicConfig(String topicName, Map<String, String> configs) {
-    if (features.contains(SupportedFeature.INCREMENTAL_ALTER_CONFIGS)) {
+    if (getClusterFeatures().contains(SupportedFeature.INCREMENTAL_ALTER_CONFIGS)) {
       return getTopicsConfigImpl(List.of(topicName), false)
           .map(conf -> conf.getOrDefault(topicName, List.of()))
           .flatMap(currentConfigs -> incrementalAlterConfig(topicName, currentConfigs, configs));
@@ -596,17 +619,17 @@ public class ReactiveAdminClient implements Closeable {
   }
 
   public Mono<Collection<AclBinding>> listAcls(ResourcePatternFilter filter) {
-    Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
+    Preconditions.checkArgument(getClusterFeatures().contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
     return toMono(client.describeAcls(new AclBindingFilter(filter, AccessControlEntryFilter.ANY)).values());
   }
 
   public Mono<Void> createAcls(Collection<AclBinding> aclBindings) {
-    Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
+    Preconditions.checkArgument(getClusterFeatures().contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
     return toMono(client.createAcls(aclBindings).all());
   }
 
   public Mono<Void> deleteAcls(Collection<AclBinding> aclBindings) {
-    Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
+    Preconditions.checkArgument(getClusterFeatures().contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
     var filters = aclBindings.stream().map(AclBinding::toFilter).collect(Collectors.toSet());
     return toMono(client.deleteAcls(filters).all()).then();
   }

+ 20 - 19
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java

@@ -37,25 +37,26 @@ public class StatisticsService {
   private Mono<Statistics> getStatistics(KafkaCluster cluster) {
     return adminClientService.get(cluster).flatMap(ac ->
             ac.describeCluster().flatMap(description ->
-                Mono.zip(
-                    List.of(
-                        metricsCollector.getBrokerMetrics(cluster, description.getNodes()),
-                        getLogDirInfo(description, ac),
-                        featureService.getAvailableFeatures(cluster, description),
-                        loadTopicConfigs(cluster),
-                        describeTopics(cluster)),
-                    results ->
-                        Statistics.builder()
-                            .status(ServerStatusDTO.ONLINE)
-                            .clusterDescription(description)
-                            .version(ac.getVersion())
-                            .metrics((Metrics) results[0])
-                            .logDirInfo((InternalLogDirStats) results[1])
-                            .features((List<ClusterFeature>) results[2])
-                            .topicConfigs((Map<String, List<ConfigEntry>>) results[3])
-                            .topicDescriptions((Map<String, TopicDescription>) results[4])
-                            .build()
-                )))
+                ac.updateInternalStats(description.getController()).then(
+                    Mono.zip(
+                        List.of(
+                            metricsCollector.getBrokerMetrics(cluster, description.getNodes()),
+                            getLogDirInfo(description, ac),
+                            featureService.getAvailableFeatures(ac, cluster, description),
+                            loadTopicConfigs(cluster),
+                            describeTopics(cluster)),
+                        results ->
+                            Statistics.builder()
+                                .status(ServerStatusDTO.ONLINE)
+                                .clusterDescription(description)
+                                .version(ac.getVersion())
+                                .metrics((Metrics) results[0])
+                                .logDirInfo((InternalLogDirStats) results[1])
+                                .features((List<ClusterFeature>) results[2])
+                                .topicConfigs((Map<String, List<ConfigEntry>>) results[3])
+                                .topicDescriptions((Map<String, TopicDescription>) results[4])
+                                .build()
+                    ))))
         .doOnError(e ->
             log.error("Failed to collect cluster {} info", cluster.getName(), e))
         .onErrorResume(

+ 2 - 0
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java

@@ -20,6 +20,7 @@ import com.provectus.kafka.ui.model.rbac.permission.TopicAction;
 import com.provectus.kafka.ui.service.rbac.extractor.CognitoAuthorityExtractor;
 import com.provectus.kafka.ui.service.rbac.extractor.GithubAuthorityExtractor;
 import com.provectus.kafka.ui.service.rbac.extractor.GoogleAuthorityExtractor;
+import com.provectus.kafka.ui.service.rbac.extractor.OauthAuthorityExtractor;
 import com.provectus.kafka.ui.service.rbac.extractor.ProviderAuthorityExtractor;
 import jakarta.annotation.PostConstruct;
 import java.util.Collections;
@@ -76,6 +77,7 @@ public class AccessControlService {
               case OAUTH_COGNITO -> new CognitoAuthorityExtractor();
               case OAUTH_GOOGLE -> new GoogleAuthorityExtractor();
               case OAUTH_GITHUB -> new GithubAuthorityExtractor();
+              case OAUTH -> new OauthAuthorityExtractor();
               default -> null;
             })
             .filter(Objects::nonNull)

+ 6 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/CognitoAuthorityExtractor.java

@@ -1,5 +1,8 @@
 package com.provectus.kafka.ui.service.rbac.extractor;
 
+import static com.provectus.kafka.ui.model.rbac.provider.Provider.Name.COGNITO;
+
+import com.google.common.collect.Sets;
 import com.provectus.kafka.ui.model.rbac.Role;
 import com.provectus.kafka.ui.model.rbac.provider.Provider;
 import com.provectus.kafka.ui.service.rbac.AccessControlService;
@@ -18,8 +21,8 @@ public class CognitoAuthorityExtractor implements ProviderAuthorityExtractor {
   private static final String COGNITO_GROUPS_ATTRIBUTE_NAME = "cognito:groups";
 
   @Override
-  public boolean isApplicable(String provider) {
-    return Provider.Name.COGNITO.equalsIgnoreCase(provider);
+  public boolean isApplicable(String provider, Map<String, String> customParams) {
+    return COGNITO.equalsIgnoreCase(provider) || COGNITO.equalsIgnoreCase(customParams.get(TYPE));
   }
 
   @Override
@@ -63,7 +66,7 @@ public class CognitoAuthorityExtractor implements ProviderAuthorityExtractor {
         .map(Role::getName)
         .collect(Collectors.toSet());
 
-    return Mono.just(Stream.concat(groupsByUsername.stream(), groupsByGroups.stream()).collect(Collectors.toSet()));
+    return Mono.just(Sets.union(groupsByUsername, groupsByGroups));
   }
 
 }

+ 4 - 2
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java

@@ -1,5 +1,7 @@
 package com.provectus.kafka.ui.service.rbac.extractor;
 
+import static com.provectus.kafka.ui.model.rbac.provider.Provider.Name.GITHUB;
+
 import com.provectus.kafka.ui.model.rbac.Role;
 import com.provectus.kafka.ui.model.rbac.provider.Provider;
 import com.provectus.kafka.ui.service.rbac.AccessControlService;
@@ -28,8 +30,8 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor {
   private static final String DUMMY = "dummy";
 
   @Override
-  public boolean isApplicable(String provider) {
-    return Provider.Name.GITHUB.equalsIgnoreCase(provider);
+  public boolean isApplicable(String provider, Map<String, String> customParams) {
+    return GITHUB.equalsIgnoreCase(provider) || GITHUB.equalsIgnoreCase(customParams.get(TYPE));
   }
 
   @Override

+ 8 - 8
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GoogleAuthorityExtractor.java

@@ -1,13 +1,14 @@
 package com.provectus.kafka.ui.service.rbac.extractor;
 
+import static com.provectus.kafka.ui.model.rbac.provider.Provider.Name.GOOGLE;
+
+import com.google.common.collect.Sets;
 import com.provectus.kafka.ui.model.rbac.Role;
 import com.provectus.kafka.ui.model.rbac.provider.Provider;
 import com.provectus.kafka.ui.service.rbac.AccessControlService;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
-import java.util.stream.Stream;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.security.oauth2.core.user.DefaultOAuth2User;
 import reactor.core.publisher.Mono;
@@ -19,8 +20,8 @@ public class GoogleAuthorityExtractor implements ProviderAuthorityExtractor {
   public static final String EMAIL_ATTRIBUTE_NAME = "email";
 
   @Override
-  public boolean isApplicable(String provider) {
-    return Provider.Name.GOOGLE.equalsIgnoreCase(provider);
+  public boolean isApplicable(String provider, Map<String, String> customParams) {
+    return GOOGLE.equalsIgnoreCase(provider) || GOOGLE.equalsIgnoreCase(customParams.get(TYPE));
   }
 
   @Override
@@ -52,7 +53,7 @@ public class GoogleAuthorityExtractor implements ProviderAuthorityExtractor {
       return Mono.just(groupsByUsername);
     }
 
-    List<String> groupsByDomain = acs.getRoles()
+    Set<String> groupsByDomain = acs.getRoles()
         .stream()
         .filter(r -> r.getSubjects()
             .stream()
@@ -60,10 +61,9 @@ public class GoogleAuthorityExtractor implements ProviderAuthorityExtractor {
             .filter(s -> s.getType().equals("domain"))
             .anyMatch(s -> s.getValue().equals(domain)))
         .map(Role::getName)
-        .toList();
+        .collect(Collectors.toSet());
 
-    return Mono.just(Stream.concat(groupsByUsername.stream(), groupsByDomain.stream())
-        .collect(Collectors.toSet()));
+    return Mono.just(Sets.union(groupsByUsername, groupsByDomain));
   }
 
 }

+ 85 - 3
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/OauthAuthorityExtractor.java

@@ -1,22 +1,44 @@
 package com.provectus.kafka.ui.service.rbac.extractor;
 
+import static com.provectus.kafka.ui.model.rbac.provider.Provider.Name.OAUTH;
+
+import com.google.common.collect.Sets;
+import com.provectus.kafka.ui.config.auth.OAuthProperties;
+import com.provectus.kafka.ui.model.rbac.Role;
+import com.provectus.kafka.ui.model.rbac.provider.Provider;
 import com.provectus.kafka.ui.service.rbac.AccessControlService;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.security.oauth2.core.user.DefaultOAuth2User;
+import org.springframework.util.Assert;
 import reactor.core.publisher.Mono;
 
 @Slf4j
 public class OauthAuthorityExtractor implements ProviderAuthorityExtractor {
 
+  public static final String ROLES_FIELD_PARAM_NAME = "roles-field";
+
   @Override
-  public boolean isApplicable(String provider) {
-    return false; // TODO #2844
+  public boolean isApplicable(String provider, Map<String, String> customParams) {
+    var containsRolesFieldNameParam = customParams.containsKey(ROLES_FIELD_PARAM_NAME);
+    if (!containsRolesFieldNameParam) {
+      log.debug("Provider [{}] doesn't contain a roles field param name, mapping won't be performed", provider);
+      return false;
+    }
+
+    return OAUTH.equalsIgnoreCase(provider) || OAUTH.equalsIgnoreCase(customParams.get(TYPE));
   }
 
   @Override
   public Mono<Set<String>> extract(AccessControlService acs, Object value, Map<String, Object> additionalParams) {
+    log.trace("Extracting OAuth2 user authorities");
+
     DefaultOAuth2User principal;
     try {
       principal = (DefaultOAuth2User) value;
@@ -25,7 +47,67 @@ public class OauthAuthorityExtractor implements ProviderAuthorityExtractor {
       throw new RuntimeException();
     }
 
-    return Mono.just(Set.of(principal.getName())); // TODO #2844
+    var provider = (OAuthProperties.OAuth2Provider) additionalParams.get("provider");
+    Assert.notNull(provider, "provider is null");
+    var rolesFieldName = provider.getCustomParams().get(ROLES_FIELD_PARAM_NAME);
+
+    Set<String> rolesByUsername = acs.getRoles()
+        .stream()
+        .filter(r -> r.getSubjects()
+            .stream()
+            .filter(s -> s.getProvider().equals(Provider.OAUTH))
+            .filter(s -> s.getType().equals("user"))
+            .anyMatch(s -> s.getValue().equals(principal.getName())))
+        .map(Role::getName)
+        .collect(Collectors.toSet());
+
+    Set<String> rolesByRolesField = acs.getRoles()
+        .stream()
+        .filter(role -> role.getSubjects()
+            .stream()
+            .filter(s -> s.getProvider().equals(Provider.OAUTH))
+            .filter(s -> s.getType().equals("role"))
+            .anyMatch(subject -> {
+              var roleName = subject.getValue();
+              var principalRoles = convertRoles(principal.getAttribute(rolesFieldName));
+              var roleMatched = principalRoles.contains(roleName);
+
+              if (roleMatched) {
+                log.debug("Assigning role [{}] to user [{}]", roleName, principal.getName());
+              } else {
+                log.trace("Role [{}] not found in user [{}] roles", roleName, principal.getName());
+              }
+
+              return roleMatched;
+            })
+        )
+        .map(Role::getName)
+        .collect(Collectors.toSet());
+
+    return Mono.just(Sets.union(rolesByUsername, rolesByRolesField));
+  }
+
+  @SuppressWarnings("unchecked")
+  private Collection<String> convertRoles(Object roles) {
+    if (roles == null) {
+      log.debug("Param missing from attributes, skipping");
+      return Collections.emptySet();
+    }
+
+    if ((roles instanceof List<?>) || (roles instanceof Set<?>)) {
+      log.trace("The field is either a set or a list, returning as is");
+      return (Collection<String>) roles;
+    }
+
+    if (!(roles instanceof String)) {
+      log.debug("The field is not a string, skipping");
+      return Collections.emptySet();
+    }
+
+    log.trace("Trying to deserialize the field value [{}] as a string", roles);
+
+    return Arrays.stream(((String) roles).split(","))
+        .collect(Collectors.toSet());
   }
 
 }
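
A minimal, illustrative sketch (not part of this commit) of the new two-argument isApplicable contract introduced above for OauthAuthorityExtractor: the extractor only opts in when a "roles-field" custom param is configured, and it then matches either by provider name or by the "type" custom param. The registration name "keycloak" and the roles field "groups" below are assumed values for demonstration only.

import com.provectus.kafka.ui.service.rbac.extractor.OauthAuthorityExtractor;
import java.util.Map;

class OauthExtractorApplicabilitySketch {
  public static void main(String[] args) {
    var extractor = new OauthAuthorityExtractor();

    // Matches by provider name once a roles field is configured.
    System.out.println(extractor.isApplicable("oauth", Map.of("roles-field", "groups")));   // true

    // Also matches when custom-params declare type=oauth ("keycloak" is an assumed registration name).
    System.out.println(extractor.isApplicable("keycloak",
        Map.of("type", "oauth", "roles-field", "groups")));                                 // true

    // Without a "roles-field" custom param the extractor declines, so no role mapping is attempted.
    System.out.println(extractor.isApplicable("oauth", Map.of()));                          // false
  }
}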

+ 3 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/ProviderAuthorityExtractor.java

@@ -7,7 +7,9 @@ import reactor.core.publisher.Mono;
 
 public interface ProviderAuthorityExtractor {
 
-  boolean isApplicable(String provider);
+  String TYPE = "type";
+
+  boolean isApplicable(String provider, Map<String, String> customParams);
 
   Mono<Set<String>> extract(AccessControlService acs, Object value, Map<String, Object> additionalParams);
 

+ 1 - 1
kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/DynamicConfigOperations.java

@@ -230,7 +230,7 @@ public class DynamicConfigOperations {
 
       Optional.ofNullable(auth)
           .flatMap(a -> Optional.ofNullable(a.oauth2))
-          .ifPresent(OAuthProperties::validate);
+          .ifPresent(OAuthProperties::init);
 
       Optional.ofNullable(webclient)
           .ifPresent(WebclientProperties::validate);