Merge branch 'master' into issue/3163

This commit is contained in:
Malav Mevada 2023-05-10 15:07:07 +05:30 committed by GitHub
commit 58ec224ab3
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
29 changed files with 295 additions and 281 deletions

View file

@ -14,13 +14,11 @@ public class LdapProperties {
private String adminPassword;
private String userFilterSearchBase;
private String userFilterSearchFilter;
private String groupFilterSearchBase;
@Value("${oauth2.ldap.activeDirectory:false}")
private boolean isActiveDirectory;
// FIX: the property key previously contained a Cyrillic 'с' homoglyph
// ("oauth2.ldap.aсtiveDirectory.domain"), which made this value impossible
// to set via the documented ASCII key. Replaced with ASCII 'c'.
// "@null" default leaves the domain unset unless explicitly configured.
@Value("${oauth2.ldap.activeDirectory.domain:@null}")
private String activeDirectoryDomain;
@Value("${oauth2.ldap.groupRoleAttribute:cn}")
private String groupRoleAttribute;
}

View file

@ -3,7 +3,6 @@ package com.provectus.kafka.ui.config.auth;
import static com.provectus.kafka.ui.config.auth.AbstractAuthSecurityConfig.AUTH_WHITELIST;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import com.provectus.kafka.ui.service.rbac.extractor.RbacLdapAuthoritiesExtractor;
import java.util.Collection;
import java.util.List;
import javax.annotation.Nullable;
@ -12,7 +11,6 @@ import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.ldap.LdapAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@ -34,6 +32,8 @@ import org.springframework.security.ldap.authentication.LdapAuthenticationProvid
import org.springframework.security.ldap.authentication.ad.ActiveDirectoryLdapAuthenticationProvider;
import org.springframework.security.ldap.search.FilterBasedLdapUserSearch;
import org.springframework.security.ldap.search.LdapUserSearch;
import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;
import org.springframework.security.ldap.userdetails.LdapAuthoritiesPopulator;
import org.springframework.security.ldap.userdetails.LdapUserDetailsMapper;
import org.springframework.security.web.server.SecurityWebFilterChain;
@ -50,7 +50,7 @@ public class LdapSecurityConfig {
@Bean
public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSource contextSource,
ApplicationContext context,
LdapAuthoritiesPopulator ldapAuthoritiesPopulator,
@Nullable AccessControlService acs) {
var rbacEnabled = acs != null && acs.isRbacEnabled();
BindAuthenticator ba = new BindAuthenticator(contextSource);
@ -67,7 +67,7 @@ public class LdapSecurityConfig {
AbstractLdapAuthenticationProvider authenticationProvider;
if (!props.isActiveDirectory()) {
authenticationProvider = rbacEnabled
? new LdapAuthenticationProvider(ba, new RbacLdapAuthoritiesExtractor(context))
? new LdapAuthenticationProvider(ba, ldapAuthoritiesPopulator)
: new LdapAuthenticationProvider(ba);
} else {
authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(props.getActiveDirectoryDomain(),
@ -95,6 +95,15 @@ public class LdapSecurityConfig {
return ctx;
}
@Bean
@Primary
public LdapAuthoritiesPopulator ldapAuthoritiesPopulator(BaseLdapPathContextSource contextSource) {
  // Resolves granted authorities from LDAP groups found under the configured
  // group search base. Group names are used verbatim as authorities: no
  // "ROLE_" prefix and no upper-casing, so they can be matched against RBAC
  // role definitions exactly as written in the directory.
  var populator = new DefaultLdapAuthoritiesPopulator(contextSource, props.getGroupFilterSearchBase());
  populator.setConvertToUpperCase(false);
  populator.setRolePrefix("");
  return populator;
}
@Bean
public SecurityWebFilterChain configureLdap(ServerHttpSecurity http) {
log.info("Configuring LDAP authentication.");

View file

@ -1,6 +1,7 @@
package com.provectus.kafka.ui.config.auth;
import jakarta.annotation.PostConstruct;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
@ -14,7 +15,16 @@ public class OAuthProperties {
private Map<String, OAuth2Provider> client = new HashMap<>();
@PostConstruct
public void validate() {
public void init() {
getClient().values().forEach((provider) -> {
if (provider.getCustomParams() == null) {
provider.setCustomParams(Collections.emptyMap());
}
if (provider.getScope() == null) {
provider.setScope(Collections.emptySet());
}
});
getClient().values().forEach(this::validateProvider);
}

View file

@ -73,8 +73,7 @@ public final class OAuthPropertiesConverter {
}
private static boolean isGoogle(OAuth2Provider provider) {
  // Post-diff version: the null guard on customParams was dropped because
  // OAuthProperties.init() now defaults customParams to an empty map
  // (see the init() hunk in this commit).
  return GOOGLE.equalsIgnoreCase(provider.getCustomParams().get(TYPE));
}
}

View file

@ -72,13 +72,13 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
final OidcReactiveOAuth2UserService delegate = new OidcReactiveOAuth2UserService();
return request -> delegate.loadUser(request)
.flatMap(user -> {
String providerId = request.getClientRegistration().getRegistrationId();
final var extractor = getExtractor(providerId, acs);
var provider = getProviderByProviderId(request.getClientRegistration().getRegistrationId());
final var extractor = getExtractor(provider, acs);
if (extractor == null) {
return Mono.just(user);
}
return extractor.extract(acs, user, Map.of("request", request))
return extractor.extract(acs, user, Map.of("request", request, "provider", provider))
.map(groups -> new RbacOidcUser(user, groups));
});
}
@ -88,13 +88,13 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
final DefaultReactiveOAuth2UserService delegate = new DefaultReactiveOAuth2UserService();
return request -> delegate.loadUser(request)
.flatMap(user -> {
String providerId = request.getClientRegistration().getRegistrationId();
final var extractor = getExtractor(providerId, acs);
var provider = getProviderByProviderId(request.getClientRegistration().getRegistrationId());
final var extractor = getExtractor(provider, acs);
if (extractor == null) {
return Mono.just(user);
}
return extractor.extract(acs, user, Map.of("request", request))
return extractor.extract(acs, user, Map.of("request", request, "provider", provider))
.map(groups -> new RbacOAuth2User(user, groups));
});
}
@ -113,18 +113,18 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
}
@Nullable
private ProviderAuthorityExtractor getExtractor(final String providerId, AccessControlService acs) {
final String provider = getProviderByProviderId(providerId);
private ProviderAuthorityExtractor getExtractor(final OAuthProperties.OAuth2Provider provider,
AccessControlService acs) {
Optional<ProviderAuthorityExtractor> extractor = acs.getOauthExtractors()
.stream()
.filter(e -> e.isApplicable(provider))
.filter(e -> e.isApplicable(provider.getProvider(), provider.getCustomParams()))
.findFirst();
return extractor.orElse(null);
}
private String getProviderByProviderId(final String providerId) {
return properties.getClient().get(providerId).getProvider();
private OAuthProperties.OAuth2Provider getProviderByProviderId(final String providerId) {
return properties.getClient().get(providerId);
}
}

View file

@ -46,10 +46,8 @@ public class CognitoLogoutSuccessHandler implements LogoutSuccessHandler {
.fragment(null)
.build();
Assert.isTrue(
provider.getCustomParams() != null && provider.getCustomParams().containsKey("logoutUrl"),
"Custom params should contain 'logoutUrl'"
);
Assert.isTrue(provider.getCustomParams().containsKey("logoutUrl"),
"Custom params should contain 'logoutUrl'");
final var uri = UriComponentsBuilder
.fromUri(URI.create(provider.getCustomParams().get("logoutUrl")))
.queryParam("client_id", provider.getClientId())

View file

@ -39,41 +39,42 @@ public class MessageFilters {
}
static Predicate<TopicMessageDTO> groovyScriptFilter(String script) {
var compiledScript = compileScript(script);
var engine = getGroovyEngine();
var compiledScript = compileScript(engine, script);
var jsonSlurper = new JsonSlurper();
return new Predicate<TopicMessageDTO>() {
@SneakyThrows
@Override
public boolean test(TopicMessageDTO msg) {
var bindings = getGroovyEngine().createBindings();
var bindings = engine.createBindings();
bindings.put("partition", msg.getPartition());
bindings.put("offset", msg.getOffset());
bindings.put("timestampMs", msg.getTimestamp().toInstant().toEpochMilli());
bindings.put("keyAsText", msg.getKey());
bindings.put("valueAsText", msg.getContent());
bindings.put("headers", msg.getHeaders());
bindings.put("key", parseToJsonOrReturnNull(jsonSlurper, msg.getKey()));
bindings.put("value", parseToJsonOrReturnNull(jsonSlurper, msg.getContent()));
bindings.put("key", parseToJsonOrReturnAsIs(jsonSlurper, msg.getKey()));
bindings.put("value", parseToJsonOrReturnAsIs(jsonSlurper, msg.getContent()));
var result = compiledScript.eval(bindings);
if (result instanceof Boolean) {
return (Boolean) result;
} else {
throw new ValidationException(
String.format("Unexpected script result: %s, Boolean should be returned instead", result));
"Unexpected script result: %s, Boolean should be returned instead".formatted(result));
}
}
};
}
@Nullable
private static Object parseToJsonOrReturnNull(JsonSlurper parser, @Nullable String str) {
private static Object parseToJsonOrReturnAsIs(JsonSlurper parser, @Nullable String str) {
if (str == null) {
return null;
}
try {
return parser.parseText(str);
} catch (Exception e) {
return null;
return str;
}
}
@ -86,9 +87,9 @@ public class MessageFilters {
return GROOVY_ENGINE;
}
private static CompiledScript compileScript(String script) {
private static CompiledScript compileScript(GroovyScriptEngineImpl engine, String script) {
try {
return getGroovyEngine().compile(script);
return engine.compile(script);
} catch (ScriptException e) {
throw new ValidationException("Script syntax error: " + e.getMessage());
}

View file

@ -10,6 +10,8 @@ public enum Provider {
OAUTH_COGNITO,
OAUTH,
LDAP,
LDAP_AD;
@ -22,6 +24,8 @@ public enum Provider {
public static String GOOGLE = "google";
public static String GITHUB = "github";
public static String COGNITO = "cognito";
public static String OAUTH = "oauth";
}
}

View file

@ -4,16 +4,13 @@ import com.provectus.kafka.ui.model.ClusterFeature;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.service.ReactiveAdminClient.ClusterDescription;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.acl.AclOperation;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
@ -24,11 +21,10 @@ import reactor.core.publisher.Mono;
@Slf4j
public class FeatureService {
private static final String DELETE_TOPIC_ENABLED_SERVER_PROPERTY = "delete.topic.enable";
private final AdminClientService adminClientService;
public Mono<List<ClusterFeature>> getAvailableFeatures(KafkaCluster cluster,
public Mono<List<ClusterFeature>> getAvailableFeatures(ReactiveAdminClient adminClient,
KafkaCluster cluster,
ClusterDescription clusterDescription) {
List<Mono<ClusterFeature>> features = new ArrayList<>();
@ -46,29 +42,17 @@ public class FeatureService {
features.add(Mono.just(ClusterFeature.SCHEMA_REGISTRY));
}
features.add(topicDeletionEnabled(cluster, clusterDescription.getController()));
features.add(topicDeletionEnabled(adminClient));
features.add(aclView(cluster));
features.add(aclEdit(clusterDescription));
return Flux.fromIterable(features).flatMap(m -> m).collectList();
}
private Mono<ClusterFeature> topicDeletionEnabled(KafkaCluster cluster, @Nullable Node controller) {
if (controller == null) {
return Mono.just(ClusterFeature.TOPIC_DELETION); // assuming it is enabled by default
}
return adminClientService.get(cluster)
.flatMap(ac -> ac.loadBrokersConfig(List.of(controller.id())))
.map(config ->
config.values().stream()
.flatMap(Collection::stream)
.filter(e -> e.name().equals(DELETE_TOPIC_ENABLED_SERVER_PROPERTY))
.map(e -> Boolean.parseBoolean(e.value()))
.findFirst()
.orElse(true))
.flatMap(enabled -> enabled
? Mono.just(ClusterFeature.TOPIC_DELETION)
: Mono.empty());
private Mono<ClusterFeature> topicDeletionEnabled(ReactiveAdminClient adminClient) {
return adminClient.isTopicDeletionEnabled()
? Mono.just(ClusterFeature.TOPIC_DELETION)
: Mono.empty();
}
private Mono<ClusterFeature> aclEdit(ClusterDescription clusterDescription) {

View file

@ -32,8 +32,9 @@ import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nullable;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.Value;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.AdminClient;
@ -75,7 +76,6 @@ import org.apache.kafka.common.errors.TopicAuthorizationException;
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
import org.apache.kafka.common.errors.UnsupportedVersionException;
import org.apache.kafka.common.requests.DescribeLogDirsResponse;
import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourcePatternFilter;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@ -85,7 +85,7 @@ import reactor.util.function.Tuples;
@Slf4j
@RequiredArgsConstructor
@AllArgsConstructor
public class ReactiveAdminClient implements Closeable {
public enum SupportedFeature {
@ -104,7 +104,8 @@ public class ReactiveAdminClient implements Closeable {
this.predicate = (admin, ver) -> Mono.just(ver != null && ver >= fromVersion);
}
static Mono<Set<SupportedFeature>> forVersion(AdminClient ac, @Nullable Float kafkaVersion) {
static Mono<Set<SupportedFeature>> forVersion(AdminClient ac, String kafkaVersionStr) {
@Nullable Float kafkaVersion = KafkaVersion.parse(kafkaVersionStr).orElse(null);
return Flux.fromArray(SupportedFeature.values())
.flatMap(f -> f.predicate.apply(ac, kafkaVersion).map(enabled -> Tuples.of(f, enabled)))
.filter(Tuple2::getT2)
@ -123,19 +124,46 @@ public class ReactiveAdminClient implements Closeable {
Set<AclOperation> authorizedOperations;
}
public static Mono<ReactiveAdminClient> create(AdminClient adminClient) {
return getClusterVersion(adminClient)
.flatMap(ver ->
getSupportedUpdateFeaturesForVersion(adminClient, ver)
.map(features ->
new ReactiveAdminClient(adminClient, ver, features)));
@Builder
private record ConfigRelatedInfo(String version,
Set<SupportedFeature> features,
boolean topicDeletionIsAllowed) {
private static Mono<ConfigRelatedInfo> extract(AdminClient ac, int controllerId) {
return loadBrokersConfig(ac, List.of(controllerId))
.map(map -> map.isEmpty() ? List.<ConfigEntry>of() : map.get(controllerId))
.flatMap(configs -> {
String version = "1.0-UNKNOWN";
boolean topicDeletionEnabled = true;
for (ConfigEntry entry : configs) {
if (entry.name().contains("inter.broker.protocol.version")) {
version = entry.value();
}
if (entry.name().equals("delete.topic.enable")) {
topicDeletionEnabled = Boolean.parseBoolean(entry.value());
}
}
var builder = ConfigRelatedInfo.builder()
.version(version)
.topicDeletionIsAllowed(topicDeletionEnabled);
return SupportedFeature.forVersion(ac, version)
.map(features -> builder.features(features).build());
});
}
}
private static Mono<Set<SupportedFeature>> getSupportedUpdateFeaturesForVersion(AdminClient ac, String versionStr) {
@Nullable Float kafkaVersion = KafkaVersion.parse(versionStr).orElse(null);
return SupportedFeature.forVersion(ac, kafkaVersion);
public static Mono<ReactiveAdminClient> create(AdminClient adminClient) {
return describeClusterImpl(adminClient, Set.of())
// choosing node from which we will get configs (starting with controller)
.flatMap(descr -> descr.controller != null
? Mono.just(descr.controller)
: Mono.justOrEmpty(descr.nodes.stream().findFirst())
)
.flatMap(node -> ConfigRelatedInfo.extract(adminClient, node.id()))
.map(info -> new ReactiveAdminClient(adminClient, info));
}
private static Mono<Boolean> isAuthorizedSecurityEnabled(AdminClient ac, @Nullable Float kafkaVersion) {
return toMono(ac.describeAcls(AclBindingFilter.ANY).values())
.thenReturn(true)
@ -174,11 +202,10 @@ public class ReactiveAdminClient implements Closeable {
@Getter(AccessLevel.PACKAGE) // visible for testing
private final AdminClient client;
private final String version;
private final Set<SupportedFeature> features;
private volatile ConfigRelatedInfo configRelatedInfo;
public Set<SupportedFeature> getClusterFeatures() {
return features;
return configRelatedInfo.features();
}
public Mono<Set<String>> listTopics(boolean listInternal) {
@ -190,7 +217,20 @@ public class ReactiveAdminClient implements Closeable {
}
public String getVersion() {
return version;
return configRelatedInfo.version();
}
public boolean isTopicDeletionEnabled() {
return configRelatedInfo.topicDeletionIsAllowed();
}
public Mono<Void> updateInternalStats(@Nullable Node controller) {
if (controller == null) {
return Mono.empty();
}
return ConfigRelatedInfo.extract(client, controller.id())
.doOnNext(info -> this.configRelatedInfo = info)
.then();
}
public Mono<Map<String, List<ConfigEntry>>> getTopicsConfig() {
@ -200,7 +240,7 @@ public class ReactiveAdminClient implements Closeable {
//NOTE: skips not-found topics (for which UnknownTopicOrPartitionException was thrown by AdminClient)
//and topics for which DESCRIBE_CONFIGS permission is not set (TopicAuthorizationException was thrown)
public Mono<Map<String, List<ConfigEntry>>> getTopicsConfig(Collection<String> topicNames, boolean includeDoc) {
var includeDocFixed = features.contains(SupportedFeature.CONFIG_DOCUMENTATION_RETRIEVAL) && includeDoc;
var includeDocFixed = includeDoc && getClusterFeatures().contains(SupportedFeature.CONFIG_DOCUMENTATION_RETRIEVAL);
// we need to partition calls, because it can lead to AdminClient timeouts in case of large topics count
return partitionCalls(
topicNames,
@ -349,7 +389,7 @@ public class ReactiveAdminClient implements Closeable {
}
public Mono<ClusterDescription> describeCluster() {
return describeClusterImpl(client, features);
return describeClusterImpl(client, getClusterFeatures());
}
private static Mono<ClusterDescription> describeClusterImpl(AdminClient client, Set<SupportedFeature> features) {
@ -371,23 +411,6 @@ public class ReactiveAdminClient implements Closeable {
);
}
private static Mono<String> getClusterVersion(AdminClient client) {
return describeClusterImpl(client, Set.of())
// choosing node from which we will get configs (starting with controller)
.flatMap(descr -> descr.controller != null
? Mono.just(descr.controller)
: Mono.justOrEmpty(descr.nodes.stream().findFirst())
)
.flatMap(node -> loadBrokersConfig(client, List.of(node.id())))
.flatMap(configs -> configs.values().stream()
.flatMap(Collection::stream)
.filter(entry -> entry.name().contains("inter.broker.protocol.version"))
.findFirst()
.map(configEntry -> Mono.just(configEntry.value()))
.orElse(Mono.empty()))
.switchIfEmpty(Mono.just("1.0-UNKNOWN"));
}
public Mono<Void> deleteConsumerGroups(Collection<String> groupIds) {
return toMono(client.deleteConsumerGroups(groupIds).all())
.onErrorResume(GroupIdNotFoundException.class,
@ -421,7 +444,7 @@ public class ReactiveAdminClient implements Closeable {
// NOTE: places whole current topic config with new one. Entries that were present in old config,
// but missed in new will be set to default
public Mono<Void> updateTopicConfig(String topicName, Map<String, String> configs) {
if (features.contains(SupportedFeature.INCREMENTAL_ALTER_CONFIGS)) {
if (getClusterFeatures().contains(SupportedFeature.INCREMENTAL_ALTER_CONFIGS)) {
return getTopicsConfigImpl(List.of(topicName), false)
.map(conf -> conf.getOrDefault(topicName, List.of()))
.flatMap(currentConfigs -> incrementalAlterConfig(topicName, currentConfigs, configs));
@ -596,17 +619,17 @@ public class ReactiveAdminClient implements Closeable {
}
public Mono<Collection<AclBinding>> listAcls(ResourcePatternFilter filter) {
Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
Preconditions.checkArgument(getClusterFeatures().contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
return toMono(client.describeAcls(new AclBindingFilter(filter, AccessControlEntryFilter.ANY)).values());
}
public Mono<Void> createAcls(Collection<AclBinding> aclBindings) {
Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
Preconditions.checkArgument(getClusterFeatures().contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
return toMono(client.createAcls(aclBindings).all());
}
public Mono<Void> deleteAcls(Collection<AclBinding> aclBindings) {
Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
Preconditions.checkArgument(getClusterFeatures().contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
var filters = aclBindings.stream().map(AclBinding::toFilter).collect(Collectors.toSet());
return toMono(client.deleteAcls(filters).all()).then();
}

View file

@ -37,25 +37,26 @@ public class StatisticsService {
private Mono<Statistics> getStatistics(KafkaCluster cluster) {
return adminClientService.get(cluster).flatMap(ac ->
ac.describeCluster().flatMap(description ->
Mono.zip(
List.of(
metricsCollector.getBrokerMetrics(cluster, description.getNodes()),
getLogDirInfo(description, ac),
featureService.getAvailableFeatures(cluster, description),
loadTopicConfigs(cluster),
describeTopics(cluster)),
results ->
Statistics.builder()
.status(ServerStatusDTO.ONLINE)
.clusterDescription(description)
.version(ac.getVersion())
.metrics((Metrics) results[0])
.logDirInfo((InternalLogDirStats) results[1])
.features((List<ClusterFeature>) results[2])
.topicConfigs((Map<String, List<ConfigEntry>>) results[3])
.topicDescriptions((Map<String, TopicDescription>) results[4])
.build()
)))
ac.updateInternalStats(description.getController()).then(
Mono.zip(
List.of(
metricsCollector.getBrokerMetrics(cluster, description.getNodes()),
getLogDirInfo(description, ac),
featureService.getAvailableFeatures(ac, cluster, description),
loadTopicConfigs(cluster),
describeTopics(cluster)),
results ->
Statistics.builder()
.status(ServerStatusDTO.ONLINE)
.clusterDescription(description)
.version(ac.getVersion())
.metrics((Metrics) results[0])
.logDirInfo((InternalLogDirStats) results[1])
.features((List<ClusterFeature>) results[2])
.topicConfigs((Map<String, List<ConfigEntry>>) results[3])
.topicDescriptions((Map<String, TopicDescription>) results[4])
.build()
))))
.doOnError(e ->
log.error("Failed to collect cluster {} info", cluster.getName(), e))
.onErrorResume(

View file

@ -20,6 +20,7 @@ import com.provectus.kafka.ui.model.rbac.permission.TopicAction;
import com.provectus.kafka.ui.service.rbac.extractor.CognitoAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.GithubAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.GoogleAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.OauthAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.ProviderAuthorityExtractor;
import jakarta.annotation.PostConstruct;
import java.util.Collections;
@ -76,6 +77,7 @@ public class AccessControlService {
case OAUTH_COGNITO -> new CognitoAuthorityExtractor();
case OAUTH_GOOGLE -> new GoogleAuthorityExtractor();
case OAUTH_GITHUB -> new GithubAuthorityExtractor();
case OAUTH -> new OauthAuthorityExtractor();
default -> null;
})
.filter(Objects::nonNull)

View file

@ -1,5 +1,8 @@
package com.provectus.kafka.ui.service.rbac.extractor;
import static com.provectus.kafka.ui.model.rbac.provider.Provider.Name.COGNITO;
import com.google.common.collect.Sets;
import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.provider.Provider;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
@ -18,8 +21,8 @@ public class CognitoAuthorityExtractor implements ProviderAuthorityExtractor {
private static final String COGNITO_GROUPS_ATTRIBUTE_NAME = "cognito:groups";
@Override
public boolean isApplicable(String provider) {
return Provider.Name.COGNITO.equalsIgnoreCase(provider);
public boolean isApplicable(String provider, Map<String, String> customParams) {
return COGNITO.equalsIgnoreCase(provider) || COGNITO.equalsIgnoreCase(customParams.get(TYPE));
}
@Override
@ -63,7 +66,7 @@ public class CognitoAuthorityExtractor implements ProviderAuthorityExtractor {
.map(Role::getName)
.collect(Collectors.toSet());
return Mono.just(Stream.concat(groupsByUsername.stream(), groupsByGroups.stream()).collect(Collectors.toSet()));
return Mono.just(Sets.union(groupsByUsername, groupsByGroups));
}
}

View file

@ -1,5 +1,7 @@
package com.provectus.kafka.ui.service.rbac.extractor;
import static com.provectus.kafka.ui.model.rbac.provider.Provider.Name.GITHUB;
import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.provider.Provider;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
@ -28,8 +30,8 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor {
private static final String DUMMY = "dummy";
@Override
public boolean isApplicable(String provider) {
return Provider.Name.GITHUB.equalsIgnoreCase(provider);
public boolean isApplicable(String provider, Map<String, String> customParams) {
return GITHUB.equalsIgnoreCase(provider) || GITHUB.equalsIgnoreCase(customParams.get(TYPE));
}
@Override

View file

@ -1,13 +1,14 @@
package com.provectus.kafka.ui.service.rbac.extractor;
import static com.provectus.kafka.ui.model.rbac.provider.Provider.Name.GOOGLE;
import com.google.common.collect.Sets;
import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.provider.Provider;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import lombok.extern.slf4j.Slf4j;
import org.springframework.security.oauth2.core.user.DefaultOAuth2User;
import reactor.core.publisher.Mono;
@ -19,8 +20,8 @@ public class GoogleAuthorityExtractor implements ProviderAuthorityExtractor {
public static final String EMAIL_ATTRIBUTE_NAME = "email";
@Override
public boolean isApplicable(String provider) {
return Provider.Name.GOOGLE.equalsIgnoreCase(provider);
public boolean isApplicable(String provider, Map<String, String> customParams) {
return GOOGLE.equalsIgnoreCase(provider) || GOOGLE.equalsIgnoreCase(customParams.get(TYPE));
}
@Override
@ -52,7 +53,7 @@ public class GoogleAuthorityExtractor implements ProviderAuthorityExtractor {
return Mono.just(groupsByUsername);
}
List<String> groupsByDomain = acs.getRoles()
Set<String> groupsByDomain = acs.getRoles()
.stream()
.filter(r -> r.getSubjects()
.stream()
@ -60,10 +61,9 @@ public class GoogleAuthorityExtractor implements ProviderAuthorityExtractor {
.filter(s -> s.getType().equals("domain"))
.anyMatch(s -> s.getValue().equals(domain)))
.map(Role::getName)
.toList();
.collect(Collectors.toSet());
return Mono.just(Stream.concat(groupsByUsername.stream(), groupsByDomain.stream())
.collect(Collectors.toSet()));
return Mono.just(Sets.union(groupsByUsername, groupsByDomain));
}
}

View file

@ -1,22 +1,44 @@
package com.provectus.kafka.ui.service.rbac.extractor;
import static com.provectus.kafka.ui.model.rbac.provider.Provider.Name.OAUTH;
import com.google.common.collect.Sets;
import com.provectus.kafka.ui.config.auth.OAuthProperties;
import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.provider.Provider;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.springframework.security.oauth2.core.user.DefaultOAuth2User;
import org.springframework.util.Assert;
import reactor.core.publisher.Mono;
@Slf4j
public class OauthAuthorityExtractor implements ProviderAuthorityExtractor {
public static final String ROLES_FIELD_PARAM_NAME = "roles-field";
@Override
public boolean isApplicable(String provider) {
return false; // TODO #2844
public boolean isApplicable(String provider, Map<String, String> customParams) {
var containsRolesFieldNameParam = customParams.containsKey(ROLES_FIELD_PARAM_NAME);
if (!containsRolesFieldNameParam) {
log.debug("Provider [{}] doesn't contain a roles field param name, mapping won't be performed", provider);
return false;
}
return OAUTH.equalsIgnoreCase(provider) || OAUTH.equalsIgnoreCase(customParams.get(TYPE));
}
@Override
public Mono<Set<String>> extract(AccessControlService acs, Object value, Map<String, Object> additionalParams) {
log.trace("Extracting OAuth2 user authorities");
DefaultOAuth2User principal;
try {
principal = (DefaultOAuth2User) value;
@ -25,7 +47,67 @@ public class OauthAuthorityExtractor implements ProviderAuthorityExtractor {
throw new RuntimeException();
}
return Mono.just(Set.of(principal.getName())); // TODO #2844
var provider = (OAuthProperties.OAuth2Provider) additionalParams.get("provider");
Assert.notNull(provider, "provider is null");
var rolesFieldName = provider.getCustomParams().get(ROLES_FIELD_PARAM_NAME);
Set<String> rolesByUsername = acs.getRoles()
.stream()
.filter(r -> r.getSubjects()
.stream()
.filter(s -> s.getProvider().equals(Provider.OAUTH))
.filter(s -> s.getType().equals("user"))
.anyMatch(s -> s.getValue().equals(principal.getName())))
.map(Role::getName)
.collect(Collectors.toSet());
Set<String> rolesByRolesField = acs.getRoles()
.stream()
.filter(role -> role.getSubjects()
.stream()
.filter(s -> s.getProvider().equals(Provider.OAUTH))
.filter(s -> s.getType().equals("role"))
.anyMatch(subject -> {
var roleName = subject.getValue();
var principalRoles = convertRoles(principal.getAttribute(rolesFieldName));
var roleMatched = principalRoles.contains(roleName);
if (roleMatched) {
log.debug("Assigning role [{}] to user [{}]", roleName, principal.getName());
} else {
log.trace("Role [{}] not found in user [{}] roles", roleName, principal.getName());
}
return roleMatched;
})
)
.map(Role::getName)
.collect(Collectors.toSet());
return Mono.just(Sets.union(rolesByUsername, rolesByRolesField));
}
@SuppressWarnings("unchecked")
private Collection<String> convertRoles(Object roles) {
  // Normalizes the raw roles attribute into a collection of role names:
  // a List/Set is returned as-is, a String is split on commas, and anything
  // else (including null) yields an empty set.
  if (roles == null) {
    log.debug("Param missing from attributes, skipping");
    return Collections.emptySet();
  }
  if (roles instanceof List<?> || roles instanceof Set<?>) {
    log.trace("The field is either a set or a list, returning as is");
    return (Collection<String>) roles;
  }
  if (roles instanceof String rolesString) {
    log.trace("Trying to deserialize the field value [{}] as a string", roles);
    return Arrays.stream(rolesString.split(","))
        .collect(Collectors.toSet());
  }
  log.debug("The field is not a string, skipping");
  return Collections.emptySet();
}
}

View file

@ -7,7 +7,9 @@ import reactor.core.publisher.Mono;
public interface ProviderAuthorityExtractor {
boolean isApplicable(String provider);
String TYPE = "type";
boolean isApplicable(String provider, Map<String, String> customParams);
Mono<Set<String>> extract(AccessControlService acs, Object value, Map<String, Object> additionalParams);

View file

@ -1,70 +0,0 @@
package com.provectus.kafka.ui.service.rbac.extractor;
import com.provectus.kafka.ui.config.auth.LdapProperties;
import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.provider.Provider;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.ApplicationContext;
import org.springframework.ldap.core.DirContextOperations;
import org.springframework.ldap.core.support.BaseLdapPathContextSource;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;
import org.springframework.util.Assert;
/**
 * Resolves Spring Security authorities for an LDAP user from the application's
 * RBAC configuration: every RBAC role with an LDAP "group" subject triggers an
 * LDAP group search, and each matching group is mapped to a granted authority.
 */
@Slf4j
public class RbacLdapAuthoritiesExtractor extends DefaultLdapAuthoritiesPopulator {
// RBAC configuration source (roles and their subjects).
private final AccessControlService acs;
// LDAP settings (group role attribute name, etc.).
private final LdapProperties props;
// Maps one LDAP search result (attribute name -> values) to an authority,
// taking the first value of the configured group-role attribute as the role
// name. NOTE(review): throws NPE if the attribute is absent from the entry —
// confirm the search always returns it.
private final Function<Map<String, List<String>>, GrantedAuthority> authorityMapper = (record) -> {
String role = record.get(getGroupRoleAttribute()).get(0);
return new SimpleGrantedAuthority(role);
};
public RbacLdapAuthoritiesExtractor(ApplicationContext context) {
// groupSearchBase is deliberately null here: each search instead uses the
// per-subject base taken from the RBAC config (see getRoles below).
super(context.getBean(BaseLdapPathContextSource.class), null);
this.acs = context.getBean(AccessControlService.class);
this.props = context.getBean(LdapProperties.class);
}
/**
 * Collects additional authorities for the authenticated user by running one
 * group search per LDAP "group" subject declared across all RBAC roles.
 */
@Override
public Set<GrantedAuthority> getAdditionalRoles(DirContextOperations user, String username) {
return acs.getRoles()
.stream()
.map(Role::getSubjects)
.flatMap(List::stream)
.filter(s -> s.getProvider().equals(Provider.LDAP))
.filter(s -> s.getType().equals("group"))
// subject value is used as the LDAP search base for that group lookup
.flatMap(subject -> getRoles(subject.getValue(), user.getNameInNamespace(), username).stream())
.collect(Collectors.toSet());
}
// Searches one base for groups containing the user and maps each hit to an
// authority via authorityMapper.
private Set<GrantedAuthority> getRoles(String groupSearchBase, String userDn, String username) {
Assert.notNull(groupSearchBase, "groupSearchBase is empty");
log.trace(
"Searching for roles for user [{}] with DN [{}], groupRoleAttribute [{}] and filter [{}] in search base [{}]",
username, userDn, props.getGroupRoleAttribute(), getGroupSearchFilter(), groupSearchBase);
var ldapTemplate = getLdapTemplate();
// Tolerate a missing/nonexistent search base instead of failing the login.
ldapTemplate.setIgnoreNameNotFoundException(true);
// Filter params: {0} = user DN, {1} = username — presumably matching the
// placeholder convention of DefaultLdapAuthoritiesPopulator's group search
// filter; verify against the configured filter.
Set<Map<String, List<String>>> userRoles = ldapTemplate.searchForMultipleAttributeValues(
groupSearchBase, getGroupSearchFilter(), new String[] {userDn, username},
new String[] {props.getGroupRoleAttribute()});
return userRoles.stream()
.map(authorityMapper)
.peek(a -> log.debug("Mapped role [{}] for user [{}]", a, username))
.collect(Collectors.toSet());
}
}

View file

@ -230,7 +230,7 @@ public class DynamicConfigOperations {
Optional.ofNullable(auth)
.flatMap(a -> Optional.ofNullable(a.oauth2))
.ifPresent(OAuthProperties::validate);
.ifPresent(OAuthProperties::init);
Optional.ofNullable(webclient)
.ifPresent(WebclientProperties::validate);

View file

@ -118,10 +118,18 @@ class MessageFiltersTest {
}
@Test
void keySetToNullIfKeyCantBeParsedToJson() {
var f = groovyScriptFilter("key == null");
void keySetToKeyStringIfCantBeParsedToJson() {
var f = groovyScriptFilter("key == \"not json\"");
assertTrue(f.test(msg().key("not json")));
assertFalse(f.test(msg().key("{ \"k\" : \"v\" }")));
}
@Test
void keyAndKeyAsTextSetToNullIfRecordsKeyIsNull() {
var f = groovyScriptFilter("key == null");
assertTrue(f.test(msg().key(null)));
f = groovyScriptFilter("keyAsText == null");
assertTrue(f.test(msg().key(null)));
}
@Test
@ -132,10 +140,18 @@ class MessageFiltersTest {
}
@Test
void valueSetToNullIfKeyCantBeParsedToJson() {
var f = groovyScriptFilter("value == null");
void valueSetToContentStringIfCantBeParsedToJson() {
var f = groovyScriptFilter("value == \"not json\"");
assertTrue(f.test(msg().content("not json")));
assertFalse(f.test(msg().content("{ \"k\" : \"v\" }")));
}
@Test
void valueAndValueAsTextSetToNullIfRecordsContentIsNull() {
var f = groovyScriptFilter("value == null");
assertTrue(f.test(msg().content(null)));
f = groovyScriptFilter("valueAsText == null");
assertTrue(f.test(msg().content(null)));
}
@Test
@ -185,4 +201,4 @@ class MessageFiltersTest {
.partition(1);
}
}
}

View file

@ -3,6 +3,7 @@ package com.provectus.kafka.ui.pages.ksqldb;
import static com.codeborne.selenide.Condition.visible;
import static com.codeborne.selenide.Selenide.$$x;
import static com.codeborne.selenide.Selenide.$x;
import static com.codeborne.selenide.Selenide.sleep;
import com.codeborne.selenide.CollectionCondition;
import com.codeborne.selenide.Condition;
@ -37,6 +38,7 @@ public class KsqlQueryForm extends BasePage {
// Clears the KSQL query editor via the Clear button.
// NOTE(review): the fixed 500 ms sleep is presumably waiting for the editor to
// finish clearing; a hard-coded sleep is flaky-test prone — prefer an explicit
// wait on the editor's emptied state if Selenide allows it here.
@Step
public KsqlQueryForm clickClearBtn() {
clickByJavaScript(clearBtn);
sleep(500);
return this;
}

View file

@ -1,4 +1,4 @@
import React, { useEffect, useMemo } from 'react';
import React, { useMemo } from 'react';
import PageHeading from 'components/common/PageHeading/PageHeading';
import * as Metrics from 'components/common/Metrics';
import { Tag } from 'components/common/Tag/Tag.styled';
@ -10,7 +10,6 @@ import Table, { SizeCell } from 'components/common/NewTable';
import useBoolean from 'lib/hooks/useBoolean';
import { clusterNewConfigPath } from 'lib/paths';
import { GlobalSettingsContext } from 'components/contexts/GlobalSettingsContext';
import { useNavigate } from 'react-router-dom';
import { ActionCanButton } from 'components/common/ActionComponent';
import { useGetUserInfo } from 'lib/hooks/api/roles';
@ -23,7 +22,7 @@ const Dashboard: React.FC = () => {
const clusters = useClusters();
const { value: showOfflineOnly, toggle } = useBoolean(false);
const appInfo = React.useContext(GlobalSettingsContext);
const navigate = useNavigate();
const config = React.useMemo(() => {
const clusterList = clusters.data || [];
const offlineClusters = clusterList.filter(
@ -58,12 +57,6 @@ const Dashboard: React.FC = () => {
return initialColumns;
}, []);
useEffect(() => {
if (appInfo.hasDynamicConfig && !clusters.data) {
navigate(clusterNewConfigPath);
}
}, [clusters, appInfo.hasDynamicConfig]);
const isApplicationConfig = useMemo(() => {
return !!data?.userInfo?.permissions.some(
(permission) => permission.resource === ResourceType.APPLICATIONCONFIG

View file

@ -1,45 +0,0 @@
import React from 'react';
import { useClusters } from 'lib/hooks/api/clusters';
import Dashboard from 'components/Dashboard/Dashboard';
import { Cluster, ServerStatus } from 'generated-sources';
import { render } from 'lib/testHelpers';
// Shape of the mocked useClusters() query result.
interface DataType {
data: Cluster[] | undefined;
}
// Replace the clusters hook so each test controls the cluster list.
jest.mock('lib/hooks/api/clusters');
// Capture navigation calls: react-router's useNavigate is stubbed to return
// this spy while the rest of the module stays real.
const mockedNavigate = jest.fn();
jest.mock('react-router-dom', () => ({
...jest.requireActual('react-router-dom'),
useNavigate: () => mockedNavigate,
}));
describe('Dashboard component', () => {
// Renders <Dashboard /> with the given dynamic-config flag and mocked
// cluster data. NOTE(review): renderComponent is synchronous — the `await`s
// below are no-ops and could be dropped.
const renderComponent = (hasDynamicConfig: boolean, data: DataType) => {
const useClustersMock = useClusters as jest.Mock;
useClustersMock.mockReturnValue(data);
render(<Dashboard />, {
globalSettings: { hasDynamicConfig },
});
};
// No clusters + dynamic config on -> redirect to the wizard page.
it('redirects to new cluster configuration page if there are no clusters and dynamic config is enabled', async () => {
await renderComponent(true, { data: undefined });
expect(mockedNavigate).toHaveBeenCalled();
});
// Clusters exist -> stay on the dashboard.
it('should not navigate to new cluster config page when there are clusters', async () => {
await renderComponent(true, {
data: [{ name: 'Cluster 1', status: ServerStatus.ONLINE }],
});
expect(mockedNavigate).not.toHaveBeenCalled();
});
// Dynamic config off -> never redirect, even with no clusters.
it('should not navigate to new cluster config page when there are no clusters and hasDynamicConfig is false', async () => {
await renderComponent(false, {
data: [],
});
expect(mockedNavigate).not.toHaveBeenCalled();
});
});

View file

@ -33,6 +33,7 @@ export const Fieldset = styled.fieldset`
flex: 1;
flex-direction: column;
gap: 8px;
color: ${({ theme }) => theme.default.color.normal};
`;
export const ButtonsContainer = styled.div`

View file

@ -14,9 +14,6 @@ export const DiffWrapper = styled.div`
background-color: ${({ theme }) => theme.default.backgroundColor};
color: ${({ theme }) => theme.default.color.normal};
}
.ace_line {
background-color: ${({ theme }) => theme.default.backgroundColor};
}
.ace_gutter-cell {
background-color: ${({ theme }) =>
theme.ksqlDb.query.editor.cell.backgroundColor};
@ -39,10 +36,10 @@ export const DiffWrapper = styled.div`
.ace_string {
color: ${({ theme }) => theme.ksqlDb.query.editor.aceString};
}
> .codeMarker {
background: ${({ theme }) => theme.icons.warningIcon};
.codeMarker {
background-color: ${({ theme }) => theme.ksqlDb.query.editor.codeMarker};
position: absolute;
z-index: 20;
z-index: 2000;
}
`;

View file

@ -39,6 +39,10 @@ export const StyledSlider = styled.span<Props>`
transition: 0.4s;
border-radius: 20px;
:hover {
background-color: ${({ theme }) => theme.switch.hover};
}
&::before {
position: absolute;
content: '';

View file

@ -2,7 +2,7 @@ import { useAppInfo } from 'lib/hooks/api/appConfig';
import React from 'react';
import { ApplicationInfoEnabledFeaturesEnum } from 'generated-sources';
export interface GlobalSettingsContextProps {
interface GlobalSettingsContextProps {
hasDynamicConfig: boolean;
}

View file

@ -26,10 +26,7 @@ import {
} from '@tanstack/react-query';
import { ConfirmContextProvider } from 'components/contexts/ConfirmContext';
import ConfirmationModal from 'components/common/ConfirmationModal/ConfirmationModal';
import {
GlobalSettingsContext,
GlobalSettingsContextProps,
} from 'components/contexts/GlobalSettingsContext';
import { GlobalSettingsContext } from 'components/contexts/GlobalSettingsContext';
import { UserInfoRolesAccessContext } from 'components/contexts/UserInfoRolesAccessContext';
import { RolesType, modifyRolesData } from './permissions';
@ -38,7 +35,6 @@ interface CustomRenderOptions extends Omit<RenderOptions, 'wrapper'> {
preloadedState?: Partial<RootState>;
store?: Store<Partial<RootState>, AnyAction>;
initialEntries?: MemoryRouterProps['initialEntries'];
globalSettings?: GlobalSettingsContextProps;
userInfo?: {
roles?: RolesType;
rbacFlag: boolean;
@ -114,7 +110,6 @@ const customRender = (
preloadedState,
}),
initialEntries,
globalSettings = { hasDynamicConfig: false },
userInfo,
...renderOptions
}: CustomRenderOptions = {}
@ -124,7 +119,7 @@ const customRender = (
children,
}) => (
<TestQueryClientProvider>
<GlobalSettingsContext.Provider value={globalSettings}>
<GlobalSettingsContext.Provider value={{ hasDynamicConfig: false }}>
<ThemeProvider theme={theme}>
<TestUserInfoProvider data={userInfo}>
<ConfirmContextProvider>

View file

@ -235,12 +235,13 @@ const baseTheme = {
color: Colors.neutral[90],
},
switch: {
unchecked: Colors.brand[30],
unchecked: Colors.neutral[20],
hover: Colors.neutral[40],
checked: Colors.brand[50],
circle: Colors.neutral[0],
disabled: Colors.neutral[10],
checkedIcon: {
backgroundColor: Colors.neutral[70],
backgroundColor: Colors.neutral[10],
},
},
pageLoader: {
@ -366,6 +367,7 @@ export const theme = {
cursor: Colors.neutral[90],
variable: Colors.red[50],
aceString: Colors.green[60],
codeMarker: Colors.yellow[20],
},
},
},
@ -767,6 +769,7 @@ export const darkTheme: ThemeType = {
cursor: Colors.neutral[0],
variable: Colors.red[50],
aceString: Colors.green[60],
codeMarker: Colors.yellow[20],
},
},
},