Merge branch 'master' into issue/3422

commit d80e17f900
58 changed files with 940 additions and 330 deletions

8 .github/release_drafter.yaml vendored
@@ -16,18 +16,26 @@ exclude-labels:
- 'type/refactoring'

categories:
- title: '🚩 Breaking Changes'
labels:
- 'impact/changelog'

- title: '⚙️Features'
labels:
- 'type/feature'

- title: '🪛Enhancements'
labels:
- 'type/enhancement'

- title: '🔨Bug Fixes'
labels:
- 'type/bug'

- title: 'Security'
labels:
- 'type/security'

- title: '⎈ Helm/K8S Changes'
labels:
- 'scope/k8s'
2 .github/workflows/e2e-checks.yaml vendored
@@ -45,7 +45,7 @@ jobs:
# use the following command until #819 will be fixed
run: |
docker-compose -f kafka-ui-e2e-checks/docker/selenoid-git.yaml up -d
docker-compose -f ./documentation/compose/e2e-tests.yaml up -d
docker-compose -f ./documentation/compose/e2e-tests.yaml up -d && until [ "$(docker exec kafka-ui wget --spider --server-response http://localhost:8080/actuator/health 2>&1 | grep -c 'HTTP/1.1 200 OK')" == "1" ]; do echo "Waiting for kafka-ui ..." && sleep 1; done
- name: Run test suite
run: |
./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
17 .github/workflows/release_drafter.yml vendored
@@ -2,18 +2,33 @@ name: Release Drafter

on:
push:
# branches to consider in the event; optional, defaults to all
branches:
- master
workflow_dispatch:
inputs:
version:
description: 'Release version'
required: false
branch:
description: 'Target branch'
required: false
default: 'master'

permissions:
contents: read

jobs:
update_release_draft:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: release-drafter/release-drafter@v5
with:
config-name: release_drafter.yaml
disable-autolabeler: true
version: ${{ github.event.inputs.version }}
commitish: ${{ github.event.inputs.branch }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -6,7 +6,8 @@ Following versions of the project are currently being supported with security up

| Version | Supported          |
| ------- | ------------------ |
| 0.6.x   | :white_check_mark: |
| 0.7.x   | :white_check_mark: |
| 0.6.x   | :x:                |
| 0.5.x   | :x:                |
| 0.4.x   | :x:                |
| 0.3.x   | :x:                |
@@ -2,6 +2,6 @@ apiVersion: v2
name: kafka-ui
description: A Helm chart for kafka-UI
type: application
version: 0.6.2
appVersion: v0.6.2
version: 0.7.0
appVersion: v0.7.0
icon: https://github.com/provectus/kafka-ui/raw/master/documentation/images/kafka-ui-logo.png
@@ -14,13 +14,11 @@ public class LdapProperties {
private String adminPassword;
private String userFilterSearchBase;
private String userFilterSearchFilter;
private String groupFilterSearchBase;

@Value("${oauth2.ldap.activeDirectory:false}")
private boolean isActiveDirectory;
@Value("${oauth2.ldap.aсtiveDirectory.domain:@null}")
private String activeDirectoryDomain;

@Value("${oauth2.ldap.groupRoleAttribute:cn}")
private String groupRoleAttribute;

}
@ -3,7 +3,6 @@ package com.provectus.kafka.ui.config.auth;
|
|||
import static com.provectus.kafka.ui.config.auth.AbstractAuthSecurityConfig.AUTH_WHITELIST;
|
||||
|
||||
import com.provectus.kafka.ui.service.rbac.AccessControlService;
|
||||
import com.provectus.kafka.ui.service.rbac.extractor.RbacLdapAuthoritiesExtractor;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import javax.annotation.Nullable;
|
||||
|
@ -12,7 +11,6 @@ import lombok.extern.slf4j.Slf4j;
|
|||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.boot.autoconfigure.ldap.LdapAutoConfiguration;
|
||||
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Import;
|
||||
|
@ -34,6 +32,8 @@ import org.springframework.security.ldap.authentication.LdapAuthenticationProvid
|
|||
import org.springframework.security.ldap.authentication.ad.ActiveDirectoryLdapAuthenticationProvider;
|
||||
import org.springframework.security.ldap.search.FilterBasedLdapUserSearch;
|
||||
import org.springframework.security.ldap.search.LdapUserSearch;
|
||||
import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;
|
||||
import org.springframework.security.ldap.userdetails.LdapAuthoritiesPopulator;
|
||||
import org.springframework.security.ldap.userdetails.LdapUserDetailsMapper;
|
||||
import org.springframework.security.web.server.SecurityWebFilterChain;
|
||||
|
||||
|
@ -50,7 +50,7 @@ public class LdapSecurityConfig {
|
|||
|
||||
@Bean
|
||||
public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSource contextSource,
|
||||
ApplicationContext context,
|
||||
LdapAuthoritiesPopulator ldapAuthoritiesPopulator,
|
||||
@Nullable AccessControlService acs) {
|
||||
var rbacEnabled = acs != null && acs.isRbacEnabled();
|
||||
BindAuthenticator ba = new BindAuthenticator(contextSource);
|
||||
|
@ -67,7 +67,7 @@ public class LdapSecurityConfig {
|
|||
AbstractLdapAuthenticationProvider authenticationProvider;
|
||||
if (!props.isActiveDirectory()) {
|
||||
authenticationProvider = rbacEnabled
|
||||
? new LdapAuthenticationProvider(ba, new RbacLdapAuthoritiesExtractor(context))
|
||||
? new LdapAuthenticationProvider(ba, ldapAuthoritiesPopulator)
|
||||
: new LdapAuthenticationProvider(ba);
|
||||
} else {
|
||||
authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(props.getActiveDirectoryDomain(),
|
||||
|
@ -95,6 +95,15 @@ public class LdapSecurityConfig {
|
|||
return ctx;
|
||||
}
|
||||
|
||||
@Bean
|
||||
@Primary
|
||||
public LdapAuthoritiesPopulator ldapAuthoritiesPopulator(BaseLdapPathContextSource contextSource) {
|
||||
var authoritiesPopulator = new DefaultLdapAuthoritiesPopulator(contextSource, props.getGroupFilterSearchBase());
|
||||
authoritiesPopulator.setRolePrefix("");
|
||||
authoritiesPopulator.setConvertToUpperCase(false);
|
||||
return authoritiesPopulator;
|
||||
}
|
||||
|
||||
@Bean
|
||||
public SecurityWebFilterChain configureLdap(ServerHttpSecurity http) {
|
||||
log.info("Configuring LDAP authentication.");
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
package com.provectus.kafka.ui.config.auth;
|
||||
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
@ -14,7 +15,16 @@ public class OAuthProperties {
|
|||
private Map<String, OAuth2Provider> client = new HashMap<>();
|
||||
|
||||
@PostConstruct
|
||||
public void validate() {
|
||||
public void init() {
|
||||
getClient().values().forEach((provider) -> {
|
||||
if (provider.getCustomParams() == null) {
|
||||
provider.setCustomParams(Collections.emptyMap());
|
||||
}
|
||||
if (provider.getScope() == null) {
|
||||
provider.setScope(Collections.emptySet());
|
||||
}
|
||||
});
|
||||
|
||||
getClient().values().forEach(this::validateProvider);
|
||||
}
|
||||
|
||||
|
|
|
@ -73,8 +73,7 @@ public final class OAuthPropertiesConverter {
|
|||
}
|
||||
|
||||
private static boolean isGoogle(OAuth2Provider provider) {
|
||||
return provider.getCustomParams() != null
|
||||
&& GOOGLE.equalsIgnoreCase(provider.getCustomParams().get(TYPE));
|
||||
return GOOGLE.equalsIgnoreCase(provider.getCustomParams().get(TYPE));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -72,13 +72,13 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
|
|||
final OidcReactiveOAuth2UserService delegate = new OidcReactiveOAuth2UserService();
|
||||
return request -> delegate.loadUser(request)
|
||||
.flatMap(user -> {
|
||||
String providerId = request.getClientRegistration().getRegistrationId();
|
||||
final var extractor = getExtractor(providerId, acs);
|
||||
var provider = getProviderByProviderId(request.getClientRegistration().getRegistrationId());
|
||||
final var extractor = getExtractor(provider, acs);
|
||||
if (extractor == null) {
|
||||
return Mono.just(user);
|
||||
}
|
||||
|
||||
return extractor.extract(acs, user, Map.of("request", request))
|
||||
return extractor.extract(acs, user, Map.of("request", request, "provider", provider))
|
||||
.map(groups -> new RbacOidcUser(user, groups));
|
||||
});
|
||||
}
|
||||
|
@ -88,13 +88,13 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
|
|||
final DefaultReactiveOAuth2UserService delegate = new DefaultReactiveOAuth2UserService();
|
||||
return request -> delegate.loadUser(request)
|
||||
.flatMap(user -> {
|
||||
String providerId = request.getClientRegistration().getRegistrationId();
|
||||
final var extractor = getExtractor(providerId, acs);
|
||||
var provider = getProviderByProviderId(request.getClientRegistration().getRegistrationId());
|
||||
final var extractor = getExtractor(provider, acs);
|
||||
if (extractor == null) {
|
||||
return Mono.just(user);
|
||||
}
|
||||
|
||||
return extractor.extract(acs, user, Map.of("request", request))
|
||||
return extractor.extract(acs, user, Map.of("request", request, "provider", provider))
|
||||
.map(groups -> new RbacOAuth2User(user, groups));
|
||||
});
|
||||
}
|
||||
|
@ -113,18 +113,18 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
|
|||
}
|
||||
|
||||
@Nullable
|
||||
private ProviderAuthorityExtractor getExtractor(final String providerId, AccessControlService acs) {
|
||||
final String provider = getProviderByProviderId(providerId);
|
||||
private ProviderAuthorityExtractor getExtractor(final OAuthProperties.OAuth2Provider provider,
|
||||
AccessControlService acs) {
|
||||
Optional<ProviderAuthorityExtractor> extractor = acs.getOauthExtractors()
|
||||
.stream()
|
||||
.filter(e -> e.isApplicable(provider))
|
||||
.filter(e -> e.isApplicable(provider.getProvider(), provider.getCustomParams()))
|
||||
.findFirst();
|
||||
|
||||
return extractor.orElse(null);
|
||||
}
|
||||
|
||||
private String getProviderByProviderId(final String providerId) {
|
||||
return properties.getClient().get(providerId).getProvider();
|
||||
private OAuthProperties.OAuth2Provider getProviderByProviderId(final String providerId) {
|
||||
return properties.getClient().get(providerId);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -46,10 +46,8 @@ public class CognitoLogoutSuccessHandler implements LogoutSuccessHandler {
|
|||
.fragment(null)
|
||||
.build();
|
||||
|
||||
Assert.isTrue(
|
||||
provider.getCustomParams() != null && provider.getCustomParams().containsKey("logoutUrl"),
|
||||
"Custom params should contain 'logoutUrl'"
|
||||
);
|
||||
Assert.isTrue(provider.getCustomParams().containsKey("logoutUrl"),
|
||||
"Custom params should contain 'logoutUrl'");
|
||||
final var uri = UriComponentsBuilder
|
||||
.fromUri(URI.create(provider.getCustomParams().get("logoutUrl")))
|
||||
.queryParam("client_id", provider.getClientId())
|
||||
|
|
|
@ -12,8 +12,11 @@ import java.security.Principal;
|
|||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Collectors;
|
||||
import javax.annotation.Nullable;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.security.core.context.ReactiveSecurityContextHolder;
|
||||
import org.springframework.security.core.context.SecurityContext;
|
||||
|
@ -23,15 +26,12 @@ import reactor.core.publisher.Mono;
|
|||
|
||||
@RestController
|
||||
@RequiredArgsConstructor
|
||||
@Slf4j
|
||||
public class AccessController implements AuthorizationApi {
|
||||
|
||||
private final AccessControlService accessControlService;
|
||||
|
||||
public Mono<ResponseEntity<AuthenticationInfoDTO>> getUserAuthInfo(ServerWebExchange exchange) {
|
||||
AuthenticationInfoDTO dto = new AuthenticationInfoDTO();
|
||||
dto.setRbacEnabled(accessControlService.isRbacEnabled());
|
||||
UserInfoDTO userInfo = new UserInfoDTO();
|
||||
|
||||
Mono<List<UserPermissionDTO>> permissions = accessControlService.getUser()
|
||||
.map(user -> accessControlService.getRoles()
|
||||
.stream()
|
||||
|
@ -49,13 +49,11 @@ public class AccessController implements AuthorizationApi {
|
|||
return userName
|
||||
.zipWith(permissions)
|
||||
.map(data -> {
|
||||
userInfo.setUsername(data.getT1());
|
||||
userInfo.setPermissions(data.getT2());
|
||||
|
||||
dto.setUserInfo(userInfo);
|
||||
var dto = new AuthenticationInfoDTO(accessControlService.isRbacEnabled());
|
||||
dto.setUserInfo(new UserInfoDTO(data.getT1(), data.getT2()));
|
||||
return dto;
|
||||
})
|
||||
.switchIfEmpty(Mono.just(dto))
|
||||
.switchIfEmpty(Mono.just(new AuthenticationInfoDTO(accessControlService.isRbacEnabled())))
|
||||
.map(ResponseEntity::ok);
|
||||
}
|
||||
|
||||
|
@ -70,11 +68,22 @@ public class AccessController implements AuthorizationApi {
|
|||
dto.setActions(permission.getActions()
|
||||
.stream()
|
||||
.map(String::toUpperCase)
|
||||
.map(ActionDTO::valueOf)
|
||||
.map(this::mapAction)
|
||||
.filter(Objects::nonNull)
|
||||
.collect(Collectors.toList()));
|
||||
return dto;
|
||||
})
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private ActionDTO mapAction(String name) {
|
||||
try {
|
||||
return ActionDTO.fromValue(name);
|
||||
} catch (IllegalArgumentException e) {
|
||||
log.warn("Unknown Action [{}], skipping", name);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -211,7 +211,7 @@ public class KafkaConnectController extends AbstractController implements KafkaC
|
|||
Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder()
|
||||
.cluster(clusterName)
|
||||
.connect(connectName)
|
||||
.connectActions(ConnectAction.VIEW, ConnectAction.EDIT)
|
||||
.connectActions(ConnectAction.VIEW, ConnectAction.RESTART)
|
||||
.build());
|
||||
|
||||
return validateAccess.then(
|
||||
|
|
@@ -39,41 +39,42 @@ public class MessageFilters {
}

static Predicate<TopicMessageDTO> groovyScriptFilter(String script) {
var compiledScript = compileScript(script);
var engine = getGroovyEngine();
var compiledScript = compileScript(engine, script);
var jsonSlurper = new JsonSlurper();
return new Predicate<TopicMessageDTO>() {
@SneakyThrows
@Override
public boolean test(TopicMessageDTO msg) {
var bindings = getGroovyEngine().createBindings();
var bindings = engine.createBindings();
bindings.put("partition", msg.getPartition());
bindings.put("offset", msg.getOffset());
bindings.put("timestampMs", msg.getTimestamp().toInstant().toEpochMilli());
bindings.put("keyAsText", msg.getKey());
bindings.put("valueAsText", msg.getContent());
bindings.put("headers", msg.getHeaders());
bindings.put("key", parseToJsonOrReturnNull(jsonSlurper, msg.getKey()));
bindings.put("value", parseToJsonOrReturnNull(jsonSlurper, msg.getContent()));
bindings.put("key", parseToJsonOrReturnAsIs(jsonSlurper, msg.getKey()));
bindings.put("value", parseToJsonOrReturnAsIs(jsonSlurper, msg.getContent()));
var result = compiledScript.eval(bindings);
if (result instanceof Boolean) {
return (Boolean) result;
} else {
throw new ValidationException(
String.format("Unexpected script result: %s, Boolean should be returned instead", result));
"Unexpected script result: %s, Boolean should be returned instead".formatted(result));
}
}
};
}

@Nullable
private static Object parseToJsonOrReturnNull(JsonSlurper parser, @Nullable String str) {
private static Object parseToJsonOrReturnAsIs(JsonSlurper parser, @Nullable String str) {
if (str == null) {
return null;
}
try {
return parser.parseText(str);
} catch (Exception e) {
return null;
return str;
}
}

@@ -86,9 +87,9 @@ public class MessageFilters {
return GROOVY_ENGINE;
}

private static CompiledScript compileScript(String script) {
private static CompiledScript compileScript(GroovyScriptEngineImpl engine, String script) {
try {
return getGroovyEngine().compile(script);
return engine.compile(script);
} catch (ScriptException e) {
throw new ValidationException("Script syntax error: " + e.getMessage());
}
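Note: the updated groovyScriptFilter above compiles the script once per filter and exposes each message through named bindings (partition, offset, timestampMs, keyAsText, valueAsText, headers, key, value); keys and values that cannot be parsed as JSON are now bound as raw strings instead of null. Below is a minimal, self-contained sketch of the same evaluation flow using the JSR-223 Groovy engine; the binding names come from the diff, while the class name, sample script, and sample data are illustrative only.

import groovy.json.JsonSlurper;
import javax.script.Bindings;
import javax.script.CompiledScript;
import javax.script.ScriptException;
import org.codehaus.groovy.jsr223.GroovyScriptEngineImpl;

public class GroovyFilterSketch {
  public static void main(String[] args) throws ScriptException {
    // Compile the filter script once; evaluate it against per-message bindings.
    GroovyScriptEngineImpl engine = new GroovyScriptEngineImpl();
    CompiledScript filter = engine.compile("partition == 0 && value?.status == 'ERROR'");

    Bindings bindings = engine.createBindings();
    bindings.put("partition", 0);
    bindings.put("offset", 42L);
    // "value" is the message content parsed as JSON when possible,
    // otherwise the raw string (the new parseToJsonOrReturnAsIs behaviour).
    bindings.put("value", new JsonSlurper().parseText("{\"status\": \"ERROR\"}"));

    Object result = filter.eval(bindings);
    System.out.println(result); // prints: true
  }
}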
@ -1,5 +1,6 @@
|
|||
package com.provectus.kafka.ui.model.rbac;
|
||||
|
||||
import static com.provectus.kafka.ui.model.rbac.Resource.ACL;
|
||||
import static com.provectus.kafka.ui.model.rbac.Resource.APPLICATIONCONFIG;
|
||||
import static com.provectus.kafka.ui.model.rbac.Resource.CLUSTERCONFIG;
|
||||
import static com.provectus.kafka.ui.model.rbac.Resource.KSQL;
|
||||
|
@ -27,7 +28,7 @@ import org.springframework.util.Assert;
|
|||
@EqualsAndHashCode
|
||||
public class Permission {
|
||||
|
||||
private static final List<Resource> RBAC_ACTION_EXEMPT_LIST = List.of(KSQL, CLUSTERCONFIG, APPLICATIONCONFIG);
|
||||
private static final List<Resource> RBAC_ACTION_EXEMPT_LIST = List.of(KSQL, CLUSTERCONFIG, APPLICATIONCONFIG, ACL);
|
||||
|
||||
Resource resource;
|
||||
List<String> actions;
|
||||
|
|
|
@ -7,7 +7,8 @@ public enum ConnectAction implements PermissibleAction {
|
|||
|
||||
VIEW,
|
||||
EDIT,
|
||||
CREATE
|
||||
CREATE,
|
||||
RESTART
|
||||
|
||||
;
|
||||
|
||||
|
|
|
@ -10,6 +10,8 @@ public enum Provider {
|
|||
|
||||
OAUTH_COGNITO,
|
||||
|
||||
OAUTH,
|
||||
|
||||
LDAP,
|
||||
LDAP_AD;
|
||||
|
||||
|
@ -22,6 +24,8 @@ public enum Provider {
|
|||
public static String GOOGLE = "google";
|
||||
public static String GITHUB = "github";
|
||||
public static String COGNITO = "cognito";
|
||||
|
||||
public static String OAUTH = "oauth";
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -4,31 +4,23 @@ import com.provectus.kafka.ui.model.ClusterFeature;
|
|||
import com.provectus.kafka.ui.model.KafkaCluster;
|
||||
import com.provectus.kafka.ui.service.ReactiveAdminClient.ClusterDescription;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.function.Predicate;
|
||||
import javax.annotation.Nullable;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.kafka.common.Node;
|
||||
import org.apache.kafka.common.acl.AclOperation;
|
||||
import org.springframework.stereotype.Service;
|
||||
import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@Slf4j
|
||||
public class FeatureService {
|
||||
|
||||
private static final String DELETE_TOPIC_ENABLED_SERVER_PROPERTY = "delete.topic.enable";
|
||||
|
||||
private final AdminClientService adminClientService;
|
||||
|
||||
public Mono<List<ClusterFeature>> getAvailableFeatures(KafkaCluster cluster,
|
||||
public Mono<List<ClusterFeature>> getAvailableFeatures(ReactiveAdminClient adminClient,
|
||||
KafkaCluster cluster,
|
||||
ClusterDescription clusterDescription) {
|
||||
List<Mono<ClusterFeature>> features = new ArrayList<>();
|
||||
|
||||
|
@ -46,44 +38,36 @@ public class FeatureService {
|
|||
features.add(Mono.just(ClusterFeature.SCHEMA_REGISTRY));
|
||||
}
|
||||
|
||||
features.add(topicDeletionEnabled(cluster, clusterDescription.getController()));
|
||||
features.add(aclView(cluster));
|
||||
features.add(aclEdit(clusterDescription));
|
||||
features.add(topicDeletionEnabled(adminClient));
|
||||
features.add(aclView(adminClient));
|
||||
features.add(aclEdit(adminClient, clusterDescription));
|
||||
|
||||
return Flux.fromIterable(features).flatMap(m -> m).collectList();
|
||||
}
|
||||
|
||||
private Mono<ClusterFeature> topicDeletionEnabled(KafkaCluster cluster, @Nullable Node controller) {
|
||||
if (controller == null) {
|
||||
return Mono.just(ClusterFeature.TOPIC_DELETION); // assuming it is enabled by default
|
||||
}
|
||||
return adminClientService.get(cluster)
|
||||
.flatMap(ac -> ac.loadBrokersConfig(List.of(controller.id())))
|
||||
.map(config ->
|
||||
config.values().stream()
|
||||
.flatMap(Collection::stream)
|
||||
.filter(e -> e.name().equals(DELETE_TOPIC_ENABLED_SERVER_PROPERTY))
|
||||
.map(e -> Boolean.parseBoolean(e.value()))
|
||||
.findFirst()
|
||||
.orElse(true))
|
||||
.flatMap(enabled -> enabled
|
||||
? Mono.just(ClusterFeature.TOPIC_DELETION)
|
||||
: Mono.empty());
|
||||
private Mono<ClusterFeature> topicDeletionEnabled(ReactiveAdminClient adminClient) {
|
||||
return adminClient.isTopicDeletionEnabled()
|
||||
? Mono.just(ClusterFeature.TOPIC_DELETION)
|
||||
: Mono.empty();
|
||||
}
|
||||
|
||||
private Mono<ClusterFeature> aclEdit(ClusterDescription clusterDescription) {
|
||||
private Mono<ClusterFeature> aclEdit(ReactiveAdminClient adminClient, ClusterDescription clusterDescription) {
|
||||
var authorizedOps = Optional.ofNullable(clusterDescription.getAuthorizedOperations()).orElse(Set.of());
|
||||
boolean canEdit = authorizedOps.contains(AclOperation.ALL) || authorizedOps.contains(AclOperation.ALTER);
|
||||
boolean canEdit = aclViewEnabled(adminClient)
|
||||
&& (authorizedOps.contains(AclOperation.ALL) || authorizedOps.contains(AclOperation.ALTER));
|
||||
return canEdit
|
||||
? Mono.just(ClusterFeature.KAFKA_ACL_EDIT)
|
||||
: Mono.empty();
|
||||
}
|
||||
|
||||
private Mono<ClusterFeature> aclView(KafkaCluster cluster) {
|
||||
return adminClientService.get(cluster).flatMap(
|
||||
ac -> ac.getClusterFeatures().contains(ReactiveAdminClient.SupportedFeature.AUTHORIZED_SECURITY_ENABLED)
|
||||
? Mono.just(ClusterFeature.KAFKA_ACL_VIEW)
|
||||
: Mono.empty()
|
||||
);
|
||||
private Mono<ClusterFeature> aclView(ReactiveAdminClient adminClient) {
|
||||
return aclViewEnabled(adminClient)
|
||||
? Mono.just(ClusterFeature.KAFKA_ACL_VIEW)
|
||||
: Mono.empty();
|
||||
}
|
||||
|
||||
private boolean aclViewEnabled(ReactiveAdminClient adminClient) {
|
||||
return adminClient.getClusterFeatures().contains(ReactiveAdminClient.SupportedFeature.AUTHORIZED_SECURITY_ENABLED);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -32,8 +32,9 @@ import java.util.stream.Collectors;
|
|||
import java.util.stream.Stream;
|
||||
import javax.annotation.Nullable;
|
||||
import lombok.AccessLevel;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Getter;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.Value;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.apache.kafka.clients.admin.AdminClient;
|
||||
|
@ -75,7 +76,6 @@ import org.apache.kafka.common.errors.TopicAuthorizationException;
|
|||
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
|
||||
import org.apache.kafka.common.errors.UnsupportedVersionException;
|
||||
import org.apache.kafka.common.requests.DescribeLogDirsResponse;
|
||||
import org.apache.kafka.common.resource.ResourcePattern;
|
||||
import org.apache.kafka.common.resource.ResourcePatternFilter;
|
||||
import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
@ -85,7 +85,7 @@ import reactor.util.function.Tuples;
|
|||
|
||||
|
||||
@Slf4j
|
||||
@RequiredArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class ReactiveAdminClient implements Closeable {
|
||||
|
||||
public enum SupportedFeature {
|
||||
|
@ -104,7 +104,8 @@ public class ReactiveAdminClient implements Closeable {
|
|||
this.predicate = (admin, ver) -> Mono.just(ver != null && ver >= fromVersion);
|
||||
}
|
||||
|
||||
static Mono<Set<SupportedFeature>> forVersion(AdminClient ac, @Nullable Float kafkaVersion) {
|
||||
static Mono<Set<SupportedFeature>> forVersion(AdminClient ac, String kafkaVersionStr) {
|
||||
@Nullable Float kafkaVersion = KafkaVersion.parse(kafkaVersionStr).orElse(null);
|
||||
return Flux.fromArray(SupportedFeature.values())
|
||||
.flatMap(f -> f.predicate.apply(ac, kafkaVersion).map(enabled -> Tuples.of(f, enabled)))
|
||||
.filter(Tuple2::getT2)
|
||||
|
@ -123,19 +124,46 @@ public class ReactiveAdminClient implements Closeable {
|
|||
Set<AclOperation> authorizedOperations;
|
||||
}
|
||||
|
||||
public static Mono<ReactiveAdminClient> create(AdminClient adminClient) {
|
||||
return getClusterVersion(adminClient)
|
||||
.flatMap(ver ->
|
||||
getSupportedUpdateFeaturesForVersion(adminClient, ver)
|
||||
.map(features ->
|
||||
new ReactiveAdminClient(adminClient, ver, features)));
|
||||
@Builder
|
||||
private record ConfigRelatedInfo(String version,
|
||||
Set<SupportedFeature> features,
|
||||
boolean topicDeletionIsAllowed) {
|
||||
|
||||
private static Mono<ConfigRelatedInfo> extract(AdminClient ac, int controllerId) {
|
||||
return loadBrokersConfig(ac, List.of(controllerId))
|
||||
.map(map -> map.isEmpty() ? List.<ConfigEntry>of() : map.get(controllerId))
|
||||
.flatMap(configs -> {
|
||||
String version = "1.0-UNKNOWN";
|
||||
boolean topicDeletionEnabled = true;
|
||||
for (ConfigEntry entry : configs) {
|
||||
if (entry.name().contains("inter.broker.protocol.version")) {
|
||||
version = entry.value();
|
||||
}
|
||||
if (entry.name().equals("delete.topic.enable")) {
|
||||
topicDeletionEnabled = Boolean.parseBoolean(entry.value());
|
||||
}
|
||||
}
|
||||
var builder = ConfigRelatedInfo.builder()
|
||||
.version(version)
|
||||
.topicDeletionIsAllowed(topicDeletionEnabled);
|
||||
return SupportedFeature.forVersion(ac, version)
|
||||
.map(features -> builder.features(features).build());
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private static Mono<Set<SupportedFeature>> getSupportedUpdateFeaturesForVersion(AdminClient ac, String versionStr) {
|
||||
@Nullable Float kafkaVersion = KafkaVersion.parse(versionStr).orElse(null);
|
||||
return SupportedFeature.forVersion(ac, kafkaVersion);
|
||||
public static Mono<ReactiveAdminClient> create(AdminClient adminClient) {
|
||||
return describeClusterImpl(adminClient, Set.of())
|
||||
// choosing node from which we will get configs (starting with controller)
|
||||
.flatMap(descr -> descr.controller != null
|
||||
? Mono.just(descr.controller)
|
||||
: Mono.justOrEmpty(descr.nodes.stream().findFirst())
|
||||
)
|
||||
.flatMap(node -> ConfigRelatedInfo.extract(adminClient, node.id()))
|
||||
.map(info -> new ReactiveAdminClient(adminClient, info));
|
||||
}
|
||||
|
||||
|
||||
private static Mono<Boolean> isAuthorizedSecurityEnabled(AdminClient ac, @Nullable Float kafkaVersion) {
|
||||
return toMono(ac.describeAcls(AclBindingFilter.ANY).values())
|
||||
.thenReturn(true)
|
||||
|
@ -174,11 +202,10 @@ public class ReactiveAdminClient implements Closeable {
|
|||
|
||||
@Getter(AccessLevel.PACKAGE) // visible for testing
|
||||
private final AdminClient client;
|
||||
private final String version;
|
||||
private final Set<SupportedFeature> features;
|
||||
private volatile ConfigRelatedInfo configRelatedInfo;
|
||||
|
||||
public Set<SupportedFeature> getClusterFeatures() {
|
||||
return features;
|
||||
return configRelatedInfo.features();
|
||||
}
|
||||
|
||||
public Mono<Set<String>> listTopics(boolean listInternal) {
|
||||
|
@ -190,7 +217,20 @@ public class ReactiveAdminClient implements Closeable {
|
|||
}
|
||||
|
||||
public String getVersion() {
|
||||
return version;
|
||||
return configRelatedInfo.version();
|
||||
}
|
||||
|
||||
public boolean isTopicDeletionEnabled() {
|
||||
return configRelatedInfo.topicDeletionIsAllowed();
|
||||
}
|
||||
|
||||
public Mono<Void> updateInternalStats(@Nullable Node controller) {
|
||||
if (controller == null) {
|
||||
return Mono.empty();
|
||||
}
|
||||
return ConfigRelatedInfo.extract(client, controller.id())
|
||||
.doOnNext(info -> this.configRelatedInfo = info)
|
||||
.then();
|
||||
}
|
||||
|
||||
public Mono<Map<String, List<ConfigEntry>>> getTopicsConfig() {
|
||||
|
@ -200,7 +240,7 @@ public class ReactiveAdminClient implements Closeable {
|
|||
//NOTE: skips not-found topics (for which UnknownTopicOrPartitionException was thrown by AdminClient)
|
||||
//and topics for which DESCRIBE_CONFIGS permission is not set (TopicAuthorizationException was thrown)
|
||||
public Mono<Map<String, List<ConfigEntry>>> getTopicsConfig(Collection<String> topicNames, boolean includeDoc) {
|
||||
var includeDocFixed = features.contains(SupportedFeature.CONFIG_DOCUMENTATION_RETRIEVAL) && includeDoc;
|
||||
var includeDocFixed = includeDoc && getClusterFeatures().contains(SupportedFeature.CONFIG_DOCUMENTATION_RETRIEVAL);
|
||||
// we need to partition calls, because it can lead to AdminClient timeouts in case of large topics count
|
||||
return partitionCalls(
|
||||
topicNames,
|
||||
|
@ -349,7 +389,7 @@ public class ReactiveAdminClient implements Closeable {
|
|||
}
|
||||
|
||||
public Mono<ClusterDescription> describeCluster() {
|
||||
return describeClusterImpl(client, features);
|
||||
return describeClusterImpl(client, getClusterFeatures());
|
||||
}
|
||||
|
||||
private static Mono<ClusterDescription> describeClusterImpl(AdminClient client, Set<SupportedFeature> features) {
|
||||
|
@ -371,23 +411,6 @@ public class ReactiveAdminClient implements Closeable {
|
|||
);
|
||||
}
|
||||
|
||||
private static Mono<String> getClusterVersion(AdminClient client) {
|
||||
return describeClusterImpl(client, Set.of())
|
||||
// choosing node from which we will get configs (starting with controller)
|
||||
.flatMap(descr -> descr.controller != null
|
||||
? Mono.just(descr.controller)
|
||||
: Mono.justOrEmpty(descr.nodes.stream().findFirst())
|
||||
)
|
||||
.flatMap(node -> loadBrokersConfig(client, List.of(node.id())))
|
||||
.flatMap(configs -> configs.values().stream()
|
||||
.flatMap(Collection::stream)
|
||||
.filter(entry -> entry.name().contains("inter.broker.protocol.version"))
|
||||
.findFirst()
|
||||
.map(configEntry -> Mono.just(configEntry.value()))
|
||||
.orElse(Mono.empty()))
|
||||
.switchIfEmpty(Mono.just("1.0-UNKNOWN"));
|
||||
}
|
||||
|
||||
public Mono<Void> deleteConsumerGroups(Collection<String> groupIds) {
|
||||
return toMono(client.deleteConsumerGroups(groupIds).all())
|
||||
.onErrorResume(GroupIdNotFoundException.class,
|
||||
|
@ -421,7 +444,7 @@ public class ReactiveAdminClient implements Closeable {
|
|||
// NOTE: places whole current topic config with new one. Entries that were present in old config,
|
||||
// but missed in new will be set to default
|
||||
public Mono<Void> updateTopicConfig(String topicName, Map<String, String> configs) {
|
||||
if (features.contains(SupportedFeature.INCREMENTAL_ALTER_CONFIGS)) {
|
||||
if (getClusterFeatures().contains(SupportedFeature.INCREMENTAL_ALTER_CONFIGS)) {
|
||||
return getTopicsConfigImpl(List.of(topicName), false)
|
||||
.map(conf -> conf.getOrDefault(topicName, List.of()))
|
||||
.flatMap(currentConfigs -> incrementalAlterConfig(topicName, currentConfigs, configs));
|
||||
|
@ -596,17 +619,17 @@ public class ReactiveAdminClient implements Closeable {
|
|||
}
|
||||
|
||||
public Mono<Collection<AclBinding>> listAcls(ResourcePatternFilter filter) {
|
||||
Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
|
||||
Preconditions.checkArgument(getClusterFeatures().contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
|
||||
return toMono(client.describeAcls(new AclBindingFilter(filter, AccessControlEntryFilter.ANY)).values());
|
||||
}
|
||||
|
||||
public Mono<Void> createAcls(Collection<AclBinding> aclBindings) {
|
||||
Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
|
||||
Preconditions.checkArgument(getClusterFeatures().contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
|
||||
return toMono(client.createAcls(aclBindings).all());
|
||||
}
|
||||
|
||||
public Mono<Void> deleteAcls(Collection<AclBinding> aclBindings) {
|
||||
Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
|
||||
Preconditions.checkArgument(getClusterFeatures().contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
|
||||
var filters = aclBindings.stream().map(AclBinding::toFilter).collect(Collectors.toSet());
|
||||
return toMono(client.deleteAcls(filters).all()).then();
|
||||
}
|
||||
|
|
|
@ -37,25 +37,26 @@ public class StatisticsService {
|
|||
private Mono<Statistics> getStatistics(KafkaCluster cluster) {
|
||||
return adminClientService.get(cluster).flatMap(ac ->
|
||||
ac.describeCluster().flatMap(description ->
|
||||
Mono.zip(
|
||||
List.of(
|
||||
metricsCollector.getBrokerMetrics(cluster, description.getNodes()),
|
||||
getLogDirInfo(description, ac),
|
||||
featureService.getAvailableFeatures(cluster, description),
|
||||
loadTopicConfigs(cluster),
|
||||
describeTopics(cluster)),
|
||||
results ->
|
||||
Statistics.builder()
|
||||
.status(ServerStatusDTO.ONLINE)
|
||||
.clusterDescription(description)
|
||||
.version(ac.getVersion())
|
||||
.metrics((Metrics) results[0])
|
||||
.logDirInfo((InternalLogDirStats) results[1])
|
||||
.features((List<ClusterFeature>) results[2])
|
||||
.topicConfigs((Map<String, List<ConfigEntry>>) results[3])
|
||||
.topicDescriptions((Map<String, TopicDescription>) results[4])
|
||||
.build()
|
||||
)))
|
||||
ac.updateInternalStats(description.getController()).then(
|
||||
Mono.zip(
|
||||
List.of(
|
||||
metricsCollector.getBrokerMetrics(cluster, description.getNodes()),
|
||||
getLogDirInfo(description, ac),
|
||||
featureService.getAvailableFeatures(ac, cluster, description),
|
||||
loadTopicConfigs(cluster),
|
||||
describeTopics(cluster)),
|
||||
results ->
|
||||
Statistics.builder()
|
||||
.status(ServerStatusDTO.ONLINE)
|
||||
.clusterDescription(description)
|
||||
.version(ac.getVersion())
|
||||
.metrics((Metrics) results[0])
|
||||
.logDirInfo((InternalLogDirStats) results[1])
|
||||
.features((List<ClusterFeature>) results[2])
|
||||
.topicConfigs((Map<String, List<ConfigEntry>>) results[3])
|
||||
.topicDescriptions((Map<String, TopicDescription>) results[4])
|
||||
.build()
|
||||
))))
|
||||
.doOnError(e ->
|
||||
log.error("Failed to collect cluster {} info", cluster.getName(), e))
|
||||
.onErrorResume(
|
||||
|
|
|
@ -3,6 +3,7 @@ package com.provectus.kafka.ui.service.acl;
|
|||
import com.google.common.collect.Sets;
|
||||
import com.provectus.kafka.ui.model.KafkaCluster;
|
||||
import com.provectus.kafka.ui.service.AdminClientService;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
@ -39,7 +40,8 @@ public class AclsService {
|
|||
public Flux<AclBinding> listAcls(KafkaCluster cluster, ResourcePatternFilter filter) {
|
||||
return adminClientService.get(cluster)
|
||||
.flatMap(c -> c.listAcls(filter))
|
||||
.flatMapIterable(acls -> acls);
|
||||
.flatMapIterable(acls -> acls)
|
||||
.sort(Comparator.comparing(AclBinding::toString)); //sorting to keep stable order on different calls
|
||||
}
|
||||
|
||||
public Mono<String> getAclAsCsvString(KafkaCluster cluster) {
|
||||
|
|
|
@ -20,6 +20,7 @@ import com.provectus.kafka.ui.model.rbac.permission.TopicAction;
|
|||
import com.provectus.kafka.ui.service.rbac.extractor.CognitoAuthorityExtractor;
|
||||
import com.provectus.kafka.ui.service.rbac.extractor.GithubAuthorityExtractor;
|
||||
import com.provectus.kafka.ui.service.rbac.extractor.GoogleAuthorityExtractor;
|
||||
import com.provectus.kafka.ui.service.rbac.extractor.OauthAuthorityExtractor;
|
||||
import com.provectus.kafka.ui.service.rbac.extractor.ProviderAuthorityExtractor;
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import java.util.Collections;
|
||||
|
@ -76,6 +77,7 @@ public class AccessControlService {
|
|||
case OAUTH_COGNITO -> new CognitoAuthorityExtractor();
|
||||
case OAUTH_GOOGLE -> new GoogleAuthorityExtractor();
|
||||
case OAUTH_GITHUB -> new GithubAuthorityExtractor();
|
||||
case OAUTH -> new OauthAuthorityExtractor();
|
||||
default -> null;
|
||||
})
|
||||
.filter(Objects::nonNull)
|
||||
|
@ -106,7 +108,8 @@ public class AccessControlService {
|
|||
&& isConnectAccessible(context, user)
|
||||
&& isConnectorAccessible(context, user) // TODO connector selectors
|
||||
&& isSchemaAccessible(context, user)
|
||||
&& isKsqlAccessible(context, user);
|
||||
&& isKsqlAccessible(context, user)
|
||||
&& isAclAccessible(context, user);
|
||||
|
||||
if (!accessGranted) {
|
||||
throw new AccessDeniedException("Access denied");
|
||||
|
@ -362,6 +365,23 @@ public class AccessControlService {
|
|||
return isAccessible(Resource.KSQL, null, user, context, requiredActions);
|
||||
}
|
||||
|
||||
private boolean isAclAccessible(AccessContext context, AuthenticatedUser user) {
|
||||
if (!rbacEnabled) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (context.getAclActions().isEmpty()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
Set<String> requiredActions = context.getAclActions()
|
||||
.stream()
|
||||
.map(a -> a.toString().toUpperCase())
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
return isAccessible(Resource.ACL, null, user, context, requiredActions);
|
||||
}
|
||||
|
||||
public Set<ProviderAuthorityExtractor> getOauthExtractors() {
|
||||
return oauthExtractors;
|
||||
}
|
||||
|
|
|
@ -1,5 +1,8 @@
|
|||
package com.provectus.kafka.ui.service.rbac.extractor;
|
||||
|
||||
import static com.provectus.kafka.ui.model.rbac.provider.Provider.Name.COGNITO;
|
||||
|
||||
import com.google.common.collect.Sets;
|
||||
import com.provectus.kafka.ui.model.rbac.Role;
|
||||
import com.provectus.kafka.ui.model.rbac.provider.Provider;
|
||||
import com.provectus.kafka.ui.service.rbac.AccessControlService;
|
||||
|
@ -18,8 +21,8 @@ public class CognitoAuthorityExtractor implements ProviderAuthorityExtractor {
|
|||
private static final String COGNITO_GROUPS_ATTRIBUTE_NAME = "cognito:groups";
|
||||
|
||||
@Override
|
||||
public boolean isApplicable(String provider) {
|
||||
return Provider.Name.COGNITO.equalsIgnoreCase(provider);
|
||||
public boolean isApplicable(String provider, Map<String, String> customParams) {
|
||||
return COGNITO.equalsIgnoreCase(provider) || COGNITO.equalsIgnoreCase(customParams.get(TYPE));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -63,7 +66,7 @@ public class CognitoAuthorityExtractor implements ProviderAuthorityExtractor {
|
|||
.map(Role::getName)
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
return Mono.just(Stream.concat(groupsByUsername.stream(), groupsByGroups.stream()).collect(Collectors.toSet()));
|
||||
return Mono.just(Sets.union(groupsByUsername, groupsByGroups));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
package com.provectus.kafka.ui.service.rbac.extractor;
|
||||
|
||||
import static com.provectus.kafka.ui.model.rbac.provider.Provider.Name.GITHUB;
|
||||
|
||||
import com.provectus.kafka.ui.model.rbac.Role;
|
||||
import com.provectus.kafka.ui.model.rbac.provider.Provider;
|
||||
import com.provectus.kafka.ui.service.rbac.AccessControlService;
|
||||
|
@ -28,8 +30,8 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor {
|
|||
private static final String DUMMY = "dummy";
|
||||
|
||||
@Override
|
||||
public boolean isApplicable(String provider) {
|
||||
return Provider.Name.GITHUB.equalsIgnoreCase(provider);
|
||||
public boolean isApplicable(String provider, Map<String, String> customParams) {
|
||||
return GITHUB.equalsIgnoreCase(provider) || GITHUB.equalsIgnoreCase(customParams.get(TYPE));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -1,13 +1,14 @@
|
|||
package com.provectus.kafka.ui.service.rbac.extractor;
|
||||
|
||||
import static com.provectus.kafka.ui.model.rbac.provider.Provider.Name.GOOGLE;
|
||||
|
||||
import com.google.common.collect.Sets;
|
||||
import com.provectus.kafka.ui.model.rbac.Role;
|
||||
import com.provectus.kafka.ui.model.rbac.provider.Provider;
|
||||
import com.provectus.kafka.ui.service.rbac.AccessControlService;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.security.oauth2.core.user.DefaultOAuth2User;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
@ -19,8 +20,8 @@ public class GoogleAuthorityExtractor implements ProviderAuthorityExtractor {
|
|||
public static final String EMAIL_ATTRIBUTE_NAME = "email";
|
||||
|
||||
@Override
|
||||
public boolean isApplicable(String provider) {
|
||||
return Provider.Name.GOOGLE.equalsIgnoreCase(provider);
|
||||
public boolean isApplicable(String provider, Map<String, String> customParams) {
|
||||
return GOOGLE.equalsIgnoreCase(provider) || GOOGLE.equalsIgnoreCase(customParams.get(TYPE));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -52,7 +53,7 @@ public class GoogleAuthorityExtractor implements ProviderAuthorityExtractor {
|
|||
return Mono.just(groupsByUsername);
|
||||
}
|
||||
|
||||
List<String> groupsByDomain = acs.getRoles()
|
||||
Set<String> groupsByDomain = acs.getRoles()
|
||||
.stream()
|
||||
.filter(r -> r.getSubjects()
|
||||
.stream()
|
||||
|
@ -60,10 +61,9 @@ public class GoogleAuthorityExtractor implements ProviderAuthorityExtractor {
|
|||
.filter(s -> s.getType().equals("domain"))
|
||||
.anyMatch(s -> s.getValue().equals(domain)))
|
||||
.map(Role::getName)
|
||||
.toList();
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
return Mono.just(Stream.concat(groupsByUsername.stream(), groupsByDomain.stream())
|
||||
.collect(Collectors.toSet()));
|
||||
return Mono.just(Sets.union(groupsByUsername, groupsByDomain));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,22 +1,44 @@
|
|||
package com.provectus.kafka.ui.service.rbac.extractor;
|
||||
|
||||
import static com.provectus.kafka.ui.model.rbac.provider.Provider.Name.OAUTH;
|
||||
|
||||
import com.google.common.collect.Sets;
|
||||
import com.provectus.kafka.ui.config.auth.OAuthProperties;
|
||||
import com.provectus.kafka.ui.model.rbac.Role;
|
||||
import com.provectus.kafka.ui.model.rbac.provider.Provider;
|
||||
import com.provectus.kafka.ui.service.rbac.AccessControlService;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.security.oauth2.core.user.DefaultOAuth2User;
|
||||
import org.springframework.util.Assert;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
@Slf4j
|
||||
public class OauthAuthorityExtractor implements ProviderAuthorityExtractor {
|
||||
|
||||
public static final String ROLES_FIELD_PARAM_NAME = "roles-field";
|
||||
|
||||
@Override
|
||||
public boolean isApplicable(String provider) {
|
||||
return false; // TODO #2844
|
||||
public boolean isApplicable(String provider, Map<String, String> customParams) {
|
||||
var containsRolesFieldNameParam = customParams.containsKey(ROLES_FIELD_PARAM_NAME);
|
||||
if (!containsRolesFieldNameParam) {
|
||||
log.debug("Provider [{}] doesn't contain a roles field param name, mapping won't be performed", provider);
|
||||
return false;
|
||||
}
|
||||
|
||||
return OAUTH.equalsIgnoreCase(provider) || OAUTH.equalsIgnoreCase(customParams.get(TYPE));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Mono<Set<String>> extract(AccessControlService acs, Object value, Map<String, Object> additionalParams) {
|
||||
log.trace("Extracting OAuth2 user authorities");
|
||||
|
||||
DefaultOAuth2User principal;
|
||||
try {
|
||||
principal = (DefaultOAuth2User) value;
|
||||
|
@ -25,7 +47,67 @@ public class OauthAuthorityExtractor implements ProviderAuthorityExtractor {
|
|||
throw new RuntimeException();
|
||||
}
|
||||
|
||||
return Mono.just(Set.of(principal.getName())); // TODO #2844
|
||||
var provider = (OAuthProperties.OAuth2Provider) additionalParams.get("provider");
|
||||
Assert.notNull(provider, "provider is null");
|
||||
var rolesFieldName = provider.getCustomParams().get(ROLES_FIELD_PARAM_NAME);
|
||||
|
||||
Set<String> rolesByUsername = acs.getRoles()
|
||||
.stream()
|
||||
.filter(r -> r.getSubjects()
|
||||
.stream()
|
||||
.filter(s -> s.getProvider().equals(Provider.OAUTH))
|
||||
.filter(s -> s.getType().equals("user"))
|
||||
.anyMatch(s -> s.getValue().equals(principal.getName())))
|
||||
.map(Role::getName)
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
Set<String> rolesByRolesField = acs.getRoles()
|
||||
.stream()
|
||||
.filter(role -> role.getSubjects()
|
||||
.stream()
|
||||
.filter(s -> s.getProvider().equals(Provider.OAUTH))
|
||||
.filter(s -> s.getType().equals("role"))
|
||||
.anyMatch(subject -> {
|
||||
var roleName = subject.getValue();
|
||||
var principalRoles = convertRoles(principal.getAttribute(rolesFieldName));
|
||||
var roleMatched = principalRoles.contains(roleName);
|
||||
|
||||
if (roleMatched) {
|
||||
log.debug("Assigning role [{}] to user [{}]", roleName, principal.getName());
|
||||
} else {
|
||||
log.trace("Role [{}] not found in user [{}] roles", roleName, principal.getName());
|
||||
}
|
||||
|
||||
return roleMatched;
|
||||
})
|
||||
)
|
||||
.map(Role::getName)
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
return Mono.just(Sets.union(rolesByUsername, rolesByRolesField));
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private Collection<String> convertRoles(Object roles) {
|
||||
if (roles == null) {
|
||||
log.debug("Param missing from attributes, skipping");
|
||||
return Collections.emptySet();
|
||||
}
|
||||
|
||||
if ((roles instanceof List<?>) || (roles instanceof Set<?>)) {
|
||||
log.trace("The field is either a set or a list, returning as is");
|
||||
return (Collection<String>) roles;
|
||||
}
|
||||
|
||||
if (!(roles instanceof String)) {
|
||||
log.debug("The field is not a string, skipping");
|
||||
return Collections.emptySet();
|
||||
}
|
||||
|
||||
log.trace("Trying to deserialize the field value [{}] as a string", roles);
|
||||
|
||||
return Arrays.stream(((String) roles).split(","))
|
||||
.collect(Collectors.toSet());
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@@ -7,7 +7,9 @@ import reactor.core.publisher.Mono;

public interface ProviderAuthorityExtractor {

boolean isApplicable(String provider);
String TYPE = "type";

boolean isApplicable(String provider, Map<String, String> customParams);

Mono<Set<String>> extract(AccessControlService acs, Object value, Map<String, Object> additionalParams);
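Note: with the widened isApplicable(provider, customParams) contract above, an extractor can match either the OAuth2 registration's provider name or an explicit "type" custom parameter. A hypothetical implementation following that pattern is sketched below; the provider name "myprovider" and the empty extract body are placeholders, not part of the diff.

import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.Map;
import java.util.Set;
import reactor.core.publisher.Mono;

public class MyProviderAuthorityExtractor implements ProviderAuthorityExtractor {

  @Override
  public boolean isApplicable(String provider, Map<String, String> customParams) {
    // Match by registration name or by the "type" custom param, as the built-in extractors now do.
    return "myprovider".equalsIgnoreCase(provider)
        || "myprovider".equalsIgnoreCase(customParams.get(TYPE));
  }

  @Override
  public Mono<Set<String>> extract(AccessControlService acs, Object value, Map<String, Object> additionalParams) {
    return Mono.just(Set.of()); // role resolution against acs.getRoles() would go here
  }
}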
@ -1,70 +0,0 @@
|
|||
package com.provectus.kafka.ui.service.rbac.extractor;
|
||||
|
||||
import com.provectus.kafka.ui.config.auth.LdapProperties;
|
||||
import com.provectus.kafka.ui.model.rbac.Role;
|
||||
import com.provectus.kafka.ui.model.rbac.provider.Provider;
|
||||
import com.provectus.kafka.ui.service.rbac.AccessControlService;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.ldap.core.DirContextOperations;
|
||||
import org.springframework.ldap.core.support.BaseLdapPathContextSource;
|
||||
import org.springframework.security.core.GrantedAuthority;
|
||||
import org.springframework.security.core.authority.SimpleGrantedAuthority;
|
||||
import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;
|
||||
import org.springframework.util.Assert;
|
||||
|
||||
@Slf4j
|
||||
public class RbacLdapAuthoritiesExtractor extends DefaultLdapAuthoritiesPopulator {
|
||||
|
||||
private final AccessControlService acs;
|
||||
private final LdapProperties props;
|
||||
|
||||
private final Function<Map<String, List<String>>, GrantedAuthority> authorityMapper = (record) -> {
|
||||
String role = record.get(getGroupRoleAttribute()).get(0);
|
||||
return new SimpleGrantedAuthority(role);
|
||||
};
|
||||
|
||||
public RbacLdapAuthoritiesExtractor(ApplicationContext context) {
|
||||
super(context.getBean(BaseLdapPathContextSource.class), null);
|
||||
this.acs = context.getBean(AccessControlService.class);
|
||||
this.props = context.getBean(LdapProperties.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<GrantedAuthority> getAdditionalRoles(DirContextOperations user, String username) {
|
||||
return acs.getRoles()
|
||||
.stream()
|
||||
.map(Role::getSubjects)
|
||||
.flatMap(List::stream)
|
||||
.filter(s -> s.getProvider().equals(Provider.LDAP))
|
||||
.filter(s -> s.getType().equals("group"))
|
||||
.flatMap(subject -> getRoles(subject.getValue(), user.getNameInNamespace(), username).stream())
|
||||
.collect(Collectors.toSet());
|
||||
}
|
||||
|
||||
private Set<GrantedAuthority> getRoles(String groupSearchBase, String userDn, String username) {
|
||||
Assert.notNull(groupSearchBase, "groupSearchBase is empty");
|
||||
|
||||
log.trace(
|
||||
"Searching for roles for user [{}] with DN [{}], groupRoleAttribute [{}] and filter [{}] in search base [{}]",
|
||||
username, userDn, props.getGroupRoleAttribute(), getGroupSearchFilter(), groupSearchBase);
|
||||
|
||||
var ldapTemplate = getLdapTemplate();
|
||||
ldapTemplate.setIgnoreNameNotFoundException(true);
|
||||
|
||||
Set<Map<String, List<String>>> userRoles = ldapTemplate.searchForMultipleAttributeValues(
|
||||
groupSearchBase, getGroupSearchFilter(), new String[] {userDn, username},
|
||||
new String[] {props.getGroupRoleAttribute()});
|
||||
|
||||
return userRoles.stream()
|
||||
.map(authorityMapper)
|
||||
.peek(a -> log.debug("Mapped role [{}] for user [{}]", a, username))
|
||||
.collect(Collectors.toSet());
|
||||
}
|
||||
|
||||
}
|
|
@ -230,7 +230,7 @@ public class DynamicConfigOperations {
|
|||
|
||||
Optional.ofNullable(auth)
|
||||
.flatMap(a -> Optional.ofNullable(a.oauth2))
|
||||
.ifPresent(OAuthProperties::validate);
|
||||
.ifPresent(OAuthProperties::init);
|
||||
|
||||
Optional.ofNullable(webclient)
|
||||
.ifPresent(WebclientProperties::validate);
|
||||
|
|
|
@ -118,10 +118,18 @@ class MessageFiltersTest {
|
|||
}
|
||||
|
||||
@Test
|
||||
void keySetToNullIfKeyCantBeParsedToJson() {
|
||||
var f = groovyScriptFilter("key == null");
|
||||
void keySetToKeyStringIfCantBeParsedToJson() {
|
||||
var f = groovyScriptFilter("key == \"not json\"");
|
||||
assertTrue(f.test(msg().key("not json")));
|
||||
assertFalse(f.test(msg().key("{ \"k\" : \"v\" }")));
|
||||
}
|
||||
|
||||
@Test
|
||||
void keyAndKeyAsTextSetToNullIfRecordsKeyIsNull() {
|
||||
var f = groovyScriptFilter("key == null");
|
||||
assertTrue(f.test(msg().key(null)));
|
||||
|
||||
f = groovyScriptFilter("keyAsText == null");
|
||||
assertTrue(f.test(msg().key(null)));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -132,10 +140,18 @@ class MessageFiltersTest {
|
|||
}
|
||||
|
||||
@Test
|
||||
void valueSetToNullIfKeyCantBeParsedToJson() {
|
||||
var f = groovyScriptFilter("value == null");
|
||||
void valueSetToContentStringIfCantBeParsedToJson() {
|
||||
var f = groovyScriptFilter("value == \"not json\"");
|
||||
assertTrue(f.test(msg().content("not json")));
|
||||
assertFalse(f.test(msg().content("{ \"k\" : \"v\" }")));
|
||||
}
|
||||
|
||||
@Test
|
||||
void valueAndValueAsTextSetToNullIfRecordsContentIsNull() {
|
||||
var f = groovyScriptFilter("value == null");
|
||||
assertTrue(f.test(msg().content(null)));
|
||||
|
||||
f = groovyScriptFilter("valueAsText == null");
|
||||
assertTrue(f.test(msg().content(null)));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -185,4 +201,4 @@ class MessageFiltersTest {
|
|||
.partition(1);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3452,6 +3452,7 @@ components:
|
|||
- MESSAGES_READ
|
||||
- MESSAGES_PRODUCE
|
||||
- MESSAGES_DELETE
|
||||
- RESTART
|
||||
|
||||
ResourceType:
|
||||
type: string
|
||||
|
|
|
@ -3,6 +3,7 @@ package com.provectus.kafka.ui.pages.ksqldb;
|
|||
import static com.codeborne.selenide.Condition.visible;
|
||||
import static com.codeborne.selenide.Selenide.$$x;
|
||||
import static com.codeborne.selenide.Selenide.$x;
|
||||
import static com.codeborne.selenide.Selenide.sleep;
|
||||
|
||||
import com.codeborne.selenide.CollectionCondition;
|
||||
import com.codeborne.selenide.Condition;
|
||||
|
@ -37,6 +38,7 @@ public class KsqlQueryForm extends BasePage {
|
|||
@Step
|
||||
public KsqlQueryForm clickClearBtn() {
|
||||
clickByJavaScript(clearBtn);
|
||||
sleep(500);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
|
13 kafka-ui-react-app/src/components/ACLPage/ACLPage.tsx Normal file
|
@ -0,0 +1,13 @@
|
|||
import React from 'react';
|
||||
import { Routes, Route } from 'react-router-dom';
|
||||
import ACList from 'components/ACLPage/List/List';
|
||||
|
||||
const ACLPage = () => {
|
||||
return (
|
||||
<Routes>
|
||||
<Route index element={<ACList />} />
|
||||
</Routes>
|
||||
);
|
||||
};
|
||||
|
||||
export default ACLPage;
|
|
@ -0,0 +1,44 @@
|
|||
import styled from 'styled-components';
|
||||
|
||||
export const EnumCell = styled.div`
|
||||
text-transform: capitalize;
|
||||
`;
|
||||
|
||||
export const DeleteCell = styled.div`
|
||||
svg {
|
||||
cursor: pointer;
|
||||
}
|
||||
`;
|
||||
|
||||
export const Chip = styled.div<{
|
||||
chipType?: 'default' | 'success' | 'danger' | 'secondary' | string;
|
||||
}>`
|
||||
width: fit-content;
|
||||
text-transform: capitalize;
|
||||
padding: 2px 8px;
|
||||
font-size: 12px;
|
||||
line-height: 16px;
|
||||
border-radius: 16px;
|
||||
color: ${({ theme }) => theme.tag.color};
|
||||
background-color: ${({ theme, chipType }) => {
|
||||
switch (chipType) {
|
||||
case 'success':
|
||||
return theme.tag.backgroundColor.green;
|
||||
case 'danger':
|
||||
return theme.tag.backgroundColor.red;
|
||||
case 'secondary':
|
||||
return theme.tag.backgroundColor.secondary;
|
||||
default:
|
||||
return theme.tag.backgroundColor.gray;
|
||||
}
|
||||
}};
|
||||
`;
|
||||
|
||||
export const PatternCell = styled.div`
|
||||
display: flex;
|
||||
align-items: center;
|
||||
|
||||
${Chip} {
|
||||
margin-left: 4px;
|
||||
}
|
||||
`;
|
153 kafka-ui-react-app/src/components/ACLPage/List/List.tsx Normal file
|
@ -0,0 +1,153 @@
|
|||
import React from 'react';
|
||||
import { ColumnDef } from '@tanstack/react-table';
|
||||
import { useTheme } from 'styled-components';
|
||||
import PageHeading from 'components/common/PageHeading/PageHeading';
|
||||
import Table from 'components/common/NewTable';
|
||||
import DeleteIcon from 'components/common/Icons/DeleteIcon';
|
||||
import { useConfirm } from 'lib/hooks/useConfirm';
|
||||
import useAppParams from 'lib/hooks/useAppParams';
|
||||
import { useAcls, useDeleteAcl } from 'lib/hooks/api/acl';
|
||||
import { ClusterName } from 'redux/interfaces';
|
||||
import {
|
||||
KafkaAcl,
|
||||
KafkaAclNamePatternType,
|
||||
KafkaAclPermissionEnum,
|
||||
} from 'generated-sources';
|
||||
|
||||
import * as S from './List.styled';
|
||||
|
||||
const ACList: React.FC = () => {
|
||||
const { clusterName } = useAppParams<{ clusterName: ClusterName }>();
|
||||
const theme = useTheme();
|
||||
const { data: aclList } = useAcls(clusterName);
|
||||
const { deleteResource } = useDeleteAcl(clusterName);
|
||||
const modal = useConfirm(true);
|
||||
|
||||
const [rowId, setRowId] = React.useState('');
|
||||
|
||||
const onDeleteClick = (acl: KafkaAcl | null) => {
|
||||
if (acl) {
|
||||
modal('Are you sure want to delete this ACL record?', () =>
|
||||
deleteResource(acl)
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
const columns = React.useMemo<ColumnDef<KafkaAcl>[]>(
|
||||
() => [
|
||||
{
|
||||
header: 'Principal',
|
||||
accessorKey: 'principal',
|
||||
size: 257,
|
||||
},
|
||||
{
|
||||
header: 'Resource',
|
||||
accessorKey: 'resourceType',
|
||||
// eslint-disable-next-line react/no-unstable-nested-components
|
||||
cell: ({ getValue }) => (
|
||||
<S.EnumCell>{getValue<string>().toLowerCase()}</S.EnumCell>
|
||||
),
|
||||
size: 145,
|
||||
},
|
||||
{
|
||||
header: 'Pattern',
|
||||
accessorKey: 'resourceName',
|
||||
// eslint-disable-next-line react/no-unstable-nested-components
|
||||
cell: ({ getValue, row }) => {
|
||||
let chipType;
|
||||
if (
|
||||
row.original.namePatternType === KafkaAclNamePatternType.PREFIXED
|
||||
) {
|
||||
chipType = 'default';
|
||||
}
|
||||
|
||||
if (
|
||||
row.original.namePatternType === KafkaAclNamePatternType.LITERAL
|
||||
) {
|
||||
chipType = 'secondary';
|
||||
}
|
||||
return (
|
||||
<S.PatternCell>
|
||||
{getValue<string>()}
|
||||
{chipType ? (
|
||||
<S.Chip chipType={chipType}>
|
||||
{row.original.namePatternType.toLowerCase()}
|
||||
</S.Chip>
|
||||
) : null}
|
||||
</S.PatternCell>
|
||||
);
|
||||
},
|
||||
size: 257,
|
||||
},
|
||||
{
|
||||
header: 'Host',
|
||||
accessorKey: 'host',
|
||||
size: 257,
|
||||
},
|
||||
{
|
||||
header: 'Operation',
|
||||
accessorKey: 'operation',
|
||||
// eslint-disable-next-line react/no-unstable-nested-components
|
||||
cell: ({ getValue }) => (
|
||||
<S.EnumCell>{getValue<string>().toLowerCase()}</S.EnumCell>
|
||||
),
|
||||
size: 121,
|
||||
},
|
||||
{
|
||||
header: 'Permission',
|
||||
accessorKey: 'permission',
|
||||
// eslint-disable-next-line react/no-unstable-nested-components
|
||||
cell: ({ getValue }) => (
|
||||
<S.Chip
|
||||
chipType={
|
||||
getValue<string>() === KafkaAclPermissionEnum.ALLOW
|
||||
? 'success'
|
||||
: 'danger'
|
||||
}
|
||||
>
|
||||
{getValue<string>().toLowerCase()}
|
||||
</S.Chip>
|
||||
),
|
||||
size: 111,
|
||||
},
|
||||
{
|
||||
id: 'delete',
|
||||
// eslint-disable-next-line react/no-unstable-nested-components
|
||||
cell: ({ row }) => {
|
||||
return (
|
||||
<S.DeleteCell onClick={() => onDeleteClick(row.original)}>
|
||||
<DeleteIcon
|
||||
fill={
|
||||
rowId === row.id ? theme.acl.table.deleteIcon : 'transparent'
|
||||
}
|
||||
/>
|
||||
</S.DeleteCell>
|
||||
);
|
||||
},
|
||||
size: 76,
|
||||
},
|
||||
],
|
||||
[rowId]
|
||||
);
|
||||
|
||||
const onRowHover = (value: unknown) => {
|
||||
if (value && typeof value === 'object' && 'id' in value) {
|
||||
setRowId(value.id as string);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<PageHeading text="Access Control List" />
|
||||
<Table
|
||||
columns={columns}
|
||||
data={aclList ?? []}
|
||||
emptyMessage="No ACL items found"
|
||||
onRowHover={onRowHover}
|
||||
onMouseLeave={() => setRowId('')}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default ACList;
|
|
@ -0,0 +1,74 @@
import React from 'react';
import { render, WithRoute } from 'lib/testHelpers';
import { screen } from '@testing-library/dom';
import userEvent from '@testing-library/user-event';
import { clusterACLPath } from 'lib/paths';
import ACList from 'components/ACLPage/List/List';
import { useAcls, useDeleteAcl } from 'lib/hooks/api/acl';
import { aclPayload } from 'lib/fixtures/acls';

jest.mock('lib/hooks/api/acl', () => ({
  useAcls: jest.fn(),
  useDeleteAcl: jest.fn(),
}));

describe('ACLList Component', () => {
  const clusterName = 'local';
  const renderComponent = () =>
    render(
      <WithRoute path={clusterACLPath()}>
        <ACList />
      </WithRoute>,
      {
        initialEntries: [clusterACLPath(clusterName)],
      }
    );

  describe('ACLList', () => {
    describe('when the acls are loaded', () => {
      beforeEach(() => {
        (useAcls as jest.Mock).mockImplementation(() => ({
          data: aclPayload,
        }));
        (useDeleteAcl as jest.Mock).mockImplementation(() => ({
          deleteResource: jest.fn(),
        }));
      });

      it('renders ACLList with records', async () => {
        renderComponent();
        expect(screen.getByRole('table')).toBeInTheDocument();
        expect(screen.getAllByRole('row').length).toEqual(4);
      });

      it('shows delete icon on hover', async () => {
        const { container } = renderComponent();
        const [trElement] = screen.getAllByRole('row');
        await userEvent.hover(trElement);
        const deleteElement = container.querySelector('svg');
        expect(deleteElement).not.toHaveStyle({
          fill: 'transparent',
        });
      });
    });

    describe('when it has no acls', () => {
      beforeEach(() => {
        (useAcls as jest.Mock).mockImplementation(() => ({
          data: [],
        }));
        (useDeleteAcl as jest.Mock).mockImplementation(() => ({
          deleteResource: jest.fn(),
        }));
      });

      it('renders empty ACLList with message', async () => {
        renderComponent();
        expect(screen.getByRole('table')).toBeInTheDocument();
        expect(
          screen.getByRole('row', { name: 'No ACL items found' })
        ).toBeInTheDocument();
      });
    });
  });
});
@ -34,7 +34,11 @@ const Configs: React.FC = () => {

  const getData = () => {
    return data
      .filter((item) => item.name.toLocaleLowerCase().indexOf(keyword) > -1)
      .filter(
        (item) =>
          item.name.toLocaleLowerCase().indexOf(keyword.toLocaleLowerCase()) >
          -1
      )
      .sort((a, b) => {
        if (a.source === b.source) return 0;
@ -13,6 +13,7 @@ import {
  clusterTopicsRelativePath,
  clusterConfigRelativePath,
  getNonExactPath,
  clusterAclRelativePath,
} from 'lib/paths';
import ClusterContext from 'components/contexts/ClusterContext';
import PageLoader from 'components/common/PageLoader/PageLoader';

@ -30,6 +31,7 @@ const ClusterConfigPage = React.lazy(
const ConsumerGroups = React.lazy(
  () => import('components/ConsumerGroups/ConsumerGroups')
);
const AclPage = React.lazy(() => import('components/ACLPage/ACLPage'));

const ClusterPage: React.FC = () => {
  const { clusterName } = useAppParams<ClusterNameRoute>();

@ -51,6 +53,9 @@ const ClusterPage: React.FC = () => {
        ClusterFeaturesEnum.TOPIC_DELETION
      ),
      hasKsqlDbConfigured: features.includes(ClusterFeaturesEnum.KSQL_DB),
      hasAclViewConfigured:
        features.includes(ClusterFeaturesEnum.KAFKA_ACL_VIEW) ||
        features.includes(ClusterFeaturesEnum.KAFKA_ACL_EDIT),
    };
  }, [clusterName, data]);

@ -95,6 +100,12 @@ const ClusterPage: React.FC = () => {
              element={<KsqlDb />}
            />
          )}
          {contextValue.hasAclViewConfigured && (
            <Route
              path={getNonExactPath(clusterAclRelativePath)}
              element={<AclPage />}
            />
          )}
          {appInfo.hasDynamicConfig && (
            <Route
              path={getNonExactPath(clusterConfigRelativePath)}
@ -1,4 +1,4 @@
import React, { useEffect, useMemo } from 'react';
import React, { useMemo } from 'react';
import PageHeading from 'components/common/PageHeading/PageHeading';
import * as Metrics from 'components/common/Metrics';
import { Tag } from 'components/common/Tag/Tag.styled';

@ -10,7 +10,6 @@ import Table, { SizeCell } from 'components/common/NewTable';
import useBoolean from 'lib/hooks/useBoolean';
import { clusterNewConfigPath } from 'lib/paths';
import { GlobalSettingsContext } from 'components/contexts/GlobalSettingsContext';
import { useNavigate } from 'react-router-dom';
import { ActionCanButton } from 'components/common/ActionComponent';
import { useGetUserInfo } from 'lib/hooks/api/roles';

@ -23,7 +22,7 @@ const Dashboard: React.FC = () => {
  const clusters = useClusters();
  const { value: showOfflineOnly, toggle } = useBoolean(false);
  const appInfo = React.useContext(GlobalSettingsContext);
  const navigate = useNavigate();

  const config = React.useMemo(() => {
    const clusterList = clusters.data || [];
    const offlineClusters = clusterList.filter(

@ -58,12 +57,6 @@ const Dashboard: React.FC = () => {
    return initialColumns;
  }, []);

  useEffect(() => {
    if (appInfo.hasDynamicConfig && !clusters.data) {
      navigate(clusterNewConfigPath);
    }
  }, [clusters, appInfo.hasDynamicConfig]);

  const isApplicationConfig = useMemo(() => {
    return !!data?.userInfo?.permissions.some(
      (permission) => permission.resource === ResourceType.APPLICATIONCONFIG
@ -1,45 +0,0 @@
import React from 'react';
import { useClusters } from 'lib/hooks/api/clusters';
import Dashboard from 'components/Dashboard/Dashboard';
import { Cluster, ServerStatus } from 'generated-sources';
import { render } from 'lib/testHelpers';

interface DataType {
  data: Cluster[] | undefined;
}
jest.mock('lib/hooks/api/clusters');
const mockedNavigate = jest.fn();
jest.mock('react-router-dom', () => ({
  ...jest.requireActual('react-router-dom'),
  useNavigate: () => mockedNavigate,
}));
describe('Dashboard component', () => {
  const renderComponent = (hasDynamicConfig: boolean, data: DataType) => {
    const useClustersMock = useClusters as jest.Mock;
    useClustersMock.mockReturnValue(data);
    render(<Dashboard />, {
      globalSettings: { hasDynamicConfig },
    });
  };
  it('redirects to new cluster configuration page if there are no clusters and dynamic config is enabled', async () => {
    await renderComponent(true, { data: undefined });

    expect(mockedNavigate).toHaveBeenCalled();
  });

  it('should not navigate to new cluster config page when there are clusters', async () => {
    await renderComponent(true, {
      data: [{ name: 'Cluster 1', status: ServerStatus.ONLINE }],
    });

    expect(mockedNavigate).not.toHaveBeenCalled();
  });

  it('should not navigate to new cluster config page when there are no clusters and hasDynamicConfig is false', async () => {
    await renderComponent(false, {
      data: [],
    });

    expect(mockedNavigate).not.toHaveBeenCalled();
  });
});
@ -33,6 +33,7 @@ export const Fieldset = styled.fieldset`
  flex: 1;
  flex-direction: column;
  gap: 8px;
  color: ${({ theme }) => theme.default.color.normal};
`;

export const ButtonsContainer = styled.div`
@ -7,6 +7,7 @@ import {
  clusterSchemasPath,
  clusterConnectorsPath,
  clusterKsqlDbPath,
  clusterACLPath,
} from 'lib/paths';

import ClusterMenuItem from './ClusterMenuItem';

@ -57,6 +58,10 @@ const ClusterMenu: React.FC<Props> = ({
          {hasFeatureConfigured(ClusterFeaturesEnum.KSQL_DB) && (
            <ClusterMenuItem to={clusterKsqlDbPath(name)} title="KSQL DB" />
          )}
          {(hasFeatureConfigured(ClusterFeaturesEnum.KAFKA_ACL_VIEW) ||
            hasFeatureConfigured(ClusterFeaturesEnum.KAFKA_ACL_EDIT)) && (
            <ClusterMenuItem to={clusterACLPath(name)} title="ACL" />
          )}
        </S.List>
      )}
    </S.List>
@ -14,9 +14,6 @@ export const DiffWrapper = styled.div`
    background-color: ${({ theme }) => theme.default.backgroundColor};
    color: ${({ theme }) => theme.default.color.normal};
  }
  .ace_line {
    background-color: ${({ theme }) => theme.default.backgroundColor};
  }
  .ace_gutter-cell {
    background-color: ${({ theme }) =>
      theme.ksqlDb.query.editor.cell.backgroundColor};

@ -39,10 +36,10 @@ export const DiffWrapper = styled.div`
  .ace_string {
    color: ${({ theme }) => theme.ksqlDb.query.editor.aceString};
  }
  > .codeMarker {
    background: ${({ theme }) => theme.icons.warningIcon};
  .codeMarker {
    background-color: ${({ theme }) => theme.ksqlDb.query.editor.codeMarker};
    position: absolute;
    z-index: 20;
    z-index: 2000;
  }
`;
@ -1,7 +1,7 @@
import styled from 'styled-components';

export interface ButtonProps {
  buttonType: 'primary' | 'secondary';
  buttonType: 'primary' | 'secondary' | 'danger';
  buttonSize: 'S' | 'M' | 'L';
  isInverted?: boolean;
}
@ -26,7 +26,7 @@ const ConfirmationModal: React.FC = () => {
          Cancel
        </Button>
        <Button
          buttonType="primary"
          buttonType={context.dangerButton ? 'danger' : 'primary'}
          buttonSize="M"
          onClick={context.confirm}
          type="button"
@ -1,13 +1,14 @@
import React from 'react';
import { useTheme } from 'styled-components';

const DeleteIcon: React.FC = () => {
const DeleteIcon: React.FC<{ fill?: string }> = ({ fill }) => {
  const theme = useTheme();
  const curentFill = fill || theme.editFilter.deleteIconColor;
  return (
    <svg
      xmlns="http://www.w3.org/2000/svg"
      viewBox="0 0 448 512"
      fill={theme.editFilter.deleteIconColor}
      fill={curentFill}
      width="14"
      height="14"
    >
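A usage sketch for the widened icon API (illustration only; the hover flag and colour value are hypothetical): the new optional fill prop overrides the themed default, and omitting it keeps the previous behaviour.

import React from 'react';
import DeleteIcon from 'components/common/Icons/DeleteIcon';

// Stays transparent until hovered, the same pattern the ACL list above uses.
const HoverableDelete: React.FC<{ hovered: boolean }> = ({ hovered }) => (
  <DeleteIcon fill={hovered ? '#E61A1A' : 'transparent'} />
);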
@ -1,9 +1,12 @@
import styled from 'styled-components';
import { MultiSelect as ReactMultiSelect } from 'react-multi-select-component';

const MultiSelect = styled(ReactMultiSelect)<{ minWidth?: string }>`
const MultiSelect = styled(ReactMultiSelect)<{
  minWidth?: string;
  height?: string;
}>`
  min-width: ${({ minWidth }) => minWidth || '200px;'};
  height: 32px;
  height: ${({ height }) => height ?? '32px'};
  font-size: 14px;
  .search input {
    color: ${({ theme }) => theme.input.color.normal};

@ -36,13 +39,14 @@ const MultiSelect = styled(ReactMultiSelect)<{ minWidth?: string }>`
  &:hover {
    border-color: ${({ theme }) => theme.select.borderColor.hover} !important;
  }
  height: 32px;

  height: ${({ height }) => height ?? '32px'};
  * {
    cursor: ${({ disabled }) => (disabled ? 'not-allowed' : 'pointer')};
  }

  & > .dropdown-heading {
    height: 32px;
    height: ${({ height }) => height ?? '32px'};
    color: ${({ disabled, theme }) =>
      disabled
        ? theme.select.color.disabled
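A minimal consumer sketch (assumption, not part of this commit): both styling props stay optional, so existing call sites keep the 200px / 32px defaults; options, value, onChange and labelledBy are the usual react-multi-select-component props.

<MultiSelect
  options={options}
  value={selected}
  onChange={setSelected}
  labelledBy="Select topics"
  minWidth="260px"
  height="40px"
/>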
@ -52,6 +52,9 @@ export interface TableProps<TData> {

  // Handles row click. Can not be combined with `enableRowSelection` && expandable rows.
  onRowClick?: (row: Row<TData>) => void;

  onRowHover?: (row: Row<TData>) => void;
  onMouseLeave?: () => void;
}

type UpdaterFn<T> = (previousState: T) => T;

@ -127,6 +130,8 @@ const Table: React.FC<TableProps<any>> = ({
  emptyMessage,
  disabled,
  onRowClick,
  onRowHover,
  onMouseLeave,
}) => {
  const [searchParams, setSearchParams] = useSearchParams();
  const location = useLocation();

@ -194,6 +199,21 @@ const Table: React.FC<TableProps<any>> = ({
    return undefined;
  };

  const handleRowHover = (row: Row<typeof data>) => (e: React.MouseEvent) => {
    if (onRowHover) {
      e.stopPropagation();
      return onRowHover(row);
    }

    return undefined;
  };

  const handleMouseLeave = () => {
    if (onMouseLeave) {
      onMouseLeave();
    }
  };

  return (
    <>
      {BatchActionsBar && (

@ -227,6 +247,12 @@ const Table: React.FC<TableProps<any>> = ({
                  sortable={header.column.getCanSort()}
                  sortOrder={header.column.getIsSorted()}
                  onClick={header.column.getToggleSortingHandler()}
                  style={{
                    width:
                      header.column.getSize() !== 150
                        ? header.column.getSize()
                        : undefined,
                  }}
                >
                  <div>
                    {flexRender(

@ -245,6 +271,8 @@ const Table: React.FC<TableProps<any>> = ({
            <S.Row
              expanded={row.getIsExpanded()}
              onClick={handleRowClick(row)}
              onMouseOver={onRowHover ? handleRowHover(row) : undefined}
              onMouseLeave={onMouseLeave ? handleMouseLeave : undefined}
              clickable={
                !enableRowSelection &&
                (row.getCanExpand() || onRowClick !== undefined)

@ -269,7 +297,13 @@ const Table: React.FC<TableProps<any>> = ({
              {row
                .getVisibleCells()
                .map(({ id, getContext, column: { columnDef } }) => (
                  <td key={id} style={columnDef.meta}>
                  <td
                    key={id}
                    style={{
                      width:
                        columnDef.size !== 150 ? columnDef.size : undefined,
                    }}
                  >
                    {flexRender(columnDef.cell, getContext())}
                  </td>
                ))}
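A consumer sketch for the new hover callbacks (assumption; RowData and the state variable are hypothetical): onRowHover receives the @tanstack/react-table Row for the hovered row and onMouseLeave fires when the pointer leaves it, mirroring how the ACL list above tracks the hovered row id.

const [hoveredRowId, setHoveredRowId] = React.useState('');

<Table
  columns={columns}
  data={rows}
  onRowHover={(row: Row<RowData>) => setHoveredRowId(row.id)}
  onMouseLeave={() => setHoveredRowId('')}
/>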
@ -39,6 +39,10 @@ export const StyledSlider = styled.span<Props>`
  transition: 0.4s;
  border-radius: 20px;

  :hover {
    background-color: ${({ theme }) => theme.switch.hover};
  }

  &::before {
    position: absolute;
    content: '';
@ -6,6 +6,8 @@ interface ConfirmContextType {
  setContent: React.Dispatch<React.SetStateAction<React.ReactNode>>;
  setConfirm: React.Dispatch<React.SetStateAction<(() => void) | undefined>>;
  cancel: () => void;
  dangerButton: boolean;
  setDangerButton: React.Dispatch<React.SetStateAction<boolean>>;
}

export const ConfirmContext = React.createContext<ConfirmContextType | null>(

@ -17,6 +19,7 @@ export const ConfirmContextProvider: React.FC<
> = ({ children }) => {
  const [content, setContent] = useState<React.ReactNode>(null);
  const [confirm, setConfirm] = useState<(() => void) | undefined>(undefined);
  const [dangerButton, setDangerButton] = useState(false);

  const cancel = () => {
    setContent(null);

@ -31,6 +34,8 @@ export const ConfirmContextProvider: React.FC<
        confirm,
        setConfirm,
        cancel,
        dangerButton,
        setDangerButton,
      }}
    >
      {children}
@ -2,7 +2,7 @@ import { useAppInfo } from 'lib/hooks/api/appConfig';
import React from 'react';
import { ApplicationInfoEnabledFeaturesEnum } from 'generated-sources';

export interface GlobalSettingsContextProps {
interface GlobalSettingsContextProps {
  hasDynamicConfig: boolean;
}
@ -10,6 +10,7 @@ import {
  ConsumerGroupsApi,
  AuthorizationApi,
  ApplicationConfigApi,
  AclsApi,
} from 'generated-sources';
import { BASE_PARAMS } from 'lib/constants';

@ -25,3 +26,4 @@ export const kafkaConnectApiClient = new KafkaConnectApi(apiClientConf);
export const consumerGroupsApiClient = new ConsumerGroupsApi(apiClientConf);
export const authApiClient = new AuthorizationApi(apiClientConf);
export const appConfigApiClient = new ApplicationConfigApi(apiClientConf);
export const aclApiClient = new AclsApi(apiClientConf);
37 kafka-ui-react-app/src/lib/fixtures/acls.ts Normal file

@ -0,0 +1,37 @@
import {
  KafkaAcl,
  KafkaAclResourceType,
  KafkaAclNamePatternType,
  KafkaAclPermissionEnum,
  KafkaAclOperationEnum,
} from 'generated-sources';

export const aclPayload: KafkaAcl[] = [
  {
    principal: 'User 1',
    resourceName: 'Topic',
    resourceType: KafkaAclResourceType.TOPIC,
    host: '_host1',
    namePatternType: KafkaAclNamePatternType.LITERAL,
    permission: KafkaAclPermissionEnum.ALLOW,
    operation: KafkaAclOperationEnum.READ,
  },
  {
    principal: 'User 2',
    resourceName: 'Topic',
    resourceType: KafkaAclResourceType.TOPIC,
    host: '_host1',
    namePatternType: KafkaAclNamePatternType.PREFIXED,
    permission: KafkaAclPermissionEnum.ALLOW,
    operation: KafkaAclOperationEnum.READ,
  },
  {
    principal: 'User 3',
    resourceName: 'Topic',
    resourceType: KafkaAclResourceType.TOPIC,
    host: '_host1',
    namePatternType: KafkaAclNamePatternType.LITERAL,
    permission: KafkaAclPermissionEnum.DENY,
    operation: KafkaAclOperationEnum.READ,
  },
];
67 kafka-ui-react-app/src/lib/hooks/api/acl.ts Normal file

@ -0,0 +1,67 @@
import { aclApiClient as api } from 'lib/api';
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
import { ClusterName } from 'redux/interfaces';
import { showSuccessAlert } from 'lib/errorHandling';
import { KafkaAcl } from 'generated-sources';

export function useAcls(clusterName: ClusterName) {
  return useQuery(
    ['clusters', clusterName, 'acls'],
    () => api.listAcls({ clusterName }),
    {
      suspense: false,
    }
  );
}

export function useCreateAclMutation(clusterName: ClusterName) {
  return useMutation(
    (data: KafkaAcl) =>
      api.createAcl({
        clusterName,
        kafkaAcl: data,
      }),
    {
      onSuccess() {
        showSuccessAlert({
          message: 'Your ACL was created successfully',
        });
      },
    }
  );
}

export function useCreateAcl(clusterName: ClusterName) {
  const mutate = useCreateAclMutation(clusterName);

  return {
    createResource: async (param: KafkaAcl) => {
      return mutate.mutateAsync(param);
    },
    ...mutate,
  };
}

export function useDeleteAclMutation(clusterName: ClusterName) {
  const queryClient = useQueryClient();
  return useMutation(
    (acl: KafkaAcl) => api.deleteAcl({ clusterName, kafkaAcl: acl }),
    {
      onSuccess: () => {
        showSuccessAlert({ message: 'ACL deleted' });
        queryClient.invalidateQueries(['clusters', clusterName, 'acls']);
      },
    }
  );
}

export function useDeleteAcl(clusterName: ClusterName) {
  const mutate = useDeleteAclMutation(clusterName);

  return {
    deleteResource: async (param: KafkaAcl) => {
      return mutate.mutateAsync(param);
    },
    ...mutate,
  };
}
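A sketch of how the create hook above could be wired into a form submit (illustration only; CreateAclForm and its props are hypothetical, only useCreateAcl and KafkaAcl come from this commit):

import React from 'react';
import { KafkaAcl } from 'generated-sources';
import { useCreateAcl } from 'lib/hooks/api/acl';

const CreateAclForm: React.FC<{ clusterName: string }> = ({ clusterName }) => {
  const { createResource } = useCreateAcl(clusterName);

  const onSubmit = async (acl: KafkaAcl) => {
    // mutateAsync under the hood; the mutation's onSuccess shows the alert
    await createResource(acl);
  };

  // ...render form fields and call onSubmit with the collected KafkaAcl
  return null;
};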
@ -1,12 +1,13 @@
import { ConfirmContext } from 'components/contexts/ConfirmContext';
import React, { useContext } from 'react';

export const useConfirm = () => {
export const useConfirm = (danger = false) => {
  const context = useContext(ConfirmContext);
  return (
    message: React.ReactNode,
    callback: () => void | Promise<unknown>
  ) => {
    context?.setDangerButton(danger);
    context?.setContent(message);
    context?.setConfirm(() => async () => {
      await callback();
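A usage sketch tying the pieces together (assumption; the surrounding component is hypothetical): passing true sets the ConfirmContext dangerButton flag, so ConfirmationModal renders its confirm button with the new danger buttonType, while the default keeps the primary button.

// Inside a component, as the ACL list above does:
const confirmDelete = useConfirm(true);

confirmDelete('Are you sure want to delete this ACL record?', () =>
  deleteResource(acl)
);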
@ -285,3 +285,10 @@ export const clusterConfigPath = (

const clusterNewConfigRelativePath = 'create-new-cluster';
export const clusterNewConfigPath = `/ui/clusters/${clusterNewConfigRelativePath}`;

// ACL
export const clusterAclRelativePath = 'acl';
export const clusterAclNewRelativePath = 'create-new-acl';
export const clusterACLPath = (
  clusterName: ClusterName = RouteParams.clusterName
) => `${clusterPath(clusterName)}/${clusterAclRelativePath}`;
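Expected behaviour of the new helper, assuming clusterPath() resolves to /ui/clusters/<name> and RouteParams.clusterName is the :clusterName placeholder, as the other cluster*Path helpers in lib/paths appear to do:

clusterACLPath('local'); // '/ui/clusters/local/acl' — used by ClusterMenu
clusterACLPath();        // '/ui/clusters/:clusterName/acl' — used as the route pattern in the List tests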
@ -26,10 +26,7 @@ import {
} from '@tanstack/react-query';
import { ConfirmContextProvider } from 'components/contexts/ConfirmContext';
import ConfirmationModal from 'components/common/ConfirmationModal/ConfirmationModal';
import {
  GlobalSettingsContext,
  GlobalSettingsContextProps,
} from 'components/contexts/GlobalSettingsContext';
import { GlobalSettingsContext } from 'components/contexts/GlobalSettingsContext';
import { UserInfoRolesAccessContext } from 'components/contexts/UserInfoRolesAccessContext';

import { RolesType, modifyRolesData } from './permissions';

@ -38,7 +35,6 @@ interface CustomRenderOptions extends Omit<RenderOptions, 'wrapper'> {
  preloadedState?: Partial<RootState>;
  store?: Store<Partial<RootState>, AnyAction>;
  initialEntries?: MemoryRouterProps['initialEntries'];
  globalSettings?: GlobalSettingsContextProps;
  userInfo?: {
    roles?: RolesType;
    rbacFlag: boolean;

@ -114,7 +110,6 @@ const customRender = (
    preloadedState,
  }),
  initialEntries,
  globalSettings = { hasDynamicConfig: false },
  userInfo,
  ...renderOptions
}: CustomRenderOptions = {}

@ -124,7 +119,7 @@ const customRender = (
  children,
}) => (
  <TestQueryClientProvider>
    <GlobalSettingsContext.Provider value={globalSettings}>
    <GlobalSettingsContext.Provider value={{ hasDynamicConfig: false }}>
      <ThemeProvider theme={theme}>
        <TestUserInfoProvider data={userInfo}>
          <ConfirmContextProvider>
@ -31,6 +31,7 @@ const Colors = {
    '15': '#C2F0D1',
    '30': '#85E0A3',
    '40': '#5CD685',
    '50': '#33CC66',
    '60': '#29A352',
  },
  brand: {

@ -242,16 +243,18 @@ const baseTheme = {
      white: Colors.neutral[10],
      red: Colors.red[10],
      blue: Colors.blue[10],
      secondary: Colors.neutral[15],
    },
    color: Colors.neutral[90],
  },
  switch: {
    unchecked: Colors.brand[30],
    unchecked: Colors.neutral[20],
    hover: Colors.neutral[40],
    checked: Colors.brand[50],
    circle: Colors.neutral[0],
    disabled: Colors.neutral[10],
    checkedIcon: {
      backgroundColor: Colors.neutral[70],
      backgroundColor: Colors.neutral[10],
    },
  },
  pageLoader: {

@ -377,6 +380,7 @@ export const theme = {
        cursor: Colors.neutral[90],
        variable: Colors.red[50],
        aceString: Colors.green[60],
        codeMarker: Colors.yellow[20],
      },
    },
  },

@ -425,8 +429,8 @@ export const theme = {
      disabled: Colors.red[20],
    },
    color: {
      normal: Colors.neutral[90],
      disabled: Colors.neutral[30],
      normal: Colors.neutral[0],
      disabled: Colors.neutral[0],
    },
    invertedColors: {
      normal: Colors.brand[50],

@ -707,6 +711,44 @@ export const theme = {
    textColor: Colors.brand[50],
    deleteIconColor: Colors.brand[50],
  },
  acl: {
    table: {
      deleteIcon: Colors.neutral[50],
    },
    create: {
      radioButtons: {
        green: {
          normal: {
            background: Colors.neutral[0],
            text: Colors.neutral[50],
          },
          active: {
            background: Colors.green[50],
            text: Colors.neutral[0],
          },
          hover: {
            background: Colors.green[10],
            text: Colors.neutral[90],
          },
        },
        gray: {
          normal: {
            background: Colors.neutral[0],
            text: Colors.neutral[50],
          },
          active: {
            background: Colors.neutral[10],
            text: Colors.neutral[90],
          },
          hover: {
            background: Colors.neutral[5],
            text: Colors.neutral[90],
          },
        },
        red: {},
      },
    },
  },
};

export type ThemeType = typeof theme;

@ -781,6 +823,7 @@ export const darkTheme: ThemeType = {
        cursor: Colors.neutral[0],
        variable: Colors.red[50],
        aceString: Colors.green[60],
        codeMarker: Colors.yellow[20],
      },
    },
  },

@ -829,8 +872,8 @@ export const darkTheme: ThemeType = {
      disabled: Colors.red[20],
    },
    color: {
      normal: Colors.neutral[90],
      disabled: Colors.neutral[30],
      normal: Colors.neutral[0],
      disabled: Colors.neutral[0],
    },
    invertedColors: {
      normal: Colors.brand[50],

@ -1180,4 +1223,42 @@ export const darkTheme: ThemeType = {
      color: Colors.neutral[0],
    },
  },
  acl: {
    table: {
      deleteIcon: Colors.neutral[50],
    },
    create: {
      radioButtons: {
        green: {
          normal: {
            background: Colors.neutral[0],
            text: Colors.neutral[50],
          },
          active: {
            background: Colors.green[50],
            text: Colors.neutral[0],
          },
          hover: {
            background: Colors.green[10],
            text: Colors.neutral[0],
          },
        },
        gray: {
          normal: {
            background: Colors.neutral[0],
            text: Colors.neutral[50],
          },
          active: {
            background: Colors.neutral[10],
            text: Colors.neutral[90],
          },
          hover: {
            background: Colors.neutral[5],
            text: Colors.neutral[90],
          },
        },
        red: {},
      },
    },
  },
};