Merge branch 'master' into Wizard_RBAC_Disable_configure_buttons_#3646

David Bejanyan 2023-05-05 19:57:17 +04:00 committed by GitHub
commit bc0d2873d4
70 changed files with 1596 additions and 520 deletions

View file

@ -11,4 +11,8 @@ KafkaClient {
user_admin="admin-secret";
};
Client {};
Client {
org.apache.zookeeper.server.auth.DigestLoginModule required
username="zkuser"
password="zkuserpassword";
};

View file

@ -0,0 +1,4 @@
Server {
org.apache.zookeeper.server.auth.DigestLoginModule required
user_zkuser="zkuserpassword";
};
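
Note: the Client section added to the Kafka broker's JAAS file above authenticates against this Server section, so username="zkuser"/password="zkuserpassword" must match the user_zkuser entry. A minimal Java sketch of how a JVM is pointed at such a JAAS file (illustrative only, mirroring the JVMFLAGS/KAFKA_OPTS settings in the compose file below):

// Illustrative only (not part of this commit): a JVM locates its JAAS login
// configuration via this system property; the JVMFLAGS and KAFKA_OPTS flags
// in the compose file below set the same thing for Zookeeper and Kafka.
public class JaasLocationExample {
  public static void main(String[] args) {
    System.setProperty("java.security.auth.login.config",
        "/etc/zookeeper/zookeeper_jaas.conf");
    // SASL code in this JVM now reads the Server/Client sections from that file.
  }
}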

View file

@ -0,0 +1,59 @@
---
version: '2'
services:
kafka-ui:
container_name: kafka-ui
image: provectuslabs/kafka-ui:latest
ports:
- 8080:8080
depends_on:
- zookeeper
- kafka
environment:
KAFKA_CLUSTERS_0_NAME: local
KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092
KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SASL_PLAINTEXT
KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM: PLAIN
KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin-secret";'
zookeeper:
image: wurstmeister/zookeeper:3.4.6
environment:
JVMFLAGS: "-Djava.security.auth.login.config=/etc/zookeeper/zookeeper_jaas.conf"
volumes:
- ./jaas/zookeeper_jaas.conf:/etc/zookeeper/zookeeper_jaas.conf
ports:
- 2181:2181
kafka:
image: confluentinc/cp-kafka:7.2.1
hostname: kafka
container_name: kafka
ports:
- "9092:9092"
- "9997:9997"
environment:
KAFKA_BROKER_ID: 1
KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,SASL_PLAINTEXT:SASL_PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
KAFKA_ADVERTISED_LISTENERS: 'SASL_PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092'
KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/jaas/kafka_server.conf"
KAFKA_AUTHORIZER_CLASS_NAME: "kafka.security.authorizer.AclAuthorizer"
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
KAFKA_JMX_PORT: 9997
KAFKA_JMX_HOSTNAME: localhost
KAFKA_NODE_ID: 1
KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka:29093'
KAFKA_LISTENERS: 'SASL_PLAINTEXT://kafka:29092,CONTROLLER://kafka:29093,PLAINTEXT_HOST://0.0.0.0:9092'
KAFKA_INTER_BROKER_LISTENER_NAME: 'SASL_PLAINTEXT'
KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: 'PLAIN'
KAFKA_SECURITY_PROTOCOL: 'SASL_PLAINTEXT'
KAFKA_SUPER_USERS: 'User:admin'
volumes:
- ./scripts/update_run.sh:/tmp/update_run.sh
- ./jaas:/etc/kafka/jaas
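
For reference, the three KAFKA_CLUSTERS_0_PROPERTIES_* variables above map one-to-one onto standard Kafka client properties. A minimal sketch of an equivalent plain Java client (illustrative; assumes kafka-clients on the classpath):

import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;

// Illustrative only: the same SASL/PLAIN settings kafka-ui receives via
// environment variables, expressed as plain Kafka client properties.
public class SaslPlainAdminExample {
  public static void main(String[] args) {
    Properties props = new Properties();
    props.put("bootstrap.servers", "kafka:29092");
    props.put("security.protocol", "SASL_PLAINTEXT");
    props.put("sasl.mechanism", "PLAIN");
    props.put("sasl.jaas.config",
        "org.apache.kafka.common.security.plain.PlainLoginModule required "
            + "username=\"admin\" password=\"admin-secret\";");
    try (AdminClient client = AdminClient.create(props)) {
      // any admin call now authenticates over SASL_PLAINTEXT
    }
  }
}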

View file

@ -131,8 +131,9 @@ public class ClustersProperties {
@Data
public static class Masking {
Type type;
List<String> fields; //if null or empty list - policy will be applied to all fields
List<String> pattern; //used when type=MASK
List<String> fields;
String fieldsNamePattern;
List<String> maskingCharsReplacement; //used when type=MASK
String replacement; //used when type=REPLACE
String topicKeysPattern;
String topicValuesPattern;

View file

@ -0,0 +1,26 @@
package com.provectus.kafka.ui.config.auth;
import lombok.Data;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
@ConfigurationProperties("spring.ldap")
@Data
public class LdapProperties {
private String urls;
private String base;
private String adminUser;
private String adminPassword;
private String userFilterSearchBase;
private String userFilterSearchFilter;
@Value("${oauth2.ldap.activeDirectory:false}")
private boolean isActiveDirectory;
@Value("${oauth2.ldap.aсtiveDirectory.domain:@null}")
private String activeDirectoryDomain;
@Value("${oauth2.ldap.groupRoleAttribute:cn}")
private String groupRoleAttribute;
}

View file

@ -1,13 +1,23 @@
package com.provectus.kafka.ui.config.auth;
import static com.provectus.kafka.ui.config.auth.AbstractAuthSecurityConfig.AUTH_WHITELIST;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import com.provectus.kafka.ui.service.rbac.extractor.RbacLdapAuthoritiesExtractor;
import java.util.Collection;
import java.util.List;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.ldap.LdapAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Primary;
import org.springframework.ldap.core.DirContextOperations;
import org.springframework.ldap.core.support.BaseLdapPathContextSource;
import org.springframework.ldap.core.support.LdapContextSource;
import org.springframework.security.authentication.AuthenticationManager;
@ -16,70 +26,71 @@ import org.springframework.security.authentication.ReactiveAuthenticationManager
import org.springframework.security.authentication.ReactiveAuthenticationManagerAdapter;
import org.springframework.security.config.annotation.web.reactive.EnableWebFluxSecurity;
import org.springframework.security.config.web.server.ServerHttpSecurity;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.ldap.authentication.AbstractLdapAuthenticationProvider;
import org.springframework.security.ldap.authentication.BindAuthenticator;
import org.springframework.security.ldap.authentication.LdapAuthenticationProvider;
import org.springframework.security.ldap.authentication.ad.ActiveDirectoryLdapAuthenticationProvider;
import org.springframework.security.ldap.search.FilterBasedLdapUserSearch;
import org.springframework.security.ldap.search.LdapUserSearch;
import org.springframework.security.ldap.userdetails.LdapUserDetailsMapper;
import org.springframework.security.web.server.SecurityWebFilterChain;
@Configuration
@EnableWebFluxSecurity
@ConditionalOnProperty(value = "auth.type", havingValue = "LDAP")
@Import(LdapAutoConfiguration.class)
@EnableConfigurationProperties(LdapProperties.class)
@RequiredArgsConstructor
@Slf4j
public class LdapSecurityConfig extends AbstractAuthSecurityConfig {
public class LdapSecurityConfig {
@Value("${spring.ldap.urls}")
private String ldapUrls;
@Value("${spring.ldap.dn.pattern:#{null}}")
private String ldapUserDnPattern;
@Value("${spring.ldap.adminUser:#{null}}")
private String adminUser;
@Value("${spring.ldap.adminPassword:#{null}}")
private String adminPassword;
@Value("${spring.ldap.userFilter.searchBase:#{null}}")
private String userFilterSearchBase;
@Value("${spring.ldap.userFilter.searchFilter:#{null}}")
private String userFilterSearchFilter;
@Value("${oauth2.ldap.activeDirectory:false}")
private boolean isActiveDirectory;
@Value("${oauth2.ldap.aсtiveDirectory.domain:#{null}}")
private String activeDirectoryDomain;
private final LdapProperties props;
@Bean
public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSource contextSource) {
public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSource contextSource,
ApplicationContext context,
@Nullable AccessControlService acs) {
var rbacEnabled = acs != null && acs.isRbacEnabled();
BindAuthenticator ba = new BindAuthenticator(contextSource);
if (ldapUserDnPattern != null) {
ba.setUserDnPatterns(new String[] {ldapUserDnPattern});
if (props.getBase() != null) {
ba.setUserDnPatterns(new String[] {props.getBase()});
}
if (userFilterSearchFilter != null) {
if (props.getUserFilterSearchFilter() != null) {
LdapUserSearch userSearch =
new FilterBasedLdapUserSearch(userFilterSearchBase, userFilterSearchFilter, contextSource);
new FilterBasedLdapUserSearch(props.getUserFilterSearchBase(), props.getUserFilterSearchFilter(),
contextSource);
ba.setUserSearch(userSearch);
}
AbstractLdapAuthenticationProvider authenticationProvider;
if (!isActiveDirectory) {
authenticationProvider = new LdapAuthenticationProvider(ba);
if (!props.isActiveDirectory()) {
authenticationProvider = rbacEnabled
? new LdapAuthenticationProvider(ba, new RbacLdapAuthoritiesExtractor(context))
: new LdapAuthenticationProvider(ba);
} else {
authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(activeDirectoryDomain, ldapUrls);
authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(props.getActiveDirectoryDomain(),
props.getUrls()); // TODO Issue #3741
authenticationProvider.setUseAuthenticationRequestCredentials(true);
}
if (rbacEnabled) {
authenticationProvider.setUserDetailsContextMapper(new UserDetailsMapper());
}
AuthenticationManager am = new ProviderManager(List.of(authenticationProvider));
return new ReactiveAuthenticationManagerAdapter(am);
}
@Bean
@Primary
public BaseLdapPathContextSource contextSource() {
LdapContextSource ctx = new LdapContextSource();
ctx.setUrl(ldapUrls);
ctx.setUserDn(adminUser);
ctx.setPassword(adminPassword);
ctx.setUrl(props.getUrls());
ctx.setUserDn(props.getAdminUser());
ctx.setPassword(props.getAdminPassword());
ctx.afterPropertiesSet();
return ctx;
}
@ -87,20 +98,35 @@ public class LdapSecurityConfig extends AbstractAuthSecurityConfig {
@Bean
public SecurityWebFilterChain configureLdap(ServerHttpSecurity http) {
log.info("Configuring LDAP authentication.");
if (isActiveDirectory) {
if (props.isActiveDirectory()) {
log.info("Active Directory support for LDAP has been enabled.");
}
http
return http
.authorizeExchange()
.pathMatchers(AUTH_WHITELIST)
.permitAll()
.anyExchange()
.authenticated()
.and()
.httpBasic();
return http.csrf().disable().build();
.and()
.formLogin()
.and()
.logout()
.and()
.csrf().disable()
.build();
}
private static class UserDetailsMapper extends LdapUserDetailsMapper {
@Override
public UserDetails mapUserFromContext(DirContextOperations ctx, String username,
Collection<? extends GrantedAuthority> authorities) {
UserDetails userDetails = super.mapUserFromContext(ctx, username, authorities);
return new RbacLdapUser(userDetails);
}
}
}

View file

@ -115,7 +115,7 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
@Nullable
private ProviderAuthorityExtractor getExtractor(final String providerId, AccessControlService acs) {
final String provider = getProviderByProviderId(providerId);
Optional<ProviderAuthorityExtractor> extractor = acs.getExtractors()
Optional<ProviderAuthorityExtractor> extractor = acs.getOauthExtractors()
.stream()
.filter(e -> e.isApplicable(provider))
.findFirst();

View file

@ -0,0 +1,60 @@
package com.provectus.kafka.ui.config.auth;
import java.util.Collection;
import java.util.stream.Collectors;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
public class RbacLdapUser implements UserDetails, RbacUser {
private final UserDetails userDetails;
public RbacLdapUser(UserDetails userDetails) {
this.userDetails = userDetails;
}
@Override
public String name() {
return userDetails.getUsername();
}
@Override
public Collection<String> groups() {
return userDetails.getAuthorities().stream().map(GrantedAuthority::getAuthority).collect(Collectors.toSet());
}
@Override
public Collection<? extends GrantedAuthority> getAuthorities() {
return userDetails.getAuthorities();
}
@Override
public String getPassword() {
return userDetails.getPassword();
}
@Override
public String getUsername() {
return userDetails.getUsername();
}
@Override
public boolean isAccountNonExpired() {
return userDetails.isAccountNonExpired();
}
@Override
public boolean isAccountNonLocked() {
return userDetails.isAccountNonLocked();
}
@Override
public boolean isCredentialsNonExpired() {
return userDetails.isCredentialsNonExpired();
}
@Override
public boolean isEnabled() {
return userDetails.isEnabled();
}
}

View file

@ -0,0 +1,21 @@
package com.provectus.kafka.ui.config.auth.condition;
import org.springframework.boot.autoconfigure.condition.AllNestedConditions;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
public class ActiveDirectoryCondition extends AllNestedConditions {
public ActiveDirectoryCondition() {
super(ConfigurationPhase.PARSE_CONFIGURATION);
}
@ConditionalOnProperty(value = "auth.type", havingValue = "LDAP")
public static class OnAuthType {
}
@ConditionalOnProperty(value = "${oauth2.ldap.activeDirectory}:false", havingValue = "true", matchIfMissing = false)
public static class OnActiveDirectory {
}
}
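
AllNestedConditions with ConfigurationPhase.PARSE_CONFIGURATION matches only when every nested condition matches, i.e. auth.type=LDAP and oauth2.ldap.activeDirectory=true are both set. A hypothetical bean guarded by it (illustrative, not part of this commit):

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;

// Illustrative usage: this bean is only registered when both nested
// conditions of ActiveDirectoryCondition hold.
@Configuration
public class ActiveDirectoryOnlyBeans {
  @Bean
  @Conditional(ActiveDirectoryCondition.class)
  public String activeDirectoryMarker() { // hypothetical marker bean
    return "active-directory-enabled";
  }
}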

View file

@ -0,0 +1,115 @@
package com.provectus.kafka.ui.controller;
import com.provectus.kafka.ui.api.AclsApi;
import com.provectus.kafka.ui.mapper.ClusterMapper;
import com.provectus.kafka.ui.model.KafkaAclDTO;
import com.provectus.kafka.ui.model.KafkaAclNamePatternTypeDTO;
import com.provectus.kafka.ui.model.KafkaAclResourceTypeDTO;
import com.provectus.kafka.ui.model.rbac.AccessContext;
import com.provectus.kafka.ui.model.rbac.permission.AclAction;
import com.provectus.kafka.ui.service.acl.AclsService;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import org.apache.kafka.common.resource.PatternType;
import org.apache.kafka.common.resource.ResourcePatternFilter;
import org.apache.kafka.common.resource.ResourceType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.server.ServerWebExchange;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@RestController
@RequiredArgsConstructor
public class AclsController extends AbstractController implements AclsApi {
private final AclsService aclsService;
private final AccessControlService accessControlService;
@Override
public Mono<ResponseEntity<Void>> createAcl(String clusterName, Mono<KafkaAclDTO> kafkaAclDto,
ServerWebExchange exchange) {
AccessContext context = AccessContext.builder()
.cluster(clusterName)
.aclActions(AclAction.EDIT)
.build();
return accessControlService.validateAccess(context)
.then(kafkaAclDto)
.map(ClusterMapper::toAclBinding)
.flatMap(binding -> aclsService.createAcl(getCluster(clusterName), binding))
.thenReturn(ResponseEntity.ok().build());
}
@Override
public Mono<ResponseEntity<Void>> deleteAcl(String clusterName, Mono<KafkaAclDTO> kafkaAclDto,
ServerWebExchange exchange) {
AccessContext context = AccessContext.builder()
.cluster(clusterName)
.aclActions(AclAction.EDIT)
.build();
return accessControlService.validateAccess(context)
.then(kafkaAclDto)
.map(ClusterMapper::toAclBinding)
.flatMap(binding -> aclsService.deleteAcl(getCluster(clusterName), binding))
.thenReturn(ResponseEntity.ok().build());
}
@Override
public Mono<ResponseEntity<Flux<KafkaAclDTO>>> listAcls(String clusterName,
KafkaAclResourceTypeDTO resourceTypeDto,
String resourceName,
KafkaAclNamePatternTypeDTO namePatternTypeDto,
ServerWebExchange exchange) {
AccessContext context = AccessContext.builder()
.cluster(clusterName)
.aclActions(AclAction.VIEW)
.build();
var resourceType = Optional.ofNullable(resourceTypeDto)
.map(ClusterMapper::mapAclResourceTypeDto)
.orElse(ResourceType.ANY);
var namePatternType = Optional.ofNullable(namePatternTypeDto)
.map(ClusterMapper::mapPatternTypeDto)
.orElse(PatternType.ANY);
var filter = new ResourcePatternFilter(resourceType, resourceName, namePatternType);
return accessControlService.validateAccess(context).then(
Mono.just(
ResponseEntity.ok(
aclsService.listAcls(getCluster(clusterName), filter)
.map(ClusterMapper::toKafkaAclDto)))
);
}
@Override
public Mono<ResponseEntity<String>> getAclAsCsv(String clusterName, ServerWebExchange exchange) {
AccessContext context = AccessContext.builder()
.cluster(clusterName)
.aclActions(AclAction.VIEW)
.build();
return accessControlService.validateAccess(context).then(
aclsService.getAclAsCsvString(getCluster(clusterName))
.map(ResponseEntity::ok)
);
}
@Override
public Mono<ResponseEntity<Void>> syncAclsCsv(String clusterName, Mono<String> csvMono, ServerWebExchange exchange) {
AccessContext context = AccessContext.builder()
.cluster(clusterName)
.aclActions(AclAction.EDIT)
.build();
return accessControlService.validateAccess(context)
.then(csvMono)
.flatMap(csv -> aclsService.syncAclWithAclCsv(getCluster(clusterName), csv))
.thenReturn(ResponseEntity.ok().build());
}
}

View file

@ -27,6 +27,7 @@ import org.mapstruct.Mapper;
import org.mapstruct.factory.Mappers;
import org.springframework.http.ResponseEntity;
import org.springframework.http.codec.multipart.FilePart;
import org.springframework.http.codec.multipart.Part;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.server.ServerWebExchange;
import reactor.core.publisher.Flux;
@ -92,16 +93,19 @@ public class ApplicationConfigController implements ApplicationConfigApi {
}
@Override
public Mono<ResponseEntity<UploadedFileInfoDTO>> uploadConfigRelatedFile(FilePart file, ServerWebExchange exchange) {
public Mono<ResponseEntity<UploadedFileInfoDTO>> uploadConfigRelatedFile(Flux<Part> fileFlux,
ServerWebExchange exchange) {
return accessControlService
.validateAccess(
AccessContext.builder()
.applicationConfigActions(EDIT)
.build()
)
.then(dynamicConfigOperations.uploadConfigRelatedFile(file))
.map(path -> new UploadedFileInfoDTO().location(path.toString()))
.map(ResponseEntity::ok);
.then(fileFlux.single())
.flatMap(file ->
dynamicConfigOperations.uploadConfigRelatedFile((FilePart) file)
.map(path -> new UploadedFileInfoDTO().location(path.toString()))
.map(ResponseEntity::ok));
}
@Override

View file

@ -20,6 +20,9 @@ import com.provectus.kafka.ui.model.InternalPartition;
import com.provectus.kafka.ui.model.InternalReplica;
import com.provectus.kafka.ui.model.InternalTopic;
import com.provectus.kafka.ui.model.InternalTopicConfig;
import com.provectus.kafka.ui.model.KafkaAclDTO;
import com.provectus.kafka.ui.model.KafkaAclNamePatternTypeDTO;
import com.provectus.kafka.ui.model.KafkaAclResourceTypeDTO;
import com.provectus.kafka.ui.model.MetricDTO;
import com.provectus.kafka.ui.model.Metrics;
import com.provectus.kafka.ui.model.PartitionDTO;
@ -27,12 +30,18 @@ import com.provectus.kafka.ui.model.ReplicaDTO;
import com.provectus.kafka.ui.model.TopicConfigDTO;
import com.provectus.kafka.ui.model.TopicDTO;
import com.provectus.kafka.ui.model.TopicDetailsDTO;
import com.provectus.kafka.ui.service.masking.DataMasking;
import com.provectus.kafka.ui.service.metrics.RawMetric;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.kafka.clients.admin.ConfigEntry;
import org.apache.kafka.common.acl.AccessControlEntry;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.acl.AclPermissionType;
import org.apache.kafka.common.resource.PatternType;
import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourceType;
import org.mapstruct.Mapper;
import org.mapstruct.Mapping;
@ -109,8 +118,74 @@ public interface ClusterMapper {
return brokerDiskUsage;
}
default DataMasking map(List<ClustersProperties.Masking> maskingProperties) {
return DataMasking.create(maskingProperties);
static KafkaAclDTO.OperationEnum mapAclOperation(AclOperation operation) {
return switch (operation) {
case ALL -> KafkaAclDTO.OperationEnum.ALL;
case READ -> KafkaAclDTO.OperationEnum.READ;
case WRITE -> KafkaAclDTO.OperationEnum.WRITE;
case CREATE -> KafkaAclDTO.OperationEnum.CREATE;
case DELETE -> KafkaAclDTO.OperationEnum.DELETE;
case ALTER -> KafkaAclDTO.OperationEnum.ALTER;
case DESCRIBE -> KafkaAclDTO.OperationEnum.DESCRIBE;
case CLUSTER_ACTION -> KafkaAclDTO.OperationEnum.CLUSTER_ACTION;
case DESCRIBE_CONFIGS -> KafkaAclDTO.OperationEnum.DESCRIBE_CONFIGS;
case ALTER_CONFIGS -> KafkaAclDTO.OperationEnum.ALTER_CONFIGS;
case IDEMPOTENT_WRITE -> KafkaAclDTO.OperationEnum.IDEMPOTENT_WRITE;
case CREATE_TOKENS -> KafkaAclDTO.OperationEnum.CREATE_TOKENS;
case DESCRIBE_TOKENS -> KafkaAclDTO.OperationEnum.DESCRIBE_TOKENS;
case ANY -> throw new IllegalArgumentException("ANY operation can be only part of filter");
case UNKNOWN -> KafkaAclDTO.OperationEnum.UNKNOWN;
};
}
static KafkaAclResourceTypeDTO mapAclResourceType(ResourceType resourceType) {
return switch (resourceType) {
case CLUSTER -> KafkaAclResourceTypeDTO.CLUSTER;
case TOPIC -> KafkaAclResourceTypeDTO.TOPIC;
case GROUP -> KafkaAclResourceTypeDTO.GROUP;
case DELEGATION_TOKEN -> KafkaAclResourceTypeDTO.DELEGATION_TOKEN;
case TRANSACTIONAL_ID -> KafkaAclResourceTypeDTO.TRANSACTIONAL_ID;
case USER -> KafkaAclResourceTypeDTO.USER;
case ANY -> throw new IllegalArgumentException("ANY type can be only part of filter");
case UNKNOWN -> KafkaAclResourceTypeDTO.UNKNOWN;
};
}
static ResourceType mapAclResourceTypeDto(KafkaAclResourceTypeDTO dto) {
return ResourceType.valueOf(dto.name());
}
static PatternType mapPatternTypeDto(KafkaAclNamePatternTypeDTO dto) {
return PatternType.valueOf(dto.name());
}
static AclBinding toAclBinding(KafkaAclDTO dto) {
return new AclBinding(
new ResourcePattern(
mapAclResourceTypeDto(dto.getResourceType()),
dto.getResourceName(),
mapPatternTypeDto(dto.getNamePatternType())
),
new AccessControlEntry(
dto.getPrincipal(),
dto.getHost(),
AclOperation.valueOf(dto.getOperation().name()),
AclPermissionType.valueOf(dto.getPermission().name())
)
);
}
static KafkaAclDTO toKafkaAclDto(AclBinding binding) {
var pattern = binding.pattern();
var filter = binding.toFilter().entryFilter();
return new KafkaAclDTO()
.resourceType(mapAclResourceType(pattern.resourceType()))
.resourceName(pattern.name())
.namePatternType(KafkaAclNamePatternTypeDTO.fromValue(pattern.patternType().name()))
.principal(filter.principal())
.host(filter.host())
.operation(mapAclOperation(filter.operation()))
.permission(KafkaAclDTO.PermissionEnum.fromValue(filter.permissionType().name()));
}
}
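
The new mappers are mutual inverses for concrete (non-ANY) values. A minimal round-trip sketch (values are illustrative; the enum constants are assumed from the generated DTO types):

// Illustrative round trip: DTO -> AclBinding -> DTO preserves all fields.
KafkaAclDTO dto = new KafkaAclDTO()
    .resourceType(KafkaAclResourceTypeDTO.TOPIC)
    .resourceName("payments")
    .namePatternType(KafkaAclNamePatternTypeDTO.LITERAL)
    .principal("User:app1")
    .host("*")
    .operation(KafkaAclDTO.OperationEnum.READ)
    .permission(KafkaAclDTO.PermissionEnum.ALLOW);
AclBinding binding = ClusterMapper.toAclBinding(dto);
KafkaAclDTO roundTripped = ClusterMapper.toKafkaAclDto(binding);
// roundTripped now equals dto field-by-field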

View file

@ -4,5 +4,7 @@ public enum ClusterFeature {
KAFKA_CONNECT,
KSQL_DB,
SCHEMA_REGISTRY,
TOPIC_DELETION
TOPIC_DELETION,
KAFKA_ACL_VIEW,
KAFKA_ACL_EDIT
}

View file

@ -1,5 +1,6 @@
package com.provectus.kafka.ui.model.rbac;
import com.provectus.kafka.ui.model.rbac.permission.AclAction;
import com.provectus.kafka.ui.model.rbac.permission.ApplicationConfigAction;
import com.provectus.kafka.ui.model.rbac.permission.ClusterConfigAction;
import com.provectus.kafka.ui.model.rbac.permission.ConnectAction;
@ -37,6 +38,8 @@ public class AccessContext {
Collection<KsqlAction> ksqlActions;
Collection<AclAction> aclActions;
public static AccessContextBuilder builder() {
return new AccessContextBuilder();
}
@ -55,6 +58,7 @@ public class AccessContext {
private String schema;
private Collection<SchemaAction> schemaActions = Collections.emptySet();
private Collection<KsqlAction> ksqlActions = Collections.emptySet();
private Collection<AclAction> aclActions = Collections.emptySet();
private AccessContextBuilder() {
}
@ -131,6 +135,12 @@ public class AccessContext {
return this;
}
public AccessContextBuilder aclActions(AclAction... actions) {
Assert.isTrue(actions.length > 0, "actions not present");
this.aclActions = List.of(actions);
return this;
}
public AccessContext build() {
return new AccessContext(
applicationConfigActions,
@ -140,7 +150,7 @@ public class AccessContext {
connect, connectActions,
connector,
schema, schemaActions,
ksqlActions);
ksqlActions, aclActions);
}
}
}

View file

@ -4,6 +4,7 @@ import static com.provectus.kafka.ui.model.rbac.Resource.APPLICATIONCONFIG;
import static com.provectus.kafka.ui.model.rbac.Resource.CLUSTERCONFIG;
import static com.provectus.kafka.ui.model.rbac.Resource.KSQL;
import com.provectus.kafka.ui.model.rbac.permission.AclAction;
import com.provectus.kafka.ui.model.rbac.permission.ApplicationConfigAction;
import com.provectus.kafka.ui.model.rbac.permission.ClusterConfigAction;
import com.provectus.kafka.ui.model.rbac.permission.ConnectAction;
@ -76,6 +77,7 @@ public class Permission {
case SCHEMA -> Arrays.stream(SchemaAction.values()).map(Enum::toString).toList();
case CONNECT -> Arrays.stream(ConnectAction.values()).map(Enum::toString).toList();
case KSQL -> Arrays.stream(KsqlAction.values()).map(Enum::toString).toList();
case ACL -> Arrays.stream(AclAction.values()).map(Enum::toString).toList();
};
}

View file

@ -11,7 +11,8 @@ public enum Resource {
CONSUMER,
SCHEMA,
CONNECT,
KSQL;
KSQL,
ACL;
@Nullable
public static Resource fromString(String name) {

View file

@ -0,0 +1,15 @@
package com.provectus.kafka.ui.model.rbac.permission;
import org.apache.commons.lang3.EnumUtils;
import org.jetbrains.annotations.Nullable;
public enum AclAction implements PermissibleAction {
VIEW,
EDIT;
@Nullable
public static AclAction fromString(String name) {
return EnumUtils.getEnum(AclAction.class, name);
}
}

View file

@ -123,11 +123,11 @@ public class ConsumerRecordDeserializer {
}
private static Long getKeySize(ConsumerRecord<Bytes, Bytes> consumerRecord) {
return consumerRecord.key() != null ? (long) consumerRecord.key().get().length : null;
return consumerRecord.key() != null ? (long) consumerRecord.serializedKeySize() : null;
}
private static Long getValueSize(ConsumerRecord<Bytes, Bytes> consumerRecord) {
return consumerRecord.value() != null ? (long) consumerRecord.value().get().length : null;
return consumerRecord.value() != null ? (long) consumerRecord.serializedValueSize() : null;
}
private static int headerSize(Header header) {

View file

@ -122,8 +122,6 @@ public class SerdesInitializer {
registeredSerdes,
Optional.ofNullable(clusterProperties.getDefaultKeySerde())
.map(name -> Preconditions.checkNotNull(registeredSerdes.get(name), "Default key serde not found"))
.or(() -> Optional.ofNullable(registeredSerdes.get(SchemaRegistrySerde.name())))
.or(() -> Optional.ofNullable(registeredSerdes.get(ProtobufFileSerde.name())))
.orElse(null),
Optional.ofNullable(clusterProperties.getDefaultValueSerde())
.map(name -> Preconditions.checkNotNull(registeredSerdes.get(name), "Default value serde not found"))

View file

@ -2,16 +2,19 @@ package com.provectus.kafka.ui.service;
import com.provectus.kafka.ui.model.ClusterFeature;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.service.ReactiveAdminClient.ClusterDescription;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.acl.AclOperation;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@ -26,7 +29,7 @@ public class FeatureService {
private final AdminClientService adminClientService;
public Mono<List<ClusterFeature>> getAvailableFeatures(KafkaCluster cluster,
ReactiveAdminClient.ClusterDescription clusterDescription) {
ClusterDescription clusterDescription) {
List<Mono<ClusterFeature>> features = new ArrayList<>();
if (Optional.ofNullable(cluster.getConnectsClients())
@ -44,6 +47,8 @@ public class FeatureService {
}
features.add(topicDeletionEnabled(cluster, clusterDescription.getController()));
features.add(aclView(cluster));
features.add(aclEdit(clusterDescription));
return Flux.fromIterable(features).flatMap(m -> m).collectList();
}
@ -65,4 +70,20 @@ public class FeatureService {
? Mono.just(ClusterFeature.TOPIC_DELETION)
: Mono.empty());
}
private Mono<ClusterFeature> aclEdit(ClusterDescription clusterDescription) {
var authorizedOps = Optional.ofNullable(clusterDescription.getAuthorizedOperations()).orElse(Set.of());
boolean canEdit = authorizedOps.contains(AclOperation.ALL) || authorizedOps.contains(AclOperation.ALTER);
return canEdit
? Mono.just(ClusterFeature.KAFKA_ACL_EDIT)
: Mono.empty();
}
private Mono<ClusterFeature> aclView(KafkaCluster cluster) {
return adminClientService.get(cluster).flatMap(
ac -> ac.getClusterFeatures().contains(ReactiveAdminClient.SupportedFeature.AUTHORIZED_SECURITY_ENABLED)
? Mono.just(ClusterFeature.KAFKA_ACL_VIEW)
: Mono.empty()
);
}
}

View file

@ -109,6 +109,7 @@ public class KafkaConnectService {
private Stream<String> getStringsForSearch(FullConnectorInfoDTO fullConnectorInfo) {
return Stream.of(
fullConnectorInfo.getName(),
fullConnectorInfo.getConnect(),
fullConnectorInfo.getStatus().getState().getValue(),
fullConnectorInfo.getType().getValue());
}

View file

@ -5,6 +5,7 @@ import static java.util.stream.Collectors.toMap;
import static org.apache.kafka.clients.admin.ListOffsetsResult.ListOffsetsResultInfo;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableTable;
import com.google.common.collect.Iterables;
import com.google.common.collect.Table;
@ -15,7 +16,6 @@ import com.provectus.kafka.ui.util.KafkaVersion;
import com.provectus.kafka.ui.util.annotation.KafkaClientInternalsDependant;
import java.io.Closeable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
@ -61,16 +61,22 @@ import org.apache.kafka.common.Node;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.TopicPartitionInfo;
import org.apache.kafka.common.TopicPartitionReplica;
import org.apache.kafka.common.acl.AccessControlEntryFilter;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclBindingFilter;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.config.ConfigResource;
import org.apache.kafka.common.errors.ClusterAuthorizationException;
import org.apache.kafka.common.errors.GroupIdNotFoundException;
import org.apache.kafka.common.errors.GroupNotEmptyException;
import org.apache.kafka.common.errors.InvalidRequestException;
import org.apache.kafka.common.errors.SecurityDisabledException;
import org.apache.kafka.common.errors.TopicAuthorizationException;
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
import org.apache.kafka.common.errors.UnsupportedVersionException;
import org.apache.kafka.common.requests.DescribeLogDirsResponse;
import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourcePatternFilter;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;
@ -82,26 +88,29 @@ import reactor.util.function.Tuples;
@RequiredArgsConstructor
public class ReactiveAdminClient implements Closeable {
private enum SupportedFeature {
public enum SupportedFeature {
INCREMENTAL_ALTER_CONFIGS(2.3f),
CONFIG_DOCUMENTATION_RETRIEVAL(2.6f),
DESCRIBE_CLUSTER_INCLUDE_AUTHORIZED_OPERATIONS(2.3f);
DESCRIBE_CLUSTER_INCLUDE_AUTHORIZED_OPERATIONS(2.3f),
AUTHORIZED_SECURITY_ENABLED(ReactiveAdminClient::isAuthorizedSecurityEnabled);
private final float sinceVersion;
private final BiFunction<AdminClient, Float, Mono<Boolean>> predicate;
SupportedFeature(float sinceVersion) {
this.sinceVersion = sinceVersion;
SupportedFeature(BiFunction<AdminClient, Float, Mono<Boolean>> predicate) {
this.predicate = predicate;
}
static Set<SupportedFeature> forVersion(float kafkaVersion) {
return Arrays.stream(SupportedFeature.values())
.filter(f -> kafkaVersion >= f.sinceVersion)
SupportedFeature(float fromVersion) {
this.predicate = (admin, ver) -> Mono.just(ver != null && ver >= fromVersion);
}
static Mono<Set<SupportedFeature>> forVersion(AdminClient ac, @Nullable Float kafkaVersion) {
return Flux.fromArray(SupportedFeature.values())
.flatMap(f -> f.predicate.apply(ac, kafkaVersion).map(enabled -> Tuples.of(f, enabled)))
.filter(Tuple2::getT2)
.map(Tuple2::getT1)
.collect(Collectors.toSet());
}
static Set<SupportedFeature> defaultFeatures() {
return Set.of();
}
}
@Value
@ -110,25 +119,31 @@ public class ReactiveAdminClient implements Closeable {
Node controller;
String clusterId;
Collection<Node> nodes;
@Nullable // null, if ACL is disabled
Set<AclOperation> authorizedOperations;
}
public static Mono<ReactiveAdminClient> create(AdminClient adminClient) {
return getClusterVersion(adminClient)
.map(ver ->
new ReactiveAdminClient(
adminClient,
ver,
getSupportedUpdateFeaturesForVersion(ver)));
.flatMap(ver ->
getSupportedUpdateFeaturesForVersion(adminClient, ver)
.map(features ->
new ReactiveAdminClient(adminClient, ver, features)));
}
private static Set<SupportedFeature> getSupportedUpdateFeaturesForVersion(String versionStr) {
try {
float version = KafkaVersion.parse(versionStr);
return SupportedFeature.forVersion(version);
} catch (NumberFormatException e) {
return SupportedFeature.defaultFeatures();
}
private static Mono<Set<SupportedFeature>> getSupportedUpdateFeaturesForVersion(AdminClient ac, String versionStr) {
@Nullable Float kafkaVersion = KafkaVersion.parse(versionStr).orElse(null);
return SupportedFeature.forVersion(ac, kafkaVersion);
}
private static Mono<Boolean> isAuthorizedSecurityEnabled(AdminClient ac, @Nullable Float kafkaVersion) {
return toMono(ac.describeAcls(AclBindingFilter.ANY).values())
.thenReturn(true)
.doOnError(th -> !(th instanceof SecurityDisabledException)
&& !(th instanceof InvalidRequestException)
&& !(th instanceof UnsupportedVersionException),
th -> log.warn("Error checking if security is enabled", th))
.onErrorReturn(false);
}
// NOTE: if KafkaFuture returns null, that Mono will be empty(!), since Reactor does not support nullable results
@ -162,6 +177,10 @@ public class ReactiveAdminClient implements Closeable {
private final String version;
private final Set<SupportedFeature> features;
public Set<SupportedFeature> getClusterFeatures() {
return features;
}
public Mono<Set<String>> listTopics(boolean listInternal) {
return toMono(client.listTopics(new ListTopicsOptions().listInternal(listInternal)).names());
}
@ -576,6 +595,22 @@ public class ReactiveAdminClient implements Closeable {
);
}
public Mono<Collection<AclBinding>> listAcls(ResourcePatternFilter filter) {
Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
return toMono(client.describeAcls(new AclBindingFilter(filter, AccessControlEntryFilter.ANY)).values());
}
public Mono<Void> createAcls(Collection<AclBinding> aclBindings) {
Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
return toMono(client.createAcls(aclBindings).all());
}
public Mono<Void> deleteAcls(Collection<AclBinding> aclBindings) {
Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
var filters = aclBindings.stream().map(AclBinding::toFilter).collect(Collectors.toSet());
return toMono(client.deleteAcls(filters).all()).then();
}
public Mono<Void> updateBrokerConfigByName(Integer brokerId, String name, String value) {
ConfigResource cr = new ConfigResource(ConfigResource.Type.BROKER, String.valueOf(brokerId));
AlterConfigOp op = new AlterConfigOp(new ConfigEntry(name, value), AlterConfigOp.OpType.SET);

View file

@ -0,0 +1,81 @@
package com.provectus.kafka.ui.service.acl;
import com.provectus.kafka.ui.exception.ValidationException;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.kafka.common.acl.AccessControlEntry;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.acl.AclPermissionType;
import org.apache.kafka.common.resource.PatternType;
import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourceType;
public class AclCsv {
private static final String LINE_SEPARATOR = System.lineSeparator();
private static final String VALUES_SEPARATOR = ",";
private static final String HEADER = "Principal,ResourceType,PatternType,ResourceName,Operation,PermissionType,Host";
public static String transformToCsvString(Collection<AclBinding> acls) {
return Stream.concat(Stream.of(HEADER), acls.stream().map(AclCsv::createAclString))
.collect(Collectors.joining(System.lineSeparator()));
}
public static String createAclString(AclBinding binding) {
var pattern = binding.pattern();
var filter = binding.toFilter().entryFilter();
return String.format(
"%s,%s,%s,%s,%s,%s,%s",
filter.principal(),
pattern.resourceType(),
pattern.patternType(),
pattern.name(),
filter.operation(),
filter.permissionType(),
filter.host()
);
}
private static AclBinding parseCsvLine(String csv, int line) {
String[] values = csv.split(VALUES_SEPARATOR);
if (values.length != 7) {
throw new ValidationException("Input csv is not valid - there should be 7 columns in line " + line);
}
for (int i = 0; i < values.length; i++) {
if ((values[i] = values[i].trim()).isBlank()) {
throw new ValidationException("Input csv is not valid - blank value in colum " + i + ", line " + line);
}
}
try {
return new AclBinding(
new ResourcePattern(
ResourceType.valueOf(values[1]), values[3], PatternType.valueOf(values[2])),
new AccessControlEntry(
values[0], values[6], AclOperation.valueOf(values[4]), AclPermissionType.valueOf(values[5]))
);
} catch (IllegalArgumentException enumParseError) {
throw new ValidationException("Error parsing enum value in line " + line);
}
}
public static Collection<AclBinding> parseCsv(String csvString) {
String[] lines = csvString.split(LINE_SEPARATOR);
if (lines.length == 0) {
throw new ValidationException("Error parsing ACL csv file: no lines in file");
}
boolean firstLineIsHeader = HEADER.equalsIgnoreCase(lines[0].trim().replace(" ", ""));
Set<AclBinding> result = new HashSet<>();
for (int i = firstLineIsHeader ? 1 : 0; i < lines.length; i++) {
String line = lines[i];
if (!line.isBlank()) {
AclBinding aclBinding = parseCsvLine(line, i);
result.add(aclBinding);
}
}
return result;
}
}
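
A round-trip sketch of the CSV format defined above (sample values illustrative):

// Illustrative: parse a header plus one binding, then render it back.
String csv =
    "Principal,ResourceType,PatternType,ResourceName,Operation,PermissionType,Host"
        + System.lineSeparator()
        + "User:app1,TOPIC,LITERAL,payments,READ,ALLOW,*";
Collection<AclBinding> bindings = AclCsv.parseCsv(csv);  // one AclBinding
String rendered = AclCsv.transformToCsvString(bindings); // header + that same line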

View file

@ -0,0 +1,93 @@
package com.provectus.kafka.ui.service.acl;
import com.google.common.collect.Sets;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.service.AdminClientService;
import java.util.List;
import java.util.Set;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.resource.ResourcePatternFilter;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@Slf4j
@Service
@RequiredArgsConstructor
public class AclsService {
private final AdminClientService adminClientService;
public Mono<Void> createAcl(KafkaCluster cluster, AclBinding aclBinding) {
var aclString = AclCsv.createAclString(aclBinding);
log.info("CREATING ACL: [{}]", aclString);
return adminClientService.get(cluster)
.flatMap(ac -> ac.createAcls(List.of(aclBinding)))
.doOnSuccess(v -> log.info("ACL CREATED: [{}]", aclString));
}
public Mono<Void> deleteAcl(KafkaCluster cluster, AclBinding aclBinding) {
var aclString = AclCsv.createAclString(aclBinding);
log.info("DELETING ACL: [{}]", aclString);
return adminClientService.get(cluster)
.flatMap(ac -> ac.deleteAcls(List.of(aclBinding)))
.doOnSuccess(v -> log.info("ACL DELETED: [{}]", aclString));
}
public Flux<AclBinding> listAcls(KafkaCluster cluster, ResourcePatternFilter filter) {
return adminClientService.get(cluster)
.flatMap(c -> c.listAcls(filter))
.flatMapIterable(acls -> acls);
}
public Mono<String> getAclAsCsvString(KafkaCluster cluster) {
return adminClientService.get(cluster)
.flatMap(c -> c.listAcls(ResourcePatternFilter.ANY))
.map(AclCsv::transformToCsvString);
}
public Mono<Void> syncAclWithAclCsv(KafkaCluster cluster, String csv) {
return adminClientService.get(cluster)
.flatMap(ac -> ac.listAcls(ResourcePatternFilter.ANY).flatMap(existingAclList -> {
var existingSet = Set.copyOf(existingAclList);
var newAcls = Set.copyOf(AclCsv.parseCsv(csv));
var toDelete = Sets.difference(existingSet, newAcls);
var toAdd = Sets.difference(newAcls, existingSet);
logAclSyncPlan(cluster, toAdd, toDelete);
if (toAdd.isEmpty() && toDelete.isEmpty()) {
return Mono.empty();
}
log.info("Starting new ACLs creation");
return ac.createAcls(toAdd)
.doOnSuccess(v -> {
log.info("{} new ACLs created", toAdd.size());
log.info("Starting ACLs deletion");
})
.then(ac.deleteAcls(toDelete)
.doOnSuccess(v -> log.info("{} ACLs deleted", toDelete.size())));
}));
}
private void logAclSyncPlan(KafkaCluster cluster, Set<AclBinding> toBeAdded, Set<AclBinding> toBeDeleted) {
log.info("'{}' cluster ACL sync plan: ", cluster.getName());
if (toBeAdded.isEmpty() && toBeDeleted.isEmpty()) {
log.info("Nothing to do, ACL is already in sync");
return;
}
if (!toBeAdded.isEmpty()) {
log.info("ACLs to be added ({}): ", toBeAdded.size());
for (AclBinding aclBinding : toBeAdded) {
log.info(" " + AclCsv.createAclString(aclBinding));
}
}
if (!toBeDeleted.isEmpty()) {
log.info("ACLs to be deleted ({}): ", toBeDeleted.size());
for (AclBinding aclBinding : toBeDeleted) {
log.info(" " + AclCsv.createAclString(aclBinding));
}
}
}
}

View file

@ -43,8 +43,7 @@ class TopicAnalysisStats {
Long max;
final UpdateDoublesSketch sizeSketch = DoublesSketch.builder().build();
void apply(byte[] bytes) {
int len = bytes.length;
void apply(int len) {
sum += len;
min = minNullable(min, len);
max = maxNullable(max, len);
@ -98,7 +97,7 @@ class TopicAnalysisStats {
if (rec.key() != null) {
byte[] keyBytes = rec.key().get();
keysSize.apply(keyBytes);
keysSize.apply(rec.serializedKeySize());
uniqKeys.update(keyBytes);
} else {
nullKeys++;
@ -106,7 +105,7 @@ class TopicAnalysisStats {
if (rec.value() != null) {
byte[] valueBytes = rec.value().get();
valuesSize.apply(valueBytes);
valuesSize.apply(rec.serializedValueSize());
uniqValues.update(valueBytes);
} else {
nullValues++;

View file

@ -44,7 +44,7 @@ public class DataMasking {
public static DataMasking create(@Nullable List<ClustersProperties.Masking> config) {
return new DataMasking(
Optional.ofNullable(config).orElse(List.of()).stream().map(property -> {
Preconditions.checkNotNull(property.getType(), "masking type not specifed");
Preconditions.checkNotNull(property.getType(), "masking type not specified");
Preconditions.checkArgument(
StringUtils.isNotEmpty(property.getTopicKeysPattern())
|| StringUtils.isNotEmpty(property.getTopicValuesPattern()),

View file

@ -0,0 +1,28 @@
package com.provectus.kafka.ui.service.masking.policies;
import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.exception.ValidationException;
import java.util.regex.Pattern;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
interface FieldsSelector {
static FieldsSelector create(ClustersProperties.Masking property) {
if (StringUtils.hasText(property.getFieldsNamePattern()) && !CollectionUtils.isEmpty(property.getFields())) {
throw new ValidationException("You can't provide both fieldNames & fieldsNamePattern for masking");
}
if (StringUtils.hasText(property.getFieldsNamePattern())) {
Pattern pattern = Pattern.compile(property.getFieldsNamePattern());
return f -> pattern.matcher(f).matches();
}
if (!CollectionUtils.isEmpty(property.getFields())) {
return f -> property.getFields().contains(f);
}
//no pattern, no field names - means all fields should be masked
return fieldName -> true;
}
boolean shouldBeMasked(String fieldName);
}
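
Since FieldsSelector is a functional interface, create() can return plain lambdas. A short usage sketch (property values illustrative):

// Illustrative: a selector built from a regex masks any field ending in "_id".
ClustersProperties.Masking props = new ClustersProperties.Masking();
props.setFieldsNamePattern(".*_id"); // setter generated by Lombok's @Data
FieldsSelector selector = FieldsSelector.create(props);
boolean masked = selector.shouldBeMasked("user_id"); // true
boolean kept = selector.shouldBeMasked("payload");   // false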

View file

@ -15,8 +15,8 @@ class Mask extends MaskingPolicy {
private final UnaryOperator<String> masker;
Mask(List<String> fieldNames, List<String> maskingChars) {
super(fieldNames);
Mask(FieldsSelector fieldsSelector, List<String> maskingChars) {
super(fieldsSelector);
this.masker = createMasker(maskingChars);
}
@ -38,22 +38,13 @@ class Mask extends MaskingPolicy {
for (int i = 0; i < input.length(); i++) {
int cp = input.codePointAt(i);
switch (Character.getType(cp)) {
case Character.SPACE_SEPARATOR:
case Character.LINE_SEPARATOR:
case Character.PARAGRAPH_SEPARATOR:
sb.appendCodePoint(cp); // keeping separators as-is
break;
case Character.UPPERCASE_LETTER:
sb.append(maskingChars.get(0));
break;
case Character.LOWERCASE_LETTER:
sb.append(maskingChars.get(1));
break;
case Character.DECIMAL_DIGIT_NUMBER:
sb.append(maskingChars.get(2));
break;
default:
sb.append(maskingChars.get(3));
case Character.SPACE_SEPARATOR,
Character.LINE_SEPARATOR,
Character.PARAGRAPH_SEPARATOR -> sb.appendCodePoint(cp); // keeping separators as-is
case Character.UPPERCASE_LETTER -> sb.append(maskingChars.get(0));
case Character.LOWERCASE_LETTER -> sb.append(maskingChars.get(1));
case Character.DECIMAL_DIGIT_NUMBER -> sb.append(maskingChars.get(2));
default -> sb.append(maskingChars.get(3));
}
}
return sb.toString();
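
The rewritten switch buckets each code point into one of four replacement strings while keeping separators as-is. Assuming default masking chars of "X", "x", "n" and "-" (an assumption about Mask.DEFAULT_PATTERN, in upper/lower/digit/other order):

// Illustrative, under the assumption maskingChars = List.of("X", "x", "n", "-"):
//   maskCharacters("Secret 123") -> "Xxxxxx nnn"  (the space is kept as-is)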

View file

@ -2,46 +2,36 @@ package com.provectus.kafka.ui.service.masking.policies;
import com.fasterxml.jackson.databind.node.ContainerNode;
import com.provectus.kafka.ui.config.ClustersProperties;
import java.util.List;
import lombok.RequiredArgsConstructor;
@RequiredArgsConstructor
public abstract class MaskingPolicy {
public static MaskingPolicy create(ClustersProperties.Masking property) {
List<String> fields = property.getFields() == null
? List.of() // empty list means that policy will be applied to all fields
: property.getFields();
switch (property.getType()) {
case REMOVE:
return new Remove(fields);
case REPLACE:
return new Replace(
fields,
property.getReplacement() == null
? Replace.DEFAULT_REPLACEMENT
: property.getReplacement()
);
case MASK:
return new Mask(
fields,
property.getPattern() == null
? Mask.DEFAULT_PATTERN
: property.getPattern()
);
default:
throw new IllegalStateException("Unknown policy type: " + property.getType());
}
FieldsSelector fieldsSelector = FieldsSelector.create(property);
return switch (property.getType()) {
case REMOVE -> new Remove(fieldsSelector);
case REPLACE -> new Replace(
fieldsSelector,
property.getReplacement() == null
? Replace.DEFAULT_REPLACEMENT
: property.getReplacement()
);
case MASK -> new Mask(
fieldsSelector,
property.getMaskingCharsReplacement() == null
? Mask.DEFAULT_PATTERN
: property.getMaskingCharsReplacement()
);
};
}
//----------------------------------------------------------------
// empty list means policy will be applied to all fields
private final List<String> fieldNames;
private final FieldsSelector fieldsSelector;
protected boolean fieldShouldBeMasked(String fieldName) {
return fieldNames.isEmpty() || fieldNames.contains(fieldName);
return fieldsSelector.shouldBeMasked(fieldName);
}
public abstract ContainerNode<?> applyToJsonContainer(ContainerNode<?> node);

View file

@ -4,12 +4,12 @@ import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ContainerNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import java.util.List;
class Remove extends MaskingPolicy {
Remove(List<String> fieldNames) {
super(fieldNames);
Remove(FieldsSelector fieldsSelector) {
super(fieldsSelector);
}
@Override

View file

@ -6,7 +6,6 @@ import com.fasterxml.jackson.databind.node.ContainerNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.google.common.base.Preconditions;
import java.util.List;
class Replace extends MaskingPolicy {
@ -14,8 +13,8 @@ class Replace extends MaskingPolicy {
private final String replacement;
Replace(List<String> fieldNames, String replacementString) {
super(fieldNames);
Replace(FieldsSelector fieldsSelector, String replacementString) {
super(fieldsSelector);
this.replacement = Preconditions.checkNotNull(replacementString);
}

View file

@ -61,7 +61,9 @@ class JmxSslSocketFactory extends javax.net.ssl.SSLSocketFactory {
} catch (Exception e) {
log.error("----------------------------------");
log.error("SSL can't be enabled for JMX retrieval. "
+ "Make sure your java app run with '--add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED' arg.", e);
+ "Make sure your java app run with '--add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED' arg. Err: {}",
e.getMessage());
log.trace("SSL can't be enabled for JMX retrieval", e);
log.error("----------------------------------");
}
SSL_JMX_SUPPORTED = sslJmxSupported;

View file

@ -12,6 +12,7 @@ import com.provectus.kafka.ui.model.rbac.AccessContext;
import com.provectus.kafka.ui.model.rbac.Permission;
import com.provectus.kafka.ui.model.rbac.Resource;
import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.Subject;
import com.provectus.kafka.ui.model.rbac.permission.ConnectAction;
import com.provectus.kafka.ui.model.rbac.permission.ConsumerGroupAction;
import com.provectus.kafka.ui.model.rbac.permission.SchemaAction;
@ -19,11 +20,11 @@ import com.provectus.kafka.ui.model.rbac.permission.TopicAction;
import com.provectus.kafka.ui.service.rbac.extractor.CognitoAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.GithubAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.GoogleAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.LdapAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.ProviderAuthorityExtractor;
import jakarta.annotation.PostConstruct;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.Predicate;
import java.util.regex.Pattern;
@ -34,6 +35,7 @@ import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.core.env.Environment;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.core.context.ReactiveSecurityContextHolder;
import org.springframework.security.core.context.SecurityContext;
@ -50,10 +52,11 @@ public class AccessControlService {
@Nullable
private final InMemoryReactiveClientRegistrationRepository clientRegistrationRepository;
private final RoleBasedAccessControlProperties properties;
private final Environment environment;
private boolean rbacEnabled = false;
private Set<ProviderAuthorityExtractor> extractors = Collections.emptySet();
private final RoleBasedAccessControlProperties properties;
private Set<ProviderAuthorityExtractor> oauthExtractors = Collections.emptySet();
@PostConstruct
public void init() {
@ -63,21 +66,26 @@ public class AccessControlService {
}
rbacEnabled = true;
this.extractors = properties.getRoles()
this.oauthExtractors = properties.getRoles()
.stream()
.map(role -> role.getSubjects()
.stream()
.map(provider -> switch (provider.getProvider()) {
.map(Subject::getProvider)
.distinct()
.map(provider -> switch (provider) {
case OAUTH_COGNITO -> new CognitoAuthorityExtractor();
case OAUTH_GOOGLE -> new GoogleAuthorityExtractor();
case OAUTH_GITHUB -> new GithubAuthorityExtractor();
case LDAP, LDAP_AD -> new LdapAuthorityExtractor();
}).collect(Collectors.toSet()))
default -> null;
})
.filter(Objects::nonNull)
.collect(Collectors.toSet()))
.flatMap(Set::stream)
.collect(Collectors.toSet());
if ((clientRegistrationRepository == null || !clientRegistrationRepository.iterator().hasNext())
&& !properties.getRoles().isEmpty()) {
if (!properties.getRoles().isEmpty()
&& "oauth2".equalsIgnoreCase(environment.getProperty("auth.type"))
&& (clientRegistrationRepository == null || !clientRegistrationRepository.iterator().hasNext())) {
log.error("Roles are configured but no authentication methods are present. Authentication might fail.");
}
}
@ -354,8 +362,8 @@ public class AccessControlService {
return isAccessible(Resource.KSQL, null, user, context, requiredActions);
}
public Set<ProviderAuthorityExtractor> getExtractors() {
return extractors;
public Set<ProviderAuthorityExtractor> getOauthExtractors() {
return oauthExtractors;
}
public List<Role> getRoles() {

View file

@ -1,23 +0,0 @@
package com.provectus.kafka.ui.service.rbac.extractor;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import lombok.extern.slf4j.Slf4j;
import reactor.core.publisher.Mono;
@Slf4j
public class LdapAuthorityExtractor implements ProviderAuthorityExtractor {
@Override
public boolean isApplicable(String provider) {
return false; // TODO #2752
}
@Override
public Mono<Set<String>> extract(AccessControlService acs, Object value, Map<String, Object> additionalParams) {
return Mono.just(Collections.emptySet()); // TODO #2752
}
}

View file

@ -0,0 +1,70 @@
package com.provectus.kafka.ui.service.rbac.extractor;
import com.provectus.kafka.ui.config.auth.LdapProperties;
import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.provider.Provider;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.ApplicationContext;
import org.springframework.ldap.core.DirContextOperations;
import org.springframework.ldap.core.support.BaseLdapPathContextSource;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;
import org.springframework.util.Assert;
@Slf4j
public class RbacLdapAuthoritiesExtractor extends DefaultLdapAuthoritiesPopulator {
private final AccessControlService acs;
private final LdapProperties props;
private final Function<Map<String, List<String>>, GrantedAuthority> authorityMapper = (record) -> {
String role = record.get(getGroupRoleAttribute()).get(0);
return new SimpleGrantedAuthority(role);
};
public RbacLdapAuthoritiesExtractor(ApplicationContext context) {
super(context.getBean(BaseLdapPathContextSource.class), null);
this.acs = context.getBean(AccessControlService.class);
this.props = context.getBean(LdapProperties.class);
}
@Override
public Set<GrantedAuthority> getAdditionalRoles(DirContextOperations user, String username) {
return acs.getRoles()
.stream()
.map(Role::getSubjects)
.flatMap(List::stream)
.filter(s -> s.getProvider().equals(Provider.LDAP))
.filter(s -> s.getType().equals("group"))
.flatMap(subject -> getRoles(subject.getValue(), user.getNameInNamespace(), username).stream())
.collect(Collectors.toSet());
}
private Set<GrantedAuthority> getRoles(String groupSearchBase, String userDn, String username) {
Assert.notNull(groupSearchBase, "groupSearchBase is empty");
log.trace(
"Searching for roles for user [{}] with DN [{}], groupRoleAttribute [{}] and filter [{}] in search base [{}]",
username, userDn, props.getGroupRoleAttribute(), getGroupSearchFilter(), groupSearchBase);
var ldapTemplate = getLdapTemplate();
ldapTemplate.setIgnoreNameNotFoundException(true);
Set<Map<String, List<String>>> userRoles = ldapTemplate.searchForMultipleAttributeValues(
groupSearchBase, getGroupSearchFilter(), new String[] {userDn, username},
new String[] {props.getGroupRoleAttribute()});
return userRoles.stream()
.map(authorityMapper)
.peek(a -> log.debug("Mapped role [{}] for user [{}]", a, username))
.collect(Collectors.toSet());
}
}

View file

@ -90,6 +90,7 @@ public class DynamicConfigOperations {
}
public PropertiesStructure getCurrentProperties() {
checkIfDynamicConfigEnabled();
return PropertiesStructure.builder()
.kafka(getNullableBean(ClustersProperties.class))
.rbac(getNullableBean(RoleBasedAccessControlProperties.class))
@ -112,11 +113,7 @@ public class DynamicConfigOperations {
}
public void persist(PropertiesStructure properties) {
if (!dynamicConfigEnabled()) {
throw new ValidationException(
"Dynamic config change is not allowed. "
+ "Set dynamic.config.enabled property to 'true' to enabled it.");
}
checkIfDynamicConfigEnabled();
properties.initAndValidate();
String yaml = serializeToYaml(properties);
@ -124,8 +121,9 @@ public class DynamicConfigOperations {
}
public Mono<Path> uploadConfigRelatedFile(FilePart file) {
String targetDirStr = (String) ctx.getEnvironment().getSystemEnvironment()
.getOrDefault(CONFIG_RELATED_UPLOADS_DIR_PROPERTY, CONFIG_RELATED_UPLOADS_DIR_DEFAULT);
checkIfDynamicConfigEnabled();
String targetDirStr = ctx.getEnvironment()
.getProperty(CONFIG_RELATED_UPLOADS_DIR_PROPERTY, CONFIG_RELATED_UPLOADS_DIR_DEFAULT);
Path targetDir = Path.of(targetDirStr);
if (!Files.exists(targetDir)) {
@ -149,6 +147,14 @@ public class DynamicConfigOperations {
.onErrorMap(th -> new FileUploadException(targetFilePath, th));
}
private void checkIfDynamicConfigEnabled() {
if (!dynamicConfigEnabled()) {
throw new ValidationException(
"Dynamic config change is not allowed. "
+ "Set dynamic.config.enabled property to 'true' to enabled it.");
}
}
@SneakyThrows
private void writeYamlToFile(String yaml, Path path) {
if (Files.isDirectory(path)) {
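A note on the uploads-directory lookup change above: Environment#getProperty resolves the key across all property sources (JVM system properties, environment variables, application config), while the old getSystemEnvironment() map only saw environment variables. A hedged sketch follows; the fallback path is an assumed example:
// Resolves "config.related.uploads.dir" from any property source; the
// default directory below is an assumption for illustration.
String targetDirStr = ctx.getEnvironment()
    .getProperty("config.related.uploads.dir", "/tmp/kafkaui-uploads");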

View file

@ -1,24 +1,21 @@
package com.provectus.kafka.ui.util;
import lombok.extern.slf4j.Slf4j;
import java.util.Optional;
@Slf4j
public final class KafkaVersion {
private KafkaVersion() {
}
public static float parse(String version) throws NumberFormatException {
log.trace("Parsing cluster version [{}]", version);
public static Optional<Float> parse(String version) {
try {
final String[] parts = version.split("\\.");
if (parts.length > 2) {
version = parts[0] + "." + parts[1];
}
return Float.parseFloat(version.split("-")[0]);
return Optional.of(Float.parseFloat(version.split("-")[0]));
} catch (Exception e) {
log.error("Conversion clusterVersion [{}] to float value failed", version, e);
throw e;
return Optional.empty();
}
}
}
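With the Optional-based signature, callers can branch on presence instead of catching NumberFormatException. A short usage sketch; the 2.8f feature threshold is an assumed example:
// "3.2.0-IV0" is trimmed to major.minor ("3.2") before parsing, yielding 3.2f.
Optional<Float> version = KafkaVersion.parse("3.2.0-IV0"); // Optional.of(3.2f)
boolean featureSupported = version.map(v -> v >= 2.8f).orElse(false); // assumed threshold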

View file

@ -2,6 +2,7 @@ package com.provectus.kafka.ui;
import com.provectus.kafka.ui.container.KafkaConnectContainer;
import com.provectus.kafka.ui.container.SchemaRegistryContainer;
import java.nio.file.Path;
import java.util.List;
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
@ -9,6 +10,7 @@ import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.function.ThrowingConsumer;
import org.junit.jupiter.api.io.TempDir;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient;
import org.springframework.boot.test.context.SpringBootTest;
@ -47,6 +49,9 @@ public abstract class AbstractIntegrationTest {
.dependsOn(kafka)
.dependsOn(schemaRegistry);
@TempDir
public static Path tmpDir;
static {
kafka.start();
schemaRegistry.start();
@ -76,6 +81,9 @@ public abstract class AbstractIntegrationTest {
System.setProperty("kafka.clusters.1.schemaRegistry", schemaRegistry.getUrl());
System.setProperty("kafka.clusters.1.kafkaConnect.0.name", "kafka-connect");
System.setProperty("kafka.clusters.1.kafkaConnect.0.address", kafkaConnect.getTarget());
System.setProperty("dynamic.config.enabled", "true");
System.setProperty("config.related.uploads.dir", tmpDir.toString());
}
}

View file

@ -0,0 +1,49 @@
package com.provectus.kafka.ui.controller;
import static org.assertj.core.api.Assertions.assertThat;
import com.provectus.kafka.ui.AbstractIntegrationTest;
import com.provectus.kafka.ui.model.UploadedFileInfoDTO;
import java.io.IOException;
import java.nio.file.Path;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.ClassPathResource;
import org.springframework.http.HttpEntity;
import org.springframework.http.client.MultipartBodyBuilder;
import org.springframework.test.web.reactive.server.WebTestClient;
import org.springframework.util.MultiValueMap;
class ApplicationConfigControllerTest extends AbstractIntegrationTest {
@Autowired
private WebTestClient webTestClient;
@Test
public void testUpload() throws IOException {
var fileToUpload = new ClassPathResource("/fileForUploadTest.txt", this.getClass());
UploadedFileInfoDTO result = webTestClient
.post()
.uri("/api/config/relatedfiles")
.bodyValue(generateBody(fileToUpload))
.exchange()
.expectStatus()
.isOk()
.expectBody(UploadedFileInfoDTO.class)
.returnResult()
.getResponseBody();
assertThat(result).isNotNull();
assertThat(result.getLocation()).isNotNull();
assertThat(Path.of(result.getLocation()))
.hasSameBinaryContentAs(fileToUpload.getFile().toPath());
}
private MultiValueMap<String, HttpEntity<?>> generateBody(ClassPathResource resource) {
MultipartBodyBuilder builder = new MultipartBodyBuilder();
builder.part("file", resource);
return builder.build();
}
}

View file

@ -0,0 +1,70 @@
package com.provectus.kafka.ui.service.acl;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import com.provectus.kafka.ui.exception.ValidationException;
import java.util.Collection;
import java.util.List;
import org.apache.kafka.common.acl.AccessControlEntry;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.acl.AclPermissionType;
import org.apache.kafka.common.resource.PatternType;
import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourceType;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
class AclCsvTest {
private static final List<AclBinding> TEST_BINDINGS = List.of(
new AclBinding(
new ResourcePattern(ResourceType.TOPIC, "*", PatternType.LITERAL),
new AccessControlEntry("User:test1", "*", AclOperation.READ, AclPermissionType.ALLOW)),
new AclBinding(
new ResourcePattern(ResourceType.GROUP, "group1", PatternType.PREFIXED),
new AccessControlEntry("User:test2", "localhost", AclOperation.DESCRIBE, AclPermissionType.DENY))
);
@ParameterizedTest
@ValueSource(strings = {
"Principal,ResourceType, PatternType, ResourceName,Operation,PermissionType,Host\n"
+ "User:test1,TOPIC,LITERAL,*,READ,ALLOW,*\n"
+ "User:test2,GROUP,PREFIXED,group1,DESCRIBE,DENY,localhost",
//without header
"User:test1,TOPIC,LITERAL,*,READ,ALLOW,*\n"
+ "\n"
+ "User:test2,GROUP,PREFIXED,group1,DESCRIBE,DENY,localhost"
+ "\n"
})
void parsesValidInputCsv(String csvString) {
Collection<AclBinding> parsed = AclCsv.parseCsv(csvString);
assertThat(parsed).containsExactlyInAnyOrderElementsOf(TEST_BINDINGS);
}
@ParameterizedTest
@ValueSource(strings = {
// columns > 7
"User:test1,TOPIC,LITERAL,*,READ,ALLOW,*,1,2,3,4",
// columns < 7
"User:test1,TOPIC,LITERAL,*",
// enum values are illegal
"User:test1,ILLEGAL,LITERAL,*,READ,ALLOW,*",
"User:test1,TOPIC,LITERAL,*,READ,ILLEGAL,*"
})
void throwsExceptionForInvalidInputCsv(String csvString) {
assertThatThrownBy(() -> AclCsv.parseCsv(csvString))
.isInstanceOf(ValidationException.class);
}
@Test
void transformAndParseUseSameFormat() {
String csv = AclCsv.transformToCsvString(TEST_BINDINGS);
Collection<AclBinding> parsedBindings = AclCsv.parseCsv(csv);
assertThat(parsedBindings).containsExactlyInAnyOrderElementsOf(TEST_BINDINGS);
}
}
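The round-trip test above pins down AclCsv's whole public contract; as a usage sketch:
// Grounded in the tests above: serialize bindings to CSV and parse them back.
String csv = AclCsv.transformToCsvString(TEST_BINDINGS); // header + one row per binding
Collection<AclBinding> parsed = AclCsv.parseCsv(csv);    // same bindings, order-insensitive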

View file

@ -0,0 +1,82 @@
package com.provectus.kafka.ui.service.acl;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.service.AdminClientService;
import com.provectus.kafka.ui.service.ReactiveAdminClient;
import java.util.Collection;
import java.util.List;
import org.apache.kafka.common.acl.AccessControlEntry;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.acl.AclPermissionType;
import org.apache.kafka.common.resource.PatternType;
import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourcePatternFilter;
import org.apache.kafka.common.resource.ResourceType;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import reactor.core.publisher.Mono;
class AclsServiceTest {
private static final KafkaCluster CLUSTER = KafkaCluster.builder().build();
private final ReactiveAdminClient adminClientMock = mock(ReactiveAdminClient.class);
private final AdminClientService adminClientService = mock(AdminClientService.class);
private final AclsService aclsService = new AclsService(adminClientService);
@BeforeEach
void initMocks() {
when(adminClientService.get(CLUSTER)).thenReturn(Mono.just(adminClientMock));
}
@Test
void testSyncAclWithAclCsv() {
var existingBinding1 = new AclBinding(
new ResourcePattern(ResourceType.TOPIC, "*", PatternType.LITERAL),
new AccessControlEntry("User:test1", "*", AclOperation.READ, AclPermissionType.ALLOW));
var existingBinding2 = new AclBinding(
new ResourcePattern(ResourceType.GROUP, "group1", PatternType.PREFIXED),
new AccessControlEntry("User:test2", "localhost", AclOperation.DESCRIBE, AclPermissionType.DENY));
var newBindingToBeAdded = new AclBinding(
new ResourcePattern(ResourceType.GROUP, "groupNew", PatternType.PREFIXED),
new AccessControlEntry("User:test3", "localhost", AclOperation.DESCRIBE, AclPermissionType.DENY));
when(adminClientMock.listAcls(ResourcePatternFilter.ANY))
.thenReturn(Mono.just(List.of(existingBinding1, existingBinding2)));
ArgumentCaptor<?> createdCaptor = ArgumentCaptor.forClass(Collection.class);
when(adminClientMock.createAcls((Collection<AclBinding>) createdCaptor.capture()))
.thenReturn(Mono.empty());
ArgumentCaptor<?> deletedCaptor = ArgumentCaptor.forClass(Collection.class);
when(adminClientMock.deleteAcls((Collection<AclBinding>) deletedCaptor.capture()))
.thenReturn(Mono.empty());
aclsService.syncAclWithAclCsv(
CLUSTER,
"Principal,ResourceType, PatternType, ResourceName,Operation,PermissionType,Host\n"
+ "User:test1,TOPIC,LITERAL,*,READ,ALLOW,*\n"
+ "User:test3,GROUP,PREFIXED,groupNew,DESCRIBE,DENY,localhost"
).block();
Collection<AclBinding> createdBindings = (Collection<AclBinding>) createdCaptor.getValue();
assertThat(createdBindings)
.hasSize(1)
.contains(newBindingToBeAdded);
Collection<AclBinding> deletedBindings = (Collection<AclBinding>) deletedCaptor.getValue();
assertThat(deletedBindings)
.hasSize(1)
.contains(existingBinding2);
}
}
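The sync behavior exercised above reduces to two set differences. A minimal standalone sketch of the assumed semantics, not the PR's actual implementation:
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.kafka.common.acl.AclBinding;
// Bindings present in the CSV but not in the cluster get created; bindings
// present in the cluster but absent from the CSV get deleted.
static List<AclBinding> toCreate(Set<AclBinding> existing, Set<AclBinding> fromCsv) {
  return fromCsv.stream().filter(b -> !existing.contains(b)).collect(Collectors.toList());
}
static List<AclBinding> toDelete(Set<AclBinding> existing, Set<AclBinding> fromCsv) {
  return existing.stream().filter(b -> !fromCsv.contains(b)).collect(Collectors.toList());
}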

View file

@ -0,0 +1,53 @@
package com.provectus.kafka.ui.service.masking.policies;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.exception.ValidationException;
import java.util.List;
import org.junit.jupiter.api.Test;
class FieldsSelectorTest {
@Test
void selectsFieldsDueToProvidedPattern() {
var properties = new ClustersProperties.Masking();
properties.setFieldsNamePattern("f1|f2");
var selector = FieldsSelector.create(properties);
assertThat(selector.shouldBeMasked("f1")).isTrue();
assertThat(selector.shouldBeMasked("f2")).isTrue();
assertThat(selector.shouldBeMasked("doesNotMatchPattern")).isFalse();
}
@Test
void selectsFieldsDueToProvidedFieldNames() {
var properties = new ClustersProperties.Masking();
properties.setFields(List.of("f1", "f2"));
var selector = FieldsSelector.create(properties);
assertThat(selector.shouldBeMasked("f1")).isTrue();
assertThat(selector.shouldBeMasked("f2")).isTrue();
assertThat(selector.shouldBeMasked("notInAList")).isFalse();
}
@Test
void selectAllFieldsIfNoPatternAndNoNamesProvided() {
var properties = new ClustersProperties.Masking();
var selector = FieldsSelector.create(properties);
assertThat(selector.shouldBeMasked("anyPropertyName")).isTrue();
}
@Test
void throwsExceptionIfBothFieldListAndPatternProvided() {
var properties = new ClustersProperties.Masking();
properties.setFieldsNamePattern("f1|f2");
properties.setFields(List.of("f3", "f4"));
assertThatThrownBy(() -> FieldsSelector.create(properties))
.isInstanceOf(ValidationException.class);
}
}
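From the tests above one can infer the shape of FieldsSelector. The sketch below is reconstructed from that test behavior and is not the PR's actual source:
import java.util.regex.Pattern;
import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.exception.ValidationException;
// Functional interface implied by the lambda usages in the masking tests.
interface FieldsSelector {
  boolean shouldBeMasked(String fieldName);
  static FieldsSelector create(ClustersProperties.Masking props) {
    boolean hasFields = props.getFields() != null && !props.getFields().isEmpty();
    if (props.getFieldsNamePattern() != null && hasFields) {
      throw new ValidationException("'fields' and 'fieldsNamePattern' are mutually exclusive");
    }
    if (props.getFieldsNamePattern() != null) {
      Pattern pattern = Pattern.compile(props.getFieldsNamePattern());
      return f -> pattern.matcher(f).matches();
    }
    if (hasFields) {
      return f -> props.getFields().contains(f);
    }
    return f -> true; // no criteria configured: apply policy to every field
  }
}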

View file

@ -15,35 +15,35 @@ import org.junit.jupiter.params.provider.MethodSource;
class MaskTest {
private static final List<String> TARGET_FIELDS = List.of("id", "name");
private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);
private static final List<String> PATTERN = List.of("X", "x", "n", "-");
@ParameterizedTest
@MethodSource
void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
Mask policy = new Mask(fields, PATTERN);
void testApplyToJsonContainer(FieldsSelector selector, ContainerNode<?> original, ContainerNode<?> expected) {
Mask policy = new Mask(selector, PATTERN);
assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
}
private static Stream<Arguments> testApplyToJsonContainer() {
return Stream.of(
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
parse("{ \"id\": \"nnn\", \"name\": { \"first\": \"Xxxxx\", \"surname\": \"Xxxxnnn-\"}}")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
parse("[{ \"id\": \"nnn\", \"f2\": 234}, { \"name\": \"n-n\", \"f2\": 345} ]")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Xxxxnnn-\"}}")
),
Arguments.of(
List.of(),
(FieldsSelector) (fieldName -> true),
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
parse("{ \"outer\": { \"f1\": \"Xxxxx\", \"name\": \"Xxxxnnn-\"}}")
)
@ -57,7 +57,7 @@ class MaskTest {
"null, xxxx"
})
void testApplyToString(String original, String expected) {
Mask policy = new Mask(List.of(), PATTERN);
Mask policy = new Mask(fieldName -> true, PATTERN);
assertThat(policy.applyToString(original)).isEqualTo(expected);
}

View file

@ -15,39 +15,39 @@ import org.junit.jupiter.params.provider.MethodSource;
class RemoveTest {
private static final List<String> TARGET_FIELDS = List.of("id", "name");
private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);
@ParameterizedTest
@MethodSource
void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
var policy = new Remove(fields);
void testApplyToJsonContainer(FieldsSelector fieldsSelector, ContainerNode<?> original, ContainerNode<?> expected) {
var policy = new Remove(fieldsSelector);
assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
}
private static Stream<Arguments> testApplyToJsonContainer() {
return Stream.of(
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
parse("{}")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
parse("[{ \"f2\": 234}, { \"f2\": 345} ]")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
parse("{ \"outer\": { \"f1\": \"James\"}}")
),
Arguments.of(
List.of(),
(FieldsSelector) (fieldName -> true),
parse("{ \"outer\": { \"f1\": \"v1\", \"f2\": \"v2\", \"inner\" : {\"if1\": \"iv1\"}}}"),
parse("{}")
),
Arguments.of(
List.of(),
(FieldsSelector) (fieldName -> true),
parse("[{ \"f1\": 123}, { \"f2\": \"1.2\"} ]"),
parse("[{}, {}]")
)
@ -66,7 +66,7 @@ class RemoveTest {
"null, null"
})
void testApplyToString(String original, String expected) {
var policy = new Remove(List.of());
var policy = new Remove(fieldName -> true);
assertThat(policy.applyToString(original)).isEqualTo(expected);
}
}

View file

@ -15,35 +15,35 @@ import org.junit.jupiter.params.provider.MethodSource;
class ReplaceTest {
private static final List<String> TARGET_FIELDS = List.of("id", "name");
private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);
private static final String REPLACEMENT_STRING = "***";
@ParameterizedTest
@MethodSource
void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
var policy = new Replace(fields, REPLACEMENT_STRING);
void testApplyToJsonContainer(FieldsSelector fieldsSelector, ContainerNode<?> original, ContainerNode<?> expected) {
var policy = new Replace(fieldsSelector, REPLACEMENT_STRING);
assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
}
private static Stream<Arguments> testApplyToJsonContainer() {
return Stream.of(
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
parse("{ \"id\": \"***\", \"name\": { \"first\": \"***\", \"surname\": \"***\"}}")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
parse("[{ \"id\": \"***\", \"f2\": 234}, { \"name\": \"***\", \"f2\": 345} ]")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"***\"}}")
),
Arguments.of(
List.of(),
(FieldsSelector) (fieldName -> true),
parse("{ \"outer\": { \"f1\": \"v1\", \"f2\": \"v2\", \"inner\" : {\"if1\": \"iv1\"}}}"),
parse("{ \"outer\": { \"f1\": \"***\", \"f2\": \"***\", \"inner\" : {\"if1\": \"***\"}}}}")
)
@ -62,7 +62,7 @@ class ReplaceTest {
"null, ***"
})
void testApplyToString(String original, String expected) {
var policy = new Replace(List.of(), REPLACEMENT_STRING);
var policy = new Replace(fieldName -> true, REPLACEMENT_STRING);
assertThat(policy.applyToString(original)).isEqualTo(expected);
}
}

View file

@ -0,0 +1 @@
some content goes here

View file

@ -101,9 +101,6 @@
<useSpringBoot3>true</useSpringBoot3>
<dateLibrary>java8</dateLibrary>
</configOptions>
<typeMappings>
<mapping>filepart=org.springframework.http.codec.multipart.FilePart</mapping>
</typeMappings>
</configuration>
</execution>
<execution>

View file

@ -1730,6 +1730,125 @@ paths:
404:
description: Not found
/api/clusters/{clusterName}/acls:
get:
tags:
- Acls
summary: listKafkaAcls
operationId: listAcls
parameters:
- name: clusterName
in: path
required: true
schema:
type: string
- name: resourceType
in: query
required: false
schema:
$ref: '#/components/schemas/KafkaAclResourceType'
- name: resourceName
in: query
required: false
schema:
type: string
- name: namePatternType
in: query
required: false
schema:
$ref: '#/components/schemas/KafkaAclNamePatternType'
responses:
200:
description: OK
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/KafkaAcl'
/api/clusters/{clusterName}/acl/csv:
get:
tags:
- Acls
summary: getAclAsCsv
operationId: getAclAsCsv
parameters:
- name: clusterName
in: path
required: true
schema:
type: string
responses:
200:
description: OK
content:
text/plain:
schema:
type: string
post:
tags:
- Acls
summary: syncAclsCsv
operationId: syncAclsCsv
parameters:
- name: clusterName
in: path
required: true
schema:
type: string
requestBody:
content:
text/plain:
schema:
type: string
responses:
200:
description: OK
/api/clusters/{clusterName}/acl:
post:
tags:
- Acls
summary: createAcl
operationId: createAcl
parameters:
- name: clusterName
in: path
required: true
schema:
type: string
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/KafkaAcl'
responses:
200:
description: OK
delete:
tags:
- Acls
summary: deleteAcl
operationId: deleteAcl
parameters:
- name: clusterName
in: path
required: true
schema:
type: string
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/KafkaAcl'
responses:
200:
description: OK
404:
description: Acl not found
/api/authorization:
get:
tags:
@ -1819,7 +1938,7 @@ paths:
properties:
file:
type: string
format: filepart
format: binary
responses:
200:
description: OK
@ -1972,6 +2091,8 @@ components:
- KAFKA_CONNECT
- KSQL_DB
- TOPIC_DELETION
- KAFKA_ACL_VIEW # get ACLs listing
- KAFKA_ACL_EDIT # create & delete ACLs
required:
- id
- name
@ -3342,6 +3463,62 @@ components:
- SCHEMA
- CONNECT
- KSQL
- ACL
KafkaAcl:
type: object
required: [resourceType, resourceName, namePatternType, principal, host, operation, permission]
properties:
resourceType:
$ref: '#/components/schemas/KafkaAclResourceType'
resourceName:
type: string # "*" if acl can be applied to any resource of given type
namePatternType:
$ref: '#/components/schemas/KafkaAclNamePatternType'
principal:
type: string
host:
type: string # "*" if acl can be applied to any resource of given type
operation:
type: string
enum:
- UNKNOWN # Unknown operation, need to update mapping code on BE
- ALL # Cluster, Topic, Group
- READ # Topic, Group
- WRITE # Topic, TransactionalId
- CREATE # Cluster, Topic
- DELETE # Topic, Group
- ALTER # Cluster, Topic
- DESCRIBE # Cluster, Topic, Group, TransactionalId, DelegationToken
- CLUSTER_ACTION # Cluster
- DESCRIBE_CONFIGS # Cluster, Topic
- ALTER_CONFIGS # Cluster, Topic
- IDEMPOTENT_WRITE # Cluster
- CREATE_TOKENS
- DESCRIBE_TOKENS
permission:
type: string
enum:
- ALLOW
- DENY
KafkaAclResourceType:
type: string
enum:
- UNKNOWN # Unknown resource type, need to update mapping code on BE
- TOPIC
- GROUP
- CLUSTER
- TRANSACTIONAL_ID
- DELEGATION_TOKEN
- USER
KafkaAclNamePatternType:
type: string
enum:
- MATCH
- LITERAL
- PREFIXED
RestartRequest:
type: object
@ -3632,7 +3809,9 @@ components:
type: array
items:
type: string
pattern:
fieldsNamePattern:
type: string
maskingCharsReplacement:
type: array
items:
type: string
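To make the new ACL endpoints concrete, a hedged client-side sketch; the host, cluster name, and field values are example assumptions, and the JSON body mirrors the KafkaAcl schema above:
import org.springframework.http.MediaType;
import org.springframework.web.reactive.function.client.WebClient;
// POST /api/clusters/{clusterName}/acl with a KafkaAcl body.
WebClient client = WebClient.create("http://localhost:8080"); // assumed host
client.post()
    .uri("/api/clusters/{clusterName}/acl", "local")
    .contentType(MediaType.APPLICATION_JSON)
    .bodyValue("""
        {
          "resourceType": "TOPIC",
          "resourceName": "orders",
          "namePatternType": "LITERAL",
          "principal": "User:test1",
          "host": "*",
          "operation": "READ",
          "permission": "ALLOW"
        }""")
    .retrieve()
    .toBodilessEntity()
    .block();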

View file

@ -29,72 +29,65 @@ public class SmokeBacklog extends BaseManualTest {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = KSQL_DB_SUITE_ID)
@QaseId(278)
@Suite(id = BROKERS_SUITE_ID)
@QaseId(331)
@Test
public void testCaseC() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = BROKERS_SUITE_ID)
@QaseId(331)
@Test
public void testCaseD() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = BROKERS_SUITE_ID)
@QaseId(332)
@Test
public void testCaseE() {
public void testCaseD() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = TOPICS_PROFILE_SUITE_ID)
@QaseId(335)
@Test
public void testCaseF() {
public void testCaseE() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = TOPICS_PROFILE_SUITE_ID)
@QaseId(336)
@Test
public void testCaseG() {
public void testCaseF() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = TOPICS_PROFILE_SUITE_ID)
@QaseId(343)
@Test
public void testCaseH() {
public void testCaseG() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = KSQL_DB_SUITE_ID)
@QaseId(344)
@Test
public void testCaseI() {
public void testCaseH() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = SCHEMAS_SUITE_ID)
@QaseId(345)
@Test
public void testCaseJ() {
public void testCaseI() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = SCHEMAS_SUITE_ID)
@QaseId(346)
@Test
public void testCaseK() {
public void testCaseJ() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = TOPICS_PROFILE_SUITE_ID)
@QaseId(347)
@Test
public void testCaseL() {
public void testCaseK() {
}
}

View file

@ -1,6 +1,7 @@
package com.provectus.kafka.ui.smokesuite.ksqldb;
import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlMenuTabs.STREAMS;
import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_STREAMS;
import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_TABLES;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
@ -80,8 +81,18 @@ public class KsqlDbTest extends BaseTest {
softly.assertAll();
}
@QaseId(86)
@QaseId(278)
@Test(priority = 4)
public void checkShowStreamsRequestExecution() {
navigateToKsqlDbAndExecuteRequest(SHOW_STREAMS.getQuery());
SoftAssert softly = new SoftAssert();
softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
softly.assertTrue(ksqlQueryForm.getItemByName(DEFAULT_STREAM.getName()).isVisible(), "getItemByName()");
softly.assertAll();
}
@QaseId(86)
@Test(priority = 5)
public void clearResultsForExecutedRequest() {
navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
SoftAssert softly = new SoftAssert();

View file

@ -1,4 +1,5 @@
import styled from 'styled-components';
import { Button } from 'components/common/Button/Button';
export const DiffWrapper = styled.div`
align-items: stretch;
@ -81,3 +82,6 @@ export const DiffTile = styled.div`
export const DiffVersionsSelect = styled.div`
width: 0.625em;
`;
export const BackButton = styled(Button)`
margin: 10px 9px;
`;

View file

@ -20,6 +20,7 @@ import useAppParams from 'lib/hooks/useAppParams';
import PageHeading from 'components/common/PageHeading/PageHeading';
import * as S from './Diff.styled';
import { BackButton } from './Diff.styled';
export interface DiffProps {
versions: SchemaSubject[];
@ -77,6 +78,13 @@ const Diff: React.FC<DiffProps> = ({ versions, areVersionsFetched }) => {
backText="Schema Registry"
backTo={clusterSchemasPath(clusterName)}
/>
<BackButton
buttonType="secondary"
buttonSize="S"
onClick={() => navigate(-1)}
>
Back
</BackButton>
<S.Section>
{areVersionsFetched ? (
<S.DiffBox>

View file

@ -3,6 +3,7 @@ import Diff, { DiffProps } from 'components/Schemas/Diff/Diff';
import { render, WithRoute } from 'lib/testHelpers';
import { screen } from '@testing-library/react';
import { clusterSchemaComparePath } from 'lib/paths';
import userEvent from '@testing-library/user-event';
import { versions } from './fixtures';
@ -142,4 +143,24 @@ describe('Diff', () => {
expect(select).toHaveTextContent(versions[0].version);
});
});
describe('Back button', () => {
beforeEach(() => {
setupComponent({
areVersionsFetched: true,
versions,
});
});
it('back button appears', () => {
const backButton = screen.getAllByRole('button', { name: 'Back' });
expect(backButton[0]).toBeInTheDocument();
});
it('click on back button', () => {
const backButton = screen.getAllByRole('button', { name: 'Back' });
userEvent.click(backButton[0]);
expect(screen.queryByRole('Back')).not.toBeInTheDocument();
});
});
});

View file

@ -142,6 +142,8 @@ const Message: React.FC<Props> = ({
timestampType={timestampType}
keySize={keySize}
contentSize={valueSize}
keySerde={keySerde}
valueSerde={valueSerde}
/>
)}
</>

View file

@ -3,7 +3,6 @@ import EditorViewer from 'components/common/EditorViewer/EditorViewer';
import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted';
import { SchemaType, TopicMessageTimestampTypeEnum } from 'generated-sources';
import { formatTimestamp } from 'lib/dateTimeHelpers';
import { useSearchParams } from 'react-router-dom';
import * as S from './MessageContent.styled';
@ -17,6 +16,8 @@ export interface MessageContentProps {
timestampType?: TopicMessageTimestampTypeEnum;
keySize?: number;
contentSize?: number;
keySerde?: string;
valueSerde?: string;
}
const MessageContent: React.FC<MessageContentProps> = ({
@ -27,12 +28,10 @@ const MessageContent: React.FC<MessageContentProps> = ({
timestampType,
keySize,
contentSize,
keySerde,
valueSerde,
}) => {
const [activeTab, setActiveTab] = React.useState<Tab>('content');
const [searchParams] = useSearchParams();
const keyFormat = searchParams.get('keySerde') || '';
const valueFormat = searchParams.get('valueSerde') || '';
const activeTabContent = () => {
switch (activeTab) {
case 'content':
@ -110,7 +109,7 @@ const MessageContent: React.FC<MessageContentProps> = ({
<S.Metadata>
<S.MetadataLabel>Key Serde</S.MetadataLabel>
<span>
<S.MetadataValue>{keyFormat}</S.MetadataValue>
<S.MetadataValue>{keySerde}</S.MetadataValue>
<S.MetadataMeta>
Size: <BytesFormatted value={keySize} />
</S.MetadataMeta>
@ -120,7 +119,7 @@ const MessageContent: React.FC<MessageContentProps> = ({
<S.Metadata>
<S.MetadataLabel>Value Serde</S.MetadataLabel>
<span>
<S.MetadataValue>{valueFormat}</S.MetadataValue>
<S.MetadataValue>{valueSerde}</S.MetadataValue>
<S.MetadataMeta>
Size: <BytesFormatted value={contentSize} />
</S.MetadataMeta>

View file

@ -20,6 +20,8 @@ const setupWrapper = (props?: Partial<MessageContentProps>) => {
headers={{ header: 'test' }}
timestamp={new Date(0)}
timestampType={TopicMessageTimestampTypeEnum.CREATE_TIME}
keySerde="SchemaRegistry"
valueSerde="Avro"
{...props}
/>
</tbody>
@ -27,42 +29,20 @@ const setupWrapper = (props?: Partial<MessageContentProps>) => {
);
};
const proto =
'syntax = "proto3";\npackage com.provectus;\n\nmessage TestProtoRecord {\n string f1 = 1;\n int32 f2 = 2;\n}\n';
global.TextEncoder = TextEncoder;
const searchParamsContentAVRO = new URLSearchParams({
keySerde: 'SchemaRegistry',
valueSerde: 'AVRO',
limit: '100',
});
const searchParamsContentJSON = new URLSearchParams({
keySerde: 'SchemaRegistry',
valueSerde: 'JSON',
limit: '100',
});
const searchParamsContentPROTOBUF = new URLSearchParams({
keySerde: 'SchemaRegistry',
valueSerde: 'PROTOBUF',
limit: '100',
});
describe('MessageContent screen', () => {
beforeEach(() => {
render(setupWrapper(), {
initialEntries: [`/messages?${searchParamsContentAVRO}`],
});
render(setupWrapper());
});
describe('renders', () => {
it('key format in document', () => {
describe('Checking keySerde and valueSerde', () => {
it('keySerde in document', () => {
expect(screen.getByText('SchemaRegistry')).toBeInTheDocument();
});
it('content format in document', () => {
expect(screen.getByText('AVRO')).toBeInTheDocument();
it('valueSerde in document', () => {
expect(screen.getByText('Avro')).toBeInTheDocument();
});
});
@ -98,42 +78,3 @@ describe('MessageContent screen', () => {
});
});
});
describe('checking content type depend on message type', () => {
it('renders component with message having JSON type', () => {
render(
setupWrapper({
messageContent: '{"data": "test"}',
}),
{ initialEntries: [`/messages?${searchParamsContentJSON}`] }
);
expect(screen.getByText('JSON')).toBeInTheDocument();
});
it('renders component with message having AVRO type', () => {
render(
setupWrapper({
messageContent: '{"data": "test"}',
}),
{ initialEntries: [`/messages?${searchParamsContentAVRO}`] }
);
expect(screen.getByText('AVRO')).toBeInTheDocument();
});
it('renders component with message having PROTOBUF type', () => {
render(
setupWrapper({
messageContent: proto,
}),
{ initialEntries: [`/messages?${searchParamsContentPROTOBUF}`] }
);
expect(screen.getByText('PROTOBUF')).toBeInTheDocument();
});
it('renders component with message having no type which is equal to having PROTOBUF type', () => {
render(
setupWrapper({
messageContent: '',
}),
{ initialEntries: [`/messages?${searchParamsContentPROTOBUF}`] }
);
expect(screen.getByText('PROTOBUF')).toBeInTheDocument();
});
});

View file

@ -8,15 +8,29 @@ export const Wrapper = styled.div`
export const Columns = styled.div`
margin: -0.75rem;
margin-bottom: 0.75rem;
display: flex;
flex-direction: column;
padding: 0.75rem;
gap: 8px;
@media screen and (min-width: 769px) {
display: flex;
}
`;
export const Column = styled.div`
flex-basis: 0;
flex-grow: 1;
flex-shrink: 1;
padding: 0.75rem;
export const Flex = styled.div`
display: flex;
flex-direction: row;
gap: 8px;
@media screen and (max-width: 1200px) {
flex-direction: column;
}
`;
export const FlexItem = styled.div`
width: 18rem;
@media screen and (max-width: 1450px) {
width: 50%;
}
@media screen and (max-width: 1200px) {
width: 100%;
}
`;

View file

@ -4,6 +4,7 @@ import { RouteParamsClusterTopic } from 'lib/paths';
import { Button } from 'components/common/Button/Button';
import Editor from 'components/common/Editor/Editor';
import Select, { SelectOption } from 'components/common/Select/Select';
import Switch from 'components/common/Switch/Switch';
import useAppParams from 'lib/hooks/useAppParams';
import { showAlert } from 'lib/errorHandling';
import { useSendMessage, useTopicDetails } from 'lib/hooks/api/topics';
@ -26,9 +27,12 @@ interface FormType {
partition: number;
keySerde: string;
valueSerde: string;
keepContents: boolean;
}
const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
const SendMessage: React.FC<{ closeSidebar: () => void }> = ({
closeSidebar,
}) => {
const { clusterName, topicName } = useAppParams<RouteParamsClusterTopic>();
const { data: topic } = useTopicDetails({ clusterName, topicName });
const { data: serdes = {} } = useSerdes({
@ -47,11 +51,13 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
handleSubmit,
formState: { isSubmitting },
control,
setValue,
} = useForm<FormType>({
mode: 'onChange',
defaultValues: {
...defaultValues,
partition: Number(partitionOptions[0].value),
keepContents: false,
},
});
@ -62,6 +68,7 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
content,
headers,
partition,
keepContents,
}: FormType) => {
let errors: string[] = [];
@ -110,7 +117,11 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
keySerde,
valueSerde,
});
onSubmit();
if (!keepContents) {
setValue('key', '');
setValue('content', '');
closeSidebar();
}
} catch (e) {
// do nothing
}
@ -120,7 +131,7 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
<S.Wrapper>
<form onSubmit={handleSubmit(submit)}>
<S.Columns>
<S.Column>
<S.FlexItem>
<InputLabel>Partition</InputLabel>
<Controller
control={control}
@ -137,47 +148,58 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
/>
)}
/>
</S.Column>
<S.Column>
<InputLabel>Key Serde</InputLabel>
</S.FlexItem>
<S.Flex>
<S.FlexItem>
<InputLabel>Key Serde</InputLabel>
<Controller
control={control}
name="keySerde"
render={({ field: { name, onChange, value } }) => (
<Select
id="selectKeySerdeOptions"
aria-labelledby="selectKeySerdeOptions"
name={name}
onChange={onChange}
minWidth="100%"
options={getSerdeOptions(serdes.key || [])}
value={value}
/>
)}
/>
</S.FlexItem>
<S.FlexItem>
<InputLabel>Value Serde</InputLabel>
<Controller
control={control}
name="valueSerde"
render={({ field: { name, onChange, value } }) => (
<Select
id="selectValueSerdeOptions"
aria-labelledby="selectValueSerdeOptions"
name={name}
onChange={onChange}
minWidth="100%"
options={getSerdeOptions(serdes.value || [])}
value={value}
/>
)}
/>
</S.FlexItem>
</S.Flex>
<div>
<Controller
control={control}
name="keySerde"
name="keepContents"
render={({ field: { name, onChange, value } }) => (
<Select
id="selectKeySerdeOptions"
aria-labelledby="selectKeySerdeOptions"
name={name}
onChange={onChange}
minWidth="100%"
options={getSerdeOptions(serdes.key || [])}
value={value}
/>
<Switch name={name} onChange={onChange} checked={value} />
)}
/>
</S.Column>
<S.Column>
<InputLabel>Value Serde</InputLabel>
<Controller
control={control}
name="valueSerde"
render={({ field: { name, onChange, value } }) => (
<Select
id="selectValueSerdeOptions"
aria-labelledby="selectValueSerdeOptions"
name={name}
onChange={onChange}
minWidth="100%"
options={getSerdeOptions(serdes.value || [])}
value={value}
/>
)}
/>
</S.Column>
<InputLabel>Keep contents</InputLabel>
</div>
</S.Columns>
<S.Columns>
<S.Column>
<div>
<InputLabel>Key</InputLabel>
<Controller
control={control}
@ -191,8 +213,8 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
/>
)}
/>
</S.Column>
<S.Column>
</div>
<div>
<InputLabel>Value</InputLabel>
<Controller
control={control}
@ -206,10 +228,10 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
/>
)}
/>
</S.Column>
</div>
</S.Columns>
<S.Columns>
<S.Column>
<div>
<InputLabel>Headers</InputLabel>
<Controller
control={control}
@ -224,7 +246,7 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
/>
)}
/>
</S.Column>
</div>
</S.Columns>
<Button
buttonSize="M"

View file

@ -49,7 +49,7 @@ const renderComponent = async () => {
const path = clusterTopicPath(clusterName, topicName);
await render(
<WithRoute path={clusterTopicPath()}>
<SendMessage onSubmit={mockOnSubmit} />
<SendMessage closeSidebar={mockOnSubmit} />
</WithRoute>,
{ initialEntries: [path] }
);

View file

@ -236,7 +236,7 @@ const Topic: React.FC = () => {
title="Produce Message"
>
<Suspense fallback={<PageLoader />}>
<SendMessage onSubmit={closeSidebar} />
<SendMessage closeSidebar={closeSidebar} />
</Suspense>
</SlidingSidebar>
</>

View file

@ -1,52 +1,38 @@
import React from 'react';
import WarningIcon from 'components/common/Icons/WarningIcon';
import { gitCommitPath } from 'lib/paths';
import { useActuatorInfo } from 'lib/hooks/api/actuatorInfo';
import { BUILD_VERSION_PATTERN } from 'lib/constants';
import { useLatestVersion } from 'lib/hooks/api/latestVersion';
import { formatTimestamp } from 'lib/dateTimeHelpers';
import * as S from './Version.styled';
import compareVersions from './compareVersions';
const Version: React.FC = () => {
const { data: actuatorInfo = {} } = useActuatorInfo();
const { data: latestVersionInfo = {} } = useLatestVersion();
const tag = actuatorInfo?.build?.version;
const commit = actuatorInfo?.git?.commit.id;
const { tag_name: latestTag } = latestVersionInfo;
const outdated = compareVersions(tag, latestTag);
const currentVersion = tag?.match(BUILD_VERSION_PATTERN)
? tag
: formatTimestamp(actuatorInfo?.build?.time);
if (!tag) return null;
const { buildTime, commitId, isLatestRelease } = latestVersionInfo?.build || {};
const { versionTag } = latestVersionInfo?.latestRelease || {};
return (
<S.Wrapper>
{!!outdated && (
{!isLatestRelease && (
<S.OutdatedWarning
title={`Your app version is outdated. Current latest version is ${latestTag}`}
title={`Your app version is outdated. Current latest version is ${versionTag}`}
>
<WarningIcon />
</S.OutdatedWarning>
)}
{commit && (
{commitId && (
<div>
<S.CurrentCommitLink
title="Current commit"
target="__blank"
href={gitCommitPath(commit)}
href={gitCommitPath(commitId)}
>
{commit}
{commitId}
</S.CurrentCommitLink>
</div>
)}
<S.CurrentVersion>{currentVersion}</S.CurrentVersion>
<S.CurrentVersion>{formatTimestamp(buildTime)}</S.CurrentVersion>
</S.Wrapper>
);
};

View file

@ -2,87 +2,40 @@ import React from 'react';
import { screen } from '@testing-library/dom';
import Version from 'components/Version/Version';
import { render } from 'lib/testHelpers';
import { formatTimestamp } from 'lib/dateTimeHelpers';
import { useActuatorInfo } from 'lib/hooks/api/actuatorInfo';
import { useLatestVersion } from 'lib/hooks/api/latestVersion';
import { actuatorInfoPayload } from 'lib/fixtures/actuatorInfo';
import { latestVersionPayload } from 'lib/fixtures/latestVersion';
import {
deprecatedVersionPayload,
latestVersionPayload,
} from 'lib/fixtures/latestVersion';
jest.mock('lib/hooks/api/actuatorInfo', () => ({
useActuatorInfo: jest.fn(),
}));
jest.mock('lib/hooks/api/latestVersion', () => ({
useLatestVersion: jest.fn(),
}));
describe('Version Component', () => {
const versionTag = 'v0.5.0';
const snapshotTag = 'test-SNAPSHOT';
const commitTag = 'befd3b328e2c9c7df57b0c5746561b2f7fee8813';
const commitId = '96a577a';
const actuatorVersionPayload = actuatorInfoPayload(versionTag);
const formattedTimestamp = formatTimestamp(actuatorVersionPayload.build.time);
describe('render latest version', () => {
beforeEach(() => {
(useLatestVersion as jest.Mock).mockImplementation(() => ({
data: latestVersionPayload,
}));
});
it('renders latest release version as current version', async () => {
render(<Version />);
expect(screen.getByText(commitId)).toBeInTheDocument();
});
beforeEach(() => {
(useActuatorInfo as jest.Mock).mockImplementation(() => ({
data: actuatorVersionPayload,
}));
it('should not show warning icon if it is the latest release', async () => {
render(<Version />);
expect(screen.queryByRole('img')).not.toBeInTheDocument();
});
});
it('shows warning icon if it is not the latest release', async () => {
(useLatestVersion as jest.Mock).mockImplementation(() => ({
data: latestVersionPayload,
data: deprecatedVersionPayload,
}));
});
describe('tag does not exist', () => {
it('does not render component', async () => {
(useActuatorInfo as jest.Mock).mockImplementation(() => ({
data: null,
}));
const { container } = render(<Version />);
expect(container.firstChild).toBeEmptyDOMElement();
});
});
describe('renders current version', () => {
it('renders release build version as current version', async () => {
render(<Version />);
expect(screen.getByText(versionTag)).toBeInTheDocument();
});
it('renders formatted timestamp as current version when version is commit', async () => {
(useActuatorInfo as jest.Mock).mockImplementation(() => ({
data: actuatorInfoPayload(commitTag),
}));
render(<Version />);
expect(screen.getByText(formattedTimestamp)).toBeInTheDocument();
});
it('renders formatted timestamp as current version when version contains -SNAPSHOT', async () => {
(useActuatorInfo as jest.Mock).mockImplementation(() => ({
data: actuatorInfoPayload(snapshotTag),
}));
render(<Version />);
expect(screen.getByText(formattedTimestamp)).toBeInTheDocument();
});
});
describe('outdated build version', () => {
it('renders warning message', async () => {
(useActuatorInfo as jest.Mock).mockImplementation(() => ({
data: actuatorInfoPayload('v0.3.0'),
}));
render(<Version />);
expect(
screen.getByTitle(
`Your app version is outdated. Current latest version is ${latestVersionPayload.tag_name}`
)
).toBeInTheDocument();
});
});
describe('current commit id with link', () => {
it('renders', async () => {
render(<Version />);
expect(
screen.getByText(actuatorVersionPayload.git.commit.id)
).toBeInTheDocument();
});
render(<Version />);
expect(screen.getByRole('img')).toBeInTheDocument();
});
});

View file

@ -13,6 +13,7 @@ const WarningIcon: React.FC = () => {
return (
<WarningIconContainer>
<svg
role="img"
width="14"
height="13"
viewBox="0 0 14 13"

View file

@ -6,7 +6,7 @@ export const Wrapper = styled.div<{ $open?: boolean }>(
position: fixed;
top: ${theme.layout.navBarHeight};
bottom: 0;
width: 60vw;
width: 37vw;
right: calc(${$open ? '0px' : theme.layout.rightSidebarWidth} * -1);
box-shadow: -1px 0px 10px 0px rgba(0, 0, 0, 0.2);
transition: right 0.3s linear;

View file

@ -1,12 +0,0 @@
export const actuatorInfoPayload = (
version = 'befd3b328e2c9c7df57b0c5746561b2f7fee8813'
) => ({
git: { commit: { id: 'befd3b3' } },
build: {
artifact: 'kafka-ui-api',
name: 'kafka-ui-api',
time: '2022-09-15T09:52:21.753Z',
version,
group: 'com.provectus',
},
});

View file

@ -1,3 +1,16 @@
export const latestVersionPayload = {
tag_name: 'v0.4.0',
export const deprecatedVersionPayload = {
build: {
buildTime: '2023-04-14T09:47:35.463Z',
commitId: '96a577a',
isLatestRelease: false,
version: '96a577a98c6069376c5d22ed49cffd3739f1bbdc',
},
};
export const latestVersionPayload = {
build: {
buildTime: '2023-04-14T09:47:35.463Z',
commitId: '96a577a',
isLatestRelease: true,
version: '96a577a98c6069376c5d22ed49cffd3739f1bbdc',
},
};

View file

@ -1,17 +0,0 @@
import fetchMock from 'fetch-mock';
import * as hooks from 'lib/hooks/api/actuatorInfo';
import { expectQueryWorks, renderQueryHook } from 'lib/testHelpers';
import { actuatorInfoPayload } from 'lib/fixtures/actuatorInfo';
const actuatorInfoPath = '/actuator/info';
describe('Actuator info hooks', () => {
beforeEach(() => fetchMock.restore());
describe('useActuatorInfo', () => {
it('returns the correct data', async () => {
const mock = fetchMock.getOnce(actuatorInfoPath, actuatorInfoPayload());
const { result } = renderQueryHook(() => hooks.useActuatorInfo());
await expectQueryWorks(mock, result);
});
});
});

View file

@ -1,18 +1,16 @@
import fetchMock from 'fetch-mock';
import { expectQueryWorks, renderQueryHook } from 'lib/testHelpers';
import * as hooks from 'lib/hooks/api/latestVersion';
import { GIT_REPO_LATEST_RELEASE_LINK } from 'lib/constants';
import { latestVersionPayload } from 'lib/fixtures/latestVersion';
import { useLatestVersion } from 'lib/hooks/api/latestVersion';
const latestVersionPath = '/api/info';
describe('Latest version hooks', () => {
beforeEach(() => fetchMock.restore());
describe('useLatestVersion', () => {
it('returns the correct data', async () => {
const mock = fetchMock.getOnce(
GIT_REPO_LATEST_RELEASE_LINK,
latestVersionPayload
);
const { result } = renderQueryHook(() => hooks.useLatestVersion());
const mock = fetchMock.getOnce(latestVersionPath, latestVersionPayload);
const { result } = renderQueryHook(() => useLatestVersion());
await expectQueryWorks(mock, result);
});
});

View file

@ -1,19 +0,0 @@
import { useQuery } from '@tanstack/react-query';
import { BASE_PARAMS, QUERY_REFETCH_OFF_OPTIONS } from 'lib/constants';
const fetchActuatorInfo = async () => {
const data = await fetch(
`${BASE_PARAMS.basePath}/actuator/info`,
BASE_PARAMS
).then((res) => res.json());
return data;
};
export function useActuatorInfo() {
return useQuery(
['actuatorInfo'],
fetchActuatorInfo,
QUERY_REFETCH_OFF_OPTIONS
);
}

View file

@ -1,21 +1,19 @@
import { useQuery } from '@tanstack/react-query';
import {
QUERY_REFETCH_OFF_OPTIONS,
GIT_REPO_LATEST_RELEASE_LINK,
} from 'lib/constants';
import { BASE_PARAMS, QUERY_REFETCH_OFF_OPTIONS } from 'lib/constants';
const fetchLatestVersion = async () => {
const data = await fetch(GIT_REPO_LATEST_RELEASE_LINK).then((res) =>
res.json()
);
const fetchLatestVersionInfo = async () => {
const data = await fetch(
`${BASE_PARAMS.basePath}/api/info`,
BASE_PARAMS
).then((res) => res.json());
return data;
};
export function useLatestVersion() {
return useQuery(
['latestVersion'],
fetchLatestVersion,
['versionInfo'],
fetchLatestVersionInfo,
QUERY_REFETCH_OFF_OPTIONS
);
}