Merge remote-tracking branch 'origin/master' into issues/2844
commit d99e9e9775
41 changed files with 1168 additions and 157 deletions
@@ -11,4 +11,8 @@ KafkaClient {
   user_admin="admin-secret";
 };
 
-Client {};
+Client {
+    org.apache.zookeeper.server.auth.DigestLoginModule required
+    username="zkuser"
+    password="zkuserpassword";
+};
documentation/compose/jaas/zookeeper_jaas.conf (new file, +4)
@@ -0,0 +1,4 @@
+Server {
+    org.apache.zookeeper.server.auth.DigestLoginModule required
+    user_zkuser="zkuserpassword";
+};
documentation/compose/kafka-ui-acl-with-zk.yaml (new file, +59)
@@ -0,0 +1,59 @@
+---
+version: '2'
+services:
+
+  kafka-ui:
+    container_name: kafka-ui
+    image: provectuslabs/kafka-ui:latest
+    ports:
+      - 8080:8080
+    depends_on:
+      - zookeeper
+      - kafka
+    environment:
+      KAFKA_CLUSTERS_0_NAME: local
+      KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092
+      KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SASL_PLAINTEXT
+      KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM: PLAIN
+      KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin-secret";'
+
+  zookeeper:
+    image: wurstmeister/zookeeper:3.4.6
+    environment:
+      JVMFLAGS: "-Djava.security.auth.login.config=/etc/zookeeper/zookeeper_jaas.conf"
+    volumes:
+      - ./jaas/zookeeper_jaas.conf:/etc/zookeeper/zookeeper_jaas.conf
+    ports:
+      - 2181:2181
+
+  kafka:
+    image: confluentinc/cp-kafka:7.2.1
+    hostname: kafka
+    container_name: kafka
+    ports:
+      - "9092:9092"
+      - "9997:9997"
+    environment:
+      KAFKA_BROKER_ID: 1
+      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,SASL_PLAINTEXT:SASL_PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
+      KAFKA_ADVERTISED_LISTENERS: 'SASL_PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092'
+      KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/jaas/kafka_server.conf"
+      KAFKA_AUTHORIZER_CLASS_NAME: "kafka.security.authorizer.AclAuthorizer"
+      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+      KAFKA_JMX_PORT: 9997
+      KAFKA_JMX_HOSTNAME: localhost
+      KAFKA_NODE_ID: 1
+      KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka:29093'
+      KAFKA_LISTENERS: 'SASL_PLAINTEXT://kafka:29092,CONTROLLER://kafka:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+      KAFKA_INTER_BROKER_LISTENER_NAME: 'SASL_PLAINTEXT'
+      KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
+      KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: 'PLAIN'
+      KAFKA_SECURITY_PROTOCOL: 'SASL_PLAINTEXT'
+      KAFKA_SUPER_USERS: 'User:admin'
+    volumes:
+      - ./scripts/update_run.sh:/tmp/update_run.sh
+      - ./jaas:/etc/kafka/jaas
@@ -131,8 +131,9 @@ public class ClustersProperties {
   @Data
   public static class Masking {
     Type type;
-    List<String> fields; //if null or empty list - policy will be applied to all fields
-    List<String> pattern; //used when type=MASK
+    List<String> fields;
+    String fieldsNamePattern;
+    List<String> maskingCharsReplacement; //used when type=MASK
     String replacement; //used when type=REPLACE
     String topicKeysPattern;
     String topicValuesPattern;
com/provectus/kafka/ui/controller/AclsController.java (new file, +115)
@@ -0,0 +1,115 @@
+package com.provectus.kafka.ui.controller;
+
+import com.provectus.kafka.ui.api.AclsApi;
+import com.provectus.kafka.ui.mapper.ClusterMapper;
+import com.provectus.kafka.ui.model.KafkaAclDTO;
+import com.provectus.kafka.ui.model.KafkaAclNamePatternTypeDTO;
+import com.provectus.kafka.ui.model.KafkaAclResourceTypeDTO;
+import com.provectus.kafka.ui.model.rbac.AccessContext;
+import com.provectus.kafka.ui.model.rbac.permission.AclAction;
+import com.provectus.kafka.ui.service.acl.AclsService;
+import com.provectus.kafka.ui.service.rbac.AccessControlService;
+import java.util.Optional;
+import lombok.RequiredArgsConstructor;
+import org.apache.kafka.common.resource.PatternType;
+import org.apache.kafka.common.resource.ResourcePatternFilter;
+import org.apache.kafka.common.resource.ResourceType;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.RestController;
+import org.springframework.web.server.ServerWebExchange;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+@RestController
+@RequiredArgsConstructor
+public class AclsController extends AbstractController implements AclsApi {
+
+  private final AclsService aclsService;
+  private final AccessControlService accessControlService;
+
+  @Override
+  public Mono<ResponseEntity<Void>> createAcl(String clusterName, Mono<KafkaAclDTO> kafkaAclDto,
+                                              ServerWebExchange exchange) {
+    AccessContext context = AccessContext.builder()
+        .cluster(clusterName)
+        .aclActions(AclAction.EDIT)
+        .build();
+
+    return accessControlService.validateAccess(context)
+        .then(kafkaAclDto)
+        .map(ClusterMapper::toAclBinding)
+        .flatMap(binding -> aclsService.createAcl(getCluster(clusterName), binding))
+        .thenReturn(ResponseEntity.ok().build());
+  }
+
+  @Override
+  public Mono<ResponseEntity<Void>> deleteAcl(String clusterName, Mono<KafkaAclDTO> kafkaAclDto,
+                                              ServerWebExchange exchange) {
+    AccessContext context = AccessContext.builder()
+        .cluster(clusterName)
+        .aclActions(AclAction.EDIT)
+        .build();
+
+    return accessControlService.validateAccess(context)
+        .then(kafkaAclDto)
+        .map(ClusterMapper::toAclBinding)
+        .flatMap(binding -> aclsService.deleteAcl(getCluster(clusterName), binding))
+        .thenReturn(ResponseEntity.ok().build());
+  }
+
+  @Override
+  public Mono<ResponseEntity<Flux<KafkaAclDTO>>> listAcls(String clusterName,
+                                                          KafkaAclResourceTypeDTO resourceTypeDto,
+                                                          String resourceName,
+                                                          KafkaAclNamePatternTypeDTO namePatternTypeDto,
+                                                          ServerWebExchange exchange) {
+    AccessContext context = AccessContext.builder()
+        .cluster(clusterName)
+        .aclActions(AclAction.VIEW)
+        .build();
+
+    var resourceType = Optional.ofNullable(resourceTypeDto)
+        .map(ClusterMapper::mapAclResourceTypeDto)
+        .orElse(ResourceType.ANY);
+
+    var namePatternType = Optional.ofNullable(namePatternTypeDto)
+        .map(ClusterMapper::mapPatternTypeDto)
+        .orElse(PatternType.ANY);
+
+    var filter = new ResourcePatternFilter(resourceType, resourceName, namePatternType);
+
+    return accessControlService.validateAccess(context).then(
+        Mono.just(
+            ResponseEntity.ok(
+                aclsService.listAcls(getCluster(clusterName), filter)
+                    .map(ClusterMapper::toKafkaAclDto)))
+    );
+  }
+
+  @Override
+  public Mono<ResponseEntity<String>> getAclAsCsv(String clusterName, ServerWebExchange exchange) {
+    AccessContext context = AccessContext.builder()
+        .cluster(clusterName)
+        .aclActions(AclAction.VIEW)
+        .build();
+
+    return accessControlService.validateAccess(context).then(
+        aclsService.getAclAsCsvString(getCluster(clusterName))
+            .map(ResponseEntity::ok)
+            .flatMap(Mono::just)
+    );
+  }
+
+  @Override
+  public Mono<ResponseEntity<Void>> syncAclsCsv(String clusterName, Mono<String> csvMono, ServerWebExchange exchange) {
+    AccessContext context = AccessContext.builder()
+        .cluster(clusterName)
+        .aclActions(AclAction.EDIT)
+        .build();
+
+    return accessControlService.validateAccess(context)
+        .then(csvMono)
+        .flatMap(csv -> aclsService.syncAclWithAclCsv(getCluster(clusterName), csv))
+        .thenReturn(ResponseEntity.ok().build());
+  }
+}
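The request routes themselves come from the generated AclsApi interface (OpenAPI), so they are not visible in this diff. Assuming the conventional /api/clusters/{clusterName}/acls path, a WebTestClient probe of the list endpoint might look like the following sketch (the URI is an assumption, not taken from this commit):

// Hypothetical route; the authoritative path lives in the OpenAPI contract behind AclsApi.
webTestClient.get()
    .uri("/api/clusters/{clusterName}/acls", "local")
    .exchange()
    .expectStatus().isOk()
    .expectBodyList(KafkaAclDTO.class);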
@@ -27,6 +27,7 @@ import org.mapstruct.Mapper;
 import org.mapstruct.factory.Mappers;
 import org.springframework.http.ResponseEntity;
 import org.springframework.http.codec.multipart.FilePart;
+import org.springframework.http.codec.multipart.Part;
 import org.springframework.web.bind.annotation.RestController;
 import org.springframework.web.server.ServerWebExchange;
 import reactor.core.publisher.Flux;

@@ -92,16 +93,19 @@ public class ApplicationConfigController implements ApplicationConfigApi {
   }
 
   @Override
-  public Mono<ResponseEntity<UploadedFileInfoDTO>> uploadConfigRelatedFile(FilePart file, ServerWebExchange exchange) {
+  public Mono<ResponseEntity<UploadedFileInfoDTO>> uploadConfigRelatedFile(Flux<Part> fileFlux,
+                                                                           ServerWebExchange exchange) {
     return accessControlService
         .validateAccess(
             AccessContext.builder()
                 .applicationConfigActions(EDIT)
                 .build()
         )
-        .then(dynamicConfigOperations.uploadConfigRelatedFile(file))
-        .map(path -> new UploadedFileInfoDTO().location(path.toString()))
-        .map(ResponseEntity::ok);
+        .then(fileFlux.single())
+        .flatMap(file ->
+            dynamicConfigOperations.uploadConfigRelatedFile((FilePart) file)
+                .map(path -> new UploadedFileInfoDTO().location(path.toString()))
+                .map(ResponseEntity::ok));
   }
 
   @Override
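The handler now receives the raw multipart Flux<Part> and reduces it with single(), so a request with zero or multiple file parts fails eagerly instead of being silently mis-bound. A small sketch of single()'s semantics (values are illustrative):

import reactor.core.publisher.Flux;

Flux.just("part").single().subscribe(System.out::println);             // emits "part"
Flux.just("a", "b").single().subscribe(v -> { }, System.err::println); // errors: more than one element
Flux.empty().single().subscribe(v -> { }, System.err::println);        // errors: source was empty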
@@ -20,6 +20,9 @@ import com.provectus.kafka.ui.model.InternalPartition;
 import com.provectus.kafka.ui.model.InternalReplica;
 import com.provectus.kafka.ui.model.InternalTopic;
 import com.provectus.kafka.ui.model.InternalTopicConfig;
+import com.provectus.kafka.ui.model.KafkaAclDTO;
+import com.provectus.kafka.ui.model.KafkaAclNamePatternTypeDTO;
+import com.provectus.kafka.ui.model.KafkaAclResourceTypeDTO;
 import com.provectus.kafka.ui.model.MetricDTO;
 import com.provectus.kafka.ui.model.Metrics;
 import com.provectus.kafka.ui.model.PartitionDTO;

@@ -27,12 +30,18 @@ import com.provectus.kafka.ui.model.ReplicaDTO;
 import com.provectus.kafka.ui.model.TopicConfigDTO;
 import com.provectus.kafka.ui.model.TopicDTO;
 import com.provectus.kafka.ui.model.TopicDetailsDTO;
 import com.provectus.kafka.ui.service.masking.DataMasking;
 import com.provectus.kafka.ui.service.metrics.RawMetric;
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
 import org.apache.kafka.clients.admin.ConfigEntry;
+import org.apache.kafka.common.acl.AccessControlEntry;
+import org.apache.kafka.common.acl.AclBinding;
+import org.apache.kafka.common.acl.AclOperation;
+import org.apache.kafka.common.acl.AclPermissionType;
+import org.apache.kafka.common.resource.PatternType;
+import org.apache.kafka.common.resource.ResourcePattern;
+import org.apache.kafka.common.resource.ResourceType;
 import org.mapstruct.Mapper;
 import org.mapstruct.Mapping;

@@ -109,8 +118,74 @@ public interface ClusterMapper {
     return brokerDiskUsage;
   }
 
   default DataMasking map(List<ClustersProperties.Masking> maskingProperties) {
     return DataMasking.create(maskingProperties);
   }
 
+  static KafkaAclDTO.OperationEnum mapAclOperation(AclOperation operation) {
+    return switch (operation) {
+      case ALL -> KafkaAclDTO.OperationEnum.ALL;
+      case READ -> KafkaAclDTO.OperationEnum.READ;
+      case WRITE -> KafkaAclDTO.OperationEnum.WRITE;
+      case CREATE -> KafkaAclDTO.OperationEnum.CREATE;
+      case DELETE -> KafkaAclDTO.OperationEnum.DELETE;
+      case ALTER -> KafkaAclDTO.OperationEnum.ALTER;
+      case DESCRIBE -> KafkaAclDTO.OperationEnum.DESCRIBE;
+      case CLUSTER_ACTION -> KafkaAclDTO.OperationEnum.CLUSTER_ACTION;
+      case DESCRIBE_CONFIGS -> KafkaAclDTO.OperationEnum.DESCRIBE_CONFIGS;
+      case ALTER_CONFIGS -> KafkaAclDTO.OperationEnum.ALTER_CONFIGS;
+      case IDEMPOTENT_WRITE -> KafkaAclDTO.OperationEnum.IDEMPOTENT_WRITE;
+      case CREATE_TOKENS -> KafkaAclDTO.OperationEnum.CREATE_TOKENS;
+      case DESCRIBE_TOKENS -> KafkaAclDTO.OperationEnum.DESCRIBE_TOKENS;
+      case ANY -> throw new IllegalArgumentException("ANY operation can be only part of filter");
+      case UNKNOWN -> KafkaAclDTO.OperationEnum.UNKNOWN;
+    };
+  }
+
+  static KafkaAclResourceTypeDTO mapAclResourceType(ResourceType resourceType) {
+    return switch (resourceType) {
+      case CLUSTER -> KafkaAclResourceTypeDTO.CLUSTER;
+      case TOPIC -> KafkaAclResourceTypeDTO.TOPIC;
+      case GROUP -> KafkaAclResourceTypeDTO.GROUP;
+      case DELEGATION_TOKEN -> KafkaAclResourceTypeDTO.DELEGATION_TOKEN;
+      case TRANSACTIONAL_ID -> KafkaAclResourceTypeDTO.TRANSACTIONAL_ID;
+      case USER -> KafkaAclResourceTypeDTO.USER;
+      case ANY -> throw new IllegalArgumentException("ANY type can be only part of filter");
+      case UNKNOWN -> KafkaAclResourceTypeDTO.UNKNOWN;
+    };
+  }
+
+  static ResourceType mapAclResourceTypeDto(KafkaAclResourceTypeDTO dto) {
+    return ResourceType.valueOf(dto.name());
+  }
+
+  static PatternType mapPatternTypeDto(KafkaAclNamePatternTypeDTO dto) {
+    return PatternType.valueOf(dto.name());
+  }
+
+  static AclBinding toAclBinding(KafkaAclDTO dto) {
+    return new AclBinding(
+        new ResourcePattern(
+            mapAclResourceTypeDto(dto.getResourceType()),
+            dto.getResourceName(),
+            mapPatternTypeDto(dto.getNamePatternType())
+        ),
+        new AccessControlEntry(
+            dto.getPrincipal(),
+            dto.getHost(),
+            AclOperation.valueOf(dto.getOperation().name()),
+            AclPermissionType.valueOf(dto.getPermission().name())
+        )
+    );
+  }
+
+  static KafkaAclDTO toKafkaAclDto(AclBinding binding) {
+    var pattern = binding.pattern();
+    var filter = binding.toFilter().entryFilter();
+    return new KafkaAclDTO()
+        .resourceType(mapAclResourceType(pattern.resourceType()))
+        .resourceName(pattern.name())
+        .namePatternType(KafkaAclNamePatternTypeDTO.fromValue(pattern.patternType().name()))
+        .principal(filter.principal())
+        .host(filter.host())
+        .operation(mapAclOperation(filter.operation()))
+        .permission(KafkaAclDTO.PermissionEnum.fromValue(filter.permissionType().name()));
+  }
+
 }
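toAclBinding and toKafkaAclDto are intended to be mutual inverses. A quick round-trip sketch, assuming the fluent setters of the generated DTO shown above (principal and resource names are hypothetical):

KafkaAclDTO dto = new KafkaAclDTO()
    .resourceType(KafkaAclResourceTypeDTO.TOPIC)
    .resourceName("orders")
    .namePatternType(KafkaAclNamePatternTypeDTO.LITERAL)
    .principal("User:svc-orders")
    .host("*")
    .operation(KafkaAclDTO.OperationEnum.READ)
    .permission(KafkaAclDTO.PermissionEnum.ALLOW);

AclBinding binding = ClusterMapper.toAclBinding(dto);
KafkaAclDTO roundTripped = ClusterMapper.toKafkaAclDto(binding); // should equal dto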
@@ -4,5 +4,7 @@ public enum ClusterFeature {
   KAFKA_CONNECT,
   KSQL_DB,
   SCHEMA_REGISTRY,
-  TOPIC_DELETION
+  TOPIC_DELETION,
+  KAFKA_ACL_VIEW,
+  KAFKA_ACL_EDIT
 }
@@ -1,5 +1,6 @@
 package com.provectus.kafka.ui.model.rbac;
 
+import com.provectus.kafka.ui.model.rbac.permission.AclAction;
 import com.provectus.kafka.ui.model.rbac.permission.ApplicationConfigAction;
 import com.provectus.kafka.ui.model.rbac.permission.ClusterConfigAction;
 import com.provectus.kafka.ui.model.rbac.permission.ConnectAction;

@@ -37,6 +38,8 @@ public class AccessContext {
 
   Collection<KsqlAction> ksqlActions;
 
+  Collection<AclAction> aclActions;
+
   public static AccessContextBuilder builder() {
     return new AccessContextBuilder();
   }

@@ -55,6 +58,7 @@ public class AccessContext {
     private String schema;
     private Collection<SchemaAction> schemaActions = Collections.emptySet();
     private Collection<KsqlAction> ksqlActions = Collections.emptySet();
+    private Collection<AclAction> aclActions = Collections.emptySet();
 
     private AccessContextBuilder() {
     }

@@ -131,6 +135,12 @@ public class AccessContext {
       return this;
     }
 
+    public AccessContextBuilder aclActions(AclAction... actions) {
+      Assert.isTrue(actions.length > 0, "actions not present");
+      this.aclActions = List.of(actions);
+      return this;
+    }
+
     public AccessContext build() {
       return new AccessContext(
           applicationConfigActions,

@@ -140,7 +150,7 @@ public class AccessContext {
           connect, connectActions,
           connector,
           schema, schemaActions,
-          ksqlActions);
+          ksqlActions, aclActions);
     }
   }
 }
@@ -4,6 +4,7 @@ import static com.provectus.kafka.ui.model.rbac.Resource.APPLICATIONCONFIG;
 import static com.provectus.kafka.ui.model.rbac.Resource.CLUSTERCONFIG;
 import static com.provectus.kafka.ui.model.rbac.Resource.KSQL;
 
+import com.provectus.kafka.ui.model.rbac.permission.AclAction;
 import com.provectus.kafka.ui.model.rbac.permission.ApplicationConfigAction;
 import com.provectus.kafka.ui.model.rbac.permission.ClusterConfigAction;
 import com.provectus.kafka.ui.model.rbac.permission.ConnectAction;

@@ -76,6 +77,7 @@ public class Permission {
       case SCHEMA -> Arrays.stream(SchemaAction.values()).map(Enum::toString).toList();
       case CONNECT -> Arrays.stream(ConnectAction.values()).map(Enum::toString).toList();
       case KSQL -> Arrays.stream(KsqlAction.values()).map(Enum::toString).toList();
+      case ACL -> Arrays.stream(AclAction.values()).map(Enum::toString).toList();
     };
   }
 
@@ -11,7 +11,8 @@ public enum Resource {
   CONSUMER,
   SCHEMA,
   CONNECT,
-  KSQL;
+  KSQL,
+  ACL;
 
   @Nullable
   public static Resource fromString(String name) {
com/provectus/kafka/ui/model/rbac/permission/AclAction.java (new file, +15)
@@ -0,0 +1,15 @@
+package com.provectus.kafka.ui.model.rbac.permission;
+
+import org.apache.commons.lang3.EnumUtils;
+import org.jetbrains.annotations.Nullable;
+
+public enum AclAction implements PermissibleAction {
+
+  VIEW,
+  EDIT;
+
+  @Nullable
+  public static AclAction fromString(String name) {
+    return EnumUtils.getEnum(AclAction.class, name);
+  }
+}
@@ -2,16 +2,19 @@ package com.provectus.kafka.ui.service;
 
 import com.provectus.kafka.ui.model.ClusterFeature;
 import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.service.ReactiveAdminClient.ClusterDescription;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
+import java.util.Set;
 import java.util.function.Predicate;
 import javax.annotation.Nullable;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.common.Node;
+import org.apache.kafka.common.acl.AclOperation;
 import org.springframework.stereotype.Service;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;

@@ -26,7 +29,7 @@ public class FeatureService {
   private final AdminClientService adminClientService;
 
   public Mono<List<ClusterFeature>> getAvailableFeatures(KafkaCluster cluster,
-                                                         ReactiveAdminClient.ClusterDescription clusterDescription) {
+                                                         ClusterDescription clusterDescription) {
     List<Mono<ClusterFeature>> features = new ArrayList<>();
 
     if (Optional.ofNullable(cluster.getConnectsClients())

@@ -44,6 +47,8 @@ public class FeatureService {
     }
 
     features.add(topicDeletionEnabled(cluster, clusterDescription.getController()));
+    features.add(aclView(cluster));
+    features.add(aclEdit(clusterDescription));
 
     return Flux.fromIterable(features).flatMap(m -> m).collectList();
   }

@@ -65,4 +70,20 @@ public class FeatureService {
         ? Mono.just(ClusterFeature.TOPIC_DELETION)
         : Mono.empty());
   }
+
+  private Mono<ClusterFeature> aclEdit(ClusterDescription clusterDescription) {
+    var authorizedOps = Optional.ofNullable(clusterDescription.getAuthorizedOperations()).orElse(Set.of());
+    boolean canEdit = authorizedOps.contains(AclOperation.ALL) || authorizedOps.contains(AclOperation.ALTER);
+    return canEdit
+        ? Mono.just(ClusterFeature.KAFKA_ACL_EDIT)
+        : Mono.empty();
+  }
+
+  private Mono<ClusterFeature> aclView(KafkaCluster cluster) {
+    return adminClientService.get(cluster).flatMap(
+        ac -> ac.getClusterFeatures().contains(ReactiveAdminClient.SupportedFeature.AUTHORIZED_SECURITY_ENABLED)
+            ? Mono.just(ClusterFeature.KAFKA_ACL_VIEW)
+            : Mono.empty()
+    );
+  }
 }
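The edit gate keys off the authorized operations that describeCluster reports for the connected principal, while the view gate keys off the AUTHORIZED_SECURITY_ENABLED probe defined in ReactiveAdminClient below. Condensed, the edit decision is just (operations hypothetical):

import java.util.Set;
import org.apache.kafka.common.acl.AclOperation;

Set<AclOperation> authorizedOps = Set.of(AclOperation.DESCRIBE, AclOperation.ALTER);
boolean canEdit = authorizedOps.contains(AclOperation.ALL) || authorizedOps.contains(AclOperation.ALTER);
// canEdit == true here, so KAFKA_ACL_EDIT would be advertised for this cluster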
@@ -5,6 +5,7 @@ import static java.util.stream.Collectors.toMap;
 import static org.apache.kafka.clients.admin.ListOffsetsResult.ListOffsetsResultInfo;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableTable;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Table;

@@ -15,7 +16,6 @@ import com.provectus.kafka.ui.util.KafkaVersion;
 import com.provectus.kafka.ui.util.annotation.KafkaClientInternalsDependant;
 import java.io.Closeable;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;

@@ -61,16 +61,22 @@ import org.apache.kafka.common.Node;
 import org.apache.kafka.common.TopicPartition;
 import org.apache.kafka.common.TopicPartitionInfo;
 import org.apache.kafka.common.TopicPartitionReplica;
+import org.apache.kafka.common.acl.AccessControlEntryFilter;
+import org.apache.kafka.common.acl.AclBinding;
+import org.apache.kafka.common.acl.AclBindingFilter;
 import org.apache.kafka.common.acl.AclOperation;
 import org.apache.kafka.common.config.ConfigResource;
 import org.apache.kafka.common.errors.ClusterAuthorizationException;
 import org.apache.kafka.common.errors.GroupIdNotFoundException;
 import org.apache.kafka.common.errors.GroupNotEmptyException;
+import org.apache.kafka.common.errors.InvalidRequestException;
+import org.apache.kafka.common.errors.SecurityDisabledException;
 import org.apache.kafka.common.errors.TopicAuthorizationException;
 import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
 import org.apache.kafka.common.errors.UnsupportedVersionException;
 import org.apache.kafka.common.requests.DescribeLogDirsResponse;
 import org.apache.kafka.common.resource.ResourcePattern;
+import org.apache.kafka.common.resource.ResourcePatternFilter;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.core.scheduler.Schedulers;

@@ -82,26 +88,29 @@ import reactor.util.function.Tuples;
 @RequiredArgsConstructor
 public class ReactiveAdminClient implements Closeable {
 
-  private enum SupportedFeature {
+  public enum SupportedFeature {
     INCREMENTAL_ALTER_CONFIGS(2.3f),
     CONFIG_DOCUMENTATION_RETRIEVAL(2.6f),
-    DESCRIBE_CLUSTER_INCLUDE_AUTHORIZED_OPERATIONS(2.3f);
+    DESCRIBE_CLUSTER_INCLUDE_AUTHORIZED_OPERATIONS(2.3f),
+    AUTHORIZED_SECURITY_ENABLED(ReactiveAdminClient::isAuthorizedSecurityEnabled);
 
-    private final float sinceVersion;
+    private final BiFunction<AdminClient, Float, Mono<Boolean>> predicate;
 
-    SupportedFeature(float sinceVersion) {
-      this.sinceVersion = sinceVersion;
+    SupportedFeature(BiFunction<AdminClient, Float, Mono<Boolean>> predicate) {
+      this.predicate = predicate;
     }
 
-    static Set<SupportedFeature> forVersion(float kafkaVersion) {
-      return Arrays.stream(SupportedFeature.values())
-          .filter(f -> kafkaVersion >= f.sinceVersion)
+    SupportedFeature(float fromVersion) {
+      this.predicate = (admin, ver) -> Mono.just(ver != null && ver >= fromVersion);
+    }
+
+    static Mono<Set<SupportedFeature>> forVersion(AdminClient ac, @Nullable Float kafkaVersion) {
+      return Flux.fromArray(SupportedFeature.values())
+          .flatMap(f -> f.predicate.apply(ac, kafkaVersion).map(enabled -> Tuples.of(f, enabled)))
+          .filter(Tuple2::getT2)
+          .map(Tuple2::getT1)
           .collect(Collectors.toSet());
     }
 
     static Set<SupportedFeature> defaultFeatures() {
       return Set.of();
     }
   }
 
 @Value

@@ -110,25 +119,31 @@ public class ReactiveAdminClient implements Closeable {
     Node controller;
     String clusterId;
     Collection<Node> nodes;
+    @Nullable // null, if ACL is disabled
+    Set<AclOperation> authorizedOperations;
   }
 
   public static Mono<ReactiveAdminClient> create(AdminClient adminClient) {
     return getClusterVersion(adminClient)
-        .map(ver ->
-            new ReactiveAdminClient(
-                adminClient,
-                ver,
-                getSupportedUpdateFeaturesForVersion(ver)));
+        .flatMap(ver ->
+            getSupportedUpdateFeaturesForVersion(adminClient, ver)
+                .map(features ->
+                    new ReactiveAdminClient(adminClient, ver, features)));
   }
 
-  private static Set<SupportedFeature> getSupportedUpdateFeaturesForVersion(String versionStr) {
-    try {
-      float version = KafkaVersion.parse(versionStr);
-      return SupportedFeature.forVersion(version);
-    } catch (NumberFormatException e) {
-      return SupportedFeature.defaultFeatures();
-    }
+  private static Mono<Set<SupportedFeature>> getSupportedUpdateFeaturesForVersion(AdminClient ac, String versionStr) {
+    @Nullable Float kafkaVersion = KafkaVersion.parse(versionStr).orElse(null);
+    return SupportedFeature.forVersion(ac, kafkaVersion);
   }
 
+  private static Mono<Boolean> isAuthorizedSecurityEnabled(AdminClient ac, @Nullable Float kafkaVersion) {
+    return toMono(ac.describeAcls(AclBindingFilter.ANY).values())
+        .thenReturn(true)
+        .doOnError(th -> !(th instanceof SecurityDisabledException)
+                && !(th instanceof InvalidRequestException)
+                && !(th instanceof UnsupportedVersionException),
+            th -> log.warn("Error checking if security enabled", th))
+        .onErrorReturn(false);
+  }
+
   // NOTE: if KafkaFuture returns null, that Mono will be empty(!), since Reactor does not support nullable results

@@ -162,6 +177,10 @@ public class ReactiveAdminClient implements Closeable {
   private final String version;
   private final Set<SupportedFeature> features;
 
+  public Set<SupportedFeature> getClusterFeatures() {
+    return features;
+  }
+
   public Mono<Set<String>> listTopics(boolean listInternal) {
     return toMono(client.listTopics(new ListTopicsOptions().listInternal(listInternal)).names());
   }

@@ -576,6 +595,22 @@ public class ReactiveAdminClient implements Closeable {
     );
   }
 
+  public Mono<Collection<AclBinding>> listAcls(ResourcePatternFilter filter) {
+    Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
+    return toMono(client.describeAcls(new AclBindingFilter(filter, AccessControlEntryFilter.ANY)).values());
+  }
+
+  public Mono<Void> createAcls(Collection<AclBinding> aclBindings) {
+    Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
+    return toMono(client.createAcls(aclBindings).all());
+  }
+
+  public Mono<Void> deleteAcls(Collection<AclBinding> aclBindings) {
+    Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
+    var filters = aclBindings.stream().map(AclBinding::toFilter).collect(Collectors.toSet());
+    return toMono(client.deleteAcls(filters).all()).then();
+  }
+
   public Mono<Void> updateBrokerConfigByName(Integer brokerId, String name, String value) {
     ConfigResource cr = new ConfigResource(ConfigResource.Type.BROKER, String.valueOf(brokerId));
     AlterConfigOp op = new AlterConfigOp(new ConfigEntry(name, value), AlterConfigOp.OpType.SET);
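The AUTHORIZED_SECURITY_ENABLED probe simply attempts describeAcls and maps the well-known "ACLs not available" errors to false. The same check in blocking AdminClient terms, as a sketch (error triage simplified to a single catch):

import java.util.concurrent.ExecutionException;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.common.acl.AclBindingFilter;

static boolean aclApiAvailable(AdminClient ac) {
  try {
    ac.describeAcls(AclBindingFilter.ANY).values().get(); // any answer means an authorizer is configured
    return true;
  } catch (ExecutionException e) {
    // SecurityDisabledException, InvalidRequestException, UnsupportedVersionException and the like end up here
    return false;
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
    return false;
  }
}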
com/provectus/kafka/ui/service/acl/AclCsv.java (new file, +81)
@@ -0,0 +1,81 @@
+package com.provectus.kafka.ui.service.acl;
+
+import com.provectus.kafka.ui.exception.ValidationException;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.apache.kafka.common.acl.AccessControlEntry;
+import org.apache.kafka.common.acl.AclBinding;
+import org.apache.kafka.common.acl.AclOperation;
+import org.apache.kafka.common.acl.AclPermissionType;
+import org.apache.kafka.common.resource.PatternType;
+import org.apache.kafka.common.resource.ResourcePattern;
+import org.apache.kafka.common.resource.ResourceType;
+
+public class AclCsv {
+
+  private static final String LINE_SEPARATOR = System.lineSeparator();
+  private static final String VALUES_SEPARATOR = ",";
+  private static final String HEADER = "Principal,ResourceType,PatternType,ResourceName,Operation,PermissionType,Host";
+
+  public static String transformToCsvString(Collection<AclBinding> acls) {
+    return Stream.concat(Stream.of(HEADER), acls.stream().map(AclCsv::createAclString))
+        .collect(Collectors.joining(System.lineSeparator()));
+  }
+
+  public static String createAclString(AclBinding binding) {
+    var pattern = binding.pattern();
+    var filter = binding.toFilter().entryFilter();
+    return String.format(
+        "%s,%s,%s,%s,%s,%s,%s",
+        filter.principal(),
+        pattern.resourceType(),
+        pattern.patternType(),
+        pattern.name(),
+        filter.operation(),
+        filter.permissionType(),
+        filter.host()
+    );
+  }
+
+  private static AclBinding parseCsvLine(String csv, int line) {
+    String[] values = csv.split(VALUES_SEPARATOR);
+    if (values.length != 7) {
+      throw new ValidationException("Input csv is not valid - there should be 7 columns in line " + line);
+    }
+    for (int i = 0; i < values.length; i++) {
+      if ((values[i] = values[i].trim()).isBlank()) {
+        throw new ValidationException("Input csv is not valid - blank value in colum " + i + ", line " + line);
+      }
+    }
+    try {
+      return new AclBinding(
+          new ResourcePattern(
+              ResourceType.valueOf(values[1]), values[3], PatternType.valueOf(values[2])),
+          new AccessControlEntry(
+              values[0], values[6], AclOperation.valueOf(values[4]), AclPermissionType.valueOf(values[5]))
+      );
+    } catch (IllegalArgumentException enumParseError) {
+      throw new ValidationException("Error parsing enum value in line " + line);
+    }
+  }
+
+  public static Collection<AclBinding> parseCsv(String csvString) {
+    String[] lines = csvString.split(LINE_SEPARATOR);
+    if (lines.length == 0) {
+      throw new ValidationException("Error parsing ACL csv file: no lines in file");
+    }
+    boolean firstLineIsHeader = HEADER.equalsIgnoreCase(lines[0].trim().replace(" ", ""));
+    Set<AclBinding> result = new HashSet<>();
+    for (int i = firstLineIsHeader ? 1 : 0; i < lines.length; i++) {
+      String line = lines[i];
+      if (!line.isBlank()) {
+        AclBinding aclBinding = parseCsvLine(line, i);
+        result.add(aclBinding);
+      }
+    }
+    return result;
+  }
+}
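A quick feel for the format: the parser accepts exactly the seven columns named in HEADER, with the header row optional. A round-trip sketch with a hypothetical principal:

String csv = String.join(System.lineSeparator(),
    "Principal,ResourceType,PatternType,ResourceName,Operation,PermissionType,Host",
    "User:alice,TOPIC,LITERAL,orders,READ,ALLOW,*");

Collection<AclBinding> bindings = AclCsv.parseCsv(csv); // one binding for User:alice
// createAclString(binding) reproduces the data row verbatim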
com/provectus/kafka/ui/service/acl/AclsService.java (new file, +93)
@@ -0,0 +1,93 @@
+package com.provectus.kafka.ui.service.acl;
+
+import com.google.common.collect.Sets;
+import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.service.AdminClientService;
+import java.util.List;
+import java.util.Set;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.common.acl.AclBinding;
+import org.apache.kafka.common.resource.ResourcePatternFilter;
+import org.springframework.stereotype.Service;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+@Slf4j
+@Service
+@RequiredArgsConstructor
+public class AclsService {
+
+  private final AdminClientService adminClientService;
+
+  public Mono<Void> createAcl(KafkaCluster cluster, AclBinding aclBinding) {
+    var aclString = AclCsv.createAclString(aclBinding);
+    log.info("CREATING ACL: [{}]", aclString);
+    return adminClientService.get(cluster)
+        .flatMap(ac -> ac.createAcls(List.of(aclBinding)))
+        .doOnSuccess(v -> log.info("ACL CREATED: [{}]", aclString));
+  }
+
+  public Mono<Void> deleteAcl(KafkaCluster cluster, AclBinding aclBinding) {
+    var aclString = AclCsv.createAclString(aclBinding);
+    log.info("DELETING ACL: [{}]", aclString);
+    return adminClientService.get(cluster)
+        .flatMap(ac -> ac.deleteAcls(List.of(aclBinding)))
+        .doOnSuccess(v -> log.info("ACL DELETED: [{}]", aclString));
+  }
+
+  public Flux<AclBinding> listAcls(KafkaCluster cluster, ResourcePatternFilter filter) {
+    return adminClientService.get(cluster)
+        .flatMap(c -> c.listAcls(filter))
+        .flatMapIterable(acls -> acls);
+  }
+
+  public Mono<String> getAclAsCsvString(KafkaCluster cluster) {
+    return adminClientService.get(cluster)
+        .flatMap(c -> c.listAcls(ResourcePatternFilter.ANY))
+        .map(AclCsv::transformToCsvString);
+  }
+
+  public Mono<Void> syncAclWithAclCsv(KafkaCluster cluster, String csv) {
+    return adminClientService.get(cluster)
+        .flatMap(ac -> ac.listAcls(ResourcePatternFilter.ANY).flatMap(existingAclList -> {
+          var existingSet = Set.copyOf(existingAclList);
+          var newAcls = Set.copyOf(AclCsv.parseCsv(csv));
+          var toDelete = Sets.difference(existingSet, newAcls);
+          var toAdd = Sets.difference(newAcls, existingSet);
+          logAclSyncPlan(cluster, toAdd, toDelete);
+          if (toAdd.isEmpty() && toDelete.isEmpty()) {
+            return Mono.empty();
+          }
+          log.info("Starting new ACLs creation");
+          return ac.createAcls(toAdd)
+              .doOnSuccess(v -> {
+                log.info("{} new ACLs created", toAdd.size());
+                log.info("Starting ACLs deletion");
+              })
+              .then(ac.deleteAcls(toDelete)
+                  .doOnSuccess(v -> log.info("{} ACLs deleted", toDelete.size())));
+        }));
+  }
+
+  private void logAclSyncPlan(KafkaCluster cluster, Set<AclBinding> toBeAdded, Set<AclBinding> toBeDeleted) {
+    log.info("'{}' cluster ACL sync plan: ", cluster.getName());
+    if (toBeAdded.isEmpty() && toBeDeleted.isEmpty()) {
+      log.info("Nothing to do, ACL is already in sync");
+      return;
+    }
+    if (!toBeAdded.isEmpty()) {
+      log.info("ACLs to be added ({}): ", toBeAdded.size());
+      for (AclBinding aclBinding : toBeAdded) {
+        log.info(" " + AclCsv.createAclString(aclBinding));
+      }
+    }
+    if (!toBeDeleted.isEmpty()) {
+      log.info("ACLs to be deleted ({}): ", toBeDeleted.size());
+      for (AclBinding aclBinding : toBeDeleted) {
+        log.info(" " + AclCsv.createAclString(aclBinding));
+      }
+    }
+  }
+
+}
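syncAclWithAclCsv treats the CSV as the desired end state and derives the plan with two set differences. The core of that calculation, reduced to plain strings standing in for AclBinding values (hypothetical):

import com.google.common.collect.Sets;
import java.util.Set;

Set<String> existing = Set.of("aclA", "aclB");
Set<String> desired = Set.of("aclB", "aclC");

Set<String> toDelete = Sets.difference(existing, desired); // [aclA]
Set<String> toAdd = Sets.difference(desired, existing);    // [aclC]
// the service then creates toAdd first and deletes toDelete afterwards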
@@ -44,7 +44,7 @@ public class DataMasking {
   public static DataMasking create(@Nullable List<ClustersProperties.Masking> config) {
     return new DataMasking(
         Optional.ofNullable(config).orElse(List.of()).stream().map(property -> {
-          Preconditions.checkNotNull(property.getType(), "masking type not specifed");
+          Preconditions.checkNotNull(property.getType(), "masking type not specified");
           Preconditions.checkArgument(
               StringUtils.isNotEmpty(property.getTopicKeysPattern())
                   || StringUtils.isNotEmpty(property.getTopicValuesPattern()),
com/provectus/kafka/ui/service/masking/policies/FieldsSelector.java (new file, +28)
@@ -0,0 +1,28 @@
+package com.provectus.kafka.ui.service.masking.policies;
+
+import com.provectus.kafka.ui.config.ClustersProperties;
+import com.provectus.kafka.ui.exception.ValidationException;
+import java.util.regex.Pattern;
+import org.springframework.util.CollectionUtils;
+import org.springframework.util.StringUtils;
+
+interface FieldsSelector {
+
+  static FieldsSelector create(ClustersProperties.Masking property) {
+    if (StringUtils.hasText(property.getFieldsNamePattern()) && !CollectionUtils.isEmpty(property.getFields())) {
+      throw new ValidationException("You can't provide both fieldNames & fieldsNamePattern for masking");
+    }
+    if (StringUtils.hasText(property.getFieldsNamePattern())) {
+      Pattern pattern = Pattern.compile(property.getFieldsNamePattern());
+      return f -> pattern.matcher(f).matches();
+    }
+    if (!CollectionUtils.isEmpty(property.getFields())) {
+      return f -> property.getFields().contains(f);
+    }
+    //no pattern, no field names - mean all fields should be masked
+    return fieldName -> true;
+  }
+
+  boolean shouldBeMasked(String fieldName);
+
+}
@@ -15,8 +15,8 @@ class Mask extends MaskingPolicy {
 
   private final UnaryOperator<String> masker;
 
-  Mask(List<String> fieldNames, List<String> maskingChars) {
-    super(fieldNames);
+  Mask(FieldsSelector fieldsSelector, List<String> maskingChars) {
+    super(fieldsSelector);
     this.masker = createMasker(maskingChars);
   }
 

@@ -38,22 +38,13 @@ class Mask extends MaskingPolicy {
     for (int i = 0; i < input.length(); i++) {
       int cp = input.codePointAt(i);
       switch (Character.getType(cp)) {
-        case Character.SPACE_SEPARATOR:
-        case Character.LINE_SEPARATOR:
-        case Character.PARAGRAPH_SEPARATOR:
-          sb.appendCodePoint(cp); // keeping separators as-is
-          break;
-        case Character.UPPERCASE_LETTER:
-          sb.append(maskingChars.get(0));
-          break;
-        case Character.LOWERCASE_LETTER:
-          sb.append(maskingChars.get(1));
-          break;
-        case Character.DECIMAL_DIGIT_NUMBER:
-          sb.append(maskingChars.get(2));
-          break;
-        default:
-          sb.append(maskingChars.get(3));
+        case Character.SPACE_SEPARATOR,
+             Character.LINE_SEPARATOR,
+             Character.PARAGRAPH_SEPARATOR -> sb.appendCodePoint(cp); // keeping separators as-is
+        case Character.UPPERCASE_LETTER -> sb.append(maskingChars.get(0));
+        case Character.LOWERCASE_LETTER -> sb.append(maskingChars.get(1));
+        case Character.DECIMAL_DIGIT_NUMBER -> sb.append(maskingChars.get(2));
+        default -> sb.append(maskingChars.get(3));
       }
     }
     return sb.toString();
@@ -2,46 +2,36 @@ package com.provectus.kafka.ui.service.masking.policies;
 
 import com.fasterxml.jackson.databind.node.ContainerNode;
 import com.provectus.kafka.ui.config.ClustersProperties;
-import java.util.List;
 import lombok.RequiredArgsConstructor;
 
 @RequiredArgsConstructor
 public abstract class MaskingPolicy {
 
   public static MaskingPolicy create(ClustersProperties.Masking property) {
-    List<String> fields = property.getFields() == null
-        ? List.of() // empty list means that policy will be applied to all fields
-        : property.getFields();
-    switch (property.getType()) {
-      case REMOVE:
-        return new Remove(fields);
-      case REPLACE:
-        return new Replace(
-            fields,
-            property.getReplacement() == null
-                ? Replace.DEFAULT_REPLACEMENT
-                : property.getReplacement()
-        );
-      case MASK:
-        return new Mask(
-            fields,
-            property.getPattern() == null
-                ? Mask.DEFAULT_PATTERN
-                : property.getPattern()
-        );
-      default:
-        throw new IllegalStateException("Unknown policy type: " + property.getType());
-    }
+    FieldsSelector fieldsSelector = FieldsSelector.create(property);
+    return switch (property.getType()) {
+      case REMOVE -> new Remove(fieldsSelector);
+      case REPLACE -> new Replace(
+          fieldsSelector,
+          property.getReplacement() == null
+              ? Replace.DEFAULT_REPLACEMENT
+              : property.getReplacement()
+      );
+      case MASK -> new Mask(
+          fieldsSelector,
+          property.getMaskingCharsReplacement() == null
+              ? Mask.DEFAULT_PATTERN
+              : property.getMaskingCharsReplacement()
+      );
+    };
   }
 
   //----------------------------------------------------------------
 
-  // empty list means policy will be applied to all fields
-  private final List<String> fieldNames;
+  private final FieldsSelector fieldsSelector;
 
   protected boolean fieldShouldBeMasked(String fieldName) {
-    return fieldNames.isEmpty() || fieldNames.contains(fieldName);
+    return fieldsSelector.shouldBeMasked(fieldName);
   }
 
   public abstract ContainerNode<?> applyToJsonContainer(ContainerNode<?> node);
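Putting the reworked factory together with FieldsSelector: a short construction sketch for a MASK policy (the Type enum constant and field values are assumed from the Masking properties shown earlier in this diff):

var props = new ClustersProperties.Masking();
props.setType(ClustersProperties.Masking.Type.MASK); // enum constant assumed; see the switch above
props.setFieldsNamePattern("card.*|ssn");
props.setMaskingCharsReplacement(List.of("X", "x", "n", "-"));

MaskingPolicy policy = MaskingPolicy.create(props); // yields a Mask with a regex-backed FieldsSelector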
@@ -4,12 +4,12 @@ import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import com.fasterxml.jackson.databind.node.ContainerNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
-import java.util.List;
 
 
 class Remove extends MaskingPolicy {
 
-  Remove(List<String> fieldNames) {
-    super(fieldNames);
+  Remove(FieldsSelector fieldsSelector) {
+    super(fieldsSelector);
   }
 
   @Override
@@ -6,7 +6,6 @@ import com.fasterxml.jackson.databind.node.ContainerNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.fasterxml.jackson.databind.node.TextNode;
 import com.google.common.base.Preconditions;
-import java.util.List;
 
 class Replace extends MaskingPolicy {
 

@@ -14,8 +13,8 @@ class Replace extends MaskingPolicy {
 
   private final String replacement;
 
-  Replace(List<String> fieldNames, String replacementString) {
-    super(fieldNames);
+  Replace(FieldsSelector fieldsSelector, String replacementString) {
+    super(fieldsSelector);
     this.replacement = Preconditions.checkNotNull(replacementString);
   }
 
@@ -61,7 +61,9 @@ class JmxSslSocketFactory extends javax.net.ssl.SSLSocketFactory {
     } catch (Exception e) {
       log.error("----------------------------------");
-      log.error("SSL can't be enabled for JMX retrieval. "
-          + "Make sure your java app run with '--add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED' arg.", e);
+      log.error("SSL can't be enabled for JMX retrieval. "
+          + "Make sure your java app run with '--add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED' arg. Err: {}",
+          e.getMessage());
+      log.trace("SSL can't be enabled for JMX retrieval", e);
       log.error("----------------------------------");
     }
     SSL_JMX_SUPPORTED = sslJmxSupported;
@@ -90,6 +90,7 @@ public class DynamicConfigOperations {
   }
 
   public PropertiesStructure getCurrentProperties() {
+    checkIfDynamicConfigEnabled();
     return PropertiesStructure.builder()
         .kafka(getNullableBean(ClustersProperties.class))
         .rbac(getNullableBean(RoleBasedAccessControlProperties.class))

@@ -112,11 +113,7 @@ public class DynamicConfigOperations {
   }
 
   public void persist(PropertiesStructure properties) {
-    if (!dynamicConfigEnabled()) {
-      throw new ValidationException(
-          "Dynamic config change is not allowed. "
-              + "Set dynamic.config.enabled property to 'true' to enabled it.");
-    }
+    checkIfDynamicConfigEnabled();
     properties.initAndValidate();
 
     String yaml = serializeToYaml(properties);

@@ -124,8 +121,9 @@ public class DynamicConfigOperations {
   }
 
   public Mono<Path> uploadConfigRelatedFile(FilePart file) {
-    String targetDirStr = (String) ctx.getEnvironment().getSystemEnvironment()
-        .getOrDefault(CONFIG_RELATED_UPLOADS_DIR_PROPERTY, CONFIG_RELATED_UPLOADS_DIR_DEFAULT);
+    checkIfDynamicConfigEnabled();
+    String targetDirStr = ctx.getEnvironment()
+        .getProperty(CONFIG_RELATED_UPLOADS_DIR_PROPERTY, CONFIG_RELATED_UPLOADS_DIR_DEFAULT);
 
     Path targetDir = Path.of(targetDirStr);
     if (!Files.exists(targetDir)) {

@@ -149,6 +147,14 @@ public class DynamicConfigOperations {
         .onErrorMap(th -> new FileUploadException(targetFilePath, th));
   }
 
+  private void checkIfDynamicConfigEnabled() {
+    if (!dynamicConfigEnabled()) {
+      throw new ValidationException(
+          "Dynamic config change is not allowed. "
+              + "Set dynamic.config.enabled property to 'true' to enabled it.");
+    }
+  }
+
   @SneakyThrows
   private void writeYamlToFile(String yaml, Path path) {
     if (Files.isDirectory(path)) {
@@ -1,24 +1,21 @@
 package com.provectus.kafka.ui.util;
 
-import lombok.extern.slf4j.Slf4j;
+import java.util.Optional;
 
-@Slf4j
 public final class KafkaVersion {
 
   private KafkaVersion() {
   }
 
-  public static float parse(String version) throws NumberFormatException {
-    log.trace("Parsing cluster version [{}]", version);
+  public static Optional<Float> parse(String version) throws NumberFormatException {
     try {
       final String[] parts = version.split("\\.");
       if (parts.length > 2) {
         version = parts[0] + "." + parts[1];
       }
-      return Float.parseFloat(version.split("-")[0]);
+      return Optional.of(Float.parseFloat(version.split("-")[0]));
     } catch (Exception e) {
-      log.error("Conversion clusterVersion [{}] to float value failed", version, e);
-      throw e;
+      return Optional.empty();
     }
   }
 }
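parse now reports failure through an empty Optional instead of a thrown exception, which is what lets getSupportedUpdateFeaturesForVersion above degrade gracefully. Behavior at a glance (inputs illustrative):

KafkaVersion.parse("3.4.0");          // Optional[3.4]  (major.minor only)
KafkaVersion.parse("2.8.1-SNAPSHOT"); // Optional[2.8]  (suffix after '-' is ignored)
KafkaVersion.parse("not-a-version");  // Optional.empty(), no exception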
@@ -2,6 +2,7 @@ package com.provectus.kafka.ui;
 
 import com.provectus.kafka.ui.container.KafkaConnectContainer;
 import com.provectus.kafka.ui.container.SchemaRegistryContainer;
+import java.nio.file.Path;
 import java.util.List;
 import java.util.Properties;
 import org.apache.kafka.clients.admin.AdminClient;

@@ -9,6 +10,7 @@ import org.apache.kafka.clients.admin.AdminClientConfig;
 import org.apache.kafka.clients.admin.NewTopic;
 import org.jetbrains.annotations.NotNull;
 import org.junit.jupiter.api.function.ThrowingConsumer;
+import org.junit.jupiter.api.io.TempDir;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient;
 import org.springframework.boot.test.context.SpringBootTest;

@@ -47,6 +49,9 @@ public abstract class AbstractIntegrationTest {
       .dependsOn(kafka)
       .dependsOn(schemaRegistry);
 
+  @TempDir
+  public static Path tmpDir;
+
   static {
     kafka.start();
     schemaRegistry.start();

@@ -76,6 +81,9 @@ public abstract class AbstractIntegrationTest {
       System.setProperty("kafka.clusters.1.schemaRegistry", schemaRegistry.getUrl());
       System.setProperty("kafka.clusters.1.kafkaConnect.0.name", "kafka-connect");
       System.setProperty("kafka.clusters.1.kafkaConnect.0.address", kafkaConnect.getTarget());
+
+      System.setProperty("dynamic.config.enabled", "true");
+      System.setProperty("config.related.uploads.dir", tmpDir.toString());
     }
   }
com/provectus/kafka/ui/controller/ApplicationConfigControllerTest.java (new file, +49)
@@ -0,0 +1,49 @@
+package com.provectus.kafka.ui.controller;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.provectus.kafka.ui.AbstractIntegrationTest;
+import com.provectus.kafka.ui.model.UploadedFileInfoDTO;
+import java.io.IOException;
+import java.nio.file.Path;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.core.io.ClassPathResource;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.client.MultipartBodyBuilder;
+import org.springframework.test.web.reactive.server.WebTestClient;
+import org.springframework.util.MultiValueMap;
+
+class ApplicationConfigControllerTest extends AbstractIntegrationTest {
+
+  @Autowired
+  private WebTestClient webTestClient;
+
+  @Test
+  public void testUpload() throws IOException {
+    var fileToUpload = new ClassPathResource("/fileForUploadTest.txt", this.getClass());
+
+    UploadedFileInfoDTO result = webTestClient
+        .post()
+        .uri("/api/config/relatedfiles")
+        .bodyValue(generateBody(fileToUpload))
+        .exchange()
+        .expectStatus()
+        .isOk()
+        .expectBody(UploadedFileInfoDTO.class)
+        .returnResult()
+        .getResponseBody();
+
+    assertThat(result).isNotNull();
+    assertThat(result.getLocation()).isNotNull();
+    assertThat(Path.of(result.getLocation()))
+        .hasSameBinaryContentAs(fileToUpload.getFile().toPath());
+  }
+
+  private MultiValueMap<String, HttpEntity<?>> generateBody(ClassPathResource resource) {
+    MultipartBodyBuilder builder = new MultipartBodyBuilder();
+    builder.part("file", resource);
+    return builder.build();
+  }
+
+}
com/provectus/kafka/ui/service/acl/AclCsvTest.java (new file, +70)
@@ -0,0 +1,70 @@
+package com.provectus.kafka.ui.service.acl;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
+import com.provectus.kafka.ui.exception.ValidationException;
+import java.util.Collection;
+import java.util.List;
+import org.apache.kafka.common.acl.AccessControlEntry;
+import org.apache.kafka.common.acl.AclBinding;
+import org.apache.kafka.common.acl.AclOperation;
+import org.apache.kafka.common.acl.AclPermissionType;
+import org.apache.kafka.common.resource.PatternType;
+import org.apache.kafka.common.resource.ResourcePattern;
+import org.apache.kafka.common.resource.ResourceType;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
+
+class AclCsvTest {
+
+  private static final List<AclBinding> TEST_BINDINGS = List.of(
+      new AclBinding(
+          new ResourcePattern(ResourceType.TOPIC, "*", PatternType.LITERAL),
+          new AccessControlEntry("User:test1", "*", AclOperation.READ, AclPermissionType.ALLOW)),
+      new AclBinding(
+          new ResourcePattern(ResourceType.GROUP, "group1", PatternType.PREFIXED),
+          new AccessControlEntry("User:test2", "localhost", AclOperation.DESCRIBE, AclPermissionType.DENY))
+  );
+
+  @ParameterizedTest
+  @ValueSource(strings = {
+      "Principal,ResourceType, PatternType, ResourceName,Operation,PermissionType,Host\n"
+          + "User:test1,TOPIC,LITERAL,*,READ,ALLOW,*\n"
+          + "User:test2,GROUP,PREFIXED,group1,DESCRIBE,DENY,localhost",
+
+      //without header
+      "User:test1,TOPIC,LITERAL,*,READ,ALLOW,*\n"
+          + "\n"
+          + "User:test2,GROUP,PREFIXED,group1,DESCRIBE,DENY,localhost"
+          + "\n"
+  })
+  void parsesValidInputCsv(String csvString) {
+    Collection<AclBinding> parsed = AclCsv.parseCsv(csvString);
+    assertThat(parsed).containsExactlyInAnyOrderElementsOf(TEST_BINDINGS);
+  }
+
+  @ParameterizedTest
+  @ValueSource(strings = {
+      // columns > 7
+      "User:test1,TOPIC,LITERAL,*,READ,ALLOW,*,1,2,3,4",
+      // columns < 7
+      "User:test1,TOPIC,LITERAL,*",
+      // enum values are illegal
+      "User:test1,ILLEGAL,LITERAL,*,READ,ALLOW,*",
+      "User:test1,TOPIC,LITERAL,*,READ,ILLEGAL,*"
+  })
+  void throwsExceptionForInvalidInputCsv(String csvString) {
+    assertThatThrownBy(() -> AclCsv.parseCsv(csvString))
+        .isInstanceOf(ValidationException.class);
+  }
+
+  @Test
+  void transformAndParseUseSameFormat() {
+    String csv = AclCsv.transformToCsvString(TEST_BINDINGS);
+    Collection<AclBinding> parsedBindings = AclCsv.parseCsv(csv);
+    assertThat(parsedBindings).containsExactlyInAnyOrderElementsOf(TEST_BINDINGS);
+  }
+
+}
com/provectus/kafka/ui/service/acl/AclsServiceTest.java (new file, +82)
@@ -0,0 +1,82 @@
+package com.provectus.kafka.ui.service.acl;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.service.AdminClientService;
+import com.provectus.kafka.ui.service.ReactiveAdminClient;
+import java.util.Collection;
+import java.util.List;
+import org.apache.kafka.common.acl.AccessControlEntry;
+import org.apache.kafka.common.acl.AclBinding;
+import org.apache.kafka.common.acl.AclOperation;
+import org.apache.kafka.common.acl.AclPermissionType;
+import org.apache.kafka.common.resource.PatternType;
+import org.apache.kafka.common.resource.ResourcePattern;
+import org.apache.kafka.common.resource.ResourcePatternFilter;
+import org.apache.kafka.common.resource.ResourceType;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.ArgumentCaptor;
+import reactor.core.publisher.Mono;
+
+class AclsServiceTest {
+
+  private static final KafkaCluster CLUSTER = KafkaCluster.builder().build();
+
+  private final ReactiveAdminClient adminClientMock = mock(ReactiveAdminClient.class);
+  private final AdminClientService adminClientService = mock(AdminClientService.class);
+
+  private final AclsService aclsService = new AclsService(adminClientService);
+
+  @BeforeEach
+  void initMocks() {
+    when(adminClientService.get(CLUSTER)).thenReturn(Mono.just(adminClientMock));
+  }
+
+  @Test
+  void testSyncAclWithAclCsv() {
+    var existingBinding1 = new AclBinding(
+        new ResourcePattern(ResourceType.TOPIC, "*", PatternType.LITERAL),
+        new AccessControlEntry("User:test1", "*", AclOperation.READ, AclPermissionType.ALLOW));
+
+    var existingBinding2 = new AclBinding(
+        new ResourcePattern(ResourceType.GROUP, "group1", PatternType.PREFIXED),
+        new AccessControlEntry("User:test2", "localhost", AclOperation.DESCRIBE, AclPermissionType.DENY));
+
+    var newBindingToBeAdded = new AclBinding(
+        new ResourcePattern(ResourceType.GROUP, "groupNew", PatternType.PREFIXED),
+        new AccessControlEntry("User:test3", "localhost", AclOperation.DESCRIBE, AclPermissionType.DENY));
+
+    when(adminClientMock.listAcls(ResourcePatternFilter.ANY))
+        .thenReturn(Mono.just(List.of(existingBinding1, existingBinding2)));
+
+    ArgumentCaptor<?> createdCaptor = ArgumentCaptor.forClass(Collection.class);
+    when(adminClientMock.createAcls((Collection<AclBinding>) createdCaptor.capture()))
+        .thenReturn(Mono.empty());
+
+    ArgumentCaptor<?> deletedCaptor = ArgumentCaptor.forClass(Collection.class);
+    when(adminClientMock.deleteAcls((Collection<AclBinding>) deletedCaptor.capture()))
+        .thenReturn(Mono.empty());
+
+    aclsService.syncAclWithAclCsv(
+        CLUSTER,
+        "Principal,ResourceType, PatternType, ResourceName,Operation,PermissionType,Host\n"
+            + "User:test1,TOPIC,LITERAL,*,READ,ALLOW,*\n"
+            + "User:test3,GROUP,PREFIXED,groupNew,DESCRIBE,DENY,localhost"
+    ).block();
+
+    Collection<AclBinding> createdBindings = (Collection<AclBinding>) createdCaptor.getValue();
+    assertThat(createdBindings)
+        .hasSize(1)
+        .contains(newBindingToBeAdded);
+
+    Collection<AclBinding> deletedBindings = (Collection<AclBinding>) deletedCaptor.getValue();
+    assertThat(deletedBindings)
+        .hasSize(1)
+        .contains(existingBinding2);
+  }
+
+}
@@ -0,0 +1,53 @@
package com.provectus.kafka.ui.service.masking.policies;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;

import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.exception.ValidationException;
import java.util.List;
import org.junit.jupiter.api.Test;

class FieldsSelectorTest {

  @Test
  void selectsFieldsDueToProvidedPattern() {
    var properties = new ClustersProperties.Masking();
    properties.setFieldsNamePattern("f1|f2");

    var selector = FieldsSelector.create(properties);
    assertThat(selector.shouldBeMasked("f1")).isTrue();
    assertThat(selector.shouldBeMasked("f2")).isTrue();
    assertThat(selector.shouldBeMasked("doesNotMatchPattern")).isFalse();
  }

  @Test
  void selectsFieldsDueToProvidedFieldNames() {
    var properties = new ClustersProperties.Masking();
    properties.setFields(List.of("f1", "f2"));

    var selector = FieldsSelector.create(properties);
    assertThat(selector.shouldBeMasked("f1")).isTrue();
    assertThat(selector.shouldBeMasked("f2")).isTrue();
    assertThat(selector.shouldBeMasked("notInAList")).isFalse();
  }

  @Test
  void selectAllFieldsIfNoPatternAndNoNamesProvided() {
    var properties = new ClustersProperties.Masking();

    var selector = FieldsSelector.create(properties);
    assertThat(selector.shouldBeMasked("anyPropertyName")).isTrue();
  }

  @Test
  void throwsExceptionIfBothFieldListAndPatternProvided() {
    var properties = new ClustersProperties.Masking();
    properties.setFieldsNamePattern("f1|f2");
    properties.setFields(List.of("f3", "f4"));

    assertThatThrownBy(() -> FieldsSelector.create(properties))
        .isInstanceOf(ValidationException.class);
  }
}
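These four tests fully determine the FieldsSelector contract: an explicit field list, a field-name regex, match-everything when neither is configured, and a ValidationException when both are set. A sketch of a factory satisfying exactly those cases (the production class may differ in details, and the exception message is assumed):

import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.exception.ValidationException;
import java.util.List;
import java.util.regex.Pattern;

interface FieldsSelector {

  boolean shouldBeMasked(String fieldName);

  static FieldsSelector create(ClustersProperties.Masking properties) {
    List<String> fields = properties.getFields();
    String pattern = properties.getFieldsNamePattern();
    boolean hasFields = fields != null && !fields.isEmpty();
    if (hasFields && pattern != null) {
      // the tests only require that a ValidationException is thrown here
      throw new ValidationException("'fields' and 'fieldsNamePattern' are mutually exclusive");
    }
    if (pattern != null) {
      Pattern p = Pattern.compile(pattern);
      return fieldName -> p.matcher(fieldName).matches();
    }
    if (hasFields) {
      return fields::contains;
    }
    return fieldName -> true; // no configuration: every field is masked
  }
}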
@@ -15,35 +15,35 @@ import org.junit.jupiter.params.provider.MethodSource;
 
 class MaskTest {
 
-  private static final List<String> TARGET_FIELDS = List.of("id", "name");
+  private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);
   private static final List<String> PATTERN = List.of("X", "x", "n", "-");
 
   @ParameterizedTest
   @MethodSource
-  void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
-    Mask policy = new Mask(fields, PATTERN);
+  void testApplyToJsonContainer(FieldsSelector selector, ContainerNode<?> original, ContainerNode<?> expected) {
+    Mask policy = new Mask(selector, PATTERN);
     assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
   }
 
   private static Stream<Arguments> testApplyToJsonContainer() {
     return Stream.of(
         Arguments.of(
-            TARGET_FIELDS,
+            FIELDS_SELECTOR,
             parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
             parse("{ \"id\": \"nnn\", \"name\": { \"first\": \"Xxxxx\", \"surname\": \"Xxxxnnn-\"}}")
         ),
         Arguments.of(
-            TARGET_FIELDS,
+            FIELDS_SELECTOR,
             parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
             parse("[{ \"id\": \"nnn\", \"f2\": 234}, { \"name\": \"n-n\", \"f2\": 345} ]")
         ),
         Arguments.of(
-            TARGET_FIELDS,
+            FIELDS_SELECTOR,
             parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
             parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Xxxxnnn-\"}}")
         ),
         Arguments.of(
-            List.of(),
+            (FieldsSelector) (fieldName -> true),
             parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
             parse("{ \"outer\": { \"f1\": \"Xxxxx\", \"name\": \"Xxxxnnn-\"}}")
         )

@@ -57,7 +57,7 @@ class MaskTest {
       "null, xxxx"
   })
   void testApplyToString(String original, String expected) {
-    Mask policy = new Mask(List.of(), PATTERN);
+    Mask policy = new Mask(fieldName -> true, PATTERN);
     assertThat(policy.applyToString(original)).isEqualTo(expected);
   }
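The expected strings encode the per-character substitution behind PATTERN = List.of("X", "x", "n", "-"): uppercase letters map to the first element, lowercase to the second, digits to the third, and everything else to the fourth, so "Bond777!" becomes "Xxxxnnn-" and the number 123 is stringified to "nnn". A sketch of that mapping, inferred from the test data alone:

import java.util.List;

class MaskCharsSketch {

  // pattern = [upperCase, lowerCase, digit, other], e.g. ["X", "x", "n", "-"]
  static String mask(String value, List<String> pattern) {
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < value.length(); i++) {
      char c = value.charAt(i);
      if (Character.isUpperCase(c)) {
        sb.append(pattern.get(0));
      } else if (Character.isLowerCase(c)) {
        sb.append(pattern.get(1));
      } else if (Character.isDigit(c)) {
        sb.append(pattern.get(2));
      } else {
        sb.append(pattern.get(3));
      }
    }
    return sb.toString(); // mask("Bond777!", ...) -> "Xxxxnnn-"
  }
}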
@@ -15,39 +15,39 @@ import org.junit.jupiter.params.provider.MethodSource;
 
 class RemoveTest {
 
-  private static final List<String> TARGET_FIELDS = List.of("id", "name");
+  private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);
 
   @ParameterizedTest
   @MethodSource
-  void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
-    var policy = new Remove(fields);
+  void testApplyToJsonContainer(FieldsSelector fieldsSelector, ContainerNode<?> original, ContainerNode<?> expected) {
+    var policy = new Remove(fieldsSelector);
     assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
   }
 
   private static Stream<Arguments> testApplyToJsonContainer() {
     return Stream.of(
         Arguments.of(
-            TARGET_FIELDS,
+            FIELDS_SELECTOR,
             parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
             parse("{}")
         ),
         Arguments.of(
-            TARGET_FIELDS,
+            FIELDS_SELECTOR,
             parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
             parse("[{ \"f2\": 234}, { \"f2\": 345} ]")
         ),
         Arguments.of(
-            TARGET_FIELDS,
+            FIELDS_SELECTOR,
             parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
             parse("{ \"outer\": { \"f1\": \"James\"}}")
         ),
         Arguments.of(
-            List.of(),
+            (FieldsSelector) (fieldName -> true),
             parse("{ \"outer\": { \"f1\": \"v1\", \"f2\": \"v2\", \"inner\" : {\"if1\": \"iv1\"}}}"),
             parse("{}")
         ),
         Arguments.of(
-            List.of(),
+            (FieldsSelector) (fieldName -> true),
             parse("[{ \"f1\": 123}, { \"f2\": \"1.2\"} ]"),
             parse("[{}, {}]")
         )

@@ -66,7 +66,7 @@ class RemoveTest {
       "null, null"
   })
   void testApplyToString(String original, String expected) {
-    var policy = new Remove(List.of());
+    var policy = new Remove(fieldName -> true);
     assertThat(policy.applyToString(original)).isEqualTo(expected);
   }
 }
@@ -15,35 +15,35 @@ import org.junit.jupiter.params.provider.MethodSource;
 
 class ReplaceTest {
 
-  private static final List<String> TARGET_FIELDS = List.of("id", "name");
+  private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);
   private static final String REPLACEMENT_STRING = "***";
 
   @ParameterizedTest
   @MethodSource
-  void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
-    var policy = new Replace(fields, REPLACEMENT_STRING);
+  void testApplyToJsonContainer(FieldsSelector fieldsSelector, ContainerNode<?> original, ContainerNode<?> expected) {
+    var policy = new Replace(fieldsSelector, REPLACEMENT_STRING);
     assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
   }
 
   private static Stream<Arguments> testApplyToJsonContainer() {
     return Stream.of(
         Arguments.of(
-            TARGET_FIELDS,
+            FIELDS_SELECTOR,
             parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
             parse("{ \"id\": \"***\", \"name\": { \"first\": \"***\", \"surname\": \"***\"}}")
         ),
         Arguments.of(
-            TARGET_FIELDS,
+            FIELDS_SELECTOR,
             parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
             parse("[{ \"id\": \"***\", \"f2\": 234}, { \"name\": \"***\", \"f2\": 345} ]")
         ),
         Arguments.of(
-            TARGET_FIELDS,
+            FIELDS_SELECTOR,
             parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
             parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"***\"}}")
         ),
         Arguments.of(
-            List.of(),
+            (FieldsSelector) (fieldName -> true),
             parse("{ \"outer\": { \"f1\": \"v1\", \"f2\": \"v2\", \"inner\" : {\"if1\": \"iv1\"}}}"),
             parse("{ \"outer\": { \"f1\": \"***\", \"f2\": \"***\", \"inner\" : {\"if1\": \"***\"}}}")
         )

@@ -62,7 +62,7 @@ class ReplaceTest {
       "null, ***"
   })
   void testApplyToString(String original, String expected) {
-    var policy = new Replace(List.of(), REPLACEMENT_STRING);
+    var policy = new Replace(fieldName -> true, REPLACEMENT_STRING);
     assertThat(policy.applyToString(original)).isEqualTo(expected);
   }
 }
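Across the three policy tests the shape is uniform: each policy is constructed with a FieldsSelector plus policy-specific arguments and exposes applyToJsonContainer and applyToString. A hedged usage sketch built only from the constructors shown above (the PoliciesUsageSketch wrapper and the "password" field name are illustrative):

import com.fasterxml.jackson.databind.node.ContainerNode;
import java.util.List;

class PoliciesUsageSketch {

  static void demo(ContainerNode<?> json) {
    FieldsSelector selector = fieldName -> fieldName.equals("password");
    var mask = new Mask(selector, List.of("X", "x", "n", "-"));
    var remove = new Remove(selector);
    var replace = new Replace(selector, "***");
    // Same input, three redaction strategies:
    var masked = mask.applyToJsonContainer(json);      // "secret1" -> "xxxxxxn"
    var removed = remove.applyToJsonContainer(json);   // the field is dropped entirely
    var replaced = replace.applyToJsonContainer(json); // "secret1" -> "***"
  }
}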
1 kafka-ui-api/src/test/resources/fileForUploadTest.txt Normal file

@@ -0,0 +1 @@
some content goes here
@@ -101,9 +101,6 @@
               <useSpringBoot3>true</useSpringBoot3>
               <dateLibrary>java8</dateLibrary>
             </configOptions>
-            <typeMappings>
-              <mapping>filepart=org.springframework.http.codec.multipart.FilePart</mapping>
-            </typeMappings>
           </configuration>
         </execution>
         <execution>
@@ -1730,6 +1730,125 @@ paths:
         404:
           description: Not found
 
+  /api/clusters/{clusterName}/acls:
+    get:
+      tags:
+        - Acls
+      summary: listKafkaAcls
+      operationId: listAcls
+      parameters:
+        - name: clusterName
+          in: path
+          required: true
+          schema:
+            type: string
+        - name: resourceType
+          in: query
+          required: false
+          schema:
+            $ref: '#/components/schemas/KafkaAclResourceType'
+        - name: resourceName
+          in: query
+          required: false
+          schema:
+            type: string
+        - name: namePatternType
+          in: query
+          required: false
+          schema:
+            $ref: '#/components/schemas/KafkaAclNamePatternType'
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: '#/components/schemas/KafkaAcl'
+
+  /api/clusters/{clusterName}/acl/csv:
+    get:
+      tags:
+        - Acls
+      summary: getAclAsCsv
+      operationId: getAclAsCsv
+      parameters:
+        - name: clusterName
+          in: path
+          required: true
+          schema:
+            type: string
+      responses:
+        200:
+          description: OK
+          content:
+            text/plain:
+              schema:
+                type: string
+    post:
+      tags:
+        - Acls
+      summary: syncAclsCsv
+      operationId: syncAclsCsv
+      parameters:
+        - name: clusterName
+          in: path
+          required: true
+          schema:
+            type: string
+      requestBody:
+        content:
+          text/plain:
+            schema:
+              type: string
+      responses:
+        200:
+          description: OK
+
+  /api/clusters/{clusterName}/acl:
+    post:
+      tags:
+        - Acls
+      summary: createAcl
+      operationId: createAcl
+      parameters:
+        - name: clusterName
+          in: path
+          required: true
+          schema:
+            type: string
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/KafkaAcl'
+      responses:
+        200:
+          description: OK
+
+    delete:
+      tags:
+        - Acls
+      summary: deleteAcl
+      operationId: deleteAcl
+      parameters:
+        - name: clusterName
+          in: path
+          required: true
+          schema:
+            type: string
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/KafkaAcl'
+      responses:
+        200:
+          description: OK
+        404:
+          description: Acl not found
+
   /api/authorization:
     get:
       tags:

@@ -1819,7 +1938,7 @@ paths:
             properties:
               file:
                 type: string
-                format: filepart
+                format: binary
       responses:
         200:
           description: OK

@@ -1972,6 +2091,8 @@ components:
           - KAFKA_CONNECT
           - KSQL_DB
           - TOPIC_DELETION
+          - KAFKA_ACL_VIEW # get ACLs listing
+          - KAFKA_ACL_EDIT # create & delete ACLs
       required:
         - id
         - name

@@ -3342,6 +3463,62 @@ components:
         - SCHEMA
         - CONNECT
         - KSQL
+        - ACL
+
+    KafkaAcl:
+      type: object
+      required: [resourceType, resourceName, namePatternType, principal, host, operation, permission]
+      properties:
+        resourceType:
+          $ref: '#/components/schemas/KafkaAclResourceType'
+        resourceName:
+          type: string # "*" if acl can be applied to any resource of given type
+        namePatternType:
+          $ref: '#/components/schemas/KafkaAclNamePatternType'
+        principal:
+          type: string
+        host:
+          type: string # "*" if acl can be applied to any host
+        operation:
+          type: string
+          enum:
+            - UNKNOWN # Unknown operation, need to update mapping code on BE
+            - ALL # Cluster, Topic, Group
+            - READ # Topic, Group
+            - WRITE # Topic, TransactionalId
+            - CREATE # Cluster, Topic
+            - DELETE # Topic, Group
+            - ALTER # Cluster, Topic
+            - DESCRIBE # Cluster, Topic, Group, TransactionalId, DelegationToken
+            - CLUSTER_ACTION # Cluster
+            - DESCRIBE_CONFIGS # Cluster, Topic
+            - ALTER_CONFIGS # Cluster, Topic
+            - IDEMPOTENT_WRITE # Cluster
+            - CREATE_TOKENS
+            - DESCRIBE_TOKENS
+        permission:
+          type: string
+          enum:
+            - ALLOW
+            - DENY
+
+    KafkaAclResourceType:
+      type: string
+      enum:
+        - UNKNOWN # Unknown resource type, need to update mapping code on BE
+        - TOPIC
+        - GROUP
+        - CLUSTER
+        - TRANSACTIONAL_ID
+        - DELEGATION_TOKEN
+        - USER
+
+    KafkaAclNamePatternType:
+      type: string
+      enum:
+        - MATCH
+        - LITERAL
+        - PREFIXED
 
     RestartRequest:
       type: object

@@ -3632,7 +3809,9 @@ components:
           type: array
           items:
             type: string
-        pattern:
+        fieldsNamePattern:
+          type: string
+        maskingCharsReplacement:
           type: array
           items:
             type: string
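A quick way to exercise the new CSV endpoints once they land; the base URL, the cluster name "local", and the use of Spring's WebClient are assumptions for illustration, not part of the spec:

import org.springframework.web.reactive.function.client.WebClient;

class AclApiSketch {

  public static void main(String[] args) {
    WebClient client = WebClient.create("http://localhost:8080"); // assumed UI address

    // Export current ACLs in the CSV format consumed by syncAclsCsv:
    String csv = client.get()
        .uri("/api/clusters/{clusterName}/acl/csv", "local")
        .retrieve()
        .bodyToMono(String.class)
        .block();
    System.out.println(csv);

    // Push the (possibly edited) CSV back; per AclsServiceTest,
    // bindings missing from the CSV are deleted on the cluster.
    client.post()
        .uri("/api/clusters/{clusterName}/acl/csv", "local")
        .header("Content-Type", "text/plain")
        .bodyValue(csv)
        .retrieve()
        .toBodilessEntity()
        .block();
  }
}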
@@ -29,72 +29,65 @@ public class SmokeBacklog extends BaseManualTest {
   }
 
   @Automation(state = TO_BE_AUTOMATED)
-  @Suite(id = KSQL_DB_SUITE_ID)
-  @QaseId(278)
+  @Suite(id = BROKERS_SUITE_ID)
+  @QaseId(331)
   @Test
   public void testCaseC() {
   }
 
-  @Automation(state = TO_BE_AUTOMATED)
-  @Suite(id = BROKERS_SUITE_ID)
-  @QaseId(331)
-  @Test
-  public void testCaseD() {
-  }
-
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = BROKERS_SUITE_ID)
   @QaseId(332)
   @Test
-  public void testCaseE() {
+  public void testCaseD() {
   }
 
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = TOPICS_PROFILE_SUITE_ID)
   @QaseId(335)
   @Test
-  public void testCaseF() {
+  public void testCaseE() {
   }
 
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = TOPICS_PROFILE_SUITE_ID)
   @QaseId(336)
   @Test
-  public void testCaseG() {
+  public void testCaseF() {
   }
 
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = TOPICS_PROFILE_SUITE_ID)
   @QaseId(343)
   @Test
-  public void testCaseH() {
+  public void testCaseG() {
   }
 
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = KSQL_DB_SUITE_ID)
   @QaseId(344)
   @Test
-  public void testCaseI() {
+  public void testCaseH() {
   }
 
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = SCHEMAS_SUITE_ID)
   @QaseId(345)
   @Test
-  public void testCaseJ() {
+  public void testCaseI() {
   }
 
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = SCHEMAS_SUITE_ID)
   @QaseId(346)
   @Test
-  public void testCaseK() {
+  public void testCaseJ() {
   }
 
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = TOPICS_PROFILE_SUITE_ID)
   @QaseId(347)
   @Test
-  public void testCaseL() {
+  public void testCaseK() {
   }
 }
@@ -1,6 +1,7 @@
 package com.provectus.kafka.ui.smokesuite.ksqldb;
 
+import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlMenuTabs.STREAMS;
 import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_STREAMS;
 import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_TABLES;
 import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
 import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;

@@ -80,8 +81,18 @@ public class KsqlDbTest extends BaseTest {
     softly.assertAll();
   }
 
-  @QaseId(86)
+  @QaseId(278)
   @Test(priority = 4)
+  public void checkShowStreamsRequestExecution() {
+    navigateToKsqlDbAndExecuteRequest(SHOW_STREAMS.getQuery());
+    SoftAssert softly = new SoftAssert();
+    softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
+    softly.assertTrue(ksqlQueryForm.getItemByName(DEFAULT_STREAM.getName()).isVisible(), "getItemByName()");
+    softly.assertAll();
+  }
+
+  @QaseId(86)
+  @Test(priority = 5)
   public void clearResultsForExecutedRequest() {
     navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
     SoftAssert softly = new SoftAssert();
@@ -1,4 +1,5 @@
 import styled from 'styled-components';
+import { Button } from 'components/common/Button/Button';
 
 export const DiffWrapper = styled.div`
   align-items: stretch;

@@ -81,3 +82,6 @@ export const DiffTile = styled.div`
 export const DiffVersionsSelect = styled.div`
   width: 0.625em;
 `;
+export const BackButton = styled(Button)`
+  margin: 10px 9px;
+`;
@@ -20,6 +20,7 @@ import useAppParams from 'lib/hooks/useAppParams';
 import PageHeading from 'components/common/PageHeading/PageHeading';
 
 import * as S from './Diff.styled';
+import { BackButton } from './Diff.styled';
 
 export interface DiffProps {
   versions: SchemaSubject[];

@@ -77,6 +78,13 @@ const Diff: React.FC<DiffProps> = ({ versions, areVersionsFetched }) => {
         backText="Schema Registry"
         backTo={clusterSchemasPath(clusterName)}
       />
+      <BackButton
+        buttonType="secondary"
+        buttonSize="S"
+        onClick={() => navigate(-1)}
+      >
+        Back
+      </BackButton>
       <S.Section>
         {areVersionsFetched ? (
           <S.DiffBox>
@@ -3,6 +3,7 @@ import Diff, { DiffProps } from 'components/Schemas/Diff/Diff';
 import { render, WithRoute } from 'lib/testHelpers';
 import { screen } from '@testing-library/react';
 import { clusterSchemaComparePath } from 'lib/paths';
+import userEvent from '@testing-library/user-event';
 
 import { versions } from './fixtures';
 

@@ -142,4 +143,24 @@ describe('Diff', () => {
       expect(select).toHaveTextContent(versions[0].version);
     });
   });
+
+  describe('Back button', () => {
+    beforeEach(() => {
+      setupComponent({
+        areVersionsFetched: true,
+        versions,
+      });
+    });
+
+    it('back button appears', () => {
+      const backButton = screen.getAllByRole('button', { name: 'Back' });
+      expect(backButton[0]).toBeInTheDocument();
+    });
+
+    it('click on back button', () => {
+      const backButton = screen.getAllByRole('button', { name: 'Back' });
+      userEvent.click(backButton[0]);
+      expect(screen.queryByRole('Back')).not.toBeInTheDocument();
+    });
+  });
 });