Merge branch 'master' into FE_ISSUE_754_acl

Ilya Kuramshin 2023-05-03 14:56:45 +04:00 committed by GitHub
commit 70d672bf59
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
53 changed files with 769 additions and 479 deletions

View file

@ -0,0 +1,59 @@
---
version: '2'
services:

  kafka-ui:
    container_name: kafka-ui
    image: provectuslabs/kafka-ui:latest
    ports:
      - 8080:8080
    depends_on:
      - zookeeper
      - kafka
    environment:
      KAFKA_CLUSTERS_0_NAME: local
      KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092
      KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SASL_PLAINTEXT
      KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM: PLAIN
      KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin-secret";'

  zookeeper:
    image: wurstmeister/zookeeper:3.4.6
    environment:
      JVMFLAGS: "-Djava.security.auth.login.config=/etc/zookeeper/zookeeper_jaas.conf"
    volumes:
      - ./jaas/zookeeper_jaas.conf:/etc/zookeeper/zookeeper_jaas.conf
    ports:
      - 2181:2181

  kafka:
    image: confluentinc/cp-kafka:7.2.1
    hostname: kafka
    container_name: kafka
    ports:
      - "9092:9092"
      - "9997:9997"
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,SASL_PLAINTEXT:SASL_PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
      KAFKA_ADVERTISED_LISTENERS: 'SASL_PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092'
      KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/jaas/kafka_server.conf"
      KAFKA_AUTHORIZER_CLASS_NAME: "kafka.security.authorizer.AclAuthorizer"
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
      KAFKA_JMX_PORT: 9997
      KAFKA_JMX_HOSTNAME: localhost
      KAFKA_NODE_ID: 1
      KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka:29093'
      KAFKA_LISTENERS: 'SASL_PLAINTEXT://kafka:29092,CONTROLLER://kafka:29093,PLAINTEXT_HOST://0.0.0.0:9092'
      KAFKA_INTER_BROKER_LISTENER_NAME: 'SASL_PLAINTEXT'
      KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
      KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: 'PLAIN'
      KAFKA_SECURITY_PROTOCOL: 'SASL_PLAINTEXT'
      KAFKA_SUPER_USERS: 'User:admin'
    volumes:
      - ./scripts/update_run.sh:/tmp/update_run.sh
      - ./jaas:/etc/kafka/jaas
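For reference, a client that talks to the SASL_PLAINTEXT listener defined above needs matching client-side properties. The following is a minimal sketch, not part of this commit: it assumes the code runs inside the compose network (so kafka:29092 is resolvable) and reuses the admin/admin-secret credentials from the kafka-ui JAAS line; the mounted kafka_server.conf is assumed to define the same user.

// Minimal sketch: AdminClient configured for the SASL_PLAINTEXT/PLAIN listener above.
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.common.config.SaslConfigs;

public class SaslAdminClientExample {
  public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka:29092");
    props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
    props.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
    props.put(SaslConfigs.SASL_JAAS_CONFIG,
        "org.apache.kafka.common.security.plain.PlainLoginModule required "
            + "username=\"admin\" password=\"admin-secret\";");
    try (AdminClient admin = AdminClient.create(props)) {
      // admin is listed in KAFKA_SUPER_USERS ('User:admin'), so listing topics is allowed
      System.out.println(admin.listTopics().names().get());
    }
  }
}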

View file

@ -131,8 +131,9 @@ public class ClustersProperties {
@Data
public static class Masking {
Type type;
List<String> fields; //if null or empty list - policy will be applied to all fields
List<String> pattern; //used when type=MASK
List<String> fields;
String fieldsNamePattern;
List<String> maskingCharsReplacement; //used when type=MASK
String replacement; //used when type=REPLACE
String topicKeysPattern;
String topicValuesPattern;

View file

@ -0,0 +1,26 @@
package com.provectus.kafka.ui.config.auth;
import lombok.Data;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
@ConfigurationProperties("spring.ldap")
@Data
public class LdapProperties {
private String urls;
private String base;
private String adminUser;
private String adminPassword;
private String userFilterSearchBase;
private String userFilterSearchFilter;
@Value("${oauth2.ldap.activeDirectory:false}")
private boolean isActiveDirectory;
@Value("${oauth2.ldap.aсtiveDirectory.domain:@null}")
private String activeDirectoryDomain;
@Value("${oauth2.ldap.groupRoleAttribute:cn}")
private String groupRoleAttribute;
}

View file

@ -1,13 +1,23 @@
package com.provectus.kafka.ui.config.auth;
import static com.provectus.kafka.ui.config.auth.AbstractAuthSecurityConfig.AUTH_WHITELIST;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import com.provectus.kafka.ui.service.rbac.extractor.RbacLdapAuthoritiesExtractor;
import java.util.Collection;
import java.util.List;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.ldap.LdapAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Primary;
import org.springframework.ldap.core.DirContextOperations;
import org.springframework.ldap.core.support.BaseLdapPathContextSource;
import org.springframework.ldap.core.support.LdapContextSource;
import org.springframework.security.authentication.AuthenticationManager;
@ -16,70 +26,71 @@ import org.springframework.security.authentication.ReactiveAuthenticationManager
import org.springframework.security.authentication.ReactiveAuthenticationManagerAdapter;
import org.springframework.security.config.annotation.web.reactive.EnableWebFluxSecurity;
import org.springframework.security.config.web.server.ServerHttpSecurity;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.ldap.authentication.AbstractLdapAuthenticationProvider;
import org.springframework.security.ldap.authentication.BindAuthenticator;
import org.springframework.security.ldap.authentication.LdapAuthenticationProvider;
import org.springframework.security.ldap.authentication.ad.ActiveDirectoryLdapAuthenticationProvider;
import org.springframework.security.ldap.search.FilterBasedLdapUserSearch;
import org.springframework.security.ldap.search.LdapUserSearch;
import org.springframework.security.ldap.userdetails.LdapUserDetailsMapper;
import org.springframework.security.web.server.SecurityWebFilterChain;
@Configuration
@EnableWebFluxSecurity
@ConditionalOnProperty(value = "auth.type", havingValue = "LDAP")
@Import(LdapAutoConfiguration.class)
@EnableConfigurationProperties(LdapProperties.class)
@RequiredArgsConstructor
@Slf4j
public class LdapSecurityConfig extends AbstractAuthSecurityConfig {
public class LdapSecurityConfig {
@Value("${spring.ldap.urls}")
private String ldapUrls;
@Value("${spring.ldap.dn.pattern:#{null}}")
private String ldapUserDnPattern;
@Value("${spring.ldap.adminUser:#{null}}")
private String adminUser;
@Value("${spring.ldap.adminPassword:#{null}}")
private String adminPassword;
@Value("${spring.ldap.userFilter.searchBase:#{null}}")
private String userFilterSearchBase;
@Value("${spring.ldap.userFilter.searchFilter:#{null}}")
private String userFilterSearchFilter;
@Value("${oauth2.ldap.activeDirectory:false}")
private boolean isActiveDirectory;
@Value("${oauth2.ldap.aсtiveDirectory.domain:#{null}}")
private String activeDirectoryDomain;
private final LdapProperties props;
@Bean
public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSource contextSource) {
public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSource contextSource,
ApplicationContext context,
@Nullable AccessControlService acs) {
var rbacEnabled = acs != null && acs.isRbacEnabled();
BindAuthenticator ba = new BindAuthenticator(contextSource);
if (ldapUserDnPattern != null) {
ba.setUserDnPatterns(new String[] {ldapUserDnPattern});
if (props.getBase() != null) {
ba.setUserDnPatterns(new String[] {props.getBase()});
}
if (userFilterSearchFilter != null) {
if (props.getUserFilterSearchFilter() != null) {
LdapUserSearch userSearch =
new FilterBasedLdapUserSearch(userFilterSearchBase, userFilterSearchFilter, contextSource);
new FilterBasedLdapUserSearch(props.getUserFilterSearchBase(), props.getUserFilterSearchFilter(),
contextSource);
ba.setUserSearch(userSearch);
}
AbstractLdapAuthenticationProvider authenticationProvider;
if (!isActiveDirectory) {
authenticationProvider = new LdapAuthenticationProvider(ba);
if (!props.isActiveDirectory()) {
authenticationProvider = rbacEnabled
? new LdapAuthenticationProvider(ba, new RbacLdapAuthoritiesExtractor(context))
: new LdapAuthenticationProvider(ba);
} else {
authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(activeDirectoryDomain, ldapUrls);
authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(props.getActiveDirectoryDomain(),
props.getUrls()); // TODO Issue #3741
authenticationProvider.setUseAuthenticationRequestCredentials(true);
}
if (rbacEnabled) {
authenticationProvider.setUserDetailsContextMapper(new UserDetailsMapper());
}
AuthenticationManager am = new ProviderManager(List.of(authenticationProvider));
return new ReactiveAuthenticationManagerAdapter(am);
}
@Bean
@Primary
public BaseLdapPathContextSource contextSource() {
LdapContextSource ctx = new LdapContextSource();
ctx.setUrl(ldapUrls);
ctx.setUserDn(adminUser);
ctx.setPassword(adminPassword);
ctx.setUrl(props.getUrls());
ctx.setUserDn(props.getAdminUser());
ctx.setPassword(props.getAdminPassword());
ctx.afterPropertiesSet();
return ctx;
}
@ -87,20 +98,35 @@ public class LdapSecurityConfig extends AbstractAuthSecurityConfig {
@Bean
public SecurityWebFilterChain configureLdap(ServerHttpSecurity http) {
log.info("Configuring LDAP authentication.");
if (isActiveDirectory) {
if (props.isActiveDirectory()) {
log.info("Active Directory support for LDAP has been enabled.");
}
http
return http
.authorizeExchange()
.pathMatchers(AUTH_WHITELIST)
.permitAll()
.anyExchange()
.authenticated()
.and()
.httpBasic();
return http.csrf().disable().build();
.and()
.formLogin()
.and()
.logout()
.and()
.csrf().disable()
.build();
}
private static class UserDetailsMapper extends LdapUserDetailsMapper {
@Override
public UserDetails mapUserFromContext(DirContextOperations ctx, String username,
Collection<? extends GrantedAuthority> authorities) {
UserDetails userDetails = super.mapUserFromContext(ctx, username, authorities);
return new RbacLdapUser(userDetails);
}
}
}
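The adapter used in authenticationManager() above is a general Spring Security bridge: any blocking AuthenticationManager can be exposed to the reactive stack via ReactiveAuthenticationManagerAdapter. A self-contained sketch of that pattern, with a purely illustrative in-memory user (not taken from this commit):

import java.util.List;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.ProviderManager;
import org.springframework.security.authentication.ReactiveAuthenticationManager;
import org.springframework.security.authentication.ReactiveAuthenticationManagerAdapter;
import org.springframework.security.authentication.dao.DaoAuthenticationProvider;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.provisioning.InMemoryUserDetailsManager;

class ReactiveAdapterSketch {

  // Build a blocking AuthenticationManager and wrap it for WebFlux, the same
  // bridge the LDAP config uses. The in-memory user is illustrative only.
  static ReactiveAuthenticationManager reactiveManager() {
    var provider = new DaoAuthenticationProvider();
    provider.setUserDetailsService(new InMemoryUserDetailsManager(
        User.withUsername("demo").password("{noop}demo").roles("USER").build()));
    AuthenticationManager blocking = new ProviderManager(List.of(provider));
    // The adapter offloads the blocking authenticate() call to a bounded elastic scheduler.
    return new ReactiveAuthenticationManagerAdapter(blocking);
  }
}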

View file

@ -115,7 +115,7 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
@Nullable
private ProviderAuthorityExtractor getExtractor(final String providerId, AccessControlService acs) {
final String provider = getProviderByProviderId(providerId);
Optional<ProviderAuthorityExtractor> extractor = acs.getExtractors()
Optional<ProviderAuthorityExtractor> extractor = acs.getOauthExtractors()
.stream()
.filter(e -> e.isApplicable(provider))
.findFirst();

View file

@ -0,0 +1,60 @@
package com.provectus.kafka.ui.config.auth;
import java.util.Collection;
import java.util.stream.Collectors;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
public class RbacLdapUser implements UserDetails, RbacUser {
private final UserDetails userDetails;
public RbacLdapUser(UserDetails userDetails) {
this.userDetails = userDetails;
}
@Override
public String name() {
return userDetails.getUsername();
}
@Override
public Collection<String> groups() {
return userDetails.getAuthorities().stream().map(GrantedAuthority::getAuthority).collect(Collectors.toSet());
}
@Override
public Collection<? extends GrantedAuthority> getAuthorities() {
return userDetails.getAuthorities();
}
@Override
public String getPassword() {
return userDetails.getPassword();
}
@Override
public String getUsername() {
return userDetails.getUsername();
}
@Override
public boolean isAccountNonExpired() {
return userDetails.isAccountNonExpired();
}
@Override
public boolean isAccountNonLocked() {
return userDetails.isAccountNonLocked();
}
@Override
public boolean isCredentialsNonExpired() {
return userDetails.isCredentialsNonExpired();
}
@Override
public boolean isEnabled() {
return userDetails.isEnabled();
}
}

View file

@ -0,0 +1,21 @@
package com.provectus.kafka.ui.config.auth.condition;
import org.springframework.boot.autoconfigure.condition.AllNestedConditions;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
public class ActiveDirectoryCondition extends AllNestedConditions {
public ActiveDirectoryCondition() {
super(ConfigurationPhase.PARSE_CONFIGURATION);
}
@ConditionalOnProperty(value = "auth.type", havingValue = "LDAP")
public static class OnAuthType {
}
@ConditionalOnProperty(value = "${oauth2.ldap.activeDirectory}:false", havingValue = "true", matchIfMissing = false)
public static class OnActiveDirectory {
}
}
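A composed condition like this is consumed with @Conditional on a bean or configuration class. The class below is a hypothetical usage sketch, not code from this commit:

import com.provectus.kafka.ui.config.auth.condition.ActiveDirectoryCondition;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;

// Hypothetical consumer of the condition above: beans in this class only exist
// when auth.type=LDAP and oauth2.ldap.activeDirectory=true are both set.
@Configuration
@Conditional(ActiveDirectoryCondition.class)
public class ActiveDirectoryOnlyConfig {

  @Bean
  public String activeDirectoryMarker() { // placeholder bean for illustration only
    return "active-directory-enabled";
  }
}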

View file

@ -27,6 +27,7 @@ import org.mapstruct.Mapper;
import org.mapstruct.factory.Mappers;
import org.springframework.http.ResponseEntity;
import org.springframework.http.codec.multipart.FilePart;
import org.springframework.http.codec.multipart.Part;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.server.ServerWebExchange;
import reactor.core.publisher.Flux;
@ -92,16 +93,19 @@ public class ApplicationConfigController implements ApplicationConfigApi {
}
@Override
public Mono<ResponseEntity<UploadedFileInfoDTO>> uploadConfigRelatedFile(FilePart file, ServerWebExchange exchange) {
public Mono<ResponseEntity<UploadedFileInfoDTO>> uploadConfigRelatedFile(Flux<Part> fileFlux,
ServerWebExchange exchange) {
return accessControlService
.validateAccess(
AccessContext.builder()
.applicationConfigActions(EDIT)
.build()
)
.then(dynamicConfigOperations.uploadConfigRelatedFile(file))
.map(path -> new UploadedFileInfoDTO().location(path.toString()))
.map(ResponseEntity::ok);
.then(fileFlux.single())
.flatMap(file ->
dynamicConfigOperations.uploadConfigRelatedFile((FilePart) file)
.map(path -> new UploadedFileInfoDTO().location(path.toString()))
.map(ResponseEntity::ok));
}
@Override

View file

@ -123,11 +123,11 @@ public class ConsumerRecordDeserializer {
}
private static Long getKeySize(ConsumerRecord<Bytes, Bytes> consumerRecord) {
return consumerRecord.key() != null ? (long) consumerRecord.key().get().length : null;
return consumerRecord.key() != null ? (long) consumerRecord.serializedKeySize() : null;
}
private static Long getValueSize(ConsumerRecord<Bytes, Bytes> consumerRecord) {
return consumerRecord.value() != null ? (long) consumerRecord.value().get().length : null;
return consumerRecord.value() != null ? (long) consumerRecord.serializedValueSize() : null;
}
private static int headerSize(Header header) {

View file

@ -122,8 +122,6 @@ public class SerdesInitializer {
registeredSerdes,
Optional.ofNullable(clusterProperties.getDefaultKeySerde())
.map(name -> Preconditions.checkNotNull(registeredSerdes.get(name), "Default key serde not found"))
.or(() -> Optional.ofNullable(registeredSerdes.get(SchemaRegistrySerde.name())))
.or(() -> Optional.ofNullable(registeredSerdes.get(ProtobufFileSerde.name())))
.orElse(null),
Optional.ofNullable(clusterProperties.getDefaultValueSerde())
.map(name -> Preconditions.checkNotNull(registeredSerdes.get(name), "Default value serde not found"))

View file

@ -109,6 +109,7 @@ public class KafkaConnectService {
private Stream<String> getStringsForSearch(FullConnectorInfoDTO fullConnectorInfo) {
return Stream.of(
fullConnectorInfo.getName(),
fullConnectorInfo.getConnect(),
fullConnectorInfo.getStatus().getState().getValue(),
fullConnectorInfo.getType().getValue());
}

View file

@ -43,8 +43,7 @@ class TopicAnalysisStats {
Long max;
final UpdateDoublesSketch sizeSketch = DoublesSketch.builder().build();
void apply(byte[] bytes) {
int len = bytes.length;
void apply(int len) {
sum += len;
min = minNullable(min, len);
max = maxNullable(max, len);
@ -98,7 +97,7 @@ class TopicAnalysisStats {
if (rec.key() != null) {
byte[] keyBytes = rec.key().get();
keysSize.apply(keyBytes);
keysSize.apply(rec.serializedKeySize());
uniqKeys.update(keyBytes);
} else {
nullKeys++;
@ -106,7 +105,7 @@ class TopicAnalysisStats {
if (rec.value() != null) {
byte[] valueBytes = rec.value().get();
valuesSize.apply(valueBytes);
valuesSize.apply(rec.serializedValueSize());
uniqValues.update(valueBytes);
} else {
nullValues++;

View file

@ -44,7 +44,7 @@ public class DataMasking {
public static DataMasking create(@Nullable List<ClustersProperties.Masking> config) {
return new DataMasking(
Optional.ofNullable(config).orElse(List.of()).stream().map(property -> {
Preconditions.checkNotNull(property.getType(), "masking type not specifed");
Preconditions.checkNotNull(property.getType(), "masking type not specified");
Preconditions.checkArgument(
StringUtils.isNotEmpty(property.getTopicKeysPattern())
|| StringUtils.isNotEmpty(property.getTopicValuesPattern()),

View file

@ -0,0 +1,28 @@
package com.provectus.kafka.ui.service.masking.policies;
import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.exception.ValidationException;
import java.util.regex.Pattern;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
interface FieldsSelector {
static FieldsSelector create(ClustersProperties.Masking property) {
if (StringUtils.hasText(property.getFieldsNamePattern()) && !CollectionUtils.isEmpty(property.getFields())) {
throw new ValidationException("You can't provide both fieldNames & fieldsNamePattern for masking");
}
if (StringUtils.hasText(property.getFieldsNamePattern())) {
Pattern pattern = Pattern.compile(property.getFieldsNamePattern());
return f -> pattern.matcher(f).matches();
}
if (!CollectionUtils.isEmpty(property.getFields())) {
return f -> property.getFields().contains(f);
}
//no pattern, no field names - means all fields should be masked
return fieldName -> true;
}
boolean shouldBeMasked(String fieldName);
}

View file

@ -15,8 +15,8 @@ class Mask extends MaskingPolicy {
private final UnaryOperator<String> masker;
Mask(List<String> fieldNames, List<String> maskingChars) {
super(fieldNames);
Mask(FieldsSelector fieldsSelector, List<String> maskingChars) {
super(fieldsSelector);
this.masker = createMasker(maskingChars);
}
@ -38,22 +38,13 @@ class Mask extends MaskingPolicy {
for (int i = 0; i < input.length(); i++) {
int cp = input.codePointAt(i);
switch (Character.getType(cp)) {
case Character.SPACE_SEPARATOR:
case Character.LINE_SEPARATOR:
case Character.PARAGRAPH_SEPARATOR:
sb.appendCodePoint(cp); // keeping separators as-is
break;
case Character.UPPERCASE_LETTER:
sb.append(maskingChars.get(0));
break;
case Character.LOWERCASE_LETTER:
sb.append(maskingChars.get(1));
break;
case Character.DECIMAL_DIGIT_NUMBER:
sb.append(maskingChars.get(2));
break;
default:
sb.append(maskingChars.get(3));
case Character.SPACE_SEPARATOR,
Character.LINE_SEPARATOR,
Character.PARAGRAPH_SEPARATOR -> sb.appendCodePoint(cp); // keeping separators as-is
case Character.UPPERCASE_LETTER -> sb.append(maskingChars.get(0));
case Character.LOWERCASE_LETTER -> sb.append(maskingChars.get(1));
case Character.DECIMAL_DIGIT_NUMBER -> sb.append(maskingChars.get(2));
default -> sb.append(maskingChars.get(3));
}
}
return sb.toString();
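The rewritten switch maps Unicode character classes to the four masking characters (by default X, x, n and - for upper case, lower case, digits and everything else). A standalone sketch of the same mapping, runnable outside the project and using the assumed default pattern:

// Standalone re-implementation of the character-class masking shown above.
public class MaskSketch {
  private static final String[] DEFAULT_PATTERN = {"X", "x", "n", "-"};

  static String mask(String input) {
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < input.length(); i++) {
      int cp = input.codePointAt(i);
      switch (Character.getType(cp)) {
        case Character.SPACE_SEPARATOR,
            Character.LINE_SEPARATOR,
            Character.PARAGRAPH_SEPARATOR -> sb.appendCodePoint(cp); // separators kept as-is
        case Character.UPPERCASE_LETTER -> sb.append(DEFAULT_PATTERN[0]);
        case Character.LOWERCASE_LETTER -> sb.append(DEFAULT_PATTERN[1]);
        case Character.DECIMAL_DIGIT_NUMBER -> sb.append(DEFAULT_PATTERN[2]);
        default -> sb.append(DEFAULT_PATTERN[3]);
      }
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    System.out.println(mask("Bond777!")); // prints Xxxxnnn-
  }
}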

View file

@ -2,46 +2,36 @@ package com.provectus.kafka.ui.service.masking.policies;
import com.fasterxml.jackson.databind.node.ContainerNode;
import com.provectus.kafka.ui.config.ClustersProperties;
import java.util.List;
import lombok.RequiredArgsConstructor;
@RequiredArgsConstructor
public abstract class MaskingPolicy {
public static MaskingPolicy create(ClustersProperties.Masking property) {
List<String> fields = property.getFields() == null
? List.of() // empty list means that policy will be applied to all fields
: property.getFields();
switch (property.getType()) {
case REMOVE:
return new Remove(fields);
case REPLACE:
return new Replace(
fields,
property.getReplacement() == null
? Replace.DEFAULT_REPLACEMENT
: property.getReplacement()
);
case MASK:
return new Mask(
fields,
property.getPattern() == null
? Mask.DEFAULT_PATTERN
: property.getPattern()
);
default:
throw new IllegalStateException("Unknown policy type: " + property.getType());
}
FieldsSelector fieldsSelector = FieldsSelector.create(property);
return switch (property.getType()) {
case REMOVE -> new Remove(fieldsSelector);
case REPLACE -> new Replace(
fieldsSelector,
property.getReplacement() == null
? Replace.DEFAULT_REPLACEMENT
: property.getReplacement()
);
case MASK -> new Mask(
fieldsSelector,
property.getMaskingCharsReplacement() == null
? Mask.DEFAULT_PATTERN
: property.getMaskingCharsReplacement()
);
};
}
//----------------------------------------------------------------
// empty list means policy will be applied to all fields
private final List<String> fieldNames;
private final FieldsSelector fieldsSelector;
protected boolean fieldShouldBeMasked(String fieldName) {
return fieldNames.isEmpty() || fieldNames.contains(fieldName);
return fieldsSelector.shouldBeMasked(fieldName);
}
public abstract ContainerNode<?> applyToJsonContainer(ContainerNode<?> node);

View file

@ -4,12 +4,12 @@ import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ContainerNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import java.util.List;
class Remove extends MaskingPolicy {
Remove(List<String> fieldNames) {
super(fieldNames);
Remove(FieldsSelector fieldsSelector) {
super(fieldsSelector);
}
@Override

View file

@ -6,7 +6,6 @@ import com.fasterxml.jackson.databind.node.ContainerNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.google.common.base.Preconditions;
import java.util.List;
class Replace extends MaskingPolicy {
@ -14,8 +13,8 @@ class Replace extends MaskingPolicy {
private final String replacement;
Replace(List<String> fieldNames, String replacementString) {
super(fieldNames);
Replace(FieldsSelector fieldsSelector, String replacementString) {
super(fieldsSelector);
this.replacement = Preconditions.checkNotNull(replacementString);
}

View file

@ -12,6 +12,7 @@ import com.provectus.kafka.ui.model.rbac.AccessContext;
import com.provectus.kafka.ui.model.rbac.Permission;
import com.provectus.kafka.ui.model.rbac.Resource;
import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.Subject;
import com.provectus.kafka.ui.model.rbac.permission.ConnectAction;
import com.provectus.kafka.ui.model.rbac.permission.ConsumerGroupAction;
import com.provectus.kafka.ui.model.rbac.permission.SchemaAction;
@ -19,11 +20,11 @@ import com.provectus.kafka.ui.model.rbac.permission.TopicAction;
import com.provectus.kafka.ui.service.rbac.extractor.CognitoAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.GithubAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.GoogleAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.LdapAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.ProviderAuthorityExtractor;
import jakarta.annotation.PostConstruct;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.Predicate;
import java.util.regex.Pattern;
@ -34,6 +35,7 @@ import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.core.env.Environment;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.core.context.ReactiveSecurityContextHolder;
import org.springframework.security.core.context.SecurityContext;
@ -50,10 +52,11 @@ public class AccessControlService {
@Nullable
private final InMemoryReactiveClientRegistrationRepository clientRegistrationRepository;
private final RoleBasedAccessControlProperties properties;
private final Environment environment;
private boolean rbacEnabled = false;
private Set<ProviderAuthorityExtractor> extractors = Collections.emptySet();
private final RoleBasedAccessControlProperties properties;
private Set<ProviderAuthorityExtractor> oauthExtractors = Collections.emptySet();
@PostConstruct
public void init() {
@ -63,21 +66,26 @@ public class AccessControlService {
}
rbacEnabled = true;
this.extractors = properties.getRoles()
this.oauthExtractors = properties.getRoles()
.stream()
.map(role -> role.getSubjects()
.stream()
.map(provider -> switch (provider.getProvider()) {
.map(Subject::getProvider)
.distinct()
.map(provider -> switch (provider) {
case OAUTH_COGNITO -> new CognitoAuthorityExtractor();
case OAUTH_GOOGLE -> new GoogleAuthorityExtractor();
case OAUTH_GITHUB -> new GithubAuthorityExtractor();
case LDAP, LDAP_AD -> new LdapAuthorityExtractor();
}).collect(Collectors.toSet()))
default -> null;
})
.filter(Objects::nonNull)
.collect(Collectors.toSet()))
.flatMap(Set::stream)
.collect(Collectors.toSet());
if ((clientRegistrationRepository == null || !clientRegistrationRepository.iterator().hasNext())
&& !properties.getRoles().isEmpty()) {
if (!properties.getRoles().isEmpty()
&& "oauth2".equalsIgnoreCase(environment.getProperty("auth.type"))
&& (clientRegistrationRepository == null || !clientRegistrationRepository.iterator().hasNext())) {
log.error("Roles are configured but no authentication methods are present. Authentication might fail.");
}
}
@ -354,8 +362,8 @@ public class AccessControlService {
return isAccessible(Resource.KSQL, null, user, context, requiredActions);
}
public Set<ProviderAuthorityExtractor> getExtractors() {
return extractors;
public Set<ProviderAuthorityExtractor> getOauthExtractors() {
return oauthExtractors;
}
public List<Role> getRoles() {

View file

@ -1,23 +0,0 @@
package com.provectus.kafka.ui.service.rbac.extractor;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import lombok.extern.slf4j.Slf4j;
import reactor.core.publisher.Mono;
@Slf4j
public class LdapAuthorityExtractor implements ProviderAuthorityExtractor {
@Override
public boolean isApplicable(String provider) {
return false; // TODO #2752
}
@Override
public Mono<Set<String>> extract(AccessControlService acs, Object value, Map<String, Object> additionalParams) {
return Mono.just(Collections.emptySet()); // TODO #2752
}
}

View file

@ -0,0 +1,70 @@
package com.provectus.kafka.ui.service.rbac.extractor;
import com.provectus.kafka.ui.config.auth.LdapProperties;
import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.provider.Provider;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.ApplicationContext;
import org.springframework.ldap.core.DirContextOperations;
import org.springframework.ldap.core.support.BaseLdapPathContextSource;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;
import org.springframework.util.Assert;
@Slf4j
public class RbacLdapAuthoritiesExtractor extends DefaultLdapAuthoritiesPopulator {
private final AccessControlService acs;
private final LdapProperties props;
private final Function<Map<String, List<String>>, GrantedAuthority> authorityMapper = (record) -> {
String role = record.get(getGroupRoleAttribute()).get(0);
return new SimpleGrantedAuthority(role);
};
public RbacLdapAuthoritiesExtractor(ApplicationContext context) {
super(context.getBean(BaseLdapPathContextSource.class), null);
this.acs = context.getBean(AccessControlService.class);
this.props = context.getBean(LdapProperties.class);
}
@Override
public Set<GrantedAuthority> getAdditionalRoles(DirContextOperations user, String username) {
return acs.getRoles()
.stream()
.map(Role::getSubjects)
.flatMap(List::stream)
.filter(s -> s.getProvider().equals(Provider.LDAP))
.filter(s -> s.getType().equals("group"))
.flatMap(subject -> getRoles(subject.getValue(), user.getNameInNamespace(), username).stream())
.collect(Collectors.toSet());
}
private Set<GrantedAuthority> getRoles(String groupSearchBase, String userDn, String username) {
Assert.notNull(groupSearchBase, "groupSearchBase is empty");
log.trace(
"Searching for roles for user [{}] with DN [{}], groupRoleAttribute [{}] and filter [{}] in search base [{}]",
username, userDn, props.getGroupRoleAttribute(), getGroupSearchFilter(), groupSearchBase);
var ldapTemplate = getLdapTemplate();
ldapTemplate.setIgnoreNameNotFoundException(true);
Set<Map<String, List<String>>> userRoles = ldapTemplate.searchForMultipleAttributeValues(
groupSearchBase, getGroupSearchFilter(), new String[] {userDn, username},
new String[] {props.getGroupRoleAttribute()});
return userRoles.stream()
.map(authorityMapper)
.peek(a -> log.debug("Mapped role [{}] for user [{}]", a, username))
.collect(Collectors.toSet());
}
}

View file

@ -90,6 +90,7 @@ public class DynamicConfigOperations {
}
public PropertiesStructure getCurrentProperties() {
checkIfDynamicConfigEnabled();
return PropertiesStructure.builder()
.kafka(getNullableBean(ClustersProperties.class))
.rbac(getNullableBean(RoleBasedAccessControlProperties.class))
@ -112,11 +113,7 @@ public class DynamicConfigOperations {
}
public void persist(PropertiesStructure properties) {
if (!dynamicConfigEnabled()) {
throw new ValidationException(
"Dynamic config change is not allowed. "
+ "Set dynamic.config.enabled property to 'true' to enabled it.");
}
checkIfDynamicConfigEnabled();
properties.initAndValidate();
String yaml = serializeToYaml(properties);
@ -124,8 +121,9 @@ public class DynamicConfigOperations {
}
public Mono<Path> uploadConfigRelatedFile(FilePart file) {
String targetDirStr = (String) ctx.getEnvironment().getSystemEnvironment()
.getOrDefault(CONFIG_RELATED_UPLOADS_DIR_PROPERTY, CONFIG_RELATED_UPLOADS_DIR_DEFAULT);
checkIfDynamicConfigEnabled();
String targetDirStr = ctx.getEnvironment()
.getProperty(CONFIG_RELATED_UPLOADS_DIR_PROPERTY, CONFIG_RELATED_UPLOADS_DIR_DEFAULT);
Path targetDir = Path.of(targetDirStr);
if (!Files.exists(targetDir)) {
@ -149,6 +147,14 @@ public class DynamicConfigOperations {
.onErrorMap(th -> new FileUploadException(targetFilePath, th));
}
private void checkIfDynamicConfigEnabled() {
if (!dynamicConfigEnabled()) {
throw new ValidationException(
"Dynamic config change is not allowed. "
+ "Set dynamic.config.enabled property to 'true' to enabled it.");
}
}
@SneakyThrows
private void writeYamlToFile(String yaml, Path path) {
if (Files.isDirectory(path)) {

View file

@ -2,6 +2,7 @@ package com.provectus.kafka.ui;
import com.provectus.kafka.ui.container.KafkaConnectContainer;
import com.provectus.kafka.ui.container.SchemaRegistryContainer;
import java.nio.file.Path;
import java.util.List;
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
@ -9,6 +10,7 @@ import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.function.ThrowingConsumer;
import org.junit.jupiter.api.io.TempDir;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient;
import org.springframework.boot.test.context.SpringBootTest;
@ -47,6 +49,9 @@ public abstract class AbstractIntegrationTest {
.dependsOn(kafka)
.dependsOn(schemaRegistry);
@TempDir
public static Path tmpDir;
static {
kafka.start();
schemaRegistry.start();
@ -76,6 +81,9 @@ public abstract class AbstractIntegrationTest {
System.setProperty("kafka.clusters.1.schemaRegistry", schemaRegistry.getUrl());
System.setProperty("kafka.clusters.1.kafkaConnect.0.name", "kafka-connect");
System.setProperty("kafka.clusters.1.kafkaConnect.0.address", kafkaConnect.getTarget());
System.setProperty("dynamic.config.enabled", "true");
System.setProperty("config.related.uploads.dir", tmpDir.toString());
}
}

View file

@ -0,0 +1,49 @@
package com.provectus.kafka.ui.controller;
import static org.assertj.core.api.Assertions.assertThat;
import com.provectus.kafka.ui.AbstractIntegrationTest;
import com.provectus.kafka.ui.model.UploadedFileInfoDTO;
import java.io.IOException;
import java.nio.file.Path;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.ClassPathResource;
import org.springframework.http.HttpEntity;
import org.springframework.http.client.MultipartBodyBuilder;
import org.springframework.test.web.reactive.server.WebTestClient;
import org.springframework.util.MultiValueMap;
class ApplicationConfigControllerTest extends AbstractIntegrationTest {
@Autowired
private WebTestClient webTestClient;
@Test
public void testUpload() throws IOException {
var fileToUpload = new ClassPathResource("/fileForUploadTest.txt", this.getClass());
UploadedFileInfoDTO result = webTestClient
.post()
.uri("/api/config/relatedfiles")
.bodyValue(generateBody(fileToUpload))
.exchange()
.expectStatus()
.isOk()
.expectBody(UploadedFileInfoDTO.class)
.returnResult()
.getResponseBody();
assertThat(result).isNotNull();
assertThat(result.getLocation()).isNotNull();
assertThat(Path.of(result.getLocation()))
.hasSameBinaryContentAs(fileToUpload.getFile().toPath());
}
private MultiValueMap<String, HttpEntity<?>> generateBody(ClassPathResource resource) {
MultipartBodyBuilder builder = new MultipartBodyBuilder();
builder.part("file", resource);
return builder.build();
}
}

View file

@ -0,0 +1,53 @@
package com.provectus.kafka.ui.service.masking.policies;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.exception.ValidationException;
import java.util.List;
import org.junit.jupiter.api.Test;
class FieldsSelectorTest {
@Test
void selectsFieldsDueToProvidedPattern() {
var properties = new ClustersProperties.Masking();
properties.setFieldsNamePattern("f1|f2");
var selector = FieldsSelector.create(properties);
assertThat(selector.shouldBeMasked("f1")).isTrue();
assertThat(selector.shouldBeMasked("f2")).isTrue();
assertThat(selector.shouldBeMasked("doesNotMatchPattern")).isFalse();
}
@Test
void selectsFieldsDueToProvidedFieldNames() {
var properties = new ClustersProperties.Masking();
properties.setFields(List.of("f1", "f2"));
var selector = FieldsSelector.create(properties);
assertThat(selector.shouldBeMasked("f1")).isTrue();
assertThat(selector.shouldBeMasked("f2")).isTrue();
assertThat(selector.shouldBeMasked("notInAList")).isFalse();
}
@Test
void selectAllFieldsIfNoPatternAndNoNamesProvided() {
var properties = new ClustersProperties.Masking();
var selector = FieldsSelector.create(properties);
assertThat(selector.shouldBeMasked("anyPropertyName")).isTrue();
}
@Test
void throwsExceptionIfBothFieldListAndPatternProvided() {
var properties = new ClustersProperties.Masking();
properties.setFieldsNamePattern("f1|f2");
properties.setFields(List.of("f3", "f4"));
assertThatThrownBy(() -> FieldsSelector.create(properties))
.isInstanceOf(ValidationException.class);
}
}

View file

@ -15,35 +15,35 @@ import org.junit.jupiter.params.provider.MethodSource;
class MaskTest {
private static final List<String> TARGET_FIELDS = List.of("id", "name");
private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);
private static final List<String> PATTERN = List.of("X", "x", "n", "-");
@ParameterizedTest
@MethodSource
void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
Mask policy = new Mask(fields, PATTERN);
void testApplyToJsonContainer(FieldsSelector selector, ContainerNode<?> original, ContainerNode<?> expected) {
Mask policy = new Mask(selector, PATTERN);
assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
}
private static Stream<Arguments> testApplyToJsonContainer() {
return Stream.of(
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
parse("{ \"id\": \"nnn\", \"name\": { \"first\": \"Xxxxx\", \"surname\": \"Xxxxnnn-\"}}")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
parse("[{ \"id\": \"nnn\", \"f2\": 234}, { \"name\": \"n-n\", \"f2\": 345} ]")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Xxxxnnn-\"}}")
),
Arguments.of(
List.of(),
(FieldsSelector) (fieldName -> true),
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
parse("{ \"outer\": { \"f1\": \"Xxxxx\", \"name\": \"Xxxxnnn-\"}}")
)
@ -57,7 +57,7 @@ class MaskTest {
"null, xxxx"
})
void testApplyToString(String original, String expected) {
Mask policy = new Mask(List.of(), PATTERN);
Mask policy = new Mask(fieldName -> true, PATTERN);
assertThat(policy.applyToString(original)).isEqualTo(expected);
}

View file

@ -15,39 +15,39 @@ import org.junit.jupiter.params.provider.MethodSource;
class RemoveTest {
private static final List<String> TARGET_FIELDS = List.of("id", "name");
private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);
@ParameterizedTest
@MethodSource
void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
var policy = new Remove(fields);
void testApplyToJsonContainer(FieldsSelector fieldsSelector, ContainerNode<?> original, ContainerNode<?> expected) {
var policy = new Remove(fieldsSelector);
assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
}
private static Stream<Arguments> testApplyToJsonContainer() {
return Stream.of(
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
parse("{}")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
parse("[{ \"f2\": 234}, { \"f2\": 345} ]")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
parse("{ \"outer\": { \"f1\": \"James\"}}")
),
Arguments.of(
List.of(),
(FieldsSelector) (fieldName -> true),
parse("{ \"outer\": { \"f1\": \"v1\", \"f2\": \"v2\", \"inner\" : {\"if1\": \"iv1\"}}}"),
parse("{}")
),
Arguments.of(
List.of(),
(FieldsSelector) (fieldName -> true),
parse("[{ \"f1\": 123}, { \"f2\": \"1.2\"} ]"),
parse("[{}, {}]")
)
@ -66,7 +66,7 @@ class RemoveTest {
"null, null"
})
void testApplyToString(String original, String expected) {
var policy = new Remove(List.of());
var policy = new Remove(fieldName -> true);
assertThat(policy.applyToString(original)).isEqualTo(expected);
}
}
}

View file

@ -15,35 +15,35 @@ import org.junit.jupiter.params.provider.MethodSource;
class ReplaceTest {
private static final List<String> TARGET_FIELDS = List.of("id", "name");
private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);
private static final String REPLACEMENT_STRING = "***";
@ParameterizedTest
@MethodSource
void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
var policy = new Replace(fields, REPLACEMENT_STRING);
void testApplyToJsonContainer(FieldsSelector fieldsSelector, ContainerNode<?> original, ContainerNode<?> expected) {
var policy = new Replace(fieldsSelector, REPLACEMENT_STRING);
assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
}
private static Stream<Arguments> testApplyToJsonContainer() {
return Stream.of(
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
parse("{ \"id\": \"***\", \"name\": { \"first\": \"***\", \"surname\": \"***\"}}")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
parse("[{ \"id\": \"***\", \"f2\": 234}, { \"name\": \"***\", \"f2\": 345} ]")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"***\"}}")
),
Arguments.of(
List.of(),
(FieldsSelector) (fieldName -> true),
parse("{ \"outer\": { \"f1\": \"v1\", \"f2\": \"v2\", \"inner\" : {\"if1\": \"iv1\"}}}"),
parse("{ \"outer\": { \"f1\": \"***\", \"f2\": \"***\", \"inner\" : {\"if1\": \"***\"}}}}")
)
@ -62,7 +62,7 @@ class ReplaceTest {
"null, ***"
})
void testApplyToString(String original, String expected) {
var policy = new Replace(List.of(), REPLACEMENT_STRING);
var policy = new Replace(fieldName -> true, REPLACEMENT_STRING);
assertThat(policy.applyToString(original)).isEqualTo(expected);
}
}
}

View file

@ -0,0 +1 @@
some content goes here

View file

@ -101,9 +101,6 @@
<useSpringBoot3>true</useSpringBoot3>
<dateLibrary>java8</dateLibrary>
</configOptions>
<typeMappings>
<mapping>filepart=org.springframework.http.codec.multipart.FilePart</mapping>
</typeMappings>
</configuration>
</execution>
<execution>

View file

@ -1938,7 +1938,7 @@ paths:
properties:
file:
type: string
format: filepart
format: binary
responses:
200:
description: OK
@ -3809,7 +3809,9 @@ components:
type: array
items:
type: string
pattern:
fieldsNamePattern:
type: string
maskingCharsReplacement:
type: array
items:
type: string

View file

@ -29,72 +29,65 @@ public class SmokeBacklog extends BaseManualTest {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = KSQL_DB_SUITE_ID)
@QaseId(278)
@Suite(id = BROKERS_SUITE_ID)
@QaseId(331)
@Test
public void testCaseC() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = BROKERS_SUITE_ID)
@QaseId(331)
@Test
public void testCaseD() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = BROKERS_SUITE_ID)
@QaseId(332)
@Test
public void testCaseE() {
public void testCaseD() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = TOPICS_PROFILE_SUITE_ID)
@QaseId(335)
@Test
public void testCaseF() {
public void testCaseE() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = TOPICS_PROFILE_SUITE_ID)
@QaseId(336)
@Test
public void testCaseG() {
public void testCaseF() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = TOPICS_PROFILE_SUITE_ID)
@QaseId(343)
@Test
public void testCaseH() {
public void testCaseG() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = KSQL_DB_SUITE_ID)
@QaseId(344)
@Test
public void testCaseI() {
public void testCaseH() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = SCHEMAS_SUITE_ID)
@QaseId(345)
@Test
public void testCaseJ() {
public void testCaseI() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = SCHEMAS_SUITE_ID)
@QaseId(346)
@Test
public void testCaseK() {
public void testCaseJ() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = TOPICS_PROFILE_SUITE_ID)
@QaseId(347)
@Test
public void testCaseL() {
public void testCaseK() {
}
}

View file

@ -1,6 +1,7 @@
package com.provectus.kafka.ui.smokesuite.ksqldb;
import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlMenuTabs.STREAMS;
import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_STREAMS;
import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_TABLES;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
@ -80,8 +81,18 @@ public class KsqlDbTest extends BaseTest {
softly.assertAll();
}
@QaseId(86)
@QaseId(278)
@Test(priority = 4)
public void checkShowStreamsRequestExecution() {
navigateToKsqlDbAndExecuteRequest(SHOW_STREAMS.getQuery());
SoftAssert softly = new SoftAssert();
softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
softly.assertTrue(ksqlQueryForm.getItemByName(DEFAULT_STREAM.getName()).isVisible(), "getItemByName()");
softly.assertAll();
}
@QaseId(86)
@Test(priority = 5)
public void clearResultsForExecutedRequest() {
navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
SoftAssert softly = new SoftAssert();

View file

@ -1,4 +1,5 @@
import styled from 'styled-components';
import { Button } from 'components/common/Button/Button';
export const DiffWrapper = styled.div`
align-items: stretch;
@ -81,3 +82,6 @@ export const DiffTile = styled.div`
export const DiffVersionsSelect = styled.div`
width: 0.625em;
`;
export const BackButton = styled(Button)`
margin: 10px 9px;
`;

View file

@ -20,6 +20,7 @@ import useAppParams from 'lib/hooks/useAppParams';
import PageHeading from 'components/common/PageHeading/PageHeading';
import * as S from './Diff.styled';
import { BackButton } from './Diff.styled';
export interface DiffProps {
versions: SchemaSubject[];
@ -77,6 +78,13 @@ const Diff: React.FC<DiffProps> = ({ versions, areVersionsFetched }) => {
backText="Schema Registry"
backTo={clusterSchemasPath(clusterName)}
/>
<BackButton
buttonType="secondary"
buttonSize="S"
onClick={() => navigate(-1)}
>
Back
</BackButton>
<S.Section>
{areVersionsFetched ? (
<S.DiffBox>

View file

@ -3,6 +3,7 @@ import Diff, { DiffProps } from 'components/Schemas/Diff/Diff';
import { render, WithRoute } from 'lib/testHelpers';
import { screen } from '@testing-library/react';
import { clusterSchemaComparePath } from 'lib/paths';
import userEvent from '@testing-library/user-event';
import { versions } from './fixtures';
@ -142,4 +143,24 @@ describe('Diff', () => {
expect(select).toHaveTextContent(versions[0].version);
});
});
describe('Back button', () => {
beforeEach(() => {
setupComponent({
areVersionsFetched: true,
versions,
});
});
it('back button appears', () => {
const backButton = screen.getAllByRole('button', { name: 'Back' });
expect(backButton[0]).toBeInTheDocument();
});
it('click on back button', () => {
const backButton = screen.getAllByRole('button', { name: 'Back' });
userEvent.click(backButton[0]);
expect(screen.queryByRole('Back')).not.toBeInTheDocument();
});
});
});

View file

@ -142,6 +142,8 @@ const Message: React.FC<Props> = ({
timestampType={timestampType}
keySize={keySize}
contentSize={valueSize}
keySerde={keySerde}
valueSerde={valueSerde}
/>
)}
</>

View file

@ -3,7 +3,6 @@ import EditorViewer from 'components/common/EditorViewer/EditorViewer';
import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted';
import { SchemaType, TopicMessageTimestampTypeEnum } from 'generated-sources';
import { formatTimestamp } from 'lib/dateTimeHelpers';
import { useSearchParams } from 'react-router-dom';
import * as S from './MessageContent.styled';
@ -17,6 +16,8 @@ export interface MessageContentProps {
timestampType?: TopicMessageTimestampTypeEnum;
keySize?: number;
contentSize?: number;
keySerde?: string;
valueSerde?: string;
}
const MessageContent: React.FC<MessageContentProps> = ({
@ -27,12 +28,10 @@ const MessageContent: React.FC<MessageContentProps> = ({
timestampType,
keySize,
contentSize,
keySerde,
valueSerde,
}) => {
const [activeTab, setActiveTab] = React.useState<Tab>('content');
const [searchParams] = useSearchParams();
const keyFormat = searchParams.get('keySerde') || '';
const valueFormat = searchParams.get('valueSerde') || '';
const activeTabContent = () => {
switch (activeTab) {
case 'content':
@ -110,7 +109,7 @@ const MessageContent: React.FC<MessageContentProps> = ({
<S.Metadata>
<S.MetadataLabel>Key Serde</S.MetadataLabel>
<span>
<S.MetadataValue>{keyFormat}</S.MetadataValue>
<S.MetadataValue>{keySerde}</S.MetadataValue>
<S.MetadataMeta>
Size: <BytesFormatted value={keySize} />
</S.MetadataMeta>
@ -120,7 +119,7 @@ const MessageContent: React.FC<MessageContentProps> = ({
<S.Metadata>
<S.MetadataLabel>Value Serde</S.MetadataLabel>
<span>
<S.MetadataValue>{valueFormat}</S.MetadataValue>
<S.MetadataValue>{valueSerde}</S.MetadataValue>
<S.MetadataMeta>
Size: <BytesFormatted value={contentSize} />
</S.MetadataMeta>

View file

@ -20,6 +20,8 @@ const setupWrapper = (props?: Partial<MessageContentProps>) => {
headers={{ header: 'test' }}
timestamp={new Date(0)}
timestampType={TopicMessageTimestampTypeEnum.CREATE_TIME}
keySerde="SchemaRegistry"
valueSerde="Avro"
{...props}
/>
</tbody>
@ -27,42 +29,20 @@ const setupWrapper = (props?: Partial<MessageContentProps>) => {
);
};
const proto =
'syntax = "proto3";\npackage com.provectus;\n\nmessage TestProtoRecord {\n string f1 = 1;\n int32 f2 = 2;\n}\n';
global.TextEncoder = TextEncoder;
const searchParamsContentAVRO = new URLSearchParams({
keySerde: 'SchemaRegistry',
valueSerde: 'AVRO',
limit: '100',
});
const searchParamsContentJSON = new URLSearchParams({
keySerde: 'SchemaRegistry',
valueSerde: 'JSON',
limit: '100',
});
const searchParamsContentPROTOBUF = new URLSearchParams({
keySerde: 'SchemaRegistry',
valueSerde: 'PROTOBUF',
limit: '100',
});
describe('MessageContent screen', () => {
beforeEach(() => {
render(setupWrapper(), {
initialEntries: [`/messages?${searchParamsContentAVRO}`],
});
render(setupWrapper());
});
describe('renders', () => {
it('key format in document', () => {
describe('Checking keySerde and valueSerde', () => {
it('keySerde in document', () => {
expect(screen.getByText('SchemaRegistry')).toBeInTheDocument();
});
it('content format in document', () => {
expect(screen.getByText('AVRO')).toBeInTheDocument();
it('valueSerde in document', () => {
expect(screen.getByText('Avro')).toBeInTheDocument();
});
});
@ -98,42 +78,3 @@ describe('MessageContent screen', () => {
});
});
});
describe('checking content type depend on message type', () => {
it('renders component with message having JSON type', () => {
render(
setupWrapper({
messageContent: '{"data": "test"}',
}),
{ initialEntries: [`/messages?${searchParamsContentJSON}`] }
);
expect(screen.getByText('JSON')).toBeInTheDocument();
});
it('renders component with message having AVRO type', () => {
render(
setupWrapper({
messageContent: '{"data": "test"}',
}),
{ initialEntries: [`/messages?${searchParamsContentAVRO}`] }
);
expect(screen.getByText('AVRO')).toBeInTheDocument();
});
it('renders component with message having PROTOBUF type', () => {
render(
setupWrapper({
messageContent: proto,
}),
{ initialEntries: [`/messages?${searchParamsContentPROTOBUF}`] }
);
expect(screen.getByText('PROTOBUF')).toBeInTheDocument();
});
it('renders component with message having no type which is equal to having PROTOBUF type', () => {
render(
setupWrapper({
messageContent: '',
}),
{ initialEntries: [`/messages?${searchParamsContentPROTOBUF}`] }
);
expect(screen.getByText('PROTOBUF')).toBeInTheDocument();
});
});

View file

@ -8,15 +8,29 @@ export const Wrapper = styled.div`
export const Columns = styled.div`
margin: -0.75rem;
margin-bottom: 0.75rem;
display: flex;
flex-direction: column;
padding: 0.75rem;
gap: 8px;
@media screen and (min-width: 769px) {
display: flex;
}
`;
export const Column = styled.div`
flex-basis: 0;
flex-grow: 1;
flex-shrink: 1;
padding: 0.75rem;
export const Flex = styled.div`
display: flex;
flex-direction: row;
gap: 8px;
@media screen and (max-width: 1200px) {
flex-direction: column;
}
`;
export const FlexItem = styled.div`
width: 18rem;
@media screen and (max-width: 1450px) {
width: 50%;
}
@media screen and (max-width: 1200px) {
width: 100%;
}
`;

View file

@ -4,6 +4,7 @@ import { RouteParamsClusterTopic } from 'lib/paths';
import { Button } from 'components/common/Button/Button';
import Editor from 'components/common/Editor/Editor';
import Select, { SelectOption } from 'components/common/Select/Select';
import Switch from 'components/common/Switch/Switch';
import useAppParams from 'lib/hooks/useAppParams';
import { showAlert } from 'lib/errorHandling';
import { useSendMessage, useTopicDetails } from 'lib/hooks/api/topics';
@ -26,9 +27,12 @@ interface FormType {
partition: number;
keySerde: string;
valueSerde: string;
keepContents: boolean;
}
const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
const SendMessage: React.FC<{ closeSidebar: () => void }> = ({
closeSidebar,
}) => {
const { clusterName, topicName } = useAppParams<RouteParamsClusterTopic>();
const { data: topic } = useTopicDetails({ clusterName, topicName });
const { data: serdes = {} } = useSerdes({
@ -47,11 +51,13 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
handleSubmit,
formState: { isSubmitting },
control,
setValue,
} = useForm<FormType>({
mode: 'onChange',
defaultValues: {
...defaultValues,
partition: Number(partitionOptions[0].value),
keepContents: false,
},
});
@ -62,6 +68,7 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
content,
headers,
partition,
keepContents,
}: FormType) => {
let errors: string[] = [];
@ -110,7 +117,11 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
keySerde,
valueSerde,
});
onSubmit();
if (!keepContents) {
setValue('key', '');
setValue('content', '');
closeSidebar();
}
} catch (e) {
// do nothing
}
@ -120,7 +131,7 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
<S.Wrapper>
<form onSubmit={handleSubmit(submit)}>
<S.Columns>
<S.Column>
<S.FlexItem>
<InputLabel>Partition</InputLabel>
<Controller
control={control}
@ -137,47 +148,58 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
/>
)}
/>
</S.Column>
<S.Column>
<InputLabel>Key Serde</InputLabel>
</S.FlexItem>
<S.Flex>
<S.FlexItem>
<InputLabel>Key Serde</InputLabel>
<Controller
control={control}
name="keySerde"
render={({ field: { name, onChange, value } }) => (
<Select
id="selectKeySerdeOptions"
aria-labelledby="selectKeySerdeOptions"
name={name}
onChange={onChange}
minWidth="100%"
options={getSerdeOptions(serdes.key || [])}
value={value}
/>
)}
/>
</S.FlexItem>
<S.FlexItem>
<InputLabel>Value Serde</InputLabel>
<Controller
control={control}
name="valueSerde"
render={({ field: { name, onChange, value } }) => (
<Select
id="selectValueSerdeOptions"
aria-labelledby="selectValueSerdeOptions"
name={name}
onChange={onChange}
minWidth="100%"
options={getSerdeOptions(serdes.value || [])}
value={value}
/>
)}
/>
</S.FlexItem>
</S.Flex>
<div>
<Controller
control={control}
name="keySerde"
name="keepContents"
render={({ field: { name, onChange, value } }) => (
<Select
id="selectKeySerdeOptions"
aria-labelledby="selectKeySerdeOptions"
name={name}
onChange={onChange}
minWidth="100%"
options={getSerdeOptions(serdes.key || [])}
value={value}
/>
<Switch name={name} onChange={onChange} checked={value} />
)}
/>
</S.Column>
<S.Column>
<InputLabel>Value Serde</InputLabel>
<Controller
control={control}
name="valueSerde"
render={({ field: { name, onChange, value } }) => (
<Select
id="selectValueSerdeOptions"
aria-labelledby="selectValueSerdeOptions"
name={name}
onChange={onChange}
minWidth="100%"
options={getSerdeOptions(serdes.value || [])}
value={value}
/>
)}
/>
</S.Column>
<InputLabel>Keep contents</InputLabel>
</div>
</S.Columns>
<S.Columns>
<S.Column>
<div>
<InputLabel>Key</InputLabel>
<Controller
control={control}
@ -191,8 +213,8 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
/>
)}
/>
</S.Column>
<S.Column>
</div>
<div>
<InputLabel>Value</InputLabel>
<Controller
control={control}
@ -206,10 +228,10 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
/>
)}
/>
</S.Column>
</div>
</S.Columns>
<S.Columns>
<S.Column>
<div>
<InputLabel>Headers</InputLabel>
<Controller
control={control}
@ -224,7 +246,7 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
/>
)}
/>
</S.Column>
</div>
</S.Columns>
<Button
buttonSize="M"

View file

@ -49,7 +49,7 @@ const renderComponent = async () => {
const path = clusterTopicPath(clusterName, topicName);
await render(
<WithRoute path={clusterTopicPath()}>
<SendMessage onSubmit={mockOnSubmit} />
<SendMessage closeSidebar={mockOnSubmit} />
</WithRoute>,
{ initialEntries: [path] }
);

View file

@ -236,7 +236,7 @@ const Topic: React.FC = () => {
title="Produce Message"
>
<Suspense fallback={<PageLoader />}>
<SendMessage onSubmit={closeSidebar} />
<SendMessage closeSidebar={closeSidebar} />
</Suspense>
</SlidingSidebar>
</>

View file

@ -1,52 +1,38 @@
import React from 'react';
import WarningIcon from 'components/common/Icons/WarningIcon';
import { gitCommitPath } from 'lib/paths';
import { useActuatorInfo } from 'lib/hooks/api/actuatorInfo';
import { BUILD_VERSION_PATTERN } from 'lib/constants';
import { useLatestVersion } from 'lib/hooks/api/latestVersion';
import { formatTimestamp } from 'lib/dateTimeHelpers';
import * as S from './Version.styled';
import compareVersions from './compareVersions';
const Version: React.FC = () => {
const { data: actuatorInfo = {} } = useActuatorInfo();
const { data: latestVersionInfo = {} } = useLatestVersion();
const tag = actuatorInfo?.build?.version;
const commit = actuatorInfo?.git?.commit.id;
const { tag_name: latestTag } = latestVersionInfo;
const outdated = compareVersions(tag, latestTag);
const currentVersion = tag?.match(BUILD_VERSION_PATTERN)
? tag
: formatTimestamp(actuatorInfo?.build?.time);
if (!tag) return null;
const { buildTime, commitId, isLatestRelease } = latestVersionInfo.build;
const { versionTag } = latestVersionInfo?.latestRelease || '';
return (
<S.Wrapper>
{!!outdated && (
{!isLatestRelease && (
<S.OutdatedWarning
title={`Your app version is outdated. Current latest version is ${latestTag}`}
title={`Your app version is outdated. Current latest version is ${versionTag}`}
>
<WarningIcon />
</S.OutdatedWarning>
)}
{commit && (
{commitId && (
<div>
<S.CurrentCommitLink
title="Current commit"
target="__blank"
href={gitCommitPath(commit)}
href={gitCommitPath(commitId)}
>
{commit}
{commitId}
</S.CurrentCommitLink>
</div>
)}
<S.CurrentVersion>{currentVersion}</S.CurrentVersion>
<S.CurrentVersion>{formatTimestamp(buildTime)}</S.CurrentVersion>
</S.Wrapper>
);
};

View file

@@ -2,87 +2,40 @@ import React from 'react';
import { screen } from '@testing-library/dom';
import Version from 'components/Version/Version';
import { render } from 'lib/testHelpers';
import { formatTimestamp } from 'lib/dateTimeHelpers';
import { useActuatorInfo } from 'lib/hooks/api/actuatorInfo';
import { useLatestVersion } from 'lib/hooks/api/latestVersion';
import { actuatorInfoPayload } from 'lib/fixtures/actuatorInfo';
import { latestVersionPayload } from 'lib/fixtures/latestVersion';
import {
deprecatedVersionPayload,
latestVersionPayload,
} from 'lib/fixtures/latestVersion';
jest.mock('lib/hooks/api/actuatorInfo', () => ({
useActuatorInfo: jest.fn(),
}));
jest.mock('lib/hooks/api/latestVersion', () => ({
useLatestVersion: jest.fn(),
}));
describe('Version Component', () => {
const versionTag = 'v0.5.0';
const snapshotTag = 'test-SNAPSHOT';
const commitTag = 'befd3b328e2c9c7df57b0c5746561b2f7fee8813';
const commitId = '96a577a';
const actuatorVersionPayload = actuatorInfoPayload(versionTag);
const formattedTimestamp = formatTimestamp(actuatorVersionPayload.build.time);
describe('render latest version', () => {
beforeEach(() => {
(useLatestVersion as jest.Mock).mockImplementation(() => ({
data: latestVersionPayload,
}));
});
it('renders latest release version as current version', async () => {
render(<Version />);
expect(screen.getByText(commitId)).toBeInTheDocument();
});
beforeEach(() => {
(useActuatorInfo as jest.Mock).mockImplementation(() => ({
data: actuatorVersionPayload,
}));
it('should not show warning icon if it is last release', async () => {
render(<Version />);
expect(screen.queryByRole('img')).not.toBeInTheDocument();
});
});
it('show warning icon if it is not last release', async () => {
(useLatestVersion as jest.Mock).mockImplementation(() => ({
data: latestVersionPayload,
data: deprecatedVersionPayload,
}));
});
describe('tag does not exist', () => {
it('does not render component', async () => {
(useActuatorInfo as jest.Mock).mockImplementation(() => ({
data: null,
}));
const { container } = render(<Version />);
expect(container.firstChild).toBeEmptyDOMElement();
});
});
describe('renders current version', () => {
it('renders release build version as current version', async () => {
render(<Version />);
expect(screen.getByText(versionTag)).toBeInTheDocument();
});
it('renders formatted timestamp as current version when version is commit', async () => {
(useActuatorInfo as jest.Mock).mockImplementation(() => ({
data: actuatorInfoPayload(commitTag),
}));
render(<Version />);
expect(screen.getByText(formattedTimestamp)).toBeInTheDocument();
});
it('renders formatted timestamp as current version when version contains -SNAPSHOT', async () => {
(useActuatorInfo as jest.Mock).mockImplementation(() => ({
data: actuatorInfoPayload(snapshotTag),
}));
render(<Version />);
expect(screen.getByText(formattedTimestamp)).toBeInTheDocument();
});
});
describe('outdated build version', () => {
it('renders warning message', async () => {
(useActuatorInfo as jest.Mock).mockImplementation(() => ({
data: actuatorInfoPayload('v0.3.0'),
}));
render(<Version />);
expect(
screen.getByTitle(
`Your app version is outdated. Current latest version is ${latestVersionPayload.tag_name}`
)
).toBeInTheDocument();
});
});
describe('current commit id with link', () => {
it('renders', async () => {
render(<Version />);
expect(
screen.getByText(actuatorVersionPayload.git.commit.id)
).toBeInTheDocument();
});
render(<Version />);
expect(screen.getByRole('img')).toBeInTheDocument();
});
});

View file

@@ -13,6 +13,7 @@ const WarningIcon: React.FC = () => {
return (
<WarningIconContainer>
<svg
role="img"
width="14"
height="13"
viewBox="0 0 14 13"

View file

@@ -6,7 +6,7 @@ export const Wrapper = styled.div<{ $open?: boolean }>(
position: fixed;
top: ${theme.layout.navBarHeight};
bottom: 0;
width: 60vw;
width: 37vw;
right: calc(${$open ? '0px' : theme.layout.rightSidebarWidth} * -1);
box-shadow: -1px 0px 10px 0px rgba(0, 0, 0, 0.2);
transition: right 0.3s linear;

View file

@@ -1,12 +0,0 @@
export const actuatorInfoPayload = (
version = 'befd3b328e2c9c7df57b0c5746561b2f7fee8813'
) => ({
git: { commit: { id: 'befd3b3' } },
build: {
artifact: 'kafka-ui-api',
name: 'kafka-ui-api',
time: '2022-09-15T09:52:21.753Z',
version,
group: 'com.provectus',
},
});

View file

@@ -1,3 +1,16 @@
export const latestVersionPayload = {
tag_name: 'v0.4.0',
export const deprecatedVersionPayload = {
build: {
buildTime: '2023-04-14T09:47:35.463Z',
commitId: '96a577a',
isLatestRelease: false,
version: '96a577a98c6069376c5d22ed49cffd3739f1bbdc',
},
};
export const latestVersionPayload = {
build: {
buildTime: '2023-04-14T09:47:35.463Z',
commitId: '96a577a',
isLatestRelease: true,
version: '96a577a98c6069376c5d22ed49cffd3739f1bbdc',
},
};
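For reference, a sketch of the response shape these fixtures model. The VersionInfo name is an assumption (the project may already ship a generated type for the /api/info endpoint), and the optional latestRelease.versionTag field is inferred from the Version component diff above, not from the fixture itself:

interface VersionInfo {
  build: {
    buildTime: string; // ISO-8601 timestamp rendered as the current version
    commitId: string; // short commit hash shown as a link
    isLatestRelease: boolean; // false triggers the outdated warning
    version: string; // full commit hash or release tag
  };
  latestRelease?: {
    versionTag?: string; // release tag interpolated into the warning title
  };
}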

View file

@@ -1,17 +0,0 @@
import fetchMock from 'fetch-mock';
import * as hooks from 'lib/hooks/api/actuatorInfo';
import { expectQueryWorks, renderQueryHook } from 'lib/testHelpers';
import { actuatorInfoPayload } from 'lib/fixtures/actuatorInfo';
const actuatorInfoPath = '/actuator/info';
describe('Actuator info hooks', () => {
beforeEach(() => fetchMock.restore());
describe('useActuatorInfo', () => {
it('returns the correct data', async () => {
const mock = fetchMock.getOnce(actuatorInfoPath, actuatorInfoPayload());
const { result } = renderQueryHook(() => hooks.useActuatorInfo());
await expectQueryWorks(mock, result);
});
});
});

View file

@@ -1,18 +1,16 @@
import fetchMock from 'fetch-mock';
import { expectQueryWorks, renderQueryHook } from 'lib/testHelpers';
import * as hooks from 'lib/hooks/api/latestVersion';
import { GIT_REPO_LATEST_RELEASE_LINK } from 'lib/constants';
import { latestVersionPayload } from 'lib/fixtures/latestVersion';
import { useLatestVersion } from 'lib/hooks/api/latestVersion';
const latestVersionPath = '/api/info';
describe('Latest version hooks', () => {
beforeEach(() => fetchMock.restore());
describe('useLatestVersion', () => {
it('returns the correct data', async () => {
const mock = fetchMock.getOnce(
GIT_REPO_LATEST_RELEASE_LINK,
latestVersionPayload
);
const { result } = renderQueryHook(() => hooks.useLatestVersion());
const mock = fetchMock.getOnce(latestVersionPath, latestVersionPayload);
const { result } = renderQueryHook(() => useLatestVersion());
await expectQueryWorks(mock, result);
});
});

View file

@@ -1,19 +0,0 @@
import { useQuery } from '@tanstack/react-query';
import { BASE_PARAMS, QUERY_REFETCH_OFF_OPTIONS } from 'lib/constants';
const fetchActuatorInfo = async () => {
const data = await fetch(
`${BASE_PARAMS.basePath}/actuator/info`,
BASE_PARAMS
).then((res) => res.json());
return data;
};
export function useActuatorInfo() {
return useQuery(
['actuatorInfo'],
fetchActuatorInfo,
QUERY_REFETCH_OFF_OPTIONS
);
}

View file

@@ -1,21 +1,19 @@
import { useQuery } from '@tanstack/react-query';
import {
QUERY_REFETCH_OFF_OPTIONS,
GIT_REPO_LATEST_RELEASE_LINK,
} from 'lib/constants';
import { BASE_PARAMS, QUERY_REFETCH_OFF_OPTIONS } from 'lib/constants';
const fetchLatestVersion = async () => {
const data = await fetch(GIT_REPO_LATEST_RELEASE_LINK).then((res) =>
res.json()
);
const fetchLatestVersionInfo = async () => {
const data = await fetch(
`${BASE_PARAMS.basePath}/api/info`,
BASE_PARAMS
).then((res) => res.json());
return data;
};
export function useLatestVersion() {
return useQuery(
['latestVersion'],
fetchLatestVersion,
['versionInfo'],
fetchLatestVersionInfo,
QUERY_REFETCH_OFF_OPTIONS
);
}
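A minimal consumer sketch of the reworked hook, assuming the fixture shape above; the VersionBadge component and its markup are hypothetical, and only useLatestVersion and the build.commitId / build.isLatestRelease fields come from the code in this diff:

import React from 'react';
import { useLatestVersion } from 'lib/hooks/api/latestVersion';

// Renders the short commit id and flags when a newer release exists.
const VersionBadge: React.FC = () => {
  const { data } = useLatestVersion();
  if (!data?.build) return null;
  const { commitId, isLatestRelease } = data.build;
  return (
    <span title={isLatestRelease ? 'Up to date' : 'Update available'}>
      {commitId}
    </span>
  );
};

export default VersionBadge;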