Merge branch 'master' into issues/checkShowStreamsRequestExecution2

VladSenyuta 2023-05-02 09:27:16 +03:00
commit db117f4e42
60 changed files with 969 additions and 631 deletions

.github/ISSUE_TEMPLATE/bug.yml vendored Normal file (92 changed lines)
View file

@ -0,0 +1,92 @@
name: "\U0001F41E Bug report"
description: File a bug report
labels: ["status/triage", "type/bug"]
assignees: []
body:
- type: markdown
attributes:
value: |
Hi, thanks for raising the issue(-s), all contributions really matter!
Please, note that we'll close the issue without further explanation if you don't follow
this template and don't provide the information requested within this template.
- type: checkboxes
id: terms
attributes:
label: Issue submitter TODO list
description: By you checking these checkboxes we can be sure you've done the essential things.
options:
- label: I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems)
required: true
- label: I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues)
required: true
- label: I've tried running `master`-labeled docker image and the issue still persists there
required: true
- label: I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md)
required: true
- type: textarea
attributes:
label: Describe the bug (actual behavior)
description: A clear and concise description of what the bug is. Use a list, if there is more than one problem
validations:
required: true
- type: textarea
attributes:
label: Expected behavior
description: A clear and concise description of what you expected to happen
validations:
required: false
- type: textarea
attributes:
label: Your installation details
description: |
How do you run the app? Please provide as much info as possible:
1. App version (commit hash in the top left corner of the UI)
2. Helm chart version, if you use one
3. Your application config. Please remove the sensitive info like passwords or API keys.
4. Any IAAC configs
validations:
required: true
- type: textarea
attributes:
label: Steps to reproduce
description: |
Please write down the order of the actions required to reproduce the issue.
For the advanced setups/complicated issue, we might need you to provide
a minimal [reproducible example](https://stackoverflow.com/help/minimal-reproducible-example).
validations:
required: true
- type: textarea
attributes:
label: Screenshots
description: |
If applicable, add screenshots to help explain your problem
validations:
required: false
- type: textarea
attributes:
label: Logs
description: |
If applicable, *upload* screenshots to help explain your problem
validations:
required: false
- type: textarea
attributes:
label: Additional context
description: |
Add any other context about the problem here. E.G.:
1. Are there any alternative scenarios (different data/methods/configuration/setup) you have tried?
Were they successful or the same issue occurred? Please provide steps as well.
2. Related issues (if there are any).
3. Logs (if available)
4. Is there any serious impact or behaviour on the end-user because of this issue, that can be overlooked?
validations:
required: false

View file

@ -1,64 +0,0 @@
---
name: "\U0001F41E Bug report"
about: Create a bug report
title: ''
labels: status/triage, type/bug
assignees: ''
---
<!--
We will close the issue without further explanation if you don't follow this template and don't provide the information requested within this template.
Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
https://github.com/provectus/kafka-ui/issues
https://github.com/provectus/kafka-ui/discussions
-->
<!--
Please follow the naming conventions for bugs:
<Feature/Area/Scope> : <Compact, but specific problem summary>
Avoid generic titles, like “Topics: incorrect layout of message sorting drop-down list”. Better use something like: “Topics: Message sorting drop-down list overlaps the "Submit" button”.
-->
**Describe the bug** (Actual behavior)
<!--(A clear and concise description of what the bug is. Use a list, if there is more than one problem)-->
**Expected behavior**
<!--(A clear and concise description of what you expected to happen.)-->
**Set up**
<!--
WE MIGHT CLOSE THE ISSUE without further explanation IF YOU DON'T PROVIDE THIS INFORMATION.
How do you run the app? Please provide as much info as possible:
1. App version (docker image version or check commit hash in the top left corner in UI)
2. Helm chart version, if you use one
3. Any IAAC configs
-->
**Steps to Reproduce**
<!-- We'd like you to provide an example setup (via docker-compose, helm, etc.)
to reproduce the problem, especially with a complex setups. -->
1.
**Screenshots**
<!--
(If applicable, add screenshots to help explain your problem)
-->
**Additional context**
<!--
Add any other context about the problem here. E.g.:
1. Are there any alternative scenarios (different data/methods/configuration/setup) you have tried?
Were they successful or the same issue occurred? Please provide steps as well.
2. Related issues (if there are any).
3. Logs (if available)
4. Is there any serious impact or behaviour on the end-user because of this issue, that can be overlooked?
-->

.github/ISSUE_TEMPLATE/feature.yml vendored Normal file (66 changed lines)
View file

@ -0,0 +1,66 @@
name: "\U0001F680 Feature request"
description: Propose a new feature
labels: ["status/triage", "type/feature"]
assignees: []
body:
- type: markdown
attributes:
value: |
Hi, thanks for raising the issue(-s), all contributions really matter!
Please, note that we'll close the issue without further explanation if you don't follow
this template and don't provide the information requested within this template.
- type: checkboxes
id: terms
attributes:
label: Issue submitter TODO list
description: By you checking these checkboxes we can be sure you've done the essential things.
options:
- label: I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues)
required: true
- label: I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) and the feature is not present there
required: true
- type: textarea
attributes:
label: Is your proposal related to a problem?
description: |
Provide a clear and concise description of what the problem is.
For example, "I'm always frustrated when..."
validations:
required: false
- type: textarea
attributes:
label: Describe the feature you're interested in
description: |
Provide a clear and concise description of what you want to happen.
validations:
required: true
- type: textarea
attributes:
label: Describe alternatives you've considered
description: |
Let us know about other solutions you've tried or researched.
validations:
required: false
- type: input
attributes:
label: Version you're running
description: |
Please provide the app version you're currently running:
1. App version (commit hash in the top left corner of the UI)
validations:
required: true
- type: textarea
attributes:
label: Additional context
description: |
Is there anything else you can add about the proposal?
You might want to link to related issues here, if you haven't already.
validations:
required: false

View file

@ -1,46 +0,0 @@
---
name: "\U0001F680 Feature request"
about: Propose a new feature
title: ''
labels: status/triage, type/feature
assignees: ''
---
<!--
Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
https://github.com/provectus/kafka-ui/issues
https://github.com/provectus/kafka-ui/discussions
-->
### Which version of the app are you running?
<!-- Please provide docker image version or check commit hash in the top left corner in UI) -->
### Is your proposal related to a problem?
<!--
Provide a clear and concise description of what the problem is.
For example, "I'm always frustrated when..."
-->
### Describe the solution you'd like
<!--
Provide a clear and concise description of what you want to happen.
-->
### Describe alternatives you've considered
<!--
Let us know about other solutions you've tried or researched.
-->
### Additional context
<!--
Is there anything else you can add about the proposal?
You might want to link to related issues here, if you haven't already.
-->

.github/ISSUE_TEMPLATE/helm.yml vendored Normal file (92 changed lines)
View file

@ -0,0 +1,92 @@
name: "⎈ K8s/Helm problem report"
description: "Report a problem with k8s/helm charts/etc"
labels: ["status/triage", "scope/k8s"]
assignees: []
body:
- type: markdown
attributes:
value: |
Hi, thanks for raising the issue(-s), all contributions really matter!
Please, note that we'll close the issue without further explanation if you don't follow
this template and don't provide the information requested within this template.
- type: checkboxes
id: terms
attributes:
label: Issue submitter TODO list
description: By you checking these checkboxes we can be sure you've done the essential things.
options:
- label: I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems)
required: true
- label: I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues)
required: true
- label: I've tried running `master`-labeled docker image and the issue still persists there
required: true
- label: I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md)
required: true
- type: textarea
attributes:
label: Describe the bug (actual behavior)
description: A clear and concise description of what the bug is. Use a list, if there is more than one problem
validations:
required: true
- type: textarea
attributes:
label: Expected behavior
description: A clear and concise description of what you expected to happen
validations:
required: false
- type: textarea
attributes:
label: Your installation details
description: |
How do you run the app? Please provide as much info as possible:
1. App version (commit hash in the top left corner of the UI)
2. Helm chart version
3. Your application config. Please remove the sensitive info like passwords or API keys.
4. Any IAAC configs
validations:
required: true
- type: textarea
attributes:
label: Steps to reproduce
description: |
Please write down the order of the actions required to reproduce the issue.
For the advanced setups/complicated issue, we might need you to provide
a minimal [reproducible example](https://stackoverflow.com/help/minimal-reproducible-example).
validations:
required: true
- type: textarea
attributes:
label: Screenshots
description: |
If applicable, add screenshots to help explain your problem
validations:
required: false
- type: textarea
attributes:
label: Logs
description: |
If applicable, *upload* screenshots to help explain your problem
validations:
required: false
- type: textarea
attributes:
label: Additional context
description: |
Add any other context about the problem here. E.G.:
1. Are there any alternative scenarios (different data/methods/configuration/setup) you have tried?
Were they successful or the same issue occurred? Please provide steps as well.
2. Related issues (if there are any).
3. Logs (if available)
4. Is there any serious impact or behaviour on the end-user because of this issue, that can be overlooked?
validations:
required: false

View file

@ -1,52 +0,0 @@
---
name: "⎈ K8s/Helm problem report"
about: Report a problem with k8s/helm charts/etc
title: ''
labels: scope/k8s, status/triage
assignees: azatsafin
---
<!--
Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
https://github.com/provectus/kafka-ui/issues
https://github.com/provectus/kafka-ui/discussions
-->
**Describe the bug**
<!--(A clear and concise description of what the bug is.)-->
**Set up**
<!--
How do you run the app? Please provide as much info as possible:
1. App version (docker image version or check commit hash in the top left corner in UI)
2. Helm chart version, if you use one
3. Any IAAC configs
We might close the issue without further explanation if you don't provide such information.
-->
**Steps to Reproduce**
Steps to reproduce the behavior:
1.
**Expected behavior**
<!--
(A clear and concise description of what you expected to happen)
-->
**Screenshots**
<!--
(If applicable, add screenshots to help explain your problem)
-->
**Additional context**
<!--
(Add any other context about the problem here)
-->

View file

@ -8,8 +8,6 @@ updates:
timezone: Europe/Moscow
reviewers:
- "Haarolean"
assignees:
- "Haarolean"
labels:
- "scope/backend"
- "type/dependencies"
@ -99,8 +97,6 @@ updates:
timezone: Europe/Moscow
reviewers:
- "Haarolean"
assignees:
- "Haarolean"
labels:
- "scope/infrastructure"
- "type/dependencies"

View file

@ -21,6 +21,12 @@
</properties>
<dependencies>
<dependency>
<!--TODO: remove, when spring-boot fixed dependency to 6.0.8+ (6.0.7 has CVE) -->
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>6.0.8</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-webflux</artifactId>
@ -109,6 +115,12 @@
<groupId>io.projectreactor.addons</groupId>
<artifactId>reactor-extra</artifactId>
</dependency>
<!-- https://github.com/provectus/kafka-ui/pull/3693 -->
<dependency>
<groupId>org.json</groupId>
<artifactId>json</artifactId>
<version>${org.json.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>

View file

@ -131,8 +131,9 @@ public class ClustersProperties {
@Data
public static class Masking {
Type type;
List<String> fields; //if null or empty list - policy will be applied to all fields
List<String> pattern; //used when type=MASK
List<String> fields;
String fieldsNamePattern;
List<String> maskingCharsReplacement; //used when type=MASK
String replacement; //used when type=REPLACE
String topicKeysPattern;
String topicValuesPattern;
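The masking block above renames `pattern` to `maskingCharsReplacement` and adds `fieldsNamePattern` next to `fields`. A minimal sketch of how these properties might be set, assuming the usual `kafka.clusters[*].masking` binding for `ClustersProperties`; the cluster name, topic patterns, and values are illustrative only:
```yaml
# Hypothetical cluster config using the renamed masking properties (example values only).
kafka:
  clusters:
    - name: local                        # assumed cluster name
      masking:
        - type: MASK
          fieldsNamePattern: "id|name"   # new: regex over field names
          maskingCharsReplacement: ["X", "x", "n", "-"]   # formerly 'pattern'
          topicValuesPattern: "orders-.*"
        - type: REPLACE
          fields: ["ssn", "card"]        # explicit field list still supported
          replacement: "***"
          topicKeysPattern: "users-.*"
```
Per the FieldsSelector added later in this diff, setting both `fields` and `fieldsNamePattern` on the same rule is rejected with a ValidationException, so each rule should use only one of the two.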

View file

@ -0,0 +1,26 @@
package com.provectus.kafka.ui.config.auth;
import lombok.Data;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
@ConfigurationProperties("spring.ldap")
@Data
public class LdapProperties {
private String urls;
private String base;
private String adminUser;
private String adminPassword;
private String userFilterSearchBase;
private String userFilterSearchFilter;
@Value("${oauth2.ldap.activeDirectory:false}")
private boolean isActiveDirectory;
@Value("${oauth2.ldap.aсtiveDirectory.domain:@null}")
private String activeDirectoryDomain;
@Value("${oauth2.ldap.groupRoleAttribute:cn}")
private String groupRoleAttribute;
}
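A rough sketch of the settings this class binds, using the flat field names above so Spring's relaxed binding applies; the host, DNs, and credentials are placeholders, and the `oauth2.ldap.*` keys come from the `@Value` defaults:
```yaml
# Hypothetical application.yml fragment for LdapProperties (placeholder values).
auth:
  type: LDAP                 # activates LdapSecurityConfig below
spring:
  ldap:
    urls: "ldap://ldap.example.com:389"
    base: "cn={0},ou=people,dc=example,dc=com"   # used as the user DN pattern
    adminUser: "cn=admin,dc=example,dc=com"
    adminPassword: "change-me"
    userFilterSearchBase: "ou=people,dc=example,dc=com"
    userFilterSearchFilter: "(uid={0})"
oauth2:
  ldap:
    activeDirectory: false
    groupRoleAttribute: "cn"
```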

View file

@ -1,13 +1,23 @@
package com.provectus.kafka.ui.config.auth;
import static com.provectus.kafka.ui.config.auth.AbstractAuthSecurityConfig.AUTH_WHITELIST;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import com.provectus.kafka.ui.service.rbac.extractor.RbacLdapAuthoritiesExtractor;
import java.util.Collection;
import java.util.List;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.ldap.LdapAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Primary;
import org.springframework.ldap.core.DirContextOperations;
import org.springframework.ldap.core.support.BaseLdapPathContextSource;
import org.springframework.ldap.core.support.LdapContextSource;
import org.springframework.security.authentication.AuthenticationManager;
@ -16,70 +26,71 @@ import org.springframework.security.authentication.ReactiveAuthenticationManager
import org.springframework.security.authentication.ReactiveAuthenticationManagerAdapter;
import org.springframework.security.config.annotation.web.reactive.EnableWebFluxSecurity;
import org.springframework.security.config.web.server.ServerHttpSecurity;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.ldap.authentication.AbstractLdapAuthenticationProvider;
import org.springframework.security.ldap.authentication.BindAuthenticator;
import org.springframework.security.ldap.authentication.LdapAuthenticationProvider;
import org.springframework.security.ldap.authentication.ad.ActiveDirectoryLdapAuthenticationProvider;
import org.springframework.security.ldap.search.FilterBasedLdapUserSearch;
import org.springframework.security.ldap.search.LdapUserSearch;
import org.springframework.security.ldap.userdetails.LdapUserDetailsMapper;
import org.springframework.security.web.server.SecurityWebFilterChain;
@Configuration
@EnableWebFluxSecurity
@ConditionalOnProperty(value = "auth.type", havingValue = "LDAP")
@Import(LdapAutoConfiguration.class)
@EnableConfigurationProperties(LdapProperties.class)
@RequiredArgsConstructor
@Slf4j
public class LdapSecurityConfig extends AbstractAuthSecurityConfig {
public class LdapSecurityConfig {
@Value("${spring.ldap.urls}")
private String ldapUrls;
@Value("${spring.ldap.dn.pattern:#{null}}")
private String ldapUserDnPattern;
@Value("${spring.ldap.adminUser:#{null}}")
private String adminUser;
@Value("${spring.ldap.adminPassword:#{null}}")
private String adminPassword;
@Value("${spring.ldap.userFilter.searchBase:#{null}}")
private String userFilterSearchBase;
@Value("${spring.ldap.userFilter.searchFilter:#{null}}")
private String userFilterSearchFilter;
@Value("${oauth2.ldap.activeDirectory:false}")
private boolean isActiveDirectory;
@Value("${oauth2.ldap.aсtiveDirectory.domain:#{null}}")
private String activeDirectoryDomain;
private final LdapProperties props;
@Bean
public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSource contextSource) {
public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSource contextSource,
ApplicationContext context,
@Nullable AccessControlService acs) {
var rbacEnabled = acs != null && acs.isRbacEnabled();
BindAuthenticator ba = new BindAuthenticator(contextSource);
if (ldapUserDnPattern != null) {
ba.setUserDnPatterns(new String[] {ldapUserDnPattern});
if (props.getBase() != null) {
ba.setUserDnPatterns(new String[] {props.getBase()});
}
if (userFilterSearchFilter != null) {
if (props.getUserFilterSearchFilter() != null) {
LdapUserSearch userSearch =
new FilterBasedLdapUserSearch(userFilterSearchBase, userFilterSearchFilter, contextSource);
new FilterBasedLdapUserSearch(props.getUserFilterSearchBase(), props.getUserFilterSearchFilter(),
contextSource);
ba.setUserSearch(userSearch);
}
AbstractLdapAuthenticationProvider authenticationProvider;
if (!isActiveDirectory) {
authenticationProvider = new LdapAuthenticationProvider(ba);
if (!props.isActiveDirectory()) {
authenticationProvider = rbacEnabled
? new LdapAuthenticationProvider(ba, new RbacLdapAuthoritiesExtractor(context))
: new LdapAuthenticationProvider(ba);
} else {
authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(activeDirectoryDomain, ldapUrls);
authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(props.getActiveDirectoryDomain(),
props.getUrls()); // TODO Issue #3741
authenticationProvider.setUseAuthenticationRequestCredentials(true);
}
if (rbacEnabled) {
authenticationProvider.setUserDetailsContextMapper(new UserDetailsMapper());
}
AuthenticationManager am = new ProviderManager(List.of(authenticationProvider));
return new ReactiveAuthenticationManagerAdapter(am);
}
@Bean
@Primary
public BaseLdapPathContextSource contextSource() {
LdapContextSource ctx = new LdapContextSource();
ctx.setUrl(ldapUrls);
ctx.setUserDn(adminUser);
ctx.setPassword(adminPassword);
ctx.setUrl(props.getUrls());
ctx.setUserDn(props.getAdminUser());
ctx.setPassword(props.getAdminPassword());
ctx.afterPropertiesSet();
return ctx;
}
@ -87,20 +98,35 @@ public class LdapSecurityConfig extends AbstractAuthSecurityConfig {
@Bean
public SecurityWebFilterChain configureLdap(ServerHttpSecurity http) {
log.info("Configuring LDAP authentication.");
if (isActiveDirectory) {
if (props.isActiveDirectory()) {
log.info("Active Directory support for LDAP has been enabled.");
}
http
return http
.authorizeExchange()
.pathMatchers(AUTH_WHITELIST)
.permitAll()
.anyExchange()
.authenticated()
.and()
.httpBasic();
return http.csrf().disable().build();
.and()
.formLogin()
.and()
.logout()
.and()
.csrf().disable()
.build();
}
private static class UserDetailsMapper extends LdapUserDetailsMapper {
@Override
public UserDetails mapUserFromContext(DirContextOperations ctx, String username,
Collection<? extends GrantedAuthority> authorities) {
UserDetails userDetails = super.mapUserFromContext(ctx, username, authorities);
return new RbacLdapUser(userDetails);
}
}
}

View file

@ -115,7 +115,7 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
@Nullable
private ProviderAuthorityExtractor getExtractor(final String providerId, AccessControlService acs) {
final String provider = getProviderByProviderId(providerId);
Optional<ProviderAuthorityExtractor> extractor = acs.getExtractors()
Optional<ProviderAuthorityExtractor> extractor = acs.getOauthExtractors()
.stream()
.filter(e -> e.isApplicable(provider))
.findFirst();

View file

@ -0,0 +1,60 @@
package com.provectus.kafka.ui.config.auth;
import java.util.Collection;
import java.util.stream.Collectors;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
public class RbacLdapUser implements UserDetails, RbacUser {
private final UserDetails userDetails;
public RbacLdapUser(UserDetails userDetails) {
this.userDetails = userDetails;
}
@Override
public String name() {
return userDetails.getUsername();
}
@Override
public Collection<String> groups() {
return userDetails.getAuthorities().stream().map(GrantedAuthority::getAuthority).collect(Collectors.toSet());
}
@Override
public Collection<? extends GrantedAuthority> getAuthorities() {
return userDetails.getAuthorities();
}
@Override
public String getPassword() {
return userDetails.getPassword();
}
@Override
public String getUsername() {
return userDetails.getUsername();
}
@Override
public boolean isAccountNonExpired() {
return userDetails.isAccountNonExpired();
}
@Override
public boolean isAccountNonLocked() {
return userDetails.isAccountNonLocked();
}
@Override
public boolean isCredentialsNonExpired() {
return userDetails.isCredentialsNonExpired();
}
@Override
public boolean isEnabled() {
return userDetails.isEnabled();
}
}

View file

@ -0,0 +1,21 @@
package com.provectus.kafka.ui.config.auth.condition;
import org.springframework.boot.autoconfigure.condition.AllNestedConditions;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
public class ActiveDirectoryCondition extends AllNestedConditions {
public ActiveDirectoryCondition() {
super(ConfigurationPhase.PARSE_CONFIGURATION);
}
@ConditionalOnProperty(value = "auth.type", havingValue = "LDAP")
public static class OnAuthType {
}
@ConditionalOnProperty(value = "${oauth2.ldap.activeDirectory}:false", havingValue = "true", matchIfMissing = false)
public static class OnActiveDirectory {
}
}

View file

@ -123,11 +123,11 @@ public class ConsumerRecordDeserializer {
}
private static Long getKeySize(ConsumerRecord<Bytes, Bytes> consumerRecord) {
return consumerRecord.key() != null ? (long) consumerRecord.key().get().length : null;
return consumerRecord.key() != null ? (long) consumerRecord.serializedKeySize() : null;
}
private static Long getValueSize(ConsumerRecord<Bytes, Bytes> consumerRecord) {
return consumerRecord.value() != null ? (long) consumerRecord.value().get().length : null;
return consumerRecord.value() != null ? (long) consumerRecord.serializedValueSize() : null;
}
private static int headerSize(Header header) {

View file

@ -122,8 +122,6 @@ public class SerdesInitializer {
registeredSerdes,
Optional.ofNullable(clusterProperties.getDefaultKeySerde())
.map(name -> Preconditions.checkNotNull(registeredSerdes.get(name), "Default key serde not found"))
.or(() -> Optional.ofNullable(registeredSerdes.get(SchemaRegistrySerde.name())))
.or(() -> Optional.ofNullable(registeredSerdes.get(ProtobufFileSerde.name())))
.orElse(null),
Optional.ofNullable(clusterProperties.getDefaultValueSerde())
.map(name -> Preconditions.checkNotNull(registeredSerdes.get(name), "Default value serde not found"))

View file

@ -109,6 +109,7 @@ public class KafkaConnectService {
private Stream<String> getStringsForSearch(FullConnectorInfoDTO fullConnectorInfo) {
return Stream.of(
fullConnectorInfo.getName(),
fullConnectorInfo.getConnect(),
fullConnectorInfo.getStatus().getState().getValue(),
fullConnectorInfo.getType().getValue());
}

View file

@ -43,8 +43,7 @@ class TopicAnalysisStats {
Long max;
final UpdateDoublesSketch sizeSketch = DoublesSketch.builder().build();
void apply(byte[] bytes) {
int len = bytes.length;
void apply(int len) {
sum += len;
min = minNullable(min, len);
max = maxNullable(max, len);
@ -98,7 +97,7 @@ class TopicAnalysisStats {
if (rec.key() != null) {
byte[] keyBytes = rec.key().get();
keysSize.apply(keyBytes);
keysSize.apply(rec.serializedKeySize());
uniqKeys.update(keyBytes);
} else {
nullKeys++;
@ -106,7 +105,7 @@ class TopicAnalysisStats {
if (rec.value() != null) {
byte[] valueBytes = rec.value().get();
valuesSize.apply(valueBytes);
valuesSize.apply(rec.serializedValueSize());
uniqValues.update(valueBytes);
} else {
nullValues++;

View file

@ -44,7 +44,7 @@ public class DataMasking {
public static DataMasking create(@Nullable List<ClustersProperties.Masking> config) {
return new DataMasking(
Optional.ofNullable(config).orElse(List.of()).stream().map(property -> {
Preconditions.checkNotNull(property.getType(), "masking type not specifed");
Preconditions.checkNotNull(property.getType(), "masking type not specified");
Preconditions.checkArgument(
StringUtils.isNotEmpty(property.getTopicKeysPattern())
|| StringUtils.isNotEmpty(property.getTopicValuesPattern()),

View file

@ -0,0 +1,28 @@
package com.provectus.kafka.ui.service.masking.policies;
import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.exception.ValidationException;
import java.util.regex.Pattern;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
interface FieldsSelector {
static FieldsSelector create(ClustersProperties.Masking property) {
if (StringUtils.hasText(property.getFieldsNamePattern()) && !CollectionUtils.isEmpty(property.getFields())) {
throw new ValidationException("You can't provide both fieldNames & fieldsNamePattern for masking");
}
if (StringUtils.hasText(property.getFieldsNamePattern())) {
Pattern pattern = Pattern.compile(property.getFieldsNamePattern());
return f -> pattern.matcher(f).matches();
}
if (!CollectionUtils.isEmpty(property.getFields())) {
return f -> property.getFields().contains(f);
}
//no pattern, no field names - mean all fields should be masked
return fieldName -> true;
}
boolean shouldBeMasked(String fieldName);
}

View file

@ -15,8 +15,8 @@ class Mask extends MaskingPolicy {
private final UnaryOperator<String> masker;
Mask(List<String> fieldNames, List<String> maskingChars) {
super(fieldNames);
Mask(FieldsSelector fieldsSelector, List<String> maskingChars) {
super(fieldsSelector);
this.masker = createMasker(maskingChars);
}
@ -38,22 +38,13 @@ class Mask extends MaskingPolicy {
for (int i = 0; i < input.length(); i++) {
int cp = input.codePointAt(i);
switch (Character.getType(cp)) {
case Character.SPACE_SEPARATOR:
case Character.LINE_SEPARATOR:
case Character.PARAGRAPH_SEPARATOR:
sb.appendCodePoint(cp); // keeping separators as-is
break;
case Character.UPPERCASE_LETTER:
sb.append(maskingChars.get(0));
break;
case Character.LOWERCASE_LETTER:
sb.append(maskingChars.get(1));
break;
case Character.DECIMAL_DIGIT_NUMBER:
sb.append(maskingChars.get(2));
break;
default:
sb.append(maskingChars.get(3));
case Character.SPACE_SEPARATOR,
Character.LINE_SEPARATOR,
Character.PARAGRAPH_SEPARATOR -> sb.appendCodePoint(cp); // keeping separators as-is
case Character.UPPERCASE_LETTER -> sb.append(maskingChars.get(0));
case Character.LOWERCASE_LETTER -> sb.append(maskingChars.get(1));
case Character.DECIMAL_DIGIT_NUMBER -> sb.append(maskingChars.get(2));
default -> sb.append(maskingChars.get(3));
}
}
return sb.toString();

View file

@ -2,46 +2,36 @@ package com.provectus.kafka.ui.service.masking.policies;
import com.fasterxml.jackson.databind.node.ContainerNode;
import com.provectus.kafka.ui.config.ClustersProperties;
import java.util.List;
import lombok.RequiredArgsConstructor;
@RequiredArgsConstructor
public abstract class MaskingPolicy {
public static MaskingPolicy create(ClustersProperties.Masking property) {
List<String> fields = property.getFields() == null
? List.of() // empty list means that policy will be applied to all fields
: property.getFields();
switch (property.getType()) {
case REMOVE:
return new Remove(fields);
case REPLACE:
return new Replace(
fields,
property.getReplacement() == null
? Replace.DEFAULT_REPLACEMENT
: property.getReplacement()
);
case MASK:
return new Mask(
fields,
property.getPattern() == null
? Mask.DEFAULT_PATTERN
: property.getPattern()
);
default:
throw new IllegalStateException("Unknown policy type: " + property.getType());
}
FieldsSelector fieldsSelector = FieldsSelector.create(property);
return switch (property.getType()) {
case REMOVE -> new Remove(fieldsSelector);
case REPLACE -> new Replace(
fieldsSelector,
property.getReplacement() == null
? Replace.DEFAULT_REPLACEMENT
: property.getReplacement()
);
case MASK -> new Mask(
fieldsSelector,
property.getMaskingCharsReplacement() == null
? Mask.DEFAULT_PATTERN
: property.getMaskingCharsReplacement()
);
};
}
//----------------------------------------------------------------
// empty list means policy will be applied to all fields
private final List<String> fieldNames;
private final FieldsSelector fieldsSelector;
protected boolean fieldShouldBeMasked(String fieldName) {
return fieldNames.isEmpty() || fieldNames.contains(fieldName);
return fieldsSelector.shouldBeMasked(fieldName);
}
public abstract ContainerNode<?> applyToJsonContainer(ContainerNode<?> node);

View file

@ -4,12 +4,12 @@ import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ContainerNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import java.util.List;
class Remove extends MaskingPolicy {
Remove(List<String> fieldNames) {
super(fieldNames);
Remove(FieldsSelector fieldsSelector) {
super(fieldsSelector);
}
@Override

View file

@ -6,7 +6,6 @@ import com.fasterxml.jackson.databind.node.ContainerNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.google.common.base.Preconditions;
import java.util.List;
class Replace extends MaskingPolicy {
@ -14,8 +13,8 @@ class Replace extends MaskingPolicy {
private final String replacement;
Replace(List<String> fieldNames, String replacementString) {
super(fieldNames);
Replace(FieldsSelector fieldsSelector, String replacementString) {
super(fieldsSelector);
this.replacement = Preconditions.checkNotNull(replacementString);
}

View file

@ -12,6 +12,7 @@ import com.provectus.kafka.ui.model.rbac.AccessContext;
import com.provectus.kafka.ui.model.rbac.Permission;
import com.provectus.kafka.ui.model.rbac.Resource;
import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.Subject;
import com.provectus.kafka.ui.model.rbac.permission.ConnectAction;
import com.provectus.kafka.ui.model.rbac.permission.ConsumerGroupAction;
import com.provectus.kafka.ui.model.rbac.permission.SchemaAction;
@ -19,11 +20,11 @@ import com.provectus.kafka.ui.model.rbac.permission.TopicAction;
import com.provectus.kafka.ui.service.rbac.extractor.CognitoAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.GithubAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.GoogleAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.LdapAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.ProviderAuthorityExtractor;
import jakarta.annotation.PostConstruct;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.Predicate;
import java.util.regex.Pattern;
@ -34,6 +35,7 @@ import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.core.env.Environment;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.core.context.ReactiveSecurityContextHolder;
import org.springframework.security.core.context.SecurityContext;
@ -50,10 +52,11 @@ public class AccessControlService {
@Nullable
private final InMemoryReactiveClientRegistrationRepository clientRegistrationRepository;
private final RoleBasedAccessControlProperties properties;
private final Environment environment;
private boolean rbacEnabled = false;
private Set<ProviderAuthorityExtractor> extractors = Collections.emptySet();
private final RoleBasedAccessControlProperties properties;
private Set<ProviderAuthorityExtractor> oauthExtractors = Collections.emptySet();
@PostConstruct
public void init() {
@ -63,21 +66,26 @@ public class AccessControlService {
}
rbacEnabled = true;
this.extractors = properties.getRoles()
this.oauthExtractors = properties.getRoles()
.stream()
.map(role -> role.getSubjects()
.stream()
.map(provider -> switch (provider.getProvider()) {
.map(Subject::getProvider)
.distinct()
.map(provider -> switch (provider) {
case OAUTH_COGNITO -> new CognitoAuthorityExtractor();
case OAUTH_GOOGLE -> new GoogleAuthorityExtractor();
case OAUTH_GITHUB -> new GithubAuthorityExtractor();
case LDAP, LDAP_AD -> new LdapAuthorityExtractor();
}).collect(Collectors.toSet()))
default -> null;
})
.filter(Objects::nonNull)
.collect(Collectors.toSet()))
.flatMap(Set::stream)
.collect(Collectors.toSet());
if ((clientRegistrationRepository == null || !clientRegistrationRepository.iterator().hasNext())
&& !properties.getRoles().isEmpty()) {
if (!properties.getRoles().isEmpty()
&& "oauth2".equalsIgnoreCase(environment.getProperty("auth.type"))
&& (clientRegistrationRepository == null || !clientRegistrationRepository.iterator().hasNext())) {
log.error("Roles are configured but no authentication methods are present. Authentication might fail.");
}
}
@ -354,8 +362,8 @@ public class AccessControlService {
return isAccessible(Resource.KSQL, null, user, context, requiredActions);
}
public Set<ProviderAuthorityExtractor> getExtractors() {
return extractors;
public Set<ProviderAuthorityExtractor> getOauthExtractors() {
return oauthExtractors;
}
public List<Role> getRoles() {

View file

@ -1,23 +0,0 @@
package com.provectus.kafka.ui.service.rbac.extractor;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import lombok.extern.slf4j.Slf4j;
import reactor.core.publisher.Mono;
@Slf4j
public class LdapAuthorityExtractor implements ProviderAuthorityExtractor {
@Override
public boolean isApplicable(String provider) {
return false; // TODO #2752
}
@Override
public Mono<Set<String>> extract(AccessControlService acs, Object value, Map<String, Object> additionalParams) {
return Mono.just(Collections.emptySet()); // TODO #2752
}
}

View file

@ -0,0 +1,70 @@
package com.provectus.kafka.ui.service.rbac.extractor;
import com.provectus.kafka.ui.config.auth.LdapProperties;
import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.provider.Provider;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.ApplicationContext;
import org.springframework.ldap.core.DirContextOperations;
import org.springframework.ldap.core.support.BaseLdapPathContextSource;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;
import org.springframework.util.Assert;
@Slf4j
public class RbacLdapAuthoritiesExtractor extends DefaultLdapAuthoritiesPopulator {
private final AccessControlService acs;
private final LdapProperties props;
private final Function<Map<String, List<String>>, GrantedAuthority> authorityMapper = (record) -> {
String role = record.get(getGroupRoleAttribute()).get(0);
return new SimpleGrantedAuthority(role);
};
public RbacLdapAuthoritiesExtractor(ApplicationContext context) {
super(context.getBean(BaseLdapPathContextSource.class), null);
this.acs = context.getBean(AccessControlService.class);
this.props = context.getBean(LdapProperties.class);
}
@Override
public Set<GrantedAuthority> getAdditionalRoles(DirContextOperations user, String username) {
return acs.getRoles()
.stream()
.map(Role::getSubjects)
.flatMap(List::stream)
.filter(s -> s.getProvider().equals(Provider.LDAP))
.filter(s -> s.getType().equals("group"))
.flatMap(subject -> getRoles(subject.getValue(), user.getNameInNamespace(), username).stream())
.collect(Collectors.toSet());
}
private Set<GrantedAuthority> getRoles(String groupSearchBase, String userDn, String username) {
Assert.notNull(groupSearchBase, "groupSearchBase is empty");
log.trace(
"Searching for roles for user [{}] with DN [{}], groupRoleAttribute [{}] and filter [{}] in search base [{}]",
username, userDn, props.getGroupRoleAttribute(), getGroupSearchFilter(), groupSearchBase);
var ldapTemplate = getLdapTemplate();
ldapTemplate.setIgnoreNameNotFoundException(true);
Set<Map<String, List<String>>> userRoles = ldapTemplate.searchForMultipleAttributeValues(
groupSearchBase, getGroupSearchFilter(), new String[] {userDn, username},
new String[] {props.getGroupRoleAttribute()});
return userRoles.stream()
.map(authorityMapper)
.peek(a -> log.debug("Mapped role [{}] for user [{}]", a, username))
.collect(Collectors.toSet());
}
}
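For orientation, a hedged sketch of the RBAC role shape this extractor consumes: it only considers subjects with provider `ldap` and type `group`, and uses the subject value as the group search base. The role name, group DN, and permissions below are illustrative and follow the documented `rbac.roles` layout:
```yaml
# Hypothetical RBAC config matched by RbacLdapAuthoritiesExtractor (illustrative values).
rbac:
  roles:
    - name: "admins"
      clusters:
        - local
      subjects:
        - provider: ldap
          type: group                            # only 'group' subjects are evaluated
          value: "ou=groups,dc=example,dc=com"   # passed to getRoles() as groupSearchBase
      permissions:
        - resource: topic
          value: ".*"
          actions: all
```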

View file

@ -0,0 +1,53 @@
package com.provectus.kafka.ui.service.masking.policies;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.exception.ValidationException;
import java.util.List;
import org.junit.jupiter.api.Test;
class FieldsSelectorTest {
@Test
void selectsFieldsDueToProvidedPattern() {
var properties = new ClustersProperties.Masking();
properties.setFieldsNamePattern("f1|f2");
var selector = FieldsSelector.create(properties);
assertThat(selector.shouldBeMasked("f1")).isTrue();
assertThat(selector.shouldBeMasked("f2")).isTrue();
assertThat(selector.shouldBeMasked("doesNotMatchPattern")).isFalse();
}
@Test
void selectsFieldsDueToProvidedFieldNames() {
var properties = new ClustersProperties.Masking();
properties.setFields(List.of("f1", "f2"));
var selector = FieldsSelector.create(properties);
assertThat(selector.shouldBeMasked("f1")).isTrue();
assertThat(selector.shouldBeMasked("f2")).isTrue();
assertThat(selector.shouldBeMasked("notInAList")).isFalse();
}
@Test
void selectAllFieldsIfNoPatternAndNoNamesProvided() {
var properties = new ClustersProperties.Masking();
var selector = FieldsSelector.create(properties);
assertThat(selector.shouldBeMasked("anyPropertyName")).isTrue();
}
@Test
void throwsExceptionIfBothFieldListAndPatternProvided() {
var properties = new ClustersProperties.Masking();
properties.setFieldsNamePattern("f1|f2");
properties.setFields(List.of("f3", "f4"));
assertThatThrownBy(() -> FieldsSelector.create(properties))
.isInstanceOf(ValidationException.class);
}
}

View file

@ -15,35 +15,35 @@ import org.junit.jupiter.params.provider.MethodSource;
class MaskTest {
private static final List<String> TARGET_FIELDS = List.of("id", "name");
private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);
private static final List<String> PATTERN = List.of("X", "x", "n", "-");
@ParameterizedTest
@MethodSource
void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
Mask policy = new Mask(fields, PATTERN);
void testApplyToJsonContainer(FieldsSelector selector, ContainerNode<?> original, ContainerNode<?> expected) {
Mask policy = new Mask(selector, PATTERN);
assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
}
private static Stream<Arguments> testApplyToJsonContainer() {
return Stream.of(
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
parse("{ \"id\": \"nnn\", \"name\": { \"first\": \"Xxxxx\", \"surname\": \"Xxxxnnn-\"}}")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
parse("[{ \"id\": \"nnn\", \"f2\": 234}, { \"name\": \"n-n\", \"f2\": 345} ]")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Xxxxnnn-\"}}")
),
Arguments.of(
List.of(),
(FieldsSelector) (fieldName -> true),
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
parse("{ \"outer\": { \"f1\": \"Xxxxx\", \"name\": \"Xxxxnnn-\"}}")
)
@ -57,7 +57,7 @@ class MaskTest {
"null, xxxx"
})
void testApplyToString(String original, String expected) {
Mask policy = new Mask(List.of(), PATTERN);
Mask policy = new Mask(fieldName -> true, PATTERN);
assertThat(policy.applyToString(original)).isEqualTo(expected);
}

View file

@ -15,39 +15,39 @@ import org.junit.jupiter.params.provider.MethodSource;
class RemoveTest {
private static final List<String> TARGET_FIELDS = List.of("id", "name");
private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);
@ParameterizedTest
@MethodSource
void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
var policy = new Remove(fields);
void testApplyToJsonContainer(FieldsSelector fieldsSelector, ContainerNode<?> original, ContainerNode<?> expected) {
var policy = new Remove(fieldsSelector);
assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
}
private static Stream<Arguments> testApplyToJsonContainer() {
return Stream.of(
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
parse("{}")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
parse("[{ \"f2\": 234}, { \"f2\": 345} ]")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
parse("{ \"outer\": { \"f1\": \"James\"}}")
),
Arguments.of(
List.of(),
(FieldsSelector) (fieldName -> true),
parse("{ \"outer\": { \"f1\": \"v1\", \"f2\": \"v2\", \"inner\" : {\"if1\": \"iv1\"}}}"),
parse("{}")
),
Arguments.of(
List.of(),
(FieldsSelector) (fieldName -> true),
parse("[{ \"f1\": 123}, { \"f2\": \"1.2\"} ]"),
parse("[{}, {}]")
)
@ -66,7 +66,7 @@ class RemoveTest {
"null, null"
})
void testApplyToString(String original, String expected) {
var policy = new Remove(List.of());
var policy = new Remove(fieldName -> true);
assertThat(policy.applyToString(original)).isEqualTo(expected);
}
}
}

View file

@ -15,35 +15,35 @@ import org.junit.jupiter.params.provider.MethodSource;
class ReplaceTest {
private static final List<String> TARGET_FIELDS = List.of("id", "name");
private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);
private static final String REPLACEMENT_STRING = "***";
@ParameterizedTest
@MethodSource
void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
var policy = new Replace(fields, REPLACEMENT_STRING);
void testApplyToJsonContainer(FieldsSelector fieldsSelector, ContainerNode<?> original, ContainerNode<?> expected) {
var policy = new Replace(fieldsSelector, REPLACEMENT_STRING);
assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
}
private static Stream<Arguments> testApplyToJsonContainer() {
return Stream.of(
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
parse("{ \"id\": \"***\", \"name\": { \"first\": \"***\", \"surname\": \"***\"}}")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
parse("[{ \"id\": \"***\", \"f2\": 234}, { \"name\": \"***\", \"f2\": 345} ]")
),
Arguments.of(
TARGET_FIELDS,
FIELDS_SELECTOR,
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"***\"}}")
),
Arguments.of(
List.of(),
(FieldsSelector) (fieldName -> true),
parse("{ \"outer\": { \"f1\": \"v1\", \"f2\": \"v2\", \"inner\" : {\"if1\": \"iv1\"}}}"),
parse("{ \"outer\": { \"f1\": \"***\", \"f2\": \"***\", \"inner\" : {\"if1\": \"***\"}}}}")
)
@ -62,7 +62,7 @@ class ReplaceTest {
"null, ***"
})
void testApplyToString(String original, String expected) {
var policy = new Replace(List.of(), REPLACEMENT_STRING);
var policy = new Replace(fieldName -> true, REPLACEMENT_STRING);
assertThat(policy.applyToString(original)).isEqualTo(expected);
}
}
}

View file

@ -3632,7 +3632,9 @@ components:
type: array
items:
type: string
pattern:
fieldsNamePattern:
type: string
maskingCharsReplacement:
type: array
items:
type: string

View file

@ -486,11 +486,7 @@ public class TopicsTest extends BaseTest {
topicDetails
.waitUntilScreenReady();
TOPIC_LIST.add(topicToCopy);
SoftAssert softly = new SoftAssert();
softly.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS, "Topic successfully created."),
"isAlertWithMessageVisible()");
softly.assertTrue(topicDetails.isTopicHeaderVisible(topicToCopy.getName()), "isTopicHeaderVisible()");
softly.assertAll();
Assert.assertTrue(topicDetails.isTopicHeaderVisible(topicToCopy.getName()), "isTopicHeaderVisible()");
}
@AfterClass(alwaysRun = true)

View file

@ -1,26 +1,41 @@
import React from 'react';
import { FullConnectorInfo } from 'generated-sources';
import {
Action,
ConnectorAction,
ConnectorState,
FullConnectorInfo,
ResourceType,
} from 'generated-sources';
import { CellContext } from '@tanstack/react-table';
import { ClusterNameRoute } from 'lib/paths';
import useAppParams from 'lib/hooks/useAppParams';
import { Dropdown, DropdownItem } from 'components/common/Dropdown';
import { useDeleteConnector } from 'lib/hooks/api/kafkaConnect';
import {
useDeleteConnector,
useUpdateConnectorState,
} from 'lib/hooks/api/kafkaConnect';
import { useConfirm } from 'lib/hooks/useConfirm';
import { useIsMutating } from '@tanstack/react-query';
import { ActionDropdownItem } from 'components/common/ActionComponent';
const ActionsCell: React.FC<CellContext<FullConnectorInfo, unknown>> = ({
row,
}) => {
const { connect, name } = row.original;
const { connect, name, status } = row.original;
const { clusterName } = useAppParams<ClusterNameRoute>();
const mutationsNumber = useIsMutating();
const isMutating = mutationsNumber > 0;
const confirm = useConfirm();
const deleteMutation = useDeleteConnector({
clusterName,
connectName: connect,
connectorName: name,
});
const stateMutation = useUpdateConnectorState({
clusterName,
connectName: connect,
connectorName: name,
});
const handleDelete = () => {
confirm(
<>
@ -31,8 +46,66 @@ const ActionsCell: React.FC<CellContext<FullConnectorInfo, unknown>> = ({
}
);
};
// const stateMutation = useUpdateConnectorState(routerProps);
const resumeConnectorHandler = () =>
stateMutation.mutateAsync(ConnectorAction.RESUME);
const restartConnectorHandler = () =>
stateMutation.mutateAsync(ConnectorAction.RESTART);
const restartAllTasksHandler = () =>
stateMutation.mutateAsync(ConnectorAction.RESTART_ALL_TASKS);
const restartFailedTasksHandler = () =>
stateMutation.mutateAsync(ConnectorAction.RESTART_FAILED_TASKS);
return (
<Dropdown>
{status.state === ConnectorState.PAUSED && (
<ActionDropdownItem
onClick={resumeConnectorHandler}
disabled={isMutating}
permission={{
resource: ResourceType.CONNECT,
action: Action.EDIT,
value: name,
}}
>
Resume
</ActionDropdownItem>
)}
<ActionDropdownItem
onClick={restartConnectorHandler}
disabled={isMutating}
permission={{
resource: ResourceType.CONNECT,
action: Action.EDIT,
value: name,
}}
>
Restart Connector
</ActionDropdownItem>
<ActionDropdownItem
onClick={restartAllTasksHandler}
disabled={isMutating}
permission={{
resource: ResourceType.CONNECT,
action: Action.EDIT,
value: name,
}}
>
Restart All Tasks
</ActionDropdownItem>
<ActionDropdownItem
onClick={restartFailedTasksHandler}
disabled={isMutating}
permission={{
resource: ResourceType.CONNECT,
action: Action.EDIT,
value: name,
}}
>
Restart Failed Tasks
</ActionDropdownItem>
<DropdownItem onClick={handleDelete} danger>
Remove Connector
</DropdownItem>

View file

@ -9,7 +9,11 @@ import { screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { render, WithRoute } from 'lib/testHelpers';
import { clusterConnectConnectorPath, clusterConnectorsPath } from 'lib/paths';
import { useConnectors, useDeleteConnector } from 'lib/hooks/api/kafkaConnect';
import {
useConnectors,
useDeleteConnector,
useUpdateConnectorState,
} from 'lib/hooks/api/kafkaConnect';
const mockedUsedNavigate = jest.fn();
const mockDelete = jest.fn();
@ -22,6 +26,7 @@ jest.mock('react-router-dom', () => ({
jest.mock('lib/hooks/api/kafkaConnect', () => ({
useConnectors: jest.fn(),
useDeleteConnector: jest.fn(),
useUpdateConnectorState: jest.fn(),
}));
const clusterName = 'local';
@ -42,6 +47,10 @@ describe('Connectors List', () => {
(useConnectors as jest.Mock).mockImplementation(() => ({
data: connectors,
}));
const restartConnector = jest.fn();
(useUpdateConnectorState as jest.Mock).mockImplementation(() => ({
mutateAsync: restartConnector,
}));
});
it('renders', async () => {

View file

@ -1,4 +1,5 @@
import styled from 'styled-components';
import { Button } from 'components/common/Button/Button';
export const DiffWrapper = styled.div`
align-items: stretch;
@ -81,3 +82,6 @@ export const DiffTile = styled.div`
export const DiffVersionsSelect = styled.div`
width: 0.625em;
`;
export const BackButton = styled(Button)`
margin: 10px 9px;
`;

View file

@ -20,6 +20,7 @@ import useAppParams from 'lib/hooks/useAppParams';
import PageHeading from 'components/common/PageHeading/PageHeading';
import * as S from './Diff.styled';
import { BackButton } from './Diff.styled';
export interface DiffProps {
versions: SchemaSubject[];
@ -77,6 +78,13 @@ const Diff: React.FC<DiffProps> = ({ versions, areVersionsFetched }) => {
backText="Schema Registry"
backTo={clusterSchemasPath(clusterName)}
/>
<BackButton
buttonType="secondary"
buttonSize="S"
onClick={() => navigate(-1)}
>
Back
</BackButton>
<S.Section>
{areVersionsFetched ? (
<S.DiffBox>

View file

@ -3,6 +3,7 @@ import Diff, { DiffProps } from 'components/Schemas/Diff/Diff';
import { render, WithRoute } from 'lib/testHelpers';
import { screen } from '@testing-library/react';
import { clusterSchemaComparePath } from 'lib/paths';
import userEvent from '@testing-library/user-event';
import { versions } from './fixtures';
@ -142,4 +143,24 @@ describe('Diff', () => {
expect(select).toHaveTextContent(versions[0].version);
});
});
describe('Back button', () => {
beforeEach(() => {
setupComponent({
areVersionsFetched: true,
versions,
});
});
it('back button is appear', () => {
const backButton = screen.getAllByRole('button', { name: 'Back' });
expect(backButton[0]).toBeInTheDocument();
});
it('click on back button', () => {
const backButton = screen.getAllByRole('button', { name: 'Back' });
userEvent.click(backButton[0]);
expect(screen.queryByRole('Back')).not.toBeInTheDocument();
});
});
});

View file

@ -142,6 +142,8 @@ const Message: React.FC<Props> = ({
timestampType={timestampType}
keySize={keySize}
contentSize={valueSize}
keySerde={keySerde}
valueSerde={valueSerde}
/>
)}
</>

View file

@ -3,7 +3,6 @@ import EditorViewer from 'components/common/EditorViewer/EditorViewer';
import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted';
import { SchemaType, TopicMessageTimestampTypeEnum } from 'generated-sources';
import { formatTimestamp } from 'lib/dateTimeHelpers';
import { useSearchParams } from 'react-router-dom';
import * as S from './MessageContent.styled';
@ -17,6 +16,8 @@ export interface MessageContentProps {
timestampType?: TopicMessageTimestampTypeEnum;
keySize?: number;
contentSize?: number;
keySerde?: string;
valueSerde?: string;
}
const MessageContent: React.FC<MessageContentProps> = ({
@ -27,12 +28,10 @@ const MessageContent: React.FC<MessageContentProps> = ({
timestampType,
keySize,
contentSize,
keySerde,
valueSerde,
}) => {
const [activeTab, setActiveTab] = React.useState<Tab>('content');
const [searchParams] = useSearchParams();
const keyFormat = searchParams.get('keySerde') || '';
const valueFormat = searchParams.get('valueSerde') || '';
const activeTabContent = () => {
switch (activeTab) {
case 'content':
@ -110,7 +109,7 @@ const MessageContent: React.FC<MessageContentProps> = ({
<S.Metadata>
<S.MetadataLabel>Key Serde</S.MetadataLabel>
<span>
<S.MetadataValue>{keyFormat}</S.MetadataValue>
<S.MetadataValue>{keySerde}</S.MetadataValue>
<S.MetadataMeta>
Size: <BytesFormatted value={keySize} />
</S.MetadataMeta>
@ -120,7 +119,7 @@ const MessageContent: React.FC<MessageContentProps> = ({
<S.Metadata>
<S.MetadataLabel>Value Serde</S.MetadataLabel>
<span>
<S.MetadataValue>{valueFormat}</S.MetadataValue>
<S.MetadataValue>{valueSerde}</S.MetadataValue>
<S.MetadataMeta>
Size: <BytesFormatted value={contentSize} />
</S.MetadataMeta>

View file

@ -20,6 +20,8 @@ const setupWrapper = (props?: Partial<MessageContentProps>) => {
headers={{ header: 'test' }}
timestamp={new Date(0)}
timestampType={TopicMessageTimestampTypeEnum.CREATE_TIME}
keySerde="SchemaRegistry"
valueSerde="Avro"
{...props}
/>
</tbody>
@ -27,42 +29,20 @@ const setupWrapper = (props?: Partial<MessageContentProps>) => {
);
};
const proto =
'syntax = "proto3";\npackage com.provectus;\n\nmessage TestProtoRecord {\n string f1 = 1;\n int32 f2 = 2;\n}\n';
global.TextEncoder = TextEncoder;
const searchParamsContentAVRO = new URLSearchParams({
keySerde: 'SchemaRegistry',
valueSerde: 'AVRO',
limit: '100',
});
const searchParamsContentJSON = new URLSearchParams({
keySerde: 'SchemaRegistry',
valueSerde: 'JSON',
limit: '100',
});
const searchParamsContentPROTOBUF = new URLSearchParams({
keySerde: 'SchemaRegistry',
valueSerde: 'PROTOBUF',
limit: '100',
});
describe('MessageContent screen', () => {
beforeEach(() => {
render(setupWrapper(), {
initialEntries: [`/messages?${searchParamsContentAVRO}`],
});
render(setupWrapper());
});
describe('renders', () => {
it('key format in document', () => {
describe('Checking keySerde and valueSerde', () => {
it('keySerde in document', () => {
expect(screen.getByText('SchemaRegistry')).toBeInTheDocument();
});
it('content format in document', () => {
expect(screen.getByText('AVRO')).toBeInTheDocument();
it('valueSerde in document', () => {
expect(screen.getByText('Avro')).toBeInTheDocument();
});
});
@ -98,42 +78,3 @@ describe('MessageContent screen', () => {
});
});
});
describe('checking content type depend on message type', () => {
it('renders component with message having JSON type', () => {
render(
setupWrapper({
messageContent: '{"data": "test"}',
}),
{ initialEntries: [`/messages?${searchParamsContentJSON}`] }
);
expect(screen.getByText('JSON')).toBeInTheDocument();
});
it('renders component with message having AVRO type', () => {
render(
setupWrapper({
messageContent: '{"data": "test"}',
}),
{ initialEntries: [`/messages?${searchParamsContentAVRO}`] }
);
expect(screen.getByText('AVRO')).toBeInTheDocument();
});
it('renders component with message having PROTOBUF type', () => {
render(
setupWrapper({
messageContent: proto,
}),
{ initialEntries: [`/messages?${searchParamsContentPROTOBUF}`] }
);
expect(screen.getByText('PROTOBUF')).toBeInTheDocument();
});
it('renders component with message having no type which is equal to having PROTOBUF type', () => {
render(
setupWrapper({
messageContent: '',
}),
{ initialEntries: [`/messages?${searchParamsContentPROTOBUF}`] }
);
expect(screen.getByText('PROTOBUF')).toBeInTheDocument();
});
});

View file

@ -8,15 +8,29 @@ export const Wrapper = styled.div`
export const Columns = styled.div`
margin: -0.75rem;
margin-bottom: 0.75rem;
display: flex;
flex-direction: column;
padding: 0.75rem;
gap: 8px;
@media screen and (min-width: 769px) {
display: flex;
}
`;
export const Column = styled.div`
flex-basis: 0;
flex-grow: 1;
flex-shrink: 1;
padding: 0.75rem;
export const Flex = styled.div`
display: flex;
flex-direction: row;
gap: 8px;
@media screen and (max-width: 1200px) {
flex-direction: column;
}
`;
export const FlexItem = styled.div`
width: 18rem;
@media screen and (max-width: 1450px) {
width: 50%;
}
@media screen and (max-width: 1200px) {
width: 100%;
}
`;

View file

@ -4,6 +4,7 @@ import { RouteParamsClusterTopic } from 'lib/paths';
import { Button } from 'components/common/Button/Button';
import Editor from 'components/common/Editor/Editor';
import Select, { SelectOption } from 'components/common/Select/Select';
import Switch from 'components/common/Switch/Switch';
import useAppParams from 'lib/hooks/useAppParams';
import { showAlert } from 'lib/errorHandling';
import { useSendMessage, useTopicDetails } from 'lib/hooks/api/topics';
@ -26,9 +27,12 @@ interface FormType {
partition: number;
keySerde: string;
valueSerde: string;
keepContents: boolean;
}
const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
const SendMessage: React.FC<{ closeSidebar: () => void }> = ({
closeSidebar,
}) => {
const { clusterName, topicName } = useAppParams<RouteParamsClusterTopic>();
const { data: topic } = useTopicDetails({ clusterName, topicName });
const { data: serdes = {} } = useSerdes({
@ -47,11 +51,13 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
handleSubmit,
formState: { isSubmitting },
control,
setValue,
} = useForm<FormType>({
mode: 'onChange',
defaultValues: {
...defaultValues,
partition: Number(partitionOptions[0].value),
keepContents: false,
},
});
@ -62,6 +68,7 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
content,
headers,
partition,
keepContents,
}: FormType) => {
let errors: string[] = [];
@ -110,7 +117,11 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
keySerde,
valueSerde,
});
onSubmit();
if (!keepContents) {
setValue('key', '');
setValue('content', '');
closeSidebar();
}
} catch (e) {
// do nothing
}
@ -120,7 +131,7 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
<S.Wrapper>
<form onSubmit={handleSubmit(submit)}>
<S.Columns>
<S.Column>
<S.FlexItem>
<InputLabel>Partition</InputLabel>
<Controller
control={control}
@ -137,47 +148,58 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
/>
)}
/>
</S.Column>
<S.Column>
<InputLabel>Key Serde</InputLabel>
</S.FlexItem>
<S.Flex>
<S.FlexItem>
<InputLabel>Key Serde</InputLabel>
<Controller
control={control}
name="keySerde"
render={({ field: { name, onChange, value } }) => (
<Select
id="selectKeySerdeOptions"
aria-labelledby="selectKeySerdeOptions"
name={name}
onChange={onChange}
minWidth="100%"
options={getSerdeOptions(serdes.key || [])}
value={value}
/>
)}
/>
</S.FlexItem>
<S.FlexItem>
<InputLabel>Value Serde</InputLabel>
<Controller
control={control}
name="valueSerde"
render={({ field: { name, onChange, value } }) => (
<Select
id="selectValueSerdeOptions"
aria-labelledby="selectValueSerdeOptions"
name={name}
onChange={onChange}
minWidth="100%"
options={getSerdeOptions(serdes.value || [])}
value={value}
/>
)}
/>
</S.FlexItem>
</S.Flex>
<div>
<Controller
control={control}
name="keySerde"
name="keepContents"
render={({ field: { name, onChange, value } }) => (
<Select
id="selectKeySerdeOptions"
aria-labelledby="selectKeySerdeOptions"
name={name}
onChange={onChange}
minWidth="100%"
options={getSerdeOptions(serdes.key || [])}
value={value}
/>
<Switch name={name} onChange={onChange} checked={value} />
)}
/>
</S.Column>
<S.Column>
<InputLabel>Value Serde</InputLabel>
<Controller
control={control}
name="valueSerde"
render={({ field: { name, onChange, value } }) => (
<Select
id="selectValueSerdeOptions"
aria-labelledby="selectValueSerdeOptions"
name={name}
onChange={onChange}
minWidth="100%"
options={getSerdeOptions(serdes.value || [])}
value={value}
/>
)}
/>
</S.Column>
<InputLabel>Keep contents</InputLabel>
</div>
</S.Columns>
<S.Columns>
<S.Column>
<div>
<InputLabel>Key</InputLabel>
<Controller
control={control}
@ -191,8 +213,8 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
/>
)}
/>
</S.Column>
<S.Column>
</div>
<div>
<InputLabel>Value</InputLabel>
<Controller
control={control}
@ -206,10 +228,10 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
/>
)}
/>
</S.Column>
</div>
</S.Columns>
<S.Columns>
<S.Column>
<div>
<InputLabel>Headers</InputLabel>
<Controller
control={control}
@ -224,7 +246,7 @@ const SendMessage: React.FC<{ onSubmit: () => void }> = ({ onSubmit }) => {
/>
)}
/>
</S.Column>
</div>
</S.Columns>
<Button
buttonSize="M"
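
A condensed sketch of the new keepContents branch in the submit handler above (the send call itself is elided): with the switch off, a successful send clears both editors and closes the sidebar; with it on, the form keeps its contents so the same payload can be tweaked and re-sent.

// after the message has been produced successfully (sketch)
if (!keepContents) {
  setValue('key', '');     // clear the Key editor
  setValue('content', ''); // clear the Value editor
  closeSidebar();          // close the "Produce Message" sidebar
}
// keepContents === true: editors keep their text and the sidebar stays open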

View file

@ -49,7 +49,7 @@ const renderComponent = async () => {
const path = clusterTopicPath(clusterName, topicName);
await render(
<WithRoute path={clusterTopicPath()}>
<SendMessage onSubmit={mockOnSubmit} />
<SendMessage closeSidebar={mockOnSubmit} />
</WithRoute>,
{ initialEntries: [path] }
);

View file

@ -59,7 +59,7 @@ const Topic: React.FC = () => {
const deleteTopicHandler = async () => {
await deleteTopic.mutateAsync(topicName);
navigate('../..');
navigate(clusterTopicsPath(clusterName));
};
React.useEffect(() => {
@ -236,7 +236,7 @@ const Topic: React.FC = () => {
title="Produce Message"
>
<Suspense fallback={<PageLoader />}>
<SendMessage onSubmit={closeSidebar} />
<SendMessage closeSidebar={closeSidebar} />
</Suspense>
</SlidingSidebar>
</>

View file

@ -10,6 +10,7 @@ import {
clusterTopicMessagesPath,
clusterTopicPath,
clusterTopicSettingsPath,
clusterTopicsPath,
clusterTopicStatisticsPath,
getNonExactPath,
} from 'lib/paths';
@ -179,7 +180,9 @@ describe('Details', () => {
name: 'Confirm',
});
await userEvent.click(submitDeleteButton);
expect(mockNavigate).toHaveBeenCalledWith('../..');
expect(mockNavigate).toHaveBeenCalledWith(
clusterTopicsPath(mockClusterName)
);
});
it('shows a confirmation popup on deleting topic messages', async () => {

View file

@ -1,52 +1,38 @@
import React from 'react';
import WarningIcon from 'components/common/Icons/WarningIcon';
import { gitCommitPath } from 'lib/paths';
import { useActuatorInfo } from 'lib/hooks/api/actuatorInfo';
import { BUILD_VERSION_PATTERN } from 'lib/constants';
import { useLatestVersion } from 'lib/hooks/api/latestVersion';
import { formatTimestamp } from 'lib/dateTimeHelpers';
import * as S from './Version.styled';
import compareVersions from './compareVersions';
const Version: React.FC = () => {
const { data: actuatorInfo = {} } = useActuatorInfo();
const { data: latestVersionInfo = {} } = useLatestVersion();
const tag = actuatorInfo?.build?.version;
const commit = actuatorInfo?.git?.commit.id;
const { tag_name: latestTag } = latestVersionInfo;
const outdated = compareVersions(tag, latestTag);
const currentVersion = tag?.match(BUILD_VERSION_PATTERN)
? tag
: formatTimestamp(actuatorInfo?.build?.time);
if (!tag) return null;
const { buildTime, commitId, isLatestRelease } = latestVersionInfo.build;
const { versionTag } = latestVersionInfo?.latestRelease || '';
return (
<S.Wrapper>
{!!outdated && (
{!isLatestRelease && (
<S.OutdatedWarning
title={`Your app version is outdated. Current latest version is ${latestTag}`}
title={`Your app version is outdated. Current latest version is ${versionTag}`}
>
<WarningIcon />
</S.OutdatedWarning>
)}
{commit && (
{commitId && (
<div>
<S.CurrentCommitLink
title="Current commit"
target="__blank"
href={gitCommitPath(commit)}
href={gitCommitPath(commitId)}
>
{commit}
{commitId}
</S.CurrentCommitLink>
</div>
)}
<S.CurrentVersion>{currentVersion}</S.CurrentVersion>
<S.CurrentVersion>{formatTimestamp(buildTime)}</S.CurrentVersion>
</S.Wrapper>
);
};

View file

@ -2,87 +2,40 @@ import React from 'react';
import { screen } from '@testing-library/dom';
import Version from 'components/Version/Version';
import { render } from 'lib/testHelpers';
import { formatTimestamp } from 'lib/dateTimeHelpers';
import { useActuatorInfo } from 'lib/hooks/api/actuatorInfo';
import { useLatestVersion } from 'lib/hooks/api/latestVersion';
import { actuatorInfoPayload } from 'lib/fixtures/actuatorInfo';
import { latestVersionPayload } from 'lib/fixtures/latestVersion';
import {
deprecatedVersionPayload,
latestVersionPayload,
} from 'lib/fixtures/latestVersion';
jest.mock('lib/hooks/api/actuatorInfo', () => ({
useActuatorInfo: jest.fn(),
}));
jest.mock('lib/hooks/api/latestVersion', () => ({
useLatestVersion: jest.fn(),
}));
describe('Version Component', () => {
const versionTag = 'v0.5.0';
const snapshotTag = 'test-SNAPSHOT';
const commitTag = 'befd3b328e2c9c7df57b0c5746561b2f7fee8813';
const commitId = '96a577a';
const actuatorVersionPayload = actuatorInfoPayload(versionTag);
const formattedTimestamp = formatTimestamp(actuatorVersionPayload.build.time);
describe('render latest version', () => {
beforeEach(() => {
(useLatestVersion as jest.Mock).mockImplementation(() => ({
data: latestVersionPayload,
}));
});
it('renders latest release version as current version', async () => {
render(<Version />);
expect(screen.getByText(commitId)).toBeInTheDocument();
});
beforeEach(() => {
(useActuatorInfo as jest.Mock).mockImplementation(() => ({
data: actuatorVersionPayload,
}));
it('should not show warning icon if it is last release', async () => {
render(<Version />);
expect(screen.queryByRole('img')).not.toBeInTheDocument();
});
});
it('show warning icon if it is not last release', async () => {
(useLatestVersion as jest.Mock).mockImplementation(() => ({
data: latestVersionPayload,
data: deprecatedVersionPayload,
}));
});
describe('tag does not exist', () => {
it('does not render component', async () => {
(useActuatorInfo as jest.Mock).mockImplementation(() => ({
data: null,
}));
const { container } = render(<Version />);
expect(container.firstChild).toBeEmptyDOMElement();
});
});
describe('renders current version', () => {
it('renders release build version as current version', async () => {
render(<Version />);
expect(screen.getByText(versionTag)).toBeInTheDocument();
});
it('renders formatted timestamp as current version when version is commit', async () => {
(useActuatorInfo as jest.Mock).mockImplementation(() => ({
data: actuatorInfoPayload(commitTag),
}));
render(<Version />);
expect(screen.getByText(formattedTimestamp)).toBeInTheDocument();
});
it('renders formatted timestamp as current version when version contains -SNAPSHOT', async () => {
(useActuatorInfo as jest.Mock).mockImplementation(() => ({
data: actuatorInfoPayload(snapshotTag),
}));
render(<Version />);
expect(screen.getByText(formattedTimestamp)).toBeInTheDocument();
});
});
describe('outdated build version', () => {
it('renders warning message', async () => {
(useActuatorInfo as jest.Mock).mockImplementation(() => ({
data: actuatorInfoPayload('v0.3.0'),
}));
render(<Version />);
expect(
screen.getByTitle(
`Your app version is outdated. Current latest version is ${latestVersionPayload.tag_name}`
)
).toBeInTheDocument();
});
});
describe('current commit id with link', () => {
it('renders', async () => {
render(<Version />);
expect(
screen.getByText(actuatorVersionPayload.git.commit.id)
).toBeInTheDocument();
});
render(<Version />);
expect(screen.getByRole('img')).toBeInTheDocument();
});
});

View file

@ -70,7 +70,7 @@ export const DropdownButton = styled.button`
`;
export const DangerItem = styled.div`
color: ${({ theme: { dropdown } }) => dropdown.item.color.normal};
color: ${({ theme: { dropdown } }) => dropdown.item.color.danger};
`;
export const DropdownItemHint = styled.div`

View file

@ -13,6 +13,7 @@ const WarningIcon: React.FC = () => {
return (
<WarningIconContainer>
<svg
role="img"
width="14"
height="13"
viewBox="0 0 14 13"
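
Adding role="img" makes the SVG discoverable through Testing Library role queries, which is what the updated Version spec relies on; for example (sketch):

expect(screen.queryByRole('img')).not.toBeInTheDocument(); // latest release: no warning icon
expect(screen.getByRole('img')).toBeInTheDocument();       // outdated build: warning icon rendered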

View file

@ -6,7 +6,7 @@ export const Wrapper = styled.div<{ $open?: boolean }>(
position: fixed;
top: ${theme.layout.navBarHeight};
bottom: 0;
width: 60vw;
width: 37vw;
right: calc(${$open ? '0px' : theme.layout.rightSidebarWidth} * -1);
box-shadow: -1px 0px 10px 0px rgba(0, 0, 0, 0.2);
transition: right 0.3s linear;

View file

@ -1,12 +0,0 @@
export const actuatorInfoPayload = (
version = 'befd3b328e2c9c7df57b0c5746561b2f7fee8813'
) => ({
git: { commit: { id: 'befd3b3' } },
build: {
artifact: 'kafka-ui-api',
name: 'kafka-ui-api',
time: '2022-09-15T09:52:21.753Z',
version,
group: 'com.provectus',
},
});

View file

@ -1,3 +1,16 @@
export const latestVersionPayload = {
tag_name: 'v0.4.0',
export const deprecatedVersionPayload = {
build: {
buildTime: '2023-04-14T09:47:35.463Z',
commitId: '96a577a',
isLatestRelease: false,
version: '96a577a98c6069376c5d22ed49cffd3739f1bbdc',
},
};
export const latestVersionPayload = {
build: {
buildTime: '2023-04-14T09:47:35.463Z',
commitId: '96a577a',
isLatestRelease: true,
version: '96a577a98c6069376c5d22ed49cffd3739f1bbdc',
},
};
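
A rough TypeScript shape for the /api/info payload, inferred from these fixtures and from how Version.tsx reads it (any field not shown above is an assumption):

interface VersionInfo {
  build: {
    buildTime: string;        // ISO timestamp, rendered via formatTimestamp()
    commitId: string;         // short SHA, linked via gitCommitPath()
    isLatestRelease: boolean; // false triggers the outdated-version warning
    version: string;          // full commit SHA
  };
  latestRelease?: {           // read as latestVersionInfo?.latestRelease in Version.tsx
    versionTag: string;       // e.g. 'v0.5.0', shown in the warning tooltip
  };
}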

View file

@ -1,17 +0,0 @@
import fetchMock from 'fetch-mock';
import * as hooks from 'lib/hooks/api/actuatorInfo';
import { expectQueryWorks, renderQueryHook } from 'lib/testHelpers';
import { actuatorInfoPayload } from 'lib/fixtures/actuatorInfo';
const actuatorInfoPath = '/actuator/info';
describe('Actuator info hooks', () => {
beforeEach(() => fetchMock.restore());
describe('useActuatorInfo', () => {
it('returns the correct data', async () => {
const mock = fetchMock.getOnce(actuatorInfoPath, actuatorInfoPayload());
const { result } = renderQueryHook(() => hooks.useActuatorInfo());
await expectQueryWorks(mock, result);
});
});
});

View file

@ -1,18 +1,16 @@
import fetchMock from 'fetch-mock';
import { expectQueryWorks, renderQueryHook } from 'lib/testHelpers';
import * as hooks from 'lib/hooks/api/latestVersion';
import { GIT_REPO_LATEST_RELEASE_LINK } from 'lib/constants';
import { latestVersionPayload } from 'lib/fixtures/latestVersion';
import { useLatestVersion } from 'lib/hooks/api/latestVersion';
const latestVersionPath = '/api/info';
describe('Latest version hooks', () => {
beforeEach(() => fetchMock.restore());
describe('useLatestVersion', () => {
it('returns the correct data', async () => {
const mock = fetchMock.getOnce(
GIT_REPO_LATEST_RELEASE_LINK,
latestVersionPayload
);
const { result } = renderQueryHook(() => hooks.useLatestVersion());
const mock = fetchMock.getOnce(latestVersionPath, latestVersionPayload);
const { result } = renderQueryHook(() => useLatestVersion());
await expectQueryWorks(mock, result);
});
});

View file

@ -1,19 +0,0 @@
import { useQuery } from '@tanstack/react-query';
import { BASE_PARAMS, QUERY_REFETCH_OFF_OPTIONS } from 'lib/constants';
const fetchActuatorInfo = async () => {
const data = await fetch(
`${BASE_PARAMS.basePath}/actuator/info`,
BASE_PARAMS
).then((res) => res.json());
return data;
};
export function useActuatorInfo() {
return useQuery(
['actuatorInfo'],
fetchActuatorInfo,
QUERY_REFETCH_OFF_OPTIONS
);
}

View file

@ -76,7 +76,8 @@ export function useUpdateConnectorState(props: UseConnectorProps) {
return useMutation(
(action: ConnectorAction) => api.updateConnectorState({ ...props, action }),
{
onSuccess: () => client.invalidateQueries(connectorKey(props)),
onSuccess: () =>
client.invalidateQueries(['clusters', props.clusterName, 'connectors']),
}
);
}
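
The broader key works because @tanstack/react-query matches query keys by prefix: invalidating ['clusters', <clusterName>, 'connectors'] marks the connectors list and any connector-detail queries nested under it as stale, so both refetch after a state change (the exact detail-key shape below is an assumption):

// illustrative keys only
// ['clusters', 'local', 'connectors']                      -> connectors list
// ['clusters', 'local', 'connectors', 'first', 'sink-one'] -> a single connector
client.invalidateQueries(['clusters', 'local', 'connectors']); // refetches both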

View file

@ -1,21 +1,19 @@
import { useQuery } from '@tanstack/react-query';
import {
QUERY_REFETCH_OFF_OPTIONS,
GIT_REPO_LATEST_RELEASE_LINK,
} from 'lib/constants';
import { BASE_PARAMS, QUERY_REFETCH_OFF_OPTIONS } from 'lib/constants';
const fetchLatestVersion = async () => {
const data = await fetch(GIT_REPO_LATEST_RELEASE_LINK).then((res) =>
res.json()
);
const fetchLatestVersionInfo = async () => {
const data = await fetch(
`${BASE_PARAMS.basePath}/api/info`,
BASE_PARAMS
).then((res) => res.json());
return data;
};
export function useLatestVersion() {
return useQuery(
['latestVersion'],
fetchLatestVersion,
['versionInfo'],
fetchLatestVersionInfo,
QUERY_REFETCH_OFF_OPTIONS
);
}
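
Consumption is unchanged by the rename; a minimal sketch mirroring Version.tsx above (QUERY_REFETCH_OFF_OPTIONS, as its name suggests, keeps the query from refetching automatically):

const { data: latestVersionInfo = {} } = useLatestVersion();
const outdated = latestVersionInfo.build?.isLatestRelease === false;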

View file

@ -122,9 +122,6 @@ export function useCreateTopicMutation(clusterName: ClusterName) {
}),
{
onSuccess: () => {
showSuccessAlert({
message: `Topic successfully created.`,
});
client.invalidateQueries(topicKeys.all(clusterName));
},
}

View file

@ -40,6 +40,7 @@
<kafka-ui-serde-api.version>1.0.0</kafka-ui-serde-api.version>
<odd-oddrn-generator.version>0.1.15</odd-oddrn-generator.version>
<odd-oddrn-client.version>0.1.23</odd-oddrn-client.version>
<org.json.version>20230227</org.json.version>
<!-- Test dependency versions -->
<junit.version>5.9.1</junit.version>