Merge branch 'master' into ISSUE_754_acl

Commit 23a39061b7
62 changed files with 646 additions and 310 deletions
.github/ISSUE_TEMPLATE/config.yml (new file, 11 additions)
@@ -0,0 +1,11 @@
+blank_issues_enabled: false
+contact_links:
+  - name: Official documentation
+    url: https://docs.kafka-ui.provectus.io/
+    about: Before reaching out for support, please refer to our documentation. Read "FAQ" and "Common problems", also try using search there.
+  - name: Community Discord
+    url: https://discord.gg/4DWzD7pGE5
+    about: Chat with other users, get some support or ask questions.
+  - name: GitHub Discussions
+    url: https://github.com/provectus/kafka-ui/discussions
+    about: An alternative place to ask questions or to get some support.
.github/ISSUE_TEMPLATE/question.md (deleted file, 16 deletions)
@@ -1,16 +0,0 @@
----
-name: "❓ Question"
-about: Ask a question
-title: ''
-
----
-
-<!--
-
-To ask a question, please either:
-1. Open up a discussion (https://github.com/provectus/kafka-ui/discussions)
-2. Join us on discord (https://discord.gg/4DWzD7pGE5) and ask there.
-
-Don't forget to check/search for existing issues/discussions.
-
--->
.github/workflows/block_merge.yml (2 changes)
@@ -6,7 +6,7 @@ jobs:
   block_merge:
     runs-on: ubuntu-latest
    steps:
-      - uses: mheap/github-action-required-labels@v3
+      - uses: mheap/github-action-required-labels@v4
        with:
          mode: exactly
          count: 0
.github/workflows/branch-deploy.yml (4 changes)
@@ -86,7 +86,7 @@ jobs:

      - name: make comment with private deployment link
        if: ${{ github.event.label.name == 'status/feature_testing' }}
-        uses: peter-evans/create-or-update-comment@v2
+        uses: peter-evans/create-or-update-comment@v3
        with:
          issue-number: ${{ github.event.pull_request.number }}
          body: |
@@ -94,7 +94,7 @@ jobs:

      - name: make comment with public deployment link
        if: ${{ github.event.label.name == 'status/feature_testing_public' }}
-        uses: peter-evans/create-or-update-comment@v2
+        uses: peter-evans/create-or-update-comment@v3
        with:
          issue-number: ${{ github.event.pull_request.number }}
          body: |
.github/workflows/branch-remove.yml (2 changes)
@@ -21,7 +21,7 @@ jobs:
          git add ../kafka-ui-from-branch/
          git commit -m "removed env:${{ needs.build.outputs.deploy }}" && git push || true
      - name: make comment with deployment link
-        uses: peter-evans/create-or-update-comment@v2
+        uses: peter-evans/create-or-update-comment@v3
        with:
          issue-number: ${{ github.event.pull_request.number }}
          body: |
.github/workflows/build-public-image.yml (2 changes)
@@ -65,7 +65,7 @@ jobs:
          cache-from: type=local,src=/tmp/.buildx-cache
          cache-to: type=local,dest=/tmp/.buildx-cache
      - name: make comment with private deployment link
-        uses: peter-evans/create-or-update-comment@v2
+        uses: peter-evans/create-or-update-comment@v3
        with:
          issue-number: ${{ github.event.pull_request.number }}
          body: |
.github/workflows/cve.yaml (2 changes)
@@ -55,7 +55,7 @@ jobs:
          cache-to: type=local,dest=/tmp/.buildx-cache

      - name: Run CVE checks
-        uses: aquasecurity/trivy-action@0.9.2
+        uses: aquasecurity/trivy-action@0.10.0
        with:
          image-ref: "provectuslabs/kafka-ui:${{ steps.build.outputs.version }}"
          format: "table"
.github/workflows/delete-public-image.yml (2 changes)
@@ -33,7 +33,7 @@ jobs:
            --image-ids imageTag=${{ steps.extract_branch.outputs.tag }} \
            --region us-east-1
      - name: make comment with private deployment link
-        uses: peter-evans/create-or-update-comment@v2
+        uses: peter-evans/create-or-update-comment@v3
        with:
          issue-number: ${{ github.event.pull_request.number }}
          body: |
.github/workflows/stale.yaml (2 changes)
@@ -7,7 +7,7 @@ jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/stale@v7
+      - uses: actions/stale@v8
        with:
          days-before-issue-stale: 7
          days-before-issue-close: 3
@@ -6,8 +6,9 @@ Following versions of the project are currently being supported with security updates.

 | Version | Supported |
 | ------- | ------------------ |
-| 0.5.x | :white_check_mark: |
-| 0.4.x | :x: |
+| 0.6.x | :white_check_mark: |
+| 0.5.x | :x: |
+| 0.4.x | :x: |
 | 0.3.x | :x: |
 | 0.2.x | :x: |
 | 0.1.x | :x: |
@@ -2,6 +2,6 @@ apiVersion: v2
 name: kafka-ui
 description: A Helm chart for kafka-UI
 type: application
-version: 0.6.1
-appVersion: v0.6.1
+version: 0.6.2
+appVersion: v0.6.2
 icon: https://github.com/provectus/kafka-ui/raw/master/documentation/images/kafka-ui-logo.png
@@ -9,4 +9,6 @@ message MySpecificTopicValue {
 message MyValue {
   int32 version = 1;
   string payload = 2;
+  map<int32, string> intToStringMap = 3;
+  map<string, MyValue> strToObjMap = 4;
 }
@@ -27,6 +27,8 @@ public class ClustersProperties {

    String internalTopicPrefix;

+    Integer adminClientTimeout;
+
    PollingProperties polling = new PollingProperties();

    @Data
@@ -5,7 +5,6 @@ import java.util.Map;
 import lombok.AllArgsConstructor;
 import org.openapitools.jackson.nullable.JsonNullableModule;
 import org.springframework.beans.factory.ObjectProvider;
-import org.springframework.beans.factory.annotation.Value;
 import org.springframework.boot.autoconfigure.web.ServerProperties;
 import org.springframework.boot.autoconfigure.web.reactive.WebFluxProperties;
 import org.springframework.context.ApplicationContext;
@@ -15,8 +14,6 @@ import org.springframework.http.server.reactive.ContextPathCompositeHandler;
 import org.springframework.http.server.reactive.HttpHandler;
 import org.springframework.jmx.export.MBeanExporter;
 import org.springframework.util.StringUtils;
-import org.springframework.util.unit.DataSize;
-import org.springframework.web.reactive.function.client.WebClient;
 import org.springframework.web.server.adapter.WebHttpHandlerBuilder;

 @Configuration
@@ -52,14 +49,7 @@ public class Config {
   }

   @Bean
-  public WebClient webClient(
-      @Value("${webclient.max-in-memory-buffer-size:20MB}") DataSize maxBuffSize) {
-    return WebClient.builder()
-        .codecs(c -> c.defaultCodecs().maxInMemorySize((int) maxBuffSize.toBytes()))
-        .build();
-  }
-
-  @Bean
+  // will be used by webflux json mapping
   public JsonNullableModule jsonNullableModule() {
     return new JsonNullableModule();
   }
@@ -0,0 +1,33 @@
+package com.provectus.kafka.ui.config;
+
+import com.provectus.kafka.ui.exception.ValidationException;
+import java.beans.Transient;
+import javax.annotation.PostConstruct;
+import lombok.Data;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.util.unit.DataSize;
+
+@Configuration
+@ConfigurationProperties("webclient")
+@Data
+public class WebclientProperties {
+
+  String maxInMemoryBufferSize;
+
+  @PostConstruct
+  public void validate() {
+    validateAndSetDefaultBufferSize();
+  }
+
+  private void validateAndSetDefaultBufferSize() {
+    if (maxInMemoryBufferSize != null) {
+      try {
+        DataSize.parse(maxInMemoryBufferSize);
+      } catch (Exception e) {
+        throw new ValidationException("Invalid format for webclient.maxInMemoryBufferSize");
+      }
+    }
+  }
+
+}
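The new WebclientProperties class above only checks that webclient.maxInMemoryBufferSize parses as a Spring DataSize. A minimal, standalone sketch of that parsing behaviour (the class name and values below are illustrative, not part of the commit):

    import org.springframework.util.unit.DataSize;

    public class DataSizeParseSketch {
      public static void main(String[] args) {
        // values such as "20MB", "512KB" or a plain byte count are accepted
        System.out.println(DataSize.parse("20MB").toBytes());   // 20971520 (binary megabytes)
        System.out.println(DataSize.parse("512KB").toBytes());  // 524288
        try {
          DataSize.parse("twenty megabytes");                   // rejected, which the class above turns into a ValidationException
        } catch (IllegalArgumentException e) {
          System.out.println("invalid format: " + e.getMessage());
        }
      }
    }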
@@ -11,8 +11,6 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Optional;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
 import org.apache.kafka.common.Node;
 import org.apache.kafka.common.TopicPartition;

@@ -82,15 +80,8 @@ public class ConsumerGroupMapper {
      InternalConsumerGroup c, T consumerGroup) {
    consumerGroup.setGroupId(c.getGroupId());
    consumerGroup.setMembers(c.getMembers().size());
-
-    int numTopics = Stream.concat(
-        c.getOffsets().keySet().stream().map(TopicPartition::topic),
-        c.getMembers().stream()
-            .flatMap(m -> m.getAssignment().stream().map(TopicPartition::topic))
-    ).collect(Collectors.toSet()).size();
-
    consumerGroup.setMessagesBehind(c.getMessagesBehind());
-    consumerGroup.setTopics(numTopics);
+    consumerGroup.setTopics(c.getTopicNum());
    consumerGroup.setSimple(c.isSimple());

    Optional.ofNullable(c.getState())
@@ -5,6 +5,7 @@ import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import lombok.Builder;
 import lombok.Data;
 import org.apache.kafka.clients.admin.ConsumerGroupDescription;
@@ -21,6 +22,7 @@ public class InternalConsumerGroup {
  private final Map<TopicPartition, Long> offsets;
  private final Map<TopicPartition, Long> endOffsets;
  private final Long messagesBehind;
+  private final Integer topicNum;
  private final String partitionAssignor;
  private final ConsumerGroupState state;
  private final Node coordinator;
@@ -44,22 +46,12 @@ public class InternalConsumerGroup {
    builder.simple(description.isSimpleConsumerGroup());
    builder.state(description.state());
    builder.partitionAssignor(description.partitionAssignor());
-    builder.members(
-        description.members().stream()
-            .map(m ->
-                InternalConsumerGroup.InternalMember.builder()
-                    .assignment(m.assignment().topicPartitions())
-                    .clientId(m.clientId())
-                    .groupInstanceId(m.groupInstanceId().orElse(""))
-                    .consumerId(m.consumerId())
-                    .clientId(m.clientId())
-                    .host(m.host())
-                    .build()
-            ).collect(Collectors.toList())
-    );
+    Collection<InternalMember> internalMembers = initInternalMembers(description);
+    builder.members(internalMembers);
    builder.offsets(groupOffsets);
    builder.endOffsets(topicEndOffsets);
    builder.messagesBehind(calculateMessagesBehind(groupOffsets, topicEndOffsets));
+    builder.topicNum(calculateTopicNum(groupOffsets, internalMembers));
    Optional.ofNullable(description.coordinator()).ifPresent(builder::coordinator);
    return builder.build();
  }
@@ -80,4 +72,31 @@ public class InternalConsumerGroup {
    return messagesBehind;
  }

+  private static Integer calculateTopicNum(Map<TopicPartition, Long> offsets, Collection<InternalMember> members) {
+
+    long topicNum = Stream.concat(
+        offsets.keySet().stream().map(TopicPartition::topic),
+        members.stream()
+            .flatMap(m -> m.getAssignment().stream().map(TopicPartition::topic))
+    ).distinct().count();
+
+    return Integer.valueOf((int) topicNum);
+
+  }
+
+  private static Collection<InternalMember> initInternalMembers(ConsumerGroupDescription description) {
+    return description.members().stream()
+        .map(m ->
+            InternalConsumerGroup.InternalMember.builder()
+                .assignment(m.assignment().topicPartitions())
+                .clientId(m.clientId())
+                .groupInstanceId(m.groupInstanceId().orElse(""))
+                .consumerId(m.consumerId())
+                .clientId(m.clientId())
+                .host(m.host())
+                .build()
+        ).collect(Collectors.toList());
+  }
+
+
 }
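calculateTopicNum above counts the distinct topics seen either in the group's committed offsets or in its live member assignments. A simplified sketch of that counting step, using plain topic names instead of TopicPartition (class name and data are illustrative):

    import java.util.List;
    import java.util.Map;
    import java.util.stream.Stream;

    public class TopicNumSketch {
      public static void main(String[] args) {
        // committed offsets cover topics "a" and "b"; a live member is additionally assigned "c"
        Map<String, Long> committedOffsetsByTopic = Map.of("a", 10L, "b", 5L);
        List<String> assignedTopics = List.of("b", "c");

        long topicNum = Stream.concat(committedOffsetsByTopic.keySet().stream(), assignedTopics.stream())
            .distinct()
            .count();

        System.out.println(topicNum); // 3, because "b" is only counted once
      }
    }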
@@ -1,33 +1,36 @@
 package com.provectus.kafka.ui.service;

+import com.provectus.kafka.ui.config.ClustersProperties;
 import com.provectus.kafka.ui.model.KafkaCluster;
 import com.provectus.kafka.ui.util.SslPropertiesUtil;
 import java.io.Closeable;
 import java.time.Instant;
 import java.util.Map;
+import java.util.Optional;
 import java.util.Properties;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicLong;
-import lombok.RequiredArgsConstructor;
-import lombok.Setter;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.kafka.clients.admin.AdminClient;
 import org.apache.kafka.clients.admin.AdminClientConfig;
-import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Service;
 import reactor.core.publisher.Mono;

 @Service
-@RequiredArgsConstructor
 @Slf4j
 public class AdminClientServiceImpl implements AdminClientService, Closeable {

+  private static final int DEFAULT_CLIENT_TIMEOUT_MS = 30_000;
+
   private static final AtomicLong CLIENT_ID_SEQ = new AtomicLong();

   private final Map<String, ReactiveAdminClient> adminClientCache = new ConcurrentHashMap<>();
-  @Setter // used in tests
-  @Value("${kafka.admin-client-timeout:30000}")
-  private int clientTimeout;
+  private final int clientTimeout;
+
+  public AdminClientServiceImpl(ClustersProperties clustersProperties) {
+    this.clientTimeout = Optional.ofNullable(clustersProperties.getAdminClientTimeout())
+        .orElse(DEFAULT_CLIENT_TIMEOUT_MS);
+  }

   @Override
   public Mono<ReactiveAdminClient> get(KafkaCluster cluster) {
@@ -42,7 +45,7 @@ public class AdminClientServiceImpl implements AdminClientService, Closeable {
    SslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), properties);
    properties.putAll(cluster.getProperties());
    properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.getBootstrapServers());
-    properties.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, clientTimeout);
+    properties.putIfAbsent(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, clientTimeout);
    properties.putIfAbsent(
        AdminClientConfig.CLIENT_ID_CONFIG,
        "kafka-ui-admin-" + Instant.now().getEpochSecond() + "-" + CLIENT_ID_SEQ.incrementAndGet()
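Switching put to putIfAbsent for request.timeout.ms means a timeout already supplied via the per-cluster properties map now wins over the global kafka.adminClientTimeout default. A small sketch of that precedence (property values are illustrative):

    import java.util.Properties;

    public class PutIfAbsentSketch {
      public static void main(String[] args) {
        Properties props = new Properties();
        props.put("request.timeout.ms", "5000");              // set earlier from cluster.getProperties()

        props.putIfAbsent("request.timeout.ms", 30_000);      // skipped: the key is already present
        props.putIfAbsent("client.id", "kafka-ui-admin-1-1"); // applied: nothing was set before

        System.out.println(props.get("request.timeout.ms"));  // 5000
        System.out.println(props.get("client.id"));           // kafka-ui-admin-1-1
      }
    }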
@@ -101,6 +101,9 @@ public class ConsumerGroupService {
  public record ConsumerGroupsPage(List<InternalConsumerGroup> consumerGroups, int totalPages) {
  }

+  private record GroupWithDescr(InternalConsumerGroup icg, ConsumerGroupDescription cgd) {
+  }
+
  public Mono<ConsumerGroupsPage> getConsumerGroupsPage(
      KafkaCluster cluster,
      int pageNum,
@@ -159,22 +162,19 @@ public class ConsumerGroupService {
            sortAndPaginate(descriptions.values(), comparator, pageNum, perPage, sortOrderDto).toList());
      }
      case MESSAGES_BEHIND -> {
-        record GroupWithDescr(InternalConsumerGroup icg, ConsumerGroupDescription cgd) { }

        Comparator<GroupWithDescr> comparator = Comparator.comparingLong(gwd ->
            gwd.icg.getMessagesBehind() == null ? 0L : gwd.icg.getMessagesBehind());

-        var groupNames = groups.stream().map(ConsumerGroupListing::groupId).toList();
-
-        yield ac.describeConsumerGroups(groupNames)
-            .flatMap(descriptionsMap -> {
-                  List<ConsumerGroupDescription> descriptions = descriptionsMap.values().stream().toList();
-                  return getConsumerGroups(ac, descriptions)
-                      .map(icg -> Streams.zip(icg.stream(), descriptions.stream(), GroupWithDescr::new).toList())
-                      .map(gwd -> sortAndPaginate(gwd, comparator, pageNum, perPage, sortOrderDto)
-                          .map(GroupWithDescr::cgd).toList());
-                }
-            );
+        yield loadDescriptionsByInternalConsumerGroups(ac, groups, comparator, pageNum, perPage, sortOrderDto);
+      }
+
+      case TOPIC_NUM -> {
+
+        Comparator<GroupWithDescr> comparator = Comparator.comparingInt(gwd -> gwd.icg.getTopicNum());
+
+        yield loadDescriptionsByInternalConsumerGroups(ac, groups, comparator, pageNum, perPage, sortOrderDto);
+
      }
    };
  }
@@ -209,6 +209,27 @@ public class ConsumerGroupService {
        .map(cgs -> new ArrayList<>(cgs.values()));
  }

+
+  private Mono<List<ConsumerGroupDescription>> loadDescriptionsByInternalConsumerGroups(ReactiveAdminClient ac,
+                                                                                        List<ConsumerGroupListing> groups,
+                                                                                        Comparator<GroupWithDescr> comparator,
+                                                                                        int pageNum,
+                                                                                        int perPage,
+                                                                                        SortOrderDTO sortOrderDto) {
+    var groupNames = groups.stream().map(ConsumerGroupListing::groupId).toList();
+
+    return ac.describeConsumerGroups(groupNames)
+        .flatMap(descriptionsMap -> {
+              List<ConsumerGroupDescription> descriptions = descriptionsMap.values().stream().toList();
+              return getConsumerGroups(ac, descriptions)
+                  .map(icg -> Streams.zip(icg.stream(), descriptions.stream(), GroupWithDescr::new).toList())
+                  .map(gwd -> sortAndPaginate(gwd, comparator, pageNum, perPage, sortOrderDto)
+                      .map(GroupWithDescr::cgd).toList());
+            }
+        );
+
+  }
+
  public Mono<InternalConsumerGroup> getConsumerGroupDetail(KafkaCluster cluster,
                                                            String consumerGroupId) {
    return adminClientService.get(cluster)
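loadDescriptionsByInternalConsumerGroups pairs each InternalConsumerGroup with its ConsumerGroupDescription via Guava's Streams.zip and then sorts by the supplied comparator. A self-contained sketch of that zip-then-sort step with plain values (the Pair record and data are illustrative):

    import com.google.common.collect.Streams;
    import java.util.Comparator;
    import java.util.List;

    public class ZipAndSortSketch {
      record Pair(String groupId, int topicNum) { }

      public static void main(String[] args) {
        List<String> groupIds = List.of("g1", "g2", "g3");
        List<Integer> topicNums = List.of(5, 1, 3);

        // zip the two parallel streams into pairs, then order by topic count
        List<Pair> sorted = Streams.zip(groupIds.stream(), topicNums.stream(), Pair::new)
            .sorted(Comparator.comparingInt(Pair::topicNum))
            .toList();

        System.out.println(sorted); // [Pair[groupId=g2, topicNum=1], Pair[groupId=g3, topicNum=3], Pair[groupId=g1, topicNum=5]]
      }
    }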
@@ -2,6 +2,7 @@ package com.provectus.kafka.ui.service;

 import com.provectus.kafka.ui.client.RetryingKafkaConnectClient;
 import com.provectus.kafka.ui.config.ClustersProperties;
+import com.provectus.kafka.ui.config.WebclientProperties;
 import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
 import com.provectus.kafka.ui.emitter.PollingSettings;
 import com.provectus.kafka.ui.model.ApplicationPropertyValidationDTO;
@@ -22,9 +23,7 @@ import java.util.Optional;
 import java.util.Properties;
 import java.util.stream.Stream;
 import javax.annotation.Nullable;
-import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
-import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Service;
 import org.springframework.util.unit.DataSize;
 import org.springframework.web.reactive.function.client.WebClient;
@@ -34,12 +33,18 @@ import reactor.util.function.Tuple2;
 import reactor.util.function.Tuples;

 @Service
-@RequiredArgsConstructor
 @Slf4j
 public class KafkaClusterFactory {

-  @Value("${webclient.max-in-memory-buffer-size:20MB}")
-  private DataSize maxBuffSize;
+  private static final DataSize DEFAULT_WEBCLIENT_BUFFER = DataSize.parse("20MB");
+
+  private final DataSize webClientMaxBuffSize;
+
+  public KafkaClusterFactory(WebclientProperties webclientProperties) {
+    this.webClientMaxBuffSize = Optional.ofNullable(webclientProperties.getMaxInMemoryBufferSize())
+        .map(DataSize::parse)
+        .orElse(DEFAULT_WEBCLIENT_BUFFER);
+  }

  public KafkaCluster create(ClustersProperties properties,
                             ClustersProperties.Cluster clusterProperties) {
@@ -140,7 +145,7 @@ public class KafkaClusterFactory {
        url -> new RetryingKafkaConnectClient(
            connectCluster.toBuilder().address(url).build(),
            cluster.getSsl(),
-            maxBuffSize
+            webClientMaxBuffSize
        ),
        ReactiveFailover.CONNECTION_REFUSED_EXCEPTION_FILTER,
        "No alive connect instances available",
@@ -158,7 +163,7 @@ public class KafkaClusterFactory {
    WebClient webClient = new WebClientConfigurator()
        .configureSsl(clusterProperties.getSsl(), clusterProperties.getSchemaRegistrySsl())
        .configureBasicAuth(auth.getUsername(), auth.getPassword())
-        .configureBufferSize(maxBuffSize)
+        .configureBufferSize(webClientMaxBuffSize)
        .build();
    return ReactiveFailover.create(
        parseUrlList(clusterProperties.getSchemaRegistry()),
@@ -181,7 +186,7 @@ public class KafkaClusterFactory {
            clusterProperties.getKsqldbServerAuth(),
            clusterProperties.getSsl(),
            clusterProperties.getKsqldbServerSsl(),
-            maxBuffSize
+            webClientMaxBuffSize
        ),
        ReactiveFailover.CONNECTION_REFUSED_EXCEPTION_FILTER,
        "No live ksqldb instances available",
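The new constructor replaces the @Value-injected buffer size with an explicit fallback to a 20MB default when webclient.maxInMemoryBufferSize is not configured. A sketch of that Optional-based fallback in isolation (class and method names are illustrative):

    import java.util.Optional;
    import org.springframework.util.unit.DataSize;

    public class BufferSizeFallbackSketch {
      private static final DataSize DEFAULT_WEBCLIENT_BUFFER = DataSize.parse("20MB");

      static DataSize resolve(String configuredMaxInMemoryBufferSize) {
        // a configured value wins; otherwise fall back to the compile-time default
        return Optional.ofNullable(configuredMaxInMemoryBufferSize)
            .map(DataSize::parse)
            .orElse(DEFAULT_WEBCLIENT_BUFFER);
      }

      public static void main(String[] args) {
        System.out.println(resolve(null));   // 20971520B (default)
        System.out.println(resolve("5MB"));  // 5242880B
      }
    }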
@@ -2,6 +2,7 @@ package com.provectus.kafka.ui.util;


 import com.provectus.kafka.ui.config.ClustersProperties;
+import com.provectus.kafka.ui.config.WebclientProperties;
 import com.provectus.kafka.ui.config.auth.OAuthProperties;
 import com.provectus.kafka.ui.config.auth.RoleBasedAccessControlProperties;
 import com.provectus.kafka.ui.exception.FileUploadException;
@@ -97,6 +98,7 @@ public class DynamicConfigOperations {
            .type(ctx.getEnvironment().getProperty("auth.type"))
            .oauth2(getNullableBean(OAuthProperties.class))
            .build())
+        .webclient(getNullableBean(WebclientProperties.class))
        .build();
  }

@@ -204,6 +206,7 @@ public class DynamicConfigOperations {
    private ClustersProperties kafka;
    private RoleBasedAccessControlProperties rbac;
    private Auth auth;
+    private WebclientProperties webclient;

    @Data
    @Builder
@@ -222,6 +225,9 @@ public class DynamicConfigOperations {
      Optional.ofNullable(auth)
          .flatMap(a -> Optional.ofNullable(a.oauth2))
          .ifPresent(OAuthProperties::validate);
+
+      Optional.ofNullable(webclient)
+          .ifPresent(WebclientProperties::validate);
    }
  }

@@ -4,9 +4,9 @@ import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;

 // Specifies field that can contain any kind of value - primitive, complex and nulls
-public class AnyFieldSchema implements FieldSchema {
+class AnyFieldSchema implements FieldSchema {

-  public static AnyFieldSchema get() {
+  static AnyFieldSchema get() {
    return new AnyFieldSchema();
  }

@@ -4,10 +4,10 @@ import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;

-public class ArrayFieldSchema implements FieldSchema {
+class ArrayFieldSchema implements FieldSchema {
  private final FieldSchema itemsSchema;

-  public ArrayFieldSchema(FieldSchema itemsSchema) {
+  ArrayFieldSchema(FieldSchema itemsSchema) {
    this.itemsSchema = itemsSchema;
  }

@@ -7,10 +7,10 @@ import java.util.List;
 import java.util.Map;


-public class EnumJsonType extends JsonType {
+class EnumJsonType extends JsonType {
  private final List<String> values;

-  public EnumJsonType(List<String> values) {
+  EnumJsonType(List<String> values) {
    super(Type.ENUM);
    this.values = values;
  }
@@ -3,6 +3,6 @@ package com.provectus.kafka.ui.util.jsonschema;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;

-public interface FieldSchema {
+interface FieldSchema {
  JsonNode toJsonNode(ObjectMapper mapper);
 }
@@ -4,7 +4,7 @@ import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import java.util.Map;

-public abstract class JsonType {
+abstract class JsonType {

  protected final Type type;

@@ -12,13 +12,13 @@ public abstract class JsonType {
    this.type = type;
  }

-  public Type getType() {
+  Type getType() {
    return type;
  }

-  public abstract Map<String, JsonNode> toJsonNode(ObjectMapper mapper);
+  abstract Map<String, JsonNode> toJsonNode(ObjectMapper mapper);

-  public enum Type {
+  enum Type {
    NULL,
    BOOLEAN,
    OBJECT,
@@ -2,21 +2,27 @@ package com.provectus.kafka.ui.util.jsonschema;

 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.BooleanNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.fasterxml.jackson.databind.node.TextNode;
+import javax.annotation.Nullable;

-public class MapFieldSchema implements FieldSchema {
-  private final FieldSchema itemSchema;
+class MapFieldSchema implements FieldSchema {
+  private final @Nullable FieldSchema itemSchema;

-  public MapFieldSchema(FieldSchema itemSchema) {
+  MapFieldSchema(@Nullable FieldSchema itemSchema) {
    this.itemSchema = itemSchema;
  }

+  MapFieldSchema() {
+    this(null);
+  }
+
  @Override
  public JsonNode toJsonNode(ObjectMapper mapper) {
    final ObjectNode objectNode = mapper.createObjectNode();
    objectNode.set("type", new TextNode(JsonType.Type.OBJECT.getName()));
-    objectNode.set("additionalProperties", itemSchema.toJsonNode(mapper));
+    objectNode.set("additionalProperties", itemSchema != null ? itemSchema.toJsonNode(mapper) : BooleanNode.TRUE);
    return objectNode;
  }
 }
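With no item schema, the new MapFieldSchema() constructor emits an open object schema, i.e. additionalProperties: true. A sketch of the resulting JSON shape built directly with Jackson (the class name is illustrative):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.node.BooleanNode;
    import com.fasterxml.jackson.databind.node.ObjectNode;
    import com.fasterxml.jackson.databind.node.TextNode;

    public class MapSchemaShapeSketch {
      public static void main(String[] args) {
        ObjectMapper mapper = new ObjectMapper();
        ObjectNode node = mapper.createObjectNode();
        node.set("type", new TextNode("object"));
        // itemSchema == null means any value type is allowed for the map entries
        node.set("additionalProperties", BooleanNode.TRUE);
        System.out.println(node); // {"type":"object","additionalProperties":true}
      }
    }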
@@ -9,24 +9,24 @@ import java.util.stream.Collectors;
 import reactor.util.function.Tuple2;
 import reactor.util.function.Tuples;

-public class ObjectFieldSchema implements FieldSchema {
+class ObjectFieldSchema implements FieldSchema {

-  public static final ObjectFieldSchema EMPTY = new ObjectFieldSchema(Map.of(), List.of());
+  static final ObjectFieldSchema EMPTY = new ObjectFieldSchema(Map.of(), List.of());

  private final Map<String, FieldSchema> properties;
  private final List<String> required;

-  public ObjectFieldSchema(Map<String, FieldSchema> properties,
+  ObjectFieldSchema(Map<String, FieldSchema> properties,
                    List<String> required) {
    this.properties = properties;
    this.required = required;
  }

-  public Map<String, FieldSchema> getProperties() {
+  Map<String, FieldSchema> getProperties() {
    return properties;
  }

-  public List<String> getRequired() {
+  List<String> getRequired() {
    return required;
  }

@@ -5,11 +5,10 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import java.util.List;
 import java.util.stream.Collectors;

-public class OneOfFieldSchema implements FieldSchema {
+class OneOfFieldSchema implements FieldSchema {
  private final List<FieldSchema> schemaList;

-  public OneOfFieldSchema(
-      List<FieldSchema> schemaList) {
+  OneOfFieldSchema(List<FieldSchema> schemaList) {
    this.schemaList = schemaList;
  }

@@ -94,6 +94,9 @@ public class ProtobufSchemaConverter implements JsonSchemaConverter<Descriptors.
    if (wellKnownTypeSchema.isPresent()) {
      return wellKnownTypeSchema.get();
    }
+    if (field.isMapField()) {
+      return new MapFieldSchema();
+    }
    final JsonType jsonType = convertType(field);
    FieldSchema fieldSchema;
    if (jsonType.getType().equals(JsonType.Type.OBJECT)) {
@@ -149,67 +152,47 @@ public class ProtobufSchemaConverter implements JsonSchemaConverter<Descriptors.
  }

  private JsonType convertType(Descriptors.FieldDescriptor field) {
-    switch (field.getType()) {
-      case INT32:
-      case FIXED32:
-      case SFIXED32:
-      case SINT32:
-        return new SimpleJsonType(
-            JsonType.Type.INTEGER,
-            Map.of(
-                "maximum", IntNode.valueOf(Integer.MAX_VALUE),
-                "minimum", IntNode.valueOf(Integer.MIN_VALUE)
-            )
-        );
-      case UINT32:
-        return new SimpleJsonType(
-            JsonType.Type.INTEGER,
-            Map.of(
-                "maximum", LongNode.valueOf(UnsignedInteger.MAX_VALUE.longValue()),
-                "minimum", IntNode.valueOf(0)
-            )
-        );
+    return switch (field.getType()) {
+      case INT32, FIXED32, SFIXED32, SINT32 -> new SimpleJsonType(
+          JsonType.Type.INTEGER,
+          Map.of(
+              "maximum", IntNode.valueOf(Integer.MAX_VALUE),
+              "minimum", IntNode.valueOf(Integer.MIN_VALUE)
+          )
+      );
+      case UINT32 -> new SimpleJsonType(
+          JsonType.Type.INTEGER,
+          Map.of(
+              "maximum", LongNode.valueOf(UnsignedInteger.MAX_VALUE.longValue()),
+              "minimum", IntNode.valueOf(0)
+          )
+      );
      //TODO: actually all *64 types will be printed with quotes (as strings),
      // see JsonFormat::printSingleFieldValue for impl. This can cause problems when you copy-paste from messages
      // table to `Produce` area - need to think if it is critical or not.
-      case INT64:
-      case FIXED64:
-      case SFIXED64:
-      case SINT64:
-        return new SimpleJsonType(
-            JsonType.Type.INTEGER,
-            Map.of(
-                "maximum", LongNode.valueOf(Long.MAX_VALUE),
-                "minimum", LongNode.valueOf(Long.MIN_VALUE)
-            )
-        );
-      case UINT64:
-        return new SimpleJsonType(
-            JsonType.Type.INTEGER,
-            Map.of(
-                "maximum", new BigIntegerNode(UnsignedLong.MAX_VALUE.bigIntegerValue()),
-                "minimum", LongNode.valueOf(0)
-            )
-        );
-      case MESSAGE:
-      case GROUP:
-        return new SimpleJsonType(JsonType.Type.OBJECT);
-      case ENUM:
-        return new EnumJsonType(
-            field.getEnumType().getValues().stream()
-                .map(Descriptors.EnumValueDescriptor::getName)
-                .collect(Collectors.toList())
-        );
-      case BYTES:
-      case STRING:
-        return new SimpleJsonType(JsonType.Type.STRING);
-      case FLOAT:
-      case DOUBLE:
-        return new SimpleJsonType(JsonType.Type.NUMBER);
-      case BOOL:
-        return new SimpleJsonType(JsonType.Type.BOOLEAN);
-      default:
-        return new SimpleJsonType(JsonType.Type.STRING);
-    }
+      case INT64, FIXED64, SFIXED64, SINT64 -> new SimpleJsonType(
+          JsonType.Type.INTEGER,
+          Map.of(
+              "maximum", LongNode.valueOf(Long.MAX_VALUE),
+              "minimum", LongNode.valueOf(Long.MIN_VALUE)
+          )
+      );
+      case UINT64 -> new SimpleJsonType(
+          JsonType.Type.INTEGER,
+          Map.of(
+              "maximum", new BigIntegerNode(UnsignedLong.MAX_VALUE.bigIntegerValue()),
+              "minimum", LongNode.valueOf(0)
+          )
+      );
+      case MESSAGE, GROUP -> new SimpleJsonType(JsonType.Type.OBJECT);
+      case ENUM -> new EnumJsonType(
+          field.getEnumType().getValues().stream()
+              .map(Descriptors.EnumValueDescriptor::getName)
+              .collect(Collectors.toList())
+      );
+      case BYTES, STRING -> new SimpleJsonType(JsonType.Type.STRING);
+      case FLOAT, DOUBLE -> new SimpleJsonType(JsonType.Type.NUMBER);
+      case BOOL -> new SimpleJsonType(JsonType.Type.BOOLEAN);
+    };
  }
 }
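convertType is rewritten from a fall-through switch statement into an arrow-style switch expression, which lets the default branch be dropped because the compiler enforces exhaustiveness over the enum. A minimal sketch of the construct with an illustrative enum:

    public class SwitchExpressionSketch {
      enum Kind { INT32, INT64, STRING, BOOL }

      static String jsonType(Kind kind) {
        // arrow labels: no fall-through, each case yields a value,
        // and covering every enum constant removes the need for a default branch
        return switch (kind) {
          case INT32, INT64 -> "integer";
          case STRING -> "string";
          case BOOL -> "boolean";
        };
      }

      public static void main(String[] args) {
        System.out.println(jsonType(Kind.INT64)); // integer
      }
    }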
@@ -4,10 +4,10 @@ import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.TextNode;

-public class RefFieldSchema implements FieldSchema {
+class RefFieldSchema implements FieldSchema {
  private final String ref;

-  public RefFieldSchema(String ref) {
+  RefFieldSchema(String ref) {
    this.ref = ref;
  }

@@ -16,7 +16,7 @@ public class RefFieldSchema implements FieldSchema {
    return mapper.createObjectNode().set("$ref", new TextNode(ref));
  }

-  public String getRef() {
+  String getRef() {
    return ref;
  }
 }
@@ -3,10 +3,10 @@ package com.provectus.kafka.ui.util.jsonschema;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;

-public class SimpleFieldSchema implements FieldSchema {
+class SimpleFieldSchema implements FieldSchema {
  private final JsonType type;

-  public SimpleFieldSchema(JsonType type) {
+  SimpleFieldSchema(JsonType type) {
    this.type = type;
  }

@@ -6,15 +6,15 @@ import com.fasterxml.jackson.databind.node.TextNode;
 import com.google.common.collect.ImmutableMap;
 import java.util.Map;

-public class SimpleJsonType extends JsonType {
+class SimpleJsonType extends JsonType {

  private final Map<String, JsonNode> additionalTypeProperties;

-  public SimpleJsonType(Type type) {
+  SimpleJsonType(Type type) {
    this(type, Map.of());
  }

-  public SimpleJsonType(Type type, Map<String, JsonNode> additionalTypeProperties) {
+  SimpleJsonType(Type type, Map<String, JsonNode> additionalTypeProperties) {
    super(type);
    this.additionalTypeProperties = additionalTypeProperties;
  }

@@ -59,8 +59,10 @@ class ProtobufSchemaConverterTest {
            TestMsg outer_ref = 2;
            EmbeddedMsg self_ref = 3;
          }
-        }""";
+
+          map<int32, string> intToStringMap = 21;
+          map<string, EmbeddedMsg> strToObjMap = 22;
+        }""";

    String expectedJsonSchema = """
        {
@@ -109,7 +111,9 @@ class ProtobufSchemaConverterTest {
            "v2": { "type": [ "number", "string", "object", "array", "boolean", "null" ] },
            "uint32_w_field": { "type": "integer", "maximum": 4294967295, "minimum": 0 },
            "bool_w_field": { "type": "boolean" },
-            "uint64_w_field": { "type": "integer", "maximum": 18446744073709551615, "minimum": 0 }
+            "uint64_w_field": { "type": "integer", "maximum": 18446744073709551615, "minimum": 0 },
+            "strToObjMap": { "type": "object", "additionalProperties": true },
+            "intToStringMap": { "type": "object", "additionalProperties": true }
          }
        },
        "test.TestMsg.EmbeddedMsg": {
@@ -2562,6 +2562,7 @@ components:
          - MEMBERS
          - STATE
          - MESSAGES_BEHIND
+          - TOPIC_NUM

    ConsumerGroupsPageResponse:
      type: object
@@ -3644,6 +3645,12 @@ components:
              type: array
              items:
                $ref: '#/components/schemas/Action'
+        webclient:
+          type: object
+          properties:
+            maxInMemoryBufferSize:
+              type: string
+              description: "examples: 20, 12KB, 5MB"
        kafka:
          type: object
          properties:
@@ -3656,6 +3663,10 @@ components:
                  type: integer
                noDataEmptyPolls:
                  type: integer
+            adminClientTimeout:
+              type: integer
+            internalTopicPrefix:
+              type: string
            clusters:
              type: array
              items:
@@ -37,9 +37,13 @@ public abstract class BasePage extends WebUtils {
  protected String pageTitleFromHeader = "//h1[text()='%s']";
  protected String pagePathFromHeader = "//a[text()='%s']/../h1";

+  protected boolean isSpinnerVisible(int... timeoutInSeconds) {
+    return isVisible(loadingSpinner, timeoutInSeconds);
+  }
+
  protected void waitUntilSpinnerDisappear(int... timeoutInSeconds) {
    log.debug("\nwaitUntilSpinnerDisappear");
-    if (isVisible(loadingSpinner, timeoutInSeconds)) {
+    if (isSpinnerVisible(timeoutInSeconds)) {
      loadingSpinner.shouldBe(Condition.disappear, Duration.ofSeconds(60));
    }
  }
@@ -1,5 +1,6 @@
 package com.provectus.kafka.ui.pages.ksqldb;

+import static com.codeborne.selenide.Condition.visible;
 import static com.codeborne.selenide.Selenide.$;
 import static com.codeborne.selenide.Selenide.$x;
 import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
@@ -10,12 +11,12 @@ import com.codeborne.selenide.SelenideElement;
 import com.provectus.kafka.ui.pages.BasePage;
 import com.provectus.kafka.ui.pages.ksqldb.enums.KsqlMenuTabs;
 import io.qameta.allure.Step;
+import java.time.Duration;
 import java.util.ArrayList;
 import java.util.List;
 import org.openqa.selenium.By;

 public class KsqlDbList extends BasePage {

  protected SelenideElement executeKsqlBtn = $x("//button[text()='Execute KSQL Request']");
  protected SelenideElement tablesTab = $x("//nav[@role='navigation']/a[text()='Tables']");
  protected SelenideElement streamsTab = $x("//nav[@role='navigation']/a[text()='Streams']");
@@ -76,9 +77,24 @@ public class KsqlDbList {
      this.element = element;
    }

+    private SelenideElement getNameElm() {
+      return element.$x("./td[1]");
+    }
+
    @Step
    public String getTableName() {
-      return element.$x("./td[1]").getText().trim();
+      return getNameElm().getText().trim();
+    }
+
+    @Step
+    public boolean isVisible() {
+      boolean isVisible = false;
+      try {
+        getNameElm().shouldBe(visible, Duration.ofMillis(500));
+        isVisible = true;
+      } catch (Throwable ignored) {
+      }
+      return isVisible;
    }

    @Step
@@ -110,9 +126,24 @@ public class KsqlDbList {
      this.element = element;
    }

+    private SelenideElement getNameElm() {
+      return element.$x("./td[1]");
+    }
+
    @Step
    public String getStreamName() {
-      return element.$x("./td[1]").getText().trim();
+      return getNameElm().getText().trim();
+    }
+
+    @Step
+    public boolean isVisible() {
+      boolean isVisible = false;
+      try {
+        getNameElm().shouldBe(visible, Duration.ofMillis(500));
+        isVisible = true;
+      } catch (Throwable ignored) {
+      }
+      return isVisible;
    }

    @Step
@@ -40,9 +40,14 @@ public class KsqlQueryForm extends BasePage {
  }

  @Step
-  public KsqlQueryForm clickExecuteBtn() {
+  public String getEnteredQuery() {
+    return queryAreaValue.getText().trim();
+  }
+
+  @Step
+  public KsqlQueryForm clickExecuteBtn(String query) {
    clickByActions(executeBtn);
-    if (queryAreaValue.getText().contains("EMIT CHANGES;")) {
+    if (query.contains("EMIT CHANGES")) {
      loadingSpinner.shouldBe(Condition.visible);
    } else {
      waitUntilSpinnerDisappear();
@@ -66,19 +71,19 @@ public class KsqlQueryForm extends BasePage {

  @Step
  public KsqlQueryForm clickAddStreamProperty() {
-    clickByJavaScript(addStreamPropertyBtn);
+    clickByActions(addStreamPropertyBtn);
    return this;
  }

  @Step
  public KsqlQueryForm setQuery(String query) {
    queryAreaValue.shouldBe(Condition.visible).click();
-    queryArea.setValue(query);
+    sendKeysByActions(queryArea, query);
    return this;
  }

  @Step
-  public KsqlQueryForm.KsqlResponseGridItem getTableByName(String name) {
+  public KsqlQueryForm.KsqlResponseGridItem getItemByName(String name) {
    return initItems().stream()
        .filter(e -> e.getName().equalsIgnoreCase(name))
        .findFirst().orElseThrow();
@@ -114,16 +119,20 @@ public class KsqlQueryForm extends BasePage {
      return element.$x("./td[1]").getText().trim();
    }

+    private SelenideElement getNameElm() {
+      return element.$x("./td[2]");
+    }
+
    @Step
    public String getName() {
-      return element.$x("./td[2]").scrollTo().getText().trim();
+      return getNameElm().scrollTo().getText().trim();
    }

    @Step
    public boolean isVisible() {
      boolean isVisible = false;
      try {
-        element.$x("./td[2]").shouldBe(visible, Duration.ofMillis(500));
+        getNameElm().shouldBe(visible, Duration.ofMillis(500));
        isVisible = true;
      } catch (Throwable ignored) {
      }
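clickExecuteBtn now receives the query text instead of re-reading the editor, so the caller decides up front whether the spinner should stay visible (streaming EMIT CHANGES queries) or disappear. A hypothetical call site, assuming the KsqlQueryForm page object from this module is on the test classpath (the import path is inferred from the neighbouring KsqlDbList class and is an assumption):

    import com.provectus.kafka.ui.pages.ksqldb.KsqlQueryForm;

    public class KsqlQueryFormUsageSketch {
      static void runShowTables(KsqlQueryForm form) {
        String query = "SHOW TABLES;";
        // the same string is passed to both steps, keeping the page object stateless about the query
        form.setQuery(query)
            .clickExecuteBtn(query);
      }
    }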
@@ -95,7 +95,7 @@ public class WebUtils {
    return isSelected;
  }

-  public static boolean selectElement(SelenideElement element, boolean select) {
+  public static void selectElement(SelenideElement element, boolean select) {
    if (select) {
      if (!element.isSelected()) {
        clickByJavaScript(element);
@@ -105,6 +105,5 @@ public class WebUtils {
        clickByJavaScript(element);
      }
    }
-    return true;
  }
 }
@@ -22,57 +22,50 @@ public class SmokeBacklog extends BaseManualTest {

   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = KSQL_DB_SUITE_ID)
-  @QaseId(276)
+  @QaseId(277)
   @Test
   public void testCaseB() {
   }

   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = KSQL_DB_SUITE_ID)
-  @QaseId(277)
+  @QaseId(278)
   @Test
   public void testCaseC() {
   }

-  @Automation(state = TO_BE_AUTOMATED)
-  @Suite(id = KSQL_DB_SUITE_ID)
-  @QaseId(278)
-  @Test
-  public void testCaseD() {
-  }
-
   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = KSQL_DB_SUITE_ID)
   @QaseId(284)
   @Test
-  public void testCaseE() {
+  public void testCaseD() {
   }

   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = BROKERS_SUITE_ID)
   @QaseId(331)
   @Test
-  public void testCaseF() {
+  public void testCaseE() {
   }

   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = BROKERS_SUITE_ID)
   @QaseId(332)
   @Test
-  public void testCaseG() {
+  public void testCaseF() {
   }

   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = TOPICS_PROFILE_SUITE_ID)
   @QaseId(335)
   @Test
-  public void testCaseH() {
+  public void testCaseG() {
   }

   @Automation(state = TO_BE_AUTOMATED)
   @Suite(id = TOPICS_PROFILE_SUITE_ID)
   @QaseId(336)
   @Test
-  public void testCaseI() {
+  public void testCaseH() {
   }
 }
@@ -1,14 +1,17 @@
 package com.provectus.kafka.ui.smokesuite.ksqldb;

 import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_TABLES;
+import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
 import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;

 import com.provectus.kafka.ui.BaseTest;
 import com.provectus.kafka.ui.pages.ksqldb.models.Stream;
 import com.provectus.kafka.ui.pages.ksqldb.models.Table;
+import io.qameta.allure.Step;
 import io.qase.api.annotation.QaseId;
 import java.util.ArrayList;
 import java.util.List;
+import org.testng.Assert;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
@@ -16,53 +19,30 @@ import org.testng.asserts.SoftAssert;

 public class KsqlDbTest extends BaseTest {

-  private static final Stream STREAM_FOR_CHECK_TABLES = new Stream()
-      .setName("STREAM_FOR_CHECK_TABLES_" + randomAlphabetic(4).toUpperCase())
-      .setTopicName("TOPIC_FOR_STREAM_" + randomAlphabetic(4).toUpperCase());
+  private static final Stream DEFAULT_STREAM = new Stream()
+      .setName("DEFAULT_STREAM_" + randomAlphabetic(4).toUpperCase())
+      .setTopicName("DEFAULT_TOPIC_" + randomAlphabetic(4).toUpperCase());
   private static final Table FIRST_TABLE = new Table()
-      .setName("FIRST_TABLE" + randomAlphabetic(4).toUpperCase())
-      .setStreamName(STREAM_FOR_CHECK_TABLES.getName());
+      .setName("FIRST_TABLE_" + randomAlphabetic(4).toUpperCase())
+      .setStreamName(DEFAULT_STREAM.getName());
   private static final Table SECOND_TABLE = new Table()
-      .setName("SECOND_TABLE" + randomAlphabetic(4).toUpperCase())
-      .setStreamName(STREAM_FOR_CHECK_TABLES.getName());
+      .setName("SECOND_TABLE_" + randomAlphabetic(4).toUpperCase())
+      .setStreamName(DEFAULT_STREAM.getName());
   private static final List<String> TOPIC_NAMES_LIST = new ArrayList<>();

   @BeforeClass(alwaysRun = true)
   public void beforeClass() {
     apiService
-        .createStream(STREAM_FOR_CHECK_TABLES)
+        .createStream(DEFAULT_STREAM)
         .createTables(FIRST_TABLE, SECOND_TABLE);
-    TOPIC_NAMES_LIST.addAll(List.of(STREAM_FOR_CHECK_TABLES.getTopicName(),
+    TOPIC_NAMES_LIST.addAll(List.of(DEFAULT_STREAM.getTopicName(),
         FIRST_TABLE.getName(), SECOND_TABLE.getName()));
   }

-  @QaseId(41)
-  @Test(priority = 1)
-  public void checkShowTablesRequestExecution() {
-    navigateToKsqlDb();
-    ksqlDbList
-        .clickExecuteKsqlRequestBtn();
-    ksqlQueryForm
-        .waitUntilScreenReady()
-        .setQuery(SHOW_TABLES.getQuery())
-        .clickExecuteBtn();
-    SoftAssert softly = new SoftAssert();
-    softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
-    softly.assertTrue(ksqlQueryForm.getTableByName(FIRST_TABLE.getName()).isVisible(), "getTableName()");
-    softly.assertTrue(ksqlQueryForm.getTableByName(SECOND_TABLE.getName()).isVisible(), "getTableName()");
-    softly.assertAll();
-  }
-
   @QaseId(86)
-  @Test(priority = 2)
+  @Test(priority = 1)
   public void clearResultsForExecutedRequest() {
-    navigateToKsqlDb();
-    ksqlDbList
-        .clickExecuteKsqlRequestBtn();
-    ksqlQueryForm
-        .waitUntilScreenReady()
-        .setQuery(SHOW_TABLES.getQuery())
-        .clickExecuteBtn();
+    navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
     SoftAssert softly = new SoftAssert();
     softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
     softly.assertAll();
@@ -72,6 +52,40 @@ public class KsqlDbTest extends BaseTest {
     softly.assertAll();
   }

+  @QaseId(276)
+  @Test(priority = 2)
+  public void clearEnteredQueryCheck() {
+    navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
+    Assert.assertFalse(ksqlQueryForm.getEnteredQuery().isEmpty(), "getEnteredQuery()");
+    ksqlQueryForm
+        .clickClearBtn();
+    Assert.assertTrue(ksqlQueryForm.getEnteredQuery().isEmpty(), "getEnteredQuery()");
+  }
+
+  @QaseId(41)
+  @Test(priority = 3)
+  public void checkShowTablesRequestExecution() {
+    navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
+    SoftAssert softly = new SoftAssert();
+    softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
+    softly.assertTrue(ksqlQueryForm.getItemByName(FIRST_TABLE.getName()).isVisible(), "getItemByName()");
+    softly.assertTrue(ksqlQueryForm.getItemByName(SECOND_TABLE.getName()).isVisible(), "getItemByName()");
+    softly.assertAll();
+  }
+
+  @Step
+  private void navigateToKsqlDbAndExecuteRequest(String query) {
+    naviSideBar
+        .openSideMenu(KSQL_DB);
+    ksqlDbList
+        .waitUntilScreenReady()
+        .clickExecuteKsqlRequestBtn();
+    ksqlQueryForm
+        .waitUntilScreenReady()
+        .setQuery(query)
+        .clickExecuteBtn(query);
+  }
+
   @AfterClass(alwaysRun = true)
   public void afterClass() {
     TOPIC_NAMES_LIST.forEach(topicName -> apiService.deleteTopic(topicName));
@@ -8,7 +8,6 @@ import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;

 import com.provectus.kafka.ui.BaseTest;
 import com.provectus.kafka.ui.models.Topic;
-import com.provectus.kafka.ui.pages.topics.TopicDetails;
 import io.qameta.allure.Issue;
 import io.qameta.allure.Step;
 import io.qase.api.annotation.QaseId;
@@ -140,24 +139,22 @@ public class MessagesTest extends BaseTest {
     softly.assertAll();
   }

-  @Ignore
-  @Issue("https://github.com/provectus/kafka-ui/issues/2394")
   @QaseId(15)
   @Test(priority = 6)
   public void checkMessageFilteringByOffset() {
     navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
-    topicDetails
-        .openDetailsTab(MESSAGES);
-    TopicDetails.MessageGridItem secondMessage = topicDetails.getMessageByOffset(1);
+    int nextOffset = topicDetails
+        .openDetailsTab(MESSAGES)
+        .getAllMessages().stream()
+        .findFirst().orElseThrow().getOffset() + 1;
     topicDetails
         .selectSeekTypeDdlMessagesTab("Offset")
-        .setSeekTypeValueFldMessagesTab(String.valueOf(secondMessage.getOffset()))
+        .setSeekTypeValueFldMessagesTab(String.valueOf(nextOffset))
         .clickSubmitFiltersBtnMessagesTab();
     SoftAssert softly = new SoftAssert();
     topicDetails.getAllMessages().forEach(message ->
-        softly.assertTrue(message.getOffset() == secondMessage.getOffset()
-                || message.getOffset() > secondMessage.getOffset(),
-            String.format("Expected offset is: %s, but found: %s", secondMessage.getOffset(), message.getOffset())));
+        softly.assertTrue(message.getOffset() >= nextOffset,
+            String.format("Expected offset not less: %s, but found: %s", nextOffset, message.getOffset())));
     softly.assertAll();
   }

@@ -168,13 +165,11 @@ public class MessagesTest extends BaseTest {
   @Test(priority = 7)
   public void checkMessageFilteringByTimestamp() {
     navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
-    topicDetails
-        .openDetailsTab(MESSAGES);
-    LocalDateTime firstTimestamp = topicDetails.getMessageByOffset(0).getTimestamp();
-    List<TopicDetails.MessageGridItem> nextMessages = topicDetails.getAllMessages().stream()
+    LocalDateTime firstTimestamp = topicDetails
+        .openDetailsTab(MESSAGES)
+        .getMessageByOffset(0).getTimestamp();
+    LocalDateTime nextTimestamp = topicDetails.getAllMessages().stream()
         .filter(message -> message.getTimestamp().getMinute() != firstTimestamp.getMinute())
-        .toList();
-    LocalDateTime nextTimestamp = nextMessages.stream()
         .findFirst().orElseThrow().getTimestamp();
     topicDetails
         .selectSeekTypeDdlMessagesTab("Timestamp")
@@ -183,8 +178,7 @@ public class MessagesTest extends BaseTest {
         .clickSubmitFiltersBtnMessagesTab();
     SoftAssert softly = new SoftAssert();
     topicDetails.getAllMessages().forEach(message ->
-        softly.assertTrue(message.getTimestamp().isEqual(nextTimestamp)
-                || message.getTimestamp().isAfter(nextTimestamp),
+        softly.assertFalse(message.getTimestamp().isBefore(nextTimestamp),
             String.format("Expected that %s is not before %s.", message.getTimestamp(), nextTimestamp)));
     softly.assertAll();
   }
@@ -1,6 +1,6 @@
 <!DOCTYPE suite SYSTEM "https://testng.org/testng-1.0.dtd">
 <suite name="RegressionSuite">
-    <test name="RegressionTest" enabled="true" parallel="classes" thread-count="3">
+    <test name="RegressionTest" enabled="true" parallel="classes" thread-count="2">
         <packages>
             <package name="com.provectus.kafka.ui.smokesuite.*"/>
             <package name="com.provectus.kafka.ui.sanitysuite.*"/>
@@ -1,6 +1,6 @@
 <!DOCTYPE suite SYSTEM "https://testng.org/testng-1.0.dtd">
 <suite name="SanitySuite">
-    <test name="SanityTest" enabled="true" parallel="classes" thread-count="3">
+    <test name="SanityTest" enabled="true" parallel="classes" thread-count="2">
         <packages>
             <package name="com.provectus.kafka.ui.sanitysuite.*"/>
         </packages>
@@ -1,6 +1,6 @@
 <!DOCTYPE suite SYSTEM "https://testng.org/testng-1.0.dtd">
 <suite name="SmokeSuite">
-    <test name="SmokeTest" enabled="true" parallel="classes" thread-count="3">
+    <test name="SmokeTest" enabled="true" parallel="classes" thread-count="2">
         <packages>
             <package name="com.provectus.kafka.ui.smokesuite.*"/>
         </packages>
@@ -1,6 +1,6 @@
 import { Table } from 'components/common/table/Table/Table.styled';
 import TableHeaderCell from 'components/common/table/TableHeaderCell/TableHeaderCell';
-import { ConsumerGroupTopicPartition } from 'generated-sources';
+import { ConsumerGroupTopicPartition, SortOrder } from 'generated-sources';
 import React from 'react';

 import { ContentBox, TopicContentWrapper } from './TopicContent.styled';
@@ -9,7 +9,125 @@ interface Props {
   consumers: ConsumerGroupTopicPartition[];
 }

+type OrderByKey = keyof ConsumerGroupTopicPartition;
+interface Headers {
+  title: string;
+  orderBy: OrderByKey | undefined;
+}
+
+const TABLE_HEADERS_MAP: Headers[] = [
+  { title: 'Partition', orderBy: 'partition' },
+  { title: 'Consumer ID', orderBy: 'consumerId' },
+  { title: 'Host', orderBy: 'host' },
+  { title: 'Messages Behind', orderBy: 'messagesBehind' },
+  { title: 'Current Offset', orderBy: 'currentOffset' },
+  { title: 'End offset', orderBy: 'endOffset' },
+];
+
+const ipV4ToNum = (ip?: string) => {
+  if (typeof ip === 'string' && ip.length !== 0) {
+    const withoutSlash = ip.indexOf('/') !== -1 ? ip.slice(1) : ip;
+    return Number(
+      withoutSlash
+        .split('.')
+        .map((octet) => `000${octet}`.slice(-3))
+        .join('')
+    );
+  }
+  return 0;
+};
+
+type ComparatorFunction<T> = (
+  valueA: T,
+  valueB: T,
+  order: SortOrder,
+  property?: keyof T
+) => number;
+
+const numberComparator: ComparatorFunction<ConsumerGroupTopicPartition> = (
+  valueA,
+  valueB,
+  order,
+  property
+) => {
+  if (property !== undefined) {
+    return order === SortOrder.ASC
+      ? Number(valueA[property]) - Number(valueB[property])
+      : Number(valueB[property]) - Number(valueA[property]);
+  }
+  return 0;
+};
+
+const ipComparator: ComparatorFunction<ConsumerGroupTopicPartition> = (
+  valueA,
+  valueB,
+  order
+) =>
+  order === SortOrder.ASC
+    ? ipV4ToNum(valueA.host) - ipV4ToNum(valueB.host)
+    : ipV4ToNum(valueB.host) - ipV4ToNum(valueA.host);
+
+const consumerIdComparator: ComparatorFunction<ConsumerGroupTopicPartition> = (
+  valueA,
+  valueB,
+  order
+) => {
+  if (valueA.consumerId && valueB.consumerId) {
+    if (order === SortOrder.ASC) {
+      if (valueA.consumerId?.toLowerCase() > valueB.consumerId?.toLowerCase()) {
+        return 1;
+      }
+    }
+
+    if (order === SortOrder.DESC) {
+      if (valueB.consumerId?.toLowerCase() > valueA.consumerId?.toLowerCase()) {
+        return -1;
+      }
+    }
+  }
+
+  return 0;
+};
+
 const TopicContents: React.FC<Props> = ({ consumers }) => {
+  const [orderBy, setOrderBy] = React.useState<OrderByKey>('partition');
+  const [sortOrder, setSortOrder] = React.useState<SortOrder>(SortOrder.DESC);
+
+  const handleOrder = React.useCallback((columnName: string | null) => {
+    if (typeof columnName === 'string') {
+      setOrderBy(columnName as OrderByKey);
+      setSortOrder((prevOrder) =>
+        prevOrder === SortOrder.DESC ? SortOrder.ASC : SortOrder.DESC
+      );
+    }
+  }, []);
+
+  const sortedConsumers = React.useMemo(() => {
+    if (orderBy && sortOrder) {
+      const isNumberProperty =
+        orderBy === 'partition' ||
+        orderBy === 'currentOffset' ||
+        orderBy === 'endOffset' ||
+        orderBy === 'messagesBehind';
+
+      let comparator: ComparatorFunction<ConsumerGroupTopicPartition>;
+      if (isNumberProperty) {
+        comparator = numberComparator;
+      }
+
+      if (orderBy === 'host') {
+        comparator = ipComparator;
+      }
+
+      if (orderBy === 'consumerId') {
+        comparator = consumerIdComparator;
+      }
+
+      return consumers.sort((a, b) => comparator(a, b, sortOrder, orderBy));
+    }
+    return consumers;
+  }, [orderBy, sortOrder, consumers]);
+
   return (
     <TopicContentWrapper>
       <td colSpan={3}>
@@ -17,16 +135,20 @@ const TopicContents: React.FC<Props> = ({ consumers }) => {
         <Table isFullwidth>
           <thead>
             <tr>
-              <TableHeaderCell title="Partition" />
-              <TableHeaderCell title="Consumer ID" />
-              <TableHeaderCell title="Host" />
-              <TableHeaderCell title="Messages behind" />
-              <TableHeaderCell title="Current offset" />
-              <TableHeaderCell title="End offset" />
+              {TABLE_HEADERS_MAP.map((header) => (
+                <TableHeaderCell
+                  key={header.orderBy}
+                  title={header.title}
+                  orderBy={orderBy}
+                  sortOrder={sortOrder}
+                  orderValue={header.orderBy}
+                  handleOrderBy={handleOrder}
+                />
+              ))}
             </tr>
           </thead>
           <tbody>
-            {consumers.map((consumer) => (
+            {sortedConsumers.map((consumer) => (
               <tr key={consumer.partition}>
                 <td>{consumer.partition}</td>
                 <td>{consumer.consumerId}</td>
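
Note on the sorting helpers added above: the ipV4ToNum helper strips a leading "/" if present, left-pads every octet to three digits and joins them into one number, so hosts compare numerically instead of lexically. A standalone TypeScript sketch of that idea; the host values below are made up for illustration and are not taken from this change:

// Same padding trick as the ipV4ToNum helper in the diff above.
const ipV4ToNum = (ip?: string): number => {
  if (typeof ip === 'string' && ip.length !== 0) {
    const withoutSlash = ip.indexOf('/') !== -1 ? ip.slice(1) : ip;
    return Number(
      withoutSlash
        .split('.')
        .map((octet) => `000${octet}`.slice(-3)) // '9' -> '009'
        .join('')
    );
  }
  return 0;
};

// Sample hosts (illustrative only): a plain string sort would put '/10.0.2.15' before '/10.0.2.2'.
const hosts = ['/192.168.0.1', '/10.0.2.15', '/10.0.2.2'];
const ascending = [...hosts].sort((a, b) => ipV4ToNum(a) - ipV4ToNum(b));
console.log(ascending); // ['/10.0.2.2', '/10.0.2.15', '/192.168.0.1']
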
@@ -51,9 +51,9 @@ const List = () => {
       accessorKey: 'members',
     },
     {
+      id: ConsumerGroupOrdering.TOPIC_NUM,
       header: 'Num Of Topics',
       accessorKey: 'topics',
-      enableSorting: false,
     },
     {
       id: ConsumerGroupOrdering.MESSAGES_BEHIND,
@@ -31,7 +31,7 @@ const TableView: React.FC<TableViewProps> = ({ fetching, rows }) => {
       data={rows || []}
       columns={columns}
       emptyMessage={fetching ? 'Loading...' : 'No rows found'}
-      enableSorting={false}
+      enableSorting
     />
   );
 };
@@ -4,11 +4,7 @@ import { CellContext } from '@tanstack/react-table';
 import ClusterContext from 'components/contexts/ClusterContext';
 import { ClusterNameRoute } from 'lib/paths';
 import useAppParams from 'lib/hooks/useAppParams';
-import {
-  Dropdown,
-  DropdownItem,
-  DropdownItemHint,
-} from 'components/common/Dropdown';
+import { Dropdown, DropdownItemHint } from 'components/common/Dropdown';
 import {
   useDeleteTopic,
   useClearTopicMessages,
@@ -55,7 +51,8 @@ const ActionsCell: React.FC<CellContext<Topic, unknown>> = ({ row }) => {
          with DELETE policy
        </DropdownItemHint>
      </ActionDropdownItem>
-      <DropdownItem
+      <ActionDropdownItem
+        disabled={!isTopicDeletionAllowed}
        onClick={recreateTopic.mutateAsync}
        confirm={
          <>
@@ -63,9 +60,14 @@ const ActionsCell: React.FC<CellContext<Topic, unknown>> = ({ row }) => {
          </>
        }
        danger
+        permission={{
+          resource: ResourceType.TOPIC,
+          action: [Action.VIEW, Action.CREATE, Action.DELETE],
+          value: name,
+        }}
      >
        Recreate Topic
-      </DropdownItem>
+      </ActionDropdownItem>
      <ActionDropdownItem
        disabled={!isTopicDeletionAllowed}
        onClick={() => deleteTopic.mutateAsync(name)}
@@ -27,7 +27,7 @@ export interface AddEditFilterContainerProps {
   inputDisplayNameDefaultValue?: string;
   inputCodeDefaultValue?: string;
   isAdd?: boolean;
-  submitCallback?: (values: AddMessageFilters) => void;
+  submitCallback?: (values: AddMessageFilters) => Promise<void>;
 }

 const AddEditFilterContainer: React.FC<AddEditFilterContainerProps> = ({
@@ -6,6 +6,7 @@ import SavedFilters from 'components/Topics/Topic/Messages/Filters/SavedFilters'
 import SavedIcon from 'components/common/Icons/SavedIcon';
 import QuestionIcon from 'components/common/Icons/QuestionIcon';
 import useBoolean from 'lib/hooks/useBoolean';
+import { showAlert } from 'lib/errorHandling';

 import AddEditFilterContainer from './AddEditFilterContainer';
 import InfoModal from './InfoModal';
@@ -43,6 +44,19 @@ const AddFilter: React.FC<FilterModalProps> = ({

   const onSubmit = React.useCallback(
     async (values: AddMessageFilters) => {
+      const isFilterExists = filters.some(
+        (filter) => filter.name === values.name
+      );
+
+      if (isFilterExists) {
+        showAlert('error', {
+          id: '',
+          title: 'Validation Error',
+          message: 'Filter with the same name already exists',
+        });
+        return;
+      }
+
       const data = { ...values };
       if (data.saveFilter) {
         addFilter(data);
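
A small usage sketch of the duplicate-name guard added above. The filter shape and sample data are stand-ins (the real handler works with AddMessageFilters values and the filters the component receives), but the check itself is the same filters.some comparison:

// Illustrative types and data — not part of the actual change.
interface NamedFilter {
  name: string;
  code: string;
}

const savedFilters: NamedFilter[] = [
  { name: 'has-errors', code: 'record.value != null' },
];

const isDuplicateName = (filters: NamedFilter[], candidate: string): boolean =>
  filters.some((filter) => filter.name === candidate);

console.log(isDuplicateName(savedFilters, 'has-errors')); // true  -> show the "Validation Error" alert and return
console.log(isDuplicateName(savedFilters, 'only-keys')); // false -> continue with the normal submit path
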
@@ -1,5 +1,4 @@
 import React from 'react';
-import styled from 'styled-components';
 import useDataSaver from 'lib/hooks/useDataSaver';
 import { TopicMessage } from 'generated-sources';
 import MessageToggleIcon from 'components/common/Icons/MessageToggleIcon';
@@ -7,22 +6,12 @@ import IconButtonWrapper from 'components/common/Icons/IconButtonWrapper';
 import { Dropdown, DropdownItem } from 'components/common/Dropdown';
 import { formatTimestamp } from 'lib/dateTimeHelpers';
 import { JSONPath } from 'jsonpath-plus';
+import Ellipsis from 'components/common/Ellipsis/Ellipsis';
+import WarningRedIcon from 'components/common/Icons/WarningRedIcon';

 import MessageContent from './MessageContent/MessageContent';
 import * as S from './MessageContent/MessageContent.styled';

-const StyledDataCell = styled.td`
-  overflow: hidden;
-  white-space: nowrap;
-  text-overflow: ellipsis;
-  max-width: 350px;
-  min-width: 350px;
-`;
-
-const ClickableRow = styled.tr`
-  cursor: pointer;
-`;
-
 export interface PreviewFilter {
   field: string;
   path: string;
@@ -40,9 +29,13 @@ const Message: React.FC<Props> = ({
     timestampType,
     offset,
     key,
+    keySize,
     partition,
     content,
+    valueSize,
     headers,
+    valueSerde,
+    keySerde,
   },
   keyFilters,
   contentFilters,
@@ -100,7 +93,7 @@ const Message: React.FC<Props> = ({

   return (
     <>
-      <ClickableRow
+      <S.ClickableRow
        onMouseEnter={() => setVEllipsisOpen(true)}
        onMouseLeave={() => setVEllipsisOpen(false)}
        onClick={toggleIsOpen}
@@ -115,16 +108,20 @@ const Message: React.FC<Props> = ({
        <td>
          <div>{formatTimestamp(timestamp)}</div>
        </td>
-        <StyledDataCell title={key}>
-          {renderFilteredJson(key, keyFilters)}
-        </StyledDataCell>
-        <StyledDataCell title={content}>
+        <S.DataCell title={key}>
+          <Ellipsis text={renderFilteredJson(key, keyFilters)}>
+            {keySerde === 'Fallback' && <WarningRedIcon />}
+          </Ellipsis>
+        </S.DataCell>
+        <S.DataCell title={content}>
          <S.Metadata>
            <S.MetadataValue>
-              {renderFilteredJson(content, contentFilters)}
+              <Ellipsis text={renderFilteredJson(content, contentFilters)}>
+                {valueSerde === 'Fallback' && <WarningRedIcon />}
+              </Ellipsis>
            </S.MetadataValue>
          </S.Metadata>
-        </StyledDataCell>
+        </S.DataCell>
        <td style={{ width: '5%' }}>
          {vEllipsisOpen && (
            <Dropdown>
@@ -135,7 +132,7 @@ const Message: React.FC<Props> = ({
            </Dropdown>
          )}
        </td>
-      </ClickableRow>
+      </S.ClickableRow>
      {isOpen && (
        <MessageContent
          messageKey={key}
@@ -143,6 +140,8 @@ const Message: React.FC<Props> = ({
          headers={headers}
          timestamp={timestamp}
          timestampType={timestampType}
+          keySize={keySize}
+          contentSize={valueSize}
        />
      )}
    </>
@@ -35,7 +35,16 @@ export const ContentBox = styled.div`
     flex-grow: 1;
   }
 `;
+export const DataCell = styled.td`
+  overflow: hidden;
+  white-space: nowrap;
+  text-overflow: ellipsis;
+  max-width: 350px;
+  min-width: 350px;
+`;
+export const ClickableRow = styled.tr`
+  cursor: pointer;
+`;
 export const MetadataWrapper = styled.div`
   background-color: ${({ theme }) => theme.topicMetaData.backgroundColor};
   padding: 24px;
@@ -15,6 +15,8 @@ export interface MessageContentProps {
   headers?: { [key: string]: string | undefined };
   timestamp?: Date;
   timestampType?: TopicMessageTimestampTypeEnum;
+  keySize?: number;
+  contentSize?: number;
 }

 const MessageContent: React.FC<MessageContentProps> = ({
@@ -23,6 +25,8 @@ const MessageContent: React.FC<MessageContentProps> = ({
   headers,
   timestamp,
   timestampType,
+  keySize,
+  contentSize,
 }) => {
   const [activeTab, setActiveTab] = React.useState<Tab>('content');
   const [searchParams] = useSearchParams();
@@ -54,8 +58,7 @@ const MessageContent: React.FC<MessageContentProps> = ({
     e.preventDefault();
     setActiveTab('headers');
   };
-  const keySize = new TextEncoder().encode(messageKey).length;
-  const contentSize = new TextEncoder().encode(messageContent).length;
+
   const contentType =
     messageContent && messageContent.trim().startsWith('{')
       ? SchemaType.JSON
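
Context for the keySize/contentSize props added above: the removed lines measured the rendered strings with TextEncoder, while the new optional props let the parent pass the sizes that arrive with the message itself. A rough TypeScript sketch of why the two can differ; the payload and the reported size below are invented for illustration:

// Byte length of the rendered text, as the removed code computed it:
const renderedValue = '{"id": 1, "status": "SHIPPED"}';
const renderedBytes = new TextEncoder().encode(renderedValue).length;

// Size passed in as a prop instead (hypothetically taken from the message
// metadata the table already holds); it need not match the rendered length:
const reportedContentSize = 64;

console.log({ renderedBytes, reportedContentSize });
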
@@ -11,6 +11,7 @@ import upperFirst from 'lodash/upperFirst';

 jsf.option('fillProperties', false);
 jsf.option('alwaysFakeOptionals', true);
+jsf.option('failOnInvalidFormat', false);

 const generateValueFromSchema = (preffered?: SerdeDescription) => {
   if (!preffered?.schema) {
@@ -49,7 +49,7 @@ const CustomParamField: React.FC<Props> = ({
     label: option,
     disabled:
       (config &&
-        config[option].source !== ConfigSource.DYNAMIC_TOPIC_CONFIG) ||
+        config[option]?.source !== ConfigSource.DYNAMIC_TOPIC_CONFIG) ||
       existingFields.includes(option),
   }));

@@ -0,0 +1,14 @@
+import styled from 'styled-components';
+
+export const Text = styled.div`
+  overflow: hidden;
+  white-space: nowrap;
+  text-overflow: ellipsis;
+  max-width: 340px;
+`;
+
+export const Wrapper = styled.div`
+  display: flex;
+  gap: 8px;
+  align-items: center;
+`;
@@ -0,0 +1,20 @@
+import React, { PropsWithChildren } from 'react';
+
+import * as S from './Ellipsis.styled';
+
+type EllipsisProps = {
+  text: React.ReactNode;
+};
+
+const Ellipsis: React.FC<PropsWithChildren<EllipsisProps>> = ({
+  text,
+  children,
+}) => {
+  return (
+    <S.Wrapper>
+      <S.Text>{text}</S.Text>
+      {children}
+    </S.Wrapper>
+  );
+};
+export default Ellipsis;
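
A hedged usage sketch of the new Ellipsis component, mirroring how the message cells above combine it with WarningRedIcon; the sample value and serde name are invented for illustration:

import React from 'react';
import Ellipsis from 'components/common/Ellipsis/Ellipsis';
import WarningRedIcon from 'components/common/Icons/WarningRedIcon';

// Illustrative data only.
const value = '{"orderId": 42, "comment": "a very long payload that should be clipped"}';
const valueSerde = 'Fallback';

const ValueCell: React.FC = () => (
  // The text is truncated with an ellipsis; children render next to it,
  // so the red warning icon appears only for Fallback-deserialized values.
  <Ellipsis text={value}>
    {valueSerde === 'Fallback' && <WarningRedIcon />}
  </Ellipsis>
);

export default ValueCell;
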
@@ -0,0 +1,32 @@
+import React from 'react';
+import { useTheme } from 'styled-components';
+
+const WarningRedIcon: React.FC = () => {
+  const theme = useTheme();
+  return (
+    <svg
+      width="20"
+      height="20"
+      viewBox="0 0 20 20"
+      fill="none"
+      xmlns="http://www.w3.org/2000/svg"
+    >
+      <rect
+        width="20"
+        height="20"
+        rx="10"
+        fill={theme.icons.warningRedIcon.rectFill}
+      />
+      <path
+        d="M9 4.74219H11V12.7422H9V4.74219Z"
+        fill={theme.icons.warningRedIcon.pathFill}
+      />
+      <path
+        d="M9 14.7422C9 14.1899 9.44772 13.7422 10 13.7422C10.5523 13.7422 11 14.1899 11 14.7422C11 15.2945 10.5523 15.7422 10 15.7422C9.44772 15.7422 9 15.2945 9 14.7422Z"
+        fill={theme.icons.warningRedIcon.pathFill}
+      />
+    </svg>
+  );
+};
+
+export default WarningRedIcon;
@@ -14,7 +14,7 @@ import type {
   PaginationState,
   ColumnDef,
 } from '@tanstack/react-table';
-import { useSearchParams } from 'react-router-dom';
+import { useSearchParams, useLocation } from 'react-router-dom';
 import { PER_PAGE } from 'lib/constants';
 import { Button } from 'components/common/Button/Button';
 import Input from 'components/common/Input/Input';
@@ -129,6 +129,7 @@ const Table: React.FC<TableProps<any>> = ({
   onRowClick,
 }) => {
   const [searchParams, setSearchParams] = useSearchParams();
+  const location = useLocation();
   const [rowSelection, setRowSelection] = React.useState({});
   const onSortingChange = React.useCallback(
     (updater: UpdaterFn<SortingState>) => {
@@ -136,7 +137,7 @@ const Table: React.FC<TableProps<any>> = ({
       setSearchParams(searchParams);
       return newState;
     },
-    [searchParams]
+    [searchParams, location]
   );
   const onPaginationChange = React.useCallback(
     (updater: UpdaterFn<PaginationState>) => {
@@ -145,7 +146,7 @@ const Table: React.FC<TableProps<any>> = ({
       setRowSelection({});
       return newState;
     },
-    [searchParams]
+    [searchParams, location]
   );

   const table = useReactTable({
@@ -173,6 +173,10 @@ const baseTheme = {
     closeIcon: Colors.neutral[30],
     deleteIcon: Colors.red[20],
     warningIcon: Colors.yellow[20],
+    warningRedIcon: {
+      rectFill: Colors.red[10],
+      pathFill: Colors.red[50],
+    },
     messageToggleIcon: {
       normal: Colors.brand[30],
       hover: Colors.brand[40],
2
pom.xml
@@ -52,7 +52,7 @@
     <pnpm.version>v7.4.0</pnpm.version>

     <!-- Plugin versions -->
-    <fabric8-maven-plugin.version>0.42.0</fabric8-maven-plugin.version>
+    <fabric8-maven-plugin.version>0.42.1</fabric8-maven-plugin.version>
     <frontend-maven-plugin.version>1.12.1</frontend-maven-plugin.version>
     <maven-clean-plugin.version>3.2.0</maven-clean-plugin.version>
     <maven-compiler-plugin.version>3.10.1</maven-compiler-plugin.version>