Merge branch 'master' into ISSUE_754_acl

This commit is contained in:
Ilya Kuramshin 2023-04-21 12:40:33 +04:00 committed by GitHub
commit 23a39061b7
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
62 changed files with 646 additions and 310 deletions

11
.github/ISSUE_TEMPLATE/config.yml vendored Normal file
View file

@ -0,0 +1,11 @@
# Disable free-form ("blank") issues: reporters must pick a template or one of the links below.
blank_issues_enabled: false
# Links shown on the "New issue" chooser page, in order of preference.
contact_links:
  - name: Official documentation
    url: https://docs.kafka-ui.provectus.io/
    about: Before reaching out for support, please refer to our documentation. Read "FAQ" and "Common problems", also try using search there.
  - name: Community Discord
    url: https://discord.gg/4DWzD7pGE5
    about: Chat with other users, get some support or ask questions.
  - name: GitHub Discussions
    url: https://github.com/provectus/kafka-ui/discussions
    about: An alternative place to ask questions or to get some support.

View file

@ -1,16 +0,0 @@
---
name: "❓ Question"
about: Ask a question
title: ''
---
<!--
To ask a question, please either:
1. Open up a discussion (https://github.com/provectus/kafka-ui/discussions)
2. Join us on discord (https://discord.gg/4DWzD7pGE5) and ask there.
Don't forget to check/search for existing issues/discussions.
-->

View file

@ -6,7 +6,7 @@ jobs:
block_merge:
runs-on: ubuntu-latest
steps:
- uses: mheap/github-action-required-labels@v3
- uses: mheap/github-action-required-labels@v4
with:
mode: exactly
count: 0

View file

@ -86,7 +86,7 @@ jobs:
- name: make comment with private deployment link
if: ${{ github.event.label.name == 'status/feature_testing' }}
uses: peter-evans/create-or-update-comment@v2
uses: peter-evans/create-or-update-comment@v3
with:
issue-number: ${{ github.event.pull_request.number }}
body: |
@ -94,7 +94,7 @@ jobs:
- name: make comment with public deployment link
if: ${{ github.event.label.name == 'status/feature_testing_public' }}
uses: peter-evans/create-or-update-comment@v2
uses: peter-evans/create-or-update-comment@v3
with:
issue-number: ${{ github.event.pull_request.number }}
body: |

View file

@ -21,7 +21,7 @@ jobs:
git add ../kafka-ui-from-branch/
git commit -m "removed env:${{ needs.build.outputs.deploy }}" && git push || true
- name: make comment with deployment link
uses: peter-evans/create-or-update-comment@v2
uses: peter-evans/create-or-update-comment@v3
with:
issue-number: ${{ github.event.pull_request.number }}
body: |

View file

@ -65,7 +65,7 @@ jobs:
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
- name: make comment with private deployment link
uses: peter-evans/create-or-update-comment@v2
uses: peter-evans/create-or-update-comment@v3
with:
issue-number: ${{ github.event.pull_request.number }}
body: |

View file

@ -55,7 +55,7 @@ jobs:
cache-to: type=local,dest=/tmp/.buildx-cache
- name: Run CVE checks
uses: aquasecurity/trivy-action@0.9.2
uses: aquasecurity/trivy-action@0.10.0
with:
image-ref: "provectuslabs/kafka-ui:${{ steps.build.outputs.version }}"
format: "table"

View file

@ -33,7 +33,7 @@ jobs:
--image-ids imageTag=${{ steps.extract_branch.outputs.tag }} \
--region us-east-1
- name: make comment with private deployment link
uses: peter-evans/create-or-update-comment@v2
uses: peter-evans/create-or-update-comment@v3
with:
issue-number: ${{ github.event.pull_request.number }}
body: |

View file

@ -7,7 +7,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v7
- uses: actions/stale@v8
with:
days-before-issue-stale: 7
days-before-issue-close: 3

View file

@ -6,8 +6,9 @@ Following versions of the project are currently being supported with security up
| Version | Supported |
| ------- | ------------------ |
| 0.5.x | :white_check_mark: |
| 0.4.x | :x: |
| 0.6.x | :white_check_mark: |
| 0.5.x | :x: |
| 0.4.x | :x: |
| 0.3.x | :x: |
| 0.2.x | :x: |
| 0.1.x | :x: |

View file

@ -2,6 +2,6 @@ apiVersion: v2
name: kafka-ui
description: A Helm chart for kafka-UI
type: application
version: 0.6.1
appVersion: v0.6.1
version: 0.6.2
appVersion: v0.6.2
icon: https://github.com/provectus/kafka-ui/raw/master/documentation/images/kafka-ui-logo.png

View file

@ -9,4 +9,6 @@ message MySpecificTopicValue {
message MyValue {
int32 version = 1;
string payload = 2;
map<int32, string> intToStringMap = 3;
map<string, MyValue> strToObjMap = 4;
}

View file

@ -27,6 +27,8 @@ public class ClustersProperties {
String internalTopicPrefix;
Integer adminClientTimeout;
PollingProperties polling = new PollingProperties();
@Data

View file

@ -5,7 +5,6 @@ import java.util.Map;
import lombok.AllArgsConstructor;
import org.openapitools.jackson.nullable.JsonNullableModule;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.web.ServerProperties;
import org.springframework.boot.autoconfigure.web.reactive.WebFluxProperties;
import org.springframework.context.ApplicationContext;
@ -15,8 +14,6 @@ import org.springframework.http.server.reactive.ContextPathCompositeHandler;
import org.springframework.http.server.reactive.HttpHandler;
import org.springframework.jmx.export.MBeanExporter;
import org.springframework.util.StringUtils;
import org.springframework.util.unit.DataSize;
import org.springframework.web.reactive.function.client.WebClient;
import org.springframework.web.server.adapter.WebHttpHandlerBuilder;
@Configuration
@ -52,14 +49,7 @@ public class Config {
}
@Bean
public WebClient webClient(
@Value("${webclient.max-in-memory-buffer-size:20MB}") DataSize maxBuffSize) {
return WebClient.builder()
.codecs(c -> c.defaultCodecs().maxInMemorySize((int) maxBuffSize.toBytes()))
.build();
}
@Bean
// will be used by webflux json mapping
public JsonNullableModule jsonNullableModule() {
return new JsonNullableModule();
}

View file

@ -0,0 +1,33 @@
package com.provectus.kafka.ui.config;
import com.provectus.kafka.ui.exception.ValidationException;
import java.beans.Transient;
import javax.annotation.PostConstruct;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import org.springframework.util.unit.DataSize;
// Binds the "webclient.*" section of the application configuration and
// validates it eagerly at startup, so a malformed value fails fast instead
// of surfacing later when the WebClient buffer size is first parsed.
@Configuration
@ConfigurationProperties("webclient")
@Data
public class WebclientProperties {
  // Max in-memory buffer size for WebClient responses, in Spring DataSize
  // syntax (e.g. "20", "12KB", "5MB"). May be null; consumers of this
  // property apply their own default when it is unset.
  String maxInMemoryBufferSize;
  @PostConstruct
  public void validate() {
    validateAndSetDefaultBufferSize();
  }
  private void validateAndSetDefaultBufferSize() {
    // Only validate when explicitly configured — null is a legal "use default" state.
    if (maxInMemoryBufferSize != null) {
      try {
        // Parse-and-discard: we only care that the value is well-formed here.
        DataSize.parse(maxInMemoryBufferSize);
      } catch (Exception e) {
        throw new ValidationException("Invalid format for webclient.maxInMemoryBufferSize");
      }
    }
  }
}

View file

@ -11,8 +11,6 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.TopicPartition;
@ -82,15 +80,8 @@ public class ConsumerGroupMapper {
InternalConsumerGroup c, T consumerGroup) {
consumerGroup.setGroupId(c.getGroupId());
consumerGroup.setMembers(c.getMembers().size());
int numTopics = Stream.concat(
c.getOffsets().keySet().stream().map(TopicPartition::topic),
c.getMembers().stream()
.flatMap(m -> m.getAssignment().stream().map(TopicPartition::topic))
).collect(Collectors.toSet()).size();
consumerGroup.setMessagesBehind(c.getMessagesBehind());
consumerGroup.setTopics(numTopics);
consumerGroup.setTopics(c.getTopicNum());
consumerGroup.setSimple(c.isSimple());
Optional.ofNullable(c.getState())

View file

@ -5,6 +5,7 @@ import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import lombok.Builder;
import lombok.Data;
import org.apache.kafka.clients.admin.ConsumerGroupDescription;
@ -21,6 +22,7 @@ public class InternalConsumerGroup {
private final Map<TopicPartition, Long> offsets;
private final Map<TopicPartition, Long> endOffsets;
private final Long messagesBehind;
private final Integer topicNum;
private final String partitionAssignor;
private final ConsumerGroupState state;
private final Node coordinator;
@ -44,22 +46,12 @@ public class InternalConsumerGroup {
builder.simple(description.isSimpleConsumerGroup());
builder.state(description.state());
builder.partitionAssignor(description.partitionAssignor());
builder.members(
description.members().stream()
.map(m ->
InternalConsumerGroup.InternalMember.builder()
.assignment(m.assignment().topicPartitions())
.clientId(m.clientId())
.groupInstanceId(m.groupInstanceId().orElse(""))
.consumerId(m.consumerId())
.clientId(m.clientId())
.host(m.host())
.build()
).collect(Collectors.toList())
);
Collection<InternalMember> internalMembers = initInternalMembers(description);
builder.members(internalMembers);
builder.offsets(groupOffsets);
builder.endOffsets(topicEndOffsets);
builder.messagesBehind(calculateMessagesBehind(groupOffsets, topicEndOffsets));
builder.topicNum(calculateTopicNum(groupOffsets, internalMembers));
Optional.ofNullable(description.coordinator()).ifPresent(builder::coordinator);
return builder.build();
}
@ -80,4 +72,31 @@ public class InternalConsumerGroup {
return messagesBehind;
}
/**
 * Number of distinct topics this consumer group touches: the union of
 * topics that have committed offsets and topics currently assigned to
 * active members.
 */
private static Integer calculateTopicNum(Map<TopicPartition, Long> offsets, Collection<InternalMember> members) {
  Stream<String> offsetTopics = offsets.keySet().stream().map(TopicPartition::topic);
  Stream<String> assignedTopics = members.stream()
      .flatMap(member -> member.getAssignment().stream().map(TopicPartition::topic));
  Set<String> distinctTopics = Stream.concat(offsetTopics, assignedTopics)
      .collect(Collectors.toSet());
  return distinctTopics.size();
}
/**
 * Maps the AdminClient member descriptions into our internal member model.
 * A missing group instance id is represented as an empty string.
 */
private static Collection<InternalMember> initInternalMembers(ConsumerGroupDescription description) {
  return description.members().stream()
      .map(m ->
          InternalConsumerGroup.InternalMember.builder()
              .assignment(m.assignment().topicPartitions())
              // fix: clientId was set twice on the builder; one call suffices
              .clientId(m.clientId())
              .groupInstanceId(m.groupInstanceId().orElse(""))
              .consumerId(m.consumerId())
              .host(m.host())
              .build()
      ).collect(Collectors.toList());
}
}

View file

@ -1,33 +1,36 @@
package com.provectus.kafka.ui.service;
import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.util.SslPropertiesUtil;
import java.io.Closeable;
import java.time.Instant;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import lombok.RequiredArgsConstructor;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Mono;
@Service
@RequiredArgsConstructor
@Slf4j
public class AdminClientServiceImpl implements AdminClientService, Closeable {
private static final int DEFAULT_CLIENT_TIMEOUT_MS = 30_000;
private static final AtomicLong CLIENT_ID_SEQ = new AtomicLong();
private final Map<String, ReactiveAdminClient> adminClientCache = new ConcurrentHashMap<>();
@Setter // used in tests
@Value("${kafka.admin-client-timeout:30000}")
private int clientTimeout;
private final int clientTimeout;
public AdminClientServiceImpl(ClustersProperties clustersProperties) {
this.clientTimeout = Optional.ofNullable(clustersProperties.getAdminClientTimeout())
.orElse(DEFAULT_CLIENT_TIMEOUT_MS);
}
@Override
public Mono<ReactiveAdminClient> get(KafkaCluster cluster) {
@ -42,7 +45,7 @@ public class AdminClientServiceImpl implements AdminClientService, Closeable {
SslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), properties);
properties.putAll(cluster.getProperties());
properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.getBootstrapServers());
properties.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, clientTimeout);
properties.putIfAbsent(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, clientTimeout);
properties.putIfAbsent(
AdminClientConfig.CLIENT_ID_CONFIG,
"kafka-ui-admin-" + Instant.now().getEpochSecond() + "-" + CLIENT_ID_SEQ.incrementAndGet()

View file

@ -101,6 +101,9 @@ public class ConsumerGroupService {
public record ConsumerGroupsPage(List<InternalConsumerGroup> consumerGroups, int totalPages) {
}
private record GroupWithDescr(InternalConsumerGroup icg, ConsumerGroupDescription cgd) {
}
public Mono<ConsumerGroupsPage> getConsumerGroupsPage(
KafkaCluster cluster,
int pageNum,
@ -159,22 +162,19 @@ public class ConsumerGroupService {
sortAndPaginate(descriptions.values(), comparator, pageNum, perPage, sortOrderDto).toList());
}
case MESSAGES_BEHIND -> {
record GroupWithDescr(InternalConsumerGroup icg, ConsumerGroupDescription cgd) { }
Comparator<GroupWithDescr> comparator = Comparator.comparingLong(gwd ->
gwd.icg.getMessagesBehind() == null ? 0L : gwd.icg.getMessagesBehind());
var groupNames = groups.stream().map(ConsumerGroupListing::groupId).toList();
yield loadDescriptionsByInternalConsumerGroups(ac, groups, comparator, pageNum, perPage, sortOrderDto);
}
case TOPIC_NUM -> {
Comparator<GroupWithDescr> comparator = Comparator.comparingInt(gwd -> gwd.icg.getTopicNum());
yield loadDescriptionsByInternalConsumerGroups(ac, groups, comparator, pageNum, perPage, sortOrderDto);
yield ac.describeConsumerGroups(groupNames)
.flatMap(descriptionsMap -> {
List<ConsumerGroupDescription> descriptions = descriptionsMap.values().stream().toList();
return getConsumerGroups(ac, descriptions)
.map(icg -> Streams.zip(icg.stream(), descriptions.stream(), GroupWithDescr::new).toList())
.map(gwd -> sortAndPaginate(gwd, comparator, pageNum, perPage, sortOrderDto)
.map(GroupWithDescr::cgd).toList());
}
);
}
};
}
@ -209,6 +209,27 @@ public class ConsumerGroupService {
.map(cgs -> new ArrayList<>(cgs.values()));
}
private Mono<List<ConsumerGroupDescription>> loadDescriptionsByInternalConsumerGroups(ReactiveAdminClient ac,
List<ConsumerGroupListing> groups,
Comparator<GroupWithDescr> comparator,
int pageNum,
int perPage,
SortOrderDTO sortOrderDto) {
var groupNames = groups.stream().map(ConsumerGroupListing::groupId).toList();
return ac.describeConsumerGroups(groupNames)
.flatMap(descriptionsMap -> {
List<ConsumerGroupDescription> descriptions = descriptionsMap.values().stream().toList();
return getConsumerGroups(ac, descriptions)
.map(icg -> Streams.zip(icg.stream(), descriptions.stream(), GroupWithDescr::new).toList())
.map(gwd -> sortAndPaginate(gwd, comparator, pageNum, perPage, sortOrderDto)
.map(GroupWithDescr::cgd).toList());
}
);
}
public Mono<InternalConsumerGroup> getConsumerGroupDetail(KafkaCluster cluster,
String consumerGroupId) {
return adminClientService.get(cluster)

View file

@ -2,6 +2,7 @@ package com.provectus.kafka.ui.service;
import com.provectus.kafka.ui.client.RetryingKafkaConnectClient;
import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.config.WebclientProperties;
import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
import com.provectus.kafka.ui.emitter.PollingSettings;
import com.provectus.kafka.ui.model.ApplicationPropertyValidationDTO;
@ -22,9 +23,7 @@ import java.util.Optional;
import java.util.Properties;
import java.util.stream.Stream;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.util.unit.DataSize;
import org.springframework.web.reactive.function.client.WebClient;
@ -34,12 +33,18 @@ import reactor.util.function.Tuple2;
import reactor.util.function.Tuples;
@Service
@RequiredArgsConstructor
@Slf4j
public class KafkaClusterFactory {
@Value("${webclient.max-in-memory-buffer-size:20MB}")
private DataSize maxBuffSize;
private static final DataSize DEFAULT_WEBCLIENT_BUFFER = DataSize.parse("20MB");
private final DataSize webClientMaxBuffSize;
public KafkaClusterFactory(WebclientProperties webclientProperties) {
this.webClientMaxBuffSize = Optional.ofNullable(webclientProperties.getMaxInMemoryBufferSize())
.map(DataSize::parse)
.orElse(DEFAULT_WEBCLIENT_BUFFER);
}
public KafkaCluster create(ClustersProperties properties,
ClustersProperties.Cluster clusterProperties) {
@ -140,7 +145,7 @@ public class KafkaClusterFactory {
url -> new RetryingKafkaConnectClient(
connectCluster.toBuilder().address(url).build(),
cluster.getSsl(),
maxBuffSize
webClientMaxBuffSize
),
ReactiveFailover.CONNECTION_REFUSED_EXCEPTION_FILTER,
"No alive connect instances available",
@ -158,7 +163,7 @@ public class KafkaClusterFactory {
WebClient webClient = new WebClientConfigurator()
.configureSsl(clusterProperties.getSsl(), clusterProperties.getSchemaRegistrySsl())
.configureBasicAuth(auth.getUsername(), auth.getPassword())
.configureBufferSize(maxBuffSize)
.configureBufferSize(webClientMaxBuffSize)
.build();
return ReactiveFailover.create(
parseUrlList(clusterProperties.getSchemaRegistry()),
@ -181,7 +186,7 @@ public class KafkaClusterFactory {
clusterProperties.getKsqldbServerAuth(),
clusterProperties.getSsl(),
clusterProperties.getKsqldbServerSsl(),
maxBuffSize
webClientMaxBuffSize
),
ReactiveFailover.CONNECTION_REFUSED_EXCEPTION_FILTER,
"No live ksqldb instances available",

View file

@ -2,6 +2,7 @@ package com.provectus.kafka.ui.util;
import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.config.WebclientProperties;
import com.provectus.kafka.ui.config.auth.OAuthProperties;
import com.provectus.kafka.ui.config.auth.RoleBasedAccessControlProperties;
import com.provectus.kafka.ui.exception.FileUploadException;
@ -97,6 +98,7 @@ public class DynamicConfigOperations {
.type(ctx.getEnvironment().getProperty("auth.type"))
.oauth2(getNullableBean(OAuthProperties.class))
.build())
.webclient(getNullableBean(WebclientProperties.class))
.build();
}
@ -204,6 +206,7 @@ public class DynamicConfigOperations {
private ClustersProperties kafka;
private RoleBasedAccessControlProperties rbac;
private Auth auth;
private WebclientProperties webclient;
@Data
@Builder
@ -222,6 +225,9 @@ public class DynamicConfigOperations {
Optional.ofNullable(auth)
.flatMap(a -> Optional.ofNullable(a.oauth2))
.ifPresent(OAuthProperties::validate);
Optional.ofNullable(webclient)
.ifPresent(WebclientProperties::validate);
}
}

View file

@ -4,9 +4,9 @@ import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
// Specifies field that can contain any kind of value - primitive, complex and nulls
public class AnyFieldSchema implements FieldSchema {
class AnyFieldSchema implements FieldSchema {
public static AnyFieldSchema get() {
static AnyFieldSchema get() {
return new AnyFieldSchema();
}

View file

@ -4,10 +4,10 @@ import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
public class ArrayFieldSchema implements FieldSchema {
class ArrayFieldSchema implements FieldSchema {
private final FieldSchema itemsSchema;
public ArrayFieldSchema(FieldSchema itemsSchema) {
ArrayFieldSchema(FieldSchema itemsSchema) {
this.itemsSchema = itemsSchema;
}

View file

@ -7,10 +7,10 @@ import java.util.List;
import java.util.Map;
public class EnumJsonType extends JsonType {
class EnumJsonType extends JsonType {
private final List<String> values;
public EnumJsonType(List<String> values) {
EnumJsonType(List<String> values) {
super(Type.ENUM);
this.values = values;
}

View file

@ -3,6 +3,6 @@ package com.provectus.kafka.ui.util.jsonschema;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
public interface FieldSchema {
interface FieldSchema {
JsonNode toJsonNode(ObjectMapper mapper);
}

View file

@ -4,7 +4,7 @@ import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Map;
public abstract class JsonType {
abstract class JsonType {
protected final Type type;
@ -12,13 +12,13 @@ public abstract class JsonType {
this.type = type;
}
public Type getType() {
Type getType() {
return type;
}
public abstract Map<String, JsonNode> toJsonNode(ObjectMapper mapper);
abstract Map<String, JsonNode> toJsonNode(ObjectMapper mapper);
public enum Type {
enum Type {
NULL,
BOOLEAN,
OBJECT,

View file

@ -2,21 +2,27 @@ package com.provectus.kafka.ui.util.jsonschema;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.BooleanNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import javax.annotation.Nullable;
public class MapFieldSchema implements FieldSchema {
private final FieldSchema itemSchema;
class MapFieldSchema implements FieldSchema {
private final @Nullable FieldSchema itemSchema;
public MapFieldSchema(FieldSchema itemSchema) {
MapFieldSchema(@Nullable FieldSchema itemSchema) {
this.itemSchema = itemSchema;
}
MapFieldSchema() {
this(null);
}
@Override
public JsonNode toJsonNode(ObjectMapper mapper) {
final ObjectNode objectNode = mapper.createObjectNode();
objectNode.set("type", new TextNode(JsonType.Type.OBJECT.getName()));
objectNode.set("additionalProperties", itemSchema.toJsonNode(mapper));
objectNode.set("additionalProperties", itemSchema != null ? itemSchema.toJsonNode(mapper) : BooleanNode.TRUE);
return objectNode;
}
}

View file

@ -9,24 +9,24 @@ import java.util.stream.Collectors;
import reactor.util.function.Tuple2;
import reactor.util.function.Tuples;
public class ObjectFieldSchema implements FieldSchema {
class ObjectFieldSchema implements FieldSchema {
public static final ObjectFieldSchema EMPTY = new ObjectFieldSchema(Map.of(), List.of());
static final ObjectFieldSchema EMPTY = new ObjectFieldSchema(Map.of(), List.of());
private final Map<String, FieldSchema> properties;
private final List<String> required;
public ObjectFieldSchema(Map<String, FieldSchema> properties,
ObjectFieldSchema(Map<String, FieldSchema> properties,
List<String> required) {
this.properties = properties;
this.required = required;
}
public Map<String, FieldSchema> getProperties() {
Map<String, FieldSchema> getProperties() {
return properties;
}
public List<String> getRequired() {
List<String> getRequired() {
return required;
}

View file

@ -5,11 +5,10 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;
import java.util.stream.Collectors;
public class OneOfFieldSchema implements FieldSchema {
class OneOfFieldSchema implements FieldSchema {
private final List<FieldSchema> schemaList;
public OneOfFieldSchema(
List<FieldSchema> schemaList) {
OneOfFieldSchema(List<FieldSchema> schemaList) {
this.schemaList = schemaList;
}

View file

@ -94,6 +94,9 @@ public class ProtobufSchemaConverter implements JsonSchemaConverter<Descriptors.
if (wellKnownTypeSchema.isPresent()) {
return wellKnownTypeSchema.get();
}
if (field.isMapField()) {
return new MapFieldSchema();
}
final JsonType jsonType = convertType(field);
FieldSchema fieldSchema;
if (jsonType.getType().equals(JsonType.Type.OBJECT)) {
@ -149,67 +152,47 @@ public class ProtobufSchemaConverter implements JsonSchemaConverter<Descriptors.
}
private JsonType convertType(Descriptors.FieldDescriptor field) {
switch (field.getType()) {
case INT32:
case FIXED32:
case SFIXED32:
case SINT32:
return new SimpleJsonType(
JsonType.Type.INTEGER,
Map.of(
"maximum", IntNode.valueOf(Integer.MAX_VALUE),
"minimum", IntNode.valueOf(Integer.MIN_VALUE)
)
);
case UINT32:
return new SimpleJsonType(
JsonType.Type.INTEGER,
Map.of(
"maximum", LongNode.valueOf(UnsignedInteger.MAX_VALUE.longValue()),
"minimum", IntNode.valueOf(0)
)
);
return switch (field.getType()) {
case INT32, FIXED32, SFIXED32, SINT32 -> new SimpleJsonType(
JsonType.Type.INTEGER,
Map.of(
"maximum", IntNode.valueOf(Integer.MAX_VALUE),
"minimum", IntNode.valueOf(Integer.MIN_VALUE)
)
);
case UINT32 -> new SimpleJsonType(
JsonType.Type.INTEGER,
Map.of(
"maximum", LongNode.valueOf(UnsignedInteger.MAX_VALUE.longValue()),
"minimum", IntNode.valueOf(0)
)
);
//TODO: actually all *64 types will be printed with quotes (as strings),
// see JsonFormat::printSingleFieldValue for impl. This can cause problems when you copy-paste from messages
// table to `Produce` area - need to think if it is critical or not.
case INT64:
case FIXED64:
case SFIXED64:
case SINT64:
return new SimpleJsonType(
JsonType.Type.INTEGER,
Map.of(
"maximum", LongNode.valueOf(Long.MAX_VALUE),
"minimum", LongNode.valueOf(Long.MIN_VALUE)
)
);
case UINT64:
return new SimpleJsonType(
JsonType.Type.INTEGER,
Map.of(
"maximum", new BigIntegerNode(UnsignedLong.MAX_VALUE.bigIntegerValue()),
"minimum", LongNode.valueOf(0)
)
);
case MESSAGE:
case GROUP:
return new SimpleJsonType(JsonType.Type.OBJECT);
case ENUM:
return new EnumJsonType(
field.getEnumType().getValues().stream()
.map(Descriptors.EnumValueDescriptor::getName)
.collect(Collectors.toList())
);
case BYTES:
case STRING:
return new SimpleJsonType(JsonType.Type.STRING);
case FLOAT:
case DOUBLE:
return new SimpleJsonType(JsonType.Type.NUMBER);
case BOOL:
return new SimpleJsonType(JsonType.Type.BOOLEAN);
default:
return new SimpleJsonType(JsonType.Type.STRING);
}
case INT64, FIXED64, SFIXED64, SINT64 -> new SimpleJsonType(
JsonType.Type.INTEGER,
Map.of(
"maximum", LongNode.valueOf(Long.MAX_VALUE),
"minimum", LongNode.valueOf(Long.MIN_VALUE)
)
);
case UINT64 -> new SimpleJsonType(
JsonType.Type.INTEGER,
Map.of(
"maximum", new BigIntegerNode(UnsignedLong.MAX_VALUE.bigIntegerValue()),
"minimum", LongNode.valueOf(0)
)
);
case MESSAGE, GROUP -> new SimpleJsonType(JsonType.Type.OBJECT);
case ENUM -> new EnumJsonType(
field.getEnumType().getValues().stream()
.map(Descriptors.EnumValueDescriptor::getName)
.collect(Collectors.toList())
);
case BYTES, STRING -> new SimpleJsonType(JsonType.Type.STRING);
case FLOAT, DOUBLE -> new SimpleJsonType(JsonType.Type.NUMBER);
case BOOL -> new SimpleJsonType(JsonType.Type.BOOLEAN);
};
}
}

View file

@ -4,10 +4,10 @@ import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.TextNode;
public class RefFieldSchema implements FieldSchema {
class RefFieldSchema implements FieldSchema {
private final String ref;
public RefFieldSchema(String ref) {
RefFieldSchema(String ref) {
this.ref = ref;
}
@ -16,7 +16,7 @@ public class RefFieldSchema implements FieldSchema {
return mapper.createObjectNode().set("$ref", new TextNode(ref));
}
public String getRef() {
String getRef() {
return ref;
}
}

View file

@ -3,10 +3,10 @@ package com.provectus.kafka.ui.util.jsonschema;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
public class SimpleFieldSchema implements FieldSchema {
class SimpleFieldSchema implements FieldSchema {
private final JsonType type;
public SimpleFieldSchema(JsonType type) {
SimpleFieldSchema(JsonType type) {
this.type = type;
}

View file

@ -6,15 +6,15 @@ import com.fasterxml.jackson.databind.node.TextNode;
import com.google.common.collect.ImmutableMap;
import java.util.Map;
public class SimpleJsonType extends JsonType {
class SimpleJsonType extends JsonType {
private final Map<String, JsonNode> additionalTypeProperties;
public SimpleJsonType(Type type) {
SimpleJsonType(Type type) {
this(type, Map.of());
}
public SimpleJsonType(Type type, Map<String, JsonNode> additionalTypeProperties) {
SimpleJsonType(Type type, Map<String, JsonNode> additionalTypeProperties) {
super(type);
this.additionalTypeProperties = additionalTypeProperties;
}

View file

@ -59,8 +59,10 @@ class ProtobufSchemaConverterTest {
TestMsg outer_ref = 2;
EmbeddedMsg self_ref = 3;
}
}""";
map<int32, string> intToStringMap = 21;
map<string, EmbeddedMsg> strToObjMap = 22;
}""";
String expectedJsonSchema = """
{
@ -109,7 +111,9 @@ class ProtobufSchemaConverterTest {
"v2": { "type": [ "number", "string", "object", "array", "boolean", "null" ] },
"uint32_w_field": { "type": "integer", "maximum": 4294967295, "minimum": 0 },
"bool_w_field": { "type": "boolean" },
"uint64_w_field": { "type": "integer", "maximum": 18446744073709551615, "minimum": 0 }
"uint64_w_field": { "type": "integer", "maximum": 18446744073709551615, "minimum": 0 },
"strToObjMap": { "type": "object", "additionalProperties": true },
"intToStringMap": { "type": "object", "additionalProperties": true }
}
},
"test.TestMsg.EmbeddedMsg": {

View file

@ -2562,6 +2562,7 @@ components:
- MEMBERS
- STATE
- MESSAGES_BEHIND
- TOPIC_NUM
ConsumerGroupsPageResponse:
type: object
@ -3644,6 +3645,12 @@ components:
type: array
items:
$ref: '#/components/schemas/Action'
webclient:
type: object
properties:
maxInMemoryBufferSize:
type: string
description: "examples: 20, 12KB, 5MB"
kafka:
type: object
properties:
@ -3656,6 +3663,10 @@ components:
type: integer
noDataEmptyPolls:
type: integer
adminClientTimeout:
type: integer
internalTopicPrefix:
type: string
clusters:
type: array
items:

View file

@ -37,9 +37,13 @@ public abstract class BasePage extends WebUtils {
protected String pageTitleFromHeader = "//h1[text()='%s']";
protected String pagePathFromHeader = "//a[text()='%s']/../h1";
// True when the global loading spinner element is currently displayed.
// Optional timeoutInSeconds is forwarded to the underlying visibility check.
protected boolean isSpinnerVisible(int... timeoutInSeconds) {
  return isVisible(loadingSpinner, timeoutInSeconds);
}
// If the spinner is shown, waits up to 60 seconds for it to disappear;
// otherwise returns immediately.
protected void waitUntilSpinnerDisappear(int... timeoutInSeconds) {
  log.debug("\nwaitUntilSpinnerDisappear");
  if (isSpinnerVisible(timeoutInSeconds)) {
    loadingSpinner.shouldBe(Condition.disappear, Duration.ofSeconds(60));
  }
}

View file

@ -1,5 +1,6 @@
package com.provectus.kafka.ui.pages.ksqldb;
import static com.codeborne.selenide.Condition.visible;
import static com.codeborne.selenide.Selenide.$;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
@ -10,12 +11,12 @@ import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import com.provectus.kafka.ui.pages.ksqldb.enums.KsqlMenuTabs;
import io.qameta.allure.Step;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import org.openqa.selenium.By;
public class KsqlDbList extends BasePage {
protected SelenideElement executeKsqlBtn = $x("//button[text()='Execute KSQL Request']");
protected SelenideElement tablesTab = $x("//nav[@role='navigation']/a[text()='Tables']");
protected SelenideElement streamsTab = $x("//nav[@role='navigation']/a[text()='Streams']");
@ -76,9 +77,24 @@ public class KsqlDbList extends BasePage {
this.element = element;
}
private SelenideElement getNameElm() {
return element.$x("./td[1]");
}
@Step
public String getTableName() {
return element.$x("./td[1]").getText().trim();
return getNameElm().getText().trim();
}
@Step
public boolean isVisible() {
boolean isVisible = false;
try {
getNameElm().shouldBe(visible, Duration.ofMillis(500));
isVisible = true;
} catch (Throwable ignored) {
}
return isVisible;
}
@Step
@ -110,9 +126,24 @@ public class KsqlDbList extends BasePage {
this.element = element;
}
private SelenideElement getNameElm() {
return element.$x("./td[1]");
}
@Step
public String getStreamName() {
return element.$x("./td[1]").getText().trim();
return getNameElm().getText().trim();
}
@Step
public boolean isVisible() {
boolean isVisible = false;
try {
getNameElm().shouldBe(visible, Duration.ofMillis(500));
isVisible = true;
} catch (Throwable ignored) {
}
return isVisible;
}
@Step

View file

@ -40,9 +40,14 @@ public class KsqlQueryForm extends BasePage {
}
@Step
public KsqlQueryForm clickExecuteBtn() {
public String getEnteredQuery() {
return queryAreaValue.getText().trim();
}
@Step
public KsqlQueryForm clickExecuteBtn(String query) {
clickByActions(executeBtn);
if (queryAreaValue.getText().contains("EMIT CHANGES;")) {
if (query.contains("EMIT CHANGES")) {
loadingSpinner.shouldBe(Condition.visible);
} else {
waitUntilSpinnerDisappear();
@ -66,19 +71,19 @@ public class KsqlQueryForm extends BasePage {
@Step
public KsqlQueryForm clickAddStreamProperty() {
clickByJavaScript(addStreamPropertyBtn);
clickByActions(addStreamPropertyBtn);
return this;
}
@Step
public KsqlQueryForm setQuery(String query) {
queryAreaValue.shouldBe(Condition.visible).click();
queryArea.setValue(query);
sendKeysByActions(queryArea, query);
return this;
}
@Step
public KsqlQueryForm.KsqlResponseGridItem getTableByName(String name) {
public KsqlQueryForm.KsqlResponseGridItem getItemByName(String name) {
return initItems().stream()
.filter(e -> e.getName().equalsIgnoreCase(name))
.findFirst().orElseThrow();
@ -114,16 +119,20 @@ public class KsqlQueryForm extends BasePage {
return element.$x("./td[1]").getText().trim();
}
private SelenideElement getNameElm() {
return element.$x("./td[2]");
}
@Step
public String getName() {
return element.$x("./td[2]").scrollTo().getText().trim();
return getNameElm().scrollTo().getText().trim();
}
@Step
public boolean isVisible() {
boolean isVisible = false;
try {
element.$x("./td[2]").shouldBe(visible, Duration.ofMillis(500));
getNameElm().shouldBe(visible, Duration.ofMillis(500));
isVisible = true;
} catch (Throwable ignored) {
}

View file

@ -95,7 +95,7 @@ public class WebUtils {
return isSelected;
}
public static boolean selectElement(SelenideElement element, boolean select) {
public static void selectElement(SelenideElement element, boolean select) {
if (select) {
if (!element.isSelected()) {
clickByJavaScript(element);
@ -105,6 +105,5 @@ public class WebUtils {
clickByJavaScript(element);
}
}
return true;
}
}

View file

@ -22,57 +22,50 @@ public class SmokeBacklog extends BaseManualTest {
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = KSQL_DB_SUITE_ID)
@QaseId(276)
@QaseId(277)
@Test
public void testCaseB() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = KSQL_DB_SUITE_ID)
@QaseId(277)
@QaseId(278)
@Test
public void testCaseC() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = KSQL_DB_SUITE_ID)
@QaseId(278)
@Test
public void testCaseD() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = KSQL_DB_SUITE_ID)
@QaseId(284)
@Test
public void testCaseE() {
public void testCaseD() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = BROKERS_SUITE_ID)
@QaseId(331)
@Test
public void testCaseF() {
public void testCaseE() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = BROKERS_SUITE_ID)
@QaseId(332)
@Test
public void testCaseG() {
public void testCaseF() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = TOPICS_PROFILE_SUITE_ID)
@QaseId(335)
@Test
public void testCaseH() {
public void testCaseG() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = TOPICS_PROFILE_SUITE_ID)
@QaseId(336)
@Test
public void testCaseI() {
public void testCaseH() {
}
}

View file

@ -1,14 +1,17 @@
package com.provectus.kafka.ui.smokesuite.ksqldb;
import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_TABLES;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.pages.ksqldb.models.Stream;
import com.provectus.kafka.ui.pages.ksqldb.models.Table;
import io.qameta.allure.Step;
import io.qase.api.annotation.QaseId;
import java.util.ArrayList;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
@ -16,53 +19,30 @@ import org.testng.asserts.SoftAssert;
public class KsqlDbTest extends BaseTest {
private static final Stream STREAM_FOR_CHECK_TABLES = new Stream()
.setName("STREAM_FOR_CHECK_TABLES_" + randomAlphabetic(4).toUpperCase())
.setTopicName("TOPIC_FOR_STREAM_" + randomAlphabetic(4).toUpperCase());
private static final Stream DEFAULT_STREAM = new Stream()
.setName("DEFAULT_STREAM_" + randomAlphabetic(4).toUpperCase())
.setTopicName("DEFAULT_TOPIC_" + randomAlphabetic(4).toUpperCase());
private static final Table FIRST_TABLE = new Table()
.setName("FIRST_TABLE" + randomAlphabetic(4).toUpperCase())
.setStreamName(STREAM_FOR_CHECK_TABLES.getName());
.setName("FIRST_TABLE_" + randomAlphabetic(4).toUpperCase())
.setStreamName(DEFAULT_STREAM.getName());
private static final Table SECOND_TABLE = new Table()
.setName("SECOND_TABLE" + randomAlphabetic(4).toUpperCase())
.setStreamName(STREAM_FOR_CHECK_TABLES.getName());
.setName("SECOND_TABLE_" + randomAlphabetic(4).toUpperCase())
.setStreamName(DEFAULT_STREAM.getName());
private static final List<String> TOPIC_NAMES_LIST = new ArrayList<>();
@BeforeClass(alwaysRun = true)
public void beforeClass() {
apiService
.createStream(STREAM_FOR_CHECK_TABLES)
.createStream(DEFAULT_STREAM)
.createTables(FIRST_TABLE, SECOND_TABLE);
TOPIC_NAMES_LIST.addAll(List.of(STREAM_FOR_CHECK_TABLES.getTopicName(),
TOPIC_NAMES_LIST.addAll(List.of(DEFAULT_STREAM.getTopicName(),
FIRST_TABLE.getName(), SECOND_TABLE.getName()));
}
@QaseId(41)
@Test(priority = 1)
public void checkShowTablesRequestExecution() {
navigateToKsqlDb();
ksqlDbList
.clickExecuteKsqlRequestBtn();
ksqlQueryForm
.waitUntilScreenReady()
.setQuery(SHOW_TABLES.getQuery())
.clickExecuteBtn();
SoftAssert softly = new SoftAssert();
softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
softly.assertTrue(ksqlQueryForm.getTableByName(FIRST_TABLE.getName()).isVisible(), "getTableName()");
softly.assertTrue(ksqlQueryForm.getTableByName(SECOND_TABLE.getName()).isVisible(), "getTableName()");
softly.assertAll();
}
@QaseId(86)
@Test(priority = 2)
@Test(priority = 1)
public void clearResultsForExecutedRequest() {
navigateToKsqlDb();
ksqlDbList
.clickExecuteKsqlRequestBtn();
ksqlQueryForm
.waitUntilScreenReady()
.setQuery(SHOW_TABLES.getQuery())
.clickExecuteBtn();
navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
SoftAssert softly = new SoftAssert();
softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
softly.assertAll();
@ -72,6 +52,40 @@ public class KsqlDbTest extends BaseTest {
softly.assertAll();
}
@QaseId(276)
@Test(priority = 2)
public void clearEnteredQueryCheck() {
navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
Assert.assertFalse(ksqlQueryForm.getEnteredQuery().isEmpty(), "getEnteredQuery()");
ksqlQueryForm
.clickClearBtn();
Assert.assertTrue(ksqlQueryForm.getEnteredQuery().isEmpty(), "getEnteredQuery()");
}
@QaseId(41)
@Test(priority = 3)
public void checkShowTablesRequestExecution() {
navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
SoftAssert softly = new SoftAssert();
softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
softly.assertTrue(ksqlQueryForm.getItemByName(FIRST_TABLE.getName()).isVisible(), "getItemByName()");
softly.assertTrue(ksqlQueryForm.getItemByName(SECOND_TABLE.getName()).isVisible(), "getItemByName()");
softly.assertAll();
}
@Step
private void navigateToKsqlDbAndExecuteRequest(String query) {
naviSideBar
.openSideMenu(KSQL_DB);
ksqlDbList
.waitUntilScreenReady()
.clickExecuteKsqlRequestBtn();
ksqlQueryForm
.waitUntilScreenReady()
.setQuery(query)
.clickExecuteBtn(query);
}
@AfterClass(alwaysRun = true)
public void afterClass() {
TOPIC_NAMES_LIST.forEach(topicName -> apiService.deleteTopic(topicName));

View file

@ -8,7 +8,6 @@ import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.models.Topic;
import com.provectus.kafka.ui.pages.topics.TopicDetails;
import io.qameta.allure.Issue;
import io.qameta.allure.Step;
import io.qase.api.annotation.QaseId;
@ -140,24 +139,22 @@ public class MessagesTest extends BaseTest {
softly.assertAll();
}
@Ignore
@Issue("https://github.com/provectus/kafka-ui/issues/2394")
@QaseId(15)
@Test(priority = 6)
public void checkMessageFilteringByOffset() {
navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
topicDetails
.openDetailsTab(MESSAGES);
TopicDetails.MessageGridItem secondMessage = topicDetails.getMessageByOffset(1);
int nextOffset = topicDetails
.openDetailsTab(MESSAGES)
.getAllMessages().stream()
.findFirst().orElseThrow().getOffset() + 1;
topicDetails
.selectSeekTypeDdlMessagesTab("Offset")
.setSeekTypeValueFldMessagesTab(String.valueOf(secondMessage.getOffset()))
.setSeekTypeValueFldMessagesTab(String.valueOf(nextOffset))
.clickSubmitFiltersBtnMessagesTab();
SoftAssert softly = new SoftAssert();
topicDetails.getAllMessages().forEach(message ->
softly.assertTrue(message.getOffset() == secondMessage.getOffset()
|| message.getOffset() > secondMessage.getOffset(),
String.format("Expected offset is: %s, but found: %s", secondMessage.getOffset(), message.getOffset())));
softly.assertTrue(message.getOffset() >= nextOffset,
String.format("Expected offset not less: %s, but found: %s", nextOffset, message.getOffset())));
softly.assertAll();
}
@ -168,13 +165,11 @@ public class MessagesTest extends BaseTest {
@Test(priority = 7)
public void checkMessageFilteringByTimestamp() {
navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
topicDetails
.openDetailsTab(MESSAGES);
LocalDateTime firstTimestamp = topicDetails.getMessageByOffset(0).getTimestamp();
List<TopicDetails.MessageGridItem> nextMessages = topicDetails.getAllMessages().stream()
LocalDateTime firstTimestamp = topicDetails
.openDetailsTab(MESSAGES)
.getMessageByOffset(0).getTimestamp();
LocalDateTime nextTimestamp = topicDetails.getAllMessages().stream()
.filter(message -> message.getTimestamp().getMinute() != firstTimestamp.getMinute())
.toList();
LocalDateTime nextTimestamp = nextMessages.stream()
.findFirst().orElseThrow().getTimestamp();
topicDetails
.selectSeekTypeDdlMessagesTab("Timestamp")
@ -183,8 +178,7 @@ public class MessagesTest extends BaseTest {
.clickSubmitFiltersBtnMessagesTab();
SoftAssert softly = new SoftAssert();
topicDetails.getAllMessages().forEach(message ->
softly.assertTrue(message.getTimestamp().isEqual(nextTimestamp)
|| message.getTimestamp().isAfter(nextTimestamp),
softly.assertFalse(message.getTimestamp().isBefore(nextTimestamp),
String.format("Expected that %s is not before %s.", message.getTimestamp(), nextTimestamp)));
softly.assertAll();
}

View file

@ -1,6 +1,6 @@
<!DOCTYPE suite SYSTEM "https://testng.org/testng-1.0.dtd">
<suite name="RegressionSuite">
<test name="RegressionTest" enabled="true" parallel="classes" thread-count="3">
<test name="RegressionTest" enabled="true" parallel="classes" thread-count="2">
<packages>
<package name="com.provectus.kafka.ui.smokesuite.*"/>
<package name="com.provectus.kafka.ui.sanitysuite.*"/>

View file

@ -1,6 +1,6 @@
<!DOCTYPE suite SYSTEM "https://testng.org/testng-1.0.dtd">
<suite name="SanitySuite">
<test name="SanityTest" enabled="true" parallel="classes" thread-count="3">
<test name="SanityTest" enabled="true" parallel="classes" thread-count="2">
<packages>
<package name="com.provectus.kafka.ui.sanitysuite.*"/>
</packages>

View file

@ -1,6 +1,6 @@
<!DOCTYPE suite SYSTEM "https://testng.org/testng-1.0.dtd">
<suite name="SmokeSuite">
<test name="SmokeTest" enabled="true" parallel="classes" thread-count="3">
<test name="SmokeTest" enabled="true" parallel="classes" thread-count="2">
<packages>
<package name="com.provectus.kafka.ui.smokesuite.*"/>
</packages>

View file

@ -1,6 +1,6 @@
import { Table } from 'components/common/table/Table/Table.styled';
import TableHeaderCell from 'components/common/table/TableHeaderCell/TableHeaderCell';
import { ConsumerGroupTopicPartition } from 'generated-sources';
import { ConsumerGroupTopicPartition, SortOrder } from 'generated-sources';
import React from 'react';
import { ContentBox, TopicContentWrapper } from './TopicContent.styled';
@ -9,7 +9,125 @@ interface Props {
consumers: ConsumerGroupTopicPartition[];
}
type OrderByKey = keyof ConsumerGroupTopicPartition;
interface Headers {
title: string;
orderBy: OrderByKey | undefined;
}
const TABLE_HEADERS_MAP: Headers[] = [
{ title: 'Partition', orderBy: 'partition' },
{ title: 'Consumer ID', orderBy: 'consumerId' },
{ title: 'Host', orderBy: 'host' },
{ title: 'Messages Behind', orderBy: 'messagesBehind' },
{ title: 'Current Offset', orderBy: 'currentOffset' },
{ title: 'End offset', orderBy: 'endOffset' },
];
const ipV4ToNum = (ip?: string) => {
if (typeof ip === 'string' && ip.length !== 0) {
const withoutSlash = ip.indexOf('/') !== -1 ? ip.slice(1) : ip;
return Number(
withoutSlash
.split('.')
.map((octet) => `000${octet}`.slice(-3))
.join('')
);
}
return 0;
};
type ComparatorFunction<T> = (
valueA: T,
valueB: T,
order: SortOrder,
property?: keyof T
) => number;
const numberComparator: ComparatorFunction<ConsumerGroupTopicPartition> = (
valueA,
valueB,
order,
property
) => {
if (property !== undefined) {
return order === SortOrder.ASC
? Number(valueA[property]) - Number(valueB[property])
: Number(valueB[property]) - Number(valueA[property]);
}
return 0;
};
const ipComparator: ComparatorFunction<ConsumerGroupTopicPartition> = (
valueA,
valueB,
order
) =>
order === SortOrder.ASC
? ipV4ToNum(valueA.host) - ipV4ToNum(valueB.host)
: ipV4ToNum(valueB.host) - ipV4ToNum(valueA.host);
const consumerIdComparator: ComparatorFunction<ConsumerGroupTopicPartition> = (
valueA,
valueB,
order
) => {
if (valueA.consumerId && valueB.consumerId) {
if (order === SortOrder.ASC) {
if (valueA.consumerId?.toLowerCase() > valueB.consumerId?.toLowerCase()) {
return 1;
}
}
if (order === SortOrder.DESC) {
if (valueB.consumerId?.toLowerCase() > valueA.consumerId?.toLowerCase()) {
return -1;
}
}
}
return 0;
};
const TopicContents: React.FC<Props> = ({ consumers }) => {
const [orderBy, setOrderBy] = React.useState<OrderByKey>('partition');
const [sortOrder, setSortOrder] = React.useState<SortOrder>(SortOrder.DESC);
const handleOrder = React.useCallback((columnName: string | null) => {
if (typeof columnName === 'string') {
setOrderBy(columnName as OrderByKey);
setSortOrder((prevOrder) =>
prevOrder === SortOrder.DESC ? SortOrder.ASC : SortOrder.DESC
);
}
}, []);
const sortedConsumers = React.useMemo(() => {
if (orderBy && sortOrder) {
const isNumberProperty =
orderBy === 'partition' ||
orderBy === 'currentOffset' ||
orderBy === 'endOffset' ||
orderBy === 'messagesBehind';
let comparator: ComparatorFunction<ConsumerGroupTopicPartition>;
if (isNumberProperty) {
comparator = numberComparator;
}
if (orderBy === 'host') {
comparator = ipComparator;
}
if (orderBy === 'consumerId') {
comparator = consumerIdComparator;
}
return consumers.sort((a, b) => comparator(a, b, sortOrder, orderBy));
}
return consumers;
}, [orderBy, sortOrder, consumers]);
return (
<TopicContentWrapper>
<td colSpan={3}>
@ -17,16 +135,20 @@ const TopicContents: React.FC<Props> = ({ consumers }) => {
<Table isFullwidth>
<thead>
<tr>
<TableHeaderCell title="Partition" />
<TableHeaderCell title="Consumer ID" />
<TableHeaderCell title="Host" />
<TableHeaderCell title="Messages behind" />
<TableHeaderCell title="Current offset" />
<TableHeaderCell title="End offset" />
{TABLE_HEADERS_MAP.map((header) => (
<TableHeaderCell
key={header.orderBy}
title={header.title}
orderBy={orderBy}
sortOrder={sortOrder}
orderValue={header.orderBy}
handleOrderBy={handleOrder}
/>
))}
</tr>
</thead>
<tbody>
{consumers.map((consumer) => (
{sortedConsumers.map((consumer) => (
<tr key={consumer.partition}>
<td>{consumer.partition}</td>
<td>{consumer.consumerId}</td>

View file

@ -51,9 +51,9 @@ const List = () => {
accessorKey: 'members',
},
{
id: ConsumerGroupOrdering.TOPIC_NUM,
header: 'Num Of Topics',
accessorKey: 'topics',
enableSorting: false,
},
{
id: ConsumerGroupOrdering.MESSAGES_BEHIND,

View file

@ -31,7 +31,7 @@ const TableView: React.FC<TableViewProps> = ({ fetching, rows }) => {
data={rows || []}
columns={columns}
emptyMessage={fetching ? 'Loading...' : 'No rows found'}
enableSorting={false}
enableSorting
/>
);
};

View file

@ -4,11 +4,7 @@ import { CellContext } from '@tanstack/react-table';
import ClusterContext from 'components/contexts/ClusterContext';
import { ClusterNameRoute } from 'lib/paths';
import useAppParams from 'lib/hooks/useAppParams';
import {
Dropdown,
DropdownItem,
DropdownItemHint,
} from 'components/common/Dropdown';
import { Dropdown, DropdownItemHint } from 'components/common/Dropdown';
import {
useDeleteTopic,
useClearTopicMessages,
@ -55,7 +51,8 @@ const ActionsCell: React.FC<CellContext<Topic, unknown>> = ({ row }) => {
with DELETE policy
</DropdownItemHint>
</ActionDropdownItem>
<DropdownItem
<ActionDropdownItem
disabled={!isTopicDeletionAllowed}
onClick={recreateTopic.mutateAsync}
confirm={
<>
@ -63,9 +60,14 @@ const ActionsCell: React.FC<CellContext<Topic, unknown>> = ({ row }) => {
</>
}
danger
permission={{
resource: ResourceType.TOPIC,
action: [Action.VIEW, Action.CREATE, Action.DELETE],
value: name,
}}
>
Recreate Topic
</DropdownItem>
</ActionDropdownItem>
<ActionDropdownItem
disabled={!isTopicDeletionAllowed}
onClick={() => deleteTopic.mutateAsync(name)}

View file

@ -27,7 +27,7 @@ export interface AddEditFilterContainerProps {
inputDisplayNameDefaultValue?: string;
inputCodeDefaultValue?: string;
isAdd?: boolean;
submitCallback?: (values: AddMessageFilters) => void;
submitCallback?: (values: AddMessageFilters) => Promise<void>;
}
const AddEditFilterContainer: React.FC<AddEditFilterContainerProps> = ({

View file

@ -6,6 +6,7 @@ import SavedFilters from 'components/Topics/Topic/Messages/Filters/SavedFilters'
import SavedIcon from 'components/common/Icons/SavedIcon';
import QuestionIcon from 'components/common/Icons/QuestionIcon';
import useBoolean from 'lib/hooks/useBoolean';
import { showAlert } from 'lib/errorHandling';
import AddEditFilterContainer from './AddEditFilterContainer';
import InfoModal from './InfoModal';
@ -43,6 +44,19 @@ const AddFilter: React.FC<FilterModalProps> = ({
const onSubmit = React.useCallback(
async (values: AddMessageFilters) => {
const isFilterExists = filters.some(
(filter) => filter.name === values.name
);
if (isFilterExists) {
showAlert('error', {
id: '',
title: 'Validation Error',
message: 'Filter with the same name already exists',
});
return;
}
const data = { ...values };
if (data.saveFilter) {
addFilter(data);

View file

@ -1,5 +1,4 @@
import React from 'react';
import styled from 'styled-components';
import useDataSaver from 'lib/hooks/useDataSaver';
import { TopicMessage } from 'generated-sources';
import MessageToggleIcon from 'components/common/Icons/MessageToggleIcon';
@ -7,22 +6,12 @@ import IconButtonWrapper from 'components/common/Icons/IconButtonWrapper';
import { Dropdown, DropdownItem } from 'components/common/Dropdown';
import { formatTimestamp } from 'lib/dateTimeHelpers';
import { JSONPath } from 'jsonpath-plus';
import Ellipsis from 'components/common/Ellipsis/Ellipsis';
import WarningRedIcon from 'components/common/Icons/WarningRedIcon';
import MessageContent from './MessageContent/MessageContent';
import * as S from './MessageContent/MessageContent.styled';
const StyledDataCell = styled.td`
overflow: hidden;
white-space: nowrap;
text-overflow: ellipsis;
max-width: 350px;
min-width: 350px;
`;
const ClickableRow = styled.tr`
cursor: pointer;
`;
export interface PreviewFilter {
field: string;
path: string;
@ -40,9 +29,13 @@ const Message: React.FC<Props> = ({
timestampType,
offset,
key,
keySize,
partition,
content,
valueSize,
headers,
valueSerde,
keySerde,
},
keyFilters,
contentFilters,
@ -100,7 +93,7 @@ const Message: React.FC<Props> = ({
return (
<>
<ClickableRow
<S.ClickableRow
onMouseEnter={() => setVEllipsisOpen(true)}
onMouseLeave={() => setVEllipsisOpen(false)}
onClick={toggleIsOpen}
@ -115,16 +108,20 @@ const Message: React.FC<Props> = ({
<td>
<div>{formatTimestamp(timestamp)}</div>
</td>
<StyledDataCell title={key}>
{renderFilteredJson(key, keyFilters)}
</StyledDataCell>
<StyledDataCell title={content}>
<S.DataCell title={key}>
<Ellipsis text={renderFilteredJson(key, keyFilters)}>
{keySerde === 'Fallback' && <WarningRedIcon />}
</Ellipsis>
</S.DataCell>
<S.DataCell title={content}>
<S.Metadata>
<S.MetadataValue>
{renderFilteredJson(content, contentFilters)}
<Ellipsis text={renderFilteredJson(content, contentFilters)}>
{valueSerde === 'Fallback' && <WarningRedIcon />}
</Ellipsis>
</S.MetadataValue>
</S.Metadata>
</StyledDataCell>
</S.DataCell>
<td style={{ width: '5%' }}>
{vEllipsisOpen && (
<Dropdown>
@ -135,7 +132,7 @@ const Message: React.FC<Props> = ({
</Dropdown>
)}
</td>
</ClickableRow>
</S.ClickableRow>
{isOpen && (
<MessageContent
messageKey={key}
@ -143,6 +140,8 @@ const Message: React.FC<Props> = ({
headers={headers}
timestamp={timestamp}
timestampType={timestampType}
keySize={keySize}
contentSize={valueSize}
/>
)}
</>

View file

@ -35,7 +35,16 @@ export const ContentBox = styled.div`
flex-grow: 1;
}
`;
export const DataCell = styled.td`
overflow: hidden;
white-space: nowrap;
text-overflow: ellipsis;
max-width: 350px;
min-width: 350px;
`;
export const ClickableRow = styled.tr`
cursor: pointer;
`;
export const MetadataWrapper = styled.div`
background-color: ${({ theme }) => theme.topicMetaData.backgroundColor};
padding: 24px;

View file

@ -15,6 +15,8 @@ export interface MessageContentProps {
headers?: { [key: string]: string | undefined };
timestamp?: Date;
timestampType?: TopicMessageTimestampTypeEnum;
keySize?: number;
contentSize?: number;
}
const MessageContent: React.FC<MessageContentProps> = ({
@ -23,6 +25,8 @@ const MessageContent: React.FC<MessageContentProps> = ({
headers,
timestamp,
timestampType,
keySize,
contentSize,
}) => {
const [activeTab, setActiveTab] = React.useState<Tab>('content');
const [searchParams] = useSearchParams();
@ -54,8 +58,7 @@ const MessageContent: React.FC<MessageContentProps> = ({
e.preventDefault();
setActiveTab('headers');
};
const keySize = new TextEncoder().encode(messageKey).length;
const contentSize = new TextEncoder().encode(messageContent).length;
const contentType =
messageContent && messageContent.trim().startsWith('{')
? SchemaType.JSON

View file

@ -11,6 +11,7 @@ import upperFirst from 'lodash/upperFirst';
jsf.option('fillProperties', false);
jsf.option('alwaysFakeOptionals', true);
jsf.option('failOnInvalidFormat', false);
const generateValueFromSchema = (preffered?: SerdeDescription) => {
if (!preffered?.schema) {

View file

@ -49,7 +49,7 @@ const CustomParamField: React.FC<Props> = ({
label: option,
disabled:
(config &&
config[option].source !== ConfigSource.DYNAMIC_TOPIC_CONFIG) ||
config[option]?.source !== ConfigSource.DYNAMIC_TOPIC_CONFIG) ||
existingFields.includes(option),
}));

View file

@ -0,0 +1,14 @@
import styled from 'styled-components';
export const Text = styled.div`
overflow: hidden;
white-space: nowrap;
text-overflow: ellipsis;
max-width: 340px;
`;
export const Wrapper = styled.div`
display: flex;
gap: 8px;
align-items: center;
`;

View file

@ -0,0 +1,20 @@
import React, { PropsWithChildren } from 'react';
import * as S from './Ellipsis.styled';
type EllipsisProps = {
text: React.ReactNode;
};
const Ellipsis: React.FC<PropsWithChildren<EllipsisProps>> = ({
text,
children,
}) => {
return (
<S.Wrapper>
<S.Text>{text}</S.Text>
{children}
</S.Wrapper>
);
};
export default Ellipsis;

View file

@ -0,0 +1,32 @@
import React from 'react';
import { useTheme } from 'styled-components';
const WarningRedIcon: React.FC = () => {
const theme = useTheme();
return (
<svg
width="20"
height="20"
viewBox="0 0 20 20"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<rect
width="20"
height="20"
rx="10"
fill={theme.icons.warningRedIcon.rectFill}
/>
<path
d="M9 4.74219H11V12.7422H9V4.74219Z"
fill={theme.icons.warningRedIcon.pathFill}
/>
<path
d="M9 14.7422C9 14.1899 9.44772 13.7422 10 13.7422C10.5523 13.7422 11 14.1899 11 14.7422C11 15.2945 10.5523 15.7422 10 15.7422C9.44772 15.7422 9 15.2945 9 14.7422Z"
fill={theme.icons.warningRedIcon.pathFill}
/>
</svg>
);
};
export default WarningRedIcon;

View file

@ -14,7 +14,7 @@ import type {
PaginationState,
ColumnDef,
} from '@tanstack/react-table';
import { useSearchParams } from 'react-router-dom';
import { useSearchParams, useLocation } from 'react-router-dom';
import { PER_PAGE } from 'lib/constants';
import { Button } from 'components/common/Button/Button';
import Input from 'components/common/Input/Input';
@ -129,6 +129,7 @@ const Table: React.FC<TableProps<any>> = ({
onRowClick,
}) => {
const [searchParams, setSearchParams] = useSearchParams();
const location = useLocation();
const [rowSelection, setRowSelection] = React.useState({});
const onSortingChange = React.useCallback(
(updater: UpdaterFn<SortingState>) => {
@ -136,7 +137,7 @@ const Table: React.FC<TableProps<any>> = ({
setSearchParams(searchParams);
return newState;
},
[searchParams]
[searchParams, location]
);
const onPaginationChange = React.useCallback(
(updater: UpdaterFn<PaginationState>) => {
@ -145,7 +146,7 @@ const Table: React.FC<TableProps<any>> = ({
setRowSelection({});
return newState;
},
[searchParams]
[searchParams, location]
);
const table = useReactTable({

View file

@ -173,6 +173,10 @@ const baseTheme = {
closeIcon: Colors.neutral[30],
deleteIcon: Colors.red[20],
warningIcon: Colors.yellow[20],
warningRedIcon: {
rectFill: Colors.red[10],
pathFill: Colors.red[50],
},
messageToggleIcon: {
normal: Colors.brand[30],
hover: Colors.brand[40],

View file

@ -52,7 +52,7 @@
<pnpm.version>v7.4.0</pnpm.version>
<!-- Plugin versions -->
<fabric8-maven-plugin.version>0.42.0</fabric8-maven-plugin.version>
<fabric8-maven-plugin.version>0.42.1</fabric8-maven-plugin.version>
<frontend-maven-plugin.version>1.12.1</frontend-maven-plugin.version>
<maven-clean-plugin.version>3.2.0</maven-clean-plugin.version>
<maven-compiler-plugin.version>3.10.1</maven-compiler-plugin.version>