+ {previewFor !== null && (
+ <PreviewModal
+ values={previewFor === 'key' ? keyFilters : contentFilters}
+ toggleIsOpen={() => setPreviewFor(null)}
+ setFilters={(payload: PreviewFilter[]) =>
+ previewFor === 'key'
+ ? setKeyFilters(payload)
+ : setContentFilters(payload)
+ }
+ />
+ )}
@@ -77,18 +88,6 @@ const MessagesTable: React.FC = () => {
onPreview={() => setPreviewFor('content')}
/>
-
- {previewFor !== null && (
- <PreviewModal
- values={previewFor === 'key' ? keyFilters : contentFilters}
- toggleIsOpen={() => setPreviewFor(null)}
- setFilters={(payload: PreviewFilter[]) =>
- previewFor === 'key'
- ? setKeyFilters(payload)
- : setContentFilters(payload)
- }
- />
- )}
@@ -139,7 +138,7 @@ const MessagesTable: React.FC = () => {
- >
+
);
};
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/PreviewModal.styled.ts b/kafka-ui-react-app/src/components/Topics/Topic/Messages/PreviewModal.styled.ts
index 35f221ddea..c1cecfbbd0 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/PreviewModal.styled.ts
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/PreviewModal.styled.ts
@@ -7,6 +7,7 @@ export const PreviewModal = styled.div`
background: ${({ theme }) => theme.modal.backgroundColor};
position: absolute;
left: 25%;
+ top: 30px; // offset the modal from the top edge of the page
border: 1px solid ${({ theme }) => theme.modal.border.contrast};
box-shadow: ${({ theme }) => theme.modal.shadow};
padding: 32px;
From 398181e0d25fc94cf58793d8cafec5fb0a1f06ee Mon Sep 17 00:00:00 2001
From: Alexandr Nezboretskiy <88882353+anezboretskiy@users.noreply.github.com>
Date: Wed, 1 Feb 2023 11:24:40 +0200
Subject: [PATCH 12/54] [e2e] checking ksql request execution (#3295)
* Added ksqlApi methods
* Added classObjects to Facade
* Added models for streams and tables
* add page classes
* add enums
* add checkingKsqlRequestExecution()
* Refactored default topics to constant
* Refactored KsqlTests
* Resolve conversations
---------
Co-authored-by: Vlad Senyuta <66071557+VladSenyuta@users.noreply.github.com>
---
.../kafka/ui/pages/ksqldb/KsqlDbList.java | 137 +++++++++++++++++
.../kafka/ui/pages/ksqldb/KsqlQueryForm.java | 144 ++++++++++++++++++
.../ui/pages/ksqldb/enums/KsqlMenuTabs.java | 16 ++
.../pages/ksqldb/enums/KsqlQueryConfig.java | 18 +++
.../kafka/ui/pages/ksqldb/models/Stream.java | 10 ++
.../kafka/ui/pages/ksqldb/models/Table.java | 10 ++
.../kafka/ui/services/ApiService.java | 83 ++++++++++
.../com/provectus/kafka/ui/base/Facade.java | 9 +-
.../kafka/ui/suite/ksqldb/KsqlTests.java | 65 ++++++++
.../kafka/ui/suite/topics/TopicsTests.java | 10 +-
10 files changed, 496 insertions(+), 6 deletions(-)
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlMenuTabs.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlQueryConfig.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Stream.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Table.java
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/ksqldb/KsqlTests.java
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java
new file mode 100644
index 0000000000..ec735df65a
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java
@@ -0,0 +1,137 @@
+package com.provectus.kafka.ui.pages.ksqldb;
+
+import static com.codeborne.selenide.Selenide.$;
+import static com.codeborne.selenide.Selenide.$x;
+
+import com.codeborne.selenide.CollectionCondition;
+import com.codeborne.selenide.Condition;
+import com.codeborne.selenide.SelenideElement;
+import com.provectus.kafka.ui.pages.BasePage;
+import com.provectus.kafka.ui.pages.ksqldb.enums.KsqlMenuTabs;
+import io.qameta.allure.Step;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import org.openqa.selenium.By;
+
+public class KsqlDbList extends BasePage {
+ protected SelenideElement executeKsqlBtn = $x("//button[text()='Execute KSQL Request']");
+ protected SelenideElement tablesTab = $x("//nav[@role='navigation']/a[text()='Tables']");
+ protected SelenideElement streamsTab = $x("//nav[@role='navigation']/a[text()='Streams']");
+
+ @Step
+ public KsqlDbList waitUntilScreenReady() {
+ waitUntilSpinnerDisappear();
+ Arrays.asList(tablesTab, streamsTab).forEach(tab -> tab.shouldBe(Condition.visible));
+ return this;
+ }
+
+ @Step
+ public KsqlDbList clickExecuteKsqlRequestBtn() {
+ clickByJavaScript(executeKsqlBtn);
+ return this;
+ }
+
+ @Step
+ public KsqlDbList openDetailsTab(KsqlMenuTabs menu) {
+ $(By.linkText(menu.toString())).shouldBe(Condition.visible).click();
+ waitUntilSpinnerDisappear();
+ return this;
+ }
+
+ private List<KsqlDbList.KsqlTablesGridItem> initTablesItems() {
+ List<KsqlDbList.KsqlTablesGridItem> gridItemList = new ArrayList<>();
+ allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ .forEach(item -> gridItemList.add(new KsqlDbList.KsqlTablesGridItem(item)));
+ return gridItemList;
+ }
+
+ @Step
+ public KsqlDbList.KsqlTablesGridItem getTableByName(String tableName) {
+ return initTablesItems().stream()
+ .filter(e -> e.getTableName().equals(tableName))
+ .findFirst().orElse(null);
+ }
+
+ public static class KsqlTablesGridItem extends BasePage {
+
+ private final SelenideElement element;
+
+ public KsqlTablesGridItem(SelenideElement element) {
+ this.element = element;
+ }
+
+ @Step
+ public String getTableName() {
+ return element.$x("./td[1]").getText().trim();
+ }
+
+ @Step
+ public String getTopicName() {
+ return element.$x("./td[2]").getText().trim();
+ }
+
+ @Step
+ public String getKeyFormat() {
+ return element.$x("./td[3]").getText().trim();
+ }
+
+ @Step
+ public String getValueFormat() {
+ return element.$x("./td[4]").getText().trim();
+ }
+
+ @Step
+ public String getIsWindowed() {
+ return element.$x("./td[5]").getText().trim();
+ }
+ }
+
+ private List<KsqlDbList.KsqlStreamsGridItem> initStreamsItems() {
+ List<KsqlDbList.KsqlStreamsGridItem> gridItemList = new ArrayList<>();
+ allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ .forEach(item -> gridItemList.add(new KsqlDbList.KsqlStreamsGridItem(item)));
+ return gridItemList;
+ }
+
+ @Step
+ public KsqlDbList.KsqlStreamsGridItem getStreamByName(String streamName) {
+ return initStreamsItems().stream()
+ .filter(e -> e.getStreamName().equals(streamName))
+ .findFirst().orElse(null);
+ }
+
+ public static class KsqlStreamsGridItem extends BasePage {
+
+ private final SelenideElement element;
+
+ public KsqlStreamsGridItem(SelenideElement element) {
+ this.element = element;
+ }
+
+ @Step
+ public String getStreamName() {
+ return element.$x("./td[1]").getText().trim();
+ }
+
+ @Step
+ public String getTopicName() {
+ return element.$x("./td[2]").getText().trim();
+ }
+
+ @Step
+ public String getKeyFormat() {
+ return element.$x("./td[3]").getText().trim();
+ }
+
+ @Step
+ public String getValueFormat() {
+ return element.$x("./td[4]").getText().trim();
+ }
+
+ @Step
+ public String getIsWindowed() {
+ return element.$x("./td[5]").getText().trim();
+ }
+ }
+}
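For context, a minimal usage sketch (not part of the patch, assuming the KSQL DB list page is already open and populated) showing how a test could consume this grid API; note that getTableByName() resolves via orElse(null), so callers should guard against a missing row:

```java
import com.provectus.kafka.ui.pages.ksqldb.KsqlDbList;

// Hypothetical helper, not from the patch: reads the backing topic of a table row.
public class KsqlDbListUsageSketch {

  static String topicOfTable(KsqlDbList ksqlDbList, String tableName) {
    KsqlDbList.KsqlTablesGridItem table = ksqlDbList.getTableByName(tableName);
    if (table == null) {
      // getTableByName() returns null when no grid row matches the name.
      throw new AssertionError("table not found in grid: " + tableName);
    }
    return table.getTopicName();
  }
}
```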
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java
new file mode 100644
index 0000000000..21ceacdbb7
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java
@@ -0,0 +1,144 @@
+package com.provectus.kafka.ui.pages.ksqldb;
+
+import static com.codeborne.selenide.Condition.visible;
+import static com.codeborne.selenide.Selenide.$$x;
+import static com.codeborne.selenide.Selenide.$x;
+
+import com.codeborne.selenide.CollectionCondition;
+import com.codeborne.selenide.Condition;
+import com.codeborne.selenide.ElementsCollection;
+import com.codeborne.selenide.SelenideElement;
+import com.provectus.kafka.ui.pages.BasePage;
+import io.qameta.allure.Step;
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.List;
+
+public class KsqlQueryForm extends BasePage {
+ protected SelenideElement pageTitle = $x("//h1[text()='Query']");
+ protected SelenideElement clearBtn = $x("//div/button[text()='Clear']");
+ protected SelenideElement executeBtn = $x("//div/button[text()='Execute']");
+ protected SelenideElement stopQueryBtn = $x("//div/button[text()='Stop query']");
+ protected SelenideElement clearResultsBtn = $x("//div/button[text()='Clear results']");
+ protected SelenideElement addStreamPropertyBtn = $x("//button[text()='Add Stream Property']");
+ protected SelenideElement queryAreaValue = $x("//div[@class='ace_content']");
+ protected SelenideElement queryArea = $x("//div[@id='ksql']/textarea[@class='ace_text-input']");
+ protected ElementsCollection ksqlGridItems = $$x("//tbody//tr");
+ protected ElementsCollection keyField = $$x("//input[@aria-label='value']");
+ protected ElementsCollection valueField = $$x("//input[@aria-label='value']");
+
+ @Step
+ public KsqlQueryForm waitUntilScreenReady() {
+ waitUntilSpinnerDisappear();
+ pageTitle.shouldBe(Condition.visible);
+ return this;
+ }
+
+ @Step
+ public KsqlQueryForm clickClearBtn() {
+ clickByJavaScript(clearBtn);
+ return this;
+ }
+
+ @Step
+ public KsqlQueryForm clickExecuteBtn() {
+ clickByJavaScript(executeBtn);
+ if (queryAreaValue.getText().contains("EMIT CHANGES;")) {
+ loadingSpinner.shouldBe(Condition.visible);
+ } else {
+ waitUntilSpinnerDisappear();
+ }
+ return this;
+ }
+
+ @Step
+ public KsqlQueryForm clickStopQueryBtn() {
+ clickByJavaScript(stopQueryBtn);
+ waitUntilSpinnerDisappear();
+ return this;
+ }
+
+ @Step
+ public KsqlQueryForm clickClearResultsBtn() {
+ clickByJavaScript(clearResultsBtn);
+ waitUntilSpinnerDisappear();
+ return this;
+ }
+
+ @Step
+ public KsqlQueryForm clickAddStreamProperty() {
+ clickByJavaScript(addStreamPropertyBtn);
+ return this;
+ }
+
+ @Step
+ public KsqlQueryForm setQuery(String query) {
+ queryAreaValue.shouldBe(Condition.visible).click();
+ queryArea.setValue(query);
+ return this;
+ }
+
+ private List<KsqlQueryForm.KsqlResponseGridItem> initItems() {
+ List<KsqlQueryForm.KsqlResponseGridItem> gridItemList = new ArrayList<>();
+ ksqlGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ .forEach(item -> gridItemList.add(new KsqlQueryForm.KsqlResponseGridItem(item)));
+ return gridItemList;
+ }
+
+ @Step
+ public KsqlQueryForm.KsqlResponseGridItem getTableByName(String name) {
+ return initItems().stream()
+ .filter(e -> e.getName().equalsIgnoreCase(name))
+ .findFirst().orElse(null);
+ }
+
+ public static class KsqlResponseGridItem extends BasePage {
+
+ private final SelenideElement element;
+
+ private KsqlResponseGridItem(SelenideElement element) {
+ this.element = element;
+ }
+
+ @Step
+ public String getType() {
+ return element.$x("./td[1]").getText().trim();
+ }
+
+ @Step
+ public String getName() {
+ return element.$x("./td[2]").scrollTo().getText().trim();
+ }
+
+ @Step
+ public boolean isVisible() {
+ boolean isVisible = false;
+ try {
+ element.$x("./td[2]").shouldBe(visible, Duration.ofMillis(500));
+ isVisible = true;
+ } catch (Throwable ignored) {
+ }
+ return isVisible;
+ }
+
+ @Step
+ public String getTopic() {
+ return element.$x("./td[3]").getText().trim();
+ }
+
+ @Step
+ public String getKeyFormat() {
+ return element.$x("./td[4]").getText().trim();
+ }
+
+ @Step
+ public String getValueFormat() {
+ return element.$x("./td[5]").getText().trim();
+ }
+
+ @Step
+ public String getIsWindowed() {
+ return element.$x("./td[6]").getText().trim();
+ }
+ }
+}
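A hedged sketch (not in the patch) of how the Execute/Stop pair above is meant to be driven: clickExecuteBtn() deliberately leaves the spinner visible for push queries ending in `EMIT CHANGES;`, so such queries have to be stopped explicitly:

```java
import com.provectus.kafka.ui.pages.ksqldb.KsqlQueryForm;

// Hypothetical flow, assuming the query form is already open in the browser.
public class KsqlQueryFormUsageSketch {

  static void runAndStopPushQuery(KsqlQueryForm form, String streamName) {
    form.waitUntilScreenReady()
        // A push query keeps emitting rows, so the loading spinner stays visible...
        .setQuery(String.format("SELECT * FROM %s EMIT CHANGES;", streamName))
        .clickExecuteBtn()
        // ...until the query is stopped explicitly.
        .clickStopQueryBtn()
        .clickClearResultsBtn();
  }
}
```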
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlMenuTabs.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlMenuTabs.java
new file mode 100644
index 0000000000..f3bb55d42b
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlMenuTabs.java
@@ -0,0 +1,16 @@
+package com.provectus.kafka.ui.pages.ksqldb.enums;
+
+public enum KsqlMenuTabs {
+ TABLES("Tables"),
+ STREAMS("Streams");
+
+ private final String value;
+
+ KsqlMenuTabs(String value) {
+ this.value = value;
+ }
+
+ public String toString() {
+ return value;
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlQueryConfig.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlQueryConfig.java
new file mode 100644
index 0000000000..c918b07997
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlQueryConfig.java
@@ -0,0 +1,18 @@
+package com.provectus.kafka.ui.pages.ksqldb.enums;
+
+public enum KsqlQueryConfig {
+ SHOW_TABLES("show tables;"),
+ SHOW_STREAMS("show streams;"),
+ SELECT_ALL_FROM("SELECT * FROM %s\n" +
+ "EMIT CHANGES;");
+
+ private final String query;
+
+ KsqlQueryConfig(String query) {
+ this.query = query;
+ }
+
+ public String getQuery() {
+ return query;
+ }
+}
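SHOW_TABLES and SHOW_STREAMS are complete statements, while SELECT_ALL_FROM carries a %s placeholder, so callers are expected to run it through String.format() first; a small sketch:

```java
import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SELECT_ALL_FROM;
import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_TABLES;

public class KsqlQueryConfigUsageSketch {

  public static void main(String[] args) {
    // Fixed statements are usable as-is.
    System.out.println(SHOW_TABLES.getQuery());
    // Parameterized statements substitute the stream/table name for %s.
    System.out.println(String.format(SELECT_ALL_FROM.getQuery(), "MY_STREAM"));
    // Prints:
    // SELECT * FROM MY_STREAM
    // EMIT CHANGES;
  }
}
```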
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Stream.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Stream.java
new file mode 100644
index 0000000000..f61b824386
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Stream.java
@@ -0,0 +1,10 @@
+package com.provectus.kafka.ui.pages.ksqldb.models;
+
+import lombok.Data;
+import lombok.experimental.Accessors;
+
+@Data
+@Accessors(chain = true)
+public class Stream {
+ private String name, topicName, valueFormat, partitions;
+}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Table.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Table.java
new file mode 100644
index 0000000000..cbb0c1d2a7
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Table.java
@@ -0,0 +1,10 @@
+package com.provectus.kafka.ui.pages.ksqldb.models;
+
+import lombok.Data;
+import lombok.experimental.Accessors;
+
+@Data
+@Accessors(chain = true)
+public class Table {
+ private String name, streamName;
+}
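Both models lean on Lombok's @Data and @Accessors(chain = true): every generated setter returns the instance, which is what lets the tests below build fixtures fluently. A minimal sketch (field values here are invented examples):

```java
import com.provectus.kafka.ui.pages.ksqldb.models.Stream;
import com.provectus.kafka.ui.pages.ksqldb.models.Table;

public class KsqlModelsUsageSketch {

  public static void main(String[] args) {
    // Chained setters: each set*() returns the model itself.
    Stream stream = new Stream()
        .setName("LOCATIONS_STREAM")
        .setTopicName("locations")
        .setValueFormat("JSON")
        .setPartitions("1");
    Table table = new Table()
        .setName("LOCATIONS_TABLE")
        .setStreamName(stream.getName());
    System.out.println(table.getName() + " is built from " + table.getStreamName());
  }
}
```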
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
index 8451ef836e..c41796cb85 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
@@ -6,20 +6,28 @@ import static com.provectus.kafka.ui.utilities.FileUtils.fileToString;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.provectus.kafka.ui.api.ApiClient;
import com.provectus.kafka.ui.api.api.KafkaConnectApi;
+import com.provectus.kafka.ui.api.api.KsqlApi;
import com.provectus.kafka.ui.api.api.MessagesApi;
import com.provectus.kafka.ui.api.api.SchemasApi;
import com.provectus.kafka.ui.api.api.TopicsApi;
import com.provectus.kafka.ui.api.model.CreateTopicMessage;
+import com.provectus.kafka.ui.api.model.KsqlCommandV2;
+import com.provectus.kafka.ui.api.model.KsqlCommandV2Response;
+import com.provectus.kafka.ui.api.model.KsqlResponse;
import com.provectus.kafka.ui.api.model.NewConnector;
import com.provectus.kafka.ui.api.model.NewSchemaSubject;
import com.provectus.kafka.ui.api.model.TopicCreation;
import com.provectus.kafka.ui.models.Connector;
import com.provectus.kafka.ui.models.Schema;
import com.provectus.kafka.ui.models.Topic;
+import com.provectus.kafka.ui.pages.ksqldb.models.Stream;
+import com.provectus.kafka.ui.pages.ksqldb.models.Table;
import com.provectus.kafka.ui.settings.BaseSource;
import io.qameta.allure.Step;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
+import java.util.Objects;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.reactive.function.client.WebClientResponseException;
@@ -48,6 +56,9 @@ public class ApiService extends BaseSource {
return new MessagesApi(new ApiClient().setBasePath(BASE_LOCAL_URL));
}
+ @SneakyThrows
+ private KsqlApi ksqlApi() { return new KsqlApi(new ApiClient().setBasePath(BASE_LOCAL_URL)); }
+
@SneakyThrows
private void createTopic(String clusterName, String topicName) {
TopicCreation topic = new TopicCreation();
@@ -173,4 +184,76 @@ public class ApiService extends BaseSource {
sendMessage(CLUSTER_NAME, topic);
return this;
}
+
+ @Step
+ public ApiService createStream(Stream stream) {
+ KsqlCommandV2Response pipeIdStream = ksqlApi().executeKsql(
+ CLUSTER_NAME, new KsqlCommandV2()
+ .ksql(String.format("CREATE STREAM %s (profileId VARCHAR, latitude DOUBLE, longitude DOUBLE) ",
+ stream.getName())
+ + String.format("WITH (kafka_topic='%s', value_format='json', partitions=1);",
+ stream.getTopicName())))
+ .block();
+ assert pipeIdStream != null;
+ List<KsqlResponse> responseListStream =
+ ksqlApi().openKsqlResponsePipe(CLUSTER_NAME, pipeIdStream.getPipeId()).collectList().block();
+ assert Objects.requireNonNull(responseListStream).size() != 0;
+ return this;
+ }
+
+ @Step
+ public ApiService createTables(Table firstTable, Table secondTable) {
+ KsqlCommandV2Response pipeIdTable1 = ksqlApi().executeKsql(
+ CLUSTER_NAME, new KsqlCommandV2().ksql(
+ String.format("CREATE TABLE %s AS ", firstTable.getName())
+ + " SELECT profileId, "
+ + " LATEST_BY_OFFSET(latitude) AS la, "
+ + " LATEST_BY_OFFSET(longitude) AS lo "
+ + String.format(" FROM %s ", firstTable.getStreamName())
+ + " GROUP BY profileId "
+ + " EMIT CHANGES;"))
+ .block();
+ assert pipeIdTable1 != null;
+ List<KsqlResponse> responseListTable =
+ ksqlApi().openKsqlResponsePipe(CLUSTER_NAME, pipeIdTable1.getPipeId()).collectList().block();
+ assert Objects.requireNonNull(responseListTable).size() != 0;
+ KsqlCommandV2Response pipeIdTable2 = ksqlApi().executeKsql(
+ CLUSTER_NAME,
+ new KsqlCommandV2().ksql(String.format("CREATE TABLE %s AS ", secondTable.getName())
+ + " SELECT ROUND(GEO_DISTANCE(la, lo, 37.4133, -122.1162), -1) AS distanceInMiles, "
+ + " COLLECT_LIST(profileId) AS riders, "
+ + " COUNT(*) AS count "
+ + String.format(" FROM %s ", firstTable.getName())
+ + " GROUP BY ROUND(GEO_DISTANCE(la, lo, 37.4133, -122.1162), -1);"))
+ .block();
+ assert pipeIdTable2 != null;
+ List<KsqlResponse> responseListTable2 =
+ ksqlApi().openKsqlResponsePipe(CLUSTER_NAME, pipeIdTable2.getPipeId()).collectList().block();
+ assert Objects.requireNonNull(responseListTable2).size() != 0;
+ return this;
+ }
+
+ @Step
+ public ApiService insertInto(Stream stream) {
+ String streamName = stream.getName();
+ KsqlCommandV2Response pipeIdInsert = ksqlApi().executeKsql(CLUSTER_NAME, new KsqlCommandV2()
+ .ksql(
+ "INSERT INTO " + streamName + " (profileId, latitude, longitude) VALUES ('c2309eec', 37.7877, -122.4205);"
+ + "INSERT INTO " + streamName +
+ " (profileId, latitude, longitude) VALUES ('18f4ea86', 37.3903, -122.0643); "
+ + "INSERT INTO " + streamName +
+ " (profileId, latitude, longitude) VALUES ('4ab5cbad', 37.3952, -122.0813); "
+ + "INSERT INTO " + streamName +
+ " (profileId, latitude, longitude) VALUES ('8b6eae59', 37.3944, -122.0813); "
+ + "INSERT INTO " + streamName +
+ " (profileId, latitude, longitude) VALUES ('4a7c7b41', 37.4049, -122.0822); "
+ + "INSERT INTO " + streamName +
+ " (profileId, latitude, longitude) VALUES ('4ddad000', 37.7857, -122.4011);"))
+ .block();
+ assert pipeIdInsert != null;
+ List<KsqlResponse> responseListInsert =
+ ksqlApi().openKsqlResponsePipe(CLUSTER_NAME, pipeIdInsert.getPipeId()).collectList().block();
+ assert Objects.requireNonNull(responseListInsert).size() != 0;
+ return this;
+ }
}
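All three helpers above follow the same two-step pattern against the generated KSQL client: executeKsql() submits the statement and returns only a pipe id, and openKsqlResponsePipe() is then drained for the actual responses. A hedged sketch of that pattern in isolation (baseUrl and clusterName are placeholders; the client setup mirrors ksqlApi() above):

```java
import com.provectus.kafka.ui.api.ApiClient;
import com.provectus.kafka.ui.api.api.KsqlApi;
import com.provectus.kafka.ui.api.model.KsqlCommandV2;
import com.provectus.kafka.ui.api.model.KsqlCommandV2Response;
import com.provectus.kafka.ui.api.model.KsqlResponse;
import java.util.List;
import java.util.Objects;

public class KsqlApiUsageSketch {

  static List<KsqlResponse> execute(String baseUrl, String clusterName, String ksql) {
    KsqlApi ksqlApi = new KsqlApi(new ApiClient().setBasePath(baseUrl));
    // Step 1: submit the statement; the response only carries a pipe id.
    KsqlCommandV2Response pipe = ksqlApi
        .executeKsql(clusterName, new KsqlCommandV2().ksql(ksql))
        .block();
    Objects.requireNonNull(pipe, "executeKsql() returned no response");
    // Step 2: drain the response pipe to get the KSQL results.
    return ksqlApi
        .openKsqlResponsePipe(clusterName, pipe.getPipeId())
        .collectList()
        .block();
  }
}
```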
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/Facade.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/Facade.java
index 1dad47a700..ac4180fe61 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/Facade.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/Facade.java
@@ -1,6 +1,5 @@
package com.provectus.kafka.ui.base;
-import com.provectus.kafka.ui.services.ApiService;
import com.provectus.kafka.ui.pages.NaviSideBar;
import com.provectus.kafka.ui.pages.TopPanel;
import com.provectus.kafka.ui.pages.brokers.BrokersConfigTab;
@@ -11,14 +10,17 @@ import com.provectus.kafka.ui.pages.connector.ConnectorDetails;
import com.provectus.kafka.ui.pages.connector.KafkaConnectList;
import com.provectus.kafka.ui.pages.consumer.ConsumersDetails;
import com.provectus.kafka.ui.pages.consumer.ConsumersList;
+import com.provectus.kafka.ui.pages.ksqldb.KsqlDbList;
+import com.provectus.kafka.ui.pages.ksqldb.KsqlQueryForm;
import com.provectus.kafka.ui.pages.schema.SchemaCreateForm;
import com.provectus.kafka.ui.pages.schema.SchemaDetails;
import com.provectus.kafka.ui.pages.schema.SchemaRegistryList;
import com.provectus.kafka.ui.pages.topic.ProduceMessagePanel;
-import com.provectus.kafka.ui.pages.topic.TopicSettingsTab;
import com.provectus.kafka.ui.pages.topic.TopicCreateEditForm;
import com.provectus.kafka.ui.pages.topic.TopicDetails;
+import com.provectus.kafka.ui.pages.topic.TopicSettingsTab;
import com.provectus.kafka.ui.pages.topic.TopicsList;
+import com.provectus.kafka.ui.services.ApiService;
public abstract class Facade {
protected ApiService apiService = new ApiService();
@@ -40,4 +42,7 @@ public abstract class Facade {
protected BrokersDetails brokersDetails = new BrokersDetails();
protected BrokersConfigTab brokersConfigTab = new BrokersConfigTab();
protected TopicSettingsTab topicSettingsTab = new TopicSettingsTab();
+ protected KsqlQueryForm ksqlQueryForm = new KsqlQueryForm();
+ protected KsqlDbList ksqlDbList = new KsqlDbList();
+
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/ksqldb/KsqlTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/ksqldb/KsqlTests.java
new file mode 100644
index 0000000000..ee03fd8de1
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/ksqldb/KsqlTests.java
@@ -0,0 +1,65 @@
+package com.provectus.kafka.ui.suite.ksqldb;
+
+import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.KSQL_DB;
+import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_TABLES;
+import static org.apache.commons.lang.RandomStringUtils.randomAlphabetic;
+
+import com.provectus.kafka.ui.base.BaseTest;
+import com.provectus.kafka.ui.pages.ksqldb.models.Stream;
+import com.provectus.kafka.ui.pages.ksqldb.models.Table;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
+import io.qase.api.annotation.CaseId;
+import org.assertj.core.api.SoftAssertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Order;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
+
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
+public class KsqlTests extends BaseTest {
+ private static final long SUITE_ID = 8;
+ private static final String SUITE_TITLE = "KSQL_DB";
+ private static final Stream STREAM_FOR_CHECKING_TABLES = new Stream()
+ .setName("STREAM_FOR_CHECKING_TABLES_" + randomAlphabetic(4).toUpperCase())
+ .setTopicName("TOPIC_FOR_STREAM_" + randomAlphabetic(4).toUpperCase());
+ private static final Table FIRST_TABLE = new Table()
+ .setName("FIRST_TABLE"+ randomAlphabetic(4).toUpperCase())
+ .setStreamName(STREAM_FOR_CHECKING_TABLES.getName());
+ private static final Table SECOND_TABLE = new Table()
+ .setName("SECOND_TABLE"+ randomAlphabetic(4).toUpperCase())
+ .setStreamName(STREAM_FOR_CHECKING_TABLES.getName());
+
+ @BeforeAll
+ public void beforeAll() {
+ apiService
+ .createStream(STREAM_FOR_CHECKING_TABLES)
+ .createTables(FIRST_TABLE, SECOND_TABLE);
+ }
+
+ @DisplayName("check KSQL request execution")
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(41)
+ @Test
+ @Order(1)
+ public void checkingKsqlRequestExecution() {
+ naviSideBar
+ .openSideMenu(KSQL_DB);
+ ksqlDbList
+ .waitUntilScreenReady()
+ .clickExecuteKsqlRequestBtn();
+ ksqlQueryForm
+ .waitUntilScreenReady()
+ .setQuery(SHOW_TABLES.getQuery())
+ .clickExecuteBtn();
+ SoftAssertions softly = new SoftAssertions();
+ softly.assertThat(ksqlQueryForm.getTableByName(FIRST_TABLE.getName()).isVisible())
+ .as("isVisible()").isTrue();
+ softly.assertThat(ksqlQueryForm.getTableByName(SECOND_TABLE.getName()).isVisible())
+ .as("isVisible()").isTrue();
+ softly.assertAll();
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
index 7f301835c8..fa51ebe1fc 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
@@ -63,12 +63,14 @@ public class TopicsTests extends BaseTest {
.setNumberOfPartitions(1)
.setMaxMessageBytes("1000012")
.setMaxSizeOnDisk(NOT_SET);
+ private static final Topic TOPIC_FOR_CHECK_FILTERS = new Topic()
+ .setName("topic-for-check-filters-" + randomAlphabetic(5));
private static final Topic TOPIC_FOR_DELETE = new Topic().setName("topic-to-delete-" + randomAlphabetic(5));
private static final List<Topic> TOPIC_LIST = new ArrayList<>();
@BeforeAll
public void beforeAll() {
- TOPIC_LIST.addAll(List.of(TOPIC_TO_UPDATE_AND_DELETE, TOPIC_FOR_DELETE));
+ TOPIC_LIST.addAll(List.of(TOPIC_TO_UPDATE_AND_DELETE, TOPIC_FOR_DELETE, TOPIC_FOR_CHECK_FILTERS));
TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName()));
}
@@ -337,7 +339,7 @@ public class TopicsTests extends BaseTest {
@Order(11)
void addingNewFilterWithinTopic() {
String filterName = randomAlphabetic(5);
- navigateToTopicsAndOpenDetails("_schemas");
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
topicDetails
.openDetailsTab(MESSAGES)
.clickMessagesAddFiltersBtn()
@@ -364,7 +366,7 @@ public class TopicsTests extends BaseTest {
@Order(12)
void checkFilterSavingWithinSavedFilters() {
String displayName = randomAlphabetic(5);
- navigateToTopicsAndOpenDetails("my_ksql_1ksql_processing_log");
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
topicDetails
.openDetailsTab(MESSAGES)
.clickMessagesAddFiltersBtn()
@@ -389,7 +391,7 @@ public class TopicsTests extends BaseTest {
@Order(13)
void checkingApplyingSavedFilterWithinTopicMessages() {
String displayName = randomAlphabetic(5);
- navigateToTopicsAndOpenDetails("my_ksql_1ksql_processing_log");
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
topicDetails
.openDetailsTab(MESSAGES)
.clickMessagesAddFiltersBtn()
From 9e1e9b379905772afd7fb6815a2d28d817bf193e Mon Sep 17 00:00:00 2001
From: Vlad Senyuta <66071557+VladSenyuta@users.noreply.github.com>
Date: Thu, 2 Feb 2023 15:24:42 +0200
Subject: [PATCH 13/54] [e2e] switch to TestNG (#3301)
* test commit
* fix BaseTest
* upd global
* upd global
* upd global
* add local browser VM option
* fix TopicsList column header locator
* fix withStartupTimeout()
* switch e2e to TestNG
* upd pom
* upd page classes
* upd -pl kafka-ui-e2e-checks
---
.github/workflows/e2e-checks.yaml | 2 +-
kafka-ui-e2e-checks/README.md | 67 ++-
kafka-ui-e2e-checks/pom.xml | 143 ++---
.../provectus/kafka/ui/models/Connector.java | 1 -
.../com/provectus/kafka/ui/models/Schema.java | 10 +-
.../com/provectus/kafka/ui/models/Topic.java | 9 +-
.../provectus/kafka/ui/pages/BasePage.java | 175 +++---
.../provectus/kafka/ui/pages/NaviSideBar.java | 21 +-
.../provectus/kafka/ui/pages/TopPanel.java | 8 +-
.../ui/pages/brokers/BrokersConfigTab.java | 47 +-
.../ui/pages/brokers/BrokersDetails.java | 145 ++---
.../kafka/ui/pages/brokers/BrokersList.java | 179 +++---
.../ConnectorCreateForm.java | 12 +-
.../ConnectorDetails.java | 42 +-
.../KafkaConnectList.java | 16 +-
.../ConsumersDetails.java | 6 +-
.../ConsumersList.java | 7 +-
.../pages/{ksqldb => ksqlDb}/KsqlDbList.java | 8 +-
.../{ksqldb => ksqlDb}/KsqlQueryForm.java | 6 +-
.../enums/KsqlMenuTabs.java | 7 +-
.../enums/KsqlQueryConfig.java | 11 +-
.../{ksqldb => ksqlDb}/models/Stream.java | 5 +-
.../{ksqldb => ksqlDb}/models/Table.java | 5 +-
.../{schema => schemas}/SchemaCreateForm.java | 46 +-
.../{schema => schemas}/SchemaDetails.java | 20 +-
.../SchemaRegistryList.java | 8 +-
.../ui/pages/topic/TopicCreateEditForm.java | 289 ---------
.../kafka/ui/pages/topic/TopicDetails.java | 452 --------------
.../ui/pages/topic/TopicSettingsTab.java | 65 ---
.../kafka/ui/pages/topic/TopicsList.java | 260 ---------
.../pages/topic/enums/CleanupPolicyValue.java | 24 -
.../topic/enums/CustomParameterType.java | 36 --
.../ui/pages/topic/enums/MaxSizeOnDisk.java | 26 -
.../ui/pages/topic/enums/TimeToRetain.java | 25 -
.../ProduceMessagePanel.java | 11 +-
.../ui/pages/topics/TopicCreateEditForm.java | 267 +++++++++
.../kafka/ui/pages/topics/TopicDetails.java | 446 ++++++++++++++
.../ui/pages/topics/TopicSettingsTab.java | 66 +++
.../kafka/ui/pages/topics/TopicsList.java | 261 +++++++++
.../topics/enums/CleanupPolicyValue.java | 25 +
.../topics/enums/CustomParameterType.java | 37 ++
.../ui/pages/topics/enums/MaxSizeOnDisk.java | 27 +
.../ui/pages/topics/enums/TimeToRetain.java | 26 +
.../kafka/ui/services/ApiService.java | 251 ++++----
.../ui/settings/listeners/AllureListener.java | 35 ++
.../ui/settings/listeners/LoggerListener.java | 37 ++
.../kafka/ui/utilities/TimeUtils.java | 15 +-
.../kafka/ui/utilities/WebUtils.java | 122 ++--
.../qaseIoUtils/DisplayNameGenerator.java | 33 --
.../utilities/qaseIoUtils/QaseExtension.java | 15 +-
.../qaseIoUtils/TestCaseGenerator.java | 2 +-
.../java/com/provectus/kafka/ui/BaseTest.java | 135 +++++
.../provectus/kafka/ui/{base => }/Facade.java | 29 +-
.../com/provectus/kafka/ui/base/BaseTest.java | 142 -----
.../kafka/ui/smokeSuite/SmokeTest.java | 26 +
.../ui/smokeSuite/brokers/BrokersTest.java | 61 ++
.../connectors/ConnectorsTest.java} | 100 ++--
.../ui/smokeSuite/ksqlDb/KsqlDbTest.java | 58 ++
.../ui/smokeSuite/schemas/SchemasTest.java | 229 ++++++++
.../ui/smokeSuite/topics/MessagesTest.java | 292 ++++++++++
.../ui/smokeSuite/topics/TopicsTest.java | 503 ++++++++++++++++
.../provectus/kafka/ui/suite/SmokeTests.java | 23 -
.../kafka/ui/suite/brokers/BrokersTests.java | 63 --
.../kafka/ui/suite/ksqldb/KsqlTests.java | 65 ---
.../kafka/ui/suite/schemas/SchemasTests.java | 249 --------
.../ui/suite/topics/TopicMessagesTests.java | 316 ----------
.../kafka/ui/suite/topics/TopicsTests.java | 551 ------------------
.../src/test/resources/regression.xml | 8 +
.../src/test/resources/smoke.xml | 8 +
69 files changed, 3318 insertions(+), 3399 deletions(-)
rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/{connector => connectors}/ConnectorCreateForm.java (89%)
rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/{connector => connectors}/ConnectorDetails.java (62%)
rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/{connector => connectors}/KafkaConnectList.java (77%)
rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/{consumer => consumers}/ConsumersDetails.java (95%)
rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/{consumer => consumers}/ConsumersList.java (84%)
rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/{ksqldb => ksqlDb}/KsqlDbList.java (95%)
rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/{ksqldb => ksqlDb}/KsqlQueryForm.java (97%)
rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/{ksqldb => ksqlDb}/enums/KsqlMenuTabs.java (64%)
rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/{ksqldb => ksqlDb}/enums/KsqlQueryConfig.java (58%)
rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/{ksqldb => ksqlDb}/models/Stream.java (50%)
rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/{ksqldb => ksqlDb}/models/Table.java (55%)
rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/{schema => schemas}/SchemaCreateForm.java (76%)
rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/{schema => schemas}/SchemaDetails.java (82%)
rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/{schema => schemas}/SchemaRegistryList.java (90%)
delete mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicCreateEditForm.java
delete mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java
delete mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicSettingsTab.java
delete mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
delete mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/CleanupPolicyValue.java
delete mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/CustomParameterType.java
delete mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/MaxSizeOnDisk.java
delete mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/TimeToRetain.java
rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/{topic => topics}/ProduceMessagePanel.java (94%)
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicCreateEditForm.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicSettingsTab.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/enums/CleanupPolicyValue.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/enums/CustomParameterType.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/enums/MaxSizeOnDisk.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/enums/TimeToRetain.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/listeners/AllureListener.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/listeners/LoggerListener.java
delete mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/DisplayNameGenerator.java
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java
rename kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/{base => }/Facade.java (65%)
delete mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java
rename kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/{suite/connectors/ConnectorsTests.java => smokeSuite/connectors/ConnectorsTest.java} (63%)
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
delete mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/SmokeTests.java
delete mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/brokers/BrokersTests.java
delete mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/ksqldb/KsqlTests.java
delete mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/schemas/SchemasTests.java
delete mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java
delete mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
create mode 100644 kafka-ui-e2e-checks/src/test/resources/regression.xml
create mode 100644 kafka-ui-e2e-checks/src/test/resources/smoke.xml
diff --git a/.github/workflows/e2e-checks.yaml b/.github/workflows/e2e-checks.yaml
index 89e9d71f0b..85e3c17d2f 100644
--- a/.github/workflows/e2e-checks.yaml
+++ b/.github/workflows/e2e-checks.yaml
@@ -42,7 +42,7 @@ jobs:
- name: e2e run
run: |
./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
- ./mvnw -B -V -ntp -DQASEIO_API_TOKEN=${{ secrets.QASEIO_API_TOKEN }} -pl '!kafka-ui-api' test -Pprod
+ ./mvnw -B -V -ntp -DQASEIO_API_TOKEN=${{ secrets.QASEIO_API_TOKEN }} -pl 'kafka-ui-e2e-checks' test -Dsurefire.suiteXmlFiles='src/test/resources/smoke.xml' -Pprod
- name: Generate allure report
uses: simple-elf/allure-report-action@master
if: always()
diff --git a/kafka-ui-e2e-checks/README.md b/kafka-ui-e2e-checks/README.md
index 42f17fc84a..651e3c1143 100644
--- a/kafka-ui-e2e-checks/README.md
+++ b/kafka-ui-e2e-checks/README.md
@@ -1,6 +1,6 @@
### E2E UI automation for Kafka-ui
-This repository is for E2E UI automation.
+This repository is for E2E UI automation.
### Table of Contents
@@ -16,28 +16,42 @@ This repository is for E2E UI automation.
- [How to develop](#how-to-develop)
### Prerequisites
+
- Docker & Docker-compose
- Java (install aarch64 jdk if you have M1/arm chip)
- Maven
-
+
### How to install
+
```
git clone https://github.com/provectus/kafka-ui.git
cd kafka-ui-e2e-checks
docker pull selenoid/vnc:chrome_86.0
```
+
### How to run checks
-1. Run `kafka-ui`:
+1. Run `kafka-ui`:
+
```
cd kafka-ui
docker-compose -f documentation/compose/e2e-tests.yaml up -d
```
-2. Run tests using your QaseIO API token as environment variable (put instead %s into command below)
+
+2. Run the Smoke test suite using your QaseIO API token as an environment variable (substitute it for %s in the command below):
+
```
-./mvnw -DQASEIO_API_TOKEN='%s' -pl '!kafka-ui-api' test -Pprod
+./mvnw -DQASEIO_API_TOKEN='%s' -pl 'kafka-ui-e2e-checks' test -Dsurefire.suiteXmlFiles='src/test/resources/smoke.xml' -Pprod
```
-3. To run tests on your local Chrome browser just add next VM option to the Run Configuration
+
+3. Run the Regression test suite using your QaseIO API token as an environment variable (substitute it for %s in the command below):
+
+```
+./mvnw -DQASEIO_API_TOKEN='%s' -pl 'kafka-ui-e2e-checks' test -Dsurefire.suiteXmlFiles='src/test/resources/regression.xml' -Pprod
+```
+
+4. To run tests in your local Chrome browser, add the following VM option to the Run Configuration:
+
```
-Dbrowser=local
```
@@ -47,25 +61,40 @@ docker-compose -f documentation/compose/e2e-tests.yaml up -d
Reports are in the `allure-results` folder.
If you have installed the [allure commandline](https://www.npmjs.com/package/allure-commandline),
you can see the allure report with:
+
```
allure serve
```
+
### Screenshots
Reference screenshots are in `SCREENSHOTS_FOLDER` (default: `kafka-ui-e2e-checks/screenshots`)
### How to develop
-> ⚠️ todo
-### Setting for different environments
-> ⚠️ todo
-### Test Data
-> ⚠️ todo
-### Actions
-> ⚠️ todo
-### Checks
-> ⚠️ todo
-### Parallelization
-> ⚠️ todo
-### Tips
- - install `Selenium UI Testing plugin` in IDEA
+
+> ⚠️ todo
+
+### Setting for different environments
+
+> ⚠️ todo
+
+### Test Data
+
+> ⚠️ todo
+
+### Actions
+
+> ⚠️ todo
+
+### Checks
+
+> ⚠️ todo
+
+### Parallelization
+
+> ⚠️ todo
+
+### Tips
+
+- install `Selenium UI Testing plugin` in IDEA
diff --git a/kafka-ui-e2e-checks/pom.xml b/kafka-ui-e2e-checks/pom.xml
index 1d1aaf3d5f..33ee947af0 100644
--- a/kafka-ui-e2e-checks/pom.xml
+++ b/kafka-ui-e2e-checks/pom.xml
@@ -1,42 +1,34 @@
-
    <parent>
        <artifactId>kafka-ui</artifactId>
        <groupId>com.provectus</groupId>
        <version>0.0.1-SNAPSHOT</version>
    </parent>
-    <modelVersion>4.0.0</modelVersion>
+    <modelVersion>4.0.0</modelVersion>
    <artifactId>kafka-ui-e2e-checks</artifactId>
+
    <properties>
+        <maven.surefire-plugin.version>3.0.0-M8</maven.surefire-plugin.version>
        <kafka-ui-contract>${project.version}</kafka-ui-contract>
-        <aspectj.version>1.9.9.1</aspectj.version>
-        <allure.version>2.18.1</allure.version>
-        <json-smart.version>2.4.8</json-smart.version>
+        <testcontainers.version>1.17.6</testcontainers.version>
+        <junit.platform.version>1.9.2</junit.platform.version>
        <selenide.version>6.6.3</selenide.version>
+        <testng.version>7.6.1</testng.version>
+        <allure.version>2.20.1</allure.version>
+        <aspectj.version>1.9.9.1</aspectj.version>
        <assertj.version>3.23.1</assertj.version>
-        <google.auto-service.version>1.0.1</google.auto-service.version>
        <hamcrest.version>2.2</hamcrest.version>
        <slf4j.version>1.7.36</slf4j.version>
-        <allure.java-commons.version>2.20.1</allure.java-commons.version>
        <dotenv.version>2.3.1</dotenv.version>
-        <allure.maven-plugin.version>2.6</allure.maven-plugin.version>
-        <ashot.version>1.5.4</ashot.version>
-        <allure.screendiff-plugin.version>2.18.1</allure.screendiff-plugin.version>
-        <maven.surefire-plugin.version>2.22.2</maven.surefire-plugin.version>
-        <allure-maven.version>2.10.0</allure-maven.version>
        <qase.io.version>3.3.1</qase.io.version>
        2.1.3
    </properties>
    <dependencies>
-        <dependency>
-            <groupId>net.minidev</groupId>
-            <artifactId>json-smart</artifactId>
-            <version>${json-smart.version}</version>
-        </dependency>
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka_2.13</artifactId>
@@ -122,23 +114,43 @@
        <dependency>
            <groupId>org.testcontainers</groupId>
            <artifactId>testcontainers</artifactId>
+            <version>${testcontainers.version}</version>
        </dependency>
-        <dependency>
-            <groupId>io.qameta.allure</groupId>
-            <artifactId>allure-junit5</artifactId>
-            <version>${allure.version}</version>
-        </dependency>
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>selenium</artifactId>
+            <version>${testcontainers.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.junit.platform</groupId>
+            <artifactId>junit-platform-launcher</artifactId>
+            <version>${junit.platform.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.junit.platform</groupId>
+            <artifactId>junit-platform-engine</artifactId>
+            <version>${junit.platform.version}</version>
+        </dependency>
        <dependency>
            <groupId>com.codeborne</groupId>
            <artifactId>selenide</artifactId>
            <version>${selenide.version}</version>
        </dependency>
+        <dependency>
+            <groupId>org.testng</groupId>
+            <artifactId>testng</artifactId>
+            <version>${testng.version}</version>
+        </dependency>
        <dependency>
            <groupId>io.qameta.allure</groupId>
            <artifactId>allure-selenide</artifactId>
            <version>${allure.version}</version>
        </dependency>
+        <dependency>
+            <groupId>io.qameta.allure</groupId>
+            <artifactId>allure-testng</artifactId>
+            <version>${allure.version}</version>
+        </dependency>
        <dependency>
            <groupId>org.hamcrest</groupId>
            <artifactId>hamcrest</artifactId>
@@ -150,20 +162,9 @@
            <version>${assertj.version}</version>
        </dependency>
-        <dependency>
-            <groupId>com.google.auto.service</groupId>
-            <artifactId>auto-service</artifactId>
-            <version>${google.auto-service.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.junit.jupiter</groupId>
-            <artifactId>junit-jupiter-api</artifactId>
-            <version>${junit.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.junit.jupiter</groupId>
-            <artifactId>junit-jupiter-engine</artifactId>
-            <version>${junit.version}</version>
-            <scope>test</scope>
-        </dependency>
+        <dependency>
+            <groupId>org.aspectj</groupId>
+            <artifactId>aspectjrt</artifactId>
+            <version>${aspectj.version}</version>
+        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
@@ -175,61 +176,16 @@
            <artifactId>lombok</artifactId>
            <version>${org.projectlombok.version}</version>
        </dependency>
-        <dependency>
-            <groupId>org.aspectj</groupId>
-            <artifactId>aspectjrt</artifactId>
-            <version>${aspectj.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.testcontainers</groupId>
-            <artifactId>junit-jupiter</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>io.qameta.allure</groupId>
-            <artifactId>allure-java-commons</artifactId>
-            <version>${allure.java-commons.version}</version>
-        </dependency>
        <dependency>
            <groupId>io.github.cdimascio</groupId>
            <artifactId>dotenv-java</artifactId>
            <version>${dotenv.version}</version>
        </dependency>
-        <dependency>
-            <groupId>org.junit.platform</groupId>
-            <artifactId>junit-platform-launcher</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>ru.yandex.qatools.allure</groupId>
-            <artifactId>allure-maven-plugin</artifactId>
-            <version>${allure.maven-plugin.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>ru.yandex.qatools.ashot</groupId>
-            <artifactId>ashot</artifactId>
-            <version>${ashot.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.seleniumhq.selenium</groupId>
-                    <artifactId>selenium-remote-driver</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>io.qameta.allure.plugins</groupId>
-            <artifactId>screen-diff-plugin</artifactId>
-            <version>${allure.screendiff-plugin.version}</version>
-        </dependency>
        <dependency>
            <groupId>com.provectus</groupId>
            <artifactId>kafka-ui-contract</artifactId>
            <version>${kafka-ui-contract}</version>
        </dependency>
-        <dependency>
-            <groupId>org.testcontainers</groupId>
-            <artifactId>selenium</artifactId>
-            <scope>test</scope>
-        </dependency>
        <dependency>
            <groupId>io.qase</groupId>
            <artifactId>qase-api</artifactId>
@@ -252,6 +208,13 @@
true
+            <dependencies>
+                <dependency>
+                    <groupId>org.apache.maven.surefire</groupId>
+                    <artifactId>surefire-testng</artifactId>
+                    <version>${maven.surefire-plugin.version}</version>
+                </dependency>
+            </dependencies>
        </plugin>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
@@ -269,18 +232,16 @@
            <artifactId>maven-surefire-plugin</artifactId>
            <version>${maven.surefire-plugin.version}</version>
            <configuration>
-                <testFailureIgnore>false</testFailureIgnore>
-                <properties>
-                    <property>
-                        <name>junit.jupiter.extensions.autodetection.enabled</name>
-                        <value>true</value>
-                    </property>
-                </properties>
                <argLine>
                    -javaagent:"${settings.localRepository}/org/aspectj/aspectjweaver/${aspectj.version}/aspectjweaver-${aspectj.version}.jar"
                </argLine>
            </configuration>
            <dependencies>
+                <dependency>
+                    <groupId>org.apache.maven.surefire</groupId>
+                    <artifactId>surefire-testng</artifactId>
+                    <version>${maven.surefire-plugin.version}</version>
+                </dependency>
                <dependency>
                    <groupId>org.aspectj</groupId>
                    <artifactId>aspectjweaver</artifactId>
@@ -291,11 +252,7 @@
        <plugin>
            <groupId>io.qameta.allure</groupId>
            <artifactId>allure-maven</artifactId>
-            <version>${allure-maven.version}</version>
-        </plugin>
-        <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-compiler-plugin</artifactId>
+            <version>2.10.0</version>
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Connector.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Connector.java
index 9e30ba9f19..48088cdf91 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Connector.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Connector.java
@@ -8,5 +8,4 @@ import lombok.experimental.Accessors;
public class Connector {
private String name, config;
-
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Schema.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Schema.java
index a9c7dca75b..cd573037ba 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Schema.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Schema.java
@@ -4,28 +4,28 @@ import com.provectus.kafka.ui.api.model.SchemaType;
import lombok.Data;
import lombok.experimental.Accessors;
-import static org.apache.commons.lang.RandomStringUtils.randomAlphabetic;
+import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
@Data
@Accessors(chain = true)
public class Schema {
- private String name,valuePath;
+ private String name, valuePath;
private SchemaType type;
- public static Schema createSchemaAvro(){
+ public static Schema createSchemaAvro() {
return new Schema().setName("schema_avro-" + randomAlphabetic(5))
.setType(SchemaType.AVRO)
.setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schema_avro_value.json");
}
- public static Schema createSchemaJson(){
+ public static Schema createSchemaJson() {
return new Schema().setName("schema_json-" + randomAlphabetic(5))
.setType(SchemaType.JSON)
.setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schema_Json_Value.json");
}
- public static Schema createSchemaProtobuf(){
+ public static Schema createSchemaProtobuf() {
return new Schema().setName("schema_protobuf-" + randomAlphabetic(5))
.setType(SchemaType.PROTOBUF)
.setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schema_protobuf_value.txt");
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java
index 16b3ef6e2f..21486a93f1 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java
@@ -1,15 +1,16 @@
package com.provectus.kafka.ui.models;
-import com.provectus.kafka.ui.pages.topic.enums.CleanupPolicyValue;
-import com.provectus.kafka.ui.pages.topic.enums.CustomParameterType;
-import com.provectus.kafka.ui.pages.topic.enums.MaxSizeOnDisk;
-import com.provectus.kafka.ui.pages.topic.enums.TimeToRetain;
+import com.provectus.kafka.ui.pages.topics.enums.CleanupPolicyValue;
+import com.provectus.kafka.ui.pages.topics.enums.CustomParameterType;
+import com.provectus.kafka.ui.pages.topics.enums.MaxSizeOnDisk;
+import com.provectus.kafka.ui.pages.topics.enums.TimeToRetain;
import lombok.Data;
import lombok.experimental.Accessors;
@Data
@Accessors(chain = true)
public class Topic {
+
private String name, timeToRetainData, maxMessageBytes, messageKey, messageContent, customParameterValue;
private int numberOfPartitions;
private CustomParameterType customParameterType;
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
index afe95432ca..daea5c0d54 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
@@ -1,103 +1,104 @@
package com.provectus.kafka.ui.pages;
-import static com.codeborne.selenide.Selenide.$$x;
-import static com.codeborne.selenide.Selenide.$x;
-
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.ElementsCollection;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.utilities.WebUtils;
-import java.time.Duration;
import lombok.extern.slf4j.Slf4j;
+import java.time.Duration;
+
+import static com.codeborne.selenide.Selenide.$$x;
+import static com.codeborne.selenide.Selenide.$x;
+
@Slf4j
public abstract class BasePage extends WebUtils {
- protected SelenideElement loadingSpinner = $x("//div[@role='progressbar']");
- protected SelenideElement submitBtn = $x("//button[@type='submit']");
- protected SelenideElement tableGrid = $x("//table");
- protected SelenideElement dotMenuBtn = $x("//button[@aria-label='Dropdown Toggle']");
- protected SelenideElement alertHeader = $x("//div[@role='alert']//div[@role='heading']");
- protected SelenideElement alertMessage = $x("//div[@role='alert']//div[@role='contentinfo']");
- protected SelenideElement confirmationMdl = $x("//div[text()= 'Confirm the action']/..");
- protected SelenideElement confirmBtn = $x("//button[contains(text(),'Confirm')]");
- protected SelenideElement cancelBtn = $x("//button[contains(text(),'Cancel')]");
- protected ElementsCollection allGridItems = $$x("//tr[@class]");
- protected String summaryCellLocator = "//div[contains(text(),'%s')]";
- protected String tableElementNameLocator = "//tbody//a[contains(text(),'%s')]";
- protected String columnHeaderLocator = "//table//tr/th//div[text()='%s']";
+ protected SelenideElement loadingSpinner = $x("//div[@role='progressbar']");
+ protected SelenideElement submitBtn = $x("//button[@type='submit']");
+ protected SelenideElement tableGrid = $x("//table");
+ protected SelenideElement dotMenuBtn = $x("//button[@aria-label='Dropdown Toggle']");
+ protected SelenideElement alertHeader = $x("//div[@role='alert']//div[@role='heading']");
+ protected SelenideElement alertMessage = $x("//div[@role='alert']//div[@role='contentinfo']");
+ protected SelenideElement confirmationMdl = $x("//div[text()= 'Confirm the action']/..");
+ protected SelenideElement confirmBtn = $x("//button[contains(text(),'Confirm')]");
+ protected SelenideElement cancelBtn = $x("//button[contains(text(),'Cancel')]");
+ protected ElementsCollection allGridItems = $$x("//tr[@class]");
+ protected String summaryCellLocator = "//div[contains(text(),'%s')]";
+ protected String tableElementNameLocator = "//tbody//a[contains(text(),'%s')]";
+ protected String columnHeaderLocator = "//table//tr/th//div[text()='%s']";
- protected void waitUntilSpinnerDisappear() {
- log.debug("\nwaitUntilSpinnerDisappear");
- if(isVisible(loadingSpinner)){
- loadingSpinner.shouldBe(Condition.disappear, Duration.ofSeconds(30));
- }
- }
-
- protected void clickSubmitBtn() {
- clickByJavaScript(submitBtn);
- }
-
- protected SelenideElement getTableElement(String elementName) {
- log.debug("\ngetTableElement: {}", elementName);
- return $x(String.format(tableElementNameLocator, elementName));
- }
-
- protected String getAlertHeader() {
- log.debug("\ngetAlertHeader");
- String result = alertHeader.shouldBe(Condition.visible).getText();
- log.debug("-> {}", result);
- return result;
- }
-
- protected String getAlertMessage() {
- log.debug("\ngetAlertMessage");
- String result = alertMessage.shouldBe(Condition.visible).getText();
- log.debug("-> {}", result);
- return result;
- }
-
- protected boolean isAlertVisible(AlertHeader header) {
- log.debug("\nisAlertVisible: {}", header.toString());
- boolean result = getAlertHeader().equals(header.toString());
- log.debug("-> {}", result);
- return result;
- }
-
- protected boolean isAlertVisible(AlertHeader header, String message) {
- log.debug("\nisAlertVisible: {} {}", header, message);
- boolean result = isAlertVisible(header) && getAlertMessage().equals(message);
- log.debug("-> {}", result);
- return result;
- }
-
- protected void clickConfirmButton() {
- confirmBtn.shouldBe(Condition.enabled).click();
- confirmBtn.shouldBe(Condition.disappear);
- }
-
- protected void clickCancelButton() {
- cancelBtn.shouldBe(Condition.enabled).click();
- cancelBtn.shouldBe(Condition.disappear);
- }
-
- protected boolean isConfirmationModalVisible() {
- return isVisible(confirmationMdl);
- }
-
- public enum AlertHeader {
- SUCCESS("Success"),
- VALIDATION_ERROR("Validation Error"),
- BAD_REQUEST("400 Bad Request");
-
- private final String value;
-
- AlertHeader(String value) {
- this.value = value;
+ protected void waitUntilSpinnerDisappear() {
+ log.debug("\nwaitUntilSpinnerDisappear");
+ if (isVisible(loadingSpinner)) {
+ loadingSpinner.shouldBe(Condition.disappear, Duration.ofSeconds(30));
+ }
}
- public String toString() {
- return value;
+ protected void clickSubmitBtn() {
+ clickByJavaScript(submitBtn);
+ }
+
+ protected SelenideElement getTableElement(String elementName) {
+ log.debug("\ngetTableElement: {}", elementName);
+ return $x(String.format(tableElementNameLocator, elementName));
+ }
+
+ protected String getAlertHeader() {
+ log.debug("\ngetAlertHeader");
+ String result = alertHeader.shouldBe(Condition.visible).getText();
+ log.debug("-> {}", result);
+ return result;
+ }
+
+ protected String getAlertMessage() {
+ log.debug("\ngetAlertMessage");
+ String result = alertMessage.shouldBe(Condition.visible).getText();
+ log.debug("-> {}", result);
+ return result;
+ }
+
+ protected boolean isAlertVisible(AlertHeader header) {
+ log.debug("\nisAlertVisible: {}", header.toString());
+ boolean result = getAlertHeader().equals(header.toString());
+ log.debug("-> {}", result);
+ return result;
+ }
+
+ protected boolean isAlertVisible(AlertHeader header, String message) {
+ log.debug("\nisAlertVisible: {} {}", header, message);
+ boolean result = isAlertVisible(header) && getAlertMessage().equals(message);
+ log.debug("-> {}", result);
+ return result;
+ }
+
+ protected void clickConfirmButton() {
+ confirmBtn.shouldBe(Condition.enabled).click();
+ confirmBtn.shouldBe(Condition.disappear);
+ }
+
+ protected void clickCancelButton() {
+ cancelBtn.shouldBe(Condition.enabled).click();
+ cancelBtn.shouldBe(Condition.disappear);
+ }
+
+ protected boolean isConfirmationModalVisible() {
+ return isVisible(confirmationMdl);
+ }
+
+ public enum AlertHeader {
+ SUCCESS("Success"),
+ VALIDATION_ERROR("Validation Error"),
+ BAD_REQUEST("400 Bad Request");
+
+ private final String value;
+
+ AlertHeader(String value) {
+ this.value = value;
+ }
+
+ public String toString() {
+ return value;
+ }
}
- }
}
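As a hedged illustration of the reformatted helpers above (the page class and expected alert message here are invented for the example): BasePage's members are protected, so they are consumed from subclasses like this:

```java
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;

// Hypothetical page object showing the intended use of BasePage's protected helpers.
public class ExamplePage extends BasePage {

  @Step
  public ExamplePage submitAndConfirm() {
    clickSubmitBtn();
    if (isConfirmationModalVisible()) {
      clickConfirmButton();
    }
    return this;
  }

  @Step
  public boolean isSuccessAlertShown(String expectedMessage) {
    // expectedMessage is an example value supplied by the test.
    return isAlertVisible(AlertHeader.SUCCESS, expectedMessage);
  }
}
```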
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/NaviSideBar.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/NaviSideBar.java
index b70bf7755a..3c5b0fe6d9 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/NaviSideBar.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/NaviSideBar.java
@@ -1,16 +1,17 @@
package com.provectus.kafka.ui.pages;
-import static com.codeborne.selenide.Selenide.$x;
-import static com.provectus.kafka.ui.settings.BaseSource.CLUSTER_NAME;
-
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import io.qameta.allure.Step;
+
import java.time.Duration;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
+import static com.codeborne.selenide.Selenide.$x;
+import static com.provectus.kafka.ui.settings.BaseSource.CLUSTER_NAME;
+
public class NaviSideBar extends BasePage {
protected SelenideElement dashboardMenuItem = $x("//a[@title='Dashboard']");
@@ -45,6 +46,13 @@ public class NaviSideBar extends BasePage {
return this;
}
+ public List<SelenideElement> getAllMenuButtons() {
+ expandCluster(CLUSTER_NAME);
+ return Stream.of(SideMenuOption.values())
+ .map(option -> $x(String.format(sideMenuOptionElementLocator, option.value)))
+ .collect(Collectors.toList());
+ }
+
public enum SideMenuOption {
DASHBOARD("Dashboard"),
BROKERS("Brokers"),
@@ -60,11 +68,4 @@ public class NaviSideBar extends BasePage {
this.value = value;
}
}
-
- public List<SelenideElement> getAllMenuButtons() {
- expandCluster(CLUSTER_NAME);
- return Stream.of(SideMenuOption.values())
- .map(option -> $x(String.format(sideMenuOptionElementLocator, option.value)))
- .collect(Collectors.toList());
- }
}
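The relocated getAllMenuButtons() returns one SelenideElement per SideMenuOption; a brief sketch (not from the patch) of the visibility check it enables:

```java
import com.codeborne.selenide.Condition;
import com.provectus.kafka.ui.pages.NaviSideBar;

public class NaviSideBarUsageSketch {

  static void checkAllMenuButtonsVisible(NaviSideBar naviSideBar) {
    // One element per SideMenuOption value; assert each one is rendered.
    naviSideBar.getAllMenuButtons()
        .forEach(button -> button.shouldBe(Condition.visible));
  }
}
```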
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/TopPanel.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/TopPanel.java
index 5e5a317647..28de4fe058 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/TopPanel.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/TopPanel.java
@@ -1,12 +1,14 @@
package com.provectus.kafka.ui.pages;
-import static com.codeborne.selenide.Selenide.$x;
-
import com.codeborne.selenide.SelenideElement;
+
import java.util.Arrays;
import java.util.List;
-public class TopPanel extends BasePage{
+import static com.codeborne.selenide.Selenide.$x;
+
+public class TopPanel extends BasePage {
+
protected SelenideElement kafkaLogo = $x("//a[contains(text(),'UI for Apache Kafka')]");
protected SelenideElement kafkaVersion = $x("//a[@title='Current commit']");
protected SelenideElement logOutBtn = $x("//button[contains(text(),'Log out')]");
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java
index 70bba6047a..7b37d6709c 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java
@@ -1,40 +1,41 @@
package com.provectus.kafka.ui.pages.brokers;
-import static com.codeborne.selenide.Selenide.$$x;
-import static com.codeborne.selenide.Selenide.$x;
-
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
+
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
+import static com.codeborne.selenide.Selenide.$$x;
+import static com.codeborne.selenide.Selenide.$x;
+
public class BrokersConfigTab extends BasePage {
- protected List<SelenideElement> editBtn = $$x("//button[@aria-label='editAction']");
- protected SelenideElement searchByKeyField = $x("//input[@placeholder='Search by Key']");
+ protected List<SelenideElement> editBtn = $$x("//button[@aria-label='editAction']");
+ protected SelenideElement searchByKeyField = $x("//input[@placeholder='Search by Key']");
- @Step
- public BrokersConfigTab waitUntilScreenReady(){
- waitUntilSpinnerDisappear();
- searchByKeyField.shouldBe(Condition.visible);
- return this;
- }
+ @Step
+ public BrokersConfigTab waitUntilScreenReady() {
+ waitUntilSpinnerDisappear();
+ searchByKeyField.shouldBe(Condition.visible);
+ return this;
+ }
- @Step
- public boolean isSearchByKeyVisible() {
- return isVisible(searchByKeyField);
- }
+ @Step
+ public boolean isSearchByKeyVisible() {
+ return isVisible(searchByKeyField);
+ }
- public List<SelenideElement> getColumnHeaders() {
- return Stream.of("Key", "Value", "Source")
- .map(name -> $x(String.format(columnHeaderLocator, name)))
- .collect(Collectors.toList());
- }
+ public List<SelenideElement> getColumnHeaders() {
+ return Stream.of("Key", "Value", "Source")
+ .map(name -> $x(String.format(columnHeaderLocator, name)))
+ .collect(Collectors.toList());
+ }
- public List<SelenideElement> getEditButtons() {
- return editBtn;
- }
+ public List<SelenideElement> getEditButtons() {
+ return editBtn;
+ }
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersDetails.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersDetails.java
index 4eca65f1f4..8cc3dd98ba 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersDetails.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersDetails.java
@@ -1,91 +1,92 @@
package com.provectus.kafka.ui.pages.brokers;
-import static com.codeborne.selenide.Selenide.$;
-import static com.codeborne.selenide.Selenide.$x;
-
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
+import org.openqa.selenium.By;
+
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import org.openqa.selenium.By;
+
+import static com.codeborne.selenide.Selenide.$;
+import static com.codeborne.selenide.Selenide.$x;
public class BrokersDetails extends BasePage {
- protected SelenideElement logDirectoriesTab = $x("//a[text()='Log directories']");
- protected SelenideElement metricsTab = $x("//a[text()='Metrics']");
- protected String brokersTabLocator = "//a[text()='%s']";
+ protected SelenideElement logDirectoriesTab = $x("//a[text()='Log directories']");
+ protected SelenideElement metricsTab = $x("//a[text()='Metrics']");
+ protected String brokersTabLocator = "//a[text()='%s']";
- @Step
- public BrokersDetails waitUntilScreenReady() {
- waitUntilSpinnerDisappear();
- Arrays.asList(logDirectoriesTab, metricsTab).forEach(element -> element.shouldBe(Condition.visible));
- return this;
- }
-
- @Step
- public BrokersDetails openDetailsTab(DetailsTab menu) {
- $(By.linkText(menu.toString())).shouldBe(Condition.enabled).click();
- waitUntilSpinnerDisappear();
- return this;
- }
-
- private List<SelenideElement> getVisibleColumnHeaders() {
- return Stream.of("Name", "Topics", "Error", "Partitions")
- .map(name -> $x(String.format(columnHeaderLocator, name)))
- .collect(Collectors.toList());
- }
-
- private List<SelenideElement> getEnabledColumnHeaders() {
- return Stream.of("Name", "Error")
- .map(name -> $x(String.format(columnHeaderLocator, name)))
- .collect(Collectors.toList());
- }
-
- private List<SelenideElement> getVisibleSummaryCells() {
- return Stream.of("Segment Size", "Segment Count", "Port", "Host")
- .map(name -> $x(String.format(summaryCellLocator, name)))
- .collect(Collectors.toList());
- }
-
- private List<SelenideElement> getDetailsTabs() {
- return Stream.of(DetailsTab.values())
- .map(name -> $x(String.format(brokersTabLocator, name)))
- .collect(Collectors.toList());
- }
-
- @Step
- public List<SelenideElement> getAllEnabledElements() {
- List<SelenideElement> enabledElements = new ArrayList<>(getEnabledColumnHeaders());
- enabledElements.addAll(getDetailsTabs());
- return enabledElements;
- }
-
- @Step
- public List<SelenideElement> getAllVisibleElements() {
- List<SelenideElement> visibleElements = new ArrayList<>(getVisibleSummaryCells());
- visibleElements.addAll(getVisibleColumnHeaders());
- visibleElements.addAll(getDetailsTabs());
- return visibleElements;
- }
-
- public enum DetailsTab {
- LOG_DIRECTORIES("Log directories"),
- CONFIGS("Configs"),
- METRICS("Metrics");
-
- private final String value;
-
- DetailsTab(String value) {
- this.value = value;
+ @Step
+ public BrokersDetails waitUntilScreenReady() {
+ waitUntilSpinnerDisappear();
+ Arrays.asList(logDirectoriesTab, metricsTab).forEach(element -> element.shouldBe(Condition.visible));
+ return this;
}
- public String toString() {
- return value;
+ @Step
+ public BrokersDetails openDetailsTab(DetailsTab menu) {
+ $(By.linkText(menu.toString())).shouldBe(Condition.enabled).click();
+ waitUntilSpinnerDisappear();
+ return this;
+ }
+
+ private List<SelenideElement> getVisibleColumnHeaders() {
+ return Stream.of("Name", "Topics", "Error", "Partitions")
+ .map(name -> $x(String.format(columnHeaderLocator, name)))
+ .collect(Collectors.toList());
+ }
+
+ private List<SelenideElement> getEnabledColumnHeaders() {
+ return Stream.of("Name", "Error")
+ .map(name -> $x(String.format(columnHeaderLocator, name)))
+ .collect(Collectors.toList());
+ }
+
+ private List<SelenideElement> getVisibleSummaryCells() {
+ return Stream.of("Segment Size", "Segment Count", "Port", "Host")
+ .map(name -> $x(String.format(summaryCellLocator, name)))
+ .collect(Collectors.toList());
+ }
+
+ private List<SelenideElement> getDetailsTabs() {
+ return Stream.of(DetailsTab.values())
+ .map(name -> $x(String.format(brokersTabLocator, name)))
+ .collect(Collectors.toList());
+ }
+
+ @Step
+ public List<SelenideElement> getAllEnabledElements() {
+ List<SelenideElement> enabledElements = new ArrayList<>(getEnabledColumnHeaders());
+ enabledElements.addAll(getDetailsTabs());
+ return enabledElements;
+ }
+
+ @Step
+ public List<SelenideElement> getAllVisibleElements() {
+ List<SelenideElement> visibleElements = new ArrayList<>(getVisibleSummaryCells());
+ visibleElements.addAll(getVisibleColumnHeaders());
+ visibleElements.addAll(getDetailsTabs());
+ return visibleElements;
+ }
+
+ public enum DetailsTab {
+ LOG_DIRECTORIES("Log directories"),
+ CONFIGS("Configs"),
+ METRICS("Metrics");
+
+ private final String value;
+
+ DetailsTab(String value) {
+ this.value = value;
+ }
+
+ public String toString() {
+ return value;
+ }
}
- }
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java
index 239a635934..6db2d20ad6 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java
@@ -1,123 +1,124 @@
package com.provectus.kafka.ui.pages.brokers;
-import static com.codeborne.selenide.Selenide.$x;
-
import com.codeborne.selenide.CollectionCondition;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
+
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
+import static com.codeborne.selenide.Selenide.$x;
+
public class BrokersList extends BasePage {
- protected SelenideElement brokersListHeader = $x("//h1[text()='Brokers']");
+ protected SelenideElement brokersListHeader = $x("//h1[text()='Brokers']");
- @Step
- public BrokersList waitUntilScreenReady() {
- waitUntilSpinnerDisappear();
- brokersListHeader.shouldBe(Condition.visible);
- return this;
- }
-
- @Step
- public BrokersList openBroker(int brokerId) {
- getBrokerItem(brokerId).openItem();
- return this;
- }
-
- private List<SelenideElement> getUptimeSummaryCells() {
- return Stream.of("Broker Count", "Active Controller", "Version")
- .map(name -> $x(String.format(summaryCellLocator, name)))
- .collect(Collectors.toList());
- }
-
- private List<SelenideElement> getPartitionsSummaryCells() {
- return Stream.of("Online", "URP", "In Sync Replicas", "Out Of Sync Replicas")
- .map(name -> $x(String.format(summaryCellLocator, name)))
- .collect(Collectors.toList());
- }
-
- @Step
- public List<SelenideElement> getAllVisibleElements() {
- List<SelenideElement> visibleElements = new ArrayList<>(getUptimeSummaryCells());
- visibleElements.addAll(getPartitionsSummaryCells());
- return visibleElements;
- }
-
- private List<SelenideElement> getEnabledColumnHeaders() {
- return Stream.of("Broker ID", "Segment Size", "Segment Count", "Port", "Host")
- .map(name -> $x(String.format(columnHeaderLocator, name)))
- .collect(Collectors.toList());
- }
-
- @Step
- public List<SelenideElement> getAllEnabledElements() {
- return getEnabledColumnHeaders();
- }
-
- private List<BrokerGridItem> initGridItems() {
- List<BrokerGridItem> gridItemList = new ArrayList<>();
- allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
- .forEach(item -> gridItemList.add(new BrokersList.BrokerGridItem(item)));
- return gridItemList;
- }
-
- @Step
- public BrokerGridItem getBrokerItem(int id){
- return initGridItems().stream()
- .filter(e ->e.getId() == id)
- .findFirst().orElse(null);
- }
-
- @Step
- public List<BrokerGridItem> getAllBrokers(){
- return initGridItems();
- }
-
- public static class BrokerGridItem extends BasePage {
-
- private final SelenideElement element;
-
- public BrokerGridItem(SelenideElement element) {
- this.element = element;
- }
-
- private SelenideElement getIdElm() {
- return element.$x("./td[1]/div/a");
+ @Step
+ public BrokersList waitUntilScreenReady() {
+ waitUntilSpinnerDisappear();
+ brokersListHeader.shouldBe(Condition.visible);
+ return this;
}
@Step
- public int getId() {
- return Integer.parseInt(getIdElm().getText().trim());
+ public BrokersList openBroker(int brokerId) {
+ getBrokerItem(brokerId).openItem();
+ return this;
+ }
+
+ private List<SelenideElement> getUptimeSummaryCells() {
+ return Stream.of("Broker Count", "Active Controller", "Version")
+ .map(name -> $x(String.format(summaryCellLocator, name)))
+ .collect(Collectors.toList());
+ }
+
+ private List<SelenideElement> getPartitionsSummaryCells() {
+ return Stream.of("Online", "URP", "In Sync Replicas", "Out Of Sync Replicas")
+ .map(name -> $x(String.format(summaryCellLocator, name)))
+ .collect(Collectors.toList());
}
@Step
- public void openItem() {
- getIdElm().click();
+ public List<SelenideElement> getAllVisibleElements() {
+ List<SelenideElement> visibleElements = new ArrayList<>(getUptimeSummaryCells());
+ visibleElements.addAll(getPartitionsSummaryCells());
+ return visibleElements;
+ }
+
+ private List<SelenideElement> getEnabledColumnHeaders() {
+ return Stream.of("Broker ID", "Segment Size", "Segment Count", "Port", "Host")
+ .map(name -> $x(String.format(columnHeaderLocator, name)))
+ .collect(Collectors.toList());
}
@Step
- public int getSegmentSize(){
- return Integer.parseInt(element.$x("./td[2]").getText().trim());
+ public List<SelenideElement> getAllEnabledElements() {
+ return getEnabledColumnHeaders();
+ }
+
+ private List<BrokerGridItem> initGridItems() {
+ List<BrokerGridItem> gridItemList = new ArrayList<>();
+ allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ .forEach(item -> gridItemList.add(new BrokersList.BrokerGridItem(item)));
+ return gridItemList;
}
@Step
- public int getSegmentCount(){
- return Integer.parseInt(element.$x("./td[3]").getText().trim());
+ public BrokerGridItem getBrokerItem(int id) {
+ return initGridItems().stream()
+ .filter(e -> e.getId() == id)
+ .findFirst().orElseThrow();
}
@Step
- public int getPort(){
- return Integer.parseInt(element.$x("./td[4]").getText().trim());
+ public List<BrokerGridItem> getAllBrokers() {
+ return initGridItems();
}
- @Step
- public String getHost(){
- return element.$x("./td[5]").getText().trim();
+ public static class BrokerGridItem extends BasePage {
+
+ private final SelenideElement element;
+
+ public BrokerGridItem(SelenideElement element) {
+ this.element = element;
+ }
+
+ private SelenideElement getIdElm() {
+ return element.$x("./td[1]/div/a");
+ }
+
+ @Step
+ public int getId() {
+ return Integer.parseInt(getIdElm().getText().trim());
+ }
+
+ @Step
+ public void openItem() {
+ getIdElm().click();
+ }
+
+ @Step
+ public int getSegmentSize() {
+ return Integer.parseInt(element.$x("./td[2]").getText().trim());
+ }
+
+ @Step
+ public int getSegmentCount() {
+ return Integer.parseInt(element.$x("./td[3]").getText().trim());
+ }
+
+ @Step
+ public int getPort() {
+ return Integer.parseInt(element.$x("./td[4]").getText().trim());
+ }
+
+ @Step
+ public String getHost() {
+ return element.$x("./td[5]").getText().trim();
+ }
}
- }
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connector/ConnectorCreateForm.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/ConnectorCreateForm.java
similarity index 89%
rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connector/ConnectorCreateForm.java
rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/ConnectorCreateForm.java
index 093341cd64..7bc2aa88aa 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connector/ConnectorCreateForm.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/ConnectorCreateForm.java
@@ -1,12 +1,12 @@
-package com.provectus.kafka.ui.pages.connector;
-
-import static com.codeborne.selenide.Selenide.$x;
+package com.provectus.kafka.ui.pages.connectors;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
+import static com.codeborne.selenide.Selenide.$x;
+
public class ConnectorCreateForm extends BasePage {
protected SelenideElement nameField = $x("//input[@name='name']");
@@ -31,8 +31,8 @@ public class ConnectorCreateForm extends BasePage {
@Step
public ConnectorCreateForm clickSubmitButton() {
- clickSubmitBtn();
- waitUntilSpinnerDisappear();
- return this;
+ clickSubmitBtn();
+ waitUntilSpinnerDisappear();
+ return this;
}
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connector/ConnectorDetails.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/ConnectorDetails.java
similarity index 62%
rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connector/ConnectorDetails.java
rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/ConnectorDetails.java
index 291c4aa6f3..fbe1984ce3 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connector/ConnectorDetails.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/ConnectorDetails.java
@@ -1,28 +1,28 @@
-package com.provectus.kafka.ui.pages.connector;
-
-import static com.codeborne.selenide.Selenide.$x;
+package com.provectus.kafka.ui.pages.connectors;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
+import static com.codeborne.selenide.Selenide.$x;
+
public class ConnectorDetails extends BasePage {
- protected SelenideElement deleteBtn = $x("//li/div[contains(text(),'Delete')]");
- protected SelenideElement confirmBtnMdl = $x("//div[@role='dialog']//button[contains(text(),'Confirm')]");
- protected SelenideElement contentTextArea = $x("//textarea[@class='ace_text-input']");
- protected SelenideElement taskTab = $x("//a[contains(text(),'Tasks')]");
- protected SelenideElement configTab = $x("//a[contains(text(),'Config')]");
- protected SelenideElement configField = $x("//div[@id='config']");
- protected String connectorHeaderLocator = "//h1[contains(text(),'%s')]";
+ protected SelenideElement deleteBtn = $x("//li/div[contains(text(),'Delete')]");
+ protected SelenideElement confirmBtnMdl = $x("//div[@role='dialog']//button[contains(text(),'Confirm')]");
+ protected SelenideElement contentTextArea = $x("//textarea[@class='ace_text-input']");
+ protected SelenideElement taskTab = $x("//a[contains(text(),'Tasks')]");
+ protected SelenideElement configTab = $x("//a[contains(text(),'Config')]");
+ protected SelenideElement configField = $x("//div[@id='config']");
+ protected String connectorHeaderLocator = "//h1[contains(text(),'%s')]";
- @Step
- public ConnectorDetails waitUntilScreenReady() {
- waitUntilSpinnerDisappear();
- dotMenuBtn.shouldBe(Condition.visible);
- return this;
- }
+ @Step
+ public ConnectorDetails waitUntilScreenReady() {
+ waitUntilSpinnerDisappear();
+ dotMenuBtn.shouldBe(Condition.visible);
+ return this;
+ }
@Step
public ConnectorDetails openConfigTab() {
@@ -41,8 +41,8 @@ public class ConnectorDetails extends BasePage {
@Step
public ConnectorDetails clickSubmitButton() {
- clickSubmitBtn();
- return this;
+ clickSubmitBtn();
+ return this;
}
@Step
@@ -74,11 +74,11 @@ public class ConnectorDetails extends BasePage {
@Step
public boolean isConnectorHeaderVisible(String connectorName) {
- return isVisible($x(String.format(connectorHeaderLocator,connectorName)));
+ return isVisible($x(String.format(connectorHeaderLocator, connectorName)));
}
@Step
- public boolean isAlertWithMessageVisible(AlertHeader header, String message){
- return isAlertVisible(header, message);
+ public boolean isAlertWithMessageVisible(AlertHeader header, String message) {
+ return isAlertVisible(header, message);
}
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connector/KafkaConnectList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/KafkaConnectList.java
similarity index 77%
rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connector/KafkaConnectList.java
rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/KafkaConnectList.java
index b86734b2dc..3be1826511 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connector/KafkaConnectList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/KafkaConnectList.java
@@ -1,26 +1,26 @@
-package com.provectus.kafka.ui.pages.connector;
-
-import static com.codeborne.selenide.Selenide.$x;
+package com.provectus.kafka.ui.pages.connectors;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
+import static com.codeborne.selenide.Selenide.$x;
+
public class KafkaConnectList extends BasePage {
protected SelenideElement createConnectorBtn = $x("//button[contains(text(),'Create Connector')]");
- public KafkaConnectList(){
+ public KafkaConnectList() {
tableElementNameLocator = "//tbody//td[contains(text(),'%s')]";
}
@Step
public KafkaConnectList waitUntilScreenReady() {
- waitUntilSpinnerDisappear();
- createConnectorBtn.shouldBe(Condition.visible);
- return this;
+ waitUntilSpinnerDisappear();
+ createConnectorBtn.shouldBe(Condition.visible);
+ return this;
}
@Step
@@ -31,7 +31,7 @@ public class KafkaConnectList extends BasePage {
@Step
public KafkaConnectList openConnector(String connectorName) {
- getTableElement(connectorName).shouldBe(Condition.enabled).click();
+ getTableElement(connectorName).shouldBe(Condition.enabled).click();
return this;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumer/ConsumersDetails.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersDetails.java
similarity index 95%
rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumer/ConsumersDetails.java
rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersDetails.java
index dabdff88ec..240dc613c4 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumer/ConsumersDetails.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersDetails.java
@@ -1,11 +1,11 @@
-package com.provectus.kafka.ui.pages.consumer;
-
-import static com.codeborne.selenide.Selenide.$x;
+package com.provectus.kafka.ui.pages.consumers;
import com.codeborne.selenide.Condition;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
+import static com.codeborne.selenide.Selenide.$x;
+
public class ConsumersDetails extends BasePage {
protected String consumerIdHeaderLocator = "//h1[contains(text(),'%s')]";
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumer/ConsumersList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java
similarity index 84%
rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumer/ConsumersList.java
rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java
index 07824736be..b3a3be42a2 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumer/ConsumersList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java
@@ -1,9 +1,10 @@
-package com.provectus.kafka.ui.pages.consumer;
-
-import static com.codeborne.selenide.Selenide.$x;
+package com.provectus.kafka.ui.pages.consumers;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
+
+import static com.codeborne.selenide.Selenide.$x;
+
public class ConsumersList extends BasePage {
protected SelenideElement consumerListHeader = $x("//h1[text()='Consumers']");
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java
similarity index 95%
rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java
rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java
index ec735df65a..84c23d0ac3 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java
@@ -1,4 +1,4 @@
-package com.provectus.kafka.ui.pages.ksqldb;
+package com.provectus.kafka.ui.pages.ksqlDb;
import static com.codeborne.selenide.Selenide.$;
import static com.codeborne.selenide.Selenide.$x;
@@ -7,7 +7,7 @@ import com.codeborne.selenide.CollectionCondition;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
-import com.provectus.kafka.ui.pages.ksqldb.enums.KsqlMenuTabs;
+import com.provectus.kafka.ui.pages.ksqlDb.enums.KsqlMenuTabs;
import io.qameta.allure.Step;
import java.util.ArrayList;
import java.util.Arrays;
@@ -50,7 +50,7 @@ public class KsqlDbList extends BasePage {
public KsqlDbList.KsqlTablesGridItem getTableByName(String tableName) {
return initTablesItems().stream()
.filter(e -> e.getTableName().equals(tableName))
- .findFirst().orElse(null);
+ .findFirst().orElseThrow();
}
public static class KsqlTablesGridItem extends BasePage {
@@ -98,7 +98,7 @@ public class KsqlDbList extends BasePage {
public KsqlDbList.KsqlStreamsGridItem getStreamByName(String streamName) {
return initStreamsItems().stream()
.filter(e -> e.getStreamName().equals(streamName))
- .findFirst().orElse(null);
+ .findFirst().orElseThrow();
}
public static class KsqlStreamsGridItem extends BasePage {
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java
similarity index 97%
rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java
rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java
index 21ceacdbb7..e34fbedb1c 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java
@@ -1,4 +1,4 @@
-package com.provectus.kafka.ui.pages.ksqldb;
+package com.provectus.kafka.ui.pages.ksqlDb;
import static com.codeborne.selenide.Condition.visible;
import static com.codeborne.selenide.Selenide.$$x;
@@ -24,7 +24,7 @@ public class KsqlQueryForm extends BasePage {
protected SelenideElement queryAreaValue = $x("//div[@class='ace_content']");
protected SelenideElement queryArea = $x("//div[@id='ksql']/textarea[@class='ace_text-input']");
protected ElementsCollection ksqlGridItems = $$x("//tbody//tr");
- protected ElementsCollection keyField = $$x("//input[@aria-label='value']");
+ protected ElementsCollection keyField = $$x("//input[@aria-label='key']");
protected ElementsCollection valueField = $$x("//input[@aria-label='value']");
@Step
@@ -89,7 +89,7 @@ public class KsqlQueryForm extends BasePage {
public KsqlQueryForm.KsqlResponseGridItem getTableByName(String name) {
return initItems().stream()
.filter(e -> e.getName().equalsIgnoreCase(name))
- .findFirst().orElse(null);
+ .findFirst().orElseThrow();
}
public static class KsqlResponseGridItem extends BasePage {
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlMenuTabs.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/enums/KsqlMenuTabs.java
similarity index 64%
rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlMenuTabs.java
rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/enums/KsqlMenuTabs.java
index f3bb55d42b..bb719dc0f6 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlMenuTabs.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/enums/KsqlMenuTabs.java
@@ -1,16 +1,17 @@
-package com.provectus.kafka.ui.pages.ksqldb.enums;
+package com.provectus.kafka.ui.pages.ksqlDb.enums;
public enum KsqlMenuTabs {
+
TABLES("Table"),
STREAMS("Streams");
private final String value;
KsqlMenuTabs(String value) {
- this.value = value;
+ this.value = value;
}
public String toString() {
- return value;
+ return value;
}
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlQueryConfig.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/enums/KsqlQueryConfig.java
similarity index 58%
rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlQueryConfig.java
rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/enums/KsqlQueryConfig.java
index c918b07997..9f85837474 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlQueryConfig.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/enums/KsqlQueryConfig.java
@@ -1,18 +1,19 @@
-package com.provectus.kafka.ui.pages.ksqldb.enums;
+package com.provectus.kafka.ui.pages.ksqlDb.enums;
public enum KsqlQueryConfig {
+
SHOW_TABLES("show tables;"),
SHOW_STREAMS("show streams;"),
SELECT_ALL_FROM("SELECT * FROM %s\n" +
- "EMIT CHANGES;");
+ "EMIT CHANGES;");
private final String query;
KsqlQueryConfig(String query) {
- this.query = query;
+ this.query = query;
}
- public String getQuery(){
- return query;
+ public String getQuery() {
+ return query;
}
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Stream.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/models/Stream.java
similarity index 50%
rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Stream.java
rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/models/Stream.java
index f61b824386..4030a478c4 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Stream.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/models/Stream.java
@@ -1,4 +1,4 @@
-package com.provectus.kafka.ui.pages.ksqldb.models;
+package com.provectus.kafka.ui.pages.ksqlDb.models;
import lombok.Data;
import lombok.experimental.Accessors;
@@ -6,5 +6,6 @@ import lombok.experimental.Accessors;
@Data
@Accessors(chain = true)
public class Stream {
- private String name, topicName, valueFormat, partitions;
+
+ private String name, topicName, valueFormat, partitions;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Table.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/models/Table.java
similarity index 55%
rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Table.java
rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/models/Table.java
index cbb0c1d2a7..1856fffd85 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Table.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/models/Table.java
@@ -1,4 +1,4 @@
-package com.provectus.kafka.ui.pages.ksqldb.models;
+package com.provectus.kafka.ui.pages.ksqlDb.models;
import lombok.Data;
import lombok.experimental.Accessors;
@@ -6,5 +6,6 @@ import lombok.experimental.Accessors;
@Data
@Accessors(chain = true)
public class Table {
- private String name, streamName;
+
+ private String name, streamName;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schema/SchemaCreateForm.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaCreateForm.java
similarity index 76%
rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schema/SchemaCreateForm.java
rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaCreateForm.java
index d749b5feb8..b823b6b992 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schema/SchemaCreateForm.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaCreateForm.java
@@ -1,8 +1,4 @@
-package com.provectus.kafka.ui.pages.schema;
-
-import static com.codeborne.selenide.Selenide.$;
-import static com.codeborne.selenide.Selenide.$$x;
-import static com.codeborne.selenide.Selenide.$x;
+package com.provectus.kafka.ui.pages.schemas;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
@@ -10,10 +6,13 @@ import com.provectus.kafka.ui.api.model.CompatibilityLevel;
import com.provectus.kafka.ui.api.model.SchemaType;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
+
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
+import static com.codeborne.selenide.Selenide.*;
+
public class SchemaCreateForm extends BasePage {
protected SelenideElement schemaNameField = $x("//input[@name='subject']");
@@ -30,7 +29,7 @@ public class SchemaCreateForm extends BasePage {
protected String ddlElementLocator = "//li[@value='%s']";
@Step
- public SchemaCreateForm waitUntilScreenReady(){
+ public SchemaCreateForm waitUntilScreenReady() {
waitUntilSpinnerDisappear();
pageTitle.shouldBe(Condition.visible);
return this;
@@ -69,25 +68,25 @@ public class SchemaCreateForm extends BasePage {
}
@Step
- public SchemaCreateForm openSchemaVersionDdl(){
- schemaVersionDdl.shouldBe(Condition.enabled).click();
- return this;
+ public SchemaCreateForm openSchemaVersionDdl() {
+ schemaVersionDdl.shouldBe(Condition.enabled).click();
+ return this;
}
@Step
- public int getVersionsNumberFromList(){
- return elementsCompareVersionDdl.size();
+ public int getVersionsNumberFromList() {
+ return elementsCompareVersionDdl.size();
}
@Step
- public SchemaCreateForm selectVersionFromDropDown(int versionNumberDd){
- $x(String.format(ddlElementLocator,versionNumberDd)).shouldBe(Condition.visible).click();
- return this;
+ public SchemaCreateForm selectVersionFromDropDown(int versionNumberDd) {
+ $x(String.format(ddlElementLocator, versionNumberDd)).shouldBe(Condition.visible).click();
+ return this;
}
@Step
- public int getMarkedLinesNumber(){
- return visibleMarkers.size();
+ public int getMarkedLinesNumber() {
+ return visibleMarkers.size();
}
@Step
@@ -100,23 +99,22 @@ public class SchemaCreateForm extends BasePage {
@Step
public List<SelenideElement> getAllDetailsPageElements() {
- return Stream.of(compatibilityLevelList, newSchemaTextArea, latestSchemaTextArea, submitBtn, schemaTypeDdl)
- .collect(Collectors.toList());
+ return Stream.of(compatibilityLevelList, newSchemaTextArea, latestSchemaTextArea, submitBtn, schemaTypeDdl)
+ .collect(Collectors.toList());
}
@Step
- public boolean isSubmitBtnEnabled(){
- return isEnabled(submitBtn);
+ public boolean isSubmitBtnEnabled() {
+ return isEnabled(submitBtn);
}
@Step
- public boolean isSchemaDropDownEnabled(){
+ public boolean isSchemaDropDownEnabled() {
boolean enabled = true;
- try{
+ try {
String attribute = schemaTypeDdl.getAttribute("disabled");
enabled = false;
- }
- catch (Throwable ignored){
+ } catch (Throwable ignored) {
}
return enabled;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schema/SchemaDetails.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaDetails.java
similarity index 82%
rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schema/SchemaDetails.java
rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaDetails.java
index ce21133709..fc7013d46f 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schema/SchemaDetails.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaDetails.java
@@ -1,12 +1,12 @@
-package com.provectus.kafka.ui.pages.schema;
-
-import static com.codeborne.selenide.Selenide.$x;
+package com.provectus.kafka.ui.pages.schemas;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
+import static com.codeborne.selenide.Selenide.$x;
+
public class SchemaDetails extends BasePage {
protected SelenideElement actualVersionTextArea = $x("//div[@id='schema']");
@@ -33,12 +33,12 @@ public class SchemaDetails extends BasePage {
@Step
public boolean isSchemaHeaderVisible(String schemaName) {
- return isVisible($x(String.format(schemaHeaderLocator,schemaName)));
+ return isVisible($x(String.format(schemaHeaderLocator, schemaName)));
}
@Step
- public int getLatestVersion(){
- return Integer.parseInt(latestVersionField.getText());
+ public int getLatestVersion() {
+ return Integer.parseInt(latestVersionField.getText());
}
@Step
@@ -47,15 +47,15 @@ public class SchemaDetails extends BasePage {
}
@Step
- public SchemaDetails openEditSchema(){
+ public SchemaDetails openEditSchema() {
editSchemaBtn.shouldBe(Condition.visible).click();
return this;
}
@Step
- public SchemaDetails openCompareVersionMenu(){
- compareVersionBtn.shouldBe(Condition.enabled).click();
- return this;
+ public SchemaDetails openCompareVersionMenu() {
+ compareVersionBtn.shouldBe(Condition.enabled).click();
+ return this;
}
@Step
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schema/SchemaRegistryList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaRegistryList.java
similarity index 90%
rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schema/SchemaRegistryList.java
rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaRegistryList.java
index e6476ab1f6..8f65947734 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schema/SchemaRegistryList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaRegistryList.java
@@ -1,18 +1,18 @@
-package com.provectus.kafka.ui.pages.schema;
-
-import static com.codeborne.selenide.Selenide.$x;
+package com.provectus.kafka.ui.pages.schemas;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
+import static com.codeborne.selenide.Selenide.$x;
+
public class SchemaRegistryList extends BasePage {
protected SelenideElement createSchemaBtn = $x("//button[contains(text(),'Create Schema')]");
@Step
- public SchemaRegistryList waitUntilScreenReady(){
+ public SchemaRegistryList waitUntilScreenReady() {
waitUntilSpinnerDisappear();
createSchemaBtn.shouldBe(Condition.visible);
return this;
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicCreateEditForm.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicCreateEditForm.java
deleted file mode 100644
index 1ad65bdb36..0000000000
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicCreateEditForm.java
+++ /dev/null
@@ -1,289 +0,0 @@
-package com.provectus.kafka.ui.pages.topic;
-
-import static com.codeborne.selenide.Selenide.$;
-import static com.codeborne.selenide.Selenide.$$;
-import static com.codeborne.selenide.Selenide.$x;
-import static org.assertj.core.api.Assertions.assertThat;
-
-import com.codeborne.selenide.ClickOptions;
-import com.codeborne.selenide.Condition;
-import com.codeborne.selenide.ElementsCollection;
-import com.codeborne.selenide.SelenideElement;
-import com.provectus.kafka.ui.pages.BasePage;
-import com.provectus.kafka.ui.pages.topic.enums.CleanupPolicyValue;
-import com.provectus.kafka.ui.pages.topic.enums.CustomParameterType;
-import com.provectus.kafka.ui.pages.topic.enums.MaxSizeOnDisk;
-import com.provectus.kafka.ui.pages.topic.enums.TimeToRetain;
-import io.qameta.allure.Step;
-
-public class TopicCreateEditForm extends BasePage {
-
- protected SelenideElement timeToRetainField = $x("//input[@id='timeToRetain']");
- protected SelenideElement partitionsField = $x("//input[@name='partitions']");
- protected SelenideElement nameField = $x("//input[@name='name']");
- protected SelenideElement maxMessageBytesField = $x("//input[@name='maxMessageBytes']");
- protected SelenideElement minInSyncReplicasField = $x("//input[@name='minInSyncReplicas']");
- protected SelenideElement cleanUpPolicyDdl = $x("//ul[@id='topicFormCleanupPolicy']");
- protected SelenideElement maxSizeOnDiscDdl = $x("//ul[@id='topicFormRetentionBytes']");
- protected SelenideElement customParameterDdl = $x("//ul[contains(@name,'customParams')]");
- protected SelenideElement deleteCustomParameterBtn = $x("//span[contains(@title,'Delete customParam')]");
- protected SelenideElement addCustomParameterTypeBtn = $x("//button[contains(text(),'Add Custom Parameter')]");
- protected SelenideElement customParameterValueField = $x("//input[@placeholder='Value']");
- protected SelenideElement validationCustomParameterValueMsg = $x("//p[contains(text(),'Value is required')]");
- protected String ddlElementLocator = "//li[@value='%s']";
- protected String btnTimeToRetainLocator = "//button[@class][text()='%s']";
-
-
- @Step
- public TopicCreateEditForm waitUntilScreenReady() {
- waitUntilSpinnerDisappear();
- nameField.shouldBe(Condition.visible);
- return this;
- }
-
- public boolean isCreateTopicButtonEnabled() {
- return isEnabled(submitBtn);
- }
-
- public boolean isDeleteCustomParameterButtonEnabled() {
- return isEnabled(deleteCustomParameterBtn);
- }
-
- public boolean isNameFieldEnabled(){
- return isEnabled(nameField);
- }
-
- @Step
- public TopicCreateEditForm setTopicName(String topicName) {
- nameField.shouldBe(Condition.enabled).clear();
- if (topicName != null) {
- nameField.sendKeys(topicName);
- }
- return this;
- }
-
- @Step
- public TopicCreateEditForm setMinInsyncReplicas(Integer minInsyncReplicas) {
- minInSyncReplicasField.setValue(minInsyncReplicas.toString());
- return this;
- }
-
- @Step
- public TopicCreateEditForm setTimeToRetainDataInMs(Long ms) {
- timeToRetainField.setValue(ms.toString());
- return this;
- }
-
- @Step
- public TopicCreateEditForm setTimeToRetainDataInMs(String ms) {
- timeToRetainField.setValue(ms);
- return this;
- }
-
- @Step
- public TopicCreateEditForm setMaxSizeOnDiskInGB(MaxSizeOnDisk MaxSizeOnDisk) {
- maxSizeOnDiscDdl.shouldBe(Condition.visible).click();
- $x(String.format(ddlElementLocator, MaxSizeOnDisk.getOptionValue())).shouldBe(Condition.visible).click();
- return this;
- }
-
- @Step
- public TopicCreateEditForm clickAddCustomParameterTypeButton() {
- addCustomParameterTypeBtn.click();
- return this;
- }
-
- @Step
- public TopicCreateEditForm setCustomParameterType(CustomParameterType customParameterType) {
- customParameterDdl.shouldBe(Condition.visible).click();
- $x(String.format(ddlElementLocator, customParameterType.getOptionValue())).shouldBe(Condition.visible).click();
- return this;
- }
-
- @Step
- public TopicCreateEditForm clearCustomParameterValue() {
- clearByKeyboard(customParameterValueField);
- return this;
- }
-
- @Step
- public TopicCreateEditForm setMaxMessageBytes(Long bytes) {
- maxMessageBytesField.setValue(bytes.toString());
- return this;
- }
-
- @Step
- public TopicCreateEditForm setMaxMessageBytes(String bytes) {
- return setMaxMessageBytes(Long.parseLong(bytes));
- }
-
- @Step
- public TopicCreateEditForm setNumberOfPartitions(int partitions) {
- partitionsField.shouldBe(Condition.enabled).clear();
- partitionsField.sendKeys(String.valueOf(partitions));
- return this;
- }
-
- @Step
- public TopicCreateEditForm setTimeToRetainDataByButtons(TimeToRetain timeToRetain) {
- $x(String.format(btnTimeToRetainLocator, timeToRetain.getButton())).shouldBe(Condition.enabled).click();
- return this;
- }
-
- @Step
- public TopicCreateEditForm selectCleanupPolicy(CleanupPolicyValue cleanupPolicyOptionValue) {
- cleanUpPolicyDdl.shouldBe(Condition.visible).click();
- $x(String.format(ddlElementLocator, cleanupPolicyOptionValue.getOptionValue())).shouldBe(Condition.visible).click();
- return this;
- }
-
- @Step
- public TopicCreateEditForm selectRetentionBytes(String visibleValue) {
- return selectFromDropDownByVisibleText("retentionBytes", visibleValue);
- }
-
- @Step
- public TopicCreateEditForm selectRetentionBytes(Long optionValue) {
- return selectFromDropDownByOptionValue("retentionBytes", optionValue.toString());
- }
-
- @Step
- public TopicCreateEditForm clickCreateTopicBtn() {
- clickSubmitBtn();
- return this;
- }
-
- @Step
- public TopicCreateEditForm addCustomParameter(String customParameterName,
- String customParameterValue) {
- ElementsCollection customParametersElements =
- $$("ul[role=listbox][name^=customParams][name$=name]");
- KafkaUISelectElement kafkaUISelectElement = null;
- if (customParametersElements.size() == 1) {
- if ("Select".equals(customParametersElements.first().getText())) {
- kafkaUISelectElement = new KafkaUISelectElement(customParametersElements.first());
- }
- } else {
- $$("button")
- .find(Condition.exactText("Add Custom Parameter"))
- .click();
- customParametersElements = $$("ul[role=listbox][name^=customParams][name$=name]");
- kafkaUISelectElement = new KafkaUISelectElement(customParametersElements.last());
- }
- if (kafkaUISelectElement != null) {
- kafkaUISelectElement.selectByVisibleText(customParameterName);
- }
- $(String.format("input[name=\"customParams.%d.value\"]", customParametersElements.size() - 1))
- .setValue(customParameterValue);
- return this;
- }
-
- @Step
- public TopicCreateEditForm updateCustomParameter(String customParameterName,
- String customParameterValue) {
- SelenideElement selenideElement = $$("ul[role=listbox][name^=customParams][name$=name]")
- .find(Condition.exactText(customParameterName));
- String name = selenideElement.getAttribute("name");
- if (name != null) {
- name = name.substring(0, name.lastIndexOf("."));
- }
- $(String.format("input[name^=%s]", name)).setValue(customParameterValue);
- return this;
- }
-
- @Step
- public TopicCreateEditForm cleanupPolicyIs(String value) {
- String cleanupPolicy = new KafkaUISelectElement("cleanupPolicy")
- .getCurrentValue();
- assertThat(cleanupPolicy)
- .as("Clear policy value should be " + value)
- .isEqualToIgnoringCase(value);
- return this;
- }
-
- @Step
- public TopicCreateEditForm timeToRetainIs(String time) {
- String value = timeToRetainField.getValue();
- assertThat(value)
- .as("Time to retain data (in ms) should be " + time)
- .isEqualTo(time);
- return this;
- }
-
- @Step
- public String getCleanupPolicy() {
- return new KafkaUISelectElement("cleanupPolicy").getCurrentValue();
- }
-
- @Step
- public String getTimeToRetain() {
- return timeToRetainField.getValue();
- }
-
- @Step
- public String getMaxSizeOnDisk() {
- return new KafkaUISelectElement("retentionBytes").getCurrentValue();
- }
-
- @Step
- public String getMaxMessageBytes() {
- return maxMessageBytesField.getValue();
- }
-
- @Step
- public boolean isValidationMessageCustomParameterValueVisible() {
- return isVisible(validationCustomParameterValueMsg);
- }
-
- @Step
- public String getCustomParameterValue() {
- return customParameterValueField.getValue();
- }
-
- private static class KafkaUISelectElement {
-
- private final SelenideElement selectElement;
-
- public KafkaUISelectElement(String selectElementName) {
- this.selectElement = $("ul[role=listbox][name=" + selectElementName + "]");
- }
-
- public KafkaUISelectElement(SelenideElement selectElement) {
- this.selectElement = selectElement;
- }
-
- public void selectByOptionValue(String optionValue) {
- selectElement.click();
- selectElement
- .$$x(".//ul/li[@role='option']")
- .find(Condition.attribute("value", optionValue))
- .click(ClickOptions.usingJavaScript());
- }
-
- public void selectByVisibleText(String visibleText) {
- selectElement.click();
- selectElement
- .$$("ul>li[role=option]")
- .find(Condition.exactText(visibleText))
- .click();
- }
-
- public String getCurrentValue() {
- return selectElement.$("li").getText();
- }
- }
-
- private TopicCreateEditForm selectFromDropDownByOptionValue(String dropDownElementName,
- String optionValue) {
- KafkaUISelectElement select = new KafkaUISelectElement(dropDownElementName);
- select.selectByOptionValue(optionValue);
- return this;
- }
-
- private TopicCreateEditForm selectFromDropDownByVisibleText(String dropDownElementName,
- String visibleText) {
- KafkaUISelectElement select = new KafkaUISelectElement(dropDownElementName);
- select.selectByVisibleText(visibleText);
- return this;
- }
-}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java
deleted file mode 100644
index 119f602a40..0000000000
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java
+++ /dev/null
@@ -1,452 +0,0 @@
-package com.provectus.kafka.ui.pages.topic;
-
-import static com.codeborne.selenide.Selenide.$;
-import static com.codeborne.selenide.Selenide.$$x;
-import static com.codeborne.selenide.Selenide.$x;
-import static com.codeborne.selenide.Selenide.sleep;
-import static org.apache.commons.lang.math.RandomUtils.nextInt;
-
-import com.codeborne.selenide.CollectionCondition;
-import com.codeborne.selenide.Condition;
-import com.codeborne.selenide.ElementsCollection;
-import com.codeborne.selenide.SelenideElement;
-import com.provectus.kafka.ui.pages.BasePage;
-import io.qameta.allure.Step;
-import java.time.LocalDate;
-import java.time.LocalDateTime;
-import java.time.LocalTime;
-import java.time.YearMonth;
-import java.time.format.DateTimeFormatter;
-import java.time.format.DateTimeFormatterBuilder;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Locale;
-import java.util.Objects;
-import org.openqa.selenium.By;
-
-public class TopicDetails extends BasePage {
-
- protected SelenideElement clearMessagesBtn = $x(("//div[contains(text(), 'Clear messages')]"));
- protected SelenideElement recreateTopicBtn = $x("//div[text()='Recreate Topic']");
- protected SelenideElement messageAmountCell = $x("//tbody/tr/td[5]");
- protected SelenideElement overviewTab = $x("//a[contains(text(),'Overview')]");
- protected SelenideElement messagesTab = $x("//a[contains(text(),'Messages')]");
- protected SelenideElement seekTypeDdl = $x("//ul[@id='selectSeekType']/li");
- protected SelenideElement seekTypeField = $x("//label[text()='Seek Type']//..//div/input");
- protected SelenideElement addFiltersBtn = $x("//button[text()='Add Filters']");
- protected SelenideElement savedFiltersLink = $x("//div[text()='Saved Filters']");
- protected SelenideElement addFilterCodeModalTitle = $x("//label[text()='Filter code']");
- protected SelenideElement addFilterCodeInput = $x("//div[@id='ace-editor']//textarea");
- protected SelenideElement saveThisFilterCheckBoxAddFilterMdl = $x("//input[@name='saveFilter']");
- protected SelenideElement displayNameInputAddFilterMdl = $x("//input[@placeholder='Enter Name']");
- protected SelenideElement cancelBtnAddFilterMdl = $x("//button[text()='Cancel']");
- protected SelenideElement addFilterBtnAddFilterMdl = $x("//button[text()='Add filter']");
- protected SelenideElement addFiltersBtnMessages = $x("//button[text()='Add Filters']");
- protected SelenideElement selectFilterBtnAddFilterMdl = $x("//button[text()='Select filter']");
- protected SelenideElement editSettingsMenu = $x("//li[@role][contains(text(),'Edit settings')]");
- protected SelenideElement removeTopicBtn = $x("//ul[@role='menu']//div[contains(text(),'Remove Topic')]");
- protected SelenideElement produceMessageBtn = $x("//div//button[text()='Produce Message']");
- protected SelenideElement contentMessageTab = $x("//html//div[@id='root']/div/main//table//p");
- protected SelenideElement cleanUpPolicyField = $x("//div[contains(text(),'Clean Up Policy')]/../span/*");
- protected SelenideElement partitionsField = $x("//div[contains(text(),'Partitions')]/../span");
- protected SelenideElement backToCreateFiltersLink = $x("//div[text()='Back To create filters']");
- protected ElementsCollection messageGridItems = $$x("//tbody//tr");
- protected SelenideElement actualCalendarDate = $x("//div[@class='react-datepicker__current-month']");
- protected SelenideElement previousMonthButton = $x("//button[@aria-label='Previous Month']");
- protected SelenideElement nextMonthButton = $x("//button[@aria-label='Next Month']");
- protected SelenideElement calendarTimeFld = $x("//input[@placeholder='Time']");
- protected String dayCellLtr = "//div[@role='option'][contains(text(),'%d')]";
- protected String seekFilterDdlLocator = "//ul[@id='selectSeekType']/ul/li[text()='%s']";
- protected String savedFilterNameLocator = "//div[@role='savedFilter']/div[contains(text(),'%s')]";
- protected String consumerIdLocator = "//a[@title='%s']";
- protected String topicHeaderLocator = "//h1[contains(text(),'%s')]";
- protected String activeFilterNameLocator = "//div[@data-testid='activeSmartFilter'][contains(text(),'%s')]";
- protected String settingsGridValueLocator = "//tbody/tr/td/span[text()='%s']//ancestor::tr/td[2]/span";
-
- @Step
- public TopicDetails waitUntilScreenReady() {
- waitUntilSpinnerDisappear();
- overviewTab.shouldBe(Condition.visible);
- return this;
- }
-
- @Step
- public TopicDetails openDetailsTab(TopicMenu menu) {
- $(By.linkText(menu.toString())).shouldBe(Condition.visible).click();
- waitUntilSpinnerDisappear();
- return this;
- }
-
- @Step
- public String getSettingsGridValueByKey(String key){
- return $x(String.format(settingsGridValueLocator, key)).scrollTo().shouldBe(Condition.visible).getText();
- }
-
- @Step
- public TopicDetails openDotMenu() {
- clickByJavaScript(dotMenuBtn);
- return this;
- }
-
- @Step
- public boolean isAlertWithMessageVisible(AlertHeader header, String message){
- return isAlertVisible(header, message);
- }
-
- @Step
- public TopicDetails clickEditSettingsMenu() {
- editSettingsMenu.shouldBe(Condition.visible).click();
- return this;
- }
-
- @Step
- public boolean isConfirmationMdlVisible(){
- return isConfirmationModalVisible();
- }
-
- @Step
- public TopicDetails clickClearMessagesMenu() {
- clearMessagesBtn.shouldBe(Condition.visible).click();
- return this;
- }
-
- @Step
- public TopicDetails clickRecreateTopicMenu(){
- recreateTopicBtn.shouldBe(Condition.visible).click();
- return this;
- }
-
- @Step
- public String getCleanUpPolicy() {
- return cleanUpPolicyField.getText();
- }
-
- @Step
- public int getPartitions() {
- return Integer.parseInt(partitionsField.getText().trim());
- }
-
- @Step
- public boolean isTopicHeaderVisible(String topicName) {
- return isVisible($x(String.format(topicHeaderLocator, topicName)));
- }
-
- @Step
- public TopicDetails clickDeleteTopicMenu() {
- removeTopicBtn.shouldBe(Condition.visible).click();
- return this;
- }
-
- @Step
- public TopicDetails clickConfirmBtnMdl() {
- clickConfirmButton();
- return this;
- }
-
- @Step
- public TopicDetails clickProduceMessageBtn() {
- clickByJavaScript(produceMessageBtn);
- return this;
- }
-
- @Step
- public TopicDetails selectSeekTypeDdlMessagesTab(String seekTypeName){
- seekTypeDdl.shouldBe(Condition.enabled).click();
- $x(String.format(seekFilterDdlLocator, seekTypeName)).shouldBe(Condition.visible).click();
- return this;
- }
-
- @Step
- public TopicDetails setSeekTypeValueFldMessagesTab(String seekTypeValue){
- seekTypeField.shouldBe(Condition.enabled).sendKeys(seekTypeValue);
- return this;
- }
-
- @Step
- public TopicDetails clickSubmitFiltersBtnMessagesTab(){
- clickByJavaScript(submitBtn);
- waitUntilSpinnerDisappear();
- return this;
- }
-
- @Step
- public TopicDetails clickMessagesAddFiltersBtn() {
- addFiltersBtn.shouldBe(Condition.enabled).click();
- return this;
- }
-
- @Step
- public TopicDetails openSavedFiltersListMdl(){
- savedFiltersLink.shouldBe(Condition.enabled).click();
- backToCreateFiltersLink.shouldBe(Condition.visible);
- return this;
- }
-
- @Step
- public boolean isFilterVisibleAtSavedFiltersMdl(String filterName){
- return isVisible($x(String.format(savedFilterNameLocator,filterName)));
- }
-
- @Step
- public TopicDetails selectFilterAtSavedFiltersMdl(String filterName){
- $x(String.format(savedFilterNameLocator, filterName)).shouldBe(Condition.enabled).click();
- return this;
- }
-
- @Step
- public TopicDetails clickSelectFilterBtnAtSavedFiltersMdl(){
- selectFilterBtnAddFilterMdl.shouldBe(Condition.enabled).click();
- addFilterCodeModalTitle.shouldBe(Condition.disappear);
- return this;
- }
-
- @Step
- public TopicDetails waitUntilAddFiltersMdlVisible() {
- addFilterCodeModalTitle.shouldBe(Condition.visible);
- return this;
- }
-
- @Step
- public TopicDetails setFilterCodeFieldAddFilterMdl(String filterCode) {
- addFilterCodeInput.shouldBe(Condition.enabled).sendKeys(filterCode);
- return this;
- }
-
- @Step
- public TopicDetails selectSaveThisFilterCheckboxMdl(boolean select){
- selectElement(saveThisFilterCheckBoxAddFilterMdl, select);
- return this;
- }
-
- @Step
- public boolean isSaveThisFilterCheckBoxSelected() {
- return isSelected(saveThisFilterCheckBoxAddFilterMdl);
- }
-
- @Step
- public TopicDetails setDisplayNameFldAddFilterMdl(String displayName) {
- displayNameInputAddFilterMdl.shouldBe(Condition.enabled).sendKeys(displayName);
- return this;
- }
-
- @Step
- public TopicDetails clickAddFilterBtnAndCloseMdl(boolean closeModal) {
- addFilterBtnAddFilterMdl.shouldBe(Condition.enabled).click();
- if(closeModal){
- addFilterCodeModalTitle.shouldBe(Condition.hidden);}
- else{
- addFilterCodeModalTitle.shouldBe(Condition.visible);
- }
- return this;
- }
-
- @Step
- public boolean isAddFilterBtnAddFilterMdlEnabled() {
- return isEnabled(addFilterBtnAddFilterMdl);
- }
-
- @Step
- public boolean isActiveFilterVisible(String activeFilterName) {
- return isVisible($x(String.format(activeFilterNameLocator, activeFilterName)));
- }
-
- public List<SelenideElement> getAllAddFilterModalVisibleElements() {
- return Arrays.asList(savedFiltersLink, displayNameInputAddFilterMdl, addFilterBtnAddFilterMdl, cancelBtnAddFilterMdl);
- }
-
- public List<SelenideElement> getAllAddFilterModalEnabledElements() {
- return Arrays.asList(displayNameInputAddFilterMdl, cancelBtnAddFilterMdl);
- }
-
- public List<SelenideElement> getAllAddFilterModalDisabledElements() {
- return Arrays.asList(addFilterBtnAddFilterMdl);
- }
-
- @Step
- public TopicDetails openConsumerGroup(String consumerId) {
- $x(String.format(consumerIdLocator, consumerId)).click();
- return this;
- }
-
- @Step
- public boolean isKeyMessageVisible(String keyMessage) {
- return keyMessage.equals($("td[title]").getText());
- }
-
- @Step
- public boolean isContentMessageVisible(String contentMessage) {
- return contentMessage.matches(contentMessageTab.getText().trim());
- }
-
- private void selectYear(int expectedYear) {
- while (getActualCalendarDate().getYear() > expectedYear) {
- clickByJavaScript(previousMonthButton);
- sleep(1000);
- if (LocalTime.now().plusMinutes(3).isBefore(LocalTime.now())) {
- throw new IllegalArgumentException("Unable to select year");
- }
- }
- }
-
- private void selectMonth(int expectedMonth) {
- while (getActualCalendarDate().getMonthValue() > expectedMonth) {
- clickByJavaScript(previousMonthButton);
- sleep(1000);
- if (LocalTime.now().plusMinutes(3).isBefore(LocalTime.now())) {
- throw new IllegalArgumentException("Unable to select month");
- }
- }
- }
-
- private void selectDay(int expectedDay) {
- Objects.requireNonNull($$x(String.format(dayCellLtr, expectedDay)).stream()
- .filter(day -> !Objects.requireNonNull(day.getAttribute("class")).contains("outside-month"))
- .findFirst().orElse(null)).shouldBe(Condition.enabled).click();
- }
-
- private void setTime(LocalDateTime dateTime) {
- calendarTimeFld.shouldBe(Condition.enabled)
- .sendKeys(String.valueOf(dateTime.getHour()), String.valueOf(dateTime.getMinute()));
- }
-
- @Step
- public TopicDetails selectDateAndTimeByCalendar(LocalDateTime dateTime) {
- setTime(dateTime);
- selectYear(dateTime.getYear());
- selectMonth(dateTime.getMonthValue());
- selectDay(dateTime.getDayOfMonth());
- return this;
- }
-
- private LocalDate getActualCalendarDate() {
- String monthAndYearStr = actualCalendarDate.getText().trim();
- DateTimeFormatter formatter = new DateTimeFormatterBuilder()
- .parseCaseInsensitive()
- .append(DateTimeFormatter.ofPattern("MMMM yyyy"))
- .toFormatter(Locale.ENGLISH);
- YearMonth yearMonth = formatter.parse(monthAndYearStr, YearMonth::from);
- return yearMonth.atDay(1);
- }
-
- @Step
- public TopicDetails openCalendarSeekType(){
- seekTypeField.shouldBe(Condition.enabled).click();
- actualCalendarDate.shouldBe(Condition.visible);
- return this;
- }
-
- @Step
- public int getMessageCountAmount() {
- return Integer.parseInt(messageAmountCell.getText().trim());
- }
-
- private List<MessageGridItem> initItems() {
- List<MessageGridItem> gridItemList = new ArrayList<>();
- allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
- .forEach(item -> gridItemList.add(new TopicDetails.MessageGridItem(item)));
- return gridItemList;
- }
-
- @Step
- public TopicDetails.MessageGridItem getMessageByOffset(int offset) {
- return initItems().stream()
- .filter(e -> e.getOffset() == offset)
- .findFirst().orElse(null);
- }
-
- @Step
- public List<MessageGridItem> getAllMessages(){
- return initItems();
- }
-
- @Step
- public TopicDetails.MessageGridItem getRandomMessage() {
- return getMessageByOffset(nextInt(initItems().size() - 1));
- }
-
- public enum TopicMenu {
- OVERVIEW("Overview"),
- MESSAGES("Messages"),
- CONSUMERS("Consumers"),
- SETTINGS("Settings");
-
- private final String value;
-
- TopicMenu(String value) {
- this.value = value;
- }
-
- public String toString() {
- return value;
- }
- }
-
- public static class MessageGridItem extends BasePage {
-
- private final SelenideElement element;
-
- private MessageGridItem(SelenideElement element) {
- this.element = element;
- }
-
- @Step
- public MessageGridItem clickExpand() {
- clickByJavaScript(element.$x("./td[1]/span"));
- return this;
- }
-
- private SelenideElement getOffsetElm() {
- return element.$x("./td[2]");
- }
-
- @Step
- public int getOffset() {
- return Integer.parseInt(getOffsetElm().getText().trim());
- }
-
- @Step
- public int getPartition() {
- return Integer.parseInt(element.$x("./td[3]").getText().trim());
- }
-
- @Step
- public LocalDateTime getTimestamp() {
- String timestampValue = element.$x("./td[4]/div").getText().trim();
- DateTimeFormatter formatter = DateTimeFormatter.ofPattern("M/d/yyyy, HH:mm:ss");
- return LocalDateTime.parse(timestampValue, formatter);
- }
-
- @Step
- public String getKey() {
- return element.$x("./td[5]").getText().trim();
- }
-
- @Step
- public String getValue() {
- return element.$x("./td[6]/span/p").getText().trim();
- }
-
- @Step
- public MessageGridItem openDotMenu() {
- getOffsetElm().hover();
- element.$x("./td[7]/div/button[@aria-label='Dropdown Toggle']")
- .shouldBe(Condition.visible).click();
- return this;
- }
-
- @Step
- public MessageGridItem clickCopyToClipBoard() {
- clickByJavaScript(element.$x("./td[7]//li[text() = 'Copy to clipboard']")
- .shouldBe(Condition.visible));
- return this;
- }
-
- @Step
- public MessageGridItem clickSaveAsFile() {
- clickByJavaScript(element.$x("./td[7]//li[text() = 'Save as a file']")
- .shouldBe(Condition.visible));
- return this;
- }
- }
-}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicSettingsTab.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicSettingsTab.java
deleted file mode 100644
index 4bf78fb2af..0000000000
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicSettingsTab.java
+++ /dev/null
@@ -1,65 +0,0 @@
-package com.provectus.kafka.ui.pages.topic;
-
-import static com.codeborne.selenide.Selenide.$x;
-
-import com.codeborne.selenide.CollectionCondition;
-import com.codeborne.selenide.Condition;
-import com.codeborne.selenide.SelenideElement;
-import com.provectus.kafka.ui.pages.BasePage;
-import io.qameta.allure.Step;
-import java.util.ArrayList;
-import java.util.List;
-
-public class TopicSettingsTab extends BasePage {
-
- protected SelenideElement defaultValueColumnHeaderLocator = $x("//div[text() = 'Default Value']");
-
- @Step
- public TopicSettingsTab waitUntilScreenReady(){
- waitUntilSpinnerDisappear();
- defaultValueColumnHeaderLocator.shouldBe(Condition.visible);
- return this;
- }
-
- private List<SettingsGridItem> initGridItems() {
- List<SettingsGridItem> gridItemList = new ArrayList<>();
- allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
- .forEach(item -> gridItemList.add(new SettingsGridItem(item)));
- return gridItemList;
- }
-
- private TopicSettingsTab.SettingsGridItem getItemByKey(String key){
- return initGridItems().stream()
- .filter(e ->e.getKey().equals(key))
- .findFirst().orElse(null);
- }
-
- @Step
- public String getValueByKey(String key){
- return getItemByKey(key).getValue();
- }
-
- public static class SettingsGridItem extends BasePage {
-
- private final SelenideElement element;
-
- public SettingsGridItem(SelenideElement element) {
- this.element = element;
- }
-
- @Step
- public String getKey(){
- return element.$x("./td[1]/span").getText().trim();
- }
-
- @Step
- public String getValue(){
- return element.$x("./td[2]/span").getText().trim();
- }
-
- @Step
- public String getDefaultValue() {
- return element.$x("./td[3]/span").getText().trim();
- }
- }
-}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
deleted file mode 100644
index 6280ffe8a4..0000000000
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
+++ /dev/null
@@ -1,260 +0,0 @@
-package com.provectus.kafka.ui.pages.topic;
-
-import static com.codeborne.selenide.Condition.visible;
-import static com.codeborne.selenide.Selenide.$x;
-
-import com.codeborne.selenide.CollectionCondition;
-import com.codeborne.selenide.Condition;
-import com.codeborne.selenide.SelenideElement;
-import com.provectus.kafka.ui.pages.BasePage;
-import io.qameta.allure.Step;
-import java.time.Duration;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
-public class TopicsList extends BasePage {
-
- protected SelenideElement topicListHeader = $x("//h1[text()='Topics']");
- protected SelenideElement addTopicBtn = $x("//button[normalize-space(text()) ='Add a Topic']");
- protected SelenideElement searchField = $x("//input[@placeholder='Search by Topic Name']");
- protected SelenideElement showInternalRadioBtn = $x("//input[@name='ShowInternalTopics']");
- protected SelenideElement deleteSelectedTopicsBtn = $x("//button[text()='Delete selected topics']");
- protected SelenideElement copySelectedTopicBtn = $x("//button[text()='Copy selected topic']");
- protected SelenideElement purgeMessagesOfSelectedTopicsBtn = $x("//button[text()='Purge messages of selected topics']");
- protected SelenideElement clearMessagesBtn = $x("//ul[contains(@class ,'open')]//div[text()='Clear Messages']");
- protected SelenideElement recreateTopicBtn = $x("//ul[contains(@class ,'open')]//div[text()='Recreate Topic']");
- protected SelenideElement removeTopicBtn = $x("//ul[contains(@class ,'open')]//div[text()='Remove Topic']");
-
- @Step
- public TopicsList waitUntilScreenReady() {
- waitUntilSpinnerDisappear();
- topicListHeader.shouldBe(visible);
- return this;
- }
-
- @Step
- public TopicsList clickAddTopicBtn() {
- clickByJavaScript(addTopicBtn);
- return this;
- }
-
- @Step
- public boolean isTopicVisible(String topicName) {
- tableGrid.shouldBe(visible);
- return isVisible(getTableElement(topicName));
- }
-
- @Step
- public boolean isShowInternalRadioBtnSelected() {
- return isSelected(showInternalRadioBtn);
- }
-
- @Step
- public TopicsList setShowInternalRadioButton(boolean select) {
- selectElement(showInternalRadioBtn, select);
- return this;
- }
-
- @Step
- public TopicsList openTopic(String topicName) {
- getTopicItem(topicName).openItem();
- return this;
- }
-
- @Step
- public TopicsList openDotMenuByTopicName(String topicName){
- getTopicItem(topicName).openDotMenu();
- return this;
- }
-
- @Step
- public boolean isCopySelectedTopicBtnEnabled(){
- return isEnabled(copySelectedTopicBtn);
- }
-
- @Step
- public List<SelenideElement> getActionButtons() {
- return Stream.of(deleteSelectedTopicsBtn, copySelectedTopicBtn, purgeMessagesOfSelectedTopicsBtn)
- .collect(Collectors.toList());
- }
-
- @Step
- public TopicsList clickCopySelectedTopicBtn(){
- copySelectedTopicBtn.shouldBe(Condition.enabled).click();
- return this;
- }
-
- @Step
- public TopicsList clickPurgeMessagesOfSelectedTopicsBtn(){
- purgeMessagesOfSelectedTopicsBtn.shouldBe(Condition.enabled).click();
- return this;
- }
-
- @Step
- public TopicsList clickClearMessagesBtn(){
- clickByJavaScript(clearMessagesBtn.shouldBe(visible));
- return this;
- }
-
- @Step
- public TopicsList clickRecreateTopicBtn(){
- clickByJavaScript(recreateTopicBtn.shouldBe(visible));
- return this;
- }
-
- @Step
- public TopicsList clickRemoveTopicBtn(){
- clickByJavaScript(removeTopicBtn.shouldBe(visible));
- return this;
- }
-
- @Step
- public TopicsList clickConfirmBtnMdl() {
- clickConfirmButton();
- return this;
- }
-
- @Step
- public TopicsList clickCancelBtnMdl(){
- clickCancelButton();
- return this;
- }
-
- @Step
- public boolean isConfirmationMdlVisible(){
- return isConfirmationModalVisible();
- }
-
- @Step
- public boolean isAlertWithMessageVisible(AlertHeader header, String message) {
- return isAlertVisible(header, message);
- }
-
- private List<SelenideElement> getVisibleColumnHeaders() {
- return Stream.of("Replication Factor","Number of messages","Topic Name", "Partitions", "Out of sync replicas", "Size")
- .map(name -> $x(String.format(columnHeaderLocator, name)))
- .collect(Collectors.toList());
- }
-
- private List<SelenideElement> getEnabledColumnHeaders(){
- return Stream.of("Topic Name", "Partitions", "Out of sync replicas", "Size")
- .map(name -> $x(String.format(columnHeaderLocator, name)))
- .collect(Collectors.toList());
- }
-
- @Step
- public List<SelenideElement> getAllVisibleElements() {
- List<SelenideElement> visibleElements = new ArrayList<>(getVisibleColumnHeaders());
- visibleElements.addAll(Arrays.asList(searchField, addTopicBtn, tableGrid));
- visibleElements.addAll(getActionButtons());
- return visibleElements;
- }
-
- @Step
- public List<SelenideElement> getAllEnabledElements() {
- List<SelenideElement> enabledElements = new ArrayList<>(getEnabledColumnHeaders());
- enabledElements.addAll(Arrays.asList(searchField, showInternalRadioBtn,addTopicBtn));
- return enabledElements;
- }
-
- private List<TopicGridItem> initGridItems() {
- List<TopicGridItem> gridItemList = new ArrayList<>();
- allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
- .forEach(item -> gridItemList.add(new TopicGridItem(item)));
- return gridItemList;
- }
-
- @Step
- public TopicGridItem getTopicItem(String name) {
- return initGridItems().stream()
- .filter(e -> e.getName().equals(name))
- .findFirst().orElse(null);
- }
-
- @Step
- public List<TopicGridItem> getNonInternalTopics() {
- return initGridItems().stream()
- .filter(e -> !e.isInternal())
- .collect(Collectors.toList());
- }
-
- @Step
- public List<TopicGridItem> getInternalTopics() {
- return initGridItems().stream()
- .filter(TopicGridItem::isInternal)
- .collect(Collectors.toList());
- }
-
- public static class TopicGridItem extends BasePage {
-
- private final SelenideElement element;
-
- public TopicGridItem(SelenideElement element) {
- this.element = element;
- }
-
- @Step
- public TopicsList selectItem(boolean select) {
- selectElement(element.$x("./td[1]/input"), select);
- return new TopicsList();
- }
-
- @Step
- public boolean isInternal() {
- boolean internal = false;
- try {
- element.$x("./td[2]/a/span").shouldBe(visible, Duration.ofMillis(500));
- internal = true;
- } catch (Throwable ignored) {
- }
- return internal;
- }
-
- private SelenideElement getNameElm() {
- return element.$x("./td[2]");
- }
-
- @Step
- public String getName() {
- return getNameElm().getText().trim();
- }
-
- @Step
- public void openItem() {
- getNameElm().click();
- }
-
- @Step
- public int getPartition() {
- return Integer.parseInt(element.$x("./td[3]").getText().trim());
- }
-
- @Step
- public int getOutOfSyncReplicas() {
- return Integer.parseInt(element.$x("./td[4]").getText().trim());
- }
-
- @Step
- public int getReplicationFactor() {
- return Integer.parseInt(element.$x("./td[5]").getText().trim());
- }
-
- @Step
- public int getNumberOfMessages() {
- return Integer.parseInt(element.$x("./td[6]").getText().trim());
- }
-
- @Step
- public int getSize() {
- return Integer.parseInt(element.$x("./td[7]").getText().trim());
- }
-
- @Step
- public void openDotMenu(){
- element.$x("./td[8]//button").click();
- }
- }
-}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/CleanupPolicyValue.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/CleanupPolicyValue.java
deleted file mode 100644
index 86b87325a0..0000000000
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/CleanupPolicyValue.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package com.provectus.kafka.ui.pages.topic.enums;
-
-public enum CleanupPolicyValue {
- DELETE("delete", "Delete"),
- COMPACT("compact", "Compact"),
- COMPACT_DELETE("compact,delete", "Compact,Delete");
-
- private final String optionValue;
- private final String visibleText;
-
- CleanupPolicyValue(String optionValue, String visibleText) {
- this.optionValue = optionValue;
- this.visibleText = visibleText;
- }
-
- public String getOptionValue() {
- return optionValue;
- }
-
- public String getVisibleText() {
- return visibleText;
- }
-}
-
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/CustomParameterType.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/CustomParameterType.java
deleted file mode 100644
index bc2f3befae..0000000000
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/CustomParameterType.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package com.provectus.kafka.ui.pages.topic.enums;
-
-public enum CustomParameterType {
- COMPRESSION_TYPE("compression.type"),
- DELETE_RETENTION_MS("delete.retention.ms"),
- FILE_DELETE_DELAY_MS("file.delete.delay.ms"),
- FLUSH_MESSAGES("flush.messages"),
- FLUSH_MS("flush.ms"),
- FOLLOWER_REPLICATION_THROTTLED_REPLICAS("follower.replication.throttled.replicas"),
- INDEX_INTERVAL_BYTES("index.interval.bytes"),
- LEADER_REPLICATION_THROTTLED_REPLICAS("leader.replication.throttled.replicas"),
- MAX_COMPACTION_LAG_MS("max.compaction.lag.ms"),
- MESSAGE_DOWNCONVERSION_ENABLE("message.downconversion.enable"),
- MESSAGE_FORMAT_VERSION("message.format.version"),
- MESSAGE_TIMESTAMP_DIFFERENCE_MAX_MS("message.timestamp.difference.max.ms"),
- MESSAGE_TIMESTAMP_TYPE("message.timestamp.type"),
- MIN_CLEANABLE_DIRTY_RATIO("min.cleanable.dirty.ratio"),
- MIN_COMPACTION_LAG_MS("min.compaction.lag.ms"),
- PREALLOCATE("preallocate"),
- RETENTION_BYTES("retention.bytes"),
- SEGMENT_BYTES("segment.bytes"),
- SEGMENT_INDEX_BYTES("segment.index.bytes"),
- SEGMENT_JITTER_MS("segment.jitter.ms"),
- SEGMENT_MS("segment.ms"),
- UNCLEAN_LEADER_ELECTION_ENABLE("unclean.leader.election.enable");
-
- private final String optionValue;
-
- CustomParameterType(String optionValue) {
- this.optionValue = optionValue;
- }
-
- public String getOptionValue() {
- return optionValue;
- }
-}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/MaxSizeOnDisk.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/MaxSizeOnDisk.java
deleted file mode 100644
index bdb476d1e6..0000000000
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/MaxSizeOnDisk.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package com.provectus.kafka.ui.pages.topic.enums;
-
-public enum MaxSizeOnDisk {
- NOT_SET("-1", "Not Set"),
- SIZE_1_GB("1073741824", "1 GB"),
- SIZE_10_GB("10737418240", "10 GB"),
- SIZE_20_GB("21474836480", "20 GB"),
- SIZE_50_GB("53687091200", "50 GB");
-
- private final String optionValue;
- private final String visibleText;
-
- MaxSizeOnDisk(String optionValue, String visibleText) {
- this.optionValue = optionValue;
- this.visibleText = visibleText;
- }
-
- public String getOptionValue() {
- return optionValue;
- }
-
- public String getVisibleText() {
- return visibleText;
- }
-}
-
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/TimeToRetain.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/TimeToRetain.java
deleted file mode 100644
index 9d42bf800f..0000000000
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/TimeToRetain.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package com.provectus.kafka.ui.pages.topic.enums;
-
-public enum TimeToRetain {
- BTN_12_HOURS("12 hours", "43200000"),
- BTN_1_DAY("1 day", "86400000"),
- BTN_2_DAYS("2 days", "172800000"),
- BTN_7_DAYS("7 days", "604800000"),
- BTN_4_WEEKS("4 weeks", "2419200000");
-
- private final String button;
- private final String value;
-
- TimeToRetain(String button, String value) {
- this.button = button;
- this.value = value;
- }
-
- public String getButton(){
- return button;
- }
-
- public String getValue(){
- return value;
- }
-}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/ProduceMessagePanel.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/ProduceMessagePanel.java
similarity index 94%
rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/ProduceMessagePanel.java
rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/ProduceMessagePanel.java
index d4dd3d8c15..a16d2c83a0 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/ProduceMessagePanel.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/ProduceMessagePanel.java
@@ -1,14 +1,15 @@
-package com.provectus.kafka.ui.pages.topic;
-
-import static com.codeborne.selenide.Selenide.$x;
-import static com.codeborne.selenide.Selenide.refresh;
+package com.provectus.kafka.ui.pages.topics;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
+
import java.util.Arrays;
+import static com.codeborne.selenide.Selenide.$x;
+import static com.codeborne.selenide.Selenide.refresh;
+
public class ProduceMessagePanel extends BasePage {
protected SelenideElement keyTextArea = $x("//div[@id='key']/textarea");
@@ -20,7 +21,7 @@ public class ProduceMessagePanel extends BasePage {
protected SelenideElement contentSerdeDdl = $x("//ul[@name='valueSerde']");
@Step
- public ProduceMessagePanel waitUntilScreenReady(){
+ public ProduceMessagePanel waitUntilScreenReady() {
waitUntilSpinnerDisappear();
Arrays.asList(partitionDdl, keySerdeDdl, contentSerdeDdl).forEach(element -> element.shouldBe(Condition.visible));
return this;
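
The reworked waitUntilScreenReady() above gates on the partition dropdown and both serde
dropdowns before returning, so callers can chain straight into form interaction. A minimal
usage sketch (assuming an already-open Selenide session on the produce-message screen):

    import com.provectus.kafka.ui.pages.topics.ProduceMessagePanel;

    class ProducePanelSketch {
        void openPanel() {
            // safe to interact once all three dropdowns are visible
            ProduceMessagePanel panel = new ProduceMessagePanel().waitUntilScreenReady();
        }
    }
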
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicCreateEditForm.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicCreateEditForm.java
new file mode 100644
index 0000000000..7deed1b7fd
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicCreateEditForm.java
@@ -0,0 +1,267 @@
+package com.provectus.kafka.ui.pages.topics;
+
+import com.codeborne.selenide.ClickOptions;
+import com.codeborne.selenide.Condition;
+import com.codeborne.selenide.ElementsCollection;
+import com.codeborne.selenide.SelenideElement;
+import com.provectus.kafka.ui.pages.BasePage;
+import com.provectus.kafka.ui.pages.topics.enums.CleanupPolicyValue;
+import com.provectus.kafka.ui.pages.topics.enums.CustomParameterType;
+import com.provectus.kafka.ui.pages.topics.enums.MaxSizeOnDisk;
+import com.provectus.kafka.ui.pages.topics.enums.TimeToRetain;
+import io.qameta.allure.Step;
+
+import static com.codeborne.selenide.Selenide.*;
+
+public class TopicCreateEditForm extends BasePage {
+
+ protected SelenideElement timeToRetainField = $x("//input[@id='timeToRetain']");
+ protected SelenideElement partitionsField = $x("//input[@name='partitions']");
+ protected SelenideElement nameField = $x("//input[@name='name']");
+ protected SelenideElement maxMessageBytesField = $x("//input[@name='maxMessageBytes']");
+ protected SelenideElement minInSyncReplicasField = $x("//input[@name='minInSyncReplicas']");
+ protected SelenideElement cleanUpPolicyDdl = $x("//ul[@id='topicFormCleanupPolicy']");
+ protected SelenideElement maxSizeOnDiscDdl = $x("//ul[@id='topicFormRetentionBytes']");
+ protected SelenideElement customParameterDdl = $x("//ul[contains(@name,'customParams')]");
+ protected SelenideElement deleteCustomParameterBtn = $x("//span[contains(@title,'Delete customParam')]");
+ protected SelenideElement addCustomParameterTypeBtn = $x("//button[contains(text(),'Add Custom Parameter')]");
+ protected SelenideElement customParameterValueField = $x("//input[@placeholder='Value']");
+ protected SelenideElement validationCustomParameterValueMsg = $x("//p[contains(text(),'Value is required')]");
+ protected String ddlElementLocator = "//li[@value='%s']";
+ protected String btnTimeToRetainLocator = "//button[@class][text()='%s']";
+
+
+ @Step
+ public TopicCreateEditForm waitUntilScreenReady() {
+ waitUntilSpinnerDisappear();
+ nameField.shouldBe(Condition.visible);
+ return this;
+ }
+
+ public boolean isCreateTopicButtonEnabled() {
+ return isEnabled(submitBtn);
+ }
+
+ public boolean isDeleteCustomParameterButtonEnabled() {
+ return isEnabled(deleteCustomParameterBtn);
+ }
+
+ public boolean isNameFieldEnabled() {
+ return isEnabled(nameField);
+ }
+
+ @Step
+ public TopicCreateEditForm setTopicName(String topicName) {
+ nameField.shouldBe(Condition.enabled).clear();
+ if (topicName != null) {
+ nameField.sendKeys(topicName);
+ }
+ return this;
+ }
+
+ @Step
+ public TopicCreateEditForm setMinInsyncReplicas(Integer minInsyncReplicas) {
+ minInSyncReplicasField.setValue(minInsyncReplicas.toString());
+ return this;
+ }
+
+ @Step
+ public TopicCreateEditForm setTimeToRetainDataInMs(Long ms) {
+ timeToRetainField.setValue(ms.toString());
+ return this;
+ }
+
+ @Step
+ public TopicCreateEditForm setTimeToRetainDataInMs(String ms) {
+ timeToRetainField.setValue(ms);
+ return this;
+ }
+
+ @Step
+ public TopicCreateEditForm setMaxSizeOnDiskInGB(MaxSizeOnDisk maxSizeOnDisk) {
+ maxSizeOnDiscDdl.shouldBe(Condition.visible).click();
+ $x(String.format(ddlElementLocator, maxSizeOnDisk.getOptionValue())).shouldBe(Condition.visible).click();
+ return this;
+ }
+
+ @Step
+ public TopicCreateEditForm clickAddCustomParameterTypeButton() {
+ addCustomParameterTypeBtn.click();
+ return this;
+ }
+
+ @Step
+ public TopicCreateEditForm setCustomParameterType(CustomParameterType customParameterType) {
+ customParameterDdl.shouldBe(Condition.visible).click();
+ $x(String.format(ddlElementLocator, customParameterType.getOptionValue())).shouldBe(Condition.visible).click();
+ return this;
+ }
+
+ @Step
+ public TopicCreateEditForm clearCustomParameterValue() {
+ clearByKeyboard(customParameterValueField);
+ return this;
+ }
+
+ @Step
+ public TopicCreateEditForm setNumberOfPartitions(int partitions) {
+ partitionsField.shouldBe(Condition.enabled).clear();
+ partitionsField.sendKeys(String.valueOf(partitions));
+ return this;
+ }
+
+ @Step
+ public TopicCreateEditForm setTimeToRetainDataByButtons(TimeToRetain timeToRetain) {
+ $x(String.format(btnTimeToRetainLocator, timeToRetain.getButton())).shouldBe(Condition.enabled).click();
+ return this;
+ }
+
+ @Step
+ public TopicCreateEditForm selectCleanupPolicy(CleanupPolicyValue cleanupPolicyOptionValue) {
+ cleanUpPolicyDdl.shouldBe(Condition.visible).click();
+ $x(String.format(ddlElementLocator, cleanupPolicyOptionValue.getOptionValue())).shouldBe(Condition.visible).click();
+ return this;
+ }
+
+ @Step
+ public TopicCreateEditForm selectRetentionBytes(String visibleValue) {
+ return selectFromDropDownByVisibleText("retentionBytes", visibleValue);
+ }
+
+ @Step
+ public TopicCreateEditForm selectRetentionBytes(Long optionValue) {
+ return selectFromDropDownByOptionValue("retentionBytes", optionValue.toString());
+ }
+
+ @Step
+ public TopicCreateEditForm clickCreateTopicBtn() {
+ clickSubmitBtn();
+ return this;
+ }
+
+ @Step
+ public TopicCreateEditForm addCustomParameter(String customParameterName,
+ String customParameterValue) {
+ ElementsCollection customParametersElements =
+ $$("ul[role=listbox][name^=customParams][name$=name]");
+ KafkaUISelectElement kafkaUISelectElement = null;
+ if (customParametersElements.size() == 1) {
+ if ("Select".equals(customParametersElements.first().getText())) {
+ kafkaUISelectElement = new KafkaUISelectElement(customParametersElements.first());
+ }
+ } else {
+ $$("button")
+ .find(Condition.exactText("Add Custom Parameter"))
+ .click();
+ customParametersElements = $$("ul[role=listbox][name^=customParams][name$=name]");
+ kafkaUISelectElement = new KafkaUISelectElement(customParametersElements.last());
+ }
+ if (kafkaUISelectElement != null) {
+ kafkaUISelectElement.selectByVisibleText(customParameterName);
+ }
+ $(String.format("input[name=\"customParams.%d.value\"]", customParametersElements.size() - 1))
+ .setValue(customParameterValue);
+ return this;
+ }
+
+ @Step
+ public TopicCreateEditForm updateCustomParameter(String customParameterName,
+ String customParameterValue) {
+ SelenideElement selenideElement = $$("ul[role=listbox][name^=customParams][name$=name]")
+ .find(Condition.exactText(customParameterName));
+ String name = selenideElement.getAttribute("name");
+ if (name != null) {
+ name = name.substring(0, name.lastIndexOf("."));
+ }
+ $(String.format("input[name^=%s]", name)).setValue(customParameterValue);
+ return this;
+ }
+
+ @Step
+ public String getCleanupPolicy() {
+ return new KafkaUISelectElement("cleanupPolicy").getCurrentValue();
+ }
+
+ @Step
+ public String getTimeToRetain() {
+ return timeToRetainField.getValue();
+ }
+
+ @Step
+ public String getMaxSizeOnDisk() {
+ return new KafkaUISelectElement("retentionBytes").getCurrentValue();
+ }
+
+ @Step
+ public String getMaxMessageBytes() {
+ return maxMessageBytesField.getValue();
+ }
+
+ @Step
+ public TopicCreateEditForm setMaxMessageBytes(Long bytes) {
+ maxMessageBytesField.setValue(bytes.toString());
+ return this;
+ }
+
+ @Step
+ public TopicCreateEditForm setMaxMessageBytes(String bytes) {
+ return setMaxMessageBytes(Long.parseLong(bytes));
+ }
+
+ @Step
+ public boolean isValidationMessageCustomParameterValueVisible() {
+ return isVisible(validationCustomParameterValueMsg);
+ }
+
+ @Step
+ public String getCustomParameterValue() {
+ return customParameterValueField.getValue();
+ }
+
+ private TopicCreateEditForm selectFromDropDownByOptionValue(String dropDownElementName,
+ String optionValue) {
+ KafkaUISelectElement select = new KafkaUISelectElement(dropDownElementName);
+ select.selectByOptionValue(optionValue);
+ return this;
+ }
+
+ private TopicCreateEditForm selectFromDropDownByVisibleText(String dropDownElementName,
+ String visibleText) {
+ KafkaUISelectElement select = new KafkaUISelectElement(dropDownElementName);
+ select.selectByVisibleText(visibleText);
+ return this;
+ }
+
+ private static class KafkaUISelectElement {
+
+ private final SelenideElement selectElement;
+
+ public KafkaUISelectElement(String selectElementName) {
+ this.selectElement = $("ul[role=listbox][name=" + selectElementName + "]");
+ }
+
+ public KafkaUISelectElement(SelenideElement selectElement) {
+ this.selectElement = selectElement;
+ }
+
+ public void selectByOptionValue(String optionValue) {
+ selectElement.click();
+ selectElement
+ .$$x(".//ul/li[@role='option']")
+ .find(Condition.attribute("value", optionValue))
+ .click(ClickOptions.usingJavaScript());
+ }
+
+ public void selectByVisibleText(String visibleText) {
+ selectElement.click();
+ selectElement
+ .$$("ul>li[role=option]")
+ .find(Condition.exactText(visibleText))
+ .click();
+ }
+
+ public String getCurrentValue() {
+ return selectElement.$("li").getText();
+ }
+ }
+}
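
Since every setter in TopicCreateEditForm above returns the page object, a whole form fill
reads as one fluent chain. A sketch of how a test might drive it (the topic name and values
are illustrative; an open Selenide session on the create-topic form is assumed):

    import com.provectus.kafka.ui.pages.topics.TopicCreateEditForm;
    import com.provectus.kafka.ui.pages.topics.enums.CleanupPolicyValue;
    import com.provectus.kafka.ui.pages.topics.enums.MaxSizeOnDisk;

    class TopicFormSketch {
        void createDemoTopic() {
            new TopicCreateEditForm()
                .waitUntilScreenReady()
                .setTopicName("demo-topic")                   // illustrative name
                .setNumberOfPartitions(3)
                .selectCleanupPolicy(CleanupPolicyValue.DELETE)
                .setMaxSizeOnDiskInGB(MaxSizeOnDisk.SIZE_1_GB)
                .setTimeToRetainDataInMs(86_400_000L)         // 1 day in ms
                .clickCreateTopicBtn();
        }
    }
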
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java
new file mode 100644
index 0000000000..78ec2cef14
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java
@@ -0,0 +1,448 @@
+package com.provectus.kafka.ui.pages.topics;
+
+import com.codeborne.selenide.CollectionCondition;
+import com.codeborne.selenide.Condition;
+import com.codeborne.selenide.ElementsCollection;
+import com.codeborne.selenide.SelenideElement;
+import com.provectus.kafka.ui.pages.BasePage;
+import io.qameta.allure.Step;
+import org.openqa.selenium.By;
+
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.YearMonth;
+import java.time.format.DateTimeFormatter;
+import java.time.format.DateTimeFormatterBuilder;
+import java.util.*;
+
+import static com.codeborne.selenide.Selenide.*;
+import static org.testcontainers.shaded.org.apache.commons.lang3.RandomUtils.nextInt;
+
+public class TopicDetails extends BasePage {
+
+ protected SelenideElement clearMessagesBtn = $x(("//div[contains(text(), 'Clear messages')]"));
+ protected SelenideElement recreateTopicBtn = $x("//div[text()='Recreate Topic']");
+ protected SelenideElement messageAmountCell = $x("//tbody/tr/td[5]");
+ protected SelenideElement overviewTab = $x("//a[contains(text(),'Overview')]");
+ protected SelenideElement messagesTab = $x("//a[contains(text(),'Messages')]");
+ protected SelenideElement seekTypeDdl = $x("//ul[@id='selectSeekType']/li");
+ protected SelenideElement seekTypeField = $x("//label[text()='Seek Type']//..//div/input");
+ protected SelenideElement addFiltersBtn = $x("//button[text()='Add Filters']");
+ protected SelenideElement savedFiltersLink = $x("//div[text()='Saved Filters']");
+ protected SelenideElement addFilterCodeModalTitle = $x("//label[text()='Filter code']");
+ protected SelenideElement addFilterCodeInput = $x("//div[@id='ace-editor']//textarea");
+ protected SelenideElement saveThisFilterCheckBoxAddFilterMdl = $x("//input[@name='saveFilter']");
+ protected SelenideElement displayNameInputAddFilterMdl = $x("//input[@placeholder='Enter Name']");
+ protected SelenideElement cancelBtnAddFilterMdl = $x("//button[text()='Cancel']");
+ protected SelenideElement addFilterBtnAddFilterMdl = $x("//button[text()='Add filter']");
+ protected SelenideElement addFiltersBtnMessages = $x("//button[text()='Add Filters']");
+ protected SelenideElement selectFilterBtnAddFilterMdl = $x("//button[text()='Select filter']");
+ protected SelenideElement editSettingsMenu = $x("//li[@role][contains(text(),'Edit settings')]");
+ protected SelenideElement removeTopicBtn = $x("//ul[@role='menu']//div[contains(text(),'Remove Topic')]");
+ protected SelenideElement produceMessageBtn = $x("//div//button[text()='Produce Message']");
+ protected SelenideElement contentMessageTab = $x("//html//div[@id='root']/div/main//table//p");
+ protected SelenideElement cleanUpPolicyField = $x("//div[contains(text(),'Clean Up Policy')]/../span/*");
+ protected SelenideElement partitionsField = $x("//div[contains(text(),'Partitions')]/../span");
+ protected SelenideElement backToCreateFiltersLink = $x("//div[text()='Back To create filters']");
+ protected ElementsCollection messageGridItems = $$x("//tbody//tr");
+ protected SelenideElement actualCalendarDate = $x("//div[@class='react-datepicker__current-month']");
+ protected SelenideElement previousMonthButton = $x("//button[@aria-label='Previous Month']");
+ protected SelenideElement nextMonthButton = $x("//button[@aria-label='Next Month']");
+ protected SelenideElement calendarTimeFld = $x("//input[@placeholder='Time']");
+ protected String dayCellLtr = "//div[@role='option'][contains(text(),'%d')]";
+ protected String seekFilterDdlLocator = "//ul[@id='selectSeekType']/ul/li[text()='%s']";
+ protected String savedFilterNameLocator = "//div[@role='savedFilter']/div[contains(text(),'%s')]";
+ protected String consumerIdLocator = "//a[@title='%s']";
+ protected String topicHeaderLocator = "//h1[contains(text(),'%s')]";
+ protected String activeFilterNameLocator = "//div[@data-testid='activeSmartFilter'][contains(text(),'%s')]";
+ protected String settingsGridValueLocator = "//tbody/tr/td/span[text()='%s']//ancestor::tr/td[2]/span";
+
+ @Step
+ public TopicDetails waitUntilScreenReady() {
+ waitUntilSpinnerDisappear();
+ overviewTab.shouldBe(Condition.visible);
+ return this;
+ }
+
+ @Step
+ public TopicDetails openDetailsTab(TopicMenu menu) {
+ $(By.linkText(menu.toString())).shouldBe(Condition.visible).click();
+ waitUntilSpinnerDisappear();
+ return this;
+ }
+
+ @Step
+ public String getSettingsGridValueByKey(String key) {
+ return $x(String.format(settingsGridValueLocator, key)).scrollTo().shouldBe(Condition.visible).getText();
+ }
+
+ @Step
+ public TopicDetails openDotMenu() {
+ clickByJavaScript(dotMenuBtn);
+ return this;
+ }
+
+ @Step
+ public boolean isAlertWithMessageVisible(AlertHeader header, String message) {
+ return isAlertVisible(header, message);
+ }
+
+ @Step
+ public TopicDetails clickEditSettingsMenu() {
+ editSettingsMenu.shouldBe(Condition.visible).click();
+ return this;
+ }
+
+ @Step
+ public boolean isConfirmationMdlVisible() {
+ return isConfirmationModalVisible();
+ }
+
+ @Step
+ public TopicDetails clickClearMessagesMenu() {
+ clearMessagesBtn.shouldBe(Condition.visible).click();
+ return this;
+ }
+
+ @Step
+ public TopicDetails clickRecreateTopicMenu() {
+ recreateTopicBtn.shouldBe(Condition.visible).click();
+ return this;
+ }
+
+ @Step
+ public String getCleanUpPolicy() {
+ return cleanUpPolicyField.getText();
+ }
+
+ @Step
+ public int getPartitions() {
+ return Integer.parseInt(partitionsField.getText().trim());
+ }
+
+ @Step
+ public boolean isTopicHeaderVisible(String topicName) {
+ return isVisible($x(String.format(topicHeaderLocator, topicName)));
+ }
+
+ @Step
+ public TopicDetails clickDeleteTopicMenu() {
+ removeTopicBtn.shouldBe(Condition.visible).click();
+ return this;
+ }
+
+ @Step
+ public TopicDetails clickConfirmBtnMdl() {
+ clickConfirmButton();
+ return this;
+ }
+
+ @Step
+ public TopicDetails clickProduceMessageBtn() {
+ clickByJavaScript(produceMessageBtn);
+ return this;
+ }
+
+ @Step
+ public TopicDetails selectSeekTypeDdlMessagesTab(String seekTypeName) {
+ seekTypeDdl.shouldBe(Condition.enabled).click();
+ $x(String.format(seekFilterDdlLocator, seekTypeName)).shouldBe(Condition.visible).click();
+ return this;
+ }
+
+ @Step
+ public TopicDetails setSeekTypeValueFldMessagesTab(String seekTypeValue) {
+ seekTypeField.shouldBe(Condition.enabled).sendKeys(seekTypeValue);
+ return this;
+ }
+
+ @Step
+ public TopicDetails clickSubmitFiltersBtnMessagesTab() {
+ clickByJavaScript(submitBtn);
+ waitUntilSpinnerDisappear();
+ return this;
+ }
+
+ @Step
+ public TopicDetails clickMessagesAddFiltersBtn() {
+ addFiltersBtn.shouldBe(Condition.enabled).click();
+ return this;
+ }
+
+ @Step
+ public TopicDetails openSavedFiltersListMdl() {
+ savedFiltersLink.shouldBe(Condition.enabled).click();
+ backToCreateFiltersLink.shouldBe(Condition.visible);
+ return this;
+ }
+
+ @Step
+ public boolean isFilterVisibleAtSavedFiltersMdl(String filterName) {
+ return isVisible($x(String.format(savedFilterNameLocator, filterName)));
+ }
+
+ @Step
+ public TopicDetails selectFilterAtSavedFiltersMdl(String filterName) {
+ $x(String.format(savedFilterNameLocator, filterName)).shouldBe(Condition.enabled).click();
+ return this;
+ }
+
+ @Step
+ public TopicDetails clickSelectFilterBtnAtSavedFiltersMdl() {
+ selectFilterBtnAddFilterMdl.shouldBe(Condition.enabled).click();
+ addFilterCodeModalTitle.shouldBe(Condition.disappear);
+ return this;
+ }
+
+ @Step
+ public TopicDetails waitUntilAddFiltersMdlVisible() {
+ addFilterCodeModalTitle.shouldBe(Condition.visible);
+ return this;
+ }
+
+ @Step
+ public TopicDetails setFilterCodeFieldAddFilterMdl(String filterCode) {
+ addFilterCodeInput.shouldBe(Condition.enabled).sendKeys(filterCode);
+ return this;
+ }
+
+ @Step
+ public TopicDetails selectSaveThisFilterCheckboxMdl(boolean select) {
+ selectElement(saveThisFilterCheckBoxAddFilterMdl, select);
+ return this;
+ }
+
+ @Step
+ public boolean isSaveThisFilterCheckBoxSelected() {
+ return isSelected(saveThisFilterCheckBoxAddFilterMdl);
+ }
+
+ @Step
+ public TopicDetails setDisplayNameFldAddFilterMdl(String displayName) {
+ displayNameInputAddFilterMdl.shouldBe(Condition.enabled).sendKeys(displayName);
+ return this;
+ }
+
+ @Step
+ public TopicDetails clickAddFilterBtnAndCloseMdl(boolean closeModal) {
+ addFilterBtnAddFilterMdl.shouldBe(Condition.enabled).click();
+ if (closeModal) {
+ addFilterCodeModalTitle.shouldBe(Condition.hidden);
+ } else {
+ addFilterCodeModalTitle.shouldBe(Condition.visible);
+ }
+ return this;
+ }
+
+ @Step
+ public boolean isAddFilterBtnAddFilterMdlEnabled() {
+ return isEnabled(addFilterBtnAddFilterMdl);
+ }
+
+ @Step
+ public boolean isActiveFilterVisible(String activeFilterName) {
+ return isVisible($x(String.format(activeFilterNameLocator, activeFilterName)));
+ }
+
+ public List<SelenideElement> getAllAddFilterModalVisibleElements() {
+ return Arrays.asList(savedFiltersLink, displayNameInputAddFilterMdl, addFilterBtnAddFilterMdl, cancelBtnAddFilterMdl);
+ }
+
+ public List<SelenideElement> getAllAddFilterModalEnabledElements() {
+ return Arrays.asList(displayNameInputAddFilterMdl, cancelBtnAddFilterMdl);
+ }
+
+ public List<SelenideElement> getAllAddFilterModalDisabledElements() {
+ return Collections.singletonList(addFilterBtnAddFilterMdl);
+ }
+
+ @Step
+ public TopicDetails openConsumerGroup(String consumerId) {
+ $x(String.format(consumerIdLocator, consumerId)).click();
+ return this;
+ }
+
+ @Step
+ public boolean isKeyMessageVisible(String keyMessage) {
+ return keyMessage.equals($("td[title]").getText());
+ }
+
+ @Step
+ public boolean isContentMessageVisible(String contentMessage) {
+ return contentMessage.matches(contentMessageTab.getText().trim());
+ }
+
+ private void selectYear(int expectedYear) {
+ LocalTime deadline = LocalTime.now().plusMinutes(3); // give up after three minutes of paging back
+ while (getActualCalendarDate().getYear() > expectedYear) {
+ clickByJavaScript(previousMonthButton);
+ sleep(1000);
+ if (LocalTime.now().isAfter(deadline)) {
+ throw new IllegalArgumentException("Unable to select year");
+ }
+ }
+ }
+
+ private void selectMonth(int expectedMonth) {
+ LocalTime deadline = LocalTime.now().plusMinutes(3); // same three-minute guard as selectYear
+ while (getActualCalendarDate().getMonthValue() > expectedMonth) {
+ clickByJavaScript(previousMonthButton);
+ sleep(1000);
+ if (LocalTime.now().isAfter(deadline)) {
+ throw new IllegalArgumentException("Unable to select month");
+ }
+ }
+ }
+
+ private void selectDay(int expectedDay) {
+ Objects.requireNonNull($$x(String.format(dayCellLtr, expectedDay)).stream()
+ .filter(day -> !Objects.requireNonNull(day.getAttribute("class")).contains("outside-month"))
+ .findFirst().orElseThrow()).shouldBe(Condition.enabled).click();
+ }
+
+ private void setTime(LocalDateTime dateTime) {
+ calendarTimeFld.shouldBe(Condition.enabled)
+ .sendKeys(String.valueOf(dateTime.getHour()), String.valueOf(dateTime.getMinute()));
+ }
+
+ @Step
+ public TopicDetails selectDateAndTimeByCalendar(LocalDateTime dateTime) {
+ setTime(dateTime);
+ selectYear(dateTime.getYear());
+ selectMonth(dateTime.getMonthValue());
+ selectDay(dateTime.getDayOfMonth());
+ return this;
+ }
+
+ private LocalDate getActualCalendarDate() {
+ String monthAndYearStr = actualCalendarDate.getText().trim();
+ DateTimeFormatter formatter = new DateTimeFormatterBuilder()
+ .parseCaseInsensitive()
+ .append(DateTimeFormatter.ofPattern("MMMM yyyy"))
+ .toFormatter(Locale.ENGLISH);
+ YearMonth yearMonth = formatter.parse(monthAndYearStr, YearMonth::from);
+ return yearMonth.atDay(1);
+ }
+
+ @Step
+ public TopicDetails openCalendarSeekType() {
+ seekTypeField.shouldBe(Condition.enabled).click();
+ actualCalendarDate.shouldBe(Condition.visible);
+ return this;
+ }
+
+ @Step
+ public int getMessageCountAmount() {
+ return Integer.parseInt(messageAmountCell.getText().trim());
+ }
+
+ private List<MessageGridItem> initItems() {
+ List<MessageGridItem> gridItemList = new ArrayList<>();
+ allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ .forEach(item -> gridItemList.add(new TopicDetails.MessageGridItem(item)));
+ return gridItemList;
+ }
+
+ @Step
+ public TopicDetails.MessageGridItem getMessageByOffset(int offset) {
+ return initItems().stream()
+ .filter(e -> e.getOffset() == offset)
+ .findFirst().orElseThrow();
+ }
+
+ @Step
+ public List<MessageGridItem> getAllMessages() {
+ return initItems();
+ }
+
+ @Step
+ public TopicDetails.MessageGridItem getRandomMessage() {
+ return getMessageByOffset(nextInt(0, initItems().size() - 1));
+ }
+
+ public enum TopicMenu {
+ OVERVIEW("Overview"),
+ MESSAGES("Messages"),
+ CONSUMERS("Consumers"),
+ SETTINGS("Settings");
+
+ private final String value;
+
+ TopicMenu(String value) {
+ this.value = value;
+ }
+
+ public String toString() {
+ return value;
+ }
+ }
+
+ public static class MessageGridItem extends BasePage {
+
+ private final SelenideElement element;
+
+ private MessageGridItem(SelenideElement element) {
+ this.element = element;
+ }
+
+ @Step
+ public MessageGridItem clickExpand() {
+ clickByJavaScript(element.$x("./td[1]/span"));
+ return this;
+ }
+
+ private SelenideElement getOffsetElm() {
+ return element.$x("./td[2]");
+ }
+
+ @Step
+ public int getOffset() {
+ return Integer.parseInt(getOffsetElm().getText().trim());
+ }
+
+ @Step
+ public int getPartition() {
+ return Integer.parseInt(element.$x("./td[3]").getText().trim());
+ }
+
+ @Step
+ public LocalDateTime getTimestamp() {
+ String timestampValue = element.$x("./td[4]/div").getText().trim();
+ DateTimeFormatter formatter = DateTimeFormatter.ofPattern("M/d/yyyy, HH:mm:ss");
+ return LocalDateTime.parse(timestampValue, formatter);
+ }
+
+ @Step
+ public String getKey() {
+ return element.$x("./td[5]").getText().trim();
+ }
+
+ @Step
+ public String getValue() {
+ return element.$x("./td[6]/span/p").getText().trim();
+ }
+
+ @Step
+ public MessageGridItem openDotMenu() {
+ getOffsetElm().hover();
+ element.$x("./td[7]/div/button[@aria-label='Dropdown Toggle']")
+ .shouldBe(Condition.visible).click();
+ return this;
+ }
+
+ @Step
+ public MessageGridItem clickCopyToClipBoard() {
+ clickByJavaScript(element.$x("./td[7]//li[text() = 'Copy to clipboard']")
+ .shouldBe(Condition.visible));
+ return this;
+ }
+
+ @Step
+ public MessageGridItem clickSaveAsFile() {
+ clickByJavaScript(element.$x("./td[7]//li[text() = 'Save as a file']")
+ .shouldBe(Condition.visible));
+ return this;
+ }
+ }
+}
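
TopicDetails above wraps each message row in a MessageGridItem, so tests assert against
typed accessors instead of raw table cells. A sketch of walking the grid (assuming the
topic's Messages tab is already open in an active Selenide session):

    import com.provectus.kafka.ui.pages.topics.TopicDetails;

    class MessageGridSketch {
        void printMessages() {
            TopicDetails details = new TopicDetails()
                .waitUntilScreenReady()
                .openDetailsTab(TopicDetails.TopicMenu.MESSAGES);
            details.getAllMessages().forEach(msg ->
                System.out.printf("offset=%d partition=%d key=%s%n",
                    msg.getOffset(), msg.getPartition(), msg.getKey()));
        }
    }
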
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicSettingsTab.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicSettingsTab.java
new file mode 100644
index 0000000000..2b00c97c40
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicSettingsTab.java
@@ -0,0 +1,66 @@
+package com.provectus.kafka.ui.pages.topics;
+
+import com.codeborne.selenide.CollectionCondition;
+import com.codeborne.selenide.Condition;
+import com.codeborne.selenide.SelenideElement;
+import com.provectus.kafka.ui.pages.BasePage;
+import io.qameta.allure.Step;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static com.codeborne.selenide.Selenide.$x;
+
+public class TopicSettingsTab extends BasePage {
+
+ protected SelenideElement defaultValueColumnHeaderLocator = $x("//div[text() = 'Default Value']");
+
+ @Step
+ public TopicSettingsTab waitUntilScreenReady() {
+ waitUntilSpinnerDisappear();
+ defaultValueColumnHeaderLocator.shouldBe(Condition.visible);
+ return this;
+ }
+
+ private List<SettingsGridItem> initGridItems() {
+ List<SettingsGridItem> gridItemList = new ArrayList<>();
+ allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ .forEach(item -> gridItemList.add(new SettingsGridItem(item)));
+ return gridItemList;
+ }
+
+ private TopicSettingsTab.SettingsGridItem getItemByKey(String key) {
+ return initGridItems().stream()
+ .filter(e -> e.getKey().equals(key))
+ .findFirst().orElseThrow();
+ }
+
+ @Step
+ public String getValueByKey(String key) {
+ return getItemByKey(key).getValue();
+ }
+
+ public static class SettingsGridItem extends BasePage {
+
+ private final SelenideElement element;
+
+ public SettingsGridItem(SelenideElement element) {
+ this.element = element;
+ }
+
+ @Step
+ public String getKey() {
+ return element.$x("./td[1]/span").getText().trim();
+ }
+
+ @Step
+ public String getValue() {
+ return element.$x("./td[2]/span").getText().trim();
+ }
+
+ @Step
+ public String getDefaultValue() {
+ return element.$x("./td[3]/span").getText().trim();
+ }
+ }
+}
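
The settings tab uses the same grid-item pattern: rows are wrapped and looked up by config
key, so reading one value is a single call. A sketch ("retention.ms" is an illustrative
key; an open Settings tab is assumed):

    import com.provectus.kafka.ui.pages.topics.TopicSettingsTab;

    class SettingsTabSketch {
        String readRetention() {
            return new TopicSettingsTab()
                .waitUntilScreenReady()
                .getValueByKey("retention.ms");
        }
    }
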
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java
new file mode 100644
index 0000000000..291c94667f
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java
@@ -0,0 +1,261 @@
+package com.provectus.kafka.ui.pages.topics;
+
+import com.codeborne.selenide.CollectionCondition;
+import com.codeborne.selenide.Condition;
+import com.codeborne.selenide.SelenideElement;
+import com.provectus.kafka.ui.pages.BasePage;
+import io.qameta.allure.Step;
+
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import static com.codeborne.selenide.Condition.visible;
+import static com.codeborne.selenide.Selenide.$x;
+
+public class TopicsList extends BasePage {
+
+ protected SelenideElement topicListHeader = $x("//h1[text()='Topics']");
+ protected SelenideElement addTopicBtn = $x("//button[normalize-space(text()) ='Add a Topic']");
+ protected SelenideElement searchField = $x("//input[@placeholder='Search by Topic Name']");
+ protected SelenideElement showInternalRadioBtn = $x("//input[@name='ShowInternalTopics']");
+ protected SelenideElement deleteSelectedTopicsBtn = $x("//button[text()='Delete selected topics']");
+ protected SelenideElement copySelectedTopicBtn = $x("//button[text()='Copy selected topic']");
+ protected SelenideElement purgeMessagesOfSelectedTopicsBtn = $x("//button[text()='Purge messages of selected topics']");
+ protected SelenideElement clearMessagesBtn = $x("//ul[contains(@class ,'open')]//div[text()='Clear Messages']");
+ protected SelenideElement recreateTopicBtn = $x("//ul[contains(@class ,'open')]//div[text()='Recreate Topic']");
+ protected SelenideElement removeTopicBtn = $x("//ul[contains(@class ,'open')]//div[text()='Remove Topic']");
+
+ @Step
+ public TopicsList waitUntilScreenReady() {
+ waitUntilSpinnerDisappear();
+ topicListHeader.shouldBe(visible);
+ return this;
+ }
+
+ @Step
+ public TopicsList clickAddTopicBtn() {
+ clickByJavaScript(addTopicBtn);
+ return this;
+ }
+
+ @Step
+ public boolean isTopicVisible(String topicName) {
+ tableGrid.shouldBe(visible);
+ return isVisible(getTableElement(topicName));
+ }
+
+ @Step
+ public boolean isShowInternalRadioBtnSelected() {
+ return isSelected(showInternalRadioBtn);
+ }
+
+ @Step
+ public TopicsList setShowInternalRadioButton(boolean select) {
+ selectElement(showInternalRadioBtn, select);
+ return this;
+ }
+
+ @Step
+ public TopicsList openTopic(String topicName) {
+ getTopicItem(topicName).openItem();
+ return this;
+ }
+
+ @Step
+ public TopicsList openDotMenuByTopicName(String topicName) {
+ getTopicItem(topicName).openDotMenu();
+ return this;
+ }
+
+ @Step
+ public boolean isCopySelectedTopicBtnEnabled() {
+ return isEnabled(copySelectedTopicBtn);
+ }
+
+ @Step
+ public List<SelenideElement> getActionButtons() {
+ return Stream.of(deleteSelectedTopicsBtn, copySelectedTopicBtn, purgeMessagesOfSelectedTopicsBtn)
+ .collect(Collectors.toList());
+ }
+
+ @Step
+ public TopicsList clickCopySelectedTopicBtn() {
+ copySelectedTopicBtn.shouldBe(Condition.enabled).click();
+ return this;
+ }
+
+ @Step
+ public TopicsList clickPurgeMessagesOfSelectedTopicsBtn() {
+ purgeMessagesOfSelectedTopicsBtn.shouldBe(Condition.enabled).click();
+ return this;
+ }
+
+ @Step
+ public TopicsList clickClearMessagesBtn() {
+ clickByJavaScript(clearMessagesBtn.shouldBe(visible));
+ return this;
+ }
+
+ @Step
+ public TopicsList clickRecreateTopicBtn() {
+ clickByJavaScript(recreateTopicBtn.shouldBe(visible));
+ return this;
+ }
+
+ @Step
+ public TopicsList clickRemoveTopicBtn() {
+ clickByJavaScript(removeTopicBtn.shouldBe(visible));
+ return this;
+ }
+
+ @Step
+ public TopicsList clickConfirmBtnMdl() {
+ clickConfirmButton();
+ return this;
+ }
+
+ @Step
+ public TopicsList clickCancelBtnMdl() {
+ clickCancelButton();
+ return this;
+ }
+
+ @Step
+ public boolean isConfirmationMdlVisible() {
+ return isConfirmationModalVisible();
+ }
+
+ @Step
+ public boolean isAlertWithMessageVisible(AlertHeader header, String message) {
+ return isAlertVisible(header, message);
+ }
+
+ private List<SelenideElement> getVisibleColumnHeaders() {
+ return Stream.of("Replication Factor", "Number of messages", "Topic Name", "Partitions", "Out of sync replicas", "Size")
+ .map(name -> $x(String.format(columnHeaderLocator, name)))
+ .collect(Collectors.toList());
+ }
+
+ private List<SelenideElement> getEnabledColumnHeaders() {
+ return Stream.of("Topic Name", "Partitions", "Out of sync replicas", "Size")
+ .map(name -> $x(String.format(columnHeaderLocator, name)))
+ .collect(Collectors.toList());
+ }
+
+ @Step
+ public List<SelenideElement> getAllVisibleElements() {
+ List<SelenideElement> visibleElements = new ArrayList<>(getVisibleColumnHeaders());
+ visibleElements.addAll(Arrays.asList(searchField, addTopicBtn, tableGrid));
+ visibleElements.addAll(getActionButtons());
+ return visibleElements;
+ }
+
+ @Step
+ public List<SelenideElement> getAllEnabledElements() {
+ List<SelenideElement> enabledElements = new ArrayList<>(getEnabledColumnHeaders());
+ enabledElements.addAll(Arrays.asList(searchField, showInternalRadioBtn, addTopicBtn));
+ return enabledElements;
+ }
+
+ private List<TopicGridItem> initGridItems() {
+ List<TopicGridItem> gridItemList = new ArrayList<>();
+ allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ .forEach(item -> gridItemList.add(new TopicGridItem(item)));
+ return gridItemList;
+ }
+
+ @Step
+ public TopicGridItem getTopicItem(String name) {
+ return initGridItems().stream()
+ .filter(e -> e.getName().equals(name))
+ .findFirst().orElseThrow();
+ }
+
+ @Step
+ public List<TopicGridItem> getNonInternalTopics() {
+ return initGridItems().stream()
+ .filter(e -> !e.isInternal())
+ .collect(Collectors.toList());
+ }
+
+ @Step
+ public List<TopicGridItem> getInternalTopics() {
+ return initGridItems().stream()
+ .filter(TopicGridItem::isInternal)
+ .collect(Collectors.toList());
+ }
+
+ public static class TopicGridItem extends BasePage {
+
+ private final SelenideElement element;
+
+ public TopicGridItem(SelenideElement element) {
+ this.element = element;
+ }
+
+ @Step
+ public TopicsList selectItem(boolean select) {
+ selectElement(element.$x("./td[1]/input"), select);
+ return new TopicsList();
+ }
+
+ @Step
+ public boolean isInternal() {
+ boolean internal = false;
+ try {
+ element.$x("./td[2]/a/span").shouldBe(visible, Duration.ofMillis(500));
+ internal = true;
+ } catch (Throwable ignored) {
+ }
+ return internal;
+ }
+
+ private SelenideElement getNameElm() {
+ return element.$x("./td[2]");
+ }
+
+ @Step
+ public String getName() {
+ return getNameElm().getText().trim();
+ }
+
+ @Step
+ public void openItem() {
+ getNameElm().click();
+ }
+
+ @Step
+ public int getPartition() {
+ return Integer.parseInt(element.$x("./td[3]").getText().trim());
+ }
+
+ @Step
+ public int getOutOfSyncReplicas() {
+ return Integer.parseInt(element.$x("./td[4]").getText().trim());
+ }
+
+ @Step
+ public int getReplicationFactor() {
+ return Integer.parseInt(element.$x("./td[5]").getText().trim());
+ }
+
+ @Step
+ public int getNumberOfMessages() {
+ return Integer.parseInt(element.$x("./td[6]").getText().trim());
+ }
+
+ @Step
+ public int getSize() {
+ return Integer.parseInt(element.$x("./td[7]").getText().trim());
+ }
+
+ @Step
+ public void openDotMenu() {
+ element.$x("./td[8]//button").click();
+ }
+ }
+}
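
Bulk actions on the list page combine a row lookup with the action buttons declared above.
A sketch of purging one topic's messages (topic name illustrative; assumes the Topics list
is open in an active Selenide session):

    import com.provectus.kafka.ui.pages.topics.TopicsList;

    class TopicsListSketch {
        void purgeDemoTopic() {
            new TopicsList()
                .waitUntilScreenReady()
                .getTopicItem("demo-topic")
                .selectItem(true)                        // tick the row checkbox
                .clickPurgeMessagesOfSelectedTopicsBtn()
                .clickConfirmBtnMdl();
        }
    }
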
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/enums/CleanupPolicyValue.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/enums/CleanupPolicyValue.java
new file mode 100644
index 0000000000..48c0c0fbcb
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/enums/CleanupPolicyValue.java
@@ -0,0 +1,25 @@
+package com.provectus.kafka.ui.pages.topics.enums;
+
+public enum CleanupPolicyValue {
+
+ DELETE("delete", "Delete"),
+ COMPACT("compact", "Compact"),
+ COMPACT_DELETE("compact,delete", "Compact,Delete");
+
+ private final String optionValue;
+ private final String visibleText;
+
+ CleanupPolicyValue(String optionValue, String visibleText) {
+ this.optionValue = optionValue;
+ this.visibleText = visibleText;
+ }
+
+ public String getOptionValue() {
+ return optionValue;
+ }
+
+ public String getVisibleText() {
+ return visibleText;
+ }
+}
+
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/enums/CustomParameterType.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/enums/CustomParameterType.java
new file mode 100644
index 0000000000..f4cb5cb951
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/enums/CustomParameterType.java
@@ -0,0 +1,37 @@
+package com.provectus.kafka.ui.pages.topics.enums;
+
+public enum CustomParameterType {
+
+ COMPRESSION_TYPE("compression.type"),
+ DELETE_RETENTION_MS("delete.retention.ms"),
+ FILE_DELETE_DELAY_MS("file.delete.delay.ms"),
+ FLUSH_MESSAGES("flush.messages"),
+ FLUSH_MS("flush.ms"),
+ FOLLOWER_REPLICATION_THROTTLED_REPLICAS("follower.replication.throttled.replicas"),
+ INDEX_INTERVAL_BYTES("index.interval.bytes"),
+ LEADER_REPLICATION_THROTTLED_REPLICAS("leader.replication.throttled.replicas"),
+ MAX_COMPACTION_LAG_MS("max.compaction.lag.ms"),
+ MESSAGE_DOWNCONVERSION_ENABLE("message.downconversion.enable"),
+ MESSAGE_FORMAT_VERSION("message.format.version"),
+ MESSAGE_TIMESTAMP_DIFFERENCE_MAX_MS("message.timestamp.difference.max.ms"),
+ MESSAGE_TIMESTAMP_TYPE("message.timestamp.type"),
+ MIN_CLEANABLE_DIRTY_RATIO("min.cleanable.dirty.ratio"),
+ MIN_COMPACTION_LAG_MS("min.compaction.lag.ms"),
+ PREALLOCATE("preallocate"),
+ RETENTION_BYTES("retention.bytes"),
+ SEGMENT_BYTES("segment.bytes"),
+ SEGMENT_INDEX_BYTES("segment.index.bytes"),
+ SEGMENT_JITTER_MS("segment.jitter.ms"),
+ SEGMENT_MS("segment.ms"),
+ UNCLEAN_LEADER_ELECTION_ENABLE("unclean.leader.election.enable");
+
+ private final String optionValue;
+
+ CustomParameterType(String optionValue) {
+ this.optionValue = optionValue;
+ }
+
+ public String getOptionValue() {
+ return optionValue;
+ }
+}
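
Each constant's optionValue above is the literal Kafka config key, which
TopicCreateEditForm interpolates into its ddlElementLocator ("//li[@value='%s']") to pick
the matching dropdown entry. A quick check of that wiring:

    import com.provectus.kafka.ui.pages.topics.enums.CustomParameterType;

    class CustomParameterLocatorSketch {
        public static void main(String[] args) {
            String locator = String.format("//li[@value='%s']",
                CustomParameterType.RETENTION_BYTES.getOptionValue());
            System.out.println(locator); // //li[@value='retention.bytes']
        }
    }
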
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/enums/MaxSizeOnDisk.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/enums/MaxSizeOnDisk.java
new file mode 100644
index 0000000000..8f459eea75
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/enums/MaxSizeOnDisk.java
@@ -0,0 +1,27 @@
+package com.provectus.kafka.ui.pages.topics.enums;
+
+public enum MaxSizeOnDisk {
+
+ NOT_SET("-1", "Not Set"),
+ SIZE_1_GB("1073741824", "1 GB"),
+ SIZE_10_GB("10737418240", "10 GB"),
+ SIZE_20_GB("21474836480", "20 GB"),
+ SIZE_50_GB("53687091200", "50 GB");
+
+ private final String optionValue;
+ private final String visibleText;
+
+ MaxSizeOnDisk(String optionValue, String visibleText) {
+ this.optionValue = optionValue;
+ this.visibleText = visibleText;
+ }
+
+ public String getOptionValue() {
+ return optionValue;
+ }
+
+ public String getVisibleText() {
+ return visibleText;
+ }
+}
+
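
MaxSizeOnDisk maps byte counts to the labels shown in the retention-size dropdown; for instance SIZE_20_GB is 20 * 1024^3 = 21474836480 bytes. A sketch, with `setMaxSizeOnDiskInGB` and `getMaxSizeOnDisk` as assumed methods:

    // Illustrative: select by visible text, then verify on the details page.
    topicCreateEditForm.setMaxSizeOnDiskInGB(MaxSizeOnDisk.SIZE_20_GB.getVisibleText());
    Assert.assertEquals(topicDetails.getMaxSizeOnDisk(),
        MaxSizeOnDisk.SIZE_20_GB.getVisibleText(), "getMaxSizeOnDisk()");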
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/enums/TimeToRetain.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/enums/TimeToRetain.java
new file mode 100644
index 0000000000..c07abdc175
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/enums/TimeToRetain.java
@@ -0,0 +1,26 @@
+package com.provectus.kafka.ui.pages.topics.enums;
+
+public enum TimeToRetain {
+
+ BTN_12_HOURS("12 hours", "43200000"),
+ BTN_1_DAY("1 day", "86400000"),
+ BTN_2_DAYS("2 days", "172800000"),
+ BTN_7_DAYS("7 days", "604800000"),
+ BTN_4_WEEKS("4 weeks", "2419200000");
+
+ private final String button;
+ private final String value;
+
+ TimeToRetain(String button, String value) {
+ this.button = button;
+ this.value = value;
+ }
+
+ public String getButton() {
+ return button;
+ }
+
+ public String getValue() {
+ return value;
+ }
+}
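
Each TimeToRetain button carries its retention period in milliseconds, and the values check out arithmetically, e.g. 7 days = 7 * 24 * 60 * 60 * 1000 ms:

    // Standalone sanity check of the enum's ms values (not part of the suite).
    long sevenDaysMs = 7L * 24 * 60 * 60 * 1000; // 604800000
    Assert.assertEquals(String.valueOf(sevenDaysMs), TimeToRetain.BTN_7_DAYS.getValue());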
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
index c41796cb85..54f69b5198 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
@@ -1,36 +1,27 @@
package com.provectus.kafka.ui.services;
-import static com.codeborne.selenide.Selenide.sleep;
-import static com.provectus.kafka.ui.utilities.FileUtils.fileToString;
-
import com.fasterxml.jackson.databind.ObjectMapper;
import com.provectus.kafka.ui.api.ApiClient;
-import com.provectus.kafka.ui.api.api.KafkaConnectApi;
-import com.provectus.kafka.ui.api.api.KsqlApi;
-import com.provectus.kafka.ui.api.api.MessagesApi;
-import com.provectus.kafka.ui.api.api.SchemasApi;
-import com.provectus.kafka.ui.api.api.TopicsApi;
-import com.provectus.kafka.ui.api.model.CreateTopicMessage;
-import com.provectus.kafka.ui.api.model.KsqlCommandV2;
-import com.provectus.kafka.ui.api.model.KsqlCommandV2Response;
-import com.provectus.kafka.ui.api.model.KsqlResponse;
-import com.provectus.kafka.ui.api.model.NewConnector;
-import com.provectus.kafka.ui.api.model.NewSchemaSubject;
-import com.provectus.kafka.ui.api.model.TopicCreation;
+import com.provectus.kafka.ui.api.api.*;
+import com.provectus.kafka.ui.api.model.*;
import com.provectus.kafka.ui.models.Connector;
import com.provectus.kafka.ui.models.Schema;
import com.provectus.kafka.ui.models.Topic;
-import com.provectus.kafka.ui.pages.ksqldb.models.Stream;
-import com.provectus.kafka.ui.pages.ksqldb.models.Table;
+import com.provectus.kafka.ui.pages.ksqlDb.models.Stream;
+import com.provectus.kafka.ui.pages.ksqlDb.models.Table;
import com.provectus.kafka.ui.settings.BaseSource;
import io.qameta.allure.Step;
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.web.reactive.function.client.WebClientResponseException;
+
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
-import lombok.SneakyThrows;
-import lombok.extern.slf4j.Slf4j;
-import org.springframework.web.reactive.function.client.WebClientResponseException;
+
+import static com.codeborne.selenide.Selenide.sleep;
+import static com.provectus.kafka.ui.utilities.FileUtils.fileToString;
@Slf4j
@@ -38,45 +29,47 @@ public class ApiService extends BaseSource {
@SneakyThrows
private TopicsApi topicApi() {
- return new TopicsApi(new ApiClient().setBasePath(BASE_LOCAL_URL));
+ return new TopicsApi(new ApiClient().setBasePath(BASE_LOCAL_URL));
}
@SneakyThrows
private SchemasApi schemaApi() {
- return new SchemasApi(new ApiClient().setBasePath(BASE_LOCAL_URL));
+ return new SchemasApi(new ApiClient().setBasePath(BASE_LOCAL_URL));
}
@SneakyThrows
private KafkaConnectApi connectorApi() {
- return new KafkaConnectApi(new ApiClient().setBasePath(BASE_LOCAL_URL));
+ return new KafkaConnectApi(new ApiClient().setBasePath(BASE_LOCAL_URL));
}
@SneakyThrows
private MessagesApi messageApi() {
- return new MessagesApi(new ApiClient().setBasePath(BASE_LOCAL_URL));
+ return new MessagesApi(new ApiClient().setBasePath(BASE_LOCAL_URL));
}
@SneakyThrows
- private KsqlApi ksqlApi(){return new KsqlApi(new ApiClient().setBasePath(BASE_LOCAL_URL));}
+ private KsqlApi ksqlApi() {
+ return new KsqlApi(new ApiClient().setBasePath(BASE_LOCAL_URL));
+ }
@SneakyThrows
private void createTopic(String clusterName, String topicName) {
- TopicCreation topic = new TopicCreation();
- topic.setName(topicName);
- topic.setPartitions(1);
- topic.setReplicationFactor(1);
- try {
- topicApi().createTopic(clusterName, topic).block();
- sleep(2000);
- } catch (WebClientResponseException ex) {
- ex.printStackTrace();
- }
+ TopicCreation topic = new TopicCreation();
+ topic.setName(topicName);
+ topic.setPartitions(1);
+ topic.setReplicationFactor(1);
+ try {
+ topicApi().createTopic(clusterName, topic).block();
+ sleep(2000);
+ } catch (WebClientResponseException ex) {
+ ex.printStackTrace();
+ }
}
@Step
public ApiService createTopic(String topicName) {
- createTopic(CLUSTER_NAME, topicName);
- return this;
+ createTopic(CLUSTER_NAME, topicName);
+ return this;
}
@SneakyThrows
@@ -88,9 +81,9 @@ public class ApiService extends BaseSource {
}
@Step
- public ApiService deleteTopic(String topicName){
- deleteTopic(CLUSTER_NAME, topicName);
- return this;
+ public ApiService deleteTopic(String topicName) {
+ deleteTopic(CLUSTER_NAME, topicName);
+ return this;
}
@SneakyThrows
@@ -107,9 +100,9 @@ public class ApiService extends BaseSource {
}
@Step
- public ApiService createSchema(Schema schema){
- createSchema(CLUSTER_NAME, schema);
- return this;
+ public ApiService createSchema(Schema schema) {
+ createSchema(CLUSTER_NAME, schema);
+ return this;
}
@SneakyThrows
@@ -121,9 +114,9 @@ public class ApiService extends BaseSource {
}
@Step
- public ApiService deleteSchema(String schemaName){
- deleteSchema(CLUSTER_NAME, schemaName);
- return this;
+ public ApiService deleteSchema(String schemaName) {
+ deleteSchema(CLUSTER_NAME, schemaName);
+ return this;
}
@SneakyThrows
@@ -135,9 +128,9 @@ public class ApiService extends BaseSource {
}
@Step
- public ApiService deleteConnector(String connectName, String connectorName){
- deleteConnector(CLUSTER_NAME, connectName, connectorName);
- return this;
+ public ApiService deleteConnector(String connectName, String connectorName) {
+ deleteConnector(CLUSTER_NAME, connectName, connectorName);
+ return this;
}
@SneakyThrows
@@ -154,9 +147,9 @@ public class ApiService extends BaseSource {
}
@Step
- public ApiService createConnector(String connectName, Connector connector){
- createConnector(CLUSTER_NAME, connectName, connector);
- return this;
+ public ApiService createConnector(String connectName, Connector connector) {
+ createConnector(CLUSTER_NAME, connectName, connector);
+ return this;
}
@Step
@@ -166,94 +159,102 @@ public class ApiService extends BaseSource {
@SneakyThrows
private void sendMessage(String clusterName, Topic topic) {
- CreateTopicMessage createMessage = new CreateTopicMessage();
- createMessage.setPartition(0);
- createMessage.setKeySerde("String");
- createMessage.setValueSerde("String");
- createMessage.setKey(topic.getMessageKey());
- createMessage.setContent(topic.getMessageContent());
- try {
- messageApi().sendTopicMessages(clusterName, topic.getName(), createMessage).block();
- } catch (WebClientResponseException ex) {
- ex.getRawStatusCode();
- }
+ CreateTopicMessage createMessage = new CreateTopicMessage();
+ createMessage.setPartition(0);
+ createMessage.setKeySerde("String");
+ createMessage.setValueSerde("String");
+ createMessage.setKey(topic.getMessageKey());
+ createMessage.setContent(topic.getMessageContent());
+ try {
+ messageApi().sendTopicMessages(clusterName, topic.getName(), createMessage).block();
+ } catch (WebClientResponseException ex) {
+ ex.getRawStatusCode();
+ }
}
@Step
public ApiService sendMessage(Topic topic) {
- sendMessage(CLUSTER_NAME, topic);
- return this;
+ sendMessage(CLUSTER_NAME, topic);
+ return this;
}
@Step
public ApiService createStream(Stream stream) {
- KsqlCommandV2Response pipeIdStream = ksqlApi().executeKsql(
- CLUSTER_NAME, new KsqlCommandV2()
- .ksql(String.format("CREATE STREAM %s (profileId VARCHAR, latitude DOUBLE, longitude DOUBLE) ",
- stream.getName())
- + String.format("WITH (kafka_topic='%s', value_format='json', partitions=1);",
- stream.getTopicName())))
- .block();
- assert pipeIdStream != null;
-    List<KsqlResponse> responseListStream =
- ksqlApi().openKsqlResponsePipe(CLUSTER_NAME, pipeIdStream.getPipeId()).collectList().block();
- assert Objects.requireNonNull(responseListStream).size() != 0;
- return this;
+ KsqlCommandV2Response pipeIdStream = ksqlApi()
+ .executeKsql(CLUSTER_NAME, new KsqlCommandV2()
+ .ksql(String.format("CREATE STREAM %s (profileId VARCHAR, latitude DOUBLE, longitude DOUBLE) ",
+ stream.getName())
+ + String.format("WITH (kafka_topic='%s', value_format='json', partitions=1);",
+ stream.getTopicName())))
+ .block();
+ assert pipeIdStream != null;
+    List<KsqlResponse> responseListStream = ksqlApi()
+ .openKsqlResponsePipe(CLUSTER_NAME, pipeIdStream.getPipeId())
+ .collectList()
+ .block();
+ assert Objects.requireNonNull(responseListStream).size() != 0;
+ return this;
}
@Step
public ApiService createTables(Table firstTable, Table secondTable) {
- KsqlCommandV2Response pipeIdTable1 = ksqlApi().executeKsql(
- CLUSTER_NAME, new KsqlCommandV2().ksql(
- String.format("CREATE TABLE %s AS ", firstTable.getName())
- + " SELECT profileId, "
- + " LATEST_BY_OFFSET(latitude) AS la, "
- + " LATEST_BY_OFFSET(longitude) AS lo "
- + String.format(" FROM %s ", firstTable.getStreamName())
- + " GROUP BY profileId "
- + " EMIT CHANGES;"))
- .block();
- assert pipeIdTable1 != null;
-    List<KsqlResponse> responseListTable =
- ksqlApi().openKsqlResponsePipe(CLUSTER_NAME, pipeIdTable1.getPipeId()).collectList().block();
- assert Objects.requireNonNull(responseListTable).size() != 0;
- KsqlCommandV2Response pipeIdTable2 = ksqlApi().executeKsql(
- CLUSTER_NAME,
- new KsqlCommandV2().ksql(String.format("CREATE TABLE %s AS ", secondTable.getName())
- + " SELECT ROUND(GEO_DISTANCE(la, lo, 37.4133, -122.1162), -1) AS distanceInMiles, "
- + " COLLECT_LIST(profileId) AS riders, "
- + " COUNT(*) AS count "
- + String.format(" FROM %s ", firstTable.getName())
- + " GROUP BY ROUND(GEO_DISTANCE(la, lo, 37.4133, -122.1162), -1);"))
- .block();
- assert pipeIdTable2 != null;
-    List<KsqlResponse> responseListTable2 =
- ksqlApi().openKsqlResponsePipe(CLUSTER_NAME, pipeIdTable2.getPipeId()).collectList().block();
- assert Objects.requireNonNull(responseListTable2).size() != 0;
- return this;
+ KsqlCommandV2Response pipeIdTable1 = ksqlApi()
+ .executeKsql(CLUSTER_NAME, new KsqlCommandV2()
+ .ksql(String.format("CREATE TABLE %s AS ", firstTable.getName())
+ + " SELECT profileId, "
+ + " LATEST_BY_OFFSET(latitude) AS la, "
+ + " LATEST_BY_OFFSET(longitude) AS lo "
+ + String.format(" FROM %s ", firstTable.getStreamName())
+ + " GROUP BY profileId "
+ + " EMIT CHANGES;"))
+ .block();
+ assert pipeIdTable1 != null;
+    List<KsqlResponse> responseListTable = ksqlApi()
+ .openKsqlResponsePipe(CLUSTER_NAME, pipeIdTable1.getPipeId())
+ .collectList()
+ .block();
+ assert Objects.requireNonNull(responseListTable).size() != 0;
+ KsqlCommandV2Response pipeIdTable2 = ksqlApi()
+ .executeKsql(CLUSTER_NAME, new KsqlCommandV2()
+ .ksql(String.format("CREATE TABLE %s AS ", secondTable.getName())
+ + " SELECT ROUND(GEO_DISTANCE(la, lo, 37.4133, -122.1162), -1) AS distanceInMiles, "
+ + " COLLECT_LIST(profileId) AS riders, "
+ + " COUNT(*) AS count "
+ + String.format(" FROM %s ", firstTable.getName())
+ + " GROUP BY ROUND(GEO_DISTANCE(la, lo, 37.4133, -122.1162), -1);"))
+ .block();
+ assert pipeIdTable2 != null;
+    List<KsqlResponse> responseListTable2 = ksqlApi()
+ .openKsqlResponsePipe(CLUSTER_NAME, pipeIdTable2.getPipeId())
+ .collectList()
+ .block();
+ assert Objects.requireNonNull(responseListTable2).size() != 0;
+ return this;
}
@Step
public ApiService insertInto(Stream stream) {
- String streamName = stream.getName();
- KsqlCommandV2Response pipeIdInsert = ksqlApi().executeKsql(CLUSTER_NAME, new KsqlCommandV2()
- .ksql(
- "INSERT INTO " + streamName + " (profileId, latitude, longitude) VALUES ('c2309eec', 37.7877, -122.4205);"
- + "INSERT INTO " + streamName +
- " (profileId, latitude, longitude) VALUES ('18f4ea86', 37.3903, -122.0643); "
- + "INSERT INTO " + streamName +
- " (profileId, latitude, longitude) VALUES ('4ab5cbad', 37.3952, -122.0813); "
- + "INSERT INTO " + streamName +
- " (profileId, latitude, longitude) VALUES ('8b6eae59', 37.3944, -122.0813); "
- + "INSERT INTO " + streamName +
- " (profileId, latitude, longitude) VALUES ('4a7c7b41', 37.4049, -122.0822); "
- + "INSERT INTO " + streamName +
- " (profileId, latitude, longitude) VALUES ('4ddad000', 37.7857, -122.4011);"))
- .block();
- assert pipeIdInsert != null;
-    List<KsqlResponse> responseListInsert =
- ksqlApi().openKsqlResponsePipe(CLUSTER_NAME, pipeIdInsert.getPipeId()).collectList().block();
- assert Objects.requireNonNull(responseListInsert).size() != 0;
- return this;
+ String streamName = stream.getName();
+ KsqlCommandV2Response pipeIdInsert = ksqlApi()
+ .executeKsql(CLUSTER_NAME, new KsqlCommandV2()
+ .ksql("INSERT INTO " + streamName + " (profileId, latitude, longitude) VALUES ('c2309eec', 37.7877, -122.4205);"
+ + "INSERT INTO " + streamName +
+ " (profileId, latitude, longitude) VALUES ('18f4ea86', 37.3903, -122.0643); "
+ + "INSERT INTO " + streamName +
+ " (profileId, latitude, longitude) VALUES ('4ab5cbad', 37.3952, -122.0813); "
+ + "INSERT INTO " + streamName +
+ " (profileId, latitude, longitude) VALUES ('8b6eae59', 37.3944, -122.0813); "
+ + "INSERT INTO " + streamName +
+ " (profileId, latitude, longitude) VALUES ('4a7c7b41', 37.4049, -122.0822); "
+ + "INSERT INTO " + streamName +
+ " (profileId, latitude, longitude) VALUES ('4ddad000', 37.7857, -122.4011);"))
+ .block();
+ assert pipeIdInsert != null;
+    List<KsqlResponse> responseListInsert = ksqlApi()
+ .openKsqlResponsePipe(CLUSTER_NAME, pipeIdInsert.getPipeId())
+ .collectList()
+ .block();
+ assert Objects.requireNonNull(responseListInsert).size() != 0;
+ return this;
}
}
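
The reindented ApiService keeps its fluent, self-returning steps, so fixtures can chain seeding calls. A minimal sketch; the Topic model setters are assumed to chain, as the Connector model does elsewhere in this patch:

    // Illustrative fixture seeding via the API instead of the UI.
    Topic seed = new Topic()
        .setName("e2e-seed-topic")
        .setMessageKey("key")
        .setMessageContent("value");
    apiService
        .createTopic(seed.getName())
        .sendMessage(seed);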
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/listeners/AllureListener.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/listeners/AllureListener.java
new file mode 100644
index 0000000000..74119f8480
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/listeners/AllureListener.java
@@ -0,0 +1,35 @@
+package com.provectus.kafka.ui.settings.listeners;
+
+import com.codeborne.selenide.Screenshots;
+import io.qameta.allure.Allure;
+import io.qameta.allure.testng.AllureTestNg;
+import org.testng.ITestListener;
+import org.testng.ITestResult;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Objects;
+
+import static java.nio.file.Files.newInputStream;
+
+public class AllureListener extends AllureTestNg implements ITestListener {
+
+ private void takeScreenshot() {
+ File screenshot = Screenshots.takeScreenShotAsFile();
+ try {
+ Allure.addAttachment(Objects.requireNonNull(screenshot).getName(), newInputStream(screenshot.toPath()));
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public void onTestFailure(ITestResult result) {
+ takeScreenshot();
+ }
+
+ @Override
+ public void onTestSkipped(ITestResult result) {
+ takeScreenshot();
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/listeners/LoggerListener.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/listeners/LoggerListener.java
new file mode 100644
index 0000000000..ca096cd238
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/listeners/LoggerListener.java
@@ -0,0 +1,37 @@
+package com.provectus.kafka.ui.settings.listeners;
+
+import lombok.extern.slf4j.Slf4j;
+import org.testng.ITestResult;
+import org.testng.TestListenerAdapter;
+
+@Slf4j
+public class LoggerListener extends TestListenerAdapter {
+
+ @Override
+ public void onTestStart(final ITestResult testResult) {
+ log.info(String.format("\n------------------------------------------------------------------------ " +
+ "\nTEST STARTED: %s.%s \n------------------------------------------------------------------------ \n",
+ testResult.getInstanceName(), testResult.getName()));
+ }
+
+ @Override
+ public void onTestSuccess(final ITestResult testResult) {
+ log.info(String.format("\n------------------------------------------------------------------------ " +
+ "\nTEST PASSED: %s.%s \n------------------------------------------------------------------------ \n",
+ testResult.getInstanceName(), testResult.getName()));
+ }
+
+ @Override
+ public void onTestFailure(final ITestResult testResult) {
+ log.info(String.format("\n------------------------------------------------------------------------ " +
+ "\nTEST FAILED: %s.%s \n------------------------------------------------------------------------ \n",
+ testResult.getInstanceName(), testResult.getName()));
+ }
+
+ @Override
+ public void onTestSkipped(final ITestResult testResult) {
+ log.info(String.format("\n------------------------------------------------------------------------ " +
+ "\nTEST SKIPPED: %s.%s \n------------------------------------------------------------------------ \n",
+ testResult.getInstanceName(), testResult.getName()));
+ }
+}
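
Both listeners are wired into the new BaseTest below via @Listeners; they can equally be attached to a programmatic TestNG run, which is handy when debugging a single class. A sketch, not part of the patch:

    // Programmatic alternative to the @Listeners annotation.
    TestNG testng = new TestNG();
    testng.setTestClasses(new Class[]{SmokeTest.class});
    testng.addListener(new LoggerListener());
    testng.addListener(new AllureListener());
    testng.run();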
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/TimeUtils.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/TimeUtils.java
index 259f70c3cc..f53e8897e9 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/TimeUtils.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/TimeUtils.java
@@ -1,16 +1,17 @@
package com.provectus.kafka.ui.utilities;
-import static com.codeborne.selenide.Selenide.sleep;
+import lombok.extern.slf4j.Slf4j;
import java.time.LocalTime;
-import lombok.extern.slf4j.Slf4j;
+
+import static com.codeborne.selenide.Selenide.sleep;
@Slf4j
public class TimeUtils {
- public static void waitUntilNewMinuteStarted(){
- int secondsLeft = 60 - LocalTime.now().getSecond();
- log.debug("\nwaitUntilNewMinuteStarted: {}s", secondsLeft);
- sleep(secondsLeft * 1000);
- }
+ public static void waitUntilNewMinuteStarted() {
+ int secondsLeft = 60 - LocalTime.now().getSecond();
+ log.debug("\nwaitUntilNewMinuteStarted: {}s", secondsLeft);
+ sleep(secondsLeft * 1000);
+ }
}
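
waitUntilNewMinuteStarted blocks until the wall clock rolls over, so anything observed afterwards happened within a single minute. Typical use (illustrative; `topic` is a placeholder):

    // Avoid a minute boundary splitting the produce/assert window.
    TimeUtils.waitUntilNewMinuteStarted();
    apiService.sendMessage(topic);
    // ...assert on per-minute stats within the same minute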
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/WebUtils.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/WebUtils.java
index 307361ad5d..7e0de1ac65 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/WebUtils.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/WebUtils.java
@@ -1,7 +1,5 @@
package com.provectus.kafka.ui.utilities;
-import static com.codeborne.selenide.Selenide.executeJavaScript;
-
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.codeborne.selenide.WebDriverRunner;
@@ -9,73 +7,75 @@ import lombok.extern.slf4j.Slf4j;
import org.openqa.selenium.Keys;
import org.openqa.selenium.interactions.Actions;
+import static com.codeborne.selenide.Selenide.executeJavaScript;
+
@Slf4j
public class WebUtils {
- public static void clickByActions(SelenideElement element) {
- log.debug("\nclickByActions: {}", element.getSearchCriteria());
- element.shouldBe(Condition.enabled);
- new Actions(WebDriverRunner.getWebDriver())
- .moveToElement(element)
- .click(element)
- .perform();
- }
-
- public static void clickByJavaScript(SelenideElement element) {
- log.debug("\nclickByJavaScript: {}", element.getSearchCriteria());
- element.shouldBe(Condition.enabled);
- String script = "arguments[0].click();";
- executeJavaScript(script, element);
- }
-
- public static void clearByKeyboard(SelenideElement field) {
- log.debug("\nclearByKeyboard: {}", field.getSearchCriteria());
- field.shouldBe(Condition.enabled).sendKeys(Keys.END);
- field.sendKeys(Keys.chord(Keys.CONTROL + "a"), Keys.DELETE);
- }
-
- public static boolean isVisible(SelenideElement element) {
- log.debug("\nisVisible: {}", element.getSearchCriteria());
- boolean isVisible = false;
- try {
- element.shouldBe(Condition.visible);
- isVisible = true;
- } catch (Throwable e) {
- log.debug("{} is not visible", element.getSearchCriteria());
+ public static void clickByActions(SelenideElement element) {
+ log.debug("\nclickByActions: {}", element.getSearchCriteria());
+ element.shouldBe(Condition.enabled);
+ new Actions(WebDriverRunner.getWebDriver())
+ .moveToElement(element)
+ .click(element)
+ .perform();
}
- return isVisible;
- }
- public static boolean isEnabled(SelenideElement element) {
- log.debug("\nisEnabled: {}", element.getSearchCriteria());
- boolean isEnabled = false;
- try {
- element.shouldBe(Condition.enabled);
- isEnabled = true;
- } catch (Throwable e) {
- log.debug("{} is not enabled", element.getSearchCriteria());
+ public static void clickByJavaScript(SelenideElement element) {
+ log.debug("\nclickByJavaScript: {}", element.getSearchCriteria());
+ element.shouldBe(Condition.enabled);
+ String script = "arguments[0].click();";
+ executeJavaScript(script, element);
}
- return isEnabled;
- }
- public static boolean isSelected(SelenideElement element) {
- log.debug("\nisSelected: {}", element.getSearchCriteria());
- boolean isSelected = false;
- try {
- element.shouldBe(Condition.selected);
- isSelected = true;
- } catch (Throwable e) {
- log.debug("{} is not selected", element.getSearchCriteria());
+ public static void clearByKeyboard(SelenideElement field) {
+ log.debug("\nclearByKeyboard: {}", field.getSearchCriteria());
+ field.shouldBe(Condition.enabled).sendKeys(Keys.END);
+ field.sendKeys(Keys.chord(Keys.CONTROL + "a"), Keys.DELETE);
}
- return isSelected;
- }
- public static boolean selectElement(SelenideElement element, boolean select){
- if (select) {
- if (!element.isSelected()) clickByJavaScript(element);
- } else {
- if (element.isSelected()) clickByJavaScript(element);
+ public static boolean isVisible(SelenideElement element) {
+ log.debug("\nisVisible: {}", element.getSearchCriteria());
+ boolean isVisible = false;
+ try {
+ element.shouldBe(Condition.visible);
+ isVisible = true;
+ } catch (Throwable e) {
+ log.debug("{} is not visible", element.getSearchCriteria());
+ }
+ return isVisible;
+ }
+
+ public static boolean isEnabled(SelenideElement element) {
+ log.debug("\nisEnabled: {}", element.getSearchCriteria());
+ boolean isEnabled = false;
+ try {
+ element.shouldBe(Condition.enabled);
+ isEnabled = true;
+ } catch (Throwable e) {
+ log.debug("{} is not enabled", element.getSearchCriteria());
+ }
+ return isEnabled;
+ }
+
+ public static boolean isSelected(SelenideElement element) {
+ log.debug("\nisSelected: {}", element.getSearchCriteria());
+ boolean isSelected = false;
+ try {
+ element.shouldBe(Condition.selected);
+ isSelected = true;
+ } catch (Throwable e) {
+ log.debug("{} is not selected", element.getSearchCriteria());
+ }
+ return isSelected;
+ }
+
+ public static boolean selectElement(SelenideElement element, boolean select) {
+ if (select) {
+ if (!element.isSelected()) clickByJavaScript(element);
+ } else {
+ if (element.isSelected()) clickByJavaScript(element);
+ }
+ return true;
}
- return true;
- }
}
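
The isVisible/isEnabled/isSelected helpers deliberately swallow the failed Selenide condition and return a boolean, which makes them safe inside conditional flows and soft assertions. A sketch (`element` is any SelenideElement):

    // Illustrative: boolean probes instead of hard assertions.
    if (WebUtils.isVisible(element) && WebUtils.isEnabled(element)) {
        WebUtils.clickByActions(element);
    }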
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/DisplayNameGenerator.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/DisplayNameGenerator.java
deleted file mode 100644
index 15d239983c..0000000000
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/DisplayNameGenerator.java
+++ /dev/null
@@ -1,33 +0,0 @@
-package com.provectus.kafka.ui.utilities.qaseIoUtils;
-
-import org.junit.platform.commons.util.ClassUtils;
-import org.junit.platform.commons.util.Preconditions;
-
-import java.lang.reflect.Method;
-
-public class DisplayNameGenerator implements org.junit.jupiter.api.DisplayNameGenerator {
- @Override
- public String generateDisplayNameForClass(Class<?> testClass) {
- String name = testClass.getName();
- int lastDot = name.lastIndexOf('.');
- return name.substring(lastDot + 1).replaceAll("([A-Z])", " $1").toLowerCase();
- }
-
- @Override
- public String generateDisplayNameForNestedClass(Class<?> nestedClass) {
- return nestedClass.getSimpleName();
- }
-
- @Override
- public String generateDisplayNameForMethod(Class<?> testClass, Method testMethod) {
- return testMethod.getName().replaceAll("([A-Z])", " $1").toLowerCase()
- + parameterTypesAsString(testMethod);
- }
-
- static String parameterTypesAsString(Method method) {
- Preconditions.notNull(method, "Method must not be null");
- return method.getParameterTypes().length == 0
- ? ""
- : '(' + ClassUtils.nullSafeToString(Class::getSimpleName, method.getParameterTypes()) + ')';
- }
-}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/QaseExtension.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/QaseExtension.java
index 474acd1099..e12f070517 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/QaseExtension.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/QaseExtension.java
@@ -9,7 +9,6 @@ import io.qase.client.model.ResultCreate;
import io.qase.client.model.ResultCreate.StatusEnum;
import io.qase.client.model.ResultCreateSteps;
import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang.StringUtils;
import org.junit.platform.engine.TestExecutionResult;
import org.junit.platform.engine.TestSource;
import org.junit.platform.engine.support.descriptor.MethodSource;
@@ -26,19 +25,17 @@ import java.util.concurrent.ConcurrentHashMap;
import static io.qase.api.QaseClient.getConfig;
import static io.qase.api.utils.IntegrationUtils.getCaseId;
import static io.qase.api.utils.IntegrationUtils.getStacktrace;
+import static org.apache.commons.lang3.StringUtils.isEmpty;
import static org.junit.platform.engine.TestExecutionResult.Status.SUCCESSFUL;
@Slf4j
public class QaseExtension implements TestExecutionListener {
- private final ApiClient apiClient = QaseClient.getApiClient();
- private final ResultsApi resultsApi = new ResultsApi(apiClient);
- private final Map<TestIdentifier, Long> testStartTimes = new ConcurrentHashMap<>();
private static final String QASE_PROJECT = "KAFKAUI";
static {
String qaseApiToken = System.getProperty("QASEIO_API_TOKEN");
- if (StringUtils.isEmpty(qaseApiToken)) {
+ if (isEmpty(qaseApiToken)) {
log.warn("QASEIO_API_TOKEN system property is not set. Support for Qase will be disabled.");
System.setProperty("QASE_ENABLE", "false");
} else {
@@ -47,12 +44,16 @@ public class QaseExtension implements TestExecutionListener {
System.setProperty("QASE_API_TOKEN", qaseApiToken);
System.setProperty("QASE_USE_BULK", "false");
if ("true".equalsIgnoreCase(System.getProperty("QASEIO_CREATE_TESTRUN"))) {
- System.setProperty("QASE_RUN_NAME", "Automation run " +
- new SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(new Date()));
+ System.setProperty("QASE_RUN_NAME", "Automation run " +
+ new SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(new Date()));
}
}
}
+ private final ApiClient apiClient = QaseClient.getApiClient();
+ private final ResultsApi resultsApi = new ResultsApi(apiClient);
+ private final Map<TestIdentifier, Long> testStartTimes = new ConcurrentHashMap<>();
+
@Override
public void executionStarted(TestIdentifier testIdentifier) {
if (QaseClient.isEnabled() && testIdentifier.isTest()) {
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/TestCaseGenerator.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/TestCaseGenerator.java
index 05666621c4..1c534b7395 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/TestCaseGenerator.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/TestCaseGenerator.java
@@ -23,9 +23,9 @@ import static io.qase.api.QaseClient.getConfig;
@Slf4j
public class TestCaseGenerator {
- public static boolean FAILED = false;
private static final ApiClient apiClient = QaseClient.getApiClient();
private static final CasesApi casesApi = new CasesApi(apiClient);
+ public static boolean FAILED = false;
@SneakyThrows
public static void createTestCaseIfNotExists(Method testMethod) {
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java
new file mode 100644
index 0000000000..bc68cc6637
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java
@@ -0,0 +1,135 @@
+package com.provectus.kafka.ui;
+
+import com.codeborne.selenide.Condition;
+import com.codeborne.selenide.Selenide;
+import com.codeborne.selenide.SelenideElement;
+import com.codeborne.selenide.WebDriverRunner;
+import com.provectus.kafka.ui.settings.listeners.AllureListener;
+import com.provectus.kafka.ui.settings.listeners.LoggerListener;
+import io.qase.api.annotation.Step;
+import lombok.extern.slf4j.Slf4j;
+import org.openqa.selenium.Dimension;
+import org.openqa.selenium.chrome.ChromeOptions;
+import org.openqa.selenium.remote.RemoteWebDriver;
+import org.testcontainers.Testcontainers;
+import org.testcontainers.containers.BrowserWebDriverContainer;
+import org.testcontainers.containers.output.Slf4jLogConsumer;
+import org.testcontainers.utility.DockerImageName;
+import org.testng.annotations.*;
+import org.testng.asserts.SoftAssert;
+
+import java.time.Duration;
+import java.util.List;
+
+import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.TOPICS;
+import static com.provectus.kafka.ui.settings.BaseSource.*;
+import static com.provectus.kafka.ui.settings.configs.Profiles.CONTAINER;
+import static com.provectus.kafka.ui.settings.configs.Profiles.LOCAL;
+import static com.provectus.kafka.ui.settings.drivers.LocalWebDriver.*;
+
+@Slf4j
+@Listeners({AllureListener.class, LoggerListener.class})
+public abstract class BaseTest extends Facade {
+
+ private static final String SELENIUM_IMAGE_NAME = "selenium/standalone-chrome:103.0";
+ private static final String SELENIARM_STANDALONE_CHROMIUM = "seleniarm/standalone-chromium:103.0";
+ protected static BrowserWebDriverContainer<?> webDriverContainer = null;
+
+ private static boolean isARM64() {
+ return System.getProperty("os.arch").equals("aarch64");
+ }
+
+ @BeforeSuite(alwaysRun = true)
+ public void beforeSuite() {
+ switch (BROWSER) {
+ case (CONTAINER) -> {
+ DockerImageName image = isARM64()
+ ? DockerImageName.parse(SELENIARM_STANDALONE_CHROMIUM).asCompatibleSubstituteFor(SELENIUM_IMAGE_NAME)
+ : DockerImageName.parse(SELENIUM_IMAGE_NAME);
+ log.info("Using [{}] as image name for chrome", image.getUnversionedPart());
+ webDriverContainer = new BrowserWebDriverContainer<>(image)
+ .withEnv("JAVA_OPTS", "-Dwebdriver.chrome.whitelistedIps=")
+ .withStartupTimeout(Duration.ofSeconds(180))
+ .withCapabilities(new ChromeOptions()
+ .addArguments("--disable-dev-shm-usage")
+ .addArguments("--disable-gpu")
+ .addArguments("--no-sandbox")
+ .addArguments("--verbose")
+ .addArguments("--lang=es")
+ )
+ .withLogConsumer(new Slf4jLogConsumer(log).withPrefix("[CHROME]: "));
+ try {
+ Testcontainers.exposeHostPorts(8080);
+ log.info("Starting browser container");
+ webDriverContainer.start();
+ } catch (Throwable e) {
+ log.error("Couldn't start a container", e);
+ }
+ }
+ case (LOCAL) -> loggerSetup();
+ default -> throw new IllegalStateException("Unexpected value: " + BROWSER);
+ }
+ }
+
+ @AfterSuite(alwaysRun = true)
+ public void afterSuite() {
+ switch (BROWSER) {
+ case (CONTAINER) -> {
+ if (webDriverContainer.isRunning()) {
+ webDriverContainer.close();
+ webDriverContainer.stop();
+ }
+ }
+ case (LOCAL) -> browserQuit();
+ default -> throw new IllegalStateException("Unexpected value: " + BROWSER);
+ }
+ }
+
+ @BeforeMethod(alwaysRun = true)
+ public void beforeMethod() {
+ switch (BROWSER) {
+ case (CONTAINER) -> {
+ RemoteWebDriver remoteWebDriver = webDriverContainer.getWebDriver();
+ WebDriverRunner.setWebDriver(remoteWebDriver);
+ remoteWebDriver.manage()
+ .window().setSize(new Dimension(1440, 1024));
+ Selenide.open(BASE_CONTAINER_URL);
+ }
+ case (LOCAL) -> openUrl(BASE_LOCAL_URL);
+ default -> throw new IllegalStateException("Unexpected value: " + BROWSER);
+ }
+ naviSideBar.waitUntilScreenReady();
+ }
+
+ @AfterMethod(alwaysRun = true)
+ public void afterMethod() {
+ browserClear();
+ }
+
+ @Step
+ protected void navigateToTopics() {
+ naviSideBar
+ .openSideMenu(TOPICS);
+ topicsList
+ .waitUntilScreenReady();
+ }
+
+ @Step
+ protected void navigateToTopicsAndOpenDetails(String topicName) {
+ naviSideBar
+ .openSideMenu(TOPICS);
+ topicsList
+ .waitUntilScreenReady()
+ .openTopic(topicName);
+ topicDetails
+ .waitUntilScreenReady();
+ }
+
+ @Step
+ protected void verifyElementsCondition(List<SelenideElement> elementList, Condition expectedCondition) {
+ SoftAssert softly = new SoftAssert();
+ elementList.forEach(element -> softly.assertTrue(element.is(expectedCondition),
+ element.getSearchCriteria() + " is " + expectedCondition));
+ softly.assertAll();
+ }
+}
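
A new suite class now only needs to extend the TestNG BaseTest and reuse its navigation steps. A minimal sketch; the topic name and `isTopicHeaderVisible` are illustrative assumptions:

    public class TopicsSmokeTest extends BaseTest {

        @Test
        public void openTopicDetails() {
            navigateToTopicsAndOpenDetails("my-topic");
            // isTopicHeaderVisible is an assumed page-object method.
            Assert.assertTrue(topicDetails.isTopicHeaderVisible("my-topic"),
                "isTopicHeaderVisible()");
        }
    }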
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/Facade.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/Facade.java
similarity index 65%
rename from kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/Facade.java
rename to kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/Facade.java
index ac4180fe61..a26f600e1d 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/Facade.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/Facade.java
@@ -1,25 +1,21 @@
-package com.provectus.kafka.ui.base;
+package com.provectus.kafka.ui;
import com.provectus.kafka.ui.pages.NaviSideBar;
import com.provectus.kafka.ui.pages.TopPanel;
import com.provectus.kafka.ui.pages.brokers.BrokersConfigTab;
import com.provectus.kafka.ui.pages.brokers.BrokersDetails;
import com.provectus.kafka.ui.pages.brokers.BrokersList;
-import com.provectus.kafka.ui.pages.connector.ConnectorCreateForm;
-import com.provectus.kafka.ui.pages.connector.ConnectorDetails;
-import com.provectus.kafka.ui.pages.connector.KafkaConnectList;
-import com.provectus.kafka.ui.pages.consumer.ConsumersDetails;
-import com.provectus.kafka.ui.pages.consumer.ConsumersList;
-import com.provectus.kafka.ui.pages.ksqldb.KsqlDbList;
-import com.provectus.kafka.ui.pages.ksqldb.KsqlQueryForm;
-import com.provectus.kafka.ui.pages.schema.SchemaCreateForm;
-import com.provectus.kafka.ui.pages.schema.SchemaDetails;
-import com.provectus.kafka.ui.pages.schema.SchemaRegistryList;
-import com.provectus.kafka.ui.pages.topic.ProduceMessagePanel;
-import com.provectus.kafka.ui.pages.topic.TopicCreateEditForm;
-import com.provectus.kafka.ui.pages.topic.TopicDetails;
-import com.provectus.kafka.ui.pages.topic.TopicSettingsTab;
-import com.provectus.kafka.ui.pages.topic.TopicsList;
+import com.provectus.kafka.ui.pages.connectors.ConnectorCreateForm;
+import com.provectus.kafka.ui.pages.connectors.ConnectorDetails;
+import com.provectus.kafka.ui.pages.connectors.KafkaConnectList;
+import com.provectus.kafka.ui.pages.consumers.ConsumersDetails;
+import com.provectus.kafka.ui.pages.consumers.ConsumersList;
+import com.provectus.kafka.ui.pages.ksqlDb.KsqlDbList;
+import com.provectus.kafka.ui.pages.ksqlDb.KsqlQueryForm;
+import com.provectus.kafka.ui.pages.schemas.SchemaCreateForm;
+import com.provectus.kafka.ui.pages.schemas.SchemaDetails;
+import com.provectus.kafka.ui.pages.schemas.SchemaRegistryList;
+import com.provectus.kafka.ui.pages.topics.*;
import com.provectus.kafka.ui.services.ApiService;
public abstract class Facade {
@@ -44,5 +40,4 @@ public abstract class Facade {
protected TopicSettingsTab topicSettingsTab = new TopicSettingsTab();
protected KsqlQueryForm ksqlQueryForm = new KsqlQueryForm();
protected KsqlDbList ksqlDbList = new KsqlDbList();
-
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java
deleted file mode 100644
index 6db92244ba..0000000000
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java
+++ /dev/null
@@ -1,142 +0,0 @@
-package com.provectus.kafka.ui.base;
-
-import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.TOPICS;
-import static com.provectus.kafka.ui.settings.BaseSource.BASE_CONTAINER_URL;
-import static com.provectus.kafka.ui.settings.BaseSource.BASE_LOCAL_URL;
-import static com.provectus.kafka.ui.settings.BaseSource.BROWSER;
-import static com.provectus.kafka.ui.settings.configs.Profiles.CONTAINER;
-import static com.provectus.kafka.ui.settings.configs.Profiles.LOCAL;
-import static com.provectus.kafka.ui.settings.drivers.LocalWebDriver.browserClear;
-import static com.provectus.kafka.ui.settings.drivers.LocalWebDriver.browserQuit;
-import static com.provectus.kafka.ui.settings.drivers.LocalWebDriver.loggerSetup;
-import static com.provectus.kafka.ui.settings.drivers.LocalWebDriver.openUrl;
-
-import com.codeborne.selenide.Condition;
-import com.codeborne.selenide.Selenide;
-import com.codeborne.selenide.SelenideElement;
-import com.codeborne.selenide.WebDriverRunner;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.DisplayNameGenerator;
-import io.qase.api.annotation.Step;
-import java.time.Duration;
-import java.util.List;
-import lombok.extern.slf4j.Slf4j;
-import org.assertj.core.api.SoftAssertions;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.DisplayNameGeneration;
-import org.openqa.selenium.Dimension;
-import org.openqa.selenium.chrome.ChromeOptions;
-import org.openqa.selenium.remote.RemoteWebDriver;
-import org.testcontainers.Testcontainers;
-import org.testcontainers.containers.BrowserWebDriverContainer;
-import org.testcontainers.containers.output.Slf4jLogConsumer;
-import org.testcontainers.utility.DockerImageName;
-
-@Slf4j
-@DisplayNameGeneration(DisplayNameGenerator.class)
-public abstract class BaseTest extends Facade {
-
- private static final String SELENIUM_IMAGE_NAME = "selenium/standalone-chrome:103.0";
- private static final String SELENIARM_STANDALONE_CHROMIUM = "seleniarm/standalone-chromium:103.0";
- protected static BrowserWebDriverContainer<?> webDriverContainer = null;
-
- private static boolean isARM64() {
- return System.getProperty("os.arch").equals("aarch64");
- }
-
- @BeforeAll
- public static void start() {
- switch (BROWSER) {
- case (CONTAINER) -> {
- DockerImageName image = isARM64()
- ? DockerImageName.parse(SELENIARM_STANDALONE_CHROMIUM).asCompatibleSubstituteFor(SELENIUM_IMAGE_NAME)
- : DockerImageName.parse(SELENIUM_IMAGE_NAME);
- log.info("Using [{}] as image name for chrome", image.getUnversionedPart());
- webDriverContainer = new BrowserWebDriverContainer<>(image)
- .withEnv("JAVA_OPTS", "-Dwebdriver.chrome.whitelistedIps=")
- .withStartupTimeout(Duration.ofSeconds(180))
- .withCapabilities(new ChromeOptions()
- .addArguments("--disable-dev-shm-usage")
- .addArguments("--disable-gpu")
- .addArguments("--no-sandbox")
- .addArguments("--verbose")
- .addArguments("--lang=es")
- )
- .withLogConsumer(new Slf4jLogConsumer(log).withPrefix("[CHROME]: "));
- try {
- Testcontainers.exposeHostPorts(8080);
- log.info("Starting browser container");
- webDriverContainer.start();
- } catch (Throwable e) {
- log.error("Couldn't start a container", e);
- }
- }
- case (LOCAL) -> loggerSetup();
- default -> throw new IllegalStateException("Unexpected value: " + BROWSER);
- }
- }
-
- @AfterAll
- public static void tearDown() {
- switch (BROWSER) {
- case (CONTAINER) -> {
- if (webDriverContainer.isRunning()) {
- webDriverContainer.close();
- webDriverContainer.stop();
- }
- }
- case (LOCAL) -> browserQuit();
- default -> throw new IllegalStateException("Unexpected value: " + BROWSER);
- }
- }
-
- @BeforeEach
- public void beforeMethod() {
- switch (BROWSER) {
- case (CONTAINER) -> {
- RemoteWebDriver remoteWebDriver = webDriverContainer.getWebDriver();
- WebDriverRunner.setWebDriver(remoteWebDriver);
- remoteWebDriver.manage()
- .window().setSize(new Dimension(1440, 1024));
- Selenide.open(BASE_CONTAINER_URL);
- }
- case (LOCAL) -> openUrl(BASE_LOCAL_URL);
- default -> throw new IllegalStateException("Unexpected value: " + BROWSER);
- }
- naviSideBar.waitUntilScreenReady();
- }
-
- @AfterEach
- public void afterMethod() {
- browserClear();
- }
-
- @Step
- protected void navigateToTopics() {
- naviSideBar
- .openSideMenu(TOPICS);
- topicsList
- .waitUntilScreenReady();
- }
-
- @Step
- protected void navigateToTopicsAndOpenDetails(String topicName){
- naviSideBar
- .openSideMenu(TOPICS);
- topicsList
- .waitUntilScreenReady()
- .openTopic(topicName);
- topicDetails
- .waitUntilScreenReady();
- }
-
- @Step
- protected void verifyElementsCondition(List<SelenideElement> elementList, Condition expectedCondition) {
- SoftAssertions softly = new SoftAssertions();
- elementList.forEach(element -> softly.assertThat(element.is(expectedCondition))
- .as(element.getSearchCriteria() + " is " + expectedCondition).isTrue());
- softly.assertAll();
- }
-}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java
new file mode 100644
index 0000000000..e0971a0a9e
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java
@@ -0,0 +1,26 @@
+package com.provectus.kafka.ui.smokeSuite;
+
+import com.codeborne.selenide.Condition;
+import com.provectus.kafka.ui.BaseTest;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
+import io.qase.api.annotation.CaseId;
+import org.testng.annotations.Test;
+
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+public class SmokeTest extends BaseTest {
+
+ @Test
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(198)
+ public void checkBasePageElements() {
+ verifyElementsCondition(
+ Stream.concat(topPanel.getAllVisibleElements().stream(), naviSideBar.getAllMenuButtons().stream())
+ .collect(Collectors.toList()), Condition.visible);
+ verifyElementsCondition(
+ Stream.concat(topPanel.getAllEnabledElements().stream(), naviSideBar.getAllMenuButtons().stream())
+ .collect(Collectors.toList()), Condition.enabled);
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java
new file mode 100644
index 0000000000..7f46456a6c
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java
@@ -0,0 +1,61 @@
+package com.provectus.kafka.ui.smokeSuite.brokers;
+
+import com.codeborne.selenide.Condition;
+import com.provectus.kafka.ui.BaseTest;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
+import io.qameta.allure.Step;
+import io.qase.api.annotation.CaseId;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.BROKERS;
+import static com.provectus.kafka.ui.pages.brokers.BrokersDetails.DetailsTab.CONFIGS;
+
+public class BrokersTest extends BaseTest {
+
+ private static final String SUITE_TITLE = "Brokers";
+ private static final long SUITE_ID = 1;
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(1)
+ @Test
+ public void checkBrokersOverview() {
+ navigateToBrokers();
+ Assert.assertTrue(brokersList.getAllBrokers().size() > 0, "getAllBrokers()");
+ verifyElementsCondition(brokersList.getAllVisibleElements(), Condition.visible);
+ verifyElementsCondition(brokersList.getAllEnabledElements(), Condition.enabled);
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(85)
+ @Test
+ public void checkExistingBrokersInCluster() {
+ navigateToBrokers();
+ Assert.assertTrue(brokersList.getAllBrokers().size() > 0, "getAllBrokers()");
+ brokersList
+ .openBroker(1);
+ brokersDetails
+ .waitUntilScreenReady();
+ verifyElementsCondition(brokersDetails.getAllVisibleElements(), Condition.visible);
+ verifyElementsCondition(brokersDetails.getAllEnabledElements(), Condition.enabled);
+ brokersDetails
+ .openDetailsTab(CONFIGS);
+ brokersConfigTab
+ .waitUntilScreenReady();
+ verifyElementsCondition(brokersConfigTab.getColumnHeaders(), Condition.visible);
+ verifyElementsCondition(brokersConfigTab.getEditButtons(), Condition.enabled);
+ Assert.assertTrue(brokersConfigTab.isSearchByKeyVisible(), "isSearchByKeyVisible()");
+ }
+
+ @Step
+ private void navigateToBrokers() {
+ naviSideBar
+ .openSideMenu(BROKERS);
+ brokersList
+ .waitUntilScreenReady();
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/connectors/ConnectorsTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java
similarity index 63%
rename from kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/connectors/ConnectorsTests.java
rename to kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java
index ec2e3d5a0f..0b162283e4 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/connectors/ConnectorsTests.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java
@@ -1,11 +1,6 @@
-package com.provectus.kafka.ui.suite.connectors;
+package com.provectus.kafka.ui.smokeSuite.connectors;
-import static com.provectus.kafka.ui.pages.BasePage.AlertHeader.SUCCESS;
-import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.KAFKA_CONNECT;
-import static com.provectus.kafka.ui.utilities.FileUtils.getResourceAsString;
-import static org.apache.commons.lang.RandomStringUtils.randomAlphabetic;
-
-import com.provectus.kafka.ui.base.BaseTest;
+import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.models.Connector;
import com.provectus.kafka.ui.models.Topic;
import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
@@ -13,17 +8,21 @@ import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
import io.qameta.allure.Step;
import io.qase.api.annotation.CaseId;
+import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
import java.util.ArrayList;
import java.util.List;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.TestInstance;
-@TestInstance(TestInstance.Lifecycle.PER_CLASS)
-public class ConnectorsTests extends BaseTest {
+import static com.provectus.kafka.ui.pages.BasePage.AlertHeader.SUCCESS;
+import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.KAFKA_CONNECT;
+import static com.provectus.kafka.ui.utilities.FileUtils.getResourceAsString;
+import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
+
+public class ConnectorsTest extends BaseTest {
+
private static final long SUITE_ID = 10;
private static final String SUITE_TITLE = "Kafka Connect";
private static final String CONNECT_NAME = "first";
@@ -47,19 +46,18 @@ public class ConnectorsTests extends BaseTest {
.setName("sink_postgres_activities_e2e_checks_for_update-" + randomAlphabetic(5))
.setConfig(getResourceAsString("config_for_create_connector_via_api.json"));
- @BeforeAll
- public void beforeAll() {
- TOPIC_LIST.addAll(List.of(TOPIC_FOR_CREATE, TOPIC_FOR_DELETE, TOPIC_FOR_UPDATE));
- TOPIC_LIST.forEach(topic -> apiService
- .createTopic(topic.getName())
- .sendMessage(topic)
- );
- CONNECTOR_LIST.addAll(List.of(CONNECTOR_FOR_DELETE, CONNECTOR_FOR_UPDATE));
- CONNECTOR_LIST.forEach(connector -> apiService
- .createConnector(CONNECT_NAME, connector));
+ @BeforeClass(alwaysRun = true)
+ public void beforeClass() {
+ TOPIC_LIST.addAll(List.of(TOPIC_FOR_CREATE, TOPIC_FOR_DELETE, TOPIC_FOR_UPDATE));
+ TOPIC_LIST.forEach(topic -> apiService
+ .createTopic(topic.getName())
+ .sendMessage(topic)
+ );
+ CONNECTOR_LIST.addAll(List.of(CONNECTOR_FOR_DELETE, CONNECTOR_FOR_UPDATE));
+ CONNECTOR_LIST.forEach(connector -> apiService
+ .createConnector(CONNECT_NAME, connector));
}
- @DisplayName("should create a connector")
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(42)
@@ -69,7 +67,7 @@ public class ConnectorsTests extends BaseTest {
.setName("sink_postgres_activities_e2e_checks-" + randomAlphabetic(5))
.setConfig(getResourceAsString("config_for_create_connector.json"));
navigateToConnectors();
- kafkaConnectList
+ kafkaConnectList
.clickCreateConnectorBtn();
connectorCreateForm
.waitUntilScreenReady()
@@ -78,65 +76,63 @@ public class ConnectorsTests extends BaseTest {
connectorDetails
.waitUntilScreenReady();
navigateToConnectorsAndOpenDetails(connectorForCreate.getName());
- Assertions.assertTrue(connectorDetails.isConnectorHeaderVisible(connectorForCreate.getName()),"isConnectorTitleVisible()");
+ Assert.assertTrue(connectorDetails.isConnectorHeaderVisible(connectorForCreate.getName()), "isConnectorTitleVisible()");
navigateToConnectors();
- Assertions.assertTrue(kafkaConnectList.isConnectorVisible(CONNECTOR_FOR_DELETE.getName()), "isConnectorVisible()");
+ Assert.assertTrue(kafkaConnectList.isConnectorVisible(CONNECTOR_FOR_DELETE.getName()), "isConnectorVisible()");
CONNECTOR_LIST.add(connectorForCreate);
}
- @DisplayName("should update a connector")
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(196)
@Test
public void updateConnector() {
- navigateToConnectorsAndOpenDetails(CONNECTOR_FOR_UPDATE.getName());
- connectorDetails
+ navigateToConnectorsAndOpenDetails(CONNECTOR_FOR_UPDATE.getName());
+ connectorDetails
.openConfigTab()
.setConfig(CONNECTOR_FOR_UPDATE.getConfig())
.clickSubmitButton();
- Assertions.assertTrue(connectorDetails.isAlertWithMessageVisible(SUCCESS,"Config successfully updated."),"isAlertWithMessageVisible()");
+ Assert.assertTrue(connectorDetails.isAlertWithMessageVisible(SUCCESS, "Config successfully updated."), "isAlertWithMessageVisible()");
navigateToConnectors();
- Assertions.assertTrue(kafkaConnectList.isConnectorVisible(CONNECTOR_FOR_UPDATE.getName()), "isConnectorVisible()");
+ Assert.assertTrue(kafkaConnectList.isConnectorVisible(CONNECTOR_FOR_UPDATE.getName()), "isConnectorVisible()");
}
- @DisplayName("should delete connector")
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(195)
@Test
public void deleteConnector() {
- navigateToConnectorsAndOpenDetails(CONNECTOR_FOR_DELETE.getName());
- connectorDetails
+ navigateToConnectorsAndOpenDetails(CONNECTOR_FOR_DELETE.getName());
+ connectorDetails
.openDotMenu()
.clickDeleteBtn()
.clickConfirmBtn();
- navigateToConnectors();
- Assertions.assertFalse(kafkaConnectList.isConnectorVisible(CONNECTOR_FOR_DELETE.getName()), "isConnectorVisible()");
+ navigateToConnectors();
+ Assert.assertFalse(kafkaConnectList.isConnectorVisible(CONNECTOR_FOR_DELETE.getName()), "isConnectorVisible()");
CONNECTOR_LIST.remove(CONNECTOR_FOR_DELETE);
}
- @AfterAll
- public void afterAll() {
+ @AfterClass(alwaysRun = true)
+ public void afterClass() {
CONNECTOR_LIST.forEach(connector ->
apiService.deleteConnector(CONNECT_NAME, connector.getName()));
TOPIC_LIST.forEach(topic -> apiService.deleteTopic(topic.getName()));
}
@Step
- private void navigateToConnectors(){
- naviSideBar
- .openSideMenu(KAFKA_CONNECT);
- kafkaConnectList
- .waitUntilScreenReady();
+ private void navigateToConnectors() {
+ naviSideBar
+ .openSideMenu(KAFKA_CONNECT);
+ kafkaConnectList
+ .waitUntilScreenReady();
}
@Step
- private void navigateToConnectorsAndOpenDetails(String connectorName){
- navigateToConnectors();
- kafkaConnectList
- .openConnector(connectorName);
- connectorDetails
- .waitUntilScreenReady();
+ private void navigateToConnectorsAndOpenDetails(String connectorName) {
+ navigateToConnectors();
+ kafkaConnectList
+ .openConnector(connectorName);
+ connectorDetails
+ .waitUntilScreenReady();
}
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
new file mode 100644
index 0000000000..8689c0abde
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
@@ -0,0 +1,58 @@
+package com.provectus.kafka.ui.smokeSuite.ksqlDb;
+
+import com.provectus.kafka.ui.BaseTest;
+import com.provectus.kafka.ui.pages.ksqlDb.models.Stream;
+import com.provectus.kafka.ui.pages.ksqlDb.models.Table;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
+import io.qase.api.annotation.CaseId;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+import org.testng.asserts.SoftAssert;
+
+import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.KSQL_DB;
+import static com.provectus.kafka.ui.pages.ksqlDb.enums.KsqlQueryConfig.SHOW_TABLES;
+import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
+
+public class KsqlDbTest extends BaseTest {
+
+ private static final long SUITE_ID = 8;
+ private static final String SUITE_TITLE = "KSQL_DB";
+ private static final Stream STREAM_FOR_CHECKING_TABLES = new Stream()
+ .setName("STREAM_FOR_CHECKING_TABLES_" + randomAlphabetic(4).toUpperCase())
+ .setTopicName("TOPIC_FOR_STREAM_" + randomAlphabetic(4).toUpperCase());
+ private static final Table FIRST_TABLE = new Table()
+ .setName("FIRST_TABLE" + randomAlphabetic(4).toUpperCase())
+ .setStreamName(STREAM_FOR_CHECKING_TABLES.getName());
+ private static final Table SECOND_TABLE = new Table()
+ .setName("SECOND_TABLE" + randomAlphabetic(4).toUpperCase())
+ .setStreamName(STREAM_FOR_CHECKING_TABLES.getName());
+
+ @BeforeClass(alwaysRun = true)
+ public void beforeClass() {
+ apiService
+ .createStream(STREAM_FOR_CHECKING_TABLES)
+ .createTables(FIRST_TABLE, SECOND_TABLE);
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(41)
+ @Test
+ public void checkShowTablesRequestExecution() {
+ naviSideBar
+ .openSideMenu(KSQL_DB);
+ ksqlDbList
+ .waitUntilScreenReady()
+ .clickExecuteKsqlRequestBtn();
+ ksqlQueryForm
+ .waitUntilScreenReady()
+ .setQuery(SHOW_TABLES.getQuery())
+ .clickExecuteBtn();
+ SoftAssert softly = new SoftAssert();
+ softly.assertTrue(ksqlQueryForm.getTableByName(FIRST_TABLE.getName()).isVisible(), "getTableName()");
+ softly.assertTrue(ksqlQueryForm.getTableByName(SECOND_TABLE.getName()).isVisible(), "getTableName()");
+ softly.assertAll();
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java
new file mode 100644
index 0000000000..5c8871b4fc
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java
@@ -0,0 +1,229 @@
+package com.provectus.kafka.ui.smokeSuite.schemas;
+
+import com.codeborne.selenide.Condition;
+import com.provectus.kafka.ui.api.model.CompatibilityLevel;
+import com.provectus.kafka.ui.BaseTest;
+import com.provectus.kafka.ui.models.Schema;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
+import io.qameta.allure.Step;
+import io.qase.api.annotation.CaseId;
+import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+import org.testng.asserts.SoftAssert;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.SCHEMA_REGISTRY;
+import static com.provectus.kafka.ui.utilities.FileUtils.fileToString;
+
+public class SchemasTest extends BaseTest {
+
+ private static final long SUITE_ID = 11;
+ private static final String SUITE_TITLE = "Schema Registry";
+ private static final List<Schema> SCHEMA_LIST = new ArrayList<>();
+ private static final Schema AVRO_API = Schema.createSchemaAvro();
+ private static final Schema JSON_API = Schema.createSchemaJson();
+ private static final Schema PROTOBUF_API = Schema.createSchemaProtobuf();
+
+ @BeforeClass(alwaysRun = true)
+ public void beforeClass() {
+ SCHEMA_LIST.addAll(List.of(AVRO_API, JSON_API, PROTOBUF_API));
+ SCHEMA_LIST.forEach(schema -> apiService.createSchema(schema));
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(43)
+ @Test(priority = 1)
+ public void createSchemaAvro() {
+ Schema schemaAvro = Schema.createSchemaAvro();
+ navigateToSchemaRegistry();
+ schemaRegistryList
+ .clickCreateSchema();
+ schemaCreateForm
+ .setSubjectName(schemaAvro.getName())
+ .setSchemaField(fileToString(schemaAvro.getValuePath()))
+ .selectSchemaTypeFromDropdown(schemaAvro.getType())
+ .clickSubmitButton();
+ schemaDetails
+ .waitUntilScreenReady();
+ SoftAssert softly = new SoftAssert();
+ softly.assertTrue(schemaDetails.isSchemaHeaderVisible(schemaAvro.getName()), "isSchemaHeaderVisible()");
+ softly.assertEquals(schemaDetails.getSchemaType(), schemaAvro.getType().getValue(), "getSchemaType()");
+ softly.assertEquals(schemaDetails.getCompatibility(), CompatibilityLevel.CompatibilityEnum.BACKWARD.getValue(),
+ "getCompatibility()");
+ softly.assertAll();
+ navigateToSchemaRegistry();
+ Assert.assertTrue(schemaRegistryList.isSchemaVisible(AVRO_API.getName()), "isSchemaVisible()");
+ SCHEMA_LIST.add(schemaAvro);
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(186)
+ @Test(priority = 2)
+ public void updateSchemaAvro() {
+ AVRO_API.setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schema_avro_for_update.json");
+ navigateToSchemaRegistryAndOpenDetails(AVRO_API.getName());
+ schemaDetails
+ .openEditSchema();
+ schemaCreateForm
+ .waitUntilScreenReady();
+ verifyElementsCondition(schemaCreateForm.getAllDetailsPageElements(), Condition.visible);
+ SoftAssert softly = new SoftAssert();
+ softly.assertFalse(schemaCreateForm.isSubmitBtnEnabled(), "isSubmitBtnEnabled()");
+ softly.assertFalse(schemaCreateForm.isSchemaDropDownEnabled(), "isSchemaDropDownEnabled()");
+ softly.assertAll();
+ schemaCreateForm
+ .selectCompatibilityLevelFromDropdown(CompatibilityLevel.CompatibilityEnum.NONE)
+ .setNewSchemaValue(fileToString(AVRO_API.getValuePath()))
+ .clickSubmitButton();
+ schemaDetails
+ .waitUntilScreenReady();
+ Assert.assertEquals(schemaDetails.getCompatibility(), CompatibilityLevel.CompatibilityEnum.NONE.toString(), "getCompatibility()");
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(186)
+ @Test(priority = 3)
+ public void compareVersionsOperation() {
+ navigateToSchemaRegistryAndOpenDetails(AVRO_API.getName());
+ int latestVersion = schemaDetails
+ .waitUntilScreenReady()
+ .getLatestVersion();
+ schemaDetails
+ .openCompareVersionMenu();
+ int versionsNumberFromDdl = schemaCreateForm
+ .waitUntilScreenReady()
+ .openSchemaVersionDdl()
+ .getVersionsNumberFromList();
+ Assert.assertEquals(latestVersion, versionsNumberFromDdl, "Versions number does not match");
+ schemaCreateForm
+ .selectVersionFromDropDown(1);
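+ // the expected count of highlighted lines is tied to the bundled schema fixture files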
+ Assert.assertEquals(schemaCreateForm.getMarkedLinesNumber(), 53, "getMarkedLinesNumber()");
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(187)
+ @Test(priority = 4)
+ public void deleteSchemaAvro() {
+ navigateToSchemaRegistryAndOpenDetails(AVRO_API.getName());
+ schemaDetails
+ .removeSchema();
+ schemaRegistryList
+ .waitUntilScreenReady();
+ Assert.assertFalse(schemaRegistryList.isSchemaVisible(AVRO_API.getName()), "isSchemaVisible()");
+ SCHEMA_LIST.remove(AVRO_API);
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(89)
+ @Test(priority = 5)
+ public void createSchemaJson() {
+ Schema schemaJson = Schema.createSchemaJson();
+ navigateToSchemaRegistry();
+ schemaRegistryList
+ .clickCreateSchema();
+ schemaCreateForm
+ .setSubjectName(schemaJson.getName())
+ .setSchemaField(fileToString(schemaJson.getValuePath()))
+ .selectSchemaTypeFromDropdown(schemaJson.getType())
+ .clickSubmitButton();
+ schemaDetails
+ .waitUntilScreenReady();
+ SoftAssert softly = new SoftAssert();
+ softly.assertTrue(schemaDetails.isSchemaHeaderVisible(schemaJson.getName()), "isSchemaHeaderVisible()");
+ softly.assertEquals(schemaDetails.getSchemaType(), schemaJson.getType().getValue(), "getSchemaType()");
+ softly.assertEquals(schemaDetails.getCompatibility(), CompatibilityLevel.CompatibilityEnum.BACKWARD.getValue(),
+ "getCompatibility()");
+ softly.assertAll();
+ navigateToSchemaRegistry();
+ Assert.assertTrue(schemaRegistryList.isSchemaVisible(JSON_API.getName()), "isSchemaVisible()");
+ SCHEMA_LIST.add(schemaJson);
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(189)
+ @Test(priority = 6)
+ public void deleteSchemaJson() {
+ navigateToSchemaRegistryAndOpenDetails(JSON_API.getName());
+ schemaDetails
+ .removeSchema();
+ schemaRegistryList
+ .waitUntilScreenReady();
+ Assert.assertFalse(schemaRegistryList.isSchemaVisible(JSON_API.getName()), "isSchemaVisible()");
+ SCHEMA_LIST.remove(JSON_API);
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(91)
+ @Test(priority = 7)
+ public void createSchemaProtobuf() {
+ Schema schemaProtobuf = Schema.createSchemaProtobuf();
+ navigateToSchemaRegistry();
+ schemaRegistryList
+ .clickCreateSchema();
+ schemaCreateForm
+ .setSubjectName(schemaProtobuf.getName())
+ .setSchemaField(fileToString(schemaProtobuf.getValuePath()))
+ .selectSchemaTypeFromDropdown(schemaProtobuf.getType())
+ .clickSubmitButton();
+ schemaDetails
+ .waitUntilScreenReady();
+ SoftAssert softly = new SoftAssert();
+ softly.assertTrue(schemaDetails.isSchemaHeaderVisible(schemaProtobuf.getName()), "isSchemaHeaderVisible()");
+ softly.assertEquals(schemaDetails.getSchemaType(), schemaProtobuf.getType().getValue(), "getSchemaType()");
+ softly.assertEquals(schemaDetails.getCompatibility(), CompatibilityLevel.CompatibilityEnum.BACKWARD.getValue(),
+ "getCompatibility()");
+ softly.assertAll();
+ navigateToSchemaRegistry();
+ Assert.assertTrue(schemaRegistryList.isSchemaVisible(PROTOBUF_API.getName()), "isSchemaVisible()");
+ SCHEMA_LIST.add(schemaProtobuf);
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(223)
+ @Test(priority = 8)
+ public void deleteSchemaProtobuf() {
+ navigateToSchemaRegistryAndOpenDetails(PROTOBUF_API.getName());
+ schemaDetails
+ .removeSchema();
+ schemaRegistryList
+ .waitUntilScreenReady();
+ Assert.assertFalse(schemaRegistryList.isSchemaVisible(PROTOBUF_API.getName()), "isSchemaVisible()");
+ SCHEMA_LIST.remove(PROTOBUF_API);
+ }
+
+ @AfterClass(alwaysRun = true)
+ public void afterClass() {
+ SCHEMA_LIST.forEach(schema -> apiService.deleteSchema(schema.getName()));
+ }
+
+ @Step
+ private void navigateToSchemaRegistry() {
+ naviSideBar
+ .openSideMenu(SCHEMA_REGISTRY);
+ schemaRegistryList
+ .waitUntilScreenReady();
+ }
+
+ @Step
+ private void navigateToSchemaRegistryAndOpenDetails(String schemaName) {
+ navigateToSchemaRegistry();
+ schemaRegistryList
+ .openSchema(schemaName);
+ schemaDetails
+ .waitUntilScreenReady();
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
new file mode 100644
index 0000000000..66f7d8a50a
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
@@ -0,0 +1,292 @@
+package com.provectus.kafka.ui.smokeSuite.topics;
+
+import com.provectus.kafka.ui.BaseTest;
+import com.provectus.kafka.ui.models.Topic;
+import com.provectus.kafka.ui.pages.topics.TopicDetails;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
+import io.qameta.allure.Issue;
+import io.qase.api.annotation.CaseId;
+import io.qase.api.annotation.Step;
+import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Ignore;
+import org.testng.annotations.Test;
+import org.testng.asserts.SoftAssert;
+
+import java.time.LocalDateTime;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.stream.IntStream;
+
+import static com.provectus.kafka.ui.pages.BasePage.AlertHeader.SUCCESS;
+import static com.provectus.kafka.ui.pages.topics.TopicDetails.TopicMenu.MESSAGES;
+import static com.provectus.kafka.ui.pages.topics.TopicDetails.TopicMenu.OVERVIEW;
+import static com.provectus.kafka.ui.utilities.TimeUtils.waitUntilNewMinuteStarted;
+import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
+
+public class MessagesTest extends BaseTest {
+
+ private static final long SUITE_ID = 2;
+ private static final String SUITE_TITLE = "Topics";
+ private static final Topic TOPIC_FOR_MESSAGES = new Topic()
+ .setName("topic-with-clean-message-attribute-" + randomAlphabetic(5))
+ .setMessageKey(randomAlphabetic(5))
+ .setMessageContent(randomAlphabetic(10));
+ private static final Topic TOPIC_TO_CLEAR_AND_PURGE_MESSAGES = new Topic()
+ .setName("topic-to-clear-and-purge-messages-attribute-" + randomAlphabetic(5))
+ .setMessageKey(randomAlphabetic(5))
+ .setMessageContent(randomAlphabetic(10));
+ private static final Topic TOPIC_FOR_CHECKING_FILTERS = new Topic()
+ .setName("topic-for-checking-filters-" + randomAlphabetic(5))
+ .setMessageKey(randomAlphabetic(5))
+ .setMessageContent(randomAlphabetic(10));
+ private static final Topic TOPIC_TO_RECREATE = new Topic()
+ .setName("topic-to-recreate-attribute-" + randomAlphabetic(5))
+ .setMessageKey(randomAlphabetic(5))
+ .setMessageContent(randomAlphabetic(10));
+ private static final List<Topic> TOPIC_LIST = new ArrayList<>();
+
+ @BeforeClass(alwaysRun = true)
+ public void beforeClass() {
+ TOPIC_LIST.addAll(List.of(TOPIC_FOR_MESSAGES, TOPIC_FOR_CHECKING_FILTERS, TOPIC_TO_CLEAR_AND_PURGE_MESSAGES,
+ TOPIC_TO_RECREATE));
+ TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName()));
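+ // produce two bursts of messages in distinct minutes (range(1, 3) sends two per burst)
+ // so checkingMessageFilteringByTimestamp() can tell the bursts apart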
+ IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECKING_FILTERS));
+ waitUntilNewMinuteStarted();
+ IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECKING_FILTERS));
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(222)
+ @Test(priority = 1)
+ public void produceMessage() {
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_MESSAGES.getName());
+ topicDetails
+ .openDetailsTab(MESSAGES);
+ produceMessage(TOPIC_FOR_MESSAGES);
+ SoftAssert softly = new SoftAssert();
+ softly.assertTrue(topicDetails.isKeyMessageVisible((TOPIC_FOR_MESSAGES.getMessageKey())),
+ "isKeyMessageVisible()");
+ softly.assertTrue(topicDetails.isContentMessageVisible((TOPIC_FOR_MESSAGES.getMessageContent()).trim()),
+ "isContentMessageVisible()");
+ softly.assertAll();
+ }
+
+ @Ignore
+ @Issue("https://github.com/provectus/kafka-ui/issues/2778")
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(19)
+ @Test(priority = 2)
+ public void clearMessage() {
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_MESSAGES.getName());
+ topicDetails
+ .openDetailsTab(OVERVIEW);
+ int messageAmount = topicDetails.getMessageCountAmount();
+ produceMessage(TOPIC_FOR_MESSAGES);
+ Assert.assertEquals(topicDetails.getMessageCountAmount(), messageAmount + 1, "getMessageCountAmount()");
+ topicDetails
+ .openDotMenu()
+ .clickClearMessagesMenu()
+ .waitUntilScreenReady();
+ Assert.assertEquals(topicDetails.getMessageCountAmount(), 0, "getMessageCountAmount()");
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(239)
+ @Test(priority = 3)
+ public void checkClearTopicMessage() {
+ navigateToTopicsAndOpenDetails(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName());
+ topicDetails
+ .openDetailsTab(OVERVIEW);
+ produceMessage(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES);
+ navigateToTopics();
+ Assert.assertEquals(topicsList.getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName()).getNumberOfMessages(), 1,
+ "getNumberOfMessages()");
+ topicsList
+ .openDotMenuByTopicName(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName())
+ .clickClearMessagesBtn()
+ .clickConfirmBtnMdl();
+ SoftAssert softly = new SoftAssert();
+ softly.assertTrue(topicsList.isAlertWithMessageVisible(SUCCESS,
+ String.format("%s messages have been successfully cleared!", TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName())),
+ "isAlertWithMessageVisible()");
+ softly.assertEquals(topicsList.getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName()).getNumberOfMessages(), 0,
+ "getNumberOfMessages()");
+ softly.assertAll();
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(10)
+ @Test(priority = 4)
+ public void checkPurgeMessagePossibility() {
+ navigateToTopics();
+ int messageAmount = topicsList.getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName()).getNumberOfMessages();
+ topicsList
+ .openTopic(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName());
+ topicDetails
+ .openDetailsTab(OVERVIEW);
+ produceMessage(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES);
+ navigateToTopics();
+ Assert.assertEquals(topicsList.getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName()).getNumberOfMessages(),
+ messageAmount + 1, "getNumberOfMessages()");
+ topicsList
+ .getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName())
+ .selectItem(true)
+ .clickPurgeMessagesOfSelectedTopicsBtn();
+ Assert.assertTrue(topicsList.isConfirmationMdlVisible(), "isConfirmationMdlVisible()");
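+ // dismiss the modal once via Cancel to verify it closes, then purge for real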
+ topicsList
+ .clickCancelBtnMdl()
+ .clickPurgeMessagesOfSelectedTopicsBtn()
+ .clickConfirmBtnMdl();
+ SoftAssert softly = new SoftAssert();
+ softly.assertTrue(topicsList.isAlertWithMessageVisible(SUCCESS,
+ String.format("%s messages have been successfully cleared!", TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName())),
+ "isAlertWithMessageVisible()");
+ softly.assertEquals(topicsList.getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName()).getNumberOfMessages(), 0,
+ "getNumberOfMessages()");
+ softly.assertAll();
+ }
+
+ @Ignore
+ @Issue("https://github.com/provectus/kafka-ui/issues/2819")
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(21)
+ @Test(priority = 5)
+ public void copyMessageFromTopicProfile() {
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
+ topicDetails
+ .openDetailsTab(MESSAGES)
+ .getRandomMessage()
+ .openDotMenu()
+ .clickCopyToClipBoard();
+ Assert.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS, "Copied successfully!"),
+ "isAlertWithMessageVisible()");
+ }
+
+ @Ignore
+ @Issue("https://github.com/provectus/kafka-ui/issues/2394")
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(15)
+ @Test(priority = 6)
+ public void checkingMessageFilteringByOffset() {
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
+ topicDetails
+ .openDetailsTab(MESSAGES);
+ TopicDetails.MessageGridItem secondMessage = topicDetails.getMessageByOffset(1);
+ topicDetails
+ .selectSeekTypeDdlMessagesTab("Offset")
+ .setSeekTypeValueFldMessagesTab(String.valueOf(secondMessage.getOffset()))
+ .clickSubmitFiltersBtnMessagesTab();
+ SoftAssert softly = new SoftAssert();
+ topicDetails.getAllMessages().forEach(message ->
+ softly.assertTrue(message.getOffset() == secondMessage.getOffset()
+ || message.getOffset() > secondMessage.getOffset(),
+ String.format("Expected offset is: %s, but found: %s", secondMessage.getOffset(), message.getOffset())));
+ softly.assertAll();
+ }
+
+ @Ignore
+ @Issue("https://github.com/provectus/kafka-ui/issues/3215")
+ @Issue("https://github.com/provectus/kafka-ui/issues/2345")
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(16)
+ @Test(priority = 7)
+ public void checkingMessageFilteringByTimestamp() {
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
+ topicDetails
+ .openDetailsTab(MESSAGES);
+ LocalDateTime firstTimestamp = topicDetails.getMessageByOffset(0).getTimestamp();
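+ // take the first message whose timestamp falls into a later minute than the first one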
+ List<TopicDetails.MessageGridItem> nextMessages = topicDetails.getAllMessages().stream()
+ .filter(message -> message.getTimestamp().getMinute() != firstTimestamp.getMinute())
+ .toList();
+ LocalDateTime nextTimestamp = Objects.requireNonNull(nextMessages.stream()
+ .findFirst().orElseThrow()).getTimestamp();
+ topicDetails
+ .selectSeekTypeDdlMessagesTab("Timestamp")
+ .openCalendarSeekType()
+ .selectDateAndTimeByCalendar(nextTimestamp)
+ .clickSubmitFiltersBtnMessagesTab();
+ SoftAssert softly = new SoftAssert();
+ topicDetails.getAllMessages().forEach(message ->
+ softly.assertTrue(message.getTimestamp().isEqual(nextTimestamp)
+ || message.getTimestamp().isAfter(nextTimestamp),
+ String.format("Expected timestamp is: %s, but found: %s", nextTimestamp, message.getTimestamp())));
+ softly.assertAll();
+ }
+
+ @Ignore
+ @Issue("https://github.com/provectus/kafka-ui/issues/2778")
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(246)
+ @Test(priority = 8)
+ public void checkClearTopicMessageFromOverviewTab() {
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
+ topicDetails
+ .openDetailsTab(OVERVIEW)
+ .openDotMenu()
+ .clickClearMessagesMenu()
+ .clickConfirmBtnMdl();
+ SoftAssert softly = new SoftAssert();
+ softly.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS,
+ String.format("%s messages have been successfully cleared!", TOPIC_FOR_CHECKING_FILTERS.getName())),
+ "isAlertWithMessageVisible()");
+ softly.assertEquals(topicDetails.getMessageCountAmount(), 0,
+ "getMessageCountAmount()= " + topicDetails.getMessageCountAmount());
+ softly.assertAll();
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(240)
+ @Test(priority = 9)
+ public void checkRecreateTopic() {
+ navigateToTopicsAndOpenDetails(TOPIC_TO_RECREATE.getName());
+ topicDetails
+ .openDetailsTab(OVERVIEW);
+ produceMessage(TOPIC_TO_RECREATE);
+ navigateToTopics();
+ Assert.assertEquals(topicsList.getTopicItem(TOPIC_TO_RECREATE.getName()).getNumberOfMessages(), 1,
+ "getNumberOfMessages()");
+ topicsList
+ .openDotMenuByTopicName(TOPIC_TO_RECREATE.getName())
+ .clickRecreateTopicBtn()
+ .clickConfirmBtnMdl();
+ SoftAssert softly = new SoftAssert();
+ softly.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS,
+ String.format("Topic %s successfully recreated!", TOPIC_TO_RECREATE.getName())),
+ "isAlertWithMessageVisible()");
+ softly.assertEquals(topicsList.getTopicItem(TOPIC_TO_RECREATE.getName()).getNumberOfMessages(), 0,
+ "getNumberOfMessages()");
+ softly.assertAll();
+ }
+
+ @Step
+ protected void produceMessage(Topic topic) {
+ topicDetails
+ .clickProduceMessageBtn();
+ produceMessagePanel
+ .waitUntilScreenReady()
+ .setKeyField(topic.getMessageKey())
+ .setContentFiled(topic.getMessageContent())
+ .submitProduceMessage();
+ topicDetails
+ .waitUntilScreenReady();
+ }
+
+ @AfterClass(alwaysRun = true)
+ public void afterClass() {
+ TOPIC_LIST.forEach(topic -> apiService.deleteTopic(topic.getName()));
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
new file mode 100644
index 0000000000..62fb0b0448
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
@@ -0,0 +1,503 @@
+package com.provectus.kafka.ui.smokeSuite.topics;
+
+import com.codeborne.selenide.Condition;
+import com.provectus.kafka.ui.BaseTest;
+import com.provectus.kafka.ui.models.Topic;
+import com.provectus.kafka.ui.pages.topics.TopicDetails;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
+import io.qameta.allure.Issue;
+import io.qase.api.annotation.CaseId;
+import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Ignore;
+import org.testng.annotations.Test;
+import org.testng.asserts.SoftAssert;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static com.provectus.kafka.ui.pages.BasePage.AlertHeader.SUCCESS;
+import static com.provectus.kafka.ui.pages.topics.TopicDetails.TopicMenu.MESSAGES;
+import static com.provectus.kafka.ui.pages.topics.TopicDetails.TopicMenu.SETTINGS;
+import static com.provectus.kafka.ui.pages.topics.enums.CleanupPolicyValue.COMPACT;
+import static com.provectus.kafka.ui.pages.topics.enums.CleanupPolicyValue.DELETE;
+import static com.provectus.kafka.ui.pages.topics.enums.CustomParameterType.COMPRESSION_TYPE;
+import static com.provectus.kafka.ui.pages.topics.enums.MaxSizeOnDisk.*;
+import static com.provectus.kafka.ui.pages.topics.enums.TimeToRetain.BTN_2_DAYS;
+import static com.provectus.kafka.ui.pages.topics.enums.TimeToRetain.BTN_7_DAYS;
+import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
+import static org.apache.commons.lang3.RandomUtils.nextInt;
+
+public class TopicsTest extends BaseTest {
+
+ private static final long SUITE_ID = 2;
+ private static final String SUITE_TITLE = "Topics";
+ private static final Topic TOPIC_TO_CREATE = new Topic()
+ .setName("new-topic-" + randomAlphabetic(5))
+ .setNumberOfPartitions(1)
+ .setCustomParameterType(COMPRESSION_TYPE)
+ .setCustomParameterValue("producer")
+ .setCleanupPolicyValue(DELETE);
+ private static final Topic TOPIC_TO_UPDATE_AND_DELETE = new Topic()
+ .setName("topic-to-update-and-delete-" + randomAlphabetic(5))
+ .setNumberOfPartitions(1)
+ .setCleanupPolicyValue(DELETE)
+ .setTimeToRetain(BTN_7_DAYS)
+ .setMaxSizeOnDisk(NOT_SET)
+ .setMaxMessageBytes("1048588")
+ .setMessageKey(randomAlphabetic(5))
+ .setMessageContent(randomAlphabetic(10));
+ private static final Topic TOPIC_TO_CHECK_SETTINGS = new Topic()
+ .setName("new-topic-" + randomAlphabetic(5))
+ .setNumberOfPartitions(1)
+ .setMaxMessageBytes("1000012")
+ .setMaxSizeOnDisk(NOT_SET);
+ private static final Topic TOPIC_FOR_CHECK_FILTERS = new Topic()
+ .setName("topic-for-check-filters-" + randomAlphabetic(5));
+ private static final Topic TOPIC_FOR_DELETE = new Topic().setName("topic-to-delete-" + randomAlphabetic(5));
+ private static final List<Topic> TOPIC_LIST = new ArrayList<>();
+
+ @BeforeClass(alwaysRun = true)
+ public void beforeClass() {
+ TOPIC_LIST.addAll(List.of(TOPIC_TO_UPDATE_AND_DELETE, TOPIC_FOR_DELETE, TOPIC_FOR_CHECK_FILTERS));
+ TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName()));
+ }
+
+ @Suite(suiteId = 4, title = "Create new Topic")
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(199)
+ @Test(priority = 1)
+ public void createTopic() {
+ navigateToTopics();
+ topicsList
+ .clickAddTopicBtn();
+ topicCreateEditForm
+ .waitUntilScreenReady()
+ .setTopicName(TOPIC_TO_CREATE.getName())
+ .setNumberOfPartitions(TOPIC_TO_CREATE.getNumberOfPartitions())
+ .selectCleanupPolicy(TOPIC_TO_CREATE.getCleanupPolicyValue())
+ .clickCreateTopicBtn();
+ navigateToTopicsAndOpenDetails(TOPIC_TO_CREATE.getName());
+ SoftAssert softly = new SoftAssert();
+ softly.assertTrue(topicDetails.isTopicHeaderVisible(TOPIC_TO_CREATE.getName()), "isTopicHeaderVisible()");
+ softly.assertEquals(topicDetails.getCleanUpPolicy(), TOPIC_TO_CREATE.getCleanupPolicyValue().toString(), "getCleanUpPolicy()");
+ softly.assertEquals(topicDetails.getPartitions(), TOPIC_TO_CREATE.getNumberOfPartitions(), "getPartitions()");
+ softly.assertAll();
+ navigateToTopics();
+ Assert.assertTrue(topicsList.isTopicVisible(TOPIC_TO_CREATE.getName()), "isTopicVisible()");
+ TOPIC_LIST.add(TOPIC_TO_CREATE);
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(7)
+ @Test(priority = 2)
+ public void checkAvailableOperations() {
+ navigateToTopics();
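+ // assumption: these ksqlDB service topics exist by default in the e2e environment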
+ topicsList
+ .getTopicItem("my_ksql_1ksql_processing_log")
+ .selectItem(true);
+ verifyElementsCondition(topicsList.getActionButtons(), Condition.enabled);
+ topicsList
+ .getTopicItem("_confluent-ksql-my_ksql_1_command_topic")
+ .selectItem(true);
+ Assert.assertFalse(topicsList.isCopySelectedTopicBtnEnabled(), "isCopySelectedTopicBtnEnabled()");
+ }
+
+ @Ignore
+ @Issue("https://github.com/provectus/kafka-ui/issues/2625")
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(197)
+ @Test(priority = 3)
+ public void updateTopic() {
+ navigateToTopicsAndOpenDetails(TOPIC_TO_UPDATE_AND_DELETE.getName());
+ topicDetails
+ .openDotMenu()
+ .clickEditSettingsMenu();
+ topicCreateEditForm
+ .waitUntilScreenReady();
+ SoftAssert softly = new SoftAssert();
+ softly.assertEquals(topicCreateEditForm.getCleanupPolicy(),
+ TOPIC_TO_UPDATE_AND_DELETE.getCleanupPolicyValue().getVisibleText(), "getCleanupPolicy()");
+ softly.assertEquals(topicCreateEditForm.getTimeToRetain(),
+ TOPIC_TO_UPDATE_AND_DELETE.getTimeToRetain().getValue(), "getTimeToRetain()");
+ softly.assertEquals(topicCreateEditForm.getMaxSizeOnDisk(),
+ TOPIC_TO_UPDATE_AND_DELETE.getMaxSizeOnDisk().getVisibleText(), "getMaxSizeOnDisk()");
+ softly.assertEquals(topicCreateEditForm.getMaxMessageBytes(),
+ TOPIC_TO_UPDATE_AND_DELETE.getMaxMessageBytes(), "getMaxMessageBytes()");
+ softly.assertAll();
+ TOPIC_TO_UPDATE_AND_DELETE
+ .setCleanupPolicyValue(COMPACT)
+ .setTimeToRetain(BTN_2_DAYS)
+ .setMaxSizeOnDisk(SIZE_50_GB)
+ .setMaxMessageBytes("1048589");
+ topicCreateEditForm
+ .selectCleanupPolicy((TOPIC_TO_UPDATE_AND_DELETE.getCleanupPolicyValue()))
+ .setTimeToRetainDataByButtons(TOPIC_TO_UPDATE_AND_DELETE.getTimeToRetain())
+ .setMaxSizeOnDiskInGB(TOPIC_TO_UPDATE_AND_DELETE.getMaxSizeOnDisk())
+ .setMaxMessageBytes(TOPIC_TO_UPDATE_AND_DELETE.getMaxMessageBytes())
+ .clickCreateTopicBtn();
+ softly.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS, "Topic successfully updated."),
+ "isAlertWithMessageVisible()");
+ softly.assertTrue(topicDetails.isTopicHeaderVisible(TOPIC_TO_UPDATE_AND_DELETE.getName()),
+ "isTopicHeaderVisible()");
+ softly.assertAll();
+ topicDetails
+ .waitUntilScreenReady();
+ navigateToTopicsAndOpenDetails(TOPIC_TO_UPDATE_AND_DELETE.getName());
+ topicDetails
+ .openDotMenu()
+ .clickEditSettingsMenu();
+ softly.assertFalse(topicCreateEditForm.isNameFieldEnabled(), "isNameFieldEnabled()");
+ softly.assertEquals(topicCreateEditForm.getCleanupPolicy(),
+ TOPIC_TO_UPDATE_AND_DELETE.getCleanupPolicyValue().getVisibleText(), "getCleanupPolicy()");
+ softly.assertEquals(topicCreateEditForm.getTimeToRetain(),
+ TOPIC_TO_UPDATE_AND_DELETE.getTimeToRetain().getValue(), "getTimeToRetain()");
+ softly.assertEquals(topicCreateEditForm.getMaxSizeOnDisk(),
+ TOPIC_TO_UPDATE_AND_DELETE.getMaxSizeOnDisk().getVisibleText(), "getMaxSizeOnDisk()");
+ softly.assertEquals(topicCreateEditForm.getMaxMessageBytes(),
+ TOPIC_TO_UPDATE_AND_DELETE.getMaxMessageBytes(), "getMaxMessageBytes()");
+ softly.assertAll();
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(242)
+ @Test(priority = 4)
+ public void removeTopicFromTopicList() {
+ navigateToTopics();
+ topicsList
+ .openDotMenuByTopicName(TOPIC_TO_UPDATE_AND_DELETE.getName())
+ .clickRemoveTopicBtn()
+ .clickConfirmBtnMdl();
+ Assert.assertTrue(topicsList.isAlertWithMessageVisible(SUCCESS,
+ String.format("Topic %s successfully deleted!", TOPIC_TO_UPDATE_AND_DELETE.getName())),
+ "isAlertWithMessageVisible()");
+ TOPIC_LIST.remove(TOPIC_TO_UPDATE_AND_DELETE);
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(207)
+ @Test(priority = 5)
+ public void deleteTopic() {
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_DELETE.getName());
+ topicDetails
+ .openDotMenu()
+ .clickDeleteTopicMenu()
+ .clickConfirmBtnMdl();
+ navigateToTopics();
+ Assert.assertFalse(topicsList.isTopicVisible(TOPIC_FOR_DELETE.getName()), "isTopicVisible()");
+ TOPIC_LIST.remove(TOPIC_FOR_DELETE);
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(20)
+ @Test(priority = 6)
+ public void redirectToConsumerFromTopic() {
+ String topicName = "source-activities";
+ String consumerGroupId = "connect-sink_postgres_activities";
+ navigateToTopicsAndOpenDetails(topicName);
+ topicDetails
+ .openDetailsTab(TopicDetails.TopicMenu.CONSUMERS)
+ .openConsumerGroup(consumerGroupId);
+ consumersDetails
+ .waitUntilScreenReady();
+ SoftAssert softly = new SoftAssert();
+ softly.assertTrue(consumersDetails.isRedirectedConsumerTitleVisible(consumerGroupId),
+ "isRedirectedConsumerTitleVisible()");
+ softly.assertTrue(consumersDetails.isTopicInConsumersDetailsVisible(topicName),
+ "isTopicInConsumersDetailsVisible()");
+ softly.assertAll();
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(4)
+ @Test(priority = 7)
+ public void checkTopicCreatePossibility() {
+ navigateToTopics();
+ topicsList
+ .clickAddTopicBtn();
+ topicCreateEditForm
+ .waitUntilScreenReady();
+ Assert.assertFalse(topicCreateEditForm.isCreateTopicButtonEnabled(), "isCreateTopicButtonEnabled()");
+ topicCreateEditForm
+ .setTopicName("testName");
+ Assert.assertFalse(topicCreateEditForm.isCreateTopicButtonEnabled(), "isCreateTopicButtonEnabled()");
+ topicCreateEditForm
+ .setTopicName(null)
+ .setNumberOfPartitions(nextInt(1, 10));
+ Assert.assertFalse(topicCreateEditForm.isCreateTopicButtonEnabled(), "isCreateTopicButtonEnabled()");
+ topicCreateEditForm
+ .setTopicName("testName");
+ Assert.assertTrue(topicCreateEditForm.isCreateTopicButtonEnabled(), "isCreateTopicButtonEnabled()");
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(266)
+ @Test(priority = 8)
+ public void checkTimeToRetainDataCustomValueWithEditingTopic() {
+ Topic topicToRetainData = new Topic()
+ .setName("topic-to-retain-data-" + randomAlphabetic(5))
+ .setTimeToRetainData("86400000");
+ navigateToTopics();
+ topicsList
+ .clickAddTopicBtn();
+ topicCreateEditForm
+ .waitUntilScreenReady()
+ .setTopicName(topicToRetainData.getName())
+ .setNumberOfPartitions(1)
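+ // 604800000 ms = 7 days; the topic is then saved with the custom 86400000 ms (1 day)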
+ .setTimeToRetainDataInMs("604800000");
+ Assert.assertEquals(topicCreateEditForm.getTimeToRetain(), "604800000", "getTimeToRetain()");
+ topicCreateEditForm
+ .setTimeToRetainDataInMs(topicToRetainData.getTimeToRetainData())
+ .clickCreateTopicBtn();
+ topicDetails
+ .waitUntilScreenReady()
+ .openDotMenu()
+ .clickEditSettingsMenu();
+ Assert.assertEquals(topicCreateEditForm.getTimeToRetain(), topicToRetainData.getTimeToRetainData(),
+ "getTimeToRetain()");
+ topicDetails
+ .openDetailsTab(SETTINGS);
+ Assert.assertEquals(topicDetails.getSettingsGridValueByKey("retention.ms"), topicToRetainData.getTimeToRetainData(),
+ "getSettingsGridValueByKey()");
+ TOPIC_LIST.add(topicToRetainData);
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(6)
+ @Test(priority = 9)
+ public void checkCustomParametersWithinCreateNewTopic() {
+ navigateToTopics();
+ topicsList
+ .clickAddTopicBtn();
+ topicCreateEditForm
+ .waitUntilScreenReady()
+ .setTopicName(TOPIC_TO_CREATE.getName())
+ .clickAddCustomParameterTypeButton()
+ .setCustomParameterType(TOPIC_TO_CREATE.getCustomParameterType());
+ Assert.assertTrue(topicCreateEditForm.isDeleteCustomParameterButtonEnabled(),
+ "isDeleteCustomParameterButtonEnabled()");
+ topicCreateEditForm
+ .clearCustomParameterValue();
+ Assert.assertTrue(topicCreateEditForm.isValidationMessageCustomParameterValueVisible(),
+ "isValidationMessageCustomParameterValueVisible()");
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(2)
+ @Test(priority = 10)
+ public void checkTopicListElements() {
+ navigateToTopics();
+ verifyElementsCondition(topicsList.getAllVisibleElements(), Condition.visible);
+ verifyElementsCondition(topicsList.getAllEnabledElements(), Condition.enabled);
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(12)
+ @Test(priority = 11)
+ public void addingNewFilterWithinTopic() {
+ String filterName = randomAlphabetic(5);
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
+ topicDetails
+ .openDetailsTab(MESSAGES)
+ .clickMessagesAddFiltersBtn()
+ .waitUntilAddFiltersMdlVisible();
+ verifyElementsCondition(topicDetails.getAllAddFilterModalVisibleElements(), Condition.visible);
+ verifyElementsCondition(topicDetails.getAllAddFilterModalEnabledElements(), Condition.enabled);
+ verifyElementsCondition(topicDetails.getAllAddFilterModalDisabledElements(), Condition.disabled);
+ Assert.assertFalse(topicDetails.isSaveThisFilterCheckBoxSelected(), "isSaveThisFilterCheckBoxSelected()");
+ topicDetails
+ .setFilterCodeFieldAddFilterMdl(filterName);
+ Assert.assertTrue(topicDetails.isAddFilterBtnAddFilterMdlEnabled(), "isAddFilterBtnAddFilterMdlEnabled()");
+ topicDetails.clickAddFilterBtnAndCloseMdl(true);
+ Assert.assertTrue(topicDetails.isActiveFilterVisible(filterName), "isActiveFilterVisible()");
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(13)
+ @Test(priority = 12)
+ public void checkFilterSavingWithinSavedFilters() {
+ String displayName = randomAlphabetic(5);
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
+ topicDetails
+ .openDetailsTab(MESSAGES)
+ .clickMessagesAddFiltersBtn()
+ .waitUntilAddFiltersMdlVisible()
+ .setFilterCodeFieldAddFilterMdl(randomAlphabetic(4))
+ .selectSaveThisFilterCheckboxMdl(true)
+ .setDisplayNameFldAddFilterMdl(displayName);
+ Assert.assertTrue(topicDetails.isAddFilterBtnAddFilterMdlEnabled(),
+ "isAddFilterBtnAddFilterMdlEnabled()");
+ topicDetails
+ .clickAddFilterBtnAndCloseMdl(false)
+ .openSavedFiltersListMdl();
+ Assert.assertTrue(topicDetails.isFilterVisibleAtSavedFiltersMdl(displayName),
+ "isFilterVisibleAtSavedFiltersMdl()");
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(14)
+ @Test(priority = 13)
+ public void checkApplyingSavedFilterWithinTopicMessages() {
+ String displayName = randomAlphabetic(5);
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
+ topicDetails
+ .openDetailsTab(MESSAGES)
+ .clickMessagesAddFiltersBtn()
+ .waitUntilAddFiltersMdlVisible()
+ .setFilterCodeFieldAddFilterMdl(randomAlphabetic(4))
+ .selectSaveThisFilterCheckboxMdl(true)
+ .setDisplayNameFldAddFilterMdl(displayName)
+ .clickAddFilterBtnAndCloseMdl(false)
+ .openSavedFiltersListMdl()
+ .selectFilterAtSavedFiltersMdl(displayName)
+ .clickSelectFilterBtnAtSavedFiltersMdl();
+ Assert.assertTrue(topicDetails.isActiveFilterVisible(displayName), "isActiveFilterVisible()");
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(11)
+ @Test(priority = 14)
+ public void checkShowInternalTopicsButtonFunctionality() {
+ navigateToTopics();
+ SoftAssert softly = new SoftAssert();
+ softly.assertTrue(topicsList.isShowInternalRadioBtnSelected(), "isShowInternalRadioBtnSelected()");
+ softly.assertTrue(topicsList.getInternalTopics().size() > 0, "getInternalTopics()");
+ softly.assertTrue(topicsList.getNonInternalTopics().size() > 0, "getNonInternalTopics()");
+ softly.assertAll();
+ topicsList
+ .setShowInternalRadioButton(false);
+ softly.assertEquals(topicsList.getInternalTopics().size(), 0, "getInternalTopics()");
+ softly.assertTrue(topicsList.getNonInternalTopics().size() > 0, "getNonInternalTopics()");
+ softly.assertAll();
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(56)
+ @Test(priority = 15)
+ public void checkRetentionBytesAccordingToMaxSizeOnDisk() {
+ navigateToTopics();
+ topicsList
+ .clickAddTopicBtn();
+ topicCreateEditForm
+ .waitUntilScreenReady()
+ .setTopicName(TOPIC_TO_CHECK_SETTINGS.getName())
+ .setNumberOfPartitions(TOPIC_TO_CHECK_SETTINGS.getNumberOfPartitions())
+ .setMaxMessageBytes(TOPIC_TO_CHECK_SETTINGS.getMaxMessageBytes())
+ .clickCreateTopicBtn();
+ topicDetails
+ .waitUntilScreenReady();
+ TOPIC_LIST.add(TOPIC_TO_CHECK_SETTINGS);
+ topicDetails
+ .openDetailsTab(SETTINGS);
+ topicSettingsTab
+ .waitUntilScreenReady();
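+ // retention.bytes shown in Settings should mirror the MaxSizeOnDisk option (broker default while NOT_SET)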
+ SoftAssert softly = new SoftAssert();
+ softly.assertEquals(topicSettingsTab.getValueByKey("retention.bytes"),
+ TOPIC_TO_CHECK_SETTINGS.getMaxSizeOnDisk().getOptionValue(), "getValueOfKey(retention.bytes)");
+ softly.assertEquals(topicSettingsTab.getValueByKey("max.message.bytes"),
+ TOPIC_TO_CHECK_SETTINGS.getMaxMessageBytes(), "getValueOfKey(max.message.bytes)");
+ softly.assertAll();
+ TOPIC_TO_CHECK_SETTINGS
+ .setMaxSizeOnDisk(SIZE_1_GB)
+ .setMaxMessageBytes("1000056");
+ topicDetails
+ .openDotMenu()
+ .clickEditSettingsMenu();
+ topicCreateEditForm
+ .waitUntilScreenReady()
+ .setMaxSizeOnDiskInGB(TOPIC_TO_CHECK_SETTINGS.getMaxSizeOnDisk())
+ .setMaxMessageBytes(TOPIC_TO_CHECK_SETTINGS.getMaxMessageBytes())
+ .clickCreateTopicBtn();
+ topicDetails
+ .waitUntilScreenReady()
+ .openDetailsTab(SETTINGS);
+ topicSettingsTab
+ .waitUntilScreenReady();
+ softly.assertEquals(topicSettingsTab.getValueByKey("retention.bytes"),
+ TOPIC_TO_CHECK_SETTINGS.getMaxSizeOnDisk().getOptionValue(), "getValueOfKey(retention.bytes)");
+ softly.assertEquals(topicSettingsTab.getValueByKey("max.message.bytes"),
+ TOPIC_TO_CHECK_SETTINGS.getMaxMessageBytes(), "getValueOfKey(max.message.bytes)");
+ softly.assertAll();
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(247)
+ @Test(priority = 16)
+ public void recreateTopicFromTopicProfile() {
+ Topic topicToRecreate = new Topic()
+ .setName("topic-to-recreate-" + randomAlphabetic(5))
+ .setNumberOfPartitions(1);
+ navigateToTopics();
+ topicsList
+ .clickAddTopicBtn();
+ topicCreateEditForm
+ .waitUntilScreenReady()
+ .setTopicName(topicToRecreate.getName())
+ .setNumberOfPartitions(topicToRecreate.getNumberOfPartitions())
+ .clickCreateTopicBtn();
+ topicDetails
+ .waitUntilScreenReady();
+ TOPIC_LIST.add(topicToRecreate);
+ topicDetails
+ .openDotMenu()
+ .clickRecreateTopicMenu();
+ Assert.assertTrue(topicDetails.isConfirmationMdlVisible(), "isConfirmationMdlVisible()");
+ topicDetails
+ .clickConfirmBtnMdl();
+ Assert.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS,
+ String.format("Topic %s successfully recreated!", topicToRecreate.getName())),
+ "isAlertWithMessageVisible()");
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(8)
+ @Test(priority = 17)
+ public void checkCopyTopicPossibility() {
+ Topic topicToCopy = new Topic()
+ .setName("topic-to-copy-" + randomAlphabetic(5))
+ .setNumberOfPartitions(1);
+ navigateToTopics();
+ topicsList
+ .getTopicItem("_schemas")
+ .selectItem(true)
+ .clickCopySelectedTopicBtn();
+ topicCreateEditForm
+ .waitUntilScreenReady();
+ Assert.assertFalse(topicCreateEditForm.isCreateTopicButtonEnabled(), "isCreateTopicButtonEnabled()");
+ topicCreateEditForm
+ .setTopicName(topicToCopy.getName())
+ .setNumberOfPartitions(topicToCopy.getNumberOfPartitions())
+ .clickCreateTopicBtn();
+ topicDetails
+ .waitUntilScreenReady();
+ TOPIC_LIST.add(topicToCopy);
+ SoftAssert softly = new SoftAssert();
+ softly.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS, "Topic successfully created."),
+ "isAlertWithMessageVisible()");
+ softly.assertTrue(topicDetails.isTopicHeaderVisible(topicToCopy.getName()), "isTopicHeaderVisible()");
+ softly.assertAll();
+ }
+
+ @AfterClass(alwaysRun = true)
+ public void afterClass() {
+ TOPIC_LIST.forEach(topic -> apiService.deleteTopic(topic.getName()));
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/SmokeTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/SmokeTests.java
deleted file mode 100644
index ac5f5c5755..0000000000
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/SmokeTests.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package com.provectus.kafka.ui.suite;
-
-import com.codeborne.selenide.Condition;
-import com.provectus.kafka.ui.base.BaseTest;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
-import io.qase.api.annotation.CaseId;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-import org.junit.jupiter.api.Test;
-
-public class SmokeTests extends BaseTest {
-
- @Test
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(198)
- public void checkBasePageElements(){
- verifyElementsCondition(Stream.concat(topPanel.getAllVisibleElements().stream(), naviSideBar.getAllMenuButtons().stream())
- .collect(Collectors.toList()),Condition.visible);
- verifyElementsCondition(Stream.concat(topPanel.getAllEnabledElements().stream(), naviSideBar.getAllMenuButtons().stream())
- .collect(Collectors.toList()),Condition.enabled);
- }
-}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/brokers/BrokersTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/brokers/BrokersTests.java
deleted file mode 100644
index 0cba13812c..0000000000
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/brokers/BrokersTests.java
+++ /dev/null
@@ -1,63 +0,0 @@
-package com.provectus.kafka.ui.suite.brokers;
-
-import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.BROKERS;
-import static com.provectus.kafka.ui.pages.brokers.BrokersDetails.DetailsTab.CONFIGS;
-import static org.assertj.core.api.Assertions.assertThat;
-
-import com.codeborne.selenide.Condition;
-import com.provectus.kafka.ui.base.BaseTest;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
-import io.qameta.allure.Step;
-import io.qase.api.annotation.CaseId;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.Test;
-
-public class BrokersTests extends BaseTest {
- private static final String SUITE_TITLE = "Brokers";
- private static final long SUITE_ID = 1;
-
- @DisplayName("Checking the Brokers overview")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(1)
- @Test
- public void checkBrokersOverview(){
- navigateToBrokers();
- assertThat(brokersList.getAllBrokers()).as("getAllBrokers()").size().isGreaterThan(0);
- verifyElementsCondition(brokersList.getAllVisibleElements(), Condition.visible);
- verifyElementsCondition(brokersList.getAllEnabledElements(), Condition.enabled);
- }
-
- @DisplayName("Checking the existing Broker's profile in a cluster")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(85)
- @Test
- public void checkExistingBrokersInCluster(){
- navigateToBrokers();
- assertThat(brokersList.getAllBrokers()).as("getAllBrokers()").size().isGreaterThan(0);
- brokersList
- .openBroker(1);
- brokersDetails
- .waitUntilScreenReady();
- verifyElementsCondition(brokersDetails.getAllVisibleElements(), Condition.visible);
- verifyElementsCondition(brokersDetails.getAllEnabledElements(), Condition.enabled);
- brokersDetails
- .openDetailsTab(CONFIGS);
- brokersConfigTab
- .waitUntilScreenReady();
- verifyElementsCondition(brokersConfigTab.getColumnHeaders(), Condition.visible);
- verifyElementsCondition(brokersConfigTab.getEditButtons(), Condition.enabled);
- assertThat(brokersConfigTab.isSearchByKeyVisible()).as("isSearchByKeyVisible()").isTrue();
- }
-
- @Step
- private void navigateToBrokers(){
- naviSideBar
- .openSideMenu(BROKERS);
- brokersList
- .waitUntilScreenReady();
- }
-}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/ksqldb/KsqlTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/ksqldb/KsqlTests.java
deleted file mode 100644
index ee03fd8de1..0000000000
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/ksqldb/KsqlTests.java
+++ /dev/null
@@ -1,65 +0,0 @@
-package com.provectus.kafka.ui.suite.ksqldb;
-
-import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.KSQL_DB;
-import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_TABLES;
-import static org.apache.commons.lang.RandomStringUtils.randomAlphabetic;
-
-import com.provectus.kafka.ui.base.BaseTest;
-import com.provectus.kafka.ui.pages.ksqldb.models.Stream;
-import com.provectus.kafka.ui.pages.ksqldb.models.Table;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
-import io.qase.api.annotation.CaseId;
-import org.assertj.core.api.SoftAssertions;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.Order;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.TestInstance;
-
-@TestInstance(TestInstance.Lifecycle.PER_CLASS)
-public class KsqlTests extends BaseTest {
- private static final long SUITE_ID = 8;
- private static final String SUITE_TITLE = "KSQL_DB";
- private static final Stream STREAM_FOR_CHECKING_TABLES = new Stream()
- .setName("STREAM_FOR_CHECKING_TABLES_" + randomAlphabetic(4).toUpperCase())
- .setTopicName("TOPIC_FOR_STREAM_" + randomAlphabetic(4).toUpperCase());
- private static final Table FIRST_TABLE = new Table()
- .setName("FIRST_TABLE"+ randomAlphabetic(4).toUpperCase())
- .setStreamName(STREAM_FOR_CHECKING_TABLES.getName());
- private static final Table SECOND_TABLE = new Table()
- .setName("SECOND_TABLE"+ randomAlphabetic(4).toUpperCase())
- .setStreamName(STREAM_FOR_CHECKING_TABLES.getName());
-
- @BeforeAll
- public void beforeAll(){
- apiService
- .createStream(STREAM_FOR_CHECKING_TABLES)
- .createTables(FIRST_TABLE, SECOND_TABLE);
- }
-
- @DisplayName("check KSQL request execution")
- @Suite(suiteId = SUITE_ID,title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(41)
- @Test
- @Order(1)
- public void checkingKsqlRequestExecution() {
- naviSideBar
- .openSideMenu(KSQL_DB);
- ksqlDbList
- .waitUntilScreenReady()
- .clickExecuteKsqlRequestBtn();
- ksqlQueryForm
- .waitUntilScreenReady()
- .setQuery(SHOW_TABLES.getQuery())
- .clickExecuteBtn();
- SoftAssertions softly = new SoftAssertions();
- softly.assertThat(ksqlQueryForm.getTableByName(FIRST_TABLE.getName()).isVisible())
- .as("getTableName()").isTrue();
- softly.assertThat(ksqlQueryForm.getTableByName(SECOND_TABLE.getName()).isVisible())
- .as("getTableName()").isTrue();
- softly.assertAll();
- }
-}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/schemas/SchemasTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/schemas/SchemasTests.java
deleted file mode 100644
index ea98b4abe1..0000000000
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/schemas/SchemasTests.java
+++ /dev/null
@@ -1,249 +0,0 @@
-package com.provectus.kafka.ui.suite.schemas;
-
-import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.SCHEMA_REGISTRY;
-import static com.provectus.kafka.ui.utilities.FileUtils.fileToString;
-
-import com.codeborne.selenide.Condition;
-import com.provectus.kafka.ui.api.model.CompatibilityLevel;
-import com.provectus.kafka.ui.base.BaseTest;
-import com.provectus.kafka.ui.models.Schema;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
-import io.qameta.allure.Step;
-import io.qase.api.annotation.CaseId;
-import java.util.ArrayList;
-import java.util.List;
-import lombok.SneakyThrows;
-import org.assertj.core.api.SoftAssertions;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.MethodOrderer;
-import org.junit.jupiter.api.Order;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.TestInstance;
-import org.junit.jupiter.api.TestMethodOrder;
-
-@TestInstance(TestInstance.Lifecycle.PER_CLASS)
-@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
-public class SchemasTests extends BaseTest {
- private static final long SUITE_ID = 11;
- private static final String SUITE_TITLE = "Schema Registry";
- private static final List<Schema> SCHEMA_LIST = new ArrayList<>();
- private static final Schema AVRO_API = Schema.createSchemaAvro();
- private static final Schema JSON_API = Schema.createSchemaJson();
- private static final Schema PROTOBUF_API = Schema.createSchemaProtobuf();
-
- @BeforeAll
- @SneakyThrows
- public void beforeAll() {
- SCHEMA_LIST.addAll(List.of(AVRO_API, JSON_API, PROTOBUF_API));
- SCHEMA_LIST.forEach(schema -> apiService.createSchema(schema));
- }
-
- @DisplayName("should create AVRO schema")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(43)
- @Test
- @Order(1)
- void createSchemaAvro() {
- Schema schemaAvro = Schema.createSchemaAvro();
- navigateToSchemaRegistry();
- schemaRegistryList
- .clickCreateSchema();
- schemaCreateForm
- .setSubjectName(schemaAvro.getName())
- .setSchemaField(fileToString(schemaAvro.getValuePath()))
- .selectSchemaTypeFromDropdown(schemaAvro.getType())
- .clickSubmitButton();
- schemaDetails
- .waitUntilScreenReady();
- SoftAssertions softly = new SoftAssertions();
- softly.assertThat(schemaDetails.isSchemaHeaderVisible(schemaAvro.getName())).as("isSchemaHeaderVisible()").isTrue();
- softly.assertThat(schemaDetails.getSchemaType()).as("getSchemaType()").isEqualTo(schemaAvro.getType().getValue());
- softly.assertThat(schemaDetails.getCompatibility()).as("getCompatibility()").isEqualTo(CompatibilityLevel.CompatibilityEnum.BACKWARD.getValue());
- softly.assertAll();
- navigateToSchemaRegistry();
- Assertions.assertTrue(schemaRegistryList.isSchemaVisible(AVRO_API.getName()),"isSchemaVisible()");
- SCHEMA_LIST.add(schemaAvro);
- }
-
- @DisplayName("should update AVRO schema")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(186)
- @Test
- @Order(2)
- void updateSchemaAvro() {
- AVRO_API.setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schema_avro_for_update.json");
- navigateToSchemaRegistryAndOpenDetails(AVRO_API.getName());
- schemaDetails
- .openEditSchema();
- schemaCreateForm
- .waitUntilScreenReady();
- verifyElementsCondition(schemaCreateForm.getAllDetailsPageElements(), Condition.visible);
- SoftAssertions softly = new SoftAssertions();
- softly.assertThat(schemaCreateForm.isSubmitBtnEnabled()).as("isSubmitBtnEnabled()").isFalse();
- softly.assertThat(schemaCreateForm.isSchemaDropDownEnabled()).as("isSchemaDropDownEnabled()").isFalse();
- softly.assertAll();
- schemaCreateForm
- .selectCompatibilityLevelFromDropdown(CompatibilityLevel.CompatibilityEnum.NONE)
- .setNewSchemaValue(fileToString(AVRO_API.getValuePath()))
- .clickSubmitButton();
- schemaDetails
- .waitUntilScreenReady();
- Assertions.assertEquals(CompatibilityLevel.CompatibilityEnum.NONE.toString(), schemaDetails.getCompatibility(), "getCompatibility()");
- }
-
- @DisplayName("Checking Compare Versions operation for Schema")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(186)
- @Test
- @Order(3)
- void compareVersionsOperation() {
- navigateToSchemaRegistryAndOpenDetails(AVRO_API.getName());
- int latestVersion = schemaDetails
- .waitUntilScreenReady()
- .getLatestVersion();
- schemaDetails
- .openCompareVersionMenu();
- int versionsNumberFromDdl = schemaCreateForm
- .waitUntilScreenReady()
- .openSchemaVersionDdl()
- .getVersionsNumberFromList();
- Assertions.assertEquals(latestVersion,versionsNumberFromDdl,"Versions number is not matched");
- schemaCreateForm
- .selectVersionFromDropDown(1);
- Assertions.assertEquals(53, schemaCreateForm.getMarkedLinesNumber(), "getAllMarkedLines()");
- }
-
- @DisplayName("should delete AVRO schema")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(187)
- @Test
- @Order(4)
- void deleteSchemaAvro() {
- navigateToSchemaRegistryAndOpenDetails(AVRO_API.getName());
- schemaDetails
- .removeSchema();
- schemaRegistryList
- .waitUntilScreenReady();
- Assertions.assertFalse(schemaRegistryList.isSchemaVisible(AVRO_API.getName()),"isSchemaVisible()");
- SCHEMA_LIST.remove(AVRO_API);
- }
-
- @DisplayName("should create JSON schema")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(89)
- @Test
- @Order(5)
- void createSchemaJson() {
- Schema schemaJson = Schema.createSchemaJson();
- navigateToSchemaRegistry();
- schemaRegistryList
- .clickCreateSchema();
- schemaCreateForm
- .setSubjectName(schemaJson.getName())
- .setSchemaField(fileToString(schemaJson.getValuePath()))
- .selectSchemaTypeFromDropdown(schemaJson.getType())
- .clickSubmitButton();
- schemaDetails
- .waitUntilScreenReady();
- SoftAssertions softly = new SoftAssertions();
- softly.assertThat(schemaDetails.isSchemaHeaderVisible(schemaJson.getName())).as("isSchemaHeaderVisible()").isTrue();
- softly.assertThat(schemaDetails.getSchemaType()).as("getSchemaType()").isEqualTo(schemaJson.getType().getValue());
- softly.assertThat(schemaDetails.getCompatibility()).as("getCompatibility()").isEqualTo(CompatibilityLevel.CompatibilityEnum.BACKWARD.getValue());
- softly.assertAll();
- navigateToSchemaRegistry();
- Assertions.assertTrue(schemaRegistryList.isSchemaVisible(JSON_API.getName()),"isSchemaVisible()");
- SCHEMA_LIST.add(schemaJson);
- }
-
- @DisplayName("should delete JSON schema")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(189)
- @Test
- @Order(6)
- void deleteSchemaJson() {
- navigateToSchemaRegistryAndOpenDetails(JSON_API.getName());
- schemaDetails
- .removeSchema();
- schemaRegistryList
- .waitUntilScreenReady();
- Assertions.assertFalse(schemaRegistryList.isSchemaVisible(JSON_API.getName()),"isSchemaVisible()");
- SCHEMA_LIST.remove(JSON_API);
- }
-
- @DisplayName("should create PROTOBUF schema")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(91)
- @Test
- @Order(7)
- void createSchemaProtobuf() {
- Schema schemaProtobuf = Schema.createSchemaProtobuf();
- navigateToSchemaRegistry();
- schemaRegistryList
- .clickCreateSchema();
- schemaCreateForm
- .setSubjectName(schemaProtobuf.getName())
- .setSchemaField(fileToString(schemaProtobuf.getValuePath()))
- .selectSchemaTypeFromDropdown(schemaProtobuf.getType())
- .clickSubmitButton();
- schemaDetails
- .waitUntilScreenReady();
- SoftAssertions softly = new SoftAssertions();
- softly.assertThat(schemaDetails.isSchemaHeaderVisible(schemaProtobuf.getName())).as("isSchemaHeaderVisible()").isTrue();
- softly.assertThat(schemaDetails.getSchemaType()).as("getSchemaType()").isEqualTo(schemaProtobuf.getType().getValue());
- softly.assertThat(schemaDetails.getCompatibility()).as("getCompatibility()").isEqualTo(CompatibilityLevel.CompatibilityEnum.BACKWARD.getValue());
- softly.assertAll();
- navigateToSchemaRegistry();
- Assertions.assertTrue(schemaRegistryList.isSchemaVisible(PROTOBUF_API.getName()),"isSchemaVisible()");
- SCHEMA_LIST.add(schemaProtobuf);
- }
-
- @DisplayName("should delete PROTOBUF schema")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(223)
- @Test
- @Order(8)
- void deleteSchemaProtobuf() {
- navigateToSchemaRegistryAndOpenDetails(PROTOBUF_API.getName());
- schemaDetails
- .removeSchema();
- schemaRegistryList
- .waitUntilScreenReady();
- Assertions.assertFalse(schemaRegistryList.isSchemaVisible(PROTOBUF_API.getName()),"isSchemaVisible()");
- SCHEMA_LIST.remove(PROTOBUF_API);
- }
-
- @AfterAll
- public void afterAll() {
- SCHEMA_LIST.forEach(schema -> apiService.deleteSchema(schema.getName()));
- }
-
- @Step
- private void navigateToSchemaRegistry(){
- naviSideBar
- .openSideMenu(SCHEMA_REGISTRY);
- schemaRegistryList
- .waitUntilScreenReady();
- }
-
- @Step
- private void navigateToSchemaRegistryAndOpenDetails(String schemaName){
- navigateToSchemaRegistry();
- schemaRegistryList
- .openSchema(schemaName);
- schemaDetails
- .waitUntilScreenReady();
- }
-}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java
deleted file mode 100644
index 4993a2d483..0000000000
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java
+++ /dev/null
@@ -1,316 +0,0 @@
-package com.provectus.kafka.ui.suite.topics;
-
-import static com.provectus.kafka.ui.pages.BasePage.AlertHeader.SUCCESS;
-import static com.provectus.kafka.ui.pages.topic.TopicDetails.TopicMenu.MESSAGES;
-import static com.provectus.kafka.ui.pages.topic.TopicDetails.TopicMenu.OVERVIEW;
-import static com.provectus.kafka.ui.utilities.TimeUtils.waitUntilNewMinuteStarted;
-import static org.apache.commons.lang.RandomStringUtils.randomAlphabetic;
-import static org.assertj.core.api.Assertions.assertThat;
-
-import com.provectus.kafka.ui.base.BaseTest;
-import com.provectus.kafka.ui.models.Topic;
-import com.provectus.kafka.ui.pages.topic.TopicDetails;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
-import io.qameta.allure.Issue;
-import io.qase.api.annotation.CaseId;
-import io.qase.api.annotation.Step;
-import java.time.LocalDateTime;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Objects;
-import java.util.stream.Collectors;
-import java.util.stream.IntStream;
-import org.assertj.core.api.SoftAssertions;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Disabled;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.Order;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.TestInstance;
-
-@TestInstance(TestInstance.Lifecycle.PER_CLASS)
-public class TopicMessagesTests extends BaseTest {
- private static final long SUITE_ID = 2;
- private static final String SUITE_TITLE = "Topics";
- private static final Topic TOPIC_FOR_MESSAGES = new Topic()
- .setName("topic-with-clean-message-attribute-" + randomAlphabetic(5))
- .setMessageKey(randomAlphabetic(5))
- .setMessageContent(randomAlphabetic(10));
- private static final Topic TOPIC_TO_CLEAR_AND_PURGE_MESSAGES = new Topic()
- .setName("topic-to-clear-and-purge-messages-attribute-" + randomAlphabetic(5))
- .setMessageKey(randomAlphabetic(5))
- .setMessageContent(randomAlphabetic(10));
- private static final Topic TOPIC_FOR_CHECKING_FILTERS = new Topic()
- .setName("topic-for-checking-filters-" + randomAlphabetic(5))
- .setMessageKey(randomAlphabetic(5))
- .setMessageContent(randomAlphabetic(10));
- private static final Topic TOPIC_TO_RECREATE = new Topic()
- .setName("topic-to-recreate-attribute-" + randomAlphabetic(5))
- .setMessageKey(randomAlphabetic(5))
- .setMessageContent(randomAlphabetic(10));
- private static final List<Topic> TOPIC_LIST = new ArrayList<>();
-
- @BeforeAll
- public void beforeAll() {
- TOPIC_LIST.addAll(List.of(TOPIC_FOR_MESSAGES, TOPIC_FOR_CHECKING_FILTERS, TOPIC_TO_CLEAR_AND_PURGE_MESSAGES,
- TOPIC_TO_RECREATE));
- TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName()));
- IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECKING_FILTERS));
- waitUntilNewMinuteStarted();
- IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECKING_FILTERS));
- }
-
- @DisplayName("produce message")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(222)
- @Order(1)
- @Test
- void produceMessage() {
- navigateToTopicsAndOpenDetails(TOPIC_FOR_MESSAGES.getName());
- topicDetails
- .openDetailsTab(MESSAGES);
- produceMessage(TOPIC_FOR_MESSAGES);
- SoftAssertions softly = new SoftAssertions();
- softly.assertThat(topicDetails.isKeyMessageVisible((TOPIC_FOR_MESSAGES.getMessageKey())))
- .withFailMessage("isKeyMessageVisible()").isTrue();
- softly.assertThat(topicDetails.isContentMessageVisible((TOPIC_FOR_MESSAGES.getMessageContent()).trim()))
- .withFailMessage("isContentMessageVisible()").isTrue();
- softly.assertAll();
- }
-
- @Disabled
- @Issue("https://github.com/provectus/kafka-ui/issues/2778")
- @DisplayName("clear message")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(19)
- @Order(2)
- @Test
- void clearMessage() {
- navigateToTopicsAndOpenDetails(TOPIC_FOR_MESSAGES.getName());
- topicDetails
- .openDetailsTab(OVERVIEW);
- int messageAmount = topicDetails.getMessageCountAmount();
- produceMessage(TOPIC_FOR_MESSAGES);
- Assertions.assertEquals(messageAmount + 1, topicDetails.getMessageCountAmount(), "getMessageCountAmount()");
- topicDetails
- .openDotMenu()
- .clickClearMessagesMenu()
- .waitUntilScreenReady();
- Assertions.assertEquals(0, topicDetails.getMessageCountAmount(), "getMessageCountAmount()");
- }
-
- @DisplayName("TopicTests.clearMessageOfTopic : Clear message of topic")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(239)
- @Order(3)
- @Test
- void checkClearTopicMessage() {
- navigateToTopicsAndOpenDetails(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName());
- topicDetails
- .openDetailsTab(OVERVIEW);
- produceMessage(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES);
- navigateToTopics();
- assertThat(topicsList.getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName()).getNumberOfMessages())
- .as("getNumberOfMessages()").isEqualTo(1);
- topicsList
- .openDotMenuByTopicName(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName())
- .clickClearMessagesBtn()
- .clickConfirmBtnMdl();
- SoftAssertions softly = new SoftAssertions();
- softly.assertThat(topicsList.isAlertWithMessageVisible(SUCCESS,
- String.format("%s messages have been successfully cleared!", TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName())))
- .as("isAlertWithMessageVisible()").isTrue();
- softly.assertThat(topicsList.getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName()).getNumberOfMessages())
- .as("getNumberOfMessages()").isEqualTo(0);
- softly.assertAll();
- }
-
- @DisplayName("TopicTests.purgeMessagesOfTopics : Purge messages of topics")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(10)
- @Order(4)
- @Test
- void checkPurgeMessagePossibility(){
- navigateToTopics();
- int messageAmount = topicsList.getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName()).getNumberOfMessages();
- topicsList
- .openTopic(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName());
- topicDetails
- .openDetailsTab(OVERVIEW);
- produceMessage(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES);
- navigateToTopics();
- assertThat(topicsList.getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName()).getNumberOfMessages())
- .as("getNumberOfMessages()").isEqualTo(messageAmount +1);
- topicsList
- .getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName())
- .selectItem(true)
- .clickPurgeMessagesOfSelectedTopicsBtn();
- assertThat(topicsList.isConfirmationMdlVisible()).as("isConfirmationMdlVisible()").isTrue();
- topicsList
- .clickCancelBtnMdl()
- .clickPurgeMessagesOfSelectedTopicsBtn()
- .clickConfirmBtnMdl();
- SoftAssertions softly = new SoftAssertions();
- softly.assertThat(topicsList.isAlertWithMessageVisible(SUCCESS,
- String.format("%s messages have been successfully cleared!",TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName())))
- .as("isAlertWithMessageVisible()").isTrue();
- softly.assertThat(topicsList.getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName()).getNumberOfMessages())
- .as("getNumberOfMessages()").isEqualTo(0);
- softly.assertAll();
- }
-
- @Disabled
- @Issue("https://github.com/provectus/kafka-ui/issues/2819")
- @DisplayName("Message copy from topic profile")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(21)
- @Order(5)
- @Test
- void copyMessageFromTopicProfile() {
- navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
- topicDetails
- .openDetailsTab(MESSAGES)
- .getRandomMessage()
- .openDotMenu()
- .clickCopyToClipBoard();
- Assertions.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS, "Copied successfully!"),
- "isAlertWithMessageVisible()");
- }
-
- @Disabled
- @Issue("https://github.com/provectus/kafka-ui/issues/2394")
- @DisplayName("Checking messages filtering by Offset within Topic/Messages")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(15)
- @Order(6)
- @Test
- void checkingMessageFilteringByOffset() {
- navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
- topicDetails
- .openDetailsTab(MESSAGES);
- TopicDetails.MessageGridItem secondMessage = topicDetails.getMessageByOffset(1);
- topicDetails
- .selectSeekTypeDdlMessagesTab("Offset")
- .setSeekTypeValueFldMessagesTab(String.valueOf(secondMessage.getOffset()))
- .clickSubmitFiltersBtnMessagesTab();
- SoftAssertions softly = new SoftAssertions();
- topicDetails.getAllMessages().forEach(message ->
- softly.assertThat(message.getOffset() == secondMessage.getOffset()
- || message.getOffset() > secondMessage.getOffset())
- .as(String.format("Expected offset is: %s, but found: %s", secondMessage.getOffset(), message.getOffset()))
- .isTrue());
- softly.assertAll();
- }
-
- @Disabled
- @Issue("https://github.com/provectus/kafka-ui/issues/3215")
- @Issue("https://github.com/provectus/kafka-ui/issues/2345")
- @DisplayName("Checking messages filtering by Timestamp within Messages/Topic")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(16)
- @Order(7)
- @Test
- void checkingMessageFilteringByTimestamp() {
- navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
- topicDetails
- .openDetailsTab(MESSAGES);
- LocalDateTime firstTimestamp = topicDetails.getMessageByOffset(0).getTimestamp();
- List<TopicDetails.MessageGridItem> nextMessages = topicDetails.getAllMessages().stream()
- .filter(message -> message.getTimestamp().getMinute() != firstTimestamp.getMinute())
- .collect(Collectors.toList());
- LocalDateTime nextTimestamp = Objects.requireNonNull(nextMessages.stream()
- .findFirst().orElse(null)).getTimestamp();
- topicDetails
- .selectSeekTypeDdlMessagesTab("Timestamp")
- .openCalendarSeekType()
- .selectDateAndTimeByCalendar(nextTimestamp)
- .clickSubmitFiltersBtnMessagesTab();
- SoftAssertions softly = new SoftAssertions();
- topicDetails.getAllMessages().forEach(message ->
- softly.assertThat(message.getTimestamp().isEqual(nextTimestamp)
- || message.getTimestamp().isAfter(nextTimestamp))
- .as(String.format("Expected timestamp is: %s, but found: %s", nextTimestamp, message.getTimestamp()))
- .isTrue());
- softly.assertAll();
- }
-
- @Disabled
- @Issue("https://github.com/provectus/kafka-ui/issues/2778")
- @DisplayName("Clear message of topic from topic profile")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(246)
- @Order(8)
- @Test
- void checkClearTopicMessageFromOverviewTab() {
- navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
- topicDetails
- .openDetailsTab(OVERVIEW)
- .openDotMenu()
- .clickClearMessagesMenu()
- .clickConfirmBtnMdl();
- SoftAssertions softly = new SoftAssertions();
- softly.assertThat(topicDetails.isAlertWithMessageVisible(SUCCESS,
- String.format("%s messages have been successfully cleared!", TOPIC_FOR_CHECKING_FILTERS.getName())))
- .as("isAlertWithMessageVisible()").isTrue();
- softly.assertThat(topicDetails.getMessageCountAmount())
- .as("getMessageCountAmount()= " + topicDetails.getMessageCountAmount()).isEqualTo(0);
- softly.assertAll();
- }
-
- @DisplayName("TopicTests.recreateTopic : Recreate topic")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(240)
- @Order(9)
- @Test
- void checkRecreateTopic(){
- navigateToTopicsAndOpenDetails(TOPIC_TO_RECREATE.getName());
- topicDetails
- .openDetailsTab(OVERVIEW);
- produceMessage(TOPIC_TO_RECREATE);
- navigateToTopics();
- assertThat(topicsList.getTopicItem(TOPIC_TO_RECREATE.getName()).getNumberOfMessages())
- .as("getNumberOfMessages()").isEqualTo(1);
- topicsList
- .openDotMenuByTopicName(TOPIC_TO_RECREATE.getName())
- .clickRecreateTopicBtn()
- .clickConfirmBtnMdl();
- SoftAssertions softly = new SoftAssertions();
- softly.assertThat(topicDetails.isAlertWithMessageVisible(SUCCESS,
- String.format("Topic %s successfully recreated!", TOPIC_TO_RECREATE.getName())))
- .as("isAlertWithMessageVisible()").isTrue();
- softly.assertThat(topicsList.getTopicItem(TOPIC_TO_RECREATE.getName()).getNumberOfMessages())
- .as("getNumberOfMessages()").isEqualTo(0);
- softly.assertAll();
- }
-
- @Step
- protected void produceMessage(Topic topic){
- topicDetails
- .clickProduceMessageBtn();
- produceMessagePanel
- .waitUntilScreenReady()
- .setKeyField(topic.getMessageKey())
- .setContentFiled(topic.getMessageContent())
- .submitProduceMessage();
- topicDetails
- .waitUntilScreenReady();
- }
-
- @AfterAll
- public void afterAll() {
- TOPIC_LIST.forEach(topic -> apiService.deleteTopic(topic.getName()));
- }
-}
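
The deleted suite's beforeAll leaned on TimeUtils.waitUntilNewMinuteStarted() to spread the produced messages across two distinct wall-clock minutes, which is what checkingMessageFilteringByTimestamp needs in order to find a second timestamp to filter on. A plausible sketch of such a helper, assumed rather than copied from the project:

    import java.time.LocalTime;

    // Assumed shape of TimeUtils.waitUntilNewMinuteStarted(); illustrative only.
    public final class TimeUtils {

        private TimeUtils() {
        }

        public static void waitUntilNewMinuteStarted() {
            int startMinute = LocalTime.now().getMinute();
            // poll until the wall clock rolls over into the next minute
            while (LocalTime.now().getMinute() == startMinute) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        }
    }
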
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
deleted file mode 100644
index fa51ebe1fc..0000000000
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
+++ /dev/null
@@ -1,551 +0,0 @@
-package com.provectus.kafka.ui.suite.topics;
-
-import static com.provectus.kafka.ui.pages.BasePage.AlertHeader.SUCCESS;
-import static com.provectus.kafka.ui.pages.topic.TopicDetails.TopicMenu.MESSAGES;
-import static com.provectus.kafka.ui.pages.topic.TopicDetails.TopicMenu.SETTINGS;
-import static com.provectus.kafka.ui.pages.topic.enums.CleanupPolicyValue.COMPACT;
-import static com.provectus.kafka.ui.pages.topic.enums.CleanupPolicyValue.DELETE;
-import static com.provectus.kafka.ui.pages.topic.enums.CustomParameterType.COMPRESSION_TYPE;
-import static com.provectus.kafka.ui.pages.topic.enums.MaxSizeOnDisk.NOT_SET;
-import static com.provectus.kafka.ui.pages.topic.enums.MaxSizeOnDisk.SIZE_1_GB;
-import static com.provectus.kafka.ui.pages.topic.enums.MaxSizeOnDisk.SIZE_50_GB;
-import static com.provectus.kafka.ui.pages.topic.enums.TimeToRetain.BTN_2_DAYS;
-import static com.provectus.kafka.ui.pages.topic.enums.TimeToRetain.BTN_7_DAYS;
-import static org.apache.commons.lang.RandomStringUtils.randomAlphabetic;
-import static org.apache.commons.lang3.RandomUtils.nextInt;
-import static org.assertj.core.api.Assertions.assertThat;
-
-import com.codeborne.selenide.Condition;
-import com.provectus.kafka.ui.base.BaseTest;
-import com.provectus.kafka.ui.models.Topic;
-import com.provectus.kafka.ui.pages.topic.TopicDetails;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
-import io.qameta.allure.Issue;
-import io.qase.api.annotation.CaseId;
-import java.util.ArrayList;
-import java.util.List;
-import org.assertj.core.api.SoftAssertions;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Disabled;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.MethodOrderer;
-import org.junit.jupiter.api.Order;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.TestInstance;
-import org.junit.jupiter.api.TestMethodOrder;
-
-@TestInstance(TestInstance.Lifecycle.PER_CLASS)
-@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
-public class TopicsTests extends BaseTest {
- private static final long SUITE_ID = 2;
- private static final String SUITE_TITLE = "Topics";
- private static final Topic TOPIC_TO_CREATE = new Topic()
- .setName("new-topic-" + randomAlphabetic(5))
- .setNumberOfPartitions(1)
- .setCustomParameterType(COMPRESSION_TYPE)
- .setCustomParameterValue("producer")
- .setCleanupPolicyValue(DELETE);
- private static final Topic TOPIC_TO_UPDATE_AND_DELETE = new Topic()
- .setName("topic-to-update-and-delete-" + randomAlphabetic(5))
- .setNumberOfPartitions(1)
- .setCleanupPolicyValue(DELETE)
- .setTimeToRetain(BTN_7_DAYS)
- .setMaxSizeOnDisk(NOT_SET)
- .setMaxMessageBytes("1048588")
- .setMessageKey(randomAlphabetic(5))
- .setMessageContent(randomAlphabetic(10));
- private static final Topic TOPIC_TO_CHECK_SETTINGS = new Topic()
- .setName("new-topic-" + randomAlphabetic(5))
- .setNumberOfPartitions(1)
- .setMaxMessageBytes("1000012")
- .setMaxSizeOnDisk(NOT_SET);
- private static final Topic TOPIC_FOR_CHECK_FILTERS = new Topic()
- .setName("topic-for-check-filters-" + randomAlphabetic(5));
- private static final Topic TOPIC_FOR_DELETE = new Topic().setName("topic-to-delete-" + randomAlphabetic(5));
- private static final List<Topic> TOPIC_LIST = new ArrayList<>();
-
- @BeforeAll
- public void beforeAll() {
- TOPIC_LIST.addAll(List.of(TOPIC_TO_UPDATE_AND_DELETE, TOPIC_FOR_DELETE, TOPIC_FOR_CHECK_FILTERS));
- TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName()));
- }
-
- @DisplayName("should create a topic")
- @Suite(suiteId = 4, title = "Create new Topic")
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(199)
- @Test
- @Order(1)
- public void createTopic() {
- navigateToTopics();
- topicsList
- .clickAddTopicBtn();
- topicCreateEditForm
- .waitUntilScreenReady()
- .setTopicName(TOPIC_TO_CREATE.getName())
- .setNumberOfPartitions(TOPIC_TO_CREATE.getNumberOfPartitions())
- .selectCleanupPolicy(TOPIC_TO_CREATE.getCleanupPolicyValue())
- .clickCreateTopicBtn();
- navigateToTopicsAndOpenDetails(TOPIC_TO_CREATE.getName());
- SoftAssertions softly = new SoftAssertions();
- softly.assertThat(topicDetails.isTopicHeaderVisible(TOPIC_TO_CREATE.getName())).as("isTopicHeaderVisible()")
- .isTrue();
- softly.assertThat(topicDetails.getCleanUpPolicy()).as("getCleanUpPolicy()")
- .isEqualTo(TOPIC_TO_CREATE.getCleanupPolicyValue().toString());
- softly.assertThat(topicDetails.getPartitions()).as("getPartitions()")
- .isEqualTo(TOPIC_TO_CREATE.getNumberOfPartitions());
- softly.assertAll();
- navigateToTopics();
- Assertions.assertTrue(topicsList.isTopicVisible(TOPIC_TO_CREATE.getName()), "isTopicVisible");
- TOPIC_LIST.add(TOPIC_TO_CREATE);
- }
-
- @DisplayName("Checking available operations for selected Topic within 'All Topics' page")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(7)
- @Test
- @Order(2)
- void checkAvailableOperations() {
- navigateToTopics();
- topicsList
- .getTopicItem("my_ksql_1ksql_processing_log")
- .selectItem(true);
- verifyElementsCondition(topicsList.getActionButtons(),Condition.enabled);
- topicsList
- .getTopicItem("_confluent-ksql-my_ksql_1_command_topic")
- .selectItem(true);
- Assertions.assertFalse(topicsList.isCopySelectedTopicBtnEnabled(), "isCopySelectedTopicBtnEnabled()");
- }
-
- @Disabled()
- @Issue("https://github.com/provectus/kafka-ui/issues/2625")
- @DisplayName("should update a topic")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(197)
- @Test
- @Order(3)
- public void updateTopic() {
- navigateToTopicsAndOpenDetails(TOPIC_TO_UPDATE_AND_DELETE.getName());
- topicDetails
- .openDotMenu()
- .clickEditSettingsMenu();
- topicCreateEditForm
- .waitUntilScreenReady();
- SoftAssertions softly = new SoftAssertions();
- softly.assertThat(topicCreateEditForm.getCleanupPolicy()).as("getCleanupPolicy()")
- .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getCleanupPolicyValue().getVisibleText());
- softly.assertThat(topicCreateEditForm.getTimeToRetain()).as("getTimeToRetain()")
- .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getTimeToRetain().getValue());
- softly.assertThat(topicCreateEditForm.getMaxSizeOnDisk()).as("getMaxSizeOnDisk()")
- .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getMaxSizeOnDisk().getVisibleText());
- softly.assertThat(topicCreateEditForm.getMaxMessageBytes()).as("getMaxMessageBytes()")
- .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getMaxMessageBytes());
- softly.assertAll();
- TOPIC_TO_UPDATE_AND_DELETE
- .setCleanupPolicyValue(COMPACT)
- .setTimeToRetain(BTN_2_DAYS)
- .setMaxSizeOnDisk(SIZE_50_GB).setMaxMessageBytes("1048589");
- topicCreateEditForm
- .selectCleanupPolicy((TOPIC_TO_UPDATE_AND_DELETE.getCleanupPolicyValue()))
- .setTimeToRetainDataByButtons(TOPIC_TO_UPDATE_AND_DELETE.getTimeToRetain())
- .setMaxSizeOnDiskInGB(TOPIC_TO_UPDATE_AND_DELETE.getMaxSizeOnDisk())
- .setMaxMessageBytes(TOPIC_TO_UPDATE_AND_DELETE.getMaxMessageBytes())
- .clickCreateTopicBtn();
- softly.assertThat(topicDetails.isAlertWithMessageVisible(SUCCESS, "Topic successfully updated."))
- .as("isAlertWithMessageVisible()").isTrue();
- softly.assertThat(topicDetails.isTopicHeaderVisible(TOPIC_TO_UPDATE_AND_DELETE.getName()))
- .as("isTopicHeaderVisible()").isTrue();
- softly.assertAll();
- topicDetails
- .waitUntilScreenReady();
- navigateToTopicsAndOpenDetails(TOPIC_TO_UPDATE_AND_DELETE.getName());
- topicDetails
- .openDotMenu()
- .clickEditSettingsMenu();
- softly.assertThat(topicCreateEditForm.isNameFieldEnabled()).as("isNameFieldEnabled()").isFalse();
- softly.assertThat(topicCreateEditForm.getCleanupPolicy()).as("getCleanupPolicy()")
- .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getCleanupPolicyValue().getVisibleText());
- softly.assertThat(topicCreateEditForm.getTimeToRetain()).as("getTimeToRetain()")
- .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getTimeToRetain().getValue());
- softly.assertThat(topicCreateEditForm.getMaxSizeOnDisk()).as("getMaxSizeOnDisk()")
- .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getMaxSizeOnDisk().getVisibleText());
- softly.assertThat(topicCreateEditForm.getMaxMessageBytes()).as("getMaxMessageBytes()")
- .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getMaxMessageBytes());
- softly.assertAll();
- }
-
- @DisplayName("TopicTests.removeTopicFromAllTopics : Remove topic from 'all topics'/'TopicList'")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(242)
- @Test
- @Order(4)
- public void removeTopicFromTopicList() {
- navigateToTopics();
- topicsList
- .openDotMenuByTopicName(TOPIC_TO_UPDATE_AND_DELETE.getName())
- .clickRemoveTopicBtn()
- .clickConfirmBtnMdl();
- Assertions.assertTrue(topicsList.isAlertWithMessageVisible(SUCCESS,
- String.format("Topic %s successfully deleted!", TOPIC_TO_UPDATE_AND_DELETE.getName())),
- "isAlertWithMessageVisible()");
- TOPIC_LIST.remove(TOPIC_TO_UPDATE_AND_DELETE);
- }
-
- @DisplayName("should delete topic")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(207)
- @Test
- @Order(5)
- public void deleteTopic() {
- navigateToTopicsAndOpenDetails(TOPIC_FOR_DELETE.getName());
- topicDetails
- .openDotMenu()
- .clickDeleteTopicMenu()
- .clickConfirmBtnMdl();
- navigateToTopics();
- Assertions.assertFalse(topicsList.isTopicVisible(TOPIC_FOR_DELETE.getName()), "isTopicVisible");
- TOPIC_LIST.remove(TOPIC_FOR_DELETE);
- }
-
- @DisplayName("Redirect to consumer from topic profile")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(20)
- @Test
- @Order(6)
- void redirectToConsumerFromTopic() {
- String topicName = "source-activities";
- String consumerGroupId = "connect-sink_postgres_activities";
- navigateToTopicsAndOpenDetails(topicName);
- topicDetails
- .openDetailsTab(TopicDetails.TopicMenu.CONSUMERS)
- .openConsumerGroup(consumerGroupId);
- consumersDetails
- .waitUntilScreenReady();
- assertThat(consumersDetails.isRedirectedConsumerTitleVisible(consumerGroupId))
- .withFailMessage("isRedirectedConsumerTitleVisible").isTrue();
- assertThat(consumersDetails.isTopicInConsumersDetailsVisible(topicName))
- .withFailMessage("isTopicInConsumersDetailsVisible").isTrue();
- }
-
- @DisplayName("Checking Topic creation possibility in case of empty Topic Name")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(4)
- @Test
- @Order(7)
- void checkTopicCreatePossibility() {
- navigateToTopics();
- topicsList
- .clickAddTopicBtn();
- topicCreateEditForm
- .waitUntilScreenReady();
- assertThat(topicCreateEditForm.isCreateTopicButtonEnabled()).as("isCreateTopicButtonEnabled()").isFalse();
- topicCreateEditForm
- .setTopicName("testName");
- assertThat(topicCreateEditForm.isCreateTopicButtonEnabled()).as("isCreateTopicButtonEnabled()").isFalse();
- topicCreateEditForm
- .setTopicName(null)
- .setNumberOfPartitions(nextInt(1, 10));
- assertThat(topicCreateEditForm.isCreateTopicButtonEnabled()).as("isCreateTopicButtonEnabled()").isFalse();
- topicCreateEditForm
- .setTopicName("testName");
- assertThat(topicCreateEditForm.isCreateTopicButtonEnabled()).as("isCreateTopicButtonEnabled()").isTrue();
- }
-
- @DisplayName("Checking 'Time to retain data (in ms)' custom value with editing Topic's settings")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(266)
- @Test
- @Order(8)
- void checkTimeToRetainDataCustomValueWithEditingTopic() {
- Topic topicToRetainData = new Topic()
- .setName("topic-to-retain-data-" + randomAlphabetic(5))
- .setTimeToRetainData("86400000");
- navigateToTopics();
- topicsList
- .clickAddTopicBtn();
- topicCreateEditForm
- .waitUntilScreenReady()
- .setTopicName(topicToRetainData.getName())
- .setNumberOfPartitions(1)
- .setTimeToRetainDataInMs("604800000");
- assertThat(topicCreateEditForm.getTimeToRetain()).as("getTimeToRetain()").isEqualTo("604800000");
- topicCreateEditForm
- .setTimeToRetainDataInMs(topicToRetainData.getTimeToRetainData())
- .clickCreateTopicBtn();
- topicDetails
- .waitUntilScreenReady()
- .openDotMenu()
- .clickEditSettingsMenu();
- assertThat(topicCreateEditForm.getTimeToRetain()).as("getTimeToRetain()")
- .isEqualTo(topicToRetainData.getTimeToRetainData());
- topicDetails
- .openDetailsTab(SETTINGS);
- assertThat(topicDetails.getSettingsGridValueByKey("retention.ms")).as("getSettingsGridValueByKey()")
- .isEqualTo(topicToRetainData.getTimeToRetainData());
- TOPIC_LIST.add(topicToRetainData);
- }
-
- @DisplayName("Checking requiredness of Custom parameters within 'Create new Topic'")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(6)
- @Test
- @Order(9)
- void checkCustomParametersWithinCreateNewTopic() {
- navigateToTopics();
- topicsList
- .clickAddTopicBtn();
- topicCreateEditForm
- .waitUntilScreenReady()
- .setTopicName(TOPIC_TO_CREATE.getName())
- .clickAddCustomParameterTypeButton()
- .setCustomParameterType(TOPIC_TO_CREATE.getCustomParameterType());
- assertThat(topicCreateEditForm.isDeleteCustomParameterButtonEnabled()).as("isDeleteCustomParameterButtonEnabled()")
- .isTrue();
- topicCreateEditForm
- .clearCustomParameterValue();
- assertThat(topicCreateEditForm.isValidationMessageCustomParameterValueVisible())
- .as("isValidationMessageCustomParameterValueVisible()").isTrue();
- }
-
- @DisplayName("Checking Topics section within Kafka-ui Application")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(2)
- @Test
- @Order(10)
- void checkTopicListElements() {
- navigateToTopics();
- verifyElementsCondition(topicsList.getAllVisibleElements(), Condition.visible);
- verifyElementsCondition(topicsList.getAllEnabledElements(), Condition.enabled);
- }
-
- @DisplayName("Filter adding within Topic")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(12)
- @Test
- @Order(11)
- void addingNewFilterWithinTopic() {
- String filterName = randomAlphabetic(5);
- navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
- topicDetails
- .openDetailsTab(MESSAGES)
- .clickMessagesAddFiltersBtn()
- .waitUntilAddFiltersMdlVisible();
- verifyElementsCondition(topicDetails.getAllAddFilterModalVisibleElements(), Condition.visible);
- verifyElementsCondition(topicDetails.getAllAddFilterModalEnabledElements(), Condition.enabled);
- verifyElementsCondition(topicDetails.getAllAddFilterModalDisabledElements(), Condition.disabled);
- assertThat(topicDetails.isSaveThisFilterCheckBoxSelected()).as("isSaveThisFilterCheckBoxSelected()")
- .isFalse();
- topicDetails
- .setFilterCodeFieldAddFilterMdl(filterName);
- assertThat(topicDetails.isAddFilterBtnAddFilterMdlEnabled()).as("isAddFilterBtnAddFilterMdlEnabled()")
- .isTrue();
- topicDetails.clickAddFilterBtnAndCloseMdl(true);
- assertThat(topicDetails.isActiveFilterVisible(filterName)).as("isActiveFilterVisible()")
- .isTrue();
- }
-
- @DisplayName("Checking filter saving within Messages/Topic profile/Saved Filters")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(13)
- @Test
- @Order(12)
- void checkFilterSavingWithinSavedFilters() {
- String displayName = randomAlphabetic(5);
- navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
- topicDetails
- .openDetailsTab(MESSAGES)
- .clickMessagesAddFiltersBtn()
- .waitUntilAddFiltersMdlVisible()
- .setFilterCodeFieldAddFilterMdl(randomAlphabetic(4))
- .selectSaveThisFilterCheckboxMdl(true)
- .setDisplayNameFldAddFilterMdl(displayName);
- assertThat(topicDetails.isAddFilterBtnAddFilterMdlEnabled()).as("isAddFilterBtnAddFilterMdlEnabled()")
- .isTrue();
- topicDetails
- .clickAddFilterBtnAndCloseMdl(false)
- .openSavedFiltersListMdl();
- assertThat(topicDetails.isFilterVisibleAtSavedFiltersMdl(displayName))
- .as("isFilterVisibleAtSavedFiltersMdl()").isTrue();
- }
-
- @DisplayName("Checking applying saved filter within Topic/Messages")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(14)
- @Test
- @Order(13)
- void checkingApplyingSavedFilterWithinTopicMessages() {
- String displayName = randomAlphabetic(5);
- navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
- topicDetails
- .openDetailsTab(MESSAGES)
- .clickMessagesAddFiltersBtn()
- .waitUntilAddFiltersMdlVisible()
- .setFilterCodeFieldAddFilterMdl(randomAlphabetic(4))
- .selectSaveThisFilterCheckboxMdl(true)
- .setDisplayNameFldAddFilterMdl(displayName)
- .clickAddFilterBtnAndCloseMdl(false)
- .openSavedFiltersListMdl()
- .selectFilterAtSavedFiltersMdl(displayName)
- .clickSelectFilterBtnAtSavedFiltersMdl();
- assertThat(topicDetails.isActiveFilterVisible(displayName))
- .as("isActiveFilterVisible()").isTrue();
- }
-
- @DisplayName("Checking 'Show Internal Topics' toggle functionality within 'All Topics' page")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(11)
- @Test
- @Order(14)
- void checkShowInternalTopicsButtonFunctionality(){
- navigateToTopics();
- SoftAssertions softly = new SoftAssertions();
- softly.assertThat(topicsList.isShowInternalRadioBtnSelected()).as("isInternalRadioBtnSelected()").isTrue();
- softly.assertThat(topicsList.getInternalTopics()).as("getInternalTopics()").size().isGreaterThan(0);
- softly.assertThat(topicsList.getNonInternalTopics()).as("getNonInternalTopics()").size().isGreaterThan(0);
- softly.assertAll();
- topicsList
- .setShowInternalRadioButton(false);
- softly.assertThat(topicsList.getInternalTopics()).as("getInternalTopics()").size().isEqualTo(0);
- softly.assertThat(topicsList.getNonInternalTopics()).as("getNonInternalTopics()").size().isGreaterThan(0);
- softly.assertAll();
- }
-
- @DisplayName("Checking Topics settings to make sure retention.bytes is right according to Max size on disk in GB selected value")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(56)
- @Test
- @Order(15)
- void checkRetentionBytesAccordingToMaxSizeOnDisk(){
- navigateToTopics();
- topicsList
- .clickAddTopicBtn();
- topicCreateEditForm
- .waitUntilScreenReady()
- .setTopicName(TOPIC_TO_CHECK_SETTINGS.getName())
- .setNumberOfPartitions(TOPIC_TO_CHECK_SETTINGS.getNumberOfPartitions())
- .setMaxMessageBytes(TOPIC_TO_CHECK_SETTINGS.getMaxMessageBytes())
- .clickCreateTopicBtn();
- topicDetails
- .waitUntilScreenReady();
- TOPIC_LIST.add(TOPIC_TO_CHECK_SETTINGS);
- topicDetails
- .openDetailsTab(SETTINGS);
- topicSettingsTab
- .waitUntilScreenReady();
- SoftAssertions softly = new SoftAssertions();
- softly.assertThat(topicSettingsTab.getValueByKey("retention.bytes"))
- .as("getValueOfKey(retention.bytes)").isEqualTo(TOPIC_TO_CHECK_SETTINGS.getMaxSizeOnDisk().getOptionValue());
- softly.assertThat(topicSettingsTab.getValueByKey("max.message.bytes"))
- .as("getValueOfKey(max.message.bytes)").isEqualTo(TOPIC_TO_CHECK_SETTINGS.getMaxMessageBytes());
- softly.assertAll();
- TOPIC_TO_CHECK_SETTINGS
- .setMaxSizeOnDisk(SIZE_1_GB)
- .setMaxMessageBytes("1000056");
- topicDetails
- .openDotMenu()
- .clickEditSettingsMenu();
- topicCreateEditForm
- .waitUntilScreenReady()
- .setMaxSizeOnDiskInGB(TOPIC_TO_CHECK_SETTINGS.getMaxSizeOnDisk())
- .setMaxMessageBytes(TOPIC_TO_CHECK_SETTINGS.getMaxMessageBytes())
- .clickCreateTopicBtn();
- topicDetails
- .waitUntilScreenReady()
- .openDetailsTab(SETTINGS);
- topicSettingsTab
- .waitUntilScreenReady();
- softly.assertThat(topicSettingsTab.getValueByKey("retention.bytes"))
- .as("getValueOfKey(retention.bytes)").isEqualTo(TOPIC_TO_CHECK_SETTINGS.getMaxSizeOnDisk().getOptionValue());
- softly.assertThat(topicSettingsTab.getValueByKey("max.message.bytes"))
- .as("getValueOfKey(max.message.bytes)").isEqualTo(TOPIC_TO_CHECK_SETTINGS.getMaxMessageBytes());
- softly.assertAll();
- }
-
- @DisplayName("TopicTests.recreateTopicFromTopicProfile : Recreate topic from topic profile")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(247)
- @Test
- @Order(16)
- void recreateTopicFromTopicProfile(){
- Topic topicToRecreate = new Topic()
- .setName("topic-to-recreate-" + randomAlphabetic(5))
- .setNumberOfPartitions(1);
- navigateToTopics();
- topicsList
- .clickAddTopicBtn();
- topicCreateEditForm
- .waitUntilScreenReady()
- .setTopicName(topicToRecreate.getName())
- .setNumberOfPartitions(topicToRecreate.getNumberOfPartitions())
- .clickCreateTopicBtn();
- topicDetails
- .waitUntilScreenReady();
- TOPIC_LIST.add(topicToRecreate);
- topicDetails
- .openDotMenu()
- .clickRecreateTopicMenu();
- assertThat(topicDetails.isConfirmationMdlVisible()).as("isConfirmationMdlVisible()").isTrue();
- topicDetails
- .clickConfirmBtnMdl();
- assertThat(topicDetails.isAlertWithMessageVisible(SUCCESS,
- String.format("Topic %s successfully recreated!", topicToRecreate.getName())))
- .as("isAlertWithMessageVisible()").isTrue();
- }
-
- @DisplayName("TopicTests.copyTopic : Copy topic")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(8)
- @Test
- @Order(17)
- void checkCopyTopicPossibility(){
- Topic topicToCopy = new Topic()
- .setName("topic-to-copy-" + randomAlphabetic(5))
- .setNumberOfPartitions(1);
- navigateToTopics();
- topicsList
- .getTopicItem("_schemas")
- .selectItem(true)
- .clickCopySelectedTopicBtn();
- topicCreateEditForm
- .waitUntilScreenReady();
- assertThat(topicCreateEditForm.isCreateTopicButtonEnabled()).as("isCreateTopicButtonEnabled()").isFalse();
- topicCreateEditForm
- .setTopicName(topicToCopy.getName())
- .setNumberOfPartitions(topicToCopy.getNumberOfPartitions())
- .clickCreateTopicBtn();
- topicDetails
- .waitUntilScreenReady();
- TOPIC_LIST.add(topicToCopy);
- SoftAssertions softly = new SoftAssertions();
- softly.assertThat(topicDetails.isAlertWithMessageVisible(SUCCESS, "Topic successfully created."))
- .as("isAlertWithMessageVisible()").isTrue();
- softly.assertThat(topicDetails.isTopicHeaderVisible(topicToCopy.getName()))
- .as("isTopicHeaderVisible()").isTrue();
- softly.assertAll();
- }
-
- @AfterAll
- public void afterAll() {
- TOPIC_LIST.forEach(topic -> apiService.deleteTopic(topic.getName()));
- }
-}
diff --git a/kafka-ui-e2e-checks/src/test/resources/regression.xml b/kafka-ui-e2e-checks/src/test/resources/regression.xml
new file mode 100644
index 0000000000..fff1df6085
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/resources/regression.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
diff --git a/kafka-ui-e2e-checks/src/test/resources/smoke.xml b/kafka-ui-e2e-checks/src/test/resources/smoke.xml
new file mode 100644
index 0000000000..e0a8f082e8
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/resources/smoke.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
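
Both suite descriptors are eight-line TestNG XML files whose job is simply to map a named suite onto a package tree of test classes. For reference, a descriptor of this kind is a suite/test/packages nesting; the sketch below uses illustrative names and is not necessarily the repository's exact content:

    <!DOCTYPE suite SYSTEM "https://testng.org/testng-1.0.dtd">
    <!-- Illustrative TestNG suite descriptor; suite, test and package names are assumptions. -->
    <suite name="SmokeSuite">
        <test name="Smoke" parallel="classes" thread-count="2">
            <packages>
                <!-- pulls in every test class under the given package tree -->
                <package name="com.provectus.kafka.ui.smokeSuite.*"/>
            </packages>
        </test>
    </suite>
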
From 6ffcd845fa3363033ef764e9a18a8de0808faa75 Mon Sep 17 00:00:00 2001
From: Roman Zabaluev
Date: Thu, 9 Feb 2023 14:53:43 +0300
Subject: [PATCH 14/54] Fix e2e run (#3326)
* Fix e2e run
* add suite profiles
Co-authored-by: VladSenyuta
---
.github/workflows/e2e-checks.yaml | 4 ++--
kafka-ui-e2e-checks/README.md | 14 ++++++++++----
.../kafka/ui/settings/configs/Profiles.java | 14 +++++++++-----
.../provectus/kafka/ui/testSuite/TestClass.java | 4 ++++
.../src/test/resources/regression.xml | 3 ++-
kafka-ui-e2e-checks/src/test/resources/sanity.xml | 8 ++++++++
6 files changed, 35 insertions(+), 12 deletions(-)
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/testSuite/TestClass.java
create mode 100644 kafka-ui-e2e-checks/src/test/resources/sanity.xml
diff --git a/.github/workflows/e2e-checks.yaml b/.github/workflows/e2e-checks.yaml
index 85e3c17d2f..40178c67d8 100644
--- a/.github/workflows/e2e-checks.yaml
+++ b/.github/workflows/e2e-checks.yaml
@@ -33,7 +33,7 @@ jobs:
id: build_app
run: |
./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
- ./mvnw -B -V -ntp clean package -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
+ ./mvnw -B -V -ntp clean install -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
- name: compose app
id: compose_app
# use the following command until #819 will be fixed
@@ -42,7 +42,7 @@ jobs:
- name: e2e run
run: |
./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
- ./mvnw -B -V -ntp -DQASEIO_API_TOKEN=${{ secrets.QASEIO_API_TOKEN }} -pl 'kafka-ui-e2e-checks' test -Dsurefire.suiteXmlFiles='src/test/resources/smoke.xml' -Pprod
+ ./mvnw -B -V -ntp -DQASEIO_API_TOKEN=${{ secrets.QASEIO_API_TOKEN }} -Dsurefire.suiteXmlFiles='src/test/resources/smoke.xml' -Dsuite=smoke -f 'kafka-ui-e2e-checks' test -Pprod
- name: Generate allure report
uses: simple-elf/allure-report-action@master
if: always()
diff --git a/kafka-ui-e2e-checks/README.md b/kafka-ui-e2e-checks/README.md
index 651e3c1143..bd8e0ba704 100644
--- a/kafka-ui-e2e-checks/README.md
+++ b/kafka-ui-e2e-checks/README.md
@@ -41,16 +41,22 @@ docker-compose -f documentation/compose/e2e-tests.yaml up -d
2. Run Smoke test suite using your QaseIO API token as environment variable (put instead %s into command below)
```
-./mvnw -DQASEIO_API_TOKEN='%s' -pl 'kafka-ui-e2e-checks' test -Dsurefire.suiteXmlFiles='src/test/resources/smoke.xml' -Pprod
+./mvnw -DQASEIO_API_TOKEN='%s' -Dsurefire.suiteXmlFiles='src/test/resources/smoke.xml' -Dsuite=smoke -f 'kafka-ui-e2e-checks' test -Pprod
```
-3. Run Regression test suite using your QaseIO API token as environment variable (put instead %s into command below)
+3. Run Sanity test suite using your QaseIO API token as environment variable (put instead %s into command below)
```
-./mvnw -DQASEIO_API_TOKEN='%s' -pl 'kafka-ui-e2e-checks' test -Dsurefire.suiteXmlFiles='src/test/resources/regression.xml' -Pprod
+./mvnw -DQASEIO_API_TOKEN='%s' -Dsurefire.suiteXmlFiles='src/test/resources/sanity.xml' -Dsuite=sanity -f 'kafka-ui-e2e-checks' test -Pprod
```
-4. To run tests on your local Chrome browser just add next VM option to the Run Configuration
+4. Run Regression test suite using your QaseIO API token as environment variable (put instead %s into command below)
+
+```
+./mvnw -DQASEIO_API_TOKEN='%s' -Dsurefire.suiteXmlFiles='src/test/resources/regression.xml' -Dsuite=regression -f 'kafka-ui-e2e-checks' test -Pprod
+```
+
+5. To run tests on your local Chrome browser just add next VM option to the Run Configuration
```
-Dbrowser=local
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/configs/Profiles.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/configs/Profiles.java
index 9a5ca69107..27696c46d6 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/configs/Profiles.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/configs/Profiles.java
@@ -4,10 +4,14 @@ import org.aeonbits.owner.Config;
public interface Profiles extends Config {
- String CONTAINER = "container";
- String LOCAL = "local";
+ String CONTAINER = "container";
+ String LOCAL = "local";
- @Key("browser")
- @DefaultValue(CONTAINER)
- String browser();
+ @Key("browser")
+ @DefaultValue(CONTAINER)
+ String browser();
+
+ @Key("suite")
+ @DefaultValue("custom")
+ String suite();
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/testSuite/TestClass.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/testSuite/TestClass.java
new file mode 100644
index 0000000000..af5a4aea3f
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/testSuite/TestClass.java
@@ -0,0 +1,4 @@
+package com.provectus.kafka.ui.testSuite;
+
+public class TestClass {
+}
diff --git a/kafka-ui-e2e-checks/src/test/resources/regression.xml b/kafka-ui-e2e-checks/src/test/resources/regression.xml
index fff1df6085..2c34f4a929 100644
--- a/kafka-ui-e2e-checks/src/test/resources/regression.xml
+++ b/kafka-ui-e2e-checks/src/test/resources/regression.xml
@@ -2,7 +2,8 @@
-
+
+
diff --git a/kafka-ui-e2e-checks/src/test/resources/sanity.xml b/kafka-ui-e2e-checks/src/test/resources/sanity.xml
new file mode 100644
index 0000000000..e1c8e8a31f
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/resources/sanity.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
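
The new suite() key follows the same Owner-library pattern as browser(): the Config interface is materialized through ConfigFactory, and a -Dsuite=smoke system property overrides the "custom" @DefaultValue at run time. A minimal sketch of how such a config is read (the holder class is illustrative, not the project's actual wiring):

    import org.aeonbits.owner.ConfigFactory;

    public class ProfilesDemo {

        public static void main(String[] args) {
            // System properties take precedence over @DefaultValue,
            // so `-Dbrowser=local -Dsuite=smoke` is picked up here.
            Profiles profiles = ConfigFactory.create(Profiles.class, System.getProperties());
            System.out.println("browser = " + profiles.browser()); // "container" unless overridden
            System.out.println("suite   = " + profiles.suite());   // "custom" unless -Dsuite is set
        }
    }
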
From 45a6e73d295c904a26ded39621058165fc200506 Mon Sep 17 00:00:00 2001
From: Ilya Kuramshin
Date: Thu, 9 Feb 2023 16:13:04 +0400
Subject: [PATCH 15/54] [BE] SR: maxInMemorySize property setting added to SR
client (#3310)
Co-authored-by: iliax
Co-authored-by: Roman Zabaluev
---
.../java/com/provectus/kafka/ui/service/KafkaClusterFactory.java | 1 +
1 file changed, 1 insertion(+)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java
index 7113f5af36..aea92ab4c4 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java
@@ -82,6 +82,7 @@ public class KafkaClusterFactory {
WebClient webClient = new WebClientConfigurator()
.configureSsl(clusterProperties.getSchemaRegistrySsl())
.configureBasicAuth(auth.getUsername(), auth.getPassword())
+ .configureBufferSize(maxBuffSize)
.build();
return ReactiveFailover.create(
parseUrlList(clusterProperties.getSchemaRegistry()),
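
configureBufferSize() here is the project's own WebClientConfigurator wrapper; in plain Spring WebFlux, raising a WebClient's in-memory decode limit goes through ExchangeStrategies. A hedged sketch of the underlying call, with an example size rather than the project's actual default:

    import org.springframework.util.unit.DataSize;
    import org.springframework.web.reactive.function.client.ExchangeStrategies;
    import org.springframework.web.reactive.function.client.WebClient;

    class WebClientBufferExample {

        // Builds a WebClient whose codecs may buffer up to maxBuffSize in memory,
        // lifting the 256 KB default that large schema registry payloads can exceed.
        static WebClient build(DataSize maxBuffSize) {
            return WebClient.builder()
                    .exchangeStrategies(ExchangeStrategies.builder()
                            .codecs(c -> c.defaultCodecs().maxInMemorySize((int) maxBuffSize.toBytes()))
                            .build())
                    .build();
        }
    }
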
From 8663ef6e8422b84a18cc19d82832d4f0fcf6b1cd Mon Sep 17 00:00:00 2001
From: Vlad Senyuta <66071557+VladSenyuta@users.noreply.github.com>
Date: Thu, 9 Feb 2023 15:23:50 +0200
Subject: [PATCH 16/54] [e2e] Checking Clearing results for already executed
KSQL Request (#3327)
* test commit
* fix BaseTest
* upd global
* upd global
* upd global
* add local browser VM option
* fix TopicsList column header locator
* fix withStartupTimeout()
* switch e2e to TestNG
* upd pom
* upd page classes
* upd -pl kafka-ui-e2e-checks
* test commit
* Revert "test commit"
This reverts commit 4b505321ac5e164986a7a1886ac40c6744b8ecb1.
* fix workflow module
* upd test -f 'kafka-ui-e2e-checks'
* crt clearResultsForExecutedRequest test
* add suite profiles
* fix clickClearResultsBtn
---
.../kafka/ui/pages/ksqlDb/KsqlQueryForm.java | 225 +++++++++---------
.../ui/smokeSuite/ksqlDb/KsqlDbTest.java | 26 +-
2 files changed, 143 insertions(+), 108 deletions(-)
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java
index e34fbedb1c..30ac1007fd 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java
@@ -1,144 +1,155 @@
package com.provectus.kafka.ui.pages.ksqlDb;
-import static com.codeborne.selenide.Condition.visible;
-import static com.codeborne.selenide.Selenide.$$x;
-import static com.codeborne.selenide.Selenide.$x;
-
import com.codeborne.selenide.CollectionCondition;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.ElementsCollection;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
+
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
+import static com.codeborne.selenide.Condition.visible;
+import static com.codeborne.selenide.Selenide.$$x;
+import static com.codeborne.selenide.Selenide.$x;
+
public class KsqlQueryForm extends BasePage {
- protected SelenideElement pageTitle = $x("//h1[text()='Query']");
- protected SelenideElement clearBtn = $x("//div/button[text()='Clear']");
- protected SelenideElement executeBtn = $x("//div/button[text()='Execute']");
- protected SelenideElement stopQueryBtn = $x("//div/button[text()='Stop query']");
- protected SelenideElement clearResultsBtn = $x("//div/button[text()='Clear results']");
- protected SelenideElement addStreamPropertyBtn = $x("//button[text()='Add Stream Property']");
- protected SelenideElement queryAreaValue = $x("//div[@class='ace_content']");
- protected SelenideElement queryArea = $x("//div[@id='ksql']/textarea[@class='ace_text-input']");
- protected ElementsCollection ksqlGridItems = $$x("//tbody//tr");
- protected ElementsCollection keyField = $$x("//input[@aria-label='key']");
- protected ElementsCollection valueField = $$x("//input[@aria-label='value']");
+ protected SelenideElement pageTitle = $x("//h1[text()='Query']");
+ protected SelenideElement clearBtn = $x("//div/button[text()='Clear']");
+ protected SelenideElement executeBtn = $x("//div/button[text()='Execute']");
+ protected SelenideElement stopQueryBtn = $x("//div/button[text()='Stop query']");
+ protected SelenideElement clearResultsBtn = $x("//div/button[text()='Clear results']");
+ protected SelenideElement addStreamPropertyBtn = $x("//button[text()='Add Stream Property']");
+ protected SelenideElement queryAreaValue = $x("//div[@class='ace_content']");
+ protected SelenideElement queryArea = $x("//div[@id='ksql']/textarea[@class='ace_text-input']");
+ protected ElementsCollection ksqlGridItems = $$x("//tbody//tr");
+ protected ElementsCollection keyField = $$x("//input[@aria-label='key']");
+ protected ElementsCollection valueField = $$x("//input[@aria-label='value']");
- @Step
- public KsqlQueryForm waitUntilScreenReady() {
- waitUntilSpinnerDisappear();
- pageTitle.shouldBe(Condition.visible);
- return this;
- }
-
- @Step
- public KsqlQueryForm clickClearBtn() {
- clickByJavaScript(clearBtn);
- return this;
- }
-
- @Step
- public KsqlQueryForm clickExecuteBtn() {
- clickByJavaScript(executeBtn);
- if (queryAreaValue.getText().contains("EMIT CHANGES;")) {
- loadingSpinner.shouldBe(Condition.visible);
- } else {
- waitUntilSpinnerDisappear();
- }
- return this;
- }
-
- @Step
- public KsqlQueryForm clickStopQueryBtn() {
- clickByJavaScript(stopQueryBtn);
- waitUntilSpinnerDisappear();
- return this;
- }
-
- @Step
- public KsqlQueryForm clickClearResultsBtn() {
- clickByJavaScript(clearResultsBtn);
- waitUntilSpinnerDisappear();
- return this;
- }
-
- @Step
- public KsqlQueryForm clickAddStreamProperty() {
- clickByJavaScript(addStreamPropertyBtn);
- return this;
- }
-
- @Step
- public KsqlQueryForm setQuery(String query) {
- queryAreaValue.shouldBe(Condition.visible).click();
- queryArea.setValue(query);
- return this;
- }
-
- private List<KsqlQueryForm.KsqlResponseGridItem> initItems() {
- List<KsqlQueryForm.KsqlResponseGridItem> gridItemList = new ArrayList<>();
- ksqlGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
- .forEach(item -> gridItemList.add(new KsqlQueryForm.KsqlResponseGridItem(item)));
- return gridItemList;
- }
-
- @Step
- public KsqlQueryForm.KsqlResponseGridItem getTableByName(String name) {
- return initItems().stream()
- .filter(e -> e.getName().equalsIgnoreCase(name))
- .findFirst().orElseThrow();
- }
-
- public static class KsqlResponseGridItem extends BasePage {
-
- private final SelenideElement element;
-
- private KsqlResponseGridItem(SelenideElement element) {
- this.element = element;
+ @Step
+ public KsqlQueryForm waitUntilScreenReady() {
+ waitUntilSpinnerDisappear();
+ pageTitle.shouldBe(Condition.visible);
+ return this;
}
@Step
- public String getType() {
- return element.$x("./td[1]").getText().trim();
+ public KsqlQueryForm clickClearBtn() {
+ clickByJavaScript(clearBtn);
+ return this;
}
@Step
- public String getName() {
- return element.$x("./td[2]").scrollTo().getText().trim();
+ public KsqlQueryForm clickExecuteBtn() {
+ clickByActions(executeBtn);
+ if (queryAreaValue.getText().contains("EMIT CHANGES;")) {
+ loadingSpinner.shouldBe(Condition.visible);
+ } else {
+ waitUntilSpinnerDisappear();
+ }
+ return this;
}
@Step
- public boolean isVisible() {
- boolean isVisible = false;
- try {
- element.$x("./td[2]").shouldBe(visible, Duration.ofMillis(500));
- isVisible = true;
- } catch (Throwable ignored) {
- }
- return isVisible;
+ public KsqlQueryForm clickStopQueryBtn() {
+ clickByActions(stopQueryBtn);
+ waitUntilSpinnerDisappear();
+ return this;
}
@Step
- public String getTopic() {
- return element.$x("./td[3]").getText().trim();
+ public KsqlQueryForm clickClearResultsBtn() {
+ clickByActions(clearResultsBtn);
+ waitUntilSpinnerDisappear();
+ return this;
}
@Step
- public String getKeyFormat() {
- return element.$x("./td[4]").getText().trim();
+ public KsqlQueryForm clickAddStreamProperty() {
+ clickByJavaScript(addStreamPropertyBtn);
+ return this;
}
@Step
- public String getValueFormat() {
- return element.$x("./td[5]").getText().trim();
+ public KsqlQueryForm setQuery(String query) {
+ queryAreaValue.shouldBe(Condition.visible).click();
+ queryArea.setValue(query);
+ return this;
}
@Step
- public String getIsWindowed() {
- return element.$x("./td[6]").getText().trim();
+ public KsqlQueryForm.KsqlResponseGridItem getTableByName(String name) {
+ return initItems().stream()
+ .filter(e -> e.getName().equalsIgnoreCase(name))
+ .findFirst().orElseThrow();
+ }
+
+ @Step
+ public boolean areResultsVisible() {
+ boolean visible = false;
+ try {
+ visible = initItems().size() > 0;
+ } catch (Throwable ignored) {
+ }
+ return visible;
+ }
+
+ private List<KsqlQueryForm.KsqlResponseGridItem> initItems() {
+ List<KsqlQueryForm.KsqlResponseGridItem> gridItemList = new ArrayList<>();
+ ksqlGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ .forEach(item -> gridItemList.add(new KsqlQueryForm.KsqlResponseGridItem(item)));
+ return gridItemList;
+ }
+
+ public static class KsqlResponseGridItem extends BasePage {
+
+ private final SelenideElement element;
+
+ private KsqlResponseGridItem(SelenideElement element) {
+ this.element = element;
+ }
+
+ @Step
+ public String getType() {
+ return element.$x("./td[1]").getText().trim();
+ }
+
+ @Step
+ public String getName() {
+ return element.$x("./td[2]").scrollTo().getText().trim();
+ }
+
+ @Step
+ public boolean isVisible() {
+ boolean isVisible = false;
+ try {
+ element.$x("./td[2]").shouldBe(visible, Duration.ofMillis(500));
+ isVisible = true;
+ } catch (Throwable ignored) {
+ }
+ return isVisible;
+ }
+
+ @Step
+ public String getTopic() {
+ return element.$x("./td[3]").getText().trim();
+ }
+
+ @Step
+ public String getKeyFormat() {
+ return element.$x("./td[4]").getText().trim();
+ }
+
+ @Step
+ public String getValueFormat() {
+ return element.$x("./td[5]").getText().trim();
+ }
+
+ @Step
+ public String getIsWindowed() {
+ return element.$x("./td[6]").getText().trim();
+ }
}
- }
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
index 8689c0abde..be847b00a7 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
@@ -39,7 +39,7 @@ public class KsqlDbTest extends BaseTest {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(41)
- @Test
+ @Test(priority = 1)
public void checkShowTablesRequestExecution() {
naviSideBar
.openSideMenu(KSQL_DB);
@@ -51,8 +51,32 @@ public class KsqlDbTest extends BaseTest {
.setQuery(SHOW_TABLES.getQuery())
.clickExecuteBtn();
SoftAssert softly = new SoftAssert();
+ softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
softly.assertTrue(ksqlQueryForm.getTableByName(FIRST_TABLE.getName()).isVisible(), "getTableName()");
softly.assertTrue(ksqlQueryForm.getTableByName(SECOND_TABLE.getName()).isVisible(), "getTableName()");
softly.assertAll();
}
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(86)
+ @Test(priority = 2)
+ public void clearResultsForExecutedRequest() {
+ naviSideBar
+ .openSideMenu(KSQL_DB);
+ ksqlDbList
+ .waitUntilScreenReady()
+ .clickExecuteKsqlRequestBtn();
+ ksqlQueryForm
+ .waitUntilScreenReady()
+ .setQuery(SHOW_TABLES.getQuery())
+ .clickExecuteBtn();
+ SoftAssert softly = new SoftAssert();
+ softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
+ softly.assertAll();
+ ksqlQueryForm
+ .clickClearResultsBtn();
+ softly.assertFalse(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
+ softly.assertAll();
+ }
}
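
The functional change buried in this patch's reindentation is the switch from clickByJavaScript to clickByActions for the Execute, Stop query, and Clear results buttons: a real pointer gesture tends to be more reliable against React click handlers than a synthetic DOM click. The helpers live in the project's WebUtils base class; a hedged sketch of what an Actions-based click looks like in Selenide:

    import com.codeborne.selenide.SelenideElement;

    import static com.codeborne.selenide.Condition.enabled;
    import static com.codeborne.selenide.Selenide.actions;

    // Illustrative WebUtils-style helper, not the project's exact implementation.
    public class ClickHelpers {

        public static void clickByActions(SelenideElement element) {
            // drive a real mouse gesture instead of dispatching a synthetic JS click
            actions().moveToElement(element.shouldBe(enabled))
                    .click()
                    .perform();
        }
    }
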
From eeef330fc03b5fdf6891b1e6d5b60ebee4bc4f8f Mon Sep 17 00:00:00 2001
From: Vlad Senyuta <66071557+VladSenyuta@users.noreply.github.com>
Date: Mon, 13 Feb 2023 13:06:44 +0200
Subject: [PATCH 17/54] [e2e] Check custom parameters availability (#3340)
* test commit
* fix BaseTest
* upd global
* upd global
* upd global
* add local browser VM option
* fix TopicsList column header locator
* fix withStartupTimeout()
* switch e2e to TestNG
* upd pom
* upd page classes
* upd -pl kafka-ui-e2e-checks
* test commit
* Revert "test commit"
This reverts commit 4b505321ac5e164986a7a1886ac40c6744b8ecb1.
* fix workflow module
* create checkCustomParametersAvailability()
* upd test -f 'kafka-ui-e2e-checks'
* upd checkCustomParametersAvailability()
* upd checkCustomParametersWithinEditExistingTopic
---
.../provectus/kafka/ui/pages/BasePage.java | 7 ++-
.../kafka/ui/pages/brokers/BrokersList.java | 2 +-
.../kafka/ui/pages/ksqlDb/KsqlDbList.java | 4 +-
.../ui/pages/topics/TopicCreateEditForm.java | 19 +++++--
.../kafka/ui/pages/topics/TopicDetails.java | 2 +-
.../ui/pages/topics/TopicSettingsTab.java | 2 +-
.../kafka/ui/pages/topics/TopicsList.java | 2 +-
.../ui/smokeSuite/topics/TopicsTest.java | 53 +++++++++++++------
8 files changed, 63 insertions(+), 28 deletions(-)
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
index daea5c0d54..e51942a39c 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
@@ -23,7 +23,8 @@ public abstract class BasePage extends WebUtils {
protected SelenideElement confirmationMdl = $x("//div[text()= 'Confirm the action']/..");
protected SelenideElement confirmBtn = $x("//button[contains(text(),'Confirm')]");
protected SelenideElement cancelBtn = $x("//button[contains(text(),'Cancel')]");
- protected ElementsCollection allGridItems = $$x("//tr[@class]");
+ protected ElementsCollection ddlOptions = $$x("//li[@value]");
+ protected ElementsCollection gridItems = $$x("//tr[@class]");
protected String summaryCellLocator = "//div[contains(text(),'%s')]";
protected String tableElementNameLocator = "//tbody//a[contains(text(),'%s')]";
protected String columnHeaderLocator = "//table//tr/th//div[text()='%s']";
@@ -44,6 +45,10 @@ public abstract class BasePage extends WebUtils {
return $x(String.format(tableElementNameLocator, elementName));
}
+ protected ElementsCollection getDdlOptions() {
+ return ddlOptions;
+ }
+
protected String getAlertHeader() {
log.debug("\ngetAlertHeader");
String result = alertHeader.shouldBe(Condition.visible).getText();
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java
index 6db2d20ad6..3d3a58f355 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java
@@ -62,7 +62,7 @@ public class BrokersList extends BasePage {
private List<BrokerGridItem> initGridItems() {
List<BrokerGridItem> gridItemList = new ArrayList<>();
- allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
.forEach(item -> gridItemList.add(new BrokersList.BrokerGridItem(item)));
return gridItemList;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java
index 84c23d0ac3..25246a86ed 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java
@@ -41,7 +41,7 @@ public class KsqlDbList extends BasePage {
private List<KsqlTablesGridItem> initTablesItems() {
List<KsqlTablesGridItem> gridItemList = new ArrayList<>();
- allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
.forEach(item -> gridItemList.add(new KsqlDbList.KsqlTablesGridItem(item)));
return gridItemList;
}
@@ -89,7 +89,7 @@ public class KsqlDbList extends BasePage {
private List<KsqlStreamsGridItem> initStreamsItems() {
List<KsqlStreamsGridItem> gridItemList = new ArrayList<>();
- allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
.forEach(item -> gridItemList.add(new KsqlDbList.KsqlStreamsGridItem(item)));
return gridItemList;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicCreateEditForm.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicCreateEditForm.java
index 7deed1b7fd..f60bd6d431 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicCreateEditForm.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicCreateEditForm.java
@@ -1,9 +1,6 @@
package com.provectus.kafka.ui.pages.topics;
-import com.codeborne.selenide.ClickOptions;
-import com.codeborne.selenide.Condition;
-import com.codeborne.selenide.ElementsCollection;
-import com.codeborne.selenide.SelenideElement;
+import com.codeborne.selenide.*;
import com.provectus.kafka.ui.pages.BasePage;
import com.provectus.kafka.ui.pages.topics.enums.CleanupPolicyValue;
import com.provectus.kafka.ui.pages.topics.enums.CustomParameterType;
@@ -91,8 +88,20 @@ public class TopicCreateEditForm extends BasePage {
}
@Step
- public TopicCreateEditForm setCustomParameterType(CustomParameterType customParameterType) {
+ public TopicCreateEditForm openCustomParameterTypeDdl() {
customParameterDdl.shouldBe(Condition.visible).click();
+ ddlOptions.shouldHave(CollectionCondition.sizeGreaterThan(0));
+ return this;
+ }
+
+ @Step
+ public ElementsCollection getAllDdlOptions() {
+ return getDdlOptions();
+ }
+
+ @Step
+ public TopicCreateEditForm setCustomParameterType(CustomParameterType customParameterType) {
+ openCustomParameterTypeDdl();
$x(String.format(ddlElementLocator, customParameterType.getOptionValue())).shouldBe(Condition.visible).click();
return this;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java
index 78ec2cef14..980a66b791 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java
@@ -337,7 +337,7 @@ public class TopicDetails extends BasePage {
private List<MessageGridItem> initItems() {
List<MessageGridItem> gridItemList = new ArrayList<>();
- allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
.forEach(item -> gridItemList.add(new TopicDetails.MessageGridItem(item)));
return gridItemList;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicSettingsTab.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicSettingsTab.java
index 2b00c97c40..3c0fcac211 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicSettingsTab.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicSettingsTab.java
@@ -24,7 +24,7 @@ public class TopicSettingsTab extends BasePage {
private List<SettingsGridItem> initGridItems() {
List<SettingsGridItem> gridItemList = new ArrayList<>();
- allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
.forEach(item -> gridItemList.add(new SettingsGridItem(item)));
return gridItemList;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java
index 291c94667f..499d5fe965 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java
@@ -163,7 +163,7 @@ public class TopicsList extends BasePage {
private List<TopicGridItem> initGridItems() {
List<TopicGridItem> gridItemList = new ArrayList<>();
- allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
.forEach(item -> gridItemList.add(new TopicGridItem(item)));
return gridItemList;
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
index 62fb0b0448..8b86d82c9e 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
@@ -108,11 +108,32 @@ public class TopicsTest extends BaseTest {
}
@Ignore
- @Issue("https://github.com/provectus/kafka-ui/issues/2625")
+ @Issue("https://github.com/provectus/kafka-ui/issues/3071")
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(268)
+ @Test(priority = 3)
+ public void checkCustomParametersWithinEditExistingTopic() {
+ navigateToTopicsAndOpenDetails(TOPIC_TO_UPDATE_AND_DELETE.getName());
+ topicDetails
+ .openDotMenu()
+ .clickEditSettingsMenu();
+ SoftAssert softly = new SoftAssert();
+ topicCreateEditForm
+ .waitUntilScreenReady()
+ .clickAddCustomParameterTypeButton()
+ .openCustomParameterTypeDdl()
+ .getAllDdlOptions()
+ .forEach(option ->
+ softly.assertFalse(option.is(Condition.attribute("disabled")),
+ option.getText() + " should be enabled"));
+ softly.assertAll();
+ }
+
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(197)
- @Test(priority = 3)
+ @Test(priority = 4)
public void updateTopic() {
navigateToTopicsAndOpenDetails(TOPIC_TO_UPDATE_AND_DELETE.getName());
topicDetails
@@ -166,7 +187,7 @@ public class TopicsTest extends BaseTest {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(242)
- @Test(priority = 4)
+ @Test(priority = 5)
public void removeTopicFromTopicList() {
navigateToTopics();
topicsList
@@ -182,7 +203,7 @@ public class TopicsTest extends BaseTest {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(207)
- @Test(priority = 5)
+ @Test(priority = 6)
public void deleteTopic() {
navigateToTopicsAndOpenDetails(TOPIC_FOR_DELETE.getName());
topicDetails
@@ -197,7 +218,7 @@ public class TopicsTest extends BaseTest {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(20)
- @Test(priority = 6)
+ @Test(priority = 7)
public void redirectToConsumerFromTopic() {
String topicName = "source-activities";
String consumerGroupId = "connect-sink_postgres_activities";
@@ -218,7 +239,7 @@ public class TopicsTest extends BaseTest {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(4)
- @Test(priority = 7)
+ @Test(priority = 8)
public void checkTopicCreatePossibility() {
navigateToTopics();
topicsList
@@ -241,7 +262,7 @@ public class TopicsTest extends BaseTest {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(266)
- @Test(priority = 8)
+ @Test(priority = 9)
public void checkTimeToRetainDataCustomValueWithEditingTopic() {
Topic topicToRetainData = new Topic()
.setName("topic-to-retain-data-" + randomAlphabetic(5))
@@ -274,7 +295,7 @@ public class TopicsTest extends BaseTest {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(6)
- @Test(priority = 9)
+ @Test(priority = 10)
public void checkCustomParametersWithinCreateNewTopic() {
navigateToTopics();
topicsList
@@ -295,7 +316,7 @@ public class TopicsTest extends BaseTest {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(2)
- @Test(priority = 10)
+ @Test(priority = 11)
public void checkTopicListElements() {
navigateToTopics();
verifyElementsCondition(topicsList.getAllVisibleElements(), Condition.visible);
@@ -305,7 +326,7 @@ public class TopicsTest extends BaseTest {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(12)
- @Test(priority = 11)
+ @Test(priority = 12)
public void addingNewFilterWithinTopic() {
String filterName = randomAlphabetic(5);
navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
@@ -327,7 +348,7 @@ public class TopicsTest extends BaseTest {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(13)
- @Test(priority = 12)
+ @Test(priority = 13)
public void checkFilterSavingWithinSavedFilters() {
String displayName = randomAlphabetic(5);
navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
@@ -350,7 +371,7 @@ public class TopicsTest extends BaseTest {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(14)
- @Test(priority = 13)
+ @Test(priority = 14)
public void checkApplyingSavedFilterWithinTopicMessages() {
String displayName = randomAlphabetic(5);
navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
@@ -371,7 +392,7 @@ public class TopicsTest extends BaseTest {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(11)
- @Test(priority = 14)
+ @Test(priority = 15)
public void checkShowInternalTopicsButtonFunctionality() {
navigateToTopics();
SoftAssert softly = new SoftAssert();
@@ -389,7 +410,7 @@ public class TopicsTest extends BaseTest {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(56)
- @Test(priority = 15)
+ @Test(priority = 16)
public void checkRetentionBytesAccordingToMaxSizeOnDisk() {
navigateToTopics();
topicsList
@@ -439,7 +460,7 @@ public class TopicsTest extends BaseTest {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(247)
- @Test(priority = 16)
+ @Test(priority = 17)
public void recreateTopicFromTopicProfile() {
Topic topicToRecreate = new Topic()
.setName("topic-to-recreate-" + randomAlphabetic(5))
@@ -469,7 +490,7 @@ public class TopicsTest extends BaseTest {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(8)
- @Test(priority = 17)
+ @Test(priority = 18)
public void checkCopyTopicPossibility() {
Topic topicToCopy = new Topic()
.setName("topic-to-copy-" + randomAlphabetic(5))
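
The core of the new checkCustomParametersWithinEditExistingTopic() test is the open-then-inspect dropdown pattern built on the shared BasePage.ddlOptions collection. A hedged sketch of that pattern in isolation, using only the TopicCreateEditForm API added in this patch (the helper name verifyAllDdlOptionsEnabled is hypothetical):

// Sketch of the reusable dropdown-availability check.
import com.codeborne.selenide.Condition;
import org.testng.asserts.SoftAssert;

class DdlAvailabilityCheck {

  // Hypothetical helper: opens the custom-parameter dropdown and asserts
  // every option is selectable.
  static void verifyAllDdlOptionsEnabled(TopicCreateEditForm form, SoftAssert softly) {
    form.openCustomParameterTypeDdl()
        .getAllDdlOptions()
        .forEach(option ->
            softly.assertFalse(option.is(Condition.attribute("disabled")),
                option.getText() + " should be enabled"));
  }
}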
From ebd25c61b1736b707ca1003c0c9872a0c605df58 Mon Sep 17 00:00:00 2001
From: Ilya Kuramshin
Date: Tue, 14 Feb 2023 14:13:52 +0400
Subject: [PATCH 18/54] Using JRE instead of JDK in docker image construction
(#3341)
Co-authored-by: iliax
---
kafka-ui-api/Dockerfile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/kafka-ui-api/Dockerfile b/kafka-ui-api/Dockerfile
index 96e0eb414b..81df41bd1d 100644
--- a/kafka-ui-api/Dockerfile
+++ b/kafka-ui-api/Dockerfile
@@ -1,4 +1,4 @@
-FROM azul/zulu-openjdk-alpine:17
+FROM azul/zulu-openjdk-alpine:17-jre
RUN apk add --no-cache gcompat # need to make snappy codec work
RUN addgroup -S kafkaui && adduser -S kafkaui -G kafkaui
From cbb166026d8c6360836def9bf9c208313023961c Mon Sep 17 00:00:00 2001
From: Konstantinos Papadakis <37328944+cpapad@users.noreply.github.com>
Date: Wed, 15 Feb 2023 15:31:05 +0200
Subject: [PATCH 19/54] Helm: Make secrets resource creation optional (#3324)
* Fixes #3299
* bumps chart version
---------
Co-authored-by: Roman Zabaluev
---
charts/kafka-ui/Chart.yaml | 2 +-
charts/kafka-ui/templates/secret.yaml | 2 ++
2 files changed, 3 insertions(+), 1 deletion(-)
diff --git a/charts/kafka-ui/Chart.yaml b/charts/kafka-ui/Chart.yaml
index 3a1759a391..6e5f0ee2d5 100644
--- a/charts/kafka-ui/Chart.yaml
+++ b/charts/kafka-ui/Chart.yaml
@@ -2,6 +2,6 @@ apiVersion: v2
name: kafka-ui
description: A Helm chart for kafka-UI
type: application
-version: 0.5.2
+version: 0.5.3
appVersion: v0.5.0
icon: https://github.com/provectus/kafka-ui/raw/master/documentation/images/kafka-ui-logo.png
diff --git a/charts/kafka-ui/templates/secret.yaml b/charts/kafka-ui/templates/secret.yaml
index a2d1f25fa2..1f974503dc 100644
--- a/charts/kafka-ui/templates/secret.yaml
+++ b/charts/kafka-ui/templates/secret.yaml
@@ -1,3 +1,4 @@
+{{- if .Values.envs.secret -}}
apiVersion: v1
kind: Secret
metadata:
@@ -9,3 +10,4 @@ data:
{{- range $key, $val := .Values.envs.secret }}
{{ $key }}: {{ $val | b64enc | quote }}
{{- end -}}
+{{- end}}
\ No newline at end of file
From a87b31aca1f24f0ad22e6903d8b4f199827e9cc3 Mon Sep 17 00:00:00 2001
From: Michael Kreis
Date: Wed, 15 Feb 2023 15:13:53 +0100
Subject: [PATCH 20/54] BE: Extend testcase with a recursive reference (#3314)
Fixes #2824
Co-authored-by: Michael Kreis
Co-authored-by: Roman Zabaluev
---
.../kafka/ui/util/jsonschema/ProtobufSchemaConverterTest.java | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/ProtobufSchemaConverterTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/ProtobufSchemaConverterTest.java
index 662c3ae22b..02da33bb12 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/ProtobufSchemaConverterTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/ProtobufSchemaConverterTest.java
@@ -57,6 +57,7 @@ class ProtobufSchemaConverterTest {
message EmbeddedMsg {
int32 emb_f1 = 1;
TestMsg outer_ref = 2;
+ EmbeddedMsg self_ref = 3;
}
}""";
@@ -116,7 +117,8 @@ class ProtobufSchemaConverterTest {
"properties":
{
"emb_f1": { "type": "integer", "maximum": 2147483647, "minimum": -2147483648 },
- "outer_ref": { "$ref": "#/definitions/test.TestMsg" }
+ "outer_ref": { "$ref": "#/definitions/test.TestMsg" },
+ "self_ref": { "$ref": "#/definitions/test.TestMsg.EmbeddedMsg" }
}
}
},
From f2ec4d76de481f1a049ae59c4c6fa352bdab7a30 Mon Sep 17 00:00:00 2001
From: Ilya Kuramshin
Date: Fri, 17 Feb 2023 16:18:28 +0400
Subject: [PATCH 21/54] Avro (Embedded) serde implementation (#3266)
* Avro (Embedded) serde implementation
---------
Co-authored-by: iliax
---
.../kafka/ui/serdes/SerdesInitializer.java | 2 +
.../ui/serdes/builtin/AvroEmbeddedSerde.java | 72 +++++++++++++++
.../serdes/builtin/AvroEmbeddedSerdeTest.java | 92 +++++++++++++++++++
3 files changed, 166 insertions(+)
create mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/AvroEmbeddedSerde.java
create mode 100644 kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/AvroEmbeddedSerdeTest.java
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java
index b49767d1d5..2e057874e7 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java
@@ -9,6 +9,7 @@ import com.provectus.kafka.ui.config.ClustersProperties.SerdeConfig;
import com.provectus.kafka.ui.exception.ValidationException;
import com.provectus.kafka.ui.serde.api.PropertyResolver;
import com.provectus.kafka.ui.serde.api.Serde;
+import com.provectus.kafka.ui.serdes.builtin.AvroEmbeddedSerde;
import com.provectus.kafka.ui.serdes.builtin.Base64Serde;
import com.provectus.kafka.ui.serdes.builtin.Int32Serde;
import com.provectus.kafka.ui.serdes.builtin.Int64Serde;
@@ -43,6 +44,7 @@ public class SerdesInitializer {
.put(Int64Serde.name(), Int64Serde.class)
.put(UInt32Serde.name(), UInt32Serde.class)
.put(UInt64Serde.name(), UInt64Serde.class)
+ .put(AvroEmbeddedSerde.name(), AvroEmbeddedSerde.class)
.put(Base64Serde.name(), Base64Serde.class)
.put(UuidBinarySerde.name(), UuidBinarySerde.class)
.build(),
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/AvroEmbeddedSerde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/AvroEmbeddedSerde.java
new file mode 100644
index 0000000000..73a1ed5484
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/AvroEmbeddedSerde.java
@@ -0,0 +1,72 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.PropertyResolver;
+import com.provectus.kafka.ui.serde.api.RecordHeaders;
+import com.provectus.kafka.ui.serde.api.SchemaDescription;
+import com.provectus.kafka.ui.serdes.BuiltInSerde;
+import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
+import java.util.Map;
+import java.util.Optional;
+import lombok.SneakyThrows;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.SeekableByteArrayInput;
+import org.apache.avro.generic.GenericDatumReader;
+
+public class AvroEmbeddedSerde implements BuiltInSerde {
+
+ public static String name() {
+ return "Avro (Embedded)";
+ }
+
+ @Override
+ public void configure(PropertyResolver serdeProperties,
+ PropertyResolver kafkaClusterProperties,
+ PropertyResolver globalProperties) {
+ }
+
+ @Override
+ public Optional<String> getDescription() {
+ return Optional.empty();
+ }
+
+ @Override
+ public Optional<SchemaDescription> getSchema(String topic, Target type) {
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean canDeserialize(String topic, Target type) {
+ return true;
+ }
+
+ @Override
+ public boolean canSerialize(String topic, Target type) {
+ return false;
+ }
+
+ @Override
+ public Serializer serializer(String topic, Target type) {
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public Deserializer deserializer(String topic, Target type) {
+ return new Deserializer() {
+ @SneakyThrows
+ @Override
+ public DeserializeResult deserialize(RecordHeaders headers, byte[] data) {
+ try (var reader = new DataFileReader<>(new SeekableByteArrayInput(data), new GenericDatumReader<>())) {
+ if (!reader.hasNext()) {
+ // very unusual case: the payload contains only the container header and no records;
+ // return a null result in that case
+ return new DeserializeResult(null, DeserializeResult.Type.JSON, Map.of());
+ }
+ Object avroObj = reader.next();
+ String jsonValue = new String(AvroSchemaUtils.toJson(avroObj));
+ return new DeserializeResult(jsonValue, DeserializeResult.Type.JSON, Map.of());
+ }
+ }
+ };
+ }
+}
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/AvroEmbeddedSerdeTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/AvroEmbeddedSerdeTest.java
new file mode 100644
index 0000000000..2f4734ce06
--- /dev/null
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/AvroEmbeddedSerdeTest.java
@@ -0,0 +1,92 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.Serde;
+import com.provectus.kafka.ui.serdes.PropertyResolverImpl;
+import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import org.apache.avro.Schema;
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.generic.GenericRecord;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.EnumSource;
+
+class AvroEmbeddedSerdeTest {
+
+ private AvroEmbeddedSerde avroEmbeddedSerde;
+
+ @BeforeEach
+ void init() {
+ avroEmbeddedSerde = new AvroEmbeddedSerde();
+ avroEmbeddedSerde.configure(
+ PropertyResolverImpl.empty(),
+ PropertyResolverImpl.empty(),
+ PropertyResolverImpl.empty()
+ );
+ }
+
+ @ParameterizedTest
+ @EnumSource
+ void canDeserializeReturnsTrueForAllTargets(Serde.Target target) {
+ assertThat(avroEmbeddedSerde.canDeserialize("anyTopic", target))
+ .isTrue();
+ }
+
+ @ParameterizedTest
+ @EnumSource
+ void canSerializeReturnsFalseForAllTargets(Serde.Target target) {
+ assertThat(avroEmbeddedSerde.canSerialize("anyTopic", target))
+ .isFalse();
+ }
+
+ @Test
+ void deserializerParsesAvroDataWithEmbeddedSchema() throws Exception {
+ Schema schema = new Schema.Parser().parse("""
+ {
+ "type": "record",
+ "name": "TestAvroRecord",
+ "fields": [
+ { "name": "field1", "type": "string" },
+ { "name": "field2", "type": "int" }
+ ]
+ }
+ """
+ );
+ GenericRecord record = new GenericData.Record(schema);
+ record.put("field1", "this is test msg");
+ record.put("field2", 100500);
+
+ String jsonRecord = new String(AvroSchemaUtils.toJson(record));
+ byte[] serializedRecordBytes = serializeAvroWithEmbeddedSchema(record);
+
+ var deserializer = avroEmbeddedSerde.deserializer("anyTopic", Serde.Target.KEY);
+ DeserializeResult result = deserializer.deserialize(null, serializedRecordBytes);
+ assertThat(result.getType()).isEqualTo(DeserializeResult.Type.JSON);
+ assertThat(result.getAdditionalProperties()).isEmpty();
+ assertJsonEquals(jsonRecord, result.getResult());
+ }
+
+ private void assertJsonEquals(String expected, String actual) throws IOException {
+ var mapper = new JsonMapper();
+ assertThat(mapper.readTree(actual)).isEqualTo(mapper.readTree(expected));
+ }
+
+ private byte[] serializeAvroWithEmbeddedSchema(GenericRecord record) throws IOException {
+ try (DataFileWriter<GenericRecord> writer = new DataFileWriter<>(new GenericDatumWriter<>());
+ ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
+ writer.create(record.getSchema(), baos);
+ writer.append(record);
+ writer.flush();
+ return baos.toByteArray();
+ }
+ }
+
+}
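
Both the new serde and its test hinge on one property of the Avro object-container format: the writer schema travels in the file header, so DataFileReader can decode records without any external schema registry. A compact standalone sketch of that round trip, using only the Avro APIs already shown in this patch (the class name EmbeddedAvroRoundTrip is illustrative, not part of the patch):

// Demonstrates the mechanism the serde relies on: the schema is embedded
// in the container-file header written by DataFileWriter.create().
import java.io.ByteArrayOutputStream;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.file.SeekableByteArrayInput;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;

public class EmbeddedAvroRoundTrip {
  public static void main(String[] args) throws Exception {
    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"Demo\",\"fields\":[{\"name\":\"f\",\"type\":\"string\"}]}");
    GenericRecord rec = new GenericData.Record(schema);
    rec.put("f", "hello");

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (DataFileWriter<GenericRecord> writer = new DataFileWriter<>(new GenericDatumWriter<>())) {
      writer.create(schema, baos); // writes the header, including the schema
      writer.append(rec);
    }
    // Read back without supplying a schema: the reader takes it from the header.
    try (var reader = new DataFileReader<>(
        new SeekableByteArrayInput(baos.toByteArray()), new GenericDatumReader<>())) {
      System.out.println(reader.next()); // {"f": "hello"}
    }
  }
}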
From ba99c20ad984a1b9f6c77401450150aed53cb939 Mon Sep 17 00:00:00 2001
From: Vlad Senyuta <66071557+VladSenyuta@users.noreply.github.com>
Date: Tue, 21 Feb 2023 11:04:18 +0200
Subject: [PATCH 22/54] [e2e] Update Qase integration, impl additional e2e
workflows (#3372)
---
.github/workflows/e2e-automation.yml | 85 +++++++++
.github/workflows/e2e-checks.yaml | 8 +-
.github/workflows/e2e-weekly.yml | 70 +++++++
kafka-ui-e2e-checks/README.md | 18 +-
kafka-ui-e2e-checks/pom.xml | 28 ++-
.../kafka/ui/settings/BaseSource.java | 21 ++-
.../kafka/ui/settings/configs/Profiles.java | 8 +-
.../utilities/qaseIoUtils/QaseExtension.java | 145 ---------------
.../qaseIoUtils/TestCaseGenerator.java | 173 ------------------
.../utilities/qaseIoUtils/enums/Status.java | 5 -
.../ui/utilities/qaseUtils/QaseSetup.java | 34 ++++
.../annotations/AutomationStatus.java | 4 +-
.../annotations/Suite.java | 2 +-
.../ui/utilities/qaseUtils/enums/Status.java | 5 +
.../provectus/kafka/ui/variables/Browser.java | 7 +
.../provectus/kafka/ui/variables/Suite.java | 11 ++
...it.platform.launcher.TestExecutionListener | 1 -
.../java/com/provectus/kafka/ui/BaseTest.java | 11 +-
.../kafka/ui/manualSuite/TestClass.java | 4 +
.../kafka/ui/qaseSuite/TestClass.java | 4 +
.../kafka/ui/sanitySuite/TestClass.java | 4 +
.../kafka/ui/smokeSuite/SmokeTest.java | 4 +-
.../ui/smokeSuite/brokers/BrokersTest.java | 6 +-
.../smokeSuite/connectors/ConnectorsTest.java | 6 +-
.../ui/smokeSuite/ksqlDb/KsqlDbTest.java | 6 +-
.../ui/smokeSuite/schemas/SchemasTest.java | 6 +-
.../ui/smokeSuite/topics/MessagesTest.java | 8 +-
.../ui/smokeSuite/topics/TopicsTest.java | 6 +-
.../kafka/ui/testSuite/TestClass.java | 4 -
.../src/test/resources/manual.xml | 8 +
.../src/test/resources/qase.xml | 8 +
.../src/test/resources/regression.xml | 1 +
32 files changed, 305 insertions(+), 406 deletions(-)
create mode 100644 .github/workflows/e2e-automation.yml
create mode 100644 .github/workflows/e2e-weekly.yml
delete mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/QaseExtension.java
delete mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/TestCaseGenerator.java
delete mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/enums/Status.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/QaseSetup.java
rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/{qaseIoUtils => qaseUtils}/annotations/AutomationStatus.java (58%)
rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/{qaseIoUtils => qaseUtils}/annotations/Suite.java (74%)
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/enums/Status.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Browser.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Suite.java
delete mode 100644 kafka-ui-e2e-checks/src/main/resources/META-INF/services/org.junit.platform.launcher.TestExecutionListener
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/TestClass.java
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/TestClass.java
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/sanitySuite/TestClass.java
delete mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/testSuite/TestClass.java
create mode 100644 kafka-ui-e2e-checks/src/test/resources/manual.xml
create mode 100644 kafka-ui-e2e-checks/src/test/resources/qase.xml
diff --git a/.github/workflows/e2e-automation.yml b/.github/workflows/e2e-automation.yml
new file mode 100644
index 0000000000..50e3e9f1d4
--- /dev/null
+++ b/.github/workflows/e2e-automation.yml
@@ -0,0 +1,85 @@
+name: E2E Automation suite
+on:
+ workflow_dispatch:
+ inputs:
+ test_suite:
+ description: 'Select test suite to run'
+ default: 'regression'
+ required: true
+ type: choice
+ options:
+ - manual
+ - qase
+ - regression
+ - sanity
+ - smoke
+ qase_token:
+ description: 'Set Qase token to enable integration'
+ required: false
+ type: string
+
+jobs:
+ build-and-test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ ref: ${{ github.sha }}
+ - name: Set up environment
+ id: set_env_values
+ run: |
+ cat "./kafka-ui-e2e-checks/.env.ci" >> "./kafka-ui-e2e-checks/.env"
+ - name: Pull with Docker
+ id: pull_chrome
+ run: |
+ docker pull selenium/standalone-chrome:103.0
+ - name: Set up JDK
+ uses: actions/setup-java@v3
+ with:
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
+ - name: Build with Maven
+ id: build_app
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp clean install -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
+ - name: Compose with Docker
+ id: compose_app
+ # use the following command until #819 is fixed
+ run: |
+ docker-compose -f ./documentation/compose/e2e-tests.yaml up -d
+ - name: Run test suite
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp -DQASEIO_API_TOKEN=${{ github.event.inputs.qase_token }} -Dsurefire.suiteXmlFiles='src/test/resources/${{ github.event.inputs.test_suite }}.xml' -Dsuite=${{ github.event.inputs.test_suite }} -f 'kafka-ui-e2e-checks' test -Pprod
+ - name: Generate Allure report
+ uses: simple-elf/allure-report-action@master
+ if: always()
+ id: allure-report
+ with:
+ allure_results: ./kafka-ui-e2e-checks/allure-results
+ gh_pages: allure-results
+ allure_report: allure-report
+ subfolder: allure-results
+ report_url: "http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com"
+ - uses: jakejarvis/s3-sync-action@master
+ if: always()
+ env:
+ AWS_S3_BUCKET: 'kafkaui-allure-reports'
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_REGION: 'eu-central-1'
+ SOURCE_DIR: 'allure-history/allure-results'
+ - name: Deploy report to Amazon S3
+ if: always()
+ uses: Sibz/github-status-action@v1.1.6
+ with:
+ authToken: ${{secrets.GITHUB_TOKEN}}
+ context: "Test report"
+ state: "success"
+ sha: ${{ github.sha }}
+ target_url: http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com/${{ github.run_number }}
+ - name: Dump Docker logs on failure
+ if: failure()
+ uses: jwalton/gh-docker-logs@v2.2.1
diff --git a/.github/workflows/e2e-checks.yaml b/.github/workflows/e2e-checks.yaml
index 40178c67d8..a21821e79c 100644
--- a/.github/workflows/e2e-checks.yaml
+++ b/.github/workflows/e2e-checks.yaml
@@ -1,7 +1,7 @@
-name: E2E tests
+name: E2E PR health check
on:
pull_request_target:
- types: ["opened", "edited", "reopened", "synchronize"]
+ types: [ "opened", "edited", "reopened", "synchronize" ]
paths:
- "kafka-ui-api/**"
- "kafka-ui-contract/**"
@@ -42,7 +42,7 @@ jobs:
- name: e2e run
run: |
./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
- ./mvnw -B -V -ntp -DQASEIO_API_TOKEN=${{ secrets.QASEIO_API_TOKEN }} -Dsurefire.suiteXmlFiles='src/test/resources/smoke.xml' -Dsuite=smoke -f 'kafka-ui-e2e-checks' test -Pprod
+ ./mvnw -B -V -ntp -Dsurefire.suiteXmlFiles='src/test/resources/smoke.xml' -f 'kafka-ui-e2e-checks' test -Pprod
- name: Generate allure report
uses: simple-elf/allure-report-action@master
if: always()
@@ -66,7 +66,7 @@ jobs:
uses: Sibz/github-status-action@v1.1.6
with:
authToken: ${{secrets.GITHUB_TOKEN}}
- context: "Test report"
+ context: "Click Details button to open Allure report"
state: "success"
sha: ${{ github.event.pull_request.head.sha || github.sha }}
target_url: http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com/${{ github.run_number }}
diff --git a/.github/workflows/e2e-weekly.yml b/.github/workflows/e2e-weekly.yml
new file mode 100644
index 0000000000..2bf2001ec2
--- /dev/null
+++ b/.github/workflows/e2e-weekly.yml
@@ -0,0 +1,70 @@
+name: E2E Weekly suite
+on:
+ schedule:
+ - cron: '0 1 * * 1'
+
+jobs:
+ build-and-test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ ref: ${{ github.sha }}
+ - name: Set up environment
+ id: set_env_values
+ run: |
+ cat "./kafka-ui-e2e-checks/.env.ci" >> "./kafka-ui-e2e-checks/.env"
+ - name: Pull with Docker
+ id: pull_chrome
+ run: |
+ docker pull selenium/standalone-chrome:103.0
+ - name: Set up JDK
+ uses: actions/setup-java@v3
+ with:
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
+ - name: Build with Maven
+ id: build_app
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp clean install -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
+ - name: Compose with Docker
+ id: compose_app
+ # use the following command until #819 is fixed
+ run: |
+ docker-compose -f ./documentation/compose/e2e-tests.yaml up -d
+ - name: Run test suite
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp -DQASEIO_API_TOKEN=${{ secrets.QASEIO_API_TOKEN }} -Dsurefire.suiteXmlFiles='src/test/resources/sanity.xml' -Dsuite=weekly -f 'kafka-ui-e2e-checks' test -Pprod
+ - name: Generate Allure report
+ uses: simple-elf/allure-report-action@master
+ if: always()
+ id: allure-report
+ with:
+ allure_results: ./kafka-ui-e2e-checks/allure-results
+ gh_pages: allure-results
+ allure_report: allure-report
+ subfolder: allure-results
+ report_url: "http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com"
+ - uses: jakejarvis/s3-sync-action@master
+ if: always()
+ env:
+ AWS_S3_BUCKET: 'kafkaui-allure-reports'
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_REGION: 'eu-central-1'
+ SOURCE_DIR: 'allure-history/allure-results'
+ - name: Deploy report to Amazon S3
+ if: always()
+ uses: Sibz/github-status-action@v1.1.6
+ with:
+ authToken: ${{secrets.GITHUB_TOKEN}}
+ context: "Test report"
+ state: "success"
+ sha: ${{ github.sha }}
+ target_url: http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com/${{ github.run_number }}
+ - name: Dump Docker logs on failure
+ if: failure()
+ uses: jwalton/gh-docker-logs@v2.2.1
diff --git a/kafka-ui-e2e-checks/README.md b/kafka-ui-e2e-checks/README.md
index bd8e0ba704..ed23187635 100644
--- a/kafka-ui-e2e-checks/README.md
+++ b/kafka-ui-e2e-checks/README.md
@@ -38,25 +38,13 @@ cd kafka-ui
docker-compose -f documentation/compose/e2e-tests.yaml up -d
```
-2. Run Smoke test suite using your QaseIO API token as environment variable (put instead %s into command below)
+2. To run a test suite, select its name (options: regression, sanity, smoke) and put it in place of %s in the command below
```
-./mvnw -DQASEIO_API_TOKEN='%s' -Dsurefire.suiteXmlFiles='src/test/resources/smoke.xml' -Dsuite=smoke -f 'kafka-ui-e2e-checks' test -Pprod
+./mvnw -Dsurefire.suiteXmlFiles='src/test/resources/%s.xml' -f 'kafka-ui-e2e-checks' test -Pprod
```
-3. Run Sanity test suite using your QaseIO API token as environment variable (put instead %s into command below)
-
-```
-./mvnw -DQASEIO_API_TOKEN='%s' -Dsurefire.suiteXmlFiles='src/test/resources/sanity.xml' -Dsuite=sanity -f 'kafka-ui-e2e-checks' test -Pprod
-```
-
-4. Run Regression test suite using your QaseIO API token as environment variable (put instead %s into command below)
-
-```
-./mvnw -DQASEIO_API_TOKEN='%s' -Dsurefire.suiteXmlFiles='src/test/resources/regression.xml' -Dsuite=regression -f 'kafka-ui-e2e-checks' test -Pprod
-```
-
-5. To run tests on your local Chrome browser just add next VM option to the Run Configuration
+3. To run tests in your local Chrome browser, add the following VM option to the Run Configuration
```
-Dbrowser=local
diff --git a/kafka-ui-e2e-checks/pom.xml b/kafka-ui-e2e-checks/pom.xml
index 33ee947af0..5a91f0f995 100644
--- a/kafka-ui-e2e-checks/pom.xml
+++ b/kafka-ui-e2e-checks/pom.xml
@@ -15,17 +15,16 @@
3.0.0-M8
${project.version}
1.17.6
- 1.9.2
6.6.3
7.6.1
2.20.1
+ 3.0.2
1.9.9.1
3.23.1
2.2
1.7.36
2.3.1
3.3.1
- 2.1.3
@@ -121,16 +120,6 @@
selenium
${testcontainers.version}
-
- org.junit.platform
- junit-platform-launcher
- ${junit.platform.version}
-
-
- org.junit.platform
- junit-platform-engine
- ${junit.platform.version}
-
com.codeborne
selenide
@@ -151,6 +140,16 @@
allure-testng
${allure.version}
+
+ io.qase
+ qase-testng
+ ${qase.io.version}
+
+
+ io.qase
+ qase-api
+ ${qase.io.version}
+
org.hamcrest
hamcrest
@@ -186,11 +185,6 @@
kafka-ui-contract
${kafka-ui-contract}
-
- io.qase
- qase-api
- ${qase.io.version}
-
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/BaseSource.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/BaseSource.java
index e108d4e554..ebead7b089 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/BaseSource.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/BaseSource.java
@@ -5,16 +5,17 @@ import org.aeonbits.owner.ConfigFactory;
public abstract class BaseSource {
- private static Config config;
- public static final String BASE_CONTAINER_URL = "http://host.testcontainers.internal:8080";
- public static final String BASE_LOCAL_URL = "http://localhost:8080";
- public static final String CLUSTER_NAME = "local";
- public static final String BROWSER = config().browser();
+ public static final String BASE_CONTAINER_URL = "http://host.testcontainers.internal:8080";
+ public static final String BASE_LOCAL_URL = "http://localhost:8080";
+ public static final String CLUSTER_NAME = "local";
+ private static Config config;
+ public static final String BROWSER = config().browser();
+ public static final String SUITE_NAME = config().suite();
- private static Config config() {
- if (config == null) {
- config = ConfigFactory.create(Config.class, System.getProperties());
+ private static Config config() {
+ if (config == null) {
+ config = ConfigFactory.create(Config.class, System.getProperties());
+ }
+ return config;
}
- return config;
- }
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/configs/Profiles.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/configs/Profiles.java
index 27696c46d6..ef61d7d770 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/configs/Profiles.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/configs/Profiles.java
@@ -2,16 +2,16 @@ package com.provectus.kafka.ui.settings.configs;
import org.aeonbits.owner.Config;
-public interface Profiles extends Config {
+import static com.provectus.kafka.ui.variables.Browser.CONTAINER;
+import static com.provectus.kafka.ui.variables.Suite.CUSTOM;
- String CONTAINER = "container";
- String LOCAL = "local";
+public interface Profiles extends Config {
@Key("browser")
@DefaultValue(CONTAINER)
String browser();
@Key("suite")
- @DefaultValue("custom")
+ @DefaultValue(CUSTOM)
String suite();
}
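
The owner-based wiring above is what lets the -Dbrowser and -Dsuite JVM options flow into BaseSource.BROWSER and SUITE_NAME. A minimal sketch of the resolution order, assuming only the Profiles interface shown here (ConfigDemo is illustrative, not part of the patch):

// Demonstrates aeonbits-owner resolution: a -D system property wins,
// otherwise the @DefaultValue applies.
import org.aeonbits.owner.ConfigFactory;

public class ConfigDemo {
  public static void main(String[] args) {
    // e.g. java -Dbrowser=local -Dsuite=smoke ConfigDemo
    Profiles config = ConfigFactory.create(Profiles.class, System.getProperties());
    System.out.println(config.browser()); // "local" if passed, else "container"
    System.out.println(config.suite());   // "smoke" if passed, else "custom"
  }
}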
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/QaseExtension.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/QaseExtension.java
deleted file mode 100644
index e12f070517..0000000000
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/QaseExtension.java
+++ /dev/null
@@ -1,145 +0,0 @@
-package com.provectus.kafka.ui.utilities.qaseIoUtils;
-
-import io.qase.api.QaseClient;
-import io.qase.api.StepStorage;
-import io.qase.api.exceptions.QaseException;
-import io.qase.client.ApiClient;
-import io.qase.client.api.ResultsApi;
-import io.qase.client.model.ResultCreate;
-import io.qase.client.model.ResultCreate.StatusEnum;
-import io.qase.client.model.ResultCreateSteps;
-import lombok.extern.slf4j.Slf4j;
-import org.junit.platform.engine.TestExecutionResult;
-import org.junit.platform.engine.TestSource;
-import org.junit.platform.engine.support.descriptor.MethodSource;
-import org.junit.platform.launcher.TestExecutionListener;
-import org.junit.platform.launcher.TestIdentifier;
-
-import javax.annotation.Nullable;
-import java.lang.reflect.Method;
-import java.text.SimpleDateFormat;
-import java.time.Duration;
-import java.util.*;
-import java.util.concurrent.ConcurrentHashMap;
-
-import static io.qase.api.QaseClient.getConfig;
-import static io.qase.api.utils.IntegrationUtils.getCaseId;
-import static io.qase.api.utils.IntegrationUtils.getStacktrace;
-import static org.apache.commons.lang3.StringUtils.isEmpty;
-import static org.junit.platform.engine.TestExecutionResult.Status.SUCCESSFUL;
-
-@Slf4j
-public class QaseExtension implements TestExecutionListener {
-
- private static final String QASE_PROJECT = "KAFKAUI";
-
- static {
- String qaseApiToken = System.getProperty("QASEIO_API_TOKEN");
- if (isEmpty(qaseApiToken)) {
- log.warn("QASEIO_API_TOKEN system property is not set. Support for Qase will be disabled.");
- System.setProperty("QASE_ENABLE", "false");
- } else {
- System.setProperty("QASE_ENABLE", "true");
- System.setProperty("QASE_PROJECT_CODE", QASE_PROJECT);
- System.setProperty("QASE_API_TOKEN", qaseApiToken);
- System.setProperty("QASE_USE_BULK", "false");
- if ("true".equalsIgnoreCase(System.getProperty("QASEIO_CREATE_TESTRUN"))) {
- System.setProperty("QASE_RUN_NAME", "Automation run " +
- new SimpleDateFormat("dd/MM/yyyy HH:mm:ss").format(new Date()));
- }
- }
- }
-
- private final ApiClient apiClient = QaseClient.getApiClient();
- private final ResultsApi resultsApi = new ResultsApi(apiClient);
- private final Map<TestIdentifier, Long> testStartTimes = new ConcurrentHashMap<>();
-
- @Override
- public void executionStarted(TestIdentifier testIdentifier) {
- if (QaseClient.isEnabled() && testIdentifier.isTest()) {
- testStartTimes.put(testIdentifier, System.currentTimeMillis());
- }
- }
-
- @Override
- public void executionFinished(TestIdentifier testIdentifier, TestExecutionResult testExecutionResult) {
- if (!testIdentifier.isTest() || !QaseClient.isEnabled()
- || !testStartTimes.containsKey(testIdentifier)) {
- return;
- }
- TestSource testSource = testIdentifier.getSource().orElse(null);
- Method testMethod = null;
- if (testSource instanceof MethodSource) {
- testMethod = getMethod((MethodSource) testSource);
- }
- TestCaseGenerator.createTestCaseIfNotExists(testMethod);
- Duration duration = Duration.ofMillis(System.currentTimeMillis() - this.testStartTimes.remove(testIdentifier));
- sendResults(testExecutionResult, duration, testMethod);
- }
-
- private void sendResults(TestExecutionResult testExecutionResult, Duration timeSpent, Method testMethod) {
- if (testMethod != null) {
- ResultCreate resultCreate = getResultItem(testExecutionResult, timeSpent, testMethod);
- try {
- resultsApi.createResult(getConfig().projectCode(),
- getConfig().runId(),
- resultCreate);
- log.info("Method = " + testMethod.getName() + ": Result added to test run with Id = {}", getConfig().runId());
- } catch (QaseException e) {
- log.error("Method = " + testMethod.getName() + ": Result not added to test Run because there is no @CaseId annotation or case not found", e);
- }
- }
- }
-
- private ResultCreate getResultItem(TestExecutionResult testExecutionResult, Duration timeSpent, Method testMethod) {
- String testCaseTitle = TestCaseGenerator.generateTestCaseTitle(testMethod);
- TestCaseGenerator.createTestCaseIfNotExists(testMethod);
- Long caseId = getCaseId(testMethod);
- Map<Long, String> cases = TestCaseGenerator.getTestCasesTitleAndId();
- StatusEnum status = StatusEnum.SKIPPED;
-
- if (caseId == null || !TestCaseGenerator.isCaseIdPresentInQaseIo(testMethod)) {
- for (Map.Entry<Long, String> map : cases.entrySet()) {
- if (map.getValue().matches(testCaseTitle)) {
- caseId = map.getKey();
- log.info("There is no annotation @CaseId but there is test case with title '" + testCaseTitle + "' and with id = " + caseId
- + " that will be added to test Run");
- }
- }
- }
-
- if (TestCaseGenerator.getAutomationStatus(testMethod) == 2) {
- status = testExecutionResult.getStatus() == SUCCESSFUL ? StatusEnum.PASSED : StatusEnum.FAILED;
- }
-
- String comment = testExecutionResult.getThrowable()
- .flatMap(throwable -> Optional.of(throwable.toString())).orElse(null);
- Boolean isDefect = testExecutionResult.getThrowable()
- .flatMap(throwable -> Optional.of(throwable instanceof AssertionError))
- .orElse(false);
- String stacktrace = testExecutionResult.getThrowable()
- .flatMap(throwable -> Optional.of(getStacktrace(throwable))).orElse(null);
- LinkedList<ResultCreateSteps> steps = StepStorage.getSteps();
- return new ResultCreate()
- .caseId(caseId)
- .status(status)
- .timeMs(timeSpent.toMillis())
- .comment(comment)
- .stacktrace(stacktrace)
- .steps(steps.isEmpty() ? null : steps)
- .defect(isDefect);
- }
-
- @Nullable
- private Method getMethod(MethodSource testSource) {
- try {
- Class<?> testClass = Class.forName(testSource.getClassName());
- return Arrays.stream(testClass.getDeclaredMethods())
- .filter(method -> MethodSource.from(method).equals(testSource))
- .findFirst().orElse(null);
- } catch (ClassNotFoundException e) {
- log.error(e.getMessage());
- return null;
- }
- }
-}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/TestCaseGenerator.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/TestCaseGenerator.java
deleted file mode 100644
index 1c534b7395..0000000000
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/TestCaseGenerator.java
+++ /dev/null
@@ -1,173 +0,0 @@
-package com.provectus.kafka.ui.utilities.qaseIoUtils;
-
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
-import io.qase.api.QaseClient;
-import io.qase.api.annotation.CaseId;
-import io.qase.client.ApiClient;
-import io.qase.client.api.CasesApi;
-import io.qase.client.model.*;
-import lombok.SneakyThrows;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang3.ArrayUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.junit.Assert;
-import org.junit.platform.engine.support.descriptor.MethodSource;
-
-import java.lang.reflect.Method;
-import java.util.*;
-
-import static io.qase.api.QaseClient.getConfig;
-
-@Slf4j
-public class TestCaseGenerator {
-
- private static final ApiClient apiClient = QaseClient.getApiClient();
- private static final CasesApi casesApi = new CasesApi(apiClient);
- public static boolean FAILED = false;
-
- @SneakyThrows
- public static void createTestCaseIfNotExists(Method testMethod) {
- TestCaseCreate caseCreate = new TestCaseCreate();
- String testCaseTitle = generateTestCaseTitle(testMethod);
- if (!isMethodAnnotatedWithCaseId(testMethod) || !TestCaseGenerator.isCaseIdPresentInQaseIo(testMethod)) {
- if (!isCaseTitleExistInQaseIo(testMethod)) {
- caseCreate.setTitle(testCaseTitle);
- caseCreate.setAutomation(getAutomationStatus(testMethod));
- if (isMethodAnnotatedWithSuite(testMethod)) {
- long suiteId = testMethod.getAnnotation(Suite.class).suiteId();
- caseCreate.suiteId(suiteId);
- }
- Long caseId = Objects.requireNonNull(casesApi.createCase(getConfig().projectCode(), caseCreate).getResult()).getId();
- log.info("New test case = '" + testCaseTitle + "' created with id " + caseId);
- }
- }
- }
-
- @SneakyThrows
- public static HashMap<Long, String> getTestCasesTitleAndId() {
- HashMap<Long, String> map = new HashMap<>();
- boolean getCases = true;
- int offSet = 0;
- while (getCases) {
- getCases = false;
- TestCaseListResponse response =
- casesApi.getCases(getConfig().projectCode(), new Filters().status(Filters.SERIALIZED_NAME_STATUS), 100, offSet);
- TestCaseListResponseAllOfResult result = response.getResult();
- Assert.assertNotNull(result);
- List<TestCase> entities = result.getEntities();
- Assert.assertNotNull(entities);
- if (entities.size() > 0) {
- for (TestCase test : entities) {
- map.put(test.getId(), test.getTitle());
- }
- offSet = offSet + 100;
- getCases = true;
- }
- }
- return map;
- }
-
- public static boolean isCaseIdPresentInQaseIo(Method testMethod) {
- if (!testMethod.isAnnotationPresent(CaseId.class)) {
- return false;
- }
- long caseId = testMethod.getAnnotation(CaseId.class).value();
- HashMap<Long, String> cases = getTestCasesTitleAndId();
- String title;
- if (!cases.containsKey(caseId)) {
- FAILED = true;
- log.error("The method " + testMethod.getName() + " has wrong @CaseId =" + caseId + " that does not exist in Qase.io. " +
- "Please put correct @CaseId");
- return false;
- } else {
- for (Map.Entry<Long, String> map : cases.entrySet()) {
- if (map.getKey().equals(caseId)) {
- title = map.getValue();
- if (!title.matches(generateTestCaseTitle(testMethod))) {
- log.error("This CaseId =" + caseId + " belong to test with title = " + title);
- return false;
- }
- }
- }
- }
- return true;
- }
-
- private static boolean isCaseTitleExistInQaseIo(Method testMethod) {
- HashMap<Long, String> cases = getTestCasesTitleAndId();
- String title = generateTestCaseTitle(testMethod);
- if (cases.containsValue(title)) {
- for (Map.Entry<Long, String> map : cases.entrySet()) {
- if (map.getValue().matches(title)) {
- long caseId = map.getKey();
- log.info("Test case with title '" + title + "' and id " + caseId + " exist in Qase.io. Verify that annotation @CaseId is correct");
- return true;
- }
- }
- }
- return false;
- }
-
- public static int getAutomationStatus(Method testMethod) {
- if (testMethod.isAnnotationPresent(AutomationStatus.class)) {
- if (testMethod.getAnnotation(AutomationStatus.class).status().equals(Status.TO_BE_AUTOMATED))
- return 1;
- else if (testMethod.getAnnotation(AutomationStatus.class).status().equals(Status.MANUAL))
- return 0;
- }
- return 2;
- }
-
- private static boolean isMethodAnnotatedWithCaseId(Method testMethod) {
- if (!testMethod.isAnnotationPresent(CaseId.class)) {
- FAILED = true;
- log.error("You must put annotation @CaseId. The method " + testMethod.getName() + " is NOT annotated with @CaseId.");
- return false;
- }
- return true;
- }
-
- private static boolean isMethodAnnotatedWithSuite(Method testMethod) {
- if (!testMethod.isAnnotationPresent(Suite.class)) {
- log.info("The method " + testMethod.getName() + " is not annotated with @Suite and new test case will be added without suite");
- return false;
- }
- log.trace("The method is annotated with @Suite with id " + testMethod.getAnnotation(Suite.class).suiteId());
- return true;
- }
-
- private static boolean isMethodAnnotatedWithAutomationStatus(Method testMethod) {
- if (!testMethod.isAnnotationPresent(AutomationStatus.class)) {
- log.error("The method " + testMethod.getName() + " is NOT annotated with @AutomationStatus.");
- return false;
- }
- return true;
- }
-
- private static String formatTestCaseTitle(String testMethodName) {
- String[] split = StringUtils.splitByCharacterTypeCamelCase(testMethodName);
- String[] name = Arrays.stream(split).map(String::toLowerCase).toArray(String[]::new);
- String[] subarray = ArrayUtils.subarray(name, 1, name.length);
- ArrayList<String> stringList = new ArrayList<>(Arrays.asList(subarray));
- stringList.add(0, StringUtils.capitalize(name[0]));
- return StringUtils.join(stringList, " ");
- }
-
- public static String generateTestCaseTitle(Method testMethod) {
- return getClassName(MethodSource.from(testMethod)) + "." + testMethod.getName() + " : " +
- formatTestCaseTitle(testMethod.getName());
- }
-
- private static String getClassName(MethodSource testSource) {
- Class<?> testClass;
- try {
- testClass = Class.forName(testSource.getClassName());
- } catch (ClassNotFoundException e) {
- log.error(e.getMessage());
- return null;
- }
- return testClass.getSimpleName();
- }
-}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/enums/Status.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/enums/Status.java
deleted file mode 100644
index 024acbc91a..0000000000
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/enums/Status.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package com.provectus.kafka.ui.utilities.qaseIoUtils.enums;
-
-public enum Status {
- AUTOMATED, TO_BE_AUTOMATED, MANUAL;
-}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/QaseSetup.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/QaseSetup.java
new file mode 100644
index 0000000000..aa1a1213f9
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/QaseSetup.java
@@ -0,0 +1,34 @@
+package com.provectus.kafka.ui.utilities.qaseUtils;
+
+import lombok.extern.slf4j.Slf4j;
+
+import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
+import java.time.format.DateTimeFormatter;
+
+import static com.provectus.kafka.ui.settings.BaseSource.SUITE_NAME;
+import static com.provectus.kafka.ui.variables.Suite.MANUAL;
+import static org.apache.commons.lang3.BooleanUtils.FALSE;
+import static org.apache.commons.lang3.BooleanUtils.TRUE;
+import static org.apache.commons.lang3.StringUtils.isEmpty;
+
+@Slf4j
+public class QaseSetup {
+
+ public static void testRunSetup() {
+ String qaseApiToken = System.getProperty("QASEIO_API_TOKEN");
+ if (isEmpty(qaseApiToken)) {
+ log.warn("Integration with Qase is disabled: run config or token wasn't defined.");
+ System.setProperty("QASE_ENABLE", FALSE);
+ } else {
+ log.warn("Integration with Qase is enabled. Find this run at https://app.qase.io/run/KAFKAUI.");
+ String automation = SUITE_NAME.equalsIgnoreCase(MANUAL) ? "" : "Automation ";
+ System.setProperty("QASE_ENABLE", TRUE);
+ System.setProperty("QASE_PROJECT_CODE", "KAFKAUI");
+ System.setProperty("QASE_API_TOKEN", qaseApiToken);
+ System.setProperty("QASE_USE_BULK", TRUE);
+ System.setProperty("QASE_RUN_NAME", DateTimeFormatter.ofPattern("dd.MM.yyyy HH:mm")
+ .format(OffsetDateTime.now(ZoneOffset.UTC)) + ": " + automation + SUITE_NAME.toUpperCase() + " suite");
+ }
+ }
+}
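
With the JUnit launcher listener removed, something must invoke testRunSetup() before TestNG starts reporting through io.qase's listener. One plausible wiring is sketched below; the QaseAwareTest base class is an assumption, and the actual hookup is not shown in this excerpt:

// Hypothetical wiring: run Qase setup once before the suite so the QASE_*
// system properties are in place before io.qase's TestNG listener starts.
import com.provectus.kafka.ui.utilities.qaseUtils.QaseSetup;
import org.testng.annotations.BeforeSuite;

public abstract class QaseAwareTest {

  @BeforeSuite
  public void configureQase() {
    QaseSetup.testRunSetup();
  }
}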
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/annotations/AutomationStatus.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/AutomationStatus.java
similarity index 58%
rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/annotations/AutomationStatus.java
rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/AutomationStatus.java
index abaed2a0ee..7f3c0da89e 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/annotations/AutomationStatus.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/AutomationStatus.java
@@ -1,6 +1,6 @@
-package com.provectus.kafka.ui.utilities.qaseIoUtils.annotations;
+package com.provectus.kafka.ui.utilities.qaseUtils.annotations;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
+import com.provectus.kafka.ui.utilities.qaseUtils.enums.Status;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/annotations/Suite.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/Suite.java
similarity index 74%
rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/annotations/Suite.java
rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/Suite.java
index 3e688cd797..09b971b48c 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseIoUtils/annotations/Suite.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/Suite.java
@@ -1,4 +1,4 @@
-package com.provectus.kafka.ui.utilities.qaseIoUtils.annotations;
+package com.provectus.kafka.ui.utilities.qaseUtils.annotations;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/enums/Status.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/enums/Status.java
new file mode 100644
index 0000000000..d97e42b3e4
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/enums/Status.java
@@ -0,0 +1,5 @@
+package com.provectus.kafka.ui.utilities.qaseUtils.enums;
+
+public enum Status {
+ AUTOMATED, TO_BE_AUTOMATED, MANUAL
+}
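For context, a hedged sketch of how the relocated annotations attach to a TestNG case; the `status` member name is an assumption inferred from the imports in the tests below, and the case id is hypothetical:

import com.provectus.kafka.ui.utilities.qaseUtils.annotations.AutomationStatus;
import com.provectus.kafka.ui.utilities.qaseUtils.enums.Status;
import io.qase.api.annotation.CaseId;
import org.testng.annotations.Test;

public class ExampleSmokeTest {
    @Test
    @AutomationStatus(status = Status.AUTOMATED) // mark the case as automated in Qase
    @CaseId(42)                                  // hypothetical Qase case id
    public void exampleCheck() {
        // assertions would go here
    }
}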
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Browser.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Browser.java
new file mode 100644
index 0000000000..cb3873cdb5
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Browser.java
@@ -0,0 +1,7 @@
+package com.provectus.kafka.ui.variables;
+
+public interface Browser {
+
+ String CONTAINER = "container";
+ String LOCAL = "local";
+}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Suite.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Suite.java
new file mode 100644
index 0000000000..c0a823600a
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Suite.java
@@ -0,0 +1,11 @@
+package com.provectus.kafka.ui.variables;
+
+public interface Suite {
+
+ String CUSTOM = "custom";
+ String MANUAL = "manual";
+ String QASE = "qase";
+ String REGRESSION = "regression";
+ String SANITY = "sanity";
+ String SMOKE = "smoke";
+}
diff --git a/kafka-ui-e2e-checks/src/main/resources/META-INF/services/org.junit.platform.launcher.TestExecutionListener b/kafka-ui-e2e-checks/src/main/resources/META-INF/services/org.junit.platform.launcher.TestExecutionListener
deleted file mode 100644
index dd3c3f5e90..0000000000
--- a/kafka-ui-e2e-checks/src/main/resources/META-INF/services/org.junit.platform.launcher.TestExecutionListener
+++ /dev/null
@@ -1 +0,0 @@
-com.provectus.kafka.ui.utilities.qaseIoUtils.QaseExtension
\ No newline at end of file
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java
index bc68cc6637..aa05462dc0 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java
@@ -6,7 +6,8 @@ import com.codeborne.selenide.SelenideElement;
import com.codeborne.selenide.WebDriverRunner;
import com.provectus.kafka.ui.settings.listeners.AllureListener;
import com.provectus.kafka.ui.settings.listeners.LoggerListener;
-import io.qase.api.annotation.Step;
+import io.qameta.allure.Step;
+import io.qase.testng.QaseListener;
import lombok.extern.slf4j.Slf4j;
import org.openqa.selenium.Dimension;
import org.openqa.selenium.chrome.ChromeOptions;
@@ -23,12 +24,13 @@ import java.util.List;
import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.TOPICS;
import static com.provectus.kafka.ui.settings.BaseSource.*;
-import static com.provectus.kafka.ui.settings.configs.Profiles.CONTAINER;
-import static com.provectus.kafka.ui.settings.configs.Profiles.LOCAL;
import static com.provectus.kafka.ui.settings.drivers.LocalWebDriver.*;
+import static com.provectus.kafka.ui.utilities.qaseUtils.QaseSetup.testRunSetup;
+import static com.provectus.kafka.ui.variables.Browser.CONTAINER;
+import static com.provectus.kafka.ui.variables.Browser.LOCAL;
@Slf4j
-@Listeners({AllureListener.class, LoggerListener.class})
+@Listeners({AllureListener.class, LoggerListener.class, QaseListener.class})
public abstract class BaseTest extends Facade {
private static final String SELENIUM_IMAGE_NAME = "selenium/standalone-chrome:103.0";
@@ -41,6 +43,7 @@ public abstract class BaseTest extends Facade {
@BeforeSuite(alwaysRun = true)
public void beforeSuite() {
+ testRunSetup();
switch (BROWSER) {
case (CONTAINER) -> {
DockerImageName image = isARM64()
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/TestClass.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/TestClass.java
new file mode 100644
index 0000000000..c75516955a
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/TestClass.java
@@ -0,0 +1,4 @@
+package com.provectus.kafka.ui.manualSuite;
+
+public class TestClass {
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/TestClass.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/TestClass.java
new file mode 100644
index 0000000000..d3cea52921
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/TestClass.java
@@ -0,0 +1,4 @@
+package com.provectus.kafka.ui.qaseSuite;
+
+public class TestClass {
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/sanitySuite/TestClass.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/sanitySuite/TestClass.java
new file mode 100644
index 0000000000..89402f3fc8
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/sanitySuite/TestClass.java
@@ -0,0 +1,4 @@
+package com.provectus.kafka.ui.sanitySuite;
+
+public class TestClass {
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java
index e0971a0a9e..fc87e88502 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java
@@ -2,8 +2,8 @@ package com.provectus.kafka.ui.smokeSuite;
import com.codeborne.selenide.Condition;
import com.provectus.kafka.ui.BaseTest;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.AutomationStatus;
+import com.provectus.kafka.ui.utilities.qaseUtils.enums.Status;
import io.qase.api.annotation.CaseId;
import org.testng.annotations.Test;
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java
index 7f46456a6c..c2f3e20c4a 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java
@@ -2,9 +2,9 @@ package com.provectus.kafka.ui.smokeSuite.brokers;
import com.codeborne.selenide.Condition;
import com.provectus.kafka.ui.BaseTest;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.AutomationStatus;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
+import com.provectus.kafka.ui.utilities.qaseUtils.enums.Status;
import io.qameta.allure.Step;
import io.qase.api.annotation.CaseId;
import org.testng.Assert;
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java
index 0b162283e4..30b380d6d0 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java
@@ -3,9 +3,9 @@ package com.provectus.kafka.ui.smokeSuite.connectors;
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.models.Connector;
import com.provectus.kafka.ui.models.Topic;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.AutomationStatus;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
+import com.provectus.kafka.ui.utilities.qaseUtils.enums.Status;
import io.qameta.allure.Step;
import io.qase.api.annotation.CaseId;
import org.testng.Assert;
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
index be847b00a7..9716b3905d 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
@@ -3,9 +3,9 @@ package com.provectus.kafka.ui.smokeSuite.ksqlDb;
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.pages.ksqlDb.models.Stream;
import com.provectus.kafka.ui.pages.ksqlDb.models.Table;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.AutomationStatus;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
+import com.provectus.kafka.ui.utilities.qaseUtils.enums.Status;
import io.qase.api.annotation.CaseId;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java
index 5c8871b4fc..6473684c06 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java
@@ -4,9 +4,9 @@ import com.codeborne.selenide.Condition;
import com.provectus.kafka.ui.api.model.CompatibilityLevel;
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.models.Schema;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.AutomationStatus;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
+import com.provectus.kafka.ui.utilities.qaseUtils.enums.Status;
import io.qameta.allure.Step;
import io.qase.api.annotation.CaseId;
import org.testng.Assert;
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
index 66f7d8a50a..2dee8588d7 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
@@ -3,12 +3,12 @@ package com.provectus.kafka.ui.smokeSuite.topics;
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.models.Topic;
import com.provectus.kafka.ui.pages.topics.TopicDetails;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.AutomationStatus;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
+import com.provectus.kafka.ui.utilities.qaseUtils.enums.Status;
import io.qameta.allure.Issue;
+import io.qameta.allure.Step;
import io.qase.api.annotation.CaseId;
-import io.qase.api.annotation.Step;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
index 8b86d82c9e..6f847d7d68 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
@@ -4,9 +4,9 @@ import com.codeborne.selenide.Condition;
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.models.Topic;
import com.provectus.kafka.ui.pages.topics.TopicDetails;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.AutomationStatus;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
+import com.provectus.kafka.ui.utilities.qaseUtils.enums.Status;
import io.qameta.allure.Issue;
import io.qase.api.annotation.CaseId;
import org.testng.Assert;
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/testSuite/TestClass.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/testSuite/TestClass.java
deleted file mode 100644
index af5a4aea3f..0000000000
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/testSuite/TestClass.java
+++ /dev/null
@@ -1,4 +0,0 @@
-package com.provectus.kafka.ui.testSuite;
-
-public class TestClass {
-}
diff --git a/kafka-ui-e2e-checks/src/test/resources/manual.xml b/kafka-ui-e2e-checks/src/test/resources/manual.xml
new file mode 100644
index 0000000000..dff467651e
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/resources/manual.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
diff --git a/kafka-ui-e2e-checks/src/test/resources/qase.xml b/kafka-ui-e2e-checks/src/test/resources/qase.xml
new file mode 100644
index 0000000000..2b5d023b1a
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/resources/qase.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
diff --git a/kafka-ui-e2e-checks/src/test/resources/regression.xml b/kafka-ui-e2e-checks/src/test/resources/regression.xml
index 2c34f4a929..01db95d03b 100644
--- a/kafka-ui-e2e-checks/src/test/resources/regression.xml
+++ b/kafka-ui-e2e-checks/src/test/resources/regression.xml
@@ -4,6 +4,7 @@
+
From 37e6f021b35d6316469819fdaae927467b33de46 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 21 Feb 2023 17:21:13 +0400
Subject: [PATCH 23/54] Bump testng from 7.6.1 to 7.7.0 in /kafka-ui-e2e-checks
(#3304)
Bumps [testng](https://github.com/cbeust/testng) from 7.6.1 to 7.7.0.
- [Release notes](https://github.com/cbeust/testng/releases)
- [Changelog](https://github.com/cbeust/testng/blob/master/CHANGES.txt)
- [Commits](https://github.com/cbeust/testng/compare/7.6.1...7.7.0)
---
updated-dependencies:
- dependency-name: org.testng:testng
dependency-type: direct:production
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
kafka-ui-e2e-checks/pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/kafka-ui-e2e-checks/pom.xml b/kafka-ui-e2e-checks/pom.xml
index 5a91f0f995..833f7a9224 100644
--- a/kafka-ui-e2e-checks/pom.xml
+++ b/kafka-ui-e2e-checks/pom.xml
@@ -16,7 +16,7 @@
<kafka-ui-contract>${project.version}</kafka-ui-contract>
<testcontainers.version>1.17.6</testcontainers.version>
<selenide.version>6.6.3</selenide.version>
- <testng.version>7.6.1</testng.version>
+ <testng.version>7.7.0</testng.version>
<allure.version>2.20.1</allure.version>
<qase.io.version>3.0.2</qase.io.version>
<aspectj.version>1.9.9.1</aspectj.version>
From fadd307564183eeb93169b147b0c2baf5b738641 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 21 Feb 2023 17:24:13 +0400
Subject: [PATCH 24/54] Bump svenstaro/upload-release-action from 2.3.0 to
2.4.1 (#3306)
Bumps [svenstaro/upload-release-action](https://github.com/svenstaro/upload-release-action) from 2.3.0 to 2.4.1.
- [Release notes](https://github.com/svenstaro/upload-release-action/releases)
- [Changelog](https://github.com/svenstaro/upload-release-action/blob/master/CHANGELOG.md)
- [Commits](https://github.com/svenstaro/upload-release-action/compare/2.3.0...2.4.1)
---
updated-dependencies:
- dependency-name: svenstaro/upload-release-action
dependency-type: direct:production
update-type: version-update:semver-minor
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
.github/workflows/release.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 0e87c10fd2..af538c90c8 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -33,7 +33,7 @@ jobs:
echo "version=${VERSION}" >> $GITHUB_OUTPUT
- name: Upload files to a GitHub release
- uses: svenstaro/upload-release-action@2.3.0
+ uses: svenstaro/upload-release-action@2.4.1
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: kafka-ui-api/target/kafka-ui-api-${{ steps.build.outputs.version }}.jar
From c9f0298000c7752049e626bf4a061cff3926e97f Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 21 Feb 2023 17:25:12 +0400
Subject: [PATCH 25/54] Bump aquasecurity/trivy-action from 0.8.0 to 0.9.1
(#3337)
Bumps [aquasecurity/trivy-action](https://github.com/aquasecurity/trivy-action) from 0.8.0 to 0.9.1.
- [Release notes](https://github.com/aquasecurity/trivy-action/releases)
- [Commits](https://github.com/aquasecurity/trivy-action/compare/0.8.0...0.9.1)
---
updated-dependencies:
- dependency-name: aquasecurity/trivy-action
dependency-type: direct:production
update-type: version-update:semver-minor
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
.github/workflows/cve.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/cve.yaml b/.github/workflows/cve.yaml
index ef7d784ca1..5b0e2779dd 100644
--- a/.github/workflows/cve.yaml
+++ b/.github/workflows/cve.yaml
@@ -55,7 +55,7 @@ jobs:
cache-to: type=local,dest=/tmp/.buildx-cache
- name: Run CVE checks
- uses: aquasecurity/trivy-action@0.8.0
+ uses: aquasecurity/trivy-action@0.9.1
with:
image-ref: "provectuslabs/kafka-ui:${{ steps.build.outputs.version }}"
format: "table"
From e584b15d97d0c5e14e4a1aa24a951dcd02ee336b Mon Sep 17 00:00:00 2001
From: mkuts12
Date: Tue, 21 Feb 2023 15:40:05 +0200
Subject: [PATCH 26/54] Update serialization guide (#3375)
The config key appears to be `serde` rather than `serdes`, judging by this
[line](https://github.com/provectus/kafka-ui/blob/f2ec4d76de481f1a049ae59c4c6fa352bdab7a30/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java#L41),
but I'm not familiar enough with Java to be sure.
---
documentation/guides/Serialization.md | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/documentation/guides/Serialization.md b/documentation/guides/Serialization.md
index 19869a8bc1..3f827441d2 100644
--- a/documentation/guides/Serialization.md
+++ b/documentation/guides/Serialization.md
@@ -20,7 +20,7 @@ kafka:
clusters:
- name: Cluster1
# Other Cluster configuration omitted ...
- serdes:
+ serde:
# registering String serde with custom config
- name: AsciiString
className: com.provectus.kafka.ui.serdes.builtin.StringSerde
@@ -43,7 +43,7 @@ kafka:
clusters:
- name: Cluster1
# Other Cluster configuration omitted ...
- serdes:
+ serde:
- name: ProtobufFile
properties:
# path to the protobuf schema files
@@ -84,7 +84,7 @@ kafka:
- name: Cluster1
# this url will be used by "SchemaRegistry" by default
schemaRegistry: http://main-schema-registry:8081
- serdes:
+ serde:
- name: AnotherSchemaRegistry
className: com.provectus.kafka.ui.serdes.builtin.sr.SchemaRegistrySerde
properties:
@@ -109,7 +109,7 @@ Sample configuration:
kafka:
clusters:
- name: Cluster1
- serdes:
+ serde:
- name: String
topicKeysPattern: click-events|imp-events
@@ -131,7 +131,7 @@ kafka:
- name: Cluster1
defaultKeySerde: Int32
defaultValueSerde: String
- serdes:
+ serde:
- name: Int32
topicKeysPattern: click-events|imp-events
```
@@ -156,7 +156,7 @@ Sample configuration:
kafka:
clusters:
- name: Cluster1
- serdes:
+ serde:
- name: MyCustomSerde
className: my.lovely.org.KafkaUiSerde
filePath: /var/lib/kui-serde/my-kui-serde.jar
From 741bbc1be1bd8dba4fdfe854ca01f6f60b80c0a0 Mon Sep 17 00:00:00 2001
From: Ilya Kuramshin
Date: Wed, 22 Feb 2023 11:50:49 +0400
Subject: [PATCH 27/54] [BE] Suppress kafka authorization errors (#3376)
* wip
* wip
* typo fix, minor impr
* test fixes
* wip
---------
Co-authored-by: iliax
Co-authored-by: Roman Zabaluev
---
.../kafka/ui/service/ReactiveAdminClient.java | 35 +++++++++++++------
.../kafka/ui/service/TopicsService.java | 9 +++--
2 files changed, 32 insertions(+), 12 deletions(-)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
index ea56edcf96..7cdf2ef16d 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
@@ -62,9 +62,11 @@ import org.apache.kafka.common.TopicPartitionInfo;
import org.apache.kafka.common.TopicPartitionReplica;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.config.ConfigResource;
+import org.apache.kafka.common.errors.ClusterAuthorizationException;
import org.apache.kafka.common.errors.GroupIdNotFoundException;
import org.apache.kafka.common.errors.GroupNotEmptyException;
import org.apache.kafka.common.errors.InvalidRequestException;
+import org.apache.kafka.common.errors.TopicAuthorizationException;
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
import org.apache.kafka.common.errors.UnsupportedVersionException;
import org.apache.kafka.common.requests.DescribeLogDirsResponse;
@@ -176,6 +178,7 @@ public class ReactiveAdminClient implements Closeable {
}
//NOTE: skips not-found topics (for which UnknownTopicOrPartitionException was thrown by AdminClient)
+ //and topics for which DESCRIBE_CONFIGS permission is not set (TopicAuthorizationException was thrown)
public Mono
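A hedged sketch of the suppression pattern this patch describes: per-topic failures caused by a missing DESCRIBE_CONFIGS permission collapse to an empty result instead of failing the whole request. The method names and topics below are illustrative stand-ins, not the actual AdminClient wiring:

import org.apache.kafka.common.errors.TopicAuthorizationException;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

public class AuthErrorSuppressionDemo {
    public static void main(String[] args) {
        Flux.just("public-topic", "secured-topic")
            .flatMap(topic -> describeConfigs(topic)
                // unauthorized topics are silently skipped, mirroring the NOTE above
                .onErrorResume(TopicAuthorizationException.class, e -> Mono.empty()))
            .subscribe(System.out::println); // prints only "public-topic configs"
    }

    // hypothetical stand-in for the real AdminClient describe call
    private static Mono<String> describeConfigs(String topic) {
        return topic.startsWith("secured")
            ? Mono.error(new TopicAuthorizationException("not authorized: " + topic))
            : Mono.just(topic + " configs");
    }
}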
+        <dependency>
+            <groupId>org.opendatadiscovery</groupId>
+            <artifactId>oddrn-generator-java</artifactId>
+            <version>${odd-oddrn-generator.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.opendatadiscovery</groupId>
+            <artifactId>ingestion-contract-client</artifactId>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.springframework.boot</groupId>
+                    <artifactId>spring-boot-starter-webflux</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>io.projectreactor</groupId>
+                    <artifactId>reactor-core</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>io.projectreactor.ipc</groupId>
+                    <artifactId>reactor-netty</artifactId>
+                </exclusion>
+            </exclusions>
+            <version>${odd-oddrn-client.version}</version>
+        </dependency>
+
        <dependency>
            <groupId>org.springframework.security</groupId>
            <artifactId>spring-security-ldap</artifactId>
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java
index 2f8bb2dbf4..9ffd901c07 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java
@@ -37,10 +37,10 @@ public class KafkaConnectController extends AbstractController implements KafkaC
public Mono<ResponseEntity<Flux<ConnectDTO>>> getConnects(String clusterName,
ServerWebExchange exchange) {
- Flux<ConnectDTO> flux = Flux.fromIterable(kafkaConnectService.getConnects(getCluster(clusterName)))
+ Flux<ConnectDTO> availableConnects = kafkaConnectService.getConnects(getCluster(clusterName))
.filterWhen(dto -> accessControlService.isConnectAccessible(dto, clusterName));
- return Mono.just(ResponseEntity.ok(flux));
+ return Mono.just(ResponseEntity.ok(availableConnects));
}
@Override
@@ -54,7 +54,7 @@ public class KafkaConnectController extends AbstractController implements KafkaC
.build());
return validateAccess.thenReturn(
- ResponseEntity.ok(kafkaConnectService.getConnectors(getCluster(clusterName), connectName))
+ ResponseEntity.ok(kafkaConnectService.getConnectorNames(getCluster(clusterName), connectName))
);
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConfigSanitizer.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConfigSanitizer.java
index 30daa1ca57..aa26709822 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConfigSanitizer.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConfigSanitizer.java
@@ -17,7 +17,8 @@ import org.springframework.stereotype.Component;
class KafkaConfigSanitizer extends Sanitizer {
private static final List<String> DEFAULT_PATTERNS_TO_SANITIZE = Arrays.asList(
"basic.auth.user.info", /* For Schema Registry credentials */
- "password", "secret", "token", "key", ".*credentials.*" /* General credential patterns */
+ "password", "secret", "token", "key", ".*credentials.*", /* General credential patterns */
+ "aws.access.*", "aws.secret.*", "aws.session.*" /* AWS-related credential patterns */
);
KafkaConfigSanitizer(
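To illustrate the new AWS patterns, a self-contained sketch of the regex matching only (each sanitize entry is treated as a case-insensitive regex against the config key name; this is not the actual Sanitizer internals):

import java.util.regex.Pattern;

public class AwsPatternDemo {
    public static void main(String[] args) {
        Pattern awsSecret = Pattern.compile("aws.secret.*", Pattern.CASE_INSENSITIVE);
        System.out.println(awsSecret.matcher("aws.secret.access.key").matches()); // true
        System.out.println(awsSecret.matcher("bootstrap.servers").matches());     // false
    }
}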
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java
index cfc190cffd..163732fae9 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java
@@ -28,10 +28,10 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
-import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
+import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@@ -40,7 +40,6 @@ import org.springframework.stereotype.Service;
import org.springframework.web.reactive.function.client.WebClientResponseException;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
-import reactor.util.function.Tuple2;
import reactor.util.function.Tuples;
@Service
@@ -52,18 +51,18 @@ public class KafkaConnectService {
private final ObjectMapper objectMapper;
private final KafkaConfigSanitizer kafkaConfigSanitizer;
- public List<ConnectDTO> getConnects(KafkaCluster cluster) {
- return Optional.ofNullable(cluster.getOriginalProperties().getKafkaConnect())
- .map(lst -> lst.stream().map(clusterMapper::toKafkaConnect).toList())
- .orElse(List.of());
+ public Flux<ConnectDTO> getConnects(KafkaCluster cluster) {
+ return Flux.fromIterable(
+ Optional.ofNullable(cluster.getOriginalProperties().getKafkaConnect())
+ .map(lst -> lst.stream().map(clusterMapper::toKafkaConnect).toList())
+ .orElse(List.of())
+ );
}
public Flux<FullConnectorInfoDTO> getAllConnectors(final KafkaCluster cluster,
- final String search) {
- Mono<Flux<ConnectDTO>> clusters = Mono.just(Flux.fromIterable(getConnects(cluster))); // TODO get rid
- return clusters
- .flatMapMany(Function.identity())
- .flatMap(connect -> getConnectorNames(cluster, connect.getName()))
+ @Nullable final String search) {
+ return getConnects(cluster)
+ .flatMap(connect -> getConnectorNames(cluster, connect.getName()).map(cn -> Tuples.of(connect.getName(), cn)))
.flatMap(pair -> getConnector(cluster, pair.getT1(), pair.getT2()))
.flatMap(connector ->
getConnectorConfig(cluster, connector.getConnect(), connector.getName())
@@ -99,56 +98,46 @@ public class KafkaConnectService {
.filter(matchesSearchTerm(search));
}
- private Predicate<FullConnectorInfoDTO> matchesSearchTerm(final String search) {
- return connector -> getSearchValues(connector)
- .anyMatch(value -> value.contains(
- StringUtils.defaultString(
- search,
- StringUtils.EMPTY)
- .toUpperCase()));
+ private Predicate<FullConnectorInfoDTO> matchesSearchTerm(@Nullable final String search) {
+ if (search == null) {
+ return c -> true;
+ }
+ return connector -> getStringsForSearch(connector)
+ .anyMatch(string -> StringUtils.containsIgnoreCase(string, search));
}
- private Stream<String> getSearchValues(FullConnectorInfoDTO fullConnectorInfo) {
+ private Stream<String> getStringsForSearch(FullConnectorInfoDTO fullConnectorInfo) {
return Stream.of(
- fullConnectorInfo.getName(),
- fullConnectorInfo.getStatus().getState().getValue(),
- fullConnectorInfo.getType().getValue())
- .map(String::toUpperCase);
+ fullConnectorInfo.getName(),
+ fullConnectorInfo.getStatus().getState().getValue(),
+ fullConnectorInfo.getType().getValue());
}
- private Mono<ConnectorTopics> getConnectorTopics(KafkaCluster cluster, String connectClusterName,
- String connectorName) {
+ public Mono<ConnectorTopics> getConnectorTopics(KafkaCluster cluster, String connectClusterName,
+ String connectorName) {
return api(cluster, connectClusterName)
.mono(c -> c.getConnectorTopics(connectorName))
.map(result -> result.get(connectorName))
- // old connectors don't have this api, setting empty list for
+ // old Connect API versions don't have this endpoint, setting empty list for
// backward-compatibility
.onErrorResume(Exception.class, e -> Mono.just(new ConnectorTopics().topics(List.of())));
}
- private Flux<Tuple2<String, String>> getConnectorNames(KafkaCluster cluster, String connectName) {
- return getConnectors(cluster, connectName)
- .collectList().map(e -> e.get(0))
+ public Flux<String> getConnectorNames(KafkaCluster cluster, String connectName) {
+ return api(cluster, connectName)
+ .flux(client -> client.getConnectors(null))
// for some reason `getConnectors` method returns the response as a single string
- .map(this::parseToList)
- .flatMapMany(Flux::fromIterable)
- .map(connector -> Tuples.of(connectName, connector));
+ .collectList().map(e -> e.get(0))
+ .map(this::parseConnectorsNamesStringToList)
+ .flatMapMany(Flux::fromIterable);
}
@SneakyThrows
- private List<String> parseToList(String json) {
+ private List<String> parseConnectorsNamesStringToList(String json) {
return objectMapper.readValue(json, new TypeReference<>() {
});
}
- public Flux<String> getConnectors(KafkaCluster cluster, String connectName) {
- return api(cluster, connectName)
- .flux(client ->
- client.getConnectors(null)
- .doOnError(e -> log.error("Unexpected error upon getting connectors", e))
- );
- }
-
public Mono<ConnectorDTO> createConnector(KafkaCluster cluster, String connectName,
Mono<NewConnectorDTO> connector) {
return api(cluster, connectName)
@@ -171,9 +160,7 @@ public class KafkaConnectService {
private Mono<Boolean> connectorExists(KafkaCluster cluster, String connectName,
String connectorName) {
return getConnectorNames(cluster, connectName)
- .map(Tuple2::getT2)
- .collectList()
- .map(connectorNames -> connectorNames.contains(connectorName));
+ .any(name -> name.equals(connectorName));
}
public Mono<ConnectorDTO> getConnector(KafkaCluster cluster, String connectName,
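Since the Connect REST response arrives as one JSON-array string (per the comment above), here is a self-contained sketch of the Jackson parsing the service relies on; the payload is hypothetical:

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;

public class ConnectorNamesParsingDemo {
    public static void main(String[] args) throws Exception {
        // the whole array comes back as a single string from the client
        String json = "[\"sink_postgres_activities\",\"source_file_demo\"]"; // hypothetical response
        List<String> names = new ObjectMapper().readValue(json, new TypeReference<List<String>>() {});
        System.out.println(names); // [sink_postgres_activities, source_file_demo]
    }
}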
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/ConnectorInfo.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/ConnectorInfo.java
new file mode 100644
index 0000000000..a7844579bf
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/ConnectorInfo.java
@@ -0,0 +1,167 @@
+package com.provectus.kafka.ui.service.integration.odd;
+
+import com.provectus.kafka.ui.model.ConnectorTypeDTO;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.function.Function;
+import java.util.stream.Stream;
+import javax.annotation.Nullable;
+import org.apache.commons.collections.CollectionUtils;
+import org.opendatadiscovery.oddrn.JdbcUrlParser;
+import org.opendatadiscovery.oddrn.model.HivePath;
+import org.opendatadiscovery.oddrn.model.MysqlPath;
+import org.opendatadiscovery.oddrn.model.PostgreSqlPath;
+import org.opendatadiscovery.oddrn.model.SnowflakePath;
+
+record ConnectorInfo(List<String> inputs,
+ List<String> outputs) {
+
+ static ConnectorInfo extract(String className,
+ ConnectorTypeDTO type,
+ Map<String, Object> config,
+ List<String> topicsFromApi, // can be empty for old Connect API versions
+ Function<String, String> topicOddrnBuilder) {
+ return switch (className) {
+ case "org.apache.kafka.connect.file.FileStreamSinkConnector",
+ "org.apache.kafka.connect.file.FileStreamSourceConnector",
+ "FileStreamSource",
+ "FileStreamSink" -> extractFileIoConnector(type, topicsFromApi, config, topicOddrnBuilder);
+ case "io.confluent.connect.s3.S3SinkConnector" -> extractS3Sink(type, topicsFromApi, config, topicOddrnBuilder);
+ case "io.confluent.connect.jdbc.JdbcSinkConnector" ->
+ extractJdbcSink(type, topicsFromApi, config, topicOddrnBuilder);
+ case "io.debezium.connector.postgresql.PostgresConnector" -> extractDebeziumPg(config);
+ case "io.debezium.connector.mysql.MySqlConnector" -> extractDebeziumMysql(config);
+ default -> new ConnectorInfo(
+ extractInputs(type, topicsFromApi, config, topicOddrnBuilder),
+ extractOutputs(type, topicsFromApi, config, topicOddrnBuilder)
+ );
+ };
+ }
+
+ private static ConnectorInfo extractFileIoConnector(ConnectorTypeDTO type,
+ List<String> topics,
+ Map<String, Object> config,
+ Function<String, String> topicOddrnBuilder) {
+ return new ConnectorInfo(
+ extractInputs(type, topics, config, topicOddrnBuilder),
+ extractOutputs(type, topics, config, topicOddrnBuilder)
+ );
+ }
+
+ private static ConnectorInfo extractJdbcSink(ConnectorTypeDTO type,
+ List<String> topics,
+ Map<String, Object> config,
+ Function<String, String> topicOddrnBuilder) {
+ String tableNameFormat = (String) config.getOrDefault("table.name.format", "${topic}");
+ List<String> targetTables = extractTopicNamesBestEffort(topics, config)
+ .map(topic -> tableNameFormat.replace("${topic}", topic))
+ .toList();
+
+ String connectionUrl = (String) config.get("connection.url");
+ List<String> outputs = new ArrayList<>();
+ @Nullable var knownJdbcPath = new JdbcUrlParser().parse(connectionUrl);
+ if (knownJdbcPath instanceof PostgreSqlPath p) {
+ targetTables.forEach(t -> outputs.add(p.toBuilder().table(t).build().oddrn()));
+ }
+ if (knownJdbcPath instanceof MysqlPath p) {
+ targetTables.forEach(t -> outputs.add(p.toBuilder().table(t).build().oddrn()));
+ }
+ if (knownJdbcPath instanceof HivePath p) {
+ targetTables.forEach(t -> outputs.add(p.toBuilder().table(t).build().oddrn()));
+ }
+ if (knownJdbcPath instanceof SnowflakePath p) {
+ targetTables.forEach(t -> outputs.add(p.toBuilder().table(t).build().oddrn()));
+ }
+ return new ConnectorInfo(
+ extractInputs(type, topics, config, topicOddrnBuilder),
+ outputs
+ );
+ }
+
+ private static ConnectorInfo extractDebeziumPg(Map<String, Object> config) {
+ String host = (String) config.get("database.hostname");
+ String dbName = (String) config.get("database.dbname");
+ var inputs = List.of(
+ PostgreSqlPath.builder()
+ .host(host)
+ .database(dbName)
+ .build().oddrn()
+ );
+ return new ConnectorInfo(inputs, List.of());
+ }
+
+ private static ConnectorInfo extractDebeziumMysql(Map<String, Object> config) {
+ String host = (String) config.get("database.hostname");
+ var inputs = List.of(
+ MysqlPath.builder()
+ .host(host)
+ .build()
+ .oddrn()
+ );
+ return new ConnectorInfo(inputs, List.of());
+ }
+
+ private static ConnectorInfo extractS3Sink(ConnectorTypeDTO type,
+ List<String> topics,
+ Map<String, Object> config,
+ Function<String, String> topicOddrnBuilder) {
+ String bucketName = (String) config.get("s3.bucket.name");
+ String topicsDir = (String) config.getOrDefault("topics.dir", "topics");
+ String directoryDelim = (String) config.getOrDefault("directory.delim", "/");
+ List<String> outputs = extractTopicNamesBestEffort(topics, config)
+ .map(topic -> Oddrn.awsS3Oddrn(bucketName, topicsDir + directoryDelim + topic))
+ .toList();
+ return new ConnectorInfo(
+ extractInputs(type, topics, config, topicOddrnBuilder),
+ outputs
+ );
+ }
+
+ private static List<String> extractInputs(ConnectorTypeDTO type,
+ List<String> topicsFromApi,
+ Map<String, Object> config,
+ Function<String, String> topicOddrnBuilder) {
+ return type == ConnectorTypeDTO.SINK
+ ? extractTopicsOddrns(config, topicsFromApi, topicOddrnBuilder)
+ : List.of();
+ }
+
+ private static List<String> extractOutputs(ConnectorTypeDTO type,
+ List<String> topicsFromApi,
+ Map<String, Object> config,
+ Function<String, String> topicOddrnBuilder) {
+ return type == ConnectorTypeDTO.SOURCE
+ ? extractTopicsOddrns(config, topicsFromApi, topicOddrnBuilder)
+ : List.of();
+ }
+
+ private static Stream<String> extractTopicNamesBestEffort(
+ // topic list can be empty for old Connect API versions
+ List<String> topicsFromApi,
+ Map<String, Object> config
+ ) {
+ if (CollectionUtils.isNotEmpty(topicsFromApi)) {
+ return topicsFromApi.stream();
+ }
+
+ // trying to extract topic names from config
+ String topicsString = (String) config.get("topics");
+ String topicString = (String) config.get("topic");
+ return Stream.of(topicsString, topicString)
+ .filter(Objects::nonNull)
+ .flatMap(str -> Stream.of(str.split(",")))
+ .map(String::trim)
+ .filter(s -> !s.isBlank());
+ }
+
+ private static List<String> extractTopicsOddrns(Map<String, Object> config,
+ List<String> topicsFromApi,
+ Function<String, String> topicOddrnBuilder) {
+ return extractTopicNamesBestEffort(topicsFromApi, config)
+ .map(topicOddrnBuilder)
+ .toList();
+ }
+
+}
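A usage sketch (same package, since the record is package-private): extracting lineage for a hypothetical Debezium Postgres source. The config values and the oddrn-builder lambda are illustrative stand-ins for Oddrn.topicOddrn(cluster, topic):

package com.provectus.kafka.ui.service.integration.odd;

import com.provectus.kafka.ui.model.ConnectorTypeDTO;
import java.util.List;
import java.util.Map;

class ConnectorInfoDemo {
    public static void main(String[] args) {
        ConnectorInfo info = ConnectorInfo.extract(
            "io.debezium.connector.postgresql.PostgresConnector",
            ConnectorTypeDTO.SOURCE,
            Map.of("database.hostname", "pg-host", "database.dbname", "inventory"), // hypothetical config
            List.of(),                       // old Connect API versions return no topics
            topic -> "topic-oddrn:" + topic  // stand-in for the real oddrn builder
        );
        System.out.println(info.inputs());  // one PostgreSqlPath oddrn built from host + dbname
        System.out.println(info.outputs()); // [] for the Debezium branch above
    }
}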
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporter.java
new file mode 100644
index 0000000000..2fad00bbfa
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporter.java
@@ -0,0 +1,96 @@
+package com.provectus.kafka.ui.service.integration.odd;
+
+import com.provectus.kafka.ui.connect.model.ConnectorTopics;
+import com.provectus.kafka.ui.model.ConnectDTO;
+import com.provectus.kafka.ui.model.ConnectorDTO;
+import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.service.KafkaConnectService;
+import java.net.URI;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import lombok.RequiredArgsConstructor;
+import org.opendatadiscovery.client.model.DataEntity;
+import org.opendatadiscovery.client.model.DataEntityList;
+import org.opendatadiscovery.client.model.DataEntityType;
+import org.opendatadiscovery.client.model.DataSource;
+import org.opendatadiscovery.client.model.DataTransformer;
+import org.opendatadiscovery.client.model.MetadataExtension;
+import reactor.core.publisher.Flux;
+
+@RequiredArgsConstructor
+class ConnectorsExporter {
+
+ private final KafkaConnectService kafkaConnectService;
+
+ Flux<DataEntityList> export(KafkaCluster cluster) {
+ return kafkaConnectService.getConnects(cluster)
+ .flatMap(connect -> kafkaConnectService.getConnectorNames(cluster, connect.getName())
+ .flatMap(connectorName -> kafkaConnectService.getConnector(cluster, connect.getName(), connectorName))
+ .flatMap(connectorDTO ->
+ kafkaConnectService.getConnectorTopics(cluster, connect.getName(), connectorDTO.getName())
+ .map(topics -> createConnectorDataEntity(cluster, connect, connectorDTO, topics)))
+ .buffer(100)
+ .map(connectDataEntities -> {
+ String dsOddrn = Oddrn.connectDataSourceOddrn(connect.getAddress());
+ return new DataEntityList()
+ .dataSourceOddrn(dsOddrn)
+ .items(connectDataEntities);
+ })
+ );
+ }
+
+ Flux<DataSource> getConnectDataSources(KafkaCluster cluster) {
+ return kafkaConnectService.getConnects(cluster)
+ .map(ConnectorsExporter::toDataSource);
+ }
+
+ private static DataSource toDataSource(ConnectDTO connect) {
+ return new DataSource()
+ .oddrn(Oddrn.connectDataSourceOddrn(connect.getAddress()))
+ .name(connect.getName())
+ .description("Kafka Connect");
+ }
+
+ private static DataEntity createConnectorDataEntity(KafkaCluster cluster,
+ ConnectDTO connect,
+ ConnectorDTO connector,
+ ConnectorTopics connectorTopics) {
+ var metadata = new HashMap<>(extractMetadata(connector));
+ metadata.put("type", connector.getType().name());
+
+ var info = extractConnectorInfo(cluster, connector, connectorTopics);
+ DataTransformer transformer = new DataTransformer();
+ transformer.setInputs(info.inputs());
+ transformer.setOutputs(info.outputs());
+
+ return new DataEntity()
+ .oddrn(Oddrn.connectorOddrn(connect.getAddress(), connector.getName()))
+ .name(connector.getName())
+ .description("Kafka Connector \"%s\" (%s)".formatted(connector.getName(), connector.getType()))
+ .type(DataEntityType.JOB)
+ .dataTransformer(transformer)
+ .metadata(List.of(
+ new MetadataExtension()
+ .schemaUrl(URI.create("wontbeused.oops"))
+ .metadata(metadata)));
+ }
+
+ private static Map<String, Object> extractMetadata(ConnectorDTO connector) {
+ // will be sanitized by KafkaConfigSanitizer (if it's enabled)
+ return connector.getConfig();
+ }
+
+ private static ConnectorInfo extractConnectorInfo(KafkaCluster cluster,
+ ConnectorDTO connector,
+ ConnectorTopics topics) {
+ return ConnectorInfo.extract(
+ (String) connector.getConfig().get("connector.class"),
+ connector.getType(),
+ connector.getConfig(),
+ topics.getTopics(),
+ topic -> Oddrn.topicOddrn(cluster, topic)
+ );
+ }
+
+}
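The exporter batches entities with buffer(100) before posting; a tiny reactor sketch (with integers instead of DataEntity objects) of what that operator emits:

import reactor.core.publisher.Flux;

public class BufferBatchingDemo {
    public static void main(String[] args) {
        // buffer(100) groups upstream items into lists of at most 100 elements,
        // which is how the DataEntityList payloads above stay bounded in size.
        Flux.range(1, 250)
            .buffer(100)
            .map(batch -> batch.size())
            .subscribe(System.out::println); // prints 100, 100, 50
    }
}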
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/OddExporter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/OddExporter.java
new file mode 100644
index 0000000000..2917e811be
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/OddExporter.java
@@ -0,0 +1,106 @@
+package com.provectus.kafka.ui.service.integration.odd;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.service.KafkaConnectService;
+import com.provectus.kafka.ui.service.StatisticsCache;
+import java.util.List;
+import java.util.function.Predicate;
+import java.util.regex.Pattern;
+import lombok.SneakyThrows;
+import org.opendatadiscovery.client.ApiClient;
+import org.opendatadiscovery.client.api.OpenDataDiscoveryIngestionApi;
+import org.opendatadiscovery.client.model.DataEntity;
+import org.opendatadiscovery.client.model.DataEntityList;
+import org.opendatadiscovery.client.model.DataSource;
+import org.opendatadiscovery.client.model.DataSourceList;
+import org.springframework.http.HttpHeaders;
+import reactor.core.publisher.Mono;
+
+class OddExporter {
+
+ private final OpenDataDiscoveryIngestionApi oddApi;
+ private final TopicsExporter topicsExporter;
+ private final ConnectorsExporter connectorsExporter;
+
+ public OddExporter(StatisticsCache statisticsCache,
+ KafkaConnectService connectService,
+ OddIntegrationProperties oddIntegrationProperties) {
+ this(
+ createApiClient(oddIntegrationProperties),
+ new TopicsExporter(createTopicsFilter(oddIntegrationProperties), statisticsCache),
+ new ConnectorsExporter(connectService)
+ );
+ }
+
+ @VisibleForTesting
+ OddExporter(OpenDataDiscoveryIngestionApi oddApi,
+ TopicsExporter topicsExporter,
+ ConnectorsExporter connectorsExporter) {
+ this.oddApi = oddApi;
+ this.topicsExporter = topicsExporter;
+ this.connectorsExporter = connectorsExporter;
+ }
+
+ private static Predicate<String> createTopicsFilter(OddIntegrationProperties properties) {
+ if (properties.getTopicsRegex() == null) {
+ return topic -> !topic.startsWith("_");
+ }
+ Pattern pattern = Pattern.compile(properties.getTopicsRegex());
+ return topic -> pattern.matcher(topic).matches();
+ }
+
+ private static OpenDataDiscoveryIngestionApi createApiClient(OddIntegrationProperties properties) {
+ Preconditions.checkNotNull(properties.getUrl(), "ODD url not set");
+ Preconditions.checkNotNull(properties.getToken(), "ODD token not set");
+ var apiClient = new ApiClient()
+ .setBasePath(properties.getUrl())
+ .addDefaultHeader(HttpHeaders.AUTHORIZATION, "Bearer " + properties.getToken());
+ return new OpenDataDiscoveryIngestionApi(apiClient);
+ }
+
+ public Mono<Void> export(KafkaCluster cluster) {
+ return exportTopics(cluster)
+ .then(exportKafkaConnects(cluster));
+ }
+
+ private Mono<Void> exportTopics(KafkaCluster c) {
+ return createKafkaDataSource(c)
+ .thenMany(topicsExporter.export(c))
+ .concatMap(this::sentDataEntities)
+ .then();
+ }
+
+ private Mono<Void> exportKafkaConnects(KafkaCluster cluster) {
+ return createConnectDataSources(cluster)
+ .thenMany(connectorsExporter.export(cluster))
+ .concatMap(this::sentDataEntities)
+ .then();
+ }
+
+ private Mono<Void> createConnectDataSources(KafkaCluster cluster) {
+ return connectorsExporter.getConnectDataSources(cluster)
+ .buffer(100)
+ .concatMap(dataSources -> oddApi.createDataSource(new DataSourceList().items(dataSources)))
+ .then();
+ }
+
+ private Mono<Void> createKafkaDataSource(KafkaCluster cluster) {
+ String clusterOddrn = Oddrn.clusterOddrn(cluster);
+ return oddApi.createDataSource(
+ new DataSourceList()
+ .addItemsItem(
+ new DataSource()
+ .oddrn(clusterOddrn)
+ .name(cluster.getName())
+ .description("Kafka cluster")
+ )
+ );
+ }
+
+ private Mono<Void> sentDataEntities(DataEntityList dataEntityList) {
+ return oddApi.postDataEntityList(dataEntityList);
+ }
+
+}
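createTopicsFilter above falls back to hiding underscore-prefixed (internal) topics when no regex is configured; a sketch of both branches, with a hypothetical regex value standing in for the topicsRegex property:

import java.util.function.Predicate;
import java.util.regex.Pattern;

public class TopicsFilterDemo {
    public static void main(String[] args) {
        // Default branch: skip internal topics such as __consumer_offsets.
        Predicate<String> defaultFilter = topic -> !topic.startsWith("_");
        System.out.println(defaultFilter.test("__consumer_offsets")); // false
        System.out.println(defaultFilter.test("orders"));             // true

        // Configured branch: export only topics matching the configured regex.
        Pattern pattern = Pattern.compile("orders.*"); // hypothetical topicsRegex
        Predicate<String> regexFilter = topic -> pattern.matcher(topic).matches();
        System.out.println(regexFilter.test("orders-eu"));   // true
        System.out.println(regexFilter.test("payments-eu")); // false
    }
}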
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/OddExporterScheduler.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/OddExporterScheduler.java
new file mode 100644
index 0000000000..7201737f9f
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/OddExporterScheduler.java
@@ -0,0 +1,27 @@
+package com.provectus.kafka.ui.service.integration.odd;
+
+import com.provectus.kafka.ui.service.ClustersStorage;
+import lombok.RequiredArgsConstructor;
+import org.springframework.scheduling.annotation.Scheduled;
+import reactor.core.publisher.Flux;
+import reactor.core.scheduler.Schedulers;
+
+@RequiredArgsConstructor
+class OddExporterScheduler {
+
+ private final ClustersStorage clustersStorage;
+ private final OddExporter oddExporter;
+
+ @Scheduled(fixedRateString = "${kafka.send-stats-to-odd-millis:30000}")
+ public void sendMetricsToOdd() {
+ Flux.fromIterable(clustersStorage.getKafkaClusters())
+ .parallel()
+ .runOn(Schedulers.parallel())
+ .flatMap(oddExporter::export)
+ .then()
+ .block();
+ }
+
+
+}
+
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/OddIntegrationConfig.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/OddIntegrationConfig.java
new file mode 100644
index 0000000000..6bade3022a
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/OddIntegrationConfig.java
@@ -0,0 +1,31 @@
+package com.provectus.kafka.ui.service.integration.odd;
+
+import com.provectus.kafka.ui.service.ClustersStorage;
+import com.provectus.kafka.ui.service.KafkaConnectService;
+import com.provectus.kafka.ui.service.StatisticsCache;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+@ConditionalOnProperty(value = "integration.odd.url")
+class OddIntegrationConfig {
+
+ @Bean
+ OddIntegrationProperties oddIntegrationProperties() {
+ return new OddIntegrationProperties();
+ }
+
+ @Bean
+ OddExporter oddExporter(StatisticsCache statisticsCache,
+ KafkaConnectService connectService,
+ OddIntegrationProperties oddIntegrationProperties) {
+ return new OddExporter(statisticsCache, connectService, oddIntegrationProperties);
+ }
+
+ @Bean
+ OddExporterScheduler oddExporterScheduler(ClustersStorage storage, OddExporter exporter) {
+ return new OddExporterScheduler(storage, exporter);
+ }
+
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/OddIntegrationProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/OddIntegrationProperties.java
new file mode 100644
index 0000000000..cbb8d89238
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/OddIntegrationProperties.java
@@ -0,0 +1,15 @@
+package com.provectus.kafka.ui.service.integration.odd;
+
+import lombok.Data;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+
+@Data
+@ConfigurationProperties("integration.odd")
+public class OddIntegrationProperties {
+
+ String url;
+ String token;
+ String topicsRegex;
+
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/Oddrn.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/Oddrn.java
new file mode 100644
index 0000000000..d228843b21
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/Oddrn.java
@@ -0,0 +1,79 @@
+package com.provectus.kafka.ui.service.integration.odd;
+
+import com.provectus.kafka.ui.model.KafkaCluster;
+import java.net.URI;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import lombok.experimental.UtilityClass;
+import org.opendatadiscovery.oddrn.Generator;
+import org.opendatadiscovery.oddrn.model.AwsS3Path;
+import org.opendatadiscovery.oddrn.model.KafkaConnectorPath;
+import org.opendatadiscovery.oddrn.model.KafkaPath;
+
+@UtilityClass
+public class Oddrn {
+
+ private static final Generator GENERATOR = new Generator();
+
+ String clusterOddrn(KafkaCluster cluster) {
+ return KafkaPath.builder()
+ .cluster(bootstrapServersForOddrn(cluster.getBootstrapServers()))
+ .build()
+ .oddrn();
+ }
+
+ KafkaPath topicOddrnPath(KafkaCluster cluster, String topic) {
+ return KafkaPath.builder()
+ .cluster(bootstrapServersForOddrn(cluster.getBootstrapServers()))
+ .topic(topic)
+ .build();
+ }
+
+ String topicOddrn(KafkaCluster cluster, String topic) {
+ return topicOddrnPath(cluster, topic).oddrn();
+ }
+
+ String awsS3Oddrn(String bucket, String key) {
+ return AwsS3Path.builder()
+ .bucket(bucket)
+ .key(key)
+ .build()
+ .oddrn();
+ }
+
+ String connectDataSourceOddrn(String connectUrl) {
+ return KafkaConnectorPath.builder()
+ .host(normalizedConnectHosts(connectUrl))
+ .build()
+ .oddrn();
+ }
+
+ private String normalizedConnectHosts(String connectUrlStr) {
+ return Stream.of(connectUrlStr.split(","))
+ .map(String::trim)
+ .sorted()
+ .map(url -> {
+ var uri = URI.create(url);
+ String host = uri.getHost();
+ String portSuffix = (uri.getPort() > 0 ? (":" + uri.getPort()) : "");
+ return host + portSuffix;
+ })
+ .collect(Collectors.joining(","));
+ }
+
+ String connectorOddrn(String connectUrl, String connectorName) {
+ return KafkaConnectorPath.builder()
+ .host(normalizedConnectHosts(connectUrl))
+ .connector(connectorName)
+ .build()
+ .oddrn();
+ }
+
+ private String bootstrapServersForOddrn(String bootstrapServers) {
+ return Stream.of(bootstrapServers.split(","))
+ .map(String::trim)
+ .sorted()
+ .collect(Collectors.joining(","));
+ }
+
+}
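Both oddrn builders normalize host lists so the same cluster or Connect installation always yields the same oddrn regardless of URL order; a standalone restatement of that normalization (hosts are illustrative):

import java.net.URI;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class OddrnHostNormalizationDemo {
    public static void main(String[] args) {
        String connectUrls = "http://kconnect2:8083, http://kconnect1:8083";
        // trim each URL, sort, then keep only host[:port] - as in normalizedConnectHosts above
        String normalized = Stream.of(connectUrls.split(","))
            .map(String::trim)
            .sorted()
            .map(URI::create)
            .map(u -> u.getHost() + (u.getPort() > 0 ? ":" + u.getPort() : ""))
            .collect(Collectors.joining(","));
        System.out.println(normalized); // kconnect1:8083,kconnect2:8083
    }
}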
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/TopicsExporter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/TopicsExporter.java
new file mode 100644
index 0000000000..ad72e6f1dc
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/TopicsExporter.java
@@ -0,0 +1,111 @@
+package com.provectus.kafka.ui.service.integration.odd;
+
+import com.google.common.collect.ImmutableMap;
+import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.model.Statistics;
+import com.provectus.kafka.ui.service.StatisticsCache;
+import com.provectus.kafka.ui.service.integration.odd.schema.DataSetFieldsExtractors;
+import java.net.URI;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.admin.ConfigEntry;
+import org.apache.kafka.clients.admin.TopicDescription;
+import org.opendatadiscovery.client.model.DataEntity;
+import org.opendatadiscovery.client.model.DataEntityList;
+import org.opendatadiscovery.client.model.DataEntityType;
+import org.opendatadiscovery.client.model.DataSet;
+import org.opendatadiscovery.client.model.DataSetField;
+import org.opendatadiscovery.client.model.MetadataExtension;
+import org.opendatadiscovery.oddrn.model.KafkaPath;
+import org.springframework.web.reactive.function.client.WebClientResponseException;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+@Slf4j
+@RequiredArgsConstructor
+class TopicsExporter {
+
+ private final Predicate<String> topicFilter;
+ private final StatisticsCache statisticsCache;
+
+ Flux<DataEntityList> export(KafkaCluster cluster) {
+ String clusterOddrn = Oddrn.clusterOddrn(cluster);
+ Statistics stats = statisticsCache.get(cluster);
+ return Flux.fromIterable(stats.getTopicDescriptions().keySet())
+ .filter(topicFilter)
+ .flatMap(topic -> createTopicDataEntity(cluster, topic, stats))
+ .buffer(100)
+ .map(topicsEntities ->
+ new DataEntityList()
+ .dataSourceOddrn(clusterOddrn)
+ .items(topicsEntities));
+ }
+
+ private Mono<DataEntity> createTopicDataEntity(KafkaCluster cluster, String topic, Statistics stats) {
+ KafkaPath topicOddrnPath = Oddrn.topicOddrnPath(cluster, topic);
+ return
+ Mono.zip(
+ getTopicSchema(cluster, topic, topicOddrnPath, true),
+ getTopicSchema(cluster, topic, topicOddrnPath, false)
+ )
+ .map(keyValueFields -> {
+ var dataset = new DataSet();
+ keyValueFields.getT1().forEach(dataset::addFieldListItem);
+ keyValueFields.getT2().forEach(dataset::addFieldListItem);
+ return new DataEntity()
+ .name(topic)
+ .description("Kafka topic \"%s\"".formatted(topic))
+ .oddrn(Oddrn.topicOddrn(cluster, topic))
+ .type(DataEntityType.KAFKA_TOPIC)
+ .dataset(dataset)
+ .addMetadataItem(
+ new MetadataExtension()
+ .schemaUrl(URI.create("wontbeused.oops"))
+ .metadata(getTopicMetadata(topic, stats)));
+ }
+ );
+ }
+
+ private Map<String, String> getNonDefaultConfigs(String topic, Statistics stats) {
+ List<ConfigEntry> config = stats.getTopicConfigs().get(topic);
+ if (config == null) {
+ return Map.of();
+ }
+ return config.stream()
+ .filter(c -> c.source() == ConfigEntry.ConfigSource.DYNAMIC_TOPIC_CONFIG)
+ .collect(Collectors.toMap(ConfigEntry::name, ConfigEntry::value));
+ }
+
+ private Map<String, Object> getTopicMetadata(String topic, Statistics stats) {
+ TopicDescription topicDescription = stats.getTopicDescriptions().get(topic);
+ return ImmutableMap.<String, Object>builder()
+ .put("partitions", topicDescription.partitions().size())
+ .put("replication_factor", topicDescription.partitions().get(0).replicas().size())
+ .putAll(getNonDefaultConfigs(topic, stats))
+ .build();
+ }
+
+ private Mono<List<DataSetField>> getTopicSchema(KafkaCluster cluster,
+ String topic,
+ KafkaPath topicOddrn,
+ //currently we only retrieve value schema
+ boolean isKey) {
+ if (cluster.getSchemaRegistryClient() == null) {
+ return Mono.just(List.of());
+ }
+ String subject = topic + (isKey ? "-key" : "-value");
+ return cluster.getSchemaRegistryClient()
+ .mono(client -> client.getSubjectVersion(subject, "latest"))
+ .map(subj -> DataSetFieldsExtractors.extract(subj, topicOddrn, isKey))
+ .onErrorResume(WebClientResponseException.NotFound.class, th -> Mono.just(List.of()))
+ .onErrorResume(th -> true, th -> {
+ log.warn("Error retrieving subject {} for cluster {}", subject, cluster.getName(), th);
+ return Mono.just(List.of());
+ });
+ }
+
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractor.java
new file mode 100644
index 0000000000..538bbde1a8
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractor.java
@@ -0,0 +1,262 @@
+package com.provectus.kafka.ui.service.integration.odd.schema;
+
+import com.google.common.collect.ImmutableSet;
+import com.provectus.kafka.ui.service.integration.odd.Oddrn;
+import com.provectus.kafka.ui.sr.model.SchemaSubject;
+import java.util.ArrayList;
+import java.util.List;
+import lombok.experimental.UtilityClass;
+import org.apache.avro.Schema;
+import org.opendatadiscovery.client.model.DataSetField;
+import org.opendatadiscovery.client.model.DataSetFieldType;
+import org.opendatadiscovery.oddrn.model.KafkaPath;
+
+@UtilityClass
+class AvroExtractor {
+
+ static List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
+ var schema = new Schema.Parser().parse(subject.getSchema());
+ List<DataSetField> result = new ArrayList<>();
+ result.add(DataSetFieldsExtractors.rootField(topicOddrn, isKey));
+ extract(
+ schema,
+ topicOddrn.oddrn() + "/columns/" + (isKey ? "key" : "value"),
+ null,
+ null,
+ null,
+ false,
+ ImmutableSet.of(),
+ result
+ );
+ return result;
+ }
+
+ private void extract(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ String doc,
+ Boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink
+ ) {
+ switch (schema.getType()) {
+ case RECORD -> extractRecord(schema, parentOddr, oddrn, name, doc, nullable, registeredRecords, sink);
+ case UNION -> extractUnion(schema, parentOddr, oddrn, name, doc, registeredRecords, sink);
+ case ARRAY -> extractArray(schema, parentOddr, oddrn, name, doc, nullable, registeredRecords, sink);
+ case MAP -> extractMap(schema, parentOddr, oddrn, name, doc, nullable, registeredRecords, sink);
+ default -> extractPrimitive(schema, parentOddr, oddrn, name, doc, nullable, sink);
+ }
+ }
+
+ private DataSetField createDataSetField(String name,
+ String doc,
+ String parentOddrn,
+ String oddrn,
+ Schema schema,
+ Boolean nullable) {
+ return new DataSetField()
+ .name(name)
+ .description(doc)
+ .parentFieldOddrn(parentOddrn)
+ .oddrn(oddrn)
+ .type(mapSchema(schema, nullable));
+ }
+
+ private void extractRecord(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ String doc,
+ Boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
+ boolean isRoot = oddrn == null;
+ if (!isRoot) {
+ sink.add(createDataSetField(name, doc, parentOddr, oddrn, schema, nullable));
+ if (registeredRecords.contains(schema.getFullName())) {
+ // avoiding recursion by checking if the record was already registered in the parsing chain
+ return;
+ }
+ }
+ var newRegisteredRecords = ImmutableSet.<String>builder()
+ .addAll(registeredRecords)
+ .add(schema.getFullName())
+ .build();
+
+ schema.getFields().forEach(f ->
+ extract(
+ f.schema(),
+ isRoot ? parentOddr : oddrn,
+ isRoot
+ ? parentOddr + "/" + f.name()
+ : oddrn + "/fields/" + f.name(),
+ f.name(),
+ f.doc(),
+ false,
+ newRegisteredRecords,
+ sink
+ ));
+ }
+
+ private void extractUnion(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ String doc,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
+ boolean isRoot = oddrn == null;
+ boolean containsNull = schema.getTypes().stream().map(Schema::getType).anyMatch(t -> t == Schema.Type.NULL);
+ // if it is not root and there are only 2 values in the union (null and something else)
+ // we register this field as optional without mentioning the union
+ if (!isRoot && containsNull && schema.getTypes().size() == 2) {
+ var nonNullSchema = schema.getTypes().stream()
+ .filter(s -> s.getType() != Schema.Type.NULL)
+ .findFirst()
+ .orElseThrow(IllegalStateException::new);
+ extract(
+ nonNullSchema,
+ parentOddr,
+ oddrn,
+ name,
+ doc,
+ true,
+ registeredRecords,
+ sink
+ );
+ return;
+ }
+ oddrn = isRoot ? parentOddr + "/union" : oddrn;
+ if (isRoot) {
+ sink.add(createDataSetField("Avro root union", doc, parentOddr, oddrn, schema, containsNull));
+ } else {
+ sink.add(createDataSetField(name, doc, parentOddr, oddrn, schema, containsNull));
+ }
+ for (Schema t : schema.getTypes()) {
+ if (t.getType() != Schema.Type.NULL) {
+ extract(
+ t,
+ oddrn,
+ oddrn + "/values/" + t.getName(),
+ t.getName(),
+ t.getDoc(),
+ containsNull,
+ registeredRecords,
+ sink
+ );
+ }
+ }
+ }
+
+ private void extractArray(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ String doc,
+ Boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
+ boolean isRoot = oddrn == null;
+ oddrn = isRoot ? parentOddr + "/array" : oddrn;
+ if (isRoot) {
+ sink.add(createDataSetField("Avro root Array", doc, parentOddr, oddrn, schema, nullable));
+ } else {
+ sink.add(createDataSetField(name, doc, parentOddr, oddrn, schema, nullable));
+ }
+ extract(
+ schema.getElementType(),
+ oddrn,
+ oddrn + "/items/" + schema.getElementType().getName(),
+ schema.getElementType().getName(),
+ schema.getElementType().getDoc(),
+ false,
+ registeredRecords,
+ sink
+ );
+ }
+
+ private void extractMap(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ String doc,
+ Boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
+ boolean isRoot = oddrn == null;
+ oddrn = isRoot ? parentOddr + "/map" : oddrn;
+ if (isRoot) {
+ sink.add(createDataSetField("Avro root map", doc, parentOddr, oddrn, schema, nullable));
+ } else {
+ sink.add(createDataSetField(name, doc, parentOddr, oddrn, schema, nullable));
+ }
+ extract(
+ new Schema.Parser().parse("\"string\""),
+ oddrn,
+ oddrn + "/key",
+ "key",
+ null,
+ nullable,
+ registeredRecords,
+ sink
+ );
+ extract(
+ schema.getValueType(),
+ oddrn,
+ oddrn + "/value",
+ "value",
+ null,
+ nullable,
+ registeredRecords,
+ sink
+ );
+ }
+
+
+ private void extractPrimitive(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ String doc,
+ Boolean nullable,
+ List<DataSetField> sink) {
+ boolean isRoot = oddrn == null;
+ String primOddrn = isRoot ? (parentOddr + "/" + schema.getType()) : oddrn;
+ if (isRoot) {
+ sink.add(createDataSetField("Root avro " + schema.getType(),
+ doc, parentOddr, primOddrn, schema, nullable));
+ } else {
+ sink.add(createDataSetField(name, doc, parentOddr, primOddrn, schema, nullable));
+ }
+ }
+
+ private DataSetFieldType.TypeEnum mapType(Schema.Type type) {
+ return switch (type) {
+ case INT, LONG -> DataSetFieldType.TypeEnum.INTEGER;
+ case FLOAT, DOUBLE, FIXED -> DataSetFieldType.TypeEnum.NUMBER;
+ case STRING, ENUM -> DataSetFieldType.TypeEnum.STRING;
+ case BOOLEAN -> DataSetFieldType.TypeEnum.BOOLEAN;
+ case BYTES -> DataSetFieldType.TypeEnum.BINARY;
+ case ARRAY -> DataSetFieldType.TypeEnum.LIST;
+ case RECORD -> DataSetFieldType.TypeEnum.STRUCT;
+ case MAP -> DataSetFieldType.TypeEnum.MAP;
+ case UNION -> DataSetFieldType.TypeEnum.UNION;
+ case NULL -> DataSetFieldType.TypeEnum.UNKNOWN;
+ };
+ }
+
+ private DataSetFieldType mapSchema(Schema schema, Boolean nullable) {
+ return new DataSetFieldType()
+ .logicalType(logicalType(schema))
+ .isNullable(nullable)
+ .type(mapType(schema.getType()));
+ }
+
+ private String logicalType(Schema schema) {
+ return schema.getType() == Schema.Type.RECORD
+ ? schema.getFullName()
+ : schema.getType().toString().toLowerCase();
+ }
+
+}
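The registeredRecords set above exists to break cycles in self-referencing schemas. A minimal sketch of an Avro schema that exercises the guard (hypothetical record and field names):

// Without the registeredRecords check, extract() would recurse forever on "next".
String recursiveSchema = """
    {
      "type": "record",
      "name": "Node",
      "fields": [
        {"name": "value", "type": "string"},
        {"name": "next", "type": ["null", "Node"], "default": null}
      ]
    }
    """;
var fields = AvroExtractor.extract(
    new SchemaSubject().schema(recursiveSchema),
    KafkaPath.builder().cluster("localhost:9092").topic("nodes").build(),
    false);
// "next" is emitted once as a nullable STRUCT; its children are not re-expanded.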
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/DataSetFieldsExtractors.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/DataSetFieldsExtractors.java
new file mode 100644
index 0000000000..746f172b57
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/DataSetFieldsExtractors.java
@@ -0,0 +1,38 @@
+package com.provectus.kafka.ui.service.integration.odd.schema;
+
+import com.provectus.kafka.ui.service.integration.odd.Oddrn;
+import com.provectus.kafka.ui.sr.model.SchemaSubject;
+import com.provectus.kafka.ui.sr.model.SchemaType;
+import java.util.List;
+import java.util.Optional;
+import lombok.experimental.UtilityClass;
+import org.opendatadiscovery.client.model.DataSetField;
+import org.opendatadiscovery.client.model.DataSetFieldType;
+import org.opendatadiscovery.oddrn.model.KafkaPath;
+
+@UtilityClass
+public class DataSetFieldsExtractors {
+
+ public List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
+ SchemaType schemaType = Optional.ofNullable(subject.getSchemaType()).orElse(SchemaType.AVRO);
+ return switch (schemaType) {
+ case AVRO -> AvroExtractor.extract(subject, topicOddrn, isKey);
+ case JSON -> JsonSchemaExtractor.extract(subject, topicOddrn, isKey);
+ case PROTOBUF -> ProtoExtractor.extract(subject, topicOddrn, isKey);
+ };
+ }
+
+
+ DataSetField rootField(KafkaPath topicOddrn, boolean isKey) {
+ var rootOddrn = topicOddrn.oddrn() + "/columns/" + (isKey ? "key" : "value");
+ return new DataSetField()
+ .name(isKey ? "key" : "value")
+ .description("Topic's " + (isKey ? "key" : "value") + " schema")
+ .parentFieldOddrn(topicOddrn.oddrn())
+ .oddrn(rootOddrn)
+ .type(new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.STRUCT)
+ .isNullable(true));
+ }
+
+}
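A usage sketch for the dispatching entry point, assuming the simplest possible Avro subject (cluster and topic names are illustrative):

// Dispatches on schemaType (AVRO here) and prepends the synthetic root field.
List<DataSetField> fields = DataSetFieldsExtractors.extract(
    new SchemaSubject().schema("\"string\"").schemaType(SchemaType.AVRO),
    KafkaPath.builder().cluster("localhost:9092").topic("orders").build(),
    false);
// -> the root "value" STRUCT field plus one field for the string primitive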
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractor.java
new file mode 100644
index 0000000000..f92e1fc876
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractor.java
@@ -0,0 +1,311 @@
+package com.provectus.kafka.ui.service.integration.odd.schema;
+
+import com.google.common.collect.ImmutableSet;
+import com.provectus.kafka.ui.service.integration.odd.Oddrn;
+import com.provectus.kafka.ui.sr.model.SchemaSubject;
+import io.confluent.kafka.schemaregistry.json.JsonSchema;
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import javax.annotation.Nullable;
+import lombok.experimental.UtilityClass;
+import org.everit.json.schema.ArraySchema;
+import org.everit.json.schema.BooleanSchema;
+import org.everit.json.schema.CombinedSchema;
+import org.everit.json.schema.FalseSchema;
+import org.everit.json.schema.NullSchema;
+import org.everit.json.schema.NumberSchema;
+import org.everit.json.schema.ObjectSchema;
+import org.everit.json.schema.ReferenceSchema;
+import org.everit.json.schema.Schema;
+import org.everit.json.schema.StringSchema;
+import org.everit.json.schema.TrueSchema;
+import org.opendatadiscovery.client.model.DataSetField;
+import org.opendatadiscovery.client.model.DataSetFieldType;
+import org.opendatadiscovery.client.model.MetadataExtension;
+import org.opendatadiscovery.oddrn.model.KafkaPath;
+
+@UtilityClass
+class JsonSchemaExtractor {
+
+ static List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
+ Schema schema = new JsonSchema(subject.getSchema()).rawSchema();
+ List<DataSetField> result = new ArrayList<>();
+ result.add(DataSetFieldsExtractors.rootField(topicOddrn, isKey));
+ extract(
+ schema,
+ topicOddrn.oddrn() + "/columns/" + (isKey ? "key" : "value"),
+ null,
+ null,
+ null,
+ ImmutableSet.of(),
+ result
+ );
+ return result;
+ }
+
+ private void extract(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ Boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
+ if (schema instanceof ReferenceSchema s) {
+ Optional.ofNullable(s.getReferredSchema())
+ .ifPresent(refSchema -> extract(refSchema, parentOddr, oddrn, name, nullable, registeredRecords, sink));
+ } else if (schema instanceof ObjectSchema s) {
+ extractObject(s, parentOddr, oddrn, name, nullable, registeredRecords, sink);
+ } else if (schema instanceof ArraySchema s) {
+ extractArray(s, parentOddr, oddrn, name, nullable, registeredRecords, sink);
+ } else if (schema instanceof CombinedSchema cs) {
+ extractCombined(cs, parentOddr, oddrn, name, nullable, registeredRecords, sink);
+ } else if (schema instanceof BooleanSchema
+ || schema instanceof NumberSchema
+ || schema instanceof StringSchema
+ || schema instanceof NullSchema
+ ) {
+ extractPrimitive(schema, parentOddr, oddrn, name, nullable, sink);
+ } else {
+ extractUnknown(schema, parentOddr, oddrn, name, nullable, sink);
+ }
+ }
+
+ private void extractPrimitive(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ Boolean nullable,
+ List<DataSetField> sink) {
+ boolean isRoot = oddrn == null;
+ sink.add(
+ createDataSetField(
+ schema,
+ isRoot ? "Root JSON primitive" : name,
+ parentOddr,
+ isRoot ? (parentOddr + "/" + logicalTypeName(schema)) : oddrn,
+ mapType(schema),
+ logicalTypeName(schema),
+ nullable
+ )
+ );
+ }
+
+ private void extractUnknown(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ Boolean nullable,
+ List<DataSetField> sink) {
+ boolean isRoot = oddrn == null;
+ sink.add(
+ createDataSetField(
+ schema,
+ isRoot ? "Root type " + logicalTypeName(schema) : name,
+ parentOddr,
+ isRoot ? (parentOddr + "/" + logicalTypeName(schema)) : oddrn,
+ DataSetFieldType.TypeEnum.UNKNOWN,
+ logicalTypeName(schema),
+ nullable
+ )
+ );
+ }
+
+ private void extractObject(ObjectSchema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ Boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
+ boolean isRoot = oddrn == null;
+ // schemaLocation can be null for empty object schemas (e.g. when used inside anyOf)
+ @Nullable var schemaLocation = schema.getSchemaLocation();
+ if (!isRoot) {
+ sink.add(createDataSetField(
+ schema,
+ name,
+ parentOddr,
+ oddrn,
+ DataSetFieldType.TypeEnum.STRUCT,
+ logicalTypeName(schema),
+ nullable
+ ));
+ if (schemaLocation != null && registeredRecords.contains(schemaLocation)) {
+ // avoiding recursion by checking if the record was already registered in the parsing chain
+ return;
+ }
+ }
+
+ var newRegisteredRecords = schemaLocation == null
+ ? registeredRecords
+ : ImmutableSet.<String>builder()
+ .addAll(registeredRecords)
+ .add(schemaLocation)
+ .build();
+
+ schema.getPropertySchemas().forEach((propertyName, propertySchema) -> {
+ boolean required = schema.getRequiredProperties().contains(propertyName);
+ extract(
+ propertySchema,
+ isRoot ? parentOddr : oddrn,
+ isRoot
+ ? parentOddr + "/" + propertyName
+ : oddrn + "/fields/" + propertyName,
+ propertyName,
+ !required,
+ newRegisteredRecords,
+ sink
+ );
+ });
+ }
+
+ private void extractArray(ArraySchema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ Boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
+ boolean isRoot = oddrn == null;
+ oddrn = isRoot ? parentOddr + "/array" : oddrn;
+ if (isRoot) {
+ sink.add(
+ createDataSetField(
+ schema,
+ "Json array root",
+ parentOddr,
+ oddrn,
+ DataSetFieldType.TypeEnum.LIST,
+ "array",
+ nullable
+ ));
+ } else {
+ sink.add(
+ createDataSetField(
+ schema,
+ name,
+ parentOddr,
+ oddrn,
+ DataSetFieldType.TypeEnum.LIST,
+ "array",
+ nullable
+ ));
+ }
+ @Nullable var itemsSchema = schema.getAllItemSchema();
+ if (itemsSchema != null) {
+ extract(
+ itemsSchema,
+ oddrn,
+ oddrn + "/items/" + logicalTypeName(itemsSchema),
+ logicalTypeName(itemsSchema),
+ false,
+ registeredRecords,
+ sink
+ );
+ }
+ }
+
+ private void extractCombined(CombinedSchema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ Boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
+ String combineType = "unknown";
+ if (schema.getCriterion() == CombinedSchema.ALL_CRITERION) {
+ combineType = "allOf";
+ }
+ if (schema.getCriterion() == CombinedSchema.ANY_CRITERION) {
+ combineType = "anyOf";
+ }
+ if (schema.getCriterion() == CombinedSchema.ONE_CRITERION) {
+ combineType = "oneOf";
+ }
+
+ boolean isRoot = oddrn == null;
+ oddrn = isRoot ? (parentOddr + "/" + combineType) : (oddrn + "/" + combineType);
+ sink.add(
+ createDataSetField(
+ schema,
+ isRoot ? "Root %s".formatted(combineType) : name,
+ parentOddr,
+ oddrn,
+ DataSetFieldType.TypeEnum.UNION,
+ combineType,
+ nullable
+ ).addMetadataItem(new MetadataExtension()
+ .schemaUrl(URI.create("wontbeused.oops"))
+ .metadata(Map.of("criterion", combineType)))
+ );
+
+ for (Schema subschema : schema.getSubschemas()) {
+ extract(
+ subschema,
+ oddrn,
+ oddrn + "/values/" + logicalTypeName(subschema),
+ logicalTypeName(subschema),
+ nullable,
+ registeredRecords,
+ sink
+ );
+ }
+ }
+
+ private String getDescription(Schema schema) {
+ return Optional.ofNullable(schema.getTitle())
+ .orElse(schema.getDescription());
+ }
+
+ private String logicalTypeName(Schema schema) {
+ return schema.getClass()
+ .getSimpleName()
+ .replace("Schema", "");
+ }
+
+ private DataSetField createDataSetField(Schema schema,
+ String name,
+ String parentOddrn,
+ String oddrn,
+ DataSetFieldType.TypeEnum type,
+ String logicalType,
+ Boolean nullable) {
+ return new DataSetField()
+ .name(name)
+ .parentFieldOddrn(parentOddrn)
+ .oddrn(oddrn)
+ .description(getDescription(schema))
+ .type(
+ new DataSetFieldType()
+ .isNullable(nullable)
+ .logicalType(logicalType)
+ .type(type)
+ );
+ }
+
+ private DataSetFieldType.TypeEnum mapType(Schema type) {
+ if (type instanceof NumberSchema) {
+ return DataSetFieldType.TypeEnum.NUMBER;
+ }
+ if (type instanceof StringSchema) {
+ return DataSetFieldType.TypeEnum.STRING;
+ }
+ if (type instanceof BooleanSchema || type instanceof TrueSchema || type instanceof FalseSchema) {
+ return DataSetFieldType.TypeEnum.BOOLEAN;
+ }
+ if (type instanceof ObjectSchema) {
+ return DataSetFieldType.TypeEnum.STRUCT;
+ }
+ if (type instanceof ReferenceSchema s) {
+ return mapType(s.getReferredSchema());
+ }
+ if (type instanceof CombinedSchema) {
+ return DataSetFieldType.TypeEnum.UNION;
+ }
+ return DataSetFieldType.TypeEnum.UNKNOWN;
+ }
+
+}
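To illustrate the criterion mapping in extractCombined, a hypothetical oneOf property and the fields it yields:

// A oneOf property becomes a UNION field whose oddrn gains a "/oneOf" suffix.
String jsonSchema = """
    {
      "type": "object",
      "properties": {
        "id": { "oneOf": [ { "type": "string" }, { "type": "integer" } ] }
      }
    }
    """;
var fields = JsonSchemaExtractor.extract(
    new SchemaSubject().schema(jsonSchema),
    KafkaPath.builder().cluster("localhost:9092").topic("t").build(),
    false);
// "id" -> UNION (logicalType "oneOf") at .../id/oneOf, with String and Number
// subschema fields under .../id/oneOf/values/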
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractor.java
new file mode 100644
index 0000000000..b4a5378239
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractor.java
@@ -0,0 +1,230 @@
+package com.provectus.kafka.ui.service.integration.odd.schema;
+
+import com.google.common.collect.ImmutableSet;
+import com.google.protobuf.BoolValue;
+import com.google.protobuf.BytesValue;
+import com.google.protobuf.Descriptors;
+import com.google.protobuf.Descriptors.Descriptor;
+import com.google.protobuf.DoubleValue;
+import com.google.protobuf.Duration;
+import com.google.protobuf.FloatValue;
+import com.google.protobuf.Int32Value;
+import com.google.protobuf.Int64Value;
+import com.google.protobuf.StringValue;
+import com.google.protobuf.Timestamp;
+import com.google.protobuf.UInt32Value;
+import com.google.protobuf.UInt64Value;
+import com.google.protobuf.Value;
+import com.provectus.kafka.ui.service.integration.odd.Oddrn;
+import com.provectus.kafka.ui.sr.model.SchemaSubject;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+import lombok.experimental.UtilityClass;
+import org.opendatadiscovery.client.model.DataSetField;
+import org.opendatadiscovery.client.model.DataSetFieldType;
+import org.opendatadiscovery.client.model.DataSetFieldType.TypeEnum;
+import org.opendatadiscovery.oddrn.model.KafkaPath;
+
+@UtilityClass
+class ProtoExtractor {
+
+ private static final Set<String> PRIMITIVES_WRAPPER_TYPE_NAMES = Set.of(
+ BoolValue.getDescriptor().getFullName(),
+ Int32Value.getDescriptor().getFullName(),
+ UInt32Value.getDescriptor().getFullName(),
+ Int64Value.getDescriptor().getFullName(),
+ UInt64Value.getDescriptor().getFullName(),
+ StringValue.getDescriptor().getFullName(),
+ BytesValue.getDescriptor().getFullName(),
+ FloatValue.getDescriptor().getFullName(),
+ DoubleValue.getDescriptor().getFullName()
+ );
+
+ List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
+ Descriptor schema = new ProtobufSchema(subject.getSchema()).toDescriptor();
+ List<DataSetField> result = new ArrayList<>();
+ result.add(DataSetFieldsExtractors.rootField(topicOddrn, isKey));
+ var rootOddrn = topicOddrn.oddrn() + "/columns/" + (isKey ? "key" : "value");
+ schema.getFields().forEach(f ->
+ extract(f,
+ rootOddrn,
+ rootOddrn + "/" + f.getName(),
+ f.getName(),
+ !f.isRequired(),
+ f.isRepeated(),
+ ImmutableSet.of(schema.getFullName()),
+ result
+ ));
+ return result;
+ }
+
+ private void extract(Descriptors.FieldDescriptor field,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ boolean nullable,
+ boolean repeated,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
+ if (repeated) {
+ extractRepeated(field, parentOddr, oddrn, name, nullable, registeredRecords, sink);
+ } else if (field.getType() == Descriptors.FieldDescriptor.Type.MESSAGE) {
+ extractMessage(field, parentOddr, oddrn, name, nullable, registeredRecords, sink);
+ } else {
+ extractPrimitive(field, parentOddr, oddrn, name, nullable, sink);
+ }
+ }
+
+ // converts some(!) Protobuf well-known types (from the google.protobuf.* packages)
+ // see JsonFormat::buildWellKnownTypePrinters for impl details
+ private boolean extractProtoWellKnownType(Descriptors.FieldDescriptor field,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ boolean nullable,
+ List<DataSetField> sink) {
+ // all well-known types are messages
+ if (field.getType() != Descriptors.FieldDescriptor.Type.MESSAGE) {
+ return false;
+ }
+ String typeName = field.getMessageType().getFullName();
+ if (typeName.equals(Timestamp.getDescriptor().getFullName())) {
+ sink.add(createDataSetField(name, parentOddr, oddrn, TypeEnum.DATETIME, typeName, nullable));
+ return true;
+ }
+ if (typeName.equals(Duration.getDescriptor().getFullName())) {
+ sink.add(createDataSetField(name, parentOddr, oddrn, TypeEnum.DURATION, typeName, nullable));
+ return true;
+ }
+ if (typeName.equals(Value.getDescriptor().getFullName())) {
+ //TODO: use ANY type once it appears in ODD
+ sink.add(createDataSetField(name, parentOddr, oddrn, TypeEnum.UNKNOWN, typeName, nullable));
+ return true;
+ }
+ if (PRIMITIVES_WRAPPER_TYPE_NAMES.contains(typeName)) {
+ var wrapped = field.getMessageType().findFieldByName("value");
+ sink.add(createDataSetField(name, parentOddr, oddrn, mapType(wrapped.getType()), typeName, true));
+ return true;
+ }
+ return false;
+ }
+
+ private void extractRepeated(Descriptors.FieldDescriptor field,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
+ sink.add(createDataSetField(name, parentOddr, oddrn, TypeEnum.LIST, "repeated", nullable));
+
+ String itemName = field.getType() == Descriptors.FieldDescriptor.Type.MESSAGE
+ ? field.getMessageType().getName()
+ : field.getType().name().toLowerCase();
+
+ extract(
+ field,
+ oddrn,
+ oddrn + "/items/" + itemName,
+ itemName,
+ nullable,
+ false,
+ registeredRecords,
+ sink
+ );
+ }
+
+ private void extractMessage(Descriptors.FieldDescriptor field,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
+ if (extractProtoWellKnownType(field, parentOddr, oddrn, name, nullable, sink)) {
+ return;
+ }
+ sink.add(createDataSetField(name, parentOddr, oddrn, TypeEnum.STRUCT, getLogicalTypeName(field), nullable));
+
+ String msgTypeName = field.getMessageType().getFullName();
+ if (registeredRecords.contains(msgTypeName)) {
+ // avoiding recursion by checking if the record was already registered in the parsing chain
+ return;
+ }
+ var newRegisteredRecords = ImmutableSet.<String>builder()
+ .addAll(registeredRecords)
+ .add(msgTypeName)
+ .build();
+
+ field.getMessageType()
+ .getFields()
+ .forEach(f -> {
+ extract(f,
+ oddrn,
+ oddrn + "/fields/" + f.getName(),
+ f.getName(),
+ !f.isRequired(),
+ f.isRepeated(),
+ newRegisteredRecords,
+ sink
+ );
+ });
+ }
+
+ private void extractPrimitive(Descriptors.FieldDescriptor field,
+ String parentOddr,
+ String oddrn,
+ String name,
+ boolean nullable,
+ List<DataSetField> sink) {
+ sink.add(
+ createDataSetField(
+ name,
+ parentOddr,
+ oddrn,
+ mapType(field.getType()),
+ getLogicalTypeName(field),
+ nullable
+ )
+ );
+ }
+
+ private String getLogicalTypeName(Descriptors.FieldDescriptor f) {
+ return f.getType() == Descriptors.FieldDescriptor.Type.MESSAGE
+ ? f.getMessageType().getFullName()
+ : f.getType().name().toLowerCase();
+ }
+
+ private DataSetField createDataSetField(String name,
+ String parentOddrn,
+ String oddrn,
+ TypeEnum type,
+ String logicalType,
+ Boolean nullable) {
+ return new DataSetField()
+ .name(name)
+ .parentFieldOddrn(parentOddrn)
+ .oddrn(oddrn)
+ .type(
+ new DataSetFieldType()
+ .isNullable(nullable)
+ .logicalType(logicalType)
+ .type(type)
+ );
+ }
+
+
+ private TypeEnum mapType(Descriptors.FieldDescriptor.Type type) {
+ return switch (type) {
+ case INT32, INT64, SINT32, SFIXED32, SINT64, UINT32, UINT64, FIXED32, FIXED64, SFIXED64 -> TypeEnum.INTEGER;
+ case FLOAT, DOUBLE -> TypeEnum.NUMBER;
+ case STRING, ENUM -> TypeEnum.STRING;
+ case BOOL -> TypeEnum.BOOLEAN;
+ case BYTES -> TypeEnum.BINARY;
+ case MESSAGE, GROUP -> TypeEnum.STRUCT;
+ };
+ }
+
+}
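A sketch of the well-known-type branch in action, using a hypothetical wrapper field:

// google.protobuf.StringValue is in PRIMITIVES_WRAPPER_TYPE_NAMES, so the field is
// registered as a nullable STRING with the wrapper's full name as its logicalType.
String protoSchema = """
    syntax = "proto3";
    import "google/protobuf/wrappers.proto";
    message User {
      google.protobuf.StringValue nickname = 1;
    }""";
var fields = ProtoExtractor.extract(
    new SchemaSubject().schema(protoSchema),
    KafkaPath.builder().cluster("localhost:9092").topic("users").build(),
    false);
// -> "nickname": type STRING, logicalType "google.protobuf.StringValue", isNullable = true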
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ReactiveFailover.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ReactiveFailover.java
index 1066dc7178..926298271d 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ReactiveFailover.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ReactiveFailover.java
@@ -9,7 +9,6 @@ import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
-import org.springframework.web.reactive.function.client.WebClientRequestException;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@@ -25,6 +24,16 @@ public class ReactiveFailover<T> {
private final Predicate<Throwable> failoverExceptionsPredicate;
private final String noAvailablePublishersMsg;
+ // creates a single-publisher failover (mainly intended for tests)
+ public static <T> ReactiveFailover<T> createNoop(T publisher) {
+ return create(
+ List.of(publisher),
+ th -> true,
+ "publisher is not available",
+ DEFAULT_RETRY_GRACE_PERIOD_MS
+ );
+ }
+
public static <T> ReactiveFailover<T> create(List<T> publishers,
Predicate<Throwable> failoverExceptionsPredicate,
String noAvailablePublishersMsg,
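A usage sketch for the new factory, mirroring how the tests below wire a mocked schema-registry client (the mock setup is illustrative):

// Single-publisher failover: nothing to fail over to, which is exactly what unit tests need.
KafkaSrClientApi client = mock(KafkaSrClientApi.class);
ReactiveFailover<KafkaSrClientApi> failover = ReactiveFailover.createNoop(client);
failover.mono(c -> c.getSubjectVersion("orders-value", "latest"));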
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/KafkaConnectServiceTests.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/KafkaConnectServiceTests.java
index f71149d3d8..a827183521 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/KafkaConnectServiceTests.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/KafkaConnectServiceTests.java
@@ -142,9 +142,8 @@ public class KafkaConnectServiceTests extends AbstractIntegrationTest {
.uri("/api/clusters/{clusterName}/connects/{connectName}/connectors", LOCAL, connectName)
.exchange()
.expectStatus().isOk()
- .expectBody()
- .jsonPath(String.format("$[?(@ == '%s')]", connectorName))
- .exists();
+ .expectBodyList(String.class)
+ .contains(connectorName);
}
@Test
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/KafkaConfigSanitizerTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/KafkaConfigSanitizerTest.java
index 0cc5a36c06..232e1d3703 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/KafkaConfigSanitizerTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/KafkaConfigSanitizerTest.java
@@ -26,6 +26,13 @@ class KafkaConfigSanitizerTest {
assertThat(sanitizer.sanitize("main.consumer.sasl.jaas.config", "secret")).isEqualTo("******");
assertThat(sanitizer.sanitize("database.password", "secret")).isEqualTo("******");
assertThat(sanitizer.sanitize("basic.auth.user.info", "secret")).isEqualTo("******");
+
+ //AWS var sanitizing
+ assertThat(sanitizer.sanitize("aws.access.key.id", "secret")).isEqualTo("******");
+ assertThat(sanitizer.sanitize("aws.accessKeyId", "secret")).isEqualTo("******");
+ assertThat(sanitizer.sanitize("aws.secret.access.key", "secret")).isEqualTo("******");
+ assertThat(sanitizer.sanitize("aws.secretAccessKey", "secret")).isEqualTo("******");
+ assertThat(sanitizer.sanitize("aws.sessionToken", "secret")).isEqualTo("******");
}
@Test
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporterTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporterTest.java
new file mode 100644
index 0000000000..20c0d96ad1
--- /dev/null
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporterTest.java
@@ -0,0 +1,111 @@
+package com.provectus.kafka.ui.service.integration.odd;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import com.provectus.kafka.ui.connect.model.ConnectorTopics;
+import com.provectus.kafka.ui.model.ConnectDTO;
+import com.provectus.kafka.ui.model.ConnectorDTO;
+import com.provectus.kafka.ui.model.ConnectorTypeDTO;
+import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.service.KafkaConnectService;
+import java.util.List;
+import java.util.Map;
+import org.junit.jupiter.api.Test;
+import org.opendatadiscovery.client.model.DataEntity;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+import reactor.test.StepVerifier;
+
+class ConnectorsExporterTest {
+
+ private static final KafkaCluster CLUSTER = KafkaCluster.builder()
+ .name("test cluster")
+ .bootstrapServers("localhost:9092")
+ .build();
+
+ private final KafkaConnectService kafkaConnectService = mock(KafkaConnectService.class);
+ private final ConnectorsExporter exporter = new ConnectorsExporter(kafkaConnectService);
+
+ @Test
+ void exportsConnectorsAsDataTransformers() {
+ ConnectDTO connect = new ConnectDTO();
+ connect.setName("testConnect");
+ connect.setAddress("http://kconnect:8083");
+
+ ConnectorDTO sinkConnector = new ConnectorDTO();
+ sinkConnector.setName("testSink");
+ sinkConnector.setType(ConnectorTypeDTO.SINK);
+ sinkConnector.setConnect(connect.getName());
+ sinkConnector.setConfig(
+ Map.of(
+ "connector.class", "FileStreamSink",
+ "file", "filePathHere",
+ "topic", "inputTopic"
+ )
+ );
+
+ ConnectorDTO sourceConnector = new ConnectorDTO();
+ sourceConnector.setName("testSource");
+ sourceConnector.setConnect(connect.getName());
+ sourceConnector.setType(ConnectorTypeDTO.SOURCE);
+ sourceConnector.setConfig(
+ Map.of(
+ "connector.class", "FileStreamSource",
+ "file", "filePathHere",
+ "topic", "outputTopic"
+ )
+ );
+
+ when(kafkaConnectService.getConnects(CLUSTER))
+ .thenReturn(Flux.just(connect));
+
+ when(kafkaConnectService.getConnectorNames(CLUSTER, connect.getName()))
+ .thenReturn(Flux.just(sinkConnector.getName(), sourceConnector.getName()));
+
+ when(kafkaConnectService.getConnector(CLUSTER, connect.getName(), sinkConnector.getName()))
+ .thenReturn(Mono.just(sinkConnector));
+
+ when(kafkaConnectService.getConnector(CLUSTER, connect.getName(), sourceConnector.getName()))
+ .thenReturn(Mono.just(sourceConnector));
+
+ when(kafkaConnectService.getConnectorTopics(CLUSTER, connect.getName(), sourceConnector.getName()))
+ .thenReturn(Mono.just(new ConnectorTopics().topics(List.of("outputTopic"))));
+
+ when(kafkaConnectService.getConnectorTopics(CLUSTER, connect.getName(), sinkConnector.getName()))
+ .thenReturn(Mono.just(new ConnectorTopics().topics(List.of("inputTopic"))));
+
+ StepVerifier.create(exporter.export(CLUSTER))
+ .assertNext(dataEntityList -> {
+ assertThat(dataEntityList.getDataSourceOddrn())
+ .isEqualTo("//kafkaconnect/host/kconnect:8083");
+
+ assertThat(dataEntityList.getItems())
+ .hasSize(2);
+
+ assertThat(dataEntityList.getItems())
+ .filteredOn(DataEntity::getOddrn, "//kafkaconnect/host/kconnect:8083/connectors/testSink")
+ .singleElement()
+ .satisfies(sink -> {
+ assertThat(sink.getMetadata().get(0).getMetadata())
+ .containsOnlyKeys("type", "connector.class", "file", "topic");
+ assertThat(sink.getDataTransformer().getInputs()).contains(
+ "//kafka/cluster/localhost:9092/topics/inputTopic");
+ });
+
+ assertThat(dataEntityList.getItems())
+ .filteredOn(DataEntity::getOddrn, "//kafkaconnect/host/kconnect:8083/connectors/testSource")
+ .singleElement()
+ .satisfies(source -> {
+ assertThat(source.getMetadata().get(0).getMetadata())
+ .containsOnlyKeys("type", "connector.class", "file", "topic");
+ assertThat(source.getDataTransformer().getOutputs()).contains(
+ "//kafka/cluster/localhost:9092/topics/outputTopic");
+ });
+
+ })
+ .verifyComplete();
+ }
+
+}
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/TopicsExporterTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/TopicsExporterTest.java
new file mode 100644
index 0000000000..4d512612a6
--- /dev/null
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/TopicsExporterTest.java
@@ -0,0 +1,167 @@
+package com.provectus.kafka.ui.service.integration.odd;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.model.Statistics;
+import com.provectus.kafka.ui.service.StatisticsCache;
+import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
+import com.provectus.kafka.ui.sr.model.SchemaSubject;
+import com.provectus.kafka.ui.sr.model.SchemaType;
+import com.provectus.kafka.ui.util.ReactiveFailover;
+import java.util.List;
+import java.util.Map;
+import org.apache.kafka.clients.admin.ConfigEntry;
+import org.apache.kafka.clients.admin.TopicDescription;
+import org.apache.kafka.common.Node;
+import org.apache.kafka.common.TopicPartitionInfo;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.opendatadiscovery.client.model.DataEntity;
+import org.opendatadiscovery.client.model.DataEntityType;
+import reactor.core.publisher.Mono;
+import reactor.test.StepVerifier;
+
+class TopicsExporterTest {
+
+ private final KafkaSrClientApi schemaRegistryClientMock = mock(KafkaSrClientApi.class);
+
+ private final KafkaCluster cluster = KafkaCluster.builder()
+ .name("testCluster")
+ .bootstrapServers("localhost:9092,localhost:19092")
+ .schemaRegistryClient(ReactiveFailover.createNoop(schemaRegistryClientMock))
+ .build();
+
+ private Statistics stats;
+
+ private TopicsExporter topicsExporter;
+
+ @BeforeEach
+ void init() {
+ var statisticsCacheMock = mock(StatisticsCache.class);
+ when(statisticsCacheMock.get(cluster)).thenAnswer(invocationOnMock -> stats);
+
+ topicsExporter = new TopicsExporter(
+ topic -> !topic.startsWith("_"),
+ statisticsCacheMock
+ );
+ }
+
+ @Test
+ void doesNotExportTopicsWhichDontFitFiltrationRule() {
+ when(schemaRegistryClientMock.getSubjectVersion(anyString(), anyString()))
+ .thenReturn(Mono.error(new RuntimeException("Not found")));
+
+ stats = Statistics.empty()
+ .toBuilder()
+ .topicDescriptions(
+ Map.of(
+ "_hidden", new TopicDescription("_hidden", false, List.of(
+ new TopicPartitionInfo(0, null, List.of(), List.of())
+ )),
+ "visible", new TopicDescription("visible", false, List.of(
+ new TopicPartitionInfo(0, null, List.of(), List.of())
+ ))
+ )
+ )
+ .build();
+
+ StepVerifier.create(topicsExporter.export(cluster))
+ .assertNext(entityList -> {
+ assertThat(entityList.getDataSourceOddrn())
+ .isNotEmpty();
+
+ assertThat(entityList.getItems())
+ .hasSize(1)
+ .allSatisfy(e -> assertThat(e.getOddrn()).contains("visible"));
+ })
+ .verifyComplete();
+ }
+
+ @Test
+ void doesExportTopicData() {
+ when(schemaRegistryClientMock.getSubjectVersion("testTopic-value", "latest"))
+ .thenReturn(Mono.just(
+ new SchemaSubject()
+ .schema("\"string\"")
+ .schemaType(SchemaType.AVRO)
+ ));
+
+ when(schemaRegistryClientMock.getSubjectVersion("testTopic-key", "latest"))
+ .thenReturn(Mono.just(
+ new SchemaSubject()
+ .schema("\"int\"")
+ .schemaType(SchemaType.AVRO)
+ ));
+
+ stats = Statistics.empty()
+ .toBuilder()
+ .topicDescriptions(
+ Map.of(
+ "testTopic",
+ new TopicDescription(
+ "testTopic",
+ false,
+ List.of(
+ new TopicPartitionInfo(
+ 0,
+ null,
+ List.of(
+ new Node(1, "host1", 9092),
+ new Node(2, "host2", 9092)
+ ),
+ List.of())
+ ))
+ )
+ )
+ .topicConfigs(
+ Map.of(
+ "testTopic", List.of(
+ new ConfigEntry(
+ "custom.config",
+ "100500",
+ ConfigEntry.ConfigSource.DYNAMIC_TOPIC_CONFIG,
+ false,
+ false,
+ List.of(),
+ ConfigEntry.ConfigType.INT,
+ null
+ )
+ )
+ )
+ )
+ .build();
+
+ StepVerifier.create(topicsExporter.export(cluster))
+ .assertNext(entityList -> {
+ assertThat(entityList.getItems())
+ .hasSize(1);
+
+ DataEntity topicEntity = entityList.getItems().get(0);
+ assertThat(topicEntity.getName()).isNotEmpty();
+ assertThat(topicEntity.getOddrn())
+ .isEqualTo("//kafka/cluster/localhost:19092,localhost:9092/topics/testTopic");
+ assertThat(topicEntity.getType()).isEqualTo(DataEntityType.KAFKA_TOPIC);
+ assertThat(topicEntity.getMetadata())
+ .hasSize(1)
+ .singleElement()
+ .satisfies(e ->
+ assertThat(e.getMetadata())
+ .containsExactlyInAnyOrderEntriesOf(
+ Map.of(
+ "partitions", 1,
+ "replication_factor", 2,
+ "custom.config", "100500")));
+
+ assertThat(topicEntity.getDataset()).isNotNull();
+ assertThat(topicEntity.getDataset().getFieldList())
+ .hasSize(4); // 2 fields for the key, 2 for the value
+ })
+ .verifyComplete();
+ }
+
+
+}
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractorTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractorTest.java
new file mode 100644
index 0000000000..d523d7cd41
--- /dev/null
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractorTest.java
@@ -0,0 +1,272 @@
+package com.provectus.kafka.ui.service.integration.odd.schema;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.provectus.kafka.ui.sr.model.SchemaSubject;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
+import org.opendatadiscovery.client.model.DataSetField;
+import org.opendatadiscovery.client.model.DataSetFieldType;
+import org.opendatadiscovery.oddrn.model.KafkaPath;
+
+class AvroExtractorTest {
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void test(boolean isKey) {
+ var list = AvroExtractor.extract(
+ new SchemaSubject()
+ .schema("""
+ {
+ "type": "record",
+ "name": "Message",
+ "namespace": "com.provectus.kafka",
+ "fields":
+ [
+ {
+ "name": "f1",
+ "type":
+ {
+ "type": "array",
+ "items":
+ {
+ "type": "record",
+ "name": "ArrElement",
+ "fields":
+ [
+ {
+ "name": "longmap",
+ "type":
+ {
+ "type": "map",
+ "values": "long"
+ }
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "f2",
+ "type":
+ {
+ "type": "record",
+ "name": "InnerMessage",
+ "fields":
+ [
+ {
+ "name": "text",
+ "doc": "string field here",
+ "type": "string"
+ },
+ {
+ "name": "innerMsgRef",
+ "type": "InnerMessage"
+ },
+ {
+ "name": "nullable_union",
+ "type":
+ [
+ "null",
+ "string",
+ "int"
+ ],
+ "default": null
+ },
+ {
+ "name": "order_enum",
+ "type":
+ {
+ "type": "enum",
+ "name": "Suit",
+ "symbols":
+ [
+ "SPADES",
+ "HEARTS"
+ ]
+ }
+ },
+ {
+ "name": "str_list",
+ "type":
+ {
+ "type": "array",
+ "items": "string"
+ }
+ }
+ ]
+ }
+ }
+ ]
+ }
+ """),
+
+ KafkaPath.builder()
+ .cluster("localhost:9092")
+ .topic("someTopic")
+ .build(),
+ isKey
+ );
+
+ String baseOddrn = "//kafka/cluster/localhost:9092/topics/someTopic/columns/" + (isKey ? "key" : "value");
+
+ assertThat(list).contains(
+ DataSetFieldsExtractors.rootField(
+ KafkaPath.builder().cluster("localhost:9092").topic("someTopic").build(),
+ isKey
+ ),
+ new DataSetField()
+ .name("f1")
+ .parentFieldOddrn(baseOddrn)
+ .oddrn(baseOddrn + "/f1")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.LIST)
+ .logicalType("array")
+ .isNullable(false)
+ ),
+ new DataSetField()
+ .name("ArrElement")
+ .parentFieldOddrn(baseOddrn + "/f1")
+ .oddrn(baseOddrn + "/f1/items/ArrElement")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.STRUCT)
+ .logicalType("com.provectus.kafka.ArrElement")
+ .isNullable(false)
+ ),
+ new DataSetField()
+ .name("longmap")
+ .parentFieldOddrn(baseOddrn + "/f1/items/ArrElement")
+ .oddrn(baseOddrn + "/f1/items/ArrElement/fields/longmap")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.MAP)
+ .logicalType("map")
+ .isNullable(false)
+ ),
+ new DataSetField()
+ .name("key")
+ .parentFieldOddrn(baseOddrn + "/f1/items/ArrElement/fields/longmap")
+ .oddrn(baseOddrn + "/f1/items/ArrElement/fields/longmap/key")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.STRING)
+ .logicalType("string")
+ .isNullable(false)
+ ),
+ new DataSetField()
+ .name("value")
+ .parentFieldOddrn(baseOddrn + "/f1/items/ArrElement/fields/longmap")
+ .oddrn(baseOddrn + "/f1/items/ArrElement/fields/longmap/value")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.INTEGER)
+ .logicalType("long")
+ .isNullable(false)
+ ),
+ new DataSetField()
+ .name("f2")
+ .parentFieldOddrn(baseOddrn)
+ .oddrn(baseOddrn + "/f2")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.STRUCT)
+ .logicalType("com.provectus.kafka.InnerMessage")
+ .isNullable(false)
+ ),
+ new DataSetField()
+ .name("text")
+ .parentFieldOddrn(baseOddrn + "/f2")
+ .oddrn(baseOddrn + "/f2/fields/text")
+ .description("string field here")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.STRING)
+ .logicalType("string")
+ .isNullable(false)
+ ),
+ new DataSetField()
+ .name("innerMsgRef")
+ .parentFieldOddrn(baseOddrn + "/f2")
+ .oddrn(baseOddrn + "/f2/fields/innerMsgRef")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.STRUCT)
+ .logicalType("com.provectus.kafka.InnerMessage")
+ .isNullable(false)
+ ),
+ new DataSetField()
+ .name("nullable_union")
+ .parentFieldOddrn(baseOddrn + "/f2")
+ .oddrn(baseOddrn + "/f2/fields/nullable_union")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.UNION)
+ .logicalType("union")
+ .isNullable(true)
+ ),
+ new DataSetField()
+ .name("string")
+ .parentFieldOddrn(baseOddrn + "/f2/fields/nullable_union")
+ .oddrn(baseOddrn + "/f2/fields/nullable_union/values/string")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.STRING)
+ .logicalType("string")
+ .isNullable(true)
+ ),
+ new DataSetField()
+ .name("int")
+ .parentFieldOddrn(baseOddrn + "/f2/fields/nullable_union")
+ .oddrn(baseOddrn + "/f2/fields/nullable_union/values/int")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.INTEGER)
+ .logicalType("int")
+ .isNullable(true)
+ ),
+ new DataSetField()
+ .name("order_enum")
+ .parentFieldOddrn(baseOddrn + "/f2")
+ .oddrn(baseOddrn + "/f2/fields/order_enum")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.STRING)
+ .logicalType("enum")
+ .isNullable(false)
+ ),
+ new DataSetField()
+ .name("str_list")
+ .parentFieldOddrn(baseOddrn + "/f2")
+ .oddrn(baseOddrn + "/f2/fields/str_list")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.LIST)
+ .logicalType("array")
+ .isNullable(false)
+ ),
+ new DataSetField()
+ .name("string")
+ .parentFieldOddrn(baseOddrn + "/f2/fields/str_list")
+ .oddrn(baseOddrn + "/f2/fields/str_list/items/string")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.STRING)
+ .logicalType("string")
+ .isNullable(false)
+ )
+ );
+ }
+
+}
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractorTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractorTest.java
new file mode 100644
index 0000000000..7968e52e6d
--- /dev/null
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractorTest.java
@@ -0,0 +1,145 @@
+package com.provectus.kafka.ui.service.integration.odd.schema;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.provectus.kafka.ui.sr.model.SchemaSubject;
+import java.net.URI;
+import java.util.List;
+import java.util.Map;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
+import org.opendatadiscovery.client.model.DataSetField;
+import org.opendatadiscovery.client.model.DataSetFieldType;
+import org.opendatadiscovery.client.model.MetadataExtension;
+import org.opendatadiscovery.oddrn.model.KafkaPath;
+
+class JsonSchemaExtractorTest {
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void test(boolean isKey) {
+ String jsonSchema = """
+ {
+ "$id": "http://example.com/test.TestMsg",
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "type": "object",
+ "required": [ "int32_field" ],
+ "properties":
+ {
+ "int32_field": { "type": "integer", "title": "field title" },
+ "lst_s_field": { "type": "array", "items": { "type": "string" }, "description": "field descr" },
+ "untyped_struct_field": { "type": "object", "properties": {} },
+ "union_field": { "type": [ "number", "object", "null" ] },
+ "struct_field": {
+ "type": "object",
+ "properties": {
+ "bool_field": { "type": "boolean" }
+ }
+ }
+ }
+ }
+ """;
+ var fields = JsonSchemaExtractor.extract(
+ new SchemaSubject().schema(jsonSchema),
+ KafkaPath.builder()
+ .cluster("localhost:9092")
+ .topic("someTopic")
+ .build(),
+ isKey
+ );
+
+ String baseOddrn = "//kafka/cluster/localhost:9092/topics/someTopic/columns/" + (isKey ? "key" : "value");
+
+ assertThat(fields).contains(
+ DataSetFieldsExtractors.rootField(
+ KafkaPath.builder().cluster("localhost:9092").topic("someTopic").build(),
+ isKey
+ ),
+ new DataSetField()
+ .name("int32_field")
+ .parentFieldOddrn(baseOddrn)
+ .oddrn(baseOddrn + "/int32_field")
+ .description("field title")
+ .type(new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.NUMBER)
+ .logicalType("Number")
+ .isNullable(false)),
+ new DataSetField()
+ .name("lst_s_field")
+ .parentFieldOddrn(baseOddrn)
+ .oddrn(baseOddrn + "/lst_s_field")
+ .description("field descr")
+ .type(new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.LIST)
+ .logicalType("array")
+ .isNullable(true)),
+ new DataSetField()
+ .name("String")
+ .parentFieldOddrn(baseOddrn + "/lst_s_field")
+ .oddrn(baseOddrn + "/lst_s_field/items/String")
+ .type(new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.STRING)
+ .logicalType("String")
+ .isNullable(false)),
+ new DataSetField()
+ .name("untyped_struct_field")
+ .parentFieldOddrn(baseOddrn)
+ .oddrn(baseOddrn + "/untyped_struct_field")
+ .type(new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.STRUCT)
+ .logicalType("Object")
+ .isNullable(true)),
+ new DataSetField()
+ .name("union_field")
+ .parentFieldOddrn(baseOddrn)
+ .oddrn(baseOddrn + "/union_field/anyOf")
+ .metadata(List.of(new MetadataExtension()
+ .schemaUrl(URI.create("wontbeused.oops"))
+ .metadata(Map.of("criterion", "anyOf"))))
+ .type(new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.UNION)
+ .logicalType("anyOf")
+ .isNullable(true)),
+ new DataSetField()
+ .name("Number")
+ .parentFieldOddrn(baseOddrn + "/union_field/anyOf")
+ .oddrn(baseOddrn + "/union_field/anyOf/values/Number")
+ .type(new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.NUMBER)
+ .logicalType("Number")
+ .isNullable(true)),
+ new DataSetField()
+ .name("Object")
+ .parentFieldOddrn(baseOddrn + "/union_field/anyOf")
+ .oddrn(baseOddrn + "/union_field/anyOf/values/Object")
+ .type(new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.STRUCT)
+ .logicalType("Object")
+ .isNullable(true)),
+ new DataSetField()
+ .name("Null")
+ .parentFieldOddrn(baseOddrn + "/union_field/anyOf")
+ .oddrn(baseOddrn + "/union_field/anyOf/values/Null")
+ .type(new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.UNKNOWN)
+ .logicalType("Null")
+ .isNullable(true)),
+ new DataSetField()
+ .name("struct_field")
+ .parentFieldOddrn(baseOddrn)
+ .oddrn(baseOddrn + "/struct_field")
+ .type(new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.STRUCT)
+ .logicalType("Object")
+ .isNullable(true)),
+ new DataSetField()
+ .name("bool_field")
+ .parentFieldOddrn(baseOddrn + "/struct_field")
+ .oddrn(baseOddrn + "/struct_field/fields/bool_field")
+ .type(new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.BOOLEAN)
+ .logicalType("Boolean")
+ .isNullable(true))
+ );
+ }
+}
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractorTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractorTest.java
new file mode 100644
index 0000000000..cbb97a859c
--- /dev/null
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractorTest.java
@@ -0,0 +1,187 @@
+package com.provectus.kafka.ui.service.integration.odd.schema;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.provectus.kafka.ui.sr.model.SchemaSubject;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
+import org.opendatadiscovery.client.model.DataSetField;
+import org.opendatadiscovery.client.model.DataSetFieldType;
+import org.opendatadiscovery.oddrn.model.KafkaPath;
+
+class ProtoExtractorTest {
+
+ @ParameterizedTest
+ @ValueSource(booleans = {true, false})
+ void test(boolean isKey) {
+ String protoSchema = """
+ syntax = "proto3";
+ package test;
+
+ import "google/protobuf/timestamp.proto";
+ import "google/protobuf/duration.proto";
+ import "google/protobuf/struct.proto";
+ import "google/protobuf/wrappers.proto";
+
+ message TestMsg {
+ map<string, string> mapField = 100;
+ int32 int32_field = 2;
+ bool bool_field = 3;
+ SampleEnum enum_field = 4;
+
+ enum SampleEnum {
+ ENUM_V1 = 0;
+ ENUM_V2 = 1;
+ }
+
+ google.protobuf.Timestamp ts_field = 5;
+ google.protobuf.Duration duration_field = 8;
+
+ oneof some_oneof1 {
+ google.protobuf.Value one_of_v1 = 9;
+ google.protobuf.Value one_of_v2 = 10;
+ }
+ // wrapper field:
+ google.protobuf.Int64Value int64_w_field = 11;
+
+ //embedded msg
+ EmbeddedMsg emb = 19;
+
+ message EmbeddedMsg {
+ int32 emb_f1 = 1;
+ TestMsg outer_ref = 2;
+ }
+ }""";
+
+ var list = ProtoExtractor.extract(
+ new SchemaSubject()
+ .schema(protoSchema),
+ KafkaPath.builder()
+ .cluster("localhost:9092")
+ .topic("someTopic")
+ .build(),
+ isKey
+ );
+
+ String baseOddrn = "//kafka/cluster/localhost:9092/topics/someTopic/columns/" + (isKey ? "key" : "value");
+
+ assertThat(list)
+ .contains(
+ DataSetFieldsExtractors.rootField(
+ KafkaPath.builder().cluster("localhost:9092").topic("someTopic").build(),
+ isKey
+ ),
+ new DataSetField()
+ .name("mapField")
+ .parentFieldOddrn(baseOddrn)
+ .oddrn(baseOddrn + "/mapField")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.LIST)
+ .logicalType("repeated")
+ .isNullable(true)
+ ),
+ new DataSetField()
+ .name("int32_field")
+ .parentFieldOddrn(baseOddrn)
+ .oddrn(baseOddrn + "/int32_field")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.INTEGER)
+ .logicalType("int32")
+ .isNullable(true)
+ ),
+ new DataSetField()
+ .name("enum_field")
+ .parentFieldOddrn(baseOddrn)
+ .oddrn(baseOddrn + "/enum_field")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.STRING)
+ .logicalType("enum")
+ .isNullable(true)
+ ),
+ new DataSetField()
+ .name("ts_field")
+ .parentFieldOddrn(baseOddrn)
+ .oddrn(baseOddrn + "/ts_field")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.DATETIME)
+ .logicalType("google.protobuf.Timestamp")
+ .isNullable(true)
+ ),
+ new DataSetField()
+ .name("duration_field")
+ .parentFieldOddrn(baseOddrn)
+ .oddrn(baseOddrn + "/duration_field")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.DURATION)
+ .logicalType("google.protobuf.Duration")
+ .isNullable(true)
+ ),
+ new DataSetField()
+ .name("one_of_v1")
+ .parentFieldOddrn(baseOddrn)
+ .oddrn(baseOddrn + "/one_of_v1")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.UNKNOWN)
+ .logicalType("google.protobuf.Value")
+ .isNullable(true)
+ ),
+ new DataSetField()
+ .name("one_of_v2")
+ .parentFieldOddrn(baseOddrn)
+ .oddrn(baseOddrn + "/one_of_v2")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.UNKNOWN)
+ .logicalType("google.protobuf.Value")
+ .isNullable(true)
+ ),
+ new DataSetField()
+ .name("int64_w_field")
+ .parentFieldOddrn(baseOddrn)
+ .oddrn(baseOddrn + "/int64_w_field")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.INTEGER)
+ .logicalType("google.protobuf.Int64Value")
+ .isNullable(true)
+ ),
+ new DataSetField()
+ .name("emb")
+ .parentFieldOddrn(baseOddrn)
+ .oddrn(baseOddrn + "/emb")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.STRUCT)
+ .logicalType("test.TestMsg.EmbeddedMsg")
+ .isNullable(true)
+ ),
+ new DataSetField()
+ .name("emb_f1")
+ .parentFieldOddrn(baseOddrn + "/emb")
+ .oddrn(baseOddrn + "/emb/fields/emb_f1")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.INTEGER)
+ .logicalType("int32")
+ .isNullable(true)
+ ),
+ new DataSetField()
+ .name("outer_ref")
+ .parentFieldOddrn(baseOddrn + "/emb")
+ .oddrn(baseOddrn + "/emb/fields/outer_ref")
+ .type(
+ new DataSetFieldType()
+ .type(DataSetFieldType.TypeEnum.STRUCT)
+ .logicalType("test.TestMsg")
+ .isNullable(true)
+ )
+ );
+ }
+
+}
diff --git a/pom.xml b/pom.xml
index 9abb41a865..9266c2c14f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -42,6 +42,8 @@
2.7.5
5.7.5
1.0.0
+ 0.1.15
+ 0.1.19
5.9.1
From b5e3d1f92825c05942d950d8e1383bba4834cf2f Mon Sep 17 00:00:00 2001
From: David <58771979+David-DB88@users.noreply.github.com>
Date: Mon, 6 Mar 2023 12:49:38 +0400
Subject: [PATCH 35/54] [FE] Brokers: fix missing tooltip for config source
(#3356)
* .
* ..
* added getReferenceProps getFloatingProps
* testing
* testing2
* testing3
* added autoUpdate
* added @floating-ui/react
* removed @floating-ui/react-dom-interactions
---------
Co-authored-by: davitbejanyan
Co-authored-by: Oleg Shur
---
kafka-ui-react-app/package.json | 2 +-
kafka-ui-react-app/pnpm-lock.yaml | 47 ++++++++++---------
.../common/ActionComponent/ActionComponent.ts | 2 +-
.../src/components/common/Tooltip/Tooltip.tsx | 16 ++++---
.../src/lib/hooks/useActionTooltip.ts | 2 +-
5 files changed, 38 insertions(+), 31 deletions(-)
diff --git a/kafka-ui-react-app/package.json b/kafka-ui-react-app/package.json
index 94759221df..f4f5bf855f 100644
--- a/kafka-ui-react-app/package.json
+++ b/kafka-ui-react-app/package.json
@@ -7,7 +7,7 @@
"@babel/core": "^7.16.0",
"@babel/plugin-syntax-flow": "^7.18.6",
"@babel/plugin-transform-react-jsx": "^7.18.6",
- "@floating-ui/react-dom-interactions": "^0.10.3",
+ "@floating-ui/react": "^0.19.2",
"@hookform/error-message": "^2.0.0",
"@hookform/resolvers": "^2.7.1",
"@microsoft/fetch-event-source": "^2.0.1",
diff --git a/kafka-ui-react-app/pnpm-lock.yaml b/kafka-ui-react-app/pnpm-lock.yaml
index eb9b03c044..41d370327b 100644
--- a/kafka-ui-react-app/pnpm-lock.yaml
+++ b/kafka-ui-react-app/pnpm-lock.yaml
@@ -7,7 +7,7 @@ specifiers:
'@babel/preset-env': ^7.18.2
'@babel/preset-react': ^7.17.12
'@babel/preset-typescript': ^7.17.12
- '@floating-ui/react-dom-interactions': ^0.10.3
+ '@floating-ui/react': ^0.19.2
'@hookform/error-message': ^2.0.0
'@hookform/resolvers': ^2.7.1
'@jest/types': ^29.0.3
@@ -98,7 +98,7 @@ dependencies:
'@babel/core': 7.18.2
'@babel/plugin-syntax-flow': 7.18.6_@babel+core@7.18.2
'@babel/plugin-transform-react-jsx': 7.18.6_@babel+core@7.18.2
- '@floating-ui/react-dom-interactions': 0.10.3_ohobp6rpsmerwlq5ipwfh5yigy
+ '@floating-ui/react': 0.19.2_ohobp6rpsmerwlq5ipwfh5yigy
'@hookform/error-message': 2.0.0_l2dcsysovzdujulgxvsen7vbsm
'@hookform/resolvers': 2.8.9_react-hook-form@7.6.9
'@microsoft/fetch-event-source': 2.0.1
@@ -2886,41 +2886,42 @@ packages:
- supports-color
dev: true
- /@floating-ui/core/1.0.1:
- resolution: {integrity: sha512-bO37brCPfteXQfFY0DyNDGB3+IMe4j150KFQcgJ5aBP295p9nBGeHEs/p0czrRbtlHq4Px/yoPXO/+dOCcF4uA==}
+ /@floating-ui/core/1.2.1:
+ resolution: {integrity: sha512-LSqwPZkK3rYfD7GKoIeExXOyYx6Q1O4iqZWwIehDNuv3Dv425FIAE8PRwtAx1imEolFTHgBEcoFHm9MDnYgPCg==}
dev: false
- /@floating-ui/dom/1.0.4:
- resolution: {integrity: sha512-maYJRv+sAXTy4K9mzdv0JPyNW5YPVHrqtY90tEdI6XNpuLOP26Ci2pfwPsKBA/Wh4Z3FX5sUrtUFTdMYj9v+ug==}
+ /@floating-ui/dom/1.2.1:
+ resolution: {integrity: sha512-Rt45SmRiV8eU+xXSB9t0uMYiQ/ZWGE/jumse2o3i5RGlyvcbqOF4q+1qBnzLE2kZ5JGhq0iMkcGXUKbFe7MpTA==}
dependencies:
- '@floating-ui/core': 1.0.1
+ '@floating-ui/core': 1.2.1
dev: false
- /@floating-ui/react-dom-interactions/0.10.3_ohobp6rpsmerwlq5ipwfh5yigy:
- resolution: {integrity: sha512-UEHqdnzyoiWNU5az/tAljr9iXFzN18DcvpMqW+/cXz4FEhDEB1ogLtWldOWCujLerPBnSRocADALafelOReMpw==}
+ /@floating-ui/react-dom/1.3.0_ef5jwxihqo6n7gxfmzogljlgcm:
+ resolution: {integrity: sha512-htwHm67Ji5E/pROEAr7f8IKFShuiCKHwUC/UY4vC3I5jiSvGFAYnSYiZO5MlGmads+QqvUkR9ANHEguGrDv72g==}
peerDependencies:
react: '>=16.8.0'
react-dom: '>=16.8.0'
dependencies:
- '@floating-ui/react-dom': 1.0.0_ef5jwxihqo6n7gxfmzogljlgcm
+ '@floating-ui/dom': 1.2.1
+ react: 18.1.0
+ react-dom: 18.1.0_react@18.1.0
+ dev: false
+
+ /@floating-ui/react/0.19.2_ohobp6rpsmerwlq5ipwfh5yigy:
+ resolution: {integrity: sha512-JyNk4A0Ezirq8FlXECvRtQOX/iBe5Ize0W/pLkrZjfHW9GUV7Xnq6zm6fyZuQzaHHqEnVizmvlA96e1/CkZv+w==}
+ peerDependencies:
+ react: '>=16.8.0'
+ react-dom: '>=16.8.0'
+ dependencies:
+ '@floating-ui/react-dom': 1.3.0_ef5jwxihqo6n7gxfmzogljlgcm
aria-hidden: 1.2.1_7cpxmzzodpxnolj5zcc5cr63ji
react: 18.1.0
react-dom: 18.1.0_react@18.1.0
+ tabbable: 6.1.1
transitivePeerDependencies:
- '@types/react'
dev: false
- /@floating-ui/react-dom/1.0.0_ef5jwxihqo6n7gxfmzogljlgcm:
- resolution: {integrity: sha512-uiOalFKPG937UCLm42RxjESTWUVpbbatvlphQAU6bsv+ence6IoVG8JOUZcy8eW81NkU+Idiwvx10WFLmR4MIg==}
- peerDependencies:
- react: '>=16.8.0'
- react-dom: '>=16.8.0'
- dependencies:
- '@floating-ui/dom': 1.0.4
- react: 18.1.0
- react-dom: 18.1.0_react@18.1.0
- dev: false
-
/@hookform/error-message/2.0.0_l2dcsysovzdujulgxvsen7vbsm:
resolution: {integrity: sha512-Y90nHzjgL2MP7GFy75kscdvxrCTjtyxGmOLLxX14nd08OXRIh9lMH/y9Kpdo0p1IPowJBiZMHyueg7p+yrqynQ==}
peerDependencies:
@@ -8481,6 +8482,10 @@ packages:
tslib: 2.4.0
dev: true
+ /tabbable/6.1.1:
+ resolution: {integrity: sha512-4kl5w+nCB44EVRdO0g/UGoOp3vlwgycUVtkk/7DPyeLZUCuNFFKCFG6/t/DgHLrUPHjrZg6s5tNm+56Q2B0xyg==}
+ dev: false
+
/tapable/2.2.1:
resolution: {integrity: sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==}
engines: {node: '>=6'}
diff --git a/kafka-ui-react-app/src/components/common/ActionComponent/ActionComponent.ts b/kafka-ui-react-app/src/components/common/ActionComponent/ActionComponent.ts
index 346b916ca1..4d6e593ef2 100644
--- a/kafka-ui-react-app/src/components/common/ActionComponent/ActionComponent.ts
+++ b/kafka-ui-react-app/src/components/common/ActionComponent/ActionComponent.ts
@@ -1,4 +1,4 @@
-import { Placement } from '@floating-ui/react-dom-interactions';
+import { Placement } from '@floating-ui/react';
import { Action, ResourceType } from 'generated-sources';
export interface ActionComponentProps {
diff --git a/kafka-ui-react-app/src/components/common/Tooltip/Tooltip.tsx b/kafka-ui-react-app/src/components/common/Tooltip/Tooltip.tsx
index cd76f4cc4f..4c22cd72fd 100644
--- a/kafka-ui-react-app/src/components/common/Tooltip/Tooltip.tsx
+++ b/kafka-ui-react-app/src/components/common/Tooltip/Tooltip.tsx
@@ -4,7 +4,7 @@ import {
useHover,
useInteractions,
Placement,
-} from '@floating-ui/react-dom-interactions';
+} from '@floating-ui/react';
import * as S from './Tooltip.styled';
@@ -16,26 +16,28 @@ export interface PropsTypes {
const Tooltip: React.FC<PropsTypes> = ({ value, content, placement }) => {
const [open, setOpen] = useState(false);
- const { x, y, reference, floating, strategy, context } = useFloating({
+ const { x, y, refs, strategy, context } = useFloating({
open,
onOpenChange: setOpen,
placement,
});
-
- useInteractions([useHover(context)]);
-
+ const hover = useHover(context);
+ const { getReferenceProps, getFloatingProps } = useInteractions([hover]);
return (
<>
- {value}
+
+ {value}
+
{open && (
{content}
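Note: the JSX of this render block was mangled when the patch was archived. Judging from the surrounding hunk, the migrated markup presumably follows the standard @floating-ui/react pattern, roughly as below; this is a hedged reconstruction, not the verbatim patch, and the styled wrapper name is a guess.

```tsx
// Reconstruction sketch only: the real Tooltip.tsx may use different
// element names (e.g. other wrappers from './Tooltip.styled').
<>
  <div ref={refs.setReference} {...getReferenceProps()}>
    {value}
  </div>
  {open && (
    <S.MessageTooltip
      ref={refs.setFloating}
      {...getFloatingProps()}
      style={{ position: strategy, top: y ?? 0, left: x ?? 0 }}
    >
      {content}
    </S.MessageTooltip>
  )}
</>
```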
diff --git a/kafka-ui-react-app/src/lib/hooks/useActionTooltip.ts b/kafka-ui-react-app/src/lib/hooks/useActionTooltip.ts
index f8f8498898..3fcb7ec48b 100644
--- a/kafka-ui-react-app/src/lib/hooks/useActionTooltip.ts
+++ b/kafka-ui-react-app/src/lib/hooks/useActionTooltip.ts
@@ -6,7 +6,7 @@ import {
useFloating,
useHover,
useInteractions,
-} from '@floating-ui/react-dom-interactions';
+} from '@floating-ui/react';
export function useActionTooltip(isDisabled?: boolean, placement?: Placement) {
const [open, setOpen] = useState(false);
From 18c046af5b8e6d639af921dacb664b30d0b0f44b Mon Sep 17 00:00:00 2001
From: Snowfox0618 <60497052+Snowfox0618@users.noreply.github.com>
Date: Mon, 6 Mar 2023 16:50:58 +0800
Subject: [PATCH 36/54] [FE] Fix number of partitions field validation issue
(#3400)
* Now user can only input valid digits, and '-' is not allowed in positive-only inputs.
* Fix ISSUE#3319
Now user can only input valid digits, and '-' is not allowed in positive-only inputs.
* Revert "Fix ISSUE#3319"
This reverts commit a4e34f5af3b4f049bc19089cfa53b7a99aafb0cf.
* Fix ISSUE#3319
Created a helper function, and added a unit test to cover it.
* Fix ISSUE#3319
Located the helper function outside the component, and renamed some unit tests to make their meaning more clear.
* Fix ISSUE#3319
- Added an attribute 'integerOnly' to the 'Input' component, indicating whether a given input instance accepts decimals.
- Improved the input-check and paste-check functions to reject invalid number formats (like '3-3' or '3.3.3').
- Added new unit tests covering the new input-check and paste-check functions.
- Added the 'integerOnly' attribute to the Input instances in the TopicForm component.
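A standalone sketch of the paste-normalization rule described above (the helper name here is hypothetical; the patched code implements this logic as `pasteNumberCheck` in Input.tsx):

```ts
// Normalize pasted text for a number input: keep an optional leading '-',
// strip non-digit characters, and keep only the first '.' when decimals
// are allowed.
function normalizePastedNumber(
  text: string,
  positiveOnly: boolean,
  integerOnly: boolean
): string {
  const sign = !positiveOnly && text.startsWith('-') ? '-' : '';
  let digits = text.replace(/[^\d.]/g, '');
  if (integerOnly) {
    digits = digits.replace(/\./g, '');
  } else {
    const [head, ...rest] = digits.split('.');
    digits = rest.length > 0 ? `${head}.${rest.join('')}` : head;
  }
  return sign + digits;
}

// Examples matching the formats called out above:
console.log(normalizePastedNumber('3-3', true, true)); // '33'
console.log(normalizePastedNumber('3.3.3', false, false)); // '3.33'
console.log(normalizePastedNumber('-2', true, true)); // '2'
```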
---
.../Topics/shared/Form/TopicForm.tsx | 8 +
.../src/components/common/Input/Input.tsx | 116 +++++++++++--
.../common/Input/__tests__/Input.spec.tsx | 159 ++++++++++++++++--
.../common/NewTable/__test__/Table.spec.tsx | 11 ++
4 files changed, 269 insertions(+), 25 deletions(-)
diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx
index ae9d429236..e8d7f1b844 100644
--- a/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx
+++ b/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx
@@ -116,6 +116,8 @@ const TopicForm: React.FC = ({
placeholder="Number of partitions"
min="1"
name="partitions"
+ positiveOnly
+ integerOnly
/>
@@ -161,6 +163,8 @@ const TopicForm: React.FC = ({
placeholder="Min In Sync Replicas"
min="1"
name="minInSyncReplicas"
+ positiveOnly
+ integerOnly
/>
@@ -177,6 +181,8 @@ const TopicForm: React.FC = ({
placeholder="Replication Factor"
min="1"
name="replicationFactor"
+ positiveOnly
+ integerOnly
/>
@@ -227,6 +233,8 @@ const TopicForm: React.FC = ({
placeholder="Maximum message size"
min="1"
name="maxMessageBytes"
+ positiveOnly
+ integerOnly
/>
diff --git a/kafka-ui-react-app/src/components/common/Input/Input.tsx b/kafka-ui-react-app/src/components/common/Input/Input.tsx
index 932f954aa5..f5049e6961 100644
--- a/kafka-ui-react-app/src/components/common/Input/Input.tsx
+++ b/kafka-ui-react-app/src/components/common/Input/Input.tsx
@@ -11,6 +11,87 @@ export interface InputProps
hookFormOptions?: RegisterOptions;
search?: boolean;
positiveOnly?: boolean;
+
+ // Some fields accept only integers (like `Number of Partitions`),
+ // while others also accept decimals
+ integerOnly?: boolean;
+}
+
+function inputNumberCheck(
+ key: string,
+ positiveOnly: boolean,
+ integerOnly: boolean,
+ getValues: (name: string) => string,
+ componentName: string
+) {
+ let isValid = true;
+ if (!((key >= '0' && key <= '9') || key === '-' || key === '.')) {
+ // If not a valid digit char.
+ isValid = false;
+ } else {
+ // If there is any restriction.
+ if (positiveOnly) {
+ isValid = !(key === '-');
+ }
+ if (isValid && integerOnly) {
+ isValid = !(key === '.');
+ }
+
+ // Check invalid format
+ const value = getValues(componentName);
+
+ if (isValid && (key === '-' || key === '.')) {
+ if (!positiveOnly) {
+ if (key === '-') {
+ if (value !== '') {
+ // '-' should not appear anywhere except the start of the string
+ isValid = false;
+ }
+ }
+ }
+ if (!integerOnly) {
+ if (key === '.') {
+ if (value === '' || value.indexOf('.') !== -1) {
+ // '.' should not appear at the start of the string or appear twice
+ isValid = false;
+ }
+ }
+ }
+ }
+ }
+ return isValid;
+}
+
+function pasteNumberCheck(
+ text: string,
+ positiveOnly: boolean,
+ integerOnly: boolean
+) {
+ let value: string;
+ value = text;
+ let sign = '';
+ if (!positiveOnly) {
+ if (value.charAt(0) === '-') {
+ sign = '-';
+ }
+ }
+ if (integerOnly) {
+ value = value.replace(/\D/g, '');
+ } else {
+ value = value.replace(/[^\d.]/g, '');
+ if (value.indexOf('.') !== value.lastIndexOf('.')) {
+ const strs = value.split('.');
+ value = '';
+ for (let i = 0; i < strs.length; i += 1) {
+ value += strs[i];
+ if (i === 0) {
+ value += '.';
+ }
+ }
+ }
+ }
+ value = sign + value;
+ return value;
}
const Input: React.FC<InputProps> = ({
@@ -20,17 +101,27 @@ const Input: React.FC = ({
inputSize = 'L',
type,
positiveOnly,
+ integerOnly,
...rest
}) => {
const methods = useFormContext();
+
const keyPressEventHandler = (
event: React.KeyboardEvent<HTMLInputElement>
) => {
- const { key, code } = event;
+ const { key } = event;
if (type === 'number') {
- // Manualy prevent input of 'e' character for all number inputs
+ // Manually prevent input of non-digit and non-minus for all number inputs
// and prevent input of negative numbers for positiveOnly inputs
- if (key === 'e' || (positiveOnly && (key === '-' || code === 'Minus'))) {
+ if (
+ !inputNumberCheck(
+ key,
+ typeof positiveOnly === 'boolean' ? positiveOnly : false,
+ typeof integerOnly === 'boolean' ? integerOnly : false,
+ methods.getValues,
+ typeof name === 'string' ? name : ''
+ )
+ ) {
event.preventDefault();
}
}
@@ -38,17 +129,14 @@ const Input: React.FC = ({
const pasteEventHandler = (event: React.ClipboardEvent<HTMLInputElement>) => {
if (type === 'number') {
const { clipboardData } = event;
- const text = clipboardData.getData('Text');
- // replace all non-digit characters with empty string
- let value = text.replace(/[^\d.]/g, '');
- if (positiveOnly) {
- // check if value is negative
- const parsedData = parseFloat(value);
- if (parsedData < 0) {
- // remove minus sign
- value = String(Math.abs(parsedData));
- }
- }
+ // 'clipboardData' exposes the pasted text under the 'text' key, not 'Text'.
+ const text = clipboardData.getData('text');
+ // Check the format of pasted text.
+ const value = pasteNumberCheck(
+ text,
+ typeof positiveOnly === 'boolean' ? positiveOnly : false,
+ typeof integerOnly === 'boolean' ? integerOnly : false
+ );
// if paste value contains non-numeric characters or
// negative for positiveOnly fields then prevent paste
if (value !== text) {
diff --git a/kafka-ui-react-app/src/components/common/Input/__tests__/Input.spec.tsx b/kafka-ui-react-app/src/components/common/Input/__tests__/Input.spec.tsx
index 3082f6f610..0254196965 100644
--- a/kafka-ui-react-app/src/components/common/Input/__tests__/Input.spec.tsx
+++ b/kafka-ui-react-app/src/components/common/Input/__tests__/Input.spec.tsx
@@ -4,12 +4,23 @@ import { screen } from '@testing-library/react';
import { render } from 'lib/testHelpers';
import userEvent from '@testing-library/user-event';
+// Mock useFormContext; `component` holds the rendered input so the mocked
+// getValues/setValue below can read and write its value
+let component: HTMLInputElement;
+
const setupWrapper = (props?: Partial<InputProps>) => (
  <Input {...props} />
);
jest.mock('react-hook-form', () => ({
useFormContext: () => ({
register: jest.fn(),
+
+ // Mock methods.getValues and methods.setValue
+ getValues: jest.fn(() => {
+ return component.value;
+ }),
+ setValue: jest.fn((key, val) => {
+ component.value = val;
+ }),
}),
}));
@@ -23,20 +34,146 @@ describe('Custom Input', () => {
});
});
describe('number', () => {
- const getInput = () => screen.getByRole('spinbutton');
+ const getInput = () => screen.getByRole('spinbutton');
- it('allows user to type only numbers', async () => {
- render(setupWrapper({ type: 'number' }));
- const input = getInput();
- await userEvent.type(input, 'abc131');
- expect(input).toHaveValue(131);
+ describe('input', () => {
+ it('allows user to type numbers only', async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.type(input, 'abc131');
+ expect(input).toHaveValue(131);
+ });
+
+ it('allows user to type negative values', async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.type(input, '-2');
+ expect(input).toHaveValue(-2);
+ });
+
+ it('allows user to type positive values only', async () => {
+ render(setupWrapper({ type: 'number', positiveOnly: true }));
+ const input = getInput();
+ component = input;
+ await userEvent.type(input, '-2');
+ expect(input).toHaveValue(2);
+ });
+
+ it('allows user to type decimal', async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.type(input, '2.3');
+ expect(input).toHaveValue(2.3);
+ });
+
+ it('allows user to type integer only', async () => {
+ render(setupWrapper({ type: 'number', integerOnly: true }));
+ const input = getInput();
+ component = input;
+ await userEvent.type(input, '2.3');
+ expect(input).toHaveValue(23);
+ });
+
+ it("not allow '-' appear at any position of the string except the start", async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.type(input, '2-3');
+ expect(input).toHaveValue(23);
+ });
+
+ it("not allow '.' appear at the start of the string", async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.type(input, '.33');
+ expect(input).toHaveValue(33);
+ });
+
+ it("not allow '.' appear twice in the string", async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.type(input, '3.3.3');
+ expect(input).toHaveValue(3.33);
+ });
});
- it('allows negative values', async () => {
- render(setupWrapper({ type: 'number' }));
- const input = getInput();
- await userEvent.type(input, '-2');
- expect(input).toHaveValue(-2);
+ describe('paste', () => {
+ it('allows user to paste numbers only', async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.click(input);
+ await userEvent.paste('abc131');
+ expect(input).toHaveValue(131);
+ });
+
+ it('allows user to paste negative values', async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.click(input);
+ await userEvent.paste('-2');
+ expect(input).toHaveValue(-2);
+ });
+
+ it('allows user to paste positive values only', async () => {
+ render(setupWrapper({ type: 'number', positiveOnly: true }));
+ const input = getInput();
+ component = input;
+ await userEvent.click(input);
+ await userEvent.paste('-2');
+ expect(input).toHaveValue(2);
+ });
+
+ it('allows user to paste decimal', async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.click(input);
+ await userEvent.paste('2.3');
+ expect(input).toHaveValue(2.3);
+ });
+
+ it('allows user to paste integer only', async () => {
+ render(setupWrapper({ type: 'number', integerOnly: true }));
+ const input = getInput();
+ component = input;
+ await userEvent.click(input);
+ await userEvent.paste('2.3');
+ expect(input).toHaveValue(23);
+ });
+
+ it("not allow '-' appear at any position of the pasted string except the start", async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.click(input);
+ await userEvent.paste('2-3');
+ expect(input).toHaveValue(23);
+ });
+
+ it("not allow '.' appear at the start of the pasted string", async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.click(input);
+ await userEvent.paste('.33');
+ expect(input).toHaveValue(0.33);
+ });
+
+ it("not allow '.' appear twice in the pasted string", async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.click(input);
+ await userEvent.paste('3.3.3');
+ expect(input).toHaveValue(3.33);
+ });
});
});
});
diff --git a/kafka-ui-react-app/src/components/common/NewTable/__test__/Table.spec.tsx b/kafka-ui-react-app/src/components/common/NewTable/__test__/Table.spec.tsx
index 43dd7b8dbf..c60cccdabe 100644
--- a/kafka-ui-react-app/src/components/common/NewTable/__test__/Table.spec.tsx
+++ b/kafka-ui-react-app/src/components/common/NewTable/__test__/Table.spec.tsx
@@ -20,6 +20,17 @@ jest.mock('react-router-dom', () => ({
useNavigate: () => mockedUsedNavigate,
}));
+// Mock react-hook-form so components that call useFormContext (e.g. Input) work in these tests.
+jest.mock('react-hook-form', () => ({
+ useFormContext: () => ({
+ register: jest.fn(),
+
+ // Mock methods.getValues and methods.setValue
+ getValues: jest.fn(),
+ setValue: jest.fn(),
+ }),
+}));
+
type Datum = typeof data[0];
const data = [
From f193e5fed7f2e5e33ef3b5d72bf0db33e5e9d6dc Mon Sep 17 00:00:00 2001
From: Vlad Senyuta <66071557+VladSenyuta@users.noreply.github.com>
Date: Mon, 6 Mar 2023 10:59:48 +0200
Subject: [PATCH 37/54] [e2e] Setup qase integration (once again) (#3441)
* add checkNewBrokersTest
* add manual cases and listeners
* add manual cases and listeners
* add manual cases and listeners
* upd manual suite
* upd listeners
* add readme
* upd template
* upd naming
* upd template
* upd template
* upd template
* upd template
* fix naming
* fix MessagesTest
* upd manual cases
* upd comments
---
.github/workflows/e2e-automation.yml | 2 -
.github/workflows/e2e-manual.yml | 43 ++++++
kafka-ui-e2e-checks/QASE.md | 70 ++++++++++
kafka-ui-e2e-checks/README.md | 5 +
kafka-ui-e2e-checks/pom.xml | 6 +-
.../listeners/QaseCreateListener.java | 123 ++++++++++++++++++
.../listeners/QaseResultListener.java | 102 +++++++++++++++
.../ui/utilities/qaseUtils/QaseSetup.java | 2 +-
.../qaseUtils/annotations/Automation.java | 15 +++
.../annotations/AutomationStatus.java | 12 --
.../qaseUtils/annotations/Status.java | 13 ++
.../qaseUtils/annotations/Suite.java | 7 +-
.../ui/utilities/qaseUtils/enums/State.java | 18 +++
.../ui/utilities/qaseUtils/enums/Status.java | 15 ++-
.../provectus/kafka/ui/variables/Suite.java | 1 -
.../java/com/provectus/kafka/ui/BaseTest.java | 8 +-
.../java/com/provectus/kafka/ui/Facade.java | 1 +
.../kafka/ui/manualSuite/BaseManualTest.java | 28 ++++
.../kafka/ui/manualSuite/TestClass.java | 4 -
.../ui/manualSuite/suite/DataMaskingTest.java | 29 +++++
.../kafka/ui/manualSuite/suite/RbacTest.java | 53 ++++++++
.../ui/manualSuite/suite/TopicsTest.java | 95 ++++++++++++++
.../kafka/ui/qaseSuite/BaseQaseTest.java | 16 +++
.../kafka/ui/qaseSuite/Template.java | 58 +++++++++
.../kafka/ui/qaseSuite/TestClass.java | 4 -
.../kafka/ui/smokeSuite/SmokeTest.java | 7 +-
.../ui/smokeSuite/brokers/BrokersTest.java | 16 +--
.../smokeSuite/connectors/ConnectorsTest.java | 19 +--
.../ui/smokeSuite/ksqlDb/KsqlDbTest.java | 25 ++--
.../ui/smokeSuite/schemas/SchemasTest.java | 41 ++----
.../ui/smokeSuite/topics/MessagesTest.java | 74 ++++-------
.../ui/smokeSuite/topics/TopicsTest.java | 81 +++---------
32 files changed, 768 insertions(+), 225 deletions(-)
create mode 100644 .github/workflows/e2e-manual.yml
create mode 100644 kafka-ui-e2e-checks/QASE.md
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/listeners/QaseCreateListener.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/listeners/QaseResultListener.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/Automation.java
delete mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/AutomationStatus.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/Status.java
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/enums/State.java
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/BaseManualTest.java
delete mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/TestClass.java
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/DataMaskingTest.java
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/RbacTest.java
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/TopicsTest.java
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/BaseQaseTest.java
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/Template.java
delete mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/TestClass.java
diff --git a/.github/workflows/e2e-automation.yml b/.github/workflows/e2e-automation.yml
index 50e3e9f1d4..5a5018c9e5 100644
--- a/.github/workflows/e2e-automation.yml
+++ b/.github/workflows/e2e-automation.yml
@@ -8,8 +8,6 @@ on:
required: true
type: choice
options:
- - manual
- - qase
- regression
- sanity
- smoke
diff --git a/.github/workflows/e2e-manual.yml b/.github/workflows/e2e-manual.yml
new file mode 100644
index 0000000000..68963e29fa
--- /dev/null
+++ b/.github/workflows/e2e-manual.yml
@@ -0,0 +1,43 @@
+name: E2E Manual suite
+on:
+ workflow_dispatch:
+ inputs:
+ test_suite:
+ description: 'Select test suite to run'
+ default: 'manual'
+ required: true
+ type: choice
+ options:
+ - manual
+ - qase
+ qase_token:
+ description: 'Set Qase token to enable integration'
+ required: true
+ type: string
+
+jobs:
+ build-and-test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ ref: ${{ github.sha }}
+ - name: Set up environment
+ id: set_env_values
+ run: |
+ cat "./kafka-ui-e2e-checks/.env.ci" >> "./kafka-ui-e2e-checks/.env"
+ - name: Set up JDK
+ uses: actions/setup-java@v3
+ with:
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
+ - name: Build with Maven
+ id: build_app
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp clean install -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
+ - name: Run test suite
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp -DQASEIO_API_TOKEN=${{ github.event.inputs.qase_token }} -Dsurefire.suiteXmlFiles='src/test/resources/${{ github.event.inputs.test_suite }}.xml' -Dsuite=${{ github.event.inputs.test_suite }} -f 'kafka-ui-e2e-checks' test -Pprod
diff --git a/kafka-ui-e2e-checks/QASE.md b/kafka-ui-e2e-checks/QASE.md
new file mode 100644
index 0000000000..b09731515a
--- /dev/null
+++ b/kafka-ui-e2e-checks/QASE.md
@@ -0,0 +1,70 @@
+### E2E integration with Qase.io TMS (for internal users)
+
+### Table of Contents
+
+- [Intro](#intro)
+- [Set up Qase.io integration](#set-up-qase-integration)
+- [Test case creation](#test-case-creation)
+- [Test run reporting](#test-run-reporting)
+
+### Intro
+
+We're using [Qase.io](https://help.qase.io/en/) as a TMS to store test cases and accumulate test runs.
+Integration is set up through API using [qase-api](https://mvnrepository.com/artifact/io.qase/qase-api)
+and [qase-testng](https://mvnrepository.com/artifact/io.qase/qase-testng) libraries.
+
+### Set up Qase integration
+
+To set up the integration locally, add the VM option `-DQASEIO_API_TOKEN='%s'`
+(substituting your [Qase token](https://app.qase.io/user/api/token) for '%s') to your run configuration
+
+### Test case creation
+
+By default, any new test case can be added to the TMS as long as its QaseId and QaseTitle don't match an already
+existing case.
+But to handle `@Suite` and `@Automation` we added a custom QaseCreateListener. To create a new test case for the next
+sync with Qase (see the example in `kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/Template.java`):
+
+1. Create a new class in `kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/suite`
+2. Inherit it from `kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/BaseQaseTest.java`
+3. Create a new test method inside the class and annotate it with:
+
+- `@Automation` (optional, NOT_AUTOMATED by default) - sets one of the automation states: NOT_AUTOMATED,
+  TO_BE_AUTOMATED, AUTOMATED
+- `@QaseTitle` (required) - sets the title for the new test case and is used to verify that no existing case in
+  Qase.io has the same title
+- `@Status` (optional, DRAFT by default) - sets one of the case statuses: ACTUAL, DRAFT, DEPRECATED
+- `@Suite` (optional) - set the id of an existing suite to store the new case in it; otherwise the case is stored in
+  the root
+- `@Test` (required) - TestNG annotation that marks the method as a test
+
+4. Create private void step methods inside the same class and annotate each of them with
+   @io.qase.api.annotation.Step to mark it as a step.
+5. Call the defined step methods inside the test method in the required order
+6. If there are any additional cases to create, repeat the scenario in a new class
+7. There are two ways to sync newly created cases in the framework with Qase.io:
+
+- sync can be performed locally - run the new test classes with the
+  [Qase.io integration set up](#set-up-qase-integration)
+- also you can commit and push your changes, then
+ run [E2E Manual suite](https://github.com/provectus/kafka-ui/actions/workflows/e2e-manual.yml) on your branch
+
+8. No test run will be created in Qase.io; the new test case will be stored in the defined directory
+   of the [project's repository](https://app.qase.io/project/KAFKAUI)
+9. To add expected results to the created test case, edit it manually in Qase.io
+
+### Test run reporting
+
+To report manual test cases with status `Skipped` we added a custom QaseResultListener. To create a new test run:
+
+1. All test methods should be annotated with an actual `@QaseId`
+2. There are two ways to create a test run in Qase.io:
+
+- the run can be performed locally - run test classes (or suites) with the
+  [Qase.io integration set up](#set-up-qase-integration); they will be labeled as `Automation CUSTOM suite`
+- also you can commit and push your changes, then
+ run [E2E Automation suite](https://github.com/provectus/kafka-ui/actions/workflows/e2e-automation.yml) on your branch
+
+3. All new test runs will be added into the [project's test runs](https://app.qase.io/run/KAFKAUI) with the
+   corresponding label, using QaseId to identify existing cases
+4. All test cases from the manual suite are set to `Skipped` status in test runs so that they can be performed manually
diff --git a/kafka-ui-e2e-checks/README.md b/kafka-ui-e2e-checks/README.md
index ed23187635..d7f3c77c1f 100644
--- a/kafka-ui-e2e-checks/README.md
+++ b/kafka-ui-e2e-checks/README.md
@@ -7,6 +7,7 @@ This repository is for E2E UI automation.
- [Prerequisites](#prerequisites)
- [How to install](#how-to-install)
- [How to run checks](#how-to-run-checks)
+- [Qase.io integration (for internal users)](#qase-integration)
- [Reporting](#reporting)
- [Environments setup](#environments-setup)
- [Test Data](#test-data)
@@ -50,6 +51,10 @@ docker-compose -f documentation/compose/e2e-tests.yaml up -d
-Dbrowser=local
```
+### Qase integration
+
+Instructions for the Qase.io integration (for internal use only) can be found at `kafka-ui-e2e-checks/QASE.md`
+
### Reporting
Reports are in `allure-results` folder.
diff --git a/kafka-ui-e2e-checks/pom.xml b/kafka-ui-e2e-checks/pom.xml
index b672477312..6d3fc6e64e 100644
--- a/kafka-ui-e2e-checks/pom.xml
+++ b/kafka-ui-e2e-checks/pom.xml
@@ -17,10 +17,10 @@
1.17.6
6.11.2
7.7.0
- 2.20.1
- 3.0.2
+ 2.21.0
+ 3.0.3
1.9.9.1
- 3.23.1
+ 3.24.2
2.2
1.7.36
2.3.1
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/listeners/QaseCreateListener.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/listeners/QaseCreateListener.java
new file mode 100644
index 0000000000..c40481c300
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/listeners/QaseCreateListener.java
@@ -0,0 +1,123 @@
+package com.provectus.kafka.ui.settings.listeners;
+
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Automation;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Status;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
+import io.qase.api.QaseClient;
+import io.qase.api.StepStorage;
+import io.qase.api.annotation.QaseId;
+import io.qase.client.ApiClient;
+import io.qase.client.api.CasesApi;
+import io.qase.client.model.*;
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
+import org.testng.Assert;
+import org.testng.ITestListener;
+import org.testng.ITestResult;
+import org.testng.TestListenerAdapter;
+
+import java.lang.reflect.Method;
+import java.util.*;
+
+import static io.qase.api.utils.IntegrationUtils.getCaseTitle;
+
+@Slf4j
+public class QaseCreateListener extends TestListenerAdapter implements ITestListener {
+
+ private static final CasesApi QASE_API = getQaseApi();
+
+ private static CasesApi getQaseApi() {
+ ApiClient apiClient = QaseClient.getApiClient();
+ apiClient.setApiKey(System.getProperty("QASEIO_API_TOKEN"));
+ return new CasesApi(apiClient);
+ }
+
+ private static int getStatus(Method method) {
+ if (method.isAnnotationPresent(Status.class))
+ return method.getDeclaredAnnotation(Status.class).status().getValue();
+ return 1;
+ }
+
+ private static int getAutomation(Method method) {
+ if (method.isAnnotationPresent(Automation.class))
+ return method.getDeclaredAnnotation(Automation.class).state().getValue();
+ return 0;
+ }
+
+ @SneakyThrows
+ private static HashMap<Long, String> getCaseTitlesAndIdsFromQase() {
+ HashMap<Long, String> cases = new HashMap<>();
+ boolean getCases = true;
+ int offSet = 0;
+ while (getCases) {
+ getCases = false;
+ TestCaseListResponse response = QASE_API.getCases(System.getProperty("QASE_PROJECT_CODE"),
+ new GetCasesFiltersParameter().status(GetCasesFiltersParameter.SERIALIZED_NAME_STATUS), 100, offSet);
+ TestCaseListResponseAllOfResult result = response.getResult();
+ Assert.assertNotNull(result);
+ List<TestCase> entities = result.getEntities();
+ Assert.assertNotNull(entities);
+ if (entities.size() > 0) {
+ for (TestCase testCase : entities) {
+ cases.put(testCase.getId(), testCase.getTitle());
+ }
+ offSet = offSet + 100;
+ getCases = true;
+ }
+ }
+ return cases;
+ }
+
+ private static boolean isCaseWithTitleExistInQase(Method method) {
+ HashMap<Long, String> cases = getCaseTitlesAndIdsFromQase();
+ String title = getCaseTitle(method);
+ if (cases.containsValue(title)) {
+ for (Map.Entry<Long, String> map : cases.entrySet()) {
+ if (map.getValue().matches(title)) {
+ long id = map.getKey();
+ log.warn(String.format("Test case with @QaseTitle='%s' already exists with @QaseId=%d. " +
+ "Please verify @QaseTitle annotation", title, id));
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+ @Override
+ @SneakyThrows
+ public void onTestSuccess(final ITestResult testResult) {
+ Method method = testResult.getMethod()
+ .getConstructorOrMethod()
+ .getMethod();
+ String title = getCaseTitle(method);
+ if (!method.isAnnotationPresent(QaseId.class)) {
+ if (title != null) {
+ if (!isCaseWithTitleExistInQase(method)) {
+ LinkedList<ResultCreateStepsInner> resultSteps = StepStorage.stopSteps();
+ LinkedList<TestCaseCreateStepsInner> createSteps = new LinkedList<>();
+ resultSteps.forEach(step -> {
+ TestCaseCreateStepsInner caseStep = new TestCaseCreateStepsInner();
+ caseStep.setAction(step.getAction());
+ caseStep.setExpectedResult(step.getExpectedResult());
+ createSteps.add(caseStep);
+ });
+ TestCaseCreate newCase = new TestCaseCreate();
+ newCase.setTitle(title);
+ newCase.setStatus(getStatus(method));
+ newCase.setAutomation(getAutomation(method));
+ newCase.setSteps(createSteps);
+ if (method.isAnnotationPresent(Suite.class)) {
+ long suiteId = method.getDeclaredAnnotation(Suite.class).id();
+ newCase.suiteId(suiteId);
+ }
+ Long id = Objects.requireNonNull(QASE_API.createCase(System.getProperty("QASE_PROJECT_CODE"),
+ newCase).getResult()).getId();
+ log.info(String.format("New test case '%s' was created with @QaseId=%d", title, id));
+ }
+ } else
+ log.warn("To create new test case in Qase.io please add @QaseTitle annotation");
+ } else
+ log.warn("To create new test case in Qase.io please remove @QaseId annotation");
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/listeners/QaseResultListener.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/listeners/QaseResultListener.java
new file mode 100644
index 0000000000..abab897bfc
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/listeners/QaseResultListener.java
@@ -0,0 +1,102 @@
+package com.provectus.kafka.ui.settings.listeners;
+
+import io.qase.api.StepStorage;
+import io.qase.api.config.QaseConfig;
+import io.qase.api.services.QaseTestCaseListener;
+import io.qase.client.model.ResultCreate;
+import io.qase.client.model.ResultCreateCase;
+import io.qase.client.model.ResultCreateStepsInner;
+import io.qase.testng.guice.module.TestNgModule;
+import lombok.AccessLevel;
+import lombok.Getter;
+import lombok.extern.slf4j.Slf4j;
+import org.testng.ITestContext;
+import org.testng.ITestListener;
+import org.testng.ITestResult;
+import org.testng.TestListenerAdapter;
+
+import java.lang.reflect.Method;
+import java.util.LinkedList;
+import java.util.Optional;
+
+import static io.qase.api.utils.IntegrationUtils.*;
+import static io.qase.client.model.ResultCreate.StatusEnum.*;
+
+@Slf4j
+public class QaseResultListener extends TestListenerAdapter implements ITestListener {
+
+ private static final String REPORTER_NAME = "TestNG";
+
+ static {
+ System.setProperty(QaseConfig.QASE_CLIENT_REPORTER_NAME_KEY, REPORTER_NAME);
+ }
+
+ @Getter(lazy = true, value = AccessLevel.PRIVATE)
+ private final QaseTestCaseListener qaseTestCaseListener = createQaseListener();
+
+ private static QaseTestCaseListener createQaseListener() {
+ return TestNgModule.getInjector().getInstance(QaseTestCaseListener.class);
+ }
+
+ @Override
+ public void onTestStart(ITestResult result) {
+ getQaseTestCaseListener().onTestCaseStarted();
+ super.onTestStart(result);
+ }
+
+ @Override
+ public void onTestSuccess(ITestResult tr) {
+ getQaseTestCaseListener()
+ .onTestCaseFinished(resultCreate -> setupResultItem(resultCreate, tr, PASSED));
+ super.onTestSuccess(tr);
+ }
+
+ @Override
+ public void onTestSkipped(ITestResult tr) {
+ getQaseTestCaseListener()
+ .onTestCaseFinished(resultCreate -> setupResultItem(resultCreate, tr, SKIPPED));
+ super.onTestSuccess(tr);
+ }
+
+ @Override
+ public void onTestFailure(ITestResult tr) {
+ getQaseTestCaseListener()
+ .onTestCaseFinished(resultCreate -> setupResultItem(resultCreate, tr, FAILED));
+ super.onTestFailure(tr);
+ }
+
+ @Override
+ public void onFinish(ITestContext testContext) {
+ getQaseTestCaseListener().onTestCasesSetFinished();
+ super.onFinish(testContext);
+ }
+
+ private void setupResultItem(ResultCreate resultCreate, ITestResult result, ResultCreate.StatusEnum status) {
+ Optional<Throwable> resultThrowable = Optional.ofNullable(result.getThrowable());
+ String comment = resultThrowable
+ .flatMap(throwable -> Optional.of(throwable.toString())).orElse(null);
+ Boolean isDefect = resultThrowable
+ .flatMap(throwable -> Optional.of(throwable instanceof AssertionError))
+ .orElse(false);
+ String stacktrace = resultThrowable
+ .flatMap(throwable -> Optional.of(getStacktrace(throwable)))
+ .orElse(null);
+ Method method = result.getMethod()
+ .getConstructorOrMethod()
+ .getMethod();
+ Long caseId = getCaseId(method);
+ String caseTitle = null;
+ if (caseId == null) {
+ caseTitle = getCaseTitle(method);
+ }
+ LinkedList<ResultCreateStepsInner> steps = StepStorage.stopSteps();
+ resultCreate
+ ._case(caseTitle == null ? null : new ResultCreateCase().title(caseTitle))
+ .caseId(caseId)
+ .status(status)
+ .comment(comment)
+ .stacktrace(stacktrace)
+ .steps(steps.isEmpty() ? null : steps)
+ .defect(isDefect);
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/QaseSetup.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/QaseSetup.java
index aa1a1213f9..60be014a0a 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/QaseSetup.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/QaseSetup.java
@@ -15,7 +15,7 @@ import static org.apache.commons.lang3.StringUtils.isEmpty;
@Slf4j
public class QaseSetup {
- public static void testRunSetup() {
+ public static void qaseIntegrationSetup() {
String qaseApiToken = System.getProperty("QASEIO_API_TOKEN");
if (isEmpty(qaseApiToken)) {
log.warn("Integration with Qase is disabled due to run config or token wasn't defined.");
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/Automation.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/Automation.java
new file mode 100644
index 0000000000..556263c111
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/Automation.java
@@ -0,0 +1,15 @@
+package com.provectus.kafka.ui.utilities.qaseUtils.annotations;
+
+import com.provectus.kafka.ui.utilities.qaseUtils.enums.State;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+public @interface Automation {
+
+ State state();
+}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/AutomationStatus.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/AutomationStatus.java
deleted file mode 100644
index 7f3c0da89e..0000000000
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/AutomationStatus.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package com.provectus.kafka.ui.utilities.qaseUtils.annotations;
-
-import com.provectus.kafka.ui.utilities.qaseUtils.enums.Status;
-
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-
-@Retention(RetentionPolicy.RUNTIME)
-public @interface AutomationStatus {
-
- Status status();
-}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/Status.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/Status.java
new file mode 100644
index 0000000000..3c31f2345c
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/Status.java
@@ -0,0 +1,13 @@
+package com.provectus.kafka.ui.utilities.qaseUtils.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Target(ElementType.METHOD)
+@Retention(RetentionPolicy.RUNTIME)
+public @interface Status {
+
+ com.provectus.kafka.ui.utilities.qaseUtils.enums.Status status();
+}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/Suite.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/Suite.java
index 09b971b48c..fa1c2c3dd7 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/Suite.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/annotations/Suite.java
@@ -1,10 +1,13 @@
package com.provectus.kafka.ui.utilities.qaseUtils.annotations;
+import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface Suite {
- long suiteId();
- String title();
+
+ long id();
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/enums/State.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/enums/State.java
new file mode 100644
index 0000000000..cdbbaf38de
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/enums/State.java
@@ -0,0 +1,18 @@
+package com.provectus.kafka.ui.utilities.qaseUtils.enums;
+
+public enum State {
+
+ NOT_AUTOMATED(0),
+ TO_BE_AUTOMATED(1),
+ AUTOMATED(2);
+
+ private final int value;
+
+ State(int value) {
+ this.value = value;
+ }
+
+ public int getValue() {
+ return value;
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/enums/Status.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/enums/Status.java
index d97e42b3e4..adc8bf24b9 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/enums/Status.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/qaseUtils/enums/Status.java
@@ -1,5 +1,18 @@
package com.provectus.kafka.ui.utilities.qaseUtils.enums;
public enum Status {
- AUTOMATED, TO_BE_AUTOMATED, MANUAL
+
+ ACTUAL(0),
+ DRAFT(1),
+ DEPRECATED(2);
+
+ private final int value;
+
+ Status(int value) {
+ this.value = value;
+ }
+
+ public int getValue() {
+ return value;
+ }
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Suite.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Suite.java
index c0a823600a..74f60dd0d2 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Suite.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Suite.java
@@ -4,7 +4,6 @@ public interface Suite {
String CUSTOM = "custom";
String MANUAL = "manual";
- String QASE = "qase";
String REGRESSION = "regression";
String SANITY = "sanity";
String SMOKE = "smoke";
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java
index aa05462dc0..399f81e1aa 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java
@@ -6,8 +6,8 @@ import com.codeborne.selenide.SelenideElement;
import com.codeborne.selenide.WebDriverRunner;
import com.provectus.kafka.ui.settings.listeners.AllureListener;
import com.provectus.kafka.ui.settings.listeners.LoggerListener;
+import com.provectus.kafka.ui.settings.listeners.QaseResultListener;
import io.qameta.allure.Step;
-import io.qase.testng.QaseListener;
import lombok.extern.slf4j.Slf4j;
import org.openqa.selenium.Dimension;
import org.openqa.selenium.chrome.ChromeOptions;
@@ -25,12 +25,12 @@ import java.util.List;
import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.TOPICS;
import static com.provectus.kafka.ui.settings.BaseSource.*;
import static com.provectus.kafka.ui.settings.drivers.LocalWebDriver.*;
-import static com.provectus.kafka.ui.utilities.qaseUtils.QaseSetup.testRunSetup;
+import static com.provectus.kafka.ui.utilities.qaseUtils.QaseSetup.qaseIntegrationSetup;
import static com.provectus.kafka.ui.variables.Browser.CONTAINER;
import static com.provectus.kafka.ui.variables.Browser.LOCAL;
@Slf4j
-@Listeners({AllureListener.class, LoggerListener.class, QaseListener.class})
+@Listeners({AllureListener.class, LoggerListener.class, QaseResultListener.class})
public abstract class BaseTest extends Facade {
private static final String SELENIUM_IMAGE_NAME = "selenium/standalone-chrome:103.0";
@@ -43,7 +43,7 @@ public abstract class BaseTest extends Facade {
@BeforeSuite(alwaysRun = true)
public void beforeSuite() {
- testRunSetup();
+ qaseIntegrationSetup();
switch (BROWSER) {
case (CONTAINER) -> {
DockerImageName image = isARM64()
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/Facade.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/Facade.java
index a26f600e1d..c370c01b5f 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/Facade.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/Facade.java
@@ -19,6 +19,7 @@ import com.provectus.kafka.ui.pages.topics.*;
import com.provectus.kafka.ui.services.ApiService;
public abstract class Facade {
+
protected ApiService apiService = new ApiService();
protected ConnectorCreateForm connectorCreateForm = new ConnectorCreateForm();
protected KafkaConnectList kafkaConnectList = new KafkaConnectList();
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/BaseManualTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/BaseManualTest.java
new file mode 100644
index 0000000000..d9891a0b50
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/BaseManualTest.java
@@ -0,0 +1,28 @@
+package com.provectus.kafka.ui.manualSuite;
+
+import com.provectus.kafka.ui.settings.listeners.QaseResultListener;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Automation;
+import org.testng.SkipException;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.BeforeSuite;
+import org.testng.annotations.Listeners;
+
+import java.lang.reflect.Method;
+
+import static com.provectus.kafka.ui.utilities.qaseUtils.QaseSetup.qaseIntegrationSetup;
+import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.NOT_AUTOMATED;
+
+@Listeners(QaseResultListener.class)
+public abstract class BaseManualTest {
+
+ @BeforeSuite
+ public void beforeSuite() {
+ qaseIntegrationSetup();
+ }
+
+ @BeforeMethod
+ public void beforeMethod(Method method) {
+ if (method.getAnnotation(Automation.class).state().equals(NOT_AUTOMATED))
+ throw new SkipException("Skip test exception");
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/TestClass.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/TestClass.java
deleted file mode 100644
index c75516955a..0000000000
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/TestClass.java
+++ /dev/null
@@ -1,4 +0,0 @@
-package com.provectus.kafka.ui.manualSuite;
-
-public class TestClass {
-}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/DataMaskingTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/DataMaskingTest.java
new file mode 100644
index 0000000000..23b6e6539e
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/DataMaskingTest.java
@@ -0,0 +1,29 @@
+package com.provectus.kafka.ui.manualSuite.suite;
+
+import com.provectus.kafka.ui.manualSuite.BaseManualTest;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Automation;
+import io.qase.api.annotation.QaseId;
+import org.testng.annotations.Test;
+
+import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.NOT_AUTOMATED;
+
+public class DataMaskingTest extends BaseManualTest {
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(262)
+ @Test
+ public void testCaseA() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(264)
+ @Test
+ public void testCaseB() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(265)
+ @Test
+ public void testCaseC() {
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/RbacTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/RbacTest.java
new file mode 100644
index 0000000000..aad85652f5
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/RbacTest.java
@@ -0,0 +1,53 @@
+package com.provectus.kafka.ui.manualSuite.suite;
+
+import com.provectus.kafka.ui.manualSuite.BaseManualTest;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Automation;
+import io.qase.api.annotation.QaseId;
+import org.testng.annotations.Test;
+
+import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.NOT_AUTOMATED;
+
+public class RbacTest extends BaseManualTest {
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(249)
+ @Test
+ public void testCaseA() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(251)
+ @Test
+ public void testCaseB() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(257)
+ @Test
+ public void testCaseC() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(258)
+ @Test
+ public void testCaseD() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(259)
+ @Test
+ public void testCaseE() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(260)
+ @Test
+ public void testCaseF() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(261)
+ @Test
+ public void testCaseG() {
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/TopicsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/TopicsTest.java
new file mode 100644
index 0000000000..d768f939ce
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/TopicsTest.java
@@ -0,0 +1,95 @@
+package com.provectus.kafka.ui.manualSuite.suite;
+
+import com.provectus.kafka.ui.manualSuite.BaseManualTest;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Automation;
+import io.qase.api.annotation.QaseId;
+import org.testng.annotations.Test;
+
+import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.NOT_AUTOMATED;
+
+public class TopicsTest extends BaseManualTest {
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(17)
+ @Test
+ public void testCaseA() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(18)
+ @Test
+ public void testCaseB() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(22)
+ @Test
+ public void testCaseC() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(46)
+ @Test
+ public void testCaseD() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(47)
+ @Test
+ public void testCaseE() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(48)
+ @Test
+ public void testCaseF() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(49)
+ @Test
+ public void testCaseG() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(50)
+ @Test
+ public void testCaseH() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(57)
+ @Test
+ public void testCaseI() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(58)
+ @Test
+ public void testCaseJ() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(269)
+ @Test
+ public void testCaseK() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(270)
+ @Test
+ public void testCaseL() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(271)
+ @Test
+ public void testCaseM() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(272)
+ @Test
+ public void testCaseN() {
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/BaseQaseTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/BaseQaseTest.java
new file mode 100644
index 0000000000..977cbd6dc4
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/BaseQaseTest.java
@@ -0,0 +1,16 @@
+package com.provectus.kafka.ui.qaseSuite;
+
+import com.provectus.kafka.ui.settings.listeners.QaseCreateListener;
+import org.testng.annotations.BeforeSuite;
+import org.testng.annotations.Listeners;
+
+import static com.provectus.kafka.ui.utilities.qaseUtils.QaseSetup.qaseIntegrationSetup;
+
+@Listeners(QaseCreateListener.class)
+public abstract class BaseQaseTest {
+
+ @BeforeSuite
+ public void beforeSuite() {
+ qaseIntegrationSetup();
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/Template.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/Template.java
new file mode 100644
index 0000000000..cf0101fe79
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/Template.java
@@ -0,0 +1,58 @@
+package com.provectus.kafka.ui.qaseSuite;
+
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Automation;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Status;
+import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
+import io.qase.api.annotation.QaseTitle;
+import io.qase.api.annotation.Step;
+
+import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.NOT_AUTOMATED;
+import static com.provectus.kafka.ui.utilities.qaseUtils.enums.Status.DRAFT;
+
+public class Template extends BaseQaseTest {
+
+ /**
+ * this class is a placeholder/example; use it as a template to create new test classes:
+ * copy the class into kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/suite,
+ * place it into the corresponding folder and rename it according to the test case summary from Qase.io,
+ * then uncomment @Test and set all annotations according to kafka-ui-e2e-checks/QASE.md
+ */
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseTitle("testCaseA title")
+ @Status(status = DRAFT)
+ @Suite(id = 0)
+// @org.testng.annotations.Test
+ public void testCaseA() {
+ stepA();
+ stepB();
+ stepC();
+ stepD();
+ stepE();
+ stepF();
+ }
+
+ @Step("stepA action")
+ private void stepA() {
+ }
+
+ @Step("stepB action")
+ private void stepB() {
+ }
+
+ @Step("stepC action")
+ private void stepC() {
+ }
+
+ @Step("stepD action")
+ private void stepD() {
+ }
+
+ @Step("stepE action")
+ private void stepE() {
+ }
+
+ @Step("stepF action")
+ private void stepF() {
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/TestClass.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/TestClass.java
deleted file mode 100644
index d3cea52921..0000000000
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/TestClass.java
+++ /dev/null
@@ -1,4 +0,0 @@
-package com.provectus.kafka.ui.qaseSuite;
-
-public class TestClass {
-}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java
index fc87e88502..a0c4f05da2 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java
@@ -2,9 +2,7 @@ package com.provectus.kafka.ui.smokeSuite;
import com.codeborne.selenide.Condition;
import com.provectus.kafka.ui.BaseTest;
-import com.provectus.kafka.ui.utilities.qaseUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseUtils.enums.Status;
-import io.qase.api.annotation.CaseId;
+import io.qase.api.annotation.QaseId;
import org.testng.annotations.Test;
import java.util.stream.Collectors;
@@ -12,9 +10,8 @@ import java.util.stream.Stream;
public class SmokeTest extends BaseTest {
+ @QaseId(198)
@Test
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(198)
public void checkBasePageElements() {
verifyElementsCondition(
Stream.concat(topPanel.getAllVisibleElements().stream(), naviSideBar.getAllMenuButtons().stream())
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java
index c2f3e20c4a..7b11aa6556 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java
@@ -2,11 +2,8 @@ package com.provectus.kafka.ui.smokeSuite.brokers;
import com.codeborne.selenide.Condition;
import com.provectus.kafka.ui.BaseTest;
-import com.provectus.kafka.ui.utilities.qaseUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseUtils.enums.Status;
import io.qameta.allure.Step;
-import io.qase.api.annotation.CaseId;
+import io.qase.api.annotation.QaseId;
import org.testng.Assert;
import org.testng.annotations.Test;
@@ -15,12 +12,7 @@ import static com.provectus.kafka.ui.pages.brokers.BrokersDetails.DetailsTab.CON
public class BrokersTest extends BaseTest {
- private static final String SUITE_TITLE = "Brokers";
- private static final long SUITE_ID = 1;
-
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(1)
+ @QaseId(1)
@Test
public void checkBrokersOverview() {
navigateToBrokers();
@@ -29,9 +21,7 @@ public class BrokersTest extends BaseTest {
verifyElementsCondition(brokersList.getAllEnabledElements(), Condition.enabled);
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(85)
+ @QaseId(85)
@Test
public void checkExistingBrokersInCluster() {
navigateToBrokers();
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java
index 30b380d6d0..5291648014 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java
@@ -3,11 +3,8 @@ package com.provectus.kafka.ui.smokeSuite.connectors;
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.models.Connector;
import com.provectus.kafka.ui.models.Topic;
-import com.provectus.kafka.ui.utilities.qaseUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseUtils.enums.Status;
import io.qameta.allure.Step;
-import io.qase.api.annotation.CaseId;
+import io.qase.api.annotation.QaseId;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
@@ -23,8 +20,6 @@ import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
public class ConnectorsTest extends BaseTest {
- private static final long SUITE_ID = 10;
- private static final String SUITE_TITLE = "Kafka Connect";
private static final String CONNECT_NAME = "first";
private static final List<Topic> TOPIC_LIST = new ArrayList<>();
private static final List<Connector> CONNECTOR_LIST = new ArrayList<>();
@@ -58,9 +53,7 @@ public class ConnectorsTest extends BaseTest {
.createConnector(CONNECT_NAME, connector));
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(42)
+ @QaseId(42)
@Test
public void createConnector() {
Connector connectorForCreate = new Connector()
@@ -82,9 +75,7 @@ public class ConnectorsTest extends BaseTest {
CONNECTOR_LIST.add(connectorForCreate);
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(196)
+ @QaseId(196)
@Test
public void updateConnector() {
navigateToConnectorsAndOpenDetails(CONNECTOR_FOR_UPDATE.getName());
@@ -97,9 +88,7 @@ public class ConnectorsTest extends BaseTest {
Assert.assertTrue(kafkaConnectList.isConnectorVisible(CONNECTOR_FOR_UPDATE.getName()), "isConnectorVisible()");
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(195)
+ @QaseId(195)
@Test
public void deleteConnector() {
navigateToConnectorsAndOpenDetails(CONNECTOR_FOR_DELETE.getName());
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
index 9716b3905d..3627f321df 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
@@ -3,10 +3,7 @@ package com.provectus.kafka.ui.smokeSuite.ksqlDb;
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.pages.ksqlDb.models.Stream;
import com.provectus.kafka.ui.pages.ksqlDb.models.Table;
-import com.provectus.kafka.ui.utilities.qaseUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseUtils.enums.Status;
-import io.qase.api.annotation.CaseId;
+import io.qase.api.annotation.QaseId;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.testng.asserts.SoftAssert;
@@ -17,28 +14,24 @@ import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
public class KsqlDbTest extends BaseTest {
- private static final long SUITE_ID = 8;
- private static final String SUITE_TITLE = "KSQL_DB";
- private static final Stream STREAM_FOR_CHECKING_TABLES = new Stream()
- .setName("STREAM_FOR_CHECKING_TABLES_" + randomAlphabetic(4).toUpperCase())
+ private static final Stream STREAM_FOR_CHECK_TABLES = new Stream()
+ .setName("STREAM_FOR_CHECK_TABLES_" + randomAlphabetic(4).toUpperCase())
.setTopicName("TOPIC_FOR_STREAM_" + randomAlphabetic(4).toUpperCase());
private static final Table FIRST_TABLE = new Table()
.setName("FIRST_TABLE" + randomAlphabetic(4).toUpperCase())
- .setStreamName(STREAM_FOR_CHECKING_TABLES.getName());
+ .setStreamName(STREAM_FOR_CHECK_TABLES.getName());
private static final Table SECOND_TABLE = new Table()
.setName("SECOND_TABLE" + randomAlphabetic(4).toUpperCase())
- .setStreamName(STREAM_FOR_CHECKING_TABLES.getName());
+ .setStreamName(STREAM_FOR_CHECK_TABLES.getName());
@BeforeClass(alwaysRun = true)
public void beforeClass() {
apiService
- .createStream(STREAM_FOR_CHECKING_TABLES)
+ .createStream(STREAM_FOR_CHECK_TABLES)
.createTables(FIRST_TABLE, SECOND_TABLE);
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(41)
+ @QaseId(41)
@Test(priority = 1)
public void checkShowTablesRequestExecution() {
naviSideBar
@@ -57,9 +50,7 @@ public class KsqlDbTest extends BaseTest {
softly.assertAll();
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(86)
+ @QaseId(86)
@Test(priority = 2)
public void clearResultsForExecutedRequest() {
naviSideBar
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java
index 6473684c06..56ff2c0ff0 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java
@@ -1,14 +1,11 @@
package com.provectus.kafka.ui.smokeSuite.schemas;
import com.codeborne.selenide.Condition;
-import com.provectus.kafka.ui.api.model.CompatibilityLevel;
import com.provectus.kafka.ui.BaseTest;
+import com.provectus.kafka.ui.api.model.CompatibilityLevel;
import com.provectus.kafka.ui.models.Schema;
-import com.provectus.kafka.ui.utilities.qaseUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseUtils.enums.Status;
import io.qameta.allure.Step;
-import io.qase.api.annotation.CaseId;
+import io.qase.api.annotation.QaseId;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
@@ -23,8 +20,6 @@ import static com.provectus.kafka.ui.utilities.FileUtils.fileToString;
public class SchemasTest extends BaseTest {
- private static final long SUITE_ID = 11;
- private static final String SUITE_TITLE = "Schema Registry";
private static final List<Schema> SCHEMA_LIST = new ArrayList<>();
private static final Schema AVRO_API = Schema.createSchemaAvro();
private static final Schema JSON_API = Schema.createSchemaJson();
@@ -36,9 +31,7 @@ public class SchemasTest extends BaseTest {
SCHEMA_LIST.forEach(schema -> apiService.createSchema(schema));
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(43)
+ @QaseId(43)
@Test(priority = 1)
public void createSchemaAvro() {
Schema schemaAvro = Schema.createSchemaAvro();
@@ -63,9 +56,7 @@ public class SchemasTest extends BaseTest {
SCHEMA_LIST.add(schemaAvro);
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(186)
+ @QaseId(186)
@Test(priority = 2)
public void updateSchemaAvro() {
AVRO_API.setValuePath(System.getProperty("user.dir") + "/src/main/resources/testData/schema_avro_for_update.json");
@@ -88,9 +79,7 @@ public class SchemasTest extends BaseTest {
Assert.assertEquals(CompatibilityLevel.CompatibilityEnum.NONE.toString(), schemaDetails.getCompatibility(), "getCompatibility()");
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(186)
+ @QaseId(186)
@Test(priority = 3)
public void compareVersionsOperation() {
navigateToSchemaRegistryAndOpenDetails(AVRO_API.getName());
@@ -109,9 +98,7 @@ public class SchemasTest extends BaseTest {
Assert.assertEquals(53, schemaCreateForm.getMarkedLinesNumber(), "getAllMarkedLines()");
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(187)
+ @QaseId(187)
@Test(priority = 4)
public void deleteSchemaAvro() {
navigateToSchemaRegistryAndOpenDetails(AVRO_API.getName());
@@ -123,9 +110,7 @@ public class SchemasTest extends BaseTest {
SCHEMA_LIST.remove(AVRO_API);
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(89)
+ @QaseId(89)
@Test(priority = 5)
public void createSchemaJson() {
Schema schemaJson = Schema.createSchemaJson();
@@ -150,9 +135,7 @@ public class SchemasTest extends BaseTest {
SCHEMA_LIST.add(schemaJson);
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(189)
+ @QaseId(189)
@Test(priority = 6)
public void deleteSchemaJson() {
navigateToSchemaRegistryAndOpenDetails(JSON_API.getName());
@@ -164,9 +147,7 @@ public class SchemasTest extends BaseTest {
SCHEMA_LIST.remove(JSON_API);
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(91)
+ @QaseId(91)
@Test(priority = 7)
public void createSchemaProtobuf() {
Schema schemaProtobuf = Schema.createSchemaProtobuf();
@@ -191,9 +172,7 @@ public class SchemasTest extends BaseTest {
SCHEMA_LIST.add(schemaProtobuf);
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(223)
+ @QaseId(223)
@Test(priority = 8)
public void deleteSchemaProtobuf() {
navigateToSchemaRegistryAndOpenDetails(PROTOBUF_API.getName());
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
index 5e5af5c044..9f8d18b685 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
@@ -3,12 +3,8 @@ package com.provectus.kafka.ui.smokeSuite.topics;
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.models.Topic;
import com.provectus.kafka.ui.pages.topics.TopicDetails;
-import com.provectus.kafka.ui.utilities.qaseUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseUtils.enums.Status;
import io.qameta.allure.Issue;
import io.qameta.allure.Step;
-import io.qase.api.annotation.CaseId;
import io.qase.api.annotation.QaseId;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
@@ -31,8 +27,6 @@ import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
public class MessagesTest extends BaseTest {
- private static final long SUITE_ID = 2;
- private static final String SUITE_TITLE = "Topics";
private static final Topic TOPIC_FOR_MESSAGES = new Topic()
.setName("topic-with-clean-message-attribute-" + randomAlphabetic(5))
.setMessageKey(randomAlphabetic(5))
@@ -41,8 +35,8 @@ public class MessagesTest extends BaseTest {
.setName("topic-to-clear-and-purge-messages-attribute-" + randomAlphabetic(5))
.setMessageKey(randomAlphabetic(5))
.setMessageContent(randomAlphabetic(10));
- private static final Topic TOPIC_FOR_CHECKING_FILTERS = new Topic()
- .setName("topic-for-checking-filters-" + randomAlphabetic(5))
+ private static final Topic TOPIC_FOR_CHECK_FILTERS = new Topic()
+ .setName("topic-for-check-filters-" + randomAlphabetic(5))
.setMessageKey(randomAlphabetic(5))
.setMessageContent(randomAlphabetic(10));
private static final Topic TOPIC_TO_RECREATE = new Topic()
@@ -50,25 +44,23 @@ public class MessagesTest extends BaseTest {
.setMessageKey(randomAlphabetic(5))
.setMessageContent(randomAlphabetic(10));
private static final Topic TOPIC_FOR_CHECK_MESSAGES_COUNT = new Topic()
- .setName("topic-for-check-messages-count" + randomAlphabetic(5))
- .setMessageKey(randomAlphabetic(5))
- .setMessageContent(randomAlphabetic(10));
+ .setName("topic-for-check-messages-count" + randomAlphabetic(5))
+ .setMessageKey(randomAlphabetic(5))
+ .setMessageContent(randomAlphabetic(10));
private static final List<Topic> TOPIC_LIST = new ArrayList<>();
@BeforeClass(alwaysRun = true)
public void beforeClass() {
- TOPIC_LIST.addAll(List.of(TOPIC_FOR_MESSAGES, TOPIC_FOR_CHECKING_FILTERS, TOPIC_TO_CLEAR_AND_PURGE_MESSAGES,
+ TOPIC_LIST.addAll(List.of(TOPIC_FOR_MESSAGES, TOPIC_FOR_CHECK_FILTERS, TOPIC_TO_CLEAR_AND_PURGE_MESSAGES,
TOPIC_TO_RECREATE, TOPIC_FOR_CHECK_MESSAGES_COUNT));
TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName()));
- IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECKING_FILTERS));
+ IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECK_FILTERS));
waitUntilNewMinuteStarted();
- IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECKING_FILTERS));
+ IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECK_FILTERS));
IntStream.range(1, 110).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECK_MESSAGES_COUNT));
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(222)
+ @QaseId(222)
@Test(priority = 1)
public void produceMessage() {
navigateToTopicsAndOpenDetails(TOPIC_FOR_MESSAGES.getName());
@@ -85,9 +77,7 @@ public class MessagesTest extends BaseTest {
@Ignore
@Issue("https://github.com/provectus/kafka-ui/issues/2778")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(19)
+ @QaseId(19)
@Test(priority = 2)
public void clearMessage() {
navigateToTopicsAndOpenDetails(TOPIC_FOR_MESSAGES.getName());
@@ -103,9 +93,7 @@ public class MessagesTest extends BaseTest {
Assert.assertEquals(0, topicDetails.getMessageCountAmount(), "getMessageCountAmount()");
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(239)
+ @QaseId(239)
@Test(priority = 3)
public void checkClearTopicMessage() {
navigateToTopicsAndOpenDetails(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName());
@@ -128,9 +116,7 @@ public class MessagesTest extends BaseTest {
softly.assertAll();
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(10)
+ @QaseId(10)
@Test(priority = 4)
public void checkPurgeMessagePossibility() {
navigateToTopics();
@@ -163,12 +149,10 @@ public class MessagesTest extends BaseTest {
@Ignore
@Issue("https://github.com/provectus/kafka-ui/issues/2819")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(21)
+ @QaseId(21)
@Test(priority = 5)
public void copyMessageFromTopicProfile() {
- navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
topicDetails
.openDetailsTab(MESSAGES)
.getRandomMessage()
@@ -180,12 +164,10 @@ public class MessagesTest extends BaseTest {
@Ignore
@Issue("https://github.com/provectus/kafka-ui/issues/2394")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(15)
+ @QaseId(15)
@Test(priority = 6)
- public void checkingMessageFilteringByOffset() {
- navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
+ public void checkMessageFilteringByOffset() {
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
topicDetails
.openDetailsTab(MESSAGES);
TopicDetails.MessageGridItem secondMessage = topicDetails.getMessageByOffset(1);
@@ -204,12 +186,10 @@ public class MessagesTest extends BaseTest {
@Ignore
@Issue("https://github.com/provectus/kafka-ui/issues/3215")
@Issue("https://github.com/provectus/kafka-ui/issues/2345")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(16)
+ @QaseId(16)
@Test(priority = 7)
- public void checkingMessageFilteringByTimestamp() {
- navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
+ public void checkMessageFilteringByTimestamp() {
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
topicDetails
.openDetailsTab(MESSAGES);
LocalDateTime firstTimestamp = topicDetails.getMessageByOffset(0).getTimestamp();
@@ -233,12 +213,10 @@ public class MessagesTest extends BaseTest {
@Ignore
@Issue("https://github.com/provectus/kafka-ui/issues/2778")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(246)
+ @QaseId(246)
@Test(priority = 8)
public void checkClearTopicMessageFromOverviewTab() {
- navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
topicDetails
.openDetailsTab(OVERVIEW)
.openDotMenu()
@@ -246,16 +224,14 @@ public class MessagesTest extends BaseTest {
.clickConfirmBtnMdl();
SoftAssert softly = new SoftAssert();
softly.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS,
- String.format("%s messages have been successfully cleared!", TOPIC_FOR_CHECKING_FILTERS.getName())),
+ String.format("%s messages have been successfully cleared!", TOPIC_FOR_CHECK_FILTERS.getName())),
"isAlertWithMessageVisible()");
softly.assertEquals(topicDetails.getMessageCountAmount(), 0,
"getMessageCountAmount()= " + topicDetails.getMessageCountAmount());
softly.assertAll();
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(240)
+ @QaseId(240)
@Test(priority = 9)
public void checkRecreateTopic() {
navigateToTopicsAndOpenDetails(TOPIC_TO_RECREATE.getName());
@@ -293,7 +269,7 @@ public class MessagesTest extends BaseTest {
softly.assertTrue(topicDetails.isNextButtonEnabled(), "isNextButtonEnabled()");
softly.assertAll();
int lastOffsetOnPage = topicDetails.getAllMessages()
- .get(messagesPerPage -1).getOffset();
+ .get(messagesPerPage - 1).getOffset();
topicDetails
.clickNextButton();
softly.assertEquals(topicDetails.getAllMessages().stream().findFirst().orElseThrow().getOffset(),
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
index 6f847d7d68..3f1347cb39 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
@@ -4,11 +4,8 @@ import com.codeborne.selenide.Condition;
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.models.Topic;
import com.provectus.kafka.ui.pages.topics.TopicDetails;
-import com.provectus.kafka.ui.utilities.qaseUtils.annotations.AutomationStatus;
-import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite;
-import com.provectus.kafka.ui.utilities.qaseUtils.enums.Status;
import io.qameta.allure.Issue;
-import io.qase.api.annotation.CaseId;
+import io.qase.api.annotation.QaseId;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
@@ -33,8 +30,6 @@ import static org.apache.commons.lang3.RandomUtils.nextInt;
public class TopicsTest extends BaseTest {
- private static final long SUITE_ID = 2;
- private static final String SUITE_TITLE = "Topics";
private static final Topic TOPIC_TO_CREATE = new Topic()
.setName("new-topic-" + randomAlphabetic(5))
.setNumberOfPartitions(1)
@@ -66,9 +61,7 @@ public class TopicsTest extends BaseTest {
TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName()));
}
- @Suite(suiteId = 4, title = "Create new Topic")
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(199)
+ @QaseId(199)
@Test(priority = 1)
public void createTopic() {
navigateToTopics();
@@ -91,9 +84,7 @@ public class TopicsTest extends BaseTest {
TOPIC_LIST.add(TOPIC_TO_CREATE);
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(7)
+ @QaseId(7)
@Test(priority = 2)
void checkAvailableOperations() {
navigateToTopics();
@@ -109,9 +100,7 @@ public class TopicsTest extends BaseTest {
@Ignore
@Issue("https://github.com/provectus/kafka-ui/issues/3071")
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(268)
+ @QaseId(268)
@Test(priority = 3)
public void checkCustomParametersWithinEditExistingTopic() {
navigateToTopicsAndOpenDetails(TOPIC_TO_UPDATE_AND_DELETE.getName());
@@ -130,9 +119,7 @@ public class TopicsTest extends BaseTest {
softly.assertAll();
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(197)
+ @QaseId(197)
@Test(priority = 4)
public void updateTopic() {
navigateToTopicsAndOpenDetails(TOPIC_TO_UPDATE_AND_DELETE.getName());
@@ -184,9 +171,7 @@ public class TopicsTest extends BaseTest {
softly.assertAll();
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(242)
+ @QaseId(242)
@Test(priority = 5)
public void removeTopicFromTopicList() {
navigateToTopics();
@@ -200,9 +185,7 @@ public class TopicsTest extends BaseTest {
TOPIC_LIST.remove(TOPIC_TO_UPDATE_AND_DELETE);
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(207)
+ @QaseId(207)
@Test(priority = 6)
public void deleteTopic() {
navigateToTopicsAndOpenDetails(TOPIC_FOR_DELETE.getName());
@@ -215,9 +198,7 @@ public class TopicsTest extends BaseTest {
TOPIC_LIST.remove(TOPIC_FOR_DELETE);
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(20)
+ @QaseId(20)
@Test(priority = 7)
public void redirectToConsumerFromTopic() {
String topicName = "source-activities";
@@ -236,9 +217,7 @@ public class TopicsTest extends BaseTest {
softly.assertAll();
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(4)
+ @QaseId(4)
@Test(priority = 8)
public void checkTopicCreatePossibility() {
navigateToTopics();
@@ -259,9 +238,7 @@ public class TopicsTest extends BaseTest {
Assert.assertTrue(topicCreateEditForm.isCreateTopicButtonEnabled(), "isCreateTopicButtonEnabled()");
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(266)
+ @QaseId(266)
@Test(priority = 9)
public void checkTimeToRetainDataCustomValueWithEditingTopic() {
Topic topicToRetainData = new Topic()
@@ -292,9 +269,7 @@ public class TopicsTest extends BaseTest {
TOPIC_LIST.add(topicToRetainData);
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(6)
+ @QaseId(6)
@Test(priority = 10)
public void checkCustomParametersWithinCreateNewTopic() {
navigateToTopics();
@@ -313,9 +288,7 @@ public class TopicsTest extends BaseTest {
"isValidationMessageCustomParameterValueVisible()");
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(2)
+ @QaseId(2)
@Test(priority = 11)
public void checkTopicListElements() {
navigateToTopics();
@@ -323,11 +296,9 @@ public class TopicsTest extends BaseTest {
verifyElementsCondition(topicsList.getAllEnabledElements(), Condition.enabled);
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(12)
+ @QaseId(12)
@Test(priority = 12)
- public void addingNewFilterWithinTopic() {
+ public void addNewFilterWithinTopic() {
String filterName = randomAlphabetic(5);
navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
topicDetails
@@ -345,9 +316,7 @@ public class TopicsTest extends BaseTest {
Assert.assertTrue(topicDetails.isActiveFilterVisible(filterName), "isActiveFilterVisible()");
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(13)
+ @QaseId(13)
@Test(priority = 13)
public void checkFilterSavingWithinSavedFilters() {
String displayName = randomAlphabetic(5);
@@ -368,9 +337,7 @@ public class TopicsTest extends BaseTest {
"isFilterVisibleAtSavedFiltersMdl()");
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(14)
+ @QaseId(14)
@Test(priority = 14)
public void checkApplyingSavedFilterWithinTopicMessages() {
String displayName = randomAlphabetic(5);
@@ -389,9 +356,7 @@ public class TopicsTest extends BaseTest {
Assert.assertTrue(topicDetails.isActiveFilterVisible(displayName), "isActiveFilterVisible()");
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(11)
+ @QaseId(11)
@Test(priority = 15)
public void checkShowInternalTopicsButtonFunctionality() {
navigateToTopics();
@@ -407,9 +372,7 @@ public class TopicsTest extends BaseTest {
softly.assertAll();
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(56)
+ @QaseId(56)
@Test(priority = 16)
public void checkRetentionBytesAccordingToMaxSizeOnDisk() {
navigateToTopics();
@@ -457,9 +420,7 @@ public class TopicsTest extends BaseTest {
softly.assertAll();
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(247)
+ @QaseId(247)
@Test(priority = 17)
public void recreateTopicFromTopicProfile() {
Topic topicToRecreate = new Topic()
@@ -487,9 +448,7 @@ public class TopicsTest extends BaseTest {
"isAlertWithMessageVisible()");
}
- @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
- @AutomationStatus(status = Status.AUTOMATED)
- @CaseId(8)
+ @QaseId(8)
@Test(priority = 18)
public void checkCopyTopicPossibility() {
Topic topicToCopy = new Topic()
From 8889463f7b2fc8b5e559c9f163346aa9825f55fc Mon Sep 17 00:00:00 2001
From: Vlad Senyuta <66071557+VladSenyuta@users.noreply.github.com>
Date: Mon, 6 Mar 2023 16:41:35 +0200
Subject: [PATCH 38/54] add httpcomponents (#3449)
---
kafka-ui-e2e-checks/pom.xml | 17 +++++++++++++++++
1 file changed, 17 insertions(+)
diff --git a/kafka-ui-e2e-checks/pom.xml b/kafka-ui-e2e-checks/pom.xml
index 6d3fc6e64e..0b458cf173 100644
--- a/kafka-ui-e2e-checks/pom.xml
+++ b/kafka-ui-e2e-checks/pom.xml
@@ -15,6 +15,8 @@
3.0.0-M8
${project.version}
<testcontainers.version>1.17.6</testcontainers.version>
+ <httpcomponents.version>5.2.1</httpcomponents.version>
+ <selenium.version>4.8.1</selenium.version>
6.11.2
7.7.0
2.21.0
@@ -120,6 +122,21 @@
<artifactId>selenium</artifactId>
<version>${testcontainers.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.httpcomponents.core5</groupId>
+ <artifactId>httpcore5</artifactId>
+ <version>${httpcomponents.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.httpcomponents.client5</groupId>
+ <artifactId>httpclient5</artifactId>
+ <version>${httpcomponents.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.seleniumhq.selenium</groupId>
+ <artifactId>selenium-http</artifactId>
+ <version>${selenium.version}</version>
+ </dependency>
<dependency>
<groupId>com.codeborne</groupId>
<artifactId>selenide</artifactId>
From be151b4d82b4029e14d2a37d54efbff939fe7bf4 Mon Sep 17 00:00:00 2001
From: David <58771979+David-DB88@users.noreply.github.com>
Date: Tue, 7 Mar 2023 10:36:52 +0400
Subject: [PATCH 39/54] [FE] Add a warning when copying to clipboard in non-SSL
envs (#3394)
* set a warning message on copyToClipboard if the user is on http
* Update kafka-ui-react-app/src/lib/hooks/useDataSaver.ts
* changed custom type with warning
* added warning title
* added test case for warning message
---------
Co-authored-by: davitbejanyan
Co-authored-by: Roman Zabaluev
Co-authored-by: Oleg Shur
---
.../components/common/Alert/Alert.styled.ts | 4 +--
.../src/components/common/Alert/Alert.tsx | 4 +--
kafka-ui-react-app/src/lib/errorHandling.tsx | 3 +-
.../lib/hooks/__tests__/useDataSaver.spec.tsx | 31 ++++++++++++++++++-
.../src/lib/hooks/useDataSaver.ts | 9 +++++-
5 files changed, 44 insertions(+), 7 deletions(-)
diff --git a/kafka-ui-react-app/src/components/common/Alert/Alert.styled.ts b/kafka-ui-react-app/src/components/common/Alert/Alert.styled.ts
index 58ae2ed09d..eefd3bd3be 100644
--- a/kafka-ui-react-app/src/components/common/Alert/Alert.styled.ts
+++ b/kafka-ui-react-app/src/components/common/Alert/Alert.styled.ts
@@ -1,7 +1,7 @@
-import { ToastType } from 'react-hot-toast';
import styled from 'styled-components';
+import { ToastTypes } from 'lib/errorHandling';
-export const Alert = styled.div<{ $type: ToastType }>`
+export const Alert = styled.div<{ $type: ToastTypes }>`
background-color: ${({ $type, theme }) => theme.alert.color[$type]};
width: 500px;
min-height: 64px;
diff --git a/kafka-ui-react-app/src/components/common/Alert/Alert.tsx b/kafka-ui-react-app/src/components/common/Alert/Alert.tsx
index 8f9d167d1e..5b58a573d4 100644
--- a/kafka-ui-react-app/src/components/common/Alert/Alert.tsx
+++ b/kafka-ui-react-app/src/components/common/Alert/Alert.tsx
@@ -1,13 +1,13 @@
import React from 'react';
import CloseIcon from 'components/common/Icons/CloseIcon';
import IconButtonWrapper from 'components/common/Icons/IconButtonWrapper';
-import { ToastType } from 'react-hot-toast';
+import { ToastTypes } from 'lib/errorHandling';
import * as S from './Alert.styled';
export interface AlertProps {
title: string;
- type: ToastType;
+ type: ToastTypes;
message: React.ReactNode;
onDissmiss(): void;
}
diff --git a/kafka-ui-react-app/src/lib/errorHandling.tsx b/kafka-ui-react-app/src/lib/errorHandling.tsx
index f07f4cf52a..58b2a8bb70 100644
--- a/kafka-ui-react-app/src/lib/errorHandling.tsx
+++ b/kafka-ui-react-app/src/lib/errorHandling.tsx
@@ -9,6 +9,7 @@ interface ServerResponse {
url?: string;
message?: ErrorResponse['message'];
}
+export type ToastTypes = ToastType | 'warning';
export const getResponse = async (
response: Response
@@ -34,7 +35,7 @@ interface AlertOptions {
}
export const showAlert = (
- type: ToastType,
+ type: ToastTypes,
{ title, message, id }: AlertOptions
) => {
toast.custom(
diff --git a/kafka-ui-react-app/src/lib/hooks/__tests__/useDataSaver.spec.tsx b/kafka-ui-react-app/src/lib/hooks/__tests__/useDataSaver.spec.tsx
index f0631850e5..9b125575d9 100644
--- a/kafka-ui-react-app/src/lib/hooks/__tests__/useDataSaver.spec.tsx
+++ b/kafka-ui-react-app/src/lib/hooks/__tests__/useDataSaver.spec.tsx
@@ -1,7 +1,12 @@
import React, { useEffect } from 'react';
import useDataSaver from 'lib/hooks/useDataSaver';
import { render } from '@testing-library/react';
+import { showAlert } from 'lib/errorHandling';
+jest.mock('lib/errorHandling', () => ({
+ ...jest.requireActual('lib/errorHandling'),
+ showAlert: jest.fn(),
+}));
describe('useDataSaver hook', () => {
const content = {
title: 'title',
@@ -38,7 +43,6 @@ describe('useDataSaver hook', () => {
mockCreate.mockRestore();
});
});
-
describe('copies the data to the clipboard', () => {
Object.assign(navigator, {
clipboard: {
@@ -74,4 +78,29 @@ describe('useDataSaver hook', () => {
);
});
});
+ describe('navigator clipboard is undefined', () => {
+ it('calls showAlert with the correct parameters when clipboard API is unavailable', () => {
+ Object.assign(navigator, {
+ clipboard: undefined,
+ });
+
+ const HookWrapper: React.FC = () => {
+ const { copyToClipboard } = useDataSaver('topic', content);
+ useEffect(() => {
+ copyToClipboard();
+ }, [copyToClipboard]);
+ return null;
+ };
+
+ render(<HookWrapper />);
+
+ expect(showAlert).toHaveBeenCalledTimes(1);
+ expect(showAlert).toHaveBeenCalledWith('warning', {
+ id: 'topic',
+ title: 'Warning',
+ message:
+ 'Copying to clipboard is unavailable due to unsecured (non-HTTPS) connection',
+ });
+ });
+ });
});
diff --git a/kafka-ui-react-app/src/lib/hooks/useDataSaver.ts b/kafka-ui-react-app/src/lib/hooks/useDataSaver.ts
index d1ff61386d..9bcc103679 100644
--- a/kafka-ui-react-app/src/lib/hooks/useDataSaver.ts
+++ b/kafka-ui-react-app/src/lib/hooks/useDataSaver.ts
@@ -1,4 +1,4 @@
-import { showSuccessAlert } from 'lib/errorHandling';
+import { showAlert, showSuccessAlert } from 'lib/errorHandling';
const useDataSaver = (
subject: string,
@@ -14,6 +14,13 @@ const useDataSaver = (
title: '',
message: 'Copied successfully!',
});
+ } else {
+ showAlert('warning', {
+ id: subject,
+ title: 'Warning',
+ message:
+ 'Copying to clipboard is unavailable due to unsecured (non-HTTPS) connection',
+ });
}
};
const saveFile = () => {
From e261143bb466998302e7b91ccd6ad07622bc548a Mon Sep 17 00:00:00 2001
From: kristi-dev <92114648+kristi-dev@users.noreply.github.com>
Date: Tue, 7 Mar 2023 06:39:15 +0000
Subject: [PATCH 40/54] [FE] Fix oldest first button overlaps the Submit button
(#3005)
Co-authored-by: Roman Zabaluev
---
.../components/Topics/Topic/Messages/Filters/Filters.styled.ts | 1 +
1 file changed, 1 insertion(+)
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.styled.ts b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.styled.ts
index a8df901be4..10802ee0db 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.styled.ts
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.styled.ts
@@ -33,6 +33,7 @@ export const FilterInputs = styled.div`
gap: 8px;
align-items: flex-end;
width: 90%;
+ flex-wrap: wrap;
`;
export const SeekTypeSelectorWrapper = styled.div`
From b3f74cbfea7b43e50a817331e0ed549e35d566d9 Mon Sep 17 00:00:00 2001
From: Arthur <79512124+ArthurNiedial@users.noreply.github.com>
Date: Tue, 7 Mar 2023 11:36:12 +0200
Subject: [PATCH 41/54] [e2e]Checking the URL redirections for kafka-ui pages
(#3450)
* [e2e]Checking the URL redirections for kafka-ui pages
* [e2e]Checking the URL redirections for kafka-ui pages
* [e2e]Checking the URL redirections for kafka-ui pages
* [e2e]Checking the URL redirections for kafka-ui pages
* rmv ignore from copyMessageFromTopicProfile
* move @QaseId(21) to manualSuite
---------
Co-authored-by: VladSenyuta
---
.../ui/pages/consumers/ConsumersList.java | 9 +++
.../com/provectus/kafka/ui/variables/Url.java | 11 ++++
.../java/com/provectus/kafka/ui/BaseTest.java | 62 ++++++++++++++++++-
.../ui/manualSuite/suite/TopicsTest.java | 32 ++++++----
.../kafka/ui/smokeSuite/SmokeTest.java | 31 ++++++++++
.../ui/smokeSuite/brokers/BrokersTest.java | 10 ---
.../smokeSuite/connectors/ConnectorsTest.java | 19 ------
.../ui/smokeSuite/ksqlDb/KsqlDbTest.java | 9 +--
.../ui/smokeSuite/schemas/SchemasTest.java | 19 ------
.../ui/smokeSuite/topics/MessagesTest.java | 15 -----
10 files changed, 132 insertions(+), 85 deletions(-)
create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Url.java
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java
index b3a3be42a2..6d0c1d48f7 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java
@@ -1,11 +1,20 @@
package com.provectus.kafka.ui.pages.consumers;
+import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
+import io.qameta.allure.Step;
import static com.codeborne.selenide.Selenide.$x;
public class ConsumersList extends BasePage {
protected SelenideElement consumerListHeader = $x("//h1[text()='Consumers']");
+
+ @Step
+ public ConsumersList waitUntilScreenReady() {
+ waitUntilSpinnerDisappear();
+ consumerListHeader.shouldHave(Condition.visible);
+ return this;
+ }
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Url.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Url.java
new file mode 100644
index 0000000000..f612d743a5
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Url.java
@@ -0,0 +1,11 @@
+package com.provectus.kafka.ui.variables;
+
+public interface Url {
+
+ String BROKERS_LIST_URL = "http://%s:8080/ui/clusters/local/brokers";
+ String TOPICS_LIST_URL = "http://%s:8080/ui/clusters/local/all-topics?perPage=25";
+ String CONSUMERS_LIST_URL = "http://%s:8080/ui/clusters/local/consumer-groups";
+ String SCHEMA_REGISTRY_LIST_URL = "http://%s:8080/ui/clusters/local/schemas";
+ String KAFKA_CONNECT_LIST_URL = "http://%s:8080/ui/clusters/local/connectors";
+ String KSQL_DB_LIST_URL = "http://%s:8080/ui/clusters/local/ksqldb/tables";
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java
index 399f81e1aa..b2e7e007e5 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java
@@ -22,7 +22,7 @@ import org.testng.asserts.SoftAssert;
import java.time.Duration;
import java.util.List;
-import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.TOPICS;
+import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.*;
import static com.provectus.kafka.ui.settings.BaseSource.*;
import static com.provectus.kafka.ui.settings.drivers.LocalWebDriver.*;
import static com.provectus.kafka.ui.utilities.qaseUtils.QaseSetup.qaseIntegrationSetup;
@@ -108,6 +108,14 @@ public abstract class BaseTest extends Facade {
public void afterMethod() {
browserClear();
}
+
+ @Step
+ protected void navigateToBrokers() {
+ naviSideBar
+ .openSideMenu(BROKERS);
+ brokersList
+ .waitUntilScreenReady();
+ }
@Step
protected void navigateToTopics() {
@@ -127,7 +135,57 @@ public abstract class BaseTest extends Facade {
topicDetails
.waitUntilScreenReady();
}
-
+
+ @Step
+ protected void navigateToConsumers() {
+ naviSideBar
+ .openSideMenu(CONSUMERS);
+ consumersList
+ .waitUntilScreenReady();
+ }
+
+ @Step
+ protected void navigateToSchemaRegistry() {
+ naviSideBar
+ .openSideMenu(SCHEMA_REGISTRY);
+ schemaRegistryList
+ .waitUntilScreenReady();
+ }
+
+ @Step
+ protected void navigateToSchemaRegistryAndOpenDetails(String schemaName) {
+ navigateToSchemaRegistry();
+ schemaRegistryList
+ .openSchema(schemaName);
+ schemaDetails
+ .waitUntilScreenReady();
+ }
+
+ @Step
+ protected void navigateToConnectors() {
+ naviSideBar
+ .openSideMenu(KAFKA_CONNECT);
+ kafkaConnectList
+ .waitUntilScreenReady();
+ }
+
+ @Step
+ protected void navigateToConnectorsAndOpenDetails(String connectorName) {
+ navigateToConnectors();
+ kafkaConnectList
+ .openConnector(connectorName);
+ connectorDetails
+ .waitUntilScreenReady();
+ }
+
+ @Step
+ protected void navigateToKsqlDb() {
+ naviSideBar
+ .openSideMenu(KSQL_DB);
+ ksqlDbList
+ .waitUntilScreenReady();
+ }
+
@Step
protected void verifyElementsCondition(List<SelenideElement> elementList, Condition expectedCondition) {
SoftAssert softly = new SoftAssert();
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/TopicsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/TopicsTest.java
index d768f939ce..35188e3f45 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/TopicsTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/TopicsTest.java
@@ -22,74 +22,80 @@ public class TopicsTest extends BaseManualTest {
}
@Automation(state = NOT_AUTOMATED)
- @QaseId(22)
- @Test
+ @QaseId(21)
+ @Test()
public void testCaseC() {
}
@Automation(state = NOT_AUTOMATED)
- @QaseId(46)
+ @QaseId(22)
@Test
public void testCaseD() {
}
@Automation(state = NOT_AUTOMATED)
- @QaseId(47)
+ @QaseId(46)
@Test
public void testCaseE() {
}
@Automation(state = NOT_AUTOMATED)
- @QaseId(48)
+ @QaseId(47)
@Test
public void testCaseF() {
}
@Automation(state = NOT_AUTOMATED)
- @QaseId(49)
+ @QaseId(48)
@Test
public void testCaseG() {
}
@Automation(state = NOT_AUTOMATED)
- @QaseId(50)
+ @QaseId(49)
@Test
public void testCaseH() {
}
@Automation(state = NOT_AUTOMATED)
- @QaseId(57)
+ @QaseId(50)
@Test
public void testCaseI() {
}
@Automation(state = NOT_AUTOMATED)
- @QaseId(58)
+ @QaseId(57)
@Test
public void testCaseJ() {
}
@Automation(state = NOT_AUTOMATED)
- @QaseId(269)
+ @QaseId(58)
@Test
public void testCaseK() {
}
@Automation(state = NOT_AUTOMATED)
- @QaseId(270)
+ @QaseId(269)
@Test
public void testCaseL() {
}
@Automation(state = NOT_AUTOMATED)
- @QaseId(271)
+ @QaseId(270)
@Test
public void testCaseM() {
}
@Automation(state = NOT_AUTOMATED)
- @QaseId(272)
+ @QaseId(271)
@Test
public void testCaseN() {
}
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(272)
+ @Test
+ public void testCaseO() {
+ }
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java
index a0c4f05da2..0ea5d2eb82 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java
@@ -1,13 +1,20 @@
package com.provectus.kafka.ui.smokeSuite;
import com.codeborne.selenide.Condition;
+import com.codeborne.selenide.WebDriverRunner;
import com.provectus.kafka.ui.BaseTest;
+import io.qameta.allure.Step;
import io.qase.api.annotation.QaseId;
+import org.testng.Assert;
import org.testng.annotations.Test;
import java.util.stream.Collectors;
import java.util.stream.Stream;
+import static com.provectus.kafka.ui.settings.BaseSource.BROWSER;
+import static com.provectus.kafka.ui.variables.Browser.LOCAL;
+import static com.provectus.kafka.ui.variables.Url.*;
+
public class SmokeTest extends BaseTest {
@QaseId(198)
@@ -20,4 +27,28 @@ public class SmokeTest extends BaseTest {
Stream.concat(topPanel.getAllEnabledElements().stream(), naviSideBar.getAllMenuButtons().stream())
.collect(Collectors.toList()), Condition.enabled);
}
+
+ @QaseId(45)
+ @Test
+ public void checkUrlWhileNavigating() {
+ navigateToBrokers();
+ verifyCurrentUrl(BROKERS_LIST_URL);
+ navigateToTopics();
+ verifyCurrentUrl(TOPICS_LIST_URL);
+ navigateToConsumers();
+ verifyCurrentUrl(CONSUMERS_LIST_URL);
+ navigateToSchemaRegistry();
+ verifyCurrentUrl(SCHEMA_REGISTRY_LIST_URL);
+ navigateToConnectors();
+ verifyCurrentUrl(KAFKA_CONNECT_LIST_URL);
+ navigateToKsqlDb();
+ verifyCurrentUrl(KSQL_DB_LIST_URL);
+ }
+
+ @Step
+ private void verifyCurrentUrl(String expectedUrl) {
+ String host = BROWSER.equals(LOCAL) ? "localhost" : "host.testcontainers.internal";
+ Assert.assertEquals(WebDriverRunner.getWebDriver().getCurrentUrl(),
+ String.format(expectedUrl, host), "getCurrentUrl()");
+ }
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java
index 7b11aa6556..c9029e30ae 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java
@@ -2,12 +2,10 @@ package com.provectus.kafka.ui.smokeSuite.brokers;
import com.codeborne.selenide.Condition;
import com.provectus.kafka.ui.BaseTest;
-import io.qameta.allure.Step;
import io.qase.api.annotation.QaseId;
import org.testng.Assert;
import org.testng.annotations.Test;
-import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.BROKERS;
import static com.provectus.kafka.ui.pages.brokers.BrokersDetails.DetailsTab.CONFIGS;
public class BrokersTest extends BaseTest {
@@ -40,12 +38,4 @@ public class BrokersTest extends BaseTest {
verifyElementsCondition(brokersConfigTab.getEditButtons(), Condition.enabled);
Assert.assertTrue(brokersConfigTab.isSearchByKeyVisible(), "isSearchByKeyVisible()");
}
-
- @Step
- private void navigateToBrokers() {
- naviSideBar
- .openSideMenu(BROKERS);
- brokersList
- .waitUntilScreenReady();
- }
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java
index 5291648014..c54138d8c5 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java
@@ -3,7 +3,6 @@ package com.provectus.kafka.ui.smokeSuite.connectors;
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.models.Connector;
import com.provectus.kafka.ui.models.Topic;
-import io.qameta.allure.Step;
import io.qase.api.annotation.QaseId;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
@@ -14,7 +13,6 @@ import java.util.ArrayList;
import java.util.List;
import static com.provectus.kafka.ui.pages.BasePage.AlertHeader.SUCCESS;
-import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.KAFKA_CONNECT;
import static com.provectus.kafka.ui.utilities.FileUtils.getResourceAsString;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
@@ -107,21 +105,4 @@ public class ConnectorsTest extends BaseTest {
apiService.deleteConnector(CONNECT_NAME, connector.getName()));
TOPIC_LIST.forEach(topic -> apiService.deleteTopic(topic.getName()));
}
-
- @Step
- private void navigateToConnectors() {
- naviSideBar
- .openSideMenu(KAFKA_CONNECT);
- kafkaConnectList
- .waitUntilScreenReady();
- }
-
- @Step
- private void navigateToConnectorsAndOpenDetails(String connectorName) {
- navigateToConnectors();
- kafkaConnectList
- .openConnector(connectorName);
- connectorDetails
- .waitUntilScreenReady();
- }
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
index 3627f321df..ab1705922a 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
@@ -8,7 +8,6 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.testng.asserts.SoftAssert;
-import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.KSQL_DB;
import static com.provectus.kafka.ui.pages.ksqlDb.enums.KsqlQueryConfig.SHOW_TABLES;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
@@ -34,10 +33,8 @@ public class KsqlDbTest extends BaseTest {
@QaseId(41)
@Test(priority = 1)
public void checkShowTablesRequestExecution() {
- naviSideBar
- .openSideMenu(KSQL_DB);
+ navigateToKsqlDb();
ksqlDbList
- .waitUntilScreenReady()
.clickExecuteKsqlRequestBtn();
ksqlQueryForm
.waitUntilScreenReady()
@@ -53,10 +50,8 @@ public class KsqlDbTest extends BaseTest {
@QaseId(86)
@Test(priority = 2)
public void clearResultsForExecutedRequest() {
- naviSideBar
- .openSideMenu(KSQL_DB);
+ navigateToKsqlDb();
ksqlDbList
- .waitUntilScreenReady()
.clickExecuteKsqlRequestBtn();
ksqlQueryForm
.waitUntilScreenReady()
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java
index 56ff2c0ff0..bc9519ee50 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java
@@ -4,7 +4,6 @@ import com.codeborne.selenide.Condition;
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.api.model.CompatibilityLevel;
import com.provectus.kafka.ui.models.Schema;
-import io.qameta.allure.Step;
import io.qase.api.annotation.QaseId;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
@@ -15,7 +14,6 @@ import org.testng.asserts.SoftAssert;
import java.util.ArrayList;
import java.util.List;
-import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.SCHEMA_REGISTRY;
import static com.provectus.kafka.ui.utilities.FileUtils.fileToString;
public class SchemasTest extends BaseTest {
@@ -188,21 +186,4 @@ public class SchemasTest extends BaseTest {
public void afterClass() {
SCHEMA_LIST.forEach(schema -> apiService.deleteSchema(schema.getName()));
}
-
- @Step
- private void navigateToSchemaRegistry() {
- naviSideBar
- .openSideMenu(SCHEMA_REGISTRY);
- schemaRegistryList
- .waitUntilScreenReady();
- }
-
- @Step
- private void navigateToSchemaRegistryAndOpenDetails(String schemaName) {
- navigateToSchemaRegistry();
- schemaRegistryList
- .openSchema(schemaName);
- schemaDetails
- .waitUntilScreenReady();
- }
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
index 9f8d18b685..dc7fc403c8 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
@@ -147,21 +147,6 @@ public class MessagesTest extends BaseTest {
softly.assertAll();
}
- @Ignore
- @Issue("https://github.com/provectus/kafka-ui/issues/2819")
- @QaseId(21)
- @Test(priority = 5)
- public void copyMessageFromTopicProfile() {
- navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
- topicDetails
- .openDetailsTab(MESSAGES)
- .getRandomMessage()
- .openDotMenu()
- .clickCopyToClipBoard();
- Assert.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS, "Copied successfully!"),
- "isAlertWithMessageVisible()");
- }
-
@Ignore
@Issue("https://github.com/provectus/kafka-ui/issues/2394")
@QaseId(15)
From 4d20cb695804d4247731dffc62ebdb5d685647be Mon Sep 17 00:00:00 2001
From: Ilya Kuramshin
Date: Tue, 7 Mar 2023 16:24:19 +0400
Subject: [PATCH 42/54] [BE] ProtobufFilesSerde imports implementation &
refactoring (#3357)
* Feature: Support more complex Protobuf files
The changes in https://github.com/provectus/kafka-ui/pull/2874 added
initial support for using more than 1 Protobuf file in Kafka UI in
absence of a proper schema registry.
This change is building upon that functionality to support more complex
scenarios in which there are multiple Protobuf files being used and not
all of them are explicitly listed (for example imports).
It's using the already present Wire library from Square to do the heavy
lifting and create a comprehensive schema from all Protobuf files and
directories listed in the Kafka UI configuration.
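
To illustrate the directory-based loading described above, here is a minimal, hypothetical sketch using Wire's high-level SchemaLoader. This is not the serde's actual code (the patch below drives Wire's lower-level Loader/Linker classes directly); it assumes the com.squareup.wire:wire-schema 4.x artifact with its okio-backed file system, and the directory path and "test.MyKey" type name are placeholders:

```java
// Hedged sketch: assumes wire-schema 4.x; paths and type names are placeholders.
import com.squareup.wire.schema.Location;
import com.squareup.wire.schema.Schema;
import com.squareup.wire.schema.SchemaLoader;
import java.util.List;
import okio.FileSystem;

public class ProtoDirLoadSketch {
  public static void main(String[] args) {
    SchemaLoader loader = new SchemaLoader(FileSystem.SYSTEM);
    // Every .proto under the source root is parsed; imports between the
    // files (and the bundled google.protobuf well-known types) are linked.
    loader.initRoots(
        List.of(Location.get("/path/to/my-protobufs")), // source roots
        List.of());                                     // extra import roots
    Schema schema = loader.loadSchema();
    // Any fully qualified message type from the tree is now resolvable.
    System.out.println(schema.getType("test.MyKey"));
  }
}
```

Linking the whole tree at once is what makes a single protobufFilesDir-style setting sufficient even when files import each other instead of being listed explicitly.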
* Refactor schema loading logic and reuse in tests
* Add support for reading Protobufs from ZIP archives
* Remove unused ProtobufFileSerde#toLocation(Path)
* wip
* wip
* wip
* wip
* wip
* wip
* wip
---------
Co-authored-by: Jochen Schalanda
Co-authored-by: iliax
Co-authored-by: Roman Zabaluev
---
documentation/compose/kafka-ui-serdes.yaml | 3 +-
documentation/compose/proto/key-types.proto | 4 +
documentation/guides/Protobuf.md | 22 +-
documentation/guides/Serialization.md | 6 +-
.../ui/serdes/builtin/ProtobufFileSerde.java | 397 ++++++++++----
.../serdes/builtin/ProtobufFileSerdeTest.java | 514 +++++++++---------
.../{ => protobuf-serde}/address-book.proto | 12 +-
.../protobuf-serde/lang-description.proto | 11 +
.../protobuf-serde/language/language.proto | 11 +
.../{ => protobuf-serde}/sensor.proto | 2 +-
10 files changed, 595 insertions(+), 387 deletions(-)
rename kafka-ui-api/src/test/resources/{ => protobuf-serde}/address-book.proto (81%)
create mode 100644 kafka-ui-api/src/test/resources/protobuf-serde/lang-description.proto
create mode 100644 kafka-ui-api/src/test/resources/protobuf-serde/language/language.proto
rename kafka-ui-api/src/test/resources/{ => protobuf-serde}/sensor.proto (93%)
diff --git a/documentation/compose/kafka-ui-serdes.yaml b/documentation/compose/kafka-ui-serdes.yaml
index 143d454bb5..c380a34f6b 100644
--- a/documentation/compose/kafka-ui-serdes.yaml
+++ b/documentation/compose/kafka-ui-serdes.yaml
@@ -28,8 +28,7 @@ services:
kafka.clusters.0.serde.0.name: ProtobufFile
kafka.clusters.0.serde.0.topicKeysPattern: "topic1"
kafka.clusters.0.serde.0.topicValuesPattern: "topic1"
- kafka.clusters.0.serde.0.properties.protobufFiles.0: /protofiles/key-types.proto
- kafka.clusters.0.serde.0.properties.protobufFiles.1: /protofiles/values.proto
+ kafka.clusters.0.serde.0.properties.protobufFilesDir: /protofiles/
kafka.clusters.0.serde.0.properties.protobufMessageNameForKey: test.MyKey # default type for keys
kafka.clusters.0.serde.0.properties.protobufMessageName: test.MyValue # default type for values
kafka.clusters.0.serde.0.properties.protobufMessageNameForKeyByTopic.topic1: test.MySpecificTopicKey # keys type for topic "topic1"
diff --git a/documentation/compose/proto/key-types.proto b/documentation/compose/proto/key-types.proto
index 908aed5689..1f5e22a427 100644
--- a/documentation/compose/proto/key-types.proto
+++ b/documentation/compose/proto/key-types.proto
@@ -1,11 +1,15 @@
syntax = "proto3";
package test;
+import "google/protobuf/wrappers.proto";
+
message MyKey {
string myKeyF1 = 1;
+ google.protobuf.UInt64Value uint_64_wrapper = 2;
}
message MySpecificTopicKey {
string special_field1 = 1;
string special_field2 = 2;
+ google.protobuf.FloatValue float_wrapper = 3;
}
diff --git a/documentation/guides/Protobuf.md b/documentation/guides/Protobuf.md
index 533a6a8f48..12f92448c8 100644
--- a/documentation/guides/Protobuf.md
+++ b/documentation/guides/Protobuf.md
@@ -12,22 +12,26 @@ To configure Kafkaui to deserialize protobuf messages using a supplied protobuf
```yaml
kafka:
clusters:
- - # Cluster configuration omitted.
- # protobufFile is the path to the protobuf schema. (deprecated: please use "protobufFiles")
+ - # Cluster configuration omitted...
+ # protobufFilesDir specifies root location for proto files (will be scanned recursively)
+ # NOTE: if 'protobufFilesDir' specified, then 'protobufFile' and 'protobufFiles' settings will be ignored
+ protobufFilesDir: "/path/to/my-protobufs"
+ # (DEPRECATED) protobufFile is the path to the protobuf schema. (deprecated: please use "protobufFiles")
protobufFile: path/to/my.proto
- # protobufFiles is the path to one or more protobuf schemas.
- protobufFiles:
- - /path/to/my.proto
- - /path/to/another.proto
- # protobufMessageName is the default protobuf type that is used to deserilize
- # the message's value if the topic is not found in protobufMessageNameByTopic.
+ # (DEPRECATED) protobufFiles is the location of one or more protobuf schemas
+ protobufFiles:
+ - /path/to/my-protobufs/my.proto
+ - /path/to/my-protobufs/another.proto
+ - /path/to/my-protobufs:test/test.proto
+ # protobufMessageName is the default protobuf type that is used to deserialize
+ # the message's value if the topic is not found in protobufMessageNameByTopic.
protobufMessageName: my.DefaultValType
# protobufMessageNameByTopic is a mapping of topic names to protobuf types.
# This mapping is required and is used to deserialize the Kafka message's value.
protobufMessageNameByTopic:
topic1: my.Type1
topic2: my.Type2
- # protobufMessageNameForKey is the default protobuf type that is used to deserilize
+ # protobufMessageNameForKey is the default protobuf type that is used to deserialize
# the message's key if the topic is not found in protobufMessageNameForKeyByTopic.
protobufMessageNameForKey: my.DefaultKeyType
# protobufMessageNameForKeyByTopic is a mapping of topic names to protobuf types.
diff --git a/documentation/guides/Serialization.md b/documentation/guides/Serialization.md
index 3f827441d2..b9690f2cba 100644
--- a/documentation/guides/Serialization.md
+++ b/documentation/guides/Serialization.md
@@ -46,10 +46,8 @@ kafka:
serde:
- name: ProtobufFile
properties:
- # path to the protobuf schema files
- protobufFiles:
- - path/to/my.proto
- - path/to/another.proto
+ # path to the protobuf schema files directory
+ protobufFilesDir: "path/to/protofiles"
# default protobuf type that is used for KEY serialization/deserialization
# optional
protobufMessageNameForKey: my.Type1
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java
index 664c75b70d..8a4c28a320 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java
@@ -1,9 +1,36 @@
package com.provectus.kafka.ui.serdes.builtin;
import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.protobuf.AnyProto;
+import com.google.protobuf.ApiProto;
+import com.google.protobuf.DescriptorProtos;
+import com.google.protobuf.Descriptors;
import com.google.protobuf.Descriptors.Descriptor;
+import com.google.protobuf.DurationProto;
import com.google.protobuf.DynamicMessage;
+import com.google.protobuf.EmptyProto;
+import com.google.protobuf.FieldMaskProto;
+import com.google.protobuf.SourceContextProto;
+import com.google.protobuf.StructProto;
+import com.google.protobuf.TimestampProto;
+import com.google.protobuf.TypeProto;
+import com.google.protobuf.WrappersProto;
import com.google.protobuf.util.JsonFormat;
+import com.google.type.ColorProto;
+import com.google.type.DateProto;
+import com.google.type.DateTimeProto;
+import com.google.type.DayOfWeekProto;
+import com.google.type.ExprProto;
+import com.google.type.FractionProto;
+import com.google.type.IntervalProto;
+import com.google.type.LatLngProto;
+import com.google.type.MoneyProto;
+import com.google.type.MonthProto;
+import com.google.type.PhoneNumberProto;
+import com.google.type.PostalAddressProto;
+import com.google.type.QuaternionProto;
+import com.google.type.TimeOfDayProto;
import com.provectus.kafka.ui.exception.ValidationException;
import com.provectus.kafka.ui.serde.api.DeserializeResult;
import com.provectus.kafka.ui.serde.api.PropertyResolver;
@@ -11,13 +38,19 @@ import com.provectus.kafka.ui.serde.api.RecordHeaders;
import com.provectus.kafka.ui.serde.api.SchemaDescription;
import com.provectus.kafka.ui.serdes.BuiltInSerde;
import com.provectus.kafka.ui.util.jsonschema.ProtobufSchemaConverter;
+import com.squareup.wire.schema.ErrorCollector;
+import com.squareup.wire.schema.Linker;
+import com.squareup.wire.schema.Loader;
+import com.squareup.wire.schema.Location;
+import com.squareup.wire.schema.ProtoFile;
+import com.squareup.wire.schema.internal.parser.ProtoFileElement;
+import com.squareup.wire.schema.internal.parser.ProtoParser;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaUtils;
import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
@@ -28,7 +61,10 @@ import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nullable;
import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
+import org.jetbrains.annotations.NotNull;
+@Slf4j
public class ProtobufFileSerde implements BuiltInSerde {
public static String name() {
@@ -51,132 +87,35 @@ public class ProtobufFileSerde implements BuiltInSerde {
@Override
public boolean canBeAutoConfigured(PropertyResolver kafkaClusterProperties,
PropertyResolver globalProperties) {
- Optional<String> protobufFile = kafkaClusterProperties.getProperty("protobufFile", String.class);
- Optional<List<String>> protobufFiles = kafkaClusterProperties.getListProperty("protobufFiles", String.class);
- return protobufFile.isPresent() || protobufFiles.filter(files -> !files.isEmpty()).isPresent();
+ return Configuration.canBeAutoConfigured(kafkaClusterProperties);
}
@Override
public void autoConfigure(PropertyResolver kafkaClusterProperties,
PropertyResolver globalProperties) {
- configure(kafkaClusterProperties);
+ configure(Configuration.create(kafkaClusterProperties));
}
@Override
public void configure(PropertyResolver serdeProperties,
PropertyResolver kafkaClusterProperties,
PropertyResolver globalProperties) {
- configure(serdeProperties);
- }
-
- private void configure(PropertyResolver properties) {
- Map<Path, ProtobufSchema> protobufSchemas = joinPathProperties(properties).stream()
- .map(path -> Map.entry(path, new ProtobufSchema(readFileAsString(path))))
- .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
-
- // Load all referenced message schemas and store their source proto file with the descriptors
- Map<Descriptor, Path> descriptorPaths = new HashMap<>();
- Optional<String> protobufMessageName = properties.getProperty("protobufMessageName", String.class);
- protobufMessageName.ifPresent(messageName -> addProtobufSchema(descriptorPaths, protobufSchemas, messageName));
-
- Optional<String> protobufMessageNameForKey =
- properties.getProperty("protobufMessageNameForKey", String.class);
- protobufMessageNameForKey
- .ifPresent(messageName -> addProtobufSchema(descriptorPaths, protobufSchemas, messageName));
-
- Optional<Map<String, String>> protobufMessageNameByTopic =
- properties.getMapProperty("protobufMessageNameByTopic", String.class, String.class);
- protobufMessageNameByTopic
- .ifPresent(messageNamesByTopic -> addProtobufSchemas(descriptorPaths, protobufSchemas, messageNamesByTopic));
-
- Optional<Map<String, String>> protobufMessageNameForKeyByTopic =
- properties.getMapProperty("protobufMessageNameForKeyByTopic", String.class, String.class);
- protobufMessageNameForKeyByTopic
- .ifPresent(messageNamesByTopic -> addProtobufSchemas(descriptorPaths, protobufSchemas, messageNamesByTopic));
-
- // Fill dictionary for descriptor lookup by full message name
- Map<String, Descriptor> descriptorMap = descriptorPaths.keySet().stream()
- .collect(Collectors.toMap(Descriptor::getFullName, Function.identity()));
-
- configure(
- protobufMessageName.map(descriptorMap::get).orElse(null),
- protobufMessageNameForKey.map(descriptorMap::get).orElse(null),
- descriptorPaths,
- protobufMessageNameByTopic.map(map -> populateDescriptors(descriptorMap, map)).orElse(Map.of()),
- protobufMessageNameForKeyByTopic.map(map -> populateDescriptors(descriptorMap, map)).orElse(Map.of())
- );
+ configure(Configuration.create(serdeProperties));
}
@VisibleForTesting
- void configure(
- @Nullable Descriptor defaultMessageDescriptor,
- @Nullable Descriptor defaultKeyMessageDescriptor,
- Map<Descriptor, Path> descriptorPaths,
- Map<String, Descriptor> messageDescriptorMap,
- Map<String, Descriptor> keyMessageDescriptorMap) {
- if (defaultMessageDescriptor == null
- && defaultKeyMessageDescriptor == null
- && messageDescriptorMap.isEmpty()
- && keyMessageDescriptorMap.isEmpty()) {
+ void configure(Configuration configuration) {
+ if (configuration.defaultMessageDescriptor() == null
+ && configuration.defaultKeyMessageDescriptor() == null
+ && configuration.messageDescriptorMap().isEmpty()
+ && configuration.keyMessageDescriptorMap().isEmpty()) {
throw new ValidationException("Neither default, not per-topic descriptors defined for " + name() + " serde");
}
- this.defaultMessageDescriptor = defaultMessageDescriptor;
- this.defaultKeyMessageDescriptor = defaultKeyMessageDescriptor;
- this.descriptorPaths = descriptorPaths;
- this.messageDescriptorMap = messageDescriptorMap;
- this.keyMessageDescriptorMap = keyMessageDescriptorMap;
- }
-
- private static void addProtobufSchema(Map<Descriptor, Path> descriptorPaths,
- Map<Path, ProtobufSchema> protobufSchemas,
- String messageName) {
- var descriptorAndPath = getDescriptorAndPath(protobufSchemas, messageName);
- descriptorPaths.put(descriptorAndPath.getKey(), descriptorAndPath.getValue());
- }
-
- private static void addProtobufSchemas(Map<Descriptor, Path> descriptorPaths,
- Map<Path, ProtobufSchema> protobufSchemas,
- Map<String, String> messageNamesByTopic) {
- messageNamesByTopic.values().stream()
- .map(msgName -> getDescriptorAndPath(protobufSchemas, msgName))
- .forEach(entry -> descriptorPaths.put(entry.getKey(), entry.getValue()));
- }
-
- private static List<Path> joinPathProperties(PropertyResolver propertyResolver) {
- return Stream.concat(
- propertyResolver.getProperty("protobufFile", String.class).map(List::of).stream(),
- propertyResolver.getListProperty("protobufFiles", String.class).stream())
- .flatMap(Collection::stream)
- .distinct()
- .map(Path::of)
- .collect(Collectors.toList());
- }
-
- private static Map.Entry<Descriptor, Path> getDescriptorAndPath(Map<Path, ProtobufSchema> protobufSchemas,
- String msgName) {
- return protobufSchemas.entrySet().stream()
- .filter(schema -> schema.getValue().toDescriptor(msgName) != null)
- .map(schema -> Map.entry(schema.getValue().toDescriptor(msgName), schema.getKey()))
- .findFirst()
- .orElseThrow(() -> new NullPointerException(
- "The given message type not found in protobuf definition: " + msgName));
- }
-
- private static String readFileAsString(Path path) {
- try {
- return Files.readString(path);
- } catch (IOException e) {
- throw new UncheckedIOException(e);
- }
- }
-
- private Map<String, Descriptor> populateDescriptors(Map<String, Descriptor> descriptorMap,
- Map<String, String> messageNameMap) {
- Map<String, Descriptor> descriptors = new HashMap<>();
- for (Map.Entry<String, String> entry : messageNameMap.entrySet()) {
- descriptors.put(entry.getKey(), descriptorMap.get(entry.getValue()));
- }
- return descriptors;
+ this.defaultMessageDescriptor = configuration.defaultMessageDescriptor();
+ this.defaultKeyMessageDescriptor = configuration.defaultKeyMessageDescriptor();
+ this.descriptorPaths = configuration.descriptorPaths();
+ this.messageDescriptorMap = configuration.messageDescriptorMap();
+ this.keyMessageDescriptorMap = configuration.keyMessageDescriptorMap();
}
@Override
@@ -249,4 +188,238 @@ public class ProtobufFileSerde implements BuiltInSerde {
Map.of("messageName", descriptor.getFullName())
);
}
+
+ @SneakyThrows
+ private static String readFileAsString(Path path) {
+ return Files.readString(path);
+ }
+
+ //----------------------------------------------------------------------------------------------------------------
+
+ @VisibleForTesting
+ record Configuration(@Nullable Descriptor defaultMessageDescriptor,
+ @Nullable Descriptor defaultKeyMessageDescriptor,
+ Map<Descriptor, Path> descriptorPaths,
+ Map<String, Descriptor> messageDescriptorMap,
+ Map<String, Descriptor> keyMessageDescriptorMap) {
+
+ static boolean canBeAutoConfigured(PropertyResolver kafkaClusterProperties) {
+ Optional<String> protobufFile = kafkaClusterProperties.getProperty("protobufFile", String.class);
+ Optional<List<String>> protobufFiles = kafkaClusterProperties.getListProperty("protobufFiles", String.class);
+ Optional<String> protobufFilesDir = kafkaClusterProperties.getProperty("protobufFilesDir", String.class);
+ return protobufFilesDir.isPresent()
+ || protobufFile.isPresent()
+ || protobufFiles.filter(files -> !files.isEmpty()).isPresent();
+ }
+
+ static Configuration create(PropertyResolver properties) {
+ var protobufSchemas = loadSchemas(
+ properties.getProperty("protobufFile", String.class),
+ properties.getListProperty("protobufFiles", String.class),
+ properties.getProperty("protobufFilesDir", String.class)
+ );
+
+ // Load all referenced message schemas and store their source proto file with the descriptors
+ Map<Descriptor, Path> descriptorPaths = new HashMap<>();
+ Optional<String> protobufMessageName = properties.getProperty("protobufMessageName", String.class);
+ protobufMessageName.ifPresent(messageName -> addProtobufSchema(descriptorPaths, protobufSchemas, messageName));
+
+ Optional<String> protobufMessageNameForKey =
+ properties.getProperty("protobufMessageNameForKey", String.class);
+ protobufMessageNameForKey
+ .ifPresent(messageName -> addProtobufSchema(descriptorPaths, protobufSchemas, messageName));
+
+ Optional<Map<String, String>> protobufMessageNameByTopic =
+ properties.getMapProperty("protobufMessageNameByTopic", String.class, String.class);
+ protobufMessageNameByTopic
+ .ifPresent(messageNamesByTopic -> addProtobufSchemas(descriptorPaths, protobufSchemas, messageNamesByTopic));
+
+ Optional<Map<String, String>> protobufMessageNameForKeyByTopic =
+ properties.getMapProperty("protobufMessageNameForKeyByTopic", String.class, String.class);
+ protobufMessageNameForKeyByTopic
+ .ifPresent(messageNamesByTopic -> addProtobufSchemas(descriptorPaths, protobufSchemas, messageNamesByTopic));
+
+ // Fill dictionary for descriptor lookup by full message name
+ Map<String, Descriptor> descriptorMap = descriptorPaths.keySet().stream()
+ .collect(Collectors.toMap(Descriptor::getFullName, Function.identity()));
+
+ return new Configuration(
+ protobufMessageName.map(descriptorMap::get).orElse(null),
+ protobufMessageNameForKey.map(descriptorMap::get).orElse(null),
+ descriptorPaths,
+ protobufMessageNameByTopic.map(map -> populateDescriptors(descriptorMap, map)).orElse(Map.of()),
+ protobufMessageNameForKeyByTopic.map(map -> populateDescriptors(descriptorMap, map)).orElse(Map.of())
+ );
+ }
+
+ private static Map.Entry<Descriptor, Path> getDescriptorAndPath(Map<Path, ProtobufSchema> protobufSchemas,
+ String msgName) {
+ return protobufSchemas.entrySet().stream()
+ .filter(schema -> schema.getValue().toDescriptor(msgName) != null)
+ .map(schema -> Map.entry(schema.getValue().toDescriptor(msgName), schema.getKey()))
+ .findFirst()
+ .orElseThrow(() -> new NullPointerException(
+ "The given message type not found in protobuf definition: " + msgName));
+ }
+
+ private static Map<String, Descriptor> populateDescriptors(Map<String, Descriptor> descriptorMap,
+ Map<String, String> messageNameMap) {
+ Map<String, Descriptor> descriptors = new HashMap<>();
+ for (Map.Entry<String, String> entry : messageNameMap.entrySet()) {
+ descriptors.put(entry.getKey(), descriptorMap.get(entry.getValue()));
+ }
+ return descriptors;
+ }
+
+ @VisibleForTesting
+ static Map<Path, ProtobufSchema> loadSchemas(Optional<String> protobufFile,
+ Optional<List<String>> protobufFiles,
+ Optional<String> protobufFilesDir) {
+ if (protobufFilesDir.isPresent()) {
+ if (protobufFile.isPresent() || protobufFiles.isPresent()) {
+ log.warn("protobufFile and protobufFiles properties will be ignored, since protobufFilesDir is provided");
+ }
+ List<ProtoFile> loadedFiles = new ProtoSchemaLoader(protobufFilesDir.get()).load();
+ Map<String, ProtoFileElement> allPaths = loadedFiles.stream()
+ .collect(Collectors.toMap(f -> f.getLocation().getPath(), ProtoFile::toElement));
+ return loadedFiles.stream()
+ .collect(Collectors.toMap(
+ f -> Path.of(f.getLocation().getBase(), f.getLocation().getPath()),
+ f -> new ProtobufSchema(f.toElement(), List.of(), allPaths)));
+ }
+ // Supported for backward compatibility; normally the protobufFilesDir setting should be used
+ return Stream.concat(
+ protobufFile.stream(),
+ protobufFiles.stream().flatMap(Collection::stream)
+ )
+ .distinct()
+ .map(Path::of)
+ .collect(Collectors.toMap(path -> path, path -> new ProtobufSchema(readFileAsString(path))));
+ }
+
+ private static void addProtobufSchema(Map<Descriptor, Path> descriptorPaths,
+ Map<Path, ProtobufSchema> protobufSchemas,
+ String messageName) {
+ var descriptorAndPath = getDescriptorAndPath(protobufSchemas, messageName);
+ descriptorPaths.put(descriptorAndPath.getKey(), descriptorAndPath.getValue());
+ }
+
+ private static void addProtobufSchemas(Map<Descriptor, Path> descriptorPaths,
+ Map<Path, ProtobufSchema> protobufSchemas,
+ Map<String, String> messageNamesByTopic) {
+ messageNamesByTopic.values().stream()
+ .map(msgName -> getDescriptorAndPath(protobufSchemas, msgName))
+ .forEach(entry -> descriptorPaths.put(entry.getKey(), entry.getValue()));
+ }
+ }
+
+ static class ProtoSchemaLoader {
+
+ private final Path baseLocation;
+
+ ProtoSchemaLoader(String baseLocationStr) {
+ this.baseLocation = Path.of(baseLocationStr);
+ if (!Files.isReadable(baseLocation)) {
+ throw new ValidationException("proto files directory not readable");
+ }
+ }
+
+ List<ProtoFile> load() {
+ Map<String, ProtoFile> knownTypes = knownProtoFiles();
+
+ Map<String, ProtoFile> filesByLocations = new HashMap<>();
+ filesByLocations.putAll(knownTypes);
+ filesByLocations.putAll(loadFilesWithLocations());
+
+ Linker linker = new Linker(
+ createFilesLoader(filesByLocations),
+ new ErrorCollector(),
+ true,
+ true
+ );
+ var schema = linker.link(filesByLocations.values());
+ linker.getErrors().throwIfNonEmpty();
+ return schema.getProtoFiles()
+ .stream()
+ .filter(p -> !knownTypes.containsKey(p.getLocation().getPath())) //filtering known types
+ .toList();
+ }
+
+ private Map<String, ProtoFile> knownProtoFiles() {
+ return Stream.of(
+ loadKnownProtoFile("google/type/color.proto", ColorProto.getDescriptor()),
+ loadKnownProtoFile("google/type/date.proto", DateProto.getDescriptor()),
+ loadKnownProtoFile("google/type/datetime.proto", DateTimeProto.getDescriptor()),
+ loadKnownProtoFile("google/type/dayofweek.proto", DayOfWeekProto.getDescriptor()),
+ loadKnownProtoFile("google/type/decimal.proto", com.google.type.DecimalProto.getDescriptor()),
+ loadKnownProtoFile("google/type/expr.proto", ExprProto.getDescriptor()),
+ loadKnownProtoFile("google/type/fraction.proto", FractionProto.getDescriptor()),
+ loadKnownProtoFile("google/type/interval.proto", IntervalProto.getDescriptor()),
+ loadKnownProtoFile("google/type/latlng.proto", LatLngProto.getDescriptor()),
+ loadKnownProtoFile("google/type/money.proto", MoneyProto.getDescriptor()),
+ loadKnownProtoFile("google/type/month.proto", MonthProto.getDescriptor()),
+ loadKnownProtoFile("google/type/phone_number.proto", PhoneNumberProto.getDescriptor()),
+ loadKnownProtoFile("google/type/postal_address.proto", PostalAddressProto.getDescriptor()),
+ loadKnownProtoFile("google/type/quaternion.prot", QuaternionProto.getDescriptor()),
+ loadKnownProtoFile("google/type/timeofday.proto", TimeOfDayProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/any.proto", AnyProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/api.proto", ApiProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/descriptor.proto", DescriptorProtos.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/duration.proto", DurationProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/empty.proto", EmptyProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/field_mask.proto", FieldMaskProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/source_context.proto", SourceContextProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/struct.proto", StructProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/timestamp.proto", TimestampProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/type.proto", TypeProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/wrappers.proto", WrappersProto.getDescriptor())
+ ).collect(Collectors.toMap(p -> p.getLocation().getPath(), p -> p));
+ }
+
+ private ProtoFile loadKnownProtoFile(String path, Descriptors.FileDescriptor fileDescriptor) {
+ String protoFileString = null;
+ // a known-type file contains either a message or an enum
+ if (!fileDescriptor.getMessageTypes().isEmpty()) {
+ protoFileString = new ProtobufSchema(fileDescriptor.getMessageTypes().get(0)).canonicalString();
+ } else if (!fileDescriptor.getEnumTypes().isEmpty()) {
+ protoFileString = new ProtobufSchema(fileDescriptor.getEnumTypes().get(0)).canonicalString();
+ } else {
+ throw new IllegalStateException("Known proto file contains neither a message nor an enum: " + path);
+ }
+ return ProtoFile.Companion.get(ProtoParser.Companion.parse(Location.get(path), protoFileString));
+ }
+
+ private Loader createFilesLoader(Map<String, ProtoFile> files) {
+ return new Loader() {
+ @Override
+ public @NotNull ProtoFile load(@NotNull String path) {
+ return Preconditions.checkNotNull(files.get(path), "ProtoFile not found for import '%s'", path);
+ }
+
+ @Override
+ public @NotNull Loader withErrors(@NotNull ErrorCollector errorCollector) {
+ return this;
+ }
+ };
+ }
+
+ @SneakyThrows
+ private Map<String, ProtoFile> loadFilesWithLocations() {
+ Map<String, ProtoFile> filesByLocations = new HashMap<>();
+ try (var files = Files.walk(baseLocation)) {
+ files.filter(p -> !Files.isDirectory(p) && p.toString().endsWith(".proto"))
+ .forEach(path -> {
+ // relative path will be used as "import" statement
+ String relativePath = baseLocation.relativize(path).toString();
+ var protoFileElement = ProtoParser.Companion.parse(
+ Location.get(baseLocation.toString(), relativePath),
+ readFileAsString(path)
+ );
+ filesByLocations.put(relativePath, ProtoFile.Companion.get(protoFileElement));
+ });
+ }
+ return filesByLocations;
+ }
+ }
+
}
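A note on the loader above: a file's path relative to `protobufFilesDir` doubles as its import string, which is why nested imports link without any extra include configuration. A hypothetical layout (directory and file names are illustrative):

```yaml
protobufFilesDir: /path/to/my-protobufs
# Layout on disk, with the relative paths the linker sees:
#   my.proto                   # may contain: import "language/language.proto";
#   language/language.proto    # discovered by Files.walk and linked automatically
```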
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerdeTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerdeTest.java
index 3de915145f..ab99df74de 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerdeTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerdeTest.java
@@ -10,14 +10,16 @@ import com.google.protobuf.Descriptors;
import com.google.protobuf.util.JsonFormat;
import com.provectus.kafka.ui.serde.api.PropertyResolver;
import com.provectus.kafka.ui.serde.api.Serde;
+import com.provectus.kafka.ui.serdes.builtin.ProtobufFileSerde.Configuration;
+import com.squareup.wire.schema.ProtoFile;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
-import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import lombok.SneakyThrows;
import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.springframework.util.ResourceUtils;
@@ -29,28 +31,29 @@ class ProtobufFileSerdeTest {
private static final String sampleBookMsgJson = "{\"version\": 1, \"people\": ["
+ "{ \"name\": \"My Name\",\"id\": 102, \"email\": \"addrBook@example.com\", \"phones\":[]}]}";
- private static final String sampleSensorMsgJson = "{ \"name\": \"My Sensor\", "
- + "\"temperature\": 20.5, \"humidity\": 50, \"door\": \"OPEN\" }";
+ private static final String sampleLangDescriptionMsgJson = "{ \"lang\": \"EN\", "
+ + "\"descr\": \"Some description here\" }";
// Sample message of type `test.Person`
private byte[] personMessageBytes;
// Sample message of type `test.AddressBook`
private byte[] addressBookMessageBytes;
- private byte[] sensorMessageBytes;
- private Path addressBookSchemaPath;
- private Path sensorSchemaPath;
-
+ private byte[] langDescriptionMessageBytes;
private Descriptors.Descriptor personDescriptor;
private Descriptors.Descriptor addressBookDescriptor;
- private Descriptors.Descriptor sensorDescriptor;
+ private Descriptors.Descriptor langDescriptionDescriptor;
private Map<Descriptors.Descriptor, Path> descriptorPaths;
@BeforeEach
void setUp() throws Exception {
- addressBookSchemaPath = ResourceUtils.getFile("classpath:address-book.proto").toPath();
- sensorSchemaPath = ResourceUtils.getFile("classpath:sensor.proto").toPath();
+ Map<Path, ProtobufSchema> files = ProtobufFileSerde.Configuration.loadSchemas(
+ Optional.empty(),
+ Optional.empty(),
+ Optional.of(protoFilesDir())
+ );
- ProtobufSchema addressBookSchema = new ProtobufSchema(Files.readString(addressBookSchemaPath));
+ Path addressBookSchemaPath = ResourceUtils.getFile("classpath:protobuf-serde/address-book.proto").toPath();
+ var addressBookSchema = files.get(addressBookSchemaPath);
var builder = addressBookSchema.newMessageBuilder("test.Person");
JsonFormat.parser().merge(samplePersonMsgJson, builder);
personMessageBytes = builder.build().toByteArray();
@@ -61,63 +64,241 @@ class ProtobufFileSerdeTest {
personDescriptor = addressBookSchema.toDescriptor("test.Person");
addressBookDescriptor = addressBookSchema.toDescriptor("test.AddressBook");
- ProtobufSchema sensorSchema = new ProtobufSchema(Files.readString(sensorSchemaPath));
- builder = sensorSchema.newMessageBuilder("iot.Sensor");
- JsonFormat.parser().merge(sampleSensorMsgJson, builder);
- sensorMessageBytes = builder.build().toByteArray();
- sensorDescriptor = sensorSchema.toDescriptor("iot.Sensor");
+ Path languageDescriptionPath = ResourceUtils.getFile("classpath:protobuf-serde/lang-description.proto").toPath();
+ var languageDescriptionSchema = files.get(languageDescriptionPath);
+ builder = languageDescriptionSchema.newMessageBuilder("test.LanguageDescription");
+ JsonFormat.parser().merge(sampleLangDescriptionMsgJson, builder);
+ langDescriptionMessageBytes = builder.build().toByteArray();
+ langDescriptionDescriptor = languageDescriptionSchema.toDescriptor("test.LanguageDescription");
descriptorPaths = Map.of(
personDescriptor, addressBookSchemaPath,
- addressBookDescriptor, addressBookSchemaPath,
- sensorDescriptor, sensorSchemaPath
+ addressBookDescriptor, addressBookSchemaPath
);
}
-
@Test
- void testDeserialize() {
- var messageNameMap = Map.of(
- "persons", personDescriptor,
- "books", addressBookDescriptor
- );
- var keyMessageNameMap = Map.of(
- "books", addressBookDescriptor);
+ void loadsAllProtoFilesFromTargetDirectory() throws Exception {
+ var protoDir = ResourceUtils.getFile("classpath:protobuf-serde/").getPath();
+ List<ProtoFile> files = new ProtobufFileSerde.ProtoSchemaLoader(protoDir).load();
+ assertThat(files).hasSize(4);
+ assertThat(files)
+ .map(f -> f.getLocation().getPath())
+ .containsExactlyInAnyOrder(
+ "language/language.proto",
+ "sensor.proto",
+ "address-book.proto",
+ "lang-description.proto"
+ );
+ }
- var serde = new ProtobufFileSerde();
- serde.configure(
- null,
- null,
- descriptorPaths,
- messageNameMap,
- keyMessageNameMap
- );
+ @SneakyThrows
+ private String protoFilesDir() {
+ return ResourceUtils.getFile("classpath:protobuf-serde/").getPath();
+ }
- var deserializedPerson = serde.deserializer("persons", Serde.Target.VALUE)
- .deserialize(null, personMessageBytes);
- assertJsonEquals(samplePersonMsgJson, deserializedPerson.getResult());
+ @Nested
+ class ConfigurationTests {
- var deserializedBook = serde.deserializer("books", Serde.Target.KEY)
- .deserialize(null, addressBookMessageBytes);
- assertJsonEquals(sampleBookMsgJson, deserializedBook.getResult());
+ @Test
+ void canBeAutoConfiguredReturnsFalseIfNoProtoPropertiesProvided() {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ assertThat(Configuration.canBeAutoConfigured(resolver))
+ .isFalse();
+ }
+
+ @Test
+ void canBeAutoConfiguredReturnsTrueIfProtoFileHasBeenProvided() {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getProperty("protobufFile", String.class))
+ .thenReturn(Optional.of("file.proto"));
+ assertThat(Configuration.canBeAutoConfigured(resolver))
+ .isTrue();
+ }
+
+ @Test
+ void canBeAutoConfiguredReturnsTrueIfProtoFilesHaveBeenProvided() {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getListProperty("protobufFiles", String.class))
+ .thenReturn(Optional.of(List.of("file.proto")));
+ assertThat(Configuration.canBeAutoConfigured(resolver))
+ .isTrue();
+ }
+
+ @Test
+ void canBeAutoConfiguredReturnsTrueIfProtoFilesDirProvided() {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getProperty("protobufFilesDir", String.class))
+ .thenReturn(Optional.of("/filesDir"));
+ assertThat(Configuration.canBeAutoConfigured(resolver))
+ .isTrue();
+ }
+
+ @Test
+ void unknownSchemaAsDefaultThrowsException() {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getProperty("protobufFilesDir", String.class))
+ .thenReturn(Optional.of(protoFilesDir()));
+
+ when(resolver.getProperty("protobufMessageName", String.class))
+ .thenReturn(Optional.of("test.NotExistent"));
+
+ assertThatThrownBy(() -> Configuration.create(resolver))
+ .isInstanceOf(NullPointerException.class)
+ .hasMessage("The given message type not found in protobuf definition: test.NotExistent");
+ }
+
+ @Test
+ void unknownSchemaAsDefaultForKeyThrowsException() {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getProperty("protobufFilesDir", String.class))
+ .thenReturn(Optional.of(protoFilesDir()));
+
+ when(resolver.getProperty("protobufMessageNameForKey", String.class))
+ .thenReturn(Optional.of("test.NotExistent"));
+
+ assertThatThrownBy(() -> Configuration.create(resolver))
+ .isInstanceOf(NullPointerException.class)
+ .hasMessage("The given message type not found in protobuf definition: test.NotExistent");
+ }
+
+ @Test
+ void unknownSchemaAsTopicSchemaThrowsException() {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getProperty("protobufFilesDir", String.class))
+ .thenReturn(Optional.of(protoFilesDir()));
+
+ when(resolver.getMapProperty("protobufMessageNameByTopic", String.class, String.class))
+ .thenReturn(Optional.of(Map.of("persons", "test.NotExistent")));
+
+ assertThatThrownBy(() -> Configuration.create(resolver))
+ .isInstanceOf(NullPointerException.class)
+ .hasMessage("The given message type not found in protobuf definition: test.NotExistent");
+ }
+
+ @Test
+ void unknownSchemaAsTopicSchemaForKeyThrowsException() {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getProperty("protobufFilesDir", String.class))
+ .thenReturn(Optional.of(protoFilesDir()));
+
+ when(resolver.getMapProperty("protobufMessageNameForKeyByTopic", String.class, String.class))
+ .thenReturn(Optional.of(Map.of("persons", "test.NotExistent")));
+
+ assertThatThrownBy(() -> Configuration.create(resolver))
+ .isInstanceOf(NullPointerException.class)
+ .hasMessage("The given message type not found in protobuf definition: test.NotExistent");
+ }
+
+ @Test
+ void createConfigureFillsDescriptorMappingsWhenProtoFilesListProvided() throws Exception {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getProperty("protobufFile", String.class))
+ .thenReturn(Optional.of(
+ ResourceUtils.getFile("classpath:protobuf-serde/sensor.proto").getPath()));
+
+ when(resolver.getListProperty("protobufFiles", String.class))
+ .thenReturn(Optional.of(
+ List.of(
+ ResourceUtils.getFile("classpath:protobuf-serde/address-book.proto").getPath())));
+
+ when(resolver.getProperty("protobufMessageName", String.class))
+ .thenReturn(Optional.of("test.Sensor"));
+
+ when(resolver.getProperty("protobufMessageNameForKey", String.class))
+ .thenReturn(Optional.of("test.AddressBook"));
+
+ when(resolver.getMapProperty("protobufMessageNameByTopic", String.class, String.class))
+ .thenReturn(Optional.of(
+ Map.of(
+ "topic1", "test.Sensor",
+ "topic2", "test.AddressBook")));
+
+ when(resolver.getMapProperty("protobufMessageNameForKeyByTopic", String.class, String.class))
+ .thenReturn(Optional.of(
+ Map.of(
+ "topic1", "test.Person",
+ "topic2", "test.AnotherPerson")));
+
+ var configuration = Configuration.create(resolver);
+
+ assertThat(configuration.defaultMessageDescriptor())
+ .matches(d -> d.getFullName().equals("test.Sensor"));
+ assertThat(configuration.defaultKeyMessageDescriptor())
+ .matches(d -> d.getFullName().equals("test.AddressBook"));
+
+ assertThat(configuration.messageDescriptorMap())
+ .containsOnlyKeys("topic1", "topic2")
+ .anySatisfy((topic, descr) -> assertThat(descr.getFullName()).isEqualTo("test.Sensor"))
+ .anySatisfy((topic, descr) -> assertThat(descr.getFullName()).isEqualTo("test.AddressBook"));
+
+ assertThat(configuration.keyMessageDescriptorMap())
+ .containsOnlyKeys("topic1", "topic2")
+ .anySatisfy((topic, descr) -> assertThat(descr.getFullName()).isEqualTo("test.Person"))
+ .anySatisfy((topic, descr) -> assertThat(descr.getFullName()).isEqualTo("test.AnotherPerson"));
+ }
+
+ @Test
+ void createConfigureFillsDescriptorMappingsWhenProtoFileDirProvided() throws Exception {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getProperty("protobufFilesDir", String.class))
+ .thenReturn(Optional.of(protoFilesDir()));
+
+ when(resolver.getProperty("protobufMessageName", String.class))
+ .thenReturn(Optional.of("test.Sensor"));
+
+ when(resolver.getProperty("protobufMessageNameForKey", String.class))
+ .thenReturn(Optional.of("test.AddressBook"));
+
+ when(resolver.getMapProperty("protobufMessageNameByTopic", String.class, String.class))
+ .thenReturn(Optional.of(
+ Map.of(
+ "topic1", "test.Sensor",
+ "topic2", "test.LanguageDescription")));
+
+ when(resolver.getMapProperty("protobufMessageNameForKeyByTopic", String.class, String.class))
+ .thenReturn(Optional.of(
+ Map.of(
+ "topic1", "test.Person",
+ "topic2", "test.AnotherPerson")));
+
+ var configuration = Configuration.create(resolver);
+
+ assertThat(configuration.defaultMessageDescriptor())
+ .matches(d -> d.getFullName().equals("test.Sensor"));
+ assertThat(configuration.defaultKeyMessageDescriptor())
+ .matches(d -> d.getFullName().equals("test.AddressBook"));
+
+ assertThat(configuration.messageDescriptorMap())
+ .containsOnlyKeys("topic1", "topic2")
+ .anySatisfy((topic, descr) -> assertThat(descr.getFullName()).isEqualTo("test.Sensor"))
+ .anySatisfy((topic, descr) -> assertThat(descr.getFullName()).isEqualTo("test.LanguageDescription"));
+
+ assertThat(configuration.keyMessageDescriptorMap())
+ .containsOnlyKeys("topic1", "topic2")
+ .anySatisfy((topic, descr) -> assertThat(descr.getFullName()).isEqualTo("test.Person"))
+ .anySatisfy((topic, descr) -> assertThat(descr.getFullName()).isEqualTo("test.AnotherPerson"));
+ }
}
@Test
- void testDeserializeMultipleProtobuf() {
+ void deserializeUsesTopicsMappingToFindMsgDescriptor() {
var messageNameMap = Map.of(
"persons", personDescriptor,
"books", addressBookDescriptor,
- "sensors", sensorDescriptor
+ "langs", langDescriptionDescriptor
);
var keyMessageNameMap = Map.of(
"books", addressBookDescriptor);
var serde = new ProtobufFileSerde();
serde.configure(
- null,
- null,
- descriptorPaths,
- messageNameMap,
- keyMessageNameMap
+ new Configuration(
+ null,
+ null,
+ descriptorPaths,
+ messageNameMap,
+ keyMessageNameMap
+ )
);
var deserializedPerson = serde.deserializer("persons", Serde.Target.VALUE)
@@ -128,20 +309,22 @@ class ProtobufFileSerdeTest {
.deserialize(null, addressBookMessageBytes);
assertJsonEquals(sampleBookMsgJson, deserializedBook.getResult());
- var deserializedSensor = serde.deserializer("sensors", Serde.Target.VALUE)
- .deserialize(null, sensorMessageBytes);
- assertJsonEquals(sampleSensorMsgJson, deserializedSensor.getResult());
+ var deserializedSensor = serde.deserializer("langs", Serde.Target.VALUE)
+ .deserialize(null, langDescriptionMessageBytes);
+ assertJsonEquals(sampleLangDescriptionMsgJson, deserializedSensor.getResult());
}
@Test
- void testDefaultMessageName() {
+ void deserializeUsesDefaultDescriptorIfTopicMappingNotFound() {
var serde = new ProtobufFileSerde();
serde.configure(
- personDescriptor,
- addressBookDescriptor,
- descriptorPaths,
- Map.of(),
- Map.of()
+ new Configuration(
+ personDescriptor,
+ addressBookDescriptor,
+ descriptorPaths,
+ Map.of(),
+ Map.of()
+ )
);
var deserializedPerson = serde.deserializer("persons", Serde.Target.VALUE)
@@ -154,230 +337,57 @@ class ProtobufFileSerdeTest {
}
@Test
- void testSerialize() {
- var messageNameMap = Map.of(
- "persons", personDescriptor,
- "books", addressBookDescriptor
- );
- var keyMessageNameMap = Map.of(
- "books", addressBookDescriptor);
-
- var serde = new ProtobufFileSerde();
- serde.configure(
- null,
- null,
- descriptorPaths,
- messageNameMap,
- keyMessageNameMap
- );
-
- var personBytes = serde.serializer("persons", Serde.Target.VALUE)
- .serialize("{ \"name\": \"My Name\",\"id\": 101, \"email\": \"user1@example.com\" }");
- assertThat(personBytes).isEqualTo(personMessageBytes);
-
- var booksBytes = serde.serializer("books", Serde.Target.KEY)
- .serialize("{\"version\": 1, \"people\": ["
- + "{ \"name\": \"My Name\",\"id\": 102, \"email\": \"addrBook@example.com\" }]}");
- assertThat(booksBytes).isEqualTo(addressBookMessageBytes);
- }
-
- @Test
- void testSerializeMultipleProtobuf() {
+ void serializeUsesTopicsMappingToFindMsgDescriptor() {
var messageNameMap = Map.of(
"persons", personDescriptor,
"books", addressBookDescriptor,
- "sensors", sensorDescriptor
+ "langs", langDescriptionDescriptor
);
var keyMessageNameMap = Map.of(
"books", addressBookDescriptor);
var serde = new ProtobufFileSerde();
serde.configure(
- null,
- null,
- descriptorPaths,
- messageNameMap,
- keyMessageNameMap
+ new Configuration(
+ null,
+ null,
+ descriptorPaths,
+ messageNameMap,
+ keyMessageNameMap
+ )
);
- var personBytes = serde.serializer("persons", Serde.Target.VALUE)
- .serialize("{ \"name\": \"My Name\",\"id\": 101, \"email\": \"user1@example.com\" }");
- assertThat(personBytes).isEqualTo(personMessageBytes);
+ var personBytes = serde.serializer("langs", Serde.Target.VALUE)
+ .serialize(sampleLangDescriptionMsgJson);
+ assertThat(personBytes).isEqualTo(langDescriptionMessageBytes);
var booksBytes = serde.serializer("books", Serde.Target.KEY)
- .serialize("{\"version\": 1, \"people\": ["
- + "{ \"name\": \"My Name\",\"id\": 102, \"email\": \"addrBook@example.com\" }]}");
+ .serialize(sampleBookMsgJson);
assertThat(booksBytes).isEqualTo(addressBookMessageBytes);
-
- var sensorBytes = serde.serializer("sensors", Serde.Target.VALUE)
- .serialize("{ \"name\": \"My Sensor\", \"temperature\": 20.5, \"humidity\": 50, \"door\": \"OPEN\" }");
- assertThat(sensorBytes).isEqualTo(sensorMessageBytes);
}
@Test
- void testSerializeDefaults() {
+ void serializeUsesDefaultDescriptorIfTopicMappingNotFound() {
var serde = new ProtobufFileSerde();
serde.configure(
- personDescriptor,
- addressBookDescriptor,
- descriptorPaths,
- Map.of(),
- Map.of()
+ new Configuration(
+ personDescriptor,
+ addressBookDescriptor,
+ descriptorPaths,
+ Map.of(),
+ Map.of()
+ )
);
var personBytes = serde.serializer("persons", Serde.Target.VALUE)
- .serialize("{ \"name\": \"My Name\",\"id\": 101, \"email\": \"user1@example.com\" }");
+ .serialize(samplePersonMsgJson);
assertThat(personBytes).isEqualTo(personMessageBytes);
var booksBytes = serde.serializer("books", Serde.Target.KEY)
- .serialize("{\"version\": 1, \"people\": ["
- + "{ \"name\": \"My Name\",\"id\": 102, \"email\": \"addrBook@example.com\" }]}");
+ .serialize(sampleBookMsgJson);
assertThat(booksBytes).isEqualTo(addressBookMessageBytes);
}
- @Test
- void canBeAutoConfiguredReturnsFalseIfNoProtoFilesHaveBeenProvided() {
- PropertyResolver resolver = mock(PropertyResolver.class);
-
- var serde = new ProtobufFileSerde();
- boolean startupSuccessful = serde.canBeAutoConfigured(resolver, resolver);
- assertThat(startupSuccessful).isFalse();
- }
-
- @Test
- void canBeAutoConfiguredReturnsFalseIfProtoFilesListIsEmpty() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getListProperty("protobufFiles", String.class)).thenReturn(Optional.of(List.of()));
-
- var serde = new ProtobufFileSerde();
- boolean startupSuccessful = serde.canBeAutoConfigured(resolver, resolver);
- assertThat(startupSuccessful).isFalse();
- }
-
- @Test
- void canBeAutoConfiguredReturnsTrueIfNoProtoFileHasBeenProvided() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getProperty("protobufFile", String.class)).thenReturn(Optional.of("file.proto"));
-
- var serde = new ProtobufFileSerde();
- boolean startupSuccessful = serde.canBeAutoConfigured(resolver, resolver);
- assertThat(startupSuccessful).isTrue();
- }
-
- @Test
- void canBeAutoConfiguredReturnsTrueIfProtoFilesHasBeenProvided() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getListProperty("protobufFiles", String.class)).thenReturn(Optional.of(List.of("file.proto")));
-
- var serde = new ProtobufFileSerde();
- boolean startupSuccessful = serde.canBeAutoConfigured(resolver, resolver);
- assertThat(startupSuccessful).isTrue();
- }
-
- @Test
- void canBeAutoConfiguredReturnsTrueIfProtoFileAndProtoFilesHaveBeenProvided() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getProperty("protobufFile", String.class)).thenReturn(Optional.of("file1.proto"));
- when(resolver.getListProperty("protobufFiles", String.class)).thenReturn(Optional.of(List.of("file2.proto")));
-
- var serde = new ProtobufFileSerde();
- boolean startupSuccessful = serde.canBeAutoConfigured(resolver, resolver);
- assertThat(startupSuccessful).isTrue();
- }
-
- @Test
- void listOfProtobufFilesIsJoined() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getProperty("protobufFile", String.class))
- .thenReturn(Optional.of(addressBookSchemaPath.toString()));
- when(resolver.getListProperty("protobufFiles", String.class))
- .thenReturn(Optional.of(List.of(sensorSchemaPath.toString())));
- when(resolver.getProperty("protobufMessageName", String.class))
- .thenReturn(Optional.of("test.AddressBook"));
-
- Map<String, String> protobufMessageNameByTopic = Map.of(
- "persons", "test.Person",
- "books", "test.AddressBook",
- "sensors", "iot.Sensor");
- when(resolver.getMapProperty("protobufMessageNameByTopic", String.class, String.class))
- .thenReturn(Optional.of(protobufMessageNameByTopic));
-
- var serde = new ProtobufFileSerde();
- serde.configure(resolver, resolver, resolver);
-
- var deserializedPerson = serde.deserializer("persons", Serde.Target.VALUE)
- .deserialize(null, personMessageBytes);
- assertJsonEquals(samplePersonMsgJson, deserializedPerson.getResult());
-
- var deserializedSensor = serde.deserializer("sensors", Serde.Target.VALUE)
- .deserialize(null, sensorMessageBytes);
- assertJsonEquals(sampleSensorMsgJson, deserializedSensor.getResult());
- }
-
- @Test
- void unknownSchemaAsDefaultThrowsException() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getListProperty("protobufFiles", String.class))
- .thenReturn(Optional.of(List.of(addressBookSchemaPath.toString(), sensorSchemaPath.toString())));
- when(resolver.getProperty("protobufMessageName", String.class))
- .thenReturn(Optional.of("test.NotExistent"));
-
- var serde = new ProtobufFileSerde();
- assertThatThrownBy(() -> serde.configure(resolver, resolver, resolver))
- .isInstanceOf(NullPointerException.class)
- .hasMessage("The given message type not found in protobuf definition: test.NotExistent");
- }
-
- @Test
- void unknownSchemaAsDefaultForKeyThrowsException() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getListProperty("protobufFiles", String.class))
- .thenReturn(Optional.of(List.of(addressBookSchemaPath.toString(), sensorSchemaPath.toString())));
- when(resolver.getProperty("protobufMessageName", String.class))
- .thenReturn(Optional.of("test.AddressBook"));
- when(resolver.getProperty("protobufMessageNameForKey", String.class))
- .thenReturn(Optional.of("test.NotExistent"));
-
- var serde = new ProtobufFileSerde();
- assertThatThrownBy(() -> serde.configure(resolver, resolver, resolver))
- .isInstanceOf(NullPointerException.class)
- .hasMessage("The given message type not found in protobuf definition: test.NotExistent");
- }
-
- @Test
- void unknownSchemaAsTopicSchemaThrowsException() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getListProperty("protobufFiles", String.class))
- .thenReturn(Optional.of(List.of(addressBookSchemaPath.toString(), sensorSchemaPath.toString())));
- when(resolver.getProperty("protobufMessageName", String.class))
- .thenReturn(Optional.of("test.AddressBook"));
-
- when(resolver.getMapProperty("protobufMessageNameByTopic", String.class, String.class))
- .thenReturn(Optional.of(Map.of("persons", "test.NotExistent")));
-
- var serde = new ProtobufFileSerde();
- assertThatThrownBy(() -> serde.configure(resolver, resolver, resolver))
- .isInstanceOf(NullPointerException.class)
- .hasMessage("The given message type not found in protobuf definition: test.NotExistent");
- }
-
- @Test
- void unknownSchemaAsTopicSchemaForKeyThrowsException() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getListProperty("protobufFiles", String.class))
- .thenReturn(Optional.of(List.of(addressBookSchemaPath.toString(), sensorSchemaPath.toString())));
- when(resolver.getProperty("protobufMessageName", String.class))
- .thenReturn(Optional.of("test.AddressBook"));
-
- when(resolver.getMapProperty("protobufMessageNameForKeyByTopic", String.class, String.class))
- .thenReturn(Optional.of(Map.of("persons", "test.NotExistent")));
-
- var serde = new ProtobufFileSerde();
- assertThatThrownBy(() -> serde.configure(resolver, resolver, resolver))
- .isInstanceOf(NullPointerException.class)
- .hasMessage("The given message type not found in protobuf definition: test.NotExistent");
- }
-
@SneakyThrows
private void assertJsonEquals(String expectedJson, String actualJson) {
var mapper = new JsonMapper();
diff --git a/kafka-ui-api/src/test/resources/address-book.proto b/kafka-ui-api/src/test/resources/protobuf-serde/address-book.proto
similarity index 81%
rename from kafka-ui-api/src/test/resources/address-book.proto
rename to kafka-ui-api/src/test/resources/protobuf-serde/address-book.proto
index 72eab7aab8..f6c9a5d788 100644
--- a/kafka-ui-api/src/test/resources/address-book.proto
+++ b/kafka-ui-api/src/test/resources/protobuf-serde/address-book.proto
@@ -1,16 +1,10 @@
-// [START declaration]
syntax = "proto3";
package test;
-// [END declaration]
-
-// [START java_declaration]
option java_multiple_files = true;
option java_package = "com.example.tutorial.protos";
option java_outer_classname = "AddressBookProtos";
-// [END java_declaration]
-// [START messages]
message Person {
string name = 1;
int32 id = 2; // Unique ID number for this person.
@@ -31,9 +25,13 @@ message Person {
}
+message AnotherPerson {
+ string name = 1;
+ string surname = 2;
+}
+
// Our address book file is just one of these.
message AddressBook {
int32 version = 1;
repeated Person people = 2;
}
-// [END messages]
\ No newline at end of file
diff --git a/kafka-ui-api/src/test/resources/protobuf-serde/lang-description.proto b/kafka-ui-api/src/test/resources/protobuf-serde/lang-description.proto
new file mode 100644
index 0000000000..8e213d58c4
--- /dev/null
+++ b/kafka-ui-api/src/test/resources/protobuf-serde/lang-description.proto
@@ -0,0 +1,11 @@
+syntax = "proto3";
+
+package test;
+
+import "language/language.proto";
+import "google/protobuf/wrappers.proto";
+
+message LanguageDescription {
+ test.lang.Language lang = 1;
+ google.protobuf.StringValue descr = 2;
+}
diff --git a/kafka-ui-api/src/test/resources/protobuf-serde/language/language.proto b/kafka-ui-api/src/test/resources/protobuf-serde/language/language.proto
new file mode 100644
index 0000000000..7ef30eab23
--- /dev/null
+++ b/kafka-ui-api/src/test/resources/protobuf-serde/language/language.proto
@@ -0,0 +1,11 @@
+syntax = "proto3";
+package test.lang;
+
+enum Language {
+ DE = 0;
+ EN = 1;
+ ES = 2;
+ FR = 3;
+ PL = 4;
+ RU = 5;
+}
diff --git a/kafka-ui-api/src/test/resources/sensor.proto b/kafka-ui-api/src/test/resources/protobuf-serde/sensor.proto
similarity index 93%
rename from kafka-ui-api/src/test/resources/sensor.proto
rename to kafka-ui-api/src/test/resources/protobuf-serde/sensor.proto
index 33b8c387e4..3bde20a3ae 100644
--- a/kafka-ui-api/src/test/resources/sensor.proto
+++ b/kafka-ui-api/src/test/resources/protobuf-serde/sensor.proto
@@ -1,5 +1,5 @@
syntax = "proto3";
-package iot;
+package test;
message Sensor {
string name = 1;
From 334ba3df99dfc84385faace167f6410c8ce0be91 Mon Sep 17 00:00:00 2001
From: Yarden Shoham
Date: Tue, 7 Mar 2023 17:20:21 +0200
Subject: [PATCH 43/54] Helm: Add `global.imageRegistry` (#3451)
Now, if this chart is used as a subchart, the image registry is taken from `global.imageRegistry` (if set).
Signed-off-by: Yarden Shoham
Co-authored-by: Roman Zabaluev
---
charts/kafka-ui/Chart.yaml | 2 +-
charts/kafka-ui/templates/_helpers.tpl | 5 +++++
2 files changed, 6 insertions(+), 1 deletion(-)
diff --git a/charts/kafka-ui/Chart.yaml b/charts/kafka-ui/Chart.yaml
index 6e5f0ee2d5..4f36aa7f0b 100644
--- a/charts/kafka-ui/Chart.yaml
+++ b/charts/kafka-ui/Chart.yaml
@@ -2,6 +2,6 @@ apiVersion: v2
name: kafka-ui
description: A Helm chart for kafka-UI
type: application
-version: 0.5.3
+version: 0.5.4
appVersion: v0.5.0
icon: https://github.com/provectus/kafka-ui/raw/master/documentation/images/kafka-ui-logo.png
diff --git a/charts/kafka-ui/templates/_helpers.tpl b/charts/kafka-ui/templates/_helpers.tpl
index 510452d4cf..7155681a44 100644
--- a/charts/kafka-ui/templates/_helpers.tpl
+++ b/charts/kafka-ui/templates/_helpers.tpl
@@ -68,6 +68,11 @@ This allows us to check if the registry of the image is specified or not.
*/}}
{{- define "kafka-ui.imageName" -}}
{{- $registryName := .Values.image.registry -}}
+{{- if .Values.global }}
+ {{- if .Values.global.imageRegistry }}
+ {{- $registryName = .Values.global.imageRegistry -}}
+ {{- end -}}
+{{- end -}}
{{- $repository := .Values.image.repository -}}
{{- $tag := .Values.image.tag | default .Chart.AppVersion -}}
{{- if $registryName }}
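For reference, a values sketch that exercises the new lookup when kafka-ui is deployed as a subchart (registry names are illustrative); a parent chart's `global.imageRegistry` takes precedence over the chart-local `image.registry`:

```yaml
global:
  imageRegistry: registry.example.com  # used when set, overriding image.registry
image:
  registry: docker.io                  # fallback when global.imageRegistry is absent
  repository: provectus/kafka-ui       # tag defaults to the chart appVersion
```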
From e72f6d6d5dd078df2d270cc48a4087588443f89a Mon Sep 17 00:00:00 2001
From: David <58771979+David-DB88@users.noreply.github.com>
Date: Thu, 9 Mar 2023 19:36:49 +0400
Subject: [PATCH 44/54] Feature: Cluster web configuration wizard (#3241)
* created wizard
* Create wizard form schema
* Wizard kafka cluster form (#3245)
* created wizard Kafka Cluster form
* created error message
Co-authored-by: davitbejanyan
* Update schema.ts
* Wizard authentication (#3268)
* created authentication form
* changed SaslType.tsx switch case
* remove console.log
* commented unused variables
* auth validation
* auth Security Protocol
* changed schema.ts username, password
* added Delegation tokens validation schema
* changed auth form
---------
Co-authored-by: davitbejanyan
* New Dashboard flow. Add an ability to configure clusters
* wizard kafka cluster validate (#3294)
* kafka cluster validate
* fixed uncontrolled input warning for the bootstrap server field
---------
Co-authored-by: davitbejanyan
* Wizard schema registry (#3286)
* created schema registry
* unused variables
* Prevent Default on click
---------
Co-authored-by: davitbejanyan
* feat: cleanup
* Application config API (#3242)
* wip
* wip
* wip
* wip
* OAuthProperties added to dynamic config api
* wip
* files upload endpoint added
* rbac conf api added
* rbac conf api improvements
* App configuration validation endpoint (#3264)
Co-authored-by: iliax
---------
Co-authored-by: iliax
Co-authored-by: Oleg Shur
* add app config api client
* refactor cluster section
* refactor cluster section
* linting
* refactor Registry Form (#3311)
* refactor Registry Form
* refactor Registry
---------
Co-authored-by: davitbejanyan
* auth form improvements
* refactoring
* linting
* file upload API changes
* Auth
* Start connecting to schema & payload
* Auth
* fileupload
* Wizard JMX Metrics form (#3303)
* created JMX Metrics form
* refactor JMXMetrics.tsx styles
* added cursor on checkbox, changed styles submit button
* refactor Metrics
* refactoring
* uncomment schema connect validation
---------
Co-authored-by: davitbejanyan
* validate api
* refactor
* Wizard Kafka Connect form (#3300)
* created Kafka Connect form
* renaming functions and variables
* refactor
* changed button name
* refactoring kafka connect
* extracted a handler function; replaced reset with setValue
* refactoring
* uncomment schema metrics validation
---------
Co-authored-by: davitbejanyan
* fixing AdminClient validation
* fixing AdminClient validation
* refactor kafka connect
* refactor metrics
* Per-cluster SSL verification settings (#3336)
* ssl configuration moved to app & cluster level
* documentations changes
* trust all removed, global app settings removed
* extracting ssl properties settings to SslPropertiesUtil
* wip
* documentation fix
---------
Co-authored-by: iliax
Co-authored-by: Roman Zabaluev
* SSL properties NPE fixes
* api integration
* custom fields for existing auth config
* OffsetsResetServiceTest fix
* cluster.properties structure flattening added
* kafka-ssl.yml: ssl properties moved to separate section, producer ssl properties copy added
* custom auth
* error messaging
* form submit
* feedback
* 1. defaulting metrics type to JMX
2. AdminClient id generation made unique
* checkstyle fix
* checkstyle fix
* refactoring
* feedback
* feedback
* feedback
* feedback
* feedback
* feedback
* Wizard: Application info API (#3391)
* Application info API added, cluster features enum renamed to `ClusterFeature`
* show config for specific envs only
* refactor widget
* Cluster connection validation err msg improved
* KSQL DB section
* Refactor + deps upgrade
* experiment: get rid of babel
* BE validations refactoring
* Update kafka-ui.yaml
fixed to string type param
* fixes #3397
* linting
* #3399 adjust size of port input
* disable selects for disabled form
* Wizard: Keystore separation (#3425)
* wip
* wip
* compose fix
* dto structure fix
---------
Co-authored-by: iliax
* dynamic ops enablement properties improvements
* own keystore for each section
* linting
* fix keystore submit
* fix keystore submit
* feedback
* feedback
* refactoring
* Connect config userName field renamed
* metrics configs mapping fix
* feedback
* Wizard: Jmx ssl (#3448)
JMX SSL implementation. Added the ability to set a specific SSL keystore per cluster when connecting to the JMX endpoint.
* Review fixes
* upd compareVersionsOperation qase id
* add toBeAutomated into manual suite
* DYNAMIC_CONFIG_ENABLED property description added
* Resolve conflicts
* Fix issue with 400 error
* fix SR edit form
---------
Co-authored-by: davitbejanyan
Co-authored-by: Alexander Krivonosov <31561808+GneyHabub@users.noreply.github.com>
Co-authored-by: Oleg Shur
Co-authored-by: Ilya Kuramshin
Co-authored-by: iliax
Co-authored-by: Roman Zabaluev
Co-authored-by: bkhakimov
Co-authored-by: Mgrdich
Co-authored-by: VladSenyuta
---
README.md | 17 +-
documentation/compose/jaas/client.properties | 0
documentation/compose/jaas/kafka_connect.jaas | 0
.../compose/jaas/kafka_connect.password | 0
documentation/compose/jaas/kafka_server.conf | 2 +-
.../compose/jaas/schema_registry.jaas | 0
.../compose/jaas/schema_registry.password | 0
.../compose/kafka-ssl-components.yaml | 18 +-
documentation/compose/kafka-ssl.yml | 10 +-
documentation/compose/kafka-ui-arm64.yaml | 1 +
.../compose/kafka-ui-jmx-secured.yml | 69 +-
documentation/compose/kafka-ui-sasl.yaml | 3 +-
documentation/compose/kafka-ui-serdes.yaml | 15 +-
documentation/compose/kafka-ui.yaml | 1 +
kafka-ui-api/Dockerfile | 7 +-
.../kafka/ui/KafkaUiApplication.java | 13 +-
.../ui/client/RetryingKafkaConnectClient.java | 31 +-
.../kafka/ui/config/ClustersProperties.java | 85 +-
.../kafka/ui/config/auth/OAuthProperties.java | 5 +-
.../config/auth/OAuthPropertiesConverter.java | 7 +-
.../logout/CognitoLogoutSuccessHandler.java | 5 +
.../kafka/ui/controller/AccessController.java | 2 +-
.../ApplicationConfigController.java | 137 +
.../kafka/ui/exception/ErrorCode.java | 4 +-
.../ui/exception/FileUploadException.java | 19 +
.../ui/exception/ValidationException.java | 4 +
.../kafka/ui/mapper/ClusterMapper.java | 4 +-
.../{Feature.java => ClusterFeature.java} | 2 +-
.../kafka/ui/model/InternalClusterState.java | 2 +-
.../kafka/ui/model/JmxConnectionInfo.java | 26 -
.../kafka/ui/model/MetricsConfig.java | 2 +
.../provectus/kafka/ui/model/Statistics.java | 2 +-
.../kafka/ui/model/rbac/AccessContext.java | 14 +-
.../kafka/ui/model/rbac/Permission.java | 25 +-
.../kafka/ui/model/rbac/Resource.java | 1 +
.../permission/ApplicationConfigAction.java | 18 +
.../kafka/ui/serdes/SerdesInitializer.java | 32 +-
.../builtin/sr/SchemaRegistrySerde.java | 30 +-
.../ui/service/AdminClientServiceImpl.java | 18 +-
.../ui/service/ConsumerGroupService.java | 2 +
.../kafka/ui/service/FeatureService.java | 14 +-
.../kafka/ui/service/KafkaClusterFactory.java | 144 +-
.../kafka/ui/service/MessagesService.java | 2 +
.../kafka/ui/service/ReactiveAdminClient.java | 6 +-
.../kafka/ui/service/StatisticsService.java | 4 +-
.../kafka/ui/service/TopicsService.java | 4 +-
.../kafka/ui/service/ksql/KsqlApiClient.java | 19 +-
.../service/ksql/response/ResponseParser.java | 6 +-
.../kafka/ui/service/masking/DataMasking.java | 4 +-
.../ui/service/masking/policies/Mask.java | 2 +
.../masking/policies/MaskingPolicy.java | 22 +-
.../ui/service/masking/policies/Replace.java | 2 +
.../service/metrics/JmxMetricsRetriever.java | 125 +-
.../service/metrics/JmxSslSocketFactory.java | 218 +
.../metrics/PrometheusMetricsRetriever.java | 33 +-
.../ui/service/rbac/AccessControlService.java | 40 +-
.../kafka/ui/util/ApplicationRestarter.java | 46 +
.../ui/util/DynamicConfigOperations.java | 228 +
.../kafka/ui/util/JmxPoolFactory.java | 47 -
.../ui/util/KafkaServicesValidation.java | 147 +
.../{NumberUtil.java => KafkaVersion.java} | 12 +-
.../com/provectus/kafka/ui/util/MapUtil.java | 0
.../kafka/ui/util/PollingThrottler.java | 6 +-
.../kafka/ui/util/ReactiveFailover.java | 16 +-
.../kafka/ui/util/SslPropertiesUtil.java | 33 +
.../kafka/ui/util/WebClientConfigurator.java | 54 +-
.../src/main/resources/application-local.yml | 10 +-
.../src/main/resources/application.yml | 2 -
.../ui/service/OffsetsResetServiceTest.java | 46 +-
.../ui/service/ksql/KsqlApiClientTest.java | 2 +-
.../ui/service/ksql/KsqlServiceV2Test.java | 2 +-
.../PrometheusMetricsRetrieverTest.java | 6 +-
.../ui/util/DynamicConfigOperationsTest.java | 128 +
kafka-ui-contract/pom.xml | 3 +
.../main/resources/swagger/kafka-ui-api.yaml | 375 ++
.../ui/manualSuite/suite/BrokersTest.java | 17 +
.../ui/manualSuite/suite/KsqlDbTest.java | 35 +
.../ui/smokeSuite/schemas/SchemasTest.java | 2 +-
kafka-ui-react-app/.babelrc | 7 -
kafka-ui-react-app/.eslintrc.json | 6 +-
kafka-ui-react-app/README.md | 4 +-
kafka-ui-react-app/package.json | 53 +-
kafka-ui-react-app/pnpm-lock.yaml | 3803 ++++-------------
kafka-ui-react-app/src/components/App.tsx | 21 +-
.../ClusterPage/ClusterConfigPage.tsx | 40 +
.../ClusterPage.tsx} | 18 +-
.../__tests__/ClusterPage.spec.tsx} | 6 +-
.../Connect/Details/Config/Config.tsx | 10 +-
.../src/components/Connect/New/New.tsx | 34 +-
.../Connect/New/__tests__/New.spec.tsx | 2 +-
.../__test__/ResetOffsets.spec.tsx | 14 +-
.../src/components/Dashboard/ClusterName.tsx | 18 +
.../Dashboard/ClusterTableActionsCell.tsx | 18 +
.../Dashboard/ClustersWidget/ClusterName.tsx | 15 -
.../ClustersWidget/ClustersWidget.styled.ts | 15 -
.../ClustersWidget/ClustersWidget.tsx | 75 -
.../__test__/ClustersWidget.spec.tsx | 40 -
.../components/Dashboard/Dashboard.styled.ts | 8 +
.../src/components/Dashboard/Dashboard.tsx | 104 +-
.../Dashboard/__test__/Dashboard.spec.tsx | 16 -
.../src/components/Nav/ClusterMenu.tsx | 1 -
kafka-ui-react-app/src/components/Nav/Nav.tsx | 21 +-
.../PageContainer/PageContainer.tsx | 18 +-
.../Details/SchemaVersion/SchemaVersion.tsx | 2 +-
.../Details/__test__/SchemaVersion.spec.tsx | 4 -
.../src/components/Schemas/Edit/Edit.tsx | 183 +-
.../src/components/Schemas/Edit/Form.tsx | 210 +
.../src/components/Topics/New/New.tsx | 8 +-
.../Topic/Edit/DangerZone/DangerZone.tsx | 8 +-
.../src/components/Topics/Topic/Edit/Edit.tsx | 17 +-
.../Filters/AddEditFilterContainer.tsx | 20 +-
.../Topic/Messages/Filters/Filters.styled.ts | 16 -
.../Filters/__tests__/AddFilter.spec.tsx | 54 -
.../Topic/Messages/__test__/Messages.spec.tsx | 10 +-
.../Topics/Topic/SendMessage/SendMessage.tsx | 23 +-
.../shared/Form/__tests__/TopicForm.spec.tsx | 29 +-
.../src/components/Version/Version.tsx | 5 -
.../src/components/__tests__/App.spec.tsx | 7 +
.../src/components/common/Button/Button.tsx | 6 +-
.../components/common/Checkbox/Checkbox.tsx | 30 +
.../src/components/common/Form/Form.styled.ts | 22 +
.../components/common/Input/Input.styled.ts | 6 +
.../src/components/common/Input/Input.tsx | 48 +-
.../common/Input/InputLabel.styled.ts | 1 -
.../src/components/common/NewTable/Table.tsx | 2 +-
.../common/NewTable/__test__/Table.spec.tsx | 2 +-
.../common/Select/ControlledSelect.tsx | 59 +
.../src/components/common/Tooltip/Tooltip.tsx | 2 +-
.../contexts/GlobalSettingsContext.tsx | 24 +-
kafka-ui-react-app/src/custom.d.ts | 2 -
kafka-ui-react-app/src/lib/api.ts | 4 +-
kafka-ui-react-app/src/lib/constants.ts | 24 +
kafka-ui-react-app/src/lib/errorHandling.tsx | 7 +
.../src/lib/hooks/api/appConfig.ts | 69 +
.../src/lib/hooks/api/clusters.ts | 2 +-
kafka-ui-react-app/src/lib/hooks/api/roles.ts | 4 +-
.../src/lib/hooks/api/topicMessages.tsx | 2 +-
.../src/lib/hooks/useBoolean.ts | 2 +-
kafka-ui-react-app/src/lib/paths.ts | 9 +
kafka-ui-react-app/src/lib/testHelpers.tsx | 4 +-
kafka-ui-react-app/src/lib/yupExtended.ts | 16 +-
.../reducers/ksqlDb/__test__/fixtures.ts | 2 +
kafka-ui-react-app/src/theme/theme.ts | 5 +-
.../ClusterConfigForm.styled.ts | 60 +
.../Authentication/Authentication.tsx | 54 +
.../Authentication/AuthenticationMethods.tsx | 93 +
.../Sections/CustomAuthentication.tsx | 43 +
.../ClusterConfigForm/Sections/KSQL.tsx | 42 +
.../Sections/KafkaCluster.tsx | 114 +
.../Sections/KafkaConnect.tsx | 91 +
.../ClusterConfigForm/Sections/Metrics.tsx | 59 +
.../Sections/SchemaRegistry.tsx | 45 +
.../ClusterConfigForm/common/Credentials.tsx | 45 +
.../ClusterConfigForm/common/Fileupload.tsx | 67 +
.../ClusterConfigForm/common/SSLForm.tsx | 25 +
.../common/SectionHeader.tsx | 31 +
.../src/widgets/ClusterConfigForm/index.tsx | 155 +
.../src/widgets/ClusterConfigForm/schema.ts | 194 +
.../src/widgets/ClusterConfigForm/types.ts | 56 +
.../utils/convertFormKeyToPropsKey.ts | 3 +
.../utils/convertPropsKeyToFormKey.ts | 3 +
.../utils/getInitialFormData.ts | 121 +
.../utils/getIsValidConfig.ts | 49 +
.../ClusterConfigForm/utils/getJaasConfig.ts | 27 +
.../utils/transformFormDataToPayload.ts | 231 +
165 files changed, 5298 insertions(+), 4122 deletions(-)
mode change 100644 => 100755 documentation/compose/jaas/client.properties
mode change 100644 => 100755 documentation/compose/jaas/kafka_connect.jaas
mode change 100644 => 100755 documentation/compose/jaas/kafka_connect.password
mode change 100644 => 100755 documentation/compose/jaas/schema_registry.jaas
mode change 100644 => 100755 documentation/compose/jaas/schema_registry.password
create mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java
create mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/FileUploadException.java
rename kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/{Feature.java => ClusterFeature.java} (78%)
delete mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/JmxConnectionInfo.java
create mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/permission/ApplicationConfigAction.java
create mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/JmxSslSocketFactory.java
create mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ApplicationRestarter.java
create mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/DynamicConfigOperations.java
delete mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/JmxPoolFactory.java
create mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaServicesValidation.java
rename kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/{NumberUtil.java => KafkaVersion.java} (74%)
delete mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/MapUtil.java
create mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/SslPropertiesUtil.java
create mode 100644 kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/DynamicConfigOperationsTest.java
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/BrokersTest.java
create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/KsqlDbTest.java
delete mode 100644 kafka-ui-react-app/.babelrc
create mode 100644 kafka-ui-react-app/src/components/ClusterPage/ClusterConfigPage.tsx
rename kafka-ui-react-app/src/components/{Cluster/Cluster.tsx => ClusterPage/ClusterPage.tsx} (86%)
rename kafka-ui-react-app/src/components/{Cluster/__tests__/Cluster.spec.tsx => ClusterPage/__tests__/ClusterPage.spec.tsx} (96%)
create mode 100644 kafka-ui-react-app/src/components/Dashboard/ClusterName.tsx
create mode 100644 kafka-ui-react-app/src/components/Dashboard/ClusterTableActionsCell.tsx
delete mode 100644 kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterName.tsx
delete mode 100644 kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClustersWidget.styled.ts
delete mode 100644 kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClustersWidget.tsx
delete mode 100644 kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/ClustersWidget.spec.tsx
create mode 100644 kafka-ui-react-app/src/components/Dashboard/Dashboard.styled.ts
delete mode 100644 kafka-ui-react-app/src/components/Dashboard/__test__/Dashboard.spec.tsx
create mode 100644 kafka-ui-react-app/src/components/Schemas/Edit/Form.tsx
create mode 100644 kafka-ui-react-app/src/components/common/Checkbox/Checkbox.tsx
create mode 100644 kafka-ui-react-app/src/components/common/Select/ControlledSelect.tsx
delete mode 100644 kafka-ui-react-app/src/custom.d.ts
create mode 100644 kafka-ui-react-app/src/lib/hooks/api/appConfig.ts
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/ClusterConfigForm.styled.ts
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/Sections/Authentication/Authentication.tsx
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/Sections/Authentication/AuthenticationMethods.tsx
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/Sections/CustomAuthentication.tsx
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/Sections/KSQL.tsx
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/Sections/KafkaCluster.tsx
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/Sections/KafkaConnect.tsx
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/Sections/Metrics.tsx
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/Sections/SchemaRegistry.tsx
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/common/Credentials.tsx
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/common/Fileupload.tsx
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/common/SSLForm.tsx
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/common/SectionHeader.tsx
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/index.tsx
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/schema.ts
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/types.ts
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/convertFormKeyToPropsKey.ts
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/convertPropsKeyToFormKey.ts
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/getInitialFormData.ts
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/getIsValidConfig.ts
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/getJaasConfig.ts
create mode 100644 kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/transformFormDataToPayload.ts
diff --git a/README.md b/README.md
index b66ac8f37f..e924621536 100644
--- a/README.md
+++ b/README.md
@@ -185,16 +185,17 @@ For example, if you want to use an environment variable to set the `name` parame
|`KAFKA_CLUSTERS_0_KSQLDBSERVERAUTH_PASSWORD` | KSQL DB server's basic authentication password
|`KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_KEYSTORELOCATION` |Path to the JKS keystore to communicate to KSQL DB
|`KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_KEYSTOREPASSWORD` |Password of the JKS keystore for KSQL DB
-|`KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_TRUSTSTORELOCATION` |Path to the JKS truststore to communicate to KSQL DB
-|`KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_TRUSTSTOREPASSWORD` |Password of the JKS truststore for KSQL DB
|`KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL` |Security protocol to connect to the brokers. For SSL connection use "SSL", for plaintext connection don't set this environment variable
|`KAFKA_CLUSTERS_0_SCHEMAREGISTRY` |SchemaRegistry's address
|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_USERNAME` |SchemaRegistry's basic authentication username
|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_PASSWORD` |SchemaRegistry's basic authentication password
|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTORELOCATION` |Path to the JKS keystore to communicate to SchemaRegistry
|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTOREPASSWORD` |Password of the JKS keystore for SchemaRegistry
-|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_TRUSTSTORELOCATION` |Path to the JKS truststore to communicate to SchemaRegistry
-|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_TRUSTSTOREPASSWORD` |Password of the JKS truststore for SchemaRegistry
+|`KAFKA_CLUSTERS_0_METRICS_SSL` |Enable SSL for Metrics (for PROMETHEUS metrics type). Default: false.
+|`KAFKA_CLUSTERS_0_METRICS_USERNAME` |Username for Metrics authentication
+|`KAFKA_CLUSTERS_0_METRICS_PASSWORD` |Password for Metrics authentication
+|`KAFKA_CLUSTERS_0_METRICS_KEYSTORELOCATION` |Path to the JKS keystore to communicate to metrics source (JMX/PROMETHEUS). For advanced setup, see `kafka-ui-jmx-secured.yml`
+|`KAFKA_CLUSTERS_0_METRICS_KEYSTOREPASSWORD` |Password of the JKS metrics keystore
|`KAFKA_CLUSTERS_0_SCHEMANAMETEMPLATE` |How keys are saved to schemaRegistry
|`KAFKA_CLUSTERS_0_METRICS_PORT` |Open metrics port of a broker
|`KAFKA_CLUSTERS_0_METRICS_TYPE` |Type of metrics retriever to use. Valid values are JMX (default) or PROMETHEUS. If Prometheus, then metrics are read from prometheus-jmx-exporter instead of jmx
@@ -205,11 +206,9 @@ For example, if you want to use an environment variable to set the `name` parame
|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_PASSWORD`| Kafka Connect cluster's basic authentication password
|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_KEYSTORELOCATION`| Path to the JKS keystore to communicate to Kafka Connect
|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_KEYSTOREPASSWORD`| Password of the JKS keystore for Kafka Connect
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_TRUSTSTORELOCATION`| Path to the JKS truststore to communicate to Kafka Connect
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_TRUSTSTOREPASSWORD`| Password of the JKS truststore for Kafka Connect
-|`KAFKA_CLUSTERS_0_METRICS_SSL` |Enable SSL for Metrics? `true` or `false`. For advanced setup, see `kafka-ui-jmx-secured.yml`
-|`KAFKA_CLUSTERS_0_METRICS_USERNAME` |Username for Metrics authentication
-|`KAFKA_CLUSTERS_0_METRICS_PASSWORD` |Password for Metrics authentication
|`KAFKA_CLUSTERS_0_POLLING_THROTTLE_RATE` |Max traffic rate (bytes/sec) that kafka-ui allowed to reach when polling messages from the cluster. Default: 0 (not limited)
+|`KAFKA_CLUSTERS_0_SSL_TRUSTSTORELOCATION`| Path to the JKS truststore to communicate to Kafka Connect, SchemaRegistry, KSQL, Metrics
+|`KAFKA_CLUSTERS_0_SSL_TRUSTSTOREPASSWORD`| Password of the JKS truststore for Kafka Connect, SchemaRegistry, KSQL, Metrics
|`TOPIC_RECREATE_DELAY_SECONDS` |Time delay between topic deletion and topic creation attempts for topic recreate functionality. Default: 1
|`TOPIC_RECREATE_MAXRETRIES` |Number of attempts of topic creation after topic deletion for topic recreate functionality. Default: 15
+|`DYNAMIC_CONFIG_ENABLED`|Allow to change application config in runtime. Default: false.
diff --git a/documentation/compose/jaas/client.properties b/documentation/compose/jaas/client.properties
old mode 100644
new mode 100755
diff --git a/documentation/compose/jaas/kafka_connect.jaas b/documentation/compose/jaas/kafka_connect.jaas
old mode 100644
new mode 100755
diff --git a/documentation/compose/jaas/kafka_connect.password b/documentation/compose/jaas/kafka_connect.password
old mode 100644
new mode 100755
diff --git a/documentation/compose/jaas/kafka_server.conf b/documentation/compose/jaas/kafka_server.conf
index ef41c992e2..25388be5aa 100644
--- a/documentation/compose/jaas/kafka_server.conf
+++ b/documentation/compose/jaas/kafka_server.conf
@@ -11,4 +11,4 @@ KafkaClient {
user_admin="admin-secret";
};
-Client {};
\ No newline at end of file
+Client {};
diff --git a/documentation/compose/jaas/schema_registry.jaas b/documentation/compose/jaas/schema_registry.jaas
old mode 100644
new mode 100755
diff --git a/documentation/compose/jaas/schema_registry.password b/documentation/compose/jaas/schema_registry.password
old mode 100644
new mode 100755
diff --git a/documentation/compose/kafka-ssl-components.yaml b/documentation/compose/kafka-ssl-components.yaml
index 0c1287b647..407ce5b97a 100644
--- a/documentation/compose/kafka-ssl-components.yaml
+++ b/documentation/compose/kafka-ssl-components.yaml
@@ -15,27 +15,25 @@ services:
KAFKA_CLUSTERS_0_NAME: local
KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SSL
KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092 # SSL LISTENER!
- KAFKA_CLUSTERS_0_PROPERTIES_SSL_TRUSTSTORE_LOCATION: /kafka.truststore.jks
- KAFKA_CLUSTERS_0_PROPERTIES_SSL_TRUSTSTORE_PASSWORD: secret
- KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_LOCATION: /kafka.keystore.jks
- KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_PASSWORD: secret
KAFKA_CLUSTERS_0_PROPERTIES_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: '' # DISABLE COMMON NAME VERIFICATION
+
KAFKA_CLUSTERS_0_SCHEMAREGISTRY: https://schemaregistry0:8085
KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTORELOCATION: /kafka.keystore.jks
KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTOREPASSWORD: "secret"
- KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_TRUSTSTORELOCATION: /kafka.truststore.jks
- KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_TRUSTSTOREPASSWORD: "secret"
+
KAFKA_CLUSTERS_0_KSQLDBSERVER: https://ksqldb0:8088
KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_KEYSTORELOCATION: /kafka.keystore.jks
KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_KEYSTOREPASSWORD: "secret"
- KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_TRUSTSTORELOCATION: /kafka.truststore.jks
- KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_TRUSTSTOREPASSWORD: "secret"
+
KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: local
KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: https://kafka-connect0:8083
KAFKA_CLUSTERS_0_KAFKACONNECT_0_KEYSTORELOCATION: /kafka.keystore.jks
KAFKA_CLUSTERS_0_KAFKACONNECT_0_KEYSTOREPASSWORD: "secret"
- KAFKA_CLUSTERS_0_KAFKACONNECT_0_TRUSTSTORELOCATION: /kafka.truststore.jks
- KAFKA_CLUSTERS_0_KAFKACONNECT_0_TRUSTSTOREPASSWORD: "secret"
+
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTORELOCATION: /kafka.truststore.jks
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTOREPASSWORD: "secret"
+ DYNAMIC_CONFIG_ENABLED: 'true' # not necessary for ssl, added for tests
+
volumes:
- ./ssl/kafka.truststore.jks:/kafka.truststore.jks
- ./ssl/kafka.keystore.jks:/kafka.keystore.jks
diff --git a/documentation/compose/kafka-ssl.yml b/documentation/compose/kafka-ssl.yml
index 4fc7daebff..08ff9dc4af 100644
--- a/documentation/compose/kafka-ssl.yml
+++ b/documentation/compose/kafka-ssl.yml
@@ -11,11 +11,11 @@ services:
environment:
KAFKA_CLUSTERS_0_NAME: local
KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SSL
- KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092 # SSL LISTENER!
- KAFKA_CLUSTERS_0_PROPERTIES_SSL_TRUSTSTORE_LOCATION: /kafka.truststore.jks
- KAFKA_CLUSTERS_0_PROPERTIES_SSL_TRUSTSTORE_PASSWORD: secret
KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_LOCATION: /kafka.keystore.jks
- KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_PASSWORD: secret
+ KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_PASSWORD: "secret"
+ KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092 # SSL LISTENER!
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTORELOCATION: /kafka.truststore.jks
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTOREPASSWORD: "secret"
KAFKA_CLUSTERS_0_PROPERTIES_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: '' # DISABLE COMMON NAME VERIFICATION
volumes:
- ./ssl/kafka.truststore.jks:/kafka.truststore.jks
@@ -60,4 +60,4 @@ services:
- ./ssl/creds:/etc/kafka/secrets/creds
- ./ssl/kafka.truststore.jks:/etc/kafka/secrets/kafka.truststore.jks
- ./ssl/kafka.keystore.jks:/etc/kafka/secrets/kafka.keystore.jks
- command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
\ No newline at end of file
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
diff --git a/documentation/compose/kafka-ui-arm64.yaml b/documentation/compose/kafka-ui-arm64.yaml
index bbcefecbf4..87a892cc70 100644
--- a/documentation/compose/kafka-ui-arm64.yaml
+++ b/documentation/compose/kafka-ui-arm64.yaml
@@ -19,6 +19,7 @@ services:
KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://schema-registry0:8085
KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: first
KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: http://kafka-connect0:8083
+ DYNAMIC_CONFIG_ENABLED: 'true' # not necessary, added for tests
kafka0:
image: confluentinc/cp-kafka:7.2.1.arm64
diff --git a/documentation/compose/kafka-ui-jmx-secured.yml b/documentation/compose/kafka-ui-jmx-secured.yml
index de56a7e2c6..408f388ba5 100644
--- a/documentation/compose/kafka-ui-jmx-secured.yml
+++ b/documentation/compose/kafka-ui-jmx-secured.yml
@@ -7,11 +7,8 @@ services:
image: provectuslabs/kafka-ui:latest
ports:
- 8080:8080
- - 5005:5005
depends_on:
- kafka0
- - schemaregistry0
- - kafka-connect0
environment:
KAFKA_CLUSTERS_0_NAME: local
KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
@@ -19,15 +16,12 @@ services:
KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: first
KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: http://kafka-connect0:8083
KAFKA_CLUSTERS_0_METRICS_PORT: 9997
- KAFKA_CLUSTERS_0_METRICS_SSL: 'true'
KAFKA_CLUSTERS_0_METRICS_USERNAME: root
KAFKA_CLUSTERS_0_METRICS_PASSWORD: password
- JAVA_OPTS: >-
- -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005
- -Djavax.net.ssl.trustStore=/jmx/clienttruststore
- -Djavax.net.ssl.trustStorePassword=12345678
- -Djavax.net.ssl.keyStore=/jmx/clientkeystore
- -Djavax.net.ssl.keyStorePassword=12345678
+ KAFKA_CLUSTERS_0_METRICS_KEYSTORE_LOCATION: /jmx/clientkeystore
+ KAFKA_CLUSTERS_0_METRICS_KEYSTORE_PASSWORD: '12345678'
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTORE_LOCATION: /jmx/clienttruststore
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTORE_PASSWORD: '12345678'
volumes:
- ./jmx/clienttruststore:/jmx/clienttruststore
- ./jmx/clientkeystore:/jmx/clientkeystore
@@ -70,8 +64,6 @@ services:
-Dcom.sun.management.jmxremote.access.file=/jmx/jmxremote.access
-Dcom.sun.management.jmxremote.rmi.port=9997
-Djava.rmi.server.hostname=kafka0
- -Djava.rmi.server.logCalls=true
-# -Djavax.net.debug=ssl:handshake
volumes:
- ./jmx/serverkeystore:/jmx/serverkeystore
- ./jmx/servertruststore:/jmx/servertruststore
@@ -79,56 +71,3 @@ services:
- ./jmx/jmxremote.access:/jmx/jmxremote.access
- ./scripts/update_run.sh:/tmp/update_run.sh
command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
-
- schemaregistry0:
- image: confluentinc/cp-schema-registry:7.2.1
- ports:
- - 8085:8085
- depends_on:
- - kafka0
- environment:
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
- SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
- SCHEMA_REGISTRY_HOST_NAME: schemaregistry0
- SCHEMA_REGISTRY_LISTENERS: http://schemaregistry0:8085
-
- SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
- SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
- SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
-
- kafka-connect0:
- image: confluentinc/cp-kafka-connect:7.2.1
- ports:
- - 8083:8083
- depends_on:
- - kafka0
- - schemaregistry0
- environment:
- CONNECT_BOOTSTRAP_SERVERS: kafka0:29092
- CONNECT_GROUP_ID: compose-connect-group
- CONNECT_CONFIG_STORAGE_TOPIC: _connect_configs
- CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
- CONNECT_OFFSET_STORAGE_TOPIC: _connect_offset
- CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
- CONNECT_STATUS_STORAGE_TOPIC: _connect_status
- CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
- CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
- CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
- CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.storage.StringConverter
- CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
- CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
- CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
- CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect0
- CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
-
- kafka-init-topics:
- image: confluentinc/cp-kafka:7.2.1
- volumes:
- - ./message.json:/data/message.json
- depends_on:
- - kafka0
- command: "bash -c 'echo Waiting for Kafka to be ready... && \
- cub kafka-ready -b kafka0:29092 1 30 && \
- kafka-topics --create --topic second.users --partitions 3 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
- kafka-topics --create --topic first.messages --partitions 2 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
- kafka-console-producer --bootstrap-server kafka0:29092 --topic second.users < /data/message.json'"
\ No newline at end of file
diff --git a/documentation/compose/kafka-ui-sasl.yaml b/documentation/compose/kafka-ui-sasl.yaml
index 4b8fc02597..e4a2b3cc4a 100644
--- a/documentation/compose/kafka-ui-sasl.yaml
+++ b/documentation/compose/kafka-ui-sasl.yaml
@@ -15,6 +15,7 @@ services:
KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SASL_PLAINTEXT
KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM: PLAIN
KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin-secret";'
+ DYNAMIC_CONFIG_ENABLED: true # not necessary for sasl auth, added for tests
kafka:
image: confluentinc/cp-kafka:7.2.1
@@ -48,4 +49,4 @@ services:
volumes:
- ./scripts/update_run.sh:/tmp/update_run.sh
- ./jaas:/etc/kafka/jaas
- command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
\ No newline at end of file
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
diff --git a/documentation/compose/kafka-ui-serdes.yaml b/documentation/compose/kafka-ui-serdes.yaml
index c380a34f6b..eee510a13d 100644
--- a/documentation/compose/kafka-ui-serdes.yaml
+++ b/documentation/compose/kafka-ui-serdes.yaml
@@ -14,13 +14,16 @@ services:
kafka.clusters.0.name: SerdeExampleCluster
kafka.clusters.0.bootstrapServers: kafka0:29092
kafka.clusters.0.schemaRegistry: http://schemaregistry0:8085
- # optional auth and ssl properties for SR
+
+ # optional SSL settings for cluster (will be used by SchemaRegistry serde, if set)
+ #kafka.clusters.0.ssl.keystoreLocation: /kafka.keystore.jks
+ #kafka.clusters.0.ssl.keystorePassword: "secret"
+ #kafka.clusters.0.ssl.truststoreLocation: /kafka.truststore.jks
+ #kafka.clusters.0.ssl.truststorePassword: "secret"
+
+ # optional auth properties for SR
#kafka.clusters.0.schemaRegistryAuth.username: "use"
#kafka.clusters.0.schemaRegistryAuth.password: "pswrd"
- #kafka.clusters.0.schemaRegistrySSL.keystoreLocation: /kafka.keystore.jks
- #kafka.clusters.0.schemaRegistrySSL.keystorePassword: "secret"
- #kafka.clusters.0.schemaRegistrySSL.truststoreLocation: /kafka.truststore.jks
- #kafka.clusters.0.schemaRegistrySSL.truststorePassword: "secret"
kafka.clusters.0.defaultKeySerde: Int32 #optional
kafka.clusters.0.defaultValueSerde: String #optional
@@ -51,7 +54,7 @@ services:
kafka.clusters.0.serde.4.properties.keySchemaNameTemplate: "%s-key"
kafka.clusters.0.serde.4.properties.schemaNameTemplate: "%s-value"
#kafka.clusters.0.serde.4.topicValuesPattern: "sr2-topic.*"
- # optional auth and ssl properties for SR:
+ # optional auth and ssl properties for SR (overrides cluster-level):
#kafka.clusters.0.serde.4.properties.username: "user"
#kafka.clusters.0.serde.4.properties.password: "passw"
#kafka.clusters.0.serde.4.properties.keystoreLocation: /kafka.keystore.jks
diff --git a/documentation/compose/kafka-ui.yaml b/documentation/compose/kafka-ui.yaml
index 32c874b676..8524f6fa2b 100644
--- a/documentation/compose/kafka-ui.yaml
+++ b/documentation/compose/kafka-ui.yaml
@@ -24,6 +24,7 @@ services:
KAFKA_CLUSTERS_1_BOOTSTRAPSERVERS: kafka1:29092
KAFKA_CLUSTERS_1_METRICS_PORT: 9998
KAFKA_CLUSTERS_1_SCHEMAREGISTRY: http://schemaregistry1:8085
+ DYNAMIC_CONFIG_ENABLED: 'true'
kafka0:
image: confluentinc/cp-kafka:7.2.1
diff --git a/kafka-ui-api/Dockerfile b/kafka-ui-api/Dockerfile
index 81df41bd1d..fcd29c0f06 100644
--- a/kafka-ui-api/Dockerfile
+++ b/kafka-ui-api/Dockerfile
@@ -3,6 +3,10 @@ FROM azul/zulu-openjdk-alpine:17-jre
RUN apk add --no-cache gcompat # need to make snappy codec work
RUN addgroup -S kafkaui && adduser -S kafkaui -G kafkaui
+# creating folder for dynamic config usage (certificates uploads, etc)
+RUN mkdir /etc/kafkaui/
+RUN chown kafkaui /etc/kafkaui
+
USER kafkaui
ARG JAR_FILE
@@ -12,4 +16,5 @@ ENV JAVA_OPTS=
EXPOSE 8080
-CMD java $JAVA_OPTS -jar kafka-ui-api.jar
+# see JmxSslSocketFactory docs to understand why add-opens is needed
+CMD java --add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED $JAVA_OPTS -jar kafka-ui-api.jar
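
The --add-opens flag matters on JDK 17: JmxSslSocketFactory reflectively reaches into javax.rmi.ssl (module java.rmi), and that access fails fast without it. A minimal sketch of the failure mode (illustrative only, not part of this patch):

    import java.lang.reflect.Field;
    import javax.rmi.ssl.SslRMIClientSocketFactory;

    public class AddOpensDemo {
        public static void main(String[] args) {
            for (Field f : SslRMIClientSocketFactory.class.getDeclaredFields()) {
                // throws InaccessibleObjectException on JDK 17 unless the JVM is
                // started with --add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED
                f.setAccessible(true);
            }
        }
    }
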
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/KafkaUiApplication.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/KafkaUiApplication.java
index a9a523eb85..8d0eafeff3 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/KafkaUiApplication.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/KafkaUiApplication.java
@@ -1,8 +1,10 @@
package com.provectus.kafka.ui;
-import org.springframework.boot.SpringApplication;
+import com.provectus.kafka.ui.util.DynamicConfigOperations;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.ldap.LdapAutoConfiguration;
+import org.springframework.boot.builder.SpringApplicationBuilder;
+import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
@@ -12,6 +14,13 @@ import org.springframework.scheduling.annotation.EnableScheduling;
public class KafkaUiApplication {
public static void main(String[] args) {
- SpringApplication.run(KafkaUiApplication.class, args);
+ startApplication(args);
+ }
+
+ public static ConfigurableApplicationContext startApplication(String[] args) {
+ return new SpringApplicationBuilder(KafkaUiApplication.class)
+ .initializers(DynamicConfigOperations.dynamicConfigPropertiesInitializer())
+ .build()
+ .run(args);
}
}
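
For context, an initializer registered this way can push extra property sources into the Spring environment before the context refreshes; dynamicConfigPropertiesInitializer() presumably does something along these lines (a sketch with an assumed property map; the real implementation lives in DynamicConfigOperations):

    import java.util.Map;
    import org.springframework.context.ApplicationContextInitializer;
    import org.springframework.context.ConfigurableApplicationContext;
    import org.springframework.core.env.MapPropertySource;

    class DynamicConfigInitializerSketch {
        static ApplicationContextInitializer<ConfigurableApplicationContext> initializer() {
            // properties read from the dynamic config file would go here
            Map<String, Object> dynamicProps = Map.of("kafka.clusters.0.name", "local");
            return ctx -> ctx.getEnvironment()
                    .getPropertySources()
                    .addFirst(new MapPropertySource("dynamicConfig", dynamicProps));
        }
    }
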
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/RetryingKafkaConnectClient.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/RetryingKafkaConnectClient.java
index be5686e2f9..5ec5a779d3 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/RetryingKafkaConnectClient.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/RetryingKafkaConnectClient.java
@@ -2,6 +2,7 @@ package com.provectus.kafka.ui.client;
import static com.provectus.kafka.ui.config.ClustersProperties.ConnectCluster;
+import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.connect.ApiClient;
import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
import com.provectus.kafka.ui.connect.model.Connector;
@@ -12,6 +13,7 @@ import com.provectus.kafka.ui.util.WebClientConfigurator;
import java.time.Duration;
import java.util.List;
import java.util.Map;
+import javax.annotation.Nullable;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.http.HttpHeaders;
@@ -31,8 +33,10 @@ public class RetryingKafkaConnectClient extends KafkaConnectClientApi {
private static final int MAX_RETRIES = 5;
private static final Duration RETRIES_DELAY = Duration.ofMillis(200);
- public RetryingKafkaConnectClient(ConnectCluster config, DataSize maxBuffSize) {
- super(new RetryingApiClient(config, maxBuffSize));
+ public RetryingKafkaConnectClient(ConnectCluster config,
+ @Nullable ClustersProperties.TruststoreConfig truststoreConfig,
+ DataSize maxBuffSize) {
+ super(new RetryingApiClient(config, truststoreConfig, maxBuffSize));
}
private static Retry conflictCodeRetry() {
@@ -77,23 +81,28 @@ public class RetryingKafkaConnectClient extends KafkaConnectClientApi {
private static class RetryingApiClient extends ApiClient {
- public RetryingApiClient(ConnectCluster config, DataSize maxBuffSize) {
- super(buildWebClient(maxBuffSize, config), null, null);
+ public RetryingApiClient(ConnectCluster config,
+ ClustersProperties.TruststoreConfig truststoreConfig,
+ DataSize maxBuffSize) {
+ super(buildWebClient(maxBuffSize, config, truststoreConfig), null, null);
setBasePath(config.getAddress());
- setUsername(config.getUserName());
+ setUsername(config.getUsername());
setPassword(config.getPassword());
}
- public static WebClient buildWebClient(DataSize maxBuffSize, ConnectCluster config) {
+ public static WebClient buildWebClient(DataSize maxBuffSize,
+ ConnectCluster config,
+ ClustersProperties.TruststoreConfig truststoreConfig) {
return new WebClientConfigurator()
.configureSsl(
- config.getKeystoreLocation(),
- config.getKeystorePassword(),
- config.getTruststoreLocation(),
- config.getTruststorePassword()
+ truststoreConfig,
+ new ClustersProperties.KeystoreConfig(
+ config.getKeystoreLocation(),
+ config.getKeystorePassword()
+ )
)
.configureBasicAuth(
- config.getUserName(),
+ config.getUsername(),
config.getPassword()
)
.configureBufferSize(maxBuffSize)
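
The net effect of splitting configureSsl into a cluster-level TruststoreConfig plus a per-service KeystoreConfig is the standard JSSE recipe below; a sketch using plain JDK APIs (WebClientConfigurator's actual wiring presumably goes through Netty's SslContext):

    import java.io.FileInputStream;
    import java.security.KeyStore;
    import javax.net.ssl.KeyManagerFactory;
    import javax.net.ssl.SSLContext;
    import javax.net.ssl.TrustManagerFactory;

    class SslContextSketch {
        // truststore = who we trust (shared per cluster); keystore = who we are (per service)
        static SSLContext build(String tsPath, char[] tsPass,
                                String ksPath, char[] ksPass) throws Exception {
            KeyStore ts = KeyStore.getInstance("JKS");
            try (FileInputStream in = new FileInputStream(tsPath)) {
                ts.load(in, tsPass);
            }
            TrustManagerFactory tmf =
                TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
            tmf.init(ts);

            KeyStore ks = KeyStore.getInstance("JKS");
            try (FileInputStream in = new FileInputStream(ksPath)) {
                ks.load(in, ksPass);
            }
            KeyManagerFactory kmf =
                KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
            kmf.init(ks, ksPass);

            SSLContext ctx = SSLContext.getInstance("TLS");
            ctx.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
            return ctx;
        }
    }
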
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
index 01d4de1257..2cd5e0e69c 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
@@ -1,12 +1,13 @@
package com.provectus.kafka.ui.config;
+import com.provectus.kafka.ui.model.MetricsConfig;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
-import java.util.Properties;
import java.util.Set;
+import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import lombok.AllArgsConstructor;
import lombok.Builder;
@@ -30,55 +31,58 @@ public class ClustersProperties {
String bootstrapServers;
String schemaRegistry;
SchemaRegistryAuth schemaRegistryAuth;
- WebClientSsl schemaRegistrySsl;
+ KeystoreConfig schemaRegistrySsl;
String ksqldbServer;
KsqldbServerAuth ksqldbServerAuth;
- WebClientSsl ksqldbServerSsl;
+ KeystoreConfig ksqldbServerSsl;
List<ConnectCluster> kafkaConnect;
MetricsConfigData metrics;
- Properties properties;
+ Map<String, Object> properties;
boolean readOnly = false;
- List<SerdeConfig> serde = new ArrayList<>();
+ List<SerdeConfig> serde;
String defaultKeySerde;
String defaultValueSerde;
- List<Masking> masking = new ArrayList<>();
- long pollingThrottleRate = 0;
+ List<Masking> masking;
+ Long pollingThrottleRate;
+ TruststoreConfig ssl;
}
@Data
+ @ToString(exclude = "password")
public static class MetricsConfigData {
String type;
Integer port;
- boolean ssl;
+ Boolean ssl;
String username;
String password;
+ String keystoreLocation;
+ String keystorePassword;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder(toBuilder = true)
+ @ToString(exclude = {"password", "keystorePassword"})
public static class ConnectCluster {
String name;
String address;
- String userName;
+ String username;
String password;
String keystoreLocation;
String keystorePassword;
- String truststoreLocation;
- String truststorePassword;
}
@Data
+ @ToString(exclude = {"password"})
public static class SchemaRegistryAuth {
String username;
String password;
}
@Data
- public static class WebClientSsl {
- String keystoreLocation;
- String keystorePassword;
+ @ToString(exclude = {"truststorePassword"})
+ public static class TruststoreConfig {
String truststoreLocation;
String truststorePassword;
}
@@ -88,7 +92,7 @@ public class ClustersProperties {
String name;
String className;
String filePath;
- Map<String, Object> properties = new HashMap<>();
+ Map<String, Object> properties;
String topicKeysPattern;
String topicValuesPattern;
}
@@ -100,12 +104,21 @@ public class ClustersProperties {
String password;
}
+ @Data
+ @NoArgsConstructor
+ @AllArgsConstructor
+ @ToString(exclude = {"keystorePassword"})
+ public static class KeystoreConfig {
+ String keystoreLocation;
+ String keystorePassword;
+ }
+
@Data
public static class Masking {
Type type;
- List<String> fields = List.of(); //if empty - policy will be applied to all fields
- List<String> pattern = List.of("X", "x", "n", "-"); //used when type=MASK
- String replacement = "***DATA_MASKED***"; //used when type=REPLACE
+ List<String> fields; //if null or empty list - policy will be applied to all fields
+ List<String> pattern; //used when type=MASK
+ String replacement; //used when type=REPLACE
String topicKeysPattern;
String topicValuesPattern;
@@ -116,7 +129,41 @@ public class ClustersProperties {
@PostConstruct
public void validateAndSetDefaults() {
- validateClusterNames();
+ if (clusters != null) {
+ validateClusterNames();
+ flattenClusterProperties();
+ setMetricsDefaults();
+ }
+ }
+
+ private void setMetricsDefaults() {
+ for (Cluster cluster : clusters) {
+ if (cluster.getMetrics() != null && !StringUtils.hasText(cluster.getMetrics().getType())) {
+ cluster.getMetrics().setType(MetricsConfig.JMX_METRICS_TYPE);
+ }
+ }
+ }
+
+ private void flattenClusterProperties() {
+ for (Cluster cluster : clusters) {
+ cluster.setProperties(flattenClusterProperties(null, cluster.getProperties()));
+ }
+ }
+
+ private Map<String, Object> flattenClusterProperties(@Nullable String prefix,
+ @Nullable Map<String, Object> propertiesMap) {
+ Map<String, Object> flattened = new HashMap<>();
+ if (propertiesMap != null) {
+ propertiesMap.forEach((k, v) -> {
+ String key = prefix == null ? k : prefix + "." + k;
+ if (v instanceof Map<?, ?>) {
+ flattened.putAll(flattenClusterProperties(key, (Map<String, Object>) v));
+ } else {
+ flattened.put(key, v);
+ }
+ });
+ }
+ return flattened;
}
private void validateClusterNames() {
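
flattenClusterProperties turns the nested maps Spring binds from YAML back into the dotted keys Kafka clients expect. A standalone sketch of the same recursion, with a worked example:

    import java.util.HashMap;
    import java.util.Map;

    class FlattenSketch {
        // {"security": {"protocol": "SASL_SSL"}} -> {"security.protocol": "SASL_SSL"}
        static Map<String, Object> flatten(String prefix, Map<String, Object> src) {
            Map<String, Object> out = new HashMap<>();
            src.forEach((k, v) -> {
                String key = prefix == null ? k : prefix + "." + k;
                if (v instanceof Map<?, ?> nested) {
                    @SuppressWarnings("unchecked")
                    Map<String, Object> m = (Map<String, Object>) nested;
                    out.putAll(flatten(key, m));
                } else {
                    out.put(key, v);
                }
            });
            return out;
        }

        public static void main(String[] args) {
            System.out.println(flatten(null, Map.of("security", Map.of("protocol", "SASL_SSL"))));
            // prints {security.protocol=SASL_SSL}
        }
    }
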
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java
index db192ae826..f79d217fa7 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java
@@ -1,7 +1,6 @@
package com.provectus.kafka.ui.config.auth;
import java.util.HashMap;
-import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.annotation.PostConstruct;
@@ -32,13 +31,13 @@ public class OAuthProperties {
private String clientName;
private String redirectUri;
private String authorizationGrantType;
- private Set<String> scope = new HashSet<>();
+ private Set<String> scope;
private String issuerUri;
private String authorizationUri;
private String tokenUri;
private String userInfoUri;
private String jwkSetUri;
private String userNameAttribute;
- private Map<String, String> customParams = new HashMap<>();
+ private Map<String, String> customParams;
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthPropertiesConverter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthPropertiesConverter.java
index 8e4a8575a8..90daa36273 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthPropertiesConverter.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthPropertiesConverter.java
@@ -4,6 +4,8 @@ import static com.provectus.kafka.ui.config.auth.OAuthProperties.OAuth2Provider;
import static org.springframework.boot.autoconfigure.security.oauth2.client.OAuth2ClientProperties.Provider;
import static org.springframework.boot.autoconfigure.security.oauth2.client.OAuth2ClientProperties.Registration;
+import java.util.Optional;
+import java.util.Set;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import org.apache.commons.lang3.StringUtils;
@@ -24,7 +26,7 @@ public final class OAuthPropertiesConverter {
registration.setClientId(provider.getClientId());
registration.setClientSecret(provider.getClientSecret());
registration.setClientName(provider.getClientName());
- registration.setScope(provider.getScope());
+ registration.setScope(Optional.ofNullable(provider.getScope()).orElse(Set.of()));
registration.setRedirectUri(provider.getRedirectUri());
registration.setAuthorizationGrantType(provider.getAuthorizationGrantType());
@@ -71,7 +73,8 @@ public final class OAuthPropertiesConverter {
}
private static boolean isGoogle(OAuth2Provider provider) {
- return GOOGLE.equalsIgnoreCase(provider.getCustomParams().get(TYPE));
+ return provider.getCustomParams() != null
+ && GOOGLE.equalsIgnoreCase(provider.getCustomParams().get(TYPE));
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/logout/CognitoLogoutSuccessHandler.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/logout/CognitoLogoutSuccessHandler.java
index 3d725c659d..e9e5159e1b 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/logout/CognitoLogoutSuccessHandler.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/logout/CognitoLogoutSuccessHandler.java
@@ -12,6 +12,7 @@ import org.springframework.security.core.Authentication;
import org.springframework.security.web.server.WebFilterExchange;
import org.springframework.security.web.util.UrlUtils;
import org.springframework.stereotype.Component;
+import org.springframework.util.Assert;
import org.springframework.web.server.WebSession;
import org.springframework.web.util.UriComponents;
import org.springframework.web.util.UriComponentsBuilder;
@@ -45,6 +46,10 @@ public class CognitoLogoutSuccessHandler implements LogoutSuccessHandler {
.fragment(null)
.build();
+ Assert.isTrue(
+ provider.getCustomParams() != null && provider.getCustomParams().containsKey("logoutUrl"),
+ "Custom params should contain 'logoutUrl'"
+ );
final var uri = UriComponentsBuilder
.fromUri(URI.create(provider.getCustomParams().get("logoutUrl")))
.queryParam("client_id", provider.getClientId())
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AccessController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AccessController.java
index 131a37982a..a86b6db5a0 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AccessController.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AccessController.java
@@ -66,7 +66,7 @@ public class AccessController implements AuthorizationApi {
UserPermissionDTO dto = new UserPermissionDTO();
dto.setClusters(clusters);
dto.setResource(ResourceTypeDTO.fromValue(permission.getResource().toString().toUpperCase()));
- dto.setValue(permission.getValue() != null ? permission.getValue().toString() : null);
+ dto.setValue(permission.getValue());
dto.setActions(permission.getActions()
.stream()
.map(String::toUpperCase)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java
new file mode 100644
index 0000000000..b21ef10c61
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java
@@ -0,0 +1,137 @@
+package com.provectus.kafka.ui.controller;
+
+import static com.provectus.kafka.ui.model.rbac.permission.ApplicationConfigAction.EDIT;
+import static com.provectus.kafka.ui.model.rbac.permission.ApplicationConfigAction.VIEW;
+
+import com.provectus.kafka.ui.api.ApplicationConfigApi;
+import com.provectus.kafka.ui.config.ClustersProperties;
+import com.provectus.kafka.ui.model.ApplicationConfigDTO;
+import com.provectus.kafka.ui.model.ApplicationConfigPropertiesDTO;
+import com.provectus.kafka.ui.model.ApplicationConfigValidationDTO;
+import com.provectus.kafka.ui.model.ApplicationInfoDTO;
+import com.provectus.kafka.ui.model.ClusterConfigValidationDTO;
+import com.provectus.kafka.ui.model.RestartRequestDTO;
+import com.provectus.kafka.ui.model.UploadedFileInfoDTO;
+import com.provectus.kafka.ui.model.rbac.AccessContext;
+import com.provectus.kafka.ui.service.KafkaClusterFactory;
+import com.provectus.kafka.ui.service.rbac.AccessControlService;
+import com.provectus.kafka.ui.util.ApplicationRestarter;
+import com.provectus.kafka.ui.util.DynamicConfigOperations;
+import com.provectus.kafka.ui.util.DynamicConfigOperations.PropertiesStructure;
+import java.util.List;
+import java.util.Map;
+import javax.annotation.Nullable;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.mapstruct.Mapper;
+import org.mapstruct.factory.Mappers;
+import org.springframework.http.ResponseEntity;
+import org.springframework.http.codec.multipart.FilePart;
+import org.springframework.web.bind.annotation.RestController;
+import org.springframework.web.server.ServerWebExchange;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+import reactor.util.function.Tuple2;
+import reactor.util.function.Tuples;
+
+@Slf4j
+@RestController
+@RequiredArgsConstructor
+public class ApplicationConfigController implements ApplicationConfigApi {
+
+ private static final PropertiesMapper MAPPER = Mappers.getMapper(PropertiesMapper.class);
+
+ @Mapper
+ interface PropertiesMapper {
+
+ PropertiesStructure fromDto(ApplicationConfigPropertiesDTO dto);
+
+ ApplicationConfigPropertiesDTO toDto(PropertiesStructure propertiesStructure);
+ }
+
+ private final AccessControlService accessControlService;
+ private final DynamicConfigOperations dynamicConfigOperations;
+ private final ApplicationRestarter restarter;
+ private final KafkaClusterFactory kafkaClusterFactory;
+
+
+ @Override
+ public Mono<ResponseEntity<ApplicationInfoDTO>> getApplicationInfo(ServerWebExchange exchange) {
+ return Mono.just(
+ new ApplicationInfoDTO()
+ .enabledFeatures(
+ dynamicConfigOperations.dynamicConfigEnabled()
+ ? List.of(ApplicationInfoDTO.EnabledFeaturesEnum.DYNAMIC_CONFIG)
+ : List.of()
+ )
+ ).map(ResponseEntity::ok);
+ }
+
+ @Override
+ public Mono<ResponseEntity<ApplicationConfigDTO>> getCurrentConfig(ServerWebExchange exchange) {
+ return accessControlService
+ .validateAccess(
+ AccessContext.builder()
+ .applicationConfigActions(VIEW)
+ .build()
+ )
+ .then(Mono.fromSupplier(() -> ResponseEntity.ok(
+ new ApplicationConfigDTO()
+ .properties(MAPPER.toDto(dynamicConfigOperations.getCurrentProperties()))
+ )));
+ }
+
+ @Override
+ public Mono<ResponseEntity<Void>> restartWithConfig(Mono<RestartRequestDTO> restartRequestDto,
+ ServerWebExchange exchange) {
+ return accessControlService
+ .validateAccess(
+ AccessContext.builder()
+ .applicationConfigActions(EDIT)
+ .build()
+ )
+ .then(restartRequestDto)
+ .map(dto -> {
+ dynamicConfigOperations.persist(MAPPER.fromDto(dto.getConfig().getProperties()));
+ restarter.requestRestart();
+ return ResponseEntity.ok().build();
+ });
+ }
+
+ @Override
+ public Mono<ResponseEntity<UploadedFileInfoDTO>> uploadConfigRelatedFile(FilePart file, ServerWebExchange exchange) {
+ return accessControlService
+ .validateAccess(
+ AccessContext.builder()
+ .applicationConfigActions(EDIT)
+ .build()
+ )
+ .then(dynamicConfigOperations.uploadConfigRelatedFile(file))
+ .map(path -> new UploadedFileInfoDTO().location(path.toString()))
+ .map(ResponseEntity::ok);
+ }
+
+ @Override
+ public Mono<ResponseEntity<ApplicationConfigValidationDTO>> validateConfig(Mono<ApplicationConfigDTO> configDto,
+ ServerWebExchange exchange) {
+ return configDto
+ .flatMap(config -> {
+ PropertiesStructure propertiesStructure = MAPPER.fromDto(config.getProperties());
+ ClustersProperties clustersProperties = propertiesStructure.getKafka();
+ return validateClustersConfig(clustersProperties)
+ .map(validations -> new ApplicationConfigValidationDTO().clusters(validations));
+ })
+ .map(ResponseEntity::ok);
+ }
+
+ private Mono<Map<String, ClusterConfigValidationDTO>> validateClustersConfig(
+ @Nullable ClustersProperties properties) {
+ if (properties == null || properties.getClusters() == null) {
+ return Mono.just(Map.of());
+ }
+ properties.validateAndSetDefaults();
+ return Flux.fromIterable(properties.getClusters())
+ .flatMap(c -> kafkaClusterFactory.validate(c).map(v -> Tuples.of(c.getName(), v)))
+ .collectMap(Tuple2::getT1, Tuple2::getT2);
+ }
+}
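
Once DYNAMIC_CONFIG_ENABLED is on, these endpoints can be exercised directly; a sketch using Spring's WebClient (the /api/info and /api/config paths are assumptions taken from the generated ApplicationConfigApi contract — verify them against kafka-ui-api.yaml):

    import org.springframework.web.reactive.function.client.WebClient;

    class ConfigApiClientSketch {
        public static void main(String[] args) {
            WebClient client = WebClient.create("http://localhost:8080");
            // feature discovery: response lists DYNAMIC_CONFIG when enabled
            String info = client.get().uri("/api/info")
                    .retrieve().bodyToMono(String.class).block();
            // current effective configuration (requires applicationconfig VIEW permission)
            String config = client.get().uri("/api/config")
                    .retrieve().bodyToMono(String.class).block();
            System.out.println(info + "\n" + config);
        }
    }
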
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/ErrorCode.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/ErrorCode.java
index 6c05eec206..61be8155e8 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/ErrorCode.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/ErrorCode.java
@@ -29,7 +29,9 @@ public enum ErrorCode {
RECREATE_TOPIC_TIMEOUT(4015, HttpStatus.REQUEST_TIMEOUT),
INVALID_ENTITY_STATE(4016, HttpStatus.BAD_REQUEST),
SCHEMA_NOT_DELETED(4017, HttpStatus.INTERNAL_SERVER_ERROR),
- TOPIC_ANALYSIS_ERROR(4018, HttpStatus.BAD_REQUEST);
+ TOPIC_ANALYSIS_ERROR(4018, HttpStatus.BAD_REQUEST),
+ FILE_UPLOAD_EXCEPTION(4019, HttpStatus.INTERNAL_SERVER_ERROR),
+ ;
static {
// codes uniqueness check
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/FileUploadException.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/FileUploadException.java
new file mode 100644
index 0000000000..e5e410d64a
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/FileUploadException.java
@@ -0,0 +1,19 @@
+package com.provectus.kafka.ui.exception;
+
+import java.nio.file.Path;
+
+public class FileUploadException extends CustomBaseException {
+
+ public FileUploadException(String msg, Throwable cause) {
+ super(msg, cause);
+ }
+
+ public FileUploadException(Path path, Throwable cause) {
+ super("Error uploading file %s".formatted(path), cause);
+ }
+
+ @Override
+ public ErrorCode getErrorCode() {
+ return ErrorCode.FILE_UPLOAD_EXCEPTION;
+ }
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/ValidationException.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/ValidationException.java
index 7b964fbca5..01eac145ff 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/ValidationException.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/ValidationException.java
@@ -6,6 +6,10 @@ public class ValidationException extends CustomBaseException {
super(message);
}
+ public ValidationException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
@Override
public ErrorCode getErrorCode() {
return ErrorCode.VALIDATION_FAIL;
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ClusterMapper.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ClusterMapper.java
index 0d33b5ca56..d989ce93ba 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ClusterMapper.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ClusterMapper.java
@@ -6,12 +6,12 @@ import com.provectus.kafka.ui.model.BrokerDTO;
import com.provectus.kafka.ui.model.BrokerDiskUsageDTO;
import com.provectus.kafka.ui.model.BrokerMetricsDTO;
import com.provectus.kafka.ui.model.ClusterDTO;
+import com.provectus.kafka.ui.model.ClusterFeature;
import com.provectus.kafka.ui.model.ClusterMetricsDTO;
import com.provectus.kafka.ui.model.ClusterStatsDTO;
import com.provectus.kafka.ui.model.ConfigSourceDTO;
import com.provectus.kafka.ui.model.ConfigSynonymDTO;
import com.provectus.kafka.ui.model.ConnectDTO;
-import com.provectus.kafka.ui.model.Feature;
import com.provectus.kafka.ui.model.InternalBroker;
import com.provectus.kafka.ui.model.InternalBrokerConfig;
import com.provectus.kafka.ui.model.InternalBrokerDiskUsage;
@@ -95,7 +95,7 @@ public interface ClusterMapper {
ConnectDTO toKafkaConnect(ClustersProperties.ConnectCluster connect);
- List<ClusterFeatureDTO> toFeaturesEnum(List<Feature> features);
+ List<ClusterFeatureDTO> toFeaturesEnum(List<ClusterFeature> features);
default List<PartitionDTO> map(Map<Integer, InternalPartition> map) {
return map.values().stream().map(this::toPartition).collect(Collectors.toList());
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/Feature.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/ClusterFeature.java
similarity index 78%
rename from kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/Feature.java
rename to kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/ClusterFeature.java
index ff0e2fca4b..9731492f00 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/Feature.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/ClusterFeature.java
@@ -1,6 +1,6 @@
package com.provectus.kafka.ui.model;
-public enum Feature {
+public enum ClusterFeature {
KAFKA_CONNECT,
KSQL_DB,
SCHEMA_REGISTRY,
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalClusterState.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalClusterState.java
index 5f6d06ced5..28e9a7413a 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalClusterState.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalClusterState.java
@@ -23,7 +23,7 @@ public class InternalClusterState {
private Integer underReplicatedPartitionCount;
private List<BrokerDiskUsageDTO> diskUsage;
private String version;
- private List<Feature> features;
+ private List<ClusterFeature> features;
private BigDecimal bytesInPerSec;
private BigDecimal bytesOutPerSec;
private Boolean readOnly;
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/JmxConnectionInfo.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/JmxConnectionInfo.java
deleted file mode 100644
index de80b25be3..0000000000
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/JmxConnectionInfo.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package com.provectus.kafka.ui.model;
-
-import lombok.Builder;
-import lombok.Data;
-import lombok.EqualsAndHashCode;
-import lombok.RequiredArgsConstructor;
-
-@Data
-@RequiredArgsConstructor
-@Builder
-@EqualsAndHashCode(onlyExplicitlyIncluded = true)
-public class JmxConnectionInfo {
-
- @EqualsAndHashCode.Include
- private final String url;
- private final boolean ssl;
- private final String username;
- private final String password;
-
- public JmxConnectionInfo(String url) {
- this.url = url;
- this.ssl = false;
- this.username = null;
- this.password = null;
- }
-}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/MetricsConfig.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/MetricsConfig.java
index 2554008080..d355144343 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/MetricsConfig.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/MetricsConfig.java
@@ -17,4 +17,6 @@ public class MetricsConfig {
private final boolean ssl;
private final String username;
private final String password;
+ private final String keystoreLocation;
+ private final String keystorePassword;
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/Statistics.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/Statistics.java
index cb74c5d5ab..e70547f143 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/Statistics.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/Statistics.java
@@ -15,7 +15,7 @@ public class Statistics {
ServerStatusDTO status;
Throwable lastKafkaException;
String version;
- List<Feature> features;
+ List<ClusterFeature> features;
ReactiveAdminClient.ClusterDescription clusterDescription;
Metrics metrics;
InternalLogDirStats logDirInfo;
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/AccessContext.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/AccessContext.java
index abe18fb966..0c2587d681 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/AccessContext.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/AccessContext.java
@@ -1,5 +1,6 @@
package com.provectus.kafka.ui.model.rbac;
+import com.provectus.kafka.ui.model.rbac.permission.ApplicationConfigAction;
import com.provectus.kafka.ui.model.rbac.permission.ClusterConfigAction;
import com.provectus.kafka.ui.model.rbac.permission.ConnectAction;
import com.provectus.kafka.ui.model.rbac.permission.ConsumerGroupAction;
@@ -15,6 +16,8 @@ import org.springframework.util.Assert;
@Value
public class AccessContext {
+ Collection<ApplicationConfigAction> applicationConfigActions;
+
String cluster;
Collection<ClusterConfigAction> clusterConfigActions;
@@ -39,6 +42,7 @@ public class AccessContext {
}
public static final class AccessContextBuilder {
+ private Collection<ApplicationConfigAction> applicationConfigActions = Collections.emptySet();
private String cluster;
private Collection<ClusterConfigAction> clusterConfigActions = Collections.emptySet();
private String topic;
@@ -55,6 +59,12 @@ public class AccessContext {
private AccessContextBuilder() {
}
+ public AccessContextBuilder applicationConfigActions(ApplicationConfigAction... actions) {
+ Assert.isTrue(actions.length > 0, "actions not present");
+ this.applicationConfigActions = List.of(actions);
+ return this;
+ }
+
public AccessContextBuilder cluster(String cluster) {
this.cluster = cluster;
return this;
@@ -122,7 +132,9 @@ public class AccessContext {
}
public AccessContext build() {
- return new AccessContext(cluster, clusterConfigActions,
+ return new AccessContext(
+ applicationConfigActions,
+ cluster, clusterConfigActions,
topic, topicActions,
consumerGroup, consumerGroupActions,
connect, connectActions,
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Permission.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Permission.java
index 9aa23b46c7..837f9008f3 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Permission.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Permission.java
@@ -3,6 +3,7 @@ package com.provectus.kafka.ui.model.rbac;
import static com.provectus.kafka.ui.model.rbac.Resource.CLUSTERCONFIG;
import static com.provectus.kafka.ui.model.rbac.Resource.KSQL;
+import com.provectus.kafka.ui.model.rbac.permission.ApplicationConfigAction;
import com.provectus.kafka.ui.model.rbac.permission.ClusterConfigAction;
import com.provectus.kafka.ui.model.rbac.permission.ConnectAction;
import com.provectus.kafka.ui.model.rbac.permission.ConsumerGroupAction;
@@ -12,11 +13,11 @@ import com.provectus.kafka.ui.model.rbac.permission.TopicAction;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;
+import javax.annotation.Nullable;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;
import org.apache.commons.collections.CollectionUtils;
-import org.jetbrains.annotations.Nullable;
import org.springframework.util.Assert;
@Getter
@@ -25,18 +26,21 @@ import org.springframework.util.Assert;
public class Permission {
Resource resource;
+ List<String> actions;
@Nullable
- Pattern value;
- List<String> actions;
+ String value;
+ @Nullable
+ transient Pattern compiledValuePattern;
@SuppressWarnings("unused")
public void setResource(String resource) {
this.resource = Resource.fromString(resource.toUpperCase());
}
- public void setValue(String value) {
- this.value = Pattern.compile(value);
+ @SuppressWarnings("unused")
+ public void setValue(@Nullable String value) {
+ this.value = value;
}
@SuppressWarnings("unused")
@@ -52,14 +56,17 @@ public class Permission {
}
public void transform() {
- if (CollectionUtils.isEmpty(actions) || this.actions.stream().noneMatch("ALL"::equalsIgnoreCase)) {
- return;
+ if (value != null) {
+ this.compiledValuePattern = Pattern.compile(value);
+ }
+ if (CollectionUtils.isNotEmpty(actions) && actions.stream().anyMatch("ALL"::equalsIgnoreCase)) {
+ this.actions = getAllActionValues();
}
- this.actions = getActionValues();
}
- private List<String> getActionValues() {
+ private List<String> getAllActionValues() {
return switch (this.resource) {
+ case APPLICATIONCONFIG -> Arrays.stream(ApplicationConfigAction.values()).map(Enum::toString).toList();
case CLUSTERCONFIG -> Arrays.stream(ClusterConfigAction.values()).map(Enum::toString).toList();
case TOPIC -> Arrays.stream(TopicAction.values()).map(Enum::toString).toList();
case CONSUMER -> Arrays.stream(ConsumerGroupAction.values()).map(Enum::toString).toList();
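
A short sketch of the reworked Permission lifecycle: the raw value string is now compiled lazily in transform(), and an "ALL" entry expands to every action of the resource (setter names are assumed from the surrounding code):

    Permission p = new Permission();
    p.setResource("applicationconfig");
    p.setActions(List.of("ALL"));
    p.transform();
    // p.getActions() now holds ["VIEW", "EDIT"];
    // compiledValuePattern stays null because no value was set
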
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Resource.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Resource.java
index 3dafd7e6b2..4b2c66361f 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Resource.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Resource.java
@@ -5,6 +5,7 @@ import org.jetbrains.annotations.Nullable;
public enum Resource {
+ APPLICATIONCONFIG,
CLUSTERCONFIG,
TOPIC,
CONSUMER,
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/permission/ApplicationConfigAction.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/permission/ApplicationConfigAction.java
new file mode 100644
index 0000000000..d30ff50d70
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/permission/ApplicationConfigAction.java
@@ -0,0 +1,18 @@
+package com.provectus.kafka.ui.model.rbac.permission;
+
+import org.apache.commons.lang3.EnumUtils;
+import org.jetbrains.annotations.Nullable;
+
+public enum ApplicationConfigAction implements PermissibleAction {
+
+ VIEW,
+ EDIT
+
+ ;
+
+ @Nullable
+ public static ApplicationConfigAction fromString(String name) {
+ return EnumUtils.getEnum(ApplicationConfigAction.class, name);
+ }
+
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java
index 2e057874e7..40ea320b2e 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java
@@ -89,21 +89,23 @@ public class SerdesInitializer {
Map<String, SerdeInstance> registeredSerdes = new LinkedHashMap<>();
// initializing serdes from config
- for (int i = 0; i < clusterProperties.getSerde().size(); i++) {
- SerdeConfig serdeConfig = clusterProperties.getSerde().get(i);
- if (Strings.isNullOrEmpty(serdeConfig.getName())) {
- throw new ValidationException("'name' property not set for serde: " + serdeConfig);
+ if (clusterProperties.getSerde() != null) {
+ for (int i = 0; i < clusterProperties.getSerde().size(); i++) {
+ SerdeConfig serdeConfig = clusterProperties.getSerde().get(i);
+ if (Strings.isNullOrEmpty(serdeConfig.getName())) {
+ throw new ValidationException("'name' property not set for serde: " + serdeConfig);
+ }
+ if (registeredSerdes.containsKey(serdeConfig.getName())) {
+ throw new ValidationException("Multiple serdes with same name: " + serdeConfig.getName());
+ }
+ var instance = createSerdeFromConfig(
+ serdeConfig,
+ new PropertyResolverImpl(env, "kafka.clusters." + clusterIndex + ".serde." + i + ".properties"),
+ clusterPropertiesResolver,
+ globalPropertiesResolver
+ );
+ registeredSerdes.put(serdeConfig.getName(), instance);
}
- if (registeredSerdes.containsKey(serdeConfig.getName())) {
- throw new ValidationException("Multiple serdes with same name: " + serdeConfig.getName());
- }
- var instance = createSerdeFromConfig(
- serdeConfig,
- new PropertyResolverImpl(env, "kafka.clusters." + clusterIndex + ".serde." + i + ".properties"),
- clusterPropertiesResolver,
- globalPropertiesResolver
- );
- registeredSerdes.put(serdeConfig.getName(), instance);
}
// initializing remaining built-in serdes with empty selection patterns
@@ -172,7 +174,7 @@ public class SerdesInitializer {
}
var clazz = builtInSerdeClasses.get(name);
BuiltInSerde serde = createSerdeInstance(clazz);
- if (serdeConfig.getProperties().isEmpty()) {
+ if (serdeConfig.getProperties() == null || serdeConfig.getProperties().isEmpty()) {
if (!autoConfigureSerde(serde, clusterProps, globalProps)) {
// no properties provided and serde does not support auto-configuration
throw new ValidationException(name + " serde is not configured");
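
The new null guard makes the per-cluster serde list optional; the intent, as a compact sketch (getSerde() is assumed to return a List<SerdeConfig>):

    List<SerdeConfig> serdeConfigs =
        Optional.ofNullable(clusterProperties.getSerde()).orElse(List.of());
    // iterate and register exactly as in the loop above
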
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java
index a4d7ee8891..fd5985e20c 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java
@@ -70,10 +70,10 @@ public class SchemaRegistrySerde implements BuiltInSerde {
urls,
kafkaClusterProperties.getProperty("schemaRegistryAuth.username", String.class).orElse(null),
kafkaClusterProperties.getProperty("schemaRegistryAuth.password", String.class).orElse(null),
- kafkaClusterProperties.getProperty("schemaRegistrySSL.keystoreLocation", String.class).orElse(null),
- kafkaClusterProperties.getProperty("schemaRegistrySSL.keystorePassword", String.class).orElse(null),
- kafkaClusterProperties.getProperty("schemaRegistrySSL.truststoreLocation", String.class).orElse(null),
- kafkaClusterProperties.getProperty("schemaRegistrySSL.truststorePassword", String.class).orElse(null)
+ kafkaClusterProperties.getProperty("schemaRegistrySsl.keystoreLocation", String.class).orElse(null),
+ kafkaClusterProperties.getProperty("schemaRegistrySsl.keystorePassword", String.class).orElse(null),
+ kafkaClusterProperties.getProperty("ssl.truststoreLocation", String.class).orElse(null),
+ kafkaClusterProperties.getProperty("ssl.truststorePassword", String.class).orElse(null)
),
kafkaClusterProperties.getProperty("schemaRegistryKeySchemaNameTemplate", String.class).orElse("%s-key"),
kafkaClusterProperties.getProperty("schemaRegistrySchemaNameTemplate", String.class).orElse("%s-value"),
@@ -98,12 +98,12 @@ public class SchemaRegistrySerde implements BuiltInSerde {
serdeProperties.getProperty("password", String.class).orElse(null),
serdeProperties.getProperty("keystoreLocation", String.class).orElse(null),
serdeProperties.getProperty("keystorePassword", String.class).orElse(null),
- serdeProperties.getProperty("truststoreLocation", String.class).orElse(null),
- serdeProperties.getProperty("truststorePassword", String.class).orElse(null)
+ kafkaClusterProperties.getProperty("ssl.truststoreLocation", String.class).orElse(null),
+ kafkaClusterProperties.getProperty("ssl.truststorePassword", String.class).orElse(null)
),
serdeProperties.getProperty("keySchemaNameTemplate", String.class).orElse("%s-key"),
serdeProperties.getProperty("schemaNameTemplate", String.class).orElse("%s-value"),
- kafkaClusterProperties.getProperty("checkSchemaExistenceForDeserialize", Boolean.class)
+ serdeProperties.getProperty("checkSchemaExistenceForDeserialize", Boolean.class)
.orElse(false)
);
}
@@ -148,15 +148,15 @@ public class SchemaRegistrySerde implements BuiltInSerde {
trustStoreLocation);
configs.put(SchemaRegistryClientConfig.CLIENT_NAMESPACE + SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG,
trustStorePassword);
+ }
- if (keyStoreLocation != null) {
- configs.put(SchemaRegistryClientConfig.CLIENT_NAMESPACE + SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG,
- keyStoreLocation);
- configs.put(SchemaRegistryClientConfig.CLIENT_NAMESPACE + SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG,
- keyStorePassword);
- configs.put(SchemaRegistryClientConfig.CLIENT_NAMESPACE + SslConfigs.SSL_KEY_PASSWORD_CONFIG,
- keyStorePassword);
- }
+ if (keyStoreLocation != null && keyStorePassword != null) {
+ configs.put(SchemaRegistryClientConfig.CLIENT_NAMESPACE + SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG,
+ keyStoreLocation);
+ configs.put(SchemaRegistryClientConfig.CLIENT_NAMESPACE + SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG,
+ keyStorePassword);
+ configs.put(SchemaRegistryClientConfig.CLIENT_NAMESPACE + SslConfigs.SSL_KEY_PASSWORD_CONFIG,
+ keyStorePassword);
}
return new CachedSchemaRegistryClient(
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/AdminClientServiceImpl.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/AdminClientServiceImpl.java
index 3589a07a47..886b67b928 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/AdminClientServiceImpl.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/AdminClientServiceImpl.java
@@ -1,10 +1,13 @@
package com.provectus.kafka.ui.service;
import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.util.SslPropertiesUtil;
import java.io.Closeable;
+import java.time.Instant;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicLong;
import lombok.RequiredArgsConstructor;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
@@ -18,6 +21,9 @@ import reactor.core.publisher.Mono;
@RequiredArgsConstructor
@Slf4j
public class AdminClientServiceImpl implements AdminClientService, Closeable {
+
+ private static final AtomicLong CLIENT_ID_SEQ = new AtomicLong();
+
private final Map<String, ReactiveAdminClient> adminClientCache = new ConcurrentHashMap<>();
@Setter // used in tests
@Value("${kafka.admin-client-timeout:30000}")
@@ -33,14 +39,16 @@ public class AdminClientServiceImpl implements AdminClientService, Closeable {
private Mono<ReactiveAdminClient> createAdminClient(KafkaCluster cluster) {
return Mono.fromSupplier(() -> {
Properties properties = new Properties();
+ SslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), properties);
properties.putAll(cluster.getProperties());
- properties
- .put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.getBootstrapServers());
+ properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.getBootstrapServers());
properties.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, clientTimeout);
- properties.putIfAbsent(AdminClientConfig.CLIENT_ID_CONFIG, "kafka-ui-admin-client-" + System.currentTimeMillis());
+ properties.putIfAbsent(
+ AdminClientConfig.CLIENT_ID_CONFIG,
+ "kafka-ui-admin-" + Instant.now().getEpochSecond() + "-" + CLIENT_ID_SEQ.incrementAndGet()
+ );
return AdminClient.create(properties);
- })
- .flatMap(ReactiveAdminClient::create)
+ }).flatMap(ac -> ReactiveAdminClient.create(ac).doOnError(th -> ac.close()))
.onErrorMap(th -> new IllegalStateException(
"Error while creating AdminClient for Cluster " + cluster.getName(), th));
}
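
Why the client-id scheme changed, as a sketch: System.currentTimeMillis() alone can collide when two admin clients are created in the same millisecond, while the epoch second plus a JVM-wide sequence cannot:

    AtomicLong seq = new AtomicLong();
    String a = "kafka-ui-admin-" + Instant.now().getEpochSecond() + "-" + seq.incrementAndGet();
    String b = "kafka-ui-admin-" + Instant.now().getEpochSecond() + "-" + seq.incrementAndGet();
    // a and b differ even when created within the same second

The reworked flatMap also closes the raw AdminClient if ReactiveAdminClient.create fails, so a failed wrap no longer leaks the underlying client.
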
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
index 1a74914ff4..024eb3df51 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
@@ -7,6 +7,7 @@ import com.provectus.kafka.ui.model.InternalTopicConsumerGroup;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.model.SortOrderDTO;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
+import com.provectus.kafka.ui.util.SslPropertiesUtil;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
@@ -214,6 +215,7 @@ public class ConsumerGroupService {
public KafkaConsumer<Bytes, Bytes> createConsumer(KafkaCluster cluster,
Map<String, Object> properties) {
Properties props = new Properties();
+ SslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), props);
props.putAll(cluster.getProperties());
props.put(ConsumerConfig.CLIENT_ID_CONFIG, "kafka-ui-consumer-" + System.currentTimeMillis());
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.getBootstrapServers());
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java
index b2694d9f7d..f5fbf26264 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java
@@ -1,6 +1,6 @@
package com.provectus.kafka.ui.service;
-import com.provectus.kafka.ui.model.Feature;
+import com.provectus.kafka.ui.model.ClusterFeature;
import com.provectus.kafka.ui.model.KafkaCluster;
import java.util.ArrayList;
import java.util.Collection;
@@ -25,27 +25,27 @@ public class FeatureService {
private final AdminClientService adminClientService;
- public Mono<List<Feature>> getAvailableFeatures(KafkaCluster cluster, @Nullable Node controller) {
- List<Mono<Feature>> features = new ArrayList<>();
+ public Mono<List<ClusterFeature>> getAvailableFeatures(KafkaCluster cluster, @Nullable Node controller) {
+ List<Mono<ClusterFeature>> features = new ArrayList<>();
if (Optional.ofNullable(cluster.getConnectsClients())
.filter(Predicate.not(Map::isEmpty))
.isPresent()) {
- features.add(Mono.just(Feature.KAFKA_CONNECT));
+ features.add(Mono.just(ClusterFeature.KAFKA_CONNECT));
}
if (cluster.getKsqlClient() != null) {
- features.add(Mono.just(Feature.KSQL_DB));
+ features.add(Mono.just(ClusterFeature.KSQL_DB));
}
if (cluster.getSchemaRegistryClient() != null) {
- features.add(Mono.just(Feature.SCHEMA_REGISTRY));
+ features.add(Mono.just(ClusterFeature.SCHEMA_REGISTRY));
}
if (controller != null) {
features.add(
isTopicDeletionEnabled(cluster, controller)
- .flatMap(r -> Boolean.TRUE.equals(r) ? Mono.just(Feature.TOPIC_DELETION) : Mono.empty())
+ .flatMap(r -> Boolean.TRUE.equals(r) ? Mono.just(ClusterFeature.TOPIC_DELETION) : Mono.empty())
);
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java
index aea92ab4c4..26a9d40647 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java
@@ -3,12 +3,15 @@ package com.provectus.kafka.ui.service;
import com.provectus.kafka.ui.client.RetryingKafkaConnectClient;
import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
+import com.provectus.kafka.ui.model.ApplicationPropertyValidationDTO;
+import com.provectus.kafka.ui.model.ClusterConfigValidationDTO;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.model.MetricsConfig;
import com.provectus.kafka.ui.service.ksql.KsqlApiClient;
import com.provectus.kafka.ui.service.masking.DataMasking;
import com.provectus.kafka.ui.sr.ApiClient;
import com.provectus.kafka.ui.sr.api.KafkaSrClientApi;
+import com.provectus.kafka.ui.util.KafkaServicesValidation;
import com.provectus.kafka.ui.util.PollingThrottler;
import com.provectus.kafka.ui.util.ReactiveFailover;
import com.provectus.kafka.ui.util.WebClientConfigurator;
@@ -20,13 +23,19 @@ import java.util.Properties;
import java.util.stream.Stream;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.util.unit.DataSize;
import org.springframework.web.reactive.function.client.WebClient;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+import reactor.util.function.Tuple2;
+import reactor.util.function.Tuples;
@Service
@RequiredArgsConstructor
+@Slf4j
public class KafkaClusterFactory {
@Value("${webclient.max-in-memory-buffer-size:20MB}")
@@ -37,50 +46,116 @@ public class KafkaClusterFactory {
builder.name(clusterProperties.getName());
builder.bootstrapServers(clusterProperties.getBootstrapServers());
- builder.properties(Optional.ofNullable(clusterProperties.getProperties()).orElse(new Properties()));
+ builder.properties(convertProperties(clusterProperties.getProperties()));
builder.readOnly(clusterProperties.isReadOnly());
builder.masking(DataMasking.create(clusterProperties.getMasking()));
- builder.metricsConfig(metricsConfigDataToMetricsConfig(clusterProperties.getMetrics()));
builder.throttler(PollingThrottler.throttlerSupplier(clusterProperties));
- builder.schemaRegistryClient(schemaRegistryClient(clusterProperties));
- builder.connectsClients(connectClients(clusterProperties));
- builder.ksqlClient(ksqlClient(clusterProperties));
-
+ if (schemaRegistryConfigured(clusterProperties)) {
+ builder.schemaRegistryClient(schemaRegistryClient(clusterProperties));
+ }
+ if (connectClientsConfigured(clusterProperties)) {
+ builder.connectsClients(connectClients(clusterProperties));
+ }
+ if (ksqlConfigured(clusterProperties)) {
+ builder.ksqlClient(ksqlClient(clusterProperties));
+ }
+ if (metricsConfigured(clusterProperties)) {
+ builder.metricsConfig(metricsConfigDataToMetricsConfig(clusterProperties.getMetrics()));
+ }
builder.originalProperties(clusterProperties);
-
return builder.build();
}
- @Nullable
+ public Mono<ClusterConfigValidationDTO> validate(ClustersProperties.Cluster clusterProperties) {
+ if (clusterProperties.getSsl() != null) {
+ Optional<String> errMsg = KafkaServicesValidation.validateTruststore(clusterProperties.getSsl());
+ if (errMsg.isPresent()) {
+ return Mono.just(new ClusterConfigValidationDTO()
+ .kafka(new ApplicationPropertyValidationDTO()
+ .error(true)
+ .errorMessage("Truststore not valid: " + errMsg.get())));
+ }
+ }
+
+ return Mono.zip(
+ KafkaServicesValidation.validateClusterConnection(
+ clusterProperties.getBootstrapServers(),
+ convertProperties(clusterProperties.getProperties()),
+ clusterProperties.getSsl()
+ ),
+ schemaRegistryConfigured(clusterProperties)
+ ? KafkaServicesValidation.validateSchemaRegistry(
+ () -> schemaRegistryClient(clusterProperties)).map(Optional::of)
+ : Mono.<Optional<ApplicationPropertyValidationDTO>>just(Optional.empty()),
+
+ ksqlConfigured(clusterProperties)
+ ? KafkaServicesValidation.validateKsql(() -> ksqlClient(clusterProperties)).map(Optional::of)
+ : Mono.<Optional<ApplicationPropertyValidationDTO>>just(Optional.empty()),
+
+ connectClientsConfigured(clusterProperties)
+ ?
+ Flux.fromIterable(clusterProperties.getKafkaConnect())
+ .flatMap(c ->
+ KafkaServicesValidation.validateConnect(() -> connectClient(clusterProperties, c))
+ .map(r -> Tuples.of(c.getName(), r)))
+ .collectMap(Tuple2::getT1, Tuple2::getT2)
+ .map(Optional::of)
+ :
+ Mono.<Optional<Map<String, ApplicationPropertyValidationDTO>>>just(Optional.empty())
+ ).map(tuple -> {
+ var validation = new ClusterConfigValidationDTO();
+ validation.kafka(tuple.getT1());
+ tuple.getT2().ifPresent(validation::schemaRegistry);
+ tuple.getT3().ifPresent(validation::ksqldb);
+ tuple.getT4().ifPresent(validation::kafkaConnects);
+ return validation;
+ });
+ }
+
+ private Properties convertProperties(Map<String, Object> propertiesMap) {
+ Properties properties = new Properties();
+ if (propertiesMap != null) {
+ properties.putAll(propertiesMap);
+ }
+ return properties;
+ }
+
+ private boolean connectClientsConfigured(ClustersProperties.Cluster clusterProperties) {
+ return clusterProperties.getKafkaConnect() != null;
+ }
+
private Map<String, ReactiveFailover<KafkaConnectClientApi>> connectClients(
ClustersProperties.Cluster clusterProperties) {
- if (clusterProperties.getKafkaConnect() == null) {
- return null;
- }
Map<String, ReactiveFailover<KafkaConnectClientApi>> connects = new HashMap<>();
- clusterProperties.getKafkaConnect().forEach(c -> {
- ReactiveFailover<KafkaConnectClientApi> failover = ReactiveFailover.create(
- parseUrlList(c.getAddress()),
- url -> new RetryingKafkaConnectClient(c.toBuilder().address(url).build(), maxBuffSize),
- ReactiveFailover.CONNECTION_REFUSED_EXCEPTION_FILTER,
- "No alive connect instances available",
- ReactiveFailover.DEFAULT_RETRY_GRACE_PERIOD_MS
- );
- connects.put(c.getName(), failover);
- });
+ clusterProperties.getKafkaConnect().forEach(c -> connects.put(c.getName(), connectClient(clusterProperties, c)));
return connects;
}
- @Nullable
+ private ReactiveFailover<KafkaConnectClientApi> connectClient(ClustersProperties.Cluster cluster,
+ ClustersProperties.ConnectCluster connectCluster) {
+ return ReactiveFailover.create(
+ parseUrlList(connectCluster.getAddress()),
+ url -> new RetryingKafkaConnectClient(
+ connectCluster.toBuilder().address(url).build(),
+ cluster.getSsl(),
+ maxBuffSize
+ ),
+ ReactiveFailover.CONNECTION_REFUSED_EXCEPTION_FILTER,
+ "No alive connect instances available",
+ ReactiveFailover.DEFAULT_RETRY_GRACE_PERIOD_MS
+ );
+ }
+
+ private boolean schemaRegistryConfigured(ClustersProperties.Cluster clusterProperties) {
+ return clusterProperties.getSchemaRegistry() != null;
+ }
+
private ReactiveFailover<KafkaSrClientApi> schemaRegistryClient(ClustersProperties.Cluster clusterProperties) {
- if (clusterProperties.getSchemaRegistry() == null) {
- return null;
- }
var auth = Optional.ofNullable(clusterProperties.getSchemaRegistryAuth())
.orElse(new ClustersProperties.SchemaRegistryAuth());
WebClient webClient = new WebClientConfigurator()
- .configureSsl(clusterProperties.getSchemaRegistrySsl())
+ .configureSsl(clusterProperties.getSsl(), clusterProperties.getSchemaRegistrySsl())
.configureBasicAuth(auth.getUsername(), auth.getPassword())
.configureBufferSize(maxBuffSize)
.build();
@@ -93,16 +168,17 @@ public class KafkaClusterFactory {
);
}
- @Nullable
+ private boolean ksqlConfigured(ClustersProperties.Cluster clusterProperties) {
+ return clusterProperties.getKsqldbServer() != null;
+ }
+
private ReactiveFailover<KsqlApiClient> ksqlClient(ClustersProperties.Cluster clusterProperties) {
- if (clusterProperties.getKsqldbServer() == null) {
- return null;
- }
return ReactiveFailover.create(
parseUrlList(clusterProperties.getKsqldbServer()),
url -> new KsqlApiClient(
url,
clusterProperties.getKsqldbServerAuth(),
+ clusterProperties.getSsl(),
clusterProperties.getKsqldbServerSsl(),
maxBuffSize
),
@@ -116,6 +192,10 @@ public class KafkaClusterFactory {
return Stream.of(url.split(",")).map(String::trim).filter(s -> !s.isBlank()).toList();
}
+ private boolean metricsConfigured(ClustersProperties.Cluster clusterProperties) {
+ return clusterProperties.getMetrics() != null;
+ }
+
@Nullable
private MetricsConfig metricsConfigDataToMetricsConfig(ClustersProperties.MetricsConfigData metricsConfigData) {
if (metricsConfigData == null) {
@@ -124,9 +204,11 @@ public class KafkaClusterFactory {
MetricsConfig.MetricsConfigBuilder builder = MetricsConfig.builder();
builder.type(metricsConfigData.getType());
builder.port(metricsConfigData.getPort());
- builder.ssl(metricsConfigData.isSsl());
+ builder.ssl(Optional.ofNullable(metricsConfigData.getSsl()).orElse(false));
builder.username(metricsConfigData.getUsername());
builder.password(metricsConfigData.getPassword());
+ builder.keystoreLocation(metricsConfigData.getKeystoreLocation());
+ builder.keystorePassword(metricsConfigData.getKeystorePassword());
return builder.build();
}
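
A minimal usage sketch for the new validate(..) entry point (the caller wiring is an assumption, not part of this patch):

    kafkaClusterFactory.validate(clusterProperties)
        .subscribe(validation -> {
          // validation.getKafka() reports broker connectivity;
          // schemaRegistry/ksqldb/kafkaConnects fields are set only when configured
        });
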
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java
index eaf6fbb889..d1f0e261a8 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java
@@ -18,6 +18,7 @@ import com.provectus.kafka.ui.serde.api.Serde;
import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer;
import com.provectus.kafka.ui.serdes.ProducerRecordCreator;
import com.provectus.kafka.ui.util.ResultSizeLimiter;
+import com.provectus.kafka.ui.util.SslPropertiesUtil;
import java.util.List;
import java.util.Map;
import java.util.Properties;
@@ -108,6 +109,7 @@ public class MessagesService {
);
Properties properties = new Properties();
+ SslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), properties);
properties.putAll(cluster.getProperties());
properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.getBootstrapServers());
properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
index 7cdf2ef16d..19d06a0c48 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
@@ -10,7 +10,7 @@ import com.google.common.collect.Table;
import com.provectus.kafka.ui.exception.IllegalEntityStateException;
import com.provectus.kafka.ui.exception.NotFoundException;
import com.provectus.kafka.ui.exception.ValidationException;
-import com.provectus.kafka.ui.util.NumberUtil;
+import com.provectus.kafka.ui.util.KafkaVersion;
import com.provectus.kafka.ui.util.annotation.KafkaClientInternalsDependant;
import java.io.Closeable;
import java.util.ArrayList;
@@ -123,7 +123,7 @@ public class ReactiveAdminClient implements Closeable {
private static Set<SupportedFeature> getSupportedUpdateFeaturesForVersion(String versionStr) {
try {
- float version = NumberUtil.parserClusterVersion(versionStr);
+ float version = KafkaVersion.parse(versionStr);
return SupportedFeature.forVersion(version);
} catch (NumberFormatException e) {
return SupportedFeature.defaultFeatures();
@@ -132,7 +132,7 @@ public class ReactiveAdminClient implements Closeable {
// NOTE: if KafkaFuture returns null, that Mono will be empty(!), since Reactor does not support nullable results
// (see MonoSink.success(..) javadoc for details)
- private static <T> Mono<T> toMono(KafkaFuture<T> future) {
+ public static <T> Mono<T> toMono(KafkaFuture<T> future) {
return Mono.create(sink -> future.whenComplete((res, ex) -> {
if (ex != null) {
// KafkaFuture doc is unclear about what exception wrapper will be used
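
Since toMono(..) is now public, other services can adapt any KafkaFuture into a Mono; a sketch:

    Mono<Collection<TopicListing>> topics =
        ReactiveAdminClient.toMono(adminClient.listTopics().listings());
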
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java
index e6dbb27ee9..a36a64ff6d 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java
@@ -2,7 +2,7 @@ package com.provectus.kafka.ui.service;
import static com.provectus.kafka.ui.service.ReactiveAdminClient.ClusterDescription;
-import com.provectus.kafka.ui.model.Feature;
+import com.provectus.kafka.ui.model.ClusterFeature;
import com.provectus.kafka.ui.model.InternalLogDirStats;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.model.Metrics;
@@ -51,7 +51,7 @@ public class StatisticsService {
.version(ac.getVersion())
.metrics((Metrics) results[0])
.logDirInfo((InternalLogDirStats) results[1])
- .features((List<Feature>) results[2])
+ .features((List<ClusterFeature>) results[2])
.topicConfigs((Map<String, List<ConfigEntry>>) results[3])
.topicDescriptions((Map<String, TopicDescription>) results[4])
.build()
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java
index b903867726..b172e2b4c6 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java
@@ -7,7 +7,7 @@ import com.provectus.kafka.ui.exception.TopicMetadataException;
import com.provectus.kafka.ui.exception.TopicNotFoundException;
import com.provectus.kafka.ui.exception.TopicRecreationException;
import com.provectus.kafka.ui.exception.ValidationException;
-import com.provectus.kafka.ui.model.Feature;
+import com.provectus.kafka.ui.model.ClusterFeature;
import com.provectus.kafka.ui.model.InternalLogDirStats;
import com.provectus.kafka.ui.model.InternalPartition;
import com.provectus.kafka.ui.model.InternalPartitionsOffsets;
@@ -422,7 +422,7 @@ public class TopicsService {
}
public Mono<Void> deleteTopic(KafkaCluster cluster, String topicName) {
- if (statisticsCache.get(cluster).getFeatures().contains(Feature.TOPIC_DELETION)) {
+ if (statisticsCache.get(cluster).getFeatures().contains(ClusterFeature.TOPIC_DELETION)) {
return adminClientService.get(cluster).flatMap(c -> c.deleteTopic(topicName))
.doOnSuccess(t -> statisticsCache.onTopicDelete(cluster, topicName));
} else {
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlApiClient.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlApiClient.java
index e0b95c522a..fd68add726 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlApiClient.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlApiClient.java
@@ -43,12 +43,13 @@ public class KsqlApiClient {
UndefineVariableContext.class
);
- @Builder
+ @Builder(toBuilder = true)
@Value
public static class KsqlResponseTable {
String header;
List<String> columnNames;
List<List<JsonNode>> values;
+ boolean error;
public Optional<JsonNode> getColumnValue(List<JsonNode> row, String column) {
return Optional.ofNullable(row.get(columnNames.indexOf(column)));
@@ -68,26 +69,22 @@ public class KsqlApiClient {
public KsqlApiClient(String baseUrl,
@Nullable ClustersProperties.KsqldbServerAuth ksqldbServerAuth,
- @Nullable ClustersProperties.WebClientSsl ksqldbServerSsl,
+ @Nullable ClustersProperties.TruststoreConfig ksqldbServerSsl,
+ @Nullable ClustersProperties.KeystoreConfig keystoreConfig,
@Nullable DataSize maxBuffSize) {
this.baseUrl = baseUrl;
- this.webClient = webClient(ksqldbServerAuth, ksqldbServerSsl, maxBuffSize);
+ this.webClient = webClient(ksqldbServerAuth, ksqldbServerSsl, keystoreConfig, maxBuffSize);
}
private static WebClient webClient(@Nullable ClustersProperties.KsqldbServerAuth ksqldbServerAuth,
- @Nullable ClustersProperties.WebClientSsl ksqldbServerSsl,
+ @Nullable ClustersProperties.TruststoreConfig truststoreConfig,
+ @Nullable ClustersProperties.KeystoreConfig keystoreConfig,
@Nullable DataSize maxBuffSize) {
ksqldbServerAuth = Optional.ofNullable(ksqldbServerAuth).orElse(new ClustersProperties.KsqldbServerAuth());
- ksqldbServerSsl = Optional.ofNullable(ksqldbServerSsl).orElse(new ClustersProperties.WebClientSsl());
maxBuffSize = Optional.ofNullable(maxBuffSize).orElse(DataSize.ofMegabytes(20));
return new WebClientConfigurator()
- .configureSsl(
- ksqldbServerSsl.getKeystoreLocation(),
- ksqldbServerSsl.getKeystorePassword(),
- ksqldbServerSsl.getTruststoreLocation(),
- ksqldbServerSsl.getTruststorePassword()
- )
+ .configureSsl(truststoreConfig, keystoreConfig)
.configureBasicAuth(
ksqldbServerAuth.getUsername(),
ksqldbServerAuth.getPassword()
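
With @Builder(toBuilder = true), a parser can flag an already-built table as failed without reconstructing it field by field, e.g.:

    KsqlApiClient.KsqlResponseTable errored = table.toBuilder().error(true).build();
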
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/response/ResponseParser.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/response/ResponseParser.java
index 647e23a78e..cd91fa57dc 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/response/ResponseParser.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/response/ResponseParser.java
@@ -74,13 +74,17 @@ public class ResponseParser {
.header("Execution error")
.columnNames(List.of("message"))
.values(List.of(List.of(new TextNode(errorText))))
+ .error(true)
.build();
}
public static KsqlApiClient.KsqlResponseTable parseErrorResponse(WebClientResponseException e) {
try {
var errBody = new JsonMapper().readTree(e.getResponseBodyAsString());
- return DynamicParser.parseObject("Execution error", errBody);
+ return DynamicParser.parseObject("Execution error", errBody)
+ .toBuilder()
+ .error(true)
+ .build();
} catch (Exception ex) {
return errorTableWithTextMsg(
String.format(
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/DataMasking.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/DataMasking.java
index a8a69ad752..78e74f3332 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/DataMasking.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/DataMasking.java
@@ -41,9 +41,9 @@ public class DataMasking {
private final List<Mask> masks;
- public static DataMasking create(List<ClustersProperties.Masking> config) {
+ public static DataMasking create(@Nullable List<ClustersProperties.Masking> config) {
return new DataMasking(
- config.stream().map(property -> {
+ Optional.ofNullable(config).orElse(List.of()).stream().map(property -> {
Preconditions.checkNotNull(property.getType(), "masking type not specified");
Preconditions.checkArgument(
StringUtils.isNotEmpty(property.getTopicKeysPattern())
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Mask.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Mask.java
index 1459e95f00..dbbc5d131a 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Mask.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Mask.java
@@ -11,6 +11,8 @@ import java.util.function.UnaryOperator;
class Mask extends MaskingPolicy {
+ static final List<String> DEFAULT_PATTERN = List.of("X", "x", "n", "-");
+
private final UnaryOperator<String> masker;
Mask(List<String> fieldNames, List<String> maskingChars) {
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/MaskingPolicy.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/MaskingPolicy.java
index 25a463a9da..7a75338210 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/MaskingPolicy.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/MaskingPolicy.java
@@ -1,7 +1,6 @@
package com.provectus.kafka.ui.service.masking.policies;
import com.fasterxml.jackson.databind.node.ContainerNode;
-import com.google.common.base.Preconditions;
import com.provectus.kafka.ui.config.ClustersProperties;
import java.util.List;
import lombok.RequiredArgsConstructor;
@@ -9,15 +8,28 @@ import lombok.RequiredArgsConstructor;
@RequiredArgsConstructor
public abstract class MaskingPolicy {
+
public static MaskingPolicy create(ClustersProperties.Masking property) {
- Preconditions.checkNotNull(property.getFields());
+ List<String> fields = property.getFields() == null
+ ? List.of() // empty list means that policy will be applied to all fields
+ : property.getFields();
switch (property.getType()) {
case REMOVE:
- return new Remove(property.getFields());
+ return new Remove(fields);
case REPLACE:
- return new Replace(property.getFields(), property.getReplacement());
+ return new Replace(
+ fields,
+ property.getReplacement() == null
+ ? Replace.DEFAULT_REPLACEMENT
+ : property.getReplacement()
+ );
case MASK:
- return new Mask(property.getFields(), property.getPattern());
+ return new Mask(
+ fields,
+ property.getPattern() == null
+ ? Mask.DEFAULT_PATTERN
+ : property.getPattern()
+ );
default:
throw new IllegalStateException("Unknown policy type: " + property.getType());
}
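
A sketch of the defaults now applied when optional masking fields are omitted (accessor names are assumed from ClustersProperties, not confirmed by this patch):

    var masking = new ClustersProperties.Masking();
    masking.setType(ClustersProperties.Masking.Type.REPLACE); // only the type is mandatory
    MaskingPolicy policy = MaskingPolicy.create(masking);
    // fields -> List.of() (applies to all fields), replacement -> "***DATA_MASKED***"
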
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Replace.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Replace.java
index a335730258..3af645cb11 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Replace.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Replace.java
@@ -10,6 +10,8 @@ import java.util.List;
class Replace extends MaskingPolicy {
+ static final String DEFAULT_REPLACEMENT = "***DATA_MASKED***";
+
private final String replacement;
Replace(List<String> fieldNames, String replacementString) {
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/JmxMetricsRetriever.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/JmxMetricsRetriever.java
index 8a6e10656d..78f5bdeced 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/JmxMetricsRetriever.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/JmxMetricsRetriever.java
@@ -1,21 +1,22 @@
package com.provectus.kafka.ui.service.metrics;
-import com.provectus.kafka.ui.model.JmxConnectionInfo;
import com.provectus.kafka.ui.model.KafkaCluster;
-import com.provectus.kafka.ui.util.JmxPoolFactory;
+import java.io.Closeable;
import java.util.ArrayList;
-import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
+import java.util.function.Consumer;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import javax.management.remote.JMXConnector;
+import javax.management.remote.JMXConnectorFactory;
+import javax.management.remote.JMXServiceURL;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.pool2.impl.GenericKeyedObjectPool;
-import org.apache.commons.pool2.impl.GenericKeyedObjectPoolConfig;
+import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.common.Node;
-import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@@ -23,68 +24,102 @@ import reactor.core.scheduler.Schedulers;
@Service
-@Lazy
@Slf4j
-class JmxMetricsRetriever implements MetricsRetriever, AutoCloseable {
+class JmxMetricsRetriever implements MetricsRetriever, Closeable {
+
+ private static final boolean SSL_JMX_SUPPORTED;
+
+ static {
+ // see JmxSslSocketFactory doc for details
+ SSL_JMX_SUPPORTED = JmxSslSocketFactory.initialized();
+ }
private static final String JMX_URL = "service:jmx:rmi:///jndi/rmi://";
private static final String JMX_SERVICE_TYPE = "jmxrmi";
private static final String CANONICAL_NAME_PATTERN = "kafka.server*:*";
- private final GenericKeyedObjectPool<JmxConnectionInfo, JMXConnector> pool;
-
- public JmxMetricsRetriever() {
- this.pool = new GenericKeyedObjectPool<>(new JmxPoolFactory());
- GenericKeyedObjectPoolConfig<JMXConnector> poolConfig = new GenericKeyedObjectPoolConfig<>();
- poolConfig.setMaxIdlePerKey(3);
- poolConfig.setMaxTotalPerKey(3);
- this.pool.setConfig(poolConfig);
+ @Override
+ public void close() {
+ JmxSslSocketFactory.clearFactoriesCache();
}
@Override
public Flux<RawMetric> retrieve(KafkaCluster c, Node node) {
+ if (isSslJmxEndpoint(c) && !SSL_JMX_SUPPORTED) {
+ log.warn("Cluster {} has jmx ssl configured, but it is not supported", c.getName());
+ return Flux.empty();
+ }
return Mono.fromSupplier(() -> retrieveSync(c, node))
.subscribeOn(Schedulers.boundedElastic())
.flatMapMany(Flux::fromIterable);
}
+ private boolean isSslJmxEndpoint(KafkaCluster cluster) {
+ return cluster.getMetricsConfig().getKeystoreLocation() != null;
+ }
+
+ @SneakyThrows
private List<RawMetric> retrieveSync(KafkaCluster c, Node node) {
String jmxUrl = JMX_URL + node.host() + ":" + c.getMetricsConfig().getPort() + "/" + JMX_SERVICE_TYPE;
log.debug("Collection JMX metrics for {}", jmxUrl);
- final var connectionInfo = JmxConnectionInfo.builder()
- .url(jmxUrl)
- .ssl(c.getMetricsConfig().isSsl())
- .username(c.getMetricsConfig().getUsername())
- .password(c.getMetricsConfig().getPassword())
- .build();
- JMXConnector srv;
- try {
- srv = pool.borrowObject(connectionInfo);
- } catch (Exception e) {
- log.error("Cannot get JMX connector for the pool due to: ", e);
- return Collections.emptyList();
- }
List<RawMetric> result = new ArrayList<>();
- try {
- MBeanServerConnection msc = srv.getMBeanServerConnection();
- var jmxMetrics = msc.queryNames(new ObjectName(CANONICAL_NAME_PATTERN), null);
- for (ObjectName jmxMetric : jmxMetrics) {
- result.addAll(extractObjectMetrics(jmxMetric, msc));
- }
- pool.returnObject(connectionInfo, srv);
- } catch (Exception e) {
- log.error("Error getting jmx metrics from {}", jmxUrl, e);
- closeConnectionExceptionally(jmxUrl, srv);
- }
+ withJmxConnector(jmxUrl, c, jmxConnector -> getMetricsFromJmx(jmxConnector, result));
log.debug("{} metrics collected for {}", result.size(), jmxUrl);
return result;
}
- private void closeConnectionExceptionally(String url, JMXConnector srv) {
+ private void withJmxConnector(String jmxUrl,
+ KafkaCluster c,
+ Consumer<JMXConnector> consumer) {
+ var env = prepareJmxEnvAndSetThreadLocal(c);
try {
- pool.invalidateObject(new JmxConnectionInfo(url), srv);
+ JMXConnector connector = null;
+ try {
+ connector = JMXConnectorFactory.newJMXConnector(new JMXServiceURL(jmxUrl), env);
+ connector.connect(env);
+ } catch (Exception exception) {
+ log.error("Error connecting to {}", jmxUrl, exception);
+ return;
+ }
+ consumer.accept(connector);
+ connector.close();
} catch (Exception e) {
- log.error("Cannot invalidate object in pool, {}", url, e);
+ log.error("Error getting jmx metrics from {}", jmxUrl, e);
+ } finally {
+ JmxSslSocketFactory.clearThreadLocalContext();
+ }
+ }
+
+ private Map<String, Object> prepareJmxEnvAndSetThreadLocal(KafkaCluster cluster) {
+ var metricsConfig = cluster.getMetricsConfig();
+ Map<String, Object> env = new HashMap<>();
+ if (isSslJmxEndpoint(cluster)) {
+ var clusterSsl = cluster.getOriginalProperties().getSsl();
+ JmxSslSocketFactory.setSslContextThreadLocal(
+ clusterSsl != null ? clusterSsl.getTruststoreLocation() : null,
+ clusterSsl != null ? clusterSsl.getTruststorePassword() : null,
+ metricsConfig.getKeystoreLocation(),
+ metricsConfig.getKeystorePassword()
+ );
+ JmxSslSocketFactory.editJmxConnectorEnv(env);
+ }
+
+ if (StringUtils.isNotEmpty(metricsConfig.getUsername())
+ && StringUtils.isNotEmpty(metricsConfig.getPassword())) {
+ env.put(
+ JMXConnector.CREDENTIALS,
+ new String[] {metricsConfig.getUsername(), metricsConfig.getPassword()}
+ );
+ }
+ return env;
+ }
+
+ @SneakyThrows
+ private void getMetricsFromJmx(JMXConnector jmxConnector, List<RawMetric> sink) {
+ MBeanServerConnection msc = jmxConnector.getMBeanServerConnection();
+ var jmxMetrics = msc.queryNames(new ObjectName(CANONICAL_NAME_PATTERN), null);
+ for (ObjectName jmxMetric : jmxMetrics) {
+ sink.addAll(extractObjectMetrics(jmxMetric, msc));
}
}
@@ -98,9 +133,5 @@ class JmxMetricsRetriever implements MetricsRetriever, AutoCloseable {
return JmxMetricsFormatter.constructMetricsList(objectName, attrNames, attrValues);
}
- @Override
- public void close() {
- this.pool.close();
- }
}
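
The ThreadLocal handshake that replaces the old connection pool, summarized as a sketch of one retrieval:

    // 1. stash this cluster's ssl material on the current thread
    JmxSslSocketFactory.setSslContextThreadLocal(trustLoc, trustPwd, keyLoc, keyPwd);
    // 2. route RMI through SslRMIClientSocketFactory (whose default factory was swapped via reflection)
    JmxSslSocketFactory.editJmxConnectorEnv(env);
    // 3. connect synchronously - the factory reads the thread-local and caches itself per host:port
    JMXConnector connector = JMXConnectorFactory.newJMXConnector(new JMXServiceURL(jmxUrl), env);
    connector.connect(env);
    // 4. always clear the thread-local afterwards
    JmxSslSocketFactory.clearThreadLocalContext();
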
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/JmxSslSocketFactory.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/JmxSslSocketFactory.java
new file mode 100644
index 0000000000..06304365c7
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/JmxSslSocketFactory.java
@@ -0,0 +1,218 @@
+package com.provectus.kafka.ui.service.metrics;
+
+import com.google.common.base.Preconditions;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.net.InetAddress;
+import java.net.Socket;
+import java.net.UnknownHostException;
+import java.security.KeyStore;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import javax.annotation.Nullable;
+import javax.net.ssl.KeyManagerFactory;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.TrustManagerFactory;
+import javax.rmi.ssl.SslRMIClientSocketFactory;
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.util.ResourceUtils;
+
+/*
+ * The purpose of this class is to provide the ability to connect to different JMX endpoints using different keystores.
+ *
+ * Usually, when you want to establish an SSL JMX connection you set the "com.sun.jndi.rmi.factory.socket" env
+ * property to an SslRMIClientSocketFactory instance. SslRMIClientSocketFactory itself uses SSLSocketFactory.getDefault()
+ * as a socket factory implementation. The problem here is that once one SslRMIClientSocketFactory instance is created,
+ * the same cached SSLSocketFactory instance will be used to establish connections with *all* JMX endpoints.
+ * Moreover, even if we supply a custom SslRMIClientSocketFactory implementation that takes a specific ssl context
+ * into account, new SslRMIClientSocketFactory instances are
+ * internally created during RMI calls.
+ *
+ * So, the only way we found to deal with this is to change the internal field ('defaultSocketFactory') of
+ * SslRMIClientSocketFactory to our custom impl, and let all internal RMI code work as is.
+ * Since RMI code is synchronous, we can pass the parameters we want to use when creating the ssl socket
+ * (truststore/keystore) to our custom factory via ThreadLocal variables.
+ *
+ * NOTE 1: Theoretically we could avoid using reflection to set the internal field by
+ * setting the "ssl.SocketFactory.provider" security property (see code in SSLSocketFactory.getDefault()),
+ * but that code uses the system classloader, which does not work correctly when we're creating an executable spring boot jar
+ * (https://docs.spring.io/spring-boot/docs/current/reference/html/executable-jar.html#appendix.executable-jar.restrictions).
+ * We can use this approach if we switch to other jar-packing solutions in the future.
+ *
+ * NOTE 2: There are two paths from which the socket factory is called - when a jmx connection is established (we manage
+ * this by passing ThreadLocal vars) and from DGCClient in a background thread - we deal with the latter by caching
+ * created factories per host+port.
+ *
+ */
+@Slf4j
+class JmxSslSocketFactory extends javax.net.ssl.SSLSocketFactory {
+
+ private static final boolean SSL_JMX_SUPPORTED;
+
+ static {
+ boolean sslJmxSupported = false;
+ try {
+ Field defaultSocketFactoryField = SslRMIClientSocketFactory.class.getDeclaredField("defaultSocketFactory");
+ defaultSocketFactoryField.setAccessible(true);
+ defaultSocketFactoryField.set(null, new JmxSslSocketFactory());
+ sslJmxSupported = true;
+ } catch (Exception e) {
+ log.error("----------------------------------");
+ log.error("SSL can't be enabled for JMX retrieval. "
+ + "Make sure your java app run with '--add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED' arg.", e);
+ log.error("----------------------------------");
+ }
+ SSL_JMX_SUPPORTED = sslJmxSupported;
+ }
+
+ public static boolean initialized() {
+ return SSL_JMX_SUPPORTED;
+ }
+
+ private static final ThreadLocal<Ssl> SSL_CONTEXT_THREAD_LOCAL = new ThreadLocal<>();
+
+ private static final Map<HostAndPort, javax.net.ssl.SSLSocketFactory> CACHED_FACTORIES = new ConcurrentHashMap<>();
+
+ private record HostAndPort(String host, int port) {
+ }
+
+ private record Ssl(@Nullable String truststoreLocation,
+ @Nullable String truststorePassword,
+ @Nullable String keystoreLocation,
+ @Nullable String keystorePassword) {
+ }
+
+ public static void setSslContextThreadLocal(@Nullable String truststoreLocation,
+ @Nullable String truststorePassword,
+ @Nullable String keystoreLocation,
+ @Nullable String keystorePassword) {
+ SSL_CONTEXT_THREAD_LOCAL.set(
+ new Ssl(truststoreLocation, truststorePassword, keystoreLocation, keystorePassword));
+ }
+
+ // should be called when (host:port) -> factory cache should be invalidated (ex. on app config reload)
+ public static void clearFactoriesCache() {
+ CACHED_FACTORIES.clear();
+ }
+
+ public static void clearThreadLocalContext() {
+ SSL_CONTEXT_THREAD_LOCAL.set(null);
+ }
+
+ public static void editJmxConnectorEnv(Map<String, Object> env) {
+ env.put("com.sun.jndi.rmi.factory.socket", new SslRMIClientSocketFactory());
+ }
+
+ //-----------------------------------------------------------------------------------------------
+
+ private final javax.net.ssl.SSLSocketFactory defaultSocketFactory;
+
+ @SneakyThrows
+ public JmxSslSocketFactory() {
+ this.defaultSocketFactory = SSLContext.getDefault().getSocketFactory();
+ }
+
+ @SneakyThrows
+ private javax.net.ssl.SSLSocketFactory createFactoryFromThreadLocalCtx() {
+ Ssl ssl = Preconditions.checkNotNull(SSL_CONTEXT_THREAD_LOCAL.get());
+
+ var trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
+ if (ssl.truststoreLocation() != null && ssl.truststorePassword() != null) {
+ KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
+ trustStore.load(
+ new FileInputStream((ResourceUtils.getFile(ssl.truststoreLocation()))),
+ ssl.truststorePassword().toCharArray()
+ );
+ trustManagerFactory.init(trustStore);
+ }
+
+ var keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
+ if (ssl.keystoreLocation() != null && ssl.keystorePassword() != null) {
+ KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
+ keyStore.load(
+ new FileInputStream(ResourceUtils.getFile(ssl.keystoreLocation())),
+ ssl.keystorePassword().toCharArray()
+ );
+ keyManagerFactory.init(keyStore, ssl.keystorePassword().toCharArray());
+ }
+
+ SSLContext ctx = SSLContext.getInstance("TLS");
+ ctx.init(
+ keyManagerFactory.getKeyManagers(),
+ trustManagerFactory.getTrustManagers(),
+ null
+ );
+ return ctx.getSocketFactory();
+ }
+
+ private boolean threadLocalContextSet() {
+ return SSL_CONTEXT_THREAD_LOCAL.get() != null;
+ }
+
+ @Override
+ public Socket createSocket(String host, int port) throws IOException {
+ var hostAndPort = new HostAndPort(host, port);
+ if (CACHED_FACTORIES.containsKey(hostAndPort)) {
+ return CACHED_FACTORIES.get(hostAndPort).createSocket(host, port);
+ } else if (threadLocalContextSet()) {
+ var factory = createFactoryFromThreadLocalCtx();
+ CACHED_FACTORIES.put(hostAndPort, factory);
+ return factory.createSocket(host, port);
+ }
+ return defaultSocketFactory.createSocket(host, port);
+ }
+
+ /// FOLLOWING METHODS WON'T BE USED DURING JMX INTERACTION, IMPLEMENTING THEM JUST FOR CONSISTENCY ->>>>>
+
+ @Override
+ public Socket createSocket(Socket s, String host, int port, boolean autoClose) throws IOException {
+ if (threadLocalContextSet()) {
+ return createFactoryFromThreadLocalCtx().createSocket(s, host, port, autoClose);
+ }
+ return defaultSocketFactory.createSocket(s, host, port, autoClose);
+ }
+
+ @Override
+ public Socket createSocket(String host, int port, InetAddress localHost, int localPort)
+ throws IOException, UnknownHostException {
+ if (threadLocalContextSet()) {
+ return createFactoryFromThreadLocalCtx().createSocket(host, port, localHost, localPort);
+ }
+ return defaultSocketFactory.createSocket(host, port, localHost, localPort);
+ }
+
+ @Override
+ public Socket createSocket(InetAddress host, int port) throws IOException {
+ if (threadLocalContextSet()) {
+ return createFactoryFromThreadLocalCtx().createSocket(host, port);
+ }
+ return defaultSocketFactory.createSocket(host, port);
+ }
+
+ @Override
+ public Socket createSocket(InetAddress address, int port, InetAddress localAddress, int localPort)
+ throws IOException {
+ if (threadLocalContextSet()) {
+ return createFactoryFromThreadLocalCtx().createSocket(address, port, localAddress, localPort);
+ }
+ return defaultSocketFactory.createSocket(address, port, localAddress, localPort);
+ }
+
+ @Override
+ public String[] getDefaultCipherSuites() {
+ if (threadLocalContextSet()) {
+ return createFactoryFromThreadLocalCtx().getDefaultCipherSuites();
+ }
+ return defaultSocketFactory.getDefaultCipherSuites();
+ }
+
+ @Override
+ public String[] getSupportedCipherSuites() {
+ if (threadLocalContextSet()) {
+ return createFactoryFromThreadLocalCtx().getSupportedCipherSuites();
+ }
+ return defaultSocketFactory.getSupportedCipherSuites();
+ }
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/PrometheusMetricsRetriever.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/PrometheusMetricsRetriever.java
index e4cb4c36cb..33ef1b8072 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/PrometheusMetricsRetriever.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/PrometheusMetricsRetriever.java
@@ -2,53 +2,58 @@ package com.provectus.kafka.ui.service.metrics;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
+import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.model.MetricsConfig;
+import com.provectus.kafka.ui.util.WebClientConfigurator;
import java.util.Arrays;
import java.util.Optional;
-import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.Node;
import org.springframework.stereotype.Service;
+import org.springframework.util.unit.DataSize;
import org.springframework.web.reactive.function.client.WebClient;
import org.springframework.web.util.UriComponentsBuilder;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@Service
-@RequiredArgsConstructor
@Slf4j
class PrometheusMetricsRetriever implements MetricsRetriever {
private static final String METRICS_ENDPOINT_PATH = "/metrics";
private static final int DEFAULT_EXPORTER_PORT = 11001;
- private final WebClient webClient;
-
@Override
public Flux<RawMetric> retrieve(KafkaCluster c, Node node) {
log.debug("Retrieving metrics from prometheus exporter: {}:{}", node.host(), c.getMetricsConfig().getPort());
- return retrieve(node.host(), c.getMetricsConfig());
+
+ MetricsConfig metricsConfig = c.getMetricsConfig();
+ var webClient = new WebClientConfigurator()
+ .configureBufferSize(DataSize.ofMegabytes(20))
+ .configureBasicAuth(metricsConfig.getUsername(), metricsConfig.getPassword())
+ .configureSsl(
+ c.getOriginalProperties().getSsl(),
+ new ClustersProperties.KeystoreConfig(
+ metricsConfig.getKeystoreLocation(),
+ metricsConfig.getKeystorePassword()))
+ .build();
+
+ return retrieve(webClient, node.host(), c.getMetricsConfig());
}
@VisibleForTesting
- Flux<RawMetric> retrieve(String host, MetricsConfig metricsConfig) {
+ Flux<RawMetric> retrieve(WebClient webClient, String host, MetricsConfig metricsConfig) {
int port = Optional.ofNullable(metricsConfig.getPort()).orElse(DEFAULT_EXPORTER_PORT);
-
+ boolean sslEnabled = metricsConfig.isSsl() || metricsConfig.getKeystoreLocation() != null;
var request = webClient.get()
.uri(UriComponentsBuilder.newInstance()
- .scheme(metricsConfig.isSsl() ? "https" : "http")
+ .scheme(sslEnabled ? "https" : "http")
.host(host)
.port(port)
.path(METRICS_ENDPOINT_PATH).build().toUri());
- if (metricsConfig.getUsername() != null && metricsConfig.getPassword() != null) {
- request.headers(
- httpHeaders -> httpHeaders.setBasicAuth(metricsConfig.getUsername(), metricsConfig.getPassword()));
- }
-
WebClient.ResponseSpec responseSpec = request.retrieve();
-
return responseSpec.bodyToMono(String.class)
.doOnError(e -> log.error("Error while getting metrics from {}", host, e))
.onErrorResume(th -> Mono.empty())
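
Design note on the change above: the WebClient is now built inside retrieve() so each cluster's truststore, keystore and basic-auth settings apply to its own exporter, at the cost of rebuilding the client on every poll. The scheme selection reduces to:

    boolean sslEnabled = metricsConfig.isSsl() || metricsConfig.getKeystoreLocation() != null;
    String scheme = sslEnabled ? "https" : "http"; // a configured keystore implies https
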
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java
index 32cde63adb..ee17d21111 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java
@@ -1,5 +1,7 @@
package com.provectus.kafka.ui.service.rbac;
+import static com.provectus.kafka.ui.model.rbac.Resource.APPLICATIONCONFIG;
+
import com.provectus.kafka.ui.config.auth.AuthenticatedUser;
import com.provectus.kafka.ui.config.auth.RbacUser;
import com.provectus.kafka.ui.config.auth.RoleBasedAccessControlProperties;
@@ -55,7 +57,7 @@ public class AccessControlService {
@PostConstruct
public void init() {
- if (properties.getRoles().isEmpty()) {
+ if (CollectionUtils.isEmpty(properties.getRoles())) {
log.trace("No roles provided, disabling RBAC");
return;
}
@@ -88,7 +90,8 @@ public class AccessControlService {
return getUser()
.doOnNext(user -> {
boolean accessGranted =
- isClusterAccessible(context, user)
+ isApplicationConfigAccessible(context, user)
+ && isClusterAccessible(context, user)
&& isClusterConfigAccessible(context, user)
&& isTopicAccessible(context, user)
&& isConsumerGroupAccessible(context, user)
@@ -112,6 +115,20 @@ public class AccessControlService {
.map(user -> new AuthenticatedUser(user.name(), user.groups()));
}
+ public boolean isApplicationConfigAccessible(AccessContext context, AuthenticatedUser user) {
+ if (!rbacEnabled) {
+ return true;
+ }
+ if (CollectionUtils.isEmpty(context.getApplicationConfigActions())) {
+ return true;
+ }
+ Set