From 270d52882e881a51730d6f03347fe2c312877b7e Mon Sep 17 00:00:00 2001 From: Roman Zabaluev Date: Mon, 20 Mar 2023 12:59:14 +0300 Subject: [PATCH 1/9] Revert "[Infra] A first try of reusable workflows (#3497)" (#3519) This reverts commit 36112fa26b89e490c869bf6e176c53d853e65c33. --- .github/workflows/branch-deploy.yml | 17 +++++++++++++-- .github/workflows/build-template.yml | 32 ---------------------------- 2 files changed, 15 insertions(+), 34 deletions(-) delete mode 100644 .github/workflows/build-template.yml diff --git a/.github/workflows/branch-deploy.yml b/.github/workflows/branch-deploy.yml index 25f975708e..0e4a171aec 100644 --- a/.github/workflows/branch-deploy.yml +++ b/.github/workflows/branch-deploy.yml @@ -9,9 +9,9 @@ jobs: if: ${{ github.event.label.name == 'status/feature_testing' || github.event.label.name == 'status/feature_testing_public' }} runs-on: ubuntu-latest steps: - - uses: ./.github/workflows/build-template.yaml + - uses: actions/checkout@v3 with: - APP_VERSION: $GITHUB_SHA + ref: ${{ github.event.pull_request.head.sha }} - name: get branch name id: extract_branch run: | @@ -19,6 +19,19 @@ jobs: echo "tag=${tag}" >> $GITHUB_OUTPUT env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Set up JDK + uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'zulu' + cache: 'maven' + - name: Build + id: build + run: | + ./mvnw -B -ntp versions:set -DnewVersion=$GITHUB_SHA + ./mvnw -B -V -ntp clean package -Pprod -DskipTests + export VERSION=$(./mvnw -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec) + echo "version=${VERSION}" >> $GITHUB_OUTPUT - name: Set up QEMU uses: docker/setup-qemu-action@v2 - name: Set up Docker Buildx diff --git a/.github/workflows/build-template.yml b/.github/workflows/build-template.yml deleted file mode 100644 index 26677932d7..0000000000 --- a/.github/workflows/build-template.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: Maven build template -on: - workflow_call: - inputs: - APP_VERSION: - required: true - type: string -jobs: - build: - runs-on: ubuntu-latest - outputs: - version: ${{steps.build.outputs.version}} - steps: - - uses: actions/checkout@v3 - with: - ref: ${{ github.event.pull_request.head.sha }} - - run: | - git config user.name github-actions - git config user.email github-actions@github.com - - name: Set up JDK - uses: actions/setup-java@v3 - with: - java-version: '17' - distribution: 'zulu' - cache: 'maven' - - name: Build - id: build - run: | - ./mvnw -B -ntp versions:set -DnewVersion=${{ inputs.APP_VERSION }} - ./mvnw -B -V -ntp clean package -Pprod -DskipTests - export VERSION=$(./mvnw -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec) - echo "version=${VERSION}" >> $GITHUB_OUTPUT From e2dc12dc02f88a8d406e6c0e1573c966375609b5 Mon Sep 17 00:00:00 2001 From: Vlad Senyuta <66071557+VladSenyuta@users.noreply.github.com> Date: Mon, 20 Mar 2023 12:10:10 +0200 Subject: [PATCH 2/9] [E2E] Checking components paths (#3514) * [e2e]Checking components paths * upd checkComponentsPathWhileNavigating * add backlog * add backlog * add backlog * upd LocalWebDriver * upd wfm * upd fil path * upd clearMessage * upd panels package * upd Template * add manual cases --------- Co-authored-by: anedyal --- .github/workflows/e2e-automation.yml | 8 +- .github/workflows/e2e-weekly.yml | 8 +- .../provectus/kafka/ui/pages/BasePage.java | 11 + .../kafka/ui/pages/brokers/BrokersList.java | 5 +- .../ui/pages/connectors/KafkaConnectList.java | 3 +- 
.../ui/pages/consumers/ConsumersList.java | 7 +- .../kafka/ui/pages/ksqlDb/KsqlDbList.java | 196 +++++++++--------- .../ui/pages/{ => panels}/NaviSideBar.java | 39 ++-- .../kafka/ui/pages/{ => panels}/TopPanel.java | 3 +- .../kafka/ui/pages/panels/enums/MenuItem.java | 28 +++ .../ui/pages/schemas/SchemaRegistryList.java | 3 +- .../kafka/ui/pages/topics/TopicsList.java | 4 +- .../kafka/ui/services/ApiService.java | 18 +- .../kafka/ui/settings/BaseSource.java | 1 + .../java/com/provectus/kafka/ui/BaseTest.java | 29 ++- .../java/com/provectus/kafka/ui/Facade.java | 4 +- .../kafka/ui/manualSuite/BaseManualTest.java | 4 +- .../ui/manualSuite/backlog/SanityBacklog.java | 19 ++ .../SmokeBacklog.java} | 38 +++- .../ui/manualSuite/suite/TopicsTest.java | 26 +-- .../{BrokersTest.java => WizardTest.java} | 8 +- .../kafka/ui/qaseSuite/BaseQaseTest.java | 7 + .../kafka/ui/qaseSuite/Template.java | 2 +- .../kafka/ui/smokeSuite/SmokeTest.java | 53 +++++ .../smokeSuite/connectors/ConnectorsTest.java | 20 +- .../ui/smokeSuite/topics/MessagesTest.java | 9 +- .../ui/smokeSuite/topics/TopicsTest.java | 2 +- 27 files changed, 359 insertions(+), 196 deletions(-) rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/{ => panels}/NaviSideBar.java (67%) rename kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/{ => panels}/TopPanel.java (90%) create mode 100644 kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/panels/enums/MenuItem.java create mode 100644 kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/backlog/SanityBacklog.java rename kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/{suite/KsqlDbTest.java => backlog/SmokeBacklog.java} (53%) rename kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/{BrokersTest.java => WizardTest.java} (74%) diff --git a/.github/workflows/e2e-automation.yml b/.github/workflows/e2e-automation.yml index 5a5018c9e5..b21fa18ce8 100644 --- a/.github/workflows/e2e-automation.yml +++ b/.github/workflows/e2e-automation.yml @@ -23,6 +23,12 @@ jobs: - uses: actions/checkout@v3 with: ref: ${{ github.sha }} + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1-node16 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: eu-central-1 - name: Set up environment id: set_env_values run: | @@ -65,8 +71,6 @@ jobs: if: always() env: AWS_S3_BUCKET: 'kafkaui-allure-reports' - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} AWS_REGION: 'eu-central-1' SOURCE_DIR: 'allure-history/allure-results' - name: Deploy report to Amazon S3 diff --git a/.github/workflows/e2e-weekly.yml b/.github/workflows/e2e-weekly.yml index 2bf2001ec2..80f7a9393d 100644 --- a/.github/workflows/e2e-weekly.yml +++ b/.github/workflows/e2e-weekly.yml @@ -10,6 +10,12 @@ jobs: - uses: actions/checkout@v3 with: ref: ${{ github.sha }} + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1-node16 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: eu-central-1 - name: Set up environment id: set_env_values run: | @@ -52,8 +58,6 @@ jobs: if: always() env: AWS_S3_BUCKET: 'kafkaui-allure-reports' - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} AWS_REGION: 'eu-central-1' 
SOURCE_DIR: 'allure-history/allure-results' - name: Deploy report to Amazon S3 diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java index 979302fd27..201079fe3b 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java @@ -4,6 +4,7 @@ import com.codeborne.selenide.Condition; import com.codeborne.selenide.ElementsCollection; import com.codeborne.selenide.SelenideElement; import com.codeborne.selenide.WebDriverRunner; +import com.provectus.kafka.ui.pages.panels.enums.MenuItem; import com.provectus.kafka.ui.utilities.WebUtils; import lombok.extern.slf4j.Slf4j; import org.openqa.selenium.Keys; @@ -33,6 +34,8 @@ public abstract class BasePage extends WebUtils { protected String summaryCellLocator = "//div[contains(text(),'%s')]"; protected String tableElementNameLocator = "//tbody//a[contains(text(),'%s')]"; protected String columnHeaderLocator = "//table//tr/th//div[text()='%s']"; + protected String pageTitleFromHeader = "//h1[text()='%s']"; + protected String pagePathFromHeader = "//a[text()='%s']/../h1"; protected void waitUntilSpinnerDisappear() { log.debug("\nwaitUntilSpinnerDisappear"); @@ -41,6 +44,14 @@ public abstract class BasePage extends WebUtils { } } + protected SelenideElement getPageTitleFromHeader(MenuItem menuItem) { + return $x(String.format(pageTitleFromHeader, menuItem.getPageTitle())); + } + + protected SelenideElement getPagePathFromHeader(MenuItem menuItem) { + return $x(String.format(pagePathFromHeader, menuItem.getPageTitle())); + } + protected void clickSubmitBtn() { clickByJavaScript(submitBtn); } diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java index 3d3a58f355..f1f08c3cf5 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java @@ -12,15 +12,14 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import static com.codeborne.selenide.Selenide.$x; +import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.BROKERS; public class BrokersList extends BasePage { - protected SelenideElement brokersListHeader = $x("//h1[text()='Brokers']"); - @Step public BrokersList waitUntilScreenReady() { waitUntilSpinnerDisappear(); - brokersListHeader.shouldBe(Condition.visible); + getPageTitleFromHeader(BROKERS).shouldBe(Condition.visible); return this; } diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/KafkaConnectList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/KafkaConnectList.java index 3be1826511..6c672855a6 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/KafkaConnectList.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/connectors/KafkaConnectList.java @@ -6,6 +6,7 @@ import com.provectus.kafka.ui.pages.BasePage; import io.qameta.allure.Step; import static com.codeborne.selenide.Selenide.$x; +import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KAFKA_CONNECT; public class KafkaConnectList extends BasePage { @@ -19,7 +20,7 @@ public class KafkaConnectList extends BasePage { @Step public KafkaConnectList 
waitUntilScreenReady() { waitUntilSpinnerDisappear(); - createConnectorBtn.shouldBe(Condition.visible); + getPageTitleFromHeader(KAFKA_CONNECT).shouldBe(Condition.visible); return this; } diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java index 6d0c1d48f7..35ef404344 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java @@ -1,20 +1,17 @@ package com.provectus.kafka.ui.pages.consumers; import com.codeborne.selenide.Condition; -import com.codeborne.selenide.SelenideElement; import com.provectus.kafka.ui.pages.BasePage; import io.qameta.allure.Step; -import static com.codeborne.selenide.Selenide.$x; +import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.CONSUMERS; public class ConsumersList extends BasePage { - protected SelenideElement consumerListHeader = $x("//h1[text()='Consumers']"); - @Step public ConsumersList waitUntilScreenReady() { waitUntilSpinnerDisappear(); - consumerListHeader.shouldHave(Condition.visible); + getPageTitleFromHeader(CONSUMERS).shouldBe(Condition.visible); return this; } } diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java index 25246a86ed..e80229d931 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java @@ -1,137 +1,139 @@ package com.provectus.kafka.ui.pages.ksqlDb; -import static com.codeborne.selenide.Selenide.$; -import static com.codeborne.selenide.Selenide.$x; - import com.codeborne.selenide.CollectionCondition; import com.codeborne.selenide.Condition; import com.codeborne.selenide.SelenideElement; import com.provectus.kafka.ui.pages.BasePage; import com.provectus.kafka.ui.pages.ksqlDb.enums.KsqlMenuTabs; import io.qameta.allure.Step; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; import org.openqa.selenium.By; +import java.util.ArrayList; +import java.util.List; + +import static com.codeborne.selenide.Selenide.$; +import static com.codeborne.selenide.Selenide.$x; +import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB; + public class KsqlDbList extends BasePage { - protected SelenideElement executeKsqlBtn = $x("//button[text()='Execute KSQL Request']"); - protected SelenideElement tablesTab = $x("//nav[@role='navigation']/a[text()='Tables']"); - protected SelenideElement streamsTab = $x("//nav[@role='navigation']/a[text()='Streams']"); - @Step - public KsqlDbList waitUntilScreenReady() { - waitUntilSpinnerDisappear(); - Arrays.asList(tablesTab, streamsTab).forEach(tab -> tab.shouldBe(Condition.visible)); - return this; - } + protected SelenideElement executeKsqlBtn = $x("//button[text()='Execute KSQL Request']"); + protected SelenideElement tablesTab = $x("//nav[@role='navigation']/a[text()='Tables']"); + protected SelenideElement streamsTab = $x("//nav[@role='navigation']/a[text()='Streams']"); - @Step - public KsqlDbList clickExecuteKsqlRequestBtn() { - clickByJavaScript(executeKsqlBtn); - return this; - } - - @Step - public KsqlDbList openDetailsTab(KsqlMenuTabs menu) { - 
$(By.linkText(menu.toString())).shouldBe(Condition.visible).click(); - waitUntilSpinnerDisappear(); - return this; - } - - private List initTablesItems() { - List gridItemList = new ArrayList<>(); - gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0)) - .forEach(item -> gridItemList.add(new KsqlDbList.KsqlTablesGridItem(item))); - return gridItemList; - } - - @Step - public KsqlDbList.KsqlTablesGridItem getTableByName(String tableName) { - return initTablesItems().stream() - .filter(e -> e.getTableName().equals(tableName)) - .findFirst().orElseThrow(); - } - - public static class KsqlTablesGridItem extends BasePage { - - private final SelenideElement element; - - public KsqlTablesGridItem(SelenideElement element) { - this.element = element; + @Step + public KsqlDbList waitUntilScreenReady() { + waitUntilSpinnerDisappear(); + getPageTitleFromHeader(KSQL_DB).shouldBe(Condition.visible); + return this; } @Step - public String getTableName() { - return element.$x("./td[1]").getText().trim(); + public KsqlDbList clickExecuteKsqlRequestBtn() { + clickByJavaScript(executeKsqlBtn); + return this; } @Step - public String getTopicName() { - return element.$x("./td[2]").getText().trim(); + public KsqlDbList openDetailsTab(KsqlMenuTabs menu) { + $(By.linkText(menu.toString())).shouldBe(Condition.visible).click(); + waitUntilSpinnerDisappear(); + return this; + } + + private List initTablesItems() { + List gridItemList = new ArrayList<>(); + gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0)) + .forEach(item -> gridItemList.add(new KsqlDbList.KsqlTablesGridItem(item))); + return gridItemList; } @Step - public String getKeyFormat() { - return element.$x("./td[3]").getText().trim(); + public KsqlDbList.KsqlTablesGridItem getTableByName(String tableName) { + return initTablesItems().stream() + .filter(e -> e.getTableName().equals(tableName)) + .findFirst().orElseThrow(); + } + + private List initStreamsItems() { + List gridItemList = new ArrayList<>(); + gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0)) + .forEach(item -> gridItemList.add(new KsqlDbList.KsqlStreamsGridItem(item))); + return gridItemList; } @Step - public String getValueFormat() { - return element.$x("./td[4]").getText().trim(); + public KsqlDbList.KsqlStreamsGridItem getStreamByName(String streamName) { + return initStreamsItems().stream() + .filter(e -> e.getStreamName().equals(streamName)) + .findFirst().orElseThrow(); } - @Step - public String getIsWindowed() { - return element.$x("./td[5]").getText().trim(); - } - } + public static class KsqlTablesGridItem extends BasePage { - private List initStreamsItems() { - List gridItemList = new ArrayList<>(); - gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0)) - .forEach(item -> gridItemList.add(new KsqlDbList.KsqlStreamsGridItem(item))); - return gridItemList; - } + private final SelenideElement element; - @Step - public KsqlDbList.KsqlStreamsGridItem getStreamByName(String streamName) { - return initStreamsItems().stream() - .filter(e -> e.getStreamName().equals(streamName)) - .findFirst().orElseThrow(); - } + public KsqlTablesGridItem(SelenideElement element) { + this.element = element; + } - public static class KsqlStreamsGridItem extends BasePage { + @Step + public String getTableName() { + return element.$x("./td[1]").getText().trim(); + } - private final SelenideElement element; + @Step + public String getTopicName() { + return element.$x("./td[2]").getText().trim(); + } - public KsqlStreamsGridItem(SelenideElement element) { - this.element = 
element; + @Step + public String getKeyFormat() { + return element.$x("./td[3]").getText().trim(); + } + + @Step + public String getValueFormat() { + return element.$x("./td[4]").getText().trim(); + } + + @Step + public String getIsWindowed() { + return element.$x("./td[5]").getText().trim(); + } } - @Step - public String getStreamName() { - return element.$x("./td[1]").getText().trim(); - } + public static class KsqlStreamsGridItem extends BasePage { - @Step - public String getTopicName() { - return element.$x("./td[2]").getText().trim(); - } + private final SelenideElement element; - @Step - public String getKeyFormat() { - return element.$x("./td[3]").getText().trim(); - } + public KsqlStreamsGridItem(SelenideElement element) { + this.element = element; + } - @Step - public String getValueFormat() { - return element.$x("./td[4]").getText().trim(); - } + @Step + public String getStreamName() { + return element.$x("./td[1]").getText().trim(); + } - @Step - public String getIsWindowed() { - return element.$x("./td[5]").getText().trim(); + @Step + public String getTopicName() { + return element.$x("./td[2]").getText().trim(); + } + + @Step + public String getKeyFormat() { + return element.$x("./td[3]").getText().trim(); + } + + @Step + public String getValueFormat() { + return element.$x("./td[4]").getText().trim(); + } + + @Step + public String getIsWindowed() { + return element.$x("./td[5]").getText().trim(); + } } - } } diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/NaviSideBar.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/panels/NaviSideBar.java similarity index 67% rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/NaviSideBar.java rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/panels/NaviSideBar.java index 3c5b0fe6d9..df10360844 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/NaviSideBar.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/panels/NaviSideBar.java @@ -1,7 +1,9 @@ -package com.provectus.kafka.ui.pages; +package com.provectus.kafka.ui.pages.panels; import com.codeborne.selenide.Condition; import com.codeborne.selenide.SelenideElement; +import com.provectus.kafka.ui.pages.panels.enums.MenuItem; +import com.provectus.kafka.ui.pages.BasePage; import io.qameta.allure.Step; import java.time.Duration; @@ -34,38 +36,29 @@ public class NaviSideBar extends BasePage { } @Step - public NaviSideBar openSideMenu(String clusterName, SideMenuOption option) { + public String getPagePath(MenuItem menuItem) { + return getPagePathFromHeader(menuItem) + .shouldBe(Condition.visible) + .getText().trim(); + } + + @Step + public NaviSideBar openSideMenu(String clusterName, MenuItem menuItem) { clickByActions(expandCluster(clusterName).parent() - .$x(String.format(sideMenuOptionElementLocator, option.value))); + .$x(String.format(sideMenuOptionElementLocator, menuItem.getNaviTitle()))); return this; } @Step - public NaviSideBar openSideMenu(SideMenuOption option) { - openSideMenu(CLUSTER_NAME, option); + public NaviSideBar openSideMenu(MenuItem menuItem) { + openSideMenu(CLUSTER_NAME, menuItem); return this; } public List getAllMenuButtons() { expandCluster(CLUSTER_NAME); - return Stream.of(SideMenuOption.values()) - .map(option -> $x(String.format(sideMenuOptionElementLocator, option.value))) + return Stream.of(MenuItem.values()) + .map(menuItem -> $x(String.format(sideMenuOptionElementLocator, menuItem.getNaviTitle()))) 
.collect(Collectors.toList()); } - - public enum SideMenuOption { - DASHBOARD("Dashboard"), - BROKERS("Brokers"), - TOPICS("Topics"), - CONSUMERS("Consumers"), - SCHEMA_REGISTRY("Schema Registry"), - KAFKA_CONNECT("Kafka Connect"), - KSQL_DB("KSQL DB"); - - final String value; - - SideMenuOption(String value) { - this.value = value; - } - } } diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/TopPanel.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/panels/TopPanel.java similarity index 90% rename from kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/TopPanel.java rename to kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/panels/TopPanel.java index 28de4fe058..77cf71a929 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/TopPanel.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/panels/TopPanel.java @@ -1,6 +1,7 @@ -package com.provectus.kafka.ui.pages; +package com.provectus.kafka.ui.pages.panels; import com.codeborne.selenide.SelenideElement; +import com.provectus.kafka.ui.pages.BasePage; import java.util.Arrays; import java.util.List; diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/panels/enums/MenuItem.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/panels/enums/MenuItem.java new file mode 100644 index 0000000000..6610a8293b --- /dev/null +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/panels/enums/MenuItem.java @@ -0,0 +1,28 @@ +package com.provectus.kafka.ui.pages.panels.enums; + +public enum MenuItem { + + DASHBOARD("Dashboard", "Dashboard"), + BROKERS("Brokers", "Brokers"), + TOPICS("Topics", "Topics"), + CONSUMERS("Consumers", "Consumers"), + SCHEMA_REGISTRY("Schema Registry", "Schema Registry"), + KAFKA_CONNECT("Kafka Connect", "Connectors"), + KSQL_DB("KSQL DB", "KSQL DB"); + + private final String naviTitle; + private final String pageTitle; + + MenuItem(String naviTitle, String pageTitle) { + this.naviTitle = naviTitle; + this.pageTitle = pageTitle; + } + + public String getNaviTitle() { + return naviTitle; + } + + public String getPageTitle() { + return pageTitle; + } +} diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaRegistryList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaRegistryList.java index 8f65947734..4f06bb995b 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaRegistryList.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaRegistryList.java @@ -6,6 +6,7 @@ import com.provectus.kafka.ui.pages.BasePage; import io.qameta.allure.Step; import static com.codeborne.selenide.Selenide.$x; +import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.SCHEMA_REGISTRY; public class SchemaRegistryList extends BasePage { @@ -14,7 +15,7 @@ public class SchemaRegistryList extends BasePage { @Step public SchemaRegistryList waitUntilScreenReady() { waitUntilSpinnerDisappear(); - createSchemaBtn.shouldBe(Condition.visible); + getPageTitleFromHeader(SCHEMA_REGISTRY).shouldBe(Condition.visible); return this; } diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java index 35f4c9b1b4..b26f2c2997 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java +++ 
b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java @@ -14,10 +14,10 @@ import java.util.stream.Stream; import static com.codeborne.selenide.Condition.visible; import static com.codeborne.selenide.Selenide.$x; +import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.TOPICS; public class TopicsList extends BasePage { - protected SelenideElement topicListHeader = $x("//h1[text()='Topics']"); protected SelenideElement addTopicBtn = $x("//button[normalize-space(text()) ='Add a Topic']"); protected SelenideElement searchField = $x("//input[@placeholder='Search by Topic Name']"); protected SelenideElement showInternalRadioBtn = $x("//input[@name='ShowInternalTopics']"); @@ -31,7 +31,7 @@ public class TopicsList extends BasePage { @Step public TopicsList waitUntilScreenReady() { waitUntilSpinnerDisappear(); - topicListHeader.shouldBe(visible); + getPageTitleFromHeader(TOPICS).shouldBe(visible); return this; } diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java index 54f69b5198..315cf56d18 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java @@ -67,8 +67,8 @@ public class ApiService extends BaseSource { } @Step - public ApiService createTopic(String topicName) { - createTopic(CLUSTER_NAME, topicName); + public ApiService createTopic(Topic topic) { + createTopic(CLUSTER_NAME, topic.getName()); return this; } @@ -133,6 +133,12 @@ public class ApiService extends BaseSource { return this; } + @Step + public ApiService deleteConnector(String connectorName) { + deleteConnector(CLUSTER_NAME, CONNECT_NAME, connectorName); + return this; + } + @SneakyThrows private void createConnector(String clusterName, String connectName, Connector connector) { NewConnector connectorProperties = new NewConnector(); @@ -152,9 +158,15 @@ public class ApiService extends BaseSource { return this; } + @Step + public ApiService createConnector(Connector connector) { + createConnector(CLUSTER_NAME, CONNECT_NAME, connector); + return this; + } + @Step public String getFirstConnectName(String clusterName) { - return connectorApi().getConnects(clusterName).blockFirst().getName(); + return Objects.requireNonNull(connectorApi().getConnects(clusterName).blockFirst()).getName(); } @SneakyThrows diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/BaseSource.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/BaseSource.java index ebead7b089..d294c5a116 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/BaseSource.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/settings/BaseSource.java @@ -8,6 +8,7 @@ public abstract class BaseSource { public static final String BASE_CONTAINER_URL = "http://host.testcontainers.internal:8080"; public static final String BASE_LOCAL_URL = "http://localhost:8080"; public static final String CLUSTER_NAME = "local"; + public static final String CONNECT_NAME = "first"; private static Config config; public static final String BROWSER = config().browser(); public static final String SUITE_NAME = config().suite(); diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java index b2e7e007e5..dfb4230001 100644 --- 
a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java @@ -22,7 +22,7 @@ import org.testng.asserts.SoftAssert; import java.time.Duration; import java.util.List; -import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.*; +import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.*; import static com.provectus.kafka.ui.settings.BaseSource.*; import static com.provectus.kafka.ui.settings.drivers.LocalWebDriver.*; import static com.provectus.kafka.ui.utilities.qaseUtils.QaseSetup.qaseIntegrationSetup; @@ -108,7 +108,7 @@ public abstract class BaseTest extends Facade { public void afterMethod() { browserClear(); } - + @Step protected void navigateToBrokers() { naviSideBar @@ -117,6 +117,17 @@ public abstract class BaseTest extends Facade { .waitUntilScreenReady(); } + @Step + protected void navigateToBrokersAndOpenDetails(int brokerId) { + naviSideBar + .openSideMenu(BROKERS); + brokersList + .waitUntilScreenReady() + .openBroker(brokerId); + brokersDetails + .waitUntilScreenReady(); + } + @Step protected void navigateToTopics() { naviSideBar @@ -135,7 +146,7 @@ public abstract class BaseTest extends Facade { topicDetails .waitUntilScreenReady(); } - + @Step protected void navigateToConsumers() { naviSideBar @@ -143,7 +154,7 @@ public abstract class BaseTest extends Facade { consumersList .waitUntilScreenReady(); } - + @Step protected void navigateToSchemaRegistry() { naviSideBar @@ -151,7 +162,7 @@ public abstract class BaseTest extends Facade { schemaRegistryList .waitUntilScreenReady(); } - + @Step protected void navigateToSchemaRegistryAndOpenDetails(String schemaName) { navigateToSchemaRegistry(); @@ -160,7 +171,7 @@ public abstract class BaseTest extends Facade { schemaDetails .waitUntilScreenReady(); } - + @Step protected void navigateToConnectors() { naviSideBar @@ -168,7 +179,7 @@ public abstract class BaseTest extends Facade { kafkaConnectList .waitUntilScreenReady(); } - + @Step protected void navigateToConnectorsAndOpenDetails(String connectorName) { navigateToConnectors(); @@ -177,7 +188,7 @@ public abstract class BaseTest extends Facade { connectorDetails .waitUntilScreenReady(); } - + @Step protected void navigateToKsqlDb() { naviSideBar @@ -185,7 +196,7 @@ public abstract class BaseTest extends Facade { ksqlDbList .waitUntilScreenReady(); } - + @Step protected void verifyElementsCondition(List elementList, Condition expectedCondition) { SoftAssert softly = new SoftAssert(); diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/Facade.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/Facade.java index c370c01b5f..67468dbcb0 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/Facade.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/Facade.java @@ -1,7 +1,7 @@ package com.provectus.kafka.ui; -import com.provectus.kafka.ui.pages.NaviSideBar; -import com.provectus.kafka.ui.pages.TopPanel; +import com.provectus.kafka.ui.pages.panels.NaviSideBar; +import com.provectus.kafka.ui.pages.panels.TopPanel; import com.provectus.kafka.ui.pages.brokers.BrokersConfigTab; import com.provectus.kafka.ui.pages.brokers.BrokersDetails; import com.provectus.kafka.ui.pages.brokers.BrokersList; diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/BaseManualTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/BaseManualTest.java index d9891a0b50..bf9f0c75ac 100644 --- 
a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/BaseManualTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/BaseManualTest.java @@ -11,6 +11,7 @@ import java.lang.reflect.Method; import static com.provectus.kafka.ui.utilities.qaseUtils.QaseSetup.qaseIntegrationSetup; import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.NOT_AUTOMATED; +import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.TO_BE_AUTOMATED; @Listeners(QaseResultListener.class) public abstract class BaseManualTest { @@ -22,7 +23,8 @@ public abstract class BaseManualTest { @BeforeMethod public void beforeMethod(Method method) { - if (method.getAnnotation(Automation.class).state().equals(NOT_AUTOMATED)) + if (method.getAnnotation(Automation.class).state().equals(NOT_AUTOMATED) + || method.getAnnotation(Automation.class).state().equals(TO_BE_AUTOMATED)) throw new SkipException("Skip test exception"); } } diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/backlog/SanityBacklog.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/backlog/SanityBacklog.java new file mode 100644 index 0000000000..f09673c8a0 --- /dev/null +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/backlog/SanityBacklog.java @@ -0,0 +1,19 @@ +package com.provectus.kafka.ui.manualSuite.backlog; + +import com.provectus.kafka.ui.manualSuite.BaseManualTest; +import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Automation; +import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite; +import io.qase.api.annotation.QaseId; +import org.testng.annotations.Test; + +import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.TO_BE_AUTOMATED; + +public class SanityBacklog extends BaseManualTest { + + @Automation(state = TO_BE_AUTOMATED) + @Suite(id = 19) + @QaseId(285) + @Test + public void testCaseA() { + } +} diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/KsqlDbTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/backlog/SmokeBacklog.java similarity index 53% rename from kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/KsqlDbTest.java rename to kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/backlog/SmokeBacklog.java index 443fc85ef2..32edaff8c9 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/KsqlDbTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/backlog/SmokeBacklog.java @@ -1,35 +1,61 @@ -package com.provectus.kafka.ui.manualSuite.suite; +package com.provectus.kafka.ui.manualSuite.backlog; import com.provectus.kafka.ui.manualSuite.BaseManualTest; import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Automation; +import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Suite; import io.qase.api.annotation.QaseId; import org.testng.annotations.Test; import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.TO_BE_AUTOMATED; -public class KsqlDbTest extends BaseManualTest { +public class SmokeBacklog extends BaseManualTest { @Automation(state = TO_BE_AUTOMATED) - @QaseId(276) + @Suite(id = 1) + @QaseId(330) @Test public void testCaseA() { } @Automation(state = TO_BE_AUTOMATED) - @QaseId(277) + @Suite(id = 8) + @QaseId(276) @Test public void testCaseB() { } @Automation(state = TO_BE_AUTOMATED) - @QaseId(278) + @Suite(id = 8) + @QaseId(277) @Test public void testCaseC() { 
} @Automation(state = TO_BE_AUTOMATED) - @QaseId(284) + @Suite(id = 8) + @QaseId(278) @Test public void testCaseD() { } + + @Automation(state = TO_BE_AUTOMATED) + @Suite(id = 8) + @QaseId(284) + @Test + public void testCaseE() { + } + + @Automation(state = TO_BE_AUTOMATED) + @Suite(id = 1) + @QaseId(331) + @Test + public void testCaseF() { + } + + @Automation(state = TO_BE_AUTOMATED) + @Suite(id = 1) + @QaseId(332) + @Test + public void testCaseG() { + } } diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/TopicsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/TopicsTest.java index 35188e3f45..dda8103ffa 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/TopicsTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/TopicsTest.java @@ -34,68 +34,62 @@ public class TopicsTest extends BaseManualTest { } @Automation(state = NOT_AUTOMATED) - @QaseId(46) + @QaseId(47) @Test public void testCaseE() { } @Automation(state = NOT_AUTOMATED) - @QaseId(47) + @QaseId(48) @Test public void testCaseF() { } @Automation(state = NOT_AUTOMATED) - @QaseId(48) + @QaseId(49) @Test public void testCaseG() { } @Automation(state = NOT_AUTOMATED) - @QaseId(49) + @QaseId(50) @Test public void testCaseH() { } @Automation(state = NOT_AUTOMATED) - @QaseId(50) + @QaseId(57) @Test public void testCaseI() { } @Automation(state = NOT_AUTOMATED) - @QaseId(57) + @QaseId(58) @Test public void testCaseJ() { } @Automation(state = NOT_AUTOMATED) - @QaseId(58) + @QaseId(269) @Test public void testCaseK() { } @Automation(state = NOT_AUTOMATED) - @QaseId(269) + @QaseId(270) @Test public void testCaseL() { } @Automation(state = NOT_AUTOMATED) - @QaseId(270) + @QaseId(271) @Test public void testCaseM() { } @Automation(state = NOT_AUTOMATED) - @QaseId(271) + @QaseId(272) @Test public void testCaseN() { } - - @Automation(state = NOT_AUTOMATED) - @QaseId(272) - @Test - public void testCaseO() { - } } diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/BrokersTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/WizardTest.java similarity index 74% rename from kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/BrokersTest.java rename to kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/WizardTest.java index eb31d0c2b1..e7ae52bdb1 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/BrokersTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/WizardTest.java @@ -5,12 +5,12 @@ import com.provectus.kafka.ui.utilities.qaseUtils.annotations.Automation; import io.qase.api.annotation.QaseId; import org.testng.annotations.Test; -import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.TO_BE_AUTOMATED; +import static com.provectus.kafka.ui.utilities.qaseUtils.enums.State.NOT_AUTOMATED; -public class BrokersTest extends BaseManualTest { +public class WizardTest extends BaseManualTest { - @Automation(state = TO_BE_AUTOMATED) - @QaseId(330) + @Automation(state = NOT_AUTOMATED) + @QaseId(333) @Test public void testCaseA() { } diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/BaseQaseTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/BaseQaseTest.java index 977cbd6dc4..7b4c34ac05 100644 --- 
a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/BaseQaseTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/BaseQaseTest.java @@ -9,6 +9,13 @@ import static com.provectus.kafka.ui.utilities.qaseUtils.QaseSetup.qaseIntegrati @Listeners(QaseCreateListener.class) public abstract class BaseQaseTest { + protected static final long BROKERS_SUITE_ID = 1; + protected static final long CONNECTORS_SUITE_ID = 10; + protected static final long KSQL_DB_SUITE_ID = 8; + protected static final long SANITY_SUITE_ID = 19; + protected static final long SCHEMAS_SUITE_ID = 11; + protected static final long TOPICS_SUITE_ID = 2; + @BeforeSuite public void beforeSuite() { qaseIntegrationSetup(); diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/Template.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/Template.java index cf0101fe79..ca987650dc 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/Template.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/Template.java @@ -13,7 +13,7 @@ public class Template extends BaseQaseTest { /** * this class is a kind of placeholder or example, use is as template to create new one - * copy class into kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/suite + * copy Template into kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/qaseSuite/ * place it into regarding folder and rename according to test case summary from Qase.io * uncomment @Test and set all annotations according to kafka-ui-e2e-checks/QASE.md */ diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java index 0ea5d2eb82..aa48cde2fc 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java @@ -3,20 +3,46 @@ package com.provectus.kafka.ui.smokeSuite; import com.codeborne.selenide.Condition; import com.codeborne.selenide.WebDriverRunner; import com.provectus.kafka.ui.BaseTest; +import com.provectus.kafka.ui.pages.panels.enums.MenuItem; +import com.provectus.kafka.ui.models.Connector; +import com.provectus.kafka.ui.models.Schema; +import com.provectus.kafka.ui.models.Topic; import io.qameta.allure.Step; import io.qase.api.annotation.QaseId; import org.testng.Assert; +import org.testng.annotations.AfterClass; +import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import java.util.stream.Collectors; import java.util.stream.Stream; +import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.*; import static com.provectus.kafka.ui.settings.BaseSource.BROWSER; +import static com.provectus.kafka.ui.utilities.FileUtils.getResourceAsString; import static com.provectus.kafka.ui.variables.Browser.LOCAL; import static com.provectus.kafka.ui.variables.Url.*; +import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic; public class SmokeTest extends BaseTest { + private static final int BROKER_ID = 1; + private static final Schema TEST_SCHEMA = Schema.createSchemaAvro(); + private static final Topic TEST_TOPIC = new Topic() + .setName("new-topic-" + randomAlphabetic(5)) + .setNumberOfPartitions(1); + private static final Connector TEST_CONNECTOR = new Connector() + .setName("new-connector-" + randomAlphabetic(5)) + 
.setConfig(getResourceAsString("testData/connectors/config_for_create_connector_via_api.json")); + + @BeforeClass(alwaysRun = true) + public void beforeClass() { + apiService + .createTopic(TEST_TOPIC) + .createSchema(TEST_SCHEMA) + .createConnector(TEST_CONNECTOR); + } + @QaseId(198) @Test public void checkBasePageElements() { @@ -45,10 +71,37 @@ public class SmokeTest extends BaseTest { verifyCurrentUrl(KSQL_DB_LIST_URL); } + @QaseId(46) + @Test + public void checkComponentsPathWhileNavigating() { + navigateToBrokersAndOpenDetails(BROKER_ID); + verifyComponentsPath(BROKERS, String.format("Broker %d", BROKER_ID)); + navigateToTopicsAndOpenDetails(TEST_TOPIC.getName()); + verifyComponentsPath(TOPICS, TEST_TOPIC.getName()); + navigateToSchemaRegistryAndOpenDetails(TEST_SCHEMA.getName()); + verifyComponentsPath(SCHEMA_REGISTRY, TEST_SCHEMA.getName()); + navigateToConnectorsAndOpenDetails(TEST_CONNECTOR.getName()); + verifyComponentsPath(KAFKA_CONNECT, TEST_CONNECTOR.getName()); + } + @Step private void verifyCurrentUrl(String expectedUrl) { String host = BROWSER.equals(LOCAL) ? "localhost" : "host.testcontainers.internal"; Assert.assertEquals(WebDriverRunner.getWebDriver().getCurrentUrl(), String.format(expectedUrl, host), "getCurrentUrl()"); } + + @Step + private void verifyComponentsPath(MenuItem menuItem, String expectedPath) { + Assert.assertEquals(naviSideBar.getPagePath(menuItem), expectedPath, + String.format("getPagePath() for %s", menuItem.getPageTitle().toUpperCase())); + } + + @AfterClass(alwaysRun = true) + public void afterClass() { + apiService + .deleteTopic(TEST_TOPIC.getName()) + .deleteSchema(TEST_SCHEMA.getName()) + .deleteConnector(TEST_CONNECTOR.getName()); + } } diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java index db1fea805a..bdce29e153 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java @@ -18,44 +18,42 @@ import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic; public class ConnectorsTest extends BaseTest { - private static final String CONNECT_NAME = "first"; private static final List TOPIC_LIST = new ArrayList<>(); private static final List CONNECTOR_LIST = new ArrayList<>(); private static final String MESSAGE_CONTENT = "testData/topics/message_content_create_topic.json"; private static final String MESSAGE_KEY = " "; private static final Topic TOPIC_FOR_CREATE = new Topic() - .setName("topic_for_create_connector-" + randomAlphabetic(5)) + .setName("topic-for-create-connector-" + randomAlphabetic(5)) .setMessageContent(MESSAGE_CONTENT).setMessageKey(MESSAGE_KEY); private static final Topic TOPIC_FOR_DELETE = new Topic() - .setName("topic_for_delete_connector-" + randomAlphabetic(5)) + .setName("topic-for-delete-connector-" + randomAlphabetic(5)) .setMessageContent(MESSAGE_CONTENT).setMessageKey(MESSAGE_KEY); private static final Topic TOPIC_FOR_UPDATE = new Topic() - .setName("topic_for_update_connector-" + randomAlphabetic(5)) + .setName("topic-for-update-connector-" + randomAlphabetic(5)) .setMessageContent(MESSAGE_CONTENT).setMessageKey(MESSAGE_KEY); private static final Connector CONNECTOR_FOR_DELETE = new Connector() - .setName("sink_postgres_activities_e2e_checks_for_delete-" + randomAlphabetic(5)) + 
.setName("connector-for-delete-" + randomAlphabetic(5)) .setConfig(getResourceAsString("testData/connectors/delete_connector_config.json")); private static final Connector CONNECTOR_FOR_UPDATE = new Connector() - .setName("sink_postgres_activities_e2e_checks_for_update-" + randomAlphabetic(5)) + .setName("connector-for-update-and-delete-" + randomAlphabetic(5)) .setConfig(getResourceAsString("testData/connectors/config_for_create_connector_via_api.json")); @BeforeClass(alwaysRun = true) public void beforeClass() { TOPIC_LIST.addAll(List.of(TOPIC_FOR_CREATE, TOPIC_FOR_DELETE, TOPIC_FOR_UPDATE)); TOPIC_LIST.forEach(topic -> apiService - .createTopic(topic.getName()) + .createTopic(topic) .sendMessage(topic) ); CONNECTOR_LIST.addAll(List.of(CONNECTOR_FOR_DELETE, CONNECTOR_FOR_UPDATE)); - CONNECTOR_LIST.forEach(connector -> apiService - .createConnector(CONNECT_NAME, connector)); + CONNECTOR_LIST.forEach(connector -> apiService.createConnector(connector)); } @QaseId(42) @Test public void createConnector() { Connector connectorForCreate = new Connector() - .setName("sink_postgres_activities_e2e_checks-" + randomAlphabetic(5)) + .setName("connector-for-create-" + randomAlphabetic(5)) .setConfig(getResourceAsString("testData/connectors/config_for_create_connector.json")); navigateToConnectors(); kafkaConnectList @@ -102,7 +100,7 @@ public class ConnectorsTest extends BaseTest { @AfterClass(alwaysRun = true) public void afterClass() { CONNECTOR_LIST.forEach(connector -> - apiService.deleteConnector(CONNECT_NAME, connector.getName())); + apiService.deleteConnector(connector.getName())); TOPIC_LIST.forEach(topic -> apiService.deleteTopic(topic.getName())); } } diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java index 18219d965d..00a0413e7d 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java @@ -53,7 +53,7 @@ public class MessagesTest extends BaseTest { public void beforeClass() { TOPIC_LIST.addAll(List.of(TOPIC_FOR_MESSAGES, TOPIC_FOR_CHECK_FILTERS, TOPIC_TO_CLEAR_AND_PURGE_MESSAGES, TOPIC_TO_RECREATE, TOPIC_FOR_CHECK_MESSAGES_COUNT)); - TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName())); + TOPIC_LIST.forEach(topic -> apiService.createTopic(topic)); IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECK_FILTERS)); waitUntilNewMinuteStarted(); IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECK_FILTERS)); @@ -75,8 +75,6 @@ public class MessagesTest extends BaseTest { softly.assertAll(); } - @Ignore - @Issue("https://github.com/provectus/kafka-ui/issues/2778") @QaseId(19) @Test(priority = 2) public void clearMessage() { @@ -85,12 +83,13 @@ public class MessagesTest extends BaseTest { .openDetailsTab(OVERVIEW); int messageAmount = topicDetails.getMessageCountAmount(); produceMessage(TOPIC_FOR_MESSAGES); - Assert.assertEquals(messageAmount + 1, topicDetails.getMessageCountAmount(), "getMessageCountAmount()"); + Assert.assertEquals(topicDetails.getMessageCountAmount(), messageAmount + 1, "getMessageCountAmount()"); topicDetails .openDotMenu() .clickClearMessagesMenu() + .clickConfirmBtnMdl() .waitUntilScreenReady(); - Assert.assertEquals(0, topicDetails.getMessageCountAmount(), "getMessageCountAmount()"); + 
Assert.assertEquals(topicDetails.getMessageCountAmount(), 0, "getMessageCountAmount()"); } @QaseId(239) diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java index 92e580ec32..c3091a61c1 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java @@ -59,7 +59,7 @@ public class TopicsTest extends BaseTest { @BeforeClass(alwaysRun = true) public void beforeClass() { TOPIC_LIST.addAll(List.of(TOPIC_TO_UPDATE_AND_DELETE, TOPIC_FOR_DELETE, TOPIC_FOR_CHECK_FILTERS)); - TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName())); + TOPIC_LIST.forEach(topic -> apiService.createTopic(topic)); } @QaseId(199) From bd6394cb141dc7ae05684265cd0d7ba9bdab5d52 Mon Sep 17 00:00:00 2001 From: Ilya Kuramshin Date: Tue, 21 Mar 2023 21:14:25 +0400 Subject: [PATCH 3/9] Polling timeouts made configurable (#3513) 1. Polling timeouts made configurable 2. polling-related classes moved to emitter package --------- Co-authored-by: iliax --- .../kafka/ui/config/ClustersProperties.java | 9 +++ .../kafka/ui/emitter/AbstractEmitter.java | 17 ++-- .../ui/emitter/BackwardRecordEmitter.java | 24 +++--- .../kafka/ui/emitter/EmptyPollsCounter.java | 28 +++++++ .../ui/emitter/ForwardRecordEmitter.java | 13 ++- .../kafka/ui/emitter/PollingSettings.java | 79 +++++++++++++++++++ .../{util => emitter}/PollingThrottler.java | 3 +- .../{util => emitter}/ResultSizeLimiter.java | 2 +- .../kafka/ui/emitter/TailingEmitter.java | 5 +- .../kafka/ui/model/KafkaCluster.java | 5 +- .../kafka/ui/service/ClustersStorage.java | 2 +- .../kafka/ui/service/KafkaClusterFactory.java | 7 +- .../kafka/ui/service/MessagesService.java | 8 +- .../service/analyze/TopicAnalysisService.java | 19 ++--- .../kafka/ui/service/RecordEmitterTest.java | 22 +++--- .../kafka/ui/util/PollingThrottlerTest.java | 1 + .../main/resources/swagger/kafka-ui-api.yaml | 9 +++ 17 files changed, 184 insertions(+), 69 deletions(-) create mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/EmptyPollsCounter.java create mode 100644 kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PollingSettings.java rename kafka-ui-api/src/main/java/com/provectus/kafka/ui/{util => emitter}/PollingThrottler.java (94%) rename kafka-ui-api/src/main/java/com/provectus/kafka/ui/{util => emitter}/ResultSizeLimiter.java (93%) diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java index 7b6b494ad1..919e0633e4 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java @@ -27,6 +27,8 @@ public class ClustersProperties { String internalTopicPrefix; + PollingProperties polling = new PollingProperties(); + @Data public static class Cluster { String name; @@ -49,6 +51,13 @@ public class ClustersProperties { TruststoreConfig ssl; } + @Data + public static class PollingProperties { + Integer pollTimeoutMs; + Integer partitionPollTimeout; + Integer noDataEmptyPolls; + } + @Data @ToString(exclude = "password") public static class MetricsConfigData { diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/AbstractEmitter.java 
b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/AbstractEmitter.java index 7cd01061d0..646cf81ca6 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/AbstractEmitter.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/AbstractEmitter.java @@ -4,7 +4,6 @@ import com.provectus.kafka.ui.model.TopicMessageDTO; import com.provectus.kafka.ui.model.TopicMessageEventDTO; import com.provectus.kafka.ui.model.TopicMessagePhaseDTO; import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer; -import com.provectus.kafka.ui.util.PollingThrottler; import java.time.Duration; import java.time.Instant; import org.apache.kafka.clients.consumer.Consumer; @@ -14,27 +13,21 @@ import org.apache.kafka.common.utils.Bytes; import reactor.core.publisher.FluxSink; public abstract class AbstractEmitter { - private static final Duration DEFAULT_POLL_TIMEOUT_MS = Duration.ofMillis(1000L); - - // In some situations it is hard to say whether records range (between two offsets) was fully polled. - // This happens when we have holes in records sequences that is usual case for compact topics or - // topics with transactional writes. In such cases if you want to poll all records between offsets X and Y - // there is no guarantee that you will ever see record with offset Y. - // To workaround this we can assume that after N consecutive empty polls all target messages were read. - public static final int NO_MORE_DATA_EMPTY_POLLS_COUNT = 3; private final ConsumerRecordDeserializer recordDeserializer; private final ConsumingStats consumingStats = new ConsumingStats(); private final PollingThrottler throttler; + protected final PollingSettings pollingSettings; - protected AbstractEmitter(ConsumerRecordDeserializer recordDeserializer, PollingThrottler throttler) { + protected AbstractEmitter(ConsumerRecordDeserializer recordDeserializer, PollingSettings pollingSettings) { this.recordDeserializer = recordDeserializer; - this.throttler = throttler; + this.pollingSettings = pollingSettings; + this.throttler = pollingSettings.getPollingThrottler(); } protected ConsumerRecords poll( FluxSink sink, Consumer consumer) { - return poll(sink, consumer, DEFAULT_POLL_TIMEOUT_MS); + return poll(sink, consumer, pollingSettings.getPollTimeout()); } protected ConsumerRecords poll( diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/BackwardRecordEmitter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/BackwardRecordEmitter.java index 996f8b9f70..42f94a1e01 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/BackwardRecordEmitter.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/BackwardRecordEmitter.java @@ -3,15 +3,12 @@ package com.provectus.kafka.ui.emitter; import com.provectus.kafka.ui.model.ConsumerPosition; import com.provectus.kafka.ui.model.TopicMessageEventDTO; import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer; -import com.provectus.kafka.ui.util.PollingThrottler; -import java.time.Duration; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.TreeMap; import java.util.function.Supplier; -import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerRecord; @@ -26,8 +23,6 @@ public class BackwardRecordEmitter extends AbstractEmitter implements java.util.function.Consumer> { - private static final Duration POLL_TIMEOUT = 
Duration.ofMillis(200); - private final Supplier> consumerSupplier; private final ConsumerPosition consumerPosition; private final int messagesPerPage; @@ -37,8 +32,8 @@ public class BackwardRecordEmitter ConsumerPosition consumerPosition, int messagesPerPage, ConsumerRecordDeserializer recordDeserializer, - PollingThrottler throttler) { - super(recordDeserializer, throttler); + PollingSettings pollingSettings) { + super(recordDeserializer, pollingSettings); this.consumerPosition = consumerPosition; this.messagesPerPage = messagesPerPage; this.consumerSupplier = consumerSupplier; @@ -109,17 +104,18 @@ public class BackwardRecordEmitter var recordsToSend = new ArrayList>(); - // we use empty polls counting to verify that partition was fully read - for (int emptyPolls = 0; recordsToSend.size() < desiredMsgsToPoll && emptyPolls < NO_MORE_DATA_EMPTY_POLLS_COUNT;) { - var polledRecords = poll(sink, consumer, POLL_TIMEOUT); - log.debug("{} records polled from {}", polledRecords.count(), tp); + EmptyPollsCounter emptyPolls = pollingSettings.createEmptyPollsCounter(); + while (!sink.isCancelled() + && recordsToSend.size() < desiredMsgsToPoll + && !emptyPolls.noDataEmptyPollsReached()) { + var polledRecords = poll(sink, consumer, pollingSettings.getPartitionPollTimeout()); + emptyPolls.count(polledRecords); - // counting sequential empty polls - emptyPolls = polledRecords.isEmpty() ? emptyPolls + 1 : 0; + log.debug("{} records polled from {}", polledRecords.count(), tp); var filteredRecords = polledRecords.records(tp).stream() .filter(r -> r.offset() < toOffset) - .collect(Collectors.toList()); + .toList(); if (!polledRecords.isEmpty() && filteredRecords.isEmpty()) { // we already read all messages in target offsets interval diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/EmptyPollsCounter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/EmptyPollsCounter.java new file mode 100644 index 0000000000..3bc2ca38c1 --- /dev/null +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/EmptyPollsCounter.java @@ -0,0 +1,28 @@ +package com.provectus.kafka.ui.emitter; + +import org.apache.kafka.clients.consumer.ConsumerRecords; + +// In some situations it is hard to say whether records range (between two offsets) was fully polled. +// This happens when we have holes in records sequences that is usual case for compact topics or +// topics with transactional writes. In such cases if you want to poll all records between offsets X and Y +// there is no guarantee that you will ever see record with offset Y. +// To workaround this we can assume that after N consecutive empty polls all target messages were read. +public class EmptyPollsCounter { + + private final int maxEmptyPolls; + + private int emptyPolls = 0; + + EmptyPollsCounter(int maxEmptyPolls) { + this.maxEmptyPolls = maxEmptyPolls; + } + + public void count(ConsumerRecords polled) { + emptyPolls = polled.isEmpty() ? 
emptyPolls + 1 : 0; + } + + public boolean noDataEmptyPollsReached() { + return emptyPolls >= maxEmptyPolls; + } + +} diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ForwardRecordEmitter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ForwardRecordEmitter.java index 9fadb149d4..971e2f7c9c 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ForwardRecordEmitter.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ForwardRecordEmitter.java @@ -3,7 +3,6 @@ package com.provectus.kafka.ui.emitter; import com.provectus.kafka.ui.model.ConsumerPosition; import com.provectus.kafka.ui.model.TopicMessageEventDTO; import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer; -import com.provectus.kafka.ui.util.PollingThrottler; import java.util.function.Supplier; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.ConsumerRecord; @@ -25,8 +24,8 @@ public class ForwardRecordEmitter Supplier> consumerSupplier, ConsumerPosition position, ConsumerRecordDeserializer recordDeserializer, - PollingThrottler throttler) { - super(recordDeserializer, throttler); + PollingSettings pollingSettings) { + super(recordDeserializer, pollingSettings); this.position = position; this.consumerSupplier = consumerSupplier; } @@ -39,16 +38,16 @@ public class ForwardRecordEmitter var seekOperations = SeekOperations.create(consumer, position); seekOperations.assignAndSeekNonEmptyPartitions(); - // we use empty polls counting to verify that topic was fully read - int emptyPolls = 0; + EmptyPollsCounter emptyPolls = pollingSettings.createEmptyPollsCounter(); while (!sink.isCancelled() && !seekOperations.assignedPartitionsFullyPolled() - && emptyPolls < NO_MORE_DATA_EMPTY_POLLS_COUNT) { + && !emptyPolls.noDataEmptyPollsReached()) { sendPhase(sink, "Polling"); ConsumerRecords records = poll(sink, consumer); + emptyPolls.count(records); + log.debug("{} records polled", records.count()); - emptyPolls = records.isEmpty() ? emptyPolls + 1 : 0; for (ConsumerRecord msg : records) { if (!sink.isCancelled()) { diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PollingSettings.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PollingSettings.java new file mode 100644 index 0000000000..0c3dfcfbab --- /dev/null +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PollingSettings.java @@ -0,0 +1,79 @@ +package com.provectus.kafka.ui.emitter; + +import com.provectus.kafka.ui.config.ClustersProperties; +import java.time.Duration; +import java.util.Optional; +import java.util.function.Supplier; + +public class PollingSettings { + + private static final Duration DEFAULT_POLL_TIMEOUT = Duration.ofMillis(1_000); + private static final Duration DEFAULT_PARTITION_POLL_TIMEOUT = Duration.ofMillis(200); + private static final int DEFAULT_NO_DATA_EMPTY_POLLS = 3; + + private final Duration pollTimeout; + private final Duration partitionPollTimeout; + private final int notDataEmptyPolls; //see EmptyPollsCounter docs + + private final Supplier throttlerSupplier; + + public static PollingSettings create(ClustersProperties.Cluster cluster, + ClustersProperties clustersProperties) { + var pollingProps = Optional.ofNullable(clustersProperties.getPolling()) + .orElseGet(ClustersProperties.PollingProperties::new); + + var pollTimeout = pollingProps.getPollTimeoutMs() != null + ? 
Duration.ofMillis(pollingProps.getPollTimeoutMs()) + : DEFAULT_POLL_TIMEOUT; + + var partitionPollTimeout = pollingProps.getPartitionPollTimeout() != null + ? Duration.ofMillis(pollingProps.getPartitionPollTimeout()) + : Duration.ofMillis(pollTimeout.toMillis() / 5); + + int noDataEmptyPolls = pollingProps.getNoDataEmptyPolls() != null + ? pollingProps.getNoDataEmptyPolls() + : DEFAULT_NO_DATA_EMPTY_POLLS; + + return new PollingSettings( + pollTimeout, + partitionPollTimeout, + noDataEmptyPolls, + PollingThrottler.throttlerSupplier(cluster) + ); + } + + public static PollingSettings createDefault() { + return new PollingSettings( + DEFAULT_POLL_TIMEOUT, + DEFAULT_PARTITION_POLL_TIMEOUT, + DEFAULT_NO_DATA_EMPTY_POLLS, + PollingThrottler::noop + ); + } + + private PollingSettings(Duration pollTimeout, + Duration partitionPollTimeout, + int notDataEmptyPolls, + Supplier throttlerSupplier) { + this.pollTimeout = pollTimeout; + this.partitionPollTimeout = partitionPollTimeout; + this.notDataEmptyPolls = notDataEmptyPolls; + this.throttlerSupplier = throttlerSupplier; + } + + public EmptyPollsCounter createEmptyPollsCounter() { + return new EmptyPollsCounter(notDataEmptyPolls); + } + + public Duration getPollTimeout() { + return pollTimeout; + } + + public Duration getPartitionPollTimeout() { + return partitionPollTimeout; + } + + public PollingThrottler getPollingThrottler() { + return throttlerSupplier.get(); + } +} diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/PollingThrottler.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PollingThrottler.java similarity index 94% rename from kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/PollingThrottler.java rename to kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PollingThrottler.java index bd2e97da97..15dfcd91c9 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/PollingThrottler.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/PollingThrottler.java @@ -1,8 +1,9 @@ -package com.provectus.kafka.ui.util; +package com.provectus.kafka.ui.emitter; import com.google.common.annotations.VisibleForTesting; import com.google.common.util.concurrent.RateLimiter; import com.provectus.kafka.ui.config.ClustersProperties; +import com.provectus.kafka.ui.util.ConsumerRecordsUtil; import java.util.function.Supplier; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.ConsumerRecords; diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ResultSizeLimiter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ResultSizeLimiter.java similarity index 93% rename from kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ResultSizeLimiter.java rename to kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ResultSizeLimiter.java index 64fcb21509..a0fa5bcb93 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/ResultSizeLimiter.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/ResultSizeLimiter.java @@ -1,4 +1,4 @@ -package com.provectus.kafka.ui.util; +package com.provectus.kafka.ui.emitter; import com.provectus.kafka.ui.model.TopicMessageEventDTO; import java.util.concurrent.atomic.AtomicInteger; diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/TailingEmitter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/TailingEmitter.java index 06cd8dad99..4554069c1c 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/TailingEmitter.java +++ 
b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/TailingEmitter.java @@ -3,7 +3,6 @@ package com.provectus.kafka.ui.emitter; import com.provectus.kafka.ui.model.ConsumerPosition; import com.provectus.kafka.ui.model.TopicMessageEventDTO; import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer; -import com.provectus.kafka.ui.util.PollingThrottler; import java.util.HashMap; import java.util.function.Supplier; import lombok.extern.slf4j.Slf4j; @@ -22,8 +21,8 @@ public class TailingEmitter extends AbstractEmitter public TailingEmitter(Supplier> consumerSupplier, ConsumerPosition consumerPosition, ConsumerRecordDeserializer recordDeserializer, - PollingThrottler throttler) { - super(recordDeserializer, throttler); + PollingSettings pollingSettings) { + super(recordDeserializer, pollingSettings); this.consumerSupplier = consumerSupplier; this.consumerPosition = consumerPosition; } diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java index 9933d7e467..1e2903dbcc 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java @@ -2,14 +2,13 @@ package com.provectus.kafka.ui.model; import com.provectus.kafka.ui.config.ClustersProperties; import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi; +import com.provectus.kafka.ui.emitter.PollingSettings; import com.provectus.kafka.ui.service.ksql.KsqlApiClient; import com.provectus.kafka.ui.service.masking.DataMasking; import com.provectus.kafka.ui.sr.api.KafkaSrClientApi; -import com.provectus.kafka.ui.util.PollingThrottler; import com.provectus.kafka.ui.util.ReactiveFailover; import java.util.Map; import java.util.Properties; -import java.util.function.Supplier; import lombok.AccessLevel; import lombok.AllArgsConstructor; import lombok.Builder; @@ -28,7 +27,7 @@ public class KafkaCluster { private final boolean readOnly; private final MetricsConfig metricsConfig; private final DataMasking masking; - private final Supplier throttler; + private final PollingSettings pollingSettings; private final ReactiveFailover schemaRegistryClient; private final Map> connectsClients; private final ReactiveFailover ksqlClient; diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClustersStorage.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClustersStorage.java index c0143ad8c5..ee08d6392d 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClustersStorage.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClustersStorage.java @@ -14,7 +14,7 @@ public class ClustersStorage { public ClustersStorage(ClustersProperties properties, KafkaClusterFactory factory) { var builder = ImmutableMap.builder(); - properties.getClusters().forEach(c -> builder.put(c.getName(), factory.create(c))); + properties.getClusters().forEach(c -> builder.put(c.getName(), factory.create(properties, c))); this.kafkaClusters = builder.build(); } diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java index 26a9d40647..357a548a63 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java @@ -3,6 +3,7 @@ package com.provectus.kafka.ui.service; 
import com.provectus.kafka.ui.client.RetryingKafkaConnectClient; import com.provectus.kafka.ui.config.ClustersProperties; import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi; +import com.provectus.kafka.ui.emitter.PollingSettings; import com.provectus.kafka.ui.model.ApplicationPropertyValidationDTO; import com.provectus.kafka.ui.model.ClusterConfigValidationDTO; import com.provectus.kafka.ui.model.KafkaCluster; @@ -12,7 +13,6 @@ import com.provectus.kafka.ui.service.masking.DataMasking; import com.provectus.kafka.ui.sr.ApiClient; import com.provectus.kafka.ui.sr.api.KafkaSrClientApi; import com.provectus.kafka.ui.util.KafkaServicesValidation; -import com.provectus.kafka.ui.util.PollingThrottler; import com.provectus.kafka.ui.util.ReactiveFailover; import com.provectus.kafka.ui.util.WebClientConfigurator; import java.util.HashMap; @@ -41,7 +41,8 @@ public class KafkaClusterFactory { @Value("${webclient.max-in-memory-buffer-size:20MB}") private DataSize maxBuffSize; - public KafkaCluster create(ClustersProperties.Cluster clusterProperties) { + public KafkaCluster create(ClustersProperties properties, + ClustersProperties.Cluster clusterProperties) { KafkaCluster.KafkaClusterBuilder builder = KafkaCluster.builder(); builder.name(clusterProperties.getName()); @@ -49,7 +50,7 @@ public class KafkaClusterFactory { builder.properties(convertProperties(clusterProperties.getProperties())); builder.readOnly(clusterProperties.isReadOnly()); builder.masking(DataMasking.create(clusterProperties.getMasking())); - builder.throttler(PollingThrottler.throttlerSupplier(clusterProperties)); + builder.pollingSettings(PollingSettings.create(clusterProperties, properties)); if (schemaRegistryConfigured(clusterProperties)) { builder.schemaRegistryClient(schemaRegistryClient(clusterProperties)); diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java index d1f0e261a8..27f751ac80 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java @@ -5,6 +5,7 @@ import com.provectus.kafka.ui.emitter.BackwardRecordEmitter; import com.provectus.kafka.ui.emitter.ForwardRecordEmitter; import com.provectus.kafka.ui.emitter.MessageFilterStats; import com.provectus.kafka.ui.emitter.MessageFilters; +import com.provectus.kafka.ui.emitter.ResultSizeLimiter; import com.provectus.kafka.ui.emitter.TailingEmitter; import com.provectus.kafka.ui.exception.TopicNotFoundException; import com.provectus.kafka.ui.exception.ValidationException; @@ -17,7 +18,6 @@ import com.provectus.kafka.ui.model.TopicMessageEventDTO; import com.provectus.kafka.ui.serde.api.Serde; import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer; import com.provectus.kafka.ui.serdes.ProducerRecordCreator; -import com.provectus.kafka.ui.util.ResultSizeLimiter; import com.provectus.kafka.ui.util.SslPropertiesUtil; import java.util.List; import java.util.Map; @@ -169,7 +169,7 @@ public class MessagesService { () -> consumerGroupService.createConsumer(cluster), consumerPosition, recordDeserializer, - cluster.getThrottler().get() + cluster.getPollingSettings() ); } else if (seekDirection.equals(SeekDirectionDTO.BACKWARD)) { emitter = new BackwardRecordEmitter( @@ -177,14 +177,14 @@ public class MessagesService { consumerPosition, limit, recordDeserializer, - cluster.getThrottler().get() + cluster.getPollingSettings() ); } 
else { emitter = new TailingEmitter( () -> consumerGroupService.createConsumer(cluster), consumerPosition, recordDeserializer, - cluster.getThrottler().get() + cluster.getPollingSettings() ); } MessageFilterStats filterStats = new MessageFilterStats(); diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/analyze/TopicAnalysisService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/analyze/TopicAnalysisService.java index 9b6bd78767..7ea7a16598 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/analyze/TopicAnalysisService.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/analyze/TopicAnalysisService.java @@ -1,14 +1,14 @@ package com.provectus.kafka.ui.service.analyze; -import static com.provectus.kafka.ui.emitter.AbstractEmitter.NO_MORE_DATA_EMPTY_POLLS_COUNT; - +import com.provectus.kafka.ui.emitter.EmptyPollsCounter; import com.provectus.kafka.ui.emitter.OffsetsInfo; +import com.provectus.kafka.ui.emitter.PollingSettings; +import com.provectus.kafka.ui.emitter.PollingThrottler; import com.provectus.kafka.ui.exception.TopicAnalysisException; import com.provectus.kafka.ui.model.KafkaCluster; import com.provectus.kafka.ui.model.TopicAnalysisDTO; import com.provectus.kafka.ui.service.ConsumerGroupService; import com.provectus.kafka.ui.service.TopicsService; -import com.provectus.kafka.ui.util.PollingThrottler; import java.io.Closeable; import java.time.Duration; import java.time.Instant; @@ -63,7 +63,7 @@ public class TopicAnalysisService { if (analysisTasksStore.isAnalysisInProgress(topicId)) { throw new TopicAnalysisException("Topic is already analyzing"); } - var task = new AnalysisTask(cluster, topicId, partitionsCnt, approxNumberOfMsgs, cluster.getThrottler().get()); + var task = new AnalysisTask(cluster, topicId, partitionsCnt, approxNumberOfMsgs, cluster.getPollingSettings()); analysisTasksStore.registerNewTask(topicId, task); Schedulers.boundedElastic().schedule(task); } @@ -83,6 +83,7 @@ public class TopicAnalysisService { private final TopicIdentity topicId; private final int partitionsCnt; private final long approxNumberOfMsgs; + private final EmptyPollsCounter emptyPollsCounter; private final PollingThrottler throttler; private final TopicAnalysisStats totalStats = new TopicAnalysisStats(); @@ -91,7 +92,7 @@ public class TopicAnalysisService { private final KafkaConsumer consumer; AnalysisTask(KafkaCluster cluster, TopicIdentity topicId, int partitionsCnt, - long approxNumberOfMsgs, PollingThrottler throttler) { + long approxNumberOfMsgs, PollingSettings pollingSettings) { this.topicId = topicId; this.approxNumberOfMsgs = approxNumberOfMsgs; this.partitionsCnt = partitionsCnt; @@ -103,7 +104,8 @@ public class TopicAnalysisService { ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "100000" ) ); - this.throttler = throttler; + this.throttler = pollingSettings.getPollingThrottler(); + this.emptyPollsCounter = pollingSettings.createEmptyPollsCounter(); } @Override @@ -124,11 +126,10 @@ public class TopicAnalysisService { consumer.seekToBeginning(topicPartitions); var offsetsInfo = new OffsetsInfo(consumer, topicId.topicName); - for (int emptyPolls = 0; !offsetsInfo.assignedPartitionsFullyPolled() - && emptyPolls < NO_MORE_DATA_EMPTY_POLLS_COUNT;) { + while (!offsetsInfo.assignedPartitionsFullyPolled() && !emptyPollsCounter.noDataEmptyPollsReached()) { var polled = consumer.poll(Duration.ofSeconds(3)); throttler.throttleAfterPoll(polled); - emptyPolls = polled.isEmpty() ? 
emptyPolls + 1 : 0; + emptyPollsCounter.count(polled); polled.forEach(r -> { totalStats.apply(r); partitionStats.get(r.partition()).apply(r); diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java index 79e81a180f..e7b9edf834 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java @@ -9,6 +9,7 @@ import static org.assertj.core.api.Assertions.assertThat; import com.provectus.kafka.ui.AbstractIntegrationTest; import com.provectus.kafka.ui.emitter.BackwardRecordEmitter; import com.provectus.kafka.ui.emitter.ForwardRecordEmitter; +import com.provectus.kafka.ui.emitter.PollingSettings; import com.provectus.kafka.ui.model.ConsumerPosition; import com.provectus.kafka.ui.model.TopicMessageEventDTO; import com.provectus.kafka.ui.producer.KafkaTestProducer; @@ -16,7 +17,6 @@ import com.provectus.kafka.ui.serde.api.Serde; import com.provectus.kafka.ui.serdes.ConsumerRecordDeserializer; import com.provectus.kafka.ui.serdes.PropertyResolverImpl; import com.provectus.kafka.ui.serdes.builtin.StringSerde; -import com.provectus.kafka.ui.util.PollingThrottler; import java.io.Serializable; import java.util.ArrayList; import java.util.HashMap; @@ -112,7 +112,7 @@ class RecordEmitterTest extends AbstractIntegrationTest { this::createConsumer, new ConsumerPosition(BEGINNING, EMPTY_TOPIC, null), RECORD_DESERIALIZER, - PollingThrottler.noop() + PollingSettings.createDefault() ); var backwardEmitter = new BackwardRecordEmitter( @@ -120,7 +120,7 @@ class RecordEmitterTest extends AbstractIntegrationTest { new ConsumerPosition(BEGINNING, EMPTY_TOPIC, null), 100, RECORD_DESERIALIZER, - PollingThrottler.noop() + PollingSettings.createDefault() ); StepVerifier.create(Flux.create(forwardEmitter)) @@ -142,7 +142,7 @@ class RecordEmitterTest extends AbstractIntegrationTest { this::createConsumer, new ConsumerPosition(BEGINNING, TOPIC, null), RECORD_DESERIALIZER, - PollingThrottler.noop() + PollingSettings.createDefault() ); var backwardEmitter = new BackwardRecordEmitter( @@ -150,7 +150,7 @@ class RecordEmitterTest extends AbstractIntegrationTest { new ConsumerPosition(LATEST, TOPIC, null), PARTITIONS * MSGS_PER_PARTITION, RECORD_DESERIALIZER, - PollingThrottler.noop() + PollingSettings.createDefault() ); List expectedValues = SENT_RECORDS.stream().map(Record::getValue).collect(Collectors.toList()); @@ -171,7 +171,7 @@ class RecordEmitterTest extends AbstractIntegrationTest { this::createConsumer, new ConsumerPosition(OFFSET, TOPIC, targetOffsets), RECORD_DESERIALIZER, - PollingThrottler.noop() + PollingSettings.createDefault() ); var backwardEmitter = new BackwardRecordEmitter( @@ -179,7 +179,7 @@ class RecordEmitterTest extends AbstractIntegrationTest { new ConsumerPosition(OFFSET, TOPIC, targetOffsets), PARTITIONS * MSGS_PER_PARTITION, RECORD_DESERIALIZER, - PollingThrottler.noop() + PollingSettings.createDefault() ); var expectedValues = SENT_RECORDS.stream() @@ -216,7 +216,7 @@ class RecordEmitterTest extends AbstractIntegrationTest { this::createConsumer, new ConsumerPosition(TIMESTAMP, TOPIC, targetTimestamps), RECORD_DESERIALIZER, - PollingThrottler.noop() + PollingSettings.createDefault() ); var backwardEmitter = new BackwardRecordEmitter( @@ -224,7 +224,7 @@ class RecordEmitterTest extends AbstractIntegrationTest { new ConsumerPosition(TIMESTAMP, TOPIC, 
targetTimestamps), PARTITIONS * MSGS_PER_PARTITION, RECORD_DESERIALIZER, - PollingThrottler.noop() + PollingSettings.createDefault() ); var expectedValues = SENT_RECORDS.stream() @@ -255,7 +255,7 @@ class RecordEmitterTest extends AbstractIntegrationTest { new ConsumerPosition(OFFSET, TOPIC, targetOffsets), numMessages, RECORD_DESERIALIZER, - PollingThrottler.noop() + PollingSettings.createDefault() ); var expectedValues = SENT_RECORDS.stream() @@ -281,7 +281,7 @@ class RecordEmitterTest extends AbstractIntegrationTest { new ConsumerPosition(OFFSET, TOPIC, offsets), 100, RECORD_DESERIALIZER, - PollingThrottler.noop() + PollingSettings.createDefault() ); expectEmitter(backwardEmitter, diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/PollingThrottlerTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/PollingThrottlerTest.java index ab333cb11a..2efe7562df 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/PollingThrottlerTest.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/PollingThrottlerTest.java @@ -5,6 +5,7 @@ import static org.assertj.core.data.Percentage.withPercentage; import com.google.common.base.Stopwatch; import com.google.common.util.concurrent.RateLimiter; +import com.provectus.kafka.ui.emitter.PollingThrottler; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; import org.junit.jupiter.api.Test; diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml index 71c595e525..ea335f282c 100644 --- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml +++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml @@ -3445,6 +3445,15 @@ components: kafka: type: object properties: + polling: + type: object + properties: + pollTimeoutMs: + type: integer + partitionPollTimeout: + type: integer + noDataEmptyPolls: + type: integer clusters: type: array items: From 5b726e84fa4c4725c7649e69d0787a07169803d6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 22 Mar 2023 16:58:09 +0400 Subject: [PATCH 4/9] Bump maven-javadoc-plugin from 3.4.1 to 3.5.0 (#3509) Bumps [maven-javadoc-plugin](https://github.com/apache/maven-javadoc-plugin) from 3.4.1 to 3.5.0. - [Release notes](https://github.com/apache/maven-javadoc-plugin/releases) - [Commits](https://github.com/apache/maven-javadoc-plugin/compare/maven-javadoc-plugin-3.4.1...maven-javadoc-plugin-3.5.0) --- updated-dependencies: - dependency-name: org.apache.maven.plugins:maven-javadoc-plugin dependency-type: direct:production update-type: version-update:semver-minor ... 
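[Editor's note] The "polling" object added to the swagger schema in the emitter patch above exposes three cluster-wide knobs. Below is a minimal sketch of how they might appear in an application config, assuming the property names from that schema and the defaults hard-coded in PollingSettings (pollTimeoutMs 1000, partitionPollTimeout of pollTimeoutMs / 5, noDataEmptyPolls 3); the cluster entry is illustrative, not a verified config file:

kafka:
  polling:
    pollTimeoutMs: 2000        # default: 1000 (DEFAULT_POLL_TIMEOUT)
    partitionPollTimeout: 400  # default: pollTimeoutMs / 5
    noDataEmptyPolls: 5        # consecutive empty polls treated as "no more data"; default: 3
  clusters:
    - name: local              # illustrative cluster entry
      bootstrapServers: localhost:9092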
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- kafka-ui-serde-api/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kafka-ui-serde-api/pom.xml b/kafka-ui-serde-api/pom.xml index ad0a88ca40..db20926257 100644 --- a/kafka-ui-serde-api/pom.xml +++ b/kafka-ui-serde-api/pom.xml @@ -87,7 +87,7 @@ 8 - 3.4.1 + 3.5.0 attach-javadocs From 75a6282a84d77b15fbaaf7e382107a86e2e20f0f Mon Sep 17 00:00:00 2001 From: Roman Zabaluev Date: Wed, 22 Mar 2023 16:14:52 +0300 Subject: [PATCH 5/9] [Infra] Chore: Upgrade aws creds action (#3533) --- .github/workflows/aws_publisher.yaml | 2 +- .github/workflows/branch-deploy.yml | 2 +- .github/workflows/build-public-image.yml | 2 +- .github/workflows/delete-public-image.yml | 2 +- .github/workflows/e2e-automation.yml | 2 +- .github/workflows/e2e-checks.yaml | 2 +- .github/workflows/e2e-weekly.yml | 2 +- .github/workflows/separate_env_public_create.yml | 2 +- .github/workflows/terraform-deploy.yml | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/aws_publisher.yaml b/.github/workflows/aws_publisher.yaml index c7b80c54f9..8cdf3751eb 100644 --- a/.github/workflows/aws_publisher.yaml +++ b/.github/workflows/aws_publisher.yaml @@ -31,7 +31,7 @@ jobs: echo "Packer will be triggered in this dir $WORK_DIR" - name: Configure AWS credentials for Kafka-UI account - uses: aws-actions/configure-aws-credentials@v1-node16 + uses: aws-actions/configure-aws-credentials@v2 with: aws-access-key-id: ${{ secrets.AWS_AMI_PUBLISH_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_AMI_PUBLISH_KEY_SECRET }} diff --git a/.github/workflows/branch-deploy.yml b/.github/workflows/branch-deploy.yml index 0e4a171aec..3039958b5a 100644 --- a/.github/workflows/branch-deploy.yml +++ b/.github/workflows/branch-deploy.yml @@ -45,7 +45,7 @@ jobs: restore-keys: | ${{ runner.os }}-buildx- - name: Configure AWS credentials for Kafka-UI account - uses: aws-actions/configure-aws-credentials@v1-node16 + uses: aws-actions/configure-aws-credentials@v2 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} diff --git a/.github/workflows/build-public-image.yml b/.github/workflows/build-public-image.yml index e965f6c672..7a5e3b4ca6 100644 --- a/.github/workflows/build-public-image.yml +++ b/.github/workflows/build-public-image.yml @@ -42,7 +42,7 @@ jobs: restore-keys: | ${{ runner.os }}-buildx- - name: Configure AWS credentials for Kafka-UI account - uses: aws-actions/configure-aws-credentials@v1-node16 + uses: aws-actions/configure-aws-credentials@v2 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} diff --git a/.github/workflows/delete-public-image.yml b/.github/workflows/delete-public-image.yml index 56c795d0b5..c335bb8050 100644 --- a/.github/workflows/delete-public-image.yml +++ b/.github/workflows/delete-public-image.yml @@ -15,7 +15,7 @@ jobs: tag='${{ github.event.pull_request.number }}' echo "tag=${tag}" >> $GITHUB_OUTPUT - name: Configure AWS credentials for Kafka-UI account - uses: aws-actions/configure-aws-credentials@v1-node16 + uses: aws-actions/configure-aws-credentials@v2 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} diff --git a/.github/workflows/e2e-automation.yml b/.github/workflows/e2e-automation.yml index b21fa18ce8..eb10985747 100644 --- 
a/.github/workflows/e2e-automation.yml +++ b/.github/workflows/e2e-automation.yml @@ -24,7 +24,7 @@ jobs: with: ref: ${{ github.sha }} - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1-node16 + uses: aws-actions/configure-aws-credentials@v2 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} diff --git a/.github/workflows/e2e-checks.yaml b/.github/workflows/e2e-checks.yaml index 962a134684..83be371bea 100644 --- a/.github/workflows/e2e-checks.yaml +++ b/.github/workflows/e2e-checks.yaml @@ -16,7 +16,7 @@ jobs: with: ref: ${{ github.event.pull_request.head.sha }} - name: Configure AWS credentials for Kafka-UI account - uses: aws-actions/configure-aws-credentials@v1-node16 + uses: aws-actions/configure-aws-credentials@v2 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} diff --git a/.github/workflows/e2e-weekly.yml b/.github/workflows/e2e-weekly.yml index 80f7a9393d..4683c7e111 100644 --- a/.github/workflows/e2e-weekly.yml +++ b/.github/workflows/e2e-weekly.yml @@ -11,7 +11,7 @@ jobs: with: ref: ${{ github.sha }} - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1-node16 + uses: aws-actions/configure-aws-credentials@v2 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} diff --git a/.github/workflows/separate_env_public_create.yml b/.github/workflows/separate_env_public_create.yml index 1b7085d936..ad5f0e47e7 100644 --- a/.github/workflows/separate_env_public_create.yml +++ b/.github/workflows/separate_env_public_create.yml @@ -47,7 +47,7 @@ jobs: restore-keys: | ${{ runner.os }}-buildx- - name: Configure AWS credentials for Kafka-UI account - uses: aws-actions/configure-aws-credentials@v1-node16 + uses: aws-actions/configure-aws-credentials@v2 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} diff --git a/.github/workflows/terraform-deploy.yml b/.github/workflows/terraform-deploy.yml index c6163ef90f..902f9c385b 100644 --- a/.github/workflows/terraform-deploy.yml +++ b/.github/workflows/terraform-deploy.yml @@ -26,7 +26,7 @@ jobs: echo "Terraform will be triggered in this dir $TF_DIR" - name: Configure AWS credentials for Kafka-UI account - uses: aws-actions/configure-aws-credentials@v1-node16 + uses: aws-actions/configure-aws-credentials@v2 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} From d8289d2ee676a7f4de7e8aebfbe73aa809a80955 Mon Sep 17 00:00:00 2001 From: Ilya Kuramshin Date: Thu, 23 Mar 2023 14:46:59 +0400 Subject: [PATCH 6/9] CORS configuration bean added (#3529) Co-authored-by: iliax --- .../ui/config/CorsGlobalConfiguration.java | 42 +------------------ 1 file changed, 1 insertion(+), 41 deletions(-) diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java index 0128110ab7..c6c88bfa98 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java @@ -1,25 +1,12 @@ package com.provectus.kafka.ui.config; -import lombok.AllArgsConstructor; -import org.springframework.boot.autoconfigure.web.ServerProperties; -import 
org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Profile; -import org.springframework.core.io.ClassPathResource; -import org.springframework.util.StringUtils; import org.springframework.web.reactive.config.CorsRegistry; import org.springframework.web.reactive.config.WebFluxConfigurer; -import org.springframework.web.reactive.function.server.RouterFunction; -import org.springframework.web.reactive.function.server.RouterFunctions; -import org.springframework.web.reactive.function.server.ServerResponse; @Configuration -@Profile("local") -@AllArgsConstructor public class CorsGlobalConfiguration implements WebFluxConfigurer { - private final ServerProperties serverProperties; - @Override public void addCorsMappings(CorsRegistry registry) { registry.addMapping("/**") @@ -28,31 +15,4 @@ public class CorsGlobalConfiguration implements WebFluxConfigurer { .allowedHeaders("*") .allowCredentials(false); } - - private String withContext(String pattern) { - final String basePath = serverProperties.getServlet().getContextPath(); - if (StringUtils.hasText(basePath)) { - return basePath + pattern; - } else { - return pattern; - } - } - - @Bean - public RouterFunction cssFilesRouter() { - return RouterFunctions - .resources(withContext("/static/css/**"), new ClassPathResource("static/static/css/")); - } - - @Bean - public RouterFunction jsFilesRouter() { - return RouterFunctions - .resources(withContext("/static/js/**"), new ClassPathResource("static/static/js/")); - } - - @Bean - public RouterFunction mediaFilesRouter() { - return RouterFunctions - .resources(withContext("/static/media/**"), new ClassPathResource("static/static/media/")); - } -} \ No newline at end of file +} From 84d3b329ba876bfc78e8b7a73c7174695c738ade Mon Sep 17 00:00:00 2001 From: Oleg Shur Date: Thu, 23 Mar 2023 16:36:29 +0300 Subject: [PATCH 7/9] Migrate KSQL to RQ. Get rid of redux reducer (#3478) * Migrate KSQL to RQ. 
Get rid of redux reducer * refactor react-ace using * get rid of deadcode * ace * Fix add/remove stream props * ace * Fix Clear result handler * Fix error handler * rollback removal of yup async validation helper * reduce re-renders * move ace to separate chunk * upd KsqlQueryForm * feedback --------- Co-authored-by: VladSenyuta --- .../kafka/ui/pages/ksqlDb/KsqlQueryForm.java | 3 +- kafka-ui-react-app/src/components/App.tsx | 3 + .../src/components/KsqlDb/KsqlDb.tsx | 108 ++++++- .../KsqlDb/List/KsqlDbItem/KsqlDbItem.tsx | 58 ---- .../KsqlDbItem/__test__/KsqlDbItem.spec.tsx | 59 ---- .../List/KsqlDbItem/utils/ksqlRowData.ts | 12 - .../src/components/KsqlDb/List/List.tsx | 111 ------- .../KsqlDb/List/__test__/List.spec.tsx | 22 -- .../components/KsqlDb/Query/Query.styled.ts | 9 - .../src/components/KsqlDb/Query/Query.tsx | 235 ++------------ .../Query/QueryForm/QueryForm.styled.ts | 69 +---- .../KsqlDb/Query/QueryForm/QueryForm.tsx | 292 +++++++++--------- .../QueryForm/__test__/QueryForm.spec.tsx | 189 ------------ .../KsqlDb/Query/__test__/Query.spec.tsx | 116 ------- .../renderer/TableRenderer/TableRenderer.tsx | 18 +- .../__test__/TableRenderer.spec.tsx | 71 ----- .../src/components/KsqlDb/TableView.tsx | 39 +++ .../KsqlDb/__test__/KsqlDb.spec.tsx | 42 --- .../Schemas/Details/__test__/fixtures.ts | 6 - .../ActionComponent/__tests__/fixtures.ts | 2 +- .../common/DiffViewer/DiffViewer.tsx | 1 + .../src/components/common/Editor/Editor.tsx | 4 +- .../common/NewTable/TimestampCell copy.tsx | 11 - .../components/common/SQLEditor/SQLEditor.tsx | 4 +- .../src/components/common/Tooltip/Tooltip.tsx | 4 +- .../table/TableTitle/TableTitle.styled.tsx | 2 +- .../src/lib/hooks/__tests__/fixtures.ts | 2 +- .../src/lib/hooks/api/kafkaConnect.ts | 2 +- .../src/lib/hooks/api/ksqlDb.tsx | 184 +++++++++++ .../src/lib/hooks/useMessageFiltersStore.ts | 2 +- kafka-ui-react-app/src/lib/paths.ts | 4 +- kafka-ui-react-app/src/lib/yupExtended.ts | 3 +- .../src/redux/interfaces/ksqlDb.ts | 19 -- .../src/redux/reducers/index.ts | 2 - .../reducers/ksqlDb/__test__/fixtures.ts | 43 --- .../ksqlDb/__test__/selectors.spec.ts | 51 --- .../src/redux/reducers/ksqlDb/ksqlDbSlice.ts | 75 ----- .../src/redux/reducers/ksqlDb/selectors.ts | 33 -- kafka-ui-react-app/vite.config.ts | 7 + 39 files changed, 543 insertions(+), 1374 deletions(-) delete mode 100644 kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/KsqlDbItem.tsx delete mode 100644 kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/__test__/KsqlDbItem.spec.tsx delete mode 100644 kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/utils/ksqlRowData.ts delete mode 100644 kafka-ui-react-app/src/components/KsqlDb/List/List.tsx delete mode 100644 kafka-ui-react-app/src/components/KsqlDb/List/__test__/List.spec.tsx delete mode 100644 kafka-ui-react-app/src/components/KsqlDb/Query/Query.styled.ts delete mode 100644 kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/__test__/QueryForm.spec.tsx delete mode 100644 kafka-ui-react-app/src/components/KsqlDb/Query/__test__/Query.spec.tsx delete mode 100644 kafka-ui-react-app/src/components/KsqlDb/Query/renderer/TableRenderer/__test__/TableRenderer.spec.tsx create mode 100644 kafka-ui-react-app/src/components/KsqlDb/TableView.tsx delete mode 100644 kafka-ui-react-app/src/components/KsqlDb/__test__/KsqlDb.spec.tsx delete mode 100644 kafka-ui-react-app/src/components/common/NewTable/TimestampCell copy.tsx create mode 100644 kafka-ui-react-app/src/lib/hooks/api/ksqlDb.tsx delete mode 100644 
kafka-ui-react-app/src/redux/interfaces/ksqlDb.ts delete mode 100644 kafka-ui-react-app/src/redux/reducers/ksqlDb/__test__/fixtures.ts delete mode 100644 kafka-ui-react-app/src/redux/reducers/ksqlDb/__test__/selectors.spec.ts delete mode 100644 kafka-ui-react-app/src/redux/reducers/ksqlDb/ksqlDbSlice.ts delete mode 100644 kafka-ui-react-app/src/redux/reducers/ksqlDb/selectors.ts diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java index 30ac1007fd..df915c0098 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java @@ -16,7 +16,6 @@ import static com.codeborne.selenide.Selenide.$$x; import static com.codeborne.selenide.Selenide.$x; public class KsqlQueryForm extends BasePage { - protected SelenideElement pageTitle = $x("//h1[text()='Query']"); protected SelenideElement clearBtn = $x("//div/button[text()='Clear']"); protected SelenideElement executeBtn = $x("//div/button[text()='Execute']"); protected SelenideElement stopQueryBtn = $x("//div/button[text()='Stop query']"); @@ -31,7 +30,7 @@ public class KsqlQueryForm extends BasePage { @Step public KsqlQueryForm waitUntilScreenReady() { waitUntilSpinnerDisappear(); - pageTitle.shouldBe(Condition.visible); + executeBtn.shouldBe(Condition.visible); return this; } diff --git a/kafka-ui-react-app/src/components/App.tsx b/kafka-ui-react-app/src/components/App.tsx index 6107e3ade7..44409c403a 100644 --- a/kafka-ui-react-app/src/components/App.tsx +++ b/kafka-ui-react-app/src/components/App.tsx @@ -30,6 +30,9 @@ const queryClient = new QueryClient({ defaultOptions: { queries: { suspense: true, + onError(error) { + showServerError(error as Response); + }, }, mutations: { onError(error) { diff --git a/kafka-ui-react-app/src/components/KsqlDb/KsqlDb.tsx b/kafka-ui-react-app/src/components/KsqlDb/KsqlDb.tsx index 12a3f8cbfa..d105720aa7 100644 --- a/kafka-ui-react-app/src/components/KsqlDb/KsqlDb.tsx +++ b/kafka-ui-react-app/src/components/KsqlDb/KsqlDb.tsx @@ -1,15 +1,109 @@ import React from 'react'; -import { Route, Routes } from 'react-router-dom'; -import { clusterKsqlDbQueryRelativePath } from 'lib/paths'; -import List from 'components/KsqlDb/List/List'; import Query from 'components/KsqlDb/Query/Query'; +import useAppParams from 'lib/hooks/useAppParams'; +import * as Metrics from 'components/common/Metrics'; +import { + clusterKsqlDbQueryRelativePath, + clusterKsqlDbStreamsPath, + clusterKsqlDbStreamsRelativePath, + clusterKsqlDbTablesPath, + clusterKsqlDbTablesRelativePath, + ClusterNameRoute, +} from 'lib/paths'; +import PageHeading from 'components/common/PageHeading/PageHeading'; +import { ActionButton } from 'components/common/ActionComponent'; +import Navbar from 'components/common/Navigation/Navbar.styled'; +import { Navigate, NavLink, Route, Routes } from 'react-router-dom'; +import { Action, ResourceType } from 'generated-sources'; +import { useKsqlkDb } from 'lib/hooks/api/ksqlDb'; +import 'ace-builds/src-noconflict/ace'; + +import TableView from './TableView'; const KsqlDb: React.FC = () => { + const { clusterName } = useAppParams(); + + const [tables, streams] = useKsqlkDb(clusterName); + + const isFetching = tables.isFetching || streams.isFetching; + return ( - - } /> - } /> - + <> + + + Execute KSQL Request + + + + + + {tables.isSuccess ? 
tables.data.length : '-'} + + + {streams.isSuccess ? streams.data.length : '-'} + + + +
+ + (isActive ? 'is-active' : '')} + end + > + Tables + + (isActive ? 'is-active' : '')} + end + > + Streams + + + + } + /> + + } + /> + + } + /> + } /> + +
+ ); }; diff --git a/kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/KsqlDbItem.tsx b/kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/KsqlDbItem.tsx deleted file mode 100644 index bbb0844d68..0000000000 --- a/kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/KsqlDbItem.tsx +++ /dev/null @@ -1,58 +0,0 @@ -import React from 'react'; -import PageLoader from 'components/common/PageLoader/PageLoader'; -import { KsqlStreamDescription, KsqlTableDescription } from 'generated-sources'; -import { ksqlRowData } from 'components/KsqlDb/List/KsqlDbItem/utils/ksqlRowData'; -import Table from 'components/common/NewTable'; -import { ColumnDef } from '@tanstack/react-table'; - -export enum KsqlDbItemType { - Tables = 'tables', - Streams = 'streams', -} - -interface RowsType { - tables: KsqlTableDescription[]; - streams: KsqlStreamDescription[]; -} -export interface KsqlDbItemProps { - type: KsqlDbItemType; - fetching: boolean; - rows: RowsType; -} - -export interface KsqlTableState { - name: string; - topic: string; - keyFormat: string; - valueFormat: string; - isWindowed: string; -} - -const KsqlDbItem: React.FC = ({ type, fetching, rows }) => { - const preparedRows = rows[type]?.map(ksqlRowData) || []; - - const columns = React.useMemo[]>( - () => [ - { header: 'Name', accessorKey: 'name' }, - { header: 'Topic', accessorKey: 'topic' }, - { header: 'Key Format', accessorKey: 'keyFormat' }, - { header: 'Value Format', accessorKey: 'valueFormat' }, - { header: 'Is Windowed', accessorKey: 'isWindowed' }, - ], - [] - ); - - if (fetching) { - return ; - } - return ( - - ); -}; - -export default KsqlDbItem; diff --git a/kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/__test__/KsqlDbItem.spec.tsx b/kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/__test__/KsqlDbItem.spec.tsx deleted file mode 100644 index ea6705b6a4..0000000000 --- a/kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/__test__/KsqlDbItem.spec.tsx +++ /dev/null @@ -1,59 +0,0 @@ -import React from 'react'; -import { render, WithRoute } from 'lib/testHelpers'; -import { clusterKsqlDbTablesPath } from 'lib/paths'; -import KsqlDbItem, { - KsqlDbItemProps, - KsqlDbItemType, -} from 'components/KsqlDb/List/KsqlDbItem/KsqlDbItem'; -import { screen } from '@testing-library/dom'; -import { fetchKsqlDbTablesPayload } from 'redux/reducers/ksqlDb/__test__/fixtures'; - -describe('KsqlDbItem', () => { - const tablesPathname = clusterKsqlDbTablesPath(); - const renderComponent = (props: Partial = {}) => { - render( - - - , - { - initialEntries: [clusterKsqlDbTablesPath()], - } - ); - }; - - it('renders progressbar when fetching tables and streams', () => { - renderComponent({ fetching: true }); - expect(screen.getByRole('progressbar')).toBeInTheDocument(); - }); - - it('show no text if no data found', () => { - renderComponent({}); - expect(screen.getByText('No tables or streams found')).toBeInTheDocument(); - }); - - it('renders with tables', () => { - renderComponent({ - rows: { - tables: fetchKsqlDbTablesPayload.tables, - streams: [], - }, - }); - - expect(screen.getByRole('table').querySelectorAll('td')).toHaveLength(10); - }); - it('renders with streams', () => { - renderComponent({ - type: KsqlDbItemType.Streams, - rows: { - tables: [], - streams: fetchKsqlDbTablesPayload.streams, - }, - }); - expect(screen.getByRole('table').querySelectorAll('td')).toHaveLength(10); - }); -}); diff --git a/kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/utils/ksqlRowData.ts 
b/kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/utils/ksqlRowData.ts deleted file mode 100644 index a2c9e65891..0000000000 --- a/kafka-ui-react-app/src/components/KsqlDb/List/KsqlDbItem/utils/ksqlRowData.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { KsqlDescription } from 'redux/interfaces/ksqlDb'; -import { KsqlTableState } from 'components/KsqlDb/List/KsqlDbItem/KsqlDbItem'; - -export const ksqlRowData = (data: KsqlDescription): KsqlTableState => { - return { - name: data.name || '', - topic: data.topic || '', - keyFormat: data.keyFormat || '', - valueFormat: data.valueFormat || '', - isWindowed: 'isWindowed' in data ? String(data.isWindowed) : '-', - }; -}; diff --git a/kafka-ui-react-app/src/components/KsqlDb/List/List.tsx b/kafka-ui-react-app/src/components/KsqlDb/List/List.tsx deleted file mode 100644 index 698bcd6f95..0000000000 --- a/kafka-ui-react-app/src/components/KsqlDb/List/List.tsx +++ /dev/null @@ -1,111 +0,0 @@ -import React, { FC } from 'react'; -import useAppParams from 'lib/hooks/useAppParams'; -import * as Metrics from 'components/common/Metrics'; -import { getKsqlDbTables } from 'redux/reducers/ksqlDb/selectors'; -import { - clusterKsqlDbQueryRelativePath, - clusterKsqlDbStreamsPath, - clusterKsqlDbStreamsRelativePath, - clusterKsqlDbTablesPath, - clusterKsqlDbTablesRelativePath, - ClusterNameRoute, -} from 'lib/paths'; -import PageHeading from 'components/common/PageHeading/PageHeading'; -import { ActionButton } from 'components/common/ActionComponent'; -import Navbar from 'components/common/Navigation/Navbar.styled'; -import { Navigate, NavLink, Route, Routes } from 'react-router-dom'; -import { fetchKsqlDbTables } from 'redux/reducers/ksqlDb/ksqlDbSlice'; -import { useAppDispatch, useAppSelector } from 'lib/hooks/redux'; -import { Action, ResourceType } from 'generated-sources'; - -import KsqlDbItem, { KsqlDbItemType } from './KsqlDbItem/KsqlDbItem'; - -const List: FC = () => { - const { clusterName } = useAppParams(); - const dispatch = useAppDispatch(); - - const { rows, fetching, tablesCount, streamsCount } = - useAppSelector(getKsqlDbTables); - - React.useEffect(() => { - dispatch(fetchKsqlDbTables(clusterName)); - }, [clusterName, dispatch]); - - return ( - <> - - - Execute KSQL Request - - - - - - {tablesCount} - - - {streamsCount} - - - -
- - (isActive ? 'is-active' : '')} - end - > - Tables - - (isActive ? 'is-active' : '')} - end - > - Streams - - - - } - /> - - } - /> - - } - /> - -
- - ); -}; - -export default List; diff --git a/kafka-ui-react-app/src/components/KsqlDb/List/__test__/List.spec.tsx b/kafka-ui-react-app/src/components/KsqlDb/List/__test__/List.spec.tsx deleted file mode 100644 index c6309076b5..0000000000 --- a/kafka-ui-react-app/src/components/KsqlDb/List/__test__/List.spec.tsx +++ /dev/null @@ -1,22 +0,0 @@ -import React from 'react'; -import List from 'components/KsqlDb/List/List'; -import { render } from 'lib/testHelpers'; -import fetchMock from 'fetch-mock'; -import { screen } from '@testing-library/dom'; -import { act } from '@testing-library/react'; - -describe('KsqlDb List', () => { - const renderComponent = async () => { - await act(() => { - render(); - }); - }; - afterEach(() => fetchMock.reset()); - it('renders List component with Tables and Streams tabs', async () => { - await renderComponent(); - const Tables = screen.getByTitle('Tables'); - const Streams = screen.getByTitle('Streams'); - expect(Tables).toBeInTheDocument(); - expect(Streams).toBeInTheDocument(); - }); -}); diff --git a/kafka-ui-react-app/src/components/KsqlDb/Query/Query.styled.ts b/kafka-ui-react-app/src/components/KsqlDb/Query/Query.styled.ts deleted file mode 100644 index 8b145a89da..0000000000 --- a/kafka-ui-react-app/src/components/KsqlDb/Query/Query.styled.ts +++ /dev/null @@ -1,9 +0,0 @@ -import PageLoader from 'components/common/PageLoader/PageLoader'; -import styled from 'styled-components'; - -export const ContinuousLoader = styled(PageLoader)` - & > div { - transform: scale(0.5); - padding-top: 0; - } -`; diff --git a/kafka-ui-react-app/src/components/KsqlDb/Query/Query.tsx b/kafka-ui-react-app/src/components/KsqlDb/Query/Query.tsx index 7e87574863..267498b923 100644 --- a/kafka-ui-react-app/src/components/KsqlDb/Query/Query.tsx +++ b/kafka-ui-react-app/src/components/KsqlDb/Query/Query.tsx @@ -1,223 +1,54 @@ -import React, { useCallback, useEffect, FC, useState } from 'react'; +import React from 'react'; import useAppParams from 'lib/hooks/useAppParams'; import TableRenderer from 'components/KsqlDb/Query/renderer/TableRenderer/TableRenderer'; +import { ClusterNameRoute } from 'lib/paths'; import { - executeKsql, - resetExecutionResult, -} from 'redux/reducers/ksqlDb/ksqlDbSlice'; -import { getKsqlExecution } from 'redux/reducers/ksqlDb/selectors'; -import { BASE_PARAMS } from 'lib/constants'; -import { KsqlResponse, KsqlTableResponse } from 'generated-sources'; -import { clusterKsqlDbPath, ClusterNameRoute } from 'lib/paths'; -import { useAppDispatch, useAppSelector } from 'lib/hooks/redux'; -import { showAlert, showSuccessAlert } from 'lib/errorHandling'; -import PageHeading from 'components/common/PageHeading/PageHeading'; + useExecuteKsqlkDbQueryMutation, + useKsqlkDbSSE, +} from 'lib/hooks/api/ksqlDb'; import type { FormValues } from './QueryForm/QueryForm'; -import * as S from './Query.styled'; import QueryForm from './QueryForm/QueryForm'; -export const getFormattedErrorFromTableData = ( - responseValues: KsqlTableResponse['values'] -): { title: string; message: string } => { - // We expect someting like that - // [[ - // "@type", - // "error_code", - // "message", - // "statementText"?, - // "entities"? 
- // ]], - // or - // [["message"]] - - if (!responseValues || !responseValues.length) { - return { - title: 'Unknown error', - message: 'Recieved empty response', - }; - } - - let title = ''; - let message = ''; - if (responseValues[0].length < 2) { - const [messageText] = responseValues[0]; - title = messageText; - } else { - const [type, errorCode, messageText, statementText, entities] = - responseValues[0]; - title = `[Error #${errorCode}] ${type}`; - message = - (entities?.length ? `[${entities.join(', ')}] ` : '') + - (statementText ? `"${statementText}" ` : '') + - messageText; - } - - return { - title, - message, - }; -}; - -const Query: FC = () => { +const Query = () => { const { clusterName } = useAppParams(); + const executeQuery = useExecuteKsqlkDbQueryMutation(); + const [pipeId, setPipeId] = React.useState(false); - const sseRef = React.useRef<{ sse: EventSource | null; isOpen: boolean }>({ - sse: null, - isOpen: false, - }); - const [fetching, setFetching] = useState(false); - const dispatch = useAppDispatch(); + const sse = useKsqlkDbSSE({ clusterName, pipeId }); - const { executionResult } = useAppSelector(getKsqlExecution); - const [KSQLTable, setKSQLTable] = useState(null); + const isFetching = executeQuery.isLoading || sse.isFetching; - const reset = useCallback(() => { - dispatch(resetExecutionResult()); - }, [dispatch]); - - useEffect(() => { - return reset; - }, [reset]); - - const destroySSE = () => { - if (sseRef.current?.sse) { - sseRef.current.sse.close(); - setFetching(false); - sseRef.current.sse = null; - sseRef.current.isOpen = false; - } + const submitHandler = async (values: FormValues) => { + const filtered = values.streamsProperties.filter(({ key }) => key != null); + const streamsProperties = filtered.reduce>( + (acc, current) => ({ ...acc, [current.key]: current.value }), + {} + ); + await executeQuery.mutateAsync( + { + clusterName, + ksqlCommandV2: { + ...values, + streamsProperties: + values.streamsProperties[0].key !== '' + ? 
JSON.parse(JSON.stringify(streamsProperties)) + : undefined, + }, + }, + { onSuccess: (data) => setPipeId(data.pipeId) } + ); }; - const handleSSECancel = useCallback(() => { - reset(); - destroySSE(); - }, [reset]); - - const createSSE = useCallback( - (pipeId: string) => { - const url = `${BASE_PARAMS.basePath}/api/clusters/${clusterName}/ksql/response?pipeId=${pipeId}`; - const sse = new EventSource(url); - sseRef.current.sse = sse; - setFetching(true); - - sse.onopen = () => { - sseRef.current.isOpen = true; - }; - - sse.onmessage = ({ data }) => { - const { table }: KsqlResponse = JSON.parse(data); - if (table) { - switch (table?.header) { - case 'Execution error': { - const { title, message } = getFormattedErrorFromTableData( - table.values - ); - const id = `${url}-executionError`; - showAlert('error', { id, title, message }); - break; - } - case 'Schema': { - setKSQLTable(table); - break; - } - case 'Row': { - setKSQLTable((PrevKSQLTable) => { - return { - header: PrevKSQLTable?.header, - columnNames: PrevKSQLTable?.columnNames, - values: [ - ...(PrevKSQLTable?.values || []), - ...(table?.values || []), - ], - }; - }); - break; - } - case 'Query Result': { - const id = `${url}-querySuccess`; - showSuccessAlert({ id, title: 'Query succeed', message: '' }); - break; - } - case 'Source Description': - case 'properties': - default: { - setKSQLTable(table); - break; - } - } - } - return sse; - }; - - sse.onerror = () => { - // if it's open - we know that server responded without opening SSE - if (!sseRef.current.isOpen) { - showAlert('error', { - id: `${url}-connectionClosedError`, - title: '', - message: 'SSE connection closed', - }); - } - destroySSE(); - }; - }, - [clusterName, dispatch] - ); - - const submitHandler = useCallback( - (values: FormValues) => { - const filteredProperties = values.streamsProperties.filter( - (property) => property.key != null - ); - const streamsProperties = filteredProperties.reduce( - (acc, current) => ({ - ...acc, - [current.key as keyof string]: current.value, - }), - {} as { [key: string]: string } - ); - setFetching(true); - dispatch( - executeKsql({ - clusterName, - ksqlCommandV2: { - ...values, - streamsProperties: - values.streamsProperties[0].key !== '' - ? 
JSON.parse(JSON.stringify(streamsProperties)) - : undefined, - }, - }) - ); - }, - [dispatch, clusterName] - ); - useEffect(() => { - if (executionResult?.pipeId) { - createSSE(executionResult.pipeId); - } - return () => { - destroySSE(); - }; - }, [createSSE, executionResult]); - return ( <> - setKSQLTable(null)} - handleSSECancel={handleSSECancel} + fetching={isFetching} + hasResults={!!sse.data && !!pipeId} + resetResults={() => setPipeId(false)} submitHandler={submitHandler} /> - {KSQLTable && } - {fetching && } + {pipeId && !!sse.data && } ); }; diff --git a/kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.styled.ts b/kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.styled.ts index b1066e481c..6d0f6598b4 100644 --- a/kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.styled.ts +++ b/kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.styled.ts @@ -6,13 +6,12 @@ export const QueryWrapper = styled.div` `; export const KSQLInputsWrapper = styled.div` - width: 100%; display: flex; gap: 24px; - padding-bottom: 16px; - & > div { - flex-grow: 1; + + @media screen and (max-width: 769px) { + flex-direction: column; } `; @@ -22,61 +21,23 @@ export const KSQLInputHeader = styled.div` color: ${({ theme }) => theme.default.color.normal}; `; -export const KSQLButtons = styled.div` - display: flex; - gap: 16px; -`; - -export const StreamPropertiesContainer = styled.label` - display: flex; - flex-direction: column; - gap: 10px; - width: 50%; - color: ${({ theme }) => theme.default.color.normal}; -`; - export const InputsContainer = styled.div` - overflow: hidden; - width: 100%; - display: flex; - justify-content: center; - gap: 10px; -`; - -export const StreamPropertiesInputWrapper = styled.div` - & { - width: 100%; - } - & > input { - width: 100%; - height: 40px; - border: 1px solid grey; - &:focus { - outline: none; - border-color: ${({ theme }) => theme.input.borderColor.focus}; - &::placeholder { - color: transparent; - } - } - border-radius: 4px; - font-size: 16px; - padding-left: 15px; - background-color: ${({ theme }) => theme.input.backgroundColor.normal}; - color: ${({ theme }) => theme.input.color.normal}; - } -`; - -export const DeleteButtonWrapper = styled.div` - min-height: 32px; - display: flex; - flex-direction: column; + display: grid; + grid-template-columns: 1fr 1fr 30px; align-items: center; - justify-self: flex-start; - margin-top: 10px; + gap: 10px; `; export const Fieldset = styled.fieldset` - width: 50%; + display: flex; + flex: 1; + flex-direction: column; + gap: 8px; +`; + +export const ButtonsContainer = styled.div` + display: flex; + gap: 8px; `; export const SQLEditor = styled(BaseSQLEditor)( diff --git a/kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.tsx b/kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.tsx index 3530921088..3803c1e10f 100644 --- a/kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.tsx +++ b/kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.tsx @@ -1,22 +1,27 @@ -import React, { useCallback, useRef } from 'react'; +import React from 'react'; import { FormError } from 'components/common/Input/Input.styled'; import { ErrorMessage } from '@hookform/error-message'; -import { useForm, Controller, useFieldArray } from 'react-hook-form'; +import { + useForm, + Controller, + useFieldArray, + FormProvider, +} from 'react-hook-form'; import { Button } from 'components/common/Button/Button'; import IconButtonWrapper from 
'components/common/Icons/IconButtonWrapper'; import CloseIcon from 'components/common/Icons/CloseIcon'; import { yupResolver } from '@hookform/resolvers/yup'; import yup from 'lib/yupExtended'; import PlusIcon from 'components/common/Icons/PlusIcon'; -import ReactAce from 'react-ace/lib/ace'; +import ReactAce from 'react-ace'; +import Input from 'components/common/Input/Input'; import * as S from './QueryForm.styled'; -export interface Props { +interface QueryFormProps { fetching: boolean; hasResults: boolean; - handleClearResults: () => void; - handleSSECancel: () => void; + resetResults: () => void; submitHandler: (values: FormValues) => void; } type StreamsPropertiesType = { @@ -37,20 +42,13 @@ const validationSchema = yup.object({ streamsProperties: yup.array().of(streamsPropertiesSchema), }); -const QueryForm: React.FC = ({ +const QueryForm: React.FC = ({ fetching, hasResults, - handleClearResults, - handleSSECancel, submitHandler, + resetResults, }) => { - const { - handleSubmit, - setValue, - getValues, - control, - formState: { errors }, - } = useForm({ + const methods = useForm({ mode: 'onTouched', resolver: yupResolver(validationSchema), defaultValues: { @@ -58,7 +56,16 @@ const QueryForm: React.FC = ({ streamsProperties: [{ key: '', value: '' }], }, }); - const { fields, append, remove } = useFieldArray< + + const { + handleSubmit, + setValue, + control, + watch, + formState: { errors, isDirty }, + } = methods; + + const { fields, append, remove, update } = useFieldArray< FormValues, 'streamsProperties' >({ @@ -66,17 +73,24 @@ const QueryForm: React.FC = ({ name: 'streamsProperties', }); - const handleAddNewProperty = useCallback(() => { - if ( - getValues().streamsProperties.every((prop) => { - return prop.key; - }) - ) { - append({ key: '', value: '' }); - } - }, []); + const watchStreamProps = watch('streamsProperties'); - const inputRef = useRef(null); + const appendProperty = () => { + append({ key: '', value: '' }); + }; + const removeProperty = (index: number) => () => { + if (fields.length === 1) { + update(index, { key: '', value: '' }); + return; + } + + remove(index); + }; + + const isAppendDisabled = + fetching || !!watchStreamProps.find((field) => !field.key); + + const inputRef = React.useRef(null); const handleFocus = () => { // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -87,145 +101,117 @@ const QueryForm: React.FC = ({ } }; + const handleClear = () => { + handleFocus(); + resetResults(); + }; + return ( - -
- - - - - - - ( - { - handleSubmit(submitHandler)(); + + + + + + + + + + ( + { + handleSubmit(submitHandler)(); + }, }, - }, - ]} - readOnly={fetching} - ref={inputRef} - /> - )} - /> - - - - + ]} + readOnly={fetching} + ref={inputRef} + /> + )} + /> + + + + - - Stream properties: - {fields.map((item, index) => ( - - - + Stream properties: + {fields.map((field, index) => ( + + ( - - )} + placeholder="Key" + type="text" + autoComplete="off" + withError /> - - - - - - ( - - )} + placeholder="Value" + type="text" + autoComplete="off" + withError /> - - - - - - remove(index)}> - + - - - ))} + + ))} + + + + - - - - - - - - -
+ + + + + ); }; diff --git a/kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/__test__/QueryForm.spec.tsx b/kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/__test__/QueryForm.spec.tsx deleted file mode 100644 index 76f8b21335..0000000000 --- a/kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/__test__/QueryForm.spec.tsx +++ /dev/null @@ -1,189 +0,0 @@ -import { render } from 'lib/testHelpers'; -import React from 'react'; -import QueryForm, { Props } from 'components/KsqlDb/Query/QueryForm/QueryForm'; -import { screen, waitFor, within } from '@testing-library/dom'; -import userEvent from '@testing-library/user-event'; - -const renderComponent = (props: Props) => render(); - -describe('QueryForm', () => { - it('renders', () => { - renderComponent({ - fetching: false, - hasResults: false, - handleClearResults: jest.fn(), - handleSSECancel: jest.fn(), - submitHandler: jest.fn(), - }); - - const KSQLBlock = screen.getByLabelText('KSQL'); - expect(KSQLBlock).toBeInTheDocument(); - expect(within(KSQLBlock).getByText('KSQL')).toBeInTheDocument(); - expect( - within(KSQLBlock).getByRole('button', { name: 'Clear' }) - ).toBeInTheDocument(); - // Represents SQL editor - expect(within(KSQLBlock).getByRole('textbox')).toBeInTheDocument(); - - const streamPropertiesBlock = screen.getByRole('textbox', { name: 'key' }); - expect(streamPropertiesBlock).toBeInTheDocument(); - expect(screen.getByText('Stream properties:')).toBeInTheDocument(); - expect(screen.getByRole('button', { name: 'Clear' })).toBeInTheDocument(); - expect(screen.queryAllByRole('textbox')[0]).toBeInTheDocument(); - - // Form controls - expect(screen.getByRole('button', { name: 'Execute' })).toBeInTheDocument(); - expect(screen.getByRole('button', { name: 'Execute' })).toBeEnabled(); - expect( - screen.getByRole('button', { name: 'Stop query' }) - ).toBeInTheDocument(); - expect(screen.getByRole('button', { name: 'Stop query' })).toBeDisabled(); - expect( - screen.getByRole('button', { name: 'Clear results' }) - ).toBeInTheDocument(); - expect( - screen.getByRole('button', { name: 'Clear results' }) - ).toBeDisabled(); - }); - - it('renders error with empty input', async () => { - const submitFn = jest.fn(); - renderComponent({ - fetching: false, - hasResults: false, - handleClearResults: jest.fn(), - handleSSECancel: jest.fn(), - submitHandler: submitFn, - }); - - await userEvent.click(screen.getByRole('button', { name: 'Execute' })); - - await waitFor(() => { - expect(screen.getByText('ksql is a required field')).toBeInTheDocument(); - expect(submitFn).not.toBeCalled(); - }); - }); - - it('submits with correct inputs', async () => { - const submitFn = jest.fn(); - renderComponent({ - fetching: false, - hasResults: false, - handleClearResults: jest.fn(), - handleSSECancel: jest.fn(), - submitHandler: submitFn, - }); - - const textbox = screen.getAllByRole('textbox'); - textbox[0].focus(); - await userEvent.paste('show tables;'); - const key = screen.getByRole('textbox', { name: 'key' }); - key.focus(); - await userEvent.paste('test'); - const value = screen.getByRole('textbox', { name: 'value' }); - value.focus(); - await userEvent.paste('test'); - await userEvent.click(screen.getByRole('button', { name: 'Execute' })); - - expect( - screen.queryByText('ksql is a required field') - ).not.toBeInTheDocument(); - - expect( - screen.queryByText('streamsProperties is not JSON object') - ).not.toBeInTheDocument(); - - expect(submitFn).toBeCalled(); - }); - - it('clear results is enabled when has results', async () => { 
- const clearFn = jest.fn(); - renderComponent({ - fetching: false, - hasResults: true, - handleClearResults: clearFn, - handleSSECancel: jest.fn(), - submitHandler: jest.fn(), - }); - - expect(screen.getByRole('button', { name: 'Clear results' })).toBeEnabled(); - - await userEvent.click( - screen.getByRole('button', { name: 'Clear results' }) - ); - - expect(clearFn).toBeCalled(); - }); - - it('stop query query is enabled when is fetching', async () => { - const cancelFn = jest.fn(); - renderComponent({ - fetching: true, - hasResults: false, - handleClearResults: jest.fn(), - handleSSECancel: cancelFn, - submitHandler: jest.fn(), - }); - - expect(screen.getByRole('button', { name: 'Stop query' })).toBeEnabled(); - - await userEvent.click(screen.getByRole('button', { name: 'Stop query' })); - - expect(cancelFn).toBeCalled(); - }); - - it('add new property', async () => { - renderComponent({ - fetching: false, - hasResults: false, - handleClearResults: jest.fn(), - handleSSECancel: jest.fn(), - submitHandler: jest.fn(), - }); - - const textbox = screen.getByLabelText('key'); - await userEvent.type(textbox, 'prop_name'); - await userEvent.click( - screen.getByRole('button', { name: 'Add Stream Property' }) - ); - expect(screen.getAllByRole('textbox', { name: 'key' }).length).toEqual(2); - }); - - it("doesn't add new property", async () => { - renderComponent({ - fetching: false, - hasResults: false, - handleClearResults: jest.fn(), - handleSSECancel: jest.fn(), - submitHandler: jest.fn(), - }); - - await userEvent.click( - screen.getByRole('button', { name: 'Add Stream Property' }) - ); - expect(screen.getAllByRole('textbox', { name: 'key' }).length).toEqual(1); - }); - - it('delete stream property', async () => { - await renderComponent({ - fetching: false, - hasResults: false, - handleClearResults: jest.fn(), - handleSSECancel: jest.fn(), - submitHandler: jest.fn(), - }); - const textBoxes = screen.getAllByRole('textbox', { name: 'key' }); - textBoxes[0].focus(); - await userEvent.paste('test'); - await userEvent.click( - screen.getByRole('button', { name: 'Add Stream Property' }) - ); - await userEvent.click(screen.getAllByLabelText('deleteProperty')[0]); - - await screen.getByRole('button', { name: 'Add Stream Property' }); - - await userEvent.click(screen.getAllByLabelText('deleteProperty')[0]); - - expect(textBoxes.length).toEqual(1); - }); -}); diff --git a/kafka-ui-react-app/src/components/KsqlDb/Query/__test__/Query.spec.tsx b/kafka-ui-react-app/src/components/KsqlDb/Query/__test__/Query.spec.tsx deleted file mode 100644 index 705d86be5f..0000000000 --- a/kafka-ui-react-app/src/components/KsqlDb/Query/__test__/Query.spec.tsx +++ /dev/null @@ -1,116 +0,0 @@ -import { render, EventSourceMock, WithRoute } from 'lib/testHelpers'; -import React from 'react'; -import Query, { - getFormattedErrorFromTableData, -} from 'components/KsqlDb/Query/Query'; -import { screen } from '@testing-library/dom'; -import fetchMock from 'fetch-mock'; -import { clusterKsqlDbQueryPath } from 'lib/paths'; -import userEvent from '@testing-library/user-event'; - -const clusterName = 'testLocal'; -const renderComponent = () => - render( - - - , - { - initialEntries: [clusterKsqlDbQueryPath(clusterName)], - } - ); - -describe('Query', () => { - it('renders', () => { - renderComponent(); - - expect(screen.getByLabelText('KSQL')).toBeInTheDocument(); - expect(screen.getByLabelText('Stream properties:')).toBeInTheDocument(); - }); - - afterEach(() => fetchMock.reset()); - it('fetch on execute', async () => { - 
renderComponent(); - - const mock = fetchMock.postOnce(`/api/clusters/${clusterName}/ksql/v2`, { - pipeId: 'testPipeID', - }); - - Object.defineProperty(window, 'EventSource', { - value: EventSourceMock, - }); - const inputs = screen.getAllByRole('textbox'); - const textAreaElement = inputs[0] as HTMLTextAreaElement; - - textAreaElement.focus(); - await userEvent.paste('show tables;'); - await userEvent.click(screen.getByRole('button', { name: 'Execute' })); - - expect(mock.calls().length).toBe(1); - }); - - it('fetch on execute with streamParams', async () => { - renderComponent(); - - const mock = fetchMock.postOnce(`/api/clusters/${clusterName}/ksql/v2`, { - pipeId: 'testPipeID', - }); - - Object.defineProperty(window, 'EventSource', { - value: EventSourceMock, - }); - - const inputs = screen.getAllByRole('textbox'); - const textAreaElement = inputs[0] as HTMLTextAreaElement; - textAreaElement.focus(); - await userEvent.paste('show tables;'); - - const key = screen.getByLabelText('key'); - key.focus(); - await userEvent.paste('key'); - const value = screen.getByLabelText('value'); - value.focus(); - await userEvent.paste('value'); - - await userEvent.click(screen.getByRole('button', { name: 'Execute' })); - - expect(mock.calls().length).toBe(1); - }); -}); - -describe('getFormattedErrorFromTableData', () => { - it('works', () => { - expect(getFormattedErrorFromTableData([['Test Error']])).toStrictEqual({ - title: 'Test Error', - message: '', - }); - - expect( - getFormattedErrorFromTableData([ - ['some_type', 'errorCode', 'messageText'], - ]) - ).toStrictEqual({ - title: '[Error #errorCode] some_type', - message: 'messageText', - }); - - expect( - getFormattedErrorFromTableData([ - [ - 'some_type', - 'errorCode', - 'messageText', - 'statementText', - ['test1', 'test2'], - ], - ]) - ).toStrictEqual({ - title: '[Error #errorCode] some_type', - message: '[test1, test2] "statementText" messageText', - }); - - expect(getFormattedErrorFromTableData([])).toStrictEqual({ - title: 'Unknown error', - message: 'Recieved empty response', - }); - }); -}); diff --git a/kafka-ui-react-app/src/components/KsqlDb/Query/renderer/TableRenderer/TableRenderer.tsx b/kafka-ui-react-app/src/components/KsqlDb/Query/renderer/TableRenderer/TableRenderer.tsx index aaf4755b0a..4e1acb38d6 100644 --- a/kafka-ui-react-app/src/components/KsqlDb/Query/renderer/TableRenderer/TableRenderer.tsx +++ b/kafka-ui-react-app/src/components/KsqlDb/Query/renderer/TableRenderer/TableRenderer.tsx @@ -6,13 +6,11 @@ import { TableTitle } from 'components/common/table/TableTitle/TableTitle.styled import * as S from './TableRenderer.styled'; -export interface Props { +interface TableRendererProps { table: KsqlTableResponse; } -export function hasJsonStructure( - str: string | Record -): boolean { +function hasJsonStructure(str: string | Record): boolean { if (typeof str === 'object') { return true; } @@ -30,13 +28,7 @@ export function hasJsonStructure( return false; } -const TableRenderer: React.FC = ({ table }) => { - const heading = React.useMemo(() => { - return table.header || ''; - }, [table.header]); - const ths = React.useMemo(() => { - return table.columnNames || []; - }, [table.columnNames]); +const TableRenderer: React.FC = ({ table }) => { const rows = React.useMemo(() => { return (table.values || []).map((row) => { return { @@ -53,9 +45,11 @@ const TableRenderer: React.FC = ({ table }) => { }); }, [table.values]); + const ths = table.columnNames || []; + return ( - {heading} + {table.header}
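For reference, a minimal sketch of the observable behaviour of hasJsonStructure (made module-private in the TableRenderer change above); these cases mirror the assertions in the spec removed below, the helper itself is unchanged by this diff:

// Strings must parse to a JSON object or array to count as JSON.
hasJsonStructure('{"correct": "json"}'); // true
hasJsonStructure('[]'); // true
hasJsonStructure('simplestring'); // false
hasJsonStructure('{"json": "but doesnt have closing brackets"'); // false
// Non-string objects short-circuit to true without parsing.
hasJsonStructure({ correct: 'json' }); // true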
diff --git a/kafka-ui-react-app/src/components/KsqlDb/Query/renderer/TableRenderer/__test__/TableRenderer.spec.tsx b/kafka-ui-react-app/src/components/KsqlDb/Query/renderer/TableRenderer/__test__/TableRenderer.spec.tsx deleted file mode 100644 index 775b16b51a..0000000000 --- a/kafka-ui-react-app/src/components/KsqlDb/Query/renderer/TableRenderer/__test__/TableRenderer.spec.tsx +++ /dev/null @@ -1,71 +0,0 @@ -import { render } from 'lib/testHelpers'; -import React from 'react'; -import TableRenderer, { - Props, - hasJsonStructure, -} from 'components/KsqlDb/Query/renderer/TableRenderer/TableRenderer'; -import { screen } from '@testing-library/dom'; - -const renderComponent = (props: Props) => render(); - -describe('TableRenderer', () => { - it('renders', () => { - renderComponent({ - table: { - header: 'Test header', - columnNames: ['Test column name'], - values: [['Table row #1'], ['Table row #2'], ['{"jsonrow": "#3"}']], - }, - }); - - expect( - screen.getByRole('heading', { name: 'Test header' }) - ).toBeInTheDocument(); - expect( - screen.getByRole('columnheader', { name: 'Test column name' }) - ).toBeInTheDocument(); - expect( - screen.getByRole('cell', { name: 'Table row #1' }) - ).toBeInTheDocument(); - expect( - screen.getByRole('cell', { name: 'Table row #2' }) - ).toBeInTheDocument(); - }); - - it('renders with empty arrays', () => { - renderComponent({ - table: {}, - }); - - expect(screen.getByText('No tables or streams found')).toBeInTheDocument(); - }); -}); - -describe('hasJsonStructure', () => { - it('works', () => { - expect(hasJsonStructure('simplestring')).toBeFalsy(); - expect( - hasJsonStructure("{'looksLikeJson': 'but has wrong quotes'}") - ).toBeFalsy(); - expect( - hasJsonStructure('{"json": "but doesnt have closing brackets"') - ).toBeFalsy(); - expect(hasJsonStructure('"string":"that looks like json"')).toBeFalsy(); - - expect(hasJsonStructure('1')).toBeFalsy(); - expect(hasJsonStructure('{1:}')).toBeFalsy(); - expect(hasJsonStructure('{1:"1"}')).toBeFalsy(); - - // @ts-expect-error We suppress error because this function works with unknown data from server - expect(hasJsonStructure(1)).toBeFalsy(); - - expect(hasJsonStructure('{}')).toBeTruthy(); - expect(hasJsonStructure('{"correct": "json"}')).toBeTruthy(); - - expect(hasJsonStructure('[]')).toBeTruthy(); - expect(hasJsonStructure('[{}]')).toBeTruthy(); - - expect(hasJsonStructure({})).toBeTruthy(); - expect(hasJsonStructure({ correct: 'json' })).toBeTruthy(); - }); -}); diff --git a/kafka-ui-react-app/src/components/KsqlDb/TableView.tsx b/kafka-ui-react-app/src/components/KsqlDb/TableView.tsx new file mode 100644 index 0000000000..d27e4968b7 --- /dev/null +++ b/kafka-ui-react-app/src/components/KsqlDb/TableView.tsx @@ -0,0 +1,39 @@ +import React from 'react'; +import { KsqlStreamDescription, KsqlTableDescription } from 'generated-sources'; +import Table from 'components/common/NewTable'; +import { ColumnDef } from '@tanstack/react-table'; + +interface TableViewProps { + fetching: boolean; + rows: KsqlTableDescription[] | KsqlStreamDescription[]; +} + +const TableView: React.FC = ({ fetching, rows }) => { + const columns = React.useMemo< + ColumnDef[] + >( + () => [ + { header: 'Name', accessorKey: 'name' }, + { header: 'Topic', accessorKey: 'topic' }, + { header: 'Key Format', accessorKey: 'keyFormat' }, + { header: 'Value Format', accessorKey: 'valueFormat' }, + { + header: 'Is Windowed', + accessorKey: 'isWindowed', + cell: ({ row }) => + 'isWindowed' in row.original ? 
String(row.original.isWindowed) : '-', + }, + ], + [] + ); + return ( +
+  );
+};
+
+export default TableView;
diff --git a/kafka-ui-react-app/src/components/KsqlDb/__test__/KsqlDb.spec.tsx b/kafka-ui-react-app/src/components/KsqlDb/__test__/KsqlDb.spec.tsx
deleted file mode 100644
index b07a3936da..0000000000
--- a/kafka-ui-react-app/src/components/KsqlDb/__test__/KsqlDb.spec.tsx
+++ /dev/null
@@ -1,42 +0,0 @@
-import React from 'react';
-import KsqlDb from 'components/KsqlDb/KsqlDb';
-import { render, WithRoute } from 'lib/testHelpers';
-import { screen } from '@testing-library/dom';
-import {
-  clusterKsqlDbPath,
-  clusterKsqlDbQueryPath,
-  getNonExactPath,
-} from 'lib/paths';
-
-const KSqLComponentText = {
-  list: 'list',
-  query: 'query',
-};
-
-jest.mock('components/KsqlDb/List/List', () => () => (
-  <div>{KSqLComponentText.list}</div>
-));
-jest.mock('components/KsqlDb/Query/Query', () => () => (
-  <div>{KSqLComponentText.query}</div>
-)); - -describe('KsqlDb Component', () => { - const clusterName = 'clusterName'; - const renderComponent = (path: string) => - render( - - - , - { initialEntries: [path] } - ); - - it('Renders the List', () => { - renderComponent(clusterKsqlDbPath(clusterName)); - expect(screen.getByText(KSqLComponentText.list)).toBeInTheDocument(); - }); - - it('Renders the List', () => { - renderComponent(clusterKsqlDbQueryPath(clusterName)); - expect(screen.getByText(KSqLComponentText.query)).toBeInTheDocument(); - }); -}); diff --git a/kafka-ui-react-app/src/components/Schemas/Details/__test__/fixtures.ts b/kafka-ui-react-app/src/components/Schemas/Details/__test__/fixtures.ts index 174cfd830f..18cca3b100 100644 --- a/kafka-ui-react-app/src/components/Schemas/Details/__test__/fixtures.ts +++ b/kafka-ui-react-app/src/components/Schemas/Details/__test__/fixtures.ts @@ -12,12 +12,6 @@ export const versionPayload = [ ]; export const versionEmptyPayload = []; -export const versions = [ - schemaVersion1, - schemaVersion2, - schemaVersionWithNonAsciiChars, -]; - export const jsonSchema: SchemaSubject = { subject: 'test', version: '15', diff --git a/kafka-ui-react-app/src/components/common/ActionComponent/__tests__/fixtures.ts b/kafka-ui-react-app/src/components/common/ActionComponent/__tests__/fixtures.ts index 9107dcc775..bf2047ad64 100644 --- a/kafka-ui-react-app/src/components/common/ActionComponent/__tests__/fixtures.ts +++ b/kafka-ui-react-app/src/components/common/ActionComponent/__tests__/fixtures.ts @@ -14,7 +14,7 @@ export const invalidPermission = { action: Action.DELETE, }; -export const roles = [ +const roles = [ { ...validPermission, actions: [validPermission.action], diff --git a/kafka-ui-react-app/src/components/common/DiffViewer/DiffViewer.tsx b/kafka-ui-react-app/src/components/common/DiffViewer/DiffViewer.tsx index 18abc774d1..b38b4f0af9 100644 --- a/kafka-ui-react-app/src/components/common/DiffViewer/DiffViewer.tsx +++ b/kafka-ui-react-app/src/components/common/DiffViewer/DiffViewer.tsx @@ -1,4 +1,5 @@ import { diff as DiffEditor } from 'react-ace'; +import 'ace-builds/src-noconflict/ace'; import 'ace-builds/src-noconflict/mode-json5'; import 'ace-builds/src-noconflict/mode-protobuf'; import 'ace-builds/src-noconflict/theme-textmate'; diff --git a/kafka-ui-react-app/src/components/common/Editor/Editor.tsx b/kafka-ui-react-app/src/components/common/Editor/Editor.tsx index 9903bcc334..b8744762c9 100644 --- a/kafka-ui-react-app/src/components/common/Editor/Editor.tsx +++ b/kafka-ui-react-app/src/components/common/Editor/Editor.tsx @@ -1,11 +1,9 @@ -/* eslint-disable react/jsx-props-no-spreading */ import AceEditor, { IAceEditorProps } from 'react-ace'; import 'ace-builds/src-noconflict/mode-json5'; import 'ace-builds/src-noconflict/mode-protobuf'; import 'ace-builds/src-noconflict/theme-tomorrow'; import { SchemaType } from 'generated-sources'; import React from 'react'; -import ReactAce from 'react-ace/lib/ace'; import styled from 'styled-components'; interface EditorProps extends IAceEditorProps { @@ -13,7 +11,7 @@ interface EditorProps extends IAceEditorProps { schemaType?: string; } -const Editor = React.forwardRef((props, ref) => { +const Editor = React.forwardRef((props, ref) => { const { isFixedHeight, schemaType, ...rest } = props; return ( > = ({ - getValue, -}) => {getValue()}; - -export default TruncatedTextCell; diff --git a/kafka-ui-react-app/src/components/common/SQLEditor/SQLEditor.tsx b/kafka-ui-react-app/src/components/common/SQLEditor/SQLEditor.tsx index 
de52f9a451..a12170151e 100644 --- a/kafka-ui-react-app/src/components/common/SQLEditor/SQLEditor.tsx +++ b/kafka-ui-react-app/src/components/common/SQLEditor/SQLEditor.tsx @@ -1,15 +1,15 @@ /* eslint-disable react/jsx-props-no-spreading */ import AceEditor, { IAceEditorProps } from 'react-ace'; +import 'ace-builds/src-noconflict/ace'; import 'ace-builds/src-noconflict/mode-sql'; import 'ace-builds/src-noconflict/theme-textmate'; import React from 'react'; -import ReactAce from 'react-ace/lib/ace'; interface SQLEditorProps extends IAceEditorProps { isFixedHeight?: boolean; } -const SQLEditor = React.forwardRef( +const SQLEditor = React.forwardRef( (props, ref) => { const { isFixedHeight, ...rest } = props; return ( diff --git a/kafka-ui-react-app/src/components/common/Tooltip/Tooltip.tsx b/kafka-ui-react-app/src/components/common/Tooltip/Tooltip.tsx index cde97964f3..0764320f58 100644 --- a/kafka-ui-react-app/src/components/common/Tooltip/Tooltip.tsx +++ b/kafka-ui-react-app/src/components/common/Tooltip/Tooltip.tsx @@ -8,13 +8,13 @@ import { import * as S from './Tooltip.styled'; -export interface PropsTypes { +interface TooltipProps { value: React.ReactNode; content: string; placement?: Placement; } -const Tooltip: React.FC = ({ value, content, placement }) => { +const Tooltip: React.FC = ({ value, content, placement }) => { const [open, setOpen] = useState(false); const { x, y, refs, strategy, context } = useFloating({ open, diff --git a/kafka-ui-react-app/src/components/common/table/TableTitle/TableTitle.styled.tsx b/kafka-ui-react-app/src/components/common/table/TableTitle/TableTitle.styled.tsx index 1ef6ee258d..ee1d26d504 100644 --- a/kafka-ui-react-app/src/components/common/table/TableTitle/TableTitle.styled.tsx +++ b/kafka-ui-react-app/src/components/common/table/TableTitle/TableTitle.styled.tsx @@ -3,5 +3,5 @@ import Heading from 'components/common/heading/Heading.styled'; import styled from 'styled-components'; export const TableTitle = styled((props) => )` - padding: 16px; + padding: 16px 16px 0; `; diff --git a/kafka-ui-react-app/src/lib/hooks/__tests__/fixtures.ts b/kafka-ui-react-app/src/lib/hooks/__tests__/fixtures.ts index 435e0286f5..a6bdab4366 100644 --- a/kafka-ui-react-app/src/lib/hooks/__tests__/fixtures.ts +++ b/kafka-ui-react-app/src/lib/hooks/__tests__/fixtures.ts @@ -4,7 +4,7 @@ import { modifyRolesData } from 'lib/permissions'; export const clusterName1 = 'local'; export const clusterName2 = 'dev'; -export const userPermissionsMock = [ +const userPermissionsMock = [ { clusters: [clusterName1], resource: ResourceType.TOPIC, diff --git a/kafka-ui-react-app/src/lib/hooks/api/kafkaConnect.ts b/kafka-ui-react-app/src/lib/hooks/api/kafkaConnect.ts index dd34030653..b8a17c558d 100644 --- a/kafka-ui-react-app/src/lib/hooks/api/kafkaConnect.ts +++ b/kafka-ui-react-app/src/lib/hooks/api/kafkaConnect.ts @@ -109,7 +109,7 @@ export function useUpdateConnectorConfig(props: UseConnectorProps) { } ); } -export function useCreateConnectorMutation(clusterName: ClusterName) { +function useCreateConnectorMutation(clusterName: ClusterName) { const client = useQueryClient(); return useMutation( (props: CreateConnectorProps) => diff --git a/kafka-ui-react-app/src/lib/hooks/api/ksqlDb.tsx b/kafka-ui-react-app/src/lib/hooks/api/ksqlDb.tsx new file mode 100644 index 0000000000..6685c6223f --- /dev/null +++ b/kafka-ui-react-app/src/lib/hooks/api/ksqlDb.tsx @@ -0,0 +1,184 @@ +import { ksqlDbApiClient as api } from 'lib/api'; +import { useMutation, useQueries } from '@tanstack/react-query'; 
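+// NOTE: illustrative usage, not part of this change. useQueries returns its
+// results as a tuple, so callers would typically consume the two queries
+// declared in useKsqlkDb below as, e.g.:
+//   const [tablesQuery, streamsQuery] = useKsqlkDb(clusterName);
+// with tablesQuery.data / streamsQuery.data feeding a table view; suspense
+// is disabled so each list can render its own loading state.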
+import { ClusterName } from 'redux/interfaces';
+import { BASE_PARAMS } from 'lib/constants';
+import React from 'react';
+import { fetchEventSource } from '@microsoft/fetch-event-source';
+import {
+  showAlert,
+  showServerError,
+  showSuccessAlert,
+} from 'lib/errorHandling';
+import {
+  ExecuteKsqlRequest,
+  KsqlResponse,
+  KsqlTableResponse,
+} from 'generated-sources';
+import { StopLoading } from 'components/Topics/Topic/Messages/Messages.styled';
+import toast from 'react-hot-toast';
+
+export function useKsqlkDb(clusterName: ClusterName) {
+  return useQueries({
+    queries: [
+      {
+        queryKey: ['clusters', clusterName, 'ksqlDb', 'tables'],
+        queryFn: () => api.listTables({ clusterName }),
+        suspense: false,
+      },
+      {
+        queryKey: ['clusters', clusterName, 'ksqlDb', 'streams'],
+        queryFn: () => api.listStreams({ clusterName }),
+        suspense: false,
+      },
+    ],
+  });
+}
+
+export function useExecuteKsqlkDbQueryMutation() {
+  return useMutation((props: ExecuteKsqlRequest) => api.executeKsql(props));
+}
+
+const getFormattedErrorFromTableData = (
+  responseValues: KsqlTableResponse['values']
+): { title: string; message: string } => {
+  // We expect something like this:
+  // [[
+  //    "@type",
+  //    "error_code",
+  //    "message",
+  //    "statementText"?,
+  //    "entities"?
+  //  ]],
+  // or
+  // [["message"]]
+
+  if (!responseValues || !responseValues.length) {
+    return {
+      title: 'Unknown error',
+      message: 'Received empty response',
+    };
+  }
+
+  let title = '';
+  let message = '';
+  if (responseValues[0].length < 2) {
+    const [messageText] = responseValues[0];
+    title = messageText;
+  } else {
+    const [type, errorCode, messageText, statementText, entities] =
+      responseValues[0];
+    title = `[Error #${errorCode}] ${type}`;
+    message =
+      (entities?.length ? `[${entities.join(', ')}] ` : '') +
+      (statementText ? 
`"${statementText}" ` : '') + + messageText; + } + + return { title, message }; +}; + +type UseKsqlkDbSSEProps = { + pipeId: string | false; + clusterName: ClusterName; +}; + +export const useKsqlkDbSSE = ({ clusterName, pipeId }: UseKsqlkDbSSEProps) => { + const [data, setData] = React.useState(); + const [isFetching, setIsFetching] = React.useState(false); + + const abortController = new AbortController(); + + React.useEffect(() => { + const fetchData = async () => { + const url = `${BASE_PARAMS.basePath}/api/clusters/${clusterName}/ksql/response`; + await fetchEventSource( + `${url}?${new URLSearchParams({ pipeId: pipeId || '' }).toString()}`, + { + method: 'GET', + signal: abortController.signal, + openWhenHidden: true, + async onopen(response) { + const { ok, status } = response; + if (ok) setData(undefined); // Reset + if (status >= 400 && status < 500 && status !== 429) { + showServerError(response); + } + }, + onmessage(event) { + const { table }: KsqlResponse = JSON.parse(event.data); + if (!table) { + return; + } + switch (table?.header) { + case 'Execution error': { + showAlert('error', { + ...getFormattedErrorFromTableData(table.values), + id: `${url}-executionError`, + }); + break; + } + case 'Schema': + setData(table); + break; + case 'Row': + setData((state) => ({ + header: state?.header, + columnNames: state?.columnNames, + values: [...(state?.values || []), ...(table?.values || [])], + })); + break; + case 'Query Result': + showSuccessAlert({ + id: `${url}-querySuccess`, + title: 'Query succeed', + message: '', + }); + break; + case 'Source Description': + case 'properties': + default: + setData(table); + break; + } + }, + onclose() { + setIsFetching(false); + }, + onerror(err) { + setIsFetching(false); + showServerError(err); + }, + } + ); + }; + + const abortFetchData = () => { + setIsFetching(false); + if (pipeId) abortController.abort(); + }; + if (pipeId) { + toast.promise( + fetchData(), + { + loading: ( + <> +
+              <div>Consuming query execution result...</div>
+              &nbsp;
+              <StopLoading onClick={abortFetchData}>Abort</StopLoading>
+            </>
+          ),
+          success: 'Cancelled',
+          error: 'Something went wrong. Please try again.',
+        },
+        {
+          id: 'messages',
+          success: { duration: 20 },
+        }
+      );
+    }
+
+    return abortFetchData;
+  }, [pipeId]);
+
+  return { data, isFetching };
+};
diff --git a/kafka-ui-react-app/src/lib/hooks/useMessageFiltersStore.ts b/kafka-ui-react-app/src/lib/hooks/useMessageFiltersStore.ts
index 9aa59b00e0..8397d41d27 100644
--- a/kafka-ui-react-app/src/lib/hooks/useMessageFiltersStore.ts
+++ b/kafka-ui-react-app/src/lib/hooks/useMessageFiltersStore.ts
@@ -2,7 +2,7 @@ import { LOCAL_STORAGE_KEY_PREFIX } from 'lib/constants';
 import create from 'zustand';
 import { persist } from 'zustand/middleware';
 
-export interface AdvancedFilter {
+interface AdvancedFilter {
   name: string;
   value: string;
 }
diff --git a/kafka-ui-react-app/src/lib/paths.ts b/kafka-ui-react-app/src/lib/paths.ts
index f372d541eb..ad24ed2f6b 100644
--- a/kafka-ui-react-app/src/lib/paths.ts
+++ b/kafka-ui-react-app/src/lib/paths.ts
@@ -204,7 +204,7 @@ export const clusterConnectorsRelativePath = 'connectors';
 export const clusterConnectorNewRelativePath = 'create-new';
 export const clusterConnectConnectorsRelativePath = `${RouteParams.connectName}/connectors`;
 export const clusterConnectConnectorRelativePath = `${clusterConnectConnectorsRelativePath}/${RouteParams.connectorName}`;
-export const clusterConnectConnectorTasksRelativePath = 'tasks';
+const clusterConnectConnectorTasksRelativePath = 'tasks';
 export const clusterConnectConnectorConfigRelativePath = 'config';
 
 export const clusterConnectsPath = (
@@ -287,5 +287,5 @@ export const clusterConfigPath = (
   clusterName: ClusterName = RouteParams.clusterName
 ) => `${clusterPath(clusterName)}/${clusterConfigRelativePath}`;
 
-export const clusterNewConfigRelativePath = 'create-new-cluster';
+const clusterNewConfigRelativePath = 'create-new-cluster';
 export const clusterNewConfigPath = `/ui/clusters/${clusterNewConfigRelativePath}`;
diff --git a/kafka-ui-react-app/src/lib/yupExtended.ts b/kafka-ui-react-app/src/lib/yupExtended.ts
index 2720245e0b..9c96e073db 100644
--- a/kafka-ui-react-app/src/lib/yupExtended.ts
+++ b/kafka-ui-react-app/src/lib/yupExtended.ts
@@ -41,7 +41,8 @@ const isJsonObject = () => {
 };
 
 /**
- * due to yup rerunning all the object validiation during any render, it makes sense to cache the async results
+ * due to yup rerunning all the object validation during any render,
+ * it makes sense to cache the async results
  * */
 export function cacheTest(
   asyncValidate: (val?: string, ctx?: yup.AnyObject) => Promise<boolean>
diff --git a/kafka-ui-react-app/src/redux/interfaces/ksqlDb.ts b/kafka-ui-react-app/src/redux/interfaces/ksqlDb.ts
deleted file mode 100644
index 007055df63..0000000000
--- a/kafka-ui-react-app/src/redux/interfaces/ksqlDb.ts
+++ /dev/null
@@ -1,19 +0,0 @@
-import {
-  KsqlCommandV2Response,
-  KsqlStreamDescription,
-  KsqlTableDescription,
-} from 'generated-sources';
-
-export interface KsqlState {
-  tables: KsqlTableDescription[];
-  streams: KsqlStreamDescription[];
-  executionResult: KsqlCommandV2Response | null;
-}
-
-export interface KsqlDescription {
-  name?: string;
-  topic?: string;
-  keyFormat?: string;
-  valueFormat?: string;
-  isWindowed?: boolean;
-}
diff --git a/kafka-ui-react-app/src/redux/reducers/index.ts b/kafka-ui-react-app/src/redux/reducers/index.ts
index 9cb88ad840..78a2055a9c 100644
--- a/kafka-ui-react-app/src/redux/reducers/index.ts
+++ b/kafka-ui-react-app/src/redux/reducers/index.ts
@@ -3,12 +3,10 @@ import loader from 
'redux/reducers/loader/loaderSlice'; import schemas from 'redux/reducers/schemas/schemasSlice'; import topicMessages from 'redux/reducers/topicMessages/topicMessagesSlice'; import consumerGroups from 'redux/reducers/consumerGroups/consumerGroupsSlice'; -import ksqlDb from 'redux/reducers/ksqlDb/ksqlDbSlice'; export default combineReducers({ loader, topicMessages, consumerGroups, schemas, - ksqlDb, }); diff --git a/kafka-ui-react-app/src/redux/reducers/ksqlDb/__test__/fixtures.ts b/kafka-ui-react-app/src/redux/reducers/ksqlDb/__test__/fixtures.ts deleted file mode 100644 index 3f57063471..0000000000 --- a/kafka-ui-react-app/src/redux/reducers/ksqlDb/__test__/fixtures.ts +++ /dev/null @@ -1,43 +0,0 @@ -type Dictionary = Record; - -export const fetchKsqlDbTablesPayload: { - tables: Dictionary[]; - streams: Dictionary[]; -} = { - tables: [ - { - type: 'TABLE', - name: 'USERS', - topic: 'users', - keyFormat: 'KAFKA', - valueFormat: 'AVRO', - isWindowed: 'false', - }, - { - type: 'TABLE', - name: 'USERS2', - topic: 'users', - keyFormat: 'KAFKA', - valueFormat: 'AVRO', - isWindowed: 'false', - }, - ], - streams: [ - { - type: 'STREAM', - name: 'KSQL_PROCESSING_LOG', - topic: 'default_ksql_processing_log', - keyFormat: 'KAFKA', - valueFormat: 'JSON', - isWindowed: 'false', - }, - { - type: 'STREAM', - name: 'PAGEVIEWS', - topic: 'pageviews', - keyFormat: 'KAFKA', - valueFormat: 'AVRO', - isWindowed: 'false', - }, - ], -}; diff --git a/kafka-ui-react-app/src/redux/reducers/ksqlDb/__test__/selectors.spec.ts b/kafka-ui-react-app/src/redux/reducers/ksqlDb/__test__/selectors.spec.ts deleted file mode 100644 index 5e60caa74c..0000000000 --- a/kafka-ui-react-app/src/redux/reducers/ksqlDb/__test__/selectors.spec.ts +++ /dev/null @@ -1,51 +0,0 @@ -import { store } from 'redux/store'; -import * as selectors from 'redux/reducers/ksqlDb/selectors'; -import { fetchKsqlDbTables } from 'redux/reducers/ksqlDb/ksqlDbSlice'; - -import { fetchKsqlDbTablesPayload } from './fixtures'; - -describe('TopicMessages selectors', () => { - describe('Initial state', () => { - beforeAll(() => { - store.dispatch({ - type: fetchKsqlDbTables.pending.type, - payload: fetchKsqlDbTablesPayload, - }); - }); - - it('Returns empty state', () => { - expect(selectors.getKsqlDbTables(store.getState())).toEqual({ - rows: { - streams: [], - tables: [], - }, - fetched: false, - fetching: true, - tablesCount: 0, - streamsCount: 0, - }); - }); - }); - - describe('State', () => { - beforeAll(() => { - store.dispatch({ - type: fetchKsqlDbTables.fulfilled.type, - payload: fetchKsqlDbTablesPayload, - }); - }); - - it('Returns tables and streams', () => { - expect(selectors.getKsqlDbTables(store.getState())).toEqual({ - rows: { - streams: [...fetchKsqlDbTablesPayload.streams], - tables: [...fetchKsqlDbTablesPayload.tables], - }, - fetched: true, - fetching: false, - tablesCount: 2, - streamsCount: 2, - }); - }); - }); -}); diff --git a/kafka-ui-react-app/src/redux/reducers/ksqlDb/ksqlDbSlice.ts b/kafka-ui-react-app/src/redux/reducers/ksqlDb/ksqlDbSlice.ts deleted file mode 100644 index fed60d5d34..0000000000 --- a/kafka-ui-react-app/src/redux/reducers/ksqlDb/ksqlDbSlice.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { KsqlState } from 'redux/interfaces/ksqlDb'; -import { createAsyncThunk, createSlice } from '@reduxjs/toolkit'; -import { ExecuteKsqlRequest } from 'generated-sources'; -import { ClusterName } from 'redux/interfaces'; -import { ksqlDbApiClient } from 'lib/api'; - -const getTables = (clusterName: ClusterName) => - 
ksqlDbApiClient.listTables({
-    clusterName,
-  });
-
-const getStreams = (clusterName: ClusterName) =>
-  ksqlDbApiClient.listStreams({
-    clusterName,
-  });
-
-export const fetchKsqlDbTables = createAsyncThunk(
-  'ksqlDb/fetchKsqlDbTables',
-  async (clusterName: ClusterName) => {
-    const [tables, streams] = await Promise.all([
-      getTables(clusterName),
-      getStreams(clusterName),
-    ]);
-
-    const processedTables = tables.map((table) => ({
-      type: 'TABLE',
-      ...table,
-    }));
-    const processedStreams = streams.map((stream) => ({
-      type: 'STREAM',
-      ...stream,
-    }));
-
-    return {
-      tables: processedTables,
-      streams: processedStreams,
-    };
-  }
-);
-
-export const executeKsql = createAsyncThunk(
-  'ksqlDb/executeKsql',
-  (params: ExecuteKsqlRequest) => ksqlDbApiClient.executeKsql(params)
-);
-
-const initialState: KsqlState = {
-  streams: [],
-  tables: [],
-  executionResult: null,
-};
-
-const ksqlDbSlice = createSlice({
-  name: 'ksqlDb',
-  initialState,
-  reducers: {
-    resetExecutionResult: (state) => ({
-      ...state,
-      executionResult: null,
-    }),
-  },
-  extraReducers: (builder) => {
-    builder.addCase(fetchKsqlDbTables.fulfilled, (state, action) => ({
-      ...state,
-      ...action.payload,
-    }));
-    builder.addCase(executeKsql.fulfilled, (state, action) => ({
-      ...state,
-      executionResult: action.payload,
-    }));
-  },
-});
-
-export const { resetExecutionResult } = ksqlDbSlice.actions;
-
-export default ksqlDbSlice.reducer;
diff --git a/kafka-ui-react-app/src/redux/reducers/ksqlDb/selectors.ts b/kafka-ui-react-app/src/redux/reducers/ksqlDb/selectors.ts
deleted file mode 100644
index 0e61995ea5..0000000000
--- a/kafka-ui-react-app/src/redux/reducers/ksqlDb/selectors.ts
+++ /dev/null
@@ -1,33 +0,0 @@
-import { createSelector } from '@reduxjs/toolkit';
-import { RootState } from 'redux/interfaces';
-import { createFetchingSelector } from 'redux/reducers/loader/selectors';
-import { KsqlState } from 'redux/interfaces/ksqlDb';
-import { AsyncRequestStatus } from 'lib/constants';
-
-const ksqlDbState = ({ ksqlDb }: RootState): KsqlState => ksqlDb;
-
-const getKsqlDbFetchTablesAndStreamsFetchingStatus = createFetchingSelector(
-  'ksqlDb/fetchKsqlDbTables'
-);
-
-const getKsqlExecutionStatus = createFetchingSelector('ksqlDb/executeKsql');
-
-export const getKsqlDbTables = createSelector(
-  [ksqlDbState, getKsqlDbFetchTablesAndStreamsFetchingStatus],
-  (state, status) => ({
-    rows: { streams: [...state.streams], tables: [...state.tables] },
-    fetched: status === AsyncRequestStatus.fulfilled,
-    fetching: status === AsyncRequestStatus.pending,
-    tablesCount: state.tables.length,
-    streamsCount: state.streams.length,
-  })
-);
-
-export const getKsqlExecution = createSelector(
-  [ksqlDbState, getKsqlExecutionStatus],
-  (state, status) => ({
-    executionResult: state.executionResult,
-    fetched: status === AsyncRequestStatus.fulfilled,
-    fetching: status === AsyncRequestStatus.pending,
-  })
-);
diff --git a/kafka-ui-react-app/vite.config.ts b/kafka-ui-react-app/vite.config.ts
index 189e72e7f9..b4cdd022a9 100644
--- a/kafka-ui-react-app/vite.config.ts
+++ b/kafka-ui-react-app/vite.config.ts
@@ -25,6 +25,13 @@ export default defineConfig(({ mode }) => {
     },
     build: {
       outDir: 'build',
+      rollupOptions: {
+        output: {
+          manualChunks: {
+            ace: ['ace-builds', 'react-ace'],
+          },
+        },
+      },
     },
     experimental: {
       renderBuiltUrl(
From 8d3bac8834b5dbc26009915801d25aa1a0a6adf6 Mon Sep 17 00:00:00 2001
From: Oleg Shur
Date: Thu, 23 Mar 2023 18:40:12 +0300
Subject: [PATCH 8/9] [FE] Refactor Consumers Section (#3508)

* refactor CG List & 
details page * Refactor ResetOffset page * get rid of redux reducer --- .../ConsumerGroups/ConsumerGroups.tsx | 5 +- .../ConsumerGroups/Details/Details.tsx | 53 +-- .../Details/ResetOffsets/Form.tsx | 197 +++++++++++ .../ResetOffsets/ResetOffsets.styled.ts | 59 ++-- .../Details/ResetOffsets/ResetOffsets.tsx | 327 ++---------------- .../__test__/ResetOffsets.spec.tsx | 158 --------- .../__test__/TopicContents.spec.tsx | 2 +- .../Details/__tests__/Details.spec.tsx | 114 ------ .../Details/__tests__/ListItem.spec.tsx | 48 --- .../ConsumerGroups/{List => }/List.tsx | 47 ++- .../ConsumerGroups/List/ListContainer.tsx | 16 - .../List/__test__/List.spec.tsx | 60 ---- .../__test__/ConsumerGroups.spec.tsx | 6 +- .../common/NewTable/Table.styled.ts | 13 +- .../src/components/common/NewTable/Table.tsx | 5 +- .../fixtures/consumerGroups.ts} | 25 -- .../src/lib/hooks/api/consumers.ts | 92 +++++ kafka-ui-react-app/src/lib/paths.ts | 8 +- .../src/redux/interfaces/consumerGroup.ts | 3 +- .../__test__/consumerGroupSlice.spec.ts | 49 --- .../consumerGroups/consumerGroupsSlice.ts | 223 ------------ .../src/redux/reducers/index.ts | 2 - 22 files changed, 405 insertions(+), 1107 deletions(-) create mode 100644 kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/Form.tsx delete mode 100644 kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/__test__/ResetOffsets.spec.tsx delete mode 100644 kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx delete mode 100644 kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/ListItem.spec.tsx rename kafka-ui-react-app/src/components/ConsumerGroups/{List => }/List.tsx (70%) delete mode 100644 kafka-ui-react-app/src/components/ConsumerGroups/List/ListContainer.tsx delete mode 100644 kafka-ui-react-app/src/components/ConsumerGroups/List/__test__/List.spec.tsx rename kafka-ui-react-app/src/{redux/reducers/consumerGroups/__test__/fixtures.ts => lib/fixtures/consumerGroups.ts} (75%) create mode 100644 kafka-ui-react-app/src/lib/hooks/api/consumers.ts delete mode 100644 kafka-ui-react-app/src/redux/reducers/consumerGroups/__test__/consumerGroupSlice.spec.ts delete mode 100644 kafka-ui-react-app/src/redux/reducers/consumerGroups/consumerGroupsSlice.ts diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/ConsumerGroups.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/ConsumerGroups.tsx index b3f5ab4e02..2b729718de 100644 --- a/kafka-ui-react-app/src/components/ConsumerGroups/ConsumerGroups.tsx +++ b/kafka-ui-react-app/src/components/ConsumerGroups/ConsumerGroups.tsx @@ -1,17 +1,18 @@ import React from 'react'; import { Route, Routes } from 'react-router-dom'; import Details from 'components/ConsumerGroups/Details/Details'; -import ListContainer from 'components/ConsumerGroups/List/ListContainer'; import ResetOffsets from 'components/ConsumerGroups/Details/ResetOffsets/ResetOffsets'; import { clusterConsumerGroupResetOffsetsRelativePath, RouteParams, } from 'lib/paths'; +import List from './List'; + const ConsumerGroups: React.FC = () => { return ( - } /> + } /> } /> { const [searchParams] = useSearchParams(); const searchValue = searchParams.get('q') || ''; const { isReadOnly } = React.useContext(ClusterContext); - const { consumerGroupID, clusterName } = useAppParams(); - const dispatch = useAppDispatch(); - const consumerGroup = useAppSelector((state) => - selectById(state, consumerGroupID) - ); - const isFetched = useAppSelector(getAreConsumerGroupDetailsFulfilled); + const routeParams 
= useAppParams(); + const { clusterName, consumerGroupID } = routeParams; - React.useEffect(() => { - dispatch(fetchConsumerGroupDetails({ clusterName, consumerGroupID })); - }, [clusterName, consumerGroupID, dispatch]); + const consumerGroup = useConsumerGroupDetails(routeParams); + const deleteConsumerGroup = useDeleteConsumerGroupMutation(routeParams); const onDelete = async () => { - const res = await dispatch( - deleteConsumerGroup({ clusterName, consumerGroupID }) - ).unwrap(); - if (res) navigate('../'); + await deleteConsumerGroup.mutateAsync(); + navigate('../'); }; const onResetOffsets = () => { navigate(clusterConsumerGroupResetRelativePath); }; - if (!isFetched || !consumerGroup) { - return ; - } - - const partitionsByTopic = groupBy(consumerGroup.partitions, 'topic'); - + const partitionsByTopic = groupBy(consumerGroup.data?.partitions, 'topic'); const filteredPartitionsByTopic = Object.keys(partitionsByTopic).filter( (el) => el.includes(searchValue) ); - const currentPartitionsByTopic = searchValue.length ? filteredPartitionsByTopic : Object.keys(partitionsByTopic); @@ -110,24 +93,24 @@ const Details: React.FC = () => { - - {consumerGroup.state} + + {consumerGroup.data?.state} - {consumerGroup.members} + {consumerGroup.data?.members} - {consumerGroup.topics} + {consumerGroup.data?.topics} - {consumerGroup.partitions?.length} + {consumerGroup.data?.partitions?.length} - {consumerGroup.coordinator?.id} + {consumerGroup.data?.coordinator?.id} - {consumerGroup.messagesBehind} + {consumerGroup.data?.messagesBehind} diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/Form.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/Form.tsx new file mode 100644 index 0000000000..8ee7995bf2 --- /dev/null +++ b/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/Form.tsx @@ -0,0 +1,197 @@ +import React from 'react'; +import { useNavigate } from 'react-router-dom'; +import { + ConsumerGroupDetails, + ConsumerGroupOffsetsReset, + ConsumerGroupOffsetsResetType, +} from 'generated-sources'; +import { ClusterGroupParam } from 'lib/paths'; +import { + Controller, + FormProvider, + useFieldArray, + useForm, +} from 'react-hook-form'; +import { MultiSelect, Option } from 'react-multi-select-component'; +import 'react-datepicker/dist/react-datepicker.css'; +import { ErrorMessage } from '@hookform/error-message'; +import { InputLabel } from 'components/common/Input/InputLabel.styled'; +import { Button } from 'components/common/Button/Button'; +import Input from 'components/common/Input/Input'; +import { FormError } from 'components/common/Input/Input.styled'; +import useAppParams from 'lib/hooks/useAppParams'; +import { useResetConsumerGroupOffsetsMutation } from 'lib/hooks/api/consumers'; +import { FlexFieldset, StyledForm } from 'components/common/Form/Form.styled'; +import ControlledSelect from 'components/common/Select/ControlledSelect'; + +import * as S from './ResetOffsets.styled'; + +interface FormProps { + defaultValues: ConsumerGroupOffsetsReset; + topics: string[]; + partitions: ConsumerGroupDetails['partitions']; +} + +const resetTypeOptions = Object.values(ConsumerGroupOffsetsResetType).map( + (value) => ({ value, label: value }) +); + +const Form: React.FC = ({ defaultValues, partitions, topics }) => { + const navigate = useNavigate(); + const routerParams = useAppParams(); + const reset = useResetConsumerGroupOffsetsMutation(routerParams); + const topicOptions = React.useMemo( + () => topics.map((value) => ({ value, 
label: value })), + [topics] + ); + const methods = useForm({ + mode: 'onChange', + defaultValues, + }); + + const { + handleSubmit, + setValue, + watch, + control, + formState: { errors }, + } = methods; + const { fields } = useFieldArray({ + control, + name: 'partitionsOffsets', + }); + + const resetTypeValue = watch('resetType'); + const topicValue = watch('topic'); + const offsetsValue = watch('partitionsOffsets'); + const partitionsValue = watch('partitions') || []; + + const partitionOptions = + partitions + ?.filter((p) => p.topic === topicValue) + .map((p) => ({ + label: `Partition #${p.partition.toString()}`, + value: p.partition, + })) || []; + + const onSelectedPartitionsChange = (selected: Option[]) => { + setValue( + 'partitions', + selected.map(({ value }) => value) + ); + + setValue( + 'partitionsOffsets', + selected.map(({ value }) => { + const currentOffset = offsetsValue?.find( + ({ partition }) => partition === value + ); + return { offset: currentOffset?.offset, partition: value }; + }) + ); + }; + + React.useEffect(() => { + onSelectedPartitionsChange([]); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [topicValue]); + + const onSubmit = async (data: ConsumerGroupOffsetsReset) => { + await reset.mutateAsync(data); + navigate('../'); + }; + + return ( + + + + + +
+ Partitions + ({ + value: p, + label: String(p), + }))} + onChange={onSelectedPartitionsChange} + labelledBy="Select partitions" + /> +
+ {resetTypeValue === ConsumerGroupOffsetsResetType.TIMESTAMP && + partitionsValue.length > 0 && ( +
+ Timestamp + ( + onChange(e?.getTime())} + onBlur={onBlur} + /> + )} + /> + {message}} + /> +
+ )} + + {resetTypeValue === ConsumerGroupOffsetsResetType.OFFSET && + partitionsValue.length > 0 && ( + + {fields.map((field, index) => ( + + ))} + + )} +
+
+ +
+
+
+ ); +}; + +export default Form; diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.styled.ts b/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.styled.ts index fc1b22f607..8a0cf02b4c 100644 --- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.styled.ts +++ b/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.styled.ts @@ -1,37 +1,5 @@ import styled from 'styled-components'; - -export const Wrapper = styled.div` - padding: 16px; - padding-top: 0; - - & > form { - display: flex; - flex-direction: column; - gap: 16px; - - & > button:last-child { - align-self: flex-start; - } - } - - & .multi-select { - height: 32px; - & > .dropdown-container { - height: 32px; - & > .dropdown-heading { - height: 32px; - } - } - } -`; - -export const MainSelectors = styled.div` - display: flex; - gap: 16px; - & > * { - flex-grow: 1; - } -`; +import DatePicker from 'react-datepicker'; export const OffsetsWrapper = styled.div` display: flex; @@ -40,7 +8,26 @@ export const OffsetsWrapper = styled.div` gap: 16px; `; -export const OffsetsTitle = styled.h1` - font-size: 18px; - font-weight: 500; +export const DatePickerInput = styled(DatePicker).attrs({ + showTimeInput: true, + timeInputLabel: 'Time:', + dateFormat: 'MMMM d, yyyy h:mm aa', +})` + height: 40px; + border: 1px ${({ theme }) => theme.select.borderColor.normal} solid; + border-radius: 4px; + font-size: 14px; + width: 270px; + padding-left: 12px; + background-color: ${({ theme }) => theme.input.backgroundColor.normal}; + color: ${({ theme }) => theme.input.color.normal}; + &::placeholder { + color: ${({ theme }) => theme.input.color.normal}; + } + &:hover { + cursor: pointer; + } + &:focus { + outline: none; + } `; diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx index cb4b602cd8..8d22050194 100644 --- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx +++ b/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx @@ -1,315 +1,52 @@ import React from 'react'; -import { useNavigate } from 'react-router-dom'; -import { ConsumerGroupOffsetsResetType } from 'generated-sources'; import { clusterConsumerGroupsPath, ClusterGroupParam } from 'lib/paths'; -import { - Controller, - FormProvider, - useFieldArray, - useForm, -} from 'react-hook-form'; -import { MultiSelect, Option } from 'react-multi-select-component'; -import DatePicker from 'react-datepicker'; import 'react-datepicker/dist/react-datepicker.css'; -import groupBy from 'lodash/groupBy'; -import PageLoader from 'components/common/PageLoader/PageLoader'; -import { ErrorMessage } from '@hookform/error-message'; -import Select from 'components/common/Select/Select'; -import { InputLabel } from 'components/common/Input/InputLabel.styled'; -import { Button } from 'components/common/Button/Button'; -import Input from 'components/common/Input/Input'; -import { FormError } from 'components/common/Input/Input.styled'; import PageHeading from 'components/common/PageHeading/PageHeading'; -import { - fetchConsumerGroupDetails, - selectById, - getAreConsumerGroupDetailsFulfilled, - getIsOffsetReseted, - resetConsumerGroupOffsets, -} from 'redux/reducers/consumerGroups/consumerGroupsSlice'; -import { useAppDispatch, useAppSelector } from 'lib/hooks/redux'; import 
useAppParams from 'lib/hooks/useAppParams'; -import { resetLoaderById } from 'redux/reducers/loader/loaderSlice'; +import { useConsumerGroupDetails } from 'lib/hooks/api/consumers'; +import PageLoader from 'components/common/PageLoader/PageLoader'; +import { + ConsumerGroupOffsetsReset, + ConsumerGroupOffsetsResetType, +} from 'generated-sources'; -import * as S from './ResetOffsets.styled'; - -interface FormType { - topic: string; - resetType: ConsumerGroupOffsetsResetType; - partitionsOffsets: { offset: string | undefined; partition: number }[]; - resetToTimestamp: Date; -} +import Form from './Form'; const ResetOffsets: React.FC = () => { - const dispatch = useAppDispatch(); - const { consumerGroupID, clusterName } = useAppParams(); - const consumerGroup = useAppSelector((state) => - selectById(state, consumerGroupID) - ); + const routerParams = useAppParams(); - const isFetched = useAppSelector(getAreConsumerGroupDetailsFulfilled); - const isOffsetReseted = useAppSelector(getIsOffsetReseted); + const consumerGroup = useConsumerGroupDetails(routerParams); - React.useEffect(() => { - dispatch(fetchConsumerGroupDetails({ clusterName, consumerGroupID })); - }, [clusterName, consumerGroupID, dispatch]); - - const [uniqueTopics, setUniqueTopics] = React.useState([]); - const [selectedPartitions, setSelectedPartitions] = React.useState( - [] - ); - - const methods = useForm({ - mode: 'onChange', - defaultValues: { - resetType: ConsumerGroupOffsetsResetType.EARLIEST, - topic: '', - partitionsOffsets: [], - }, - }); - const { - handleSubmit, - setValue, - watch, - control, - setError, - clearErrors, - formState: { errors, isValid }, - } = methods; - const { fields } = useFieldArray({ - control, - name: 'partitionsOffsets', - }); - const resetTypeValue = watch('resetType'); - const topicValue = watch('topic'); - const offsetsValue = watch('partitionsOffsets'); - - React.useEffect(() => { - if (isFetched && consumerGroup?.partitions) { - setValue('topic', consumerGroup.partitions[0].topic); - setUniqueTopics(Object.keys(groupBy(consumerGroup.partitions, 'topic'))); - } - }, [consumerGroup?.partitions, isFetched, setValue]); - - const onSelectedPartitionsChange = (value: Option[]) => { - clearErrors(); - setValue( - 'partitionsOffsets', - value.map((partition) => { - const currentOffset = offsetsValue.find( - (offset) => offset.partition === partition.value - ); - return { - offset: currentOffset ? 
currentOffset?.offset : undefined, - partition: partition.value, - }; - }) - ); - setSelectedPartitions(value); - }; - - React.useEffect(() => { - onSelectedPartitionsChange([]); - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [topicValue]); - - const onSubmit = (data: FormType) => { - const augmentedData = { - ...data, - partitions: selectedPartitions.map((partition) => partition.value), - partitionsOffsets: data.partitionsOffsets as { - offset: string; - partition: number; - }[], - }; - let isValidAugmentedData = true; - if (augmentedData.resetType === ConsumerGroupOffsetsResetType.OFFSET) { - augmentedData.partitionsOffsets.forEach((offset, index) => { - if (!offset.offset) { - setError(`partitionsOffsets.${index}.offset`, { - type: 'manual', - message: "This field shouldn't be empty!", - }); - isValidAugmentedData = false; - } - }); - } else if ( - augmentedData.resetType === ConsumerGroupOffsetsResetType.TIMESTAMP - ) { - if (!augmentedData.resetToTimestamp) { - setError(`resetToTimestamp`, { - type: 'manual', - message: "This field shouldn't be empty!", - }); - isValidAugmentedData = false; - } - } - if (isValidAugmentedData) { - dispatch( - resetConsumerGroupOffsets({ - clusterName, - consumerGroupID, - requestBody: augmentedData, - }) - ); - } - }; - - const navigate = useNavigate(); - React.useEffect(() => { - if (isOffsetReseted) { - dispatch(resetLoaderById('consumerGroups/resetConsumerGroupOffsets')); - navigate('../'); - } - }, [clusterName, consumerGroupID, dispatch, navigate, isOffsetReseted]); - - if (!isFetched || !consumerGroup) { + if (consumerGroup.isLoading || !consumerGroup.isSuccess) return ; - } + + const partitions = consumerGroup.data.partitions || []; + const { topic } = partitions[0]; + + const uniqTopics = Array.from( + new Set(partitions.map((partition) => partition.topic)) + ); + + const defaultValues: ConsumerGroupOffsetsReset = { + resetType: ConsumerGroupOffsetsResetType.EARLIEST, + topic, + partitionsOffsets: [], + resetToTimestamp: new Date().getTime(), + }; return ( - + <> - -
- -
- Topic - ( - ({ value: type, label: type }) - )} - /> - )} - /> -
-
- Partitions - p.topic === topicValue) - .map((p) => ({ - label: `Partition #${p.partition.toString()}`, - value: p.partition, - })) || [] - } - value={selectedPartitions} - onChange={onSelectedPartitionsChange} - labelledBy="Select partitions" - /> -
-
- {resetTypeValue === ConsumerGroupOffsetsResetType.TIMESTAMP && - selectedPartitions.length > 0 && ( -
- Timestamp - ( - - )} - /> - {message}} - /> -
- )} - {resetTypeValue === ConsumerGroupOffsetsResetType.OFFSET && - selectedPartitions.length > 0 && ( -
- Offsets - - {fields.map((field, index) => ( -
- - Partition #{field.partition} - - - ( - {message} - )} - /> -
- ))} -
-
- )} - - -
-
+
+ ); }; diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/__test__/ResetOffsets.spec.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/__test__/ResetOffsets.spec.tsx deleted file mode 100644 index 21bcd56087..0000000000 --- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/__test__/ResetOffsets.spec.tsx +++ /dev/null @@ -1,158 +0,0 @@ -import React from 'react'; -import fetchMock from 'fetch-mock'; -import { act, screen, waitFor } from '@testing-library/react'; -import userEvent from '@testing-library/user-event'; -import { render, WithRoute } from 'lib/testHelpers'; -import { clusterConsumerGroupResetOffsetsPath } from 'lib/paths'; -import { consumerGroupPayload } from 'redux/reducers/consumerGroups/__test__/fixtures'; -import ResetOffsets from 'components/ConsumerGroups/Details/ResetOffsets/ResetOffsets'; - -const clusterName = 'cluster1'; -const { groupId } = consumerGroupPayload; - -const renderComponent = () => - render( - - - , - { - initialEntries: [ - clusterConsumerGroupResetOffsetsPath( - clusterName, - consumerGroupPayload.groupId - ), - ], - } - ); - -const resetConsumerGroupOffsetsMockCalled = () => - expect( - fetchMock.called( - `/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets` - ) - ).toBeTruthy(); - -const selectresetTypeAndPartitions = async (resetType: string) => { - await userEvent.click(screen.getByLabelText('Reset Type')); - await userEvent.click(screen.getByText(resetType)); - await userEvent.click(screen.getByText('Select...')); - await userEvent.click(screen.getByText('Partition #0')); -}; - -const resetConsumerGroupOffsetsWith = async ( - resetType: string, - offset: null | number = null -) => { - await userEvent.click(screen.getByLabelText('Reset Type')); - const options = screen.getAllByText(resetType); - await userEvent.click(options.length > 1 ? 
options[1] : options[0]); - await userEvent.click(screen.getByText('Select...')); - - await userEvent.click(screen.getByText('Partition #0')); - - fetchMock.postOnce( - `/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`, - 200, - { - body: { - topic: '__amazon_msk_canary', - resetType, - partitions: [0], - partitionsOffsets: [{ partition: 0, offset }], - }, - } - ); - await userEvent.click(screen.getByText('Submit')); - await waitFor(() => resetConsumerGroupOffsetsMockCalled()); -}; - -describe('ResetOffsets', () => { - afterEach(() => { - fetchMock.reset(); - }); - - xit('renders progress bar for initial state', async () => { - fetchMock.getOnce( - `/api/clusters/${clusterName}/consumer-groups/${groupId}`, - 404 - ); - await act(() => { - renderComponent(); - }); - expect(screen.getByRole('progressbar')).toBeInTheDocument(); - }); - - describe('with consumer group', () => { - describe('submit handles resetConsumerGroupOffsets', () => { - beforeEach(async () => { - const fetchConsumerGroupMock = fetchMock.getOnce( - `/api/clusters/${clusterName}/consumer-groups/${groupId}`, - consumerGroupPayload - ); - await act(() => { - renderComponent(); - }); - expect(fetchConsumerGroupMock.called()).toBeTruthy(); - }); - - it('calls resetConsumerGroupOffsets with EARLIEST', async () => { - await resetConsumerGroupOffsetsWith('EARLIEST'); - }); - - it('calls resetConsumerGroupOffsets with LATEST', async () => { - await resetConsumerGroupOffsetsWith('LATEST'); - }); - it('calls resetConsumerGroupOffsets with OFFSET', async () => { - await selectresetTypeAndPartitions('OFFSET'); - fetchMock.postOnce( - `/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`, - 200, - { - body: { - topic: '__amazon_msk_canary', - resetType: 'OFFSET', - partitions: [0], - partitionsOffsets: [{ partition: 0, offset: 10 }], - }, - } - ); - - await userEvent.click(screen.getAllByLabelText('Partition #0')[1]); - await userEvent.keyboard('10'); - await userEvent.click(screen.getByText('Submit')); - await resetConsumerGroupOffsetsMockCalled(); - }); - - // focus doesn't work for datepicker - it.skip('calls resetConsumerGroupOffsets with TIMESTAMP', async () => { - await selectresetTypeAndPartitions('TIMESTAMP'); - const resetConsumerGroupOffsetsMock = fetchMock.postOnce( - `/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets`, - 200, - { - body: { - topic: '__amazon_msk_canary', - resetType: 'OFFSET', - partitions: [0], - partitionsOffsets: [{ partition: 0, offset: 10 }], - }, - } - ); - await userEvent.click(screen.getByText('Submit')); - await waitFor(() => - expect( - screen.getByText("This field shouldn't be empty!") - ).toBeInTheDocument() - ); - - await waitFor(() => - expect( - resetConsumerGroupOffsetsMock.called( - `/api/clusters/${clusterName}/consumer-groups/${groupId}/offsets` - ) - ).toBeFalsy() - ); - }); - }); - }); -}); diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/TopicContents/__test__/TopicContents.spec.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/TopicContents/__test__/TopicContents.spec.tsx index e1a10b7cd5..0cc91e02cb 100644 --- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/TopicContents/__test__/TopicContents.spec.tsx +++ b/kafka-ui-react-app/src/components/ConsumerGroups/Details/TopicContents/__test__/TopicContents.spec.tsx @@ -2,9 +2,9 @@ import React from 'react'; import { clusterConsumerGroupDetailsPath } from 'lib/paths'; import { screen } from '@testing-library/react'; import TopicContents from 
'components/ConsumerGroups/Details/TopicContents/TopicContents'; -import { consumerGroupPayload } from 'redux/reducers/consumerGroups/__test__/fixtures'; import { render, WithRoute } from 'lib/testHelpers'; import { ConsumerGroupTopicPartition } from 'generated-sources'; +import { consumerGroupPayload } from 'lib/fixtures/consumerGroups'; const clusterName = 'cluster1'; diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx deleted file mode 100644 index 9e010a414f..0000000000 --- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx +++ /dev/null @@ -1,114 +0,0 @@ -import Details from 'components/ConsumerGroups/Details/Details'; -import React from 'react'; -import fetchMock from 'fetch-mock'; -import { render, WithRoute } from 'lib/testHelpers'; -import { - clusterConsumerGroupDetailsPath, - clusterConsumerGroupResetRelativePath, -} from 'lib/paths'; -import { consumerGroupPayload } from 'redux/reducers/consumerGroups/__test__/fixtures'; -import { - screen, - waitFor, - waitForElementToBeRemoved, -} from '@testing-library/dom'; -import userEvent from '@testing-library/user-event'; - -const clusterName = 'cluster1'; -const { groupId } = consumerGroupPayload; - -const mockNavigate = jest.fn(); -jest.mock('react-router-dom', () => ({ - ...jest.requireActual('react-router-dom'), - useNavigate: () => mockNavigate, -})); - -const renderComponent = () => { - render( - -
- , - { initialEntries: [clusterConsumerGroupDetailsPath(clusterName, groupId)] } - ); -}; -describe('Details component', () => { - afterEach(() => { - fetchMock.reset(); - mockNavigate.mockClear(); - }); - - describe('when consumer groups are NOT fetched', () => { - it('renders progress bar for initial state', () => { - fetchMock.getOnce( - `/api/clusters/${clusterName}/consumer-groups/${groupId}`, - 404 - ); - renderComponent(); - expect(screen.getByRole('progressbar')).toBeInTheDocument(); - }); - }); - - describe('when consumer gruops are fetched', () => { - beforeEach(async () => { - const fetchConsumerGroupMock = fetchMock.getOnce( - `/api/clusters/${clusterName}/consumer-groups/${groupId}`, - consumerGroupPayload - ); - renderComponent(); - await waitForElementToBeRemoved(() => screen.getByRole('progressbar')); - await waitFor(() => expect(fetchConsumerGroupMock.called()).toBeTruthy()); - }); - - it('renders component', () => { - expect(screen.getByRole('heading')).toBeInTheDocument(); - expect(screen.getByText(groupId)).toBeInTheDocument(); - - expect(screen.getByRole('table')).toBeInTheDocument(); - expect(screen.getAllByRole('columnheader').length).toEqual(2); - - expect(screen.queryByRole('dialog')).not.toBeInTheDocument(); - }); - - it('handles [Reset offset] click', async () => { - await userEvent.click(screen.getByText('Reset offset')); - expect(mockNavigate).toHaveBeenLastCalledWith( - clusterConsumerGroupResetRelativePath - ); - }); - - it('renders search input', async () => { - expect( - screen.getByPlaceholderText('Search by Topic Name') - ).toBeInTheDocument(); - }); - - it('shows confirmation modal on consumer group delete', async () => { - expect(screen.queryByRole('dialog')).not.toBeInTheDocument(); - await userEvent.click(screen.getByText('Delete consumer group')); - await waitFor(() => - expect(screen.queryByRole('dialog')).toBeInTheDocument() - ); - await userEvent.click(screen.getByText('Cancel')); - expect(screen.queryByRole('dialog')).not.toBeInTheDocument(); - }); - - it('handles [Delete consumer group] click', async () => { - expect(screen.queryByRole('dialog')).not.toBeInTheDocument(); - - await userEvent.click(screen.getByText('Delete consumer group')); - - expect(screen.queryByRole('dialog')).toBeInTheDocument(); - const deleteConsumerGroupMock = fetchMock.deleteOnce( - `/api/clusters/${clusterName}/consumer-groups/${groupId}`, - 200 - ); - await waitFor(() => { - userEvent.click(screen.getByRole('button', { name: 'Confirm' })); - }); - expect(deleteConsumerGroupMock.called()).toBeTruthy(); - - await waitForElementToBeRemoved(() => screen.queryByRole('dialog')); - await waitFor(() => expect(mockNavigate).toHaveBeenLastCalledWith('../')); - }); - }); -}); diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/ListItem.spec.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/ListItem.spec.tsx deleted file mode 100644 index 9cf79d02de..0000000000 --- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/ListItem.spec.tsx +++ /dev/null @@ -1,48 +0,0 @@ -import React from 'react'; -import { clusterConsumerGroupDetailsPath } from 'lib/paths'; -import { screen } from '@testing-library/react'; -import userEvent from '@testing-library/user-event'; -import ListItem from 'components/ConsumerGroups/Details/ListItem'; -import { consumerGroupPayload } from 'redux/reducers/consumerGroups/__test__/fixtures'; -import { render, WithRoute } from 'lib/testHelpers'; -import { ConsumerGroupTopicPartition } from 
'generated-sources'; - -const clusterName = 'cluster1'; - -const renderComponent = (consumers: ConsumerGroupTopicPartition[] = []) => - render( - -
- - - -
- , - { - initialEntries: [ - clusterConsumerGroupDetailsPath( - clusterName, - consumerGroupPayload.groupId - ), - ], - } - ); - -describe('ListItem', () => { - beforeEach(() => renderComponent(consumerGroupPayload.partitions)); - - it('should renders list item with topic content closed and check if element exists', () => { - expect(screen.getByRole('row')).toBeInTheDocument(); - }); - - it('should renders list item with topic content open', async () => { - await userEvent.click( - screen.getByRole('cell', { name: 'cluster1' }).children[0].children[0] - ); - expect(screen.getByText('Consumer ID')).toBeInTheDocument(); - }); -}); diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/List/List.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx similarity index 70% rename from kafka-ui-react-app/src/components/ConsumerGroups/List/List.tsx rename to kafka-ui-react-app/src/components/ConsumerGroups/List.tsx index c7df01b5e9..feb772ac82 100644 --- a/kafka-ui-react-app/src/components/ConsumerGroups/List/List.tsx +++ b/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx @@ -7,41 +7,29 @@ import { ConsumerGroupOrdering, SortOrder, } from 'generated-sources'; -import { useAppDispatch } from 'lib/hooks/redux'; import useAppParams from 'lib/hooks/useAppParams'; import { clusterConsumerGroupDetailsPath, ClusterNameRoute } from 'lib/paths'; -import { fetchConsumerGroupsPaged } from 'redux/reducers/consumerGroups/consumerGroupsSlice'; import { ColumnDef } from '@tanstack/react-table'; import Table, { TagCell, LinkCell } from 'components/common/NewTable'; import { useNavigate, useSearchParams } from 'react-router-dom'; import { PER_PAGE } from 'lib/constants'; +import { useConsumerGroups } from 'lib/hooks/api/consumers'; -export interface Props { - consumerGroups: ConsumerGroupDetails[]; - totalPages: number; -} - -const List: React.FC = ({ consumerGroups, totalPages }) => { - const dispatch = useAppDispatch(); +const List = () => { const { clusterName } = useAppParams(); const [searchParams] = useSearchParams(); const navigate = useNavigate(); - React.useEffect(() => { - dispatch( - fetchConsumerGroupsPaged({ - clusterName, - orderBy: - (searchParams.get('sortBy') as ConsumerGroupOrdering) || undefined, - sortOrder: - (searchParams.get('sortDirection')?.toUpperCase() as SortOrder) || - undefined, - page: Number(searchParams.get('page') || 1), - perPage: Number(searchParams.get('perPage') || PER_PAGE), - search: searchParams.get('q') || '', - }) - ); - }, [clusterName, dispatch, searchParams]); + const consumerGroups = useConsumerGroups({ + clusterName, + orderBy: (searchParams.get('sortBy') as ConsumerGroupOrdering) || undefined, + sortOrder: + (searchParams.get('sortDirection')?.toUpperCase() as SortOrder) || + undefined, + page: Number(searchParams.get('page') || 1), + perPage: Number(searchParams.get('perPage') || PER_PAGE), + search: searchParams.get('q') || '', + }); const columns = React.useMemo[]>( () => [ @@ -95,9 +83,13 @@ const List: React.FC = ({ consumerGroups, totalPages }) => { @@ -105,6 +97,7 @@ const List: React.FC = ({ consumerGroups, totalPages }) => { clusterConsumerGroupDetailsPath(clusterName, original.groupId) ) } + disabled={consumerGroups.isFetching} /> ); diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/List/ListContainer.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/List/ListContainer.tsx deleted file mode 100644 index 6c819eff4a..0000000000 --- a/kafka-ui-react-app/src/components/ConsumerGroups/List/ListContainer.tsx +++ 
/dev/null @@ -1,16 +0,0 @@ -import { connect } from 'react-redux'; -import { RootState } from 'redux/interfaces'; -import { - getConsumerGroupsOrderBy, - getConsumerGroupsTotalPages, - selectAll, -} from 'redux/reducers/consumerGroups/consumerGroupsSlice'; -import List from 'components/ConsumerGroups/List/List'; - -const mapStateToProps = (state: RootState) => ({ - consumerGroups: selectAll(state), - orderBy: getConsumerGroupsOrderBy(state), - totalPages: getConsumerGroupsTotalPages(state), -}); - -export default connect(mapStateToProps)(List); diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/List/__test__/List.spec.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/List/__test__/List.spec.tsx deleted file mode 100644 index a1393c2ccd..0000000000 --- a/kafka-ui-react-app/src/components/ConsumerGroups/List/__test__/List.spec.tsx +++ /dev/null @@ -1,60 +0,0 @@ -import React from 'react'; -import List, { Props } from 'components/ConsumerGroups/List/List'; -import { screen } from '@testing-library/react'; -import { render } from 'lib/testHelpers'; -import { consumerGroups as consumerGroupMock } from 'redux/reducers/consumerGroups/__test__/fixtures'; -import { clusterConsumerGroupDetailsPath } from 'lib/paths'; -import userEvent from '@testing-library/user-event'; -import ListContainer from 'components/ConsumerGroups/List/ListContainer'; - -const mockedUsedNavigate = jest.fn(); - -jest.mock('react-router-dom', () => ({ - ...jest.requireActual('react-router-dom'), - useNavigate: () => mockedUsedNavigate, -})); - -describe('ListContainer', () => { - it('renders correctly', () => { - render(); - expect(screen.getByRole('table')).toBeInTheDocument(); - }); -}); - -describe('List', () => { - const renderComponent = (props: Partial = {}) => { - const { consumerGroups, totalPages } = props; - return render( - - ); - }; - - it('renders empty table', () => { - renderComponent(); - expect(screen.getByRole('table')).toBeInTheDocument(); - expect( - screen.getByText('No active consumer groups found') - ).toBeInTheDocument(); - }); - - describe('consumerGroups are fetched', () => { - beforeEach(() => renderComponent({ consumerGroups: consumerGroupMock })); - - it('renders all rows with consumers', () => { - expect(screen.getByText('groupId1')).toBeInTheDocument(); - expect(screen.getByText('groupId2')).toBeInTheDocument(); - }); - - it('handles onRowClick', async () => { - const row = screen.getByRole('row', { name: 'groupId1 0 1 1' }); - expect(row).toBeInTheDocument(); - await userEvent.click(row); - expect(mockedUsedNavigate).toHaveBeenCalledWith( - clusterConsumerGroupDetailsPath(':clusterName', 'groupId1') - ); - }); - }); -}); diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/__test__/ConsumerGroups.spec.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/__test__/ConsumerGroups.spec.tsx index 0e319e5bd3..4d06c3ecc4 100644 --- a/kafka-ui-react-app/src/components/ConsumerGroups/__test__/ConsumerGroups.spec.tsx +++ b/kafka-ui-react-app/src/components/ConsumerGroups/__test__/ConsumerGroups.spec.tsx @@ -11,9 +11,7 @@ import { render, WithRoute } from 'lib/testHelpers'; const clusterName = 'cluster1'; -jest.mock('components/ConsumerGroups/List/ListContainer', () => () => ( -
-  <div>ListContainerMock</div>
-));
+jest.mock('components/ConsumerGroups/List', () => () => <div>ListPage</div>);
 jest.mock('components/ConsumerGroups/Details/Details', () => () => (
   <div>DetailsMock</div>
 ));
@@ -35,7 +33,7 @@ const renderComponent = (path?: string) =>
 describe('ConsumerGroups', () => {
   it('renders ListContainer', async () => {
     renderComponent();
-    expect(screen.getByText('ListContainerMock')).toBeInTheDocument();
+    expect(screen.getByText('ListPage')).toBeInTheDocument();
   });
   it('renders ResetOffsets', async () => {
     renderComponent(
diff --git a/kafka-ui-react-app/src/components/common/NewTable/Table.styled.ts b/kafka-ui-react-app/src/components/common/NewTable/Table.styled.ts
index 0d9af3a61a..5db0ba8068 100644
--- a/kafka-ui-react-app/src/components/common/NewTable/Table.styled.ts
+++ b/kafka-ui-react-app/src/components/common/NewTable/Table.styled.ts
@@ -225,6 +225,13 @@ export const Ellipsis = styled.div`
   display: block;
 `;
 
-export const TableWrapper = styled.div`
-  overflow-x: auto;
-`;
+export const TableWrapper = styled.div<{ $disabled: boolean }>(
+  ({ $disabled }) => css`
+    overflow-x: auto;
+    ${$disabled &&
+    css`
+      pointer-events: none;
+      opacity: 0.5;
+    `}
+  `
+);
diff --git a/kafka-ui-react-app/src/components/common/NewTable/Table.tsx b/kafka-ui-react-app/src/components/common/NewTable/Table.tsx
index fb6f76aa39..1cdc67001d 100644
--- a/kafka-ui-react-app/src/components/common/NewTable/Table.tsx
+++ b/kafka-ui-react-app/src/components/common/NewTable/Table.tsx
@@ -48,6 +48,8 @@ export interface TableProps<TData> {
   // Placeholder for empty table
   emptyMessage?: React.ReactNode;
+  disabled?: boolean;
+
   // Handles row click. Can not be combined with `enableRowSelection` && expandable rows.
   onRowClick?: (row: Row<TData>) => void;
 }
@@ -123,6 +125,7 @@ const Table: React.FC<TableProps<any>> = ({
   enableRowSelection = false,
   batchActionsBar: BatchActionsBar,
   emptyMessage,
+  disabled,
   onRowClick,
 }) => {
   const [searchParams, setSearchParams] = useSearchParams();
@@ -200,7 +203,7 @@
         />
       )}
-      <S.TableWrapper>
+      <S.TableWrapper $disabled={!!disabled}>
{table.getHeaderGroups().map((headerGroup) => ( diff --git a/kafka-ui-react-app/src/redux/reducers/consumerGroups/__test__/fixtures.ts b/kafka-ui-react-app/src/lib/fixtures/consumerGroups.ts similarity index 75% rename from kafka-ui-react-app/src/redux/reducers/consumerGroups/__test__/fixtures.ts rename to kafka-ui-react-app/src/lib/fixtures/consumerGroups.ts index 7c130ef472..f6c60cf61a 100644 --- a/kafka-ui-react-app/src/redux/reducers/consumerGroups/__test__/fixtures.ts +++ b/kafka-ui-react-app/src/lib/fixtures/consumerGroups.ts @@ -1,30 +1,5 @@ import { ConsumerGroupState } from 'generated-sources'; -export const consumerGroups = [ - { - groupId: 'groupId1', - members: 0, - topics: 1, - simple: false, - partitionAssignor: '', - coordinator: { - id: 1, - host: 'host', - }, - }, - { - groupId: 'groupId2', - members: 0, - topics: 1, - simple: false, - partitionAssignor: '', - coordinator: { - id: 1, - host: 'host', - }, - }, -]; - export const consumerGroupPayload = { groupId: 'amazon.msk.canary.group.broker-1', members: 0, diff --git a/kafka-ui-react-app/src/lib/hooks/api/consumers.ts b/kafka-ui-react-app/src/lib/hooks/api/consumers.ts new file mode 100644 index 0000000000..c0089e1f00 --- /dev/null +++ b/kafka-ui-react-app/src/lib/hooks/api/consumers.ts @@ -0,0 +1,92 @@ +import { consumerGroupsApiClient as api } from 'lib/api'; +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; +import { ClusterName } from 'redux/interfaces'; +import { + ConsumerGroup, + ConsumerGroupOffsetsReset, + ConsumerGroupOrdering, + SortOrder, +} from 'generated-sources'; +import { showSuccessAlert } from 'lib/errorHandling'; + +export type ConsumerGroupID = ConsumerGroup['groupId']; + +type UseConsumerGroupsProps = { + clusterName: ClusterName; + orderBy?: ConsumerGroupOrdering; + sortOrder?: SortOrder; + page?: number; + perPage?: number; + search: string; +}; + +type UseConsumerGroupDetailsProps = { + clusterName: ClusterName; + consumerGroupID: ConsumerGroupID; +}; + +export function useConsumerGroups(props: UseConsumerGroupsProps) { + const { clusterName, ...rest } = props; + return useQuery( + ['clusters', clusterName, 'consumerGroups', rest], + () => api.getConsumerGroupsPage(props), + { suspense: false, keepPreviousData: true } + ); +} + +export function useConsumerGroupDetails(props: UseConsumerGroupDetailsProps) { + const { clusterName, consumerGroupID } = props; + return useQuery( + ['clusters', clusterName, 'consumerGroups', consumerGroupID], + () => api.getConsumerGroup({ clusterName, id: consumerGroupID }) + ); +} + +export const useDeleteConsumerGroupMutation = ({ + clusterName, + consumerGroupID, +}: UseConsumerGroupDetailsProps) => { + const queryClient = useQueryClient(); + return useMutation( + () => api.deleteConsumerGroup({ clusterName, id: consumerGroupID }), + { + onSuccess: () => { + showSuccessAlert({ + message: `Consumer ${consumerGroupID} group deleted`, + }); + queryClient.invalidateQueries([ + 'clusters', + clusterName, + 'consumerGroups', + ]); + }, + } + ); +}; + +export const useResetConsumerGroupOffsetsMutation = ({ + clusterName, + consumerGroupID, +}: UseConsumerGroupDetailsProps) => { + const queryClient = useQueryClient(); + return useMutation( + (props: ConsumerGroupOffsetsReset) => + api.resetConsumerGroupOffsets({ + clusterName, + id: consumerGroupID, + consumerGroupOffsetsReset: props, + }), + { + onSuccess: () => { + showSuccessAlert({ + message: `Consumer ${consumerGroupID} group offsets reset`, + }); + queryClient.invalidateQueries([ + 
'clusters', + clusterName, + 'consumerGroups', + ]); + }, + } + ); +}; diff --git a/kafka-ui-react-app/src/lib/paths.ts b/kafka-ui-react-app/src/lib/paths.ts index ad24ed2f6b..6571f1684c 100644 --- a/kafka-ui-react-app/src/lib/paths.ts +++ b/kafka-ui-react-app/src/lib/paths.ts @@ -1,12 +1,8 @@ import { Broker, Connect, Connector } from 'generated-sources'; -import { - ClusterName, - ConsumerGroupID, - SchemaName, - TopicName, -} from 'redux/interfaces'; +import { ClusterName, SchemaName, TopicName } from 'redux/interfaces'; import { GIT_REPO_LINK } from './constants'; +import { ConsumerGroupID } from './hooks/api/consumers'; export const gitCommitPath = (commit: string) => `${GIT_REPO_LINK}/commit/${commit}`; diff --git a/kafka-ui-react-app/src/redux/interfaces/consumerGroup.ts b/kafka-ui-react-app/src/redux/interfaces/consumerGroup.ts index 8e05a9b628..45412e264b 100644 --- a/kafka-ui-react-app/src/redux/interfaces/consumerGroup.ts +++ b/kafka-ui-react-app/src/redux/interfaces/consumerGroup.ts @@ -5,10 +5,9 @@ import { import { ClusterName } from './cluster'; -export type ConsumerGroupID = ConsumerGroup['groupId']; export interface ConsumerGroupResetOffsetRequestParams { clusterName: ClusterName; - consumerGroupID: ConsumerGroupID; + consumerGroupID: ConsumerGroup['groupId']; requestBody: { topic: string; resetType: ConsumerGroupOffsetsResetType; diff --git a/kafka-ui-react-app/src/redux/reducers/consumerGroups/__test__/consumerGroupSlice.spec.ts b/kafka-ui-react-app/src/redux/reducers/consumerGroups/__test__/consumerGroupSlice.spec.ts deleted file mode 100644 index 2bf20606ad..0000000000 --- a/kafka-ui-react-app/src/redux/reducers/consumerGroups/__test__/consumerGroupSlice.spec.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { store } from 'redux/store'; -import { - sortBy, - getConsumerGroupsOrderBy, - getConsumerGroupsSortOrder, - getAreConsumerGroupsPagedFulfilled, - fetchConsumerGroupsPaged, - selectAll, -} from 'redux/reducers/consumerGroups/consumerGroupsSlice'; -import { ConsumerGroupOrdering, SortOrder } from 'generated-sources'; -import { consumerGroups } from 'redux/reducers/consumerGroups/__test__/fixtures'; - -describe('Consumer Groups Slice', () => { - describe('Actions', () => { - it('should test the sortBy actions', () => { - expect(store.getState().consumerGroups.sortOrder).toBe(SortOrder.ASC); - - store.dispatch(sortBy(ConsumerGroupOrdering.STATE)); - expect(getConsumerGroupsOrderBy(store.getState())).toBe( - ConsumerGroupOrdering.STATE - ); - expect(getConsumerGroupsSortOrder(store.getState())).toBe(SortOrder.DESC); - store.dispatch(sortBy(ConsumerGroupOrdering.STATE)); - expect(getConsumerGroupsSortOrder(store.getState())).toBe(SortOrder.ASC); - }); - }); - - describe('Thunk Actions', () => { - it('should check the fetchConsumerPaged ', () => { - store.dispatch({ - type: fetchConsumerGroupsPaged.fulfilled.type, - payload: { - consumerGroups, - }, - }); - - expect(getAreConsumerGroupsPagedFulfilled(store.getState())).toBeTruthy(); - expect(selectAll(store.getState())).toEqual(consumerGroups); - - store.dispatch({ - type: fetchConsumerGroupsPaged.fulfilled.type, - payload: { - consumerGroups: null, - }, - }); - expect(selectAll(store.getState())).toEqual([]); - }); - }); -}); diff --git a/kafka-ui-react-app/src/redux/reducers/consumerGroups/consumerGroupsSlice.ts b/kafka-ui-react-app/src/redux/reducers/consumerGroups/consumerGroupsSlice.ts deleted file mode 100644 index 91420f0404..0000000000 --- a/kafka-ui-react-app/src/redux/reducers/consumerGroups/consumerGroupsSlice.ts +++ 
/dev/null @@ -1,223 +0,0 @@ -import { - createAsyncThunk, - createEntityAdapter, - createSlice, - createSelector, - PayloadAction, -} from '@reduxjs/toolkit'; -import { - ConsumerGroupDetails, - ConsumerGroupOrdering, - ConsumerGroupsPageResponse, - SortOrder, -} from 'generated-sources'; -import { AsyncRequestStatus } from 'lib/constants'; -import { - getResponse, - showServerError, - showSuccessAlert, -} from 'lib/errorHandling'; -import { - ClusterName, - ConsumerGroupID, - ConsumerGroupResetOffsetRequestParams, - RootState, -} from 'redux/interfaces'; -import { createFetchingSelector } from 'redux/reducers/loader/selectors'; -import { EntityState } from '@reduxjs/toolkit/src/entities/models'; -import { consumerGroupsApiClient } from 'lib/api'; - -export const fetchConsumerGroupsPaged = createAsyncThunk< - ConsumerGroupsPageResponse, - { - clusterName: ClusterName; - orderBy?: ConsumerGroupOrdering; - sortOrder?: SortOrder; - page?: number; - perPage?: number; - search: string; - } ->( - 'consumerGroups/fetchConsumerGroupsPaged', - async ( - { clusterName, orderBy, sortOrder, page, perPage, search }, - { rejectWithValue } - ) => { - try { - return await consumerGroupsApiClient.getConsumerGroupsPage({ - clusterName, - orderBy, - sortOrder, - page, - perPage, - search, - }); - } catch (error) { - showServerError(error as Response); - return rejectWithValue(await getResponse(error as Response)); - } - } -); - -export const fetchConsumerGroupDetails = createAsyncThunk< - ConsumerGroupDetails, - { clusterName: ClusterName; consumerGroupID: ConsumerGroupID } ->( - 'consumerGroups/fetchConsumerGroupDetails', - async ({ clusterName, consumerGroupID }, { rejectWithValue }) => { - try { - return await consumerGroupsApiClient.getConsumerGroup({ - clusterName, - id: consumerGroupID, - }); - } catch (error) { - showServerError(error as Response); - return rejectWithValue(await getResponse(error as Response)); - } - } -); - -export const deleteConsumerGroup = createAsyncThunk< - ConsumerGroupID, - { clusterName: ClusterName; consumerGroupID: ConsumerGroupID } ->( - 'consumerGroups/deleteConsumerGroup', - async ({ clusterName, consumerGroupID }, { rejectWithValue }) => { - try { - await consumerGroupsApiClient.deleteConsumerGroup({ - clusterName, - id: consumerGroupID, - }); - showSuccessAlert({ - message: `Consumer ${consumerGroupID} group deleted`, - }); - return consumerGroupID; - } catch (error) { - showServerError(error as Response); - return rejectWithValue(await getResponse(error as Response)); - } - } -); - -export const resetConsumerGroupOffsets = createAsyncThunk< - ConsumerGroupID, - ConsumerGroupResetOffsetRequestParams ->( - 'consumerGroups/resetConsumerGroupOffsets', - async ( - { clusterName, consumerGroupID, requestBody }, - { rejectWithValue } - ) => { - try { - await consumerGroupsApiClient.resetConsumerGroupOffsets({ - clusterName, - id: consumerGroupID, - consumerGroupOffsetsReset: { - topic: requestBody.topic, - resetType: requestBody.resetType, - partitions: requestBody.partitions, - partitionsOffsets: requestBody.partitionsOffsets?.map((offset) => ({ - ...offset, - offset: +offset.offset, - })), - resetToTimestamp: requestBody.resetToTimestamp?.getTime(), - }, - }); - showSuccessAlert({ - message: `Consumer ${consumerGroupID} group offsets reset`, - }); - return consumerGroupID; - } catch (error) { - showServerError(error as Response); - return rejectWithValue(await getResponse(error as Response)); - } - } -); -const SCHEMAS_PAGE_COUNT = 1; - -const consumerGroupsAdapter = 
createEntityAdapter<ConsumerGroupDetails>({
-  selectId: (consumerGroup) => consumerGroup.groupId,
-});
-
-interface ConsumerGroupState extends EntityState<ConsumerGroupDetails> {
-  orderBy: ConsumerGroupOrdering | null;
-  sortOrder: SortOrder;
-  totalPages: number;
-}
-
-const initialState: ConsumerGroupState = {
-  orderBy: ConsumerGroupOrdering.NAME,
-  sortOrder: SortOrder.ASC,
-  totalPages: SCHEMAS_PAGE_COUNT,
-  ...consumerGroupsAdapter.getInitialState(),
-};
-
-const consumerGroupsSlice = createSlice({
-  name: 'consumerGroups',
-  initialState,
-  reducers: {
-    sortBy: (state, action: PayloadAction<ConsumerGroupOrdering>) => {
-      state.orderBy = action.payload;
-      state.sortOrder =
-        state.orderBy === action.payload && state.sortOrder === SortOrder.ASC
-          ? SortOrder.DESC
-          : SortOrder.ASC;
-    },
-  },
-  extraReducers: (builder) => {
-    builder.addCase(
-      fetchConsumerGroupsPaged.fulfilled,
-      (state, { payload }) => {
-        state.totalPages = payload.pageCount || SCHEMAS_PAGE_COUNT;
-        consumerGroupsAdapter.setAll(state, payload.consumerGroups || []);
-      }
-    );
-    builder.addCase(fetchConsumerGroupDetails.fulfilled, (state, { payload }) =>
-      consumerGroupsAdapter.upsertOne(state, payload)
-    );
-    builder.addCase(deleteConsumerGroup.fulfilled, (state, { payload }) =>
-      consumerGroupsAdapter.removeOne(state, payload)
-    );
-  },
-});
-
-export const { sortBy } = consumerGroupsSlice.actions;
-
-const consumerGroupsState = ({
-  consumerGroups,
-}: RootState): ConsumerGroupState => consumerGroups;
-
-export const { selectAll, selectById } =
-  consumerGroupsAdapter.getSelectors(consumerGroupsState);
-
-export const getAreConsumerGroupsPagedFulfilled = createSelector(
-  createFetchingSelector('consumerGroups/fetchConsumerGroupsPaged'),
-  (status) => status === AsyncRequestStatus.fulfilled
-);
-
-export const getAreConsumerGroupDetailsFulfilled = createSelector(
-  createFetchingSelector('consumerGroups/fetchConsumerGroupDetails'),
-  (status) => status === AsyncRequestStatus.fulfilled
-);
-
-export const getIsOffsetReseted = createSelector(
-  createFetchingSelector('consumerGroups/resetConsumerGroupOffsets'),
-  (status) => status === AsyncRequestStatus.fulfilled
-);
-
-export const getConsumerGroupsOrderBy = createSelector(
-  consumerGroupsState,
-  (state) => state.orderBy
-);
-
-export const getConsumerGroupsSortOrder = createSelector(
-  consumerGroupsState,
-  (state) => state.sortOrder
-);
-
-export const getConsumerGroupsTotalPages = createSelector(
-  consumerGroupsState,
-  (state) => state.totalPages
-);
-
-export default consumerGroupsSlice.reducer;
diff --git a/kafka-ui-react-app/src/redux/reducers/index.ts b/kafka-ui-react-app/src/redux/reducers/index.ts
index 78a2055a9c..aa5cb69cf0 100644
--- a/kafka-ui-react-app/src/redux/reducers/index.ts
+++ b/kafka-ui-react-app/src/redux/reducers/index.ts
@@ -2,11 +2,9 @@ import { combineReducers } from '@reduxjs/toolkit';
 import loader from 'redux/reducers/loader/loaderSlice';
 import schemas from 'redux/reducers/schemas/schemasSlice';
 import topicMessages from 'redux/reducers/topicMessages/topicMessagesSlice';
-import consumerGroups from 'redux/reducers/consumerGroups/consumerGroupsSlice';
 
 export default combineReducers({
   loader,
   topicMessages,
-  consumerGroups,
   schemas,
 });

From acfe7a4afcce8da9d111d8922a6cdbb311e53450 Mon Sep 17 00:00:00 2001
From: Ilya Kuramshin
Date: Fri, 24 Mar 2023 14:05:59 +0400
Subject: [PATCH 9/9] Azure Events Hub support fixed (#3540)

UnknownTopicOrPartitionException errors are now suppressed when calling
describeConfigs() for brokers (the Azure case).
Co-authored-by: iliax --- .../kafka/ui/service/ReactiveAdminClient.java | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java index 19d06a0c48..7cabb79f2d 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java @@ -212,17 +212,24 @@ public class ReactiveAdminClient implements Closeable { .map(brokerId -> new ConfigResource(ConfigResource.Type.BROKER, Integer.toString(brokerId))) .collect(toList()); return toMono(client.describeConfigs(resources).all()) - // some kafka backends (like MSK serverless) do not support broker's configs retrieval, - // in that case InvalidRequestException will be thrown - .onErrorResume(InvalidRequestException.class, th -> { - log.trace("Error while getting broker {} configs", brokerIds, th); - return Mono.just(Map.of()); - }) + // some kafka backends don't support broker's configs retrieval, + // and throw various exceptions on describeConfigs() call + .onErrorResume(th -> th instanceof InvalidRequestException // MSK Serverless + || th instanceof UnknownTopicOrPartitionException, // Azure event hub + th -> { + log.trace("Error while getting configs for brokers {}", brokerIds, th); + return Mono.just(Map.of()); + }) // there are situations when kafka-ui user has no DESCRIBE_CONFIGS permission on cluster .onErrorResume(ClusterAuthorizationException.class, th -> { log.trace("AuthorizationException while getting configs for brokers {}", brokerIds, th); return Mono.just(Map.of()); }) + // catching all remaining exceptions, but logging on WARN level + .onErrorResume(th -> true, th -> { + log.warn("Unexpected error while getting configs for brokers {}", brokerIds, th); + return Mono.just(Map.of()); + }) .map(config -> config.entrySet().stream() .collect(toMap( c -> Integer.valueOf(c.getKey().name()),
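
Note on the react-query migration in the patches above: with the
redux/reducers/consumerGroups slice deleted, components are expected to go
through the hooks in lib/hooks/api/consumers.ts. A minimal usage sketch
follows; the ResetToEarliest component, its markup, and the EARLIEST-only
behaviour are illustrative assumptions, not code from these patches.

    // Hypothetical consumer of the new hooks (not part of the patch set).
    import React from 'react';
    import useAppParams from 'lib/hooks/useAppParams';
    import { ClusterNameRoute } from 'lib/paths';
    import { ConsumerGroupOffsetsResetType } from 'generated-sources';
    import {
      ConsumerGroupID,
      useConsumerGroupDetails,
      useResetConsumerGroupOffsetsMutation,
    } from 'lib/hooks/api/consumers';

    interface Props {
      consumerGroupID: ConsumerGroupID;
    }

    const ResetToEarliest: React.FC<Props> = ({ consumerGroupID }) => {
      const { clusterName } = useAppParams<ClusterNameRoute>();

      // Details are cached under ['clusters', clusterName, 'consumerGroups', id];
      // the mutation invalidates that prefix, so the query refetches on success.
      const consumerGroup = useConsumerGroupDetails({ clusterName, consumerGroupID });
      const resetOffsets = useResetConsumerGroupOffsetsMutation({
        clusterName,
        consumerGroupID,
      });

      if (!consumerGroup.isSuccess) return null;

      const topic = consumerGroup.data.partitions?.[0]?.topic;
      if (!topic) return null;

      return (
        <button
          type="button"
          disabled={resetOffsets.isLoading}
          onClick={() =>
            // partitions/partitionsOffsets are optional in the generated
            // ConsumerGroupOffsetsReset model; omitting them here assumes the
            // API applies the reset to all partitions by default.
            resetOffsets.mutateAsync({
              topic,
              resetType: ConsumerGroupOffsetsResetType.EARLIEST,
            })
          }
        >
          Reset offsets to earliest
        </button>
      );
    };

    export default ResetToEarliest;

Because every query key in consumers.ts shares the
['clusters', clusterName, 'consumerGroups'] prefix, the invalidateQueries
calls in the delete and reset mutations refresh both the consumer group list
and any open details view after a successful mutation.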