Merge remote-tracking branch 'origin/Wizard_RBAC_Disable_configure_buttons_#3646' into Wizard_RBAC_Disable_configure_buttons_#3646

commit aa05e0a681
57 changed files with 896 additions and 371 deletions

.github/ISSUE_TEMPLATE/bug.yml (vendored, new file): 92 lines

@@ -0,0 +1,92 @@
name: "\U0001F41E Bug report"
|
||||
description: File a bug report
|
||||
labels: ["status/triage", "type/bug"]
|
||||
assignees: []
|
||||
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Hi, thanks for raising the issue(-s), all contributions really matter!
|
||||
Please, note that we'll close the issue without further explanation if you don't follow
|
||||
this template and don't provide the information requested within this template.
|
||||
|
||||
- type: checkboxes
|
||||
id: terms
|
||||
attributes:
|
||||
label: Issue submitter TODO list
|
||||
description: By you checking these checkboxes we can be sure you've done the essential things.
|
||||
options:
|
||||
- label: I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems)
|
||||
required: true
|
||||
- label: I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues)
|
||||
required: true
|
||||
- label: I've tried running `master`-labeled docker image and the issue still persists there
|
||||
required: true
|
||||
- label: I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md)
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Describe the bug (actual behavior)
|
||||
description: A clear and concise description of what the bug is. Use a list, if there is more than one problem
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Expected behavior
|
||||
description: A clear and concise description of what you expected to happen
|
||||
validations:
|
||||
required: false
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Your installation details
|
||||
description: |
|
||||
How do you run the app? Please provide as much info as possible:
|
||||
1. App version (commit hash in the top left corner of the UI)
|
||||
2. Helm chart version, if you use one
|
||||
3. Your application config. Please remove the sensitive info like passwords or API keys.
|
||||
4. Any IAAC configs
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Steps to reproduce
|
||||
description: |
|
||||
Please write down the order of the actions required to reproduce the issue.
|
||||
For the advanced setups/complicated issue, we might need you to provide
|
||||
a minimal [reproducible example](https://stackoverflow.com/help/minimal-reproducible-example).
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Screenshots
|
||||
description: |
|
||||
If applicable, add screenshots to help explain your problem
|
||||
validations:
|
||||
required: false
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Logs
|
||||
description: |
|
||||
If applicable, *upload* screenshots to help explain your problem
|
||||
validations:
|
||||
required: false
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Additional context
|
||||
description: |
|
||||
Add any other context about the problem here. E.G.:
|
||||
1. Are there any alternative scenarios (different data/methods/configuration/setup) you have tried?
|
||||
Were they successful or the same issue occurred? Please provide steps as well.
|
||||
2. Related issues (if there are any).
|
||||
3. Logs (if available)
|
||||
4. Is there any serious impact or behaviour on the end-user because of this issue, that can be overlooked?
|
||||
validations:
|
||||
required: false
|

.github/ISSUE_TEMPLATE/bug_report.md (vendored, deleted): 64 lines

@@ -1,64 +0,0 @@
----
-name: "\U0001F41E Bug report"
-about: Create a bug report
-title: ''
-labels: status/triage, type/bug
-assignees: ''
-
----
-
-<!--
-
-We will close the issue without further explanation if you don't follow this template and don't provide the information requested within this template.
-
-Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
-https://github.com/provectus/kafka-ui/issues
-https://github.com/provectus/kafka-ui/discussions
-
--->
-
-<!--
-Please follow the naming conventions for bugs:
-<Feature/Area/Scope>: <Compact, but specific problem summary>
-Avoid generic titles, like "Topics: incorrect layout of message sorting drop-down list". Better use something like: "Topics: Message sorting drop-down list overlaps the 'Submit' button".
--->
-
-**Describe the bug** (actual behavior)
-<!-- (A clear and concise description of what the bug is. Use a list if there is more than one problem.) -->
-
-**Expected behavior**
-<!-- (A clear and concise description of what you expected to happen.) -->
-
-**Set up**
-<!--
-WE MIGHT CLOSE THE ISSUE without further explanation IF YOU DON'T PROVIDE THIS INFORMATION.
-
-How do you run the app? Please provide as much info as possible:
-1. App version (docker image version, or the commit hash in the top left corner of the UI)
-2. Helm chart version, if you use one
-3. Any IaC configs
--->
-
-
-**Steps to Reproduce**
-<!-- We'd like you to provide an example setup (via docker-compose, helm, etc.)
-to reproduce the problem, especially with complex setups. -->
-
-1.
-
-**Screenshots**
-<!--
-(If applicable, add screenshots to help explain your problem.)
--->
-
-
-**Additional context**
-<!--
-Add any other context about the problem here, e.g.:
-1. Are there any alternative scenarios (different data/methods/configuration/setup) you have tried?
-   Were they successful or did the same issue occur? Please provide steps as well.
-2. Related issues (if there are any)
-3. Logs (if available)
-4. Is there any serious impact on the end user because of this issue that could be overlooked?
--->

.github/ISSUE_TEMPLATE/feature.yml (vendored, new file): 66 lines

@@ -0,0 +1,66 @@
name: "\U0001F680 Feature request"
|
||||
description: Propose a new feature
|
||||
labels: ["status/triage", "type/feature"]
|
||||
assignees: []
|
||||
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Hi, thanks for raising the issue(-s), all contributions really matter!
|
||||
Please, note that we'll close the issue without further explanation if you don't follow
|
||||
this template and don't provide the information requested within this template.
|
||||
|
||||
- type: checkboxes
|
||||
id: terms
|
||||
attributes:
|
||||
label: Issue submitter TODO list
|
||||
description: By you checking these checkboxes we can be sure you've done the essential things.
|
||||
options:
|
||||
- label: I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues)
|
||||
required: true
|
||||
- label: I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) and the feature is not present there
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Is your proposal related to a problem?
|
||||
description: |
|
||||
Provide a clear and concise description of what the problem is.
|
||||
For example, "I'm always frustrated when..."
|
||||
validations:
|
||||
required: false
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Describe the feature you're interested in
|
||||
description: |
|
||||
Provide a clear and concise description of what you want to happen.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Describe alternatives you've considered
|
||||
description: |
|
||||
Let us know about other solutions you've tried or researched.
|
||||
validations:
|
||||
required: false
|
||||
|
||||
- type: input
|
||||
attributes:
|
||||
label: Version you're running
|
||||
description: |
|
||||
Please provide the app version you're currently running:
|
||||
1. App version (commit hash in the top left corner of the UI)
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Additional context
|
||||
description: |
|
||||
Is there anything else you can add about the proposal?
|
||||
You might want to link to related issues here, if you haven't already.
|
||||
validations:
|
||||
required: false
|

.github/ISSUE_TEMPLATE/feature_request.md (vendored, deleted): 46 lines

@@ -1,46 +0,0 @@
----
-name: "\U0001F680 Feature request"
-about: Propose a new feature
-title: ''
-labels: status/triage, type/feature
-assignees: ''
-
----
-
-<!--
-
-Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
-https://github.com/provectus/kafka-ui/issues
-https://github.com/provectus/kafka-ui/discussions
-
--->
-
-### Which version of the app are you running?
-<!-- Please provide the docker image version, or the commit hash in the top left corner of the UI. -->
-
-### Is your proposal related to a problem?
-
-<!--
-Provide a clear and concise description of what the problem is.
-For example, "I'm always frustrated when..."
--->
-
-### Describe the solution you'd like
-
-<!--
-Provide a clear and concise description of what you want to happen.
--->
-
-### Describe alternatives you've considered
-
-<!--
-Let us know about other solutions you've tried or researched.
--->
-
-### Additional context
-
-<!--
-Is there anything else you can add about the proposal?
-You might want to link to related issues here, if you haven't already.
--->

.github/ISSUE_TEMPLATE/helm.yml (vendored, new file): 92 lines

@@ -0,0 +1,92 @@
name: "⎈ K8s/Helm problem report"
|
||||
description: "Report a problem with k8s/helm charts/etc"
|
||||
labels: ["status/triage", "scope/k8s"]
|
||||
assignees: []
|
||||
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Hi, thanks for raising the issue(-s), all contributions really matter!
|
||||
Please, note that we'll close the issue without further explanation if you don't follow
|
||||
this template and don't provide the information requested within this template.
|
||||
|
||||
- type: checkboxes
|
||||
id: terms
|
||||
attributes:
|
||||
label: Issue submitter TODO list
|
||||
description: By you checking these checkboxes we can be sure you've done the essential things.
|
||||
options:
|
||||
- label: I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems)
|
||||
required: true
|
||||
- label: I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues)
|
||||
required: true
|
||||
- label: I've tried running `master`-labeled docker image and the issue still persists there
|
||||
required: true
|
||||
- label: I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md)
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Describe the bug (actual behavior)
|
||||
description: A clear and concise description of what the bug is. Use a list, if there is more than one problem
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Expected behavior
|
||||
description: A clear and concise description of what you expected to happen
|
||||
validations:
|
||||
required: false
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Your installation details
|
||||
description: |
|
||||
How do you run the app? Please provide as much info as possible:
|
||||
1. App version (commit hash in the top left corner of the UI)
|
||||
2. Helm chart version
|
||||
3. Your application config. Please remove the sensitive info like passwords or API keys.
|
||||
4. Any IAAC configs
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Steps to reproduce
|
||||
description: |
|
||||
Please write down the order of the actions required to reproduce the issue.
|
||||
For the advanced setups/complicated issue, we might need you to provide
|
||||
a minimal [reproducible example](https://stackoverflow.com/help/minimal-reproducible-example).
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Screenshots
|
||||
description: |
|
||||
If applicable, add screenshots to help explain your problem
|
||||
validations:
|
||||
required: false
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Logs
|
||||
description: |
|
||||
If applicable, *upload* screenshots to help explain your problem
|
||||
validations:
|
||||
required: false
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Additional context
|
||||
description: |
|
||||
Add any other context about the problem here. E.G.:
|
||||
1. Are there any alternative scenarios (different data/methods/configuration/setup) you have tried?
|
||||
Were they successful or the same issue occurred? Please provide steps as well.
|
||||
2. Related issues (if there are any).
|
||||
3. Logs (if available)
|
||||
4. Is there any serious impact or behaviour on the end-user because of this issue, that can be overlooked?
|
||||
validations:
|
||||
required: false
|

.github/ISSUE_TEMPLATE/k8s.md (vendored, deleted): 52 lines

@@ -1,52 +0,0 @@
----
-name: "⎈ K8s/Helm problem report"
-about: Report a problem with k8s/helm charts/etc
-title: ''
-labels: scope/k8s, status/triage
-assignees: azatsafin
-
----
-
-<!--
-
-Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
-https://github.com/provectus/kafka-ui/issues
-https://github.com/provectus/kafka-ui/discussions
-
--->
-
-**Describe the bug**
-<!-- (A clear and concise description of what the bug is.) -->
-
-
-**Set up**
-<!--
-How do you run the app? Please provide as much info as possible:
-1. App version (docker image version, or the commit hash in the top left corner of the UI)
-2. Helm chart version, if you use one
-3. Any IaC configs
-
-We might close the issue without further explanation if you don't provide such information.
--->
-
-
-**Steps to Reproduce**
-Steps to reproduce the behavior:
-
-1.
-
-**Expected behavior**
-<!--
-(A clear and concise description of what you expected to happen.)
--->
-
-**Screenshots**
-<!--
-(If applicable, add screenshots to help explain your problem.)
--->
-
-
-**Additional context**
-<!--
-(Add any other context about the problem here.)
--->

.github/dependabot.yml (vendored): 4 changed lines

@@ -8,8 +8,6 @@ updates:
       timezone: Europe/Moscow
-    reviewers:
-      - "Haarolean"
     assignees:
       - "Haarolean"
     labels:
       - "scope/backend"
       - "type/dependencies"
@@ -99,8 +97,6 @@ updates:
       timezone: Europe/Moscow
-    reviewers:
-      - "Haarolean"
     assignees:
       - "Haarolean"
     labels:
       - "scope/infrastructure"
       - "type/dependencies"

.github/workflows/block_merge.yml (vendored): 2 changed lines

@@ -6,7 +6,7 @@ jobs:
   block_merge:
     runs-on: ubuntu-latest
     steps:
-      - uses: mheap/github-action-required-labels@v3
+      - uses: mheap/github-action-required-labels@v4
         with:
           mode: exactly
           count: 0

.github/workflows/branch-deploy.yml (vendored): 4 changed lines

@@ -86,7 +86,7 @@ jobs:
       - name: make comment with private deployment link
         if: ${{ github.event.label.name == 'status/feature_testing' }}
-        uses: peter-evans/create-or-update-comment@v2
+        uses: peter-evans/create-or-update-comment@v3
         with:
           issue-number: ${{ github.event.pull_request.number }}
           body: |

@@ -94,7 +94,7 @@ jobs:
       - name: make comment with public deployment link
         if: ${{ github.event.label.name == 'status/feature_testing_public' }}
-        uses: peter-evans/create-or-update-comment@v2
+        uses: peter-evans/create-or-update-comment@v3
         with:
           issue-number: ${{ github.event.pull_request.number }}
           body: |

.github/workflows/branch-remove.yml (vendored): 2 changed lines

@@ -21,7 +21,7 @@ jobs:
           git add ../kafka-ui-from-branch/
           git commit -m "removed env:${{ needs.build.outputs.deploy }}" && git push || true
       - name: make comment with deployment link
-        uses: peter-evans/create-or-update-comment@v2
+        uses: peter-evans/create-or-update-comment@v3
         with:
           issue-number: ${{ github.event.pull_request.number }}
           body: |

.github/workflows/build-public-image.yml (vendored): 2 changed lines

@@ -65,7 +65,7 @@ jobs:
           cache-from: type=local,src=/tmp/.buildx-cache
           cache-to: type=local,dest=/tmp/.buildx-cache
       - name: make comment with private deployment link
-        uses: peter-evans/create-or-update-comment@v2
+        uses: peter-evans/create-or-update-comment@v3
         with:
           issue-number: ${{ github.event.pull_request.number }}
           body: |

.github/workflows/cve.yaml (vendored): 2 changed lines

@@ -55,7 +55,7 @@ jobs:
           cache-to: type=local,dest=/tmp/.buildx-cache

       - name: Run CVE checks
-        uses: aquasecurity/trivy-action@0.9.2
+        uses: aquasecurity/trivy-action@0.10.0
         with:
           image-ref: "provectuslabs/kafka-ui:${{ steps.build.outputs.version }}"
           format: "table"

.github/workflows/delete-public-image.yml (vendored): 2 changed lines

@@ -33,7 +33,7 @@ jobs:
             --image-ids imageTag=${{ steps.extract_branch.outputs.tag }} \
             --region us-east-1
       - name: make comment with private deployment link
-        uses: peter-evans/create-or-update-comment@v2
+        uses: peter-evans/create-or-update-comment@v3
         with:
           issue-number: ${{ github.event.pull_request.number }}
           body: |

.github/workflows/stale.yaml (vendored): 2 changed lines

@@ -7,7 +7,7 @@ jobs:
   stale:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@v7
+      - uses: actions/stale@v8
        with:
          days-before-issue-stale: 7
          days-before-issue-close: 3
@@ -9,4 +9,6 @@ message MySpecificTopicValue {
 message MyValue {
   int32 version = 1;
   string payload = 2;
+  map<int32, string> intToStringMap = 3;
+  map<string, MyValue> strToObjMap = 4;
 }
@@ -21,6 +21,12 @@
     </properties>

     <dependencies>
+        <dependency>
+            <!--TODO: remove, when spring-boot fixed dependency to 6.0.8+ (6.0.7 has CVE) -->
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-core</artifactId>
+            <version>6.0.8</version>
+        </dependency>
         <dependency>
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-webflux</artifactId>

@@ -109,6 +115,12 @@
             <groupId>io.projectreactor.addons</groupId>
             <artifactId>reactor-extra</artifactId>
         </dependency>
+        <!-- https://github.com/provectus/kafka-ui/pull/3693 -->
+        <dependency>
+            <groupId>org.json</groupId>
+            <artifactId>json</artifactId>
+            <version>${org.json.version}</version>
+        </dependency>

         <dependency>
             <groupId>org.springframework.boot</groupId>
@@ -58,6 +58,8 @@ public class ClustersProperties {
     Integer pollTimeoutMs;
     Integer partitionPollTimeout;
     Integer noDataEmptyPolls;
+    Integer maxPageSize;
+    Integer defaultPageSize;
   }

   @Data
@@ -43,9 +43,6 @@ import reactor.core.scheduler.Schedulers;
 @Slf4j
 public class MessagesController extends AbstractController implements MessagesApi {

-  private static final int MAX_LOAD_RECORD_LIMIT = 100;
-  private static final int DEFAULT_LOAD_RECORD_LIMIT = 20;
-
   private final MessagesService messagesService;
   private final DeserializationService deserializationService;
   private final AccessControlService accessControlService;

@@ -91,8 +88,6 @@ public class MessagesController extends AbstractController implements MessagesApi {
     seekType = seekType != null ? seekType : SeekTypeDTO.BEGINNING;
     seekDirection = seekDirection != null ? seekDirection : SeekDirectionDTO.FORWARD;
     filterQueryType = filterQueryType != null ? filterQueryType : MessageFilterTypeDTO.STRING_CONTAINS;
-    int recordsLimit =
-        Optional.ofNullable(limit).map(s -> Math.min(s, MAX_LOAD_RECORD_LIMIT)).orElse(DEFAULT_LOAD_RECORD_LIMIT);

     var positions = new ConsumerPosition(
         seekType,

@@ -103,7 +98,7 @@ public class MessagesController extends AbstractController implements MessagesApi {
         ResponseEntity.ok(
             messagesService.loadMessages(
                 getCluster(clusterName), topicName, positions, q, filterQueryType,
-                recordsLimit, seekDirection, keySerde, valueSerde)
+                limit, seekDirection, keySerde, valueSerde)
         )
     );
@@ -11,8 +11,6 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Optional;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
 import org.apache.kafka.common.Node;
 import org.apache.kafka.common.TopicPartition;

@@ -82,15 +80,8 @@ public class ConsumerGroupMapper {
       InternalConsumerGroup c, T consumerGroup) {
     consumerGroup.setGroupId(c.getGroupId());
     consumerGroup.setMembers(c.getMembers().size());
-
-    int numTopics = Stream.concat(
-        c.getOffsets().keySet().stream().map(TopicPartition::topic),
-        c.getMembers().stream()
-            .flatMap(m -> m.getAssignment().stream().map(TopicPartition::topic))
-    ).collect(Collectors.toSet()).size();
-
     consumerGroup.setMessagesBehind(c.getMessagesBehind());
-    consumerGroup.setTopics(numTopics);
+    consumerGroup.setTopics(c.getTopicNum());
     consumerGroup.setSimple(c.isSimple());

     Optional.ofNullable(c.getState())
@@ -1,6 +1,7 @@
 package com.provectus.kafka.ui.model;

 import java.math.BigDecimal;
+import javax.annotation.Nullable;
 import lombok.Data;
 import org.apache.kafka.common.Node;

@@ -10,15 +11,27 @@ public class InternalBroker {
   private final Integer id;
   private final String host;
   private final Integer port;
-  private final BigDecimal bytesInPerSec;
-  private final BigDecimal bytesOutPerSec;
+  private final @Nullable BigDecimal bytesInPerSec;
+  private final @Nullable BigDecimal bytesOutPerSec;
+  private final @Nullable Integer partitionsLeader;
+  private final @Nullable Integer partitions;
+  private final @Nullable Integer inSyncPartitions;
+  private final @Nullable BigDecimal leadersSkew;
+  private final @Nullable BigDecimal partitionsSkew;

-  public InternalBroker(Node node, Statistics statistics) {
+  public InternalBroker(Node node,
+                        PartitionDistributionStats partitionDistribution,
+                        Statistics statistics) {
     this.id = node.id();
     this.host = node.host();
     this.port = node.port();
     this.bytesInPerSec = statistics.getMetrics().getBrokerBytesInPerSec().get(node.id());
     this.bytesOutPerSec = statistics.getMetrics().getBrokerBytesOutPerSec().get(node.id());
+    this.partitionsLeader = partitionDistribution.getPartitionLeaders().get(node);
+    this.partitions = partitionDistribution.getPartitionsCount().get(node);
+    this.inSyncPartitions = partitionDistribution.getInSyncPartitions().get(node);
+    this.leadersSkew = partitionDistribution.leadersSkew(node);
+    this.partitionsSkew = partitionDistribution.partitionsSkew(node);
   }

 }
@@ -5,6 +5,7 @@ import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import lombok.Builder;
 import lombok.Data;
 import org.apache.kafka.clients.admin.ConsumerGroupDescription;

@@ -21,6 +22,7 @@ public class InternalConsumerGroup {
   private final Map<TopicPartition, Long> offsets;
   private final Map<TopicPartition, Long> endOffsets;
   private final Long messagesBehind;
+  private final Integer topicNum;
   private final String partitionAssignor;
   private final ConsumerGroupState state;
   private final Node coordinator;

@@ -44,22 +46,12 @@ public class InternalConsumerGroup {
     builder.simple(description.isSimpleConsumerGroup());
     builder.state(description.state());
     builder.partitionAssignor(description.partitionAssignor());
-    builder.members(
-        description.members().stream()
-            .map(m ->
-                InternalConsumerGroup.InternalMember.builder()
-                    .assignment(m.assignment().topicPartitions())
-                    .clientId(m.clientId())
-                    .groupInstanceId(m.groupInstanceId().orElse(""))
-                    .consumerId(m.consumerId())
-                    .clientId(m.clientId())
-                    .host(m.host())
-                    .build()
-            ).collect(Collectors.toList())
-    );
+    Collection<InternalMember> internalMembers = initInternalMembers(description);
+    builder.members(internalMembers);
     builder.offsets(groupOffsets);
     builder.endOffsets(topicEndOffsets);
     builder.messagesBehind(calculateMessagesBehind(groupOffsets, topicEndOffsets));
+    builder.topicNum(calculateTopicNum(groupOffsets, internalMembers));
     Optional.ofNullable(description.coordinator()).ifPresent(builder::coordinator);
     return builder.build();
   }

@@ -80,4 +72,31 @@ public class InternalConsumerGroup {
     return messagesBehind;
   }

+  private static Integer calculateTopicNum(Map<TopicPartition, Long> offsets, Collection<InternalMember> members) {
+    long topicNum = Stream.concat(
+        offsets.keySet().stream().map(TopicPartition::topic),
+        members.stream()
+            .flatMap(m -> m.getAssignment().stream().map(TopicPartition::topic))
+    ).distinct().count();
+    return Integer.valueOf((int) topicNum);
+  }
+
+  private static Collection<InternalMember> initInternalMembers(ConsumerGroupDescription description) {
+    return description.members().stream()
+        .map(m ->
+            InternalConsumerGroup.InternalMember.builder()
+                .assignment(m.assignment().topicPartitions())
+                .clientId(m.clientId())
+                .groupInstanceId(m.groupInstanceId().orElse(""))
+                .consumerId(m.consumerId())
+                .clientId(m.clientId())
+                .host(m.host())
+                .build()
+        ).collect(Collectors.toList());
+  }
+
 }
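Note: the topic count added above is the number of distinct topics across the group's committed offsets and its members' current assignments. A minimal standalone sketch of that logic on plain collections (the sample data and class name are illustrative, not the project's model types):

```java
import java.util.List;
import java.util.Set;
import java.util.stream.Stream;

// Illustrative sketch of calculateTopicNum(): concatenate topics coming from
// committed offsets with topics coming from member assignments, then count
// the distinct ones.
class TopicNumSketch {
  public static void main(String[] args) {
    Set<String> topicsWithOffsets = Set.of("orders", "billing"); // from offsets.keySet()
    List<Set<String>> memberAssignments = List.of(Set.of("orders"), Set.of("audit"));

    long topicNum = Stream.concat(
        topicsWithOffsets.stream(),
        memberAssignments.stream().flatMap(Set::stream)
    ).distinct().count();

    System.out.println(topicNum); // 3 -> orders, billing, audit
  }
}
```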
@@ -0,0 +1,93 @@
+package com.provectus.kafka.ui.model;
+
+import java.math.BigDecimal;
+import java.math.MathContext;
+import java.util.HashMap;
+import java.util.Map;
+import javax.annotation.Nullable;
+import lombok.AccessLevel;
+import lombok.Getter;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.admin.TopicDescription;
+import org.apache.kafka.common.Node;
+import org.apache.kafka.common.TopicPartitionInfo;
+
+@RequiredArgsConstructor(access = AccessLevel.PRIVATE)
+@Getter
+@Slf4j
+public class PartitionDistributionStats {
+
+  // avg skew will show unuseful results on low number of partitions
+  private static final int MIN_PARTITIONS_FOR_SKEW_CALCULATION = 50;
+
+  private static final MathContext ROUNDING_MATH_CTX = new MathContext(3);
+
+  private final Map<Node, Integer> partitionLeaders;
+  private final Map<Node, Integer> partitionsCount;
+  private final Map<Node, Integer> inSyncPartitions;
+  private final double avgLeadersCntPerBroker;
+  private final double avgPartitionsPerBroker;
+  private final boolean skewCanBeCalculated;
+
+  public static PartitionDistributionStats create(Statistics stats) {
+    return create(stats, MIN_PARTITIONS_FOR_SKEW_CALCULATION);
+  }
+
+  static PartitionDistributionStats create(Statistics stats, int minPartitionsForSkewCalculation) {
+    var partitionLeaders = new HashMap<Node, Integer>();
+    var partitionsReplicated = new HashMap<Node, Integer>();
+    var isr = new HashMap<Node, Integer>();
+    int partitionsCnt = 0;
+    for (TopicDescription td : stats.getTopicDescriptions().values()) {
+      for (TopicPartitionInfo tp : td.partitions()) {
+        partitionsCnt++;
+        tp.replicas().forEach(r -> incr(partitionsReplicated, r));
+        tp.isr().forEach(r -> incr(isr, r));
+        if (tp.leader() != null) {
+          incr(partitionLeaders, tp.leader());
+        }
+      }
+    }
+    int nodesWithPartitions = partitionsReplicated.size();
+    int partitionReplications = partitionsReplicated.values().stream().mapToInt(i -> i).sum();
+    var avgPartitionsPerBroker = nodesWithPartitions == 0 ? 0 : ((double) partitionReplications) / nodesWithPartitions;
+
+    int nodesWithLeaders = partitionLeaders.size();
+    int leadersCnt = partitionLeaders.values().stream().mapToInt(i -> i).sum();
+    var avgLeadersCntPerBroker = nodesWithLeaders == 0 ? 0 : ((double) leadersCnt) / nodesWithLeaders;
+
+    return new PartitionDistributionStats(
+        partitionLeaders,
+        partitionsReplicated,
+        isr,
+        avgLeadersCntPerBroker,
+        avgPartitionsPerBroker,
+        partitionsCnt >= minPartitionsForSkewCalculation
+    );
+  }
+
+  private static void incr(Map<Node, Integer> map, Node n) {
+    map.compute(n, (k, c) -> c == null ? 1 : ++c);
+  }
+
+  @Nullable
+  public BigDecimal partitionsSkew(Node node) {
+    return calculateAvgSkew(partitionsCount.get(node), avgPartitionsPerBroker);
+  }
+
+  @Nullable
+  public BigDecimal leadersSkew(Node node) {
+    return calculateAvgSkew(partitionLeaders.get(node), avgLeadersCntPerBroker);
+  }
+
+  // Returns difference (in percents) from average value, null if it can't be calculated
+  @Nullable
+  private BigDecimal calculateAvgSkew(@Nullable Integer value, double avgValue) {
+    if (avgValue == 0 || !skewCanBeCalculated) {
+      return null;
+    }
+    value = value == null ? 0 : value;
+    return new BigDecimal((value - avgValue) / avgValue * 100.0).round(ROUNDING_MATH_CTX);
+  }
+}
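Note: the skew returned by calculateAvgSkew() is the percentage deviation from the per-broker average, where the average only counts brokers that host at least one replica (or lead at least one partition). A worked sketch using the same numbers as the test further below (3, 4 and 1 replicas across three brokers):

```java
import java.math.BigDecimal;
import java.math.MathContext;

// Worked example of the skew formula: avg = (3+4+1)/3 = 2.666...,
// so a broker holding 4 replicas deviates by (4 - avg) / avg * 100 = +50%.
class SkewSketch {
  public static void main(String[] args) {
    double avg = (3 + 4 + 1) / 3.0;
    int value = 4; // this broker's replica count
    BigDecimal skew = new BigDecimal((value - avg) / avg * 100.0)
        .round(new MathContext(3)); // same 3-digit rounding as ROUNDING_MATH_CTX
    System.out.println(skew); // 50.0
  }
}
```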
@@ -10,6 +10,7 @@ import com.provectus.kafka.ui.model.BrokersLogdirsDTO;
 import com.provectus.kafka.ui.model.InternalBroker;
 import com.provectus.kafka.ui.model.InternalBrokerConfig;
 import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.model.PartitionDistributionStats;
 import com.provectus.kafka.ui.service.metrics.RawMetric;
 import java.util.Collections;
 import java.util.HashMap;

@@ -64,11 +65,13 @@ public class BrokerService {
   }

   public Flux<InternalBroker> getBrokers(KafkaCluster cluster) {
+    var stats = statisticsCache.get(cluster);
+    var partitionsDistribution = PartitionDistributionStats.create(stats);
     return adminClientService
         .get(cluster)
         .flatMap(ReactiveAdminClient::describeCluster)
         .map(description -> description.getNodes().stream()
-            .map(node -> new InternalBroker(node, statisticsCache.get(cluster)))
+            .map(node -> new InternalBroker(node, partitionsDistribution, stats))
             .collect(Collectors.toList()))
         .flatMapMany(Flux::fromIterable);
   }
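Note: the change above also hoists the statistics snapshot and the derived PartitionDistributionStats out of the per-node lambda, so they are computed once per request and shared by every InternalBroker instead of being re-read per broker. A generic sketch of that pattern (all names illustrative):

```java
import java.util.List;
import java.util.function.Supplier;

// Compute a shared derived value once, then reuse it for every element,
// rather than re-deriving it inside the per-element mapping.
class HoistSketch {
  public static void main(String[] args) {
    Supplier<double[]> expensiveDerivation = () -> new double[] {2.666, 1.5};

    double[] shared = expensiveDerivation.get(); // once per request
    List<Integer> nodeIds = List.of(1, 2, 3);
    nodeIds.forEach(id ->
        System.out.println("node " + id + " sees avg=" + shared[0])); // reused, not recomputed
  }
}
```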
@@ -101,6 +101,9 @@ public class ConsumerGroupService {
   public record ConsumerGroupsPage(List<InternalConsumerGroup> consumerGroups, int totalPages) {
   }

+  private record GroupWithDescr(InternalConsumerGroup icg, ConsumerGroupDescription cgd) {
+  }
+
   public Mono<ConsumerGroupsPage> getConsumerGroupsPage(
       KafkaCluster cluster,
       int pageNum,

@@ -159,22 +162,19 @@ public class ConsumerGroupService {
             sortAndPaginate(descriptions.values(), comparator, pageNum, perPage, sortOrderDto).toList());
       }
       case MESSAGES_BEHIND -> {
-        record GroupWithDescr(InternalConsumerGroup icg, ConsumerGroupDescription cgd) { }
-
         Comparator<GroupWithDescr> comparator = Comparator.comparingLong(gwd ->
             gwd.icg.getMessagesBehind() == null ? 0L : gwd.icg.getMessagesBehind());
-
-        var groupNames = groups.stream().map(ConsumerGroupListing::groupId).toList();
-
-        yield ac.describeConsumerGroups(groupNames)
-            .flatMap(descriptionsMap -> {
-              List<ConsumerGroupDescription> descriptions = descriptionsMap.values().stream().toList();
-              return getConsumerGroups(ac, descriptions)
-                  .map(icg -> Streams.zip(icg.stream(), descriptions.stream(), GroupWithDescr::new).toList())
-                  .map(gwd -> sortAndPaginate(gwd, comparator, pageNum, perPage, sortOrderDto)
-                      .map(GroupWithDescr::cgd).toList());
-            }
-            );
+        yield loadDescriptionsByInternalConsumerGroups(ac, groups, comparator, pageNum, perPage, sortOrderDto);
+      }
+
+      case TOPIC_NUM -> {
+        Comparator<GroupWithDescr> comparator = Comparator.comparingInt(gwd -> gwd.icg.getTopicNum());
+
+        yield loadDescriptionsByInternalConsumerGroups(ac, groups, comparator, pageNum, perPage, sortOrderDto);
       }
     };
   }

@@ -209,6 +209,27 @@ public class ConsumerGroupService {
         .map(cgs -> new ArrayList<>(cgs.values()));
   }

+  private Mono<List<ConsumerGroupDescription>> loadDescriptionsByInternalConsumerGroups(ReactiveAdminClient ac,
+                                                                                        List<ConsumerGroupListing> groups,
+                                                                                        Comparator<GroupWithDescr> comparator,
+                                                                                        int pageNum,
+                                                                                        int perPage,
+                                                                                        SortOrderDTO sortOrderDto) {
+    var groupNames = groups.stream().map(ConsumerGroupListing::groupId).toList();
+
+    return ac.describeConsumerGroups(groupNames)
+        .flatMap(descriptionsMap -> {
+          List<ConsumerGroupDescription> descriptions = descriptionsMap.values().stream().toList();
+          return getConsumerGroups(ac, descriptions)
+              .map(icg -> Streams.zip(icg.stream(), descriptions.stream(), GroupWithDescr::new).toList())
+              .map(gwd -> sortAndPaginate(gwd, comparator, pageNum, perPage, sortOrderDto)
+                  .map(GroupWithDescr::cgd).toList());
+        });
+  }
+
   public Mono<InternalConsumerGroup> getConsumerGroupDetail(KafkaCluster cluster,
                                                             String consumerGroupId) {
     return adminClientService.get(cluster)
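Note: the MESSAGES_BEHIND and TOPIC_NUM branches now share one helper: describe the groups, zip them with their descriptions, sort by the supplied comparator, and slice out one page. A minimal non-reactive sketch of that sort-then-paginate idea (the record and data are hypothetical):

```java
import java.util.Comparator;
import java.util.List;
import java.util.stream.Stream;

// Sort the whole collection with a pluggable comparator, then cut one page.
class SortPaginateSketch {
  record Group(String id, int topicNum) {}

  static Stream<Group> sortAndPaginate(List<Group> groups, Comparator<Group> comparator,
                                       int pageNum, int perPage) {
    return groups.stream()
        .sorted(comparator)
        .skip((long) (pageNum - 1) * perPage)
        .limit(perPage);
  }

  public static void main(String[] args) {
    var groups = List.of(new Group("a", 3), new Group("b", 1), new Group("c", 2));
    sortAndPaginate(groups, Comparator.comparingInt(Group::topicNum), 1, 2)
        .map(Group::id)
        .forEach(System.out::println); // b, c
  }
}
```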
@@ -1,6 +1,7 @@
 package com.provectus.kafka.ui.service;

 import com.google.common.util.concurrent.RateLimiter;
+import com.provectus.kafka.ui.config.ClustersProperties;
 import com.provectus.kafka.ui.emitter.BackwardRecordEmitter;
 import com.provectus.kafka.ui.emitter.ForwardRecordEmitter;
 import com.provectus.kafka.ui.emitter.MessageFilters;

@@ -20,13 +21,13 @@ import com.provectus.kafka.ui.serdes.ProducerRecordCreator;
 import com.provectus.kafka.ui.util.SslPropertiesUtil;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 import java.util.Properties;
 import java.util.concurrent.CompletableFuture;
 import java.util.function.Predicate;
 import java.util.function.UnaryOperator;
 import java.util.stream.Collectors;
 import javax.annotation.Nullable;
-import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kafka.clients.admin.OffsetSpec;

@@ -44,16 +45,35 @@ import reactor.core.publisher.Mono;
 import reactor.core.scheduler.Schedulers;

 @Service
-@RequiredArgsConstructor
 @Slf4j
 public class MessagesService {

+  private static final int DEFAULT_MAX_PAGE_SIZE = 500;
+  private static final int DEFAULT_PAGE_SIZE = 100;
   // limiting UI messages rate to 20/sec in tailing mode
-  public static final int TAILING_UI_MESSAGE_THROTTLE_RATE = 20;
+  private static final int TAILING_UI_MESSAGE_THROTTLE_RATE = 20;

   private final AdminClientService adminClientService;
   private final DeserializationService deserializationService;
   private final ConsumerGroupService consumerGroupService;
+  private final int maxPageSize;
+  private final int defaultPageSize;
+
+  public MessagesService(AdminClientService adminClientService,
+                         DeserializationService deserializationService,
+                         ConsumerGroupService consumerGroupService,
+                         ClustersProperties properties) {
+    this.adminClientService = adminClientService;
+    this.deserializationService = deserializationService;
+    this.consumerGroupService = consumerGroupService;
+
+    var pollingProps = Optional.ofNullable(properties.getPolling())
+        .orElseGet(ClustersProperties.PollingProperties::new);
+    this.maxPageSize = Optional.ofNullable(pollingProps.getMaxPageSize())
+        .orElse(DEFAULT_MAX_PAGE_SIZE);
+    this.defaultPageSize = Optional.ofNullable(pollingProps.getDefaultPageSize())
+        .orElse(DEFAULT_PAGE_SIZE);
+  }

   private Mono<TopicDescription> withExistingTopic(KafkaCluster cluster, String topicName) {
     return adminClientService.get(cluster)

@@ -139,7 +159,7 @@ public class MessagesService {
                                                    ConsumerPosition consumerPosition,
                                                    @Nullable String query,
                                                    MessageFilterTypeDTO filterQueryType,
-                                                   int limit,
+                                                   @Nullable Integer pageSize,
                                                    SeekDirectionDTO seekDirection,
                                                    @Nullable String keySerde,
                                                    @Nullable String valueSerde) {

@@ -147,7 +167,13 @@ public class MessagesService {
         .flux()
         .publishOn(Schedulers.boundedElastic())
         .flatMap(td -> loadMessagesImpl(cluster, topic, consumerPosition, query,
-            filterQueryType, limit, seekDirection, keySerde, valueSerde));
+            filterQueryType, fixPageSize(pageSize), seekDirection, keySerde, valueSerde));
   }

+  private int fixPageSize(@Nullable Integer pageSize) {
+    return Optional.ofNullable(pageSize)
+        .filter(ps -> ps > 0 && ps <= maxPageSize)
+        .orElse(defaultPageSize);
+  }
+
   private Flux<TopicMessageEventDTO> loadMessagesImpl(KafkaCluster cluster,
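Note: the net effect of the MessagesService changes is that the message page size is now clamped server-side, with the bounds coming from the cluster polling properties (maxPageSize, defaultPageSize, see ClustersProperties above) and falling back to the built-in defaults of 500 and 100. A standalone sketch of the clamping rule:

```java
import java.util.Optional;

// Sketch of fixPageSize(): any missing or out-of-range client value falls
// back to the configured default. The constants mirror the built-in defaults.
class PageSizeSketch {
  static final int MAX_PAGE_SIZE = 500;
  static final int DEFAULT_PAGE_SIZE = 100;

  static int fixPageSize(Integer pageSize) {
    return Optional.ofNullable(pageSize)
        .filter(ps -> ps > 0 && ps <= MAX_PAGE_SIZE)
        .orElse(DEFAULT_PAGE_SIZE);
  }

  public static void main(String[] args) {
    System.out.println(fixPageSize(50));   // 50  (within range)
    System.out.println(fixPageSize(9000)); // 100 (above max -> default)
    System.out.println(fixPageSize(null)); // 100 (not provided -> default)
  }
}
```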
@@ -4,9 +4,9 @@ import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;

 // Specifies field that can contain any kind of value - primitive, complex and nulls
-public class AnyFieldSchema implements FieldSchema {
+class AnyFieldSchema implements FieldSchema {

-  public static AnyFieldSchema get() {
+  static AnyFieldSchema get() {
     return new AnyFieldSchema();
   }
@@ -4,10 +4,10 @@ import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;

-public class ArrayFieldSchema implements FieldSchema {
+class ArrayFieldSchema implements FieldSchema {
   private final FieldSchema itemsSchema;

-  public ArrayFieldSchema(FieldSchema itemsSchema) {
+  ArrayFieldSchema(FieldSchema itemsSchema) {
     this.itemsSchema = itemsSchema;
   }
@@ -7,10 +7,10 @@ import java.util.List;
 import java.util.Map;


-public class EnumJsonType extends JsonType {
+class EnumJsonType extends JsonType {
   private final List<String> values;

-  public EnumJsonType(List<String> values) {
+  EnumJsonType(List<String> values) {
     super(Type.ENUM);
     this.values = values;
   }
@@ -3,6 +3,6 @@ package com.provectus.kafka.ui.util.jsonschema;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;

-public interface FieldSchema {
+interface FieldSchema {
   JsonNode toJsonNode(ObjectMapper mapper);
 }
@@ -4,7 +4,7 @@ import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import java.util.Map;

-public abstract class JsonType {
+abstract class JsonType {

   protected final Type type;

@@ -12,13 +12,13 @@ public abstract class JsonType {
     this.type = type;
   }

-  public Type getType() {
+  Type getType() {
     return type;
   }

-  public abstract Map<String, JsonNode> toJsonNode(ObjectMapper mapper);
+  abstract Map<String, JsonNode> toJsonNode(ObjectMapper mapper);

-  public enum Type {
+  enum Type {
     NULL,
     BOOLEAN,
     OBJECT,
@@ -2,21 +2,27 @@ package com.provectus.kafka.ui.util.jsonschema;

 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.BooleanNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.fasterxml.jackson.databind.node.TextNode;
+import javax.annotation.Nullable;

-public class MapFieldSchema implements FieldSchema {
-  private final FieldSchema itemSchema;
+class MapFieldSchema implements FieldSchema {
+  private final @Nullable FieldSchema itemSchema;

-  public MapFieldSchema(FieldSchema itemSchema) {
+  MapFieldSchema(@Nullable FieldSchema itemSchema) {
     this.itemSchema = itemSchema;
   }

+  MapFieldSchema() {
+    this(null);
+  }
+
   @Override
   public JsonNode toJsonNode(ObjectMapper mapper) {
     final ObjectNode objectNode = mapper.createObjectNode();
     objectNode.set("type", new TextNode(JsonType.Type.OBJECT.getName()));
-    objectNode.set("additionalProperties", itemSchema.toJsonNode(mapper));
+    objectNode.set("additionalProperties", itemSchema != null ? itemSchema.toJsonNode(mapper) : BooleanNode.TRUE);
     return objectNode;
   }
 }
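Note: with no item schema, the new no-arg MapFieldSchema serializes to a free-form object, "additionalProperties": true, which is how protobuf map fields are represented in the generated JSON schema (see the converter and test changes below). A small Jackson sketch of the node it emits:

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.BooleanNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;

// Roughly what MapFieldSchema().toJsonNode(mapper) produces when itemSchema is null.
class MapSchemaSketch {
  public static void main(String[] args) {
    ObjectMapper mapper = new ObjectMapper();
    ObjectNode node = mapper.createObjectNode();
    node.set("type", new TextNode("object"));
    node.set("additionalProperties", BooleanNode.TRUE);
    System.out.println(node); // {"type":"object","additionalProperties":true}
  }
}
```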
@@ -9,24 +9,24 @@ import java.util.stream.Collectors;
 import reactor.util.function.Tuple2;
 import reactor.util.function.Tuples;

-public class ObjectFieldSchema implements FieldSchema {
+class ObjectFieldSchema implements FieldSchema {

-  public static final ObjectFieldSchema EMPTY = new ObjectFieldSchema(Map.of(), List.of());
+  static final ObjectFieldSchema EMPTY = new ObjectFieldSchema(Map.of(), List.of());

   private final Map<String, FieldSchema> properties;
   private final List<String> required;

-  public ObjectFieldSchema(Map<String, FieldSchema> properties,
-                           List<String> required) {
+  ObjectFieldSchema(Map<String, FieldSchema> properties,
+                    List<String> required) {
     this.properties = properties;
     this.required = required;
   }

-  public Map<String, FieldSchema> getProperties() {
+  Map<String, FieldSchema> getProperties() {
     return properties;
   }

-  public List<String> getRequired() {
+  List<String> getRequired() {
     return required;
   }
@@ -5,11 +5,10 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import java.util.List;
 import java.util.stream.Collectors;

-public class OneOfFieldSchema implements FieldSchema {
+class OneOfFieldSchema implements FieldSchema {
   private final List<FieldSchema> schemaList;

-  public OneOfFieldSchema(
-      List<FieldSchema> schemaList) {
+  OneOfFieldSchema(List<FieldSchema> schemaList) {
     this.schemaList = schemaList;
   }
@@ -94,6 +94,9 @@ public class ProtobufSchemaConverter implements JsonSchemaConverter<Descriptors.Descriptor> {
     if (wellKnownTypeSchema.isPresent()) {
       return wellKnownTypeSchema.get();
     }
+    if (field.isMapField()) {
+      return new MapFieldSchema();
+    }
     final JsonType jsonType = convertType(field);
     FieldSchema fieldSchema;
     if (jsonType.getType().equals(JsonType.Type.OBJECT)) {

@@ -149,67 +152,47 @@ public class ProtobufSchemaConverter implements JsonSchemaConverter<Descriptors.Descriptor> {
   }

   private JsonType convertType(Descriptors.FieldDescriptor field) {
-    switch (field.getType()) {
-      case INT32:
-      case FIXED32:
-      case SFIXED32:
-      case SINT32:
-        return new SimpleJsonType(
-            JsonType.Type.INTEGER,
-            Map.of(
-                "maximum", IntNode.valueOf(Integer.MAX_VALUE),
-                "minimum", IntNode.valueOf(Integer.MIN_VALUE)
-            )
-        );
-      case UINT32:
-        return new SimpleJsonType(
-            JsonType.Type.INTEGER,
-            Map.of(
-                "maximum", LongNode.valueOf(UnsignedInteger.MAX_VALUE.longValue()),
-                "minimum", IntNode.valueOf(0)
-            )
-        );
+    return switch (field.getType()) {
+      case INT32, FIXED32, SFIXED32, SINT32 -> new SimpleJsonType(
+          JsonType.Type.INTEGER,
+          Map.of(
+              "maximum", IntNode.valueOf(Integer.MAX_VALUE),
+              "minimum", IntNode.valueOf(Integer.MIN_VALUE)
+          )
+      );
+      case UINT32 -> new SimpleJsonType(
+          JsonType.Type.INTEGER,
+          Map.of(
+              "maximum", LongNode.valueOf(UnsignedInteger.MAX_VALUE.longValue()),
+              "minimum", IntNode.valueOf(0)
+          )
+      );
       //TODO: actually all *64 types will be printed with quotes (as strings),
       // see JsonFormat::printSingleFieldValue for impl. This can cause problems when you copy-paste from messages
       // table to `Produce` area - need to think if it is critical or not.
-      case INT64:
-      case FIXED64:
-      case SFIXED64:
-      case SINT64:
-        return new SimpleJsonType(
-            JsonType.Type.INTEGER,
-            Map.of(
-                "maximum", LongNode.valueOf(Long.MAX_VALUE),
-                "minimum", LongNode.valueOf(Long.MIN_VALUE)
-            )
-        );
-      case UINT64:
-        return new SimpleJsonType(
-            JsonType.Type.INTEGER,
-            Map.of(
-                "maximum", new BigIntegerNode(UnsignedLong.MAX_VALUE.bigIntegerValue()),
-                "minimum", LongNode.valueOf(0)
-            )
-        );
-      case MESSAGE:
-      case GROUP:
-        return new SimpleJsonType(JsonType.Type.OBJECT);
-      case ENUM:
-        return new EnumJsonType(
-            field.getEnumType().getValues().stream()
-                .map(Descriptors.EnumValueDescriptor::getName)
-                .collect(Collectors.toList())
-        );
-      case BYTES:
-      case STRING:
-        return new SimpleJsonType(JsonType.Type.STRING);
-      case FLOAT:
-      case DOUBLE:
-        return new SimpleJsonType(JsonType.Type.NUMBER);
-      case BOOL:
-        return new SimpleJsonType(JsonType.Type.BOOLEAN);
-      default:
-        return new SimpleJsonType(JsonType.Type.STRING);
-    }
+      case INT64, FIXED64, SFIXED64, SINT64 -> new SimpleJsonType(
+          JsonType.Type.INTEGER,
+          Map.of(
+              "maximum", LongNode.valueOf(Long.MAX_VALUE),
+              "minimum", LongNode.valueOf(Long.MIN_VALUE)
+          )
+      );
+      case UINT64 -> new SimpleJsonType(
+          JsonType.Type.INTEGER,
+          Map.of(
+              "maximum", new BigIntegerNode(UnsignedLong.MAX_VALUE.bigIntegerValue()),
+              "minimum", LongNode.valueOf(0)
+          )
+      );
+      case MESSAGE, GROUP -> new SimpleJsonType(JsonType.Type.OBJECT);
+      case ENUM -> new EnumJsonType(
+          field.getEnumType().getValues().stream()
+              .map(Descriptors.EnumValueDescriptor::getName)
+              .collect(Collectors.toList())
+      );
+      case BYTES, STRING -> new SimpleJsonType(JsonType.Type.STRING);
+      case FLOAT, DOUBLE -> new SimpleJsonType(JsonType.Type.NUMBER);
+      case BOOL -> new SimpleJsonType(JsonType.Type.BOOLEAN);
+    };
   }
 }
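Note: the convertType() rewrite above replaces a fall-through switch statement with a Java 14+ switch expression: case labels are grouped with commas, each arm yields a value, and since every FieldDescriptor.Type constant is now covered the old default branch could be dropped (the compiler enforces exhaustiveness for enum switch expressions). A compact sketch of the pattern with a hypothetical enum:

```java
// Grouped case labels and arrow arms: no break, no fall-through, and the
// compiler rejects the switch if an enum constant is left uncovered.
class SwitchSketch {
  enum Kind { INT32, INT64, STRING, BOOL }

  static String jsonType(Kind kind) {
    return switch (kind) {
      case INT32, INT64 -> "integer";
      case STRING -> "string";
      case BOOL -> "boolean";
    };
  }

  public static void main(String[] args) {
    System.out.println(jsonType(Kind.INT64)); // integer
  }
}
```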
@@ -4,10 +4,10 @@ import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.TextNode;

-public class RefFieldSchema implements FieldSchema {
+class RefFieldSchema implements FieldSchema {
   private final String ref;

-  public RefFieldSchema(String ref) {
+  RefFieldSchema(String ref) {
     this.ref = ref;
   }

@@ -16,7 +16,7 @@ public class RefFieldSchema implements FieldSchema {
     return mapper.createObjectNode().set("$ref", new TextNode(ref));
   }

-  public String getRef() {
+  String getRef() {
     return ref;
   }
 }
@@ -3,10 +3,10 @@ package com.provectus.kafka.ui.util.jsonschema;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;

-public class SimpleFieldSchema implements FieldSchema {
+class SimpleFieldSchema implements FieldSchema {
   private final JsonType type;

-  public SimpleFieldSchema(JsonType type) {
+  SimpleFieldSchema(JsonType type) {
     this.type = type;
   }
@@ -6,15 +6,15 @@ import com.fasterxml.jackson.databind.node.TextNode;
 import com.google.common.collect.ImmutableMap;
 import java.util.Map;

-public class SimpleJsonType extends JsonType {
+class SimpleJsonType extends JsonType {

   private final Map<String, JsonNode> additionalTypeProperties;

-  public SimpleJsonType(Type type) {
+  SimpleJsonType(Type type) {
     this(type, Map.of());
   }

-  public SimpleJsonType(Type type, Map<String, JsonNode> additionalTypeProperties) {
+  SimpleJsonType(Type type, Map<String, JsonNode> additionalTypeProperties) {
     super(type);
     this.additionalTypeProperties = additionalTypeProperties;
   }
@@ -0,0 +1,83 @@
+package com.provectus.kafka.ui.model;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.provectus.kafka.ui.service.ReactiveAdminClient;
+import java.math.BigDecimal;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import org.apache.kafka.clients.admin.TopicDescription;
+import org.apache.kafka.common.Node;
+import org.apache.kafka.common.TopicPartitionInfo;
+import org.assertj.core.data.Percentage;
+import org.junit.jupiter.api.Test;
+
+class PartitionDistributionStatsTest {
+
+  @Test
+  void skewCalculatedBasedOnPartitionsCounts() {
+    Node n1 = new Node(1, "n1", 9092);
+    Node n2 = new Node(2, "n2", 9092);
+    Node n3 = new Node(3, "n3", 9092);
+    Node n4 = new Node(4, "n4", 9092);
+
+    var stats = PartitionDistributionStats.create(
+        Statistics.builder()
+            .clusterDescription(
+                new ReactiveAdminClient.ClusterDescription(null, "test", Set.of(n1, n2, n3), null))
+            .topicDescriptions(
+                Map.of(
+                    "t1", new TopicDescription(
+                        "t1", false,
+                        List.of(
+                            new TopicPartitionInfo(0, n1, List.of(n1, n2), List.of(n1, n2)),
+                            new TopicPartitionInfo(1, n2, List.of(n2, n3), List.of(n2, n3))
+                        )
+                    ),
+                    "t2", new TopicDescription(
+                        "t2", false,
+                        List.of(
+                            new TopicPartitionInfo(0, n1, List.of(n1, n2), List.of(n1, n2)),
+                            new TopicPartitionInfo(1, null, List.of(n2, n1), List.of(n1))
+                        )
+                    )
+                )
+            )
+            .build(), 4
+    );
+
+    assertThat(stats.getPartitionLeaders())
+        .containsExactlyInAnyOrderEntriesOf(Map.of(n1, 2, n2, 1));
+    assertThat(stats.getPartitionsCount())
+        .containsExactlyInAnyOrderEntriesOf(Map.of(n1, 3, n2, 4, n3, 1));
+    assertThat(stats.getInSyncPartitions())
+        .containsExactlyInAnyOrderEntriesOf(Map.of(n1, 3, n2, 3, n3, 1));
+
+    // Node(partitions): n1(3), n2(4), n3(1), n4(0)
+    // average partitions cnt = (3+4+1) / 3 = 2.666 (counting only nodes with partitions!)
+    assertThat(stats.getAvgPartitionsPerBroker())
+        .isCloseTo(2.666, Percentage.withPercentage(1));
+
+    assertThat(stats.partitionsSkew(n1))
+        .isCloseTo(BigDecimal.valueOf(12.5), Percentage.withPercentage(1));
+    assertThat(stats.partitionsSkew(n2))
+        .isCloseTo(BigDecimal.valueOf(50), Percentage.withPercentage(1));
+    assertThat(stats.partitionsSkew(n3))
+        .isCloseTo(BigDecimal.valueOf(-62.5), Percentage.withPercentage(1));
+    assertThat(stats.partitionsSkew(n4))
+        .isCloseTo(BigDecimal.valueOf(-100), Percentage.withPercentage(1));
+
+    // Node(leaders): n1(2), n2(1), n3(0), n4(0)
+    // average leaders cnt = (2+1) / 2 = 1.5 (counting only nodes with leaders!)
+    assertThat(stats.leadersSkew(n1))
+        .isCloseTo(BigDecimal.valueOf(33.33), Percentage.withPercentage(1));
+    assertThat(stats.leadersSkew(n2))
+        .isCloseTo(BigDecimal.valueOf(-33.33), Percentage.withPercentage(1));
+    assertThat(stats.leadersSkew(n3))
+        .isCloseTo(BigDecimal.valueOf(-100), Percentage.withPercentage(1));
+    assertThat(stats.leadersSkew(n4))
+        .isCloseTo(BigDecimal.valueOf(-100), Percentage.withPercentage(1));
+  }
+}
@@ -59,8 +59,10 @@ class ProtobufSchemaConverterTest {
                 TestMsg outer_ref = 2;
                 EmbeddedMsg self_ref = 3;
             }
-        }""";
+
+            map<int32, string> intToStringMap = 21;
+            map<string, EmbeddedMsg> strToObjMap = 22;
+        }""";

         String expectedJsonSchema = """
             {

@@ -109,7 +111,9 @@ class ProtobufSchemaConverterTest {
             "v2": { "type": [ "number", "string", "object", "array", "boolean", "null" ] },
             "uint32_w_field": { "type": "integer", "maximum": 4294967295, "minimum": 0 },
             "bool_w_field": { "type": "boolean" },
-            "uint64_w_field": { "type": "integer", "maximum": 18446744073709551615, "minimum": 0 }
+            "uint64_w_field": { "type": "integer", "maximum": 18446744073709551615, "minimum": 0 },
+            "strToObjMap": { "type": "object", "additionalProperties": true },
+            "intToStringMap": { "type": "object", "additionalProperties": true }
           }
         },
         "test.TestMsg.EmbeddedMsg": {
@@ -2375,6 +2375,16 @@ components:
         type: number
       bytesOutPerSec:
         type: number
+      partitionsLeader:
+        type: integer
+      partitions:
+        type: integer
+      inSyncPartitions:
+        type: integer
+      partitionsSkew:
+        type: number
+      leadersSkew:
+        type: number
     required:
       - id

@@ -2441,6 +2451,7 @@ components:
       - MEMBERS
       - STATE
       - MESSAGES_BEHIND
+      - TOPIC_NUM

     ConsumerGroupsPageResponse:
       type: object

@@ -3485,6 +3496,10 @@ components:
         type: integer
       noDataEmptyPolls:
         type: integer
+      maxPageSize:
+        type: integer
+      defaultPageSize:
+        type: integer
       adminClientTimeout:
         type: integer
       internalTopicPrefix:
|
@@ -36,29 +36,31 @@ import org.springframework.web.reactive.function.client.WebClientResponseException;
@Slf4j
public class ApiService extends BaseSource {

+ private final ApiClient apiClient = new ApiClient().setBasePath(BASE_API_URL);
+
  @SneakyThrows
  private TopicsApi topicApi() {
-   return new TopicsApi(new ApiClient().setBasePath(BASE_API_URL));
+   return new TopicsApi(apiClient);
  }

  @SneakyThrows
  private SchemasApi schemaApi() {
-   return new SchemasApi(new ApiClient().setBasePath(BASE_API_URL));
+   return new SchemasApi(apiClient);
  }

  @SneakyThrows
  private KafkaConnectApi connectorApi() {
-   return new KafkaConnectApi(new ApiClient().setBasePath(BASE_API_URL));
+   return new KafkaConnectApi(apiClient);
  }

  @SneakyThrows
  private MessagesApi messageApi() {
-   return new MessagesApi(new ApiClient().setBasePath(BASE_API_URL));
+   return new MessagesApi(apiClient);
  }

  @SneakyThrows
  private KsqlApi ksqlApi() {
-   return new KsqlApi(new ApiClient().setBasePath(BASE_API_URL));
+   return new KsqlApi(apiClient);
  }

  @SneakyThrows
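The ApiService hunk above replaces a freshly built ApiClient per accessor with a single shared instance. A minimal sketch of the pattern, assuming the generated ApiClient is safe to share across the generated *Api wrappers (the class name and base URL here are placeholders):

    // Sketch only: one configured client feeds every generated API wrapper.
    public class SharedClientSketch {
      private static final String BASE_API_URL = "http://localhost:8080"; // placeholder
      private final ApiClient apiClient = new ApiClient().setBasePath(BASE_API_URL);

      private TopicsApi topicApi() {
        return new TopicsApi(apiClient); // reuse, instead of new ApiClient() per call
      }
    }

This avoids re-creating the client (and its underlying HTTP machinery) on every accessor call.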
@@ -2,6 +2,7 @@ package com.provectus.kafka.ui.manualsuite.backlog;

+import static com.provectus.kafka.ui.qasesuite.BaseQaseTest.BROKERS_SUITE_ID;
import static com.provectus.kafka.ui.qasesuite.BaseQaseTest.KSQL_DB_SUITE_ID;
import static com.provectus.kafka.ui.qasesuite.BaseQaseTest.SCHEMAS_SUITE_ID;
import static com.provectus.kafka.ui.qasesuite.BaseQaseTest.TOPICS_PROFILE_SUITE_ID;
import static com.provectus.kafka.ui.utilities.qase.enums.State.TO_BE_AUTOMATED;
@@ -35,37 +36,65 @@ public class SmokeBacklog extends BaseManualTest {
  }

  @Automation(state = TO_BE_AUTOMATED)
- @Suite(id = KSQL_DB_SUITE_ID)
- @QaseId(284)
+ @Suite(id = BROKERS_SUITE_ID)
+ @QaseId(331)
  @Test
  public void testCaseD() {
  }

- @Automation(state = TO_BE_AUTOMATED)
- @Suite(id = BROKERS_SUITE_ID)
- @QaseId(331)
- @Test
- public void testCaseE() {
- }
-
  @Automation(state = TO_BE_AUTOMATED)
  @Suite(id = BROKERS_SUITE_ID)
  @QaseId(332)
  @Test
- public void testCaseF() {
+ public void testCaseE() {
  }

  @Automation(state = TO_BE_AUTOMATED)
  @Suite(id = TOPICS_PROFILE_SUITE_ID)
  @QaseId(335)
  @Test
- public void testCaseG() {
+ public void testCaseF() {
  }

+ @Automation(state = TO_BE_AUTOMATED)
+ @Suite(id = TOPICS_PROFILE_SUITE_ID)
+ @QaseId(336)
+ @Test
+ public void testCaseG() {
+ }
+
+ @Automation(state = TO_BE_AUTOMATED)
+ @Suite(id = TOPICS_PROFILE_SUITE_ID)
+ @QaseId(343)
+ @Test
+ public void testCaseH() {
+ }
+
+ @Automation(state = TO_BE_AUTOMATED)
+ @Suite(id = KSQL_DB_SUITE_ID)
+ @QaseId(344)
+ @Test
+ public void testCaseI() {
+ }
+
+ @Automation(state = TO_BE_AUTOMATED)
+ @Suite(id = SCHEMAS_SUITE_ID)
+ @QaseId(345)
+ @Test
+ public void testCaseJ() {
+ }
+
+ @Automation(state = TO_BE_AUTOMATED)
+ @Suite(id = SCHEMAS_SUITE_ID)
+ @QaseId(346)
+ @Test
+ public void testCaseK() {
+ }
+
+ @Automation(state = TO_BE_AUTOMATED)
+ @Suite(id = TOPICS_PROFILE_SUITE_ID)
+ @QaseId(347)
+ @Test
+ public void testCaseL() {
+ }
}
|
@ -92,4 +92,28 @@ public class TopicsTest extends BaseManualTest {
|
|||
@Test
|
||||
public void testCaseN() {
|
||||
}
|
||||
|
||||
@Automation(state = NOT_AUTOMATED)
|
||||
@QaseId(337)
|
||||
@Test
|
||||
public void testCaseO() {
|
||||
}
|
||||
|
||||
@Automation(state = NOT_AUTOMATED)
|
||||
@QaseId(339)
|
||||
@Test
|
||||
public void testCaseP() {
|
||||
}
|
||||
|
||||
@Automation(state = NOT_AUTOMATED)
|
||||
@QaseId(341)
|
||||
@Test
|
||||
public void testCaseQ() {
|
||||
}
|
||||
|
||||
@Automation(state = NOT_AUTOMATED)
|
||||
@QaseId(342)
|
||||
@Test
|
||||
public void testCaseR() {
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -14,4 +14,16 @@ public class WizardTest extends BaseManualTest {
  @Test
  public void testCaseA() {
  }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(338)
+ @Test
+ public void testCaseB() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(340)
+ @Test
+ public void testCaseC() {
+ }
}
@@ -1,5 +1,6 @@
package com.provectus.kafka.ui.smokesuite.ksqldb;

+import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlMenuTabs.STREAMS;
import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_TABLES;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
@@ -39,17 +40,21 @@ public class KsqlDbTest extends BaseTest {
        FIRST_TABLE.getName(), SECOND_TABLE.getName()));
  }

- @QaseId(86)
+ @QaseId(284)
  @Test(priority = 1)
- public void clearResultsForExecutedRequest() {
-   navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
+ public void streamsAndTablesVisibilityCheck() {
+   naviSideBar
+       .openSideMenu(KSQL_DB);
+   ksqlDbList
+       .waitUntilScreenReady();
    SoftAssert softly = new SoftAssert();
-   softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
-   softly.assertAll();
-   ksqlQueryForm
-       .clickClearResultsBtn();
-   softly.assertFalse(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
+   softly.assertTrue(ksqlDbList.getTableByName(FIRST_TABLE.getName()).isVisible(), "getTableByName()");
+   softly.assertTrue(ksqlDbList.getTableByName(SECOND_TABLE.getName()).isVisible(), "getTableByName()");
    softly.assertAll();
+   ksqlDbList
+       .openDetailsTab(STREAMS)
+       .waitUntilScreenReady();
+   Assert.assertTrue(ksqlDbList.getStreamByName(DEFAULT_STREAM.getName()).isVisible(), "getStreamByName()");
  }

  @QaseId(276)
@@ -68,11 +73,31 @@ public class KsqlDbTest extends BaseTest {
    navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
    SoftAssert softly = new SoftAssert();
    softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
-   softly.assertTrue(ksqlQueryForm.getItemByName(FIRST_TABLE.getName()).isVisible(), "getItemByName()");
-   softly.assertTrue(ksqlQueryForm.getItemByName(SECOND_TABLE.getName()).isVisible(), "getItemByName()");
+   softly.assertTrue(ksqlQueryForm.getItemByName(FIRST_TABLE.getName()).isVisible(),
+       String.format("getItemByName(%s)", FIRST_TABLE.getName()));
+   softly.assertTrue(ksqlQueryForm.getItemByName(SECOND_TABLE.getName()).isVisible(),
+       String.format("getItemByName(%s)", SECOND_TABLE.getName()));
    softly.assertAll();
  }

+ @QaseId(86)
+ @Test(priority = 4)
+ public void clearResultsForExecutedRequest() {
+   navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
+   SoftAssert softly = new SoftAssert();
+   softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
+   softly.assertAll();
+   ksqlQueryForm
+       .clickClearResultsBtn();
+   softly.assertFalse(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
+   softly.assertAll();
+ }
+
+ @AfterClass(alwaysRun = true)
+ public void afterClass() {
+   TOPIC_NAMES_LIST.forEach(topicName -> apiService.deleteTopic(topicName));
+ }
+
  @Step
  private void navigateToKsqlDbAndExecuteRequest(String query) {
    naviSideBar
@@ -85,9 +110,4 @@ public class KsqlDbTest extends BaseTest {
        .setQuery(query)
        .clickExecuteBtn(query);
  }
-
- @AfterClass(alwaysRun = true)
- public void afterClass() {
-   TOPIC_NAMES_LIST.forEach(topicName -> apiService.deleteTopic(topicName));
- }
}
@@ -486,11 +486,7 @@ public class TopicsTest extends BaseTest {
    topicDetails
        .waitUntilScreenReady();
    TOPIC_LIST.add(topicToCopy);
-   SoftAssert softly = new SoftAssert();
-   softly.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS, "Topic successfully created."),
-       "isAlertWithMessageVisible()");
-   softly.assertTrue(topicDetails.isTopicHeaderVisible(topicToCopy.getName()), "isTopicHeaderVisible()");
-   softly.assertAll();
+   Assert.assertTrue(topicDetails.isTopicHeaderVisible(topicToCopy.getName()), "isTopicHeaderVisible()");
  }

  @AfterClass(alwaysRun = true)
@@ -1,26 +1,41 @@
import React from 'react';
-import { FullConnectorInfo } from 'generated-sources';
+import {
+  Action,
+  ConnectorAction,
+  ConnectorState,
+  FullConnectorInfo,
+  ResourceType,
+} from 'generated-sources';
import { CellContext } from '@tanstack/react-table';
import { ClusterNameRoute } from 'lib/paths';
import useAppParams from 'lib/hooks/useAppParams';
import { Dropdown, DropdownItem } from 'components/common/Dropdown';
-import { useDeleteConnector } from 'lib/hooks/api/kafkaConnect';
+import {
+  useDeleteConnector,
+  useUpdateConnectorState,
+} from 'lib/hooks/api/kafkaConnect';
import { useConfirm } from 'lib/hooks/useConfirm';
+import { useIsMutating } from '@tanstack/react-query';
+import { ActionDropdownItem } from 'components/common/ActionComponent';

const ActionsCell: React.FC<CellContext<FullConnectorInfo, unknown>> = ({
  row,
}) => {
- const { connect, name } = row.original;
+ const { connect, name, status } = row.original;
  const { clusterName } = useAppParams<ClusterNameRoute>();
+ const mutationsNumber = useIsMutating();
+ const isMutating = mutationsNumber > 0;
  const confirm = useConfirm();
  const deleteMutation = useDeleteConnector({
    clusterName,
    connectName: connect,
    connectorName: name,
  });
+ const stateMutation = useUpdateConnectorState({
+   clusterName,
+   connectName: connect,
+   connectorName: name,
+ });
  const handleDelete = () => {
    confirm(
      <>
@@ -31,8 +46,66 @@ const ActionsCell: React.FC<CellContext<FullConnectorInfo, unknown>> = ({
    );
  };
- // const stateMutation = useUpdateConnectorState(routerProps);
+ const resumeConnectorHandler = () =>
+   stateMutation.mutateAsync(ConnectorAction.RESUME);
+ const restartConnectorHandler = () =>
+   stateMutation.mutateAsync(ConnectorAction.RESTART);
+
+ const restartAllTasksHandler = () =>
+   stateMutation.mutateAsync(ConnectorAction.RESTART_ALL_TASKS);
+
+ const restartFailedTasksHandler = () =>
+   stateMutation.mutateAsync(ConnectorAction.RESTART_FAILED_TASKS);
+
  return (
    <Dropdown>
+     {status.state === ConnectorState.PAUSED && (
+       <ActionDropdownItem
+         onClick={resumeConnectorHandler}
+         disabled={isMutating}
+         permission={{
+           resource: ResourceType.CONNECT,
+           action: Action.EDIT,
+           value: name,
+         }}
+       >
+         Resume
+       </ActionDropdownItem>
+     )}
+     <ActionDropdownItem
+       onClick={restartConnectorHandler}
+       disabled={isMutating}
+       permission={{
+         resource: ResourceType.CONNECT,
+         action: Action.EDIT,
+         value: name,
+       }}
+     >
+       Restart Connector
+     </ActionDropdownItem>
+     <ActionDropdownItem
+       onClick={restartAllTasksHandler}
+       disabled={isMutating}
+       permission={{
+         resource: ResourceType.CONNECT,
+         action: Action.EDIT,
+         value: name,
+       }}
+     >
+       Restart All Tasks
+     </ActionDropdownItem>
+     <ActionDropdownItem
+       onClick={restartFailedTasksHandler}
+       disabled={isMutating}
+       permission={{
+         resource: ResourceType.CONNECT,
+         action: Action.EDIT,
+         value: name,
+       }}
+     >
+       Restart Failed Tasks
+     </ActionDropdownItem>
      <DropdownItem onClick={handleDelete} danger>
        Remove Connector
      </DropdownItem>
@@ -9,7 +9,11 @@ import { screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { render, WithRoute } from 'lib/testHelpers';
import { clusterConnectConnectorPath, clusterConnectorsPath } from 'lib/paths';
-import { useConnectors, useDeleteConnector } from 'lib/hooks/api/kafkaConnect';
+import {
+  useConnectors,
+  useDeleteConnector,
+  useUpdateConnectorState,
+} from 'lib/hooks/api/kafkaConnect';

const mockedUsedNavigate = jest.fn();
const mockDelete = jest.fn();
@@ -22,6 +26,7 @@ jest.mock('react-router-dom', () => ({
jest.mock('lib/hooks/api/kafkaConnect', () => ({
  useConnectors: jest.fn(),
  useDeleteConnector: jest.fn(),
+ useUpdateConnectorState: jest.fn(),
}));

const clusterName = 'local';
@@ -42,6 +47,10 @@ describe('Connectors List', () => {
    (useConnectors as jest.Mock).mockImplementation(() => ({
      data: connectors,
    }));
+   const restartConnector = jest.fn();
+   (useUpdateConnectorState as jest.Mock).mockImplementation(() => ({
+     mutateAsync: restartConnector,
+   }));
  });

  it('renders', async () => {
@@ -51,9 +51,9 @@ const List = () => {
      accessorKey: 'members',
    },
    {
+     id: ConsumerGroupOrdering.TOPIC_NUM,
      header: 'Num Of Topics',
      accessorKey: 'topics',
-     enableSorting: false,
    },
    {
      id: ConsumerGroupOrdering.MESSAGES_BEHIND,
@@ -29,8 +29,10 @@ const Message: React.FC<Props> = ({
  timestampType,
  offset,
  key,
+ keySize,
  partition,
  content,
+ valueSize,
  headers,
  valueSerde,
  keySerde,
@@ -138,6 +140,8 @@ const Message: React.FC<Props> = ({
          headers={headers}
          timestamp={timestamp}
          timestampType={timestampType}
+         keySize={keySize}
+         contentSize={valueSize}
        />
      )}
    </>
@@ -15,6 +15,8 @@ export interface MessageContentProps {
  headers?: { [key: string]: string | undefined };
  timestamp?: Date;
  timestampType?: TopicMessageTimestampTypeEnum;
+ keySize?: number;
+ contentSize?: number;
}

const MessageContent: React.FC<MessageContentProps> = ({
@@ -23,6 +25,8 @@ const MessageContent: React.FC<MessageContentProps> = ({
  headers,
  timestamp,
  timestampType,
+ keySize,
+ contentSize,
}) => {
  const [activeTab, setActiveTab] = React.useState<Tab>('content');
  const [searchParams] = useSearchParams();
@@ -54,8 +58,7 @@ const MessageContent: React.FC<MessageContentProps> = ({
    e.preventDefault();
    setActiveTab('headers');
  };
- const keySize = new TextEncoder().encode(messageKey).length;
- const contentSize = new TextEncoder().encode(messageContent).length;

  const contentType =
    messageContent && messageContent.trim().startsWith('{')
      ? SchemaType.JSON
@@ -59,7 +59,7 @@ const Topic: React.FC = () => {

  const deleteTopicHandler = async () => {
    await deleteTopic.mutateAsync(topicName);
-   navigate('../..');
+   navigate(clusterTopicsPath(clusterName));
  };

  React.useEffect(() => {
@@ -10,6 +10,7 @@ import {
  clusterTopicMessagesPath,
  clusterTopicPath,
  clusterTopicSettingsPath,
+ clusterTopicsPath,
  clusterTopicStatisticsPath,
  getNonExactPath,
} from 'lib/paths';
@@ -179,7 +180,9 @@ describe('Details', () => {
      name: 'Confirm',
    });
    await userEvent.click(submitDeleteButton);
-   expect(mockNavigate).toHaveBeenCalledWith('../..');
+   expect(mockNavigate).toHaveBeenCalledWith(
+     clusterTopicsPath(mockClusterName)
+   );
  });

  it('shows a confirmation popup on deleting topic messages', async () => {
@@ -70,7 +70,7 @@ export const DropdownButton = styled.button`
`;

export const DangerItem = styled.div`
- color: ${({ theme: { dropdown } }) => dropdown.item.color.normal};
+ color: ${({ theme: { dropdown } }) => dropdown.item.color.danger};
`;

export const DropdownItemHint = styled.div`
@@ -76,7 +76,8 @@ export function useUpdateConnectorState(props: UseConnectorProps) {
  return useMutation(
    (action: ConnectorAction) => api.updateConnectorState({ ...props, action }),
    {
-     onSuccess: () => client.invalidateQueries(connectorKey(props)),
+     onSuccess: () =>
+       client.invalidateQueries(['clusters', props.clusterName, 'connectors']),
    }
  );
}
@@ -122,9 +122,6 @@ export function useCreateTopicMutation(clusterName: ClusterName) {
    }),
    {
      onSuccess: () => {
-       showSuccessAlert({
-         message: `Topic successfully created.`,
-       });
        client.invalidateQueries(topicKeys.all(clusterName));
      },
    }
7
pom.xml
@@ -26,7 +26,7 @@
  <assertj.version>3.19.0</assertj.version>
  <avro.version>1.11.1</avro.version>
  <byte-buddy.version>1.12.19</byte-buddy.version>
- <confluent.version>7.3.0</confluent.version>
+ <confluent.version>7.3.3</confluent.version>
  <datasketches-java.version>3.1.0</datasketches-java.version>
  <groovy.version>3.0.13</groovy.version>
  <jackson.version>2.14.0</jackson.version>

@@ -40,10 +40,11 @@
  <kafka-ui-serde-api.version>1.0.0</kafka-ui-serde-api.version>
  <odd-oddrn-generator.version>0.1.15</odd-oddrn-generator.version>
+ <odd-oddrn-client.version>0.1.23</odd-oddrn-client.version>
  <org.json.version>20230227</org.json.version>

  <!-- Test dependency versions -->
  <junit.version>5.9.1</junit.version>
- <mockito.version>5.1.1</mockito.version>
+ <mockito.version>5.3.0</mockito.version>
  <okhttp3.mockwebserver.version>4.10.0</okhttp3.mockwebserver.version>
  <testcontainers.version>1.17.5</testcontainers.version>

@@ -52,7 +53,7 @@
  <pnpm.version>v7.4.0</pnpm.version>

  <!-- Plugin versions -->
- <fabric8-maven-plugin.version>0.42.0</fabric8-maven-plugin.version>
+ <fabric8-maven-plugin.version>0.42.1</fabric8-maven-plugin.version>
  <frontend-maven-plugin.version>1.12.1</frontend-maven-plugin.version>
  <maven-clean-plugin.version>3.2.0</maven-clean-plugin.version>
  <maven-compiler-plugin.version>3.10.1</maven-compiler-plugin.version>