Merge branch 'master' into sort-by-topic-fix-style
Commit 708a8b541d
152 changed files with 3374 additions and 1052 deletions
.github/ISSUE_TEMPLATE/bug.yml (vendored, new file): 92 changes
@@ -0,0 +1,92 @@
name: "\U0001F41E Bug report"
description: File a bug report
labels: ["status/triage", "type/bug"]
assignees: []

body:
  - type: markdown
    attributes:
      value: |
        Hi, thanks for raising the issue(-s), all contributions really matter!
        Please, note that we'll close the issue without further explanation if you don't follow
        this template and don't provide the information requested within this template.

  - type: checkboxes
    id: terms
    attributes:
      label: Issue submitter TODO list
      description: By you checking these checkboxes we can be sure you've done the essential things.
      options:
        - label: I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems)
          required: true
        - label: I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues)
          required: true
        - label: I've tried running `master`-labeled docker image and the issue still persists there
          required: true
        - label: I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md)
          required: true

  - type: textarea
    attributes:
      label: Describe the bug (actual behavior)
      description: A clear and concise description of what the bug is. Use a list, if there is more than one problem
    validations:
      required: true

  - type: textarea
    attributes:
      label: Expected behavior
      description: A clear and concise description of what you expected to happen
    validations:
      required: false

  - type: textarea
    attributes:
      label: Your installation details
      description: |
        How do you run the app? Please provide as much info as possible:
        1. App version (commit hash in the top left corner of the UI)
        2. Helm chart version, if you use one
        3. Your application config. Please remove the sensitive info like passwords or API keys.
        4. Any IAAC configs
    validations:
      required: true

  - type: textarea
    attributes:
      label: Steps to reproduce
      description: |
        Please write down the order of the actions required to reproduce the issue.
        For the advanced setups/complicated issue, we might need you to provide
        a minimal [reproducible example](https://stackoverflow.com/help/minimal-reproducible-example).
    validations:
      required: true

  - type: textarea
    attributes:
      label: Screenshots
      description: |
        If applicable, add screenshots to help explain your problem
    validations:
      required: false

  - type: textarea
    attributes:
      label: Logs
      description: |
        If applicable, *upload* screenshots to help explain your problem
    validations:
      required: false

  - type: textarea
    attributes:
      label: Additional context
      description: |
        Add any other context about the problem here. E.G.:
        1. Are there any alternative scenarios (different data/methods/configuration/setup) you have tried?
           Were they successful or the same issue occurred? Please provide steps as well.
        2. Related issues (if there are any).
        3. Logs (if available)
        4. Is there any serious impact or behaviour on the end-user because of this issue, that can be overlooked?
    validations:
      required: false
.github/ISSUE_TEMPLATE/bug_report.md (vendored, deleted): 64 changes
@@ -1,64 +0,0 @@
---
name: "\U0001F41E Bug report"
about: Create a bug report
title: ''
labels: status/triage, type/bug
assignees: ''

---

<!--

We will close the issue without further explanation if you don't follow this template and don't provide the information requested within this template.

Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
https://github.com/provectus/kafka-ui/issues
https://github.com/provectus/kafka-ui/discussions

-->

<!--
Please follow the naming conventions for bugs:
<Feature/Area/Scope> : <Compact, but specific problem summary>
Avoid generic titles, like “Topics: incorrect layout of message sorting drop-down list”. Better use something like: “Topics: Message sorting drop-down list overlaps the "Submit" button”.

-->

**Describe the bug** (Actual behavior)
<!--(A clear and concise description of what the bug is.Use a list, if there is more than one problem)-->

**Expected behavior**
<!--(A clear and concise description of what you expected to happen.)-->

**Set up**
<!--
WE MIGHT CLOSE THE ISSUE without further explanation IF YOU DON'T PROVIDE THIS INFORMATION.

How do you run the app? Please provide as much info as possible:
1. App version (docker image version or check commit hash in the top left corner in UI)
2. Helm chart version, if you use one
3. Any IAAC configs
-->


**Steps to Reproduce**
<!-- We'd like you to provide an example setup (via docker-compose, helm, etc.)
to reproduce the problem, especially with a complex setups. -->

1.

**Screenshots**
<!--
(If applicable, add screenshots to help explain your problem)
-->


**Additional context**
<!--
Add any other context about the problem here. E.g.:
1. Are there any alternative scenarios (different data/methods/configuration/setup) you have tried?
   Were they successfull or same issue occured? Please provide steps as well.
2. Related issues (if there are any).
3. Logs (if available)
4. Is there any serious impact or behaviour on the end-user because of this issue, that can be overlooked?
-->
.github/ISSUE_TEMPLATE/feature.yml (vendored, new file): 66 changes
@@ -0,0 +1,66 @@
name: "\U0001F680 Feature request"
description: Propose a new feature
labels: ["status/triage", "type/feature"]
assignees: []

body:
  - type: markdown
    attributes:
      value: |
        Hi, thanks for raising the issue(-s), all contributions really matter!
        Please, note that we'll close the issue without further explanation if you don't follow
        this template and don't provide the information requested within this template.

  - type: checkboxes
    id: terms
    attributes:
      label: Issue submitter TODO list
      description: By you checking these checkboxes we can be sure you've done the essential things.
      options:
        - label: I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues)
          required: true
        - label: I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) and the feature is not present there
          required: true

  - type: textarea
    attributes:
      label: Is your proposal related to a problem?
      description: |
        Provide a clear and concise description of what the problem is.
        For example, "I'm always frustrated when..."
    validations:
      required: false

  - type: textarea
    attributes:
      label: Describe the feature you're interested in
      description: |
        Provide a clear and concise description of what you want to happen.
    validations:
      required: true

  - type: textarea
    attributes:
      label: Describe alternatives you've considered
      description: |
        Let us know about other solutions you've tried or researched.
    validations:
      required: false

  - type: input
    attributes:
      label: Version you're running
      description: |
        Please provide the app version you're currently running:
        1. App version (commit hash in the top left corner of the UI)
    validations:
      required: true

  - type: textarea
    attributes:
      label: Additional context
      description: |
        Is there anything else you can add about the proposal?
        You might want to link to related issues here, if you haven't already.
    validations:
      required: false
.github/ISSUE_TEMPLATE/feature_request.md (vendored, deleted): 46 changes
@@ -1,46 +0,0 @@
---
name: "\U0001F680 Feature request"
about: Propose a new feature
title: ''
labels: status/triage, type/feature
assignees: ''

---

<!--

Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
https://github.com/provectus/kafka-ui/issues
https://github.com/provectus/kafka-ui/discussions

-->

### Which version of the app are you running?
<!-- Please provide docker image version or check commit hash in the top left corner in UI) -->

### Is your proposal related to a problem?

<!--
Provide a clear and concise description of what the problem is.
For example, "I'm always frustrated when..."
-->

### Describe the solution you'd like

<!--
Provide a clear and concise description of what you want to happen.
-->

### Describe alternatives you've considered

<!--
Let us know about other solutions you've tried or researched.
-->

### Additional context

<!--
Is there anything else you can add about the proposal?
You might want to link to related issues here, if you haven't already.
-->
.github/ISSUE_TEMPLATE/helm.yml (vendored, new file): 92 changes
@@ -0,0 +1,92 @@
name: "⎈ K8s/Helm problem report"
description: "Report a problem with k8s/helm charts/etc"
labels: ["status/triage", "scope/k8s"]
assignees: []

body:
  - type: markdown
    attributes:
      value: |
        Hi, thanks for raising the issue(-s), all contributions really matter!
        Please, note that we'll close the issue without further explanation if you don't follow
        this template and don't provide the information requested within this template.

  - type: checkboxes
    id: terms
    attributes:
      label: Issue submitter TODO list
      description: By you checking these checkboxes we can be sure you've done the essential things.
      options:
        - label: I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems)
          required: true
        - label: I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues)
          required: true
        - label: I've tried running `master`-labeled docker image and the issue still persists there
          required: true
        - label: I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md)
          required: true

  - type: textarea
    attributes:
      label: Describe the bug (actual behavior)
      description: A clear and concise description of what the bug is. Use a list, if there is more than one problem
    validations:
      required: true

  - type: textarea
    attributes:
      label: Expected behavior
      description: A clear and concise description of what you expected to happen
    validations:
      required: false

  - type: textarea
    attributes:
      label: Your installation details
      description: |
        How do you run the app? Please provide as much info as possible:
        1. App version (commit hash in the top left corner of the UI)
        2. Helm chart version
        3. Your application config. Please remove the sensitive info like passwords or API keys.
        4. Any IAAC configs
    validations:
      required: true

  - type: textarea
    attributes:
      label: Steps to reproduce
      description: |
        Please write down the order of the actions required to reproduce the issue.
        For the advanced setups/complicated issue, we might need you to provide
        a minimal [reproducible example](https://stackoverflow.com/help/minimal-reproducible-example).
    validations:
      required: true

  - type: textarea
    attributes:
      label: Screenshots
      description: |
        If applicable, add screenshots to help explain your problem
    validations:
      required: false

  - type: textarea
    attributes:
      label: Logs
      description: |
        If applicable, *upload* screenshots to help explain your problem
    validations:
      required: false

  - type: textarea
    attributes:
      label: Additional context
      description: |
        Add any other context about the problem here. E.G.:
        1. Are there any alternative scenarios (different data/methods/configuration/setup) you have tried?
           Were they successful or the same issue occurred? Please provide steps as well.
        2. Related issues (if there are any).
        3. Logs (if available)
        4. Is there any serious impact or behaviour on the end-user because of this issue, that can be overlooked?
    validations:
      required: false
.github/ISSUE_TEMPLATE/k8s.md (vendored, deleted): 52 changes
@@ -1,52 +0,0 @@
---
name: "⎈ K8s/Helm problem report"
about: Report a problem with k8s/helm charts/etc
title: ''
labels: scope/k8s, status/triage
assignees: azatsafin

---

<!--

Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
https://github.com/provectus/kafka-ui/issues
https://github.com/provectus/kafka-ui/discussions

-->

**Describe the bug**
<!--(A clear and concise description of what the bug is.)-->


**Set up**
<!--
How do you run the app? Please provide as much info as possible:
1. App version (docker image version or check commit hash in the top left corner in UI)
2. Helm chart version, if you use one
3. Any IAAC configs

We might close the issue without further explanation if you don't provide such information.
-->


**Steps to Reproduce**
Steps to reproduce the behavior:

1.

**Expected behavior**
<!--
(A clear and concise description of what you expected to happen)
-->

**Screenshots**
<!--
(If applicable, add screenshots to help explain your problem)
-->


**Additional context**
<!--
(Add any other context about the problem here)
-->
.github/dependabot.yml (vendored): 4 changes
@@ -8,8 +8,6 @@ updates:
      timezone: Europe/Moscow
    reviewers:
      - "Haarolean"
    assignees:
      - "Haarolean"
    labels:
      - "scope/backend"
      - "type/dependencies"
@@ -99,8 +97,6 @@ updates:
      timezone: Europe/Moscow
    reviewers:
      - "Haarolean"
    assignees:
      - "Haarolean"
    labels:
      - "scope/infrastructure"
      - "type/dependencies"
.github/release_drafter.yaml (vendored): 8 changes
@@ -16,18 +16,26 @@ exclude-labels:
  - 'type/refactoring'

categories:
  - title: '🚩 Breaking Changes'
    labels:
      - 'impact/changelog'

  - title: '⚙️Features'
    labels:
      - 'type/feature'

  - title: '🪛Enhancements'
    labels:
      - 'type/enhancement'

  - title: '🔨Bug Fixes'
    labels:
      - 'type/bug'

  - title: 'Security'
    labels:
      - 'type/security'

  - title: '⎈ Helm/K8S Changes'
    labels:
      - 'scope/k8s'
.github/workflows/release_drafter.yml (vendored): 17 changes
@@ -2,18 +2,33 @@ name: Release Drafter

on:
  push:
    # branches to consider in the event; optional, defaults to all
    branches:
      - master
  workflow_dispatch:
    inputs:
      version:
        description: 'Release version'
        required: false
      branch:
        description: 'Target branch'
        required: false
        default: 'master'

permissions:
  contents: read

jobs:
  update_release_draft:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
    steps:
      - uses: release-drafter/release-drafter@v5
        with:
          config-name: release_drafter.yaml
          disable-autolabeler: true
          version: ${{ github.event.inputs.version }}
          commitish: ${{ github.event.inputs.branch }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -11,4 +11,8 @@ KafkaClient {
  user_admin="admin-secret";
};

Client {};
Client {
  org.apache.zookeeper.server.auth.DigestLoginModule required
  username="zkuser"
  password="zkuserpassword";
};
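Note: the Client section added above pairs with the Server section introduced in documentation/compose/jaas/zookeeper_jaas.conf below; the broker authenticates to ZooKeeper as zkuser/zkuserpassword, which the ZooKeeper server accepts through its user_zkuser entry.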
documentation/compose/jaas/zookeeper_jaas.conf (new file): 4 changes
@@ -0,0 +1,4 @@
Server {
  org.apache.zookeeper.server.auth.DigestLoginModule required
  user_zkuser="zkuserpassword";
};
documentation/compose/kafka-ui-acl-with-zk.yaml (new file): 59 changes
@@ -0,0 +1,59 @@
---
version: '2'
services:

  kafka-ui:
    container_name: kafka-ui
    image: provectuslabs/kafka-ui:latest
    ports:
      - 8080:8080
    depends_on:
      - zookeeper
      - kafka
    environment:
      KAFKA_CLUSTERS_0_NAME: local
      KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092
      KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SASL_PLAINTEXT
      KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM: PLAIN
      KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin-secret";'

  zookeeper:
    image: wurstmeister/zookeeper:3.4.6
    environment:
      JVMFLAGS: "-Djava.security.auth.login.config=/etc/zookeeper/zookeeper_jaas.conf"
    volumes:
      - ./jaas/zookeeper_jaas.conf:/etc/zookeeper/zookeeper_jaas.conf
    ports:
      - 2181:2181

  kafka:
    image: confluentinc/cp-kafka:7.2.1
    hostname: kafka
    container_name: kafka
    ports:
      - "9092:9092"
      - "9997:9997"
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,SASL_PLAINTEXT:SASL_PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
      KAFKA_ADVERTISED_LISTENERS: 'SASL_PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092'
      KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/jaas/kafka_server.conf"
      KAFKA_AUTHORIZER_CLASS_NAME: "kafka.security.authorizer.AclAuthorizer"
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
      KAFKA_JMX_PORT: 9997
      KAFKA_JMX_HOSTNAME: localhost
      KAFKA_NODE_ID: 1
      KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka:29093'
      KAFKA_LISTENERS: 'SASL_PLAINTEXT://kafka:29092,CONTROLLER://kafka:29093,PLAINTEXT_HOST://0.0.0.0:9092'
      KAFKA_INTER_BROKER_LISTENER_NAME: 'SASL_PLAINTEXT'
      KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
      KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: 'PLAIN'
      KAFKA_SECURITY_PROTOCOL: 'SASL_PLAINTEXT'
      KAFKA_SUPER_USERS: 'User:admin'
    volumes:
      - ./scripts/update_run.sh:/tmp/update_run.sh
      - ./jaas:/etc/kafka/jaas
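Usage note (standard Docker Compose, not part of this diff): a typical way to try this stack locally would be docker-compose -f documentation/compose/kafka-ui-acl-with-zk.yaml up, after which the UI is reachable on localhost:8080 and the broker enforces ACLs with User:admin as the super user.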
@@ -12,7 +12,7 @@
  <artifactId>kafka-ui-api</artifactId>

  <properties>
    <jacoco.version>0.8.8</jacoco.version>
    <jacoco.version>0.8.10</jacoco.version>
    <sonar.java.coveragePlugin>jacoco</sonar.java.coveragePlugin>
    <sonar.dynamicAnalysis>reuseReports</sonar.dynamicAnalysis>
    <sonar.jacoco.reportPath>${project.basedir}/target/jacoco.exec</sonar.jacoco.reportPath>
@@ -21,6 +21,12 @@
  </properties>

  <dependencies>
    <dependency>
      <!--TODO: remove, when spring-boot fixed dependency to 6.0.8+ (6.0.7 has CVE) -->
      <groupId>org.springframework</groupId>
      <artifactId>spring-core</artifactId>
      <version>6.0.8</version>
    </dependency>
    <dependency>
      <groupId>org.springframework.boot</groupId>
      <artifactId>spring-boot-starter-webflux</artifactId>
@@ -109,6 +115,12 @@
      <groupId>io.projectreactor.addons</groupId>
      <artifactId>reactor-extra</artifactId>
    </dependency>
    <!-- https://github.com/provectus/kafka-ui/pull/3693 -->
    <dependency>
      <groupId>org.json</groupId>
      <artifactId>json</artifactId>
      <version>${org.json.version}</version>
    </dependency>

    <dependency>
      <groupId>org.springframework.boot</groupId>
@@ -131,8 +131,9 @@ public class ClustersProperties {
  @Data
  public static class Masking {
    Type type;
    List<String> fields; //if null or empty list - policy will be applied to all fields
    List<String> pattern; //used when type=MASK
    List<String> fields;
    String fieldsNamePattern;
    List<String> maskingCharsReplacement; //used when type=MASK
    String replacement; //used when type=REPLACE
    String topicKeysPattern;
    String topicValuesPattern;
@@ -0,0 +1,24 @@
package com.provectus.kafka.ui.config.auth;

import lombok.Data;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;

@ConfigurationProperties("spring.ldap")
@Data
public class LdapProperties {

  private String urls;
  private String base;
  private String adminUser;
  private String adminPassword;
  private String userFilterSearchBase;
  private String userFilterSearchFilter;
  private String groupFilterSearchBase;

  @Value("${oauth2.ldap.activeDirectory:false}")
  private boolean isActiveDirectory;
  @Value("${oauth2.ldap.aсtiveDirectory.domain:@null}")
  private String activeDirectoryDomain;

}
@@ -1,13 +1,21 @@
package com.provectus.kafka.ui.config.auth;

import static com.provectus.kafka.ui.config.auth.AbstractAuthSecurityConfig.AUTH_WHITELIST;

import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.Collection;
import java.util.List;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.ldap.LdapAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Primary;
import org.springframework.ldap.core.DirContextOperations;
import org.springframework.ldap.core.support.BaseLdapPathContextSource;
import org.springframework.ldap.core.support.LdapContextSource;
import org.springframework.security.authentication.AuthenticationManager;
@@ -16,91 +24,118 @@ import org.springframework.security.authentication.ReactiveAuthenticationManager
import org.springframework.security.authentication.ReactiveAuthenticationManagerAdapter;
import org.springframework.security.config.annotation.web.reactive.EnableWebFluxSecurity;
import org.springframework.security.config.web.server.ServerHttpSecurity;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.ldap.authentication.AbstractLdapAuthenticationProvider;
import org.springframework.security.ldap.authentication.BindAuthenticator;
import org.springframework.security.ldap.authentication.LdapAuthenticationProvider;
import org.springframework.security.ldap.authentication.ad.ActiveDirectoryLdapAuthenticationProvider;
import org.springframework.security.ldap.search.FilterBasedLdapUserSearch;
import org.springframework.security.ldap.search.LdapUserSearch;
import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;
import org.springframework.security.ldap.userdetails.LdapAuthoritiesPopulator;
import org.springframework.security.ldap.userdetails.LdapUserDetailsMapper;
import org.springframework.security.web.server.SecurityWebFilterChain;

@Configuration
@EnableWebFluxSecurity
@ConditionalOnProperty(value = "auth.type", havingValue = "LDAP")
@Import(LdapAutoConfiguration.class)
@EnableConfigurationProperties(LdapProperties.class)
@RequiredArgsConstructor
@Slf4j
public class LdapSecurityConfig extends AbstractAuthSecurityConfig {
public class LdapSecurityConfig {

  @Value("${spring.ldap.urls}")
  private String ldapUrls;
  @Value("${spring.ldap.dn.pattern:#{null}}")
  private String ldapUserDnPattern;
  @Value("${spring.ldap.adminUser:#{null}}")
  private String adminUser;
  @Value("${spring.ldap.adminPassword:#{null}}")
  private String adminPassword;
  @Value("${spring.ldap.userFilter.searchBase:#{null}}")
  private String userFilterSearchBase;
  @Value("${spring.ldap.userFilter.searchFilter:#{null}}")
  private String userFilterSearchFilter;

  @Value("${oauth2.ldap.activeDirectory:false}")
  private boolean isActiveDirectory;
  @Value("${oauth2.ldap.aсtiveDirectory.domain:#{null}}")
  private String activeDirectoryDomain;
  private final LdapProperties props;

  @Bean
  public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSource contextSource) {
  public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSource contextSource,
                                                             LdapAuthoritiesPopulator ldapAuthoritiesPopulator,
                                                             @Nullable AccessControlService acs) {
    var rbacEnabled = acs != null && acs.isRbacEnabled();
    BindAuthenticator ba = new BindAuthenticator(contextSource);
    if (ldapUserDnPattern != null) {
      ba.setUserDnPatterns(new String[] {ldapUserDnPattern});
    if (props.getBase() != null) {
      ba.setUserDnPatterns(new String[] {props.getBase()});
    }
    if (userFilterSearchFilter != null) {
    if (props.getUserFilterSearchFilter() != null) {
      LdapUserSearch userSearch =
          new FilterBasedLdapUserSearch(userFilterSearchBase, userFilterSearchFilter, contextSource);
          new FilterBasedLdapUserSearch(props.getUserFilterSearchBase(), props.getUserFilterSearchFilter(),
              contextSource);
      ba.setUserSearch(userSearch);
    }

    AbstractLdapAuthenticationProvider authenticationProvider;
    if (!isActiveDirectory) {
      authenticationProvider = new LdapAuthenticationProvider(ba);
    if (!props.isActiveDirectory()) {
      authenticationProvider = rbacEnabled
          ? new LdapAuthenticationProvider(ba, ldapAuthoritiesPopulator)
          : new LdapAuthenticationProvider(ba);
    } else {
      authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(activeDirectoryDomain, ldapUrls);
      authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(props.getActiveDirectoryDomain(),
          props.getUrls()); // TODO Issue #3741
      authenticationProvider.setUseAuthenticationRequestCredentials(true);
    }

    if (rbacEnabled) {
      authenticationProvider.setUserDetailsContextMapper(new UserDetailsMapper());
    }

    AuthenticationManager am = new ProviderManager(List.of(authenticationProvider));

    return new ReactiveAuthenticationManagerAdapter(am);
  }

  @Bean
  @Primary
  public BaseLdapPathContextSource contextSource() {
    LdapContextSource ctx = new LdapContextSource();
    ctx.setUrl(ldapUrls);
    ctx.setUserDn(adminUser);
    ctx.setPassword(adminPassword);
    ctx.setUrl(props.getUrls());
    ctx.setUserDn(props.getAdminUser());
    ctx.setPassword(props.getAdminPassword());
    ctx.afterPropertiesSet();
    return ctx;
  }

  @Bean
  @Primary
  public LdapAuthoritiesPopulator ldapAuthoritiesPopulator(BaseLdapPathContextSource contextSource) {
    var authoritiesPopulator = new DefaultLdapAuthoritiesPopulator(contextSource, props.getGroupFilterSearchBase());
    authoritiesPopulator.setRolePrefix("");
    authoritiesPopulator.setConvertToUpperCase(false);
    return authoritiesPopulator;
  }

  @Bean
  public SecurityWebFilterChain configureLdap(ServerHttpSecurity http) {
    log.info("Configuring LDAP authentication.");
    if (isActiveDirectory) {
    if (props.isActiveDirectory()) {
      log.info("Active Directory support for LDAP has been enabled.");
    }

    http
    return http
        .authorizeExchange()
        .pathMatchers(AUTH_WHITELIST)
        .permitAll()
        .anyExchange()
        .authenticated()
        .and()
        .httpBasic();

    return http.csrf().disable().build();
        .and()
        .formLogin()

        .and()
        .logout()

        .and()
        .csrf().disable()
        .build();
  }

  private static class UserDetailsMapper extends LdapUserDetailsMapper {
    @Override
    public UserDetails mapUserFromContext(DirContextOperations ctx, String username,
                                          Collection<? extends GrantedAuthority> authorities) {
      UserDetails userDetails = super.mapUserFromContext(ctx, username, authorities);
      return new RbacLdapUser(userDetails);
    }
  }

}
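For context on the new ldapAuthoritiesPopulator bean: with an empty role prefix and upper-casing disabled, LDAP group names are passed through verbatim as authorities, which is what group-based RBAC matching relies on. A minimal sketch of that effect, assuming spring-security-ldap on the classpath; the "ou=groups,dc=example,dc=com" search base is a made-up example:

import org.springframework.ldap.core.support.BaseLdapPathContextSource;
import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;

class LdapAuthoritiesSketch {
  // Builds a populator that returns group names untouched.
  static DefaultLdapAuthoritiesPopulator populator(BaseLdapPathContextSource contextSource) {
    var populator = new DefaultLdapAuthoritiesPopulator(contextSource, "ou=groups,dc=example,dc=com");
    populator.setRolePrefix("");            // a group "kafka-admins" stays "kafka-admins", not "ROLE_kafka-admins"
    populator.setConvertToUpperCase(false); // and is not upper-cased to "KAFKA-ADMINS"
    return populator;
  }
}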
@@ -1,6 +1,7 @@
package com.provectus.kafka.ui.config.auth;

import jakarta.annotation.PostConstruct;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
@@ -14,7 +15,16 @@ public class OAuthProperties {
  private Map<String, OAuth2Provider> client = new HashMap<>();

  @PostConstruct
  public void validate() {
  public void init() {
    getClient().values().forEach((provider) -> {
      if (provider.getCustomParams() == null) {
        provider.setCustomParams(Collections.emptyMap());
      }
      if (provider.getScope() == null) {
        provider.setScope(Collections.emptySet());
      }
    });

    getClient().values().forEach(this::validateProvider);
  }
@@ -73,8 +73,7 @@ public final class OAuthPropertiesConverter {
  }

  private static boolean isGoogle(OAuth2Provider provider) {
    return provider.getCustomParams() != null
        && GOOGLE.equalsIgnoreCase(provider.getCustomParams().get(TYPE));
    return GOOGLE.equalsIgnoreCase(provider.getCustomParams().get(TYPE));
  }
}
@@ -72,13 +72,13 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
    final OidcReactiveOAuth2UserService delegate = new OidcReactiveOAuth2UserService();
    return request -> delegate.loadUser(request)
        .flatMap(user -> {
          String providerId = request.getClientRegistration().getRegistrationId();
          final var extractor = getExtractor(providerId, acs);
          var provider = getProviderByProviderId(request.getClientRegistration().getRegistrationId());
          final var extractor = getExtractor(provider, acs);
          if (extractor == null) {
            return Mono.just(user);
          }

          return extractor.extract(acs, user, Map.of("request", request))
          return extractor.extract(acs, user, Map.of("request", request, "provider", provider))
              .map(groups -> new RbacOidcUser(user, groups));
        });
  }
@@ -88,13 +88,13 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
    final DefaultReactiveOAuth2UserService delegate = new DefaultReactiveOAuth2UserService();
    return request -> delegate.loadUser(request)
        .flatMap(user -> {
          String providerId = request.getClientRegistration().getRegistrationId();
          final var extractor = getExtractor(providerId, acs);
          var provider = getProviderByProviderId(request.getClientRegistration().getRegistrationId());
          final var extractor = getExtractor(provider, acs);
          if (extractor == null) {
            return Mono.just(user);
          }

          return extractor.extract(acs, user, Map.of("request", request))
          return extractor.extract(acs, user, Map.of("request", request, "provider", provider))
              .map(groups -> new RbacOAuth2User(user, groups));
        });
  }
@@ -113,18 +113,18 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig {
  }

  @Nullable
  private ProviderAuthorityExtractor getExtractor(final String providerId, AccessControlService acs) {
    final String provider = getProviderByProviderId(providerId);
    Optional<ProviderAuthorityExtractor> extractor = acs.getExtractors()
  private ProviderAuthorityExtractor getExtractor(final OAuthProperties.OAuth2Provider provider,
                                                  AccessControlService acs) {
    Optional<ProviderAuthorityExtractor> extractor = acs.getOauthExtractors()
        .stream()
        .filter(e -> e.isApplicable(provider))
        .filter(e -> e.isApplicable(provider.getProvider(), provider.getCustomParams()))
        .findFirst();

    return extractor.orElse(null);
  }

  private String getProviderByProviderId(final String providerId) {
    return properties.getClient().get(providerId).getProvider();
  private OAuthProperties.OAuth2Provider getProviderByProviderId(final String providerId) {
    return properties.getClient().get(providerId);
  }

}
@@ -0,0 +1,60 @@
package com.provectus.kafka.ui.config.auth;

import java.util.Collection;
import java.util.stream.Collectors;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.userdetails.UserDetails;

public class RbacLdapUser implements UserDetails, RbacUser {

  private final UserDetails userDetails;

  public RbacLdapUser(UserDetails userDetails) {
    this.userDetails = userDetails;
  }

  @Override
  public String name() {
    return userDetails.getUsername();
  }

  @Override
  public Collection<String> groups() {
    return userDetails.getAuthorities().stream().map(GrantedAuthority::getAuthority).collect(Collectors.toSet());
  }

  @Override
  public Collection<? extends GrantedAuthority> getAuthorities() {
    return userDetails.getAuthorities();
  }

  @Override
  public String getPassword() {
    return userDetails.getPassword();
  }

  @Override
  public String getUsername() {
    return userDetails.getUsername();
  }

  @Override
  public boolean isAccountNonExpired() {
    return userDetails.isAccountNonExpired();
  }

  @Override
  public boolean isAccountNonLocked() {
    return userDetails.isAccountNonLocked();
  }

  @Override
  public boolean isCredentialsNonExpired() {
    return userDetails.isCredentialsNonExpired();
  }

  @Override
  public boolean isEnabled() {
    return userDetails.isEnabled();
  }
}
@@ -0,0 +1,21 @@
package com.provectus.kafka.ui.config.auth.condition;

import org.springframework.boot.autoconfigure.condition.AllNestedConditions;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;

public class ActiveDirectoryCondition extends AllNestedConditions {

  public ActiveDirectoryCondition() {
    super(ConfigurationPhase.PARSE_CONFIGURATION);
  }

  @ConditionalOnProperty(value = "auth.type", havingValue = "LDAP")
  public static class OnAuthType {

  }

  @ConditionalOnProperty(value = "${oauth2.ldap.activeDirectory}:false", havingValue = "true", matchIfMissing = false)
  public static class OnActiveDirectory {

  }
}
@@ -46,10 +46,8 @@ public class CognitoLogoutSuccessHandler implements LogoutSuccessHandler {
        .fragment(null)
        .build();

    Assert.isTrue(
        provider.getCustomParams() != null && provider.getCustomParams().containsKey("logoutUrl"),
        "Custom params should contain 'logoutUrl'"
    );
    Assert.isTrue(provider.getCustomParams().containsKey("logoutUrl"),
        "Custom params should contain 'logoutUrl'");
    final var uri = UriComponentsBuilder
        .fromUri(URI.create(provider.getCustomParams().get("logoutUrl")))
        .queryParam("client_id", provider.getClientId())
@@ -0,0 +1,115 @@
package com.provectus.kafka.ui.controller;

import com.provectus.kafka.ui.api.AclsApi;
import com.provectus.kafka.ui.mapper.ClusterMapper;
import com.provectus.kafka.ui.model.KafkaAclDTO;
import com.provectus.kafka.ui.model.KafkaAclNamePatternTypeDTO;
import com.provectus.kafka.ui.model.KafkaAclResourceTypeDTO;
import com.provectus.kafka.ui.model.rbac.AccessContext;
import com.provectus.kafka.ui.model.rbac.permission.AclAction;
import com.provectus.kafka.ui.service.acl.AclsService;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.Optional;
import lombok.RequiredArgsConstructor;
import org.apache.kafka.common.resource.PatternType;
import org.apache.kafka.common.resource.ResourcePatternFilter;
import org.apache.kafka.common.resource.ResourceType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.server.ServerWebExchange;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

@RestController
@RequiredArgsConstructor
public class AclsController extends AbstractController implements AclsApi {

  private final AclsService aclsService;
  private final AccessControlService accessControlService;

  @Override
  public Mono<ResponseEntity<Void>> createAcl(String clusterName, Mono<KafkaAclDTO> kafkaAclDto,
                                              ServerWebExchange exchange) {
    AccessContext context = AccessContext.builder()
        .cluster(clusterName)
        .aclActions(AclAction.EDIT)
        .build();

    return accessControlService.validateAccess(context)
        .then(kafkaAclDto)
        .map(ClusterMapper::toAclBinding)
        .flatMap(binding -> aclsService.createAcl(getCluster(clusterName), binding))
        .thenReturn(ResponseEntity.ok().build());
  }

  @Override
  public Mono<ResponseEntity<Void>> deleteAcl(String clusterName, Mono<KafkaAclDTO> kafkaAclDto,
                                              ServerWebExchange exchange) {
    AccessContext context = AccessContext.builder()
        .cluster(clusterName)
        .aclActions(AclAction.EDIT)
        .build();

    return accessControlService.validateAccess(context)
        .then(kafkaAclDto)
        .map(ClusterMapper::toAclBinding)
        .flatMap(binding -> aclsService.deleteAcl(getCluster(clusterName), binding))
        .thenReturn(ResponseEntity.ok().build());
  }

  @Override
  public Mono<ResponseEntity<Flux<KafkaAclDTO>>> listAcls(String clusterName,
                                                          KafkaAclResourceTypeDTO resourceTypeDto,
                                                          String resourceName,
                                                          KafkaAclNamePatternTypeDTO namePatternTypeDto,
                                                          ServerWebExchange exchange) {
    AccessContext context = AccessContext.builder()
        .cluster(clusterName)
        .aclActions(AclAction.VIEW)
        .build();

    var resourceType = Optional.ofNullable(resourceTypeDto)
        .map(ClusterMapper::mapAclResourceTypeDto)
        .orElse(ResourceType.ANY);

    var namePatternType = Optional.ofNullable(namePatternTypeDto)
        .map(ClusterMapper::mapPatternTypeDto)
        .orElse(PatternType.ANY);

    var filter = new ResourcePatternFilter(resourceType, resourceName, namePatternType);

    return accessControlService.validateAccess(context).then(
        Mono.just(
            ResponseEntity.ok(
                aclsService.listAcls(getCluster(clusterName), filter)
                    .map(ClusterMapper::toKafkaAclDto)))
    );
  }

  @Override
  public Mono<ResponseEntity<String>> getAclAsCsv(String clusterName, ServerWebExchange exchange) {
    AccessContext context = AccessContext.builder()
        .cluster(clusterName)
        .aclActions(AclAction.VIEW)
        .build();

    return accessControlService.validateAccess(context).then(
        aclsService.getAclAsCsvString(getCluster(clusterName))
            .map(ResponseEntity::ok)
            .flatMap(Mono::just)
    );
  }

  @Override
  public Mono<ResponseEntity<Void>> syncAclsCsv(String clusterName, Mono<String> csvMono, ServerWebExchange exchange) {
    AccessContext context = AccessContext.builder()
        .cluster(clusterName)
        .aclActions(AclAction.EDIT)
        .build();

    return accessControlService.validateAccess(context)
        .then(csvMono)
        .flatMap(csv -> aclsService.syncAclWithAclCsv(getCluster(clusterName), csv))
        .thenReturn(ResponseEntity.ok().build());
  }
}
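A note on listAcls above: when the optional query parameters are omitted, the controller falls back to ANY/ANY, which matches every binding. A minimal sketch of that filter with plain kafka-clients types; the "payments" name is a made-up example:

import org.apache.kafka.common.resource.PatternType;
import org.apache.kafka.common.resource.ResourcePatternFilter;
import org.apache.kafka.common.resource.ResourceType;

class AclFilterSketch {
  // Filter restricted to topic resources with a given name, any pattern type.
  static ResourcePatternFilter topicsNamed(String name) {
    return new ResourcePatternFilter(ResourceType.TOPIC, name, PatternType.ANY); // e.g. "payments"
  }

  // Default used when no query params are supplied: matches every ACL binding.
  static ResourcePatternFilter everything() {
    return new ResourcePatternFilter(ResourceType.ANY, null, PatternType.ANY);
  }
}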
@@ -27,6 +27,7 @@ import org.mapstruct.Mapper;
import org.mapstruct.factory.Mappers;
import org.springframework.http.ResponseEntity;
import org.springframework.http.codec.multipart.FilePart;
import org.springframework.http.codec.multipart.Part;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.server.ServerWebExchange;
import reactor.core.publisher.Flux;
@@ -92,16 +93,19 @@ public class ApplicationConfigController implements ApplicationConfigApi {
  }

  @Override
  public Mono<ResponseEntity<UploadedFileInfoDTO>> uploadConfigRelatedFile(FilePart file, ServerWebExchange exchange) {
  public Mono<ResponseEntity<UploadedFileInfoDTO>> uploadConfigRelatedFile(Flux<Part> fileFlux,
                                                                           ServerWebExchange exchange) {
    return accessControlService
        .validateAccess(
            AccessContext.builder()
                .applicationConfigActions(EDIT)
                .build()
        )
        .then(dynamicConfigOperations.uploadConfigRelatedFile(file))
        .map(path -> new UploadedFileInfoDTO().location(path.toString()))
        .map(ResponseEntity::ok);
        .then(fileFlux.single())
        .flatMap(file ->
            dynamicConfigOperations.uploadConfigRelatedFile((FilePart) file)
                .map(path -> new UploadedFileInfoDTO().location(path.toString()))
                .map(ResponseEntity::ok));
  }

  @Override
@@ -211,7 +211,7 @@ public class KafkaConnectController extends AbstractController implements KafkaC
    Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder()
        .cluster(clusterName)
        .connect(connectName)
        .connectActions(ConnectAction.VIEW, ConnectAction.EDIT)
        .connectActions(ConnectAction.VIEW, ConnectAction.RESTART)
        .build());

    return validateAccess.then(
@@ -39,41 +39,42 @@ public class MessageFilters {
  }

  static Predicate<TopicMessageDTO> groovyScriptFilter(String script) {
    var compiledScript = compileScript(script);
    var engine = getGroovyEngine();
    var compiledScript = compileScript(engine, script);
    var jsonSlurper = new JsonSlurper();
    return new Predicate<TopicMessageDTO>() {
      @SneakyThrows
      @Override
      public boolean test(TopicMessageDTO msg) {
        var bindings = getGroovyEngine().createBindings();
        var bindings = engine.createBindings();
        bindings.put("partition", msg.getPartition());
        bindings.put("offset", msg.getOffset());
        bindings.put("timestampMs", msg.getTimestamp().toInstant().toEpochMilli());
        bindings.put("keyAsText", msg.getKey());
        bindings.put("valueAsText", msg.getContent());
        bindings.put("headers", msg.getHeaders());
        bindings.put("key", parseToJsonOrReturnNull(jsonSlurper, msg.getKey()));
        bindings.put("value", parseToJsonOrReturnNull(jsonSlurper, msg.getContent()));
        bindings.put("key", parseToJsonOrReturnAsIs(jsonSlurper, msg.getKey()));
        bindings.put("value", parseToJsonOrReturnAsIs(jsonSlurper, msg.getContent()));
        var result = compiledScript.eval(bindings);
        if (result instanceof Boolean) {
          return (Boolean) result;
        } else {
          throw new ValidationException(
              String.format("Unexpected script result: %s, Boolean should be returned instead", result));
              "Unexpected script result: %s, Boolean should be returned instead".formatted(result));
        }
      }
    };
  }

  @Nullable
  private static Object parseToJsonOrReturnNull(JsonSlurper parser, @Nullable String str) {
  private static Object parseToJsonOrReturnAsIs(JsonSlurper parser, @Nullable String str) {
    if (str == null) {
      return null;
    }
    try {
      return parser.parseText(str);
    } catch (Exception e) {
      return null;
      return str;
    }
  }

@@ -86,9 +87,9 @@ public class MessageFilters {
    return GROOVY_ENGINE;
  }

  private static CompiledScript compileScript(String script) {
  private static CompiledScript compileScript(GroovyScriptEngineImpl engine, String script) {
    try {
      return getGroovyEngine().compile(script);
      return engine.compile(script);
    } catch (ScriptException e) {
      throw new ValidationException("Script syntax error: " + e.getMessage());
    }
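The behavioural change in the renamed helper is that non-JSON keys/values are now exposed to the filter script as raw strings instead of null. A minimal, self-contained sketch of that fallback, assuming groovy-json is on the classpath:

import groovy.json.JsonSlurper;

public class ParseFallbackSketch {
  // Mirrors parseToJsonOrReturnAsIs: valid JSON becomes a Map/List/primitive, anything else is returned unchanged.
  static Object parseOrReturnAsIs(JsonSlurper parser, String str) {
    if (str == null) {
      return null;
    }
    try {
      return parser.parseText(str);
    } catch (Exception e) {
      return str; // previously this branch returned null
    }
  }

  public static void main(String[] args) {
    var slurper = new JsonSlurper();
    System.out.println(parseOrReturnAsIs(slurper, "{\"id\":1}")); // {id=1}
    System.out.println(parseOrReturnAsIs(slurper, "not-json"));   // not-json
  }
}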
@@ -20,6 +20,9 @@ import com.provectus.kafka.ui.model.InternalPartition;
import com.provectus.kafka.ui.model.InternalReplica;
import com.provectus.kafka.ui.model.InternalTopic;
import com.provectus.kafka.ui.model.InternalTopicConfig;
import com.provectus.kafka.ui.model.KafkaAclDTO;
import com.provectus.kafka.ui.model.KafkaAclNamePatternTypeDTO;
import com.provectus.kafka.ui.model.KafkaAclResourceTypeDTO;
import com.provectus.kafka.ui.model.MetricDTO;
import com.provectus.kafka.ui.model.Metrics;
import com.provectus.kafka.ui.model.PartitionDTO;
@@ -27,12 +30,18 @@ import com.provectus.kafka.ui.model.ReplicaDTO;
import com.provectus.kafka.ui.model.TopicConfigDTO;
import com.provectus.kafka.ui.model.TopicDTO;
import com.provectus.kafka.ui.model.TopicDetailsDTO;
import com.provectus.kafka.ui.service.masking.DataMasking;
import com.provectus.kafka.ui.service.metrics.RawMetric;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.kafka.clients.admin.ConfigEntry;
import org.apache.kafka.common.acl.AccessControlEntry;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.acl.AclPermissionType;
import org.apache.kafka.common.resource.PatternType;
import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourceType;
import org.mapstruct.Mapper;
import org.mapstruct.Mapping;

@@ -109,8 +118,74 @@ public interface ClusterMapper {
    return brokerDiskUsage;
  }

  default DataMasking map(List<ClustersProperties.Masking> maskingProperties) {
    return DataMasking.create(maskingProperties);
  static KafkaAclDTO.OperationEnum mapAclOperation(AclOperation operation) {
    return switch (operation) {
      case ALL -> KafkaAclDTO.OperationEnum.ALL;
      case READ -> KafkaAclDTO.OperationEnum.READ;
      case WRITE -> KafkaAclDTO.OperationEnum.WRITE;
      case CREATE -> KafkaAclDTO.OperationEnum.CREATE;
      case DELETE -> KafkaAclDTO.OperationEnum.DELETE;
      case ALTER -> KafkaAclDTO.OperationEnum.ALTER;
      case DESCRIBE -> KafkaAclDTO.OperationEnum.DESCRIBE;
      case CLUSTER_ACTION -> KafkaAclDTO.OperationEnum.CLUSTER_ACTION;
      case DESCRIBE_CONFIGS -> KafkaAclDTO.OperationEnum.DESCRIBE_CONFIGS;
      case ALTER_CONFIGS -> KafkaAclDTO.OperationEnum.ALTER_CONFIGS;
      case IDEMPOTENT_WRITE -> KafkaAclDTO.OperationEnum.IDEMPOTENT_WRITE;
      case CREATE_TOKENS -> KafkaAclDTO.OperationEnum.CREATE_TOKENS;
      case DESCRIBE_TOKENS -> KafkaAclDTO.OperationEnum.DESCRIBE_TOKENS;
      case ANY -> throw new IllegalArgumentException("ANY operation can be only part of filter");
      case UNKNOWN -> KafkaAclDTO.OperationEnum.UNKNOWN;
    };
  }

  static KafkaAclResourceTypeDTO mapAclResourceType(ResourceType resourceType) {
    return switch (resourceType) {
      case CLUSTER -> KafkaAclResourceTypeDTO.CLUSTER;
      case TOPIC -> KafkaAclResourceTypeDTO.TOPIC;
      case GROUP -> KafkaAclResourceTypeDTO.GROUP;
      case DELEGATION_TOKEN -> KafkaAclResourceTypeDTO.DELEGATION_TOKEN;
      case TRANSACTIONAL_ID -> KafkaAclResourceTypeDTO.TRANSACTIONAL_ID;
      case USER -> KafkaAclResourceTypeDTO.USER;
      case ANY -> throw new IllegalArgumentException("ANY type can be only part of filter");
      case UNKNOWN -> KafkaAclResourceTypeDTO.UNKNOWN;
    };
  }

  static ResourceType mapAclResourceTypeDto(KafkaAclResourceTypeDTO dto) {
    return ResourceType.valueOf(dto.name());
  }

  static PatternType mapPatternTypeDto(KafkaAclNamePatternTypeDTO dto) {
    return PatternType.valueOf(dto.name());
  }

  static AclBinding toAclBinding(KafkaAclDTO dto) {
    return new AclBinding(
        new ResourcePattern(
            mapAclResourceTypeDto(dto.getResourceType()),
            dto.getResourceName(),
            mapPatternTypeDto(dto.getNamePatternType())
        ),
        new AccessControlEntry(
            dto.getPrincipal(),
            dto.getHost(),
            AclOperation.valueOf(dto.getOperation().name()),
            AclPermissionType.valueOf(dto.getPermission().name())
        )
    );
  }

  static KafkaAclDTO toKafkaAclDto(AclBinding binding) {
    var pattern = binding.pattern();
    var filter = binding.toFilter().entryFilter();
    return new KafkaAclDTO()
        .resourceType(mapAclResourceType(pattern.resourceType()))
        .resourceName(pattern.name())
        .namePatternType(KafkaAclNamePatternTypeDTO.fromValue(pattern.patternType().name()))
        .principal(filter.principal())
        .host(filter.host())
        .operation(mapAclOperation(filter.operation()))
        .permission(KafkaAclDTO.PermissionEnum.fromValue(filter.permissionType().name()));
  }

}
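To make the new DTO mapping concrete, this is the shape of the plain kafka-clients object that toAclBinding/toKafkaAclDto translate to and from; the topic name and principal below are made-up examples, not values from the diff:

import org.apache.kafka.common.acl.AccessControlEntry;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.acl.AclPermissionType;
import org.apache.kafka.common.resource.PatternType;
import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourceType;

class AclBindingSketch {
  // "allow User:app1 to READ topic payments from any host"
  static AclBinding example() {
    return new AclBinding(
        new ResourcePattern(ResourceType.TOPIC, "payments", PatternType.LITERAL),
        new AccessControlEntry("User:app1", "*", AclOperation.READ, AclPermissionType.ALLOW));
  }
}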
@@ -4,5 +4,7 @@ public enum ClusterFeature {
  KAFKA_CONNECT,
  KSQL_DB,
  SCHEMA_REGISTRY,
  TOPIC_DELETION
  TOPIC_DELETION,
  KAFKA_ACL_VIEW,
  KAFKA_ACL_EDIT
}
@@ -1,6 +1,7 @@
package com.provectus.kafka.ui.model;

import java.math.BigDecimal;
import javax.annotation.Nullable;
import lombok.Data;
import org.apache.kafka.common.Node;

@@ -10,15 +11,27 @@ public class InternalBroker {
  private final Integer id;
  private final String host;
  private final Integer port;
  private final BigDecimal bytesInPerSec;
  private final BigDecimal bytesOutPerSec;
  private final @Nullable BigDecimal bytesInPerSec;
  private final @Nullable BigDecimal bytesOutPerSec;
  private final @Nullable Integer partitionsLeader;
  private final @Nullable Integer partitions;
  private final @Nullable Integer inSyncPartitions;
  private final @Nullable BigDecimal leadersSkew;
  private final @Nullable BigDecimal partitionsSkew;

  public InternalBroker(Node node, Statistics statistics) {
  public InternalBroker(Node node,
                        PartitionDistributionStats partitionDistribution,
                        Statistics statistics) {
    this.id = node.id();
    this.host = node.host();
    this.port = node.port();
    this.bytesInPerSec = statistics.getMetrics().getBrokerBytesInPerSec().get(node.id());
    this.bytesOutPerSec = statistics.getMetrics().getBrokerBytesOutPerSec().get(node.id());
    this.partitionsLeader = partitionDistribution.getPartitionLeaders().get(node);
    this.partitions = partitionDistribution.getPartitionsCount().get(node);
    this.inSyncPartitions = partitionDistribution.getInSyncPartitions().get(node);
    this.leadersSkew = partitionDistribution.leadersSkew(node);
    this.partitionsSkew = partitionDistribution.partitionsSkew(node);
  }

}
@@ -0,0 +1,92 @@
package com.provectus.kafka.ui.model;

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Nullable;
import lombok.AccessLevel;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.TopicDescription;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.TopicPartitionInfo;

@RequiredArgsConstructor(access = AccessLevel.PRIVATE)
@Getter
@Slf4j
public class PartitionDistributionStats {

  // avg skew will show unuseful results on low number of partitions
  private static final int MIN_PARTITIONS_FOR_SKEW_CALCULATION = 50;

  private final Map<Node, Integer> partitionLeaders;
  private final Map<Node, Integer> partitionsCount;
  private final Map<Node, Integer> inSyncPartitions;
  private final double avgLeadersCntPerBroker;
  private final double avgPartitionsPerBroker;
  private final boolean skewCanBeCalculated;

  public static PartitionDistributionStats create(Statistics stats) {
    return create(stats, MIN_PARTITIONS_FOR_SKEW_CALCULATION);
  }

  static PartitionDistributionStats create(Statistics stats, int minPartitionsForSkewCalculation) {
    var partitionLeaders = new HashMap<Node, Integer>();
    var partitionsReplicated = new HashMap<Node, Integer>();
    var isr = new HashMap<Node, Integer>();
    int partitionsCnt = 0;
    for (TopicDescription td : stats.getTopicDescriptions().values()) {
      for (TopicPartitionInfo tp : td.partitions()) {
        partitionsCnt++;
        tp.replicas().forEach(r -> incr(partitionsReplicated, r));
        tp.isr().forEach(r -> incr(isr, r));
        if (tp.leader() != null) {
          incr(partitionLeaders, tp.leader());
        }
      }
    }
    int nodesWithPartitions = partitionsReplicated.size();
    int partitionReplications = partitionsReplicated.values().stream().mapToInt(i -> i).sum();
    var avgPartitionsPerBroker = nodesWithPartitions == 0 ? 0 : ((double) partitionReplications) / nodesWithPartitions;

    int nodesWithLeaders = partitionLeaders.size();
    int leadersCnt = partitionLeaders.values().stream().mapToInt(i -> i).sum();
    var avgLeadersCntPerBroker = nodesWithLeaders == 0 ? 0 : ((double) leadersCnt) / nodesWithLeaders;

    return new PartitionDistributionStats(
        partitionLeaders,
        partitionsReplicated,
        isr,
        avgLeadersCntPerBroker,
        avgPartitionsPerBroker,
        partitionsCnt >= minPartitionsForSkewCalculation
    );
  }

  private static void incr(Map<Node, Integer> map, Node n) {
    map.compute(n, (k, c) -> c == null ? 1 : ++c);
  }

  @Nullable
  public BigDecimal partitionsSkew(Node node) {
    return calculateAvgSkew(partitionsCount.get(node), avgPartitionsPerBroker);
  }

  @Nullable
  public BigDecimal leadersSkew(Node node) {
    return calculateAvgSkew(partitionLeaders.get(node), avgLeadersCntPerBroker);
  }

  // Returns difference (in percents) from average value, null if it can't be calculated
  @Nullable
  private BigDecimal calculateAvgSkew(@Nullable Integer value, double avgValue) {
    if (avgValue == 0 || !skewCanBeCalculated) {
      return null;
    }
    value = value == null ? 0 : value;
    return new BigDecimal((value - avgValue) / avgValue * 100.0)
        .setScale(1, RoundingMode.HALF_UP);
  }
}

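Editor's note on the class above: the per-broker skew it reports is simply the percentage difference from the per-broker average, rounded to one decimal place, and it is only computed once the cluster has at least MIN_PARTITIONS_FOR_SKEW_CALCULATION (50) partitions; otherwise null is returned. A minimal sketch, with hypothetical numbers (only the formula comes from calculateAvgSkew above):

    // Hypothetical broker hosting 30 partition replicas while the per-broker average is 25.
    double avg = 25.0;
    int onThisBroker = 30;
    BigDecimal skew = new BigDecimal((onThisBroker - avg) / avg * 100.0)
        .setScale(1, RoundingMode.HALF_UP); // -> 20.0, i.e. 20% above the cluster average
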
@@ -1,5 +1,6 @@
package com.provectus.kafka.ui.model.rbac;

import com.provectus.kafka.ui.model.rbac.permission.AclAction;
import com.provectus.kafka.ui.model.rbac.permission.ApplicationConfigAction;
import com.provectus.kafka.ui.model.rbac.permission.ClusterConfigAction;
import com.provectus.kafka.ui.model.rbac.permission.ConnectAction;

@@ -37,6 +38,8 @@ public class AccessContext {

  Collection<KsqlAction> ksqlActions;

  Collection<AclAction> aclActions;

  public static AccessContextBuilder builder() {
    return new AccessContextBuilder();
  }

@@ -55,6 +58,7 @@ public class AccessContext {
    private String schema;
    private Collection<SchemaAction> schemaActions = Collections.emptySet();
    private Collection<KsqlAction> ksqlActions = Collections.emptySet();
    private Collection<AclAction> aclActions = Collections.emptySet();

    private AccessContextBuilder() {
    }

@@ -131,6 +135,12 @@ public class AccessContext {
      return this;
    }

    public AccessContextBuilder aclActions(AclAction... actions) {
      Assert.isTrue(actions.length > 0, "actions not present");
      this.aclActions = List.of(actions);
      return this;
    }

    public AccessContext build() {
      return new AccessContext(
          applicationConfigActions,

@@ -140,7 +150,7 @@ public class AccessContext {
          connect, connectActions,
          connector,
          schema, schemaActions,
          ksqlActions);
          ksqlActions, aclActions);
    }
  }
}

@@ -4,6 +4,7 @@ import static com.provectus.kafka.ui.model.rbac.Resource.APPLICATIONCONFIG;
import static com.provectus.kafka.ui.model.rbac.Resource.CLUSTERCONFIG;
import static com.provectus.kafka.ui.model.rbac.Resource.KSQL;

import com.provectus.kafka.ui.model.rbac.permission.AclAction;
import com.provectus.kafka.ui.model.rbac.permission.ApplicationConfigAction;
import com.provectus.kafka.ui.model.rbac.permission.ClusterConfigAction;
import com.provectus.kafka.ui.model.rbac.permission.ConnectAction;

@@ -76,6 +77,7 @@ public class Permission {
      case SCHEMA -> Arrays.stream(SchemaAction.values()).map(Enum::toString).toList();
      case CONNECT -> Arrays.stream(ConnectAction.values()).map(Enum::toString).toList();
      case KSQL -> Arrays.stream(KsqlAction.values()).map(Enum::toString).toList();
      case ACL -> Arrays.stream(AclAction.values()).map(Enum::toString).toList();
    };
  }

@@ -11,7 +11,8 @@ public enum Resource {
  CONSUMER,
  SCHEMA,
  CONNECT,
  KSQL;
  KSQL,
  ACL;

  @Nullable
  public static Resource fromString(String name) {

@@ -0,0 +1,15 @@
package com.provectus.kafka.ui.model.rbac.permission;

import org.apache.commons.lang3.EnumUtils;
import org.jetbrains.annotations.Nullable;

public enum AclAction implements PermissibleAction {

  VIEW,
  EDIT;

  @Nullable
  public static AclAction fromString(String name) {
    return EnumUtils.getEnum(AclAction.class, name);
  }
}

@@ -7,7 +7,8 @@ public enum ConnectAction implements PermissibleAction {

  VIEW,
  EDIT,
  CREATE
  CREATE,
  RESTART

  ;

@@ -10,6 +10,8 @@ public enum Provider {

  OAUTH_COGNITO,

  OAUTH,

  LDAP,
  LDAP_AD;

@@ -22,6 +24,8 @@ public enum Provider {
    public static String GOOGLE = "google";
    public static String GITHUB = "github";
    public static String COGNITO = "cognito";

    public static String OAUTH = "oauth";
  }

}

@@ -123,11 +123,11 @@ public class ConsumerRecordDeserializer {
  }

  private static Long getKeySize(ConsumerRecord<Bytes, Bytes> consumerRecord) {
    return consumerRecord.key() != null ? (long) consumerRecord.key().get().length : null;
    return consumerRecord.key() != null ? (long) consumerRecord.serializedKeySize() : null;
  }

  private static Long getValueSize(ConsumerRecord<Bytes, Bytes> consumerRecord) {
    return consumerRecord.value() != null ? (long) consumerRecord.value().get().length : null;
    return consumerRecord.value() != null ? (long) consumerRecord.serializedValueSize() : null;
  }

  private static int headerSize(Header header) {

@@ -122,8 +122,6 @@ public class SerdesInitializer {
        registeredSerdes,
        Optional.ofNullable(clusterProperties.getDefaultKeySerde())
            .map(name -> Preconditions.checkNotNull(registeredSerdes.get(name), "Default key serde not found"))
            .or(() -> Optional.ofNullable(registeredSerdes.get(SchemaRegistrySerde.name())))
            .or(() -> Optional.ofNullable(registeredSerdes.get(ProtobufFileSerde.name())))
            .orElse(null),
        Optional.ofNullable(clusterProperties.getDefaultValueSerde())
            .map(name -> Preconditions.checkNotNull(registeredSerdes.get(name), "Default value serde not found"))

@@ -10,6 +10,7 @@ import com.provectus.kafka.ui.model.BrokersLogdirsDTO;
import com.provectus.kafka.ui.model.InternalBroker;
import com.provectus.kafka.ui.model.InternalBrokerConfig;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.model.PartitionDistributionStats;
import com.provectus.kafka.ui.service.metrics.RawMetric;
import java.util.Collections;
import java.util.HashMap;

@@ -64,11 +65,13 @@ public class BrokerService {
  }

  public Flux<InternalBroker> getBrokers(KafkaCluster cluster) {
    var stats = statisticsCache.get(cluster);
    var partitionsDistribution = PartitionDistributionStats.create(stats);
    return adminClientService
        .get(cluster)
        .flatMap(ReactiveAdminClient::describeCluster)
        .map(description -> description.getNodes().stream()
            .map(node -> new InternalBroker(node, statisticsCache.get(cluster)))
            .map(node -> new InternalBroker(node, partitionsDistribution, stats))
            .collect(Collectors.toList()))
        .flatMapMany(Flux::fromIterable);
  }

@@ -2,16 +2,16 @@ package com.provectus.kafka.ui.service;

import com.provectus.kafka.ui.model.ClusterFeature;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.service.ReactiveAdminClient.ClusterDescription;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.acl.AclOperation;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

@@ -21,12 +21,11 @@ import reactor.core.publisher.Mono;
@Slf4j
public class FeatureService {

  private static final String DELETE_TOPIC_ENABLED_SERVER_PROPERTY = "delete.topic.enable";

  private final AdminClientService adminClientService;

  public Mono<List<ClusterFeature>> getAvailableFeatures(KafkaCluster cluster,
                                                         ReactiveAdminClient.ClusterDescription clusterDescription) {
  public Mono<List<ClusterFeature>> getAvailableFeatures(ReactiveAdminClient adminClient,
                                                         KafkaCluster cluster,
                                                         ClusterDescription clusterDescription) {
    List<Mono<ClusterFeature>> features = new ArrayList<>();

    if (Optional.ofNullable(cluster.getConnectsClients())

@@ -43,26 +42,32 @@ public class FeatureService {
      features.add(Mono.just(ClusterFeature.SCHEMA_REGISTRY));
    }

    features.add(topicDeletionEnabled(cluster, clusterDescription.getController()));
    features.add(topicDeletionEnabled(adminClient));
    features.add(aclView(cluster));
    features.add(aclEdit(clusterDescription));

    return Flux.fromIterable(features).flatMap(m -> m).collectList();
  }

  private Mono<ClusterFeature> topicDeletionEnabled(KafkaCluster cluster, @Nullable Node controller) {
    if (controller == null) {
      return Mono.just(ClusterFeature.TOPIC_DELETION); // assuming it is enabled by default
    }
    return adminClientService.get(cluster)
        .flatMap(ac -> ac.loadBrokersConfig(List.of(controller.id())))
        .map(config ->
            config.values().stream()
                .flatMap(Collection::stream)
                .filter(e -> e.name().equals(DELETE_TOPIC_ENABLED_SERVER_PROPERTY))
                .map(e -> Boolean.parseBoolean(e.value()))
                .findFirst()
                .orElse(true))
        .flatMap(enabled -> enabled
            ? Mono.just(ClusterFeature.TOPIC_DELETION)
            : Mono.empty());
  private Mono<ClusterFeature> topicDeletionEnabled(ReactiveAdminClient adminClient) {
    return adminClient.isTopicDeletionEnabled()
        ? Mono.just(ClusterFeature.TOPIC_DELETION)
        : Mono.empty();
  }

  private Mono<ClusterFeature> aclEdit(ClusterDescription clusterDescription) {
    var authorizedOps = Optional.ofNullable(clusterDescription.getAuthorizedOperations()).orElse(Set.of());
    boolean canEdit = authorizedOps.contains(AclOperation.ALL) || authorizedOps.contains(AclOperation.ALTER);
    return canEdit
        ? Mono.just(ClusterFeature.KAFKA_ACL_EDIT)
        : Mono.empty();
  }

  private Mono<ClusterFeature> aclView(KafkaCluster cluster) {
    return adminClientService.get(cluster).flatMap(
        ac -> ac.getClusterFeatures().contains(ReactiveAdminClient.SupportedFeature.AUTHORIZED_SECURITY_ENABLED)
            ? Mono.just(ClusterFeature.KAFKA_ACL_VIEW)
            : Mono.empty()
    );
  }
}

@@ -109,6 +109,7 @@ public class KafkaConnectService {
  private Stream<String> getStringsForSearch(FullConnectorInfoDTO fullConnectorInfo) {
    return Stream.of(
        fullConnectorInfo.getName(),
        fullConnectorInfo.getConnect(),
        fullConnectorInfo.getStatus().getState().getValue(),
        fullConnectorInfo.getType().getValue());
  }

@@ -5,6 +5,7 @@ import static java.util.stream.Collectors.toMap;
import static org.apache.kafka.clients.admin.ListOffsetsResult.ListOffsetsResultInfo;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableTable;
import com.google.common.collect.Iterables;
import com.google.common.collect.Table;

@@ -15,7 +16,6 @@ import com.provectus.kafka.ui.util.KafkaVersion;
import com.provectus.kafka.ui.util.annotation.KafkaClientInternalsDependant;
import java.io.Closeable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;

@@ -32,8 +32,9 @@ import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nullable;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.Value;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.AdminClient;

@@ -61,16 +62,21 @@ import org.apache.kafka.common.Node;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.TopicPartitionInfo;
import org.apache.kafka.common.TopicPartitionReplica;
import org.apache.kafka.common.acl.AccessControlEntryFilter;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclBindingFilter;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.config.ConfigResource;
import org.apache.kafka.common.errors.ClusterAuthorizationException;
import org.apache.kafka.common.errors.GroupIdNotFoundException;
import org.apache.kafka.common.errors.GroupNotEmptyException;
import org.apache.kafka.common.errors.InvalidRequestException;
import org.apache.kafka.common.errors.SecurityDisabledException;
import org.apache.kafka.common.errors.TopicAuthorizationException;
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
import org.apache.kafka.common.errors.UnsupportedVersionException;
import org.apache.kafka.common.requests.DescribeLogDirsResponse;
import org.apache.kafka.common.resource.ResourcePatternFilter;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;

@@ -79,29 +85,33 @@ import reactor.util.function.Tuples;

@Slf4j
@RequiredArgsConstructor
@AllArgsConstructor
public class ReactiveAdminClient implements Closeable {

  private enum SupportedFeature {
  public enum SupportedFeature {
    INCREMENTAL_ALTER_CONFIGS(2.3f),
    CONFIG_DOCUMENTATION_RETRIEVAL(2.6f),
    DESCRIBE_CLUSTER_INCLUDE_AUTHORIZED_OPERATIONS(2.3f);
    DESCRIBE_CLUSTER_INCLUDE_AUTHORIZED_OPERATIONS(2.3f),
    AUTHORIZED_SECURITY_ENABLED(ReactiveAdminClient::isAuthorizedSecurityEnabled);

    private final float sinceVersion;
    private final BiFunction<AdminClient, Float, Mono<Boolean>> predicate;

    SupportedFeature(float sinceVersion) {
      this.sinceVersion = sinceVersion;
    SupportedFeature(BiFunction<AdminClient, Float, Mono<Boolean>> predicate) {
      this.predicate = predicate;
    }

    static Set<SupportedFeature> forVersion(float kafkaVersion) {
      return Arrays.stream(SupportedFeature.values())
          .filter(f -> kafkaVersion >= f.sinceVersion)
    SupportedFeature(float fromVersion) {
      this.predicate = (admin, ver) -> Mono.just(ver != null && ver >= fromVersion);
    }

    static Mono<Set<SupportedFeature>> forVersion(AdminClient ac, String kafkaVersionStr) {
      @Nullable Float kafkaVersion = KafkaVersion.parse(kafkaVersionStr).orElse(null);
      return Flux.fromArray(SupportedFeature.values())
          .flatMap(f -> f.predicate.apply(ac, kafkaVersion).map(enabled -> Tuples.of(f, enabled)))
          .filter(Tuple2::getT2)
          .map(Tuple2::getT1)
          .collect(Collectors.toSet());
    }

    static Set<SupportedFeature> defaultFeatures() {
      return Set.of();
    }
  }

  @Value

@@ -110,25 +120,58 @@ public class ReactiveAdminClient implements Closeable {
    Node controller;
    String clusterId;
    Collection<Node> nodes;
    @Nullable // null, if ACL is disabled
    Set<AclOperation> authorizedOperations;
  }

  public static Mono<ReactiveAdminClient> create(AdminClient adminClient) {
    return getClusterVersion(adminClient)
        .map(ver ->
            new ReactiveAdminClient(
                adminClient,
                ver,
                getSupportedUpdateFeaturesForVersion(ver)));
  @Builder
  private record ConfigRelatedInfo(String version,
                                   Set<SupportedFeature> features,
                                   boolean topicDeletionIsAllowed) {

    private static Mono<ConfigRelatedInfo> extract(AdminClient ac, int controllerId) {
      return loadBrokersConfig(ac, List.of(controllerId))
          .map(map -> map.isEmpty() ? List.<ConfigEntry>of() : map.get(controllerId))
          .flatMap(configs -> {
            String version = "1.0-UNKNOWN";
            boolean topicDeletionEnabled = true;
            for (ConfigEntry entry : configs) {
              if (entry.name().contains("inter.broker.protocol.version")) {
                version = entry.value();
              }
              if (entry.name().equals("delete.topic.enable")) {
                topicDeletionEnabled = Boolean.parseBoolean(entry.value());
              }
            }
            var builder = ConfigRelatedInfo.builder()
                .version(version)
                .topicDeletionIsAllowed(topicDeletionEnabled);
            return SupportedFeature.forVersion(ac, version)
                .map(features -> builder.features(features).build());
          });
    }
  }

  private static Set<SupportedFeature> getSupportedUpdateFeaturesForVersion(String versionStr) {
    try {
      float version = KafkaVersion.parse(versionStr);
      return SupportedFeature.forVersion(version);
    } catch (NumberFormatException e) {
      return SupportedFeature.defaultFeatures();
    }
  public static Mono<ReactiveAdminClient> create(AdminClient adminClient) {
    return describeClusterImpl(adminClient, Set.of())
        // choosing node from which we will get configs (starting with controller)
        .flatMap(descr -> descr.controller != null
            ? Mono.just(descr.controller)
            : Mono.justOrEmpty(descr.nodes.stream().findFirst())
        )
        .flatMap(node -> ConfigRelatedInfo.extract(adminClient, node.id()))
        .map(info -> new ReactiveAdminClient(adminClient, info));
  }

  private static Mono<Boolean> isAuthorizedSecurityEnabled(AdminClient ac, @Nullable Float kafkaVersion) {
    return toMono(ac.describeAcls(AclBindingFilter.ANY).values())
        .thenReturn(true)
        .doOnError(th -> !(th instanceof SecurityDisabledException)
                && !(th instanceof InvalidRequestException)
                && !(th instanceof UnsupportedVersionException),
            th -> log.warn("Error checking if security enabled", th))
        .onErrorReturn(false);
  }

  // NOTE: if KafkaFuture returns null, that Mono will be empty(!), since Reactor does not support nullable results

@@ -159,8 +202,11 @@ public class ReactiveAdminClient implements Closeable {

  @Getter(AccessLevel.PACKAGE) // visible for testing
  private final AdminClient client;
  private final String version;
  private final Set<SupportedFeature> features;
  private volatile ConfigRelatedInfo configRelatedInfo;

  public Set<SupportedFeature> getClusterFeatures() {
    return configRelatedInfo.features();
  }

  public Mono<Set<String>> listTopics(boolean listInternal) {
    return toMono(client.listTopics(new ListTopicsOptions().listInternal(listInternal)).names());

@@ -171,7 +217,20 @@ public class ReactiveAdminClient implements Closeable {
  }

  public String getVersion() {
    return version;
    return configRelatedInfo.version();
  }

  public boolean isTopicDeletionEnabled() {
    return configRelatedInfo.topicDeletionIsAllowed();
  }

  public Mono<Void> updateInternalStats(@Nullable Node controller) {
    if (controller == null) {
      return Mono.empty();
    }
    return ConfigRelatedInfo.extract(client, controller.id())
        .doOnNext(info -> this.configRelatedInfo = info)
        .then();
  }

  public Mono<Map<String, List<ConfigEntry>>> getTopicsConfig() {

@@ -181,7 +240,7 @@ public class ReactiveAdminClient implements Closeable {
  //NOTE: skips not-found topics (for which UnknownTopicOrPartitionException was thrown by AdminClient)
  //and topics for which DESCRIBE_CONFIGS permission is not set (TopicAuthorizationException was thrown)
  public Mono<Map<String, List<ConfigEntry>>> getTopicsConfig(Collection<String> topicNames, boolean includeDoc) {
    var includeDocFixed = features.contains(SupportedFeature.CONFIG_DOCUMENTATION_RETRIEVAL) && includeDoc;
    var includeDocFixed = includeDoc && getClusterFeatures().contains(SupportedFeature.CONFIG_DOCUMENTATION_RETRIEVAL);
    // we need to partition calls, because it can lead to AdminClient timeouts in case of large topics count
    return partitionCalls(
        topicNames,

@@ -330,7 +389,7 @@ public class ReactiveAdminClient implements Closeable {
  }

  public Mono<ClusterDescription> describeCluster() {
    return describeClusterImpl(client, features);
    return describeClusterImpl(client, getClusterFeatures());
  }

  private static Mono<ClusterDescription> describeClusterImpl(AdminClient client, Set<SupportedFeature> features) {

@@ -352,23 +411,6 @@ public class ReactiveAdminClient implements Closeable {
    );
  }

  private static Mono<String> getClusterVersion(AdminClient client) {
    return describeClusterImpl(client, Set.of())
        // choosing node from which we will get configs (starting with controller)
        .flatMap(descr -> descr.controller != null
            ? Mono.just(descr.controller)
            : Mono.justOrEmpty(descr.nodes.stream().findFirst())
        )
        .flatMap(node -> loadBrokersConfig(client, List.of(node.id())))
        .flatMap(configs -> configs.values().stream()
            .flatMap(Collection::stream)
            .filter(entry -> entry.name().contains("inter.broker.protocol.version"))
            .findFirst()
            .map(configEntry -> Mono.just(configEntry.value()))
            .orElse(Mono.empty()))
        .switchIfEmpty(Mono.just("1.0-UNKNOWN"));
  }

  public Mono<Void> deleteConsumerGroups(Collection<String> groupIds) {
    return toMono(client.deleteConsumerGroups(groupIds).all())
        .onErrorResume(GroupIdNotFoundException.class,

@@ -402,7 +444,7 @@ public class ReactiveAdminClient implements Closeable {
  // NOTE: places whole current topic config with new one. Entries that were present in old config,
  // but missed in new will be set to default
  public Mono<Void> updateTopicConfig(String topicName, Map<String, String> configs) {
    if (features.contains(SupportedFeature.INCREMENTAL_ALTER_CONFIGS)) {
    if (getClusterFeatures().contains(SupportedFeature.INCREMENTAL_ALTER_CONFIGS)) {
      return getTopicsConfigImpl(List.of(topicName), false)
          .map(conf -> conf.getOrDefault(topicName, List.of()))
          .flatMap(currentConfigs -> incrementalAlterConfig(topicName, currentConfigs, configs));

@@ -576,6 +618,22 @@ public class ReactiveAdminClient implements Closeable {
    );
  }

  public Mono<Collection<AclBinding>> listAcls(ResourcePatternFilter filter) {
    Preconditions.checkArgument(getClusterFeatures().contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
    return toMono(client.describeAcls(new AclBindingFilter(filter, AccessControlEntryFilter.ANY)).values());
  }

  public Mono<Void> createAcls(Collection<AclBinding> aclBindings) {
    Preconditions.checkArgument(getClusterFeatures().contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
    return toMono(client.createAcls(aclBindings).all());
  }

  public Mono<Void> deleteAcls(Collection<AclBinding> aclBindings) {
    Preconditions.checkArgument(getClusterFeatures().contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
    var filters = aclBindings.stream().map(AclBinding::toFilter).collect(Collectors.toSet());
    return toMono(client.deleteAcls(filters).all()).then();
  }

  public Mono<Void> updateBrokerConfigByName(Integer brokerId, String name, String value) {
    ConfigResource cr = new ConfigResource(ConfigResource.Type.BROKER, String.valueOf(brokerId));
    AlterConfigOp op = new AlterConfigOp(new ConfigEntry(name, value), AlterConfigOp.OpType.SET);

@@ -37,25 +37,26 @@ public class StatisticsService {
  private Mono<Statistics> getStatistics(KafkaCluster cluster) {
    return adminClientService.get(cluster).flatMap(ac ->
        ac.describeCluster().flatMap(description ->
            Mono.zip(
                List.of(
                    metricsCollector.getBrokerMetrics(cluster, description.getNodes()),
                    getLogDirInfo(description, ac),
                    featureService.getAvailableFeatures(cluster, description),
                    loadTopicConfigs(cluster),
                    describeTopics(cluster)),
                results ->
                    Statistics.builder()
                        .status(ServerStatusDTO.ONLINE)
                        .clusterDescription(description)
                        .version(ac.getVersion())
                        .metrics((Metrics) results[0])
                        .logDirInfo((InternalLogDirStats) results[1])
                        .features((List<ClusterFeature>) results[2])
                        .topicConfigs((Map<String, List<ConfigEntry>>) results[3])
                        .topicDescriptions((Map<String, TopicDescription>) results[4])
                        .build()
            )))
            ac.updateInternalStats(description.getController()).then(
                Mono.zip(
                    List.of(
                        metricsCollector.getBrokerMetrics(cluster, description.getNodes()),
                        getLogDirInfo(description, ac),
                        featureService.getAvailableFeatures(ac, cluster, description),
                        loadTopicConfigs(cluster),
                        describeTopics(cluster)),
                    results ->
                        Statistics.builder()
                            .status(ServerStatusDTO.ONLINE)
                            .clusterDescription(description)
                            .version(ac.getVersion())
                            .metrics((Metrics) results[0])
                            .logDirInfo((InternalLogDirStats) results[1])
                            .features((List<ClusterFeature>) results[2])
                            .topicConfigs((Map<String, List<ConfigEntry>>) results[3])
                            .topicDescriptions((Map<String, TopicDescription>) results[4])
                            .build()
                ))))
        .doOnError(e ->
            log.error("Failed to collect cluster {} info", cluster.getName(), e))
        .onErrorResume(

@@ -0,0 +1,81 @@
package com.provectus.kafka.ui.service.acl;

import com.provectus.kafka.ui.exception.ValidationException;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.kafka.common.acl.AccessControlEntry;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.acl.AclPermissionType;
import org.apache.kafka.common.resource.PatternType;
import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourceType;

public class AclCsv {

  private static final String LINE_SEPARATOR = System.lineSeparator();
  private static final String VALUES_SEPARATOR = ",";
  private static final String HEADER = "Principal,ResourceType,PatternType,ResourceName,Operation,PermissionType,Host";

  public static String transformToCsvString(Collection<AclBinding> acls) {
    return Stream.concat(Stream.of(HEADER), acls.stream().map(AclCsv::createAclString))
        .collect(Collectors.joining(System.lineSeparator()));
  }

  public static String createAclString(AclBinding binding) {
    var pattern = binding.pattern();
    var filter = binding.toFilter().entryFilter();
    return String.format(
        "%s,%s,%s,%s,%s,%s,%s",
        filter.principal(),
        pattern.resourceType(),
        pattern.patternType(),
        pattern.name(),
        filter.operation(),
        filter.permissionType(),
        filter.host()
    );
  }

  private static AclBinding parseCsvLine(String csv, int line) {
    String[] values = csv.split(VALUES_SEPARATOR);
    if (values.length != 7) {
      throw new ValidationException("Input csv is not valid - there should be 7 columns in line " + line);
    }
    for (int i = 0; i < values.length; i++) {
      if ((values[i] = values[i].trim()).isBlank()) {
        throw new ValidationException("Input csv is not valid - blank value in colum " + i + ", line " + line);
      }
    }
    try {
      return new AclBinding(
          new ResourcePattern(
              ResourceType.valueOf(values[1]), values[3], PatternType.valueOf(values[2])),
          new AccessControlEntry(
              values[0], values[6], AclOperation.valueOf(values[4]), AclPermissionType.valueOf(values[5]))
      );
    } catch (IllegalArgumentException enumParseError) {
      throw new ValidationException("Error parsing enum value in line " + line);
    }
  }

  public static Collection<AclBinding> parseCsv(String csvString) {
    String[] lines = csvString.split(LINE_SEPARATOR);
    if (lines.length == 0) {
      throw new ValidationException("Error parsing ACL csv file: no lines in file");
    }
    boolean firstLineIsHeader = HEADER.equalsIgnoreCase(lines[0].trim().replace(" ", ""));
    Set<AclBinding> result = new HashSet<>();
    for (int i = firstLineIsHeader ? 1 : 0; i < lines.length; i++) {
      String line = lines[i];
      if (!line.isBlank()) {
        AclBinding aclBinding = parseCsvLine(line, i);
        result.add(aclBinding);
      }
    }
    return result;
  }
}

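Editor's note on the CSV layout used by AclCsv above: every non-header line must contain exactly seven comma-separated values in the HEADER order. A hedged round-trip sketch (the principal, resource name, and host are made up; the enum values are standard Kafka ones):

    // Hypothetical input accepted by AclCsv.parseCsv(...):
    String csv =
        "Principal,ResourceType,PatternType,ResourceName,Operation,PermissionType,Host" + System.lineSeparator()
            + "User:alice,TOPIC,LITERAL,orders,READ,ALLOW,*";
    Collection<AclBinding> bindings = AclCsv.parseCsv(csv);   // one binding
    String exported = AclCsv.transformToCsvString(bindings);  // header plus the same line
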
@@ -0,0 +1,93 @@
package com.provectus.kafka.ui.service.acl;

import com.google.common.collect.Sets;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.service.AdminClientService;
import java.util.List;
import java.util.Set;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.resource.ResourcePatternFilter;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

@Slf4j
@Service
@RequiredArgsConstructor
public class AclsService {

  private final AdminClientService adminClientService;

  public Mono<Void> createAcl(KafkaCluster cluster, AclBinding aclBinding) {
    var aclString = AclCsv.createAclString(aclBinding);
    log.info("CREATING ACL: [{}]", aclString);
    return adminClientService.get(cluster)
        .flatMap(ac -> ac.createAcls(List.of(aclBinding)))
        .doOnSuccess(v -> log.info("ACL CREATED: [{}]", aclString));
  }

  public Mono<Void> deleteAcl(KafkaCluster cluster, AclBinding aclBinding) {
    var aclString = AclCsv.createAclString(aclBinding);
    log.info("DELETING ACL: [{}]", aclString);
    return adminClientService.get(cluster)
        .flatMap(ac -> ac.deleteAcls(List.of(aclBinding)))
        .doOnSuccess(v -> log.info("ACL DELETED: [{}]", aclString));
  }

  public Flux<AclBinding> listAcls(KafkaCluster cluster, ResourcePatternFilter filter) {
    return adminClientService.get(cluster)
        .flatMap(c -> c.listAcls(filter))
        .flatMapIterable(acls -> acls);
  }

  public Mono<String> getAclAsCsvString(KafkaCluster cluster) {
    return adminClientService.get(cluster)
        .flatMap(c -> c.listAcls(ResourcePatternFilter.ANY))
        .map(AclCsv::transformToCsvString);
  }

  public Mono<Void> syncAclWithAclCsv(KafkaCluster cluster, String csv) {
    return adminClientService.get(cluster)
        .flatMap(ac -> ac.listAcls(ResourcePatternFilter.ANY).flatMap(existingAclList -> {
          var existingSet = Set.copyOf(existingAclList);
          var newAcls = Set.copyOf(AclCsv.parseCsv(csv));
          var toDelete = Sets.difference(existingSet, newAcls);
          var toAdd = Sets.difference(newAcls, existingSet);
          logAclSyncPlan(cluster, toAdd, toDelete);
          if (toAdd.isEmpty() && toDelete.isEmpty()) {
            return Mono.empty();
          }
          log.info("Starting new ACLs creation");
          return ac.createAcls(toAdd)
              .doOnSuccess(v -> {
                log.info("{} new ACLs created", toAdd.size());
                log.info("Starting ACLs deletion");
              })
              .then(ac.deleteAcls(toDelete)
                  .doOnSuccess(v -> log.info("{} ACLs deleted", toDelete.size())));
        }));
  }

  private void logAclSyncPlan(KafkaCluster cluster, Set<AclBinding> toBeAdded, Set<AclBinding> toBeDeleted) {
    log.info("'{}' cluster ACL sync plan: ", cluster.getName());
    if (toBeAdded.isEmpty() && toBeDeleted.isEmpty()) {
      log.info("Nothing to do, ACL is already in sync");
      return;
    }
    if (!toBeAdded.isEmpty()) {
      log.info("ACLs to be added ({}): ", toBeAdded.size());
      for (AclBinding aclBinding : toBeAdded) {
        log.info(" " + AclCsv.createAclString(aclBinding));
      }
    }
    if (!toBeDeleted.isEmpty()) {
      log.info("ACLs to be deleted ({}): ", toBeDeleted.size());
      for (AclBinding aclBinding : toBeDeleted) {
        log.info(" " + AclCsv.createAclString(aclBinding));
      }
    }
  }

}

@@ -43,8 +43,7 @@ class TopicAnalysisStats {
    Long max;
    final UpdateDoublesSketch sizeSketch = DoublesSketch.builder().build();

    void apply(byte[] bytes) {
      int len = bytes.length;
    void apply(int len) {
      sum += len;
      min = minNullable(min, len);
      max = maxNullable(max, len);

@@ -98,7 +97,7 @@ class TopicAnalysisStats {

    if (rec.key() != null) {
      byte[] keyBytes = rec.key().get();
      keysSize.apply(keyBytes);
      keysSize.apply(rec.serializedKeySize());
      uniqKeys.update(keyBytes);
    } else {
      nullKeys++;

@@ -106,7 +105,7 @@ class TopicAnalysisStats {

    if (rec.value() != null) {
      byte[] valueBytes = rec.value().get();
      valuesSize.apply(valueBytes);
      valuesSize.apply(rec.serializedValueSize());
      uniqValues.update(valueBytes);
    } else {
      nullValues++;

@@ -44,7 +44,7 @@ public class DataMasking {
  public static DataMasking create(@Nullable List<ClustersProperties.Masking> config) {
    return new DataMasking(
        Optional.ofNullable(config).orElse(List.of()).stream().map(property -> {
          Preconditions.checkNotNull(property.getType(), "masking type not specifed");
          Preconditions.checkNotNull(property.getType(), "masking type not specified");
          Preconditions.checkArgument(
              StringUtils.isNotEmpty(property.getTopicKeysPattern())
                  || StringUtils.isNotEmpty(property.getTopicValuesPattern()),

@@ -0,0 +1,28 @@
package com.provectus.kafka.ui.service.masking.policies;

import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.exception.ValidationException;
import java.util.regex.Pattern;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;

interface FieldsSelector {

  static FieldsSelector create(ClustersProperties.Masking property) {
    if (StringUtils.hasText(property.getFieldsNamePattern()) && !CollectionUtils.isEmpty(property.getFields())) {
      throw new ValidationException("You can't provide both fieldNames & fieldsNamePattern for masking");
    }
    if (StringUtils.hasText(property.getFieldsNamePattern())) {
      Pattern pattern = Pattern.compile(property.getFieldsNamePattern());
      return f -> pattern.matcher(f).matches();
    }
    if (!CollectionUtils.isEmpty(property.getFields())) {
      return f -> property.getFields().contains(f);
    }
    //no pattern, no field names - mean all fields should be masked
    return fieldName -> true;
  }

  boolean shouldBeMasked(String fieldName);

}

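Editor's note, to make the selection rules above concrete. A minimal sketch with a hypothetical pattern (the matching behaviour mirrors FieldsSelector.create):

    // With fieldsNamePattern = "(password|ssn)", only matching field names are masked:
    Pattern pattern = Pattern.compile("(password|ssn)");
    FieldsSelector byPattern = f -> pattern.matcher(f).matches();
    byPattern.shouldBeMasked("password"); // true
    byPattern.shouldBeMasked("username"); // false
    // With neither fields nor fieldsNamePattern configured, every field is masked:
    FieldsSelector all = fieldName -> true;
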
@@ -15,8 +15,8 @@ class Mask extends MaskingPolicy {

  private final UnaryOperator<String> masker;

  Mask(List<String> fieldNames, List<String> maskingChars) {
    super(fieldNames);
  Mask(FieldsSelector fieldsSelector, List<String> maskingChars) {
    super(fieldsSelector);
    this.masker = createMasker(maskingChars);
  }

@@ -38,22 +38,13 @@ class Mask extends MaskingPolicy {
      for (int i = 0; i < input.length(); i++) {
        int cp = input.codePointAt(i);
        switch (Character.getType(cp)) {
          case Character.SPACE_SEPARATOR:
          case Character.LINE_SEPARATOR:
          case Character.PARAGRAPH_SEPARATOR:
            sb.appendCodePoint(cp); // keeping separators as-is
            break;
          case Character.UPPERCASE_LETTER:
            sb.append(maskingChars.get(0));
            break;
          case Character.LOWERCASE_LETTER:
            sb.append(maskingChars.get(1));
            break;
          case Character.DECIMAL_DIGIT_NUMBER:
            sb.append(maskingChars.get(2));
            break;
          default:
            sb.append(maskingChars.get(3));
          case Character.SPACE_SEPARATOR,
              Character.LINE_SEPARATOR,
              Character.PARAGRAPH_SEPARATOR -> sb.appendCodePoint(cp); // keeping separators as-is
          case Character.UPPERCASE_LETTER -> sb.append(maskingChars.get(0));
          case Character.LOWERCASE_LETTER -> sb.append(maskingChars.get(1));
          case Character.DECIMAL_DIGIT_NUMBER -> sb.append(maskingChars.get(2));
          default -> sb.append(maskingChars.get(3));
        }
      }
      return sb.toString();

@@ -2,46 +2,36 @@ package com.provectus.kafka.ui.service.masking.policies;

import com.fasterxml.jackson.databind.node.ContainerNode;
import com.provectus.kafka.ui.config.ClustersProperties;
import java.util.List;
import lombok.RequiredArgsConstructor;

@RequiredArgsConstructor
public abstract class MaskingPolicy {

  public static MaskingPolicy create(ClustersProperties.Masking property) {
    List<String> fields = property.getFields() == null
        ? List.of() // empty list means that policy will be applied to all fields
        : property.getFields();
    switch (property.getType()) {
      case REMOVE:
        return new Remove(fields);
      case REPLACE:
        return new Replace(
            fields,
            property.getReplacement() == null
                ? Replace.DEFAULT_REPLACEMENT
                : property.getReplacement()
        );
      case MASK:
        return new Mask(
            fields,
            property.getPattern() == null
                ? Mask.DEFAULT_PATTERN
                : property.getPattern()
        );
      default:
        throw new IllegalStateException("Unknown policy type: " + property.getType());
    }
    FieldsSelector fieldsSelector = FieldsSelector.create(property);
    return switch (property.getType()) {
      case REMOVE -> new Remove(fieldsSelector);
      case REPLACE -> new Replace(
          fieldsSelector,
          property.getReplacement() == null
              ? Replace.DEFAULT_REPLACEMENT
              : property.getReplacement()
      );
      case MASK -> new Mask(
          fieldsSelector,
          property.getMaskingCharsReplacement() == null
              ? Mask.DEFAULT_PATTERN
              : property.getMaskingCharsReplacement()
      );
    };
  }

  //----------------------------------------------------------------

  // empty list means policy will be applied to all fields
  private final List<String> fieldNames;
  private final FieldsSelector fieldsSelector;

  protected boolean fieldShouldBeMasked(String fieldName) {
    return fieldNames.isEmpty() || fieldNames.contains(fieldName);
    return fieldsSelector.shouldBeMasked(fieldName);
  }

  public abstract ContainerNode<?> applyToJsonContainer(ContainerNode<?> node);

@@ -4,12 +4,12 @@ import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ContainerNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import java.util.List;

class Remove extends MaskingPolicy {

  Remove(List<String> fieldNames) {
    super(fieldNames);
  Remove(FieldsSelector fieldsSelector) {
    super(fieldsSelector);
  }

  @Override

@@ -6,7 +6,6 @@ import com.fasterxml.jackson.databind.node.ContainerNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.google.common.base.Preconditions;
import java.util.List;

class Replace extends MaskingPolicy {

@@ -14,8 +13,8 @@ class Replace extends MaskingPolicy {

  private final String replacement;

  Replace(List<String> fieldNames, String replacementString) {
    super(fieldNames);
  Replace(FieldsSelector fieldsSelector, String replacementString) {
    super(fieldsSelector);
    this.replacement = Preconditions.checkNotNull(replacementString);
  }

@@ -61,7 +61,9 @@ class JmxSslSocketFactory extends javax.net.ssl.SSLSocketFactory {
    } catch (Exception e) {
      log.error("----------------------------------");
      log.error("SSL can't be enabled for JMX retrieval. "
          + "Make sure your java app run with '--add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED' arg.", e);
          + "Make sure your java app run with '--add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED' arg. Err: {}",
          e.getMessage());
      log.trace("SSL can't be enabled for JMX retrieval", e);
      log.error("----------------------------------");
    }
    SSL_JMX_SUPPORTED = sslJmxSupported;

@@ -12,6 +12,7 @@ import com.provectus.kafka.ui.model.rbac.AccessContext;
import com.provectus.kafka.ui.model.rbac.Permission;
import com.provectus.kafka.ui.model.rbac.Resource;
import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.Subject;
import com.provectus.kafka.ui.model.rbac.permission.ConnectAction;
import com.provectus.kafka.ui.model.rbac.permission.ConsumerGroupAction;
import com.provectus.kafka.ui.model.rbac.permission.SchemaAction;

@@ -19,11 +20,12 @@ import com.provectus.kafka.ui.model.rbac.permission.TopicAction;
import com.provectus.kafka.ui.service.rbac.extractor.CognitoAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.GithubAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.GoogleAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.LdapAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.OauthAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.ProviderAuthorityExtractor;
import jakarta.annotation.PostConstruct;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.Predicate;
import java.util.regex.Pattern;

@@ -34,6 +36,7 @@ import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.core.env.Environment;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.core.context.ReactiveSecurityContextHolder;
import org.springframework.security.core.context.SecurityContext;

@@ -50,10 +53,11 @@ public class AccessControlService {

  @Nullable
  private final InMemoryReactiveClientRegistrationRepository clientRegistrationRepository;
  private final RoleBasedAccessControlProperties properties;
  private final Environment environment;

  private boolean rbacEnabled = false;
  private Set<ProviderAuthorityExtractor> extractors = Collections.emptySet();
  private final RoleBasedAccessControlProperties properties;
  private Set<ProviderAuthorityExtractor> oauthExtractors = Collections.emptySet();

  @PostConstruct
  public void init() {

@@ -63,21 +67,27 @@ public class AccessControlService {
    }
    rbacEnabled = true;

    this.extractors = properties.getRoles()
    this.oauthExtractors = properties.getRoles()
        .stream()
        .map(role -> role.getSubjects()
            .stream()
            .map(provider -> switch (provider.getProvider()) {
            .map(Subject::getProvider)
            .distinct()
            .map(provider -> switch (provider) {
              case OAUTH_COGNITO -> new CognitoAuthorityExtractor();
              case OAUTH_GOOGLE -> new GoogleAuthorityExtractor();
              case OAUTH_GITHUB -> new GithubAuthorityExtractor();
              case LDAP, LDAP_AD -> new LdapAuthorityExtractor();
            }).collect(Collectors.toSet()))
              case OAUTH -> new OauthAuthorityExtractor();
              default -> null;
            })
            .filter(Objects::nonNull)
            .collect(Collectors.toSet()))
        .flatMap(Set::stream)
        .collect(Collectors.toSet());

    if ((clientRegistrationRepository == null || !clientRegistrationRepository.iterator().hasNext())
        && !properties.getRoles().isEmpty()) {
    if (!properties.getRoles().isEmpty()
        && "oauth2".equalsIgnoreCase(environment.getProperty("auth.type"))
        && (clientRegistrationRepository == null || !clientRegistrationRepository.iterator().hasNext())) {
      log.error("Roles are configured but no authentication methods are present. Authentication might fail.");
    }
  }

@@ -354,8 +364,8 @@ public class AccessControlService {
    return isAccessible(Resource.KSQL, null, user, context, requiredActions);
  }

  public Set<ProviderAuthorityExtractor> getExtractors() {
    return extractors;
  public Set<ProviderAuthorityExtractor> getOauthExtractors() {
    return oauthExtractors;
  }

  public List<Role> getRoles() {

@@ -1,5 +1,8 @@
package com.provectus.kafka.ui.service.rbac.extractor;

import static com.provectus.kafka.ui.model.rbac.provider.Provider.Name.COGNITO;

import com.google.common.collect.Sets;
import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.provider.Provider;
import com.provectus.kafka.ui.service.rbac.AccessControlService;

@@ -18,8 +21,8 @@ public class CognitoAuthorityExtractor implements ProviderAuthorityExtractor {
  private static final String COGNITO_GROUPS_ATTRIBUTE_NAME = "cognito:groups";

  @Override
  public boolean isApplicable(String provider) {
    return Provider.Name.COGNITO.equalsIgnoreCase(provider);
  public boolean isApplicable(String provider, Map<String, String> customParams) {
    return COGNITO.equalsIgnoreCase(provider) || COGNITO.equalsIgnoreCase(customParams.get(TYPE));
  }

  @Override

@@ -63,7 +66,7 @@ public class CognitoAuthorityExtractor implements ProviderAuthorityExtractor {
        .map(Role::getName)
        .collect(Collectors.toSet());

    return Mono.just(Stream.concat(groupsByUsername.stream(), groupsByGroups.stream()).collect(Collectors.toSet()));
    return Mono.just(Sets.union(groupsByUsername, groupsByGroups));
  }

}

@@ -1,5 +1,7 @@
package com.provectus.kafka.ui.service.rbac.extractor;

import static com.provectus.kafka.ui.model.rbac.provider.Provider.Name.GITHUB;

import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.provider.Provider;
import com.provectus.kafka.ui.service.rbac.AccessControlService;

@@ -28,8 +30,8 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor {
  private static final String DUMMY = "dummy";

  @Override
  public boolean isApplicable(String provider) {
    return Provider.Name.GITHUB.equalsIgnoreCase(provider);
  public boolean isApplicable(String provider, Map<String, String> customParams) {
    return GITHUB.equalsIgnoreCase(provider) || GITHUB.equalsIgnoreCase(customParams.get(TYPE));
  }

  @Override

@@ -1,13 +1,14 @@
package com.provectus.kafka.ui.service.rbac.extractor;

import static com.provectus.kafka.ui.model.rbac.provider.Provider.Name.GOOGLE;

import com.google.common.collect.Sets;
import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.provider.Provider;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import lombok.extern.slf4j.Slf4j;
import org.springframework.security.oauth2.core.user.DefaultOAuth2User;
import reactor.core.publisher.Mono;

@@ -19,8 +20,8 @@ public class GoogleAuthorityExtractor implements ProviderAuthorityExtractor {
  public static final String EMAIL_ATTRIBUTE_NAME = "email";

  @Override
  public boolean isApplicable(String provider) {
    return Provider.Name.GOOGLE.equalsIgnoreCase(provider);
  public boolean isApplicable(String provider, Map<String, String> customParams) {
    return GOOGLE.equalsIgnoreCase(provider) || GOOGLE.equalsIgnoreCase(customParams.get(TYPE));
  }

  @Override

@@ -52,7 +53,7 @@ public class GoogleAuthorityExtractor implements ProviderAuthorityExtractor {
      return Mono.just(groupsByUsername);
    }

    List<String> groupsByDomain = acs.getRoles()
    Set<String> groupsByDomain = acs.getRoles()
        .stream()
        .filter(r -> r.getSubjects()
            .stream()

@@ -60,10 +61,9 @@ public class GoogleAuthorityExtractor implements ProviderAuthorityExtractor {
            .filter(s -> s.getType().equals("domain"))
            .anyMatch(s -> s.getValue().equals(domain)))
        .map(Role::getName)
        .toList();
        .collect(Collectors.toSet());

    return Mono.just(Stream.concat(groupsByUsername.stream(), groupsByDomain.stream())
        .collect(Collectors.toSet()));
    return Mono.just(Sets.union(groupsByUsername, groupsByDomain));
  }

}

@@ -1,23 +0,0 @@
package com.provectus.kafka.ui.service.rbac.extractor;

import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import lombok.extern.slf4j.Slf4j;
import reactor.core.publisher.Mono;

@Slf4j
public class LdapAuthorityExtractor implements ProviderAuthorityExtractor {

  @Override
  public boolean isApplicable(String provider) {
    return false; // TODO #2752
  }

  @Override
  public Mono<Set<String>> extract(AccessControlService acs, Object value, Map<String, Object> additionalParams) {
    return Mono.just(Collections.emptySet()); // TODO #2752
  }

}

@@ -1,22 +1,44 @@
package com.provectus.kafka.ui.service.rbac.extractor;

import static com.provectus.kafka.ui.model.rbac.provider.Provider.Name.OAUTH;

import com.google.common.collect.Sets;
import com.provectus.kafka.ui.config.auth.OAuthProperties;
import com.provectus.kafka.ui.model.rbac.Role;
import com.provectus.kafka.ui.model.rbac.provider.Provider;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.springframework.security.oauth2.core.user.DefaultOAuth2User;
import org.springframework.util.Assert;
import reactor.core.publisher.Mono;

@Slf4j
public class OauthAuthorityExtractor implements ProviderAuthorityExtractor {

  public static final String ROLES_FIELD_PARAM_NAME = "roles-field";

  @Override
  public boolean isApplicable(String provider) {
    return false; // TODO #2844
  public boolean isApplicable(String provider, Map<String, String> customParams) {
    var containsRolesFieldNameParam = customParams.containsKey(ROLES_FIELD_PARAM_NAME);
    if (!containsRolesFieldNameParam) {
      log.debug("Provider [{}] doesn't contain a roles field param name, mapping won't be performed", provider);
      return false;
    }

    return OAUTH.equalsIgnoreCase(provider) || OAUTH.equalsIgnoreCase(customParams.get(TYPE));
  }

  @Override
  public Mono<Set<String>> extract(AccessControlService acs, Object value, Map<String, Object> additionalParams) {
    log.trace("Extracting OAuth2 user authorities");

    DefaultOAuth2User principal;
    try {
      principal = (DefaultOAuth2User) value;

@@ -25,7 +47,67 @@ public class OauthAuthorityExtractor implements ProviderAuthorityExtractor {
      throw new RuntimeException();
    }

    return Mono.just(Set.of(principal.getName())); // TODO #2844
    var provider = (OAuthProperties.OAuth2Provider) additionalParams.get("provider");
    Assert.notNull(provider, "provider is null");
    var rolesFieldName = provider.getCustomParams().get(ROLES_FIELD_PARAM_NAME);

    Set<String> rolesByUsername = acs.getRoles()
        .stream()
        .filter(r -> r.getSubjects()
            .stream()
            .filter(s -> s.getProvider().equals(Provider.OAUTH))
            .filter(s -> s.getType().equals("user"))
            .anyMatch(s -> s.getValue().equals(principal.getName())))
        .map(Role::getName)
        .collect(Collectors.toSet());

    Set<String> rolesByRolesField = acs.getRoles()
        .stream()
        .filter(role -> role.getSubjects()
            .stream()
            .filter(s -> s.getProvider().equals(Provider.OAUTH))
            .filter(s -> s.getType().equals("role"))
            .anyMatch(subject -> {
              var roleName = subject.getValue();
              var principalRoles = convertRoles(principal.getAttribute(rolesFieldName));
              var roleMatched = principalRoles.contains(roleName);

              if (roleMatched) {
                log.debug("Assigning role [{}] to user [{}]", roleName, principal.getName());
              } else {
                log.trace("Role [{}] not found in user [{}] roles", roleName, principal.getName());
              }

              return roleMatched;
            })
        )
        .map(Role::getName)
        .collect(Collectors.toSet());

    return Mono.just(Sets.union(rolesByUsername, rolesByRolesField));
  }

  @SuppressWarnings("unchecked")
  private Collection<String> convertRoles(Object roles) {
    if (roles == null) {
      log.debug("Param missing from attributes, skipping");
      return Collections.emptySet();
    }

    if ((roles instanceof List<?>) || (roles instanceof Set<?>)) {
      log.trace("The field is either a set or a list, returning as is");
      return (Collection<String>) roles;
    }

    if (!(roles instanceof String)) {
      log.debug("The field is not a string, skipping");
      return Collections.emptySet();
    }

    log.trace("Trying to deserialize the field value [{}] as a string", roles);

    return Arrays.stream(((String) roles).split(","))
        .collect(Collectors.toSet());
  }

}
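For illustration, a minimal sketch of the kind of principal the extractor receives, assuming a provider configured with the custom param roles-field pointing at a "groups" claim (the claim name and values below are hypothetical, not from the commit):

```java
import java.util.List;
import java.util.Map;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.oauth2.core.user.DefaultOAuth2User;

class OauthExtractionSketch {
  public static void main(String[] args) {
    // Hypothetical principal whose token carries a "groups" claim.
    var principal = new DefaultOAuth2User(
        List.of(new SimpleGrantedAuthority("ROLE_USER")),
        Map.of("sub", "john", "groups", List.of("kafka-admins")),
        "sub");
    // With roles-field=groups, the extractor reads this attribute via convertRoles();
    // any RBAC role with an OAUTH subject of type "role" and value "kafka-admins" would be granted.
    Object groups = principal.getAttribute("groups");
    System.out.println(groups);
  }
}
```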
@@ -7,7 +7,9 @@ import reactor.core.publisher.Mono;

public interface ProviderAuthorityExtractor {

  boolean isApplicable(String provider);
  String TYPE = "type";

  boolean isApplicable(String provider, Map<String, String> customParams);

  Mono<Set<String>> extract(AccessControlService acs, Object value, Map<String, Object> additionalParams);
@@ -90,6 +90,7 @@ public class DynamicConfigOperations {
  }

  public PropertiesStructure getCurrentProperties() {
    checkIfDynamicConfigEnabled();
    return PropertiesStructure.builder()
        .kafka(getNullableBean(ClustersProperties.class))
        .rbac(getNullableBean(RoleBasedAccessControlProperties.class))

@@ -112,11 +113,7 @@ public class DynamicConfigOperations {
  }

  public void persist(PropertiesStructure properties) {
    if (!dynamicConfigEnabled()) {
      throw new ValidationException(
          "Dynamic config change is not allowed. "
              + "Set dynamic.config.enabled property to 'true' to enabled it.");
    }
    checkIfDynamicConfigEnabled();
    properties.initAndValidate();

    String yaml = serializeToYaml(properties);

@@ -124,8 +121,9 @@ public class DynamicConfigOperations {
  }

  public Mono<Path> uploadConfigRelatedFile(FilePart file) {
    String targetDirStr = (String) ctx.getEnvironment().getSystemEnvironment()
        .getOrDefault(CONFIG_RELATED_UPLOADS_DIR_PROPERTY, CONFIG_RELATED_UPLOADS_DIR_DEFAULT);
    checkIfDynamicConfigEnabled();
    String targetDirStr = ctx.getEnvironment()
        .getProperty(CONFIG_RELATED_UPLOADS_DIR_PROPERTY, CONFIG_RELATED_UPLOADS_DIR_DEFAULT);

    Path targetDir = Path.of(targetDirStr);
    if (!Files.exists(targetDir)) {

@@ -149,6 +147,14 @@ public class DynamicConfigOperations {
        .onErrorMap(th -> new FileUploadException(targetFilePath, th));
  }

  private void checkIfDynamicConfigEnabled() {
    if (!dynamicConfigEnabled()) {
      throw new ValidationException(
          "Dynamic config change is not allowed. "
              + "Set dynamic.config.enabled property to 'true' to enabled it.");
    }
  }

  @SneakyThrows
  private void writeYamlToFile(String yaml, Path path) {
    if (Files.isDirectory(path)) {

@@ -224,7 +230,7 @@ public class DynamicConfigOperations {
    Optional.ofNullable(auth)
        .flatMap(a -> Optional.ofNullable(a.oauth2))
        .ifPresent(OAuthProperties::validate);
        .ifPresent(OAuthProperties::init);

    Optional.ofNullable(webclient)
        .ifPresent(WebclientProperties::validate);
@@ -1,24 +1,21 @@
package com.provectus.kafka.ui.util;

import lombok.extern.slf4j.Slf4j;
import java.util.Optional;

@Slf4j
public final class KafkaVersion {

  private KafkaVersion() {
  }

  public static float parse(String version) throws NumberFormatException {
    log.trace("Parsing cluster version [{}]", version);
  public static Optional<Float> parse(String version) throws NumberFormatException {
    try {
      final String[] parts = version.split("\\.");
      if (parts.length > 2) {
        version = parts[0] + "." + parts[1];
      }
      return Float.parseFloat(version.split("-")[0]);
      return Optional.of(Float.parseFloat(version.split("-")[0]));
    } catch (Exception e) {
      log.error("Conversion clusterVersion [{}] to float value failed", version, e);
      throw e;
      return Optional.empty();
    }
  }
}
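A minimal usage sketch of the new Optional-based signature (hypothetical caller code, assuming com.provectus.kafka.ui.util.KafkaVersion is on the classpath):

```java
import java.util.Optional;

class KafkaVersionUsageSketch {
  public static void main(String[] args) {
    // "3.4.1-IV0" is trimmed to "3.4" and parsed; malformed input now yields an empty Optional
    // instead of propagating the parsing exception to the caller.
    Optional<Float> parsed = KafkaVersion.parse("3.4.1-IV0"); // Optional.of(3.4f)
    float version = parsed.orElse(0f);
    System.out.println(version);
  }
}
```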
@@ -2,6 +2,7 @@ package com.provectus.kafka.ui;

import com.provectus.kafka.ui.container.KafkaConnectContainer;
import com.provectus.kafka.ui.container.SchemaRegistryContainer;
import java.nio.file.Path;
import java.util.List;
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;

@@ -9,6 +10,7 @@ import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.function.ThrowingConsumer;
import org.junit.jupiter.api.io.TempDir;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient;
import org.springframework.boot.test.context.SpringBootTest;

@@ -47,6 +49,9 @@ public abstract class AbstractIntegrationTest {
          .dependsOn(kafka)
          .dependsOn(schemaRegistry);

  @TempDir
  public static Path tmpDir;

  static {
    kafka.start();
    schemaRegistry.start();

@@ -76,6 +81,9 @@ public abstract class AbstractIntegrationTest {
      System.setProperty("kafka.clusters.1.schemaRegistry", schemaRegistry.getUrl());
      System.setProperty("kafka.clusters.1.kafkaConnect.0.name", "kafka-connect");
      System.setProperty("kafka.clusters.1.kafkaConnect.0.address", kafkaConnect.getTarget());

      System.setProperty("dynamic.config.enabled", "true");
      System.setProperty("config.related.uploads.dir", tmpDir.toString());
    }
  }
@@ -0,0 +1,49 @@
package com.provectus.kafka.ui.controller;

import static org.assertj.core.api.Assertions.assertThat;

import com.provectus.kafka.ui.AbstractIntegrationTest;
import com.provectus.kafka.ui.model.UploadedFileInfoDTO;
import java.io.IOException;
import java.nio.file.Path;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.ClassPathResource;
import org.springframework.http.HttpEntity;
import org.springframework.http.client.MultipartBodyBuilder;
import org.springframework.test.web.reactive.server.WebTestClient;
import org.springframework.util.MultiValueMap;

class ApplicationConfigControllerTest extends AbstractIntegrationTest {

  @Autowired
  private WebTestClient webTestClient;

  @Test
  public void testUpload() throws IOException {
    var fileToUpload = new ClassPathResource("/fileForUploadTest.txt", this.getClass());

    UploadedFileInfoDTO result = webTestClient
        .post()
        .uri("/api/config/relatedfiles")
        .bodyValue(generateBody(fileToUpload))
        .exchange()
        .expectStatus()
        .isOk()
        .expectBody(UploadedFileInfoDTO.class)
        .returnResult()
        .getResponseBody();

    assertThat(result).isNotNull();
    assertThat(result.getLocation()).isNotNull();
    assertThat(Path.of(result.getLocation()))
        .hasSameBinaryContentAs(fileToUpload.getFile().toPath());
  }

  private MultiValueMap<String, HttpEntity<?>> generateBody(ClassPathResource resource) {
    MultipartBodyBuilder builder = new MultipartBodyBuilder();
    builder.part("file", resource);
    return builder.build();
  }

}
@@ -118,10 +118,18 @@ class MessageFiltersTest {
    }

    @Test
    void keySetToNullIfKeyCantBeParsedToJson() {
      var f = groovyScriptFilter("key == null");
    void keySetToKeyStringIfCantBeParsedToJson() {
      var f = groovyScriptFilter("key == \"not json\"");
      assertTrue(f.test(msg().key("not json")));
      assertFalse(f.test(msg().key("{ \"k\" : \"v\" }")));
    }

    @Test
    void keyAndKeyAsTextSetToNullIfRecordsKeyIsNull() {
      var f = groovyScriptFilter("key == null");
      assertTrue(f.test(msg().key(null)));

      f = groovyScriptFilter("keyAsText == null");
      assertTrue(f.test(msg().key(null)));
    }

    @Test

@@ -132,10 +140,18 @@ class MessageFiltersTest {
    }

    @Test
    void valueSetToNullIfKeyCantBeParsedToJson() {
      var f = groovyScriptFilter("value == null");
    void valueSetToContentStringIfCantBeParsedToJson() {
      var f = groovyScriptFilter("value == \"not json\"");
      assertTrue(f.test(msg().content("not json")));
      assertFalse(f.test(msg().content("{ \"k\" : \"v\" }")));
    }

    @Test
    void valueAndValueAsTextSetToNullIfRecordsContentIsNull() {
      var f = groovyScriptFilter("value == null");
      assertTrue(f.test(msg().content(null)));

      f = groovyScriptFilter("valueAsText == null");
      assertTrue(f.test(msg().content(null)));
    }

    @Test

@@ -185,4 +201,4 @@ class MessageFiltersTest {
          .partition(1);
    }

}
}
@@ -0,0 +1,83 @@
package com.provectus.kafka.ui.model;

import static org.assertj.core.api.Assertions.assertThat;

import com.provectus.kafka.ui.service.ReactiveAdminClient;
import java.math.BigDecimal;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.kafka.clients.admin.TopicDescription;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.TopicPartitionInfo;
import org.assertj.core.data.Percentage;
import org.junit.jupiter.api.Test;

class PartitionDistributionStatsTest {

  @Test
  void skewCalculatedBasedOnPartitionsCounts() {
    Node n1 = new Node(1, "n1", 9092);
    Node n2 = new Node(2, "n2", 9092);
    Node n3 = new Node(3, "n3", 9092);
    Node n4 = new Node(4, "n4", 9092);

    var stats = PartitionDistributionStats.create(
        Statistics.builder()
            .clusterDescription(
                new ReactiveAdminClient.ClusterDescription(null, "test", Set.of(n1, n2, n3), null))
            .topicDescriptions(
                Map.of(
                    "t1", new TopicDescription(
                        "t1", false,
                        List.of(
                            new TopicPartitionInfo(0, n1, List.of(n1, n2), List.of(n1, n2)),
                            new TopicPartitionInfo(1, n2, List.of(n2, n3), List.of(n2, n3))
                        )
                    ),
                    "t2", new TopicDescription(
                        "t2", false,
                        List.of(
                            new TopicPartitionInfo(0, n1, List.of(n1, n2), List.of(n1, n2)),
                            new TopicPartitionInfo(1, null, List.of(n2, n1), List.of(n1))
                        )
                    )
                )
            )
            .build(), 4
    );

    assertThat(stats.getPartitionLeaders())
        .containsExactlyInAnyOrderEntriesOf(Map.of(n1, 2, n2, 1));
    assertThat(stats.getPartitionsCount())
        .containsExactlyInAnyOrderEntriesOf(Map.of(n1, 3, n2, 4, n3, 1));
    assertThat(stats.getInSyncPartitions())
        .containsExactlyInAnyOrderEntriesOf(Map.of(n1, 3, n2, 3, n3, 1));

    // Node(partitions): n1(3), n2(4), n3(1), n4(0)
    // average partitions cnt = (3+4+1) / 3 = 2.666 (counting only nodes with partitions!)
    assertThat(stats.getAvgPartitionsPerBroker())
        .isCloseTo(2.666, Percentage.withPercentage(1));

    assertThat(stats.partitionsSkew(n1))
        .isCloseTo(BigDecimal.valueOf(12.5), Percentage.withPercentage(1));
    assertThat(stats.partitionsSkew(n2))
        .isCloseTo(BigDecimal.valueOf(50), Percentage.withPercentage(1));
    assertThat(stats.partitionsSkew(n3))
        .isCloseTo(BigDecimal.valueOf(-62.5), Percentage.withPercentage(1));
    assertThat(stats.partitionsSkew(n4))
        .isCloseTo(BigDecimal.valueOf(-100), Percentage.withPercentage(1));

    // Node(leaders): n1(2), n2(1), n3(0), n4(0)
    // average leaders cnt = (2+1) / 2 = 1.5 (counting only nodes with leaders!)
    assertThat(stats.leadersSkew(n1))
        .isCloseTo(BigDecimal.valueOf(33.33), Percentage.withPercentage(1));
    assertThat(stats.leadersSkew(n2))
        .isCloseTo(BigDecimal.valueOf(-33.33), Percentage.withPercentage(1));
    assertThat(stats.leadersSkew(n3))
        .isCloseTo(BigDecimal.valueOf(-100), Percentage.withPercentage(1));
    assertThat(stats.leadersSkew(n4))
        .isCloseTo(BigDecimal.valueOf(-100), Percentage.withPercentage(1));
  }

}
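The expected percentages in this test are consistent with a simple relative-deviation formula. A sketch of that formula, reproduced only to make the numbers traceable (this is an assumption about PartitionDistributionStats, not its actual source):

```java
import java.math.BigDecimal;
import java.math.RoundingMode;

class SkewSketch {
  // skew% = (countOnNode - avgPerBroker) / avgPerBroker * 100
  static BigDecimal skew(int countOnNode, double avgPerBroker) {
    return BigDecimal.valueOf((countOnNode - avgPerBroker) / avgPerBroker * 100.0)
        .setScale(2, RoundingMode.HALF_UP);
  }

  public static void main(String[] args) {
    double avg = (3 + 4 + 1) / 3.0; // only brokers that host partitions are counted
    System.out.println(skew(3, avg)); // ~12.50  (n1)
    System.out.println(skew(4, avg)); // ~50.00  (n2)
    System.out.println(skew(1, avg)); // ~-62.50 (n3)
    System.out.println(skew(0, avg)); // -100.00 (n4)
  }
}
```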
@@ -0,0 +1,70 @@
package com.provectus.kafka.ui.service.acl;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;

import com.provectus.kafka.ui.exception.ValidationException;
import java.util.Collection;
import java.util.List;
import org.apache.kafka.common.acl.AccessControlEntry;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.acl.AclPermissionType;
import org.apache.kafka.common.resource.PatternType;
import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourceType;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

class AclCsvTest {

  private static final List<AclBinding> TEST_BINDINGS = List.of(
      new AclBinding(
          new ResourcePattern(ResourceType.TOPIC, "*", PatternType.LITERAL),
          new AccessControlEntry("User:test1", "*", AclOperation.READ, AclPermissionType.ALLOW)),
      new AclBinding(
          new ResourcePattern(ResourceType.GROUP, "group1", PatternType.PREFIXED),
          new AccessControlEntry("User:test2", "localhost", AclOperation.DESCRIBE, AclPermissionType.DENY))
  );

  @ParameterizedTest
  @ValueSource(strings = {
      "Principal,ResourceType, PatternType, ResourceName,Operation,PermissionType,Host\n"
          + "User:test1,TOPIC,LITERAL,*,READ,ALLOW,*\n"
          + "User:test2,GROUP,PREFIXED,group1,DESCRIBE,DENY,localhost",

      //without header
      "User:test1,TOPIC,LITERAL,*,READ,ALLOW,*\n"
          + "\n"
          + "User:test2,GROUP,PREFIXED,group1,DESCRIBE,DENY,localhost"
          + "\n"
  })
  void parsesValidInputCsv(String csvString) {
    Collection<AclBinding> parsed = AclCsv.parseCsv(csvString);
    assertThat(parsed).containsExactlyInAnyOrderElementsOf(TEST_BINDINGS);
  }

  @ParameterizedTest
  @ValueSource(strings = {
      // columns > 7
      "User:test1,TOPIC,LITERAL,*,READ,ALLOW,*,1,2,3,4",
      // columns < 7
      "User:test1,TOPIC,LITERAL,*",
      // enum values are illegal
      "User:test1,ILLEGAL,LITERAL,*,READ,ALLOW,*",
      "User:test1,TOPIC,LITERAL,*,READ,ILLEGAL,*"
  })
  void throwsExceptionForInvalidInputCsv(String csvString) {
    assertThatThrownBy(() -> AclCsv.parseCsv(csvString))
        .isInstanceOf(ValidationException.class);
  }

  @Test
  void transformAndParseUseSameFormat() {
    String csv = AclCsv.transformToCsvString(TEST_BINDINGS);
    Collection<AclBinding> parsedBindings = AclCsv.parseCsv(csv);
    assertThat(parsedBindings).containsExactlyInAnyOrderElementsOf(TEST_BINDINGS);
  }

}
@@ -0,0 +1,82 @@
package com.provectus.kafka.ui.service.acl;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.service.AdminClientService;
import com.provectus.kafka.ui.service.ReactiveAdminClient;
import java.util.Collection;
import java.util.List;
import org.apache.kafka.common.acl.AccessControlEntry;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.acl.AclPermissionType;
import org.apache.kafka.common.resource.PatternType;
import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourcePatternFilter;
import org.apache.kafka.common.resource.ResourceType;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import reactor.core.publisher.Mono;

class AclsServiceTest {

  private static final KafkaCluster CLUSTER = KafkaCluster.builder().build();

  private final ReactiveAdminClient adminClientMock = mock(ReactiveAdminClient.class);
  private final AdminClientService adminClientService = mock(AdminClientService.class);

  private final AclsService aclsService = new AclsService(adminClientService);

  @BeforeEach
  void initMocks() {
    when(adminClientService.get(CLUSTER)).thenReturn(Mono.just(adminClientMock));
  }

  @Test
  void testSyncAclWithAclCsv() {
    var existingBinding1 = new AclBinding(
        new ResourcePattern(ResourceType.TOPIC, "*", PatternType.LITERAL),
        new AccessControlEntry("User:test1", "*", AclOperation.READ, AclPermissionType.ALLOW));

    var existingBinding2 = new AclBinding(
        new ResourcePattern(ResourceType.GROUP, "group1", PatternType.PREFIXED),
        new AccessControlEntry("User:test2", "localhost", AclOperation.DESCRIBE, AclPermissionType.DENY));

    var newBindingToBeAdded = new AclBinding(
        new ResourcePattern(ResourceType.GROUP, "groupNew", PatternType.PREFIXED),
        new AccessControlEntry("User:test3", "localhost", AclOperation.DESCRIBE, AclPermissionType.DENY));

    when(adminClientMock.listAcls(ResourcePatternFilter.ANY))
        .thenReturn(Mono.just(List.of(existingBinding1, existingBinding2)));

    ArgumentCaptor<?> createdCaptor = ArgumentCaptor.forClass(Collection.class);
    when(adminClientMock.createAcls((Collection<AclBinding>) createdCaptor.capture()))
        .thenReturn(Mono.empty());

    ArgumentCaptor<?> deletedCaptor = ArgumentCaptor.forClass(Collection.class);
    when(adminClientMock.deleteAcls((Collection<AclBinding>) deletedCaptor.capture()))
        .thenReturn(Mono.empty());

    aclsService.syncAclWithAclCsv(
        CLUSTER,
        "Principal,ResourceType, PatternType, ResourceName,Operation,PermissionType,Host\n"
            + "User:test1,TOPIC,LITERAL,*,READ,ALLOW,*\n"
            + "User:test3,GROUP,PREFIXED,groupNew,DESCRIBE,DENY,localhost"
    ).block();

    Collection<AclBinding> createdBindings = (Collection<AclBinding>) createdCaptor.getValue();
    assertThat(createdBindings)
        .hasSize(1)
        .contains(newBindingToBeAdded);

    Collection<AclBinding> deletedBindings = (Collection<AclBinding>) deletedCaptor.getValue();
    assertThat(deletedBindings)
        .hasSize(1)
        .contains(existingBinding2);
  }

}
@@ -0,0 +1,53 @@
package com.provectus.kafka.ui.service.masking.policies;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;

import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.exception.ValidationException;
import java.util.List;
import org.junit.jupiter.api.Test;

class FieldsSelectorTest {

  @Test
  void selectsFieldsDueToProvidedPattern() {
    var properties = new ClustersProperties.Masking();
    properties.setFieldsNamePattern("f1|f2");

    var selector = FieldsSelector.create(properties);
    assertThat(selector.shouldBeMasked("f1")).isTrue();
    assertThat(selector.shouldBeMasked("f2")).isTrue();
    assertThat(selector.shouldBeMasked("doesNotMatchPattern")).isFalse();
  }

  @Test
  void selectsFieldsDueToProvidedFieldNames() {
    var properties = new ClustersProperties.Masking();
    properties.setFields(List.of("f1", "f2"));

    var selector = FieldsSelector.create(properties);
    assertThat(selector.shouldBeMasked("f1")).isTrue();
    assertThat(selector.shouldBeMasked("f2")).isTrue();
    assertThat(selector.shouldBeMasked("notInAList")).isFalse();
  }

  @Test
  void selectAllFieldsIfNoPatternAndNoNamesProvided() {
    var properties = new ClustersProperties.Masking();

    var selector = FieldsSelector.create(properties);
    assertThat(selector.shouldBeMasked("anyPropertyName")).isTrue();
  }

  @Test
  void throwsExceptionIfBothFieldListAndPatternProvided() {
    var properties = new ClustersProperties.Masking();
    properties.setFieldsNamePattern("f1|f2");
    properties.setFields(List.of("f3", "f4"));

    assertThatThrownBy(() -> FieldsSelector.create(properties))
        .isInstanceOf(ValidationException.class);
  }

}
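Taken together with the Mask/Remove/Replace tests below, these cases pin down the FieldsSelector contract: a single shouldBeMasked(fieldName) predicate built from either an explicit field list or a name pattern, defaulting to "mask every field", and rejecting configs that set both. A rough sketch of that contract (the factory signature and exception type here are assumptions; the real code lives in the masking.policies package and throws ValidationException):

```java
import java.util.List;
import java.util.regex.Pattern;

@FunctionalInterface
interface FieldsSelectorSketch {
  boolean shouldBeMasked(String fieldName);

  // Hypothetical factory mirroring the behaviour exercised by FieldsSelectorTest.
  static FieldsSelectorSketch create(List<String> fields, String fieldsNamePattern) {
    boolean hasFields = fields != null && !fields.isEmpty();
    boolean hasPattern = fieldsNamePattern != null;
    if (hasFields && hasPattern) {
      throw new IllegalStateException("'fields' and 'fieldsNamePattern' should not both be set");
    }
    if (hasPattern) {
      Pattern pattern = Pattern.compile(fieldsNamePattern);
      return fieldName -> pattern.matcher(fieldName).matches();
    }
    if (hasFields) {
      return fields::contains;
    }
    return fieldName -> true; // no pattern and no names -> every field is masked
  }
}
```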
@@ -15,35 +15,35 @@ import org.junit.jupiter.params.provider.MethodSource;

class MaskTest {

  private static final List<String> TARGET_FIELDS = List.of("id", "name");
  private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);
  private static final List<String> PATTERN = List.of("X", "x", "n", "-");

  @ParameterizedTest
  @MethodSource
  void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
    Mask policy = new Mask(fields, PATTERN);
  void testApplyToJsonContainer(FieldsSelector selector, ContainerNode<?> original, ContainerNode<?> expected) {
    Mask policy = new Mask(selector, PATTERN);
    assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
  }

  private static Stream<Arguments> testApplyToJsonContainer() {
    return Stream.of(
        Arguments.of(
            TARGET_FIELDS,
            FIELDS_SELECTOR,
            parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
            parse("{ \"id\": \"nnn\", \"name\": { \"first\": \"Xxxxx\", \"surname\": \"Xxxxnnn-\"}}")
        ),
        Arguments.of(
            TARGET_FIELDS,
            FIELDS_SELECTOR,
            parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
            parse("[{ \"id\": \"nnn\", \"f2\": 234}, { \"name\": \"n-n\", \"f2\": 345} ]")
        ),
        Arguments.of(
            TARGET_FIELDS,
            FIELDS_SELECTOR,
            parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
            parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Xxxxnnn-\"}}")
        ),
        Arguments.of(
            List.of(),
            (FieldsSelector) (fieldName -> true),
            parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
            parse("{ \"outer\": { \"f1\": \"Xxxxx\", \"name\": \"Xxxxnnn-\"}}")
        )

@@ -57,7 +57,7 @@ class MaskTest {
      "null, xxxx"
  })
  void testApplyToString(String original, String expected) {
    Mask policy = new Mask(List.of(), PATTERN);
    Mask policy = new Mask(fieldName -> true, PATTERN);
    assertThat(policy.applyToString(original)).isEqualTo(expected);
  }

@@ -15,39 +15,39 @@ import org.junit.jupiter.params.provider.MethodSource;

class RemoveTest {

  private static final List<String> TARGET_FIELDS = List.of("id", "name");
  private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);

  @ParameterizedTest
  @MethodSource
  void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
    var policy = new Remove(fields);
  void testApplyToJsonContainer(FieldsSelector fieldsSelector, ContainerNode<?> original, ContainerNode<?> expected) {
    var policy = new Remove(fieldsSelector);
    assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
  }

  private static Stream<Arguments> testApplyToJsonContainer() {
    return Stream.of(
        Arguments.of(
            TARGET_FIELDS,
            FIELDS_SELECTOR,
            parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
            parse("{}")
        ),
        Arguments.of(
            TARGET_FIELDS,
            FIELDS_SELECTOR,
            parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
            parse("[{ \"f2\": 234}, { \"f2\": 345} ]")
        ),
        Arguments.of(
            TARGET_FIELDS,
            FIELDS_SELECTOR,
            parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
            parse("{ \"outer\": { \"f1\": \"James\"}}")
        ),
        Arguments.of(
            List.of(),
            (FieldsSelector) (fieldName -> true),
            parse("{ \"outer\": { \"f1\": \"v1\", \"f2\": \"v2\", \"inner\" : {\"if1\": \"iv1\"}}}"),
            parse("{}")
        ),
        Arguments.of(
            List.of(),
            (FieldsSelector) (fieldName -> true),
            parse("[{ \"f1\": 123}, { \"f2\": \"1.2\"} ]"),
            parse("[{}, {}]")
        )

@@ -66,7 +66,7 @@ class RemoveTest {
      "null, null"
  })
  void testApplyToString(String original, String expected) {
    var policy = new Remove(List.of());
    var policy = new Remove(fieldName -> true);
    assertThat(policy.applyToString(original)).isEqualTo(expected);
  }
}
}
@@ -15,35 +15,35 @@ import org.junit.jupiter.params.provider.MethodSource;

class ReplaceTest {

  private static final List<String> TARGET_FIELDS = List.of("id", "name");
  private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);
  private static final String REPLACEMENT_STRING = "***";

  @ParameterizedTest
  @MethodSource
  void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
    var policy = new Replace(fields, REPLACEMENT_STRING);
  void testApplyToJsonContainer(FieldsSelector fieldsSelector, ContainerNode<?> original, ContainerNode<?> expected) {
    var policy = new Replace(fieldsSelector, REPLACEMENT_STRING);
    assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
  }

  private static Stream<Arguments> testApplyToJsonContainer() {
    return Stream.of(
        Arguments.of(
            TARGET_FIELDS,
            FIELDS_SELECTOR,
            parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
            parse("{ \"id\": \"***\", \"name\": { \"first\": \"***\", \"surname\": \"***\"}}")
        ),
        Arguments.of(
            TARGET_FIELDS,
            FIELDS_SELECTOR,
            parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
            parse("[{ \"id\": \"***\", \"f2\": 234}, { \"name\": \"***\", \"f2\": 345} ]")
        ),
        Arguments.of(
            TARGET_FIELDS,
            FIELDS_SELECTOR,
            parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
            parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"***\"}}")
        ),
        Arguments.of(
            List.of(),
            (FieldsSelector) (fieldName -> true),
            parse("{ \"outer\": { \"f1\": \"v1\", \"f2\": \"v2\", \"inner\" : {\"if1\": \"iv1\"}}}"),
            parse("{ \"outer\": { \"f1\": \"***\", \"f2\": \"***\", \"inner\" : {\"if1\": \"***\"}}}}")
        )

@@ -62,7 +62,7 @@ class ReplaceTest {
      "null, ***"
  })
  void testApplyToString(String original, String expected) {
    var policy = new Replace(List.of(), REPLACEMENT_STRING);
    var policy = new Replace(fieldName -> true, REPLACEMENT_STRING);
    assertThat(policy.applyToString(original)).isEqualTo(expected);
  }
}
}
1 kafka-ui-api/src/test/resources/fileForUploadTest.txt Normal file
@@ -0,0 +1 @@
some content goes here
@@ -101,9 +101,6 @@
                <useSpringBoot3>true</useSpringBoot3>
                <dateLibrary>java8</dateLibrary>
              </configOptions>
              <typeMappings>
                <mapping>filepart=org.springframework.http.codec.multipart.FilePart</mapping>
              </typeMappings>
            </configuration>
          </execution>
          <execution>
@@ -1730,6 +1730,125 @@ paths:
        404:
          description: Not found

  /api/clusters/{clusterName}/acls:
    get:
      tags:
        - Acls
      summary: listKafkaAcls
      operationId: listAcls
      parameters:
        - name: clusterName
          in: path
          required: true
          schema:
            type: string
        - name: resourceType
          in: query
          required: false
          schema:
            $ref: '#/components/schemas/KafkaAclResourceType'
        - name: resourceName
          in: query
          required: false
          schema:
            type: string
        - name: namePatternType
          in: query
          required: false
          schema:
            $ref: '#/components/schemas/KafkaAclNamePatternType'
      responses:
        200:
          description: OK
          content:
            application/json:
              schema:
                type: array
                items:
                  $ref: '#/components/schemas/KafkaAcl'

  /api/clusters/{clusterName}/acl/csv:
    get:
      tags:
        - Acls
      summary: getAclAsCsv
      operationId: getAclAsCsv
      parameters:
        - name: clusterName
          in: path
          required: true
          schema:
            type: string
      responses:
        200:
          description: OK
          content:
            text/plain:
              schema:
                type: string
    post:
      tags:
        - Acls
      summary: syncAclsCsv
      operationId: syncAclsCsv
      parameters:
        - name: clusterName
          in: path
          required: true
          schema:
            type: string
      requestBody:
        content:
          text/plain:
            schema:
              type: string
      responses:
        200:
          description: OK

  /api/clusters/{clusterName}/acl:
    post:
      tags:
        - Acls
      summary: createAcl
      operationId: createAcl
      parameters:
        - name: clusterName
          in: path
          required: true
          schema:
            type: string
      requestBody:
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/KafkaAcl'
      responses:
        200:
          description: OK

    delete:
      tags:
        - Acls
      summary: deleteAcl
      operationId: deleteAcl
      parameters:
        - name: clusterName
          in: path
          required: true
          schema:
            type: string
      requestBody:
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/KafkaAcl'
      responses:
        200:
          description: OK
        404:
          description: Acl not found

  /api/authorization:
    get:
      tags:
@@ -1819,7 +1938,7 @@ paths:
              properties:
                file:
                  type: string
                  format: filepart
                  format: binary
      responses:
        200:
          description: OK
@@ -1972,6 +2091,8 @@ components:
          - KAFKA_CONNECT
          - KSQL_DB
          - TOPIC_DELETION
          - KAFKA_ACL_VIEW # get ACLs listing
          - KAFKA_ACL_EDIT # create & delete ACLs
      required:
        - id
        - name
@@ -2375,6 +2496,16 @@ components:
          type: number
        bytesOutPerSec:
          type: number
        partitionsLeader:
          type: integer
        partitions:
          type: integer
        inSyncPartitions:
          type: integer
        partitionsSkew:
          type: number
        leadersSkew:
          type: number
      required:
        - id
@@ -3332,6 +3463,62 @@ components:
        - SCHEMA
        - CONNECT
        - KSQL
        - ACL

    KafkaAcl:
      type: object
      required: [resourceType, resourceName, namePatternType, principal, host, operation, permission]
      properties:
        resourceType:
          $ref: '#/components/schemas/KafkaAclResourceType'
        resourceName:
          type: string # "*" if acl can be applied to any resource of given type
        namePatternType:
          $ref: '#/components/schemas/KafkaAclNamePatternType'
        principal:
          type: string
        host:
          type: string # "*" if acl can be applied to any resource of given type
        operation:
          type: string
          enum:
            - UNKNOWN # Unknown operation, need to update mapping code on BE
            - ALL # Cluster, Topic, Group
            - READ # Topic, Group
            - WRITE # Topic, TransactionalId
            - CREATE # Cluster, Topic
            - DELETE # Topic, Group
            - ALTER # Cluster, Topic,
            - DESCRIBE # Cluster, Topic, Group, TransactionalId, DelegationToken
            - CLUSTER_ACTION # Cluster
            - DESCRIBE_CONFIGS # Cluster, Topic
            - ALTER_CONFIGS # Cluster, Topic
            - IDEMPOTENT_WRITE # Cluster
            - CREATE_TOKENS
            - DESCRIBE_TOKENS
        permission:
          type: string
          enum:
            - ALLOW
            - DENY

    KafkaAclResourceType:
      type: string
      enum:
        - UNKNOWN # Unknown operation, need to update mapping code on BE
        - TOPIC
        - GROUP
        - CLUSTER
        - TRANSACTIONAL_ID
        - DELEGATION_TOKEN
        - USER

    KafkaAclNamePatternType:
      type: string
      enum:
        - MATCH
        - LITERAL
        - PREFIXED

    RestartRequest:
      type: object
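The KafkaAcl schema above is the request body the new createAcl endpoint consumes. A hedged client sketch (assumptions: the app listens on http://localhost:8080, the cluster is named "local", and Spring WebFlux's WebClient is on the classpath; the field values are the sample ACL used in AclCsvTest):

```java
import java.util.Map;
import org.springframework.http.MediaType;
import org.springframework.web.reactive.function.client.WebClient;

class CreateAclSketch {
  public static void main(String[] args) {
    // POST /api/clusters/{clusterName}/acl with a KafkaAcl JSON body.
    WebClient.create("http://localhost:8080")
        .post()
        .uri("/api/clusters/{clusterName}/acl", "local")
        .contentType(MediaType.APPLICATION_JSON)
        .bodyValue(Map.of(
            "resourceType", "TOPIC",
            "resourceName", "*",
            "namePatternType", "LITERAL",
            "principal", "User:test1",
            "host", "*",
            "operation", "READ",
            "permission", "ALLOW"))
        .retrieve()
        .toBodilessEntity()
        .block();
  }
}
```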
@@ -3622,7 +3809,9 @@ components:
          type: array
          items:
            type: string
        pattern:
        fieldsNamePattern:
          type: string
        maskingCharsReplacement:
          type: array
          items:
            type: string
@@ -18,7 +18,7 @@
    <httpcomponents.version>5.2.1</httpcomponents.version>
    <selenium.version>4.8.1</selenium.version>
    <selenide.version>6.12.3</selenide.version>
    <testng.version>7.7.0</testng.version>
    <testng.version>7.7.1</testng.version>
    <allure.version>2.21.0</allure.version>
    <qase.io.version>3.0.4</qase.io.version>
    <aspectj.version>1.9.9.1</aspectj.version>
@@ -48,7 +48,8 @@ public class BrokersList extends BasePage {
  }

  private List<SelenideElement> getEnabledColumnHeaders() {
    return Stream.of("Broker ID", "Segment Size", "Segment Count", "Port", "Host")
    return Stream.of("Broker ID", "Disk usage", "Partitions skew",
            "Leaders", "Leader skew", "Online partitions", "Port", "Host")
        .map(name -> $x(String.format(columnHeaderLocator, name)))
        .collect(Collectors.toList());
  }
@@ -3,6 +3,7 @@ package com.provectus.kafka.ui.pages.ksqldb;
import static com.codeborne.selenide.Condition.visible;
import static com.codeborne.selenide.Selenide.$$x;
import static com.codeborne.selenide.Selenide.$x;
import static com.codeborne.selenide.Selenide.sleep;

import com.codeborne.selenide.CollectionCondition;
import com.codeborne.selenide.Condition;

@@ -17,11 +18,12 @@ import java.util.List;
public class KsqlQueryForm extends BasePage {
  protected SelenideElement clearBtn = $x("//div/button[text()='Clear']");
  protected SelenideElement executeBtn = $x("//div/button[text()='Execute']");
  protected SelenideElement stopQueryBtn = $x("//div/button[text()='Stop query']");
  protected SelenideElement clearResultsBtn = $x("//div/button[text()='Clear results']");
  protected SelenideElement addStreamPropertyBtn = $x("//button[text()='Add Stream Property']");
  protected SelenideElement queryAreaValue = $x("//div[@class='ace_content']");
  protected SelenideElement queryArea = $x("//div[@id='ksql']/textarea[@class='ace_text-input']");
  protected SelenideElement abortButton = $x("//div[@role='status']/div[text()='Abort']");
  protected SelenideElement cancelledAlert = $x("//div[@role='status'][text()='Cancelled']");
  protected ElementsCollection ksqlGridItems = $$x("//tbody//tr");
  protected ElementsCollection keyField = $$x("//input[@aria-label='key']");
  protected ElementsCollection valueField = $$x("//input[@aria-label='value']");

@@ -36,6 +38,7 @@ public class KsqlQueryForm extends BasePage {
  @Step
  public KsqlQueryForm clickClearBtn() {
    clickByJavaScript(clearBtn);
    sleep(500);
    return this;
  }

@@ -48,7 +51,7 @@ public class KsqlQueryForm extends BasePage {
  public KsqlQueryForm clickExecuteBtn(String query) {
    clickByActions(executeBtn);
    if (query.contains("EMIT CHANGES")) {
      loadingSpinner.shouldBe(Condition.visible);
      abortButton.shouldBe(Condition.visible);
    } else {
      waitUntilSpinnerDisappear();
    }

@@ -56,12 +59,21 @@ public class KsqlQueryForm extends BasePage {
  }

  @Step
  public KsqlQueryForm clickStopQueryBtn() {
    clickByActions(stopQueryBtn);
    waitUntilSpinnerDisappear();
  public boolean isAbortBtnVisible() {
    return isVisible(abortButton);
  }

  @Step
  public KsqlQueryForm clickAbortBtn() {
    clickByActions(abortButton);
    return this;
  }

  @Step
  public boolean isCancelledAlertVisible() {
    return isVisible(cancelledAlert);
  }

  @Step
  public KsqlQueryForm clickClearResultsBtn() {
    clickByActions(clearResultsBtn);
@@ -36,29 +36,31 @@ import org.springframework.web.reactive.function.client.WebClientResponseException;
@Slf4j
public class ApiService extends BaseSource {

  private final ApiClient apiClient = new ApiClient().setBasePath(BASE_API_URL);

  @SneakyThrows
  private TopicsApi topicApi() {
    return new TopicsApi(new ApiClient().setBasePath(BASE_API_URL));
    return new TopicsApi(apiClient);
  }

  @SneakyThrows
  private SchemasApi schemaApi() {
    return new SchemasApi(new ApiClient().setBasePath(BASE_API_URL));
    return new SchemasApi(apiClient);
  }

  @SneakyThrows
  private KafkaConnectApi connectorApi() {
    return new KafkaConnectApi(new ApiClient().setBasePath(BASE_API_URL));
    return new KafkaConnectApi(apiClient);
  }

  @SneakyThrows
  private MessagesApi messageApi() {
    return new MessagesApi(new ApiClient().setBasePath(BASE_API_URL));
    return new MessagesApi(apiClient);
  }

  @SneakyThrows
  private KsqlApi ksqlApi() {
    return new KsqlApi(new ApiClient().setBasePath(BASE_API_URL));
    return new KsqlApi(apiClient);
  }

  @SneakyThrows
@@ -2,6 +2,7 @@ package com.provectus.kafka.ui.manualsuite.backlog;

import static com.provectus.kafka.ui.qasesuite.BaseQaseTest.BROKERS_SUITE_ID;
import static com.provectus.kafka.ui.qasesuite.BaseQaseTest.KSQL_DB_SUITE_ID;
import static com.provectus.kafka.ui.qasesuite.BaseQaseTest.SCHEMAS_SUITE_ID;
import static com.provectus.kafka.ui.qasesuite.BaseQaseTest.TOPICS_PROFILE_SUITE_ID;
import static com.provectus.kafka.ui.utilities.qase.enums.State.TO_BE_AUTOMATED;

@@ -20,52 +21,66 @@ public class SmokeBacklog extends BaseManualTest {
  public void testCaseA() {
  }

  @Automation(state = TO_BE_AUTOMATED)
  @Suite(id = KSQL_DB_SUITE_ID)
  @QaseId(277)
  @Test
  public void testCaseB() {
  }

  @Automation(state = TO_BE_AUTOMATED)
  @Suite(id = KSQL_DB_SUITE_ID)
  @QaseId(278)
  @Test
  public void testCaseC() {
  }

  @Automation(state = TO_BE_AUTOMATED)
  @Suite(id = KSQL_DB_SUITE_ID)
  @QaseId(284)
  @Test
  public void testCaseD() {
  }

  @Automation(state = TO_BE_AUTOMATED)
  @Suite(id = BROKERS_SUITE_ID)
  @QaseId(331)
  @Test
  public void testCaseE() {
  public void testCaseB() {
  }

  @Automation(state = TO_BE_AUTOMATED)
  @Suite(id = BROKERS_SUITE_ID)
  @QaseId(332)
  @Test
  public void testCaseF() {
  public void testCaseC() {
  }

  @Automation(state = TO_BE_AUTOMATED)
  @Suite(id = TOPICS_PROFILE_SUITE_ID)
  @QaseId(335)
  @Test
  public void testCaseG() {
  public void testCaseD() {
  }

  @Automation(state = TO_BE_AUTOMATED)
  @Suite(id = TOPICS_PROFILE_SUITE_ID)
  @QaseId(336)
  @Test
  public void testCaseE() {
  }

  @Automation(state = TO_BE_AUTOMATED)
  @Suite(id = TOPICS_PROFILE_SUITE_ID)
  @QaseId(343)
  @Test
  public void testCaseF() {
  }

  @Automation(state = TO_BE_AUTOMATED)
  @Suite(id = KSQL_DB_SUITE_ID)
  @QaseId(344)
  @Test
  public void testCaseG() {
  }

  @Automation(state = TO_BE_AUTOMATED)
  @Suite(id = SCHEMAS_SUITE_ID)
  @QaseId(345)
  @Test
  public void testCaseH() {
  }

  @Automation(state = TO_BE_AUTOMATED)
  @Suite(id = SCHEMAS_SUITE_ID)
  @QaseId(346)
  @Test
  public void testCaseI() {
  }

  @Automation(state = TO_BE_AUTOMATED)
  @Suite(id = TOPICS_PROFILE_SUITE_ID)
  @QaseId(347)
  @Test
  public void testCaseJ() {
  }
}
@@ -92,4 +92,28 @@ public class TopicsTest extends BaseManualTest {
  @Test
  public void testCaseN() {
  }

  @Automation(state = NOT_AUTOMATED)
  @QaseId(337)
  @Test
  public void testCaseO() {
  }

  @Automation(state = NOT_AUTOMATED)
  @QaseId(339)
  @Test
  public void testCaseP() {
  }

  @Automation(state = NOT_AUTOMATED)
  @QaseId(341)
  @Test
  public void testCaseQ() {
  }

  @Automation(state = NOT_AUTOMATED)
  @QaseId(342)
  @Test
  public void testCaseR() {
  }
}
@@ -14,4 +14,16 @@ public class WizardTest extends BaseManualTest {
  @Test
  public void testCaseA() {
  }

  @Automation(state = NOT_AUTOMATED)
  @QaseId(338)
  @Test
  public void testCaseB() {
  }

  @Automation(state = NOT_AUTOMATED)
  @QaseId(340)
  @Test
  public void testCaseC() {
  }
}
@@ -1,5 +1,8 @@
package com.provectus.kafka.ui.smokesuite.ksqldb;

import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlMenuTabs.STREAMS;
import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SELECT_ALL_FROM;
import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_STREAMS;
import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_TABLES;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;

@@ -39,17 +42,21 @@ public class KsqlDbTest extends BaseTest {
        FIRST_TABLE.getName(), SECOND_TABLE.getName()));
  }

  @QaseId(86)
  @QaseId(284)
  @Test(priority = 1)
  public void clearResultsForExecutedRequest() {
    navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
  public void streamsAndTablesVisibilityCheck() {
    naviSideBar
        .openSideMenu(KSQL_DB);
    ksqlDbList
        .waitUntilScreenReady();
    SoftAssert softly = new SoftAssert();
    softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
    softly.assertAll();
    ksqlQueryForm
        .clickClearResultsBtn();
    softly.assertFalse(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
    softly.assertTrue(ksqlDbList.getTableByName(FIRST_TABLE.getName()).isVisible(), "getTableByName()");
    softly.assertTrue(ksqlDbList.getTableByName(SECOND_TABLE.getName()).isVisible(), "getTableByName()");
    softly.assertAll();
    ksqlDbList
        .openDetailsTab(STREAMS)
        .waitUntilScreenReady();
    Assert.assertTrue(ksqlDbList.getStreamByName(DEFAULT_STREAM.getName()).isVisible(), "getStreamByName()");
  }

  @QaseId(276)

@@ -68,11 +75,52 @@ public class KsqlDbTest extends BaseTest {
    navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
    SoftAssert softly = new SoftAssert();
    softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
    softly.assertTrue(ksqlQueryForm.getItemByName(FIRST_TABLE.getName()).isVisible(), "getItemByName()");
    softly.assertTrue(ksqlQueryForm.getItemByName(SECOND_TABLE.getName()).isVisible(), "getItemByName()");
    softly.assertTrue(ksqlQueryForm.getItemByName(FIRST_TABLE.getName()).isVisible(),
        String.format("getItemByName(%s)", FIRST_TABLE.getName()));
    softly.assertTrue(ksqlQueryForm.getItemByName(SECOND_TABLE.getName()).isVisible(),
        String.format("getItemByName(%s)", SECOND_TABLE.getName()));
    softly.assertAll();
  }

  @QaseId(278)
  @Test(priority = 4)
  public void checkShowStreamsRequestExecution() {
    navigateToKsqlDbAndExecuteRequest(SHOW_STREAMS.getQuery());
    SoftAssert softly = new SoftAssert();
    softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
    softly.assertTrue(ksqlQueryForm.getItemByName(DEFAULT_STREAM.getName()).isVisible(),
        String.format("getItemByName(%s)", FIRST_TABLE.getName()));
    softly.assertAll();
  }

  @QaseId(86)
  @Test(priority = 5)
  public void clearResultsForExecutedRequest() {
    navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
    SoftAssert softly = new SoftAssert();
    softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
    softly.assertAll();
    ksqlQueryForm
        .clickClearResultsBtn();
    softly.assertFalse(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
    softly.assertAll();
  }

  @QaseId(277)
  @Test(priority = 6)
  public void stopQueryFunctionalCheck() {
    navigateToKsqlDbAndExecuteRequest(String.format(SELECT_ALL_FROM.getQuery(), FIRST_TABLE.getName()));
    Assert.assertTrue(ksqlQueryForm.isAbortBtnVisible(), "isAbortBtnVisible()");
    ksqlQueryForm
        .clickAbortBtn();
    Assert.assertTrue(ksqlQueryForm.isCancelledAlertVisible(), "isCancelledAlertVisible()");
  }

  @AfterClass(alwaysRun = true)
  public void afterClass() {
    TOPIC_NAMES_LIST.forEach(topicName -> apiService.deleteTopic(topicName));
  }

  @Step
  private void navigateToKsqlDbAndExecuteRequest(String query) {
    naviSideBar

@@ -85,9 +133,4 @@ public class KsqlDbTest extends BaseTest {
        .setQuery(query)
        .clickExecuteBtn(query);
  }

  @AfterClass(alwaysRun = true)
  public void afterClass() {
    TOPIC_NAMES_LIST.forEach(topicName -> apiService.deleteTopic(topicName));
  }
}
@@ -486,11 +486,7 @@ public class TopicsTest extends BaseTest {
    topicDetails
        .waitUntilScreenReady();
    TOPIC_LIST.add(topicToCopy);
    SoftAssert softly = new SoftAssert();
    softly.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS, "Topic successfully created."),
        "isAlertWithMessageVisible()");
    softly.assertTrue(topicDetails.isTopicHeaderVisible(topicToCopy.getName()), "isTopicHeaderVisible()");
    softly.assertAll();
    Assert.assertTrue(topicDetails.isTopicHeaderVisible(topicToCopy.getName()), "isTopicHeaderVisible()");
  }

  @AfterClass(alwaysRun = true)
13 kafka-ui-react-app/src/components/ACLPage/ACLPage.tsx Normal file
@@ -0,0 +1,13 @@
import React from 'react';
import { Routes, Route } from 'react-router-dom';
import ACList from 'components/ACLPage/List/List';

const ACLPage = () => {
  return (
    <Routes>
      <Route index element={<ACList />} />
    </Routes>
  );
};

export default ACLPage;
@@ -0,0 +1,44 @@
import styled from 'styled-components';

export const EnumCell = styled.div`
  text-transform: capitalize;
`;

export const DeleteCell = styled.div`
  svg {
    cursor: pointer;
  }
`;

export const Chip = styled.div<{
  chipType?: 'default' | 'success' | 'danger' | 'secondary' | string;
}>`
  width: fit-content;
  text-transform: capitalize;
  padding: 2px 8px;
  font-size: 12px;
  line-height: 16px;
  border-radius: 16px;
  color: ${({ theme }) => theme.tag.color};
  background-color: ${({ theme, chipType }) => {
    switch (chipType) {
      case 'success':
        return theme.tag.backgroundColor.green;
      case 'danger':
        return theme.tag.backgroundColor.red;
      case 'secondary':
        return theme.tag.backgroundColor.secondary;
      default:
        return theme.tag.backgroundColor.gray;
    }
  }};
`;

export const PatternCell = styled.div`
  display: flex;
  align-items: center;

  ${Chip} {
    margin-left: 4px;
  }
`;
153 kafka-ui-react-app/src/components/ACLPage/List/List.tsx Normal file
@@ -0,0 +1,153 @@
import React from 'react';
import { ColumnDef } from '@tanstack/react-table';
import { useTheme } from 'styled-components';
import PageHeading from 'components/common/PageHeading/PageHeading';
import Table from 'components/common/NewTable';
import DeleteIcon from 'components/common/Icons/DeleteIcon';
import { useConfirm } from 'lib/hooks/useConfirm';
import useAppParams from 'lib/hooks/useAppParams';
import { useAcls, useDeleteAcl } from 'lib/hooks/api/acl';
import { ClusterName } from 'redux/interfaces';
import {
  KafkaAcl,
  KafkaAclNamePatternType,
  KafkaAclPermissionEnum,
} from 'generated-sources';

import * as S from './List.styled';

const ACList: React.FC = () => {
  const { clusterName } = useAppParams<{ clusterName: ClusterName }>();
  const theme = useTheme();
  const { data: aclList } = useAcls(clusterName);
  const { deleteResource } = useDeleteAcl(clusterName);
  const modal = useConfirm(true);

  const [rowId, setRowId] = React.useState('');

  const onDeleteClick = (acl: KafkaAcl | null) => {
    if (acl) {
      modal('Are you sure want to delete this ACL record?', () =>
        deleteResource(acl)
      );
    }
  };

  const columns = React.useMemo<ColumnDef<KafkaAcl>[]>(
    () => [
      {
        header: 'Principal',
        accessorKey: 'principal',
        size: 257,
      },
      {
        header: 'Resource',
        accessorKey: 'resourceType',
        // eslint-disable-next-line react/no-unstable-nested-components
        cell: ({ getValue }) => (
          <S.EnumCell>{getValue<string>().toLowerCase()}</S.EnumCell>
        ),
        size: 145,
      },
      {
        header: 'Pattern',
        accessorKey: 'resourceName',
        // eslint-disable-next-line react/no-unstable-nested-components
        cell: ({ getValue, row }) => {
          let chipType;
          if (
            row.original.namePatternType === KafkaAclNamePatternType.PREFIXED
          ) {
            chipType = 'default';
          }

          if (
            row.original.namePatternType === KafkaAclNamePatternType.LITERAL
          ) {
            chipType = 'secondary';
          }
          return (
            <S.PatternCell>
              {getValue<string>()}
              {chipType ? (
                <S.Chip chipType={chipType}>
                  {row.original.namePatternType.toLowerCase()}
                </S.Chip>
              ) : null}
            </S.PatternCell>
          );
        },
        size: 257,
      },
      {
        header: 'Host',
        accessorKey: 'host',
        size: 257,
      },
      {
        header: 'Operation',
        accessorKey: 'operation',
        // eslint-disable-next-line react/no-unstable-nested-components
        cell: ({ getValue }) => (
          <S.EnumCell>{getValue<string>().toLowerCase()}</S.EnumCell>
        ),
        size: 121,
      },
      {
        header: 'Permission',
        accessorKey: 'permission',
        // eslint-disable-next-line react/no-unstable-nested-components
        cell: ({ getValue }) => (
          <S.Chip
            chipType={
              getValue<string>() === KafkaAclPermissionEnum.ALLOW
                ? 'success'
                : 'danger'
            }
          >
            {getValue<string>().toLowerCase()}
          </S.Chip>
        ),
        size: 111,
      },
      {
        id: 'delete',
        // eslint-disable-next-line react/no-unstable-nested-components
        cell: ({ row }) => {
          return (
            <S.DeleteCell onClick={() => onDeleteClick(row.original)}>
              <DeleteIcon
                fill={
                  rowId === row.id ? theme.acl.table.deleteIcon : 'transparent'
                }
              />
            </S.DeleteCell>
          );
        },
        size: 76,
      },
    ],
    [rowId]
  );

  const onRowHover = (value: unknown) => {
    if (value && typeof value === 'object' && 'id' in value) {
      setRowId(value.id as string);
    }
  };

  return (
    <>
      <PageHeading text="Access Control List" />
      <Table
        columns={columns}
        data={aclList ?? []}
        emptyMessage="No ACL items found"
        onRowHover={onRowHover}
        onMouseLeave={() => setRowId('')}
      />
    </>
  );
};

export default ACList;
@@ -0,0 +1,74 @@
import React from 'react';
import { render, WithRoute } from 'lib/testHelpers';
import { screen } from '@testing-library/dom';
import userEvent from '@testing-library/user-event';
import { clusterACLPath } from 'lib/paths';
import ACList from 'components/ACLPage/List/List';
import { useAcls, useDeleteAcl } from 'lib/hooks/api/acl';
import { aclPayload } from 'lib/fixtures/acls';

jest.mock('lib/hooks/api/acl', () => ({
  useAcls: jest.fn(),
  useDeleteAcl: jest.fn(),
}));

describe('ACLList Component', () => {
  const clusterName = 'local';
  const renderComponent = () =>
    render(
      <WithRoute path={clusterACLPath()}>
        <ACList />
      </WithRoute>,
      {
        initialEntries: [clusterACLPath(clusterName)],
      }
    );

  describe('ACLList', () => {
    describe('when the acls are loaded', () => {
      beforeEach(() => {
        (useAcls as jest.Mock).mockImplementation(() => ({
          data: aclPayload,
        }));
        (useDeleteAcl as jest.Mock).mockImplementation(() => ({
          deleteResource: jest.fn(),
        }));
      });

      it('renders ACLList with records', async () => {
        renderComponent();
        expect(screen.getByRole('table')).toBeInTheDocument();
        expect(screen.getAllByRole('row').length).toEqual(4);
      });

      it('shows delete icon on hover', async () => {
        const { container } = renderComponent();
        const [trElement] = screen.getAllByRole('row');
        await userEvent.hover(trElement);
        const deleteElement = container.querySelector('svg');
        expect(deleteElement).not.toHaveStyle({
          fill: 'transparent',
        });
      });
    });

    describe('when it has no acls', () => {
      beforeEach(() => {
        (useAcls as jest.Mock).mockImplementation(() => ({
          data: [],
        }));
        (useDeleteAcl as jest.Mock).mockImplementation(() => ({
          deleteResource: jest.fn(),
        }));
      });

      it('renders empty ACLList with message', async () => {
        renderComponent();
        expect(screen.getByRole('table')).toBeInTheDocument();
        expect(
          screen.getByRole('row', { name: 'No ACL items found' })
        ).toBeInTheDocument();
      });
    });
  });
});
@@ -34,7 +34,11 @@ const Configs: React.FC = () => {

  const getData = () => {
    return data
      .filter((item) => item.name.toLocaleLowerCase().indexOf(keyword) > -1)
      .filter(
        (item) =>
          item.name.toLocaleLowerCase().indexOf(keyword.toLocaleLowerCase()) >
          -1
      )
      .sort((a, b) => {
        if (a.source === b.source) return 0;

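Note: the Configs change above lowercases both the config name and the search keyword before matching; the replaced line lowercased only the name, so an upper-case search term could never match. A minimal sketch of the same case-insensitive check (names here are illustrative, not part of the diff):

// Hypothetical predicate mirroring the case-insensitive match used above.
const matchesKeyword = (name: string, keyword: string): boolean =>
  name.toLocaleLowerCase().includes(keyword.toLocaleLowerCase());

// matchesKeyword('SEGMENT.BYTES', 'segment') === true
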
@@ -11,7 +11,9 @@ import CheckMarkRoundIcon from 'components/common/Icons/CheckMarkRoundIcon';
import { ColumnDef } from '@tanstack/react-table';
import { clusterBrokerPath } from 'lib/paths';
import Tooltip from 'components/common/Tooltip/Tooltip';
import ColoredCell from 'components/common/NewTable/ColoredCell';

import SkewHeader from './SkewHeader/SkewHeader';
import * as S from './BrokersList.styled';

const NA = 'N/A';

@@ -57,11 +59,15 @@ const BrokersList: React.FC = () => {
        count: segmentCount || NA,
        port: broker?.port,
        host: broker?.host,
        partitionsLeader: broker?.partitionsLeader,
        partitionsSkew: broker?.partitionsSkew,
        leadersSkew: broker?.leadersSkew,
        inSyncPartitions: broker?.inSyncPartitions,
      };
    });
  }, [diskUsage, brokers]);

  const columns = React.useMemo<ColumnDef<typeof rows>[]>(
  const columns = React.useMemo<ColumnDef<(typeof rows)[number]>[]>(
    () => [
      {
        header: 'Broker ID',

@@ -84,7 +90,7 @@ const BrokersList: React.FC = () => {
        ),
      },
      {
        header: 'Segment Size',
        header: 'Disk usage',
        accessorKey: 'size',
        // eslint-disable-next-line react/no-unstable-nested-components
        cell: ({ getValue, table, cell, column, renderValue, row }) =>

@@ -98,10 +104,56 @@ const BrokersList: React.FC = () => {
            cell={cell}
            getValue={getValue}
            renderValue={renderValue}
            renderSegments
          />
        ),
      },
      { header: 'Segment Count', accessorKey: 'count' },
      {
        // eslint-disable-next-line react/no-unstable-nested-components
        header: () => <SkewHeader />,
        accessorKey: 'partitionsSkew',
        // eslint-disable-next-line react/no-unstable-nested-components
        cell: ({ getValue }) => {
          const value = getValue<number>();
          return (
            <ColoredCell
              value={value ? `${value.toFixed(2)}%` : '-'}
              warn={value >= 10 && value < 20}
              attention={value >= 20}
            />
          );
        },
      },
      { header: 'Leaders', accessorKey: 'partitionsLeader' },
      {
        header: 'Leader skew',
        accessorKey: 'leadersSkew',
        // eslint-disable-next-line react/no-unstable-nested-components
        cell: ({ getValue }) => {
          const value = getValue<number>();
          return (
            <ColoredCell
              value={value ? `${value.toFixed(2)}%` : '-'}
              warn={value >= 10 && value < 20}
              attention={value >= 20}
            />
          );
        },
      },
      {
        header: 'Online partitions',
        accessorKey: 'inSyncPartitions',
        // eslint-disable-next-line react/no-unstable-nested-components
        cell: ({ getValue, row }) => {
          const value = getValue<number>();
          return (
            <ColoredCell
              value={value}
              attention={value !== row.original.count}
            />
          );
        },
      },
      { header: 'Port', accessorKey: 'port' },
      {
        header: 'Host',

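Note: both skew columns added above drive ColoredCell with the same thresholds: a divergence from 10% up to 20% gets the warn style, 20% or more gets attention, and a missing or zero value is rendered as '-'. A reduced sketch of that decision logic (a hypothetical helper, not part of the diff):

// Hypothetical severity mapping matching the warn/attention props used above.
type SkewSeverity = 'none' | 'warn' | 'attention';

const skewSeverity = (value: number | undefined): SkewSeverity => {
  if (value === undefined || value < 10) return 'none';
  return value < 20 ? 'warn' : 'attention';
};

// skewSeverity(12.5) === 'warn'; skewSeverity(25) === 'attention'
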
@@ -0,0 +1,11 @@
import styled from 'styled-components';
import { MessageTooltip } from 'components/common/Tooltip/Tooltip.styled';

export const CellWrapper = styled.div`
  display: flex;
  gap: 10px;

  ${MessageTooltip} {
    max-height: unset;
  }
`;

@@ -0,0 +1,17 @@
import React from 'react';
import Tooltip from 'components/common/Tooltip/Tooltip';
import InfoIcon from 'components/common/Icons/InfoIcon';

import * as S from './SkewHeader.styled';

const SkewHeader: React.FC = () => (
  <S.CellWrapper>
    Partitions skew
    <Tooltip
      value={<InfoIcon />}
      content="The divergence from the average brokers' value"
    />
  </S.CellWrapper>
);

export default SkewHeader;

@@ -13,6 +13,7 @@ import {
  clusterTopicsRelativePath,
  clusterConfigRelativePath,
  getNonExactPath,
  clusterAclRelativePath,
} from 'lib/paths';
import ClusterContext from 'components/contexts/ClusterContext';
import PageLoader from 'components/common/PageLoader/PageLoader';

@@ -30,6 +31,7 @@ const ClusterConfigPage = React.lazy(
const ConsumerGroups = React.lazy(
  () => import('components/ConsumerGroups/ConsumerGroups')
);
const AclPage = React.lazy(() => import('components/ACLPage/ACLPage'));

const ClusterPage: React.FC = () => {
  const { clusterName } = useAppParams<ClusterNameRoute>();

@@ -51,6 +53,9 @@ const ClusterPage: React.FC = () => {
        ClusterFeaturesEnum.TOPIC_DELETION
      ),
      hasKsqlDbConfigured: features.includes(ClusterFeaturesEnum.KSQL_DB),
      hasAclViewConfigured:
        features.includes(ClusterFeaturesEnum.KAFKA_ACL_VIEW) ||
        features.includes(ClusterFeaturesEnum.KAFKA_ACL_EDIT),
    };
  }, [clusterName, data]);

@@ -95,6 +100,12 @@ const ClusterPage: React.FC = () => {
              element={<KsqlDb />}
            />
          )}
          {contextValue.hasAclViewConfigured && (
            <Route
              path={getNonExactPath(clusterAclRelativePath)}
              element={<AclPage />}
            />
          )}
          {appInfo.hasDynamicConfig && (
            <Route
              path={getNonExactPath(clusterConfigRelativePath)}

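Note: the ACL route above is only mounted when the cluster reports the KAFKA_ACL_VIEW or KAFKA_ACL_EDIT feature, so the lazily loaded AclPage chunk is never requested for clusters without ACL support. A stripped-down sketch of the same gating, with placeholder paths and prop names (and assuming a Suspense boundary exists higher in the tree):

// Hypothetical feature-gated, lazily loaded route, mirroring the pattern above.
import React from 'react';
import { Route, Routes } from 'react-router-dom';

const AclPage = React.lazy(() => import('components/ACLPage/ACLPage'));

const ClusterRoutes: React.FC<{ hasAclView: boolean }> = ({ hasAclView }) => (
  <Routes>
    {/* The route is registered only when the feature flag is on. */}
    {hasAclView && <Route path="acl/*" element={<AclPage />} />}
  </Routes>
);

export default ClusterRoutes;
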
@@ -37,7 +37,7 @@ const Config: React.FC = () => {
    formState: { isDirty, isSubmitting, isValid, errors },
    setValue,
  } = useForm<FormValues>({
    mode: 'onTouched',
    mode: 'onChange',
    resolver: yupResolver(validationSchema),
    defaultValues: {
      config: JSON.stringify(config, null, '\t'),

@@ -1,26 +1,41 @@
import React from 'react';
import { FullConnectorInfo } from 'generated-sources';
import {
  Action,
  ConnectorAction,
  ConnectorState,
  FullConnectorInfo,
  ResourceType,
} from 'generated-sources';
import { CellContext } from '@tanstack/react-table';
import { ClusterNameRoute } from 'lib/paths';
import useAppParams from 'lib/hooks/useAppParams';
import { Dropdown, DropdownItem } from 'components/common/Dropdown';
import { useDeleteConnector } from 'lib/hooks/api/kafkaConnect';
import {
  useDeleteConnector,
  useUpdateConnectorState,
} from 'lib/hooks/api/kafkaConnect';
import { useConfirm } from 'lib/hooks/useConfirm';
import { useIsMutating } from '@tanstack/react-query';
import { ActionDropdownItem } from 'components/common/ActionComponent';

const ActionsCell: React.FC<CellContext<FullConnectorInfo, unknown>> = ({
  row,
}) => {
  const { connect, name } = row.original;

  const { connect, name, status } = row.original;
  const { clusterName } = useAppParams<ClusterNameRoute>();

  const mutationsNumber = useIsMutating();
  const isMutating = mutationsNumber > 0;
  const confirm = useConfirm();
  const deleteMutation = useDeleteConnector({
    clusterName,
    connectName: connect,
    connectorName: name,
  });

  const stateMutation = useUpdateConnectorState({
    clusterName,
    connectName: connect,
    connectorName: name,
  });
  const handleDelete = () => {
    confirm(
      <>

@@ -31,8 +46,66 @@ const ActionsCell: React.FC<CellContext<FullConnectorInfo, unknown>> = ({
      }
    );
  };
  // const stateMutation = useUpdateConnectorState(routerProps);
  const resumeConnectorHandler = () =>
    stateMutation.mutateAsync(ConnectorAction.RESUME);
  const restartConnectorHandler = () =>
    stateMutation.mutateAsync(ConnectorAction.RESTART);

  const restartAllTasksHandler = () =>
    stateMutation.mutateAsync(ConnectorAction.RESTART_ALL_TASKS);

  const restartFailedTasksHandler = () =>
    stateMutation.mutateAsync(ConnectorAction.RESTART_FAILED_TASKS);

  return (
    <Dropdown>
      {status.state === ConnectorState.PAUSED && (
        <ActionDropdownItem
          onClick={resumeConnectorHandler}
          disabled={isMutating}
          permission={{
            resource: ResourceType.CONNECT,
            action: Action.EDIT,
            value: name,
          }}
        >
          Resume
        </ActionDropdownItem>
      )}
      <ActionDropdownItem
        onClick={restartConnectorHandler}
        disabled={isMutating}
        permission={{
          resource: ResourceType.CONNECT,
          action: Action.EDIT,
          value: name,
        }}
      >
        Restart Connector
      </ActionDropdownItem>
      <ActionDropdownItem
        onClick={restartAllTasksHandler}
        disabled={isMutating}
        permission={{
          resource: ResourceType.CONNECT,
          action: Action.EDIT,
          value: name,
        }}
      >
        Restart All Tasks
      </ActionDropdownItem>
      <ActionDropdownItem
        onClick={restartFailedTasksHandler}
        disabled={isMutating}
        permission={{
          resource: ResourceType.CONNECT,
          action: Action.EDIT,
          value: name,
        }}
      >
        Restart Failed Tasks
      </ActionDropdownItem>
      <DropdownItem onClick={handleDelete} danger>
        Remove Connector
      </DropdownItem>

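Note: each dropdown item above maps to `stateMutation.mutateAsync(ConnectorAction.X)` and is disabled while any mutation is in flight, using react-query's global `useIsMutating()` counter. A simplified sketch of that guard (a hypothetical component, not from this diff):

// Hypothetical action button disabled while any react-query mutation is running.
import React from 'react';
import { useIsMutating } from '@tanstack/react-query';

interface Props {
  label: string;
  onAction: () => Promise<unknown>;
}

const ConnectorActionButton: React.FC<Props> = ({ label, onAction }) => {
  const isMutating = useIsMutating() > 0;
  return (
    <button type="button" disabled={isMutating} onClick={() => onAction()}>
      {label}
    </button>
  );
};

export default ConnectorActionButton;
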
@@ -9,7 +9,11 @@ import { screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { render, WithRoute } from 'lib/testHelpers';
import { clusterConnectConnectorPath, clusterConnectorsPath } from 'lib/paths';
import { useConnectors, useDeleteConnector } from 'lib/hooks/api/kafkaConnect';
import {
  useConnectors,
  useDeleteConnector,
  useUpdateConnectorState,
} from 'lib/hooks/api/kafkaConnect';

const mockedUsedNavigate = jest.fn();
const mockDelete = jest.fn();

@@ -22,6 +26,7 @@ jest.mock('react-router-dom', () => ({
jest.mock('lib/hooks/api/kafkaConnect', () => ({
  useConnectors: jest.fn(),
  useDeleteConnector: jest.fn(),
  useUpdateConnectorState: jest.fn(),
}));

const clusterName = 'local';

@@ -42,6 +47,10 @@ describe('Connectors List', () => {
      (useConnectors as jest.Mock).mockImplementation(() => ({
        data: connectors,
      }));
      const restartConnector = jest.fn();
      (useUpdateConnectorState as jest.Mock).mockImplementation(() => ({
        mutateAsync: restartConnector,
      }));
    });

    it('renders', async () => {

@@ -1,17 +1,31 @@
import React from 'react';
import { Cluster } from 'generated-sources';
import React, { useMemo } from 'react';
import { Cluster, ResourceType } from 'generated-sources';
import { CellContext } from '@tanstack/react-table';
import { Button } from 'components/common/Button/Button';
import { clusterConfigPath } from 'lib/paths';
import { useGetUserInfo } from 'lib/hooks/api/roles';
import { ActionCanButton } from 'components/common/ActionComponent';

type Props = CellContext<Cluster, unknown>;

const ClusterTableActionsCell: React.FC<Props> = ({ row }) => {
  const { name } = row.original;
  const { data } = useGetUserInfo();

  const isApplicationConfig = useMemo(() => {
    return !!data?.userInfo?.permissions.some(
      (permission) => permission.resource === ResourceType.APPLICATIONCONFIG
    );
  }, [data]);

  return (
    <Button buttonType="secondary" buttonSize="S" to={clusterConfigPath(name)}>
    <ActionCanButton
      buttonType="secondary"
      buttonSize="S"
      to={clusterConfigPath(name)}
      canDoAction={isApplicationConfig}
    >
      Configure
    </Button>
    </ActionCanButton>
  );
};

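Note: `isApplicationConfig` above simply asks whether any of the current user's permissions targets ResourceType.APPLICATIONCONFIG and passes the result to ActionCanButton's canDoAction. A reduced sketch of that check (the permission shape here is assumed from the surrounding code, not taken verbatim from the diff):

// Hypothetical standalone version of the permission check used above.
interface UserPermission {
  resource: string;
}

const canConfigureApplication = (
  permissions: UserPermission[] | undefined
): boolean =>
  Boolean(permissions?.some((p) => p.resource === 'APPLICATIONCONFIG'));
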
@@ -1,27 +1,28 @@
import React, { useEffect } from 'react';
import React, { useMemo } from 'react';
import PageHeading from 'components/common/PageHeading/PageHeading';
import * as Metrics from 'components/common/Metrics';
import { Tag } from 'components/common/Tag/Tag.styled';
import Switch from 'components/common/Switch/Switch';
import { useClusters } from 'lib/hooks/api/clusters';
import { Cluster, ServerStatus } from 'generated-sources';
import { Cluster, ResourceType, ServerStatus } from 'generated-sources';
import { ColumnDef } from '@tanstack/react-table';
import Table, { SizeCell } from 'components/common/NewTable';
import useBoolean from 'lib/hooks/useBoolean';
import { Button } from 'components/common/Button/Button';
import { clusterNewConfigPath } from 'lib/paths';
import { GlobalSettingsContext } from 'components/contexts/GlobalSettingsContext';
import { useNavigate } from 'react-router-dom';
import { ActionCanButton } from 'components/common/ActionComponent';
import { useGetUserInfo } from 'lib/hooks/api/roles';

import * as S from './Dashboard.styled';
import ClusterName from './ClusterName';
import ClusterTableActionsCell from './ClusterTableActionsCell';

const Dashboard: React.FC = () => {
  const { data } = useGetUserInfo();
  const clusters = useClusters();
  const { value: showOfflineOnly, toggle } = useBoolean(false);
  const appInfo = React.useContext(GlobalSettingsContext);
  const navigate = useNavigate();

  const config = React.useMemo(() => {
    const clusterList = clusters.data || [];
    const offlineClusters = clusterList.filter(

@@ -56,12 +57,11 @@ const Dashboard: React.FC = () => {
    return initialColumns;
  }, []);

  useEffect(() => {
    if (appInfo.hasDynamicConfig && !clusters.data) {
      navigate(clusterNewConfigPath);
    }
  }, [clusters, appInfo.hasDynamicConfig]);

  const isApplicationConfig = useMemo(() => {
    return !!data?.userInfo?.permissions.some(
      (permission) => permission.resource === ResourceType.APPLICATIONCONFIG
    );
  }, [data]);
  return (
    <>
      <PageHeading text="Dashboard" />

@@ -87,9 +87,14 @@ const Dashboard: React.FC = () => {
          <label>Only offline clusters</label>
        </div>
        {appInfo.hasDynamicConfig && (
          <Button buttonType="primary" buttonSize="M" to={clusterNewConfigPath}>
          <ActionCanButton
            buttonType="primary"
            buttonSize="M"
            to={clusterNewConfigPath}
            canDoAction={isApplicationConfig}
          >
            Configure new cluster
          </Button>
          </ActionCanButton>
        )}
      </S.Toolbar>
      <Table

|
|||
import React from 'react';
|
||||
import { useClusters } from 'lib/hooks/api/clusters';
|
||||
import Dashboard from 'components/Dashboard/Dashboard';
|
||||
import { Cluster, ServerStatus } from 'generated-sources';
|
||||
import { render } from 'lib/testHelpers';
|
||||
|
||||
interface DataType {
|
||||
data: Cluster[] | undefined;
|
||||
}
|
||||
jest.mock('lib/hooks/api/clusters');
|
||||
const mockedNavigate = jest.fn();
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useNavigate: () => mockedNavigate,
|
||||
}));
|
||||
describe('Dashboard component', () => {
|
||||
const renderComponent = (hasDynamicConfig: boolean, data: DataType) => {
|
||||
const useClustersMock = useClusters as jest.Mock;
|
||||
useClustersMock.mockReturnValue(data);
|
||||
render(<Dashboard />, {
|
||||
globalSettings: { hasDynamicConfig },
|
||||
});
|
||||
};
|
||||
it('redirects to new cluster configuration page if there are no clusters and dynamic config is enabled', async () => {
|
||||
await renderComponent(true, { data: undefined });
|
||||
|
||||
expect(mockedNavigate).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not navigate to new cluster config page when there are clusters', async () => {
|
||||
await renderComponent(true, {
|
||||
data: [{ name: 'Cluster 1', status: ServerStatus.ONLINE }],
|
||||
});
|
||||
|
||||
expect(mockedNavigate).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not navigate to new cluster config page when there are no clusters and hasDynamicConfig is false', async () => {
|
||||
await renderComponent(false, {
|
||||
data: [],
|
||||
});
|
||||
|
||||
expect(mockedNavigate).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
|
@@ -33,6 +33,7 @@ export const Fieldset = styled.fieldset`
  flex: 1;
  flex-direction: column;
  gap: 8px;
  color: ${({ theme }) => theme.default.color.normal};
`;

export const ButtonsContainer = styled.div`

Some files were not shown because too many files have changed in this diff.