diff --git a/.github/workflows/aws_publisher.yaml b/.github/workflows/aws_publisher.yaml
index 0d0f081f56..c7b80c54f9 100644
--- a/.github/workflows/aws_publisher.yaml
+++ b/.github/workflows/aws_publisher.yaml
@@ -31,7 +31,7 @@ jobs:
echo "Packer will be triggered in this dir $WORK_DIR"
- name: Configure AWS credentials for Kafka-UI account
- uses: aws-actions/configure-aws-credentials@v1
+ uses: aws-actions/configure-aws-credentials@v1-node16
with:
aws-access-key-id: ${{ secrets.AWS_AMI_PUBLISH_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_AMI_PUBLISH_KEY_SECRET }}
diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml
index 17a2fc1007..dc4700458f 100644
--- a/.github/workflows/backend.yml
+++ b/.github/workflows/backend.yml
@@ -1,4 +1,4 @@
-name: backend
+name: Backend build and test
on:
push:
branches:
diff --git a/.github/workflows/block_merge.yml b/.github/workflows/block_merge.yml
index 28f5cde3f4..e1cdb3ac8e 100644
--- a/.github/workflows/block_merge.yml
+++ b/.github/workflows/block_merge.yml
@@ -6,7 +6,7 @@ jobs:
block_merge:
runs-on: ubuntu-latest
steps:
- - uses: mheap/github-action-required-labels@v2
+ - uses: mheap/github-action-required-labels@v3
with:
mode: exactly
count: 0
diff --git a/.github/workflows/branch-deploy.yml b/.github/workflows/branch-deploy.yml
index 0cd0ebb893..1cc5fee39b 100644
--- a/.github/workflows/branch-deploy.yml
+++ b/.github/workflows/branch-deploy.yml
@@ -1,4 +1,4 @@
-name: DeployFromBranch
+name: Feature testing init
on:
workflow_dispatch:
@@ -10,6 +10,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
+ with:
+ ref: ${{ github.event.pull_request.head.sha }}
- name: get branch name
id: extract_branch
run: |
@@ -43,7 +45,7 @@ jobs:
restore-keys: |
${{ runner.os }}-buildx-
- name: Configure AWS credentials for Kafka-UI account
- uses: aws-actions/configure-aws-credentials@v1
+ uses: aws-actions/configure-aws-credentials@v1-node16
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
diff --git a/.github/workflows/branch-remove.yml b/.github/workflows/branch-remove.yml
index 59024af6cb..c93fa89eba 100644
--- a/.github/workflows/branch-remove.yml
+++ b/.github/workflows/branch-remove.yml
@@ -1,4 +1,4 @@
-name: RemoveCustomDeployment
+name: Feature testing destroy
on:
workflow_dispatch:
pull_request:
diff --git a/.github/workflows/build-public-image.yml b/.github/workflows/build-public-image.yml
index 9db111c7a8..c79996d2ac 100644
--- a/.github/workflows/build-public-image.yml
+++ b/.github/workflows/build-public-image.yml
@@ -9,6 +9,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
+ with:
+ ref: ${{ github.event.pull_request.head.sha }}
- name: get branch name
id: extract_branch
run: |
@@ -40,7 +42,7 @@ jobs:
restore-keys: |
${{ runner.os }}-buildx-
- name: Configure AWS credentials for Kafka-UI account
- uses: aws-actions/configure-aws-credentials@v1
+ uses: aws-actions/configure-aws-credentials@v1-node16
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
diff --git a/.github/workflows/create-branch-for-helm.yaml b/.github/workflows/create-branch-for-helm.yaml
index b3e7d5f4ed..d755e3da8f 100644
--- a/.github/workflows/create-branch-for-helm.yaml
+++ b/.github/workflows/create-branch-for-helm.yaml
@@ -1,4 +1,4 @@
-name: prepare-helm-release
+name: Prepare helm release
on:
repository_dispatch:
types: [prepare-helm-release]
diff --git a/.github/workflows/cve.yaml b/.github/workflows/cve.yaml
index ef7d784ca1..5b0e2779dd 100644
--- a/.github/workflows/cve.yaml
+++ b/.github/workflows/cve.yaml
@@ -55,7 +55,7 @@ jobs:
cache-to: type=local,dest=/tmp/.buildx-cache
- name: Run CVE checks
- uses: aquasecurity/trivy-action@0.8.0
+ uses: aquasecurity/trivy-action@0.9.1
with:
image-ref: "provectuslabs/kafka-ui:${{ steps.build.outputs.version }}"
format: "table"
diff --git a/.github/workflows/delete-public-image.yml b/.github/workflows/delete-public-image.yml
index 47e08713d8..56c795d0b5 100644
--- a/.github/workflows/delete-public-image.yml
+++ b/.github/workflows/delete-public-image.yml
@@ -15,7 +15,7 @@ jobs:
tag='${{ github.event.pull_request.number }}'
echo "tag=${tag}" >> $GITHUB_OUTPUT
- name: Configure AWS credentials for Kafka-UI account
- uses: aws-actions/configure-aws-credentials@v1
+ uses: aws-actions/configure-aws-credentials@v1-node16
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
diff --git a/.github/workflows/documentation.yaml b/.github/workflows/documentation.yaml
index 5fef435fc9..70fff483da 100644
--- a/.github/workflows/documentation.yaml
+++ b/.github/workflows/documentation.yaml
@@ -1,4 +1,4 @@
-name: Documentation
+name: Documentation URLs linter
on:
pull_request:
types:
diff --git a/.github/workflows/e2e-automation.yml b/.github/workflows/e2e-automation.yml
new file mode 100644
index 0000000000..5a5018c9e5
--- /dev/null
+++ b/.github/workflows/e2e-automation.yml
@@ -0,0 +1,83 @@
+name: E2E Automation suite
+on:
+ workflow_dispatch:
+ inputs:
+ test_suite:
+ description: 'Select test suite to run'
+ default: 'regression'
+ required: true
+ type: choice
+ options:
+ - regression
+ - sanity
+ - smoke
+ qase_token:
+ description: 'Set Qase token to enable integration'
+ required: false
+ type: string
+
+jobs:
+ build-and-test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ ref: ${{ github.sha }}
+ - name: Set up environment
+ id: set_env_values
+ run: |
+ cat "./kafka-ui-e2e-checks/.env.ci" >> "./kafka-ui-e2e-checks/.env"
+ - name: Pull with Docker
+ id: pull_chrome
+ run: |
+ docker pull selenium/standalone-chrome:103.0
+ - name: Set up JDK
+ uses: actions/setup-java@v3
+ with:
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
+ - name: Build with Maven
+ id: build_app
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp clean install -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
+ - name: Compose with Docker
+ id: compose_app
+ # use the following command until #819 is fixed
+ run: |
+ docker-compose -f ./documentation/compose/e2e-tests.yaml up -d
+ - name: Run test suite
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp -DQASEIO_API_TOKEN=${{ github.event.inputs.qase_token }} -Dsurefire.suiteXmlFiles='src/test/resources/${{ github.event.inputs.test_suite }}.xml' -Dsuite=${{ github.event.inputs.test_suite }} -f 'kafka-ui-e2e-checks' test -Pprod
+ - name: Generate Allure report
+ uses: simple-elf/allure-report-action@master
+ if: always()
+ id: allure-report
+ with:
+ allure_results: ./kafka-ui-e2e-checks/allure-results
+ gh_pages: allure-results
+ allure_report: allure-report
+ subfolder: allure-results
+ report_url: "http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com"
+ - uses: jakejarvis/s3-sync-action@master
+ if: always()
+ env:
+ AWS_S3_BUCKET: 'kafkaui-allure-reports'
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_REGION: 'eu-central-1'
+ SOURCE_DIR: 'allure-history/allure-results'
+ - name: Deploy report to Amazon S3
+ if: always()
+ uses: Sibz/github-status-action@v1.1.6
+ with:
+ authToken: ${{secrets.GITHUB_TOKEN}}
+ context: "Test report"
+ state: "success"
+ sha: ${{ github.sha }}
+ target_url: http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com/${{ github.run_number }}
+ - name: Dump Docker logs on failure
+ if: failure()
+ uses: jwalton/gh-docker-logs@v2.2.1
diff --git a/.github/workflows/e2e-checks.yaml b/.github/workflows/e2e-checks.yaml
index 7725d33d4a..962a134684 100644
--- a/.github/workflows/e2e-checks.yaml
+++ b/.github/workflows/e2e-checks.yaml
@@ -1,7 +1,7 @@
-name: e2e-checks
+name: E2E PR health check
on:
pull_request_target:
- types: ["opened", "edited", "reopened", "synchronize"]
+ types: [ "opened", "edited", "reopened", "synchronize" ]
paths:
- "kafka-ui-api/**"
- "kafka-ui-contract/**"
@@ -15,6 +15,12 @@ jobs:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
+ - name: Configure AWS credentials for Kafka-UI account
+ uses: aws-actions/configure-aws-credentials@v1-node16
+ with:
+ aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ aws-region: eu-central-1
- name: Set the values
id: set_env_values
run: |
@@ -33,7 +39,7 @@ jobs:
id: build_app
run: |
./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
- ./mvnw -B -V -ntp clean package -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
+ ./mvnw -B -V -ntp clean install -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
- name: compose app
id: compose_app
# use the following command until #819 will be fixed
@@ -42,7 +48,7 @@ jobs:
- name: e2e run
run: |
./mvnw -B -ntp versions:set -DnewVersion=${{ github.event.pull_request.head.sha }}
- ./mvnw -B -V -ntp -DQASEIO_API_TOKEN=${{ secrets.QASEIO_API_TOKEN }} -pl '!kafka-ui-api' test -Pprod
+ ./mvnw -B -V -ntp -Dsurefire.suiteXmlFiles='src/test/resources/smoke.xml' -f 'kafka-ui-e2e-checks' test -Pprod
- name: Generate allure report
uses: simple-elf/allure-report-action@master
if: always()
@@ -57,8 +63,6 @@ jobs:
if: always()
env:
AWS_S3_BUCKET: 'kafkaui-allure-reports'
- AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
- AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_REGION: 'eu-central-1'
SOURCE_DIR: 'allure-history/allure-results'
- name: Post the link to allure report
@@ -66,7 +70,7 @@ jobs:
uses: Sibz/github-status-action@v1.1.6
with:
authToken: ${{secrets.GITHUB_TOKEN}}
- context: "Test report"
+ context: "Click Details button to open Allure report"
state: "success"
sha: ${{ github.event.pull_request.head.sha || github.sha }}
target_url: http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com/${{ github.run_number }}
diff --git a/.github/workflows/e2e-manual.yml b/.github/workflows/e2e-manual.yml
new file mode 100644
index 0000000000..68963e29fa
--- /dev/null
+++ b/.github/workflows/e2e-manual.yml
@@ -0,0 +1,43 @@
+name: E2E Manual suite
+on:
+ workflow_dispatch:
+ inputs:
+ test_suite:
+ description: 'Select test suite to run'
+ default: 'manual'
+ required: true
+ type: choice
+ options:
+ - manual
+ - qase
+ qase_token:
+ description: 'Set Qase token to enable integration'
+ required: true
+ type: string
+
+jobs:
+ build-and-test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ ref: ${{ github.sha }}
+ - name: Set up environment
+ id: set_env_values
+ run: |
+ cat "./kafka-ui-e2e-checks/.env.ci" >> "./kafka-ui-e2e-checks/.env"
+ - name: Set up JDK
+ uses: actions/setup-java@v3
+ with:
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
+ - name: Build with Maven
+ id: build_app
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp clean install -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
+ - name: Run test suite
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp -DQASEIO_API_TOKEN=${{ github.event.inputs.qase_token }} -Dsurefire.suiteXmlFiles='src/test/resources/${{ github.event.inputs.test_suite }}.xml' -Dsuite=${{ github.event.inputs.test_suite }} -f 'kafka-ui-e2e-checks' test -Pprod
diff --git a/.github/workflows/e2e-weekly.yml b/.github/workflows/e2e-weekly.yml
new file mode 100644
index 0000000000..2bf2001ec2
--- /dev/null
+++ b/.github/workflows/e2e-weekly.yml
@@ -0,0 +1,70 @@
+name: E2E Weekly suite
+on:
+ schedule:
+ - cron: '0 1 * * 1'
+
+jobs:
+ build-and-test:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ ref: ${{ github.sha }}
+ - name: Set up environment
+ id: set_env_values
+ run: |
+ cat "./kafka-ui-e2e-checks/.env.ci" >> "./kafka-ui-e2e-checks/.env"
+ - name: Pull with Docker
+ id: pull_chrome
+ run: |
+ docker pull selenium/standalone-chrome:103.0
+ - name: Set up JDK
+ uses: actions/setup-java@v3
+ with:
+ java-version: '17'
+ distribution: 'zulu'
+ cache: 'maven'
+ - name: Build with Maven
+ id: build_app
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp clean install -Pprod -Dmaven.test.skip=true ${{ github.event.inputs.extraMavenOptions }}
+ - name: Compose with Docker
+ id: compose_app
+ # use the following command until #819 is fixed
+ run: |
+ docker-compose -f ./documentation/compose/e2e-tests.yaml up -d
+ - name: Run test suite
+ run: |
+ ./mvnw -B -ntp versions:set -DnewVersion=${{ github.sha }}
+ ./mvnw -B -V -ntp -DQASEIO_API_TOKEN=${{ secrets.QASEIO_API_TOKEN }} -Dsurefire.suiteXmlFiles='src/test/resources/sanity.xml' -Dsuite=weekly -f 'kafka-ui-e2e-checks' test -Pprod
+ - name: Generate Allure report
+ uses: simple-elf/allure-report-action@master
+ if: always()
+ id: allure-report
+ with:
+ allure_results: ./kafka-ui-e2e-checks/allure-results
+ gh_pages: allure-results
+ allure_report: allure-report
+ subfolder: allure-results
+ report_url: "http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com"
+ - uses: jakejarvis/s3-sync-action@master
+ if: always()
+ env:
+ AWS_S3_BUCKET: 'kafkaui-allure-reports'
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ AWS_REGION: 'eu-central-1'
+ SOURCE_DIR: 'allure-history/allure-results'
+ - name: Deploy report to Amazon S3
+ if: always()
+ uses: Sibz/github-status-action@v1.1.6
+ with:
+ authToken: ${{secrets.GITHUB_TOKEN}}
+ context: "Test report"
+ state: "success"
+ sha: ${{ github.sha }}
+ target_url: http://kafkaui-allure-reports.s3-website.eu-central-1.amazonaws.com/${{ github.run_number }}
+ - name: Dump Docker logs on failure
+ if: failure()
+ uses: jwalton/gh-docker-logs@v2.2.1
diff --git a/.github/workflows/frontend.yaml b/.github/workflows/frontend.yaml
index baa2551d1c..05ef18e383 100644
--- a/.github/workflows/frontend.yaml
+++ b/.github/workflows/frontend.yaml
@@ -1,4 +1,4 @@
-name: frontend
+name: Frontend build and test
on:
push:
branches:
@@ -24,7 +24,7 @@ jobs:
with:
version: 7.4.0
- name: Install node
- uses: actions/setup-node@v3.5.1
+ uses: actions/setup-node@v3.6.0
with:
node-version: "16.15.0"
cache: "pnpm"
diff --git a/.github/workflows/helm.yaml b/.github/workflows/helm.yaml
index b8c88a4305..427c63e0b3 100644
--- a/.github/workflows/helm.yaml
+++ b/.github/workflows/helm.yaml
@@ -1,4 +1,4 @@
-name: Helm
+name: Helm linter
on:
pull_request:
types: ["opened", "edited", "reopened", "synchronize"]
diff --git a/.github/workflows/master.yaml b/.github/workflows/master.yaml
index 4d1173958b..01651ee76d 100644
--- a/.github/workflows/master.yaml
+++ b/.github/workflows/master.yaml
@@ -1,4 +1,4 @@
-name: Master
+name: Master branch build & deploy
on:
workflow_dispatch:
push:
@@ -9,6 +9,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
+ with:
+ ref: ${{ github.event.pull_request.head.sha }}
- name: Set up JDK
uses: actions/setup-java@v3
diff --git a/.github/workflows/pr-checks.yaml b/.github/workflows/pr-checks.yaml
index 74ff75b833..80f4e6eddc 100644
--- a/.github/workflows/pr-checks.yaml
+++ b/.github/workflows/pr-checks.yaml
@@ -7,7 +7,7 @@ jobs:
task-check:
runs-on: ubuntu-latest
steps:
- - uses: kentaro-m/task-completed-checker-action@v0.1.0
+ - uses: kentaro-m/task-completed-checker-action@v0.1.1
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
- uses: dekinderfiets/pr-description-enforcer@0.0.1
diff --git a/.github/workflows/release-serde-api.yaml b/.github/workflows/release-serde-api.yaml
index 227dbcec24..e9a35ee30e 100644
--- a/.github/workflows/release-serde-api.yaml
+++ b/.github/workflows/release-serde-api.yaml
@@ -1,4 +1,4 @@
-name: Release-serde-api
+name: Release serde api
on: workflow_dispatch
jobs:
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 0e87c10fd2..1990fbdbc5 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -12,6 +12,7 @@ jobs:
- uses: actions/checkout@v3
with:
fetch-depth: 0
+ ref: ${{ github.event.pull_request.head.sha }}
- run: |
git config user.name github-actions
@@ -33,7 +34,7 @@ jobs:
echo "version=${VERSION}" >> $GITHUB_OUTPUT
- name: Upload files to a GitHub release
- uses: svenstaro/upload-release-action@2.3.0
+ uses: svenstaro/upload-release-action@2.5.0
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: kafka-ui-api/target/kafka-ui-api-${{ steps.build.outputs.version }}.jar
diff --git a/.github/workflows/separate_env_public_create.yml b/.github/workflows/separate_env_public_create.yml
index 551001031c..e10b1a382c 100644
--- a/.github/workflows/separate_env_public_create.yml
+++ b/.github/workflows/separate_env_public_create.yml
@@ -12,6 +12,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
+ with:
+ ref: ${{ github.event.pull_request.head.sha }}
- name: get branch name
id: extract_branch
run: |
@@ -45,7 +47,7 @@ jobs:
restore-keys: |
${{ runner.os }}-buildx-
- name: Configure AWS credentials for Kafka-UI account
- uses: aws-actions/configure-aws-credentials@v1
+ uses: aws-actions/configure-aws-credentials@v1-node16
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml
index aafb50ceda..0a5d2e064c 100644
--- a/.github/workflows/stale.yaml
+++ b/.github/workflows/stale.yaml
@@ -7,7 +7,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- - uses: actions/stale@v6
+ - uses: actions/stale@v7
with:
days-before-issue-stale: 7
days-before-issue-close: 3
diff --git a/.github/workflows/terraform-deploy.yml b/.github/workflows/terraform-deploy.yml
index 8ec07be0a9..6df90e7338 100644
--- a/.github/workflows/terraform-deploy.yml
+++ b/.github/workflows/terraform-deploy.yml
@@ -1,4 +1,4 @@
-name: terraform_deploy
+name: Terraform deploy
on:
workflow_dispatch:
inputs:
diff --git a/README.md b/README.md
index a0ca023bc0..e924621536 100644
--- a/README.md
+++ b/README.md
@@ -185,32 +185,30 @@ For example, if you want to use an environment variable to set the `name` parame
|`KAFKA_CLUSTERS_0_KSQLDBSERVERAUTH_PASSWORD` | KSQL DB server's basic authentication password
|`KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_KEYSTORELOCATION` |Path to the JKS keystore to communicate to KSQL DB
|`KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_KEYSTOREPASSWORD` |Password of the JKS keystore for KSQL DB
-|`KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_TRUSTSTORELOCATION` |Path to the JKS truststore to communicate to KSQL DB
-|`KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_TRUSTSTOREPASSWORD` |Password of the JKS truststore for KSQL DB
|`KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL` |Security protocol to connect to the brokers. For SSL connection use "SSL", for plaintext connection don't set this environment variable
|`KAFKA_CLUSTERS_0_SCHEMAREGISTRY` |SchemaRegistry's address
|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_USERNAME` |SchemaRegistry's basic authentication username
|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_PASSWORD` |SchemaRegistry's basic authentication password
|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTORELOCATION` |Path to the JKS keystore to communicate to SchemaRegistry
|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTOREPASSWORD` |Password of the JKS keystore for SchemaRegistry
-|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_TRUSTSTORELOCATION` |Path to the JKS truststore to communicate to SchemaRegistry
-|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_TRUSTSTOREPASSWORD` |Password of the JKS truststore for SchemaRegistry
+|`KAFKA_CLUSTERS_0_METRICS_SSL` |Enable SSL for Metrics (for PROMETHEUS metrics type). Default: false.
+|`KAFKA_CLUSTERS_0_METRICS_USERNAME` |Username for Metrics authentication
+|`KAFKA_CLUSTERS_0_METRICS_PASSWORD` |Password for Metrics authentication
+|`KAFKA_CLUSTERS_0_METRICS_KEYSTORELOCATION` |Path to the JKS keystore to communicate to metrics source (JMX/PROMETHEUS). For advanced setup, see `kafka-ui-jmx-secured.yml`
+|`KAFKA_CLUSTERS_0_METRICS_KEYSTOREPASSWORD` |Password of the JKS metrics keystore
|`KAFKA_CLUSTERS_0_SCHEMANAMETEMPLATE` |How keys are saved to schemaRegistry
|`KAFKA_CLUSTERS_0_METRICS_PORT` |Open metrics port of a broker
|`KAFKA_CLUSTERS_0_METRICS_TYPE` |Type of metrics retriever to use. Valid values are JMX (default) or PROMETHEUS. If Prometheus, then metrics are read from prometheus-jmx-exporter instead of jmx
|`KAFKA_CLUSTERS_0_READONLY` |Enable read-only mode. Default: false
-|`KAFKA_CLUSTERS_0_DISABLELOGDIRSCOLLECTION` |Disable collecting segments information. It should be true for confluent cloud. Default: false
|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME` |Given name for the Kafka Connect cluster
|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS` |Address of the Kafka Connect service endpoint
|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_USERNAME`| Kafka Connect cluster's basic authentication username
|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_PASSWORD`| Kafka Connect cluster's basic authentication password
|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_KEYSTORELOCATION`| Path to the JKS keystore to communicate to Kafka Connect
|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_KEYSTOREPASSWORD`| Password of the JKS keystore for Kafka Connect
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_TRUSTSTORELOCATION`| Path to the JKS truststore to communicate to Kafka Connect
-|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_TRUSTSTOREPASSWORD`| Password of the JKS truststore for Kafka Connect
-|`KAFKA_CLUSTERS_0_METRICS_SSL` |Enable SSL for Metrics? `true` or `false`. For advanced setup, see `kafka-ui-jmx-secured.yml`
-|`KAFKA_CLUSTERS_0_METRICS_USERNAME` |Username for Metrics authentication
-|`KAFKA_CLUSTERS_0_METRICS_PASSWORD` |Password for Metrics authentication
|`KAFKA_CLUSTERS_0_POLLING_THROTTLE_RATE` |Max traffic rate (bytes/sec) that kafka-ui allowed to reach when polling messages from the cluster. Default: 0 (not limited)
+|`KAFKA_CLUSTERS_0_SSL_TRUSTSTORELOCATION`| Path to the JKS truststore to communicate to Kafka Connect, SchemaRegistry, KSQL, Metrics
+|`KAFKA_CLUSTERS_0_SSL_TRUSTSTOREPASSWORD`| Password of the JKS truststore for Kafka Connect, SchemaRegistry, KSQL, Metrics
|`TOPIC_RECREATE_DELAY_SECONDS` |Time delay between topic deletion and topic creation attempts for topic recreate functionality. Default: 1
|`TOPIC_RECREATE_MAXRETRIES` |Number of attempts of topic creation after topic deletion for topic recreate functionality. Default: 15
+|`DYNAMIC_CONFIG_ENABLED`|Allows changing the application config at runtime. Default: false.
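
Note: with the per-client truststore variables removed above, a single cluster-level truststore now serves Kafka Connect, SchemaRegistry, KSQL and Metrics. A minimal sketch of the new style, assuming a compose setup like those under documentation/compose/ (service name and paths are illustrative):

    services:
      kafka-ui:
        image: provectuslabs/kafka-ui:latest
        environment:
          KAFKA_CLUSTERS_0_NAME: local
          KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092
          # one truststore for Kafka Connect, SchemaRegistry, KSQL, Metrics
          KAFKA_CLUSTERS_0_SSL_TRUSTSTORELOCATION: /kafka.truststore.jks
          KAFKA_CLUSTERS_0_SSL_TRUSTSTOREPASSWORD: secret
          DYNAMIC_CONFIG_ENABLED: 'true'
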
diff --git a/charts/kafka-ui/Chart.yaml b/charts/kafka-ui/Chart.yaml
index 28e1a892aa..553d193b79 100644
--- a/charts/kafka-ui/Chart.yaml
+++ b/charts/kafka-ui/Chart.yaml
@@ -2,6 +2,6 @@ apiVersion: v2
name: kafka-ui
description: A Helm chart for kafka-UI
type: application
-version: 0.5.1
-appVersion: v0.5.0
+version: 0.6.0
+appVersion: v0.6.0
icon: https://github.com/provectus/kafka-ui/raw/master/documentation/images/kafka-ui-logo.png
diff --git a/charts/kafka-ui/templates/_helpers.tpl b/charts/kafka-ui/templates/_helpers.tpl
index 510452d4cf..7155681a44 100644
--- a/charts/kafka-ui/templates/_helpers.tpl
+++ b/charts/kafka-ui/templates/_helpers.tpl
@@ -68,6 +68,11 @@ This allows us to check if the registry of the image is specified or not.
*/}}
{{- define "kafka-ui.imageName" -}}
{{- $registryName := .Values.image.registry -}}
+{{- if .Values.global }}
+ {{- if .Values.global.imageRegistry }}
+ {{- $registryName = .Values.global.imageRegistry -}}
+ {{- end -}}
+{{- end -}}
{{- $repository := .Values.image.repository -}}
{{- $tag := .Values.image.tag | default .Chart.AppVersion -}}
{{- if $registryName }}
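
Note: the helper change above lets a chart-wide global.imageRegistry take precedence over image.registry, following the common Helm "global" convention. A hedged values-override sketch (registry host is illustrative):

    global:
      imageRegistry: my-registry.example.com   # wins when set
    image:
      registry: docker.io                      # used only if no global registry is set
      repository: provectuslabs/kafka-ui
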
diff --git a/charts/kafka-ui/templates/ingress.yaml b/charts/kafka-ui/templates/ingress.yaml
index e4b33439c4..13e746d8d1 100644
--- a/charts/kafka-ui/templates/ingress.yaml
+++ b/charts/kafka-ui/templates/ingress.yaml
@@ -35,7 +35,7 @@ spec:
{{- if and ($.Capabilities.APIVersions.Has "networking.k8s.io/v1") $isHigher1p19 -}}
{{- range .Values.ingress.precedingPaths }}
- path: {{ .path }}
- pathType: Prefix
+ pathType: {{ .Values.ingress.pathType }}
backend:
service:
name: {{ .serviceName }}
@@ -47,13 +47,13 @@ spec:
name: {{ $fullName }}
port:
number: {{ $svcPort }}
- pathType: Prefix
+ pathType: {{ .Values.ingress.pathType }}
{{- if .Values.ingress.path }}
path: {{ .Values.ingress.path }}
{{- end }}
{{- range .Values.ingress.succeedingPaths }}
- path: {{ .path }}
- pathType: Prefix
+ pathType: {{ .Values.ingress.pathType }}
backend:
service:
name: {{ .serviceName }}
diff --git a/charts/kafka-ui/templates/secret.yaml b/charts/kafka-ui/templates/secret.yaml
index a2d1f25fa2..1f974503dc 100644
--- a/charts/kafka-ui/templates/secret.yaml
+++ b/charts/kafka-ui/templates/secret.yaml
@@ -1,3 +1,4 @@
+{{- if .Values.envs.secret -}}
apiVersion: v1
kind: Secret
metadata:
@@ -9,3 +10,4 @@ data:
{{- range $key, $val := .Values.envs.secret }}
{{ $key }}: {{ $val | b64enc | quote }}
{{- end -}}
+{{- end}}
\ No newline at end of file
diff --git a/charts/kafka-ui/values.yaml b/charts/kafka-ui/values.yaml
index dce32059e1..3c30b40813 100644
--- a/charts/kafka-ui/values.yaml
+++ b/charts/kafka-ui/values.yaml
@@ -111,6 +111,9 @@ ingress:
# The path for the Ingress
path: "/"
+ # The path type for the Ingress
+ pathType: "Prefix"
+
# The hostname for the Ingress
host: ""
diff --git a/documentation/compose/jaas/client.properties b/documentation/compose/jaas/client.properties
old mode 100644
new mode 100755
diff --git a/documentation/compose/jaas/kafka_connect.jaas b/documentation/compose/jaas/kafka_connect.jaas
old mode 100644
new mode 100755
diff --git a/documentation/compose/jaas/kafka_connect.password b/documentation/compose/jaas/kafka_connect.password
old mode 100644
new mode 100755
diff --git a/documentation/compose/jaas/kafka_server.conf b/documentation/compose/jaas/kafka_server.conf
index ef41c992e2..25388be5aa 100644
--- a/documentation/compose/jaas/kafka_server.conf
+++ b/documentation/compose/jaas/kafka_server.conf
@@ -11,4 +11,4 @@ KafkaClient {
user_admin="admin-secret";
};
-Client {};
\ No newline at end of file
+Client {};
diff --git a/documentation/compose/jaas/schema_registry.jaas b/documentation/compose/jaas/schema_registry.jaas
old mode 100644
new mode 100755
diff --git a/documentation/compose/jaas/schema_registry.password b/documentation/compose/jaas/schema_registry.password
old mode 100644
new mode 100755
diff --git a/documentation/compose/kafka-ssl-components.yaml b/documentation/compose/kafka-ssl-components.yaml
index 0c1287b647..407ce5b97a 100644
--- a/documentation/compose/kafka-ssl-components.yaml
+++ b/documentation/compose/kafka-ssl-components.yaml
@@ -15,27 +15,25 @@ services:
KAFKA_CLUSTERS_0_NAME: local
KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SSL
KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092 # SSL LISTENER!
- KAFKA_CLUSTERS_0_PROPERTIES_SSL_TRUSTSTORE_LOCATION: /kafka.truststore.jks
- KAFKA_CLUSTERS_0_PROPERTIES_SSL_TRUSTSTORE_PASSWORD: secret
- KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_LOCATION: /kafka.keystore.jks
- KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_PASSWORD: secret
KAFKA_CLUSTERS_0_PROPERTIES_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: '' # DISABLE COMMON NAME VERIFICATION
+
KAFKA_CLUSTERS_0_SCHEMAREGISTRY: https://schemaregistry0:8085
KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTORELOCATION: /kafka.keystore.jks
KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTOREPASSWORD: "secret"
- KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_TRUSTSTORELOCATION: /kafka.truststore.jks
- KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_TRUSTSTOREPASSWORD: "secret"
+
KAFKA_CLUSTERS_0_KSQLDBSERVER: https://ksqldb0:8088
KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_KEYSTORELOCATION: /kafka.keystore.jks
KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_KEYSTOREPASSWORD: "secret"
- KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_TRUSTSTORELOCATION: /kafka.truststore.jks
- KAFKA_CLUSTERS_0_KSQLDBSERVERSSL_TRUSTSTOREPASSWORD: "secret"
+
KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: local
KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: https://kafka-connect0:8083
KAFKA_CLUSTERS_0_KAFKACONNECT_0_KEYSTORELOCATION: /kafka.keystore.jks
KAFKA_CLUSTERS_0_KAFKACONNECT_0_KEYSTOREPASSWORD: "secret"
- KAFKA_CLUSTERS_0_KAFKACONNECT_0_TRUSTSTORELOCATION: /kafka.truststore.jks
- KAFKA_CLUSTERS_0_KAFKACONNECT_0_TRUSTSTOREPASSWORD: "secret"
+
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTORELOCATION: /kafka.truststore.jks
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTOREPASSWORD: "secret"
+ DYNAMIC_CONFIG_ENABLED: 'true' # not necessary for ssl, added for tests
+
volumes:
- ./ssl/kafka.truststore.jks:/kafka.truststore.jks
- ./ssl/kafka.keystore.jks:/kafka.keystore.jks
diff --git a/documentation/compose/kafka-ssl.yml b/documentation/compose/kafka-ssl.yml
index 4fc7daebff..08ff9dc4af 100644
--- a/documentation/compose/kafka-ssl.yml
+++ b/documentation/compose/kafka-ssl.yml
@@ -11,11 +11,11 @@ services:
environment:
KAFKA_CLUSTERS_0_NAME: local
KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SSL
- KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092 # SSL LISTENER!
- KAFKA_CLUSTERS_0_PROPERTIES_SSL_TRUSTSTORE_LOCATION: /kafka.truststore.jks
- KAFKA_CLUSTERS_0_PROPERTIES_SSL_TRUSTSTORE_PASSWORD: secret
KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_LOCATION: /kafka.keystore.jks
- KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_PASSWORD: secret
+ KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_PASSWORD: "secret"
+ KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092 # SSL LISTENER!
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTORELOCATION: /kafka.truststore.jks
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTOREPASSWORD: "secret"
KAFKA_CLUSTERS_0_PROPERTIES_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: '' # DISABLE COMMON NAME VERIFICATION
volumes:
- ./ssl/kafka.truststore.jks:/kafka.truststore.jks
@@ -60,4 +60,4 @@ services:
- ./ssl/creds:/etc/kafka/secrets/creds
- ./ssl/kafka.truststore.jks:/etc/kafka/secrets/kafka.truststore.jks
- ./ssl/kafka.keystore.jks:/etc/kafka/secrets/kafka.keystore.jks
- command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
\ No newline at end of file
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
diff --git a/documentation/compose/kafka-ui-arm64.yaml b/documentation/compose/kafka-ui-arm64.yaml
index bbcefecbf4..87a892cc70 100644
--- a/documentation/compose/kafka-ui-arm64.yaml
+++ b/documentation/compose/kafka-ui-arm64.yaml
@@ -19,6 +19,7 @@ services:
KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://schema-registry0:8085
KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: first
KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: http://kafka-connect0:8083
+ DYNAMIC_CONFIG_ENABLED: 'true' # not necessary, added for tests
kafka0:
image: confluentinc/cp-kafka:7.2.1.arm64
diff --git a/documentation/compose/kafka-ui-jmx-secured.yml b/documentation/compose/kafka-ui-jmx-secured.yml
index de56a7e2c6..408f388ba5 100644
--- a/documentation/compose/kafka-ui-jmx-secured.yml
+++ b/documentation/compose/kafka-ui-jmx-secured.yml
@@ -7,11 +7,8 @@ services:
image: provectuslabs/kafka-ui:latest
ports:
- 8080:8080
- - 5005:5005
depends_on:
- kafka0
- - schemaregistry0
- - kafka-connect0
environment:
KAFKA_CLUSTERS_0_NAME: local
KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092
@@ -19,15 +16,12 @@ services:
KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: first
KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: http://kafka-connect0:8083
KAFKA_CLUSTERS_0_METRICS_PORT: 9997
- KAFKA_CLUSTERS_0_METRICS_SSL: 'true'
KAFKA_CLUSTERS_0_METRICS_USERNAME: root
KAFKA_CLUSTERS_0_METRICS_PASSWORD: password
- JAVA_OPTS: >-
- -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005
- -Djavax.net.ssl.trustStore=/jmx/clienttruststore
- -Djavax.net.ssl.trustStorePassword=12345678
- -Djavax.net.ssl.keyStore=/jmx/clientkeystore
- -Djavax.net.ssl.keyStorePassword=12345678
+ KAFKA_CLUSTERS_0_METRICS_KEYSTORE_LOCATION: /jmx/clientkeystore
+ KAFKA_CLUSTERS_0_METRICS_KEYSTORE_PASSWORD: '12345678'
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTORE_LOCATION: /jmx/clienttruststore
+ KAFKA_CLUSTERS_0_SSL_TRUSTSTORE_PASSWORD: '12345678'
volumes:
- ./jmx/clienttruststore:/jmx/clienttruststore
- ./jmx/clientkeystore:/jmx/clientkeystore
@@ -70,8 +64,6 @@ services:
-Dcom.sun.management.jmxremote.access.file=/jmx/jmxremote.access
-Dcom.sun.management.jmxremote.rmi.port=9997
-Djava.rmi.server.hostname=kafka0
- -Djava.rmi.server.logCalls=true
-# -Djavax.net.debug=ssl:handshake
volumes:
- ./jmx/serverkeystore:/jmx/serverkeystore
- ./jmx/servertruststore:/jmx/servertruststore
@@ -79,56 +71,3 @@ services:
- ./jmx/jmxremote.access:/jmx/jmxremote.access
- ./scripts/update_run.sh:/tmp/update_run.sh
command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
-
- schemaregistry0:
- image: confluentinc/cp-schema-registry:7.2.1
- ports:
- - 8085:8085
- depends_on:
- - kafka0
- environment:
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka0:29092
- SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
- SCHEMA_REGISTRY_HOST_NAME: schemaregistry0
- SCHEMA_REGISTRY_LISTENERS: http://schemaregistry0:8085
-
- SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
- SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
- SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
-
- kafka-connect0:
- image: confluentinc/cp-kafka-connect:7.2.1
- ports:
- - 8083:8083
- depends_on:
- - kafka0
- - schemaregistry0
- environment:
- CONNECT_BOOTSTRAP_SERVERS: kafka0:29092
- CONNECT_GROUP_ID: compose-connect-group
- CONNECT_CONFIG_STORAGE_TOPIC: _connect_configs
- CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
- CONNECT_OFFSET_STORAGE_TOPIC: _connect_offset
- CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
- CONNECT_STATUS_STORAGE_TOPIC: _connect_status
- CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
- CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
- CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
- CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.storage.StringConverter
- CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schemaregistry0:8085
- CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
- CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
- CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect0
- CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
-
- kafka-init-topics:
- image: confluentinc/cp-kafka:7.2.1
- volumes:
- - ./message.json:/data/message.json
- depends_on:
- - kafka0
- command: "bash -c 'echo Waiting for Kafka to be ready... && \
- cub kafka-ready -b kafka0:29092 1 30 && \
- kafka-topics --create --topic second.users --partitions 3 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
- kafka-topics --create --topic first.messages --partitions 2 --replication-factor 1 --if-not-exists --bootstrap-server kafka0:29092 && \
- kafka-console-producer --bootstrap-server kafka0:29092 --topic second.users < /data/message.json'"
\ No newline at end of file
diff --git a/documentation/compose/kafka-ui-sasl.yaml b/documentation/compose/kafka-ui-sasl.yaml
index 6dfe4f7532..e4a2b3cc4a 100644
--- a/documentation/compose/kafka-ui-sasl.yaml
+++ b/documentation/compose/kafka-ui-sasl.yaml
@@ -8,43 +8,45 @@ services:
ports:
- 8080:8080
depends_on:
- - zookeeper
- kafka
environment:
KAFKA_CLUSTERS_0_NAME: local
- KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:9093
- KAFKA_CLUSTERS_0_ZOOKEEPER: zookeeper:2181
+ KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092
KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SASL_PLAINTEXT
KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM: PLAIN
KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin-secret";'
-
- zookeeper:
- image: wurstmeister/zookeeper:3.4.6
- environment:
- JVMFLAGS: "-Djava.security.auth.login.config=/etc/zookeeper/zookeeper_jaas.conf"
- volumes:
- - ./jaas/zookeeper_jaas.conf:/etc/zookeeper/zookeeper_jaas.conf
- ports:
- - 2181:2181
+ DYNAMIC_CONFIG_ENABLED: 'true' # not necessary for sasl auth, added for tests
kafka:
- image: wurstmeister/kafka:2.13-2.8.1
- depends_on:
- - zookeeper
+ image: confluentinc/cp-kafka:7.2.1
+ hostname: kafka
+ container_name: kafka
ports:
- - 9092:9092
+ - "9092:9092"
+ - "9997:9997"
environment:
- KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.authorizer.AclAuthorizer
- KAFKA_SUPER_USERS: "User:admin"
- KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
- KAFKA_LISTENERS: INTERNAL://:9093,EXTERNAL://:9092
- KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka:9093,EXTERNAL://localhost:9092
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:SASL_PLAINTEXT,EXTERNAL:SASL_PLAINTEXT
- ALLOW_PLAINTEXT_LISTENER: 'yes'
- KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
- KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
- KAFKA_SASL_ENABLED_MECHANISMS: PLAIN
- KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: PLAIN
- KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/kafka_jaas.conf"
+ KAFKA_BROKER_ID: 1
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,SASL_PLAINTEXT:SASL_PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
+ KAFKA_ADVERTISED_LISTENERS: 'SASL_PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092'
+ KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/jaas/kafka_server.conf"
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_JMX_PORT: 9997
+ KAFKA_JMX_HOSTNAME: localhost
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_NODE_ID: 1
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka:29093'
+ KAFKA_LISTENERS: 'SASL_PLAINTEXT://kafka:29092,CONTROLLER://kafka:29093,PLAINTEXT_HOST://0.0.0.0:9092'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'SASL_PLAINTEXT'
+ KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
+ KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: 'PLAIN'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ KAFKA_SECURITY_PROTOCOL: 'SASL_PLAINTEXT'
+ KAFKA_SUPER_USERS: 'User:admin,User:enzo'
volumes:
- - ./jaas/kafka_server.conf:/etc/kafka/kafka_jaas.conf
\ No newline at end of file
+ - ./scripts/update_run.sh:/tmp/update_run.sh
+ - ./jaas:/etc/kafka/jaas
+ command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
diff --git a/documentation/compose/kafka-ui-serdes.yaml b/documentation/compose/kafka-ui-serdes.yaml
index 143d454bb5..eee510a13d 100644
--- a/documentation/compose/kafka-ui-serdes.yaml
+++ b/documentation/compose/kafka-ui-serdes.yaml
@@ -14,13 +14,16 @@ services:
kafka.clusters.0.name: SerdeExampleCluster
kafka.clusters.0.bootstrapServers: kafka0:29092
kafka.clusters.0.schemaRegistry: http://schemaregistry0:8085
- # optional auth and ssl properties for SR
+
+ # optional SSL settings for cluster (will be used by SchemaRegistry serde, if set)
+ #kafka.clusters.0.ssl.keystoreLocation: /kafka.keystore.jks
+ #kafka.clusters.0.ssl.keystorePassword: "secret"
+ #kafka.clusters.0.ssl.truststoreLocation: /kafka.truststore.jks
+ #kafka.clusters.0.ssl.truststorePassword: "secret"
+
+ # optional auth properties for SR
#kafka.clusters.0.schemaRegistryAuth.username: "use"
#kafka.clusters.0.schemaRegistryAuth.password: "pswrd"
- #kafka.clusters.0.schemaRegistrySSL.keystoreLocation: /kafka.keystore.jks
- #kafka.clusters.0.schemaRegistrySSL.keystorePassword: "secret"
- #kafka.clusters.0.schemaRegistrySSL.truststoreLocation: /kafka.truststore.jks
- #kafka.clusters.0.schemaRegistrySSL.truststorePassword: "secret"
kafka.clusters.0.defaultKeySerde: Int32 #optional
kafka.clusters.0.defaultValueSerde: String #optional
@@ -28,8 +31,7 @@ services:
kafka.clusters.0.serde.0.name: ProtobufFile
kafka.clusters.0.serde.0.topicKeysPattern: "topic1"
kafka.clusters.0.serde.0.topicValuesPattern: "topic1"
- kafka.clusters.0.serde.0.properties.protobufFiles.0: /protofiles/key-types.proto
- kafka.clusters.0.serde.0.properties.protobufFiles.1: /protofiles/values.proto
+ kafka.clusters.0.serde.0.properties.protobufFilesDir: /protofiles/
kafka.clusters.0.serde.0.properties.protobufMessageNameForKey: test.MyKey # default type for keys
kafka.clusters.0.serde.0.properties.protobufMessageName: test.MyValue # default type for values
kafka.clusters.0.serde.0.properties.protobufMessageNameForKeyByTopic.topic1: test.MySpecificTopicKey # keys type for topic "topic1"
@@ -52,7 +54,7 @@ services:
kafka.clusters.0.serde.4.properties.keySchemaNameTemplate: "%s-key"
kafka.clusters.0.serde.4.properties.schemaNameTemplate: "%s-value"
#kafka.clusters.0.serde.4.topicValuesPattern: "sr2-topic.*"
- # optional auth and ssl properties for SR:
+ # optional auth and ssl properties for SR (overrides cluster-level):
#kafka.clusters.0.serde.4.properties.username: "user"
#kafka.clusters.0.serde.4.properties.password: "passw"
#kafka.clusters.0.serde.4.properties.keystoreLocation: /kafka.keystore.jks
diff --git a/documentation/compose/kafka-ui.yaml b/documentation/compose/kafka-ui.yaml
index 32c874b676..8524f6fa2b 100644
--- a/documentation/compose/kafka-ui.yaml
+++ b/documentation/compose/kafka-ui.yaml
@@ -24,6 +24,7 @@ services:
KAFKA_CLUSTERS_1_BOOTSTRAPSERVERS: kafka1:29092
KAFKA_CLUSTERS_1_METRICS_PORT: 9998
KAFKA_CLUSTERS_1_SCHEMAREGISTRY: http://schemaregistry1:8085
+ DYNAMIC_CONFIG_ENABLED: 'true'
kafka0:
image: confluentinc/cp-kafka:7.2.1
diff --git a/documentation/compose/proto/key-types.proto b/documentation/compose/proto/key-types.proto
index 908aed5689..1f5e22a427 100644
--- a/documentation/compose/proto/key-types.proto
+++ b/documentation/compose/proto/key-types.proto
@@ -1,11 +1,15 @@
syntax = "proto3";
package test;
+import "google/protobuf/wrappers.proto";
+
message MyKey {
string myKeyF1 = 1;
+ google.protobuf.UInt64Value uint_64_wrapper = 2;
}
message MySpecificTopicKey {
string special_field1 = 1;
string special_field2 = 2;
+ google.protobuf.FloatValue float_wrapper = 3;
}
diff --git a/documentation/guides/Protobuf.md b/documentation/guides/Protobuf.md
index 533a6a8f48..12f92448c8 100644
--- a/documentation/guides/Protobuf.md
+++ b/documentation/guides/Protobuf.md
@@ -12,22 +12,26 @@ To configure Kafkaui to deserialize protobuf messages using a supplied protobuf
```yaml
kafka:
clusters:
- - # Cluster configuration omitted.
- # protobufFile is the path to the protobuf schema. (deprecated: please use "protobufFiles")
+ - # Cluster configuration omitted...
+ # protobufFilesDir specifies the root location for proto files (scanned recursively)
+ # NOTE: if 'protobufFilesDir' is specified, then 'protobufFile' and 'protobufFiles' settings are ignored
+ protobufFilesDir: "/path/to/my-protobufs"
+ # (DEPRECATED) protobufFile is the path to the protobuf schema (prefer "protobufFilesDir")
protobufFile: path/to/my.proto
- # protobufFiles is the path to one or more protobuf schemas.
- protobufFiles:
- - /path/to/my.proto
- - /path/to/another.proto
- # protobufMessageName is the default protobuf type that is used to deserilize
- # the message's value if the topic is not found in protobufMessageNameByTopic.
+ # (DEPRECATED) protobufFiles is the location of one or more protobuf schemas
+ protobufFiles:
+ - /path/to/my-protobufs/my.proto
+ - /path/to/my-protobufs/another.proto
+ - /path/to/my-protobufs:test/test.proto
+ # protobufMessageName is the default protobuf type that is used to deserialize
+ # the message's value if the topic is not found in protobufMessageNameByTopic.
protobufMessageName: my.DefaultValType
# protobufMessageNameByTopic is a mapping of topic names to protobuf types.
# This mapping is required and is used to deserialize the Kafka message's value.
protobufMessageNameByTopic:
topic1: my.Type1
topic2: my.Type2
- # protobufMessageNameForKey is the default protobuf type that is used to deserilize
+ # protobufMessageNameForKey is the default protobuf type that is used to deserialize
# the message's key if the topic is not found in protobufMessageNameForKeyByTopic.
protobufMessageNameForKey: my.DefaultKeyType
# protobufMessageNameForKeyByTopic is a mapping of topic names to protobuf types.
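
Note on protobufFilesDir: since the directory is scanned recursively, one root can hold schemas in subfolders. A hedged layout sketch (paths illustrative, mirroring the example above):

    # /path/to/my-protobufs/
    #   my.proto          # e.g. the schema defining my.DefaultValType
    #   another.proto
    #   test/
    #     test.proto      # nested folders are picked up by the recursive scan
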
diff --git a/documentation/guides/Serialization.md b/documentation/guides/Serialization.md
index 19869a8bc1..b9690f2cba 100644
--- a/documentation/guides/Serialization.md
+++ b/documentation/guides/Serialization.md
@@ -20,7 +20,7 @@ kafka:
clusters:
- name: Cluster1
# Other Cluster configuration omitted ...
- serdes:
+ serde:
# registering String serde with custom config
- name: AsciiString
className: com.provectus.kafka.ui.serdes.builtin.StringSerde
@@ -43,13 +43,11 @@ kafka:
clusters:
- name: Cluster1
# Other Cluster configuration omitted ...
- serdes:
+ serde:
- name: ProtobufFile
properties:
- # path to the protobuf schema files
- protobufFiles:
- - path/to/my.proto
- - path/to/another.proto
+ # path to the protobuf schema files directory
+ protobufFilesDir: "path/to/protofiles"
# default protobuf type that is used for KEY serialization/deserialization
# optional
protobufMessageNameForKey: my.Type1
@@ -84,7 +82,7 @@ kafka:
- name: Cluster1
# this url will be used by "SchemaRegistry" by default
schemaRegistry: http://main-schema-registry:8081
- serdes:
+ serde:
- name: AnotherSchemaRegistry
className: com.provectus.kafka.ui.serdes.builtin.sr.SchemaRegistrySerde
properties:
@@ -109,7 +107,7 @@ Sample configuration:
kafka:
clusters:
- name: Cluster1
- serdes:
+ serde:
- name: String
topicKeysPattern: click-events|imp-events
@@ -131,7 +129,7 @@ kafka:
- name: Cluster1
defaultKeySerde: Int32
defaultValueSerde: String
- serdes:
+ serde:
- name: Int32
topicKeysPattern: click-events|imp-events
```
@@ -156,7 +154,7 @@ Sample configuration:
kafka:
clusters:
- name: Cluster1
- serdes:
+ serde:
- name: MyCustomSerde
className: my.lovely.org.KafkaUiSerde
filePath: /var/lib/kui-serde/my-kui-serde.jar
diff --git a/kafka-ui-api/Dockerfile b/kafka-ui-api/Dockerfile
index 96e0eb414b..fcd29c0f06 100644
--- a/kafka-ui-api/Dockerfile
+++ b/kafka-ui-api/Dockerfile
@@ -1,8 +1,12 @@
-FROM azul/zulu-openjdk-alpine:17
+FROM azul/zulu-openjdk-alpine:17-jre
RUN apk add --no-cache gcompat # need to make snappy codec work
RUN addgroup -S kafkaui && adduser -S kafkaui -G kafkaui
+# creating folder for dynamic config usage (certificate uploads, etc.)
+RUN mkdir /etc/kafkaui/
+RUN chown kafkaui /etc/kafkaui
+
USER kafkaui
ARG JAR_FILE
@@ -12,4 +16,5 @@ ENV JAVA_OPTS=
EXPOSE 8080
-CMD java $JAVA_OPTS -jar kafka-ui-api.jar
+# see JmxSslSocketFactory docs to understand why add-opens is needed
+CMD java --add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED $JAVA_OPTS -jar kafka-ui-api.jar
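
Note: /etc/kafkaui is created above so the dynamic-config feature has a writable location (uploaded certificates, runtime config). A hedged compose sketch persisting it across restarts (volume name is illustrative):

    services:
      kafka-ui:
        image: provectuslabs/kafka-ui:latest
        environment:
          DYNAMIC_CONFIG_ENABLED: 'true'
        volumes:
          - kafkaui-config:/etc/kafkaui   # keep runtime config edits
    volumes:
      kafkaui-config:
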
diff --git a/kafka-ui-api/pom.xml b/kafka-ui-api/pom.xml
index 8764020693..e61827fd73 100644
--- a/kafka-ui-api/pom.xml
+++ b/kafka-ui-api/pom.xml
@@ -199,6 +199,31 @@
            <version>${antlr4-maven-plugin.version}</version>
        </dependency>
+        <dependency>
+            <groupId>org.opendatadiscovery</groupId>
+            <artifactId>oddrn-generator-java</artifactId>
+            <version>${odd-oddrn-generator.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.opendatadiscovery</groupId>
+            <artifactId>ingestion-contract-client</artifactId>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.springframework.boot</groupId>
+                    <artifactId>spring-boot-starter-webflux</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>io.projectreactor</groupId>
+                    <artifactId>reactor-core</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>io.projectreactor.ipc</groupId>
+                    <artifactId>reactor-netty</artifactId>
+                </exclusion>
+            </exclusions>
+            <version>${odd-oddrn-client.version}</version>
+        </dependency>
+
        <dependency>
            <groupId>org.springframework.security</groupId>
            <artifactId>spring-security-ldap</artifactId>
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/KafkaUiApplication.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/KafkaUiApplication.java
index a9a523eb85..8d0eafeff3 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/KafkaUiApplication.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/KafkaUiApplication.java
@@ -1,8 +1,10 @@
package com.provectus.kafka.ui;
-import org.springframework.boot.SpringApplication;
+import com.provectus.kafka.ui.util.DynamicConfigOperations;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.ldap.LdapAutoConfiguration;
+import org.springframework.boot.builder.SpringApplicationBuilder;
+import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
@@ -12,6 +14,13 @@ import org.springframework.scheduling.annotation.EnableScheduling;
public class KafkaUiApplication {
public static void main(String[] args) {
- SpringApplication.run(KafkaUiApplication.class, args);
+ startApplication(args);
+ }
+
+ public static ConfigurableApplicationContext startApplication(String[] args) {
+ return new SpringApplicationBuilder(KafkaUiApplication.class)
+ .initializers(DynamicConfigOperations.dynamicConfigPropertiesInitializer())
+ .build()
+ .run(args);
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/RetryingKafkaConnectClient.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/RetryingKafkaConnectClient.java
index be5686e2f9..5ec5a779d3 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/RetryingKafkaConnectClient.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/client/RetryingKafkaConnectClient.java
@@ -2,6 +2,7 @@ package com.provectus.kafka.ui.client;
import static com.provectus.kafka.ui.config.ClustersProperties.ConnectCluster;
+import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.connect.ApiClient;
import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
import com.provectus.kafka.ui.connect.model.Connector;
@@ -12,6 +13,7 @@ import com.provectus.kafka.ui.util.WebClientConfigurator;
import java.time.Duration;
import java.util.List;
import java.util.Map;
+import javax.annotation.Nullable;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.http.HttpHeaders;
@@ -31,8 +33,10 @@ public class RetryingKafkaConnectClient extends KafkaConnectClientApi {
private static final int MAX_RETRIES = 5;
private static final Duration RETRIES_DELAY = Duration.ofMillis(200);
- public RetryingKafkaConnectClient(ConnectCluster config, DataSize maxBuffSize) {
- super(new RetryingApiClient(config, maxBuffSize));
+ public RetryingKafkaConnectClient(ConnectCluster config,
+ @Nullable ClustersProperties.TruststoreConfig truststoreConfig,
+ DataSize maxBuffSize) {
+ super(new RetryingApiClient(config, truststoreConfig, maxBuffSize));
}
private static Retry conflictCodeRetry() {
@@ -77,23 +81,28 @@ public class RetryingKafkaConnectClient extends KafkaConnectClientApi {
private static class RetryingApiClient extends ApiClient {
- public RetryingApiClient(ConnectCluster config, DataSize maxBuffSize) {
- super(buildWebClient(maxBuffSize, config), null, null);
+ public RetryingApiClient(ConnectCluster config,
+ ClustersProperties.TruststoreConfig truststoreConfig,
+ DataSize maxBuffSize) {
+ super(buildWebClient(maxBuffSize, config, truststoreConfig), null, null);
setBasePath(config.getAddress());
- setUsername(config.getUserName());
+ setUsername(config.getUsername());
setPassword(config.getPassword());
}
- public static WebClient buildWebClient(DataSize maxBuffSize, ConnectCluster config) {
+ public static WebClient buildWebClient(DataSize maxBuffSize,
+ ConnectCluster config,
+ ClustersProperties.TruststoreConfig truststoreConfig) {
return new WebClientConfigurator()
.configureSsl(
- config.getKeystoreLocation(),
- config.getKeystorePassword(),
- config.getTruststoreLocation(),
- config.getTruststorePassword()
+ truststoreConfig,
+ new ClustersProperties.KeystoreConfig(
+ config.getKeystoreLocation(),
+ config.getKeystorePassword()
+ )
)
.configureBasicAuth(
- config.getUserName(),
+ config.getUsername(),
config.getPassword()
)
.configureBufferSize(maxBuffSize)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
index e709f33d4f..2cd5e0e69c 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
@@ -1,12 +1,13 @@
package com.provectus.kafka.ui.config;
+import com.provectus.kafka.ui.model.MetricsConfig;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
-import java.util.Properties;
import java.util.Set;
+import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import lombok.AllArgsConstructor;
import lombok.Builder;
@@ -30,56 +31,58 @@ public class ClustersProperties {
String bootstrapServers;
String schemaRegistry;
SchemaRegistryAuth schemaRegistryAuth;
- WebClientSsl schemaRegistrySsl;
+ KeystoreConfig schemaRegistrySsl;
String ksqldbServer;
KsqldbServerAuth ksqldbServerAuth;
- WebClientSsl ksqldbServerSsl;
+ KeystoreConfig ksqldbServerSsl;
List<ConnectCluster> kafkaConnect;
MetricsConfigData metrics;
- Properties properties;
+ Map<String, Object> properties;
boolean readOnly = false;
- boolean disableLogDirsCollection = false;
- List<SerdeConfig> serde = new ArrayList<>();
+ List<SerdeConfig> serde;
String defaultKeySerde;
String defaultValueSerde;
- List<Masking> masking = new ArrayList<>();
- long pollingThrottleRate = 0;
+ List<Masking> masking;
+ Long pollingThrottleRate;
+ TruststoreConfig ssl;
}
@Data
+ @ToString(exclude = "password")
public static class MetricsConfigData {
String type;
Integer port;
- boolean ssl;
+ Boolean ssl;
String username;
String password;
+ String keystoreLocation;
+ String keystorePassword;
}
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder(toBuilder = true)
+ @ToString(exclude = {"password", "keystorePassword"})
public static class ConnectCluster {
String name;
String address;
- String userName;
+ String username;
String password;
String keystoreLocation;
String keystorePassword;
- String truststoreLocation;
- String truststorePassword;
}
@Data
+ @ToString(exclude = {"password"})
public static class SchemaRegistryAuth {
String username;
String password;
}
@Data
- public static class WebClientSsl {
- String keystoreLocation;
- String keystorePassword;
+ @ToString(exclude = {"truststorePassword"})
+ public static class TruststoreConfig {
String truststoreLocation;
String truststorePassword;
}
@@ -89,7 +92,7 @@ public class ClustersProperties {
String name;
String className;
String filePath;
- Map<String, Object> properties = new HashMap<>();
+ Map<String, Object> properties;
String topicKeysPattern;
String topicValuesPattern;
}
@@ -101,12 +104,21 @@ public class ClustersProperties {
String password;
}
+ @Data
+ @NoArgsConstructor
+ @AllArgsConstructor
+ @ToString(exclude = {"keystorePassword"})
+ public static class KeystoreConfig {
+ String keystoreLocation;
+ String keystorePassword;
+ }
+
@Data
public static class Masking {
Type type;
- List<String> fields = List.of(); //if empty - policy will be applied to all fields
- List<String> pattern = List.of("X", "x", "n", "-"); //used when type=MASK
- String replacement = "***DATA_MASKED***"; //used when type=REPLACE
+ List<String> fields; //if null or empty list - policy will be applied to all fields
+ List<String> pattern; //used when type=MASK
+ String replacement; //used when type=REPLACE
String topicKeysPattern;
String topicValuesPattern;
@@ -117,7 +129,41 @@ public class ClustersProperties {
@PostConstruct
public void validateAndSetDefaults() {
- validateClusterNames();
+ if (clusters != null) {
+ validateClusterNames();
+ flattenClusterProperties();
+ setMetricsDefaults();
+ }
+ }
+
+ private void setMetricsDefaults() {
+ for (Cluster cluster : clusters) {
+ if (cluster.getMetrics() != null && !StringUtils.hasText(cluster.getMetrics().getType())) {
+ cluster.getMetrics().setType(MetricsConfig.JMX_METRICS_TYPE);
+ }
+ }
+ }
+
+ private void flattenClusterProperties() {
+ for (Cluster cluster : clusters) {
+ cluster.setProperties(flattenClusterProperties(null, cluster.getProperties()));
+ }
+ }
+
+ private Map<String, Object> flattenClusterProperties(@Nullable String prefix,
+ @Nullable Map<String, Object> propertiesMap) {
+ Map<String, Object> flattened = new HashMap<>();
+ if (propertiesMap != null) {
+ propertiesMap.forEach((k, v) -> {
+ String key = prefix == null ? k : prefix + "." + k;
+ if (v instanceof Map<?, ?>) {
+ flattened.putAll(flattenClusterProperties(key, (Map<String, Object>) v));
+ } else {
+ flattened.put(key, v);
+ }
+ });
+ }
+ return flattened;
}
private void validateClusterNames() {
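
Note: flattenClusterProperties above collapses nested maps under a cluster's "properties" into dotted keys before they reach the Kafka clients, so structured YAML and flat keys become equivalent. An illustrative sketch (values hypothetical):

    # structured form...
    kafka:
      clusters:
        - name: local
          properties:
            security:
              protocol: SASL_SSL
            sasl:
              mechanism: PLAIN
    # ...flattens to the client properties:
    #   security.protocol=SASL_SSL
    #   sasl.mechanism=PLAIN
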
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java
index db192ae826..f79d217fa7 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java
@@ -1,7 +1,6 @@
package com.provectus.kafka.ui.config.auth;
import java.util.HashMap;
-import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.annotation.PostConstruct;
@@ -32,13 +31,13 @@ public class OAuthProperties {
private String clientName;
private String redirectUri;
private String authorizationGrantType;
- private Set<String> scope = new HashSet<>();
+ private Set<String> scope;
private String issuerUri;
private String authorizationUri;
private String tokenUri;
private String userInfoUri;
private String jwkSetUri;
private String userNameAttribute;
- private Map<String, String> customParams = new HashMap<>();
+ private Map<String, String> customParams;
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthPropertiesConverter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthPropertiesConverter.java
index 8e4a8575a8..90daa36273 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthPropertiesConverter.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthPropertiesConverter.java
@@ -4,6 +4,8 @@ import static com.provectus.kafka.ui.config.auth.OAuthProperties.OAuth2Provider;
import static org.springframework.boot.autoconfigure.security.oauth2.client.OAuth2ClientProperties.Provider;
import static org.springframework.boot.autoconfigure.security.oauth2.client.OAuth2ClientProperties.Registration;
+import java.util.Optional;
+import java.util.Set;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import org.apache.commons.lang3.StringUtils;
@@ -24,7 +26,7 @@ public final class OAuthPropertiesConverter {
registration.setClientId(provider.getClientId());
registration.setClientSecret(provider.getClientSecret());
registration.setClientName(provider.getClientName());
- registration.setScope(provider.getScope());
+ registration.setScope(Optional.ofNullable(provider.getScope()).orElse(Set.of()));
registration.setRedirectUri(provider.getRedirectUri());
registration.setAuthorizationGrantType(provider.getAuthorizationGrantType());
@@ -71,7 +73,8 @@ public final class OAuthPropertiesConverter {
}
private static boolean isGoogle(OAuth2Provider provider) {
- return GOOGLE.equalsIgnoreCase(provider.getCustomParams().get(TYPE));
+ return provider.getCustomParams() != null
+ && GOOGLE.equalsIgnoreCase(provider.getCustomParams().get(TYPE));
}
}
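
Reviewer note: since the collection fields in OAuthProperties no longer default to empty collections, getScope() and getCustomParams() may now return null, so every consumer has to guard reads the way the converter does above. A minimal sketch of that pattern; the Provider record below is a stand-in, not the project's type:

```java
import java.util.Optional;
import java.util.Set;

public class NullSafeScopeDemo {
  // Stand-in for OAuth2Provider: scope is left null when not configured.
  record Provider(Set<String> scope) {}

  public static void main(String[] args) {
    Provider unconfigured = new Provider(null);
    // The converter's pattern: fall back to an immutable empty set instead of an NPE.
    Set<String> scope = Optional.ofNullable(unconfigured.scope()).orElse(Set.of());
    System.out.println(scope.isEmpty()); // true
  }
}
```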
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/logout/CognitoLogoutSuccessHandler.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/logout/CognitoLogoutSuccessHandler.java
index 3d725c659d..e9e5159e1b 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/logout/CognitoLogoutSuccessHandler.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/logout/CognitoLogoutSuccessHandler.java
@@ -12,6 +12,7 @@ import org.springframework.security.core.Authentication;
import org.springframework.security.web.server.WebFilterExchange;
import org.springframework.security.web.util.UrlUtils;
import org.springframework.stereotype.Component;
+import org.springframework.util.Assert;
import org.springframework.web.server.WebSession;
import org.springframework.web.util.UriComponents;
import org.springframework.web.util.UriComponentsBuilder;
@@ -45,6 +46,10 @@ public class CognitoLogoutSuccessHandler implements LogoutSuccessHandler {
.fragment(null)
.build();
+ Assert.isTrue(
+ provider.getCustomParams() != null && provider.getCustomParams().containsKey("logoutUrl"),
+ "Custom params should contain 'logoutUrl'"
+ );
final var uri = UriComponentsBuilder
.fromUri(URI.create(provider.getCustomParams().get("logoutUrl")))
.queryParam("client_id", provider.getClientId())
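
Reviewer note: the new Assert turns a missing logoutUrl into an explicit configuration error instead of a NullPointerException. Assuming kafka-ui's documented OAuth2 config layout, the parameter would be supplied under the provider's custom-params roughly like this; all values are placeholders:

```yaml
auth:
  type: OAUTH2
  oauth2:
    client:
      cognito:
        provider: cognito
        clientId: <client-id>
        clientSecret: <client-secret>
        scope: openid
        custom-params:
          type: cognito
          logoutUrl: https://<user-pool-domain>.auth.<region>.amazoncognito.com/logout
```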
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AccessController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AccessController.java
index 131a37982a..a86b6db5a0 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AccessController.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AccessController.java
@@ -66,7 +66,7 @@ public class AccessController implements AuthorizationApi {
UserPermissionDTO dto = new UserPermissionDTO();
dto.setClusters(clusters);
dto.setResource(ResourceTypeDTO.fromValue(permission.getResource().toString().toUpperCase()));
- dto.setValue(permission.getValue() != null ? permission.getValue().toString() : null);
+ dto.setValue(permission.getValue());
dto.setActions(permission.getActions()
.stream()
.map(String::toUpperCase)
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java
new file mode 100644
index 0000000000..b21ef10c61
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java
@@ -0,0 +1,137 @@
+package com.provectus.kafka.ui.controller;
+
+import static com.provectus.kafka.ui.model.rbac.permission.ApplicationConfigAction.EDIT;
+import static com.provectus.kafka.ui.model.rbac.permission.ApplicationConfigAction.VIEW;
+
+import com.provectus.kafka.ui.api.ApplicationConfigApi;
+import com.provectus.kafka.ui.config.ClustersProperties;
+import com.provectus.kafka.ui.model.ApplicationConfigDTO;
+import com.provectus.kafka.ui.model.ApplicationConfigPropertiesDTO;
+import com.provectus.kafka.ui.model.ApplicationConfigValidationDTO;
+import com.provectus.kafka.ui.model.ApplicationInfoDTO;
+import com.provectus.kafka.ui.model.ClusterConfigValidationDTO;
+import com.provectus.kafka.ui.model.RestartRequestDTO;
+import com.provectus.kafka.ui.model.UploadedFileInfoDTO;
+import com.provectus.kafka.ui.model.rbac.AccessContext;
+import com.provectus.kafka.ui.service.KafkaClusterFactory;
+import com.provectus.kafka.ui.service.rbac.AccessControlService;
+import com.provectus.kafka.ui.util.ApplicationRestarter;
+import com.provectus.kafka.ui.util.DynamicConfigOperations;
+import com.provectus.kafka.ui.util.DynamicConfigOperations.PropertiesStructure;
+import java.util.List;
+import java.util.Map;
+import javax.annotation.Nullable;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.mapstruct.Mapper;
+import org.mapstruct.factory.Mappers;
+import org.springframework.http.ResponseEntity;
+import org.springframework.http.codec.multipart.FilePart;
+import org.springframework.web.bind.annotation.RestController;
+import org.springframework.web.server.ServerWebExchange;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+import reactor.util.function.Tuple2;
+import reactor.util.function.Tuples;
+
+@Slf4j
+@RestController
+@RequiredArgsConstructor
+public class ApplicationConfigController implements ApplicationConfigApi {
+
+ private static final PropertiesMapper MAPPER = Mappers.getMapper(PropertiesMapper.class);
+
+ @Mapper
+ interface PropertiesMapper {
+
+ PropertiesStructure fromDto(ApplicationConfigPropertiesDTO dto);
+
+ ApplicationConfigPropertiesDTO toDto(PropertiesStructure propertiesStructure);
+ }
+
+ private final AccessControlService accessControlService;
+ private final DynamicConfigOperations dynamicConfigOperations;
+ private final ApplicationRestarter restarter;
+ private final KafkaClusterFactory kafkaClusterFactory;
+
+
+ @Override
+ public Mono<ResponseEntity<ApplicationInfoDTO>> getApplicationInfo(ServerWebExchange exchange) {
+ return Mono.just(
+ new ApplicationInfoDTO()
+ .enabledFeatures(
+ dynamicConfigOperations.dynamicConfigEnabled()
+ ? List.of(ApplicationInfoDTO.EnabledFeaturesEnum.DYNAMIC_CONFIG)
+ : List.of()
+ )
+ ).map(ResponseEntity::ok);
+ }
+
+ @Override
+ public Mono<ResponseEntity<ApplicationConfigDTO>> getCurrentConfig(ServerWebExchange exchange) {
+ return accessControlService
+ .validateAccess(
+ AccessContext.builder()
+ .applicationConfigActions(VIEW)
+ .build()
+ )
+ .then(Mono.fromSupplier(() -> ResponseEntity.ok(
+ new ApplicationConfigDTO()
+ .properties(MAPPER.toDto(dynamicConfigOperations.getCurrentProperties()))
+ )));
+ }
+
+ @Override
+ public Mono<ResponseEntity<Void>> restartWithConfig(Mono<RestartRequestDTO> restartRequestDto,
+ ServerWebExchange exchange) {
+ return accessControlService
+ .validateAccess(
+ AccessContext.builder()
+ .applicationConfigActions(EDIT)
+ .build()
+ )
+ .then(restartRequestDto)
+ .map(dto -> {
+ dynamicConfigOperations.persist(MAPPER.fromDto(dto.getConfig().getProperties()));
+ restarter.requestRestart();
+ return ResponseEntity.ok().build();
+ });
+ }
+
+ @Override
+ public Mono<ResponseEntity<UploadedFileInfoDTO>> uploadConfigRelatedFile(FilePart file, ServerWebExchange exchange) {
+ return accessControlService
+ .validateAccess(
+ AccessContext.builder()
+ .applicationConfigActions(EDIT)
+ .build()
+ )
+ .then(dynamicConfigOperations.uploadConfigRelatedFile(file))
+ .map(path -> new UploadedFileInfoDTO().location(path.toString()))
+ .map(ResponseEntity::ok);
+ }
+
+ @Override
+ public Mono<ResponseEntity<ApplicationConfigValidationDTO>> validateConfig(Mono<ApplicationConfigDTO> configDto,
+ ServerWebExchange exchange) {
+ return configDto
+ .flatMap(config -> {
+ PropertiesStructure propertiesStructure = MAPPER.fromDto(config.getProperties());
+ ClustersProperties clustersProperties = propertiesStructure.getKafka();
+ return validateClustersConfig(clustersProperties)
+ .map(validations -> new ApplicationConfigValidationDTO().clusters(validations));
+ })
+ .map(ResponseEntity::ok);
+ }
+
+ private Mono<Map<String, ClusterConfigValidationDTO>> validateClustersConfig(