diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml index 79c2681816b6f440c75cffd54ea3a692c55ebf28..97054ff950632a9417f59da57e362f57f4c3dc83 100644 --- a/.github/workflows/backend.yml +++ b/.github/workflows/backend.yml @@ -1,20 +1,19 @@ name: backend on: push: - branches: [ '*' ] - pull_request: - branches: [ master ] + branches: + - "**" + - "!master" jobs: - mvn-all-build: + build: runs-on: ubuntu-latest steps: - name: Cache local Maven repository - uses: actions/cache@v1 + uses: actions/cache@v2 with: path: ~/.m2/repository - key: ${{ runner.os }}-maven-all-${{ hashFiles('**/pom.xml') }} + key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} restore-keys: | - ${{ runner.os }}-maven-all- ${{ runner.os }}-maven- - uses: actions/checkout@v2 - name: Set up JDK 1.13 diff --git a/.github/workflows/charts.yaml b/.github/workflows/charts.yaml deleted file mode 100644 index f52c09bf1d8cfaeb51c6fe0180cbdb223c245864..0000000000000000000000000000000000000000 --- a/.github/workflows/charts.yaml +++ /dev/null @@ -1,31 +0,0 @@ -name: charts -on: - create: - tags: - - "v*.*.*" -jobs: - release: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - with: - fetch-depth: 0 - - run: | - git config user.name github-actions - git config user.email github-actions@github.com - - uses: azure/setup-helm@v1 - - name: update appVersion - run: | - export version=${GITHUB_REF##*/} - sed -i "s/appVersion:.*/appVersion: ${version}/" charts/kafka-ui/Chart.yaml - - name: - run: | - export VERSION=${GITHUB_REF##*/} - MSG=$(helm package --app-version ${VERSION} charts/kafka-ui) - git fetch origin - git stash - git checkout -b gh-pages origin/gh-pages - helm repo index . - git add -f ${MSG##*/} index.yaml - git commit -m "release ${VERSION}" - git push \ No newline at end of file diff --git a/.github/workflows/frontend.yaml b/.github/workflows/frontend.yaml index 331e766154eb9cfe8641339bac5651374d2e2b9c..111446b6773983e63e69aa2f0986fce39d86a89c 100644 --- a/.github/workflows/frontend.yaml +++ b/.github/workflows/frontend.yaml @@ -1,9 +1,9 @@ name: frontend on: push: - branches: [ '*' ] - pull_request: - branches: [ master ] + branches: + - "**" + - "!master" jobs: npm-test: needs: [mvn-contract-build] diff --git a/.github/workflows/latest.yaml b/.github/workflows/latest.yaml new file mode 100644 index 0000000000000000000000000000000000000000..eb32018dc3c7ed2dff745be6fe7471d4fe1083dd --- /dev/null +++ b/.github/workflows/latest.yaml @@ -0,0 +1,61 @@ +name: latest +on: + workflow_dispatch: + push: + branches: [ "master" ] + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Cache local Maven repository + uses: actions/cache@v2 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-maven- + - name: Set up JDK 1.13 + uses: actions/setup-java@v1 + with: + java-version: 1.13 + - name: Build + id: build + run: | + export VERSION=$(mvn -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec) + echo "::set-output name=version::${VERSION}" + mvn clean package -Pprod -DskipTests +################# +# # +# Docker images # +# # +################# + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + - name: Cache Docker layers + uses: actions/cache@v2 + with: + path: /tmp/.buildx-cache + key: ${{ runner.os }}-buildx-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-buildx- + 
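A note on the hand-off inside this new `latest.yaml` workflow: the `build` step extracts the project version from the POM with `mvn ... exec:exec`, publishes it via `::set-output`, and the Docker build step below reads it back as `steps.build.outputs.version` for the `JAR_FILE` build arg. A minimal, self-contained sketch of that pattern — the job name, step ids, and version value here are illustrative, not part of the PR:

```yaml
# Sketch of the step-output hand-off pattern used by latest.yaml.
on: workflow_dispatch
jobs:
  demo:
    runs-on: ubuntu-latest
    steps:
      - name: Publish a step output
        id: build
        # ::set-output was the supported mechanism at the time of this change
        run: echo "::set-output name=version::0.0.1"
      - name: Consume it in a later step
        run: echo "JAR_FILE=kafka-ui-api-${{ steps.build.outputs.version }}.jar"
```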
- name: Login to DockerHub + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Build and push + id: docker_build_and_push + uses: docker/build-push-action@v2 + with: + builder: ${{ steps.buildx.outputs.name }} + context: kafka-ui-api + push: true + tags: provectuslabs/kafka-ui:latest + build-args: | + JAR_FILE=kafka-ui-api-${{ steps.build.outputs.version }}.jar + cache-from: type=local,src=/tmp/.buildx-cache + cache-to: type=local,dest=/tmp/.buildx-cache diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 6a95f1ca21ab7267923b92ec0f689e0bc5f02aeb..8b1877e4776934e886858ffee044b33e7d0897be 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -59,19 +59,32 @@ jobs: ${{ runner.os }}-buildx- - name: Login to DockerHub if: github.ref == 'refs/heads/master' - uses: docker/login-action@v1 + uses: docker/login-action@v1 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - - name: Build and push + - name: Build + if: github.ref != 'refs/heads/master' id: docker_build uses: docker/build-push-action@v2 with: builder: ${{ steps.buildx.outputs.name }} context: kafka-ui-api - push: github.ref == 'refs/heads/master' - tags: provectuslabs/kafka-ui:${{ steps.prep.outputs.version }} + push: false build-args: | JAR_FILE=kafka-ui-api-${{ steps.prep.outputs.version }}.jar cache-from: type=local,src=/tmp/.buildx-cache cache-to: type=local,dest=/tmp/.buildx-cache + - name: Build and push + if: github.ref == 'refs/heads/master' + id: docker_build_and_push + uses: docker/build-push-action@v2 + with: + builder: ${{ steps.buildx.outputs.name }} + context: kafka-ui-api + push: true + tags: provectuslabs/kafka-ui:${{ steps.prep.outputs.version }} + build-args: | + JAR_FILE=kafka-ui-api-${{ steps.prep.outputs.version }}.jar + cache-from: type=local,src=/tmp/.buildx-cache + cache-to: type=local,dest=/tmp/.buildx-cache \ No newline at end of file diff --git a/.github/workflows/tags.yaml b/.github/workflows/tags.yaml new file mode 100644 index 0000000000000000000000000000000000000000..562d0f5214ddd882035fc65b9d1ff51f9d5cb15f --- /dev/null +++ b/.github/workflows/tags.yaml @@ -0,0 +1,68 @@ +name: after_release +on: + push: + tags: + - "v**" +jobs: + charts: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 1 + - run: | + git config user.name github-actions + git config user.email github-actions@github.com + - uses: azure/setup-helm@v1 + - name: update appVersion + run: | + export version=${GITHUB_REF##*/} + sed -i "s/appVersion:.*/appVersion: ${version}/" charts/kafka-ui/Chart.yaml + - name: release chart + run: | + export VERSION=${GITHUB_REF##*/} + MSG=$(helm package --app-version ${VERSION} charts/kafka-ui) + git fetch origin + git stash + git checkout -b gh-pages origin/gh-pages + helm repo index . + git add -f ${MSG##*/} index.yaml + git commit -m "release ${VERSION}" + git push + gh-release: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - run: | + git config user.name github-actions + git config user.email github-actions@github.com + - id: generate + shell: /usr/bin/bash -x -e {0} + run: | + VERSION=${GITHUB_REF##*/} + CHANGELOG=$(git --no-pager log --oneline --pretty=format:"- %s" `git tag --sort=-creatordate | grep '^v.*' | head -n2 | tail -n1`..
| uniq | grep -v '^- Merge\|^- skip') + CHANGELOG="${CHANGELOG//'%'/'%25'}" + CHANGELOG="${CHANGELOG//$'\n'/'%0A'}" + CHANGELOG="${CHANGELOG//$'\r'/'%0D'}" + echo ${CHANGELOG} + echo "::set-output name=changelog::${CHANGELOG}" + echo "::set-output name=version::${VERSION}" + - id: create_release + uses: actions/github-script@v3 + env: + CHANGELOG: ${{steps.generate.outputs.changelog}} + VERSION: ${{steps.generate.outputs.version}} + with: + github-token: ${{secrets.GITHUB_TOKEN}} + script: | + github.repos.createRelease({ + owner: context.repo.owner, + repo: context.repo.repo, + tag_name: context.ref, + name: "Release "+process.env.VERSION, + body: process.env.CHANGELOG, + draft: false, + prerelease: false + }); diff --git a/README.md b/README.md index 4e38d7bec8838b8c9efa0a3b027b65c17722dd54..84521839abaa6daf80a5f4c24c677b84082ea47c 100644 --- a/README.md +++ b/README.md @@ -39,7 +39,8 @@ docker run -p 8080:8080 \ -d provectuslabs/kafka-ui:latest ``` -Then access the web UI at [http://localhost:8080](http://localhost:8080). +Then access the web UI at [http://localhost:8080](http://localhost:8080). +For further configuration with environment variables, [see environment variables](#env_variables). ### Docker Compose @@ -138,10 +139,11 @@ kafka: * `schemaRegistry`: schemaRegistry's address * `schemaNameTemplate`: how keys are saved to schemaRegistry * `jmxPort`: open jmxPorts of a broker +* `readOnly`: enable read-only mode Configure as many clusters as you need by adding their configs below separated with `-`. -## Environment Variables +## Environment Variables <a name="env_variables"></a> Alternatively, each variable of the .yml file can be set with an environment variable. For example, if you want to use an environment variable to set the `name` parameter, you can write it like this: `KAFKA_CLUSTERS_2_NAME` @@ -154,6 +156,7 @@ For example, if you want to use an environment variable to set the `name` parame |`KAFKA_CLUSTERS_0_SCHEMAREGISTRY` |SchemaRegistry's address |`KAFKA_CLUSTERS_0_SCHEMANAMETEMPLATE` |How keys are saved to schemaRegistry |`KAFKA_CLUSTERS_0_JMXPORT` |Open jmxPorts of a broker +|`KAFKA_CLUSTERS_0_READONLY` |Enable read-only mode. Default: false
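With the new `KAFKA_CLUSTERS_0_READONLY` variable documented above, enabling read-only mode from the command line would look roughly like this — a minimal sketch reusing the `docker run` example from earlier in the README, not an addition the PR itself makes:

```bash
# Run Kafka UI with the new read-only flag enabled for cluster 0.
# Host/port values are illustrative placeholders.
docker run -p 8080:8080 \
    -e KAFKA_CLUSTERS_0_NAME=local \
    -e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092 \
    -e KAFKA_CLUSTERS_0_READONLY=true \
    -d provectuslabs/kafka-ui:latest
```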
diff --git a/docker-compose.md b/docker-compose.md index 1ec8b1350450a0d61d7da9a8d15177565bcda624..68be207ade4cbcb986e48227e3879353fab78451 100644 --- a/docker-compose.md +++ b/docker-compose.md @@ -1,5 +1,7 @@ # Quick Start with docker-compose +Environment variables documentation - [see usage](README.md#env_variables) + * Add a new service in docker-compose.yml ```yaml services: kafka-ui: image: provectuslabs/kafka-ui container_name: kafka-ui ports: - - "9000:8080" + - "8080:8080" restart: always environment: - -e KAFKA_CLUSTERS_0_NAME=local - -e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092 - -e KAFKA_CLUSTERS_0_ZOOKEEPER=localhost:2181 + - KAFKA_CLUSTERS_0_NAME=local + - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092 + - KAFKA_CLUSTERS_0_ZOOKEEPER=localhost:2181 ``` + +* If you prefer Kafka UI in read-only mode +```yaml +version: '2' +services: + kafka-ui: + image: provectuslabs/kafka-ui + container_name: kafka-ui + ports: + - "8080:8080" + restart: always + environment: + - KAFKA_CLUSTERS_0_NAME=local + - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092 + - KAFKA_CLUSTERS_0_ZOOKEEPER=localhost:2181 + - KAFKA_CLUSTERS_0_READONLY=true +``` * Start Kafka UI process diff --git a/docker/kafka-clusters-only.yaml b/docker/kafka-clusters-only.yaml index 16cb50b6596f01a35bbd5c3227f779eed0363607..68116d9499b33c8c000ed117e3ab90e88f9e204e 100644 --- a/docker/kafka-clusters-only.yaml +++ b/docker/kafka-clusters-only.yaml @@ -86,7 +86,25 @@ services: SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas ports: - - 8081:8081 + - 8085:8085 + + schemaregistry1: + image: confluentinc/cp-schema-registry:5.5.0 + ports: + - 18085:8085 + depends_on: + - zookeeper1 + - kafka1 + environment: + SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:29092 + SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper1:2181 + SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT + SCHEMA_REGISTRY_HOST_NAME: schemaregistry1 + SCHEMA_REGISTRY_LISTENERS: http://schemaregistry1:8085 + + SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http" + SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO + SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas kafka-connect0: image: confluentinc/cp-kafka-connect:5.2.4 diff --git a/docker/kafka-ui.yaml b/docker/kafka-ui.yaml index 8d8790b97e9de26bb7bcffb71740230937a80198..c384a8f4b3c95a074fdbfaede5b4d42a5e4636cd 100644 --- a/docker/kafka-ui.yaml +++ b/docker/kafka-ui.yaml @@ -96,6 +96,24 @@ services: SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas + schemaregistry1: + image: confluentinc/cp-schema-registry:5.5.0 + ports: + - 18085:8085 + depends_on: + - zookeeper1 + - kafka1 + environment: + SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:29092 + SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper1:2181 + SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT + SCHEMA_REGISTRY_HOST_NAME: schemaregistry1 + SCHEMA_REGISTRY_LISTENERS: http://schemaregistry1:8085 + + SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http" + SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO + SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas + kafka-connect0: image: confluentinc/cp-kafka-connect:5.2.4 ports: diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/exception/DuplicateEntityException.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/exception/DuplicateEntityException.java new file mode 100644 index
0000000000000000000000000000000000000000..04c6be1590c5dcbd9bc1c5972d41417ea85bba56 --- /dev/null +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/exception/DuplicateEntityException.java @@ -0,0 +1,15 @@ +package com.provectus.kafka.ui.cluster.exception; + +import org.springframework.http.HttpStatus; + +public class DuplicateEntityException extends CustomBaseException{ + + public DuplicateEntityException(String message) { + super(message); + } + + @Override + public HttpStatus getResponseStatusCode() { + return HttpStatus.CONFLICT; + } +} diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/exception/UnprocessableEntityException.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/exception/UnprocessableEntityException.java new file mode 100644 index 0000000000000000000000000000000000000000..bafc8c81806e7221c385bbd69a9033eeec3982cf --- /dev/null +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/exception/UnprocessableEntityException.java @@ -0,0 +1,15 @@ +package com.provectus.kafka.ui.cluster.exception; + +import org.springframework.http.HttpStatus; + +public class UnprocessableEntityException extends CustomBaseException{ + + public UnprocessableEntityException(String message) { + super(message); + } + + @Override + public HttpStatus getResponseStatusCode() { + return HttpStatus.UNPROCESSABLE_ENTITY; + } +} diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/mapper/ClusterMapper.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/mapper/ClusterMapper.java index a9e48bd627419fc9ad89fff4735e69a4044ea566..befddfc3280b5a82120abb064795e4e854e18377 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/mapper/ClusterMapper.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/mapper/ClusterMapper.java @@ -2,7 +2,8 @@ package com.provectus.kafka.ui.cluster.mapper; import com.provectus.kafka.ui.cluster.config.ClustersProperties; import com.provectus.kafka.ui.cluster.model.*; -import com.provectus.kafka.ui.cluster.model.InternalCompatibilityCheck; +import com.provectus.kafka.ui.cluster.model.schemaregistry.InternalCompatibilityCheck; +import com.provectus.kafka.ui.cluster.model.schemaregistry.InternalCompatibilityLevel; import com.provectus.kafka.ui.model.*; import java.util.Properties; import org.mapstruct.Mapper; diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalCompatibilityCheck.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalCompatibilityCheck.java similarity index 76% rename from kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalCompatibilityCheck.java rename to kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalCompatibilityCheck.java index 001823f2758d6e69379ba8ad8ecd52b3043732f3..da072c46f695f7ee11b955077f298058d931ea26 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalCompatibilityCheck.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalCompatibilityCheck.java @@ -1,4 +1,4 @@ -package com.provectus.kafka.ui.cluster.model; +package com.provectus.kafka.ui.cluster.model.schemaregistry; import com.fasterxml.jackson.annotation.JsonProperty; import lombok.Data; diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalCompatibilityLevel.java 
b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalCompatibilityLevel.java similarity index 64% rename from kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalCompatibilityLevel.java rename to kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalCompatibilityLevel.java index ed2bfc70d76ba1715d49b6fec54ddb4b1d45fdfc..d66fc80c8ddc54592c39d62cbe5e8dde794422fb 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalCompatibilityLevel.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalCompatibilityLevel.java @@ -1,4 +1,4 @@ -package com.provectus.kafka.ui.cluster.model; +package com.provectus.kafka.ui.cluster.model.schemaregistry; import lombok.Data; diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalNewSchema.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalNewSchema.java new file mode 100644 index 0000000000000000000000000000000000000000..b121943fe341768fe62506cedffabfa9e8fdc3da --- /dev/null +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalNewSchema.java @@ -0,0 +1,17 @@ +package com.provectus.kafka.ui.cluster.model.schemaregistry; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.provectus.kafka.ui.model.SchemaType; +import lombok.Data; + +@Data +public class InternalNewSchema { + private String schema; + @JsonInclude(JsonInclude.Include.NON_NULL) + private SchemaType schemaType; + + public InternalNewSchema(String schema, SchemaType schemaType) { + this.schema = schema; + this.schemaType = schemaType; + } +} diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/SubjectIdResponse.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/SubjectIdResponse.java new file mode 100644 index 0000000000000000000000000000000000000000..3a6eefee279ce70e3ebacefbfbaed560a788bb8c --- /dev/null +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/SubjectIdResponse.java @@ -0,0 +1,8 @@ +package com.provectus.kafka.ui.cluster.model.schemaregistry; + +import lombok.Data; + +@Data +public class SubjectIdResponse { + private Integer id; +} diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/SchemaRegistryService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/SchemaRegistryService.java index 232e25d0263c7d3759479409b5b87ec7abe157fe..b364cbc8299bb4bf9bcfb8239150b65ee4b96405 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/SchemaRegistryService.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/SchemaRegistryService.java @@ -1,36 +1,43 @@ package com.provectus.kafka.ui.cluster.service; +import com.provectus.kafka.ui.cluster.exception.DuplicateEntityException; import com.provectus.kafka.ui.cluster.exception.NotFoundException; +import com.provectus.kafka.ui.cluster.exception.UnprocessableEntityException; import com.provectus.kafka.ui.cluster.mapper.ClusterMapper; import com.provectus.kafka.ui.cluster.model.ClustersStorage; -import com.provectus.kafka.ui.cluster.model.InternalCompatibilityCheck; -import com.provectus.kafka.ui.cluster.model.InternalCompatibilityLevel; -import com.provectus.kafka.ui.model.CompatibilityCheckResponse; -import com.provectus.kafka.ui.model.CompatibilityLevel; 
-import com.provectus.kafka.ui.model.NewSchemaSubject; -import com.provectus.kafka.ui.model.SchemaSubject; -import java.util.Formatter; +import com.provectus.kafka.ui.cluster.model.KafkaCluster; +import com.provectus.kafka.ui.cluster.model.schemaregistry.InternalCompatibilityCheck; +import com.provectus.kafka.ui.cluster.model.schemaregistry.InternalCompatibilityLevel; +import com.provectus.kafka.ui.cluster.model.schemaregistry.InternalNewSchema; +import com.provectus.kafka.ui.cluster.model.schemaregistry.SubjectIdResponse; +import com.provectus.kafka.ui.model.*; import lombok.RequiredArgsConstructor; import lombok.extern.log4j.Log4j2; -import org.springframework.core.ParameterizedTypeReference; -import org.springframework.http.HttpEntity; -import org.springframework.http.HttpStatus; +import org.jetbrains.annotations.NotNull; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Service; import org.springframework.web.reactive.function.BodyInserters; +import org.springframework.web.reactive.function.client.ClientResponse; import org.springframework.web.reactive.function.client.WebClient; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; -import java.util.Arrays; -import java.util.List; +import java.util.Formatter; import java.util.Objects; +import java.util.function.Function; + +import static org.springframework.http.HttpStatus.NOT_FOUND; +import static org.springframework.http.HttpStatus.UNPROCESSABLE_ENTITY; @Service @Log4j2 @RequiredArgsConstructor public class SchemaRegistryService { + public static final String NO_SUCH_SCHEMA_VERSION = "No such schema %s with version %s"; + public static final String NO_SUCH_SCHEMA = "No such schema %s"; + public static final String NO_SUCH_CLUSTER = "No such cluster"; + private static final String URL_SUBJECTS = "/subjects"; private static final String URL_SUBJECT = "/subjects/{schemaName}"; private static final String URL_SUBJECT_VERSIONS = "/subjects/{schemaName}/versions"; @@ -45,7 +52,7 @@ public class SchemaRegistryService { var allSubjectNames = getAllSubjectNames(clusterName); return allSubjectNames .flatMapMany(Flux::fromArray) - .flatMap(subject -> getLatestSchemaSubject(clusterName, subject)); + .flatMap(subject -> getLatestSchemaVersionBySubject(clusterName, subject)); } public Mono getAllSubjectNames(String clusterName) { @@ -56,7 +63,7 @@ public class SchemaRegistryService { .bodyToMono(String[].class) .doOnError(log::error) ) - .orElse(Mono.error(new NotFoundException("No such cluster"))); + .orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER))); } public Flux getAllVersionsBySubject(String clusterName, String subject) { @@ -69,19 +76,17 @@ public class SchemaRegistryService { .map(cluster -> webClient.get() .uri(cluster.getSchemaRegistry() + URL_SUBJECT_VERSIONS, schemaName) .retrieve() - .onStatus(HttpStatus.NOT_FOUND::equals, - resp -> Mono.error( - new NotFoundException(formatted("No such schema %s")) - ) + .onStatus(NOT_FOUND::equals, + throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA)) ).bodyToFlux(Integer.class) - ).orElse(Flux.error(new NotFoundException("No such cluster"))); + ).orElse(Flux.error(new NotFoundException(NO_SUCH_CLUSTER))); } public Mono getSchemaSubjectByVersion(String clusterName, String schemaName, Integer version) { return this.getSchemaSubject(clusterName, schemaName, String.valueOf(version)); } - public Mono getLatestSchemaSubject(String clusterName, String schemaName) { + public Mono 
getLatestSchemaVersionBySubject(String clusterName, String schemaName) { return this.getSchemaSubject(clusterName, schemaName, LATEST); } @@ -90,13 +95,10 @@ public class SchemaRegistryService { .map(cluster -> webClient.get() .uri(cluster.getSchemaRegistry() + URL_SUBJECT_BY_VERSION, schemaName, version) .retrieve() - .onStatus(HttpStatus.NOT_FOUND::equals, - resp -> Mono.error( - new NotFoundException( - formatted("No such schema %s with version %s", schemaName, version) - ) - ) + .onStatus(NOT_FOUND::equals, + throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA_VERSION, schemaName, version)) ).bodyToMono(SchemaSubject.class) + .map(this::withSchemaType) .zipWith(getSchemaCompatibilityInfoOrGlobal(clusterName, schemaName)) .map(tuple -> { SchemaSubject schema = tuple.getT1(); @@ -105,7 +107,21 @@ public class SchemaRegistryService { return schema; }) ) - .orElseThrow(); + .orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER))); + } + + /** + * If {@link SchemaSubject#getSchemaType()} is null, then AVRO, otherwise, adds the schema type as is. + */ + @NotNull + private SchemaSubject withSchemaType(SchemaSubject s) { + SchemaType schemaType = Objects.nonNull(s.getSchemaType()) ? s.getSchemaType() : SchemaType.AVRO; + return new SchemaSubject() + .schema(s.getSchema()) + .subject(s.getSubject()) + .version(s.getVersion()) + .id(s.getId()) + .schemaType(schemaType); } public Mono> deleteSchemaSubjectByVersion(String clusterName, String schemaName, Integer version) { @@ -121,46 +137,71 @@ public class SchemaRegistryService { .map(cluster -> webClient.delete() .uri(cluster.getSchemaRegistry() + URL_SUBJECT_BY_VERSION, schemaName, version) .retrieve() - .onStatus(HttpStatus.NOT_FOUND::equals, - resp -> Mono.error( - new NotFoundException( - formatted("No such schema %s with version %s", schemaName, version) - ) - ) + .onStatus(NOT_FOUND::equals, + throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA_VERSION, schemaName, version)) ).toBodilessEntity() - ).orElse(Mono.error(new NotFoundException("No such cluster"))); + ).orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER))); } - public Mono> deleteSchemaSubject(String clusterName, String schemaName) { + public Mono> deleteSchemaSubjectEntirely(String clusterName, String schemaName) { return clustersStorage.getClusterByName(clusterName) .map(cluster -> webClient.delete() .uri(cluster.getSchemaRegistry() + URL_SUBJECT, schemaName) .retrieve() - .onStatus(HttpStatus.NOT_FOUND::equals, - resp -> Mono.error( - new NotFoundException( - formatted("No such schema %s", schemaName) - ) - ) + .onStatus(NOT_FOUND::equals, throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA, schemaName)) ) .toBodilessEntity()) - .orElse(Mono.error(new NotFoundException("No such cluster"))); + .orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER))); } - public Mono> createNewSubject(String clusterName, String schemaName, Mono newSchemaSubject) { - return clustersStorage.getClusterByName(clusterName) - .map(cluster -> webClient.post() - .uri(cluster.getSchemaRegistry() + URL_SUBJECT_VERSIONS, schemaName) - .contentType(MediaType.APPLICATION_JSON) - .body(BodyInserters.fromPublisher(newSchemaSubject, NewSchemaSubject.class)) - .retrieve() - .onStatus(HttpStatus.NOT_FOUND::equals, - resp -> Mono.error( - new NotFoundException(formatted("No such schema %s", schemaName))) - ) - .toEntity(SchemaSubject.class) - .log()) - .orElse(Mono.error(new NotFoundException("No such cluster"))); + /** + * Checks whether the provided schema duplicates the previous or not, creates a new schema + * and 
then returns the whole content by requesting its latest version. + */ + public Mono registerNewSchema(String clusterName, Mono newSchemaSubject) { + return newSchemaSubject + .flatMap(schema -> { + SchemaType schemaType = SchemaType.AVRO == schema.getSchemaType() ? null : schema.getSchemaType(); + Mono newSchema = Mono.just(new InternalNewSchema(schema.getSchema(), schemaType)); + String subject = schema.getSubject(); + return clustersStorage.getClusterByName(clusterName) + .map(KafkaCluster::getSchemaRegistry) + .map(schemaRegistryUrl -> checkSchemaOnDuplicate(subject, newSchema, schemaRegistryUrl) + .flatMap(s -> submitNewSchema(subject, newSchema, schemaRegistryUrl)) + .flatMap(resp -> getLatestSchemaVersionBySubject(clusterName, subject)) + ) + .orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER))); + }); + } + + @NotNull + private Mono submitNewSchema(String subject, Mono newSchemaSubject, String schemaRegistryUrl) { + return webClient.post() + .uri(schemaRegistryUrl + URL_SUBJECT_VERSIONS, subject) + .contentType(MediaType.APPLICATION_JSON) + .body(BodyInserters.fromPublisher(newSchemaSubject, InternalNewSchema.class)) + .retrieve() + .onStatus(UNPROCESSABLE_ENTITY::equals, r -> Mono.error(new UnprocessableEntityException("Invalid params"))) + .bodyToMono(SubjectIdResponse.class); + } + + @NotNull + private Mono checkSchemaOnDuplicate(String subject, Mono newSchemaSubject, String schemaRegistryUrl) { + return webClient.post() + .uri(schemaRegistryUrl + URL_SUBJECT, subject) + .contentType(MediaType.APPLICATION_JSON) + .body(BodyInserters.fromPublisher(newSchemaSubject, InternalNewSchema.class)) + .retrieve() + .onStatus(NOT_FOUND::equals, res -> Mono.empty()) + .onStatus(UNPROCESSABLE_ENTITY::equals, r -> Mono.error(new UnprocessableEntityException("Invalid params"))) + .bodyToMono(SchemaSubject.class) + .filter(s -> Objects.isNull(s.getId())) + .switchIfEmpty(Mono.error(new DuplicateEntityException("Such schema already exists"))); + } + + @NotNull + private Function> throwIfNotFoundStatus(String formatted) { + return resp -> Mono.error(new NotFoundException(formatted)); } /** @@ -178,10 +219,10 @@ public class SchemaRegistryService { .contentType(MediaType.APPLICATION_JSON) .body(BodyInserters.fromPublisher(compatibilityLevel, CompatibilityLevel.class)) .retrieve() - .onStatus(HttpStatus.NOT_FOUND::equals, - resp -> Mono.error(new NotFoundException(formatted("No such schema %s", schemaName)))) + .onStatus(NOT_FOUND::equals, + throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA, schemaName))) .bodyToMono(Void.class); - }).orElse(Mono.error(new NotFoundException("No such cluster"))); + }).orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER))); } public Mono updateSchemaCompatibility(String clusterName, Mono compatibilityLevel) { @@ -217,12 +258,11 @@ public class SchemaRegistryService { .contentType(MediaType.APPLICATION_JSON) .body(BodyInserters.fromPublisher(newSchemaSubject, NewSchemaSubject.class)) .retrieve() - .onStatus(HttpStatus.NOT_FOUND::equals, - resp -> Mono.error(new NotFoundException(formatted("No such schema %s", schemaName)))) + .onStatus(NOT_FOUND::equals, throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA, schemaName))) .bodyToMono(InternalCompatibilityCheck.class) .map(mapper::toCompatibilityCheckResponse) .log() - ).orElse(Mono.error(new NotFoundException("No such cluster"))); + ).orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER))); } public String formatted(String str, Object... 
args) { diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/MetricsRestController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/MetricsRestController.java index a2cacbba8e08c5e3cd65a48c6c075f8ea900913e..9b86451b8fe575f2cf7228e75d661e4b5aaac106 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/MetricsRestController.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/MetricsRestController.java @@ -106,7 +106,7 @@ public class MetricsRestController implements ApiClustersApi { @Override public Mono> getLatestSchema(String clusterName, String subject, ServerWebExchange exchange) { - return schemaRegistryService.getLatestSchemaSubject(clusterName, subject).map(ResponseEntity::ok); + return schemaRegistryService.getLatestSchemaVersionBySubject(clusterName, subject).map(ResponseEntity::ok); } @Override @@ -138,14 +138,16 @@ public class MetricsRestController implements ApiClustersApi { @Override public Mono> deleteSchema(String clusterName, String subjectName, ServerWebExchange exchange) { - return schemaRegistryService.deleteSchemaSubject(clusterName, subjectName); + return schemaRegistryService.deleteSchemaSubjectEntirely(clusterName, subjectName); } @Override - public Mono> createNewSchema(String clusterName, String subject, + public Mono> createNewSchema(String clusterName, @Valid Mono newSchemaSubject, ServerWebExchange exchange) { - return schemaRegistryService.createNewSubject(clusterName, subject, newSchemaSubject); + return schemaRegistryService + .registerNewSchema(clusterName, newSchemaSubject) + .map(ResponseEntity::ok); } @Override diff --git a/kafka-ui-api/src/main/resources/application-local.yml b/kafka-ui-api/src/main/resources/application-local.yml index 951a1799e067a71109c3b3f3e9df52402247407d..582284910936b8ace7fd2570de89238a81286332 100644 --- a/kafka-ui-api/src/main/resources/application-local.yml +++ b/kafka-ui-api/src/main/resources/application-local.yml @@ -13,7 +13,7 @@ kafka: name: secondLocal bootstrapServers: localhost:9093 zookeeper: localhost:2182 - schemaRegistry: http://localhost:8081 + schemaRegistry: http://localhost:18085 kafkaConnect: - name: first address: http://localhost:8083 diff --git a/kafka-ui-api/src/main/resources/application-sdp.yml b/kafka-ui-api/src/main/resources/application-sdp.yml index 9f6a293ec29e9085b6d41b246748da6331a1c905..46a0377799ef367b19794815b9e3ab9a2dda01eb 100644 --- a/kafka-ui-api/src/main/resources/application-sdp.yml +++ b/kafka-ui-api/src/main/resources/application-sdp.yml @@ -9,7 +9,7 @@ kafka: name: secondLocal zookeeper: zookeeper1:2181 bootstrapServers: kafka1:29092 - schemaRegistry: http://schemaregistry0:8085 + schemaRegistry: http://schemaregistry1:8085 admin-client-timeout: 5000 zookeeper: connection-timeout: 1000 diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractBaseTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractBaseTest.java index 4065b4006d197867c80aa2a524798050b1acd4dc..cd005596940b05c1f4a50fa7721571340878ad3b 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractBaseTest.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractBaseTest.java @@ -18,7 +18,7 @@ public abstract class AbstractBaseTest { public static String LOCAL = "local"; public static String SECOND_LOCAL = "secondLocal"; - private static final String CONFLUENT_PLATFORM_VERSION = "5.2.1"; + private static final String CONFLUENT_PLATFORM_VERSION = "5.5.0"; public static final KafkaContainer kafka = new 
KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka").withTag(CONFLUENT_PLATFORM_VERSION)) .withNetwork(Network.SHARED); diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/KafkaConnectServiceTests.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/KafkaConnectServiceTests.java index f7e6407b91ef7c885a3faf5395bad65aed5c7cbe..603390dbf5737bf5f6f575c32a123c518e9673c8 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/KafkaConnectServiceTests.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/KafkaConnectServiceTests.java @@ -249,7 +249,7 @@ public class KafkaConnectServiceTests extends AbstractBaseTest { .exchange() .expectStatus().isOk() .expectBodyList(ConnectorPlugin.class) - .value(plugins -> assertEquals(13, plugins.size())); + .value(plugins -> assertEquals(14, plugins.size())); } @Test diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/SchemaRegistryServiceTests.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/SchemaRegistryServiceTests.java index c5af013e79f577126a23cfa34498e23dd329e647..07d05cade6030a8cf825a8e87d59b3aa8e898772 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/SchemaRegistryServiceTests.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/SchemaRegistryServiceTests.java @@ -1,7 +1,9 @@ package com.provectus.kafka.ui; import com.provectus.kafka.ui.model.CompatibilityLevel; +import com.provectus.kafka.ui.model.NewSchemaSubject; import com.provectus.kafka.ui.model.SchemaSubject; +import com.provectus.kafka.ui.model.SchemaType; import lombok.extern.log4j.Log4j2; import lombok.val; import org.junit.jupiter.api.Assertions; @@ -9,11 +11,13 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient; +import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.web.reactive.server.EntityExchangeResult; import org.springframework.test.web.reactive.server.WebTestClient; import org.springframework.web.reactive.function.BodyInserters; +import reactor.core.publisher.Mono; import java.util.List; import java.util.UUID; @@ -50,6 +54,69 @@ class SchemaRegistryServiceTests extends AbstractBaseTest { .expectStatus().isNotFound(); } + /** + * It should respond with 400 (bad request) when a new schema is submitted without a schemaType field + */ + @Test + void shouldBeBadRequestIfNoSchemaType() { + String schema = "{\"subject\":\"%s\",\"schema\":\"{\\\"type\\\": \\\"string\\\"}\"}"; + + webTestClient + .post() + .uri("/api/clusters/{clusterName}/schemas", LOCAL) + .contentType(MediaType.APPLICATION_JSON) + .body(BodyInserters.fromValue(schema.formatted(subject))) + .exchange() + .expectStatus().isBadRequest(); + } + + @Test + void shouldReturn409WhenSchemaDuplicatesThePreviousVersion() { + String schema = "{\"subject\":\"%s\",\"schemaType\":\"AVRO\",\"schema\":\"{\\\"type\\\": \\\"string\\\"}\"}"; + + webTestClient + .post() + .uri("/api/clusters/{clusterName}/schemas", LOCAL) + .contentType(MediaType.APPLICATION_JSON) + .body(BodyInserters.fromValue(schema.formatted(subject))) + .exchange() + .expectStatus().isEqualTo(HttpStatus.OK); + + webTestClient + .post() + .uri("/api/clusters/{clusterName}/schemas", LOCAL) + .contentType(MediaType.APPLICATION_JSON) + .body(BodyInserters.fromValue(schema.formatted(subject))) + .exchange()
+ .expectStatus().isEqualTo(HttpStatus.CONFLICT); + } + + @Test + void shouldCreateNewProtobufSchema() { + String schema = "syntax = \"proto3\";\n\nmessage MyRecord {\n int32 id = 1;\n string name = 2;\n}\n"; + NewSchemaSubject requestBody = new NewSchemaSubject() + .schemaType(SchemaType.PROTOBUF) + .subject(subject) + .schema(schema); + SchemaSubject actual = webTestClient + .post() + .uri("/api/clusters/{clusterName}/schemas", LOCAL) + .contentType(MediaType.APPLICATION_JSON) + .body(BodyInserters.fromPublisher(Mono.just(requestBody), NewSchemaSubject.class)) + .exchange() + .expectStatus() + .isOk() + .expectBody(SchemaSubject.class) + .returnResult() + .getResponseBody(); + + Assertions.assertNotNull(actual); + Assertions.assertEquals(CompatibilityLevel.CompatibilityEnum.BACKWARD.name(), actual.getCompatibilityLevel()); + Assertions.assertEquals("1", actual.getVersion()); + Assertions.assertEquals(SchemaType.PROTOBUF, actual.getSchemaType()); + Assertions.assertEquals(schema, actual.getSchema()); + } + @Test public void shouldReturnBackwardAsGlobalCompatibilityLevelByDefault() { webTestClient @@ -132,9 +199,9 @@ class SchemaRegistryServiceTests extends AbstractBaseTest { private void createNewSubjectAndAssert(String subject) { webTestClient .post() - .uri("/api/clusters/{clusterName}/schemas/{subject}", LOCAL, subject) + .uri("/api/clusters/{clusterName}/schemas", LOCAL) .contentType(MediaType.APPLICATION_JSON) - .body(BodyInserters.fromValue("{\"schema\":\"{\\\"type\\\": \\\"string\\\"}\"}")) + .body(BodyInserters.fromValue("{\"subject\":\"%s\",\"schemaType\":\"AVRO\",\"schema\":\"{\\\"type\\\": \\\"string\\\"}\"}".formatted(subject))) .exchange() .expectStatus().isOk() .expectBody(SchemaSubject.class) @@ -151,16 +218,17 @@ class SchemaRegistryServiceTests extends AbstractBaseTest { Assertions.assertEquals("\"string\"", actualSchema.getSchema()); Assertions.assertNotNull(actualSchema.getCompatibilityLevel()); + Assertions.assertEquals(SchemaType.AVRO, actualSchema.getSchemaType()); Assertions.assertEquals(expectedCompatibility.name(), actualSchema.getCompatibilityLevel()); } private void assertResponseBodyWhenCreateNewSchema(EntityExchangeResult exchangeResult) { SchemaSubject responseBody = exchangeResult.getResponseBody(); Assertions.assertNotNull(responseBody); - Assertions.assertEquals(1, responseBody.getId(), "The schema ID should be non-null in the response"); - String message = "It should be null"; - Assertions.assertNull(responseBody.getSchema(), message); - Assertions.assertNull(responseBody.getSubject(), message); - Assertions.assertNull(responseBody.getVersion(), message); + Assertions.assertEquals("1", responseBody.getVersion()); + Assertions.assertNotNull(responseBody.getSchema()); + Assertions.assertNotNull(responseBody.getSubject()); + Assertions.assertNotNull(responseBody.getCompatibilityLevel()); + Assertions.assertEquals(SchemaType.AVRO, responseBody.getSchemaType()); } } diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml index 635f5b1693585c8027da4eadc9b883c0036b3562..f50695abfc134c752efa6e663448d6b154c3e0f5 100644 --- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml +++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml @@ -358,58 +358,57 @@ paths: $ref: '#/components/schemas/ConsumerGroup' /api/clusters/{clusterName}/schemas: - get: + post: tags: - /api/clusters - summary: get all schemas of latest version from Schema Registry service - 
operationId: getSchemas + summary: create a new subject schema + operationId: createNewSchema parameters: - name: clusterName in: path required: true schema: type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/NewSchemaSubject' responses: 200: - description: OK + description: Ok content: application/json: schema: - type: array - items: - $ref: '#/components/schemas/SchemaSubject' - - /api/clusters/{clusterName}/schemas/{subject}: - post: + $ref: '#/components/schemas/SchemaSubject' + 400: + description: Bad request + 409: + description: Duplicate schema + 422: + description: Invalid parameters + get: tags: - /api/clusters - summary: create a new subject schema - operationId: createNewSchema + summary: get all schemas of latest version from Schema Registry service + operationId: getSchemas parameters: - name: clusterName in: path required: true schema: type: string - - name: subject - in: path - required: true - schema: - type: string - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/NewSchemaSubject' responses: 200: - description: Updated + description: OK content: application/json: schema: - $ref: '#/components/schemas/SchemaSubject' - 400: - description: Bad request + type: array + items: + $ref: '#/components/schemas/SchemaSubject' + + /api/clusters/{clusterName}/schemas/{subject}: delete: tags: - /api/clusters @@ -1360,16 +1359,29 @@ components: type: string compatibilityLevel: type: string + schemaType: + $ref: '#/components/schemas/SchemaType' required: - id + - subject + - version + - schema + - compatibilityLevel + - schemaType NewSchemaSubject: type: object properties: + subject: + type: string schema: type: string + schemaType: + $ref: '#/components/schemas/SchemaType' required: + - subject - schema + - schemaType CompatibilityLevel: type: object @@ -1387,13 +1399,12 @@ components: required: - compatibility -# CompatibilityLevelResponse: -# type: object -# properties: -# compatibilityLevel: -# type: string -# required: -# - compatibilityLevel + SchemaType: + type: string + enum: + - AVRO + - JSON + - PROTOBUF CompatibilityCheckResponse: type: object diff --git a/kafka-ui-react-app/src/components/App.tsx b/kafka-ui-react-app/src/components/App.tsx index a5fe4f6f897d4315492799121d75174c41533fca..66d77f69719fe894db56e86f2907702f27ce701b 100644 --- a/kafka-ui-react-app/src/components/App.tsx +++ b/kafka-ui-react-app/src/components/App.tsx @@ -1,13 +1,10 @@ import React from 'react'; -import { Switch, Route, Redirect } from 'react-router-dom'; +import { Switch, Route } from 'react-router-dom'; import './App.scss'; -import BrokersContainer from './Brokers/BrokersContainer'; -import TopicsContainer from './Topics/TopicsContainer'; import NavContainer from './Nav/NavContainer'; import PageLoader from './common/PageLoader/PageLoader'; import Dashboard from './Dashboard/Dashboard'; -import ConsumersGroupsContainer from './ConsumerGroups/ConsumersGroupsContainer'; -import SchemasContainer from './Schemas/SchemasContainer'; +import Cluster from './Cluster/Cluster'; interface AppProps { isClusterListFetched: boolean; @@ -44,29 +41,10 @@ const App: React.FC = ({ path={['/', '/ui', '/ui/clusters']} component={Dashboard} /> - - - - - + ) : ( - + )} diff --git a/kafka-ui-react-app/src/components/Cluster/Cluster.tsx b/kafka-ui-react-app/src/components/Cluster/Cluster.tsx new file mode 100644 index 0000000000000000000000000000000000000000..fc5c15793795cf0c69668ed15d8b831e211547db --- /dev/null +++ 
b/kafka-ui-react-app/src/components/Cluster/Cluster.tsx @@ -0,0 +1,39 @@ +import React from 'react'; +import { useSelector } from 'react-redux'; +import { Switch, Route, Redirect, useParams } from 'react-router-dom'; +import BrokersContainer from 'components/Brokers/BrokersContainer'; +import TopicsContainer from 'components/Topics/TopicsContainer'; +import ConsumersGroupsContainer from 'components/ConsumerGroups/ConsumersGroupsContainer'; +import Schemas from 'components/Schemas/Schemas'; +import { getClustersReadonlyStatus } from 'redux/reducers/clusters/selectors'; +import ClusterContext from 'components/contexts/ClusterContext'; + +const Cluster: React.FC = () => { + const { clusterName } = useParams<{ clusterName: string }>(); + const isReadOnly = useSelector(getClustersReadonlyStatus(clusterName)); + return ( + + + + + + + + + + ); +}; + +export default Cluster; diff --git a/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx b/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx index c5b0007288700acd977dbe5a273ef1a790addee2..9e7fa5403c34e8336b7ef76aedcf7e8bb8ed2135 100644 --- a/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx +++ b/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx @@ -17,6 +17,7 @@ const ClusterWidget: React.FC = ({ bytesInPerSec, bytesOutPerSec, onlinePartitionCount, + readOnly, }, }) => (
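The `components/contexts/ClusterContext` module that the new `Cluster.tsx` (and the Schemas components further down) import is not included in this diff. Judging from the `isReadOnly` usages, its shape is presumably something like this minimal sketch — an inference, not the PR's actual file:

```typescript
import React from 'react';

// Assumed shape: Cluster.tsx provides { isReadOnly } derived from the
// cluster's readOnly flag, and consumers such as Schemas/List and
// Schemas/Details read it via React.useContext(ClusterContext) to hide
// mutating controls (Create Schema, Update/Delete buttons).
const ClusterContext = React.createContext({
  isReadOnly: false,
});

export default ClusterContext;
```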
@@ -29,6 +30,9 @@ const ClusterWidget: React.FC = ({ > {status}
+ {readOnly && ( +
readonly
+ )} {name} diff --git a/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/ClusterWidget.spec.tsx b/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/ClusterWidget.spec.tsx index e8d21e6ab3ed7833b86868497e0f9caecca25975..4cc652e05c606effdfad6505a6d7706741514211 100644 --- a/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/ClusterWidget.spec.tsx +++ b/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/ClusterWidget.spec.tsx @@ -70,4 +70,17 @@ describe('ClusterWidget', () => { ).toMatchSnapshot(); }); }); + + describe('when cluster is read-only', () => { + it('renders the tag', () => { + expect( + shallow( + + ) + .find('.title') + .childAt(1) + .text() + ).toEqual('readonly'); + }); + }); }); diff --git a/kafka-ui-react-app/src/components/Schemas/Details/Details.tsx b/kafka-ui-react-app/src/components/Schemas/Details/Details.tsx index 1753b11e663ddf92a53a673e2025a7dc44b232f5..5aa7b40749b3df1110f5e25070e725c728162281 100644 --- a/kafka-ui-react-app/src/components/Schemas/Details/Details.tsx +++ b/kafka-ui-react-app/src/components/Schemas/Details/Details.tsx @@ -2,12 +2,14 @@ import React from 'react'; import { SchemaSubject } from 'generated-sources'; import { ClusterName, SchemaName } from 'redux/interfaces'; import { clusterSchemasPath } from 'lib/paths'; +import ClusterContext from 'components/contexts/ClusterContext'; import Breadcrumb from '../../common/Breadcrumb/Breadcrumb'; import SchemaVersion from './SchemaVersion'; import LatestVersionItem from './LatestVersionItem'; import PageLoader from '../../common/PageLoader/PageLoader'; export interface DetailsProps { + subject: SchemaName; schema: SchemaSubject; clusterName: ClusterName; versions: SchemaSubject[]; @@ -19,15 +21,18 @@ export interface DetailsProps { } const Details: React.FC = ({ + subject, schema, clusterName, fetchSchemaVersions, versions, isFetched, }) => { + const { isReadOnly } = React.useContext(ClusterContext); React.useEffect(() => { - fetchSchemaVersions(clusterName, schema.subject as SchemaName); + fetchSchemaVersions(clusterName, subject); }, [fetchSchemaVersions, clusterName]); + return (
@@ -39,59 +44,63 @@ const Details: React.FC = ({ }, ]} > - {schema.subject} + {subject}
-
-
-
-
-
- Latest Version -
-
- #{schema.version} + {isFetched ? ( + <> +
+
+
+
+
+ Latest Version +
+
+ #{schema.version} +
+
+ {!isReadOnly && ( +
+ + +
+ )}
+
-
- - +
+ + + + + + + + + + {versions.map((version) => ( + + ))} + +
VersionIDSchema
-
- -
- {isFetched ? ( -
- - - - - - - - - - {versions.map((version) => ( - - ))} - -
VersionIDSchema
-
+ ) : ( )} diff --git a/kafka-ui-react-app/src/components/Schemas/Details/DetailsContainer.ts b/kafka-ui-react-app/src/components/Schemas/Details/DetailsContainer.ts index 072d2a44f8cff2c22285eca3f3733090a3095393..dbfec5314ca4327ee6eb9b57833dbb400a18074b 100644 --- a/kafka-ui-react-app/src/components/Schemas/Details/DetailsContainer.ts +++ b/kafka-ui-react-app/src/components/Schemas/Details/DetailsContainer.ts @@ -24,6 +24,7 @@ const mapStateToProps = ( }, }: OwnProps ) => ({ + subject, schema: getSchema(state, subject), versions: getSortedSchemaVersions(state), isFetched: getIsSchemaVersionFetched(state), diff --git a/kafka-ui-react-app/src/components/Schemas/Details/__test__/Details.spec.tsx b/kafka-ui-react-app/src/components/Schemas/Details/__test__/Details.spec.tsx index 816f20380c6c23e1bdc691eba261b9e61b87d1e5..312bd16fa4f0884b0380c6074dc0cc029c225f83 100644 --- a/kafka-ui-react-app/src/components/Schemas/Details/__test__/Details.spec.tsx +++ b/kafka-ui-react-app/src/components/Schemas/Details/__test__/Details.spec.tsx @@ -1,7 +1,9 @@ import React from 'react'; import { Provider } from 'react-redux'; -import { shallow } from 'enzyme'; +import { shallow, mount } from 'enzyme'; import configureStore from 'redux/store/configureStore'; +import { StaticRouter } from 'react-router'; +import ClusterContext from 'components/contexts/ClusterContext'; import DetailsContainer from '../DetailsContainer'; import Details, { DetailsProps } from '../Details'; import { schema, versions } from './fixtures'; @@ -24,6 +26,7 @@ describe('Details', () => { describe('View', () => { const setupWrapper = (props: Partial = {}) => (
{ expect(shallow(setupWrapper({ versions }))).toMatchSnapshot(); }); }); + + describe('when the readonly flag is set', () => { + it('does not render update & delete buttons', () => { + expect( + mount( + + + {setupWrapper({ versions })} + + + ).exists('.level-right') + ).toBeFalsy(); + }); + }); }); }); }); diff --git a/kafka-ui-react-app/src/components/Schemas/Details/__test__/__snapshots__/Details.spec.tsx.snap b/kafka-ui-react-app/src/components/Schemas/Details/__test__/__snapshots__/Details.spec.tsx.snap index 7871bb5588c508a0016943770cfdbbfb676b4961..337111c166be2a6a10ef6582a1e8af34d30b7b0f 100644 --- a/kafka-ui-react-app/src/components/Schemas/Details/__test__/__snapshots__/Details.spec.tsx.snap +++ b/kafka-ui-react-app/src/components/Schemas/Details/__test__/__snapshots__/Details.spec.tsx.snap @@ -75,6 +75,7 @@ exports[`Details View Initial state matches snapshot 1`] = ` "compatibilityLevel": "BACKWARD", "id": 1, "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test", "version": "1", } @@ -126,67 +127,6 @@ exports[`Details View when page with schema versions is loading matches snapshot test
-
-
-
-
-
- - Latest Version - -
-
- # - 1 -
-
-
-
- - -
-
- -
`; @@ -266,6 +206,7 @@ exports[`Details View when page with schema versions loaded when schema has vers "compatibilityLevel": "BACKWARD", "id": 1, "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test", "version": "1", } @@ -299,6 +240,7 @@ exports[`Details View when page with schema versions loaded when schema has vers "compatibilityLevel": "BACKWARD", "id": 1, "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test", "version": "1", } @@ -311,6 +253,7 @@ exports[`Details View when page with schema versions loaded when schema has vers "compatibilityLevel": "BACKWARD", "id": 2, "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord2\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test", "version": "2", } @@ -397,6 +340,7 @@ exports[`Details View when page with schema versions loaded when versions are em "compatibilityLevel": "BACKWARD", "id": 1, "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test", "version": "1", } diff --git a/kafka-ui-react-app/src/components/Schemas/Details/__test__/fixtures.ts b/kafka-ui-react-app/src/components/Schemas/Details/__test__/fixtures.ts index 019532251b83120b78f0b57869b6a7d0857038f4..d53190c9f637d3b14ea784fe45c9877563a206c1 100644 --- a/kafka-ui-react-app/src/components/Schemas/Details/__test__/fixtures.ts +++ b/kafka-ui-react-app/src/components/Schemas/Details/__test__/fixtures.ts @@ -1,4 +1,4 @@ -import { SchemaSubject } from 'generated-sources'; +import { SchemaSubject, SchemaType } from 'generated-sources'; export const schema: SchemaSubject = { subject: 'test', @@ -7,6 +7,7 @@ export const schema: SchemaSubject = { schema: '{"type":"record","name":"MyRecord1","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }; export const versions: SchemaSubject[] = [ @@ -17,6 +18,7 @@ export const versions: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord1","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, { subject: 'test', @@ -25,5 +27,6 @@ export const versions: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord2","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, ]; diff --git a/kafka-ui-react-app/src/components/Schemas/List/List.tsx b/kafka-ui-react-app/src/components/Schemas/List/List.tsx index c7366d1f66bb3617f239e6860c2a240916179194..5e8ab6681b93f40de017f05cc84dc1dd9a795607 100644 --- a/kafka-ui-react-app/src/components/Schemas/List/List.tsx +++ b/kafka-ui-react-app/src/components/Schemas/List/List.tsx @@ -2,54 +2,74 @@ import React from 'react'; import { SchemaSubject } from 'generated-sources'; import { NavLink, useParams } from 'react-router-dom'; import { clusterSchemaNewPath } from 'lib/paths'; +import { ClusterName } from 'redux/interfaces'; +import PageLoader from 'components/common/PageLoader/PageLoader'; import Breadcrumb from 
'components/common/Breadcrumb/Breadcrumb'; +import ClusterContext from 'components/contexts/ClusterContext'; import ListItem from './ListItem'; export interface ListProps { schemas: SchemaSubject[]; + isFetching: boolean; + fetchSchemasByClusterName: (clusterName: ClusterName) => void; } -const List: React.FC = ({ schemas }) => { +const List: React.FC = ({ + schemas, + isFetching, + fetchSchemasByClusterName, +}) => { + const { isReadOnly } = React.useContext(ClusterContext); const { clusterName } = useParams<{ clusterName: string }>(); + React.useEffect(() => { + fetchSchemasByClusterName(clusterName); + }, [fetchSchemasByClusterName, clusterName]); + return (
Schema Registry
-
- - Create Schema - -
+ {!isReadOnly && ( +
+ + Create Schema + +
+ )}
-
- - - - - - - - - - {schemas.length > 0 ? ( - schemas.map((subject) => ( - - )) - ) : ( + {isFetching ? ( + + ) : ( +
+
Schema NameVersionCompatibility
+ - + + + - )} - -
No schemas foundSchema NameVersionCompatibility
-
+ + + {schemas.length > 0 ? ( + schemas.map((subject) => ( + + )) + ) : ( + + No schemas found + + )} + + +
+ )}
); }; diff --git a/kafka-ui-react-app/src/components/Schemas/List/ListContainer.tsx b/kafka-ui-react-app/src/components/Schemas/List/ListContainer.tsx index 29259875404eb5570ae02fa76a83650d3189a57b..9bd7c4fd6b7276436d8940f9788673c3d736d7c4 100644 --- a/kafka-ui-react-app/src/components/Schemas/List/ListContainer.tsx +++ b/kafka-ui-react-app/src/components/Schemas/List/ListContainer.tsx @@ -1,10 +1,19 @@ import { connect } from 'react-redux'; import { RootState } from 'redux/interfaces'; -import { getSchemaList } from 'redux/reducers/schemas/selectors'; +import { fetchSchemasByClusterName } from 'redux/actions'; +import { + getIsSchemaListFetching, + getSchemaList, +} from 'redux/reducers/schemas/selectors'; import List from './List'; const mapStateToProps = (state: RootState) => ({ + isFetching: getIsSchemaListFetching(state), schemas: getSchemaList(state), }); -export default connect(mapStateToProps)(List); +const mapDispatchToProps = { + fetchSchemasByClusterName, +}; + +export default connect(mapStateToProps, mapDispatchToProps)(List); diff --git a/kafka-ui-react-app/src/components/Schemas/List/__test__/List.spec.tsx b/kafka-ui-react-app/src/components/Schemas/List/__test__/List.spec.tsx index 4a803432fb2741f2ab3967b323ce55823291238d..3e3aa07f0312978d27ba8202d74644f36c279b46 100644 --- a/kafka-ui-react-app/src/components/Schemas/List/__test__/List.spec.tsx +++ b/kafka-ui-react-app/src/components/Schemas/List/__test__/List.spec.tsx @@ -3,6 +3,7 @@ import { mount, shallow } from 'enzyme'; import { Provider } from 'react-redux'; import { StaticRouter } from 'react-router'; import configureStore from 'redux/store/configureStore'; +import ClusterContext from 'components/contexts/ClusterContext'; import ListContainer from '../ListContainer'; import List, { ListProps } from '../List'; import { schemas } from './fixtures'; @@ -27,13 +28,50 @@ describe('List', () => { const setupWrapper = (props: Partial = {}) => ( - + ); + describe('Initial state', () => { + let useEffect: jest.SpyInstance< + void, + [effect: React.EffectCallback, deps?: React.DependencyList | undefined] + >; + const mockedFn = jest.fn(); + + const mockedUseEffect = () => { + useEffect.mockImplementationOnce(mockedFn); + }; + + beforeEach(() => { + useEffect = jest.spyOn(React, 'useEffect'); + mockedUseEffect(); + }); + + it('should call fetchSchemasByClusterName every render', () => { + mount(setupWrapper({ fetchSchemasByClusterName: mockedFn })); + expect(mockedFn).toHaveBeenCalled(); + }); + }); + + describe('when fetching', () => { + it('renders PageLoader', () => { + const wrapper = mount(setupWrapper({ isFetching: true })); + expect(wrapper.exists('Breadcrumb')).toBeTruthy(); + expect(wrapper.exists('thead')).toBeFalsy(); + expect(wrapper.exists('ListItem')).toBeFalsy(); + expect(wrapper.exists('PageLoader')).toBeTruthy(); + }); + }); + describe('without schemas', () => { it('renders table heading without ListItem', () => { - const wrapper = mount(setupWrapper()); + const wrapper = mount(setupWrapper({ isFetching: false })); expect(wrapper.exists('Breadcrumb')).toBeTruthy(); expect(wrapper.exists('thead')).toBeTruthy(); expect(wrapper.exists('ListItem')).toBeFalsy(); @@ -41,7 +79,7 @@ describe('List', () => { }); describe('with schemas', () => { - const wrapper = mount(setupWrapper({ schemas })); + const wrapper = mount(setupWrapper({ isFetching: false, schemas })); it('renders table heading with ListItem', () => { expect(wrapper.exists('Breadcrumb')).toBeTruthy(); @@ -49,5 +87,18 @@ describe('List', () => { 
expect(wrapper.find('ListItem').length).toEqual(3); }); }); + + describe('with readonly cluster', () => { + const wrapper = mount( + + + {setupWrapper({ schemas: [] })} + + + ); + it('does not render Create Schema button', () => { + expect(wrapper.exists('NavLink')).toBeFalsy(); + }); + }); }); }); diff --git a/kafka-ui-react-app/src/components/Schemas/List/__test__/__snapshots__/ListItem.spec.tsx.snap b/kafka-ui-react-app/src/components/Schemas/List/__test__/__snapshots__/ListItem.spec.tsx.snap index 1783cdc9943356691a22ae3e83810e1101d84fd8..e26acad840820e63065edf177da43f746d3f7e0d 100644 --- a/kafka-ui-react-app/src/components/Schemas/List/__test__/__snapshots__/ListItem.spec.tsx.snap +++ b/kafka-ui-react-app/src/components/Schemas/List/__test__/__snapshots__/ListItem.spec.tsx.snap @@ -32,6 +32,7 @@ exports[`ListItem matches snapshot 1`] = ` "compatibilityLevel": "BACKWARD", "id": 1, "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test", "version": "1", } diff --git a/kafka-ui-react-app/src/components/Schemas/List/__test__/fixtures.ts b/kafka-ui-react-app/src/components/Schemas/List/__test__/fixtures.ts index 7b337a23ec95f74f8d11776050d0ff6b01009b07..46a4d5a10561440650a199b3672a541533325181 100644 --- a/kafka-ui-react-app/src/components/Schemas/List/__test__/fixtures.ts +++ b/kafka-ui-react-app/src/components/Schemas/List/__test__/fixtures.ts @@ -1,4 +1,4 @@ -import { SchemaSubject } from 'generated-sources'; +import { SchemaSubject, SchemaType } from 'generated-sources'; export const schemas: SchemaSubject[] = [ { @@ -8,6 +8,7 @@ export const schemas: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord1","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, { subject: 'test2', @@ -16,6 +17,7 @@ export const schemas: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord2","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, { subject: 'test3', @@ -24,5 +26,6 @@ export const schemas: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord3","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, ]; diff --git a/kafka-ui-react-app/src/components/Schemas/New/New.tsx b/kafka-ui-react-app/src/components/Schemas/New/New.tsx index cc15b9709e6745cf596aff4c558a988b728d960e..7a1325b9242c85ac571e5e532b0aa13fa32994f3 100644 --- a/kafka-ui-react-app/src/components/Schemas/New/New.tsx +++ b/kafka-ui-react-app/src/components/Schemas/New/New.tsx @@ -1,17 +1,16 @@ import React from 'react'; -import { ClusterName, SchemaName, NewSchemaSubjectRaw } from 'redux/interfaces'; +import { ClusterName, NewSchemaSubjectRaw } from 'redux/interfaces'; import { useForm } from 'react-hook-form'; import { ErrorMessage } from '@hookform/error-message'; import Breadcrumb from 'components/common/Breadcrumb/Breadcrumb'; import { clusterSchemaPath, clusterSchemasPath } from 'lib/paths'; -import { NewSchemaSubject } from 'generated-sources'; +import { NewSchemaSubject, SchemaType } from 'generated-sources'; import { SCHEMA_NAME_VALIDATION_PATTERN } from 'lib/constants'; import { useHistory, useParams } from 'react-router'; export interface NewProps { createSchema: ( clusterName: ClusterName, - subject: 
SchemaName, newSchemaSubject: NewSchemaSubject ) => void; } @@ -29,7 +28,11 @@ const New: React.FC = ({ createSchema }) => { const onSubmit = React.useCallback( async ({ subject, schema }: NewSchemaSubjectRaw) => { try { - await createSchema(clusterName, subject, { schema }); + await createSchema(clusterName, { + subject, + schema, + schemaType: SchemaType.AVRO, + }); history.push(clusterSchemaPath(clusterName, subject)); } catch (e) { // Show Error diff --git a/kafka-ui-react-app/src/components/Schemas/Schemas.tsx b/kafka-ui-react-app/src/components/Schemas/Schemas.tsx index 030aa71229b0115cf356e9b52c10935749c53ddd..62aa84f81fddc2eddd2b917f6b098cd853951c54 100644 --- a/kafka-ui-react-app/src/components/Schemas/Schemas.tsx +++ b/kafka-ui-react-app/src/components/Schemas/Schemas.tsx @@ -1,49 +1,27 @@ import React from 'react'; -import { ClusterName } from 'redux/interfaces'; -import { Switch, Route, useParams } from 'react-router-dom'; -import PageLoader from 'components/common/PageLoader/PageLoader'; +import { Switch, Route } from 'react-router-dom'; import ListContainer from './List/ListContainer'; import DetailsContainer from './Details/DetailsContainer'; import NewContainer from './New/NewContainer'; -export interface SchemasProps { - isFetching: boolean; - fetchSchemasByClusterName: (clusterName: ClusterName) => void; -} - -const Schemas: React.FC = ({ - isFetching, - fetchSchemasByClusterName, -}) => { - const { clusterName } = useParams<{ clusterName: string }>(); - - React.useEffect(() => { - fetchSchemasByClusterName(clusterName); - }, [fetchSchemasByClusterName, clusterName]); - - if (isFetching) { - return ; - } - - return ( - - - - - - ); -}; +const Schemas: React.FC = () => ( + + + + + +); export default Schemas; diff --git a/kafka-ui-react-app/src/components/Schemas/SchemasContainer.tsx b/kafka-ui-react-app/src/components/Schemas/SchemasContainer.tsx deleted file mode 100644 index a70e9d902c5a369fe7940b3775d071e46260b726..0000000000000000000000000000000000000000 --- a/kafka-ui-react-app/src/components/Schemas/SchemasContainer.tsx +++ /dev/null @@ -1,15 +0,0 @@ -import { connect } from 'react-redux'; -import { RootState } from 'redux/interfaces'; -import { fetchSchemasByClusterName } from 'redux/actions'; -import { getIsSchemaListFetching } from 'redux/reducers/schemas/selectors'; -import Schemas from './Schemas'; - -const mapStateToProps = (state: RootState) => ({ - isFetching: getIsSchemaListFetching(state), -}); - -const mapDispatchToProps = { - fetchSchemasByClusterName, -}; - -export default connect(mapStateToProps, mapDispatchToProps)(Schemas); diff --git a/kafka-ui-react-app/src/components/Schemas/__test__/Schemas.spec.tsx b/kafka-ui-react-app/src/components/Schemas/__test__/Schemas.spec.tsx index 1c24d2cff3119b3d6de1374afaa1914b91eb0ba2..e643afeae94c13f047a4027d82b58132536e5420 100644 --- a/kafka-ui-react-app/src/components/Schemas/__test__/Schemas.spec.tsx +++ b/kafka-ui-react-app/src/components/Schemas/__test__/Schemas.spec.tsx @@ -1,71 +1,18 @@ import React from 'react'; -import { Provider } from 'react-redux'; -import { mount } from 'enzyme'; -import configureStore from 'redux/store/configureStore'; +import { shallow } from 'enzyme'; import { StaticRouter } from 'react-router-dom'; -import Schemas, { SchemasProps } from '../Schemas'; -import SchemasContainer from '../SchemasContainer'; +import Schemas from '../Schemas'; describe('Schemas', () => { const pathname = `/ui/clusters/clusterName/schemas`; - describe('Container', () => { - const store = configureStore(); + 
it('renders', () => { + const wrapper = shallow( + + + + ); - it('renders view', () => { - const component = mount( - - - - - - ); - - expect(component.exists()).toBeTruthy(); - }); - - describe('View', () => { - const setupWrapper = (props: Partial = {}) => ( - - - - ); - describe('Initial state', () => { - let useEffect: jest.SpyInstance< - void, - [ - effect: React.EffectCallback, - deps?: React.DependencyList | undefined - ] - >; - const mockedFn = jest.fn(); - - const mockedUseEffect = () => { - useEffect.mockImplementationOnce(mockedFn); - }; - - beforeEach(() => { - useEffect = jest.spyOn(React, 'useEffect'); - mockedUseEffect(); - }); - - it('should call fetchSchemasByClusterName every render', () => { - mount(setupWrapper({ fetchSchemasByClusterName: mockedFn })); - expect(mockedFn).toHaveBeenCalled(); - }); - }); - - describe('when page is loading', () => { - const wrapper = mount(setupWrapper({ isFetching: true })); - - it('renders PageLoader', () => { - expect(wrapper.exists('PageLoader')).toBeTruthy(); - }); - }); - }); + expect(wrapper.exists('Schemas')).toBeTruthy(); }); }); diff --git a/kafka-ui-react-app/src/components/Topics/Details/Details.tsx b/kafka-ui-react-app/src/components/Topics/Details/Details.tsx index 1b1cb9903f9e66eb3083b0643bd04d1f651401af..796be2c6f920648274075f29d104808ee2834656 100644 --- a/kafka-ui-react-app/src/components/Topics/Details/Details.tsx +++ b/kafka-ui-react-app/src/components/Topics/Details/Details.tsx @@ -10,6 +10,7 @@ import { clusterTopicMessagesPath, clusterTopicsTopicEditPath, } from 'lib/paths'; +import ClusterContext from 'components/contexts/ClusterContext'; import OverviewContainer from './Overview/OverviewContainer'; import MessagesContainer from './Messages/MessagesContainer'; import SettingsContainer from './Settings/SettingsContainer'; @@ -21,6 +22,7 @@ interface Props extends Topic, TopicDetails { } const Details: React.FC = ({ clusterName, topicName }) => { + const { isReadOnly } = React.useContext(ClusterContext); return (
@@ -33,9 +35,11 @@ const Details: React.FC<Props> = ({ clusterName, topicName }) => {
           {topicName}
- + {!isReadOnly && ( + + )}
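Note: components/contexts/ClusterContext itself is not touched by this patch, only consumed. A minimal sketch of that module, consistent with how Details.tsx reads it and how the specs provide it (the default value here is an assumption):

import React from 'react';

// Consumers read the flag with React.useContext(ClusterContext);
// tests override it via <ClusterContext.Provider value={{ isReadOnly: true }}>.
const ClusterContext = React.createContext({
  isReadOnly: false,
});

export default ClusterContext;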
diff --git a/kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx b/kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx index fbb03cd6774cf406e533f791d11c3ee99100125c..bc4b2ae4c6d9dbfcc2b6044b4d28ce0551375896 100644 --- a/kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx +++ b/kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx @@ -195,7 +195,7 @@ const Messages: React.FC = ({ }; if (!isFetched) { - return ; + return ; } return ( diff --git a/kafka-ui-react-app/src/components/Topics/List/List.tsx b/kafka-ui-react-app/src/components/Topics/List/List.tsx index ad6a286e90586d3d7f143393b36fcdcbd071c6c8..879c08134a79e82124e34be9f01a0547302ec64d 100644 --- a/kafka-ui-react-app/src/components/Topics/List/List.tsx +++ b/kafka-ui-react-app/src/components/Topics/List/List.tsx @@ -3,6 +3,7 @@ import { TopicWithDetailedInfo, ClusterName } from 'redux/interfaces'; import Breadcrumb from 'components/common/Breadcrumb/Breadcrumb'; import { NavLink } from 'react-router-dom'; import { clusterTopicNewPath } from 'lib/paths'; +import ClusterContext from 'components/contexts/ClusterContext'; import ListItem from './ListItem'; interface Props { @@ -15,7 +16,7 @@ const List: React.FC = ({ clusterName, topics, externalTopics }) => { const [showInternal, setShowInternal] = React.useState(true); const handleSwitch = () => setShowInternal(!showInternal); - + const { isReadOnly } = React.useContext(ClusterContext); const items = showInternal ? topics : externalTopics; return ( @@ -38,12 +39,14 @@ const List: React.FC = ({ clusterName, topics, externalTopics }) => {
-        <NavLink className="button is-primary" to={clusterTopicNewPath(clusterName)}>
-          Add a Topic
-        </NavLink>
+        {!isReadOnly && (
+          <NavLink className="button is-primary" to={clusterTopicNewPath(clusterName)}>
+            Add a Topic
+          </NavLink>
+        )}
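Note: the patch only shows ClusterContext.Provider being mounted in tests; where the app itself provides the value is outside this diff. A hypothetical wiring sketch, assuming a parent cluster page resolves the flag once (e.g. from the getClustersReadonlyStatus selector introduced later in this patch) and shares it with the subtree:

import React from 'react';
import ClusterContext from 'components/contexts/ClusterContext';

// Hypothetical parent component (not part of this diff): everything under
// it — topic list, topic details, schema list — sees the same flag.
const ClusterPage: React.FC<{ isReadOnly: boolean }> = ({
  isReadOnly,
  children,
}) => (
  <ClusterContext.Provider value={{ isReadOnly }}>
    {children}
  </ClusterContext.Provider>
);

export default ClusterPage;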
diff --git a/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx b/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx new file mode 100644 index 0000000000000000000000000000000000000000..0e857e276bfe1a8e6f1d7e6a75c786a07e3aea08 --- /dev/null +++ b/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx @@ -0,0 +1,22 @@ +import { mount } from 'enzyme'; +import React from 'react'; +import ClusterContext from 'components/contexts/ClusterContext'; +import List from '../List'; + +describe('List', () => { + describe('when it has readonly flag', () => { + it('does not render the Add a Topic button', () => { + const props = { + clusterName: 'Cluster', + topics: [], + externalTopics: [], + }; + const component = mount( + + + + ); + expect(component.exists('NavLink')).toBeFalsy(); + }); + }); +}); diff --git a/kafka-ui-react-app/src/components/Topics/Topics.tsx b/kafka-ui-react-app/src/components/Topics/Topics.tsx index 5317ec1abf0bbc920823ed11d641aa6cf28016ce..d119abdfc8d192eb006714be28f1d0b68952f6c4 100644 --- a/kafka-ui-react-app/src/components/Topics/Topics.tsx +++ b/kafka-ui-react-app/src/components/Topics/Topics.tsx @@ -10,7 +10,6 @@ import NewContainer from './New/NewContainer'; interface Props { clusterName: ClusterName; isFetched: boolean; - fetchBrokers: (clusterName: ClusterName) => void; fetchTopicsList: (clusterName: ClusterName) => void; } diff --git a/kafka-ui-react-app/src/components/common/Dashboard/__tests__/MetricsWrapper.spec.tsx b/kafka-ui-react-app/src/components/common/Dashboard/__tests__/MetricsWrapper.spec.tsx index 73864385ffd170c64bc1ffeac113dd5fe6267192..c5871d50ff62e472311cccfccece6854f3f2bf43 100644 --- a/kafka-ui-react-app/src/components/common/Dashboard/__tests__/MetricsWrapper.spec.tsx +++ b/kafka-ui-react-app/src/components/common/Dashboard/__tests__/MetricsWrapper.spec.tsx @@ -8,17 +8,17 @@ describe('MetricsWrapper', () => { const component = shallow( ); - expect(component.find(`.${className}`).exists()).toBeTruthy(); - expect(component.find('.level-multiline').exists()).toBeTruthy(); + expect(component.exists(`.${className}`)).toBeTruthy(); + expect(component.exists('.level-multiline')).toBeTruthy(); }); it('correctly renders children', () => { let component = shallow(); - expect(component.find('.subtitle').exists()).toBeFalsy(); + expect(component.exists('.subtitle')).toBeFalsy(); const title = 'title'; component = shallow(); - expect(component.find('.subtitle').exists()).toBeTruthy(); + expect(component.exists('.subtitle')).toBeTruthy(); expect(component.text()).toEqual(title); }); }); diff --git a/kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx b/kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx index 2f5932a4f87f73a08a870a3f61a5caddb763bbf0..98c70eb48385b2e4861fde215d7b6c2870fb5a42 100644 --- a/kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx +++ b/kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx @@ -2,14 +2,14 @@ import React from 'react'; import cx from 'classnames'; interface Props { - isFullHeight: boolean; + fullHeight: boolean; } -const PageLoader: React.FC> = ({ isFullHeight = true }) => ( +const PageLoader: React.FC> = ({ fullHeight }) => (
 describe('PageLoader', () => {
   it('matches the snapshot', () => {
-    const component = mount(<PageLoader />);
-    expect(component).toMatchSnapshot();
+    expect(mount(<PageLoader />)).toMatchSnapshot();
+  });
+
+  it('renders half-height page loader by default', () => {
+    const wrapper = mount(<PageLoader />);
+    expect(wrapper.exists('.hero.is-halfheight')).toBeTruthy();
+    expect(wrapper.exists('.hero.is-fullheight-with-navbar')).toBeFalsy();
+  });
+
+  it('renders fullheight page loader', () => {
+    const wrapper = mount(<PageLoader fullHeight />);
+    expect(wrapper.exists('.hero.is-halfheight')).toBeFalsy();
+    expect(wrapper.exists('.hero.is-fullheight-with-navbar')).toBeTruthy();
   });
 });
diff --git a/kafka-ui-react-app/src/components/common/PageLoader/__tests__/__snapshots__/PageLoader.spec.tsx.snap
index 10a9cf063135223ff34fc7f3efb0971669a725d5..34e9145f31aebe1b9de6f92c25f0909f0beb922b 100644
--- a/kafka-ui-react-app/src/components/common/PageLoader/__tests__/__snapshots__/PageLoader.spec.tsx.snap
+++ b/kafka-ui-react-app/src/components/common/PageLoader/__tests__/__snapshots__/PageLoader.spec.tsx.snap
@@ -3,7 +3,7 @@ exports[`PageLoader matches the snapshot 1`] = `
{ describe('createSchema', () => { it('creates POST_SCHEMA__SUCCESS when posting new schema', async () => { - fetchMock.postOnce(`/api/clusters/${clusterName}/schemas/${subject}`, { + fetchMock.postOnce(`/api/clusters/${clusterName}/schemas`, { body: schemaFixtures.schemaVersionsPayload[0], }); await store.dispatch( - thunks.createSchema(clusterName, subject, fixtures.schemaPayload) + thunks.createSchema(clusterName, fixtures.schemaPayload) ); expect(store.getActions()).toEqual([ actions.createSchemaAction.request(), @@ -122,19 +122,19 @@ describe('Thunks', () => { ]); }); - // it('creates POST_SCHEMA__FAILURE when posting new schema', async () => { - // fetchMock.postOnce( - // `/api/clusters/${clusterName}/schemas/${subject}`, - // 404 - // ); - // await store.dispatch( - // thunks.createSchema(clusterName, subject, fixtures.schemaPayload) - // ); - // expect(store.getActions()).toEqual([ - // actions.createSchemaAction.request(), - // actions.createSchemaAction.failure(), - // ]); - // expect(store.getActions()).toThrow(); - // }); + it('creates POST_SCHEMA__FAILURE when posting new schema', async () => { + fetchMock.postOnce(`/api/clusters/${clusterName}/schemas`, 404); + try { + await store.dispatch( + thunks.createSchema(clusterName, fixtures.schemaPayload) + ); + } catch (error) { + expect(error.status).toEqual(404); + expect(store.getActions()).toEqual([ + actions.createSchemaAction.request(), + actions.createSchemaAction.failure(), + ]); + } + }); }); }); diff --git a/kafka-ui-react-app/src/redux/actions/thunks.ts b/kafka-ui-react-app/src/redux/actions/thunks.ts index f650d011bfbecff10d5322f7637208d227928d4d..36e1ccaeb0b66e82ab243931f2651e6650058a86 100644 --- a/kafka-ui-react-app/src/redux/actions/thunks.ts +++ b/kafka-ui-react-app/src/redux/actions/thunks.ts @@ -285,14 +285,12 @@ export const fetchSchemaVersions = ( export const createSchema = ( clusterName: ClusterName, - subject: SchemaName, newSchemaSubject: NewSchemaSubject ): PromiseThunkResult => async (dispatch) => { dispatch(actions.createSchemaAction.request()); try { const schema: SchemaSubject = await apiClient.createNewSchema({ clusterName, - subject, newSchemaSubject, }); dispatch(actions.createSchemaAction.success(schema)); diff --git a/kafka-ui-react-app/src/redux/reducers/clusters/selectors.ts b/kafka-ui-react-app/src/redux/reducers/clusters/selectors.ts index e6667fc3e02000b3955c2cad31d9ec1f76fbbe1d..a8b06b0f1730dfcaa913ae71687601360a9c7018 100644 --- a/kafka-ui-react-app/src/redux/reducers/clusters/selectors.ts +++ b/kafka-ui-react-app/src/redux/reducers/clusters/selectors.ts @@ -24,3 +24,10 @@ export const getOnlineClusters = createSelector(getClusterList, (clusters) => export const getOfflineClusters = createSelector(getClusterList, (clusters) => clusters.filter(({ status }) => status === ServerStatus.OFFLINE) ); + +export const getClustersReadonlyStatus = (clusterName: string) => + createSelector( + getClusterList, + (clusters): boolean => + clusters.find(({ name }) => name === clusterName)?.readOnly || false + ); diff --git a/kafka-ui-react-app/src/redux/reducers/schemas/__test__/__snapshots__/reducer.spec.ts.snap b/kafka-ui-react-app/src/redux/reducers/schemas/__test__/__snapshots__/reducer.spec.ts.snap index bf18ffa14fe392579ea4ecc2b34447a2b4b09596..61332659b76e2f065e60a35b57947a9045160c63 100644 --- a/kafka-ui-react-app/src/redux/reducers/schemas/__test__/__snapshots__/reducer.spec.ts.snap +++ b/kafka-ui-react-app/src/redux/reducers/schemas/__test__/__snapshots__/reducer.spec.ts.snap @@ -12,6 +12,7 @@ 
Object { "compatibilityLevel": "BACKWARD", "id": 2, "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord2\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test", "version": "2", }, @@ -19,6 +20,7 @@ Object { "compatibilityLevel": "BACKWARD", "id": 4, "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord4\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test2", "version": "3", }, @@ -26,6 +28,7 @@ Object { "compatibilityLevel": "BACKWARD", "id": 5, "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test3", "version": "1", }, @@ -43,6 +46,7 @@ Object { "compatibilityLevel": "BACKWARD", "id": 1, "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test", "version": "1", }, @@ -50,6 +54,7 @@ Object { "compatibilityLevel": "BACKWARD", "id": 2, "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord2\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test", "version": "2", }, @@ -67,6 +72,7 @@ Object { "compatibilityLevel": "BACKWARD", "id": 1, "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test", "version": "1", }, diff --git a/kafka-ui-react-app/src/redux/reducers/schemas/__test__/fixtures.ts b/kafka-ui-react-app/src/redux/reducers/schemas/__test__/fixtures.ts index 5a75ed212bec3d01aab7f0d9ed71aa726cb0eaf4..a53f1dced7ad9c0db118ef88f2f3e413d8ed07df 100644 --- a/kafka-ui-react-app/src/redux/reducers/schemas/__test__/fixtures.ts +++ b/kafka-ui-react-app/src/redux/reducers/schemas/__test__/fixtures.ts @@ -1,5 +1,5 @@ import { SchemasState } from 'redux/interfaces'; -import { SchemaSubject } from 'generated-sources'; +import { SchemaSubject, SchemaType } from 'generated-sources'; export const initialState: SchemasState = { byName: {}, @@ -15,6 +15,7 @@ export const clusterSchemasPayload: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord4","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, { subject: 'test3', @@ -23,6 +24,7 @@ export const clusterSchemasPayload: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, { subject: 'test', @@ -31,6 +33,7 @@ export const clusterSchemasPayload: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord2","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, ]; @@ -42,6 +45,7 @@ export const schemaVersionsPayload: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord1","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, { subject: 'test', @@ -50,6 +54,7 @@ export const schemaVersionsPayload: SchemaSubject[] = [ schema: 
'{"type":"record","name":"MyRecord2","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, ]; @@ -60,6 +65,7 @@ export const newSchemaPayload: SchemaSubject = { schema: '{"type":"record","name":"MyRecord4","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }; export const clusterSchemasPayloadWithNewSchema: SchemaSubject[] = [ @@ -70,6 +76,7 @@ export const clusterSchemasPayloadWithNewSchema: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord4","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, { subject: 'test3', @@ -78,6 +85,7 @@ export const clusterSchemasPayloadWithNewSchema: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, { subject: 'test', @@ -86,6 +94,7 @@ export const clusterSchemasPayloadWithNewSchema: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord2","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, { subject: 'test4', @@ -94,5 +103,6 @@ export const clusterSchemasPayloadWithNewSchema: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord4","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, ];