diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml
index 79c2681816..97054ff950 100644
--- a/.github/workflows/backend.yml
+++ b/.github/workflows/backend.yml
@@ -1,20 +1,19 @@
 name: backend
 on:
   push:
-    branches: [ '*' ]
-  pull_request:
-    branches: [ master ]
+    branches:
+      - "**"
+      - "!master"
 jobs:
-  mvn-all-build:
+  build:
     runs-on: ubuntu-latest
     steps:
       - name: Cache local Maven repository
-        uses: actions/cache@v1
+        uses: actions/cache@v2
        with:
          path: ~/.m2/repository
-          key: ${{ runner.os }}-maven-all-${{ hashFiles('**/pom.xml') }}
+          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
-            ${{ runner.os }}-maven-all-
             ${{ runner.os }}-maven-
       - uses: actions/checkout@v2
       - name: Set up JDK 1.13
diff --git a/.github/workflows/charts.yaml b/.github/workflows/charts.yaml
deleted file mode 100644
index f52c09bf1d..0000000000
--- a/.github/workflows/charts.yaml
+++ /dev/null
@@ -1,31 +0,0 @@
-name: charts
-on:
-  create:
-    tags:
-      - "v*.*.*"
-jobs:
-  release:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-        with:
-          fetch-depth: 0
-      - run: |
-          git config user.name github-actions
-          git config user.email github-actions@github.com
-      - uses: azure/setup-helm@v1
-      - name: update appVersion
-        run: |
-          export version=${GITHUB_REF##*/}
-          sed -i "s/appVersion:.*/appVersion: ${version}/" charts/kafka-ui/Chart.yaml
-      - name:
-        run: |
-          export VERSION=${GITHUB_REF##*/}
-          MSG=$(helm package --app-version ${VERSION} charts/kafka-ui)
-          git fetch origin
-          git stash
-          git checkout -b gh-pages origin/gh-pages
-          helm repo index .
-          git add -f ${MSG##*/} index.yaml
-          git commit -m "release ${VERSION}"
-          git push
\ No newline at end of file
diff --git a/.github/workflows/frontend.yaml b/.github/workflows/frontend.yaml
index 331e766154..111446b677 100644
--- a/.github/workflows/frontend.yaml
+++ b/.github/workflows/frontend.yaml
@@ -1,9 +1,9 @@
 name: frontend
 on:
   push:
-    branches: [ '*' ]
-  pull_request:
-    branches: [ master ]
+    branches:
+      - "**"
+      - "!master"
 jobs:
   npm-test:
     needs: [mvn-contract-build]
diff --git a/.github/workflows/latest.yaml b/.github/workflows/latest.yaml
new file mode 100644
index 0000000000..eb32018dc3
--- /dev/null
+++ b/.github/workflows/latest.yaml
@@ -0,0 +1,62 @@
+name: latest
+on:
+  workflow_dispatch:
+  push:
+    branches: [ "master" ]
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - name: Cache local Maven repository
+        uses: actions/cache@v2
+        with:
+          path: ~/.m2/repository
+          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
+          restore-keys: |
+            ${{ runner.os }}-maven-
+      - name: Set up JDK 1.13
+        uses: actions/setup-java@v1
+        with:
+          java-version: 1.13
+      - name: Build
+        id: build
+        run: |
+          export VERSION=$(mvn -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
+          echo "::set-output name=version::${VERSION}"
+          mvn clean package -Pprod -DskipTests
+#################
+#               #
+# Docker images #
+#               #
+#################
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+      - name: Set up Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v1
+      - name: Cache Docker layers
+        uses: actions/cache@v2
+        with:
+          path: /tmp/.buildx-cache
+          key: ${{ runner.os }}-buildx-${{ github.sha }}
+          restore-keys: |
+            ${{ runner.os }}-buildx-
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Build and push
+        id: docker_build_and_push
+        uses: docker/build-push-action@v2
+        with:
+          builder: ${{ steps.buildx.outputs.name }}
+          context: kafka-ui-api
+          push: true
+          tags: provectuslabs/kafka-ui:latest
+          build-args: |
+            JAR_FILE=kafka-ui-api-${{ steps.build.outputs.version }}.jar
+          cache-from: type=local,src=/tmp/.buildx-cache
+          cache-to: type=local,dest=/tmp/.buildx-cache
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 6a95f1ca21..8b1877e477 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -59,19 +59,32 @@ jobs:
             ${{ runner.os }}-buildx-
       - name: Login to DockerHub
         if: github.ref == 'refs/heads/master'
-        uses: docker/login-action@v1 
+        uses: docker/login-action@v1
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Build and push
+      - name: Build
+        if: github.ref != 'refs/heads/master'
         id: docker_build
         uses: docker/build-push-action@v2
         with:
           builder: ${{ steps.buildx.outputs.name }}
           context: kafka-ui-api
-          push: github.ref == 'refs/heads/master'
-          tags: provectuslabs/kafka-ui:${{ steps.prep.outputs.version }}
+          push: false
           build-args: |
             JAR_FILE=kafka-ui-api-${{ steps.prep.outputs.version }}.jar
           cache-from: type=local,src=/tmp/.buildx-cache
           cache-to: type=local,dest=/tmp/.buildx-cache
+      - name: Build and push
+        if: github.ref == 'refs/heads/master'
+        id: docker_build_and_push
+        uses: docker/build-push-action@v2
+        with:
+          builder: ${{ steps.buildx.outputs.name }}
+          context: kafka-ui-api
+          push: true
+          tags: provectuslabs/kafka-ui:${{ steps.prep.outputs.version }}
+          build-args: |
+            JAR_FILE=kafka-ui-api-${{ steps.prep.outputs.version }}.jar
+          cache-from: type=local,src=/tmp/.buildx-cache
+          cache-to: type=local,dest=/tmp/.buildx-cache
\ No newline at end of file
diff --git a/.github/workflows/tags.yaml b/.github/workflows/tags.yaml
new file mode 100644
index 0000000000..562d0f5214
--- /dev/null
+++ b/.github/workflows/tags.yaml
@@ -0,0 +1,68 @@
+name: after_release
+on:
+  push:
+    tags:
+      - "v**"
+jobs:
+  charts:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 1
+      - run: |
+          git config user.name github-actions
+          git config user.email github-actions@github.com
+      - uses: azure/setup-helm@v1
+      - name: update appVersion
+        run: |
+          export version=${GITHUB_REF##*/}
+          sed -i "s/appVersion:.*/appVersion: ${version}/" charts/kafka-ui/Chart.yaml
+      - name: release chart
+        run: |
+          export VERSION=${GITHUB_REF##*/}
+          MSG=$(helm package --app-version ${VERSION} charts/kafka-ui)
+          git fetch origin
+          git stash
+          git checkout -b gh-pages origin/gh-pages
+          helm repo index .
+          git add -f ${MSG##*/} index.yaml
+          git commit -m "release ${VERSION}"
+          git push
+  gh-release:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+      - run: |
+          git config user.name github-actions
+          git config user.email github-actions@github.com
+      - id: generate
+        shell: /usr/bin/bash -x -e {0}
+        run: |
+          VERSION=${GITHUB_REF##*/}
+          CHANGELOG=$(git --no-pager log --oneline --pretty=format:"- %s" `git tag --sort=-creatordate | grep '^v.*' | head -n2 | tail -n1`.. | uniq | grep -v '^- Merge\|^- skip')
+          CHANGELOG="${CHANGELOG//'%'/'%25'}"
+          CHANGELOG="${CHANGELOG//$'\n'/'%0A'}"
+          CHANGELOG="${CHANGELOG//$'\r'/'%0D'}"
+          echo ${CHANGELOG}
+          echo "::set-output name=changelog::${CHANGELOG}"
+          echo "::set-output name=version::${VERSION}"
+      - id: create_release
+        uses: actions/github-script@v3
+        env:
+          CHANGELOG: ${{steps.generate.outputs.changelog}}
+          VERSION: ${{steps.generate.outputs.version}}
+        with:
+          github-token: ${{secrets.GITHUB_TOKEN}}
+          script: |
+            github.repos.createRelease({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              tag_name: context.ref,
+              name: "Release "+process.env.VERSION,
+              body: process.env.CHANGELOG,
+              draft: false,
+              prerelease: false
+            });
diff --git a/README.md b/README.md
index 4e38d7bec8..84521839ab 100644
--- a/README.md
+++ b/README.md
@@ -39,7 +39,8 @@ docker run -p 8080:8080 \
 	-d provectuslabs/kafka-ui:latest
 ```
 
-Then access the web UI at [http://localhost:8080](http://localhost:8080).
+Then access the web UI at [http://localhost:8080](http://localhost:8080).
+For further configuration with environment variables, [see environment variables](#env_variables)
 
 ### Docker Compose
 
@@ -138,10 +139,11 @@ kafka:
 * `schemaRegistry`: schemaRegistry's address
 * `schemaNameTemplate`: how keys are saved to schemaRegistry
 * `jmxPort`: open JMX port of a broker
+* `readOnly`: enable read-only mode
 
 Configure as many clusters as you need by adding their configs below separated with `-`.
 
-## Environment Variables
+## <a name="env_variables"></a> Environment Variables
 
 Alternatively, each variable of the .yml file can be set with an environment variable.
 For example, if you want to use an environment variable to set the `name` parameter, you can write it like this: `KAFKA_CLUSTERS_2_NAME`
 
@@ -154,6 +156,7 @@ For example, if you want to use an environment variable to set the `name` parame
 |`KAFKA_CLUSTERS_0_SCHEMAREGISTRY` 	|SchemaRegistry's address
 |`KAFKA_CLUSTERS_0_SCHEMANAMETEMPLATE` 	|How keys are saved to schemaRegistry
 |`KAFKA_CLUSTERS_0_JMXPORT` 	|Open JMX port of a broker
+|`KAFKA_CLUSTERS_0_READONLY` 	|Enable read-only mode. Default: false
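For reviewers wondering how the `KAFKA_CLUSTERS_n_*` variables above line up with the `kafka.clusters` YAML list: this is Spring Boot's relaxed binding onto an indexed properties list. A minimal sketch of the binding target, assuming the `ClustersProperties` class referenced elsewhere in this PR looks roughly like this (field names inferred from the documented variables; getters/setters omitted, presumably generated by Lombok in the real project):

```java
import java.util.ArrayList;
import java.util.List;
import org.springframework.boot.context.properties.ConfigurationProperties;

// Hypothetical sketch, not the actual class from this PR.
@ConfigurationProperties("kafka")
public class ClustersProperties {
  // Bound from kafka.clusters[0], kafka.clusters[1], ... in YAML,
  // or from KAFKA_CLUSTERS_0_*, KAFKA_CLUSTERS_1_* env variables.
  private List<Cluster> clusters = new ArrayList<>();

  public static class Cluster {
    private String name;             // KAFKA_CLUSTERS_0_NAME
    private String bootstrapServers; // KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS
    private String schemaRegistry;   // KAFKA_CLUSTERS_0_SCHEMAREGISTRY
    private boolean readOnly;        // KAFKA_CLUSTERS_0_READONLY (new in this PR)
  }
}
```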
diff --git a/docker-compose.md b/docker-compose.md
index 1ec8b13504..68be207ade 100644
--- a/docker-compose.md
+++ b/docker-compose.md
@@ -1,5 +1,7 @@
 # Quick Start with docker-compose
 
+Environment variables documentation: [see usage](README.md#env_variables)
+
 * Add a new service in docker-compose.yml
 
 ```yaml
@@ -9,14 +11,31 @@ services:
   kafka-ui:
     image: provectuslabs/kafka-ui
     container_name: kafka-ui
     ports:
-      - "9000:8080"
+      - "8080:8080"
     restart: always
     environment:
-      -e KAFKA_CLUSTERS_0_NAME=local
-      -e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092
-      -e KAFKA_CLUSTERS_0_ZOOKEEPER=localhost:2181
+      - KAFKA_CLUSTERS_0_NAME=local
+      - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092
+      - KAFKA_CLUSTERS_0_ZOOKEEPER=localhost:2181
 ```
+
+* If you prefer Kafka UI in read-only mode
+```yaml
+version: '2'
+services:
+  kafka-ui:
+    image: provectuslabs/kafka-ui
+    container_name: kafka-ui
+    ports:
+      - "8080:8080"
+    restart: always
+    environment:
+      - KAFKA_CLUSTERS_0_NAME=local
+      - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092
+      - KAFKA_CLUSTERS_0_ZOOKEEPER=localhost:2181
+      - KAFKA_CLUSTERS_0_READONLY=true
+```
 
 * Start Kafka UI process
diff --git a/docker/kafka-clusters-only.yaml b/docker/kafka-clusters-only.yaml
index 16cb50b659..68116d9499 100644
--- a/docker/kafka-clusters-only.yaml
+++ b/docker/kafka-clusters-only.yaml
@@ -86,7 +86,25 @@ services:
       SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
       SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
     ports:
-      - 8081:8081
+      - 8085:8085
+
+  schemaregistry1:
+    image: confluentinc/cp-schema-registry:5.5.0
+    ports:
+      - 18085:8085
+    depends_on:
+      - zookeeper1
+      - kafka1
+    environment:
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:29092
+      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper1:2181
+      SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
+      SCHEMA_REGISTRY_HOST_NAME: schemaregistry1
+      SCHEMA_REGISTRY_LISTENERS: http://schemaregistry1:8085
+
+      SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
+      SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
+      SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
 
   kafka-connect0:
     image: confluentinc/cp-kafka-connect:5.2.4
diff --git a/docker/kafka-ui.yaml b/docker/kafka-ui.yaml
index 8d8790b97e..c384a8f4b3 100644
--- a/docker/kafka-ui.yaml
+++ b/docker/kafka-ui.yaml
@@ -96,6 +96,24 @@ services:
       SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
       SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
 
+  schemaregistry1:
+    image: confluentinc/cp-schema-registry:5.5.0
+    ports:
+      - 18085:8085
+    depends_on:
+      - zookeeper1
+      - kafka1
+    environment:
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:29092
+      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper1:2181
+      SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
+      SCHEMA_REGISTRY_HOST_NAME: schemaregistry1
+      SCHEMA_REGISTRY_LISTENERS: http://schemaregistry1:8085
+
+      SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
+      SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
+      SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
+
   kafka-connect0:
     image: confluentinc/cp-kafka-connect:5.2.4
     ports:
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/exception/DuplicateEntityException.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/exception/DuplicateEntityException.java
new file mode 100644
index 0000000000..04c6be1590
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/exception/DuplicateEntityException.java
@@ -0,0 +1,15 @@
+package com.provectus.kafka.ui.cluster.exception;
+
+import org.springframework.http.HttpStatus;
+
+public class DuplicateEntityException extends CustomBaseException {
+
+    public DuplicateEntityException(String message) {
+        super(message);
+    }
+
+    @Override
+    public HttpStatus getResponseStatusCode() {
+        return HttpStatus.CONFLICT;
+    }
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/exception/UnprocessableEntityException.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/exception/UnprocessableEntityException.java
new file mode 100644
index 0000000000..bafc8c8180
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/exception/UnprocessableEntityException.java
@@ -0,0 +1,15 @@
+package com.provectus.kafka.ui.cluster.exception;
+
+import org.springframework.http.HttpStatus;
+
+public class UnprocessableEntityException extends CustomBaseException {
+
+    public UnprocessableEntityException(String message) {
+        super(message);
+    }
+
+    @Override
+    public HttpStatus getResponseStatusCode() {
+        return HttpStatus.UNPROCESSABLE_ENTITY;
+    }
+}
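Both new exceptions extend `CustomBaseException`, which is not part of this diff. A minimal sketch of the contract they appear to rely on, assuming a shared error handler maps `getResponseStatusCode()` to the HTTP response:

```java
import org.springframework.http.HttpStatus;

// Hypothetical sketch of the base class, inferred from the two subclasses above.
public abstract class CustomBaseException extends RuntimeException {

  protected CustomBaseException(String message) {
    super(message);
  }

  // Each subclass picks the status a global error handler should return:
  // CONFLICT (409) for DuplicateEntityException,
  // UNPROCESSABLE_ENTITY (422) for UnprocessableEntityException.
  public abstract HttpStatus getResponseStatusCode();
}
```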
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/mapper/ClusterMapper.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/mapper/ClusterMapper.java
index a9e48bd627..befddfc328 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/mapper/ClusterMapper.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/mapper/ClusterMapper.java
@@ -2,7 +2,8 @@ package com.provectus.kafka.ui.cluster.mapper;
 
 import com.provectus.kafka.ui.cluster.config.ClustersProperties;
 import com.provectus.kafka.ui.cluster.model.*;
-import com.provectus.kafka.ui.cluster.model.InternalCompatibilityCheck;
+import com.provectus.kafka.ui.cluster.model.schemaregistry.InternalCompatibilityCheck;
+import com.provectus.kafka.ui.cluster.model.schemaregistry.InternalCompatibilityLevel;
 import com.provectus.kafka.ui.model.*;
 import java.util.Properties;
 import org.mapstruct.Mapper;
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalCompatibilityCheck.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalCompatibilityCheck.java
similarity index 76%
rename from kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalCompatibilityCheck.java
rename to kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalCompatibilityCheck.java
index 001823f275..da072c46f6 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalCompatibilityCheck.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalCompatibilityCheck.java
@@ -1,4 +1,4 @@
-package com.provectus.kafka.ui.cluster.model;
+package com.provectus.kafka.ui.cluster.model.schemaregistry;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 import lombok.Data;
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalCompatibilityLevel.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalCompatibilityLevel.java
similarity index 64%
rename from kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalCompatibilityLevel.java
rename to kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalCompatibilityLevel.java
index ed2bfc70d7..d66fc80c8d 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/InternalCompatibilityLevel.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalCompatibilityLevel.java
@@ -1,4 +1,4 @@
-package com.provectus.kafka.ui.cluster.model;
+package com.provectus.kafka.ui.cluster.model.schemaregistry;
 
 import lombok.Data;
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalNewSchema.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalNewSchema.java
new file mode 100644
index 0000000000..b121943fe3
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/InternalNewSchema.java
@@ -0,0 +1,17 @@
+package com.provectus.kafka.ui.cluster.model.schemaregistry;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.provectus.kafka.ui.model.SchemaType;
+import lombok.Data;
+
+@Data
+public class InternalNewSchema {
+    private String schema;
+    @JsonInclude(JsonInclude.Include.NON_NULL)
+    private SchemaType schemaType;
+
+    public InternalNewSchema(String schema, SchemaType schemaType) {
+        this.schema = schema;
+        this.schemaType = schemaType;
+    }
+}
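The `@JsonInclude(NON_NULL)` on `schemaType` matters: `registerNewSchema` below nulls the type for AVRO, so the field is omitted entirely from the JSON sent to Schema Registry, keeping the payload compatible with registries that predate the `schemaType` field. A quick Jackson check of that behavior (a sketch; it assumes the generated `SchemaType` enum serializes as its name):

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import com.provectus.kafka.ui.cluster.model.schemaregistry.InternalNewSchema;
import com.provectus.kafka.ui.model.SchemaType;

public class InternalNewSchemaJsonDemo {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();

    // schemaType == null -> field skipped entirely:
    // {"schema":"{\"type\": \"string\"}"}
    System.out.println(mapper.writeValueAsString(
        new InternalNewSchema("{\"type\": \"string\"}", null)));

    // schemaType set -> {"schema":"syntax = \"proto3\";","schemaType":"PROTOBUF"}
    System.out.println(mapper.writeValueAsString(
        new InternalNewSchema("syntax = \"proto3\";", SchemaType.PROTOBUF)));
  }
}
```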
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/SubjectIdResponse.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/SubjectIdResponse.java
new file mode 100644
index 0000000000..3a6eefee27
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/model/schemaregistry/SubjectIdResponse.java
@@ -0,0 +1,8 @@
+package com.provectus.kafka.ui.cluster.model.schemaregistry;
+
+import lombok.Data;
+
+@Data
+public class SubjectIdResponse {
+    private Integer id;
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/SchemaRegistryService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/SchemaRegistryService.java
index 232e25d026..b364cbc829 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/SchemaRegistryService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/SchemaRegistryService.java
@@ -1,36 +1,43 @@
 package com.provectus.kafka.ui.cluster.service;
 
+import com.provectus.kafka.ui.cluster.exception.DuplicateEntityException;
 import com.provectus.kafka.ui.cluster.exception.NotFoundException;
+import com.provectus.kafka.ui.cluster.exception.UnprocessableEntityException;
 import com.provectus.kafka.ui.cluster.mapper.ClusterMapper;
 import com.provectus.kafka.ui.cluster.model.ClustersStorage;
-import com.provectus.kafka.ui.cluster.model.InternalCompatibilityCheck;
-import com.provectus.kafka.ui.cluster.model.InternalCompatibilityLevel;
-import com.provectus.kafka.ui.model.CompatibilityCheckResponse;
-import com.provectus.kafka.ui.model.CompatibilityLevel;
-import com.provectus.kafka.ui.model.NewSchemaSubject;
-import com.provectus.kafka.ui.model.SchemaSubject;
-import java.util.Formatter;
+import com.provectus.kafka.ui.cluster.model.KafkaCluster;
+import com.provectus.kafka.ui.cluster.model.schemaregistry.InternalCompatibilityCheck;
+import com.provectus.kafka.ui.cluster.model.schemaregistry.InternalCompatibilityLevel;
+import com.provectus.kafka.ui.cluster.model.schemaregistry.InternalNewSchema;
+import com.provectus.kafka.ui.cluster.model.schemaregistry.SubjectIdResponse;
+import com.provectus.kafka.ui.model.*;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.log4j.Log4j2;
-import org.springframework.core.ParameterizedTypeReference;
-import org.springframework.http.HttpEntity;
-import org.springframework.http.HttpStatus;
+import org.jetbrains.annotations.NotNull;
 import org.springframework.http.MediaType;
 import org.springframework.http.ResponseEntity;
 import org.springframework.stereotype.Service;
 import org.springframework.web.reactive.function.BodyInserters;
+import org.springframework.web.reactive.function.client.ClientResponse;
 import org.springframework.web.reactive.function.client.WebClient;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 
-import java.util.Arrays;
-import java.util.List;
+import java.util.Formatter;
 import java.util.Objects;
+import java.util.function.Function;
+
+import static org.springframework.http.HttpStatus.NOT_FOUND;
+import static org.springframework.http.HttpStatus.UNPROCESSABLE_ENTITY;
 
 @Service
 @Log4j2
 @RequiredArgsConstructor
 public class SchemaRegistryService {
+    public static final String NO_SUCH_SCHEMA_VERSION = "No such schema %s with version %s";
+    public static final String NO_SUCH_SCHEMA = "No such schema %s";
+    public static final String NO_SUCH_CLUSTER = "No such cluster";
+
     private static final String URL_SUBJECTS = "/subjects";
     private static final String URL_SUBJECT = "/subjects/{schemaName}";
     private static final String URL_SUBJECT_VERSIONS = "/subjects/{schemaName}/versions";
@@ -45,7 +52,7 @@ public class SchemaRegistryService {
         var allSubjectNames = getAllSubjectNames(clusterName);
         return allSubjectNames
                 .flatMapMany(Flux::fromArray)
-                .flatMap(subject -> getLatestSchemaSubject(clusterName, subject));
+                .flatMap(subject -> getLatestSchemaVersionBySubject(clusterName, subject));
     }
 
     public Mono<String[]> getAllSubjectNames(String clusterName) {
@@ -56,7 +63,7 @@ public class SchemaRegistryService {
                         .bodyToMono(String[].class)
                         .doOnError(log::error)
                 )
-                .orElse(Mono.error(new NotFoundException("No such cluster")));
+                .orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER)));
     }
 
     public Flux<SchemaSubject> getAllVersionsBySubject(String clusterName, String subject) {
@@ -69,19 +76,17 @@ public class SchemaRegistryService {
                 .map(cluster -> webClient.get()
                         .uri(cluster.getSchemaRegistry() + URL_SUBJECT_VERSIONS, schemaName)
                         .retrieve()
-                        .onStatus(HttpStatus.NOT_FOUND::equals,
-                                resp -> Mono.error(
-                                        new NotFoundException(formatted("No such schema %s"))
-                                )
+                        .onStatus(NOT_FOUND::equals,
+                                throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA))
                         ).bodyToFlux(Integer.class)
-                ).orElse(Flux.error(new NotFoundException("No such cluster")));
+                ).orElse(Flux.error(new NotFoundException(NO_SUCH_CLUSTER)));
     }
 
     public Mono<SchemaSubject> getSchemaSubjectByVersion(String clusterName, String schemaName, Integer version) {
         return this.getSchemaSubject(clusterName, schemaName, String.valueOf(version));
     }
 
-    public Mono<SchemaSubject> getLatestSchemaSubject(String clusterName, String schemaName) {
+    public Mono<SchemaSubject> getLatestSchemaVersionBySubject(String clusterName, String schemaName) {
         return this.getSchemaSubject(clusterName, schemaName, LATEST);
     }
 
@@ -90,13 +95,10 @@ public class SchemaRegistryService {
                 .map(cluster -> webClient.get()
                         .uri(cluster.getSchemaRegistry() + URL_SUBJECT_BY_VERSION, schemaName, version)
                         .retrieve()
-                        .onStatus(HttpStatus.NOT_FOUND::equals,
-                                resp -> Mono.error(
-                                        new NotFoundException(
-                                                formatted("No such schema %s with version %s", schemaName, version)
-                                        )
-                                )
+                        .onStatus(NOT_FOUND::equals,
+                                throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA_VERSION, schemaName, version))
                         ).bodyToMono(SchemaSubject.class)
+                        .map(this::withSchemaType)
                         .zipWith(getSchemaCompatibilityInfoOrGlobal(clusterName, schemaName))
                         .map(tuple -> {
                             SchemaSubject schema = tuple.getT1();
@@ -105,7 +107,21 @@ public class SchemaRegistryService {
                             return schema;
                         })
                 )
-                .orElseThrow();
+                .orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER)));
    }
 
+    /**
+     * If {@link SchemaSubject#getSchemaType()} is null, defaults to AVRO; otherwise keeps the schema type as is.
+     */
+    @NotNull
+    private SchemaSubject withSchemaType(SchemaSubject s) {
+        SchemaType schemaType = Objects.nonNull(s.getSchemaType()) ? s.getSchemaType() : SchemaType.AVRO;
+        return new SchemaSubject()
+                .schema(s.getSchema())
+                .subject(s.getSubject())
+                .version(s.getVersion())
+                .id(s.getId())
+                .schemaType(schemaType);
+    }
 
     public Mono<ResponseEntity<Void>> deleteSchemaSubjectByVersion(String clusterName, String schemaName, Integer version) {
@@ -121,46 +137,71 @@ public class SchemaRegistryService {
                 .map(cluster -> webClient.delete()
                         .uri(cluster.getSchemaRegistry() + URL_SUBJECT_BY_VERSION, schemaName, version)
                         .retrieve()
-                        .onStatus(HttpStatus.NOT_FOUND::equals,
-                                resp -> Mono.error(
-                                        new NotFoundException(
-                                                formatted("No such schema %s with version %s", schemaName, version)
-                                        )
-                                )
+                        .onStatus(NOT_FOUND::equals,
+                                throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA_VERSION, schemaName, version))
                         ).toBodilessEntity()
-                ).orElse(Mono.error(new NotFoundException("No such cluster")));
+                ).orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER)));
     }
 
-    public Mono<ResponseEntity<Void>> deleteSchemaSubject(String clusterName, String schemaName) {
+    public Mono<ResponseEntity<Void>> deleteSchemaSubjectEntirely(String clusterName, String schemaName) {
         return clustersStorage.getClusterByName(clusterName)
                 .map(cluster -> webClient.delete()
                         .uri(cluster.getSchemaRegistry() + URL_SUBJECT, schemaName)
                         .retrieve()
-                        .onStatus(HttpStatus.NOT_FOUND::equals,
-                                resp -> Mono.error(
-                                        new NotFoundException(
-                                                formatted("No such schema %s", schemaName)
-                                        )
-                                )
+                        .onStatus(NOT_FOUND::equals, throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA, schemaName))
                         )
                         .toBodilessEntity())
-                .orElse(Mono.error(new NotFoundException("No such cluster")));
+                .orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER)));
     }
 
-    public Mono<ResponseEntity<SchemaSubject>> createNewSubject(String clusterName, String schemaName, Mono<NewSchemaSubject> newSchemaSubject) {
-        return clustersStorage.getClusterByName(clusterName)
-                .map(cluster -> webClient.post()
-                        .uri(cluster.getSchemaRegistry() + URL_SUBJECT_VERSIONS, schemaName)
-                        .contentType(MediaType.APPLICATION_JSON)
-                        .body(BodyInserters.fromPublisher(newSchemaSubject, NewSchemaSubject.class))
-                        .retrieve()
-                        .onStatus(HttpStatus.NOT_FOUND::equals,
-                                resp -> Mono.error(
-                                        new NotFoundException(formatted("No such schema %s", schemaName)))
-                        )
-                        .toEntity(SchemaSubject.class)
-                        .log())
-                .orElse(Mono.error(new NotFoundException("No such cluster")));
+    /**
+     * Checks whether the provided schema duplicates the previous or not, creates a new schema
+     * and then returns the whole content by requesting its latest version.
+     */
+    public Mono<SchemaSubject> registerNewSchema(String clusterName, Mono<NewSchemaSubject> newSchemaSubject) {
+        return newSchemaSubject
+                .flatMap(schema -> {
+                    SchemaType schemaType = SchemaType.AVRO == schema.getSchemaType() ? null : schema.getSchemaType();
+                    Mono<InternalNewSchema> newSchema = Mono.just(new InternalNewSchema(schema.getSchema(), schemaType));
+                    String subject = schema.getSubject();
+                    return clustersStorage.getClusterByName(clusterName)
+                            .map(KafkaCluster::getSchemaRegistry)
+                            .map(schemaRegistryUrl -> checkSchemaOnDuplicate(subject, newSchema, schemaRegistryUrl)
+                                    .flatMap(s -> submitNewSchema(subject, newSchema, schemaRegistryUrl))
+                                    .flatMap(resp -> getLatestSchemaVersionBySubject(clusterName, subject))
+                            )
+                            .orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER)));
+                });
+    }
+
+    @NotNull
+    private Mono<SubjectIdResponse> submitNewSchema(String subject, Mono<InternalNewSchema> newSchemaSubject, String schemaRegistryUrl) {
+        return webClient.post()
+                .uri(schemaRegistryUrl + URL_SUBJECT_VERSIONS, subject)
+                .contentType(MediaType.APPLICATION_JSON)
+                .body(BodyInserters.fromPublisher(newSchemaSubject, InternalNewSchema.class))
+                .retrieve()
+                .onStatus(UNPROCESSABLE_ENTITY::equals, r -> Mono.error(new UnprocessableEntityException("Invalid params")))
+                .bodyToMono(SubjectIdResponse.class);
+    }
+
+    @NotNull
+    private Mono<SchemaSubject> checkSchemaOnDuplicate(String subject, Mono<InternalNewSchema> newSchemaSubject, String schemaRegistryUrl) {
+        return webClient.post()
+                .uri(schemaRegistryUrl + URL_SUBJECT, subject)
+                .contentType(MediaType.APPLICATION_JSON)
+                .body(BodyInserters.fromPublisher(newSchemaSubject, InternalNewSchema.class))
+                .retrieve()
+                .onStatus(NOT_FOUND::equals, res -> Mono.empty())
+                .onStatus(UNPROCESSABLE_ENTITY::equals, r -> Mono.error(new UnprocessableEntityException("Invalid params")))
+                .bodyToMono(SchemaSubject.class)
+                .filter(s -> Objects.isNull(s.getId()))
+                .switchIfEmpty(Mono.error(new DuplicateEntityException("Such schema already exists")));
+    }
+
+    @NotNull
+    private Function<ClientResponse, Mono<? extends Throwable>> throwIfNotFoundStatus(String formatted) {
+        return resp -> Mono.error(new NotFoundException(formatted));
    }
 
     /**
@@ -178,10 +219,10 @@ public class SchemaRegistryService {
                     .contentType(MediaType.APPLICATION_JSON)
                     .body(BodyInserters.fromPublisher(compatibilityLevel, CompatibilityLevel.class))
                     .retrieve()
-                    .onStatus(HttpStatus.NOT_FOUND::equals,
-                            resp -> Mono.error(new NotFoundException(formatted("No such schema %s", schemaName))))
+                    .onStatus(NOT_FOUND::equals,
+                            throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA, schemaName)))
                     .bodyToMono(Void.class);
-        }).orElse(Mono.error(new NotFoundException("No such cluster")));
+        }).orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER)));
     }
 
     public Mono<Void> updateSchemaCompatibility(String clusterName, Mono<CompatibilityLevel> compatibilityLevel) {
@@ -217,12 +258,11 @@ public class SchemaRegistryService {
                         .contentType(MediaType.APPLICATION_JSON)
                         .body(BodyInserters.fromPublisher(newSchemaSubject, NewSchemaSubject.class))
                         .retrieve()
-                        .onStatus(HttpStatus.NOT_FOUND::equals,
-                                resp -> Mono.error(new NotFoundException(formatted("No such schema %s", schemaName))))
+                        .onStatus(NOT_FOUND::equals, throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA, schemaName)))
                        .bodyToMono(InternalCompatibilityCheck.class)
                        .map(mapper::toCompatibilityCheckResponse)
                        .log()
-                ).orElse(Mono.error(new NotFoundException("No such cluster")));
+                ).orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER)));
     }
 
     public String formatted(String str, Object... args) {
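The registration flow above maps onto two Confluent Schema Registry REST calls: `POST /subjects/{subject}` is the registry's "is this exact schema already registered under this subject" lookup (it returns the matching schema, or 404 if absent), and `POST /subjects/{subject}/versions` performs the actual registration, answering with `{"id": ...}`, which is what `SubjectIdResponse` deserializes. A minimal standalone sketch of the same round-trip, assuming a registry at `localhost:8085` and a subject named `demo`:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class RegistryFlowDemo {
  public static void main(String[] args) throws Exception {
    HttpClient client = HttpClient.newHttpClient();
    String registry = "http://localhost:8085"; // assumption: local registry
    String subject = "demo";                   // assumption: sample subject
    String body = "{\"schema\":\"{\\\"type\\\": \\\"string\\\"}\"}";

    // Step 1: duplicate check. 404 means "not registered yet";
    // a 200 with a schema id means the exact schema already exists.
    HttpRequest check = HttpRequest.newBuilder()
        .uri(URI.create(registry + "/subjects/" + subject))
        .header("Content-Type", "application/json")
        .POST(HttpRequest.BodyPublishers.ofString(body))
        .build();
    System.out.println(client.send(check, HttpResponse.BodyHandlers.ofString()).statusCode());

    // Step 2: actual registration; responds with {"id": <global schema id>}.
    HttpRequest register = HttpRequest.newBuilder()
        .uri(URI.create(registry + "/subjects/" + subject + "/versions"))
        .header("Content-Type", "application/json")
        .POST(HttpRequest.BodyPublishers.ofString(body))
        .build();
    System.out.println(client.send(register, HttpResponse.BodyHandlers.ofString()).body());
  }
}
```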
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/MetricsRestController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/MetricsRestController.java
index a2cacbba8e..9b86451b8f 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/MetricsRestController.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/MetricsRestController.java
@@ -106,7 +106,7 @@ public class MetricsRestController implements ApiClustersApi {
 
     @Override
     public Mono<ResponseEntity<SchemaSubject>> getLatestSchema(String clusterName, String subject, ServerWebExchange exchange) {
-        return schemaRegistryService.getLatestSchemaSubject(clusterName, subject).map(ResponseEntity::ok);
+        return schemaRegistryService.getLatestSchemaVersionBySubject(clusterName, subject).map(ResponseEntity::ok);
     }
 
     @Override
@@ -138,14 +138,16 @@ public class MetricsRestController implements ApiClustersApi {
 
     @Override
     public Mono<ResponseEntity<Void>> deleteSchema(String clusterName, String subjectName, ServerWebExchange exchange) {
-        return schemaRegistryService.deleteSchemaSubject(clusterName, subjectName);
+        return schemaRegistryService.deleteSchemaSubjectEntirely(clusterName, subjectName);
     }
 
     @Override
-    public Mono<ResponseEntity<SchemaSubject>> createNewSchema(String clusterName, String subject,
+    public Mono<ResponseEntity<SchemaSubject>> createNewSchema(String clusterName,
                                                                @Valid Mono<NewSchemaSubject> newSchemaSubject,
                                                                ServerWebExchange exchange) {
-        return schemaRegistryService.createNewSubject(clusterName, subject, newSchemaSubject);
+        return schemaRegistryService
+                .registerNewSchema(clusterName, newSchemaSubject)
+                .map(ResponseEntity::ok);
     }
 
     @Override
diff --git a/kafka-ui-api/src/main/resources/application-local.yml b/kafka-ui-api/src/main/resources/application-local.yml
index 951a1799e0..5822849109 100644
--- a/kafka-ui-api/src/main/resources/application-local.yml
+++ b/kafka-ui-api/src/main/resources/application-local.yml
@@ -13,7 +13,7 @@ kafka:
       name: secondLocal
       bootstrapServers: localhost:9093
       zookeeper: localhost:2182
-      schemaRegistry: http://localhost:8081
+      schemaRegistry: http://localhost:18085
       kafkaConnect:
         - name: first
           address: http://localhost:8083
diff --git a/kafka-ui-api/src/main/resources/application-sdp.yml b/kafka-ui-api/src/main/resources/application-sdp.yml
index 9f6a293ec2..46a0377799 100644
--- a/kafka-ui-api/src/main/resources/application-sdp.yml
+++ b/kafka-ui-api/src/main/resources/application-sdp.yml
@@ -9,7 +9,7 @@ kafka:
       name: secondLocal
       zookeeper: zookeeper1:2181
       bootstrapServers: kafka1:29092
-      schemaRegistry: http://schemaregistry0:8085
+      schemaRegistry: http://schemaregistry1:8085
   admin-client-timeout: 5000
 zookeeper:
   connection-timeout: 1000
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractBaseTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractBaseTest.java
index 4065b4006d..cd00559694 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractBaseTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractBaseTest.java
@@ -18,7 +18,7 @@ public abstract class AbstractBaseTest {
     public static String LOCAL = "local";
     public static String SECOND_LOCAL = "secondLocal";
 
-    private static final String CONFLUENT_PLATFORM_VERSION = "5.2.1";
+    private static final String CONFLUENT_PLATFORM_VERSION = "5.5.0";
 
     public static final KafkaContainer kafka = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka").withTag(CONFLUENT_PLATFORM_VERSION))
             .withNetwork(Network.SHARED);
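The bump to Confluent Platform 5.5.0 is what makes the new `schemaType` handling testable at all: Protobuf and JSON Schema support landed in Schema Registry 5.5. If a matching registry container were ever needed alongside the Kafka container above, a testcontainers sketch (not part of this diff; port and env names follow the compose files earlier in this PR) could look like:

```java
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.utility.DockerImageName;

public class SchemaRegistryContainerSketch {
  // Hypothetical helper; the real test setup may wire this differently.
  public static GenericContainer<?> schemaRegistry(String bootstrapServers) {
    return new GenericContainer<>(
            DockerImageName.parse("confluentinc/cp-schema-registry:5.5.0"))
        .withExposedPorts(8081)
        .withEnv("SCHEMA_REGISTRY_HOST_NAME", "schemaregistry")
        .withEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS", bootstrapServers);
  }
}
```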
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/KafkaConnectServiceTests.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/KafkaConnectServiceTests.java
index f7e6407b91..603390dbf5 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/KafkaConnectServiceTests.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/KafkaConnectServiceTests.java
@@ -249,7 +249,7 @@ public class KafkaConnectServiceTests extends AbstractBaseTest {
                 .exchange()
                 .expectStatus().isOk()
                 .expectBodyList(ConnectorPlugin.class)
-                .value(plugins -> assertEquals(13, plugins.size()));
+                .value(plugins -> assertEquals(14, plugins.size()));
     }
 
     @Test
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/SchemaRegistryServiceTests.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/SchemaRegistryServiceTests.java
index c5af013e79..07d05cade6 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/SchemaRegistryServiceTests.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/SchemaRegistryServiceTests.java
@@ -1,7 +1,9 @@
 package com.provectus.kafka.ui;
 
 import com.provectus.kafka.ui.model.CompatibilityLevel;
+import com.provectus.kafka.ui.model.NewSchemaSubject;
 import com.provectus.kafka.ui.model.SchemaSubject;
+import com.provectus.kafka.ui.model.SchemaType;
 import lombok.extern.log4j.Log4j2;
 import lombok.val;
 import org.junit.jupiter.api.Assertions;
@@ -9,11 +11,13 @@ import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient;
+import org.springframework.http.HttpStatus;
 import org.springframework.http.MediaType;
 import org.springframework.test.context.ContextConfiguration;
 import org.springframework.test.web.reactive.server.EntityExchangeResult;
 import org.springframework.test.web.reactive.server.WebTestClient;
 import org.springframework.web.reactive.function.BodyInserters;
+import reactor.core.publisher.Mono;
 
 import java.util.List;
 import java.util.UUID;
@@ -50,6 +54,69 @@ class SchemaRegistryServiceTests extends AbstractBaseTest {
                 .expectStatus().isNotFound();
     }
 
+    /**
+     * Trying to create a schema without a schemaType field should result in 400 Bad Request.
+     */
+    @Test
+    void shouldBeBadRequestIfNoSchemaType() {
+        String schema = "{\"subject\":\"%s\",\"schema\":\"{\\\"type\\\": \\\"string\\\"}\"}";
+
+        webTestClient
+                .post()
+                .uri("/api/clusters/{clusterName}/schemas", LOCAL)
+                .contentType(MediaType.APPLICATION_JSON)
+                .body(BodyInserters.fromValue(schema.formatted(subject)))
+                .exchange()
+                .expectStatus().isBadRequest();
+    }
+
+    @Test
+    void shouldReturn409WhenSchemaDuplicatesThePreviousVersion() {
+        String schema = "{\"subject\":\"%s\",\"schemaType\":\"AVRO\",\"schema\":\"{\\\"type\\\": \\\"string\\\"}\"}";
+
+        webTestClient
+                .post()
+                .uri("/api/clusters/{clusterName}/schemas", LOCAL)
+                .contentType(MediaType.APPLICATION_JSON)
+                .body(BodyInserters.fromValue(schema.formatted(subject)))
+                .exchange()
+                .expectStatus().isEqualTo(HttpStatus.OK);
+
+        webTestClient
+                .post()
+                .uri("/api/clusters/{clusterName}/schemas", LOCAL)
+                .contentType(MediaType.APPLICATION_JSON)
+                .body(BodyInserters.fromValue(schema.formatted(subject)))
+                .exchange()
+                .expectStatus().isEqualTo(HttpStatus.CONFLICT);
+    }
+
+    @Test
+    void shouldCreateNewProtobufSchema() {
+        String schema = "syntax = \"proto3\";\n\nmessage MyRecord {\n  int32 id = 1;\n  string name = 2;\n}\n";
+        NewSchemaSubject requestBody = new NewSchemaSubject()
+                .schemaType(SchemaType.PROTOBUF)
+                .subject(subject)
+                .schema(schema);
+        SchemaSubject actual = webTestClient
+                .post()
+                .uri("/api/clusters/{clusterName}/schemas", LOCAL)
+                .contentType(MediaType.APPLICATION_JSON)
+                .body(BodyInserters.fromPublisher(Mono.just(requestBody), NewSchemaSubject.class))
+                .exchange()
+                .expectStatus()
+                .isOk()
+                .expectBody(SchemaSubject.class)
+                .returnResult()
+                .getResponseBody();
+
+        Assertions.assertNotNull(actual);
+        Assertions.assertEquals(CompatibilityLevel.CompatibilityEnum.BACKWARD.name(), actual.getCompatibilityLevel());
+        Assertions.assertEquals("1", actual.getVersion());
+        Assertions.assertEquals(SchemaType.PROTOBUF, actual.getSchemaType());
+        Assertions.assertEquals(schema, actual.getSchema());
+    }
+
     @Test
     public void shouldReturnBackwardAsGlobalCompatibilityLevelByDefault() {
         webTestClient
@@ -132,9 +199,9 @@ class SchemaRegistryServiceTests extends AbstractBaseTest {
     private void createNewSubjectAndAssert(String subject) {
         webTestClient
                 .post()
-                .uri("/api/clusters/{clusterName}/schemas/{subject}", LOCAL, subject)
+                .uri("/api/clusters/{clusterName}/schemas", LOCAL)
                 .contentType(MediaType.APPLICATION_JSON)
-                .body(BodyInserters.fromValue("{\"schema\":\"{\\\"type\\\": \\\"string\\\"}\"}"))
+                .body(BodyInserters.fromValue("{\"subject\":\"%s\",\"schemaType\":\"AVRO\",\"schema\":\"{\\\"type\\\": \\\"string\\\"}\"}".formatted(subject)))
                 .exchange()
                 .expectStatus().isOk()
                 .expectBody(SchemaSubject.class)
@@ -151,16 +218,17 @@ class SchemaRegistryServiceTests extends AbstractBaseTest {
         Assertions.assertEquals("\"string\"", actualSchema.getSchema());
 
         Assertions.assertNotNull(actualSchema.getCompatibilityLevel());
+        Assertions.assertEquals(SchemaType.AVRO, actualSchema.getSchemaType());
         Assertions.assertEquals(expectedCompatibility.name(), actualSchema.getCompatibilityLevel());
     }
 
     private void assertResponseBodyWhenCreateNewSchema(EntityExchangeResult<SchemaSubject> exchangeResult) {
         SchemaSubject responseBody = exchangeResult.getResponseBody();
         Assertions.assertNotNull(responseBody);
-        Assertions.assertEquals(1, responseBody.getId(), "The schema ID should be non-null in the response");
-        String message = "It should be null";
-        Assertions.assertNull(responseBody.getSchema(), message);
-        Assertions.assertNull(responseBody.getSubject(), message);
-        Assertions.assertNull(responseBody.getVersion(), message);
+        Assertions.assertEquals("1", responseBody.getVersion());
+        Assertions.assertNotNull(responseBody.getSchema());
+        Assertions.assertNotNull(responseBody.getSubject());
+        Assertions.assertNotNull(responseBody.getCompatibilityLevel());
+        Assertions.assertEquals(SchemaType.AVRO, responseBody.getSchemaType());
     }
 }
diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
index 635f5b1693..f50695abfc 100644
--- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
+++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
@@ -358,6 +358,35 @@ paths:
                   $ref: '#/components/schemas/ConsumerGroup'
   /api/clusters/{clusterName}/schemas:
+    post:
+      tags:
+        - /api/clusters
+      summary: create a new subject schema
+      operationId: createNewSchema
+      parameters:
+        - name: clusterName
+          in: path
+          required: true
+          schema:
+            type: string
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/NewSchemaSubject'
+      responses:
+        200:
+          description: Ok
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/SchemaSubject'
+        400:
+          description: Bad request
+        409:
+          description: Duplicate schema
+        422:
+          description: Invalid parameters
     get:
       tags:
         - /api/clusters
@@ -380,36 +409,6 @@ paths:
                   $ref: '#/components/schemas/SchemaSubject'
 
   /api/clusters/{clusterName}/schemas/{subject}:
-    post:
-      tags:
-        - /api/clusters
-      summary: create a new subject schema
-      operationId: createNewSchema
-      parameters:
-        - name: clusterName
-          in: path
-          required: true
-          schema:
-            type: string
-        - name: subject
-          in: path
-          required: true
-          schema:
-            type: string
-      requestBody:
-        content:
-          application/json:
-            schema:
-              $ref: '#/components/schemas/NewSchemaSubject'
-      responses:
-        200:
-          description: Updated
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/SchemaSubject'
-        400:
-          description: Bad request
     delete:
       tags:
         - /api/clusters
@@ -1360,16 +1359,29 @@ components:
           type: string
         compatibilityLevel:
           type: string
+        schemaType:
+          $ref: '#/components/schemas/SchemaType'
       required:
         - id
+        - subject
+        - version
+        - schema
+        - compatibilityLevel
+        - schemaType
 
     NewSchemaSubject:
       type: object
       properties:
+        subject:
+          type: string
         schema:
          type: string
+        schemaType:
+          $ref: '#/components/schemas/SchemaType'
       required:
+        - subject
         - schema
+        - schemaType
 
     CompatibilityLevel:
       type: object
@@ -1387,13 +1399,12 @@ components:
       required:
         - compatibility
 
-#    CompatibilityLevelResponse:
-#      type: object
-#      properties:
-#        compatibilityLevel:
-#          type: string
-#      required:
-#        - compatibilityLevel
+    SchemaType:
+      type: string
+      enum:
+        - AVRO
+        - JSON
+        - PROTOBUF
 
     CompatibilityCheckResponse:
       type: object
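With the contract change above, the subject moves from the path into the request body, and all three fields of `NewSchemaSubject` become required. A client call against the new endpoint might look like this (a sketch; it assumes the API is running locally on port 8080 and uses the cluster name `local` from the tests):

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class CreateSchemaDemo {
  public static void main(String[] args) throws Exception {
    // schemaType must be AVRO, JSON or PROTOBUF. Expect 200 on success,
    // 409 for a duplicate schema, 422 for invalid params.
    String payload =
        "{\"subject\":\"demo\","
        + "\"schemaType\":\"AVRO\","
        + "\"schema\":\"{\\\"type\\\": \\\"string\\\"}\"}";
    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create("http://localhost:8080/api/clusters/local/schemas"))
        .header("Content-Type", "application/json")
        .POST(HttpRequest.BodyPublishers.ofString(payload))
        .build();
    HttpResponse<String> response = HttpClient.newHttpClient()
        .send(request, HttpResponse.BodyHandlers.ofString());
    System.out.println(response.statusCode() + " " + response.body());
  }
}
```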
diff --git a/kafka-ui-react-app/src/components/App.tsx b/kafka-ui-react-app/src/components/App.tsx
index a5fe4f6f89..66d77f6971 100644
--- a/kafka-ui-react-app/src/components/App.tsx
+++ b/kafka-ui-react-app/src/components/App.tsx
@@ -1,13 +1,10 @@
 import React from 'react';
-import { Switch, Route, Redirect } from 'react-router-dom';
+import { Switch, Route } from 'react-router-dom';
 import './App.scss';
-import BrokersContainer from './Brokers/BrokersContainer';
-import TopicsContainer from './Topics/TopicsContainer';
 import NavContainer from './Nav/NavContainer';
 import PageLoader from './common/PageLoader/PageLoader';
 import Dashboard from './Dashboard/Dashboard';
-import ConsumersGroupsContainer from './ConsumerGroups/ConsumersGroupsContainer';
-import SchemasContainer from './Schemas/SchemasContainer';
+import Cluster from './Cluster/Cluster';
 
 interface AppProps {
   isClusterListFetched: boolean;
@@ -44,29 +41,10 @@ const App: React.FC<AppProps> = ({
               path={['/', '/ui', '/ui/clusters']}
               component={Dashboard}
             />
-            <Route
-              path="/ui/clusters/:clusterName/brokers"
-              component={BrokersContainer}
-            />
-            <Route
-              path="/ui/clusters/:clusterName/topics"
-              component={TopicsContainer}
-            />
-            <Route
-              path="/ui/clusters/:clusterName/consumer-groups"
-              component={ConsumersGroupsContainer}
-            />
-            <Route
-              path="/ui/clusters/:clusterName/schemas"
-              component={SchemasContainer}
-            />
-            <Redirect
-              from="/ui/clusters/:clusterName"
-              to="/ui/clusters/:clusterName/brokers"
-            />
+            <Route path="/ui/clusters/:clusterName" component={Cluster} />
           </Switch>
         ) : (
-          <PageLoader />
+          <PageLoader />
         )}
diff --git a/kafka-ui-react-app/src/components/Cluster/Cluster.tsx b/kafka-ui-react-app/src/components/Cluster/Cluster.tsx
new file mode 100644
index 0000000000..fc5c157937
--- /dev/null
+++ b/kafka-ui-react-app/src/components/Cluster/Cluster.tsx
@@ -0,0 +1,39 @@
+import React from 'react';
+import { useSelector } from 'react-redux';
+import { Switch, Route, Redirect, useParams } from 'react-router-dom';
+import BrokersContainer from 'components/Brokers/BrokersContainer';
+import TopicsContainer from 'components/Topics/TopicsContainer';
+import ConsumersGroupsContainer from 'components/ConsumerGroups/ConsumersGroupsContainer';
+import Schemas from 'components/Schemas/Schemas';
+import { getClustersReadonlyStatus } from 'redux/reducers/clusters/selectors';
+import ClusterContext from 'components/contexts/ClusterContext';
+
+const Cluster: React.FC = () => {
+  const { clusterName } = useParams<{ clusterName: string }>();
+  const isReadOnly = useSelector(getClustersReadonlyStatus(clusterName));
+  return (
+    <ClusterContext.Provider value={{ isReadOnly }}>
+      <Switch>
+        <Route
+          path="/ui/clusters/:clusterName/brokers"
+          component={BrokersContainer}
+        />
+        <Route
+          path="/ui/clusters/:clusterName/topics"
+          component={TopicsContainer}
+        />
+        <Route
+          path="/ui/clusters/:clusterName/consumer-groups"
+          component={ConsumersGroupsContainer}
+        />
+        <Route path="/ui/clusters/:clusterName/schemas" component={Schemas} />
+        <Redirect
+          from="/ui/clusters/:clusterName"
+          to="/ui/clusters/:clusterName/brokers"
+        />
+      </Switch>
+    </ClusterContext.Provider>
+  );
+};
+
+export default Cluster;
diff --git a/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx b/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx
index c5b0007288..9e7fa5403c 100644
--- a/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx
+++ b/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx
@@ -17,6 +17,7 @@ const ClusterWidget: React.FC<ClusterWidgetProps> = ({
     bytesInPerSec,
     bytesOutPerSec,
     onlinePartitionCount,
+    readOnly,
   },
 }) => (
@@ -29,6 +30,9 @@ const ClusterWidget: React.FC<ClusterWidgetProps> = ({
         >
           {status}
         </div>
+        {readOnly && (
+          <div className="tag is-info is-light">readonly</div>
+        )}
         {name}
       </div>
diff --git a/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/ClusterWidget.spec.tsx b/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/ClusterWidget.spec.tsx
index e8d21e6ab3..4cc652e05c 100644
--- a/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/ClusterWidget.spec.tsx
+++ b/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/ClusterWidget.spec.tsx
@@ -70,4 +70,17 @@ describe('ClusterWidget', () => {
       ).toMatchSnapshot();
     });
   });
+
+  describe('when cluster is read-only', () => {
+    it('renders the tag', () => {
+      expect(
+        shallow(<ClusterWidget cluster={{ ...cluster, readOnly: true }} />)
+          .find('.title')
+          .childAt(1)
+          .text()
+      ).toEqual('readonly');
+    });
+  });
 });
diff --git a/kafka-ui-react-app/src/components/Schemas/Details/Details.tsx b/kafka-ui-react-app/src/components/Schemas/Details/Details.tsx
index 1753b11e66..5aa7b40749 100644
--- a/kafka-ui-react-app/src/components/Schemas/Details/Details.tsx
+++ b/kafka-ui-react-app/src/components/Schemas/Details/Details.tsx
@@ -2,12 +2,14 @@ import React from 'react';
 import { SchemaSubject } from 'generated-sources';
 import { ClusterName, SchemaName } from 'redux/interfaces';
 import { clusterSchemasPath } from 'lib/paths';
+import ClusterContext from 'components/contexts/ClusterContext';
 import Breadcrumb from '../../common/Breadcrumb/Breadcrumb';
 import SchemaVersion from './SchemaVersion';
 import LatestVersionItem from './LatestVersionItem';
 import PageLoader from '../../common/PageLoader/PageLoader';
 
 export interface DetailsProps {
+  subject: SchemaName;
   schema: SchemaSubject;
   clusterName: ClusterName;
   versions: SchemaSubject[];
@@ -19,15 +21,18 @@ export interface DetailsProps {
 }
 
 const Details: React.FC<DetailsProps> = ({
+  subject,
   schema,
   clusterName,
   fetchSchemaVersions,
   versions,
   isFetched,
 }) => {
+  const { isReadOnly } = React.useContext(ClusterContext);
   React.useEffect(() => {
-    fetchSchemaVersions(clusterName, schema.subject as SchemaName);
+    fetchSchemaVersions(clusterName, subject);
   }, [fetchSchemaVersions, clusterName]);
+
   return (
@@ -39,59 +44,63 @@ const Details: React.FC<DetailsProps> = ({
           },
         ]}
       >
-        {schema.subject}
+        {subject}
       </Breadcrumb>
-      <div className="box">
-        <div className="level">
-          <div className="level-left">
-            <div className="level-item">
-              <div className="mr-1">
-                <b>Latest Version</b>
-              </div>
-              <div className="tag is-info is-light" title="Version">
-                #{schema.version}
-              </div>
-            </div>
-          </div>
-          <div className="level-right">
-            <button className="button is-warning" type="button" title="in development" disabled>
-              <span className="icon">
-                <i className="fas fa-pencil-alt" />
-              </span>
-            </button>
-            <button className="button is-danger" type="button" title="in development" disabled>
-              <span className="icon">
-                <i className="fas fa-trash-alt" />
-              </span>
-            </button>
-          </div>
-        </div>
-        <LatestVersionItem schema={schema} />
-      </div>
       {isFetched ? (
-        <div className="box">
-          <table className="table is-striped is-fullwidth">
-            <thead>
-              <tr>
-                <th>Version</th>
-                <th>ID</th>
-                <th>Schema</th>
-              </tr>
-            </thead>
-            <tbody>
-              {versions.map((version) => (
-                <SchemaVersion key={version.id} version={version} />
-              ))}
-            </tbody>
-          </table>
-        </div>
+        <>
+          <div className="box">
+            <div className="level">
+              <div className="level-left">
+                <div className="level-item">
+                  <div className="mr-1">
+                    <b>Latest Version</b>
+                  </div>
+                  <div className="tag is-info is-light" title="Version">
+                    #{schema.version}
+                  </div>
+                </div>
+              </div>
+              {!isReadOnly && (
+                <div className="level-right">
+                  <button className="button is-warning" type="button" title="in development" disabled>
+                    <span className="icon">
+                      <i className="fas fa-pencil-alt" />
+                    </span>
+                  </button>
+                  <button className="button is-danger" type="button" title="in development" disabled>
+                    <span className="icon">
+                      <i className="fas fa-trash-alt" />
+                    </span>
+                  </button>
+                </div>
+              )}
+            </div>
+            <LatestVersionItem schema={schema} />
+          </div>
+          <div className="box">
+            <table className="table is-striped is-fullwidth">
+              <thead>
+                <tr>
+                  <th>Version</th>
+                  <th>ID</th>
+                  <th>Schema</th>
+                </tr>
+              </thead>
+              <tbody>
+                {versions.map((version) => (
+                  <SchemaVersion key={version.id} version={version} />
+                ))}
+              </tbody>
+            </table>
+          </div>
+        </>
       ) : (
         <PageLoader />
       )}
diff --git a/kafka-ui-react-app/src/components/Schemas/Details/DetailsContainer.ts b/kafka-ui-react-app/src/components/Schemas/Details/DetailsContainer.ts
index 072d2a44f8..dbfec5314c 100644
--- a/kafka-ui-react-app/src/components/Schemas/Details/DetailsContainer.ts
+++ b/kafka-ui-react-app/src/components/Schemas/Details/DetailsContainer.ts
@@ -24,6 +24,7 @@ const mapStateToProps = (
     },
   }: OwnProps
 ) => ({
+  subject,
   schema: getSchema(state, subject),
   versions: getSortedSchemaVersions(state),
   isFetched: getIsSchemaVersionFetched(state),
diff --git a/kafka-ui-react-app/src/components/Schemas/Details/__test__/Details.spec.tsx b/kafka-ui-react-app/src/components/Schemas/Details/__test__/Details.spec.tsx
index 816f20380c..312bd16fa4 100644
--- a/kafka-ui-react-app/src/components/Schemas/Details/__test__/Details.spec.tsx
+++ b/kafka-ui-react-app/src/components/Schemas/Details/__test__/Details.spec.tsx
@@ -1,7 +1,9 @@
 import React from 'react';
 import { Provider } from 'react-redux';
-import { shallow } from 'enzyme';
+import { shallow, mount } from 'enzyme';
 import configureStore from 'redux/store/configureStore';
+import { StaticRouter } from 'react-router';
+import ClusterContext from 'components/contexts/ClusterContext';
 import DetailsContainer from '../DetailsContainer';
 import Details, { DetailsProps } from '../Details';
 import { schema, versions } from './fixtures';
@@ -24,6 +26,7 @@ describe('Details', () => {
   describe('View', () => {
     const setupWrapper = (props: Partial<DetailsProps> = {}) => (
       <Details
+        subject={schema.subject as string}
         schema={schema}
         clusterName="Test cluster"
         fetchSchemaVersions={jest.fn()}
@@ -54,5 +57,19 @@ describe('Details', () => {
         expect(shallow(setupWrapper({ versions }))).toMatchSnapshot();
       });
     });
+
+    describe('when the readonly flag is set', () => {
+      it('does not render update & delete buttons', () => {
+        expect(
+          mount(
+            <StaticRouter>
+              <ClusterContext.Provider value={{ isReadOnly: true }}>
+                {setupWrapper({ versions })}
+              </ClusterContext.Provider>
+            </StaticRouter>
+          ).exists('.level-right')
+        ).toBeFalsy();
+      });
+    });
   });
 });
diff --git a/kafka-ui-react-app/src/components/Schemas/Details/__test__/__snapshots__/Details.spec.tsx.snap b/kafka-ui-react-app/src/components/Schemas/Details/__test__/__snapshots__/Details.spec.tsx.snap
index 7871bb5588..337111c166 100644
--- a/kafka-ui-react-app/src/components/Schemas/Details/__test__/__snapshots__/Details.spec.tsx.snap
+++ b/kafka-ui-react-app/src/components/Schemas/Details/__test__/__snapshots__/Details.spec.tsx.snap
@@ -75,6 +75,7 @@ exports[`Details View Initial state matches snapshot 1`] = `
     "compatibilityLevel": "BACKWARD",
     "id": 1,
     "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
+    "schemaType": "JSON",
     "subject": "test",
     "version": "1",
   }
@@ -126,67 +127,6 @@ exports[`Details View when page with schema versions is loading matches snapshot 1`] = `
   test
 </Breadcrumb>
-<div
-  className="box"
->
-  <div
-    className="level"
-  >
-    <div
-      className="level-left"
-    >
-      <div
-        className="level-item"
-      >
-        <div
-          className="mr-1"
-        >
-          <b>
-            Latest Version
-          </b>
-        </div>
-        <div
-          className="tag is-info is-light"
-          title="Version"
-        >
-          #
-          1
-        </div>
-      </div>
-    </div>
-    <div
-      className="level-right"
-    >
-      <button
-        className="button is-warning"
-        disabled={true}
-        title="in development"
-        type="button"
-      >
-        <span
-          className="icon"
-        >
-          <i
-            className="fas fa-pencil-alt"
-          />
-        </span>
-      </button>
-      <button
-        className="button is-danger"
-        disabled={true}
-        title="in development"
-        type="button"
-      >
-        <span
-          className="icon"
-        >
-          <i
-            className="fas fa-trash-alt"
-          />
-        </span>
-      </button>
-    </div>
-  </div>
-  <LatestVersionItem
-    schema={schema}
-  />
-</div>
 `;
@@ -266,6 +206,7 @@ exports[`Details View when page with schema versions loaded when schema has versions matches snapshot 1`] = `
     "compatibilityLevel": "BACKWARD",
     "id": 1,
     "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
+    "schemaType": "JSON",
     "subject": "test",
     "version": "1",
   }
@@ -299,6 +240,7 @@ exports[`Details View when page with schema versions loaded when schema has versions matches snapshot 1`] = `
     "compatibilityLevel": "BACKWARD",
     "id": 1,
     "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
+    "schemaType": "JSON",
     "subject": "test",
     "version": "1",
   }
@@ -311,6 +253,7 @@ exports[`Details View when page with schema versions loaded when schema has versions matches snapshot 1`] = `
     "compatibilityLevel": "BACKWARD",
     "id": 2,
     "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord2\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
+    "schemaType": "JSON",
    "subject": "test",
    "version": "2",
  }
@@ -397,6 +340,7 @@ exports[`Details View when page with schema versions loaded when versions are empty matches snapshot 1`] = `
     "compatibilityLevel": "BACKWARD",
     "id": 1,
     "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
+    "schemaType": "JSON",
     "subject": "test",
     "version": "1",
   }
diff --git a/kafka-ui-react-app/src/components/Schemas/Details/__test__/fixtures.ts b/kafka-ui-react-app/src/components/Schemas/Details/__test__/fixtures.ts
index 019532251b..d53190c9f6 100644
--- a/kafka-ui-react-app/src/components/Schemas/Details/__test__/fixtures.ts
+++ b/kafka-ui-react-app/src/components/Schemas/Details/__test__/fixtures.ts
@@ -1,4 +1,4 @@
-import { SchemaSubject } from 'generated-sources';
+import { SchemaSubject, SchemaType } from 'generated-sources';
 
 export const schema: SchemaSubject = {
   subject: 'test',
@@ -7,6 +7,7 @@ export const schema: SchemaSubject = {
   schema:
     '{"type":"record","name":"MyRecord1","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
   compatibilityLevel: 'BACKWARD',
+  schemaType: SchemaType.JSON,
 };
 
 export const versions: SchemaSubject[] = [
@@ -17,6 +18,7 @@ export const versions: SchemaSubject[] = [
     schema:
       '{"type":"record","name":"MyRecord1","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
     compatibilityLevel: 'BACKWARD',
+    schemaType: SchemaType.JSON,
   },
   {
     subject: 'test',
@@ -25,5 +27,6 @@ export const versions: SchemaSubject[] = [
     schema:
       '{"type":"record","name":"MyRecord2","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
     compatibilityLevel: 'BACKWARD',
+    schemaType: SchemaType.JSON,
   },
 ];
diff --git a/kafka-ui-react-app/src/components/Schemas/List/List.tsx b/kafka-ui-react-app/src/components/Schemas/List/List.tsx
index c7366d1f66..5e8ab6681b 100644
--- a/kafka-ui-react-app/src/components/Schemas/List/List.tsx
+++ b/kafka-ui-react-app/src/components/Schemas/List/List.tsx
@@ -2,54 +2,74 @@ import React from 'react';
 import { SchemaSubject } from 'generated-sources';
 import { NavLink, useParams } from 'react-router-dom';
 import { clusterSchemaNewPath } from 'lib/paths';
+import { ClusterName } from 'redux/interfaces';
+import PageLoader from 'components/common/PageLoader/PageLoader';
 import Breadcrumb from 'components/common/Breadcrumb/Breadcrumb';
+import ClusterContext from 'components/contexts/ClusterContext';
 import ListItem from
'./ListItem'; export interface ListProps { schemas: SchemaSubject[]; + isFetching: boolean; + fetchSchemasByClusterName: (clusterName: ClusterName) => void; } -const List: React.FC = ({ schemas }) => { +const List: React.FC = ({ + schemas, + isFetching, + fetchSchemasByClusterName, +}) => { + const { isReadOnly } = React.useContext(ClusterContext); const { clusterName } = useParams<{ clusterName: string }>(); + React.useEffect(() => { + fetchSchemasByClusterName(clusterName); + }, [fetchSchemasByClusterName, clusterName]); + return (
Schema Registry
-
- - Create Schema - -
+ {!isReadOnly && ( +
+ + Create Schema + +
+ )}
-
- - - - - - - - - - {schemas.length > 0 ? ( - schemas.map((subject) => ( - - )) - ) : ( + {isFetching ? ( + + ) : ( +
+
Schema NameVersionCompatibility
+ - + + + - )} - -
No schemas foundSchema NameVersionCompatibility
-
+ + + {schemas.length > 0 ? ( + schemas.map((subject) => ( + + )) + ) : ( + + No schemas found + + )} + + +
+ )} ); }; diff --git a/kafka-ui-react-app/src/components/Schemas/List/ListContainer.tsx b/kafka-ui-react-app/src/components/Schemas/List/ListContainer.tsx index 2925987540..9bd7c4fd6b 100644 --- a/kafka-ui-react-app/src/components/Schemas/List/ListContainer.tsx +++ b/kafka-ui-react-app/src/components/Schemas/List/ListContainer.tsx @@ -1,10 +1,19 @@ import { connect } from 'react-redux'; import { RootState } from 'redux/interfaces'; -import { getSchemaList } from 'redux/reducers/schemas/selectors'; +import { fetchSchemasByClusterName } from 'redux/actions'; +import { + getIsSchemaListFetching, + getSchemaList, +} from 'redux/reducers/schemas/selectors'; import List from './List'; const mapStateToProps = (state: RootState) => ({ + isFetching: getIsSchemaListFetching(state), schemas: getSchemaList(state), }); -export default connect(mapStateToProps)(List); +const mapDispatchToProps = { + fetchSchemasByClusterName, +}; + +export default connect(mapStateToProps, mapDispatchToProps)(List); diff --git a/kafka-ui-react-app/src/components/Schemas/List/__test__/List.spec.tsx b/kafka-ui-react-app/src/components/Schemas/List/__test__/List.spec.tsx index 4a803432fb..3e3aa07f03 100644 --- a/kafka-ui-react-app/src/components/Schemas/List/__test__/List.spec.tsx +++ b/kafka-ui-react-app/src/components/Schemas/List/__test__/List.spec.tsx @@ -3,6 +3,7 @@ import { mount, shallow } from 'enzyme'; import { Provider } from 'react-redux'; import { StaticRouter } from 'react-router'; import configureStore from 'redux/store/configureStore'; +import ClusterContext from 'components/contexts/ClusterContext'; import ListContainer from '../ListContainer'; import List, { ListProps } from '../List'; import { schemas } from './fixtures'; @@ -27,13 +28,50 @@ describe('List', () => { const setupWrapper = (props: Partial = {}) => ( - + ); + describe('Initial state', () => { + let useEffect: jest.SpyInstance< + void, + [effect: React.EffectCallback, deps?: React.DependencyList | undefined] + >; + const mockedFn = jest.fn(); + + const mockedUseEffect = () => { + useEffect.mockImplementationOnce(mockedFn); + }; + + beforeEach(() => { + useEffect = jest.spyOn(React, 'useEffect'); + mockedUseEffect(); + }); + + it('should call fetchSchemasByClusterName every render', () => { + mount(setupWrapper({ fetchSchemasByClusterName: mockedFn })); + expect(mockedFn).toHaveBeenCalled(); + }); + }); + + describe('when fetching', () => { + it('renders PageLoader', () => { + const wrapper = mount(setupWrapper({ isFetching: true })); + expect(wrapper.exists('Breadcrumb')).toBeTruthy(); + expect(wrapper.exists('thead')).toBeFalsy(); + expect(wrapper.exists('ListItem')).toBeFalsy(); + expect(wrapper.exists('PageLoader')).toBeTruthy(); + }); + }); + describe('without schemas', () => { it('renders table heading without ListItem', () => { - const wrapper = mount(setupWrapper()); + const wrapper = mount(setupWrapper({ isFetching: false })); expect(wrapper.exists('Breadcrumb')).toBeTruthy(); expect(wrapper.exists('thead')).toBeTruthy(); expect(wrapper.exists('ListItem')).toBeFalsy(); @@ -41,7 +79,7 @@ describe('List', () => { }); describe('with schemas', () => { - const wrapper = mount(setupWrapper({ schemas })); + const wrapper = mount(setupWrapper({ isFetching: false, schemas })); it('renders table heading with ListItem', () => { expect(wrapper.exists('Breadcrumb')).toBeTruthy(); @@ -49,5 +87,18 @@ describe('List', () => { expect(wrapper.find('ListItem').length).toEqual(3); }); }); + + describe('with readonly cluster', () => { + const wrapper = 
+      <StaticRouter>
+        <ClusterContext.Provider value={{ isReadOnly: true }}>
+          {setupWrapper({ schemas: [] })}
+        </ClusterContext.Provider>
+      </StaticRouter>
+    );
+    it('does not render Create Schema button', () => {
+      expect(wrapper.exists('NavLink')).toBeFalsy();
+    });
+  });
 });
 });
diff --git a/kafka-ui-react-app/src/components/Schemas/List/__test__/__snapshots__/ListItem.spec.tsx.snap b/kafka-ui-react-app/src/components/Schemas/List/__test__/__snapshots__/ListItem.spec.tsx.snap
index 1783cdc994..e26acad840 100644
--- a/kafka-ui-react-app/src/components/Schemas/List/__test__/__snapshots__/ListItem.spec.tsx.snap
+++ b/kafka-ui-react-app/src/components/Schemas/List/__test__/__snapshots__/ListItem.spec.tsx.snap
@@ -32,6 +32,7 @@ exports[`ListItem matches snapshot 1`] = `
     "compatibilityLevel": "BACKWARD",
     "id": 1,
     "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
+    "schemaType": "JSON",
     "subject": "test",
     "version": "1",
   }
diff --git a/kafka-ui-react-app/src/components/Schemas/List/__test__/fixtures.ts b/kafka-ui-react-app/src/components/Schemas/List/__test__/fixtures.ts
index 7b337a23ec..46a4d5a105 100644
--- a/kafka-ui-react-app/src/components/Schemas/List/__test__/fixtures.ts
+++ b/kafka-ui-react-app/src/components/Schemas/List/__test__/fixtures.ts
@@ -1,4 +1,4 @@
-import { SchemaSubject } from 'generated-sources';
+import { SchemaSubject, SchemaType } from 'generated-sources';
 
 export const schemas: SchemaSubject[] = [
   {
@@ -8,6 +8,7 @@ export const schemas: SchemaSubject[] = [
     schema:
       '{"type":"record","name":"MyRecord1","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
     compatibilityLevel: 'BACKWARD',
+    schemaType: SchemaType.JSON,
   },
   {
     subject: 'test2',
@@ -16,6 +17,7 @@ export const schemas: SchemaSubject[] = [
     schema:
       '{"type":"record","name":"MyRecord2","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
     compatibilityLevel: 'BACKWARD',
+    schemaType: SchemaType.JSON,
   },
   {
     subject: 'test3',
@@ -24,5 +26,6 @@ export const schemas: SchemaSubject[] = [
     schema:
       '{"type":"record","name":"MyRecord3","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
     compatibilityLevel: 'BACKWARD',
+    schemaType: SchemaType.JSON,
   },
 ];
diff --git a/kafka-ui-react-app/src/components/Schemas/New/New.tsx b/kafka-ui-react-app/src/components/Schemas/New/New.tsx
index cc15b9709e..7a1325b924 100644
--- a/kafka-ui-react-app/src/components/Schemas/New/New.tsx
+++ b/kafka-ui-react-app/src/components/Schemas/New/New.tsx
@@ -1,17 +1,16 @@
 import React from 'react';
-import { ClusterName, SchemaName, NewSchemaSubjectRaw } from 'redux/interfaces';
+import { ClusterName, NewSchemaSubjectRaw } from 'redux/interfaces';
 import { useForm } from 'react-hook-form';
 import { ErrorMessage } from '@hookform/error-message';
 import Breadcrumb from 'components/common/Breadcrumb/Breadcrumb';
 import { clusterSchemaPath, clusterSchemasPath } from 'lib/paths';
-import { NewSchemaSubject } from 'generated-sources';
+import { NewSchemaSubject, SchemaType } from 'generated-sources';
 import { SCHEMA_NAME_VALIDATION_PATTERN } from 'lib/constants';
 import { useHistory, useParams } from 'react-router';
 
 export interface NewProps {
   createSchema: (
     clusterName: ClusterName,
-    subject: SchemaName,
     newSchemaSubject: NewSchemaSubject
   ) => void;
 }
@@ -29,7 +28,11 @@ const New: React.FC<NewProps> = ({ createSchema }) => {
   const onSubmit = React.useCallback(
     async ({ subject, schema }: NewSchemaSubjectRaw) => {
       try {
-        await createSchema(clusterName, subject, { schema });
+        await createSchema(clusterName, {
+          subject,
+          schema,
+          schemaType: SchemaType.AVRO,
+        });
         history.push(clusterSchemaPath(clusterName, subject));
       } catch (e) {
         // Show Error
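Note on the API change above: the subject no longer travels in the URL; the thunk now POSTs it in the request body together with the schema text and a schema type. A minimal sketch of the new call shape, assuming the NewSchemaSubject type from the generated client and a hypothetical subject name:

    import { NewSchemaSubject, SchemaType } from 'generated-sources';

    // Hypothetical payload; the New form above hardcodes AVRO for now.
    const newSchemaSubject: NewSchemaSubject = {
      subject: 'my-topic-value',
      schema: '{"type":"record","name":"MyRecord","fields":[]}',
      schemaType: SchemaType.AVRO,
    };

    // createSchema is the thunk-backed prop declared in NewProps above; it
    // POSTs to /api/clusters/{clusterName}/schemas with the subject in the body.
    declare const createSchema: (
      clusterName: string,
      payload: NewSchemaSubject
    ) => Promise<void>;

    createSchema('local', newSchemaSubject);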
diff --git a/kafka-ui-react-app/src/components/Schemas/Schemas.tsx b/kafka-ui-react-app/src/components/Schemas/Schemas.tsx
index 030aa71229..62aa84f81f 100644
--- a/kafka-ui-react-app/src/components/Schemas/Schemas.tsx
+++ b/kafka-ui-react-app/src/components/Schemas/Schemas.tsx
@@ -1,49 +1,27 @@
 import React from 'react';
-import { ClusterName } from 'redux/interfaces';
-import { Switch, Route, useParams } from 'react-router-dom';
-import PageLoader from 'components/common/PageLoader/PageLoader';
+import { Switch, Route } from 'react-router-dom';
 import ListContainer from './List/ListContainer';
 import DetailsContainer from './Details/DetailsContainer';
 import NewContainer from './New/NewContainer';
 
-export interface SchemasProps {
-  isFetching: boolean;
-  fetchSchemasByClusterName: (clusterName: ClusterName) => void;
-}
-
-const Schemas: React.FC<SchemasProps> = ({
-  isFetching,
-  fetchSchemasByClusterName,
-}) => {
-  const { clusterName } = useParams<{ clusterName: string }>();
-
-  React.useEffect(() => {
-    fetchSchemasByClusterName(clusterName);
-  }, [fetchSchemasByClusterName, clusterName]);
-
-  if (isFetching) {
-    return <PageLoader />;
-  }
-
-  return (
-    <Switch>
-      <Route exact path="/ui/clusters/:clusterName/schemas" component={ListContainer} />
-      <Route exact path="/ui/clusters/:clusterName/schemas/new" component={NewContainer} />
-      <Route exact path="/ui/clusters/:clusterName/schemas/:subject/latest" component={DetailsContainer} />
-    </Switch>
-  );
-};
+const Schemas: React.FC = () => (
+  <Switch>
+    <Route exact path="/ui/clusters/:clusterName/schemas" component={ListContainer} />
+    <Route exact path="/ui/clusters/:clusterName/schemas/new" component={NewContainer} />
+    <Route exact path="/ui/clusters/:clusterName/schemas/:subject/latest" component={DetailsContainer} />
+  </Switch>
+);
 
 export default Schemas;
diff --git a/kafka-ui-react-app/src/components/Schemas/SchemasContainer.tsx b/kafka-ui-react-app/src/components/Schemas/SchemasContainer.tsx
deleted file mode 100644
index a70e9d902c..0000000000
--- a/kafka-ui-react-app/src/components/Schemas/SchemasContainer.tsx
+++ /dev/null
@@ -1,15 +0,0 @@
-import { connect } from 'react-redux';
-import { RootState } from 'redux/interfaces';
-import { fetchSchemasByClusterName } from 'redux/actions';
-import { getIsSchemaListFetching } from 'redux/reducers/schemas/selectors';
-import Schemas from './Schemas';
-
-const mapStateToProps = (state: RootState) => ({
-  isFetching: getIsSchemaListFetching(state),
-});
-
-const mapDispatchToProps = {
-  fetchSchemasByClusterName,
-};
-
-export default connect(mapStateToProps, mapDispatchToProps)(Schemas);
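With SchemasContainer deleted, Schemas is reduced to pure routing and the fetch-on-mount logic now lives next to the data it serves, in List (wired through ListContainer earlier in this diff). A minimal sketch of that pattern, using the prop names from the diff; the real component renders a Breadcrumb and table rather than this stand-in list:

    import React from 'react';
    import { useParams } from 'react-router';
    import PageLoader from 'components/common/PageLoader/PageLoader';
    import { SchemaSubject } from 'generated-sources';

    export interface ListProps {
      schemas: SchemaSubject[];
      isFetching: boolean;
      fetchSchemasByClusterName: (clusterName: string) => void;
    }

    const List: React.FC<ListProps> = ({
      schemas,
      isFetching,
      fetchSchemasByClusterName,
    }) => {
      // Re-fetch whenever the cluster in the URL changes.
      const { clusterName } = useParams<{ clusterName: string }>();
      React.useEffect(() => {
        fetchSchemasByClusterName(clusterName);
      }, [fetchSchemasByClusterName, clusterName]);

      // Half-height loader while the schema list request is in flight.
      if (isFetching) return <PageLoader />;

      // Stand-in for the real table markup.
      return (
        <ul>
          {schemas.map(({ subject }) => (
            <li key={subject}>{subject}</li>
          ))}
        </ul>
      );
    };

    export default List;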
diff --git a/kafka-ui-react-app/src/components/Schemas/__test__/Schemas.spec.tsx b/kafka-ui-react-app/src/components/Schemas/__test__/Schemas.spec.tsx
index 1c24d2cff3..e643afeae9 100644
--- a/kafka-ui-react-app/src/components/Schemas/__test__/Schemas.spec.tsx
+++ b/kafka-ui-react-app/src/components/Schemas/__test__/Schemas.spec.tsx
@@ -1,71 +1,18 @@
 import React from 'react';
-import { Provider } from 'react-redux';
-import { mount } from 'enzyme';
-import configureStore from 'redux/store/configureStore';
+import { shallow } from 'enzyme';
 import { StaticRouter } from 'react-router-dom';
-import Schemas, { SchemasProps } from '../Schemas';
-import SchemasContainer from '../SchemasContainer';
+import Schemas from '../Schemas';
 
 describe('Schemas', () => {
   const pathname = `/ui/clusters/clusterName/schemas`;
 
-  describe('Container', () => {
-    const store = configureStore();
+  it('renders', () => {
+    const wrapper = shallow(
+      <StaticRouter location={{ pathname }} context={{}}>
+        <Schemas />
+      </StaticRouter>
+    );
 
-    it('renders view', () => {
-      const component = mount(
-        <Provider store={store}>
-          <StaticRouter location={{ pathname }} context={{}}>
-            <SchemasContainer />
-          </StaticRouter>
-        </Provider>
-      );
-
-      expect(component.exists()).toBeTruthy();
-    });
-
-    describe('View', () => {
-      const setupWrapper = (props: Partial<SchemasProps> = {}) => (
-        <StaticRouter location={{ pathname }} context={{}}>
-          <Schemas isFetching={false} fetchSchemasByClusterName={jest.fn()} {...props} />
-        </StaticRouter>
-      );
-      describe('Initial state', () => {
-        let useEffect: jest.SpyInstance<
-          void,
-          [
-            effect: React.EffectCallback,
-            deps?: React.DependencyList | undefined
-          ]
-        >;
-        const mockedFn = jest.fn();
-
-        const mockedUseEffect = () => {
-          useEffect.mockImplementationOnce(mockedFn);
-        };
-
-        beforeEach(() => {
-          useEffect = jest.spyOn(React, 'useEffect');
-          mockedUseEffect();
-        });
-
-        it('should call fetchSchemasByClusterName every render', () => {
-          mount(setupWrapper({ fetchSchemasByClusterName: mockedFn }));
-          expect(mockedFn).toHaveBeenCalled();
-        });
-      });
-
-      describe('when page is loading', () => {
-        const wrapper = mount(setupWrapper({ isFetching: true }));
-
-        it('renders PageLoader', () => {
-          expect(wrapper.exists('PageLoader')).toBeTruthy();
-        });
-      });
-    });
-  });
+    expect(wrapper.exists('Schemas')).toBeTruthy();
+  });
 });
diff --git a/kafka-ui-react-app/src/components/Topics/Details/Details.tsx b/kafka-ui-react-app/src/components/Topics/Details/Details.tsx
index 1b1cb9903f..796be2c6f9 100644
--- a/kafka-ui-react-app/src/components/Topics/Details/Details.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Details/Details.tsx
@@ -10,6 +10,7 @@ import {
   clusterTopicMessagesPath,
   clusterTopicsTopicEditPath,
 } from 'lib/paths';
+import ClusterContext from 'components/contexts/ClusterContext';
 import OverviewContainer from './Overview/OverviewContainer';
 import MessagesContainer from './Messages/MessagesContainer';
 import SettingsContainer from './Settings/SettingsContainer';
@@ -21,6 +22,7 @@ interface Props extends Topic, TopicDetails {
 }
 
 const Details: React.FC<Props> = ({ clusterName, topicName }) => {
+  const { isReadOnly } = React.useContext(ClusterContext);
   return (
@@ -33,9 +35,11 @@ const Details: React.FC<Props> = ({ clusterName, topicName }) => {
           {topicName}
-        <Link className="button" to={clusterTopicsTopicEditPath(clusterName, topicName)}>Edit settings</Link>
+        {!isReadOnly && (
+          <Link className="button" to={clusterTopicsTopicEditPath(clusterName, topicName)}>Edit settings</Link>
+        )}
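Details, like the schema List above and the topic List below, reads an isReadOnly flag from ClusterContext and simply skips rendering mutating controls. The context file itself is not part of this diff; a plausible minimal shape, inferred only from how it is consumed here:

    import React from 'react';

    // Assumed contents of components/contexts/ClusterContext: default to a
    // writable cluster so screens without a provider keep their old behaviour.
    const ClusterContext = React.createContext({ isReadOnly: false });

    export default ClusterContext;

A page-level component would mount a single ClusterContext.Provider with the cluster's readonly flag, and every nested screen picks it up via React.useContext.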
diff --git a/kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx b/kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx
index fbb03cd677..bc4b2ae4c6 100644
--- a/kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx
@@ -195,7 +195,7 @@ const Messages: React.FC<Props> = ({
   };
 
   if (!isFetched) {
-    return <PageLoader isFullHeight={false} />;
+    return <PageLoader />;
   }
 
   return (
diff --git a/kafka-ui-react-app/src/components/Topics/List/List.tsx b/kafka-ui-react-app/src/components/Topics/List/List.tsx
index ad6a286e90..879c08134a 100644
--- a/kafka-ui-react-app/src/components/Topics/List/List.tsx
+++ b/kafka-ui-react-app/src/components/Topics/List/List.tsx
@@ -3,6 +3,7 @@ import { TopicWithDetailedInfo, ClusterName } from 'redux/interfaces';
 import Breadcrumb from 'components/common/Breadcrumb/Breadcrumb';
 import { NavLink } from 'react-router-dom';
 import { clusterTopicNewPath } from 'lib/paths';
+import ClusterContext from 'components/contexts/ClusterContext';
 import ListItem from './ListItem';
 
 interface Props {
@@ -15,7 +16,7 @@ const List: React.FC<Props> = ({ clusterName, topics, externalTopics }) => {
   const [showInternal, setShowInternal] = React.useState(true);
 
   const handleSwitch = () => setShowInternal(!showInternal);
-
+  const { isReadOnly } = React.useContext(ClusterContext);
   const items = showInternal ? topics : externalTopics;
 
   return (
@@ -38,12 +39,14 @@
-          <NavLink className="button is-primary" to={clusterTopicNewPath(clusterName)}>
-            Add a Topic
-          </NavLink>
+          {!isReadOnly && (
+            <NavLink className="button is-primary" to={clusterTopicNewPath(clusterName)}>
+              Add a Topic
+            </NavLink>
+          )}
diff --git a/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx b/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx
new file mode 100644
index 0000000000..0e857e276b
--- /dev/null
+++ b/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx
@@ -0,0 +1,22 @@
+import { mount } from 'enzyme';
+import React from 'react';
+import ClusterContext from 'components/contexts/ClusterContext';
+import List from '../List';
+
+describe('List', () => {
+  describe('when it has readonly flag', () => {
+    it('does not render the Add a Topic button', () => {
+      const props = {
+        clusterName: 'Cluster',
+        topics: [],
+        externalTopics: [],
+      };
+      const component = mount(
+        <ClusterContext.Provider value={{ isReadOnly: true }}>
+          <List {...props} />
+        </ClusterContext.Provider>
+      );
+      expect(component.exists('NavLink')).toBeFalsy();
+    });
+  });
+});
diff --git a/kafka-ui-react-app/src/components/Topics/Topics.tsx b/kafka-ui-react-app/src/components/Topics/Topics.tsx
index 5317ec1abf..d119abdfc8 100644
--- a/kafka-ui-react-app/src/components/Topics/Topics.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topics.tsx
@@ -10,7 +10,6 @@ import NewContainer from './New/NewContainer';
 
 interface Props {
   clusterName: ClusterName;
   isFetched: boolean;
-  fetchBrokers: (clusterName: ClusterName) => void;
   fetchTopicsList: (clusterName: ClusterName) => void;
 }
diff --git a/kafka-ui-react-app/src/components/common/Dashboard/__tests__/MetricsWrapper.spec.tsx b/kafka-ui-react-app/src/components/common/Dashboard/__tests__/MetricsWrapper.spec.tsx
index 73864385ff..c5871d50ff 100644
--- a/kafka-ui-react-app/src/components/common/Dashboard/__tests__/MetricsWrapper.spec.tsx
+++ b/kafka-ui-react-app/src/components/common/Dashboard/__tests__/MetricsWrapper.spec.tsx
@@ -8,17 +8,17 @@ describe('MetricsWrapper', () => {
     const component = shallow(
       <MetricsWrapper multiline wrapperClassName={className} />
     );
-    expect(component.find(`.${className}`).exists()).toBeTruthy();
-    expect(component.find('.level-multiline').exists()).toBeTruthy();
+    expect(component.exists(`.${className}`)).toBeTruthy();
+    expect(component.exists('.level-multiline')).toBeTruthy();
   });
 
   it('correctly renders children', () => {
     let component = shallow(<MetricsWrapper />);
-    expect(component.find('.subtitle').exists()).toBeFalsy();
+    expect(component.exists('.subtitle')).toBeFalsy();
 
     const title = 'title';
     component = shallow(<MetricsWrapper title={title} />);
-    expect(component.find('.subtitle').exists()).toBeTruthy();
+    expect(component.exists('.subtitle')).toBeTruthy();
     expect(component.text()).toEqual(title);
   });
 });
diff --git a/kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx b/kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx
index 2f5932a4f8..98c70eb483 100644
--- a/kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx
+++ b/kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx
@@ -2,14 +2,14 @@ import React from 'react';
 import cx from 'classnames';
 
 interface Props {
-  isFullHeight: boolean;
+  fullHeight: boolean;
 }
 
-const PageLoader: React.FC<Partial<Props>> = ({ isFullHeight = true }) => (
+const PageLoader: React.FC<Partial<Props>> = ({ fullHeight }) => (
   <section
     className={cx(
       'hero',
-      isFullHeight ? 'is-fullheight-with-navbar' : 'is-halfheight'
+      fullHeight ? 'is-fullheight-with-navbar' : 'is-halfheight'
     )}
   >
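The PageLoader rename also flips the default: isFullHeight defaulted to true, while fullHeight is opt-in, so inline spinners (like the one in Messages above) need no prop at all and only route-level loaders ask for full height. A small sketch of the two call sites under that assumption:

    import React from 'react';
    import PageLoader from 'components/common/PageLoader/PageLoader';

    // Inline, half-height spinner: the new default needs no props.
    export const InlineLoading: React.FC = () => <PageLoader />;

    // Route-level spinner now opts in to full height explicitly.
    export const RouteLoading: React.FC = () => <PageLoader fullHeight />;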
diff --git a/kafka-ui-react-app/src/components/common/PageLoader/__tests__/PageLoader.spec.tsx b/kafka-ui-react-app/src/components/common/PageLoader/__tests__/PageLoader.spec.tsx
--- a/kafka-ui-react-app/src/components/common/PageLoader/__tests__/PageLoader.spec.tsx
+++ b/kafka-ui-react-app/src/components/common/PageLoader/__tests__/PageLoader.spec.tsx
@@ -5,6 +5,17 @@
 describe('PageLoader', () => {
   it('matches the snapshot', () => {
-    const component = mount(<PageLoader />);
-    expect(component).toMatchSnapshot();
+    expect(mount(<PageLoader />)).toMatchSnapshot();
+  });
+
+  it('renders half-height page loader by default', () => {
+    const wrapper = mount(<PageLoader />);
+    expect(wrapper.exists('.hero.is-halfheight')).toBeTruthy();
+    expect(wrapper.exists('.hero.is-fullheight-with-navbar')).toBeFalsy();
+  });
+
+  it('renders fullheight page loader', () => {
+    const wrapper = mount(<PageLoader fullHeight />);
+    expect(wrapper.exists('.hero.is-halfheight')).toBeFalsy();
+    expect(wrapper.exists('.hero.is-fullheight-with-navbar')).toBeTruthy();
   });
 });
diff --git a/kafka-ui-react-app/src/components/common/PageLoader/__tests__/__snapshots__/PageLoader.spec.tsx.snap b/kafka-ui-react-app/src/components/common/PageLoader/__tests__/__snapshots__/PageLoader.spec.tsx.snap
index 10a9cf0631..34e9145f31 100644
--- a/kafka-ui-react-app/src/components/common/PageLoader/__tests__/__snapshots__/PageLoader.spec.tsx.snap
+++ b/kafka-ui-react-app/src/components/common/PageLoader/__tests__/__snapshots__/PageLoader.spec.tsx.snap
@@ -3,7 +3,7 @@
 exports[`PageLoader matches the snapshot 1`] = `
 <section
-  className="hero is-fullheight-with-navbar"
+  className="hero is-halfheight"
 >
diff --git a/kafka-ui-react-app/src/redux/actions/__tests__/thunks.spec.ts b/kafka-ui-react-app/src/redux/actions/__tests__/thunks.spec.ts
--- a/kafka-ui-react-app/src/redux/actions/__tests__/thunks.spec.ts
+++ b/kafka-ui-react-app/src/redux/actions/__tests__/thunks.spec.ts
@@ -110,12 +110,12 @@ describe('Thunks', () => {
   describe('createSchema', () => {
     it('creates POST_SCHEMA__SUCCESS when posting new schema', async () => {
-      fetchMock.postOnce(`/api/clusters/${clusterName}/schemas/${subject}`, {
+      fetchMock.postOnce(`/api/clusters/${clusterName}/schemas`, {
         body: schemaFixtures.schemaVersionsPayload[0],
       });
       await store.dispatch(
-        thunks.createSchema(clusterName, subject, fixtures.schemaPayload)
+        thunks.createSchema(clusterName, fixtures.schemaPayload)
       );
       expect(store.getActions()).toEqual([
         actions.createSchemaAction.request(),
@@ -122,19 +122,19 @@ describe('Thunks', () => {
       ]);
     });
 
-    // it('creates POST_SCHEMA__FAILURE when posting new schema', async () => {
-    //   fetchMock.postOnce(
-    //     `/api/clusters/${clusterName}/schemas/${subject}`,
-    //     404
-    //   );
-    //   await store.dispatch(
-    //     thunks.createSchema(clusterName, subject, fixtures.schemaPayload)
-    //   );
-    //   expect(store.getActions()).toEqual([
-    //     actions.createSchemaAction.request(),
-    //     actions.createSchemaAction.failure(),
-    //   ]);
-    //   expect(store.getActions()).toThrow();
-    // });
+    it('creates POST_SCHEMA__FAILURE when posting new schema', async () => {
+      fetchMock.postOnce(`/api/clusters/${clusterName}/schemas`, 404);
+      try {
+        await store.dispatch(
+          thunks.createSchema(clusterName, fixtures.schemaPayload)
+        );
+      } catch (error) {
+        expect(error.status).toEqual(404);
+        expect(store.getActions()).toEqual([
+          actions.createSchemaAction.request(),
+          actions.createSchemaAction.failure(),
+        ]);
+      }
+    });
   });
 });
diff --git a/kafka-ui-react-app/src/redux/actions/thunks.ts b/kafka-ui-react-app/src/redux/actions/thunks.ts
index f650d011bf..36e1ccaeb0 100644
--- a/kafka-ui-react-app/src/redux/actions/thunks.ts
+++ b/kafka-ui-react-app/src/redux/actions/thunks.ts
@@ -285,14 +285,12 @@ export const fetchSchemaVersions = (
 
 export const createSchema = (
   clusterName: ClusterName,
-  subject: SchemaName,
   newSchemaSubject: NewSchemaSubject
 ): PromiseThunkResult => async (dispatch) => {
   dispatch(actions.createSchemaAction.request());
   try {
     const schema: SchemaSubject = await apiClient.createNewSchema({
       clusterName,
-      subject,
       newSchemaSubject,
     });
     dispatch(actions.createSchemaAction.success(schema));
diff --git a/kafka-ui-react-app/src/redux/reducers/clusters/selectors.ts b/kafka-ui-react-app/src/redux/reducers/clusters/selectors.ts
index e6667fc3e0..a8b06b0f17 100644
--- a/kafka-ui-react-app/src/redux/reducers/clusters/selectors.ts
+++ b/kafka-ui-react-app/src/redux/reducers/clusters/selectors.ts
@@ -24,3 +24,10 @@ export const getOnlineClusters = createSelector(getClusterList, (clusters) =>
 export const getOfflineClusters = createSelector(getClusterList, (clusters) =>
   clusters.filter(({ status }) => status === ServerStatus.OFFLINE)
 );
+
+export const getClustersReadonlyStatus = (clusterName: string) =>
+  createSelector(
+    getClusterList,
+    (clusters): boolean =>
+      clusters.find(({ name }) => name === clusterName)?.readOnly || false
+  );
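getClustersReadonlyStatus above is a selector factory: each call builds a memoized selector scoped to one cluster name. A sketch of how a connected page might use it to feed ClusterContext; the container and ownProps shape are assumptions, not part of this diff:

    import { connect } from 'react-redux';
    import { RootState } from 'redux/interfaces';
    import { getClustersReadonlyStatus } from 'redux/reducers/clusters/selectors';

    // Hypothetical ownProps: the cluster name arrives from the route params.
    interface OwnProps {
      clusterName: string;
    }

    const mapStateToProps = (state: RootState, { clusterName }: OwnProps) => ({
      // Build the per-cluster selector, then apply it to the store state.
      isReadOnly: getClustersReadonlyStatus(clusterName)(state),
    });

    // connect(mapStateToProps)(SomeClusterPage) would then wrap its children
    // in a ClusterContext.Provider carrying { isReadOnly }.
    export default mapStateToProps;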
"{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord2\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test", "version": "2", }, @@ -19,6 +20,7 @@ Object { "compatibilityLevel": "BACKWARD", "id": 4, "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord4\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test2", "version": "3", }, @@ -26,6 +28,7 @@ Object { "compatibilityLevel": "BACKWARD", "id": 5, "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test3", "version": "1", }, @@ -43,6 +46,7 @@ Object { "compatibilityLevel": "BACKWARD", "id": 1, "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test", "version": "1", }, @@ -50,6 +54,7 @@ Object { "compatibilityLevel": "BACKWARD", "id": 2, "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord2\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test", "version": "2", }, @@ -67,6 +72,7 @@ Object { "compatibilityLevel": "BACKWARD", "id": 1, "schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}", + "schemaType": "JSON", "subject": "test", "version": "1", }, diff --git a/kafka-ui-react-app/src/redux/reducers/schemas/__test__/fixtures.ts b/kafka-ui-react-app/src/redux/reducers/schemas/__test__/fixtures.ts index 5a75ed212b..a53f1dced7 100644 --- a/kafka-ui-react-app/src/redux/reducers/schemas/__test__/fixtures.ts +++ b/kafka-ui-react-app/src/redux/reducers/schemas/__test__/fixtures.ts @@ -1,5 +1,5 @@ import { SchemasState } from 'redux/interfaces'; -import { SchemaSubject } from 'generated-sources'; +import { SchemaSubject, SchemaType } from 'generated-sources'; export const initialState: SchemasState = { byName: {}, @@ -15,6 +15,7 @@ export const clusterSchemasPayload: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord4","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, { subject: 'test3', @@ -23,6 +24,7 @@ export const clusterSchemasPayload: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, { subject: 'test', @@ -31,6 +33,7 @@ export const clusterSchemasPayload: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord2","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, ]; @@ -42,6 +45,7 @@ export const schemaVersionsPayload: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord1","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + schemaType: SchemaType.JSON, }, { subject: 'test', @@ -50,6 +54,7 @@ export const schemaVersionsPayload: SchemaSubject[] = [ schema: '{"type":"record","name":"MyRecord2","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}', compatibilityLevel: 'BACKWARD', + 
+    schemaType: SchemaType.JSON,
   },
 ];
 
@@ -60,6 +65,7 @@ export const newSchemaPayload: SchemaSubject = {
   schema:
     '{"type":"record","name":"MyRecord4","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
   compatibilityLevel: 'BACKWARD',
+  schemaType: SchemaType.JSON,
 };
 
 export const clusterSchemasPayloadWithNewSchema: SchemaSubject[] = [
   {
@@ -70,6 +76,7 @@ export const clusterSchemasPayloadWithNewSchema: SchemaSubject[] = [
     schema:
       '{"type":"record","name":"MyRecord4","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
     compatibilityLevel: 'BACKWARD',
+    schemaType: SchemaType.JSON,
   },
   {
     subject: 'test3',
@@ -78,6 +85,7 @@ export const clusterSchemasPayloadWithNewSchema: SchemaSubject[] = [
     schema:
       '{"type":"record","name":"MyRecord","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
     compatibilityLevel: 'BACKWARD',
+    schemaType: SchemaType.JSON,
   },
   {
     subject: 'test',
@@ -86,6 +94,7 @@ export const clusterSchemasPayloadWithNewSchema: SchemaSubject[] = [
     schema:
       '{"type":"record","name":"MyRecord2","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
     compatibilityLevel: 'BACKWARD',
+    schemaType: SchemaType.JSON,
   },
   {
     subject: 'test4',
@@ -94,5 +103,6 @@ export const clusterSchemasPayloadWithNewSchema: SchemaSubject[] = [
     schema:
       '{"type":"record","name":"MyRecord4","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
     compatibilityLevel: 'BACKWARD',
+    schemaType: SchemaType.JSON,
   },
 ];
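All of the fixture churn above exists because the generated SchemaSubject type gained a required schemaType field. If more fixtures accumulate, a small factory (hypothetical, not part of this diff) would keep the defaults in one place:

    import { SchemaSubject, SchemaType } from 'generated-sources';

    // Hypothetical helper: spread overrides on top of sensible defaults.
    export const buildSchemaSubject = (
      overrides: Partial<SchemaSubject> = {}
    ): SchemaSubject => ({
      subject: 'test',
      version: '1',
      id: 1,
      schema:
        '{"type":"record","name":"MyRecord","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
      compatibilityLevel: 'BACKWARD',
      schemaType: SchemaType.JSON,
      ...overrides,
    });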