Merge branch 'master' into feature/#190-json-export

This commit is contained in:
GneyHabub 2021-03-13 13:32:25 +03:00
commit bab84f5db1
59 changed files with 929 additions and 509 deletions


@ -1,20 +1,19 @@
name: backend
on:
push:
branches: [ '*' ]
pull_request:
branches: [ master ]
branches:
- "**"
- "!master"
jobs:
mvn-all-build:
build:
runs-on: ubuntu-latest
steps:
- name: Cache local Maven repository
uses: actions/cache@v1
uses: actions/cache@v2
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-all-${{ hashFiles('**/pom.xml') }}
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-all-
${{ runner.os }}-maven-
- uses: actions/checkout@v2
- name: Set up JDK 1.13


@ -1,31 +0,0 @@
name: charts
on:
create:
tags:
- "v*.*.*"
jobs:
release:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- run: |
git config user.name github-actions
git config user.email github-actions@github.com
- uses: azure/setup-helm@v1
- name: update appVersion
run: |
export version=${GITHUB_REF##*/}
sed -i "s/appVersion:.*/appVersion: ${version}/" charts/kafka-ui/Chart.yaml
- name:
run: |
export VERSION=${GITHUB_REF##*/}
MSG=$(helm package --app-version ${VERSION} charts/kafka-ui)
git fetch origin
git stash
git checkout -b gh-pages origin/gh-pages
helm repo index .
git add -f ${MSG##*/} index.yaml
git commit -m "release ${VERSION}"
git push


@ -1,9 +1,9 @@
name: frontend
on:
push:
branches: [ '*' ]
pull_request:
branches: [ master ]
branches:
- "**"
- "!master"
jobs:
npm-test:
needs: [mvn-contract-build]

.github/workflows/latest.yaml vendored Normal file

@ -0,0 +1,61 @@
name: latest
on:
workflow_dispatch:
push:
branches: [ "master" ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Cache local Maven repository
uses: actions/cache@v2
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-
- name: Set up JDK 1.13
uses: actions/setup-java@v1
with:
java-version: 1.13
- name: Build
id: build
run: |
export VERSION=$(mvn -q -Dexec.executable=echo -Dexec.args='${project.version}' --non-recursive exec:exec)
echo "::set-output name=version::${VERSION}"
mvn clean package -Pprod -DskipTests
#################
# #
# Docker images #
# #
#################
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
id: docker_build_and_push
uses: docker/build-push-action@v2
with:
builder: ${{ steps.buildx.outputs.name }}
context: kafka-ui-api
push: true
tags: provectuslabs/kafka-ui:latest
build-args: |
JAR_FILE=kafka-ui-api-${{ steps.build.outputs.version }}.jar
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache


@ -59,19 +59,32 @@ jobs:
${{ runner.os }}-buildx-
- name: Login to DockerHub
if: github.ref == 'refs/heads/master'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
- name: Build
if: github.ref != 'refs/heads/master'
id: docker_build
uses: docker/build-push-action@v2
with:
builder: ${{ steps.buildx.outputs.name }}
context: kafka-ui-api
push: github.ref == 'refs/heads/master'
tags: provectuslabs/kafka-ui:${{ steps.prep.outputs.version }}
push: false
build-args: |
JAR_FILE=kafka-ui-api-${{ steps.prep.outputs.version }}.jar
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
- name: Build and push
if: github.ref == 'refs/heads/master'
id: docker_build_and_push
uses: docker/build-push-action@v2
with:
builder: ${{ steps.buildx.outputs.name }}
context: kafka-ui-api
push: true
tags: provectuslabs/kafka-ui:${{ steps.prep.outputs.version }}
build-args: |
JAR_FILE=kafka-ui-api-${{ steps.prep.outputs.version }}.jar
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache

.github/workflows/tags.yaml vendored Normal file

@ -0,0 +1,68 @@
name: after_release
on:
push:
tags:
- "v**"
jobs:
charts:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 1
- run: |
git config user.name github-actions
git config user.email github-actions@github.com
- uses: azure/setup-helm@v1
- name: update appVersion
run: |
export version=${GITHUB_REF##*/}
sed -i "s/appVersion:.*/appVersion: ${version}/" charts/kafka-ui/Chart.yaml
- name:
run: |
export VERSION=${GITHUB_REF##*/}
MSG=$(helm package --app-version ${VERSION} charts/kafka-ui)
git fetch origin
git stash
git checkout -b gh-pages origin/gh-pages
helm repo index .
git add -f ${MSG##*/} index.yaml
git commit -m "release ${VERSION}"
git push
gh-release:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- run: |
git config user.name github-actions
git config user.email github-actions@github.com
- id: generate
shell: /usr/bin/bash -x -e {0}
run: |
VERSION=${GITHUB_REF##*/}
CHANGELOG=$(git --no-pager log --oneline --pretty=format:"- %s" `git tag --sort=-creatordate | grep '^v.*' | head -n2 | tail -n1`.. | uniq | grep -v '^- Merge\|^- skip')
CHANGELOG="${CHANGELOG//'%'/'%25'}"
CHANGELOG="${CHANGELOG//$'\n'/'%0A'}"
CHANGELOG="${CHANGELOG//$'\r'/'%0D'}"
echo ${CHANGELOG}
echo "::set-output name=changelog::${CHANGELOG}"
echo "::set-output name=version::${VERSION}"
- id: create_release
uses: actions/github-script@v3
env:
CHANGELOG: ${{steps.generate.outputs.changelog}}
VERSION: ${{steps.generate.outputs.version}}
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
github.repos.createRelease({
owner: context.repo.owner,
repo: context.repo.repo,
tag_name: context.ref,
name: "Release "+process.env.VERSION,
body: process.env.CHANGELOG,
draft: false,
prerelease: false
});


@ -39,7 +39,8 @@ docker run -p 8080:8080 \
-d provectuslabs/kafka-ui:latest
```
Then access the web UI at [http://localhost:8080](http://localhost:8080).
Further configuration with environment variables: [see environment variables](#env_variables)
### Docker Compose
@ -138,10 +139,11 @@ kafka:
* `schemaRegistry`: schemaRegistry's address
* `schemaNameTemplate`: how keys are saved to schemaRegistry
* `jmxPort`: open JMX port of a broker
* `readOnly`: enable read-only mode
Configure as many clusters as you need by adding their configs below separated with `-`.
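For example, a minimal two-cluster configuration could look like the sketch below (host names, ports and cluster names are illustrative placeholders, not values taken from this change):
```yaml
kafka:
  clusters:
    - name: local
      bootstrapServers: localhost:9092
      zookeeper: localhost:2181
      schemaRegistry: http://localhost:8085
    - name: secondLocal
      bootstrapServers: localhost:9093
      zookeeper: localhost:2182
      readOnly: true
```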
## Environment Variables
## <a name="env_variables"></a> Environment Variables
Alternatively, each variable of the .yml file can be set with an environment variable.
For example, if you want to use an environment variable to set the `name` parameter, you can write it like this: `KAFKA_CLUSTERS_2_NAME`
@ -154,6 +156,7 @@ For example, if you want to use an environment variable to set the `name` parame
|`KAFKA_CLUSTERS_0_SCHEMAREGISTRY` |SchemaRegistry's address
|`KAFKA_CLUSTERS_0_SCHEMANAMETEMPLATE` |How keys are saved to schemaRegistry
|`KAFKA_CLUSTERS_0_JMXPORT` |Open JMX port of a broker
|`KAFKA_CLUSTERS_0_READONLY` |Enable read-only mode. Default: false
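Combining the variables above, a read-only instance could be started with something along these lines (a sketch based on the `docker run` example earlier in this README; the cluster endpoints are placeholders):
```
docker run -p 8080:8080 \
    -e KAFKA_CLUSTERS_0_NAME=local \
    -e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092 \
    -e KAFKA_CLUSTERS_0_ZOOKEEPER=localhost:2181 \
    -e KAFKA_CLUSTERS_0_READONLY=true \
    -d provectuslabs/kafka-ui:latest
```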


@ -1,5 +1,7 @@
# Quick Start with docker-compose
Environment variables documentation: [see usage](README.md#env_variables)
* Add a new service in docker-compose.yml
```yaml
@ -9,14 +11,31 @@ services:
image: provectuslabs/kafka-ui
container_name: kafka-ui
ports:
- "9000:8080"
- "8080:8080"
restart: always
environment:
-e KAFKA_CLUSTERS_0_NAME=local
-e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092
-e KAFKA_CLUSTERS_0_ZOOKEEPER=localhost:2181
- KAFKA_CLUSTERS_0_NAME=local
- KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092
- KAFKA_CLUSTERS_0_ZOOKEEPER=localhost:2181
```
* If you prefer to run Kafka UI in read-only mode
```yaml
version: '2'
services:
kafka-ui:
image: provectuslabs/kafka-ui
container_name: kafka-ui
ports:
- "8080:8080"
restart: always
environment:
- KAFKA_CLUSTERS_0_NAME=local
- KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092
- KAFKA_CLUSTERS_0_ZOOKEEPER=localhost:2181
- KAFKA_CLUSTERS_0_READONLY=true
```
* Start Kafka UI process


@ -86,7 +86,25 @@ services:
SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
ports:
- 8081:8081
- 8085:8085
schemaregistry1:
image: confluentinc/cp-schema-registry:5.5.0
ports:
- 18085:8085
depends_on:
- zookeeper1
- kafka1
environment:
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:29092
SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper1:2181
SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
SCHEMA_REGISTRY_HOST_NAME: schemaregistry1
SCHEMA_REGISTRY_LISTENERS: http://schemaregistry1:8085
SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
kafka-connect0:
image: confluentinc/cp-kafka-connect:5.2.4


@ -96,6 +96,24 @@ services:
SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
schemaregistry1:
image: confluentinc/cp-schema-registry:5.5.0
ports:
- 18085:8085
depends_on:
- zookeeper1
- kafka1
environment:
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:29092
SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper1:2181
SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
SCHEMA_REGISTRY_HOST_NAME: schemaregistry1
SCHEMA_REGISTRY_LISTENERS: http://schemaregistry1:8085
SCHEMA_REGISTRY_SCHEMA_REGISTRY_INTER_INSTANCE_PROTOCOL: "http"
SCHEMA_REGISTRY_LOG4J_ROOT_LOGLEVEL: INFO
SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas
kafka-connect0:
image: confluentinc/cp-kafka-connect:5.2.4
ports:


@ -0,0 +1,15 @@
package com.provectus.kafka.ui.cluster.exception;
import org.springframework.http.HttpStatus;
public class DuplicateEntityException extends CustomBaseException{
public DuplicateEntityException(String message) {
super(message);
}
@Override
public HttpStatus getResponseStatusCode() {
return HttpStatus.CONFLICT;
}
}


@ -0,0 +1,15 @@
package com.provectus.kafka.ui.cluster.exception;
import org.springframework.http.HttpStatus;
public class UnprocessableEntityException extends CustomBaseException{
public UnprocessableEntityException(String message) {
super(message);
}
@Override
public HttpStatus getResponseStatusCode() {
return HttpStatus.UNPROCESSABLE_ENTITY;
}
}


@ -2,7 +2,8 @@ package com.provectus.kafka.ui.cluster.mapper;
import com.provectus.kafka.ui.cluster.config.ClustersProperties;
import com.provectus.kafka.ui.cluster.model.*;
import com.provectus.kafka.ui.cluster.model.InternalCompatibilityCheck;
import com.provectus.kafka.ui.cluster.model.schemaregistry.InternalCompatibilityCheck;
import com.provectus.kafka.ui.cluster.model.schemaregistry.InternalCompatibilityLevel;
import com.provectus.kafka.ui.model.*;
import java.util.Properties;
import org.mapstruct.Mapper;


@ -1,4 +1,4 @@
package com.provectus.kafka.ui.cluster.model;
package com.provectus.kafka.ui.cluster.model.schemaregistry;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;


@ -1,4 +1,4 @@
package com.provectus.kafka.ui.cluster.model;
package com.provectus.kafka.ui.cluster.model.schemaregistry;
import lombok.Data;


@ -0,0 +1,17 @@
package com.provectus.kafka.ui.cluster.model.schemaregistry;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.provectus.kafka.ui.model.SchemaType;
import lombok.Data;
@Data
public class InternalNewSchema {
private String schema;
@JsonInclude(JsonInclude.Include.NON_NULL)
private SchemaType schemaType;
public InternalNewSchema(String schema, SchemaType schemaType) {
this.schema = schema;
this.schemaType = schemaType;
}
}


@ -0,0 +1,8 @@
package com.provectus.kafka.ui.cluster.model.schemaregistry;
import lombok.Data;
@Data
public class SubjectIdResponse {
private Integer id;
}


@ -1,36 +1,43 @@
package com.provectus.kafka.ui.cluster.service;
import com.provectus.kafka.ui.cluster.exception.DuplicateEntityException;
import com.provectus.kafka.ui.cluster.exception.NotFoundException;
import com.provectus.kafka.ui.cluster.exception.UnprocessableEntityException;
import com.provectus.kafka.ui.cluster.mapper.ClusterMapper;
import com.provectus.kafka.ui.cluster.model.ClustersStorage;
import com.provectus.kafka.ui.cluster.model.InternalCompatibilityCheck;
import com.provectus.kafka.ui.cluster.model.InternalCompatibilityLevel;
import com.provectus.kafka.ui.model.CompatibilityCheckResponse;
import com.provectus.kafka.ui.model.CompatibilityLevel;
import com.provectus.kafka.ui.model.NewSchemaSubject;
import com.provectus.kafka.ui.model.SchemaSubject;
import java.util.Formatter;
import com.provectus.kafka.ui.cluster.model.KafkaCluster;
import com.provectus.kafka.ui.cluster.model.schemaregistry.InternalCompatibilityCheck;
import com.provectus.kafka.ui.cluster.model.schemaregistry.InternalCompatibilityLevel;
import com.provectus.kafka.ui.cluster.model.schemaregistry.InternalNewSchema;
import com.provectus.kafka.ui.cluster.model.schemaregistry.SubjectIdResponse;
import com.provectus.kafka.ui.model.*;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpStatus;
import org.jetbrains.annotations.NotNull;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.web.reactive.function.BodyInserters;
import org.springframework.web.reactive.function.client.ClientResponse;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import java.util.Arrays;
import java.util.List;
import java.util.Formatter;
import java.util.Objects;
import java.util.function.Function;
import static org.springframework.http.HttpStatus.NOT_FOUND;
import static org.springframework.http.HttpStatus.UNPROCESSABLE_ENTITY;
@Service
@Log4j2
@RequiredArgsConstructor
public class SchemaRegistryService {
public static final String NO_SUCH_SCHEMA_VERSION = "No such schema %s with version %s";
public static final String NO_SUCH_SCHEMA = "No such schema %s";
public static final String NO_SUCH_CLUSTER = "No such cluster";
private static final String URL_SUBJECTS = "/subjects";
private static final String URL_SUBJECT = "/subjects/{schemaName}";
private static final String URL_SUBJECT_VERSIONS = "/subjects/{schemaName}/versions";
@ -45,7 +52,7 @@ public class SchemaRegistryService {
var allSubjectNames = getAllSubjectNames(clusterName);
return allSubjectNames
.flatMapMany(Flux::fromArray)
.flatMap(subject -> getLatestSchemaSubject(clusterName, subject));
.flatMap(subject -> getLatestSchemaVersionBySubject(clusterName, subject));
}
public Mono<String[]> getAllSubjectNames(String clusterName) {
@ -56,7 +63,7 @@ public class SchemaRegistryService {
.bodyToMono(String[].class)
.doOnError(log::error)
)
.orElse(Mono.error(new NotFoundException("No such cluster")));
.orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER)));
}
public Flux<SchemaSubject> getAllVersionsBySubject(String clusterName, String subject) {
@ -69,19 +76,17 @@ public class SchemaRegistryService {
.map(cluster -> webClient.get()
.uri(cluster.getSchemaRegistry() + URL_SUBJECT_VERSIONS, schemaName)
.retrieve()
.onStatus(HttpStatus.NOT_FOUND::equals,
resp -> Mono.error(
new NotFoundException(formatted("No such schema %s"))
)
.onStatus(NOT_FOUND::equals,
throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA))
).bodyToFlux(Integer.class)
).orElse(Flux.error(new NotFoundException("No such cluster")));
).orElse(Flux.error(new NotFoundException(NO_SUCH_CLUSTER)));
}
public Mono<SchemaSubject> getSchemaSubjectByVersion(String clusterName, String schemaName, Integer version) {
return this.getSchemaSubject(clusterName, schemaName, String.valueOf(version));
}
public Mono<SchemaSubject> getLatestSchemaSubject(String clusterName, String schemaName) {
public Mono<SchemaSubject> getLatestSchemaVersionBySubject(String clusterName, String schemaName) {
return this.getSchemaSubject(clusterName, schemaName, LATEST);
}
@ -90,13 +95,10 @@ public class SchemaRegistryService {
.map(cluster -> webClient.get()
.uri(cluster.getSchemaRegistry() + URL_SUBJECT_BY_VERSION, schemaName, version)
.retrieve()
.onStatus(HttpStatus.NOT_FOUND::equals,
resp -> Mono.error(
new NotFoundException(
formatted("No such schema %s with version %s", schemaName, version)
)
)
.onStatus(NOT_FOUND::equals,
throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA_VERSION, schemaName, version))
).bodyToMono(SchemaSubject.class)
.map(this::withSchemaType)
.zipWith(getSchemaCompatibilityInfoOrGlobal(clusterName, schemaName))
.map(tuple -> {
SchemaSubject schema = tuple.getT1();
@ -105,7 +107,21 @@ public class SchemaRegistryService {
return schema;
})
)
.orElseThrow();
.orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER)));
}
/**
* If {@link SchemaSubject#getSchemaType()} is null, then AVRO, otherwise, adds the schema type as is.
*/
@NotNull
private SchemaSubject withSchemaType(SchemaSubject s) {
SchemaType schemaType = Objects.nonNull(s.getSchemaType()) ? s.getSchemaType() : SchemaType.AVRO;
return new SchemaSubject()
.schema(s.getSchema())
.subject(s.getSubject())
.version(s.getVersion())
.id(s.getId())
.schemaType(schemaType);
}
public Mono<ResponseEntity<Void>> deleteSchemaSubjectByVersion(String clusterName, String schemaName, Integer version) {
@ -121,46 +137,71 @@ public class SchemaRegistryService {
.map(cluster -> webClient.delete()
.uri(cluster.getSchemaRegistry() + URL_SUBJECT_BY_VERSION, schemaName, version)
.retrieve()
.onStatus(HttpStatus.NOT_FOUND::equals,
resp -> Mono.error(
new NotFoundException(
formatted("No such schema %s with version %s", schemaName, version)
)
)
.onStatus(NOT_FOUND::equals,
throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA_VERSION, schemaName, version))
).toBodilessEntity()
).orElse(Mono.error(new NotFoundException("No such cluster")));
).orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER)));
}
public Mono<ResponseEntity<Void>> deleteSchemaSubject(String clusterName, String schemaName) {
public Mono<ResponseEntity<Void>> deleteSchemaSubjectEntirely(String clusterName, String schemaName) {
return clustersStorage.getClusterByName(clusterName)
.map(cluster -> webClient.delete()
.uri(cluster.getSchemaRegistry() + URL_SUBJECT, schemaName)
.retrieve()
.onStatus(HttpStatus.NOT_FOUND::equals,
resp -> Mono.error(
new NotFoundException(
formatted("No such schema %s", schemaName)
)
)
.onStatus(NOT_FOUND::equals, throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA, schemaName))
)
.toBodilessEntity())
.orElse(Mono.error(new NotFoundException("No such cluster")));
.orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER)));
}
public Mono<ResponseEntity<SchemaSubject>> createNewSubject(String clusterName, String schemaName, Mono<NewSchemaSubject> newSchemaSubject) {
return clustersStorage.getClusterByName(clusterName)
.map(cluster -> webClient.post()
.uri(cluster.getSchemaRegistry() + URL_SUBJECT_VERSIONS, schemaName)
.contentType(MediaType.APPLICATION_JSON)
.body(BodyInserters.fromPublisher(newSchemaSubject, NewSchemaSubject.class))
.retrieve()
.onStatus(HttpStatus.NOT_FOUND::equals,
resp -> Mono.error(
new NotFoundException(formatted("No such schema %s", schemaName)))
)
.toEntity(SchemaSubject.class)
.log())
.orElse(Mono.error(new NotFoundException("No such cluster")));
/**
* Checks whether the provided schema duplicates the previous or not, creates a new schema
* and then returns the whole content by requesting its latest version.
*/
public Mono<SchemaSubject> registerNewSchema(String clusterName, Mono<NewSchemaSubject> newSchemaSubject) {
return newSchemaSubject
.flatMap(schema -> {
SchemaType schemaType = SchemaType.AVRO == schema.getSchemaType() ? null : schema.getSchemaType();
Mono<InternalNewSchema> newSchema = Mono.just(new InternalNewSchema(schema.getSchema(), schemaType));
String subject = schema.getSubject();
return clustersStorage.getClusterByName(clusterName)
.map(KafkaCluster::getSchemaRegistry)
.map(schemaRegistryUrl -> checkSchemaOnDuplicate(subject, newSchema, schemaRegistryUrl)
.flatMap(s -> submitNewSchema(subject, newSchema, schemaRegistryUrl))
.flatMap(resp -> getLatestSchemaVersionBySubject(clusterName, subject))
)
.orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER)));
});
}
@NotNull
private Mono<SubjectIdResponse> submitNewSchema(String subject, Mono<InternalNewSchema> newSchemaSubject, String schemaRegistryUrl) {
return webClient.post()
.uri(schemaRegistryUrl + URL_SUBJECT_VERSIONS, subject)
.contentType(MediaType.APPLICATION_JSON)
.body(BodyInserters.fromPublisher(newSchemaSubject, InternalNewSchema.class))
.retrieve()
.onStatus(UNPROCESSABLE_ENTITY::equals, r -> Mono.error(new UnprocessableEntityException("Invalid params")))
.bodyToMono(SubjectIdResponse.class);
}
@NotNull
private Mono<SchemaSubject> checkSchemaOnDuplicate(String subject, Mono<InternalNewSchema> newSchemaSubject, String schemaRegistryUrl) {
return webClient.post()
.uri(schemaRegistryUrl + URL_SUBJECT, subject)
.contentType(MediaType.APPLICATION_JSON)
.body(BodyInserters.fromPublisher(newSchemaSubject, InternalNewSchema.class))
.retrieve()
.onStatus(NOT_FOUND::equals, res -> Mono.empty())
.onStatus(UNPROCESSABLE_ENTITY::equals, r -> Mono.error(new UnprocessableEntityException("Invalid params")))
.bodyToMono(SchemaSubject.class)
.filter(s -> Objects.isNull(s.getId()))
.switchIfEmpty(Mono.error(new DuplicateEntityException("Such schema already exists")));
}
@NotNull
private Function<ClientResponse, Mono<? extends Throwable>> throwIfNotFoundStatus(String formatted) {
return resp -> Mono.error(new NotFoundException(formatted));
}
/**
@ -178,10 +219,10 @@ public class SchemaRegistryService {
.contentType(MediaType.APPLICATION_JSON)
.body(BodyInserters.fromPublisher(compatibilityLevel, CompatibilityLevel.class))
.retrieve()
.onStatus(HttpStatus.NOT_FOUND::equals,
resp -> Mono.error(new NotFoundException(formatted("No such schema %s", schemaName))))
.onStatus(NOT_FOUND::equals,
throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA, schemaName)))
.bodyToMono(Void.class);
}).orElse(Mono.error(new NotFoundException("No such cluster")));
}).orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER)));
}
public Mono<Void> updateSchemaCompatibility(String clusterName, Mono<CompatibilityLevel> compatibilityLevel) {
@ -217,12 +258,11 @@ public class SchemaRegistryService {
.contentType(MediaType.APPLICATION_JSON)
.body(BodyInserters.fromPublisher(newSchemaSubject, NewSchemaSubject.class))
.retrieve()
.onStatus(HttpStatus.NOT_FOUND::equals,
resp -> Mono.error(new NotFoundException(formatted("No such schema %s", schemaName))))
.onStatus(NOT_FOUND::equals, throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA, schemaName)))
.bodyToMono(InternalCompatibilityCheck.class)
.map(mapper::toCompatibilityCheckResponse)
.log()
).orElse(Mono.error(new NotFoundException("No such cluster")));
).orElse(Mono.error(new NotFoundException(NO_SUCH_CLUSTER)));
}
public String formatted(String str, Object... args) {


@ -106,7 +106,7 @@ public class MetricsRestController implements ApiClustersApi {
@Override
public Mono<ResponseEntity<SchemaSubject>> getLatestSchema(String clusterName, String subject, ServerWebExchange exchange) {
return schemaRegistryService.getLatestSchemaSubject(clusterName, subject).map(ResponseEntity::ok);
return schemaRegistryService.getLatestSchemaVersionBySubject(clusterName, subject).map(ResponseEntity::ok);
}
@Override
@ -138,14 +138,16 @@ public class MetricsRestController implements ApiClustersApi {
@Override
public Mono<ResponseEntity<Void>> deleteSchema(String clusterName, String subjectName, ServerWebExchange exchange) {
return schemaRegistryService.deleteSchemaSubject(clusterName, subjectName);
return schemaRegistryService.deleteSchemaSubjectEntirely(clusterName, subjectName);
}
@Override
public Mono<ResponseEntity<SchemaSubject>> createNewSchema(String clusterName, String subject,
public Mono<ResponseEntity<SchemaSubject>> createNewSchema(String clusterName,
@Valid Mono<NewSchemaSubject> newSchemaSubject,
ServerWebExchange exchange) {
return schemaRegistryService.createNewSubject(clusterName, subject, newSchemaSubject);
return schemaRegistryService
.registerNewSchema(clusterName, newSchemaSubject)
.map(ResponseEntity::ok);
}
@Override


@ -13,7 +13,7 @@ kafka:
name: secondLocal
bootstrapServers: localhost:9093
zookeeper: localhost:2182
schemaRegistry: http://localhost:8081
schemaRegistry: http://localhost:18085
kafkaConnect:
- name: first
address: http://localhost:8083


@ -9,7 +9,7 @@ kafka:
name: secondLocal
zookeeper: zookeeper1:2181
bootstrapServers: kafka1:29092
schemaRegistry: http://schemaregistry0:8085
schemaRegistry: http://schemaregistry1:8085
admin-client-timeout: 5000
zookeeper:
connection-timeout: 1000


@ -18,7 +18,7 @@ public abstract class AbstractBaseTest {
public static String LOCAL = "local";
public static String SECOND_LOCAL = "secondLocal";
private static final String CONFLUENT_PLATFORM_VERSION = "5.2.1";
private static final String CONFLUENT_PLATFORM_VERSION = "5.5.0";
public static final KafkaContainer kafka = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka").withTag(CONFLUENT_PLATFORM_VERSION))
.withNetwork(Network.SHARED);


@ -249,7 +249,7 @@ public class KafkaConnectServiceTests extends AbstractBaseTest {
.exchange()
.expectStatus().isOk()
.expectBodyList(ConnectorPlugin.class)
.value(plugins -> assertEquals(13, plugins.size()));
.value(plugins -> assertEquals(14, plugins.size()));
}
@Test


@ -1,7 +1,9 @@
package com.provectus.kafka.ui;
import com.provectus.kafka.ui.model.CompatibilityLevel;
import com.provectus.kafka.ui.model.NewSchemaSubject;
import com.provectus.kafka.ui.model.SchemaSubject;
import com.provectus.kafka.ui.model.SchemaType;
import lombok.extern.log4j.Log4j2;
import lombok.val;
import org.junit.jupiter.api.Assertions;
@ -9,11 +11,13 @@ import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.web.reactive.server.EntityExchangeResult;
import org.springframework.test.web.reactive.server.WebTestClient;
import org.springframework.web.reactive.function.BodyInserters;
import reactor.core.publisher.Mono;
import java.util.List;
import java.util.UUID;
@ -50,6 +54,69 @@ class SchemaRegistryServiceTests extends AbstractBaseTest {
.expectStatus().isNotFound();
}
/**
* It should create a new schema w/o submitting a schemaType field to Schema Registry
*/
@Test
void shouldBeBadRequestIfNoSchemaType() {
String schema = "{\"subject\":\"%s\",\"schema\":\"{\\\"type\\\": \\\"string\\\"}\"}";
webTestClient
.post()
.uri("/api/clusters/{clusterName}/schemas", LOCAL)
.contentType(MediaType.APPLICATION_JSON)
.body(BodyInserters.fromValue(schema.formatted(subject)))
.exchange()
.expectStatus().isBadRequest();
}
@Test
void shouldReturn409WhenSchemaDuplicatesThePreviousVersion() {
String schema = "{\"subject\":\"%s\",\"schemaType\":\"AVRO\",\"schema\":\"{\\\"type\\\": \\\"string\\\"}\"}";
webTestClient
.post()
.uri("/api/clusters/{clusterName}/schemas", LOCAL)
.contentType(MediaType.APPLICATION_JSON)
.body(BodyInserters.fromValue(schema.formatted(subject)))
.exchange()
.expectStatus().isEqualTo(HttpStatus.OK);
webTestClient
.post()
.uri("/api/clusters/{clusterName}/schemas", LOCAL)
.contentType(MediaType.APPLICATION_JSON)
.body(BodyInserters.fromValue(schema.formatted(subject)))
.exchange()
.expectStatus().isEqualTo(HttpStatus.CONFLICT);
}
@Test
void shouldCreateNewProtobufSchema() {
String schema = "syntax = \"proto3\";\n\nmessage MyRecord {\n int32 id = 1;\n string name = 2;\n}\n";
NewSchemaSubject requestBody = new NewSchemaSubject()
.schemaType(SchemaType.PROTOBUF)
.subject(subject)
.schema(schema);
SchemaSubject actual = webTestClient
.post()
.uri("/api/clusters/{clusterName}/schemas", LOCAL)
.contentType(MediaType.APPLICATION_JSON)
.body(BodyInserters.fromPublisher(Mono.just(requestBody), NewSchemaSubject.class))
.exchange()
.expectStatus()
.isOk()
.expectBody(SchemaSubject.class)
.returnResult()
.getResponseBody();
Assertions.assertNotNull(actual);
Assertions.assertEquals(CompatibilityLevel.CompatibilityEnum.BACKWARD.name(), actual.getCompatibilityLevel());
Assertions.assertEquals("1", actual.getVersion());
Assertions.assertEquals(SchemaType.PROTOBUF, actual.getSchemaType());
Assertions.assertEquals(schema, actual.getSchema());
}
@Test
public void shouldReturnBackwardAsGlobalCompatibilityLevelByDefault() {
webTestClient
@ -132,9 +199,9 @@ class SchemaRegistryServiceTests extends AbstractBaseTest {
private void createNewSubjectAndAssert(String subject) {
webTestClient
.post()
.uri("/api/clusters/{clusterName}/schemas/{subject}", LOCAL, subject)
.uri("/api/clusters/{clusterName}/schemas", LOCAL)
.contentType(MediaType.APPLICATION_JSON)
.body(BodyInserters.fromValue("{\"schema\":\"{\\\"type\\\": \\\"string\\\"}\"}"))
.body(BodyInserters.fromValue("{\"subject\":\"%s\",\"schemaType\":\"AVRO\",\"schema\":\"{\\\"type\\\": \\\"string\\\"}\"}".formatted(subject)))
.exchange()
.expectStatus().isOk()
.expectBody(SchemaSubject.class)
@ -151,16 +218,17 @@ class SchemaRegistryServiceTests extends AbstractBaseTest {
Assertions.assertEquals("\"string\"", actualSchema.getSchema());
Assertions.assertNotNull(actualSchema.getCompatibilityLevel());
Assertions.assertEquals(SchemaType.AVRO, actualSchema.getSchemaType());
Assertions.assertEquals(expectedCompatibility.name(), actualSchema.getCompatibilityLevel());
}
private void assertResponseBodyWhenCreateNewSchema(EntityExchangeResult<SchemaSubject> exchangeResult) {
SchemaSubject responseBody = exchangeResult.getResponseBody();
Assertions.assertNotNull(responseBody);
Assertions.assertEquals(1, responseBody.getId(), "The schema ID should be non-null in the response");
String message = "It should be null";
Assertions.assertNull(responseBody.getSchema(), message);
Assertions.assertNull(responseBody.getSubject(), message);
Assertions.assertNull(responseBody.getVersion(), message);
Assertions.assertEquals("1", responseBody.getVersion());
Assertions.assertNotNull(responseBody.getSchema());
Assertions.assertNotNull(responseBody.getSubject());
Assertions.assertNotNull(responseBody.getCompatibilityLevel());
Assertions.assertEquals(SchemaType.AVRO, responseBody.getSchemaType());
}
}


@ -358,6 +358,35 @@ paths:
$ref: '#/components/schemas/ConsumerGroup'
/api/clusters/{clusterName}/schemas:
post:
tags:
- /api/clusters
summary: create a new subject schema
operationId: createNewSchema
parameters:
- name: clusterName
in: path
required: true
schema:
type: string
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/NewSchemaSubject'
responses:
200:
description: Ok
content:
application/json:
schema:
$ref: '#/components/schemas/SchemaSubject'
400:
description: Bad request
409:
description: Duplicate schema
422:
description: Invalid parameters
get:
tags:
- /api/clusters
@ -380,36 +409,6 @@ paths:
$ref: '#/components/schemas/SchemaSubject'
/api/clusters/{clusterName}/schemas/{subject}:
post:
tags:
- /api/clusters
summary: create a new subject schema
operationId: createNewSchema
parameters:
- name: clusterName
in: path
required: true
schema:
type: string
- name: subject
in: path
required: true
schema:
type: string
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/NewSchemaSubject'
responses:
200:
description: Updated
content:
application/json:
schema:
$ref: '#/components/schemas/SchemaSubject'
400:
description: Bad request
delete:
tags:
- /api/clusters
@ -1360,16 +1359,29 @@ components:
type: string
compatibilityLevel:
type: string
schemaType:
$ref: '#/components/schemas/SchemaType'
required:
- id
- subject
- version
- schema
- compatibilityLevel
- schemaType
NewSchemaSubject:
type: object
properties:
subject:
type: string
schema:
type: string
schemaType:
$ref: '#/components/schemas/SchemaType'
required:
- subject
- schema
- schemaType
CompatibilityLevel:
type: object
@ -1387,13 +1399,12 @@ components:
required:
- compatibility
# CompatibilityLevelResponse:
# type: object
# properties:
# compatibilityLevel:
# type: string
# required:
# - compatibilityLevel
SchemaType:
type: string
enum:
- AVRO
- JSON
- PROTOBUF
CompatibilityCheckResponse:
type: object


@ -1,13 +1,10 @@
import React from 'react';
import { Switch, Route, Redirect } from 'react-router-dom';
import { Switch, Route } from 'react-router-dom';
import './App.scss';
import BrokersContainer from './Brokers/BrokersContainer';
import TopicsContainer from './Topics/TopicsContainer';
import NavContainer from './Nav/NavContainer';
import PageLoader from './common/PageLoader/PageLoader';
import Dashboard from './Dashboard/Dashboard';
import ConsumersGroupsContainer from './ConsumerGroups/ConsumersGroupsContainer';
import SchemasContainer from './Schemas/SchemasContainer';
import Cluster from './Cluster/Cluster';
interface AppProps {
isClusterListFetched: boolean;
@ -44,29 +41,10 @@ const App: React.FC<AppProps> = ({
path={['/', '/ui', '/ui/clusters']}
component={Dashboard}
/>
<Route
path="/ui/clusters/:clusterName/brokers"
component={BrokersContainer}
/>
<Route
path="/ui/clusters/:clusterName/topics"
component={TopicsContainer}
/>
<Route
path="/ui/clusters/:clusterName/consumer-groups"
component={ConsumersGroupsContainer}
/>
<Route
path="/ui/clusters/:clusterName/schemas"
component={SchemasContainer}
/>
<Redirect
from="/ui/clusters/:clusterName"
to="/ui/clusters/:clusterName/brokers"
/>
<Route path="/ui/clusters/:clusterName" component={Cluster} />
</Switch>
) : (
<PageLoader />
<PageLoader fullHeight />
)}
</main>
</div>


@ -0,0 +1,39 @@
import React from 'react';
import { useSelector } from 'react-redux';
import { Switch, Route, Redirect, useParams } from 'react-router-dom';
import BrokersContainer from 'components/Brokers/BrokersContainer';
import TopicsContainer from 'components/Topics/TopicsContainer';
import ConsumersGroupsContainer from 'components/ConsumerGroups/ConsumersGroupsContainer';
import Schemas from 'components/Schemas/Schemas';
import { getClustersReadonlyStatus } from 'redux/reducers/clusters/selectors';
import ClusterContext from 'components/contexts/ClusterContext';
const Cluster: React.FC = () => {
const { clusterName } = useParams<{ clusterName: string }>();
const isReadOnly = useSelector(getClustersReadonlyStatus(clusterName));
return (
<ClusterContext.Provider value={{ isReadOnly }}>
<Switch>
<Route
path="/ui/clusters/:clusterName/brokers"
component={BrokersContainer}
/>
<Route
path="/ui/clusters/:clusterName/topics"
component={TopicsContainer}
/>
<Route
path="/ui/clusters/:clusterName/consumer-groups"
component={ConsumersGroupsContainer}
/>
<Route path="/ui/clusters/:clusterName/schemas" component={Schemas} />
<Redirect
from="/ui/clusters/:clusterName"
to="/ui/clusters/:clusterName/brokers"
/>
</Switch>
</ClusterContext.Provider>
);
};
export default Cluster;
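The new `components/contexts/ClusterContext` module itself is not part of this excerpt; judging from how it is consumed in `Cluster.tsx` and in the tests, it is presumably just a React context carrying an `isReadOnly` flag. A hypothetical sketch (the `ContextProps` name and the default value are assumptions):
```typescript
// Hypothetical ClusterContext, inferred from its usage in this change set.
import React from 'react';

export interface ContextProps {
  isReadOnly: boolean;
}

// Default to a writable cluster; Cluster.tsx overrides this via the Provider.
const ClusterContext = React.createContext<ContextProps>({
  isReadOnly: false,
});

export default ClusterContext;
```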


@ -17,6 +17,7 @@ const ClusterWidget: React.FC<ClusterWidgetProps> = ({
bytesInPerSec,
bytesOutPerSec,
onlinePartitionCount,
readOnly,
},
}) => (
<div className="column is-full-modile is-6">
@ -29,6 +30,9 @@ const ClusterWidget: React.FC<ClusterWidgetProps> = ({
>
{status}
</div>
{readOnly && (
<div className="tag has-margin-right is-info is-light">readonly</div>
)}
{name}
</div>


@ -70,4 +70,17 @@ describe('ClusterWidget', () => {
).toMatchSnapshot();
});
});
describe('when cluster is read-only', () => {
it('renders the tag', () => {
expect(
shallow(
<ClusterWidget cluster={{ ...onlineCluster, readOnly: true }} />
)
.find('.title')
.childAt(1)
.text()
).toEqual('readonly');
});
});
});


@ -2,12 +2,14 @@ import React from 'react';
import { SchemaSubject } from 'generated-sources';
import { ClusterName, SchemaName } from 'redux/interfaces';
import { clusterSchemasPath } from 'lib/paths';
import ClusterContext from 'components/contexts/ClusterContext';
import Breadcrumb from '../../common/Breadcrumb/Breadcrumb';
import SchemaVersion from './SchemaVersion';
import LatestVersionItem from './LatestVersionItem';
import PageLoader from '../../common/PageLoader/PageLoader';
export interface DetailsProps {
subject: SchemaName;
schema: SchemaSubject;
clusterName: ClusterName;
versions: SchemaSubject[];
@ -19,15 +21,18 @@ export interface DetailsProps {
}
const Details: React.FC<DetailsProps> = ({
subject,
schema,
clusterName,
fetchSchemaVersions,
versions,
isFetched,
}) => {
const { isReadOnly } = React.useContext(ClusterContext);
React.useEffect(() => {
fetchSchemaVersions(clusterName, schema.subject as SchemaName);
fetchSchemaVersions(clusterName, subject);
}, [fetchSchemaVersions, clusterName]);
return (
<div className="section">
<div className="level">
@ -39,59 +44,63 @@ const Details: React.FC<DetailsProps> = ({
},
]}
>
{schema.subject}
{subject}
</Breadcrumb>
</div>
<div className="box">
<div className="level">
<div className="level-left">
<div className="level-item">
<div className="mr-1">
<b>Latest Version</b>
</div>
<div className="tag is-info is-light" title="Version">
#{schema.version}
</div>
</div>
</div>
<div className="level-right">
<button
className="button is-warning is-small level-item"
type="button"
title="in development"
disabled
>
Update Schema
</button>
<button
className="button is-danger is-small level-item"
type="button"
title="in development"
disabled
>
Delete
</button>
</div>
</div>
<LatestVersionItem schema={schema} />
</div>
{isFetched ? (
<div className="box">
<table className="table is-striped is-fullwidth">
<thead>
<tr>
<th>Version</th>
<th>ID</th>
<th>Schema</th>
</tr>
</thead>
<tbody>
{versions.map((version) => (
<SchemaVersion key={version.id} version={version} />
))}
</tbody>
</table>
</div>
<>
<div className="box">
<div className="level">
<div className="level-left">
<div className="level-item">
<div className="mr-1">
<b>Latest Version</b>
</div>
<div className="tag is-info is-light" title="Version">
#{schema.version}
</div>
</div>
</div>
{!isReadOnly && (
<div className="level-right">
<button
className="button is-warning is-small level-item"
type="button"
title="in development"
disabled
>
Update Schema
</button>
<button
className="button is-danger is-small level-item"
type="button"
title="in development"
disabled
>
Delete
</button>
</div>
)}
</div>
<LatestVersionItem schema={schema} />
</div>
<div className="box">
<table className="table is-striped is-fullwidth">
<thead>
<tr>
<th>Version</th>
<th>ID</th>
<th>Schema</th>
</tr>
</thead>
<tbody>
{versions.map((version) => (
<SchemaVersion key={version.id} version={version} />
))}
</tbody>
</table>
</div>
</>
) : (
<PageLoader />
)}


@ -24,6 +24,7 @@ const mapStateToProps = (
},
}: OwnProps
) => ({
subject,
schema: getSchema(state, subject),
versions: getSortedSchemaVersions(state),
isFetched: getIsSchemaVersionFetched(state),


@ -1,7 +1,9 @@
import React from 'react';
import { Provider } from 'react-redux';
import { shallow } from 'enzyme';
import { shallow, mount } from 'enzyme';
import configureStore from 'redux/store/configureStore';
import { StaticRouter } from 'react-router';
import ClusterContext from 'components/contexts/ClusterContext';
import DetailsContainer from '../DetailsContainer';
import Details, { DetailsProps } from '../Details';
import { schema, versions } from './fixtures';
@ -24,6 +26,7 @@ describe('Details', () => {
describe('View', () => {
const setupWrapper = (props: Partial<DetailsProps> = {}) => (
<Details
subject={schema.subject}
schema={schema}
clusterName="Test cluster"
fetchSchemaVersions={jest.fn()}
@ -101,6 +104,20 @@ describe('Details', () => {
expect(shallow(setupWrapper({ versions }))).toMatchSnapshot();
});
});
describe('when the readonly flag is set', () => {
it('does not render update & delete buttons', () => {
expect(
mount(
<StaticRouter>
<ClusterContext.Provider value={{ isReadOnly: true }}>
{setupWrapper({ versions })}
</ClusterContext.Provider>
</StaticRouter>
).exists('.level-right')
).toBeFalsy();
});
});
});
});
});


@ -75,6 +75,7 @@ exports[`Details View Initial state matches snapshot 1`] = `
"compatibilityLevel": "BACKWARD",
"id": 1,
"schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
"schemaType": "JSON",
"subject": "test",
"version": "1",
}
@ -126,67 +127,6 @@ exports[`Details View when page with schema versions is loading matches snapshot
test
</Breadcrumb>
</div>
<div
className="box"
>
<div
className="level"
>
<div
className="level-left"
>
<div
className="level-item"
>
<div
className="mr-1"
>
<b>
Latest Version
</b>
</div>
<div
className="tag is-info is-light"
title="Version"
>
#
1
</div>
</div>
</div>
<div
className="level-right"
>
<button
className="button is-warning is-small level-item"
disabled={true}
title="in development"
type="button"
>
Update Schema
</button>
<button
className="button is-danger is-small level-item"
disabled={true}
title="in development"
type="button"
>
Delete
</button>
</div>
</div>
<LatestVersionItem
schema={
Object {
"compatibilityLevel": "BACKWARD",
"id": 1,
"schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
"subject": "test",
"version": "1",
}
}
/>
</div>
<PageLoader />
</div>
`;
@ -266,6 +206,7 @@ exports[`Details View when page with schema versions loaded when schema has vers
"compatibilityLevel": "BACKWARD",
"id": 1,
"schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
"schemaType": "JSON",
"subject": "test",
"version": "1",
}
@ -299,6 +240,7 @@ exports[`Details View when page with schema versions loaded when schema has vers
"compatibilityLevel": "BACKWARD",
"id": 1,
"schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
"schemaType": "JSON",
"subject": "test",
"version": "1",
}
@ -311,6 +253,7 @@ exports[`Details View when page with schema versions loaded when schema has vers
"compatibilityLevel": "BACKWARD",
"id": 2,
"schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord2\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
"schemaType": "JSON",
"subject": "test",
"version": "2",
}
@ -397,6 +340,7 @@ exports[`Details View when page with schema versions loaded when versions are em
"compatibilityLevel": "BACKWARD",
"id": 1,
"schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
"schemaType": "JSON",
"subject": "test",
"version": "1",
}


@ -1,4 +1,4 @@
import { SchemaSubject } from 'generated-sources';
import { SchemaSubject, SchemaType } from 'generated-sources';
export const schema: SchemaSubject = {
subject: 'test',
@ -7,6 +7,7 @@ export const schema: SchemaSubject = {
schema:
'{"type":"record","name":"MyRecord1","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
compatibilityLevel: 'BACKWARD',
schemaType: SchemaType.JSON,
};
export const versions: SchemaSubject[] = [
@ -17,6 +18,7 @@ export const versions: SchemaSubject[] = [
schema:
'{"type":"record","name":"MyRecord1","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
compatibilityLevel: 'BACKWARD',
schemaType: SchemaType.JSON,
},
{
subject: 'test',
@ -25,5 +27,6 @@ export const versions: SchemaSubject[] = [
schema:
'{"type":"record","name":"MyRecord2","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
compatibilityLevel: 'BACKWARD',
schemaType: SchemaType.JSON,
},
];


@ -2,54 +2,74 @@ import React from 'react';
import { SchemaSubject } from 'generated-sources';
import { NavLink, useParams } from 'react-router-dom';
import { clusterSchemaNewPath } from 'lib/paths';
import { ClusterName } from 'redux/interfaces';
import PageLoader from 'components/common/PageLoader/PageLoader';
import Breadcrumb from 'components/common/Breadcrumb/Breadcrumb';
import ClusterContext from 'components/contexts/ClusterContext';
import ListItem from './ListItem';
export interface ListProps {
schemas: SchemaSubject[];
isFetching: boolean;
fetchSchemasByClusterName: (clusterName: ClusterName) => void;
}
const List: React.FC<ListProps> = ({ schemas }) => {
const List: React.FC<ListProps> = ({
schemas,
isFetching,
fetchSchemasByClusterName,
}) => {
const { isReadOnly } = React.useContext(ClusterContext);
const { clusterName } = useParams<{ clusterName: string }>();
React.useEffect(() => {
fetchSchemasByClusterName(clusterName);
}, [fetchSchemasByClusterName, clusterName]);
return (
<div className="section">
<Breadcrumb>Schema Registry</Breadcrumb>
<div className="box">
<div className="level">
<div className="level-item level-right">
<NavLink
className="button is-primary"
to={clusterSchemaNewPath(clusterName)}
>
Create Schema
</NavLink>
</div>
{!isReadOnly && (
<div className="level-item level-right">
<NavLink
className="button is-primary"
to={clusterSchemaNewPath(clusterName)}
>
Create Schema
</NavLink>
</div>
)}
</div>
</div>
<div className="box">
<table className="table is-striped is-fullwidth">
<thead>
<tr>
<th>Schema Name</th>
<th>Version</th>
<th>Compatibility</th>
</tr>
</thead>
<tbody>
{schemas.length > 0 ? (
schemas.map((subject) => (
<ListItem key={subject.id} subject={subject} />
))
) : (
{isFetching ? (
<PageLoader />
) : (
<div className="box">
<table className="table is-striped is-fullwidth">
<thead>
<tr>
<td colSpan={10}>No schemas found</td>
<th>Schema Name</th>
<th>Version</th>
<th>Compatibility</th>
</tr>
)}
</tbody>
</table>
</div>
</thead>
<tbody>
{schemas.length > 0 ? (
schemas.map((subject) => (
<ListItem key={subject.id} subject={subject} />
))
) : (
<tr>
<td colSpan={10}>No schemas found</td>
</tr>
)}
</tbody>
</table>
</div>
)}
</div>
);
};


@ -1,10 +1,19 @@
import { connect } from 'react-redux';
import { RootState } from 'redux/interfaces';
import { getSchemaList } from 'redux/reducers/schemas/selectors';
import { fetchSchemasByClusterName } from 'redux/actions';
import {
getIsSchemaListFetching,
getSchemaList,
} from 'redux/reducers/schemas/selectors';
import List from './List';
const mapStateToProps = (state: RootState) => ({
isFetching: getIsSchemaListFetching(state),
schemas: getSchemaList(state),
});
export default connect(mapStateToProps)(List);
const mapDispatchToProps = {
fetchSchemasByClusterName,
};
export default connect(mapStateToProps, mapDispatchToProps)(List);


@ -3,6 +3,7 @@ import { mount, shallow } from 'enzyme';
import { Provider } from 'react-redux';
import { StaticRouter } from 'react-router';
import configureStore from 'redux/store/configureStore';
import ClusterContext from 'components/contexts/ClusterContext';
import ListContainer from '../ListContainer';
import List, { ListProps } from '../List';
import { schemas } from './fixtures';
@ -27,13 +28,50 @@ describe('List', () => {
const setupWrapper = (props: Partial<ListProps> = {}) => (
<StaticRouter location={{ pathname }} context={{}}>
<List schemas={[]} {...props} />
<List
isFetching
fetchSchemasByClusterName={jest.fn()}
schemas={[]}
{...props}
/>
</StaticRouter>
);
describe('Initial state', () => {
let useEffect: jest.SpyInstance<
void,
[effect: React.EffectCallback, deps?: React.DependencyList | undefined]
>;
const mockedFn = jest.fn();
const mockedUseEffect = () => {
useEffect.mockImplementationOnce(mockedFn);
};
beforeEach(() => {
useEffect = jest.spyOn(React, 'useEffect');
mockedUseEffect();
});
it('should call fetchSchemasByClusterName every render', () => {
mount(setupWrapper({ fetchSchemasByClusterName: mockedFn }));
expect(mockedFn).toHaveBeenCalled();
});
});
describe('when fetching', () => {
it('renders PageLoader', () => {
const wrapper = mount(setupWrapper({ isFetching: true }));
expect(wrapper.exists('Breadcrumb')).toBeTruthy();
expect(wrapper.exists('thead')).toBeFalsy();
expect(wrapper.exists('ListItem')).toBeFalsy();
expect(wrapper.exists('PageLoader')).toBeTruthy();
});
});
describe('without schemas', () => {
it('renders table heading without ListItem', () => {
const wrapper = mount(setupWrapper());
const wrapper = mount(setupWrapper({ isFetching: false }));
expect(wrapper.exists('Breadcrumb')).toBeTruthy();
expect(wrapper.exists('thead')).toBeTruthy();
expect(wrapper.exists('ListItem')).toBeFalsy();
@ -41,7 +79,7 @@ describe('List', () => {
});
describe('with schemas', () => {
const wrapper = mount(setupWrapper({ schemas }));
const wrapper = mount(setupWrapper({ isFetching: false, schemas }));
it('renders table heading with ListItem', () => {
expect(wrapper.exists('Breadcrumb')).toBeTruthy();
@ -49,5 +87,18 @@ describe('List', () => {
expect(wrapper.find('ListItem').length).toEqual(3);
});
});
describe('with readonly cluster', () => {
const wrapper = mount(
<StaticRouter>
<ClusterContext.Provider value={{ isReadOnly: true }}>
{setupWrapper({ schemas: [] })}
</ClusterContext.Provider>
</StaticRouter>
);
it('does not render Create Schema button', () => {
expect(wrapper.exists('NavLink')).toBeFalsy();
});
});
});
});


@ -32,6 +32,7 @@ exports[`ListItem matches snapshot 1`] = `
"compatibilityLevel": "BACKWARD",
"id": 1,
"schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
"schemaType": "JSON",
"subject": "test",
"version": "1",
}


@ -1,4 +1,4 @@
import { SchemaSubject } from 'generated-sources';
import { SchemaSubject, SchemaType } from 'generated-sources';
export const schemas: SchemaSubject[] = [
{
@ -8,6 +8,7 @@ export const schemas: SchemaSubject[] = [
schema:
'{"type":"record","name":"MyRecord1","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
compatibilityLevel: 'BACKWARD',
schemaType: SchemaType.JSON,
},
{
subject: 'test2',
@ -16,6 +17,7 @@ export const schemas: SchemaSubject[] = [
schema:
'{"type":"record","name":"MyRecord2","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
compatibilityLevel: 'BACKWARD',
schemaType: SchemaType.JSON,
},
{
subject: 'test3',
@ -24,5 +26,6 @@ export const schemas: SchemaSubject[] = [
schema:
'{"type":"record","name":"MyRecord3","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
compatibilityLevel: 'BACKWARD',
schemaType: SchemaType.JSON,
},
];


@ -1,17 +1,16 @@
import React from 'react';
import { ClusterName, SchemaName, NewSchemaSubjectRaw } from 'redux/interfaces';
import { ClusterName, NewSchemaSubjectRaw } from 'redux/interfaces';
import { useForm } from 'react-hook-form';
import { ErrorMessage } from '@hookform/error-message';
import Breadcrumb from 'components/common/Breadcrumb/Breadcrumb';
import { clusterSchemaPath, clusterSchemasPath } from 'lib/paths';
import { NewSchemaSubject } from 'generated-sources';
import { NewSchemaSubject, SchemaType } from 'generated-sources';
import { SCHEMA_NAME_VALIDATION_PATTERN } from 'lib/constants';
import { useHistory, useParams } from 'react-router';
export interface NewProps {
createSchema: (
clusterName: ClusterName,
subject: SchemaName,
newSchemaSubject: NewSchemaSubject
) => void;
}
@ -29,7 +28,11 @@ const New: React.FC<NewProps> = ({ createSchema }) => {
const onSubmit = React.useCallback(
async ({ subject, schema }: NewSchemaSubjectRaw) => {
try {
await createSchema(clusterName, subject, { schema });
await createSchema(clusterName, {
subject,
schema,
schemaType: SchemaType.AVRO,
});
history.push(clusterSchemaPath(clusterName, subject));
} catch (e) {
// Show Error


@ -1,49 +1,27 @@
import React from 'react';
import { ClusterName } from 'redux/interfaces';
import { Switch, Route, useParams } from 'react-router-dom';
import PageLoader from 'components/common/PageLoader/PageLoader';
import { Switch, Route } from 'react-router-dom';
import ListContainer from './List/ListContainer';
import DetailsContainer from './Details/DetailsContainer';
import NewContainer from './New/NewContainer';
export interface SchemasProps {
isFetching: boolean;
fetchSchemasByClusterName: (clusterName: ClusterName) => void;
}
const Schemas: React.FC<SchemasProps> = ({
isFetching,
fetchSchemasByClusterName,
}) => {
const { clusterName } = useParams<{ clusterName: string }>();
React.useEffect(() => {
fetchSchemasByClusterName(clusterName);
}, [fetchSchemasByClusterName, clusterName]);
if (isFetching) {
return <PageLoader />;
}
return (
<Switch>
<Route
exact
path="/ui/clusters/:clusterName/schemas"
component={ListContainer}
/>
<Route
exact
path="/ui/clusters/:clusterName/schemas/new"
component={NewContainer}
/>
<Route
exact
path="/ui/clusters/:clusterName/schemas/:subject/latest"
component={DetailsContainer}
/>
</Switch>
);
};
const Schemas: React.FC = () => (
<Switch>
<Route
exact
path="/ui/clusters/:clusterName/schemas"
component={ListContainer}
/>
<Route
exact
path="/ui/clusters/:clusterName/schemas/new"
component={NewContainer}
/>
<Route
exact
path="/ui/clusters/:clusterName/schemas/:subject/latest"
component={DetailsContainer}
/>
</Switch>
);
export default Schemas;


@ -1,15 +0,0 @@
import { connect } from 'react-redux';
import { RootState } from 'redux/interfaces';
import { fetchSchemasByClusterName } from 'redux/actions';
import { getIsSchemaListFetching } from 'redux/reducers/schemas/selectors';
import Schemas from './Schemas';
const mapStateToProps = (state: RootState) => ({
isFetching: getIsSchemaListFetching(state),
});
const mapDispatchToProps = {
fetchSchemasByClusterName,
};
export default connect(mapStateToProps, mapDispatchToProps)(Schemas);


@ -1,71 +1,18 @@
import React from 'react';
import { Provider } from 'react-redux';
import { mount } from 'enzyme';
import configureStore from 'redux/store/configureStore';
import { shallow } from 'enzyme';
import { StaticRouter } from 'react-router-dom';
import Schemas, { SchemasProps } from '../Schemas';
import SchemasContainer from '../SchemasContainer';
import Schemas from '../Schemas';
describe('Schemas', () => {
const pathname = `/ui/clusters/clusterName/schemas`;
describe('Container', () => {
const store = configureStore();
it('renders', () => {
const wrapper = shallow(
<StaticRouter location={{ pathname }} context={{}}>
<Schemas />
</StaticRouter>
);
it('renders view', () => {
const component = mount(
<Provider store={store}>
<StaticRouter location={{ pathname }} context={{}}>
<SchemasContainer />
</StaticRouter>
</Provider>
);
expect(component.exists()).toBeTruthy();
});
describe('View', () => {
const setupWrapper = (props: Partial<SchemasProps> = {}) => (
<StaticRouter location={{ pathname }} context={{}}>
<Schemas
isFetching
fetchSchemasByClusterName={jest.fn()}
{...props}
/>
</StaticRouter>
);
describe('Initial state', () => {
let useEffect: jest.SpyInstance<
void,
[
effect: React.EffectCallback,
deps?: React.DependencyList | undefined
]
>;
const mockedFn = jest.fn();
const mockedUseEffect = () => {
useEffect.mockImplementationOnce(mockedFn);
};
beforeEach(() => {
useEffect = jest.spyOn(React, 'useEffect');
mockedUseEffect();
});
it('should call fetchSchemasByClusterName every render', () => {
mount(setupWrapper({ fetchSchemasByClusterName: mockedFn }));
expect(mockedFn).toHaveBeenCalled();
});
});
describe('when page is loading', () => {
const wrapper = mount(setupWrapper({ isFetching: true }));
it('renders PageLoader', () => {
expect(wrapper.exists('PageLoader')).toBeTruthy();
});
});
});
expect(wrapper.exists('Schemas')).toBeTruthy();
});
});


@ -10,6 +10,7 @@ import {
clusterTopicMessagesPath,
clusterTopicsTopicEditPath,
} from 'lib/paths';
import ClusterContext from 'components/contexts/ClusterContext';
import OverviewContainer from './Overview/OverviewContainer';
import MessagesContainer from './Messages/MessagesContainer';
import SettingsContainer from './Settings/SettingsContainer';
@ -21,6 +22,7 @@ interface Props extends Topic, TopicDetails {
}
const Details: React.FC<Props> = ({ clusterName, topicName }) => {
const { isReadOnly } = React.useContext(ClusterContext);
return (
<div className="section">
<div className="level">
@@ -33,9 +35,11 @@ const Details: React.FC<Props> = ({ clusterName, topicName }) => {
{topicName}
</Breadcrumb>
</div>
<SettingsEditButton
to={clusterTopicsTopicEditPath(clusterName, topicName)}
/>
{!isReadOnly && (
<SettingsEditButton
to={clusterTopicsTopicEditPath(clusterName, topicName)}
/>
)}
</div>
<div className="box">

View file

@@ -195,7 +195,7 @@ const Messages: React.FC<Props> = ({
};
if (!isFetched) {
return <PageLoader isFullHeight={false} />;
return <PageLoader />;
}
return (

View file

@@ -3,6 +3,7 @@ import { TopicWithDetailedInfo, ClusterName } from 'redux/interfaces';
import Breadcrumb from 'components/common/Breadcrumb/Breadcrumb';
import { NavLink } from 'react-router-dom';
import { clusterTopicNewPath } from 'lib/paths';
import ClusterContext from 'components/contexts/ClusterContext';
import ListItem from './ListItem';
interface Props {
@@ -15,7 +16,7 @@ const List: React.FC<Props> = ({ clusterName, topics, externalTopics }) => {
const [showInternal, setShowInternal] = React.useState<boolean>(true);
const handleSwitch = () => setShowInternal(!showInternal);
const { isReadOnly } = React.useContext(ClusterContext);
const items = showInternal ? topics : externalTopics;
return (
@@ -38,12 +39,14 @@ const List: React.FC<Props> = ({ clusterName, topics, externalTopics }) => {
</div>
</div>
<div className="level-item level-right">
<NavLink
className="button is-primary"
to={clusterTopicNewPath(clusterName)}
>
Add a Topic
</NavLink>
{!isReadOnly && (
<NavLink
className="button is-primary"
to={clusterTopicNewPath(clusterName)}
>
Add a Topic
</NavLink>
)}
</div>
</div>
</div>

View file

@@ -0,0 +1,22 @@
import { mount } from 'enzyme';
import React from 'react';
import ClusterContext from 'components/contexts/ClusterContext';
import List from '../List';
describe('List', () => {
describe('when it has readonly flag', () => {
it('does not render the Add a Topic button', () => {
const props = {
clusterName: 'Cluster',
topics: [],
externalTopics: [],
};
const component = mount(
<ClusterContext.Provider value={{ isReadOnly: true }}>
<List {...props} />
</ClusterContext.Provider>
);
expect(component.exists('NavLink')).toBeFalsy();
});
});
});
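For symmetry, a companion spec could pin down the opposite case — the button should appear when the readonly flag is off. This is only a sketch (not part of this diff); it assumes the same Jest/enzyme setup as the surrounding tests and wraps the tree in a StaticRouter, since the rendered NavLink needs a router context:

import { mount } from 'enzyme';
import React from 'react';
import { StaticRouter } from 'react-router-dom';
import ClusterContext from 'components/contexts/ClusterContext';
import List from '../List';

describe('List', () => {
  describe('when readonly flag is off', () => {
    it('renders the Add a Topic button', () => {
      const props = { clusterName: 'Cluster', topics: [], externalTopics: [] };
      const component = mount(
        // NavLink requires a router, so the provider tree is wrapped in StaticRouter
        <StaticRouter>
          <ClusterContext.Provider value={{ isReadOnly: false }}>
            <List {...props} />
          </ClusterContext.Provider>
        </StaticRouter>
      );
      expect(component.exists('NavLink')).toBeTruthy();
    });
  });
});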

View file

@@ -10,7 +10,6 @@ import NewContainer from './New/NewContainer';
interface Props {
clusterName: ClusterName;
isFetched: boolean;
fetchBrokers: (clusterName: ClusterName) => void;
fetchTopicsList: (clusterName: ClusterName) => void;
}

View file

@@ -8,17 +8,17 @@ describe('MetricsWrapper', () => {
const component = shallow(
<MetricsWrapper wrapperClassName={className} multiline />
);
expect(component.find(`.${className}`).exists()).toBeTruthy();
expect(component.find('.level-multiline').exists()).toBeTruthy();
expect(component.exists(`.${className}`)).toBeTruthy();
expect(component.exists('.level-multiline')).toBeTruthy();
});
it('correctly renders children', () => {
let component = shallow(<MetricsWrapper />);
expect(component.find('.subtitle').exists()).toBeFalsy();
expect(component.exists('.subtitle')).toBeFalsy();
const title = 'title';
component = shallow(<MetricsWrapper title={title} />);
expect(component.find('.subtitle').exists()).toBeTruthy();
expect(component.exists('.subtitle')).toBeTruthy();
expect(component.text()).toEqual(title);
});
});

View file

@@ -2,14 +2,14 @@ import React from 'react';
import cx from 'classnames';
interface Props {
isFullHeight: boolean;
fullHeight: boolean;
}
const PageLoader: React.FC<Partial<Props>> = ({ isFullHeight = true }) => (
const PageLoader: React.FC<Partial<Props>> = ({ fullHeight }) => (
<section
className={cx(
'hero',
isFullHeight ? 'is-fullheight-with-navbar' : 'is-halfheight'
fullHeight ? 'is-fullheight-with-navbar' : 'is-halfheight'
)}
>
<div

View file

@@ -4,7 +4,18 @@ import PageLoader from '../PageLoader';
describe('PageLoader', () => {
it('matches the snapshot', () => {
const component = mount(<PageLoader />);
expect(component).toMatchSnapshot();
expect(mount(<PageLoader />)).toMatchSnapshot();
});
it('renders half-height page loader by default', () => {
const wrapper = mount(<PageLoader />);
expect(wrapper.exists('.hero.is-halfheight')).toBeTruthy();
expect(wrapper.exists('.hero.is-fullheight-with-navbar')).toBeFalsy();
});
it('renders fullheight page loader', () => {
const wrapper = mount(<PageLoader fullHeight />);
expect(wrapper.exists('.hero.is-halfheight')).toBeFalsy();
expect(wrapper.exists('.hero.is-fullheight-with-navbar')).toBeTruthy();
});
});

View file

@@ -3,7 +3,7 @@
exports[`PageLoader matches the snapshot 1`] = `
<PageLoader>
<section
className="hero is-fullheight-with-navbar"
className="hero is-halfheight"
>
<div
className="hero-body has-text-centered"

View file

@@ -0,0 +1,8 @@
import React from 'react';
const initialValue: { isReadOnly: boolean } = {
isReadOnly: false,
};
const ClusterContext = React.createContext(initialValue);
export default ClusterContext;
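A sketch of how this context is presumably wired up: a provider component near the cluster routes supplies the flag, and consumers such as Details and List above read it with React.useContext(ClusterContext). The ClusterPage name below is illustrative, not from this diff:

import React from 'react';
import ClusterContext from 'components/contexts/ClusterContext';

interface ClusterPageProps {
  isReadOnly: boolean;
  children: React.ReactNode;
}

// Provides the readonly flag to every topic/schema view rendered below it
const ClusterPage: React.FC<ClusterPageProps> = ({ isReadOnly, children }) => (
  <ClusterContext.Provider value={{ isReadOnly }}>
    {children}
  </ClusterContext.Provider>
);

export default ClusterPage;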

View file

@@ -1,4 +1,4 @@
import { ClusterStats, NewSchemaSubject } from 'generated-sources';
import { ClusterStats, NewSchemaSubject, SchemaType } from 'generated-sources';
export const clusterStats: ClusterStats = {
brokerCount: 1,
@@ -13,6 +13,8 @@ export const clusterStats: ClusterStats = {
};
export const schemaPayload: NewSchemaSubject = {
subject: 'NewSchema',
schema:
'{"type":"record","name":"MyRecord1","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
schemaType: SchemaType.JSON,
};

View file

@@ -108,11 +108,11 @@ describe('Thunks', () => {
describe('createSchema', () => {
it('creates POST_SCHEMA__SUCCESS when posting new schema', async () => {
fetchMock.postOnce(`/api/clusters/${clusterName}/schemas/${subject}`, {
fetchMock.postOnce(`/api/clusters/${clusterName}/schemas`, {
body: schemaFixtures.schemaVersionsPayload[0],
});
await store.dispatch(
thunks.createSchema(clusterName, subject, fixtures.schemaPayload)
thunks.createSchema(clusterName, fixtures.schemaPayload)
);
expect(store.getActions()).toEqual([
actions.createSchemaAction.request(),
@ -122,19 +122,19 @@ describe('Thunks', () => {
]);
});
// it('creates POST_SCHEMA__FAILURE when posting new schema', async () => {
// fetchMock.postOnce(
// `/api/clusters/${clusterName}/schemas/${subject}`,
// 404
// );
// await store.dispatch(
// thunks.createSchema(clusterName, subject, fixtures.schemaPayload)
// );
// expect(store.getActions()).toEqual([
// actions.createSchemaAction.request(),
// actions.createSchemaAction.failure(),
// ]);
// expect(store.getActions()).toThrow();
// });
it('creates POST_SCHEMA__FAILURE when posting new schema', async () => {
fetchMock.postOnce(`/api/clusters/${clusterName}/schemas`, 404);
try {
await store.dispatch(
thunks.createSchema(clusterName, fixtures.schemaPayload)
);
} catch (error) {
expect(error.status).toEqual(404);
expect(store.getActions()).toEqual([
actions.createSchemaAction.request(),
actions.createSchemaAction.failure(),
]);
}
});
});
});

View file

@@ -285,14 +285,12 @@ export const fetchSchemaVersions = (
export const createSchema = (
clusterName: ClusterName,
subject: SchemaName,
newSchemaSubject: NewSchemaSubject
): PromiseThunkResult => async (dispatch) => {
dispatch(actions.createSchemaAction.request());
try {
const schema: SchemaSubject = await apiClient.createNewSchema({
clusterName,
subject,
newSchemaSubject,
});
dispatch(actions.createSchemaAction.success(schema));

View file

@@ -24,3 +24,10 @@ export const getOnlineClusters = createSelector(getClusterList, (clusters) =>
export const getOfflineClusters = createSelector(getClusterList, (clusters) =>
clusters.filter(({ status }) => status === ServerStatus.OFFLINE)
);
export const getClustersReadonlyStatus = (clusterName: string) =>
createSelector(
getClusterList,
(clusters): boolean =>
clusters.find(({ name }) => name === clusterName)?.readOnly || false
);
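One plausible consumer of this selector (a sketch only — the container, the ClusterPage component it connects, and the selectors import path are illustrative) maps the per-cluster readonly status into props, which can then feed the ClusterContext provider shown earlier:

import { connect } from 'react-redux';
import { RootState } from 'redux/interfaces';
import { getClustersReadonlyStatus } from 'redux/reducers/clusters/selectors';
import ClusterPage from './ClusterPage';

interface OwnProps {
  clusterName: string;
}

// getClustersReadonlyStatus(clusterName) builds the memoized selector,
// which is then evaluated against the root state
const mapStateToProps = (state: RootState, { clusterName }: OwnProps) => ({
  isReadOnly: getClustersReadonlyStatus(clusterName)(state),
});

export default connect(mapStateToProps)(ClusterPage);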

View file

@@ -12,6 +12,7 @@ Object {
"compatibilityLevel": "BACKWARD",
"id": 2,
"schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord2\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
"schemaType": "JSON",
"subject": "test",
"version": "2",
},
@@ -19,6 +20,7 @@
"compatibilityLevel": "BACKWARD",
"id": 4,
"schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord4\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
"schemaType": "JSON",
"subject": "test2",
"version": "3",
},
@@ -26,6 +28,7 @@
"compatibilityLevel": "BACKWARD",
"id": 5,
"schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
"schemaType": "JSON",
"subject": "test3",
"version": "1",
},
@@ -43,6 +46,7 @@
"compatibilityLevel": "BACKWARD",
"id": 1,
"schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
"schemaType": "JSON",
"subject": "test",
"version": "1",
},
@@ -50,6 +54,7 @@
"compatibilityLevel": "BACKWARD",
"id": 2,
"schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord2\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
"schemaType": "JSON",
"subject": "test",
"version": "2",
},
@@ -67,6 +72,7 @@
"compatibilityLevel": "BACKWARD",
"id": 1,
"schema": "{\\"type\\":\\"record\\",\\"name\\":\\"MyRecord1\\",\\"namespace\\":\\"com.mycompany\\",\\"fields\\":[{\\"name\\":\\"id\\",\\"type\\":\\"long\\"}]}",
"schemaType": "JSON",
"subject": "test",
"version": "1",
},

View file

@@ -1,5 +1,5 @@
import { SchemasState } from 'redux/interfaces';
import { SchemaSubject } from 'generated-sources';
import { SchemaSubject, SchemaType } from 'generated-sources';
export const initialState: SchemasState = {
byName: {},
@@ -15,6 +15,7 @@ export const clusterSchemasPayload: SchemaSubject[] = [
schema:
'{"type":"record","name":"MyRecord4","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
compatibilityLevel: 'BACKWARD',
schemaType: SchemaType.JSON,
},
{
subject: 'test3',
@@ -23,6 +24,7 @@ export const clusterSchemasPayload: SchemaSubject[] = [
schema:
'{"type":"record","name":"MyRecord","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
compatibilityLevel: 'BACKWARD',
schemaType: SchemaType.JSON,
},
{
subject: 'test',
@@ -31,6 +33,7 @@ export const clusterSchemasPayload: SchemaSubject[] = [
schema:
'{"type":"record","name":"MyRecord2","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
compatibilityLevel: 'BACKWARD',
schemaType: SchemaType.JSON,
},
];
@@ -42,6 +45,7 @@ export const schemaVersionsPayload: SchemaSubject[] = [
schema:
'{"type":"record","name":"MyRecord1","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
compatibilityLevel: 'BACKWARD',
schemaType: SchemaType.JSON,
},
{
subject: 'test',
@@ -50,6 +54,7 @@ export const schemaVersionsPayload: SchemaSubject[] = [
schema:
'{"type":"record","name":"MyRecord2","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
compatibilityLevel: 'BACKWARD',
schemaType: SchemaType.JSON,
},
];
@@ -60,6 +65,7 @@ export const newSchemaPayload: SchemaSubject = {
schema:
'{"type":"record","name":"MyRecord4","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
compatibilityLevel: 'BACKWARD',
schemaType: SchemaType.JSON,
};
export const clusterSchemasPayloadWithNewSchema: SchemaSubject[] = [
@@ -70,6 +76,7 @@ export const clusterSchemasPayloadWithNewSchema: SchemaSubject[] = [
schema:
'{"type":"record","name":"MyRecord4","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
compatibilityLevel: 'BACKWARD',
schemaType: SchemaType.JSON,
},
{
subject: 'test3',
@@ -78,6 +85,7 @@ export const clusterSchemasPayloadWithNewSchema: SchemaSubject[] = [
schema:
'{"type":"record","name":"MyRecord","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
compatibilityLevel: 'BACKWARD',
schemaType: SchemaType.JSON,
},
{
subject: 'test',
@@ -86,6 +94,7 @@ export const clusterSchemasPayloadWithNewSchema: SchemaSubject[] = [
schema:
'{"type":"record","name":"MyRecord2","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
compatibilityLevel: 'BACKWARD',
schemaType: SchemaType.JSON,
},
{
subject: 'test4',
@@ -94,5 +103,6 @@ export const clusterSchemasPayloadWithNewSchema: SchemaSubject[] = [
schema:
'{"type":"record","name":"MyRecord4","namespace":"com.mycompany","fields":[{"name":"id","type":"long"}]}',
compatibilityLevel: 'BACKWARD',
schemaType: SchemaType.JSON,
},
];